repo
stringlengths
6
65
file_url
stringlengths
81
311
file_path
stringlengths
6
227
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 15:31:58
2026-01-04 20:25:31
truncated
bool
2 classes
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/gui_rounding.rs
crates/emath/src/gui_rounding.rs
/// We (sometimes) round sizes and coordinates to an even multiple of this value. /// /// This is only used for rounding _logical UI points_, used for widget coordinates and sizes. /// When rendering, you may want to round to an integer multiple of the physical _pixels_ instead, /// using [`GuiRounding::round_to_pixels`]. /// /// See [`GuiRounding::round_ui`] for more information. /// /// This constant has to be a (negative) power of two so that it can be represented exactly /// by a floating point number. /// /// If we pick too large a value (e.g. 1 or 1/2), then we get judder during scrolling and animations. /// If we pick too small a value (e.g. 1/4096), we run the risk of rounding errors again. /// /// `f32` has 23 bits of mantissa, so if we use e.g. 1/8 as the rounding factor, /// we can represent all numbers up to 2^20 exactly, which is plenty /// (to my knowledge there are no displays that are a million pixels wide). pub const GUI_ROUNDING: f32 = 1.0 / 32.0; /// Trait for rounding coordinates and sizes to align with either . /// /// See [`GuiRounding::round_ui`] for more information. pub trait GuiRounding { /// Rounds floating point numbers to an even multiple of the GUI rounding factor, [`crate::GUI_ROUNDING`]. /// /// Use this for widget coordinates and sizes. /// /// Rounding sizes and positions prevent rounding errors when doing sizing calculations. /// We don't round to integers, because that would be too coarse (causing visible juddering when scrolling, for instance). /// Instead we round to an even multiple of [`GUI_ROUNDING`]. fn round_ui(self) -> Self; /// Like [`Self::round_ui`], but always rounds towards negative infinity. fn floor_ui(self) -> Self; /// Round a size or position to an even multiple of the physical pixel size. /// /// This can be useful for crisp rendering. /// /// The `self` should be in coordinates of _logical UI points_. /// The argument `pixels_per_point` is the number of _physical pixels_ per logical UI point. 
/// For instance, on a high-DPI screen, `pixels_per_point` could be `2.0`. fn round_to_pixels(self, pixels_per_point: f32) -> Self; /// Will round the position to be in the center of a pixel. /// /// The pixel size is `1.0 / pixels_per_point`. /// /// So if `pixels_per_point = 2` (i.e. `pixel size = 0.5`), /// then the position will be rounded to the closest of `…, 0.25, 0.75, 1.25, …`. /// /// This is useful, for instance, when picking the center of a line that is one pixel wide. fn round_to_pixel_center(self, pixels_per_point: f32) -> Self; } impl GuiRounding for f32 { #[inline] fn round_ui(self) -> Self { (self / GUI_ROUNDING).round() * GUI_ROUNDING } #[inline] fn floor_ui(self) -> Self { (self / GUI_ROUNDING).floor() * GUI_ROUNDING } #[inline] fn round_to_pixels(self, pixels_per_point: f32) -> Self { (self * pixels_per_point).round() / pixels_per_point } #[inline] fn round_to_pixel_center(self, pixels_per_point: f32) -> Self { ((self * pixels_per_point - 0.5).round() + 0.5) / pixels_per_point } } impl GuiRounding for f64 { #[inline] fn round_ui(self) -> Self { (self / GUI_ROUNDING as Self).round() * GUI_ROUNDING as Self } #[inline] fn floor_ui(self) -> Self { (self / GUI_ROUNDING as Self).floor() * GUI_ROUNDING as Self } #[inline] fn round_to_pixels(self, pixels_per_point: f32) -> Self { (self * pixels_per_point as Self).round() / pixels_per_point as Self } #[inline] fn round_to_pixel_center(self, pixels_per_point: f32) -> Self { ((self * pixels_per_point as Self - 0.5).round() + 0.5) / pixels_per_point as Self } } impl GuiRounding for crate::Vec2 { #[inline] fn round_ui(self) -> Self { Self::new(self.x.round_ui(), self.y.round_ui()) } #[inline] fn floor_ui(self) -> Self { Self::new(self.x.floor_ui(), self.y.floor_ui()) } #[inline] fn round_to_pixels(self, pixels_per_point: f32) -> Self { Self::new( self.x.round_to_pixels(pixels_per_point), self.y.round_to_pixels(pixels_per_point), ) } // This doesn't really make sense for a Vec2, but 🤷‍♂️ #[inline] fn 
round_to_pixel_center(self, pixels_per_point: f32) -> Self { Self::new( self.x.round_to_pixel_center(pixels_per_point), self.y.round_to_pixel_center(pixels_per_point), ) } } impl GuiRounding for crate::Pos2 { #[inline] fn round_ui(self) -> Self { Self::new(self.x.round_ui(), self.y.round_ui()) } #[inline] fn floor_ui(self) -> Self { Self::new(self.x.floor_ui(), self.y.floor_ui()) } #[inline] fn round_to_pixels(self, pixels_per_point: f32) -> Self { Self::new( self.x.round_to_pixels(pixels_per_point), self.y.round_to_pixels(pixels_per_point), ) } #[inline] fn round_to_pixel_center(self, pixels_per_point: f32) -> Self { Self::new( self.x.round_to_pixel_center(pixels_per_point), self.y.round_to_pixel_center(pixels_per_point), ) } } impl GuiRounding for crate::Rect { /// Rounded so that two adjacent rects that tile perfectly /// will continue to tile perfectly. #[inline] fn round_ui(self) -> Self { Self::from_min_max(self.min.round_ui(), self.max.round_ui()) } /// Rounded so that two adjacent rects that tile perfectly /// will continue to tile perfectly. #[inline] fn floor_ui(self) -> Self { Self::from_min_max(self.min.floor_ui(), self.max.floor_ui()) } /// Rounded so that two adjacent rects that tile perfectly /// will continue to tile perfectly. #[inline] fn round_to_pixels(self, pixels_per_point: f32) -> Self { Self::from_min_max( self.min.round_to_pixels(pixels_per_point), self.max.round_to_pixels(pixels_per_point), ) } /// Rounded so that two adjacent rects that tile perfectly /// will continue to tile perfectly. 
#[inline] fn round_to_pixel_center(self, pixels_per_point: f32) -> Self { Self::from_min_max( self.min.round_to_pixel_center(pixels_per_point), self.max.round_to_pixel_center(pixels_per_point), ) } } #[test] fn test_gui_rounding() { assert_eq!(0.0_f32.round_ui(), 0.0); assert_eq!((GUI_ROUNDING * 1.11).round_ui(), GUI_ROUNDING); assert_eq!((-GUI_ROUNDING * 1.11).round_ui(), -GUI_ROUNDING); assert_eq!(f32::NEG_INFINITY.round_ui(), f32::NEG_INFINITY); assert_eq!(f32::INFINITY.round_ui(), f32::INFINITY); assert_eq!(0.17_f32.round_to_pixel_center(2.0), 0.25); }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/lib.rs
crates/emath/src/lib.rs
//! Opinionated 2D math library for building GUIs. //! //! Includes vectors, positions, rectangles etc. //! //! Conventions (unless otherwise specified): //! //! * All angles are in radians //! * X+ is right and Y+ is down. //! * (0,0) is left top. //! * Dimension order is always `x y` //! //! ## Integrating with other math libraries. //! `emath` does not strive to become a general purpose or all-powerful math library. //! //! For that, use something else ([`glam`](https://docs.rs/glam), [`nalgebra`](https://docs.rs/nalgebra), …) //! and enable the `mint` feature flag in `emath` to enable implicit conversion to/from `emath`. //! //! ## Feature flags #![cfg_attr(feature = "document-features", doc = document_features::document_features!())] //! #![expect(clippy::float_cmp)] use std::ops::{Add, Div, Mul, RangeInclusive, Sub}; // ---------------------------------------------------------------------------- pub mod align; pub mod easing; mod gui_rounding; mod history; mod numeric; mod ordered_float; mod pos2; mod range; mod rect; mod rect_align; mod rect_transform; mod rot2; pub mod smart_aim; mod ts_transform; mod vec2; mod vec2b; pub use self::{ align::{Align, Align2}, gui_rounding::{GUI_ROUNDING, GuiRounding}, history::History, numeric::*, ordered_float::*, pos2::*, range::Rangef, rect::*, rect_align::RectAlign, rect_transform::*, rot2::*, ts_transform::*, vec2::*, vec2b::*, }; // ---------------------------------------------------------------------------- /// Helper trait to implement [`lerp`] and [`remap`]. pub trait One { const ONE: Self; } impl One for f32 { const ONE: Self = 1.0; } impl One for f64 { const ONE: Self = 1.0; } /// Helper trait to implement [`lerp`] and [`remap`]. 
pub trait Real: Copy + PartialEq + PartialOrd + One + Add<Self, Output = Self> + Sub<Self, Output = Self> + Mul<Self, Output = Self> + Div<Self, Output = Self> { } impl Real for f32 {} impl Real for f64 {} // ---------------------------------------------------------------------------- /// Linear interpolation. /// /// ``` /// # use emath::lerp; /// assert_eq!(lerp(1.0..=5.0, 0.0), 1.0); /// assert_eq!(lerp(1.0..=5.0, 0.5), 3.0); /// assert_eq!(lerp(1.0..=5.0, 1.0), 5.0); /// assert_eq!(lerp(1.0..=5.0, 2.0), 9.0); /// ``` #[inline(always)] pub fn lerp<R, T>(range: impl Into<RangeInclusive<R>>, t: T) -> R where T: Real + Mul<R, Output = R>, R: Copy + Add<R, Output = R>, { let range = range.into(); (T::ONE - t) * *range.start() + t * *range.end() } /// This is a faster version of [`f32::midpoint`] which doesn't handle overflow. /// /// ``` /// # use emath::fast_midpoint; /// assert_eq!(fast_midpoint(1.0, 5.0), 3.0); /// ``` #[inline(always)] pub fn fast_midpoint<R>(a: R, b: R) -> R where R: Copy + Add<R, Output = R> + Div<R, Output = R> + One, { let two = R::ONE + R::ONE; (a + b) / two } /// Where in the range is this value? Returns 0-1 if within the range. /// /// Returns <0 if before and >1 if after. /// /// Returns `None` if the input range is zero-width. 
/// /// ``` /// # use emath::inverse_lerp; /// assert_eq!(inverse_lerp(1.0..=5.0, 1.0), Some(0.0)); /// assert_eq!(inverse_lerp(1.0..=5.0, 3.0), Some(0.5)); /// assert_eq!(inverse_lerp(1.0..=5.0, 5.0), Some(1.0)); /// assert_eq!(inverse_lerp(1.0..=5.0, 9.0), Some(2.0)); /// assert_eq!(inverse_lerp(1.0..=1.0, 3.0), None); /// ``` #[inline] pub fn inverse_lerp<R>(range: RangeInclusive<R>, value: R) -> Option<R> where R: Copy + PartialEq + Sub<R, Output = R> + Div<R, Output = R>, { let min = *range.start(); let max = *range.end(); if min == max { None } else { Some((value - min) / (max - min)) } } /// Linearly remap a value from one range to another, /// so that when `x == from.start()` returns `to.start()` /// and when `x == from.end()` returns `to.end()`. pub fn remap<T>(x: T, from: impl Into<RangeInclusive<T>>, to: impl Into<RangeInclusive<T>>) -> T where T: Real, { let from = from.into(); let to = to.into(); debug_assert!( from.start() != from.end(), "from.start() and from.end() should not be equal" ); let t = (x - *from.start()) / (*from.end() - *from.start()); lerp(to, t) } /// Like [`remap`], but also clamps the value so that the returned value is always in the `to` range. pub fn remap_clamp<T>( x: T, from: impl Into<RangeInclusive<T>>, to: impl Into<RangeInclusive<T>>, ) -> T where T: Real, { let from = from.into(); let to = to.into(); if from.end() < from.start() { return remap_clamp(x, *from.end()..=*from.start(), *to.end()..=*to.start()); } if x <= *from.start() { *to.start() } else if *from.end() <= x { *to.end() } else { debug_assert!( from.start() != from.end(), "from.start() and from.end() should not be equal" ); let t = (x - *from.start()) / (*from.end() - *from.start()); // Ensure no numerical inaccuracies sneak in: if T::ONE <= t { *to.end() } else { lerp(to, t) } } } /// Round a value to the given number of decimal places. pub fn round_to_decimals(value: f64, decimal_places: usize) -> f64 { // This is a stupid way of doing this, but stupid works. 
format!("{value:.decimal_places$}").parse().unwrap_or(value) } pub fn format_with_minimum_decimals(value: f64, decimals: usize) -> String { format_with_decimals_in_range(value, decimals..=6) } /// Use as few decimals as possible to show the value accurately, but within the given range. /// /// Decimals are counted after the decimal point. pub fn format_with_decimals_in_range(value: f64, decimal_range: RangeInclusive<usize>) -> String { let min_decimals = *decimal_range.start(); let max_decimals = *decimal_range.end(); debug_assert!( min_decimals <= max_decimals, "min_decimals should be <= max_decimals, but got min_decimals: {min_decimals}, max_decimals: {max_decimals}" ); debug_assert!( max_decimals < 100, "max_decimals should be < 100, but got {max_decimals}" ); let max_decimals = max_decimals.min(16); let min_decimals = min_decimals.min(max_decimals); if min_decimals < max_decimals { // Ugly/slow way of doing this. TODO(emilk): clean up precision. for decimals in min_decimals..max_decimals { let text = format!("{value:.decimals$}"); let epsilon = 16.0 * f32::EPSILON; // margin large enough to handle most peoples round-tripping needs if let Ok(parsed_value) = text.parse::<f32>() && almost_equal(parsed_value, value as f32, epsilon) { // Enough precision to show the value accurately - good! return text; } } // The value has more precision than we expected. // Probably the value was set not by the slider, but from outside. // In any case: show the full value } format!("{value:.max_decimals$}") } /// Return true when arguments are the same within some rounding error. /// /// For instance `almost_equal(x, x.to_degrees().to_radians(), f32::EPSILON)` should hold true for all x. /// The `epsilon` can be `f32::EPSILON` to handle simple transforms (like degrees -> radians) /// but should be higher to handle more complex transformations. 
pub fn almost_equal(a: f32, b: f32, epsilon: f32) -> bool { if a == b { true // handle infinites } else { let abs_max = a.abs().max(b.abs()); abs_max <= epsilon || ((a - b).abs() / abs_max) <= epsilon } } #[expect(clippy::approx_constant)] #[test] fn test_format() { assert_eq!(format_with_minimum_decimals(1_234_567.0, 0), "1234567"); assert_eq!(format_with_minimum_decimals(1_234_567.0, 1), "1234567.0"); assert_eq!(format_with_minimum_decimals(3.14, 2), "3.14"); assert_eq!(format_with_minimum_decimals(3.14, 3), "3.140"); assert_eq!( format_with_minimum_decimals(std::f64::consts::PI, 2), "3.14159" ); } #[test] fn test_almost_equal() { for &x in &[ 0.0_f32, f32::MIN_POSITIVE, 1e-20, 1e-10, f32::EPSILON, 0.1, 0.99, 1.0, 1.001, 1e10, f32::MAX / 100.0, // f32::MAX, // overflows in rad<->deg test f32::INFINITY, ] { for &x in &[-x, x] { for roundtrip in &[ |x: f32| x.to_degrees().to_radians(), |x: f32| x.to_radians().to_degrees(), ] { let epsilon = f32::EPSILON; assert!( almost_equal(x, roundtrip(x), epsilon), "{} vs {}", x, roundtrip(x) ); } } } } #[test] fn test_remap() { assert_eq!(remap_clamp(1.0, 0.0..=1.0, 0.0..=16.0), 16.0); assert_eq!(remap_clamp(1.0, 1.0..=0.0, 16.0..=0.0), 16.0); assert_eq!(remap_clamp(0.5, 1.0..=0.0, 16.0..=0.0), 8.0); } // ---------------------------------------------------------------------------- /// Extends `f32`, [`Vec2`] etc with `at_least` and `at_most` as aliases for `max` and `min`. pub trait NumExt { /// More readable version of `self.max(lower_limit)` #[must_use] fn at_least(self, lower_limit: Self) -> Self; /// More readable version of `self.min(upper_limit)` #[must_use] fn at_most(self, upper_limit: Self) -> Self; } macro_rules! 
impl_num_ext { ($t: ty) => { impl NumExt for $t { #[inline(always)] fn at_least(self, lower_limit: Self) -> Self { self.max(lower_limit) } #[inline(always)] fn at_most(self, upper_limit: Self) -> Self { self.min(upper_limit) } } }; } impl_num_ext!(u8); impl_num_ext!(u16); impl_num_ext!(u32); impl_num_ext!(u64); impl_num_ext!(u128); impl_num_ext!(usize); impl_num_ext!(i8); impl_num_ext!(i16); impl_num_ext!(i32); impl_num_ext!(i64); impl_num_ext!(i128); impl_num_ext!(isize); impl_num_ext!(f32); impl_num_ext!(f64); impl_num_ext!(Vec2); impl_num_ext!(Pos2); // ---------------------------------------------------------------------------- /// Wrap angle to `[-PI, PI]` range. pub fn normalized_angle(mut angle: f32) -> f32 { use std::f32::consts::{PI, TAU}; angle %= TAU; if angle > PI { angle -= TAU; } else if angle < -PI { angle += TAU; } angle } #[test] fn test_normalized_angle() { macro_rules! almost_eq { ($left: expr, $right: expr) => { let left = $left; let right = $right; assert!((left - right).abs() < 1e-6, "{} != {}", left, right); }; } use std::f32::consts::TAU; almost_eq!(normalized_angle(-3.0 * TAU), 0.0); almost_eq!(normalized_angle(-2.3 * TAU), -0.3 * TAU); almost_eq!(normalized_angle(-TAU), 0.0); almost_eq!(normalized_angle(0.0), 0.0); almost_eq!(normalized_angle(TAU), 0.0); almost_eq!(normalized_angle(2.7 * TAU), -0.3 * TAU); } // ---------------------------------------------------------------------------- /// Calculate a lerp-factor for exponential smoothing using a time step. 
/// /// * `exponential_smooth_factor(0.90, 1.0, dt)`: reach 90% in 1.0 seconds /// * `exponential_smooth_factor(0.50, 0.2, dt)`: reach 50% in 0.2 seconds /// /// Example: /// ``` /// # use emath::{lerp, exponential_smooth_factor}; /// # let (mut smoothed_value, target_value, dt) = (0.0_f32, 1.0_f32, 0.01_f32); /// let t = exponential_smooth_factor(0.90, 0.2, dt); // reach 90% in 0.2 seconds /// smoothed_value = lerp(smoothed_value..=target_value, t); /// ``` pub fn exponential_smooth_factor( reach_this_fraction: f32, in_this_many_seconds: f32, dt: f32, ) -> f32 { 1.0 - (1.0 - reach_this_fraction).powf(dt / in_this_many_seconds) } /// If you have a value animating over time, /// how much towards its target do you need to move it this frame? /// /// You only need to store the start time and target value in order to animate using this function. /// /// ``` rs /// struct Animation { /// current_value: f32, /// /// animation_time_span: (f64, f64), /// target_value: f32, /// } /// /// impl Animation { /// fn update(&mut self, now: f64, dt: f32) { /// let t = interpolation_factor(self.animation_time_span, now, dt, ease_in_ease_out); /// self.current_value = emath::lerp(self.current_value..=self.target_value, t); /// } /// } /// ``` pub fn interpolation_factor( (start_time, end_time): (f64, f64), current_time: f64, dt: f32, easing: impl Fn(f32) -> f32, ) -> f32 { let animation_duration = (end_time - start_time) as f32; let prev_time = current_time - dt as f64; let prev_t = easing((prev_time - start_time) as f32 / animation_duration); let end_t = easing((current_time - start_time) as f32 / animation_duration); if end_t < 1.0 { (end_t - prev_t) / (1.0 - prev_t) } else { 1.0 } } /// Ease in, ease out. /// /// `f(0) = 0, f'(0) = 0, f(1) = 1, f'(1) = 0`. #[inline] pub fn ease_in_ease_out(t: f32) -> f32 { let t = t.clamp(0.0, 1.0); (3.0 * t * t - 2.0 * t * t * t).clamp(0.0, 1.0) }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/range.rs
crates/emath/src/range.rs
use std::ops::{RangeFrom, RangeFull, RangeInclusive, RangeToInclusive};

/// Inclusive range of floats, i.e. `min..=max`, but more ergonomic than [`RangeInclusive`].
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
#[cfg_attr(feature = "bytemuck", derive(bytemuck::Pod, bytemuck::Zeroable))]
pub struct Rangef {
    pub min: f32,
    pub max: f32,
}

impl Rangef {
    /// Infinite range that contains everything, from -∞ to +∞, inclusive.
    pub const EVERYTHING: Self = Self {
        min: f32::NEG_INFINITY,
        max: f32::INFINITY,
    };

    /// The inverse of [`Self::EVERYTHING`]: stretches from positive infinity to negative infinity.
    /// Contains nothing.
    pub const NOTHING: Self = Self {
        min: f32::INFINITY,
        max: f32::NEG_INFINITY,
    };

    /// An invalid [`Rangef`] filled with [`f32::NAN`].
    pub const NAN: Self = Self {
        min: f32::NAN,
        max: f32::NAN,
    };

    #[inline]
    pub fn new(min: f32, max: f32) -> Self {
        Self { min, max }
    }

    /// A zero-width range where `min == max == min_and_max`.
    #[inline]
    pub fn point(min_and_max: f32) -> Self {
        Self {
            min: min_and_max,
            max: min_and_max,
        }
    }

    /// The length of the range, i.e. `max - min`.
    #[inline]
    pub fn span(self) -> f32 {
        self.max - self.min
    }

    /// The center of the range
    #[inline]
    pub fn center(self) -> f32 {
        0.5 * (self.min + self.max)
    }

    #[inline]
    #[must_use]
    pub fn contains(self, x: f32) -> bool {
        self.min <= x && x <= self.max
    }

    /// Equivalent to `x.clamp(min, max)`
    #[inline]
    #[must_use]
    pub fn clamp(self, x: f32) -> f32 {
        x.clamp(self.min, self.max)
    }

    /// Flip `min` and `max` if needed, so that `min <= max` after.
    #[inline]
    pub fn as_positive(self) -> Self {
        Self {
            min: self.min.min(self.max),
            max: self.min.max(self.max),
        }
    }

    /// Shrink by this much on each side, keeping the center
    #[inline]
    #[must_use]
    pub fn shrink(self, amnt: f32) -> Self {
        Self {
            min: self.min + amnt,
            max: self.max - amnt,
        }
    }

    /// Expand by this much on each side, keeping the center
    #[inline]
    #[must_use]
    pub fn expand(self, amnt: f32) -> Self {
        Self {
            min: self.min - amnt,
            max: self.max + amnt,
        }
    }

    /// Flip the min and the max
    #[inline]
    #[must_use]
    pub fn flip(self) -> Self {
        Self {
            min: self.max,
            max: self.min,
        }
    }

    /// The overlap of two ranges, i.e. the range that is contained by both.
    ///
    /// If the ranges do not overlap, returns a range with `span() < 0.0`.
    ///
    /// ```
    /// # use emath::Rangef;
    /// assert_eq!(Rangef::new(0.0, 10.0).intersection(Rangef::new(5.0, 15.0)), Rangef::new(5.0, 10.0));
    /// assert_eq!(Rangef::new(0.0, 10.0).intersection(Rangef::new(10.0, 20.0)), Rangef::new(10.0, 10.0));
    /// assert!(Rangef::new(0.0, 10.0).intersection(Rangef::new(20.0, 30.0)).span() < 0.0);
    /// ```
    #[inline]
    #[must_use]
    pub fn intersection(self, other: Self) -> Self {
        Self {
            min: self.min.max(other.min),
            max: self.max.min(other.max),
        }
    }

    /// Do the two ranges intersect?
    ///
    /// ```
    /// # use emath::Rangef;
    /// assert!(Rangef::new(0.0, 10.0).intersects(Rangef::new(5.0, 15.0)));
    /// assert!(Rangef::new(0.0, 10.0).intersects(Rangef::new(5.0, 6.0)));
    /// assert!(Rangef::new(0.0, 10.0).intersects(Rangef::new(10.0, 20.0)));
    /// assert!(!Rangef::new(0.0, 10.0).intersects(Rangef::new(20.0, 30.0)));
    /// ```
    #[inline]
    #[must_use]
    pub fn intersects(self, other: Self) -> bool {
        other.min <= self.max && self.min <= other.max
    }
}

impl From<Rangef> for RangeInclusive<f32> {
    #[inline]
    fn from(Rangef { min, max }: Rangef) -> Self {
        min..=max
    }
}

impl From<&Rangef> for RangeInclusive<f32> {
    #[inline]
    fn from(&Rangef { min, max }: &Rangef) -> Self {
        min..=max
    }
}

impl From<RangeInclusive<f32>> for Rangef {
    #[inline]
    fn from(range: RangeInclusive<f32>) -> Self {
        Self::new(*range.start(), *range.end())
    }
}

impl From<&RangeInclusive<f32>> for Rangef {
    #[inline]
    fn from(range: &RangeInclusive<f32>) -> Self {
        Self::new(*range.start(), *range.end())
    }
}

impl From<RangeFrom<f32>> for Rangef {
    #[inline]
    fn from(range: RangeFrom<f32>) -> Self {
        Self::new(range.start, f32::INFINITY)
    }
}

impl From<&RangeFrom<f32>> for Rangef {
    #[inline]
    fn from(range: &RangeFrom<f32>) -> Self {
        Self::new(range.start, f32::INFINITY)
    }
}

impl From<RangeFull> for Rangef {
    #[inline]
    fn from(_: RangeFull) -> Self {
        Self::new(f32::NEG_INFINITY, f32::INFINITY)
    }
}

impl From<&RangeFull> for Rangef {
    #[inline]
    fn from(_: &RangeFull) -> Self {
        Self::new(f32::NEG_INFINITY, f32::INFINITY)
    }
}

impl From<RangeToInclusive<f32>> for Rangef {
    #[inline]
    fn from(range: RangeToInclusive<f32>) -> Self {
        Self::new(f32::NEG_INFINITY, range.end)
    }
}

impl PartialEq<RangeInclusive<f32>> for Rangef {
    #[inline]
    fn eq(&self, other: &RangeInclusive<f32>) -> bool {
        self.min == *other.start() && self.max == *other.end()
    }
}

impl PartialEq<Rangef> for RangeInclusive<f32> {
    #[inline]
    fn eq(&self, other: &Rangef) -> bool {
        *self.start() == other.min && *self.end() == other.max
    }
}
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/vec2b.rs
crates/emath/src/vec2b.rs
use crate::Vec2; /// Two bools, one for each axis (X and Y). #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub struct Vec2b { pub x: bool, pub y: bool, } impl Vec2b { pub const FALSE: Self = Self { x: false, y: false }; pub const TRUE: Self = Self { x: true, y: true }; #[inline] pub fn new(x: bool, y: bool) -> Self { Self { x, y } } #[inline] pub fn any(&self) -> bool { self.x || self.y } /// Are both `x` and `y` true? #[inline] pub fn all(&self) -> bool { self.x && self.y } #[inline] pub fn and(&self, other: impl Into<Self>) -> Self { let other = other.into(); Self { x: self.x && other.x, y: self.y && other.y, } } #[inline] pub fn or(&self, other: impl Into<Self>) -> Self { let other = other.into(); Self { x: self.x || other.x, y: self.y || other.y, } } /// Convert to a float `Vec2` where the components are 1.0 for `true` and 0.0 for `false`. #[inline] pub fn to_vec2(self) -> Vec2 { Vec2::new(self.x.into(), self.y.into()) } } impl From<bool> for Vec2b { #[inline] fn from(val: bool) -> Self { Self { x: val, y: val } } } impl From<[bool; 2]> for Vec2b { #[inline] fn from([x, y]: [bool; 2]) -> Self { Self { x, y } } } impl std::ops::Index<usize> for Vec2b { type Output = bool; #[inline(always)] fn index(&self, index: usize) -> &bool { match index { 0 => &self.x, 1 => &self.y, _ => panic!("Vec2b index out of bounds: {index}"), } } } impl std::ops::IndexMut<usize> for Vec2b { #[inline(always)] fn index_mut(&mut self, index: usize) -> &mut bool { match index { 0 => &mut self.x, 1 => &mut self.y, _ => panic!("Vec2b index out of bounds: {index}"), } } } impl std::ops::Not for Vec2b { type Output = Self; #[inline] fn not(self) -> Self::Output { Self { x: !self.x, y: !self.y, } } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/smart_aim.rs
crates/emath/src/smart_aim.rs
//! Find "simple" numbers is some range. Used by sliders. use crate::fast_midpoint; const NUM_DECIMALS: usize = 15; /// Find the "simplest" number in a closed range [min, max], i.e. the one with the fewest decimal digits. /// /// So in the range `[0.83, 1.354]` you will get `1.0`, and for `[0.37, 0.48]` you will get `0.4`. /// This is used when dragging sliders etc to get the values that users are most likely to desire. /// This assumes a decimal centric user. pub fn best_in_range_f64(min: f64, max: f64) -> f64 { // Avoid NaN if we can: if min.is_nan() { return max; } if max.is_nan() { return min; } if max < min { return best_in_range_f64(max, min); } if min == max { return min; } if min <= 0.0 && 0.0 <= max { return 0.0; // always prefer zero } if min < 0.0 { return -best_in_range_f64(-max, -min); } debug_assert!(0.0 < min && min < max, "Logic bug"); // Prefer finite numbers: if !max.is_finite() { return min; } debug_assert!( min.is_finite() && max.is_finite(), "min: {min:?}, max: {max:?}" ); let min_exponent = min.log10(); let max_exponent = max.log10(); if min_exponent.floor() != max_exponent.floor() { // Different orders of magnitude. // Pick the geometric center of the two: let exponent = fast_midpoint(min_exponent, max_exponent); return 10.0_f64.powi(exponent.round() as i32); } if is_integer(min_exponent) { return 10.0_f64.powf(min_exponent); } if is_integer(max_exponent) { return 10.0_f64.powf(max_exponent); } // Find the proper scale, and then convert to integers: let scale = NUM_DECIMALS as i32 - max_exponent.floor() as i32 - 1; let scale_factor = 10.0_f64.powi(scale); let min_str = to_decimal_string((min * scale_factor).round() as u64); let max_str = to_decimal_string((max * scale_factor).round() as u64); // We now have two positive integers of the same length. // We want to find the first non-matching digit, // which we will call the "deciding digit". 
// Everything before it will be the same, // everything after will be zero, // and the deciding digit itself will be picked as a "smart average" // min: 12345 // max: 12780 // output: 12500 let mut ret_str = [0; NUM_DECIMALS]; for i in 0..NUM_DECIMALS { if min_str[i] == max_str[i] { ret_str[i] = min_str[i]; } else { // Found the deciding digit at index `i` let mut deciding_digit_min = min_str[i]; let deciding_digit_max = max_str[i]; debug_assert!( deciding_digit_min < deciding_digit_max, "Bug in smart aim code" ); let rest_of_min_is_zeroes = min_str[i + 1..].iter().all(|&c| c == 0); if !rest_of_min_is_zeroes { // There are more digits coming after `deciding_digit_min`, so we cannot pick it. // So the true min of what we can pick is one greater: deciding_digit_min += 1; } let deciding_digit = if deciding_digit_min == 0 { 0 } else if deciding_digit_min <= 5 && 5 <= deciding_digit_max { 5 // 5 is the roundest number in the range } else { deciding_digit_min.midpoint(deciding_digit_max) }; ret_str[i] = deciding_digit; return from_decimal_string(ret_str) as f64 / scale_factor; } } min // All digits are the same. 
Already handled earlier, but better safe than sorry } fn is_integer(f: f64) -> bool { f.round() == f } fn to_decimal_string(v: u64) -> [u8; NUM_DECIMALS] { let mut ret = [0; NUM_DECIMALS]; let mut value = v; for i in (0..NUM_DECIMALS).rev() { ret[i] = (value % 10) as u8; value /= 10; } ret } fn from_decimal_string(s: [u8; NUM_DECIMALS]) -> u64 { let mut value = 0; for &c in &s { debug_assert!(c <= 9, "Bad number"); value = value * 10 + c as u64; } value } #[expect(clippy::approx_constant)] #[test] fn test_aim() { assert_eq!(best_in_range_f64(-0.2, 0.0), 0.0, "Prefer zero"); assert_eq!(best_in_range_f64(-10_004.23, 3.14), 0.0, "Prefer zero"); assert_eq!(best_in_range_f64(-0.2, 100.0), 0.0, "Prefer zero"); assert_eq!(best_in_range_f64(0.2, 0.0), 0.0, "Prefer zero"); assert_eq!(best_in_range_f64(7.8, 17.8), 10.0); assert_eq!(best_in_range_f64(99.0, 300.0), 100.0); assert_eq!(best_in_range_f64(-99.0, -300.0), -100.0); assert_eq!(best_in_range_f64(0.4, 0.9), 0.5, "Prefer ending on 5"); assert_eq!(best_in_range_f64(14.1, 19.99), 15.0, "Prefer ending on 5"); assert_eq!(best_in_range_f64(12.3, 65.9), 50.0, "Prefer leading 5"); assert_eq!(best_in_range_f64(493.0, 879.0), 500.0, "Prefer leading 5"); assert_eq!(best_in_range_f64(0.37, 0.48), 0.40); // assert_eq!(best_in_range_f64(123.71, 123.76), 123.75); // TODO(emilk): we get 123.74999999999999 here // assert_eq!(best_in_range_f32(123.71, 123.76), 123.75); assert_eq!(best_in_range_f64(7.5, 16.3), 10.0); assert_eq!(best_in_range_f64(7.5, 76.3), 10.0); assert_eq!(best_in_range_f64(7.5, 763.3), 100.0); assert_eq!(best_in_range_f64(7.5, 1_345.0), 100.0); assert_eq!(best_in_range_f64(7.5, 123_456.0), 1000.0, "Geometric mean"); assert_eq!(best_in_range_f64(9.9999, 99.999), 10.0); assert_eq!(best_in_range_f64(10.000, 99.999), 10.0); assert_eq!(best_in_range_f64(10.001, 99.999), 50.0); assert_eq!(best_in_range_f64(10.001, 100.000), 100.0); assert_eq!(best_in_range_f64(99.999, 100.000), 100.0); assert_eq!(best_in_range_f64(10.001, 
100.001), 100.0); const NAN: f64 = f64::NAN; const INFINITY: f64 = f64::INFINITY; const NEG_INFINITY: f64 = f64::NEG_INFINITY; assert!(best_in_range_f64(NAN, NAN).is_nan()); assert_eq!(best_in_range_f64(NAN, 1.2), 1.2); assert_eq!(best_in_range_f64(NAN, INFINITY), INFINITY); assert_eq!(best_in_range_f64(1.2, NAN), 1.2); assert_eq!(best_in_range_f64(1.2, INFINITY), 1.2); assert_eq!(best_in_range_f64(INFINITY, 1.2), 1.2); assert_eq!(best_in_range_f64(NEG_INFINITY, 1.2), 0.0); assert_eq!(best_in_range_f64(NEG_INFINITY, -2.7), -2.7); assert_eq!(best_in_range_f64(INFINITY, INFINITY), INFINITY); assert_eq!(best_in_range_f64(NEG_INFINITY, NEG_INFINITY), NEG_INFINITY); assert_eq!(best_in_range_f64(NEG_INFINITY, INFINITY), 0.0); assert_eq!(best_in_range_f64(INFINITY, NEG_INFINITY), 0.0); #[track_caller] fn test_f64((min, max): (f64, f64), expected: f64) { let aimed = best_in_range_f64(min, max); assert!( aimed == expected, "smart_aim({min} – {max}) => {aimed}, but expected {expected}" ); } #[track_caller] fn test_i64((min, max): (i64, i64), expected: i64) { let aimed = best_in_range_f64(min as _, max as _); assert!( aimed == expected as f64, "smart_aim({min} – {max}) => {aimed}, but expected {expected}" ); } test_i64((99, 300), 100); test_i64((300, 99), 100); test_i64((-99, -300), -100); test_i64((-99, 123), 0); // Prefer zero test_i64((4, 9), 5); // Prefer ending on 5 test_i64((14, 19), 15); // Prefer ending on 5 test_i64((12, 65), 50); // Prefer leading 5 test_i64((493, 879), 500); // Prefer leading 5 test_i64((37, 48), 40); test_i64((100, 123), 100); test_i64((101, 1000), 1000); test_i64((999, 1000), 1000); test_i64((123, 500), 500); test_i64((500, 777), 500); test_i64((500, 999), 500); test_i64((12345, 12780), 12500); test_i64((12371, 12376), 12375); test_i64((12371, 12376), 12375); test_f64((7.5, 16.3), 10.0); test_f64((7.5, 76.3), 10.0); test_f64((7.5, 763.3), 100.0); test_f64((7.5, 1_345.0), 100.0); // Geometric mean test_f64((7.5, 123_456.0), 1_000.0); // Geometric 
mean test_f64((-0.2, 0.0), 0.0); // Prefer zero test_f64((-10_004.23, 4.14), 0.0); // Prefer zero test_f64((-0.2, 100.0), 0.0); // Prefer zero test_f64((0.2, 0.0), 0.0); // Prefer zero test_f64((7.8, 17.8), 10.0); test_f64((14.1, 19.1), 15.0); // Prefer ending on 5 test_f64((12.3, 65.9), 50.0); // Prefer leading 5 }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/pos2.rs
crates/emath/src/pos2.rs
use std::{ fmt, ops::{Add, AddAssign, MulAssign, Sub, SubAssign}, }; use crate::{Div, Mul, Vec2, lerp}; /// A position on screen. /// /// Normally given in points (logical pixels). /// /// Mathematically this is known as a "point", but the term position was chosen so not to /// conflict with the unit (one point = X physical pixels). #[repr(C)] #[derive(Clone, Copy, Default, PartialEq)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] #[cfg_attr(feature = "bytemuck", derive(bytemuck::Pod, bytemuck::Zeroable))] pub struct Pos2 { /// How far to the right. pub x: f32, /// How far down. pub y: f32, // implicit w = 1 } /// `pos2(x, y) == Pos2::new(x, y)` #[inline(always)] pub const fn pos2(x: f32, y: f32) -> Pos2 { Pos2 { x, y } } // ---------------------------------------------------------------------------- // Compatibility and convenience conversions to and from [f32; 2]: impl From<[f32; 2]> for Pos2 { #[inline(always)] fn from(v: [f32; 2]) -> Self { Self { x: v[0], y: v[1] } } } impl From<&[f32; 2]> for Pos2 { #[inline(always)] fn from(v: &[f32; 2]) -> Self { Self { x: v[0], y: v[1] } } } impl From<Pos2> for [f32; 2] { #[inline(always)] fn from(v: Pos2) -> Self { [v.x, v.y] } } impl From<&Pos2> for [f32; 2] { #[inline(always)] fn from(v: &Pos2) -> Self { [v.x, v.y] } } // ---------------------------------------------------------------------------- // Compatibility and convenience conversions to and from (f32, f32): impl From<(f32, f32)> for Pos2 { #[inline(always)] fn from(v: (f32, f32)) -> Self { Self { x: v.0, y: v.1 } } } impl From<&(f32, f32)> for Pos2 { #[inline(always)] fn from(v: &(f32, f32)) -> Self { Self { x: v.0, y: v.1 } } } impl From<Pos2> for (f32, f32) { #[inline(always)] fn from(v: Pos2) -> Self { (v.x, v.y) } } impl From<&Pos2> for (f32, f32) { #[inline(always)] fn from(v: &Pos2) -> Self { (v.x, v.y) } } // ---------------------------------------------------------------------------- // Mint compatibility and convenience 
conversions #[cfg(feature = "mint")] impl From<mint::Point2<f32>> for Pos2 { #[inline(always)] fn from(v: mint::Point2<f32>) -> Self { Self::new(v.x, v.y) } } #[cfg(feature = "mint")] impl From<Pos2> for mint::Point2<f32> { #[inline(always)] fn from(v: Pos2) -> Self { Self { x: v.x, y: v.y } } } // ---------------------------------------------------------------------------- impl Pos2 { /// The zero position, the origin. /// The top left corner in a GUI. /// Same as `Pos2::default()`. pub const ZERO: Self = Self { x: 0.0, y: 0.0 }; #[inline(always)] pub const fn new(x: f32, y: f32) -> Self { Self { x, y } } /// The vector from origin to this position. /// `p.to_vec2()` is equivalent to `p - Pos2::default()`. #[inline(always)] pub fn to_vec2(self) -> Vec2 { Vec2 { x: self.x, y: self.y, } } #[inline] pub fn distance(self, other: Self) -> f32 { (self - other).length() } #[inline] pub fn distance_sq(self, other: Self) -> f32 { (self - other).length_sq() } #[inline(always)] pub fn floor(self) -> Self { pos2(self.x.floor(), self.y.floor()) } #[inline(always)] pub fn round(self) -> Self { pos2(self.x.round(), self.y.round()) } #[inline(always)] pub fn ceil(self) -> Self { pos2(self.x.ceil(), self.y.ceil()) } /// True if all members are also finite. #[inline(always)] pub fn is_finite(self) -> bool { self.x.is_finite() && self.y.is_finite() } /// True if any member is NaN. #[inline(always)] pub fn any_nan(self) -> bool { self.x.is_nan() || self.y.is_nan() } #[must_use] #[inline] pub fn min(self, other: Self) -> Self { pos2(self.x.min(other.x), self.y.min(other.y)) } #[must_use] #[inline] pub fn max(self, other: Self) -> Self { pos2(self.x.max(other.x), self.y.max(other.y)) } #[must_use] #[inline] pub fn clamp(self, min: Self, max: Self) -> Self { Self { x: self.x.clamp(min.x, max.x), y: self.y.clamp(min.y, max.y), } } /// Linearly interpolate towards another point, so that `0.0 => self, 1.0 => other`. 
pub fn lerp(&self, other: Self, t: f32) -> Self { Self { x: lerp(self.x..=other.x, t), y: lerp(self.y..=other.y, t), } } } impl std::ops::Index<usize> for Pos2 { type Output = f32; #[inline(always)] fn index(&self, index: usize) -> &f32 { match index { 0 => &self.x, 1 => &self.y, _ => panic!("Pos2 index out of bounds: {index}"), } } } impl std::ops::IndexMut<usize> for Pos2 { #[inline(always)] fn index_mut(&mut self, index: usize) -> &mut f32 { match index { 0 => &mut self.x, 1 => &mut self.y, _ => panic!("Pos2 index out of bounds: {index}"), } } } impl Eq for Pos2 {} impl AddAssign<Vec2> for Pos2 { #[inline(always)] fn add_assign(&mut self, rhs: Vec2) { *self = Self { x: self.x + rhs.x, y: self.y + rhs.y, }; } } impl SubAssign<Vec2> for Pos2 { #[inline(always)] fn sub_assign(&mut self, rhs: Vec2) { *self = Self { x: self.x - rhs.x, y: self.y - rhs.y, }; } } impl Add<Vec2> for Pos2 { type Output = Self; #[inline(always)] fn add(self, rhs: Vec2) -> Self { Self { x: self.x + rhs.x, y: self.y + rhs.y, } } } impl Sub for Pos2 { type Output = Vec2; #[inline(always)] fn sub(self, rhs: Self) -> Vec2 { Vec2 { x: self.x - rhs.x, y: self.y - rhs.y, } } } impl Sub<Vec2> for Pos2 { type Output = Self; #[inline(always)] fn sub(self, rhs: Vec2) -> Self { Self { x: self.x - rhs.x, y: self.y - rhs.y, } } } impl Mul<f32> for Pos2 { type Output = Self; #[inline(always)] fn mul(self, factor: f32) -> Self { Self { x: self.x * factor, y: self.y * factor, } } } impl Mul<Pos2> for f32 { type Output = Pos2; #[inline(always)] fn mul(self, vec: Pos2) -> Pos2 { Pos2 { x: self * vec.x, y: self * vec.y, } } } impl MulAssign<f32> for Pos2 { #[inline(always)] fn mul_assign(&mut self, rhs: f32) { self.x *= rhs; self.y *= rhs; } } impl Div<f32> for Pos2 { type Output = Self; #[inline(always)] fn div(self, factor: f32) -> Self { Self { x: self.x / factor, y: self.y / factor, } } } impl fmt::Debug for Pos2 { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if let Some(precision) = 
f.precision() { write!(f, "[{1:.0$} {2:.0$}]", precision, self.x, self.y) } else { write!(f, "[{:.1} {:.1}]", self.x, self.y) } } } impl fmt::Display for Pos2 { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("[")?; self.x.fmt(f)?; f.write_str(" ")?; self.y.fmt(f)?; f.write_str("]")?; Ok(()) } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/ts_transform.rs
crates/emath/src/ts_transform.rs
use crate::{Pos2, Rect, Vec2}; /// Linearly transforms positions via a translation, then a scaling. /// /// [`TSTransform`] first scales points with the scaling origin at `0, 0` /// (the top left corner), then translates them. #[repr(C)] #[derive(Clone, Copy, Debug, PartialEq)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] #[cfg_attr(feature = "bytemuck", derive(bytemuck::Pod, bytemuck::Zeroable))] pub struct TSTransform { /// Scaling applied first, scaled around (0, 0). pub scaling: f32, /// Translation amount, applied after scaling. pub translation: Vec2, } impl Eq for TSTransform {} impl Default for TSTransform { #[inline] fn default() -> Self { Self::IDENTITY } } impl TSTransform { pub const IDENTITY: Self = Self { translation: Vec2::ZERO, scaling: 1.0, }; #[inline] /// Creates a new translation that first scales points around /// `(0, 0)`, then translates them. pub fn new(translation: Vec2, scaling: f32) -> Self { Self { scaling, translation, } } #[inline] pub fn from_translation(translation: Vec2) -> Self { Self::new(translation, 1.0) } #[inline] pub fn from_scaling(scaling: f32) -> Self { Self::new(Vec2::ZERO, scaling) } /// Is this a valid, invertible transform? pub fn is_valid(&self) -> bool { self.scaling.is_finite() && self.translation.x.is_finite() && self.scaling != 0.0 } /// Inverts the transform. /// /// ``` /// # use emath::{pos2, vec2, TSTransform}; /// let p1 = pos2(2.0, 3.0); /// let p2 = pos2(12.0, 5.0); /// let ts = TSTransform::new(vec2(2.0, 3.0), 2.0); /// let inv = ts.inverse(); /// assert_eq!(inv.mul_pos(p1), pos2(0.0, 0.0)); /// assert_eq!(inv.mul_pos(p2), pos2(5.0, 1.0)); /// /// assert_eq!(ts.inverse().inverse(), ts); /// ``` #[inline] pub fn inverse(&self) -> Self { Self::new(-self.translation / self.scaling, 1.0 / self.scaling) } /// Transforms the given coordinate. 
/// /// ``` /// # use emath::{pos2, vec2, TSTransform}; /// let p1 = pos2(0.0, 0.0); /// let p2 = pos2(5.0, 1.0); /// let ts = TSTransform::new(vec2(2.0, 3.0), 2.0); /// assert_eq!(ts.mul_pos(p1), pos2(2.0, 3.0)); /// assert_eq!(ts.mul_pos(p2), pos2(12.0, 5.0)); /// ``` #[inline] pub fn mul_pos(&self, pos: Pos2) -> Pos2 { self.scaling * pos + self.translation } /// Transforms the given rectangle. /// /// ``` /// # use emath::{pos2, vec2, Rect, TSTransform}; /// let rect = Rect::from_min_max(pos2(5.0, 5.0), pos2(15.0, 10.0)); /// let ts = TSTransform::new(vec2(1.0, 0.0), 3.0); /// let transformed = ts.mul_rect(rect); /// assert_eq!(transformed.min, pos2(16.0, 15.0)); /// assert_eq!(transformed.max, pos2(46.0, 30.0)); /// ``` #[inline] pub fn mul_rect(&self, rect: Rect) -> Rect { Rect { min: self.mul_pos(rect.min), max: self.mul_pos(rect.max), } } } /// Transforms the position. impl std::ops::Mul<Pos2> for TSTransform { type Output = Pos2; #[inline] fn mul(self, pos: Pos2) -> Pos2 { self.mul_pos(pos) } } /// Transforms the rectangle. impl std::ops::Mul<Rect> for TSTransform { type Output = Rect; #[inline] fn mul(self, rect: Rect) -> Rect { self.mul_rect(rect) } } impl std::ops::Mul<Self> for TSTransform { type Output = Self; #[inline] /// Applies the right hand side transform, then the left hand side. /// /// ``` /// # use emath::{TSTransform, vec2}; /// let ts1 = TSTransform::new(vec2(1.0, 0.0), 2.0); /// let ts2 = TSTransform::new(vec2(-1.0, -1.0), 3.0); /// let ts_combined = TSTransform::new(vec2(2.0, -1.0), 6.0); /// assert_eq!(ts_combined, ts2 * ts1); /// ``` fn mul(self, rhs: Self) -> Self::Output { // Apply rhs first. Self { scaling: self.scaling * rhs.scaling, translation: self.translation + self.scaling * rhs.translation, } } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/numeric.rs
crates/emath/src/numeric.rs
/// Implemented for all builtin numeric types pub trait Numeric: Clone + Copy + PartialEq + PartialOrd + 'static { /// Is this an integer type? const INTEGRAL: bool; /// Smallest finite value const MIN: Self; /// Largest finite value const MAX: Self; fn to_f64(self) -> f64; fn from_f64(num: f64) -> Self; } macro_rules! impl_numeric_float { ($t: ident) => { impl Numeric for $t { const INTEGRAL: bool = false; const MIN: Self = $t::MIN; const MAX: Self = $t::MAX; #[inline(always)] fn to_f64(self) -> f64 { #[allow(clippy::allow_attributes, trivial_numeric_casts)] { self as f64 } } #[inline(always)] fn from_f64(num: f64) -> Self { #[allow(clippy::allow_attributes, trivial_numeric_casts)] { num as Self } } } }; } macro_rules! impl_numeric_integer { ($t: ident) => { impl Numeric for $t { const INTEGRAL: bool = true; const MIN: Self = $t::MIN; const MAX: Self = $t::MAX; #[inline(always)] fn to_f64(self) -> f64 { self as f64 } #[inline(always)] fn from_f64(num: f64) -> Self { num as Self } } }; } macro_rules! 
impl_numeric_non_zero_unsigned { ($t: path) => { impl Numeric for $t { const INTEGRAL: bool = true; const MIN: Self = Self::MIN; const MAX: Self = Self::MAX; #[inline(always)] fn to_f64(self) -> f64 { self.get() as f64 } #[inline(always)] fn from_f64(num: f64) -> Self { Self::new(num.round().max(1.0) as _).unwrap_or(Self::MIN) } } }; } impl_numeric_float!(f32); impl_numeric_float!(f64); impl_numeric_integer!(i8); impl_numeric_integer!(u8); impl_numeric_integer!(i16); impl_numeric_integer!(u16); impl_numeric_integer!(i32); impl_numeric_integer!(u32); impl_numeric_integer!(i64); impl_numeric_integer!(u64); impl_numeric_integer!(isize); impl_numeric_integer!(usize); impl_numeric_non_zero_unsigned!(std::num::NonZeroU8); impl_numeric_non_zero_unsigned!(std::num::NonZeroU16); impl_numeric_non_zero_unsigned!(std::num::NonZeroU32); impl_numeric_non_zero_unsigned!(std::num::NonZeroU64); impl_numeric_non_zero_unsigned!(std::num::NonZeroU128); impl_numeric_non_zero_unsigned!(std::num::NonZeroUsize);
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/rect.rs
crates/emath/src/rect.rs
use std::fmt; use crate::{Div, Mul, NumExt as _, Pos2, Rangef, Rot2, Vec2, fast_midpoint, lerp, pos2, vec2}; use std::ops::{BitOr, BitOrAssign}; /// A rectangular region of space. /// /// Usually a [`Rect`] has a positive (or zero) size, /// and then [`Self::min`] `<=` [`Self::max`]. /// In these cases [`Self::min`] is the left-top corner /// and [`Self::max`] is the right-bottom corner. /// /// A rectangle is allowed to have a negative size, which happens when the order /// of `min` and `max` are swapped. These are usually a sign of an error. /// /// Normally the unit is points (logical pixels) in screen space coordinates. /// /// `Rect` does NOT implement `Default`, because there is no obvious default value. /// [`Rect::ZERO`] may seem reasonable, but when used as a bounding box, [`Rect::NOTHING`] /// is a better default - so be explicit instead! #[repr(C)] #[derive(Clone, Copy, Eq, PartialEq)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] #[cfg_attr(feature = "bytemuck", derive(bytemuck::Pod, bytemuck::Zeroable))] pub struct Rect { /// One of the corners of the rectangle, usually the left top one. pub min: Pos2, /// The other corner, opposing [`Self::min`]. Usually the right bottom one. pub max: Pos2, } impl Rect { /// Infinite rectangle that contains every point. pub const EVERYTHING: Self = Self { min: pos2(-f32::INFINITY, -f32::INFINITY), max: pos2(f32::INFINITY, f32::INFINITY), }; /// The inverse of [`Self::EVERYTHING`]: stretches from positive infinity to negative infinity. /// Contains no points. /// /// This is useful as the seed for bounding boxes. 
/// /// # Example: /// ``` /// # use emath::*; /// let mut rect = Rect::NOTHING; /// assert!(rect.size() == Vec2::splat(-f32::INFINITY)); /// assert!(rect.contains(pos2(0.0, 0.0)) == false); /// rect.extend_with(pos2(2.0, 1.0)); /// rect.extend_with(pos2(0.0, 3.0)); /// assert_eq!(rect, Rect::from_min_max(pos2(0.0, 1.0), pos2(2.0, 3.0))) /// ``` pub const NOTHING: Self = Self { min: pos2(f32::INFINITY, f32::INFINITY), max: pos2(-f32::INFINITY, -f32::INFINITY), }; /// An invalid [`Rect`] filled with [`f32::NAN`]. pub const NAN: Self = Self { min: pos2(f32::NAN, f32::NAN), max: pos2(f32::NAN, f32::NAN), }; /// A [`Rect`] filled with zeroes. pub const ZERO: Self = Self { min: Pos2::ZERO, max: Pos2::ZERO, }; #[inline(always)] pub const fn from_min_max(min: Pos2, max: Pos2) -> Self { Self { min, max } } /// left-top corner plus a size (stretching right-down). #[inline(always)] pub fn from_min_size(min: Pos2, size: Vec2) -> Self { Self { min, max: min + size, } } #[inline(always)] pub fn from_center_size(center: Pos2, size: Vec2) -> Self { Self { min: center - size * 0.5, max: center + size * 0.5, } } #[inline(always)] pub fn from_x_y_ranges(x_range: impl Into<Rangef>, y_range: impl Into<Rangef>) -> Self { let x_range = x_range.into(); let y_range = y_range.into(); Self { min: pos2(x_range.min, y_range.min), max: pos2(x_range.max, y_range.max), } } /// Returns the bounding rectangle of the two points. #[inline] pub fn from_two_pos(a: Pos2, b: Pos2) -> Self { Self { min: pos2(a.x.min(b.x), a.y.min(b.y)), max: pos2(a.x.max(b.x), a.y.max(b.y)), } } /// A zero-sized rect at a specific point. #[inline] pub fn from_pos(point: Pos2) -> Self { Self { min: point, max: point, } } /// Bounding-box around the points. pub fn from_points(points: &[Pos2]) -> Self { let mut rect = Self::NOTHING; for &p in points { rect.extend_with(p); } rect } /// A [`Rect`] that contains every point to the right of the given X coordinate. 
#[inline] pub fn everything_right_of(left_x: f32) -> Self { let mut rect = Self::EVERYTHING; rect.set_left(left_x); rect } /// A [`Rect`] that contains every point to the left of the given X coordinate. #[inline] pub fn everything_left_of(right_x: f32) -> Self { let mut rect = Self::EVERYTHING; rect.set_right(right_x); rect } /// A [`Rect`] that contains every point below a certain y coordinate #[inline] pub fn everything_below(top_y: f32) -> Self { let mut rect = Self::EVERYTHING; rect.set_top(top_y); rect } /// A [`Rect`] that contains every point above a certain y coordinate #[inline] pub fn everything_above(bottom_y: f32) -> Self { let mut rect = Self::EVERYTHING; rect.set_bottom(bottom_y); rect } #[must_use] #[inline] pub fn with_min_x(mut self, min_x: f32) -> Self { self.min.x = min_x; self } #[must_use] #[inline] pub fn with_min_y(mut self, min_y: f32) -> Self { self.min.y = min_y; self } #[must_use] #[inline] pub fn with_max_x(mut self, max_x: f32) -> Self { self.max.x = max_x; self } #[must_use] #[inline] pub fn with_max_y(mut self, max_y: f32) -> Self { self.max.y = max_y; self } /// Expand by this much in each direction, keeping the center #[must_use] pub fn expand(self, amnt: f32) -> Self { self.expand2(Vec2::splat(amnt)) } /// Expand by this much in each direction, keeping the center #[must_use] pub fn expand2(self, amnt: Vec2) -> Self { Self::from_min_max(self.min - amnt, self.max + amnt) } /// Scale up by this factor in each direction, keeping the center #[must_use] pub fn scale_from_center(self, scale_factor: f32) -> Self { self.scale_from_center2(Vec2::splat(scale_factor)) } /// Scale up by this factor in each direction, keeping the center #[must_use] pub fn scale_from_center2(self, scale_factor: Vec2) -> Self { Self::from_center_size(self.center(), self.size() * scale_factor) } /// Shrink by this much in each direction, keeping the center #[must_use] pub fn shrink(self, amnt: f32) -> Self { self.shrink2(Vec2::splat(amnt)) } /// Shrink by this much 
in each direction, keeping the center #[must_use] pub fn shrink2(self, amnt: Vec2) -> Self { Self::from_min_max(self.min + amnt, self.max - amnt) } #[must_use] #[inline] pub fn translate(self, amnt: Vec2) -> Self { Self::from_min_size(self.min + amnt, self.size()) } /// Rotate the bounds (will expand the [`Rect`]) #[must_use] #[inline] pub fn rotate_bb(self, rot: Rot2) -> Self { let a = rot * self.left_top().to_vec2(); let b = rot * self.right_top().to_vec2(); let c = rot * self.left_bottom().to_vec2(); let d = rot * self.right_bottom().to_vec2(); Self::from_min_max( a.min(b).min(c).min(d).to_pos2(), a.max(b).max(c).max(d).to_pos2(), ) } #[must_use] #[inline] pub fn intersects(self, other: Self) -> bool { self.min.x <= other.max.x && other.min.x <= self.max.x && self.min.y <= other.max.y && other.min.y <= self.max.y } /// keep min pub fn set_width(&mut self, w: f32) { self.max.x = self.min.x + w; } /// keep min pub fn set_height(&mut self, h: f32) { self.max.y = self.min.y + h; } /// Keep size pub fn set_center(&mut self, center: Pos2) { *self = self.translate(center - self.center()); } #[must_use] #[inline(always)] pub fn contains(&self, p: Pos2) -> bool { self.min.x <= p.x && p.x <= self.max.x && self.min.y <= p.y && p.y <= self.max.y } #[must_use] pub fn contains_rect(&self, other: Self) -> bool { self.contains(other.min) && self.contains(other.max) } /// Return the given points clamped to be inside the rectangle /// Panics if [`Self::is_negative`]. 
#[must_use] pub fn clamp(&self, p: Pos2) -> Pos2 { p.clamp(self.min, self.max) } #[inline(always)] pub fn extend_with(&mut self, p: Pos2) { self.min = self.min.min(p); self.max = self.max.max(p); } #[inline(always)] /// Expand to include the given x coordinate pub fn extend_with_x(&mut self, x: f32) { self.min.x = self.min.x.min(x); self.max.x = self.max.x.max(x); } #[inline(always)] /// Expand to include the given y coordinate pub fn extend_with_y(&mut self, y: f32) { self.min.y = self.min.y.min(y); self.max.y = self.max.y.max(y); } /// The union of two bounding rectangle, i.e. the minimum [`Rect`] /// that contains both input rectangles. #[inline(always)] #[must_use] pub fn union(self, other: Self) -> Self { Self { min: self.min.min(other.min), max: self.max.max(other.max), } } /// The intersection of two [`Rect`], i.e. the area covered by both. #[inline] #[must_use] pub fn intersect(self, other: Self) -> Self { Self { min: self.min.max(other.min), max: self.max.min(other.max), } } #[inline(always)] pub fn center(&self) -> Pos2 { Pos2 { x: fast_midpoint(self.min.x, self.max.x), y: fast_midpoint(self.min.y, self.max.y), } } /// `rect.size() == Vec2 { x: rect.width(), y: rect.height() }` #[inline(always)] pub fn size(&self) -> Vec2 { self.max - self.min } /// Note: this can be negative. #[inline(always)] pub fn width(&self) -> f32 { self.max.x - self.min.x } /// Note: this can be negative. #[inline(always)] pub fn height(&self) -> f32 { self.max.y - self.min.y } /// Width / height /// /// * `aspect_ratio < 1`: portrait / high /// * `aspect_ratio = 1`: square /// * `aspect_ratio > 1`: landscape / wide pub fn aspect_ratio(&self) -> f32 { self.width() / self.height() } /// `[2, 1]` for wide screen, and `[1, 2]` for portrait, etc. /// At least one dimension = 1, the other >= 1 /// Returns the proportions required to letter-box a square view area. 
pub fn square_proportions(&self) -> Vec2 { let w = self.width(); let h = self.height(); if w > h { vec2(w / h, 1.0) } else { vec2(1.0, h / w) } } /// This is never negative, and instead returns zero for negative rectangles. #[inline(always)] pub fn area(&self) -> f32 { self.width().at_least(0.0) * self.height().at_least(0.0) } /// The distance from the rect to the position. /// /// The distance is zero when the position is in the interior of the rectangle. /// /// [Negative rectangles](Self::is_negative) always return [`f32::INFINITY`]. #[inline] pub fn distance_to_pos(&self, pos: Pos2) -> f32 { self.distance_sq_to_pos(pos).sqrt() } /// The distance from the rect to the position, squared. /// /// The distance is zero when the position is in the interior of the rectangle. /// /// [Negative rectangles](Self::is_negative) always return [`f32::INFINITY`]. #[inline] pub fn distance_sq_to_pos(&self, pos: Pos2) -> f32 { if self.is_negative() { return f32::INFINITY; } let dx = if self.min.x > pos.x { self.min.x - pos.x } else if pos.x > self.max.x { pos.x - self.max.x } else { 0.0 }; let dy = if self.min.y > pos.y { self.min.y - pos.y } else if pos.y > self.max.y { pos.y - self.max.y } else { 0.0 }; dx * dx + dy * dy } /// Signed distance to the edge of the box. /// /// Negative inside the box. /// /// [Negative rectangles](Self::is_negative) always return [`f32::INFINITY`]. 
/// /// ``` /// # use emath::{pos2, Rect}; /// let rect = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0)); /// assert_eq!(rect.signed_distance_to_pos(pos2(0.50, 0.50)), -0.50); /// assert_eq!(rect.signed_distance_to_pos(pos2(0.75, 0.50)), -0.25); /// assert_eq!(rect.signed_distance_to_pos(pos2(1.50, 0.50)), 0.50); /// ``` pub fn signed_distance_to_pos(&self, pos: Pos2) -> f32 { if self.is_negative() { return f32::INFINITY; } let edge_distances = (pos - self.center()).abs() - self.size() * 0.5; let inside_dist = edge_distances.max_elem().min(0.0); let outside_dist = edge_distances.max(Vec2::ZERO).length(); inside_dist + outside_dist } /// Linearly interpolate so that `[0, 0]` is [`Self::min`] and /// `[1, 1]` is [`Self::max`]. #[inline] pub fn lerp_inside(&self, t: impl Into<Vec2>) -> Pos2 { let t = t.into(); Pos2 { x: lerp(self.min.x..=self.max.x, t.x), y: lerp(self.min.y..=self.max.y, t.y), } } /// Linearly self towards other rect. #[inline] pub fn lerp_towards(&self, other: &Self, t: f32) -> Self { Self { min: self.min.lerp(other.min, t), max: self.max.lerp(other.max, t), } } #[inline(always)] pub fn x_range(&self) -> Rangef { Rangef::new(self.min.x, self.max.x) } #[inline(always)] pub fn y_range(&self) -> Rangef { Rangef::new(self.min.y, self.max.y) } #[inline(always)] pub fn bottom_up_range(&self) -> Rangef { Rangef::new(self.max.y, self.min.y) } /// `width < 0 || height < 0` #[inline(always)] pub fn is_negative(&self) -> bool { self.max.x < self.min.x || self.max.y < self.min.y } /// `width > 0 && height > 0` #[inline(always)] pub fn is_positive(&self) -> bool { self.min.x < self.max.x && self.min.y < self.max.y } /// True if all members are also finite. #[inline(always)] pub fn is_finite(&self) -> bool { self.min.is_finite() && self.max.is_finite() } /// True if any member is NaN. 
#[inline(always)] pub fn any_nan(self) -> bool { self.min.any_nan() || self.max.any_nan() } } /// ## Convenience functions (assumes origin is towards left top): impl Rect { /// `min.x` #[inline(always)] pub fn left(&self) -> f32 { self.min.x } /// `min.x` #[inline(always)] pub fn left_mut(&mut self) -> &mut f32 { &mut self.min.x } /// `min.x` #[inline(always)] pub fn set_left(&mut self, x: f32) { self.min.x = x; } /// `max.x` #[inline(always)] pub fn right(&self) -> f32 { self.max.x } /// `max.x` #[inline(always)] pub fn right_mut(&mut self) -> &mut f32 { &mut self.max.x } /// `max.x` #[inline(always)] pub fn set_right(&mut self, x: f32) { self.max.x = x; } /// `min.y` #[inline(always)] pub fn top(&self) -> f32 { self.min.y } /// `min.y` #[inline(always)] pub fn top_mut(&mut self) -> &mut f32 { &mut self.min.y } /// `min.y` #[inline(always)] pub fn set_top(&mut self, y: f32) { self.min.y = y; } /// `max.y` #[inline(always)] pub fn bottom(&self) -> f32 { self.max.y } /// `max.y` #[inline(always)] pub fn bottom_mut(&mut self) -> &mut f32 { &mut self.max.y } /// `max.y` #[inline(always)] pub fn set_bottom(&mut self, y: f32) { self.max.y = y; } #[inline(always)] #[doc(alias = "top_left")] pub fn left_top(&self) -> Pos2 { pos2(self.left(), self.top()) } #[inline(always)] pub fn center_top(&self) -> Pos2 { pos2(self.center().x, self.top()) } #[inline(always)] #[doc(alias = "top_right")] pub fn right_top(&self) -> Pos2 { pos2(self.right(), self.top()) } #[inline(always)] pub fn left_center(&self) -> Pos2 { pos2(self.left(), self.center().y) } #[inline(always)] pub fn right_center(&self) -> Pos2 { pos2(self.right(), self.center().y) } #[inline(always)] #[doc(alias = "bottom_left")] pub fn left_bottom(&self) -> Pos2 { pos2(self.left(), self.bottom()) } #[inline(always)] pub fn center_bottom(&self) -> Pos2 { pos2(self.center().x, self.bottom()) } #[inline(always)] #[doc(alias = "bottom_right")] pub fn right_bottom(&self) -> Pos2 { pos2(self.right(), self.bottom()) } /// 
Split rectangle in left and right halves. `t` is expected to be in the (0,1) range. pub fn split_left_right_at_fraction(&self, t: f32) -> (Self, Self) { self.split_left_right_at_x(lerp(self.min.x..=self.max.x, t)) } /// Split rectangle in left and right halves at the given `x` coordinate. pub fn split_left_right_at_x(&self, split_x: f32) -> (Self, Self) { let left = Self::from_min_max(self.min, Pos2::new(split_x, self.max.y)); let right = Self::from_min_max(Pos2::new(split_x, self.min.y), self.max); (left, right) } /// Split rectangle in top and bottom halves. `t` is expected to be in the (0,1) range. pub fn split_top_bottom_at_fraction(&self, t: f32) -> (Self, Self) { self.split_top_bottom_at_y(lerp(self.min.y..=self.max.y, t)) } /// Split rectangle in top and bottom halves at the given `y` coordinate. pub fn split_top_bottom_at_y(&self, split_y: f32) -> (Self, Self) { let top = Self::from_min_max(self.min, Pos2::new(self.max.x, split_y)); let bottom = Self::from_min_max(Pos2::new(self.min.x, split_y), self.max); (top, bottom) } } impl Rect { /// Does this Rect intersect the given ray (where `d` is normalized)? /// /// A ray that starts inside the rect will return `true`. pub fn intersects_ray(&self, o: Pos2, d: Vec2) -> bool { debug_assert!( d.is_normalized(), "Debug assert: expected normalized direction, but `d` has length {}", d.length() ); let mut tmin = -f32::INFINITY; let mut tmax = f32::INFINITY; if d.x != 0.0 { let tx1 = (self.min.x - o.x) / d.x; let tx2 = (self.max.x - o.x) / d.x; tmin = tmin.max(tx1.min(tx2)); tmax = tmax.min(tx1.max(tx2)); } if d.y != 0.0 { let ty1 = (self.min.y - o.y) / d.y; let ty2 = (self.max.y - o.y) / d.y; tmin = tmin.max(ty1.min(ty2)); tmax = tmax.min(ty1.max(ty2)); } 0.0 <= tmax && tmin <= tmax } /// Where does a ray from the center intersect the rectangle? /// /// `d` is the direction of the ray and assumed to be normalized. 
pub fn intersects_ray_from_center(&self, d: Vec2) -> Pos2 { debug_assert!( d.is_normalized(), "expected normalized direction, but `d` has length {}", d.length() ); let mut tmin = f32::NEG_INFINITY; let mut tmax = f32::INFINITY; for i in 0..2 { let inv_d = 1.0 / -d[i]; let mut t0 = (self.min[i] - self.center()[i]) * inv_d; let mut t1 = (self.max[i] - self.center()[i]) * inv_d; if inv_d < 0.0 { std::mem::swap(&mut t0, &mut t1); } tmin = tmin.max(t0); tmax = tmax.min(t1); } let t = tmax.min(tmin); self.center() + t * -d } } impl fmt::Debug for Rect { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if let Some(precision) = f.precision() { write!(f, "[{1:.0$?} - {2:.0$?}]", precision, self.min, self.max) } else { write!(f, "[{:?} - {:?}]", self.min, self.max) } } } impl fmt::Display for Rect { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("[")?; self.min.fmt(f)?; f.write_str(" - ")?; self.max.fmt(f)?; f.write_str("]")?; Ok(()) } } /// from (min, max) or (left top, right bottom) impl From<[Pos2; 2]> for Rect { #[inline] fn from([min, max]: [Pos2; 2]) -> Self { Self { min, max } } } impl Mul<f32> for Rect { type Output = Self; #[inline] fn mul(self, factor: f32) -> Self { Self { min: self.min * factor, max: self.max * factor, } } } impl Mul<Rect> for f32 { type Output = Rect; #[inline] fn mul(self, vec: Rect) -> Rect { Rect { min: self * vec.min, max: self * vec.max, } } } impl Div<f32> for Rect { type Output = Self; #[inline] fn div(self, factor: f32) -> Self { Self { min: self.min / factor, max: self.max / factor, } } } impl BitOr for Rect { type Output = Self; #[inline] fn bitor(self, other: Self) -> Self { self.union(other) } } impl BitOrAssign for Rect { #[inline] fn bitor_assign(&mut self, other: Self) { *self = self.union(other); } } #[cfg(test)] mod tests { use super::*; #[test] fn test_rect() { let r = Rect::from_min_max(pos2(10.0, 10.0), pos2(20.0, 20.0)); assert_eq!(r.distance_sq_to_pos(pos2(15.0, 15.0)), 0.0); 
assert_eq!(r.distance_sq_to_pos(pos2(10.0, 15.0)), 0.0); assert_eq!(r.distance_sq_to_pos(pos2(10.0, 10.0)), 0.0); assert_eq!(r.distance_sq_to_pos(pos2(5.0, 15.0)), 25.0); // left of assert_eq!(r.distance_sq_to_pos(pos2(25.0, 15.0)), 25.0); // right of assert_eq!(r.distance_sq_to_pos(pos2(15.0, 5.0)), 25.0); // above assert_eq!(r.distance_sq_to_pos(pos2(15.0, 25.0)), 25.0); // below assert_eq!(r.distance_sq_to_pos(pos2(25.0, 5.0)), 50.0); // right and above } #[test] fn scale_rect() { let c = pos2(100.0, 50.0); let r = Rect::from_center_size(c, vec2(30.0, 60.0)); assert_eq!( r.scale_from_center(2.0), Rect::from_center_size(c, vec2(60.0, 120.0)) ); assert_eq!( r.scale_from_center(0.5), Rect::from_center_size(c, vec2(15.0, 30.0)) ); assert_eq!( r.scale_from_center2(vec2(2.0, 3.0)), Rect::from_center_size(c, vec2(60.0, 180.0)) ); } #[expect(clippy::print_stdout)] #[test] fn test_ray_intersection() { let rect = Rect::from_min_max(pos2(1.0, 1.0), pos2(3.0, 3.0)); println!("Righward ray from left:"); assert!(rect.intersects_ray(pos2(0.0, 2.0), Vec2::RIGHT)); println!("Righward ray from center:"); assert!(rect.intersects_ray(pos2(2.0, 2.0), Vec2::RIGHT)); println!("Righward ray from right:"); assert!(!rect.intersects_ray(pos2(4.0, 2.0), Vec2::RIGHT)); println!("Leftward ray from left:"); assert!(!rect.intersects_ray(pos2(0.0, 2.0), Vec2::LEFT)); println!("Leftward ray from center:"); assert!(rect.intersects_ray(pos2(2.0, 2.0), Vec2::LEFT)); println!("Leftward ray from right:"); assert!(rect.intersects_ray(pos2(4.0, 2.0), Vec2::LEFT)); } #[test] fn test_ray_from_center_intersection() { let rect = Rect::from_min_max(pos2(1.0, 1.0), pos2(3.0, 3.0)); assert_eq!( rect.intersects_ray_from_center(Vec2::RIGHT), pos2(3.0, 2.0), "rightward ray" ); assert_eq!( rect.intersects_ray_from_center(Vec2::UP), pos2(2.0, 1.0), "upward ray" ); assert_eq!( rect.intersects_ray_from_center(Vec2::LEFT), pos2(1.0, 2.0), "leftward ray" ); assert_eq!( rect.intersects_ray_from_center(Vec2::DOWN), 
pos2(2.0, 3.0), "downward ray" ); assert_eq!( rect.intersects_ray_from_center((Vec2::LEFT + Vec2::DOWN).normalized()), pos2(1.0, 3.0), "bottom-left corner ray" ); assert_eq!( rect.intersects_ray_from_center((Vec2::LEFT + Vec2::UP).normalized()), pos2(1.0, 1.0), "top-left corner ray" ); assert_eq!( rect.intersects_ray_from_center((Vec2::RIGHT + Vec2::DOWN).normalized()), pos2(3.0, 3.0), "bottom-right corner ray" ); assert_eq!( rect.intersects_ray_from_center((Vec2::RIGHT + Vec2::UP).normalized()), pos2(3.0, 1.0), "top-right corner ray" ); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/history.rs
crates/emath/src/history.rs
use std::collections::VecDeque; /// This struct tracks recent values of some time series. /// /// It can be used as a smoothing filter for e.g. latency, fps etc, /// or to show a log or graph of recent events. /// /// It has a minimum and maximum length, as well as a maximum storage time. /// * The minimum length is to ensure you have enough data for an estimate. /// * The maximum length is to make sure the history doesn't take up too much space. /// * The maximum age is to make sure the estimate isn't outdated. /// /// Time difference between values can be zero, but never negative. /// /// This can be used for things like smoothed averages (for e.g. FPS) /// or for smoothed velocity (e.g. mouse pointer speed). /// All times are in seconds. #[derive(Clone, Debug)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub struct History<T> { /// In elements, i.e. of `values.len()`. /// The length is initially zero, but once past `min_len` will not shrink below it. min_len: usize, /// In elements, i.e. of `values.len()`. max_len: usize, /// In seconds. max_age: f32, /// Total number of elements seen ever total_count: u64, /// (time, value) pairs, oldest front, newest back. /// Time difference between values can be zero, but never negative. values: VecDeque<(f64, T)>, } impl<T> History<T> where T: Copy, { /// Example: /// ``` /// # use emath::History; /// # fn now() -> f64 { 0.0 } /// // Drop events that are older than one second, /// // as long we keep at least two events. Never keep more than a hundred events. 
/// let mut history = History::new(2..100, 1.0); /// assert_eq!(history.average(), None); /// history.add(now(), 40.0_f32); /// history.add(now(), 44.0_f32); /// assert_eq!(history.average(), Some(42.0)); /// ``` pub fn new(length_range: std::ops::Range<usize>, max_age: f32) -> Self { Self { min_len: length_range.start, max_len: length_range.end, max_age, total_count: 0, values: Default::default(), } } #[inline] pub fn max_len(&self) -> usize { self.max_len } #[inline] pub fn max_age(&self) -> f32 { self.max_age } #[inline] pub fn is_empty(&self) -> bool { self.values.is_empty() } /// Current number of values kept in history #[inline] pub fn len(&self) -> usize { self.values.len() } /// Total number of values seen. /// Includes those that have been discarded due to `max_len` or `max_age`. #[inline] pub fn total_count(&self) -> u64 { self.total_count } pub fn latest(&self) -> Option<T> { self.values.back().map(|(_, value)| *value) } pub fn latest_mut(&mut self) -> Option<&mut T> { self.values.back_mut().map(|(_, value)| value) } /// Amount of time contained from start to end in this [`History`]. pub fn duration(&self) -> f32 { if let (Some(front), Some(back)) = (self.values.front(), self.values.back()) { (back.0 - front.0) as f32 } else { 0.0 } } /// `(time, value)` pairs /// Time difference between values can be zero, but never negative. // TODO(emilk): impl IntoIter pub fn iter(&self) -> impl ExactSizeIterator<Item = (f64, T)> + '_ { self.values.iter().map(|(time, value)| (*time, *value)) } pub fn values(&self) -> impl ExactSizeIterator<Item = T> + '_ { self.values.iter().map(|(_time, value)| *value) } #[inline] pub fn clear(&mut self) { self.values.clear(); } /// Values must be added with a monotonically increasing time, or at least not decreasing. 
pub fn add(&mut self, now: f64, value: T) { if let Some((last_time, _)) = self.values.back() { debug_assert!(*last_time <= now, "Time shouldn't move backwards"); } self.total_count += 1; self.values.push_back((now, value)); self.flush(now); } /// Mean time difference between values in this [`History`]. pub fn mean_time_interval(&self) -> Option<f32> { if let (Some(first), Some(last)) = (self.values.front(), self.values.back()) { let n = self.len(); if n >= 2 { Some((last.0 - first.0) as f32 / ((n - 1) as f32)) } else { None } } else { None } } // Mean number of events per second. pub fn rate(&self) -> Option<f32> { self.mean_time_interval().map(|time| 1.0 / time) } /// Remove samples that are too old. pub fn flush(&mut self, now: f64) { while self.values.len() > self.max_len { self.values.pop_front(); } while self.values.len() > self.min_len { if let Some((front_time, _)) = self.values.front() { if *front_time < now - (self.max_age as f64) { self.values.pop_front(); } else { break; } } else { break; } } } } impl<T> History<T> where T: Copy, T: std::iter::Sum, T: std::ops::Div<f32, Output = T>, { #[inline] pub fn sum(&self) -> T { self.values().sum() } pub fn average(&self) -> Option<T> { let num = self.len(); if num > 0 { Some(self.sum() / (num as f32)) } else { None } } } impl<T> History<T> where T: Copy, T: std::iter::Sum, T: std::ops::Div<f32, Output = T>, T: std::ops::Mul<f32, Output = T>, { /// Average times rate. /// If you are keeping track of individual sizes of things (e.g. bytes), /// this will estimate the bandwidth (bytes per second). pub fn bandwidth(&self) -> Option<T> { Some(self.average()? * self.rate()?) } } impl<T, Vel> History<T> where T: Copy, T: std::ops::Sub<Output = Vel>, Vel: std::ops::Div<f32, Output = Vel>, { /// Calculate a smooth velocity (per second) over the entire time span. /// Calculated as the last value minus the first value over the elapsed time between them. 
pub fn velocity(&self) -> Option<Vel> { if let (Some(first), Some(last)) = (self.values.front(), self.values.back()) { let dt = (last.0 - first.0) as f32; if dt > 0.0 { Some((last.1 - first.1) / dt) } else { None } } else { None } } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/rect_align.rs
crates/emath/src/rect_align.rs
use crate::{Align2, Pos2, Rect, Vec2}; /// Position a child [`Rect`] relative to a parent [`Rect`]. /// /// The corner from [`RectAlign::child`] on the new rect will be aligned to /// the corner from [`RectAlign::parent`] on the original rect. /// /// There are helper constants for the 12 common menu positions: /// ```text /// ┌───────────┐ ┌────────┐ ┌─────────┐ /// │ TOP_START │ │ TOP │ │ TOP_END │ /// └───────────┘ └────────┘ └─────────┘ /// ┌──────────┐ ┌────────────────────────────────────┐ ┌───────────┐ /// │LEFT_START│ │ │ │RIGHT_START│ /// └──────────┘ │ │ └───────────┘ /// ┌──────────┐ │ │ ┌───────────┐ /// │ LEFT │ │ some_rect │ │ RIGHT │ /// └──────────┘ │ │ └───────────┘ /// ┌──────────┐ │ │ ┌───────────┐ /// │ LEFT_END │ │ │ │ RIGHT_END │ /// └──────────┘ └────────────────────────────────────┘ └───────────┘ /// ┌────────────┐ ┌──────┐ ┌──────────┐ /// │BOTTOM_START│ │BOTTOM│ │BOTTOM_END│ /// └────────────┘ └──────┘ └──────────┘ /// ``` // There is no `new` function on purpose, since writing out `parent` and `child` is more // reasonable. #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub struct RectAlign { /// The alignment in the parent (original) rect. pub parent: Align2, /// The alignment in the child (new) rect. pub child: Align2, } impl Default for RectAlign { fn default() -> Self { Self::BOTTOM_START } } impl RectAlign { /// Along the top edge, leftmost. pub const TOP_START: Self = Self { parent: Align2::LEFT_TOP, child: Align2::LEFT_BOTTOM, }; /// Along the top edge, centered. pub const TOP: Self = Self { parent: Align2::CENTER_TOP, child: Align2::CENTER_BOTTOM, }; /// Along the top edge, rightmost. pub const TOP_END: Self = Self { parent: Align2::RIGHT_TOP, child: Align2::RIGHT_BOTTOM, }; /// Along the right edge, topmost. pub const RIGHT_START: Self = Self { parent: Align2::RIGHT_TOP, child: Align2::LEFT_TOP, }; /// Along the right edge, centered. 
pub const RIGHT: Self = Self { parent: Align2::RIGHT_CENTER, child: Align2::LEFT_CENTER, }; /// Along the right edge, bottommost. pub const RIGHT_END: Self = Self { parent: Align2::RIGHT_BOTTOM, child: Align2::LEFT_BOTTOM, }; /// Along the bottom edge, rightmost. pub const BOTTOM_END: Self = Self { parent: Align2::RIGHT_BOTTOM, child: Align2::RIGHT_TOP, }; /// Along the bottom edge, centered. pub const BOTTOM: Self = Self { parent: Align2::CENTER_BOTTOM, child: Align2::CENTER_TOP, }; /// Along the bottom edge, leftmost. pub const BOTTOM_START: Self = Self { parent: Align2::LEFT_BOTTOM, child: Align2::LEFT_TOP, }; /// Along the left edge, bottommost. pub const LEFT_END: Self = Self { parent: Align2::LEFT_BOTTOM, child: Align2::RIGHT_BOTTOM, }; /// Along the left edge, centered. pub const LEFT: Self = Self { parent: Align2::LEFT_CENTER, child: Align2::RIGHT_CENTER, }; /// Along the left edge, topmost. pub const LEFT_START: Self = Self { parent: Align2::LEFT_TOP, child: Align2::RIGHT_TOP, }; /// The 12 most common menu positions as an array, for use with [`RectAlign::find_best_align`]. pub const MENU_ALIGNS: [Self; 12] = [ Self::BOTTOM_START, Self::BOTTOM_END, Self::TOP_START, Self::TOP_END, Self::RIGHT_END, Self::RIGHT_START, Self::LEFT_END, Self::LEFT_START, // These come last on purpose, we prefer the corner ones Self::TOP, Self::RIGHT, Self::BOTTOM, Self::LEFT, ]; /// Align in the parent rect. pub fn parent(&self) -> Align2 { self.parent } /// Align in the child rect. pub fn child(&self) -> Align2 { self.child } /// Convert an [`Align2`] to an [`RectAlign`], positioning the child rect inside the parent. pub fn from_align2(align: Align2) -> Self { Self { parent: align, child: align, } } /// The center of the child rect will be aligned to a corner of the parent rect. pub fn over_corner(align: Align2) -> Self { Self { parent: align, child: Align2::CENTER_CENTER, } } /// Position the child rect outside the parent rect. 
pub fn outside(align: Align2) -> Self { Self { parent: align, child: align.flip(), } } /// Calculate the child rect based on a size and some optional gap. pub fn align_rect(&self, parent_rect: &Rect, size: Vec2, gap: f32) -> Rect { let (pivot, anchor) = self.pivot_pos(parent_rect, gap); pivot.anchor_size(anchor, size) } /// Returns a [`Align2`] and a [`Pos2`] that you can e.g. use with `Area::fixed_pos` /// and `Area::pivot` to align an `Area` to some rect. pub fn pivot_pos(&self, parent_rect: &Rect, gap: f32) -> (Align2, Pos2) { (self.child(), self.anchor(parent_rect, gap)) } /// Returns a sign vector (-1, 0 or 1 in each direction) that can be used as an offset to the /// child rect, creating a gap between the rects while keeping the edges aligned. pub fn gap_vector(&self) -> Vec2 { let mut gap = -self.child.to_sign(); // Align the edges in these cases match *self { Self::TOP_START | Self::TOP_END | Self::BOTTOM_START | Self::BOTTOM_END => { gap.x = 0.0; } Self::LEFT_START | Self::LEFT_END | Self::RIGHT_START | Self::RIGHT_END => { gap.y = 0.0; } _ => {} } gap } /// Calculator the anchor point for the child rect, based on the parent rect and an optional gap. pub fn anchor(&self, parent_rect: &Rect, gap: f32) -> Pos2 { let pos = self.parent.pos_in_rect(parent_rect); let offset = self.gap_vector() * gap; pos + offset } /// Flip the alignment on the x-axis. pub fn flip_x(self) -> Self { Self { parent: self.parent.flip_x(), child: self.child.flip_x(), } } /// Flip the alignment on the y-axis. pub fn flip_y(self) -> Self { Self { parent: self.parent.flip_y(), child: self.child.flip_y(), } } /// Flip the alignment on both axes. pub fn flip(self) -> Self { Self { parent: self.parent.flip(), child: self.child.flip(), } } /// Returns the 3 alternative [`RectAlign`]s that are flipped in various ways, for use /// with [`RectAlign::find_best_align`]. 
pub fn symmetries(self) -> [Self; 3] { [self.flip_x(), self.flip_y(), self.flip()] } /// Look for the first alternative [`RectAlign`] that allows the child rect to fit /// inside the `content_rect`. /// /// If no alternative fits, the first is returned. /// If no alternatives are given, `None` is returned. /// /// See also: /// - [`RectAlign::symmetries`] to calculate alternatives /// - [`RectAlign::MENU_ALIGNS`] for the 12 common menu positions pub fn find_best_align( values_to_try: impl Iterator<Item = Self>, content_rect: Rect, parent_rect: Rect, gap: f32, expected_size: Vec2, ) -> Option<Self> { let mut first_choice = None; for align in values_to_try { first_choice = first_choice.or(Some(align)); // Remember the first alternative let suggested_popup_rect = align.align_rect(&parent_rect, expected_size, gap); if content_rect.contains_rect(suggested_popup_rect) { return Some(align); } } first_choice } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/align.rs
crates/emath/src/align.rs
//! One- and two-dimensional alignment ([`Align::Center`], [`Align2::LEFT_TOP`] etc). use crate::{Pos2, Rangef, Rect, Vec2, pos2, vec2}; /// left/center/right or top/center/bottom alignment for e.g. anchors and layouts. #[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub enum Align { /// Left or top. #[default] Min, /// Horizontal or vertical center. Center, /// Right or bottom. Max, } impl Align { /// Convenience for [`Self::Min`] pub const LEFT: Self = Self::Min; /// Convenience for [`Self::Max`] pub const RIGHT: Self = Self::Max; /// Convenience for [`Self::Min`] pub const TOP: Self = Self::Min; /// Convenience for [`Self::Max`] pub const BOTTOM: Self = Self::Max; /// Convert `Min => 0.0`, `Center => 0.5` or `Max => 1.0`. #[inline(always)] pub fn to_factor(self) -> f32 { match self { Self::Min => 0.0, Self::Center => 0.5, Self::Max => 1.0, } } /// Convert `Min => -1.0`, `Center => 0.0` or `Max => 1.0`. #[inline(always)] pub fn to_sign(self) -> f32 { match self { Self::Min => -1.0, Self::Center => 0.0, Self::Max => 1.0, } } /// Returns the inverse alignment. /// `Min` becomes `Max`, `Center` stays the same, `Max` becomes `Min`. pub fn flip(self) -> Self { match self { Self::Min => Self::Max, Self::Center => Self::Center, Self::Max => Self::Min, } } /// Returns a range of given size within a specified range. /// /// If the requested `size` is bigger than the size of `range`, then the returned /// range will not fit into the available `range`. 
The extra space will be allocated /// from: /// /// |Align |Side | /// |------|------------| /// |Min |right (end) | /// |Center|both | /// |Max |left (start)| /// /// # Examples /// ``` /// use std::f32::{INFINITY, NEG_INFINITY}; /// use emath::Align::*; /// /// // The size is smaller than a range /// assert_eq!(Min .align_size_within_range(2.0, 10.0..=20.0), 10.0..=12.0); /// assert_eq!(Center.align_size_within_range(2.0, 10.0..=20.0), 14.0..=16.0); /// assert_eq!(Max .align_size_within_range(2.0, 10.0..=20.0), 18.0..=20.0); /// /// // The size is bigger than a range /// assert_eq!(Min .align_size_within_range(20.0, 10.0..=20.0), 10.0..=30.0); /// assert_eq!(Center.align_size_within_range(20.0, 10.0..=20.0), 5.0..=25.0); /// assert_eq!(Max .align_size_within_range(20.0, 10.0..=20.0), 0.0..=20.0); /// /// // The size is infinity, but range is finite - a special case of a previous example /// assert_eq!(Min .align_size_within_range(INFINITY, 10.0..=20.0), 10.0..=INFINITY); /// assert_eq!(Center.align_size_within_range(INFINITY, 10.0..=20.0), NEG_INFINITY..=INFINITY); /// assert_eq!(Max .align_size_within_range(INFINITY, 10.0..=20.0), NEG_INFINITY..=20.0); /// ``` /// /// The infinity-sized ranges can produce a surprising results, if the size is also infinity, /// use such ranges with carefully! /// /// ``` /// use std::f32::{INFINITY, NEG_INFINITY}; /// use emath::Align::*; /// /// // Allocating a size aligned for infinity bound will lead to empty ranges! /// assert_eq!(Min .align_size_within_range(2.0, 10.0..=INFINITY), 10.0..=12.0); /// assert_eq!(Center.align_size_within_range(2.0, 10.0..=INFINITY), INFINITY..=INFINITY);// (!) /// assert_eq!(Max .align_size_within_range(2.0, 10.0..=INFINITY), INFINITY..=INFINITY);// (!) /// /// assert_eq!(Min .align_size_within_range(2.0, NEG_INFINITY..=20.0), NEG_INFINITY..=NEG_INFINITY);// (!) /// assert_eq!(Center.align_size_within_range(2.0, NEG_INFINITY..=20.0), NEG_INFINITY..=NEG_INFINITY);// (!) 
/// assert_eq!(Max .align_size_within_range(2.0, NEG_INFINITY..=20.0), 18.0..=20.0); /// /// /// // The infinity size will always return the given range if it has at least one infinity bound /// assert_eq!(Min .align_size_within_range(INFINITY, 10.0..=INFINITY), 10.0..=INFINITY); /// assert_eq!(Center.align_size_within_range(INFINITY, 10.0..=INFINITY), 10.0..=INFINITY); /// assert_eq!(Max .align_size_within_range(INFINITY, 10.0..=INFINITY), 10.0..=INFINITY); /// /// assert_eq!(Min .align_size_within_range(INFINITY, NEG_INFINITY..=20.0), NEG_INFINITY..=20.0); /// assert_eq!(Center.align_size_within_range(INFINITY, NEG_INFINITY..=20.0), NEG_INFINITY..=20.0); /// assert_eq!(Max .align_size_within_range(INFINITY, NEG_INFINITY..=20.0), NEG_INFINITY..=20.0); /// ``` #[inline] pub fn align_size_within_range(self, size: f32, range: impl Into<Rangef>) -> Rangef { let range = range.into(); let Rangef { min, max } = range; if max - min == f32::INFINITY && size == f32::INFINITY { return range; } match self { Self::Min => Rangef::new(min, min + size), Self::Center => { if size == f32::INFINITY { Rangef::new(f32::NEG_INFINITY, f32::INFINITY) } else { let left = crate::fast_midpoint(min, max) - size / 2.0; Rangef::new(left, left + size) } } Self::Max => Rangef::new(max - size, max), } } } // ---------------------------------------------------------------------------- /// Two-dimension alignment, e.g. [`Align2::LEFT_TOP`]. 
#[derive(Clone, Copy, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub struct Align2(pub [Align; 2]); impl Align2 { pub const LEFT_BOTTOM: Self = Self([Align::Min, Align::Max]); pub const LEFT_CENTER: Self = Self([Align::Min, Align::Center]); pub const LEFT_TOP: Self = Self([Align::Min, Align::Min]); pub const CENTER_BOTTOM: Self = Self([Align::Center, Align::Max]); pub const CENTER_CENTER: Self = Self([Align::Center, Align::Center]); pub const CENTER_TOP: Self = Self([Align::Center, Align::Min]); pub const RIGHT_BOTTOM: Self = Self([Align::Max, Align::Max]); pub const RIGHT_CENTER: Self = Self([Align::Max, Align::Center]); pub const RIGHT_TOP: Self = Self([Align::Max, Align::Min]); } impl Align2 { /// Returns an alignment by the X (horizontal) axis #[inline(always)] pub fn x(self) -> Align { self.0[0] } /// Returns an alignment by the Y (vertical) axis #[inline(always)] pub fn y(self) -> Align { self.0[1] } /// -1, 0, or +1 for each axis pub fn to_sign(self) -> Vec2 { vec2(self.x().to_sign(), self.y().to_sign()) } /// Flip on the x-axis /// e.g. `TOP_LEFT` -> `TOP_RIGHT` pub fn flip_x(self) -> Self { Self([self.x().flip(), self.y()]) } /// Flip on the y-axis /// e.g. `TOP_LEFT` -> `BOTTOM_LEFT` pub fn flip_y(self) -> Self { Self([self.x(), self.y().flip()]) } /// Flip on both axes /// e.g. `TOP_LEFT` -> `BOTTOM_RIGHT` pub fn flip(self) -> Self { Self([self.x().flip(), self.y().flip()]) } /// Used e.g. to anchor a piece of text to a part of the rectangle. 
/// Give a position within the rect, specified by the aligns pub fn anchor_rect(self, rect: Rect) -> Rect { let x = match self.x() { Align::Min => rect.left(), Align::Center => rect.left() - 0.5 * rect.width(), Align::Max => rect.left() - rect.width(), }; let y = match self.y() { Align::Min => rect.top(), Align::Center => rect.top() - 0.5 * rect.height(), Align::Max => rect.top() - rect.height(), }; Rect::from_min_size(pos2(x, y), rect.size()) } /// Use this anchor to position something around `pos`, /// e.g. [`Self::RIGHT_TOP`] means the right-top of the rect /// will end up at `pos`. pub fn anchor_size(self, pos: Pos2, size: Vec2) -> Rect { let x = match self.x() { Align::Min => pos.x, Align::Center => pos.x - 0.5 * size.x, Align::Max => pos.x - size.x, }; let y = match self.y() { Align::Min => pos.y, Align::Center => pos.y - 0.5 * size.y, Align::Max => pos.y - size.y, }; Rect::from_min_size(pos2(x, y), size) } /// e.g. center a size within a given frame pub fn align_size_within_rect(self, size: Vec2, frame: Rect) -> Rect { let x_range = self.x().align_size_within_range(size.x, frame.x_range()); let y_range = self.y().align_size_within_range(size.y, frame.y_range()); Rect::from_x_y_ranges(x_range, y_range) } /// Returns the point on the rect's frame or in the center of a rect according /// to the alignments of this object. 
/// /// ```text /// (*)-----------+------(*)------+-----------(*)--> X /// | | | | /// | Min, Min | Center, Min | Max, Min | /// | | | | /// +------------+---------------+------------+ /// | | | | /// (*)Min, Center|Center(*)Center|Max, Center(*) /// | | | | /// +------------+---------------+------------+ /// | | | | /// | Min, Max | Center, Max | Max, Max | /// | | | | /// (*)-----------+------(*)------+-----------(*) /// | /// Y /// ``` pub fn pos_in_rect(self, frame: &Rect) -> Pos2 { let x = match self.x() { Align::Min => frame.left(), Align::Center => frame.center().x, Align::Max => frame.right(), }; let y = match self.y() { Align::Min => frame.top(), Align::Center => frame.center().y, Align::Max => frame.bottom(), }; pos2(x, y) } } impl std::ops::Index<usize> for Align2 { type Output = Align; #[inline(always)] fn index(&self, index: usize) -> &Align { &self.0[index] } } impl std::ops::IndexMut<usize> for Align2 { #[inline(always)] fn index_mut(&mut self, index: usize) -> &mut Align { &mut self.0[index] } } /// Allocates a rectangle of the specified `size` inside the `frame` rectangle /// around of its center. /// /// If `size` is bigger than the `frame`s size the returned rect will bounce out /// of the `frame`. pub fn center_size_in_rect(size: Vec2, frame: Rect) -> Rect { Align2::CENTER_CENTER.align_size_within_rect(size, frame) } impl std::fmt::Debug for Align2 { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "Align2({:?}, {:?})", self.x(), self.y()) } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/rot2.rs
crates/emath/src/rot2.rs
use super::Vec2; // {s,c} represents the rotation matrix: // // | c -s | // | s c | // // `vec2(c,s)` represents where the X axis will end up after rotation. // /// Represents a rotation in the 2D plane. /// /// A rotation of 𝞃/4 = 90° rotates the X axis to the Y axis. /// /// Normally a [`Rot2`] is normalized (unit-length). /// If not, it will also scale vectors. #[repr(C)] #[derive(Clone, Copy, PartialEq)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] #[cfg_attr(feature = "bytemuck", derive(bytemuck::Pod, bytemuck::Zeroable))] pub struct Rot2 { /// `angle.sin()` s: f32, /// `angle.cos()` c: f32, } /// Identity rotation impl Default for Rot2 { /// Identity rotation #[inline] fn default() -> Self { Self { s: 0.0, c: 1.0 } } } impl Rot2 { /// The identity rotation: nothing rotates pub const IDENTITY: Self = Self { s: 0.0, c: 1.0 }; /// Angle is clockwise in radians. /// A 𝞃/4 = 90° rotation means rotating the X axis to the Y axis. #[inline] pub fn from_angle(angle: f32) -> Self { let (s, c) = angle.sin_cos(); Self { s, c } } #[inline] pub fn angle(self) -> f32 { self.s.atan2(self.c) } /// The factor by which vectors will be scaled. 
#[inline] pub fn length(self) -> f32 { self.c.hypot(self.s) } #[inline] pub fn length_squared(self) -> f32 { self.c.powi(2) + self.s.powi(2) } #[inline] pub fn is_finite(self) -> bool { self.c.is_finite() && self.s.is_finite() } #[must_use] #[inline] pub fn inverse(self) -> Self { Self { s: -self.s, c: self.c, } / self.length_squared() } #[must_use] #[inline] pub fn normalized(self) -> Self { let l = self.length(); let ret = Self { c: self.c / l, s: self.s / l, }; debug_assert!( ret.is_finite(), "Rot2::normalized produced a non-finite result" ); ret } } impl std::fmt::Debug for Rot2 { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Some(precision) = f.precision() { write!( f, "Rot2 {{ angle: {:.2$}°, length: {} }}", self.angle().to_degrees(), self.length(), precision ) } else { write!( f, "Rot2 {{ angle: {:.1}°, length: {} }}", self.angle().to_degrees(), self.length(), ) } } } impl std::ops::Mul<Self> for Rot2 { type Output = Self; #[inline] fn mul(self, r: Self) -> Self { /* |lc -ls| * |rc -rs| |ls lc| |rs rc| */ Self { c: self.c * r.c - self.s * r.s, s: self.s * r.c + self.c * r.s, } } } /// Rotates (and maybe scales) the vector. impl std::ops::Mul<Vec2> for Rot2 { type Output = Vec2; #[inline] fn mul(self, v: Vec2) -> Vec2 { Vec2 { x: self.c * v.x - self.s * v.y, y: self.s * v.x + self.c * v.y, } } } /// Scales the rotor. impl std::ops::Mul<Rot2> for f32 { type Output = Rot2; #[inline] fn mul(self, r: Rot2) -> Rot2 { Rot2 { c: self * r.c, s: self * r.s, } } } /// Scales the rotor. impl std::ops::Mul<f32> for Rot2 { type Output = Self; #[inline] fn mul(self, r: f32) -> Self { Self { c: self.c * r, s: self.s * r, } } } /// Scales the rotor. 
impl std::ops::Div<f32> for Rot2 { type Output = Self; #[inline] fn div(self, r: f32) -> Self { Self { c: self.c / r, s: self.s / r, } } } #[cfg(test)] mod test { use super::Rot2; use crate::vec2; #[test] fn test_rotation2() { { let angle = std::f32::consts::TAU / 6.0; let rot = Rot2::from_angle(angle); assert!((rot.angle() - angle).abs() < 1e-5); assert!((rot * rot.inverse()).angle().abs() < 1e-5); assert!((rot.inverse() * rot).angle().abs() < 1e-5); } { let angle = std::f32::consts::TAU / 4.0; let rot = Rot2::from_angle(angle); assert!(((rot * vec2(1.0, 0.0)) - vec2(0.0, 1.0)).length() < 1e-5); } { // Test rotation and scaling let angle = std::f32::consts::TAU / 4.0; let rot = 3.0 * Rot2::from_angle(angle); let rotated = rot * vec2(1.0, 0.0); let expected = vec2(0.0, 3.0); assert!( (rotated - expected).length() < 1e-5, "Expected {rotated:?} to equal {expected:?}. rot: {rot:?}", ); let undone = rot.inverse() * rot; assert!(undone.angle().abs() < 1e-5); assert!((undone.length() - 1.0).abs() < 1e-5,); } } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/rect_transform.rs
crates/emath/src/rect_transform.rs
use crate::{Pos2, Rect, Vec2, pos2, remap, remap_clamp}; /// Linearly transforms positions from one [`Rect`] to another. /// /// [`RectTransform`] stores the rectangles, and therefore supports clamping and culling. #[repr(C)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] #[cfg_attr(feature = "bytemuck", derive(bytemuck::Pod, bytemuck::Zeroable))] pub struct RectTransform { from: Rect, to: Rect, } impl RectTransform { pub fn identity(from_and_to: Rect) -> Self { Self::from_to(from_and_to, from_and_to) } pub fn from_to(from: Rect, to: Rect) -> Self { Self { from, to } } pub fn from(&self) -> &Rect { &self.from } pub fn to(&self) -> &Rect { &self.to } /// The scale factors. pub fn scale(&self) -> Vec2 { self.to.size() / self.from.size() } pub fn inverse(&self) -> Self { Self::from_to(self.to, self.from) } /// Transforms the given coordinate in the `from` space to the `to` space. pub fn transform_pos(&self, pos: Pos2) -> Pos2 { pos2( remap(pos.x, self.from.x_range(), self.to.x_range()), remap(pos.y, self.from.y_range(), self.to.y_range()), ) } /// Transforms the given rectangle in the `in`-space to a rectangle in the `out`-space. pub fn transform_rect(&self, rect: Rect) -> Rect { Rect { min: self.transform_pos(rect.min), max: self.transform_pos(rect.max), } } /// Transforms the given coordinate in the `from` space to the `to` space, /// clamping if necessary. pub fn transform_pos_clamped(&self, pos: Pos2) -> Pos2 { pos2( remap_clamp(pos.x, self.from.x_range(), self.to.x_range()), remap_clamp(pos.y, self.from.y_range(), self.to.y_range()), ) } } /// Transforms the position. impl std::ops::Mul<Pos2> for RectTransform { type Output = Pos2; fn mul(self, pos: Pos2) -> Pos2 { self.transform_pos(pos) } } /// Transforms the position. impl std::ops::Mul<Pos2> for &RectTransform { type Output = Pos2; fn mul(self, pos: Pos2) -> Pos2 { self.transform_pos(pos) } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/crates/emath/src/vec2.rs
crates/emath/src/vec2.rs
use std::fmt; use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use crate::Vec2b; /// A vector has a direction and length. /// A [`Vec2`] is often used to represent a size. /// /// emath represents positions using [`crate::Pos2`]. /// /// Normally the units are points (logical pixels). #[repr(C)] #[derive(Clone, Copy, Default, PartialEq)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] #[cfg_attr(feature = "bytemuck", derive(bytemuck::Pod, bytemuck::Zeroable))] pub struct Vec2 { /// Rightwards. Width. pub x: f32, /// Downwards. Height. pub y: f32, } /// `vec2(x, y) == Vec2::new(x, y)` #[inline(always)] pub const fn vec2(x: f32, y: f32) -> Vec2 { Vec2 { x, y } } // ---------------------------------------------------------------------------- // Compatibility and convenience conversions to and from [f32; 2]: impl From<[f32; 2]> for Vec2 { #[inline(always)] fn from(v: [f32; 2]) -> Self { Self { x: v[0], y: v[1] } } } impl From<&[f32; 2]> for Vec2 { #[inline(always)] fn from(v: &[f32; 2]) -> Self { Self { x: v[0], y: v[1] } } } impl From<Vec2> for [f32; 2] { #[inline(always)] fn from(v: Vec2) -> Self { [v.x, v.y] } } impl From<&Vec2> for [f32; 2] { #[inline(always)] fn from(v: &Vec2) -> Self { [v.x, v.y] } } // ---------------------------------------------------------------------------- // Compatibility and convenience conversions to and from (f32, f32): impl From<(f32, f32)> for Vec2 { #[inline(always)] fn from(v: (f32, f32)) -> Self { Self { x: v.0, y: v.1 } } } impl From<&(f32, f32)> for Vec2 { #[inline(always)] fn from(v: &(f32, f32)) -> Self { Self { x: v.0, y: v.1 } } } impl From<Vec2> for (f32, f32) { #[inline(always)] fn from(v: Vec2) -> Self { (v.x, v.y) } } impl From<&Vec2> for (f32, f32) { #[inline(always)] fn from(v: &Vec2) -> Self { (v.x, v.y) } } impl From<Vec2b> for Vec2 { #[inline(always)] fn from(v: Vec2b) -> Self { Self { x: v.x as i32 as f32, y: v.y as i32 as f32, } } } // 
---------------------------------------------------------------------------- // Mint compatibility and convenience conversions #[cfg(feature = "mint")] impl From<mint::Vector2<f32>> for Vec2 { #[inline] fn from(v: mint::Vector2<f32>) -> Self { Self::new(v.x, v.y) } } #[cfg(feature = "mint")] impl From<Vec2> for mint::Vector2<f32> { #[inline] fn from(v: Vec2) -> Self { Self { x: v.x, y: v.y } } } // ---------------------------------------------------------------------------- impl Vec2 { /// Right pub const X: Self = Self { x: 1.0, y: 0.0 }; /// Down pub const Y: Self = Self { x: 0.0, y: 1.0 }; /// +X pub const RIGHT: Self = Self { x: 1.0, y: 0.0 }; /// -X pub const LEFT: Self = Self { x: -1.0, y: 0.0 }; /// -Y pub const UP: Self = Self { x: 0.0, y: -1.0 }; /// +Y pub const DOWN: Self = Self { x: 0.0, y: 1.0 }; pub const ZERO: Self = Self { x: 0.0, y: 0.0 }; pub const ONE: Self = Self { x: 1.0, y: 1.0 }; pub const INFINITY: Self = Self::splat(f32::INFINITY); pub const NAN: Self = Self::splat(f32::NAN); #[inline(always)] pub const fn new(x: f32, y: f32) -> Self { Self { x, y } } /// Set both `x` and `y` to the same value. #[inline(always)] pub const fn splat(v: f32) -> Self { Self { x: v, y: v } } /// Treat this vector as a position. /// `v.to_pos2()` is equivalent to `Pos2::default() + v`. #[inline(always)] pub fn to_pos2(self) -> crate::Pos2 { crate::Pos2 { x: self.x, y: self.y, } } /// Safe normalize: returns zero if input is zero. #[must_use] #[inline(always)] pub fn normalized(self) -> Self { let len = self.length(); if len <= 0.0 { self } else { self / len } } /// Checks if `self` has length `1.0` up to a precision of `1e-6`. #[inline(always)] pub fn is_normalized(self) -> bool { (self.length_sq() - 1.0).abs() < 2e-6 } /// Rotates the vector by 90°, i.e positive X to positive Y /// (clockwise in egui coordinates). 
#[inline(always)] pub fn rot90(self) -> Self { vec2(self.y, -self.x) } #[inline(always)] pub fn length(self) -> f32 { self.x.hypot(self.y) } #[inline(always)] pub fn length_sq(self) -> f32 { self.x * self.x + self.y * self.y } /// Measures the angle of the vector. /// /// ``` /// # use emath::Vec2; /// use std::f32::consts::TAU; /// /// assert_eq!(Vec2::ZERO.angle(), 0.0); /// assert_eq!(Vec2::angled(0.0).angle(), 0.0); /// assert_eq!(Vec2::angled(1.0).angle(), 1.0); /// assert_eq!(Vec2::X.angle(), 0.0); /// assert_eq!(Vec2::Y.angle(), 0.25 * TAU); /// /// assert_eq!(Vec2::RIGHT.angle(), 0.0); /// assert_eq!(Vec2::DOWN.angle(), 0.25 * TAU); /// assert_eq!(Vec2::UP.angle(), -0.25 * TAU); /// ``` #[inline(always)] pub fn angle(self) -> f32 { self.y.atan2(self.x) } /// Create a unit vector with the given CW angle (in radians). /// * An angle of zero gives the unit X axis. /// * An angle of 𝞃/4 = 90° gives the unit Y axis. /// /// ``` /// # use emath::Vec2; /// use std::f32::consts::TAU; /// /// assert_eq!(Vec2::angled(0.0), Vec2::X); /// assert!((Vec2::angled(0.25 * TAU) - Vec2::Y).length() < 1e-5); /// ``` #[inline(always)] pub fn angled(angle: f32) -> Self { let (sin, cos) = angle.sin_cos(); vec2(cos, sin) } #[must_use] #[inline(always)] pub fn floor(self) -> Self { vec2(self.x.floor(), self.y.floor()) } #[must_use] #[inline(always)] pub fn round(self) -> Self { vec2(self.x.round(), self.y.round()) } #[must_use] #[inline(always)] pub fn ceil(self) -> Self { vec2(self.x.ceil(), self.y.ceil()) } #[must_use] #[inline] pub fn abs(self) -> Self { vec2(self.x.abs(), self.y.abs()) } /// True if all members are also finite. #[inline(always)] pub fn is_finite(self) -> bool { self.x.is_finite() && self.y.is_finite() } /// True if any member is NaN. 
#[inline(always)] pub fn any_nan(self) -> bool { self.x.is_nan() || self.y.is_nan() } #[must_use] #[inline] pub fn min(self, other: Self) -> Self { vec2(self.x.min(other.x), self.y.min(other.y)) } #[must_use] #[inline] pub fn max(self, other: Self) -> Self { vec2(self.x.max(other.x), self.y.max(other.y)) } /// The dot-product of two vectors. #[inline] pub fn dot(self, other: Self) -> f32 { self.x * other.x + self.y * other.y } /// Returns the minimum of `self.x` and `self.y`. #[must_use] #[inline(always)] pub fn min_elem(self) -> f32 { self.x.min(self.y) } /// Returns the maximum of `self.x` and `self.y`. #[inline(always)] #[must_use] pub fn max_elem(self) -> f32 { self.x.max(self.y) } /// Swizzle the axes. #[inline] #[must_use] pub fn yx(self) -> Self { Self { x: self.y, y: self.x, } } #[must_use] #[inline] pub fn clamp(self, min: Self, max: Self) -> Self { Self { x: self.x.clamp(min.x, max.x), y: self.y.clamp(min.y, max.y), } } } impl std::ops::Index<usize> for Vec2 { type Output = f32; #[inline(always)] fn index(&self, index: usize) -> &f32 { match index { 0 => &self.x, 1 => &self.y, _ => panic!("Vec2 index out of bounds: {index}"), } } } impl std::ops::IndexMut<usize> for Vec2 { #[inline(always)] fn index_mut(&mut self, index: usize) -> &mut f32 { match index { 0 => &mut self.x, 1 => &mut self.y, _ => panic!("Vec2 index out of bounds: {index}"), } } } impl Eq for Vec2 {} impl Neg for Vec2 { type Output = Self; #[inline(always)] fn neg(self) -> Self { vec2(-self.x, -self.y) } } impl AddAssign for Vec2 { #[inline(always)] fn add_assign(&mut self, rhs: Self) { *self = Self { x: self.x + rhs.x, y: self.y + rhs.y, }; } } impl SubAssign for Vec2 { #[inline(always)] fn sub_assign(&mut self, rhs: Self) { *self = Self { x: self.x - rhs.x, y: self.y - rhs.y, }; } } impl Add for Vec2 { type Output = Self; #[inline(always)] fn add(self, rhs: Self) -> Self { Self { x: self.x + rhs.x, y: self.y + rhs.y, } } } impl Sub for Vec2 { type Output = Self; #[inline(always)] fn 
sub(self, rhs: Self) -> Self { Self { x: self.x - rhs.x, y: self.y - rhs.y, } } } /// Element-wise multiplication impl Mul<Self> for Vec2 { type Output = Self; #[inline(always)] fn mul(self, vec: Self) -> Self { Self { x: self.x * vec.x, y: self.y * vec.y, } } } /// Element-wise division impl Div<Self> for Vec2 { type Output = Self; #[inline(always)] fn div(self, rhs: Self) -> Self { Self { x: self.x / rhs.x, y: self.y / rhs.y, } } } impl MulAssign<f32> for Vec2 { #[inline(always)] fn mul_assign(&mut self, rhs: f32) { self.x *= rhs; self.y *= rhs; } } impl DivAssign<f32> for Vec2 { #[inline(always)] fn div_assign(&mut self, rhs: f32) { self.x /= rhs; self.y /= rhs; } } impl Mul<f32> for Vec2 { type Output = Self; #[inline(always)] fn mul(self, factor: f32) -> Self { Self { x: self.x * factor, y: self.y * factor, } } } impl Mul<Vec2> for f32 { type Output = Vec2; #[inline(always)] fn mul(self, vec: Vec2) -> Vec2 { Vec2 { x: self * vec.x, y: self * vec.y, } } } impl Div<f32> for Vec2 { type Output = Self; #[inline(always)] fn div(self, factor: f32) -> Self { Self { x: self.x / factor, y: self.y / factor, } } } impl fmt::Debug for Vec2 { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if let Some(precision) = f.precision() { write!(f, "[{1:.0$} {2:.0$}]", precision, self.x, self.y) } else { write!(f, "[{:.1} {:.1}]", self.x, self.y) } } } impl fmt::Display for Vec2 { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("[")?; self.x.fmt(f)?; f.write_str(" ")?; self.y.fmt(f)?; f.write_str("]")?; Ok(()) } } #[cfg(test)] mod test { use super::*; macro_rules! 
almost_eq { ($left: expr, $right: expr) => { let left = $left; let right = $right; assert!((left - right).abs() < 1e-6, "{} != {}", left, right); }; } #[test] fn test_vec2() { use std::f32::consts::TAU; assert_eq!(Vec2::ZERO.angle(), 0.0); assert_eq!(Vec2::angled(0.0).angle(), 0.0); assert_eq!(Vec2::angled(1.0).angle(), 1.0); assert_eq!(Vec2::X.angle(), 0.0); assert_eq!(Vec2::Y.angle(), 0.25 * TAU); assert_eq!(Vec2::RIGHT.angle(), 0.0); assert_eq!(Vec2::DOWN.angle(), 0.25 * TAU); almost_eq!(Vec2::LEFT.angle(), 0.50 * TAU); assert_eq!(Vec2::UP.angle(), -0.25 * TAU); let mut assignment = vec2(1.0, 2.0); assignment += vec2(3.0, 4.0); assert_eq!(assignment, vec2(4.0, 6.0)); let mut assignment = vec2(4.0, 6.0); assignment -= vec2(1.0, 2.0); assert_eq!(assignment, vec2(3.0, 4.0)); let mut assignment = vec2(1.0, 2.0); assignment *= 2.0; assert_eq!(assignment, vec2(2.0, 4.0)); let mut assignment = vec2(2.0, 4.0); assignment /= 2.0; assert_eq!(assignment, vec2(1.0, 2.0)); } #[test] fn test_vec2_normalized() { fn generate_spiral(n: usize, start: Vec2, end: Vec2) -> impl Iterator<Item = Vec2> { let angle_step = 2.0 * std::f32::consts::PI / n as f32; let radius_step = (end.length() - start.length()) / n as f32; (0..n).map(move |i| { let angle = i as f32 * angle_step; let radius = start.length() + i as f32 * radius_step; let x = radius * angle.cos(); let y = radius * angle.sin(); vec2(x, y) }) } for v in generate_spiral(40, Vec2::splat(0.1), Vec2::splat(2.0)) { let vn = v.normalized(); almost_eq!(vn.length(), 1.0); assert!(vn.is_normalized()); } } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/external_eventloop_async/src/app.rs
examples/external_eventloop_async/src/app.rs
#![expect(clippy::unwrap_used)] // It's an example use std::{cell::Cell, io, os::fd::AsRawFd as _, rc::Rc, time::Duration}; use tokio::task::LocalSet; use winit::event_loop::{ControlFlow, EventLoop}; use eframe::{EframePumpStatus, UserEvent, egui}; pub fn run() -> io::Result<()> { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([320.0, 240.0]), ..Default::default() }; let mut eventloop = EventLoop::<UserEvent>::with_user_event().build().unwrap(); eventloop.set_control_flow(ControlFlow::Poll); let mut winit_app = eframe::create_native( "External Eventloop Application", options, Box::new(|_| Ok(Box::<MyApp>::default())), &eventloop, ); let rt = tokio::runtime::Builder::new_current_thread() .enable_all() .build() .unwrap(); let local = LocalSet::new(); local.block_on(&rt, async { let eventloop_fd = tokio::io::unix::AsyncFd::new(eventloop.as_raw_fd())?; let mut control_flow = ControlFlow::Poll; loop { let mut guard = match control_flow { ControlFlow::Poll => None, ControlFlow::Wait => Some(eventloop_fd.readable().await?), ControlFlow::WaitUntil(deadline) => { tokio::time::timeout_at(deadline.into(), eventloop_fd.readable()) .await .ok() .transpose()? 
} }; match winit_app.pump_eframe_app(&mut eventloop, None) { EframePumpStatus::Continue(next) => control_flow = next, EframePumpStatus::Exit(code) => { log::info!("exit code: {code}"); break; } } if let Some(mut guard) = guard.take() { guard.clear_ready(); } } Ok::<_, io::Error>(()) }) } struct MyApp { value: Rc<Cell<u32>>, spin: bool, blinky: bool, } impl Default for MyApp { fn default() -> Self { Self { value: Rc::new(Cell::new(42)), spin: false, blinky: false, } } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.heading("My External Eventloop Application"); ui.horizontal(|ui| { if ui.button("Increment Now").clicked() { self.value.set(self.value.get() + 1); } if ui.button("Increment Later").clicked() { let value = Rc::clone(&self.value); let ctx = ui.ctx().clone(); tokio::task::spawn_local(async move { tokio::time::sleep(Duration::from_secs(1)).await; value.set(value.get() + 1); ctx.request_repaint(); }); } }); ui.label(format!("Value: {}", self.value.get())); if ui.button("Toggle Spinner").clicked() { self.spin = !self.spin; } if ui.button("Toggle Blinky").clicked() { self.blinky = !self.blinky; } if self.spin { ui.spinner(); } if self.blinky { let now = ui.input(|i| i.time); let blink = now % 1.0 < 0.5; egui::Frame::new() .inner_margin(3) .corner_radius(5) .fill(if blink { egui::Color32::RED } else { egui::Color32::TRANSPARENT }) .show(ui, |ui| { ui.label("Blinky!"); }); ui.request_repaint_after_secs((0.5 - (now % 0.5)) as f32); } }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/external_eventloop_async/src/main.rs
examples/external_eventloop_async/src/main.rs
#![expect(rustdoc::missing_crate_level_docs)] // it's an example #[cfg(target_os = "linux")] mod app; #[cfg(target_os = "linux")] fn main() -> std::io::Result<()> { app::run() } // Do not check `app` on unsupported platforms when check "--all-features" is used in CI. #[cfg(not(target_os = "linux"))] fn main() { #![expect(clippy::print_stdout)] println!("This example only supports Linux."); }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/external_eventloop/src/main.rs
examples/external_eventloop/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs, clippy::unwrap_used)] // it's an example use eframe::{UserEvent, egui}; use std::{cell::Cell, rc::Rc}; use winit::event_loop::{ControlFlow, EventLoop}; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([320.0, 240.0]), ..Default::default() }; let eventloop = EventLoop::<UserEvent>::with_user_event().build().unwrap(); eventloop.set_control_flow(ControlFlow::Poll); let mut winit_app = eframe::create_native( "External Eventloop Application", options, Box::new(|_| Ok(Box::<MyApp>::default())), &eventloop, ); eventloop.run_app(&mut winit_app)?; Ok(()) } struct MyApp { value: Rc<Cell<u32>>, spin: bool, blinky: bool, } impl Default for MyApp { fn default() -> Self { Self { value: Rc::new(Cell::new(42)), spin: false, blinky: false, } } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.heading("My External Eventloop Application"); ui.horizontal(|ui| { if ui.button("Increment Now").clicked() { self.value.set(self.value.get() + 1); } }); ui.label(format!("Value: {}", self.value.get())); if ui.button("Toggle Spinner").clicked() { self.spin = !self.spin; } if ui.button("Toggle Blinky").clicked() { self.blinky = !self.blinky; } if self.spin { ui.spinner(); } if self.blinky { let now = ui.input(|i| i.time); let blink = now % 1.0 < 0.5; egui::Frame::new() .inner_margin(3) .corner_radius(5) .fill(if blink { egui::Color32::RED } else { egui::Color32::TRANSPARENT }) .show(ui, |ui| { ui.label("Blinky!"); }); ui.request_repaint_after_secs((0.5 - (now % 0.5)) as f32); } }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/custom_keypad/src/keypad.rs
examples/custom_keypad/src/keypad.rs
use eframe::egui::{self, Button, Ui, Vec2, pos2, vec2}; #[derive(Clone, Copy, Debug, Default, PartialEq)] enum Transition { #[default] None, CloseOnNextFrame, CloseImmediately, } #[derive(Clone, Debug)] struct State { open: bool, closable: bool, close_on_next_frame: bool, start_pos: egui::Pos2, focus: Option<egui::Id>, events: Option<Vec<egui::Event>>, } impl State { fn new() -> Self { Self { open: false, closable: false, close_on_next_frame: false, start_pos: pos2(100.0, 100.0), focus: None, events: None, } } fn queue_char(&mut self, c: char) { let events = self.events.get_or_insert(vec![]); if let Some(key) = egui::Key::from_name(&c.to_string()) { events.push(egui::Event::Key { key, physical_key: Some(key), pressed: true, repeat: false, modifiers: Default::default(), }); } events.push(egui::Event::Text(c.to_string())); } fn queue_key(&mut self, key: egui::Key) { let events = self.events.get_or_insert(vec![]); events.push(egui::Event::Key { key, physical_key: Some(key), pressed: true, repeat: false, modifiers: Default::default(), }); } } impl Default for State { fn default() -> Self { Self::new() } } /// A simple keypad widget. 
pub struct Keypad { id: egui::Id, } impl Keypad { pub fn new() -> Self { Self { id: egui::Id::new("keypad"), } } pub fn bump_events(&self, ctx: &egui::Context, raw_input: &mut egui::RawInput) { let events = ctx.memory_mut(|m| { m.data .get_temp_mut_or_default::<State>(self.id) .events .take() }); if let Some(mut events) = events { events.append(&mut raw_input.events); raw_input.events = events; } } fn buttons(ui: &mut Ui, state: &mut State) -> Transition { let mut trans = Transition::None; ui.vertical(|ui| { let window_margin = ui.spacing().window_margin; let size_1x1 = vec2(32.0, 26.0); let _size_1x2 = vec2(32.0, 52.0 + window_margin.topf()); let _size_2x1 = vec2(64.0 + window_margin.leftf(), 26.0); ui.spacing_mut().item_spacing = Vec2::splat(window_margin.leftf()); ui.horizontal(|ui| { if ui.add_sized(size_1x1, Button::new("1")).clicked() { state.queue_char('1'); } if ui.add_sized(size_1x1, Button::new("2")).clicked() { state.queue_char('2'); } if ui.add_sized(size_1x1, Button::new("3")).clicked() { state.queue_char('3'); } if ui.add_sized(size_1x1, Button::new("⏮")).clicked() { state.queue_key(egui::Key::Home); } if ui.add_sized(size_1x1, Button::new("🔙")).clicked() { state.queue_key(egui::Key::Backspace); } }); ui.horizontal(|ui| { if ui.add_sized(size_1x1, Button::new("4")).clicked() { state.queue_char('4'); } if ui.add_sized(size_1x1, Button::new("5")).clicked() { state.queue_char('5'); } if ui.add_sized(size_1x1, Button::new("6")).clicked() { state.queue_char('6'); } if ui.add_sized(size_1x1, Button::new("⏭")).clicked() { state.queue_key(egui::Key::End); } if ui.add_sized(size_1x1, Button::new("⎆")).clicked() { state.queue_key(egui::Key::Enter); trans = Transition::CloseOnNextFrame; } }); ui.horizontal(|ui| { if ui.add_sized(size_1x1, Button::new("7")).clicked() { state.queue_char('7'); } if ui.add_sized(size_1x1, Button::new("8")).clicked() { state.queue_char('8'); } if ui.add_sized(size_1x1, Button::new("9")).clicked() { state.queue_char('9'); } if 
ui.add_sized(size_1x1, Button::new("⏶")).clicked() { state.queue_key(egui::Key::ArrowUp); } if ui.add_sized(size_1x1, Button::new("⌨")).clicked() { trans = Transition::CloseImmediately; } }); ui.horizontal(|ui| { if ui.add_sized(size_1x1, Button::new("0")).clicked() { state.queue_char('0'); } if ui.add_sized(size_1x1, Button::new(".")).clicked() { state.queue_char('.'); } if ui.add_sized(size_1x1, Button::new("⏴")).clicked() { state.queue_key(egui::Key::ArrowLeft); } if ui.add_sized(size_1x1, Button::new("⏷")).clicked() { state.queue_key(egui::Key::ArrowDown); } if ui.add_sized(size_1x1, Button::new("⏵")).clicked() { state.queue_key(egui::Key::ArrowRight); } }); }); trans } pub fn show(&self, ctx: &egui::Context) { let (focus, mut state) = ctx.memory(|m| { ( m.focused(), m.data.get_temp::<State>(self.id).unwrap_or_default(), ) }); let is_first_show = ctx.egui_wants_keyboard_input() && state.focus != focus; if is_first_show { let y = ctx.global_style().spacing.interact_size.y * 1.25; state.open = true; state.start_pos = ctx.input(|i| { i.pointer .hover_pos() .map_or(pos2(100.0, 100.0), |p| p + vec2(0.0, y)) }); state.focus = focus; } if state.close_on_next_frame { state.open = false; state.close_on_next_frame = false; state.focus = None; } let mut open = state.open; let win = egui::Window::new("⌨ Keypad"); let win = if is_first_show { win.current_pos(state.start_pos) } else { win.default_pos(state.start_pos) }; let resp = win .movable(true) .resizable(false) .open(&mut open) .show(ctx, |ui| Self::buttons(ui, &mut state)); state.open = open; if let Some(resp) = resp { match resp.inner { Some(Transition::CloseOnNextFrame) => { state.close_on_next_frame = true; } Some(Transition::CloseImmediately) => { state.open = false; state.focus = None; } _ => {} } if !state.closable && resp.response.hovered() { state.closable = true; } if state.closable && resp.response.clicked_elsewhere() { state.open = false; state.closable = false; state.focus = None; } if is_first_show { 
ctx.move_to_top(resp.response.layer_id); } } if let (true, Some(focus)) = (state.open, state.focus) { ctx.memory_mut(|m| { m.request_focus(focus); }); } ctx.memory_mut(|m| m.data.insert_temp(self.id, state)); } } impl Default for Keypad { fn default() -> Self { Self::new() } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/custom_keypad/src/main.rs
examples/custom_keypad/src/main.rs
// #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui; mod keypad; use keypad::Keypad; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([640.0, 480.0]), ..Default::default() }; eframe::run_native( "Custom Keypad App", options, Box::new(|cc| { // Use the dark theme cc.egui_ctx.set_theme(egui::Theme::Dark); // This gives us image support: egui_extras::install_image_loaders(&cc.egui_ctx); Ok(Box::<MyApp>::default()) }), ) } struct MyApp { name: String, age: u32, keypad: Keypad, } impl MyApp {} impl Default for MyApp { fn default() -> Self { Self { name: "Arthur".to_owned(), age: 42, keypad: Keypad::new(), } } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::Window::new("Custom Keypad") .default_pos([100.0, 100.0]) .title_bar(true) .show(ui.ctx(), |ui| { ui.horizontal(|ui| { ui.label("Your name: "); ui.text_edit_singleline(&mut self.name); }); ui.add(egui::Slider::new(&mut self.age, 0..=120).text("age")); if ui.button("Increment").clicked() { self.age += 1; } ui.label(format!("Hello '{}', age {}", self.name, self.age)); }); self.keypad.show(ui.ctx()); } fn raw_input_hook(&mut self, ctx: &egui::Context, raw_input: &mut egui::RawInput) { self.keypad.bump_events(ctx, raw_input); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/custom_window_frame/src/main.rs
examples/custom_window_frame/src/main.rs
//! Show a custom window frame instead of the default OS window chrome decorations. #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![allow(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui::{self, ViewportCommand}; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default() .with_decorations(false) // Hide the OS-specific "chrome" around the window .with_inner_size([400.0, 100.0]) .with_min_inner_size([400.0, 100.0]) .with_transparent(true), // To have rounded corners we need transparency ..Default::default() }; eframe::run_native( "Custom window frame", // unused title options, Box::new(|_cc| Ok(Box::<MyApp>::default())), ) } #[derive(Default)] struct MyApp {} impl eframe::App for MyApp { fn clear_color(&self, _visuals: &egui::Visuals) -> [f32; 4] { egui::Rgba::TRANSPARENT.to_array() // Make sure we don't paint anything behind the rounded corners } fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { custom_window_frame(ui, "egui with custom frame", |ui| { ui.label("This is just the contents of the window."); ui.horizontal(|ui| { ui.label("egui theme:"); egui::widgets::global_theme_preference_buttons(ui); }); }); } } fn custom_window_frame(ui: &mut egui::Ui, title: &str, add_contents: impl FnOnce(&mut egui::Ui)) { use egui::UiBuilder; let panel_frame = egui::Frame::new() .fill(ui.global_style().visuals.window_fill()) .corner_radius(10) .stroke(ui.global_style().visuals.widgets.noninteractive.fg_stroke) .outer_margin(1); // so the stroke is within the bounds panel_frame.show(ui, |ui| { let app_rect = ui.max_rect(); ui.expand_to_include_rect(app_rect); // Expand frame to include it all let title_bar_height = 32.0; let title_bar_rect = { let mut rect = app_rect; rect.max.y = rect.min.y + title_bar_height; rect }; title_bar_ui(ui, title_bar_rect, 
title); // Add the contents: let content_rect = { let mut rect = app_rect; rect.min.y = title_bar_rect.max.y; rect } .shrink(4.0); let mut content_ui = ui.new_child(UiBuilder::new().max_rect(content_rect)); add_contents(&mut content_ui); }); } fn title_bar_ui(ui: &mut egui::Ui, title_bar_rect: eframe::epaint::Rect, title: &str) { use egui::{Align2, FontId, Id, PointerButton, Sense, UiBuilder, vec2}; let painter = ui.painter(); let title_bar_response = ui.interact( title_bar_rect, Id::new("title_bar"), Sense::click_and_drag(), ); // Paint the title: painter.text( title_bar_rect.center(), Align2::CENTER_CENTER, title, FontId::proportional(20.0), ui.style().visuals.text_color(), ); // Paint the line under the title: painter.line_segment( [ title_bar_rect.left_bottom() + vec2(1.0, 0.0), title_bar_rect.right_bottom() + vec2(-1.0, 0.0), ], ui.visuals().widgets.noninteractive.bg_stroke, ); // Interact with the title bar (drag to move window): if title_bar_response.double_clicked() { let is_maximized = ui.input(|i| i.viewport().maximized.unwrap_or(false)); ui.send_viewport_cmd(ViewportCommand::Maximized(!is_maximized)); } if title_bar_response.drag_started_by(PointerButton::Primary) { ui.send_viewport_cmd(ViewportCommand::StartDrag); } ui.scope_builder( UiBuilder::new() .max_rect(title_bar_rect) .layout(egui::Layout::right_to_left(egui::Align::Center)), |ui| { ui.spacing_mut().item_spacing.x = 0.0; ui.visuals_mut().button_frame = false; ui.add_space(8.0); close_maximize_minimize(ui); }, ); } /// Show some close/maximize/minimize buttons for the native window. 
fn close_maximize_minimize(ui: &mut egui::Ui) { use egui::{Button, RichText}; let button_height = 12.0; let close_response = ui .add(Button::new(RichText::new("❌").size(button_height))) .on_hover_text("Close the window"); if close_response.clicked() { ui.send_viewport_cmd(egui::ViewportCommand::Close); } let is_maximized = ui.input(|i| i.viewport().maximized.unwrap_or(false)); if is_maximized { let maximized_response = ui .add(Button::new(RichText::new("🗗").size(button_height))) .on_hover_text("Restore window"); if maximized_response.clicked() { ui.send_viewport_cmd(ViewportCommand::Maximized(false)); } } else { let maximized_response = ui .add(Button::new(RichText::new("🗗").size(button_height))) .on_hover_text("Maximize window"); if maximized_response.clicked() { ui.send_viewport_cmd(ViewportCommand::Maximized(true)); } } let minimized_response = ui .add(Button::new(RichText::new("🗕").size(button_height))) .on_hover_text("Minimize the window"); if minimized_response.clicked() { ui.send_viewport_cmd(ViewportCommand::Minimized(true)); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/hello_android/src/lib.rs
examples/hello_android/src/lib.rs
#![doc = include_str!("../README.md")] use eframe::{CreationContext, egui}; #[cfg(target_os = "android")] #[no_mangle] fn android_main(app: winit::platform::android::activity::AndroidApp) { // Log to android output android_logger::init_once( android_logger::Config::default().with_max_level(log::LevelFilter::Info), ); let options = eframe::NativeOptions { android_app: Some(app), ..Default::default() }; eframe::run_native( "My egui App", options, Box::new(|cc| Ok(Box::new(MyApp::new(cc)))), ) .unwrap() } pub struct MyApp { demo: egui_demo_lib::DemoWindows, } impl MyApp { pub fn new(cc: &CreationContext) -> Self { egui_extras::install_image_loaders(&cc.egui_ctx); Self { demo: egui_demo_lib::DemoWindows::default(), } } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { // Reserve some space at the top so the demo ui isn't hidden behind the android status bar // TODO(lucasmerlin): This is a pretty big hack, should be fixed once safe_area implemented // for android: // https://github.com/rust-windowing/winit/issues/3910 egui::Panel::top("status_bar_space").show_inside(ui, |ui| { ui.set_height(32.0); }); egui::CentralPanel::default().show_inside(ui, |ui| { self.demo.ui(ui); }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/hello_android/src/main.rs
examples/hello_android/src/main.rs
use hello_android::MyApp; fn main() -> eframe::Result { eframe::run_native( "hello_android", Default::default(), Box::new(|cc| Ok(Box::new(MyApp::new(cc)))), ) }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/confirm_exit/src/main.rs
examples/confirm_exit/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([320.0, 240.0]), ..Default::default() }; eframe::run_native( "Confirm exit", options, Box::new(|_cc| Ok(Box::<MyApp>::default())), ) } #[derive(Default)] struct MyApp { show_confirmation_dialog: bool, allowed_to_close: bool, } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.heading("Try to close the window"); }); if ui.input(|i| i.viewport().close_requested()) { if self.allowed_to_close { // do nothing - we will close } else { ui.send_viewport_cmd(egui::ViewportCommand::CancelClose); self.show_confirmation_dialog = true; } } if self.show_confirmation_dialog { egui::Window::new("Do you want to quit?") .collapsible(false) .resizable(false) .show(ui.ctx(), |ui| { ui.horizontal(|ui| { if ui.button("No").clicked() { self.show_confirmation_dialog = false; self.allowed_to_close = false; } if ui.button("Yes").clicked() { self.show_confirmation_dialog = false; self.allowed_to_close = true; ui.send_viewport_cmd(egui::ViewportCommand::Close); } }); }); } } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/custom_font_style/src/main.rs
examples/custom_font_style/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui; use egui::{FontFamily, FontId, RichText, TextStyle}; use std::collections::BTreeMap; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions::default(); eframe::run_native( "egui example: global font style", options, Box::new(|cc| Ok(Box::new(MyApp::new(cc)))), ) } #[inline] fn heading2() -> TextStyle { TextStyle::Name("Heading2".into()) } #[inline] fn heading3() -> TextStyle { TextStyle::Name("ContextHeading".into()) } fn configure_text_styles(ctx: &egui::Context) { use FontFamily::{Monospace, Proportional}; let text_styles: BTreeMap<TextStyle, FontId> = [ (TextStyle::Heading, FontId::new(25.0, Proportional)), (heading2(), FontId::new(22.0, Proportional)), (heading3(), FontId::new(19.0, Proportional)), (TextStyle::Body, FontId::new(16.0, Proportional)), (TextStyle::Monospace, FontId::new(12.0, Monospace)), (TextStyle::Button, FontId::new(12.0, Proportional)), (TextStyle::Small, FontId::new(8.0, Proportional)), ] .into(); ctx.all_styles_mut(move |style| style.text_styles = text_styles.clone()); } fn content(ui: &mut egui::Ui) { ui.heading("Top Heading"); ui.add_space(5.); ui.label(LOREM_IPSUM); ui.add_space(15.); ui.label(RichText::new("Sub Heading").text_style(heading2()).strong()); ui.monospace(LOREM_IPSUM); ui.add_space(15.); ui.label(RichText::new("Context").text_style(heading3()).strong()); ui.add_space(5.); ui.label(LOREM_IPSUM); } struct MyApp; impl MyApp { fn new(cc: &eframe::CreationContext<'_>) -> Self { configure_text_styles(&cc.egui_ctx); Self } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, content); } } pub const LOREM_IPSUM: &str = "Lorem ipsum dolor sit amet, consectetur adipiscing 
elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.";
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/multiple_viewports/src/main.rs
examples/multiple_viewports/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use std::sync::{ Arc, atomic::{AtomicBool, Ordering}, }; use eframe::egui; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([320.0, 240.0]), ..Default::default() }; eframe::run_native( "Multiple viewports", options, Box::new(|_cc| Ok(Box::<MyApp>::default())), ) } #[derive(Default)] struct MyApp { /// Immediate viewports are show immediately, so passing state to/from them is easy. /// The downside is that their painting is linked with the parent viewport: /// if either needs repainting, they are both repainted. show_immediate_viewport: bool, /// Deferred viewports run independent of the parent viewport, which can save /// CPU if only some of the viewports require repainting. /// However, this requires passing state with `Arc` and locks. 
show_deferred_viewport: Arc<AtomicBool>, } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.label("Hello from the root viewport"); ui.checkbox( &mut self.show_immediate_viewport, "Show immediate child viewport", ); { let mut show_deferred_viewport = self.show_deferred_viewport.load(Ordering::Relaxed); ui.checkbox(&mut show_deferred_viewport, "Show deferred child viewport"); self.show_deferred_viewport .store(show_deferred_viewport, Ordering::Relaxed); } ui.add_space(16.0); { let mut embedded = ui.embed_viewports(); ui.checkbox(&mut embedded, "Embed all viewports"); ui.set_embed_viewports(embedded); } }); if self.show_immediate_viewport { ui.ctx().show_viewport_immediate( egui::ViewportId::from_hash_of("immediate_viewport"), egui::ViewportBuilder::default() .with_title("Immediate Viewport") .with_inner_size([200.0, 100.0]), |ui, class| { if class == egui::ViewportClass::EmbeddedWindow { ui.label( "This viewport is embedded in the parent window, and cannot be moved outside of it.", ); } else { egui::CentralPanel::default().show_inside(ui, |ui| { ui.label("Hello from immediate viewport"); if ui.input(|i| i.viewport().close_requested()) { // Tell parent viewport that we should not show next frame: self.show_immediate_viewport = false; } }); } }, ); } if self.show_deferred_viewport.load(Ordering::Relaxed) { let show_deferred_viewport = Arc::clone(&self.show_deferred_viewport); ui.ctx().show_viewport_deferred( egui::ViewportId::from_hash_of("deferred_viewport"), egui::ViewportBuilder::default() .with_title("Deferred Viewport") .with_inner_size([200.0, 100.0]), move |ui, class| { if class == egui::ViewportClass::EmbeddedWindow { ui.label( "This viewport is embedded in the parent window, and cannot be moved outside of it.", ); } else { egui::CentralPanel::default().show_inside(ui, |ui| { ui.label("Hello from deferred viewport"); if ui.input(|i| 
i.viewport().close_requested()) { // Tell parent to close us. show_deferred_viewport.store(false, Ordering::Relaxed); } }); } }, ); } } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/user_attention/src/main.rs
examples/user_attention/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::{CreationContext, NativeOptions, egui}; use egui::{Button, CentralPanel, UserAttentionType}; use std::time::{Duration, SystemTime}; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let native_options = NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([400., 200.]), ..Default::default() }; eframe::run_native( "User attention test", native_options, Box::new(|cc| Ok(Box::new(Application::new(cc)))), ) } fn repr(attention: UserAttentionType) -> String { format!("{attention:?}") } struct Application { attention: UserAttentionType, request_at: Option<SystemTime>, auto_reset: bool, reset_at: Option<SystemTime>, } impl Application { fn new(_cc: &CreationContext<'_>) -> Self { Self { attention: UserAttentionType::Informational, request_at: None, auto_reset: false, reset_at: None, } } fn attention_reset_timeout() -> Duration { Duration::from_secs(3) } fn attention_request_timeout() -> Duration { Duration::from_secs(2) } fn repaint_max_timeout() -> Duration { Duration::from_secs(1) } } impl eframe::App for Application { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { if let Some(request_at) = self.request_at && request_at < SystemTime::now() { self.request_at = None; ui.send_viewport_cmd(egui::ViewportCommand::RequestUserAttention(self.attention)); if self.auto_reset { self.auto_reset = false; self.reset_at = Some(SystemTime::now() + Self::attention_reset_timeout()); } } if let Some(reset_at) = self.reset_at && reset_at < SystemTime::now() { self.reset_at = None; ui.send_viewport_cmd(egui::ViewportCommand::RequestUserAttention( UserAttentionType::Reset, )); } CentralPanel::default().show_inside(ui, |ui| { ui.vertical(|ui| { ui.horizontal(|ui| { ui.label("Attention type:"); 
egui::ComboBox::new("attention", "") .selected_text(repr(self.attention)) .show_ui(ui, |ui| { for kind in [ UserAttentionType::Informational, UserAttentionType::Critical, ] { ui.selectable_value(&mut self.attention, kind, repr(kind)); } }) }); let button_enabled = self.request_at.is_none() && self.reset_at.is_none(); let button_text = if button_enabled { format!( "Request in {} seconds", Self::attention_request_timeout().as_secs() ) } else { match self.reset_at { None => "Unfocus the window, fast!".to_owned(), Some(t) => { if let Ok(elapsed) = t.duration_since(SystemTime::now()) { format!("Resetting attention in {} s…", elapsed.as_secs()) } else { "Resetting attention…".to_owned() } } } }; let resp = ui .add_enabled(button_enabled, Button::new(button_text)) .on_hover_text_at_pointer( "After clicking, unfocus the application's window to see the effect", ); ui.checkbox( &mut self.auto_reset, format!( "Reset after {} seconds", Self::attention_reset_timeout().as_secs() ), ); if resp.clicked() { self.request_at = Some(SystemTime::now() + Self::attention_request_timeout()); } }); }); ui.request_repaint_after(Self::repaint_max_timeout()); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/hello_world_par/src/main.rs
examples/hello_world_par/src/main.rs
//! This example shows that you can use egui in parallel from multiple threads. #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(clippy::unwrap_used)] // it's an example use std::sync::mpsc; use std::thread::JoinHandle; use eframe::egui; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([1024.0, 768.0]), ..Default::default() }; eframe::run_native( "My parallel egui App", options, Box::new(|_cc| Ok(Box::new(MyApp::new()))), ) } /// State per thread. struct ThreadState { thread_nr: usize, title: String, name: String, age: u32, } impl ThreadState { fn new(thread_nr: usize) -> Self { let title = format!("Background thread {thread_nr}"); Self { thread_nr, title, name: "Arthur".into(), age: 12 + thread_nr as u32 * 10, } } fn show(&mut self, ctx: &egui::Context) { let pos = egui::pos2(16.0, 128.0 * (self.thread_nr as f32 + 1.0)); egui::Window::new(&self.title) .default_pos(pos) .show(ctx, |ui| { ui.horizontal(|ui| { ui.label("Your name: "); ui.text_edit_singleline(&mut self.name); }); ui.add(egui::Slider::new(&mut self.age, 0..=120).text("age")); if ui.button("Increment").clicked() { self.age += 1; } ui.label(format!("Hello '{}', age {}", self.name, self.age)); }); } } fn new_worker( thread_nr: usize, on_done_tx: mpsc::SyncSender<()>, ) -> (JoinHandle<()>, mpsc::SyncSender<egui::Context>) { let (show_tx, show_rc) = mpsc::sync_channel(0); let handle = std::thread::Builder::new() .name(format!("EguiPanelWorker {thread_nr}")) .spawn(move || { let mut state = ThreadState::new(thread_nr); while let Ok(ctx) = show_rc.recv() { state.show(&ctx); let _ = on_done_tx.send(()); } }) .expect("failed to spawn thread"); (handle, show_tx) } struct MyApp { threads: Vec<(JoinHandle<()>, mpsc::SyncSender<egui::Context>)>, on_done_tx: mpsc::SyncSender<()>, on_done_rc: 
mpsc::Receiver<()>, } impl MyApp { fn new() -> Self { let threads = Vec::with_capacity(3); let (on_done_tx, on_done_rc) = mpsc::sync_channel(0); let mut slf = Self { threads, on_done_tx, on_done_rc, }; slf.spawn_thread(); slf.spawn_thread(); slf } fn spawn_thread(&mut self) { let thread_nr = self.threads.len(); self.threads .push(new_worker(thread_nr, self.on_done_tx.clone())); } } impl std::ops::Drop for MyApp { fn drop(&mut self) { for (handle, show_tx) in self.threads.drain(..) { std::mem::drop(show_tx); handle.join().unwrap(); } } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::Window::new("Main thread").show(ui.ctx(), |ui| { if ui.button("Spawn another thread").clicked() { self.spawn_thread(); } }); for (_handle, show_tx) in &self.threads { let _ = show_tx.send(ui.ctx().clone()); } for _ in 0..self.threads.len() { let _ = self.on_done_rc.recv(); } } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/serial_windows/src/main.rs
examples/serial_windows/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { run_and_return: true, viewport: egui::ViewportBuilder::default().with_inner_size([320.0, 240.0]), ..Default::default() }; log::info!("Starting first window…"); eframe::run_native( "First Window", options.clone(), Box::new(|_cc| Ok(Box::new(MyApp { has_next: true }))), )?; std::thread::sleep(std::time::Duration::from_secs(2)); log::info!("Starting second window…"); eframe::run_native( "Second Window", options.clone(), Box::new(|_cc| Ok(Box::new(MyApp { has_next: true }))), )?; std::thread::sleep(std::time::Duration::from_secs(2)); log::info!("Starting third window…"); eframe::run_native( "Third Window", options, Box::new(|_cc| Ok(Box::new(MyApp { has_next: false }))), ) } struct MyApp { pub(crate) has_next: bool, } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { let label_text = if self.has_next { "When this window is closed the next will be opened after a short delay" } else { "This is the last window. Program will end when closed" }; ui.label(label_text); if ui.button("Close").clicked() { log::info!("Pressed Close button"); ui.send_viewport_cmd(egui::ViewportCommand::Close); } }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/keyboard_events/src/main.rs
examples/keyboard_events/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui; use egui::{Key, ScrollArea}; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions::default(); eframe::run_native( "Keyboard events", options, Box::new(|_cc| Ok(Box::<Content>::default())), ) } #[derive(Default)] struct Content { text: String, } impl eframe::App for Content { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.heading("Press/Hold/Release example. Press A to test."); if ui.button("Clear").clicked() { self.text.clear(); } ScrollArea::vertical() .auto_shrink(false) .stick_to_bottom(true) .show(ui, |ui| { ui.label(&self.text); }); if ui.input(|i| i.key_pressed(Key::A)) { self.text.push_str("\nPressed"); } if ui.input(|i| i.key_down(Key::A)) { self.text.push_str("\nHeld"); ui.request_repaint(); // make sure we note the holding. } if ui.input(|i| i.key_released(Key::A)) { self.text.push_str("\nReleased"); } }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/custom_font/src/main.rs
examples/custom_font/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::{ egui, epaint::text::{FontInsert, InsertFontFamily}, }; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([320.0, 240.0]), ..Default::default() }; eframe::run_native( "egui example: custom font", options, Box::new(|cc| Ok(Box::new(MyApp::new(cc)))), ) } // Demonstrates how to add a font to the existing ones fn add_font(ctx: &egui::Context) { ctx.add_font(FontInsert::new( "my_font", egui::FontData::from_static(include_bytes!( "../../../crates/epaint_default_fonts/fonts/Hack-Regular.ttf" )), vec![ InsertFontFamily { family: egui::FontFamily::Proportional, priority: egui::epaint::text::FontPriority::Highest, }, InsertFontFamily { family: egui::FontFamily::Monospace, priority: egui::epaint::text::FontPriority::Lowest, }, ], )); } // Demonstrates how to replace all fonts. fn replace_fonts(ctx: &egui::Context) { // Start with the default fonts (we will be adding to them rather than replacing them). let mut fonts = egui::FontDefinitions::default(); // Install my own font (maybe supporting non-latin characters). // .ttf and .otf files supported. 
fonts.font_data.insert( "my_font".to_owned(), std::sync::Arc::new(egui::FontData::from_static(include_bytes!( "../../../crates/epaint_default_fonts/fonts/Hack-Regular.ttf" ))), ); // Put my font first (highest priority) for proportional text: fonts .families .entry(egui::FontFamily::Proportional) .or_default() .insert(0, "my_font".to_owned()); // Put my font as last fallback for monospace: fonts .families .entry(egui::FontFamily::Monospace) .or_default() .push("my_font".to_owned()); // Tell egui to use these fonts: ctx.set_fonts(fonts); } struct MyApp { text: String, } impl MyApp { fn new(cc: &eframe::CreationContext<'_>) -> Self { replace_fonts(&cc.egui_ctx); add_font(&cc.egui_ctx); Self { text: "Edit this text field if you want".to_owned(), } } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.heading("egui using custom fonts"); ui.text_edit_multiline(&mut self.text); }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/hello_world_simple/src/main.rs
examples/hello_world_simple/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([320.0, 240.0]), ..Default::default() }; // Our application state: let mut name = "Arthur".to_owned(); let mut age = 42; eframe::run_ui_native("My egui App", options, move |ui, _frame| { egui::CentralPanel::default().show_inside(ui, |ui| { ui.heading("My egui Application"); ui.horizontal(|ui| { let name_label = ui.label("Your name: "); ui.text_edit_singleline(&mut name) .labelled_by(name_label.id); }); ui.add(egui::Slider::new(&mut age, 0..=120).text("age")); if ui.button("Increment").clicked() { age += 1; } ui.label(format!("Hello '{name}', age {age}")); }); }) }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/custom_style/src/main.rs
examples/custom_style/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui::{ self, Color32, Stroke, Style, Theme, global_theme_preference_buttons, style::Selection, }; use egui_demo_lib::{View as _, WidgetGallery}; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([350.0, 590.0]), ..Default::default() }; eframe::run_native( "egui example: custom style", options, Box::new(|cc| Ok(Box::new(MyApp::new(cc)))), ) } fn setup_custom_style(ctx: &egui::Context) { ctx.style_mut_of(Theme::Light, use_light_green_accent); ctx.style_mut_of(Theme::Dark, use_dark_purple_accent); } fn use_light_green_accent(style: &mut Style) { style.visuals.hyperlink_color = Color32::from_rgb(18, 180, 85); style.visuals.text_cursor.stroke.color = Color32::from_rgb(28, 92, 48); style.visuals.selection = Selection { bg_fill: Color32::from_rgb(157, 218, 169), stroke: Stroke::new(1.0, Color32::from_rgb(28, 92, 48)), }; } fn use_dark_purple_accent(style: &mut Style) { style.visuals.hyperlink_color = Color32::from_rgb(202, 135, 227); style.visuals.text_cursor.stroke.color = Color32::from_rgb(234, 208, 244); style.visuals.selection = Selection { bg_fill: Color32::from_rgb(105, 67, 119), stroke: Stroke::new(1.0, Color32::from_rgb(234, 208, 244)), }; } struct MyApp { widget_gallery: WidgetGallery, } impl MyApp { fn new(cc: &eframe::CreationContext<'_>) -> Self { setup_custom_style(&cc.egui_ctx); egui_extras::install_image_loaders(&cc.egui_ctx); // Needed for the "Widget Gallery" demo Self { widget_gallery: WidgetGallery::default(), } } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.heading("egui using a customized style"); 
ui.label("Switch between dark and light mode to see the different styles in action."); global_theme_preference_buttons(ui); ui.separator(); self.widget_gallery.ui(ui); }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/custom_3d_glow/src/main.rs
examples/custom_3d_glow/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example #![expect(unsafe_code)] #![expect(clippy::undocumented_unsafe_blocks)] use eframe::{egui, egui_glow, glow}; use egui::mutex::Mutex; use std::sync::Arc; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([350.0, 380.0]), multisampling: 4, renderer: eframe::Renderer::Glow, ..Default::default() }; eframe::run_native( "Custom 3D painting in eframe using glow", options, Box::new(|cc| Ok(Box::new(MyApp::new(cc)))), ) } struct MyApp { /// Behind an `Arc<Mutex<…>>` so we can pass it to [`egui::PaintCallback`] and paint later. rotating_triangle: Arc<Mutex<RotatingTriangle>>, angle: f32, } impl MyApp { fn new(cc: &eframe::CreationContext<'_>) -> Self { let gl = cc .gl .as_ref() .expect("You need to run eframe with the glow backend"); Self { rotating_triangle: Arc::new(Mutex::new(RotatingTriangle::new(gl))), angle: 0.0, } } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.horizontal(|ui| { ui.spacing_mut().item_spacing.x = 0.0; ui.label("The triangle is being painted using "); ui.hyperlink_to("glow", "https://github.com/grovesNL/glow"); ui.label(" (OpenGL)."); }); egui::Frame::canvas(ui.style()).show(ui, |ui| { self.custom_painting(ui); }); ui.label("Drag to rotate!"); }); } fn on_exit(&mut self, gl: Option<&glow::Context>) { if let Some(gl) = gl { self.rotating_triangle.lock().destroy(gl); } } } impl MyApp { fn custom_painting(&mut self, ui: &mut egui::Ui) { let (rect, response) = ui.allocate_exact_size(egui::Vec2::splat(300.0), egui::Sense::drag()); self.angle += response.drag_motion().x * 0.01; // Clone locals so we can move them into the paint 
callback: let angle = self.angle; let rotating_triangle = Arc::clone(&self.rotating_triangle); let callback = egui::PaintCallback { rect, callback: std::sync::Arc::new(egui_glow::CallbackFn::new(move |_info, painter| { rotating_triangle.lock().paint(painter.gl(), angle); })), }; ui.painter().add(callback); } } struct RotatingTriangle { program: glow::Program, vertex_array: glow::VertexArray, } impl RotatingTriangle { fn new(gl: &glow::Context) -> Self { use glow::HasContext as _; let shader_version = if cfg!(target_arch = "wasm32") { "#version 300 es" } else { "#version 330" }; unsafe { let program = gl.create_program().expect("Cannot create program"); let (vertex_shader_source, fragment_shader_source) = ( r#" const vec2 verts[3] = vec2[3]( vec2(0.0, 1.0), vec2(-1.0, -1.0), vec2(1.0, -1.0) ); const vec4 colors[3] = vec4[3]( vec4(1.0, 0.0, 0.0, 1.0), vec4(0.0, 1.0, 0.0, 1.0), vec4(0.0, 0.0, 1.0, 1.0) ); out vec4 v_color; uniform float u_angle; void main() { v_color = colors[gl_VertexID]; gl_Position = vec4(verts[gl_VertexID], 0.0, 1.0); gl_Position.x *= cos(u_angle); } "#, r#" precision mediump float; in vec4 v_color; out vec4 out_color; void main() { out_color = v_color; } "#, ); let shader_sources = [ (glow::VERTEX_SHADER, vertex_shader_source), (glow::FRAGMENT_SHADER, fragment_shader_source), ]; let shaders: Vec<_> = shader_sources .iter() .map(|(shader_type, shader_source)| { let shader = gl .create_shader(*shader_type) .expect("Cannot create shader"); gl.shader_source(shader, &format!("{shader_version}\n{shader_source}")); gl.compile_shader(shader); assert!( gl.get_shader_compile_status(shader), "Failed to compile {shader_type}: {}", gl.get_shader_info_log(shader) ); gl.attach_shader(program, shader); shader }) .collect(); gl.link_program(program); assert!( gl.get_program_link_status(program), "{}", gl.get_program_info_log(program) ); for shader in shaders { gl.detach_shader(program, shader); gl.delete_shader(shader); } let vertex_array = gl 
.create_vertex_array() .expect("Cannot create vertex array"); Self { program, vertex_array, } } } fn destroy(&self, gl: &glow::Context) { use glow::HasContext as _; unsafe { gl.delete_program(self.program); gl.delete_vertex_array(self.vertex_array); } } fn paint(&self, gl: &glow::Context, angle: f32) { use glow::HasContext as _; unsafe { gl.use_program(Some(self.program)); gl.uniform_1_f32( gl.get_uniform_location(self.program, "u_angle").as_ref(), angle, ); gl.bind_vertex_array(Some(self.vertex_array)); gl.draw_arrays(glow::TRIANGLES, 0, 3); } } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/popups/src/main.rs
examples/popups/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui::{CentralPanel, ComboBox, Popup, PopupCloseBehavior}; fn main() -> Result<(), eframe::Error> { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions::default(); eframe::run_native("Popups", options, Box::new(|_| Ok(Box::<MyApp>::default()))) } #[derive(Default)] struct MyApp { checkbox: bool, number: u8, numbers: [bool; 10], } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut eframe::egui::Ui, _frame: &mut eframe::Frame) { CentralPanel::default().show_inside(ui, |ui| { ui.label("PopupCloseBehavior::CloseOnClick popup"); ComboBox::from_label("ComboBox") .selected_text(format!("{}", self.number)) .show_ui(ui, |ui| { for num in 0..10 { ui.selectable_value(&mut self.number, num, format!("{num}")); } }); ui.label("PopupCloseBehavior::CloseOnClickOutside popup"); ComboBox::from_label("Ignore Clicks") .close_behavior(PopupCloseBehavior::CloseOnClickOutside) .selected_text("Select Numbers") .show_ui(ui, |ui| { ui.label("This popup will be open even if you click the checkboxes"); for (i, num) in self.numbers.iter_mut().enumerate() { ui.checkbox(num, format!("Checkbox {}", i + 1)); } }); ui.label("PopupCloseBehavior::IgnoreClicks popup"); let response = ui.button("Open"); Popup::menu(&response) .close_behavior(PopupCloseBehavior::IgnoreClicks) .show(|ui| { ui.set_min_width(310.0); ui.label("This popup will be open until you press the button again"); ui.checkbox(&mut self.checkbox, "Checkbox"); }); }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/hello_world/src/main.rs
examples/hello_world/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([320.0, 240.0]), ..Default::default() }; eframe::run_native( "My egui App", options, Box::new(|cc| { // This gives us image support: egui_extras::install_image_loaders(&cc.egui_ctx); Ok(Box::<MyApp>::default()) }), ) } struct MyApp { name: String, age: u32, } impl Default for MyApp { fn default() -> Self { Self { name: "Arthur".to_owned(), age: 42, } } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.heading("My egui Application"); ui.horizontal(|ui| { let name_label = ui.label("Your name: "); ui.text_edit_singleline(&mut self.name) .labelled_by(name_label.id); }); ui.add(egui::Slider::new(&mut self.age, 0..=120).text("age")); if ui.button("Increment").clicked() { self.age += 1; } ui.label(format!("Hello '{}', age {}", self.name, self.age)); ui.image(egui::include_image!( "../../../crates/egui/assets/ferris.png" )); }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/file_dialog/src/main.rs
examples/file_dialog/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default() .with_inner_size([640.0, 240.0]) // wide enough for the drag-drop overlay text .with_drag_and_drop(true), ..Default::default() }; eframe::run_native( "Native file dialogs and drag-and-drop files", options, Box::new(|_cc| Ok(Box::<MyApp>::default())), ) } #[derive(Default)] struct MyApp { dropped_files: Vec<egui::DroppedFile>, picked_path: Option<String>, } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.label("Drag-and-drop files onto the window!"); if ui.button("Open file…").clicked() && let Some(path) = rfd::FileDialog::new().pick_file() { self.picked_path = Some(path.display().to_string()); } if let Some(picked_path) = &self.picked_path { ui.horizontal(|ui| { ui.label("Picked file:"); ui.monospace(picked_path); }); } // Show dropped files (if any): if !self.dropped_files.is_empty() { ui.group(|ui| { ui.label("Dropped files:"); for file in &self.dropped_files { let mut info = if let Some(path) = &file.path { path.display().to_string() } else if !file.name.is_empty() { file.name.clone() } else { "???".to_owned() }; let mut additional_info = vec![]; if !file.mime.is_empty() { additional_info.push(format!("type: {}", file.mime)); } if let Some(bytes) = &file.bytes { additional_info.push(format!("{} bytes", bytes.len())); } if !additional_info.is_empty() { info += &format!(" ({})", additional_info.join(", ")); } ui.label(info); } }); } }); preview_files_being_dropped(ui.ctx()); // Collect dropped files: ui.input(|i| { if !i.raw.dropped_files.is_empty() { 
self.dropped_files.clone_from(&i.raw.dropped_files); } }); } } /// Preview hovering files: fn preview_files_being_dropped(ctx: &egui::Context) { use egui::{Align2, Color32, Id, LayerId, Order, TextStyle}; use std::fmt::Write as _; if !ctx.input(|i| i.raw.hovered_files.is_empty()) { let text = ctx.input(|i| { let mut text = "Dropping files:\n".to_owned(); for file in &i.raw.hovered_files { if let Some(path) = &file.path { write!(text, "\n{}", path.display()).ok(); } else if !file.mime.is_empty() { write!(text, "\n{}", file.mime).ok(); } else { text += "\n???"; } } text }); let painter = ctx.layer_painter(LayerId::new(Order::Foreground, Id::new("file_drop_target"))); let content_rect = ctx.content_rect(); painter.rect_filled(content_rect, 0.0, Color32::from_black_alpha(192)); painter.text( content_rect.center(), Align2::CENTER_CENTER, text, TextStyle::Heading.resolve(&ctx.global_style()), Color32::WHITE, ); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/puffin_profiler/src/main.rs
examples/puffin_profiler/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use std::sync::{ Arc, atomic::{AtomicBool, Ordering}, }; use eframe::egui; fn main() -> eframe::Result { let rust_log = std::env::var("RUST_LOG").unwrap_or_else(|_| "info".to_owned()); // SAFETY: we call this from the main thread without any other threads running. #[expect(unsafe_code)] unsafe { std::env::set_var("RUST_LOG", rust_log); }; env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). start_puffin_server(); // NOTE: you may only want to call this if the users specifies some flag or clicks a button! eframe::run_native( "My egui App", eframe::NativeOptions { viewport: egui::ViewportBuilder::default(), #[cfg(feature = "wgpu")] renderer: eframe::Renderer::Wgpu, ..Default::default() }, Box::new(|_cc| Ok(Box::<MyApp>::default())), ) } struct MyApp { keep_repainting: bool, // It is useful to be able to inspect how eframe acts with multiple viewport // so we have two viewports here that we can toggle on/off. 
show_immediate_viewport: bool, show_deferred_viewport: Arc<AtomicBool>, } impl Default for MyApp { fn default() -> Self { Self { keep_repainting: true, show_immediate_viewport: Default::default(), show_deferred_viewport: Default::default(), } } } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { ui.heading("Example of how to use the puffin profiler with egui"); ui.separator(); let cmd = "cargo install puffin_viewer && puffin_viewer --url 127.0.0.1:8585"; ui.label("To connect, run this:"); ui.horizontal(|ui| { ui.monospace(cmd); if ui.small_button("📋").clicked() { ui.copy_text(cmd.into()); } }); ui.separator(); ui.horizontal(|ui| { ui.checkbox(&mut self.keep_repainting, "Keep repainting"); if self.keep_repainting { ui.spinner(); ui.request_repaint(); } else { ui.label("Repainting on events (e.g. mouse movement)"); } }); if ui .button( "Click to sleep a bit. That should be visible as a spike in the profiler view!", ) .clicked() { puffin::profile_scope!("long_sleep"); std::thread::sleep(std::time::Duration::from_millis(50)); } ui.checkbox( &mut self.show_immediate_viewport, "Show immediate child viewport", ); let mut show_deferred_viewport = self.show_deferred_viewport.load(Ordering::Relaxed); ui.checkbox(&mut show_deferred_viewport, "Show deferred child viewport"); self.show_deferred_viewport .store(show_deferred_viewport, Ordering::Relaxed); }); if self.show_immediate_viewport { ui.ctx().show_viewport_immediate( egui::ViewportId::from_hash_of("immediate_viewport"), egui::ViewportBuilder::default() .with_title("Immediate Viewport") .with_inner_size([200.0, 100.0]), |ui, class| { puffin::profile_scope!("immediate_viewport"); assert!( class == egui::ViewportClass::Immediate, "This egui backend doesn't support multiple viewports" ); egui::CentralPanel::default().show_inside(ui, |ui| { ui.label("Hello from immediate viewport"); }); if ui.input(|i| 
i.viewport().close_requested()) { // Tell parent viewport that we should not show next frame: self.show_immediate_viewport = false; } }, ); } if self.show_deferred_viewport.load(Ordering::Relaxed) { let show_deferred_viewport = Arc::clone(&self.show_deferred_viewport); ui.ctx().show_viewport_deferred( egui::ViewportId::from_hash_of("deferred_viewport"), egui::ViewportBuilder::default() .with_title("Deferred Viewport") .with_inner_size([200.0, 100.0]), move |ui, class| { puffin::profile_scope!("deferred_viewport"); assert!( class == egui::ViewportClass::Deferred, "This egui backend doesn't support multiple viewports" ); egui::CentralPanel::default().show_inside(ui, |ui| { ui.label("Hello from deferred viewport"); }); if ui.input(|i| i.viewport().close_requested()) { // Tell parent to close us. show_deferred_viewport.store(false, Ordering::Relaxed); } }, ); } } } fn start_puffin_server() { puffin::set_scopes_on(true); // tell puffin to collect data match puffin_http::Server::new("127.0.0.1:8585") { Ok(puffin_server) => { log::info!("Run: cargo install puffin_viewer && puffin_viewer --url 127.0.0.1:8585"); std::process::Command::new("puffin_viewer") .arg("--url") .arg("127.0.0.1:8585") .spawn() .ok(); // We can store the server if we want, but in this case we just want // it to keep running. Dropping it closes the server, so let's not drop it! #[expect(clippy::mem_forget)] std::mem::forget(puffin_server); } Err(err) => { log::error!("Failed to start puffin server: {err}"); } } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/images/src/main.rs
examples/images/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs)] // it's an example use eframe::egui; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default().with_inner_size([320.0, 880.0]), ..Default::default() }; eframe::run_native( "Image Viewer", options, Box::new(|cc| { // This gives us image support: egui_extras::install_image_loaders(&cc.egui_ctx); Ok(Box::<MyApp>::default()) }), ) } #[derive(Default)] struct MyApp {} impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { egui::ScrollArea::both().show(ui, |ui| { ui.image(egui::include_image!("cat.webp")) .on_hover_text_at_pointer("WebP"); ui.image(egui::include_image!("ferris.gif")) .on_hover_text_at_pointer("Gif"); ui.image(egui::include_image!("ferris.svg")) .on_hover_text_at_pointer("Svg"); let url = "https://picsum.photos/seed/1.759706314/1024"; ui.add(egui::Image::new(url).corner_radius(10)) .on_hover_text_at_pointer(url); }); }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/examples/screenshot/src/main.rs
examples/screenshot/src/main.rs
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release #![expect(rustdoc::missing_crate_level_docs, clippy::unwrap_used)] // it's an example use std::sync::Arc; use eframe::egui::{self, ColorImage}; fn main() -> eframe::Result { env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`). let options = eframe::NativeOptions { renderer: eframe::Renderer::Wgpu, ..Default::default() }; eframe::run_native( "Take screenshots and display with eframe/egui", options, Box::new(|_cc| Ok(Box::<MyApp>::default())), ) } #[derive(Default)] struct MyApp { continuously_take_screenshots: bool, texture: Option<egui::TextureHandle>, screenshot: Option<Arc<ColorImage>>, save_to_file: bool, } impl eframe::App for MyApp { fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show_inside(ui, |ui| { if let Some(screenshot) = self.screenshot.take() { self.texture = Some(ui.ctx().load_texture( "screenshot", screenshot, Default::default(), )); } ui.horizontal(|ui| { ui.checkbox( &mut self.continuously_take_screenshots, "continuously take screenshots", ); if ui.button("save to 'top_left.png'").clicked() { self.save_to_file = true; ui.send_viewport_cmd(egui::ViewportCommand::Screenshot(Default::default())); } ui.with_layout(egui::Layout::top_down(egui::Align::RIGHT), |ui| { if self.continuously_take_screenshots { if ui .add(egui::Label::new("hover me!").sense(egui::Sense::hover())) .hovered() { ui.ctx().set_theme(egui::Theme::Dark); } else { ui.ctx().set_theme(egui::Theme::Light); } ui.send_viewport_cmd(egui::ViewportCommand::Screenshot(Default::default())); } else if ui.button("take screenshot!").clicked() { ui.send_viewport_cmd(egui::ViewportCommand::Screenshot(Default::default())); } }); }); if let Some(texture) = self.texture.as_ref() { ui.image((texture.id(), ui.available_size())); } else { ui.spinner(); } // Check for returned screenshot: ui.input(|i| { for event in 
&i.raw.events { if let egui::Event::Screenshot { image, .. } = event { if self.save_to_file { let pixels_per_point = i.pixels_per_point(); let region = egui::Rect::from_two_pos( egui::Pos2::ZERO, egui::Pos2 { x: 100., y: 100. }, ); let top_left_corner = image.region(&region, Some(pixels_per_point)); image::save_buffer( "top_left.png", top_left_corner.as_raw(), top_left_corner.width() as u32, top_left_corner.height() as u32, image::ColorType::Rgba8, ) .unwrap(); self.save_to_file = false; } self.screenshot = Some(Arc::clone(image)); } } }); ui.request_repaint(); }); } }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/xtask/src/deny.rs
xtask/src/deny.rs
//! Run `cargo deny` //! //! Also installs the subcommand if it is not already installed. use std::process::Command; use super::DynError; pub fn deny(args: &[&str]) -> Result<(), DynError> { if !args.is_empty() { return Err(format!("Invalid arguments: {args:?}").into()); } install_cargo_deny()?; let targets = [ "aarch64-apple-darwin", "aarch64-linux-android", "i686-pc-windows-gnu", "i686-pc-windows-msvc", "i686-unknown-linux-gnu", "wasm32-unknown-unknown", "x86_64-apple-darwin", "x86_64-pc-windows-gnu", "x86_64-pc-windows-msvc", "x86_64-unknown-linux-gnu", "x86_64-unknown-linux-musl", "x86_64-unknown-redox", ]; for target in targets { let mut cmd = Command::new("cargo"); cmd.args([ "deny", "--all-features", "--log-level", "error", "--target", target, "check", ]); super::utils::print_cmd(&cmd); let status = cmd.status()?; if !status.success() { return Err(status.to_string().into()); } } Ok(()) } fn install_cargo_deny() -> Result<(), DynError> { let already_installed = Command::new("cargo") .args(["deny", "--version"]) .output() .is_ok_and(|out| out.status.success()); if already_installed { return Ok(()); } let mut cmd = Command::new("cargo"); cmd.args(["+stable", "install", "--quiet", "--locked", "cargo-deny"]); let reason = "install cargo-deny"; super::utils::ask_to_run(cmd, true, reason) }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/xtask/src/utils.rs
xtask/src/utils.rs
#![expect(clippy::unwrap_used)] use std::{ env, io::{self, Write as _}, process::Command, }; use super::DynError; /// Print the command and its arguments as if the user had typed them pub fn print_cmd(cmd: &Command) { print!("{} ", cmd.get_program().to_string_lossy()); for arg in cmd.get_args() { print!("{} ", arg.to_string_lossy()); } println!(); } /// Prompt user before running a command /// /// Adapted from [miri](https://github.com/rust-lang/miri/blob/dba35d2be72f4b78343d1a0f0b4737306f310672/cargo-miri/src/util.rs#L181-L204) pub fn ask_to_run(mut cmd: Command, ask: bool, reason: &str) -> Result<(), DynError> { // Disable interactive prompts in CI (GitHub Actions, Travis, AppVeyor, etc). // Azure doesn't set `CI` though (nothing to see here, just Microsoft being Microsoft), // so we also check their `TF_BUILD`. let is_ci = env::var_os("CI").is_some() || env::var_os("TF_BUILD").is_some(); if ask && !is_ci { let mut buf = String::new(); print!("The script is going to run: \n\n`{cmd:?}`\n\n To {reason}.\nProceed? [Y/n] ",); io::stdout().flush().unwrap(); io::stdin().read_line(&mut buf).unwrap(); match buf.trim().to_lowercase().as_ref() { "" | "y" | "yes" => {} "n" | "no" => return Err("Aborting as per your request".into()), a => return Err(format!("Invalid answer `{a}`").into()), } } else { println!("Running `{cmd:?}` to {reason}."); } let status = cmd.status()?; if !status.success() { return Err(format!("failed to {reason}: {status}").into()); } Ok(()) }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
emilk/egui
https://github.com/emilk/egui/blob/9e95b9ca50c224077617434707f7bd29bd70938b/xtask/src/main.rs
xtask/src/main.rs
//! Helper crate for running scripts within the `egui` repo #![expect(clippy::print_stderr, clippy::print_stdout)] #![allow(clippy::exit)] mod deny; pub(crate) mod utils; type DynError = Box<dyn std::error::Error>; fn main() { if let Err(e) = try_main() { eprintln!("{e}"); std::process::exit(-1); } } fn try_main() -> Result<(), DynError> { let arg_strings: Vec<_> = std::env::args().skip(1).collect(); let args: Vec<_> = arg_strings.iter().map(String::as_str).collect(); match args.as_slice() { &[] | &["-h"] | &["--help"] => print_help(), &["deny", ..] => deny::deny(&args[1..])?, c => Err(format!("Invalid arguments {c:?}"))?, } Ok(()) } fn print_help() { let help = " xtask help Subcommands deny: Run cargo-deny for all targets Options -h, --help: print help and exit "; println!("{help}"); }
rust
Apache-2.0
9e95b9ca50c224077617434707f7bd29bd70938b
2026-01-04T15:36:36.351731Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/build.rs
build.rs
use heck::ToUpperCamelCase; use indexmap::IndexMap; use std::path::Path; use std::{env, fs}; fn main() { cfg_aliases::cfg_aliases! { asdf: { any(feature = "asdf", not(target_os = "windows")) }, macos: { target_os = "macos" }, linux: { target_os = "linux" }, vfox: { any(feature = "vfox", target_os = "windows") }, } built::write_built_file().expect("Failed to acquire build-time information"); codegen_settings(); codegen_registry(); } /// Generate a raw string literal that safely contains the given content. /// Dynamically determines the minimum number of '#' needed. fn raw_string_literal(s: &str) -> String { // Find the longest sequence of '#' characters following a '"' in the string let mut max_hashes = 0; let mut current_hashes = 0; let mut after_quote = false; for c in s.chars() { if after_quote { if c == '#' { current_hashes += 1; max_hashes = max_hashes.max(current_hashes); } else { after_quote = false; current_hashes = 0; } } if c == '"' { after_quote = true; current_hashes = 0; } } // Use one more '#' than the longest sequence found let hashes = "#".repeat(max_hashes + 1); format!("r{hashes}\"{s}\"{hashes}") } /// Parse options from a TOML value into a Vec of (key, value) pairs fn parse_options(opts: Option<&toml::Value>) -> Vec<(String, String)> { opts.map(|opts| { if let Some(table) = opts.as_table() { table .iter() .map(|(k, v)| { let value = match v { toml::Value::String(s) => s.clone(), toml::Value::Table(t) => { // Serialize nested tables back to TOML string toml::to_string(t).unwrap_or_default() } _ => v.to_string(), }; (k.clone(), value) }) .collect::<Vec<_>>() } else { vec![] } }) .unwrap_or_default() } fn codegen_registry() { let out_dir = env::var_os("OUT_DIR").unwrap(); let dest_path = Path::new(&out_dir).join("registry.rs"); let mut lines = vec!["[".to_string()]; let registry: toml::Table = fs::read_to_string("registry.toml") .unwrap() .parse() .unwrap(); let tools = registry.get("tools").unwrap().as_table().unwrap(); for (short, info) in tools { 
let info = info.as_table().unwrap(); let aliases = info .get("aliases") .cloned() .unwrap_or(toml::Value::Array(vec![])) .as_array() .unwrap() .iter() .map(|v| v.as_str().unwrap().to_string()) .collect::<Vec<_>>(); let test = info.get("test").map(|t| { let t = t.as_array().unwrap(); ( t[0].as_str().unwrap().to_string(), t[1].as_str().unwrap().to_string(), ) }); let mut backends = vec![]; for backend in info.get("backends").unwrap().as_array().unwrap() { match backend { toml::Value::String(backend) => { backends.push(format!( r##"RegistryBackend{{ full: r#"{backend}"#, platforms: &[], options: &[], }}"## )); } toml::Value::Table(backend) => { let full = backend.get("full").unwrap().as_str().unwrap(); let platforms = backend .get("platforms") .map(|p| { p.as_array() .unwrap() .iter() .map(|p| p.as_str().unwrap().to_string()) .collect::<Vec<_>>() }) .unwrap_or_default(); let backend_options = parse_options(backend.get("options")); backends.push(format!( r##"RegistryBackend{{ full: r#"{full}"#, platforms: &[{platforms}], options: &[{options}], }}"##, platforms = platforms .into_iter() .map(|p| format!("\"{p}\"")) .collect::<Vec<_>>() .join(", "), options = backend_options .iter() .map(|(k, v)| format!( "({}, {})", raw_string_literal(k), raw_string_literal(v) )) .collect::<Vec<_>>() .join(", ") )); } _ => panic!("Unknown backend type"), } } let os = info .get("os") .map(|os| { let os = os.as_array().unwrap(); let mut os = os .iter() .map(|o| o.as_str().unwrap().to_string()) .collect::<Vec<_>>(); os.sort(); os }) .unwrap_or_default(); let description = info .get("description") .map(|d| d.as_str().unwrap().to_string()); let depends = info .get("depends") .map(|depends| { let depends = depends.as_array().unwrap(); let mut depends = depends .iter() .map(|d| d.as_str().unwrap().to_string()) .collect::<Vec<_>>(); depends.sort(); depends }) .unwrap_or_default(); let idiomatic_files = info .get("idiomatic_files") .map(|idiomatic_files| { idiomatic_files .as_array() .unwrap() 
.iter() .map(|f| f.as_str().unwrap().to_string()) .collect::<Vec<_>>() }) .unwrap_or_default(); let rt = format!( r#"RegistryTool{{short: "{short}", description: {description}, backends: &[{backends}], aliases: &[{aliases}], test: &{test}, os: &[{os}], depends: &[{depends}], idiomatic_files: &[{idiomatic_files}]}}"#, description = description .map(|d| format!("Some({})", raw_string_literal(&d))) .unwrap_or("None".to_string()), backends = backends.into_iter().collect::<Vec<_>>().join(", "), aliases = aliases .iter() .map(|a| format!("\"{a}\"")) .collect::<Vec<_>>() .join(", "), test = test .map(|(t, v)| format!( "Some(({}, {}))", raw_string_literal(&t), raw_string_literal(&v) )) .unwrap_or("None".to_string()), os = os .iter() .map(|o| format!("\"{o}\"")) .collect::<Vec<_>>() .join(", "), depends = depends .iter() .map(|d| format!("\"{d}\"")) .collect::<Vec<_>>() .join(", "), idiomatic_files = idiomatic_files .iter() .map(|f| format!("\"{f}\"")) .collect::<Vec<_>>() .join(", "), ); lines.push(format!(r#" ("{short}", {rt}),"#)); for alias in aliases { lines.push(format!(r#" ("{alias}", {rt}),"#)); } } lines.push(r#"].into()"#.to_string()); fs::write(&dest_path, lines.join("\n")).unwrap(); } fn codegen_settings() { let out_dir = env::var_os("OUT_DIR").unwrap(); let dest_path = Path::new(&out_dir).join("settings.rs"); let mut lines = vec![ r#"#[derive(Config, Default, Debug, Clone, Serialize)] #[config(partial_attr(derive(Clone, Serialize, Default)))] pub struct Settings {"# .to_string(), ]; let settings: toml::Table = fs::read_to_string("settings.toml") .unwrap() .parse() .unwrap(); let props_to_code = |key: &str, props: &toml::Value| { let mut lines = vec![]; let props = props.as_table().unwrap(); if let Some(description) = props.get("description") { lines.push(format!(" /// {}", description.as_str().unwrap())); } let type_ = props .get("rust_type") .map(|rt| rt.as_str().unwrap()) .or(props.get("type").map(|t| match t.as_str().unwrap() { "Bool" => "bool", "String" => 
"String", "Integer" => "i64", "Url" => "String", "Path" => "PathBuf", "Duration" => "String", "ListString" => "Vec<String>", "ListPath" => "Vec<PathBuf>", "SetString" => "BTreeSet<String>", "IndexMap<String, String>" => "IndexMap<String, String>", t => panic!("Unknown type: {t}"), })); if let Some(type_) = type_ { let type_ = if props.get("optional").is_some_and(|v| v.as_bool().unwrap()) { format!("Option<{type_}>") } else { type_.to_string() }; let mut opts = IndexMap::new(); if let Some(env) = props.get("env") { opts.insert("env".to_string(), env.to_string()); } if let Some(default) = props.get("default") { opts.insert("default".to_string(), default.to_string()); } else if type_ == "bool" { opts.insert("default".to_string(), "false".to_string()); } if let Some(parse_env) = props.get("parse_env") { opts.insert( "parse_env".to_string(), parse_env.as_str().unwrap().to_string(), ); } if let Some(deserialize_with) = props.get("deserialize_with") { opts.insert( "deserialize_with".to_string(), deserialize_with.as_str().unwrap().to_string(), ); } lines.push(format!( " #[config({})]", opts.iter() .map(|(k, v)| format!("{k} = {v}")) .collect::<Vec<_>>() .join(", ") )); lines.push(format!(" pub {key}: {type_},")); } else { lines.push(" #[config(nested)]".to_string()); lines.push(format!( " pub {}: Settings{},", key, key.to_upper_camel_case() )); } lines.join("\n") }; for (key, props) in &settings { lines.push(props_to_code(key, props)); } lines.push("}".to_string()); let nested_settings = settings .iter() .filter(|(_, v)| !v.as_table().unwrap().contains_key("type")) .collect::<Vec<_>>(); for (child, props) in &nested_settings { lines.push(format!( r#" #[derive(Config, Default, Debug, Clone, Serialize)] #[config(partial_attr(derive(Clone, Serialize, Default)))] #[config(partial_attr(serde(deny_unknown_fields)))] pub struct Settings{name} {{"#, name = child.to_upper_camel_case() )); for (key, props) in props.as_table().unwrap() { lines.push(props_to_code(key, props)); } 
lines.push("}".to_string()); } lines.push( r#" pub static SETTINGS_META: Lazy<IndexMap<&'static str, SettingsMeta>> = Lazy::new(|| { indexmap!{"# .to_string(), ); for (name, props) in &settings { let props = props.as_table().unwrap(); if let Some(type_) = props.get("type").map(|v| v.as_str().unwrap()) { // We could shadow the 'type_' variable, but its a best practice to avoid shadowing. // Thus, we introduce 'meta_type' here. let meta_type = match type_ { "IndexMap<String, String>" => "IndexMap", other => other, }; lines.push(format!( r#" "{name}" => SettingsMeta {{ type_: SettingsType::{meta_type},"#, )); if let Some(description) = props.get("description") { let description = description.as_str().unwrap().to_string(); lines.push(format!( " description: {},", raw_string_literal(&description) )); } lines.push(" },".to_string()); } } for (name, props) in &nested_settings { for (key, props) in props.as_table().unwrap() { let props = props.as_table().unwrap(); if let Some(type_) = props.get("type").map(|v| v.as_str().unwrap()) { // We could shadow the 'type_' variable, but its a best practice to avoid shadowing. // Thus, we introduce 'meta_type' here. let meta_type = match type_ { "IndexMap<String, String>" => "IndexMap", other => other, }; lines.push(format!( r#" "{name}.{key}" => SettingsMeta {{ type_: SettingsType::{meta_type},"#, )); } if let Some(description) = props.get("description") { let description = description.as_str().unwrap().to_string(); lines.push(format!( " description: {},", raw_string_literal(&description) )); } lines.push(" },".to_string()); } } lines.push( r#" } }); "# .to_string(), ); fs::write(&dest_path, lines.join("\n")).unwrap(); }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/timings.rs
src/timings.rs
use crate::env;
use crate::ui::{style, time};
use std::time::{Duration, Instant};

/// Starts a timer for `module` and returns a closure that, when invoked,
/// prints the elapsed time to stderr (colored via `render`).
///
/// Used by the `measure!` macro to time a block of code.
pub fn start(module: &str) -> impl FnOnce() {
    let start = Instant::now();
    // Own the module name so the closure can outlive the borrowed &str.
    let module = module.to_string();
    move || {
        let diff = start.elapsed();
        eprintln!("{}", render(module.as_str(), diff));
    }
}

// Instant of the very first timing call in this process; used by `render`
// to show time-since-startup when MISE_TIMINGS == 2.
static START: std::sync::Mutex<Option<Instant>> = std::sync::Mutex::new(None);

/// Returns a rendered timing line for `module` showing the time elapsed
/// since the previous call to this function (process-wide).
///
/// Returns an empty string when timings are disabled (MISE_TIMINGS == 0).
pub fn get_time_diff(module: &str) -> String {
    if *env::MISE_TIMINGS == 0 {
        return "".to_string();
    }
    // Instant of the previous call; both PREV and START are initialized
    // lazily on the first invocation.
    static PREV: std::sync::Mutex<Option<Instant>> = std::sync::Mutex::new(None);
    let now = Instant::now();
    if PREV.lock().unwrap().is_none() {
        *START.lock().unwrap() = Some(now);
        *PREV.lock().unwrap() = Some(now);
    }
    let mut prev = PREV.lock().unwrap();
    // First call yields a zero duration (prev was just set to `now`).
    let diff = now.duration_since(prev.unwrap());
    *prev = Some(now);
    render(module, diff)
}

/// Formats a `[TIME] <thread> <module> <duration>` line, color-coded by how
/// long `diff` is (hotter colors for slower durations).
fn render(module: &str, diff: Duration) -> String {
    let diff_str = if *env::MISE_TIMINGS == 2 {
        // Verbose mode: show both the delta and the time since process start.
        let relative = time::format_duration(diff);
        let from_start =
            time::format_duration(Instant::now().duration_since(START.lock().unwrap().unwrap()));
        format!("{relative} {from_start}")
    } else {
        time::format_duration(diff)
    };
    let thread_id = crate::logger::thread_id();
    let out = format!("[TIME] {thread_id} {module} {diff_str}")
        .trim()
        .to_string();
    // Thresholds are in microseconds: >8ms is the most severe styling,
    // scaling down to dim for <=100µs.
    if diff.as_micros() > 8000 {
        style::eblack(out).on_red().bold()
    } else if diff.as_micros() > 4000 {
        style::eblack(out).on_red()
    } else if diff.as_micros() > 2000 {
        style::ered(out).bright()
    } else if diff.as_micros() > 1000 {
        style::eyellow(out).bright()
    } else if diff.as_micros() > 500 {
        style::eyellow(out).dim()
    } else if diff.as_micros() > 100 {
        style::ecyan(out).dim()
    } else {
        style::edim(out)
    }
    .to_string()
}

/// Logs a timing delta line when MISE_TIMINGS > 1, otherwise falls back to a
/// `trace!` log with the same message.
#[macro_export]
macro_rules! time {
    ($fn:expr) => {{
        if *$crate::env::MISE_TIMINGS > 1 {
            let module = format!("{}::{}", module_path!(), format!($fn));
            eprintln!("{}", $crate::timings::get_time_diff(&module));
        } else {
            trace!($fn);
        }
    }};
    ($fn:expr, $($arg:tt)+) => {{
        if *$crate::env::MISE_TIMINGS > 1 {
            let module = format!("{}::{}", module_path!(), format!($fn, $($arg)+));
            eprintln!("{}", $crate::timings::get_time_diff(&module));
        } else {
            trace!($fn, $($arg)+);
        }
    }};
}

/// Prints a `[PROGRESS]` line to stderr when progress tracing is enabled.
#[macro_export]
macro_rules! progress_trace {
    ($($arg:tt)+) => {{
        if *$crate::env::MISE_PROGRESS_TRACE {
            eprintln!("[PROGRESS] {}", format!($($arg)+));
        }
    }};
}

/// Evaluates `$block`, timing it when MISE_TIMINGS > 0 (via `timings::start`)
/// or emitting start/done trace logs when trace logging is enabled;
/// otherwise runs the block unadorned. Always yields the block's result.
#[macro_export]
macro_rules! measure {
    ($fmt:expr, $block:block) => {{
        if *$crate::env::MISE_TIMINGS > 0 {
            let module = format!("{}::{}", module_path!(), format!($fmt));
            let end = $crate::timings::start(&module);
            let result = $block;
            end();
            result
        } else if log::log_enabled!(log::Level::Trace) {
            let msg = format!($fmt);
            trace!("{msg} start");
            let result = $block;
            trace!("{msg} done");
            result
        } else {
            $block
        }
    }};
}
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/shorthands.rs
src/shorthands.rs
use std::collections::HashMap;
use std::path::PathBuf;

use eyre::Result;
use itertools::Itertools;
use toml::Table;

use crate::config::Settings;
use crate::registry::REGISTRY;
use crate::{dirs, file};

// Maps a short tool name (e.g. "node") to one or more full backend specs.
pub type Shorthands = HashMap<String, Vec<String>>;

/// Builds the shorthand map from the built-in registry (unless disabled via
/// `disable_default_registry`) merged with the user's optional
/// `shorthands_file`; user entries override registry entries on key clash.
pub fn get_shorthands(settings: &Settings) -> Shorthands {
    let mut shorthands = HashMap::new();
    if !settings.disable_default_registry {
        shorthands.extend(
            REGISTRY
                .iter()
                .map(|(id, rt)| {
                    (
                        id.to_string(),
                        // Only asdf:/vfox: backends act as shorthands here.
                        rt.backends()
                            .iter()
                            .filter(|f| f.starts_with("asdf:") || f.starts_with("vfox:"))
                            .map(|f| f.to_string())
                            .collect_vec(),
                    )
                })
                // Drop registry tools that have no asdf/vfox backend at all.
                .filter(|(_, fulls)| !fulls.is_empty()),
        );
    };
    if let Some(f) = &settings.shorthands_file {
        match parse_shorthands_file(f.clone()) {
            Ok(custom) => {
                shorthands.extend(custom);
            }
            Err(err) => {
                // Best-effort: a broken shorthands file only warns, it does
                // not abort; the registry defaults remain usable.
                warn!("Failed to read shorthands file: {} {:#}", &f.display(), err);
            }
        }
    }
    shorthands
}

/// Parses a user shorthands TOML file of `name = "backend"` string pairs.
/// A leading `~` is expanded to the home directory. Non-string values are
/// silently ignored.
fn parse_shorthands_file(mut f: PathBuf) -> Result<Shorthands> {
    if f.starts_with("~") {
        f = dirs::HOME.join(f.strip_prefix("~")?);
    }
    let raw = file::read_to_string(&f)?;
    let toml = raw.parse::<Table>()?;
    let mut shorthands = HashMap::new();
    for (k, v) in toml {
        if let Some(v) = v.as_str() {
            shorthands.insert(k, vec![v.to_string()]);
        }
    }
    Ok(shorthands)
}

#[cfg(test)]
mod tests {
    use std::ops::Deref;

    #[cfg(unix)]
    use pretty_assertions::assert_str_eq;

    use crate::config::Config;

    use super::*;

    #[tokio::test]
    #[cfg(unix)]
    async fn test_get_shorthands() {
        use crate::config::Config;
        let _config = Config::get().await.unwrap();
        Settings::reset(None);
        let mut settings = Settings::get().deref().clone();
        settings.shorthands_file = Some("../fixtures/shorthands.toml".into());
        let shorthands = get_shorthands(&settings);
        assert_str_eq!(
            shorthands["ephemeral-postgres"][0],
            "asdf:mise-plugins/mise-ephemeral-postgres"
        );
        assert_str_eq!(shorthands["node"][0], "https://node");
        assert_str_eq!(shorthands["xxxxxx"][0], "https://xxxxxx");
    }

    #[tokio::test]
    async fn test_get_shorthands_missing_file() {
        let _config = Config::get().await.unwrap();
        Settings::reset(None);
        let mut settings = Settings::get().deref().clone();
        settings.shorthands_file = Some("test/fixtures/missing.toml".into());
        let shorthands = get_shorthands(&settings);
        // A missing file falls back to the registry defaults only.
        assert!(!shorthands.is_empty());
    }
}
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/errors.rs
src/errors.rs
use std::path::PathBuf;
use std::process::ExitStatus;

use crate::cli::args::BackendArg;
use crate::file::display_path;
use crate::toolset::{ToolRequest, ToolSource, ToolVersion};
use eyre::Report;
use thiserror::Error;

/// mise's typed error variants; callers downcast `eyre::Report`s to this
/// enum (see `get_exit_status` / `is_argument_err`) to react to specific
/// failures.
#[derive(Debug, Error)]
pub enum Error {
    #[error("[{ts}] {tr}: {source:#}")]
    FailedToResolveVersion {
        tr: Box<ToolRequest>,
        ts: ToolSource,
        source: Report,
    },
    #[error("[{0}] plugin not installed")]
    PluginNotInstalled(String),
    #[error("{0}@{1} not installed")]
    VersionNotInstalled(Box<BackendArg>, String),
    // ExitStatus is None when the process was killed by a signal.
    #[error("{} exited with non-zero status: {}", .0, render_exit_status(.1))]
    ScriptFailed(String, Option<ExitStatus>),
    #[error(
        "Config files in {} are not trusted.\nTrust them with `mise trust`. See https://mise.jdx.dev/cli/trust.html for more information.",
        display_path(.0)
    )]
    UntrustedConfig(PathBuf),
    #[error("{}", format_install_failures(.failed_installations))]
    InstallFailed {
        successful_installations: Vec<ToolVersion>,
        failed_installations: Vec<(ToolRequest, Report)>,
    },
}

/// Renders an optional exit status as "exit code N", or "no exit status"
/// when the process had none (e.g. terminated by a signal).
fn render_exit_status(exit_status: &Option<ExitStatus>) -> String {
    match exit_status.and_then(|s| s.code()) {
        Some(exit_status) => format!("exit code {exit_status}"),
        None => "no exit status".into(),
    }
}

/// Builds the display message for `Error::InstallFailed`.
///
/// Single failure: shows the underlying error inline. Multiple failures:
/// a one-line summary of all failed tools followed by one detailed entry
/// per tool.
fn format_install_failures(failed_installations: &[(ToolRequest, Report)]) -> String {
    if failed_installations.is_empty() {
        return "Installation failed".to_string();
    }

    // For a single failure, show the underlying error directly to preserve
    // the original error location for better debugging
    if failed_installations.len() == 1 {
        let (tr, error) = &failed_installations[0];
        // Show the underlying error with the tool context
        // Use {:#} to show full error chain (includes wrapped errors)
        return format!(
            "Failed to install {}@{}: {:#}",
            tr.ba().full(),
            tr.version(),
            error
        );
    }

    // For multiple failures, show a summary and then each error
    let mut output = vec![];
    let failed_tools: Vec<String> = failed_installations
        .iter()
        .map(|(tr, _)| format!("{}@{}", tr.ba().full(), tr.version()))
        .collect();
    output.push(format!(
        "Failed to install tools: {}",
        failed_tools.join(", ")
    ));

    // Show detailed errors for each failure
    // Use {:#} to show full error chain (includes wrapped errors)
    for (tr, error) in failed_installations.iter() {
        output.push(format!(
            "\n{}@{}: {:#}",
            tr.ba().full(),
            tr.version(),
            error
        ));
    }

    output.join("\n")
}

impl Error {
    /// If `err` is a `ScriptFailed` with a real exit status, returns its
    /// exit code so mise can propagate it as its own exit code.
    pub fn get_exit_status(err: &Report) -> Option<i32> {
        if let Some(Error::ScriptFailed(_, Some(status))) = err.downcast_ref::<Error>() {
            status.code()
        } else {
            None
        }
    }

    /// Returns true when `err` is a version-resolution failure whose tool
    /// request came from a CLI argument (as opposed to a config file).
    pub fn is_argument_err(err: &Report) -> bool {
        err.downcast_ref::<Error>()
            .map(|e| {
                matches!(
                    e,
                    Error::FailedToResolveVersion {
                        ts: ToolSource::Argument,
                        ..
                    }
                )
            })
            .unwrap_or(false)
    }
}
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/wildcard.rs
src/wildcard.rs
use std::str::Chars; pub struct Wildcard { patterns: Vec<String>, } impl Wildcard { pub fn new(patterns: impl IntoIterator<Item = impl Into<String>>) -> Self { Self { patterns: patterns.into_iter().map(Into::into).collect(), } } pub fn match_any(&self, input: &str) -> bool { for pattern in &self.patterns { if wildcard_match_single(input, pattern) { return true; } } false } } fn wildcard_match_single(input: &str, wildcard: &str) -> bool { let mut input_chars = input.chars(); let mut wildcard_chars = wildcard.chars(); loop { match (input_chars.next(), wildcard_chars.next()) { (Some(input_char), Some(wildcard_char)) => { if wildcard_char == '*' { return wildcard_match_single_star(input_chars, wildcard_chars); } else if wildcard_char == '?' || input_char == wildcard_char { continue; } else { return false; } } (None, None) => return true, (None, Some(wildcard_char)) => return wildcard_char == '*', (Some(_), None) => return false, } } } fn wildcard_match_single_star(mut input_chars: Chars, mut wildcard_chars: Chars) -> bool { loop { match wildcard_chars.next() { Some(wildcard_char) => { if wildcard_char == '*' { continue; } else { while let Some(input_char) = input_chars.next() { if wildcard_match_single( &input_char.to_string(), &wildcard_char.to_string(), ) { return wildcard_match_single_star(input_chars, wildcard_chars); } } return false; } } None => return true, } } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/runtime_symlinks.rs
src/runtime_symlinks.rs
use std::path::{Path, PathBuf};
use std::sync::Arc;

use crate::backend::Backend;
use crate::config::{Alias, Config};
use crate::file::make_symlink_or_file;
use crate::plugins::VERSION_REGEX;
use crate::semver::split_version_prefix;
use crate::{backend, file};
use eyre::Result;
use indexmap::IndexMap;
use itertools::Itertools;
use versions::Versioning;

/// Rebuilds the version-alias symlinks (e.g. `20` and `20.1` -> `20.1.2`,
/// plus `latest`) inside every backend's installs directory, replacing
/// stale links and pruning links whose targets are gone.
pub async fn rebuild(config: &Config) -> Result<()> {
    for backend in backend::list() {
        let symlinks = list_symlinks(config, backend.clone());
        let installs_dir = &backend.ba().installs_path;
        for (from, to) in symlinks {
            let from = installs_dir.join(from);
            if from.exists() {
                // Only replace links we created ourselves and only when they
                // point somewhere else; never touch user-created entries.
                if is_runtime_symlink(&from)
                    && file::resolve_symlink(&from)?.unwrap_or_default() != to
                {
                    trace!("Removing existing symlink: {}", from.display());
                    file::remove_file(&from)?;
                } else {
                    continue;
                }
            }
            make_symlink_or_file(&to, &from)?;
        }
        remove_missing_symlinks(backend.clone())?;
    }
    Ok(())
}

/// Computes the desired symlink map (link name -> relative target) for one
/// backend: every version prefix of each installed version, a `latest`
/// link, and any matching config-defined aliases. Later (higher) versions
/// overwrite earlier entries because insertion happens in sort order.
fn list_symlinks(config: &Config, backend: Arc<dyn Backend>) -> IndexMap<String, PathBuf> {
    // TODO: make this a pure function and add test cases
    let mut symlinks = IndexMap::new();
    // Targets are relative ("./<version>") so is_runtime_symlink can
    // recognize links created by mise.
    let rel_path = |x: &String| PathBuf::from(".").join(x.clone());
    for v in installed_versions(&backend) {
        let (prefix, version) = split_version_prefix(&v);
        let versions = Versioning::new(version).unwrap_or_default();
        let mut partial = vec![];
        // Build every proper prefix of the version components, e.g. for
        // 20.1.2: "20" then "20.1" (the full version itself is excluded).
        while versions.nth(partial.len()).is_some() && versions.nth(partial.len() + 1).is_some() {
            let version = versions.nth(partial.len()).unwrap();
            partial.push(version.to_string());
            let from = format!("{}{}", prefix, partial.join("."));
            symlinks.insert(from, rel_path(&v));
        }
        symlinks.insert(format!("{prefix}latest"), rel_path(&v));
        for (from, to) in &config
            .all_aliases
            .get(&backend.ba().short)
            .unwrap_or(&Alias::default())
            .versions
        {
            // Aliases containing '/' are full backend specs, not version
            // aliases; skip them here.
            if from.contains('/') {
                continue;
            }
            if !v.starts_with(to) {
                continue;
            }
            symlinks.insert(format!("{prefix}{from}"), rel_path(&v));
        }
    }
    // Sort by parsed version (falling back to the raw string) so that the
    // highest matching version wins for each link name.
    symlinks = symlinks
        .into_iter()
        .sorted_by_cached_key(|(k, _)| (Versioning::new(k), k.to_string()))
        .collect();
    symlinks
}

/// Lists installed versions for a backend, excluding entries that are
/// themselves partial-version names (matched by VERSION_REGEX).
fn installed_versions(backend: &Arc<dyn Backend>) -> Vec<String> {
    backend
        .list_installed_versions()
        .into_iter()
        .filter(|v| !VERSION_REGEX.is_match(v))
        .collect()
}

/// Deletes mise-created symlinks whose targets no longer exist, then
/// removes the installs directory itself if only metadata files remain.
pub fn remove_missing_symlinks(backend: Arc<dyn Backend>) -> Result<()> {
    let installs_dir = &backend.ba().installs_path;
    if !installs_dir.exists() {
        return Ok(());
    }
    for entry in std::fs::read_dir(installs_dir)? {
        let entry = entry?;
        let path = entry.path();
        // path.exists() follows the link, so a dangling symlink reports false.
        if is_runtime_symlink(&path) && !path.exists() {
            trace!("Removing missing symlink: {}", path.display());
            file::remove_file(path)?;
        }
    }
    // remove install dir if empty (ignore metadata)
    file::remove_dir_ignore(installs_dir, vec![".mise.backend.json", ".mise.backend"])?;
    Ok(())
}

/// Returns true if `path` is a symlink created by mise: such links always
/// use a relative "./..." target (see `rel_path` in `list_symlinks`).
pub fn is_runtime_symlink(path: &Path) -> bool {
    if let Ok(Some(link)) = file::resolve_symlink(path) {
        return link.starts_with("./");
    }
    false
}
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/fake_asdf.rs
src/fake_asdf.rs
use std::env::{join_paths, split_paths}; use std::fs; use std::os::unix::fs::PermissionsExt; use std::path::PathBuf; use color_eyre::eyre::ErrReport; use indoc::formatdoc; use once_cell::sync::OnceCell; use crate::env::PATH_KEY; use crate::{env, file}; pub fn setup() -> color_eyre::Result<PathBuf> { static SETUP: OnceCell<PathBuf> = OnceCell::new(); let path = SETUP.get_or_try_init(|| { let path = env::MISE_DATA_DIR.join(".fake-asdf"); let asdf_bin = path.join("asdf"); if !asdf_bin.exists() { file::create_dir_all(&path)?; file::write( &asdf_bin, formatdoc! {r#" #!/bin/sh mise asdf "$@" "#}, )?; let mut perms = asdf_bin.metadata()?.permissions(); perms.set_mode(0o755); fs::set_permissions(&asdf_bin, perms)?; } Ok::<PathBuf, ErrReport>(path) })?; Ok(path.clone()) } pub fn get_path_with_fake_asdf() -> String { let mut path = split_paths(&env::var_os(&*PATH_KEY).unwrap_or_default()).collect::<Vec<_>>(); match setup() { Ok(fake_asdf_path) => { path.insert(0, fake_asdf_path); } Err(e) => { warn!("Failed to setup fake asdf: {:#}", e); } }; join_paths(path).unwrap().to_string_lossy().to_string() }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/lockfile.rs
src/lockfile.rs
use crate::config::{Config, Settings}; use crate::env; use crate::file; use crate::file::display_path; use crate::path::PathExt; use crate::toolset::{ToolSource, ToolVersion, ToolVersionList, Toolset}; use eyre::{Report, Result, bail}; use itertools::Itertools; use serde_derive::{Deserialize, Serialize}; use std::path::{Path, PathBuf}; use std::sync::LazyLock as Lazy; use std::sync::Mutex; use std::{ collections::{BTreeMap, BTreeSet, HashMap, HashSet}, sync::Arc, }; use toml_edit::DocumentMut; use xx::regex; #[derive(Debug, Clone, Default, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct Lockfile { #[serde(skip)] tools: BTreeMap<String, Vec<LockfileTool>>, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] pub struct LockfileTool { pub version: String, pub backend: Option<String>, #[serde(skip_serializing_if = "BTreeMap::is_empty", default)] pub options: BTreeMap<String, String>, #[serde(skip_serializing_if = "Option::is_none")] pub env: Option<Vec<String>>, #[serde(skip_serializing_if = "BTreeMap::is_empty", default)] pub platforms: BTreeMap<String, PlatformInfo>, } #[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)] pub struct PlatformInfo { #[serde(skip_serializing_if = "Option::is_none")] pub checksum: Option<String>, // TODO: Add size back if we find a good way to generate it with `mise lock` #[serde(skip_serializing, default)] pub size: Option<u64>, #[serde(skip_serializing_if = "Option::is_none")] pub url: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub url_api: Option<String>, } impl PlatformInfo { /// Returns true if this PlatformInfo has no meaningful data (for serde skip) pub fn is_empty(&self) -> bool { self.checksum.is_none() && self.url.is_none() && self.url_api.is_none() } } impl TryFrom<toml::Value> for PlatformInfo { type Error = Report; fn try_from(value: toml::Value) -> Result<Self> { match value { toml::Value::String(checksum) => Ok(PlatformInfo { checksum: Some(checksum), 
size: None, url: None, url_api: None, }), toml::Value::Table(mut t) => { let checksum = match t.remove("checksum") { Some(toml::Value::String(s)) => Some(s), _ => None, }; let size = t .remove("size") .and_then(|v| v.as_integer()) .map(|i| i.try_into()) .transpose()?; let url = match t.remove("url") { Some(toml::Value::String(s)) => Some(s), _ => None, }; let url_api = match t.remove("url_api") { Some(toml::Value::String(s)) => Some(s), _ => None, }; Ok(PlatformInfo { checksum, size, url, url_api, }) } _ => bail!("unsupported asset info format"), } } } impl From<PlatformInfo> for toml::Value { fn from(platform_info: PlatformInfo) -> Self { let mut table = toml::Table::new(); if let Some(checksum) = platform_info.checksum { table.insert("checksum".to_string(), checksum.into()); } if let Some(url) = platform_info.url { table.insert("url".to_string(), url.into()); } if let Some(url_api) = platform_info.url_api { table.insert("url_api".to_string(), url_api.into()); } toml::Value::Table(table) } } impl Lockfile { pub fn read<P: AsRef<Path>>(path: P) -> Result<Self> { let path = path.as_ref(); if !path.exists() { return Ok(Lockfile::default()); } trace!("reading lockfile {}", path.display_user()); let content = file::read_to_string(path)?; let mut table: toml::Table = toml::from_str(&content)?; let tools: toml::Table = table .remove("tools") .unwrap_or(toml::Table::new().into()) .try_into()?; let mut lockfile = Lockfile::default(); for (short, value) in tools { let versions = match value { toml::Value::Array(arr) => arr .into_iter() .map(LockfileTool::try_from) .collect::<Result<Vec<_>>>()?, _ => bail!( "invalid lockfile format for tool {short}: expected array ([[tools.{short}]])" ), }; lockfile.tools.insert(short, versions); } Ok(lockfile) } fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> { if self.is_empty() { let _ = file::remove_file(path); } else { let mut tools = toml::Table::new(); for (short, versions) in &self.tools { // Always write Multi-Version format 
(array format) for consistency let value: toml::Value = versions .iter() .cloned() .map(|version| version.into_toml_value()) .collect::<Vec<toml::Value>>() .into(); tools.insert(short.clone(), value); } let mut lockfile = toml::Table::new(); lockfile.insert("tools".to_string(), tools.into()); let content = toml::to_string_pretty(&toml::Value::Table(lockfile))?; let content = format(content.parse()?); file::write(path, content)?; } Ok(()) } fn is_empty(&self) -> bool { self.tools.is_empty() } /// Get all platform keys present in the lockfile pub fn all_platform_keys(&self) -> BTreeSet<String> { let mut platforms = BTreeSet::new(); for tools in self.tools.values() { for tool in tools { for platform_key in tool.platforms.keys() { platforms.insert(platform_key.clone()); } } } platforms } /// Update or add platform info for a tool version /// Merges with existing info, preserving fields we don't have new values for pub fn set_platform_info( &mut self, short: &str, version: &str, backend: Option<&str>, options: &BTreeMap<String, String>, platform_key: &str, platform_info: PlatformInfo, ) { let tools = self.tools.entry(short.to_string()).or_default(); // Find existing tool version with matching options or create new one if let Some(tool) = tools .iter_mut() .find(|t| t.version == version && &t.options == options) { // Merge with existing platform info, preferring new values when present let merged = if let Some(existing) = tool.platforms.get(platform_key) { PlatformInfo { checksum: platform_info.checksum.or_else(|| existing.checksum.clone()), size: platform_info.size.or(existing.size), url: platform_info.url.or_else(|| existing.url.clone()), url_api: platform_info.url_api.or_else(|| existing.url_api.clone()), } } else { platform_info }; // Only insert non-empty platform info to avoid `"platforms.linux-x64" = {}` if !merged.is_empty() { tool.platforms.insert(platform_key.to_string(), merged); } } else { let mut platforms = BTreeMap::new(); // Only insert non-empty platform 
info if !platform_info.is_empty() { platforms.insert(platform_key.to_string(), platform_info); } tools.push(LockfileTool { version: version.to_string(), backend: backend.map(|s| s.to_string()), options: options.clone(), env: None, platforms, }); } } /// Save the lockfile to disk (public for mise lock command) pub fn write<P: AsRef<Path>>(&self, path: P) -> Result<()> { self.save(path) } } /// Determines the lockfile path for a given config file path /// Returns (lockfile_path, is_local) /// /// Lockfiles are placed alongside their config files: /// - `mise.toml` -> `mise.lock` /// - `.config/mise.toml` -> `.config/mise.lock` /// - `.mise/config.toml` -> `.mise/mise.lock` /// - `.mise/conf.d/foo.toml` -> `.mise/mise.lock` (conf.d files share parent's lockfile) pub fn lockfile_path_for_config(config_path: &Path) -> (PathBuf, bool) { let is_local = is_local_config(config_path); let lockfile_name = if is_local { "mise.local.lock" } else { "mise.lock" }; let parent = config_path.parent().unwrap_or(Path::new(".")); let parent_name = parent .file_name() .and_then(|n| n.to_str()) .unwrap_or_default(); // For conf.d files, place lockfile at parent of conf.d so all conf.d files share one lockfile let lockfile_dir = if parent_name == "conf.d" { parent.parent().unwrap_or(parent) } else { parent }; (lockfile_dir.join(lockfile_name), is_local) } /// Checks if a config path is a "local" config (should go to mise.local.lock) fn is_local_config(path: &Path) -> bool { let filename = path .file_name() .and_then(|n| n.to_str()) .unwrap_or_default(); filename.contains(".local.") } /// Extracts environment name from config filename /// e.g., "mise.test.toml" -> Some("test"), "mise.test.local.toml" -> Some("test"), "mise.toml" -> None fn extract_env_from_config_path(path: &Path) -> Option<String> { let filename = path .file_name() .and_then(|n| n.to_str()) .unwrap_or_default(); // Pattern matches: // - mise.{env}.toml -> captures env // - mise.{env}.local.toml -> captures env 
(env-specific local config) // - .mise.{env}.toml -> captures env // - config.{env}.toml -> captures env // Does NOT match (returns None): // - mise.toml, .mise.toml, config.toml (base configs) // - mise.local.toml (local without env - filtered by "local" check) let re = regex!(r"^(?:\.?mise|config)\.([^.]+)(?:\.local)?\.toml$"); re.captures(filename) .and_then(|caps| caps.get(1)) .map(|m| m.as_str().to_string()) .filter(|s| s != "local") } pub fn update_lockfiles(config: &Config, ts: &Toolset, new_versions: &[ToolVersion]) -> Result<()> { if !Settings::get().lockfile || !Settings::get().experimental { return Ok(()); } // Collect tools by source (config file) let mut tools_by_source: HashMap<ToolSource, HashMap<String, ToolVersionList>> = HashMap::new(); for (source, group) in &ts.versions.iter().chunk_by(|(_, tvl)| &tvl.source) { for (ba, tvl) in group { tools_by_source .entry(source.clone()) .or_default() .insert(ba.short.to_string(), tvl.clone()); } } // Add versions added within this session (from `mise use` or `mise up`) for (backend, group) in &new_versions.iter().chunk_by(|tv| tv.ba()) { let tvs = group.cloned().collect_vec(); let source = tvs[0].request.source().clone(); let source_tools = tools_by_source.entry(source.clone()).or_default(); if let Some(existing_tvl) = source_tools.get_mut(&backend.short) { for new_tv in tvs { existing_tvl .versions .retain(|tv| tv.request.version() != new_tv.request.version()); existing_tvl.versions.push(new_tv); } } else { let mut tvl = ToolVersionList::new(Arc::new(backend.clone()), source.clone()); tvl.versions.extend(tvs); source_tools.insert(backend.short.to_string(), tvl); } } // Group config files by target lockfile path // Key: lockfile path, Value: list of (config_path, env) tuples let mut lockfile_configs: HashMap<PathBuf, Vec<(PathBuf, Option<String>)>> = HashMap::new(); for (config_path, cf) in config.config_files.iter().rev() { if !cf.source().is_mise_toml() { continue; } let (lockfile_path, _is_local) = 
lockfile_path_for_config(config_path); let env = extract_env_from_config_path(config_path); lockfile_configs .entry(lockfile_path) .or_default() .push((config_path.clone(), env)); } debug!("updating {} lockfiles", lockfile_configs.len()); // Process each lockfile for (lockfile_path, configs) in lockfile_configs { // Only update existing lockfiles - creation is done elsewhere (e.g., by `mise lock`) if !lockfile_path.exists() { continue; } trace!( "updating lockfile {} from {} config files", display_path(&lockfile_path), configs.len() ); let mut existing_lockfile = Lockfile::read(&lockfile_path) .unwrap_or_else(|err| handle_missing_lockfile(err, &lockfile_path)); // Collect all tools from all contributing configs with their env context // Key: tool short name, Value: list of (LockfileTool, env) let mut tools_with_env: HashMap<String, Vec<(LockfileTool, Option<String>)>> = HashMap::new(); for (config_path, env) in &configs { let tool_source = ToolSource::MiseToml(config_path.clone()); if let Some(tools) = tools_by_source.get(&tool_source) { for (short, tvl) in tools { let lockfile_tools: Vec<LockfileTool> = tvl.clone().into(); for tool in lockfile_tools { tools_with_env .entry(short.clone()) .or_default() .push((tool, env.clone())); } } } } // Preserve base entries from existing lockfile that were overridden by env configs // Without this, base entries (env=None) get dropped when env configs override them // Only preserve if ALL new entries are env-specific - if any new entry has env=None, // it means the base config was updated and old entries should be replaced, not preserved for (short, existing_entries) in &existing_lockfile.tools { if let Some(new_entries) = tools_with_env.get_mut(short) { // Only preserve if all new entries are env-specific (no base config update) let all_env_specific = new_entries.iter().all(|(_, env)| env.is_some()); if all_env_specific { for existing in existing_entries { // If existing entry has no env (base) and isn't already in 
new_entries, preserve it if existing.env.is_none() && !new_entries.iter().any(|(t, _)| { t.version == existing.version && t.options == existing.options }) { new_entries.push((existing.clone(), None)); } } } } } // Process each tool with deduplication and env merging for (short, entries) in tools_with_env { let merged_tools = merge_tool_entries_with_env(entries, existing_lockfile.tools.get(&short)); existing_lockfile.tools.insert(short, merged_tools); } existing_lockfile.save(&lockfile_path)?; } Ok(()) } /// Merge tool entries with environment tracking and deduplication /// Rules: /// - Same version+options: if any has no env (base), keep only base entry; otherwise merge env arrays /// - Different version/options: separate entries /// - Preserve existing env-specific entries that aren't in new entries (env configs may not be loaded) #[allow(clippy::type_complexity)] fn merge_tool_entries_with_env( entries: Vec<(LockfileTool, Option<String>)>, existing_tools: Option<&Vec<LockfileTool>>, ) -> Vec<LockfileTool> { // Group by (version, options) - the key for deduplication let mut by_key: HashMap< (String, BTreeMap<String, String>), (LockfileTool, BTreeSet<String>, bool), > = HashMap::new(); for (tool, env) in entries { let key = (tool.version.clone(), tool.options.clone()); let entry = by_key .entry(key) .or_insert_with(|| (tool.clone(), BTreeSet::new(), false)); // Merge platforms for (platform, info) in tool.platforms { entry.0.platforms.entry(platform).or_insert(info); } // Track env - if any entry has no env, mark as base if let Some(e) = env { entry.1.insert(e); } else { entry.2 = true; // has_base } } // Merge with existing tools to preserve platform info AND env-specific entries if let Some(existing) = existing_tools { for existing_tool in existing { let key = (existing_tool.version.clone(), existing_tool.options.clone()); if let Some(entry) = by_key.get_mut(&key) { // Merge platform info from existing for (platform, info) in &existing_tool.platforms { entry .0 
.platforms .entry(platform.clone()) .or_insert(info.clone()); } // Preserve existing env if we have no new env info if entry.1.is_empty() && !entry.2 && let Some(ref existing_env) = existing_tool.env { for e in existing_env { entry.1.insert(e.clone()); } } } else if let Some(existing_envs) = &existing_tool.env { // Check if this env is already covered by a new entry // If so, the existing entry is stale and should not be preserved let env_already_covered = by_key .values() .any(|(_, new_envs, _)| existing_envs.iter().any(|e| new_envs.contains(e))); if !env_already_covered { // Preserve env-specific entries that have no match in new entries // and whose env is not covered by any new entry // This handles the case where env configs (e.g., mise.test.toml) aren't loaded // but we don't want to lose their lockfile entries by_key.insert( key, ( existing_tool.clone(), existing_tool .env .clone() .unwrap_or_default() .into_iter() .collect(), false, ), ); } } } } // Convert to final list by_key .into_values() .map(|(mut tool, envs, has_base)| { // If has_base (any entry had no env), don't set env field // Otherwise, set env field with merged envs tool.env = if has_base || envs.is_empty() { None } else { Some(envs.into_iter().sorted().collect()) }; tool }) .sorted_by(|a, b| a.version.cmp(&b.version)) .collect() } fn read_all_lockfiles(config: &Config) -> Arc<Lockfile> { // Cache by sorted config paths to avoid recomputing on every call static CACHE: Lazy<Mutex<HashMap<Vec<PathBuf>, Arc<Lockfile>>>> = Lazy::new(Default::default); // Create a cache key from the config file paths let cache_key: Vec<PathBuf> = config.config_files.keys().cloned().collect(); let mut cache = CACHE.lock().unwrap(); if let Some(cached) = cache.get(&cache_key) { return Arc::clone(cached); } let mut seen_roots: HashSet<PathBuf> = HashSet::new(); let mut all: Vec<Lockfile> = Vec::new(); for (path, cf) in config.config_files.iter().rev() { if !cf.source().is_mise_toml() { continue; } let (lockfile_path, 
_) = lockfile_path_for_config(path); let root = lockfile_path.parent().unwrap_or(path).to_path_buf(); if seen_roots.contains(&root) { continue; } seen_roots.insert(root.clone()); // Read both lockfiles (local takes precedence) let local_path = root.join("mise.local.lock"); if let Ok(local) = Lockfile::read(&local_path) { all.push(local); } let main_path = root.join("mise.lock"); if let Ok(main) = Lockfile::read(&main_path) { all.push(main); } } let result = all.into_iter().fold(Lockfile::default(), |mut acc, l| { for (short, tools) in l.tools { let existing = acc.tools.entry(short).or_default(); for tool in tools { // Avoid duplicates (same version+options+env) if !existing.iter().any(|t| { t.version == tool.version && t.options == tool.options && t.env == tool.env }) { existing.push(tool); } } } acc }); let result = Arc::new(result); cache.insert(cache_key, Arc::clone(&result)); result } fn read_lockfile_for(path: &Path) -> Arc<Lockfile> { // Cache by config path to avoid recomputing lockfile_path_for_config on every call static CACHE: Lazy<Mutex<HashMap<PathBuf, Arc<Lockfile>>>> = Lazy::new(Default::default); let mut cache = CACHE.lock().unwrap(); if let Some(cached) = cache.get(path) { return Arc::clone(cached); } // Only compute lockfile path when not cached let (lockfile_path, _is_local) = lockfile_path_for_config(path); let lockfile = Lockfile::read(&lockfile_path) .unwrap_or_else(|err| handle_missing_lockfile(err, &lockfile_path)); let lockfile = Arc::new(lockfile); cache.insert(path.to_path_buf(), Arc::clone(&lockfile)); lockfile } pub fn get_locked_version( config: &Config, path: Option<&Path>, short: &str, prefix: &str, request_options: &BTreeMap<String, String>, ) -> Result<Option<LockfileTool>> { if !Settings::get().lockfile || !Settings::get().experimental { return Ok(None); } let current_envs: HashSet<&str> = env::MISE_ENV.iter().map(|s| s.as_str()).collect(); let lockfile = match path { Some(path) => { trace!( "[{short}@{prefix}] reading lockfile for 
{}", display_path(path) ); read_lockfile_for(path) } None => { trace!("[{short}@{prefix}] reading all lockfiles"); read_all_lockfiles(config) } }; if let Some(tools) = lockfile.tools.get(short) { // Filter by version prefix and options let mut matching: Vec<_> = tools .iter() .filter(|v| { let version_matches = prefix == "latest" || v.version.starts_with(prefix); let options_match = &v.options == request_options; version_matches && options_match }) .collect(); // Only sort when prefix is "latest" and we have multiple matches // This is expensive, so avoid it for specific version prefixes if prefix == "latest" && matching.len() > 1 { matching.sort_by(|a, b| { versions::Versioning::new(&b.version).cmp(&versions::Versioning::new(&a.version)) }); } // Priority: 1) env-specific match, 2) base entry (no env) if !current_envs.is_empty() && let Some(env_match) = matching.iter().find(|t| { t.env .as_ref() .is_some_and(|envs| envs.iter().any(|e| current_envs.contains(e.as_str()))) }) { trace!( "[{short}@{prefix}] found {} in lockfile (env-specific: {:?})", env_match.version, env_match.env ); return Ok(Some((*env_match).clone())); } // Fall back to base entry (no env field) if let Some(base) = matching.iter().find(|t| t.env.is_none()) { trace!( "[{short}@{prefix}] found {} in lockfile (base)", base.version ); return Ok(Some((*base).clone())); } // Last resort: any matching entry if let Some(any) = matching.first() { trace!( "[{short}@{prefix}] found {} in lockfile (fallback)", any.version ); return Ok(Some((*any).clone())); } } Ok(None) } /// Get the backend for a tool from the lockfile, ignoring options. /// This is used for backend discovery where we just need any entry's backend. 
pub fn get_locked_backend(config: &Config, short: &str) -> Option<String> { if !Settings::get().lockfile || !Settings::get().experimental { return None; } let lockfile = read_all_lockfiles(config); lockfile .tools .get(short) .and_then(|tools| tools.first()) .and_then(|tool| tool.backend.clone()) } fn handle_missing_lockfile(err: Report, lockfile_path: &Path) -> Lockfile { warn!( "failed to read lockfile {}: {err:?}", display_path(lockfile_path) ); Lockfile::default() } impl TryFrom<toml::Value> for LockfileTool { type Error = Report; fn try_from(value: toml::Value) -> Result<Self> { let tool = match value { toml::Value::String(v) => LockfileTool { version: v, backend: Default::default(), options: Default::default(), env: None, platforms: Default::default(), }, toml::Value::Table(mut t) => { let mut platforms = BTreeMap::new(); // Handle nested platforms table format: [tools.X.platforms.linux-x64] if let Some(platforms_table) = t.remove("platforms") { let platforms_table: toml::Table = platforms_table.try_into()?; for (platform, platform_info) in platforms_table { platforms.insert(platform, platform_info.try_into()?); } } // Handle inline table format: "platforms.linux-x64" = { ... } let platform_keys: Vec<_> = t .keys() .filter(|k| k.starts_with("platforms.")) .cloned() .collect(); for key in platform_keys { if let Some(platform_info) = t.remove(&key) { let platform_name = key.strip_prefix("platforms.").unwrap().to_string(); platforms.insert(platform_name, platform_info.try_into()?); } } let mut options = BTreeMap::new(); if let Some(opts) = t.remove("options") { let opts_table: toml::Table = opts.try_into()?; for (key, value) in opts_table { if let toml::Value::String(s) = value { options.insert(key, s); } } } let env = t.remove("env").and_then(|v| match v { toml::Value::Array(arr) => Some( arr.into_iter() .filter_map(|v| v.as_str().map(String::from)) .collect(), ), _ => None, }); LockfileTool { version: t .remove("version") .map(|v| v.try_into()) .transpose()? 
.unwrap_or_default(), backend: t .remove("backend") .map(|v| v.try_into()) .transpose()? .unwrap_or_default(), options, env, platforms, } } _ => bail!("unsupported lockfile format {}", value), }; Ok(tool) } } impl LockfileTool { fn into_toml_value(self) -> toml::Value { let mut table = toml::Table::new(); table.insert("version".to_string(), self.version.into()); if let Some(backend) = self.backend { table.insert("backend".to_string(), backend.into()); } if !self.options.is_empty() { let opts_table: toml::Table = self .options .into_iter() .map(|(k, v)| (k, toml::Value::String(v))) .collect(); table.insert("options".to_string(), toml::Value::Table(opts_table)); } if let Some(env) = self.env { let env_arr: toml::Value = env .into_iter() .map(toml::Value::String) .collect::<Vec<_>>() .into(); table.insert("env".to_string(), env_arr); } if !self.platforms.is_empty() { table.insert("platforms".to_string(), self.platforms.clone().into()); } table.into() } } impl From<ToolVersionList> for Vec<LockfileTool> { fn from(tvl: ToolVersionList) -> Self { use crate::backend::platform_target::PlatformTarget; tvl.versions .iter() .map(|tv| { let mut platforms = BTreeMap::new(); // Convert tool version lock_platforms to lockfile platforms for (platform, platform_info) in &tv.lock_platforms { platforms.insert( platform.clone(), PlatformInfo { checksum: platform_info.checksum.clone(), size: platform_info.size, url: platform_info.url.clone(), url_api: platform_info.url_api.clone(), }, ); } // Resolve lockfile options from the backend let options = if let Ok(backend) = tv.request.backend() { let target = PlatformTarget::from_current(); backend.resolve_lockfile_options(&tv.request, &target) } else { BTreeMap::new() }; LockfileTool { version: tv.version.clone(), backend: Some(tv.ba().full()), options, env: None, // Set by merge_tool_entries_with_env based on config source platforms, } }) .collect() } } fn format(mut doc: DocumentMut) -> String { if let Some(tools) = doc.get_mut("tools") { 
for (_k, v) in tools.as_table_mut().unwrap().iter_mut() { if let toml_edit::Item::ArrayOfTables(art) = v { for t in art.iter_mut() { t.sort_values_by(|a, _, b, _| { if a == "version" { return std::cmp::Ordering::Less; } if b == "version" { return std::cmp::Ordering::Greater; } a.to_string().cmp(&b.to_string()) }); // Convert platforms to inline tables with dotted keys
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
true
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/build_time.rs
src/build_time.rs
use chrono::{DateTime, FixedOffset}; use std::sync::LazyLock as Lazy; pub mod built_info { include!(concat!(env!("OUT_DIR"), "/built.rs")); } pub static BUILD_TIME: Lazy<DateTime<FixedOffset>> = Lazy::new(|| DateTime::parse_from_rfc2822(built_info::BUILT_TIME_UTC).unwrap()); pub static TARGET: &str = built_info::TARGET;
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/test.rs
src/test.rs
use std::env::join_paths; use std::path::PathBuf; use indoc::indoc; use crate::{env, file}; #[ctor::ctor] fn init() { if env::var("RUST_LOG").is_err() { env::set_var("RUST_LOG", "debug") } console::set_colors_enabled(false); console::set_colors_enabled_stderr(false); env::set_var( "HOME", PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test"), ); env::remove_var("MISE_TRUSTED_CONFIG_PATHS"); env::remove_var("MISE_DISABLE_TOOLS"); env::set_var("NO_COLOR", "1"); env::set_var("MISE_CACHE_PRUNE_AGE", "0"); env::set_var("MISE_CACHE_DIR", env::HOME.join("data").join("cache")); env::set_var("MISE_CONFIG_DIR", env::HOME.join("config")); env::set_var("MISE_ENV", ""); env::set_var("MISE_DATA_DIR", env::HOME.join("data")); env::set_var("MISE_GLOBAL_CONFIG_FILE", "~/config/config.toml"); env::set_var("MISE_SYSTEM_CONFIG_FILE", "doesntexist"); env::set_var( "MISE_OVERRIDE_CONFIG_FILENAMES", ".test.mise.toml:test.config.toml", ); env::set_var( "MISE_OVERRIDE_TOOL_VERSIONS_FILENAMES", ".test-tool-versions", ); env::set_var("MISE_STATE_DIR", env::HOME.join("state")); env::set_var("MISE_USE_TOML", "0"); env::set_var("MISE_YES", "1"); file::remove_all(&*env::HOME.join("cwd")).unwrap(); file::create_dir_all(&*env::HOME.join("cwd").join(".mise").join("tasks")).unwrap(); env::set_current_dir(env::HOME.join("cwd")).unwrap(); file::write( env::HOME.join("config").join("config.toml"), indoc! {r#" [env] TEST_ENV_VAR = 'test-123' [alias.tiny.versions] "my/alias" = '3.0' [tasks.configtask] run = 'echo "configtask:"' [tasks.lint] run = 'echo "linting!"' [tasks.test] run = 'echo "testing!"' [settings] always_keep_download = true always_keep_install = true idiomatic_version_file = true plugin_autoupdate_last_check_duration = "20m" jobs = 2 "#}, ) .unwrap(); file::write( env::HOME.join(".test-tool-versions"), indoc! {r#" tiny 2 dummy ref:master "#}, ) .unwrap(); file::write( env::current_dir().unwrap().join(".test-tool-versions"), indoc! 
{r#" tiny 3 "#}, ) .unwrap(); file::write( ".mise/tasks/filetask", indoc! {r#"#!/usr/bin/env bash #MISE alias="ft" #MISE description="This is a test build script" #MISE depends=["lint", "test"] #MISE sources=[".test-tool-versions"] #MISE outputs=["$MISE_PROJECT_ROOT/test/test-build-output.txt"] #MISE env={TEST_BUILDSCRIPT_ENV_VAR = "VALID", BOOLEAN_VAR = true} #USAGE flag "--user <user>" help="The user to run as" set -exo pipefail cd "$MISE_PROJECT_ROOT" || exit 1 echo "running test-build script" echo "TEST_BUILDSCRIPT_ENV_VAR: $TEST_BUILDSCRIPT_ENV_VAR" > test-build-output.txt echo "user=$usage_user" "#}, ) .unwrap(); file::make_executable(".mise/tasks/filetask").unwrap(); } pub fn replace_path(input: &str) -> String { let path = join_paths(&*env::PATH) .unwrap() .to_string_lossy() .to_string(); let home = env::HOME.to_string_lossy().to_string(); input .replace(&path, "$PATH") .replace(&home, "~") .replace(&*env::MISE_BIN.to_string_lossy(), "mise") } #[macro_export] macro_rules! with_settings { ($body:block) => {{ let home = $crate::env::HOME.to_string_lossy().to_string(); insta::with_settings!({sort_maps => true, filters => vec![ (home.as_str(), "~"), ]}, {$body}) }} }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/maplit.rs
src/maplit.rs
#[macro_export] macro_rules! hashmap { (@single $($x:tt)*) => (()); (@count $($rest:expr),*) => (<[()]>::len(&[$(hashmap!(@single $rest)),*])); ($($key:expr => $value:expr,)+) => { hashmap!($($key => $value),+) }; ($($key:expr => $value:expr),*) => { { let _cap = hashmap!(@count $($key),*); let mut _map = ::std::collections::HashMap::with_capacity(_cap); $( let _ = _map.insert($key, $value); )* _map } }; }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/path.rs
src/path.rs
pub use std::path::*; use crate::dirs; pub trait PathExt { /// replaces $HOME with "~" fn display_user(&self) -> String; fn mount(&self, on: &Path) -> PathBuf; fn is_empty(&self) -> bool; } impl PathExt for Path { fn display_user(&self) -> String { let home = dirs::HOME.to_string_lossy(); let home_str: &str = home.as_ref(); match cfg!(unix) && self.starts_with(home_str) && home != "/" { true => self.to_string_lossy().replacen(home_str, "~", 1), false => self.to_string_lossy().to_string(), } } fn mount(&self, on: &Path) -> PathBuf { if PathExt::is_empty(self) { on.to_path_buf() } else { on.join(self) } } fn is_empty(&self) -> bool { self.as_os_str().is_empty() } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/agecrypt.rs
src/agecrypt.rs
use std::io::{BufReader, Read, Write}; use std::path::{Path, PathBuf}; use age::ssh; use age::{Decryptor, Encryptor, Identity, IdentityFile, Recipient}; use base64::Engine; use eyre::{Result, WrapErr, eyre}; use indexmap::IndexSet; use crate::config::Settings; use crate::config::env_directive::{AgeFormat, EnvDirective, EnvDirectiveOptions}; use crate::file::{self, replace_path}; use crate::{dirs, env}; const ZSTD_COMPRESSION_LEVEL: i32 = 3; const COMPRESSION_THRESHOLD: usize = 1024; // 1KB pub async fn create_age_directive( key: String, value: &str, recipients: &[Box<dyn Recipient + Send>], ) -> Result<EnvDirective> { if recipients.is_empty() { return Err(eyre!( "[experimental] No age recipients provided for encryption" )); } let encryptor = match Encryptor::with_recipients(recipients.iter().map(|r| r.as_ref() as &dyn Recipient)) { Ok(encryptor) => encryptor, Err(e) => return Err(eyre!("[experimental] Failed to create encryptor: {}", e)), }; let mut encrypted = Vec::new(); let mut writer = encryptor.wrap_output(&mut encrypted)?; writer.write_all(value.as_bytes())?; writer.finish()?; // Determine format based on size and compression let (encoded, format) = if encrypted.len() > COMPRESSION_THRESHOLD { let compressed = zstd::encode_all(&encrypted[..], ZSTD_COMPRESSION_LEVEL)?; let encoded = base64::engine::general_purpose::STANDARD_NO_PAD.encode(&compressed); (encoded, Some(AgeFormat::Zstd)) } else { let encoded = base64::engine::general_purpose::STANDARD_NO_PAD.encode(&encrypted); (encoded, None) // Use None for raw format (default) }; Ok(EnvDirective::Age { key, value: encoded, format, options: EnvDirectiveOptions::default(), }) } pub async fn decrypt_age_directive(directive: &EnvDirective) -> Result<String> { Settings::get().ensure_experimental("age encryption")?; match directive { EnvDirective::Age { value, format, .. 
} => { let decoded = base64::engine::general_purpose::STANDARD_NO_PAD .decode(value) .wrap_err("[experimental] Failed to decode base64")?; let ciphertext = match format { Some(AgeFormat::Zstd) => zstd::decode_all(&decoded[..]) .wrap_err("[experimental] Failed to decompress zstd")?, Some(AgeFormat::Raw) | None => decoded, }; let identities = load_all_identities().await?; if identities.is_empty() { return Err(eyre!( "[experimental] No age identities found for decryption" )); } let decryptor = Decryptor::new(&ciphertext[..])?; let mut decrypted = Vec::new(); let identity_refs: Vec<&dyn Identity> = identities .iter() .map(|i| i.as_ref() as &dyn Identity) .collect(); match decryptor.decrypt(identity_refs.into_iter()) { Ok(mut reader) => { reader.read_to_end(&mut decrypted)?; } Err(e) => { return Err(eyre!("[experimental] Failed to decrypt: {}", e)); } } String::from_utf8(decrypted) .wrap_err("[experimental] Decrypted value is not valid UTF-8") } _ => Err(eyre!("[experimental] Not an Age directive")), } } pub async fn load_recipients_from_defaults() -> Result<Vec<Box<dyn Recipient + Send>>> { let mut recipients: IndexSet<String> = IndexSet::new(); // Try to load from age key file if let Some(key_file) = get_default_key_file().await && key_file.exists() { let content = file::read_to_string(&key_file)?; // For age keys, we need to parse them as x25519 identities to get public keys for line in content.lines() { let line = line.trim(); if line.starts_with("AGE-SECRET-KEY-") && let Ok(identity) = line.parse::<age::x25519::Identity>() { recipients.insert(identity.to_public().to_string()); } } } // Try to load from SSH private keys let ssh_key_paths = get_default_ssh_key_paths(); for path in ssh_key_paths { if path.exists() && let Ok(recipient) = load_ssh_recipient_from_private_key(&path).await { recipients.insert(recipient); } } let mut parsed_recipients: Vec<Box<dyn Recipient + Send>> = Vec::new(); for recipient_str in recipients { if let Some(recipient) = 
parse_recipient(&recipient_str)? { parsed_recipients.push(recipient); } } if parsed_recipients.is_empty() { return Err(eyre!( "[experimental] No age recipients found. Provide --age-recipient, --age-ssh-recipient, or configure settings.age.key_file" )); } Ok(parsed_recipients) } pub async fn load_recipients_from_key_file(path: &Path) -> Result<Vec<Box<dyn Recipient + Send>>> { let mut recipients: Vec<Box<dyn Recipient + Send>> = Vec::new(); if !path.exists() { return Err(eyre!( "[experimental] Age key file not found: {}", path.display() )); } let content = file::read_to_string(path)?; // Parse age x25519 identities and convert to recipients for line in content.lines() { let line = line.trim(); if line.starts_with("AGE-SECRET-KEY-") && let Ok(identity) = line.parse::<age::x25519::Identity>() { let public_key = identity.to_public(); recipients.push(Box::new(public_key)); } } if recipients.is_empty() { return Err(eyre!( "[experimental] No valid age identities found in {}", path.display() )); } Ok(recipients) } pub fn parse_recipient(recipient_str: &str) -> Result<Option<Box<dyn Recipient + Send>>> { let trimmed = recipient_str.trim(); if trimmed.starts_with("age1") { match trimmed.parse::<age::x25519::Recipient>() { Ok(r) => Ok(Some(Box::new(r))), Err(e) => Err(eyre!("[experimental] Invalid age recipient: {}", e)), } } else if trimmed.starts_with("ssh-") { // SSH recipient parsing - the age crate will validate it match trimmed.parse::<ssh::Recipient>() { Ok(r) => Ok(Some(Box::new(r))), Err(e) => Err(eyre!("[experimental] Invalid SSH recipient: {:?}", e)), } } else { Ok(None) } } pub async fn load_ssh_recipient_from_path(path: &Path) -> Result<Box<dyn Recipient + Send>> { let content = file::read_to_string(path)?; let trimmed = content.trim(); // Check if it's a public key if trimmed.starts_with("ssh-") { match trimmed.parse::<ssh::Recipient>() { Ok(r) => return Ok(Box::new(r)), Err(e) => { return Err(eyre!( "[experimental] Invalid SSH public key at {}: {:?}", 
path.display(), e )); } } } // Try to load as private key and derive public if path.extension().and_then(|s| s.to_str()) == Some("pub") { Err(eyre!( "[experimental] Invalid SSH public key at {}", path.display() )) } else { load_ssh_recipient_from_private_key(path) .await .and_then(|s| { parse_recipient(&s)? .ok_or_else(|| eyre!("[experimental] Failed to parse SSH recipient")) }) } } async fn load_ssh_recipient_from_private_key(path: &Path) -> Result<String> { // For SSH keys, we can't easily derive the public key from the private key using the age crate // So we'll try to read the corresponding .pub file let pub_path = path.with_extension("pub"); if pub_path.exists() { let content = file::read_to_string(&pub_path)?; let trimmed = content.trim(); if trimmed.starts_with("ssh-") { return Ok(trimmed.to_string()); } } Err(eyre!( "[experimental] Could not find public key for SSH private key at {}. Expected {}.pub", path.display(), path.display() )) } async fn load_all_identities() -> Result<Vec<Box<dyn Identity>>> { // Get identity files first let identity_files = get_all_identity_files().await; let ssh_identity_files = get_all_ssh_identity_files(); // Now process identities without holding them across await points let mut identities: Vec<Box<dyn Identity>> = Vec::new(); // Check MISE_AGE_KEY environment variable if let Ok(age_key) = env::var("MISE_AGE_KEY") && !age_key.is_empty() { // First try to parse as a raw age secret key for line in age_key.lines() { let line = line.trim(); if line.starts_with("AGE-SECRET-KEY-") && let Ok(identity) = line.parse::<age::x25519::Identity>() { identities.push(Box::new(identity)); } } // If no keys were found, try parsing as an identity file if identities.is_empty() && let Ok(identity_file) = IdentityFile::from_buffer(age_key.as_bytes()) && let Ok(mut file_identities) = identity_file.into_identities() { identities.append(&mut file_identities); } } // Load from identity files for path in identity_files { if path.exists() { match 
file::read_to_string(&path) { Ok(content) => { if let Ok(identity_file) = IdentityFile::from_buffer(content.as_bytes()) && let Ok(mut file_identities) = identity_file.into_identities() { identities.append(&mut file_identities); } } Err(e) => { debug!( "[experimental] Failed to read identity file {:?}: {}", path, e ); } } } } // Load SSH identities for path in ssh_identity_files { if path.exists() { match std::fs::File::open(&path) { Ok(file) => { let mut reader = BufReader::new(file); match ssh::Identity::from_buffer(&mut reader, Some(path.display().to_string())) { Ok(identity) => { identities.push(Box::new(identity)); } Err(e) => { debug!( "[experimental] Failed to parse SSH identity from {:?}: {}", path, e ); } } } Err(e) => { debug!( "[experimental] Failed to read SSH identity file {:?}: {}", path, e ); } } } } Ok(identities) } async fn get_default_key_file() -> Option<PathBuf> { Settings::get() .age .key_file .clone() .map(replace_path) .or_else(|| { let default_path = dirs::CONFIG.join("age.txt"); if default_path.exists() { Some(default_path) } else { None } }) } async fn get_all_identity_files() -> Vec<PathBuf> { let mut files = Vec::new(); if let Some(ref identity_files) = Settings::get().age.identity_files { for path in identity_files { // Apply path expansion for tilde and environment variables files.push(replace_path(path.clone())); } } if let Some(key_file) = Settings::get().age.key_file.clone() { files.push(replace_path(key_file)); } let default_age_txt = dirs::CONFIG.join("age.txt"); if default_age_txt.exists() && !files.contains(&default_age_txt) { files.push(default_age_txt); } files } fn get_all_ssh_identity_files() -> Vec<PathBuf> { let mut files = Vec::new(); if let Some(ref ssh_identity_files) = Settings::get().age.ssh_identity_files { for path in ssh_identity_files { // Apply path expansion for tilde and environment variables files.push(replace_path(path.clone())); } } files.extend(get_default_ssh_key_paths()); files } fn 
get_default_ssh_key_paths() -> Vec<PathBuf> { let mut paths = Vec::new(); let home = &*dirs::HOME; let ssh_dir = home.join(".ssh"); paths.push(ssh_dir.join("id_ed25519")); paths.push(ssh_dir.join("id_rsa")); paths } #[cfg(test)] mod tests { use super::*; #[tokio::test] async fn test_age_x25519_round_trip_small() -> Result<()> { let key = age::x25519::Identity::generate(); let recipient = key.to_public(); // Small value should not be compressed let plaintext = "secret value"; let recipients: Vec<Box<dyn Recipient + Send>> = vec![Box::new(recipient)]; let directive = create_age_directive("TEST_VAR".to_string(), plaintext, &recipients).await?; if let crate::config::env_directive::EnvDirective::Age { value, format, .. } = directive { // Small value should not be compressed (format should be None/Raw) assert!( format.is_none() || matches!(format, Some(crate::config::env_directive::AgeFormat::Raw)) ); use age::secrecy::ExposeSecret; env::set_var("MISE_AGE_KEY", key.to_string().expose_secret()); let decrypted = decrypt_age_directive(&crate::config::env_directive::EnvDirective::Age { key: "TEST_VAR".to_string(), value, format, options: Default::default(), }) .await?; env::remove_var("MISE_AGE_KEY"); assert_eq!(decrypted, plaintext); } else { panic!("Expected Age directive"); } Ok(()) } #[tokio::test] async fn test_age_x25519_round_trip_large() -> Result<()> { let key = age::x25519::Identity::generate(); let recipient = key.to_public(); // Large value should be compressed (>1KB) let plaintext = "x".repeat(2000); let recipients: Vec<Box<dyn Recipient + Send>> = vec![Box::new(recipient)]; let directive = create_age_directive("TEST_VAR".to_string(), &plaintext, &recipients).await?; if let crate::config::env_directive::EnvDirective::Age { value, format, .. 
} = directive { // Large value should be compressed assert_eq!(format, Some(crate::config::env_directive::AgeFormat::Zstd)); use age::secrecy::ExposeSecret; env::set_var("MISE_AGE_KEY", key.to_string().expose_secret()); let decrypted = decrypt_age_directive(&crate::config::env_directive::EnvDirective::Age { key: "TEST_VAR".to_string(), value, format, options: Default::default(), }) .await?; env::remove_var("MISE_AGE_KEY"); assert_eq!(decrypted, plaintext); } else { panic!("Expected Age directive"); } Ok(()) } #[test] fn test_parse_recipient() -> Result<()> { let age_recipient = "age1ql3z7hjy54pw3hyww5ayyfg7zqgvc7w3j2elw8zmrj2kg5sfn9aqmcac8p"; let parsed = parse_recipient(age_recipient)?; assert!(parsed.is_some()); // Note: The SSH recipient parser in the age crate is strict about format // This is a valid format example let ssh_recipient = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJmkfJ8VZq4m5k7tJVts7+nR01fbRvLHLgeQCF6FWYr5"; let parsed = parse_recipient(ssh_recipient)?; assert!(parsed.is_some()); let invalid = "invalid_recipient"; let parsed = parse_recipient(invalid)?; assert!(parsed.is_none()); Ok(()) } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/timeout.rs
src/timeout.rs
use std::sync::mpsc; use std::thread; use std::time::Duration; use crate::ui::time::format_duration; use color_eyre::eyre::{Report, Result}; use std::fmt::{Display, Formatter}; #[derive(Debug, Clone, Copy)] pub struct TimeoutError { pub duration: Duration, } impl Display for TimeoutError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "timed out after {}", format_duration(self.duration)) } } impl std::error::Error for TimeoutError {} pub fn run_with_timeout<F, T>(f: F, timeout: Duration) -> Result<T> where F: FnOnce() -> Result<T> + Send, T: Send, { let (tx, rx) = mpsc::channel(); thread::scope(|s| { s.spawn(move || { let result = f(); // If sending fails, the timeout has already been reached. let _ = tx.send(result); }); let recv: Result<T> = rx .recv_timeout(timeout) .map_err(|_| Report::from(TimeoutError { duration: timeout }))?; recv }) } pub async fn run_with_timeout_async<F, Fut, T>(f: F, timeout: Duration) -> Result<T> where Fut: Future<Output = Result<T>> + Send, T: Send, F: FnOnce() -> Fut, { match tokio::time::timeout(timeout, f()).await { Ok(Ok(output)) => Ok(output), Ok(Err(e)) => Err(e), Err(_) => Err(TimeoutError { duration: timeout }.into()), } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/direnv.rs
src/direnv.rs
use std::collections::HashMap; use std::env::{join_paths, split_paths}; use std::fmt::{Display, Formatter}; use std::io::Write; use std::path::{Path, PathBuf}; use crate::env::PATH_KEY; use base64::prelude::*; use eyre::Result; use flate2::Compression; use flate2::write::{ZlibDecoder, ZlibEncoder}; use itertools::Itertools; use serde_derive::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize)] pub struct DirenvDiff { #[serde(default, rename = "p")] pub old: HashMap<String, String>, #[serde(default, rename = "n")] pub new: HashMap<String, String>, } impl DirenvDiff { pub fn parse(input: &str) -> Result<DirenvDiff> { // let bytes = BASE64_URL_SAFE.decode(input)?; // let uncompressed = inflate_bytes_zlib(&bytes).unwrap(); // Ok(serde_json::from_slice(&uncompressed[..])?) let mut writer = Vec::new(); let mut decoder = ZlibDecoder::new(writer); let bytes = BASE64_URL_SAFE.decode(input)?; decoder.write_all(&bytes[..])?; writer = decoder.finish()?; Ok(serde_json::from_slice(&writer[..])?) 
} pub fn new_path(&self) -> Vec<PathBuf> { let path = self.new.get(&*PATH_KEY); match path { Some(path) => split_paths(path).collect(), None => vec![], } } pub fn old_path(&self) -> Vec<PathBuf> { let path = self.old.get(&*PATH_KEY); match path { Some(path) => split_paths(path).collect(), None => vec![], } } /// this adds a directory to both the old and new path in DIRENV_DIFF /// the purpose is to trick direnv into thinking that this path has always been there /// that way it does not remove it when it modifies PATH /// it returns the old and new paths as vectors pub fn add_path_to_old_and_new(&mut self, path: &Path) -> Result<(Vec<PathBuf>, Vec<PathBuf>)> { let mut old = self.old_path(); let mut new = self.new_path(); old.insert(0, path.into()); new.insert(0, path.into()); self.old.insert( PATH_KEY.to_string(), join_paths(&old)?.into_string().unwrap(), ); self.new.insert( PATH_KEY.to_string(), join_paths(&new)?.into_string().unwrap(), ); Ok((old, new)) } pub fn remove_path_from_old_and_new( &mut self, path: &Path, ) -> Result<(Vec<PathBuf>, Vec<PathBuf>)> { let mut old = self.old_path(); let mut new = self.new_path(); // remove the path from both old and new but only once old.iter().position(|p| p == path).map(|i| old.remove(i)); new.iter().position(|p| p == path).map(|i| new.remove(i)); self.old.insert( PATH_KEY.to_string(), join_paths(&old)?.into_string().unwrap(), ); self.new.insert( PATH_KEY.to_string(), join_paths(&new)?.into_string().unwrap(), ); Ok((old, new)) } pub fn dump(&self) -> Result<String> { let mut gz = ZlibEncoder::new(Vec::new(), Compression::fast()); gz.write_all(&serde_json::to_vec(self)?)?; Ok(BASE64_URL_SAFE.encode(gz.finish()?)) } } impl Display for DirenvDiff { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { let print_sorted = |hashmap: &HashMap<String, String>| { hashmap .iter() .map(|(k, v)| format!("{k}={v}")) .sorted() .collect::<Vec<_>>() }; f.debug_struct("DirenvDiff") .field("old", &print_sorted(&self.old)) .field("new", 
&print_sorted(&self.new)) .finish() } } #[cfg(test)] mod tests { use crate::config::Config; use super::*; use insta::assert_snapshot; #[tokio::test] async fn test_parse() { let _config = Config::get().await.unwrap(); let input = r#"eJys0c1yojAAwPF3ybmWaLB-zPSAGCqIQCGgeGGIELDlM2BEOr77zs7szr7AXv-H3-X_Axqw_gGabYM1qPk1A88XUP1OW93FVhBtdReswURq-FXEfSqJmEusLpKUdxLspALRJY1Yt2Bifk8aLhf5iiZIhhDCjEtE6svmteGuSJVHAV7-qppuYrAG_0WVXtNK8Ms__KgQdYc9sAapMXRj1-9XW8VX7A16UA4NPIs9xCK5WO51XnvfwWBT1R9N7zIcHvvJbZF5g8pk0V2c5CboIw8_NjOUWDK5qcxIcaFrp3anhwdr5FeKJmfd9stgqvuVZqcXsXHYJ-kSGWpoxyZLzf0a0LUcMgv17exenXXunfOTZZfybiVmb9OAhjDtHEcOk0lrRWG84OrRobW6IgGGZqwelglTq8UmJrbP9p0x9pTW5t3L21P1mZfL7_pMtIW599v-Cx_dmzEdCcZ1TAzkz7dvfO4QAefO6Y4VxYmijzgP_Oz9Hbz8uU5jDp7PXwEAAP__wB6qKg=="#; let diff = DirenvDiff::parse(input).unwrap(); assert_snapshot!(diff); } #[tokio::test] async fn test_dump() { let _config = Config::get().await.unwrap(); let diff = DirenvDiff { old: HashMap::from([("a".to_string(), "b".to_string())]), new: HashMap::from([("c".to_string(), "d".to_string())]), }; let output = diff.dump().unwrap(); assert_snapshot!(&output); let diff = DirenvDiff::parse(&output).unwrap(); assert_snapshot!(diff); } #[tokio::test] #[cfg(unix)] async fn test_add_path_to_old_and_new() { let _config = Config::get().await.unwrap(); let mut diff = DirenvDiff { old: HashMap::from([("PATH".to_string(), "/foo:/tmp:/bar:/old".to_string())]), new: HashMap::from([("PATH".to_string(), "/foo:/bar:/new".to_string())]), }; let path = PathBuf::from("/tmp"); diff.add_path_to_old_and_new(&path).unwrap(); assert_snapshot!(diff.old.get("PATH").unwrap()); assert_snapshot!(diff.new.get("PATH").unwrap()); } #[tokio::test] #[cfg(unix)] async fn test_null_path() { let _config = Config::get().await.unwrap(); let mut diff = DirenvDiff { old: HashMap::from([]), new: HashMap::from([]), }; let path = PathBuf::from("/tmp"); diff.add_path_to_old_and_new(&path).unwrap(); assert_snapshot!(diff.old.get("PATH").unwrap()); 
assert_snapshot!(diff.new.get("PATH").unwrap()); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/netrc.rs
src/netrc.rs
use std::path::PathBuf; use std::sync::LazyLock; use netrc_rs::Netrc; use crate::config::Settings; use crate::dirs; /// Cached parsed netrc file static NETRC: LazyLock<Option<Netrc>> = LazyLock::new(|| { let settings = Settings::get(); if !settings.netrc { return None; } let path = netrc_path(); if !path.exists() { return None; } // Check file permissions on Unix systems #[cfg(unix)] { use std::os::unix::fs::PermissionsExt; if let Ok(metadata) = std::fs::metadata(&path) { let mode = metadata.permissions().mode(); if mode & 0o077 != 0 { warn!( "netrc file {} has insecure permissions (mode: {:o}). Should be 0600 or 0400", path.display(), mode & 0o777 ); } } } match std::fs::read_to_string(&path) { Ok(content) => match Netrc::parse(content, false) { Ok(netrc) => { debug!("Loaded netrc from {}", path.display()); Some(netrc) } Err(e) => { warn!("Failed to parse netrc file {}: {}", path.display(), e); None } }, Err(e) => { warn!("Failed to read netrc file {}: {}", path.display(), e); None } } }); /// Get the path to the netrc file /// /// Checks in order: /// 1. Custom path from settings (netrc_file) /// 2. %USERPROFILE%\_netrc on Windows (Windows convention) /// 3. 
~/.netrc (Unix default, also Windows fallback) fn netrc_path() -> PathBuf { let settings = Settings::get(); if let Some(path) = &settings.netrc_file { return path.clone(); } #[cfg(windows)] { // On Windows, try _netrc first (Windows convention) let windows_netrc = dirs::HOME.join("_netrc"); if windows_netrc.exists() { return windows_netrc; } } dirs::HOME.join(".netrc") } /// Look up credentials for a given host from the netrc file /// /// Returns `Some((login, password))` if credentials are found, `None` otherwise pub fn get_credentials(host: &str) -> Option<(String, String)> { let netrc = NETRC.as_ref()?; // First try exact host match if let Some(machine) = netrc.machines.iter().find(|m| { m.name .as_ref() .is_some_and(|name| name.eq_ignore_ascii_case(host)) }) && let (Some(login), Some(password)) = (&machine.login, &machine.password) { trace!("Found netrc credentials for host: {}", host); return Some((login.clone(), password.clone())); } // Fall back to default machine if no exact match if let Some(machine) = netrc.machines.iter().find(|m| m.name.is_none()) && let (Some(login), Some(password)) = (&machine.login, &machine.password) { trace!("Using default netrc credentials for host: {}", host); return Some((login.clone(), password.clone())); } None }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/http.rs
src/http.rs
use std::collections::HashMap; use std::io::Write; use std::path::Path; use std::sync::{Arc, Mutex}; use std::time::Duration; use base64::Engine; use base64::prelude::BASE64_STANDARD; use eyre::{Report, Result, bail, ensure}; use regex::Regex; use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{ClientBuilder, IntoUrl, Method, Response}; use std::sync::LazyLock as Lazy; use tokio::sync::OnceCell; use tokio_retry::Retry; use tokio_retry::strategy::{ExponentialBackoff, jitter}; use url::Url; use crate::cli::version; use crate::config::Settings; use crate::file::display_path; use crate::netrc; use crate::ui::progress_report::SingleReport; use crate::ui::time::format_duration; use crate::{env, file}; #[cfg(not(test))] pub static HTTP_VERSION_CHECK: Lazy<Client> = Lazy::new(|| Client::new(Duration::from_secs(3), ClientKind::VersionCheck).unwrap()); pub static HTTP: Lazy<Client> = Lazy::new(|| Client::new(Settings::get().http_timeout(), ClientKind::Http).unwrap()); pub static HTTP_FETCH: Lazy<Client> = Lazy::new(|| { Client::new( Settings::get().fetch_remote_versions_timeout(), ClientKind::Fetch, ) .unwrap() }); /// In-memory cache for HTTP text responses, useful for requests that are repeated /// during a single operation (e.g., fetching SHASUMS256.txt for multiple platforms). /// Each URL gets its own OnceCell to ensure concurrent requests for the same URL /// wait for the first fetch to complete rather than all fetching simultaneously. 
type CachedResult = Arc<OnceCell<Result<String, String>>>; static HTTP_CACHE: Lazy<Mutex<HashMap<String, CachedResult>>> = Lazy::new(|| Mutex::new(HashMap::new())); #[derive(Debug)] pub struct Client { reqwest: reqwest::Client, timeout: Duration, kind: ClientKind, } #[derive(Debug, Clone, Copy)] enum ClientKind { Http, Fetch, #[allow(dead_code)] VersionCheck, } impl Client { fn new(timeout: Duration, kind: ClientKind) -> Result<Self> { Ok(Self { reqwest: Self::_new() .read_timeout(timeout) .connect_timeout(timeout) .build()?, timeout, kind, }) } fn _new() -> ClientBuilder { let v = &*version::VERSION; let shell = env::MISE_SHELL.map(|s| s.to_string()).unwrap_or_default(); ClientBuilder::new() .user_agent(format!("mise/{v} {shell}").trim()) .gzip(true) .zstd(true) } pub async fn get_bytes<U: IntoUrl>(&self, url: U) -> Result<impl AsRef<[u8]>> { let url = url.into_url().unwrap(); let resp = self.get_async(url.clone()).await?; Ok(resp.bytes().await?) } pub async fn get_async<U: IntoUrl>(&self, url: U) -> Result<Response> { let url = url.into_url().unwrap(); let headers = github_headers(&url); self.get_async_with_headers(url, &headers).await } async fn get_async_with_headers<U: IntoUrl>( &self, url: U, headers: &HeaderMap, ) -> Result<Response> { ensure!(!*env::OFFLINE, "offline mode is enabled"); let url = url.into_url().unwrap(); let resp = self .send_with_https_fallback(Method::GET, url, headers, "GET") .await?; resp.error_for_status_ref()?; Ok(resp) } pub async fn head<U: IntoUrl>(&self, url: U) -> Result<Response> { let url = url.into_url().unwrap(); let headers = github_headers(&url); self.head_async_with_headers(url, &headers).await } pub async fn head_async_with_headers<U: IntoUrl>( &self, url: U, headers: &HeaderMap, ) -> Result<Response> { ensure!(!*env::OFFLINE, "offline mode is enabled"); let url = url.into_url().unwrap(); let resp = self .send_with_https_fallback(Method::HEAD, url, headers, "HEAD") .await?; resp.error_for_status_ref()?; Ok(resp) } pub 
async fn get_text<U: IntoUrl>(&self, url: U) -> Result<String> { let mut url = url.into_url().unwrap(); let resp = self.get_async(url.clone()).await?; let text = resp.text().await?; if text.starts_with("<!DOCTYPE html>") { if url.scheme() == "http" { // try with https since http may be blocked url.set_scheme("https").unwrap(); return Box::pin(self.get_text(url)).await; } bail!("Got HTML instead of text from {}", url); } Ok(text) } /// Like get_text but caches results in memory for the duration of the process. /// Useful when the same URL will be requested multiple times (e.g., SHASUMS256.txt /// when locking multiple platforms). Concurrent requests for the same URL will /// wait for the first fetch to complete. pub async fn get_text_cached<U: IntoUrl>(&self, url: U) -> Result<String> { let url = url.into_url().unwrap(); let key = url.to_string(); // Get or create the OnceCell for this URL let cell = { let mut cache = HTTP_CACHE.lock().unwrap(); cache.entry(key).or_default().clone() }; // Initialize the cell if needed - concurrent callers will wait let result = cell .get_or_init(|| { let url = url.clone(); async move { match self.get_text(url).await { Ok(text) => Ok(text), Err(err) => Err(err.to_string()), } } }) .await; match result { Ok(text) => Ok(text.clone()), Err(err) => bail!("{}", err), } } pub async fn get_html<U: IntoUrl>(&self, url: U) -> Result<String> { let url = url.into_url().unwrap(); let resp = self.get_async(url.clone()).await?; let html = resp.text().await?; if !html.starts_with("<!DOCTYPE html>") { bail!("Got non-HTML text from {}", url); } Ok(html) } pub async fn json_headers<T, U: IntoUrl>(&self, url: U) -> Result<(T, HeaderMap)> where T: serde::de::DeserializeOwned, { let url = url.into_url().unwrap(); let resp = self.get_async(url).await?; let headers = resp.headers().clone(); let json = resp.json().await?; Ok((json, headers)) } pub async fn json_headers_with_headers<T, U: IntoUrl>( &self, url: U, headers: &HeaderMap, ) -> Result<(T, 
HeaderMap)> where T: serde::de::DeserializeOwned, { let url = url.into_url().unwrap(); let resp = self.get_async_with_headers(url, headers).await?; let headers = resp.headers().clone(); let json = resp.json().await?; Ok((json, headers)) } pub async fn json<T, U: IntoUrl>(&self, url: U) -> Result<T> where T: serde::de::DeserializeOwned, { self.json_headers(url).await.map(|(json, _)| json) } /// Like json but caches raw JSON text in memory for the duration of the process. /// Useful when the same URL will be requested multiple times (e.g., zig index.json /// when locking multiple platforms). Concurrent requests for the same URL will /// wait for the first fetch to complete. pub async fn json_cached<T, U: IntoUrl>(&self, url: U) -> Result<T> where T: serde::de::DeserializeOwned, { let text = self.get_text_cached(url).await?; Ok(serde_json::from_str(&text)?) } pub async fn json_with_headers<T, U: IntoUrl>(&self, url: U, headers: &HeaderMap) -> Result<T> where T: serde::de::DeserializeOwned, { self.json_headers_with_headers(url, headers) .await .map(|(json, _)| json) } /// POST JSON data to a URL. Returns Ok(true) on success, Ok(false) on non-success status. /// Errors only on network/connection failures. 
pub async fn post_json<U: IntoUrl, T: serde::Serialize>( &self, url: U, body: &T, ) -> Result<bool> { ensure!(!*env::OFFLINE, "offline mode is enabled"); let url = url.into_url()?; debug!("POST {}", &url); let resp = self .reqwest .post(url) .header("Content-Type", "application/json") .json(body) .send() .await?; Ok(resp.status().is_success()) } pub async fn download_file<U: IntoUrl>( &self, url: U, path: &Path, pr: Option<&dyn SingleReport>, ) -> Result<()> { let url = url.into_url()?; let headers = github_headers(&url); self.download_file_with_headers(url, path, &headers, pr) .await } pub async fn download_file_with_headers<U: IntoUrl>( &self, url: U, path: &Path, headers: &HeaderMap, pr: Option<&dyn SingleReport>, ) -> Result<()> { let url = url.into_url()?; debug!("GET Downloading {} to {}", &url, display_path(path)); let mut resp = self.get_async_with_headers(url.clone(), headers).await?; if let Some(length) = resp.content_length() && let Some(pr) = pr { // Reset progress on each attempt pr.set_length(length); pr.set_position(0); } let parent = path.parent().unwrap(); file::create_dir_all(parent)?; let mut file = tempfile::NamedTempFile::with_prefix_in(path, parent)?; while let Some(chunk) = resp.chunk().await? 
{ file.write_all(&chunk)?; if let Some(pr) = pr { pr.inc(chunk.len() as u64); } } file.persist(path)?; Ok(()) } async fn send_with_https_fallback( &self, method: Method, url: Url, headers: &HeaderMap, verb_label: &str, ) -> Result<Response> { Retry::spawn( default_backoff_strategy(Settings::get().http_retries), || { let method = method.clone(); let url = url.clone(); let headers = headers.clone(); async move { match self .send_once(method.clone(), url.clone(), &headers, verb_label) .await { Ok(resp) => Ok(resp), Err(_err) if url.scheme() == "http" => { let mut url = url; url.set_scheme("https").unwrap(); self.send_once(method, url, &headers, verb_label).await } Err(err) => Err(err), } } }, ) .await } async fn send_once( &self, method: Method, mut url: Url, headers: &HeaderMap, verb_label: &str, ) -> Result<Response> { apply_url_replacements(&mut url); debug!("{} {}", verb_label, &url); // Apply netrc credentials after URL replacement let mut final_headers = headers.clone(); final_headers.extend(netrc_headers(&url)); let mut req = self.reqwest.request(method, url.clone()); req = req.headers(final_headers); let resp = match req.send().await { Ok(resp) => resp, Err(err) => { if err.is_timeout() { let (setting, env_var) = match self.kind { ClientKind::Http => ("http_timeout", "MISE_HTTP_TIMEOUT"), ClientKind::Fetch => ( "fetch_remote_versions_timeout", "MISE_FETCH_REMOTE_VERSIONS_TIMEOUT", ), ClientKind::VersionCheck => ("version_check_timeout", ""), }; let hint = if env_var.is_empty() { format!( "HTTP timed out after {} for {}.", format_duration(self.timeout), url ) } else { format!( "HTTP timed out after {} for {} (change with `{}` or env `{}`).", format_duration(self.timeout), url, setting, env_var ) }; bail!(hint); } return Err(err.into()); } }; if *env::MISE_LOG_HTTP { eprintln!("{} {url} {}", verb_label, resp.status()); } debug!("{} {url} {}", verb_label, resp.status()); display_github_rate_limit(&resp); resp.error_for_status_ref()?; Ok(resp) } } pub fn 
error_code(e: &Report) -> Option<u16> { if e.to_string().contains("404") { // TODO: not this when I can figure out how to use eyre properly return Some(404); } if let Some(err) = e.downcast_ref::<reqwest::Error>() { err.status().map(|s| s.as_u16()) } else { None } } fn github_headers(url: &Url) -> HeaderMap { let mut headers = HeaderMap::new(); if url.host_str() == Some("api.github.com") && let Some(token) = &*env::GITHUB_TOKEN { headers.insert( "authorization", HeaderValue::from_str(format!("token {token}").as_str()).unwrap(), ); headers.insert( "x-github-api-version", HeaderValue::from_static("2022-11-28"), ); } headers } /// Get HTTP Basic authentication headers from netrc file for the given URL fn netrc_headers(url: &Url) -> HeaderMap { let mut headers = HeaderMap::new(); if let Some(host) = url.host_str() && let Some((login, password)) = netrc::get_credentials(host) { let credentials = BASE64_STANDARD.encode(format!("{login}:{password}")); if let Ok(value) = HeaderValue::from_str(&format!("Basic {credentials}")) { headers.insert("authorization", value); } } headers } /// Apply URL replacements based on settings configuration /// Supports both simple string replacement and regex patterns (prefixed with "regex:") pub fn apply_url_replacements(url: &mut Url) { let settings = Settings::get(); if let Some(replacements) = &settings.url_replacements { let url_string = url.to_string(); for (pattern, replacement) in replacements { if let Some(pattern_without_prefix) = pattern.strip_prefix("regex:") { // Regex replacement if let Ok(regex) = Regex::new(pattern_without_prefix) { let new_url_string = regex.replace(&url_string, replacement.as_str()); // Only proceed if the URL actually changed if new_url_string != url_string && let Ok(new_url) = new_url_string.parse() { *url = new_url; trace!( "Replaced URL using regex '{}': {} -> {}", pattern_without_prefix, url_string, url.as_str() ); return; // Apply only the first matching replacement } } else { warn!( "Invalid regex 
pattern in URL replacement: {}", pattern_without_prefix ); } } else { // Simple string replacement if url_string.contains(pattern) { let new_url_string = url_string.replace(pattern, replacement); // Only proceed if the URL actually changed if new_url_string != url_string && let Ok(new_url) = new_url_string.parse() { *url = new_url; trace!( "Replaced URL using string replacement '{}': {} -> {}", pattern, url_string, url.as_str() ); return; // Apply only the first matching replacement } } } } } } fn display_github_rate_limit(resp: &Response) { let status = resp.status().as_u16(); if status == 403 || status == 429 { let remaining = resp .headers() .get("x-ratelimit-remaining") .and_then(|r| r.to_str().ok()); if remaining.is_some_and(|r| r == "0") { if let Some(reset_time) = resp .headers() .get("x-ratelimit-reset") .and_then(|h| h.to_str().ok()) .and_then(|s| s.parse::<i64>().ok()) .and_then(|ts| chrono::DateTime::from_timestamp(ts, 0)) { warn!( "GitHub rate limit exceeded. Resets at {}", reset_time.with_timezone(&chrono::Local) ); } return; } // retry-after header is processed only if x-ratelimit-remaining is not 0 or is missing if let Some(retry_after) = resp .headers() .get("retry-after") .and_then(|h| h.to_str().ok()) .and_then(|s| s.parse::<u64>().ok()) { warn!( "GitHub rate limit exceeded. 
Retry after {} seconds", retry_after ); } } } fn default_backoff_strategy(retries: i64) -> impl Iterator<Item = std::time::Duration> { ExponentialBackoff::from_millis(10) .map(jitter) .take(retries.max(0) as usize) } #[cfg(test)] mod tests { use super::*; use confique::Partial; use indexmap::IndexMap; use url::Url; // Mutex to ensure tests don't interfere with each other when modifying global settings static TEST_SETTINGS_LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(()); // Helper to create test settings with specific URL replacements fn with_test_settings<F, R>(replacements: IndexMap<String, String>, test_fn: F) -> R where F: FnOnce() -> R, { // Lock to prevent parallel tests from interfering with global settings let _guard = TEST_SETTINGS_LOCK.lock().unwrap(); // Create settings with custom URL replacements let mut settings = crate::config::settings::SettingsPartial::empty(); settings.url_replacements = Some(replacements); // Set settings for this test crate::config::Settings::reset(Some(settings)); // Run test let result = test_fn(); // Clean up after test crate::config::Settings::reset(None); result } #[test] fn test_simple_string_replacement() { let mut replacements = IndexMap::new(); replacements.insert("github.com".to_string(), "my-proxy.com".to_string()); with_test_settings(replacements, || { let mut url = Url::parse("https://github.com/owner/repo").unwrap(); apply_url_replacements(&mut url); assert_eq!(url.as_str(), "https://my-proxy.com/owner/repo"); }); } #[test] fn test_full_url_string_replacement() { let mut replacements = IndexMap::new(); replacements.insert( "https://github.com".to_string(), "https://my-proxy.com/artifactory/github-remote".to_string(), ); with_test_settings(replacements, || { let mut url = Url::parse("https://github.com/owner/repo").unwrap(); apply_url_replacements(&mut url); assert_eq!( url.as_str(), "https://my-proxy.com/artifactory/github-remote/owner/repo" ); }); } #[test] fn test_protocol_specific_replacement() { let mut 
replacements = IndexMap::new(); replacements.insert( "https://github.com".to_string(), "https://secure-proxy.com".to_string(), ); with_test_settings(replacements.clone(), || { // HTTPS gets replaced let mut url1 = Url::parse("https://github.com/owner/repo").unwrap(); apply_url_replacements(&mut url1); assert_eq!(url1.as_str(), "https://secure-proxy.com/owner/repo"); }); with_test_settings(replacements, || { // HTTP does not get replaced (no match) let mut url2 = Url::parse("http://github.com/owner/repo").unwrap(); apply_url_replacements(&mut url2); assert_eq!(url2.as_str(), "http://github.com/owner/repo"); }); } #[test] fn test_regex_replacement() { let mut replacements = IndexMap::new(); replacements.insert( r"regex:https://github\.com".to_string(), "https://my-proxy.com".to_string(), ); with_test_settings(replacements, || { let mut url = Url::parse("https://github.com/owner/repo").unwrap(); apply_url_replacements(&mut url); assert_eq!(url.as_str(), "https://my-proxy.com/owner/repo"); }); } #[test] fn test_regex_with_capture_groups() { let mut replacements = IndexMap::new(); replacements.insert( r"regex:https://github\.com/([^/]+)/([^/]+)".to_string(), "https://my-proxy.com/mirror/$1/$2".to_string(), ); with_test_settings(replacements, || { let mut url = Url::parse("https://github.com/owner/repo/releases").unwrap(); apply_url_replacements(&mut url); assert_eq!( url.as_str(), "https://my-proxy.com/mirror/owner/repo/releases" ); }); } #[test] fn test_regex_invalid_replacement_url() { let mut replacements = IndexMap::new(); replacements.insert( r"regex:https://github\.com/([^/]+)".to_string(), "not-a-valid-url".to_string(), ); with_test_settings(replacements, || { // Invalid result URL should be ignored, original URL unchanged let mut url = Url::parse("https://github.com/owner/repo").unwrap(); let original = url.clone(); apply_url_replacements(&mut url); assert_eq!(url.as_str(), original.as_str()); }); } #[test] fn test_multiple_replacements_first_match_wins() { let 
mut replacements = IndexMap::new(); replacements.insert("github.com".to_string(), "first-proxy.com".to_string()); replacements.insert("github".to_string(), "second-proxy.com".to_string()); with_test_settings(replacements, || { let mut url = Url::parse("https://github.com/owner/repo").unwrap(); apply_url_replacements(&mut url); // First replacement should win assert_eq!(url.as_str(), "https://first-proxy.com/owner/repo"); }); } #[test] fn test_no_replacements_configured() { let replacements = IndexMap::new(); // Empty with_test_settings(replacements, || { let mut url = Url::parse("https://github.com/owner/repo").unwrap(); let original = url.clone(); apply_url_replacements(&mut url); assert_eq!(url.as_str(), original.as_str()); }); } #[test] fn test_regex_complex_patterns() { let mut replacements = IndexMap::new(); // Convert GitHub releases to JFrog Artifactory replacements.insert( r"regex:https://github\.com/([^/]+)/([^/]+)/releases/download/([^/]+)/(.+)".to_string(), "https://artifactory.company.com/artifactory/github-releases/$1/$2/$3/$4".to_string(), ); with_test_settings(replacements, || { let mut url = Url::parse("https://github.com/owner/repo/releases/download/v1.0.0/file.tar.gz") .unwrap(); apply_url_replacements(&mut url); assert_eq!( url.as_str(), "https://artifactory.company.com/artifactory/github-releases/owner/repo/v1.0.0/file.tar.gz" ); }); } #[test] fn test_no_settings_configured() { // Test the real apply_url_replacements function with no settings override let _guard = TEST_SETTINGS_LOCK.lock().unwrap(); crate::config::Settings::reset(None); let mut url = Url::parse("https://github.com/owner/repo").unwrap(); let original = url.clone(); // This should not crash and should leave URL unchanged apply_url_replacements(&mut url); assert_eq!(url.as_str(), original.as_str()); } #[test] fn test_replacement_affects_full_url_not_just_hostname() { // Test that replacement works on the full URL string, not just hostname let mut replacements = IndexMap::new(); 
replacements.insert( "github.com/owner".to_string(), "proxy.com/mirror".to_string(), ); with_test_settings(replacements, || { let mut url = Url::parse("https://github.com/owner/repo").unwrap(); apply_url_replacements(&mut url); // This demonstrates that replacement happens on full URL, not just hostname assert_eq!(url.as_str(), "https://proxy.com/mirror/repo"); }); } #[test] fn test_path_replacement_example() { // Test replacing part of the path, proving it's not hostname-only let mut replacements = IndexMap::new(); replacements.insert("/releases/download/".to_string(), "/artifacts/".to_string()); with_test_settings(replacements, || { let mut url = Url::parse("https://github.com/owner/repo/releases/download/v1.0.0/file.tar.gz") .unwrap(); apply_url_replacements(&mut url); // Path component was replaced, proving it's full URL replacement assert_eq!( url.as_str(), "https://github.com/owner/repo/artifacts/v1.0.0/file.tar.gz" ); }); } #[test] fn test_documentation_examples() { // Test the examples from the documentation to ensure they work correctly // Example 1: Simple hostname replacement let mut replacements = IndexMap::new(); replacements.insert("github.com".to_string(), "myregistry.net".to_string()); with_test_settings(replacements, || { let mut url = Url::parse("https://github.com/user/repo").unwrap(); apply_url_replacements(&mut url); assert_eq!(url.as_str(), "https://myregistry.net/user/repo"); }); // Example 2: Protocol + hostname replacement let mut replacements2 = IndexMap::new(); replacements2.insert( "https://github.com".to_string(), "https://proxy.corp.com/github-mirror".to_string(), ); with_test_settings(replacements2, || { let mut url = Url::parse("https://github.com/user/repo").unwrap(); apply_url_replacements(&mut url); assert_eq!( url.as_str(), "https://proxy.corp.com/github-mirror/user/repo" ); }); // Example 3: Domain + path replacement let mut replacements3 = IndexMap::new(); replacements3.insert( "github.com/releases/download/".to_string(), 
"cdn.example.com/artifacts/".to_string(), ); with_test_settings(replacements3, || { let mut url = Url::parse("https://github.com/releases/download/v1.0.0/file.tar.gz").unwrap(); apply_url_replacements(&mut url); assert_eq!( url.as_str(), "https://cdn.example.com/artifacts/v1.0.0/file.tar.gz" ); }); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/lock_file.rs
src/lock_file.rs
use std::path::{Path, PathBuf}; use eyre::Result; use crate::dirs; use crate::file::{create_dir_all, display_path}; use crate::hash::hash_to_str; pub type OnLockedFn = Box<dyn Fn(&Path)>; pub struct LockFile { path: PathBuf, on_locked: Option<OnLockedFn>, } impl LockFile { pub fn new(path: &Path) -> Self { let path = dirs::CACHE.join("lockfiles").join(hash_to_str(&path)); Self { path, on_locked: None, } } pub fn with_callback<F>(mut self, cb: F) -> Self where F: Fn(&Path) + 'static, { self.on_locked = Some(Box::new(cb)); self } pub fn lock(self) -> Result<fslock::LockFile> { if let Some(parent) = self.path.parent() { create_dir_all(parent)?; } let mut lock = fslock::LockFile::open(&self.path)?; if !lock.try_lock()? { if let Some(f) = self.on_locked { f(&self.path) } lock.lock()?; } Ok(lock) } } pub(crate) fn get(path: &Path, force: bool) -> eyre::Result<Option<fslock::LockFile>> { let lock = if force { None } else { let lock = LockFile::new(path) .with_callback(|l| { debug!("waiting for lock on {}", display_path(l)); }) .lock()?; Some(lock) }; Ok(lock) }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/watch_files.rs
src/watch_files.rs
use crate::cmd::cmd; use crate::config::{Config, Settings}; use crate::dirs; use crate::toolset::Toolset; use eyre::Result; use globset::{GlobBuilder, GlobSetBuilder}; use itertools::Itertools; use std::iter::once; use std::path::{Path, PathBuf}; use std::sync::Mutex; use std::{collections::BTreeSet, sync::Arc}; #[derive( Debug, Clone, serde::Serialize, serde::Deserialize, Ord, PartialOrd, Eq, PartialEq, Hash, )] pub struct WatchFile { pub patterns: Vec<String>, pub run: String, } pub static MODIFIED_FILES: Mutex<Option<BTreeSet<PathBuf>>> = Mutex::new(None); pub fn add_modified_file(file: PathBuf) { let mut mu = MODIFIED_FILES.lock().unwrap(); let set = mu.get_or_insert_with(BTreeSet::new); set.insert(file); } pub async fn execute_runs(config: &Arc<Config>, ts: &Toolset) { let files = { let mut mu = MODIFIED_FILES.lock().unwrap(); mu.take().unwrap_or_default() }; if files.is_empty() { return; } for (root, wf) in config.watch_file_hooks().unwrap_or_default() { match has_matching_files(&root, &wf, &files) { Ok(files) if files.is_empty() => { continue; } Ok(files) => { if let Err(e) = execute(config, ts, &root, &wf.run, files).await { warn!("error executing watch_file hook: {e}"); } } Err(e) => { warn!("error matching files: {e}"); } } } } async fn execute( config: &Arc<Config>, ts: &Toolset, root: &Path, run: &str, files: Vec<&PathBuf>, ) -> Result<()> { Settings::get().ensure_experimental("watch_file_hooks")?; let modified_files_var = files .iter() .map(|f| f.to_string_lossy().replace(':', "\\:")) .join(":"); let shell = Settings::get().default_inline_shell()?; let args = shell .iter() .skip(1) .map(|s| s.as_str()) .chain(once(run)) .collect_vec(); let mut env = ts.full_env(config).await?; env.insert("MISE_WATCH_FILES_MODIFIED".to_string(), modified_files_var); if let Some(cwd) = &*dirs::CWD { env.insert( "MISE_ORIGINAL_CWD".to_string(), cwd.to_string_lossy().to_string(), ); } env.insert( "MISE_PROJECT_ROOT".to_string(), root.to_string_lossy().to_string(), ); // 
TODO: this should be different but I don't have easy access to it // env.insert("MISE_CONFIG_ROOT".to_string(), root.to_string_lossy().to_string()); cmd(&shell[0], args) .stdout_to_stderr() // .dir(root) .full_env(env) .run()?; Ok(()) } fn has_matching_files<'a>( root: &Path, wf: &'a WatchFile, files: &'a BTreeSet<PathBuf>, ) -> Result<Vec<&'a PathBuf>> { let mut glob = GlobSetBuilder::new(); for pattern in &wf.patterns { match GlobBuilder::new(pattern).literal_separator(true).build() { Ok(g) => { glob.add(g); } Err(e) => { warn!("invalid glob pattern: {e}"); } } } let glob = glob.build()?; Ok(files .iter() .filter(|file| { if let Ok(rel) = file.strip_prefix(root) { !glob.matches(rel).is_empty() } else { false } }) .collect()) } pub fn glob(root: &Path, patterns: &[String]) -> Result<Vec<PathBuf>> { if patterns.is_empty() { return Ok(vec![]); } let opts = glob::MatchOptions { require_literal_separator: true, ..Default::default() }; Ok(patterns .iter() .map(|pattern| root.join(pattern).to_string_lossy().to_string()) .filter_map(|pattern| glob::glob_with(&pattern, opts).ok()) .collect::<Vec<_>>() .into_iter() .flat_map(|paths| paths.filter_map(|p| p.ok())) .collect()) // let mut overrides = ignore::overrides::OverrideBuilder::new(root); // for pattern in patterns { // overrides.add(&format!("./{pattern}"))?; // } // let files = Arc::new(Mutex::new(vec![])); // ignore::WalkBuilder::new(root) // .overrides(overrides.build()?) // .standard_filters(false) // .follow_links(true) // .build_parallel() // .run(|| { // let files = files.clone(); // Box::new(move |entry| { // if let Ok(entry) = entry { // let mut files = files.lock().unwrap(); // files.push(entry.path().to_path_buf()); // } // WalkState::Continue // }) // }); // // let files = files.lock().unwrap(); // Ok(files.to_vec()) }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/fake_asdf_windows.rs
src/fake_asdf_windows.rs
use std::env::{join_paths, split_paths}; use std::path::PathBuf; use crate::env; use crate::env::PATH_KEY; #[cfg(windows)] pub fn setup() -> color_eyre::Result<PathBuf> { let path = env::MISE_DATA_DIR.join(".fake-asdf"); Ok(path) } pub fn get_path_with_fake_asdf() -> String { let mut path = split_paths(&env::var_os(&*PATH_KEY).unwrap_or_default()).collect::<Vec<_>>(); match setup() { Ok(fake_asdf_path) => { path.insert(0, fake_asdf_path); } Err(e) => { warn!("Failed to setup fake asdf: {:#}", e); } }; join_paths(path).unwrap().to_string_lossy().to_string() }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/git.rs
src/git.rs
use std::fmt::Debug; use std::path::{Path, PathBuf}; use duct::Expression; use eyre::{Result, WrapErr, eyre}; use gix::{self}; use once_cell::sync::OnceCell; use xx::file; use crate::cmd::CmdLineRunner; use crate::config::Settings; use crate::file::touch_dir; use crate::ui::progress_report::SingleReport; pub struct Git { pub dir: PathBuf, pub repo: OnceCell<gix::Repository>, } macro_rules! git_cmd { ( $dir:expr $(, $arg:expr )* $(,)? ) => { { let safe = format!("safe.directory={}", $dir.display()); cmd!("git", "-C", $dir, "-c", safe $(, $arg)*) } } } macro_rules! git_cmd_read { ( $dir:expr $(, $arg:expr )* $(,)? ) => { { git_cmd!($dir $(, $arg)*).read().wrap_err_with(|| { let args = [$($arg,)*].join(" "); format!("git {args} failed") }) } } } impl Git { pub fn new<P: AsRef<Path>>(dir: P) -> Self { Self { dir: dir.as_ref().to_path_buf(), repo: OnceCell::new(), } } pub fn repo(&self) -> Result<&gix::Repository> { self.repo.get_or_try_init(|| { trace!("opening git repository via gix at {:?}", self.dir); gix::open(&self.dir) .wrap_err_with(|| format!("failed to open git repository at {:?}", self.dir)) .inspect_err(|err| warn!("{err:#}")) }) } pub fn is_repo(&self) -> bool { self.dir.join(".git").is_dir() } pub fn update(&self, gitref: Option<String>) -> Result<(String, String)> { let gitref = gitref.map_or_else(|| self.current_branch(), Ok)?; self.update_ref(gitref, false) } pub fn update_tag(&self, gitref: String) -> Result<(String, String)> { self.update_ref(gitref, true) } fn update_ref(&self, gitref: String, is_tag_ref: bool) -> Result<(String, String)> { debug!("updating {} to {}", self.dir.display(), gitref); let exec = |cmd: Expression| match cmd.stderr_to_stdout().stdout_capture().unchecked().run() { Ok(res) => { if res.status.success() { Ok(()) } else { Err(eyre!( "git failed: {cmd:?} {}", String::from_utf8(res.stdout).unwrap() )) } } Err(err) => Err(eyre!("git failed: {cmd:?} {err:#}")), }; debug!("updating {} to {} with git", self.dir.display(), gitref); let 
refspec = if is_tag_ref { format!("refs/tags/{gitref}:refs/tags/{gitref}") } else { format!("{gitref}:{gitref}") }; exec(git_cmd!( &self.dir, "fetch", "--prune", "--update-head-ok", "origin", &refspec ))?; let prev_rev = self.current_sha()?; exec(git_cmd!( &self.dir, "-c", "advice.detachedHead=false", "-c", "advice.objectNameWarning=false", "checkout", "--force", &gitref ))?; let post_rev = self.current_sha()?; touch_dir(&self.dir)?; Ok((prev_rev, post_rev)) } pub fn clone(&self, url: &str, options: CloneOptions) -> Result<()> { if let Some(parent) = self.dir.parent() { file::mkdirp(parent)?; } if Settings::get().libgit2 || Settings::get().gix { debug!("cloning {} to {} with gix", url, self.dir.display()); let mut prepare_clone = gix::prepare_clone(url, &self.dir)?; if let Some(branch) = &options.branch { prepare_clone = prepare_clone.with_ref_name(Some(branch))?; } let (mut prepare_checkout, _) = prepare_clone .fetch_then_checkout(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)?; prepare_checkout .main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)?; return Ok(()); } debug!("cloning {} to {} with git", url, self.dir.display()); match get_git_version() { Ok(version) => trace!("git version: {}", version), Err(err) => warn!( "failed to get git version: {:#}\n Git is required to use mise.", err ), } if let Some(pr) = &options.pr { // in order to prevent hiding potential password prompt, just disable the progress bar pr.abandon(); } let mut cmd = CmdLineRunner::new("git") .arg("clone") .arg("-q") .arg("--depth") .arg("1") .arg(url) .arg(&self.dir); if let Some(branch) = &options.branch { cmd = cmd.args([ "-b", branch, "--single-branch", "-c", "advice.detachedHead=false", ]); } cmd.execute()?; Ok(()) } pub fn update_submodules(&self) -> Result<()> { debug!("updating submodules in {}", self.dir.display()); let exec = |cmd: Expression| match cmd.stderr_to_stdout().stdout_capture().unchecked().run() { Ok(res) => { if res.status.success() { 
Ok(()) } else { Err(eyre!( "git failed: {cmd:?} {}", String::from_utf8(res.stdout).unwrap() )) } } Err(err) => Err(eyre!("git failed: {cmd:?} {err:#}")), }; exec( git_cmd!(&self.dir, "submodule", "update", "--init", "--recursive") .env("GIT_TERMINAL_PROMPT", "0"), )?; Ok(()) } pub fn current_branch(&self) -> Result<String> { let dir = &self.dir; if let Ok(repo) = self.repo() { let head = repo.head()?; let branch = head .referent_name() .map(|name| name.shorten().to_string()) .unwrap_or_else(|| head.id().unwrap().to_string()); debug!("current branch for {dir:?}: {branch}"); return Ok(branch); } let branch = git_cmd_read!(&self.dir, "branch", "--show-current")?; debug!("current branch for {}: {}", self.dir.display(), &branch); Ok(branch) } pub fn current_sha(&self) -> Result<String> { let dir = &self.dir; if let Ok(repo) = self.repo() { let head = repo.head()?; let id = head.id(); let sha = id.unwrap().to_string(); debug!("current sha for {dir:?}: {sha}"); return Ok(sha); } let sha = git_cmd_read!(&self.dir, "rev-parse", "HEAD")?; debug!("current sha for {}: {}", self.dir.display(), &sha); Ok(sha) } pub fn current_sha_short(&self) -> Result<String> { let dir = &self.dir; if let Ok(repo) = self.repo() { let head = repo.head()?; let id = head.id(); let sha = id.unwrap().to_string()[..7].to_string(); debug!("current sha for {dir:?}: {sha}"); return Ok(sha); } let sha = git_cmd_read!(&self.dir, "rev-parse", "--short", "HEAD")?; debug!("current sha for {dir:?}: {sha}"); Ok(sha) } pub fn current_abbrev_ref(&self) -> Result<String> { let dir = &self.dir; if let Ok(repo) = self.repo() { let head = repo.head()?; let head = head.name().shorten().to_string(); debug!("current abbrev ref for {dir:?}: {head}"); return Ok(head); } let aref = git_cmd_read!(&self.dir, "rev-parse", "--abbrev-ref", "HEAD")?; debug!("current abbrev ref for {}: {}", self.dir.display(), &aref); Ok(aref) } pub fn get_remote_url(&self) -> Option<String> { let dir = &self.dir; if !self.exists() { return 
None; } if let Ok(repo) = self.repo() && let Ok(remote) = repo.find_remote("origin") && let Some(url) = remote.url(gix::remote::Direction::Fetch) { trace!("remote url for {dir:?}: {url}"); return Some(url.to_string()); } let res = git_cmd_read!(&self.dir, "config", "--get", "remote.origin.url"); match res { Ok(url) => { debug!("remote url for {dir:?}: {url}"); Some(url) } Err(err) => { warn!("failed to get remote url for {dir:?}: {err:#}"); None } } } pub fn split_url_and_ref(url: &str) -> (String, Option<String>) { match url.split_once('#') { Some((url, _ref)) => (url.to_string(), Some(_ref.to_string())), None => (url.to_string(), None), } } pub fn exists(&self) -> bool { self.dir.join(".git").is_dir() } pub fn get_root() -> eyre::Result<PathBuf> { Ok(cmd!("git", "rev-parse", "--show-toplevel") .read()? .trim() .into()) } } fn get_git_version() -> Result<String> { let version = cmd!("git", "--version").read()?; Ok(version.trim().into()) } impl Debug for Git { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Git").field("dir", &self.dir).finish() } } #[derive(Default)] pub struct CloneOptions<'a> { pr: Option<&'a dyn SingleReport>, branch: Option<String>, } impl<'a> CloneOptions<'a> { pub fn pr(mut self, pr: &'a dyn SingleReport) -> Self { self.pr = Some(pr); self } pub fn branch(mut self, branch: &str) -> Self { self.branch = Some(branch.to_string()); self } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/logger.rs
src/logger.rs
use crate::config::{Config, Settings}; use eyre::Result; use std::fs::{File, OpenOptions, create_dir_all}; use std::path::Path; use std::sync::Mutex; use std::thread; use std::{io::Write, sync::OnceLock}; use crate::{config, env, ui}; use log::{Level, LevelFilter, Metadata, Record}; #[derive(Debug)] struct Logger { level: Mutex<LevelFilter>, term_level: Mutex<LevelFilter>, file_level: LevelFilter, log_file: Option<Mutex<File>>, } impl log::Log for Logger { fn enabled(&self, metadata: &Metadata) -> bool { metadata.level() <= *self.level.lock().unwrap() } fn log(&self, record: &Record) { if record.level() <= self.file_level && let Some(log_file) = &self.log_file { let mut log_file = log_file.lock().unwrap(); let out = self.render(record, self.file_level); if !out.is_empty() { let _ = writeln!(log_file, "{}", console::strip_ansi_codes(&out)); } } let term_level = *self.term_level.lock().unwrap(); if record.level() <= term_level { let out = self.render(record, term_level); if !out.is_empty() { ui::multi_progress_report::MultiProgressReport::suspend_if_active(|| { eprintln!("{out}"); }); } } } fn flush(&self) {} } impl Logger { fn init(term_level: LevelFilter, file_level: LevelFilter) -> Self { let mut logger = Logger { level: Mutex::new(std::cmp::max(term_level, file_level)), file_level, term_level: Mutex::new(term_level), log_file: None, }; if let Some(log_file) = &*env::MISE_LOG_FILE { if let Ok(log_file) = init_log_file(log_file) { logger.log_file = Some(Mutex::new(log_file)); } else { eprintln!("mise: could not open log file: {log_file:?}"); } } logger } fn render(&self, record: &Record, level: LevelFilter) -> String { let mut args = record.args().to_string(); if config::is_loaded() { let config = Config::get_(); args = config.redact(args); } match level { LevelFilter::Off => "".to_string(), LevelFilter::Trace => { let level = record.level(); let file = record.file().unwrap_or("<unknown>"); if level == LevelFilter::Trace && file.contains("/expr-lang") { return 
"".to_string(); }; let meta = ui::style::edim(format!( "{thread_id:>2} [{file}:{line}]", thread_id = thread_id(), line = record.line().unwrap_or(0), )); format!("{level} {meta} {args}", level = self.styled_level(level),) } LevelFilter::Debug => { format!("{level} {args}", level = self.styled_level(record.level()),) } _ => { let mise = match record.level() { Level::Error => ui::style::ered("mise"), Level::Warn => ui::style::eyellow("mise"), _ => ui::style::edim("mise"), }; match record.level() { Level::Info => format!("{mise} {args}"), _ => format!( "{mise} {level} {args}", level = self.styled_level(record.level()), ), } } } } fn styled_level(&self, level: Level) -> String { let level = match level { Level::Error => ui::style::ered("ERROR").to_string(), Level::Warn => ui::style::eyellow("WARN").to_string(), Level::Info => ui::style::ecyan("INFO").to_string(), Level::Debug => ui::style::emagenta("DEBUG").to_string(), Level::Trace => ui::style::edim("TRACE").to_string(), }; console::pad_str(&level, 5, console::Alignment::Left, None).to_string() } } pub fn thread_id() -> String { let id = format!("{:?}", thread::current().id()); let id = id.replace("ThreadId(", ""); id.replace(")", "") } pub fn init() { static LOGGER: OnceLock<Logger> = OnceLock::new(); let settings = Settings::try_get().unwrap_or_else(|_| Default::default()); let term_level = settings.log_level(); if let Some(logger) = LOGGER.get() { *logger.term_level.lock().unwrap() = term_level; *logger.level.lock().unwrap() = std::cmp::max(term_level, logger.file_level); } else { let file_level = env::MISE_LOG_FILE_LEVEL.unwrap_or(settings.log_level()); let logger = LOGGER.get_or_init(|| Logger::init(term_level, file_level)); if let Err(err) = log::set_logger(logger) { eprintln!("mise: could not initialize logger: {err}"); } } log::set_max_level(term_level); } fn init_log_file(log_file: &Path) -> Result<File> { if let Some(log_dir) = log_file.parent() { create_dir_all(log_dir)?; } Ok(OpenOptions::new() 
.create(true) .append(true) .open(log_file)?) } #[cfg(test)] mod tests { use crate::config::Config; use super::*; #[tokio::test] async fn test_init() { let _config = Config::get().await.unwrap(); init(); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/redactions.rs
src/redactions.rs
use indexmap::IndexSet;

/// An ordered, de-duplicated collection of secret strings that must be
/// scrubbed from any output mise produces.
#[derive(Default, Clone, Debug, serde::Deserialize)]
pub struct Redactions(pub IndexSet<String>);

impl Redactions {
    /// Absorb every redaction from `other` into this set.
    pub fn merge(&mut self, other: Self) {
        self.0.extend(other.0);
    }

    /// Run each stored entry through tera templating and add the rendered
    /// result to the set. Raw (un-rendered) entries are retained alongside
    /// their rendered forms.
    pub fn render(&mut self, tera: &mut tera::Tera, ctx: &tera::Context) -> eyre::Result<()> {
        let snapshot: Vec<String> = self.0.iter().cloned().collect();
        for template in snapshot {
            let rendered = tera.render_str(&template, ctx)?;
            self.0.insert(rendered);
        }
        Ok(())
    }

    /// True when no redactions have been registered.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/migrate.rs
src/migrate.rs
use std::fs;
use std::path::Path;

use crate::dirs::*;
use crate::file;
use eyre::Result;

/// One-shot housekeeping that upgrades on-disk state left behind by older
/// mise/rtx releases. Individual failures are reported but never fatal.
pub async fn run() {
    tokio::join!(
        task(migrate_trusted_configs),
        task(migrate_tracked_configs),
        task(|| remove_deprecated_plugin("node", "rtx-nodejs")),
        task(|| remove_deprecated_plugin("go", "rtx-golang")),
        task(|| remove_deprecated_plugin("java", "rtx-java")),
        task(|| remove_deprecated_plugin("python", "rtx-python")),
        task(|| remove_deprecated_plugin("ruby", "rtx-ruby")),
    );
}

/// Run a single migration step, downgrading any error to a warning.
async fn task(job: impl FnOnce() -> Result<()> + Send + 'static) {
    if let Err(err) = job() {
        eprintln!("[WARN] migrate: {err}");
    }
}

/// Move tracked-config state from both legacy directory names into the
/// current location.
fn migrate_tracked_configs() -> Result<()> {
    for legacy in ["tracked_config_files", "tracked-config-files"] {
        relocate_dir(&DATA.join(legacy), &TRACKED_CONFIGS)?;
    }
    Ok(())
}

/// Move trusted-config state from any of its legacy homes (cache, config,
/// data dirs) into the current location. Order matters: once the target
/// exists, later candidates are left untouched.
fn migrate_trusted_configs() -> Result<()> {
    relocate_dir(&CACHE.join("trusted-configs"), &TRUSTED_CONFIGS)?;
    relocate_dir(&CONFIG.join("trusted-configs"), &TRUSTED_CONFIGS)?;
    relocate_dir(&DATA.join("trusted-configs"), &TRUSTED_CONFIGS)?;
    Ok(())
}

/// Move `from` to `to` when the source exists and the destination does not.
/// Returns whether a move actually happened.
fn relocate_dir(from: &Path, to: &Path) -> Result<bool> {
    if !from.exists() || to.exists() {
        return Ok(false);
    }
    eprintln!("migrating {} to {}", from.display(), to.display());
    file::create_dir_all(to.parent().unwrap())?;
    file::rename(from, to)?;
    Ok(true)
}

/// Delete an old mise-plugins checkout of a tool that is now a core plugin.
/// Detection is by the remote URL recorded in the plugin's .git/config.
fn remove_deprecated_plugin(name: &str, plugin_name: &str) -> Result<()> {
    let root = PLUGINS.join(name);
    let gitconfig = fs::read_to_string(root.join(".git").join("config")).unwrap_or_default();
    let marker = format!("github.com/mise-plugins/{plugin_name}");
    if gitconfig.contains(&marker) {
        eprintln!(
            "removing deprecated plugin {plugin_name}, will use core {name} plugin from now on"
        );
        file::remove_all(root)?;
    }
    Ok(())
}
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/shims.rs
src/shims.rs
use crate::exit;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::{
    collections::{BTreeSet, HashSet},
    sync::atomic::Ordering,
};

use crate::backend::Backend;
use crate::cli::exec::Exec;
use crate::config::{Config, Settings};
use crate::file::display_path;
use crate::lock_file::LockFile;
use crate::toolset::{ToolVersion, Toolset, ToolsetBuilder};
use crate::{backend, dirs, env, fake_asdf, file};
use color_eyre::eyre::{Result, bail, eyre};
use eyre::WrapErr;
use indoc::formatdoc;
use itertools::Itertools;
use path_absolutize::Absolutize;
use tokio::task::JoinSet;

// executes as if it was a shim if the command is not "mise", e.g.: "node"
// Resolves the real tool binary for the invoked shim name and re-executes
// via `mise x`. No-op when invoked as "mise*" or under `cargo test`.
pub async fn handle_shim() -> Result<()> {
    // TODO: instead, check if bin is in shims dir
    let bin_name = *env::MISE_BIN_NAME;
    if bin_name.starts_with("mise") || cfg!(test) {
        return Ok(());
    }
    let mut config = Config::get().await?;
    let mut args = env::ARGS.read().unwrap().clone();
    // shims should not hit the network to check for newer versions
    env::PREFER_OFFLINE.store(true, Ordering::Relaxed);
    trace!("shim[{bin_name}] args: {}", args.join(" "));
    // argv[0] is replaced with the resolved absolute path of the real binary
    args[0] = which_shim(&mut config, &env::MISE_BIN_NAME)
        .await?
        .to_string_lossy()
        .to_string();
    env::set_var("__MISE_SHIM", "1");
    let exec = Exec {
        tool: vec![],
        c: None,
        command: Some(args),
        jobs: None,
        raw: false,
        no_prepare: true, // Skip prepare for shims to avoid performance impact
    };
    time!("shim exec");
    exec.run().await?;
    // exec.run() normally replaces the process; exit explicitly if it returns
    exit(0);
}

// Resolution order for a shim name:
//  1. an installed tool version in the active toolset
//  2. auto-install (if `not_found_auto_install` is enabled)
//  3. a matching binary on PATH outside the shims dir ("system" fallback)
//  4. otherwise an explanatory error via err_no_version_set
async fn which_shim(config: &mut Arc<Config>, bin_name: &str) -> Result<PathBuf> {
    let mut ts = ToolsetBuilder::new().build(config).await?;
    if let Some((p, tv)) = ts.which(config, bin_name).await
        && let Some(bin) = p.which(config, &tv, bin_name).await?
    {
        trace!(
            "shim[{bin_name}] ToolVersion: {tv} bin: {bin}",
            bin = display_path(&bin)
        );
        return Ok(bin);
    }
    if Settings::get().not_found_auto_install {
        for tv in ts
            .install_missing_bin(config, bin_name)
            .await?
            .unwrap_or_default()
        {
            let p = tv.backend()?;
            if let Some(bin) = p.which(config, &tv, bin_name).await? {
                trace!(
                    "shim[{bin_name}] NOT_FOUND ToolVersion: {tv} bin: {bin}",
                    bin = display_path(&bin)
                );
                return Ok(bin);
            }
        }
    }
    // fallback for "system"
    for path in &*env::PATH {
        // skip the shims dir itself to avoid resolving the shim recursively
        if fs::canonicalize(path).unwrap_or_default()
            == fs::canonicalize(*dirs::SHIMS).unwrap_or_default()
        {
            continue;
        }
        let bin = path.join(bin_name);
        if bin.exists() {
            trace!("shim[{bin_name}] SYSTEM {bin}", bin = display_path(&bin));
            return Ok(bin);
        }
    }
    let tvs = ts.list_rtvs_with_bin(config, bin_name).await?;
    err_no_version_set(config, ts, bin_name, tvs).await
}

// Rebuild the shims directory so it matches the toolset: add missing shims,
// remove stale ones, and regenerate plugin-provided shim scripts.
// Guarded by a file lock so concurrent mise processes do not race.
pub async fn reshim(config: &Arc<Config>, ts: &Toolset, force: bool) -> Result<()> {
    let _lock = LockFile::new(&dirs::SHIMS)
        .with_callback(|l| {
            trace!("reshim callback {}", l.display());
        })
        .lock();
    let mise_bin = file::which("mise").unwrap_or(env::MISE_BIN.clone());
    let mise_bin = mise_bin.absolutize()?; // relative paths don't work as shims
    if force {
        // wipe everything and recreate from scratch
        file::remove_all(*dirs::SHIMS)?;
    }
    file::create_dir_all(*dirs::SHIMS)?;
    let (shims_to_add, shims_to_remove) = get_shim_diffs(config, &mise_bin, ts).await?;
    for shim in shims_to_add {
        let symlink_path = dirs::SHIMS.join(&shim);
        add_shim(&mise_bin, &symlink_path, &shim)?;
    }
    for shim in shims_to_remove {
        let symlink_path = dirs::SHIMS.join(shim);
        file::remove_all(&symlink_path)?;
    }
    // plugins may ship their own shim scripts under <plugin>/shims/
    let mut jset = JoinSet::new();
    for plugin in backend::list() {
        jset.spawn(async move {
            if let Ok(files) = dirs::PLUGINS.join(plugin.id()).join("shims").read_dir() {
                for bin in files {
                    let bin = bin?;
                    let bin_name = bin.file_name().into_string().unwrap();
                    let symlink_path = dirs::SHIMS.join(bin_name);
                    make_shim(&bin.path(), &symlink_path).await?;
                }
            }
            Ok(())
        });
    }
    jset.join_all()
        .await
        .into_iter()
        .collect::<Result<Vec<_>>>()?;
    Ok(())
}

// Windows shim creation; strategy is selected by `windows_shim_mode`:
// "file" (bash + .cmd wrapper scripts), "hardlink", or "symlink".
#[cfg(windows)]
fn add_shim(mise_bin: &Path, symlink_path: &Path, shim: &str) -> Result<()> {
    match Settings::get().windows_shim_mode.as_ref() {
        "file" => {
            let shim = shim.trim_end_matches(".cmd");
            // write a shim file without extension for use in Git Bash/Cygwin
            file::write(
                symlink_path.with_extension(""),
                formatdoc! {r#"
                #!/bin/bash
                exec mise x -- {shim} "$@"
                "#},
            )
            .wrap_err_with(|| {
                eyre!(
                    "Failed to create symlink from {} to {}",
                    display_path(mise_bin),
                    display_path(symlink_path)
                )
            })?;
            // and a .cmd wrapper for cmd.exe/PowerShell
            file::write(
                symlink_path.with_extension("cmd"),
                formatdoc! {r#"
                @echo off
                setlocal
                mise x -- {shim} %*
                "#},
            )
            .wrap_err_with(|| {
                eyre!(
                    "Failed to create symlink from {} to {}",
                    display_path(mise_bin),
                    display_path(symlink_path)
                )
            })
        }
        "hardlink" => fs::hard_link(mise_bin, symlink_path).wrap_err_with(|| {
            eyre!(
                "Failed to create hardlink from {} to {}",
                display_path(mise_bin),
                display_path(symlink_path)
            )
        }),
        "symlink" => {
            std::os::windows::fs::symlink_file(mise_bin, symlink_path).wrap_err_with(|| {
                eyre!(
                    "Failed to create symlink from {} to {}",
                    display_path(mise_bin),
                    display_path(symlink_path)
                )
            })
        }
        _ => panic!("Unknown shim mode"),
    }
}

// Unix shim creation: a plain symlink back to the mise binary.
#[cfg(unix)]
fn add_shim(mise_bin: &Path, symlink_path: &Path, _shim: &str) -> Result<()> {
    file::make_symlink(mise_bin, symlink_path).wrap_err_with(|| {
        eyre!(
            "Failed to create symlink from {} to {}",
            display_path(mise_bin),
            display_path(symlink_path)
        )
    })?;
    Ok(())
}

// get_shim_diffs contrasts the actual shims on disk
// with the desired shims specified by the Toolset
// and returns a tuple of (missing shims, extra shims)
pub async fn get_shim_diffs(
    config: &Arc<Config>,
    mise_bin: impl AsRef<Path>,
    toolset: &Toolset,
) -> Result<(BTreeSet<String>, BTreeSet<String>)> {
    let mise_bin = mise_bin.as_ref();
    // the two scans are independent, so run them concurrently
    let (actual_shims, desired_shims) = tokio::join!(
        get_actual_shims(mise_bin),
        get_desired_shims(config, toolset)
    );
    let (actual_shims, desired_shims) = (actual_shims?, desired_shims?);
    let out: (BTreeSet<String>, BTreeSet<String>) = (
        desired_shims.difference(&actual_shims).cloned().collect(),
        actual_shims.difference(&desired_shims).cloned().collect(),
    );
    time!("get_shim_diffs sizes: ({},{})", out.0.len(), out.1.len());
    Ok(out)
}

// Shims currently on disk that belong to mise: non-symlink entries (scripts)
// or symlinks pointing back at the mise binary.
async fn get_actual_shims(mise_bin: impl AsRef<Path>) -> Result<HashSet<String>> {
    let mise_bin = mise_bin.as_ref();
    Ok(list_shims()?
        .into_iter()
        .filter(|bin| {
            let path = dirs::SHIMS.join(bin);
            !path.is_symlink() || path.read_link().is_ok_and(|p| p == mise_bin)
        })
        .collect::<HashSet<_>>())
}

// Names of executable files/symlinks directly inside `dir` (non-recursive).
fn list_executables_in_dir(dir: &Path) -> Result<HashSet<String>> {
    Ok(dir
        .read_dir()?
        .map(|bin| {
            let bin = bin?;
            // files and symlinks which are executable
            if file::is_executable(&bin.path())
                && (bin.file_type()?.is_file() || bin.file_type()?.is_symlink())
            {
                Ok(Some(bin.file_name().into_string().unwrap()))
            } else {
                Ok(None)
            }
        })
        .collect::<Result<Vec<_>>>()?
        .into_iter()
        .flatten()
        .collect())
}

// Names of entries in the shims dir that look like shims.
fn list_shims() -> Result<HashSet<String>> {
    Ok(dirs::SHIMS
        .read_dir()?
        .map(|bin| {
            let bin = bin?;
            // files and symlinks which are executable or extensionless files (Git Bash/Cygwin)
            if (file::is_executable(&bin.path()) || bin.path().extension().is_none())
                && (bin.file_type()?.is_file() || bin.file_type()?.is_symlink())
            {
                Ok(Some(bin.file_name().into_string().unwrap()))
            } else {
                Ok(None)
            }
        })
        .collect::<Result<Vec<_>>>()?
        .into_iter()
        .flatten()
        .collect())
}

// Shim names the toolset wants, adjusted per platform (Windows extension
// handling, macOS case folding).
async fn get_desired_shims(config: &Arc<Config>, toolset: &Toolset) -> Result<HashSet<String>> {
    let mut shims = HashSet::new();
    for (t, tv) in toolset.list_installed_versions(config).await? {
        // a failure to list one tool's bins degrades to a warning, not an error
        let bins = list_tool_bins(config, t.clone(), &tv)
            .await
            .unwrap_or_else(|e| {
                warn!("Error listing bin paths for {}: {:#}", tv, e);
                Vec::new()
            });
        if cfg!(windows) {
            shims.extend(bins.into_iter().flat_map(|b| {
                let p = PathBuf::from(&b);
                match Settings::get().windows_shim_mode.as_ref() {
                    "hardlink" | "symlink" => {
                        vec![p.with_extension("exe").to_string_lossy().to_string()]
                    }
                    "file" => {
                        vec![
                            p.with_extension("").to_string_lossy().to_string(),
                            p.with_extension("cmd").to_string_lossy().to_string(),
                        ]
                    }
                    _ => panic!("Unknown shim mode"),
                }
            }));
        } else if cfg!(macos) {
            // NOTE(review): `macos` looks like a project cfg alias (see also
            // #[cfg(macos)] in env.rs) — confirm it is defined in build.rs
            // some bins might be uppercased but on mac APFS is case insensitive
            shims.extend(bins.into_iter().map(|b| b.to_lowercase()));
        } else {
            shims.extend(bins);
        }
    }
    Ok(shims)
}

// lists all the paths to bins in a tv that shims will be needed for
async fn list_tool_bins(
    config: &Arc<Config>,
    t: Arc<dyn Backend>,
    tv: &ToolVersion,
) -> Result<Vec<String>> {
    Ok(t.list_bin_paths(config, tv)
        .await?
        .into_iter()
        .filter(|p| p.parent().is_some())
        .filter(|path| path.exists())
        .map(|dir| list_executables_in_dir(&dir))
        .collect::<Result<Vec<_>>>()?
        .into_iter()
        .flatten()
        .collect())
}

// Write a shell-script shim (used for plugin-provided shims) that re-enters
// mise with the asdf compatibility environment set up.
async fn make_shim(target: &Path, shim: &Path) -> Result<()> {
    if shim.exists() {
        file::remove_file_async(shim).await?;
    }
    file::write_async(
        shim,
        formatdoc! {r#"
        #!/bin/sh
        export ASDF_DATA_DIR={data_dir}
        export PATH="{fake_asdf_dir}:$PATH"
        mise x -- {target} "$@"
        "#, data_dir = dirs::DATA.display(), fake_asdf_dir = fake_asdf::setup()?.display(), target = target.display()},
    )
    .await?;
    file::make_executable_async(shim).await?;
    trace!(
        "shim created from {} to {}",
        target.display(),
        shim.display()
    );
    Ok(())
}

// Build a user-facing error explaining why the shim could not be resolved:
// dangling shim, no version set, or tools not installed.
async fn err_no_version_set(
    config: &Arc<Config>,
    ts: Toolset,
    bin_name: &str,
    tvs: Vec<ToolVersion>,
) -> Result<PathBuf> {
    if tvs.is_empty() {
        bail!(
            "{bin_name} is not a valid shim. This likely means you uninstalled a tool and the shim does not point to anything. Run `mise use <TOOL>` to reinstall the tool."
        );
    }
    let missing_plugins = tvs.iter().map(|tv| tv.ba()).collect::<HashSet<_>>();
    let mut missing_tools = ts
        .list_missing_versions(config)
        .await
        .into_iter()
        .filter(|t| missing_plugins.contains(t.ba()))
        .collect_vec();
    if missing_tools.is_empty() {
        // tools exist but no version is active in this context
        let mut msg = format!("No version is set for shim: {bin_name}\n");
        msg.push_str("Set a global default version with one of the following:\n");
        for tv in tvs {
            msg.push_str(&format!("mise use -g {}@{}\n", tv.ba(), tv.version));
        }
        Err(eyre!(msg.trim().to_string()))
    } else {
        // versions are configured but not installed
        let mut msg = format!(
            "Tool{} not installed for shim: {}\n",
            if missing_tools.len() > 1 { "s" } else { "" },
            bin_name
        );
        for t in missing_tools.drain(..) {
            msg.push_str(&format!("Missing tool version: {t}\n"));
        }
        msg.push_str("Install all missing tools with: mise install\n");
        Err(eyre!(msg.trim().to_string()))
    }
}
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/platform.rs
src/platform.rs
use crate::config::Settings; use eyre::{Result, bail}; use std::fmt; /// Represents a target platform for lockfile operations #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct Platform { pub os: String, pub arch: String, pub qualifier: Option<String>, } impl Platform { /// Parse a platform string in the format "os-arch" or "os-arch-qualifier" /// Qualifier may contain hyphens (e.g., "musl-baseline") pub fn parse(platform_str: &str) -> Result<Self> { let parts: Vec<&str> = platform_str.split('-').collect(); match parts.len() { 0 | 1 => bail!( "Invalid platform format '{}'. Expected 'os-arch' or 'os-arch-qualifier'", platform_str ), 2 => Ok(Platform { os: parts[0].to_string(), arch: parts[1].to_string(), qualifier: None, }), _ => { // Join remaining parts as qualifier (handles compound qualifiers like "musl-baseline") let qualifier = parts[2..].join("-"); Ok(Platform { os: parts[0].to_string(), arch: parts[1].to_string(), qualifier: Some(qualifier), }) } } } /// Get the current platform from system information pub fn current() -> Self { let settings = Settings::get(); Platform { os: settings.os().to_string(), arch: settings.arch().to_string(), qualifier: None, } } /// Validate that this platform is supported pub fn validate(&self) -> Result<()> { // Validate OS match self.os.as_str() { "linux" | "macos" | "windows" => {} _ => bail!( "Unsupported OS '{}'. Supported: linux, macos, windows", self.os ), } // Validate architecture match self.arch.as_str() { "x64" | "arm64" | "x86" => {} _ => bail!( "Unsupported architecture '{}'. Supported: x64, arm64, x86", self.arch ), } // Validate qualifier if present if let Some(qualifier) = &self.qualifier { match qualifier.as_str() { "gnu" | "musl" | "msvc" | "baseline" | "musl-baseline" => {} _ => bail!( "Unsupported qualifier '{}'. 
Supported: gnu, musl, msvc, baseline, musl-baseline", qualifier ), } } Ok(()) } /// Check if this platform is compatible with the current system pub fn is_compatible_with_current(&self) -> bool { let current = Self::current(); self.os == current.os && self.arch == current.arch } /// Convert to platform key format used in lockfiles pub fn to_key(&self) -> String { match &self.qualifier { Some(qualifier) => format!("{}-{}-{}", self.os, self.arch, qualifier), None => format!("{}-{}", self.os, self.arch), } } /// Parse multiple platform strings, validating each one pub fn parse_multiple(platform_strings: &[String]) -> Result<Vec<Self>> { let mut platforms = Vec::new(); for platform_str in platform_strings { let platform = Self::parse(platform_str)?; platform.validate()?; platforms.push(platform); } // Remove duplicates and sort platforms.sort(); platforms.dedup(); Ok(platforms) } /// Get a list of commonly supported platforms pub fn common_platforms() -> Vec<Self> { vec![ Platform::parse("linux-x64").unwrap(), Platform::parse("linux-arm64").unwrap(), Platform::parse("macos-x64").unwrap(), Platform::parse("macos-arm64").unwrap(), Platform::parse("windows-x64").unwrap(), ] } /// Check if this is a Windows platform pub fn is_windows(&self) -> bool { self.os == "windows" } /// Check if this is a macOS platform pub fn is_macos(&self) -> bool { self.os == "macos" } /// Check if this is a Linux platform pub fn is_linux(&self) -> bool { self.os == "linux" } /// Check if this uses ARM64 architecture pub fn is_arm64(&self) -> bool { self.arch == "arm64" } /// Check if this uses x64 architecture pub fn is_x64(&self) -> bool { self.arch == "x64" } } impl fmt::Display for Platform { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.to_key()) } } impl From<String> for Platform { fn from(s: String) -> Self { Self::parse(&s).unwrap_or_else(|_| { // Fallback to current platform if parsing fails Self::current() }) } } impl From<&str> for Platform { fn 
from(s: &str) -> Self { Self::parse(s).unwrap_or_else(|_| { // Fallback to current platform if parsing fails Self::current() }) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_platform_parse_basic() { let platform = Platform::parse("linux-x64").unwrap(); assert_eq!(platform.os, "linux"); assert_eq!(platform.arch, "x64"); assert_eq!(platform.qualifier, None); } #[test] fn test_platform_parse_with_qualifier() { let platform = Platform::parse("linux-x64-gnu").unwrap(); assert_eq!(platform.os, "linux"); assert_eq!(platform.arch, "x64"); assert_eq!(platform.qualifier, Some("gnu".to_string())); } #[test] fn test_platform_parse_with_compound_qualifier() { // Compound qualifiers like "musl-baseline" should parse correctly let platform = Platform::parse("linux-x64-musl-baseline").unwrap(); assert_eq!(platform.os, "linux"); assert_eq!(platform.arch, "x64"); assert_eq!(platform.qualifier, Some("musl-baseline".to_string())); // Verify round-trip: parse -> to_key -> parse assert_eq!(platform.to_key(), "linux-x64-musl-baseline"); let reparsed = Platform::parse(&platform.to_key()).unwrap(); assert_eq!(reparsed.qualifier, Some("musl-baseline".to_string())); } #[test] fn test_platform_parse_invalid() { assert!(Platform::parse("linux").is_err()); assert!(Platform::parse("").is_err()); } #[test] fn test_platform_validation() { // Valid platforms assert!(Platform::parse("linux-x64").unwrap().validate().is_ok()); assert!(Platform::parse("macos-arm64").unwrap().validate().is_ok()); assert!(Platform::parse("windows-x64").unwrap().validate().is_ok()); assert!(Platform::parse("linux-x64-gnu").unwrap().validate().is_ok()); // Invalid OS assert!(Platform::parse("invalid-x64").unwrap().validate().is_err()); // Invalid arch assert!( Platform::parse("linux-invalid") .unwrap() .validate() .is_err() ); // Invalid qualifier assert!( Platform::parse("linux-x64-invalid") .unwrap() .validate() .is_err() ); } #[test] fn test_platform_to_key() { let platform1 = 
Platform::parse("linux-x64").unwrap(); assert_eq!(platform1.to_key(), "linux-x64"); let platform2 = Platform::parse("linux-x64-gnu").unwrap(); assert_eq!(platform2.to_key(), "linux-x64-gnu"); } #[test] fn test_platform_multiple_parsing() { let platform_strings = vec![ "linux-x64".to_string(), "macos-arm64".to_string(), "linux-x64".to_string(), // duplicate should be removed ]; let platforms = Platform::parse_multiple(&platform_strings).unwrap(); assert_eq!(platforms.len(), 2); assert_eq!(platforms[0].to_key(), "linux-x64"); assert_eq!(platforms[1].to_key(), "macos-arm64"); } #[test] fn test_platform_helpers() { let linux_platform = Platform::parse("linux-arm64").unwrap(); assert!(linux_platform.is_linux()); assert!(linux_platform.is_arm64()); assert!(!linux_platform.is_windows()); assert!(!linux_platform.is_x64()); let windows_platform = Platform::parse("windows-x64").unwrap(); assert!(windows_platform.is_windows()); assert!(windows_platform.is_x64()); assert!(!windows_platform.is_linux()); assert!(!windows_platform.is_arm64()); } #[test] fn test_common_platforms() { let platforms = Platform::common_platforms(); assert_eq!(platforms.len(), 5); let keys: Vec<String> = platforms.iter().map(|p| p.to_key()).collect(); assert!(keys.contains(&"linux-x64".to_string())); assert!(keys.contains(&"linux-arm64".to_string())); assert!(keys.contains(&"macos-x64".to_string())); assert!(keys.contains(&"macos-arm64".to_string())); assert!(keys.contains(&"windows-x64".to_string())); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/gitlab.rs
src/gitlab.rs
use eyre::Result;
use heck::ToKebabCase;
use reqwest::IntoUrl;
use reqwest::header::{HeaderMap, HeaderValue};
use serde_derive::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::LazyLock as Lazy;
use tokio::sync::{RwLock, RwLockReadGuard};
use xx::regex;

use crate::cache::{CacheManager, CacheManagerBuilder};
use crate::{dirs, duration, env};

/// A release as returned by the GitLab releases API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitlabRelease {
    pub tag_name: String,
    pub description: Option<String>,
    pub released_at: Option<String>,
    pub assets: GitlabAssets,
}

/// A repository tag as returned by the GitLab tags API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitlabTag {
    pub name: String,
}

/// Assets attached to a release: auto-generated source archives plus
/// manually linked files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitlabAssets {
    // pub count: i64,
    pub sources: Vec<GitlabAssetSource>,
    pub links: Vec<GitlabAssetLink>,
}

/// An auto-generated source archive (e.g. "zip", "tar.gz").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitlabAssetSource {
    pub format: String,
    pub url: String,
}

/// A manually attached release asset link.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitlabAssetLink {
    pub id: i64,
    pub name: String,
    pub url: String,
    pub direct_asset_url: String,
    pub link_type: String,
}

// One on-disk CacheManager per cache key (repo, or api_url+repo).
type CacheGroup<T> = HashMap<String, CacheManager<T>>;

static RELEASES_CACHE: Lazy<RwLock<CacheGroup<Vec<GitlabRelease>>>> = Lazy::new(Default::default);

static RELEASE_CACHE: Lazy<RwLock<CacheGroup<GitlabRelease>>> = Lazy::new(Default::default);

static TAGS_CACHE: Lazy<RwLock<CacheGroup<Vec<String>>>> = Lazy::new(Default::default);

pub static API_URL: &str = "https://gitlab.com/api/v4";

// Ensure a CacheManager exists for `key`, then hand back a read guard over
// the whole group. The write lock is released before the read lock is taken.
async fn get_tags_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<Vec<String>>> {
    TAGS_CACHE
        .write()
        .await
        .entry(key.to_string())
        .or_insert_with(|| {
            CacheManagerBuilder::new(cache_dir().join(format!("{key}-tags.msgpack.z")))
                .with_fresh_duration(Some(duration::DAILY))
                .build()
        });
    TAGS_CACHE.read().await
}

// Same pattern as get_tags_cache, for the releases-list cache.
async fn get_releases_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<Vec<GitlabRelease>>> {
    RELEASES_CACHE
        .write()
        .await
        .entry(key.to_string())
        .or_insert_with(|| {
            CacheManagerBuilder::new(cache_dir().join(format!("{key}-releases.msgpack.z")))
                .with_fresh_duration(Some(duration::DAILY))
                .build()
        });
    RELEASES_CACHE.read().await
}

// Same pattern as get_tags_cache, for the single-release cache.
async fn get_release_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<GitlabRelease>> {
    RELEASE_CACHE
        .write()
        .await
        .entry(key.to_string())
        .or_insert_with(|| {
            CacheManagerBuilder::new(cache_dir().join(format!("{key}.msgpack.z")))
                .with_fresh_duration(Some(duration::DAILY))
                .build()
        });
    RELEASE_CACHE.read().await
}

/// List releases for a gitlab.com repo, cached daily on disk.
#[allow(dead_code)]
pub async fn list_releases(repo: &str) -> Result<Vec<GitlabRelease>> {
    let key = repo.to_kebab_case();
    let cache = get_releases_cache(&key).await;
    let cache = cache.get(&key).unwrap();
    Ok(cache
        .get_or_try_init_async(async || list_releases_(API_URL, repo).await)
        .await?
        .to_vec())
}

/// Like list_releases but against a custom GitLab instance API URL.
pub async fn list_releases_from_url(api_url: &str, repo: &str) -> Result<Vec<GitlabRelease>> {
    let key = format!("{api_url}-{repo}").to_kebab_case();
    let cache = get_releases_cache(&key).await;
    let cache = cache.get(&key).unwrap();
    Ok(cache
        .get_or_try_init_async(async || list_releases_(api_url, repo).await)
        .await?
        .to_vec())
}

// Fetch releases from the API, following pagination ("link: rel=next"
// headers) only when MISE_LIST_ALL_VERSIONS is enabled.
async fn list_releases_(api_url: &str, repo: &str) -> Result<Vec<GitlabRelease>> {
    let url = format!(
        "{}/projects/{}/releases",
        api_url,
        urlencoding::encode(repo)
    );
    let headers = get_headers(&url);
    let (mut releases, mut headers) = crate::http::HTTP_FETCH
        .json_headers_with_headers::<Vec<GitlabRelease>, _>(url, &headers)
        .await?;

    if *env::MISE_LIST_ALL_VERSIONS {
        while let Some(next) = next_page(&headers) {
            let (more, h) = crate::http::HTTP_FETCH
                .json_headers_with_headers::<Vec<GitlabRelease>, _>(next, &headers)
                .await?;
            releases.extend(more);
            headers = h;
        }
    }

    Ok(releases)
}

/// List tag names for a gitlab.com repo, cached daily on disk.
#[allow(dead_code)]
pub async fn list_tags(repo: &str) -> Result<Vec<String>> {
    let key = repo.to_kebab_case();
    let cache = get_tags_cache(&key).await;
    let cache = cache.get(&key).unwrap();
    Ok(cache
        .get_or_try_init_async(async || list_tags_(API_URL, repo).await)
        .await?
        .to_vec())
}

/// Like list_tags but against a custom GitLab instance API URL.
pub async fn list_tags_from_url(api_url: &str, repo: &str) -> Result<Vec<String>> {
    let key = format!("{api_url}-{repo}").to_kebab_case();
    let cache = get_tags_cache(&key).await;
    let cache = cache.get(&key).unwrap();
    Ok(cache
        .get_or_try_init_async(async || list_tags_(api_url, repo).await)
        .await?
        .to_vec())
}

// Fetch tags from the API with the same pagination policy as list_releases_.
async fn list_tags_(api_url: &str, repo: &str) -> Result<Vec<String>> {
    let url = format!(
        "{}/projects/{}/repository/tags",
        api_url,
        urlencoding::encode(repo)
    );
    let headers = get_headers(&url);
    let (mut tags, mut headers) = crate::http::HTTP_FETCH
        .json_headers_with_headers::<Vec<GitlabTag>, _>(url, &headers)
        .await?;

    if *env::MISE_LIST_ALL_VERSIONS {
        while let Some(next) = next_page(&headers) {
            let (more, h) = crate::http::HTTP_FETCH
                .json_headers_with_headers::<Vec<GitlabTag>, _>(next, &headers)
                .await?;
            tags.extend(more);
            headers = h;
        }
    }

    Ok(tags.into_iter().map(|t| t.name).collect())
}

/// Fetch a single release by tag from gitlab.com, cached daily on disk.
#[allow(dead_code)]
pub async fn get_release(repo: &str, tag: &str) -> Result<GitlabRelease> {
    let key = format!("{repo}-{tag}").to_kebab_case();
    let cache = get_release_cache(&key).await;
    let cache = cache.get(&key).unwrap();
    Ok(cache
        .get_or_try_init_async(async || get_release_(API_URL, repo, tag).await)
        .await?
        .clone())
}

/// Like get_release but against a custom GitLab instance API URL.
pub async fn get_release_for_url(api_url: &str, repo: &str, tag: &str) -> Result<GitlabRelease> {
    let key = format!("{api_url}-{repo}-{tag}").to_kebab_case();
    let cache = get_release_cache(&key).await;
    let cache = cache.get(&key).unwrap();
    Ok(cache
        .get_or_try_init_async(async || get_release_(api_url, repo, tag).await)
        .await?
        .clone())
}

// Fetch one release by tag (no pagination involved).
async fn get_release_(api_url: &str, repo: &str, tag: &str) -> Result<GitlabRelease> {
    let url = format!(
        "{}/projects/{}/releases/{}",
        api_url,
        urlencoding::encode(repo),
        tag
    );
    let headers = get_headers(&url);
    crate::http::HTTP_FETCH
        .json_with_headers(url, &headers)
        .await
}

// Extract the rel="next" URL from an RFC 5988 "link" response header, if any.
fn next_page(headers: &HeaderMap) -> Option<String> {
    let link = headers
        .get("link")
        .map(|l| l.to_str().unwrap_or_default().to_string())
        .unwrap_or_default();
    regex!(r#"<([^>]+)>; rel="next""#)
        .captures(&link)
        .map(|c| c.get(1).unwrap().as_str().to_string())
}

// Root directory for gitlab API response caches.
fn cache_dir() -> PathBuf {
    dirs::CACHE.join("gitlab")
}

/// Build request headers: a Bearer token from GITLAB_TOKEN for gitlab.com,
/// or from MISE_GITLAB_ENTERPRISE_TOKEN for self-hosted instances.
pub fn get_headers<U: IntoUrl>(url: U) -> HeaderMap {
    let mut headers = HeaderMap::new();
    let url = url.into_url().unwrap();
    let mut set_headers = |token: &str| {
        headers.insert(
            "Authorization",
            HeaderValue::from_str(format!("Bearer {token}").as_str()).unwrap(),
        );
    };

    if url.host_str() == Some("gitlab.com") {
        if let Some(token) = env::GITLAB_TOKEN.as_ref() {
            set_headers(token);
        }
    } else if let Some(token) = env::MISE_GITLAB_ENTERPRISE_TOKEN.as_ref() {
        set_headers(token);
    }

    headers
}
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/env.rs
src/env.rs
use crate::Result; use crate::env_diff::{EnvDiff, EnvDiffOperation, EnvDiffPatches, EnvMap}; use crate::file::replace_path; use crate::shell::ShellType; use crate::{cli::args::ToolArg, file::display_path}; use eyre::Context; use indexmap::IndexSet; use itertools::Itertools; use log::LevelFilter; pub use std::env::*; use std::sync::LazyLock as Lazy; use std::sync::RwLock; use std::{ collections::{HashMap, HashSet}, ffi::OsStr, sync::Mutex, }; use std::{path, process}; use std::{path::Path, string::ToString}; use std::{path::PathBuf, sync::atomic::AtomicBool}; pub static ARGS: RwLock<Vec<String>> = RwLock::new(vec![]); pub static TOOL_ARGS: RwLock<Vec<ToolArg>> = RwLock::new(vec![]); #[cfg(unix)] pub static SHELL: Lazy<String> = Lazy::new(|| var("SHELL").unwrap_or_else(|_| "sh".into())); #[cfg(windows)] pub static SHELL: Lazy<String> = Lazy::new(|| var("COMSPEC").unwrap_or_else(|_| "cmd.exe".into())); pub static MISE_SHELL: Lazy<Option<ShellType>> = Lazy::new(|| { var("MISE_SHELL") .unwrap_or_else(|_| SHELL.clone()) .parse() .ok() }); #[cfg(unix)] pub static SHELL_COMMAND_FLAG: &str = "-c"; #[cfg(windows)] pub static SHELL_COMMAND_FLAG: &str = "/c"; // paths and directories #[cfg(test)] pub static HOME: Lazy<PathBuf> = Lazy::new(|| PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test")); #[cfg(not(test))] pub static HOME: Lazy<PathBuf> = Lazy::new(|| { homedir::my_home() .ok() .flatten() .unwrap_or_else(|| PathBuf::from("/")) }); pub static EDITOR: Lazy<String> = Lazy::new(|| var("VISUAL").unwrap_or_else(|_| var("EDITOR").unwrap_or_else(|_| "nano".into()))); #[cfg(macos)] pub static XDG_CACHE_HOME: Lazy<PathBuf> = Lazy::new(|| var_path("XDG_CACHE_HOME").unwrap_or_else(|| HOME.join("Library/Caches"))); #[cfg(windows)] pub static XDG_CACHE_HOME: Lazy<PathBuf> = Lazy::new(|| { var_path("XDG_CACHE_HOME") .or_else(|| var_path("TEMP")) .unwrap_or_else(temp_dir) }); #[cfg(all(not(windows), not(macos)))] pub static XDG_CACHE_HOME: Lazy<PathBuf> = Lazy::new(|| 
var_path("XDG_CACHE_HOME").unwrap_or_else(|| HOME.join(".cache"))); pub static XDG_CONFIG_HOME: Lazy<PathBuf> = Lazy::new(|| var_path("XDG_CONFIG_HOME").unwrap_or_else(|| HOME.join(".config"))); #[cfg(unix)] pub static XDG_DATA_HOME: Lazy<PathBuf> = Lazy::new(|| var_path("XDG_DATA_HOME").unwrap_or_else(|| HOME.join(".local").join("share"))); #[cfg(windows)] pub static XDG_DATA_HOME: Lazy<PathBuf> = Lazy::new(|| { var_path("XDG_DATA_HOME") .or(var_path("LOCALAPPDATA")) .unwrap_or_else(|| HOME.join("AppData/Local")) }); pub static XDG_STATE_HOME: Lazy<PathBuf> = Lazy::new(|| var_path("XDG_STATE_HOME").unwrap_or_else(|| HOME.join(".local").join("state"))); /// control display of "friendly" errors - defaults to release mode behavior unless overridden pub static MISE_FRIENDLY_ERROR: Lazy<bool> = Lazy::new(|| { if var_is_true("MISE_FRIENDLY_ERROR") { true } else if var_is_false("MISE_FRIENDLY_ERROR") { false } else { // default behavior: friendly in release mode unless debug logging !cfg!(debug_assertions) && log::max_level() < log::LevelFilter::Debug } }); pub static MISE_TOOL_STUB: Lazy<bool> = Lazy::new(|| ARGS.read().unwrap().get(1).map(|s| s.as_str()) == Some("tool-stub")); pub static MISE_NO_CONFIG: Lazy<bool> = Lazy::new(|| var_is_true("MISE_NO_CONFIG")); pub static MISE_PROGRESS_TRACE: Lazy<bool> = Lazy::new(|| var_is_true("MISE_PROGRESS_TRACE")); pub static MISE_CACHE_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_CACHE_DIR").unwrap_or_else(|| XDG_CACHE_HOME.join("mise"))); pub static MISE_CONFIG_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_CONFIG_DIR").unwrap_or_else(|| XDG_CONFIG_HOME.join("mise"))); /// The default config directory location (XDG_CONFIG_HOME/mise), used to filter out /// configs from this location when MISE_CONFIG_DIR is set to a different path pub static MISE_DEFAULT_CONFIG_DIR: Lazy<PathBuf> = Lazy::new(|| XDG_CONFIG_HOME.join("mise")); /// True if MISE_CONFIG_DIR was explicitly set to a non-default location pub static 
MISE_CONFIG_DIR_OVERRIDDEN: Lazy<bool> = Lazy::new(|| { var_path("MISE_CONFIG_DIR").is_some() && *MISE_CONFIG_DIR != *MISE_DEFAULT_CONFIG_DIR }); pub static MISE_DATA_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_DATA_DIR").unwrap_or_else(|| XDG_DATA_HOME.join("mise"))); pub static MISE_STATE_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_STATE_DIR").unwrap_or_else(|| XDG_STATE_HOME.join("mise"))); pub static MISE_TMP_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_TMP_DIR").unwrap_or_else(|| temp_dir().join("mise"))); pub static MISE_SYSTEM_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_SYSTEM_DIR").unwrap_or_else(|| PathBuf::from("/etc/mise"))); // data subdirs pub static MISE_INSTALLS_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_INSTALLS_DIR").unwrap_or_else(|| MISE_DATA_DIR.join("installs"))); pub static MISE_DOWNLOADS_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_DOWNLOADS_DIR").unwrap_or_else(|| MISE_DATA_DIR.join("downloads"))); pub static MISE_PLUGINS_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_PLUGINS_DIR").unwrap_or_else(|| MISE_DATA_DIR.join("plugins"))); pub static MISE_SHIMS_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_SHIMS_DIR").unwrap_or_else(|| MISE_DATA_DIR.join("shims"))); pub static MISE_DEFAULT_TOOL_VERSIONS_FILENAME: Lazy<String> = Lazy::new(|| { var("MISE_DEFAULT_TOOL_VERSIONS_FILENAME") .ok() .or(MISE_OVERRIDE_TOOL_VERSIONS_FILENAMES .as_ref() .and_then(|v| v.first().cloned())) .or(var("MISE_DEFAULT_TOOL_VERSIONS_FILENAME").ok()) .unwrap_or_else(|| ".tool-versions".into()) }); pub static MISE_DEFAULT_CONFIG_FILENAME: Lazy<String> = Lazy::new(|| { var("MISE_DEFAULT_CONFIG_FILENAME") .ok() .or(MISE_OVERRIDE_CONFIG_FILENAMES.first().cloned()) .unwrap_or_else(|| "mise.toml".into()) }); pub static MISE_OVERRIDE_TOOL_VERSIONS_FILENAMES: Lazy<Option<IndexSet<String>>> = Lazy::new(|| match var("MISE_OVERRIDE_TOOL_VERSIONS_FILENAMES") { Ok(v) if v == "none" => Some([].into()), Ok(v) => Some(v.split(':').map(|s| 
s.to_string()).collect()), Err(_) => Default::default(), }); pub static MISE_OVERRIDE_CONFIG_FILENAMES: Lazy<IndexSet<String>> = Lazy::new(|| match var("MISE_OVERRIDE_CONFIG_FILENAMES") { Ok(v) => v.split(':').map(|s| s.to_string()).collect(), Err(_) => Default::default(), }); pub static MISE_ENV: Lazy<Vec<String>> = Lazy::new(|| environment(&ARGS.read().unwrap())); pub static MISE_GLOBAL_CONFIG_FILE: Lazy<Option<PathBuf>> = Lazy::new(|| var_path("MISE_GLOBAL_CONFIG_FILE").or_else(|| var_path("MISE_CONFIG_FILE"))); pub static MISE_GLOBAL_CONFIG_ROOT: Lazy<PathBuf> = Lazy::new(|| var_path("MISE_GLOBAL_CONFIG_ROOT").unwrap_or_else(|| HOME.to_path_buf())); pub static MISE_SYSTEM_CONFIG_FILE: Lazy<Option<PathBuf>> = Lazy::new(|| var_path("MISE_SYSTEM_CONFIG_FILE")); pub static MISE_IGNORED_CONFIG_PATHS: Lazy<Vec<PathBuf>> = Lazy::new(|| { var("MISE_IGNORED_CONFIG_PATHS") .ok() .map(|v| { v.split(':') .filter(|p| !p.is_empty()) .map(PathBuf::from) .map(replace_path) .collect() }) .unwrap_or_default() }); pub static MISE_CEILING_PATHS: Lazy<HashSet<PathBuf>> = Lazy::new(|| { var("MISE_CEILING_PATHS") .ok() .map(|v| { split_paths(&v) .filter(|p| !p.as_os_str().is_empty()) .map(replace_path) .collect() }) .unwrap_or_default() }); pub static MISE_USE_TOML: Lazy<bool> = Lazy::new(|| !var_is_false("MISE_USE_TOML")); pub static MISE_LIST_ALL_VERSIONS: Lazy<bool> = Lazy::new(|| var_is_true("MISE_LIST_ALL_VERSIONS")); pub static ARGV0: Lazy<String> = Lazy::new(|| ARGS.read().unwrap()[0].to_string()); pub static MISE_BIN_NAME: Lazy<&str> = Lazy::new(|| filename(&ARGV0)); pub static MISE_LOG_FILE: Lazy<Option<PathBuf>> = Lazy::new(|| var_path("MISE_LOG_FILE")); pub static MISE_LOG_FILE_LEVEL: Lazy<Option<LevelFilter>> = Lazy::new(log_file_level); fn find_in_tree(base: &Path, rels: &[&[&str]]) -> Option<PathBuf> { for rel in rels { let mut p = base.to_path_buf(); for part in *rel { p = p.join(part); } if p.exists() { return Some(p); } } None } fn mise_install_base() -> 
Option<PathBuf> { std::fs::canonicalize(&*MISE_BIN) .ok() .and_then(|p| p.parent().map(|p| p.to_path_buf())) .and_then(|p| p.parent().map(|p| p.to_path_buf())) } pub static MISE_SELF_UPDATE_INSTRUCTIONS: Lazy<Option<PathBuf>> = Lazy::new(|| { if let Some(p) = var_path("MISE_SELF_UPDATE_INSTRUCTIONS") { return Some(p); } let base = mise_install_base()?; // search lib/, lib/mise/, lib64/mise/ find_in_tree( &base, &[ &["lib", "mise-self-update-instructions.toml"], &["lib", "mise", "mise-self-update-instructions.toml"], &["lib64", "mise", "mise-self-update-instructions.toml"], ], ) }); #[cfg(feature = "self_update")] pub static MISE_SELF_UPDATE_AVAILABLE: Lazy<Option<bool>> = Lazy::new(|| { if var_is_true("MISE_SELF_UPDATE_AVAILABLE") { Some(true) } else if var_is_false("MISE_SELF_UPDATE_AVAILABLE") { Some(false) } else { None } }); #[cfg(feature = "self_update")] pub static MISE_SELF_UPDATE_DISABLED_PATH: Lazy<Option<PathBuf>> = Lazy::new(|| { let base = mise_install_base()?; find_in_tree( &base, &[ &["lib", ".disable-self-update"], &["lib", "mise", ".disable-self-update"], &["lib64", "mise", ".disable-self-update"], ], ) }); pub static MISE_LOG_HTTP: Lazy<bool> = Lazy::new(|| var_is_true("MISE_LOG_HTTP")); pub static __USAGE: Lazy<Option<String>> = Lazy::new(|| var("__USAGE").ok()); // true if running inside a shim pub static __MISE_SHIM: Lazy<bool> = Lazy::new(|| var_is_true("__MISE_SHIM")); // true if the current process is running as a shim (not direct mise invocation) pub static IS_RUNNING_AS_SHIM: Lazy<bool> = Lazy::new(|| { // When running tests, always treat as direct mise invocation // to avoid interfering with test expectations if cfg!(test) { return false; } // Check if running as tool stub if *MISE_TOOL_STUB { return true; } #[cfg(unix)] let mise_bin = "mise"; #[cfg(windows)] let mise_bin = "mise.exe"; let bin_name = *MISE_BIN_NAME; bin_name != mise_bin && !bin_name.starts_with("mise-") }); #[cfg(test)] pub static TERM_WIDTH: Lazy<usize> = Lazy::new(|| 
80); #[cfg(not(test))] pub static TERM_WIDTH: Lazy<usize> = Lazy::new(|| { terminal_size::terminal_size() .map(|(w, _)| w.0 as usize) .unwrap_or(80) .max(80) }); /// true if inside a script like bin/exec-env or bin/install /// used to prevent infinite loops pub static MISE_BIN: Lazy<PathBuf> = Lazy::new(|| { var_path("__MISE_BIN") .or_else(|| current_exe().ok()) .unwrap_or_else(|| "mise".into()) }); pub static MISE_TIMINGS: Lazy<u8> = Lazy::new(|| var_u8("MISE_TIMINGS")); pub static MISE_PID: Lazy<String> = Lazy::new(|| process::id().to_string()); pub static __MISE_SCRIPT: Lazy<bool> = Lazy::new(|| var_is_true("__MISE_SCRIPT")); pub static __MISE_DIFF: Lazy<EnvDiff> = Lazy::new(get_env_diff); pub static __MISE_ORIG_PATH: Lazy<Option<String>> = Lazy::new(|| var("__MISE_ORIG_PATH").ok()); pub static __MISE_ZSH_PRECMD_RUN: Lazy<bool> = Lazy::new(|| !var_is_false("__MISE_ZSH_PRECMD_RUN")); pub static LINUX_DISTRO: Lazy<Option<String>> = Lazy::new(linux_distro); pub static PREFER_OFFLINE: Lazy<AtomicBool> = Lazy::new(|| prefer_offline(&ARGS.read().unwrap()).into()); pub static OFFLINE: Lazy<bool> = Lazy::new(|| offline(&ARGS.read().unwrap())); pub static WARN_ON_MISSING_REQUIRED_ENV: Lazy<bool> = Lazy::new(|| warn_on_missing_required_env(&ARGS.read().unwrap())); /// essentially, this is whether we show spinners or build output on runtime install pub static PRISTINE_ENV: Lazy<EnvMap> = Lazy::new(|| get_pristine_env(&__MISE_DIFF, vars_safe().collect())); pub static PATH_KEY: Lazy<String> = Lazy::new(|| { vars_safe() .map(|(k, _)| k) .find_or_first(|k| k.to_uppercase() == "PATH") .map(|k| k.to_string()) .unwrap_or("PATH".into()) }); pub static PATH: Lazy<Vec<PathBuf>> = Lazy::new(|| match PRISTINE_ENV.get(&*PATH_KEY) { Some(path) => split_paths(path).collect(), None => vec![], }); pub static PATH_NON_PRISTINE: Lazy<Vec<PathBuf>> = Lazy::new(|| match var(&*PATH_KEY) { Ok(ref path) => split_paths(path).collect(), Err(_) => vec![], }); pub static DIRENV_DIFF: 
Lazy<Option<String>> = Lazy::new(|| var("DIRENV_DIFF").ok()); pub static GITHUB_TOKEN: Lazy<Option<String>> = Lazy::new(|| get_token(&["MISE_GITHUB_TOKEN", "GITHUB_API_TOKEN", "GITHUB_TOKEN"])); pub static MISE_GITHUB_ENTERPRISE_TOKEN: Lazy<Option<String>> = Lazy::new(|| get_token(&["MISE_GITHUB_ENTERPRISE_TOKEN"])); pub static GITLAB_TOKEN: Lazy<Option<String>> = Lazy::new(|| get_token(&["MISE_GITLAB_TOKEN", "GITLAB_TOKEN"])); pub static MISE_GITLAB_ENTERPRISE_TOKEN: Lazy<Option<String>> = Lazy::new(|| get_token(&["MISE_GITLAB_ENTERPRISE_TOKEN"])); pub static TEST_TRANCHE: Lazy<usize> = Lazy::new(|| var_u8("TEST_TRANCHE") as usize); pub static TEST_TRANCHE_COUNT: Lazy<usize> = Lazy::new(|| var_u8("TEST_TRANCHE_COUNT") as usize); pub static CLICOLOR_FORCE: Lazy<Option<bool>> = Lazy::new(|| var("CLICOLOR_FORCE").ok().map(|v| v != "0")); pub static CLICOLOR: Lazy<Option<bool>> = Lazy::new(|| { if *CLICOLOR_FORCE == Some(true) { Some(true) } else if *NO_COLOR { Some(false) } else if let Ok(v) = var("CLICOLOR") { Some(v != "0") } else { None } }); /// Disable color output - https://no-color.org/ pub static NO_COLOR: Lazy<bool> = Lazy::new(|| var("NO_COLOR").is_ok_and(|v| !v.is_empty())); // Terminal detection pub static TERM_PROGRAM: Lazy<Option<String>> = Lazy::new(|| var("TERM_PROGRAM").ok()); pub static WT_SESSION: Lazy<bool> = Lazy::new(|| var("WT_SESSION").is_ok()); pub static VTE_VERSION: Lazy<bool> = Lazy::new(|| var("VTE_VERSION").is_ok()); // python pub static PYENV_ROOT: Lazy<PathBuf> = Lazy::new(|| var_path("PYENV_ROOT").unwrap_or_else(|| HOME.join(".pyenv"))); pub static UV_PYTHON_INSTALL_DIR: Lazy<PathBuf> = Lazy::new(|| { var_path("UV_PYTHON_INSTALL_DIR").unwrap_or_else(|| XDG_DATA_HOME.join("uv").join("python")) }); // node pub static MISE_NODE_CONCURRENCY: Lazy<Option<usize>> = Lazy::new(|| { var("MISE_NODE_CONCURRENCY") .ok() .and_then(|v| v.parse::<usize>().ok()) .map(|v| v.max(1)) .or_else(|| { if *MISE_NODE_NINJA { None } else { 
Some(num_cpus::get_physical()) } }) }); pub static MISE_NODE_MAKE: Lazy<String> = Lazy::new(|| var("MISE_NODE_MAKE").unwrap_or_else(|_| "make".into())); pub static MISE_NODE_NINJA: Lazy<bool> = Lazy::new(|| var_option_bool("MISE_NODE_NINJA").unwrap_or_else(is_ninja_on_path)); pub static MISE_NODE_VERIFY: Lazy<bool> = Lazy::new(|| !var_is_false("MISE_NODE_VERIFY")); pub static MISE_NODE_CFLAGS: Lazy<Option<String>> = Lazy::new(|| var("MISE_NODE_CFLAGS").or_else(|_| var("NODE_CFLAGS")).ok()); pub static MISE_NODE_CONFIGURE_OPTS: Lazy<Option<String>> = Lazy::new(|| { var("MISE_NODE_CONFIGURE_OPTS") .or_else(|_| var("NODE_CONFIGURE_OPTS")) .ok() }); pub static MISE_NODE_MAKE_OPTS: Lazy<Option<String>> = Lazy::new(|| { var("MISE_NODE_MAKE_OPTS") .or_else(|_| var("NODE_MAKE_OPTS")) .ok() }); pub static MISE_NODE_MAKE_INSTALL_OPTS: Lazy<Option<String>> = Lazy::new(|| { var("MISE_NODE_MAKE_INSTALL_OPTS") .or_else(|_| var("NODE_MAKE_INSTALL_OPTS")) .ok() }); pub static MISE_JOBS: Lazy<Option<usize>> = Lazy::new(|| var("MISE_JOBS").ok().and_then(|v| v.parse::<usize>().ok())); pub static MISE_NODE_DEFAULT_PACKAGES_FILE: Lazy<PathBuf> = Lazy::new(|| { var_path("MISE_NODE_DEFAULT_PACKAGES_FILE").unwrap_or_else(|| { let p = HOME.join(".default-nodejs-packages"); if p.exists() { return p; } let p = HOME.join(".default-node-packages"); if p.exists() { return p; } HOME.join(".default-npm-packages") }) }); pub static MISE_NODE_COREPACK: Lazy<bool> = Lazy::new(|| var_is_true("MISE_NODE_COREPACK")); pub static NVM_DIR: Lazy<PathBuf> = Lazy::new(|| var_path("NVM_DIR").unwrap_or_else(|| HOME.join(".nvm"))); pub static NODENV_ROOT: Lazy<PathBuf> = Lazy::new(|| var_path("NODENV_ROOT").unwrap_or_else(|| HOME.join(".nodenv"))); #[cfg(unix)] pub const PATH_ENV_SEP: char = ':'; #[cfg(windows)] pub const PATH_ENV_SEP: char = ';'; fn get_env_diff() -> EnvDiff { let env = vars_safe().collect::<HashMap<_, _>>(); match env.get("__MISE_DIFF") { Some(raw) => 
EnvDiff::deserialize(raw).unwrap_or_else(|err| { warn!("Failed to deserialize __MISE_DIFF: {:#}", err); EnvDiff::default() }), None => EnvDiff::default(), } } fn var_u8(key: &str) -> u8 { var(key) .ok() .and_then(|v| v.parse::<u8>().ok()) .unwrap_or_default() } fn var_is_true(key: &str) -> bool { match var(key) { Ok(v) => { let v = v.to_lowercase(); v == "y" || v == "yes" || v == "true" || v == "1" || v == "on" } Err(_) => false, } } fn var_is_false(key: &str) -> bool { match var(key) { Ok(v) => { let v = v.to_lowercase(); v == "n" || v == "no" || v == "false" || v == "0" || v == "off" } Err(_) => false, } } fn var_option_bool(key: &str) -> Option<bool> { match var(key) { Ok(_) if var_is_true(key) => Some(true), Ok(_) if var_is_false(key) => Some(false), Ok(v) => { warn!("Invalid value for env var {}={}", key, v); None } _ => None, } } pub fn in_home_dir() -> bool { current_dir().is_ok_and(|d| d == *HOME) } pub fn var_path(key: &str) -> Option<PathBuf> { var_os(key).map(PathBuf::from).map(replace_path) } /// this returns the environment as if __MISE_DIFF was reversed. 
/// putting the shell back into a state before hook-env was run fn get_pristine_env(mise_diff: &EnvDiff, orig_env: EnvMap) -> EnvMap { let patches = mise_diff.reverse().to_patches(); let mut env = apply_patches(&orig_env, &patches); // get the current path as a vector let path = match env.get(&*PATH_KEY) { Some(path) => split_paths(path).collect(), None => vec![], }; // get the paths that were removed by mise as a hashset let mut to_remove = mise_diff.path.iter().collect::<HashSet<_>>(); // remove those paths that were added by mise, but only once (the first time) let path = path .into_iter() .filter(|p| !to_remove.remove(p)) .collect_vec(); // put the pristine PATH back into the environment env.insert( PATH_KEY.to_string(), join_paths(path).unwrap().to_string_lossy().to_string(), ); env } fn apply_patches(env: &EnvMap, patches: &EnvDiffPatches) -> EnvMap { let mut new_env = env.clone(); for patch in patches { match patch { EnvDiffOperation::Add(k, v) | EnvDiffOperation::Change(k, v) => { new_env.insert(k.into(), v.into()); } EnvDiffOperation::Remove(k) => { new_env.remove(k); } } } new_env } fn offline(args: &[String]) -> bool { if var_is_true("MISE_OFFLINE") { return true; } args.iter() .take_while(|a| *a != "--") .any(|a| a == "--offline") } /// returns true if new runtime versions should not be fetched fn prefer_offline(args: &[String]) -> bool { // First check if MISE_PREFER_OFFLINE is set if var_is_true("MISE_PREFER_OFFLINE") { return true; } // Otherwise fall back to the original command-based logic args.iter() .take_while(|a| *a != "--") .filter(|a| !a.starts_with('-') || *a == "--prefer-offline") .nth(1) .map(|a| { [ "--prefer-offline", "activate", "current", "direnv", "env", "exec", "hook-env", "ls", "where", "x", ] .contains(&a.as_str()) }) .unwrap_or_default() } /// returns true if missing required env vars should produce warnings instead of errors fn warn_on_missing_required_env(args: &[String]) -> bool { // Check if we're running in a command that 
should warn instead of error args.iter() .take_while(|a| *a != "--") .filter(|a| !a.starts_with('-')) .nth(1) .map(|a| { [ "hook-env", // Shell activation should not break the shell ] .contains(&a.as_str()) }) .unwrap_or_default() } fn environment(args: &[String]) -> Vec<String> { let arg_defs = HashSet::from(["--profile", "-P", "--env", "-E"]); // Get environment value from args or env vars if *IS_RUNNING_AS_SHIM { // When running as shim, ignore command line args and use env vars only None } else { // Try to get from command line args first args.windows(2) .take_while(|window| !window.iter().any(|a| a == "--")) .find_map(|window| { if arg_defs.contains(&*window[0]) { Some(window[1].clone()) } else { None } }) } .or_else(|| var("MISE_ENV").ok()) .or_else(|| var("MISE_PROFILE").ok()) .or_else(|| var("MISE_ENVIRONMENT").ok()) .unwrap_or_default() .split(',') .filter(|s| !s.is_empty()) .map(String::from) .collect() } fn log_file_level() -> Option<LevelFilter> { let log_level = var("MISE_LOG_FILE_LEVEL").unwrap_or_default(); log_level.parse::<LevelFilter>().ok() } fn linux_distro() -> Option<String> { match sys_info::linux_os_release() { Ok(release) => release.id, _ => None, } } fn filename(path: &str) -> &str { path.rsplit_once(path::MAIN_SEPARATOR_STR) .map(|(_, file)| file) .unwrap_or(path) } fn get_token(keys: &[&str]) -> Option<String> { keys.iter() .find_map(|key| var(key).ok()) .and_then(|v| if v.trim().is_empty() { None } else { Some(v) }) } fn is_ninja_on_path() -> bool { which::which("ninja").is_ok() } pub fn is_activated() -> bool { var("__MISE_DIFF").is_ok() } pub fn set_var<K: AsRef<OsStr>, V: AsRef<OsStr>>(key: K, value: V) { static MUTEX: Mutex<()> = Mutex::new(()); let _mutex = MUTEX.lock().unwrap(); unsafe { std::env::set_var(key, value); } } pub fn remove_var<K: AsRef<OsStr>>(key: K) { static MUTEX: Mutex<()> = Mutex::new(()); let _mutex = MUTEX.lock().unwrap(); unsafe { std::env::remove_var(key); } } /// Safe wrapper around std::env::vars() that 
handles invalid UTF-8 gracefully. /// This function uses vars_os() and converts OsString to String, skipping any /// environment variables that contain invalid UTF-8 sequences. pub fn vars_safe() -> impl Iterator<Item = (String, String)> { vars_os().filter_map(|(k, v)| { let k_str = k.to_str()?; let v_str = v.to_str()?; Some((k_str.to_string(), v_str.to_string())) }) } pub fn set_current_dir<P: AsRef<Path>>(path: P) -> Result<()> { let path = path.as_ref(); trace!("cd {}", display_path(path)); unsafe { std::env::set_current_dir(path).wrap_err_with(|| { format!("failed to set current directory to {}", display_path(path)) })?; path_absolutize::update_cwd(); } Ok(()) } #[cfg(test)] mod tests { use pretty_assertions::assert_eq; use crate::config::Config; use super::*; #[tokio::test] async fn test_apply_patches() { let _config = Config::get().await.unwrap(); let mut env = EnvMap::new(); env.insert("foo".into(), "bar".into()); env.insert("baz".into(), "qux".into()); let patches = vec![ EnvDiffOperation::Add("foo".into(), "bar".into()), EnvDiffOperation::Change("baz".into(), "qux".into()), EnvDiffOperation::Remove("quux".into()), ]; let new_env = apply_patches(&env, &patches); assert_eq!(new_env.len(), 2); assert_eq!(new_env.get("foo").unwrap(), "bar"); assert_eq!(new_env.get("baz").unwrap(), "qux"); } #[tokio::test] async fn test_var_path() { let _config = Config::get().await.unwrap(); set_var("MISE_TEST_PATH", "/foo/bar"); assert_eq!( var_path("MISE_TEST_PATH").unwrap(), PathBuf::from("/foo/bar") ); remove_var("MISE_TEST_PATH"); } #[test] fn test_token_overwrite() { // Clean up any existing environment variables that might interfere remove_var("MISE_GITHUB_TOKEN"); remove_var("GITHUB_TOKEN"); remove_var("GITHUB_API_TOKEN"); set_var("MISE_GITHUB_TOKEN", ""); set_var("GITHUB_TOKEN", "invalid_token"); assert_eq!( get_token(&["MISE_GITHUB_TOKEN", "GITHUB_TOKEN"]), None, "Empty token should overwrite other tokens" ); assert_eq!( get_token(&["GITHUB_API_TOKEN", 
"GITHUB_TOKEN"]), Some("invalid_token".into()), "Unset token should not overwrite other tokens" ); remove_var("MISE_GITHUB_TOKEN"); remove_var("GITHUB_TOKEN"); remove_var("GITHUB_API_TOKEN"); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/tera.rs
src/tera.rs
use std::collections::HashMap; use std::iter::once; use std::path::{Path, PathBuf}; use heck::{ ToKebabCase, ToLowerCamelCase, ToShoutyKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase, }; use path_absolutize::Absolutize; use rand::prelude::*; use std::sync::LazyLock as Lazy; use tera::{Context, Tera, Value}; use versions::{Requirement, Versioning}; use crate::cache::CacheManagerBuilder; use crate::cmd::cmd; use crate::config::Settings; use crate::env_diff::EnvMap; use crate::{dirs, duration, env, hash}; pub static BASE_CONTEXT: Lazy<Context> = Lazy::new(|| { let mut context = Context::new(); context.insert("env", &*env::PRISTINE_ENV); context.insert("mise_bin", &*env::MISE_BIN); context.insert("mise_pid", &*env::MISE_PID); if !(*env::MISE_ENV).is_empty() { context.insert("mise_env", &*env::MISE_ENV); } if let Ok(dir) = env::current_dir() { context.insert("cwd", &dir); } context.insert("xdg_cache_home", &*env::XDG_CACHE_HOME); context.insert("xdg_config_home", &*env::XDG_CONFIG_HOME); context.insert("xdg_data_home", &*env::XDG_DATA_HOME); context.insert("xdg_state_home", &*env::XDG_STATE_HOME); context }); static TERA: Lazy<Tera> = Lazy::new(|| { let mut tera = Tera::default(); tera.register_function( "arch", move |_args: &HashMap<String, Value>| -> tera::Result<Value> { let arch = if cfg!(target_arch = "x86_64") { "x64" } else if cfg!(target_arch = "aarch64") { "arm64" } else { env::consts::ARCH }; Ok(Value::String(arch.to_string())) }, ); tera.register_function( "num_cpus", move |_args: &HashMap<String, Value>| -> tera::Result<Value> { let num = num_cpus::get(); Ok(Value::String(num.to_string())) }, ); tera.register_function( "os", move |_args: &HashMap<String, Value>| -> tera::Result<Value> { Ok(Value::String(env::consts::OS.to_string())) }, ); tera.register_function( "os_family", move |_args: &HashMap<String, Value>| -> tera::Result<Value> { Ok(Value::String(env::consts::FAMILY.to_string())) }, ); tera.register_function( "choice", move |args: 
&HashMap<String, Value>| -> tera::Result<Value> { match args.get("n") { Some(Value::Number(n)) => { let n = n.as_u64().unwrap(); match args.get("alphabet") { Some(Value::String(alphabet)) => { let alphabet = alphabet.chars().collect::<Vec<char>>(); let mut rng = rand::rng(); let result = (0..n).map(|_| alphabet.choose(&mut rng).unwrap()).collect(); Ok(Value::String(result)) } _ => Err("choice alphabet must be an string".into()), } } _ => Err("choice n must be an integer".into()), } }, ); tera.register_function( "haiku", move |args: &HashMap<String, Value>| -> tera::Result<Value> { let words = args .get("words") .and_then(Value::as_u64) .unwrap_or(2) .max(1) as usize; let separator = args.get("separator").and_then(Value::as_str).unwrap_or("-"); let digits = args.get("digits").and_then(Value::as_u64).unwrap_or(2) as usize; let result = xx::rand::haiku(&xx::rand::HaikuOptions { words, separator, digits, }); Ok(Value::String(result)) }, ); tera.register_filter( "hash_file", move |input: &Value, args: &HashMap<String, Value>| match input { Value::String(s) => { let path = Path::new(s); let mut hash = hash::file_hash_blake3(path, None).unwrap(); if let Some(len) = args.get("len").and_then(Value::as_u64) { hash = hash.chars().take(len as usize).collect(); } Ok(Value::String(hash)) } _ => Err("hash input must be a string".into()), }, ); tera.register_filter( "hash", move |input: &Value, args: &HashMap<String, Value>| match input { Value::String(s) => { let mut hash = hash::hash_blake3_to_str(s); if let Some(len) = args.get("len").and_then(Value::as_u64) { hash = hash.chars().take(len as usize).collect(); } Ok(Value::String(hash)) } _ => Err("hash input must be a string".into()), }, ); tera.register_filter( "absolute", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::String(s) => { let p = Path::new(s).absolutize()?; Ok(Value::String(p.to_string_lossy().to_string())) } _ => Err("absolute input must be a string".into()), }, ); tera.register_filter( 
"canonicalize", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::String(s) => { let p = Path::new(s).canonicalize()?; Ok(Value::String(p.to_string_lossy().to_string())) } _ => Err("canonicalize input must be a string".into()), }, ); // Helper to create path filters that handle empty strings gracefully fn path_filter<F>(input: &Value, name: &'static str, f: F) -> tera::Result<Value> where F: FnOnce(&Path) -> Option<String>, { match input { Value::String(s) if s.is_empty() => Ok(Value::String(String::new())), Value::String(s) => Ok(Value::String(f(Path::new(s)).unwrap_or_default())), _ => Err(format!("{name} input must be a string").into()), } } tera.register_filter( "dirname", move |input: &Value, _args: &HashMap<String, Value>| { path_filter(input, "dirname", |p| { p.parent().map(|p| p.to_string_lossy().to_string()) }) }, ); tera.register_filter( "basename", move |input: &Value, _args: &HashMap<String, Value>| { path_filter(input, "basename", |p| { p.file_name().map(|p| p.to_string_lossy().to_string()) }) }, ); tera.register_filter( "extname", move |input: &Value, _args: &HashMap<String, Value>| { path_filter(input, "extname", |p| { p.extension().map(|p| p.to_string_lossy().to_string()) }) }, ); tera.register_filter( "file_stem", move |input: &Value, _args: &HashMap<String, Value>| { path_filter(input, "file_stem", |p| { p.file_stem().map(|p| p.to_string_lossy().to_string()) }) }, ); tera.register_filter( "file_size", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::String(s) => { let p = Path::new(s); let metadata = p.metadata()?; let size = metadata.len(); Ok(Value::Number(size.into())) } _ => Err("file_size input must be a string".into()), }, ); tera.register_filter( "last_modified", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::String(s) => { let p = Path::new(s); let metadata = p.metadata()?; let modified = metadata.modified()?; let modified = 
modified.duration_since(std::time::UNIX_EPOCH).unwrap(); Ok(Value::Number(modified.as_secs().into())) } _ => Err("last_modified input must be a string".into()), }, ); tera.register_filter( "join_path", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::Array(arr) => arr .iter() .map(Value::as_str) .collect::<Option<PathBuf>>() .ok_or("join_path input must be an array of strings".into()) .map(|p| Value::String(p.to_string_lossy().to_string())), _ => Err("join_path input must be an array of strings".into()), }, ); tera.register_filter( "quote", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::String(s) => { let result = format!("'{}'", s.replace("'", "\\'")); Ok(Value::String(result)) } _ => Err("quote input must be a string".into()), }, ); tera.register_filter( "kebabcase", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::String(s) => Ok(Value::String(s.to_kebab_case())), _ => Err("kebabcase input must be a string".into()), }, ); tera.register_filter( "lowercamelcase", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::String(s) => Ok(Value::String(s.to_lower_camel_case())), _ => Err("lowercamelcase input must be a string".into()), }, ); tera.register_filter( "shoutykebabcase", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::String(s) => Ok(Value::String(s.to_shouty_kebab_case())), _ => Err("shoutykebabcase input must be a string".into()), }, ); tera.register_filter( "shoutysnakecase", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::String(s) => Ok(Value::String(s.to_shouty_snake_case())), _ => Err("shoutysnakecase input must be a string".into()), }, ); tera.register_filter( "snakecase", move |input: &Value, _args: &HashMap<String, Value>| match input { Value::String(s) => Ok(Value::String(s.to_snake_case())), _ => Err("snakecase input must be a string".into()), }, ); tera.register_filter( "uppercamelcase", move |input: 
&Value, _args: &HashMap<String, Value>| match input { Value::String(s) => Ok(Value::String(s.to_upper_camel_case())), _ => Err("uppercamelcase input must be a string".into()), }, ); tera.register_tester( "dir", move |input: Option<&Value>, _args: &[Value]| match input { Some(Value::String(s)) => Ok(Path::new(s).is_dir()), _ => Err("is_dir input must be a string".into()), }, ); tera.register_tester( "file", move |input: Option<&Value>, _args: &[Value]| match input { Some(Value::String(s)) => Ok(Path::new(s).is_file()), _ => Err("is_file input must be a string".into()), }, ); tera.register_tester( "exists", move |input: Option<&Value>, _args: &[Value]| match input { Some(Value::String(s)) => Ok(Path::new(s).exists()), _ => Err("exists input must be a string".into()), }, ); tera.register_tester( "semver_matching", move |input: Option<&Value>, args: &[Value]| match input { Some(Value::String(version)) => match args.first() { Some(Value::String(requirement)) => { println!("{requirement}"); let result = Requirement::new(requirement) .unwrap() .matches(&Versioning::new(version).unwrap()); Ok(result) } _ => Err("semver_matching argument must be a string".into()), }, _ => Err("semver_matching input must be a string".into()), }, ); tera }); pub fn get_tera(dir: Option<&Path>) -> Tera { let mut tera = TERA.clone(); let dir = dir.map(PathBuf::from); tera.register_function("exec", tera_exec(dir.clone(), env::PRISTINE_ENV.clone())); tera.register_function("read_file", tera_read_file(dir)); tera } pub fn tera_exec( dir: Option<PathBuf>, env: EnvMap, ) -> impl Fn(&HashMap<String, Value>) -> tera::Result<Value> { move |args: &HashMap<String, Value>| -> tera::Result<Value> { let cache = match args.get("cache_key") { Some(Value::String(cache)) => Some(cache), None => None, _ => return Err("exec cache_key must be a string".into()), }; let cache_duration = match args.get("cache_duration") { Some(Value::String(duration)) => { match duration::parse_duration(&duration.to_string()) { 
Ok(duration) => Some(duration), Err(e) => return Err(format!("exec cache_duration: {e}").into()), } } None => None, _ => return Err("exec cache_duration must be an integer".into()), }; match args.get("command") { Some(Value::String(command)) => { let shell = Settings::get() .default_inline_shell() .map_err(|e| tera::Error::msg(e.to_string()))?; let args = shell .iter() .skip(1) .chain(once(command)) .collect::<Vec<&String>>(); let mut cmd: duct::Expression = cmd(&shell[0], args).full_env(&env); if let Some(dir) = &dir { cmd = cmd.dir(dir); } let result = if cache.is_some() || cache_duration.is_some() { let cachehash = hash::hash_blake3_to_str( &(dir .as_ref() .map(|d| d.to_string_lossy().to_string()) .unwrap_or_default() + command), )[..8] .to_string(); let mut cacheman = CacheManagerBuilder::new(dirs::CACHE.join("exec").join(cachehash)); if let Some(cache) = cache { cacheman = cacheman.with_cache_key(cache.clone()); } if let Some(cache_duration) = cache_duration { cacheman = cacheman.with_fresh_duration(Some(cache_duration)); } let cache = cacheman.build(); match cache.get_or_try_init(|| Ok(cmd.read()?)) { Ok(result) => result.clone(), Err(e) => return Err(format!("exec command: {e}").into()), } } else { cmd.read()? 
}; Ok(Value::String(result)) } _ => Err("exec command must be a string".into()), } } } pub fn tera_read_file( dir: Option<PathBuf>, ) -> impl Fn(&HashMap<String, Value>) -> tera::Result<Value> { move |args: &HashMap<String, Value>| -> tera::Result<Value> { match args.get("path") { Some(Value::String(path_str)) => { let path = if let Some(ref base_dir) = dir { // Resolve relative to config directory base_dir.join(path_str) } else { // Use path as-is if no directory context PathBuf::from(path_str) }; match std::fs::read_to_string(&path) { Ok(contents) => Ok(Value::String(contents)), Err(e) => { Err(format!("Failed to read file '{}': {}", path.display(), e).into()) } } } _ => Err("read_file path must be a string".into()), } } } #[cfg(test)] mod tests { use crate::config::Config; use super::*; use pretty_assertions::assert_str_eq; #[tokio::test] async fn test_config_root() { let _config = Config::get().await.unwrap(); assert_eq!(render("{{config_root}}"), "/"); } #[tokio::test] async fn test_mise_env() { let _config = Config::get().await.unwrap(); assert_eq!(render("{% if mise_env %}{{mise_env}}{% endif %}"), ""); } #[tokio::test] async fn test_cwd() { let _config = Config::get().await.unwrap(); assert_eq!(render("{{cwd}}"), "/"); } #[tokio::test] async fn test_mise_bin() { let _config = Config::get().await.unwrap(); assert_eq!( render("{{mise_bin}}"), env::current_exe() .unwrap() .into_os_string() .into_string() .unwrap() ); } #[tokio::test] async fn test_mise_pid() { let _config = Config::get().await.unwrap(); let s = render("{{mise_pid}}"); let pid = s.trim().parse::<u32>().unwrap(); assert!(pid > 0); } #[tokio::test] async fn test_xdg_cache_home() { let _config = Config::get().await.unwrap(); let s = render("{{xdg_cache_home}}"); assert_str_eq!(s, env::XDG_CACHE_HOME.to_string_lossy()); } #[tokio::test] #[cfg(unix)] async fn test_xdg_config_home() { let _config = Config::get().await.unwrap(); let s = render("{{xdg_config_home}}"); assert!(s.ends_with("/.config")); 
// test dir is not deterministic } #[tokio::test] #[cfg(unix)] async fn test_xdg_data_home() { let _config = Config::get().await.unwrap(); let s = render("{{xdg_data_home}}"); assert!(s.ends_with("/.local/share")); // test dir is not deterministic } #[tokio::test] #[cfg(unix)] async fn test_xdg_state_home() { let _config = Config::get().await.unwrap(); let s = render("{{xdg_state_home}}"); assert!(s.ends_with("/.local/state")); // test dir is not deterministic } #[tokio::test] async fn test_arch() { let _config = Config::get().await.unwrap(); if cfg!(target_arch = "x86_64") { assert_eq!(render("{{arch()}}"), "x64"); } else if cfg!(target_arch = "aarch64") { assert_eq!(render("{{arch()}}"), "arm64"); } else { assert_eq!(render("{{arch()}}"), env::consts::ARCH); } } #[tokio::test] async fn test_num_cpus() { let _config = Config::get().await.unwrap(); let s = render("{{ num_cpus() }}"); let num = s.parse::<u32>().unwrap(); assert!(num > 0); } #[tokio::test] async fn test_os() { let _config = Config::get().await.unwrap(); if cfg!(target_os = "linux") { assert_eq!(render("{{os()}}"), "linux"); } else if cfg!(target_os = "macos") { assert_eq!(render("{{os()}}"), "macos"); } else if cfg!(target_os = "windows") { assert_eq!(render("{{os()}}"), "windows"); } } #[tokio::test] async fn test_os_family() { let _config = Config::get().await.unwrap(); if cfg!(target_family = "unix") { assert_eq!(render("{{os_family()}}"), "unix"); } else if cfg!(target_os = "windows") { assert_eq!(render("{{os_family()}}"), "windows"); } } #[tokio::test] async fn test_choice() { let _config = Config::get().await.unwrap(); let result = render("{{choice(n=8, alphabet=\"abcdefgh\")}}"); assert_eq!(result.trim().len(), 8); } #[tokio::test] async fn test_haiku() { let _config = Config::get().await.unwrap(); // Default: 2 words + number let result = render("{{haiku()}}"); let parts: Vec<&str> = result.split('-').collect(); assert_eq!(parts.len(), 3); assert!(!parts[0].is_empty()); 
assert!(!parts[1].is_empty()); assert!(parts[2].parse::<u32>().is_ok()); // Custom: 3 words, no digits, underscore separator let result = render("{{haiku(words=3, digits=0, separator=\"_\")}}"); let parts: Vec<&str> = result.split('_').collect(); assert_eq!(parts.len(), 3); assert!(parts.iter().all(|p| p.parse::<u32>().is_err())); // no numbers } #[tokio::test] async fn test_quote() { let _config = Config::get().await.unwrap(); let s = render("{{ \"quoted'str\" | quote }}"); assert_eq!(s, "'quoted\\'str'"); } #[tokio::test] async fn test_kebabcase() { let _config = Config::get().await.unwrap(); let s = render("{{ \"thisFilter\" | kebabcase }}"); assert_eq!(s, "this-filter"); } #[tokio::test] async fn test_lowercamelcase() { let _config = Config::get().await.unwrap(); let s = render("{{ \"Camel-case\" | lowercamelcase }}"); assert_eq!(s, "camelCase"); } #[tokio::test] async fn test_shoutykebabcase() { let _config = Config::get().await.unwrap(); let s = render("{{ \"kebabCase\" | shoutykebabcase }}"); assert_eq!(s, "KEBAB-CASE"); } #[tokio::test] async fn test_shoutysnakecase() { let _config = Config::get().await.unwrap(); let s = render("{{ \"snakeCase\" | shoutysnakecase }}"); assert_eq!(s, "SNAKE_CASE"); } #[tokio::test] async fn test_snakecase() { let _config = Config::get().await.unwrap(); let s = render("{{ \"snakeCase\" | snakecase }}"); assert_eq!(s, "snake_case"); } #[tokio::test] async fn test_uppercamelcase() { let _config = Config::get().await.unwrap(); let s = render("{{ \"CamelCase\" | uppercamelcase }}"); assert_eq!(s, "CamelCase"); } #[tokio::test] async fn test_hash() { let _config = Config::get().await.unwrap(); let s = render("{{ \"foo\" | hash(len=8) }}"); assert_eq!(s, "04e0bb39"); } #[tokio::test] #[cfg(unix)] async fn test_hash_file() { let _config = Config::get().await.unwrap(); let s = render("{{ \"../fixtures/shorthands.toml\" | hash_file(len=64) }}"); insta::assert_snapshot!(s, 
@"ce17f44735ea2083038e61c4b291ed31593e6cf4d93f5dc147e97e62962ac4e6"); } #[tokio::test] #[cfg(unix)] async fn test_absolute() { let _config = Config::get().await.unwrap(); let s = render("{{ \"/a/b/../c\" | absolute }}"); assert_eq!(s, "/a/c"); // relative path let s = render("{{ \"a/b/../c\" | absolute }}"); assert!(s.ends_with("/a/c")); } #[tokio::test] #[cfg(unix)] async fn test_canonicalize() { let _config = Config::get().await.unwrap(); let s = render("{{ \"../fixtures/shorthands.toml\" | canonicalize }}"); assert!(s.ends_with("/fixtures/shorthands.toml")); // test dir is not deterministic } #[tokio::test] async fn test_dirname() { let _config = Config::get().await.unwrap(); let s = render(r#"{{ "a/b/c" | dirname }}"#); assert_eq!(s, "a/b"); } #[tokio::test] async fn test_basename() { let _config = Config::get().await.unwrap(); let s = render(r#"{{ "a/b/c" | basename }}"#); assert_eq!(s, "c"); } #[tokio::test] async fn test_extname() { let _config = Config::get().await.unwrap(); let s = render(r#"{{ "a/b/c.txt" | extname }}"#); assert_eq!(s, "txt"); } #[tokio::test] async fn test_file_stem() { let _config = Config::get().await.unwrap(); let s = render(r#"{{ "a/b/c.txt" | file_stem }}"#); assert_eq!(s, "c"); } #[tokio::test] #[cfg(unix)] async fn test_file_size() { let _config = Config::get().await.unwrap(); let s = render(r#"{{ "../fixtures/shorthands.toml" | file_size }}"#); assert_eq!(s, "48"); } #[tokio::test] async fn test_last_modified() { let _config = Config::get().await.unwrap(); let s = render(r#"{{ "../fixtures/shorthands.toml" | last_modified }}"#); let timestamp = s.parse::<u64>().unwrap(); assert!((1725000000..=2725000000).contains(&timestamp)); } #[tokio::test] #[cfg(unix)] async fn test_join_path() { let _config = Config::get().await.unwrap(); let s = render(r#"{{ ["..", "fixtures", "shorthands.toml"] | join_path }}"#); assert_eq!(s, "../fixtures/shorthands.toml"); } #[tokio::test] async fn test_is_dir() { let _config = 
Config::get().await.unwrap(); let s = render(r#"{% set p = ".mise" %}{% if p is dir %} ok {% endif %}"#); assert_eq!(s.trim(), "ok"); } #[tokio::test] async fn test_is_file() { let _config = Config::get().await.unwrap(); let s = render(r#"{% set p = ".test-tool-versions" %}{% if p is file %} ok {% endif %}"#); assert_eq!(s.trim(), "ok"); } #[tokio::test] async fn test_exists() { let _config = Config::get().await.unwrap(); let s = render(r#"{% set p = ".test-tool-versions" %}{% if p is exists %} ok {% endif %}"#); assert_eq!(s.trim(), "ok"); } #[tokio::test] async fn test_semver_matching() { let _config = Config::get().await.unwrap(); let s = render( r#"{% set p = "1.10.2" %}{% if p is semver_matching("^1.10.0") %} ok {% endif %}"#, ); assert_eq!(s.trim(), "ok"); } #[tokio::test] #[cfg(unix)] async fn test_read_file() { use std::fs; use tempfile::TempDir; let _config = Config::get().await.unwrap(); // Create a temp directory and test file let temp_dir = TempDir::new().unwrap(); let test_file_path = temp_dir.path().join("test.txt"); fs::write(&test_file_path, "test content\nwith multiple lines").unwrap(); // Test with the temp file let mut tera_ctx = BASE_CONTEXT.clone(); tera_ctx.insert("config_root", &temp_dir.path().to_str().unwrap()); tera_ctx.insert("cwd", temp_dir.path().to_str().unwrap()); let mut tera = get_tera(Some(temp_dir.path())); let s = tera .render_str(r#"{{ read_file(path="test.txt") }}"#, &tera_ctx) .unwrap(); assert_eq!(s, "test content\nwith multiple lines"); // Test with trim filter let s = tera .render_str(r#"{{ read_file(path="test.txt") | trim }}"#, &tera_ctx) .unwrap(); assert_eq!(s, "test content\nwith multiple lines"); } fn render(s: &str) -> String { let config_root = Path::new("/"); let mut tera_ctx = BASE_CONTEXT.clone(); tera_ctx.insert("config_root", &config_root); tera_ctx.insert("cwd", "/"); let mut tera = get_tera(Option::from(config_root)); tera.render_str(s, &tera_ctx).unwrap() } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/file.rs
src/file.rs
use crate::path::{Path, PathBuf, PathExt}; use std::collections::{BTreeSet, HashMap, HashSet}; use std::fmt::Display; use std::fs; use std::fs::File; use std::io::Write; #[cfg(unix)] use std::os::unix::fs::symlink; #[cfg(unix)] use std::os::unix::prelude::*; use std::sync::Mutex; use std::time::Duration; use bzip2::read::BzDecoder; use color_eyre::eyre::{Context, Result}; use eyre::bail; use filetime::{FileTime, set_file_times}; use flate2::read::GzDecoder; use itertools::Itertools; use std::sync::LazyLock as Lazy; use tar::Archive; use walkdir::WalkDir; use zip::ZipArchive; #[cfg(windows)] use crate::config::Settings; use crate::ui::progress_report::SingleReport; use crate::{dirs, env}; pub fn open<P: AsRef<Path>>(path: P) -> Result<File> { let path = path.as_ref(); trace!("open {}", display_path(path)); File::open(path).wrap_err_with(|| format!("failed open: {}", display_path(path))) } pub fn read<P: AsRef<Path>>(path: P) -> Result<Vec<u8>> { let path = path.as_ref(); trace!("cat {}", display_path(path)); fs::read(path).wrap_err_with(|| format!("failed read: {}", display_path(path))) } pub fn size<P: AsRef<Path>>(path: P) -> Result<u64> { let path = path.as_ref(); trace!("du -b {}", display_path(path)); path.metadata() .map(|m| m.len()) .wrap_err_with(|| format!("failed size: {}", display_path(path))) } pub fn append<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { let path = path.as_ref(); trace!("append {}", display_path(path)); fs::OpenOptions::new() .append(true) .create(true) .open(path) .and_then(|mut f| f.write_all(contents.as_ref())) .wrap_err_with(|| format!("failed append: {}", display_path(path))) } pub fn remove_all<P: AsRef<Path>>(path: P) -> Result<()> { let path = path.as_ref(); match path.metadata().map(|m| m.file_type()) { Ok(x) if x.is_symlink() || x.is_file() => { remove_file(path)?; } Ok(x) if x.is_dir() => { trace!("rm -rf {}", display_path(path)); fs::remove_dir_all(path) .wrap_err_with(|| format!("failed rm -rf: {}", 
display_path(path)))?; } _ => {} }; Ok(()) } pub fn remove_file_or_dir<P: AsRef<Path>>(path: P) -> Result<()> { let path = path.as_ref(); match path.metadata().map(|m| m.file_type()) { Ok(x) if x.is_dir() => { remove_dir(path)?; } _ => { remove_file(path)?; } }; Ok(()) } pub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> { let path = path.as_ref(); trace!("rm {}", display_path(path)); fs::remove_file(path).wrap_err_with(|| format!("failed rm: {}", display_path(path))) } pub async fn remove_file_async<P: AsRef<Path>>(path: P) -> Result<()> { let path = path.as_ref(); trace!("rm {}", display_path(path)); tokio::fs::remove_file(path) .await .wrap_err_with(|| format!("failed rm: {}", display_path(path))) } pub fn remove_dir<P: AsRef<Path>>(path: P) -> Result<()> { let path = path.as_ref(); (|| -> Result<()> { if path.exists() && is_empty_dir(path)? { trace!("rmdir {}", display_path(path)); fs::remove_dir(path)?; } Ok(()) })() .wrap_err_with(|| format!("failed to remove_dir: {}", display_path(path))) } pub fn remove_dir_ignore<P: AsRef<Path>>(path: P, is_empty_ignore_files: Vec<&str>) -> Result<()> { let path = path.as_ref(); (|| -> Result<()> { if path.exists() && is_empty_dir_ignore(path, is_empty_ignore_files)? 
{ trace!("rm -rf {}", display_path(path)); remove_all_with_warning(path)?; } Ok(()) })() .wrap_err_with(|| format!("failed to remove_dir: {}", display_path(path))) } pub fn remove_all_with_warning<P: AsRef<Path>>(path: P) -> Result<()> { remove_all(&path).map_err(|e| { warn!("failed to remove {}: {}", path.as_ref().display(), e); e }) } pub fn rename<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<()> { let from = from.as_ref(); let to = to.as_ref(); trace!("mv {} {}", from.display(), to.display()); fs::rename(from, to).wrap_err_with(|| { format!( "failed rename: {} -> {}", display_path(from), display_path(to) ) }) } pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<()> { let from = from.as_ref(); let to = to.as_ref(); trace!("cp {} {}", from.display(), to.display()); fs::copy(from, to) .wrap_err_with(|| { format!( "failed copy: {} -> {}", display_path(from), display_path(to) ) }) .map(|_| ()) } pub fn copy_dir_all<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<()> { let from = from.as_ref(); let to = to.as_ref(); trace!("cp -r {} {}", from.display(), to.display()); recursive_ls(from)?.into_iter().try_for_each(|path| { let relative = path.strip_prefix(from)?; let dest = to.join(relative); create_dir_all(dest.parent().unwrap())?; copy(&path, &dest)?; Ok(()) }) } pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { let path = path.as_ref(); trace!("write {}", display_path(path)); fs::write(path, contents).wrap_err_with(|| format!("failed write: {}", display_path(path))) } pub async fn write_async<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { let path = path.as_ref(); trace!("write {}", display_path(path)); tokio::fs::write(path, contents) .await .wrap_err_with(|| format!("failed write: {}", display_path(path))) } pub fn read_to_string<P: AsRef<Path>>(path: P) -> Result<String> { let path = path.as_ref(); trace!("cat {}", path.display_user()); fs::read_to_string(path) 
.wrap_err_with(|| format!("failed read_to_string: {}", path.display_user())) } pub async fn read_to_string_async<P: AsRef<Path>>(path: P) -> Result<String> { let path = path.as_ref(); trace!("cat {}", path.display_user()); tokio::fs::read_to_string(path) .await .wrap_err_with(|| format!("failed read_to_string: {}", path.display_user())) } pub fn create(path: &Path) -> Result<File> { if let Some(parent) = path.parent() { create_dir_all(parent)?; } trace!("touch {}", display_path(path)); File::create(path).wrap_err_with(|| format!("failed create: {}", display_path(path))) } pub fn create_dir_all<P: AsRef<Path>>(path: P) -> Result<()> { static LOCK: Lazy<Mutex<u8>> = Lazy::new(Default::default); let _lock = LOCK.lock().unwrap(); let path = path.as_ref(); if !path.exists() { trace!("mkdir -p {}", display_path(path)); if let Err(err) = fs::create_dir_all(path) { // if not exists error if err.kind() != std::io::ErrorKind::AlreadyExists { return Err(err) .wrap_err_with(|| format!("failed create_dir_all: {}", display_path(path))); } } } Ok(()) } /// replaces $HOME with "~" pub fn display_path<P: AsRef<Path>>(path: P) -> String { path.as_ref().display_user() } pub fn display_rel_path<P: AsRef<Path>>(path: P) -> String { let path = path.as_ref(); match path.strip_prefix(dirs::CWD.as_ref().unwrap()) { Ok(rel) => format!("./{}", rel.display()), Err(_) => display_path(path), } } /// replaces $HOME in a string with "~" and $PATH with "$PATH", generally used to clean up output /// after it is rendered pub fn replace_paths_in_string<S: Display>(input: S) -> String { let home = env::HOME.to_string_lossy().to_string(); input.to_string().replace(&home, "~") } /// replaces "~" with $HOME pub fn replace_path<P: AsRef<Path>>(path: P) -> PathBuf { let path = path.as_ref(); match path.starts_with("~/") { true => dirs::HOME.join(path.strip_prefix("~/").unwrap()), false => path.to_path_buf(), } } pub fn touch_file(file: &Path) -> Result<()> { if !file.exists() { create(file)?; return 
Ok(()); } trace!("touch_file {}", file.display()); let now = FileTime::now(); set_file_times(file, now, now) .wrap_err_with(|| format!("failed to touch file: {}", display_path(file))) } pub fn touch_dir(dir: &Path) -> Result<()> { trace!("touch {}", dir.display()); let now = FileTime::now(); set_file_times(dir, now, now) .wrap_err_with(|| format!("failed to touch dir: {}", display_path(dir))) } /// Synchronizes a directory to disk, ensuring that filesystem metadata changes /// (such as file creations or deletions) are persisted. /// /// This is important after operations like removing files to ensure the changes /// are immediately visible to other processes, e.g. to avoid race conditions. /// /// # Platform-specific behavior /// /// - **Unix/Linux**: Performs an fsync on the directory file descriptor, which /// ensures directory metadata (like file listings) is written to disk. /// - **Windows**: Not implemented (no-op). /// /// # Errors /// /// On Unix systems, returns an error if the directory cannot be opened or synced. /// On Windows, always succeeds. 
#[cfg(unix)] pub fn sync_dir<P: AsRef<Path>>(path: P) -> Result<()> { let path = path.as_ref(); trace!("sync {}", display_path(path)); let dir = File::open(path) .wrap_err_with(|| format!("failed to open dir for sync: {}", display_path(path)))?; dir.sync_all() .wrap_err_with(|| format!("failed to sync dir: {}", display_path(path))) } #[cfg(windows)] pub fn sync_dir<P: AsRef<Path>>(_path: P) -> Result<()> { // Not implemented on Windows Ok(()) } pub fn modified_duration(path: &Path) -> Result<Duration> { let metadata = path.metadata()?; let modified = metadata.modified()?; let duration = modified.elapsed().unwrap_or_default(); Ok(duration) } pub fn find_up<FN: AsRef<str>>(from: &Path, filenames: &[FN]) -> Option<PathBuf> { let mut current = from.to_path_buf(); loop { for filename in filenames { let path = current.join(filename.as_ref()); if path.exists() { return Some(path); } } if !current.pop() { return None; } } } pub fn dir_subdirs(dir: &Path) -> Result<BTreeSet<String>> { let mut output = Default::default(); if !dir.exists() { return Ok(output); } for entry in dir.read_dir()? { let entry = entry?; let ft = entry.file_type()?; if ft.is_dir() || (ft.is_symlink() && entry.path().is_dir()) { output.insert(entry.file_name().into_string().unwrap()); } } Ok(output) } pub fn ls(dir: &Path) -> Result<BTreeSet<PathBuf>> { let mut output = Default::default(); if !dir.is_dir() { return Ok(output); } for entry in dir.read_dir()? { let entry = entry?; output.insert(entry.path()); } Ok(output) } pub fn recursive_ls(dir: &Path) -> Result<BTreeSet<PathBuf>> { if !dir.is_dir() { return Ok(Default::default()); } Ok(WalkDir::new(dir) .follow_links(true) .into_iter() .filter_ok(|e| e.file_type().is_file()) .map_ok(|e| e.path().to_path_buf()) .try_collect()?) 
} #[cfg(unix)] pub fn make_symlink(target: &Path, link: &Path) -> Result<(PathBuf, PathBuf)> { trace!("ln -sf {} {}", target.display(), link.display()); if link.is_file() || link.is_symlink() { fs::remove_file(link)?; } symlink(target, link) .wrap_err_with(|| format!("failed to ln -sf {} {}", target.display(), link.display()))?; Ok((target.to_path_buf(), link.to_path_buf())) } #[cfg(unix)] pub fn make_symlink_or_copy(target: &Path, link: &Path) -> Result<()> { make_symlink(target, link)?; Ok(()) } #[cfg(windows)] pub fn make_symlink_or_copy(target: &Path, link: &Path) -> Result<()> { copy(target, link)?; Ok(()) } #[cfg(windows)] pub fn make_symlink(target: &Path, link: &Path) -> Result<(PathBuf, PathBuf)> { if let Err(err) = junction::create(target, link) { if err.kind() == std::io::ErrorKind::AlreadyExists { let _ = fs::remove_file(link); junction::create(target, link) } else { Err(err) } } else { Ok(()) } .wrap_err_with(|| format!("failed to ln -sf {} {}", target.display(), link.display()))?; Ok((target.to_path_buf(), link.to_path_buf())) } #[cfg(windows)] pub fn make_symlink_or_file(target: &Path, link: &Path) -> Result<()> { trace!("ln -sf {} {}", target.display(), link.display()); if link.is_file() || link.is_symlink() { // remove existing file if exists fs::remove_file(link)?; } xx::file::write(link, target.to_string_lossy().to_string())?; Ok(()) } pub fn resolve_symlink(link: &Path) -> Result<Option<PathBuf>> { // Windows symlink are write in file currently // may be changed to symlink in the future if link.is_symlink() { Ok(Some(fs::read_link(link)?)) } else if link.is_file() { Ok(Some(fs::read_to_string(link)?.into())) } else { Ok(None) } } #[cfg(unix)] pub fn make_symlink_or_file(target: &Path, link: &Path) -> Result<()> { make_symlink(target, link)?; Ok(()) } pub fn remove_symlinks_with_target_prefix( symlink_dir: &Path, target_prefix: &Path, ) -> Result<Vec<PathBuf>> { if !symlink_dir.exists() { return Ok(vec![]); } let mut removed = vec![]; for entry 
in symlink_dir.read_dir()? { let entry = entry?; let path = entry.path(); if path.is_symlink() { let target = path.read_link()?; if target.starts_with(target_prefix) { fs::remove_file(&path)?; removed.push(path); } } } Ok(removed) } #[cfg(unix)] pub fn is_executable(path: &Path) -> bool { if let Ok(metadata) = path.metadata() { return metadata.permissions().mode() & 0o111 != 0; } false } #[cfg(windows)] pub fn is_executable(path: &Path) -> bool { path.extension().map_or( Settings::get() .windows_executable_extensions .contains(&String::new()), |ext| { if let Some(str_val) = ext.to_str() { return Settings::get() .windows_executable_extensions .contains(&str_val.to_lowercase().to_string()); } false }, ) } #[cfg(unix)] pub fn make_executable<P: AsRef<Path>>(path: P) -> Result<()> { trace!("chmod +x {}", display_path(&path)); let path = path.as_ref(); let mut perms = path.metadata()?.permissions(); perms.set_mode(perms.mode() | 0o111); fs::set_permissions(path, perms) .wrap_err_with(|| format!("failed to chmod +x: {}", display_path(path)))?; Ok(()) } #[cfg(windows)] pub fn make_executable<P: AsRef<Path>>(_path: P) -> Result<()> { Ok(()) } #[cfg(unix)] pub async fn make_executable_async<P: AsRef<Path>>(path: P) -> Result<()> { trace!("chmod +x {}", display_path(&path)); let path = path.as_ref(); let mut perms = path.metadata()?.permissions(); perms.set_mode(perms.mode() | 0o111); tokio::fs::set_permissions(path, perms) .await .wrap_err_with(|| format!("failed to chmod +x: {}", display_path(path))) } #[cfg(windows)] pub async fn make_executable_async<P: AsRef<Path>>(_path: P) -> Result<()> { Ok(()) } pub fn all_dirs<P: AsRef<Path>>( start_dir: P, ceiling_dirs: &HashSet<PathBuf>, ) -> Result<Vec<PathBuf>> { trace!( "file::all_dirs Collecting all ancestors of {} until ceiling {:?}", display_path(&start_dir), ceiling_dirs ); Ok(start_dir .as_ref() .ancestors() .map_while(|p| { if ceiling_dirs.contains(p) { debug!( "file::all_dirs Reached ceiling directory: {}", 
display_path(p) ); None } else { trace!( "file::all_dirs Adding ancestor directory: {}", display_path(p) ); Some(p.to_path_buf()) } }) .collect()) } fn is_empty_dir(path: &Path) -> Result<bool> { path.read_dir() .map(|mut i| i.next().is_none()) .wrap_err_with(|| format!("failed to read_dir: {}", display_path(path))) } fn is_empty_dir_ignore(path: &Path, ignore_files: Vec<&str>) -> Result<bool> { path.read_dir() .map(|mut i| { i.all(|entry| match entry { Ok(entry) => ignore_files.iter().any(|ignore_file| { entry .file_name() .to_string_lossy() .eq_ignore_ascii_case(ignore_file) }), Err(_) => false, }) }) .wrap_err_with(|| format!("failed to read_dir: {}", display_path(path))) } pub struct FindUp { current_dir: PathBuf, current_dir_filenames: Vec<String>, filenames: Vec<String>, } impl FindUp { pub fn new(from: &Path, filenames: &[String]) -> Self { let filenames: Vec<String> = filenames.iter().map(|s| s.to_string()).collect(); Self { current_dir: from.to_path_buf(), filenames: filenames.clone(), current_dir_filenames: filenames, } } } impl Iterator for FindUp { type Item = PathBuf; fn next(&mut self) -> Option<Self::Item> { while let Some(filename) = self.current_dir_filenames.pop() { let path = self.current_dir.join(filename); if path.is_file() { return Some(path); } } self.current_dir_filenames.clone_from(&self.filenames); if cfg!(test) && self.current_dir == *dirs::HOME { return None; // in tests, do not recurse further than ./test } if !self.current_dir.pop() { return None; } self.next() } } /// returns the first executable in PATH /// will not include mise bin paths or other paths added by mise pub fn which<P: AsRef<Path>>(name: P) -> Option<PathBuf> { static CACHE: Lazy<Mutex<HashMap<PathBuf, Option<PathBuf>>>> = Lazy::new(Default::default); let name = name.as_ref(); if let Some(path) = CACHE.lock().unwrap().get(name) { return path.clone(); } let path = _which(name, &env::PATH); CACHE .lock() .unwrap() .insert(name.to_path_buf(), path.clone()); path } /// 
returns the first executable in PATH /// will include mise bin paths or other paths added by mise pub fn which_non_pristine<P: AsRef<Path>>(name: P) -> Option<PathBuf> { _which(name, &env::PATH_NON_PRISTINE) } fn _which<P: AsRef<Path>>(name: P, paths: &[PathBuf]) -> Option<PathBuf> { let name = name.as_ref(); paths.iter().find_map(|path| { let bin = path.join(name); if is_executable(&bin) { Some(bin) } else { None } }) } pub fn un_gz(input: &Path, dest: &Path) -> Result<()> { debug!("gunzip {} > {}", input.display(), dest.display()); let f = File::open(input)?; let mut dec = GzDecoder::new(f); let mut output = File::create(dest)?; std::io::copy(&mut dec, &mut output) .wrap_err_with(|| format!("failed to un-gzip: {}", display_path(input)))?; Ok(()) } pub fn un_xz(input: &Path, dest: &Path) -> Result<()> { debug!("xz -d {} -c > {}", input.display(), dest.display()); let f = File::open(input)?; let mut dec = xz2::read::XzDecoder::new(f); let mut output = File::create(dest)?; std::io::copy(&mut dec, &mut output) .wrap_err_with(|| format!("failed to un-xz: {}", display_path(input)))?; Ok(()) } pub fn un_zst(input: &Path, dest: &Path) -> Result<()> { debug!("zstd -d {} -c > {}", input.display(), dest.display()); let f = File::open(input)?; let mut dec = zstd::Decoder::new(f)?; let mut output = File::create(dest)?; std::io::copy(&mut dec, &mut output) .wrap_err_with(|| format!("failed to un-zst: {}", display_path(input)))?; Ok(()) } pub fn un_bz2(input: &Path, dest: &Path) -> Result<()> { debug!("bzip2 -d {} -c > {}", input.display(), dest.display()); let f = File::open(input)?; let mut dec = BzDecoder::new(f); let mut output = File::create(dest)?; std::io::copy(&mut dec, &mut output) .wrap_err_with(|| format!("failed to un-bz2: {}", display_path(input)))?; Ok(()) } #[derive(Default, Clone, Copy, PartialEq, strum::EnumString, strum::Display)] pub enum TarFormat { #[default] Auto, #[strum(serialize = "tar.gz")] TarGz, #[strum(serialize = "tar.xz")] TarXz, #[strum(serialize 
= "tar.bz2")] TarBz2, #[strum(serialize = "tar.zst")] TarZst, #[strum(serialize = "zip")] Zip, #[strum(serialize = "7z")] SevenZip, #[strum(serialize = "raw")] Raw, } impl TarFormat { pub fn from_ext(ext: &str) -> Self { match ext { "gz" | "tgz" => TarFormat::TarGz, "xz" | "txz" => TarFormat::TarXz, "bz2" | "tbz2" => TarFormat::TarBz2, "zst" | "tzst" => TarFormat::TarZst, "zip" => TarFormat::Zip, "7z" => TarFormat::SevenZip, _ => TarFormat::Raw, } } } pub struct TarOptions<'a> { pub format: TarFormat, pub strip_components: usize, pub pr: Option<&'a dyn SingleReport>, /// When false, files will be extracted with current timestamp instead of archive's mtime pub preserve_mtime: bool, } impl<'a> Default for TarOptions<'a> { fn default() -> Self { Self { format: TarFormat::default(), strip_components: 0, pr: None, preserve_mtime: true, // Default to preserving mtime for backward compatibility } } } pub fn untar(archive: &Path, dest: &Path, opts: &TarOptions) -> Result<()> { let format = match opts.format { TarFormat::Auto => { // Handle missing extension gracefully, default to Raw (which will be treated as tar.gz) match archive.extension() { Some(ext) => TarFormat::from_ext(&ext.to_string_lossy()), None => TarFormat::Raw, } } _ => opts.format, }; if format == TarFormat::Zip { return unzip( archive, dest, &ZipOptions { strip_components: opts.strip_components, }, ); } else if format == TarFormat::SevenZip { #[cfg(windows)] return un7z( archive, dest, &SevenZipOptions { strip_components: opts.strip_components, }, ); } debug!("tar -xf {} -C {}", archive.display(), dest.display()); if let Some(pr) = &opts.pr { pr.set_message(format!( "extract {}", archive.file_name().unwrap().to_string_lossy() )); } let tar = open_tar(format, archive)?; let err = || { let archive = display_path(archive); let dest = display_path(dest); format!("failed to extract tar: {archive} to {dest}") }; // TODO: put this back in when we can read+write in parallel // let mut cur = Cursor::new(vec![]); // 
let mut total = 0; // loop { // let mut buf = Cursor::new(vec![0; 1024 * 1024]); // let n = tar.read(buf.get_mut()).wrap_err_with(err)?; // cur.get_mut().extend_from_slice(&buf.get_ref()[..n]); // if n == 0 { // break; // } // if let Some(pr) = &opts.pr { // total += n as u64; // pr.set_length(total); // } // } create_dir_all(dest).wrap_err_with(err)?; // Set progress length once at the beginning with archive size if let Some(pr) = &opts.pr && let Ok(metadata) = archive.metadata() { pr.set_length(metadata.len()); } // Try to extract using the tar crate, detecting sparse files during extraction let mut needs_system_tar = false; for entry in Archive::new(tar).entries().wrap_err_with(err)? { let mut entry = entry.wrap_err_with(err)?; // Check if this is a GNU sparse file if entry.header().entry_type().is_gnu_sparse() { debug!("Detected GNU sparse file, falling back to system tar"); needs_system_tar = true; // Clean up any partial extraction remove_all(dest)?; create_dir_all(dest)?; break; } // Configure mtime preservation based on options entry.set_preserve_mtime(opts.preserve_mtime); trace!("extracting {}", entry.path().wrap_err_with(err)?.display()); entry.unpack_in(dest).wrap_err_with(err)?; // Update position as we extract files if let Some(pr) = &opts.pr { pr.set_position(entry.raw_file_position()); } } // Check for the GNUSparseFile.0 directory which indicates the tar crate // incorrectly handled a sparse file if !needs_system_tar { let sparse_dir = dest.join("GNUSparseFile.0"); if sparse_dir.exists() && sparse_dir.is_dir() { debug!("Found GNUSparseFile.0 directory, using system tar"); needs_system_tar = true; // Clean up the bad extraction remove_all(dest)?; create_dir_all(dest)?; } } if needs_system_tar { // Use system tar for archives with problematic sparse files // The tar crate doesn't properly handle certain GNU sparse formats debug!("Using system tar for: {}", archive.display()); // When preserve_mtime is false, use -m flag to not restore modification 
times // This causes extracted files to have current time, which is important for // cache invalidation and autopruning. Works on both BSD and GNU tar. if !opts.preserve_mtime { cmd!("tar", "-mxf", archive, "-C", dest) .run() .wrap_err_with(|| { format!("Failed to extract {} using system tar", archive.display()) })?; } else { cmd!("tar", "-xf", archive, "-C", dest) .run() .wrap_err_with(|| { format!("Failed to extract {} using system tar", archive.display()) })?; } } // Always use our manual strip to ensure consistent behavior across backends strip_archive_path_components(dest, opts.strip_components).wrap_err_with(err)?; Ok(()) } fn open_tar(format: TarFormat, archive: &Path) -> Result<Box<dyn std::io::Read>> { let f = File::open(archive)?; Ok(match format { // TODO: we probably shouldn't assume raw is tar.gz, but this was to retain existing behavior TarFormat::TarGz | TarFormat::Raw => Box::new(GzDecoder::new(f)), TarFormat::TarXz => Box::new(xz2::read::XzDecoder::new(f)), TarFormat::TarBz2 => Box::new(BzDecoder::new(f)), TarFormat::TarZst => Box::new(zstd::stream::read::Decoder::new(f)?), TarFormat::Zip => bail!("zip format not supported"), TarFormat::SevenZip => bail!("7z format not supported"), TarFormat::Auto => match archive.extension().and_then(|s| s.to_str()) { Some("xz") => open_tar(TarFormat::TarXz, archive)?, Some("bz2") => open_tar(TarFormat::TarBz2, archive)?, Some("zst") => open_tar(TarFormat::TarZst, archive)?, Some("zip") => bail!("zip format not supported"), _ => open_tar(TarFormat::TarGz, archive)?, }, }) } fn strip_archive_path_components(dir: &Path, strip_depth: usize) -> Result<()> { if strip_depth == 0 { return Ok(()); } if strip_depth > 1 { bail!("strip-components > 1 is not supported"); } let top_level_paths = ls(dir)?; for path in top_level_paths { if !path.symlink_metadata()?.is_dir() { continue; } // rename the directory to a temp name to avoid conflicts when moving files let temp_path = path.with_file_name(format!( "{}_tmp_strip", 
path.file_name().unwrap().to_string_lossy() )); fs::rename(&path, &temp_path)?; for entry in ls(&temp_path)? { if let Some(file_name) = entry.file_name() { let dest_path = dir.join(file_name); fs::rename(entry, dest_path)?; } else { continue; } } remove_dir(temp_path)?; } Ok(()) } #[derive(Default)] pub struct ZipOptions { pub strip_components: usize, } pub fn unzip(archive: &Path, dest: &Path, opts: &ZipOptions) -> Result<()> { // TODO: show progress debug!("unzip {} -d {}", archive.display(), dest.display()); ZipArchive::new(File::open(archive)?) .wrap_err_with(|| format!("failed to open zip archive: {}", display_path(archive)))? .extract(dest) .wrap_err_with(|| format!("failed to extract zip archive: {}", display_path(archive)))?; strip_archive_path_components(dest, opts.strip_components).wrap_err_with(|| { format!( "failed to strip path components from zip archive: {}", display_path(archive) ) }) } pub fn un_dmg(archive: &Path, dest: &Path) -> Result<()> { debug!( "hdiutil attach -quiet -nobrowse -mountpoint {} {}", dest.display(), archive.display() ); let tmp = tempfile::TempDir::new()?; cmd!( "hdiutil", "attach", "-quiet", "-nobrowse", "-mountpoint", tmp.path(), archive.to_path_buf() ) .run()?; copy_dir_all(tmp.path(), dest)?; cmd!("hdiutil", "detach", tmp.path()).run()?; Ok(()) } pub fn un_pkg(archive: &Path, dest: &Path) -> Result<()> { debug!( "pkgutil --expand-full {} {}", archive.display(), dest.display() ); cmd!("pkgutil", "--expand-full", archive, dest).run()?; Ok(()) } #[cfg(windows)] #[derive(Default)] pub struct SevenZipOptions { pub strip_components: usize, } #[cfg(windows)] pub fn un7z(archive: &Path, dest: &Path, opts: &SevenZipOptions) -> Result<()> { sevenz_rust::decompress_file(archive, dest) .wrap_err_with(|| format!("failed to extract 7z archive: {}", display_path(archive)))?; strip_archive_path_components(dest, opts.strip_components).wrap_err_with(|| { format!( "failed to strip path components from 7z archive: {}", display_path(archive) ) 
}) } pub fn split_file_name(path: &Path) -> (String, String) { let file_name = path.file_name().unwrap().to_string_lossy(); let (file_name_base, ext) = file_name .split_once('.') .unwrap_or((file_name.as_ref(), "")); (file_name_base.to_string(), ext.to_string()) } pub fn same_file(a: &Path, b: &Path) -> bool { desymlink_path(a) == desymlink_path(b) } pub fn desymlink_path(p: &Path) -> PathBuf { if p.is_symlink() && let Ok(target) = fs::read_link(p) { return target .canonicalize() .unwrap_or_else(|_| target.to_path_buf()); } p.canonicalize().unwrap_or_else(|_| p.to_path_buf()) } pub fn clone_dir(from: &PathBuf, to: &PathBuf) -> Result<()> { if cfg!(macos) { cmd!("cp", "-cR", from, to).run()?; } else if cfg!(windows) { cmd!("robocopy", from, to, "/MIR").run()?; } else { cmd!("cp", "--reflink=auto", "-r", from, to).run()?; } Ok(()) } /// Inspects the top-level contents of a tar archive without extracting it pub fn inspect_tar_contents(archive: &Path, format: TarFormat) -> Result<Vec<(String, bool)>> { let tar = open_tar(format, archive)?; let mut archive = Archive::new(tar); let mut top_level_components = std::collections::HashMap::new(); for entry in archive.entries()? { let entry = entry?; let path = entry.path()?; let header = entry.header(); // Get the first component of the path (top-level directory/file) if let Some(first_component) = path.components().next() {
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
true
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/parallel.rs
src/parallel.rs
use crate::Result; use crate::config::Settings; use std::sync::Arc; use tokio::sync::Semaphore; use tokio::task::JoinSet; pub async fn parallel<T, F, Fut, U>(input: Vec<T>, f: F) -> Result<Vec<U>> where T: Send + 'static, U: Send + 'static, F: Fn(T) -> Fut + Send + Copy + 'static, Fut: Future<Output = Result<U>> + Send + 'static, { let semaphore = Arc::new(Semaphore::new(Settings::get().jobs)); let mut jset = JoinSet::new(); let mut results = input.iter().map(|_| None).collect::<Vec<_>>(); for item in input.into_iter().enumerate() { let semaphore = semaphore.clone(); let permit = semaphore.acquire_owned().await?; jset.spawn(async move { let _permit = permit; let res = f(item.1).await?; Ok((item.0, res)) }); } while let Some(result) = jset.join_next().await { let err: eyre::Report = match result { Ok(Ok((i, result))) => { results[i] = Some(result); continue; } Ok(Err(e)) => e, Err(e) => e.into(), }; jset.abort_all(); // Drain remaining tasks - don't use join_all() as it panics on cancelled tasks while jset.join_next().await.is_some() {} return Err(err); } Ok(results.into_iter().flatten().collect()) } #[cfg(test)] mod tests { use super::*; use tokio::test; #[test] async fn test_parallel() { let input = vec![1, 2, 3, 4, 5]; let results = parallel(input, |x| async move { Ok(x * 2) }).await.unwrap(); assert_eq!(results, vec![2, 4, 6, 8, 10]); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/result.rs
src/result.rs
pub type Result<T> = eyre::Result<T>;
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/rand.rs
src/rand.rs
use rand::Rng; use rand::distr::Alphanumeric; pub fn random_string(length: usize) -> String { rand::rng() .sample_iter(&Alphanumeric) .take(length) .map(char::from) .collect::<String>() }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/minisign.rs
src/minisign.rs
use crate::*; use minisign_verify::*; use std::iter::Iterator; use std::sync::LazyLock; pub static MISE_PUB_KEY: LazyLock<String> = LazyLock::new(|| { include_str!("../minisign.pub") .to_string() .lines() .last() .unwrap() .to_string() }); pub fn verify(pub_key: &str, data: &[u8], sig: &str) -> Result<()> { let public_key = PublicKey::from_base64(pub_key)?; let signature = Signature::decode(sig)?; public_key.verify(data, &signature, false)?; Ok(()) }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/hooks.rs
src/hooks.rs
use crate::cmd::cmd; use crate::config::{Config, Settings, config_file}; use crate::shell::Shell; use crate::toolset::{ToolVersion, Toolset}; use crate::{dirs, hook_env}; use eyre::{Result, eyre}; use indexmap::IndexSet; use itertools::Itertools; use std::path::{Path, PathBuf}; use std::sync::LazyLock as Lazy; use std::sync::Mutex; use std::{iter::once, sync::Arc}; use tokio::sync::OnceCell; /// Represents installed tool info for hooks #[derive(Debug, Clone, serde::Serialize)] pub struct InstalledToolInfo { pub name: String, pub version: String, } impl From<&ToolVersion> for InstalledToolInfo { fn from(tv: &ToolVersion) -> Self { Self { name: tv.ba().short.clone(), version: tv.version.clone(), } } } #[derive( Debug, Clone, Copy, serde::Serialize, serde::Deserialize, strum::Display, Ord, PartialOrd, Eq, PartialEq, Hash, )] #[serde(rename_all = "lowercase")] pub enum Hooks { Enter, Leave, Cd, Preinstall, Postinstall, } #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct Hook { pub hook: Hooks, pub script: String, pub shell: Option<String>, } pub static SCHEDULED_HOOKS: Lazy<Mutex<IndexSet<Hooks>>> = Lazy::new(Default::default); pub fn schedule_hook(hook: Hooks) { let mut mu = SCHEDULED_HOOKS.lock().unwrap(); mu.insert(hook); } pub async fn run_all_hooks(config: &Arc<Config>, ts: &Toolset, shell: &dyn Shell) { let hooks = { let mut mu = SCHEDULED_HOOKS.lock().unwrap(); mu.drain(..).collect::<Vec<_>>() }; for hook in hooks { run_one_hook(config, ts, hook, Some(shell)).await; } } async fn all_hooks(config: &Arc<Config>) -> &'static Vec<(PathBuf, Hook)> { static ALL_HOOKS: OnceCell<Vec<(PathBuf, Hook)>> = OnceCell::const_new(); ALL_HOOKS .get_or_init(async || { let mut hooks = config.hooks().await.cloned().unwrap_or_default(); let cur_configs = config.config_files.keys().cloned().collect::<IndexSet<_>>(); let prev_configs = &hook_env::PREV_SESSION.loaded_configs; let old_configs = prev_configs.difference(&cur_configs); for p in old_configs { if let Ok(cf) = 
config_file::parse(p).await && let Ok(h) = cf.hooks() { hooks.extend(h.into_iter().map(|h| (cf.config_root(), h))); } } hooks }) .await } #[async_backtrace::framed] pub async fn run_one_hook( config: &Arc<Config>, ts: &Toolset, hook: Hooks, shell: Option<&dyn Shell>, ) { run_one_hook_with_context(config, ts, hook, shell, None).await; } /// Run a hook with optional installed tools context (for postinstall hooks) #[async_backtrace::framed] pub async fn run_one_hook_with_context( config: &Arc<Config>, ts: &Toolset, hook: Hooks, shell: Option<&dyn Shell>, installed_tools: Option<&[InstalledToolInfo]>, ) { for (root, h) in all_hooks(config).await { if hook != h.hook || (h.shell.is_some() && h.shell != shell.map(|s| s.to_string())) { continue; } trace!("running hook {hook} in {root:?}"); match (hook, hook_env::dir_change()) { (Hooks::Enter, Some((old, new))) => { if !new.starts_with(root) { continue; } if old.as_ref().is_some_and(|old| old.starts_with(root)) { continue; } } (Hooks::Leave, Some((old, new))) => { if new.starts_with(root) { continue; } if old.as_ref().is_some_and(|old| !old.starts_with(root)) { continue; } } (Hooks::Cd, Some((_old, new))) => { if !new.starts_with(root) { continue; } } _ => {} } if h.shell.is_some() { println!("{}", h.script); } else if let Err(e) = execute(config, ts, root, h, installed_tools).await { warn!("error executing hook: {e}"); } } } impl Hook { pub fn from_toml(hook: Hooks, value: toml::Value) -> Result<Vec<Self>> { match value { toml::Value::String(run) => Ok(vec![Hook { hook, script: run, shell: None, }]), toml::Value::Table(tbl) => { let script = tbl .get("script") .ok_or_else(|| eyre!("missing `script` key"))?; let script = script .as_str() .ok_or_else(|| eyre!("`run` must be a string"))?; let shell = tbl .get("shell") .and_then(|s| s.as_str()) .map(|s| s.to_string()); Ok(vec![Hook { hook, script: script.to_string(), shell, }]) } toml::Value::Array(arr) => { let mut hooks = vec![]; for v in arr { 
hooks.extend(Self::from_toml(hook, v)?); } Ok(hooks) } v => panic!("invalid hook value: {v}"), } } } async fn execute( config: &Arc<Config>, ts: &Toolset, root: &Path, hook: &Hook, installed_tools: Option<&[InstalledToolInfo]>, ) -> Result<()> { Settings::get().ensure_experimental("hooks")?; let shell = Settings::get().default_inline_shell()?; let args = shell .iter() .skip(1) .map(|s| s.as_str()) .chain(once(hook.script.as_str())) .collect_vec(); let mut env = ts.full_env(config).await?; if let Some(cwd) = dirs::CWD.as_ref() { env.insert( "MISE_ORIGINAL_CWD".to_string(), cwd.to_string_lossy().to_string(), ); } env.insert( "MISE_PROJECT_ROOT".to_string(), root.to_string_lossy().to_string(), ); if let Some((Some(old), _new)) = hook_env::dir_change() { env.insert( "MISE_PREVIOUS_DIR".to_string(), old.to_string_lossy().to_string(), ); } // Add installed tools info for postinstall hooks if let Some(tools) = installed_tools && let Ok(json) = serde_json::to_string(tools) { env.insert("MISE_INSTALLED_TOOLS".to_string(), json); } // TODO: this should be different but I don't have easy access to it // env.insert("MISE_CONFIG_ROOT".to_string(), root.to_string_lossy().to_string()); cmd(&shell[0], args) .stdout_to_stderr() // .dir(root) .full_env(env) .run()?; Ok(()) }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/registry.rs
src/registry.rs
use crate::backend::backend_type::BackendType; use crate::cli::args::BackendArg; use crate::config::Settings; use crate::toolset::ToolVersionOptions; use heck::ToShoutySnakeCase; use indexmap::IndexMap; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::env; use std::env::consts::{ARCH, OS}; use std::fmt::Display; use std::iter::Iterator; use std::sync::{LazyLock as Lazy, Mutex}; use strum::IntoEnumIterator; use url::Url; // the registry is generated from registry.toml in the project root pub static REGISTRY: Lazy<BTreeMap<&'static str, RegistryTool>> = Lazy::new(|| include!(concat!(env!("OUT_DIR"), "/registry.rs"))); #[derive(Debug, Clone)] pub struct RegistryTool { pub short: &'static str, pub description: Option<&'static str>, pub backends: &'static [RegistryBackend], #[allow(unused)] pub aliases: &'static [&'static str], pub test: &'static Option<(&'static str, &'static str)>, pub os: &'static [&'static str], pub depends: &'static [&'static str], pub idiomatic_files: &'static [&'static str], } #[derive(Debug, Clone)] pub struct RegistryBackend { pub full: &'static str, pub platforms: &'static [&'static str], pub options: &'static [(&'static str, &'static str)], } // Cache for environment variable overrides static ENV_BACKENDS: Lazy<Mutex<HashMap<String, &'static str>>> = Lazy::new(|| Mutex::new(HashMap::new())); impl RegistryTool { pub fn backends(&self) -> Vec<&'static str> { // Check for environment variable override first // e.g., MISE_BACKENDS_GRAPHITE='github:withgraphite/homebrew-tap[exe=gt]' let env_key = format!("MISE_BACKENDS_{}", self.short.to_shouty_snake_case()); // Check cache first { let cache = ENV_BACKENDS.lock().unwrap(); if let Some(&backend) = cache.get(&env_key) { return vec![backend]; } } // Check environment variable if let Ok(env_value) = env::var(&env_key) { // Store in cache with 'static lifetime let leaked = Box::leak(env_value.into_boxed_str()); let mut cache = ENV_BACKENDS.lock().unwrap(); 
cache.insert(env_key.clone(), leaked); return vec![leaked]; } static BACKEND_TYPES: Lazy<HashSet<String>> = Lazy::new(|| { let mut backend_types = BackendType::iter() .map(|b| b.to_string()) .collect::<HashSet<_>>(); time!("disable_backends"); for backend in &Settings::get().disable_backends { backend_types.remove(backend); } time!("disable_backends"); if cfg!(windows) { backend_types.remove("asdf"); } backend_types }); let settings = Settings::get(); let os = settings.os.clone().unwrap_or(OS.to_string()); let arch = settings.arch.clone().unwrap_or(ARCH.to_string()); let platform = format!("{os}-{arch}"); let experimental = settings.experimental; self.backends .iter() .filter(|rb| { rb.platforms.is_empty() || rb.platforms.contains(&&*os) || rb.platforms.contains(&&*arch) || rb.platforms.contains(&&*platform) }) .map(|rb| rb.full) .filter(|full| { full.split(':') .next() .is_some_and(|b| BACKEND_TYPES.contains(b)) }) // Filter out experimental backends if experimental mode is disabled .filter(|full| { if experimental { return true; } let backend_type = BackendType::guess(full); !backend_type.is_experimental() }) .collect() } pub fn is_supported_os(&self) -> bool { self.os.is_empty() || self.os.contains(&OS) } pub fn ba(&self) -> Option<BackendArg> { self.backends() .first() .map(|f| BackendArg::new(self.short.to_string(), Some(f.to_string()))) } /// Get RegistryBackend for a specific full backend string pub fn get_backend(&self, full: &str) -> Option<&RegistryBackend> { self.backends.iter().find(|rb| rb.full == full) } /// Get options for a specific backend pub fn backend_options(&self, full: &str) -> ToolVersionOptions { let mut opts = IndexMap::new(); if let Some(backend) = self.get_backend(full) { for (k, v) in backend.options { opts.insert(k.to_string(), v.to_string()); } } ToolVersionOptions { opts, ..Default::default() } } } pub fn shorts_for_full(full: &str) -> &'static Vec<&'static str> { static EMPTY: Vec<&'static str> = vec![]; static FULL_TO_SHORT: 
Lazy<HashMap<&'static str, Vec<&'static str>>> = Lazy::new(|| { let mut map: HashMap<&'static str, Vec<&'static str>> = HashMap::new(); for (short, rt) in REGISTRY.iter() { for full in rt.backends() { map.entry(full).or_default().push(short); } } map }); FULL_TO_SHORT.get(full).unwrap_or(&EMPTY) } pub fn is_trusted_plugin(name: &str, remote: &str) -> bool { let normalized_url = normalize_remote(remote).unwrap_or("INVALID_URL".into()); let is_shorthand = REGISTRY .get(name) .and_then(|tool| tool.backends().first().copied()) .map(full_to_url) .is_some_and(|s| normalize_remote(&s).unwrap_or_default() == normalized_url); let is_mise_url = normalized_url.starts_with("github.com/mise-plugins/"); !is_shorthand || is_mise_url } pub fn normalize_remote(remote: &str) -> eyre::Result<String> { let url = Url::parse(remote)?; let host = url.host_str().unwrap(); let path = url.path().trim_end_matches(".git"); Ok(format!("{host}{path}")) } pub fn full_to_url(full: &str) -> String { if url_like(full) { return full.to_string(); } let (_backend, url) = full.split_once(':').unwrap_or(("", full)); if url_like(url) { url.to_string() } else { format!("https://github.com/{url}.git") } } fn url_like(s: &str) -> bool { s.starts_with("https://") || s.starts_with("http://") || s.starts_with("git@") || s.starts_with("ssh://") || s.starts_with("git://") } impl Display for RegistryTool { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.short) } } pub fn tool_enabled<T: Ord>( enable_tools: &BTreeSet<T>, disable_tools: &BTreeSet<T>, name: &T, ) -> bool { if enable_tools.is_empty() { !disable_tools.contains(name) } else { enable_tools.contains(name) } } #[cfg(test)] mod tests { use crate::config::Config; #[tokio::test] async fn test_tool_disabled() { let _config = Config::get().await.unwrap(); use super::*; let name = "cargo"; assert!(tool_enabled(&BTreeSet::new(), &BTreeSet::new(), &name)); assert!(tool_enabled( &BTreeSet::from(["cargo"]), 
&BTreeSet::new(), &name )); assert!(!tool_enabled( &BTreeSet::new(), &BTreeSet::from(["cargo"]), &name )); } #[tokio::test] async fn test_backend_env_override() { let _config = Config::get().await.unwrap(); use super::*; // Clear the cache first ENV_BACKENDS.lock().unwrap().clear(); // Test with a known tool from the registry if let Some(tool) = REGISTRY.get("node") { // First test without env var - should return default backends let default_backends = tool.backends(); assert!(!default_backends.is_empty()); // Test with env var override // SAFETY: This is safe in a test environment unsafe { env::set_var("MISE_BACKENDS_NODE", "test:backend"); } let overridden_backends = tool.backends(); assert_eq!(overridden_backends.len(), 1); assert_eq!(overridden_backends[0], "test:backend"); // Clean up // SAFETY: This is safe in a test environment unsafe { env::remove_var("MISE_BACKENDS_NODE"); } ENV_BACKENDS.lock().unwrap().clear(); } } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/env_diff.rs
src/env_diff.rs
use std::collections::BTreeMap; use std::ffi::OsString; use std::fmt::Debug; use std::io::prelude::*; use std::iter::once; use std::path::{Path, PathBuf}; use base64::prelude::*; use eyre::Result; use flate2::Compression; use flate2::write::{ZlibDecoder, ZlibEncoder}; use indexmap::{IndexMap, IndexSet}; use itertools::Itertools; use serde_derive::{Deserialize, Serialize}; use std::sync::LazyLock as Lazy; use crate::env::PATH_KEY; use crate::file; #[derive(Default, Serialize, Deserialize)] pub struct EnvDiff { #[serde(default)] pub old: IndexMap<String, String>, #[serde(default)] pub new: IndexMap<String, String>, #[serde(default)] pub path: Vec<PathBuf>, } #[derive(Debug)] pub enum EnvDiffOperation { Add(String, String), Change(String, String), Remove(String), } pub type EnvDiffPatches = Vec<EnvDiffOperation>; pub type EnvMap = BTreeMap<String, String>; impl EnvDiff { pub fn new<T>(original: &EnvMap, additions: T) -> EnvDiff where T: IntoIterator<Item = (String, String)>, { let mut diff = EnvDiff::default(); for (key, new_val) in additions.into_iter() { let key: String = key; match original.get(&key) { Some(original_val) => { if original_val != &new_val { diff.old.insert(key.clone(), original_val.into()); diff.new.insert(key, new_val); } } None => { diff.new.insert(key, new_val); } } } diff } pub fn from_bash_script<T, U, V>( script: &Path, dir: &Path, env: T, opts: &EnvDiffOptions, ) -> Result<Self> where T: IntoIterator<Item = (U, V)>, U: Into<OsString>, V: Into<OsString>, { let env: IndexMap<OsString, OsString> = env.into_iter().map(|(k, v)| (k.into(), v.into())).collect(); let bash_path = file::which("bash").unwrap_or("/bin/bash".into()); let out = cmd!( bash_path, "--noprofile", "-c", indoc::formatdoc! {" . 
\"{script}\" export -p ", script = script.display()} ) .dir(dir) .full_env(&env) .read()?; let env: EnvMap = env .into_iter() .map(|(k, v)| (k.into_string().unwrap(), v.into_string().unwrap())) .collect(); let mut additions = EnvMap::new(); let mut cur_key = None; for line in out.lines() { match line.strip_prefix("declare -x ") { Some(line) => { let (k, v) = line.split_once('=').unwrap_or_default(); if invalid_key(k, opts) { continue; } cur_key = Some(k.to_string()); additions.insert(k.to_string(), v.to_string()); } None => { if let Some(k) = &cur_key { let v = format!("\n{line}"); additions.get_mut(k).unwrap().push_str(&v); } } } } for (k, v) in additions.clone().iter() { let v = normalize_escape_sequences(v); if let Some(orig) = env.get(k) && &v == orig { additions.remove(k); continue; } additions.insert(k.into(), v); } Ok(Self::new(&env, additions)) } pub fn deserialize(raw: &str) -> Result<EnvDiff> { let mut writer = Vec::new(); let mut decoder = ZlibDecoder::new(writer); let bytes = BASE64_STANDARD_NO_PAD.decode(raw)?; decoder.write_all(&bytes[..])?; writer = decoder.finish()?; Ok(rmp_serde::from_slice(&writer[..])?) 
} pub fn serialize(&self) -> Result<String> { let mut gz = ZlibEncoder::new(Vec::new(), Compression::fast()); gz.write_all(&rmp_serde::to_vec_named(self)?)?; Ok(BASE64_STANDARD_NO_PAD.encode(gz.finish()?)) } pub fn to_patches(&self) -> EnvDiffPatches { let mut patches = EnvDiffPatches::new(); for k in self.old.keys() { match self.new.get(k) { Some(v) => patches.push(EnvDiffOperation::Change(k.into(), v.into())), None => patches.push(EnvDiffOperation::Remove(k.into())), }; } for (k, v) in self.new.iter() { if !self.old.contains_key(k) { patches.push(EnvDiffOperation::Add(k.into(), v.into())) }; } patches } pub fn reverse(&self) -> EnvDiff { EnvDiff { old: self.new.clone(), new: self.old.clone(), path: self.path.clone(), } } } fn invalid_key(k: &str, opts: &EnvDiffOptions) -> bool { k.is_empty() || opts.ignore_keys.contains(k) // following two ignores are for exported bash functions and exported bash // functions which are multiline, they appear in the environment as e.g.: // BASH_FUNC_exported-bash-function%%=() { echo "this is an" // echo "exported bash function" // echo "with multiple lines" // } || k.starts_with("BASH_FUNC_") || k.starts_with(' ') } static DEFAULT_IGNORE_KEYS: Lazy<IndexSet<String>> = Lazy::new(|| { [ "_", "SHLVL", "PWD", "OLDPWD", "HOME", "USER", "SHELL", "SHELLOPTS", "COMP_WORDBREAKS", "PS1", "PROMPT_DIRTRIM", ] .iter() .map(|s| s.to_string()) .collect() }); impl Debug for EnvDiff { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let print_sorted = |hashmap: &IndexMap<String, String>| { hashmap .iter() .map(|(k, v)| format!("{k}={v}")) .sorted() .collect::<Vec<_>>() }; f.debug_struct("EnvDiff") .field("old", &print_sorted(&self.old)) .field("new", &print_sorted(&self.new)) .finish() } } fn normalize_escape_sequences(input: &str) -> String { let input = if input.starts_with('"') && input.ends_with('"') { input[1..input.len() - 1].to_string() } else if input.starts_with("$'") && input.ends_with('\'') { input[2..input.len() - 
1].to_string() } else { input.to_string() }; let mut result = String::with_capacity(input.len()); let mut chars = input.chars(); while let Some(c) = chars.next() { if c == '\\' { match chars.next() { Some(val) => match val { 'a' => result.push('\u{07}'), 'b' => result.push('\u{08}'), 'e' | 'E' => result.push('\u{1b}'), 'f' => result.push('\u{0c}'), 'n' => result.push('\n'), 'r' => result.push('\r'), 't' => result.push('\t'), 'v' => result.push('\u{0b}'), '\\' => result.push('\\'), '\'' => result.push('\''), '"' => result.push('"'), '?' => result.push('?'), '`' => result.push('`'), '$' => result.push('$'), _ => { result.push('\\'); result.push(val); } }, None => { warn!("Invalid escape sequence: {}", input); } } } else { result.push(c) } } result } pub struct EnvDiffOptions { pub ignore_keys: IndexSet<String>, } impl Default for EnvDiffOptions { fn default() -> Self { Self { ignore_keys: DEFAULT_IGNORE_KEYS .iter() .cloned() .chain(once(PATH_KEY.to_string())) .collect(), } } } #[cfg(test)] mod tests { use crate::config::Config; use super::*; use insta::assert_debug_snapshot; use pretty_assertions::assert_str_eq; #[tokio::test] async fn test_diff() { let _config = Config::get().await.unwrap(); let diff = EnvDiff::new(&new_from_hashmap(), new_to_hashmap()); assert_debug_snapshot!(diff.to_patches()); } #[tokio::test] async fn test_reverse() { let _config = Config::get().await.unwrap(); let diff = EnvDiff::new(&new_from_hashmap(), new_to_hashmap()); let patches = diff.reverse().to_patches(); let to_remove = patches .iter() .filter_map(|p| match p { EnvDiffOperation::Remove(k) => Some(k), _ => None, }) .collect::<Vec<_>>(); assert_debug_snapshot!(to_remove, @r#" [ "c", ] "#); let to_add = patches .iter() .filter_map(|p| match p { EnvDiffOperation::Add(k, v) => Some((k, v)), _ => None, }) .collect::<Vec<_>>(); assert_debug_snapshot!(to_add, @"[]"); let to_change = patches .iter() .filter_map(|p| match p { EnvDiffOperation::Change(k, v) => Some((k, v)), _ => None, }) 
.collect::<Vec<_>>(); assert_debug_snapshot!(to_change, @r#" [ ( "b", "2", ), ] "#); } fn new_from_hashmap() -> EnvMap { [("a", "1"), ("b", "2")] .map(|(k, v)| (k.into(), v.into())) .into() } fn new_to_hashmap() -> EnvMap { [("a", "1"), ("b", "3"), ("c", "4")] .map(|(k, v)| (k.into(), v.into())) .into() } #[tokio::test] async fn test_serialize() { let _config = Config::get().await.unwrap(); let diff = EnvDiff::new(&new_from_hashmap(), new_to_hashmap()); let serialized = diff.serialize().unwrap(); let deserialized = EnvDiff::deserialize(&serialized).unwrap(); assert_debug_snapshot!(deserialized.to_patches()); } #[tokio::test] #[cfg(unix)] async fn test_from_bash_script() { let _config = Config::get().await.unwrap(); use crate::{config::Config, dirs}; use indexmap::indexmap; let path = dirs::HOME.join("fixtures/exec-env"); let orig = indexmap! { "UNMODIFIED_VAR" => "unmodified", "UNMODIFIED_NEWLINE_VAR" => "hello\\nworld", "UNMODIFIED_SQUOTE_VAR" => "hello\\'world", "UNMODIFIED_ESCAPE_VAR" => "hello\\world", "MODIFIED_VAR" => "original", "ESCAPES" => "\\n\\t\\r\\v\\f\\a\\b\\e\\0\\x1b\\u1234\\U00012345\\a\\b\\e\\E\\f\\n\\r\\t\\v\"?`$\\g'\\0", "BACKSPACE" => "\u{08}", "BACKTICK" => "`", "BELL" => "\u{07}", "CARRIAGE_RETURN" => "\r", "DOLLAR" => "$", "DOUBLE_QUOTE" => "\"", "ESCAPE" => "\u{1b}", "ESCAPE2" => "\u{1b}", "FORM_FEED" => "\u{0c}", "G" => "g", "NEWLINE" => "\n", "QUESTION_MARK" => "?", "SINGLE_QUOTE" => "'", "TAB" => "\t", "VERTICAL_TAB" => "\u{0b}", } .into_iter() .map(|(k, v)| (k.into(), v.into())) .collect::<Vec<(String, String)>>(); let cwd = dirs::CWD.clone().unwrap(); let ed = EnvDiff::from_bash_script(path.as_path(), &cwd, orig, &Default::default()).unwrap(); assert_debug_snapshot!(ed); } #[tokio::test] async fn test_invalid_escape_sequence() { let _config = Config::get().await.unwrap(); let input = r#""\g\""#; let output = normalize_escape_sequences(input); // just warns assert_str_eq!(output, r"\g"); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/dirs.rs
src/dirs.rs
use std::path::{Path, PathBuf}; use std::sync::LazyLock as Lazy; use crate::env; pub static HOME: Lazy<&Path> = Lazy::new(|| &env::HOME); pub static CWD: Lazy<Option<PathBuf>> = Lazy::new(|| env::current_dir().ok()); pub static DATA: Lazy<&Path> = Lazy::new(|| &env::MISE_DATA_DIR); pub static CACHE: Lazy<&Path> = Lazy::new(|| &env::MISE_CACHE_DIR); pub static CONFIG: Lazy<&Path> = Lazy::new(|| &env::MISE_CONFIG_DIR); pub static STATE: Lazy<&Path> = Lazy::new(|| &env::MISE_STATE_DIR); pub static SYSTEM: Lazy<&Path> = Lazy::new(|| &env::MISE_SYSTEM_DIR); pub static PLUGINS: Lazy<&Path> = Lazy::new(|| &env::MISE_PLUGINS_DIR); pub static DOWNLOADS: Lazy<&Path> = Lazy::new(|| &env::MISE_DOWNLOADS_DIR); pub static INSTALLS: Lazy<&Path> = Lazy::new(|| &env::MISE_INSTALLS_DIR); pub static SHIMS: Lazy<&Path> = Lazy::new(|| &env::MISE_SHIMS_DIR); pub static TRACKED_CONFIGS: Lazy<PathBuf> = Lazy::new(|| STATE.join("tracked-configs")); pub static TRUSTED_CONFIGS: Lazy<PathBuf> = Lazy::new(|| STATE.join("trusted-configs")); pub static IGNORED_CONFIGS: Lazy<PathBuf> = Lazy::new(|| STATE.join("ignored-configs"));
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/hash.rs
src/hash.rs
use std::collections::HashMap; use std::hash::{Hash, Hasher}; use std::io::{Read, Write}; use std::path::Path; use crate::file; use crate::file::display_path; use crate::ui::progress_report::SingleReport; use blake3::Hasher as Blake3Hasher; use digest::Digest; use eyre::{Result, bail}; use md5::Md5; use sha1::Sha1; use sha2::{Sha256, Sha512}; use siphasher::sip::SipHasher; pub fn hash_to_str<T: Hash>(t: &T) -> String { let mut s = SipHasher::new(); t.hash(&mut s); format!("{:x}", s.finish()) } pub fn hash_sha256_to_str(s: &str) -> String { let mut hasher = Sha256::new(); hasher.update(s); format!("{:x}", hasher.finalize()) } pub fn file_hash_sha256(path: &Path, pr: Option<&dyn SingleReport>) -> Result<String> { let use_external_hasher = file::size(path).unwrap_or_default() > 50 * 1024 * 1024; if use_external_hasher && file::which("sha256sum").is_some() { let out = cmd!("sha256sum", path).read()?; Ok(out.split_whitespace().next().unwrap().to_string()) } else { file_hash_prog::<Sha256>(path, pr) } } fn file_hash_prog<D>(path: &Path, pr: Option<&dyn SingleReport>) -> Result<String> where D: Digest + Write, { let mut file = file::open(path)?; if let Some(pr) = pr { pr.set_length(file.metadata()?.len()); } let mut hasher = D::new(); let mut buf = [0; 32 * 1024]; loop { let n = file.read(&mut buf)?; if n == 0 { break; } hasher.write_all(&buf[..n])?; if let Some(pr) = pr { pr.inc(n as u64); } } std::io::copy(&mut file, &mut hasher)?; let hash = hasher.finalize(); Ok(hash.iter().map(|b| format!("{b:02x}")).collect()) } pub fn hash_blake3_to_str(s: &str) -> String { let mut hasher = Blake3Hasher::new(); hasher.update(s.as_bytes()); hasher.finalize().to_hex().to_string() } pub fn file_hash_blake3(path: &Path, pr: Option<&dyn SingleReport>) -> Result<String> { let mut file = file::open(path)?; if let Some(pr) = pr { pr.set_length(file.metadata()?.len()); } let mut hasher = Blake3Hasher::new(); let mut buf = [0; 32 * 1024]; loop { let n = file.read(&mut buf)?; if n == 0 { 
break; } hasher.update(&buf[..n]); if let Some(pr) = pr { pr.inc(n as u64); } } let hash = hasher.finalize(); Ok(format!("{}", hash.to_hex())) } pub fn ensure_checksum( path: &Path, checksum: &str, pr: Option<&dyn SingleReport>, algo: &str, ) -> Result<()> { let use_external_hasher = file::size(path).unwrap_or(u64::MAX) > 10 * 1024 * 1024; let actual = match algo { "blake3" => file_hash_blake3(path, pr)?, "sha512" => { if use_external_hasher && file::which("sha512sum").is_some() { let out = cmd!("sha512sum", path).read()?; out.split_whitespace().next().unwrap().to_string() } else { file_hash_prog::<Sha512>(path, pr)? } } "sha256" => file_hash_prog::<Sha256>(path, pr)?, "sha1" => { if use_external_hasher && file::which("sha1sum").is_some() { let out = cmd!("sha1sum", path).read()?; out.split_whitespace().next().unwrap().to_string() } else { file_hash_prog::<Sha1>(path, pr)? } } "md5" => { if use_external_hasher && file::which("md5sum").is_some() { let out = cmd!("md5sum", path).read()?; out.split_whitespace().next().unwrap().to_string() } else { file_hash_prog::<Md5>(path, pr)? 
} } _ => bail!("Unknown checksum algorithm: {}", algo), }; let checksum = checksum.to_lowercase(); if actual != checksum { bail!( "Checksum mismatch for file {}:\nExpected: {algo}:{checksum}\nActual: {algo}:{actual}", display_path(path) ); } Ok(()) } pub fn parse_shasums(text: &str) -> HashMap<String, String> { text.lines() .map(|l| { let mut parts = l.split_whitespace(); let hash = parts.next().unwrap(); let name = parts.next().unwrap(); (name.into(), hash.into()) }) .collect() } #[cfg(test)] mod tests { use insta::assert_snapshot; use pretty_assertions::assert_eq; use crate::config::Config; use super::*; #[tokio::test] async fn test_hash_to_str() { let _config = Config::get().await.unwrap(); assert_eq!(hash_to_str(&"foo"), "e1b19adfb2e348a2"); } #[tokio::test] async fn test_hash_sha256() { let _config = Config::get().await.unwrap(); let path = Path::new(".test-tool-versions"); let hash = file_hash_prog::<Sha256>(path, None).unwrap(); assert_snapshot!(hash); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/github.rs
src/github.rs
use crate::cache::{CacheManager, CacheManagerBuilder}; use crate::{dirs, duration, env}; use eyre::Result; use heck::ToKebabCase; use reqwest::IntoUrl; use reqwest::header::{HeaderMap, HeaderValue}; use serde_derive::{Deserialize, Serialize}; use std::collections::HashMap; use std::path::PathBuf; use std::sync::LazyLock as Lazy; use tokio::sync::RwLock; use tokio::sync::RwLockReadGuard; use xx::regex; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct GithubRelease { pub tag_name: String, // pub name: Option<String>, // pub body: Option<String>, pub draft: bool, pub prerelease: bool, pub created_at: String, // pub published_at: Option<String>, pub assets: Vec<GithubAsset>, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct GithubTag { pub name: String, pub commit: Option<GithubTagCommit>, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct GithubTagCommit { pub sha: String, pub url: String, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct GithubCommit { pub commit: GithubCommitInfo, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct GithubCommitInfo { pub committer: GithubCommitPerson, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct GithubCommitPerson { pub date: String, } /// Tag with date information #[derive(Debug, Clone)] pub struct GithubTagWithDate { pub name: String, pub date: Option<String>, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct GithubAsset { pub name: String, // pub size: u64, pub browser_download_url: String, pub url: String, /// SHA256 digest provided by GitHub API (format: "sha256:hash") /// Will be null for releases created before this feature was added #[serde(default)] pub digest: Option<String>, } type CacheGroup<T> = HashMap<String, CacheManager<T>>; static RELEASES_CACHE: Lazy<RwLock<CacheGroup<Vec<GithubRelease>>>> = Lazy::new(Default::default); static RELEASE_CACHE: Lazy<RwLock<CacheGroup<GithubRelease>>> = Lazy::new(Default::default); static TAGS_CACHE: 
Lazy<RwLock<CacheGroup<Vec<String>>>> = Lazy::new(Default::default); pub static API_URL: &str = "https://api.github.com"; async fn get_tags_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<Vec<String>>> { TAGS_CACHE .write() .await .entry(key.to_string()) .or_insert_with(|| { CacheManagerBuilder::new(cache_dir().join(format!("{key}-tags.msgpack.z"))) .with_fresh_duration(Some(duration::DAILY)) .build() }); TAGS_CACHE.read().await } async fn get_releases_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<Vec<GithubRelease>>> { RELEASES_CACHE .write() .await .entry(key.to_string()) .or_insert_with(|| { CacheManagerBuilder::new(cache_dir().join(format!("{key}-releases.msgpack.z"))) .with_fresh_duration(Some(duration::DAILY)) .build() }); RELEASES_CACHE.read().await } async fn get_release_cache<'a>(key: &str) -> RwLockReadGuard<'a, CacheGroup<GithubRelease>> { RELEASE_CACHE .write() .await .entry(key.to_string()) .or_insert_with(|| { CacheManagerBuilder::new(cache_dir().join(format!("{key}.msgpack.z"))) .with_fresh_duration(Some(duration::DAILY)) .build() }); RELEASE_CACHE.read().await } pub async fn list_releases(repo: &str) -> Result<Vec<GithubRelease>> { let key = repo.to_kebab_case(); let cache = get_releases_cache(&key).await; let cache = cache.get(&key).unwrap(); Ok(cache .get_or_try_init_async(async || list_releases_(API_URL, repo).await) .await? .to_vec()) } pub async fn list_releases_from_url(api_url: &str, repo: &str) -> Result<Vec<GithubRelease>> { let key = format!("{api_url}-{repo}").to_kebab_case(); let cache = get_releases_cache(&key).await; let cache = cache.get(&key).unwrap(); Ok(cache .get_or_try_init_async(async || list_releases_(api_url, repo).await) .await? 
.to_vec()) } async fn list_releases_(api_url: &str, repo: &str) -> Result<Vec<GithubRelease>> { let url = format!("{api_url}/repos/{repo}/releases"); let headers = get_headers(&url); let (mut releases, mut headers) = crate::http::HTTP_FETCH .json_headers_with_headers::<Vec<GithubRelease>, _>(url, &headers) .await?; if *env::MISE_LIST_ALL_VERSIONS { while let Some(next) = next_page(&headers) { headers = get_headers(&next); let (more, h) = crate::http::HTTP_FETCH .json_headers_with_headers::<Vec<GithubRelease>, _>(next, &headers) .await?; releases.extend(more); headers = h; } } releases.retain(|r| !r.draft && !r.prerelease); Ok(releases) } pub async fn list_tags(repo: &str) -> Result<Vec<String>> { let key = repo.to_kebab_case(); let cache = get_tags_cache(&key).await; let cache = cache.get(&key).unwrap(); Ok(cache .get_or_try_init_async(async || list_tags_(API_URL, repo).await) .await? .to_vec()) } pub async fn list_tags_from_url(api_url: &str, repo: &str) -> Result<Vec<String>> { let key = format!("{api_url}-{repo}").to_kebab_case(); let cache = get_tags_cache(&key).await; let cache = cache.get(&key).unwrap(); Ok(cache .get_or_try_init_async(async || list_tags_(api_url, repo).await) .await? .to_vec()) } async fn list_tags_(api_url: &str, repo: &str) -> Result<Vec<String>> { let url = format!("{api_url}/repos/{repo}/tags"); let headers = get_headers(&url); let (mut tags, mut headers) = crate::http::HTTP_FETCH .json_headers_with_headers::<Vec<GithubTag>, _>(url, &headers) .await?; if *env::MISE_LIST_ALL_VERSIONS { while let Some(next) = next_page(&headers) { headers = get_headers(&next); let (more, h) = crate::http::HTTP_FETCH .json_headers_with_headers::<Vec<GithubTag>, _>(next, &headers) .await?; tags.extend(more); headers = h; } } Ok(tags.into_iter().map(|t| t.name).collect()) } /// List tags with their commit dates. This is slower than `list_tags` as it requires /// fetching commit info for each tag. Use only when MISE_LIST_ALL_VERSIONS is set. 
pub async fn list_tags_with_dates(repo: &str) -> Result<Vec<GithubTagWithDate>> { list_tags_with_dates_(API_URL, repo).await } async fn list_tags_with_dates_(api_url: &str, repo: &str) -> Result<Vec<GithubTagWithDate>> { let url = format!("{api_url}/repos/{repo}/tags"); let headers = get_headers(&url); let (mut tags, mut response_headers) = crate::http::HTTP_FETCH .json_headers_with_headers::<Vec<GithubTag>, _>(url, &headers) .await?; // Fetch all pages when MISE_LIST_ALL_VERSIONS is set while let Some(next) = next_page(&response_headers) { response_headers = get_headers(&next); let (more, h) = crate::http::HTTP_FETCH .json_headers_with_headers::<Vec<GithubTag>, _>(next, &response_headers) .await?; tags.extend(more); response_headers = h; } // Fetch commit dates in parallel using the parallel utility let results = crate::parallel::parallel(tags, |tag| async move { let date = if let Some(commit) = tag.commit { let headers = get_headers(&commit.url); match crate::http::HTTP_FETCH .json_with_headers::<GithubCommit, _>(&commit.url, &headers) .await { Ok(commit_info) => Some(commit_info.commit.committer.date), Err(e) => { warn!("Failed to fetch commit date for tag {}: {}", tag.name, e); None } } } else { None }; Ok((tag.name, date)) }) .await?; Ok(results .into_iter() .map(|(name, date)| GithubTagWithDate { name, date }) .collect()) } pub async fn get_release(repo: &str, tag: &str) -> Result<GithubRelease> { let key = format!("{repo}-{tag}").to_kebab_case(); let cache = get_release_cache(&key).await; let cache = cache.get(&key).unwrap(); Ok(cache .get_or_try_init_async(async || get_release_(API_URL, repo, tag).await) .await? .clone()) } pub async fn get_release_for_url(api_url: &str, repo: &str, tag: &str) -> Result<GithubRelease> { let key = format!("{api_url}-{repo}-{tag}").to_kebab_case(); let cache = get_release_cache(&key).await; let cache = cache.get(&key).unwrap(); Ok(cache .get_or_try_init_async(async || get_release_(api_url, repo, tag).await) .await? 
.clone()) } async fn get_release_(api_url: &str, repo: &str, tag: &str) -> Result<GithubRelease> { let url = format!("{api_url}/repos/{repo}/releases/tags/{tag}"); let headers = get_headers(&url); crate::http::HTTP_FETCH .json_with_headers(url, &headers) .await } fn next_page(headers: &HeaderMap) -> Option<String> { let link = headers .get("link") .map(|l| l.to_str().unwrap_or_default().to_string()) .unwrap_or_default(); regex!(r#"<([^>]+)>; rel="next""#) .captures(&link) .map(|c| c.get(1).unwrap().as_str().to_string()) } fn cache_dir() -> PathBuf { dirs::CACHE.join("github") } pub fn get_headers<U: IntoUrl>(url: U) -> HeaderMap { let mut headers = HeaderMap::new(); let url = url.into_url().unwrap(); let mut set_headers = |token: &str| { headers.insert( "authorization", HeaderValue::from_str(format!("token {token}").as_str()).unwrap(), ); headers.insert( "x-github-api-version", HeaderValue::from_static("2022-11-28"), ); }; if url.host_str() == Some("api.github.com") { if let Some(token) = env::GITHUB_TOKEN.as_ref() { set_headers(token); } } else if let Some(token) = env::MISE_GITHUB_ENTERPRISE_TOKEN.as_ref() { set_headers(token); } if url.path().contains("/releases/assets/") { headers.insert( "accept", HeaderValue::from_static("application/octet-stream"), ); } headers }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/hint.rs
src/hint.rs
use crate::config::Settings; use crate::dirs; use std::collections::HashSet; use std::path::PathBuf; use std::sync::LazyLock as Lazy; use std::sync::Mutex; #[macro_export] macro_rules! hint { ($id:expr, $message:expr, $example_cmd:expr) => {{ if $crate::hint::should_display_hint($id) { let _ = $crate::file::touch_file(&$crate::hint::HINTS_DIR.join($id)); let prefix = console::style("hint") .dim() .yellow() .for_stderr() .to_string(); let message = format!($message); let cmd = console::style($example_cmd).bold().for_stderr(); info!("{prefix} {message} {cmd}"); } }}; } pub static HINTS_DIR: Lazy<PathBuf> = Lazy::new(|| dirs::STATE.join("hints")); pub static DISPLAYED_HINTS: Lazy<Mutex<HashSet<String>>> = Lazy::new(|| { let mut hints = HashSet::new(); for file in xx::file::ls(&*HINTS_DIR).unwrap_or_default() { if let Some(file_name) = file.file_name().map(|f| f.to_string_lossy()) { if file_name.starts_with(".") { continue; } hints.insert(file_name.to_string()); } } Mutex::new(hints) }); pub fn should_display_hint(id: &str) -> bool { if cfg!(test) || !console::user_attended() || !console::user_attended_stderr() { return false; } if Settings::get() .disable_hints .iter() .any(|hint| hint == id || hint == "*") { return false; } let displayed_hints = &mut DISPLAYED_HINTS.lock().unwrap(); if displayed_hints.contains(id) { return false; } displayed_hints.insert(id.to_string()); true }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/semver.rs
src/semver.rs
use versions::{Mess, Versioning}; /// splits a version number into an optional prefix and the remaining version string pub fn split_version_prefix(version: &str) -> (String, String) { version .char_indices() .find_map(|(i, c)| { if c.is_ascii_digit() { if i == 0 { return Some(i); } // If the previous char is a delimiter or 'v', we found a split point. let prev_char = version.chars().nth(i - 1).unwrap(); if ['-', '_', '/', '.', 'v', 'V'].contains(&prev_char) { return Some(i); } } None }) .map_or_else( || ("".into(), version.into()), |i| { let (prefix, version) = version.split_at(i); (prefix.into(), version.into()) }, ) } /// split a version number into chunks /// given v: "1.2-3a4" return ["1", ".2", "-3a4"] pub fn chunkify_version(v: &str) -> Vec<String> { fn chunkify(m: &Mess, sep0: &str, chunks: &mut Vec<String>) { for (i, chunk) in m.chunks.iter().enumerate() { let sep = if i == 0 { sep0 } else { "." }; chunks.push(format!("{sep}{chunk}")); } if let Some((next_sep, next_mess)) = &m.next { chunkify(next_mess, next_sep.to_string().as_ref(), chunks) } } let mut chunks = vec![]; // don't parse "latest", otherwise bump from latest to any version would have one chunk only if v != "latest" && let Some(v) = Versioning::new(v) { let m = match v { Versioning::Ideal(sem_ver) => sem_ver.to_mess(), Versioning::General(version) => version.to_mess(), Versioning::Complex(mess) => mess, }; chunkify(&m, "", &mut chunks); } chunks } #[cfg(test)] mod tests { use super::{chunkify_version, split_version_prefix}; #[test] fn test_split_version_prefix() { assert_eq!(split_version_prefix("latest"), ("".into(), "latest".into())); assert_eq!(split_version_prefix("v1.2.3"), ("v".into(), "1.2.3".into())); assert_eq!( split_version_prefix("mountpoint-s3-v1.2.3-5_beta.5"), ("mountpoint-s3-v".into(), "1.2.3-5_beta.5".into()) ); assert_eq!( split_version_prefix("cli/1.2.3"), ("cli/".into(), "1.2.3".into()) ); assert_eq!( split_version_prefix("temurin-17.0.7+7"), ("temurin-".into(), 
"17.0.7+7".into()) ); assert_eq!(split_version_prefix("1.2"), ("".into(), "1.2".into())); assert_eq!( split_version_prefix("2:1.2.1"), ("".into(), "2:1.2.1".into()) ); assert_eq!( split_version_prefix("2025-05-17"), ("".into(), "2025-05-17".into()) ); } #[test] fn test_chunkify_version() { assert_eq!(chunkify_version("1.2-3a4"), vec!["1", ".2", "-3a4"]); assert_eq!(chunkify_version("latest"), Vec::<String>::new()); assert_eq!(chunkify_version("1.0.0"), vec!["1", ".0", ".0"]); assert_eq!( chunkify_version("2.3.4-beta"), vec!["2", ".3", ".4", "-beta"] ); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/main.rs
src/main.rs
#![allow(unknown_lints)] #![allow(clippy::literal_string_with_formatting_args)] use std::{ panic, sync::atomic::{AtomicBool, Ordering}, }; use crate::cli::Cli; use crate::cli::version::VERSION; use color_eyre::{Section, SectionExt}; use eyre::Report; use indoc::indoc; use itertools::Itertools; #[cfg(test)] #[macro_use] mod test; #[macro_use] mod output; #[macro_use] mod hint; #[macro_use] mod timings; #[macro_use] mod cmd; mod agecrypt; mod aqua; mod backend; pub(crate) mod build_time; mod cache; mod cli; mod config; mod direnv; mod dirs; pub(crate) mod duration; mod env; mod env_diff; mod errors; mod exit; #[cfg_attr(windows, path = "fake_asdf_windows.rs")] mod fake_asdf; mod file; mod git; pub(crate) mod github; pub(crate) mod gitlab; mod gpg; mod hash; mod hook_env; mod hooks; mod http; mod install_context; mod lock_file; mod lockfile; pub(crate) mod logger; pub(crate) mod maplit; mod migrate; mod minisign; mod netrc; pub(crate) mod parallel; mod path; mod path_env; mod platform; mod plugins; mod prepare; mod rand; mod redactions; mod registry; pub(crate) mod result; mod runtime_symlinks; mod semver; mod shell; mod shims; mod shorthands; mod sops; mod sysconfig; pub(crate) mod task; pub(crate) mod tera; pub(crate) mod timeout; mod toml; mod toolset; mod ui; mod uv; mod versions_host; mod watch_files; mod wildcard; pub(crate) use crate::exit::exit; pub(crate) use crate::result::Result; use crate::ui::multi_progress_report::MultiProgressReport; fn main() -> eyre::Result<()> { let nprocs = std::thread::available_parallelism() .map(|n| n.get()) .unwrap_or_default(); let threads = crate::env::MISE_JOBS.unwrap_or(nprocs).max(8); tokio::runtime::Builder::new_multi_thread() .enable_all() .worker_threads(threads) .build()? 
.block_on(main_()) } async fn main_() -> eyre::Result<()> { // Configure color-eyre based on color preferences if *env::CLICOLOR == Some(false) { // Use blank theme (no colors) when colors are disabled color_eyre::config::HookBuilder::new() .theme(color_eyre::config::Theme::new()) .install()?; } else { // Use default installation with colors color_eyre::install()?; } install_panic_hook(); if std::env::current_dir().is_ok() { unsafe { path_absolutize::update_cwd(); } } measure!("main", { let args = env::args().collect_vec(); match Cli::run(&args) .await .with_section(|| VERSION.to_string().header("Version:")) { Ok(()) => Ok(()), Err(err) => handle_err(err), }?; }); if let Some(mpr) = MultiProgressReport::try_get() { mpr.stop()?; } Ok(()) } fn handle_err(err: Report) -> eyre::Result<()> { if let Some(err) = err.downcast_ref::<std::io::Error>() && err.kind() == std::io::ErrorKind::BrokenPipe { return Ok(()); } show_github_rate_limit_err(&err); if *env::MISE_FRIENDLY_ERROR { display_friendly_err(&err); exit(1); } let async_backtrace = async_backtrace::taskdump_tree(true); Err(err.section(async_backtrace.header("Async Tasks"))) } fn show_github_rate_limit_err(err: &Report) { let msg = format!("{err:?}"); if msg.contains("HTTP status client error (403 Forbidden) for url (https://api.github.com") { warn!( "GitHub API returned a 403 Forbidden error. This likely means you have exceeded the rate limit." ); if env::GITHUB_TOKEN.is_none() { warn!(indoc!( r#"GITHUB_TOKEN is not set. This means mise is making unauthenticated requests to GitHub which have a lower rate limit. To increase the rate limit, set the GITHUB_TOKEN environment variable to a GitHub personal access token. Create a token at https://github.com/settings/tokens and set it as GITHUB_TOKEN in your environment. 
You do not need to give this token any scopes."# )); } } } fn display_friendly_err(err: &Report) { for err in err.chain() { error!("{err}"); } let msg = ui::style::edim("Run with --verbose or MISE_VERBOSE=1 for more information"); error!("{msg}"); } static ASYNC_PANIC_OCCURRED: AtomicBool = AtomicBool::new(false); pub fn install_panic_hook() { let default_hook = panic::take_hook(); panic::set_hook(Box::new(move |panic_info| { if tokio::runtime::Handle::try_current().is_ok() && !ASYNC_PANIC_OCCURRED.swap(true, Ordering::SeqCst) { let bt = async_backtrace::backtrace(); let mut bt_buffer = String::new(); if let Some(bt) = bt { let locations = &*bt; for (index, loc) in locations.iter().enumerate() { bt_buffer.push_str(&format!("{index:3}: {loc:?}\n")); } } else { bt_buffer.push_str("[no accessible async backtrace]"); } let all = async_backtrace::taskdump_tree(true); eprintln!( "=== Async Backtrace (panic occurred in tokio runtime) ===\n\ {bt_buffer}\n\ ------- TASK DUMP TREE -------\n\ {all}\n\ === End Async Backtrace ===\n" ); } default_hook(panic_info); })); }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/sops.rs
src/sops.rs
use std::sync::Arc; use crate::config::{Config, Settings}; use crate::env; use crate::file::replace_path; use crate::{dirs, file, result}; use eyre::{WrapErr, eyre}; use rops::cryptography::cipher::AES256GCM; use rops::cryptography::hasher::SHA512; use rops::file::RopsFile; use rops::file::state::EncryptedFile; use tokio::sync::{Mutex, OnceCell}; pub async fn decrypt<PT, F>( config: &Arc<Config>, input: &str, mut parse_template: PT, format: &str, ) -> result::Result<String> where PT: FnMut(String) -> result::Result<String>, F: rops::file::format::FileFormat, { static AGE_KEY: OnceCell<Option<String>> = OnceCell::const_new(); static AGE_KEY_FILE: OnceCell<Option<std::path::PathBuf>> = OnceCell::const_new(); static MUTEX: Mutex<()> = Mutex::const_new(()); let age = AGE_KEY .get_or_init(async || { // 1. Check mise-specific MISE_SOPS_AGE_KEY setting first (highest priority) if let Some(age_key) = &Settings::get().sops.age_key && !age_key.is_empty() { return Some(age_key.clone()); } // 2. Check mise-specific MISE_SOPS_AGE_KEY_FILE setting if let Some(key_file) = &Settings::get().sops.age_key_file { let p = replace_path( match parse_template(key_file.to_string_lossy().to_string()) { Ok(p) => p, Err(e) => { warn!("failed to parse MISE_SOPS_AGE_KEY_FILE: {}", e); return None; } }, ); if p.exists() && let Ok(raw) = file::read_to_string(&p) { let key = raw .trim() .lines() .filter(|l| !l.starts_with('#')) .collect::<String>(); if !key.trim().is_empty() { // Store the path for later use by sops CLI let _ = AGE_KEY_FILE.get_or_init(|| async { Some(p.clone()) }).await; return Some(key); } } } // 3. 
Check standard SOPS_AGE_KEY_FILE environment variable if let Ok(key_file_path) = env::var("SOPS_AGE_KEY_FILE") { let p = replace_path(match parse_template(key_file_path.clone()) { Ok(p) => p, Err(e) => { warn!("failed to parse SOPS_AGE_KEY_FILE: {}", e); return None; } }); if p.exists() && let Ok(raw) = file::read_to_string(&p) { let key = raw .trim() .lines() .filter(|l| !l.starts_with('#')) .collect::<String>(); if !key.trim().is_empty() { // Store the path for later use by sops CLI let _ = AGE_KEY_FILE.get_or_init(|| async { Some(p.clone()) }).await; return Some(key); } } } // 4. Check standard SOPS_AGE_KEY environment variable (direct key content) if let Ok(key) = env::var("SOPS_AGE_KEY") && !key.trim().is_empty() { return Some(key.trim().to_string()); } // 5. Fall back to default path ~/.config/mise/age.txt let p = dirs::CONFIG.join("age.txt"); let p = replace_path(match parse_template(p.to_string_lossy().to_string()) { Ok(p) => p, Err(e) => { warn!("failed to parse default sops age key file: {}", e); return None; } }); if p.exists() && let Ok(raw) = file::read_to_string(p.clone()) { let key = raw .trim() .lines() .filter(|l| !l.starts_with('#')) .collect::<String>(); if !key.trim().is_empty() { // Store the path for later use by sops CLI let _ = AGE_KEY_FILE.get_or_init(|| async { Some(p.clone()) }).await; return Some(key); } } None }) .await; if age.is_none() && !Settings::get().sops.strict { debug!("age key not found, skipping decryption in non-strict mode"); return Ok(String::new()); } let _lock = MUTEX.lock().await; // prevent multiple threads from using the same age key let age_env_key = if Settings::get().sops.rops { "ROPS_AGE" } else { "SOPS_AGE_KEY" }; let prev_age = env::var(age_env_key).ok(); let prev_age_key_file = env::var("SOPS_AGE_KEY_FILE").ok(); // Set SOPS_AGE_KEY_FILE with expanded path if we found one, so sops CLI can use it if let Some(expanded_path) = AGE_KEY_FILE.get().and_then(|f| f.as_ref()) { env::set_var( "SOPS_AGE_KEY_FILE", 
expanded_path.to_string_lossy().to_string(), ); } if let Some(age) = &age { env::set_var(age_env_key, age.trim()); } let output = if Settings::get().sops.rops { match input .parse::<RopsFile<EncryptedFile<AES256GCM, SHA512>, F>>() .wrap_err("failed to parse sops file") .and_then(|file| file.decrypt::<F>().wrap_err("failed to decrypt sops file")) { Ok(decrypted) => Some(decrypted.to_string()), Err(e) => { if Settings::get().sops.strict { if let Some(age) = prev_age { env::set_var(age_env_key, age); } else { env::remove_var(age_env_key); } if let Some(age_key_file) = prev_age_key_file { env::set_var("SOPS_AGE_KEY_FILE", age_key_file); } else { env::remove_var("SOPS_AGE_KEY_FILE"); } return Err(e); } else { debug!( "sops decryption failed but continuing in non-strict mode: {}", e ); None } } } } else { let mut ts = config .get_tool_request_set() .await .cloned() .unwrap_or_default() .filter_by_tool(["sops".into()].into()) .into_toolset(); Box::pin(ts.resolve(config)).await?; let sops_path = ts.which_bin(config, "sops").await; match sops_path { None => { if Settings::get().sops.strict { if let Some(age) = prev_age { env::set_var(age_env_key, age); } else { env::remove_var(age_env_key); } if let Some(age_key_file) = prev_age_key_file { env::set_var("SOPS_AGE_KEY_FILE", age_key_file); } else { env::remove_var("SOPS_AGE_KEY_FILE"); } return Err(eyre!("sops command not found")); } else { debug!("sops command not found, skipping decryption in non-strict mode"); None } } Some(sops_path) => { let sops = sops_path.to_string_lossy().to_string(); // TODO: this obviously won't work on windows match cmd!( sops, "--input-type", format, "--output-type", format, "-d", "/dev/stdin" ) .stdin_bytes(input.as_bytes()) .read() { Ok(output) => Some(output), Err(e) => { if Settings::get().sops.strict { if let Some(age) = prev_age { env::set_var(age_env_key, age); } else { env::remove_var(age_env_key); } if let Some(age_key_file) = prev_age_key_file { env::set_var("SOPS_AGE_KEY_FILE", 
age_key_file); } else { env::remove_var("SOPS_AGE_KEY_FILE"); } return Err(e.into()); } else { debug!( "sops decryption failed but continuing in non-strict mode: {}", e ); None } } } } } }; if let Some(age) = prev_age { env::set_var(age_env_key, age); } else { env::remove_var(age_env_key); } if let Some(age_key_file) = prev_age_key_file { env::set_var("SOPS_AGE_KEY_FILE", age_key_file); } else { env::remove_var("SOPS_AGE_KEY_FILE"); } Ok(output.unwrap_or_default()) }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/output.rs
src/output.rs
use std::collections::HashSet; use std::sync::LazyLock; use std::sync::Mutex; #[macro_export] macro_rules! prefix_println { ($prefix:expr, $($arg:tt)*) => {{ let msg = format!($($arg)*); println!("{} {}", $prefix, msg); }}; } #[macro_export] macro_rules! prefix_eprintln { ($prefix:expr, $($arg:tt)*) => {{ let msg = format!($($arg)*); eprintln!("{} {}", $prefix, msg); }}; } #[cfg(test)] #[macro_export] macro_rules! miseprintln { () => { miseprint!("\n")?; }; ($($arg:tt)*) => {{ let mut stdout = $crate::output::tests::STDOUT.lock().unwrap(); stdout.push(format!($($arg)*)); }} } #[cfg(not(test))] #[macro_export] macro_rules! miseprintln { () => { calm_io::stdoutln!()?; }; ($($arg:tt)*) => {{ calm_io::stdoutln!($($arg)*)?; }} } #[cfg(test)] #[macro_export] macro_rules! miseprint { ($($arg:tt)*) => {{ let mut stdout = $crate::output::tests::STDOUT.lock().unwrap(); let cur = stdout.pop().unwrap_or_default(); stdout.push(cur + &format!($($arg)*)); std::io::Result::Ok(()) }} } #[cfg(not(test))] #[macro_export] macro_rules! miseprint { ($($arg:tt)*) => {{ calm_io::stdout!($($arg)*) }} } #[cfg(test)] #[macro_export] macro_rules! info { ($($arg:tt)*) => {{ let mut stderr = $crate::output::tests::STDERR.lock().unwrap(); let mise = console::style("mise").dim().for_stderr(); stderr.push(format!("{} {}", mise, format!($($arg)*))); }}; } #[cfg(test)] #[macro_export] macro_rules! warn { ($($arg:tt)*) => {{ let mut stderr = $crate::output::tests::STDERR.lock().unwrap(); let mise = console::style("mise").yellow().for_stderr(); stderr.push(format!("{} {}", mise, format!($($arg)*))); }} } #[cfg(test)] #[macro_export] macro_rules! error { ($($arg:tt)*) => { let mut stderr = $crate::output::tests::STDERR.lock().unwrap(); let mise = console::style("mise").red().for_stderr(); stderr.push(format!("{} {}", mise, format!($($arg)*))); } } #[macro_export] macro_rules! trace { ($($arg:tt)*) => {{ log::trace!($($arg)*); }}; } #[macro_export] macro_rules! 
debug { ($($arg:tt)*) => {{ log::debug!($($arg)*); }}; } #[cfg(not(test))] #[macro_export] macro_rules! info { ($($arg:tt)*) => {{ log::info!($($arg)*); }}; } #[macro_export] macro_rules! info_trunc { ($($arg:tt)*) => {{ let msg = format!($($arg)*); let msg = msg.lines().next().unwrap_or_default(); let msg = console::truncate_str(&msg, *$crate::env::TERM_WIDTH, "…"); info!("{msg}"); }}; } #[cfg(not(test))] #[macro_export] macro_rules! warn { ($($arg:tt)*) => {{ log::warn!($($arg)*); }}; } pub static WARNED_ONCE: LazyLock<Mutex<HashSet<String>>> = LazyLock::new(Default::default); macro_rules! warn_once { ($($arg:tt)*) => {{ let msg = format!($($arg)*); if $crate::output::WARNED_ONCE.lock().unwrap().insert(msg.clone()) { warn!("{}", msg); } }}; } #[cfg(not(test))] #[macro_export] macro_rules! error { ($($arg:tt)*) => {{ log::error!($($arg)*); }}; } pub static DEPRECATED: LazyLock<Mutex<HashSet<&'static str>>> = LazyLock::new(Default::default); #[macro_export] macro_rules! deprecated { ($id:tt, $($arg:tt)*) => {{ if $crate::output::DEPRECATED.lock().unwrap().insert($id) { warn!("deprecated [{}]: {}", $id, format!($($arg)*)); } }}; } #[cfg(test)] pub mod tests { use std::sync::Mutex; pub static STDOUT: Mutex<Vec<String>> = Mutex::new(Vec::new()); pub static STDERR: Mutex<Vec<String>> = Mutex::new(Vec::new()); }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/path_env.rs
src/path_env.rs
use crate::config::Settings; use crate::dirs; use std::env::{join_paths, split_paths}; use std::ffi::OsString; use std::fmt; use std::fmt::{Display, Formatter}; use std::path::PathBuf; use std::str::FromStr; pub struct PathEnv { pre: Vec<PathBuf>, mise: Vec<PathBuf>, post: Vec<PathBuf>, seen_shims: bool, } impl PathEnv { pub fn new() -> Self { Self { pre: Vec::new(), mise: Vec::new(), post: Vec::new(), seen_shims: false, } } pub fn add(&mut self, path: PathBuf) { for part in split_paths(&path) { self.mise.push(part); } } pub fn to_vec(&self) -> Vec<PathBuf> { self.pre .iter() .chain(self.mise.iter()) .chain(self.post.iter()) .map(|p| p.to_path_buf()) .collect() } pub fn join(&self) -> OsString { join_paths(self.to_vec()).unwrap() } } impl Display for PathEnv { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "{}", self.join().to_string_lossy()) } } impl FromIterator<PathBuf> for PathEnv { fn from_iter<T: IntoIterator<Item = PathBuf>>(paths: T) -> Self { let settings = Settings::get(); let mut path_env = Self::new(); for path in paths { if path_env.seen_shims { path_env.post.push(path); } else if path == *dirs::SHIMS && !settings.activate_aggressive { path_env.seen_shims = true; } else { path_env.pre.push(path); } } if !path_env.seen_shims { path_env.post = path_env.pre; path_env.pre = Vec::new(); } path_env } } impl FromStr for PathEnv { type Err = eyre::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(Self::from_iter(split_paths(s))) } } #[cfg(unix)] #[cfg(test)] mod tests { use pretty_assertions::assert_eq; use crate::config::Config; use super::*; #[tokio::test] async fn test_path_env() { let _config = Config::get().await.unwrap(); let mut path_env = PathEnv::from_iter( [ "/before-1", "/before-2", "/before-3", dirs::SHIMS.to_str().unwrap(), "/after-1", "/after-2", "/after-3", ] .map(PathBuf::from), ); path_env.add("/1".into()); path_env.add("/2".into()); path_env.add("/3".into()); assert_eq!( path_env.to_string(), 
"/before-1:/before-2:/before-3:/1:/2:/3:/after-1:/after-2:/after-3".to_string() ); } #[tokio::test] async fn test_path_env_no_mise() { let _config = Config::get().await.unwrap(); let mut path_env = PathEnv::from_iter( [ "/before-1", "/before-2", "/before-3", "/after-1", "/after-2", "/after-3", ] .map(PathBuf::from), ); path_env.add("/1".into()); path_env.add("/2".into()); path_env.add("/3".into()); assert_eq!( path_env.to_string(), format!("/1:/2:/3:/before-1:/before-2:/before-3:/after-1:/after-2:/after-3") ); } #[tokio::test] async fn test_path_env_with_colon() { let _config = Config::get().await.unwrap(); let mut path_env = PathEnv::from_iter(["/item1", "/item2"].map(PathBuf::from)); path_env.add("/1:/2".into()); assert_eq!(path_env.to_string(), format!("/1:/2:/item1:/item2")); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/cache.rs
src/cache.rs
use std::cmp::min; use std::fs::File; use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use std::time::Duration; use eyre::Result; use flate2::Compression; use flate2::read::ZlibDecoder; use flate2::write::ZlibEncoder; use itertools::Itertools; use once_cell::sync::OnceCell; use serde::Serialize; use serde::de::DeserializeOwned; use std::sync::LazyLock as Lazy; use crate::build_time::built_info; use crate::config::Settings; use crate::file::{display_path, modified_duration}; use crate::hash::hash_to_str; use crate::rand::random_string; use crate::{dirs, file}; #[derive(Debug)] pub struct CacheManagerBuilder { cache_file_path: PathBuf, cache_keys: Vec<String>, fresh_duration: Option<Duration>, fresh_files: Vec<PathBuf>, } pub static BASE_CACHE_KEYS: Lazy<Vec<String>> = Lazy::new(|| { [ built_info::FEATURES_STR, built_info::PKG_VERSION, built_info::PROFILE, built_info::TARGET, ] .into_iter() .map(|s| s.to_string()) .collect() }); impl CacheManagerBuilder { pub fn new(cache_file_path: impl AsRef<Path>) -> Self { let settings = Settings::get(); let mut cache_keys = BASE_CACHE_KEYS.clone(); cache_keys.extend([settings.os().to_string(), settings.arch().to_string()]); Self { cache_file_path: cache_file_path.as_ref().to_path_buf(), cache_keys, fresh_files: vec![], fresh_duration: None, } } pub fn with_fresh_duration(mut self, duration: Option<Duration>) -> Self { self.fresh_duration = duration; self } pub fn with_fresh_file(mut self, path: PathBuf) -> Self { self.fresh_files.push(path); self } pub fn with_cache_key(mut self, key: String) -> Self { self.cache_keys.push(key); self } fn cache_key(&self) -> String { hash_to_str(&self.cache_keys).chars().take(5).collect() } pub fn build<T>(self) -> CacheManager<T> where T: Serialize + DeserializeOwned, { let key = self.cache_key(); let (base, ext) = file::split_file_name(&self.cache_file_path); let mut cache_file_path = self.cache_file_path; cache_file_path.set_file_name(format!("{base}-{key}.{ext}")); CacheManager { 
cache_file_path, cache: Box::new(OnceCell::new()), cache_async: Box::new(tokio::sync::OnceCell::new()), fresh_files: self.fresh_files, fresh_duration: self.fresh_duration, } } } #[derive(Debug, Clone)] pub struct CacheManager<T> where T: Serialize + DeserializeOwned, { cache_file_path: PathBuf, fresh_duration: Option<Duration>, fresh_files: Vec<PathBuf>, cache: Box<OnceCell<T>>, cache_async: Box<tokio::sync::OnceCell<T>>, } impl<T> CacheManager<T> where T: Serialize + DeserializeOwned, { pub fn get_or_try_init<F>(&self, fetch: F) -> Result<&T> where F: FnOnce() -> Result<T>, { let val = self.cache.get_or_try_init(|| { let path = &self.cache_file_path; if self.is_fresh() { match self.parse() { Ok(val) => return Ok::<_, color_eyre::Report>(val), Err(err) => { warn!("failed to parse cache file: {} {:#}", path.display(), err); } } } let val = (fetch)()?; if let Err(err) = self.write(&val) { warn!("failed to write cache file: {} {:#}", path.display(), err); } Ok(val) })?; Ok(val) } pub async fn get_or_try_init_async<F, Fut>(&self, fetch: F) -> Result<&T> where F: FnOnce() -> Fut, Fut: Future<Output = Result<T>>, { let val = self .cache_async .get_or_try_init(|| async { let path = &self.cache_file_path; if self.is_fresh() { match self.parse() { Ok(val) => return Ok::<_, color_eyre::Report>(val), Err(err) => { warn!("failed to parse cache file: {} {:#}", path.display(), err); } } } let val = fetch().await?; if let Err(err) = self.write(&val) { warn!("failed to write cache file: {} {:#}", path.display(), err); } Ok(val) }) .await?; Ok(val) } fn parse(&self) -> Result<T> { let path = &self.cache_file_path; trace!("reading {}", display_path(path)); let mut zlib = ZlibDecoder::new(File::open(path)?); let mut bytes = Vec::new(); zlib.read_to_end(&mut bytes)?; Ok(rmp_serde::from_slice(&bytes)?) 
} pub fn write(&self, val: &T) -> Result<()> { trace!("writing {}", display_path(&self.cache_file_path)); if let Some(parent) = self.cache_file_path.parent() { file::create_dir_all(parent)?; } let partial_path = self .cache_file_path .with_extension(format!("part-{}", random_string(8))); let mut zlib = ZlibEncoder::new(File::create(&partial_path)?, Compression::fast()); zlib.write_all(&rmp_serde::to_vec_named(&val)?[..])?; file::rename(&partial_path, &self.cache_file_path)?; Ok(()) } #[cfg(test)] pub fn clear(&self) -> Result<()> { let path = &self.cache_file_path; trace!("clearing cache {}", path.display()); if path.exists() { file::remove_file(path)?; } Ok(()) } fn is_fresh(&self) -> bool { if !self.cache_file_path.exists() { return false; } if let Some(fresh_duration) = self.freshest_duration() && let Ok(metadata) = self.cache_file_path.metadata() && let Ok(modified) = metadata.modified() { return modified.elapsed().unwrap_or_default() < fresh_duration; } true } fn freshest_duration(&self) -> Option<Duration> { let mut freshest = self.fresh_duration; for path in self.fresh_files.iter().unique() { let duration = modified_duration(path).unwrap_or_default(); freshest = Some(match freshest { None => duration, Some(freshest) => min(freshest, duration), }) } freshest } } pub(crate) struct PruneResults { pub(crate) size: u64, pub(crate) count: u64, } pub(crate) struct PruneOptions { pub(crate) dry_run: bool, pub(crate) verbose: bool, pub(crate) age: Duration, } pub(crate) fn auto_prune() -> Result<()> { if !rand::random::<u8>().is_multiple_of(100) { return Ok(()); // only prune 1% of the time } let settings = Settings::get(); let age = match settings.cache_prune_age_duration() { Some(age) => age, None => { return Ok(()); } }; let auto_prune_file = dirs::CACHE.join(".auto_prune"); if let Ok(Ok(modified)) = auto_prune_file.metadata().map(|m| m.modified()) && modified.elapsed().unwrap_or_default() < age { return Ok(()); } let empty = 
file::ls(*dirs::CACHE).unwrap_or_default().is_empty(); xx::file::touch_dir(&auto_prune_file)?; if empty { return Ok(()); } debug!( "pruning old cache files, this behavior can be modified with the MISE_CACHE_PRUNE_AGE setting" ); prune( *dirs::CACHE, &PruneOptions { dry_run: false, verbose: false, age, }, )?; Ok(()) } pub(crate) fn prune(dir: &Path, opts: &PruneOptions) -> Result<PruneResults> { let mut results = PruneResults { size: 0, count: 0 }; let remove = |file: &Path| { if opts.dry_run || opts.verbose { info!("pruning {}", display_path(file)); } else { debug!("pruning {}", display_path(file)); } if !opts.dry_run { file::remove_file_or_dir(file)?; } Ok::<(), color_eyre::Report>(()) }; for subdir in file::dir_subdirs(dir)? { let subdir = dir.join(&subdir); let r = prune(&subdir, opts)?; results.size += r.size; results.count += r.count; let metadata = subdir.metadata()?; // only delete empty directories if they're old if file::ls(&subdir)?.is_empty() && metadata.modified()?.elapsed().unwrap_or_default() > opts.age { remove(&subdir)?; results.count += 1; } } for f in file::ls(dir)? { let path = dir.join(&f); let metadata = path.metadata()?; let elapsed = metadata.accessed()?.elapsed().unwrap_or_default(); if elapsed > opts.age { remove(&path)?; results.size += metadata.len(); results.count += 1; } } Ok(results) } #[cfg(test)] mod tests { use crate::config::Config; use super::*; use pretty_assertions::assert_eq; #[tokio::test] async fn test_cache() { let _config = Config::get().await.unwrap(); let cache = CacheManagerBuilder::new(dirs::CACHE.join("test-cache")).build(); cache.clear().unwrap(); let val = cache.get_or_try_init(|| Ok(1)).unwrap(); assert_eq!(val, &1); let val = cache.get_or_try_init(|| Ok(2)).unwrap(); assert_eq!(val, &1); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/exit.rs
src/exit.rs
use crate::cmd::CmdLineRunner; #[cfg(unix)] use nix::sys::signal::SIGTERM; pub fn exit(code: i32) -> ! { #[cfg(unix)] CmdLineRunner::kill_all(SIGTERM); #[cfg(windows)] CmdLineRunner::kill_all(); debug!("exiting with code: {code}"); std::process::exit(code) }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/versions_host.rs
src/versions_host.rs
use crate::backend::VersionInfo; use crate::config::Settings; use crate::http; use crate::http::HTTP_FETCH; use crate::plugins::core::CORE_PLUGINS; use crate::registry::REGISTRY; use std::{ collections::{HashMap, HashSet}, sync::{ LazyLock, atomic::{AtomicBool, Ordering}, }, }; use tokio::sync::Mutex; /// Tools that use the versions host for listing versions /// (excludes java/python due to complex version schemes) static PLUGINS_USE_VERSION_HOST: LazyLock<HashSet<&str>> = LazyLock::new(|| { CORE_PLUGINS .keys() .map(|name| name.as_str()) .chain(REGISTRY.keys().copied()) .filter(|name| !matches!(*name, "java" | "python")) .collect() }); /// Tools that should have downloads tracked /// (all core plugins and registry tools, including java/python) static PLUGINS_TRACK_DOWNLOADS: LazyLock<HashSet<&str>> = LazyLock::new(|| { CORE_PLUGINS .keys() .map(|name| name.as_str()) .chain(REGISTRY.keys().copied()) .collect() }); /// Response format from the versions host TOML endpoint #[derive(serde::Deserialize)] struct VersionsResponse { versions: indexmap::IndexMap<String, VersionEntry>, } #[derive(serde::Deserialize)] struct VersionEntry { created_at: toml::value::Datetime, #[serde(default)] release_url: Option<String>, } /// List versions from the versions host (mise-versions.jdx.dev). /// Returns Vec<VersionInfo> with created_at timestamps from the TOML endpoint. 
pub async fn list_versions(tool: &str) -> eyre::Result<Option<Vec<VersionInfo>>> { if !Settings::get().use_versions_host || !PLUGINS_USE_VERSION_HOST.contains(tool) { return Ok(None); } static CACHE: LazyLock<Mutex<HashMap<String, Vec<VersionInfo>>>> = LazyLock::new(|| Mutex::new(HashMap::new())); static RATE_LIMITED: AtomicBool = AtomicBool::new(false); if let Some(versions) = CACHE.lock().await.get(tool) { return Ok(Some(versions.clone())); } if RATE_LIMITED.load(Ordering::Relaxed) { warn!("{tool}: skipping versions host check due to rate limit"); return Ok(None); } // Use TOML format which includes created_at timestamps let url = format!("https://mise-versions.jdx.dev/tools/{}.toml", tool); let versions: Vec<VersionInfo> = match HTTP_FETCH.get_text(&url).await { Ok(body) => { let response: VersionsResponse = toml::from_str(&body)?; response .versions .into_iter() .map(|(version, entry)| VersionInfo { version, created_at: Some(entry.created_at.to_string()), release_url: entry.release_url, }) .collect() } Err(err) => match http::error_code(&err).unwrap_or(0) { 404 => return Ok(None), 429 => { RATE_LIMITED.store(true, Ordering::Relaxed); warn!("{tool}: mise-versions rate limited"); return Ok(None); } _ => return Err(err), }, }; trace!( "got {} {} versions from versions host", versions.len(), tool ); if versions.is_empty() { return Ok(None); } CACHE .lock() .await .insert(tool.to_string(), versions.clone()); Ok(Some(versions)) } /// Tracks a tool installation asynchronously (fire-and-forget) /// Tracks all core plugins and registry tools (including java/python) pub fn track_install(tool: &str, full: &str, version: &str) { let settings = Settings::get(); // Check if tracking is enabled (also requires use_versions_host to be enabled) if !settings.use_versions_host || !settings.use_versions_host_track { return; } // Only track known tools (core plugins and registry tools) if !PLUGINS_TRACK_DOWNLOADS.contains(tool) { return; } let tool = tool.to_string(); let full = 
full.to_string(); let version = version.to_string(); // Fire-and-forget: spawn a task that won't block installation tokio::spawn(async move { if let Err(e) = track_install_async(&tool, &full, &version).await { trace!("Failed to track install for {tool}@{version}: {e}"); } }); } async fn track_install_async(tool: &str, full: &str, version: &str) -> eyre::Result<()> { use crate::cli::version::{ARCH, OS}; let url = "https://mise-versions.jdx.dev/api/track"; let body = serde_json::json!({ "tool": tool, "full": full, "version": version, "os": *OS, "arch": *ARCH }); match HTTP_FETCH.post_json(url, &body).await { Ok(true) => trace!("Tracked install: {full}@{version}"), Ok(false) => trace!("Track request failed"), Err(e) => trace!("Track request error: {e}"), } Ok(()) }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/install_context.rs
src/install_context.rs
use std::sync::Arc; use crate::ui::progress_report::SingleReport; use crate::{config::Config, toolset::Toolset}; pub struct InstallContext { pub config: Arc<Config>, pub ts: Arc<Toolset>, pub pr: Box<dyn SingleReport>, pub force: bool, pub dry_run: bool, /// require lockfile URLs to be present; fail if not pub locked: bool, }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/cmd.rs
src/cmd.rs
use std::collections::HashSet; use std::ffi::{OsStr, OsString}; use std::fmt::{Debug, Display, Formatter}; use std::io::{BufRead, BufReader, Write}; use std::path::{Path, PathBuf}; use std::process::{Command, ExitStatus, Stdio}; use std::sync::mpsc::channel; use std::sync::{Arc, Mutex, RwLock}; use std::thread; use color_eyre::Result; use duct::{Expression, IntoExecutablePath}; use eyre::Context; use indexmap::IndexSet; #[cfg(not(any(test, target_os = "windows")))] use signal_hook::consts::{SIGHUP, SIGINT, SIGQUIT, SIGTERM, SIGUSR1, SIGUSR2}; #[cfg(not(any(test, target_os = "windows")))] use signal_hook::iterator::Signals; use std::sync::LazyLock as Lazy; use crate::config::Settings; use crate::env; use crate::env::PATH_KEY; use crate::errors::Error::ScriptFailed; use crate::file::display_path; use crate::ui::progress_report::SingleReport; /// Create a command with any number of of positional arguments /// /// may be different types (anything that implements [`Into<OsString>`](https://doc.rust-lang.org/std/convert/trait.From.html)). /// See also the [`cmd`](fn.cmd.html) function, which takes a collection of arguments. /// /// # Example /// /// ``` /// use std::path::Path; /// use mise::cmd; /// /// let arg1 = "foo"; /// let arg2 = "bar".to_owned(); /// let arg3 = Path::new("baz"); /// /// let output = cmd!("echo", arg1, arg2, arg3).read(); /// /// assert_eq!("foo bar baz", output.unwrap()); /// ``` #[macro_export] macro_rules! cmd { ( $program:expr $(, $arg:expr )* $(,)? ) => { { use std::ffi::OsString; let args: std::vec::Vec<OsString> = std::vec![$( Into::<OsString>::into($arg) ),*]; $crate::cmd::cmd($program, args) } }; } /// Create a command with any number of of positional arguments, which may be /// different types (anything that implements /// [`Into<OsString>`](https://doc.rust-lang.org/std/convert/trait.From.html)). /// See also the [`cmd`](fn.cmd.html) function, which takes a collection of /// arguments. 
/// /// # Example /// /// ``` /// use std::path::Path; /// use mise::cmd; /// /// let arg1 = "foo"; /// let arg2 = "bar".to_owned(); /// let arg3 = Path::new("baz"); /// /// let output = cmd!("echo", arg1, arg2, arg3).read(); /// /// assert_eq!("foo bar baz", output.unwrap()); /// ``` pub fn cmd<T, U>(program: T, args: U) -> Expression where T: IntoExecutablePath, U: IntoIterator, U::Item: Into<OsString>, { let program = program.to_executable(); let args: Vec<OsString> = args.into_iter().map(Into::<OsString>::into).collect(); let display_name = program.to_string_lossy(); let display_args = args .iter() .map(|s| s.to_string_lossy()) .collect::<Vec<_>>() .join(" "); let display_command = [display_name.into(), display_args].join(" "); debug!("$ {display_command}"); duct::cmd(program, args) } pub struct CmdLineRunner<'a> { cmd: Command, pr: Option<&'a dyn SingleReport>, pr_arc: Option<Arc<Box<dyn SingleReport>>>, stdin: Option<String>, redactions: IndexSet<String>, raw: bool, pass_signals: bool, on_stdout: Option<Box<dyn Fn(String) + Send + 'a>>, on_stderr: Option<Box<dyn Fn(String) + Send + 'a>>, } static OUTPUT_LOCK: Mutex<()> = Mutex::new(()); static RUNNING_PIDS: Lazy<Mutex<HashSet<u32>>> = Lazy::new(Default::default); impl<'a> CmdLineRunner<'a> { pub fn new<P: AsRef<OsStr>>(program: P) -> Self { let mut cmd = Command::new(program); cmd.stdin(Stdio::null()); cmd.stdout(Stdio::piped()); cmd.stderr(Stdio::piped()); Self { cmd, pr: None, pr_arc: None, stdin: None, redactions: Default::default(), raw: false, pass_signals: false, on_stdout: None, on_stderr: None, } } #[cfg(unix)] pub fn kill_all(signal: nix::sys::signal::Signal) { let pids = RUNNING_PIDS.lock().unwrap(); for pid in pids.iter() { let pid = *pid as i32; trace!("{signal}: {pid}"); if let Err(e) = nix::sys::signal::kill(nix::unistd::Pid::from_raw(pid), signal) { debug!("Failed to kill cmd {pid}: {e}"); } } } #[cfg(windows)] pub fn kill_all() { let pids = RUNNING_PIDS.lock().unwrap(); for pid in pids.iter() 
{ if let Err(e) = Command::new("taskkill") .arg("/F") .arg("/T") .arg("/PID") .arg(pid.to_string()) .spawn() { warn!("Failed to kill cmd {pid}: {e}"); } } } pub fn stdin<T: Into<Stdio>>(mut self, cfg: T) -> Self { self.cmd.stdin(cfg); self } pub fn stdout<T: Into<Stdio>>(mut self, cfg: T) -> Self { self.cmd.stdout(cfg); self } pub fn stderr<T: Into<Stdio>>(mut self, cfg: T) -> Self { self.cmd.stderr(cfg); self } pub fn redact(mut self, redactions: impl IntoIterator<Item = String>) -> Self { for r in redactions { if !r.is_empty() { self.redactions.insert(r); } } self } pub fn with_on_stdout<F: Fn(String) + Send + 'a>(mut self, on_stdout: F) -> Self { self.on_stdout = Some(Box::new(on_stdout)); self } pub fn with_on_stderr<F: Fn(String) + Send + 'a>(mut self, on_stderr: F) -> Self { self.on_stderr = Some(Box::new(on_stderr)); self } pub fn current_dir<P: AsRef<Path>>(mut self, dir: P) -> Self { self.cmd.current_dir(dir); self } pub fn env_clear(mut self) -> Self { self.cmd.env_clear(); self } pub fn env<K, V>(mut self, key: K, val: V) -> Self where K: AsRef<OsStr>, V: AsRef<OsStr>, { self.cmd.env(key, val); self } pub fn envs<I, K, V>(mut self, vars: I) -> Self where I: IntoIterator<Item = (K, V)>, K: AsRef<OsStr>, V: AsRef<OsStr>, { self.cmd.envs(vars); self } pub fn prepend_path(mut self, paths: Vec<PathBuf>) -> eyre::Result<Self> { let existing = self .get_env(&PATH_KEY) .map(|c| c.to_owned()) .unwrap_or_else(|| env::var_os(&*PATH_KEY).unwrap()); let paths = paths .into_iter() .chain(env::split_paths(&existing)) .collect::<Vec<_>>(); self.cmd.env(&*PATH_KEY, env::join_paths(paths)?); Ok(self) } fn get_env(&self, key: &str) -> Option<&OsStr> { for (k, v) in self.cmd.get_envs() { if k == key { return v; } } None } pub fn opt_arg<S: AsRef<OsStr>>(mut self, arg: Option<S>) -> Self { if let Some(arg) = arg { self.cmd.arg(arg); } self } pub fn opt_args<S: AsRef<OsStr>>(mut self, arg: &str, values: Option<Vec<S>>) -> Self { if let Some(values) = values { for value in 
values { self.cmd.arg(arg); self.cmd.arg(value); } } self } pub fn arg<S: AsRef<OsStr>>(mut self, arg: S) -> Self { self.cmd.arg(arg.as_ref()); self } pub fn args<I, S>(mut self, args: I) -> Self where I: IntoIterator<Item = S>, S: AsRef<OsStr>, { self.cmd.args(args); self } pub fn with_pr(mut self, pr: &'a dyn SingleReport) -> Self { self.pr = Some(pr); self } pub fn with_pr_arc(mut self, pr: Arc<Box<dyn SingleReport>>) -> Self { self.pr_arc = Some(pr); self } pub fn raw(mut self, raw: bool) -> Self { self.raw = raw; self } pub fn with_pass_signals(&mut self) -> &mut Self { self.pass_signals = true; self } pub fn stdin_string(mut self, input: impl Into<String>) -> Self { self.cmd.stdin(Stdio::piped()); self.stdin = Some(input.into()); self } #[allow(clippy::readonly_write_lock)] pub fn execute(mut self) -> Result<()> { static RAW_LOCK: RwLock<()> = RwLock::new(()); let read_lock = RAW_LOCK.read().unwrap(); debug!("$ {self}"); if Settings::get().raw || self.raw { drop(read_lock); let _write_lock = RAW_LOCK.write().unwrap(); return self.execute_raw(); } let mut cp = self .cmd .spawn() .wrap_err_with(|| format!("failed to execute command: {self}"))?; let id = cp.id(); RUNNING_PIDS.lock().unwrap().insert(id); trace!("Started process: {id} for {}", self.get_program()); let (tx, rx) = channel(); if let Some(stdout) = cp.stdout.take() { thread::spawn({ let name = self.to_string(); let tx = tx.clone(); move || { for line in BufReader::new(stdout).lines() { match line { Ok(line) => tx.send(ChildProcessOutput::Stdout(line)).unwrap(), Err(e) => warn!("Failed to read stdout for {name}: {e}"), } } } }); } if let Some(stderr) = cp.stderr.take() { thread::spawn({ let name = self.to_string(); let tx = tx.clone(); move || { for line in BufReader::new(stderr).lines() { match line { Ok(line) => tx.send(ChildProcessOutput::Stderr(line)).unwrap(), Err(e) => warn!("Failed to read stderr for {name}: {e}"), } } } }); } if let Some(text) = self.stdin.take() { let mut stdin = 
cp.stdin.take().unwrap(); thread::spawn(move || { stdin.write_all(text.as_bytes()).unwrap(); }); } #[cfg(not(any(test, target_os = "windows")))] let mut sighandle = None; #[cfg(not(any(test, target_os = "windows")))] if self.pass_signals { let mut signals = Signals::new([SIGINT, SIGTERM, SIGTERM, SIGHUP, SIGQUIT, SIGUSR1, SIGUSR2])?; sighandle = Some(signals.handle()); let tx = tx.clone(); thread::spawn(move || { for sig in &mut signals { tx.send(ChildProcessOutput::Signal(sig)).unwrap(); } }); } thread::spawn(move || { let status = cp.wait().unwrap(); #[cfg(not(any(test, target_os = "windows")))] if let Some(sighandle) = sighandle { sighandle.close(); } tx.send(ChildProcessOutput::ExitStatus(status)).unwrap(); }); let mut combined_output = vec![]; let mut status = None; for line in rx { match line { ChildProcessOutput::Stdout(line) => { let line = self .redactions .iter() .fold(line, |acc, r| acc.replace(r, "[redacted]")); self.on_stdout(line.clone()); combined_output.push(line); } ChildProcessOutput::Stderr(line) => { let line = self .redactions .iter() .fold(line, |acc, r| acc.replace(r, "[redacted]")); self.on_stderr(line.clone()); combined_output.push(line); } ChildProcessOutput::ExitStatus(s) => { RUNNING_PIDS.lock().unwrap().remove(&id); status = Some(s); } #[cfg(not(any(test, windows)))] ChildProcessOutput::Signal(sig) => { if sig != SIGINT { debug!("Received signal {sig}, {id}"); let pid = nix::unistd::Pid::from_raw(id as i32); let sig = nix::sys::signal::Signal::try_from(sig).unwrap(); nix::sys::signal::kill(pid, sig)?; } } } } RUNNING_PIDS.lock().unwrap().remove(&id); let status = status.unwrap(); if !status.success() { self.on_error(combined_output.join("\n"), status)?; } Ok(()) } fn execute_raw(mut self) -> Result<()> { let status = self.cmd.spawn()?.wait()?; match status.success() { true => Ok(()), false => self.on_error(String::new(), status), } } fn on_stdout(&self, line: String) { let _lock = OUTPUT_LOCK.lock().unwrap(); if let Some(on_stdout) = 
&self.on_stdout { on_stdout(line); return; } if let Some(pr) = self .pr .or(self.pr_arc.as_ref().map(|arc| arc.as_ref().as_ref())) { if !line.trim().is_empty() { pr.set_message(line) } } else if console::colors_enabled() { println!("{line}\x1b[0m"); } else { println!("{line}"); } } fn on_stderr(&self, line: String) { let _lock = OUTPUT_LOCK.lock().unwrap(); if let Some(on_stderr) = &self.on_stderr { on_stderr(line); return; } match self .pr .or(self.pr_arc.as_ref().map(|arc| arc.as_ref().as_ref())) { Some(pr) => { if !line.trim().is_empty() { pr.println(line) } } None => { if console::colors_enabled_stderr() { eprintln!("{line}\x1b[0m"); } else { eprintln!("{line}"); } } } } fn on_error(&self, output: String, status: ExitStatus) -> Result<()> { match self .pr .or(self.pr_arc.as_ref().map(|arc| arc.as_ref().as_ref())) { Some(pr) => { error!("{} failed", self.get_program()); if !Settings::get().verbose && !output.trim().is_empty() { pr.println(output); } } None => { // eprintln!("{}", output); } } Err(ScriptFailed(self.get_program(), Some(status)))? 
} fn get_program(&self) -> String { display_path(PathBuf::from(self.cmd.get_program())) } fn get_args(&self) -> Vec<String> { self.cmd .get_args() .map(|s| s.to_string_lossy().to_string()) .collect::<Vec<_>>() } } impl Display for CmdLineRunner<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { let args = self.get_args().join(" "); write!(f, "{} {args}", self.get_program()) } } impl Debug for CmdLineRunner<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { let args = self.get_args().join(" "); write!(f, "{} {args}", self.get_program()) } } enum ChildProcessOutput { Stdout(String), Stderr(String), ExitStatus(ExitStatus), #[cfg(not(any(test, target_os = "windows")))] Signal(i32), } #[cfg(test)] #[cfg(unix)] mod tests { use pretty_assertions::assert_eq; use crate::config::Config; #[tokio::test] async fn test_cmd() { let _config = Config::get().await.unwrap(); let output = cmd!("echo", "foo", "bar").read().unwrap(); assert_eq!("foo bar", output); } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/uv.rs
src/uv.rs
use crate::cli::args::BackendArg; use crate::cmd::CmdLineRunner; use crate::config::{Config, Settings}; use crate::ui::multi_progress_report::MultiProgressReport; use crate::{Result, toolset::Toolset}; use crate::{dirs, file}; use std::path::PathBuf; use std::sync::LazyLock as Lazy; use std::{collections::HashMap, sync::Arc}; use tokio::sync::OnceCell; #[derive(Clone, Debug)] pub struct Venv { pub venv_path: PathBuf, pub env: HashMap<String, String>, } // use a mutex to prevent deadlocks that occurs due to reentrantly initialization // when resolving the venv path or env vars static UV_VENV: Lazy<OnceCell<Option<Venv>>> = Lazy::new(Default::default); pub async fn uv_venv(config: &Arc<Config>, ts: &Toolset) -> &'static Option<Venv> { UV_VENV .get_or_init(async || { let settings = Settings::get(); if !settings.python.uv_venv_auto { return None; } let (Some(venv_path), Some(uv_path)) = (venv_path(), uv_path(config, ts).await) else { return None; }; match get_or_create_venv(ts, venv_path, uv_path).await { Ok(venv) => Some(venv), Err(e) => { warn!("uv venv failed: {e}"); None } } }) .await } async fn get_or_create_venv(ts: &Toolset, venv_path: PathBuf, uv_path: PathBuf) -> Result<Venv> { #[cfg(windows)] let venv_bin_dir = "Scripts"; #[cfg(not(windows))] let venv_bin_dir = "bin"; let mut venv = Venv { env: Default::default(), venv_path: venv_path.join(venv_bin_dir), }; if let Some(python_tv) = ts .versions .get(&BackendArg::from("python")) .and_then(|tvl| tvl.versions.first()) { venv.env .insert("UV_PYTHON".to_string(), python_tv.version.to_string()); } if !venv_path.exists() { let mpr = MultiProgressReport::get(); let pr = mpr.add("Creating uv venv"); let mut cmd = CmdLineRunner::new(uv_path) .current_dir(uv_root().unwrap()) .with_pr(pr.as_ref()) .envs(&venv.env) .arg("venv"); if !log::log_enabled!(log::Level::Debug) { cmd = cmd.arg("--quiet"); } if let Some(extra) = Settings::get().python.uv_venv_create_args.clone() { cmd = cmd.args(extra); } cmd.execute()?; } 
venv.env.insert( "VIRTUAL_ENV".to_string(), venv_path.to_string_lossy().to_string(), ); Ok(venv) } fn uv_root() -> Option<PathBuf> { file::find_up(dirs::CWD.as_ref()?, &["uv.lock"]).map(|p| p.parent().unwrap().to_path_buf()) } fn venv_path() -> Option<PathBuf> { Some(uv_root()?.join(".venv")) } async fn uv_path(config: &Arc<Config>, ts: &Toolset) -> Option<PathBuf> { if let Some(uv_path) = ts.which_bin(config, "uv").await { return Some(uv_path); } which::which("uv").ok() }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/toml.rs
src/toml.rs
use std::collections::HashSet; #[macro_export] macro_rules! parse_error { ($key:expr, $val:expr, $t:expr) => {{ use eyre::bail; bail!( r#"expected value of {} to be a {}, got: {}"#, $crate::ui::style::eyellow($key), $crate::ui::style::ecyan($t), $crate::ui::style::eblue($val.to_string().trim()), ) }}; } pub fn dedup_toml_array(array: &toml_edit::Array) -> toml_edit::Array { let mut seen = HashSet::new(); let mut deduped = toml_edit::Array::new(); for item in array.iter() { if seen.insert(item.as_str()) { deduped.push(item.clone()); } } deduped }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/gpg.rs
src/gpg.rs
use crate::Result; use crate::cmd::CmdLineRunner; use crate::install_context::InstallContext; pub fn add_keys_node(ctx: &InstallContext) -> Result<()> { add_keys(ctx, include_str!("assets/gpg/node.asc")) } pub fn add_keys_swift(ctx: &InstallContext) -> Result<()> { add_keys(ctx, include_str!("assets/gpg/swift.asc")) } fn add_keys(ctx: &InstallContext, keys: &str) -> Result<()> { CmdLineRunner::new("gpg") .arg("--quiet") .arg("--import") .stdin_string(keys) .with_pr(ctx.pr.as_ref()) .execute() }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/hook_env.rs
src/hook_env.rs
use std::io::prelude::*; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::time::{SystemTime, UNIX_EPOCH}; use std::{collections::BTreeSet, sync::Arc}; use base64::prelude::*; use eyre::Result; use flate2::Compression; use flate2::write::{ZlibDecoder, ZlibEncoder}; use indexmap::IndexSet; use itertools::Itertools; use serde_derive::{Deserialize, Serialize}; use std::sync::LazyLock as Lazy; use crate::cli::HookReason; use crate::config::{Config, DEFAULT_CONFIG_FILENAMES, Settings}; use crate::env::PATH_KEY; use crate::env_diff::{EnvDiffOperation, EnvDiffPatches, EnvMap}; use crate::hash::hash_to_str; use crate::shell::Shell; use crate::{dirs, duration, env, file, hooks, watch_files}; /// Directory to store per-directory last check timestamps. /// Timestamps are stored per-directory (using a hash of CWD) so that /// multiple shells in different directories don't interfere with each other. static LAST_CHECK_DIR: Lazy<PathBuf> = Lazy::new(|| dirs::STATE.join("hook-env-checks")); /// Get the path to the last check file for a specific directory. fn last_check_file_for_dir(dir: &Path) -> PathBuf { let hash = hash_to_str(&dir.to_string_lossy()); LAST_CHECK_DIR.join(hash) } /// Read the last full check timestamp from the state file for the current directory. fn read_last_full_check() -> u128 { let Some(cwd) = &*dirs::CWD else { return 0; }; std::fs::read_to_string(last_check_file_for_dir(cwd)) .ok() .and_then(|s| s.trim().parse().ok()) .unwrap_or(0) } /// Write the last full check timestamp to the state file for the current directory. 
fn write_last_full_check(timestamp: u128) { let Some(cwd) = &*dirs::CWD else { return; }; if let Err(e) = file::create_dir_all(&*LAST_CHECK_DIR) { trace!("failed to create last check dir: {e}"); return; } if let Err(e) = std::fs::write(last_check_file_for_dir(cwd), timestamp.to_string()) { trace!("failed to write last check file: {e}"); } } /// Convert a SystemTime to milliseconds since Unix epoch fn mtime_to_millis(mtime: SystemTime) -> u128 { mtime .duration_since(UNIX_EPOCH) .unwrap_or_default() .as_millis() } pub static PREV_SESSION: Lazy<HookEnvSession> = Lazy::new(|| { env::var("__MISE_SESSION") .ok() .and_then(|s| { deserialize(s) .map_err(|err| { warn!("error deserializing __MISE_SESSION: {err}"); err }) .ok() }) .unwrap_or_default() }); #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct WatchFilePattern { pub root: Option<PathBuf>, pub patterns: Vec<String>, } impl From<&Path> for WatchFilePattern { fn from(path: &Path) -> Self { Self { root: None, patterns: vec![path.to_string_lossy().to_string()], } } } impl From<PathBuf> for WatchFilePattern { fn from(path: PathBuf) -> Self { Self { patterns: vec![path.to_string_lossy().to_string()], root: Some(path), } } } /// Fast-path early exit check that can be called BEFORE loading config/tools. /// This checks basic conditions using only the previous session data. /// Returns true if we can definitely skip hook-env, false if we need to continue. 
pub fn should_exit_early_fast() -> bool { let args = env::ARGS.read().unwrap(); if args.len() < 2 || args[1] != "hook-env" { return false; } // Can't exit early if no previous session // Check for dir being set as a proxy for "has valid session" // (loaded_configs can be empty if there are no config files) if PREV_SESSION.dir.is_none() { return false; } // Can't exit early if --force flag is present if args.iter().any(|a| a == "--force" || a == "-f") { return false; } // Check if running from precmd for the first time // Handle both "--reason=precmd" and "--reason precmd" forms let is_precmd = args.iter().any(|a| a == "--reason=precmd") || args .windows(2) .any(|w| w[0] == "--reason" && w[1] == "precmd"); if is_precmd && !*env::__MISE_ZSH_PRECMD_RUN { return false; } // Get settings for cache_ttl and chpwd_only let settings = Settings::get(); let cache_ttl_ms = duration::parse_duration(&settings.hook_env.cache_ttl) .map(|d| d.as_millis()) .inspect_err(|e| warn!("invalid hook_env.cache_ttl setting: {e}")) .unwrap_or(0); // Compute TTL window check only if cache_ttl is enabled (avoid unnecessary file read) let (now, within_ttl_window) = if cache_ttl_ms > 0 { let now = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap_or_default() .as_millis(); let last_full_check = read_last_full_check(); (now, now.saturating_sub(last_full_check) < cache_ttl_ms) } else { (0, false) }; // Can't exit early if directory changed if dir_change().is_some() { return false; } // Can't exit early if MISE_ env vars changed (cheap in-memory hash comparison) if have_mise_env_vars_been_modified() { return false; } // chpwd_only mode: skip on precmd if directory hasn't changed // This significantly reduces stat operations on slow filesystems like NFS // Note: We check this AFTER env var check since that's cheap (no I/O) if settings.hook_env.chpwd_only && is_precmd { trace!("chpwd_only enabled, skipping precmd hook-env"); return true; } // Cache TTL check: if within the TTL window, skip all 
stat operations // This is useful for slow filesystems like NFS where stat calls are expensive if within_ttl_window { trace!("within cache TTL, skipping filesystem checks"); return true; } // Check if any loaded config files have been modified for config_path in &PREV_SESSION.loaded_configs { if let Ok(metadata) = config_path.metadata() { if let Ok(modified) = metadata.modified() && mtime_to_millis(modified) > PREV_SESSION.latest_update { return false; } } else if !config_path.exists() { return false; } } // Check if data dir has been modified (new tools installed, etc.) // Also check if it's been deleted - this requires a full update if !dirs::DATA.exists() { return false; } if let Ok(metadata) = dirs::DATA.metadata() && let Ok(modified) = metadata.modified() && mtime_to_millis(modified) > PREV_SESSION.latest_update { return false; } // Check if any directory in the config search path has been modified // This catches new config files created anywhere in the hierarchy if let Some(cwd) = &*dirs::CWD && let Ok(ancestor_dirs) = file::all_dirs(cwd, &env::MISE_CEILING_PATHS) { // Config subdirectories that might contain config files let config_subdirs = DEFAULT_CONFIG_FILENAMES .iter() .map(|f| Path::new(f).parent().and_then(|p| p.to_str()).unwrap_or("")) .unique() .collect::<Vec<_>>(); for dir in ancestor_dirs { for subdir in &config_subdirs { let check_dir = if subdir.is_empty() { dir.clone() } else { dir.join(subdir) }; if let Ok(metadata) = check_dir.metadata() && let Ok(modified) = metadata.modified() && mtime_to_millis(modified) > PREV_SESSION.latest_update { return false; } } } } // Filesystem checks passed - update the last check timestamp so subsequent // prompts can benefit from the TTL cache without repeating these checks if cache_ttl_ms > 0 { write_last_full_check(now); } true } /// Check if hook-env can exit early after config is loaded. 
/// This is called after the fast-path check and handles cases that need /// the full config (watch_files, hook scheduling). pub fn should_exit_early( watch_files: impl IntoIterator<Item = WatchFilePattern>, reason: Option<HookReason>, ) -> bool { // Force hook-env to run at least once from precmd after activation // This catches PATH modifications from shell initialization (e.g., path_helper in zsh) if reason == Some(HookReason::Precmd) && !*env::__MISE_ZSH_PRECMD_RUN { trace!("__MISE_ZSH_PRECMD_RUN=0 and reason=precmd, forcing hook-env to run"); return false; } // Schedule hooks on directory change (can't do this in fast-path) if dir_change().is_some() { hooks::schedule_hook(hooks::Hooks::Cd); hooks::schedule_hook(hooks::Hooks::Enter); hooks::schedule_hook(hooks::Hooks::Leave); return false; } // Check full watch_files list from config (may include more than config files) let watch_files = match get_watch_files(watch_files) { Ok(w) => w, Err(e) => { warn!("error getting watch files: {e}"); return false; } }; if have_files_been_modified(watch_files) { return false; } if have_mise_env_vars_been_modified() { return false; } trace!("early-exit"); true } pub fn dir_change() -> Option<(Option<PathBuf>, PathBuf)> { match (&PREV_SESSION.dir, &*dirs::CWD) { (Some(old), Some(new)) if old != new => { trace!("dir change: {:?} -> {:?}", old, new); Some((Some(old.clone()), new.clone())) } (None, Some(new)) => { trace!("dir change: None -> {:?}", new); Some((None, new.clone())) } _ => None, } } fn have_files_been_modified(watch_files: BTreeSet<PathBuf>) -> bool { if let Some(p) = PREV_SESSION.loaded_configs.iter().find(|p| !p.exists()) { trace!("config deleted: {}", p.display()); return true; } // check the files to see if they've been altered let mut modified = false; for fp in &watch_files { if let Ok(mtime) = fp.metadata().and_then(|m| m.modified()) { if mtime_to_millis(mtime) > PREV_SESSION.latest_update { trace!("file modified: {:?}", fp); modified = true; 
watch_files::add_modified_file(fp.clone()); } } else if !fp.exists() { trace!("file deleted: {:?}", fp); modified = true; watch_files::add_modified_file(fp.clone()); } } if !modified { trace!("watch files unmodified"); } modified } fn have_mise_env_vars_been_modified() -> bool { get_mise_env_vars_hashed() != PREV_SESSION.env_var_hash } #[derive(Debug, Default, Serialize, Deserialize)] pub struct HookEnvSession { pub loaded_tools: IndexSet<String>, pub loaded_configs: IndexSet<PathBuf>, pub config_paths: IndexSet<PathBuf>, pub env: EnvMap, #[serde(default)] pub aliases: indexmap::IndexMap<String, String>, dir: Option<PathBuf>, env_var_hash: String, latest_update: u128, } pub fn serialize<T: serde::Serialize>(obj: &T) -> Result<String> { let mut gz = ZlibEncoder::new(Vec::new(), Compression::fast()); gz.write_all(&rmp_serde::to_vec_named(obj)?)?; Ok(BASE64_STANDARD_NO_PAD.encode(gz.finish()?)) } pub fn deserialize<T: serde::de::DeserializeOwned>(raw: String) -> Result<T> { let mut writer = Vec::new(); let mut decoder = ZlibDecoder::new(writer); let bytes = BASE64_STANDARD_NO_PAD.decode(raw)?; decoder.write_all(&bytes[..])?; writer = decoder.finish()?; Ok(rmp_serde::from_slice(&writer[..])?) } pub async fn build_session( config: &Arc<Config>, env: EnvMap, aliases: indexmap::IndexMap<String, String>, loaded_tools: IndexSet<String>, watch_files: BTreeSet<WatchFilePattern>, ) -> Result<HookEnvSession> { let mut max_modtime = UNIX_EPOCH; for cf in get_watch_files(watch_files)? 
{ if let Ok(Ok(modified)) = cf.metadata().map(|m| m.modified()) { max_modtime = std::cmp::max(modified, max_modtime); } } let config_paths = if let Ok(paths) = config.path_dirs().await { paths.iter().cloned().collect() } else { IndexSet::new() }; let loaded_configs: IndexSet<PathBuf> = config.config_files.keys().cloned().collect(); // Update the last full check timestamp (only if cache_ttl feature is enabled) let settings = Settings::get(); if duration::parse_duration(&settings.hook_env.cache_ttl) .map(|d| d.as_millis() > 0) .unwrap_or(false) { let now = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap_or_default() .as_millis(); write_last_full_check(now); } Ok(HookEnvSession { dir: dirs::CWD.clone(), env_var_hash: get_mise_env_vars_hashed(), env, aliases, loaded_configs, loaded_tools, config_paths, latest_update: mtime_to_millis(max_modtime), }) } pub fn get_watch_files( watch_files: impl IntoIterator<Item = WatchFilePattern>, ) -> Result<BTreeSet<PathBuf>> { let mut watches = BTreeSet::new(); if dirs::DATA.exists() { watches.insert(dirs::DATA.to_path_buf()); } if dirs::TRUSTED_CONFIGS.exists() { watches.insert(dirs::TRUSTED_CONFIGS.to_path_buf()); } if dirs::IGNORED_CONFIGS.exists() { watches.insert(dirs::IGNORED_CONFIGS.to_path_buf()); } for (root, patterns) in &watch_files.into_iter().chunk_by(|wfp| wfp.root.clone()) { if let Some(root) = root { let patterns = patterns.flat_map(|wfp| wfp.patterns).collect::<Vec<_>>(); watches.extend(watch_files::glob(&root, &patterns)?); } else { watches.extend(patterns.flat_map(|wfp| wfp.patterns).map(PathBuf::from)); } } Ok(watches) } /// gets a hash of all MISE_ environment variables fn get_mise_env_vars_hashed() -> String { let env_vars: Vec<(&String, &String)> = env::PRISTINE_ENV .deref() .iter() .filter(|(k, _)| k.starts_with("MISE_")) .sorted() .collect(); hash_to_str(&env_vars) } pub fn clear_old_env(shell: &dyn Shell) -> String { let mut patches = env::__MISE_DIFF.reverse().to_patches(); // For fish shell, filter 
out PATH operations from the reversed diff because // fish has its own PATH management that conflicts with ours. if shell.to_string() == "fish" { patches.retain(|p| match p { EnvDiffOperation::Add(k, _) | EnvDiffOperation::Change(k, _) | EnvDiffOperation::Remove(k) => k != &*PATH_KEY, }); // Fish also needs PATH restored during deactivation let new_path = compute_deactivated_path(); patches.push(EnvDiffOperation::Change(PATH_KEY.to_string(), new_path)); } else { // For non-fish shells, we need to preserve user-added paths while removing mise paths let new_path = compute_deactivated_path(); patches.push(EnvDiffOperation::Change(PATH_KEY.to_string(), new_path)); } build_env_commands(shell, &patches) } /// Clear all aliases from the previous session. Called only during deactivation. pub fn clear_aliases(shell: &dyn Shell) -> String { let mut output = String::new(); for name in PREV_SESSION.aliases.keys() { output.push_str(&shell.unset_alias(name)); } output } /// Compute PATH after deactivation, preserving user additions fn compute_deactivated_path() -> String { // Get current PATH (may include user additions since last hook-env) let current_path = env::var("PATH").unwrap_or_default(); // Get the PATH that mise set during the last hook-env let mise_paths = &env::__MISE_DIFF.path; // Get pristine PATH (from before mise activation) let pristine_path = env::PRISTINE_ENV .deref() .get(&*PATH_KEY) .map(|s| s.to_string()) .unwrap_or_default(); if current_path.is_empty() || mise_paths.is_empty() { // If no current PATH or no mise PATH, just return pristine return pristine_path; } // Parse paths let current_paths: Vec<PathBuf> = env::split_paths(&current_path).collect(); let mise_paths_vec = mise_paths.clone(); // Count occurrences of each path in current_path, mise_paths, and pristine_path let pristine_paths: Vec<PathBuf> = env::split_paths(&pristine_path).collect(); let mut current_counts: std::collections::HashMap<PathBuf, usize> = std::collections::HashMap::new(); for 
path in &current_paths { *current_counts.entry(path.clone()).or_insert(0) += 1; } let mut mise_counts: std::collections::HashMap<PathBuf, usize> = std::collections::HashMap::new(); for path in &mise_paths_vec { *mise_counts.entry(path.clone()).or_insert(0) += 1; } let mut pristine_counts: std::collections::HashMap<PathBuf, usize> = std::collections::HashMap::new(); for path in &pristine_paths { *pristine_counts.entry(path.clone()).or_insert(0) += 1; } // Determine how many copies of each path we should keep: user additions plus pristine entries use std::collections::HashMap; let mut target_counts: HashMap<PathBuf, usize> = HashMap::new(); for (path, current_count) in current_counts.iter() { let removal_count = *mise_counts.get(path).unwrap_or(&0); let pristine_count = *pristine_counts.get(path).unwrap_or(&0); let user_and_pristine = current_count .saturating_sub(removal_count) .max(pristine_count); target_counts.insert(path.clone(), user_and_pristine); } for (path, pristine_count) in pristine_counts.iter() { target_counts .entry(path.clone()) .and_modify(|count| *count = (*count).max(*pristine_count)) .or_insert(*pristine_count); } let mut kept_counts: HashMap<PathBuf, usize> = HashMap::new(); let mut final_paths: Vec<PathBuf> = Vec::new(); for path in &current_paths { if let Some(target) = target_counts.get(path) { let kept = kept_counts.entry(path.clone()).or_insert(0); if *kept < *target { final_paths.push(path.clone()); *kept += 1; } } } for path in pristine_paths { let target = target_counts.get(&path).copied().unwrap_or(0); let kept = kept_counts.entry(path.clone()).or_insert(0); while *kept < target { final_paths.push(path.clone()); *kept += 1; } } env::join_paths(final_paths.iter()) .map(|p| p.to_string_lossy().to_string()) .unwrap_or(pristine_path) } pub fn build_env_commands(shell: &dyn Shell, patches: &EnvDiffPatches) -> String { let mut output = String::new(); for patch in patches.iter() { match patch { EnvDiffOperation::Add(k, v) | 
EnvDiffOperation::Change(k, v) => { output.push_str(&shell.set_env(k, v)); } EnvDiffOperation::Remove(k) => { output.push_str(&shell.unset_env(k)); } } } output } /// Build shell alias commands based on the difference between old and new aliases pub fn build_alias_commands( shell: &dyn Shell, old_aliases: &indexmap::IndexMap<String, String>, new_aliases: &indexmap::IndexMap<String, String>, ) -> String { let mut output = String::new(); // Remove aliases that no longer exist or have changed for (name, old_cmd) in old_aliases { match new_aliases.get(name) { Some(new_cmd) if new_cmd != old_cmd => { // Alias changed, unset then set new output.push_str(&shell.unset_alias(name)); output.push_str(&shell.set_alias(name, new_cmd)); } None => { // Alias removed output.push_str(&shell.unset_alias(name)); } _ => { // Alias unchanged, do nothing } } } // Add new aliases for (name, cmd) in new_aliases { if !old_aliases.contains_key(name) { output.push_str(&shell.set_alias(name, cmd)); } } output }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/duration.rs
src/duration.rs
pub use std::time::Duration; use eyre::{Result, bail}; use jiff::{Span, Timestamp, Zoned, civil::date}; pub const HOURLY: Duration = Duration::from_secs(60 * 60); pub const DAILY: Duration = Duration::from_secs(60 * 60 * 24); pub const WEEKLY: Duration = Duration::from_secs(60 * 60 * 24 * 7); pub fn parse_duration(s: &str) -> Result<Duration> { match s.parse::<Span>() { Ok(span) => { // we must provide a relative date to determine the duration with months and years let duration = span.to_duration(date(2025, 1, 1))?; if duration.is_negative() { bail!("duration must not be negative: {}", s); } Ok(duration.unsigned_abs()) } Err(_) => Ok(Duration::from_secs(s.parse()?)), } } /// Parse a date/duration string into a Timestamp. /// Supports: /// - RFC3339 timestamps: "2024-06-01T12:00:00Z" /// - ISO dates: "2024-06-01" (treated as end of day in UTC) /// - Relative durations: "90d", "1y", "6m" (subtracted from now) pub fn parse_into_timestamp(s: &str) -> Result<Timestamp> { // Try RFC3339 timestamp first if let Ok(ts) = s.parse::<Timestamp>() { return Ok(ts); } // Try parsing as a Zoned datetime (handles various formats) if let Ok(zoned) = s.parse::<Zoned>() { return Ok(zoned.timestamp()); } // Try parsing as date only (YYYY-MM-DD) - use end of day UTC if let Ok(civil_date) = s.parse::<jiff::civil::Date>() { let datetime = civil_date.at(23, 59, 59, 0); let ts = datetime.to_zoned(jiff::tz::TimeZone::UTC)?.timestamp(); return Ok(ts); } // Try parsing as duration and subtract from now if let Ok(span) = s.parse::<Span>() { // Validate that duration is positive (negative would result in future date) let duration = span.to_duration(date(2025, 1, 1))?; if duration.is_negative() { bail!("duration must not be negative: {}", s); } let now = Timestamp::now(); // Convert to Zoned to support calendar units (days, months, years) let now_zoned = now.to_zoned(jiff::tz::TimeZone::UTC); let past = now_zoned.checked_sub(span)?; return Ok(past.timestamp()); } bail!( "Invalid date or duration: 
{s}. Expected formats: '2024-06-01', '2024-06-01T12:00:00Z', '90d', '1y'" ) }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false
jdx/mise
https://github.com/jdx/mise/blob/3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb/src/toolset/toolset_paths.rs
src/toolset/toolset_paths.rs
use std::path::PathBuf; use std::sync::Arc; use dashmap::DashMap; use eyre::Result; use std::sync::LazyLock as Lazy; use crate::config::Config; use crate::config::env_directive::EnvResults; use crate::toolset::Toolset; use crate::uv; // Cache Toolset::list_paths results across identical toolsets within a process. // Keyed by project_root plus sorted list of backend@version pairs currently installed. pub(super) static LIST_PATHS_CACHE: Lazy<DashMap<String, Vec<PathBuf>>> = Lazy::new(DashMap::new); impl Toolset { pub async fn list_paths(&self, config: &Arc<Config>) -> Vec<PathBuf> { // Build a stable cache key based on project_root and current installed versions let mut key_parts = vec![]; if let Some(root) = &config.project_root { key_parts.push(root.to_string_lossy().to_string()); } let mut installed: Vec<String> = self .list_current_installed_versions(config) .into_iter() .map(|(p, tv)| format!("{}@{}", p.id(), tv.version)) .collect(); installed.sort(); key_parts.extend(installed); let cache_key = key_parts.join("|"); if let Some(entry) = LIST_PATHS_CACHE.get(&cache_key) { trace!("toolset.list_paths hit cache"); return entry.clone(); } let mut paths: Vec<PathBuf> = Vec::new(); for (p, tv) in self.list_current_installed_versions(config).into_iter() { let start = std::time::Instant::now(); let new_paths = p.list_bin_paths(config, &tv).await.unwrap_or_else(|e| { warn!("Error listing bin paths for {tv}: {e:#}"); Vec::new() }); trace!( "toolset.list_paths {}@{} list_bin_paths took {}ms", p.id(), tv.version, start.elapsed().as_millis() ); paths.extend(new_paths); } LIST_PATHS_CACHE.insert(cache_key, paths.clone()); paths .into_iter() .filter(|p| p.parent().is_some()) // TODO: why? 
.collect() } /// same as list_paths but includes config.list_paths, venv paths, and MISE_ADD_PATHs from self.env() pub async fn list_final_paths( &self, config: &Arc<Config>, env_results: EnvResults, ) -> Result<Vec<PathBuf>> { let mut paths = Vec::new(); // Match the tera_env PATH ordering from final_env(): // 1. Original system PATH is handled by PathEnv::from_iter() in env_with_path() // 2. Config path dirs paths.extend(config.path_dirs().await?.clone()); // 3. UV venv path (if any) - ensure project venv takes precedence over tool and tool_add_paths if let Some(venv) = uv::uv_venv(config, self).await { paths.push(venv.venv_path.clone()); } // 4. tool_add_paths (MISE_ADD_PATH/RTX_ADD_PATH from tools) paths.extend(env_results.tool_add_paths); // 5. Tool paths paths.extend(self.list_paths(config).await); // 6. env_results.env_paths (from load_post_env like _.path directives) - these go at the front let paths = env_results.env_paths.into_iter().chain(paths).collect(); Ok(paths) } /// Returns paths separated by their source: (user_configured_paths, tool_paths) /// User-configured paths should never be filtered, while tool paths should be filtered /// if they duplicate entries in the original PATH. pub async fn list_final_paths_split( &self, config: &Arc<Config>, env_results: EnvResults, ) -> Result<(Vec<PathBuf>, Vec<PathBuf>)> { // User-configured paths from env._.path directives // IMPORTANT: There are TWO sources of env paths: // 1. config.path_dirs() - from config.env_results() (cached, no tera context) // 2. 
env_results.env_paths - from ts.final_env() (fresh, with tera context applied) // env_results.env_paths must come FIRST for highest precedence let mut user_paths = env_results.env_paths; user_paths.extend(config.path_dirs().await?.clone()); // Tool paths start empty let mut tool_paths = Vec::new(); // UV venv path (if any) - these are tool-managed paths if let Some(venv) = uv::uv_venv(config, self).await { tool_paths.push(venv.venv_path.clone()); } // tool_add_paths (MISE_ADD_PATH/RTX_ADD_PATH from tools) tool_paths.extend(env_results.tool_add_paths); // Tool installation paths tool_paths.extend(self.list_paths(config).await); Ok((user_paths, tool_paths)) } }
rust
MIT
3e382b34b6bf7d7b1a0efb8fdd8ea10c84498adb
2026-01-04T15:39:11.175160Z
false