repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1 value | license stringclasses 7 values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2 classes |
|---|---|---|---|---|---|---|---|---|
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/display/color.rs | alacritty/src/display/color.rs | use std::fmt::{self, Display, Formatter};
use std::ops::{Add, Deref, Index, IndexMut, Mul};
use std::str::FromStr;
use log::trace;
use serde::de::{Error as SerdeError, Visitor};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use alacritty_config_derive::SerdeReplace;
use alacritty_terminal::term::color::COUNT;
use alacritty_terminal::vte::ansi::{NamedColor, Rgb as VteRgb};
use crate::config::color::Colors;
/// Factor for automatic computation of dim colors.
pub const DIM_FACTOR: f32 = 0.66;
/// The terminal's 256-entry color palette: named ANSI colors, the 6x6x6
/// color cube, and the grayscale ramp (see `COUNT` for the exact size).
#[derive(Copy, Clone)]
pub struct List([Rgb; COUNT]);
impl From<&'_ Colors> for List {
    /// Build the full color palette from the user configuration.
    fn from(colors: &Colors) -> List {
        // Start from an all-default palette, then populate each section.
        let mut palette = List([Rgb::default(); COUNT]);
        palette.fill_named(colors);
        palette.fill_cube(colors);
        palette.fill_gray_ramp(colors);
        palette
    }
}
impl List {
    /// Populate the named ANSI colors (0..=15) plus the special
    /// foreground/background/dim entries from the config.
    pub fn fill_named(&mut self, colors: &Colors) {
        // Normal colors (0..=7).
        self[NamedColor::Black] = colors.normal.black;
        self[NamedColor::Red] = colors.normal.red;
        self[NamedColor::Green] = colors.normal.green;
        self[NamedColor::Yellow] = colors.normal.yellow;
        self[NamedColor::Blue] = colors.normal.blue;
        self[NamedColor::Magenta] = colors.normal.magenta;
        self[NamedColor::Cyan] = colors.normal.cyan;
        self[NamedColor::White] = colors.normal.white;

        // Bright colors (8..=15).
        self[NamedColor::BrightBlack] = colors.bright.black;
        self[NamedColor::BrightRed] = colors.bright.red;
        self[NamedColor::BrightGreen] = colors.bright.green;
        self[NamedColor::BrightYellow] = colors.bright.yellow;
        self[NamedColor::BrightBlue] = colors.bright.blue;
        self[NamedColor::BrightMagenta] = colors.bright.magenta;
        self[NamedColor::BrightCyan] = colors.bright.cyan;
        self[NamedColor::BrightWhite] = colors.bright.white;
        self[NamedColor::BrightForeground] =
            colors.primary.bright_foreground.unwrap_or(colors.primary.foreground);

        // Foreground and background.
        self[NamedColor::Foreground] = colors.primary.foreground;
        self[NamedColor::Background] = colors.primary.background;

        // Dim foreground falls back to a darkened regular foreground.
        self[NamedColor::DimForeground] =
            colors.primary.dim_foreground.unwrap_or(colors.primary.foreground * DIM_FACTOR);

        // Dim palette: config-provided if present, otherwise derived from normals.
        if let Some(dim) = colors.dim.as_ref() {
            trace!("Using config-provided dim colors");

            self[NamedColor::DimBlack] = dim.black;
            self[NamedColor::DimRed] = dim.red;
            self[NamedColor::DimGreen] = dim.green;
            self[NamedColor::DimYellow] = dim.yellow;
            self[NamedColor::DimBlue] = dim.blue;
            self[NamedColor::DimMagenta] = dim.magenta;
            self[NamedColor::DimCyan] = dim.cyan;
            self[NamedColor::DimWhite] = dim.white;
        } else {
            trace!("Deriving dim colors from normal colors");

            self[NamedColor::DimBlack] = colors.normal.black * DIM_FACTOR;
            self[NamedColor::DimRed] = colors.normal.red * DIM_FACTOR;
            self[NamedColor::DimGreen] = colors.normal.green * DIM_FACTOR;
            self[NamedColor::DimYellow] = colors.normal.yellow * DIM_FACTOR;
            self[NamedColor::DimBlue] = colors.normal.blue * DIM_FACTOR;
            self[NamedColor::DimMagenta] = colors.normal.magenta * DIM_FACTOR;
            self[NamedColor::DimCyan] = colors.normal.cyan * DIM_FACTOR;
            self[NamedColor::DimWhite] = colors.normal.white * DIM_FACTOR;
        }
    }

    /// Fill palette entries 16..232 with the 6x6x6 RGB color cube,
    /// honoring per-index overrides from the config.
    pub fn fill_cube(&mut self, colors: &Colors) {
        for offset in 0..216usize {
            let index = 16 + offset;

            // Config-provided indexed colors take precedence over the cube.
            if let Some(indexed_color) =
                colors.indexed_colors.iter().find(|ic| ic.index() == index as u8)
            {
                self[index] = indexed_color.color;
                continue;
            }

            // Decompose the cube offset into its r/g/b steps (each 0..6);
            // step 0 maps to channel 0, steps 1..6 map to 95..=255 in
            // increments of 40.
            let (r, g, b) = (offset / 36, (offset / 6) % 6, offset % 6);
            let channel = |step: usize| if step == 0 { 0 } else { (step * 40 + 55) as u8 };
            self[index] = Rgb::new(channel(r), channel(g), channel(b));
        }
    }

    /// Fill palette entries 232..256 with the 24-step grayscale ramp,
    /// honoring per-index overrides from the config.
    pub fn fill_gray_ramp(&mut self, colors: &Colors) {
        for step in 0..24usize {
            let index = 232 + step;

            // Config-provided indexed colors take precedence over the ramp.
            if let Some(indexed_color) =
                colors.indexed_colors.iter().find(|ic| ic.index() == index as u8)
            {
                self[index] = indexed_color.color;
                continue;
            }

            // Gray values run 8, 18, ..., 238.
            let value = (step * 10 + 8) as u8;
            self[index] = Rgb::new(value, value, value);
        }
    }
}
/// Raw palette access by numeric index (panics if `idx >= COUNT`).
impl Index<usize> for List {
    type Output = Rgb;

    #[inline]
    fn index(&self, idx: usize) -> &Self::Output {
        &self.0[idx]
    }
}

impl IndexMut<usize> for List {
    #[inline]
    fn index_mut(&mut self, idx: usize) -> &mut Self::Output {
        &mut self.0[idx]
    }
}
/// Palette access by named color; uses the enum discriminant as the index.
impl Index<NamedColor> for List {
    type Output = Rgb;

    #[inline]
    fn index(&self, idx: NamedColor) -> &Self::Output {
        &self.0[idx as usize]
    }
}

impl IndexMut<NamedColor> for List {
    #[inline]
    fn index_mut(&mut self, idx: NamedColor) -> &mut Self::Output {
        &mut self.0[idx as usize]
    }
}
/// Newtype around the VTE crate's RGB color, so local traits (serde,
/// arithmetic, `Display`) can be implemented for it in this crate.
#[derive(SerdeReplace, Debug, Eq, PartialEq, Copy, Clone, Default)]
pub struct Rgb(pub VteRgb);

impl Rgb {
    /// Construct a color from its 8-bit channels.
    #[inline]
    pub const fn new(r: u8, g: u8, b: u8) -> Self {
        Self(VteRgb { r, g, b })
    }

    /// Return the `(r, g, b)` channels as a tuple.
    #[inline]
    pub fn as_tuple(self) -> (u8, u8, u8) {
        (self.0.r, self.0.g, self.0.b)
    }
}
impl From<VteRgb> for Rgb {
    fn from(value: VteRgb) -> Self {
        Self(value)
    }
}

/// Allow direct access to the wrapped VTE color's fields (`r`, `g`, `b`).
impl Deref for Rgb {
    type Target = VteRgb;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

/// Scaling by a factor; delegates to the inner VTE type's `Mul` impl.
impl Mul<f32> for Rgb {
    type Output = Rgb;

    fn mul(self, rhs: f32) -> Self::Output {
        Rgb(self.0 * rhs)
    }
}

/// Color addition; delegates to the inner VTE type's `Add` impl.
impl Add<Rgb> for Rgb {
    type Output = Rgb;

    fn add(self, rhs: Rgb) -> Self::Output {
        Rgb(self.0 + rhs.0)
    }
}
/// Deserialize Rgb color from a hex string.
///
/// Accepts either the hex notations handled by [`Rgb::from_str`]
/// (`#ff00ff` / `0xff00ff`) or an `{ r, g, b }` table (used by reftests).
impl<'de> Deserialize<'de> for Rgb {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct RgbVisitor;

        // Used for deserializing reftests.
        #[derive(Deserialize)]
        struct RgbDerivedDeser {
            r: u8,
            g: u8,
            b: u8,
        }

        impl Visitor<'_> for RgbVisitor {
            type Value = Rgb;

            fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result {
                f.write_str("hex color like #ff00ff")
            }

            fn visit_str<E>(self, value: &str) -> Result<Rgb, E>
            where
                E: serde::de::Error,
            {
                // Delegate hex parsing to the `FromStr` impl.
                Rgb::from_str(value).map_err(|_| {
                    E::custom(format!(
                        "failed to parse rgb color {value}; expected hex color like #ff00ff"
                    ))
                })
            }
        }

        // Return an error if the syntax is incorrect.
        let value = toml::Value::deserialize(deserializer)?;

        // Attempt to deserialize from struct form.
        if let Ok(RgbDerivedDeser { r, g, b }) = RgbDerivedDeser::deserialize(value.clone()) {
            return Ok(Rgb::new(r, g, b));
        }

        // Deserialize from hex notation (either 0xff00ff or #ff00ff).
        value.deserialize_str(RgbVisitor).map_err(D::Error::custom)
    }
}
/// Serialize Rgb color to a hex string.
impl Serialize for Rgb {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Round-trips with the `Deserialize` impl via `Display`/`FromStr`.
        serializer.serialize_str(&self.to_string())
    }
}

impl Display for Rgb {
    /// Format as lowercase `#rrggbb` hex notation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "#{:02x}{:02x}{:02x}", self.r, self.g, self.b)
    }
}
impl FromStr for Rgb {
    type Err = ();

    /// Parse a color from hex notation, either `0xRRGGBB` or `#RRGGBB`.
    ///
    /// Returns `Err(())` for anything else. Note that the digits are
    /// validated explicitly: a bare `u32::from_str_radix` would also accept
    /// a leading `+` sign, letting malformed strings like `#+12345` parse
    /// as valid colors.
    fn from_str(s: &str) -> Result<Rgb, ()> {
        // Accept exactly six characters after the prefix.
        let digits = if s.len() == 8 {
            s.strip_prefix("0x").ok_or(())?
        } else if s.len() == 7 {
            s.strip_prefix('#').ok_or(())?
        } else {
            return Err(());
        };

        // Reject sign characters (and anything else non-hex) up front.
        if !digits.chars().all(|c| c.is_ascii_hexdigit()) {
            return Err(());
        }

        let color = u32::from_str_radix(digits, 16).map_err(|_| ())?;

        // Unpack the 0xRRGGBB value into its channels.
        let r = (color >> 16) as u8;
        let g = ((color >> 8) & 0xff) as u8;
        let b = (color & 0xff) as u8;
        Ok(Rgb::new(r, g, b))
    }
}
/// RGB color optionally referencing the cell's foreground or background.
#[derive(SerdeReplace, Serialize, Copy, Clone, Debug, PartialEq, Eq)]
pub enum CellRgb {
    CellForeground,
    CellBackground,
    #[serde(untagged)]
    Rgb(Rgb),
}

impl CellRgb {
    /// Resolve to a concrete color, substituting the cell's own
    /// foreground/background for the indirect variants.
    pub fn color(self, foreground: Rgb, background: Rgb) -> Rgb {
        match self {
            Self::CellForeground => foreground,
            Self::CellBackground => background,
            Self::Rgb(rgb) => rgb,
        }
    }
}

impl Default for CellRgb {
    // Defaults to black (`Rgb::default()`), not to a cell reference.
    fn default() -> Self {
        Self::Rgb(Rgb::default())
    }
}
/// Deserialize from either a named variant string or a hex color.
impl<'de> Deserialize<'de> for CellRgb {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        const EXPECTING: &str = "CellForeground, CellBackground, or hex color like #ff00ff";

        struct CellRgbVisitor;
        impl Visitor<'_> for CellRgbVisitor {
            type Value = CellRgb;

            fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result {
                f.write_str(EXPECTING)
            }

            fn visit_str<E>(self, value: &str) -> Result<CellRgb, E>
            where
                E: serde::de::Error,
            {
                // Attempt to deserialize as enum constants.
                match value {
                    "CellForeground" => return Ok(CellRgb::CellForeground),
                    "CellBackground" => return Ok(CellRgb::CellBackground),
                    _ => (),
                }

                // Fall back to hex color notation.
                Rgb::from_str(value).map(CellRgb::Rgb).map_err(|_| {
                    E::custom(format!("failed to parse color {value}; expected {EXPECTING}"))
                })
            }
        }

        deserializer.deserialize_str(CellRgbVisitor).map_err(D::Error::custom)
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/input/mod.rs | alacritty/src/input/mod.rs | //! Handle input from winit.
//!
//! Certain key combinations should send some escape sequence back to the PTY.
//! In order to figure that out, state about which modifier keys are pressed
//! needs to be tracked. Additionally, we need a bit of a state machine to
//! determine what to do when a non-modifier key is pressed.
use std::borrow::Cow;
use std::cmp::{Ordering, max, min};
use std::collections::HashSet;
use std::ffi::OsStr;
use std::fmt::Debug;
use std::marker::PhantomData;
use std::mem;
use std::time::{Duration, Instant};
use log::debug;
use winit::dpi::PhysicalPosition;
use winit::event::{
ElementState, Modifiers, MouseButton, MouseScrollDelta, Touch as TouchEvent, TouchPhase,
};
#[cfg(target_os = "macos")]
use winit::event_loop::ActiveEventLoop;
use winit::keyboard::ModifiersState;
#[cfg(target_os = "macos")]
use winit::platform::macos::ActiveEventLoopExtMacOS;
use winit::window::CursorIcon;
use alacritty_terminal::event::EventListener;
use alacritty_terminal::grid::{Dimensions, Scroll};
use alacritty_terminal::index::{Boundary, Column, Direction, Point, Side};
use alacritty_terminal::selection::SelectionType;
use alacritty_terminal::term::search::Match;
use alacritty_terminal::term::{ClipboardType, Term, TermMode};
use alacritty_terminal::vi_mode::ViMotion;
use alacritty_terminal::vte::ansi::{ClearMode, Handler};
use crate::clipboard::Clipboard;
#[cfg(target_os = "macos")]
use crate::config::window::Decorations;
use crate::config::{
Action, BindingMode, MouseAction, MouseEvent, SearchAction, UiConfig, ViAction,
};
use crate::display::hint::HintMatch;
use crate::display::window::{ImeInhibitor, Window};
use crate::display::{Display, SizeInfo};
use crate::event::{
ClickState, Event, EventType, InlineSearchState, Mouse, TouchPurpose, TouchZoom,
};
use crate::message_bar::{self, Message};
use crate::scheduler::{Scheduler, TimerId, Topic};
pub mod keyboard;
/// Font size change interval in px.
pub const FONT_SIZE_STEP: f32 = 1.;

/// Interval for mouse scrolling during selection outside of the boundaries.
const SELECTION_SCROLLING_INTERVAL: Duration = Duration::from_millis(15);

/// Minimum number of pixels at the bottom/top where selection scrolling is performed.
const MIN_SELECTION_SCROLLING_HEIGHT: f64 = 5.;

/// Number of pixels for increasing the selection scrolling speed factor by one.
const SELECTION_SCROLLING_STEP: f64 = 20.;

/// Distance (in pixels) before a touch input is considered a drag.
const MAX_TAP_DISTANCE: f64 = 20.;

/// Maximum time between clicks to count as a double_click/triple_click.
const CLICK_THRESHOLD: Duration = Duration::from_millis(400);
/// Processes input from winit.
///
/// An escape sequence may be emitted in case specific keys or key combinations
/// are activated.
pub struct Processor<T: EventListener, A: ActionContext<T>> {
    pub ctx: A,
    // Ties the processor to the event-listener type `T`, which otherwise
    // only appears inside `A`'s trait bound.
    _phantom: PhantomData<T>,
}
/// Interface between input handling and the rest of the application.
///
/// Most methods have empty default implementations, so partial contexts
/// (e.g. in tests) only need to implement what they actually use.
pub trait ActionContext<T: EventListener> {
    // PTY output and rendering.
    fn write_to_pty<B: Into<Cow<'static, [u8]>>>(&self, _data: B) {}
    fn mark_dirty(&mut self) {}
    fn size_info(&self) -> SizeInfo;

    // Selection handling.
    fn copy_selection(&mut self, _ty: ClipboardType) {}
    fn start_selection(&mut self, _ty: SelectionType, _point: Point, _side: Side) {}
    fn toggle_selection(&mut self, _ty: SelectionType, _point: Point, _side: Side) {}
    fn update_selection(&mut self, _point: Point, _side: Side) {}
    fn clear_selection(&mut self) {}
    fn selection_is_empty(&self) -> bool;

    // Input device state.
    fn mouse_mut(&mut self) -> &mut Mouse;
    fn mouse(&self) -> &Mouse;
    fn touch_purpose(&mut self) -> &mut TouchPurpose;
    fn modifiers(&mut self) -> &mut Modifiers;
    fn scroll(&mut self, _scroll: Scroll) {}

    // Window, display, and terminal access.
    fn window(&mut self) -> &mut Window;
    fn display(&mut self) -> &mut Display;
    fn terminal(&self) -> &Term<T>;
    fn terminal_mut(&mut self) -> &mut Term<T>;

    // Window/instance management.
    fn spawn_new_instance(&mut self) {}
    #[cfg(target_os = "macos")]
    fn create_new_window(&mut self, _tabbing_id: Option<String>) {}
    #[cfg(not(target_os = "macos"))]
    fn create_new_window(&mut self) {}

    // Font size changes.
    fn change_font_size(&mut self, _delta: f32) {}
    fn reset_font_size(&mut self) {}

    // Message bar, config, and services.
    fn pop_message(&mut self) {}
    fn message(&self) -> Option<&Message>;
    fn config(&self) -> &UiConfig;
    #[cfg(target_os = "macos")]
    fn event_loop(&self) -> &ActiveEventLoop;
    fn mouse_mode(&self) -> bool;
    fn clipboard_mut(&mut self) -> &mut Clipboard;
    fn scheduler_mut(&mut self) -> &mut Scheduler;

    // Regex search.
    fn start_search(&mut self, _direction: Direction) {}
    fn start_seeded_search(&mut self, _direction: Direction, _text: String) {}
    fn confirm_search(&mut self) {}
    fn cancel_search(&mut self) {}
    fn search_input(&mut self, _c: char) {}
    fn search_pop_word(&mut self) {}
    fn search_history_previous(&mut self) {}
    fn search_history_next(&mut self) {}
    fn search_next(&mut self, origin: Point, direction: Direction, side: Side) -> Option<Match>;
    fn advance_search_origin(&mut self, _direction: Direction) {}
    fn search_direction(&self) -> Direction;
    fn search_active(&self) -> bool;

    // Vi mode and inline (character) search.
    fn on_typing_start(&mut self) {}
    fn toggle_vi_mode(&mut self) {}
    fn inline_search_state(&mut self) -> &mut InlineSearchState;
    fn start_inline_search(&mut self, _direction: Direction, _stop_short: bool) {}
    fn inline_search_next(&mut self) {}
    fn inline_search_input(&mut self, _text: &str) {}
    fn inline_search_previous(&mut self) {}

    // Hints.
    fn hint_input(&mut self, _character: char) {}
    fn trigger_hint(&mut self, _hint: &HintMatch) {}
    fn expand_selection(&mut self) {}
    fn semantic_word(&self, point: Point) -> String;

    // Miscellaneous.
    fn on_terminal_input_start(&mut self) {}
    fn paste(&mut self, _text: &str, _bracketed: bool) {}
    fn spawn_daemon<I, S>(&self, _program: &str, _args: I)
    where
        I: IntoIterator<Item = S> + Debug + Copy,
        S: AsRef<OsStr>,
    {
    }
}
impl Action {
    /// Toggle a selection of type `ty` anchored at the vi cursor position.
    fn toggle_selection<T, A>(ctx: &mut A, ty: SelectionType)
    where
        A: ActionContext<T>,
        T: EventListener,
    {
        ctx.toggle_selection(ty, ctx.terminal().vi_mode_cursor.point, Side::Left);

        // Make sure initial selection is not empty.
        if let Some(selection) = &mut ctx.terminal_mut().selection {
            selection.include_all();
        }
    }
}
/// Types which can be executed against an [`ActionContext`].
trait Execute<T: EventListener> {
    fn execute<A: ActionContext<T>>(&self, ctx: &mut A);
}
impl<T: EventListener> Execute<T> for Action {
    /// Dispatch a bound action to the matching context operation.
    #[inline]
    fn execute<A: ActionContext<T>>(&self, ctx: &mut A) {
        match self {
            Action::Esc(s) => ctx.paste(s, false),
            Action::Command(program) => ctx.spawn_daemon(program.program(), program.args()),
            Action::Hint(hint) => {
                ctx.display().hint_state.start(hint.clone());
                ctx.mark_dirty();
            },
            Action::ToggleViMode => {
                ctx.on_typing_start();
                ctx.toggle_vi_mode()
            },
            // Vi bindings are no-ops while vi mode is inactive.
            action @ (Action::ViMotion(_) | Action::Vi(_))
                if !ctx.terminal().mode().contains(TermMode::VI) =>
            {
                debug!("Ignoring {action:?}: Vi mode inactive");
            },
            Action::ViMotion(motion) => {
                ctx.on_typing_start();
                ctx.terminal_mut().vi_motion(*motion);
                ctx.mark_dirty();
            },
            Action::Vi(ViAction::ToggleNormalSelection) => {
                Self::toggle_selection(ctx, SelectionType::Simple);
            },
            Action::Vi(ViAction::ToggleLineSelection) => {
                Self::toggle_selection(ctx, SelectionType::Lines);
            },
            Action::Vi(ViAction::ToggleBlockSelection) => {
                Self::toggle_selection(ctx, SelectionType::Block);
            },
            Action::Vi(ViAction::ToggleSemanticSelection) => {
                Self::toggle_selection(ctx, SelectionType::Semantic);
            },
            Action::Vi(ViAction::Open) => {
                // Launch the hint currently highlighted by the vi cursor.
                let hint = ctx.display().vi_highlighted_hint.take();
                if let Some(hint) = &hint {
                    ctx.mouse_mut().block_hint_launcher = false;
                    ctx.trigger_hint(hint);
                }
                ctx.display().vi_highlighted_hint = hint;
            },
            Action::Vi(ViAction::SearchNext) => {
                ctx.on_typing_start();

                let terminal = ctx.terminal();
                let direction = ctx.search_direction();
                let vi_point = terminal.vi_mode_cursor.point;
                // Start one cell past the cursor so the current match is skipped.
                let origin = match direction {
                    Direction::Right => vi_point.add(terminal, Boundary::None, 1),
                    Direction::Left => vi_point.sub(terminal, Boundary::None, 1),
                };

                if let Some(regex_match) = ctx.search_next(origin, direction, Side::Left) {
                    ctx.terminal_mut().vi_goto_point(*regex_match.start());
                    ctx.mark_dirty();
                }
            },
            Action::Vi(ViAction::SearchPrevious) => {
                ctx.on_typing_start();

                let terminal = ctx.terminal();
                // Same as SearchNext, but against the search direction.
                let direction = ctx.search_direction().opposite();
                let vi_point = terminal.vi_mode_cursor.point;
                let origin = match direction {
                    Direction::Right => vi_point.add(terminal, Boundary::None, 1),
                    Direction::Left => vi_point.sub(terminal, Boundary::None, 1),
                };

                if let Some(regex_match) = ctx.search_next(origin, direction, Side::Left) {
                    ctx.terminal_mut().vi_goto_point(*regex_match.start());
                    ctx.mark_dirty();
                }
            },
            Action::Vi(ViAction::SearchStart) => {
                let terminal = ctx.terminal();
                let origin = terminal.vi_mode_cursor.point.sub(terminal, Boundary::None, 1);

                if let Some(regex_match) = ctx.search_next(origin, Direction::Left, Side::Left) {
                    ctx.terminal_mut().vi_goto_point(*regex_match.start());
                    ctx.mark_dirty();
                }
            },
            Action::Vi(ViAction::SearchEnd) => {
                let terminal = ctx.terminal();
                let origin = terminal.vi_mode_cursor.point.add(terminal, Boundary::None, 1);

                if let Some(regex_match) = ctx.search_next(origin, Direction::Right, Side::Right) {
                    ctx.terminal_mut().vi_goto_point(*regex_match.end());
                    ctx.mark_dirty();
                }
            },
            Action::Vi(ViAction::CenterAroundViCursor) => {
                // Scroll so the vi cursor line sits in the middle of the screen.
                let term = ctx.terminal();
                let display_offset = term.grid().display_offset() as i32;
                let target = -display_offset + term.screen_lines() as i32 / 2 - 1;
                let line = term.vi_mode_cursor.point.line;
                let scroll_lines = target - line.0;

                ctx.scroll(Scroll::Delta(scroll_lines));
            },
            Action::Vi(ViAction::InlineSearchForward) => {
                ctx.start_inline_search(Direction::Right, false)
            },
            Action::Vi(ViAction::InlineSearchBackward) => {
                ctx.start_inline_search(Direction::Left, false)
            },
            Action::Vi(ViAction::InlineSearchForwardShort) => {
                ctx.start_inline_search(Direction::Right, true)
            },
            Action::Vi(ViAction::InlineSearchBackwardShort) => {
                ctx.start_inline_search(Direction::Left, true)
            },
            Action::Vi(ViAction::InlineSearchNext) => ctx.inline_search_next(),
            Action::Vi(ViAction::InlineSearchPrevious) => ctx.inline_search_previous(),
            Action::Vi(ViAction::SemanticSearchForward | ViAction::SemanticSearchBackward) => {
                // Seed the search with the selection, or the word at the cursor.
                let seed_text = match ctx.terminal().selection_to_string() {
                    Some(selection) if !selection.is_empty() => selection,
                    // Get semantic word at the vi cursor position.
                    _ => ctx.semantic_word(ctx.terminal().vi_mode_cursor.point),
                };

                if !seed_text.is_empty() {
                    let direction = match self {
                        Action::Vi(ViAction::SemanticSearchForward) => Direction::Right,
                        _ => Direction::Left,
                    };

                    ctx.start_seeded_search(direction, seed_text);
                }
            },
            // Search bindings are no-ops while search mode is inactive.
            action @ Action::Search(_) if !ctx.search_active() => {
                debug!("Ignoring {action:?}: Search mode inactive");
            },
            Action::Search(SearchAction::SearchFocusNext) => {
                ctx.advance_search_origin(ctx.search_direction());
            },
            Action::Search(SearchAction::SearchFocusPrevious) => {
                let direction = ctx.search_direction().opposite();
                ctx.advance_search_origin(direction);
            },
            Action::Search(SearchAction::SearchConfirm) => ctx.confirm_search(),
            Action::Search(SearchAction::SearchCancel) => ctx.cancel_search(),
            Action::Search(SearchAction::SearchClear) => {
                // Restart the search with an empty query, keeping the direction.
                let direction = ctx.search_direction();
                ctx.cancel_search();
                ctx.start_search(direction);
            },
            Action::Search(SearchAction::SearchDeleteWord) => ctx.search_pop_word(),
            Action::Search(SearchAction::SearchHistoryPrevious) => ctx.search_history_previous(),
            Action::Search(SearchAction::SearchHistoryNext) => ctx.search_history_next(),
            Action::Mouse(MouseAction::ExpandSelection) => ctx.expand_selection(),
            Action::SearchForward => ctx.start_search(Direction::Right),
            Action::SearchBackward => ctx.start_search(Direction::Left),
            Action::Copy => ctx.copy_selection(ClipboardType::Clipboard),
            #[cfg(not(any(target_os = "macos", windows)))]
            Action::CopySelection => ctx.copy_selection(ClipboardType::Selection),
            Action::ClearSelection => ctx.clear_selection(),
            Action::Paste => {
                let text = ctx.clipboard_mut().load(ClipboardType::Clipboard);
                ctx.paste(&text, true);
            },
            Action::PasteSelection => {
                let text = ctx.clipboard_mut().load(ClipboardType::Selection);
                ctx.paste(&text, true);
            },
            Action::ToggleFullscreen => ctx.window().toggle_fullscreen(),
            Action::ToggleMaximized => ctx.window().toggle_maximized(),
            #[cfg(target_os = "macos")]
            Action::ToggleSimpleFullscreen => ctx.window().toggle_simple_fullscreen(),
            #[cfg(target_os = "macos")]
            Action::Hide => ctx.event_loop().hide_application(),
            #[cfg(target_os = "macos")]
            Action::HideOtherApplications => ctx.event_loop().hide_other_applications(),
            #[cfg(not(target_os = "macos"))]
            Action::Hide => ctx.window().set_visible(false),
            Action::Minimize => ctx.window().set_minimized(true),
            Action::Quit => {
                // Exit the terminal instead of closing the window directly, so
                // shutdown goes through the normal child-exit path.
                ctx.window().hold = false;
                ctx.terminal_mut().exit();
            },
            Action::IncreaseFontSize => ctx.change_font_size(FONT_SIZE_STEP),
            Action::DecreaseFontSize => ctx.change_font_size(-FONT_SIZE_STEP),
            Action::ResetFontSize => ctx.reset_font_size(),
            Action::ScrollPageUp
            | Action::ScrollPageDown
            | Action::ScrollHalfPageUp
            | Action::ScrollHalfPageDown => {
                // Move vi mode cursor.
                let term = ctx.terminal_mut();
                let (scroll, amount) = match self {
                    Action::ScrollPageUp => (Scroll::PageUp, term.screen_lines() as i32),
                    Action::ScrollPageDown => (Scroll::PageDown, -(term.screen_lines() as i32)),
                    Action::ScrollHalfPageUp => {
                        let amount = term.screen_lines() as i32 / 2;
                        (Scroll::Delta(amount), amount)
                    },
                    Action::ScrollHalfPageDown => {
                        let amount = -(term.screen_lines() as i32 / 2);
                        (Scroll::Delta(amount), amount)
                    },
                    _ => unreachable!(),
                };
                let old_vi_cursor = term.vi_mode_cursor;
                term.vi_mode_cursor = term.vi_mode_cursor.scroll(term, amount);
                if old_vi_cursor != term.vi_mode_cursor {
                    ctx.mark_dirty();
                }

                ctx.scroll(scroll);
            },
            Action::ScrollLineUp => ctx.scroll(Scroll::Delta(1)),
            Action::ScrollLineDown => ctx.scroll(Scroll::Delta(-1)),
            Action::ScrollToTop => {
                ctx.scroll(Scroll::Top);

                // Move vi mode cursor.
                let topmost_line = ctx.terminal().topmost_line();
                ctx.terminal_mut().vi_mode_cursor.point.line = topmost_line;
                ctx.terminal_mut().vi_motion(ViMotion::FirstOccupied);
                ctx.mark_dirty();
            },
            Action::ScrollToBottom => {
                ctx.scroll(Scroll::Bottom);

                // Move vi mode cursor.
                let term = ctx.terminal_mut();
                term.vi_mode_cursor.point.line = term.bottommost_line();

                // Move to beginning twice, to always jump across linewraps.
                term.vi_motion(ViMotion::FirstOccupied);
                term.vi_motion(ViMotion::FirstOccupied);
                ctx.mark_dirty();
            },
            Action::ClearHistory => ctx.terminal_mut().clear_screen(ClearMode::Saved),
            Action::ClearLogNotice => ctx.pop_message(),
            #[cfg(not(target_os = "macos"))]
            Action::CreateNewWindow => ctx.create_new_window(),
            Action::SpawnNewInstance => ctx.spawn_new_instance(),
            #[cfg(target_os = "macos")]
            Action::CreateNewWindow => ctx.create_new_window(None),
            #[cfg(target_os = "macos")]
            Action::CreateNewTab => {
                // Tabs on macOS are not possible without decorations.
                if ctx.config().window.decorations != Decorations::None {
                    let tabbing_id = Some(ctx.window().tabbing_id());
                    ctx.create_new_window(tabbing_id);
                }
            },
            #[cfg(target_os = "macos")]
            Action::SelectNextTab => ctx.window().select_next_tab(),
            #[cfg(target_os = "macos")]
            Action::SelectPreviousTab => ctx.window().select_previous_tab(),
            #[cfg(target_os = "macos")]
            Action::SelectTab1 => ctx.window().select_tab_at_index(0),
            #[cfg(target_os = "macos")]
            Action::SelectTab2 => ctx.window().select_tab_at_index(1),
            #[cfg(target_os = "macos")]
            Action::SelectTab3 => ctx.window().select_tab_at_index(2),
            #[cfg(target_os = "macos")]
            Action::SelectTab4 => ctx.window().select_tab_at_index(3),
            #[cfg(target_os = "macos")]
            Action::SelectTab5 => ctx.window().select_tab_at_index(4),
            #[cfg(target_os = "macos")]
            Action::SelectTab6 => ctx.window().select_tab_at_index(5),
            #[cfg(target_os = "macos")]
            Action::SelectTab7 => ctx.window().select_tab_at_index(6),
            #[cfg(target_os = "macos")]
            Action::SelectTab8 => ctx.window().select_tab_at_index(7),
            #[cfg(target_os = "macos")]
            Action::SelectTab9 => ctx.window().select_tab_at_index(8),
            #[cfg(target_os = "macos")]
            Action::SelectLastTab => ctx.window().select_last_tab(),
            // Remaining actions are handled elsewhere (e.g. the keyboard module).
            _ => (),
        }
    }
}
impl<T: EventListener, A: ActionContext<T>> Processor<T, A> {
pub fn new(ctx: A) -> Self {
Self { ctx, _phantom: Default::default() }
}
#[inline]
/// Handle pointer motion: update tracked mouse state, extend an in-progress
/// selection, and emit motion reports when the PTY requested mouse mode.
pub fn mouse_moved(&mut self, position: PhysicalPosition<f64>) {
    let size_info = self.ctx.size_info();

    let (x, y) = position.into();

    let lmb_pressed = self.ctx.mouse().left_button_state == ElementState::Pressed;
    let rmb_pressed = self.ctx.mouse().right_button_state == ElementState::Pressed;

    // Auto-scroll when dragging a selection past the window edge.
    if !self.ctx.selection_is_empty() && (lmb_pressed || rmb_pressed) {
        self.update_selection_scrolling(y);
    }

    let display_offset = self.ctx.terminal().grid().display_offset();
    let old_point = self.ctx.mouse().point(&size_info, display_offset);

    // Clamp the pointer into the window so off-window drags still map to a cell.
    let x = x.clamp(0, size_info.width() as i32 - 1) as usize;
    let y = y.clamp(0, size_info.height() as i32 - 1) as usize;
    self.ctx.mouse_mut().x = x;
    self.ctx.mouse_mut().y = y;

    let inside_text_area = size_info.contains_point(x, y);
    let cell_side = self.cell_side(x);

    let point = self.ctx.mouse().point(&size_info, display_offset);
    let cell_changed = old_point != point;

    // If the mouse hasn't changed cells, do nothing.
    if !cell_changed
        && self.ctx.mouse().cell_side == cell_side
        && self.ctx.mouse().inside_text_area == inside_text_area
    {
        return;
    }

    self.ctx.mouse_mut().inside_text_area = inside_text_area;
    self.ctx.mouse_mut().cell_side = cell_side;

    // Update mouse state and check for URL change.
    let mouse_state = self.cursor_state();
    self.ctx.window().set_mouse_cursor(mouse_state);

    // Prompt hint highlight update.
    self.ctx.mouse_mut().hint_highlight_dirty = true;

    // Don't launch URLs if mouse has moved.
    self.ctx.mouse_mut().block_hint_launcher = true;

    if (lmb_pressed || rmb_pressed)
        && (self.ctx.modifiers().state().shift_key() || !self.ctx.mouse_mode())
    {
        self.ctx.update_selection(point, cell_side);
    } else if cell_changed
        && self.ctx.terminal().mode().intersects(TermMode::MOUSE_MOTION | TermMode::MOUSE_DRAG)
    {
        // Report motion as the held button's code (32 left, 33 middle,
        // 34 right, 35 no button).
        if lmb_pressed {
            self.mouse_report(32, ElementState::Pressed);
        } else if self.ctx.mouse().middle_button_state == ElementState::Pressed {
            self.mouse_report(33, ElementState::Pressed);
        } else if self.ctx.mouse().right_button_state == ElementState::Pressed {
            self.mouse_report(34, ElementState::Pressed);
        } else if self.ctx.terminal().mode().contains(TermMode::MOUSE_MOTION) {
            self.mouse_report(35, ElementState::Pressed);
        }
    }
}
/// Check which side of a cell an X coordinate lies on.
fn cell_side(&self, x: usize) -> Side {
    let size_info = self.ctx.size_info();
    // Pixel offset of the pointer within its cell.
    let cell_x =
        x.saturating_sub(size_info.padding_x() as usize) % size_info.cell_width() as usize;
    let half_cell_width = (size_info.cell_width() / 2.0) as usize;

    // Leftover width after the last full cell column.
    let additional_padding =
        (size_info.width() - size_info.padding_x() * 2.) % size_info.cell_width();
    let end_of_grid = size_info.width() - size_info.padding_x() - additional_padding;

    if cell_x > half_cell_width
        // Edge case when mouse leaves the window.
        || x as f32 >= end_of_grid
    {
        Side::Right
    } else {
        Side::Left
    }
}
/// Report a mouse event to the PTY, choosing SGR or legacy encoding
/// based on the terminal mode.
fn mouse_report(&mut self, button: u8, state: ElementState) {
    let display_offset = self.ctx.terminal().grid().display_offset();
    let point = self.ctx.mouse().point(&self.ctx.size_info(), display_offset);

    // Assure the mouse point is not in the scrollback.
    if point.line < 0 {
        return;
    }

    // Calculate modifiers value.
    let mut mods = 0;
    let modifiers = self.ctx.modifiers().state();
    if modifiers.shift_key() {
        mods += 4;
    }
    if modifiers.alt_key() {
        mods += 8;
    }
    if modifiers.control_key() {
        mods += 16;
    }

    // Report mouse events.
    if self.ctx.terminal().mode().contains(TermMode::SGR_MOUSE) {
        self.sgr_mouse_report(point, button + mods, state);
    } else if let ElementState::Released = state {
        // Legacy encoding can't express which button was released; 3 means "any".
        self.normal_mouse_report(point, 3 + mods);
    } else {
        self.normal_mouse_report(point, button + mods);
    }
}
/// Report a mouse event using the legacy (non-SGR) encoding:
/// `ESC [ M` followed by button and 1-based coordinates offset by 32.
fn normal_mouse_report(&mut self, point: Point, button: u8) {
    let Point { line, column } = point;
    let utf8 = self.ctx.terminal().mode().contains(TermMode::UTF8_MOUSE);

    // Largest coordinate representable by the active encoding.
    let max_point = if utf8 { 2015 } else { 223 };

    if line >= max_point || column >= max_point {
        return;
    }

    let mut msg = vec![b'\x1b', b'[', b'M', 32 + button];

    // Encode an out-of-range coordinate as a two-byte UTF-8 sequence.
    let mouse_pos_encode = |pos: usize| -> Vec<u8> {
        let pos = 32 + 1 + pos;
        let first = 0xC0 + pos / 64;
        let second = 0x80 + (pos & 63);
        vec![first as u8, second as u8]
    };

    if utf8 && column >= Column(95) {
        msg.append(&mut mouse_pos_encode(column.0));
    } else {
        msg.push(32 + 1 + column.0 as u8);
    }

    if utf8 && line >= 95 {
        msg.append(&mut mouse_pos_encode(line.0 as usize));
    } else {
        msg.push(32 + 1 + line.0 as u8);
    }

    self.ctx.write_to_pty(msg);
}
/// Report a mouse event using the SGR (1006) encoding:
/// `ESC [ < button ; column ; line (M|m)`.
fn sgr_mouse_report(&mut self, point: Point, button: u8, state: ElementState) {
    // SGR terminates with 'M' on press and 'm' on release.
    let terminator = match state {
        ElementState::Pressed => 'M',
        ElementState::Released => 'm',
    };

    let msg =
        format!("\x1b[<{};{};{}{}", button, point.column + 1, point.line + 1, terminator);
    self.ctx.write_to_pty(msg.into_bytes());
}
/// Handle a mouse button press: either forward it to the PTY (mouse mode)
/// or start selection handling, tracking double/triple clicks.
fn on_mouse_press(&mut self, button: MouseButton) {
    // Handle mouse mode.
    if !self.ctx.modifiers().state().shift_key() && self.ctx.mouse_mode() {
        self.ctx.mouse_mut().click_state = ClickState::None;

        let code = match button {
            MouseButton::Left => 0,
            MouseButton::Middle => 1,
            MouseButton::Right => 2,
            // Can't properly report more than three buttons..
            MouseButton::Back | MouseButton::Forward | MouseButton::Other(_) => return,
        };

        self.mouse_report(code, ElementState::Pressed);
    } else {
        // Calculate time since the last click to handle double/triple clicks.
        let now = Instant::now();
        let elapsed = now - self.ctx.mouse().last_click_timestamp;
        self.ctx.mouse_mut().last_click_timestamp = now;

        // Update multi-click state.
        self.ctx.mouse_mut().click_state = match self.ctx.mouse().click_state {
            // Reset click state if button has changed.
            _ if button != self.ctx.mouse().last_click_button => {
                self.ctx.mouse_mut().last_click_button = button;
                ClickState::Click
            },
            ClickState::Click if elapsed < CLICK_THRESHOLD => ClickState::DoubleClick,
            ClickState::DoubleClick if elapsed < CLICK_THRESHOLD => ClickState::TripleClick,
            _ => ClickState::Click,
        };

        // Load mouse point, treating message bar and padding as the closest cell.
        let display_offset = self.ctx.terminal().grid().display_offset();
        let point = self.ctx.mouse().point(&self.ctx.size_info(), display_offset);

        if let MouseButton::Left = button {
            self.on_left_click(point)
        }
    }
}
/// Handle left click selection and vi mode cursor movement.
fn on_left_click(&mut self, point: Point) {
    let side = self.ctx.mouse().cell_side;
    let control = self.ctx.modifiers().state().control_key();

    match self.ctx.mouse().click_state {
        ClickState::Click => {
            // Don't launch URLs if this click cleared the selection.
            self.ctx.mouse_mut().block_hint_launcher = !self.ctx.selection_is_empty();

            self.ctx.clear_selection();

            // Start new empty selection; Ctrl switches to block selection.
            if control {
                self.ctx.start_selection(SelectionType::Block, point, side);
            } else {
                self.ctx.start_selection(SelectionType::Simple, point, side);
            }
        },
        ClickState::DoubleClick if !control => {
            self.ctx.mouse_mut().block_hint_launcher = true;
            self.ctx.start_selection(SelectionType::Semantic, point, side);
        },
        ClickState::TripleClick if !control => {
            self.ctx.mouse_mut().block_hint_launcher = true;
            self.ctx.start_selection(SelectionType::Lines, point, side);
        },
        _ => (),
    };

    // Move vi mode cursor to mouse click position.
    if self.ctx.terminal().mode().contains(TermMode::VI) && !self.ctx.search_active() {
        self.ctx.terminal_mut().vi_mode_cursor.point = point;
        self.ctx.mark_dirty();
    }
}
/// Handle a mouse button release.
fn on_mouse_release(&mut self, button: MouseButton) {
    // Forward the release to the application when mouse reporting is active and
    // Shift (the reporting override) is not held.
    if !self.ctx.modifiers().state().shift_key() && self.ctx.mouse_mode() {
        let code = match button {
            MouseButton::Left => 0,
            MouseButton::Middle => 1,
            MouseButton::Right => 2,
            // Can't properly report more than three buttons.
            MouseButton::Back | MouseButton::Forward | MouseButton::Other(_) => return,
        };
        self.mouse_report(code, ElementState::Released);
        return;
    }
    // Trigger hints highlighted by the mouse, but only on left-button release.
    let hint = self.ctx.display().highlighted_hint.take();
    if let Some(hint) = hint.as_ref().filter(|_| button == MouseButton::Left) {
        self.ctx.trigger_hint(hint);
    }
    self.ctx.display().highlighted_hint = hint;
    // Stop selection auto-scrolling that may have been started while dragging.
    let timer_id = TimerId::new(Topic::SelectionScrolling, self.ctx.window().id());
    self.ctx.scheduler_mut().unschedule(timer_id);
    if let MouseButton::Left | MouseButton::Right = button {
        // Copy selection on release, to prevent flooding the display server.
        self.ctx.copy_selection(ClipboardType::Selection);
    }
}
/// Handle scroll wheel and touchpad scrolling events.
pub fn mouse_wheel_input(&mut self, delta: MouseScrollDelta, phase: TouchPhase) {
    let multiplier = self.ctx.config().scrolling.multiplier;
    match delta {
        // Discrete wheel ticks: convert line/column counts into pixel distances.
        MouseScrollDelta::LineDelta(columns, lines) => {
            let new_scroll_px_x = columns * self.ctx.size_info().cell_width();
            let new_scroll_px_y = lines * self.ctx.size_info().cell_height();
            self.scroll_terminal(
                new_scroll_px_x as f64,
                new_scroll_px_y as f64,
                multiplier as f64,
            );
        },
        // Precise (touchpad) scrolling already reports pixel deltas.
        MouseScrollDelta::PixelDelta(mut lpos) => {
            match phase {
                TouchPhase::Started => {
                    // Reset offset to zero.
                    self.ctx.mouse_mut().accumulated_scroll = Default::default();
                },
                TouchPhase::Moved => {
                    // When the angle between (x, 0) and (x, y) is lower than ~25 degrees
                    // (cosine is larger than 0.9) we consider this scrolling as horizontal.
                    if lpos.x.abs() / lpos.x.hypot(lpos.y) > 0.9 {
                        lpos.y = 0.;
                    } else {
                        lpos.x = 0.;
                    }
                    self.scroll_terminal(lpos.x, lpos.y, multiplier as f64);
                },
                _ => (),
            }
        },
    }
}
fn scroll_terminal(&mut self, new_scroll_x_px: f64, new_scroll_y_px: f64, multiplier: f64) {
const MOUSE_WHEEL_UP: u8 = 64;
const MOUSE_WHEEL_DOWN: u8 = 65;
const MOUSE_WHEEL_LEFT: u8 = 66;
const MOUSE_WHEEL_RIGHT: u8 = 67;
let width = f64::from(self.ctx.size_info().cell_width());
let height = f64::from(self.ctx.size_info().cell_height());
let multiplier = if self.ctx.mouse_mode() { 1. } else { multiplier };
self.ctx.mouse_mut().accumulated_scroll.x += new_scroll_x_px * multiplier;
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | true |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/input/keyboard.rs | alacritty/src/input/keyboard.rs | use std::borrow::Cow;
use winit::event::{ElementState, KeyEvent};
#[cfg(target_os = "macos")]
use winit::keyboard::ModifiersKeyState;
use winit::keyboard::{Key, KeyLocation, ModifiersState, NamedKey};
#[cfg(target_os = "macos")]
use winit::platform::macos::OptionAsAlt;
use alacritty_terminal::event::EventListener;
use alacritty_terminal::term::TermMode;
use winit::platform::modifier_supplement::KeyEventExtModifierSupplement;
use crate::config::{Action, BindingKey, BindingMode, KeyBinding};
use crate::display::window::ImeInhibitor;
use crate::event::TYPING_SEARCH_DELAY;
use crate::input::{ActionContext, Execute, Processor};
use crate::scheduler::{TimerId, Topic};
impl<T: EventListener, A: ActionContext<T>> Processor<T, A> {
/// Process key input.
pub fn key_input(&mut self, key: KeyEvent) {
    // IME input will be applied on commit and shouldn't trigger key bindings.
    if self.ctx.display().ime.preedit().is_some() {
        return;
    }
    let mode = *self.ctx.terminal().mode();
    let mods = self.ctx.modifiers().state();
    // Releases take a separate path; they only matter for kitty event reporting.
    if key.state == ElementState::Released {
        if self.ctx.inline_search_state().char_pending {
            self.ctx.window().set_ime_inhibitor(ImeInhibitor::VI, false);
        }
        self.key_release(key, mode, mods);
        return;
    }
    let text = key.text_with_all_modifiers().unwrap_or_default();
    // All key bindings are disabled while a hint is being selected.
    if self.ctx.display().hint_state.active() {
        for character in text.chars() {
            self.ctx.hint_input(character);
        }
        return;
    }
    // First key after inline search is captured.
    let inline_state = self.ctx.inline_search_state();
    if inline_state.char_pending {
        self.ctx.inline_search_input(text);
        return;
    }
    // Reset search delay when the user is still typing.
    self.reset_search_delay();
    // Key bindings suppress the character input.
    if self.process_key_bindings(&key) {
        return;
    }
    // While searching, keys feed the search query instead of the terminal.
    if self.ctx.search_active() {
        for character in text.chars() {
            self.ctx.search_input(character);
        }
        return;
    }
    // Vi mode on its own doesn't have any input, the search input was done before.
    if mode.contains(TermMode::VI) {
        return;
    }
    // Mask `Alt` modifier from input when we won't send esc.
    let mods = if self.alt_send_esc(&key, text) { mods } else { mods & !ModifiersState::ALT };
    let build_key_sequence = Self::should_build_sequence(&key, text, mode, mods);
    let is_modifier_key = Self::is_modifier_key(&key);
    // Either build a (kitty-style) escape sequence or emit the literal text,
    // optionally ESC-prefixed for Alt.
    let bytes = if build_key_sequence {
        build_sequence(key, mods, mode)
    } else {
        let mut bytes = Vec::with_capacity(text.len() + 1);
        if mods.alt_key() {
            bytes.push(b'\x1b');
        }
        bytes.extend_from_slice(text.as_bytes());
        bytes
    };
    // Write only if we have something to write.
    if !bytes.is_empty() {
        // Don't clear selection/scroll down when writing escaped modifier keys.
        if !is_modifier_key {
            self.ctx.on_terminal_input_start();
        }
        self.ctx.write_to_pty(bytes);
    }
}
/// Whether a held `Alt` should prefix the emitted text with ESC.
fn alt_send_esc(&mut self, key: &KeyEvent, text: &str) -> bool {
    #[cfg(not(target_os = "macos"))]
    let alt_send_esc = self.ctx.modifiers().state().alt_key();
    // On macOS, `Option` only acts as `Alt` when the config opts in, and only
    // for the configured side (left/right/both).
    #[cfg(target_os = "macos")]
    let alt_send_esc = {
        let option_as_alt = self.ctx.config().window.option_as_alt();
        self.ctx.modifiers().state().alt_key()
            && (option_as_alt == OptionAsAlt::Both
                || (option_as_alt == OptionAsAlt::OnlyLeft
                    && self.ctx.modifiers().lalt_state() == ModifiersKeyState::Pressed)
                || (option_as_alt == OptionAsAlt::OnlyRight
                    && self.ctx.modifiers().ralt_state() == ModifiersKeyState::Pressed))
    };
    match key.logical_key {
        Key::Named(named) => {
            if named.to_text().is_some() {
                alt_send_esc
            } else {
                // Treat `Alt` as modifier for named keys without text, like ArrowUp.
                self.ctx.modifiers().state().alt_key()
            }
        },
        // Character keys only get the ESC prefix for single-codepoint text.
        _ => alt_send_esc && text.chars().count() == 1,
    }
}
/// Whether the event is for a bare modifier key (Shift/Control/Alt/Super).
fn is_modifier_key(key: &KeyEvent) -> bool {
    matches!(
        key.logical_key.as_ref(),
        Key::Named(NamedKey::Shift | NamedKey::Control | NamedKey::Alt | NamedKey::Super)
    )
}
/// Check whether we should try to build escape sequence for the [`KeyEvent`].
fn should_build_sequence(
    key: &KeyEvent,
    text: &str,
    mode: TermMode,
    mods: ModifiersState,
) -> bool {
    // In "report all keys" mode every key becomes a sequence.
    if mode.contains(TermMode::REPORT_ALL_KEYS_AS_ESC) {
        return true;
    }
    // Kitty disambiguation applies to Escape, numpad keys, and modified keys;
    // plain Shift is exempt except for Tab/Enter/Backspace.
    let disambiguate = mode.contains(TermMode::DISAMBIGUATE_ESC_CODES)
        && (key.logical_key == Key::Named(NamedKey::Escape)
            || key.location == KeyLocation::Numpad
            || (!mods.is_empty()
                && (mods != ModifiersState::SHIFT
                    || matches!(
                        key.logical_key,
                        Key::Named(NamedKey::Tab)
                            | Key::Named(NamedKey::Enter)
                            | Key::Named(NamedKey::Backspace)
                    ))));
    match key.logical_key {
        _ if disambiguate => true,
        // Exclude all the named keys unless they have textual representation.
        Key::Named(named) => named.to_text().is_none(),
        // Otherwise only keys without any emitted text get a sequence.
        _ => text.is_empty(),
    }
}
/// Attempt to find a binding and execute its action.
///
/// The provided mode, mods, and key must match what is allowed by a binding
/// for its action to be executed.
///
/// Returns `true` when the key's character input should be suppressed.
fn process_key_bindings(&mut self, key: &KeyEvent) -> bool {
    let mode = BindingMode::new(self.ctx.terminal().mode(), self.ctx.search_active());
    let mods = self.ctx.modifiers().state();
    // Don't suppress char if no bindings were triggered.
    let mut suppress_chars = None;
    // We don't want the key without modifier, because it means something else most of
    // the time. However what we want is to manually lowercase the character to account
    // for both small and capital letters on regular characters at the same time.
    let logical_key = if let Key::Character(ch) = key.logical_key.as_ref() {
        // Match `Alt` bindings without `Alt` being applied, otherwise they use the
        // composed chars, which are not intuitive to bind.
        //
        // On Windows, the `Ctrl + Alt` mangles `logical_key` to unidentified values, thus
        // preventing them from being used in bindings
        //
        // For more see https://github.com/rust-windowing/winit/issues/2945.
        if (cfg!(target_os = "macos") || (cfg!(windows) && mods.control_key()))
            && mods.alt_key()
        {
            key.key_without_modifiers()
        } else {
            Key::Character(ch.to_lowercase().into())
        }
    } else {
        key.logical_key.clone()
    };
    // Get the action of a key binding.
    let mut binding_action = |binding: &KeyBinding| {
        // Scancode bindings are compared against the physical key instead.
        let key = match (&binding.trigger, &logical_key) {
            (BindingKey::Scancode(_), _) => BindingKey::Scancode(key.physical_key),
            (_, code) => {
                BindingKey::Keycode { key: code.clone(), location: key.location.into() }
            },
        };
        if binding.is_triggered_by(mode, mods, &key) {
            // Pass through the key if any of the bindings has the `ReceiveChar` action.
            *suppress_chars.get_or_insert(true) &= binding.action != Action::ReceiveChar;
            // Binding was triggered; run the action.
            Some(binding.action.clone())
        } else {
            None
        }
    };
    // Trigger matching key bindings.
    //
    // NOTE(review): indexed loops re-borrow the config each iteration so the borrow
    // ends before `execute(&mut self.ctx)` — presumably actions don't mutate the
    // binding list mid-loop; confirm.
    for i in 0..self.ctx.config().key_bindings().len() {
        let binding = &self.ctx.config().key_bindings()[i];
        if let Some(action) = binding_action(binding) {
            action.execute(&mut self.ctx);
        }
    }
    // Trigger key bindings for hints.
    for i in 0..self.ctx.config().hints.enabled.len() {
        let hint = &self.ctx.config().hints.enabled[i];
        let binding = match hint.binding.as_ref() {
            Some(binding) => binding.key_binding(hint),
            None => continue,
        };
        if let Some(action) = binding_action(binding) {
            action.execute(&mut self.ctx);
        }
    }
    suppress_chars.unwrap_or(false)
}
/// Handle key release.
fn key_release(&mut self, key: KeyEvent, mode: TermMode, mods: ModifiersState) {
    // Releases are only reported for the kitty event-types mode, and never while
    // vi mode, search, or hint selection own the keyboard.
    if !mode.contains(TermMode::REPORT_EVENT_TYPES)
        || mode.contains(TermMode::VI)
        || self.ctx.search_active()
        || self.ctx.display().hint_state.active()
    {
        return;
    }
    // Mask `Alt` modifier from input when we won't send esc.
    let text = key.text_with_all_modifiers().unwrap_or_default();
    let mods = if self.alt_send_esc(&key, text) { mods } else { mods & !ModifiersState::ALT };
    let bytes = match key.logical_key.as_ref() {
        // Enter/Tab/Backspace releases are skipped unless every key must be
        // reported as an escape sequence.
        Key::Named(NamedKey::Enter)
        | Key::Named(NamedKey::Tab)
        | Key::Named(NamedKey::Backspace)
            if !mode.contains(TermMode::REPORT_ALL_KEYS_AS_ESC) =>
        {
            return;
        },
        _ => build_sequence(key, mods, mode),
    };
    self.ctx.write_to_pty(bytes);
}
/// Reset search delay.
///
/// Re-arms a pending delayed-search timer, pushing it back by the typing delay
/// so the search only runs once the user pauses.
fn reset_search_delay(&mut self) {
    if self.ctx.search_active() {
        let timer_id = TimerId::new(Topic::DelayedSearch, self.ctx.window().id());
        let scheduler = self.ctx.scheduler_mut();
        if let Some(timer) = scheduler.unschedule(timer_id) {
            scheduler.schedule(timer.event, TYPING_SEARCH_DELAY, false, timer.id);
        }
    }
}
}
/// Build a key's keyboard escape sequence based on the given `key`, `mods`, and `mode`.
///
/// The key sequences for `APP_KEYPAD` and alike are handled inside the bindings.
#[inline(never)]
fn build_sequence(key: KeyEvent, mods: ModifiersState, mode: TermMode) -> Vec<u8> {
    let mut modifiers = mods.into();
    // Any kitty keyboard-protocol flag switches to kitty-style encoding.
    let kitty_seq = mode.intersects(
        TermMode::REPORT_ALL_KEYS_AS_ESC
            | TermMode::DISAMBIGUATE_ESC_CODES
            | TermMode::REPORT_EVENT_TYPES,
    );
    let kitty_encode_all = mode.contains(TermMode::REPORT_ALL_KEYS_AS_ESC);
    // The default parameter is 1, so we can omit it.
    let kitty_event_type = mode.contains(TermMode::REPORT_EVENT_TYPES)
        && (key.repeat || key.state == ElementState::Released);
    let context =
        SequenceBuilder { mode, modifiers, kitty_seq, kitty_encode_all, kitty_event_type };
    // Associated text is only attached for pressed keys with non-empty,
    // non-control payloads.
    let associated_text = key.text_with_all_modifiers().filter(|text| {
        mode.contains(TermMode::REPORT_ASSOCIATED_TEXT)
            && key.state != ElementState::Released
            && !text.is_empty()
            && !is_control_character(text)
    });
    // Try the encoders from most to least specific.
    let sequence_base = context
        .try_build_numpad(&key)
        .or_else(|| context.try_build_named_kitty(&key))
        .or_else(|| context.try_build_named_normal(&key, associated_text.is_some()))
        .or_else(|| context.try_build_control_char_or_mod(&key, &mut modifiers))
        .or_else(|| context.try_build_textual(&key, associated_text));
    let (payload, terminator) = match sequence_base {
        Some(SequenceBase { payload, terminator }) => (payload, terminator),
        _ => return Vec::new(),
    };
    let mut payload = format!("\x1b[{payload}");
    // Add modifiers information.
    if kitty_event_type || !modifiers.is_empty() || associated_text.is_some() {
        payload.push_str(&format!(";{}", modifiers.encode_esc_sequence()));
    }
    // Push event type.
    if kitty_event_type {
        payload.push(':');
        let event_type = match key.state {
            _ if key.repeat => '2',
            ElementState::Pressed => '1',
            ElementState::Released => '3',
        };
        payload.push(event_type);
    }
    // Associated text: first codepoint after `;`, the rest `:`-separated.
    if let Some(text) = associated_text {
        let mut codepoints = text.chars().map(u32::from);
        if let Some(codepoint) = codepoints.next() {
            payload.push_str(&format!(";{codepoint}"));
        }
        for codepoint in codepoints {
            payload.push_str(&format!(":{codepoint}"));
        }
    }
    payload.push(terminator.encode_esc_sequence());
    payload.into_bytes()
}
/// Helper to build escape sequence payloads from [`KeyEvent`].
pub struct SequenceBuilder {
    /// Active terminal modes influencing the encoding.
    mode: TermMode,
    /// The emitted sequence should follow the kitty keyboard protocol.
    kitty_seq: bool,
    /// Encode all the keys according to the protocol.
    kitty_encode_all: bool,
    /// Report event types.
    kitty_event_type: bool,
    /// Modifier state to encode into the sequence.
    modifiers: SequenceModifiers,
}
impl SequenceBuilder {
/// Try building sequence from the event's emitting text.
fn try_build_textual(
    &self,
    key: &KeyEvent,
    associated_text: Option<&str>,
) -> Option<SequenceBase> {
    // Only character keys are handled here, and only under the kitty protocol.
    let character = match key.logical_key.as_ref() {
        Key::Character(character) if self.kitty_seq => character,
        _ => return None,
    };
    if character.chars().count() == 1 {
        let shift = self.modifiers.contains(SequenceModifiers::SHIFT);
        let ch = character.chars().next().unwrap();
        // With Shift held, report the lowercase form as the primary key code.
        let unshifted_ch = if shift { ch.to_lowercase().next().unwrap() } else { ch };
        let alternate_key_code = u32::from(ch);
        let mut unicode_key_code = u32::from(unshifted_ch);
        // Try to get the base for keys which change based on modifier, like `1` for `!`.
        //
        // However it should only be performed when `SHIFT` is pressed.
        if shift && alternate_key_code == unicode_key_code {
            if let Key::Character(unmodded) = key.key_without_modifiers().as_ref() {
                unicode_key_code = u32::from(unmodded.chars().next().unwrap_or(unshifted_ch));
            }
        }
        // NOTE: Base layouts are ignored, since winit doesn't expose this information
        // yet.
        let payload = if self.mode.contains(TermMode::REPORT_ALTERNATE_KEYS)
            && alternate_key_code != unicode_key_code
        {
            format!("{unicode_key_code}:{alternate_key_code}")
        } else {
            unicode_key_code.to_string()
        };
        Some(SequenceBase::new(payload.into(), SequenceTerminator::Kitty))
    } else if self.kitty_encode_all && associated_text.is_some() {
        // Fallback when need to report text, but we don't have any key associated with this
        // text.
        Some(SequenceBase::new("0".into(), SequenceTerminator::Kitty))
    } else {
        None
    }
}
/// Try building from numpad key.
///
/// `None` is returned when the key is neither known nor numpad.
fn try_build_numpad(&self, key: &KeyEvent) -> Option<SequenceBase> {
    if !self.kitty_seq || key.location != KeyLocation::Numpad {
        return None;
    }
    // Numeric codes below are the kitty functional-key assignments for numpad keys.
    let base = match key.logical_key.as_ref() {
        Key::Character("0") => "57399",
        Key::Character("1") => "57400",
        Key::Character("2") => "57401",
        Key::Character("3") => "57402",
        Key::Character("4") => "57403",
        Key::Character("5") => "57404",
        Key::Character("6") => "57405",
        Key::Character("7") => "57406",
        Key::Character("8") => "57407",
        Key::Character("9") => "57408",
        Key::Character(".") => "57409",
        Key::Character("/") => "57410",
        Key::Character("*") => "57411",
        Key::Character("-") => "57412",
        Key::Character("+") => "57413",
        Key::Character("=") => "57415",
        Key::Named(named) => match named {
            NamedKey::Enter => "57414",
            NamedKey::ArrowLeft => "57417",
            NamedKey::ArrowRight => "57418",
            NamedKey::ArrowUp => "57419",
            NamedKey::ArrowDown => "57420",
            NamedKey::PageUp => "57421",
            NamedKey::PageDown => "57422",
            NamedKey::Home => "57423",
            NamedKey::End => "57424",
            NamedKey::Insert => "57425",
            NamedKey::Delete => "57426",
            _ => return None,
        },
        _ => return None,
    };
    Some(SequenceBase::new(base.into(), SequenceTerminator::Kitty))
}
/// Try building from [`NamedKey`] using the kitty keyboard protocol encoding
/// for functional keys.
fn try_build_named_kitty(&self, key: &KeyEvent) -> Option<SequenceBase> {
    let named = match key.logical_key {
        Key::Named(named) if self.kitty_seq => named,
        _ => return None,
    };
    // Codes are kitty functional-key numbers; keys with xterm-style encodings
    // (F1-F12, arrows, ...) are handled in `try_build_named_normal` instead.
    let (base, terminator) = match named {
        // F3 in kitty protocol diverges from alacritty's terminfo.
        NamedKey::F3 => ("13", SequenceTerminator::Normal('~')),
        NamedKey::F13 => ("57376", SequenceTerminator::Kitty),
        NamedKey::F14 => ("57377", SequenceTerminator::Kitty),
        NamedKey::F15 => ("57378", SequenceTerminator::Kitty),
        NamedKey::F16 => ("57379", SequenceTerminator::Kitty),
        NamedKey::F17 => ("57380", SequenceTerminator::Kitty),
        NamedKey::F18 => ("57381", SequenceTerminator::Kitty),
        NamedKey::F19 => ("57382", SequenceTerminator::Kitty),
        NamedKey::F20 => ("57383", SequenceTerminator::Kitty),
        NamedKey::F21 => ("57384", SequenceTerminator::Kitty),
        NamedKey::F22 => ("57385", SequenceTerminator::Kitty),
        NamedKey::F23 => ("57386", SequenceTerminator::Kitty),
        NamedKey::F24 => ("57387", SequenceTerminator::Kitty),
        NamedKey::F25 => ("57388", SequenceTerminator::Kitty),
        NamedKey::F26 => ("57389", SequenceTerminator::Kitty),
        NamedKey::F27 => ("57390", SequenceTerminator::Kitty),
        NamedKey::F28 => ("57391", SequenceTerminator::Kitty),
        NamedKey::F29 => ("57392", SequenceTerminator::Kitty),
        NamedKey::F30 => ("57393", SequenceTerminator::Kitty),
        NamedKey::F31 => ("57394", SequenceTerminator::Kitty),
        NamedKey::F32 => ("57395", SequenceTerminator::Kitty),
        NamedKey::F33 => ("57396", SequenceTerminator::Kitty),
        NamedKey::F34 => ("57397", SequenceTerminator::Kitty),
        NamedKey::F35 => ("57398", SequenceTerminator::Kitty),
        NamedKey::ScrollLock => ("57359", SequenceTerminator::Kitty),
        NamedKey::PrintScreen => ("57361", SequenceTerminator::Kitty),
        NamedKey::Pause => ("57362", SequenceTerminator::Kitty),
        NamedKey::ContextMenu => ("57363", SequenceTerminator::Kitty),
        NamedKey::MediaPlay => ("57428", SequenceTerminator::Kitty),
        NamedKey::MediaPause => ("57429", SequenceTerminator::Kitty),
        NamedKey::MediaPlayPause => ("57430", SequenceTerminator::Kitty),
        NamedKey::MediaStop => ("57432", SequenceTerminator::Kitty),
        NamedKey::MediaFastForward => ("57433", SequenceTerminator::Kitty),
        NamedKey::MediaRewind => ("57434", SequenceTerminator::Kitty),
        NamedKey::MediaTrackNext => ("57435", SequenceTerminator::Kitty),
        NamedKey::MediaTrackPrevious => ("57436", SequenceTerminator::Kitty),
        NamedKey::MediaRecord => ("57437", SequenceTerminator::Kitty),
        NamedKey::AudioVolumeDown => ("57438", SequenceTerminator::Kitty),
        NamedKey::AudioVolumeUp => ("57439", SequenceTerminator::Kitty),
        NamedKey::AudioVolumeMute => ("57440", SequenceTerminator::Kitty),
        _ => return None,
    };
    Some(SequenceBase::new(base.into(), terminator))
}
/// Try building from [`NamedKey`].
///
/// Uses the classic xterm/DEC encodings shared by kitty and legacy modes.
fn try_build_named_normal(
    &self,
    key: &KeyEvent,
    has_associated_text: bool,
) -> Option<SequenceBase> {
    let named = match key.logical_key {
        Key::Named(named) => named,
        _ => return None,
    };
    // The default parameter is 1, so we can omit it.
    let one_based =
        if self.modifiers.is_empty() && !self.kitty_event_type && !has_associated_text {
            ""
        } else {
            "1"
        };
    let (base, terminator) = match named {
        NamedKey::PageUp => ("5", SequenceTerminator::Normal('~')),
        NamedKey::PageDown => ("6", SequenceTerminator::Normal('~')),
        NamedKey::Insert => ("2", SequenceTerminator::Normal('~')),
        NamedKey::Delete => ("3", SequenceTerminator::Normal('~')),
        NamedKey::Home => (one_based, SequenceTerminator::Normal('H')),
        NamedKey::End => (one_based, SequenceTerminator::Normal('F')),
        NamedKey::ArrowLeft => (one_based, SequenceTerminator::Normal('D')),
        NamedKey::ArrowRight => (one_based, SequenceTerminator::Normal('C')),
        NamedKey::ArrowUp => (one_based, SequenceTerminator::Normal('A')),
        NamedKey::ArrowDown => (one_based, SequenceTerminator::Normal('B')),
        NamedKey::F1 => (one_based, SequenceTerminator::Normal('P')),
        NamedKey::F2 => (one_based, SequenceTerminator::Normal('Q')),
        NamedKey::F3 => (one_based, SequenceTerminator::Normal('R')),
        NamedKey::F4 => (one_based, SequenceTerminator::Normal('S')),
        NamedKey::F5 => ("15", SequenceTerminator::Normal('~')),
        NamedKey::F6 => ("17", SequenceTerminator::Normal('~')),
        NamedKey::F7 => ("18", SequenceTerminator::Normal('~')),
        NamedKey::F8 => ("19", SequenceTerminator::Normal('~')),
        NamedKey::F9 => ("20", SequenceTerminator::Normal('~')),
        NamedKey::F10 => ("21", SequenceTerminator::Normal('~')),
        NamedKey::F11 => ("23", SequenceTerminator::Normal('~')),
        NamedKey::F12 => ("24", SequenceTerminator::Normal('~')),
        NamedKey::F13 => ("25", SequenceTerminator::Normal('~')),
        NamedKey::F14 => ("26", SequenceTerminator::Normal('~')),
        NamedKey::F15 => ("28", SequenceTerminator::Normal('~')),
        NamedKey::F16 => ("29", SequenceTerminator::Normal('~')),
        NamedKey::F17 => ("31", SequenceTerminator::Normal('~')),
        NamedKey::F18 => ("32", SequenceTerminator::Normal('~')),
        NamedKey::F19 => ("33", SequenceTerminator::Normal('~')),
        NamedKey::F20 => ("34", SequenceTerminator::Normal('~')),
        _ => return None,
    };
    Some(SequenceBase::new(base.into(), terminator))
}
/// Try building escape from control characters (e.g. Enter) and modifiers.
fn try_build_control_char_or_mod(
    &self,
    key: &KeyEvent,
    mods: &mut SequenceModifiers,
) -> Option<SequenceBase> {
    if !self.kitty_encode_all && !self.kitty_seq {
        return None;
    }
    let named = match key.logical_key {
        Key::Named(named) => named,
        _ => return None,
    };
    // ASCII codepoints for the C0-style control keys.
    let base = match named {
        NamedKey::Tab => "9",
        NamedKey::Enter => "13",
        NamedKey::Escape => "27",
        NamedKey::Space => "32",
        NamedKey::Backspace => "127",
        _ => "",
    };
    // Fail when the key is not a named control character and the active mode prohibits us
    // from encoding modifier keys.
    if !self.kitty_encode_all && base.is_empty() {
        return None;
    }
    // Kitty functional-key codes for bare modifier keys; left-hand variants have
    // dedicated codes.
    let base = match (named, key.location) {
        (NamedKey::Shift, KeyLocation::Left) => "57441",
        (NamedKey::Control, KeyLocation::Left) => "57442",
        (NamedKey::Alt, KeyLocation::Left) => "57443",
        (NamedKey::Super, KeyLocation::Left) => "57444",
        (NamedKey::Hyper, KeyLocation::Left) => "57445",
        (NamedKey::Meta, KeyLocation::Left) => "57446",
        (NamedKey::Shift, _) => "57447",
        (NamedKey::Control, _) => "57448",
        (NamedKey::Alt, _) => "57449",
        (NamedKey::Super, _) => "57450",
        (NamedKey::Hyper, _) => "57451",
        (NamedKey::Meta, _) => "57452",
        (NamedKey::CapsLock, _) => "57358",
        (NamedKey::NumLock, _) => "57360",
        _ => base,
    };
    // NOTE: Kitty's protocol mandates that the modifier state is applied before
    // key press, however winit sends them after the key press, so for modifiers
    // itself apply the state based on keysyms and not the _actual_ modifiers
    // state, which is how kitty is doing so and what is suggested in such case.
    let press = key.state.is_pressed();
    match named {
        NamedKey::Shift => mods.set(SequenceModifiers::SHIFT, press),
        NamedKey::Control => mods.set(SequenceModifiers::CONTROL, press),
        NamedKey::Alt => mods.set(SequenceModifiers::ALT, press),
        NamedKey::Super => mods.set(SequenceModifiers::SUPER, press),
        _ => (),
    }
    if base.is_empty() {
        None
    } else {
        Some(SequenceBase::new(base.into(), SequenceTerminator::Kitty))
    }
}
}
/// Partially built escape sequence: its numeric payload plus terminator.
pub struct SequenceBase {
    /// The base of the payload, which is the `number` and optionally an alt base from the kitty
    /// spec.
    payload: Cow<'static, str>,
    /// Character which ends the escape sequence.
    terminator: SequenceTerminator,
}
impl SequenceBase {
    /// Bundle a payload with its terminator.
    fn new(payload: Cow<'static, str>, terminator: SequenceTerminator) -> Self {
        Self { payload, terminator }
    }
}
/// Final character of a key escape sequence.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SequenceTerminator {
    /// The normal key esc sequence terminator defined by xterm/dec.
    Normal(char),
    /// The terminator is for kitty escape sequence.
    Kitty,
}
impl SequenceTerminator {
    /// The character which closes the escape sequence.
    fn encode_esc_sequence(self) -> char {
        // Kitty sequences always end in `u`; xterm-style ones carry their own
        // terminator character.
        if let SequenceTerminator::Normal(terminator) = self {
            terminator
        } else {
            'u'
        }
    }
}
bitflags::bitflags! {
    /// The modifiers encoding for escape sequence.
    ///
    /// Bit values match the kitty keyboard protocol's modifier field (before the
    /// protocol's `+1` offset is applied).
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct SequenceModifiers : u8 {
        const SHIFT = 0b0000_0001;
        const ALT = 0b0000_0010;
        const CONTROL = 0b0000_0100;
        const SUPER = 0b0000_1000;
        // NOTE: Kitty protocol defines additional modifiers to what is present here, like
        // Capslock, but it's not a modifier as per winit.
    }
}
impl SequenceModifiers {
    /// Get the value which should be passed to escape sequence.
    ///
    /// The protocol transmits the modifier bitfield offset by one, so "no
    /// modifiers" is encoded as `1`.
    pub fn encode_esc_sequence(self) -> u8 {
        self.bits() + 1
    }
}
impl From<ModifiersState> for SequenceModifiers {
    fn from(mods: ModifiersState) -> Self {
        // Translate each winit modifier into its escape-sequence bit.
        let mut modifiers = Self::empty();
        modifiers.set(Self::SHIFT, mods.shift_key());
        modifiers.set(Self::ALT, mods.alt_key());
        modifiers.set(Self::CONTROL, mods.control_key());
        modifiers.set(Self::SUPER, mods.super_key());
        modifiers
    }
}
/// Check whether the `text` is `0x7f`, a `C0` or a `C1` control code.
///
/// 0x7f (DEL) is included here since it has a dedicated control code (`^?`) which generally
/// does not match the reported text (`^H`), despite not technically being part of C0 or C1.
fn is_control_character(text: &str) -> bool {
    // Operate on chars rather than bytes: C1 controls (U+0080..=U+009F) occupy two
    // bytes in UTF-8, so the previous byte-based check combined with `len() == 1`
    // could never match them. This also avoids panicking on an empty string,
    // which the unconditional `bytes().next().unwrap()` used to do.
    let mut chars = text.chars();
    match (chars.next(), chars.next()) {
        // Exactly one codepoint which is C0, DEL, or C1.
        (Some(c), None) => c < '\u{20}' || ('\u{7f}'..='\u{9f}').contains(&c),
        // Empty or multi-codepoint text is never a single control character.
        _ => false,
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/general.rs | alacritty/src/config/general.rs | //! Miscellaneous configuration options.
use std::path::PathBuf;
use serde::Serialize;
use alacritty_config_derive::ConfigDeserialize;
/// General config section.
///
/// This section is for fields which can not be easily categorized,
/// to avoid common TOML issues with root-level fields.
#[derive(ConfigDeserialize, Serialize, Clone, PartialEq, Debug)]
pub struct General {
    /// Configuration file imports.
    ///
    /// This is never read since the field is directly accessed through the config's
    /// [`toml::Value`], but still present to prevent unused field warnings.
    pub import: Vec<String>,
    /// Shell startup directory.
    pub working_directory: Option<PathBuf>,
    /// Live config reload.
    pub live_config_reload: bool,
    /// Offer IPC through a unix socket.
    ///
    /// NOTE(review): marked `#[allow(unused)]` — presumably only read by
    /// IPC-enabled builds; confirm against the `ipc` feature gating.
    #[allow(unused)]
    pub ipc_socket: bool,
}
impl Default for General {
    /// Defaults: live reload and the IPC socket enabled, everything else empty.
    fn default() -> Self {
        Self {
            import: Vec::new(),
            working_directory: None,
            live_config_reload: true,
            ipc_socket: true,
        }
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/bindings.rs | alacritty/src/config/bindings.rs | #![allow(clippy::enum_glob_use)]
use std::fmt::{self, Debug, Display};
use bitflags::bitflags;
use serde::de::{self, Error as SerdeError, MapAccess, Unexpected, Visitor};
use serde::{Deserialize, Deserializer};
use std::rc::Rc;
use toml::Value as SerdeValue;
use winit::event::MouseButton;
use winit::keyboard::{
Key, KeyCode, KeyLocation as WinitKeyLocation, ModifiersState, NamedKey, PhysicalKey,
};
use winit::platform::scancode::PhysicalKeyExtScancode;
use alacritty_config_derive::{ConfigDeserialize, SerdeReplace};
use alacritty_terminal::term::TermMode;
use alacritty_terminal::vi_mode::ViMotion;
use crate::config::ui_config::{Hint, Program, StringVisitor};
/// Describes a state and action to take in that state.
///
/// This is the shared component of `MouseBinding` and `KeyBinding`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Binding<T> {
    /// Modifier keys required to activate binding.
    pub mods: ModifiersState,
    /// Action to perform if mods and mode match.
    pub action: Action,
    /// Binding mode required to activate binding.
    pub mode: BindingMode,
    /// Excluded binding modes where the binding won't be activated.
    pub notmode: BindingMode,
    /// This property is used as part of the trigger detection code.
    ///
    /// For example, this might be a key like "G", or a mouse button.
    pub trigger: T,
}
/// Bindings that are triggered by a keyboard key (or physical scancode).
pub type KeyBinding = Binding<BindingKey>;
/// Bindings that are triggered by a mouse event.
pub type MouseBinding = Binding<MouseEvent>;
impl<T: Eq> Binding<T> {
    /// Whether this binding fires for the given mode, modifiers, and trigger.
    #[inline]
    pub fn is_triggered_by(&self, mode: BindingMode, mods: ModifiersState, input: &T) -> bool {
        // Check input first since bindings are stored in one big list. This is
        // the most likely item to fail so prioritizing it here allows more
        // checks to be short circuited.
        self.trigger == *input
            && self.mods == mods
            && mode.contains(self.mode)
            && !mode.intersects(self.notmode)
    }

    /// Whether both bindings could be triggered by the same input in some mode.
    #[inline]
    pub fn triggers_match(&self, binding: &Binding<T>) -> bool {
        // Check the binding's key and modifiers.
        if self.trigger != binding.trigger || self.mods != binding.mods {
            return false;
        }
        // An empty mode restriction means "active in every mode".
        let selfmode = if self.mode.is_empty() { BindingMode::all() } else { self.mode };
        let bindingmode = if binding.mode.is_empty() { BindingMode::all() } else { binding.mode };
        if !selfmode.intersects(bindingmode) {
            return false;
        }
        // The bindings are never active at the same time when the required modes of one binding
        // are part of the forbidden bindings of the other.
        if self.mode.intersects(binding.notmode) || binding.mode.intersects(self.notmode) {
            return false;
        }
        true
    }
}
/// Actions which can be bound to key and mouse inputs.
#[derive(ConfigDeserialize, Debug, Clone, PartialEq, Eq)]
pub enum Action {
    /// Write an escape sequence.
    #[config(skip)]
    Esc(String),
    /// Run given command.
    #[config(skip)]
    Command(Program),
    /// Regex keyboard hints.
    #[config(skip)]
    Hint(Rc<Hint>),
    /// Move vi mode cursor.
    #[config(skip)]
    ViMotion(ViMotion),
    /// Perform vi mode action.
    #[config(skip)]
    Vi(ViAction),
    /// Perform search mode action.
    #[config(skip)]
    Search(SearchAction),
    /// Perform mouse binding exclusive action.
    #[config(skip)]
    Mouse(MouseAction),
    /// Paste contents of system clipboard.
    Paste,
    /// Store current selection into clipboard.
    Copy,
    /// Store current selection into selection buffer.
    CopySelection,
    /// Paste contents of selection buffer.
    PasteSelection,
    /// Increase font size.
    IncreaseFontSize,
    /// Decrease font size.
    DecreaseFontSize,
    /// Reset font size to the config value.
    ResetFontSize,
    /// Scroll exactly one page up.
    ScrollPageUp,
    /// Scroll exactly one page down.
    ScrollPageDown,
    /// Scroll half a page up.
    ScrollHalfPageUp,
    /// Scroll half a page down.
    ScrollHalfPageDown,
    /// Scroll one line up.
    ScrollLineUp,
    /// Scroll one line down.
    ScrollLineDown,
    /// Scroll all the way to the top.
    ScrollToTop,
    /// Scroll all the way to the bottom.
    ScrollToBottom,
    /// Clear the display buffer(s) to remove history.
    ClearHistory,
    /// Hide the Alacritty window.
    Hide,
    /// Hide all windows other than Alacritty on macOS.
    HideOtherApplications,
    /// Minimize the Alacritty window.
    Minimize,
    /// Quit Alacritty.
    Quit,
    /// Clear warning and error notices.
    ClearLogNotice,
    /// Spawn a new instance of Alacritty.
    SpawnNewInstance,
    /// Select next tab.
    SelectNextTab,
    /// Select previous tab.
    SelectPreviousTab,
    /// Select the first tab.
    SelectTab1,
    /// Select the second tab.
    SelectTab2,
    /// Select the third tab.
    SelectTab3,
    /// Select the fourth tab.
    SelectTab4,
    /// Select the fifth tab.
    SelectTab5,
    /// Select the sixth tab.
    SelectTab6,
    /// Select the seventh tab.
    SelectTab7,
    /// Select the eighth tab.
    SelectTab8,
    /// Select the ninth tab.
    SelectTab9,
    /// Select the last tab.
    SelectLastTab,
    /// Create a new Alacritty window.
    CreateNewWindow,
    /// Create new window in a tab.
    CreateNewTab,
    /// Toggle fullscreen.
    ToggleFullscreen,
    /// Toggle maximized.
    ToggleMaximized,
    /// Toggle simple fullscreen on macOS.
    ToggleSimpleFullscreen,
    /// Clear active selection.
    ClearSelection,
    /// Toggle vi mode.
    ToggleViMode,
    /// Allow receiving char input.
    ReceiveChar,
    /// Start a forward buffer search.
    SearchForward,
    /// Start a backward buffer search.
    SearchBackward,
    /// No action.
    None,
}
impl From<&'static str> for Action {
fn from(s: &'static str) -> Action {
Action::Esc(s.into())
}
}
impl From<ViAction> for Action {
fn from(action: ViAction) -> Self {
Self::Vi(action)
}
}
impl From<ViMotion> for Action {
fn from(motion: ViMotion) -> Self {
Self::ViMotion(motion)
}
}
impl From<SearchAction> for Action {
fn from(action: SearchAction) -> Self {
Self::Search(action)
}
}
impl From<MouseAction> for Action {
fn from(action: MouseAction) -> Self {
Self::Mouse(action)
}
}
/// Display trait used for error logging.
impl Display for Action {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Action::ViMotion(motion) => motion.fmt(f),
Action::Vi(action) => action.fmt(f),
Action::Mouse(action) => action.fmt(f),
_ => write!(f, "{self:?}"),
}
}
}
/// Vi mode specific actions.
#[derive(ConfigDeserialize, Debug, Copy, Clone, PartialEq, Eq)]
pub enum ViAction {
/// Toggle normal vi selection.
ToggleNormalSelection,
/// Toggle line vi selection.
ToggleLineSelection,
/// Toggle block vi selection.
ToggleBlockSelection,
/// Toggle semantic vi selection.
ToggleSemanticSelection,
/// Jump to the beginning of the next match.
SearchNext,
/// Jump to the beginning of the previous match.
SearchPrevious,
/// Jump to the next start of a match to the left of the origin.
SearchStart,
/// Jump to the next end of a match to the right of the origin.
SearchEnd,
/// Launch the URL below the vi mode cursor.
Open,
/// Centers the screen around the vi mode cursor.
CenterAroundViCursor,
/// Search forward within the current line.
InlineSearchForward,
/// Search backward within the current line.
InlineSearchBackward,
/// Search forward within the current line, stopping just short of the character.
InlineSearchForwardShort,
/// Search backward within the current line, stopping just short of the character.
InlineSearchBackwardShort,
/// Jump to the next inline search match.
InlineSearchNext,
/// Jump to the previous inline search match.
InlineSearchPrevious,
/// Search forward for selection or word under the cursor.
SemanticSearchForward,
/// Search backward for selection or word under the cursor.
SemanticSearchBackward,
}
/// Search mode specific actions.
#[allow(clippy::enum_variant_names)]
#[derive(ConfigDeserialize, Debug, Copy, Clone, PartialEq, Eq)]
pub enum SearchAction {
/// Move the focus to the next search match.
SearchFocusNext,
/// Move the focus to the previous search match.
SearchFocusPrevious,
/// Confirm the active search.
SearchConfirm,
/// Cancel the active search.
SearchCancel,
/// Reset the search regex.
SearchClear,
/// Delete the last word in the search regex.
SearchDeleteWord,
/// Go to the previous regex in the search history.
SearchHistoryPrevious,
/// Go to the next regex in the search history.
SearchHistoryNext,
}
/// Mouse binding specific actions.
#[derive(ConfigDeserialize, Debug, Copy, Clone, PartialEq, Eq)]
pub enum MouseAction {
/// Expand the selection to the current mouse cursor position.
ExpandSelection,
}
/// Mouse binding specific events.
#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub enum MouseEvent {
Button(MouseButton),
WheelUp,
WheelDown,
}
macro_rules! bindings {
(
$ty:ident;
$(
$key:tt$(::$button:ident)?
$(=>$location:expr)?
$(,$mods:expr)*
$(,+$mode:expr)*
$(,~$notmode:expr)*
;$action:expr
);*
$(;)*
) => {{
let mut v = Vec::new();
$(
let mut _mods = ModifiersState::empty();
$(_mods = $mods;)*
let mut _mode = BindingMode::empty();
$(_mode.insert($mode);)*
let mut _notmode = BindingMode::empty();
$(_notmode.insert($notmode);)*
v.push($ty {
trigger: trigger!($ty, $key$(::$button)?, $($location)?),
mods: _mods,
mode: _mode,
notmode: _notmode,
action: $action.into(),
});
)*
v
}};
}
macro_rules! trigger {
(KeyBinding, $key:literal, $location:expr) => {{ BindingKey::Keycode { key: Key::Character($key.into()), location: $location } }};
(KeyBinding, $key:literal,) => {{ BindingKey::Keycode { key: Key::Character($key.into()), location: KeyLocation::Any } }};
(KeyBinding, $key:ident, $location:expr) => {{ BindingKey::Keycode { key: Key::Named(NamedKey::$key), location: $location } }};
(KeyBinding, $key:ident,) => {{ BindingKey::Keycode { key: Key::Named(NamedKey::$key), location: KeyLocation::Any } }};
(MouseBinding, MouseButton::$button:ident,) => {{ MouseEvent::Button(MouseButton::$button) }};
(MouseBinding, MouseEvent::$event:ident,) => {{ MouseEvent::$event }};
}
pub fn default_mouse_bindings() -> Vec<MouseBinding> {
bindings!(
MouseBinding;
MouseButton::Right; MouseAction::ExpandSelection;
MouseButton::Right, ModifiersState::CONTROL; MouseAction::ExpandSelection;
MouseButton::Middle, ~BindingMode::VI; Action::PasteSelection;
)
}
// NOTE: key sequences which are not present here, like F5-F20, PageUp/PageDown codes are
// built on the fly in input/keyboard.rs.
pub fn default_key_bindings() -> Vec<KeyBinding> {
let mut bindings = bindings!(
KeyBinding;
Copy; Action::Copy;
Copy, +BindingMode::VI; Action::ClearSelection;
Paste, ~BindingMode::VI; Action::Paste;
Paste, +BindingMode::VI, +BindingMode::SEARCH; Action::Paste;
"l", ModifiersState::CONTROL; Action::ClearLogNotice;
"l", ModifiersState::CONTROL; Action::ReceiveChar;
Home, ModifiersState::SHIFT, ~BindingMode::ALT_SCREEN; Action::ScrollToTop;
End, ModifiersState::SHIFT, ~BindingMode::ALT_SCREEN; Action::ScrollToBottom;
PageUp, ModifiersState::SHIFT, ~BindingMode::ALT_SCREEN; Action::ScrollPageUp;
PageDown, ModifiersState::SHIFT, ~BindingMode::ALT_SCREEN; Action::ScrollPageDown;
// App cursor mode.
Home, +BindingMode::APP_CURSOR, ~BindingMode::VI, ~BindingMode::SEARCH; Action::Esc("\x1bOH".into());
End, +BindingMode::APP_CURSOR, ~BindingMode::VI, ~BindingMode::SEARCH; Action::Esc("\x1bOF".into());
ArrowUp, +BindingMode::APP_CURSOR, ~BindingMode::VI, ~BindingMode::SEARCH; Action::Esc("\x1bOA".into());
ArrowDown, +BindingMode::APP_CURSOR, ~BindingMode::VI, ~BindingMode::SEARCH; Action::Esc("\x1bOB".into());
ArrowRight, +BindingMode::APP_CURSOR, ~BindingMode::VI, ~BindingMode::SEARCH; Action::Esc("\x1bOC".into());
ArrowLeft, +BindingMode::APP_CURSOR, ~BindingMode::VI, ~BindingMode::SEARCH; Action::Esc("\x1bOD".into());
// Legacy keys handling which can't be automatically encoded.
F1, ~BindingMode::VI, ~BindingMode::SEARCH, ~BindingMode::REPORT_ALL_KEYS_AS_ESC, ~BindingMode::DISAMBIGUATE_ESC_CODES; Action::Esc("\x1bOP".into());
F2, ~BindingMode::VI, ~BindingMode::SEARCH, ~BindingMode::REPORT_ALL_KEYS_AS_ESC, ~BindingMode::DISAMBIGUATE_ESC_CODES; Action::Esc("\x1bOQ".into());
F3, ~BindingMode::VI, ~BindingMode::SEARCH, ~BindingMode::REPORT_ALL_KEYS_AS_ESC, ~BindingMode::DISAMBIGUATE_ESC_CODES; Action::Esc("\x1bOR".into());
F4, ~BindingMode::VI, ~BindingMode::SEARCH, ~BindingMode::REPORT_ALL_KEYS_AS_ESC, ~BindingMode::DISAMBIGUATE_ESC_CODES; Action::Esc("\x1bOS".into());
Tab, ModifiersState::SHIFT, ~BindingMode::VI, ~BindingMode::SEARCH, ~BindingMode::REPORT_ALL_KEYS_AS_ESC, ~BindingMode::DISAMBIGUATE_ESC_CODES; Action::Esc("\x1b[Z".into());
Tab, ModifiersState::SHIFT | ModifiersState::ALT, ~BindingMode::VI, ~BindingMode::SEARCH, ~BindingMode::REPORT_ALL_KEYS_AS_ESC, ~BindingMode::DISAMBIGUATE_ESC_CODES; Action::Esc("\x1b\x1b[Z".into());
Backspace, ~BindingMode::VI, ~BindingMode::SEARCH, ~BindingMode::REPORT_ALL_KEYS_AS_ESC; Action::Esc("\x7f".into());
Backspace, ModifiersState::ALT, ~BindingMode::VI, ~BindingMode::SEARCH, ~BindingMode::REPORT_ALL_KEYS_AS_ESC, ~BindingMode::DISAMBIGUATE_ESC_CODES; Action::Esc("\x1b\x7f".into());
Backspace, ModifiersState::SHIFT, ~BindingMode::VI, ~BindingMode::SEARCH, ~BindingMode::REPORT_ALL_KEYS_AS_ESC, ~BindingMode::DISAMBIGUATE_ESC_CODES; Action::Esc("\x7f".into());
Enter => KeyLocation::Numpad, ~BindingMode::VI, ~BindingMode::SEARCH, ~BindingMode::REPORT_ALL_KEYS_AS_ESC, ~BindingMode::DISAMBIGUATE_ESC_CODES; Action::Esc("\n".into());
// Vi mode.
Space, ModifiersState::SHIFT | ModifiersState::CONTROL, ~BindingMode::SEARCH; Action::ToggleViMode;
Space, ModifiersState::SHIFT | ModifiersState::CONTROL, +BindingMode::VI, ~BindingMode::SEARCH; Action::ScrollToBottom;
Escape, +BindingMode::VI, ~BindingMode::SEARCH; Action::ClearSelection;
"i", +BindingMode::VI, ~BindingMode::SEARCH; Action::ToggleViMode;
"i", +BindingMode::VI, ~BindingMode::SEARCH; Action::ScrollToBottom;
"c", ModifiersState::CONTROL, +BindingMode::VI, ~BindingMode::SEARCH; Action::ToggleViMode;
"y", ModifiersState::CONTROL, +BindingMode::VI, ~BindingMode::SEARCH; Action::ScrollLineUp;
"e", ModifiersState::CONTROL, +BindingMode::VI, ~BindingMode::SEARCH; Action::ScrollLineDown;
"g", +BindingMode::VI, ~BindingMode::SEARCH; Action::ScrollToTop;
"g", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; Action::ScrollToBottom;
"b", ModifiersState::CONTROL, +BindingMode::VI, ~BindingMode::SEARCH; Action::ScrollPageUp;
"f", ModifiersState::CONTROL, +BindingMode::VI, ~BindingMode::SEARCH; Action::ScrollPageDown;
"u", ModifiersState::CONTROL, +BindingMode::VI, ~BindingMode::SEARCH; Action::ScrollHalfPageUp;
"d", ModifiersState::CONTROL, +BindingMode::VI, ~BindingMode::SEARCH; Action::ScrollHalfPageDown;
"y", +BindingMode::VI, ~BindingMode::SEARCH; Action::Copy;
"y", +BindingMode::VI, ~BindingMode::SEARCH; Action::ClearSelection;
"/", +BindingMode::VI, ~BindingMode::SEARCH; Action::SearchForward;
"?", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; Action::SearchBackward;
"y", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViAction::ToggleNormalSelection;
"y", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Last;
"y", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; Action::Copy;
"y", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; Action::ClearSelection;
"v", +BindingMode::VI, ~BindingMode::SEARCH; ViAction::ToggleNormalSelection;
"v", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViAction::ToggleLineSelection;
"v", ModifiersState::CONTROL, +BindingMode::VI, ~BindingMode::SEARCH; ViAction::ToggleBlockSelection;
"v", ModifiersState::ALT, +BindingMode::VI, ~BindingMode::SEARCH; ViAction::ToggleSemanticSelection;
"n", +BindingMode::VI, ~BindingMode::SEARCH; ViAction::SearchNext;
"n", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViAction::SearchPrevious;
Enter, +BindingMode::VI, ~BindingMode::SEARCH; ViAction::Open;
"z", +BindingMode::VI, ~BindingMode::SEARCH; ViAction::CenterAroundViCursor;
"f", +BindingMode::VI, ~BindingMode::SEARCH; ViAction::InlineSearchForward;
"f", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViAction::InlineSearchBackward;
"t", +BindingMode::VI, ~BindingMode::SEARCH; ViAction::InlineSearchForwardShort;
"t", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViAction::InlineSearchBackwardShort;
";", +BindingMode::VI, ~BindingMode::SEARCH; ViAction::InlineSearchNext;
",", +BindingMode::VI, ~BindingMode::SEARCH; ViAction::InlineSearchPrevious;
"*", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViAction::SemanticSearchForward;
"#", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViAction::SemanticSearchBackward;
"k", +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Up;
"j", +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Down;
"h", +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Left;
"l", +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Right;
ArrowUp, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Up;
ArrowDown, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Down;
ArrowLeft, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Left;
ArrowRight, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Right;
"0", +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::First;
"$", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Last;
Home, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::First;
End, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Last;
"^", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::FirstOccupied;
"h", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::High;
"m", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Middle;
"l", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Low;
"b", +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::SemanticLeft;
"w", +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::SemanticRight;
"e", +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::SemanticRightEnd;
"b", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::WordLeft;
"w", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::WordRight;
"e", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::WordRightEnd;
"%", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::Bracket;
"{", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::ParagraphUp;
"}", ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; ViMotion::ParagraphDown;
Enter, +BindingMode::VI, +BindingMode::SEARCH; SearchAction::SearchConfirm;
// Plain search.
Escape, +BindingMode::SEARCH; SearchAction::SearchCancel;
"c", ModifiersState::CONTROL, +BindingMode::SEARCH; SearchAction::SearchCancel;
"u", ModifiersState::CONTROL, +BindingMode::SEARCH; SearchAction::SearchClear;
"w", ModifiersState::CONTROL, +BindingMode::SEARCH; SearchAction::SearchDeleteWord;
"p", ModifiersState::CONTROL, +BindingMode::SEARCH; SearchAction::SearchHistoryPrevious;
"n", ModifiersState::CONTROL, +BindingMode::SEARCH; SearchAction::SearchHistoryNext;
ArrowUp, +BindingMode::SEARCH; SearchAction::SearchHistoryPrevious;
ArrowDown, +BindingMode::SEARCH; SearchAction::SearchHistoryNext;
Enter, +BindingMode::SEARCH, ~BindingMode::VI; SearchAction::SearchFocusNext;
Enter, ModifiersState::SHIFT, +BindingMode::SEARCH, ~BindingMode::VI; SearchAction::SearchFocusPrevious;
);
bindings.extend(platform_key_bindings());
bindings
}
#[cfg(not(any(target_os = "macos", test)))]
fn common_keybindings() -> Vec<KeyBinding> {
bindings!(
KeyBinding;
"v", ModifiersState::CONTROL | ModifiersState::SHIFT, ~BindingMode::VI; Action::Paste;
"v", ModifiersState::CONTROL | ModifiersState::SHIFT, +BindingMode::VI, +BindingMode::SEARCH; Action::Paste;
"f", ModifiersState::CONTROL | ModifiersState::SHIFT, ~BindingMode::SEARCH; Action::SearchForward;
"b", ModifiersState::CONTROL | ModifiersState::SHIFT, ~BindingMode::SEARCH; Action::SearchBackward;
Insert, ModifiersState::SHIFT, ~BindingMode::VI; Action::PasteSelection;
"c", ModifiersState::CONTROL | ModifiersState::SHIFT; Action::Copy;
"c", ModifiersState::CONTROL | ModifiersState::SHIFT, +BindingMode::VI, ~BindingMode::SEARCH; Action::ClearSelection;
"0", ModifiersState::CONTROL; Action::ResetFontSize;
"=", ModifiersState::CONTROL; Action::IncreaseFontSize;
"+", ModifiersState::CONTROL; Action::IncreaseFontSize;
"-", ModifiersState::CONTROL; Action::DecreaseFontSize;
"+" => KeyLocation::Numpad, ModifiersState::CONTROL; Action::IncreaseFontSize;
"-" => KeyLocation::Numpad, ModifiersState::CONTROL; Action::DecreaseFontSize;
)
}
#[cfg(not(any(target_os = "macos", target_os = "windows", test)))]
pub fn platform_key_bindings() -> Vec<KeyBinding> {
common_keybindings()
}
#[cfg(all(target_os = "windows", not(test)))]
pub fn platform_key_bindings() -> Vec<KeyBinding> {
let mut bindings = bindings!(
KeyBinding;
Enter, ModifiersState::ALT; Action::ToggleFullscreen;
);
bindings.extend(common_keybindings());
bindings
}
#[cfg(all(target_os = "macos", not(test)))]
pub fn platform_key_bindings() -> Vec<KeyBinding> {
bindings!(
KeyBinding;
Insert, ModifiersState::SHIFT, ~BindingMode::VI, ~BindingMode::SEARCH; Action::Esc("\x1b[2;2~".into());
// Tabbing api.
"t", ModifiersState::SUPER; Action::CreateNewTab;
"]", ModifiersState::SUPER | ModifiersState::SHIFT; Action::SelectNextTab;
"[", ModifiersState::SUPER | ModifiersState::SHIFT; Action::SelectPreviousTab;
Tab, ModifiersState::SUPER; Action::SelectNextTab;
Tab, ModifiersState::SUPER | ModifiersState::SHIFT; Action::SelectPreviousTab;
"1", ModifiersState::SUPER; Action::SelectTab1;
"2", ModifiersState::SUPER; Action::SelectTab2;
"3", ModifiersState::SUPER; Action::SelectTab3;
"4", ModifiersState::SUPER; Action::SelectTab4;
"5", ModifiersState::SUPER; Action::SelectTab5;
"6", ModifiersState::SUPER; Action::SelectTab6;
"7", ModifiersState::SUPER; Action::SelectTab7;
"8", ModifiersState::SUPER; Action::SelectTab8;
"9", ModifiersState::SUPER; Action::SelectLastTab;
"0", ModifiersState::SUPER; Action::ResetFontSize;
"=", ModifiersState::SUPER; Action::IncreaseFontSize;
"+", ModifiersState::SUPER; Action::IncreaseFontSize;
"-", ModifiersState::SUPER; Action::DecreaseFontSize;
"k", ModifiersState::SUPER, ~BindingMode::VI, ~BindingMode::SEARCH; Action::Esc("\x0c".into());
"k", ModifiersState::SUPER, ~BindingMode::VI, ~BindingMode::SEARCH; Action::ClearHistory;
"v", ModifiersState::SUPER, ~BindingMode::VI; Action::Paste;
"v", ModifiersState::SUPER, +BindingMode::VI, +BindingMode::SEARCH; Action::Paste;
"n", ModifiersState::SUPER; Action::CreateNewWindow;
"f", ModifiersState::CONTROL | ModifiersState::SUPER; Action::ToggleFullscreen;
"c", ModifiersState::SUPER; Action::Copy;
"c", ModifiersState::SUPER, +BindingMode::VI, ~BindingMode::SEARCH; Action::ClearSelection;
"h", ModifiersState::SUPER; Action::Hide;
"h", ModifiersState::SUPER | ModifiersState::ALT; Action::HideOtherApplications;
"m", ModifiersState::SUPER; Action::Minimize;
"q", ModifiersState::SUPER; Action::Quit;
"w", ModifiersState::SUPER; Action::Quit;
"f", ModifiersState::SUPER, ~BindingMode::SEARCH; Action::SearchForward;
"b", ModifiersState::SUPER, ~BindingMode::SEARCH; Action::SearchBackward;
"+" => KeyLocation::Numpad, ModifiersState::SUPER; Action::IncreaseFontSize;
"-" => KeyLocation::Numpad, ModifiersState::SUPER; Action::DecreaseFontSize;
)
}
// Don't return any bindings for tests since they are commented-out by default.
#[cfg(test)]
pub fn platform_key_bindings() -> Vec<KeyBinding> {
vec![]
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum BindingKey {
Scancode(PhysicalKey),
Keycode { key: Key, location: KeyLocation },
}
/// Key location for matching bindings.
#[derive(Debug, Clone, Copy, Eq)]
pub enum KeyLocation {
/// The key is in its standard position.
Standard,
/// The key is on the numeric pad.
Numpad,
/// The key could be anywhere on the keyboard.
Any,
}
impl From<WinitKeyLocation> for KeyLocation {
fn from(value: WinitKeyLocation) -> Self {
match value {
WinitKeyLocation::Standard => KeyLocation::Standard,
WinitKeyLocation::Left => KeyLocation::Any,
WinitKeyLocation::Right => KeyLocation::Any,
WinitKeyLocation::Numpad => KeyLocation::Numpad,
}
}
}
impl PartialEq for KeyLocation {
fn eq(&self, other: &Self) -> bool {
matches!(
(self, other),
(_, KeyLocation::Any)
| (KeyLocation::Any, _)
| (KeyLocation::Standard, KeyLocation::Standard)
| (KeyLocation::Numpad, KeyLocation::Numpad)
)
}
}
impl<'a> Deserialize<'a> for BindingKey {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'a>,
{
let value = SerdeValue::deserialize(deserializer)?;
match u32::deserialize(value.clone()) {
Ok(scancode) => Ok(BindingKey::Scancode(PhysicalKey::from_scancode(scancode))),
Err(_) => {
let keycode = String::deserialize(value.clone()).map_err(D::Error::custom)?;
let (key, location) = if keycode.chars().count() == 1 {
(Key::Character(keycode.to_lowercase().into()), KeyLocation::Any)
} else {
// Translate legacy winit codes into their modern counterparts.
match keycode.as_str() {
"Back" => (Key::Named(NamedKey::Backspace), KeyLocation::Any),
"Up" => (Key::Named(NamedKey::ArrowUp), KeyLocation::Any),
"Down" => (Key::Named(NamedKey::ArrowDown), KeyLocation::Any),
"Left" => (Key::Named(NamedKey::ArrowLeft), KeyLocation::Any),
"Right" => (Key::Named(NamedKey::ArrowRight), KeyLocation::Any),
"At" => (Key::Character("@".into()), KeyLocation::Any),
"Colon" => (Key::Character(":".into()), KeyLocation::Any),
"Period" => (Key::Character(".".into()), KeyLocation::Any),
"LBracket" => (Key::Character("[".into()), KeyLocation::Any),
"RBracket" => (Key::Character("]".into()), KeyLocation::Any),
"Semicolon" => (Key::Character(";".into()), KeyLocation::Any),
"Backslash" => (Key::Character("\\".into()), KeyLocation::Any),
// The keys which has alternative on numeric pad.
"Enter" => (Key::Named(NamedKey::Enter), KeyLocation::Standard),
"Return" => (Key::Named(NamedKey::Enter), KeyLocation::Standard),
"Plus" => (Key::Character("+".into()), KeyLocation::Standard),
"Comma" => (Key::Character(",".into()), KeyLocation::Standard),
"Slash" => (Key::Character("/".into()), KeyLocation::Standard),
"Equals" => (Key::Character("=".into()), KeyLocation::Standard),
"Minus" => (Key::Character("-".into()), KeyLocation::Standard),
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | true |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/cursor.rs | alacritty/src/config/cursor.rs | use std::cmp;
use std::time::Duration;
use serde::{Deserialize, Serialize};
use alacritty_config_derive::{ConfigDeserialize, SerdeReplace};
use alacritty_terminal::vte::ansi::{CursorShape as VteCursorShape, CursorStyle as VteCursorStyle};
use crate::config::ui_config::Percentage;
/// The minimum blink interval value in milliseconds.
const MIN_BLINK_INTERVAL: u64 = 10;
/// The minimum number of blinks before pausing.
const MIN_BLINK_CYCLES_BEFORE_PAUSE: u64 = 1;
#[derive(ConfigDeserialize, Serialize, Copy, Clone, Debug, PartialEq)]
pub struct Cursor {
pub style: ConfigCursorStyle,
pub vi_mode_style: Option<ConfigCursorStyle>,
pub unfocused_hollow: bool,
thickness: Percentage,
blink_interval: u64,
blink_timeout: u8,
}
impl Default for Cursor {
fn default() -> Self {
Self {
thickness: Percentage::new(0.15),
unfocused_hollow: true,
blink_interval: 750,
blink_timeout: 5,
style: Default::default(),
vi_mode_style: Default::default(),
}
}
}
impl Cursor {
#[inline]
pub fn thickness(self) -> f32 {
self.thickness.as_f32()
}
#[inline]
pub fn style(self) -> VteCursorStyle {
self.style.into()
}
#[inline]
pub fn vi_mode_style(self) -> Option<VteCursorStyle> {
self.vi_mode_style.map(Into::into)
}
#[inline]
pub fn blink_interval(self) -> u64 {
cmp::max(self.blink_interval, MIN_BLINK_INTERVAL)
}
#[inline]
pub fn blink_timeout(self) -> Duration {
if self.blink_timeout == 0 {
Duration::ZERO
} else {
cmp::max(
// Show/hide is what we consider a cycle, so multiply by `2`.
Duration::from_millis(self.blink_interval * 2 * MIN_BLINK_CYCLES_BEFORE_PAUSE),
Duration::from_secs(self.blink_timeout as u64),
)
}
}
}
#[derive(SerdeReplace, Deserialize, Serialize, Debug, Copy, Clone, PartialEq, Eq)]
#[serde(untagged, deny_unknown_fields)]
pub enum ConfigCursorStyle {
Shape(CursorShape),
WithBlinking {
#[serde(default)]
shape: CursorShape,
#[serde(default)]
blinking: CursorBlinking,
},
}
impl Default for ConfigCursorStyle {
fn default() -> Self {
Self::Shape(CursorShape::default())
}
}
impl ConfigCursorStyle {
/// Check if blinking is force enabled/disabled.
pub fn blinking_override(&self) -> Option<bool> {
match self {
Self::Shape(_) => None,
Self::WithBlinking { blinking, .. } => blinking.blinking_override(),
}
}
}
impl From<ConfigCursorStyle> for VteCursorStyle {
fn from(config_style: ConfigCursorStyle) -> Self {
match config_style {
ConfigCursorStyle::Shape(shape) => Self { shape: shape.into(), blinking: false },
ConfigCursorStyle::WithBlinking { shape, blinking } => {
Self { shape: shape.into(), blinking: blinking.into() }
},
}
}
}
#[derive(ConfigDeserialize, Serialize, Default, Debug, Copy, Clone, PartialEq, Eq)]
pub enum CursorBlinking {
Never,
#[default]
Off,
On,
Always,
}
impl CursorBlinking {
fn blinking_override(&self) -> Option<bool> {
match self {
Self::Never => Some(false),
Self::Off | Self::On => None,
Self::Always => Some(true),
}
}
}
impl From<CursorBlinking> for bool {
fn from(blinking: CursorBlinking) -> bool {
blinking == CursorBlinking::On || blinking == CursorBlinking::Always
}
}
#[derive(ConfigDeserialize, Serialize, Debug, Default, Eq, PartialEq, Copy, Clone, Hash)]
pub enum CursorShape {
#[default]
Block,
Underline,
Beam,
}
impl From<CursorShape> for VteCursorShape {
fn from(value: CursorShape) -> Self {
match value {
CursorShape::Block => VteCursorShape::Block,
CursorShape::Underline => VteCursorShape::Underline,
CursorShape::Beam => VteCursorShape::Beam,
}
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/bell.rs | alacritty/src/config/bell.rs | use std::time::Duration;
use serde::Serialize;
use alacritty_config_derive::ConfigDeserialize;
use crate::config::ui_config::Program;
use crate::display::color::Rgb;
#[derive(ConfigDeserialize, Serialize, Clone, Debug, PartialEq, Eq)]
pub struct BellConfig {
/// Visual bell animation function.
pub animation: BellAnimation,
/// Command to run on bell.
pub command: Option<Program>,
/// Visual bell flash color.
pub color: Rgb,
/// Visual bell duration in milliseconds.
duration: u16,
}
impl Default for BellConfig {
fn default() -> Self {
Self {
color: Rgb::new(255, 255, 255),
animation: Default::default(),
command: Default::default(),
duration: Default::default(),
}
}
}
impl BellConfig {
pub fn duration(&self) -> Duration {
Duration::from_millis(self.duration as u64)
}
}
/// `VisualBellAnimations` are modeled after a subset of CSS transitions and Robert
/// Penner's Easing Functions.
#[derive(ConfigDeserialize, Serialize, Default, Clone, Copy, Debug, PartialEq, Eq)]
pub enum BellAnimation {
// CSS animation.
Ease,
// CSS animation.
EaseOut,
// Penner animation.
EaseOutSine,
// Penner animation.
EaseOutQuad,
// Penner animation.
EaseOutCubic,
// Penner animation.
EaseOutQuart,
// Penner animation.
EaseOutQuint,
// Penner animation.
EaseOutExpo,
// Penner animation.
EaseOutCirc,
// Penner animation.
#[default]
Linear,
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/mouse.rs | alacritty/src/config/mouse.rs | use serde::{Deserialize, Deserializer, Serialize};
use alacritty_config_derive::{ConfigDeserialize, SerdeReplace};
use crate::config::bindings::{self, MouseBinding};
use crate::config::ui_config;
#[derive(ConfigDeserialize, Serialize, Default, Clone, Debug, PartialEq, Eq)]
pub struct Mouse {
pub hide_when_typing: bool,
#[serde(skip_serializing)]
pub bindings: MouseBindings,
}
#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]
pub struct MouseBindings(pub Vec<MouseBinding>);
impl Default for MouseBindings {
fn default() -> Self {
Self(bindings::default_mouse_bindings())
}
}
impl<'de> Deserialize<'de> for MouseBindings {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/ui_config.rs | alacritty/src/config/ui_config.rs | use std::cell::{OnceCell, RefCell};
use std::collections::HashMap;
use std::error::Error;
use std::fmt::{self, Formatter};
use std::mem;
use std::path::PathBuf;
use std::rc::Rc;
use log::{error, warn};
use serde::de::{Error as SerdeError, MapAccess, Visitor};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use unicode_width::UnicodeWidthChar;
use winit::keyboard::{Key, ModifiersState};
use alacritty_config::SerdeReplace;
use alacritty_config_derive::{ConfigDeserialize, SerdeReplace};
use alacritty_terminal::term::Config as TermConfig;
use alacritty_terminal::term::search::RegexSearch;
use alacritty_terminal::tty::{Options as PtyOptions, Shell};
use crate::config::LOG_TARGET_CONFIG;
use crate::config::bell::BellConfig;
use crate::config::bindings::{
self, Action, Binding, BindingKey, KeyBinding, KeyLocation, ModeWrapper, ModsWrapper,
MouseBinding,
};
use crate::config::color::Colors;
use crate::config::cursor::Cursor;
use crate::config::debug::Debug;
use crate::config::font::Font;
use crate::config::general::General;
use crate::config::mouse::Mouse;
use crate::config::scrolling::Scrolling;
use crate::config::selection::Selection;
use crate::config::terminal::Terminal;
use crate::config::window::WindowConfig;
/// Regex used for the default URL hint.
#[rustfmt::skip]
const URL_REGEX: &str = "(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file:|git://|ssh:|ftp://)\
[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>\"\\s{-}\\^⟨⟩`\\\\]+";
#[derive(ConfigDeserialize, Serialize, Default, Clone, Debug, PartialEq)]
pub struct UiConfig {
/// Miscellaneous configuration options.
pub general: General,
/// Extra environment variables.
pub env: HashMap<String, String>,
/// How much scrolling history to keep.
pub scrolling: Scrolling,
/// Cursor configuration.
pub cursor: Cursor,
/// Selection configuration.
pub selection: Selection,
/// Font configuration.
pub font: Font,
/// Window configuration.
pub window: WindowConfig,
/// Mouse configuration.
pub mouse: Mouse,
/// Debug options.
pub debug: Debug,
/// Bell configuration.
pub bell: BellConfig,
/// RGB values for colors.
pub colors: Colors,
/// Path where config was loaded from.
#[config(skip)]
#[serde(skip_serializing)]
pub config_paths: Vec<PathBuf>,
/// Regex hints for interacting with terminal content.
pub hints: Hints,
/// Config for the alacritty_terminal itself.
pub terminal: Terminal,
/// Keyboard configuration.
keyboard: Keyboard,
/// Path to a shell program to run on startup.
#[config(deprecated = "use terminal.shell instead")]
shell: Option<Program>,
/// Configuration file imports.
///
/// This is never read since the field is directly accessed through the config's
/// [`toml::Value`], but still present to prevent unused field warnings.
#[config(deprecated = "use general.import instead")]
import: Option<Vec<String>>,
/// Shell startup directory.
#[config(deprecated = "use general.working_directory instead")]
working_directory: Option<PathBuf>,
/// Live config reload.
#[config(deprecated = "use general.live_config_reload instead")]
live_config_reload: Option<bool>,
/// Offer IPC through a unix socket.
#[cfg(unix)]
#[config(deprecated = "use general.ipc_socket instead")]
pub ipc_socket: Option<bool>,
}
impl UiConfig {
/// Derive [`TermConfig`] from the config.
pub fn term_options(&self) -> TermConfig {
TermConfig {
semantic_escape_chars: self.selection.semantic_escape_chars.clone(),
scrolling_history: self.scrolling.history() as usize,
vi_mode_cursor_style: self.cursor.vi_mode_style(),
default_cursor_style: self.cursor.style(),
osc52: self.terminal.osc52.0,
kitty_keyboard: true,
}
}
/// Derive [`PtyOptions`] from the config.
pub fn pty_config(&self) -> PtyOptions {
let shell = self.terminal.shell.clone().or_else(|| self.shell.clone()).map(Into::into);
let working_directory =
self.working_directory.clone().or_else(|| self.general.working_directory.clone());
PtyOptions {
working_directory,
shell,
drain_on_exit: false,
env: HashMap::new(),
#[cfg(target_os = "windows")]
escape_args: false,
}
}
#[inline]
pub fn window_opacity(&self) -> f32 {
self.window.opacity.as_f32()
}
#[inline]
pub fn key_bindings(&self) -> &[KeyBinding] {
&self.keyboard.bindings.0
}
#[inline]
pub fn mouse_bindings(&self) -> &[MouseBinding] {
&self.mouse.bindings.0
}
#[inline]
pub fn live_config_reload(&self) -> bool {
self.live_config_reload.unwrap_or(self.general.live_config_reload)
}
#[cfg(unix)]
#[inline]
pub fn ipc_socket(&self) -> bool {
self.ipc_socket.unwrap_or(self.general.ipc_socket)
}
}
/// Keyboard configuration.
#[derive(ConfigDeserialize, Serialize, Default, Clone, Debug, PartialEq)]
struct Keyboard {
    /// Keybindings.
    #[serde(skip_serializing)]
    bindings: KeyBindings,
}

// Newtype over the binding list so deserialization can layer user bindings on
// top of the defaults (see `deserialize_bindings`).
#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]
struct KeyBindings(Vec<KeyBinding>);

impl Default for KeyBindings {
    // Start from the built-in binding set.
    fn default() -> Self {
        Self(bindings::default_key_bindings())
    }
}

impl<'de> Deserialize<'de> for KeyBindings {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Invalid individual bindings are skipped inside `deserialize_bindings`
        // rather than failing the whole config.
        Ok(Self(deserialize_bindings(deserializer, Self::default().0)?))
    }
}
/// Deserialize a binding list, merging it with the provided defaults.
///
/// Invalid entries are logged and dropped instead of failing the whole list;
/// user bindings shadow any default with matching triggers.
pub fn deserialize_bindings<'a, D, T>(
    deserializer: D,
    mut default: Vec<Binding<T>>,
) -> Result<Vec<Binding<T>>, D::Error>
where
    D: Deserializer<'a>,
    T: Clone + Eq,
    Binding<T>: Deserialize<'a>,
{
    let raw_values = Vec::<toml::Value>::deserialize(deserializer)?;

    // Deserialize each binding on its own so a single malformed entry does
    // not invalidate the rest.
    let mut bindings: Vec<Binding<T>> = raw_values
        .into_iter()
        .filter_map(|raw_value| match Binding::<T>::deserialize(raw_value) {
            Ok(binding) => Some(binding),
            Err(err) => {
                error!(target: LOG_TARGET_CONFIG, "Config error: {err}; ignoring binding");
                None
            },
        })
        .collect();

    // Drop every default whose triggers collide with a user binding.
    default.retain(|default_binding| {
        !bindings.iter().any(|binding| default_binding.triggers_match(binding))
    });

    bindings.extend(default);

    Ok(bindings)
}
/// A delta for a point in a 2 dimensional plane.
#[derive(ConfigDeserialize, Serialize, Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct Delta<T: Default> {
    /// Horizontal change.
    pub x: T,
    /// Vertical change.
    pub y: T,
}

/// Regex terminal hints.
#[derive(ConfigDeserialize, Serialize, Clone, Debug, PartialEq, Eq)]
pub struct Hints {
    /// Characters for the hint labels.
    alphabet: HintsAlphabet,
    /// All configured terminal hints.
    pub enabled: Vec<Rc<Hint>>,
}

impl Default for Hints {
    fn default() -> Self {
        // Add URL hint by default when no other hint is present.
        let pattern = LazyRegexVariant::Pattern(String::from(URL_REGEX));
        let regex = LazyRegex(Rc::new(RefCell::new(pattern)));
        let content = HintContent::new(Some(regex), true);

        // Open matches with the platform's standard launcher command.
        #[cfg(not(any(target_os = "macos", windows)))]
        let action = HintAction::Command(Program::Just(String::from("xdg-open")));
        #[cfg(target_os = "macos")]
        let action = HintAction::Command(Program::Just(String::from("open")));
        #[cfg(windows)]
        let action = HintAction::Command(Program::WithArgs {
            program: String::from("cmd"),
            args: vec!["/c".to_string(), "start".to_string(), "".to_string()],
        });

        Self {
            enabled: vec![Rc::new(Hint {
                content,
                action,
                persist: false,
                post_processing: true,
                mouse: Some(HintMouse { enabled: true, mods: Default::default() }),
                // Default keyboard trigger: Ctrl+Shift+O.
                binding: Some(HintBinding {
                    key: BindingKey::Keycode {
                        key: Key::Character("o".into()),
                        location: KeyLocation::Standard,
                    },
                    mods: ModsWrapper(ModifiersState::SHIFT | ModifiersState::CONTROL),
                    cache: Default::default(),
                    mode: Default::default(),
                }),
            })],
            alphabet: Default::default(),
        }
    }
}

impl Hints {
    /// Characters for the hint labels.
    pub fn alphabet(&self) -> &str {
        &self.alphabet.0
    }
}
/// Characters used to build hint labels.
#[derive(SerdeReplace, Serialize, Clone, Debug, PartialEq, Eq)]
struct HintsAlphabet(String);

impl Default for HintsAlphabet {
    fn default() -> Self {
        Self(String::from("jfkdls;ahgurieowpq"))
    }
}

impl<'de> Deserialize<'de> for HintsAlphabet {
    /// Deserialize the alphabet, validating that every character occupies
    /// exactly one terminal cell and that at least two characters are present
    /// (required to form distinct labels).
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let value = String::deserialize(deserializer)?;

        let mut character_count = 0;
        for character in value.chars() {
            // Wide or zero-width characters would break hint label rendering,
            // which assumes one cell per character.
            if character.width() != Some(1) {
                return Err(D::Error::custom("characters must be of width 1"));
            }
            character_count += 1;
        }

        // Fix: error message previously read "at last 2 characters".
        if character_count < 2 {
            return Err(D::Error::custom("must include at least 2 characters"));
        }

        Ok(Self(value))
    }
}
/// Built-in actions for hint mode.
#[derive(ConfigDeserialize, Serialize, Clone, Debug, PartialEq, Eq)]
pub enum HintInternalAction {
    /// Copy the text to the clipboard.
    Copy,
    /// Write the text to the PTY/search.
    Paste,
    /// Select the text matching the hint.
    Select,
    /// Move the vi mode cursor to the beginning of the hint.
    MoveViModeCursor,
}

/// Actions for hint bindings.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Eq)]
pub enum HintAction {
    /// Built-in hint action.
    #[serde(rename = "action")]
    Action(HintInternalAction),
    /// Command the text will be piped to.
    #[serde(rename = "command")]
    Command(Program),
}

/// Hint configuration.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Eq)]
pub struct Hint {
    /// Regex for finding matches.
    #[serde(flatten)]
    pub content: HintContent,
    /// Action executed when this hint is triggered.
    #[serde(flatten)]
    pub action: HintAction,
    /// Hint text post processing.
    #[serde(default)]
    pub post_processing: bool,
    /// Persist hints after selection.
    #[serde(default)]
    pub persist: bool,
    /// Hint mouse highlighting.
    pub mouse: Option<HintMouse>,
    /// Binding required to search for this hint.
    #[serde(skip_serializing)]
    pub binding: Option<HintBinding>,
}

/// What a hint matches: an optional regex and/or escape-sequence hyperlinks.
#[derive(Serialize, Default, Clone, Debug, PartialEq, Eq)]
pub struct HintContent {
    /// Regex for finding matches.
    pub regex: Option<LazyRegex>,
    /// Escape sequence hyperlinks.
    pub hyperlinks: bool,
}

impl HintContent {
    pub fn new(regex: Option<LazyRegex>, hyperlinks: bool) -> Self {
        Self { regex, hyperlinks }
    }
}
impl<'de> Deserialize<'de> for HintContent {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct HintContentVisitor;
        impl<'a> Visitor<'a> for HintContentVisitor {
            type Value = HintContent;

            fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result {
                f.write_str("a mapping")
            }

            // Lenient map deserialization: malformed fields are logged and
            // skipped instead of failing the entire config file.
            fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
            where
                M: MapAccess<'a>,
            {
                let mut content = Self::Value::default();

                while let Some((key, value)) = map.next_entry::<String, toml::Value>()? {
                    match key.as_str() {
                        "regex" => match Option::<LazyRegex>::deserialize(value) {
                            Ok(regex) => content.regex = regex,
                            Err(err) => {
                                error!(
                                    target: LOG_TARGET_CONFIG,
                                    "Config error: hint's regex: {err}"
                                );
                            },
                        },
                        "hyperlinks" => match bool::deserialize(value) {
                            Ok(hyperlink) => content.hyperlinks = hyperlink,
                            Err(err) => {
                                error!(
                                    target: LOG_TARGET_CONFIG,
                                    "Config error: hint's hyperlinks: {err}"
                                );
                            },
                        },
                        // These keys belong to the flattened `HintAction` of
                        // `Hint`, so they are silently ignored here.
                        "command" | "action" => (),
                        key => warn!(target: LOG_TARGET_CONFIG, "Unrecognized hint field: {key}"),
                    }
                }

                // Require at least one of hyperlinks or regex trigger hint matches.
                if content.regex.is_none() && !content.hyperlinks {
                    return Err(M::Error::custom(
                        "Config error: At least one of the hint's regex or hint's hyperlinks must \
                         be set",
                    ));
                }

                Ok(content)
            }
        }

        deserializer.deserialize_any(HintContentVisitor)
    }
}
/// Binding for triggering a keyboard hint.
#[derive(Deserialize, Clone, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct HintBinding {
    pub key: BindingKey,
    #[serde(default)]
    pub mods: ModsWrapper,
    #[serde(default)]
    pub mode: ModeWrapper,
    /// Cache for on-demand [`HintBinding`] to [`KeyBinding`] conversion.
    #[serde(skip)]
    cache: OnceCell<KeyBinding>,
}

impl HintBinding {
    /// Get the key binding for a hint.
    ///
    /// The conversion is computed on first use and memoized in `cache`.
    pub fn key_binding(&self, hint: &Rc<Hint>) -> &KeyBinding {
        self.cache.get_or_init(|| KeyBinding {
            trigger: self.key.clone(),
            mods: self.mods.0,
            mode: self.mode.mode,
            notmode: self.mode.not_mode,
            action: Action::Hint(hint.clone()),
        })
    }
}

// Manual impl so the `cache` field is omitted from debug output.
impl fmt::Debug for HintBinding {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.debug_struct("HintBinding")
            .field("key", &self.key)
            .field("mods", &self.mods)
            .field("mode", &self.mode)
            .finish_non_exhaustive()
    }
}

/// Hint mouse highlighting.
#[derive(ConfigDeserialize, Serialize, Default, Copy, Clone, Debug, PartialEq, Eq)]
pub struct HintMouse {
    /// Hint mouse highlighting availability.
    pub enabled: bool,
    /// Required mouse modifiers for hint highlighting.
    #[serde(skip_serializing)]
    pub mods: ModsWrapper,
}
/// Lazy regex with interior mutability.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LazyRegex(Rc<RefCell<LazyRegexVariant>>);

impl LazyRegex {
    /// Execute a function with the compiled regex DFAs as parameter.
    ///
    /// Returns `None` when the pattern fails to compile; compilation happens
    /// on first use (see [`LazyRegexVariant::compiled`]).
    pub fn with_compiled<T, F>(&self, f: F) -> Option<T>
    where
        F: FnMut(&mut RegexSearch) -> T,
    {
        self.0.borrow_mut().compiled().map(f)
    }
}

impl<'de> Deserialize<'de> for LazyRegex {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Only the pattern string is stored here; DFA compilation is deferred.
        let regex = LazyRegexVariant::Pattern(String::deserialize(deserializer)?);
        Ok(Self(Rc::new(RefCell::new(regex))))
    }
}

impl Serialize for LazyRegex {
    // Serialize as the original pattern string, regardless of compile state.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let variant = self.0.borrow();
        let regex = match &*variant {
            LazyRegexVariant::Compiled(regex, _) => regex,
            LazyRegexVariant::Uncompilable(regex) => regex,
            LazyRegexVariant::Pattern(regex) => regex,
        };

        serializer.serialize_str(regex)
    }
}
/// Regex which is compiled on demand, to avoid expensive computations at startup.
#[derive(Clone, Debug)]
pub enum LazyRegexVariant {
    /// Pattern plus its successfully compiled DFAs.
    Compiled(String, Box<RegexSearch>),
    /// Pattern which has not been compiled yet.
    Pattern(String),
    /// Pattern whose compilation already failed once.
    Uncompilable(String),
}

impl LazyRegexVariant {
    /// Get a reference to the compiled regex.
    ///
    /// If the regex is not already compiled, this will compile the DFAs and store them for future
    /// access.
    fn compiled(&mut self) -> Option<&mut RegexSearch> {
        // Check if the regex has already been compiled.
        let regex = match self {
            Self::Compiled(_, regex_search) => return Some(regex_search),
            Self::Uncompilable(_) => return None,
            Self::Pattern(regex) => mem::take(regex),
        };

        // Compile the regex.
        //
        // Fix: the original read `RegexSearch::new(®ex)` — an HTML-entity
        // mangling of `&regex` (`&reg` → `®`) which is not valid Rust.
        let regex_search = match RegexSearch::new(&regex) {
            Ok(regex_search) => regex_search,
            Err(err) => {
                error!("could not compile hint regex: {err}");
                // Remember the failure so it is not retried (and re-logged)
                // on every call.
                *self = Self::Uncompilable(regex);
                return None;
            },
        };
        *self = Self::Compiled(regex, Box::new(regex_search));

        // Return a reference to the compiled DFAs.
        match self {
            Self::Compiled(_, dfas) => Some(dfas),
            _ => unreachable!(),
        }
    }
}

impl PartialEq for LazyRegexVariant {
    fn eq(&self, other: &Self) -> bool {
        // Only uncompiled patterns compare equal; compiled or failed regexes
        // never match anything, not even themselves.
        match (self, other) {
            (Self::Pattern(regex), Self::Pattern(other_regex)) => regex == other_regex,
            _ => false,
        }
    }
}
impl Eq for LazyRegexVariant {}
/// Wrapper around f32 that represents a percentage value between 0.0 and 1.0.
#[derive(SerdeReplace, Serialize, Clone, Copy, Debug, PartialEq)]
pub struct Percentage(f32);

impl Default for Percentage {
    /// Defaults to 100%.
    fn default() -> Self {
        Self::new(1.)
    }
}

impl Percentage {
    /// Create a percentage, clamping the value into `0.0..=1.0`.
    pub fn new(value: f32) -> Self {
        Percentage(value.clamp(0., 1.))
    }

    /// Raw value in the range `0.0..=1.0`.
    pub fn as_f32(self) -> f32 {
        self.0
    }
}

impl<'de> Deserialize<'de> for Percentage {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Out-of-range values are clamped rather than rejected.
        let value = f32::deserialize(deserializer)?;
        Ok(Self::new(value))
    }
}
/// A program invocation: either a bare command or a command with arguments.
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]
#[serde(untagged, deny_unknown_fields)]
pub enum Program {
    Just(String),
    WithArgs {
        program: String,
        #[serde(default)]
        args: Vec<String>,
    },
}

impl Program {
    /// Executable name or path.
    pub fn program(&self) -> &str {
        match self {
            Program::Just(program) => program,
            Program::WithArgs { program, .. } => program,
        }
    }

    /// Command line arguments; empty for a bare command.
    pub fn args(&self) -> &[String] {
        match self {
            Program::Just(_) => &[],
            Program::WithArgs { args, .. } => args,
        }
    }
}

impl From<Program> for Shell {
    fn from(value: Program) -> Self {
        match value {
            Program::Just(program) => Shell::new(program, Vec::new()),
            Program::WithArgs { program, args } => Shell::new(program, args),
        }
    }
}

// On config override, replace the whole program rather than merging fields.
impl SerdeReplace for Program {
    fn replace(&mut self, value: toml::Value) -> Result<(), Box<dyn Error>> {
        *self = Self::deserialize(value)?;

        Ok(())
    }
}
/// Serde visitor which deserializes a string and lowercases it.
pub(crate) struct StringVisitor;
impl serde::de::Visitor<'_> for StringVisitor {
    type Value = String;

    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        formatter.write_str("a string")
    }

    // NOTE: the input is lowercased, making downstream matching
    // case-insensitive.
    fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(s.to_lowercase())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    use alacritty_terminal::term::test::mock_term;

    use crate::display::hint::visible_regex_match_iter;

    // Inputs which must each yield exactly one URL hint match.
    #[test]
    fn positive_url_parsing_regex_test() {
        for regular_url in [
            "ipfs:s0mEhAsh",
            "ipns:an0TherHash1234",
            "magnet:?xt=urn:btih:L0UDHA5H12",
            "mailto:example@example.org",
            "gemini://gemini.example.org/",
            "gopher://gopher.example.org",
            "https://www.example.org",
            "http://example.org",
            "news:some.news.portal",
            "file:///C:/Windows/",
            "file:/home/user/whatever",
            "git://github.com/user/repo.git",
            "ssh:git@github.com:user/repo.git",
            "ftp://ftp.example.org",
        ] {
            let term = mock_term(regular_url);
            let mut regex = RegexSearch::new(URL_REGEX).unwrap();
            let matches = visible_regex_match_iter(&term, &mut regex).collect::<Vec<_>>();
            assert_eq!(
                matches.len(),
                1,
                "Should have exactly one match url {regular_url}, but instead got: {matches:?}"
            )
        }
    }

    // URL-ish strings which must not produce any match.
    #[test]
    fn negative_url_parsing_regex_test() {
        for url_like in [
            "http::trace::on_request::log_parameters",
            "http//www.example.org",
            "/user:example.org",
            "mailto: example@example.org",
            "http://<script>alert('xss')</script>",
            "mailto:",
        ] {
            let term = mock_term(url_like);
            let mut regex = RegexSearch::new(URL_REGEX).unwrap();
            let matches = visible_regex_match_iter(&term, &mut regex).collect::<Vec<_>>();
            assert!(
                matches.is_empty(),
                "Should not match url in string {url_like}, but instead got: {matches:?}"
            )
        }
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/scrolling.rs | alacritty/src/config/scrolling.rs | use serde::de::Error as SerdeError;
use serde::{Deserialize, Deserializer, Serialize};
use alacritty_config_derive::{ConfigDeserialize, SerdeReplace};
/// Maximum scrollback amount configurable.
pub const MAX_SCROLLBACK_LINES: u32 = 100_000;

/// Struct for scrolling related settings.
#[derive(ConfigDeserialize, Serialize, Copy, Clone, Debug, PartialEq, Eq)]
pub struct Scrolling {
    /// Scroll speed multiplier.
    pub multiplier: u8,
    history: ScrollingHistory,
}

impl Default for Scrolling {
    fn default() -> Self {
        Self { multiplier: 3, history: Default::default() }
    }
}

impl Scrolling {
    /// Configured scrollback history size in lines.
    pub fn history(self) -> u32 {
        self.history.0
    }
}
/// Scrollback history size in lines, validated against the hard maximum.
#[derive(SerdeReplace, Serialize, Copy, Clone, Debug, PartialEq, Eq)]
struct ScrollingHistory(u32);

impl Default for ScrollingHistory {
    /// 10k lines of scrollback by default.
    fn default() -> Self {
        Self(10_000)
    }
}

impl<'de> Deserialize<'de> for ScrollingHistory {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let lines = u32::deserialize(deserializer)?;

        // Oversized values are rejected outright, not clamped.
        if lines <= MAX_SCROLLBACK_LINES {
            Ok(Self(lines))
        } else {
            Err(SerdeError::custom(format!(
                "exceeded maximum scrolling history ({lines}/{MAX_SCROLLBACK_LINES})"
            )))
        }
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/font.rs | alacritty/src/config/font.rs | use std::fmt;
use crossfont::Size as FontSize;
use serde::de::{self, Visitor};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use alacritty_config_derive::{ConfigDeserialize, SerdeReplace};
use crate::config::ui_config::Delta;
/// Font config.
///
/// Defaults are provided at the level of this struct per platform, but not per
/// field in this struct. It might be nice in the future to have defaults for
/// each value independently. Alternatively, maybe erroring when the user
/// doesn't provide complete config is Ok.
#[derive(ConfigDeserialize, Serialize, Debug, Clone, PartialEq, Eq)]
pub struct Font {
    /// Extra spacing per character.
    pub offset: Delta<i8>,
    /// Glyph offset within character cell.
    pub glyph_offset: Delta<i8>,
    #[config(removed = "set the AppleFontSmoothing user default instead")]
    pub use_thin_strokes: bool,
    /// Normal font face.
    normal: FontDescription,
    /// Bold font face.
    bold: SecondaryFontDescription,
    /// Italic font face.
    italic: SecondaryFontDescription,
    /// Bold italic font face.
    bold_italic: SecondaryFontDescription,
    /// Font size in points.
    size: Size,
    /// Whether to use the built-in font for box drawing characters.
    pub builtin_box_drawing: bool,
}

impl Font {
    /// Get a font clone with a size modification.
    pub fn with_size(self, size: FontSize) -> Font {
        Font { size: Size(size), ..self }
    }

    /// Font size in points.
    #[inline]
    pub fn size(&self) -> FontSize {
        self.size.0
    }

    /// Get normal font description.
    pub fn normal(&self) -> &FontDescription {
        &self.normal
    }

    /// Get bold font description.
    ///
    /// The family falls back to the normal face when unset.
    pub fn bold(&self) -> FontDescription {
        self.bold.desc(&self.normal)
    }

    /// Get italic font description.
    ///
    /// The family falls back to the normal face when unset.
    pub fn italic(&self) -> FontDescription {
        self.italic.desc(&self.normal)
    }

    /// Get bold italic font description.
    ///
    /// The family falls back to the normal face when unset.
    pub fn bold_italic(&self) -> FontDescription {
        self.bold_italic.desc(&self.normal)
    }
}
impl Default for Font {
fn default() -> Font {
Self {
builtin_box_drawing: true,
glyph_offset: Default::default(),
use_thin_strokes: Default::default(),
bold_italic: Default::default(),
italic: Default::default(),
offset: Default::default(),
normal: Default::default(),
bold: Default::default(),
size: Default::default(),
}
}
}
/// Description of the normal font.
#[derive(ConfigDeserialize, Serialize, Debug, Clone, PartialEq, Eq)]
pub struct FontDescription {
    pub family: String,
    pub style: Option<String>,
}

impl Default for FontDescription {
    fn default() -> FontDescription {
        FontDescription {
            // Platform-specific default monospace family.
            #[cfg(not(any(target_os = "macos", windows)))]
            family: "monospace".into(),
            #[cfg(target_os = "macos")]
            family: "Menlo".into(),
            #[cfg(windows)]
            family: "Consolas".into(),
            style: None,
        }
    }
}

/// Description of the italic and bold font.
#[derive(ConfigDeserialize, Serialize, Debug, Default, Clone, PartialEq, Eq)]
pub struct SecondaryFontDescription {
    family: Option<String>,
    style: Option<String>,
}

impl SecondaryFontDescription {
    /// Resolve against a fallback face; only the family is inherited when
    /// unset, the style stays as configured (possibly `None`).
    pub fn desc(&self, fallback: &FontDescription) -> FontDescription {
        FontDescription {
            family: self.family.clone().unwrap_or_else(|| fallback.family.clone()),
            style: self.style.clone(),
        }
    }
}
/// Font size in points, wrapping the crossfont size type.
#[derive(SerdeReplace, Debug, Clone, PartialEq, Eq)]
struct Size(FontSize);

impl Default for Size {
    fn default() -> Self {
        Self(FontSize::new(11.25))
    }
}

impl<'de> Deserialize<'de> for Size {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Accept both float and integer literals in the config file.
        struct NumVisitor;
        impl Visitor<'_> for NumVisitor {
            type Value = Size;

            fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("f64 or i64")
            }

            fn visit_f64<E: de::Error>(self, value: f64) -> Result<Self::Value, E> {
                Ok(Size(FontSize::new(value as f32)))
            }

            fn visit_i64<E: de::Error>(self, value: i64) -> Result<Self::Value, E> {
                Ok(Size(FontSize::new(value as f32)))
            }
        }

        deserializer.deserialize_any(NumVisitor)
    }
}

impl Serialize for Size {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_f32(self.0.as_pt())
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/debug.rs | alacritty/src/config/debug.rs | use log::LevelFilter;
use serde::Serialize;
use alacritty_config_derive::ConfigDeserialize;
/// Debugging options.
#[derive(ConfigDeserialize, Serialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Debug {
    pub log_level: LevelFilter,
    pub print_events: bool,
    /// Keep the log file after quitting.
    pub persistent_logging: bool,
    /// Should show render timer.
    pub render_timer: bool,
    /// Highlight damage information produced by alacritty.
    pub highlight_damage: bool,
    /// The renderer alacritty should be using.
    pub renderer: Option<RendererPreference>,
    /// Use EGL as display API if the current platform allows it.
    pub prefer_egl: bool,
    /// Record ref test.
    #[config(skip)]
    #[serde(skip_serializing)]
    pub ref_test: bool,
}

impl Default for Debug {
    fn default() -> Self {
        Self {
            // Only warnings and errors are logged by default.
            log_level: LevelFilter::Warn,
            print_events: Default::default(),
            persistent_logging: Default::default(),
            render_timer: Default::default(),
            highlight_damage: Default::default(),
            ref_test: Default::default(),
            renderer: Default::default(),
            prefer_egl: Default::default(),
        }
    }
}

/// The renderer configuration options.
#[derive(ConfigDeserialize, Serialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum RendererPreference {
    /// OpenGL 3.3 renderer.
    Glsl3,
    /// GLES 2 renderer, with optional extensions like dual source blending.
    Gles2,
    /// Pure GLES 2 renderer.
    Gles2Pure,
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/terminal.rs | alacritty/src/config/terminal.rs | use serde::{Deserialize, Deserializer, Serialize, de};
use toml::Value;
use alacritty_config_derive::{ConfigDeserialize, SerdeReplace};
use alacritty_terminal::term::Osc52;
use crate::config::ui_config::{Program, StringVisitor};
/// Terminal-related configuration.
#[derive(ConfigDeserialize, Serialize, Default, Clone, Debug, PartialEq)]
pub struct Terminal {
    /// OSC52 support mode.
    pub osc52: SerdeOsc52,
    /// Path to a shell program to run on startup.
    pub shell: Option<Program>,
}

/// Newtype adding config (de)serialization to [`Osc52`].
#[derive(SerdeReplace, Serialize, Default, Copy, Clone, Debug, PartialEq)]
pub struct SerdeOsc52(pub Osc52);

impl<'de> Deserialize<'de> for SerdeOsc52 {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Lowercase the input via `StringVisitor` so the mode is matched
        // case-insensitively, then defer to `Osc52`'s own deserializer.
        let value = deserializer.deserialize_str(StringVisitor)?;
        Osc52::deserialize(Value::String(value)).map(SerdeOsc52).map_err(de::Error::custom)
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/mod.rs | alacritty/src/config/mod.rs | use std::fmt::{self, Display, Formatter};
use std::path::{Path, PathBuf};
use std::result::Result as StdResult;
use std::{env, fs, io};
use log::{debug, error, info, warn};
use serde::Deserialize;
use serde_yaml::Error as YamlError;
use toml::de::Error as TomlError;
use toml::ser::Error as TomlSeError;
use toml::{Table, Value};
pub mod bell;
pub mod color;
pub mod cursor;
pub mod debug;
pub mod font;
pub mod general;
pub mod monitor;
pub mod scrolling;
pub mod selection;
pub mod serde_utils;
pub mod terminal;
pub mod ui_config;
pub mod window;
mod bindings;
mod mouse;
use crate::cli::Options;
#[cfg(test)]
pub use crate::config::bindings::Binding;
pub use crate::config::bindings::{
Action, BindingKey, BindingMode, KeyBinding, MouseAction, MouseEvent, SearchAction, ViAction,
};
pub use crate::config::ui_config::UiConfig;
use crate::logging::LOG_TARGET_CONFIG;
/// Maximum recursion depth for configuration file imports.
pub const IMPORT_RECURSION_LIMIT: usize = 5;

/// Result from config loading.
pub type Result<T> = std::result::Result<T, Error>;

/// Errors occurring during config loading.
#[derive(Debug)]
pub enum Error {
    /// Couldn't read $HOME environment variable.
    ReadingEnvHome(env::VarError),
    /// io error reading file.
    Io(io::Error),
    /// Invalid toml.
    Toml(TomlError),
    /// Failed toml serialization.
    TomlSe(TomlSeError),
    /// Invalid yaml.
    Yaml(YamlError),
}

impl std::error::Error for Error {
    // Forwards to the wrapped error's own source; the wrapped error itself is
    // already rendered by this type's `Display`.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Error::ReadingEnvHome(err) => err.source(),
            Error::Io(err) => err.source(),
            Error::Toml(err) => err.source(),
            Error::TomlSe(err) => err.source(),
            Error::Yaml(err) => err.source(),
        }
    }
}

impl Display for Error {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            Error::ReadingEnvHome(err) => {
                write!(f, "Unable to read $HOME environment variable: {err}")
            },
            Error::Io(err) => write!(f, "Error reading config file: {err}"),
            Error::Toml(err) => write!(f, "Config error: {err}"),
            // TOML serialization only happens while converting a legacy YAML
            // config, hence the YAML-centric wording.
            Error::TomlSe(err) => write!(f, "Yaml conversion error: {err}"),
            Error::Yaml(err) => write!(f, "Config error: {err}"),
        }
    }
}

impl From<env::VarError> for Error {
    fn from(val: env::VarError) -> Self {
        Error::ReadingEnvHome(val)
    }
}

impl From<io::Error> for Error {
    fn from(val: io::Error) -> Self {
        Error::Io(val)
    }
}

impl From<TomlError> for Error {
    fn from(val: TomlError) -> Self {
        Error::Toml(val)
    }
}

impl From<TomlSeError> for Error {
    fn from(val: TomlSeError) -> Self {
        Error::TomlSe(val)
    }
}

impl From<YamlError> for Error {
    fn from(val: YamlError) -> Self {
        Error::Yaml(val)
    }
}
/// Load the configuration file.
pub fn load(options: &mut Options) -> UiConfig {
    // Prefer an explicit CLI config file, then `alacritty.toml`, then the
    // legacy `alacritty.yml`.
    let config_path = options
        .config_file
        .clone()
        .or_else(|| installed_config("toml"))
        .or_else(|| installed_config("yml"));

    // Load the config using the following fallback behavior:
    //  - Config path + CLI overrides
    //  - CLI overrides
    //  - Default
    let mut config = config_path
        .as_ref()
        .and_then(|config_path| load_from(config_path).ok())
        .unwrap_or_else(|| {
            let mut config = UiConfig::default();
            match config_path {
                // Remember the failing path even on fallback (NOTE(review):
                // presumably so live reload can pick up later fixes — confirm
                // against the config monitor).
                Some(config_path) => config.config_paths.push(config_path),
                None => info!(target: LOG_TARGET_CONFIG, "No config file found; using default"),
            }
            config
        });

    after_loading(&mut config, options);

    config
}

/// Attempt to reload the configuration file.
pub fn reload(config_path: &Path, options: &mut Options) -> Result<UiConfig> {
    debug!("Reloading configuration file: {config_path:?}");

    // Load config, propagating errors.
    let mut config = load_from(config_path)?;
    after_loading(&mut config, options);

    Ok(config)
}

/// Modifications after the `UiConfig` object is created.
fn after_loading(config: &mut UiConfig, options: &mut Options) {
    // Override config with CLI options.
    options.override_config(config);
}
/// Load configuration file and log errors.
fn load_from(path: &Path) -> Result<UiConfig> {
    match read_config(path) {
        Ok(config) => Ok(config),
        // Special-case the missing-file error for a friendlier message.
        Err(Error::Io(io)) if io.kind() == io::ErrorKind::NotFound => {
            error!(target: LOG_TARGET_CONFIG, "Unable to load config {path:?}: File not found");
            Err(Error::Io(io))
        },
        Err(err) => {
            error!(target: LOG_TARGET_CONFIG, "Unable to load config {path:?}: {err}");
            Err(err)
        },
    }
}

/// Deserialize configuration file from path.
fn read_config(path: &Path) -> Result<UiConfig> {
    let mut config_paths = Vec::new();
    let config_value = parse_config(path, &mut config_paths, IMPORT_RECURSION_LIMIT)?;

    // Deserialize to concrete type.
    let mut config = UiConfig::deserialize(config_value)?;
    // Record every file that participated in this config.
    config.config_paths = config_paths;

    Ok(config)
}

/// Deserialize all configuration files as generic Value.
fn parse_config(
    path: &Path,
    config_paths: &mut Vec<PathBuf>,
    recursion_limit: usize,
) -> Result<Value> {
    config_paths.push(path.to_owned());

    // Deserialize the configuration file.
    let config = deserialize_config(path, false)?;

    // Merge config with imports.
    let imports = load_imports(&config, path, config_paths, recursion_limit);
    Ok(serde_utils::merge(imports, config))
}
/// Deserialize a configuration file.
pub fn deserialize_config(path: &Path, warn_pruned: bool) -> Result<Value> {
    let mut contents = fs::read_to_string(path)?;

    // Remove UTF-8 BOM; U+FEFF encodes to exactly 3 bytes, hence `split_off(3)`.
    if contents.starts_with('\u{FEFF}') {
        contents = contents.split_off(3);
    }

    // Convert YAML to TOML as a transitionary fallback mechanism.
    let extension = path.extension().unwrap_or_default();
    if (extension == "yaml" || extension == "yml") && !contents.trim().is_empty() {
        warn!(
            "YAML config {path:?} is deprecated, please migrate to TOML using `alacritty migrate`"
        );

        let mut value: serde_yaml::Value = serde_yaml::from_str(&contents)?;
        // TOML has no null value, so nulls must be dropped before conversion.
        prune_yaml_nulls(&mut value, warn_pruned);
        contents = toml::to_string(&value)?;
    }

    // Load configuration file as Value.
    let config: Value = toml::from_str(&contents)?;

    Ok(config)
}
/// Load all referenced configuration files.
fn load_imports(
    config: &Value,
    base_path: &Path,
    config_paths: &mut Vec<PathBuf>,
    recursion_limit: usize,
) -> Value {
    // Get paths for all imports.
    let import_paths = match imports(config, base_path, recursion_limit) {
        Ok(import_paths) => import_paths,
        Err(err) => {
            error!(target: LOG_TARGET_CONFIG, "{err}");
            return Value::Table(Table::new());
        },
    };

    // Parse configs for all imports recursively.
    let mut merged = Value::Table(Table::new());
    for import_path in import_paths {
        let path = match import_path {
            Ok(path) => path,
            Err(err) => {
                error!(target: LOG_TARGET_CONFIG, "{err}");
                continue;
            },
        };

        match parse_config(&path, config_paths, recursion_limit - 1) {
            Ok(config) => merged = serde_utils::merge(merged, config),
            // A missing import is informational only, not an error.
            Err(Error::Io(io)) if io.kind() == io::ErrorKind::NotFound => {
                info!(target: LOG_TARGET_CONFIG, "Config import not found:\n {:?}", path.display());
                continue;
            },
            Err(err) => {
                error!(target: LOG_TARGET_CONFIG, "Unable to import config {path:?}: {err}")
            },
        }
    }

    merged
}
/// Get all import paths for a configuration.
pub fn imports(
    config: &Value,
    base_path: &Path,
    recursion_limit: usize,
) -> StdResult<Vec<StdResult<PathBuf, String>>, String> {
    // Accept both the deprecated top-level `import` and `general.import`.
    let imports =
        config.get("import").or_else(|| config.get("general").and_then(|g| g.get("import")));
    let imports = match imports {
        Some(Value::Array(imports)) => imports,
        Some(_) => return Err("Invalid import type: expected a sequence".into()),
        None => return Ok(Vec::new()),
    };

    // Limit recursion to prevent infinite loops.
    if !imports.is_empty() && recursion_limit == 0 {
        return Err("Exceeded maximum configuration import depth".into());
    }

    let mut import_paths = Vec::new();

    for import in imports {
        let path = match import {
            Value::String(path) => PathBuf::from(path),
            // Non-string elements are reported individually so valid imports
            // in the same list still load.
            _ => {
                import_paths.push(Err("Invalid import element type: expected path string".into()));
                continue;
            },
        };

        let normalized = normalize_import(base_path, path);

        import_paths.push(Ok(normalized));
    }

    Ok(import_paths)
}
/// Normalize import paths.
///
/// Expands a leading `~/` to the user's home directory and resolves relative
/// paths against the directory of the importing config file.
pub fn normalize_import(base_config_path: &Path, import_path: impl Into<PathBuf>) -> PathBuf {
    let mut path = import_path.into();

    // Expand `~/` using the home directory, when one can be determined.
    if let Ok(stripped) = path.strip_prefix("~/") {
        if let Some(home_dir) = home::home_dir() {
            path = home_dir.join(stripped);
        }
    }

    // Anchor relative imports at the importing config's directory.
    match base_config_path.parent() {
        Some(base_config_dir) if path.is_relative() => base_config_dir.join(path),
        _ => path,
    }
}
/// Prune the nulls from the YAML to ensure TOML compatibility.
fn prune_yaml_nulls(value: &mut serde_yaml::Value, warn_pruned: bool) {
    // Returns `true` when the (pruned) subtree is null or empty and should be
    // dropped by its parent.
    fn walk(value: &mut serde_yaml::Value, warn_pruned: bool) -> bool {
        match value {
            serde_yaml::Value::Sequence(sequence) => {
                sequence.retain_mut(|value| !walk(value, warn_pruned));
                sequence.is_empty()
            },
            serde_yaml::Value::Mapping(mapping) => {
                mapping.retain(|key, value| {
                    let retain = !walk(value, warn_pruned);
                    // Optionally announce dropped keys to the user.
                    if let Some(key_name) = key.as_str().filter(|_| !retain && warn_pruned) {
                        eprintln!("Removing null key \"{key_name}\" from the end config");
                    }
                    retain
                });
                mapping.is_empty()
            },
            serde_yaml::Value::Null => true,
            _ => false,
        }
    }

    if walk(value, warn_pruned) {
        // When the value itself is null return the mapping.
        *value = serde_yaml::Value::Mapping(Default::default());
    }
}
/// Get the location of the first found default config file paths
/// according to the following order:
///
/// 1. $XDG_CONFIG_HOME/alacritty/alacritty.toml
/// 2. $XDG_CONFIG_HOME/alacritty.toml
/// 3. $HOME/.config/alacritty/alacritty.toml
/// 4. $HOME/.alacritty.toml
/// 5. /etc/alacritty/alacritty.toml
///
/// `suffix` is the file extension to look for ("toml" or "yml"); the
/// candidate file name is always `alacritty.<suffix>`.
#[cfg(not(windows))]
pub fn installed_config(suffix: &str) -> Option<PathBuf> {
    let file_name = format!("alacritty.{suffix}");

    // Try using XDG location by default.
    xdg::BaseDirectories::with_prefix("alacritty")
        .find_config_file(&file_name)
        // Also accept the file directly below $XDG_CONFIG_HOME (no prefix dir).
        .or_else(|| xdg::BaseDirectories::new().find_config_file(&file_name))
        .or_else(|| {
            if let Ok(home) = env::var("HOME") {
                // Fallback path: $HOME/.config/alacritty/alacritty.toml.
                let fallback = PathBuf::from(&home).join(".config/alacritty").join(&file_name);
                if fallback.exists() {
                    return Some(fallback);
                }
                // Fallback path: $HOME/.alacritty.toml.
                let hidden_name = format!(".{file_name}");
                let fallback = PathBuf::from(&home).join(hidden_name);
                if fallback.exists() {
                    return Some(fallback);
                }
            }

            // Last resort: system-wide location.
            let fallback = PathBuf::from("/etc/alacritty").join(&file_name);
            fallback.exists().then_some(fallback)
        })
}
/// Get the location of the default config file on Windows, below the
/// platform config directory reported by `dirs::config_dir`.
///
/// Returns `None` when the file does not exist.
#[cfg(windows)]
pub fn installed_config(suffix: &str) -> Option<PathBuf> {
    let file_name = format!("alacritty.{suffix}");
    dirs::config_dir().map(|path| path.join("alacritty").join(file_name)).filter(|new| new.exists())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn empty_config() {
toml::from_str::<UiConfig>("").unwrap();
}
fn yaml_to_toml(contents: &str) -> String {
let mut value: serde_yaml::Value = serde_yaml::from_str(contents).unwrap();
prune_yaml_nulls(&mut value, false);
toml::to_string(&value).unwrap()
}
#[test]
fn yaml_with_nulls() {
let contents = r#"
window:
blinking: Always
cursor:
not_blinking: Always
some_array:
- { window: }
- { window: "Hello" }
"#;
let toml = yaml_to_toml(contents);
assert_eq!(
toml.trim(),
r#"[window]
blinking = "Always"
not_blinking = "Always"
[[window.some_array]]
window = "Hello""#
);
}
#[test]
fn empty_yaml_to_toml() {
let contents = r#"
"#;
let toml = yaml_to_toml(contents);
assert!(toml.is_empty());
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/window.rs | alacritty/src/config/window.rs | use std::fmt::{self, Formatter};
use log::{error, warn};
use serde::de::{self, MapAccess, Visitor};
use serde::{Deserialize, Deserializer, Serialize};
#[cfg(target_os = "macos")]
use winit::platform::macos::OptionAsAlt as WinitOptionAsAlt;
use winit::window::{Fullscreen, Theme as WinitTheme, WindowLevel as WinitWindowLevel};
use alacritty_config_derive::{ConfigDeserialize, SerdeReplace};
use crate::config::LOG_TARGET_CONFIG;
use crate::config::ui_config::{Delta, Percentage};
/// Default Alacritty name, used for window title and class.
pub const DEFAULT_NAME: &str = "Alacritty";
#[derive(ConfigDeserialize, Serialize, Debug, Clone, PartialEq)]
pub struct WindowConfig {
/// Initial position.
pub position: Option<Delta<i32>>,
/// Draw the window with title bar / borders.
pub decorations: Decorations,
/// Startup mode.
pub startup_mode: StartupMode,
/// XEmbed parent.
#[config(skip)]
#[serde(skip_serializing)]
pub embed: Option<u32>,
/// Spread out additional padding evenly.
pub dynamic_padding: bool,
/// Use dynamic title.
pub dynamic_title: bool,
/// Information to identify a particular window.
#[config(flatten)]
pub identity: Identity,
/// Background opacity from 0.0 to 1.0.
pub opacity: Percentage,
/// Request blur behind the window.
pub blur: bool,
/// Controls which `Option` key should be treated as `Alt`.
option_as_alt: OptionAsAlt,
/// Resize increments.
pub resize_increments: bool,
/// Pixel padding.
padding: Delta<u16>,
/// Initial dimensions.
dimensions: Dimensions,
/// System decorations theme variant.
decorations_theme_variant: Option<Theme>,
/// Window level.
pub level: WindowLevel,
}
impl Default for WindowConfig {
fn default() -> Self {
Self {
dynamic_title: true,
blur: Default::default(),
embed: Default::default(),
padding: Default::default(),
opacity: Default::default(),
position: Default::default(),
identity: Default::default(),
dimensions: Default::default(),
decorations: Default::default(),
startup_mode: Default::default(),
dynamic_padding: Default::default(),
resize_increments: Default::default(),
decorations_theme_variant: Default::default(),
option_as_alt: Default::default(),
level: Default::default(),
}
}
}
impl WindowConfig {
    /// Requested startup size in terminal cells.
    ///
    /// Returns `None` when either axis is 0 (the "unset" value); if only one
    /// axis was configured, a warning is logged before returning `None`.
    #[inline]
    pub fn dimensions(&self) -> Option<Dimensions> {
        let (lines, columns) = (self.dimensions.lines, self.dimensions.columns);
        let (lines_is_non_zero, columns_is_non_zero) = (lines != 0, columns != 0);

        if lines_is_non_zero && columns_is_non_zero {
            // Return dimensions if both `lines` and `columns` are non-zero.
            Some(self.dimensions)
        } else if lines_is_non_zero || columns_is_non_zero {
            // Warn if either `columns` or `lines` is non-zero.
            let (zero_key, non_zero_key, non_zero_value) = if lines_is_non_zero {
                ("columns", "lines", lines)
            } else {
                ("lines", "columns", columns)
            };
            warn!(
                target: LOG_TARGET_CONFIG,
                "Both `lines` and `columns` must be non-zero for `window.dimensions` to take \
                effect. Configured value of `{zero_key}` is 0 while that of `{non_zero_key}` is {non_zero_value}",
            );

            None
        } else {
            None
        }
    }

    /// Configured padding in physical pixels, scaled by `scale_factor` and
    /// floored to whole pixels.
    #[inline]
    pub fn padding(&self, scale_factor: f32) -> (f32, f32) {
        let padding_x = (f32::from(self.padding.x) * scale_factor).floor();
        let padding_y = (f32::from(self.padding.y) * scale_factor).floor();
        (padding_x, padding_y)
    }

    /// Initial fullscreen mode, when the startup mode requests fullscreen.
    #[inline]
    pub fn fullscreen(&self) -> Option<Fullscreen> {
        if self.startup_mode == StartupMode::Fullscreen {
            Some(Fullscreen::Borderless(None))
        } else {
            None
        }
    }

    /// Whether the window should start maximized.
    #[inline]
    pub fn maximized(&self) -> bool {
        self.startup_mode == StartupMode::Maximized
    }

    /// Translate the `Option`-as-`Alt` config value into winit's equivalent.
    #[cfg(target_os = "macos")]
    pub fn option_as_alt(&self) -> WinitOptionAsAlt {
        match self.option_as_alt {
            OptionAsAlt::OnlyLeft => WinitOptionAsAlt::OnlyLeft,
            OptionAsAlt::OnlyRight => WinitOptionAsAlt::OnlyRight,
            OptionAsAlt::Both => WinitOptionAsAlt::Both,
            OptionAsAlt::None => WinitOptionAsAlt::None,
        }
    }

    /// Explicitly configured decorations theme variant, if any.
    pub fn theme(&self) -> Option<WinitTheme> {
        self.decorations_theme_variant.map(WinitTheme::from)
    }
}
#[derive(ConfigDeserialize, Serialize, Debug, Clone, PartialEq, Eq)]
pub struct Identity {
/// Window title.
pub title: String,
/// Window class.
pub class: Class,
}
impl Default for Identity {
fn default() -> Self {
Self { title: DEFAULT_NAME.into(), class: Default::default() }
}
}
#[derive(ConfigDeserialize, Serialize, Default, Debug, Copy, Clone, PartialEq, Eq)]
pub enum StartupMode {
#[default]
Windowed,
Maximized,
Fullscreen,
SimpleFullscreen,
}
#[derive(ConfigDeserialize, Serialize, Default, Debug, Copy, Clone, PartialEq, Eq)]
pub enum Decorations {
#[default]
Full,
Transparent,
Buttonless,
None,
}
/// Window Dimensions.
///
/// Newtype to avoid passing values incorrectly.
#[derive(ConfigDeserialize, Serialize, Default, Debug, Copy, Clone, PartialEq, Eq)]
pub struct Dimensions {
/// Window width in character columns.
pub columns: usize,
/// Window Height in character lines.
pub lines: usize,
}
/// Window class hint.
#[derive(SerdeReplace, Serialize, Debug, Clone, PartialEq, Eq)]
pub struct Class {
pub general: String,
pub instance: String,
}
impl Class {
pub fn new(general: impl ToString, instance: impl ToString) -> Self {
Self { general: general.to_string(), instance: instance.to_string() }
}
}
impl Default for Class {
fn default() -> Self {
Self::new(DEFAULT_NAME, DEFAULT_NAME)
}
}
impl<'de> Deserialize<'de> for Class {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct ClassVisitor;
impl<'a> Visitor<'a> for ClassVisitor {
type Value = Class;
fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.write_str("a mapping")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
Ok(Self::Value { instance: value.into(), ..Self::Value::default() })
}
fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
where
M: MapAccess<'a>,
{
let mut class = Self::Value::default();
while let Some((key, value)) = map.next_entry::<String, toml::Value>()? {
match key.as_str() {
"instance" => match String::deserialize(value) {
Ok(instance) => class.instance = instance,
Err(err) => {
error!(
target: LOG_TARGET_CONFIG,
"Config error: class.instance: {err}"
);
},
},
"general" => match String::deserialize(value) {
Ok(general) => class.general = general,
Err(err) => {
error!(
target: LOG_TARGET_CONFIG,
"Config error: class.instance: {err}"
);
},
},
key => warn!(target: LOG_TARGET_CONFIG, "Unrecognized class field: {key}"),
}
}
Ok(class)
}
}
deserializer.deserialize_any(ClassVisitor)
}
}
#[derive(ConfigDeserialize, Serialize, Default, Debug, Clone, Copy, PartialEq, Eq)]
pub enum OptionAsAlt {
/// The left `Option` key is treated as `Alt`.
OnlyLeft,
/// The right `Option` key is treated as `Alt`.
OnlyRight,
/// Both `Option` keys are treated as `Alt`.
Both,
/// No special handling is applied for `Option` key.
#[default]
None,
}
/// System decorations theme variant.
#[derive(ConfigDeserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)]
pub enum Theme {
Light,
Dark,
}
impl From<Theme> for WinitTheme {
fn from(theme: Theme) -> Self {
match theme {
Theme::Light => WinitTheme::Light,
Theme::Dark => WinitTheme::Dark,
}
}
}
#[derive(ConfigDeserialize, Serialize, Default, Debug, Clone, Copy, PartialEq, Eq)]
pub enum WindowLevel {
#[default]
Normal,
AlwaysOnTop,
}
impl From<WindowLevel> for WinitWindowLevel {
fn from(level: WindowLevel) -> Self {
match level {
WindowLevel::Normal => WinitWindowLevel::Normal,
WindowLevel::AlwaysOnTop => WinitWindowLevel::AlwaysOnTop,
}
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/selection.rs | alacritty/src/config/selection.rs | use serde::Serialize;
use alacritty_config_derive::ConfigDeserialize;
use alacritty_terminal::term::SEMANTIC_ESCAPE_CHARS;
#[derive(ConfigDeserialize, Serialize, Clone, Debug, PartialEq, Eq)]
pub struct Selection {
pub semantic_escape_chars: String,
pub save_to_clipboard: bool,
}
impl Default for Selection {
fn default() -> Self {
Self {
semantic_escape_chars: SEMANTIC_ESCAPE_CHARS.to_owned(),
save_to_clipboard: Default::default(),
}
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/monitor.rs | alacritty/src/config/monitor.rs | use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::PathBuf;
use std::sync::mpsc::{self, RecvTimeoutError, Sender};
use std::thread::JoinHandle;
use std::time::{Duration, Instant};
use log::{debug, error, warn};
use notify::event::{ModifyKind, RenameMode};
use notify::{
Config, Error as NotifyError, Event as NotifyEvent, EventKind, RecommendedWatcher,
RecursiveMode, Watcher,
};
use winit::event_loop::EventLoopProxy;
use alacritty_terminal::thread;
use crate::event::{Event, EventType};
const DEBOUNCE_DELAY: Duration = Duration::from_millis(10);
/// The fallback for `RecommendedWatcher` polling.
const FALLBACK_POLLING_TIMEOUT: Duration = Duration::from_secs(1);
/// Config file update monitor.
pub struct ConfigMonitor {
thread: JoinHandle<()>,
shutdown_tx: Sender<Result<NotifyEvent, NotifyError>>,
watched_hash: Option<u64>,
}
impl ConfigMonitor {
    /// Spawn a background thread watching the parent directories of `paths`,
    /// sending a `ConfigReload` event through `event_proxy` when one of the
    /// watched files changes.
    ///
    /// Returns `None` when `paths` is empty or the watcher backend could not
    /// be created.
    pub fn new(mut paths: Vec<PathBuf>, event_proxy: EventLoopProxy<Event>) -> Option<Self> {
        // Don't monitor config if there is no path to watch.
        if paths.is_empty() {
            return None;
        }

        // Calculate the hash for the unmodified list of paths.
        let watched_hash = Self::hash_paths(&paths);

        // Exclude char devices like `/dev/null`, sockets, and so on, by checking that file type
        // is a regular file.
        paths.retain(|path| {
            // Call `metadata` to resolve symbolic links.
            path.metadata().is_ok_and(|metadata| metadata.file_type().is_file())
        });

        // Canonicalize paths, keeping the base paths for symlinks.
        for i in 0..paths.len() {
            if let Ok(canonical_path) = paths[i].canonicalize() {
                match paths[i].symlink_metadata() {
                    // Watch both the symlink and its target.
                    Ok(metadata) if metadata.file_type().is_symlink() => paths.push(canonical_path),
                    _ => paths[i] = canonical_path,
                }
            }
        }

        // The Duration argument is a debouncing period.
        let (tx, rx) = mpsc::channel();
        let mut watcher = match RecommendedWatcher::new(
            tx.clone(),
            Config::default().with_poll_interval(FALLBACK_POLLING_TIMEOUT),
        ) {
            Ok(watcher) => watcher,
            Err(err) => {
                error!("Unable to watch config file: {err}");
                return None;
            },
        };

        let join_handle = thread::spawn_named("config watcher", move || {
            // Get all unique parent directories.
            let mut parents = paths
                .iter()
                .map(|path| {
                    let mut path = path.clone();
                    path.pop();
                    path
                })
                .collect::<Vec<PathBuf>>();
            parents.sort_unstable();
            parents.dedup();

            // Watch all configuration file directories.
            for parent in &parents {
                if let Err(err) = watcher.watch(parent, RecursiveMode::NonRecursive) {
                    debug!("Unable to watch config directory {parent:?}: {err}");
                }
            }

            // The current debouncing time.
            let mut debouncing_deadline: Option<Instant> = None;

            // The events accumulated during the debounce period.
            let mut received_events = Vec::new();

            loop {
                // We use `recv_timeout` to debounce the events coming from the watcher and
                // reduce the amount of config reloads.
                let event = match debouncing_deadline.as_ref() {
                    Some(debouncing_deadline) => rx.recv_timeout(
                        debouncing_deadline.saturating_duration_since(Instant::now()),
                    ),
                    None => {
                        let event = rx.recv().map_err(Into::into);
                        // Set the debouncing deadline after receiving the event.
                        debouncing_deadline = Some(Instant::now() + DEBOUNCE_DELAY);
                        event
                    },
                };

                match event {
                    Ok(Ok(event)) => match event.kind {
                        // Shutdown marker injected by `Self::shutdown`.
                        EventKind::Other if event.info() == Some("shutdown") => break,
                        // Ignore when config file is moved as it's equivalent to deletion.
                        // Some editors trigger this as they move the file as part of saving.
                        EventKind::Modify(ModifyKind::Name(
                            RenameMode::From | RenameMode::Both,
                        )) => (),
                        EventKind::Any
                        | EventKind::Create(_)
                        | EventKind::Modify(_)
                        | EventKind::Other => {
                            received_events.push(event);
                        },
                        _ => (),
                    },
                    Err(RecvTimeoutError::Timeout) => {
                        // Go back to polling the events.
                        debouncing_deadline = None;

                        if received_events
                            .drain(..)
                            .flat_map(|event| event.paths.into_iter())
                            .any(|path| paths.contains(&path))
                        {
                            // Always reload the primary configuration file.
                            let event = Event::new(EventType::ConfigReload(paths[0].clone()), None);
                            let _ = event_proxy.send_event(event);
                        }
                    },
                    Ok(Err(err)) => {
                        debug!("Config watcher errors: {err:?}");
                    },
                    Err(err) => {
                        debug!("Config watcher channel dropped unexpectedly: {err}");
                        break;
                    },
                };
            }
        });

        Some(Self { watched_hash, thread: join_handle, shutdown_tx: tx })
    }

    /// Synchronously shut down the monitor.
    pub fn shutdown(self) {
        // Request shutdown.
        let mut event = NotifyEvent::new(EventKind::Other);
        event = event.set_info("shutdown");
        let _ = self.shutdown_tx.send(Ok(event));

        // Wait for thread to terminate.
        if let Err(err) = self.thread.join() {
            warn!("config monitor shutdown failed: {err:?}");
        }
    }

    /// Check if the config monitor needs to be restarted.
    ///
    /// This checks the supplied list of files against the monitored files to determine if a
    /// restart is necessary.
    pub fn needs_restart(&self, files: &[PathBuf]) -> bool {
        // BUGFIX: a restart is needed when the new file list hashes *differently*
        // from the watched one, or cannot be hashed at all. The previous `==`
        // comparison was inverted, requesting a restart exactly when nothing
        // had changed.
        Self::hash_paths(files).is_none_or(|hash| Some(hash) != self.watched_hash)
    }

    /// Generate the hash for a list of paths.
    ///
    /// Returns `None` when the list exceeds the fixed-size buffer, which
    /// callers treat as "unknown" (and therefore restart-worthy).
    fn hash_paths(files: &[PathBuf]) -> Option<u64> {
        // Use file count limit to avoid allocations.
        const MAX_PATHS: usize = 1024;
        if files.len() > MAX_PATHS {
            return None;
        }

        // Sort files to avoid restart on order change.
        let mut sorted_files = [None; MAX_PATHS];
        for (i, file) in files.iter().enumerate() {
            sorted_files[i] = Some(file);
        }
        sorted_files.sort_unstable();

        // Calculate hash for the paths, regardless of order.
        let mut hasher = DefaultHasher::new();
        Hash::hash_slice(&sorted_files, &mut hasher);
        Some(hasher.finish())
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/color.rs | alacritty/src/config/color.rs | use serde::de::Error as SerdeError;
use serde::{Deserialize, Deserializer, Serialize};
use alacritty_config_derive::ConfigDeserialize;
use crate::display::color::{CellRgb, Rgb};
#[derive(ConfigDeserialize, Serialize, Clone, Debug, Default, PartialEq, Eq)]
pub struct Colors {
pub primary: PrimaryColors,
pub cursor: InvertedCellColors,
pub vi_mode_cursor: InvertedCellColors,
pub selection: InvertedCellColors,
pub normal: NormalColors,
pub bright: BrightColors,
pub dim: Option<DimColors>,
pub indexed_colors: Vec<IndexedColor>,
pub search: SearchColors,
pub line_indicator: LineIndicatorColors,
pub hints: HintColors,
pub transparent_background_colors: bool,
pub draw_bold_text_with_bright_colors: bool,
footer_bar: BarColors,
}
impl Colors {
pub fn footer_bar_foreground(&self) -> Rgb {
self.footer_bar.foreground.unwrap_or(self.primary.background)
}
pub fn footer_bar_background(&self) -> Rgb {
self.footer_bar.background.unwrap_or(self.primary.foreground)
}
}
#[derive(ConfigDeserialize, Serialize, Copy, Clone, Default, Debug, PartialEq, Eq)]
pub struct LineIndicatorColors {
pub foreground: Option<Rgb>,
pub background: Option<Rgb>,
}
#[derive(ConfigDeserialize, Serialize, Default, Copy, Clone, Debug, PartialEq, Eq)]
pub struct HintColors {
pub start: HintStartColors,
pub end: HintEndColors,
}
#[derive(ConfigDeserialize, Serialize, Copy, Clone, Debug, PartialEq, Eq)]
pub struct HintStartColors {
pub foreground: CellRgb,
pub background: CellRgb,
}
impl Default for HintStartColors {
fn default() -> Self {
Self {
foreground: CellRgb::Rgb(Rgb::new(0x18, 0x18, 0x18)),
background: CellRgb::Rgb(Rgb::new(0xf4, 0xbf, 0x75)),
}
}
}
#[derive(ConfigDeserialize, Serialize, Copy, Clone, Debug, PartialEq, Eq)]
pub struct HintEndColors {
pub foreground: CellRgb,
pub background: CellRgb,
}
impl Default for HintEndColors {
fn default() -> Self {
Self {
foreground: CellRgb::Rgb(Rgb::new(0x18, 0x18, 0x18)),
background: CellRgb::Rgb(Rgb::new(0xac, 0x42, 0x42)),
}
}
}
#[derive(Deserialize, Serialize, Copy, Clone, Default, Debug, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct IndexedColor {
pub color: Rgb,
index: ColorIndex,
}
impl IndexedColor {
#[inline]
pub fn index(&self) -> u8 {
self.index.0
}
}
#[derive(Serialize, Copy, Clone, Default, Debug, PartialEq, Eq)]
struct ColorIndex(u8);
impl<'de> Deserialize<'de> for ColorIndex {
    /// Deserialize a color index, rejecting the first 16 slots.
    ///
    /// Indexes 0..=15 are the standard named colors with their own config
    /// fields, so only 16..=255 may be overridden through `indexed_colors`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let index = u8::deserialize(deserializer)?;

        if index < 16 {
            // BUGFIX: the message was previously passed as a plain string
            // literal, so the `{}` placeholder was never interpolated and
            // users saw a literal "index is {}".
            Err(SerdeError::custom(format!(
                "Config error: indexed_color's index is {index}, but a value bigger than 15 was \
                expected; ignoring setting"
            )))
        } else {
            Ok(Self(index))
        }
    }
}
#[derive(ConfigDeserialize, Serialize, Debug, Copy, Clone, PartialEq, Eq)]
pub struct InvertedCellColors {
#[config(alias = "text")]
pub foreground: CellRgb,
#[config(alias = "cursor")]
pub background: CellRgb,
}
impl Default for InvertedCellColors {
fn default() -> Self {
Self { foreground: CellRgb::CellBackground, background: CellRgb::CellForeground }
}
}
#[derive(ConfigDeserialize, Serialize, Debug, Copy, Clone, Default, PartialEq, Eq)]
pub struct SearchColors {
pub focused_match: FocusedMatchColors,
pub matches: MatchColors,
}
#[derive(ConfigDeserialize, Serialize, Debug, Copy, Clone, PartialEq, Eq)]
pub struct FocusedMatchColors {
pub foreground: CellRgb,
pub background: CellRgb,
}
impl Default for FocusedMatchColors {
fn default() -> Self {
Self {
background: CellRgb::Rgb(Rgb::new(0xf4, 0xbf, 0x75)),
foreground: CellRgb::Rgb(Rgb::new(0x18, 0x18, 0x18)),
}
}
}
#[derive(ConfigDeserialize, Serialize, Debug, Copy, Clone, PartialEq, Eq)]
pub struct MatchColors {
pub foreground: CellRgb,
pub background: CellRgb,
}
impl Default for MatchColors {
fn default() -> Self {
Self {
background: CellRgb::Rgb(Rgb::new(0xac, 0x42, 0x42)),
foreground: CellRgb::Rgb(Rgb::new(0x18, 0x18, 0x18)),
}
}
}
#[derive(ConfigDeserialize, Serialize, Debug, Copy, Clone, Default, PartialEq, Eq)]
pub struct BarColors {
foreground: Option<Rgb>,
background: Option<Rgb>,
}
#[derive(ConfigDeserialize, Serialize, Clone, Debug, PartialEq, Eq)]
pub struct PrimaryColors {
pub foreground: Rgb,
pub background: Rgb,
pub bright_foreground: Option<Rgb>,
pub dim_foreground: Option<Rgb>,
}
impl Default for PrimaryColors {
fn default() -> Self {
PrimaryColors {
background: Rgb::new(0x18, 0x18, 0x18),
foreground: Rgb::new(0xd8, 0xd8, 0xd8),
bright_foreground: Default::default(),
dim_foreground: Default::default(),
}
}
}
#[derive(ConfigDeserialize, Serialize, Clone, Debug, PartialEq, Eq)]
pub struct NormalColors {
pub black: Rgb,
pub red: Rgb,
pub green: Rgb,
pub yellow: Rgb,
pub blue: Rgb,
pub magenta: Rgb,
pub cyan: Rgb,
pub white: Rgb,
}
impl Default for NormalColors {
fn default() -> Self {
NormalColors {
black: Rgb::new(0x18, 0x18, 0x18),
red: Rgb::new(0xac, 0x42, 0x42),
green: Rgb::new(0x90, 0xa9, 0x59),
yellow: Rgb::new(0xf4, 0xbf, 0x75),
blue: Rgb::new(0x6a, 0x9f, 0xb5),
magenta: Rgb::new(0xaa, 0x75, 0x9f),
cyan: Rgb::new(0x75, 0xb5, 0xaa),
white: Rgb::new(0xd8, 0xd8, 0xd8),
}
}
}
#[derive(ConfigDeserialize, Serialize, Clone, Debug, PartialEq, Eq)]
pub struct BrightColors {
pub black: Rgb,
pub red: Rgb,
pub green: Rgb,
pub yellow: Rgb,
pub blue: Rgb,
pub magenta: Rgb,
pub cyan: Rgb,
pub white: Rgb,
}
impl Default for BrightColors {
fn default() -> Self {
// Generated with oklab by multiplying brightness by 1.12 and then adjusting numbers
// to make them look "nicer". Yellow color was generated the same way, however the first
// srgb representable color was picked.
BrightColors {
black: Rgb::new(0x6b, 0x6b, 0x6b),
red: Rgb::new(0xc5, 0x55, 0x55),
green: Rgb::new(0xaa, 0xc4, 0x74),
yellow: Rgb::new(0xfe, 0xca, 0x88),
blue: Rgb::new(0x82, 0xb8, 0xc8),
magenta: Rgb::new(0xc2, 0x8c, 0xb8),
cyan: Rgb::new(0x93, 0xd3, 0xc3),
white: Rgb::new(0xf8, 0xf8, 0xf8),
}
}
}
#[derive(ConfigDeserialize, Serialize, Clone, Debug, PartialEq, Eq)]
pub struct DimColors {
pub black: Rgb,
pub red: Rgb,
pub green: Rgb,
pub yellow: Rgb,
pub blue: Rgb,
pub magenta: Rgb,
pub cyan: Rgb,
pub white: Rgb,
}
impl Default for DimColors {
fn default() -> Self {
// Generated with builtin alacritty's color dimming function.
DimColors {
black: Rgb::new(0x0f, 0x0f, 0x0f),
red: Rgb::new(0x71, 0x2b, 0x2b),
green: Rgb::new(0x5f, 0x6f, 0x3a),
yellow: Rgb::new(0xa1, 0x7e, 0x4d),
blue: Rgb::new(0x45, 0x68, 0x77),
magenta: Rgb::new(0x70, 0x4d, 0x68),
cyan: Rgb::new(0x4d, 0x77, 0x70),
white: Rgb::new(0x8e, 0x8e, 0x8e),
}
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/config/serde_utils.rs | alacritty/src/config/serde_utils.rs | //! Serde helpers.
use toml::{Table, Value};
/// Merge two serde structures.
///
/// This will take all values from `replacement` and use `base` whenever a value isn't present in
/// `replacement`.
pub fn merge(base: Value, replacement: Value) -> Value {
    match (base, replacement) {
        // Arrays merge by concatenation: base entries first, replacement appended.
        (Value::Array(mut merged), Value::Array(mut extra)) => {
            merged.append(&mut extra);
            Value::Array(merged)
        },
        // Tables merge key-by-key, recursing into shared keys.
        (Value::Table(old), Value::Table(new)) => Value::Table(merge_tables(old, new)),
        // For scalars or mismatched kinds, the replacement simply wins.
        (_, new_value) => new_value,
    }
}
/// Merge two key/value tables.
fn merge_tables(mut base: Table, replacement: Table) -> Table {
    for (key, new_value) in replacement {
        // Keys present in both tables are merged recursively; fresh keys are
        // inserted as-is.
        let merged = match base.remove(&key) {
            Some(old_value) => merge(old_value, new_value),
            None => new_value,
        };
        base.insert(key, merged);
    }

    base
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn merge_primitive() {
let base = Value::Table(Table::new());
let replacement = Value::Boolean(true);
assert_eq!(merge(base, replacement.clone()), replacement);
let base = Value::Boolean(false);
let replacement = Value::Boolean(true);
assert_eq!(merge(base, replacement.clone()), replacement);
let base = Value::Integer(0.into());
let replacement = Value::Integer(1.into());
assert_eq!(merge(base, replacement.clone()), replacement);
let base = Value::String(String::new());
let replacement = Value::String(String::from("test"));
assert_eq!(merge(base, replacement.clone()), replacement);
let base = Value::Table(Table::new());
let replacement = Value::Table(Table::new());
assert_eq!(merge(base.clone(), replacement), base);
}
#[test]
fn merge_sequence() {
let base = Value::Array(vec![Value::Table(Table::new())]);
let replacement = Value::Array(vec![Value::Boolean(true)]);
let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);
assert_eq!(merge(base, replacement), expected);
}
#[test]
fn merge_tables() {
let mut base_table = Table::new();
base_table.insert(String::from("a"), Value::Boolean(true));
base_table.insert(String::from("b"), Value::Boolean(false));
let base = Value::Table(base_table);
let mut replacement_table = Table::new();
replacement_table.insert(String::from("a"), Value::Boolean(true));
replacement_table.insert(String::from("c"), Value::Boolean(false));
let replacement = Value::Table(replacement_table);
let merged = merge(base, replacement);
let mut expected_table = Table::new();
expected_table.insert(String::from("b"), Value::Boolean(false));
expected_table.insert(String::from("a"), Value::Boolean(true));
expected_table.insert(String::from("c"), Value::Boolean(false));
let expected = Value::Table(expected_table);
assert_eq!(merged, expected);
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/macos/mod.rs | alacritty/src/macos/mod.rs | use objc2::runtime::AnyObject;
use objc2_foundation::{NSDictionary, NSString, NSUserDefaults, ns_string};
pub mod locale;
pub mod proc;
/// Pre-register `NSAutoFillHeuristicControllerEnabled = "NO"` in the standard
/// user defaults, opting this process out of AppKit's autofill heuristic.
pub fn disable_autofill() {
    // SAFETY: calls into Objective-C; `registerDefaults` only records
    // in-process defaults and takes the dictionary by reference.
    // NOTE(review): presumably intended to run once at startup — confirm at
    // the call site.
    unsafe {
        NSUserDefaults::standardUserDefaults().registerDefaults(
            &NSDictionary::<NSString, AnyObject>::from_slices(
                &[ns_string!("NSAutoFillHeuristicControllerEnabled")],
                &[ns_string!("NO")],
            ),
        );
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/macos/proc.rs | alacritty/src/macos/proc.rs | use std::ffi::{CStr, CString, IntoStringError};
use std::fmt::{self, Display, Formatter};
use std::io;
use std::mem::{self, MaybeUninit};
use std::os::raw::{c_int, c_void};
use std::path::PathBuf;
/// Error during working directory retrieval.
#[derive(Debug)]
pub enum Error {
Io(io::Error),
/// Error converting into utf8 string.
IntoString(IntoStringError),
/// Expected return size didn't match libproc's.
InvalidSize,
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
Error::InvalidSize => None,
Error::Io(err) => err.source(),
Error::IntoString(err) => err.source(),
}
}
}
impl Display for Error {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
Error::InvalidSize => write!(f, "Invalid proc_pidinfo return size"),
Error::Io(err) => write!(f, "Error getting current working directory: {}", err),
Error::IntoString(err) => {
write!(f, "Error when parsing current working directory: {}", err)
},
}
}
}
impl From<io::Error> for Error {
fn from(val: io::Error) -> Self {
Error::Io(val)
}
}
impl From<IntoStringError> for Error {
fn from(val: IntoStringError) -> Self {
Error::IntoString(val)
}
}
/// Current working directory of the process with the given `pid`, queried
/// through libproc's `proc_pidinfo` with the `PROC_PIDVNODEPATHINFO` flavor.
///
/// # Errors
///
/// - `Error::Io` when `proc_pidinfo` returns a negative value (OS error).
/// - `Error::InvalidSize` when the kernel fills fewer bytes than the struct size.
/// - `Error::IntoString` when the returned path is not valid UTF-8.
pub fn cwd(pid: c_int) -> Result<PathBuf, Error> {
    let mut info = MaybeUninit::<sys::proc_vnodepathinfo>::uninit();
    let info_ptr = info.as_mut_ptr() as *mut c_void;
    let size = mem::size_of::<sys::proc_vnodepathinfo>() as c_int;

    // SAFETY: `info_ptr` points to a buffer of exactly `size` bytes;
    // `assume_init` is only reached after `proc_pidinfo` reported writing
    // the full struct. `vip_path` is assumed NUL-terminated by libproc —
    // matches the 1024-byte fixed buffer in the binding below.
    let c_str = unsafe {
        let pidinfo_size = sys::proc_pidinfo(pid, sys::PROC_PIDVNODEPATHINFO, 0, info_ptr, size);
        match pidinfo_size {
            // Negative return signals an OS error; fetch errno.
            c if c < 0 => return Err(io::Error::last_os_error().into()),
            // Short fill means our struct layout disagrees with libproc's.
            s if s != size => return Err(Error::InvalidSize),
            _ => CStr::from_ptr(info.assume_init().pvi_cdir.vip_path.as_ptr()),
        }
    };

    Ok(CString::from(c_str).into_string().map(PathBuf::from)?)
}
/// Bindings for libproc.
#[allow(non_camel_case_types)]
mod sys {
use std::os::raw::{c_char, c_int, c_longlong, c_void};
pub const PROC_PIDVNODEPATHINFO: c_int = 9;
type gid_t = c_int;
type off_t = c_longlong;
type uid_t = c_int;
type fsid_t = fsid;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct fsid {
pub val: [i32; 2usize],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct vinfo_stat {
pub vst_dev: u32,
pub vst_mode: u16,
pub vst_nlink: u16,
pub vst_ino: u64,
pub vst_uid: uid_t,
pub vst_gid: gid_t,
pub vst_atime: i64,
pub vst_atimensec: i64,
pub vst_mtime: i64,
pub vst_mtimensec: i64,
pub vst_ctime: i64,
pub vst_ctimensec: i64,
pub vst_birthtime: i64,
pub vst_birthtimensec: i64,
pub vst_size: off_t,
pub vst_blocks: i64,
pub vst_blksize: i32,
pub vst_flags: u32,
pub vst_gen: u32,
pub vst_rdev: u32,
pub vst_qspare: [i64; 2usize],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct vnode_info {
pub vi_stat: vinfo_stat,
pub vi_type: c_int,
pub vi_pad: c_int,
pub vi_fsid: fsid_t,
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct vnode_info_path {
pub vip_vi: vnode_info,
pub vip_path: [c_char; 1024usize],
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct proc_vnodepathinfo {
pub pvi_cdir: vnode_info_path,
pub pvi_rdir: vnode_info_path,
}
unsafe extern "C" {
pub fn proc_pidinfo(
pid: c_int,
flavor: c_int,
arg: u64,
buffer: *mut c_void,
buffersize: c_int,
) -> c_int;
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::{env, process};
#[test]
fn cwd_matches_current_dir() {
assert_eq!(cwd(process::id() as i32).ok(), env::current_dir().ok());
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/macos/locale.rs | alacritty/src/macos/locale.rs | #![allow(clippy::let_unit_value)]
use std::ffi::{CStr, CString};
use std::{env, str};
use libc::{LC_ALL, LC_CTYPE, setlocale};
use log::debug;
use objc2::sel;
use objc2_foundation::{NSLocale, NSObjectProtocol};
const FALLBACK_LOCALE: &str = "UTF-8";
/// Initialize the process locale, preferring the environment's locale and
/// falling back to the macOS system locale, then to plain "UTF-8".
pub fn set_locale_environment() {
    // Passing an empty string asks libc for the locale implied by the environment.
    let env_locale_c = CString::new("").unwrap();
    let env_locale_ptr = unsafe { setlocale(LC_ALL, env_locale_c.as_ptr()) };
    if !env_locale_ptr.is_null() {
        let env_locale = unsafe { CStr::from_ptr(env_locale_ptr).to_string_lossy() };

        // Assume `C` locale means unchanged, since it is the default anyways.
        if env_locale != "C" {
            debug!("Using environment locale: {}", env_locale);
            return;
        }
    }

    let system_locale = system_locale();

    // Set locale to system locale.
    let system_locale_c = CString::new(system_locale.clone()).expect("nul byte in system locale");
    let lc_all = unsafe { setlocale(LC_ALL, system_locale_c.as_ptr()) };

    // Check if system locale was valid or not.
    if lc_all.is_null() {
        // Use fallback locale.
        debug!("Using fallback locale: {}", FALLBACK_LOCALE);

        let fallback_locale_c = CString::new(FALLBACK_LOCALE).unwrap();
        unsafe { setlocale(LC_CTYPE, fallback_locale_c.as_ptr()) };

        // SAFETY: `env::set_var` is only sound while no other threads read the
        // environment — presumably this runs during single-threaded startup;
        // TODO confirm at the call site.
        unsafe { env::set_var("LC_CTYPE", FALLBACK_LOCALE) };
    } else {
        // Use system locale.
        debug!("Using system locale: {}", system_locale);
        unsafe { env::set_var("LC_ALL", system_locale) };
    }
}
/// Determine system locale based on language and country code.
///
/// Always returns a string suffixed with ".UTF-8", e.g. `en_US.UTF-8`.
fn system_locale() -> String {
    let locale = NSLocale::currentLocale();

    // `localeIdentifier` returns extra metadata with the locale (including currency and
    // collator) on newer versions of macOS. This is not a valid locale, so we use
    // `languageCode` and `countryCode`, if they're available (macOS 10.12+):
    //
    // https://developer.apple.com/documentation/foundation/nslocale/1416263-localeidentifier?language=objc
    // https://developer.apple.com/documentation/foundation/nslocale/1643060-countrycode?language=objc
    // https://developer.apple.com/documentation/foundation/nslocale/1643026-languagecode?language=objc
    let is_language_code_supported: bool = locale.respondsToSelector(sel!(languageCode));
    let is_country_code_supported: bool = locale.respondsToSelector(sel!(countryCode));
    if is_language_code_supported && is_country_code_supported {
        let language_code = locale.languageCode();
        #[allow(deprecated)]
        if let Some(country_code) = locale.countryCode() {
            format!("{}_{}.UTF-8", language_code, country_code)
        } else {
            // Fall back to en_US in case the country code is not available.
            "en_US.UTF-8".into()
        }
    } else {
        // Pre-10.12 fallback: the raw identifier, which lacks the metadata issue there.
        locale.localeIdentifier().to_string() + ".UTF-8"
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/migrate/mod.rs | alacritty/src/migrate/mod.rs | //! Configuration file migration.
use std::fmt::Debug;
use std::path::Path;
use std::{fs, mem};
use tempfile::NamedTempFile;
use toml_edit::{DocumentMut, Item};
use crate::cli::MigrateOptions;
use crate::config;
mod yaml;
/// Handle migration.
///
/// Locates the installed configuration, validates the migration with a silent
/// dry run first (for wet runs), then performs the actual migration.
pub fn migrate(options: MigrateOptions) {
    // Find configuration file path, preferring TOML over legacy YAML.
    let config_path = options
        .config_file
        .clone()
        .or_else(|| config::installed_config("toml"))
        .or_else(|| config::installed_config("yml"));

    // Abort if system has no installed configuration.
    let Some(config_path) = config_path else {
        eprintln!("No configuration file found");
        std::process::exit(1);
    };

    // If we're doing a wet run, perform a silent dry run first for safety.
    if !options.dry_run {
        #[allow(clippy::redundant_clone)]
        let mut dry_options = options.clone();
        dry_options.silent = true;
        dry_options.dry_run = true;

        if let Err(err) = migrate_config(&dry_options, &config_path, config::IMPORT_RECURSION_LIMIT)
        {
            eprintln!("Configuration file migration failed:");
            eprintln!(" {config_path:?}: {err}");
            std::process::exit(1);
        }
    }

    // Migrate the root config.
    match migrate_config(&options, &config_path, config::IMPORT_RECURSION_LIMIT) {
        Ok(migration) if !options.silent => println!("{}", migration.success_message(false)),
        Ok(_) => (),
        Err(err) => {
            eprintln!("Configuration file migration failed:");
            eprintln!(" {config_path:?}: {err}");
            std::process::exit(1);
        },
    }
}
/// Migrate a specific configuration file.
///
/// YAML files are converted to a sibling TOML file; TOML files are migrated
/// in place. Returns the performed [`Migration`] or an error description.
fn migrate_config<'a>(
    options: &MigrateOptions,
    path: &'a Path,
    recursion_limit: usize,
) -> Result<Migration<'a>, String> {
    // Ensure configuration file has an extension.
    let path_str = path.to_string_lossy();
    let (prefix, suffix) =
        path_str.rsplit_once('.').ok_or_else(|| "missing file extension".to_string())?;

    // Handle legacy YAML files.
    if suffix == "yml" {
        let new_path = yaml::migrate(options, path, recursion_limit, prefix)?;
        return Ok(Migration::Yaml((path, new_path)));
    }

    // TOML only does renames, so return early if they are disabled.
    if options.skip_renames {
        if options.dry_run {
            eprintln!("Ignoring TOML file {path:?} since `--skip-renames` was supplied");
        }
        return Ok(Migration::Toml(path));
    }

    // Read TOML file and perform all in-file migrations.
    let toml = fs::read_to_string(path).map_err(|err| err.to_string())?;
    let mut migrated = migrate_toml(toml)?;

    // Recursively migrate imports.
    migrate_imports(options, path, &mut migrated, recursion_limit)?;

    // Write migrated TOML file.
    write_results(options, path, &migrated.to_string())?;

    Ok(Migration::Toml(path))
}
/// Migrate TOML config to the latest version.
fn migrate_toml(toml: String) -> Result<DocumentMut, String> {
// Parse TOML file.
let mut document = match toml.parse::<DocumentMut>() {
Ok(document) => document,
Err(err) => return Err(format!("TOML parsing error: {err}")),
};
// Move `draw_bold_text_with_bright_colors` to its own section.
move_value(&mut document, &["draw_bold_text_with_bright_colors"], &[
"colors",
"draw_bold_text_with_bright_colors",
])?;
// Move bindings to their own section.
move_value(&mut document, &["key_bindings"], &["keyboard", "bindings"])?;
move_value(&mut document, &["mouse_bindings"], &["mouse", "bindings"])?;
// Avoid warnings due to introduction of the new `general` section.
move_value(&mut document, &["live_config_reload"], &["general", "live_config_reload"])?;
move_value(&mut document, &["working_directory"], &["general", "working_directory"])?;
move_value(&mut document, &["ipc_socket"], &["general", "ipc_socket"])?;
move_value(&mut document, &["import"], &["general", "import"])?;
move_value(&mut document, &["shell"], &["terminal", "shell"])?;
Ok(document)
}
/// Migrate TOML imports to the latest version.
fn migrate_imports(
    options: &MigrateOptions,
    path: &Path,
    document: &mut DocumentMut,
    recursion_limit: usize,
) -> Result<(), String> {
    // Check if any imports need to be processed.
    let imports = document
        .get("general")
        .and_then(|general| general.get("import"))
        .and_then(|import| import.as_array());
    let imports = match imports {
        Some(array) if !array.is_empty() => array,
        _ => return Ok(()),
    };

    // Abort once recursion limit is exceeded.
    if recursion_limit == 0 {
        return Err("Exceeded maximum configuration import depth".into());
    }

    // Migrate each import, skipping entries which aren't strings.
    for item in imports.iter() {
        let Some(import) = item.as_str() else { continue };

        let normalized_path = config::normalize_import(path, import);
        if !normalized_path.exists() {
            if options.dry_run {
                println!("Skipping migration for nonexistent path: {}", normalized_path.display());
            }
            continue;
        }

        let migration = migrate_config(options, &normalized_path, recursion_limit - 1)?;
        if options.dry_run {
            println!("{}", migration.success_message(true));
        }
    }

    Ok(())
}
/// Move a TOML value from one map to another.
///
/// `origin` and `target` are key paths into `document`; the item at `origin` is
/// detached together with its key decorations (the attached formatting, e.g.
/// comments) and re-inserted at `target`, creating intermediate tables as
/// needed. A missing origin path is a no-op; a non-table intermediate node
/// panics.
fn move_value(document: &mut DocumentMut, origin: &[&str], target: &[&str]) -> Result<(), String> {
    // Find and remove the original item.
    let (mut origin_key, mut origin_item) = (None, document.as_item_mut());
    for element in origin {
        let table = match origin_item.as_table_like_mut() {
            Some(table) => table,
            None => panic!("Moving from unsupported TOML structure"),
        };

        let (key, item) = match table.get_key_value_mut(element) {
            Some((key, item)) => (key, item),
            // Origin path not present: nothing to move.
            None => return Ok(()),
        };

        origin_key = Some(key);
        origin_item = item;

        // Ensure no empty tables are left behind.
        if let Some(table) = origin_item.as_table_mut() {
            table.set_implicit(true)
        }
    }

    // Detach the origin's key decorations so they can follow the value.
    let origin_key_decor =
        origin_key.map(|key| (key.leaf_decor().clone(), key.dotted_decor().clone()));
    let origin_item = mem::replace(origin_item, Item::None);

    // Create all dependencies for the new location.
    let mut target_item = document.as_item_mut();
    for (i, element) in target.iter().enumerate() {
        let table = match target_item.as_table_like_mut() {
            Some(table) => table,
            None => panic!("Moving into unsupported TOML structure"),
        };

        if i + 1 == target.len() {
            table.insert(element, origin_item);

            // Move original key decorations.
            if let Some((leaf, dotted)) = origin_key_decor {
                let mut key = table.key_mut(element).unwrap();
                *key.leaf_decor_mut() = leaf;
                *key.dotted_decor_mut() = dotted;
            }

            break;
        } else {
            // Create missing parent tables.
            target_item = target_item[element].or_insert(toml_edit::table());
        }
    }

    Ok(())
}
/// Write migrated TOML to its target location.
///
/// Dry runs print the new content to STDOUT (unless silenced); wet runs
/// atomically replace the file via a temporary file in the same directory.
fn write_results<P>(options: &MigrateOptions, path: P, toml: &str) -> Result<(), String>
where
    P: AsRef<Path> + Debug,
{
    let path = path.as_ref();

    if options.dry_run {
        if !options.silent {
            // Output new content to STDOUT.
            println!(
                "\nv-----Start TOML for {path:?}-----v\n\n{toml}\n^-----End TOML for {path:?}-----^\n"
            );
        }
        return Ok(());
    }

    // Atomically replace the configuration file.
    let tmp = NamedTempFile::new_in(path.parent().unwrap())
        .map_err(|err| format!("could not create temporary file: {err}"))?;
    fs::write(tmp.path(), toml).map_err(|err| format!("filesystem error: {err}"))?;
    tmp.persist(path).map_err(|err| format!("atomic replacement failed: {err}"))?;

    Ok(())
}
/// Performed migration mode.
enum Migration<'a> {
    /// In-place TOML migration.
    Toml(&'a Path),
    /// YAML to TOML migration.
    Yaml((&'a Path, String)),
}

impl Migration<'_> {
    /// Get the success message for this migration.
    fn success_message(&self, import: bool) -> String {
        // Imports get a distinct prefix so users can tell them apart from the root.
        let prefix = if import { "Successfully migrated import" } else { "Successfully migrated" };
        match self {
            Self::Yaml((original_path, new_path)) => {
                format!("{prefix} {original_path:?} to {new_path:?}")
            },
            Self::Toml(original_path) => format!("{prefix} {original_path:?}"),
        }
    }

    /// Get the file path after migration.
    fn new_path(&self) -> String {
        match self {
            Self::Toml(path) => path.to_string_lossy().into_owned(),
            Self::Yaml((_, path)) => path.clone(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // `move_value` must relocate keys together with their attached comments and
    // create any missing parent tables at the target path.
    #[test]
    fn move_values() {
        let input = r#"
# This is a root_value.
#
# Use it with care.
root_value = 3
[table]
table_value = 5
[preexisting]
not_moved = 9
"#;

        let mut document = input.parse::<DocumentMut>().unwrap();

        move_value(&mut document, &["root_value"], &["new_table", "root_value"]).unwrap();
        move_value(&mut document, &["table", "table_value"], &[
            "preexisting",
            "subtable",
            "new_name",
        ])
        .unwrap();

        let output = document.to_string();
        let expected = r#"
[preexisting]
not_moved = 9
[preexisting.subtable]
new_name = 5
[new_table]
# This is a root_value.
#
# Use it with care.
root_value = 3
"#;

        assert_eq!(output, expected);
    }

    // An empty config must survive migration as an empty document.
    #[test]
    fn migrate_empty() {
        assert!(migrate_toml(String::new()).unwrap().to_string().is_empty());
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty/src/migrate/yaml.rs | alacritty/src/migrate/yaml.rs | //! Migration of legacy YAML files to TOML.
use std::path::Path;
use toml::Value;
use crate::cli::MigrateOptions;
use crate::config;
use crate::migrate::{migrate_config, migrate_toml, write_results};
/// Migrate a legacy YAML config to TOML.
pub fn migrate(
options: &MigrateOptions,
path: &Path,
recursion_limit: usize,
prefix: &str,
) -> Result<String, String> {
// Try to parse the configuration file.
let mut config = match config::deserialize_config(path, !options.dry_run) {
Ok(config) => config,
Err(err) => return Err(format!("YAML parsing error: {err}")),
};
// Migrate config imports.
if !options.skip_imports {
migrate_imports(options, &mut config, path, recursion_limit)?;
}
// Convert to TOML format.
let mut toml = toml::to_string(&config).map_err(|err| format!("conversion error: {err}"))?;
let new_path = format!("{prefix}.toml");
// Apply TOML migration, without recursing through imports.
toml = migrate_toml(toml)?.to_string();
// Write migrated TOML config.
write_results(options, &new_path, &toml)?;
Ok(new_path)
}
/// Migrate the imports of a config.
///
/// Each existing import is migrated recursively and the config's `import`
/// field is rewritten to point at the migrated files.
fn migrate_imports(
    options: &MigrateOptions,
    config: &mut Value,
    base_path: &Path,
    recursion_limit: usize,
) -> Result<(), String> {
    let imports = config::imports(config, base_path, recursion_limit)
        .map_err(|err| format!("import error: {err}"))?;

    // Migrate the individual imports.
    let mut new_imports = Vec::new();
    for import in imports {
        let import = import.map_err(|err| format!("import error: {err}"))?;

        // Keep yaml import if path does not exist.
        if !import.exists() {
            if options.dry_run {
                eprintln!("Keeping yaml config for nonexistent import: {import:?}");
            }
            new_imports.push(Value::String(import.to_string_lossy().into()));
            continue;
        }

        let migration = migrate_config(options, &import, recursion_limit - 1)?;

        // Print success message.
        if options.dry_run {
            println!("{}", migration.success_message(true));
        }

        new_imports.push(Value::String(migration.new_path()));
    }

    // Update the imports field.
    if let Some(import) = config.get_mut("import") {
        *import = Value::Array(new_imports);
    }

    Ok(())
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_config/src/lib.rs | alacritty_config/src/lib.rs | use std::collections::HashMap;
use std::error::Error;
use std::path::PathBuf;
use log::LevelFilter;
use serde::Deserialize;
use toml::Value;
/// In-place replacement of a configuration value from a parsed TOML [`Value`].
pub trait SerdeReplace {
    /// Replace `self` with the value deserialized from `value`.
    fn replace(&mut self, value: Value) -> Result<(), Box<dyn Error>>;
}

/// Implement [`SerdeReplace`] for the listed types by replacing them wholesale
/// through `replace_simple`.
#[macro_export]
macro_rules! impl_replace {
    ($($ty:ty),*$(,)*) => {
        $(
            impl SerdeReplace for $ty {
                fn replace(&mut self, value: Value) -> Result<(), Box<dyn Error>> {
                    replace_simple(self, value)
                }
            }
        )*
    };
}

// Leaf types are overwritten rather than merged field-by-field.
#[rustfmt::skip]
impl_replace!(
    usize, u8, u16, u32, u64, u128,
    isize, i8, i16, i32, i64, i128,
    f32, f64,
    bool,
    char,
    String,
    PathBuf,
    LevelFilter,
);
/// Overwrite `data` with a value deserialized from `value`.
///
/// `data` is left untouched when deserialization fails.
fn replace_simple<'de, D>(data: &mut D, value: Value) -> Result<(), Box<dyn Error>>
where
    D: Deserialize<'de>,
{
    let replacement = D::deserialize(value)?;
    *data = replacement;
    Ok(())
}
// Vectors are replaced wholesale instead of being merged element-wise.
impl<'de, T: Deserialize<'de>> SerdeReplace for Vec<T> {
    fn replace(&mut self, value: Value) -> Result<(), Box<dyn Error>> {
        replace_simple(self, value)
    }
}
impl<'de, T: SerdeReplace + Deserialize<'de>> SerdeReplace for Option<T> {
    /// Merge into the existing inner value when present; otherwise deserialize
    /// a fresh value in place of the `None`.
    fn replace(&mut self, value: Value) -> Result<(), Box<dyn Error>> {
        if let Some(inner) = self {
            inner.replace(value)
        } else {
            replace_simple(self, value)
        }
    }
}
impl<'de, T: Deserialize<'de>> SerdeReplace for HashMap<String, T> {
    /// Merge the deserialized map into `self`, replacing entries whose keys
    /// already exist.
    fn replace(&mut self, value: Value) -> Result<(), Box<dyn Error>> {
        // Deserialize replacement as HashMap.
        let hashmap: HashMap<String, T> = Self::deserialize(value)?;

        // Merge the two HashMaps; `extend` inserts every pair, overwriting
        // existing values on key collision — identical to a manual insert loop,
        // but it can reserve capacity up front.
        self.extend(hashmap);

        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate as alacritty_config;
    use alacritty_config_derive::ConfigDeserialize;

    // Replacing into `None` must deserialize a fresh value, while replacing into
    // `Some` must merge field-by-field: `a` from the first replace survives the
    // second replace which only sets `b`.
    #[test]
    fn replace_option() {
        #[derive(ConfigDeserialize, Default, PartialEq, Eq, Debug)]
        struct ReplaceOption {
            a: usize,
            b: usize,
        }

        let mut subject: Option<ReplaceOption> = None;

        let value: Value = toml::from_str("a=1").unwrap();
        SerdeReplace::replace(&mut subject, value).unwrap();

        let value: Value = toml::from_str("b=2").unwrap();
        SerdeReplace::replace(&mut subject, value).unwrap();

        assert_eq!(subject, Some(ReplaceOption { a: 1, b: 2 }));
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/event.rs | alacritty_terminal/src/event.rs | use std::borrow::Cow;
use std::fmt::{self, Debug, Formatter};
use std::sync::Arc;
use crate::term::ClipboardType;
use crate::vte::ansi::Rgb;
/// Terminal event.
///
/// These events instruct the UI over changes that can't be handled by the terminal emulation layer
/// itself.
#[derive(Clone)]
pub enum Event {
    /// Grid has changed possibly requiring a mouse cursor shape change.
    MouseCursorDirty,

    /// Window title change.
    Title(String),

    /// Reset to the default window title.
    ResetTitle,

    /// Request to store a text string in the clipboard.
    ClipboardStore(ClipboardType, String),

    /// Request to write the contents of the clipboard to the PTY.
    ///
    /// The attached function is a formatter which will correctly transform the clipboard content
    /// into the expected escape sequence format.
    ClipboardLoad(ClipboardType, Arc<dyn Fn(&str) -> String + Sync + Send + 'static>),

    /// Request to write the RGB value of a color to the PTY.
    ///
    /// The `usize` is the index of the requested color.
    ///
    /// The attached function is a formatter which will correctly transform the RGB color into the
    /// expected escape sequence format.
    ColorRequest(usize, Arc<dyn Fn(Rgb) -> String + Sync + Send + 'static>),

    /// Write some text to the PTY.
    PtyWrite(String),

    /// Request to write the text area size.
    TextAreaSizeRequest(Arc<dyn Fn(WindowSize) -> String + Sync + Send + 'static>),

    /// Cursor blinking state has changed.
    CursorBlinkingChange,

    /// New terminal content available.
    Wakeup,

    /// Terminal bell ring.
    Bell,

    /// Shutdown request.
    Exit,

    /// Child process exited with an error code.
    ChildExit(i32),
}
impl Debug for Event {
    /// Manual `Debug`, since the callback-carrying variants cannot derive it;
    /// arms follow the enum's declaration order.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            Event::MouseCursorDirty => write!(f, "MouseCursorDirty"),
            Event::Title(title) => write!(f, "Title({title})"),
            Event::ResetTitle => write!(f, "ResetTitle"),
            Event::ClipboardStore(ty, text) => write!(f, "ClipboardStore({ty:?}, {text})"),
            Event::ClipboardLoad(ty, _) => write!(f, "ClipboardLoad({ty:?})"),
            Event::ColorRequest(index, _) => write!(f, "ColorRequest({index})"),
            Event::PtyWrite(text) => write!(f, "PtyWrite({text})"),
            Event::TextAreaSizeRequest(_) => write!(f, "TextAreaSizeRequest"),
            Event::CursorBlinkingChange => write!(f, "CursorBlinkingChange"),
            Event::Wakeup => write!(f, "Wakeup"),
            Event::Bell => write!(f, "Bell"),
            Event::Exit => write!(f, "Exit"),
            Event::ChildExit(code) => write!(f, "ChildExit({code})"),
        }
    }
}
/// Byte sequences are sent to a `Notify` in response to some events.
pub trait Notify {
    /// Notify that an escape sequence should be written to the PTY.
    ///
    /// TODO this needs to be able to error somehow.
    fn notify<B: Into<Cow<'static, [u8]>>>(&self, _: B);
}

/// Terminal window measurements in cells and per-cell size.
#[derive(Copy, Clone, Debug)]
pub struct WindowSize {
    /// Number of lines.
    pub num_lines: u16,
    /// Number of columns.
    pub num_cols: u16,
    /// Width of an individual cell.
    pub cell_width: u16,
    /// Height of an individual cell.
    pub cell_height: u16,
}

/// Types that are interested in when the display is resized.
pub trait OnResize {
    fn on_resize(&mut self, window_size: WindowSize);
}

/// Event Loop for notifying the renderer about terminal events.
pub trait EventListener {
    /// Forward a terminal event; the default implementation drops it.
    fn send_event(&self, _event: Event) {}
}

/// Null sink for events.
pub struct VoidListener;

impl EventListener for VoidListener {}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/lib.rs | alacritty_terminal/src/lib.rs | //! Alacritty - The GPU Enhanced Terminal.
#![warn(rust_2018_idioms, future_incompatible)]
#![deny(clippy::all, clippy::if_not_else, clippy::enum_glob_use)]
#![cfg_attr(clippy, deny(warnings))]
pub mod event;
pub mod event_loop;
pub mod grid;
pub mod index;
pub mod selection;
pub mod sync;
pub mod term;
pub mod thread;
pub mod tty;
pub mod vi_mode;
pub use crate::grid::Grid;
pub use crate::term::Term;
pub use vte;
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/index.rs | alacritty_terminal/src/index.rs | //! Line and Column newtypes for strongly typed tty/grid/terminal APIs.
/// Indexing types and implementations for Grid and Line.
use std::cmp::{Ord, Ordering, max, min};
use std::fmt;
use std::ops::{Add, AddAssign, Deref, Sub, SubAssign};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use crate::grid::Dimensions;
/// The side of a cell.
pub type Side = Direction;

/// Horizontal direction.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Direction {
    Left,
    Right,
}

impl Direction {
    /// Return the opposite horizontal direction.
    #[must_use]
    pub fn opposite(self) -> Self {
        if self == Direction::Left { Direction::Right } else { Direction::Left }
    }
}
/// Terminal grid boundaries.
pub enum Boundary {
    /// Cursor's range of motion in the grid.
    ///
    /// This is equal to the viewport when the user isn't scrolled into the history.
    Cursor,

    /// Topmost line in history until the bottommost line in the terminal.
    Grid,

    /// Unbounded.
    ///
    /// Out-of-range lines wrap around the grid instead of clamping
    /// (see `Line::grid_clamp`).
    None,
}
/// Index in the grid using row, column notation.
#[derive(Debug, Clone, Copy, Default, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Point<L = Line, C = Column> {
    pub line: L,
    pub column: C,
}

impl<L, C> Point<L, C> {
    /// Create a new point from a line and a column.
    pub fn new(line: L, column: C) -> Point<L, C> {
        Point { line, column }
    }
}
impl Point {
    /// Subtract a number of columns from a point.
    ///
    /// Movement wraps through line boundaries; the result is clamped to
    /// `boundary`.
    #[inline]
    #[must_use = "this returns the result of the operation, without modifying the original"]
    pub fn sub<D>(mut self, dimensions: &D, boundary: Boundary, rhs: usize) -> Self
    where
        D: Dimensions,
    {
        let cols = dimensions.columns();
        // Number of line boundaries crossed while moving `rhs` cells to the left.
        let line_changes = (rhs + cols - 1).saturating_sub(self.column.0) / cols;
        self.line -= line_changes;
        // Remaining column offset, wrapped modulo the terminal width.
        self.column = Column((cols + self.column.0 - rhs % cols) % cols);
        self.grid_clamp(dimensions, boundary)
    }

    /// Add a number of columns to a point.
    ///
    /// Movement wraps through line boundaries; the result is clamped to
    /// `boundary`.
    #[inline]
    #[must_use = "this returns the result of the operation, without modifying the original"]
    pub fn add<D>(mut self, dimensions: &D, boundary: Boundary, rhs: usize) -> Self
    where
        D: Dimensions,
    {
        let cols = dimensions.columns();
        // Advance full lines first, then wrap the column into range.
        self.line += (rhs + self.column.0) / cols;
        self.column = Column((self.column.0 + rhs) % cols);
        self.grid_clamp(dimensions, boundary)
    }

    /// Clamp a point to a grid boundary.
    #[inline]
    #[must_use = "this returns the result of the operation, without modifying the original"]
    pub fn grid_clamp<D>(mut self, dimensions: &D, boundary: Boundary) -> Self
    where
        D: Dimensions,
    {
        // The column always clamps to the grid width, regardless of boundary.
        let last_column = dimensions.last_column();
        self.column = min(self.column, last_column);

        let topmost_line = dimensions.topmost_line();
        let bottommost_line = dimensions.bottommost_line();

        match boundary {
            // Overshoot snaps to the closest corner of the allowed region.
            Boundary::Cursor if self.line < 0 => Point::new(Line(0), Column(0)),
            Boundary::Grid if self.line < topmost_line => Point::new(topmost_line, Column(0)),
            Boundary::Cursor | Boundary::Grid if self.line > bottommost_line => {
                Point::new(bottommost_line, last_column)
            },
            Boundary::None => {
                // Unbounded: wrap the line around the grid (see `Line::grid_clamp`).
                self.line = self.line.grid_clamp(dimensions, boundary);
                self
            },
            _ => self,
        }
    }
}
impl<L: Ord, C: Ord> PartialOrd for Point<L, C> {
    fn partial_cmp(&self, other: &Point<L, C>) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl<L: Ord, C: Ord> Ord for Point<L, C> {
    /// Order points line-major: compare lines first, fall back to columns.
    fn cmp(&self, other: &Point<L, C>) -> Ordering {
        self.line.cmp(&other.line).then_with(|| self.column.cmp(&other.column))
    }
}
/// A line.
///
/// Newtype to avoid passing values incorrectly.
///
/// Lines are signed; negative values are used when addressing lines in the
/// scrollback history.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Default, Ord, PartialOrd)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Line(pub i32);
impl Line {
    /// Clamp a line to a grid boundary.
    #[must_use]
    pub fn grid_clamp<D: Dimensions>(self, dimensions: &D, boundary: Boundary) -> Self {
        match boundary {
            // Restrict to the visible viewport.
            Boundary::Cursor => max(Line(0), min(dimensions.bottommost_line(), self)),
            // Restrict to the full grid, including scrollback.
            Boundary::Grid => {
                let bottommost_line = dimensions.bottommost_line();
                let topmost_line = dimensions.topmost_line();
                max(topmost_line, min(bottommost_line, self))
            },
            // Wrap around the grid instead of clamping.
            Boundary::None => {
                let screen_lines = dimensions.screen_lines() as i32;
                let total_lines = dimensions.total_lines() as i32;

                if self >= screen_lines {
                    // Past the bottom of the viewport: wrap back in from the top.
                    let topmost_line = dimensions.topmost_line();
                    let extra = (self.0 - screen_lines) % total_lines;
                    topmost_line + extra
                } else {
                    // Offset relative to the bottommost line; in-range values map
                    // to themselves, values above the grid wrap in from the bottom.
                    let bottommost_line = dimensions.bottommost_line();
                    let extra = (self.0 - screen_lines + 1) % total_lines;
                    bottommost_line + extra
                }
            },
        }
    }
}
impl fmt::Display for Line {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl From<usize> for Line {
    fn from(source: usize) -> Self {
        // NOTE(review): values above `i32::MAX` wrap through the `as` cast.
        Self(source as i32)
    }
}

// `usize` arithmetic on `Line` funnels through the `i32` operators generated by
// the `ops!` macro below, via `as` casts.
impl Add<usize> for Line {
    type Output = Line;

    #[inline]
    fn add(self, rhs: usize) -> Line {
        self + rhs as i32
    }
}

impl AddAssign<usize> for Line {
    #[inline]
    fn add_assign(&mut self, rhs: usize) {
        *self += rhs as i32;
    }
}

impl Sub<usize> for Line {
    type Output = Line;

    #[inline]
    fn sub(self, rhs: usize) -> Line {
        self - rhs as i32
    }
}

impl SubAssign<usize> for Line {
    #[inline]
    fn sub_assign(&mut self, rhs: usize) {
        *self -= rhs as i32;
    }
}

impl PartialOrd<usize> for Line {
    #[inline]
    fn partial_cmp(&self, other: &usize) -> Option<Ordering> {
        self.0.partial_cmp(&(*other as i32))
    }
}

impl PartialEq<usize> for Line {
    #[inline]
    fn eq(&self, other: &usize) -> bool {
        self.0.eq(&(*other as i32))
    }
}

/// A column.
///
/// Newtype to avoid passing values incorrectly.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Default, Ord, PartialOrd)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Column(pub usize);

impl fmt::Display for Column {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
/// Generate `Deref`, `From`, arithmetic, and comparison impls for an index
/// newtype `$ty` wrapping the primitive `$primitive`.
macro_rules! ops {
    ($ty:ty, $construct:expr, $primitive:ty) => {
        impl Deref for $ty {
            type Target = $primitive;

            #[inline]
            fn deref(&self) -> &$primitive {
                &self.0
            }
        }

        impl From<$primitive> for $ty {
            #[inline]
            fn from(val: $primitive) -> $ty {
                $construct(val)
            }
        }

        impl Add<$ty> for $ty {
            type Output = $ty;

            #[inline]
            fn add(self, rhs: $ty) -> $ty {
                $construct(self.0 + rhs.0)
            }
        }

        impl AddAssign<$ty> for $ty {
            #[inline]
            fn add_assign(&mut self, rhs: $ty) {
                self.0 += rhs.0;
            }
        }

        impl Add<$primitive> for $ty {
            type Output = $ty;

            #[inline]
            fn add(self, rhs: $primitive) -> $ty {
                $construct(self.0 + rhs)
            }
        }

        impl AddAssign<$primitive> for $ty {
            #[inline]
            fn add_assign(&mut self, rhs: $primitive) {
                self.0 += rhs
            }
        }

        impl Sub<$ty> for $ty {
            type Output = $ty;

            #[inline]
            fn sub(self, rhs: $ty) -> $ty {
                $construct(self.0 - rhs.0)
            }
        }

        impl SubAssign<$ty> for $ty {
            #[inline]
            fn sub_assign(&mut self, rhs: $ty) {
                self.0 -= rhs.0;
            }
        }

        impl Sub<$primitive> for $ty {
            type Output = $ty;

            #[inline]
            fn sub(self, rhs: $primitive) -> $ty {
                $construct(self.0 - rhs)
            }
        }

        impl SubAssign<$primitive> for $ty {
            #[inline]
            fn sub_assign(&mut self, rhs: $primitive) {
                self.0 -= rhs
            }
        }

        // Symmetric comparisons between the newtype and its primitive.
        impl PartialEq<$ty> for $primitive {
            #[inline]
            fn eq(&self, other: &$ty) -> bool {
                self.eq(&other.0)
            }
        }

        impl PartialEq<$primitive> for $ty {
            #[inline]
            fn eq(&self, other: &$primitive) -> bool {
                self.0.eq(other)
            }
        }

        impl PartialOrd<$ty> for $primitive {
            #[inline]
            fn partial_cmp(&self, other: &$ty) -> Option<Ordering> {
                self.partial_cmp(&other.0)
            }
        }

        impl PartialOrd<$primitive> for $ty {
            #[inline]
            fn partial_cmp(&self, other: &$primitive) -> Option<Ordering> {
                self.0.partial_cmp(other)
            }
        }
    };
}

ops!(Column, Column, usize);
ops!(Line, Line, i32);
#[cfg(test)]
mod tests {
    use super::*;

    // Points order line-major, then by column; negative lines sort first.
    #[test]
    fn location_ordering() {
        assert!(Point::new(Line(0), Column(0)) == Point::new(Line(0), Column(0)));
        assert!(Point::new(Line(1), Column(0)) > Point::new(Line(0), Column(0)));
        assert!(Point::new(Line(0), Column(1)) > Point::new(Line(0), Column(0)));
        assert!(Point::new(Line(1), Column(1)) > Point::new(Line(0), Column(0)));
        assert!(Point::new(Line(1), Column(1)) > Point::new(Line(0), Column(1)));
        assert!(Point::new(Line(1), Column(1)) > Point::new(Line(1), Column(0)));
        assert!(Point::new(Line(0), Column(0)) > Point::new(Line(-1), Column(0)));
    }

    // Subtraction within a single line moves one column left.
    #[test]
    fn sub() {
        let size = (10, 42);
        let point = Point::new(Line(0), Column(13));

        let result = point.sub(&size, Boundary::Cursor, 1);

        assert_eq!(result, Point::new(Line(0), point.column - 1));
    }

    // Subtraction at column 0 wraps to the end of the previous line.
    #[test]
    fn sub_wrap() {
        let size = (10, 42);
        let point = Point::new(Line(1), Column(0));

        let result = point.sub(&size, Boundary::Cursor, 1);

        assert_eq!(result, Point::new(Line(0), size.last_column()));
    }

    // Subtraction past the viewport origin clamps with `Boundary::Cursor`.
    #[test]
    fn sub_clamp() {
        let size = (10, 42);
        let point = Point::new(Line(0), Column(0));

        let result = point.sub(&size, Boundary::Cursor, 1);

        assert_eq!(result, point);
    }

    // Subtraction past the grid top clamps with `Boundary::Grid`.
    #[test]
    fn sub_grid_clamp() {
        let size = (0, 42);
        let point = Point::new(Line(0), Column(0));

        let result = point.sub(&size, Boundary::Grid, 1);

        assert_eq!(result, point);
    }

    // `Boundary::None` wraps around to the bottom of the grid.
    #[test]
    fn sub_none_clamp() {
        let size = (10, 42);
        let point = Point::new(Line(0), Column(0));

        let result = point.sub(&size, Boundary::None, 1);

        assert_eq!(result, Point::new(Line(9), Column(41)));
    }

    // Addition within a single line moves one column right.
    #[test]
    fn add() {
        let size = (10, 42);
        let point = Point::new(Line(0), Column(13));

        let result = point.add(&size, Boundary::Cursor, 1);

        assert_eq!(result, Point::new(Line(0), point.column + 1));
    }

    // Addition past the last column wraps to the start of the next line.
    #[test]
    fn add_wrap() {
        let size = (10, 42);
        let point = Point::new(Line(0), size.last_column());

        let result = point.add(&size, Boundary::Cursor, 1);

        assert_eq!(result, Point::new(Line(1), Column(0)));
    }

    // Addition past the bottom clamps with `Boundary::Cursor`.
    #[test]
    fn add_clamp() {
        let size = (10, 42);
        let point = Point::new(Line(9), Column(41));

        let result = point.add(&size, Boundary::Cursor, 1);

        assert_eq!(result, point);
    }

    // Addition past the bottom clamps with `Boundary::Grid` as well.
    #[test]
    fn add_grid_clamp() {
        let size = (10, 42);
        let point = Point::new(Line(9), Column(41));

        let result = point.add(&size, Boundary::Grid, 1);

        assert_eq!(result, point);
    }

    // `Boundary::None` wraps around to the top of the grid.
    #[test]
    fn add_none_clamp() {
        let size = (10, 42);
        let point = Point::new(Line(9), Column(41));

        let result = point.add(&size, Boundary::None, 1);

        assert_eq!(result, Point::new(Line(0), Column(0)));
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/sync.rs | alacritty_terminal/src/sync.rs | //! Synchronization types.
//!
//! Most importantly, a fair mutex is included.
use parking_lot::{Mutex, MutexGuard};
/// A fair mutex.
///
/// Uses an extra lock to ensure that if one thread is waiting that it will get
/// the lock before a single thread can re-lock it.
pub struct FairMutex<T> {
    /// Data.
    data: Mutex<T>,
    /// Next-to-access.
    ///
    /// Fair lockers acquire this queue lock before `data`, so a thread parked
    /// here gets the data lock ahead of a repeat locker.
    next: Mutex<()>,
}
impl<T> FairMutex<T> {
    /// Create a new fair mutex.
    pub fn new(data: T) -> FairMutex<T> {
        FairMutex { data: Mutex::new(data), next: Mutex::new(()) }
    }

    /// Acquire a lease to reserve the mutex lock.
    ///
    /// This will prevent others from acquiring a terminal lock, but block if anyone else is
    /// already holding a lease.
    pub fn lease(&self) -> MutexGuard<'_, ()> {
        self.next.lock()
    }

    /// Lock the mutex.
    ///
    /// Waits in the `next` queue first, so fair lockers are served in arrival
    /// order.
    pub fn lock(&self) -> MutexGuard<'_, T> {
        // Must bind to a temporary or the lock will be freed before going
        // into data.lock().
        let _next = self.next.lock();
        self.data.lock()
    }

    /// Unfairly lock the mutex.
    ///
    /// Bypasses the `next` fairness queue and contends for the data directly.
    pub fn lock_unfair(&self) -> MutexGuard<'_, T> {
        self.data.lock()
    }

    /// Unfairly try to lock the mutex.
    ///
    /// Non-blocking variant of [`Self::lock_unfair`]; returns `None` when the
    /// data lock is currently held.
    pub fn try_lock_unfair(&self) -> Option<MutexGuard<'_, T>> {
        self.data.try_lock()
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/vi_mode.rs | alacritty_terminal/src/vi_mode.rs | use std::cmp::min;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use crate::event::EventListener;
use crate::grid::{Dimensions, GridCell};
use crate::index::{Boundary, Column, Direction, Line, Point, Side};
use crate::term::Term;
use crate::term::cell::Flags;
/// Possible vi mode motion movements.
///
/// With the `serde` feature, variants are (de)serialized in lowercase.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize), serde(rename_all = "lowercase"))]
pub enum ViMotion {
    /// Move up.
    Up,
    /// Move down.
    Down,
    /// Move left.
    Left,
    /// Move right.
    Right,
    /// First column, or beginning of the line when already at the first column.
    First,
    /// Last column, or beginning of the line when already at the last column.
    Last,
    /// First non-empty cell in this terminal row, or first non-empty cell
    /// of the line when already at the first cell of the row.
    FirstOccupied,
    /// Move to top of screen.
    High,
    /// Move to center of screen.
    Middle,
    /// Move to bottom of screen.
    Low,
    /// Move to start of semantically separated word.
    SemanticLeft,
    /// Move to start of next semantically separated word.
    SemanticRight,
    /// Move to end of previous semantically separated word.
    SemanticLeftEnd,
    /// Move to end of semantically separated word.
    SemanticRightEnd,
    /// Move to start of whitespace separated word.
    WordLeft,
    /// Move to start of next whitespace separated word.
    WordRight,
    /// Move to end of previous whitespace separated word.
    WordLeftEnd,
    /// Move to end of whitespace separated word.
    WordRightEnd,
    /// Move to opposing bracket.
    Bracket,
    /// Move above the current paragraph.
    ParagraphUp,
    /// Move below the current paragraph.
    ParagraphDown,
}
/// Cursor tracking vi mode position.
#[derive(Default, Copy, Clone, PartialEq, Eq)]
pub struct ViModeCursor {
    // Current cursor position in grid coordinates.
    pub point: Point,
}
impl ViModeCursor {
/// Create a vi mode cursor at the given grid position.
pub fn new(point: Point) -> Self {
    Self { point }
}
/// Move vi mode cursor.
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn motion<T: EventListener>(mut self, term: &mut Term<T>, motion: ViMotion) -> Self {
match motion {
ViMotion::Up => {
if self.point.line > term.topmost_line() {
self.point.line -= 1;
}
},
ViMotion::Down => {
if self.point.line + 1 < term.screen_lines() as i32 {
self.point.line += 1;
}
},
ViMotion::Left => {
self.point = term.expand_wide(self.point, Direction::Left);
let wrap_point = Point::new(self.point.line - 1, term.last_column());
if self.point.column == 0
&& self.point.line > term.topmost_line()
&& is_wrap(term, wrap_point)
{
self.point = wrap_point;
} else {
self.point.column = Column(self.point.column.saturating_sub(1));
}
},
ViMotion::Right => {
self.point = term.expand_wide(self.point, Direction::Right);
if is_wrap(term, self.point) {
self.point = Point::new(self.point.line + 1, Column(0));
} else {
self.point.column = min(self.point.column + 1, term.last_column());
}
},
ViMotion::First => {
self.point = term.expand_wide(self.point, Direction::Left);
while self.point.column == 0
&& self.point.line > term.topmost_line()
&& is_wrap(term, Point::new(self.point.line - 1, term.last_column()))
{
self.point.line -= 1;
}
self.point.column = Column(0);
},
ViMotion::Last => self.point = last(term, self.point),
ViMotion::FirstOccupied => self.point = first_occupied(term, self.point),
ViMotion::High => {
let line = Line(-(term.grid().display_offset() as i32));
let col = first_occupied_in_line(term, line).unwrap_or_default().column;
self.point = Point::new(line, col);
},
ViMotion::Middle => {
let display_offset = term.grid().display_offset() as i32;
let line = Line(-display_offset + term.screen_lines() as i32 / 2 - 1);
let col = first_occupied_in_line(term, line).unwrap_or_default().column;
self.point = Point::new(line, col);
},
ViMotion::Low => {
let display_offset = term.grid().display_offset() as i32;
let line = Line(-display_offset + term.screen_lines() as i32 - 1);
let col = first_occupied_in_line(term, line).unwrap_or_default().column;
self.point = Point::new(line, col);
},
ViMotion::SemanticLeft => {
self.point = semantic(term, self.point, Direction::Left, Side::Left);
},
ViMotion::SemanticRight => {
self.point = semantic(term, self.point, Direction::Right, Side::Left);
},
ViMotion::SemanticLeftEnd => {
self.point = semantic(term, self.point, Direction::Left, Side::Right);
},
ViMotion::SemanticRightEnd => {
self.point = semantic(term, self.point, Direction::Right, Side::Right);
},
ViMotion::WordLeft => {
self.point = word(term, self.point, Direction::Left, Side::Left);
},
ViMotion::WordRight => {
self.point = word(term, self.point, Direction::Right, Side::Left);
},
ViMotion::WordLeftEnd => {
self.point = word(term, self.point, Direction::Left, Side::Right);
},
ViMotion::WordRightEnd => {
self.point = word(term, self.point, Direction::Right, Side::Right);
},
ViMotion::Bracket => self.point = term.bracket_search(self.point).unwrap_or(self.point),
ViMotion::ParagraphUp => {
// Skip empty lines until we find the next paragraph,
// then skip over the paragraph until we reach the next empty line.
let topmost_line = term.topmost_line();
self.point.line = (*topmost_line..*self.point.line)
.rev()
.skip_while(|line| term.grid()[Line(*line)].is_clear())
.find(|line| term.grid()[Line(*line)].is_clear())
.map_or(topmost_line, Line);
self.point.column = Column(0);
},
ViMotion::ParagraphDown => {
// Skip empty lines until we find the next paragraph,
// then skip over the paragraph until we reach the next empty line.
let bottommost_line = term.bottommost_line();
self.point.line = (*self.point.line..*bottommost_line)
.skip_while(|line| term.grid()[Line(*line)].is_clear())
.find(|line| term.grid()[Line(*line)].is_clear())
.map_or(bottommost_line, Line);
self.point.column = Column(0);
},
}
term.scroll_to_point(self.point);
self
}
/// Get target cursor point for vim-like page movement.
#[must_use = "this returns the result of the operation, without modifying the original"]
pub fn scroll<T: EventListener>(mut self, term: &Term<T>, lines: i32) -> Self {
// Clamp movement to within visible region.
let line = (self.point.line - lines).grid_clamp(term, Boundary::Grid);
// Find the first occupied cell after scrolling has been performed.
let column = first_occupied_in_line(term, line).unwrap_or_default().column;
// Move cursor.
self.point = Point::new(line, column);
self
}
}
/// Find next end of line to move to.
///
/// Jumps to the last occupied cell of the line, follows wrapped lines when
/// already at or beyond it, and otherwise falls back to the last column.
fn last<T>(term: &Term<T>, mut point: Point) -> Point {
    // Expand across wide cells.
    point = term.expand_wide(point, Direction::Right);
    // Find last non-empty cell in the current line.
    let occupied = last_occupied_in_line(term, point.line).unwrap_or_default();
    if point.column < occupied.column {
        // Jump to last occupied cell when not already at or beyond it.
        occupied
    } else if is_wrap(term, point) {
        // Jump to last occupied cell across linewraps.
        while is_wrap(term, point) {
            point.line += 1;
        }
        last_occupied_in_line(term, point.line).unwrap_or(point)
    } else {
        // Jump to last column when beyond the last occupied cell.
        Point::new(point.line, term.last_column())
    }
}
/// Find next non-empty cell to move to.
///
/// Moves to the first occupied cell of the line, searching across line wraps
/// when the cursor already sits on it.
fn first_occupied<T>(term: &Term<T>, mut point: Point) -> Point {
    let last_column = term.last_column();
    // Expand left across wide chars, since we're searching lines left to right.
    point = term.expand_wide(point, Direction::Left);
    // Find first non-empty cell in current line.
    let occupied = first_occupied_in_line(term, point.line)
        .unwrap_or_else(|| Point::new(point.line, last_column));
    // Jump across wrapped lines if we're already at this line's first occupied cell.
    if point == occupied {
        let mut occupied = None;
        // Search for non-empty cell in previous lines.
        for line in (term.topmost_line().0..point.line.0).rev().map(Line::from) {
            // Stop once the chain of wrapped lines is broken.
            if !is_wrap(term, Point::new(line, last_column)) {
                break;
            }
            // Later (higher) lines take priority, so this ends up with the
            // topmost occupied cell of the wrapped line.
            occupied = first_occupied_in_line(term, line).or(occupied);
        }
        // Fallback to the next non-empty cell.
        let mut line = point.line;
        occupied.unwrap_or_else(|| {
            loop {
                if let Some(occupied) = first_occupied_in_line(term, line) {
                    break occupied;
                }
                let last_cell = Point::new(line, last_column);
                if !is_wrap(term, last_cell) {
                    break last_cell;
                }
                line += 1;
            }
        })
    } else {
        occupied
    }
}
/// Move by semantically separated word, like w/b/e/ge in vi.
///
/// `direction` is the movement direction; `side` determines whether the
/// cursor stops at the start (`Side::Left`) or end (`Side::Right`) of the
/// target word (see the `ViMotion::Semantic*` call sites).
fn semantic<T: EventListener>(
    term: &Term<T>,
    mut point: Point,
    direction: Direction,
    side: Side,
) -> Point {
    // Expand semantically based on movement direction.
    let expand_semantic = |point: Point| {
        // Do not expand when currently on a semantic escape char.
        let cell = &term.grid()[point];
        if term.semantic_escape_chars().contains(cell.c)
            && !cell.flags.intersects(Flags::WIDE_CHAR_SPACER | Flags::LEADING_WIDE_CHAR_SPACER)
        {
            point
        } else if direction == Direction::Left {
            term.semantic_search_left(point)
        } else {
            term.semantic_search_right(point)
        }
    };
    // Move to word boundary.
    if direction != side && !is_boundary(term, point, direction) {
        point = expand_semantic(point);
    }
    // Make sure we jump above wide chars.
    point = term.expand_wide(point, direction);
    // Skip whitespace.
    let mut next_point = advance(term, point, direction);
    while !is_boundary(term, point, direction) && is_space(term, next_point) {
        point = next_point;
        next_point = advance(term, point, direction);
    }
    // Assure minimum movement of one cell.
    if !is_boundary(term, point, direction) {
        point = advance(term, point, direction);
        // Skip over wide cell spacers.
        if direction == Direction::Left {
            point = term.expand_wide(point, direction);
        }
    }
    // Move to word boundary.
    if direction == side && !is_boundary(term, point, direction) {
        point = expand_semantic(point);
    }
    point
}
/// Move by whitespace separated word, like W/B/E/gE in vi.
///
/// `direction` is the movement direction; `side` determines whether the
/// cursor stops at the start or the end of the target word (see the
/// `ViMotion::Word*` call sites).
fn word<T: EventListener>(
    term: &Term<T>,
    mut point: Point,
    direction: Direction,
    side: Side,
) -> Point {
    // Make sure we jump above wide chars.
    point = term.expand_wide(point, direction);
    if direction == side {
        // Skip whitespace until right before a word.
        let mut next_point = advance(term, point, direction);
        while !is_boundary(term, point, direction) && is_space(term, next_point) {
            point = next_point;
            next_point = advance(term, point, direction);
        }
        // Skip non-whitespace until right inside word boundary.
        let mut next_point = advance(term, point, direction);
        while !is_boundary(term, point, direction) && !is_space(term, next_point) {
            point = next_point;
            next_point = advance(term, point, direction);
        }
    }
    if direction != side {
        // Skip non-whitespace until just beyond word.
        while !is_boundary(term, point, direction) && !is_space(term, point) {
            point = advance(term, point, direction);
        }
        // Skip whitespace until right inside word boundary.
        while !is_boundary(term, point, direction) && is_space(term, point) {
            point = advance(term, point, direction);
        }
    }
    point
}
/// Find first non-empty cell in line.
fn first_occupied_in_line<T>(term: &Term<T>, line: Line) -> Option<Point> {
    // Scan columns left to right, yielding the first non-whitespace cell.
    (0..term.columns()).find_map(|col| {
        let point = Point::new(line, Column(col));
        if is_space(term, point) { None } else { Some(point) }
    })
}
/// Find last non-empty cell in line.
fn last_occupied_in_line<T>(term: &Term<T>, line: Line) -> Option<Point> {
    // Scan columns right to left, yielding the first non-whitespace cell.
    (0..term.columns())
        .rev()
        .map(|col| Point::new(line, Column(col)))
        .find(|&point| !is_space(term, point))
}
/// Advance point by one cell based on direction.
fn advance<T>(term: &Term<T>, point: Point, direction: Direction) -> Point {
    match direction {
        Direction::Left => point.sub(term, Boundary::Grid, 1),
        _ => point.add(term, Boundary::Grid, 1),
    }
}
/// Check if cell at point contains whitespace.
fn is_space<T>(term: &Term<T>, point: Point) -> bool {
    let cell = &term.grid()[point.line][point.column];
    // Wide char spacers hold a space character, but are not whitespace.
    let is_wide_spacer =
        cell.flags().intersects(Flags::WIDE_CHAR_SPACER | Flags::LEADING_WIDE_CHAR_SPACER);
    !is_wide_spacer && matches!(cell.c, ' ' | '\t')
}
/// Check if the cell at a point contains the WRAPLINE flag.
fn is_wrap<T>(term: &Term<T>, point: Point) -> bool {
    let cell = &term.grid()[point];
    cell.flags.contains(Flags::WRAPLINE)
}
/// Check if point is at screen boundary in the movement direction.
fn is_boundary<T>(term: &Term<T>, point: Point, direction: Direction) -> bool {
    // Leftward movement stops at the very first cell of the scrollback.
    let at_left_edge = point.column == 0 && point.line <= term.topmost_line();
    // Rightward movement stops at the very last cell of the screen.
    let at_right_edge = point.column + 1 >= term.columns() && point.line == term.bottommost_line();
    (direction == Direction::Left && at_left_edge)
        || (direction == Direction::Right && at_right_edge)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::event::VoidListener;
use crate::index::{Column, Line};
use crate::term::test::TermSize;
use crate::term::{Config, Term};
use crate::vte::ansi::Handler;
fn term() -> Term<VoidListener> {
let size = TermSize::new(20, 20);
Term::new(Config::default(), &size, VoidListener)
}
#[test]
fn motion_simple() {
let mut term = term();
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::Right);
assert_eq!(cursor.point, Point::new(Line(0), Column(1)));
cursor = cursor.motion(&mut term, ViMotion::Left);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::Down);
assert_eq!(cursor.point, Point::new(Line(1), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::Up);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
}
#[test]
fn simple_wide() {
let mut term = term();
term.grid_mut()[Line(0)][Column(0)].c = 'a';
term.grid_mut()[Line(0)][Column(1)].c = '汉';
term.grid_mut()[Line(0)][Column(1)].flags.insert(Flags::WIDE_CHAR);
term.grid_mut()[Line(0)][Column(2)].c = ' ';
term.grid_mut()[Line(0)][Column(2)].flags.insert(Flags::WIDE_CHAR_SPACER);
term.grid_mut()[Line(0)][Column(3)].c = 'a';
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(1)));
cursor = cursor.motion(&mut term, ViMotion::Right);
assert_eq!(cursor.point, Point::new(Line(0), Column(3)));
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(2)));
cursor = cursor.motion(&mut term, ViMotion::Left);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
}
#[test]
fn motion_start_end() {
let mut term = term();
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::Last);
assert_eq!(cursor.point, Point::new(Line(0), Column(19)));
cursor = cursor.motion(&mut term, ViMotion::First);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
}
#[test]
fn motion_first_occupied() {
let mut term = term();
term.grid_mut()[Line(0)][Column(0)].c = ' ';
term.grid_mut()[Line(0)][Column(1)].c = 'x';
term.grid_mut()[Line(0)][Column(2)].c = ' ';
term.grid_mut()[Line(0)][Column(3)].c = 'y';
term.grid_mut()[Line(0)][Column(19)].flags.insert(Flags::WRAPLINE);
term.grid_mut()[Line(1)][Column(19)].flags.insert(Flags::WRAPLINE);
term.grid_mut()[Line(2)][Column(0)].c = 'z';
term.grid_mut()[Line(2)][Column(1)].c = ' ';
let mut cursor = ViModeCursor::new(Point::new(Line(2), Column(1)));
cursor = cursor.motion(&mut term, ViMotion::FirstOccupied);
assert_eq!(cursor.point, Point::new(Line(2), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::FirstOccupied);
assert_eq!(cursor.point, Point::new(Line(0), Column(1)));
}
#[test]
fn motion_high_middle_low() {
let mut term = term();
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::High);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::Middle);
assert_eq!(cursor.point, Point::new(Line(9), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::Low);
assert_eq!(cursor.point, Point::new(Line(19), Column(0)));
}
#[test]
fn motion_bracket() {
let mut term = term();
term.grid_mut()[Line(0)][Column(0)].c = '(';
term.grid_mut()[Line(0)][Column(1)].c = 'x';
term.grid_mut()[Line(0)][Column(2)].c = ')';
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::Bracket);
assert_eq!(cursor.point, Point::new(Line(0), Column(2)));
cursor = cursor.motion(&mut term, ViMotion::Bracket);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
}
fn motion_semantic_term() -> Term<VoidListener> {
let mut term = term();
term.grid_mut()[Line(0)][Column(0)].c = 'x';
term.grid_mut()[Line(0)][Column(1)].c = ' ';
term.grid_mut()[Line(0)][Column(2)].c = 'x';
term.grid_mut()[Line(0)][Column(3)].c = 'x';
term.grid_mut()[Line(0)][Column(4)].c = ' ';
term.grid_mut()[Line(0)][Column(5)].c = ' ';
term.grid_mut()[Line(0)][Column(6)].c = ':';
term.grid_mut()[Line(0)][Column(7)].c = ' ';
term.grid_mut()[Line(0)][Column(8)].c = 'x';
term.grid_mut()[Line(0)][Column(9)].c = ':';
term.grid_mut()[Line(0)][Column(10)].c = 'x';
term.grid_mut()[Line(0)][Column(11)].c = ' ';
term.grid_mut()[Line(0)][Column(12)].c = ' ';
term.grid_mut()[Line(0)][Column(13)].c = ':';
term.grid_mut()[Line(0)][Column(14)].c = ' ';
term.grid_mut()[Line(0)][Column(15)].c = 'x';
term
}
#[test]
fn motion_semantic_right_end() {
let mut term = motion_semantic_term();
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRightEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(3)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRightEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(6)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRightEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(8)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRightEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(9)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRightEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(10)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRightEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(13)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRightEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(15)));
}
#[test]
fn motion_semantic_left_start() {
let mut term = motion_semantic_term();
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(15)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(13)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(10)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(9)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(8)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(6)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(2)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
}
#[test]
fn motion_semantic_right_start() {
let mut term = motion_semantic_term();
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRight);
assert_eq!(cursor.point, Point::new(Line(0), Column(2)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRight);
assert_eq!(cursor.point, Point::new(Line(0), Column(6)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRight);
assert_eq!(cursor.point, Point::new(Line(0), Column(8)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRight);
assert_eq!(cursor.point, Point::new(Line(0), Column(9)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRight);
assert_eq!(cursor.point, Point::new(Line(0), Column(10)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRight);
assert_eq!(cursor.point, Point::new(Line(0), Column(13)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRight);
assert_eq!(cursor.point, Point::new(Line(0), Column(15)));
}
#[test]
fn motion_semantic_left_end() {
let mut term = motion_semantic_term();
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(15)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeftEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(13)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeftEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(10)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeftEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(9)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeftEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(8)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeftEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(6)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeftEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(3)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeftEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
}
#[test]
fn scroll_semantic() {
let mut term = term();
term.grid_mut().scroll_up(&(Line(0)..Line(20)), 5);
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeft);
assert_eq!(cursor.point, Point::new(Line(-5), Column(0)));
assert_eq!(term.grid().display_offset(), 5);
cursor = cursor.motion(&mut term, ViMotion::SemanticRight);
assert_eq!(cursor.point, Point::new(Line(19), Column(19)));
assert_eq!(term.grid().display_offset(), 0);
cursor = cursor.motion(&mut term, ViMotion::SemanticLeftEnd);
assert_eq!(cursor.point, Point::new(Line(-5), Column(0)));
assert_eq!(term.grid().display_offset(), 5);
cursor = cursor.motion(&mut term, ViMotion::SemanticRightEnd);
assert_eq!(cursor.point, Point::new(Line(19), Column(19)));
assert_eq!(term.grid().display_offset(), 0);
}
#[test]
fn semantic_wide() {
let mut term = term();
term.grid_mut()[Line(0)][Column(0)].c = 'a';
term.grid_mut()[Line(0)][Column(1)].c = ' ';
term.grid_mut()[Line(0)][Column(2)].c = '汉';
term.grid_mut()[Line(0)][Column(2)].flags.insert(Flags::WIDE_CHAR);
term.grid_mut()[Line(0)][Column(3)].c = ' ';
term.grid_mut()[Line(0)][Column(3)].flags.insert(Flags::WIDE_CHAR_SPACER);
term.grid_mut()[Line(0)][Column(4)].c = ' ';
term.grid_mut()[Line(0)][Column(5)].c = 'a';
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(2)));
cursor = cursor.motion(&mut term, ViMotion::SemanticRight);
assert_eq!(cursor.point, Point::new(Line(0), Column(5)));
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(3)));
cursor = cursor.motion(&mut term, ViMotion::SemanticLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
}
#[test]
fn motion_word() {
let mut term = term();
term.grid_mut()[Line(0)][Column(0)].c = 'a';
term.grid_mut()[Line(0)][Column(1)].c = ';';
term.grid_mut()[Line(0)][Column(2)].c = ' ';
term.grid_mut()[Line(0)][Column(3)].c = ' ';
term.grid_mut()[Line(0)][Column(4)].c = 'a';
term.grid_mut()[Line(0)][Column(5)].c = ';';
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::WordRightEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(1)));
cursor = cursor.motion(&mut term, ViMotion::WordRightEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(5)));
cursor = cursor.motion(&mut term, ViMotion::WordLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(4)));
cursor = cursor.motion(&mut term, ViMotion::WordLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::WordRight);
assert_eq!(cursor.point, Point::new(Line(0), Column(4)));
cursor = cursor.motion(&mut term, ViMotion::WordLeftEnd);
assert_eq!(cursor.point, Point::new(Line(0), Column(1)));
}
#[test]
fn scroll_word() {
let mut term = term();
term.grid_mut().scroll_up(&(Line(0)..Line(20)), 5);
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(0)));
cursor = cursor.motion(&mut term, ViMotion::WordLeft);
assert_eq!(cursor.point, Point::new(Line(-5), Column(0)));
assert_eq!(term.grid().display_offset(), 5);
cursor = cursor.motion(&mut term, ViMotion::WordRight);
assert_eq!(cursor.point, Point::new(Line(19), Column(19)));
assert_eq!(term.grid().display_offset(), 0);
cursor = cursor.motion(&mut term, ViMotion::WordLeftEnd);
assert_eq!(cursor.point, Point::new(Line(-5), Column(0)));
assert_eq!(term.grid().display_offset(), 5);
cursor = cursor.motion(&mut term, ViMotion::WordRightEnd);
assert_eq!(cursor.point, Point::new(Line(19), Column(19)));
assert_eq!(term.grid().display_offset(), 0);
}
#[test]
fn word_wide() {
let mut term = term();
term.grid_mut()[Line(0)][Column(0)].c = 'a';
term.grid_mut()[Line(0)][Column(1)].c = ' ';
term.grid_mut()[Line(0)][Column(2)].c = '汉';
term.grid_mut()[Line(0)][Column(2)].flags.insert(Flags::WIDE_CHAR);
term.grid_mut()[Line(0)][Column(3)].c = ' ';
term.grid_mut()[Line(0)][Column(3)].flags.insert(Flags::WIDE_CHAR_SPACER);
term.grid_mut()[Line(0)][Column(4)].c = ' ';
term.grid_mut()[Line(0)][Column(5)].c = 'a';
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(2)));
cursor = cursor.motion(&mut term, ViMotion::WordRight);
assert_eq!(cursor.point, Point::new(Line(0), Column(5)));
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(3)));
cursor = cursor.motion(&mut term, ViMotion::WordLeft);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
}
#[test]
fn scroll_simple() {
let mut term = term();
// Create 1 line of scrollback.
for _ in 0..20 {
term.newline();
}
let mut cursor = ViModeCursor::new(Point::new(Line(0), Column(0)));
cursor = cursor.scroll(&term, -1);
assert_eq!(cursor.point, Point::new(Line(1), Column(0)));
cursor = cursor.scroll(&term, 1);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
cursor = cursor.scroll(&term, 1);
assert_eq!(cursor.point, Point::new(Line(-1), Column(0)));
}
#[test]
fn scroll_over_top() {
let mut term = term();
// Create 40 lines of scrollback.
for _ in 0..59 {
term.newline();
}
let mut cursor = ViModeCursor::new(Point::new(Line(19), Column(0)));
cursor = cursor.scroll(&term, 20);
assert_eq!(cursor.point, Point::new(Line(-1), Column(0)));
cursor = cursor.scroll(&term, 20);
assert_eq!(cursor.point, Point::new(Line(-21), Column(0)));
cursor = cursor.scroll(&term, 20);
assert_eq!(cursor.point, Point::new(Line(-40), Column(0)));
cursor = cursor.scroll(&term, 20);
assert_eq!(cursor.point, Point::new(Line(-40), Column(0)));
}
#[test]
fn scroll_over_bottom() {
let mut term = term();
// Create 40 lines of scrollback.
for _ in 0..59 {
term.newline();
}
let mut cursor = ViModeCursor::new(Point::new(Line(-40), Column(0)));
cursor = cursor.scroll(&term, -20);
assert_eq!(cursor.point, Point::new(Line(-20), Column(0)));
cursor = cursor.scroll(&term, -20);
assert_eq!(cursor.point, Point::new(Line(0), Column(0)));
cursor = cursor.scroll(&term, -20);
assert_eq!(cursor.point, Point::new(Line(19), Column(0)));
cursor = cursor.scroll(&term, -20);
assert_eq!(cursor.point, Point::new(Line(19), Column(0)));
}
#[test]
fn wide_semantic_char() {
let mut term = term();
term.set_semantic_escape_chars("-");
term.grid_mut()[Line(0)][Column(0)].c = 'x';
term.grid_mut()[Line(0)][Column(1)].c = 'x';
term.grid_mut()[Line(0)][Column(2)].c = '-';
term.grid_mut()[Line(0)][Column(2)].flags.insert(Flags::WIDE_CHAR);
term.grid_mut()[Line(0)][Column(3)].c = ' ';
term.grid_mut()[Line(0)][Column(3)].flags.insert(Flags::WIDE_CHAR_SPACER);
term.grid_mut()[Line(0)][Column(4)].c = 'x';
term.grid_mut()[Line(0)][Column(5)].c = 'x';
// Test motion to the right.
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | true |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/selection.rs | alacritty_terminal/src/selection.rs | //! State management for a selection in the grid.
//!
//! A selection should start when the mouse is clicked, and it should be
//! finalized when the button is released. The selection should be cleared
//! when text is added/removed/scrolled on the screen. The selection should
//! also be cleared if the user clicks off of the selection.
use std::cmp::min;
use std::mem;
use std::ops::{Bound, Range, RangeBounds};
use crate::grid::{Dimensions, GridCell, Indexed};
use crate::index::{Boundary, Column, Line, Point, Side};
use crate::term::Term;
use crate::term::cell::{Cell, Flags};
use crate::vte::ansi::CursorShape;
/// A Point and side within that point.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct Anchor {
    /// Cell the anchor is located in.
    point: Point,
    /// Side of the cell the anchor is attached to.
    side: Side,
}
impl Anchor {
    /// Create an anchor on the given side of a cell.
    fn new(point: Point, side: Side) -> Anchor {
        Anchor { point, side }
    }
}
/// Represents a range of selected cells.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct SelectionRange {
    /// Start point, top left of the selection (inclusive).
    pub start: Point,
    /// End point, bottom right of the selection (inclusive).
    pub end: Point,
    /// Whether this selection is a block selection.
    pub is_block: bool,
}
impl SelectionRange {
pub fn new(start: Point, end: Point, is_block: bool) -> Self {
assert!(start <= end);
Self { start, end, is_block }
}
}
impl SelectionRange {
    /// Check if a point lies within the selection.
    pub fn contains(&self, point: Point) -> bool {
        // The point is selected when it is on a selected line and within the
        // selected columns; for non-block selections, intermediate lines are
        // fully covered regardless of column.
        self.start.line <= point.line
            && self.end.line >= point.line
            && (self.start.column <= point.column
                || (self.start.line != point.line && !self.is_block))
            && (self.end.column >= point.column || (self.end.line != point.line && !self.is_block))
    }
    /// Check if the cell at a point is part of the selection.
    ///
    /// Unlike [`Self::contains`], this also handles the trailing spacer of
    /// wide characters and suppresses matches under a block cursor.
    /// NOTE(review): `point` appears to be the terminal cursor position —
    /// confirm against callers.
    pub fn contains_cell(
        &self,
        indexed: &Indexed<&Cell>,
        point: Point,
        shape: CursorShape,
    ) -> bool {
        // Do not invert block cursor at selection boundaries.
        if shape == CursorShape::Block
            && point == indexed.point
            && (self.start == indexed.point
                || self.end == indexed.point
                || (self.is_block
                    && ((self.start.line == indexed.point.line
                        && self.end.column == indexed.point.column)
                        || (self.end.line == indexed.point.line
                            && self.start.column == indexed.point.column))))
        {
            return false;
        }
        // Point itself is selected.
        if self.contains(indexed.point) {
            return true;
        }
        // Check if a wide char's trailing spacer is selected.
        indexed.cell.flags().contains(Flags::WIDE_CHAR)
            && self.contains(Point::new(indexed.point.line, indexed.point.column + 1))
    }
}
/// Different kinds of selection.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum SelectionType {
    /// Cell-precise selection without any expansion.
    Simple,
    /// Rectangular block selection.
    Block,
    /// Selection expanded to the nearest semantic escape characters.
    Semantic,
    /// Selection of entire lines.
    Lines,
}
/// Describes a region of a 2-dimensional area.
///
/// Used to track a text selection. There are four supported modes, each with its own constructor:
/// [`simple`], [`block`], [`semantic`], and [`lines`]. The [`simple`] mode precisely tracks which
/// cells are selected without any expansion. [`block`] will select rectangular regions.
/// [`semantic`] mode expands the initial selection to the nearest semantic escape char in either
/// direction. [`lines`] will always select entire lines.
///
/// Calls to [`update`] operate different based on the selection kind. The [`simple`] and [`block`]
/// mode do nothing special, simply track points and sides. [`semantic`] will continue to expand
/// out to semantic boundaries as the selection point changes. Similarly, [`lines`] will always
/// expand the new point to encompass entire lines.
///
/// [`simple`]: enum.Selection.html#method.simple
/// [`block`]: enum.Selection.html#method.block
/// [`semantic`]: enum.Selection.html#method.semantic
/// [`lines`]: enum.Selection.html#method.lines
/// [`update`]: enum.Selection.html#method.update
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Selection {
    /// Kind of this selection.
    pub ty: SelectionType,
    /// Start and end anchors; these are stored in creation order and may
    /// need to be swapped before use (see `to_range`).
    region: Range<Anchor>,
}
impl Selection {
/// Create a new selection of the given type, anchored at `location`.
pub fn new(ty: SelectionType, location: Point, side: Side) -> Selection {
    // Both anchors start on the same cell; `update` moves the end anchor.
    let anchor = Anchor::new(location, side);
    Self { ty, region: anchor..anchor }
}
/// Update the end of the selection.
pub fn update(&mut self, point: Point, side: Side) {
self.region.end = Anchor::new(point, side);
}
/// Adjust the selection for a rotation of the lines in `range` by `delta`.
///
/// Returns `None` when the rotation moves the selection entirely out of the
/// affected region, meaning the selection should be discarded.
pub fn rotate<D: Dimensions>(
    mut self,
    dimensions: &D,
    range: &Range<Line>,
    delta: i32,
) -> Option<Selection> {
    let bottommost_line = dimensions.bottommost_line();
    let range_bottom = range.end;
    let range_top = range.start;
    // Order the anchors so `start` is at or above `end`.
    let (mut start, mut end) = (&mut self.region.start, &mut self.region.end);
    if start.point > end.point {
        mem::swap(&mut start, &mut end);
    }
    // Rotate start of selection.
    // When the region starts at the first line, lines in the scrollback
    // (negative indices) rotate as well, hence the `range_top == 0` check.
    if (start.point.line >= range_top || range_top == 0) && start.point.line < range_bottom {
        start.point.line = min(start.point.line - delta, bottommost_line);
        // If end is within the same region, delete selection once start rotates out.
        if start.point.line >= range_bottom && end.point.line < range_bottom {
            return None;
        }
        // Clamp selection to start of region.
        if start.point.line < range_top && range_top != 0 {
            if self.ty != SelectionType::Block {
                start.point.column = Column(0);
                start.side = Side::Left;
            }
            start.point.line = range_top;
        }
    }
    // Rotate end of selection.
    if (end.point.line >= range_top || range_top == 0) && end.point.line < range_bottom {
        end.point.line = min(end.point.line - delta, bottommost_line);
        // Delete selection if end has overtaken the start.
        if end.point.line < start.point.line {
            return None;
        }
        // Clamp selection to end of region.
        if end.point.line >= range_bottom {
            if self.ty != SelectionType::Block {
                end.point.column = dimensions.last_column();
                end.side = Side::Right;
            }
            end.point.line = range_bottom - 1;
        }
    }
    Some(self)
}
/// Whether the selection covers no cells at all.
pub fn is_empty(&self) -> bool {
    match self.ty {
        SelectionType::Simple => {
            let (mut start, mut end) = (self.region.start, self.region.end);
            if start.point > end.point {
                mem::swap(&mut start, &mut end);
            }
            // Simple selection is empty when the points are identical
            // or two adjacent cells have the sides right -> left.
            start == end
                || (start.side == Side::Right
                    && end.side == Side::Left
                    && (start.point.line == end.point.line)
                    && start.point.column + 1 == end.point.column)
        },
        SelectionType::Block => {
            let (start, end) = (self.region.start, self.region.end);
            // Block selection is empty when the points' columns and sides are identical
            // or two cells with adjacent columns have the sides right -> left,
            // regardless of their lines
            (start.point.column == end.point.column && start.side == end.side)
                || (start.point.column + 1 == end.point.column
                    && start.side == Side::Right
                    && end.side == Side::Left)
                || (end.point.column + 1 == start.point.column
                    && start.side == Side::Left
                    && end.side == Side::Right)
        },
        // Expanding selections always cover at least one cell/line.
        SelectionType::Semantic | SelectionType::Lines => false,
    }
}
/// Check whether selection contains any point in a given range.
pub fn intersects_range<R: RangeBounds<Line>>(&self, range: R) -> bool {
    // Order the anchor lines so `start` is the topmost one.
    let (start, end) = {
        let (a, b) = (self.region.start.point.line, self.region.end.point.line);
        if a <= b { (a, b) } else { (b, a) }
    };
    // Normalize both range bounds to inclusive lines.
    let range_top = match range.start_bound() {
        Bound::Included(&line) => line,
        Bound::Excluded(&line) => line + 1,
        Bound::Unbounded => Line(i32::MIN),
    };
    let range_bottom = match range.end_bound() {
        Bound::Included(&line) => line,
        Bound::Excluded(&line) => line - 1,
        Bound::Unbounded => Line(i32::MAX),
    };
    // The intervals overlap unless one lies strictly past the other.
    range_top <= end && range_bottom >= start
}
/// Expand selection sides to include all cells.
pub fn include_all(&mut self) {
let (start, end) = (self.region.start.point, self.region.end.point);
let (start_side, end_side) = match self.ty {
SelectionType::Block
if start.column > end.column
|| (start.column == end.column && start.line > end.line) =>
{
(Side::Right, Side::Left)
},
SelectionType::Block => (Side::Left, Side::Right),
_ if start > end => (Side::Right, Side::Left),
_ => (Side::Left, Side::Right),
};
self.region.start.side = start_side;
self.region.end.side = end_side;
}
/// Convert selection to grid coordinates.
pub fn to_range<T>(&self, term: &Term<T>) -> Option<SelectionRange> {
let grid = term.grid();
let columns = grid.columns();
// Order start above the end.
let mut start = self.region.start;
let mut end = self.region.end;
if start.point > end.point {
mem::swap(&mut start, &mut end);
}
// Clamp selection to within grid boundaries.
if end.point.line < term.topmost_line() {
return None;
}
start.point = start.point.grid_clamp(term, Boundary::Grid);
end.point = end.point.grid_clamp(term, Boundary::Grid);
match self.ty {
SelectionType::Simple => self.range_simple(start, end, columns),
SelectionType::Block => self.range_block(start, end),
SelectionType::Semantic => Some(Self::range_semantic(term, start.point, end.point)),
SelectionType::Lines => Some(Self::range_lines(term, start.point, end.point)),
}
}
fn range_semantic<T>(term: &Term<T>, mut start: Point, mut end: Point) -> SelectionRange {
if start == end {
if let Some(matching) = term.bracket_search(start) {
if (matching.line == start.line && matching.column < start.column)
|| (matching.line < start.line)
{
start = matching;
} else {
end = matching;
}
return SelectionRange { start, end, is_block: false };
}
}
let start = term.semantic_search_left(start);
let end = term.semantic_search_right(end);
SelectionRange { start, end, is_block: false }
}
fn range_lines<T>(term: &Term<T>, start: Point, end: Point) -> SelectionRange {
let start = term.line_search_left(start);
let end = term.line_search_right(end);
SelectionRange { start, end, is_block: false }
}
fn range_simple(
&self,
mut start: Anchor,
mut end: Anchor,
columns: usize,
) -> Option<SelectionRange> {
if self.is_empty() {
return None;
}
// Remove last cell if selection ends to the left of a cell.
if end.side == Side::Left && start.point != end.point {
// Special case when selection ends to left of first cell.
if end.point.column == 0 {
end.point.column = Column(columns - 1);
end.point.line -= 1;
} else {
end.point.column -= 1;
}
}
// Remove first cell if selection starts at the right of a cell.
if start.side == Side::Right && start.point != end.point {
start.point.column += 1;
// Wrap to next line when selection starts to the right of last column.
if start.point.column == columns {
start.point.column = Column(0);
start.point.line += 1;
}
}
Some(SelectionRange { start: start.point, end: end.point, is_block: false })
}
fn range_block(&self, mut start: Anchor, mut end: Anchor) -> Option<SelectionRange> {
if self.is_empty() {
return None;
}
// Always go top-left -> bottom-right.
if start.point.column > end.point.column {
mem::swap(&mut start.side, &mut end.side);
mem::swap(&mut start.point.column, &mut end.point.column);
}
// Remove last cell if selection ends to the left of a cell.
if end.side == Side::Left && start.point != end.point && end.point.column.0 > 0 {
end.point.column -= 1;
}
// Remove first cell if selection starts at the right of a cell.
if start.side == Side::Right && start.point != end.point {
start.point.column += 1;
}
Some(SelectionRange { start: start.point, end: end.point, is_block: true })
}
}
/// Tests for selection.
///
/// There are comments on all of the tests describing the selection. Pictograms
/// are used to avoid ambiguity. Grid cells are represented by a [ ]. Only
/// cells that are completely covered are counted in a selection. Ends are
/// represented by `B` and `E` for begin and end, respectively. A selected cell
/// looks like [XX], [BX] (at the start), [XB] (at the end), [XE] (at the end),
/// and [EX] (at the start), or [BE] for a single cell. Partially selected cells
/// look like [ B] and [E ].
#[cfg(test)]
mod tests {
use super::*;
use crate::index::{Column, Point, Side};
use crate::term::test::TermSize;
use crate::term::{Config, Term};
fn term(height: usize, width: usize) -> Term<()> {
let size = TermSize::new(width, height);
Term::new(Config::default(), &size, ())
}
/// Test case of single cell selection.
///
/// 1. [ ]
/// 2. [B ]
/// 3. [BE]
#[test]
fn single_cell_left_to_right() {
let location = Point::new(Line(0), Column(0));
let mut selection = Selection::new(SelectionType::Simple, location, Side::Left);
selection.update(location, Side::Right);
assert_eq!(selection.to_range(&term(1, 2)).unwrap(), SelectionRange {
start: location,
end: location,
is_block: false
});
}
/// Test case of single cell selection.
///
/// 1. [ ]
/// 2. [ B]
/// 3. [EB]
#[test]
fn single_cell_right_to_left() {
let location = Point::new(Line(0), Column(0));
let mut selection = Selection::new(SelectionType::Simple, location, Side::Right);
selection.update(location, Side::Left);
assert_eq!(selection.to_range(&term(1, 2)).unwrap(), SelectionRange {
start: location,
end: location,
is_block: false
});
}
/// Test adjacent cell selection from left to right.
///
/// 1. [ ][ ]
/// 2. [ B][ ]
/// 3. [ B][E ]
#[test]
fn between_adjacent_cells_left_to_right() {
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(0), Column(0)), Side::Right);
selection.update(Point::new(Line(0), Column(1)), Side::Left);
assert_eq!(selection.to_range(&term(1, 2)), None);
}
/// Test adjacent cell selection from right to left.
///
/// 1. [ ][ ]
/// 2. [ ][B ]
/// 3. [ E][B ]
#[test]
fn between_adjacent_cells_right_to_left() {
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(0), Column(1)), Side::Left);
selection.update(Point::new(Line(0), Column(0)), Side::Right);
assert_eq!(selection.to_range(&term(1, 2)), None);
}
#[rustfmt::skip]
/// Test selection across adjacent lines.
///
/// 1. [ ][ ][ ][ ][ ]
/// [ ][ ][ ][ ][ ]
/// 2. [ ][ B][ ][ ][ ]
/// [ ][ ][ ][ ][ ]
/// 3. [ ][ B][XX][XX][XX]
/// [XX][XE][ ][ ][ ]
#[test]
fn across_adjacent_lines_upward_final_cell_exclusive() {
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(0), Column(1)), Side::Right);
selection.update(Point::new(Line(1), Column(1)), Side::Right);
assert_eq!(selection.to_range(&term(2, 5)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(2)),
end: Point::new(Line(1), Column(1)),
is_block: false,
});
}
#[rustfmt::skip]
/// Test selection across adjacent lines.
///
/// 1. [ ][ ][ ][ ][ ]
/// [ ][ ][ ][ ][ ]
/// 2. [ ][ ][ ][ ][ ]
/// [ ][ B][ ][ ][ ]
/// 3. [ ][ E][XX][XX][XX]
/// [XX][XB][ ][ ][ ]
/// 4. [ E][XX][XX][XX][XX]
/// [XX][XB][ ][ ][ ]
#[test]
fn selection_bigger_then_smaller() {
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(1), Column(1)), Side::Right);
selection.update(Point::new(Line(0), Column(1)), Side::Right);
selection.update(Point::new(Line(0), Column(0)), Side::Right);
assert_eq!(selection.to_range(&term(2, 5)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(1)),
end: Point::new(Line(1), Column(1)),
is_block: false,
});
}
#[test]
fn line_selection() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Lines, Point::new(Line(9), Column(1)), Side::Left);
selection.update(Point::new(Line(4), Column(1)), Side::Right);
selection = selection.rotate(&size, &(Line(0)..Line(size.0 as i32)), 4).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(0)),
end: Point::new(Line(5), Column(4)),
is_block: false,
});
}
#[test]
fn semantic_selection() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Semantic, Point::new(Line(9), Column(3)), Side::Left);
selection.update(Point::new(Line(4), Column(1)), Side::Right);
selection = selection.rotate(&size, &(Line(0)..Line(size.0 as i32)), 4).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(1)),
end: Point::new(Line(5), Column(3)),
is_block: false,
});
}
#[test]
fn simple_selection() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(9), Column(3)), Side::Right);
selection.update(Point::new(Line(4), Column(1)), Side::Right);
selection = selection.rotate(&size, &(Line(0)..Line(size.0 as i32)), 4).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(2)),
end: Point::new(Line(5), Column(3)),
is_block: false,
});
}
#[test]
fn block_selection() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Block, Point::new(Line(9), Column(3)), Side::Right);
selection.update(Point::new(Line(4), Column(1)), Side::Right);
selection = selection.rotate(&size, &(Line(0)..Line(size.0 as i32)), 4).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(2)),
end: Point::new(Line(5), Column(3)),
is_block: true
});
}
#[test]
fn simple_is_empty() {
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(1), Column(0)), Side::Right);
assert!(selection.is_empty());
selection.update(Point::new(Line(1), Column(1)), Side::Left);
assert!(selection.is_empty());
selection.update(Point::new(Line(0), Column(0)), Side::Right);
assert!(!selection.is_empty());
}
#[test]
fn block_is_empty() {
let mut selection =
Selection::new(SelectionType::Block, Point::new(Line(1), Column(0)), Side::Right);
assert!(selection.is_empty());
selection.update(Point::new(Line(1), Column(1)), Side::Left);
assert!(selection.is_empty());
selection.update(Point::new(Line(1), Column(1)), Side::Right);
assert!(!selection.is_empty());
selection.update(Point::new(Line(0), Column(0)), Side::Right);
assert!(selection.is_empty());
selection.update(Point::new(Line(0), Column(1)), Side::Left);
assert!(selection.is_empty());
selection.update(Point::new(Line(0), Column(1)), Side::Right);
assert!(!selection.is_empty());
}
#[test]
fn rotate_in_region_up() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(7), Column(3)), Side::Right);
selection.update(Point::new(Line(4), Column(1)), Side::Right);
selection = selection.rotate(&size, &(Line(1)..Line(size.0 as i32 - 1)), 4).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(1), Column(0)),
end: Point::new(Line(3), Column(3)),
is_block: false,
});
}
#[test]
fn rotate_in_region_down() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(4), Column(3)), Side::Right);
selection.update(Point::new(Line(1), Column(1)), Side::Left);
selection = selection.rotate(&size, &(Line(1)..Line(size.0 as i32 - 1)), -5).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(6), Column(1)),
end: Point::new(Line(8), size.last_column()),
is_block: false,
});
}
#[test]
fn rotate_in_region_up_block() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Block, Point::new(Line(7), Column(3)), Side::Right);
selection.update(Point::new(Line(4), Column(1)), Side::Right);
selection = selection.rotate(&size, &(Line(1)..Line(size.0 as i32 - 1)), 4).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(1), Column(2)),
end: Point::new(Line(3), Column(3)),
is_block: true,
});
}
#[test]
fn range_intersection() {
let mut selection =
Selection::new(SelectionType::Lines, Point::new(Line(3), Column(1)), Side::Left);
selection.update(Point::new(Line(6), Column(1)), Side::Right);
assert!(selection.intersects_range(..));
assert!(selection.intersects_range(Line(2)..));
assert!(selection.intersects_range(Line(2)..=Line(4)));
assert!(selection.intersects_range(Line(2)..=Line(7)));
assert!(selection.intersects_range(Line(4)..=Line(5)));
assert!(selection.intersects_range(Line(5)..Line(8)));
assert!(!selection.intersects_range(..=Line(2)));
assert!(!selection.intersects_range(Line(7)..=Line(8)));
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/event_loop.rs | alacritty_terminal/src/event_loop.rs | //! The main event loop which performs I/O on the pseudoterminal.
use std::borrow::Cow;
use std::collections::VecDeque;
use std::fmt::{self, Display, Formatter};
use std::fs::File;
use std::io::{self, ErrorKind, Read, Write};
use std::num::NonZeroUsize;
use std::sync::Arc;
use std::sync::mpsc::{self, Receiver, Sender, TryRecvError};
use std::thread::JoinHandle;
use std::time::Instant;
use log::error;
use polling::{Event as PollingEvent, Events, PollMode};
use crate::event::{self, Event, EventListener, WindowSize};
use crate::sync::FairMutex;
use crate::term::Term;
use crate::{thread, tty};
use vte::ansi;
/// Max bytes to read from the PTY before forced terminal synchronization.
pub(crate) const READ_BUFFER_SIZE: usize = 0x10_0000;
/// Max bytes to read from the PTY while the terminal is locked.
const MAX_LOCKED_READ: usize = u16::MAX as usize;
/// Messages that may be sent to the `EventLoop`.
#[derive(Debug)]
pub enum Msg {
/// Data that should be written to the PTY.
Input(Cow<'static, [u8]>),
/// Indicates that the `EventLoop` should shut down, as Alacritty is shutting down.
Shutdown,
/// Instruction to resize the PTY.
Resize(WindowSize),
}
/// The main event loop.
///
/// Handles all the PTY I/O and runs the PTY parser which updates terminal
/// state.
pub struct EventLoop<T: tty::EventedPty, U: EventListener> {
poll: Arc<polling::Poller>,
pty: T,
rx: PeekableReceiver<Msg>,
tx: Sender<Msg>,
terminal: Arc<FairMutex<Term<U>>>,
event_proxy: U,
drain_on_exit: bool,
ref_test: bool,
}
impl<T, U> EventLoop<T, U>
where
T: tty::EventedPty + event::OnResize + Send + 'static,
U: EventListener + Send + 'static,
{
/// Create a new event loop.
pub fn new(
terminal: Arc<FairMutex<Term<U>>>,
event_proxy: U,
pty: T,
drain_on_exit: bool,
ref_test: bool,
) -> io::Result<EventLoop<T, U>> {
let (tx, rx) = mpsc::channel();
let poll = polling::Poller::new()?.into();
Ok(EventLoop {
poll,
pty,
tx,
rx: PeekableReceiver::new(rx),
terminal,
event_proxy,
drain_on_exit,
ref_test,
})
}
pub fn channel(&self) -> EventLoopSender {
EventLoopSender { sender: self.tx.clone(), poller: self.poll.clone() }
}
/// Drain the channel.
///
/// Returns `false` when a shutdown message was received.
fn drain_recv_channel(&mut self, state: &mut State) -> bool {
while let Some(msg) = self.rx.recv() {
match msg {
Msg::Input(input) => state.write_list.push_back(input),
Msg::Resize(window_size) => self.pty.on_resize(window_size),
Msg::Shutdown => return false,
}
}
true
}
#[inline]
fn pty_read<X>(
&mut self,
state: &mut State,
buf: &mut [u8],
mut writer: Option<&mut X>,
) -> io::Result<()>
where
X: Write,
{
let mut unprocessed = 0;
let mut processed = 0;
// Reserve the next terminal lock for PTY reading.
let _terminal_lease = Some(self.terminal.lease());
let mut terminal = None;
loop {
// Read from the PTY.
match self.pty.reader().read(&mut buf[unprocessed..]) {
// This is received on Windows/macOS when no more data is readable from the PTY.
Ok(0) if unprocessed == 0 => break,
Ok(got) => unprocessed += got,
Err(err) => match err.kind() {
ErrorKind::Interrupted | ErrorKind::WouldBlock => {
// Go back to mio if we're caught up on parsing and the PTY would block.
if unprocessed == 0 {
break;
}
},
_ => return Err(err),
},
}
// Attempt to lock the terminal.
let terminal = match &mut terminal {
Some(terminal) => terminal,
None => terminal.insert(match self.terminal.try_lock_unfair() {
// Force block if we are at the buffer size limit.
None if unprocessed >= READ_BUFFER_SIZE => self.terminal.lock_unfair(),
None => continue,
Some(terminal) => terminal,
}),
};
// Write a copy of the bytes to the ref test file.
if let Some(writer) = &mut writer {
writer.write_all(&buf[..unprocessed]).unwrap();
}
// Parse the incoming bytes.
state.parser.advance(&mut **terminal, &buf[..unprocessed]);
processed += unprocessed;
unprocessed = 0;
// Assure we're not blocking the terminal too long unnecessarily.
if processed >= MAX_LOCKED_READ {
break;
}
}
// Queue terminal redraw unless all processed bytes were synchronized.
if state.parser.sync_bytes_count() < processed && processed > 0 {
self.event_proxy.send_event(Event::Wakeup);
}
Ok(())
}
#[inline]
fn pty_write(&mut self, state: &mut State) -> io::Result<()> {
state.ensure_next();
'write_many: while let Some(mut current) = state.take_current() {
'write_one: loop {
match self.pty.writer().write(current.remaining_bytes()) {
Ok(0) => {
state.set_current(Some(current));
break 'write_many;
},
Ok(n) => {
current.advance(n);
if current.finished() {
state.goto_next();
break 'write_one;
}
},
Err(err) => {
state.set_current(Some(current));
match err.kind() {
ErrorKind::Interrupted | ErrorKind::WouldBlock => break 'write_many,
_ => return Err(err),
}
},
}
}
}
Ok(())
}
pub fn spawn(mut self) -> JoinHandle<(Self, State)> {
thread::spawn_named("PTY reader", move || {
let mut state = State::default();
let mut buf = [0u8; READ_BUFFER_SIZE];
let poll_opts = PollMode::Level;
let mut interest = PollingEvent::readable(0);
// Register TTY through EventedRW interface.
if let Err(err) = unsafe { self.pty.register(&self.poll, interest, poll_opts) } {
error!("Event loop registration error: {err}");
return (self, state);
}
let mut events = Events::with_capacity(NonZeroUsize::new(1024).unwrap());
let mut pipe = if self.ref_test {
Some(File::create("./alacritty.recording").expect("create alacritty recording"))
} else {
None
};
'event_loop: loop {
// Wakeup the event loop when a synchronized update timeout was reached.
let handler = state.parser.sync_timeout();
let timeout =
handler.sync_timeout().map(|st| st.saturating_duration_since(Instant::now()));
events.clear();
if let Err(err) = self.poll.wait(&mut events, timeout) {
match err.kind() {
ErrorKind::Interrupted => continue,
_ => {
error!("Event loop polling error: {err}");
break 'event_loop;
},
}
}
// Handle synchronized update timeout.
if events.is_empty() && self.rx.peek().is_none() {
state.parser.stop_sync(&mut *self.terminal.lock());
self.event_proxy.send_event(Event::Wakeup);
continue;
}
// Handle channel events, if there are any.
if !self.drain_recv_channel(&mut state) {
break;
}
for event in events.iter() {
match event.key {
tty::PTY_CHILD_EVENT_TOKEN => {
if let Some(tty::ChildEvent::Exited(code)) = self.pty.next_child_event()
{
if let Some(code) = code {
self.event_proxy.send_event(Event::ChildExit(code));
}
if self.drain_on_exit {
let _ = self.pty_read(&mut state, &mut buf, pipe.as_mut());
}
self.terminal.lock().exit();
self.event_proxy.send_event(Event::Wakeup);
break 'event_loop;
}
},
tty::PTY_READ_WRITE_TOKEN => {
if event.is_interrupt() {
// Don't try to do I/O on a dead PTY.
continue;
}
if event.readable {
if let Err(err) = self.pty_read(&mut state, &mut buf, pipe.as_mut())
{
// On Linux, a `read` on the master side of a PTY can fail
// with `EIO` if the client side hangs up. In that case,
// just loop back round for the inevitable `Exited` event.
// This sucks, but checking the process is either racy or
// blocking.
#[cfg(target_os = "linux")]
if err.raw_os_error() == Some(libc::EIO) {
continue;
}
error!("Error reading from PTY in event loop: {err}");
break 'event_loop;
}
}
if event.writable {
if let Err(err) = self.pty_write(&mut state) {
error!("Error writing to PTY in event loop: {err}");
break 'event_loop;
}
}
},
_ => (),
}
}
// Register write interest if necessary.
let needs_write = state.needs_write();
if needs_write != interest.writable {
interest.writable = needs_write;
// Re-register with new interest.
self.pty.reregister(&self.poll, interest, poll_opts).unwrap();
}
}
// The evented instances are not dropped here so deregister them explicitly.
let _ = self.pty.deregister(&self.poll);
(self, state)
})
}
}
/// Helper type which tracks how much of a buffer has been written.
struct Writing {
source: Cow<'static, [u8]>,
written: usize,
}
pub struct Notifier(pub EventLoopSender);
impl event::Notify for Notifier {
fn notify<B>(&self, bytes: B)
where
B: Into<Cow<'static, [u8]>>,
{
let bytes = bytes.into();
// Terminal hangs if we send 0 bytes through.
if bytes.is_empty() {
return;
}
let _ = self.0.send(Msg::Input(bytes));
}
}
impl event::OnResize for Notifier {
fn on_resize(&mut self, window_size: WindowSize) {
let _ = self.0.send(Msg::Resize(window_size));
}
}
#[derive(Debug)]
pub enum EventLoopSendError {
/// Error polling the event loop.
Io(io::Error),
/// Error sending a message to the event loop.
Send(mpsc::SendError<Msg>),
}
impl Display for EventLoopSendError {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
EventLoopSendError::Io(err) => err.fmt(f),
EventLoopSendError::Send(err) => err.fmt(f),
}
}
}
impl std::error::Error for EventLoopSendError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
EventLoopSendError::Io(err) => err.source(),
EventLoopSendError::Send(err) => err.source(),
}
}
}
#[derive(Clone)]
pub struct EventLoopSender {
sender: Sender<Msg>,
poller: Arc<polling::Poller>,
}
impl EventLoopSender {
pub fn send(&self, msg: Msg) -> Result<(), EventLoopSendError> {
self.sender.send(msg).map_err(EventLoopSendError::Send)?;
self.poller.notify().map_err(EventLoopSendError::Io)
}
}
/// All of the mutable state needed to run the event loop.
///
/// Contains list of items to write, current write state, etc. Anything that
/// would otherwise be mutated on the `EventLoop` goes here.
#[derive(Default)]
pub struct State {
write_list: VecDeque<Cow<'static, [u8]>>,
writing: Option<Writing>,
parser: ansi::Processor,
}
impl State {
#[inline]
fn ensure_next(&mut self) {
if self.writing.is_none() {
self.goto_next();
}
}
#[inline]
fn goto_next(&mut self) {
self.writing = self.write_list.pop_front().map(Writing::new);
}
#[inline]
fn take_current(&mut self) -> Option<Writing> {
self.writing.take()
}
#[inline]
fn needs_write(&self) -> bool {
self.writing.is_some() || !self.write_list.is_empty()
}
#[inline]
fn set_current(&mut self, new: Option<Writing>) {
self.writing = new;
}
}
impl Writing {
#[inline]
fn new(c: Cow<'static, [u8]>) -> Writing {
Writing { source: c, written: 0 }
}
#[inline]
fn advance(&mut self, n: usize) {
self.written += n;
}
#[inline]
fn remaining_bytes(&self) -> &[u8] {
&self.source[self.written..]
}
#[inline]
fn finished(&self) -> bool {
self.written >= self.source.len()
}
}
struct PeekableReceiver<T> {
rx: Receiver<T>,
peeked: Option<T>,
}
impl<T> PeekableReceiver<T> {
fn new(rx: Receiver<T>) -> Self {
Self { rx, peeked: None }
}
fn peek(&mut self) -> Option<&T> {
if self.peeked.is_none() {
self.peeked = self.rx.try_recv().ok();
}
self.peeked.as_ref()
}
fn recv(&mut self) -> Option<T> {
if self.peeked.is_some() {
self.peeked.take()
} else {
match self.rx.try_recv() {
Err(TryRecvError::Disconnected) => panic!("event loop channel closed"),
res => res.ok(),
}
}
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/thread.rs | alacritty_terminal/src/thread.rs | use std::thread::{Builder, JoinHandle};
/// Like `thread::spawn`, but with a `name` argument.
pub fn spawn_named<F, T, S>(name: S, f: F) -> JoinHandle<T>
where
F: FnOnce() -> T + Send + 'static,
T: Send + 'static,
S: Into<String>,
{
Builder::new().name(name.into()).spawn(f).expect("thread spawn works")
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/term/search.rs | alacritty_terminal/src/term/search.rs | use std::cmp::max;
use std::error::Error;
use std::mem;
use std::ops::RangeInclusive;
use log::{debug, warn};
pub use regex_automata::hybrid::BuildError;
use regex_automata::hybrid::dfa::{Builder, Cache, Config, DFA};
use regex_automata::nfa::thompson::Config as ThompsonConfig;
use regex_automata::util::syntax::Config as SyntaxConfig;
use regex_automata::{Anchored, Input, MatchKind};
use crate::grid::{BidirectionalIterator, Dimensions, GridIterator, Indexed};
use crate::index::{Boundary, Column, Direction, Point, Side};
use crate::term::Term;
use crate::term::cell::{Cell, Flags};
/// Used to match equal brackets, when performing a bracket-pair selection.
const BRACKET_PAIRS: [(char, char); 4] = [('(', ')'), ('[', ']'), ('{', '}'), ('<', '>')];
pub type Match = RangeInclusive<Point>;
/// Terminal regex search state.
#[derive(Clone, Debug)]
pub struct RegexSearch {
left_fdfa: LazyDfa,
left_rdfa: LazyDfa,
right_rdfa: LazyDfa,
right_fdfa: LazyDfa,
}
impl RegexSearch {
/// Build the forward and backward search DFAs.
pub fn new(search: &str) -> Result<RegexSearch, Box<BuildError>> {
// Setup configs for both DFA directions.
//
// Bounds are based on Regex's meta engine:
// https://github.com/rust-lang/regex/blob/061ee815ef2c44101dba7b0b124600fcb03c1912/regex-automata/src/meta/wrappers.rs#L581-L599
let has_uppercase = search.chars().any(|c| c.is_uppercase());
let syntax_config = SyntaxConfig::new().case_insensitive(!has_uppercase);
let config =
Config::new().minimum_cache_clear_count(Some(3)).minimum_bytes_per_state(Some(10));
let max_size = config.get_cache_capacity();
let thompson_config = ThompsonConfig::new().nfa_size_limit(Some(max_size));
// Create DFAs to find start/end in right-to-left search.
let left_rdfa = LazyDfa::new(
search,
config.clone(),
syntax_config,
thompson_config.clone(),
Direction::Right,
true,
)?;
let has_empty = left_rdfa.dfa.get_nfa().has_empty();
let left_fdfa = LazyDfa::new(
search,
config.clone(),
syntax_config,
thompson_config.clone(),
Direction::Left,
has_empty,
)?;
// Create DFAs to find start/end in left-to-right search.
let right_fdfa = LazyDfa::new(
search,
config.clone(),
syntax_config,
thompson_config.clone(),
Direction::Right,
has_empty,
)?;
let right_rdfa =
LazyDfa::new(search, config, syntax_config, thompson_config, Direction::Left, true)?;
Ok(RegexSearch { left_fdfa, left_rdfa, right_fdfa, right_rdfa })
}
}
/// Runtime-evaluated DFA.
#[derive(Clone, Debug)]
struct LazyDfa {
dfa: DFA,
cache: Cache,
direction: Direction,
match_all: bool,
}
impl LazyDfa {
fn new(
search: &str,
mut config: Config,
syntax: SyntaxConfig,
mut thompson: ThompsonConfig,
direction: Direction,
match_all: bool,
) -> Result<Self, Box<BuildError>> {
thompson = match direction {
Direction::Left => thompson.reverse(true),
Direction::Right => thompson.reverse(false),
};
config = if match_all {
config.match_kind(MatchKind::All)
} else {
config.match_kind(MatchKind::LeftmostFirst)
};
// Create the DFA.
let dfa =
Builder::new().configure(config).syntax(syntax).thompson(thompson).build(search)?;
let cache = dfa.create_cache();
Ok(Self { direction, cache, dfa, match_all })
}
}
impl<T> Term<T> {
/// Get next search match in the specified direction.
pub fn search_next(
&self,
regex: &mut RegexSearch,
mut origin: Point,
direction: Direction,
side: Side,
mut max_lines: Option<usize>,
) -> Option<Match> {
origin = self.expand_wide(origin, direction);
max_lines = max_lines.filter(|max_lines| max_lines + 1 < self.total_lines());
match direction {
Direction::Right => self.next_match_right(regex, origin, side, max_lines),
Direction::Left => self.next_match_left(regex, origin, side, max_lines),
}
}
/// Find the next match to the right of the origin.
fn next_match_right(
&self,
regex: &mut RegexSearch,
origin: Point,
side: Side,
max_lines: Option<usize>,
) -> Option<Match> {
let start = self.line_search_left(origin);
let mut end = start;
// Limit maximum number of lines searched.
end = match max_lines {
Some(max_lines) => {
let line = (start.line + max_lines).grid_clamp(self, Boundary::None);
Point::new(line, self.last_column())
},
_ => end.sub(self, Boundary::None, 1),
};
let mut regex_iter = RegexIter::new(start, end, Direction::Right, self, regex).peekable();
// Check if there's any match at all.
let first_match = regex_iter.peek()?.clone();
let regex_match = regex_iter
.find(|regex_match| {
let match_point = Self::match_side(regex_match, side);
// If the match's point is beyond the origin, we're done.
match_point.line < start.line
|| match_point.line > origin.line
|| (match_point.line == origin.line && match_point.column >= origin.column)
})
.unwrap_or(first_match);
Some(regex_match)
}
/// Find the next match to the left of the origin.
fn next_match_left(
&self,
regex: &mut RegexSearch,
origin: Point,
side: Side,
max_lines: Option<usize>,
) -> Option<Match> {
let start = self.line_search_right(origin);
let mut end = start;
// Limit maximum number of lines searched.
end = match max_lines {
Some(max_lines) => {
let line = (start.line - max_lines).grid_clamp(self, Boundary::None);
Point::new(line, Column(0))
},
_ => end.add(self, Boundary::None, 1),
};
let mut regex_iter = RegexIter::new(start, end, Direction::Left, self, regex).peekable();
// Check if there's any match at all.
let first_match = regex_iter.peek()?.clone();
let regex_match = regex_iter
.find(|regex_match| {
let match_point = Self::match_side(regex_match, side);
// If the match's point is beyond the origin, we're done.
match_point.line > start.line
|| match_point.line < origin.line
|| (match_point.line == origin.line && match_point.column <= origin.column)
})
.unwrap_or(first_match);
Some(regex_match)
}
/// Get the side of a match.
fn match_side(regex_match: &Match, side: Side) -> Point {
match side {
Side::Right => *regex_match.end(),
Side::Left => *regex_match.start(),
}
}
/// Find the next regex match to the left of the origin point.
///
/// The origin is always included in the regex.
pub fn regex_search_left(
&self,
regex: &mut RegexSearch,
start: Point,
end: Point,
) -> Option<Match> {
// Find start and end of match.
let match_start = self.regex_search(start, end, &mut regex.left_fdfa)?;
let match_end = self.regex_search(match_start, start, &mut regex.left_rdfa)?;
Some(match_start..=match_end)
}
/// Find the next regex match to the right of the origin point.
///
/// The origin is always included in the regex.
pub fn regex_search_right(
&self,
regex: &mut RegexSearch,
start: Point,
end: Point,
) -> Option<Match> {
// Find start and end of match.
let match_end = self.regex_search(start, end, &mut regex.right_fdfa)?;
let match_start = self.regex_search(match_end, start, &mut regex.right_rdfa)?;
Some(match_start..=match_end)
}
/// Find the next regex match.
///
/// This will always return the side of the first match which is farthest from the start point.
fn regex_search(&self, start: Point, end: Point, regex: &mut LazyDfa) -> Option<Point> {
match self.regex_search_internal(start, end, regex) {
Ok(regex_match) => regex_match,
Err(err) => {
warn!("Regex exceeded complexity limit");
debug!(" {err}");
None
},
}
}
/// Find the next regex match.
///
/// Feeds the terminal grid to a lazy DFA byte by byte, walking the grid in the
/// DFA's direction and wrapping around the scrollback buffer when the iterator
/// is exhausted.
///
/// Returns `Ok(None)` when no match was found, and `Err` when the DFA reports a
/// cache/complexity failure. To automatically log regex complexity errors, use
/// [`Self::regex_search`] instead.
fn regex_search_internal(
    &self,
    start: Point,
    end: Point,
    regex: &mut LazyDfa,
) -> Result<Option<Point>, Box<dyn Error>> {
    let topmost_line = self.topmost_line();
    let screen_lines = self.screen_lines() as i32;
    let last_column = self.last_column();

    // Advance the iterator.
    let next = match regex.direction {
        Direction::Right => GridIterator::next,
        Direction::Left => GridIterator::prev,
    };

    // Get start state for the DFA.
    let regex_anchored = if regex.match_all { Anchored::Yes } else { Anchored::No };
    let input = Input::new(&[]).anchored(regex_anchored);
    let mut state = regex.dfa.start_state_forward(&mut regex.cache, &input).unwrap();

    let mut iter = self.grid.iter_from(start);
    let mut regex_match = None;
    let mut done = false;

    let mut cell = iter.cell();
    self.skip_fullwidth(&mut iter, &mut cell, regex.direction);
    let mut c = cell.c;
    let mut last_wrapped = iter.cell().flags.contains(Flags::WRAPLINE);

    let mut point = iter.point();
    let mut last_point = point;
    let mut consumed_bytes = 0;

    // Reset the regex state to restart the search.
    macro_rules! reset_state {
        () => {{
            state = regex.dfa.start_state_forward(&mut regex.cache, &input)?;
            consumed_bytes = 0;
            regex_match = None;
        }};
    }

    'outer: loop {
        // Convert char to array of bytes.
        let mut buf = [0; 4];
        let utf8_len = c.encode_utf8(&mut buf).len();

        // Pass char to DFA as individual bytes.
        for i in 0..utf8_len {
            // Inverse byte order when going left.
            let byte = match regex.direction {
                Direction::Right => buf[i],
                Direction::Left => buf[utf8_len - i - 1],
            };
            state = regex.dfa.next_state(&mut regex.cache, state, byte)?;
            consumed_bytes += 1;

            if i == 0 && state.is_match() {
                // Matches require one additional BYTE of lookahead, so we check the match state
                // for the first byte of every new character to determine if the last character
                // was a match.
                regex_match = Some(last_point);
            } else if state.is_dead() {
                if consumed_bytes == 2 {
                    // Reset search if we found an empty match.
                    //
                    // With an unanchored search, a dead state only occurs after the end of a
                    // match has been found. While we want to abort after the first match has
                    // ended, we don't want empty matches since we cannot highlight them.
                    //
                    // So once we encounter an empty match, we reset our parser state and clear
                    // the match, effectively starting a new search one character farther than
                    // before.
                    //
                    // An empty match requires consuming `2` bytes, since the first byte will
                    // report the match for the empty string, while the second byte then
                    // reports the dead state indicating the first character isn't part of the
                    // match.
                    reset_state!();

                    // Retry this character if first byte caused failure.
                    //
                    // After finding an empty match, we want to advance the search start by one
                    // character. So if the first character has multiple bytes and the dead
                    // state isn't reached at `i == 0`, then we continue with the rest of the
                    // loop to advance the parser by one character.
                    if i == 0 {
                        continue 'outer;
                    }
                } else {
                    // Abort on dead state.
                    break 'outer;
                }
            }
        }

        // Stop once we've reached the target point.
        if point == end || done {
            // When reaching the end-of-input, we need to notify the parser that no look-ahead
            // is possible and check for state changes.
            state = regex.dfa.next_eoi_state(&mut regex.cache, state)?;
            if state.is_match() {
                regex_match = Some(point);
            } else if state.is_dead() && consumed_bytes == 1 {
                // Ignore empty matches.
                regex_match = None;
            }

            break;
        }

        // Advance grid cell iterator.
        let mut cell = match next(&mut iter) {
            Some(Indexed { cell, .. }) => cell,
            None => {
                // Wrap around to other end of the scrollback buffer.
                let line = topmost_line - point.line + screen_lines - 1;
                let start = Point::new(line, last_column - point.column);
                iter = self.grid.iter_from(start);
                iter.cell()
            },
        };

        // Check for completion before potentially skipping over fullwidth characters.
        done = iter.point() == end;

        self.skip_fullwidth(&mut iter, &mut cell, regex.direction);

        c = cell.c;
        let wrapped = iter.cell().flags.contains(Flags::WRAPLINE);

        last_point = mem::replace(&mut point, iter.point());

        // Handle linebreaks: a hard (non-wrapped) line boundary ends the searchable text,
        // in either search direction.
        if (last_point.column == last_column && point.column == Column(0) && !last_wrapped)
            || (last_point.column == Column(0) && point.column == last_column && !wrapped)
        {
            // When reaching the end-of-input, we need to notify the parser that no
            // look-ahead is possible and check if the current state is still a match.
            state = regex.dfa.next_eoi_state(&mut regex.cache, state)?;
            if state.is_match() {
                regex_match = Some(last_point);
            }

            match regex_match {
                // Stop if we found a non-empty match before the linebreak.
                Some(_) if (!state.is_dead() || consumed_bytes > 1) && consumed_bytes != 0 => {
                    break;
                },
                _ => reset_state!(),
            }
        }

        last_wrapped = wrapped;
    }

    Ok(regex_match)
}
/// Advance a grid iterator over fullwidth characters.
///
/// Wide characters occupy two cells (the character plus a spacer cell); this
/// moves `iter` so both cells are consumed as one unit and updates `cell` to
/// point at the cell whose `c` should be fed to the search.
fn skip_fullwidth<'a>(
    &self,
    iter: &'a mut GridIterator<'_, Cell>,
    cell: &mut &'a Cell,
    direction: Direction,
) {
    match direction {
        // In the alternate screen buffer there might not be a wide char spacer after a wide
        // char, so we only advance the iterator when the wide char is not in the last column.
        Direction::Right
            if cell.flags.contains(Flags::WIDE_CHAR)
                && iter.point().column < self.last_column() =>
        {
            iter.next();
        },
        // A leading spacer sits at the end of a line whose wide char wrapped; skip over the
        // spacer onto the wide char, then over the wide char's trailing spacer.
        Direction::Right if cell.flags.contains(Flags::LEADING_WIDE_CHAR_SPACER) => {
            if let Some(Indexed { cell: new_cell, .. }) = iter.next() {
                *cell = new_cell;
            }
            iter.next();
        },
        // Going left, step from the trailing spacer back onto the wide char itself, and skip
        // an additional leading spacer if the wide char was wrapped across lines.
        Direction::Left if cell.flags.contains(Flags::WIDE_CHAR_SPACER) => {
            if let Some(Indexed { cell: new_cell, .. }) = iter.prev() {
                *cell = new_cell;
            }

            let prev = iter.point().sub(self, Boundary::Grid, 1);
            if self.grid[prev].flags.contains(Flags::LEADING_WIDE_CHAR_SPACER) {
                iter.prev();
            }
        },
        _ => (),
    }
}
/// Find next matching bracket.
pub fn bracket_search(&self, point: Point) -> Option<Point> {
let start_char = self.grid[point].c;
// Find the matching bracket we're looking for
let (forward, end_char) = BRACKET_PAIRS.iter().find_map(|(open, close)| {
if open == &start_char {
Some((true, *close))
} else if close == &start_char {
Some((false, *open))
} else {
None
}
})?;
let mut iter = self.grid.iter_from(point);
// For every character match that equals the starting bracket, we
// ignore one bracket of the opposite type.
let mut skip_pairs = 0;
loop {
// Check the next cell
let cell = if forward { iter.next() } else { iter.prev() };
// Break if there are no more cells
let cell = match cell {
Some(cell) => cell,
None => break,
};
// Check if the bracket matches
if cell.c == end_char && skip_pairs == 0 {
return Some(cell.point);
} else if cell.c == start_char {
skip_pairs += 1;
} else if cell.c == end_char {
skip_pairs -= 1;
}
}
None
}
/// Find left end of semantic block.
#[must_use]
pub fn semantic_search_left(&self, point: Point) -> Point {
    match self.inline_search_left(point, self.semantic_escape_chars()) {
        // If we found a match, reverse for at least one cell, skipping over wide cell spacers.
        Ok(point) => {
            let wide_spacer = Flags::WIDE_CHAR_SPACER | Flags::LEADING_WIDE_CHAR_SPACER;
            self.grid
                .iter_from(point)
                .find(|cell| !cell.flags.intersects(wide_spacer))
                .map_or(point, |cell| cell.point)
        },
        // No escape char found: the search stop point is the block's left end.
        Err(point) => point,
    }
}
/// Find right end of semantic block.
#[must_use]
pub fn semantic_search_right(&self, point: Point) -> Point {
    // On a hit, step one cell back so the escape character itself is excluded;
    // otherwise the search's stop point already marks the block end.
    self.inline_search_right(point, self.semantic_escape_chars())
        .map(|found| self.grid.iter_from(found).prev().map_or(found, |cell| cell.point))
        .unwrap_or_else(|stop| stop)
}
/// Searching to the left, find the next character contained in `needles`.
///
/// Returns `Ok` with the needle's position, or `Err` with the last point
/// visited before the search stopped (line start or grid boundary).
pub fn inline_search_left(&self, mut point: Point, needles: &str) -> Result<Point, Point> {
    // Limit the starting point to the last line in the history
    point.line = max(point.line, self.topmost_line());

    let mut iter = self.grid.iter_from(point);
    let last_column = self.columns() - 1;
    let wide_spacer = Flags::WIDE_CHAR_SPACER | Flags::LEADING_WIDE_CHAR_SPACER;

    while let Some(cell) = iter.prev() {
        // Stop at a hard linebreak: a cell in the last column without WRAPLINE
        // means the line above does not continue into the current one.
        if cell.point.column == last_column && !cell.flags.contains(Flags::WRAPLINE) {
            break;
        }

        point = cell.point;

        // Wide-char spacer cells carry no character of their own, so skip them.
        if !cell.flags.intersects(wide_spacer) && needles.contains(cell.c) {
            return Ok(point);
        }
    }

    Err(point)
}
/// Searching to the right, find the next character contained in `needles`.
///
/// Returns `Ok` with the needle's position, or `Err` with the last point
/// visited before the search stopped (line end or grid boundary).
pub fn inline_search_right(&self, mut point: Point, needles: &str) -> Result<Point, Point> {
    // Limit the starting point to the last line in the history
    point.line = max(point.line, self.topmost_line());

    let wide_spacer = Flags::WIDE_CHAR_SPACER | Flags::LEADING_WIDE_CHAR_SPACER;
    let last_column = self.columns() - 1;

    // Immediately stop if start point in on line break.
    if point.column == last_column && !self.grid[point].flags.contains(Flags::WRAPLINE) {
        return Err(point);
    }

    for cell in self.grid.iter_from(point) {
        point = cell.point;

        // Wide-char spacer cells carry no character of their own, so skip them.
        if !cell.flags.intersects(wide_spacer) && needles.contains(cell.c) {
            return Ok(point);
        }

        // Stop at a hard (non-wrapped) end of line.
        if point.column == last_column && !cell.flags.contains(Flags::WRAPLINE) {
            break;
        }
    }

    Err(point)
}
/// Find the beginning of the current line across linewraps.
pub fn line_search_left(&self, mut point: Point) -> Point {
    let topmost_line = self.topmost_line();
    let last_column = self.last_column();

    // Walk upwards for as long as the line above wraps into the current one.
    loop {
        if point.line <= topmost_line
            || !self.grid[point.line - 1i32][last_column].flags.contains(Flags::WRAPLINE)
        {
            break;
        }
        point.line -= 1;
    }

    point.column = Column(0);

    point
}
/// Find the end of the current line across linewraps.
pub fn line_search_right(&self, mut point: Point) -> Point {
    let last_column = self.last_column();
    let screen_lines = self.screen_lines();

    // Walk downwards for as long as the current line wraps into the next one.
    loop {
        if point.line + 1 >= screen_lines
            || !self.grid[point.line][last_column].flags.contains(Flags::WRAPLINE)
        {
            break;
        }
        point.line += 1;
    }

    point.column = last_column;

    point
}
}
/// Iterator over regex matches.
pub struct RegexIter<'a, T> {
    // Origin of the next search.
    point: Point,
    // Point at which iteration stops.
    end: Point,
    // Direction the iterator advances in.
    direction: Direction,
    // Compiled search automata, mutated as the DFA caches fill.
    regex: &'a mut RegexSearch,
    // Terminal whose grid is searched.
    term: &'a Term<T>,
    // Set once the end point has been searched, terminating iteration.
    done: bool,
}
impl<'a, T> RegexIter<'a, T> {
    /// Create an iterator over all matches inside `start..=end`.
    pub fn new(
        start: Point,
        end: Point,
        direction: Direction,
        term: &'a Term<T>,
        regex: &'a mut RegexSearch,
    ) -> Self {
        Self { point: start, end, direction, regex, term, done: false }
    }

    /// Skip one cell, advancing the origin point to the next one.
    fn skip(&mut self) {
        // Jump over the full width of wide characters before stepping.
        self.point = self.term.expand_wide(self.point, self.direction);

        self.point = if let Direction::Left = self.direction {
            self.point.sub(self.term, Boundary::None, 1)
        } else {
            self.point.add(self.term, Boundary::None, 1)
        };
    }

    /// Get the next match in the specified direction.
    fn next_match(&mut self) -> Option<Match> {
        if let Direction::Left = self.direction {
            self.term.regex_search_left(self.regex, self.point, self.end)
        } else {
            self.term.regex_search_right(self.regex, self.point, self.end)
        }
    }
}
impl<T> Iterator for RegexIter<'_, T> {
    type Item = Match;

    /// Yield the next regex match, or `None` once the search range is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        if self.done {
            return None;
        }

        // Since the end itself might be a single cell match, we search one more time.
        if self.point == self.end {
            self.done = true;
        }

        let regex_match = self.next_match()?;

        self.point = *regex_match.end();
        if self.point == self.end {
            // Stop when the match terminates right on the end limit.
            self.done = true;
        } else {
            // Move the new search origin past the match.
            self.skip();
        }

        Some(regex_match)
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::index::{Column, Line};
use crate::term::Config;
use crate::term::test::{TermSize, mock_term};
// Basic forward/backward regex search behavior.

#[test]
fn regex_right() {
    #[rustfmt::skip]
    let term = mock_term("\
        testing66\r\n\
        Alacritty\n\
        123\r\n\
        Alacritty\r\n\
        123\
    ");

    // Check regex across wrapped and unwrapped lines.
    let mut regex = RegexSearch::new("Ala.*123").unwrap();
    let start = Point::new(Line(1), Column(0));
    let end = Point::new(Line(4), Column(2));
    let match_start = Point::new(Line(1), Column(0));
    let match_end = Point::new(Line(2), Column(2));
    assert_eq!(term.regex_search_right(&mut regex, start, end), Some(match_start..=match_end));
}

#[test]
fn regex_left() {
    #[rustfmt::skip]
    let term = mock_term("\
        testing66\r\n\
        Alacritty\n\
        123\r\n\
        Alacritty\r\n\
        123\
    ");

    // Check regex across wrapped and unwrapped lines.
    let mut regex = RegexSearch::new("Ala.*123").unwrap();
    let start = Point::new(Line(4), Column(2));
    let end = Point::new(Line(1), Column(0));
    let match_start = Point::new(Line(1), Column(0));
    let match_end = Point::new(Line(2), Column(2));
    assert_eq!(term.regex_search_left(&mut regex, start, end), Some(match_start..=match_end));
}

#[test]
fn nested_regex() {
    #[rustfmt::skip]
    let term = mock_term("\
        Ala -> Alacritty -> critty\r\n\
        critty\
    ");

    // Greedy stopped at linebreak.
    let mut regex = RegexSearch::new("Ala.*critty").unwrap();
    let start = Point::new(Line(0), Column(0));
    let end = Point::new(Line(0), Column(25));
    assert_eq!(term.regex_search_right(&mut regex, start, end), Some(start..=end));

    // Greedy stopped at dead state.
    let mut regex = RegexSearch::new("Ala[^y]*critty").unwrap();
    let start = Point::new(Line(0), Column(0));
    let end = Point::new(Line(0), Column(15));
    assert_eq!(term.regex_search_right(&mut regex, start, end), Some(start..=end));
}

#[test]
fn no_match_right() {
    #[rustfmt::skip]
    let term = mock_term("\
        first line\n\
        broken second\r\n\
        third\
    ");

    // A pattern absent from the grid must yield no match going right.
    let mut regex = RegexSearch::new("nothing").unwrap();
    let start = Point::new(Line(0), Column(0));
    let end = Point::new(Line(2), Column(4));
    assert_eq!(term.regex_search_right(&mut regex, start, end), None);
}

#[test]
fn no_match_left() {
    #[rustfmt::skip]
    let term = mock_term("\
        first line\n\
        broken second\r\n\
        third\
    ");

    // A pattern absent from the grid must yield no match going left.
    let mut regex = RegexSearch::new("nothing").unwrap();
    let start = Point::new(Line(2), Column(4));
    let end = Point::new(Line(0), Column(0));
    assert_eq!(term.regex_search_left(&mut regex, start, end), None);
}
// Boundary conditions: linebreak cells, dead-state recovery, multibyte text.

#[test]
fn include_linebreak_left() {
    #[rustfmt::skip]
    let term = mock_term("\
        testing123\r\n\
        xxx\
    ");

    // Make sure the cell containing the linebreak is not skipped.
    let mut regex = RegexSearch::new("te.*123").unwrap();
    let start = Point::new(Line(1), Column(0));
    let end = Point::new(Line(0), Column(0));
    let match_start = Point::new(Line(0), Column(0));
    let match_end = Point::new(Line(0), Column(9));
    assert_eq!(term.regex_search_left(&mut regex, start, end), Some(match_start..=match_end));
}

#[test]
fn include_linebreak_right() {
    #[rustfmt::skip]
    let term = mock_term("\
        xxx\r\n\
        testing123\
    ");

    // Make sure the cell containing the linebreak is not skipped.
    let mut regex = RegexSearch::new("te.*123").unwrap();
    let start = Point::new(Line(0), Column(2));
    let end = Point::new(Line(1), Column(9));
    let match_start = Point::new(Line(1), Column(0));
    assert_eq!(term.regex_search_right(&mut regex, start, end), Some(match_start..=end));
}

#[test]
fn skip_dead_cell() {
    let term = mock_term("alacritty");

    // Make sure dead state cell is skipped when reversing.
    let mut regex = RegexSearch::new("alacrit").unwrap();
    let start = Point::new(Line(0), Column(0));
    let end = Point::new(Line(0), Column(6));
    assert_eq!(term.regex_search_right(&mut regex, start, end), Some(start..=end));
}

#[test]
fn reverse_search_dead_recovery() {
    let term = mock_term("zooo lense");

    // Make sure the reverse DFA operates the same as a forward DFA.
    let mut regex = RegexSearch::new("zoo").unwrap();
    let start = Point::new(Line(0), Column(9));
    let end = Point::new(Line(0), Column(0));
    let match_start = Point::new(Line(0), Column(0));
    let match_end = Point::new(Line(0), Column(2));
    assert_eq!(term.regex_search_left(&mut regex, start, end), Some(match_start..=match_end));
}

#[test]
fn multibyte_unicode() {
    let term = mock_term("testвосибing");

    // Multi-byte UTF-8 characters must be fed to the DFA correctly going right.
    let mut regex = RegexSearch::new("te.*ing").unwrap();
    let start = Point::new(Line(0), Column(0));
    let end = Point::new(Line(0), Column(11));
    assert_eq!(term.regex_search_right(&mut regex, start, end), Some(start..=end));

    // And in reversed byte order going left.
    let mut regex = RegexSearch::new("te.*ing").unwrap();
    let start = Point::new(Line(0), Column(11));
    let end = Point::new(Line(0), Column(0));
    assert_eq!(term.regex_search_left(&mut regex, start, end), Some(end..=start));
}
// Fullwidth (double-cell) character handling.

#[test]
fn end_on_multibyte_unicode() {
    let term = mock_term("testвосиб");

    // A match may end exactly on a multi-byte character.
    let mut regex = RegexSearch::new("te.*и").unwrap();
    let start = Point::new(Line(0), Column(0));
    let end = Point::new(Line(0), Column(8));
    let match_end = Point::new(Line(0), Column(7));
    assert_eq!(term.regex_search_right(&mut regex, start, end), Some(start..=match_end));
}

#[test]
fn fullwidth() {
    let term = mock_term("a🦇x🦇");

    // Wide chars and their spacer cells are part of the match going right.
    let mut regex = RegexSearch::new("[^ ]*").unwrap();
    let start = Point::new(Line(0), Column(0));
    let end = Point::new(Line(0), Column(5));
    assert_eq!(term.regex_search_right(&mut regex, start, end), Some(start..=end));

    // And going left.
    let mut regex = RegexSearch::new("[^ ]*").unwrap();
    let start = Point::new(Line(0), Column(5));
    let end = Point::new(Line(0), Column(0));
    assert_eq!(term.regex_search_left(&mut regex, start, end), Some(end..=start));
}

#[test]
fn singlecell_fullwidth() {
    let term = mock_term("🦇");

    // A lone wide char matches across both of its cells.
    let mut regex = RegexSearch::new("🦇").unwrap();
    let start = Point::new(Line(0), Column(0));
    let end = Point::new(Line(0), Column(1));
    assert_eq!(term.regex_search_right(&mut regex, start, end), Some(start..=end));

    let mut regex = RegexSearch::new("🦇").unwrap();
    let start = Point::new(Line(0), Column(1));
    let end = Point::new(Line(0), Column(0));
    assert_eq!(term.regex_search_left(&mut regex, start, end), Some(end..=start));
}

#[test]
fn end_on_fullwidth() {
    let term = mock_term("jarr🦇");

    let start = Point::new(Line(0), Column(0));
    let end = Point::new(Line(0), Column(4));

    // Ensure ending without a match doesn't loop indefinitely.
    let mut regex = RegexSearch::new("x").unwrap();
    assert_eq!(term.regex_search_right(&mut regex, start, end), None);

    let mut regex = RegexSearch::new("x").unwrap();
    let match_end = Point::new(Line(0), Column(5));
    assert_eq!(term.regex_search_right(&mut regex, start, match_end), None);

    // Ensure match is captured when only partially inside range.
    let mut regex = RegexSearch::new("jarr🦇").unwrap();
    assert_eq!(term.regex_search_right(&mut regex, start, end), Some(start..=match_end));
}
#[test]
fn wrapping() {
#[rustfmt::skip]
let term = mock_term("\
xxx\r\n\
xxx\
");
let mut regex = RegexSearch::new("xxx").unwrap();
let start = Point::new(Line(0), Column(2));
let end = Point::new(Line(1), Column(2));
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | true |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/term/mod.rs | alacritty_terminal/src/term/mod.rs | //! Exports the `Term` type which is a high-level API for the Grid.
use std::ops::{Index, IndexMut, Range};
use std::sync::Arc;
use std::{cmp, mem, ptr, slice, str};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use base64::Engine;
use base64::engine::general_purpose::STANDARD as Base64;
use bitflags::bitflags;
use log::{debug, trace};
use unicode_width::UnicodeWidthChar;
use crate::event::{Event, EventListener};
use crate::grid::{Dimensions, Grid, GridIterator, Scroll};
use crate::index::{self, Boundary, Column, Direction, Line, Point, Side};
use crate::selection::{Selection, SelectionRange, SelectionType};
use crate::term::cell::{Cell, Flags, LineLength};
use crate::term::color::Colors;
use crate::vi_mode::{ViModeCursor, ViMotion};
use crate::vte::ansi::{
self, Attr, CharsetIndex, Color, CursorShape, CursorStyle, Handler, Hyperlink, KeyboardModes,
KeyboardModesApplyBehavior, NamedColor, NamedMode, NamedPrivateMode, PrivateMode, Rgb,
StandardCharset,
};
pub mod cell;
pub mod color;
pub mod search;
/// Minimum number of columns.
///
/// A minimum of 2 is necessary to hold fullwidth unicode characters.
pub const MIN_COLUMNS: usize = 2;

/// Minimum number of visible lines.
pub const MIN_SCREEN_LINES: usize = 1;

/// Max size of the window title stack.
const TITLE_STACK_MAX_DEPTH: usize = 4096;

/// Default semantic escape characters.
///
/// Characters that terminate a semantic (word-like) selection block.
pub const SEMANTIC_ESCAPE_CHARS: &str = ",│`|:\"' ()[]{}<>\t";

/// Max size of the keyboard modes.
const KEYBOARD_MODE_STACK_MAX_DEPTH: usize = TITLE_STACK_MAX_DEPTH;

/// Default tab interval, corresponding to terminfo `it` value.
const INITIAL_TABSTOPS: usize = 8;
// Terminal mode flags toggled by escape sequences; each bit mirrors a DEC
// private mode or kitty keyboard protocol flag.
bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct TermMode: u32 {
        const NONE = 0;
        const SHOW_CURSOR = 1;
        const APP_CURSOR = 1 << 1;
        const APP_KEYPAD = 1 << 2;
        const MOUSE_REPORT_CLICK = 1 << 3;
        const BRACKETED_PASTE = 1 << 4;
        const SGR_MOUSE = 1 << 5;
        const MOUSE_MOTION = 1 << 6;
        const LINE_WRAP = 1 << 7;
        const LINE_FEED_NEW_LINE = 1 << 8;
        const ORIGIN = 1 << 9;
        const INSERT = 1 << 10;
        const FOCUS_IN_OUT = 1 << 11;
        const ALT_SCREEN = 1 << 12;
        const MOUSE_DRAG = 1 << 13;
        const UTF8_MOUSE = 1 << 14;
        const ALTERNATE_SCROLL = 1 << 15;
        const VI = 1 << 16;
        const URGENCY_HINTS = 1 << 17;
        const DISAMBIGUATE_ESC_CODES = 1 << 18;
        const REPORT_EVENT_TYPES = 1 << 19;
        const REPORT_ALTERNATE_KEYS = 1 << 20;
        const REPORT_ALL_KEYS_AS_ESC = 1 << 21;
        const REPORT_ASSOCIATED_TEXT = 1 << 22;
        // Composite: any mouse reporting mode is active.
        const MOUSE_MODE = Self::MOUSE_REPORT_CLICK.bits() | Self::MOUSE_MOTION.bits() | Self::MOUSE_DRAG.bits();
        // Composite: all kitty keyboard protocol flags.
        const KITTY_KEYBOARD_PROTOCOL = Self::DISAMBIGUATE_ESC_CODES.bits()
            | Self::REPORT_EVENT_TYPES.bits()
            | Self::REPORT_ALTERNATE_KEYS.bits()
            | Self::REPORT_ALL_KEYS_AS_ESC.bits()
            | Self::REPORT_ASSOCIATED_TEXT.bits();
        const ANY = u32::MAX;
    }
}
impl From<KeyboardModes> for TermMode {
    /// Translate kitty keyboard protocol flags into their terminal mode bits.
    fn from(value: KeyboardModes) -> Self {
        // One-to-one table of keyboard mode bits and their `TermMode` equivalents.
        let pairs = [
            (KeyboardModes::DISAMBIGUATE_ESC_CODES, TermMode::DISAMBIGUATE_ESC_CODES),
            (KeyboardModes::REPORT_EVENT_TYPES, TermMode::REPORT_EVENT_TYPES),
            (KeyboardModes::REPORT_ALTERNATE_KEYS, TermMode::REPORT_ALTERNATE_KEYS),
            (KeyboardModes::REPORT_ALL_KEYS_AS_ESC, TermMode::REPORT_ALL_KEYS_AS_ESC),
            (KeyboardModes::REPORT_ASSOCIATED_TEXT, TermMode::REPORT_ASSOCIATED_TEXT),
        ];

        let mut mode = Self::empty();
        for (keyboard_mode, term_mode) in pairs {
            mode.set(term_mode, value.contains(keyboard_mode));
        }
        mode
    }
}
impl Default for TermMode {
    /// Modes enabled when a fresh terminal is created.
    fn default() -> TermMode {
        let mut mode = TermMode::SHOW_CURSOR;
        mode.insert(TermMode::LINE_WRAP);
        mode.insert(TermMode::ALTERNATE_SCROLL);
        mode.insert(TermMode::URGENCY_HINTS);
        mode
    }
}
/// Convert a terminal point to a viewport relative point.
///
/// Returns `None` when the point lies above the visible viewport.
#[inline]
pub fn point_to_viewport(display_offset: usize, point: Point) -> Option<Point<usize>> {
    match usize::try_from(point.line.0 + display_offset as i32) {
        Ok(line) => Some(Point::new(line, point.column)),
        Err(_) => None,
    }
}
/// Convert a viewport relative point to a terminal point.
#[inline]
pub fn viewport_to_point(display_offset: usize, point: Point<usize>) -> Point {
    Point::new(Line(point.line as i32) - display_offset, point.column)
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct LineDamageBounds {
    /// Damaged line number.
    pub line: usize,
    /// Leftmost damaged column.
    pub left: usize,
    /// Rightmost damaged column.
    pub right: usize,
}

impl LineDamageBounds {
    /// Damage spanning `left..=right` on `line`.
    #[inline]
    pub fn new(line: usize, left: usize, right: usize) -> Self {
        Self { line, left, right }
    }

    /// Empty damage interval for `line` (inverted bounds, see [`Self::is_damaged`]).
    #[inline]
    pub fn undamaged(line: usize, num_cols: usize) -> Self {
        Self { line, left: num_cols, right: 0 }
    }

    /// Clear this line's damage back to the undamaged state.
    #[inline]
    pub fn reset(&mut self, num_cols: usize) {
        *self = Self::undamaged(self.line, num_cols);
    }

    /// Grow the damage interval to cover at least `left..=right`.
    #[inline]
    pub fn expand(&mut self, left: usize, right: usize) {
        self.left = self.left.min(left);
        self.right = self.right.max(right);
    }

    /// Whether any cell on this line is damaged.
    #[inline]
    pub fn is_damaged(&self) -> bool {
        self.left <= self.right
    }
}
/// Terminal damage information collected since the last [`Term::reset_damage`] call.
#[derive(Debug)]
pub enum TermDamage<'a> {
    /// The entire terminal is damaged.
    Full,

    /// Iterator over damaged lines in the terminal.
    Partial(TermDamageIterator<'a>),
}
/// Iterator over the terminal's viewport damaged lines.
#[derive(Clone, Debug)]
pub struct TermDamageIterator<'a> {
    // Per-line damage records still to be visited.
    line_damage: slice::Iter<'a, LineDamageBounds>,
    // Scrollback offset added to every yielded line number.
    display_offset: usize,
}

impl<'a> TermDamageIterator<'a> {
    pub fn new(line_damage: &'a [LineDamageBounds], display_offset: usize) -> Self {
        let num_lines = line_damage.len();
        // Filter out invisible damage.
        let line_damage = &line_damage[..num_lines.saturating_sub(display_offset)];
        Self { display_offset, line_damage: line_damage.iter() }
    }
}
impl Iterator for TermDamageIterator<'_> {
    type Item = LineDamageBounds;

    /// Yield the next damaged line, translated into viewport coordinates.
    fn next(&mut self) -> Option<Self::Item> {
        // Skip undamaged lines; shift damaged ones by the display offset.
        self.line_damage.find_map(|line| {
            line.is_damaged().then_some(LineDamageBounds::new(
                line.line + self.display_offset,
                line.left,
                line.right,
            ))
        })
    }
}
/// State of the terminal damage.
struct TermDamageState {
    /// Hint whether terminal should be damaged entirely regardless of the actual damage changes.
    full: bool,

    /// Information about damage on terminal lines.
    lines: Vec<LineDamageBounds>,

    /// Old terminal cursor point.
    last_cursor: Point,
}
impl TermDamageState {
    /// Create damage state for a grid of `num_lines` x `num_cols`, starting fully damaged.
    fn new(num_cols: usize, num_lines: usize) -> Self {
        let lines =
            (0..num_lines).map(|line| LineDamageBounds::undamaged(line, num_cols)).collect();

        Self { full: true, lines, last_cursor: Default::default() }
    }

    /// Rebuild the per-line records for new dimensions and mark everything damaged.
    #[inline]
    fn resize(&mut self, num_cols: usize, num_lines: usize) {
        // Reset point, so old cursor won't end up outside of the viewport.
        self.last_cursor = Default::default();
        self.full = true;

        self.lines.clear();
        self.lines.reserve(num_lines);
        for line in 0..num_lines {
            self.lines.push(LineDamageBounds::undamaged(line, num_cols));
        }
    }

    /// Damage point inside of the viewport.
    #[inline]
    fn damage_point(&mut self, point: Point<usize>) {
        self.damage_line(point.line, point.column.0, point.column.0);
    }

    /// Expand `line`'s damage to span at least `left` to `right` column.
    #[inline]
    fn damage_line(&mut self, line: usize, left: usize, right: usize) {
        self.lines[line].expand(left, right);
    }

    /// Reset information about terminal damage.
    fn reset(&mut self, num_cols: usize) {
        self.full = false;
        self.lines.iter_mut().for_each(|line| line.reset(num_cols));
    }
}
pub struct Term<T> {
    /// Terminal focus controlling the cursor shape.
    pub is_focused: bool,

    /// Cursor for keyboard selection.
    pub vi_mode_cursor: ViModeCursor,

    /// Active selection, if any.
    pub selection: Option<Selection>,

    /// Currently active grid.
    ///
    /// Tracks the screen buffer currently in use. While the alternate screen buffer is active,
    /// this will be the alternate grid. Otherwise it is the primary screen buffer.
    grid: Grid<Cell>,

    /// Currently inactive grid.
    ///
    /// Opposite of the active grid. While the alternate screen buffer is active, this will be the
    /// primary grid. Otherwise it is the alternate screen buffer.
    inactive_grid: Grid<Cell>,

    /// Index into `charsets`, pointing to what ASCII is currently being mapped to.
    active_charset: CharsetIndex,

    /// Tabstops.
    tabs: TabStops,

    /// Mode flags.
    mode: TermMode,

    /// Scroll region.
    ///
    /// Range going from top to bottom of the terminal, indexed from the top of the viewport.
    scroll_region: Range<Line>,

    /// Modified terminal colors.
    colors: Colors,

    /// Current style of the cursor.
    cursor_style: Option<CursorStyle>,

    /// Proxy for sending events to the event loop.
    event_proxy: T,

    /// Current title of the window.
    title: Option<String>,

    /// Stack of saved window titles. When a title is popped from this stack, the `title` for the
    /// term is set.
    title_stack: Vec<Option<String>>,

    /// The stack for the keyboard modes.
    keyboard_mode_stack: Vec<KeyboardModes>,

    /// Currently inactive keyboard mode stack.
    inactive_keyboard_mode_stack: Vec<KeyboardModes>,

    /// Information about damaged cells.
    damage: TermDamageState,

    /// Config directly for the terminal.
    config: Config,
}
/// Configuration options for the [`Term`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Config {
    /// The maximum amount of scrolling history.
    pub scrolling_history: usize,

    /// Default cursor style to reset the cursor to.
    pub default_cursor_style: CursorStyle,

    /// Cursor style for Vi mode.
    pub vi_mode_cursor_style: Option<CursorStyle>,

    /// The characters which terminate semantic selection.
    ///
    /// The default value is [`SEMANTIC_ESCAPE_CHARS`].
    pub semantic_escape_chars: String,

    /// Whether to enable kitty keyboard protocol.
    pub kitty_keyboard: bool,

    /// OSC52 support mode.
    pub osc52: Osc52,
}
impl Default for Config {
fn default() -> Self {
Self {
scrolling_history: 10000,
semantic_escape_chars: SEMANTIC_ESCAPE_CHARS.to_owned(),
default_cursor_style: Default::default(),
vi_mode_cursor_style: Default::default(),
kitty_keyboard: Default::default(),
osc52: Default::default(),
}
}
}
/// OSC 52 behavior.
///
/// Controls which clipboard operations the OSC 52 escape sequence may perform.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize), serde(rename_all = "lowercase"))]
pub enum Osc52 {
    /// The handling of the escape sequence is disabled.
    Disabled,
    /// Only copy sequence is accepted.
    ///
    /// This option is the default as a compromise between entirely
    /// disabling it (the most secure) and allowing `paste` (the less secure).
    #[default]
    OnlyCopy,
    /// Only paste sequence is accepted.
    OnlyPaste,
    /// Both are accepted.
    CopyPaste,
}
impl<T> Term<T> {
/// Scroll the visible viewport, keeping the vi cursor inside it.
#[inline]
pub fn scroll_display(&mut self, scroll: Scroll)
where
    T: EventListener,
{
    let old_display_offset = self.grid.display_offset();
    self.grid.scroll_display(scroll);
    self.event_proxy.send_event(Event::MouseCursorDirty);

    // Clamp vi mode cursor to the viewport.
    let viewport_start = -(self.grid.display_offset() as i32);
    let viewport_end = viewport_start + self.bottommost_line().0;
    let vi_cursor_line = &mut self.vi_mode_cursor.point.line.0;
    *vi_cursor_line = cmp::min(viewport_end, cmp::max(viewport_start, *vi_cursor_line));
    self.vi_mode_recompute_selection();

    // Damage everything if display offset changed.
    if old_display_offset != self.grid().display_offset() {
        self.mark_fully_damaged();
    }
}
/// Create a new terminal with the given dimensions and configuration.
///
/// The primary grid gets the configured scrollback history; the alternate
/// screen grid has no history.
pub fn new<D: Dimensions>(config: Config, dimensions: &D, event_proxy: T) -> Term<T> {
    let num_cols = dimensions.columns();
    let num_lines = dimensions.screen_lines();

    let history_size = config.scrolling_history;
    let grid = Grid::new(num_lines, num_cols, history_size);
    let inactive_grid = Grid::new(num_lines, num_cols, 0);

    let tabs = TabStops::new(grid.columns());

    let scroll_region = Line(0)..Line(grid.screen_lines() as i32);

    // Initialize terminal damage, covering the entire terminal upon launch.
    let damage = TermDamageState::new(num_cols, num_lines);

    Term {
        inactive_grid,
        scroll_region,
        event_proxy,
        damage,
        config,
        grid,
        tabs,
        inactive_keyboard_mode_stack: Default::default(),
        keyboard_mode_stack: Default::default(),
        active_charset: Default::default(),
        vi_mode_cursor: Default::default(),
        cursor_style: Default::default(),
        colors: color::Colors::default(),
        title_stack: Default::default(),
        is_focused: Default::default(),
        selection: Default::default(),
        title: Default::default(),
        mode: Default::default(),
    }
}
/// Collect the information about the changes in the lines, which
/// could be used to minimize the amount of drawing operations.
///
/// The user controlled elements, like `Vi` mode cursor and `Selection` are **not** part of the
/// collected damage state. Those could easily be tracked by comparing their old and new
/// value between adjacent frames.
///
/// After reading damage [`reset_damage`] should be called.
///
/// [`reset_damage`]: Self::reset_damage
#[must_use]
pub fn damage(&mut self) -> TermDamage<'_> {
    // Ensure the entire terminal is damaged after entering insert mode.
    // Leaving is handled in the ansi handler.
    if self.mode.contains(TermMode::INSERT) {
        self.mark_fully_damaged();
    }

    // Record the new cursor position while remembering the previous one.
    let previous_cursor = mem::replace(&mut self.damage.last_cursor, self.grid.cursor.point);

    if self.damage.full {
        return TermDamage::Full;
    }

    // Add information about old cursor position and new one if they are not the same, so we
    // cover everything that was produced by `Term::input`.
    if self.damage.last_cursor != previous_cursor {
        // Cursor coordinates are always inside viewport even if you have `display_offset`.
        let point = Point::new(previous_cursor.line.0 as usize, previous_cursor.column);
        self.damage.damage_point(point);
    }

    // Always damage current cursor.
    self.damage_cursor();

    // NOTE: damage which changes all the content when the display offset is non-zero (e.g.
    // scrolling) is handled via full damage.
    let display_offset = self.grid().display_offset();
    TermDamage::Partial(TermDamageIterator::new(&self.damage.lines, display_offset))
}
/// Resets the terminal damage information.
pub fn reset_damage(&mut self) {
    let columns = self.columns();
    self.damage.reset(columns);
}
/// Flag the entire viewport as damaged, forcing a full redraw on the next frame.
#[inline]
fn mark_fully_damaged(&mut self) {
    self.damage.full = true;
}
/// Set new options for the [`Term`].
///
/// Re-emits the title so the UI can pick up config-dependent title handling,
/// resizes the scrollback of the grid currently *not* on screen accordingly,
/// and resets the kitty keyboard state when that option changed.
pub fn set_options(&mut self, options: Config)
where
    T: EventListener,
{
    let old_config = mem::replace(&mut self.config, options);
    let title_event = match &self.title {
        Some(title) => Event::Title(title.clone()),
        None => Event::ResetTitle,
    };
    self.event_proxy.send_event(title_event);
    // Only the primary screen keeps scrollback history; pick it regardless of
    // which grid is currently active.
    if self.mode.contains(TermMode::ALT_SCREEN) {
        self.inactive_grid.update_history(self.config.scrolling_history);
    } else {
        self.grid.update_history(self.config.scrolling_history);
    }
    // Toggling kitty keyboard support invalidates any pushed keyboard modes.
    if self.config.kitty_keyboard != old_config.kitty_keyboard {
        self.keyboard_mode_stack = Vec::new();
        self.inactive_keyboard_mode_stack = Vec::new();
        self.mode.remove(TermMode::KITTY_KEYBOARD_PROTOCOL);
    }
    // Damage everything on config updates.
    self.mark_fully_damaged();
}
/// Convert the active selection to a String.
///
/// Returns `None` when there is no selection or it resolves to an empty range.
pub fn selection_to_string(&self) -> Option<String> {
    let selection_range = self.selection.as_ref().and_then(|s| s.to_range(self))?;
    let SelectionRange { start, end, .. } = selection_range;
    let mut res = String::new();
    match self.selection.as_ref() {
        // Block selection: take the same column span from every line and
        // join the trimmed pieces with explicit newlines.
        Some(Selection { ty: SelectionType::Block, .. }) => {
            for line in (start.line.0..end.line.0).map(Line::from) {
                res += self
                    .line_to_string(line, start.column..end.column, start.column.0 != 0)
                    .trim_end();
                res += "\n";
            }
            res += self.line_to_string(end.line, start.column..end.column, true).trim_end();
        },
        // Line selection: full lines plus a trailing newline.
        Some(Selection { ty: SelectionType::Lines, .. }) => {
            res = self.bounds_to_string(start, end) + "\n";
        },
        // Simple/semantic selection: everything between the two points.
        _ => {
            res = self.bounds_to_string(start, end);
        },
    }
    Some(res)
}
/// Convert range between two points to a String.
pub fn bounds_to_string(&self, start: Point, end: Point) -> String {
    let mut text = String::new();
    for raw_line in start.line.0..=end.line.0 {
        let line = Line::from(raw_line);
        let is_last_line = line == end.line;
        // Interior lines span the full width; the edge lines are clamped to the range.
        let first_col = if line == start.line { start.column } else { Column(0) };
        let last_col = if is_last_line { end.column } else { self.last_column() };
        text += &self.line_to_string(line, first_col..last_col, is_last_line);
    }
    // Drop at most one trailing newline.
    match text.strip_suffix('\n') {
        Some(stripped) => stripped.to_owned(),
        None => text,
    }
}
/// Convert a single line in the grid to a String.
///
/// `cols` is the half-open column span to extract; `include_wrapped_wide`
/// controls whether a wide character whose leading spacer sits at the end of
/// this line (the character itself starts the next line) is appended.
fn line_to_string(
    &self,
    line: Line,
    mut cols: Range<Column>,
    include_wrapped_wide: bool,
) -> String {
    let mut text = String::new();
    let grid_line = &self.grid[line];
    let line_length = cmp::min(grid_line.line_length(), cols.end + 1);
    // Include wide char when trailing spacer is selected.
    if grid_line[cols.start].flags.contains(Flags::WIDE_CHAR_SPACER) {
        cols.start -= 1;
    }
    let mut tab_mode = false;
    for column in (cols.start.0..line_length.0).map(Column::from) {
        let cell = &grid_line[column];
        // Skip over cells until next tab-stop once a tab was found.
        if tab_mode {
            if self.tabs[column] || cell.c != ' ' {
                tab_mode = false;
            } else {
                continue;
            }
        }
        if cell.c == '\t' {
            tab_mode = true;
        }
        // Spacer cells are rendering artifacts of wide chars; never emit them.
        if !cell.flags.intersects(Flags::WIDE_CHAR_SPACER | Flags::LEADING_WIDE_CHAR_SPACER) {
            // Push cells primary character.
            text.push(cell.c);
            // Push zero-width characters.
            for c in cell.zerowidth().into_iter().flatten() {
                text.push(*c);
            }
        }
    }
    // Terminate the line unless it soft-wraps into the next one.
    if cols.end >= self.columns() - 1
        && (line_length.0 == 0
            || !self.grid[line][line_length - 1].flags.contains(Flags::WRAPLINE))
    {
        text.push('\n');
    }
    // If wide char is not part of the selection, but leading spacer is, include it.
    if line_length == self.columns()
        && line_length.0 >= 2
        && grid_line[line_length - 1].flags.contains(Flags::LEADING_WIDE_CHAR_SPACER)
        && include_wrapped_wide
    {
        text.push(self.grid[line - 1i32][Column(0)].c);
    }
    text
}
/// Terminal content required for rendering.
#[inline]
pub fn renderable_content(&self) -> RenderableContent<'_>
where
    T: EventListener,
{
    RenderableContent::new(self)
}
/// Access to the raw grid data structure.
pub fn grid(&self) -> &Grid<Cell> {
    &self.grid
}
/// Mutable access to the raw grid data structure.
pub fn grid_mut(&mut self) -> &mut Grid<Cell> {
    &mut self.grid
}
/// Resize terminal to new dimensions.
///
/// Resizes both grids, moves the vi cursor with the content, invalidates the
/// selection/tabs when the column count changed, and resets the scroll region.
pub fn resize<S: Dimensions>(&mut self, size: S) {
    let old_cols = self.columns();
    let old_lines = self.screen_lines();
    let num_cols = size.columns();
    let num_lines = size.screen_lines();
    if old_cols == num_cols && old_lines == num_lines {
        debug!("Term::resize dimensions unchanged");
        return;
    }
    debug!("New num_cols is {num_cols} and num_lines is {num_lines}");
    // Move vi mode cursor with the content.
    let history_size = self.history_size();
    let mut delta = num_lines as i32 - old_lines as i32;
    // Keep the primary cursor on screen, but never shift further than scrollback allows.
    let min_delta = cmp::min(0, num_lines as i32 - self.grid.cursor.point.line.0 - 1);
    delta = cmp::min(cmp::max(delta, min_delta), history_size as i32);
    self.vi_mode_cursor.point.line += delta;
    let is_alt = self.mode.contains(TermMode::ALT_SCREEN);
    // Only the primary screen reflows text on resize.
    self.grid.resize(!is_alt, num_lines, num_cols);
    self.inactive_grid.resize(is_alt, num_lines, num_cols);
    // Invalidate selection and tabs only when necessary.
    if old_cols != num_cols {
        self.selection = None;
        // Recreate tabs list.
        self.tabs.resize(num_cols);
    } else if let Some(selection) = self.selection.take() {
        let max_lines = cmp::max(num_lines, old_lines) as i32;
        let range = Line(0)..Line(max_lines);
        self.selection = selection.rotate(self, &range, -delta);
    }
    // Clamp vi cursor to viewport.
    let vi_point = self.vi_mode_cursor.point;
    let viewport_top = Line(-(self.grid.display_offset() as i32));
    let viewport_bottom = viewport_top + self.bottommost_line();
    self.vi_mode_cursor.point.line =
        cmp::max(cmp::min(vi_point.line, viewport_bottom), viewport_top);
    self.vi_mode_cursor.point.column = cmp::min(vi_point.column, self.last_column());
    // Reset scrolling region.
    self.scroll_region = Line(0)..Line(self.screen_lines() as i32);
    // Resize damage information.
    self.damage.resize(num_cols, num_lines);
}
/// Active terminal modes.
#[inline]
pub fn mode(&self) -> &TermMode {
    &self.mode
}
/// Swap primary and alternate screen buffer.
pub fn swap_alt(&mut self) {
    if !self.mode.contains(TermMode::ALT_SCREEN) {
        // Set alt screen cursor to the current primary screen cursor.
        self.inactive_grid.cursor = self.grid.cursor.clone();
        // Drop information about the primary screens saved cursor.
        self.grid.saved_cursor = self.grid.cursor.clone();
        // Reset alternate screen contents.
        self.inactive_grid.reset_region(..);
    }
    // Each screen keeps its own kitty keyboard mode stack; swap and re-apply
    // the mode now on top.
    mem::swap(&mut self.keyboard_mode_stack, &mut self.inactive_keyboard_mode_stack);
    let keyboard_mode =
        self.keyboard_mode_stack.last().copied().unwrap_or(KeyboardModes::NO_MODE).into();
    self.set_keyboard_mode(keyboard_mode, KeyboardModesApplyBehavior::Replace);
    mem::swap(&mut self.grid, &mut self.inactive_grid);
    self.mode ^= TermMode::ALT_SCREEN;
    self.selection = None;
    self.mark_fully_damaged();
}
/// Scroll screen down.
///
/// Text moves down; clear at bottom
/// Expects origin to be in scroll range.
#[inline]
fn scroll_down_relative(&mut self, origin: Line, mut lines: usize) {
trace!("Scrolling down relative: origin={origin}, lines={lines}");
lines = cmp::min(lines, (self.scroll_region.end - self.scroll_region.start).0 as usize);
lines = cmp::min(lines, (self.scroll_region.end - origin).0 as usize);
let region = origin..self.scroll_region.end;
// Scroll selection.
self.selection =
self.selection.take().and_then(|s| s.rotate(self, ®ion, -(lines as i32)));
// Scroll vi mode cursor.
let line = &mut self.vi_mode_cursor.point.line;
if region.start <= *line && region.end > *line {
*line = cmp::min(*line + lines, region.end - 1);
}
// Scroll between origin and bottom
self.grid.scroll_down(®ion, lines);
self.mark_fully_damaged();
}
/// Scroll screen up
///
/// Text moves up; clear at top
/// Expects origin to be in scroll range.
#[inline]
fn scroll_up_relative(&mut self, origin: Line, mut lines: usize) {
    trace!("Scrolling up relative: origin={origin}, lines={lines}");
    // Never scroll by more than the size of the scrolling region.
    lines = cmp::min(lines, (self.scroll_region.end - self.scroll_region.start).0 as usize);
    let region = origin..self.scroll_region.end;
    // Scroll selection.
    // NOTE: `&region` restored here; the source had been corrupted to `®ion`
    // by an HTML-entity mangling of `&reg`, which does not compile.
    self.selection = self.selection.take().and_then(|s| s.rotate(self, &region, lines as i32));
    self.grid.scroll_up(&region, lines);
    // Scroll vi mode cursor.
    let viewport_top = Line(-(self.grid.display_offset() as i32));
    let top = if region.start == 0 { viewport_top } else { region.start };
    let line = &mut self.vi_mode_cursor.point.line;
    if (top <= *line) && region.end > *line {
        *line = cmp::max(*line - lines, top);
    }
    self.mark_fully_damaged();
}
/// Handle the DECCOLM (132-column mode) escape's side effects.
fn deccolm(&mut self)
where
    T: EventListener,
{
    // Setting 132 column font makes no sense, but run the other side effects.
    // Clear scrolling region.
    self.set_scrolling_region(1, None);
    // Clear grid.
    self.grid.reset_region(..);
    self.mark_fully_damaged();
}
/// Request application shutdown by notifying the event listener.
#[inline]
pub fn exit(&mut self)
where
    T: EventListener,
{
    self.event_proxy.send_event(Event::Exit);
}
/// Toggle the vi mode.
///
/// On activation the vi cursor is placed on the terminal cursor when that is
/// visible, otherwise at the top-left of the current viewport.
#[inline]
pub fn toggle_vi_mode(&mut self)
where
    T: EventListener,
{
    self.mode ^= TermMode::VI;
    if self.mode.contains(TermMode::VI) {
        let display_offset = self.grid.display_offset() as i32;
        if self.grid.cursor.point.line > self.bottommost_line() - display_offset {
            // Move cursor to top-left if terminal cursor is not visible.
            let point = Point::new(Line(-display_offset), Column(0));
            self.vi_mode_cursor = ViModeCursor::new(point);
        } else {
            // Reset vi mode cursor position to match primary cursor.
            self.vi_mode_cursor = ViModeCursor::new(self.grid.cursor.point);
        }
    }
    // Update UI about cursor blinking state changes.
    self.event_proxy.send_event(Event::CursorBlinkingChange);
}
/// Move vi mode cursor.
#[inline]
pub fn vi_motion(&mut self, motion: ViMotion)
where
    T: EventListener,
{
    // Motions are only meaningful while vi mode is active.
    if self.mode.contains(TermMode::VI) {
        self.vi_mode_cursor = self.vi_mode_cursor.motion(self, motion);
        self.vi_mode_recompute_selection();
    }
}
/// Move vi cursor to a point in the grid.
#[inline]
pub fn vi_goto_point(&mut self, point: Point)
where
    T: EventListener,
{
    // Move viewport to make point visible.
    self.scroll_to_point(point);
    // Move vi cursor to the point.
    self.vi_mode_cursor.point = point;
    self.vi_mode_recompute_selection();
}
/// Update the active selection to match the vi mode cursor position.
#[inline]
fn vi_mode_recompute_selection(&mut self) {
    // Require vi mode to be active.
    if !self.mode.contains(TermMode::VI) {
        return;
    }
    // Update only if non-empty selection is present.
    if let Some(selection) = self.selection.as_mut().filter(|s| !s.is_empty()) {
        selection.update(self.vi_mode_cursor.point, Side::Left);
        selection.include_all();
    }
}
/// Scroll display to point if it is outside of viewport.
pub fn scroll_to_point(&mut self, point: Point)
where
    T: EventListener,
{
    let display_offset = self.grid.display_offset() as i32;
    let screen_lines = self.grid.screen_lines() as i32;
    // Signed distance the viewport has to move to reveal `point`, if any:
    // negative overshoot above the top, positive overshoot below the bottom.
    let overshoot = if point.line < -display_offset {
        Some(point.line + display_offset)
    } else if point.line >= (screen_lines - display_offset) {
        Some(point.line + display_offset - screen_lines + 1i32)
    } else {
        None
    };
    if let Some(lines) = overshoot {
        self.scroll_display(Scroll::Delta(-lines.0));
    }
}
/// Jump to the end of a wide cell.
///
/// When `point` sits on any part of a wide character (the character cell or
/// one of its spacers), move it to the character's edge in `direction`.
pub fn expand_wide(&self, mut point: Point, direction: Direction) -> Point {
    let flags = self.grid[point.line][point.column].flags;
    match direction {
        // A leading spacer at end-of-line means the wide char starts the next line.
        Direction::Right if flags.contains(Flags::LEADING_WIDE_CHAR_SPACER) => {
            point.column = Column(1);
            point.line += 1;
        },
        Direction::Right if flags.contains(Flags::WIDE_CHAR) => {
            point.column = cmp::min(point.column + 1, self.last_column());
        },
        Direction::Left if flags.intersects(Flags::WIDE_CHAR | Flags::WIDE_CHAR_SPACER) => {
            if flags.contains(Flags::WIDE_CHAR_SPACER) {
                point.column -= 1;
            }
            // Step across a wrapped wide char onto the previous line's spacer.
            let prev = point.sub(self, Boundary::Grid, 1);
            if self.grid[prev].flags.contains(Flags::LEADING_WIDE_CHAR_SPACER) {
                point = prev;
            }
        },
        _ => (),
    }
    point
}
/// Characters which terminate a semantic (word-like) selection.
#[inline]
pub fn semantic_escape_chars(&self) -> &str {
    &self.config.semantic_escape_chars
}
// Test-only override for the semantic escape characters.
#[cfg(test)]
pub(crate) fn set_semantic_escape_chars(&mut self, semantic_escape_chars: &str) {
    self.config.semantic_escape_chars = semantic_escape_chars.into();
}
/// Active terminal cursor style.
///
/// While vi mode is active, this will automatically return the vi mode cursor style.
#[inline]
pub fn cursor_style(&self) -> CursorStyle {
    // Fall back to the configured default when no style was set at runtime.
    let base_style = match self.cursor_style {
        Some(style) => style,
        None => self.config.default_cursor_style,
    };
    match self.config.vi_mode_cursor_style {
        Some(vi_style) if self.mode.contains(TermMode::VI) => vi_style,
        _ => base_style,
    }
}
/// Color overrides set at runtime via escape sequences.
pub fn colors(&self) -> &Colors {
    &self.colors
}
/// Insert a linebreak at the current cursor position.
#[inline]
fn wrapline(&mut self)
where
T: EventListener,
{
if !self.mode.contains(TermMode::LINE_WRAP) {
return;
}
trace!("Wrapping input");
self.grid.cursor_cell().flags.insert(Flags::WRAPLINE);
if self.grid.cursor.point.line + 1 >= self.scroll_region.end {
self.linefeed();
} else {
self.damage_cursor();
self.grid.cursor.point.line += 1;
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | true |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/term/color.rs | alacritty_terminal/src/term/color.rs | use std::ops::{Index, IndexMut};
use crate::vte::ansi::{NamedColor, Rgb};
/// Number of terminal colors.
pub const COUNT: usize = 269;
/// Array of indexed colors.
///
/// | Indices  | Description       |
/// | -------- | ----------------- |
/// | 0..16    | Named ANSI colors |
/// | 16..232  | Color cube        |
/// | 232..256 | Grayscale ramp    |
/// | 256      | Foreground        |
/// | 257      | Background        |
/// | 258      | Cursor            |
/// | 259..267 | Dim colors        |
/// | 267      | Bright foreground |
/// | 268      | Dim background    |
// NOTE: the grayscale row previously read `233..256`; the 6x6x6 cube occupies
// indices 16..232, so the 24-entry grayscale ramp starts at 232.
#[derive(Copy, Clone)]
pub struct Colors([Option<Rgb>; COUNT]);
impl Default for Colors {
    // No color is overridden by default; `None` defers to the configured palette.
    fn default() -> Self {
        Self([None; COUNT])
    }
}
// Index the palette by raw color index (0..COUNT).
impl Index<usize> for Colors {
    type Output = Option<Rgb>;
    #[inline]
    fn index(&self, index: usize) -> &Self::Output {
        &self.0[index]
    }
}
impl IndexMut<usize> for Colors {
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        &mut self.0[index]
    }
}
// Index the palette by well-known color name; relies on `NamedColor`'s
// discriminants matching the palette layout.
impl Index<NamedColor> for Colors {
    type Output = Option<Rgb>;
    #[inline]
    fn index(&self, index: NamedColor) -> &Self::Output {
        &self.0[index as usize]
    }
}
impl IndexMut<NamedColor> for Colors {
#[inline]
fn index_mut(&mut self, index: NamedColor) -> &mut Self::Output {
&mut self.0[index as usize]
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/term/cell.rs | alacritty_terminal/src/term/cell.rs | use std::sync::Arc;
use std::sync::atomic::{AtomicU32, Ordering};
use bitflags::bitflags;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use crate::grid::{self, GridCell};
use crate::index::Column;
use crate::vte::ansi::{Color, Hyperlink as VteHyperlink, NamedColor};
bitflags! {
    /// Per-cell rendering attributes, packed into 16 bits.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
    pub struct Flags: u16 {
        const INVERSE                   = 0b0000_0000_0000_0001;
        const BOLD                      = 0b0000_0000_0000_0010;
        const ITALIC                    = 0b0000_0000_0000_0100;
        // Convenience union of BOLD | ITALIC.
        const BOLD_ITALIC               = 0b0000_0000_0000_0110;
        const UNDERLINE                 = 0b0000_0000_0000_1000;
        // Line soft-wraps into the next row.
        const WRAPLINE                  = 0b0000_0000_0001_0000;
        // First cell of a double-width character.
        const WIDE_CHAR                 = 0b0000_0000_0010_0000;
        // Placeholder cell following a wide character.
        const WIDE_CHAR_SPACER          = 0b0000_0000_0100_0000;
        const DIM                       = 0b0000_0000_1000_0000;
        // Convenience union of DIM | BOLD.
        const DIM_BOLD                  = 0b0000_0000_1000_0010;
        const HIDDEN                    = 0b0000_0001_0000_0000;
        const STRIKEOUT                 = 0b0000_0010_0000_0000;
        // End-of-line spacer when a wide char wraps onto the next line.
        const LEADING_WIDE_CHAR_SPACER  = 0b0000_0100_0000_0000;
        const DOUBLE_UNDERLINE          = 0b0000_1000_0000_0000;
        const UNDERCURL                 = 0b0001_0000_0000_0000;
        const DOTTED_UNDERLINE          = 0b0010_0000_0000_0000;
        const DASHED_UNDERLINE          = 0b0100_0000_0000_0000;
        // Mask covering every underline variant.
        const ALL_UNDERLINES            = Self::UNDERLINE.bits() | Self::DOUBLE_UNDERLINE.bits()
                                        | Self::UNDERCURL.bits() | Self::DOTTED_UNDERLINE.bits()
                                        | Self::DASHED_UNDERLINE.bits();
    }
}
/// Counter for hyperlinks without explicit ID.
static HYPERLINK_ID_SUFFIX: AtomicU32 = AtomicU32::new(0);
/// An OSC 8 hyperlink attached to one or more cells.
///
/// The payload is behind an `Arc`, so cloning a `Hyperlink` between cells is a
/// refcount bump rather than a string copy.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Hyperlink {
    inner: Arc<HyperlinkInner>,
}
impl Hyperlink {
    /// Create a hyperlink; generates a unique ID when `id` is `None`.
    pub fn new<T: ToString>(id: Option<T>, uri: String) -> Self {
        let inner = Arc::new(HyperlinkInner::new(id, uri));
        Self { inner }
    }
    /// Identifier used to group cells belonging to the same link.
    pub fn id(&self) -> &str {
        &self.inner.id
    }
    /// Link target.
    pub fn uri(&self) -> &str {
        &self.inner.uri
    }
}
impl From<VteHyperlink> for Hyperlink {
    fn from(value: VteHyperlink) -> Self {
        Self::new(value.id, value.uri)
    }
}
impl From<Hyperlink> for VteHyperlink {
    fn from(val: Hyperlink) -> Self {
        VteHyperlink { id: Some(val.id().to_owned()), uri: val.uri().to_owned() }
    }
}
/// Shared hyperlink payload referenced by [`Hyperlink`].
#[derive(Debug, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
struct HyperlinkInner {
    /// Identifier for the given hyperlink.
    id: String,
    /// Resource identifier of the hyperlink.
    uri: String,
}
impl HyperlinkInner {
    /// Build the payload; links without an explicit ID get a process-unique
    /// `"<counter>_alacritty"` identifier from `HYPERLINK_ID_SUFFIX`.
    pub fn new<T: ToString>(id: Option<T>, uri: String) -> Self {
        let id = match id {
            Some(id) => id.to_string(),
            None => {
                let mut id = HYPERLINK_ID_SUFFIX.fetch_add(1, Ordering::Relaxed).to_string();
                id.push_str("_alacritty");
                id
            },
        };
        Self { id, uri }
    }
}
/// Trait for determining if a reset should be performed.
pub trait ResetDiscriminant<T> {
    /// Value based on which equality for the reset will be determined.
    fn discriminant(&self) -> T;
}
impl<T: Copy> ResetDiscriminant<T> for T {
    fn discriminant(&self) -> T {
        *self
    }
}
impl ResetDiscriminant<Color> for Cell {
    // Cells are compared by background color when deciding whether a reset is
    // needed; see `Cell::reset`, which only preserves the template's `bg`.
    fn discriminant(&self) -> Color {
        self.bg
    }
}
/// Dynamically allocated cell content.
///
/// This storage is reserved for cell attributes which are rarely set. This allows reducing the
/// allocation required ahead of time for every cell, with some additional overhead when the extra
/// storage is actually required.
#[derive(Default, Debug, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct CellExtra {
    // Combining/zero-width characters rendered on top of `Cell::c`.
    zerowidth: Vec<char>,
    // Underline color override; `None` falls back to the foreground.
    underline_color: Option<Color>,
    // OSC 8 hyperlink attached to this cell.
    hyperlink: Option<Hyperlink>,
}
/// Content and attributes of a single cell in the terminal grid.
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Cell {
    pub c: char,
    pub fg: Color,
    pub bg: Color,
    pub flags: Flags,
    // Rare attributes live behind an `Option<Arc<..>>` to keep `Cell` small.
    pub extra: Option<Arc<CellExtra>>,
}
impl Default for Cell {
    /// A blank cell: a space drawn with the default color pair and no extras.
    #[inline]
    fn default() -> Cell {
        Cell {
            c: ' ',
            fg: Color::Named(NamedColor::Foreground),
            bg: Color::Named(NamedColor::Background),
            flags: Flags::empty(),
            extra: None,
        }
    }
}
impl Cell {
    /// Zerowidth characters stored in this cell.
    #[inline]
    pub fn zerowidth(&self) -> Option<&[char]> {
        self.extra.as_ref().map(|extra| extra.zerowidth.as_slice())
    }
    /// Write a new zerowidth character to this cell.
    #[inline]
    pub fn push_zerowidth(&mut self, character: char) {
        let extra = self.extra.get_or_insert(Default::default());
        // Copy-on-write: the `Arc` may be shared with other cells.
        Arc::make_mut(extra).zerowidth.push(character);
    }
    /// Remove all wide char data from a cell.
    #[inline(never)]
    pub fn clear_wide(&mut self) {
        self.flags.remove(Flags::WIDE_CHAR);
        if let Some(extra) = self.extra.as_mut() {
            Arc::make_mut(extra).zerowidth = Vec::new();
        }
        self.c = ' ';
    }
    /// Set underline color on the cell.
    pub fn set_underline_color(&mut self, color: Option<Color>) {
        // If we reset color and we don't have zerowidth we should drop extra storage.
        if color.is_none()
            && self
                .extra
                .as_ref()
                .is_none_or(|extra| extra.zerowidth.is_empty() && extra.hyperlink.is_none())
        {
            self.extra = None;
        } else {
            let extra = self.extra.get_or_insert(Default::default());
            Arc::make_mut(extra).underline_color = color;
        }
    }
    /// Underline color stored in this cell.
    #[inline]
    pub fn underline_color(&self) -> Option<Color> {
        self.extra.as_ref()?.underline_color
    }
    /// Set hyperlink.
    pub fn set_hyperlink(&mut self, hyperlink: Option<Hyperlink>) {
        // Drop the extra storage entirely when nothing else is stored in it.
        let should_drop = hyperlink.is_none()
            && self
                .extra
                .as_ref()
                .is_none_or(|extra| extra.zerowidth.is_empty() && extra.underline_color.is_none());
        if should_drop {
            self.extra = None;
        } else {
            let extra = self.extra.get_or_insert(Default::default());
            Arc::make_mut(extra).hyperlink = hyperlink;
        }
    }
    /// Hyperlink stored in this cell.
    #[inline]
    pub fn hyperlink(&self) -> Option<Hyperlink> {
        // Cheap clone: only bumps the inner `Arc` refcount.
        self.extra.as_ref()?.hyperlink.clone()
    }
}
impl GridCell for Cell {
    /// A cell is "empty" when it renders like a default cell: blank or tab
    /// character, default colors, no visible attributes, no zerowidth chars.
    #[inline]
    fn is_empty(&self) -> bool {
        (self.c == ' ' || self.c == '\t')
            && self.bg == Color::Named(NamedColor::Background)
            && self.fg == Color::Named(NamedColor::Foreground)
            && !self.flags.intersects(
                Flags::INVERSE
                    | Flags::ALL_UNDERLINES
                    | Flags::STRIKEOUT
                    | Flags::WRAPLINE
                    | Flags::WIDE_CHAR_SPACER
                    | Flags::LEADING_WIDE_CHAR_SPACER,
            )
            // `!= Some(false)` accepts both `None` (no extra storage) and
            // `Some(true)` (extra storage with no zerowidth chars).
            && self.extra.as_ref().map(|extra| extra.zerowidth.is_empty()) != Some(false)
    }
    #[inline]
    fn flags(&self) -> &Flags {
        &self.flags
    }
    #[inline]
    fn flags_mut(&mut self) -> &mut Flags {
        &mut self.flags
    }
    /// Reset to a default cell, keeping only the template's background color.
    #[inline]
    fn reset(&mut self, template: &Self) {
        *self = Cell { bg: template.bg, ..Cell::default() };
    }
}
impl From<Color> for Cell {
    /// Default cell with the given background color.
    #[inline]
    fn from(color: Color) -> Self {
        Self { bg: color, ..Cell::default() }
    }
}
/// Get the length of occupied cells in a line.
pub trait LineLength {
    /// Calculate the occupied line length.
    fn line_length(&self) -> Column;
}
impl LineLength for grid::Row<Cell> {
    fn line_length(&self) -> Column {
        let mut length = Column(0);
        // A wrapped line is occupied to its full width by definition.
        if self[Column(self.len() - 1)].flags.contains(Flags::WRAPLINE) {
            return Column(self.len());
        }
        // Scan from the right for the last cell with visible content
        // (non-space primary char or any zerowidth chars).
        for (index, cell) in self[..].iter().rev().enumerate() {
            if cell.c != ' '
                || cell.extra.as_ref().map(|extra| extra.zerowidth.is_empty()) == Some(false)
            {
                length = Column(self.len() - index);
                break;
            }
        }
        length
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::mem;
    use crate::grid::Row;
    use crate::index::Column;
    #[test]
    fn cell_size_is_below_cap() {
        // Expected cell size on 64-bit architectures.
        const EXPECTED_CELL_SIZE: usize = 24;
        // Ensure that cell size isn't growing by accident.
        assert!(mem::size_of::<Cell>() <= EXPECTED_CELL_SIZE);
    }
    #[test]
    fn line_length_works() {
        let mut row = Row::<Cell>::new(10);
        row[Column(5)].c = 'a';
        // Length is one past the last occupied column.
        assert_eq!(row.line_length(), Column(6));
    }
    #[test]
    fn line_length_works_with_wrapline() {
        let mut row = Row::<Cell>::new(10);
        row[Column(9)].flags.insert(super::Flags::WRAPLINE);
        // Wrapped lines always report their full width.
        assert_eq!(row.line_length(), Column(10));
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/tty/unix.rs | alacritty_terminal/src/tty/unix.rs | //! TTY related functionality.
use std::ffi::{CStr, CString};
use std::fs::File;
use std::io::{Error, ErrorKind, Read, Result};
use std::mem::MaybeUninit;
use std::os::fd::OwnedFd;
use std::os::unix::ffi::OsStrExt;
use std::os::unix::io::AsRawFd;
use std::os::unix::net::UnixStream;
use std::os::unix::process::CommandExt;
#[cfg(target_os = "macos")]
use std::path::Path;
use std::process::{Child, Command};
use std::sync::Arc;
use std::{env, ptr};
use libc::{F_GETFL, F_SETFL, O_NONBLOCK, TIOCSCTTY, c_int, fcntl};
use log::error;
use polling::{Event, PollMode, Poller};
use rustix_openpty::openpty;
use rustix_openpty::rustix::termios::Winsize;
#[cfg(any(target_os = "linux", target_os = "macos"))]
use rustix_openpty::rustix::termios::{self, InputModes, OptionalActions};
use signal_hook::low_level::{pipe as signal_pipe, unregister as unregister_signal};
use signal_hook::{SigId, consts as sigconsts};
use crate::event::{OnResize, WindowSize};
use crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options};
// Interest in PTY read/writes.
pub(crate) const PTY_READ_WRITE_TOKEN: usize = 0;
// Interest in new child events.
pub(crate) const PTY_CHILD_EVENT_TOKEN: usize = 1;
/// Log an error and abort the process; used for unrecoverable PTY failures.
macro_rules! die {
    ($($arg:tt)*) => {{
        error!($($arg)*);
        std::process::exit(1);
    }};
}
/// Really only needed on BSD, but should be fine elsewhere.
fn set_controlling_terminal(fd: c_int) -> Result<()> {
    let res = unsafe {
        // TIOSCTTY changes based on platform and the `ioctl` call is different
        // based on architecture (32/64). So a generic cast is used to make sure
        // there are no issues. To allow such a generic cast the clippy warning
        // is disabled.
        #[allow(clippy::cast_lossless)]
        libc::ioctl(fd, TIOCSCTTY as _, 0)
    };
    if res == 0 { Ok(()) } else { Err(Error::last_os_error()) }
}
/// Borrowed view of a `passwd` database entry; see [`get_pw_entry`].
#[derive(Debug)]
struct Passwd<'a> {
    name: &'a str,
    dir: &'a str,
    shell: &'a str,
}
/// Return a Passwd struct with pointers into the provided buf.
///
/// # Unsafety
///
/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.
fn get_pw_entry(buf: &mut [i8; 1024]) -> Result<Passwd<'_>> {
    // Create zeroed passwd struct.
    let mut entry: MaybeUninit<libc::passwd> = MaybeUninit::uninit();
    let mut res: *mut libc::passwd = ptr::null_mut();
    // Try and read the pw file.
    let uid = unsafe { libc::getuid() };
    // SAFETY: `entry`, `buf` and `res` outlive the call; `buf.len()` matches
    // the buffer actually passed.
    let status = unsafe {
        libc::getpwuid_r(uid, entry.as_mut_ptr(), buf.as_mut_ptr() as *mut _, buf.len(), &mut res)
    };
    // NOTE(review): `assume_init` runs before the status/null checks below; if
    // `getpwuid_r` failed, `entry` may not be fully initialized — consider
    // moving this after the error checks. TODO confirm.
    let entry = unsafe { entry.assume_init() };
    if status < 0 {
        return Err(Error::other("getpwuid_r failed"));
    }
    if res.is_null() {
        return Err(Error::other("pw not found"));
    }
    // Sanity check.
    assert_eq!(entry.pw_uid, uid);
    // Build a borrowed Passwd struct.
    // SAFETY: on success, the returned pointers reference NUL-terminated
    // strings stored inside `buf`, which the `'_` lifetime ties to.
    Ok(Passwd {
        name: unsafe { CStr::from_ptr(entry.pw_name).to_str().unwrap() },
        dir: unsafe { CStr::from_ptr(entry.pw_dir).to_str().unwrap() },
        shell: unsafe { CStr::from_ptr(entry.pw_shell).to_str().unwrap() },
    })
}
/// Handle to a spawned shell and the master side of its PTY.
pub struct Pty {
    child: Child,
    // Master side of the PTY, used for both reading and writing.
    file: File,
    // Read end of the SIGCHLD self-pipe.
    signals: UnixStream,
    // Registration handle for the SIGCHLD handler, unregistered on drop.
    sig_id: SigId,
}
impl Pty {
    /// The spawned child process.
    pub fn child(&self) -> &Child {
        &self.child
    }
    /// The PTY master as a `File`.
    pub fn file(&self) -> &File {
        &self.file
    }
}
/// User information that is required for a new shell session.
struct ShellUser {
    user: String,
    home: String,
    shell: String,
}
impl ShellUser {
    /// look for shell, username, longname, and home dir in the respective environment variables
    /// before falling back on looking into `passwd`.
    fn from_env() -> Result<Self> {
        // Buffer backing the borrowed `Passwd` strings; must outlive `pw`.
        let mut buf = [0; 1024];
        let pw = get_pw_entry(&mut buf);
        let user = match env::var("USER") {
            Ok(user) => user,
            Err(_) => match pw {
                Ok(ref pw) => pw.name.to_owned(),
                // Neither env var nor passwd entry available: give up.
                Err(err) => return Err(err),
            },
        };
        let home = match env::var("HOME") {
            Ok(home) => home,
            Err(_) => match pw {
                Ok(ref pw) => pw.dir.to_owned(),
                Err(err) => return Err(err),
            },
        };
        let shell = match env::var("SHELL") {
            Ok(shell) => shell,
            Err(_) => match pw {
                Ok(ref pw) => pw.shell.to_owned(),
                Err(err) => return Err(err),
            },
        };
        Ok(Self { user, home, shell })
    }
}
/// Build the default shell command; on non-macOS this is simply the shell itself.
#[cfg(not(target_os = "macos"))]
fn default_shell_command(shell: &str, _user: &str, _home: &str) -> Command {
    Command::new(shell)
}
#[cfg(target_os = "macos")]
fn default_shell_command(shell: &str, user: &str, home: &str) -> Command {
    let shell_name = shell.rsplit('/').next().unwrap();
    // On macOS, use the `login` command so the shell will appear as a tty session.
    let mut login_command = Command::new("/usr/bin/login");
    // Exec the shell with argv[0] prepended by '-' so it becomes a login shell.
    // `login` normally does this itself, but `-l` disables this.
    let exec = format!("exec -a -{} {}", shell_name, shell);
    // Since we use -l, `login` will not change directory to the user's home. However,
    // `login` only checks the current working directory for a .hushlogin file, causing
    // it to miss any in the user's home directory. We can fix this by doing the check
    // ourselves and passing `-q`
    let has_home_hushlogin = Path::new(home).join(".hushlogin").exists();
    // -f: Bypasses authentication for the already-logged-in user.
    // -l: Skips changing directory to $HOME and prepending '-' to argv[0].
    // -p: Preserves the environment.
    // -q: Act as if `.hushlogin` exists.
    //
    // XXX: we use zsh here over sh due to `exec -a`.
    let flags = if has_home_hushlogin { "-qflp" } else { "-flp" };
    login_command.args([flags, user, "/bin/zsh", "-fc", &exec]);
    login_command
}
/// Create a new TTY and return a handle to interact with it.
pub fn new(config: &Options, window_size: WindowSize, window_id: u64) -> Result<Pty> {
    let pty = openpty(None, Some(&window_size.to_winsize()))?;
    let (master, slave) = (pty.controller, pty.user);
    from_fd(config, window_id, master, slave)
}
/// Create a new TTY from a PTY's file descriptors.
///
/// Configures the PTY, builds the shell command with its environment, installs
/// a SIGCHLD self-pipe, spawns the child, and returns the [`Pty`] handle.
pub fn from_fd(config: &Options, window_id: u64, master: OwnedFd, slave: OwnedFd) -> Result<Pty> {
    let master_fd = master.as_raw_fd();
    let slave_fd = slave.as_raw_fd();
    #[cfg(any(target_os = "linux", target_os = "macos"))]
    if let Ok(mut termios) = termios::tcgetattr(&master) {
        // Set character encoding to UTF-8.
        termios.input_modes.set(InputModes::IUTF8, true);
        let _ = termios::tcsetattr(&master, OptionalActions::Now, &termios);
    }
    let user = ShellUser::from_env()?;
    let mut builder = if let Some(shell) = config.shell.as_ref() {
        let mut cmd = Command::new(&shell.program);
        cmd.args(shell.args.as_slice());
        cmd
    } else {
        default_shell_command(&user.shell, &user.user, &user.home)
    };
    // Setup child stdin/stdout/stderr as slave fd of PTY.
    builder.stdin(slave.try_clone()?);
    builder.stderr(slave.try_clone()?);
    builder.stdout(slave);
    // Setup shell environment.
    let window_id = window_id.to_string();
    builder.env("ALACRITTY_WINDOW_ID", &window_id);
    builder.env("USER", user.user);
    builder.env("HOME", user.home);
    // Set Window ID for clients relying on X11 hacks.
    builder.env("WINDOWID", window_id);
    for (key, value) in &config.env {
        builder.env(key, value);
    }
    // Prevent child processes from inheriting linux-specific startup notification env.
    builder.env_remove("XDG_ACTIVATION_TOKEN");
    builder.env_remove("DESKTOP_STARTUP_ID");
    // Pre-convert the working directory to a CString since the pre_exec hook
    // must not allocate.
    let working_directory = config
        .working_directory
        .as_ref()
        .and_then(|path| CString::new(path.as_os_str().as_bytes()).ok());
    unsafe {
        // SAFETY: the hook runs between fork and exec, so it only calls
        // async-signal-safe functions and captured data (no allocation).
        builder.pre_exec(move || {
            // Create a new process group.
            let err = libc::setsid();
            if err == -1 {
                return Err(Error::last_os_error());
            }
            // Set working directory, ignoring invalid paths.
            if let Some(working_directory) = working_directory.as_ref() {
                libc::chdir(working_directory.as_ptr());
            }
            set_controlling_terminal(slave_fd)?;
            // No longer need slave/master fds.
            libc::close(slave_fd);
            libc::close(master_fd);
            // Restore default signal dispositions the parent may have altered.
            libc::signal(libc::SIGCHLD, libc::SIG_DFL);
            libc::signal(libc::SIGHUP, libc::SIG_DFL);
            libc::signal(libc::SIGINT, libc::SIG_DFL);
            libc::signal(libc::SIGQUIT, libc::SIG_DFL);
            libc::signal(libc::SIGTERM, libc::SIG_DFL);
            libc::signal(libc::SIGALRM, libc::SIG_DFL);
            Ok(())
        });
    }
    // Prepare signal handling before spawning child.
    let (signals, sig_id) = {
        let (sender, recv) = UnixStream::pair()?;
        // Register the recv end of the pipe for SIGCHLD.
        let sig_id = signal_pipe::register(sigconsts::SIGCHLD, sender)?;
        recv.set_nonblocking(true)?;
        (recv, sig_id)
    };
    match builder.spawn() {
        Ok(child) => {
            unsafe {
                // Maybe this should be done outside of this function so nonblocking
                // isn't forced upon consumers. Although maybe it should be?
                set_nonblocking(master_fd);
            }
            Ok(Pty { child, file: File::from(master), signals, sig_id })
        },
        Err(err) => Err(Error::new(
            err.kind(),
            format!(
                "Failed to spawn command '{}': {}",
                builder.get_program().to_string_lossy(),
                err
            ),
        )),
    }
}
impl Drop for Pty {
    fn drop(&mut self) {
        // Make sure the PTY is terminated properly.
        unsafe {
            libc::kill(self.child.id() as i32, libc::SIGHUP);
        }
        // Clear signal-hook handler.
        unregister_signal(self.sig_id);
        // Reap the child to avoid leaving a zombie process.
        let _ = self.child.wait();
    }
}
impl EventedReadWrite for Pty {
    type Reader = File;
    type Writer = File;
    /// Register the PTY master and the SIGCHLD pipe with the poller.
    ///
    /// The caller's `interest` is retagged with `PTY_READ_WRITE_TOKEN`; the
    /// signal pipe is always watched level-triggered for readability.
    #[inline]
    unsafe fn register(
        &mut self,
        poll: &Arc<Poller>,
        mut interest: Event,
        poll_opts: PollMode,
    ) -> Result<()> {
        interest.key = PTY_READ_WRITE_TOKEN;
        unsafe {
            poll.add_with_mode(&self.file, interest, poll_opts)?;
        }
        unsafe {
            poll.add_with_mode(
                &self.signals,
                Event::readable(PTY_CHILD_EVENT_TOKEN),
                PollMode::Level,
            )
        }
    }
    /// Update the existing registrations; mirrors [`Self::register`].
    #[inline]
    fn reregister(
        &mut self,
        poll: &Arc<Poller>,
        mut interest: Event,
        poll_opts: PollMode,
    ) -> Result<()> {
        interest.key = PTY_READ_WRITE_TOKEN;
        poll.modify_with_mode(&self.file, interest, poll_opts)?;
        poll.modify_with_mode(
            &self.signals,
            Event::readable(PTY_CHILD_EVENT_TOKEN),
            PollMode::Level,
        )
    }
    /// Remove both sources from the poller.
    #[inline]
    fn deregister(&mut self, poll: &Arc<Poller>) -> Result<()> {
        poll.delete(&self.file)?;
        poll.delete(&self.signals)
    }
    #[inline]
    fn reader(&mut self) -> &mut File {
        &mut self.file
    }
    #[inline]
    fn writer(&mut self) -> &mut File {
        &mut self.file
    }
}
impl EventedPty for Pty {
    /// Check the signal pipe for a pending SIGCHLD, then reap the child.
    #[inline]
    fn next_child_event(&mut self) -> Option<ChildEvent> {
        // Drain one byte from the signal pipe; `WouldBlock` just means no
        // SIGCHLD has been delivered since we last looked.
        let mut byte = [0u8; 1];
        if let Err(err) = self.signals.read(&mut byte) {
            if err.kind() != ErrorKind::WouldBlock {
                error!("Error reading from signal pipe: {err}");
            }
            return None;
        }
        // SIGCHLD fired; check whether our child actually terminated.
        match self.child.try_wait() {
            Err(err) => {
                error!("Error checking child process termination: {err}");
                None
            },
            Ok(None) => None,
            Ok(Some(status)) => Some(ChildEvent::Exited(status.code())),
        }
    }
}
impl OnResize for Pty {
    /// Resize the PTY.
    ///
    /// Tells the kernel that the window size changed with the new pixel
    /// dimensions and line/column counts.
    fn on_resize(&mut self, window_size: WindowSize) {
        let win = window_size.to_winsize();
        // TIOCSWINSZ updates the kernel's window size for this terminal.
        let res = unsafe { libc::ioctl(self.file.as_raw_fd(), libc::TIOCSWINSZ, &win as *const _) };
        if res < 0 {
            die!("ioctl TIOCSWINSZ failed: {}", Error::last_os_error());
        }
    }
}
/// Types that can produce a `Winsize`.
pub trait ToWinsize {
    /// Get a `Winsize` describing this value's dimensions.
    fn to_winsize(self) -> Winsize;
}
impl ToWinsize for WindowSize {
    /// Convert the window dimensions into the kernel's `winsize` layout.
    fn to_winsize(self) -> Winsize {
        let rows = self.num_lines as libc::c_ushort;
        let cols = self.num_cols as libc::c_ushort;
        Winsize {
            ws_row: rows,
            ws_col: cols,
            // Pixel dimensions are derived from the cell grid.
            ws_xpixel: cols * self.cell_width as libc::c_ushort,
            ws_ypixel: rows * self.cell_height as libc::c_ushort,
        }
    }
}
/// Put a file descriptor into non-blocking mode.
///
/// # Safety
///
/// `fd` must be a valid, open file descriptor.
unsafe fn set_nonblocking(fd: c_int) {
    // Read current flags, OR in O_NONBLOCK, write them back.
    let res = unsafe { fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK) };
    assert_eq!(res, 0);
}
/// Smoke test: fetching the current user's passwd entry should succeed.
#[test]
fn test_get_pw_entry() {
    let mut buf: [i8; 1024] = [0; 1024];
    let _pw = get_pw_entry(&mut buf).unwrap();
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/tty/mod.rs | alacritty_terminal/src/tty/mod.rs | //! TTY related functionality.
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use std::{env, io};
use polling::{Event, PollMode, Poller};
#[cfg(not(windows))]
mod unix;
#[cfg(not(windows))]
pub use self::unix::*;
#[cfg(windows)]
pub mod windows;
#[cfg(windows)]
pub use self::windows::*;
/// Configuration for the `Pty` interface.
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub struct Options {
    /// Shell options.
    ///
    /// [`None`] will use the default shell.
    pub shell: Option<Shell>,
    /// Shell startup directory.
    pub working_directory: Option<PathBuf>,
    /// Drain the child process output before exiting the terminal.
    pub drain_on_exit: bool,
    /// Extra environment variables.
    ///
    // On Windows, keys are deduplicated case-insensitively against the
    // inherited environment (see `conpty::convert_custom_env`).
    pub env: HashMap<String, String>,
    /// Specifies whether the Windows shell arguments should be escaped.
    ///
    /// - When `true`: Arguments will be escaped according to the standard C runtime rules.
    /// - When `false`: Arguments will be passed raw without additional escaping.
    #[cfg(target_os = "windows")]
    pub escape_args: bool,
}
/// Shell options.
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub struct Shell {
    /// Path to a shell program to run on startup.
    pub(crate) program: String,
    /// Arguments passed to shell.
    pub(crate) args: Vec<String>,
}
impl Shell {
pub fn new(program: String, args: Vec<String>) -> Self {
Self { program, args }
}
}
/// Stream read and/or write behavior.
///
/// This defines an abstraction over polling's interface in order to allow either
/// one read/write object or a separate read and write object.
pub trait EventedReadWrite {
    type Reader: io::Read;
    type Writer: io::Write;
    /// Register the stream's sources with the poller.
    ///
    /// # Safety
    ///
    /// The underlying sources must outlive their registration in the `Poller`.
    unsafe fn register(&mut self, _: &Arc<Poller>, _: Event, _: PollMode) -> io::Result<()>;
    /// Update an existing poller registration.
    fn reregister(&mut self, _: &Arc<Poller>, _: Event, _: PollMode) -> io::Result<()>;
    /// Remove the stream's sources from the poller.
    fn deregister(&mut self, _: &Arc<Poller>) -> io::Result<()>;
    /// Access the read half of the stream.
    fn reader(&mut self) -> &mut Self::Reader;
    /// Access the write half of the stream.
    fn writer(&mut self) -> &mut Self::Writer;
}
/// Events concerning TTY child processes.
#[derive(Debug, PartialEq, Eq)]
pub enum ChildEvent {
    /// Indicates the child has exited, with an error code if available.
    Exited(Option<i32>),
}
/// A pseudoterminal (or PTY).
///
/// This is a refinement of EventedReadWrite that also provides a channel through which we can be
/// notified if the PTY child process does something we care about (other than writing to the TTY).
/// In particular, this allows for race-free child exit notification on UNIX (cf. `SIGCHLD`).
pub trait EventedPty: EventedReadWrite {
    /// Tries to retrieve an event.
    ///
    /// Returns `Some(event)` on success, or `None` if there are no events to retrieve.
    fn next_child_event(&mut self) -> Option<ChildEvent>;
}
/// Setup environment variables.
pub fn setup_env() {
    // Default to 'alacritty' terminfo if it is available, otherwise
    // default to 'xterm-256color'. May be overridden by user's config
    // below.
    let terminfo = match terminfo_exists("alacritty") {
        true => "alacritty",
        false => "xterm-256color",
    };
    unsafe { env::set_var("TERM", terminfo) };
    // Advertise 24-bit color support.
    unsafe { env::set_var("COLORTERM", "truecolor") };
}
/// Check if a terminfo entry exists on the system.
fn terminfo_exists(terminfo: &str) -> bool {
    // Get first terminfo character for the parent directory.
    let first = terminfo.get(..1).unwrap_or_default();
    // Some terminfo layouts name the subdirectory by the hex code of the first
    // character instead of the character itself; both forms are probed below.
    let first_hex = format!("{:x}", first.chars().next().unwrap_or_default() as usize);
    // Return true if the terminfo file exists at the specified location.
    macro_rules! check_path {
        ($path:expr) => {
            if $path.join(first).join(terminfo).exists()
                || $path.join(&first_hex).join(terminfo).exists()
            {
                return true;
            }
        };
    }
    // $TERMINFO takes precedence over ~/.terminfo; only one of the two is
    // consulted.
    if let Some(dir) = env::var_os("TERMINFO") {
        check_path!(PathBuf::from(&dir));
    } else if let Some(home) = home::home_dir() {
        check_path!(home.join(".terminfo"));
    }
    // Colon-separated list of additional search directories.
    if let Ok(dirs) = env::var("TERMINFO_DIRS") {
        for dir in dirs.split(':') {
            check_path!(PathBuf::from(dir));
        }
    }
    // Prefix-relative locations (e.g. for Termux-style installs).
    if let Ok(prefix) = env::var("PREFIX") {
        let path = PathBuf::from(prefix);
        check_path!(path.join("etc/terminfo"));
        check_path!(path.join("lib/terminfo"));
        check_path!(path.join("share/terminfo"));
    }
    // Well-known system-wide locations.
    check_path!(PathBuf::from("/etc/terminfo"));
    check_path!(PathBuf::from("/lib/terminfo"));
    check_path!(PathBuf::from("/usr/share/terminfo"));
    check_path!(PathBuf::from("/boot/system/data/terminfo"));
    // No valid terminfo path has been found.
    false
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/tty/windows/blocking.rs | alacritty_terminal/src/tty/windows/blocking.rs | //! Code for running a reader/writer on another thread while driving it through `polling`.
use std::io::prelude::*;
use std::marker::PhantomData;
use std::sync::{Arc, Mutex};
use std::task::{Context, Poll, Wake, Waker};
use std::{io, thread};
use piper::{Reader, Writer, pipe};
use polling::os::iocp::{CompletionPacket, PollerIocpExt};
use polling::{Event, PollMode, Poller};
use crate::thread::spawn_named;
/// State shared between one pipe end and the waker delivered to the poller.
struct Registration {
    /// Currently registered poller interest, if any.
    interest: Mutex<Option<Interest>>,
    /// Which end of the pipe this registration notifies for.
    end: PipeEnd,
}
/// Identifies which half of the in-memory pipe a registration belongs to.
#[derive(Copy, Clone)]
enum PipeEnd {
    Reader,
    Writer,
}
/// A registered poller interest.
struct Interest {
    /// The event to send about completion.
    event: Event,
    /// The poller to send the event to.
    poller: Arc<Poller>,
    /// The mode that we are in.
    mode: PollMode,
}
/// Poll a reader in another thread.
///
/// The actual `R` is pumped into an in-memory pipe by a dedicated thread;
/// this handle only drains that pipe, so its reads never block.
pub struct UnblockedReader<R> {
    /// The event to send about completion.
    interest: Arc<Registration>,
    /// The pipe that we are reading from.
    pipe: Reader,
    /// Is this the first time registering?
    first_register: bool,
    /// We logically own the reader, but we don't actually use it.
    _reader: PhantomData<R>,
}
impl<R: Read + Send + 'static> UnblockedReader<R> {
    /// Spawn a new unblocked reader.
    ///
    /// Starts a background thread that continuously reads from `source` into
    /// an in-memory pipe of `pipe_capacity` bytes; the returned handle drains
    /// that pipe without blocking.
    pub fn new(mut source: R, pipe_capacity: usize) -> Self {
        // Create a new pipe.
        let (reader, mut writer) = pipe(pipe_capacity);
        let interest = Arc::new(Registration {
            interest: Mutex::<Option<Interest>>::new(None),
            end: PipeEnd::Reader,
        });
        // Spawn the reader thread.
        spawn_named("alacritty-tty-reader-thread", move || {
            // Waking this waker unparks the reader thread (see ThreadWaker).
            let waker = Waker::from(Arc::new(ThreadWaker(thread::current())));
            let mut context = Context::from_waker(&waker);
            loop {
                // Read from the reader into the pipe.
                match writer.poll_fill(&mut context, &mut source) {
                    Poll::Ready(Ok(0)) => {
                        // Either the pipe is closed or the reader is at its EOF.
                        // In any case, we are done.
                        return;
                    },
                    Poll::Ready(Ok(_)) => {
                        // Keep reading.
                        continue;
                    },
                    Poll::Ready(Err(e)) if e.kind() == io::ErrorKind::Interrupted => {
                        // We were interrupted; continue.
                        continue;
                    },
                    Poll::Ready(Err(e)) => {
                        log::error!("error writing to pipe: {}", e);
                        return;
                    },
                    Poll::Pending => {
                        // We are now waiting on the other end to advance. Park the
                        // thread until they do.
                        thread::park();
                    },
                }
            }
        });
        Self { interest, pipe: reader, first_register: true, _reader: PhantomData }
    }
    /// Register interest in the reader.
    ///
    /// Posts an immediate completion packet if data is already buffered, or
    /// unconditionally on the very first registration.
    pub fn register(&mut self, poller: &Arc<Poller>, event: Event, mode: PollMode) {
        let mut interest = self.interest.interest.lock().unwrap();
        *interest = Some(Interest { event, poller: poller.clone(), mode });
        // Send the event to start off with if we have any data.
        if (!self.pipe.is_empty() && event.readable) || self.first_register {
            self.first_register = false;
            poller.post(CompletionPacket::new(event)).ok();
        }
    }
    /// Deregister interest in the reader.
    pub fn deregister(&self) {
        let mut interest = self.interest.interest.lock().unwrap();
        *interest = None;
    }
    /// Try to read from the reader.
    ///
    /// Non-blocking: returns 0 when no data is currently buffered; the waker
    /// built from `interest` re-notifies the poller once data arrives.
    pub fn try_read(&mut self, buf: &mut [u8]) -> usize {
        let waker = Waker::from(self.interest.clone());
        match self.pipe.poll_drain_bytes(&mut Context::from_waker(&waker), buf) {
            Poll::Pending => 0,
            Poll::Ready(n) => n,
        }
    }
}
impl<R: Read + Send + 'static> Read for UnblockedReader<R> {
    /// Non-blocking read: yields `Ok(0)` when no buffered data is available.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let drained = self.try_read(buf);
        Ok(drained)
    }
}
/// Poll a writer in another thread.
///
/// Writes go into an in-memory pipe which a background thread drains into the
/// actual `W`, so the caller's writes never block.
pub struct UnblockedWriter<W> {
    /// The interest to send about completion.
    interest: Arc<Registration>,
    /// The pipe that we are writing to.
    pipe: Writer,
    /// We logically own the writer, but we don't actually use it.
    // NOTE(review): field is named `_reader` although it marks the writer
    // type parameter; renaming would touch other impls in this file.
    _reader: PhantomData<W>,
}
impl<W: Write + Send + 'static> UnblockedWriter<W> {
/// Spawn a new unblocked writer.
pub fn new(mut sink: W, pipe_capacity: usize) -> Self {
// Create a new pipe.
let (mut reader, writer) = pipe(pipe_capacity);
let interest = Arc::new(Registration {
interest: Mutex::<Option<Interest>>::new(None),
end: PipeEnd::Writer,
});
// Spawn the writer thread.
spawn_named("alacritty-tty-writer-thread", move || {
let waker = Waker::from(Arc::new(ThreadWaker(thread::current())));
let mut context = Context::from_waker(&waker);
loop {
// Write from the pipe into the writer.
match reader.poll_drain(&mut context, &mut sink) {
Poll::Ready(Ok(0)) => {
// Either the pipe is closed or the writer is full.
// In any case, we are done.
return;
},
Poll::Ready(Ok(_)) => {
// Keep writing.
continue;
},
Poll::Ready(Err(e)) if e.kind() == io::ErrorKind::Interrupted => {
// We were interrupted; continue.
continue;
},
Poll::Ready(Err(e)) => {
log::error!("error writing to pipe: {}", e);
return;
},
Poll::Pending => {
// We are now waiting on the other end to advance. Park the
// thread until they do.
thread::park();
},
}
}
});
Self { interest, pipe: writer, _reader: PhantomData }
}
/// Register interest in the writer.
pub fn register(&self, poller: &Arc<Poller>, event: Event, mode: PollMode) {
let mut interest = self.interest.interest.lock().unwrap();
*interest = Some(Interest { event, poller: poller.clone(), mode });
// Send the event to start off with if we have room for data.
if !self.pipe.is_full() && event.writable {
poller.post(CompletionPacket::new(event)).ok();
}
}
/// Deregister interest in the writer.
pub fn deregister(&self) {
let mut interest = self.interest.interest.lock().unwrap();
*interest = None;
}
/// Try to write to the writer.
pub fn try_write(&mut self, buf: &[u8]) -> usize {
let waker = Waker::from(self.interest.clone());
match self.pipe.poll_fill_bytes(&mut Context::from_waker(&waker), buf) {
Poll::Pending => 0,
Poll::Ready(n) => n,
}
}
}
impl<W: Write + Send + 'static> Write for UnblockedWriter<W> {
    /// Non-blocking write: yields `Ok(0)` when the pipe is full.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let queued = self.try_write(buf);
        Ok(queued)
    }
    /// The background thread drains the pipe on its own; flushing is a no-op.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
/// Waker that unparks the captured thread when woken.
struct ThreadWaker(thread::Thread);
impl Wake for ThreadWaker {
    fn wake(self: Arc<Self>) {
        Self::wake_by_ref(&self);
    }
    fn wake_by_ref(self: &Arc<Self>) {
        // Unpark the parked pipe thread so it polls again.
        self.0.unpark();
    }
}
impl Wake for Registration {
    fn wake(self: Arc<Self>) {
        self.wake_by_ref();
    }
    /// Forward pipe readiness to the registered poller, if any.
    fn wake_by_ref(self: &Arc<Self>) {
        let mut interest_lock = self.interest.lock().unwrap();
        if let Some(interest) = interest_lock.as_ref() {
            // Send the event to the poller.
            //
            // Only notify for the direction this end of the pipe represents.
            let send_event = match self.end {
                PipeEnd::Reader => interest.event.readable,
                PipeEnd::Writer => interest.event.writable,
            };
            if send_event {
                interest.poller.post(CompletionPacket::new(interest.event)).ok();
                // Clear the event if we're in oneshot mode.
                if matches!(interest.mode, PollMode::Oneshot | PollMode::EdgeOneshot) {
                    *interest_lock = None;
                }
            }
        }
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/tty/windows/mod.rs | alacritty_terminal/src/tty/windows/mod.rs | use std::ffi::OsStr;
use std::io::{self, Result};
use std::iter::once;
use std::os::windows::ffi::OsStrExt;
use std::sync::Arc;
use std::sync::mpsc::TryRecvError;
use crate::event::{OnResize, WindowSize};
use crate::tty::windows::child::ChildExitWatcher;
use crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options, Shell};
mod blocking;
mod child;
mod conpty;
use blocking::{UnblockedReader, UnblockedWriter};
use conpty::Conpty as Backend;
use miow::pipe::{AnonRead, AnonWrite};
use polling::{Event, Poller};
/// Poller key identifying child-process (exit) events.
pub const PTY_CHILD_EVENT_TOKEN: usize = 1;
/// Poller key identifying PTY read/write readiness.
pub const PTY_READ_WRITE_TOKEN: usize = 2;
// Console output is drained, and input filled, on background threads.
type ReadPipe = UnblockedReader<AnonRead>;
type WritePipe = UnblockedWriter<AnonWrite>;
/// A Windows pseudoterminal backed by ConPTY.
pub struct Pty {
    // XXX: Backend is required to be the first field, to ensure correct drop order. Dropping
    // `conout` before `backend` will cause a deadlock (with Conpty).
    backend: Backend,
    /// Console output (read side).
    conout: ReadPipe,
    /// Console input (write side).
    conin: WritePipe,
    /// Watcher reporting child-process exit.
    child_watcher: ChildExitWatcher,
}
/// Create a new ConPTY-backed pseudoterminal.
///
/// `_window_id` is unused on Windows.
pub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result<Pty> {
    conpty::new(config, window_size)
}
impl Pty {
    /// Assemble a PTY from its backend, console pipes and child watcher.
    fn new(
        backend: impl Into<Backend>,
        conout: impl Into<ReadPipe>,
        conin: impl Into<WritePipe>,
        child_watcher: ChildExitWatcher,
    ) -> Self {
        let backend = backend.into();
        let conout = conout.into();
        let conin = conin.into();
        Self { backend, conout, conin, child_watcher }
    }
    /// Access the watcher observing the child process' lifetime.
    pub fn child_watcher(&self) -> &ChildExitWatcher {
        &self.child_watcher
    }
}
/// Return a copy of `event` tagged with the given poller key.
fn with_key(event: Event, key: usize) -> Event {
    let mut tagged = event;
    tagged.key = key;
    tagged
}
impl EventedReadWrite for Pty {
    type Reader = ReadPipe;
    type Writer = WritePipe;
    /// Register both console pipes and the child watcher with the poller.
    ///
    /// Pipes use `PTY_READ_WRITE_TOKEN`; the watcher uses
    /// `PTY_CHILD_EVENT_TOKEN`.
    #[inline]
    unsafe fn register(
        &mut self,
        poll: &Arc<Poller>,
        interest: polling::Event,
        poll_opts: polling::PollMode,
    ) -> io::Result<()> {
        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);
        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);
        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));
        Ok(())
    }
    /// Re-registration is identical to registration for these sources: each
    /// `register` simply replaces the stored interest.
    #[inline]
    fn reregister(
        &mut self,
        poll: &Arc<Poller>,
        interest: polling::Event,
        poll_opts: polling::PollMode,
    ) -> io::Result<()> {
        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);
        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);
        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));
        Ok(())
    }
    /// Clear stored interests; nothing was ever added to the poller itself,
    /// so the poller argument is unused.
    #[inline]
    fn deregister(&mut self, _poll: &Arc<Poller>) -> io::Result<()> {
        self.conin.deregister();
        self.conout.deregister();
        self.child_watcher.deregister();
        Ok(())
    }
    #[inline]
    fn reader(&mut self) -> &mut Self::Reader {
        &mut self.conout
    }
    #[inline]
    fn writer(&mut self) -> &mut Self::Writer {
        &mut self.conin
    }
}
impl EventedPty for Pty {
    /// Poll the child watcher's channel for an exit notification.
    fn next_child_event(&mut self) -> Option<ChildEvent> {
        match self.child_watcher.event_rx().try_recv() {
            Err(TryRecvError::Empty) => None,
            // A closed channel means the watcher side is gone; report an
            // exit without a status code.
            Err(TryRecvError::Disconnected) => Some(ChildEvent::Exited(None)),
            Ok(event) => Some(event),
        }
    }
}
impl OnResize for Pty {
    /// Forward the resize to the ConPTY backend.
    fn on_resize(&mut self, window_size: WindowSize) {
        self.backend.on_resize(window_size)
    }
}
// Modified per stdlib implementation.
// https://github.com/rust-lang/rust/blob/6707bf0f59485cf054ac1095725df43220e4be20/library/std/src/sys/args/windows.rs#L174
/// Append `arg` to `cmd`, escaped per the Windows C runtime parsing rules.
///
/// The argument is surrounded by double quotes only when it is empty or
/// contains a space/tab; embedded quotes get `2n+1` preceding backslashes
/// (where `n` backslashes immediately precede them), and a quoted argument's
/// trailing backslashes are doubled before the closing quote.
fn push_escaped_arg(cmd: &mut String, arg: &str) {
    let needs_quotes = arg.is_empty() || arg.chars().any(|c| c == ' ' || c == '\t');
    if needs_quotes {
        cmd.push('"');
    }
    // Number of backslashes immediately preceding the current position.
    let mut pending_backslashes: usize = 0;
    for ch in arg.chars() {
        match ch {
            '\\' => pending_backslashes += 1,
            '"' => {
                // Add n+1 backslashes to total 2n+1 before an internal '"'.
                for _ in 0..=pending_backslashes {
                    cmd.push('\\');
                }
                pending_backslashes = 0;
            },
            _ => pending_backslashes = 0,
        }
        cmd.push(ch);
    }
    if needs_quotes {
        // Add n backslashes to total 2n before the closing '"'.
        for _ in 0..pending_backslashes {
            cmd.push('\\');
        }
        cmd.push('"');
    }
}
/// Build the command line handed to `CreateProcessW`.
///
/// Falls back to `powershell` with no arguments when the config does not
/// specify a shell; arguments are escaped only when `escape_args` is set.
fn cmdline(config: &Options) -> String {
    let default_shell = Shell::new("powershell".to_owned(), Vec::new());
    let shell = config.shell.as_ref().unwrap_or(&default_shell);
    let mut cmd = shell.program.clone();
    for arg in &shell.args {
        cmd.push(' ');
        match config.escape_args {
            true => push_escaped_arg(&mut cmd, arg),
            false => cmd.push_str(arg),
        }
    }
    cmd
}
/// Converts the string slice into a Windows-standard representation for "W"-
/// suffixed function variants, which accept UTF-16 encoded string values.
pub fn win32_string<S: AsRef<OsStr> + ?Sized>(value: &S) -> Vec<u16> {
OsStr::new(value).encode_wide().chain(once(0)).collect()
}
#[cfg(test)]
mod test {
    use crate::tty::windows::{cmdline, push_escaped_arg};
    use crate::tty::{Options, Shell};
    /// Table-driven check of the C-runtime-style argument escaping rules.
    #[test]
    fn test_escape() {
        let test_set = vec![
            // Basic cases - no escaping needed
            ("abc", "abc"),
            // Cases requiring quotes (space/tab)
            ("", "\"\""),
            (" ", "\" \""),
            ("ab c", "\"ab c\""),
            ("ab\tc", "\"ab\tc\""),
            // Cases with backslashes only (no spaces, no quotes) - no quotes added
            ("ab\\c", "ab\\c"),
            // Cases with quotes only (no spaces) - quotes escaped but no outer quotes
            ("ab\"c", "ab\\\"c"),
            ("\"", "\\\""),
            ("a\"b\"c", "a\\\"b\\\"c"),
            // Cases requiring both quotes and escaping (contains spaces)
            ("ab \"c", "\"ab \\\"c\""),
            ("a \"b\" c", "\"a \\\"b\\\" c\""),
            // Complex real-world cases
            ("C:\\Program Files\\", "\"C:\\Program Files\\\\\""),
            ("C:\\Program Files\\a.txt", "\"C:\\Program Files\\a.txt\""),
            (
                r#"sh -c "cd /home/user; ARG='abc' \""'${SHELL:-sh}" -i -c '"'echo hello'""#,
                r#""sh -c \"cd /home/user; ARG='abc' \\\"\"'${SHELL:-sh}\" -i -c '\"'echo hello'\"""#,
            ),
        ];
        for (input, expected) in test_set {
            let mut escaped_arg = String::new();
            push_escaped_arg(&mut escaped_arg, input);
            assert_eq!(escaped_arg, expected, "Failed for input: {}", input);
        }
    }
    /// `cmdline` must escape arguments only when `escape_args` is enabled.
    #[test]
    fn test_cmdline() {
        let mut options = Options {
            shell: Some(Shell {
                program: "echo".to_string(),
                args: vec!["hello world".to_string()],
            }),
            working_directory: None,
            drain_on_exit: true,
            env: Default::default(),
            escape_args: false,
        };
        assert_eq!(cmdline(&options), "echo hello world");
        options.escape_args = true;
        assert_eq!(cmdline(&options), "echo \"hello world\"");
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/tty/windows/child.rs | alacritty_terminal/src/tty/windows/child.rs | use std::ffi::c_void;
use std::io::Error;
use std::num::NonZeroU32;
use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};
use std::sync::{Arc, Mutex, mpsc};
use polling::os::iocp::{CompletionPacket, PollerIocpExt};
use polling::{Event, Poller};
use windows_sys::Win32::Foundation::{BOOLEAN, FALSE, HANDLE};
use windows_sys::Win32::System::Threading::{
GetExitCodeProcess, GetProcessId, INFINITE, RegisterWaitForSingleObject, UnregisterWait,
WT_EXECUTEINWAITTHREAD, WT_EXECUTEONLYONCE,
};
use crate::tty::ChildEvent;
/// Poller plus the event to post when the child exits.
struct Interest {
    poller: Arc<Poller>,
    event: Event,
}
/// Context handed (as a raw pointer) to the Win32 wait callback.
struct ChildExitSender {
    /// Channel used to publish the `ChildEvent::Exited` notification.
    sender: mpsc::Sender<ChildEvent>,
    /// Shared poller interest, used to wake the event loop.
    interest: Arc<Mutex<Option<Interest>>>,
    /// Raw handle of the watched child process.
    child_handle: AtomicPtr<c_void>,
}
/// WinAPI callback to run when child process exits.
extern "system" fn child_exit_callback(ctx: *mut c_void, timed_out: BOOLEAN) {
    if timed_out != 0 {
        return;
    }
    // Reclaim ownership of the context leaked in `ChildExitWatcher::new`;
    // it is dropped when this callback returns.
    let event_tx: Box<_> = unsafe { Box::from_raw(ctx as *mut ChildExitSender) };
    let mut exit_code = 0_u32;
    let child_handle = event_tx.child_handle.load(Ordering::Relaxed) as HANDLE;
    let status = unsafe { GetExitCodeProcess(child_handle, &mut exit_code) };
    // Report no exit code if the process status could not be queried.
    let exit_code = if status == FALSE { None } else { Some(exit_code as i32) };
    event_tx.sender.send(ChildEvent::Exited(exit_code)).ok();
    // Wake the poller, if anyone registered interest in child events.
    let interest = event_tx.interest.lock().unwrap();
    if let Some(interest) = interest.as_ref() {
        interest.poller.post(CompletionPacket::new(interest.event)).ok();
    }
}
/// Watches a child process handle and reports its exit via a channel.
pub struct ChildExitWatcher {
    /// Wait registration returned by `RegisterWaitForSingleObject`.
    wait_handle: AtomicPtr<c_void>,
    /// Receiving side for `ChildEvent`s produced by the wait callback.
    event_rx: mpsc::Receiver<ChildEvent>,
    /// Poller interest shared with the callback.
    interest: Arc<Mutex<Option<Interest>>>,
    /// Raw handle of the watched child process.
    child_handle: AtomicPtr<c_void>,
    /// Process ID of the child, if it could be queried.
    pid: Option<NonZeroU32>,
}
impl ChildExitWatcher {
    /// Register a one-shot wait on `child_handle`; `child_exit_callback` runs
    /// on a wait thread once the process is signaled (i.e. has exited).
    pub fn new(child_handle: HANDLE) -> Result<ChildExitWatcher, Error> {
        let (event_tx, event_rx) = mpsc::channel();
        let mut wait_handle: HANDLE = ptr::null_mut();
        let interest = Arc::new(Mutex::new(None));
        // Ownership of this box is transferred to the callback via the raw
        // context pointer below.
        let sender_ref = Box::new(ChildExitSender {
            sender: event_tx,
            interest: interest.clone(),
            child_handle: AtomicPtr::from(child_handle),
        });
        let success = unsafe {
            RegisterWaitForSingleObject(
                &mut wait_handle,
                child_handle,
                Some(child_exit_callback),
                Box::into_raw(sender_ref).cast(),
                INFINITE,
                WT_EXECUTEINWAITTHREAD | WT_EXECUTEONLYONCE,
            )
        };
        if success == 0 {
            Err(Error::last_os_error())
        } else {
            let pid = unsafe { NonZeroU32::new(GetProcessId(child_handle)) };
            Ok(ChildExitWatcher {
                event_rx,
                interest,
                pid,
                child_handle: AtomicPtr::from(child_handle),
                wait_handle: AtomicPtr::from(wait_handle),
            })
        }
    }
    /// Channel on which `ChildEvent`s are delivered.
    pub fn event_rx(&self) -> &mpsc::Receiver<ChildEvent> {
        &self.event_rx
    }
    /// Ask the exit callback to post `event` to `poller` when the child exits.
    pub fn register(&self, poller: &Arc<Poller>, event: Event) {
        *self.interest.lock().unwrap() = Some(Interest { poller: poller.clone(), event });
    }
    /// Stop notifying any previously registered poller.
    pub fn deregister(&self) {
        *self.interest.lock().unwrap() = None;
    }
    /// Retrieve the process handle of the underlying child process.
    ///
    /// This function does **not** pass ownership of the raw handle to you,
    /// and the handle is only guaranteed to be valid while the hosted application
    /// has not yet been destroyed.
    ///
    /// If you terminate the process using this handle, the terminal will get a
    /// timeout error, and the child watcher will emit an `Exited` event.
    pub fn raw_handle(&self) -> HANDLE {
        self.child_handle.load(Ordering::Relaxed) as HANDLE
    }
    /// Retrieve the Process ID associated to the underlying child process.
    pub fn pid(&self) -> Option<NonZeroU32> {
        self.pid
    }
}
impl Drop for ChildExitWatcher {
    fn drop(&mut self) {
        unsafe {
            // Cancel the registered wait so the callback cannot fire after
            // this watcher is gone.
            //
            // NOTE(review): if the callback never ran, the `ChildExitSender`
            // box passed as its context is not reclaimed here — confirm
            // whether that one-time leak is acceptable.
            UnregisterWait(self.wait_handle.load(Ordering::Relaxed) as HANDLE);
        }
    }
}
#[cfg(test)]
mod tests {
    use std::os::windows::io::AsRawHandle;
    use std::process::Command;
    use std::sync::Arc;
    use std::time::Duration;
    use super::super::PTY_CHILD_EVENT_TOKEN;
    use super::*;
    /// Spawn cmd.exe, kill it, and verify both the poller wake-up and the
    /// `ChildEvent::Exited` notification arrive.
    #[test]
    pub fn event_is_emitted_when_child_exits() {
        const WAIT_TIMEOUT: Duration = Duration::from_millis(200);
        let poller = Arc::new(Poller::new().unwrap());
        let mut child = Command::new("cmd.exe").spawn().unwrap();
        let child_exit_watcher = ChildExitWatcher::new(child.as_raw_handle() as HANDLE).unwrap();
        child_exit_watcher.register(&poller, Event::readable(PTY_CHILD_EVENT_TOKEN));
        child.kill().unwrap();
        // Poll for the event or fail with timeout if nothing has been sent.
        let mut events = polling::Events::new();
        poller.wait(&mut events, Some(WAIT_TIMEOUT)).unwrap();
        assert_eq!(events.iter().next().unwrap().key, PTY_CHILD_EVENT_TOKEN);
        // Verify that at least one `ChildEvent::Exited` was received.
        assert_eq!(child_exit_watcher.event_rx().try_recv(), Ok(ChildEvent::Exited(Some(1))));
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/tty/windows/conpty.rs | alacritty_terminal/src/tty/windows/conpty.rs | use log::{info, warn};
use std::collections::{HashMap, HashSet};
use std::ffi::OsStr;
use std::io::{Error, Result};
use std::os::windows::ffi::OsStrExt;
use std::os::windows::io::IntoRawHandle;
use std::{mem, ptr};
use windows_sys::Win32::Foundation::{HANDLE, S_OK};
use windows_sys::Win32::System::Console::{
COORD, ClosePseudoConsole, CreatePseudoConsole, HPCON, ResizePseudoConsole,
};
use windows_sys::Win32::System::LibraryLoader::{GetProcAddress, LoadLibraryW};
use windows_sys::core::{HRESULT, PWSTR};
use windows_sys::{s, w};
use windows_sys::Win32::System::Threading::{
CREATE_UNICODE_ENVIRONMENT, CreateProcessW, EXTENDED_STARTUPINFO_PRESENT,
InitializeProcThreadAttributeList, PROC_THREAD_ATTRIBUTE_PSEUDOCONSOLE, PROCESS_INFORMATION,
STARTF_USESTDHANDLES, STARTUPINFOEXW, STARTUPINFOW, UpdateProcThreadAttribute,
};
use crate::event::{OnResize, WindowSize};
use crate::tty::Options;
use crate::tty::windows::blocking::{UnblockedReader, UnblockedWriter};
use crate::tty::windows::child::ChildExitWatcher;
use crate::tty::windows::{Pty, cmdline, win32_string};
const PIPE_CAPACITY: usize = crate::event_loop::READ_BUFFER_SIZE;
/// Load the pseudoconsole API from conpty.dll if possible, otherwise use the
/// standard Windows API.
///
/// The conpty.dll from the Windows Terminal project
/// supports loading OpenConsole.exe, which offers many improvements and
/// bugfixes compared to the standard conpty that ships with Windows.
///
/// The conpty.dll and OpenConsole.exe files will be searched in PATH and in
/// the directory where Alacritty's executable is located.
type CreatePseudoConsoleFn =
    unsafe extern "system" fn(COORD, HANDLE, HANDLE, u32, *mut HPCON) -> HRESULT;
type ResizePseudoConsoleFn = unsafe extern "system" fn(HPCON, COORD) -> HRESULT;
type ClosePseudoConsoleFn = unsafe extern "system" fn(HPCON);
/// Resolved pseudoconsole entry points (from conpty.dll or the system API).
struct ConptyApi {
    create: CreatePseudoConsoleFn,
    resize: ResizePseudoConsoleFn,
    close: ClosePseudoConsoleFn,
}
impl ConptyApi {
    /// Resolve the API, preferring conpty.dll over the system implementation.
    fn new() -> Self {
        match Self::load_conpty() {
            Some(conpty) => {
                info!("Using conpty.dll for pseudoconsole");
                conpty
            },
            None => {
                // Cannot load conpty.dll - use the standard Windows API.
                info!("Using Windows API for pseudoconsole");
                Self {
                    create: CreatePseudoConsole,
                    resize: ResizePseudoConsole,
                    close: ClosePseudoConsole,
                }
            },
        }
    }
    /// Try loading ConptyApi from conpty.dll library.
    fn load_conpty() -> Option<Self> {
        type LoadedFn = unsafe extern "system" fn() -> isize;
        unsafe {
            let hmodule = LoadLibraryW(w!("conpty.dll"));
            if hmodule.is_null() {
                return None;
            }
            let create_fn = GetProcAddress(hmodule, s!("CreatePseudoConsole"))?;
            let resize_fn = GetProcAddress(hmodule, s!("ResizePseudoConsole"))?;
            let close_fn = GetProcAddress(hmodule, s!("ClosePseudoConsole"))?;
            // SAFETY(review): assumes the DLL's exports match the documented
            // pseudoconsole signatures — confirm against the conpty.dll ABI.
            Some(Self {
                create: mem::transmute::<LoadedFn, CreatePseudoConsoleFn>(create_fn),
                resize: mem::transmute::<LoadedFn, ResizePseudoConsoleFn>(resize_fn),
                close: mem::transmute::<LoadedFn, ClosePseudoConsoleFn>(close_fn),
            })
        }
    }
}
/// RAII Pseudoconsole.
pub struct Conpty {
    /// Raw pseudoconsole handle; closed via `api.close` on drop.
    pub handle: HPCON,
    /// API used to create/resize/close this console.
    api: ConptyApi,
}
impl Drop for Conpty {
    fn drop(&mut self) {
        // XXX: This will block until the conout pipe is drained. Will cause a deadlock if the
        // conout pipe has already been dropped by this point.
        //
        // See PR #3084 and https://docs.microsoft.com/en-us/windows/console/closepseudoconsole.
        unsafe { (self.api.close)(self.handle) }
    }
}
// The ConPTY handle can be sent between threads.
//
// SAFETY(review): HPCON is treated as an opaque handle here; confirm against
// the ConPTY documentation that it may be used from a different thread.
unsafe impl Send for Conpty {}
/// Create a ConPTY pseudoconsole and spawn the configured shell attached to it.
pub fn new(config: &Options, window_size: WindowSize) -> Result<Pty> {
    let api = ConptyApi::new();
    let mut pty_handle: HPCON = 0;
    // Passing 0 as the size parameter allows the "system default" buffer
    // size to be used. There may be small performance and memory advantages
    // to be gained by tuning this in the future, but it's likely a reasonable
    // start point.
    let (conout, conout_pty_handle) = miow::pipe::anonymous(0)?;
    let (conin_pty_handle, conin) = miow::pipe::anonymous(0)?;
    // Create the Pseudo Console, using the pipes.
    let result = unsafe {
        (api.create)(
            window_size.into(),
            conin_pty_handle.into_raw_handle() as HANDLE,
            conout_pty_handle.into_raw_handle() as HANDLE,
            0,
            &mut pty_handle as *mut _,
        )
    };
    assert_eq!(result, S_OK);
    let mut success;
    // Prepare child process startup info.
    let mut size: usize = 0;
    let mut startup_info_ex: STARTUPINFOEXW = unsafe { mem::zeroed() };
    startup_info_ex.StartupInfo.lpTitle = std::ptr::null_mut() as PWSTR;
    startup_info_ex.StartupInfo.cb = mem::size_of::<STARTUPINFOEXW>() as u32;
    // Setting this flag but leaving all the handles as default (null) ensures the
    // PTY process does not inherit any handles from this Alacritty process.
    startup_info_ex.StartupInfo.dwFlags |= STARTF_USESTDHANDLES;
    // Create the appropriately sized thread attribute list.
    unsafe {
        // First call only queries the required buffer size into `size`.
        let failure =
            InitializeProcThreadAttributeList(ptr::null_mut(), 1, 0, &mut size as *mut usize) > 0;
        // This call was expected to return false.
        if failure {
            return Err(Error::last_os_error());
        }
    }
    let mut attr_list: Box<[u8]> = vec![0; size].into_boxed_slice();
    // Set startup info's attribute list & initialize it
    //
    // Lint failure is spurious; it's because winapi's definition of PROC_THREAD_ATTRIBUTE_LIST
    // implies it is one pointer in size (32 or 64 bits) but really this is just a dummy value.
    // Casting a *mut u8 (pointer to 8 bit type) might therefore not be aligned correctly in
    // the compiler's eyes.
    #[allow(clippy::cast_ptr_alignment)]
    {
        startup_info_ex.lpAttributeList = attr_list.as_mut_ptr() as _;
    }
    unsafe {
        success = InitializeProcThreadAttributeList(
            startup_info_ex.lpAttributeList,
            1,
            0,
            &mut size as *mut usize,
        ) > 0;
        if !success {
            return Err(Error::last_os_error());
        }
    }
    // Set thread attribute list's Pseudo Console to the specified ConPTY.
    unsafe {
        success = UpdateProcThreadAttribute(
            startup_info_ex.lpAttributeList,
            0,
            PROC_THREAD_ATTRIBUTE_PSEUDOCONSOLE as usize,
            pty_handle as *mut std::ffi::c_void,
            mem::size_of::<HPCON>(),
            ptr::null_mut(),
            ptr::null_mut(),
        ) > 0;
        if !success {
            return Err(Error::last_os_error());
        }
    }
    // Prepare child process creation arguments.
    let cmdline = win32_string(&cmdline(config));
    let cwd = config.working_directory.as_ref().map(win32_string);
    let mut creation_flags = EXTENDED_STARTUPINFO_PRESENT;
    let custom_env_block = convert_custom_env(&config.env);
    let custom_env_block_pointer = match &custom_env_block {
        Some(custom_env_block) => {
            // The block built by `convert_custom_env` is UTF-16 encoded.
            creation_flags |= CREATE_UNICODE_ENVIRONMENT;
            custom_env_block.as_ptr() as *mut std::ffi::c_void
        },
        None => ptr::null_mut(),
    };
    let mut proc_info: PROCESS_INFORMATION = unsafe { mem::zeroed() };
    unsafe {
        success = CreateProcessW(
            ptr::null(),
            cmdline.as_ptr() as PWSTR,
            ptr::null_mut(),
            ptr::null_mut(),
            false as i32,
            creation_flags,
            custom_env_block_pointer,
            cwd.as_ref().map_or_else(ptr::null, |s| s.as_ptr()),
            &mut startup_info_ex.StartupInfo as *mut STARTUPINFOW,
            &mut proc_info as *mut PROCESS_INFORMATION,
        ) > 0;
        if !success {
            return Err(Error::last_os_error());
        }
    }
    // Move console I/O onto background threads so reads/writes never block.
    let conin = UnblockedWriter::new(conin, PIPE_CAPACITY);
    let conout = UnblockedReader::new(conout, PIPE_CAPACITY);
    let child_watcher = ChildExitWatcher::new(proc_info.hProcess)?;
    let conpty = Conpty { handle: pty_handle as HPCON, api };
    Ok(Pty::new(conpty, conout, conin, child_watcher))
}
// Windows environment variables are case-insensitive, and the caller is responsible for
// deduplicating environment variables, so do that here while converting.
//
// https://learn.microsoft.com/en-us/previous-versions/troubleshoot/windows/win32/createprocess-cannot-eliminate-duplicate-variables#environment-variables
fn convert_custom_env(custom_env: &HashMap<String, String>) -> Option<Vec<u16>> {
    // With a null `lpEnvironment`, Windows inherits the parent's environment instead.
    if custom_env.is_empty() {
        return None;
    }

    let mut block = Vec::new();
    let mut seen_keys = HashSet::new();

    // User-provided variables take priority; remember their case-folded keys for dedup.
    for (custom_key, custom_value) in custom_env {
        let custom_key_os = OsStr::new(custom_key);
        if !seen_keys.insert(custom_key_os.to_ascii_uppercase()) {
            warn!(
                "Omitting environment variable pair with duplicate key: \
                 '{custom_key}={custom_value}'"
            );
            continue;
        }

        add_windows_env_key_value_to_block(&mut block, custom_key_os, OsStr::new(&custom_value));
    }

    // Append the inherited process environment afterwards, so it can never
    // shadow a user-provided variable.
    for (inherited_key, inherited_value) in std::env::vars_os() {
        if seen_keys.insert(inherited_key.to_ascii_uppercase()) {
            add_windows_env_key_value_to_block(&mut block, &inherited_key, &inherited_value);
        }
    }

    // The environment block is terminated by one extra NUL.
    block.push(0);

    Some(block)
}
// According to the `lpEnvironment` parameter description:
// https://learn.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-createprocessa#parameters
//
// > An environment block consists of a null-terminated block of null-terminated strings. Each
// string is in the following form:
// >
// > name=value\0
fn add_windows_env_key_value_to_block(block: &mut Vec<u16>, key: &OsStr, value: &OsStr) {
    // Append one `name=value\0` entry as UTF-16 code units.
    let entry = key
        .encode_wide()
        .chain(std::iter::once(u16::from(b'=')))
        .chain(value.encode_wide())
        .chain(std::iter::once(0));
    block.extend(entry);
}
impl OnResize for Conpty {
    /// Resize the pseudoconsole to the new window dimensions.
    fn on_resize(&mut self, window_size: WindowSize) {
        // SAFETY: `self.handle` is the HPCON stored at construction and
        // `self.api.resize` is the matching ConPTY resize entry point —
        // NOTE(review): validity of both is established where `Conpty` is built.
        let result = unsafe { (self.api.resize)(self.handle, window_size.into()) };
        // A resize failure would leave terminal and PTY sizes out of sync; treat as fatal.
        assert_eq!(result, S_OK);
    }
}
impl From<WindowSize> for COORD {
    /// Convert terminal dimensions into a Win32 `COORD`: columns on `X`, lines on `Y`.
    fn from(window_size: WindowSize) -> Self {
        COORD { X: window_size.num_cols as i16, Y: window_size.num_lines as i16 }
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/grid/tests.rs | alacritty_terminal/src/grid/tests.rs | //! Tests for the Grid.
use super::*;
use crate::term::cell::Cell;
// Minimal `GridCell` impl so these tests can use bare `usize` values as cells.
impl GridCell for usize {
    fn is_empty(&self) -> bool {
        // Zero marks an unused cell in these tests.
        matches!(*self, 0)
    }

    fn reset(&mut self, template: &Self) {
        *self = *template;
    }

    fn flags(&self) -> &Flags {
        unimplemented!();
    }

    fn flags_mut(&mut self) -> &mut Flags {
        unimplemented!();
    }
}
// Scroll up moves lines upward.
#[test]
fn scroll_up() {
    // Fill a 10x1 grid with its own line numbers.
    let mut grid = Grid::<usize>::new(10, 1, 0);
    for i in 0..10 {
        grid[Line(i as i32)][Column(0)] = i;
    }

    grid.scroll_up::<usize>(&(Line(0)..Line(10)), 2);

    // Lines 0..8 now hold the former lines 2..10.
    for i in 0..8 {
        assert_eq!(grid[Line(i as i32)][Column(0)], i + 2);
        assert_eq!(grid[Line(i as i32)].occ, 1);
    }

    // The two lines scrolled in at the bottom are reset.
    for i in 8..10 {
        assert_eq!(grid[Line(i as i32)][Column(0)], 0);
        assert_eq!(grid[Line(i as i32)].occ, 0);
    }
}
// Scroll down moves lines downward.
#[test]
fn scroll_down() {
    // Fill a 10x1 grid with its own line numbers.
    let mut grid = Grid::<usize>::new(10, 1, 0);
    for i in 0..10 {
        grid[Line(i as i32)][Column(0)] = i;
    }

    grid.scroll_down::<usize>(&(Line(0)..Line(10)), 2);

    // The two lines scrolled in at the top are reset.
    for i in 0..2 {
        assert_eq!(grid[Line(i as i32)][Column(0)], 0);
        assert_eq!(grid[Line(i as i32)].occ, 0);
    }

    // Lines 2..10 now hold the former lines 0..8.
    for i in 2..10 {
        assert_eq!(grid[Line(i as i32)][Column(0)], i - 2);
        assert_eq!(grid[Line(i as i32)].occ, 1);
    }
}
#[test]
fn scroll_down_with_history() {
    // Same as `scroll_down`, but with one line of scrollback available.
    let mut grid = Grid::<usize>::new(10, 1, 1);
    grid.increase_scroll_limit(1);
    for i in 0..10 {
        grid[Line(i as i32)][Column(0)] = i;
    }

    grid.scroll_down::<usize>(&(Line(0)..Line(10)), 2);

    // The two lines scrolled in at the top are reset.
    for i in 0..2 {
        assert_eq!(grid[Line(i as i32)][Column(0)], 0);
        assert_eq!(grid[Line(i as i32)].occ, 0);
    }

    // Lines 2..10 now hold the former lines 0..8.
    for i in 2..10 {
        assert_eq!(grid[Line(i as i32)][Column(0)], i - 2);
        assert_eq!(grid[Line(i as i32)].occ, 1);
    }
}
// Test that GridIterator works.
#[test]
fn test_iter() {
    // Helper asserting that an iterator step produced the expected cell value.
    let assert_indexed = |value: usize, indexed: Option<Indexed<&usize>>| {
        assert_eq!(Some(&value), indexed.map(|indexed| indexed.cell));
    };

    // 5x5 grid filled with 0..25 in row-major order.
    let mut grid = Grid::<usize>::new(5, 5, 0);
    for i in 0..5 {
        for j in 0..5 {
            grid[Line(i)][Column(j)] = i as usize * 5 + j;
        }
    }

    let mut iter = grid.iter_from(Point::new(Line(0), Column(0)));

    // Nothing precedes the starting point.
    assert_eq!(None, iter.prev());
    assert_indexed(1, iter.next());
    assert_eq!(Column(1), iter.point().column);
    assert_eq!(0, iter.point().line);

    assert_indexed(2, iter.next());
    assert_indexed(3, iter.next());
    assert_indexed(4, iter.next());

    // Test line-wrapping.
    assert_indexed(5, iter.next());
    assert_eq!(Column(0), iter.point().column);
    assert_eq!(1, iter.point().line);

    // Stepping backwards wraps to the previous line's last column.
    assert_indexed(4, iter.prev());
    assert_eq!(Column(4), iter.point().column);
    assert_eq!(0, iter.point().line);

    // Make sure iter.cell() returns the current iterator position.
    assert_eq!(&4, iter.cell());

    // Test that iter ends at end of grid.
    let mut final_iter = grid.iter_from(Point { line: Line(4), column: Column(4) });
    assert_eq!(None, final_iter.next());
    assert_indexed(23, final_iter.prev());
}
#[test]
fn shrink_reflow() {
    // One visible line, five columns, two lines of scrollback capacity.
    let mut grid = Grid::<Cell>::new(1, 5, 2);
    grid[Line(0)][Column(0)] = cell('1');
    grid[Line(0)][Column(1)] = cell('2');
    grid[Line(0)][Column(2)] = cell('3');
    grid[Line(0)][Column(3)] = cell('4');
    grid[Line(0)][Column(4)] = cell('5');

    // Shrink to two columns with reflow: "12345" wraps onto three lines.
    grid.resize(true, 1, 2);

    assert_eq!(grid.total_lines(), 3);

    // "12", wrap-flagged, pushed into scrollback.
    assert_eq!(grid[Line(-2)].len(), 2);
    assert_eq!(grid[Line(-2)][Column(0)], cell('1'));
    assert_eq!(grid[Line(-2)][Column(1)], wrap_cell('2'));

    // "34", wrap-flagged.
    assert_eq!(grid[Line(-1)].len(), 2);
    assert_eq!(grid[Line(-1)][Column(0)], cell('3'));
    assert_eq!(grid[Line(-1)][Column(1)], wrap_cell('4'));

    // "5" plus one blank cell on the visible line.
    assert_eq!(grid[Line(0)].len(), 2);
    assert_eq!(grid[Line(0)][Column(0)], cell('5'));
    assert_eq!(grid[Line(0)][Column(1)], Cell::default());
}
#[test]
fn shrink_reflow_twice() {
    let mut grid = Grid::<Cell>::new(1, 5, 2);
    grid[Line(0)][Column(0)] = cell('1');
    grid[Line(0)][Column(1)] = cell('2');
    grid[Line(0)][Column(2)] = cell('3');
    grid[Line(0)][Column(3)] = cell('4');
    grid[Line(0)][Column(4)] = cell('5');

    // Shrinking in two steps (5 -> 4 -> 2 columns) must produce the same
    // layout as shrinking directly to 2 columns (see `shrink_reflow`).
    grid.resize(true, 1, 4);
    grid.resize(true, 1, 2);

    assert_eq!(grid.total_lines(), 3);

    assert_eq!(grid[Line(-2)].len(), 2);
    assert_eq!(grid[Line(-2)][Column(0)], cell('1'));
    assert_eq!(grid[Line(-2)][Column(1)], wrap_cell('2'));

    assert_eq!(grid[Line(-1)].len(), 2);
    assert_eq!(grid[Line(-1)][Column(0)], cell('3'));
    assert_eq!(grid[Line(-1)][Column(1)], wrap_cell('4'));

    assert_eq!(grid[Line(0)].len(), 2);
    assert_eq!(grid[Line(0)][Column(0)], cell('5'));
    assert_eq!(grid[Line(0)][Column(1)], Cell::default());
}
#[test]
fn shrink_reflow_empty_cell_inside_line() {
    // "1 34 ": blanks both inside the line and trailing.
    let mut grid = Grid::<Cell>::new(1, 5, 3);
    grid[Line(0)][Column(0)] = cell('1');
    grid[Line(0)][Column(1)] = Cell::default();
    grid[Line(0)][Column(2)] = cell('3');
    grid[Line(0)][Column(3)] = cell('4');
    grid[Line(0)][Column(4)] = Cell::default();

    grid.resize(true, 1, 2);

    // The inner blank is preserved; the trailing blank is dropped.
    assert_eq!(grid.total_lines(), 2);

    assert_eq!(grid[Line(-1)].len(), 2);
    assert_eq!(grid[Line(-1)][Column(0)], cell('1'));
    assert_eq!(grid[Line(-1)][Column(1)], wrap_cell(' '));

    assert_eq!(grid[Line(0)].len(), 2);
    assert_eq!(grid[Line(0)][Column(0)], cell('3'));
    assert_eq!(grid[Line(0)][Column(1)], cell('4'));

    // Shrink again down to a single column.
    grid.resize(true, 1, 1);

    assert_eq!(grid.total_lines(), 4);

    assert_eq!(grid[Line(-3)].len(), 1);
    assert_eq!(grid[Line(-3)][Column(0)], wrap_cell('1'));

    assert_eq!(grid[Line(-2)].len(), 1);
    assert_eq!(grid[Line(-2)][Column(0)], wrap_cell(' '));

    assert_eq!(grid[Line(-1)].len(), 1);
    assert_eq!(grid[Line(-1)][Column(0)], wrap_cell('3'));

    assert_eq!(grid[Line(0)].len(), 1);
    assert_eq!(grid[Line(0)][Column(0)], cell('4'));
}
#[test]
fn grow_reflow() {
    // "12" wrap-flagged onto "3 ".
    let mut grid = Grid::<Cell>::new(2, 2, 0);
    grid[Line(0)][Column(0)] = cell('1');
    grid[Line(0)][Column(1)] = wrap_cell('2');
    grid[Line(1)][Column(0)] = cell('3');
    grid[Line(1)][Column(1)] = Cell::default();

    // Growing to three columns un-wraps "123" onto a single line.
    grid.resize(true, 2, 3);

    assert_eq!(grid.total_lines(), 2);

    assert_eq!(grid[Line(0)].len(), 3);
    assert_eq!(grid[Line(0)][Column(0)], cell('1'));
    assert_eq!(grid[Line(0)][Column(1)], cell('2'));
    assert_eq!(grid[Line(0)][Column(2)], cell('3'));

    // Make sure rest of grid is empty.
    assert_eq!(grid[Line(1)].len(), 3);
    assert_eq!(grid[Line(1)][Column(0)], Cell::default());
    assert_eq!(grid[Line(1)][Column(1)], Cell::default());
    assert_eq!(grid[Line(1)][Column(2)], Cell::default());
}
#[test]
fn grow_reflow_multiline() {
    // "12" -> "34" -> "56", with wrap flags chaining the first two lines.
    let mut grid = Grid::<Cell>::new(3, 2, 0);
    grid[Line(0)][Column(0)] = cell('1');
    grid[Line(0)][Column(1)] = wrap_cell('2');
    grid[Line(1)][Column(0)] = cell('3');
    grid[Line(1)][Column(1)] = wrap_cell('4');
    grid[Line(2)][Column(0)] = cell('5');
    grid[Line(2)][Column(1)] = cell('6');

    // Growing to six columns collapses the whole chain onto one line.
    grid.resize(true, 3, 6);

    assert_eq!(grid.total_lines(), 3);

    assert_eq!(grid[Line(0)].len(), 6);
    assert_eq!(grid[Line(0)][Column(0)], cell('1'));
    assert_eq!(grid[Line(0)][Column(1)], cell('2'));
    assert_eq!(grid[Line(0)][Column(2)], cell('3'));
    assert_eq!(grid[Line(0)][Column(3)], cell('4'));
    assert_eq!(grid[Line(0)][Column(4)], cell('5'));
    assert_eq!(grid[Line(0)][Column(5)], cell('6'));

    // Make sure rest of grid is empty.
    for r in (1..3).map(Line::from) {
        assert_eq!(grid[r].len(), 6);
        for c in 0..6 {
            assert_eq!(grid[r][Column(c)], Cell::default());
        }
    }
}
#[test]
fn grow_reflow_disabled() {
    let mut grid = Grid::<Cell>::new(2, 2, 0);
    grid[Line(0)][Column(0)] = cell('1');
    grid[Line(0)][Column(1)] = wrap_cell('2');
    grid[Line(1)][Column(0)] = cell('3');
    grid[Line(1)][Column(1)] = Cell::default();

    // With reflow disabled the wrap flag is kept and no cells move lines.
    grid.resize(false, 2, 3);

    assert_eq!(grid.total_lines(), 2);

    assert_eq!(grid[Line(0)].len(), 3);
    assert_eq!(grid[Line(0)][Column(0)], cell('1'));
    assert_eq!(grid[Line(0)][Column(1)], wrap_cell('2'));
    assert_eq!(grid[Line(0)][Column(2)], Cell::default());

    assert_eq!(grid[Line(1)].len(), 3);
    assert_eq!(grid[Line(1)][Column(0)], cell('3'));
    assert_eq!(grid[Line(1)][Column(1)], Cell::default());
    assert_eq!(grid[Line(1)][Column(2)], Cell::default());
}
#[test]
fn shrink_reflow_disabled() {
    let mut grid = Grid::<Cell>::new(1, 5, 2);
    grid[Line(0)][Column(0)] = cell('1');
    grid[Line(0)][Column(1)] = cell('2');
    grid[Line(0)][Column(2)] = cell('3');
    grid[Line(0)][Column(3)] = cell('4');
    grid[Line(0)][Column(4)] = cell('5');

    // With reflow disabled, truncated cells are simply discarded.
    grid.resize(false, 1, 2);

    assert_eq!(grid.total_lines(), 1);

    assert_eq!(grid[Line(0)].len(), 2);
    assert_eq!(grid[Line(0)][Column(0)], cell('1'));
    assert_eq!(grid[Line(0)][Column(1)], cell('2'));
}
#[test]
fn accurate_size_hint() {
    let grid = Grid::<Cell>::new(5, 5, 2);

    // Size hints must stay exact from arbitrary starting points, including
    // points outside the grid bounds.
    size_hint_matches_count(grid.iter_from(Point::new(Line(0), Column(0))));
    size_hint_matches_count(grid.iter_from(Point::new(Line(2), Column(3))));
    size_hint_matches_count(grid.iter_from(Point::new(Line(4), Column(4))));
    size_hint_matches_count(grid.iter_from(Point::new(Line(4), Column(2))));
    size_hint_matches_count(grid.iter_from(Point::new(Line(10), Column(10))));
    size_hint_matches_count(grid.iter_from(Point::new(Line(2), Column(10))));

    // The hint must also remain exact after the iterator was partially consumed.
    let mut iterator = grid.iter_from(Point::new(Line(3), Column(1)));
    iterator.next();
    iterator.next();
    size_hint_matches_count(iterator);

    size_hint_matches_count(grid.display_iter());
}
/// Assert that an iterator's `size_hint` is exact: both bounds equal its actual count.
fn size_hint_matches_count<T>(iter: impl Iterator<Item = T>) {
    // `iter` already implements `Iterator`; the previous `iter.into_iter()`
    // round-trip was an identity conversion (clippy `useless_conversion`).
    let (lower, upper) = iter.size_hint();
    let count = iter.count();

    assert_eq!(lower, count);
    assert_eq!(upper, Some(count));
}
// Build a default cell carrying character `c`.
//
// https://github.com/rust-lang/rust-clippy/pull/6375
#[allow(clippy::all)]
fn cell(c: char) -> Cell {
    let mut template = Cell::default();
    template.c = c;
    template
}
// Like `cell`, but with the WRAPLINE flag set.
fn wrap_cell(c: char) -> Cell {
    let mut wrapped = cell(c);
    wrapped.flags.insert(Flags::WRAPLINE);
    wrapped
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/grid/row.rs | alacritty_terminal/src/grid/row.rs | //! Defines the Row type which makes up lines in the grid.
use std::cmp::{max, min};
use std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};
use std::{ptr, slice};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use crate::grid::GridCell;
use crate::index::Column;
use crate::term::cell::ResetDiscriminant;
/// A row in the grid.
#[derive(Default, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Row<T> {
    /// The cells making up this row.
    inner: Vec<T>,

    /// Maximum number of occupied entries.
    ///
    /// This is the upper bound on the number of elements in the row, which have been modified
    /// since the last reset. All cells after this point are guaranteed to be equal.
    pub(crate) occ: usize,
}
impl<T: PartialEq> PartialEq for Row<T> {
    fn eq(&self, other: &Self) -> bool {
        // `occ` is deliberately ignored: it is a dirtiness bound, not row content.
        self.inner == other.inner
    }
}
impl<T: Default> Row<T> {
    /// Create a new terminal row.
    ///
    /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.
    pub fn new(columns: usize) -> Row<T> {
        debug_assert!(columns >= 1);

        let mut inner: Vec<T> = Vec::with_capacity(columns);

        // This is a slightly optimized version of `std::vec::Vec::resize`.
        //
        // SAFETY: exactly `columns` slots are written below, all within the capacity
        // just reserved, and `set_len` runs only after every slot is initialized.
        // NOTE(review): relies on `columns >= 1` (only debug-asserted above) — with
        // `columns == 0` the unconditional final `ptr::write` would be out of bounds.
        unsafe {
            let mut ptr = inner.as_mut_ptr();

            for _ in 1..columns {
                ptr::write(ptr, T::default());
                ptr = ptr.offset(1);
            }

            ptr::write(ptr, T::default());

            inner.set_len(columns);
        }

        Row { inner, occ: 0 }
    }

    /// Increase the number of columns in the row.
    #[inline]
    pub fn grow(&mut self, columns: usize) {
        // Growing never shrinks; ignore requests at or below the current width.
        if self.inner.len() >= columns {
            return;
        }

        self.inner.resize_with(columns, T::default);
    }

    /// Reduce the number of columns in the row.
    ///
    /// This will return all non-empty cells that were removed.
    pub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>
    where
        T: GridCell,
    {
        if self.inner.len() <= columns {
            return None;
        }

        // Split off cells for a new row.
        let mut new_row = self.inner.split_off(columns);
        // Drop trailing empty cells, keeping everything up to the last occupied one.
        let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);
        new_row.truncate(index);

        self.occ = min(self.occ, columns);

        if new_row.is_empty() { None } else { Some(new_row) }
    }

    /// Reset all cells in the row to the `template` cell.
    #[inline]
    pub fn reset<D>(&mut self, template: &T)
    where
        T: ResetDiscriminant<D> + GridCell,
        D: PartialEq,
    {
        debug_assert!(!self.inner.is_empty());

        // Mark all cells as dirty if template cell changed.
        //
        // Cells beyond `occ` are guaranteed equal to the last cell, so comparing
        // the final cell's discriminant suffices.
        let len = self.inner.len();
        if self.inner[len - 1].discriminant() != template.discriminant() {
            self.occ = len;
        }

        // Reset every dirty cell in the row.
        for item in &mut self.inner[0..self.occ] {
            item.reset(template);
        }

        self.occ = 0;
    }
}
#[allow(clippy::len_without_is_empty)]
impl<T> Row<T> {
    /// Build a row directly from its cells and occupancy bound.
    #[inline]
    pub fn from_vec(vec: Vec<T>, occ: usize) -> Row<T> {
        Row { inner: vec, occ }
    }

    /// Number of columns in the row.
    #[inline]
    pub fn len(&self) -> usize {
        self.inner.len()
    }

    /// Reference to the last cell in the row.
    #[inline]
    pub fn last(&self) -> Option<&T> {
        self.inner.last()
    }

    /// Mutable reference to the last cell in the row.
    #[inline]
    pub fn last_mut(&mut self) -> Option<&mut T> {
        // The caller may mutate the last cell, so conservatively mark the
        // whole row as occupied.
        self.occ = self.inner.len();
        self.inner.last_mut()
    }

    /// Append cells to the end of the row, draining `vec`.
    #[inline]
    pub fn append(&mut self, vec: &mut Vec<T>)
    where
        T: GridCell,
    {
        self.occ += vec.len();
        self.inner.append(vec);
    }

    /// Prepend `vec`'s cells to the front of the row.
    #[inline]
    pub fn append_front(&mut self, mut vec: Vec<T>) {
        self.occ += vec.len();

        // Move the existing cells behind the new ones, then adopt the buffer.
        vec.append(&mut self.inner);
        self.inner = vec;
    }

    /// Check if all cells in the row are empty.
    #[inline]
    pub fn is_clear(&self) -> bool
    where
        T: GridCell,
    {
        self.inner.iter().all(GridCell::is_empty)
    }

    /// Split off and return the first `at` cells, keeping the rest in the row.
    #[inline]
    pub fn front_split_off(&mut self, at: usize) -> Vec<T> {
        self.occ = self.occ.saturating_sub(at);

        // `split_off` keeps the head in `self.inner`; swap so the head is returned instead.
        let mut split = self.inner.split_off(at);
        std::mem::swap(&mut split, &mut self.inner);
        split
    }
}
impl<'a, T> IntoIterator for &'a Row<T> {
    type IntoIter = slice::Iter<'a, T>;
    type Item = &'a T;

    #[inline]
    fn into_iter(self) -> slice::Iter<'a, T> {
        self.inner.iter()
    }
}

impl<'a, T> IntoIterator for &'a mut Row<T> {
    type IntoIter = slice::IterMut<'a, T>;
    type Item = &'a mut T;

    #[inline]
    fn into_iter(self) -> slice::IterMut<'a, T> {
        // Mutable iteration can touch any cell, so mark the entire row occupied.
        self.occ = self.len();
        self.inner.iter_mut()
    }
}
impl<T> Index<Column> for Row<T> {
    type Output = T;

    #[inline]
    fn index(&self, index: Column) -> &T {
        &self.inner[index.0]
    }
}

impl<T> IndexMut<Column> for Row<T> {
    #[inline]
    fn index_mut(&mut self, index: Column) -> &mut T {
        // Mutable access may dirty this cell; extend the occupancy bound over it.
        self.occ = max(self.occ, *index + 1);
        &mut self.inner[index.0]
    }
}
impl<T> Index<Range<Column>> for Row<T> {
    type Output = [T];

    #[inline]
    fn index(&self, index: Range<Column>) -> &[T] {
        &self.inner[(index.start.0)..(index.end.0)]
    }
}

impl<T> IndexMut<Range<Column>> for Row<T> {
    #[inline]
    fn index_mut(&mut self, index: Range<Column>) -> &mut [T] {
        // Every cell up to (exclusive) `end` may be mutated through the slice.
        self.occ = max(self.occ, *index.end);
        &mut self.inner[(index.start.0)..(index.end.0)]
    }
}

impl<T> Index<RangeTo<Column>> for Row<T> {
    type Output = [T];

    #[inline]
    fn index(&self, index: RangeTo<Column>) -> &[T] {
        &self.inner[..(index.end.0)]
    }
}

impl<T> IndexMut<RangeTo<Column>> for Row<T> {
    #[inline]
    fn index_mut(&mut self, index: RangeTo<Column>) -> &mut [T] {
        self.occ = max(self.occ, *index.end);
        &mut self.inner[..(index.end.0)]
    }
}

impl<T> Index<RangeFrom<Column>> for Row<T> {
    type Output = [T];

    #[inline]
    fn index(&self, index: RangeFrom<Column>) -> &[T] {
        &self.inner[(index.start.0)..]
    }
}

impl<T> IndexMut<RangeFrom<Column>> for Row<T> {
    #[inline]
    fn index_mut(&mut self, index: RangeFrom<Column>) -> &mut [T] {
        // The slice reaches the end of the row, so the whole row may be dirtied.
        self.occ = self.len();
        &mut self.inner[(index.start.0)..]
    }
}
impl<T> Index<RangeFull> for Row<T> {
    type Output = [T];

    #[inline]
    fn index(&self, _: RangeFull) -> &[T] {
        &self.inner[..]
    }
}

impl<T> IndexMut<RangeFull> for Row<T> {
    #[inline]
    fn index_mut(&mut self, _: RangeFull) -> &mut [T] {
        // Full-range mutable access can dirty every cell.
        self.occ = self.len();
        &mut self.inner[..]
    }
}

impl<T> Index<RangeToInclusive<Column>> for Row<T> {
    type Output = [T];

    #[inline]
    fn index(&self, index: RangeToInclusive<Column>) -> &[T] {
        &self.inner[..=(index.end.0)]
    }
}

impl<T> IndexMut<RangeToInclusive<Column>> for Row<T> {
    #[inline]
    fn index_mut(&mut self, index: RangeToInclusive<Column>) -> &mut [T] {
        // Inclusive end, hence `+ 1` for the occupancy bound.
        self.occ = max(self.occ, *index.end + 1);
        &mut self.inner[..=(index.end.0)]
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/grid/storage.rs | alacritty_terminal/src/grid/storage.rs | use std::cmp::max;
use std::mem;
use std::mem::MaybeUninit;
use std::ops::{Index, IndexMut};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use super::Row;
use crate::index::Line;
/// Maximum number of buffered lines outside of the grid for performance optimization.
const MAX_CACHE_SIZE: usize = 1_000;
/// A ring buffer for optimizing indexing and rotation.
///
/// The [`Storage::rotate`] and [`Storage::rotate_down`] functions are fast modular additions on
/// the internal [`zero`] field. As compared with [`slice::rotate_left`] which must rearrange items
/// in memory.
///
/// As a consequence, both [`Index`] and [`IndexMut`] are reimplemented for this type to account
/// for the zeroth element not always being at the start of the allocation.
///
/// Because certain [`Vec`] operations are no longer valid on this type, no [`Deref`]
/// implementation is provided. Anything from [`Vec`] that should be exposed must be done so
/// manually.
///
/// [`slice::rotate_left`]: https://doc.rust-lang.org/std/primitive.slice.html#method.rotate_left
/// [`Deref`]: std::ops::Deref
/// [`zero`]: #structfield.zero
#[derive(Clone, Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Storage<T> {
    /// Raw backing buffer; always indexed through `zero`, never directly.
    inner: Vec<Row<T>>,

    /// Starting point for the storage of rows.
    ///
    /// This value represents the starting line offset within the ring buffer. The value of this
    /// offset may be larger than the `len` itself, and will wrap around to the start to form the
    /// ring buffer. It represents the bottommost line of the terminal.
    zero: usize,

    /// Number of visible lines.
    visible_lines: usize,

    /// Total number of lines currently active in the terminal (scrollback + visible)
    ///
    /// Shrinking this length allows reducing the number of lines in the scrollback buffer without
    /// having to truncate the raw `inner` buffer.
    /// As long as `len` is bigger than `inner`, it is also possible to grow the scrollback buffer
    /// without any additional insertions.
    len: usize,
}
impl<T: PartialEq> PartialEq for Storage<T> {
    fn eq(&self, other: &Self) -> bool {
        // Both storage buffers need to be truncated and zeroed.
        //
        // Comparing ring buffers with differing `zero` offsets would require a
        // rotation first, so equality is only defined for rezeroed storage.
        assert_eq!(self.zero, 0);
        assert_eq!(other.zero, 0);

        self.inner == other.inner && self.len == other.len
    }
}
impl<T> Storage<T> {
    /// Create a storage buffer with `visible_lines` rows of `columns` cells each.
    #[inline]
    pub fn with_capacity(visible_lines: usize, columns: usize) -> Storage<T>
    where
        T: Default,
    {
        // Initialize visible lines; the scrollback buffer is initialized dynamically.
        let mut inner = Vec::with_capacity(visible_lines);
        inner.resize_with(visible_lines, || Row::new(columns));

        Storage { inner, zero: 0, visible_lines, len: visible_lines }
    }

    /// Increase the number of lines in the buffer.
    #[inline]
    pub fn grow_visible_lines(&mut self, next: usize)
    where
        T: Default,
    {
        // Number of lines the buffer needs to grow.
        let additional_lines = next - self.visible_lines;

        // Reuse the current row width for all newly allocated rows.
        let columns = self[Line(0)].len();
        self.initialize(additional_lines, columns);

        // Update visible lines.
        self.visible_lines = next;
    }

    /// Decrease the number of lines in the buffer.
    #[inline]
    pub fn shrink_visible_lines(&mut self, next: usize) {
        // Shrink the size without removing any lines.
        let shrinkage = self.visible_lines - next;
        self.shrink_lines(shrinkage);

        // Update visible lines.
        self.visible_lines = next;
    }

    /// Shrink the number of lines in the buffer.
    #[inline]
    pub fn shrink_lines(&mut self, shrinkage: usize) {
        self.len -= shrinkage;

        // Free memory.
        //
        // Only reallocate once the cache of hidden lines exceeds `MAX_CACHE_SIZE`.
        if self.inner.len() > self.len + MAX_CACHE_SIZE {
            self.truncate();
        }
    }

    /// Truncate the invisible elements from the raw buffer.
    #[inline]
    pub fn truncate(&mut self) {
        // Rotate so index 0 of `inner` is the zero point, then drop the hidden tail.
        self.rezero();

        self.inner.truncate(self.len);
    }

    /// Dynamically grow the storage buffer at runtime.
    #[inline]
    pub fn initialize(&mut self, additional_rows: usize, columns: usize)
    where
        T: Default,
    {
        if self.len + additional_rows > self.inner.len() {
            // Allocate in `MAX_CACHE_SIZE` batches to amortize future growth.
            self.rezero();

            let realloc_size = self.inner.len() + max(additional_rows, MAX_CACHE_SIZE);
            self.inner.resize_with(realloc_size, || Row::new(columns));
        }

        self.len += additional_rows;
    }

    /// Number of lines currently active (scrollback + visible).
    #[inline]
    pub fn len(&self) -> usize {
        self.len
    }

    /// Swap implementation for Row<T>.
    ///
    /// Exploits the known size of Row<T> to produce a slightly more efficient
    /// swap than going through slice::swap.
    ///
    /// The default implementation from swap generates 8 movups and 4 movaps
    /// instructions. This implementation achieves the swap in only 8 movups
    /// instructions.
    pub fn swap(&mut self, a: Line, b: Line) {
        // The word-wise copy below assumes `Row<T>` is exactly four words wide.
        debug_assert_eq!(mem::size_of::<Row<T>>(), mem::size_of::<usize>() * 4);

        let a = self.compute_index(a);
        let b = self.compute_index(b);

        // SAFETY: `compute_index` yields in-bounds indices, and swapping the rows as
        // raw words neither drops nor duplicates any `T`.
        unsafe {
            // Cast to a qword array to opt out of copy restrictions and avoid
            // drop hazards. Byte array is no good here since for whatever
            // reason LLVM won't optimized it.
            let a_ptr = self.inner.as_mut_ptr().add(a) as *mut MaybeUninit<usize>;
            let b_ptr = self.inner.as_mut_ptr().add(b) as *mut MaybeUninit<usize>;

            // Copy 1 qword at a time.
            //
            // The optimizer unrolls this loop and vectorizes it.
            let mut tmp: MaybeUninit<usize>;
            for i in 0..4 {
                tmp = *a_ptr.offset(i);
                *a_ptr.offset(i) = *b_ptr.offset(i);
                *b_ptr.offset(i) = tmp;
            }
        }
    }

    /// Rotate the grid, moving all lines up/down in history.
    #[inline]
    pub fn rotate(&mut self, count: isize) {
        debug_assert!(count.unsigned_abs() <= self.inner.len());

        let len = self.inner.len();
        // Adding `len` keeps the operand non-negative before the modulo.
        self.zero = (self.zero as isize + count + len as isize) as usize % len;
    }

    /// Rotate all existing lines down in history.
    ///
    /// This is a faster, specialized version of [`rotate_left`].
    ///
    /// [`rotate_left`]: https://doc.rust-lang.org/std/vec/struct.Vec.html#method.rotate_left
    #[inline]
    pub fn rotate_down(&mut self, count: usize) {
        self.zero = (self.zero + count) % self.inner.len();
    }

    /// Update the raw storage buffer.
    #[inline]
    pub fn replace_inner(&mut self, vec: Vec<Row<T>>) {
        self.len = vec.len();
        self.inner = vec;
        self.zero = 0;
    }

    /// Remove all rows from storage.
    #[inline]
    pub fn take_all(&mut self) -> Vec<Row<T>> {
        self.truncate();

        let mut buffer = Vec::new();

        mem::swap(&mut buffer, &mut self.inner);
        self.len = 0;

        buffer
    }

    /// Compute actual index in underlying storage given the requested index.
    #[inline]
    fn compute_index(&self, requested: Line) -> usize {
        debug_assert!(requested.0 < self.visible_lines as i32);

        // Map terminal lines (negative = scrollback) onto positive buffer offsets.
        let positive = -(requested - self.visible_lines).0 as usize - 1;

        debug_assert!(positive < self.len);

        let zeroed = self.zero + positive;

        // Use if/else instead of remainder here to improve performance.
        //
        // Requires `zeroed` to be smaller than `self.inner.len() * 2`,
        // but both `self.zero` and `requested` are always smaller than `self.inner.len()`.
        if zeroed >= self.inner.len() { zeroed - self.inner.len() } else { zeroed }
    }

    /// Rotate the ringbuffer to reset `self.zero` back to index `0`.
    #[inline]
    fn rezero(&mut self) {
        if self.zero == 0 {
            return;
        }

        self.inner.rotate_left(self.zero);
        self.zero = 0;
    }
}
impl<T> Index<Line> for Storage<T> {
    type Output = Row<T>;

    /// Look up a row by terminal line, translating through the ring buffer offset.
    #[inline]
    fn index(&self, index: Line) -> &Self::Output {
        &self.inner[self.compute_index(index)]
    }
}
impl<T> IndexMut<Line> for Storage<T> {
    /// Mutable counterpart of `Index<Line>`.
    #[inline]
    fn index_mut(&mut self, index: Line) -> &mut Self::Output {
        let raw = self.compute_index(index);
        &mut self.inner[raw]
    }
}
#[cfg(test)]
mod tests {
use crate::grid::GridCell;
use crate::grid::row::Row;
use crate::grid::storage::{MAX_CACHE_SIZE, Storage};
use crate::index::{Column, Line};
use crate::term::cell::Flags;
// Minimal `GridCell` impl so the storage tests can use plain `char` cells.
impl GridCell for char {
    fn is_empty(&self) -> bool {
        // Spaces and tabs count as blank content.
        matches!(*self, ' ' | '\t')
    }

    fn reset(&mut self, template: &Self) {
        *self = *template;
    }

    fn flags(&self) -> &Flags {
        unimplemented!();
    }

    fn flags_mut(&mut self) -> &mut Flags {
        unimplemented!();
    }
}
#[test]
fn with_capacity() {
    let storage = Storage::<char>::with_capacity(3, 1);

    // All visible lines are allocated eagerly; no scrollback and no rotation yet.
    assert_eq!(storage.zero, 0);
    assert_eq!(storage.len, 3);
    assert_eq!(storage.visible_lines, 3);
    assert_eq!(storage.inner.len(), 3);
}
#[test]
fn indexing() {
    let mut storage = Storage::<char>::with_capacity(3, 1);

    for (line, c) in [(0, '0'), (1, '1'), (2, '2')] {
        storage[Line(line)] = filled_row(c);
    }

    // Moving the zero point by one rotates the visible lines.
    storage.zero += 1;

    for (line, c) in [(0, '2'), (1, '0'), (2, '1')] {
        assert_eq!(storage[Line(line)], filled_row(c));
    }
}
#[test]
#[should_panic]
#[cfg(debug_assertions)]
fn indexing_above_inner_len() {
    // `compute_index` only debug-asserts its bounds, hence `cfg(debug_assertions)`.
    let storage = Storage::<char>::with_capacity(1, 1);
    let _ = &storage[Line(-1)];
}
#[test]
fn rotate() {
    let mut storage = Storage::<char>::with_capacity(3, 1);

    storage.rotate(2);
    assert_eq!(storage.zero, 2);

    // Shrinking only reduces `len`; raw buffer and zero offset stay untouched.
    storage.shrink_lines(2);
    assert_eq!(storage.zero, 2);
    assert_eq!(storage.len, 1);
    assert_eq!(storage.inner.len(), 3);
}
/// Grow the buffer one line at the end of the buffer.
///
/// Before:
/// 0: 0 <- Zero
/// 1: 1
/// 2: -
/// After:
/// 0: 0 <- Zero
/// 1: 1
/// 2: -
/// 3: \0
/// ...
/// MAX_CACHE_SIZE: \0
#[test]
fn grow_after_zero() {
    // Setup storage area.
    let mut storage: Storage<char> = Storage {
        inner: vec![filled_row('0'), filled_row('1'), filled_row('-')],
        zero: 0,
        visible_lines: 3,
        len: 3,
    };

    // Grow buffer.
    storage.grow_visible_lines(4);

    // Make sure the result is correct.
    let mut expected = Storage {
        inner: vec![filled_row('0'), filled_row('1'), filled_row('-')],
        zero: 0,
        visible_lines: 4,
        len: 4,
    };
    // Growth allocates a full `MAX_CACHE_SIZE` batch of blank rows up front.
    expected.inner.append(&mut vec![filled_row('\0'); MAX_CACHE_SIZE]);

    // Compare field-by-field: `PartialEq for Storage` asserts `zero == 0` on both sides.
    assert_eq!(storage.visible_lines, expected.visible_lines);
    assert_eq!(storage.inner, expected.inner);
    assert_eq!(storage.zero, expected.zero);
    assert_eq!(storage.len, expected.len);
}
/// Grow the buffer one line at the start of the buffer.
///
/// Before:
/// 0: -
/// 1: 0 <- Zero
/// 2: 1
/// After:
/// 0: 0 <- Zero
/// 1: 1
/// 2: -
/// 3: \0
/// ...
/// MAX_CACHE_SIZE: \0
#[test]
fn grow_before_zero() {
    // Setup storage area.
    let mut storage: Storage<char> = Storage {
        inner: vec![filled_row('-'), filled_row('0'), filled_row('1')],
        zero: 1,
        visible_lines: 3,
        len: 3,
    };

    // Grow buffer.
    storage.grow_visible_lines(4);

    // Make sure the result is correct.
    //
    // Growing rezeroes the ring buffer first, so `zero` moves from 1 back to 0
    // and the rows are rotated into natural order.
    let mut expected = Storage {
        inner: vec![filled_row('0'), filled_row('1'), filled_row('-')],
        zero: 0,
        visible_lines: 4,
        len: 4,
    };
    expected.inner.append(&mut vec![filled_row('\0'); MAX_CACHE_SIZE]);

    assert_eq!(storage.visible_lines, expected.visible_lines);
    assert_eq!(storage.inner, expected.inner);
    assert_eq!(storage.zero, expected.zero);
    assert_eq!(storage.len, expected.len);
}
/// Shrink the buffer one line at the start of the buffer.
///
/// Before:
/// 0: 2
/// 1: 0 <- Zero
/// 2: 1
/// After:
/// 0: 2 <- Hidden
/// 0: 0 <- Zero
/// 1: 1
#[test]
fn shrink_before_zero() {
    // Setup storage area.
    let mut storage: Storage<char> = Storage {
        inner: vec![filled_row('2'), filled_row('0'), filled_row('1')],
        zero: 1,
        visible_lines: 3,
        len: 3,
    };

    // Shrink buffer.
    storage.shrink_visible_lines(2);

    // Make sure the result is correct.
    //
    // Shrinking only reduces `len`/`visible_lines`; the hidden row stays allocated.
    let expected = Storage {
        inner: vec![filled_row('2'), filled_row('0'), filled_row('1')],
        zero: 1,
        visible_lines: 2,
        len: 2,
    };
    assert_eq!(storage.visible_lines, expected.visible_lines);
    assert_eq!(storage.inner, expected.inner);
    assert_eq!(storage.zero, expected.zero);
    assert_eq!(storage.len, expected.len);
}
/// Shrink the buffer one line at the end of the buffer.
///
/// Before:
/// 0: 0 <- Zero
/// 1: 1
/// 2: 2
/// After:
/// 0: 0 <- Zero
/// 1: 1
/// 2: 2 <- Hidden
#[test]
fn shrink_after_zero() {
    // Setup storage area.
    let mut storage: Storage<char> = Storage {
        inner: vec![filled_row('0'), filled_row('1'), filled_row('2')],
        zero: 0,
        visible_lines: 3,
        len: 3,
    };

    // Shrink buffer.
    storage.shrink_visible_lines(2);

    // Make sure the result is correct.
    //
    // Only the bookkeeping changes; row '2' remains in the raw buffer as a hidden line.
    let expected = Storage {
        inner: vec![filled_row('0'), filled_row('1'), filled_row('2')],
        zero: 0,
        visible_lines: 2,
        len: 2,
    };
    assert_eq!(storage.visible_lines, expected.visible_lines);
    assert_eq!(storage.inner, expected.inner);
    assert_eq!(storage.zero, expected.zero);
    assert_eq!(storage.len, expected.len);
}
/// Shrink the buffer at the start and end of the buffer.
///
/// Before:
///   0: 4
///   1: 5
///   2: 0 <- Zero
///   3: 1
///   4: 2
///   5: 3
/// After:
///   0: 4 <- Hidden
///   1: 5 <- Hidden
///   2: 0 <- Zero
///   3: 1
///   4: 2 <- Hidden
///   5: 3 <- Hidden
#[test]
fn shrink_before_and_after_zero() {
    let mut storage: Storage<char> = Storage {
        inner: vec![
            filled_row('4'),
            filled_row('5'),
            filled_row('0'),
            filled_row('1'),
            filled_row('2'),
            filled_row('3'),
        ],
        zero: 2,
        visible_lines: 6,
        len: 6,
    };

    storage.shrink_visible_lines(2);

    // Lines on both sides of zero become hidden; the buffer itself is unchanged.
    assert_eq!(storage.visible_lines, 2);
    assert_eq!(storage.len, 2);
    assert_eq!(storage.zero, 2);
    assert_eq!(storage.inner, vec![
        filled_row('4'),
        filled_row('5'),
        filled_row('0'),
        filled_row('1'),
        filled_row('2'),
        filled_row('3'),
    ]);
}
/// Check that when truncating all hidden lines are removed from the raw buffer.
///
/// Before:
///   0: 4 <- Hidden
///   1: 5 <- Hidden
///   2: 0 <- Zero
///   3: 1
///   4: 2 <- Hidden
///   5: 3 <- Hidden
/// After:
///   0: 0 <- Zero
///   1: 1
#[test]
fn truncate_invisible_lines() {
    let mut storage: Storage<char> = Storage {
        inner: vec![
            filled_row('4'),
            filled_row('5'),
            filled_row('0'),
            filled_row('1'),
            filled_row('2'),
            filled_row('3'),
        ],
        zero: 2,
        visible_lines: 1,
        len: 2,
    };

    storage.truncate();

    // Only the two live lines survive, rebased so zero is at index 0.
    assert_eq!(storage.visible_lines, 1);
    assert_eq!(storage.len, 2);
    assert_eq!(storage.zero, 0);
    assert_eq!(storage.inner, vec![filled_row('0'), filled_row('1')]);
}
/// Truncate buffer only at the beginning.
///
/// Before:
///   0: 1
///   1: 2 <- Hidden
///   2: 0 <- Zero
/// After:
///   0: 1
///   0: 0 <- Zero
#[test]
fn truncate_invisible_lines_beginning() {
    let mut storage: Storage<char> = Storage {
        inner: vec![filled_row('1'), filled_row('2'), filled_row('0')],
        zero: 2,
        visible_lines: 1,
        len: 2,
    };

    storage.truncate();

    // The hidden line is dropped and the remaining rows are rebased onto zero.
    assert_eq!(storage.visible_lines, 1);
    assert_eq!(storage.len, 2);
    assert_eq!(storage.zero, 0);
    assert_eq!(storage.inner, vec![filled_row('0'), filled_row('1')]);
}
/// First shrink the buffer and then grow it again.
///
/// Before:
///   0: 4
///   1: 5
///   2: 0 <- Zero
///   3: 1
///   4: 2
///   5: 3
/// After Shrinking:
///   0: 4 <- Hidden
///   1: 5 <- Hidden
///   2: 0 <- Zero
///   3: 1
///   4: 2
///   5: 3 <- Hidden
/// After Growing:
///   0: 4
///   1: 5
///   2: -
///   3: 0 <- Zero
///   4: 1
///   5: 2
///   6: 3
#[test]
fn shrink_then_grow() {
    let mut storage: Storage<char> = Storage {
        inner: vec![
            filled_row('4'),
            filled_row('5'),
            filled_row('0'),
            filled_row('1'),
            filled_row('2'),
            filled_row('3'),
        ],
        zero: 2,
        visible_lines: 0,
        len: 6,
    };

    // Shrinking only changes the logical length; rows stay cached in the buffer.
    storage.shrink_lines(3);

    assert_eq!(storage.zero, 2);
    assert_eq!(storage.len, 3);
    assert_eq!(storage.inner, vec![
        filled_row('4'),
        filled_row('5'),
        filled_row('0'),
        filled_row('1'),
        filled_row('2'),
        filled_row('3'),
    ]);

    // Growing must reuse the previously freed rows instead of allocating new ones.
    storage.initialize(1, 1);

    assert_eq!(storage.zero, 2);
    assert_eq!(storage.len, 4);
    assert_eq!(storage.inner, vec![
        filled_row('4'),
        filled_row('5'),
        filled_row('0'),
        filled_row('1'),
        filled_row('2'),
        filled_row('3'),
    ]);
}
/// Growing past the cached rows reorders the buffer and appends fresh cache rows.
#[test]
fn initialize() {
    // Setup storage area.
    let mut storage: Storage<char> = Storage {
        inner: vec![
            filled_row('4'),
            filled_row('5'),
            filled_row('0'),
            filled_row('1'),
            filled_row('2'),
            filled_row('3'),
        ],
        zero: 2,
        visible_lines: 0,
        len: 6,
    };

    // Initialize additional lines.
    let init_size = 3;
    storage.initialize(init_size, 1);

    // The buffer is rebased to start at zero, then at least `MAX_CACHE_SIZE`
    // empty rows are appended as cache.
    let expected_init_size = std::cmp::max(init_size, MAX_CACHE_SIZE);
    let mut expected_inner = vec![
        filled_row('0'),
        filled_row('1'),
        filled_row('2'),
        filled_row('3'),
        filled_row('4'),
        filled_row('5'),
    ];
    expected_inner.extend(vec![filled_row('\0'); expected_init_size]);

    assert_eq!(storage.len, 9);
    assert_eq!(storage.zero, 0);
    assert_eq!(storage.inner, expected_inner);
}
/// Rotation past the end of the buffer must wrap `zero` back into range.
#[test]
fn rotate_wrap_zero() {
    let mut storage: Storage<char> = Storage {
        inner: vec![filled_row('-'), filled_row('-'), filled_row('-')],
        zero: 2,
        visible_lines: 0,
        len: 3,
    };

    storage.rotate(2);

    // `zero` must always remain a valid index into the raw buffer.
    assert!(storage.zero < storage.inner.len());
}
/// Build a one-column row whose single cell holds `content`.
fn filled_row(content: char) -> Row<char> {
    let mut row = Row::new(1);
    row[Column(0)] = content;
    row
}
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/grid/mod.rs | alacritty_terminal/src/grid/mod.rs | //! A specialized 2D grid implementation optimized for use in a terminal.
use std::cmp::{max, min};
use std::ops::{Bound, Deref, Index, IndexMut, Range, RangeBounds};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use crate::index::{Column, Line, Point};
use crate::term::cell::{Flags, ResetDiscriminant};
use crate::vte::ansi::{CharsetIndex, StandardCharset};
pub mod resize;
mod row;
mod storage;
#[cfg(test)]
mod tests;
pub use self::row::Row;
use self::storage::Storage;
/// Behavior required of a cell type so it can be stored in a grid.
pub trait GridCell: Sized {
    /// Check if the cell contains any content.
    fn is_empty(&self) -> bool;

    /// Perform an opinionated cell reset based on a template cell.
    fn reset(&mut self, template: &Self);

    /// Immutable access to the cell's flags.
    fn flags(&self) -> &Flags;

    /// Mutable access to the cell's flags.
    fn flags_mut(&mut self) -> &mut Flags;
}
/// Terminal cursor state used when writing into the grid.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct Cursor<T> {
    /// The location of this cursor.
    pub point: Point,

    /// Template cell when using this cursor.
    pub template: T,

    /// Currently configured graphic character sets.
    pub charsets: Charsets,

    /// Tracks if the next call to input will need to first handle wrapping.
    ///
    /// This is true after the last column is set with the input function. Any function that
    /// implicitly sets the line or column needs to set this to false to avoid wrapping twice.
    ///
    /// Tracking `input_needs_wrap` makes it possible to not store a cursor position that exceeds
    /// the number of columns, which would lead to index out of bounds when interacting with arrays
    /// without sanitization.
    pub input_needs_wrap: bool,
}
/// The four switchable charset slots of the terminal, indexed by [`CharsetIndex`].
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)]
pub struct Charsets([StandardCharset; 4]);

impl Index<CharsetIndex> for Charsets {
    type Output = StandardCharset;

    fn index(&self, index: CharsetIndex) -> &StandardCharset {
        &self.0[index as usize]
    }
}

impl IndexMut<CharsetIndex> for Charsets {
    fn index_mut(&mut self, index: CharsetIndex) -> &mut StandardCharset {
        &mut self.0[index as usize]
    }
}
/// Requested viewport scroll motion (see [`Grid::scroll_display`]).
#[derive(Debug, Copy, Clone)]
pub enum Scroll {
    /// Scroll by a fixed number of lines; positive values move towards history.
    Delta(i32),
    /// Scroll one screen height towards history.
    PageUp,
    /// Scroll one screen height towards the active area.
    PageDown,
    /// Jump to the top of the scrollback history.
    Top,
    /// Jump back to the active area.
    Bottom,
}
/// Grid based terminal content storage.
///
/// ```notrust
/// ┌─────────────────────────┐  <-- max_scroll_limit + lines
/// │                         │
/// │      UNINITIALIZED      │
/// │                         │
/// ├─────────────────────────┤  <-- self.raw.inner.len()
/// │                         │
/// │      RESIZE BUFFER      │
/// │                         │
/// ├─────────────────────────┤  <-- self.history_size() + lines
/// │                         │
/// │     SCROLLUP REGION     │
/// │                         │
/// ├─────────────────────────┤v lines
/// │                         │|
/// │     VISIBLE  REGION     │|
/// │                         │|
/// ├─────────────────────────┤^ <-- display_offset
/// │                         │
/// │    SCROLLDOWN REGION    │
/// │                         │
/// └─────────────────────────┘  <-- zero
///                           ^
///                       columns
/// ```
#[derive(Clone, Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Grid<T> {
    /// Current cursor for writing data.
    #[cfg_attr(feature = "serde", serde(skip))]
    pub cursor: Cursor<T>,

    /// Last saved cursor.
    #[cfg_attr(feature = "serde", serde(skip))]
    pub saved_cursor: Cursor<T>,

    /// Lines in the grid. Each row holds a list of cells corresponding to the
    /// columns in that row.
    raw: Storage<T>,

    /// Number of columns.
    columns: usize,

    /// Number of visible lines.
    lines: usize,

    /// Offset of displayed area.
    ///
    /// `0` means the viewport is pinned to the active (bottom) area.
    ///
    /// If the displayed region isn't at the bottom of the screen, it stays
    /// stationary while more text is emitted. The scrolling implementation
    /// updates this offset accordingly.
    display_offset: usize,

    /// Maximum number of lines in history.
    max_scroll_limit: usize,
}
impl<T: GridCell + Default + PartialEq> Grid<T> {
    /// Create a grid with `lines` × `columns` visible cells and at most
    /// `max_scroll_limit` lines of scrollback history.
    pub fn new(lines: usize, columns: usize, max_scroll_limit: usize) -> Grid<T> {
        Grid {
            raw: Storage::with_capacity(lines, columns),
            max_scroll_limit,
            display_offset: 0,
            saved_cursor: Cursor::default(),
            cursor: Cursor::default(),
            lines,
            columns,
        }
    }

    /// Update the size of the scrollback history.
    pub fn update_history(&mut self, history_size: usize) {
        let current_history_size = self.history_size();
        // Drop the oldest lines when the new limit is below the current history.
        if current_history_size > history_size {
            self.raw.shrink_lines(current_history_size - history_size);
        }
        // Keep the viewport inside the (possibly smaller) history.
        self.display_offset = min(self.display_offset, history_size);
        self.max_scroll_limit = history_size;
    }

    /// Move the viewport within the scrollback history.
    pub fn scroll_display(&mut self, scroll: Scroll) {
        self.display_offset = match scroll {
            Scroll::Delta(count) => {
                min(max((self.display_offset as i32) + count, 0) as usize, self.history_size())
            },
            Scroll::PageUp => min(self.display_offset + self.lines, self.history_size()),
            Scroll::PageDown => self.display_offset.saturating_sub(self.lines),
            Scroll::Top => self.history_size(),
            Scroll::Bottom => 0,
        };
    }

    /// Grow the scrollback history by up to `count` lines, without exceeding
    /// `max_scroll_limit`.
    fn increase_scroll_limit(&mut self, count: usize) {
        let count = min(count, self.max_scroll_limit - self.history_size());
        if count != 0 {
            self.raw.initialize(count, self.columns);
        }
    }

    /// Remove up to `count` lines from the scrollback history.
    fn decrease_scroll_limit(&mut self, count: usize) {
        let count = min(count, self.history_size());
        if count != 0 {
            // `count` is already clamped to the history size above, so it can be
            // passed through directly.
            self.raw.shrink_lines(count);
            self.display_offset = min(self.display_offset, self.history_size());
        }
    }

    /// Move lines inside `region` down by `positions`, clearing the lines that
    /// scroll in at the top of the region.
    #[inline]
    pub fn scroll_down<D>(&mut self, region: &Range<Line>, positions: usize)
    where
        T: ResetDiscriminant<D>,
        D: PartialEq,
    {
        // When rotating the entire region, just reset everything.
        if region.end - region.start <= positions {
            for i in (region.start.0..region.end.0).map(Line::from) {
                self.raw[i].reset(&self.cursor.template);
            }

            return;
        }

        // Which implementation we can use depends on the existence of a scrollback history.
        //
        // Since a scrollback history prevents us from rotating the entire buffer downwards, we
        // instead have to rely on a slower, swap-based implementation.
        if self.max_scroll_limit == 0 {
            // Swap the lines fixed at the bottom to their target positions after rotation.
            //
            // Since we've made sure that the rotation will never rotate away the entire region,
            // we know that the position of the fixed lines before the rotation must already be
            // visible.
            //
            // We need to start from the top, to make sure the fixed lines aren't swapped with
            // each other.
            let screen_lines = self.screen_lines() as i32;
            for i in (region.end.0..screen_lines).map(Line::from) {
                self.raw.swap(i, i - positions as i32);
            }

            // Rotate the entire line buffer downward.
            self.raw.rotate_down(positions);

            // Ensure all new lines are fully cleared.
            for i in (0..positions).map(Line::from) {
                self.raw[i].reset(&self.cursor.template);
            }

            // Swap the fixed lines at the top back into position.
            for i in (0..region.start.0).map(Line::from) {
                self.raw.swap(i, i + positions);
            }
        } else {
            // Subregion rotation: swap lines pairwise instead of rotating the buffer.
            let range = (region.start + positions).0..region.end.0;
            for line in range.rev().map(Line::from) {
                self.raw.swap(line, line - positions);
            }

            let range = region.start.0..(region.start + positions).0;
            for line in range.rev().map(Line::from) {
                self.raw[line].reset(&self.cursor.template);
            }
        }
    }

    /// Move lines at the bottom toward the top.
    ///
    /// This is the performance-sensitive part of scrolling.
    pub fn scroll_up<D>(&mut self, region: &Range<Line>, positions: usize)
    where
        T: ResetDiscriminant<D>,
        D: PartialEq,
    {
        // When rotating the entire region with fixed lines at the top, just reset everything.
        if region.end - region.start <= positions && region.start != 0 {
            for i in (region.start.0..region.end.0).map(Line::from) {
                self.raw[i].reset(&self.cursor.template);
            }

            return;
        }

        // Update display offset when not pinned to active area.
        if self.display_offset != 0 {
            self.display_offset = min(self.display_offset + positions, self.max_scroll_limit);
        }

        // Only rotate the entire history if the active region starts at the top.
        if region.start == 0 {
            // Create scrollback for the new lines.
            self.increase_scroll_limit(positions);

            // Swap the lines fixed at the top to their target positions after rotation.
            //
            // NOTE: This loop is empty whenever `region.start == 0` (always true in this
            // branch); it is kept for symmetry with `scroll_down`.
            for i in (0..region.start.0).rev().map(Line::from) {
                self.raw.swap(i, i + positions);
            }

            // Rotate the entire line buffer upward.
            self.raw.rotate(-(positions as isize));

            // Swap the fixed lines at the bottom back into position.
            let screen_lines = self.screen_lines() as i32;
            for i in (region.end.0..screen_lines).rev().map(Line::from) {
                self.raw.swap(i, i - positions);
            }
        } else {
            // Rotate lines without moving anything into history.
            for i in (region.start.0..region.end.0 - positions as i32).map(Line::from) {
                self.raw.swap(i, i + positions);
            }
        }

        // Ensure all new lines are fully cleared.
        for i in (region.end.0 - positions as i32..region.end.0).map(Line::from) {
            self.raw[i].reset(&self.cursor.template);
        }
    }

    /// Scroll all non-empty visible lines into history, clearing the viewport.
    pub fn clear_viewport<D>(&mut self)
    where
        T: ResetDiscriminant<D>,
        D: PartialEq,
    {
        // Determine how many lines to scroll up by, walking backwards from the
        // bottom until the first non-empty cell.
        let end = Point::new(Line(self.lines as i32 - 1), Column(self.columns()));
        let mut iter = self.iter_from(end);
        while let Some(cell) = iter.prev() {
            if !cell.is_empty() || cell.point.line < 0 {
                break;
            }
        }
        debug_assert!(iter.point.line >= -1);
        let positions = (iter.point.line.0 + 1) as usize;
        let region = Line(0)..Line(self.lines as i32);

        // Clear the viewport.
        self.scroll_up(&region, positions);

        // Reset rotated lines.
        for line in (0..(self.lines - positions)).map(Line::from) {
            self.raw[line].reset(&self.cursor.template);
        }
    }

    /// Completely reset the grid state.
    pub fn reset<D>(&mut self)
    where
        T: ResetDiscriminant<D>,
        D: PartialEq,
    {
        self.clear_history();

        self.saved_cursor = Cursor::default();
        self.cursor = Cursor::default();
        self.display_offset = 0;

        // Reset all visible lines.
        let range = self.topmost_line().0..(self.screen_lines() as i32);
        for line in range.map(Line::from) {
            self.raw[line].reset(&self.cursor.template);
        }
    }
}
impl<T> Grid<T> {
    /// Reset a visible region within the grid.
    pub fn reset_region<D, R: RangeBounds<Line>>(&mut self, bounds: R)
    where
        T: ResetDiscriminant<D> + GridCell + Default,
        D: PartialEq,
    {
        // Normalize the range bounds into a half-open `start..end` line range.
        let start = match bounds.start_bound() {
            Bound::Included(line) => *line,
            Bound::Excluded(line) => *line + 1,
            Bound::Unbounded => Line(0),
        };

        let end = match bounds.end_bound() {
            Bound::Included(line) => *line + 1,
            Bound::Excluded(line) => *line,
            Bound::Unbounded => Line(self.screen_lines() as i32),
        };

        debug_assert!(start < self.screen_lines() as i32);
        debug_assert!(end <= self.screen_lines() as i32);

        for line in (start.0..end.0).map(Line::from) {
            self.raw[line].reset(&self.cursor.template);
        }
    }

    /// Remove all lines from the scrollback history and reset the viewport.
    #[inline]
    pub fn clear_history(&mut self) {
        // Explicitly purge all lines from history.
        self.raw.shrink_lines(self.history_size());

        // Reset display offset.
        self.display_offset = 0;
    }

    /// This is used only for initializing after loading ref-tests.
    #[inline]
    pub fn initialize_all(&mut self)
    where
        T: GridCell + Default,
    {
        // Remove all cached lines to clear them of any content.
        self.truncate();

        // Initialize everything with empty new lines.
        self.raw.initialize(self.max_scroll_limit - self.history_size(), self.columns);
    }

    /// This is used only for truncating before saving ref-tests.
    #[inline]
    pub fn truncate(&mut self) {
        self.raw.truncate();
    }

    /// Iterate over all cells in the grid starting at a specific point.
    #[inline]
    pub fn iter_from(&self, point: Point) -> GridIterator<'_, T> {
        let end = Point::new(self.bottommost_line(), self.last_column());
        GridIterator { grid: self, point, end }
    }

    /// Iterate over all visible cells.
    ///
    /// This is slightly more optimized than calling `Grid::iter_from` in combination with
    /// `Iterator::take_while`.
    #[inline]
    pub fn display_iter(&self) -> GridIterator<'_, T> {
        let last_column = self.last_column();
        // Start one cell before the first visible one, since the iterator
        // advances before yielding.
        let start = Point::new(Line(-(self.display_offset() as i32) - 1), last_column);
        let end_line = min(start.line + self.screen_lines(), self.bottommost_line());
        let end = Point::new(end_line, last_column);

        GridIterator { grid: self, point: start, end }
    }

    /// Offset of the displayed area inside the scrollback history.
    #[inline]
    pub fn display_offset(&self) -> usize {
        self.display_offset
    }

    /// Mutable reference to the cell under the cursor.
    #[inline]
    pub fn cursor_cell(&mut self) -> &mut T {
        let point = self.cursor.point;
        &mut self[point.line][point.column]
    }
}
impl<T: PartialEq> PartialEq for Grid<T> {
    /// Compare grid content and geometry; cursors are intentionally excluded.
    fn eq(&self, other: &Self) -> bool {
        self.raw == other.raw
            && self.columns == other.columns
            && self.lines == other.lines
            && self.display_offset == other.display_offset
    }
}
// Line indices are relative to the viewport; negative lines reach into
// scrollback (see `Dimensions::topmost_line`).
impl<T> Index<Line> for Grid<T> {
    type Output = Row<T>;

    #[inline]
    fn index(&self, index: Line) -> &Row<T> {
        &self.raw[index]
    }
}

impl<T> IndexMut<Line> for Grid<T> {
    #[inline]
    fn index_mut(&mut self, index: Line) -> &mut Row<T> {
        &mut self.raw[index]
    }
}

impl<T> Index<Point> for Grid<T> {
    type Output = T;

    #[inline]
    fn index(&self, point: Point) -> &T {
        &self[point.line][point.column]
    }
}

impl<T> IndexMut<Point> for Grid<T> {
    #[inline]
    fn index_mut(&mut self, point: Point) -> &mut T {
        &mut self[point.line][point.column]
    }
}
/// Grid dimensions.
///
/// The provided methods are all derived from `total_lines`, `screen_lines` and
/// `columns`.
pub trait Dimensions {
    /// Total number of lines in the buffer, this includes scrollback and visible lines.
    fn total_lines(&self) -> usize;

    /// Height of the viewport in lines.
    fn screen_lines(&self) -> usize;

    /// Width of the terminal in columns.
    fn columns(&self) -> usize;

    /// Index for the last column.
    #[inline]
    fn last_column(&self) -> Column {
        Column(self.columns() - 1)
    }

    /// Line farthest up in the grid history.
    #[inline]
    fn topmost_line(&self) -> Line {
        Line(-(self.history_size() as i32))
    }

    /// Line farthest down in the grid history.
    #[inline]
    fn bottommost_line(&self) -> Line {
        Line(self.screen_lines() as i32 - 1)
    }

    /// Number of invisible lines part of the scrollback history.
    #[inline]
    fn history_size(&self) -> usize {
        self.total_lines().saturating_sub(self.screen_lines())
    }
}
impl<G> Dimensions for Grid<G> {
    /// Total number of lines currently stored, including scrollback.
    #[inline]
    fn total_lines(&self) -> usize {
        self.raw.len()
    }

    /// Height of the viewport in lines.
    #[inline]
    fn screen_lines(&self) -> usize {
        self.lines
    }

    /// Width of the grid in columns.
    #[inline]
    fn columns(&self) -> usize {
        self.columns
    }
}
/// Test-only dimensions modeled as a `(lines, columns)` tuple.
///
/// `total_lines` equals `screen_lines`, so this stands for a grid without any
/// scrollback history.
#[cfg(test)]
impl Dimensions for (usize, usize) {
    fn total_lines(&self) -> usize {
        self.0
    }

    fn screen_lines(&self) -> usize {
        self.0
    }

    fn columns(&self) -> usize {
        self.1
    }
}
/// A grid cell paired with its position in the grid.
#[derive(Debug, PartialEq, Eq)]
pub struct Indexed<T> {
    /// Position of the cell within the grid.
    pub point: Point,
    /// The cell itself.
    pub cell: T,
}

impl<T> Deref for Indexed<T> {
    type Target = T;

    // Deref to the wrapped cell, ignoring the position.
    #[inline]
    fn deref(&self) -> &T {
        &self.cell
    }
}
/// Grid cell iterator.
pub struct GridIterator<'a, T> {
    /// Immutable grid reference.
    grid: &'a Grid<T>,

    /// Current position of the iterator within the grid.
    ///
    /// NOTE: the iterator advances before yielding, so this tracks the most
    /// recently returned cell.
    point: Point,

    /// Last cell included in the iterator.
    end: Point,
}

impl<'a, T> GridIterator<'a, T> {
    /// Current iterator position.
    pub fn point(&self) -> Point {
        self.point
    }

    /// Cell at the current iterator position.
    pub fn cell(&self) -> &'a T {
        &self.grid[self.point]
    }
}
impl<'a, T> Iterator for GridIterator<'a, T> {
    type Item = Indexed<&'a T>;

    fn next(&mut self) -> Option<Self::Item> {
        // Stop once we've reached the end of the grid.
        if self.point >= self.end {
            return None;
        }

        // Advance one cell, wrapping to the next line past the last column.
        match self.point {
            Point { column, .. } if column >= self.grid.last_column() => {
                self.point.column = Column(0);
                self.point.line += 1;
            },
            _ => self.point.column += Column(1),
        }

        Some(Indexed { cell: &self.grid[self.point], point: self.point })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.point >= self.end {
            return (0, Some(0));
        }

        let size = if self.point.line == self.end.line {
            // Cells left on the final line after the last-yielded position.
            (self.end.column - self.point.column).0
        } else {
            // Cells after `point` on its line, plus all full lines in between,
            // plus the cells up to and including `end` on the last line.
            let cols_on_first_line = self.grid.columns.saturating_sub(self.point.column.0 + 1);
            let middle_lines = (self.end.line - self.point.line).0 as usize - 1;
            let cols_on_last_line = self.end.column + 1;
            cols_on_first_line + middle_lines * self.grid.columns + cols_on_last_line.0
        };

        // Lower and upper bound are equal: the remaining count is exact.
        (size, Some(size))
    }
}
/// Bidirectional iterator.
pub trait BidirectionalIterator: Iterator {
    /// Step backwards, yielding the previous item.
    fn prev(&mut self) -> Option<Self::Item>;
}

impl<T> BidirectionalIterator for GridIterator<'_, T> {
    fn prev(&mut self) -> Option<Self::Item> {
        let topmost_line = self.grid.topmost_line();
        let last_column = self.grid.last_column();

        // Stop once we've reached the end of the grid.
        if self.point <= Point::new(topmost_line, Column(0)) {
            return None;
        }

        // Step back one cell, wrapping to the previous line from column zero.
        match self.point {
            Point { column: Column(0), .. } => {
                self.point.column = last_column;
                self.point.line -= 1;
            },
            _ => self.point.column -= Column(1),
        }

        Some(Indexed { cell: &self.grid[self.point], point: self.point })
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/src/grid/resize.rs | alacritty_terminal/src/grid/resize.rs | //! Grid resize and reflow.
use std::cmp::{Ordering, max, min};
use std::mem;
use crate::index::{Boundary, Column, Line};
use crate::term::cell::{Flags, ResetDiscriminant};
use crate::grid::row::Row;
use crate::grid::{Dimensions, Grid, GridCell};
impl<T: GridCell + Default + PartialEq> Grid<T> {
    /// Resize the grid's width and/or height.
    ///
    /// With `reflow` set, rows longer or shorter than the new width are
    /// re-wrapped instead of being truncated or padded.
    pub fn resize<D>(&mut self, reflow: bool, lines: usize, columns: usize)
    where
        T: ResetDiscriminant<D>,
        D: PartialEq,
    {
        // Use empty template cell for resetting cells due to resize.
        let template = mem::take(&mut self.cursor.template);

        match self.lines.cmp(&lines) {
            Ordering::Less => self.grow_lines(lines),
            Ordering::Greater => self.shrink_lines(lines),
            Ordering::Equal => (),
        }

        match self.columns.cmp(&columns) {
            Ordering::Less => self.grow_columns(reflow, columns),
            Ordering::Greater => self.shrink_columns(reflow, columns),
            Ordering::Equal => (),
        }

        // Restore template cell.
        self.cursor.template = template;
    }

    /// Add lines to the visible area.
    ///
    /// `target` is the new total number of visible lines.
    ///
    /// Alacritty keeps the cursor at the bottom of the terminal as long as there
    /// is scrollback available. Once scrollback is exhausted, new lines are
    /// simply added to the bottom of the screen.
    fn grow_lines<D>(&mut self, target: usize)
    where
        T: ResetDiscriminant<D>,
        D: PartialEq,
    {
        let lines_added = target - self.lines;

        // Need to resize before updating buffer.
        self.raw.grow_visible_lines(target);
        self.lines = target;

        let history_size = self.history_size();
        let from_history = min(history_size, lines_added);

        // Move existing lines up for every line that couldn't be pulled from history.
        if from_history != lines_added {
            let delta = lines_added - from_history;
            self.scroll_up(&(Line(0)..Line(target as i32)), delta);
        }

        // Move cursor down for every line pulled from history.
        self.saved_cursor.point.line += from_history;
        self.cursor.point.line += from_history;

        self.display_offset = self.display_offset.saturating_sub(lines_added);
        self.decrease_scroll_limit(lines_added);
    }

    /// Remove lines from the visible area.
    ///
    /// `target` is the new total number of visible lines.
    ///
    /// The behavior in Terminal.app and iTerm.app is to keep the cursor at the
    /// bottom of the screen. This is achieved by pushing history "out the top"
    /// of the terminal window.
    ///
    /// Alacritty takes the same approach.
    fn shrink_lines<D>(&mut self, target: usize)
    where
        T: ResetDiscriminant<D>,
        D: PartialEq,
    {
        // Scroll up to keep content inside the window.
        let required_scrolling = (self.cursor.point.line.0 as usize + 1).saturating_sub(target);
        if required_scrolling > 0 {
            self.scroll_up(&(Line(0)..Line(self.lines as i32)), required_scrolling);

            // Clamp cursors to the new viewport size.
            self.cursor.point.line = min(self.cursor.point.line, Line(target as i32 - 1));
        }

        // Clamp saved cursor, since only primary cursor is scrolled into viewport.
        self.saved_cursor.point.line = min(self.saved_cursor.point.line, Line(target as i32 - 1));

        self.raw.rotate((self.lines - target) as isize);
        self.raw.shrink_visible_lines(target);
        self.lines = target;
    }

    /// Grow number of columns in each row, reflowing if necessary.
    fn grow_columns(&mut self, reflow: bool, columns: usize) {
        // Check if a row needs to be wrapped.
        let should_reflow = |row: &Row<T>| -> bool {
            let len = Column(row.len());
            reflow && len.0 > 0 && len < columns && row[len - 1].flags().contains(Flags::WRAPLINE)
        };

        self.columns = columns;

        // Rows are processed bottom-up, so this collects them in reverse order.
        let mut reversed: Vec<Row<T>> = Vec::with_capacity(self.raw.len());
        let mut cursor_line_delta = 0;

        // Remove the linewrap special case, by moving the cursor outside of the grid.
        if self.cursor.input_needs_wrap && reflow {
            self.cursor.input_needs_wrap = false;
            self.cursor.point.column += 1;
        }

        let mut rows = self.raw.take_all();

        for (i, mut row) in rows.drain(..).enumerate().rev() {
            // Check if reflowing should be performed.
            let last_row = match reversed.last_mut() {
                Some(last_row) if should_reflow(last_row) => last_row,
                _ => {
                    reversed.push(row);
                    continue;
                },
            };

            // Remove wrap flag before appending additional cells.
            if let Some(cell) = last_row.last_mut() {
                cell.flags_mut().remove(Flags::WRAPLINE);
            }

            // Remove leading spacers when reflowing wide char to the previous line.
            let mut last_len = last_row.len();
            if last_len >= 1
                && last_row[Column(last_len - 1)].flags().contains(Flags::LEADING_WIDE_CHAR_SPACER)
            {
                last_row.shrink(last_len - 1);
                last_len -= 1;
            }

            // Don't try to pull more cells from the next line than available.
            let mut num_wrapped = columns - last_len;
            let len = min(row.len(), num_wrapped);

            // Insert leading spacer when there's not enough room for reflowing wide char.
            let mut cells = if row[Column(len - 1)].flags().contains(Flags::WIDE_CHAR) {
                num_wrapped -= 1;

                let mut cells = row.front_split_off(len - 1);

                let mut spacer = T::default();
                spacer.flags_mut().insert(Flags::LEADING_WIDE_CHAR_SPACER);
                cells.push(spacer);

                cells
            } else {
                row.front_split_off(len)
            };

            // Add removed cells to previous row and reflow content.
            last_row.append(&mut cells);

            // `i` counts from the bottom of the buffer, so this is the cursor's
            // row expressed in buffer coordinates.
            let cursor_buffer_line = self.lines - self.cursor.point.line.0 as usize - 1;
            if i == cursor_buffer_line && reflow {
                // Resize cursor's line and reflow the cursor if necessary.
                let mut target = self.cursor.point.sub(self, Boundary::Cursor, num_wrapped);

                // Clamp to the last column, if no content was reflown with the cursor.
                if target.column.0 == 0 && row.is_clear() {
                    self.cursor.input_needs_wrap = true;
                    target = target.sub(self, Boundary::Cursor, 1);
                }
                self.cursor.point.column = target.column;

                // Get required cursor line changes. Since `num_wrapped` is smaller than `columns`
                // this will always be either `0` or `1`.
                let line_delta = self.cursor.point.line - target.line;

                if line_delta != 0 && row.is_clear() {
                    continue;
                }

                cursor_line_delta += line_delta.0 as usize;
            } else if row.is_clear() {
                if i < self.display_offset {
                    // Since we removed a line, rotate down the viewport.
                    self.display_offset = self.display_offset.saturating_sub(1);
                }

                // Rotate cursor down if content below them was pulled from history.
                if i < cursor_buffer_line {
                    self.cursor.point.line += 1;
                }

                // Don't push line into the new buffer.
                continue;
            }

            if let Some(cell) = last_row.last_mut() {
                // Set wrap flag if next line still has cells.
                cell.flags_mut().insert(Flags::WRAPLINE);
            }

            reversed.push(row);
        }

        // Make sure we have at least the viewport filled.
        if reversed.len() < self.lines {
            let delta = (self.lines - reversed.len()) as i32;
            self.cursor.point.line = max(self.cursor.point.line - delta, Line(0));
            reversed.resize_with(self.lines, || Row::new(columns));
        }

        // Pull content down to put cursor in correct position, or move cursor up if there's no
        // more lines to delete below the cursor.
        if cursor_line_delta != 0 {
            let cursor_buffer_line = self.lines - self.cursor.point.line.0 as usize - 1;
            let available = min(cursor_buffer_line, reversed.len() - self.lines);
            let overflow = cursor_line_delta.saturating_sub(available);
            reversed.truncate(reversed.len() + overflow - cursor_line_delta);
            self.cursor.point.line = max(self.cursor.point.line - overflow, Line(0));
        }

        // Reverse iterator and fill all rows that are still too short.
        let mut new_raw = Vec::with_capacity(reversed.len());
        for mut row in reversed.drain(..).rev() {
            if row.len() < columns {
                row.grow(columns);
            }
            new_raw.push(row);
        }

        self.raw.replace_inner(new_raw);

        // Clamp display offset in case lines above it got merged.
        self.display_offset = min(self.display_offset, self.history_size());
    }

    /// Shrink number of columns in each row, reflowing if necessary.
    fn shrink_columns(&mut self, reflow: bool, columns: usize) {
        self.columns = columns;

        // Remove the linewrap special case, by moving the cursor outside of the grid.
        if self.cursor.input_needs_wrap && reflow {
            self.cursor.input_needs_wrap = false;
            self.cursor.point.column += 1;
        }

        let mut new_raw = Vec::with_capacity(self.raw.len());
        // Cells cut off the current row, waiting to be prepended to the next one.
        let mut buffered: Option<Vec<T>> = None;

        let mut rows = self.raw.take_all();
        for (i, mut row) in rows.drain(..).enumerate().rev() {
            // Append lines left over from the previous row.
            if let Some(buffered) = buffered.take() {
                // Add a column for every cell added before the cursor, if it goes beyond the new
                // width it is then later reflown.
                let cursor_buffer_line = self.lines - self.cursor.point.line.0 as usize - 1;
                if i == cursor_buffer_line {
                    self.cursor.point.column += buffered.len();
                }

                row.append_front(buffered);
            }

            // A single source row can produce several wrapped rows; loop until it fits.
            loop {
                // Remove all cells which require reflowing.
                let mut wrapped = match row.shrink(columns) {
                    Some(wrapped) if reflow => wrapped,
                    _ => {
                        let cursor_buffer_line = self.lines - self.cursor.point.line.0 as usize - 1;
                        if reflow && i == cursor_buffer_line && self.cursor.point.column > columns {
                            // If there are empty cells before the cursor, we assume it is explicit
                            // whitespace and need to wrap it like normal content.
                            Vec::new()
                        } else {
                            // Since it fits, just push the existing line without any reflow.
                            new_raw.push(row);
                            break;
                        }
                    },
                };

                // Insert spacer if a wide char would be wrapped into the last column.
                if row.len() >= columns
                    && row[Column(columns - 1)].flags().contains(Flags::WIDE_CHAR)
                {
                    let mut spacer = T::default();
                    spacer.flags_mut().insert(Flags::LEADING_WIDE_CHAR_SPACER);

                    let wide_char = mem::replace(&mut row[Column(columns - 1)], spacer);
                    wrapped.insert(0, wide_char);
                }

                // Remove wide char spacer before shrinking.
                let len = wrapped.len();
                if len > 0 && wrapped[len - 1].flags().contains(Flags::LEADING_WIDE_CHAR_SPACER) {
                    if len == 1 {
                        row[Column(columns - 1)].flags_mut().insert(Flags::WRAPLINE);
                        new_raw.push(row);
                        break;
                    } else {
                        // Remove the leading spacer from the end of the wrapped row.
                        wrapped[len - 2].flags_mut().insert(Flags::WRAPLINE);
                        wrapped.truncate(len - 1);
                    }
                }

                new_raw.push(row);

                // Set line as wrapped if cells got removed.
                if let Some(cell) = new_raw.last_mut().and_then(|r| r.last_mut()) {
                    cell.flags_mut().insert(Flags::WRAPLINE);
                }

                if wrapped
                    .last()
                    .map(|c| c.flags().contains(Flags::WRAPLINE) && i >= 1)
                    .unwrap_or(false)
                    && wrapped.len() < columns
                {
                    // Make sure previous wrap flag doesn't linger around.
                    if let Some(cell) = wrapped.last_mut() {
                        cell.flags_mut().remove(Flags::WRAPLINE);
                    }

                    // Add removed cells to start of next row.
                    buffered = Some(wrapped);
                    break;
                } else {
                    // Reflow cursor if a line below it is deleted.
                    let cursor_buffer_line = self.lines - self.cursor.point.line.0 as usize - 1;
                    if (i == cursor_buffer_line && self.cursor.point.column < columns)
                        || i < cursor_buffer_line
                    {
                        self.cursor.point.line = max(self.cursor.point.line - 1, Line(0));
                    }

                    // Reflow the cursor if it is on this line beyond the width.
                    if i == cursor_buffer_line && self.cursor.point.column >= columns {
                        // Since only a single new line is created, we subtract only `columns`
                        // from the cursor instead of reflowing it completely.
                        self.cursor.point.column -= columns;
                    }

                    // Make sure new row is at least as long as new width.
                    let occ = wrapped.len();
                    if occ < columns {
                        wrapped.resize_with(columns, T::default);
                    }
                    row = Row::from_vec(wrapped, occ);

                    if i < self.display_offset {
                        // Since we added a new line, rotate up the viewport.
                        self.display_offset += 1;
                    }
                }
            }
        }

        // Reverse iterator and use it as the new grid storage.
        let mut reversed: Vec<Row<T>> = new_raw.drain(..).rev().collect();
        reversed.truncate(self.max_scroll_limit + self.lines);
        self.raw.replace_inner(reversed);

        // Clamp display offset in case some lines went off.
        self.display_offset = min(self.display_offset, self.history_size());

        // Reflow the primary cursor, or clamp it if reflow is disabled.
        if !reflow {
            self.cursor.point.column = min(self.cursor.point.column, Column(columns - 1));
        } else if self.cursor.point.column == columns
            && !self[self.cursor.point.line][Column(columns - 1)].flags().contains(Flags::WRAPLINE)
        {
            self.cursor.input_needs_wrap = true;
            self.cursor.point.column -= 1;
        } else {
            self.cursor.point = self.cursor.point.grid_clamp(self, Boundary::Cursor);
        }

        // Clamp the saved cursor to the grid.
        self.saved_cursor.point.column = min(self.saved_cursor.point.column, Column(columns - 1));
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_terminal/tests/ref.rs | alacritty_terminal/tests/ref.rs | #![cfg(feature = "serde")]
use serde::Deserialize;
use serde_json as json;
use std::fs::{self, File};
use std::io::Read;
use std::path::Path;
use alacritty_terminal::event::{Event, EventListener};
use alacritty_terminal::grid::{Dimensions, Grid};
use alacritty_terminal::index::{Column, Line};
use alacritty_terminal::term::cell::Cell;
use alacritty_terminal::term::test::TermSize;
use alacritty_terminal::term::{Config, Term};
use alacritty_terminal::vte::ansi;
/// Generate one `#[test]` function per reference-test name.
///
/// Each generated test resolves `tests/ref/<name>` relative to the crate
/// manifest directory and hands that directory to `ref_test`.
macro_rules! ref_tests {
    ($($name:ident)*) => {
        $(
            #[test]
            fn $name() {
                // Recordings and reference grids live in `tests/ref/<name>`.
                let test_dir = Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/tests/ref"));
                let test_path = test_dir.join(stringify!($name));
                ref_test(&test_path);
            }
        )*
    };
}
ref_tests! {
alt_reset
clear_underline
colored_reset
colored_underline
csi_rep
decaln_reset
deccolm_reset
delete_chars_reset
delete_lines
erase_chars_reset
fish_cc
grid_reset
history
hyperlinks
indexed_256_colors
insert_blank_reset
issue_855
ll
newline_with_cursor_beyond_scroll_region
region_scroll_down
row_reset
saved_cursor
saved_cursor_alt
scroll_up_reset
selective_erasure
sgr
tab_rendering
tmux_git_log
tmux_htop
underline
vim_24bitcolors_bce
vim_large_window_scroll
vim_simple_edit
vttest_cursor_movement_1
vttest_insert
vttest_origin_mode_1
vttest_origin_mode_2
vttest_scroll
vttest_tab_clear_set
wrapline_alt_toggle
zerowidth
zsh_tab_completion
erase_in_line
scroll_in_region_up_preserves_history
origin_goto
}
/// Read the entire file at `path` into a byte vector.
///
/// # Panics
///
/// Panics if the file cannot be opened or fully read.
fn read_u8<P>(path: P) -> Vec<u8>
where
    P: AsRef<Path>,
{
    // `fs::read` pre-sizes the buffer from file metadata and replaces the
    // manual `File::open` + `read_to_end` dance.
    fs::read(path.as_ref()).unwrap()
}
/// Subset of the per-test `config.json` the ref tests care about.
#[derive(Deserialize, Default)]
struct RefConfig {
    // Scrollback history size the terminal was recorded with.
    history_size: u32,
}

/// No-op event listener; ref tests never inspect emitted terminal events.
#[derive(Copy, Clone)]
struct Mock;

impl EventListener for Mock {
    fn send_event(&self, _event: Event) {}
}
/// Replay a recorded terminal session and compare the resulting grid
/// against the stored reference grid in `dir`.
fn ref_test(dir: &Path) {
    // Load the raw byte recording plus the serialized reference state.
    let recording = read_u8(dir.join("alacritty.recording"));
    let serialized_size = fs::read_to_string(dir.join("size.json")).unwrap();
    let serialized_grid = fs::read_to_string(dir.join("grid.json")).unwrap();
    let serialized_cfg = fs::read_to_string(dir.join("config.json")).unwrap();

    let size: TermSize = json::from_str(&serialized_size).unwrap();
    let grid: Grid<Cell> = json::from_str(&serialized_grid).unwrap();
    let ref_config: RefConfig = json::from_str(&serialized_cfg).unwrap();

    let options =
        Config { scrolling_history: ref_config.history_size as usize, ..Default::default() };

    // Feed the whole recording through the ANSI parser into a fresh terminal.
    let mut terminal = Term::new(options, &size, Mock);
    let mut parser: ansi::Processor = ansi::Processor::new();
    parser.advance(&mut terminal, &recording);

    // Truncate invisible lines from the grid.
    let mut term_grid = terminal.grid().clone();
    term_grid.initialize_all();
    term_grid.truncate();

    if grid != term_grid {
        // Print every differing cell before panicking, to ease debugging.
        for i in 0..grid.total_lines() {
            for j in 0..grid.columns() {
                let cell = &term_grid[Line(i as i32)][Column(j)];
                let original_cell = &grid[Line(i as i32)][Column(j)];
                if original_cell != cell {
                    println!("[{i}][{j}] {original_cell:?} => {cell:?}",);
                }
            }
        }
        panic!("Ref test failed; grid doesn't match");
    }

    // NOTE(review): redundant — unreachable in the failing case because the
    // `panic!` above already fired; kept as a final sanity check.
    assert_eq!(grid, term_grid);
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_config_derive/src/lib.rs | alacritty_config_derive/src/lib.rs | #![deny(clippy::all, clippy::if_not_else, clippy::enum_glob_use)]
#![cfg_attr(clippy, deny(warnings))]
use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use syn::parse::{self, Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn::{GenericParam, Ident, LitStr, Token, TypeParam};
mod config_deserialize;
mod serde_replace;
/// Error message when attempting to flatten multiple fields.
/// Error message when attempting to flatten multiple fields.
pub(crate) const MULTIPLE_FLATTEN_ERROR: &str =
    "At most one instance of #[config(flatten)] is supported";

/// Entry point for `#[derive(ConfigDeserialize)]`.
#[proc_macro_derive(ConfigDeserialize, attributes(config))]
pub fn derive_config_deserialize(input: TokenStream) -> TokenStream {
    config_deserialize::derive(input)
}

/// Entry point for `#[derive(SerdeReplace)]`.
#[proc_macro_derive(SerdeReplace)]
pub fn derive_serde_replace(input: TokenStream) -> TokenStream {
    serde_replace::derive(input)
}
/// Storage for all necessary generics information.
#[derive(Default)]
struct GenericsStreams {
    // Bare parameter list, e.g. `T,`.
    unconstrained: TokenStream2,
    // Parameters with the bounds required by the generated impls.
    constrained: TokenStream2,
    // `PhantomData` fields used to keep generated visitor structs parameterized.
    phantoms: TokenStream2,
}
/// Create the necessary generics annotations.
///
/// This will create three different token streams, which might look like this:
/// - unconstrained: `T`
/// - constrained: `T: Default + Deserialize<'de>`
/// - phantoms: `T: PhantomData<T>,`
pub(crate) fn generics_streams<T>(params: &Punctuated<GenericParam, T>) -> GenericsStreams {
    let mut streams = GenericsStreams::default();

    // Only type parameters contribute; lifetimes and const generics are
    // intentionally ignored.
    let type_idents = params.iter().filter_map(|param| match param {
        GenericParam::Type(TypeParam { ident, .. }) => Some(ident),
        _ => None,
    });

    for ident in type_idents {
        streams.unconstrained.extend(quote!( #ident , ));
        streams.constrained.extend(quote! {
            #ident : Default + serde::Deserialize<'de> + alacritty_config::SerdeReplace,
        });
        streams.phantoms.extend(quote! {
            #ident : std::marker::PhantomData < #ident >,
        });
    }

    streams
}
/// Field attribute.
///
/// Parsed form of `#[config(ident)]` or `#[config(ident = "param")]`.
pub(crate) struct Attr {
    // Attribute name, e.g. `alias` or `deprecated`.
    ident: String,
    // Optional string value after `=`; `None` when the attribute is bare.
    param: Option<LitStr>,
}

impl Parse for Attr {
    fn parse(input: ParseStream<'_>) -> parse::Result<Self> {
        let ident = input.parse::<Ident>()?.to_string();
        // A missing or malformed `= "value"` tail is tolerated and maps to `None`.
        let param = input.parse::<Token![=]>().and_then(|_| input.parse()).ok();
        Ok(Self { ident, param })
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_config_derive/src/serde_replace.rs | alacritty_config_derive/src/serde_replace.rs | use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use syn::punctuated::Punctuated;
use syn::{
Data, DataStruct, DeriveInput, Error, Field, Fields, Generics, Ident, parse_macro_input,
};
use crate::{Attr, GenericsStreams, MULTIPLE_FLATTEN_ERROR};
/// Error if the derive was used on an unsupported type.
const UNSUPPORTED_ERROR: &str = "SerdeReplace must be used on a tuple struct";
/// Dispatch `#[derive(SerdeReplace)]` based on the input's shape.
///
/// Tuple structs and enums get a direct "replace the whole value" impl;
/// structs with named fields get a field-by-field recursive impl.
pub fn derive(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    match input.data {
        Data::Struct(DataStruct { fields: Fields::Unnamed(_), .. }) | Data::Enum(_) => {
            derive_direct(input.ident, input.generics).into()
        },
        Data::Struct(DataStruct { fields: Fields::Named(fields), .. }) => {
            derive_recursive(input.ident, input.generics, fields.named).into()
        },
        // Unit structs and unions are rejected with a compile error.
        _ => Error::new(input.ident.span(), UNSUPPORTED_ERROR).to_compile_error().into(),
    }
}
/// Generate a `SerdeReplace` impl that deserializes the whole value in place.
///
/// NOTE(review): the generics are interpolated verbatim in both the impl and
/// type position (`impl <#generics> … #ident <#generics>`); this presumably
/// only works for simple, bound-free generics — confirm against callers.
pub fn derive_direct(ident: Ident, generics: Generics) -> TokenStream2 {
    quote! {
        impl <#generics> alacritty_config::SerdeReplace for #ident <#generics> {
            fn replace(&mut self, value: toml::Value) -> Result<(), Box<dyn std::error::Error>> {
                // Replace `self` wholesale with the deserialized value.
                *self = serde::Deserialize::deserialize(value)?;
                Ok(())
            }
        }
    }
}
/// Generate a field-by-field `SerdeReplace` impl for a named-field struct.
///
/// A TOML table replaces matching fields one by one (recursing through
/// `SerdeReplace`); any non-table value replaces the whole struct at once.
pub fn derive_recursive<T>(
    ident: Ident,
    generics: Generics,
    fields: Punctuated<Field, T>,
) -> TokenStream2 {
    let GenericsStreams { unconstrained, constrained, .. } =
        crate::generics_streams(&generics.params);

    let replace_arms = match match_arms(&fields) {
        // Attribute errors surface as compile errors in the generated code.
        Err(e) => return e.to_compile_error(),
        Ok(replace_arms) => replace_arms,
    };

    quote! {
        #[allow(clippy::extra_unused_lifetimes)]
        impl <'de, #constrained> alacritty_config::SerdeReplace for #ident <#unconstrained> {
            fn replace(&mut self, value: toml::Value) -> Result<(), Box<dyn std::error::Error>> {
                match value.as_table() {
                    Some(table) => {
                        for (field, next_value) in table {
                            let next_value = next_value.clone();
                            let value = value.clone();
                            match field.as_str() {
                                #replace_arms
                                _ => {
                                    let error = format!("Field \"{}\" does not exist", field);
                                    return Err(error.into());
                                },
                            }
                        }
                    },
                    // Non-table values replace the entire struct.
                    None => *self = serde::Deserialize::deserialize(value)?,
                }

                Ok(())
            }
        }
    }
}
/// Create SerdeReplace recursive match arms.
///
/// Returns one match arm per named field (including its `#[config(alias)]`
/// names); an optional `#[config(flatten)]` field becomes the trailing
/// catch-all arm appended last.
fn match_arms<T>(fields: &Punctuated<Field, T>) -> Result<TokenStream2, syn::Error> {
    let mut stream = TokenStream2::default();
    let mut flattened_arm = None;

    // Create arm for each field.
    for field in fields {
        let ident = field.ident.as_ref().expect("unreachable tuple struct");
        let literal = ident.to_string();

        // Check if the #[config(flatten)] attribute is present.
        let flatten = field
            .attrs
            .iter()
            .filter(|attr| (*attr).path().is_ident("config"))
            .filter_map(|attr| attr.parse_args::<Attr>().ok())
            .any(|parsed| parsed.ident.as_str() == "flatten");

        if flatten && flattened_arm.is_some() {
            // Only a single flattened field is supported per struct.
            return Err(Error::new(ident.span(), MULTIPLE_FLATTEN_ERROR));
        } else if flatten {
            // The flattened field consumes the *whole* value, not just one key.
            flattened_arm = Some(quote! {
                _ => alacritty_config::SerdeReplace::replace(&mut self.#ident, value)?,
            });
        } else {
            // Extract all `#[config(alias = "...")]` attribute values.
            let aliases = field
                .attrs
                .iter()
                .filter(|attr| (*attr).path().is_ident("config"))
                .filter_map(|attr| attr.parse_args::<Attr>().ok())
                .filter(|parsed| parsed.ident.as_str() == "alias")
                .map(|parsed| {
                    let value = parsed
                        .param
                        .ok_or_else(|| format!("Field \"{ident}\" has no alias value"))?
                        .value();
                    if value.trim().is_empty() {
                        return Err(format!("Field \"{ident}\" has an empty alias value"));
                    }
                    Ok(value)
                })
                .collect::<Result<Vec<String>, String>>()
                .map_err(|msg| Error::new(ident.span(), msg))?;

            stream.extend(quote! {
                #(#aliases)|* | #literal => alacritty_config::SerdeReplace::replace(&mut
                    self.#ident, next_value)?, });
        }
    }

    // Add the flattened catch-all as last match arm.
    if let Some(flattened_arm) = flattened_arm.take() {
        stream.extend(flattened_arm);
    }

    Ok(stream)
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_config_derive/src/config_deserialize/de_enum.rs | alacritty_config_derive/src/config_deserialize/de_enum.rs | use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote};
use syn::meta::ParseNestedMeta;
use syn::{DataEnum, Generics, Ident};
use crate::serde_replace;
/// Generate `serde::Deserialize` for a fieldless enum.
///
/// Variants are matched case-insensitively against their lowercased names;
/// `#[config(skip)]` variants are excluded from deserialization. A
/// `SerdeReplace` impl is appended automatically.
pub fn derive_deserialize(ident: Ident, generics: Generics, data_enum: DataEnum) -> TokenStream {
    let visitor = format_ident!("{}Visitor", ident);

    // Create match arm streams and get a list with all available values.
    let mut match_arms_stream = TokenStream2::new();
    let mut available_values = String::from("one of ");
    for variant in data_enum.variants.iter().filter(|variant| {
        // Skip deserialization for `#[config(skip)]` fields.
        variant.attrs.iter().all(|attr| {
            let is_skip = |meta: ParseNestedMeta| {
                if meta.path.is_ident("skip") { Ok(()) } else { Err(meta.error("not skip")) }
            };
            !attr.path().is_ident("config") || attr.parse_nested_meta(is_skip).is_err()
        })
    }) {
        let variant_ident = &variant.ident;
        let variant_str = variant_ident.to_string();
        // Collected into the "expected …" message shown on parse errors.
        available_values = format!("{available_values}`{variant_str}`, ");

        let literal = variant_str.to_lowercase();
        match_arms_stream.extend(quote! {
            #literal => Ok(#ident :: #variant_ident),
        });
    }

    // Remove trailing `, ` from the last enum variant.
    available_values.truncate(available_values.len().saturating_sub(2));

    // Generate deserialization impl.
    let mut tokens = quote! {
        struct #visitor;

        impl<'de> serde::de::Visitor<'de> for #visitor {
            type Value = #ident;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str(#available_values)
            }

            fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // Case-insensitive variant lookup.
                match s.to_lowercase().as_str() {
                    #match_arms_stream
                    _ => Err(E::custom(
                        &format!("unknown variant `{}`, expected {}", s, #available_values)
                    )),
                }
            }
        }

        impl<'de> serde::Deserialize<'de> for #ident {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                deserializer.deserialize_str(#visitor)
            }
        }
    };

    // Automatically implement [`alacritty_config::SerdeReplace`].
    tokens.extend(serde_replace::derive_direct(ident, generics));

    tokens.into()
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_config_derive/src/config_deserialize/mod.rs | alacritty_config_derive/src/config_deserialize/mod.rs | use proc_macro::TokenStream;
use syn::{Data, DataStruct, DeriveInput, Error, Fields, parse_macro_input};
/// Error if the derive was used on an unsupported type.
const UNSUPPORTED_ERROR: &str = "ConfigDeserialize must be used on an enum or struct with fields";
mod de_enum;
mod de_struct;
/// Dispatch `#[derive(ConfigDeserialize)]` to the struct or enum generator.
///
/// Anything else (tuple/unit structs, unions) is rejected with a compile
/// error pointing at the type name.
pub fn derive(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    match input.data {
        Data::Struct(DataStruct { fields: Fields::Named(fields), .. }) => {
            de_struct::derive_deserialize(input.ident, input.generics, fields.named)
        },
        Data::Enum(data_enum) => {
            de_enum::derive_deserialize(input.ident, input.generics, data_enum)
        },
        _ => Error::new(input.ident.span(), UNSUPPORTED_ERROR).to_compile_error().into(),
    }
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_config_derive/src/config_deserialize/de_struct.rs | alacritty_config_derive/src/config_deserialize/de_struct.rs | use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote};
use syn::punctuated::Punctuated;
use syn::spanned::Spanned;
use syn::{Error, Field, Generics, Ident, Type};
use crate::{Attr, GenericsStreams, MULTIPLE_FLATTEN_ERROR, serde_replace};
/// Use this crate's name as log target.
const LOG_TARGET: &str = env!("CARGO_PKG_NAME");
/// Generate lenient `serde::Deserialize` for a named-field struct.
///
/// Unknown keys are collected into an `unused` table; they either feed the
/// `#[config(flatten)]` field or produce an "Unused config key" warning.
/// Per-field parse errors are logged instead of failing deserialization.
pub fn derive_deserialize<T>(
    ident: Ident,
    generics: Generics,
    fields: Punctuated<Field, T>,
) -> TokenStream {
    // Create all necessary tokens for the implementation.
    let GenericsStreams { unconstrained, constrained, phantoms } =
        crate::generics_streams(&generics.params);
    let FieldStreams { flatten, match_assignments } = fields_deserializer(&fields);
    let visitor = format_ident!("{}Visitor", ident);

    // Generate deserialization impl.
    let mut tokens = quote! {
        #[derive(Default)]
        #[allow(non_snake_case)]
        struct #visitor <#unconstrained> {
            #phantoms
        }

        impl <'de, #constrained> serde::de::Visitor<'de> for #visitor <#unconstrained> {
            type Value = #ident <#unconstrained>;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("a mapping")
            }

            fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
            where
                M: serde::de::MapAccess<'de>,
            {
                let mut config = Self::Value::default();

                // Unused keys for flattening and warning.
                let mut unused = toml::Table::new();

                while let Some((key, value)) = map.next_entry::<String, toml::Value>()? {
                    match key.as_str() {
                        #match_assignments
                        _ => {
                            unused.insert(key, value);
                        },
                    }
                }

                #flatten

                // Warn about unused keys.
                for key in unused.keys() {
                    log::warn!(target: #LOG_TARGET, "Unused config key: {}", key);
                }

                Ok(config)
            }
        }

        impl <'de, #constrained> serde::Deserialize<'de> for #ident <#unconstrained> {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                deserializer.deserialize_map(#visitor :: default())
            }
        }
    };

    // Automatically implement [`alacritty_config::SerdeReplace`].
    tokens.extend(serde_replace::derive_recursive(ident, generics, fields));

    tokens.into()
}
/// Token streams created from the fields in the struct.
#[derive(Default)]
struct FieldStreams {
    // Match arms deserializing each recognized key into its field.
    match_assignments: TokenStream2,
    // Code deserializing the `#[config(flatten)]` field from unused keys.
    flatten: TokenStream2,
}

/// Create the deserializers for match arms and flattened fields.
fn fields_deserializer<T>(fields: &Punctuated<Field, T>) -> FieldStreams {
    let mut field_streams = FieldStreams::default();

    // Create the deserialization stream for each field.
    for field in fields.iter() {
        if let Err(err) = field_deserializer(&mut field_streams, field) {
            // Surface the attribute error as a compile error in place of the
            // flatten code, and stop processing further fields.
            field_streams.flatten = err.to_compile_error();
            return field_streams;
        }
    }

    field_streams
}
/// Append a single field deserializer to the stream.
///
/// Handles the `#[config(...)]` attributes `skip`, `flatten`,
/// `deprecated`/`removed` and `alias`, plus the special-case parsing of the
/// string `"none"` into `None` for `Option` fields.
fn field_deserializer(field_streams: &mut FieldStreams, field: &Field) -> Result<(), Error> {
    let ident = field.ident.as_ref().expect("unreachable tuple struct");
    let literal = ident.to_string();
    // Match patterns for this field: its name plus any aliases.
    let mut literals = vec![literal.clone()];

    // Create default stream for deserializing fields.
    let mut match_assignment_stream = quote! {
        match serde::Deserialize::deserialize(value) {
            Ok(value) => config.#ident = value,
            Err(err) => {
                // Log instead of failing; the field keeps its default.
                log::error!(
                    target: #LOG_TARGET,
                    "Config error: {}: {}",
                    #literal,
                    err.to_string().trim(),
                );
            },
        }
    };

    // Iterate over all #[config(...)] attributes.
    for attr in field.attrs.iter().filter(|attr| attr.path().is_ident("config")) {
        let parsed = match attr.parse_args::<Attr>() {
            Ok(parsed) => parsed,
            Err(_) => continue,
        };

        match parsed.ident.as_str() {
            // Skip deserialization for `#[config(skip)]` fields.
            "skip" => return Ok(()),
            "flatten" => {
                // NOTE: Currently only a single instance of flatten is supported per struct
                // for complexity reasons.
                if !field_streams.flatten.is_empty() {
                    return Err(Error::new(attr.span(), MULTIPLE_FLATTEN_ERROR));
                }

                // Create the tokens to deserialize the flattened struct from the unused fields.
                field_streams.flatten.extend(quote! {
                    // Drain unused fields since they will be used for flattening.
                    let flattened = std::mem::replace(&mut unused, toml::Table::new());
                    config.#ident = serde::Deserialize::deserialize(flattened).unwrap_or_default();
                });
            },
            "deprecated" | "removed" => {
                // Construct deprecation/removal message with optional attribute override.
                let mut message = format!("Config warning: {} has been {}", literal, parsed.ident);
                if let Some(warning) = parsed.param {
                    message = format!("{}; {}", message, warning.value());
                }
                message.push_str("\nUse `alacritty migrate` to automatically resolve it");

                // Append stream to log deprecation/removal warning.
                match_assignment_stream.extend(quote! {
                    log::warn!(target: #LOG_TARGET, #message);
                });
            },
            // Add aliases to match pattern.
            "alias" => {
                if let Some(alias) = parsed.param {
                    literals.push(alias.value());
                }
            },
            _ => (),
        }
    }

    // Create token stream for deserializing "none" string into `Option<T>`.
    if let Type::Path(type_path) = &field.ty {
        if type_path.path.segments.iter().next_back().is_some_and(|s| s.ident == "Option") {
            match_assignment_stream = quote! {
                if value.as_str().is_some_and(|s| s.eq_ignore_ascii_case("none")) {
                    config.#ident = None;
                    continue;
                }

                #match_assignment_stream
            };
        }
    }

    // Create the token stream for deserialization and error handling.
    field_streams.match_assignments.extend(quote! {
        #(#literals)|* => { #match_assignment_stream },
    });

    Ok(())
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
alacritty/alacritty | https://github.com/alacritty/alacritty/blob/6ee6e53ee3457c24137f117237b0ff1d84f6f836/alacritty_config_derive/tests/config.rs | alacritty_config_derive/tests/config.rs | use std::sync::{Arc, Mutex, OnceLock};
use log::{Level, Log, Metadata, Record};
use serde::Deserialize;
use alacritty_config::SerdeReplace as _;
use alacritty_config_derive::{ConfigDeserialize, SerdeReplace};
/// Enum fixture exercising `ConfigDeserialize` variant handling.
#[derive(ConfigDeserialize, Debug, PartialEq, Eq)]
enum TestEnum {
    One,
    Two,
    Three,
    // Skipped variants must not be deserializable.
    #[config(skip)]
    Nine(String),
}

impl Default for TestEnum {
    fn default() -> Self {
        Self::Nine(String::from("nine"))
    }
}
/// Struct fixture covering aliases, deprecation, flattening and nesting.
#[derive(ConfigDeserialize)]
struct Test {
    #[config(alias = "field1_alias")]
    #[config(deprecated = "use field2 instead")]
    field1: usize,
    #[config(deprecated = "shouldn't be hit")]
    field2: String,
    field3: Option<u8>,
    #[doc(hidden)]
    nesting: Test2<usize>,
    #[config(flatten)]
    flatten: Test3,
    enom_small: TestEnum,
    enom_big: TestEnum,
    #[config(deprecated)]
    enom_error: TestEnum,
    #[config(removed = "it's gone")]
    gone: bool,
    #[config(alias = "multiple_alias1")]
    #[config(alias = "multiple_alias2")]
    multiple_alias_field: usize,
}

impl Default for Test {
    fn default() -> Self {
        Self {
            field1: 13,
            field2: String::from("field2"),
            field3: Some(23),
            nesting: Test2::default(),
            flatten: Test3::default(),
            enom_small: TestEnum::default(),
            enom_big: TestEnum::default(),
            enom_error: TestEnum::default(),
            gone: false,
            multiple_alias_field: 0,
        }
    }
}

/// Generic nested fixture with a skipped field and an alias.
#[derive(ConfigDeserialize, Default)]
struct Test2<T: Default> {
    field1: T,
    field2: Option<usize>,
    #[config(skip)]
    field3: usize,
    #[config(alias = "aliased")]
    field4: u8,
    newtype: NewType,
}

/// Fixture flattened into `Test`.
#[derive(ConfigDeserialize, Default)]
struct Test3 {
    #[config(alias = "flatty_alias")]
    flatty: usize,
}

/// Newtype exercising the direct `SerdeReplace` derive.
#[derive(SerdeReplace, Deserialize, Default, PartialEq, Eq, Debug)]
struct NewType(usize);
#[test]
fn config_deserialize() {
static LOGGER: OnceLock<Logger> = OnceLock::new();
let logger = LOGGER.get_or_init(Logger::default);
log::set_logger(logger).unwrap();
log::set_max_level(log::LevelFilter::Warn);
let test: Test = toml::from_str(
r#"
field1 = 3
field3 = 32
flatty = 123
enom_small = "one"
enom_big = "THREE"
enom_error = "HugaBuga"
gone = false
[nesting]
field1 = "testing"
field2 = "None"
field3 = 99
aliased = 8
"#,
)
.unwrap();
// Verify fields were deserialized correctly.
assert_eq!(test.field1, 3);
assert_eq!(test.field2, Test::default().field2);
assert_eq!(test.field3, Some(32));
assert_eq!(test.enom_small, TestEnum::One);
assert_eq!(test.enom_big, TestEnum::Three);
assert_eq!(test.enom_error, Test::default().enom_error);
assert!(!test.gone);
assert_eq!(test.nesting.field1, Test::default().nesting.field1);
assert_eq!(test.nesting.field2, None);
assert_eq!(test.nesting.field3, Test::default().nesting.field3);
assert_eq!(test.nesting.field4, 8);
assert_eq!(test.flatten.flatty, 123);
// Verify all log messages are correct.
let mut error_logs = logger.error_logs.lock().unwrap();
error_logs.sort_unstable();
assert_eq!(error_logs.as_slice(), [
"Config error: enom_error: unknown variant `HugaBuga`, expected one of `One`, `Two`, \
`Three`",
"Config error: field1: invalid type: string \"testing\", expected usize",
]);
let mut warn_logs = logger.warn_logs.lock().unwrap();
warn_logs.sort_unstable();
assert_eq!(warn_logs.as_slice(), [
"Config warning: enom_error has been deprecated\nUse `alacritty migrate` to automatically \
resolve it",
"Config warning: field1 has been deprecated; use field2 instead\nUse `alacritty migrate` \
to automatically resolve it",
"Config warning: gone has been removed; it's gone\nUse `alacritty migrate` to \
automatically resolve it",
"Unused config key: field3",
]);
}
/// Logger storing all messages for later validation.
#[derive(Default)]
struct Logger {
    // Captured `error!` messages.
    error_logs: Arc<Mutex<Vec<String>>>,
    // Captured `warn!` messages.
    warn_logs: Arc<Mutex<Vec<String>>>,
}

impl Log for Logger {
    fn log(&self, record: &Record<'_>) {
        // All derive-generated logs must target this crate's name.
        assert_eq!(record.target(), env!("CARGO_PKG_NAME"));

        match record.level() {
            Level::Error => {
                let mut error_logs = self.error_logs.lock().unwrap();
                error_logs.push(record.args().to_string());
            },
            Level::Warn => {
                let mut warn_logs = self.warn_logs.lock().unwrap();
                warn_logs.push(record.args().to_string());
            },
            // Max level is set to Warn, so nothing else should arrive.
            _ => unreachable!(),
        }
    }

    fn enabled(&self, _metadata: &Metadata<'_>) -> bool {
        true
    }

    fn flush(&self) {}
}
/// Replacing a nested field via a dotted TOML key.
#[test]
fn field_replacement() {
    let mut test = Test::default();

    let value = toml::from_str("nesting.field2=13").unwrap();
    test.replace(value).unwrap();

    assert_eq!(test.nesting.field2, Some(13));
}

/// Replacing a `#[derive(SerdeReplace)]` newtype wholesale.
#[test]
fn replace_derive() {
    let mut test = Test::default();

    let value = toml::from_str("nesting.newtype=9").unwrap();
    test.replace(value).unwrap();

    assert_eq!(test.nesting.newtype, NewType(9));
}

/// Replacement resolves `#[config(alias)]` names.
#[test]
fn replace_derive_using_alias() {
    let mut test = Test::default();
    assert_ne!(test.field1, 9);

    let value = toml::from_str("field1_alias=9").unwrap();
    test.replace(value).unwrap();

    assert_eq!(test.field1, 9);
}

/// Each of several aliases maps to the same field.
#[test]
fn replace_derive_using_multiple_aliases() {
    let mut test = Test::default();

    let toml_value = toml::from_str("multiple_alias1=6").unwrap();
    test.replace(toml_value).unwrap();
    assert_eq!(test.multiple_alias_field, 6);

    let toml_value = toml::from_str("multiple_alias1=7").unwrap();
    test.replace(toml_value).unwrap();
    assert_eq!(test.multiple_alias_field, 7);
}

/// Flattened fields are reachable at the top level.
#[test]
fn replace_flatten() {
    let mut test = Test::default();

    let value = toml::from_str("flatty=7").unwrap();
    test.replace(value).unwrap();

    assert_eq!(test.flatten.flatty, 7);
}

/// Flattened fields are also reachable through their aliases.
#[test]
fn replace_flatten_using_alias() {
    let mut test = Test::default();
    assert_ne!(test.flatten.flatty, 7);

    let value = toml::from_str("flatty_alias=7").unwrap();
    test.replace(value).unwrap();

    assert_eq!(test.flatten.flatty, 7);
}
| rust | Apache-2.0 | 6ee6e53ee3457c24137f117237b0ff1d84f6f836 | 2026-01-04T15:31:58.707223Z | false |
tw93/Pake | https://github.com/tw93/Pake/blob/d592b81521f388eb42c14fe50afb5198418b4c22/src-tauri/build.rs | src-tauri/build.rs | fn main() {
tauri_build::build()
}
| rust | MIT | d592b81521f388eb42c14fe50afb5198418b4c22 | 2026-01-04T15:31:59.426357Z | false |
tw93/Pake | https://github.com/tw93/Pake/blob/d592b81521f388eb42c14fe50afb5198418b4c22/src-tauri/src/lib.rs | src-tauri/src/lib.rs | #[cfg_attr(mobile, tauri::mobile_entry_point)]
mod app;
mod util;
use tauri::Manager;
use tauri_plugin_window_state::Builder as WindowStatePlugin;
use tauri_plugin_window_state::StateFlags;
#[cfg(target_os = "macos")]
use std::time::Duration;
const WINDOW_SHOW_DELAY: u64 = 50;
use app::{
invoke::{
clear_cache_and_restart, download_file, download_file_by_binary, send_notification,
update_theme_mode,
},
setup::{set_global_shortcut, set_system_tray},
window::set_window,
};
use util::get_pake_config;
/// Build and run the Tauri application from the embedded Pake config.
///
/// Reads window/tray options from `pake.json`, registers plugins and command
/// handlers, wires up the tray, global shortcut and close-to-hide behavior,
/// then enters the Tauri event loop (does not return normally).
pub fn run_app() {
    let (pake_config, tauri_config) = get_pake_config();
    let tauri_app = tauri::Builder::default();

    let show_system_tray = pake_config.show_system_tray();
    let hide_on_close = pake_config.windows[0].hide_on_close;
    let activation_shortcut = pake_config.windows[0].activation_shortcut.clone();
    let init_fullscreen = pake_config.windows[0].fullscreen;
    let start_to_tray = pake_config.windows[0].start_to_tray && show_system_tray; // Only valid when tray is enabled
    let multi_instance = pake_config.multi_instance;

    let window_state_plugin = WindowStatePlugin::default()
        .with_state_flags(if init_fullscreen {
            StateFlags::FULLSCREEN
        } else {
            // Prevent flickering on the first open.
            StateFlags::all() & !StateFlags::VISIBLE
        })
        .build();

    #[allow(deprecated)]
    let mut app_builder = tauri_app
        .plugin(window_state_plugin)
        .plugin(tauri_plugin_oauth::init())
        .plugin(tauri_plugin_http::init())
        .plugin(tauri_plugin_shell::init())
        .plugin(tauri_plugin_notification::init())
        .plugin(tauri_plugin_opener::init());

    // Only add single instance plugin if multiple instances are not allowed.
    if !multi_instance {
        app_builder = app_builder.plugin(tauri_plugin_single_instance::init(|app, _args, _cwd| {
            // A second launch focuses the existing window instead.
            if let Some(window) = app.get_webview_window("pake") {
                let _ = window.unminimize();
                let _ = window.show();
                let _ = window.set_focus();
            }
        }));
    }

    app_builder
        .invoke_handler(tauri::generate_handler![
            download_file,
            download_file_by_binary,
            send_notification,
            update_theme_mode,
            clear_cache_and_restart,
        ])
        .setup(move |app| {
            // --- Menu Construction Start ---
            #[cfg(target_os = "macos")]
            {
                let menu = app::menu::get_menu(app.app_handle())?;
                app.set_menu(menu)?;

                // Event Handling for Custom Menu Item
                app.on_menu_event(move |app_handle, event| {
                    app::menu::handle_menu_click(app_handle, event.id().as_ref());
                });
            }
            // --- Menu Construction End ---

            let window = set_window(app, &pake_config, &tauri_config);
            set_system_tray(
                app.app_handle(),
                show_system_tray,
                &pake_config.system_tray_path,
            )
            .unwrap();
            set_global_shortcut(app.app_handle(), activation_shortcut).unwrap();

            // Show window after state restoration to prevent position flashing.
            // Unless start_to_tray is enabled, then keep it hidden.
            if !start_to_tray {
                let window_clone = window.clone();
                tauri::async_runtime::spawn(async move {
                    tokio::time::sleep(tokio::time::Duration::from_millis(WINDOW_SHOW_DELAY)).await;
                    window_clone.show().unwrap();
                });
            }

            Ok(())
        })
        .on_window_event(move |_window, _event| {
            if let tauri::WindowEvent::CloseRequested { api, .. } = _event {
                if hide_on_close {
                    // Hide window when hide_on_close is enabled (regardless of tray status).
                    let window = _window.clone();
                    tauri::async_runtime::spawn(async move {
                        #[cfg(target_os = "macos")]
                        {
                            // Leave fullscreen first; macOS can't hide a
                            // fullscreen window cleanly.
                            if window.is_fullscreen().unwrap_or(false) {
                                window.set_fullscreen(false).unwrap();
                                tokio::time::sleep(Duration::from_millis(900)).await;
                            }
                        }
                        window.minimize().unwrap();
                        window.hide().unwrap();
                    });
                    api.prevent_close();
                } else {
                    // Exit app completely when hide_on_close is false.
                    std::process::exit(0);
                }
            }
        })
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
/// Public entry point; delegates to [`run_app`].
pub fn run() {
    run_app()
}
| rust | MIT | d592b81521f388eb42c14fe50afb5198418b4c22 | 2026-01-04T15:31:59.426357Z | false |
tw93/Pake | https://github.com/tw93/Pake/blob/d592b81521f388eb42c14fe50afb5198418b4c22/src-tauri/src/util.rs | src-tauri/src/util.rs | use crate::app::config::PakeConfig;
use std::env;
use std::path::PathBuf;
use tauri::{AppHandle, Config, Manager, WebviewWindow};
/// Load the embedded Pake and Tauri configurations.
///
/// Both JSON files are baked in at compile time via `include_str!`; CLI
/// builds (`cli-build` feature) embed them from `.pake/`, dev builds embed
/// the checked-in `pake.json` / `tauri.conf.json`.
///
/// # Panics
///
/// Panics if either embedded JSON fails to parse.
pub fn get_pake_config() -> (PakeConfig, Config) {
    #[cfg(feature = "cli-build")]
    let pake_config: PakeConfig = serde_json::from_str(include_str!("../.pake/pake.json"))
        .expect("Failed to parse pake config");

    #[cfg(not(feature = "cli-build"))]
    let pake_config: PakeConfig =
        serde_json::from_str(include_str!("../pake.json")).expect("Failed to parse pake config");

    #[cfg(feature = "cli-build")]
    let tauri_config: Config = serde_json::from_str(include_str!("../.pake/tauri.conf.json"))
        .expect("Failed to parse tauri config");

    #[cfg(not(feature = "cli-build"))]
    let tauri_config: Config = serde_json::from_str(include_str!("../tauri.conf.json"))
        .expect("Failed to parse tauri config");

    (pake_config, tauri_config)
}
/// Return (and create if necessary) the app's data directory under the
/// platform config dir, namespaced by `package_name`.
///
/// # Panics
///
/// Panics if the config directory cannot be resolved or the directory
/// cannot be created.
pub fn get_data_dir(app: &AppHandle, package_name: String) -> PathBuf {
    let data_dir = app
        .path()
        .config_dir()
        .expect("Failed to get data dirname")
        .join(package_name);

    // `create_dir_all` also creates missing parent directories and succeeds
    // if the directory already exists, so there is no race between an
    // existence check and creation (the original `create_dir` failed on both).
    std::fs::create_dir_all(&data_dir)
        .unwrap_or_else(|_| panic!("Can't create dir {}", data_dir.display()));

    data_dir
}
/// Shows an in-page toast by invoking the injected `pakeToast` JS helper.
///
/// The message is escaped before being interpolated so quotes, backslashes
/// and newlines cannot break out of the generated JS string literal.
pub fn show_toast(window: &WebviewWindow, message: &str) {
    let escaped = message
        .replace('\\', "\\\\")
        .replace('"', "\\\"")
        .replace('\n', "\\n")
        .replace('\r', "\\r");
    let script = format!(r#"pakeToast("{escaped}");"#);
    window.eval(&script).unwrap();
}
/// Phase of a download used to pick a toast message.
pub enum MessageType {
    Start,
    Success,
    Failure,
}

/// True when a locale string indicates Chinese (e.g. `zh-CN`, `zh_TW`, `zh_HK`).
fn is_chinese_locale(lang: &str) -> bool {
    lang.starts_with("zh") || lang.contains("CN") || lang.contains("TW") || lang.contains("HK")
}

/// Returns a localized status message for a download.
///
/// `language` is the caller-supplied locale; when absent, common locale
/// environment variables are consulted. Falls back to English.
pub fn get_download_message_with_lang(
    message_type: MessageType,
    language: Option<String>,
) -> String {
    let is_chinese = language
        .as_deref()
        .map(is_chinese_locale)
        .unwrap_or_else(|| {
            // Try multiple environment variables for better system detection.
            ["LANG", "LC_ALL", "LC_MESSAGES", "LANGUAGE"]
                .iter()
                .find_map(|var| env::var(var).ok())
                .map(|lang| is_chinese_locale(&lang))
                .unwrap_or(false)
        });
    if is_chinese {
        match message_type {
            MessageType::Start => "开始下载中~",
            MessageType::Success => "下载成功,已保存到下载目录~",
            MessageType::Failure => "下载失败,请检查你的网络连接~",
        }
    } else {
        match message_type {
            MessageType::Start => "Start downloading~",
            MessageType::Success => "Download successful, saved to download directory~",
            MessageType::Failure => "Download failed, please check your network connection~",
        }
    }
    .to_string()
}
/// Returns `file_path` unchanged if nothing exists at that path; otherwise
/// appends (or increments) a `-N` suffix on the file stem until the path is
/// free, e.g. `a.txt` -> `a-1.txt` -> `a-2.txt`.
///
/// Unlike a naive `unwrap`-based version this does not panic on paths
/// without an extension, without a stem, or without a parent directory.
pub fn check_file_or_append(file_path: &str) -> String {
    let mut new_path = PathBuf::from(file_path);
    let mut counter = 0;
    while new_path.exists() {
        let file_stem = new_path
            .file_stem()
            .map(|s| s.to_string_lossy().into_owned())
            .unwrap_or_default();
        // `None` for extension-less files; preserved as-is in the new name.
        let extension = new_path
            .extension()
            .map(|e| e.to_string_lossy().into_owned());
        let parent_dir = new_path
            .parent()
            .map(PathBuf::from)
            .unwrap_or_default();
        let new_file_stem = match file_stem.rfind('-') {
            // Reuse an existing numeric suffix: "report-3" -> "report-4".
            Some(index) if file_stem[index + 1..].parse::<u32>().is_ok() => {
                let base_name = &file_stem[..index];
                counter = file_stem[index + 1..].parse::<u32>().unwrap() + 1;
                format!("{base_name}-{counter}")
            }
            _ => {
                counter += 1;
                format!("{file_stem}-{counter}")
            }
        };
        new_path = match &extension {
            Some(ext) => parent_dir.join(format!("{new_file_stem}.{ext}")),
            None => parent_dir.join(new_file_stem),
        };
    }
    new_path.to_string_lossy().into_owned()
}
| rust | MIT | d592b81521f388eb42c14fe50afb5198418b4c22 | 2026-01-04T15:31:59.426357Z | false |
tw93/Pake | https://github.com/tw93/Pake/blob/d592b81521f388eb42c14fe50afb5198418b4c22/src-tauri/src/main.rs | src-tauri/src/main.rs | #![cfg_attr(
all(not(debug_assertions), target_os = "windows"),
windows_subsystem = "windows"
)]
/// Binary entry point: delegates to the shared library crate so the same
/// application logic is reused by every target.
fn main() {
    app_lib::run()
}
| rust | MIT | d592b81521f388eb42c14fe50afb5198418b4c22 | 2026-01-04T15:31:59.426357Z | false |
tw93/Pake | https://github.com/tw93/Pake/blob/d592b81521f388eb42c14fe50afb5198418b4c22/src-tauri/src/app/setup.rs | src-tauri/src/app/setup.rs | use std::str::FromStr;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use tauri::{
menu::{MenuBuilder, MenuItemBuilder},
tray::{TrayIconBuilder, TrayIconEvent},
AppHandle, Manager,
};
use tauri_plugin_global_shortcut::{GlobalShortcutExt, Shortcut};
use tauri_plugin_window_state::{AppHandleExt, StateFlags};
/// Installs (or removes) the "pake-tray" system tray icon.
///
/// When `show_system_tray` is false any existing tray is removed. Otherwise
/// a tray with Hide/Show/Quit menu items is (re)created; a left click on the
/// icon toggles main-window visibility. A non-empty `tray_icon_path`
/// overrides the default window icon, falling back to it on load failure.
pub fn set_system_tray(
    app: &AppHandle,
    show_system_tray: bool,
    tray_icon_path: &str,
) -> tauri::Result<()> {
    if !show_system_tray {
        app.remove_tray_by_id("pake-tray");
        return Ok(());
    }
    let hide_app = MenuItemBuilder::with_id("hide_app", "Hide").build(app)?;
    let show_app = MenuItemBuilder::with_id("show_app", "Show").build(app)?;
    let quit = MenuItemBuilder::with_id("quit", "Quit").build(app)?;
    let menu = MenuBuilder::new(app)
        .items(&[&hide_app, &show_app, &quit])
        .build()?;
    // Drop any previous tray before building a fresh one.
    app.remove_tray_by_id("pake-tray");
    let tray = TrayIconBuilder::new()
        .menu(&menu)
        .on_menu_event(move |app, event| match event.id().as_ref() {
            "hide_app" => {
                if let Some(window) = app.get_webview_window("pake") {
                    window.minimize().unwrap();
                }
            }
            "show_app" => {
                if let Some(window) = app.get_webview_window("pake") {
                    window.show().unwrap();
                }
            }
            "quit" => {
                // Persist window geometry before exiting.
                app.save_window_state(StateFlags::all()).unwrap();
                std::process::exit(0);
            }
            _ => (),
        })
        .on_tray_icon_event(|tray, event| {
            // Left click toggles the main window; all other events are ignored.
            if let TrayIconEvent::Click {
                button: tauri::tray::MouseButton::Left,
                ..
            } = event
            {
                if let Some(window) = tray.app_handle().get_webview_window("pake") {
                    let is_visible = window.is_visible().unwrap_or(false);
                    if is_visible {
                        window.hide().unwrap();
                    } else {
                        window.show().unwrap();
                        window.set_focus().unwrap();
                    }
                }
            }
        })
        .icon(if tray_icon_path.is_empty() {
            app.default_window_icon()
                .unwrap_or_else(|| panic!("Failed to get default window icon"))
                .clone()
        } else {
            tauri::image::Image::from_path(tray_icon_path).unwrap_or_else(|_| {
                // If custom tray icon fails to load, fallback to default
                app.default_window_icon()
                    .unwrap_or_else(|| panic!("Failed to get default window icon"))
                    .clone()
            })
        })
        .build(app)?;
    tray.set_icon_as_template(false)?;
    Ok(())
}
/// Registers a global shortcut that toggles visibility of the main window.
///
/// An empty `shortcut` string disables the feature. Repeated triggers within
/// 300 ms are debounced so one key chord does not toggle twice.
pub fn set_global_shortcut(app: &AppHandle, shortcut: String) -> tauri::Result<()> {
    if shortcut.is_empty() {
        return Ok(());
    }
    let app_handle = app.clone();
    let shortcut_hotkey = Shortcut::from_str(&shortcut)
        .unwrap_or_else(|_| panic!("Invalid activation shortcut: {shortcut}"));
    let last_triggered = Arc::new(Mutex::new(Instant::now()));
    app_handle
        .plugin(
            tauri_plugin_global_shortcut::Builder::new()
                .with_handler({
                    let last_triggered = Arc::clone(&last_triggered);
                    move |app, event, _shortcut| {
                        // Debounce: ignore triggers within 300 ms of the last one.
                        let mut last_triggered = last_triggered.lock().unwrap();
                        if Instant::now().duration_since(*last_triggered)
                            < Duration::from_millis(300)
                        {
                            return;
                        }
                        *last_triggered = Instant::now();
                        if shortcut_hotkey.eq(event) {
                            if let Some(window) = app.get_webview_window("pake") {
                                // unwrap_or(false) keeps behavior consistent with
                                // the tray toggle and avoids panicking on a
                                // window that is being torn down.
                                let is_visible = window.is_visible().unwrap_or(false);
                                if is_visible {
                                    window.hide().unwrap();
                                } else {
                                    window.show().unwrap();
                                    window.set_focus().unwrap();
                                }
                            }
                        }
                    }
                })
                .build(),
        )
        .expect("Failed to set global shortcut");
    app.global_shortcut().register(shortcut_hotkey).unwrap();
    Ok(())
}
| rust | MIT | d592b81521f388eb42c14fe50afb5198418b4c22 | 2026-01-04T15:31:59.426357Z | false |
tw93/Pake | https://github.com/tw93/Pake/blob/d592b81521f388eb42c14fe50afb5198418b4c22/src-tauri/src/app/config.rs | src-tauri/src/app/config.rs | use serde::{Deserialize, Serialize};
/// Per-window settings deserialized from `pake.json`.
#[derive(Debug, Serialize, Deserialize)]
pub struct WindowConfig {
    /// Page URL ("web") or local file path ("local"); see `url_type`.
    pub url: String,
    pub hide_title_bar: bool,
    pub fullscreen: bool,
    pub maximize: bool,
    pub width: f64,
    pub height: f64,
    pub resizable: bool,
    /// Either "web" or "local"; anything else aborts window creation.
    pub url_type: String,
    pub always_on_top: bool,
    pub dark_mode: bool,
    pub disabled_web_shortcuts: bool,
    /// Global shortcut string that toggles window visibility (may be empty).
    pub activation_shortcut: String,
    pub hide_on_close: bool,
    pub incognito: bool,
    /// Window title; `None` falls back to a platform-specific default.
    pub title: Option<String>,
    pub enable_wasm: bool,
    pub enable_drag_drop: bool,
    pub new_window: bool,
    pub start_to_tray: bool,
    #[serde(default)]
    pub force_internal_navigation: bool,
    /// Page zoom percentage; defaults to 100.
    #[serde(default = "default_zoom")]
    pub zoom: u32,
    /// Minimum window dimensions; 0 means "use the initial size".
    #[serde(default)]
    pub min_width: f64,
    #[serde(default)]
    pub min_height: f64,
    #[serde(default)]
    pub ignore_certificate_errors: bool,
}
/// Serde default for `WindowConfig::zoom` (100%).
fn default_zoom() -> u32 {
    100
}
/// A value configured separately for each supported desktop platform.
#[derive(Debug, Serialize, Deserialize)]
pub struct PlatformSpecific<T> {
    pub macos: T,
    pub linux: T,
    pub windows: T,
}
impl<T> PlatformSpecific<T> {
    /// Returns the value for the platform this binary was compiled for.
    pub const fn get(&self) -> &T {
        #[cfg(target_os = "macos")]
        let platform = &self.macos;
        #[cfg(target_os = "linux")]
        let platform = &self.linux;
        #[cfg(target_os = "windows")]
        let platform = &self.windows;
        platform
    }
}
impl<T> PlatformSpecific<T>
where
    T: Copy,
{
    /// Returns the current platform's value by copy.
    pub const fn copied(&self) -> T {
        *self.get()
    }
}
/// Per-platform user-agent strings.
pub type UserAgent = PlatformSpecific<String>;
/// Per-platform boolean feature switches.
pub type FunctionON = PlatformSpecific<bool>;
/// Top-level configuration deserialized from `pake.json`.
#[derive(Debug, Serialize, Deserialize)]
pub struct PakeConfig {
    pub windows: Vec<WindowConfig>,
    pub user_agent: UserAgent,
    pub system_tray: FunctionON,
    /// Custom tray icon path; empty means "use the default window icon".
    pub system_tray_path: String,
    /// Proxy URL (http/socks5); empty disables proxying.
    pub proxy_url: String,
    #[serde(default)]
    pub multi_instance: bool,
}
impl PakeConfig {
    /// Whether the system tray is enabled for the current platform.
    pub fn show_system_tray(&self) -> bool {
        *self.system_tray.get()
    }
}
| rust | MIT | d592b81521f388eb42c14fe50afb5198418b4c22 | 2026-01-04T15:31:59.426357Z | false |
tw93/Pake | https://github.com/tw93/Pake/blob/d592b81521f388eb42c14fe50afb5198418b4c22/src-tauri/src/app/menu.rs | src-tauri/src/app/menu.rs | // Menu functionality is only used on macOS
#![cfg(target_os = "macos")]
use tauri::menu::{AboutMetadata, Menu, MenuItem, PredefinedMenuItem, Submenu};
use tauri::{AppHandle, Manager, Wry};
use tauri_plugin_opener::OpenerExt;
/// Builds the complete macOS menu bar for the app.
pub fn get_menu(app: &AppHandle<Wry>) -> tauri::Result<Menu<Wry>> {
    let help_title = format!("Built with Pake V{}", env!("CARGO_PKG_VERSION"));
    Menu::with_items(
        app,
        &[
            &app_menu(app)?,
            &file_menu(app)?,
            &edit_menu(app)?,
            &view_menu(app)?,
            &navigation_menu(app)?,
            &window_menu(app)?,
            &help_menu(app, &help_title)?,
        ],
    )
}
/// Builds the application ("Pake") submenu with the standard macOS items.
fn app_menu(app: &AppHandle<Wry>) -> tauri::Result<Submenu<Wry>> {
    let menu = Submenu::new(app, "Pake", true)?;
    let entries = [
        PredefinedMenuItem::about(app, Some("Pake"), Some(AboutMetadata::default()))?,
        PredefinedMenuItem::separator(app)?,
        PredefinedMenuItem::services(app, None)?,
        PredefinedMenuItem::separator(app)?,
        PredefinedMenuItem::hide(app, None)?,
        PredefinedMenuItem::hide_others(app, None)?,
        PredefinedMenuItem::show_all(app, None)?,
        PredefinedMenuItem::separator(app)?,
        PredefinedMenuItem::quit(app, None)?,
    ];
    for entry in &entries {
        menu.append(entry)?;
    }
    Ok(menu)
}
/// Builds the File submenu (close window + cache clearing).
fn file_menu(app: &AppHandle<Wry>) -> tauri::Result<Submenu<Wry>> {
    let menu = Submenu::new(app, "File", true)?;
    let clear_cache = MenuItem::with_id(
        app,
        "clear_cache_restart",
        "Clear Cache & Restart",
        true,
        Some("CmdOrCtrl+Shift+Backspace"),
    )?;
    menu.append(&PredefinedMenuItem::close_window(app, None)?)?;
    menu.append(&PredefinedMenuItem::separator(app)?)?;
    menu.append(&clear_cache)?;
    Ok(menu)
}
/// Builds the Edit submenu: standard editing actions plus "Copy URL".
fn edit_menu(app: &AppHandle<Wry>) -> tauri::Result<Submenu<Wry>> {
    let menu = Submenu::new(app, "Edit", true)?;
    let standard = [
        PredefinedMenuItem::undo(app, None)?,
        PredefinedMenuItem::redo(app, None)?,
        PredefinedMenuItem::separator(app)?,
        PredefinedMenuItem::cut(app, None)?,
        PredefinedMenuItem::copy(app, None)?,
        PredefinedMenuItem::paste(app, None)?,
        PredefinedMenuItem::select_all(app, None)?,
        PredefinedMenuItem::separator(app)?,
    ];
    for item in &standard {
        menu.append(item)?;
    }
    let copy_url = MenuItem::with_id(app, "copy_url", "Copy URL", true, Some("CmdOrCtrl+L"))?;
    menu.append(&copy_url)?;
    Ok(menu)
}
/// Builds the View submenu: reload, zoom controls, fullscreen and devtools.
fn view_menu(app: &AppHandle<Wry>) -> tauri::Result<Submenu<Wry>> {
    let menu = Submenu::new(app, "View", true)?;
    let reload = MenuItem::with_id(app, "reload", "Reload", true, Some("CmdOrCtrl+R"))?;
    let zoom_in = MenuItem::with_id(app, "zoom_in", "Zoom In", true, Some("CmdOrCtrl+="))?;
    let zoom_out = MenuItem::with_id(app, "zoom_out", "Zoom Out", true, Some("CmdOrCtrl+-"))?;
    let zoom_reset =
        MenuItem::with_id(app, "zoom_reset", "Actual Size", true, Some("CmdOrCtrl+0"))?;
    // The devtools entry is only enabled in debug builds.
    let devtools = MenuItem::with_id(
        app,
        "toggle_devtools",
        "Toggle Developer Tools",
        cfg!(debug_assertions),
        Some("CmdOrCtrl+Option+I"),
    )?;
    menu.append(&reload)?;
    menu.append(&PredefinedMenuItem::separator(app)?)?;
    menu.append(&zoom_in)?;
    menu.append(&zoom_out)?;
    menu.append(&zoom_reset)?;
    menu.append(&PredefinedMenuItem::separator(app)?)?;
    menu.append(&PredefinedMenuItem::fullscreen(app, None)?)?;
    menu.append(&PredefinedMenuItem::separator(app)?)?;
    menu.append(&devtools)?;
    Ok(menu)
}
/// Builds the Navigation submenu (back / forward / home).
fn navigation_menu(app: &AppHandle<Wry>) -> tauri::Result<Submenu<Wry>> {
    let menu = Submenu::new(app, "Navigation", true)?;
    let entries = [
        ("go_back", "Back", "CmdOrCtrl+["),
        ("go_forward", "Forward", "CmdOrCtrl+]"),
        ("go_home", "Go Home", "CmdOrCtrl+Shift+H"),
    ];
    for (id, label, accelerator) in entries {
        menu.append(&MenuItem::with_id(app, id, label, true, Some(accelerator))?)?;
    }
    Ok(menu)
}
/// Builds the Window submenu (minimize / maximize / always-on-top / close).
fn window_menu(app: &AppHandle<Wry>) -> tauri::Result<Submenu<Wry>> {
    let menu = Submenu::new(app, "Window", true)?;
    let always_on_top = MenuItem::with_id(
        app,
        "always_on_top",
        "Toggle Always on Top",
        true,
        None::<&str>,
    )?;
    menu.append(&PredefinedMenuItem::minimize(app, None)?)?;
    menu.append(&PredefinedMenuItem::maximize(app, None)?)?;
    menu.append(&PredefinedMenuItem::separator(app)?)?;
    menu.append(&always_on_top)?;
    menu.append(&PredefinedMenuItem::separator(app)?)?;
    menu.append(&PredefinedMenuItem::close_window(app, None)?)?;
    Ok(menu)
}
/// Builds the Help submenu with a single "Built with Pake" link entry.
fn help_menu(app: &AppHandle<Wry>, title: &str) -> tauri::Result<Submenu<Wry>> {
    let menu = Submenu::new(app, "Help", true)?;
    menu.append(&MenuItem::with_id(
        app,
        "pake_github_link",
        title,
        true,
        None::<&str>,
    )?)?;
    Ok(menu)
}
/// Dispatches a macOS menu selection to its action.
///
/// Every action except the GitHub link operates on the main "pake" window;
/// when that window no longer exists those actions are silently ignored.
pub fn handle_menu_click(app_handle: &AppHandle, id: &str) {
    // The only action that does not need the window.
    if id == "pake_github_link" {
        let _ = app_handle
            .opener()
            .open_url("https://github.com/tw93/Pake", None::<&str>);
        return;
    }
    // All remaining actions target the main window; fetch it once.
    let Some(window) = app_handle.get_webview_window("pake") else {
        return;
    };
    match id {
        "reload" => {
            let _ = window.eval("window.location.reload()");
        }
        "toggle_devtools" => {
            #[cfg(debug_assertions)] // Only allow in debug builds
            if window.is_devtools_open() {
                window.close_devtools();
            } else {
                window.open_devtools();
            }
        }
        "zoom_in" => {
            let _ = window.eval("zoomIn()");
        }
        "zoom_out" => {
            let _ = window.eval("zoomOut()");
        }
        "zoom_reset" => {
            let _ = window.eval("setZoom('100%')");
        }
        "go_back" => {
            let _ = window.eval("window.history.back()");
        }
        "go_forward" => {
            let _ = window.eval("window.history.forward()");
        }
        "go_home" => {
            let _ = window.eval("window.location.href = window.pakeConfig.url");
        }
        "copy_url" => {
            let _ = window.eval("navigator.clipboard.writeText(window.location.href)");
        }
        "clear_cache_restart" => {
            // Restart only when the cache was actually cleared.
            if window.clear_all_browsing_data().is_ok() {
                app_handle.restart();
            }
        }
        "always_on_top" => {
            let is_on_top = window.is_always_on_top().unwrap_or(false);
            let _ = window.set_always_on_top(!is_on_top);
        }
        _ => {}
    }
}
| rust | MIT | d592b81521f388eb42c14fe50afb5198418b4c22 | 2026-01-04T15:31:59.426357Z | false |
tw93/Pake | https://github.com/tw93/Pake/blob/d592b81521f388eb42c14fe50afb5198418b4c22/src-tauri/src/app/invoke.rs | src-tauri/src/app/invoke.rs | use crate::util::{check_file_or_append, get_download_message_with_lang, show_toast, MessageType};
use std::fs::{self, File};
use std::io::Write;
use std::str::FromStr;
use tauri::http::Method;
use tauri::{command, AppHandle, Manager, Url, WebviewWindow};
use tauri_plugin_http::reqwest::{ClientBuilder, Request};
#[cfg(target_os = "macos")]
use tauri::Theme;
/// Arguments for `download_file`: fetch `url` and save it as `filename`.
#[derive(serde::Deserialize)]
pub struct DownloadFileParams {
    url: String,
    filename: String,
    /// Optional UI locale used to localize toast messages.
    language: Option<String>,
}
/// Arguments for `download_file_by_binary`: save raw `binary` bytes as `filename`.
#[derive(serde::Deserialize)]
pub struct BinaryDownloadParams {
    filename: String,
    binary: Vec<u8>,
    /// Optional UI locale used to localize toast messages.
    language: Option<String>,
}
/// Arguments for `send_notification`.
#[derive(serde::Deserialize)]
pub struct NotificationParams {
    title: String,
    body: String,
    icon: String,
}
/// Tauri command: downloads `params.url` into the user's download directory.
///
/// Shows start/success/failure toasts in the webview (localized via
/// `params.language`). The destination name is de-duplicated with
/// `check_file_or_append`, and the response body is streamed to disk chunk
/// by chunk so large files are not buffered in memory.
#[command]
pub async fn download_file(app: AppHandle, params: DownloadFileParams) -> Result<(), String> {
    let window: WebviewWindow = app.get_webview_window("pake").ok_or("Window not found")?;
    show_toast(
        &window,
        &get_download_message_with_lang(MessageType::Start, params.language.clone()),
    );
    let download_dir = app
        .path()
        .download_dir()
        .map_err(|e| format!("Failed to get download dir: {}", e))?;
    let output_path = download_dir.join(&params.filename);
    let path_str = output_path.to_str().ok_or("Invalid output path")?;
    // Avoid clobbering an existing file with the same name.
    let file_path = check_file_or_append(path_str);
    let client = ClientBuilder::new()
        .build()
        .map_err(|e| format!("Failed to build client: {}", e))?;
    let url = Url::from_str(&params.url).map_err(|e| format!("Invalid URL: {}", e))?;
    let request = Request::new(Method::GET, url);
    let response = client.execute(request).await;
    match response {
        Ok(mut res) => {
            let mut file =
                File::create(file_path).map_err(|e| format!("Failed to create file: {}", e))?;
            // Stream the body to disk chunk by chunk.
            while let Some(chunk) = res
                .chunk()
                .await
                .map_err(|e| format!("Failed to get chunk: {}", e))?
            {
                file.write_all(&chunk)
                    .map_err(|e| format!("Failed to write chunk: {}", e))?;
            }
            show_toast(
                &window,
                &get_download_message_with_lang(MessageType::Success, params.language.clone()),
            );
            Ok(())
        }
        Err(e) => {
            show_toast(
                &window,
                &get_download_message_with_lang(MessageType::Failure, params.language),
            );
            Err(e.to_string())
        }
    }
}
/// Tauri command: writes `params.binary` into the download directory as
/// `params.filename`, de-duplicating the name and showing localized toasts.
#[command]
pub async fn download_file_by_binary(
    app: AppHandle,
    params: BinaryDownloadParams,
) -> Result<(), String> {
    let window: WebviewWindow = app.get_webview_window("pake").ok_or("Window not found")?;
    show_toast(
        &window,
        &get_download_message_with_lang(MessageType::Start, params.language.clone()),
    );
    let download_dir = app
        .path()
        .download_dir()
        .map_err(|e| format!("Failed to get download dir: {}", e))?;
    let output_path = download_dir.join(&params.filename);
    let path_str = output_path.to_str().ok_or("Invalid output path")?;
    let file_path = check_file_or_append(path_str);
    if let Err(e) = fs::write(file_path, &params.binary) {
        show_toast(
            &window,
            &get_download_message_with_lang(MessageType::Failure, params.language),
        );
        return Err(e.to_string());
    }
    show_toast(
        &window,
        &get_download_message_with_lang(MessageType::Success, params.language.clone()),
    );
    Ok(())
}
/// Tauri command: shows a desktop notification with the given title, body
/// and icon.
#[command]
pub fn send_notification(app: AppHandle, params: NotificationParams) -> Result<(), String> {
    use tauri_plugin_notification::NotificationExt;
    let notification = app
        .notification()
        .builder()
        .title(&params.title)
        .body(&params.body)
        .icon(&params.icon);
    notification
        .show()
        .map_err(|e| format!("Failed to show notification: {}", e))?;
    Ok(())
}
/// Tauri command: switches the native window theme on macOS.
///
/// On other platforms this is a no-op.
#[command]
pub async fn update_theme_mode(app: AppHandle, mode: String) {
    #[cfg(target_os = "macos")]
    if let Some(window) = app.get_webview_window("pake") {
        let theme = match mode.as_str() {
            "dark" => Theme::Dark,
            _ => Theme::Light,
        };
        let _ = window.set_theme(Some(theme));
    }
    #[cfg(not(target_os = "macos"))]
    {
        let _ = (app, mode);
    }
}
/// Tauri command: clears all browsing data of the main window and restarts
/// the application on success.
///
/// Returns an error if the main window is missing or the data could not be
/// cleared; on success `AppHandle::restart` never returns.
#[command]
pub fn clear_cache_and_restart(app: AppHandle) -> Result<(), String> {
    let Some(window) = app.get_webview_window("pake") else {
        return Err("Main window not found".to_string());
    };
    match window.clear_all_browsing_data() {
        // restart() diverges, so no trailing Ok(()) is needed (the old
        // version carried an #[allow(unreachable_code)] for exactly that).
        Ok(_) => app.restart(),
        Err(e) => {
            eprintln!("Failed to clear browsing data: {}", e);
            Err(format!("Failed to clear browsing data: {}", e))
        }
    }
}
| rust | MIT | d592b81521f388eb42c14fe50afb5198418b4c22 | 2026-01-04T15:31:59.426357Z | false |
tw93/Pake | https://github.com/tw93/Pake/blob/d592b81521f388eb42c14fe50afb5198418b4c22/src-tauri/src/app/mod.rs | src-tauri/src/app/mod.rs | pub mod config;
pub mod invoke; // Tauri commands exposed to the webview
#[cfg(target_os = "macos")]
pub mod menu; // macOS-only menu bar
pub mod setup; // system tray and global shortcut installation
pub mod window; // main window construction
| rust | MIT | d592b81521f388eb42c14fe50afb5198418b4c22 | 2026-01-04T15:31:59.426357Z | false |
tw93/Pake | https://github.com/tw93/Pake/blob/d592b81521f388eb42c14fe50afb5198418b4c22/src-tauri/src/app/window.rs | src-tauri/src/app/window.rs | use crate::app::config::PakeConfig;
use crate::util::get_data_dir;
use std::{path::PathBuf, str::FromStr};
use tauri::{App, Config, Url, WebviewUrl, WebviewWindow, WebviewWindowBuilder};
#[cfg(target_os = "macos")]
use tauri::{Theme, TitleBarStyle};
/// Builds the WebView2 `--proxy-server` browser argument for an http/socks5
/// proxy URL, supplying the scheme's default port (80 / 1080) when none is
/// given. Returns `None` for unsupported schemes or URLs without a host.
#[cfg(target_os = "windows")]
fn build_proxy_browser_arg(url: &Url) -> Option<String> {
    let scheme = url.scheme();
    if !matches!(scheme, "http" | "socks5") {
        return None;
    }
    let host = url.host_str()?;
    let default_port = if scheme == "http" { 80 } else { 1080 };
    let port = url.port().unwrap_or(default_port);
    Some(format!("--proxy-server={scheme}://{host}:{port}"))
}
/// Builds and configures the main "pake" webview window.
///
/// Reads the first entry of `config.windows`, applies per-platform settings
/// (title-bar style and theme on macOS; data directory and additional
/// browser args on Windows/Linux), wires up proxy support and navigation
/// rules, and returns the window still hidden (it is shown later by the
/// setup hook).
///
/// Panics if no window configuration exists, `url_type` is invalid, or the
/// window cannot be built.
pub fn set_window(app: &mut App, config: &PakeConfig, tauri_config: &Config) -> WebviewWindow {
    let package_name = tauri_config.clone().product_name.unwrap();
    let _data_dir = get_data_dir(app.handle(), package_name);
    let window_config = config
        .windows
        .first()
        .expect("At least one window configuration is required");
    let user_agent = config.user_agent.get();
    let url = match window_config.url_type.as_str() {
        "web" => WebviewUrl::App(window_config.url.parse().unwrap()),
        "local" => WebviewUrl::App(PathBuf::from(&window_config.url)),
        _ => panic!("url type can only be web or local"),
    };
    // Expose the window config to injected scripts as `window.pakeConfig`.
    let config_script = format!(
        "window.pakeConfig = {}",
        serde_json::to_string(&window_config).unwrap()
    );
    // Platform-specific title: macOS prefers empty, others fallback to product name
    let effective_title = window_config.title.as_deref().unwrap_or_else(|| {
        if cfg!(target_os = "macos") {
            ""
        } else {
            tauri_config.product_name.as_deref().unwrap_or("")
        }
    });
    let mut window_builder = WebviewWindowBuilder::new(app, "pake", url)
        .title(effective_title)
        .visible(false)
        .user_agent(user_agent)
        .resizable(window_config.resizable)
        .fullscreen(window_config.fullscreen)
        .maximized(window_config.maximize)
        .inner_size(window_config.width, window_config.height)
        .always_on_top(window_config.always_on_top)
        .incognito(window_config.incognito);
    // A zero minimum dimension falls back to the initial window size.
    if window_config.min_width > 0.0 || window_config.min_height > 0.0 {
        let min_w = if window_config.min_width > 0.0 {
            window_config.min_width
        } else {
            window_config.width
        };
        let min_h = if window_config.min_height > 0.0 {
            window_config.min_height
        } else {
            window_config.height
        };
        window_builder = window_builder.min_inner_size(min_w, min_h);
    }
    if !window_config.enable_drag_drop {
        window_builder = window_builder.disable_drag_drop_handler();
    }
    if window_config.new_window {
        window_builder = window_builder
            .on_new_window(move |_url, _features| tauri::webview::NewWindowResponse::Allow);
    }
    // Add initialization scripts
    window_builder = window_builder
        .initialization_script(&config_script)
        .initialization_script(include_str!("../inject/component.js"))
        .initialization_script(include_str!("../inject/event.js"))
        .initialization_script(include_str!("../inject/style.js"))
        .initialization_script(include_str!("../inject/theme_refresh.js"))
        .initialization_script(include_str!("../inject/auth.js"))
        .initialization_script(include_str!("../inject/custom.js"));
    // Browser-arg strings accumulate per platform and are applied once below.
    #[cfg(target_os = "windows")]
    let mut windows_browser_args = String::from("--disable-features=msWebOOUI,msPdfOOUI,msSmartScreenProtection --disable-blink-features=AutomationControlled");
    #[cfg(all(not(target_os = "windows"), not(target_os = "macos")))]
    let mut linux_browser_args = String::from("--disable-blink-features=AutomationControlled");
    if window_config.ignore_certificate_errors {
        #[cfg(target_os = "windows")]
        {
            windows_browser_args.push_str(" --ignore-certificate-errors");
        }
        #[cfg(all(not(target_os = "windows"), not(target_os = "macos")))]
        {
            linux_browser_args.push_str(" --ignore-certificate-errors");
        }
        #[cfg(target_os = "macos")]
        {
            window_builder = window_builder.additional_browser_args("--ignore-certificate-errors");
        }
    }
    if window_config.enable_wasm {
        #[cfg(target_os = "windows")]
        {
            windows_browser_args.push_str(" --enable-features=SharedArrayBuffer");
            windows_browser_args.push_str(" --enable-unsafe-webgpu");
        }
        #[cfg(all(not(target_os = "windows"), not(target_os = "macos")))]
        {
            linux_browser_args.push_str(" --enable-features=SharedArrayBuffer");
            linux_browser_args.push_str(" --enable-unsafe-webgpu");
        }
        #[cfg(target_os = "macos")]
        {
            window_builder = window_builder
                .additional_browser_args("--enable-features=SharedArrayBuffer")
                .additional_browser_args("--enable-unsafe-webgpu");
        }
    }
    let mut parsed_proxy_url: Option<Url> = None;
    // Platform-specific configuration must be set before proxy on Windows/Linux
    #[cfg(target_os = "macos")]
    {
        let title_bar_style = if window_config.hide_title_bar {
            TitleBarStyle::Overlay
        } else {
            TitleBarStyle::Visible
        };
        window_builder = window_builder.title_bar_style(title_bar_style);
        // Default to following system theme (None), only force dark when explicitly set
        let theme = if window_config.dark_mode {
            Some(Theme::Dark)
        } else {
            None // Follow system theme
        };
        window_builder = window_builder.theme(theme);
    }
    // Windows and Linux: set data_directory before proxy_url
    #[cfg(not(target_os = "macos"))]
    {
        window_builder = window_builder.data_directory(_data_dir).theme(None);
        if !config.proxy_url.is_empty() {
            if let Ok(proxy_url) = Url::from_str(&config.proxy_url) {
                parsed_proxy_url = Some(proxy_url.clone());
                #[cfg(target_os = "windows")]
                {
                    // WebView2 additionally needs a --proxy-server browser arg.
                    if let Some(arg) = build_proxy_browser_arg(&proxy_url) {
                        windows_browser_args.push(' ');
                        windows_browser_args.push_str(&arg);
                    }
                }
            }
        }
        #[cfg(target_os = "windows")]
        {
            window_builder = window_builder.additional_browser_args(&windows_browser_args);
        }
        #[cfg(all(not(target_os = "windows"), not(target_os = "macos")))]
        {
            window_builder = window_builder.additional_browser_args(&linux_browser_args);
        }
    }
    // Set proxy after platform-specific configs (required for Windows/Linux)
    if parsed_proxy_url.is_none() && !config.proxy_url.is_empty() {
        if let Ok(proxy_url) = Url::from_str(&config.proxy_url) {
            parsed_proxy_url = Some(proxy_url);
        }
    }
    if let Some(proxy_url) = parsed_proxy_url {
        window_builder = window_builder.proxy_url(proxy_url);
        #[cfg(debug_assertions)]
        println!("Proxy configured: {}", config.proxy_url);
    }
    // Allow navigation to OAuth/authentication domains
    window_builder = window_builder.on_navigation(|url| {
        let url_str = url.as_str();
        // Always allow same-origin navigation
        if url_str.starts_with("http://localhost") || url_str.starts_with("http://127.0.0.1") {
            return true;
        }
        // Check for OAuth/authentication domains
        let auth_patterns = [
            "accounts.google.com",
            "login.microsoftonline.com",
            "github.com/login",
            "appleid.apple.com",
            "facebook.com",
            "twitter.com",
        ];
        let auth_paths = ["/oauth/", "/auth/", "/authorize", "/login"];
        // Allow if matches auth patterns
        for pattern in &auth_patterns {
            if url_str.contains(pattern) {
                #[cfg(debug_assertions)]
                println!("Allowing OAuth navigation to: {}", url_str);
                return true;
            }
        }
        for path in &auth_paths {
            if url_str.contains(path) {
                #[cfg(debug_assertions)]
                println!("Allowing auth path navigation to: {}", url_str);
                return true;
            }
        }
        // Allow all other navigation by default
        true
    });
    window_builder.build().expect("Failed to build window")
}
| rust | MIT | d592b81521f388eb42c14fe50afb5198418b4c22 | 2026-01-04T15:31:59.426357Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/cloud-tasks-client/src/lib.rs | codex-rs/cloud-tasks-client/src/lib.rs | mod api;
pub use api::ApplyOutcome;
pub use api::ApplyStatus;
pub use api::AttemptStatus;
pub use api::CloudBackend;
pub use api::CloudTaskError;
pub use api::CreatedTask;
pub use api::DiffSummary;
pub use api::Result;
pub use api::TaskId;
pub use api::TaskStatus;
pub use api::TaskSummary;
pub use api::TaskText;
pub use api::TurnAttempt;
#[cfg(feature = "mock")]
mod mock;
#[cfg(feature = "online")]
mod http;
#[cfg(feature = "mock")]
pub use mock::MockClient;
#[cfg(feature = "online")]
pub use http::HttpClient;
// Reusable apply engine now lives in the shared crate `codex-git`.
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/cloud-tasks-client/src/http.rs | codex-rs/cloud-tasks-client/src/http.rs | use crate::ApplyOutcome;
use crate::ApplyStatus;
use crate::AttemptStatus;
use crate::CloudBackend;
use crate::CloudTaskError;
use crate::DiffSummary;
use crate::Result;
use crate::TaskId;
use crate::TaskStatus;
use crate::TaskSummary;
use crate::TurnAttempt;
use crate::api::TaskText;
use chrono::DateTime;
use chrono::Utc;
use codex_backend_client as backend;
use codex_backend_client::CodeTaskDetailsResponseExt;
/// HTTP-backed implementation of [`CloudBackend`] that talks to the cloud
/// tasks service via `codex_backend_client`.
#[derive(Clone)]
pub struct HttpClient {
    /// Service base URL, kept for diagnostics; requests go through `backend`.
    pub base_url: String,
    backend: backend::Client,
}
impl HttpClient {
    /// Creates a client for the given service base URL.
    pub fn new(base_url: impl Into<String>) -> anyhow::Result<Self> {
        let base_url = base_url.into();
        let backend = backend::Client::new(base_url.clone())?;
        Ok(Self { base_url, backend })
    }

    /// Attaches a bearer token to every request.
    pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
        // `self` is owned, so the field can be moved out and reassigned
        // directly; the previous intermediate clone was redundant.
        self.backend = self.backend.with_bearer_token(token);
        self
    }

    /// Overrides the User-Agent header sent with every request.
    pub fn with_user_agent(mut self, ua: impl Into<String>) -> Self {
        self.backend = self.backend.with_user_agent(ua);
        self
    }

    /// Scopes requests to a specific ChatGPT account.
    pub fn with_chatgpt_account_id(mut self, account_id: impl Into<String>) -> Self {
        self.backend = self.backend.with_chatgpt_account_id(account_id);
        self
    }

    /// Task-related endpoints.
    fn tasks_api(&self) -> api::Tasks<'_> {
        api::Tasks::new(self)
    }

    /// Attempt-related endpoints.
    fn attempts_api(&self) -> api::Attempts<'_> {
        api::Attempts::new(self)
    }

    /// Diff-apply endpoints.
    fn apply_api(&self) -> api::Apply<'_> {
        api::Apply::new(self)
    }
}
// Thin delegation layer: each trait method forwards to the matching
// endpoint group (`tasks_api` / `attempts_api` / `apply_api`).
#[async_trait::async_trait]
impl CloudBackend for HttpClient {
    async fn list_tasks(&self, env: Option<&str>) -> Result<Vec<TaskSummary>> {
        self.tasks_api().list(env).await
    }
    async fn get_task_summary(&self, id: TaskId) -> Result<TaskSummary> {
        self.tasks_api().summary(id).await
    }
    async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>> {
        self.tasks_api().diff(id).await
    }
    async fn get_task_messages(&self, id: TaskId) -> Result<Vec<String>> {
        self.tasks_api().messages(id).await
    }
    async fn get_task_text(&self, id: TaskId) -> Result<TaskText> {
        self.tasks_api().task_text(id).await
    }
    async fn list_sibling_attempts(
        &self,
        task: TaskId,
        turn_id: String,
    ) -> Result<Vec<TurnAttempt>> {
        self.attempts_api().list(task, turn_id).await
    }
    // preflight = false: actually apply the diff to the local checkout.
    async fn apply_task(&self, id: TaskId, diff_override: Option<String>) -> Result<ApplyOutcome> {
        self.apply_api().run(id, diff_override, false).await
    }
    // preflight = true: dry-run variant of apply_task.
    async fn apply_task_preflight(
        &self,
        id: TaskId,
        diff_override: Option<String>,
    ) -> Result<ApplyOutcome> {
        self.apply_api().run(id, diff_override, true).await
    }
    async fn create_task(
        &self,
        env_id: &str,
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        best_of_n: usize,
    ) -> Result<crate::CreatedTask> {
        self.tasks_api()
            .create(env_id, prompt, git_ref, qa_mode, best_of_n)
            .await
    }
}
mod api {
use super::*;
use serde_json::Value;
use std::cmp::Ordering;
use std::collections::HashMap;
/// Borrowed view over an [`HttpClient`] exposing the task endpoints.
pub(crate) struct Tasks<'a> {
    base_url: &'a str,
    backend: &'a backend::Client,
}
impl<'a> Tasks<'a> {
/// Creates a task-endpoint view borrowing the client's URL and backend.
pub(crate) fn new(client: &'a HttpClient) -> Self {
    Self {
        base_url: &client.base_url,
        backend: &client.backend,
    }
}
pub(crate) async fn list(&self, env: Option<&str>) -> Result<Vec<TaskSummary>> {
let resp = self
.backend
.list_tasks(Some(20), Some("current"), env)
.await
.map_err(|e| CloudTaskError::Http(format!("list_tasks failed: {e}")))?;
let tasks: Vec<TaskSummary> = resp
.items
.into_iter()
.map(map_task_list_item_to_summary)
.collect();
append_error_log(&format!(
"http.list_tasks: env={} items={}",
env.unwrap_or("<all>"),
tasks.len()
));
Ok(tasks)
}
/// Builds a [`TaskSummary`] from the task-details endpoint.
///
/// The raw body is re-parsed as loose JSON because several fields (status
/// display, timestamps, environment label) are read from dynamic maps
/// rather than the typed client model.
pub(crate) async fn summary(&self, id: TaskId) -> Result<TaskSummary> {
    let id_str = id.0.clone();
    let (details, body, ct) = self
        .details_with_body(&id.0)
        .await
        .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
    let parsed: Value = serde_json::from_str(&body).map_err(|e| {
        CloudTaskError::Http(format!(
            "Decode error for {}: {e}; content-type={ct}; body={body}",
            id.0
        ))
    })?;
    let task_obj = parsed
        .get("task")
        .and_then(Value::as_object)
        .ok_or_else(|| {
            CloudTaskError::Http(format!("Task metadata missing from details for {id_str}"))
        })?;
    // The status display may live at the top level or nested under "task".
    let status_display = parsed
        .get("task_status_display")
        .or_else(|| task_obj.get("task_status_display"))
        .and_then(Value::as_object)
        .map(|m| {
            m.iter()
                .map(|(k, v)| (k.clone(), v.clone()))
                .collect::<HashMap<String, Value>>()
        });
    let status = map_status(status_display.as_ref());
    // Prefer counts from the status display; fall back to parsing the diff.
    let mut summary = diff_summary_from_status_display(status_display.as_ref());
    if summary.files_changed == 0
        && summary.lines_added == 0
        && summary.lines_removed == 0
        && let Some(diff) = details.unified_diff()
    {
        summary = diff_summary_from_diff(&diff);
    }
    // Timestamp preference: updated_at -> created_at -> newest turn.
    let updated_at_raw = task_obj
        .get("updated_at")
        .and_then(Value::as_f64)
        .or_else(|| task_obj.get("created_at").and_then(Value::as_f64))
        .or_else(|| latest_turn_timestamp(status_display.as_ref()));
    let environment_id = task_obj
        .get("environment_id")
        .and_then(Value::as_str)
        .map(str::to_string);
    let environment_label = env_label_from_status_display(status_display.as_ref());
    let attempt_total = attempt_total_from_status_display(status_display.as_ref());
    let title = task_obj
        .get("title")
        .and_then(Value::as_str)
        .unwrap_or("<untitled>")
        .to_string();
    let is_review = task_obj
        .get("is_review")
        .and_then(Value::as_bool)
        .unwrap_or(false);
    Ok(TaskSummary {
        id,
        title,
        status,
        updated_at: parse_updated_at(updated_at_raw.as_ref()),
        environment_id,
        environment_label,
        summary,
        is_review,
        attempt_total,
    })
}
/// Returns the task's unified diff, or `None` when the backend reports no
/// diff for it.
pub(crate) async fn diff(&self, id: TaskId) -> Result<Option<String>> {
    let (details, _body, _ct) = self
        .details_with_body(&id.0)
        .await
        .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
    // The raw body/content-type are only needed by callers that scrape the
    // JSON themselves; the typed response is sufficient here, so the old
    // `if let … return` plus `let _ = (body, ct)` collapses to one line.
    Ok(details.unified_diff())
}
/// Returns the assistant's text messages for a task.
///
/// Fallback order: typed messages, raw-body worklog extraction, the
/// assistant error (surfaced as a single "Task failed" message), and
/// finally an error embedding the queried URL and body for debugging.
pub(crate) async fn messages(&self, id: TaskId) -> Result<Vec<String>> {
    let (details, body, ct) = self
        .details_with_body(&id.0)
        .await
        .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
    let mut msgs = details.assistant_text_messages();
    if msgs.is_empty() {
        msgs.extend(extract_assistant_messages_from_body(&body));
    }
    if !msgs.is_empty() {
        return Ok(msgs);
    }
    if let Some(err) = details.assistant_error_message() {
        return Ok(vec![format!("Task failed: {err}")]);
    }
    // Unknown base-URL shape falls back to the /api/codex path format.
    let url = match details_path(self.base_url, &id.0) {
        Some(url) => url,
        None => format!("{}/api/codex/tasks/{}", self.base_url, id.0),
    };
    Err(CloudTaskError::Http(format!(
        "No assistant text messages in response. GET {url}; content-type={ct}; body={body}"
    )))
}
/// Returns the user prompt plus assistant messages and attempt metadata
/// (turn id, sibling turn ids, placement, status) for a task.
pub(crate) async fn task_text(&self, id: TaskId) -> Result<TaskText> {
    let (details, body, _ct) = self
        .details_with_body(&id.0)
        .await
        .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
    let prompt = details.user_text_prompt();
    let mut messages = details.assistant_text_messages();
    if messages.is_empty() {
        // Typed extraction found nothing; scrape the raw worklog JSON.
        messages.extend(extract_assistant_messages_from_body(&body));
    }
    let assistant_turn = details.current_assistant_turn.as_ref();
    let turn_id = assistant_turn.and_then(|turn| turn.id.clone());
    let sibling_turn_ids = assistant_turn
        .map(|turn| turn.sibling_turn_ids.clone())
        .unwrap_or_default();
    let attempt_placement = assistant_turn.and_then(|turn| turn.attempt_placement);
    let attempt_status = attempt_status_from_str(
        assistant_turn.and_then(|turn| turn.turn_status.as_deref()),
    );
    Ok(TaskText {
        prompt,
        messages,
        turn_id,
        sibling_turn_ids,
        attempt_placement,
        attempt_status,
    })
}
/// Creates a new task from `prompt` in environment `env_id` at `git_ref`.
///
/// Optionally attaches a starting diff from the `CODEX_STARTING_DIFF`
/// environment variable and a `best_of_n` metadata hint when more than one
/// attempt is requested. Success and failure are both logged.
pub(crate) async fn create(
    &self,
    env_id: &str,
    prompt: &str,
    git_ref: &str,
    qa_mode: bool,
    best_of_n: usize,
) -> Result<crate::CreatedTask> {
    // The user prompt is always the first input item.
    let mut input_items: Vec<serde_json::Value> = Vec::new();
    input_items.push(serde_json::json!({
        "type": "message",
        "role": "user",
        "content": [{ "content_type": "text", "text": prompt }]
    }));
    // A non-empty CODEX_STARTING_DIFF is forwarded as a pre-apply patch.
    if let Ok(diff) = std::env::var("CODEX_STARTING_DIFF")
        && !diff.is_empty()
    {
        input_items.push(serde_json::json!({
            "type": "pre_apply_patch",
            "output_diff": { "diff": diff }
        }));
    }
    let mut request_body = serde_json::json!({
        "new_task": {
            "environment_id": env_id,
            "branch": git_ref,
            "run_environment_in_qa_mode": qa_mode,
        },
        "input_items": input_items,
    });
    // best_of_n == 1 is the default; only send metadata when it differs.
    if best_of_n > 1
        && let Some(obj) = request_body.as_object_mut()
    {
        obj.insert(
            "metadata".to_string(),
            serde_json::json!({ "best_of_n": best_of_n }),
        );
    }
    match self.backend.create_task(request_body).await {
        Ok(id) => {
            append_error_log(&format!(
                "new_task: created id={id} env={} prompt_chars={}",
                env_id,
                prompt.chars().count()
            ));
            Ok(crate::CreatedTask { id: TaskId(id) })
        }
        Err(e) => {
            append_error_log(&format!(
                "new_task: create failed env={} prompt_chars={}: {}",
                env_id,
                prompt.chars().count(),
                e
            ));
            Err(CloudTaskError::Http(format!("create_task failed: {e}")))
        }
    }
}
/// Fetches the typed task details along with the raw response body and
/// content-type (kept for error reporting and raw-JSON scraping upstream).
async fn details_with_body(
    &self,
    id: &str,
) -> anyhow::Result<(backend::CodeTaskDetailsResponse, String, String)> {
    // Pass the tuple straight through; `?` still performs the same error
    // conversion the previous destructure-and-rewrap did.
    Ok(self.backend.get_task_details_with_body(id).await?)
}
}
/// Sibling-attempt (best-of-N) API calls; borrows the backend client from
/// the owning `HttpClient`.
pub(crate) struct Attempts<'a> {
    backend: &'a backend::Client,
}
impl<'a> Attempts<'a> {
    /// Borrows the backend handle from `client`.
    pub(crate) fn new(client: &'a HttpClient) -> Self {
        Self {
            backend: &client.backend,
        }
    }
    /// Fetches all sibling turns of `turn_id` for `task`, converts each to a
    /// `TurnAttempt` (turns without an id are dropped), and sorts them by
    /// placement, then creation time, then turn id.
    pub(crate) async fn list(&self, task: TaskId, turn_id: String) -> Result<Vec<TurnAttempt>> {
        let resp = self
            .backend
            .list_sibling_turns(&task.0, &turn_id)
            .await
            .map_err(|e| CloudTaskError::Http(format!("list_sibling_turns failed: {e}")))?;
        let mut attempts: Vec<TurnAttempt> = resp
            .sibling_turns
            .iter()
            .filter_map(turn_attempt_from_map)
            .collect();
        attempts.sort_by(compare_attempts);
        Ok(attempts)
    }
}
/// Local patch application (apply / preflight); borrows the backend client
/// from the owning `HttpClient`.
pub(crate) struct Apply<'a> {
    backend: &'a backend::Client,
}
impl<'a> Apply<'a> {
/// Borrows the backend handle from `client`.
pub(crate) fn new(client: &'a HttpClient) -> Self {
    Self {
        backend: &client.backend,
    }
}
/// Applies (or preflights) a task's diff against the current working tree.
///
/// When `diff_override` is `None` the diff is fetched from the task
/// details. Non-unified patches are rejected up front. On any outcome other
/// than a clean success, the git command, output tails, and the full patch
/// are appended to the diagnostic log.
pub(crate) async fn run(
    &self,
    task_id: TaskId,
    diff_override: Option<String>,
    preflight: bool,
) -> Result<ApplyOutcome> {
    let id = task_id.0.clone();
    // Resolve the diff: a caller-supplied override wins over a fresh fetch.
    let diff = match diff_override {
        Some(diff) => diff,
        None => {
            let details = self.backend.get_task_details(&id).await.map_err(|e| {
                CloudTaskError::Http(format!("get_task_details failed: {e}"))
            })?;
            details.unified_diff().ok_or_else(|| {
                CloudTaskError::Msg(format!("No diff available for task {id}"))
            })?
        }
    };
    // Refuse anything that does not look like a unified git diff.
    if !is_unified_diff(&diff) {
        let summary = summarize_patch_for_logging(&diff);
        let mode = if preflight { "preflight" } else { "apply" };
        append_error_log(&format!(
            "apply_error: id={id} mode={mode} format=non-unified; {summary}"
        ));
        return Ok(ApplyOutcome {
            applied: false,
            status: ApplyStatus::Error,
            message: "Expected unified git diff; backend returned an incompatible format."
                .to_string(),
            skipped_paths: Vec::new(),
            conflict_paths: Vec::new(),
        });
    }
    let req = codex_git::ApplyGitRequest {
        // Fall back to the temp dir when the cwd is unavailable.
        cwd: std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()),
        diff: diff.clone(),
        revert: false,
        preflight,
    };
    let r = codex_git::apply_git_patch(&req)
        .map_err(|e| CloudTaskError::Io(format!("git apply failed to run: {e}")))?;
    // Exit code 0 => success; any applied or conflicted paths => partial;
    // otherwise a hard error.
    let status = if r.exit_code == 0 {
        ApplyStatus::Success
    } else if !r.applied_paths.is_empty() || !r.conflicted_paths.is_empty() {
        ApplyStatus::Partial
    } else {
        ApplyStatus::Error
    };
    // A preflight never mutates the tree, so it never counts as "applied".
    let applied = matches!(status, ApplyStatus::Success) && !preflight;
    let message = if preflight {
        match status {
            ApplyStatus::Success => {
                format!("Preflight passed for task {id} (applies cleanly)")
            }
            ApplyStatus::Partial => format!(
                "Preflight: patch does not fully apply for task {id} (applied={}, skipped={}, conflicts={})",
                r.applied_paths.len(),
                r.skipped_paths.len(),
                r.conflicted_paths.len()
            ),
            ApplyStatus::Error => format!(
                "Preflight failed for task {id} (applied={}, skipped={}, conflicts={})",
                r.applied_paths.len(),
                r.skipped_paths.len(),
                r.conflicted_paths.len()
            ),
        }
    } else {
        match status {
            ApplyStatus::Success => format!(
                "Applied task {id} locally ({} files)",
                r.applied_paths.len()
            ),
            ApplyStatus::Partial => format!(
                "Apply partially succeeded for task {id} (applied={}, skipped={}, conflicts={})",
                r.applied_paths.len(),
                r.skipped_paths.len(),
                r.conflicted_paths.len()
            ),
            ApplyStatus::Error => format!(
                "Apply failed for task {id} (applied={}, skipped={}, conflicts={})",
                r.applied_paths.len(),
                r.skipped_paths.len(),
                r.conflicted_paths.len()
            ),
        }
    };
    // Full diagnostics for any non-success (including preflight misses).
    if matches!(status, ApplyStatus::Partial | ApplyStatus::Error)
        || (preflight && !matches!(status, ApplyStatus::Success))
    {
        let mut log = String::new();
        let summary = summarize_patch_for_logging(&diff);
        let mode = if preflight { "preflight" } else { "apply" };
        use std::fmt::Write as _;
        let _ = writeln!(
            &mut log,
            "apply_result: mode={} id={} status={:?} applied={} skipped={} conflicts={} cmd={}",
            mode,
            id,
            status,
            r.applied_paths.len(),
            r.skipped_paths.len(),
            r.conflicted_paths.len(),
            r.cmd_for_log
        );
        let _ = writeln!(
            &mut log,
            "stdout_tail=\n{}\nstderr_tail=\n{}",
            tail(&r.stdout, 2000),
            tail(&r.stderr, 2000)
        );
        let _ = writeln!(&mut log, "{summary}");
        let _ = writeln!(
            &mut log,
            "----- PATCH BEGIN -----\n{diff}\n----- PATCH END -----"
        );
        append_error_log(&log);
    }
    Ok(ApplyOutcome {
        applied,
        status,
        message,
        skipped_paths: r.skipped_paths,
        conflict_paths: r.conflicted_paths,
    })
}
}
/// Builds the task-details URL for the two recognized API roots; returns
/// `None` for an unrecognized base URL so the caller can apply its own
/// fallback. A `/backend-api` root takes precedence over `/api/codex`.
fn details_path(base_url: &str, id: &str) -> Option<String> {
    let path = if base_url.contains("/backend-api") {
        format!("{base_url}/wham/tasks/{id}")
    } else if base_url.contains("/api/codex") {
        format!("{base_url}/tasks/{id}")
    } else {
        return None;
    };
    Some(path)
}
/// Scrapes assistant text out of the raw details JSON at
/// `current_assistant_turn.worklog.messages`, used when the typed response
/// yields no messages.
///
/// Message parts may be plain strings or `{content_type: "text", text}`
/// objects; both forms are collected and empty strings are skipped.
fn extract_assistant_messages_from_body(body: &str) -> Vec<String> {
    let mut msgs = Vec::new();
    if let Ok(full) = serde_json::from_str::<serde_json::Value>(body)
        && let Some(arr) = full
            .get("current_assistant_turn")
            .and_then(|v| v.get("worklog"))
            .and_then(|v| v.get("messages"))
            .and_then(|v| v.as_array())
    {
        for m in arr {
            // Only messages authored by the assistant count.
            let is_assistant = m
                .get("author")
                .and_then(|a| a.get("role"))
                .and_then(|r| r.as_str())
                == Some("assistant");
            if !is_assistant {
                continue;
            }
            if let Some(parts) = m
                .get("content")
                .and_then(|c| c.get("parts"))
                .and_then(|p| p.as_array())
            {
                for p in parts {
                    // Plain-string part.
                    if let Some(s) = p.as_str() {
                        if !s.is_empty() {
                            msgs.push(s.to_string());
                        }
                        continue;
                    }
                    // Structured text part.
                    if let Some(obj) = p.as_object()
                        && obj.get("content_type").and_then(|t| t.as_str()) == Some("text")
                        && let Some(txt) = obj.get("text").and_then(|t| t.as_str())
                    {
                        msgs.push(txt.to_string());
                    }
                }
            }
        }
    }
    msgs
}
/// Builds a `TurnAttempt` from one raw sibling-turn JSON object.
/// Returns `None` when the turn lacks a string `id`; all other fields are
/// best-effort.
fn turn_attempt_from_map(turn: &HashMap<String, Value>) -> Option<TurnAttempt> {
    let turn_id = turn.get("id").and_then(Value::as_str)?.to_string();
    let attempt_placement = turn.get("attempt_placement").and_then(Value::as_i64);
    let created_at = parse_timestamp_value(turn.get("created_at"));
    let status = attempt_status_from_str(turn.get("turn_status").and_then(Value::as_str));
    let diff = extract_diff_from_turn(turn);
    let messages = extract_assistant_messages_from_turn(turn);
    Some(TurnAttempt {
        turn_id,
        attempt_placement,
        created_at,
        status,
        diff,
        messages,
    })
}
/// Orders attempts by `attempt_placement` (attempts without a placement sort
/// last), then by `created_at` (missing timestamps last), then by turn id.
/// Note: when both placements are present their comparison decides, even if
/// equal — later keys are only consulted when both placements are absent.
fn compare_attempts(a: &TurnAttempt, b: &TurnAttempt) -> Ordering {
    if let (Some(lhs), Some(rhs)) = (a.attempt_placement, b.attempt_placement) {
        return lhs.cmp(&rhs);
    }
    if a.attempt_placement.is_some() {
        return Ordering::Less;
    }
    if b.attempt_placement.is_some() {
        return Ordering::Greater;
    }
    if let (Some(lhs), Some(rhs)) = (a.created_at, b.created_at) {
        return lhs.cmp(&rhs);
    }
    if a.created_at.is_some() {
        return Ordering::Less;
    }
    if b.created_at.is_some() {
        return Ordering::Greater;
    }
    a.turn_id.cmp(&b.turn_id)
}
/// Finds the first non-empty diff among a turn's `output_items`.
///
/// Accepts both `output_diff` items (diff inline under `diff`) and `pr`
/// items (diff nested under `output_diff.diff`).
fn extract_diff_from_turn(turn: &HashMap<String, Value>) -> Option<String> {
    let items = turn.get("output_items").and_then(Value::as_array)?;
    for item in items {
        match item.get("type").and_then(Value::as_str) {
            Some("output_diff") => {
                if let Some(diff) = item.get("diff").and_then(Value::as_str)
                    && !diff.is_empty()
                {
                    return Some(diff.to_string());
                }
            }
            Some("pr") => {
                if let Some(diff) = item
                    .get("output_diff")
                    .and_then(Value::as_object)
                    .and_then(|od| od.get("diff"))
                    .and_then(Value::as_str)
                    && !diff.is_empty()
                {
                    return Some(diff.to_string());
                }
            }
            _ => {}
        }
    }
    None
}
/// Collects the non-empty text parts of a turn's `message`-type output
/// items; anything else in `output_items` is ignored.
fn extract_assistant_messages_from_turn(turn: &HashMap<String, Value>) -> Vec<String> {
    let mut msgs = Vec::new();
    if let Some(items) = turn.get("output_items").and_then(Value::as_array) {
        for item in items {
            if item.get("type").and_then(Value::as_str) != Some("message") {
                continue;
            }
            if let Some(content) = item.get("content").and_then(Value::as_array) {
                for part in content {
                    if part.get("content_type").and_then(Value::as_str) == Some("text")
                        && let Some(txt) = part.get("text").and_then(Value::as_str)
                        && !txt.is_empty()
                    {
                        msgs.push(txt.to_string());
                    }
                }
            }
        }
    }
    msgs
}
/// Maps a raw backend `turn_status` string to an `AttemptStatus`.
/// Missing or unrecognized values degrade to `Pending` (unchanged behavior).
fn attempt_status_from_str(raw: Option<&str>) -> AttemptStatus {
    match raw.unwrap_or_default() {
        "failed" => AttemptStatus::Failed,
        "completed" => AttemptStatus::Completed,
        "in_progress" => AttemptStatus::InProgress,
        "pending" => AttemptStatus::Pending,
        // Bug fix: the backend also reports "cancelled" (map_status already
        // special-cases it), but this function previously fell through to
        // Pending, leaving AttemptStatus::Cancelled unconstructed.
        "cancelled" => AttemptStatus::Cancelled,
        _ => AttemptStatus::Pending,
    }
}
fn parse_timestamp_value(v: Option<&Value>) -> Option<DateTime<Utc>> {
let ts = v?.as_f64()?;
let secs = ts as i64;
let nanos = ((ts - secs as f64) * 1_000_000_000.0) as u32;
Some(DateTime::<Utc>::from(
std::time::UNIX_EPOCH + std::time::Duration::new(secs.max(0) as u64, nanos),
))
}
/// Converts a backend task-list row into the UI-facing `TaskSummary`.
fn map_task_list_item_to_summary(src: backend::TaskListItem) -> TaskSummary {
    let status_display = src.task_status_display.as_ref();
    TaskSummary {
        id: TaskId(src.id),
        title: src.title,
        status: map_status(status_display),
        updated_at: parse_updated_at(src.updated_at.as_ref()),
        // The list endpoint does not carry an environment id.
        environment_id: None,
        environment_label: env_label_from_status_display(status_display),
        summary: diff_summary_from_status_display(status_display),
        // Any non-empty pull-request list marks the task as a review.
        is_review: src
            .pull_requests
            .as_ref()
            .is_some_and(|prs| !prs.is_empty()),
        attempt_total: attempt_total_from_status_display(status_display),
    }
}
/// Maps a raw `task_status_display` object to a coarse `TaskStatus`.
///
/// The latest turn's status takes precedence over the task-level `state`;
/// anything unrecognized (or a missing map) degrades to `Pending`.
fn map_status(v: Option<&HashMap<String, Value>>) -> TaskStatus {
    if let Some(val) = v {
        if let Some(turn) = val
            .get("latest_turn_status_display")
            .and_then(Value::as_object)
            && let Some(s) = turn.get("turn_status").and_then(Value::as_str)
        {
            return match s {
                "failed" => TaskStatus::Error,
                "completed" => TaskStatus::Ready,
                "in_progress" => TaskStatus::Pending,
                "pending" => TaskStatus::Pending,
                // Cancelled turns surface as errors in the task list.
                "cancelled" => TaskStatus::Error,
                _ => TaskStatus::Pending,
            };
        }
        if let Some(state) = val.get("state").and_then(Value::as_str) {
            return match state {
                "pending" => TaskStatus::Pending,
                "ready" => TaskStatus::Ready,
                "applied" => TaskStatus::Applied,
                "error" => TaskStatus::Error,
                _ => TaskStatus::Pending,
            };
        }
    }
    TaskStatus::Pending
}
/// Converts an optional fractional unix timestamp into a UTC `DateTime`;
/// negative seconds are clamped to the epoch, and a missing value falls
/// back to "now" so list rows always have a sortable timestamp.
fn parse_updated_at(ts: Option<&f64>) -> DateTime<Utc> {
    match ts {
        Some(&raw) => {
            let secs = raw as i64;
            let nanos = ((raw - secs as f64) * 1_000_000_000.0) as u32;
            let offset = std::time::Duration::new(secs.max(0) as u64, nanos);
            DateTime::<Utc>::from(std::time::UNIX_EPOCH + offset)
        }
        None => Utc::now(),
    }
}
/// Pulls the human-friendly `environment_label` string out of a raw status
/// display map, when present.
fn env_label_from_status_display(v: Option<&HashMap<String, Value>>) -> Option<String> {
    v.and_then(|map| map.get("environment_label"))
        .and_then(Value::as_str)
        .map(ToOwned::to_owned)
}
/// Counts files/added/removed lines from a unified diff.
///
/// `diff --git` headers count files; `+++`/`---`/`@@` header lines are
/// skipped; remaining lines starting with `+`/`-` count as added/removed.
/// A non-empty diff with no `diff --git` header still counts as one file.
fn diff_summary_from_diff(diff: &str) -> DiffSummary {
    let mut summary = DiffSummary {
        files_changed: 0,
        lines_added: 0,
        lines_removed: 0,
    };
    for line in diff.lines() {
        if line.starts_with("diff --git ") {
            summary.files_changed += 1;
        } else if line.starts_with("+++") || line.starts_with("---") || line.starts_with("@@") {
            // File headers and hunk markers are not content lines.
        } else if line.starts_with('+') {
            summary.lines_added += 1;
        } else if line.starts_with('-') {
            summary.lines_removed += 1;
        }
    }
    if summary.files_changed == 0 && !diff.trim().is_empty() {
        summary.files_changed = 1;
    }
    summary
}
/// Reads `diff_stats` (files_modified / lines_added / lines_removed) from
/// the latest turn's status display; anything missing or negative stays at
/// zero.
fn diff_summary_from_status_display(v: Option<&HashMap<String, Value>>) -> DiffSummary {
    let mut out = DiffSummary::default();
    let Some(map) = v else { return out };
    let latest = map
        .get("latest_turn_status_display")
        .and_then(Value::as_object);
    let Some(latest) = latest else { return out };
    if let Some(ds) = latest.get("diff_stats").and_then(Value::as_object) {
        if let Some(n) = ds.get("files_modified").and_then(Value::as_i64) {
            out.files_changed = n.max(0) as usize;
        }
        if let Some(n) = ds.get("lines_added").and_then(Value::as_i64) {
            out.lines_added = n.max(0) as usize;
        }
        if let Some(n) = ds.get("lines_removed").and_then(Value::as_i64) {
            out.lines_removed = n.max(0) as usize;
        }
    }
    out
}
/// Returns the latest turn's numeric timestamp from the status display.
/// `created_at` is consulted only when the `updated_at` key is absent
/// entirely — a present-but-non-numeric `updated_at` yields `None`.
fn latest_turn_timestamp(v: Option<&HashMap<String, Value>>) -> Option<f64> {
    let latest = v?
        .get("latest_turn_status_display")
        .and_then(Value::as_object)?;
    let raw = if latest.contains_key("updated_at") {
        latest.get("updated_at")
    } else {
        latest.get("created_at")
    };
    raw.and_then(Value::as_f64)
}
/// Derives the total attempt count from the latest turn's sibling list.
/// The current turn is not in its own sibling list, hence the +1.
fn attempt_total_from_status_display(v: Option<&HashMap<String, Value>>) -> Option<usize> {
    let siblings = v?
        .get("latest_turn_status_display")
        .and_then(Value::as_object)?
        .get("sibling_turn_ids")
        .and_then(Value::as_array)?;
    Some(siblings.len().saturating_add(1))
}
/// Heuristically decides whether `diff` looks like a unified git diff.
///
/// A `diff --git` header is accepted outright; otherwise both `---`/`+++`
/// file headers and at least one `@@` hunk marker are required.
fn is_unified_diff(diff: &str) -> bool {
    let t = diff.trim_start();
    if t.starts_with("diff --git ") {
        return true;
    }
    // Bug fix: headers may sit at the very start of the string with no
    // preceding newline (a minimal "--- a/f\n+++ b/f\n@@ ..." diff), which
    // the newline-anchored `contains` checks alone missed.
    let has_minus_header = t.starts_with("--- ") || diff.contains("\n--- ");
    let has_plus_header = t.starts_with("+++ ") || diff.contains("\n+++ ");
    let has_hunk = t.starts_with("@@ ") || diff.contains("\n@@ ");
    has_minus_header && has_plus_header && has_hunk
}
/// Returns at most the last `max` bytes of `s` as an owned string.
///
/// Bug fix: the previous `s[s.len() - max..]` slice panics when the cut
/// lands inside a multi-byte UTF-8 character (git stdout/stderr can contain
/// arbitrary text). The start index is now advanced to the next char
/// boundary, dropping any partially-cut character.
fn tail(s: &str, max: usize) -> String {
    if s.len() <= max {
        return s.to_string();
    }
    let mut start = s.len() - max;
    while !s.is_char_boundary(start) {
        start += 1;
    }
    s[start..].to_string()
}
/// Produces a one-shot log summary of a patch: a guessed format kind,
/// line/char counts, the current working directory, and a truncated head
/// (first 20 lines, capped near 800 bytes).
fn summarize_patch_for_logging(patch: &str) -> String {
    let trimmed = patch.trim_start();
    // NOTE(review): the "*** End Patch" marker in the git-diff arm looks odd
    // (it is a codex-patch trailer) — preserved as-is pending confirmation.
    let kind = if trimmed.starts_with("*** Begin Patch") {
        "codex-patch"
    } else if trimmed.starts_with("diff --git ") || trimmed.contains("\n*** End Patch\n") {
        "git-diff"
    } else if trimmed.starts_with("@@ ") || trimmed.contains("\n@@ ") {
        "unified-diff"
    } else {
        "unknown"
    };
    let lines = patch.lines().count();
    let chars = patch.len();
    let cwd = std::env::current_dir()
        .ok()
        .map(|p| p.display().to_string())
        .unwrap_or_else(|| "<unknown>".to_string());
    let head: String = patch.lines().take(20).collect::<Vec<&str>>().join("\n");
    let head_trunc = if head.len() > 800 {
        // Bug fix: `&head[..800]` panics when byte 800 falls inside a
        // multi-byte UTF-8 character; back off to the previous char boundary.
        let mut cut = 800;
        while !head.is_char_boundary(cut) {
            cut -= 1;
        }
        format!("{}…", &head[..cut])
    } else {
        head
    };
    format!(
        "patch_summary: kind={kind} lines={lines} chars={chars} cwd={cwd} ; head=\n{head_trunc}"
    )
}
}
/// Appends a timestamped line to `./error.log`, best-effort: failures to
/// open or write the file are deliberately ignored so diagnostics can never
/// break the main flow.
fn append_error_log(message: &str) {
    let timestamp = Utc::now().to_rfc3339();
    let file = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open("error.log");
    if let Ok(mut f) = file {
        use std::io::Write as _;
        let _ = writeln!(f, "[{timestamp}] {message}");
    }
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/cloud-tasks-client/src/api.rs | codex-rs/cloud-tasks-client/src/api.rs | use chrono::DateTime;
use chrono::Utc;
use serde::Deserialize;
use serde::Serialize;
/// Crate-wide result alias over [`CloudTaskError`].
pub type Result<T> = std::result::Result<T, CloudTaskError>;
/// Errors surfaced by cloud-task backends.
#[derive(Debug, thiserror::Error)]
pub enum CloudTaskError {
    /// Feature not implemented by this backend.
    #[error("unimplemented: {0}")]
    Unimplemented(&'static str),
    /// Transport or protocol failure talking to the backend.
    #[error("http error: {0}")]
    Http(String),
    /// Local filesystem/process failure (e.g. running git apply).
    #[error("io error: {0}")]
    Io(String),
    /// Free-form error message.
    #[error("{0}")]
    Msg(String),
}
/// Opaque backend task identifier (serialized as a bare string).
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct TaskId(pub String);
/// Coarse lifecycle state of a task.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum TaskStatus {
    Pending,
    Ready,
    Applied,
    Error,
}
/// One row of task metadata, as shown in task lists.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct TaskSummary {
    pub id: TaskId,
    pub title: String,
    pub status: TaskStatus,
    pub updated_at: DateTime<Utc>,
    /// Backend environment identifier (when available)
    pub environment_id: Option<String>,
    /// Human-friendly environment label (when available)
    pub environment_label: Option<String>,
    /// Files-changed / lines-added / lines-removed counts.
    pub summary: DiffSummary,
    /// True when the backend reports this task as a code review.
    #[serde(default)]
    pub is_review: bool,
    /// Number of assistant attempts (best-of-N), when reported by the backend.
    #[serde(default)]
    pub attempt_total: Option<usize>,
}
/// Status of a single best-of-N attempt; `Unknown` is the default for
/// missing or unrecognized backend values.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub enum AttemptStatus {
    Pending,
    InProgress,
    Completed,
    Failed,
    Cancelled,
    #[default]
    Unknown,
}
/// One sibling attempt (turn) of a task, with its optional diff/messages.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TurnAttempt {
    pub turn_id: String,
    /// Position among sibling attempts, when reported by the backend.
    pub attempt_placement: Option<i64>,
    pub created_at: Option<DateTime<Utc>>,
    pub status: AttemptStatus,
    pub diff: Option<String>,
    pub messages: Vec<String>,
}
/// Outcome category of a local patch application.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ApplyStatus {
    Success,
    Partial,
    Error,
}
/// Result of applying (or preflighting) a task's diff locally.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct ApplyOutcome {
    /// True only for a fully successful non-preflight apply.
    pub applied: bool,
    pub status: ApplyStatus,
    /// Human-readable outcome description.
    pub message: String,
    #[serde(default)]
    pub skipped_paths: Vec<String>,
    #[serde(default)]
    pub conflict_paths: Vec<String>,
}
/// Identifier of a newly created task.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CreatedTask {
    pub id: TaskId,
}
/// Aggregate diff statistics for a task.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct DiffSummary {
    pub files_changed: usize,
    pub lines_added: usize,
    pub lines_removed: usize,
}
/// Prompt, assistant messages, and attempt metadata for one task turn.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TaskText {
    pub prompt: Option<String>,
    pub messages: Vec<String>,
    pub turn_id: Option<String>,
    pub sibling_turn_ids: Vec<String>,
    pub attempt_placement: Option<i64>,
    pub attempt_status: AttemptStatus,
}
impl Default for TaskText {
fn default() -> Self {
Self {
prompt: None,
messages: Vec::new(),
turn_id: None,
sibling_turn_ids: Vec::new(),
attempt_placement: None,
attempt_status: AttemptStatus::Unknown,
}
}
}
/// Abstraction over a cloud-task backend (HTTP client or mock).
#[async_trait::async_trait]
pub trait CloudBackend: Send + Sync {
    /// List tasks, optionally filtered to one environment.
    async fn list_tasks(&self, env: Option<&str>) -> Result<Vec<TaskSummary>>;
    /// Fetch the summary row for a single task.
    async fn get_task_summary(&self, id: TaskId) -> Result<TaskSummary>;
    /// Fetch the task's unified diff, when one exists.
    async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>>;
    /// Return assistant output messages (no diff) when available.
    async fn get_task_messages(&self, id: TaskId) -> Result<Vec<String>>;
    /// Return the creating prompt and assistant messages (when available).
    async fn get_task_text(&self, id: TaskId) -> Result<TaskText>;
    /// Return any sibling attempts (best-of-N) for the given assistant turn.
    async fn list_sibling_attempts(
        &self,
        task: TaskId,
        turn_id: String,
    ) -> Result<Vec<TurnAttempt>>;
    /// Dry-run apply (preflight) that validates whether the patch would apply cleanly.
    /// Never modifies the working tree. When `diff_override` is supplied, the provided diff is
    /// used instead of re-fetching the task details so callers can apply alternate attempts.
    async fn apply_task_preflight(
        &self,
        id: TaskId,
        diff_override: Option<String>,
    ) -> Result<ApplyOutcome>;
    /// Apply the task's diff to the local working tree; `diff_override`
    /// replaces the fetched diff when supplied.
    async fn apply_task(&self, id: TaskId, diff_override: Option<String>) -> Result<ApplyOutcome>;
    /// Create a new task; `best_of_n > 1` requests multiple attempts.
    async fn create_task(
        &self,
        env_id: &str,
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        best_of_n: usize,
    ) -> Result<CreatedTask>;
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/cloud-tasks-client/src/mock.rs | codex-rs/cloud-tasks-client/src/mock.rs | use crate::ApplyOutcome;
use crate::AttemptStatus;
use crate::CloudBackend;
use crate::CloudTaskError;
use crate::DiffSummary;
use crate::Result;
use crate::TaskId;
use crate::TaskStatus;
use crate::TaskSummary;
use crate::TurnAttempt;
use crate::api::TaskText;
use chrono::Utc;
/// In-memory `CloudBackend` for tests: returns canned tasks, diffs, and
/// messages without any network access.
#[derive(Clone, Default)]
pub struct MockClient;
#[async_trait::async_trait]
impl CloudBackend for MockClient {
    /// Returns a fixed task list; `env-A` / `env-B` yield distinct rows so
    /// environment-filtering tests can tell them apart.
    async fn list_tasks(&self, _env: Option<&str>) -> Result<Vec<TaskSummary>> {
        // Slightly vary content by env to aid tests that rely on the mock
        let rows = match _env {
            Some("env-A") => vec![("T-2000", "A: First", TaskStatus::Ready)],
            Some("env-B") => vec![
                ("T-3000", "B: One", TaskStatus::Ready),
                ("T-3001", "B: Two", TaskStatus::Pending),
            ],
            _ => vec![
                ("T-1000", "Update README formatting", TaskStatus::Ready),
                ("T-1001", "Fix clippy warnings in core", TaskStatus::Pending),
                ("T-1002", "Add contributing guide", TaskStatus::Ready),
            ],
        };
        let environment_id = _env.map(str::to_string);
        let environment_label = match _env {
            Some("env-A") => Some("Env A".to_string()),
            Some("env-B") => Some("Env B".to_string()),
            Some(other) => Some(other.to_string()),
            None => Some("Global".to_string()),
        };
        let mut out = Vec::new();
        for (id_str, title, status) in rows {
            let id = TaskId(id_str.to_string());
            // Derive added/removed counts from the canned diff so summaries
            // stay consistent with what `get_task_diff` returns.
            let diff = mock_diff_for(&id);
            let (a, d) = count_from_unified(&diff);
            out.push(TaskSummary {
                id,
                title: title.to_string(),
                status,
                updated_at: Utc::now(),
                environment_id: environment_id.clone(),
                environment_label: environment_label.clone(),
                summary: DiffSummary {
                    files_changed: 1,
                    lines_added: a,
                    lines_removed: d,
                },
                is_review: false,
                // T-1000 pretends to have a second best-of-N attempt.
                attempt_total: Some(if id_str == "T-1000" { 2 } else { 1 }),
            });
        }
        Ok(out)
    }
    /// Looks the task up in the unfiltered mock list.
    async fn get_task_summary(&self, id: TaskId) -> Result<TaskSummary> {
        let tasks = self.list_tasks(None).await?;
        tasks
            .into_iter()
            .find(|t| t.id == id)
            .ok_or_else(|| CloudTaskError::Msg(format!("Task {} not found (mock)", id.0)))
    }
    /// Always returns the canned diff for the id.
    async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>> {
        Ok(Some(mock_diff_for(&id)))
    }
    /// Fixed single assistant message, regardless of the id.
    async fn get_task_messages(&self, _id: TaskId) -> Result<Vec<String>> {
        Ok(vec![
            "Mock assistant output: this task contains no diff.".to_string(),
        ])
    }
    /// Fixed prompt/message pair attributed to a completed mock turn.
    async fn get_task_text(&self, _id: TaskId) -> Result<TaskText> {
        Ok(TaskText {
            prompt: Some("Why is there no diff?".to_string()),
            messages: vec!["Mock assistant output: this task contains no diff.".to_string()],
            turn_id: Some("mock-turn".to_string()),
            sibling_turn_ids: Vec::new(),
            attempt_placement: Some(0),
            attempt_status: AttemptStatus::Completed,
        })
    }
    /// Pretends the apply succeeded without touching the working tree.
    async fn apply_task(&self, id: TaskId, _diff_override: Option<String>) -> Result<ApplyOutcome> {
        Ok(ApplyOutcome {
            applied: true,
            status: crate::ApplyStatus::Success,
            message: format!("Applied task {} locally (mock)", id.0),
            skipped_paths: Vec::new(),
            conflict_paths: Vec::new(),
        })
    }
    /// Pretends the preflight passed; `applied` stays false by contract.
    async fn apply_task_preflight(
        &self,
        id: TaskId,
        _diff_override: Option<String>,
    ) -> Result<ApplyOutcome> {
        Ok(ApplyOutcome {
            applied: false,
            status: crate::ApplyStatus::Success,
            message: format!("Preflight passed for task {} (mock)", id.0),
            skipped_paths: Vec::new(),
            conflict_paths: Vec::new(),
        })
    }
    /// Only T-1000 has a sibling attempt, mirroring its attempt_total of 2.
    async fn list_sibling_attempts(
        &self,
        task: TaskId,
        _turn_id: String,
    ) -> Result<Vec<TurnAttempt>> {
        if task.0 == "T-1000" {
            return Ok(vec![TurnAttempt {
                turn_id: "T-1000-attempt-2".to_string(),
                attempt_placement: Some(1),
                created_at: Some(Utc::now()),
                status: AttemptStatus::Completed,
                diff: Some(mock_diff_for(&task)),
                messages: vec!["Mock alternate attempt".to_string()],
            }]);
        }
        Ok(Vec::new())
    }
    /// Generates a unique local id; all inputs are accepted and ignored.
    async fn create_task(
        &self,
        env_id: &str,
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        best_of_n: usize,
    ) -> Result<crate::CreatedTask> {
        let _ = (env_id, prompt, git_ref, qa_mode, best_of_n);
        let id = format!("task_local_{}", chrono::Utc::now().timestamp_millis());
        Ok(crate::CreatedTask { id: TaskId(id) })
    }
}
/// Returns a canned unified diff for the given mock task id; unknown ids
/// get the CONTRIBUTING.md diff.
fn mock_diff_for(id: &TaskId) -> String {
    let diff: &str = match id.0.as_str() {
        "T-1000" => "diff --git a/README.md b/README.md\nindex 000000..111111 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,2 +1,3 @@\n Intro\n-Hello\n+Hello, world!\n+Task: T-1000\n",
        "T-1001" => "diff --git a/core/src/lib.rs b/core/src/lib.rs\nindex 000000..111111 100644\n--- a/core/src/lib.rs\n+++ b/core/src/lib.rs\n@@ -1,2 +1,1 @@\n-use foo;\n use bar;\n",
        _ => "diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md\nindex 000000..111111 100644\n--- /dev/null\n+++ b/CONTRIBUTING.md\n@@ -0,0 +1,3 @@\n+## Contributing\n+Please open PRs.\n+Thanks!\n",
    };
    diff.to_string()
}
/// Counts (added, removed) lines in a unified diff.
///
/// Prefers `diffy`'s parser; when the diff does not parse, falls back to a
/// naive scan that skips `+++`/`---`/`@@` header lines and counts the
/// remaining `+`/`-` prefixed lines.
fn count_from_unified(diff: &str) -> (usize, usize) {
    if let Ok(patch) = diffy::Patch::from_str(diff) {
        patch
            .hunks()
            .iter()
            .flat_map(diffy::Hunk::lines)
            .fold((0, 0), |(a, d), l| match l {
                diffy::Line::Insert(_) => (a + 1, d),
                diffy::Line::Delete(_) => (a, d + 1),
                _ => (a, d),
            })
    } else {
        let mut a = 0;
        let mut d = 0;
        for l in diff.lines() {
            // Header lines also start with '+'/'-' but are not content.
            if l.starts_with("+++") || l.starts_with("---") || l.starts_with("@@") {
                continue;
            }
            match l.as_bytes().first() {
                Some(b'+') => a += 1,
                Some(b'-') => d += 1,
                _ => {}
            }
        }
        (a, d)
    }
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server-protocol/src/lib.rs | codex-rs/app-server-protocol/src/lib.rs | mod export;
mod jsonrpc_lite;
mod protocol;
pub use export::generate_json;
pub use export::generate_ts;
pub use export::generate_types;
pub use jsonrpc_lite::*;
pub use protocol::common::*;
pub use protocol::thread_history::*;
pub use protocol::v1::*;
pub use protocol::v2::*;
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server-protocol/src/export.rs | codex-rs/app-server-protocol/src/export.rs | use crate::ClientNotification;
use crate::ClientRequest;
use crate::ServerNotification;
use crate::ServerRequest;
use crate::export_client_notification_schemas;
use crate::export_client_param_schemas;
use crate::export_client_response_schemas;
use crate::export_client_responses;
use crate::export_server_notification_schemas;
use crate::export_server_param_schemas;
use crate::export_server_response_schemas;
use crate::export_server_responses;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use codex_protocol::protocol::EventMsg;
use schemars::JsonSchema;
use schemars::schema_for;
use serde::Serialize;
use serde_json::Map;
use serde_json::Value;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ffi::OsStr;
use std::fs;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use ts_rs::TS;
/// Banner prepended to every generated TypeScript file.
const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
/// Definition names dropped from the bundled schema output.
const IGNORED_DEFINITIONS: &[&str] = &["Option<()>"];
/// One JSON schema produced during export, plus bundling metadata.
#[derive(Clone)]
pub struct GeneratedSchema {
    // Optional grouping namespace used when assembling the bundle.
    namespace: Option<String>,
    // Name the schema is registered under in the bundle.
    logical_name: String,
    // The schema document itself.
    value: Value,
    // NOTE(review): presumably marks schemas written under a v1 output
    // directory — not read anywhere in this chunk; confirm against callers.
    in_v1_dir: bool,
}
impl GeneratedSchema {
    /// Namespace as a borrowed `&str`, if any.
    fn namespace(&self) -> Option<&str> {
        self.namespace.as_deref()
    }
    /// Registered name of the schema.
    fn logical_name(&self) -> &str {
        &self.logical_name
    }
    /// The schema document.
    fn value(&self) -> &Value {
        &self.value
    }
}
/// A function that writes one envelope schema into the output directory and
/// returns the schema it wrote.
type JsonSchemaEmitter = fn(&Path) -> Result<GeneratedSchema>;
/// Generates both the TypeScript bindings and the JSON schema bundle into
/// `out_dir`; `prettier`, when supplied, formats the TS output.
pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
    generate_ts(out_dir, prettier)?;
    generate_json(out_dir)?;
    Ok(())
}
/// Knobs for TypeScript generation; all steps are enabled by default.
#[derive(Clone, Copy, Debug)]
pub struct GenerateTsOptions {
    /// Emit index.ts files for the output directories.
    pub generate_indices: bool,
    /// Prepend the generated-code header to files missing it.
    pub ensure_headers: bool,
    /// Run Prettier over the generated files (when a binary is provided).
    pub run_prettier: bool,
}
impl Default for GenerateTsOptions {
    // Hand-written because derived Default would set every bool to false.
    fn default() -> Self {
        Self {
            generate_indices: true,
            ensure_headers: true,
            run_prettier: true,
        }
    }
}
/// Generates the TypeScript bindings with the default options
/// (indices + headers + Prettier all enabled).
pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
    generate_ts_with_options(out_dir, prettier, GenerateTsOptions::default())
}
/// Exports all client/server request, response, and notification types as
/// TypeScript into `out_dir` (v2 types under `out_dir/v2`), then optionally
/// writes index files, ensures the generated-code header, and formats the
/// result with Prettier.
pub fn generate_ts_with_options(
    out_dir: &Path,
    prettier: Option<&Path>,
    options: GenerateTsOptions,
) -> Result<()> {
    let v2_out_dir = out_dir.join("v2");
    ensure_dir(out_dir)?;
    ensure_dir(&v2_out_dir)?;
    ClientRequest::export_all_to(out_dir)?;
    export_client_responses(out_dir)?;
    ClientNotification::export_all_to(out_dir)?;
    ServerRequest::export_all_to(out_dir)?;
    export_server_responses(out_dir)?;
    ServerNotification::export_all_to(out_dir)?;
    if options.generate_indices {
        generate_index_ts(out_dir)?;
        generate_index_ts(&v2_out_dir)?;
    }
    // Ensure our header is present on all TS files (root + subdirs like v2/).
    let mut ts_files = Vec::new();
    // Collect the file list once; the header pass and Prettier both use it.
    let should_collect_ts_files =
        options.ensure_headers || (options.run_prettier && prettier.is_some());
    if should_collect_ts_files {
        ts_files = ts_files_in_recursive(out_dir)?;
    }
    if options.ensure_headers {
        for file in &ts_files {
            prepend_header_if_missing(file)?;
        }
    }
    // Optionally run Prettier on all generated TS files.
    if options.run_prettier
        && let Some(prettier_bin) = prettier
        && !ts_files.is_empty()
    {
        let status = Command::new(prettier_bin)
            .arg("--write")
            .arg("--log-level")
            .arg("warn")
            .args(ts_files.iter().map(|p| p.as_os_str()))
            .status()
            .with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
        if !status.success() {
            return Err(anyhow!("Prettier failed with status {status}"));
        }
    }
    Ok(())
}
/// Writes every protocol JSON schema (envelope types plus per-method
/// params, responses, and notifications) into `out_dir` and bundles them
/// all into `codex_app_server_protocol.schemas.json`.
pub fn generate_json(out_dir: &Path) -> Result<()> {
    ensure_dir(out_dir)?;
    // Top-level envelope types, exported one by one via the shared writer.
    let envelope_emitters: &[JsonSchemaEmitter] = &[
        |d| write_json_schema_with_return::<crate::RequestId>(d, "RequestId"),
        |d| write_json_schema_with_return::<crate::JSONRPCMessage>(d, "JSONRPCMessage"),
        |d| write_json_schema_with_return::<crate::JSONRPCRequest>(d, "JSONRPCRequest"),
        |d| write_json_schema_with_return::<crate::JSONRPCNotification>(d, "JSONRPCNotification"),
        |d| write_json_schema_with_return::<crate::JSONRPCResponse>(d, "JSONRPCResponse"),
        |d| write_json_schema_with_return::<crate::JSONRPCError>(d, "JSONRPCError"),
        |d| write_json_schema_with_return::<crate::JSONRPCErrorError>(d, "JSONRPCErrorError"),
        |d| write_json_schema_with_return::<crate::ClientRequest>(d, "ClientRequest"),
        |d| write_json_schema_with_return::<crate::ServerRequest>(d, "ServerRequest"),
        |d| write_json_schema_with_return::<crate::ClientNotification>(d, "ClientNotification"),
        |d| write_json_schema_with_return::<crate::ServerNotification>(d, "ServerNotification"),
        |d| write_json_schema_with_return::<EventMsg>(d, "EventMsg"),
    ];
    let mut schemas: Vec<GeneratedSchema> = Vec::new();
    for emit in envelope_emitters {
        schemas.push(emit(out_dir)?);
    }
    // Per-direction schema groups exported by helpers elsewhere in the crate.
    schemas.extend(export_client_param_schemas(out_dir)?);
    schemas.extend(export_client_response_schemas(out_dir)?);
    schemas.extend(export_server_param_schemas(out_dir)?);
    schemas.extend(export_server_response_schemas(out_dir)?);
    schemas.extend(export_client_notification_schemas(out_dir)?);
    schemas.extend(export_server_notification_schemas(out_dir)?);
    let bundle = build_schema_bundle(schemas)?;
    write_pretty_json(
        out_dir.join("codex_app_server_protocol.schemas.json"),
        &bundle,
    )?;
    Ok(())
}
/// Merge individually generated schemas into one draft-07 bundle whose
/// `definitions` map holds every type, with non-v1 namespaced types nested
/// under their namespace key (e.g. `definitions.v2.Thread`).
fn build_schema_bundle(schemas: Vec<GeneratedSchema>) -> Result<Value> {
    // Top-level envelope enums: their inline copies inside other schemas'
    // `definitions` blocks are dropped rather than hoisted, so the bundle
    // keeps a single canonical definition for each.
    const SPECIAL_DEFINITIONS: &[&str] = &[
        "ClientNotification",
        "ClientRequest",
        "EventMsg",
        "ServerNotification",
        "ServerRequest",
    ];
    let namespaced_types = collect_namespaced_types(&schemas);
    let mut definitions = Map::new();
    for schema in schemas {
        let GeneratedSchema {
            namespace,
            logical_name,
            mut value,
            in_v1_dir,
        } = schema;
        if IGNORED_DEFINITIONS.contains(&logical_name.as_str()) {
            continue;
        }
        // Point this schema's own $refs at its namespace before hoisting.
        if let Some(ref ns) = namespace {
            rewrite_refs_to_namespace(&mut value, ns);
        }
        // (name, namespace) pairs whose refs in `value` must be rewritten
        // after the inline `definitions` block has been hoisted out.
        let mut forced_namespace_refs: Vec<(String, String)> = Vec::new();
        if let Value::Object(ref mut obj) = value
            && let Some(defs) = obj.remove("definitions")
            && let Value::Object(defs_obj) = defs
        {
            for (def_name, mut def_schema) in defs_obj {
                if IGNORED_DEFINITIONS.contains(&def_name.as_str()) {
                    continue;
                }
                if SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
                    continue;
                }
                annotate_schema(&mut def_schema, Some(def_name.as_str()));
                // A schema's own namespace wins; otherwise look the definition
                // up in the global table (skipped for v1-dir schemas, which
                // stay at the bundle root).
                let target_namespace = match namespace {
                    Some(ref ns) => Some(ns.clone()),
                    None => namespace_for_definition(&def_name, &namespaced_types)
                        .cloned()
                        .filter(|_| !in_v1_dir),
                };
                if let Some(ref ns) = target_namespace {
                    if namespace.as_deref() == Some(ns.as_str()) {
                        rewrite_refs_to_namespace(&mut def_schema, ns);
                        insert_into_namespace(&mut definitions, ns, def_name.clone(), def_schema)?;
                    } else if !forced_namespace_refs
                        .iter()
                        .any(|(name, existing_ns)| name == &def_name && existing_ns == ns)
                    {
                        // Definition lives in another namespace: drop the inline
                        // copy and remember to retarget refs to it.
                        forced_namespace_refs.push((def_name.clone(), ns.clone()));
                    }
                } else {
                    definitions.insert(def_name, def_schema);
                }
            }
        }
        for (name, ns) in forced_namespace_refs {
            rewrite_named_ref_to_namespace(&mut value, &ns, &name);
        }
        if let Some(ref ns) = namespace {
            insert_into_namespace(&mut definitions, ns, logical_name.clone(), value)?;
        } else {
            definitions.insert(logical_name, value);
        }
    }
    // Assemble the draft-07 root object around the collected definitions.
    let mut root = Map::new();
    root.insert(
        "$schema".to_string(),
        Value::String("http://json-schema.org/draft-07/schema#".into()),
    );
    root.insert(
        "title".to_string(),
        Value::String("CodexAppServerProtocol".into()),
    );
    root.insert("type".to_string(), Value::String("object".into()));
    root.insert("definitions".to_string(), Value::Object(definitions));
    Ok(Value::Object(root))
}
/// Insert `schema` as `definitions[namespace][name]`, creating the namespace
/// object on first use. Errors if the namespace slot already holds a
/// non-object value.
fn insert_into_namespace(
    definitions: &mut Map<String, Value>,
    namespace: &str,
    name: String,
    schema: Value,
) -> Result<()> {
    let slot = definitions
        .entry(namespace.to_string())
        .or_insert_with(|| Value::Object(Map::new()));
    if let Value::Object(ns_map) = slot {
        ns_map.insert(name, schema);
        Ok(())
    } else {
        Err(anyhow!("expected namespace {namespace} to be an object"))
    }
}
/// Derive the JSON Schema for `T`, write it to disk (unless ignored), and
/// return it together with its namespace/name metadata for later bundling.
///
/// `name` may be a plain type name or a namespaced one like `"v2::Type"`.
fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
where
    T: JsonSchema,
{
    let file_stem = name.trim();
    let schema = schema_for!(T);
    let mut schema_value = serde_json::to_value(schema)?;
    annotate_schema(&mut schema_value, Some(file_stem));
    // If the name looks like a namespaced path (e.g., "v2::Type"), mirror
    // the TypeScript layout and write to out_dir/v2/Type.json. Otherwise
    // write alongside the legacy files.
    let (raw_namespace, logical_name) = split_namespace(file_stem);
    let out_path = if let Some(ns) = raw_namespace {
        let dir = out_dir.join(ns);
        ensure_dir(&dir)?;
        dir.join(format!("{logical_name}.json"))
    } else {
        out_dir.join(format!("{file_stem}.json"))
    };
    if !IGNORED_DEFINITIONS.contains(&logical_name) {
        write_pretty_json(out_path, &schema_value)
            .with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
    }
    // "v1" is the legacy layout: its types keep root-level (un-namespaced)
    // definitions in the bundle, flagged via `in_v1_dir`.
    let namespace = match raw_namespace {
        Some("v1") | None => None,
        Some(ns) => Some(ns.to_string()),
    };
    Ok(GeneratedSchema {
        in_v1_dir: raw_namespace == Some("v1"),
        namespace,
        logical_name: logical_name.to_string(),
        value: schema_value,
    })
}
/// Crate-internal alias for [`write_json_schema_with_return`] so sibling
/// modules can emit a single schema file.
pub(crate) fn write_json_schema<T>(out_dir: &Path, name: &str) -> Result<GeneratedSchema>
where
    T: JsonSchema,
{
    write_json_schema_with_return::<T>(out_dir, name)
}
/// Serialize `value` as pretty-printed JSON and write it to `path`, adding
/// path context to both the serialization and I/O error cases.
fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
    let bytes = serde_json::to_vec_pretty(value)
        .with_context(|| format!("Failed to serialize JSON schema to {}", path.display()))?;
    fs::write(&path, bytes).with_context(|| format!("Failed to write {}", path.display()))
}
/// Split a fully-qualified type name like "v2::Type" into its namespace and
/// logical name. Only the first "::" is significant; a name without one has
/// no namespace.
fn split_namespace(name: &str) -> (Option<&str>, &str) {
    match name.split_once("::") {
        Some((ns, rest)) => (Some(ns), rest),
        None => (None, name),
    }
}
/// Recursively rewrite $ref values that point at "#/definitions/..." so that
/// they point to a namespaced location under the bundle. Refs that already
/// start with "{ns}/" are left untouched.
fn rewrite_refs_to_namespace(value: &mut Value, ns: &str) {
    match value {
        Value::Object(obj) => {
            // Fix this node's own $ref, if it targets the definitions root.
            if let Some(Value::String(reference)) = obj.get_mut("$ref")
                && let Some(suffix) = reference.strip_prefix("#/definitions/")
            {
                if !suffix.starts_with(&format!("{ns}/")) {
                    *reference = format!("#/definitions/{ns}/{suffix}");
                }
            }
            obj.values_mut()
                .for_each(|child| rewrite_refs_to_namespace(child, ns));
        }
        Value::Array(items) => {
            items
                .iter_mut()
                .for_each(|child| rewrite_refs_to_namespace(child, ns));
        }
        _ => {}
    }
}
/// Build a map from logical type name to the namespace that first declared it,
/// covering each namespaced schema's own name plus everything in its
/// `definitions` / `$defs` blocks. First declaration wins on conflicts.
fn collect_namespaced_types(schemas: &[GeneratedSchema]) -> HashMap<String, String> {
    let mut types = HashMap::new();
    for schema in schemas {
        if let Some(ns) = schema.namespace() {
            types
                .entry(schema.logical_name().to_string())
                .or_insert_with(|| ns.to_string());
            // Inline definitions may appear under either key depending on the
            // schema draft, so scan both.
            if let Some(Value::Object(defs)) = schema.value().get("definitions") {
                for key in defs.keys() {
                    types.entry(key.clone()).or_insert_with(|| ns.to_string());
                }
            }
            if let Some(Value::Object(defs)) = schema.value().get("$defs") {
                for key in defs.keys() {
                    types.entry(key.clone()).or_insert_with(|| ns.to_string());
                }
            }
        }
    }
    types
}
/// Look up the namespace for a definition name, falling back to the name with
/// any trailing ASCII digits stripped (schemars dedups duplicate type names by
/// appending a numeric suffix, e.g. "Foo2").
fn namespace_for_definition<'a>(
    name: &str,
    types: &'a HashMap<String, String>,
) -> Option<&'a String> {
    types.get(name).or_else(|| {
        let stem = name.trim_end_matches(|c: char| c.is_ascii_digit());
        // Only retry when stripping actually changed the name.
        (stem != name).then(|| types.get(stem)).flatten()
    })
}
/// Derive a stable title for an untagged/internally-tagged enum variant schema
/// from its discriminator, e.g. a `method: "newConversation"` literal on a
/// `ClientRequest` variant yields "NewConversationRequest".
///
/// Fallback order: `method` literal, `type` literal, single property name,
/// single required-field name. Returns `None` when no heuristic applies.
fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
    if let Some(props) = variant.get("properties").and_then(Value::as_object) {
        if let Some(method_literal) = literal_from_property(props, "method") {
            let pascal = to_pascal_case(method_literal);
            return Some(match base {
                "ClientRequest" | "ServerRequest" => format!("{pascal}Request"),
                "ClientNotification" | "ServerNotification" => format!("{pascal}Notification"),
                _ => format!("{pascal}{base}"),
            });
        }
        if let Some(type_literal) = literal_from_property(props, "type") {
            let pascal = to_pascal_case(type_literal);
            return Some(match base {
                "EventMsg" => format!("{pascal}EventMsg"),
                _ => format!("{pascal}{base}"),
            });
        }
        // Externally-tagged variant: its single property is the variant name.
        if props.len() == 1
            && let Some(key) = props.keys().next()
        {
            let pascal = to_pascal_case(key);
            return Some(format!("{pascal}{base}"));
        }
    }
    // Last resort: a single required field names the variant.
    if let Some(required) = variant.get("required").and_then(Value::as_array)
        && required.len() == 1
        && let Some(key) = required[0].as_str()
    {
        let pascal = to_pascal_case(key);
        return Some(format!("{pascal}{base}"));
    }
    None
}
fn literal_from_property<'a>(props: &'a Map<String, Value>, key: &str) -> Option<&'a str> {
props.get(key).and_then(string_literal)
}
/// Extract a schema's fixed string value: a `const` string, or failing that
/// the first entry of a string `enum`.
fn string_literal(value: &Value) -> Option<&str> {
    if let Some(constant) = value.get("const").and_then(Value::as_str) {
        return Some(constant);
    }
    value.get("enum")?.as_array()?.first()?.as_str()
}
/// Recursively add titles to `value` and its children; objects are handled by
/// [`annotate_object`], arrays element-wise, scalars are left alone.
fn annotate_schema(value: &mut Value, base: Option<&str>) {
    if let Value::Object(map) = value {
        annotate_object(map, base);
    } else if let Value::Array(items) = value {
        items
            .iter_mut()
            .for_each(|item| annotate_schema(item, base));
    }
}
/// Annotate one schema object in place: title discriminator properties, name
/// `oneOf`/`anyOf` variants, then recurse into every nested schema exactly
/// once (well-known keys first, then a catch-all pass that skips them).
fn annotate_object(map: &mut Map<String, Value>, base: Option<&str>) {
    // Clone the title out first so `map` can be mutably borrowed below.
    let owner = map.get("title").and_then(Value::as_str).map(str::to_owned);
    if let Some(owner) = owner.as_deref()
        && let Some(Value::Object(props)) = map.get_mut("properties")
    {
        set_discriminator_titles(props, owner);
    }
    if let Some(Value::Array(variants)) = map.get_mut("oneOf") {
        annotate_variant_list(variants, base);
    }
    if let Some(Value::Array(variants)) = map.get_mut("anyOf") {
        annotate_variant_list(variants, base);
    }
    // Nested definitions are annotated with their own name as the base.
    if let Some(Value::Object(defs)) = map.get_mut("definitions") {
        for (name, schema) in defs.iter_mut() {
            annotate_schema(schema, Some(name.as_str()));
        }
    }
    if let Some(Value::Object(defs)) = map.get_mut("$defs") {
        for (name, schema) in defs.iter_mut() {
            annotate_schema(schema, Some(name.as_str()));
        }
    }
    if let Some(Value::Object(props)) = map.get_mut("properties") {
        for value in props.values_mut() {
            annotate_schema(value, base);
        }
    }
    if let Some(items) = map.get_mut("items") {
        annotate_schema(items, base);
    }
    if let Some(additional) = map.get_mut("additionalProperties") {
        annotate_schema(additional, base);
    }
    // Recurse into any remaining keys, skipping those already handled above
    // so no subtree is visited twice.
    for (key, child) in map.iter_mut() {
        match key.as_str() {
            "oneOf"
            | "anyOf"
            | "definitions"
            | "$defs"
            | "properties"
            | "items"
            | "additionalProperties" => {}
            _ => annotate_schema(child, base),
        }
    }
}
/// Give each untitled variant in a `oneOf`/`anyOf` list a derived title (see
/// [`variant_definition_name`]), de-duplicating against titles already in the
/// list by appending a numeric suffix, then recurse into each variant.
fn annotate_variant_list(variants: &mut [Value], base: Option<&str>) {
    // Seed the collision set with every pre-existing title.
    let mut seen = HashSet::new();
    for variant in variants.iter() {
        if let Some(name) = variant_title(variant) {
            seen.insert(name.to_owned());
        }
    }
    for variant in variants.iter_mut() {
        let mut variant_name = variant_title(variant).map(str::to_owned);
        if variant_name.is_none()
            && let Some(base_name) = base
            && let Some(name) = variant_definition_name(base_name, variant)
        {
            // Resolve collisions as "Name", "Name2", "Name3", ...
            let mut candidate = name.clone();
            let mut index = 2;
            while seen.contains(&candidate) {
                candidate = format!("{name}{index}");
                index += 1;
            }
            if let Some(obj) = variant.as_object_mut() {
                obj.insert("title".into(), Value::String(candidate.clone()));
            }
            seen.insert(candidate.clone());
            variant_name = Some(candidate);
        }
        // Title the variant's discriminator properties after its own name is
        // settled so they inherit the final (de-duplicated) owner name.
        if let Some(name) = variant_name.as_deref()
            && let Some(obj) = variant.as_object_mut()
            && let Some(Value::Object(props)) = obj.get_mut("properties")
        {
            set_discriminator_titles(props, name);
        }
        annotate_schema(variant, base);
    }
}
/// Property names whose string-literal values act as variant discriminators.
const DISCRIMINATOR_KEYS: &[&str] = &["type", "method", "mode", "status", "role", "reason"];

/// Give each literal-valued discriminator property a title of the form
/// `{owner}{Key}` (e.g. "FooRequestMethod") unless it already has one.
fn set_discriminator_titles(props: &mut Map<String, Value>, owner: &str) {
    for key in DISCRIMINATOR_KEYS {
        if let Some(prop_schema) = props.get_mut(*key)
            && string_literal(prop_schema).is_some()
            && let Value::Object(prop_obj) = prop_schema
        {
            // Respect any title that was already present.
            if prop_obj.contains_key("title") {
                continue;
            }
            let suffix = to_pascal_case(key);
            prop_obj.insert("title".into(), Value::String(format!("{owner}{suffix}")));
        }
    }
}
/// Read a schema object's "title" string, if the value is an object and the
/// title is a string.
fn variant_title(value: &Value) -> Option<&str> {
    match value {
        Value::Object(obj) => obj.get("title").and_then(Value::as_str),
        _ => None,
    }
}
/// Convert a snake_case or kebab-case identifier to PascalCase: uppercase the
/// first character of each `_`/`-`-separated word and drop the separators.
/// Characters after a word's first are preserved as-is.
fn to_pascal_case(input: &str) -> String {
    let mut out = String::with_capacity(input.len());
    for word in input.split(['_', '-']) {
        let mut chars = word.chars();
        if let Some(first) = chars.next() {
            // `to_uppercase` may expand to multiple chars for some scripts.
            out.extend(first.to_uppercase());
            out.push_str(chars.as_str());
        }
    }
    out
}
/// Create `dir` and any missing parents, with a contextual error on failure.
fn ensure_dir(dir: &Path) -> Result<()> {
    fs::create_dir_all(dir)
        .with_context(|| format!("Failed to create output directory {}", dir.display()))
}
/// Recursively retarget every `$ref` that points at the root-level definition
/// `name` (exactly, or any path under it) to the namespaced copy
/// `#/definitions/{ns}/{name}`. Other refs are left untouched.
fn rewrite_named_ref_to_namespace(value: &mut Value, ns: &str, name: &str) {
    // Exact match and path-prefix match are handled separately so that e.g.
    // "#/definitions/Foo" and "#/definitions/Foo/bar" both move, while
    // "#/definitions/FooBar" does not.
    let direct = format!("#/definitions/{name}");
    let prefixed = format!("{direct}/");
    let replacement = format!("#/definitions/{ns}/{name}");
    let replacement_prefixed = format!("{replacement}/");
    match value {
        Value::Object(obj) => {
            if let Some(Value::String(reference)) = obj.get_mut("$ref") {
                if reference == &direct {
                    *reference = replacement;
                } else if let Some(rest) = reference.strip_prefix(&prefixed) {
                    *reference = format!("{replacement_prefixed}{rest}");
                }
            }
            for child in obj.values_mut() {
                rewrite_named_ref_to_namespace(child, ns, name);
            }
        }
        Value::Array(items) => {
            for child in items {
                rewrite_named_ref_to_namespace(child, ns, name);
            }
        }
        _ => {}
    }
}
fn prepend_header_if_missing(path: &Path) -> Result<()> {
let mut content = String::new();
{
let mut f = fs::File::open(path)
.with_context(|| format!("Failed to open {} for reading", path.display()))?;
f.read_to_string(&mut content)
.with_context(|| format!("Failed to read {}", path.display()))?;
}
if content.starts_with(HEADER) {
return Ok(());
}
let mut f = fs::File::create(path)
.with_context(|| format!("Failed to open {} for writing", path.display()))?;
f.write_all(HEADER.as_bytes())
.with_context(|| format!("Failed to write header to {}", path.display()))?;
f.write_all(content.as_bytes())
.with_context(|| format!("Failed to write content to {}", path.display()))?;
Ok(())
}
/// List the `.ts` files directly inside `dir` (non-recursive), sorted by path.
fn ts_files_in(dir: &Path) -> Result<Vec<PathBuf>> {
    let entries =
        fs::read_dir(dir).with_context(|| format!("Failed to read dir {}", dir.display()))?;
    let mut files = Vec::new();
    for entry in entries {
        let path = entry?.path();
        let is_ts_file = path.is_file() && path.extension() == Some(OsStr::new("ts"));
        if is_ts_file {
            files.push(path);
        }
    }
    // Sort for deterministic index generation across platforms.
    files.sort();
    Ok(files)
}
fn ts_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
let mut files = Vec::new();
let mut stack = vec![dir.to_path_buf()];
while let Some(d) = stack.pop() {
for entry in
fs::read_dir(&d).with_context(|| format!("Failed to read dir {}", d.display()))?
{
let entry = entry?;
let path = entry.path();
if path.is_dir() {
stack.push(path);
} else if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
files.push(path);
}
}
}
files.sort();
Ok(files)
}
/// Generate an index.ts file that re-exports all generated types.
/// This allows consumers to import all types from a single file.
///
/// Returns the path of the written `index.ts`. Entries are sorted and
/// de-duplicated; a `v2/` subdirectory containing TS files is exposed as a
/// `v2` namespace export.
fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
    let mut entries: Vec<String> = Vec::new();
    // One re-export per generated file, excluding the index itself.
    let mut stems: Vec<String> = ts_files_in(out_dir)?
        .into_iter()
        .filter_map(|p| {
            let stem = p.file_stem()?.to_string_lossy().into_owned();
            if stem == "index" { None } else { Some(stem) }
        })
        .collect();
    stems.sort();
    stems.dedup();
    for name in stems {
        entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
    }
    // If this is the root out_dir and a ./v2 folder exists with TS files,
    // expose it as a namespace to avoid symbol collisions at the root.
    let v2_dir = out_dir.join("v2");
    let has_v2_ts = ts_files_in(&v2_dir).map(|v| !v.is_empty()).unwrap_or(false);
    if has_v2_ts {
        entries.push("export * as v2 from \"./v2\";\n".to_string());
    }
    // Pre-size the buffer: header plus every export line.
    let mut content =
        String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
    content.push_str(HEADER);
    for line in &entries {
        content.push_str(line);
    }
    let index_path = out_dir.join("index.ts");
    let mut f = fs::File::create(&index_path)
        .with_context(|| format!("Failed to create {}", index_path.display()))?;
    f.write_all(content.as_bytes())
        .with_context(|| format!("Failed to write {}", index_path.display()))?;
    Ok(index_path)
}
#[cfg(test)]
mod tests {
use super::*;
use anyhow::Result;
use std::collections::BTreeSet;
use std::fs;
use std::path::PathBuf;
use uuid::Uuid;
#[test]
fn generated_ts_has_no_optional_nullable_fields() -> Result<()> {
// Assert that there are no types of the form "?: T | null" in the generated TS files.
let output_dir = std::env::temp_dir().join(format!("codex_ts_types_{}", Uuid::now_v7()));
fs::create_dir(&output_dir)?;
struct TempDirGuard(PathBuf);
impl Drop for TempDirGuard {
fn drop(&mut self) {
let _ = fs::remove_dir_all(&self.0);
}
}
let _guard = TempDirGuard(output_dir.clone());
// Avoid doing more work than necessary to keep the test from timing out.
let options = GenerateTsOptions {
generate_indices: false,
ensure_headers: false,
run_prettier: false,
};
generate_ts_with_options(&output_dir, None, options)?;
let mut undefined_offenders = Vec::new();
let mut optional_nullable_offenders = BTreeSet::new();
let mut stack = vec![output_dir];
while let Some(dir) = stack.pop() {
for entry in fs::read_dir(&dir)? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
stack.push(path);
continue;
}
if matches!(path.extension().and_then(|ext| ext.to_str()), Some("ts")) {
let contents = fs::read_to_string(&path)?;
if contents.contains("| undefined") {
undefined_offenders.push(path.clone());
}
const SKIP_PREFIXES: &[&str] = &[
"const ",
"let ",
"var ",
"export const ",
"export let ",
"export var ",
];
let mut search_start = 0;
while let Some(idx) = contents[search_start..].find("| null") {
let abs_idx = search_start + idx;
// Find the property-colon for this field by scanning forward
// from the start of the segment and ignoring nested braces,
// brackets, and parens. This avoids colons inside nested
// type literals like `{ [k in string]?: string }`.
let line_start_idx =
contents[..abs_idx].rfind('\n').map(|i| i + 1).unwrap_or(0);
let mut segment_start_idx = line_start_idx;
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind(',') {
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
}
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('{') {
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
}
if let Some(rel_idx) = contents[line_start_idx..abs_idx].rfind('}') {
segment_start_idx = segment_start_idx.max(line_start_idx + rel_idx + 1);
}
// Scan forward for the colon that separates the field name from its type.
let mut level_brace = 0_i32;
let mut level_brack = 0_i32;
let mut level_paren = 0_i32;
let mut in_single = false;
let mut in_double = false;
let mut escape = false;
let mut prop_colon_idx = None;
for (i, ch) in contents[segment_start_idx..abs_idx].char_indices() {
let idx_abs = segment_start_idx + i;
if escape {
escape = false;
continue;
}
match ch {
'\\' => {
// Only treat as escape when inside a string.
if in_single || in_double {
escape = true;
}
}
'\'' => {
if !in_double {
in_single = !in_single;
}
}
'"' => {
if !in_single {
in_double = !in_double;
}
}
'{' if !in_single && !in_double => level_brace += 1,
'}' if !in_single && !in_double => level_brace -= 1,
'[' if !in_single && !in_double => level_brack += 1,
']' if !in_single && !in_double => level_brack -= 1,
'(' if !in_single && !in_double => level_paren += 1,
')' if !in_single && !in_double => level_paren -= 1,
':' if !in_single
&& !in_double
&& level_brace == 0
&& level_brack == 0
&& level_paren == 0 =>
{
prop_colon_idx = Some(idx_abs);
break;
}
_ => {}
}
}
let Some(colon_idx) = prop_colon_idx else {
search_start = abs_idx + 5;
continue;
};
let mut field_prefix = contents[segment_start_idx..colon_idx].trim();
if field_prefix.is_empty() {
search_start = abs_idx + 5;
continue;
}
if let Some(comment_idx) = field_prefix.rfind("*/") {
field_prefix = field_prefix[comment_idx + 2..].trim_start();
}
if field_prefix.is_empty() {
search_start = abs_idx + 5;
continue;
}
if SKIP_PREFIXES
.iter()
.any(|prefix| field_prefix.starts_with(prefix))
{
search_start = abs_idx + 5;
continue;
}
if field_prefix.contains('(') {
search_start = abs_idx + 5;
continue;
}
// If the last non-whitespace before ':' is '?', then this is an
// optional field with a nullable type (i.e., "?: T | null"),
// which we explicitly disallow.
if field_prefix.chars().rev().find(|c| !c.is_whitespace()) == Some('?') {
let line_number =
contents[..abs_idx].chars().filter(|c| *c == '\n').count() + 1;
let offending_line_end = contents[line_start_idx..]
.find('\n')
.map(|i| line_start_idx + i)
.unwrap_or(contents.len());
let offending_snippet =
contents[line_start_idx..offending_line_end].trim();
optional_nullable_offenders.insert(format!(
"{}:{}: {offending_snippet}",
path.display(),
line_number
));
}
search_start = abs_idx + 5;
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | true |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server-protocol/src/jsonrpc_lite.rs | codex-rs/app-server-protocol/src/jsonrpc_lite.rs | //! We do not do true JSON-RPC 2.0, as we neither send nor expect the
//! "jsonrpc": "2.0" field.
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;
/// The JSON-RPC protocol version string. Note this "lite" implementation
/// neither sends nor expects the `jsonrpc` field (see module docs).
pub const JSONRPC_VERSION: &str = "2.0";

/// Request identifier: JSON-RPC allows either a string or an integer id.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Hash, Eq, JsonSchema, TS)]
#[serde(untagged)]
pub enum RequestId {
    String(String),
    /// Exported to TypeScript as `number`.
    #[ts(type = "number")]
    Integer(i64),
}
/// The `result` payload of a successful response — arbitrary JSON. Named
/// `Result` to mirror the JSON-RPC spec; note it shadows the prelude's
/// `std::result::Result` within this module.
pub type Result = serde_json::Value;

/// Refers to any valid JSON-RPC object that can be decoded off the wire, or encoded to be sent.
///
/// Untagged: variants are distinguished by which fields are present.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
#[serde(untagged)]
pub enum JSONRPCMessage {
    Request(JSONRPCRequest),
    Notification(JSONRPCNotification),
    Response(JSONRPCResponse),
    Error(JSONRPCError),
}
/// A request that expects a response.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCRequest {
    /// Correlation id echoed back in the matching response or error.
    pub id: RequestId,
    /// Name of the operation being invoked.
    pub method: String,
    /// Optional structured arguments; omitted from the wire when `None`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub params: Option<serde_json::Value>,
}
/// A notification which does not expect a response.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCNotification {
    /// Name of the notification being delivered.
    pub method: String,
    /// Optional structured payload; omitted from the wire when `None`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub params: Option<serde_json::Value>,
}
/// A successful (non-error) response to a request.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCResponse {
    /// Id of the request this responds to.
    pub id: RequestId,
    /// Arbitrary JSON result payload (see the `Result` alias above).
    pub result: Result,
}
/// A response to a request that indicates an error occurred.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCError {
    /// Error details (code, message, optional data).
    pub error: JSONRPCErrorError,
    /// Id of the request that failed.
    pub id: RequestId,
}
/// Error payload carried inside [`JSONRPCError`].
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, JsonSchema, TS)]
pub struct JSONRPCErrorError {
    /// Numeric error code, JSON-RPC style.
    pub code: i64,
    /// Optional structured error details; omitted from the wire when `None`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    #[ts(optional)]
    pub data: Option<serde_json::Value>,
    /// Human-readable error description.
    pub message: String,
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server-protocol/src/bin/export.rs | codex-rs/app-server-protocol/src/bin/export.rs | use anyhow::Result;
use clap::Parser;
use std::path::PathBuf;
/// Command-line arguments for the protocol export binary, parsed by clap.
#[derive(Parser, Debug)]
#[command(
    about = "Generate TypeScript bindings and JSON Schemas for the Codex app-server protocol"
)]
struct Args {
    /// Output directory where generated files will be written
    #[arg(short = 'o', long = "out", value_name = "DIR")]
    out_dir: PathBuf,
    /// Optional Prettier executable path to format generated TypeScript files
    #[arg(short = 'p', long = "prettier", value_name = "PRETTIER_BIN")]
    prettier: Option<PathBuf>,
}
fn main() -> Result<()> {
let args = Args::parse();
codex_app_server_protocol::generate_types(&args.out_dir, args.prettier.as_deref())
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server-protocol/src/protocol/thread_history.rs | codex-rs/app-server-protocol/src/protocol/thread_history.rs | use crate::protocol::v2::ThreadItem;
use crate::protocol::v2::Turn;
use crate::protocol::v2::TurnError;
use crate::protocol::v2::TurnStatus;
use crate::protocol::v2::UserInput;
use codex_protocol::protocol::AgentReasoningEvent;
use codex_protocol::protocol::AgentReasoningRawContentEvent;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::TurnAbortedEvent;
use codex_protocol::protocol::UserMessageEvent;
/// Convert persisted [`EventMsg`] entries into a sequence of [`Turn`] values.
///
/// The purpose of this is to convert the EventMsgs persisted in a rollout file
/// into a sequence of Turns and ThreadItems, which allows the client to render
/// the historical messages when resuming a thread. Each event is replayed
/// through a `ThreadHistoryBuilder`, which flushes the final turn on `finish`.
pub fn build_turns_from_event_msgs(events: &[EventMsg]) -> Vec<Turn> {
    events
        .iter()
        .fold(ThreadHistoryBuilder::new(), |mut builder, event| {
            builder.handle_event(event);
            builder
        })
        .finish()
}
/// Incrementally folds persisted events into a list of completed [`Turn`]s.
struct ThreadHistoryBuilder {
    // Turns that have been finished and flushed.
    turns: Vec<Turn>,
    // The turn currently being accumulated, if any.
    current_turn: Option<PendingTurn>,
    // 1-based counter used to mint the next "turn-N" id.
    next_turn_index: i64,
    // 1-based counter used to mint the next "item-N" id.
    next_item_index: i64,
}
impl ThreadHistoryBuilder {
    /// Start with no turns; turn and item ids are 1-based and increase
    /// monotonically across the whole replay.
    fn new() -> Self {
        Self {
            turns: Vec::new(),
            current_turn: None,
            next_turn_index: 1,
            next_item_index: 1,
        }
    }

    /// Flush any in-progress turn and return the accumulated turns.
    fn finish(mut self) -> Vec<Turn> {
        self.finish_current_turn();
        self.turns
    }

    /// This function should handle all EventMsg variants that can be persisted in a rollout file.
    /// See `should_persist_event_msg` in `codex-rs/core/rollout/policy.rs`.
    fn handle_event(&mut self, event: &EventMsg) {
        match event {
            EventMsg::UserMessage(payload) => self.handle_user_message(payload),
            EventMsg::AgentMessage(payload) => self.handle_agent_message(payload.message.clone()),
            EventMsg::AgentReasoning(payload) => self.handle_agent_reasoning(payload),
            EventMsg::AgentReasoningRawContent(payload) => {
                self.handle_agent_reasoning_raw_content(payload)
            }
            // Persisted for other purposes but not rendered as history items.
            EventMsg::TokenCount(_) => {}
            EventMsg::EnteredReviewMode(_) => {}
            EventMsg::ExitedReviewMode(_) => {}
            EventMsg::UndoCompleted(_) => {}
            EventMsg::TurnAborted(payload) => self.handle_turn_aborted(payload),
            _ => {}
        }
    }

    /// A user message starts a new turn: close the previous one (if any) and
    /// seed the new turn with a `UserMessage` item.
    fn handle_user_message(&mut self, payload: &UserMessageEvent) {
        self.finish_current_turn();
        let mut turn = self.new_turn();
        let id = self.next_item_id();
        let content = self.build_user_inputs(payload);
        turn.items.push(ThreadItem::UserMessage { id, content });
        self.current_turn = Some(turn);
    }

    /// Append an agent-message item to the current turn; empty text is dropped.
    fn handle_agent_message(&mut self, text: String) {
        if text.is_empty() {
            return;
        }
        let id = self.next_item_id();
        self.ensure_turn()
            .items
            .push(ThreadItem::AgentMessage { id, text });
    }

    /// Record reasoning summary text, coalescing into a trailing reasoning
    /// item when one exists so interleaved summary/content events merge.
    fn handle_agent_reasoning(&mut self, payload: &AgentReasoningEvent) {
        if payload.text.is_empty() {
            return;
        }
        // If the last item is a reasoning item, add the new text to the summary.
        if let Some(ThreadItem::Reasoning { summary, .. }) = self.ensure_turn().items.last_mut() {
            summary.push(payload.text.clone());
            return;
        }
        // Otherwise, create a new reasoning item.
        let id = self.next_item_id();
        self.ensure_turn().items.push(ThreadItem::Reasoning {
            id,
            summary: vec![payload.text.clone()],
            content: Vec::new(),
        });
    }

    /// Record raw reasoning content, coalescing like `handle_agent_reasoning`.
    fn handle_agent_reasoning_raw_content(&mut self, payload: &AgentReasoningRawContentEvent) {
        if payload.text.is_empty() {
            return;
        }
        // If the last item is a reasoning item, add the new text to the content.
        if let Some(ThreadItem::Reasoning { content, .. }) = self.ensure_turn().items.last_mut() {
            content.push(payload.text.clone());
            return;
        }
        // Otherwise, create a new reasoning item.
        let id = self.next_item_id();
        self.ensure_turn().items.push(ThreadItem::Reasoning {
            id,
            summary: Vec::new(),
            content: vec![payload.text.clone()],
        });
    }

    /// Mark the in-progress turn as interrupted; an abort with no open turn is
    /// ignored.
    fn handle_turn_aborted(&mut self, _payload: &TurnAbortedEvent) {
        let Some(turn) = self.current_turn.as_mut() else {
            return;
        };
        turn.status = TurnStatus::Interrupted;
    }

    /// Move the in-progress turn into `turns`; a turn that never accumulated
    /// any items is silently discarded.
    fn finish_current_turn(&mut self) {
        if let Some(turn) = self.current_turn.take() {
            if turn.items.is_empty() {
                return;
            }
            self.turns.push(turn.into());
        }
    }

    /// Allocate a fresh pending turn with the next sequential turn id.
    fn new_turn(&mut self) -> PendingTurn {
        PendingTurn {
            id: self.next_turn_id(),
            items: Vec::new(),
            error: None,
            status: TurnStatus::Completed,
        }
    }

    /// Return the current turn, creating one first when no user message has
    /// opened a turn yet (e.g. a rollout that begins with agent events).
    ///
    /// (Simplified from the previous double-check that ended in
    /// `unreachable!()`: initialize when absent, then borrow once.)
    fn ensure_turn(&mut self) -> &mut PendingTurn {
        if self.current_turn.is_none() {
            let turn = self.new_turn();
            self.current_turn = Some(turn);
        }
        self.current_turn
            .as_mut()
            .expect("current turn was just initialized")
    }

    /// Mint the next sequential turn id: "turn-1", "turn-2", ...
    fn next_turn_id(&mut self) -> String {
        let id = format!("turn-{}", self.next_turn_index);
        self.next_turn_index += 1;
        id
    }

    /// Mint the next sequential item id: "item-1", "item-2", ...
    fn next_item_id(&mut self) -> String {
        let id = format!("item-{}", self.next_item_index);
        self.next_item_index += 1;
        id
    }

    /// Build the `UserInput` list for a user message: non-blank text first,
    /// then one image input per attached image URL.
    fn build_user_inputs(&self, payload: &UserMessageEvent) -> Vec<UserInput> {
        let mut content = Vec::new();
        if !payload.message.trim().is_empty() {
            content.push(UserInput::Text {
                text: payload.message.clone(),
            });
        }
        if let Some(images) = &payload.images {
            for image in images {
                content.push(UserInput::Image { url: image.clone() });
            }
        }
        content
    }
}
/// A turn under construction; converted into a [`Turn`] when flushed.
struct PendingTurn {
    id: String,
    items: Vec<ThreadItem>,
    // Not populated by `ThreadHistoryBuilder` in this file; kept to mirror
    // `Turn`'s shape for the `From` conversion below.
    error: Option<TurnError>,
    status: TurnStatus,
}
impl From<PendingTurn> for Turn {
fn from(value: PendingTurn) -> Self {
Self {
id: value.id,
items: value.items,
error: value.error,
status: value.status,
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use codex_protocol::protocol::AgentMessageEvent;
    use codex_protocol::protocol::AgentReasoningEvent;
    use codex_protocol::protocol::AgentReasoningRawContentEvent;
    use codex_protocol::protocol::TurnAbortReason;
    use codex_protocol::protocol::TurnAbortedEvent;
    use codex_protocol::protocol::UserMessageEvent;
    use pretty_assertions::assert_eq;

    // Two user messages split the stream into two turns; reasoning summary
    // and raw content merge into a single Reasoning item, and ids number
    // items sequentially across turns.
    #[test]
    fn builds_multiple_turns_with_reasoning_items() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "First turn".into(),
                images: Some(vec!["https://example.com/one.png".into()]),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Hi there".into(),
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "thinking".into(),
            }),
            EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
                text: "full reasoning".into(),
            }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Second turn".into(),
                images: None,
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Reply two".into(),
            }),
        ];
        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 2);
        let first = &turns[0];
        assert_eq!(first.id, "turn-1");
        assert_eq!(first.status, TurnStatus::Completed);
        assert_eq!(first.items.len(), 3);
        assert_eq!(
            first.items[0],
            ThreadItem::UserMessage {
                id: "item-1".into(),
                content: vec![
                    UserInput::Text {
                        text: "First turn".into(),
                    },
                    UserInput::Image {
                        url: "https://example.com/one.png".into(),
                    }
                ],
            }
        );
        assert_eq!(
            first.items[1],
            ThreadItem::AgentMessage {
                id: "item-2".into(),
                text: "Hi there".into(),
            }
        );
        assert_eq!(
            first.items[2],
            ThreadItem::Reasoning {
                id: "item-3".into(),
                summary: vec!["thinking".into()],
                content: vec!["full reasoning".into()],
            }
        );
        let second = &turns[1];
        assert_eq!(second.id, "turn-2");
        assert_eq!(second.items.len(), 2);
        assert_eq!(
            second.items[0],
            ThreadItem::UserMessage {
                id: "item-4".into(),
                content: vec![UserInput::Text {
                    text: "Second turn".into()
                }],
            }
        );
        assert_eq!(
            second.items[1],
            ThreadItem::AgentMessage {
                id: "item-5".into(),
                text: "Reply two".into(),
            }
        );
    }

    // Reasoning events separated by an agent message must NOT merge: the
    // second summary starts a fresh Reasoning item.
    #[test]
    fn splits_reasoning_when_interleaved() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "Turn start".into(),
                images: None,
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "first summary".into(),
            }),
            EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
                text: "first content".into(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "interlude".into(),
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "second summary".into(),
            }),
        ];
        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 1);
        let turn = &turns[0];
        assert_eq!(turn.items.len(), 4);
        assert_eq!(
            turn.items[1],
            ThreadItem::Reasoning {
                id: "item-2".into(),
                summary: vec!["first summary".into()],
                content: vec!["first content".into()],
            }
        );
        assert_eq!(
            turn.items[3],
            ThreadItem::Reasoning {
                id: "item-4".into(),
                summary: vec!["second summary".into()],
                content: Vec::new(),
            }
        );
    }

    // A TurnAborted event marks only the in-flight turn as Interrupted; the
    // following turn completes normally.
    #[test]
    fn marks_turn_as_interrupted_when_aborted() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "Please do the thing".into(),
                images: None,
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Working...".into(),
            }),
            EventMsg::TurnAborted(TurnAbortedEvent {
                reason: TurnAbortReason::Replaced,
            }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Let's try again".into(),
                images: None,
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Second attempt complete.".into(),
            }),
        ];
        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 2);
        let first_turn = &turns[0];
        assert_eq!(first_turn.status, TurnStatus::Interrupted);
        assert_eq!(first_turn.items.len(), 2);
        assert_eq!(
            first_turn.items[0],
            ThreadItem::UserMessage {
                id: "item-1".into(),
                content: vec![UserInput::Text {
                    text: "Please do the thing".into()
                }],
            }
        );
        assert_eq!(
            first_turn.items[1],
            ThreadItem::AgentMessage {
                id: "item-2".into(),
                text: "Working...".into(),
            }
        );
        let second_turn = &turns[1];
        assert_eq!(second_turn.status, TurnStatus::Completed);
        assert_eq!(second_turn.items.len(), 2);
        assert_eq!(
            second_turn.items[0],
            ThreadItem::UserMessage {
                id: "item-3".into(),
                content: vec![UserInput::Text {
                    text: "Let's try again".into()
                }],
            }
        );
        assert_eq!(
            second_turn.items[1],
            ThreadItem::AgentMessage {
                id: "item-4".into(),
                text: "Second attempt complete.".into(),
            }
        );
    }
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server-protocol/src/protocol/mappers.rs | codex-rs/app-server-protocol/src/protocol/mappers.rs | use crate::protocol::v1;
use crate::protocol::v2;
impl From<v1::ExecOneOffCommandParams> for v2::CommandExecParams {
fn from(value: v1::ExecOneOffCommandParams) -> Self {
Self {
command: value.command,
timeout_ms: value
.timeout_ms
.map(|timeout| i64::try_from(timeout).unwrap_or(60_000)),
cwd: value.cwd,
sandbox_policy: value.sandbox_policy.map(std::convert::Into::into),
}
}
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server-protocol/src/protocol/v2.rs | codex-rs/app-server-protocol/src/protocol/v2.rs | use std::collections::HashMap;
use std::path::PathBuf;
use crate::protocol::common::AuthMode;
use codex_protocol::account::PlanType;
use codex_protocol::approvals::ExecPolicyAmendment as CoreExecPolicyAmendment;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode as CoreSandboxMode;
use codex_protocol::config_types::Verbosity;
use codex_protocol::items::AgentMessageContent as CoreAgentMessageContent;
use codex_protocol::items::TurnItem as CoreTurnItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::parse_command::ParsedCommand as CoreParsedCommand;
use codex_protocol::plan_tool::PlanItemArg as CorePlanItemArg;
use codex_protocol::plan_tool::StepStatus as CorePlanStepStatus;
use codex_protocol::protocol::AskForApproval as CoreAskForApproval;
use codex_protocol::protocol::CodexErrorInfo as CoreCodexErrorInfo;
use codex_protocol::protocol::CreditsSnapshot as CoreCreditsSnapshot;
use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SkillErrorInfo as CoreSkillErrorInfo;
use codex_protocol::protocol::SkillMetadata as CoreSkillMetadata;
use codex_protocol::protocol::SkillScope as CoreSkillScope;
use codex_protocol::protocol::TokenUsage as CoreTokenUsage;
use codex_protocol::protocol::TokenUsageInfo as CoreTokenUsageInfo;
use codex_protocol::user_input::UserInput as CoreUserInput;
use codex_utils_absolute_path::AbsolutePathBuf;
use mcp_types::ContentBlock as McpContentBlock;
use mcp_types::Resource as McpResource;
use mcp_types::ResourceTemplate as McpResourceTemplate;
use mcp_types::Tool as McpTool;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value as JsonValue;
use thiserror::Error;
use ts_rs::TS;
// Macro to declare a camelCased API v2 enum mirroring a core enum which
// tends to use either snake_case or kebab-case.
macro_rules! v2_enum_from_core {
(
pub enum $Name:ident from $Src:path { $( $Variant:ident ),+ $(,)? }
) => {
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum $Name { $( $Variant ),+ }
impl $Name {
pub fn to_core(self) -> $Src {
match self { $( $Name::$Variant => <$Src>::$Variant ),+ }
}
}
impl From<$Src> for $Name {
fn from(value: $Src) -> Self {
match value { $( <$Src>::$Variant => $Name::$Variant ),+ }
}
}
};
}
/// This translation layer make sure that we expose codex error code in camel case.
///
/// When an upstream HTTP status is available (for example, from the Responses API or a provider),
/// it is forwarded in `httpStatusCode` on the relevant `codexErrorInfo` variant.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum CodexErrorInfo {
ContextWindowExceeded,
UsageLimitExceeded,
HttpConnectionFailed {
#[serde(rename = "httpStatusCode")]
#[ts(rename = "httpStatusCode")]
http_status_code: Option<u16>,
},
/// Failed to connect to the response SSE stream.
ResponseStreamConnectionFailed {
#[serde(rename = "httpStatusCode")]
#[ts(rename = "httpStatusCode")]
http_status_code: Option<u16>,
},
InternalServerError,
Unauthorized,
BadRequest,
SandboxError,
/// The response SSE stream disconnected in the middle of a turn before completion.
ResponseStreamDisconnected {
#[serde(rename = "httpStatusCode")]
#[ts(rename = "httpStatusCode")]
http_status_code: Option<u16>,
},
/// Reached the retry limit for responses.
ResponseTooManyFailedAttempts {
#[serde(rename = "httpStatusCode")]
#[ts(rename = "httpStatusCode")]
http_status_code: Option<u16>,
},
Other,
}
impl From<CoreCodexErrorInfo> for CodexErrorInfo {
fn from(value: CoreCodexErrorInfo) -> Self {
match value {
CoreCodexErrorInfo::ContextWindowExceeded => CodexErrorInfo::ContextWindowExceeded,
CoreCodexErrorInfo::UsageLimitExceeded => CodexErrorInfo::UsageLimitExceeded,
CoreCodexErrorInfo::HttpConnectionFailed { http_status_code } => {
CodexErrorInfo::HttpConnectionFailed { http_status_code }
}
CoreCodexErrorInfo::ResponseStreamConnectionFailed { http_status_code } => {
CodexErrorInfo::ResponseStreamConnectionFailed { http_status_code }
}
CoreCodexErrorInfo::InternalServerError => CodexErrorInfo::InternalServerError,
CoreCodexErrorInfo::Unauthorized => CodexErrorInfo::Unauthorized,
CoreCodexErrorInfo::BadRequest => CodexErrorInfo::BadRequest,
CoreCodexErrorInfo::SandboxError => CodexErrorInfo::SandboxError,
CoreCodexErrorInfo::ResponseStreamDisconnected { http_status_code } => {
CodexErrorInfo::ResponseStreamDisconnected { http_status_code }
}
CoreCodexErrorInfo::ResponseTooManyFailedAttempts { http_status_code } => {
CodexErrorInfo::ResponseTooManyFailedAttempts { http_status_code }
}
CoreCodexErrorInfo::Other => CodexErrorInfo::Other,
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "kebab-case")]
#[ts(rename_all = "kebab-case", export_to = "v2/")]
pub enum AskForApproval {
#[serde(rename = "untrusted")]
#[ts(rename = "untrusted")]
UnlessTrusted,
OnFailure,
OnRequest,
Never,
}
impl AskForApproval {
pub fn to_core(self) -> CoreAskForApproval {
match self {
AskForApproval::UnlessTrusted => CoreAskForApproval::UnlessTrusted,
AskForApproval::OnFailure => CoreAskForApproval::OnFailure,
AskForApproval::OnRequest => CoreAskForApproval::OnRequest,
AskForApproval::Never => CoreAskForApproval::Never,
}
}
}
impl From<CoreAskForApproval> for AskForApproval {
fn from(value: CoreAskForApproval) -> Self {
match value {
CoreAskForApproval::UnlessTrusted => AskForApproval::UnlessTrusted,
CoreAskForApproval::OnFailure => AskForApproval::OnFailure,
CoreAskForApproval::OnRequest => AskForApproval::OnRequest,
CoreAskForApproval::Never => AskForApproval::Never,
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "kebab-case")]
#[ts(rename_all = "kebab-case", export_to = "v2/")]
pub enum SandboxMode {
ReadOnly,
WorkspaceWrite,
DangerFullAccess,
}
impl SandboxMode {
pub fn to_core(self) -> CoreSandboxMode {
match self {
SandboxMode::ReadOnly => CoreSandboxMode::ReadOnly,
SandboxMode::WorkspaceWrite => CoreSandboxMode::WorkspaceWrite,
SandboxMode::DangerFullAccess => CoreSandboxMode::DangerFullAccess,
}
}
}
impl From<CoreSandboxMode> for SandboxMode {
fn from(value: CoreSandboxMode) -> Self {
match value {
CoreSandboxMode::ReadOnly => SandboxMode::ReadOnly,
CoreSandboxMode::WorkspaceWrite => SandboxMode::WorkspaceWrite,
CoreSandboxMode::DangerFullAccess => SandboxMode::DangerFullAccess,
}
}
}
v2_enum_from_core!(
pub enum ReviewDelivery from codex_protocol::protocol::ReviewDelivery {
Inline, Detached
}
);
v2_enum_from_core!(
pub enum McpAuthStatus from codex_protocol::protocol::McpAuthStatus {
Unsupported,
NotLoggedIn,
BearerToken,
OAuth
}
);
// TODO(mbolin): Support in-repo layer.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum ConfigLayerSource {
/// Managed preferences layer delivered by MDM (macOS only).
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
Mdm {
domain: String,
key: String,
},
/// Managed config layer from a file (usually `managed_config.toml`).
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
System {
/// This is the path to the system config.toml file, though it is not
/// guaranteed to exist.
file: AbsolutePathBuf,
},
/// User config layer from $CODEX_HOME/config.toml. This layer is special
/// in that it is expected to be:
/// - writable by the user
/// - generally outside the workspace directory
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
User {
/// This is the path to the user's config.toml file, though it is not
/// guaranteed to exist.
file: AbsolutePathBuf,
},
/// Path to a .codex/ folder within a project. There could be multiple of
/// these between `cwd` and the project/repo root.
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
Project {
dot_codex_folder: AbsolutePathBuf,
},
/// Session-layer overrides supplied via `-c`/`--config`.
SessionFlags,
/// `managed_config.toml` was designed to be a config that was loaded
/// as the last layer on top of everything else. This scheme did not quite
/// work out as intended, but we keep this variant as a "best effort" while
/// we phase out `managed_config.toml` in favor of `requirements.toml`.
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
LegacyManagedConfigTomlFromFile {
file: AbsolutePathBuf,
},
LegacyManagedConfigTomlFromMdm,
}
impl ConfigLayerSource {
/// A settings from a layer with a higher precedence will override a setting
/// from a layer with a lower precedence.
pub fn precedence(&self) -> i16 {
match self {
ConfigLayerSource::Mdm { .. } => 0,
ConfigLayerSource::System { .. } => 10,
ConfigLayerSource::User { .. } => 20,
ConfigLayerSource::Project { .. } => 25,
ConfigLayerSource::SessionFlags => 30,
ConfigLayerSource::LegacyManagedConfigTomlFromFile { .. } => 40,
ConfigLayerSource::LegacyManagedConfigTomlFromMdm => 50,
}
}
}
/// Compares [ConfigLayerSource] by precedence, so `A < B` means settings from
/// layer `A` will be overridden by settings from layer `B`.
impl PartialOrd for ConfigLayerSource {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.precedence().cmp(&other.precedence()))
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export_to = "v2/")]
pub struct SandboxWorkspaceWrite {
#[serde(default)]
pub writable_roots: Vec<PathBuf>,
#[serde(default)]
pub network_access: bool,
#[serde(default)]
pub exclude_tmpdir_env_var: bool,
#[serde(default)]
pub exclude_slash_tmp: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export_to = "v2/")]
pub struct ToolsV2 {
#[serde(alias = "web_search_request")]
pub web_search: Option<bool>,
pub view_image: Option<bool>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export_to = "v2/")]
pub struct ProfileV2 {
pub model: Option<String>,
pub model_provider: Option<String>,
pub approval_policy: Option<AskForApproval>,
pub model_reasoning_effort: Option<ReasoningEffort>,
pub model_reasoning_summary: Option<ReasoningSummary>,
pub model_verbosity: Option<Verbosity>,
pub chatgpt_base_url: Option<String>,
#[serde(default, flatten)]
pub additional: HashMap<String, JsonValue>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export_to = "v2/")]
pub struct Config {
pub model: Option<String>,
pub review_model: Option<String>,
pub model_context_window: Option<i64>,
pub model_auto_compact_token_limit: Option<i64>,
pub model_provider: Option<String>,
pub approval_policy: Option<AskForApproval>,
pub sandbox_mode: Option<SandboxMode>,
pub sandbox_workspace_write: Option<SandboxWorkspaceWrite>,
pub forced_chatgpt_workspace_id: Option<String>,
pub forced_login_method: Option<ForcedLoginMethod>,
pub tools: Option<ToolsV2>,
pub profile: Option<String>,
#[serde(default)]
pub profiles: HashMap<String, ProfileV2>,
pub instructions: Option<String>,
pub developer_instructions: Option<String>,
pub compact_prompt: Option<String>,
pub model_reasoning_effort: Option<ReasoningEffort>,
pub model_reasoning_summary: Option<ReasoningSummary>,
pub model_verbosity: Option<Verbosity>,
#[serde(default, flatten)]
pub additional: HashMap<String, JsonValue>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ConfigLayerMetadata {
pub name: ConfigLayerSource,
pub version: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ConfigLayer {
pub name: ConfigLayerSource,
pub version: String,
pub config: JsonValue,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum MergeStrategy {
Replace,
Upsert,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum WriteStatus {
Ok,
OkOverridden,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct OverriddenMetadata {
pub message: String,
pub overriding_layer: ConfigLayerMetadata,
pub effective_value: JsonValue,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ConfigWriteResponse {
pub status: WriteStatus,
pub version: String,
/// Canonical path to the config file that was written.
pub file_path: AbsolutePathBuf,
pub overridden_metadata: Option<OverriddenMetadata>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum ConfigWriteErrorCode {
ConfigLayerReadonly,
ConfigVersionConflict,
ConfigValidationError,
ConfigPathNotFound,
ConfigSchemaUnknownKey,
UserLayerNotFound,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ConfigReadParams {
#[serde(default)]
pub include_layers: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ConfigReadResponse {
pub config: Config,
pub origins: HashMap<String, ConfigLayerMetadata>,
#[serde(skip_serializing_if = "Option::is_none")]
pub layers: Option<Vec<ConfigLayer>>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ConfigValueWriteParams {
pub key_path: String,
pub value: JsonValue,
pub merge_strategy: MergeStrategy,
/// Path to the config file to write; defaults to the user's `config.toml` when omitted.
pub file_path: Option<String>,
pub expected_version: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ConfigBatchWriteParams {
pub edits: Vec<ConfigEdit>,
/// Path to the config file to write; defaults to the user's `config.toml` when omitted.
pub file_path: Option<String>,
pub expected_version: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ConfigEdit {
pub key_path: String,
pub value: JsonValue,
pub merge_strategy: MergeStrategy,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum ApprovalDecision {
Accept,
/// Approve and remember the approval for the session.
AcceptForSession,
AcceptWithExecpolicyAmendment {
execpolicy_amendment: ExecPolicyAmendment,
},
Decline,
Cancel,
}
#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum NetworkAccess {
#[default]
Restricted,
Enabled,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum SandboxPolicy {
DangerFullAccess,
ReadOnly,
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
ExternalSandbox {
#[serde(default)]
network_access: NetworkAccess,
},
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
WorkspaceWrite {
#[serde(default)]
writable_roots: Vec<AbsolutePathBuf>,
#[serde(default)]
network_access: bool,
#[serde(default)]
exclude_tmpdir_env_var: bool,
#[serde(default)]
exclude_slash_tmp: bool,
},
}
impl SandboxPolicy {
pub fn to_core(&self) -> codex_protocol::protocol::SandboxPolicy {
match self {
SandboxPolicy::DangerFullAccess => {
codex_protocol::protocol::SandboxPolicy::DangerFullAccess
}
SandboxPolicy::ReadOnly => codex_protocol::protocol::SandboxPolicy::ReadOnly,
SandboxPolicy::ExternalSandbox { network_access } => {
codex_protocol::protocol::SandboxPolicy::ExternalSandbox {
network_access: match network_access {
NetworkAccess::Restricted => CoreNetworkAccess::Restricted,
NetworkAccess::Enabled => CoreNetworkAccess::Enabled,
},
}
}
SandboxPolicy::WorkspaceWrite {
writable_roots,
network_access,
exclude_tmpdir_env_var,
exclude_slash_tmp,
} => codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
writable_roots: writable_roots.clone(),
network_access: *network_access,
exclude_tmpdir_env_var: *exclude_tmpdir_env_var,
exclude_slash_tmp: *exclude_slash_tmp,
},
}
}
}
impl From<codex_protocol::protocol::SandboxPolicy> for SandboxPolicy {
fn from(value: codex_protocol::protocol::SandboxPolicy) -> Self {
match value {
codex_protocol::protocol::SandboxPolicy::DangerFullAccess => {
SandboxPolicy::DangerFullAccess
}
codex_protocol::protocol::SandboxPolicy::ReadOnly => SandboxPolicy::ReadOnly,
codex_protocol::protocol::SandboxPolicy::ExternalSandbox { network_access } => {
SandboxPolicy::ExternalSandbox {
network_access: match network_access {
CoreNetworkAccess::Restricted => NetworkAccess::Restricted,
CoreNetworkAccess::Enabled => NetworkAccess::Enabled,
},
}
}
codex_protocol::protocol::SandboxPolicy::WorkspaceWrite {
writable_roots,
network_access,
exclude_tmpdir_env_var,
exclude_slash_tmp,
} => SandboxPolicy::WorkspaceWrite {
writable_roots,
network_access,
exclude_tmpdir_env_var,
exclude_slash_tmp,
},
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(transparent)]
#[ts(type = "Array<string>", export_to = "v2/")]
pub struct ExecPolicyAmendment {
pub command: Vec<String>,
}
impl ExecPolicyAmendment {
pub fn into_core(self) -> CoreExecPolicyAmendment {
CoreExecPolicyAmendment::new(self.command)
}
}
impl From<CoreExecPolicyAmendment> for ExecPolicyAmendment {
fn from(value: CoreExecPolicyAmendment) -> Self {
Self {
command: value.command().to_vec(),
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum CommandAction {
Read {
command: String,
name: String,
path: PathBuf,
},
ListFiles {
command: String,
path: Option<String>,
},
Search {
command: String,
query: Option<String>,
path: Option<String>,
},
Unknown {
command: String,
},
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase", export_to = "v2/")]
#[derive(Default)]
pub enum SessionSource {
Cli,
#[serde(rename = "vscode")]
#[ts(rename = "vscode")]
#[default]
VsCode,
Exec,
AppServer,
#[serde(other)]
Unknown,
}
impl From<CoreSessionSource> for SessionSource {
fn from(value: CoreSessionSource) -> Self {
match value {
CoreSessionSource::Cli => SessionSource::Cli,
CoreSessionSource::VSCode => SessionSource::VsCode,
CoreSessionSource::Exec => SessionSource::Exec,
CoreSessionSource::Mcp => SessionSource::AppServer,
CoreSessionSource::SubAgent(_) => SessionSource::Unknown,
CoreSessionSource::Unknown => SessionSource::Unknown,
}
}
}
impl From<SessionSource> for CoreSessionSource {
fn from(value: SessionSource) -> Self {
match value {
SessionSource::Cli => CoreSessionSource::Cli,
SessionSource::VsCode => CoreSessionSource::VSCode,
SessionSource::Exec => CoreSessionSource::Exec,
SessionSource::AppServer => CoreSessionSource::Mcp,
SessionSource::Unknown => CoreSessionSource::Unknown,
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct GitInfo {
pub sha: Option<String>,
pub branch: Option<String>,
pub origin_url: Option<String>,
}
impl CommandAction {
pub fn into_core(self) -> CoreParsedCommand {
match self {
CommandAction::Read {
command: cmd,
name,
path,
} => CoreParsedCommand::Read { cmd, name, path },
CommandAction::ListFiles { command: cmd, path } => {
CoreParsedCommand::ListFiles { cmd, path }
}
CommandAction::Search {
command: cmd,
query,
path,
} => CoreParsedCommand::Search { cmd, query, path },
CommandAction::Unknown { command: cmd } => CoreParsedCommand::Unknown { cmd },
}
}
}
impl From<CoreParsedCommand> for CommandAction {
fn from(value: CoreParsedCommand) -> Self {
match value {
CoreParsedCommand::Read { cmd, name, path } => CommandAction::Read {
command: cmd,
name,
path,
},
CoreParsedCommand::ListFiles { cmd, path } => {
CommandAction::ListFiles { command: cmd, path }
}
CoreParsedCommand::Search { cmd, query, path } => CommandAction::Search {
command: cmd,
query,
path,
},
CoreParsedCommand::Unknown { cmd } => CommandAction::Unknown { command: cmd },
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum Account {
#[serde(rename = "apiKey", rename_all = "camelCase")]
#[ts(rename = "apiKey", rename_all = "camelCase")]
ApiKey {},
#[serde(rename = "chatgpt", rename_all = "camelCase")]
#[ts(rename = "chatgpt", rename_all = "camelCase")]
Chatgpt { email: String, plan_type: PlanType },
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum LoginAccountParams {
#[serde(rename = "apiKey", rename_all = "camelCase")]
#[ts(rename = "apiKey", rename_all = "camelCase")]
ApiKey {
#[serde(rename = "apiKey")]
#[ts(rename = "apiKey")]
api_key: String,
},
#[serde(rename = "chatgpt")]
#[ts(rename = "chatgpt")]
Chatgpt,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum LoginAccountResponse {
#[serde(rename = "apiKey", rename_all = "camelCase")]
#[ts(rename = "apiKey", rename_all = "camelCase")]
ApiKey {},
#[serde(rename = "chatgpt", rename_all = "camelCase")]
#[ts(rename = "chatgpt", rename_all = "camelCase")]
Chatgpt {
// Use plain String for identifiers to avoid TS/JSON Schema quirks around uuid-specific types.
// Convert to/from UUIDs at the application layer as needed.
login_id: String,
/// URL the client should open in a browser to initiate the OAuth flow.
auth_url: String,
},
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CancelLoginAccountParams {
pub login_id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum CancelLoginAccountStatus {
Canceled,
NotFound,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CancelLoginAccountResponse {
pub status: CancelLoginAccountStatus,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct LogoutAccountResponse {}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct GetAccountRateLimitsResponse {
pub rate_limits: RateLimitSnapshot,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct GetAccountParams {
#[serde(default)]
pub refresh_token: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct GetAccountResponse {
pub account: Option<Account>,
pub requires_openai_auth: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ModelListParams {
/// Opaque pagination cursor returned by a previous call.
pub cursor: Option<String>,
/// Optional page size; defaults to a reasonable server-side value.
pub limit: Option<u32>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct Model {
pub id: String,
pub model: String,
pub display_name: String,
pub description: String,
pub supported_reasoning_efforts: Vec<ReasoningEffortOption>,
pub default_reasoning_effort: ReasoningEffort,
// Only one model should be marked as default.
pub is_default: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ReasoningEffortOption {
pub reasoning_effort: ReasoningEffort,
pub description: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ModelListResponse {
pub data: Vec<Model>,
/// Opaque cursor to pass to the next call to continue after the last item.
/// If None, there are no more items to return.
pub next_cursor: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ListMcpServerStatusParams {
/// Opaque pagination cursor returned by a previous call.
pub cursor: Option<String>,
/// Optional page size; defaults to a server-defined value.
pub limit: Option<u32>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct McpServerStatus {
pub name: String,
pub tools: std::collections::HashMap<String, McpTool>,
pub resources: Vec<McpResource>,
pub resource_templates: Vec<McpResourceTemplate>,
pub auth_status: McpAuthStatus,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ListMcpServerStatusResponse {
pub data: Vec<McpServerStatus>,
/// Opaque cursor to pass to the next call to continue after the last item.
/// If None, there are no more items to return.
pub next_cursor: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct McpServerOauthLoginParams {
pub name: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
#[ts(optional)]
pub scopes: Option<Vec<String>>,
#[serde(default, skip_serializing_if = "Option::is_none")]
#[ts(optional)]
pub timeout_secs: Option<i64>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct McpServerOauthLoginResponse {
pub authorization_url: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct FeedbackUploadParams {
pub classification: String,
pub reason: Option<String>,
pub thread_id: Option<String>,
pub include_logs: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct FeedbackUploadResponse {
pub thread_id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CommandExecParams {
pub command: Vec<String>,
#[ts(type = "number | null")]
pub timeout_ms: Option<i64>,
pub cwd: Option<PathBuf>,
pub sandbox_policy: Option<SandboxPolicy>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CommandExecResponse {
pub exit_code: i32,
pub stdout: String,
pub stderr: String,
}
// === Threads, Turns, and Items ===
// Thread APIs
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadStartParams {
pub model: Option<String>,
pub model_provider: Option<String>,
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | true |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server-protocol/src/protocol/mod.rs | codex-rs/app-server-protocol/src/protocol/mod.rs | // Module declarations for the app-server protocol namespace.
// Exposes protocol pieces used by `lib.rs` via `pub use protocol::common::*;`.
pub mod common;
mod mappers;
pub mod thread_history;
pub mod v1;
pub mod v2;
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server-protocol/src/protocol/common.rs | codex-rs/app-server-protocol/src/protocol/common.rs | use std::path::Path;
use crate::JSONRPCNotification;
use crate::JSONRPCRequest;
use crate::RequestId;
use crate::export::GeneratedSchema;
use crate::export::write_json_schema;
use crate::protocol::v1;
use crate::protocol::v2;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use strum_macros::Display;
use ts_rs::TS;
/// Newtype around a git commit SHA; serialized as a plain string on the wire.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, TS)]
#[ts(type = "string")]
pub struct GitSha(pub String);

impl GitSha {
    /// Builds a `GitSha` by taking an owned copy of the given SHA string.
    pub fn new(sha: &str) -> Self {
        GitSha(sha.to_owned())
    }
}
/// How the user is authenticated. Serialized in lowercase, so the wire values
/// are `"apikey"` and `"chatgpt"`.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display, JsonSchema, TS)]
#[serde(rename_all = "lowercase")]
pub enum AuthMode {
    ApiKey,
    ChatGPT,
}
/// Generates an `enum ClientRequest` where each variant is a request that the
/// client can send to the server. Each variant has associated `params` and
/// `response` types. Also generates a `export_client_responses()` function to
/// export all response types to TypeScript.
macro_rules! client_request_definitions {
    (
        $(
            // Attributes/doc comments forwarded onto the generated variant.
            $(#[$variant_meta:meta])*
            // Optional `=> "wire/name"` overrides the default camelCase method name.
            $variant:ident $(=> $wire:literal)? {
                params: $(#[$params_meta:meta])* $params:ty,
                response: $response:ty,
            }
        ),* $(,)?
    ) => {
        /// Request from the client to the server.
        #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
        #[serde(tag = "method", rename_all = "camelCase")]
        pub enum ClientRequest {
            $(
                $(#[$variant_meta])*
                // When a wire name was given, rename both the serde tag and the
                // TypeScript binding to it.
                $(#[serde(rename = $wire)] #[ts(rename = $wire)])?
                $variant {
                    // JSON-RPC id travels as `"id"` on the wire.
                    #[serde(rename = "id")]
                    request_id: RequestId,
                    $(#[$params_meta])*
                    params: $params,
                },
            )*
        }

        /// Exports every response type to TypeScript bindings under `out_dir`.
        pub fn export_client_responses(
            out_dir: &::std::path::Path,
        ) -> ::std::result::Result<(), ::ts_rs::ExportError> {
            $(
                <$response as ::ts_rs::TS>::export_all_to(out_dir)?;
            )*
            Ok(())
        }

        /// Writes a JSON Schema file per response type, named after the type.
        #[allow(clippy::vec_init_then_push)]
        pub fn export_client_response_schemas(
            out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(
                schemas.push(write_json_schema::<$response>(out_dir, stringify!($response))?);
            )*
            Ok(schemas)
        }

        /// Writes a JSON Schema file per params type, named after the type.
        #[allow(clippy::vec_init_then_push)]
        pub fn export_client_param_schemas(
            out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(
                schemas.push(write_json_schema::<$params>(out_dir, stringify!($params))?);
            )*
            Ok(schemas)
        }
    };
}
// Variants without `=> "wire"` serialize under the camelCase variant name
// (e.g. `NewConversation` -> "newConversation"); the explicit wire names below
// belong to the newer slash-delimited v2 method namespace.
client_request_definitions! {
    Initialize {
        params: v1::InitializeParams,
        response: v1::InitializeResponse,
    },
    /// NEW APIs
    // Thread lifecycle
    ThreadStart => "thread/start" {
        params: v2::ThreadStartParams,
        response: v2::ThreadStartResponse,
    },
    ThreadResume => "thread/resume" {
        params: v2::ThreadResumeParams,
        response: v2::ThreadResumeResponse,
    },
    ThreadArchive => "thread/archive" {
        params: v2::ThreadArchiveParams,
        response: v2::ThreadArchiveResponse,
    },
    ThreadList => "thread/list" {
        params: v2::ThreadListParams,
        response: v2::ThreadListResponse,
    },
    SkillsList => "skills/list" {
        params: v2::SkillsListParams,
        response: v2::SkillsListResponse,
    },
    TurnStart => "turn/start" {
        params: v2::TurnStartParams,
        response: v2::TurnStartResponse,
    },
    TurnInterrupt => "turn/interrupt" {
        params: v2::TurnInterruptParams,
        response: v2::TurnInterruptResponse,
    },
    ReviewStart => "review/start" {
        params: v2::ReviewStartParams,
        response: v2::ReviewStartResponse,
    },
    ModelList => "model/list" {
        params: v2::ModelListParams,
        response: v2::ModelListResponse,
    },
    McpServerOauthLogin => "mcpServer/oauth/login" {
        params: v2::McpServerOauthLoginParams,
        response: v2::McpServerOauthLoginResponse,
    },
    McpServerStatusList => "mcpServerStatus/list" {
        params: v2::ListMcpServerStatusParams,
        response: v2::ListMcpServerStatusResponse,
    },
    LoginAccount => "account/login/start" {
        params: v2::LoginAccountParams,
        response: v2::LoginAccountResponse,
    },
    CancelLoginAccount => "account/login/cancel" {
        params: v2::CancelLoginAccountParams,
        response: v2::CancelLoginAccountResponse,
    },
    // `Option<()>` params with skip_serializing_if make "no params" requests
    // omit the `params` key entirely.
    LogoutAccount => "account/logout" {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v2::LogoutAccountResponse,
    },
    GetAccountRateLimits => "account/rateLimits/read" {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v2::GetAccountRateLimitsResponse,
    },
    FeedbackUpload => "feedback/upload" {
        params: v2::FeedbackUploadParams,
        response: v2::FeedbackUploadResponse,
    },
    /// Execute a command (argv vector) under the server's sandbox.
    OneOffCommandExec => "command/exec" {
        params: v2::CommandExecParams,
        response: v2::CommandExecResponse,
    },
    ConfigRead => "config/read" {
        params: v2::ConfigReadParams,
        response: v2::ConfigReadResponse,
    },
    ConfigValueWrite => "config/value/write" {
        params: v2::ConfigValueWriteParams,
        response: v2::ConfigWriteResponse,
    },
    ConfigBatchWrite => "config/batchWrite" {
        params: v2::ConfigBatchWriteParams,
        response: v2::ConfigWriteResponse,
    },
    GetAccount => "account/read" {
        params: v2::GetAccountParams,
        response: v2::GetAccountResponse,
    },
    /// DEPRECATED APIs below
    NewConversation {
        params: v1::NewConversationParams,
        response: v1::NewConversationResponse,
    },
    GetConversationSummary {
        params: v1::GetConversationSummaryParams,
        response: v1::GetConversationSummaryResponse,
    },
    /// List recorded Codex conversations (rollouts) with optional pagination and search.
    ListConversations {
        params: v1::ListConversationsParams,
        response: v1::ListConversationsResponse,
    },
    /// Resume a recorded Codex conversation from a rollout file.
    ResumeConversation {
        params: v1::ResumeConversationParams,
        response: v1::ResumeConversationResponse,
    },
    ArchiveConversation {
        params: v1::ArchiveConversationParams,
        response: v1::ArchiveConversationResponse,
    },
    SendUserMessage {
        params: v1::SendUserMessageParams,
        response: v1::SendUserMessageResponse,
    },
    SendUserTurn {
        params: v1::SendUserTurnParams,
        response: v1::SendUserTurnResponse,
    },
    InterruptConversation {
        params: v1::InterruptConversationParams,
        response: v1::InterruptConversationResponse,
    },
    AddConversationListener {
        params: v1::AddConversationListenerParams,
        response: v1::AddConversationSubscriptionResponse,
    },
    RemoveConversationListener {
        params: v1::RemoveConversationListenerParams,
        response: v1::RemoveConversationSubscriptionResponse,
    },
    GitDiffToRemote {
        params: v1::GitDiffToRemoteParams,
        response: v1::GitDiffToRemoteResponse,
    },
    LoginApiKey {
        params: v1::LoginApiKeyParams,
        response: v1::LoginApiKeyResponse,
    },
    LoginChatGpt {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::LoginChatGptResponse,
    },
    // DEPRECATED in favor of CancelLoginAccount
    CancelLoginChatGpt {
        params: v1::CancelLoginChatGptParams,
        response: v1::CancelLoginChatGptResponse,
    },
    LogoutChatGpt {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::LogoutChatGptResponse,
    },
    /// DEPRECATED in favor of GetAccount
    GetAuthStatus {
        params: v1::GetAuthStatusParams,
        response: v1::GetAuthStatusResponse,
    },
    GetUserSavedConfig {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::GetUserSavedConfigResponse,
    },
    SetDefaultModel {
        params: v1::SetDefaultModelParams,
        response: v1::SetDefaultModelResponse,
    },
    GetUserAgent {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::GetUserAgentResponse,
    },
    UserInfo {
        params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
        response: v1::UserInfoResponse,
    },
    FuzzyFileSearch {
        params: FuzzyFileSearchParams,
        response: FuzzyFileSearchResponse,
    },
    /// Execute a command (argv vector) under the server's sandbox.
    ExecOneOffCommand {
        params: v1::ExecOneOffCommandParams,
        response: v1::ExecOneOffCommandResponse,
    },
}
/// Generates an `enum ServerRequest` where each variant is a request that the
/// server can send to the client along with the corresponding params and
/// response types. It also generates helper types used by the app/server
/// infrastructure (payload enum, request constructor, and export helpers).
macro_rules! server_request_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            // Optional `=> "wire/name"` overrides the default camelCase method name.
            $variant:ident $(=> $wire:literal)? {
                params: $params:ty,
                response: $response:ty,
            }
        ),* $(,)?
    ) => {
        /// Request initiated from the server and sent to the client.
        #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
        #[serde(tag = "method", rename_all = "camelCase")]
        pub enum ServerRequest {
            $(
                $(#[$variant_meta])*
                $(#[serde(rename = $wire)] #[ts(rename = $wire)])?
                $variant {
                    #[serde(rename = "id")]
                    request_id: RequestId,
                    params: $params,
                },
            )*
        }

        /// Params-only mirror of `ServerRequest`, used before an id is assigned.
        #[derive(Debug, Clone, PartialEq, JsonSchema)]
        pub enum ServerRequestPayload {
            $( $variant($params), )*
        }

        impl ServerRequestPayload {
            /// Pairs this payload with a request id to form the full request.
            pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
                match self {
                    $(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
                }
            }
        }

        /// Exports every response type to TypeScript bindings under `out_dir`.
        pub fn export_server_responses(
            out_dir: &::std::path::Path,
        ) -> ::std::result::Result<(), ::ts_rs::ExportError> {
            $(
                <$response as ::ts_rs::TS>::export_all_to(out_dir)?;
            )*
            Ok(())
        }

        // Note: schema files are named after the *variant* ("FooResponse"),
        // unlike the client macro which names them after the type.
        #[allow(clippy::vec_init_then_push)]
        pub fn export_server_response_schemas(
            out_dir: &Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(
                schemas.push(crate::export::write_json_schema::<$response>(
                    out_dir,
                    concat!(stringify!($variant), "Response"),
                )?);
            )*
            Ok(schemas)
        }

        #[allow(clippy::vec_init_then_push)]
        pub fn export_server_param_schemas(
            out_dir: &Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(
                schemas.push(crate::export::write_json_schema::<$params>(
                    out_dir,
                    concat!(stringify!($variant), "Params"),
                )?);
            )*
            Ok(schemas)
        }
    };
}
/// Generates `ServerNotification` enum and helpers, including a JSON Schema
/// exporter for each notification.
macro_rules! server_notification_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            // Optional `=> "wire/name"`; payload travels in the "params" field.
            $variant:ident $(=> $wire:literal)? ( $payload:ty )
        ),* $(,)?
    ) => {
        /// Notification sent from the server to the client.
        #[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
        #[serde(tag = "method", content = "params", rename_all = "camelCase")]
        #[strum(serialize_all = "camelCase")]
        pub enum ServerNotification {
            $(
                $(#[$variant_meta])*
                // Wire override must be applied to serde, ts-rs, AND strum so
                // Display agrees with the serialized method name.
                $(#[serde(rename = $wire)] #[ts(rename = $wire)] #[strum(serialize = $wire)])?
                $variant($payload),
            )*
        }

        impl ServerNotification {
            /// Extracts just the payload as a JSON value (the "params" field).
            pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
                match self {
                    $(Self::$variant(params) => serde_json::to_value(params),)*
                }
            }
        }

        impl TryFrom<JSONRPCNotification> for ServerNotification {
            type Error = serde_json::Error;

            // Round-trips through serde_json::Value to re-tag the generic
            // notification as a typed variant.
            fn try_from(value: JSONRPCNotification) -> Result<Self, serde_json::Error> {
                serde_json::from_value(serde_json::to_value(value)?)
            }
        }

        /// Writes a JSON Schema file per notification payload type.
        #[allow(clippy::vec_init_then_push)]
        pub fn export_server_notification_schemas(
            out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            let mut schemas = Vec::new();
            $(schemas.push(crate::export::write_json_schema::<$payload>(out_dir, stringify!($payload))?);)*
            Ok(schemas)
        }
    };
}
/// Notifications sent from the client to the server.
macro_rules! client_notification_definitions {
    (
        $(
            $(#[$variant_meta:meta])*
            // Payload is optional: `Variant` or `Variant(PayloadTy)`.
            $variant:ident $( ( $payload:ty ) )?
        ),* $(,)?
    ) => {
        #[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
        #[serde(tag = "method", content = "params", rename_all = "camelCase")]
        #[strum(serialize_all = "camelCase")]
        pub enum ClientNotification {
            $(
                $(#[$variant_meta])*
                $variant $( ( $payload ) )?,
            )*
        }

        /// Writes a JSON Schema file per payload-carrying notification.
        pub fn export_client_notification_schemas(
            _out_dir: &::std::path::Path,
        ) -> ::anyhow::Result<Vec<GeneratedSchema>> {
            // BUGFIX: `schemas` must be `mut` — the expansion below pushes into
            // it as soon as any variant carries a payload. The previous
            // immutable binding only compiled because the current invocation
            // has no payload variants; adding one would break the build.
            // `allow(unused_mut)` covers the payload-free expansion.
            #[allow(unused_mut)]
            let mut schemas = Vec::new();
            $( $(schemas.push(crate::export::write_json_schema::<$payload>(_out_dir, stringify!($payload))?);)? )*
            Ok(schemas)
        }
    };
}
impl TryFrom<JSONRPCRequest> for ServerRequest {
    type Error = serde_json::Error;

    /// Re-interprets a generic JSON-RPC request as a typed `ServerRequest` by
    /// round-tripping it through a `serde_json::Value`.
    fn try_from(value: JSONRPCRequest) -> Result<Self, Self::Error> {
        let raw = serde_json::to_value(value)?;
        serde_json::from_value(raw)
    }
}
server_request_definitions! {
    /// NEW APIs
    /// Sent when approval is requested for a specific command execution.
    /// This request is used for Turns started via turn/start.
    CommandExecutionRequestApproval => "item/commandExecution/requestApproval" {
        params: v2::CommandExecutionRequestApprovalParams,
        response: v2::CommandExecutionRequestApprovalResponse,
    },
    /// Sent when approval is requested for a specific file change.
    /// This request is used for Turns started via turn/start.
    FileChangeRequestApproval => "item/fileChange/requestApproval" {
        params: v2::FileChangeRequestApprovalParams,
        response: v2::FileChangeRequestApprovalResponse,
    },
    /// DEPRECATED APIs below
    /// Request to approve a patch.
    /// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
    ApplyPatchApproval {
        params: v1::ApplyPatchApprovalParams,
        response: v1::ApplyPatchApprovalResponse,
    },
    /// Request to exec a command.
    /// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
    ExecCommandApproval {
        params: v1::ExecCommandApprovalParams,
        response: v1::ExecCommandApprovalResponse,
    },
}
/// Parameters for `fuzzyFileSearch`: fuzzy-match `query` against file names
/// under each of the given root directories.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
pub struct FuzzyFileSearchParams {
    pub query: String,
    pub roots: Vec<String>,
    // if provided, will cancel any previous request that used the same value
    pub cancellation_token: Option<String>,
}
/// Superset of [`codex_file_search::FileMatch`]
// NOTE(review): no `#[serde(rename_all = "camelCase")]` here, so `file_name`
// serializes as snake_case, unlike most types in this file — confirm the wire
// format is intentional before changing it.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct FuzzyFileSearchResult {
    // Root directory the match was found under.
    pub root: String,
    // Path of the matched file.
    pub path: String,
    pub file_name: String,
    // Match score; higher presumably means a better match — TODO confirm.
    pub score: u32,
    // Indices of the matched characters, when available.
    pub indices: Option<Vec<u32>>,
}
/// Response to `fuzzyFileSearch`: the list of matching files.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct FuzzyFileSearchResponse {
    pub files: Vec<FuzzyFileSearchResult>,
}
server_notification_definitions! {
    /// NEW NOTIFICATIONS
    Error => "error" (v2::ErrorNotification),
    ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
    ThreadTokenUsageUpdated => "thread/tokenUsage/updated" (v2::ThreadTokenUsageUpdatedNotification),
    TurnStarted => "turn/started" (v2::TurnStartedNotification),
    TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
    TurnDiffUpdated => "turn/diff/updated" (v2::TurnDiffUpdatedNotification),
    TurnPlanUpdated => "turn/plan/updated" (v2::TurnPlanUpdatedNotification),
    ItemStarted => "item/started" (v2::ItemStartedNotification),
    ItemCompleted => "item/completed" (v2::ItemCompletedNotification),
    /// This event is internal-only. Used by Codex Cloud.
    RawResponseItemCompleted => "rawResponseItem/completed" (v2::RawResponseItemCompletedNotification),
    AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
    CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
    TerminalInteraction => "item/commandExecution/terminalInteraction" (v2::TerminalInteractionNotification),
    FileChangeOutputDelta => "item/fileChange/outputDelta" (v2::FileChangeOutputDeltaNotification),
    McpToolCallProgress => "item/mcpToolCall/progress" (v2::McpToolCallProgressNotification),
    McpServerOauthLoginCompleted => "mcpServer/oauthLogin/completed" (v2::McpServerOauthLoginCompletedNotification),
    AccountUpdated => "account/updated" (v2::AccountUpdatedNotification),
    AccountRateLimitsUpdated => "account/rateLimits/updated" (v2::AccountRateLimitsUpdatedNotification),
    ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
    ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
    ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
    ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
    DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
    /// Notifies the user of world-writable directories on Windows, which cannot be protected by the sandbox.
    WindowsWorldWritableWarning => "windows/worldWritableWarning" (v2::WindowsWorldWritableWarningNotification),
    // Consistency fix: use the macro's `=> "wire"` shorthand instead of the
    // hand-written serde/ts/strum rename attributes it expands to — the
    // generated code is identical.
    AccountLoginCompleted => "account/login/completed" (v2::AccountLoginCompletedNotification),
    /// DEPRECATED NOTIFICATIONS below
    AuthStatusChange(v1::AuthStatusChangeNotification),
    /// Deprecated: use `account/login/completed` instead.
    LoginChatGptComplete(v1::LoginChatGptCompleteNotification),
    SessionConfigured(v1::SessionConfiguredNotification),
}
client_notification_definitions! {
    // Payload-free handshake notification; serializes as {"method": "initialized"}.
    Initialized,
}
// Wire-format regression tests: each test pins the exact JSON produced for a
// request/notification so protocol changes are caught at review time.
#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;
    use codex_protocol::ConversationId;
    use codex_protocol::account::PlanType;
    use codex_protocol::parse_command::ParsedCommand;
    use codex_protocol::protocol::AskForApproval;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use std::path::PathBuf;

    // Legacy request: method is the camelCase variant name, id is flattened
    // into the envelope, and None fields serialize as explicit nulls (except
    // those marked skip_serializing_if).
    #[test]
    fn serialize_new_conversation() -> Result<()> {
        let request = ClientRequest::NewConversation {
            request_id: RequestId::Integer(42),
            params: v1::NewConversationParams {
                model: Some("gpt-5.1-codex-max".to_string()),
                model_provider: None,
                profile: None,
                cwd: None,
                approval_policy: Some(AskForApproval::OnRequest),
                sandbox: None,
                config: None,
                base_instructions: None,
                developer_instructions: None,
                compact_prompt: None,
                include_apply_patch_tool: None,
            },
        };
        assert_eq!(
            json!({
                "method": "newConversation",
                "id": 42,
                "params": {
                    "model": "gpt-5.1-codex-max",
                    "modelProvider": null,
                    "profile": null,
                    "cwd": null,
                    "approvalPolicy": "on-request",
                    "sandbox": null,
                    "config": null,
                    "baseInstructions": null,
                    "includeApplyPatchTool": null
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn conversation_id_serializes_as_plain_string() -> Result<()> {
        let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
        assert_eq!(
            json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
            serde_json::to_value(id)?
        );
        Ok(())
    }

    #[test]
    fn conversation_id_deserializes_from_plain_string() -> Result<()> {
        let id: ConversationId =
            serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
        assert_eq!(
            ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
            id,
        );
        Ok(())
    }

    #[test]
    fn serialize_client_notification() -> Result<()> {
        let notification = ClientNotification::Initialized;
        // Note there is no "params" field for this notification.
        assert_eq!(
            json!({
                "method": "initialized",
            }),
            serde_json::to_value(&notification)?,
        );
        Ok(())
    }

    // Also exercises ServerRequestPayload::request_with_id round-tripping.
    #[test]
    fn serialize_server_request() -> Result<()> {
        let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
        let params = v1::ExecCommandApprovalParams {
            conversation_id,
            call_id: "call-42".to_string(),
            command: vec!["echo".to_string(), "hello".to_string()],
            cwd: PathBuf::from("/tmp"),
            reason: Some("because tests".to_string()),
            parsed_cmd: vec![ParsedCommand::Unknown {
                cmd: "echo hello".to_string(),
            }],
        };
        let request = ServerRequest::ExecCommandApproval {
            request_id: RequestId::Integer(7),
            params: params.clone(),
        };
        assert_eq!(
            json!({
                "method": "execCommandApproval",
                "id": 7,
                "params": {
                    "conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8",
                    "callId": "call-42",
                    "command": ["echo", "hello"],
                    "cwd": "/tmp",
                    "reason": "because tests",
                    "parsedCmd": [
                        {
                            "type": "unknown",
                            "cmd": "echo hello"
                        }
                    ]
                }
            }),
            serde_json::to_value(&request)?,
        );
        let payload = ServerRequestPayload::ExecCommandApproval(params);
        assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
        Ok(())
    }

    // `params: None` with skip_serializing_if omits the "params" key entirely.
    #[test]
    fn serialize_get_account_rate_limits() -> Result<()> {
        let request = ClientRequest::GetAccountRateLimits {
            request_id: RequestId::Integer(1),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "account/rateLimits/read",
                "id": 1,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_login_api_key() -> Result<()> {
        let request = ClientRequest::LoginAccount {
            request_id: RequestId::Integer(2),
            params: v2::LoginAccountParams::ApiKey {
                api_key: "secret".to_string(),
            },
        };
        assert_eq!(
            json!({
                "method": "account/login/start",
                "id": 2,
                "params": {
                    "type": "apiKey",
                    "apiKey": "secret"
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_login_chatgpt() -> Result<()> {
        let request = ClientRequest::LoginAccount {
            request_id: RequestId::Integer(3),
            params: v2::LoginAccountParams::Chatgpt,
        };
        assert_eq!(
            json!({
                "method": "account/login/start",
                "id": 3,
                "params": {
                    "type": "chatgpt"
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_logout() -> Result<()> {
        let request = ClientRequest::LogoutAccount {
            request_id: RequestId::Integer(4),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "account/logout",
                "id": 4,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_get_account() -> Result<()> {
        let request = ClientRequest::GetAccount {
            request_id: RequestId::Integer(5),
            params: v2::GetAccountParams {
                refresh_token: false,
            },
        };
        assert_eq!(
            json!({
                "method": "account/read",
                "id": 5,
                "params": {
                    "refreshToken": false
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn account_serializes_fields_in_camel_case() -> Result<()> {
        let api_key = v2::Account::ApiKey {};
        assert_eq!(
            json!({
                "type": "apiKey",
            }),
            serde_json::to_value(&api_key)?,
        );
        let chatgpt = v2::Account::Chatgpt {
            email: "user@example.com".to_string(),
            plan_type: PlanType::Plus,
        };
        assert_eq!(
            json!({
                "type": "chatgpt",
                "email": "user@example.com",
                "planType": "plus",
            }),
            serde_json::to_value(&chatgpt)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_list_models() -> Result<()> {
        let request = ClientRequest::ModelList {
            request_id: RequestId::Integer(6),
            params: v2::ModelListParams::default(),
        };
        assert_eq!(
            json!({
                "method": "model/list",
                "id": 6,
                "params": {
                    "limit": null,
                    "cursor": null
                }
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server-protocol/src/protocol/v1.rs | codex-rs/app-server-protocol/src/protocol/v1.rs | use std::collections::HashMap;
use std::path::PathBuf;
use codex_protocol::ConversationId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;
use codex_protocol::models::ResponseItem;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::ReviewDecision;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::TurnAbortReason;
use codex_utils_absolute_path::AbsolutePathBuf;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;
use uuid::Uuid;
// Reuse shared types defined in `common.rs`.
use crate::protocol::common::AuthMode;
use crate::protocol::common::GitSha;
/// Parameters for the `initialize` request: identifies the connecting client.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeParams {
    pub client_info: ClientInfo,
}
/// Identity of the connecting client, reported during `initialize`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ClientInfo {
    pub name: String,
    // Optional human-readable display title.
    pub title: Option<String>,
    pub version: String,
}
/// Response to `initialize`: the user-agent string the server will use.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InitializeResponse {
    pub user_agent: String,
}
/// Parameters for the legacy `newConversation` request. All fields are
/// optional overrides; `None` presumably defers to server-side configuration
/// — confirm against the server implementation.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationParams {
    pub model: Option<String>,
    pub model_provider: Option<String>,
    pub profile: Option<String>,
    pub cwd: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub sandbox: Option<SandboxMode>,
    // Free-form config overrides keyed by config path.
    pub config: Option<HashMap<String, serde_json::Value>>,
    pub base_instructions: Option<String>,
    // Unlike the fields above, these two are omitted from the JSON when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub developer_instructions: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub compact_prompt: Option<String>,
    pub include_apply_patch_tool: Option<bool>,
}
/// Response to `newConversation`: the created conversation's identity and the
/// effective model settings.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationResponse {
    pub conversation_id: ConversationId,
    pub model: String,
    pub reasoning_effort: Option<ReasoningEffort>,
    // Path to the rollout (recording) file backing this conversation.
    pub rollout_path: PathBuf,
}
/// Response to `resumeConversation`: the reopened conversation plus any
/// messages replayed from the rollout.
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationResponse {
    pub conversation_id: ConversationId,
    pub model: String,
    pub initial_messages: Option<Vec<EventMsg>>,
    pub rollout_path: PathBuf,
}
/// Parameters for `getConversationSummary`. Untagged: the variant is inferred
/// from which key (`rolloutPath` or `conversationId`) is present in the JSON.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(untagged)]
pub enum GetConversationSummaryParams {
    RolloutPath {
        #[serde(rename = "rolloutPath")]
        rollout_path: PathBuf,
    },
    ConversationId {
        #[serde(rename = "conversationId")]
        conversation_id: ConversationId,
    },
}
/// Response to `getConversationSummary`.
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetConversationSummaryResponse {
    pub summary: ConversationSummary,
}
/// Parameters for `listConversations`: cursor-based pagination with an
/// optional provider filter.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListConversationsParams {
    pub page_size: Option<usize>,
    // Opaque cursor from a previous ListConversationsResponse::next_cursor.
    pub cursor: Option<String>,
    pub model_providers: Option<Vec<String>>,
}
/// One entry in a conversation listing: identity, location on disk, and
/// lightweight display metadata.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ConversationSummary {
    pub conversation_id: ConversationId,
    // Rollout file backing the conversation.
    pub path: PathBuf,
    // Short preview text for display in a picker.
    pub preview: String,
    pub timestamp: Option<String>,
    pub model_provider: String,
    pub cwd: PathBuf,
    pub cli_version: String,
    pub source: SessionSource,
    pub git_info: Option<ConversationGitInfo>,
}
/// Git context captured when the conversation was recorded.
// NOTE(review): this type uses snake_case on the wire while its container
// uses camelCase — confirm this asymmetry is intentional before changing.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
pub struct ConversationGitInfo {
    pub sha: Option<String>,
    pub branch: Option<String>,
    pub origin_url: Option<String>,
}
/// Response to `listConversations`: one page of summaries plus the cursor for
/// the next page, if any.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListConversationsResponse {
    pub items: Vec<ConversationSummary>,
    pub next_cursor: Option<String>,
}
/// Parameters for `resumeConversation`: locate a conversation by rollout path
/// or id (or seed from explicit history), optionally overriding its settings.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationParams {
    pub path: Option<PathBuf>,
    pub conversation_id: Option<ConversationId>,
    pub history: Option<Vec<ResponseItem>>,
    // Same shape as newConversation; overrides applied on resume.
    pub overrides: Option<NewConversationParams>,
}
/// Response to `addConversationListener`: handle used to later remove the
/// subscription.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationSubscriptionResponse {
    // Exposed to JSON Schema consumers as a plain string.
    #[schemars(with = "String")]
    pub subscription_id: Uuid,
}
/// Parameters for `archiveConversation`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationParams {
    pub conversation_id: ConversationId,
    pub rollout_path: PathBuf,
}
/// Empty acknowledgment for `archiveConversation`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationResponse {}
/// Empty acknowledgment for `removeConversationListener`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationSubscriptionResponse {}
/// Parameters for the legacy `loginApiKey` request.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyParams {
    pub api_key: String,
}
/// Empty acknowledgment for `loginApiKey`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginApiKeyResponse {}
/// Response to the legacy `loginChatGpt` request: an id for the in-flight
/// login attempt and the URL the user must visit to authorize.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LoginChatGptResponse {
    #[schemars(with = "String")]
    pub login_id: Uuid,
    pub auth_url: String,
}
/// Response to `gitDiffToRemote`: the base SHA and the diff against it.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GitDiffToRemoteResponse {
    pub sha: GitSha,
    pub diff: String,
}
/// Server-to-client request asking the user to approve a patch (legacy API).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
    /// and [codex_core::protocol::PatchApplyEndEvent].
    pub call_id: String,
    pub file_changes: HashMap<PathBuf, FileChange>,
    /// Optional explanatory reason (e.g. request for extra write access).
    pub reason: Option<String>,
    /// When set, the agent is asking the user to allow writes under this root
    /// for the remainder of the session (unclear if this is honored today).
    pub grant_root: Option<PathBuf>,
}
/// The user's decision for an `applyPatchApproval` request.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalResponse {
    pub decision: ReviewDecision,
}
/// Server-to-client request asking the user to approve a command execution
/// (legacy API).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecCommandApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
    /// and [codex_core::protocol::ExecCommandEndEvent].
    pub call_id: String,
    // Command as an argv vector.
    pub command: Vec<String>,
    pub cwd: PathBuf,
    pub reason: Option<String>,
    pub parsed_cmd: Vec<ParsedCommand>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct ExecCommandApprovalResponse {
pub decision: ReviewDecision,
}
/// Parameters for `cancelLoginChatGpt`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptParams {
    /// Identifier of the login attempt to cancel (serialized as a string).
    #[schemars(with = "String")]
    pub login_id: Uuid,
}
/// Parameters for `gitDiffToRemote`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GitDiffToRemoteParams {
    /// Directory of the checkout to diff.
    pub cwd: PathBuf,
}
/// Response to `cancelLoginChatGpt`; carries no data.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptResponse {}
/// Parameters for `logoutChatGpt`; none required.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutChatGptParams {}
/// Response to `logoutChatGpt`; carries no data.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct LogoutChatGptResponse {}
/// Parameters for `getAuthStatus`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthStatusParams {
    // Presumably controls whether `auth_token` is included in the response —
    // confirm against the request handler.
    pub include_token: Option<bool>,
    // Presumably asks the server to refresh the token first — confirm.
    pub refresh_token: Option<bool>,
}
/// Parameters for `execOneOffCommand`: run a single command outside of a
/// conversation.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecOneOffCommandParams {
    /// Command and arguments.
    pub command: Vec<String>,
    /// Optional timeout in milliseconds.
    pub timeout_ms: Option<u64>,
    /// Optional working directory.
    pub cwd: Option<PathBuf>,
    /// Optional sandbox policy to run the command under.
    pub sandbox_policy: Option<SandboxPolicy>,
}
/// Response to `execOneOffCommand` with the captured process result.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecOneOffCommandResponse {
    pub exit_code: i32,
    pub stdout: String,
    pub stderr: String,
}
/// Response to `getAuthStatus`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetAuthStatusResponse {
    pub auth_method: Option<AuthMode>,
    pub auth_token: Option<String>,
    pub requires_openai_auth: Option<bool>,
}
/// Response to `getUserAgent`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserAgentResponse {
    pub user_agent: String,
}
/// Response to `userInfo`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserInfoResponse {
    /// "Alleged" — self-reported by the auth provider, not verified here.
    pub alleged_user_email: Option<String>,
}
/// Response to `getUserSavedConfig`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct GetUserSavedConfigResponse {
    pub config: UserSavedConfig,
}
/// Parameters for `setDefaultModel`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelParams {
    pub model: Option<String>,
    pub reasoning_effort: Option<ReasoningEffort>,
}
/// Response to `setDefaultModel`; carries no data.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetDefaultModelResponse {}
/// User-level saved configuration; every field is optional so a partial
/// config file still deserializes.
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UserSavedConfig {
    pub approval_policy: Option<AskForApproval>,
    pub sandbox_mode: Option<SandboxMode>,
    pub sandbox_settings: Option<SandboxSettings>,
    pub forced_chatgpt_workspace_id: Option<String>,
    pub forced_login_method: Option<ForcedLoginMethod>,
    pub model: Option<String>,
    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    pub model_verbosity: Option<Verbosity>,
    pub tools: Option<Tools>,
    /// Name of the active profile, if any.
    pub profile: Option<String>,
    /// Named profiles that can override the top-level settings.
    pub profiles: HashMap<String, Profile>,
}
/// A named configuration profile; unset fields fall back to the top-level
/// [`UserSavedConfig`] values.
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Profile {
    pub model: Option<String>,
    pub model_provider: Option<String>,
    pub approval_policy: Option<AskForApproval>,
    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    pub model_verbosity: Option<Verbosity>,
    pub chatgpt_base_url: Option<String>,
}
/// Per-tool enablement flags.
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct Tools {
    pub web_search: Option<bool>,
    pub view_image: Option<bool>,
}
/// Sandbox configuration knobs.
#[derive(Deserialize, Debug, Clone, PartialEq, Serialize, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SandboxSettings {
    /// Roots the sandbox may write to; defaults to empty when omitted.
    #[serde(default)]
    pub writable_roots: Vec<AbsolutePathBuf>,
    pub network_access: Option<bool>,
    pub exclude_tmpdir_env_var: Option<bool>,
    pub exclude_slash_tmp: Option<bool>,
}
/// Parameters for `sendUserMessage`: append items to a conversation.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageParams {
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
}
/// Parameters for `sendUserTurn`: like `sendUserMessage` but with the full
/// per-turn execution settings spelled out.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnParams {
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
    pub cwd: PathBuf,
    pub approval_policy: AskForApproval,
    pub sandbox_policy: SandboxPolicy,
    pub model: String,
    pub effort: Option<ReasoningEffort>,
    pub summary: ReasoningSummary,
}
/// Response to `sendUserTurn`; carries no data.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnResponse {}
/// Parameters for `interruptConversation`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationParams {
    pub conversation_id: ConversationId,
}
/// Response to `interruptConversation` reporting why the turn stopped.
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationResponse {
    pub abort_reason: TurnAbortReason,
}
/// Response to `sendUserMessage`; carries no data.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageResponse {}
/// Parameters for `addConversationListener`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationListenerParams {
    pub conversation_id: ConversationId,
    /// Opt-in to raw event delivery; defaults to false when omitted.
    #[serde(default)]
    pub experimental_raw_events: bool,
}
/// Parameters for `removeConversationListener`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct RemoveConversationListenerParams {
    /// Subscription to remove (serialized as a string).
    #[schemars(with = "String")]
    pub subscription_id: Uuid,
}
/// One piece of user input, tagged as `{"type": ..., "data": ...}` on the
/// wire.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type", content = "data")]
pub enum InputItem {
    /// Plain text.
    Text { text: String },
    /// Image referenced by URL.
    Image { image_url: String },
    /// Image on the local filesystem.
    LocalImage { path: PathBuf },
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
/// Deprecated in favor of AccountLoginCompletedNotification.
pub struct LoginChatGptCompleteNotification {
    /// Login attempt this notification refers to (serialized as a string).
    #[schemars(with = "String")]
    pub login_id: Uuid,
    pub success: bool,
    /// Populated on failure.
    pub error: Option<String>,
}
/// Sent once a session has been configured and is ready for input.
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SessionConfiguredNotification {
    pub session_id: ConversationId,
    pub model: String,
    pub reasoning_effort: Option<ReasoningEffort>,
    pub history_log_id: u64,
    /// Exposed to TypeScript as `number`.
    #[ts(type = "number")]
    pub history_entry_count: usize,
    pub initial_messages: Option<Vec<EventMsg>>,
    pub rollout_path: PathBuf,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
/// Deprecated notification. Use AccountUpdatedNotification instead.
pub struct AuthStatusChangeNotification {
    pub auth_method: Option<AuthMode>,
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/login/src/pkce.rs | codex-rs/login/src/pkce.rs | use base64::Engine;
use rand::RngCore;
use sha2::Digest;
use sha2::Sha256;
/// PKCE (RFC 7636) verifier/challenge pair for the OAuth authorization-code
/// flow.
#[derive(Debug, Clone)]
pub struct PkceCodes {
    /// High-entropy secret sent with the token request.
    pub code_verifier: String,
    /// S256 challenge derived from the verifier, sent with the authorize
    /// request.
    pub code_challenge: String,
}
/// Produce a fresh PKCE verifier/challenge pair (RFC 7636, S256 method).
pub fn generate_pkce() -> PkceCodes {
    use base64::engine::general_purpose::URL_SAFE_NO_PAD;
    // 64 random bytes encode to an 86-char URL-safe verifier, comfortably
    // inside the 43..=128 character range the spec requires.
    let mut entropy = [0u8; 64];
    rand::rng().fill_bytes(&mut entropy);
    let verifier = URL_SAFE_NO_PAD.encode(entropy);
    // Challenge (S256): BASE64URL-ENCODE(SHA256(verifier)), unpadded.
    let challenge = URL_SAFE_NO_PAD.encode(Sha256::digest(verifier.as_bytes()));
    PkceCodes {
        code_verifier: verifier,
        code_challenge: challenge,
    }
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/login/src/lib.rs | codex-rs/login/src/lib.rs | mod device_code_auth;
mod pkce;
mod server;
pub use device_code_auth::run_device_code_login;
pub use server::LoginServer;
pub use server::ServerOptions;
pub use server::ShutdownHandle;
pub use server::run_login_server;
// Re-export commonly used auth types and helpers from codex-core for compatibility
pub use codex_app_server_protocol::AuthMode;
pub use codex_core::AuthManager;
pub use codex_core::CodexAuth;
pub use codex_core::auth::AuthDotJson;
pub use codex_core::auth::CLIENT_ID;
pub use codex_core::auth::CODEX_API_KEY_ENV_VAR;
pub use codex_core::auth::OPENAI_API_KEY_ENV_VAR;
pub use codex_core::auth::login_with_api_key;
pub use codex_core::auth::logout;
pub use codex_core::auth::save_auth;
pub use codex_core::token_data::TokenData;
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/login/src/server.rs | codex-rs/login/src/server.rs | use std::io::Cursor;
use std::io::Read;
use std::io::Write;
use std::io::{self};
use std::net::SocketAddr;
use std::net::TcpStream;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use crate::pkce::PkceCodes;
use crate::pkce::generate_pkce;
use base64::Engine;
use chrono::Utc;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::AuthDotJson;
use codex_core::auth::save_auth;
use codex_core::default_client::originator;
use codex_core::token_data::TokenData;
use codex_core::token_data::parse_id_token;
use rand::RngCore;
use serde_json::Value as JsonValue;
use tiny_http::Header;
use tiny_http::Request;
use tiny_http::Response;
use tiny_http::Server;
use tiny_http::StatusCode;
/// Default OAuth issuer base URL.
const DEFAULT_ISSUER: &str = "https://auth.openai.com";
/// Default local port for the login redirect server.
const DEFAULT_PORT: u16 = 1455;
/// Configuration for the local OAuth login server.
#[derive(Debug, Clone)]
pub struct ServerOptions {
    /// Directory where auth credentials are persisted.
    pub codex_home: PathBuf,
    /// OAuth client id sent to the issuer.
    pub client_id: String,
    /// Base URL of the OAuth issuer.
    pub issuer: String,
    /// Local port to bind the redirect server on.
    pub port: u16,
    /// Whether to open the authorize URL in the default browser.
    pub open_browser: bool,
    /// Fixed `state` value; when `None` a random one is generated.
    pub force_state: Option<String>,
    /// When set, only logins into this ChatGPT workspace are accepted.
    pub forced_chatgpt_workspace_id: Option<String>,
    /// Storage mode passed through to `save_auth` when persisting tokens.
    pub cli_auth_credentials_store_mode: AuthCredentialsStoreMode,
}
impl ServerOptions {
pub fn new(
codex_home: PathBuf,
client_id: String,
forced_chatgpt_workspace_id: Option<String>,
cli_auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> Self {
Self {
codex_home,
client_id,
issuer: DEFAULT_ISSUER.to_string(),
port: DEFAULT_PORT,
open_browser: true,
force_state: None,
forced_chatgpt_workspace_id,
cli_auth_credentials_store_mode,
}
}
}
/// Handle to a running login server: the URL to open plus handles to await
/// completion or cancel the flow.
pub struct LoginServer {
    /// Fully-composed authorize URL the user should open in a browser.
    pub auth_url: String,
    /// Port the redirect server reports it is bound to.
    pub actual_port: u16,
    /// Task driving the request loop; resolves when the login finishes.
    server_handle: tokio::task::JoinHandle<io::Result<()>>,
    /// Shared trigger used to stop the request loop.
    shutdown_handle: ShutdownHandle,
}
impl LoginServer {
    /// Wait until the login flow finishes (success, error, or cancellation).
    pub async fn block_until_done(self) -> io::Result<()> {
        self.server_handle
            .await
            .map_err(|err| io::Error::other(format!("login server thread panicked: {err:?}")))?
    }
    /// Signal the request loop to stop; the pending login fails.
    pub fn cancel(&self) {
        self.shutdown_handle.shutdown();
    }
    /// Get a clone of the shutdown handle for cancelling from elsewhere.
    pub fn cancel_handle(&self) -> ShutdownHandle {
        self.shutdown_handle.clone()
    }
}
/// Cloneable handle that can signal the login server's request loop to stop.
#[derive(Clone, Debug)]
pub struct ShutdownHandle {
    shutdown_notify: Arc<tokio::sync::Notify>,
}
impl ShutdownHandle {
    /// Wake every task currently waiting on the shutdown notification.
    pub fn shutdown(&self) {
        self.shutdown_notify.notify_waiters();
    }
}
/// Start the local OAuth login flow: bind the redirect server, build the
/// authorize URL (optionally opening it in the browser), and spawn a task
/// that serves callbacks until login completes, fails, or is cancelled.
///
/// Returns a [`LoginServer`] holding the authorize URL, the bound port, and
/// handles to await or cancel the flow.
pub fn run_login_server(opts: ServerOptions) -> io::Result<LoginServer> {
    let pkce = generate_pkce();
    let state = opts.force_state.clone().unwrap_or_else(generate_state);
    let server = bind_server(opts.port)?;
    // Resolve the port actually bound; tiny_http can report a non-IP address,
    // which we treat as a hard error.
    let actual_port = match server.server_addr().to_ip() {
        Some(addr) => addr.port(),
        None => {
            return Err(io::Error::new(
                io::ErrorKind::AddrInUse,
                "Unable to determine the server port",
            ));
        }
    };
    let server = Arc::new(server);
    let redirect_uri = format!("http://localhost:{actual_port}/auth/callback");
    let auth_url = build_authorize_url(
        &opts.issuer,
        &opts.client_id,
        &redirect_uri,
        &pkce,
        &state,
        opts.forced_chatgpt_workspace_id.as_deref(),
    );
    if opts.open_browser {
        // Best effort; the URL is also returned to the caller.
        let _ = webbrowser::open(&auth_url);
    }
    // Map blocking reads from server.recv() to an async channel.
    let (tx, mut rx) = tokio::sync::mpsc::channel::<Request>(16);
    let _server_handle = {
        let server = server.clone();
        thread::spawn(move || -> io::Result<()> {
            while let Ok(request) = server.recv() {
                tx.blocking_send(request).map_err(|e| {
                    eprintln!("Failed to send request to channel: {e}");
                    io::Error::other("Failed to send request to channel")
                })?;
            }
            Ok(())
        })
    };
    let shutdown_notify = Arc::new(tokio::sync::Notify::new());
    let server_handle = {
        let shutdown_notify = shutdown_notify.clone();
        let server = server;
        tokio::spawn(async move {
            // Serve requests until one of them terminates the flow or a
            // shutdown is requested.
            let result = loop {
                tokio::select! {
                    _ = shutdown_notify.notified() => {
                        break Err(io::Error::other("Login was not completed"));
                    }
                    maybe_req = rx.recv() => {
                        // Channel closed means the recv thread exited.
                        let Some(req) = maybe_req else {
                            break Err(io::Error::other("Login was not completed"));
                        };
                        let url_raw = req.url().to_string();
                        let response =
                            process_request(&url_raw, &opts, &redirect_uri, &pkce, actual_port, &state).await;
                        // Responding is blocking I/O, so hop to a blocking task.
                        let exit_result = match response {
                            HandledRequest::Response(response) => {
                                let _ = tokio::task::spawn_blocking(move || req.respond(response)).await;
                                None
                            }
                            HandledRequest::ResponseAndExit {
                                headers,
                                body,
                                result,
                            } => {
                                let _ = tokio::task::spawn_blocking(move || {
                                    send_response_with_disconnect(req, headers, body)
                                })
                                .await;
                                Some(result)
                            }
                            HandledRequest::RedirectWithHeader(header) => {
                                let redirect = Response::empty(302).with_header(header);
                                let _ = tokio::task::spawn_blocking(move || req.respond(redirect)).await;
                                None
                            }
                        };
                        if let Some(result) = exit_result {
                            break result;
                        }
                    }
                }
            };
            // Ensure that the server is unblocked so the thread dedicated to
            // running `server.recv()` in a loop exits cleanly.
            server.unblock();
            result
        })
    };
    Ok(LoginServer {
        auth_url,
        actual_port,
        server_handle,
        shutdown_handle: ShutdownHandle { shutdown_notify },
    })
}
/// Outcome of handling one HTTP request in the login flow.
enum HandledRequest {
    /// Plain response; the server keeps running.
    Response(Response<Cursor<Vec<u8>>>),
    /// Empty 302 response carrying the given header; server keeps running.
    RedirectWithHeader(Header),
    /// Final response: sent with `Connection: close`, then the server loop
    /// exits with `result`.
    ResponseAndExit {
        headers: Vec<Header>,
        body: Vec<u8>,
        result: io::Result<()>,
    },
}
/// Dispatch a single request by path: `/auth/callback` (the OAuth redirect),
/// `/success` (final landing page), `/cancel` (abort), anything else is 404.
async fn process_request(
    url_raw: &str,
    opts: &ServerOptions,
    redirect_uri: &str,
    pkce: &PkceCodes,
    actual_port: u16,
    state: &str,
) -> HandledRequest {
    // `url_raw` is only a path+query; prepend a dummy authority to parse it.
    let parsed_url = match url::Url::parse(&format!("http://localhost{url_raw}")) {
        Ok(u) => u,
        Err(e) => {
            eprintln!("URL parse error: {e}");
            return HandledRequest::Response(
                Response::from_string("Bad Request").with_status_code(400),
            );
        }
    };
    let path = parsed_url.path().to_string();
    match path.as_str() {
        "/auth/callback" => {
            let params: std::collections::HashMap<String, String> =
                parsed_url.query_pairs().into_owned().collect();
            // CSRF check: the state we issued must round-trip unchanged.
            if params.get("state").map(String::as_str) != Some(state) {
                return HandledRequest::Response(
                    Response::from_string("State mismatch").with_status_code(400),
                );
            }
            let code = match params.get("code") {
                Some(c) if !c.is_empty() => c.clone(),
                _ => {
                    return HandledRequest::Response(
                        Response::from_string("Missing authorization code").with_status_code(400),
                    );
                }
            };
            match exchange_code_for_tokens(&opts.issuer, &opts.client_id, redirect_uri, pkce, &code)
                .await
            {
                Ok(tokens) => {
                    // Enforce the optional workspace restriction before
                    // persisting anything.
                    if let Err(message) = ensure_workspace_allowed(
                        opts.forced_chatgpt_workspace_id.as_deref(),
                        &tokens.id_token,
                    ) {
                        eprintln!("Workspace restriction error: {message}");
                        return login_error_response(&message);
                    }
                    // Obtain API key via token-exchange and persist
                    let api_key = obtain_api_key(&opts.issuer, &opts.client_id, &tokens.id_token)
                        .await
                        .ok();
                    if let Err(err) = persist_tokens_async(
                        &opts.codex_home,
                        api_key.clone(),
                        tokens.id_token.clone(),
                        tokens.access_token.clone(),
                        tokens.refresh_token.clone(),
                        opts.cli_auth_credentials_store_mode,
                    )
                    .await
                    {
                        eprintln!("Persist error: {err}");
                        return HandledRequest::Response(
                            Response::from_string(format!("Unable to persist auth file: {err}"))
                                .with_status_code(500),
                        );
                    }
                    // Redirect the browser to the local /success page.
                    let success_url = compose_success_url(
                        actual_port,
                        &opts.issuer,
                        &tokens.id_token,
                        &tokens.access_token,
                    );
                    match tiny_http::Header::from_bytes(&b"Location"[..], success_url.as_bytes()) {
                        Ok(header) => HandledRequest::RedirectWithHeader(header),
                        Err(_) => HandledRequest::Response(
                            Response::from_string("Internal Server Error").with_status_code(500),
                        ),
                    }
                }
                Err(err) => {
                    eprintln!("Token exchange error: {err}");
                    HandledRequest::Response(
                        Response::from_string(format!("Token exchange failed: {err}"))
                            .with_status_code(500),
                    )
                }
            }
        }
        "/success" => {
            // Static landing page bundled at compile time; serving it ends
            // the flow successfully.
            let body = include_str!("assets/success.html");
            HandledRequest::ResponseAndExit {
                headers: match Header::from_bytes(
                    &b"Content-Type"[..],
                    &b"text/html; charset=utf-8"[..],
                ) {
                    Ok(header) => vec![header],
                    Err(_) => Vec::new(),
                },
                body: body.as_bytes().to_vec(),
                result: Ok(()),
            }
        }
        "/cancel" => HandledRequest::ResponseAndExit {
            headers: Vec::new(),
            body: b"Login cancelled".to_vec(),
            result: Err(io::Error::new(
                io::ErrorKind::Interrupted,
                "Login cancelled",
            )),
        },
        _ => HandledRequest::Response(Response::from_string("Not Found").with_status_code(404)),
    }
}
/// tiny_http filters `Connection` headers out of `Response` objects, so using
/// `req.respond` never informs the client (or the library) that a keep-alive
/// socket should be closed. That leaves the per-connection worker parked in a
/// loop waiting for more requests, which in turn causes the next login attempt
/// to hang on the old connection. This helper bypasses tiny_http’s response
/// machinery: it extracts the raw writer, prints the HTTP response manually,
/// and always appends `Connection: close`, ensuring the socket is closed from
/// the server side. Ideally, tiny_http would provide an API to control
/// server-side connection persistence, but it does not.
fn send_response_with_disconnect(
    req: Request,
    mut headers: Vec<Header>,
    body: Vec<u8>,
) -> io::Result<()> {
    // Status is hard-coded to 200: every ResponseAndExit path sends a final
    // user-visible page.
    let status = StatusCode(200);
    let mut writer = req.into_writer();
    let reason = status.default_reason_phrase();
    write!(writer, "HTTP/1.1 {} {}\r\n", status.0, reason)?;
    // Drop any caller-supplied Connection header so ours is authoritative.
    headers.retain(|h| !h.field.equiv("Connection"));
    if let Ok(close_header) = Header::from_bytes(&b"Connection"[..], &b"close"[..]) {
        headers.push(close_header);
    }
    // Content-Length must match the body exactly since we bypass tiny_http.
    let content_length_value = format!("{}", body.len());
    if let Ok(content_length_header) =
        Header::from_bytes(&b"Content-Length"[..], content_length_value.as_bytes())
    {
        headers.push(content_length_header);
    }
    for header in headers {
        write!(
            writer,
            "{}: {}\r\n",
            header.field.as_str(),
            header.value.as_str()
        )?;
    }
    writer.write_all(b"\r\n")?;
    writer.write_all(&body)?;
    writer.flush()
}
/// Compose the issuer's `/oauth/authorize` URL for this login attempt.
///
/// Query values are percent-encoded; the keys are plain ASCII identifiers and
/// are emitted as-is.
fn build_authorize_url(
    issuer: &str,
    client_id: &str,
    redirect_uri: &str,
    pkce: &PkceCodes,
    state: &str,
    forced_chatgpt_workspace_id: Option<&str>,
) -> String {
    let mut params: Vec<(&str, String)> = vec![
        ("response_type", "code".to_string()),
        ("client_id", client_id.to_string()),
        ("redirect_uri", redirect_uri.to_string()),
        ("scope", "openid profile email offline_access".to_string()),
        ("code_challenge", pkce.code_challenge.clone()),
        ("code_challenge_method", "S256".to_string()),
        ("id_token_add_organizations", "true".to_string()),
        ("codex_cli_simplified_flow", "true".to_string()),
        ("state", state.to_string()),
        ("originator", originator().value.as_str().to_string()),
    ];
    if let Some(workspace_id) = forced_chatgpt_workspace_id {
        params.push(("allowed_workspace_id", workspace_id.to_string()));
    }
    let query = params
        .iter()
        .map(|(key, value)| format!("{key}={}", urlencoding::encode(value)))
        .collect::<Vec<_>>()
        .join("&");
    format!("{issuer}/oauth/authorize?{query}")
}
/// Produce a random, URL-safe `state` value (CSRF token) for the OAuth flow.
fn generate_state() -> String {
    let mut buf = [0u8; 32];
    rand::rng().fill_bytes(&mut buf);
    base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(buf)
}
/// Best-effort `GET /cancel` against a login server already occupying `port`
/// so it releases the address. The reply is read (and discarded) only to let
/// the peer finish; failures to read are ignored.
fn send_cancel_request(port: u16) -> io::Result<()> {
    let addr: SocketAddr = format!("127.0.0.1:{port}")
        .parse()
        .map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err))?;
    let timeout = Duration::from_secs(2);
    let mut stream = TcpStream::connect_timeout(&addr, timeout)?;
    stream.set_read_timeout(Some(timeout))?;
    stream.set_write_timeout(Some(timeout))?;
    let request =
        format!("GET /cancel HTTP/1.1\r\nHost: 127.0.0.1:{port}\r\nConnection: close\r\n\r\n");
    stream.write_all(request.as_bytes())?;
    // Drain a little of the response; the outcome does not matter.
    let _ = stream.read(&mut [0u8; 64]);
    Ok(())
}
/// Bind the HTTP server on 127.0.0.1:`port`, retrying on `AddrInUse`.
///
/// A busy port is assumed to belong to a previous login server: a one-shot
/// `/cancel` request is sent to it (at most once), then binding is retried up
/// to `MAX_ATTEMPTS` times with a short delay before giving up.
fn bind_server(port: u16) -> io::Result<Server> {
    let bind_address = format!("127.0.0.1:{port}");
    let mut cancel_attempted = false;
    let mut attempts = 0;
    const MAX_ATTEMPTS: u32 = 10;
    const RETRY_DELAY: Duration = Duration::from_millis(200);
    loop {
        match Server::http(&bind_address) {
            Ok(server) => return Ok(server),
            Err(err) => {
                attempts += 1;
                // tiny_http returns a boxed error; dig out the io::Error to
                // classify it.
                let is_addr_in_use = err
                    .downcast_ref::<io::Error>()
                    .map(|io_err| io_err.kind() == io::ErrorKind::AddrInUse)
                    .unwrap_or(false);
                // If the address is in use, there is probably another instance of the login server
                // running. Attempt to cancel it and retry.
                if is_addr_in_use {
                    if !cancel_attempted {
                        cancel_attempted = true;
                        if let Err(cancel_err) = send_cancel_request(port) {
                            eprintln!("Failed to cancel previous login server: {cancel_err}");
                        }
                    }
                    thread::sleep(RETRY_DELAY);
                    if attempts >= MAX_ATTEMPTS {
                        return Err(io::Error::new(
                            io::ErrorKind::AddrInUse,
                            format!("Port {bind_address} is already in use"),
                        ));
                    }
                    continue;
                }
                // Any other bind failure is fatal immediately.
                return Err(io::Error::other(err));
            }
        }
    }
}
/// Token set returned by the issuer's `/oauth/token` endpoint.
pub(crate) struct ExchangedTokens {
    pub id_token: String,
    pub access_token: String,
    pub refresh_token: String,
}
/// Exchange an authorization code (plus the PKCE verifier) for id, access,
/// and refresh tokens at `{issuer}/oauth/token`.
///
/// # Errors
/// Transport failures, non-2xx statuses, and bodies that do not deserialize
/// into the expected token fields are all surfaced as `io::Error`.
pub(crate) async fn exchange_code_for_tokens(
    issuer: &str,
    client_id: &str,
    redirect_uri: &str,
    pkce: &PkceCodes,
    code: &str,
) -> io::Result<ExchangedTokens> {
    #[derive(serde::Deserialize)]
    struct TokenResponse {
        id_token: String,
        access_token: String,
        refresh_token: String,
    }
    let client = reqwest::Client::new();
    let resp = client
        .post(format!("{issuer}/oauth/token"))
        .header("Content-Type", "application/x-www-form-urlencoded")
        .body(format!(
            "grant_type=authorization_code&code={}&redirect_uri={}&client_id={}&code_verifier={}",
            urlencoding::encode(code),
            urlencoding::encode(redirect_uri),
            urlencoding::encode(client_id),
            urlencoding::encode(&pkce.code_verifier)
        ))
        .send()
        .await
        .map_err(io::Error::other)?;
    if !resp.status().is_success() {
        return Err(io::Error::other(format!(
            "token endpoint returned status {}",
            resp.status()
        )));
    }
    let tokens: TokenResponse = resp.json().await.map_err(io::Error::other)?;
    Ok(ExchangedTokens {
        id_token: tokens.id_token,
        access_token: tokens.access_token,
        refresh_token: tokens.refresh_token,
    })
}
/// Persist the exchanged tokens (and optional API key) as an `AuthDotJson`
/// under `codex_home`, stamping `last_refresh` with the current time.
///
/// The `chatgpt_account_id` claim, when present in the id token, is copied
/// into `TokenData::account_id`.
pub(crate) async fn persist_tokens_async(
    codex_home: &Path,
    api_key: Option<String>,
    id_token: String,
    access_token: String,
    refresh_token: String,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> io::Result<()> {
    // Reuse existing synchronous logic but run it off the async runtime.
    let codex_home = codex_home.to_path_buf();
    tokio::task::spawn_blocking(move || {
        let mut tokens = TokenData {
            id_token: parse_id_token(&id_token).map_err(io::Error::other)?,
            access_token,
            refresh_token,
            account_id: None,
        };
        if let Some(acc) = jwt_auth_claims(&id_token)
            .get("chatgpt_account_id")
            .and_then(|v| v.as_str())
        {
            tokens.account_id = Some(acc.to_string());
        }
        let auth = AuthDotJson {
            openai_api_key: api_key,
            tokens: Some(tokens),
            last_refresh: Some(Utc::now()),
        };
        save_auth(&codex_home, &auth, auth_credentials_store_mode)
    })
    .await
    .map_err(|e| io::Error::other(format!("persist task failed: {e}")))?
}
/// Build the local `/success` URL the browser is redirected to after a
/// completed login, embedding claims pulled from the id and access tokens.
/// Missing claims default to empty strings / `false`.
fn compose_success_url(port: u16, issuer: &str, id_token: &str, access_token: &str) -> String {
    let token_claims = jwt_auth_claims(id_token);
    let access_claims = jwt_auth_claims(access_token);
    let org_id = token_claims
        .get("organization_id")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    let project_id = token_claims
        .get("project_id")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    let completed_onboarding = token_claims
        .get("completed_platform_onboarding")
        .and_then(JsonValue::as_bool)
        .unwrap_or(false);
    let is_org_owner = token_claims
        .get("is_org_owner")
        .and_then(JsonValue::as_bool)
        .unwrap_or(false);
    // Setup is flagged only for org owners who have not completed onboarding.
    let needs_setup = (!completed_onboarding) && is_org_owner;
    let plan_type = access_claims
        .get("chatgpt_plan_type")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    let platform_url = if issuer == DEFAULT_ISSUER {
        "https://platform.openai.com"
    } else {
        "https://platform.api.openai.org"
    };
    // Fixed-size list; no mutation needed (the original built a `mut Vec` and
    // `drain`ed it).
    let params = [
        ("id_token", id_token.to_string()),
        ("needs_setup", needs_setup.to_string()),
        ("org_id", org_id.to_string()),
        ("project_id", project_id.to_string()),
        ("plan_type", plan_type.to_string()),
        ("platform_url", platform_url.to_string()),
    ];
    // Values are percent-encoded; keys are plain ASCII identifiers.
    let qs = params
        .into_iter()
        .map(|(k, v)| format!("{}={}", k, urlencoding::encode(&v)))
        .collect::<Vec<_>>()
        .join("&");
    format!("http://localhost:{port}/success?{qs}")
}
/// Best-effort extraction of the `https://api.openai.com/auth` claims object
/// from a JWT payload.
///
/// Returns an empty map (after logging to stderr) on any malformed input.
/// Note: the token signature is NOT verified here — this only decodes the
/// payload segment.
fn jwt_auth_claims(jwt: &str) -> serde_json::Map<String, serde_json::Value> {
    // A JWT is `header.payload.signature`; all three parts must be non-empty.
    let mut parts = jwt.split('.');
    let (_h, payload_b64, _s) = match (parts.next(), parts.next(), parts.next()) {
        (Some(h), Some(p), Some(s)) if !h.is_empty() && !p.is_empty() && !s.is_empty() => (h, p, s),
        _ => {
            eprintln!("Invalid JWT format while extracting claims");
            return serde_json::Map::new();
        }
    };
    match base64::engine::general_purpose::URL_SAFE_NO_PAD.decode(payload_b64) {
        Ok(bytes) => match serde_json::from_slice::<serde_json::Value>(&bytes) {
            Ok(mut v) => {
                if let Some(obj) = v
                    .get_mut("https://api.openai.com/auth")
                    .and_then(|x| x.as_object_mut())
                {
                    return obj.clone();
                }
                eprintln!("JWT payload missing expected 'https://api.openai.com/auth' object");
            }
            Err(e) => {
                eprintln!("Failed to parse JWT JSON payload: {e}");
            }
        },
        Err(e) => {
            eprintln!("Failed to base64url-decode JWT payload: {e}");
        }
    }
    serde_json::Map::new()
}
/// Enforce an optional workspace restriction on a login.
///
/// When `expected` is `Some`, the id token's `chatgpt_account_id` claim must
/// be present and match it exactly; otherwise a user-displayable error
/// message is returned. `None` means no restriction.
pub(crate) fn ensure_workspace_allowed(
    expected: Option<&str>,
    id_token: &str,
) -> Result<(), String> {
    let Some(expected) = expected else {
        return Ok(());
    };
    let claims = jwt_auth_claims(id_token);
    let Some(actual) = claims.get("chatgpt_account_id").and_then(JsonValue::as_str) else {
        return Err("Login is restricted to a specific workspace, but the token did not include a chatgpt_account_id claim.".to_string());
    };
    if actual == expected {
        Ok(())
    } else {
        Err(format!("Login is restricted to workspace id {expected}."))
    }
}
// Send the error back so the authorization code becomes unusable by anybody
// else, and terminate the login flow with PermissionDenied.
fn login_error_response(message: &str) -> HandledRequest {
    let headers = Header::from_bytes(&b"Content-Type"[..], &b"text/plain; charset=utf-8"[..])
        .map(|header| vec![header])
        .unwrap_or_default();
    HandledRequest::ResponseAndExit {
        headers,
        body: message.as_bytes().to_vec(),
        result: Err(io::Error::new(
            io::ErrorKind::PermissionDenied,
            message.to_string(),
        )),
    }
}
/// Exchange an id token for an OpenAI API key via the OAuth token-exchange
/// grant (`urn:ietf:params:oauth:grant-type:token-exchange`).
///
/// # Errors
/// Transport failures, non-2xx statuses, and unexpected response bodies are
/// surfaced as `io::Error`.
pub(crate) async fn obtain_api_key(
    issuer: &str,
    client_id: &str,
    id_token: &str,
) -> io::Result<String> {
    // Token exchange for an API key access token
    #[derive(serde::Deserialize)]
    struct ExchangeResp {
        access_token: String,
    }
    let client = reqwest::Client::new();
    let resp = client
        .post(format!("{issuer}/oauth/token"))
        .header("Content-Type", "application/x-www-form-urlencoded")
        .body(format!(
            "grant_type={}&client_id={}&requested_token={}&subject_token={}&subject_token_type={}",
            urlencoding::encode("urn:ietf:params:oauth:grant-type:token-exchange"),
            urlencoding::encode(client_id),
            urlencoding::encode("openai-api-key"),
            urlencoding::encode(id_token),
            urlencoding::encode("urn:ietf:params:oauth:token-type:id_token")
        ))
        .send()
        .await
        .map_err(io::Error::other)?;
    if !resp.status().is_success() {
        return Err(io::Error::other(format!(
            "api key exchange failed with status {}",
            resp.status()
        )));
    }
    let body: ExchangeResp = resp.json().await.map_err(io::Error::other)?;
    Ok(body.access_token)
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/login/src/device_code_auth.rs | codex-rs/login/src/device_code_auth.rs | use reqwest::StatusCode;
use serde::Deserialize;
use serde::Serialize;
use serde::de::Deserializer;
use serde::de::{self};
use std::time::Duration;
use std::time::Instant;
use crate::pkce::PkceCodes;
use crate::server::ServerOptions;
use std::io;
// ANSI escape sequences used to colorize the device-code prompt.
const ANSI_BLUE: &str = "\x1b[94m";
const ANSI_GRAY: &str = "\x1b[90m";
const ANSI_RESET: &str = "\x1b[0m";
/// Server response to the user-code request.
#[derive(Deserialize)]
struct UserCodeResp {
    /// Opaque id for this device-auth attempt.
    device_auth_id: String,
    /// One-time code the user enters in the browser; accepted under either
    /// `user_code` or `usercode` on the wire.
    #[serde(alias = "user_code", alias = "usercode")]
    user_code: String,
    /// Polling interval in seconds; the server sends it as a string (see
    /// `deserialize_interval`). Defaults to 0 when absent — NOTE(review):
    /// that makes the poll loop sleep 0s between attempts; confirm the server
    /// always sends it.
    #[serde(default, deserialize_with = "deserialize_interval")]
    interval: u64,
}
/// Request body for the user-code endpoint.
#[derive(Serialize)]
struct UserCodeReq {
    client_id: String,
}
/// Request body for polling the device-auth token endpoint.
#[derive(Serialize)]
struct TokenPollReq {
    device_auth_id: String,
    user_code: String,
}
fn deserialize_interval<'de, D>(deserializer: D) -> Result<u64, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
s.trim()
.parse::<u64>()
.map_err(|e| de::Error::custom(format!("invalid u64 string: {e}")))
}
/// Payload returned once the user has authorized the device: an
/// authorization code plus the PKCE pair needed to redeem it.
#[derive(Deserialize)]
struct CodeSuccessResp {
    authorization_code: String,
    code_challenge: String,
    code_verifier: String,
}
/// Request the user code and polling interval.
///
/// # Errors
/// A 404 is translated into a "device code login is not enabled" message;
/// any other non-2xx status, transport failure, or undecodable body is
/// surfaced as a generic `io::Error`.
async fn request_user_code(
    client: &reqwest::Client,
    auth_base_url: &str,
    client_id: &str,
) -> std::io::Result<UserCodeResp> {
    let url = format!("{auth_base_url}/deviceauth/usercode");
    let body = serde_json::to_string(&UserCodeReq {
        client_id: client_id.to_string(),
    })
    .map_err(std::io::Error::other)?;
    let resp = client
        .post(url)
        .header("Content-Type", "application/json")
        .body(body)
        .send()
        .await
        .map_err(std::io::Error::other)?;
    if !resp.status().is_success() {
        let status = resp.status();
        // 404 means the endpoint does not exist on this server at all.
        if status == StatusCode::NOT_FOUND {
            return Err(std::io::Error::other(
                "device code login is not enabled for this Codex server. Use the browser login or verify the server URL.",
            ));
        }
        return Err(std::io::Error::other(format!(
            "device code request failed with status {status}"
        )));
    }
    let body = resp.text().await.map_err(std::io::Error::other)?;
    serde_json::from_str(&body).map_err(std::io::Error::other)
}
/// Poll token endpoint until a code is issued or timeout occurs.
///
/// 403/404 responses are treated as "not authorized yet" and polled again
/// after `interval` seconds (capped by the remaining time budget of 15
/// minutes); any other failure aborts immediately.
/// NOTE(review): an `interval` of 0 makes this loop poll without sleeping —
/// confirm the server always supplies a positive interval.
async fn poll_for_token(
    client: &reqwest::Client,
    auth_base_url: &str,
    device_auth_id: &str,
    user_code: &str,
    interval: u64,
) -> std::io::Result<CodeSuccessResp> {
    let url = format!("{auth_base_url}/deviceauth/token");
    let max_wait = Duration::from_secs(15 * 60);
    let start = Instant::now();
    loop {
        let body = serde_json::to_string(&TokenPollReq {
            device_auth_id: device_auth_id.to_string(),
            user_code: user_code.to_string(),
        })
        .map_err(std::io::Error::other)?;
        let resp = client
            .post(&url)
            .header("Content-Type", "application/json")
            .body(body)
            .send()
            .await
            .map_err(std::io::Error::other)?;
        let status = resp.status();
        if status.is_success() {
            return resp.json().await.map_err(std::io::Error::other);
        }
        if status == StatusCode::FORBIDDEN || status == StatusCode::NOT_FOUND {
            if start.elapsed() >= max_wait {
                return Err(std::io::Error::other(
                    "device auth timed out after 15 minutes",
                ));
            }
            // Never sleep past the overall deadline.
            let sleep_for = Duration::from_secs(interval).min(max_wait - start.elapsed());
            tokio::time::sleep(sleep_for).await;
            continue;
        }
        return Err(std::io::Error::other(format!(
            "device auth failed with status {}",
            resp.status()
        )));
    }
}
/// Print the sign-in instructions and the one-time device code to stdout.
fn print_device_code_prompt(code: &str) {
    // Bind both values as locals so the format string can use inline capture.
    let version = env!("CARGO_PKG_VERSION");
    println!(
        "\nWelcome to Codex [v{ANSI_GRAY}{version}{ANSI_RESET}]\n{ANSI_GRAY}OpenAI's command-line coding agent{ANSI_RESET}\n\
        \nFollow these steps to sign in with ChatGPT using device code authorization:\n\
        \n1. Open this link in your browser and sign in to your account\n   {ANSI_BLUE}https://auth.openai.com/codex/device{ANSI_RESET}\n\
        \n2. Enter this one-time code {ANSI_GRAY}(expires in 15 minutes){ANSI_RESET}\n   {ANSI_BLUE}{code}{ANSI_RESET}\n\
        \n{ANSI_GRAY}Device codes are a common phishing target. Never share this code.{ANSI_RESET}\n"
    );
}
/// Full device code login flow.
///
/// Requests a user code, shows the sign-in prompt, polls until the device is
/// authorized, exchanges the resulting code for tokens, enforces any forced
/// workspace restriction, and finally persists the credentials.
pub async fn run_device_code_login(opts: ServerOptions) -> std::io::Result<()> {
    let client = reqwest::Client::new();
    let base_url = opts.issuer.trim_end_matches('/');
    let api_base_url = format!("{base_url}/api/accounts");

    let user_code = request_user_code(&client, &api_base_url, &opts.client_id).await?;
    print_device_code_prompt(&user_code.user_code);

    let poll_resp = poll_for_token(
        &client,
        &api_base_url,
        &user_code.device_auth_id,
        &user_code.user_code,
        user_code.interval,
    )
    .await?;

    let pkce = PkceCodes {
        code_verifier: poll_resp.code_verifier,
        code_challenge: poll_resp.code_challenge,
    };
    let redirect_uri = format!("{base_url}/deviceauth/callback");
    let tokens = crate::server::exchange_code_for_tokens(
        base_url,
        &opts.client_id,
        &redirect_uri,
        &pkce,
        &poll_resp.authorization_code,
    )
    .await
    .map_err(|err| std::io::Error::other(format!("device code exchange failed: {err}")))?;

    // Refuse to persist credentials for the wrong workspace.
    if let Err(message) = crate::server::ensure_workspace_allowed(
        opts.forced_chatgpt_workspace_id.as_deref(),
        &tokens.id_token,
    ) {
        return Err(io::Error::new(io::ErrorKind::PermissionDenied, message));
    }

    crate::server::persist_tokens_async(
        &opts.codex_home,
        None,
        tokens.id_token,
        tokens.access_token,
        tokens.refresh_token,
        opts.cli_auth_credentials_store_mode,
    )
    .await
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/login/tests/all.rs | codex-rs/login/tests/all.rs | // Single integration test binary that aggregates all test modules.
// The submodules live in `tests/suite/`.
mod suite;
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/login/tests/suite/login_server_e2e.rs | codex-rs/login/tests/suite/login_server_e2e.rs | #![allow(clippy::unwrap_used)]
use std::io;
use std::net::SocketAddr;
use std::net::TcpListener;
use std::thread;
use std::time::Duration;
use anyhow::Result;
use base64::Engine;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_login::ServerOptions;
use codex_login::run_login_server;
use core_test_support::skip_if_no_network;
use tempfile::tempdir;
// See spawn.rs for details
/// Spawn a minimal mock OAuth issuer on a random localhost port.
///
/// Only `/oauth/token` is implemented: it returns a fixed token payload whose
/// unsigned (`alg: none`) id_token embeds `chatgpt_account_id` and a "pro"
/// plan. Every other path answers 404. Returns the bound address and the
/// server thread's join handle (dropping the handle does not stop the thread).
fn start_mock_issuer(chatgpt_account_id: &str) -> (SocketAddr, thread::JoinHandle<()>) {
    // Bind to a random available port
    let listener = TcpListener::bind(("127.0.0.1", 0)).unwrap();
    let addr = listener.local_addr().unwrap();
    let server = tiny_http::Server::from_listener(listener, None).unwrap();
    // Own a copy so the serving thread can outlive the borrowed argument.
    let chatgpt_account_id = chatgpt_account_id.to_string();
    let handle = thread::spawn(move || {
        while let Ok(mut req) = server.recv() {
            let url = req.url().to_string();
            if url.starts_with("/oauth/token") {
                // Read body
                let mut body = String::new();
                let _ = req.as_reader().read_to_string(&mut body);
                // Build minimal JWT with plan=pro
                #[derive(serde::Serialize)]
                struct Header {
                    alg: &'static str,
                    typ: &'static str,
                }
                let header = Header {
                    alg: "none",
                    typ: "JWT",
                };
                let payload = serde_json::json!({
                    "email": "user@example.com",
                    "https://api.openai.com/auth": {
                        "chatgpt_plan_type": "pro",
                        "chatgpt_account_id": chatgpt_account_id,
                    }
                });
                // URL-safe base64 without padding, as JWTs require.
                let b64 = |b: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(b);
                let header_bytes = serde_json::to_vec(&header).unwrap();
                let payload_bytes = serde_json::to_vec(&payload).unwrap();
                let id_token = format!(
                    "{}.{}.{}",
                    b64(&header_bytes),
                    b64(&payload_bytes),
                    b64(b"sig")
                );
                let tokens = serde_json::json!({
                    "id_token": id_token,
                    "access_token": "access-123",
                    "refresh_token": "refresh-123",
                });
                let data = serde_json::to_vec(&tokens).unwrap();
                let mut resp = tiny_http::Response::from_data(data);
                resp.add_header(
                    tiny_http::Header::from_bytes(&b"Content-Type"[..], &b"application/json"[..])
                        .unwrap_or_else(|_| panic!("header bytes")),
                );
                let _ = req.respond(resp);
            } else {
                let _ = req
                    .respond(tiny_http::Response::from_string("not found").with_status_code(404));
            }
        }
    });
    (addr, handle)
}
/// End-to-end browser-callback login: tokens from the mock issuer must
/// replace any stale contents of an existing auth.json.
#[tokio::test]
async fn end_to_end_login_flow_persists_auth_json() -> Result<()> {
    skip_if_no_network!(Ok(()));
    let chatgpt_account_id = "12345678-0000-0000-0000-000000000000";
    let (issuer_addr, issuer_handle) = start_mock_issuer(chatgpt_account_id);
    let issuer = format!("http://{}:{}", issuer_addr.ip(), issuer_addr.port());
    let tmp = tempdir()?;
    let codex_home = tmp.path().to_path_buf();
    // Seed auth.json with stale API key + tokens that should be overwritten.
    let stale_auth = serde_json::json!({
        "OPENAI_API_KEY": "sk-stale",
        "tokens": {
            "id_token": "stale.header.payload",
            "access_token": "stale-access",
            "refresh_token": "stale-refresh",
            "account_id": "stale-acc"
        }
    });
    std::fs::write(
        codex_home.join("auth.json"),
        serde_json::to_string_pretty(&stale_auth)?,
    )?;
    // Forcing a known state lets us fake the browser callback below without
    // parsing the generated auth URL.
    let state = "test_state_123".to_string();
    // Run server in background
    let server_home = codex_home.clone();
    let opts = ServerOptions {
        codex_home: server_home,
        cli_auth_credentials_store_mode: AuthCredentialsStoreMode::File,
        client_id: codex_login::CLIENT_ID.to_string(),
        issuer,
        port: 0,
        open_browser: false,
        force_state: Some(state),
        forced_chatgpt_workspace_id: Some(chatgpt_account_id.to_string()),
    };
    let server = run_login_server(opts)?;
    assert!(
        server
            .auth_url
            .contains(format!("allowed_workspace_id={chatgpt_account_id}").as_str()),
        "auth URL should include forced workspace parameter"
    );
    let login_port = server.actual_port;
    // Simulate browser callback, and follow redirect to /success
    let client = reqwest::Client::builder()
        .redirect(reqwest::redirect::Policy::limited(5))
        .build()?;
    let url = format!("http://127.0.0.1:{login_port}/auth/callback?code=abc&state=test_state_123");
    let resp = client.get(&url).send().await?;
    assert!(resp.status().is_success());
    // Wait for server shutdown
    server.block_until_done().await?;
    // Validate auth.json
    let auth_path = codex_home.join("auth.json");
    let data = std::fs::read_to_string(&auth_path)?;
    let json: serde_json::Value = serde_json::from_str(&data)?;
    // The following assert is here because of the old oauth flow that exchanges tokens for an
    // API key. See obtain_api_key in server.rs for details. Once we remove this old mechanism
    // from the code, this test should be updated to expect that the API key is no longer present.
    assert_eq!(json["OPENAI_API_KEY"], "access-123");
    assert_eq!(json["tokens"]["access_token"], "access-123");
    assert_eq!(json["tokens"]["refresh_token"], "refresh-123");
    assert_eq!(json["tokens"]["account_id"], chatgpt_account_id);
    // Stop mock issuer
    drop(issuer_handle);
    Ok(())
}
/// The login server must create the codex home directory (and write
/// auth.json into it) when the directory does not exist yet.
#[tokio::test]
async fn creates_missing_codex_home_dir() -> Result<()> {
    skip_if_no_network!(Ok(()));
    let (issuer_addr, _issuer_handle) = start_mock_issuer("org-123");
    let issuer = format!("http://{}:{}", issuer_addr.ip(), issuer_addr.port());
    let tmp = tempdir()?;
    let codex_home = tmp.path().join("missing-subdir"); // does not exist
    let state = "state2".to_string();
    // Run server in background
    let server_home = codex_home.clone();
    let opts = ServerOptions {
        codex_home: server_home,
        cli_auth_credentials_store_mode: AuthCredentialsStoreMode::File,
        client_id: codex_login::CLIENT_ID.to_string(),
        issuer,
        port: 0,
        open_browser: false,
        force_state: Some(state),
        forced_chatgpt_workspace_id: None,
    };
    let server = run_login_server(opts)?;
    let login_port = server.actual_port;
    // Simulate the browser hitting the OAuth callback with the forced state.
    let client = reqwest::Client::new();
    let url = format!("http://127.0.0.1:{login_port}/auth/callback?code=abc&state=state2");
    let resp = client.get(&url).send().await?;
    assert!(resp.status().is_success());
    server.block_until_done().await?;
    let auth_path = codex_home.join("auth.json");
    assert!(
        auth_path.exists(),
        "auth.json should be created even if parent dir was missing"
    );
    Ok(())
}
/// When a forced workspace id does not match the one in the issued id_token,
/// login must fail with PermissionDenied and nothing may be persisted.
#[tokio::test]
async fn forced_chatgpt_workspace_id_mismatch_blocks_login() -> Result<()> {
    skip_if_no_network!(Ok(()));
    // Issuer reports "org-actual"; we will require "org-required" below.
    let (issuer_addr, _issuer_handle) = start_mock_issuer("org-actual");
    let issuer = format!("http://{}:{}", issuer_addr.ip(), issuer_addr.port());
    let tmp = tempdir()?;
    let codex_home = tmp.path().to_path_buf();
    let state = "state-mismatch".to_string();
    let opts = ServerOptions {
        codex_home: codex_home.clone(),
        cli_auth_credentials_store_mode: AuthCredentialsStoreMode::File,
        client_id: codex_login::CLIENT_ID.to_string(),
        issuer,
        port: 0,
        open_browser: false,
        force_state: Some(state.clone()),
        forced_chatgpt_workspace_id: Some("org-required".to_string()),
    };
    let server = run_login_server(opts)?;
    assert!(
        server
            .auth_url
            .contains("allowed_workspace_id=org-required"),
        "auth URL should include forced workspace parameter"
    );
    let login_port = server.actual_port;
    let client = reqwest::Client::new();
    let url = format!("http://127.0.0.1:{login_port}/auth/callback?code=abc&state={state}");
    let resp = client.get(&url).send().await?;
    // The HTTP response itself succeeds; the error is surfaced in the body
    // and via the server's final result below.
    assert!(resp.status().is_success());
    let body = resp.text().await?;
    assert!(
        body.contains("Login is restricted to workspace id org-required"),
        "error body should mention workspace restriction"
    );
    let result = server.block_until_done().await;
    assert!(
        result.is_err(),
        "login should fail due to workspace mismatch"
    );
    let err = result.unwrap_err();
    assert_eq!(err.kind(), io::ErrorKind::PermissionDenied);
    let auth_path = codex_home.join("auth.json");
    assert!(
        !auth_path.exists(),
        "auth.json should not be written when the workspace mismatches"
    );
    Ok(())
}
/// Starting a second login server on the same port must cancel the first one
/// (its task resolves with ErrorKind::Interrupted) and take over the port.
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn cancels_previous_login_server_when_port_is_in_use() -> Result<()> {
    skip_if_no_network!(Ok(()));
    let (issuer_addr, _issuer_handle) = start_mock_issuer("org-123");
    let issuer = format!("http://{}:{}", issuer_addr.ip(), issuer_addr.port());
    let first_tmp = tempdir()?;
    let first_codex_home = first_tmp.path().to_path_buf();
    let first_opts = ServerOptions {
        codex_home: first_codex_home,
        cli_auth_credentials_store_mode: AuthCredentialsStoreMode::File,
        client_id: codex_login::CLIENT_ID.to_string(),
        issuer: issuer.clone(),
        port: 0,
        open_browser: false,
        force_state: Some("cancel_state".to_string()),
        forced_chatgpt_workspace_id: None,
    };
    let first_server = run_login_server(first_opts)?;
    let login_port = first_server.actual_port;
    let first_server_task = tokio::spawn(async move { first_server.block_until_done().await });
    // Give the first server a moment to start listening before contending
    // for its port.
    tokio::time::sleep(Duration::from_millis(100)).await;
    let second_tmp = tempdir()?;
    let second_codex_home = second_tmp.path().to_path_buf();
    let second_opts = ServerOptions {
        codex_home: second_codex_home,
        cli_auth_credentials_store_mode: AuthCredentialsStoreMode::File,
        client_id: codex_login::CLIENT_ID.to_string(),
        issuer,
        port: login_port,
        open_browser: false,
        force_state: Some("cancel_state_2".to_string()),
        forced_chatgpt_workspace_id: None,
    };
    let second_server = run_login_server(second_opts)?;
    assert_eq!(second_server.actual_port, login_port);
    // The first server should have been evicted and report Interrupted.
    let cancel_result = first_server_task
        .await
        .expect("first login server task panicked")
        .expect_err("login server should report cancellation");
    assert_eq!(cancel_result.kind(), io::ErrorKind::Interrupted);
    // Explicitly cancel the second server via its /cancel endpoint.
    let client = reqwest::Client::new();
    let cancel_url = format!("http://127.0.0.1:{login_port}/cancel");
    let resp = client.get(cancel_url).send().await?;
    assert!(resp.status().is_success());
    second_server
        .block_until_done()
        .await
        .expect_err("second login server should report cancellation");
    Ok(())
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/login/tests/suite/mod.rs | codex-rs/login/tests/suite/mod.rs | // Aggregates all former standalone integration tests as modules.
mod device_code_login;
mod login_server_e2e;
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/login/tests/suite/device_code_login.rs | codex-rs/login/tests/suite/device_code_login.rs | #![allow(clippy::unwrap_used)]
use anyhow::Context;
use base64::Engine;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::load_auth_dot_json;
use codex_login::ServerOptions;
use codex_login::run_device_code_login;
use serde_json::json;
use std::sync::Arc;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use tempfile::tempdir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::Request;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;
use core_test_support::skip_if_no_network;
// ---------- Small helpers ----------
/// Build an unsigned (`alg: none`) JWT carrying `payload` as its claims.
fn make_jwt(payload: serde_json::Value) -> String {
    let encode = |bytes: &[u8]| URL_SAFE_NO_PAD.encode(bytes);
    let header = json!({ "alg": "none", "typ": "JWT" });
    format!(
        "{}.{}.{}",
        encode(&serde_json::to_vec(&header).unwrap()),
        encode(&serde_json::to_vec(&payload).unwrap()),
        encode(b"sig")
    )
}
/// Mount a usercode mock that succeeds with a fixed device auth id and code.
async fn mock_usercode_success(server: &MockServer) {
    let response = ResponseTemplate::new(200).set_body_json(json!({
        "device_auth_id": "device-auth-123",
        "user_code": "CODE-12345",
        // NOTE: Interval is kept 0 in order to avoid waiting for the interval to pass
        "interval": "0"
    }));
    Mock::given(method("POST"))
        .and(path("/api/accounts/deviceauth/usercode"))
        .respond_with(response)
        .mount(server)
        .await;
}
/// Mount a usercode mock that always fails with the given HTTP `status`.
async fn mock_usercode_failure(server: &MockServer, status: u16) {
    let failure = ResponseTemplate::new(status);
    Mock::given(method("POST"))
        .and(path("/api/accounts/deviceauth/usercode"))
        .respond_with(failure)
        .mount(server)
        .await;
}
/// Mount a token-poll mock that answers `first_response_status` on the first
/// call and a successful code payload on the second. Expects exactly two
/// calls; `counter` tracks the attempt number and is shared with the caller.
async fn mock_poll_token_two_step(
    server: &MockServer,
    counter: Arc<AtomicUsize>,
    first_response_status: u16,
) {
    Mock::given(method("POST"))
        .and(path("/api/accounts/deviceauth/token"))
        // Move the Arc straight into the closure; the extra `.clone()` the
        // previous version made was redundant since `counter` was not used
        // again afterwards.
        .respond_with(move |_: &Request| {
            let attempt = counter.fetch_add(1, Ordering::SeqCst);
            if attempt == 0 {
                ResponseTemplate::new(first_response_status)
            } else {
                ResponseTemplate::new(200).set_body_json(json!({
                    "authorization_code": "poll-code-321",
                    "code_challenge": "code-challenge-321",
                    "code_verifier": "code-verifier-321"
                }))
            }
        })
        .expect(2)
        .mount(server)
        .await;
}
/// Mount a single mock at `endpoint` that always answers with `response`.
async fn mock_poll_token_single(server: &MockServer, endpoint: &str, response: ResponseTemplate) {
    let mock = Mock::given(method("POST"))
        .and(path(endpoint))
        .respond_with(response);
    mock.mount(server).await;
}
/// Mount an OAuth token-exchange mock that returns `jwt` as the id_token
/// alongside fixed access/refresh tokens.
async fn mock_oauth_token_single(server: &MockServer, jwt: String) {
    Mock::given(method("POST"))
        .and(path("/oauth/token"))
        .respond_with(ResponseTemplate::new(200).set_body_json(json!({
            // `jwt` is owned and only used here, so no clone is needed.
            "id_token": jwt,
            "access_token": "access-token-123",
            "refresh_token": "refresh-token-123"
        })))
        .mount(server)
        .await;
}
/// Build `ServerOptions` pointing at the given mock `issuer`, with browser
/// auto-open disabled so tests never launch a real browser.
fn server_opts(
    codex_home: &tempfile::TempDir,
    issuer: String,
    cli_auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> ServerOptions {
    let mut options = ServerOptions::new(
        codex_home.path().to_path_buf(),
        "client-id".to_string(),
        None,
        cli_auth_credentials_store_mode,
    );
    options.issuer = issuer;
    options.open_browser = false;
    options
}
/// Happy path: usercode + two-step token poll + OAuth exchange succeed and
/// the resulting tokens are persisted to auth.json.
#[tokio::test]
async fn device_code_login_integration_succeeds() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let codex_home = tempdir().unwrap();
    let mock_server = MockServer::start().await;
    mock_usercode_success(&mock_server).await;
    // First poll returns 404 (pending), second succeeds.
    mock_poll_token_two_step(&mock_server, Arc::new(AtomicUsize::new(0)), 404).await;
    let jwt = make_jwt(json!({
        "https://api.openai.com/auth": {
            "chatgpt_account_id": "acct_321"
        }
    }));
    mock_oauth_token_single(&mock_server, jwt.clone()).await;
    let issuer = mock_server.uri();
    let opts = server_opts(&codex_home, issuer, AuthCredentialsStoreMode::File);
    run_device_code_login(opts)
        .await
        .expect("device code login integration should succeed");
    let auth = load_auth_dot_json(codex_home.path(), AuthCredentialsStoreMode::File)
        .context("auth.json should load after login succeeds")?
        .context("auth.json written")?;
    // assert_eq!(auth.openai_api_key.as_deref(), Some("api-key-321"));
    let tokens = auth.tokens.expect("tokens persisted");
    assert_eq!(tokens.access_token, "access-token-123");
    assert_eq!(tokens.refresh_token, "refresh-token-123");
    assert_eq!(tokens.id_token.raw_jwt, jwt);
    assert_eq!(tokens.account_id.as_deref(), Some("acct_321"));
    Ok(())
}
/// A forced workspace id that differs from the one in the id_token must make
/// the device code flow fail with PermissionDenied and write no auth.json.
#[tokio::test]
async fn device_code_login_rejects_workspace_mismatch() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let codex_home = tempdir().unwrap();
    let mock_server = MockServer::start().await;
    mock_usercode_success(&mock_server).await;
    mock_poll_token_two_step(&mock_server, Arc::new(AtomicUsize::new(0)), 404).await;
    // Issuer claims org-actual; the options below require org-required.
    let jwt = make_jwt(json!({
        "https://api.openai.com/auth": {
            "chatgpt_account_id": "acct_321",
            "organization_id": "org-actual"
        }
    }));
    mock_oauth_token_single(&mock_server, jwt).await;
    let issuer = mock_server.uri();
    let mut opts = server_opts(&codex_home, issuer, AuthCredentialsStoreMode::File);
    opts.forced_chatgpt_workspace_id = Some("org-required".to_string());
    let err = run_device_code_login(opts)
        .await
        .expect_err("device code login should fail when workspace mismatches");
    assert_eq!(err.kind(), std::io::ErrorKind::PermissionDenied);
    let auth = load_auth_dot_json(codex_home.path(), AuthCredentialsStoreMode::File)
        .context("auth.json should load after login fails")?;
    assert!(
        auth.is_none(),
        "auth.json should not be created when workspace validation fails"
    );
    Ok(())
}
/// A non-success status on the initial usercode request must abort the flow
/// with a descriptive error and leave no auth.json behind.
#[tokio::test]
async fn device_code_login_integration_handles_usercode_http_failure() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let codex_home = tempdir().unwrap();
    let mock_server = MockServer::start().await;
    mock_usercode_failure(&mock_server, 503).await;
    let issuer = mock_server.uri();
    let opts = server_opts(&codex_home, issuer, AuthCredentialsStoreMode::File);
    let err = run_device_code_login(opts)
        .await
        .expect_err("usercode HTTP failure should bubble up");
    assert!(
        err.to_string()
            .contains("device code request failed with status"),
        "unexpected error: {err:?}"
    );
    let auth = load_auth_dot_json(codex_home.path(), AuthCredentialsStoreMode::File)
        .context("auth.json should load after login fails")?;
    assert!(
        auth.is_none(),
        "auth.json should not be created when login fails"
    );
    Ok(())
}
/// With an id_token carrying no auth claims, login still succeeds and
/// persists tokens — but no OPENAI_API_KEY is written.
#[tokio::test]
async fn device_code_login_integration_persists_without_api_key_on_exchange_failure()
-> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let codex_home = tempdir().unwrap();
    let mock_server = MockServer::start().await;
    mock_usercode_success(&mock_server).await;
    mock_poll_token_two_step(&mock_server, Arc::new(AtomicUsize::new(0)), 404).await;
    // Empty claims: nothing for the API-key exchange to work with.
    let jwt = make_jwt(json!({}));
    mock_oauth_token_single(&mock_server, jwt.clone()).await;
    let issuer = mock_server.uri();
    let mut opts = ServerOptions::new(
        codex_home.path().to_path_buf(),
        "client-id".to_string(),
        None,
        AuthCredentialsStoreMode::File,
    );
    opts.issuer = issuer;
    opts.open_browser = false;
    run_device_code_login(opts)
        .await
        .expect("device login should succeed without API key exchange");
    let auth = load_auth_dot_json(codex_home.path(), AuthCredentialsStoreMode::File)
        .context("auth.json should load after login succeeds")?
        .context("auth.json written")?;
    assert!(auth.openai_api_key.is_none());
    let tokens = auth.tokens.expect("tokens persisted");
    assert_eq!(tokens.access_token, "access-token-123");
    assert_eq!(tokens.refresh_token, "refresh-token-123");
    assert_eq!(tokens.id_token.raw_jwt, jwt);
    Ok(())
}
/// A hard error (401) from the token poll endpoint must fail the flow and
/// leave no auth.json behind.
#[tokio::test]
async fn device_code_login_integration_handles_error_payload() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let codex_home = tempdir().unwrap();
    // Start WireMock
    let mock_server = MockServer::start().await;
    mock_usercode_success(&mock_server).await;
    // // /deviceauth/token → returns error payload with status 401
    mock_poll_token_single(
        &mock_server,
        "/api/accounts/deviceauth/token",
        ResponseTemplate::new(401).set_body_json(json!({
            "error": "authorization_declined",
            "error_description": "Denied"
        })),
    )
    .await;
    // (WireMock will automatically 404 for other paths)
    let issuer = mock_server.uri();
    let mut opts = ServerOptions::new(
        codex_home.path().to_path_buf(),
        "client-id".to_string(),
        None,
        AuthCredentialsStoreMode::File,
    );
    opts.issuer = issuer;
    opts.open_browser = false;
    let err = run_device_code_login(opts)
        .await
        .expect_err("integration failure path should return error");
    // Accept either the specific error payload, a 400, or a 404 (since the client may return 404 if the flow is incomplete)
    assert!(
        err.to_string().contains("authorization_declined") || err.to_string().contains("401"),
        "Expected an authorization_declined / 400 / 404 error, got {err:?}"
    );
    let auth = load_auth_dot_json(codex_home.path(), AuthCredentialsStoreMode::File)
        .context("auth.json should load after login fails")?;
    assert!(
        auth.is_none(),
        "auth.json should not be created when device auth fails"
    );
    Ok(())
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/linux-sandbox/src/lib.rs | codex-rs/linux-sandbox/src/lib.rs | #[cfg(target_os = "linux")]
mod landlock;
#[cfg(target_os = "linux")]
mod linux_run_main;
#[cfg(target_os = "linux")]
pub fn run_main() -> ! {
linux_run_main::run_main();
}
#[cfg(not(target_os = "linux"))]
pub fn run_main() -> ! {
panic!("codex-linux-sandbox is only supported on Linux");
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/linux-sandbox/src/landlock.rs | codex-rs/linux-sandbox/src/landlock.rs | use std::collections::BTreeMap;
use std::path::Path;
use codex_core::error::CodexErr;
use codex_core::error::Result;
use codex_core::error::SandboxErr;
use codex_core::protocol::SandboxPolicy;
use codex_utils_absolute_path::AbsolutePathBuf;
use landlock::ABI;
use landlock::Access;
use landlock::AccessFs;
use landlock::CompatLevel;
use landlock::Compatible;
use landlock::Ruleset;
use landlock::RulesetAttr;
use landlock::RulesetCreatedAttr;
use seccompiler::BpfProgram;
use seccompiler::SeccompAction;
use seccompiler::SeccompCmpArgLen;
use seccompiler::SeccompCmpOp;
use seccompiler::SeccompCondition;
use seccompiler::SeccompFilter;
use seccompiler::SeccompRule;
use seccompiler::TargetArch;
use seccompiler::apply_filter;
/// Apply sandbox policies inside this thread so only the child inherits
/// them, not the entire CLI process.
///
/// Installs the seccomp network filter first, then the Landlock filesystem
/// ruleset, each only if the policy restricts the corresponding capability.
pub(crate) fn apply_sandbox_policy_to_current_thread(
    sandbox_policy: &SandboxPolicy,
    cwd: &Path,
) -> Result<()> {
    if !sandbox_policy.has_full_network_access() {
        install_network_seccomp_filter_on_current_thread()?;
    }
    if !sandbox_policy.has_full_disk_write_access() {
        // Resolve writable roots relative to `cwd` per the policy.
        let writable_roots = sandbox_policy
            .get_writable_roots_with_cwd(cwd)
            .into_iter()
            .map(|writable_root| writable_root.root)
            .collect();
        install_filesystem_landlock_rules_on_current_thread(writable_roots)?;
    }
    // TODO(ragona): Add appropriate restrictions if
    // `sandbox_policy.has_full_disk_read_access()` is `false`.
    Ok(())
}
/// Installs Landlock file-system rules on the current thread allowing read
/// access to the entire file-system while restricting write access to
/// `/dev/null` and the provided list of `writable_roots`.
///
/// # Errors
/// Returns [`CodexErr::Sandbox`] variants when the ruleset fails to apply.
fn install_filesystem_landlock_rules_on_current_thread(
    writable_roots: Vec<AbsolutePathBuf>,
) -> Result<()> {
    let abi = ABI::V5;
    let access_rw = AccessFs::from_all(abi);
    let access_ro = AccessFs::from_read(abi);
    // BestEffort lets older kernels enforce whatever subset of the ABI they
    // support instead of erroring out entirely.
    let mut ruleset = Ruleset::default()
        .set_compatibility(CompatLevel::BestEffort)
        .handle_access(access_rw)?
        .create()?
        // Read-only everywhere by default; write access is granted below.
        .add_rules(landlock::path_beneath_rules(&["/"], access_ro))?
        .add_rules(landlock::path_beneath_rules(&["/dev/null"], access_rw))?
        .set_no_new_privs(true);
    if !writable_roots.is_empty() {
        ruleset = ruleset.add_rules(landlock::path_beneath_rules(&writable_roots, access_rw))?;
    }
    let status = ruleset.restrict_self()?;
    // Fail closed: if the kernel enforced nothing, refuse to proceed rather
    // than run the child unsandboxed.
    if status.ruleset == landlock::RulesetStatus::NotEnforced {
        return Err(CodexErr::Sandbox(SandboxErr::LandlockRestrict));
    }
    Ok(())
}
/// Installs a seccomp filter that blocks outbound network access except for
/// AF_UNIX domain sockets.
///
/// Matching syscalls fail with `EPERM` rather than killing the process, so
/// tools that probe for network access degrade gracefully.
fn install_network_seccomp_filter_on_current_thread() -> std::result::Result<(), SandboxErr> {
    // Build rule map.
    let mut rules: BTreeMap<i64, Vec<SeccompRule>> = BTreeMap::new();
    // Helper – insert unconditional deny rule for syscall number.
    let mut deny_syscall = |nr: i64| {
        rules.insert(nr, vec![]); // empty rule vec = unconditional match
    };
    deny_syscall(libc::SYS_connect);
    deny_syscall(libc::SYS_accept);
    deny_syscall(libc::SYS_accept4);
    deny_syscall(libc::SYS_bind);
    deny_syscall(libc::SYS_listen);
    deny_syscall(libc::SYS_getpeername);
    deny_syscall(libc::SYS_getsockname);
    deny_syscall(libc::SYS_shutdown);
    deny_syscall(libc::SYS_sendto);
    deny_syscall(libc::SYS_sendmmsg);
    // NOTE: allowing recvfrom allows some tools like: `cargo clippy` to run
    // with their socketpair + child processes for sub-proc management
    // deny_syscall(libc::SYS_recvfrom);
    deny_syscall(libc::SYS_recvmmsg);
    deny_syscall(libc::SYS_getsockopt);
    deny_syscall(libc::SYS_setsockopt);
    deny_syscall(libc::SYS_ptrace);
    // For `socket` we allow AF_UNIX (arg0 == AF_UNIX) and deny everything else.
    // The rule matches (and thus denies) when the domain argument is NOT AF_UNIX.
    let unix_only_rule = SeccompRule::new(vec![SeccompCondition::new(
        0, // first argument (domain)
        SeccompCmpArgLen::Dword,
        SeccompCmpOp::Ne,
        libc::AF_UNIX as u64,
    )?])?;
    rules.insert(libc::SYS_socket, vec![unix_only_rule.clone()]);
    rules.insert(libc::SYS_socketpair, vec![unix_only_rule]); // always deny (Unix can use socketpair but fine, keep open?)
    let filter = SeccompFilter::new(
        rules,
        SeccompAction::Allow, // default – allow
        SeccompAction::Errno(libc::EPERM as u32), // when rule matches – return EPERM
        if cfg!(target_arch = "x86_64") {
            TargetArch::x86_64
        } else if cfg!(target_arch = "aarch64") {
            TargetArch::aarch64
        } else {
            unimplemented!("unsupported architecture for seccomp filter");
        },
    )?;
    // Compile the rule set to BPF and attach it to the current thread.
    let prog: BpfProgram = filter.try_into()?;
    apply_filter(&prog)?;
    Ok(())
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/linux-sandbox/src/main.rs | codex-rs/linux-sandbox/src/main.rs | /// Note that the cwd, env, and command args are preserved in the ultimate call
/// to `execv`, so the caller is responsible for ensuring those values are
/// correct.
fn main() -> ! {
codex_linux_sandbox::run_main()
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/linux-sandbox/src/linux_run_main.rs | codex-rs/linux-sandbox/src/linux_run_main.rs | use clap::Parser;
use std::ffi::CString;
use std::path::PathBuf;
use crate::landlock::apply_sandbox_policy_to_current_thread;
/// CLI arguments for the sandbox helper binary: the sandbox policy, the cwd
/// the policy should be resolved against, and the command to exec.
#[derive(Debug, Parser)]
pub struct LandlockCommand {
    /// It is possible that the cwd used in the context of the sandbox policy
    /// is different from the cwd of the process to spawn.
    #[arg(long = "sandbox-policy-cwd")]
    pub sandbox_policy_cwd: PathBuf,

    /// Policy to enforce; parsed from its string representation by clap.
    #[arg(long = "sandbox-policy")]
    pub sandbox_policy: codex_core::protocol::SandboxPolicy,

    /// Full command args to run under landlock.
    #[arg(trailing_var_arg = true)]
    pub command: Vec<String>,
}
/// Parse CLI args, sandbox the current thread, then replace this process
/// with the requested command via `execvp`. Never returns: on success the
/// process image is replaced; on any failure it panics.
pub fn run_main() -> ! {
    let LandlockCommand {
        sandbox_policy_cwd,
        sandbox_policy,
        command,
    } = LandlockCommand::parse();
    if let Err(e) = apply_sandbox_policy_to_current_thread(&sandbox_policy, &sandbox_policy_cwd) {
        panic!("error running landlock: {e:?}");
    }
    if command.is_empty() {
        panic!("No command specified to execute.");
    }
    #[expect(clippy::expect_used)]
    let c_command =
        CString::new(command[0].as_str()).expect("Failed to convert command to CString");
    #[expect(clippy::expect_used)]
    let c_args: Vec<CString> = command
        .iter()
        .map(|arg| CString::new(arg.as_str()).expect("Failed to convert arg to CString"))
        .collect();
    // execvp expects a null-terminated array of pointers; `c_args` must stay
    // alive until the call so the pointers remain valid.
    let mut c_args_ptrs: Vec<*const libc::c_char> = c_args.iter().map(|arg| arg.as_ptr()).collect();
    c_args_ptrs.push(std::ptr::null());
    // SAFETY: `c_command` and every entry of `c_args_ptrs` point to live,
    // NUL-terminated strings owned by `c_command`/`c_args`, and the pointer
    // array is null-terminated as execvp requires.
    unsafe {
        libc::execvp(c_command.as_ptr(), c_args_ptrs.as_ptr());
    }
    // If execvp returns, there was an error.
    let err = std::io::Error::last_os_error();
    panic!("Failed to execvp {}: {err}", command[0].as_str());
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/linux-sandbox/tests/all.rs | codex-rs/linux-sandbox/tests/all.rs | // Single integration test binary that aggregates all test modules.
// The submodules live in `tests/suite/`.
mod suite;
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/linux-sandbox/tests/suite/landlock.rs | codex-rs/linux-sandbox/tests/suite/landlock.rs | #![cfg(target_os = "linux")]
use codex_core::config::types::ShellEnvironmentPolicy;
use codex_core::error::CodexErr;
use codex_core::error::SandboxErr;
use codex_core::exec::ExecParams;
use codex_core::exec::process_exec_tool_call;
use codex_core::exec_env::create_env;
use codex_core::protocol::SandboxPolicy;
use codex_core::sandboxing::SandboxPermissions;
use codex_utils_absolute_path::AbsolutePathBuf;
use std::collections::HashMap;
use std::path::PathBuf;
use tempfile::NamedTempFile;
// At least on GitHub CI, the arm64 tests appear to need longer timeouts.
#[cfg(not(target_arch = "aarch64"))]
const SHORT_TIMEOUT_MS: u64 = 200;
#[cfg(target_arch = "aarch64")]
const SHORT_TIMEOUT_MS: u64 = 5_000;
#[cfg(not(target_arch = "aarch64"))]
const LONG_TIMEOUT_MS: u64 = 1_000;
#[cfg(target_arch = "aarch64")]
const LONG_TIMEOUT_MS: u64 = 5_000;
#[cfg(not(target_arch = "aarch64"))]
const NETWORK_TIMEOUT_MS: u64 = 2_000;
#[cfg(target_arch = "aarch64")]
const NETWORK_TIMEOUT_MS: u64 = 10_000;
/// Build the child-process environment from the default shell environment policy.
fn create_env_from_core_vars() -> HashMap<String, String> {
    create_env(&ShellEnvironmentPolicy::default())
}
/// Run `cmd` under the codex-linux-sandbox with a WorkspaceWrite policy whose
/// writable roots are `writable_roots`; panics (failing the test) if the
/// command exits non-zero or exceeds `timeout_ms`.
#[expect(clippy::print_stdout, clippy::expect_used, clippy::unwrap_used)]
async fn run_cmd(cmd: &[&str], writable_roots: &[PathBuf], timeout_ms: u64) {
    let cwd = std::env::current_dir().expect("cwd should exist");
    let sandbox_cwd = cwd.clone();
    let params = ExecParams {
        command: cmd.iter().copied().map(str::to_owned).collect(),
        cwd,
        expiration: timeout_ms.into(),
        env: create_env_from_core_vars(),
        sandbox_permissions: SandboxPermissions::UseDefault,
        justification: None,
        arg0: None,
    };
    let sandbox_policy = SandboxPolicy::WorkspaceWrite {
        writable_roots: writable_roots
            .iter()
            .map(|p| AbsolutePathBuf::try_from(p.as_path()).unwrap())
            .collect(),
        network_access: false,
        // Exclude tmp-related folders from writable roots because we need a
        // folder that is writable by tests but that we intentionally disallow
        // writing to in the sandbox.
        exclude_tmpdir_env_var: true,
        exclude_slash_tmp: true,
    };
    // Use the sandbox helper binary built alongside this test crate.
    let sandbox_program = env!("CARGO_BIN_EXE_codex-linux-sandbox");
    let codex_linux_sandbox_exe = Some(PathBuf::from(sandbox_program));
    let res = process_exec_tool_call(
        params,
        &sandbox_policy,
        sandbox_cwd.as_path(),
        &codex_linux_sandbox_exe,
        None,
    )
    .await
    .unwrap();
    if res.exit_code != 0 {
        // Dump output before panicking so CI logs show why the command failed.
        println!("stdout:\n{}", res.stdout.text);
        println!("stderr:\n{}", res.stderr.text);
        panic!("exit code: {}", res.exit_code);
    }
}
#[tokio::test]
async fn test_root_read() {
run_cmd(&["ls", "-l", "/bin"], &[], SHORT_TIMEOUT_MS).await;
}
#[tokio::test]
#[should_panic]
async fn test_root_write() {
let tmpfile = NamedTempFile::new().unwrap();
let tmpfile_path = tmpfile.path().to_string_lossy();
run_cmd(
&["bash", "-lc", &format!("echo blah > {tmpfile_path}")],
&[],
SHORT_TIMEOUT_MS,
)
.await;
}
#[tokio::test]
async fn test_dev_null_write() {
run_cmd(
&["bash", "-lc", "echo blah > /dev/null"],
&[],
// We have seen timeouts when running this test in CI on GitHub,
// so we are using a generous timeout until we can diagnose further.
LONG_TIMEOUT_MS,
)
.await;
}
#[tokio::test]
async fn test_writable_root() {
let tmpdir = tempfile::tempdir().unwrap();
let file_path = tmpdir.path().join("test");
run_cmd(
&[
"bash",
"-lc",
&format!("echo blah > {}", file_path.to_string_lossy()),
],
&[tmpdir.path().to_path_buf()],
// We have seen timeouts when running this test in CI on GitHub,
// so we are using a generous timeout until we can diagnose further.
LONG_TIMEOUT_MS,
)
.await;
}
#[tokio::test]
#[should_panic(expected = "Sandbox(Timeout")]
async fn test_timeout() {
run_cmd(&["sleep", "2"], &[], 50).await;
}
/// Helper that runs `cmd` under the Linux sandbox and asserts that the command
/// does NOT succeed (i.e. returns a non‑zero exit code) **unless** the binary
/// is missing in which case we silently treat it as an accepted skip so the
/// suite remains green on leaner CI images.
#[expect(clippy::expect_used)]
async fn assert_network_blocked(cmd: &[&str]) {
let cwd = std::env::current_dir().expect("cwd should exist");
let sandbox_cwd = cwd.clone();
let params = ExecParams {
command: cmd.iter().copied().map(str::to_owned).collect(),
cwd,
// Give the tool a generous 2-second timeout so even slow DNS timeouts
// do not stall the suite.
expiration: NETWORK_TIMEOUT_MS.into(),
env: create_env_from_core_vars(),
sandbox_permissions: SandboxPermissions::UseDefault,
justification: None,
arg0: None,
};
let sandbox_policy = SandboxPolicy::new_read_only_policy();
let sandbox_program = env!("CARGO_BIN_EXE_codex-linux-sandbox");
let codex_linux_sandbox_exe: Option<PathBuf> = Some(PathBuf::from(sandbox_program));
let result = process_exec_tool_call(
params,
&sandbox_policy,
sandbox_cwd.as_path(),
&codex_linux_sandbox_exe,
None,
)
.await;
let output = match result {
Ok(output) => output,
Err(CodexErr::Sandbox(SandboxErr::Denied { output })) => *output,
_ => {
panic!("expected sandbox denied error, got: {result:?}");
}
};
dbg!(&output.stderr.text);
dbg!(&output.stdout.text);
dbg!(&output.exit_code);
// A completely missing binary exits with 127. Anything else should also
// be non‑zero (EPERM from seccomp will usually bubble up as 1, 2, 13…)
// If—*and only if*—the command exits 0 we consider the sandbox breached.
if output.exit_code == 0 {
panic!(
"Network sandbox FAILED - {cmd:?} exited 0\nstdout:\n{}\nstderr:\n{}",
output.stdout.text, output.stderr.text
);
}
}
#[tokio::test]
async fn sandbox_blocks_curl() {
assert_network_blocked(&["curl", "-I", "http://openai.com"]).await;
}
#[tokio::test]
async fn sandbox_blocks_wget() {
assert_network_blocked(&["wget", "-qO-", "http://openai.com"]).await;
}
#[tokio::test]
async fn sandbox_blocks_ping() {
// ICMP requires raw socket – should be denied quickly with EPERM.
assert_network_blocked(&["ping", "-c", "1", "8.8.8.8"]).await;
}
#[tokio::test]
async fn sandbox_blocks_nc() {
// Zero‑length connection attempt to localhost.
assert_network_blocked(&["nc", "-z", "127.0.0.1", "80"]).await;
}
#[tokio::test]
async fn sandbox_blocks_ssh() {
// Force ssh to attempt a real TCP connection but fail quickly. `BatchMode`
// avoids password prompts, and `ConnectTimeout` keeps the hang time low.
assert_network_blocked(&[
"ssh",
"-o",
"BatchMode=yes",
"-o",
"ConnectTimeout=1",
"github.com",
])
.await;
}
#[tokio::test]
async fn sandbox_blocks_getent() {
assert_network_blocked(&["getent", "ahosts", "openai.com"]).await;
}
#[tokio::test]
async fn sandbox_blocks_dev_tcp_redirection() {
// This syntax is only supported by bash and zsh. We try bash first.
// Fallback generic socket attempt using /bin/sh with bash‑style /dev/tcp. Not
// all images ship bash, so we guard against 127 as well.
assert_network_blocked(&["bash", "-c", "echo hi > /dev/tcp/127.0.0.1/80"]).await;
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/linux-sandbox/tests/suite/mod.rs | codex-rs/linux-sandbox/tests/suite/mod.rs | // Aggregates all former standalone integration tests as modules.
mod landlock;
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/lmstudio/src/lib.rs | codex-rs/lmstudio/src/lib.rs | mod client;
pub use client::LMStudioClient;
use codex_core::config::Config;
/// Default OSS model to use when `--oss` is passed without an explicit `-m`.
pub const DEFAULT_OSS_MODEL: &str = "openai/gpt-oss-20b";
/// Prepare the local OSS environment when `--oss` is selected.
///
/// - Ensures a local LM Studio server is reachable.
/// - Checks if the model exists locally and downloads it if missing.
pub async fn ensure_oss_ready(config: &Config) -> std::io::Result<()> {
let model = match config.model.as_ref() {
Some(model) => model,
None => DEFAULT_OSS_MODEL,
};
// Verify local LM Studio is reachable.
let lmstudio_client = LMStudioClient::try_from_provider(config).await?;
match lmstudio_client.fetch_models().await {
Ok(models) => {
if !models.iter().any(|m| m == model) {
lmstudio_client.download_model(model).await?;
}
}
Err(err) => {
// Not fatal; higher layers may still proceed and surface errors later.
tracing::warn!("Failed to query local models from LM Studio: {}.", err);
}
}
// Load the model in the background
tokio::spawn({
let client = lmstudio_client.clone();
let model = model.to_string();
async move {
if let Err(e) = client.load_model(&model).await {
tracing::warn!("Failed to load model {}: {}", model, e);
}
}
});
Ok(())
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/lmstudio/src/client.rs | codex-rs/lmstudio/src/client.rs | use codex_core::LMSTUDIO_OSS_PROVIDER_ID;
use codex_core::config::Config;
use std::io;
use std::path::Path;
#[derive(Clone)]
pub struct LMStudioClient {
client: reqwest::Client,
base_url: String,
}
const LMSTUDIO_CONNECTION_ERROR: &str = "LM Studio is not responding. Install from https://lmstudio.ai/download and run 'lms server start'.";
impl LMStudioClient {
pub async fn try_from_provider(config: &Config) -> std::io::Result<Self> {
let provider = config
.model_providers
.get(LMSTUDIO_OSS_PROVIDER_ID)
.ok_or_else(|| {
io::Error::new(
io::ErrorKind::NotFound,
format!("Built-in provider {LMSTUDIO_OSS_PROVIDER_ID} not found",),
)
})?;
let base_url = provider.base_url.as_ref().ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidData,
"oss provider must have a base_url",
)
})?;
let client = reqwest::Client::builder()
.connect_timeout(std::time::Duration::from_secs(5))
.build()
.unwrap_or_else(|_| reqwest::Client::new());
let client = LMStudioClient {
client,
base_url: base_url.to_string(),
};
client.check_server().await?;
Ok(client)
}
async fn check_server(&self) -> io::Result<()> {
let url = format!("{}/models", self.base_url.trim_end_matches('/'));
let response = self.client.get(&url).send().await;
if let Ok(resp) = response {
if resp.status().is_success() {
Ok(())
} else {
Err(io::Error::other(format!(
"Server returned error: {} {LMSTUDIO_CONNECTION_ERROR}",
resp.status()
)))
}
} else {
Err(io::Error::other(LMSTUDIO_CONNECTION_ERROR))
}
}
// Load a model by sending an empty request with max_tokens 1
pub async fn load_model(&self, model: &str) -> io::Result<()> {
let url = format!("{}/responses", self.base_url.trim_end_matches('/'));
let request_body = serde_json::json!({
"model": model,
"input": "",
"max_output_tokens": 1
});
let response = self
.client
.post(&url)
.header("Content-Type", "application/json")
.json(&request_body)
.send()
.await
.map_err(|e| io::Error::other(format!("Request failed: {e}")))?;
if response.status().is_success() {
tracing::info!("Successfully loaded model '{model}'");
Ok(())
} else {
Err(io::Error::other(format!(
"Failed to load model: {}",
response.status()
)))
}
}
// Return the list of models available on the LM Studio server.
pub async fn fetch_models(&self) -> io::Result<Vec<String>> {
let url = format!("{}/models", self.base_url.trim_end_matches('/'));
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| io::Error::other(format!("Request failed: {e}")))?;
if response.status().is_success() {
let json: serde_json::Value = response.json().await.map_err(|e| {
io::Error::new(io::ErrorKind::InvalidData, format!("JSON parse error: {e}"))
})?;
let models = json["data"]
.as_array()
.ok_or_else(|| {
io::Error::new(io::ErrorKind::InvalidData, "No 'data' array in response")
})?
.iter()
.filter_map(|model| model["id"].as_str())
.map(std::string::ToString::to_string)
.collect();
Ok(models)
} else {
Err(io::Error::other(format!(
"Failed to fetch models: {}",
response.status()
)))
}
}
// Find lms, checking fallback paths if not in PATH
fn find_lms() -> std::io::Result<String> {
Self::find_lms_with_home_dir(None)
}
fn find_lms_with_home_dir(home_dir: Option<&str>) -> std::io::Result<String> {
// First try 'lms' in PATH
if which::which("lms").is_ok() {
return Ok("lms".to_string());
}
// Platform-specific fallback paths
let home = match home_dir {
Some(dir) => dir.to_string(),
None => {
#[cfg(unix)]
{
std::env::var("HOME").unwrap_or_default()
}
#[cfg(windows)]
{
std::env::var("USERPROFILE").unwrap_or_default()
}
}
};
#[cfg(unix)]
let fallback_path = format!("{home}/.lmstudio/bin/lms");
#[cfg(windows)]
let fallback_path = format!("{home}/.lmstudio/bin/lms.exe");
if Path::new(&fallback_path).exists() {
Ok(fallback_path)
} else {
Err(std::io::Error::new(
std::io::ErrorKind::NotFound,
"LM Studio not found. Please install LM Studio from https://lmstudio.ai/",
))
}
}
pub async fn download_model(&self, model: &str) -> std::io::Result<()> {
let lms = Self::find_lms()?;
eprintln!("Downloading model: {model}");
let status = std::process::Command::new(&lms)
.args(["get", "--yes", model])
.stdout(std::process::Stdio::inherit())
.stderr(std::process::Stdio::null())
.status()
.map_err(|e| {
std::io::Error::other(format!("Failed to execute '{lms} get --yes {model}': {e}"))
})?;
if !status.success() {
return Err(std::io::Error::other(format!(
"Model download failed with exit code: {}",
status.code().unwrap_or(-1)
)));
}
tracing::info!("Successfully downloaded model '{model}'");
Ok(())
}
/// Low-level constructor given a raw host root, e.g. "http://localhost:1234".
#[cfg(test)]
fn from_host_root(host_root: impl Into<String>) -> Self {
let client = reqwest::Client::builder()
.connect_timeout(std::time::Duration::from_secs(5))
.build()
.unwrap_or_else(|_| reqwest::Client::new());
Self {
client,
base_url: host_root.into(),
}
}
}
#[cfg(test)]
mod tests {
#![allow(clippy::expect_used, clippy::unwrap_used)]
use super::*;
#[tokio::test]
async fn test_fetch_models_happy_path() {
if std::env::var(codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
tracing::info!(
"{} is set; skipping test_fetch_models_happy_path",
codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
);
return;
}
let server = wiremock::MockServer::start().await;
wiremock::Mock::given(wiremock::matchers::method("GET"))
.and(wiremock::matchers::path("/models"))
.respond_with(
wiremock::ResponseTemplate::new(200).set_body_raw(
serde_json::json!({
"data": [
{"id": "openai/gpt-oss-20b"},
]
})
.to_string(),
"application/json",
),
)
.mount(&server)
.await;
let client = LMStudioClient::from_host_root(server.uri());
let models = client.fetch_models().await.expect("fetch models");
assert!(models.contains(&"openai/gpt-oss-20b".to_string()));
}
#[tokio::test]
async fn test_fetch_models_no_data_array() {
if std::env::var(codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
tracing::info!(
"{} is set; skipping test_fetch_models_no_data_array",
codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
);
return;
}
let server = wiremock::MockServer::start().await;
wiremock::Mock::given(wiremock::matchers::method("GET"))
.and(wiremock::matchers::path("/models"))
.respond_with(
wiremock::ResponseTemplate::new(200)
.set_body_raw(serde_json::json!({}).to_string(), "application/json"),
)
.mount(&server)
.await;
let client = LMStudioClient::from_host_root(server.uri());
let result = client.fetch_models().await;
assert!(result.is_err());
assert!(
result
.unwrap_err()
.to_string()
.contains("No 'data' array in response")
);
}
#[tokio::test]
async fn test_fetch_models_server_error() {
if std::env::var(codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
tracing::info!(
"{} is set; skipping test_fetch_models_server_error",
codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
);
return;
}
let server = wiremock::MockServer::start().await;
wiremock::Mock::given(wiremock::matchers::method("GET"))
.and(wiremock::matchers::path("/models"))
.respond_with(wiremock::ResponseTemplate::new(500))
.mount(&server)
.await;
let client = LMStudioClient::from_host_root(server.uri());
let result = client.fetch_models().await;
assert!(result.is_err());
assert!(
result
.unwrap_err()
.to_string()
.contains("Failed to fetch models: 500")
);
}
#[tokio::test]
async fn test_check_server_happy_path() {
if std::env::var(codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
tracing::info!(
"{} is set; skipping test_check_server_happy_path",
codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
);
return;
}
let server = wiremock::MockServer::start().await;
wiremock::Mock::given(wiremock::matchers::method("GET"))
.and(wiremock::matchers::path("/models"))
.respond_with(wiremock::ResponseTemplate::new(200))
.mount(&server)
.await;
let client = LMStudioClient::from_host_root(server.uri());
client
.check_server()
.await
.expect("server check should pass");
}
#[tokio::test]
async fn test_check_server_error() {
if std::env::var(codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
tracing::info!(
"{} is set; skipping test_check_server_error",
codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
);
return;
}
let server = wiremock::MockServer::start().await;
wiremock::Mock::given(wiremock::matchers::method("GET"))
.and(wiremock::matchers::path("/models"))
.respond_with(wiremock::ResponseTemplate::new(404))
.mount(&server)
.await;
let client = LMStudioClient::from_host_root(server.uri());
let result = client.check_server().await;
assert!(result.is_err());
assert!(
result
.unwrap_err()
.to_string()
.contains("Server returned error: 404")
);
}
#[test]
fn test_find_lms() {
let result = LMStudioClient::find_lms();
match result {
Ok(_) => {
// lms was found in PATH - that's fine
}
Err(e) => {
// Expected error when LM Studio not installed
assert!(e.to_string().contains("LM Studio not found"));
}
}
}
#[test]
fn test_find_lms_with_mock_home() {
// Test fallback path construction without touching env vars
#[cfg(unix)]
{
let result = LMStudioClient::find_lms_with_home_dir(Some("/test/home"));
if let Err(e) = result {
assert!(e.to_string().contains("LM Studio not found"));
}
}
#[cfg(windows)]
{
let result = LMStudioClient::find_lms_with_home_dir(Some("C:\\test\\home"));
if let Err(e) = result {
assert!(e.to_string().contains("LM Studio not found"));
}
}
}
#[test]
fn test_from_host_root() {
let client = LMStudioClient::from_host_root("http://localhost:1234");
assert_eq!(client.base_url, "http://localhost:1234");
let client = LMStudioClient::from_host_root("https://example.com:8080/api");
assert_eq!(client.base_url, "https://example.com:8080/api");
}
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy/src/lib.rs | codex-rs/execpolicy/src/lib.rs | pub mod amend;
pub mod decision;
pub mod error;
pub mod execpolicycheck;
pub mod parser;
pub mod policy;
pub mod rule;
pub use amend::AmendError;
pub use amend::blocking_append_allow_prefix_rule;
pub use decision::Decision;
pub use error::Error;
pub use error::Result;
pub use execpolicycheck::ExecPolicyCheckCommand;
pub use parser::PolicyParser;
pub use policy::Evaluation;
pub use policy::Policy;
pub use rule::Rule;
pub use rule::RuleMatch;
pub use rule::RuleRef;
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy/src/parser.rs | codex-rs/execpolicy/src/parser.rs | use multimap::MultiMap;
use shlex;
use starlark::any::ProvidesStaticType;
use starlark::environment::GlobalsBuilder;
use starlark::environment::Module;
use starlark::eval::Evaluator;
use starlark::starlark_module;
use starlark::syntax::AstModule;
use starlark::syntax::Dialect;
use starlark::values::Value;
use starlark::values::list::ListRef;
use starlark::values::list::UnpackList;
use starlark::values::none::NoneType;
use std::cell::RefCell;
use std::cell::RefMut;
use std::sync::Arc;
use crate::decision::Decision;
use crate::error::Error;
use crate::error::Result;
use crate::rule::PatternToken;
use crate::rule::PrefixPattern;
use crate::rule::PrefixRule;
use crate::rule::RuleRef;
use crate::rule::validate_match_examples;
use crate::rule::validate_not_match_examples;
pub struct PolicyParser {
builder: RefCell<PolicyBuilder>,
}
impl Default for PolicyParser {
fn default() -> Self {
Self::new()
}
}
impl PolicyParser {
pub fn new() -> Self {
Self {
builder: RefCell::new(PolicyBuilder::new()),
}
}
/// Parses a policy, tagging parser errors with `policy_identifier` so failures include the
/// identifier alongside line numbers.
pub fn parse(&mut self, policy_identifier: &str, policy_file_contents: &str) -> Result<()> {
let mut dialect = Dialect::Extended.clone();
dialect.enable_f_strings = true;
let ast = AstModule::parse(
policy_identifier,
policy_file_contents.to_string(),
&dialect,
)
.map_err(Error::Starlark)?;
let globals = GlobalsBuilder::standard().with(policy_builtins).build();
let module = Module::new();
{
let mut eval = Evaluator::new(&module);
eval.extra = Some(&self.builder);
eval.eval_module(ast, &globals).map_err(Error::Starlark)?;
}
Ok(())
}
pub fn build(self) -> crate::policy::Policy {
self.builder.into_inner().build()
}
}
#[derive(Debug, ProvidesStaticType)]
struct PolicyBuilder {
rules_by_program: MultiMap<String, RuleRef>,
}
impl PolicyBuilder {
fn new() -> Self {
Self {
rules_by_program: MultiMap::new(),
}
}
fn add_rule(&mut self, rule: RuleRef) {
self.rules_by_program
.insert(rule.program().to_string(), rule);
}
fn build(self) -> crate::policy::Policy {
crate::policy::Policy::new(self.rules_by_program)
}
}
fn parse_pattern<'v>(pattern: UnpackList<Value<'v>>) -> Result<Vec<PatternToken>> {
let tokens: Vec<PatternToken> = pattern
.items
.into_iter()
.map(parse_pattern_token)
.collect::<Result<_>>()?;
if tokens.is_empty() {
Err(Error::InvalidPattern("pattern cannot be empty".to_string()))
} else {
Ok(tokens)
}
}
fn parse_pattern_token<'v>(value: Value<'v>) -> Result<PatternToken> {
if let Some(s) = value.unpack_str() {
Ok(PatternToken::Single(s.to_string()))
} else if let Some(list) = ListRef::from_value(value) {
let tokens: Vec<String> = list
.content()
.iter()
.map(|value| {
value
.unpack_str()
.ok_or_else(|| {
Error::InvalidPattern(format!(
"pattern alternative must be a string (got {})",
value.get_type()
))
})
.map(str::to_string)
})
.collect::<Result<_>>()?;
match tokens.as_slice() {
[] => Err(Error::InvalidPattern(
"pattern alternatives cannot be empty".to_string(),
)),
[single] => Ok(PatternToken::Single(single.clone())),
_ => Ok(PatternToken::Alts(tokens)),
}
} else {
Err(Error::InvalidPattern(format!(
"pattern element must be a string or list of strings (got {})",
value.get_type()
)))
}
}
fn parse_examples<'v>(examples: UnpackList<Value<'v>>) -> Result<Vec<Vec<String>>> {
examples.items.into_iter().map(parse_example).collect()
}
fn parse_example<'v>(value: Value<'v>) -> Result<Vec<String>> {
if let Some(raw) = value.unpack_str() {
parse_string_example(raw)
} else if let Some(list) = ListRef::from_value(value) {
parse_list_example(list)
} else {
Err(Error::InvalidExample(format!(
"example must be a string or list of strings (got {})",
value.get_type()
)))
}
}
fn parse_string_example(raw: &str) -> Result<Vec<String>> {
let tokens = shlex::split(raw).ok_or_else(|| {
Error::InvalidExample("example string has invalid shell syntax".to_string())
})?;
if tokens.is_empty() {
Err(Error::InvalidExample(
"example cannot be an empty string".to_string(),
))
} else {
Ok(tokens)
}
}
fn parse_list_example(list: &ListRef) -> Result<Vec<String>> {
let tokens: Vec<String> = list
.content()
.iter()
.map(|value| {
value
.unpack_str()
.ok_or_else(|| {
Error::InvalidExample(format!(
"example tokens must be strings (got {})",
value.get_type()
))
})
.map(str::to_string)
})
.collect::<Result<_>>()?;
if tokens.is_empty() {
Err(Error::InvalidExample(
"example cannot be an empty list".to_string(),
))
} else {
Ok(tokens)
}
}
fn policy_builder<'v, 'a>(eval: &Evaluator<'v, 'a, '_>) -> RefMut<'a, PolicyBuilder> {
#[expect(clippy::expect_used)]
eval.extra
.as_ref()
.expect("policy_builder requires Evaluator.extra to be populated")
.downcast_ref::<RefCell<PolicyBuilder>>()
.expect("Evaluator.extra must contain a PolicyBuilder")
.borrow_mut()
}
#[starlark_module]
fn policy_builtins(builder: &mut GlobalsBuilder) {
fn prefix_rule<'v>(
pattern: UnpackList<Value<'v>>,
decision: Option<&'v str>,
r#match: Option<UnpackList<Value<'v>>>,
not_match: Option<UnpackList<Value<'v>>>,
eval: &mut Evaluator<'v, '_, '_>,
) -> anyhow::Result<NoneType> {
let decision = match decision {
Some(raw) => Decision::parse(raw)?,
None => Decision::Allow,
};
let pattern_tokens = parse_pattern(pattern)?;
let matches: Vec<Vec<String>> =
r#match.map(parse_examples).transpose()?.unwrap_or_default();
let not_matches: Vec<Vec<String>> = not_match
.map(parse_examples)
.transpose()?
.unwrap_or_default();
let mut builder = policy_builder(eval);
let (first_token, remaining_tokens) = pattern_tokens
.split_first()
.ok_or_else(|| Error::InvalidPattern("pattern cannot be empty".to_string()))?;
let rest: Arc<[PatternToken]> = remaining_tokens.to_vec().into();
let rules: Vec<RuleRef> = first_token
.alternatives()
.iter()
.map(|head| {
Arc::new(PrefixRule {
pattern: PrefixPattern {
first: Arc::from(head.as_str()),
rest: rest.clone(),
},
decision,
}) as RuleRef
})
.collect();
validate_not_match_examples(&rules, ¬_matches)?;
validate_match_examples(&rules, &matches)?;
rules.into_iter().for_each(|rule| builder.add_rule(rule));
Ok(NoneType)
}
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy/src/policy.rs | codex-rs/execpolicy/src/policy.rs | use crate::decision::Decision;
use crate::error::Error;
use crate::error::Result;
use crate::rule::PatternToken;
use crate::rule::PrefixPattern;
use crate::rule::PrefixRule;
use crate::rule::RuleMatch;
use crate::rule::RuleRef;
use multimap::MultiMap;
use serde::Deserialize;
use serde::Serialize;
use std::sync::Arc;
type HeuristicsFallback<'a> = Option<&'a dyn Fn(&[String]) -> Decision>;
#[derive(Clone, Debug)]
pub struct Policy {
rules_by_program: MultiMap<String, RuleRef>,
}
impl Policy {
pub fn new(rules_by_program: MultiMap<String, RuleRef>) -> Self {
Self { rules_by_program }
}
pub fn empty() -> Self {
Self::new(MultiMap::new())
}
pub fn rules(&self) -> &MultiMap<String, RuleRef> {
&self.rules_by_program
}
pub fn add_prefix_rule(&mut self, prefix: &[String], decision: Decision) -> Result<()> {
let (first_token, rest) = prefix
.split_first()
.ok_or_else(|| Error::InvalidPattern("prefix cannot be empty".to_string()))?;
let rule: RuleRef = Arc::new(PrefixRule {
pattern: PrefixPattern {
first: Arc::from(first_token.as_str()),
rest: rest
.iter()
.map(|token| PatternToken::Single(token.clone()))
.collect::<Vec<_>>()
.into(),
},
decision,
});
self.rules_by_program.insert(first_token.clone(), rule);
Ok(())
}
pub fn check<F>(&self, cmd: &[String], heuristics_fallback: &F) -> Evaluation
where
F: Fn(&[String]) -> Decision,
{
let matched_rules = self.matches_for_command(cmd, Some(heuristics_fallback));
Evaluation::from_matches(matched_rules)
}
pub fn check_multiple<Commands, F>(
&self,
commands: Commands,
heuristics_fallback: &F,
) -> Evaluation
where
Commands: IntoIterator,
Commands::Item: AsRef<[String]>,
F: Fn(&[String]) -> Decision,
{
let matched_rules: Vec<RuleMatch> = commands
.into_iter()
.flat_map(|command| {
self.matches_for_command(command.as_ref(), Some(heuristics_fallback))
})
.collect();
Evaluation::from_matches(matched_rules)
}
pub fn matches_for_command(
&self,
cmd: &[String],
heuristics_fallback: HeuristicsFallback<'_>,
) -> Vec<RuleMatch> {
let mut matched_rules: Vec<RuleMatch> = match cmd.first() {
Some(first) => self
.rules_by_program
.get_vec(first)
.map(|rules| rules.iter().filter_map(|rule| rule.matches(cmd)).collect())
.unwrap_or_default(),
None => Vec::new(),
};
if let (true, Some(heuristics_fallback)) = (matched_rules.is_empty(), heuristics_fallback) {
matched_rules.push(RuleMatch::HeuristicsRuleMatch {
command: cmd.to_vec(),
decision: heuristics_fallback(cmd),
});
}
matched_rules
}
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Evaluation {
pub decision: Decision,
#[serde(rename = "matchedRules")]
pub matched_rules: Vec<RuleMatch>,
}
impl Evaluation {
pub fn is_match(&self) -> bool {
self.matched_rules
.iter()
.any(|rule_match| !matches!(rule_match, RuleMatch::HeuristicsRuleMatch { .. }))
}
fn from_matches(matched_rules: Vec<RuleMatch>) -> Self {
let decision = matched_rules
.iter()
.map(RuleMatch::decision)
.max()
.unwrap_or(Decision::Allow);
Self {
decision,
matched_rules,
}
}
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
openai/codex | https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy/src/decision.rs | codex-rs/execpolicy/src/decision.rs | use serde::Deserialize;
use serde::Serialize;
use crate::error::Error;
use crate::error::Result;
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum Decision {
/// Command may run without further approval.
Allow,
/// Request explicit user approval; rejected outright when running with `approval_policy="never"`.
Prompt,
/// Command is blocked without further consideration.
Forbidden,
}
impl Decision {
pub fn parse(raw: &str) -> Result<Self> {
match raw {
"allow" => Ok(Self::Allow),
"prompt" => Ok(Self::Prompt),
"forbidden" => Ok(Self::Forbidden),
other => Err(Error::InvalidDecision(other.to_string())),
}
}
}
| rust | Apache-2.0 | 279283fe02bf0ce7f93a160db34dd8cf9c8f42c8 | 2026-01-04T15:31:59.292600Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.