repo
stringlengths
6
65
file_url
stringlengths
81
311
file_path
stringlengths
6
227
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 15:31:58
2026-01-04 20:25:31
truncated
bool
2 classes
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/exec_cell/model.rs
codex-rs/tui/src/exec_cell/model.rs
use std::time::Duration; use std::time::Instant; use codex_core::protocol::ExecCommandSource; use codex_protocol::parse_command::ParsedCommand; #[derive(Clone, Debug, Default)] pub(crate) struct CommandOutput { pub(crate) exit_code: i32, /// The aggregated stderr + stdout interleaved. pub(crate) aggregated_output: String, /// The formatted output of the command, as seen by the model. pub(crate) formatted_output: String, } #[derive(Debug, Clone)] pub(crate) struct ExecCall { pub(crate) call_id: String, pub(crate) command: Vec<String>, pub(crate) parsed: Vec<ParsedCommand>, pub(crate) output: Option<CommandOutput>, pub(crate) source: ExecCommandSource, pub(crate) start_time: Option<Instant>, pub(crate) duration: Option<Duration>, pub(crate) interaction_input: Option<String>, } #[derive(Debug)] pub(crate) struct ExecCell { pub(crate) calls: Vec<ExecCall>, animations_enabled: bool, } impl ExecCell { pub(crate) fn new(call: ExecCall, animations_enabled: bool) -> Self { Self { calls: vec![call], animations_enabled, } } pub(crate) fn with_added_call( &self, call_id: String, command: Vec<String>, parsed: Vec<ParsedCommand>, source: ExecCommandSource, interaction_input: Option<String>, ) -> Option<Self> { let call = ExecCall { call_id, command, parsed, output: None, source, start_time: Some(Instant::now()), duration: None, interaction_input, }; if self.is_exploring_cell() && Self::is_exploring_call(&call) { Some(Self { calls: [self.calls.clone(), vec![call]].concat(), animations_enabled: self.animations_enabled, }) } else { None } } pub(crate) fn complete_call( &mut self, call_id: &str, output: CommandOutput, duration: Duration, ) { if let Some(call) = self.calls.iter_mut().rev().find(|c| c.call_id == call_id) { call.output = Some(output); call.duration = Some(duration); call.start_time = None; } } pub(crate) fn should_flush(&self) -> bool { !self.is_exploring_cell() && self.calls.iter().all(|c| c.output.is_some()) } pub(crate) fn mark_failed(&mut self) { for call in 
self.calls.iter_mut() { if call.output.is_none() { let elapsed = call .start_time .map(|st| st.elapsed()) .unwrap_or_else(|| Duration::from_millis(0)); call.start_time = None; call.duration = Some(elapsed); call.output = Some(CommandOutput { exit_code: 1, formatted_output: String::new(), aggregated_output: String::new(), }); } } } pub(crate) fn is_exploring_cell(&self) -> bool { self.calls.iter().all(Self::is_exploring_call) } pub(crate) fn is_active(&self) -> bool { self.calls.iter().any(|c| c.output.is_none()) } pub(crate) fn active_start_time(&self) -> Option<Instant> { self.calls .iter() .find(|c| c.output.is_none()) .and_then(|c| c.start_time) } pub(crate) fn animations_enabled(&self) -> bool { self.animations_enabled } pub(crate) fn iter_calls(&self) -> impl Iterator<Item = &ExecCall> { self.calls.iter() } pub(super) fn is_exploring_call(call: &ExecCall) -> bool { !matches!(call.source, ExecCommandSource::UserShell) && !call.parsed.is_empty() && call.parsed.iter().all(|p| { matches!( p, ParsedCommand::Read { .. } | ParsedCommand::ListFiles { .. } | ParsedCommand::Search { .. } ) }) } } impl ExecCall { pub(crate) fn is_user_shell_command(&self) -> bool { matches!(self.source, ExecCommandSource::UserShell) } pub(crate) fn is_unified_exec_interaction(&self) -> bool { matches!(self.source, ExecCommandSource::UnifiedExecInteraction) } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/exec_cell/mod.rs
codex-rs/tui/src/exec_cell/mod.rs
mod model; mod render; pub(crate) use model::CommandOutput; #[cfg(test)] pub(crate) use model::ExecCall; pub(crate) use model::ExecCell; pub(crate) use render::OutputLinesParams; pub(crate) use render::TOOL_CALL_MAX_LINES; pub(crate) use render::new_active_exec_command; pub(crate) use render::output_lines; pub(crate) use render::spinner;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/bin/md-events.rs
codex-rs/tui/src/bin/md-events.rs
use std::io::Read; use std::io::{self}; fn main() { let mut input = String::new(); if let Err(err) = io::stdin().read_to_string(&mut input) { eprintln!("failed to read stdin: {err}"); std::process::exit(1); } let parser = pulldown_cmark::Parser::new(&input); for event in parser { println!("{event:?}"); } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/notifications/osc9.rs
codex-rs/tui/src/notifications/osc9.rs
use std::fmt; use std::io; use std::io::stdout; use crossterm::Command; use ratatui::crossterm::execute; #[derive(Debug, Default)] pub struct Osc9Backend; impl Osc9Backend { pub fn notify(&mut self, message: &str) -> io::Result<()> { execute!(stdout(), PostNotification(message.to_string())) } } /// Command that emits an OSC 9 desktop notification with a message. #[derive(Debug, Clone)] pub struct PostNotification(pub String); impl Command for PostNotification { fn write_ansi(&self, f: &mut impl fmt::Write) -> fmt::Result { write!(f, "\x1b]9;{}\x07", self.0) } #[cfg(windows)] fn execute_winapi(&self) -> io::Result<()> { Err(std::io::Error::other( "tried to execute PostNotification using WinAPI; use ANSI instead", )) } #[cfg(windows)] fn is_ansi_code_supported(&self) -> bool { true } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/notifications/mod.rs
codex-rs/tui/src/notifications/mod.rs
mod osc9;
mod windows_toast;

use std::env;
use std::io;

use codex_core::env::is_wsl;
use osc9::Osc9Backend;
use windows_toast::WindowsToastBackend;

/// Identifies which notification mechanism a backend uses.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum NotificationBackendKind {
    Osc9,
    WindowsToast,
}

/// A concrete desktop-notification backend.
#[derive(Debug)]
pub enum DesktopNotificationBackend {
    Osc9(Osc9Backend),
    WindowsToast(WindowsToastBackend),
}

impl DesktopNotificationBackend {
    pub fn osc9() -> Self {
        Self::Osc9(Osc9Backend)
    }

    pub fn windows_toast() -> Self {
        Self::WindowsToast(WindowsToastBackend::default())
    }

    /// Reports which kind of backend this is.
    pub fn kind(&self) -> NotificationBackendKind {
        match self {
            Self::Osc9(_) => NotificationBackendKind::Osc9,
            Self::WindowsToast(_) => NotificationBackendKind::WindowsToast,
        }
    }

    /// Delivers `message` through the underlying backend.
    pub fn notify(&mut self, message: &str) -> io::Result<()> {
        match self {
            Self::Osc9(backend) => backend.notify(message),
            Self::WindowsToast(backend) => backend.notify(message),
        }
    }
}

/// Picks the backend for the current environment: Windows toast notifications
/// when running under WSL inside Windows Terminal, OSC 9 otherwise.
pub fn detect_backend() -> DesktopNotificationBackend {
    if !should_use_windows_toasts() {
        return DesktopNotificationBackend::osc9();
    }
    tracing::info!(
        "Windows Terminal session detected under WSL; using Windows toast notifications"
    );
    DesktopNotificationBackend::windows_toast()
}

fn should_use_windows_toasts() -> bool {
    // WT_SESSION is set by Windows Terminal for each session it hosts.
    is_wsl() && env::var_os("WT_SESSION").is_some()
}

#[cfg(test)]
mod tests {
    use super::NotificationBackendKind;
    use super::detect_backend;
    use serial_test::serial;
    use std::ffi::OsString;

    /// Guard that mutates an environment variable and restores the previous
    /// value (or absence) on drop.
    struct EnvVarGuard {
        key: &'static str,
        original: Option<OsString>,
    }

    impl EnvVarGuard {
        fn set(key: &'static str, value: &str) -> Self {
            let original = std::env::var_os(key);
            unsafe {
                std::env::set_var(key, value);
            }
            Self { key, original }
        }

        fn remove(key: &'static str) -> Self {
            let original = std::env::var_os(key);
            unsafe {
                std::env::remove_var(key);
            }
            Self { key, original }
        }
    }

    impl Drop for EnvVarGuard {
        fn drop(&mut self) {
            unsafe {
                if let Some(value) = &self.original {
                    std::env::set_var(self.key, value);
                } else {
                    std::env::remove_var(self.key);
                }
            }
        }
    }

    #[test]
    #[serial]
    fn defaults_to_osc9_outside_wsl() {
        let _wsl_guard = EnvVarGuard::remove("WSL_DISTRO_NAME");
        let _wt_guard = EnvVarGuard::remove("WT_SESSION");
        assert_eq!(detect_backend().kind(), NotificationBackendKind::Osc9);
    }

    #[test]
    #[serial]
    fn waits_for_windows_terminal() {
        let _wsl_guard = EnvVarGuard::set("WSL_DISTRO_NAME", "Ubuntu");
        let _wt_guard = EnvVarGuard::remove("WT_SESSION");
        assert_eq!(detect_backend().kind(), NotificationBackendKind::Osc9);
    }

    #[cfg(target_os = "linux")]
    #[test]
    #[serial]
    fn selects_windows_toast_in_wsl_windows_terminal() {
        let _wsl_guard = EnvVarGuard::set("WSL_DISTRO_NAME", "Ubuntu");
        let _wt_guard = EnvVarGuard::set("WT_SESSION", "abc");
        assert_eq!(
            detect_backend().kind(),
            NotificationBackendKind::WindowsToast
        );
    }

    #[cfg(not(target_os = "linux"))]
    #[test]
    #[serial]
    fn stays_on_osc9_outside_linux_even_with_wsl_env() {
        let _wsl_guard = EnvVarGuard::set("WSL_DISTRO_NAME", "Ubuntu");
        let _wt_guard = EnvVarGuard::set("WT_SESSION", "abc");
        assert_eq!(detect_backend().kind(), NotificationBackendKind::Osc9);
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/notifications/windows_toast.rs
codex-rs/tui/src/notifications/windows_toast.rs
use std::io;
use std::process::Command;
use std::process::Stdio;

use base64::Engine as _;
use base64::engine::general_purpose::STANDARD as BASE64;

const APP_ID: &str = "Codex";
const POWERSHELL_EXE: &str = "powershell.exe";

/// Notification backend that shows Windows toast notifications by shelling
/// out to `powershell.exe` with a Base64-encoded script.
#[derive(Debug)]
pub struct WindowsToastBackend {
    // XML-escaped, Base64-encoded toast title; computed once up front.
    encoded_title: String,
}

impl WindowsToastBackend {
    /// Shows `message` as the body of a toast titled with [`APP_ID`].
    pub fn notify(&mut self, message: &str) -> io::Result<()> {
        let encoded_body = encode_argument(message);
        spawn_powershell(build_encoded_command(&self.encoded_title, &encoded_body))
    }
}

impl Default for WindowsToastBackend {
    fn default() -> Self {
        WindowsToastBackend {
            encoded_title: encode_argument(APP_ID),
        }
    }
}

/// Runs PowerShell with the given `-EncodedCommand`, discarding all stdio.
fn spawn_powershell(encoded_command: String) -> io::Result<()> {
    let status = Command::new(POWERSHELL_EXE)
        .arg("-NoProfile")
        .arg("-NoLogo")
        .arg("-EncodedCommand")
        .arg(encoded_command)
        .stdin(Stdio::null())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()?;
    if status.success() {
        Ok(())
    } else {
        Err(io::Error::other(format!(
            "{POWERSHELL_EXE} exited with status {status}"
        )))
    }
}

fn build_encoded_command(encoded_title: &str, encoded_body: &str) -> String {
    encode_script_for_powershell(&build_ps_script(encoded_title, encoded_body))
}

/// Builds the PowerShell script; title/body arrive Base64-encoded so no
/// user-controlled text is interpolated into the script itself.
fn build_ps_script(encoded_title: &str, encoded_body: &str) -> String {
    format!(
        r#"
$encoding = [System.Text.Encoding]::UTF8
$titleText = $encoding.GetString([System.Convert]::FromBase64String("{encoded_title}"))
$bodyText = $encoding.GetString([System.Convert]::FromBase64String("{encoded_body}"))
[Windows.UI.Notifications.ToastNotificationManager, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null
$doc = [Windows.UI.Notifications.ToastNotificationManager]::GetTemplateContent([Windows.UI.Notifications.ToastTemplateType]::ToastText02)
$textNodes = $doc.GetElementsByTagName("text")
$textNodes.Item(0).AppendChild($doc.CreateTextNode($titleText)) | Out-Null
$textNodes.Item(1).AppendChild($doc.CreateTextNode($bodyText)) | Out-Null
$toast = [Windows.UI.Notifications.ToastNotification]::new($doc)
[Windows.UI.Notifications.ToastNotificationManager]::CreateToastNotifier('Codex').Show($toast)
"#,
    )
}

/// PowerShell's -EncodedCommand expects Base64 over UTF-16LE bytes.
fn encode_script_for_powershell(script: &str) -> String {
    let wide: Vec<u8> = script
        .encode_utf16()
        .flat_map(|unit| unit.to_le_bytes())
        .collect();
    BASE64.encode(wide)
}

fn encode_argument(value: &str) -> String {
    BASE64.encode(escape_for_xml(value))
}

/// Escapes the five XML special characters so text is safe inside toast XML.
pub fn escape_for_xml(input: &str) -> String {
    input
        .chars()
        .fold(String::with_capacity(input.len()), |mut escaped, ch| {
            match ch {
                '&' => escaped.push_str("&amp;"),
                '<' => escaped.push_str("&lt;"),
                '>' => escaped.push_str("&gt;"),
                '"' => escaped.push_str("&quot;"),
                '\'' => escaped.push_str("&apos;"),
                other => escaped.push(other),
            }
            escaped
        })
}

#[cfg(test)]
mod tests {
    use super::encode_script_for_powershell;
    use super::escape_for_xml;
    use pretty_assertions::assert_eq;

    #[test]
    fn escapes_xml_entities() {
        assert_eq!(escape_for_xml("5 > 3"), "5 &gt; 3");
        assert_eq!(escape_for_xml("a & b"), "a &amp; b");
        assert_eq!(escape_for_xml("<tag>"), "&lt;tag&gt;");
        assert_eq!(escape_for_xml("\"quoted\""), "&quot;quoted&quot;");
        assert_eq!(escape_for_xml("single 'quote'"), "single &apos;quote&apos;");
    }

    #[test]
    fn leaves_safe_text_unmodified() {
        assert_eq!(escape_for_xml("codex"), "codex");
        assert_eq!(escape_for_xml("multi word text"), "multi word text");
    }

    #[test]
    fn encodes_utf16le_for_powershell() {
        assert_eq!(encode_script_for_powershell("A"), "QQA=");
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/render/highlight.rs
codex-rs/tui/src/render/highlight.rs
use ratatui::style::Style;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::text::Span;
use std::sync::OnceLock;
use tree_sitter_highlight::Highlight;
use tree_sitter_highlight::HighlightConfiguration;
use tree_sitter_highlight::HighlightEvent;
use tree_sitter_highlight::Highlighter;

// Ref: https://github.com/tree-sitter/tree-sitter-bash/blob/master/queries/highlights.scm
// Capture names from the bash highlight query that this module styles.
#[derive(Copy, Clone)]
enum BashHighlight {
    Comment,
    Constant,
    Embedded,
    Function,
    Keyword,
    Number,
    Operator,
    Property,
    String,
}

impl BashHighlight {
    // Order matters: `highlight_for` indexes into this array with the
    // `Highlight` index tree-sitter hands back, which corresponds to the
    // order the names were passed to `configure`.
    const ALL: [Self; 9] = [
        Self::Comment,
        Self::Constant,
        Self::Embedded,
        Self::Function,
        Self::Keyword,
        Self::Number,
        Self::Operator,
        Self::Property,
        Self::String,
    ];

    // The capture name as it appears in the highlight query.
    const fn as_str(self) -> &'static str {
        match self {
            Self::Comment => "comment",
            Self::Constant => "constant",
            Self::Embedded => "embedded",
            Self::Function => "function",
            Self::Keyword => "keyword",
            Self::Number => "number",
            Self::Operator => "operator",
            Self::Property => "property",
            Self::String => "string",
        }
    }

    // Comments, operators and strings are dimmed; everything else unstyled.
    fn style(self) -> Style {
        match self {
            Self::Comment | Self::Operator | Self::String => Style::default().dim(),
            _ => Style::default(),
        }
    }
}

static HIGHLIGHT_CONFIG: OnceLock<HighlightConfiguration> = OnceLock::new();

// Lazily-built list of capture names, in `BashHighlight::ALL` order.
fn highlight_names() -> &'static [&'static str] {
    static NAMES: OnceLock<[&'static str; BashHighlight::ALL.len()]> = OnceLock::new();
    NAMES
        .get_or_init(|| BashHighlight::ALL.map(BashHighlight::as_str))
        .as_slice()
}

// Lazily-built tree-sitter highlight configuration for bash.
fn highlight_config() -> &'static HighlightConfiguration {
    HIGHLIGHT_CONFIG.get_or_init(|| {
        let language = tree_sitter_bash::LANGUAGE.into();
        #[expect(clippy::expect_used)]
        let mut config = HighlightConfiguration::new(
            language,
            "bash",
            tree_sitter_bash::HIGHLIGHT_QUERY,
            "",
            "",
        )
        .expect("load bash highlight query");
        config.configure(highlight_names());
        config
    })
}

// Maps a tree-sitter highlight index back to our enum (see ALL ordering note).
fn highlight_for(highlight: Highlight) -> BashHighlight {
    BashHighlight::ALL[highlight.0]
}

// Appends a (possibly multi-line) source segment to `lines`, starting a new
// `Line` at each '\n' and styling every produced span with `style`.
fn push_segment(lines: &mut Vec<Line<'static>>, segment: &str, style: Option<Style>) {
    for (i, part) in segment.split('\n').enumerate() {
        // Every part after the first follows a newline in the segment.
        if i > 0 {
            lines.push(Line::from(""));
        }
        if part.is_empty() {
            continue;
        }
        let span = match style {
            Some(style) => Span::styled(part.to_string(), style),
            None => part.to_string().into(),
        };
        // `lines` is seeded with one empty Line, so `last_mut` is Some here.
        if let Some(last) = lines.last_mut() {
            last.spans.push(span);
        }
    }
}

/// Convert a bash script into per-line styled content using tree-sitter's
/// bash highlight query. The highlighter is streamed so multi-line content is
/// split into `Line`s while preserving style boundaries.
pub(crate) fn highlight_bash_to_lines(script: &str) -> Vec<Line<'static>> {
    let mut highlighter = Highlighter::new();
    // On any highlighter error, fall back to the unstyled script as one line.
    let iterator =
        match highlighter.highlight(highlight_config(), script.as_bytes(), None, |_| None) {
            Ok(iter) => iter,
            Err(_) => return vec![script.to_string().into()],
        };
    let mut lines: Vec<Line<'static>> = vec![Line::from("")];
    // Stack of currently-open highlight captures; innermost wins.
    let mut highlight_stack: Vec<Highlight> = Vec::new();
    for event in iterator {
        match event {
            Ok(HighlightEvent::HighlightStart(highlight)) => highlight_stack.push(highlight),
            Ok(HighlightEvent::HighlightEnd) => {
                highlight_stack.pop();
            }
            Ok(HighlightEvent::Source { start, end }) => {
                if start == end {
                    continue;
                }
                let style = highlight_stack.last().map(|h| highlight_for(*h).style());
                push_segment(&mut lines, &script[start..end], style);
            }
            Err(_) => return vec![script.to_string().into()],
        }
    }
    if lines.is_empty() {
        vec![Line::from("")]
    } else {
        lines
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use ratatui::style::Modifier;

    // Rejoins all span text, one source line per Line, for round-trip checks.
    fn reconstructed(lines: &[Line<'static>]) -> String {
        lines
            .iter()
            .map(|l| {
                l.spans
                    .iter()
                    .map(|sp| sp.content.clone())
                    .collect::<String>()
            })
            .collect::<Vec<_>>()
            .join("\n")
    }

    // Collects the trimmed, non-empty contents of every DIM-styled span.
    fn dimmed_tokens(lines: &[Line<'static>]) -> Vec<String> {
        lines
            .iter()
            .flat_map(|l| l.spans.iter())
            .filter(|sp| sp.style.add_modifier.contains(Modifier::DIM))
            .map(|sp| sp.content.clone().into_owned())
            .map(|token| token.trim().to_string())
            .filter(|token| !token.is_empty())
            .collect()
    }

    #[test]
    fn dims_expected_bash_operators() {
        let s = "echo foo && bar || baz | qux & (echo hi)";
        let lines = highlight_bash_to_lines(s);
        assert_eq!(reconstructed(&lines), s);
        let dimmed = dimmed_tokens(&lines);
        assert!(dimmed.contains(&"&&".to_string()));
        assert!(dimmed.contains(&"|".to_string()));
        assert!(!dimmed.contains(&"echo".to_string()));
    }

    #[test]
    fn dims_redirects_and_strings() {
        let s = "echo \"hi\" > out.txt; echo 'ok'";
        let lines = highlight_bash_to_lines(s);
        assert_eq!(reconstructed(&lines), s);
        let dimmed = dimmed_tokens(&lines);
        assert!(dimmed.contains(&">".to_string()));
        assert!(dimmed.contains(&"\"hi\"".to_string()));
        assert!(dimmed.contains(&"'ok'".to_string()));
    }

    #[test]
    fn highlights_command_and_strings() {
        let s = "echo \"hi\"";
        let lines = highlight_bash_to_lines(s);
        let mut echo_style = None;
        let mut string_style = None;
        for span in &lines[0].spans {
            let text = span.content.as_ref();
            if text == "echo" {
                echo_style = Some(span.style);
            }
            if text == "\"hi\"" {
                string_style = Some(span.style);
            }
        }
        let echo_style = echo_style.expect("echo span missing");
        let string_style = string_style.expect("string span missing");
        assert!(echo_style.fg.is_none());
        assert!(!echo_style.add_modifier.contains(Modifier::DIM));
        assert!(string_style.add_modifier.contains(Modifier::DIM));
    }

    #[test]
    fn highlights_heredoc_body_as_string() {
        let s = "cat <<EOF\nheredoc body\nEOF";
        let lines = highlight_bash_to_lines(s);
        let body_line = &lines[1];
        let mut body_style = None;
        for span in &body_line.spans {
            if span.content.as_ref() == "heredoc body" {
                body_style = Some(span.style);
            }
        }
        let body_style = body_style.expect("missing heredoc span");
        assert!(body_style.add_modifier.contains(Modifier::DIM));
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/render/renderable.rs
codex-rs/tui/src/render/renderable.rs
use std::sync::Arc; use ratatui::buffer::Buffer; use ratatui::layout::Rect; use ratatui::text::Line; use ratatui::text::Span; use ratatui::widgets::Paragraph; use ratatui::widgets::WidgetRef; use crate::render::Insets; use crate::render::RectExt as _; pub trait Renderable { fn render(&self, area: Rect, buf: &mut Buffer); fn desired_height(&self, width: u16) -> u16; fn cursor_pos(&self, _area: Rect) -> Option<(u16, u16)> { None } } pub enum RenderableItem<'a> { Owned(Box<dyn Renderable + 'a>), Borrowed(&'a dyn Renderable), } impl<'a> Renderable for RenderableItem<'a> { fn render(&self, area: Rect, buf: &mut Buffer) { match self { RenderableItem::Owned(child) => child.render(area, buf), RenderableItem::Borrowed(child) => child.render(area, buf), } } fn desired_height(&self, width: u16) -> u16 { match self { RenderableItem::Owned(child) => child.desired_height(width), RenderableItem::Borrowed(child) => child.desired_height(width), } } fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> { match self { RenderableItem::Owned(child) => child.cursor_pos(area), RenderableItem::Borrowed(child) => child.cursor_pos(area), } } } impl<'a> From<Box<dyn Renderable + 'a>> for RenderableItem<'a> { fn from(value: Box<dyn Renderable + 'a>) -> Self { RenderableItem::Owned(value) } } impl<'a, R> From<R> for Box<dyn Renderable + 'a> where R: Renderable + 'a, { fn from(value: R) -> Self { Box::new(value) } } impl Renderable for () { fn render(&self, _area: Rect, _buf: &mut Buffer) {} fn desired_height(&self, _width: u16) -> u16 { 0 } } impl Renderable for &str { fn render(&self, area: Rect, buf: &mut Buffer) { self.render_ref(area, buf); } fn desired_height(&self, _width: u16) -> u16 { 1 } } impl Renderable for String { fn render(&self, area: Rect, buf: &mut Buffer) { self.render_ref(area, buf); } fn desired_height(&self, _width: u16) -> u16 { 1 } } impl<'a> Renderable for Span<'a> { fn render(&self, area: Rect, buf: &mut Buffer) { self.render_ref(area, buf); } fn desired_height(&self, 
_width: u16) -> u16 { 1 } } impl<'a> Renderable for Line<'a> { fn render(&self, area: Rect, buf: &mut Buffer) { WidgetRef::render_ref(self, area, buf); } fn desired_height(&self, _width: u16) -> u16 { 1 } } impl<'a> Renderable for Paragraph<'a> { fn render(&self, area: Rect, buf: &mut Buffer) { self.render_ref(area, buf); } fn desired_height(&self, width: u16) -> u16 { self.line_count(width) as u16 } } impl<R: Renderable> Renderable for Option<R> { fn render(&self, area: Rect, buf: &mut Buffer) { if let Some(renderable) = self { renderable.render(area, buf); } } fn desired_height(&self, width: u16) -> u16 { if let Some(renderable) = self { renderable.desired_height(width) } else { 0 } } } impl<R: Renderable> Renderable for Arc<R> { fn render(&self, area: Rect, buf: &mut Buffer) { self.as_ref().render(area, buf); } fn desired_height(&self, width: u16) -> u16 { self.as_ref().desired_height(width) } } pub struct ColumnRenderable<'a> { children: Vec<RenderableItem<'a>>, } impl Renderable for ColumnRenderable<'_> { fn render(&self, area: Rect, buf: &mut Buffer) { let mut y = area.y; for child in &self.children { let child_area = Rect::new(area.x, y, area.width, child.desired_height(area.width)) .intersection(area); if !child_area.is_empty() { child.render(child_area, buf); } y += child_area.height; } } fn desired_height(&self, width: u16) -> u16 { self.children .iter() .map(|child| child.desired_height(width)) .sum() } /// Returns the cursor position of the first child that has a cursor position, offset by the /// child's position in the column. /// /// It is generally assumed that either zero or one child will have a cursor position. 
fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> { let mut y = area.y; for child in &self.children { let child_area = Rect::new(area.x, y, area.width, child.desired_height(area.width)) .intersection(area); if !child_area.is_empty() && let Some((px, py)) = child.cursor_pos(child_area) { return Some((px, py)); } y += child_area.height; } None } } impl<'a> ColumnRenderable<'a> { pub fn new() -> Self { Self { children: vec![] } } pub fn with<I, T>(children: I) -> Self where I: IntoIterator<Item = T>, T: Into<RenderableItem<'a>>, { Self { children: children.into_iter().map(Into::into).collect(), } } pub fn push(&mut self, child: impl Into<Box<dyn Renderable + 'a>>) { self.children.push(RenderableItem::Owned(child.into())); } #[allow(dead_code)] pub fn push_ref<R>(&mut self, child: &'a R) where R: Renderable + 'a, { self.children .push(RenderableItem::Borrowed(child as &'a dyn Renderable)); } } pub struct FlexChild<'a> { flex: i32, child: RenderableItem<'a>, } pub struct FlexRenderable<'a> { children: Vec<FlexChild<'a>>, } /// Lays out children in a column, with the ability to specify a flex factor for each child. /// /// Children with flex factor > 0 will be allocated the remaining space after the non-flex children, /// proportional to the flex factor. impl<'a> FlexRenderable<'a> { pub fn new() -> Self { Self { children: vec![] } } pub fn push(&mut self, flex: i32, child: impl Into<RenderableItem<'a>>) { self.children.push(FlexChild { flex, child: child.into(), }); } /// Loosely inspired by Flutter's Flex widget. /// /// Ref https://github.com/flutter/flutter/blob/3fd81edbf1e015221e143c92b2664f4371bdc04a/packages/flutter/lib/src/rendering/flex.dart#L1205-L1209 fn allocate(&self, area: Rect) -> Vec<Rect> { let mut allocated_rects = Vec::with_capacity(self.children.len()); let mut child_sizes = vec![0; self.children.len()]; let mut allocated_size = 0; let mut total_flex = 0; // 1. Allocate space to non-flex children. 
let max_size = area.height; let mut last_flex_child_idx = 0; for (i, FlexChild { flex, child }) in self.children.iter().enumerate() { if *flex > 0 { total_flex += flex; last_flex_child_idx = i; } else { child_sizes[i] = child .desired_height(area.width) .min(max_size.saturating_sub(allocated_size)); allocated_size += child_sizes[i]; } } let free_space = max_size.saturating_sub(allocated_size); // 2. Allocate space to flex children, proportional to their flex factor. let mut allocated_flex_space = 0; if total_flex > 0 { let space_per_flex = free_space / total_flex as u16; for (i, FlexChild { flex, child }) in self.children.iter().enumerate() { if *flex > 0 { // Last flex child gets all the remaining space, to prevent a rounding error // from not allocating all the space. let max_child_extent = if i == last_flex_child_idx { free_space - allocated_flex_space } else { space_per_flex * *flex as u16 }; let child_size = child.desired_height(area.width).min(max_child_extent); child_sizes[i] = child_size; allocated_size += child_size; allocated_flex_space += child_size; } } } let mut y = area.y; for size in child_sizes { let child_area = Rect::new(area.x, y, area.width, size); allocated_rects.push(child_area); y += child_area.height; } allocated_rects } } impl<'a> Renderable for FlexRenderable<'a> { fn render(&self, area: Rect, buf: &mut Buffer) { self.allocate(area) .into_iter() .zip(self.children.iter()) .for_each(|(rect, child)| { child.child.render(rect, buf); }); } fn desired_height(&self, width: u16) -> u16 { self.allocate(Rect::new(0, 0, width, u16::MAX)) .last() .map(|rect| rect.bottom()) .unwrap_or(0) } fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> { self.allocate(area) .into_iter() .zip(self.children.iter()) .find_map(|(rect, child)| child.child.cursor_pos(rect)) } } pub struct RowRenderable<'a> { children: Vec<(u16, RenderableItem<'a>)>, } impl Renderable for RowRenderable<'_> { fn render(&self, area: Rect, buf: &mut Buffer) { let mut x = area.x; for 
(width, child) in &self.children { let available_width = area.width.saturating_sub(x - area.x); let child_area = Rect::new(x, area.y, (*width).min(available_width), area.height); if child_area.is_empty() { break; } child.render(child_area, buf); x = x.saturating_add(*width); } } fn desired_height(&self, width: u16) -> u16 { let mut max_height = 0; let mut width_remaining = width; for (child_width, child) in &self.children { let w = (*child_width).min(width_remaining); if w == 0 { break; } let height = child.desired_height(w); if height > max_height { max_height = height; } width_remaining = width_remaining.saturating_sub(w); } max_height } fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> { let mut x = area.x; for (width, child) in &self.children { let available_width = area.width.saturating_sub(x - area.x); let child_area = Rect::new(x, area.y, (*width).min(available_width), area.height); if !child_area.is_empty() && let Some(pos) = child.cursor_pos(child_area) { return Some(pos); } x = x.saturating_add(*width); } None } } impl<'a> RowRenderable<'a> { pub fn new() -> Self { Self { children: vec![] } } pub fn push(&mut self, width: u16, child: impl Into<Box<dyn Renderable>>) { self.children .push((width, RenderableItem::Owned(child.into()))); } #[allow(dead_code)] pub fn push_ref<R>(&mut self, width: u16, child: &'a R) where R: Renderable + 'a, { self.children .push((width, RenderableItem::Borrowed(child as &'a dyn Renderable))); } } pub struct InsetRenderable<'a> { child: RenderableItem<'a>, insets: Insets, } impl<'a> Renderable for InsetRenderable<'a> { fn render(&self, area: Rect, buf: &mut Buffer) { self.child.render(area.inset(self.insets), buf); } fn desired_height(&self, width: u16) -> u16 { self.child .desired_height(width - self.insets.left - self.insets.right) + self.insets.top + self.insets.bottom } fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> { self.child.cursor_pos(area.inset(self.insets)) } } impl<'a> InsetRenderable<'a> { pub fn 
new(child: impl Into<RenderableItem<'a>>, insets: Insets) -> Self { Self { child: child.into(), insets, } } } pub trait RenderableExt<'a> { fn inset(self, insets: Insets) -> RenderableItem<'a>; } impl<'a, R> RenderableExt<'a> for R where R: Renderable + 'a, { fn inset(self, insets: Insets) -> RenderableItem<'a> { let child: RenderableItem<'a> = RenderableItem::Owned(Box::new(self) as Box<dyn Renderable + 'a>); RenderableItem::Owned(Box::new(InsetRenderable { child, insets })) } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/render/mod.rs
codex-rs/tui/src/render/mod.rs
use ratatui::layout::Rect; pub mod highlight; pub mod line_utils; pub mod renderable; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct Insets { left: u16, top: u16, right: u16, bottom: u16, } impl Insets { pub fn tlbr(top: u16, left: u16, bottom: u16, right: u16) -> Self { Self { top, left, bottom, right, } } pub fn vh(v: u16, h: u16) -> Self { Self { top: v, left: h, bottom: v, right: h, } } } pub trait RectExt { fn inset(&self, insets: Insets) -> Rect; } impl RectExt for Rect { fn inset(&self, insets: Insets) -> Rect { let horizontal = insets.left.saturating_add(insets.right); let vertical = insets.top.saturating_add(insets.bottom); Rect { x: self.x.saturating_add(insets.left), y: self.y.saturating_add(insets.top), width: self.width.saturating_sub(horizontal), height: self.height.saturating_sub(vertical), } } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/render/line_utils.rs
codex-rs/tui/src/render/line_utils.rs
use ratatui::text::Line; use ratatui::text::Span; /// Clone a borrowed ratatui `Line` into an owned `'static` line. pub fn line_to_static(line: &Line<'_>) -> Line<'static> { Line { style: line.style, alignment: line.alignment, spans: line .spans .iter() .map(|s| Span { style: s.style, content: std::borrow::Cow::Owned(s.content.to_string()), }) .collect(), } } /// Append owned copies of borrowed lines to `out`. pub fn push_owned_lines<'a>(src: &[Line<'a>], out: &mut Vec<Line<'static>>) { for l in src { out.push(line_to_static(l)); } } /// Consider a line blank if it has no spans or only spans whose contents are /// empty or consist solely of spaces (no tabs/newlines). pub fn is_blank_line_spaces_only(line: &Line<'_>) -> bool { if line.spans.is_empty() { return true; } line.spans .iter() .all(|s| s.content.is_empty() || s.content.chars().all(|c| c == ' ')) } /// Prefix each line with `initial_prefix` for the first line and /// `subsequent_prefix` for following lines. Returns a new Vec of owned lines. pub fn prefix_lines( lines: Vec<Line<'static>>, initial_prefix: Span<'static>, subsequent_prefix: Span<'static>, ) -> Vec<Line<'static>> { lines .into_iter() .enumerate() .map(|(i, l)| { let mut spans = Vec::with_capacity(l.spans.len() + 1); spans.push(if i == 0 { initial_prefix.clone() } else { subsequent_prefix.clone() }); spans.extend(l.spans); Line::from(spans).style(l.style) }) .collect() }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/status/tests.rs
codex-rs/tui/src/status/tests.rs
use super::new_status_output; use super::rate_limit_snapshot_display; use crate::history_cell::HistoryCell; use chrono::Duration as ChronoDuration; use chrono::TimeZone; use chrono::Utc; use codex_core::AuthManager; use codex_core::config::Config; use codex_core::config::ConfigBuilder; use codex_core::models_manager::manager::ModelsManager; use codex_core::protocol::CreditsSnapshot; use codex_core::protocol::RateLimitSnapshot; use codex_core::protocol::RateLimitWindow; use codex_core::protocol::SandboxPolicy; use codex_core::protocol::TokenUsage; use codex_core::protocol::TokenUsageInfo; use codex_protocol::config_types::ReasoningSummary; use codex_protocol::openai_models::ReasoningEffort; use insta::assert_snapshot; use ratatui::prelude::*; use std::path::PathBuf; use tempfile::TempDir; async fn test_config(temp_home: &TempDir) -> Config { ConfigBuilder::default() .codex_home(temp_home.path().to_path_buf()) .build() .await .expect("load config") } fn test_auth_manager(config: &Config) -> AuthManager { AuthManager::new( config.codex_home.clone(), false, config.cli_auth_credentials_store_mode, ) } fn token_info_for(model_slug: &str, config: &Config, usage: &TokenUsage) -> TokenUsageInfo { let context_window = ModelsManager::construct_model_family_offline(model_slug, config) .context_window .or(config.model_context_window); TokenUsageInfo { total_token_usage: usage.clone(), last_token_usage: usage.clone(), model_context_window: context_window, } } fn render_lines(lines: &[Line<'static>]) -> Vec<String> { lines .iter() .map(|line| { line.spans .iter() .map(|span| span.content.as_ref()) .collect::<String>() }) .collect() } fn sanitize_directory(lines: Vec<String>) -> Vec<String> { lines .into_iter() .map(|line| { if let (Some(dir_pos), Some(pipe_idx)) = (line.find("Directory: "), line.rfind('│')) { let prefix = &line[..dir_pos + "Directory: ".len()]; let suffix = &line[pipe_idx..]; let content_width = pipe_idx.saturating_sub(dir_pos + "Directory: ".len()); let 
replacement = "[[workspace]]"; let mut rebuilt = prefix.to_string(); rebuilt.push_str(replacement); if content_width > replacement.len() { rebuilt.push_str(&" ".repeat(content_width - replacement.len())); } rebuilt.push_str(suffix); rebuilt } else { line } }) .collect() } fn reset_at_from(captured_at: &chrono::DateTime<chrono::Local>, seconds: i64) -> i64 { (*captured_at + ChronoDuration::seconds(seconds)) .with_timezone(&Utc) .timestamp() } #[tokio::test] async fn status_snapshot_includes_reasoning_details() { let temp_home = TempDir::new().expect("temp home"); let mut config = test_config(&temp_home).await; config.model = Some("gpt-5.1-codex-max".to_string()); config.model_provider_id = "openai".to_string(); config.model_reasoning_effort = Some(ReasoningEffort::High); config.model_reasoning_summary = ReasoningSummary::Detailed; config .sandbox_policy .set(SandboxPolicy::WorkspaceWrite { writable_roots: Vec::new(), network_access: false, exclude_tmpdir_env_var: false, exclude_slash_tmp: false, }) .expect("set sandbox policy"); config.cwd = PathBuf::from("/workspace/tests"); let auth_manager = test_auth_manager(&config); let usage = TokenUsage { input_tokens: 1_200, cached_input_tokens: 200, output_tokens: 900, reasoning_output_tokens: 150, total_tokens: 2_250, }; let captured_at = chrono::Local .with_ymd_and_hms(2024, 1, 2, 3, 4, 5) .single() .expect("timestamp"); let snapshot = RateLimitSnapshot { primary: Some(RateLimitWindow { used_percent: 72.5, window_minutes: Some(300), resets_at: Some(reset_at_from(&captured_at, 600)), }), secondary: Some(RateLimitWindow { used_percent: 45.0, window_minutes: Some(10080), resets_at: Some(reset_at_from(&captured_at, 1_200)), }), credits: None, plan_type: None, }; let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, 
&auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, captured_at, &model_slug, ); let mut rendered_lines = render_lines(&composite.display_lines(80)); if cfg!(windows) { for line in &mut rendered_lines { *line = line.replace('\\', "/"); } } let sanitized = sanitize_directory(rendered_lines).join("\n"); assert_snapshot!(sanitized); } #[tokio::test] async fn status_snapshot_includes_monthly_limit() { let temp_home = TempDir::new().expect("temp home"); let mut config = test_config(&temp_home).await; config.model = Some("gpt-5.1-codex-max".to_string()); config.model_provider_id = "openai".to_string(); config.cwd = PathBuf::from("/workspace/tests"); let auth_manager = test_auth_manager(&config); let usage = TokenUsage { input_tokens: 800, cached_input_tokens: 0, output_tokens: 400, reasoning_output_tokens: 0, total_tokens: 1_200, }; let captured_at = chrono::Local .with_ymd_and_hms(2024, 5, 6, 7, 8, 9) .single() .expect("timestamp"); let snapshot = RateLimitSnapshot { primary: Some(RateLimitWindow { used_percent: 12.0, window_minutes: Some(43_200), resets_at: Some(reset_at_from(&captured_at, 86_400)), }), secondary: None, credits: None, plan_type: None, }; let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, captured_at, &model_slug, ); let mut rendered_lines = render_lines(&composite.display_lines(80)); if cfg!(windows) { for line in &mut rendered_lines { *line = line.replace('\\', "/"); } } let sanitized = sanitize_directory(rendered_lines).join("\n"); assert_snapshot!(sanitized); } #[tokio::test] async fn status_snapshot_shows_unlimited_credits() { let temp_home = TempDir::new().expect("temp home"); let config = test_config(&temp_home).await; let auth_manager = 
test_auth_manager(&config); let usage = TokenUsage::default(); let captured_at = chrono::Local .with_ymd_and_hms(2024, 2, 3, 4, 5, 6) .single() .expect("timestamp"); let snapshot = RateLimitSnapshot { primary: None, secondary: None, credits: Some(CreditsSnapshot { has_credits: true, unlimited: true, balance: None, }), plan_type: None, }; let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, captured_at, &model_slug, ); let rendered = render_lines(&composite.display_lines(120)); assert!( rendered .iter() .any(|line| line.contains("Credits:") && line.contains("Unlimited")), "expected Credits: Unlimited line, got {rendered:?}" ); } #[tokio::test] async fn status_snapshot_shows_positive_credits() { let temp_home = TempDir::new().expect("temp home"); let config = test_config(&temp_home).await; let auth_manager = test_auth_manager(&config); let usage = TokenUsage::default(); let captured_at = chrono::Local .with_ymd_and_hms(2024, 3, 4, 5, 6, 7) .single() .expect("timestamp"); let snapshot = RateLimitSnapshot { primary: None, secondary: None, credits: Some(CreditsSnapshot { has_credits: true, unlimited: false, balance: Some("12.5".to_string()), }), plan_type: None, }; let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, captured_at, &model_slug, ); let rendered = render_lines(&composite.display_lines(120)); assert!( rendered .iter() .any(|line| line.contains("Credits:") && line.contains("13 credits")), "expected Credits line 
with rounded credits, got {rendered:?}" ); } #[tokio::test] async fn status_snapshot_hides_zero_credits() { let temp_home = TempDir::new().expect("temp home"); let config = test_config(&temp_home).await; let auth_manager = test_auth_manager(&config); let usage = TokenUsage::default(); let captured_at = chrono::Local .with_ymd_and_hms(2024, 4, 5, 6, 7, 8) .single() .expect("timestamp"); let snapshot = RateLimitSnapshot { primary: None, secondary: None, credits: Some(CreditsSnapshot { has_credits: true, unlimited: false, balance: Some("0".to_string()), }), plan_type: None, }; let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, captured_at, &model_slug, ); let rendered = render_lines(&composite.display_lines(120)); assert!( rendered.iter().all(|line| !line.contains("Credits:")), "expected no Credits line, got {rendered:?}" ); } #[tokio::test] async fn status_snapshot_hides_when_has_no_credits_flag() { let temp_home = TempDir::new().expect("temp home"); let config = test_config(&temp_home).await; let auth_manager = test_auth_manager(&config); let usage = TokenUsage::default(); let captured_at = chrono::Local .with_ymd_and_hms(2024, 5, 6, 7, 8, 9) .single() .expect("timestamp"); let snapshot = RateLimitSnapshot { primary: None, secondary: None, credits: Some(CreditsSnapshot { has_credits: false, unlimited: true, balance: None, }), plan_type: None, }; let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, captured_at, 
&model_slug, ); let rendered = render_lines(&composite.display_lines(120)); assert!( rendered.iter().all(|line| !line.contains("Credits:")), "expected no Credits line when has_credits is false, got {rendered:?}" ); } #[tokio::test] async fn status_card_token_usage_excludes_cached_tokens() { let temp_home = TempDir::new().expect("temp home"); let mut config = test_config(&temp_home).await; config.model = Some("gpt-5.1-codex-max".to_string()); config.cwd = PathBuf::from("/workspace/tests"); let auth_manager = test_auth_manager(&config); let usage = TokenUsage { input_tokens: 1_200, cached_input_tokens: 200, output_tokens: 900, reasoning_output_tokens: 0, total_tokens: 2_100, }; let now = chrono::Local .with_ymd_and_hms(2024, 1, 1, 0, 0, 0) .single() .expect("timestamp"); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, None, None, now, &model_slug, ); let rendered = render_lines(&composite.display_lines(120)); assert!( rendered.iter().all(|line| !line.contains("cached")), "cached tokens should not be displayed, got: {rendered:?}" ); } #[tokio::test] async fn status_snapshot_truncates_in_narrow_terminal() { let temp_home = TempDir::new().expect("temp home"); let mut config = test_config(&temp_home).await; config.model = Some("gpt-5.1-codex-max".to_string()); config.model_provider_id = "openai".to_string(); config.model_reasoning_effort = Some(ReasoningEffort::High); config.model_reasoning_summary = ReasoningSummary::Detailed; config.cwd = PathBuf::from("/workspace/tests"); let auth_manager = test_auth_manager(&config); let usage = TokenUsage { input_tokens: 1_200, cached_input_tokens: 200, output_tokens: 900, reasoning_output_tokens: 150, total_tokens: 2_250, }; let captured_at = chrono::Local .with_ymd_and_hms(2024, 1, 2, 3, 4, 5) .single() .expect("timestamp"); let snapshot = 
RateLimitSnapshot { primary: Some(RateLimitWindow { used_percent: 72.5, window_minutes: Some(300), resets_at: Some(reset_at_from(&captured_at, 600)), }), secondary: None, credits: None, plan_type: None, }; let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, captured_at, &model_slug, ); let mut rendered_lines = render_lines(&composite.display_lines(70)); if cfg!(windows) { for line in &mut rendered_lines { *line = line.replace('\\', "/"); } } let sanitized = sanitize_directory(rendered_lines).join("\n"); assert_snapshot!(sanitized); } #[tokio::test] async fn status_snapshot_shows_missing_limits_message() { let temp_home = TempDir::new().expect("temp home"); let mut config = test_config(&temp_home).await; config.model = Some("gpt-5.1-codex-max".to_string()); config.cwd = PathBuf::from("/workspace/tests"); let auth_manager = test_auth_manager(&config); let usage = TokenUsage { input_tokens: 500, cached_input_tokens: 0, output_tokens: 250, reasoning_output_tokens: 0, total_tokens: 750, }; let now = chrono::Local .with_ymd_and_hms(2024, 2, 3, 4, 5, 6) .single() .expect("timestamp"); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, None, None, now, &model_slug, ); let mut rendered_lines = render_lines(&composite.display_lines(80)); if cfg!(windows) { for line in &mut rendered_lines { *line = line.replace('\\', "/"); } } let sanitized = sanitize_directory(rendered_lines).join("\n"); assert_snapshot!(sanitized); } #[tokio::test] async fn status_snapshot_includes_credits_and_limits() { let temp_home = 
TempDir::new().expect("temp home"); let mut config = test_config(&temp_home).await; config.model = Some("gpt-5.1-codex".to_string()); config.cwd = PathBuf::from("/workspace/tests"); let auth_manager = test_auth_manager(&config); let usage = TokenUsage { input_tokens: 1_500, cached_input_tokens: 100, output_tokens: 600, reasoning_output_tokens: 0, total_tokens: 2_200, }; let captured_at = chrono::Local .with_ymd_and_hms(2024, 7, 8, 9, 10, 11) .single() .expect("timestamp"); let snapshot = RateLimitSnapshot { primary: Some(RateLimitWindow { used_percent: 45.0, window_minutes: Some(300), resets_at: Some(reset_at_from(&captured_at, 900)), }), secondary: Some(RateLimitWindow { used_percent: 30.0, window_minutes: Some(10_080), resets_at: Some(reset_at_from(&captured_at, 2_700)), }), credits: Some(CreditsSnapshot { has_credits: true, unlimited: false, balance: Some("37.5".to_string()), }), plan_type: None, }; let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, captured_at, &model_slug, ); let mut rendered_lines = render_lines(&composite.display_lines(80)); if cfg!(windows) { for line in &mut rendered_lines { *line = line.replace('\\', "/"); } } let sanitized = sanitize_directory(rendered_lines).join("\n"); assert_snapshot!(sanitized); } #[tokio::test] async fn status_snapshot_shows_empty_limits_message() { let temp_home = TempDir::new().expect("temp home"); let mut config = test_config(&temp_home).await; config.model = Some("gpt-5.1-codex-max".to_string()); config.cwd = PathBuf::from("/workspace/tests"); let auth_manager = test_auth_manager(&config); let usage = TokenUsage { input_tokens: 500, cached_input_tokens: 0, output_tokens: 250, reasoning_output_tokens: 0, total_tokens: 750, }; let snapshot 
= RateLimitSnapshot { primary: None, secondary: None, credits: None, plan_type: None, }; let captured_at = chrono::Local .with_ymd_and_hms(2024, 6, 7, 8, 9, 10) .single() .expect("timestamp"); let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, captured_at, &model_slug, ); let mut rendered_lines = render_lines(&composite.display_lines(80)); if cfg!(windows) { for line in &mut rendered_lines { *line = line.replace('\\', "/"); } } let sanitized = sanitize_directory(rendered_lines).join("\n"); assert_snapshot!(sanitized); } #[tokio::test] async fn status_snapshot_shows_stale_limits_message() { let temp_home = TempDir::new().expect("temp home"); let mut config = test_config(&temp_home).await; config.model = Some("gpt-5.1-codex-max".to_string()); config.cwd = PathBuf::from("/workspace/tests"); let auth_manager = test_auth_manager(&config); let usage = TokenUsage { input_tokens: 1_200, cached_input_tokens: 200, output_tokens: 900, reasoning_output_tokens: 150, total_tokens: 2_250, }; let captured_at = chrono::Local .with_ymd_and_hms(2024, 1, 2, 3, 4, 5) .single() .expect("timestamp"); let snapshot = RateLimitSnapshot { primary: Some(RateLimitWindow { used_percent: 72.5, window_minutes: Some(300), resets_at: Some(reset_at_from(&captured_at, 600)), }), secondary: Some(RateLimitWindow { used_percent: 40.0, window_minutes: Some(10_080), resets_at: Some(reset_at_from(&captured_at, 1_800)), }), credits: None, plan_type: None, }; let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let now = captured_at + ChronoDuration::minutes(20); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = 
new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, now, &model_slug, ); let mut rendered_lines = render_lines(&composite.display_lines(80)); if cfg!(windows) { for line in &mut rendered_lines { *line = line.replace('\\', "/"); } } let sanitized = sanitize_directory(rendered_lines).join("\n"); assert_snapshot!(sanitized); } #[tokio::test] async fn status_snapshot_cached_limits_hide_credits_without_flag() { let temp_home = TempDir::new().expect("temp home"); let mut config = test_config(&temp_home).await; config.model = Some("gpt-5.1-codex".to_string()); config.cwd = PathBuf::from("/workspace/tests"); let auth_manager = test_auth_manager(&config); let usage = TokenUsage { input_tokens: 900, cached_input_tokens: 200, output_tokens: 350, reasoning_output_tokens: 0, total_tokens: 1_450, }; let captured_at = chrono::Local .with_ymd_and_hms(2024, 9, 10, 11, 12, 13) .single() .expect("timestamp"); let snapshot = RateLimitSnapshot { primary: Some(RateLimitWindow { used_percent: 60.0, window_minutes: Some(300), resets_at: Some(reset_at_from(&captured_at, 1_200)), }), secondary: Some(RateLimitWindow { used_percent: 35.0, window_minutes: Some(10_080), resets_at: Some(reset_at_from(&captured_at, 2_400)), }), credits: Some(CreditsSnapshot { has_credits: false, unlimited: false, balance: Some("80".to_string()), }), plan_type: None, }; let rate_display = rate_limit_snapshot_display(&snapshot, captured_at); let now = captured_at + ChronoDuration::minutes(20); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = token_info_for(&model_slug, &config, &usage); let composite = new_status_output( &config, &auth_manager, Some(&token_info), &usage, &None, Some(&rate_display), None, now, &model_slug, ); let mut rendered_lines = render_lines(&composite.display_lines(80)); if cfg!(windows) { for line in &mut rendered_lines { *line = line.replace('\\', "/"); } } let sanitized = 
sanitize_directory(rendered_lines).join("\n"); assert_snapshot!(sanitized); } #[tokio::test] async fn status_context_window_uses_last_usage() { let temp_home = TempDir::new().expect("temp home"); let mut config = test_config(&temp_home).await; config.model_context_window = Some(272_000); let auth_manager = test_auth_manager(&config); let total_usage = TokenUsage { input_tokens: 12_800, cached_input_tokens: 0, output_tokens: 879, reasoning_output_tokens: 0, total_tokens: 102_000, }; let last_usage = TokenUsage { input_tokens: 12_800, cached_input_tokens: 0, output_tokens: 879, reasoning_output_tokens: 0, total_tokens: 13_679, }; let now = chrono::Local .with_ymd_and_hms(2024, 6, 1, 12, 0, 0) .single() .expect("timestamp"); let model_slug = ModelsManager::get_model_offline(config.model.as_deref()); let token_info = TokenUsageInfo { total_token_usage: total_usage.clone(), last_token_usage: last_usage, model_context_window: config.model_context_window, }; let composite = new_status_output( &config, &auth_manager, Some(&token_info), &total_usage, &None, None, None, now, &model_slug, ); let rendered_lines = render_lines(&composite.display_lines(80)); let context_line = rendered_lines .into_iter() .find(|line| line.contains("Context window")) .expect("context line"); assert!( context_line.contains("13.7K used / 272K"), "expected context line to reflect last usage tokens, got: {context_line}" ); assert!( !context_line.contains("102K"), "context line should not use total aggregated tokens, got: {context_line}" ); }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/status/helpers.rs
codex-rs/tui/src/status/helpers.rs
use crate::exec_command::relativize_to_home; use crate::text_formatting; use chrono::DateTime; use chrono::Local; use codex_app_server_protocol::AuthMode; use codex_core::AuthManager; use codex_core::config::Config; use codex_core::project_doc::discover_project_doc_paths; use codex_protocol::account::PlanType; use std::path::Path; use unicode_width::UnicodeWidthStr; use super::account::StatusAccountDisplay; fn normalize_agents_display_path(path: &Path) -> String { dunce::simplified(path).display().to_string() } pub(crate) fn compose_model_display( model_name: &str, entries: &[(&str, String)], ) -> (String, Vec<String>) { let mut details: Vec<String> = Vec::new(); if let Some((_, effort)) = entries.iter().find(|(k, _)| *k == "reasoning effort") { details.push(format!("reasoning {}", effort.to_ascii_lowercase())); } if let Some((_, summary)) = entries.iter().find(|(k, _)| *k == "reasoning summaries") { let summary = summary.trim(); if summary.eq_ignore_ascii_case("none") || summary.eq_ignore_ascii_case("off") { details.push("summaries off".to_string()); } else if !summary.is_empty() { details.push(format!("summaries {}", summary.to_ascii_lowercase())); } } (model_name.to_string(), details) } pub(crate) fn compose_agents_summary(config: &Config) -> String { match discover_project_doc_paths(config) { Ok(paths) => { let mut rels: Vec<String> = Vec::new(); for p in paths { let file_name = p .file_name() .map(|name| name.to_string_lossy().to_string()) .unwrap_or_else(|| "<unknown>".to_string()); let display = if let Some(parent) = p.parent() { if parent == config.cwd { file_name.clone() } else { let mut cur = config.cwd.as_path(); let mut ups = 0usize; let mut reached = false; while let Some(c) = cur.parent() { if cur == parent { reached = true; break; } cur = c; ups += 1; } if reached { let up = format!("..{}", std::path::MAIN_SEPARATOR); format!("{}{}", up.repeat(ups), file_name) } else if let Ok(stripped) = p.strip_prefix(&config.cwd) { 
normalize_agents_display_path(stripped) } else { normalize_agents_display_path(&p) } } } else { normalize_agents_display_path(&p) }; rels.push(display); } if rels.is_empty() { "<none>".to_string() } else { rels.join(", ") } } Err(_) => "<none>".to_string(), } } pub(crate) fn compose_account_display( auth_manager: &AuthManager, plan: Option<PlanType>, ) -> Option<StatusAccountDisplay> { let auth = auth_manager.auth()?; match auth.mode { AuthMode::ChatGPT => { let email = auth.get_account_email(); let plan = plan .map(|plan_type| title_case(format!("{plan_type:?}").as_str())) .or_else(|| Some("Unknown".to_string())); Some(StatusAccountDisplay::ChatGpt { email, plan }) } AuthMode::ApiKey => Some(StatusAccountDisplay::ApiKey), } } pub(crate) fn format_tokens_compact(value: i64) -> String { let value = value.max(0); if value == 0 { return "0".to_string(); } if value < 1_000 { return value.to_string(); } let value_f64 = value as f64; let (scaled, suffix) = if value >= 1_000_000_000_000 { (value_f64 / 1_000_000_000_000.0, "T") } else if value >= 1_000_000_000 { (value_f64 / 1_000_000_000.0, "B") } else if value >= 1_000_000 { (value_f64 / 1_000_000.0, "M") } else { (value_f64 / 1_000.0, "K") }; let decimals = if scaled < 10.0 { 2 } else if scaled < 100.0 { 1 } else { 0 }; let mut formatted = format!("{scaled:.decimals$}"); if formatted.contains('.') { while formatted.ends_with('0') { formatted.pop(); } if formatted.ends_with('.') { formatted.pop(); } } format!("{formatted}{suffix}") } pub(crate) fn format_directory_display(directory: &Path, max_width: Option<usize>) -> String { let formatted = if let Some(rel) = relativize_to_home(directory) { if rel.as_os_str().is_empty() { "~".to_string() } else { format!("~{}{}", std::path::MAIN_SEPARATOR, rel.display()) } } else { directory.display().to_string() }; if let Some(max_width) = max_width { if max_width == 0 { return String::new(); } if UnicodeWidthStr::width(formatted.as_str()) > max_width { return 
text_formatting::center_truncate_path(&formatted, max_width); } } formatted } pub(crate) fn format_reset_timestamp(dt: DateTime<Local>, captured_at: DateTime<Local>) -> String { let time = dt.format("%H:%M").to_string(); if dt.date_naive() == captured_at.date_naive() { time } else { format!("{time} on {}", dt.format("%-d %b")) } } pub(crate) fn title_case(s: &str) -> String { if s.is_empty() { return String::new(); } let mut chars = s.chars(); let first = match chars.next() { Some(c) => c, None => return String::new(), }; let rest: String = chars.as_str().to_ascii_lowercase(); first.to_uppercase().collect::<String>() + &rest }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/status/mod.rs
codex-rs/tui/src/status/mod.rs
mod account; mod card; mod format; mod helpers; mod rate_limits; pub(crate) use card::new_status_output; pub(crate) use helpers::format_tokens_compact; pub(crate) use rate_limits::RateLimitSnapshotDisplay; pub(crate) use rate_limits::rate_limit_snapshot_display; #[cfg(test)] mod tests;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/status/format.rs
codex-rs/tui/src/status/format.rs
use ratatui::prelude::*; use ratatui::style::Stylize; use std::collections::BTreeSet; use unicode_width::UnicodeWidthChar; use unicode_width::UnicodeWidthStr; #[derive(Debug, Clone)] pub(crate) struct FieldFormatter { indent: &'static str, label_width: usize, value_offset: usize, value_indent: String, } impl FieldFormatter { pub(crate) const INDENT: &'static str = " "; pub(crate) fn from_labels<S>(labels: impl IntoIterator<Item = S>) -> Self where S: AsRef<str>, { let label_width = labels .into_iter() .map(|label| UnicodeWidthStr::width(label.as_ref())) .max() .unwrap_or(0); let indent_width = UnicodeWidthStr::width(Self::INDENT); let value_offset = indent_width + label_width + 1 + 3; Self { indent: Self::INDENT, label_width, value_offset, value_indent: " ".repeat(value_offset), } } pub(crate) fn line( &self, label: &'static str, value_spans: Vec<Span<'static>>, ) -> Line<'static> { Line::from(self.full_spans(label, value_spans)) } pub(crate) fn continuation(&self, mut spans: Vec<Span<'static>>) -> Line<'static> { let mut all_spans = Vec::with_capacity(spans.len() + 1); all_spans.push(Span::from(self.value_indent.clone()).dim()); all_spans.append(&mut spans); Line::from(all_spans) } pub(crate) fn value_width(&self, available_inner_width: usize) -> usize { available_inner_width.saturating_sub(self.value_offset) } pub(crate) fn full_spans( &self, label: &str, mut value_spans: Vec<Span<'static>>, ) -> Vec<Span<'static>> { let mut spans = Vec::with_capacity(value_spans.len() + 1); spans.push(self.label_span(label)); spans.append(&mut value_spans); spans } fn label_span(&self, label: &str) -> Span<'static> { let mut buf = String::with_capacity(self.value_offset); buf.push_str(self.indent); buf.push_str(label); buf.push(':'); let label_width = UnicodeWidthStr::width(label); let padding = 3 + self.label_width.saturating_sub(label_width); for _ in 0..padding { buf.push(' '); } Span::from(buf).dim() } } pub(crate) fn push_label(labels: &mut Vec<String>, seen: &mut 
BTreeSet<String>, label: &str) { if seen.contains(label) { return; } let owned = label.to_string(); seen.insert(owned.clone()); labels.push(owned); } pub(crate) fn line_display_width(line: &Line<'static>) -> usize { line.iter() .map(|span| UnicodeWidthStr::width(span.content.as_ref())) .sum() } pub(crate) fn truncate_line_to_width(line: Line<'static>, max_width: usize) -> Line<'static> { if max_width == 0 { return Line::from(Vec::<Span<'static>>::new()); } let mut used = 0usize; let mut spans_out: Vec<Span<'static>> = Vec::new(); for span in line.spans { let text = span.content.into_owned(); let style = span.style; let span_width = UnicodeWidthStr::width(text.as_str()); if span_width == 0 { spans_out.push(Span::styled(text, style)); continue; } if used >= max_width { break; } if used + span_width <= max_width { used += span_width; spans_out.push(Span::styled(text, style)); continue; } let mut truncated = String::new(); for ch in text.chars() { let ch_width = UnicodeWidthChar::width(ch).unwrap_or(0); if used + ch_width > max_width { break; } truncated.push(ch); used += ch_width; } if !truncated.is_empty() { spans_out.push(Span::styled(truncated, style)); } break; } Line::from(spans_out) }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/status/card.rs
codex-rs/tui/src/status/card.rs
use crate::history_cell::CompositeHistoryCell;
use crate::history_cell::HistoryCell;
use crate::history_cell::PlainHistoryCell;
use crate::history_cell::with_border_with_inner_width;
use crate::version::CODEX_CLI_VERSION;
use chrono::DateTime;
use chrono::Local;
use codex_common::create_config_summary_entries;
use codex_core::config::Config;
use codex_core::protocol::NetworkAccess;
use codex_core::protocol::SandboxPolicy;
use codex_core::protocol::TokenUsage;
use codex_core::protocol::TokenUsageInfo;
use codex_protocol::ConversationId;
use codex_protocol::account::PlanType;
use ratatui::prelude::*;
use ratatui::style::Stylize;
use std::collections::BTreeSet;
use std::path::PathBuf;

use super::account::StatusAccountDisplay;
use super::format::FieldFormatter;
use super::format::line_display_width;
use super::format::push_label;
use super::format::truncate_line_to_width;
use super::helpers::compose_account_display;
use super::helpers::compose_agents_summary;
use super::helpers::compose_model_display;
use super::helpers::format_directory_display;
use super::helpers::format_tokens_compact;
use super::rate_limits::RateLimitSnapshotDisplay;
use super::rate_limits::StatusRateLimitData;
use super::rate_limits::StatusRateLimitRow;
use super::rate_limits::StatusRateLimitValue;
use super::rate_limits::compose_rate_limit_data;
use super::rate_limits::format_status_limit_summary;
use super::rate_limits::render_status_limit_progress_bar;
use crate::wrapping::RtOptions;
use crate::wrapping::word_wrap_lines;
use codex_core::AuthManager;

/// Context-window figures derived from the most recent token usage.
#[derive(Debug, Clone)]
struct StatusContextWindowData {
    percent_remaining: i64,
    tokens_in_context: i64,
    window: i64, // total context window size, in tokens
}

/// Aggregate token counters shown in the "Token usage" row.
#[derive(Debug, Clone)]
pub(crate) struct StatusTokenUsageData {
    total: i64,
    input: i64,
    output: i64,
    context_window: Option<StatusContextWindowData>,
}

/// Pre-resolved display state for the `/status` card. All strings are
/// computed once in `new`, so `display_lines` is pure layout.
#[derive(Debug)]
struct StatusHistoryCell {
    model_name: String,
    model_details: Vec<String>,
    directory: PathBuf,
    approval: String,
    sandbox: String,
    agents_summary: String,
    account: Option<StatusAccountDisplay>,
    session_id: Option<String>,
    token_usage: StatusTokenUsageData,
    rate_limits: StatusRateLimitData,
}

/// Builds the `/status` output: the echoed command followed by the card.
#[allow(clippy::too_many_arguments)]
pub(crate) fn new_status_output(
    config: &Config,
    auth_manager: &AuthManager,
    token_info: Option<&TokenUsageInfo>,
    total_usage: &TokenUsage,
    session_id: &Option<ConversationId>,
    rate_limits: Option<&RateLimitSnapshotDisplay>,
    plan_type: Option<PlanType>,
    now: DateTime<Local>,
    model_name: &str,
) -> CompositeHistoryCell {
    let command = PlainHistoryCell::new(vec!["/status".magenta().into()]);
    let card = StatusHistoryCell::new(
        config,
        auth_manager,
        token_info,
        total_usage,
        session_id,
        rate_limits,
        plan_type,
        now,
        model_name,
    );
    CompositeHistoryCell::new(vec![Box::new(command), Box::new(card)])
}

impl StatusHistoryCell {
    /// Resolves every displayable value (model, sandbox, account, token and
    /// rate-limit data) from the live config/auth/usage state.
    #[allow(clippy::too_many_arguments)]
    fn new(
        config: &Config,
        auth_manager: &AuthManager,
        token_info: Option<&TokenUsageInfo>,
        total_usage: &TokenUsage,
        session_id: &Option<ConversationId>,
        rate_limits: Option<&RateLimitSnapshotDisplay>,
        plan_type: Option<PlanType>,
        now: DateTime<Local>,
        model_name: &str,
    ) -> Self {
        let config_entries = create_config_summary_entries(config, model_name);
        let (model_name, model_details) = compose_model_display(model_name, &config_entries);
        let approval = config_entries
            .iter()
            .find(|(k, _)| *k == "approval")
            .map(|(_, v)| v.clone())
            .unwrap_or_else(|| "<unknown>".to_string());
        // Human-readable sandbox policy name.
        let sandbox = match config.sandbox_policy.get() {
            SandboxPolicy::DangerFullAccess => "danger-full-access".to_string(),
            SandboxPolicy::ReadOnly => "read-only".to_string(),
            SandboxPolicy::WorkspaceWrite { .. } => "workspace-write".to_string(),
            SandboxPolicy::ExternalSandbox { network_access } => {
                if matches!(network_access, NetworkAccess::Enabled) {
                    "external-sandbox (network access enabled)".to_string()
                } else {
                    "external-sandbox".to_string()
                }
            }
        };
        let agents_summary = compose_agents_summary(config);
        let account = compose_account_display(auth_manager, plan_type);
        let session_id = session_id.as_ref().map(std::string::ToString::to_string);
        // Context-window stats come from the last turn when available,
        // otherwise fall back to config defaults with zero usage.
        let default_usage = TokenUsage::default();
        let (context_usage, context_window) = match token_info {
            Some(info) => (&info.last_token_usage, info.model_context_window),
            None => (&default_usage, config.model_context_window),
        };
        let context_window = context_window.map(|window| StatusContextWindowData {
            percent_remaining: context_usage.percent_of_context_window_remaining(window),
            tokens_in_context: context_usage.tokens_in_context_window(),
            window,
        });
        let token_usage = StatusTokenUsageData {
            total: total_usage.blended_total(),
            input: total_usage.non_cached_input(),
            output: total_usage.output_tokens,
            context_window,
        };
        let rate_limits = compose_rate_limit_data(rate_limits, now);
        Self {
            model_name,
            model_details,
            directory: config.cwd.clone(),
            approval,
            sandbox,
            agents_summary,
            account,
            session_id,
            token_usage,
            rate_limits,
        }
    }

    // "<total> total  (<input> input + <output> output)" with the breakdown dimmed.
    fn token_usage_spans(&self) -> Vec<Span<'static>> {
        let total_fmt = format_tokens_compact(self.token_usage.total);
        let input_fmt = format_tokens_compact(self.token_usage.input);
        let output_fmt = format_tokens_compact(self.token_usage.output);
        vec![
            Span::from(total_fmt),
            Span::from(" total "),
            Span::from(" (").dim(),
            Span::from(input_fmt).dim(),
            Span::from(" input").dim(),
            Span::from(" + ").dim(),
            Span::from(output_fmt).dim(),
            Span::from(" output").dim(),
            Span::from(")").dim(),
        ]
    }

    // "<pct>% left (<used> used / <window>)"; None when no window is known.
    fn context_window_spans(&self) -> Option<Vec<Span<'static>>> {
        let context = self.token_usage.context_window.as_ref()?;
        let percent = context.percent_remaining;
        let used_fmt = format_tokens_compact(context.tokens_in_context);
        let window_fmt = format_tokens_compact(context.window);
        Some(vec![
            Span::from(format!("{percent}% left")),
            Span::from(" (").dim(),
            Span::from(used_fmt).dim(),
            Span::from(" used / ").dim(),
            Span::from(window_fmt).dim(),
            Span::from(")").dim(),
        ])
    }

    /// Lines for the limits section, including the stale warning and the
    /// "data not available yet" placeholders.
    fn rate_limit_lines(
        &self,
        available_inner_width: usize,
        formatter: &FieldFormatter,
    ) -> Vec<Line<'static>> {
        match &self.rate_limits {
            StatusRateLimitData::Available(rows_data) => {
                if rows_data.is_empty() {
                    return vec![
                        formatter.line("Limits", vec![Span::from("data not available yet").dim()]),
                    ];
                }
                self.rate_limit_row_lines(rows_data, available_inner_width, formatter)
            }
            StatusRateLimitData::Stale(rows_data) => {
                let mut lines =
                    self.rate_limit_row_lines(rows_data, available_inner_width, formatter);
                lines.push(formatter.line(
                    "Warning",
                    vec![Span::from("limits may be stale - start new turn to refresh.").dim()],
                ));
                lines
            }
            StatusRateLimitData::Missing => {
                vec![formatter.line("Limits", vec![Span::from("data not available yet").dim()])]
            }
        }
    }

    /// Renders each row as a progress bar plus a percent summary. The
    /// "(resets ...)" note goes inline when it fits within the width,
    /// otherwise on an aligned continuation line.
    fn rate_limit_row_lines(
        &self,
        rows: &[StatusRateLimitRow],
        available_inner_width: usize,
        formatter: &FieldFormatter,
    ) -> Vec<Line<'static>> {
        // Worst case each row needs a second (continuation) line.
        let mut lines = Vec::with_capacity(rows.len().saturating_mul(2));
        for row in rows {
            match &row.value {
                StatusRateLimitValue::Window {
                    percent_used,
                    resets_at,
                } => {
                    let percent_remaining = (100.0 - percent_used).clamp(0.0, 100.0);
                    let value_spans = vec![
                        Span::from(render_status_limit_progress_bar(percent_remaining)),
                        Span::from(" "),
                        Span::from(format_status_limit_summary(percent_remaining)),
                    ];
                    let base_spans = formatter.full_spans(row.label.as_str(), value_spans);
                    let base_line = Line::from(base_spans.clone());
                    if let Some(resets_at) = resets_at.as_ref() {
                        let resets_span = Span::from(format!("(resets {resets_at})")).dim();
                        let mut inline_spans = base_spans.clone();
                        inline_spans.push(Span::from(" ").dim());
                        inline_spans.push(resets_span.clone());
                        if line_display_width(&Line::from(inline_spans.clone()))
                            <= available_inner_width
                        {
                            lines.push(Line::from(inline_spans));
                        } else {
                            lines.push(base_line);
                            lines.push(formatter.continuation(vec![resets_span]));
                        }
                    } else {
                        lines.push(base_line);
                    }
                }
                StatusRateLimitValue::Text(text) => {
                    let label = row.label.clone();
                    let spans =
                        formatter.full_spans(label.as_str(), vec![Span::from(text.clone())]);
                    lines.push(Line::from(spans));
                }
            }
        }
        lines
    }

    // Registers every label the limits section may emit so the shared
    // formatter reserves enough label-column width up front.
    fn collect_rate_limit_labels(&self, seen: &mut BTreeSet<String>, labels: &mut Vec<String>) {
        match &self.rate_limits {
            StatusRateLimitData::Available(rows) => {
                if rows.is_empty() {
                    push_label(labels, seen, "Limits");
                } else {
                    for row in rows {
                        push_label(labels, seen, row.label.as_str());
                    }
                }
            }
            StatusRateLimitData::Stale(rows) => {
                for row in rows {
                    push_label(labels, seen, row.label.as_str());
                }
                push_label(labels, seen, "Warning");
            }
            StatusRateLimitData::Missing => push_label(labels, seen, "Limits"),
        }
    }
}

impl HistoryCell for StatusHistoryCell {
    /// Renders the full card: header, usage note, config fields, token/limit
    /// rows — all truncated to the content width and wrapped in a border.
    fn display_lines(&self, width: u16) -> Vec<Line<'static>> {
        let mut lines: Vec<Line<'static>> = Vec::new();
        lines.push(Line::from(vec![
            Span::from(format!("{}>_ ", FieldFormatter::INDENT)).dim(),
            Span::from("OpenAI Codex").bold(),
            Span::from(" ").dim(),
            Span::from(format!("(v{CODEX_CLI_VERSION})")).dim(),
        ]));
        lines.push(Line::from(Vec::<Span<'static>>::new()));
        // Reserve 4 columns for the border/padding; nothing fits otherwise.
        let available_inner_width = usize::from(width.saturating_sub(4));
        if available_inner_width == 0 {
            return Vec::new();
        }
        let account_value = self.account.as_ref().map(|account| match account {
            StatusAccountDisplay::ChatGpt { email, plan } => match (email, plan) {
                (Some(email), Some(plan)) => format!("{email} ({plan})"),
                (Some(email), None) => email.clone(),
                (None, Some(plan)) => plan.clone(),
                (None, None) => "ChatGPT".to_string(),
            },
            StatusAccountDisplay::ApiKey => {
                "API key configured (run codex login to use ChatGPT)".to_string()
            }
        });
        // Collect every label we might render so the formatter can size the
        // shared label column before any line is produced.
        let mut labels: Vec<String> =
            vec!["Model", "Directory", "Approval", "Sandbox", "Agents.md"]
                .into_iter()
                .map(str::to_string)
                .collect();
        let mut seen: BTreeSet<String> = labels.iter().cloned().collect();
        if account_value.is_some() {
            push_label(&mut labels, &mut seen, "Account");
        }
        if self.session_id.is_some() {
            push_label(&mut labels, &mut seen, "Session");
        }
        push_label(&mut labels, &mut seen, "Token usage");
        if self.token_usage.context_window.is_some() {
            push_label(&mut labels, &mut seen, "Context window");
        }
        self.collect_rate_limit_labels(&mut seen, &mut labels);
        let formatter = FieldFormatter::from_labels(labels.iter().map(String::as_str));
        let value_width = formatter.value_width(available_inner_width);
        let note_first_line = Line::from(vec![
            Span::from("Visit ").cyan(),
            "https://chatgpt.com/codex/settings/usage"
                .cyan()
                .underlined(),
            Span::from(" for up-to-date").cyan(),
        ]);
        let note_second_line = Line::from(vec![
            Span::from("information on rate limits and credits").cyan(),
        ]);
        let note_lines = word_wrap_lines(
            [note_first_line, note_second_line],
            RtOptions::new(available_inner_width),
        );
        lines.extend(note_lines);
        lines.push(Line::from(Vec::<Span<'static>>::new()));
        let mut model_spans = vec![Span::from(self.model_name.clone())];
        if !self.model_details.is_empty() {
            model_spans.push(Span::from(" (").dim());
            model_spans.push(Span::from(self.model_details.join(", ")).dim());
            model_spans.push(Span::from(")").dim());
        }
        let directory_value = format_directory_display(&self.directory, Some(value_width));
        lines.push(formatter.line("Model", model_spans));
        lines.push(formatter.line("Directory", vec![Span::from(directory_value)]));
        lines.push(formatter.line("Approval", vec![Span::from(self.approval.clone())]));
        lines.push(formatter.line("Sandbox", vec![Span::from(self.sandbox.clone())]));
        lines.push(formatter.line("Agents.md", vec![Span::from(self.agents_summary.clone())]));
        if let Some(account_value) = account_value {
            lines.push(formatter.line("Account", vec![Span::from(account_value)]));
        }
        if let Some(session) = self.session_id.as_ref() {
            lines.push(formatter.line("Session", vec![Span::from(session.clone())]));
        }
        lines.push(Line::from(Vec::<Span<'static>>::new()));
        // Hide token usage only for ChatGPT subscribers
        if !matches!(self.account, Some(StatusAccountDisplay::ChatGpt { .. })) {
            lines.push(formatter.line("Token usage", self.token_usage_spans()));
        }
        if let Some(spans) = self.context_window_spans() {
            lines.push(formatter.line("Context window", spans));
        }
        lines.extend(self.rate_limit_lines(available_inner_width, &formatter));
        // Shrink the border to the widest content line, then clip anything
        // that still overflows the available width.
        let content_width = lines.iter().map(line_display_width).max().unwrap_or(0);
        let inner_width = content_width.min(available_inner_width);
        let truncated_lines: Vec<Line<'static>> = lines
            .into_iter()
            .map(|line| truncate_line_to_width(line, inner_width))
            .collect();
        with_border_with_inner_width(truncated_lines, inner_width)
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/status/rate_limits.rs
codex-rs/tui/src/status/rate_limits.rs
use crate::chatwidget::get_limits_duration;
use crate::text_formatting::capitalize_first;

use super::helpers::format_reset_timestamp;
use chrono::DateTime;
use chrono::Duration as ChronoDuration;
use chrono::Local;
use chrono::Utc;
use codex_core::protocol::CreditsSnapshot as CoreCreditsSnapshot;
use codex_core::protocol::RateLimitSnapshot;
use codex_core::protocol::RateLimitWindow;

// Fixed-width progress bar used for each rate-limit row.
const STATUS_LIMIT_BAR_SEGMENTS: usize = 20;
const STATUS_LIMIT_BAR_FILLED: &str = "█";
const STATUS_LIMIT_BAR_EMPTY: &str = "░";

/// One labelled row in the limits section of the status card.
#[derive(Debug, Clone)]
pub(crate) struct StatusRateLimitRow {
    pub label: String,
    pub value: StatusRateLimitValue,
}

/// Row payload: a usage window (rendered as a progress bar with an optional
/// reset note) or free-form text (e.g. a credit balance).
#[derive(Debug, Clone)]
pub(crate) enum StatusRateLimitValue {
    Window {
        percent_used: f64,
        resets_at: Option<String>,
    },
    Text(String),
}

/// Rate-limit rows plus a freshness tag for rendering.
#[derive(Debug, Clone)]
pub(crate) enum StatusRateLimitData {
    Available(Vec<StatusRateLimitRow>),
    /// Rows exist but the snapshot is older than the staleness threshold.
    Stale(Vec<StatusRateLimitRow>),
    Missing,
}

/// Snapshots older than this many minutes are tagged stale.
pub(crate) const RATE_LIMIT_STALE_THRESHOLD_MINUTES: i64 = 15;

#[derive(Debug, Clone)]
pub(crate) struct RateLimitWindowDisplay {
    pub used_percent: f64,
    pub resets_at: Option<String>,
    pub window_minutes: Option<i64>,
}

impl RateLimitWindowDisplay {
    // Converts a protocol window into display form; `resets_at` epoch seconds
    // become a local-time, human-readable timestamp relative to `captured_at`.
    fn from_window(window: &RateLimitWindow, captured_at: DateTime<Local>) -> Self {
        let resets_at = window
            .resets_at
            .and_then(|seconds| DateTime::<Utc>::from_timestamp(seconds, 0))
            .map(|dt| dt.with_timezone(&Local))
            .map(|dt| format_reset_timestamp(dt, captured_at));
        Self {
            used_percent: window.used_percent,
            resets_at,
            window_minutes: window.window_minutes,
        }
    }
}

#[derive(Debug, Clone)]
pub(crate) struct RateLimitSnapshotDisplay {
    pub captured_at: DateTime<Local>,
    pub primary: Option<RateLimitWindowDisplay>,
    pub secondary: Option<RateLimitWindowDisplay>,
    pub credits: Option<CreditsSnapshotDisplay>,
}

#[derive(Debug, Clone)]
pub(crate) struct CreditsSnapshotDisplay {
    pub has_credits: bool,
    pub unlimited: bool,
    pub balance: Option<String>,
}

/// Converts a protocol-level snapshot into its display form.
pub(crate) fn rate_limit_snapshot_display(
    snapshot: &RateLimitSnapshot,
    captured_at: DateTime<Local>,
) -> RateLimitSnapshotDisplay {
    RateLimitSnapshotDisplay {
        captured_at,
        primary: snapshot
            .primary
            .as_ref()
            .map(|window| RateLimitWindowDisplay::from_window(window, captured_at)),
        secondary: snapshot
            .secondary
            .as_ref()
            .map(|window| RateLimitWindowDisplay::from_window(window, captured_at)),
        credits: snapshot.credits.as_ref().map(CreditsSnapshotDisplay::from),
    }
}

impl From<&CoreCreditsSnapshot> for CreditsSnapshotDisplay {
    fn from(value: &CoreCreditsSnapshot) -> Self {
        Self {
            has_credits: value.has_credits,
            unlimited: value.unlimited,
            balance: value.balance.clone(),
        }
    }
}

/// Builds the rows of the limits section: primary/secondary usage windows
/// plus an optional credits row, tagged [`StatusRateLimitData::Stale`] when
/// the snapshot is older than [`RATE_LIMIT_STALE_THRESHOLD_MINUTES`].
pub(crate) fn compose_rate_limit_data(
    snapshot: Option<&RateLimitSnapshotDisplay>,
    now: DateTime<Local>,
) -> StatusRateLimitData {
    match snapshot {
        Some(snapshot) => {
            let mut rows = Vec::with_capacity(3);
            if let Some(primary) = snapshot.primary.as_ref() {
                // Label by window length; "5h" when the length is unknown.
                let label: String = primary
                    .window_minutes
                    .map(get_limits_duration)
                    .unwrap_or_else(|| "5h".to_string());
                let label = capitalize_first(&label);
                rows.push(StatusRateLimitRow {
                    label: format!("{label} limit"),
                    value: StatusRateLimitValue::Window {
                        percent_used: primary.used_percent,
                        resets_at: primary.resets_at.clone(),
                    },
                });
            }
            if let Some(secondary) = snapshot.secondary.as_ref() {
                // Same as primary, but defaults to "weekly".
                let label: String = secondary
                    .window_minutes
                    .map(get_limits_duration)
                    .unwrap_or_else(|| "weekly".to_string());
                let label = capitalize_first(&label);
                rows.push(StatusRateLimitRow {
                    label: format!("{label} limit"),
                    value: StatusRateLimitValue::Window {
                        percent_used: secondary.used_percent,
                        resets_at: secondary.resets_at.clone(),
                    },
                });
            }
            if let Some(credits) = snapshot.credits.as_ref()
                && let Some(row) = credit_status_row(credits)
            {
                rows.push(row);
            }
            let is_stale = now.signed_duration_since(snapshot.captured_at)
                > ChronoDuration::minutes(RATE_LIMIT_STALE_THRESHOLD_MINUTES);
            if rows.is_empty() {
                // No rows at all: report "available but empty", never stale.
                StatusRateLimitData::Available(vec![])
            } else if is_stale {
                StatusRateLimitData::Stale(rows)
            } else {
                StatusRateLimitData::Available(rows)
            }
        }
        None => StatusRateLimitData::Missing,
    }
}

/// Renders a `[████░░...]` bar whose fill is proportional to
/// `percent_remaining` (clamped to 0..=100).
pub(crate) fn render_status_limit_progress_bar(percent_remaining: f64) -> String {
    let ratio = (percent_remaining / 100.0).clamp(0.0, 1.0);
    let filled = (ratio * STATUS_LIMIT_BAR_SEGMENTS as f64).round() as usize;
    let filled = filled.min(STATUS_LIMIT_BAR_SEGMENTS);
    let empty = STATUS_LIMIT_BAR_SEGMENTS.saturating_sub(filled);
    format!(
        "[{}{}]",
        STATUS_LIMIT_BAR_FILLED.repeat(filled),
        STATUS_LIMIT_BAR_EMPTY.repeat(empty)
    )
}

/// "NN% left", rounded to a whole percent.
pub(crate) fn format_status_limit_summary(percent_remaining: f64) -> String {
    format!("{percent_remaining:.0}% left")
}

/// Builds a single `StatusRateLimitRow` for credits when the snapshot indicates
/// that the account has credit tracking enabled. When credits are unlimited we
/// show that fact explicitly; otherwise we render the rounded balance in
/// credits. Accounts with credits = 0 skip this section entirely.
fn credit_status_row(credits: &CreditsSnapshotDisplay) -> Option<StatusRateLimitRow> {
    if !credits.has_credits {
        return None;
    }
    if credits.unlimited {
        return Some(StatusRateLimitRow {
            label: "Credits".to_string(),
            value: StatusRateLimitValue::Text("Unlimited".to_string()),
        });
    }
    let balance = credits.balance.as_ref()?;
    let display_balance = format_credit_balance(balance)?;
    Some(StatusRateLimitRow {
        label: "Credits".to_string(),
        value: StatusRateLimitValue::Text(format!("{display_balance} credits")),
    })
}

// Parses a raw balance string into a rounded integer for display. Integer
// parses must be > 0; float parses must be > 0.0. NOTE(review): a float
// balance in (0, 0.5) passes the > 0.0 check but rounds down and displays
// as "0 credits" — confirm whether that is intended.
fn format_credit_balance(raw: &str) -> Option<String> {
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        return None;
    }
    if let Ok(int_value) = trimmed.parse::<i64>()
        && int_value > 0
    {
        return Some(int_value.to_string());
    }
    if let Ok(value) = trimmed.parse::<f64>()
        && value > 0.0
    {
        let rounded = value.round() as i64;
        return Some(rounded.to_string());
    }
    None
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/src/status/account.rs
codex-rs/tui/src/status/account.rs
/// How the signed-in account is presented in the status card.
#[derive(Debug, Clone)]
pub(crate) enum StatusAccountDisplay {
    /// ChatGPT login; email and plan are included when known.
    ChatGpt {
        email: Option<String>,
        plan: Option<String>,
    },
    /// Authenticated with an API key rather than a ChatGPT login.
    ApiKey,
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/tests/test_backend.rs
codex-rs/tui/tests/test_backend.rs
// Re-export the in-crate VT100 test backend so integration tests can use it
// without making it part of the library's public API.
#[path = "../src/test_backend.rs"]
mod inner;
pub use inner::VT100Backend;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/tests/all.rs
codex-rs/tui/tests/all.rs
// Single integration test binary that aggregates all test modules. // The submodules live in `tests/suite/`. #[cfg(feature = "vt100-tests")] mod test_backend; mod suite;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/tests/suite/vt100_live_commit.rs
codex-rs/tui/tests/suite/vt100_live_commit.rs
#![cfg(feature = "vt100-tests")]

use crate::test_backend::VT100Backend;
use ratatui::layout::Rect;
use ratatui::text::Line;

// Verifies that rows drained out of the live ring are committed to terminal
// history above the viewport, while rows kept in the ring are not.
#[test]
fn live_001_commit_on_overflow() {
    let backend = VT100Backend::new(20, 6);
    let mut term = match codex_tui::custom_terminal::Terminal::with_options(backend) {
        Ok(t) => t,
        Err(e) => panic!("failed to construct terminal: {e}"),
    };
    // Viewport is the single bottom row of the 20x6 screen.
    let area = Rect::new(0, 5, 20, 1);
    term.set_viewport_area(area);

    // Build 5 explicit rows at width 20.
    let mut rb = codex_tui::live_wrap::RowBuilder::new(20);
    rb.push_fragment("one\n");
    rb.push_fragment("two\n");
    rb.push_fragment("three\n");
    rb.push_fragment("four\n");
    rb.push_fragment("five\n");

    // Keep the last 3 in the live ring; commit the first 2.
    let commit_rows = rb.drain_commit_ready(3);
    let lines: Vec<Line<'static>> = commit_rows.into_iter().map(|r| r.text.into()).collect();
    codex_tui::insert_history::insert_history_lines(&mut term, lines)
        .expect("Failed to insert history lines in test");

    let screen = term.backend().vt100().screen();
    // The words "one" and "two" should appear above the viewport.
    let joined = screen.contents();
    assert!(
        joined.contains("one"),
        "expected committed 'one' to be visible\n{joined}"
    );
    assert!(
        joined.contains("two"),
        "expected committed 'two' to be visible\n{joined}"
    );
    // The last three (three,four,five) remain in the live ring, not committed here.
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/tests/suite/mod.rs
codex-rs/tui/tests/suite/mod.rs
// Aggregates all former standalone integration tests as modules. mod status_indicator; mod vt100_history; mod vt100_live_commit;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/tests/suite/vt100_history.rs
codex-rs/tui/tests/suite/vt100_history.rs
#![cfg(feature = "vt100-tests")]
#![expect(clippy::expect_used)]

use crate::test_backend::VT100Backend;
use ratatui::layout::Rect;
use ratatui::style::Stylize;
use ratatui::text::Line;

// Small helper macro to assert a collection contains an item with a clearer
// failure message.
macro_rules! assert_contains {
    ($collection:expr, $item:expr $(,)?) => {
        assert!(
            $collection.contains(&$item),
            "Expected {:?} to contain {:?}",
            $collection,
            $item
        );
    };
    ($collection:expr, $item:expr, $($arg:tt)+) => {
        assert!($collection.contains(&$item), $($arg)+);
    };
}

// Shared fixture: a VT100-backed terminal with a configurable viewport,
// plus a helper to push lines into scrollback history.
struct TestScenario {
    term: codex_tui::custom_terminal::Terminal<VT100Backend>,
}

impl TestScenario {
    fn new(width: u16, height: u16, viewport: Rect) -> Self {
        let backend = VT100Backend::new(width, height);
        let mut term = codex_tui::custom_terminal::Terminal::with_options(backend)
            .expect("failed to construct terminal");
        term.set_viewport_area(viewport);
        Self { term }
    }

    fn run_insert(&mut self, lines: Vec<Line<'static>>) {
        codex_tui::insert_history::insert_history_lines(&mut self.term, lines)
            .expect("Failed to insert history lines in test");
    }
}

#[test]
fn basic_insertion_no_wrap() {
    // Screen of 20x6; viewport is the last row (height=1 at y=5)
    let area = Rect::new(0, 5, 20, 1);
    let mut scenario = TestScenario::new(20, 6, area);
    let lines = vec!["first".into(), "second".into()];
    scenario.run_insert(lines);
    let rows = scenario.term.backend().vt100().screen().contents();
    assert_contains!(rows, String::from("first"));
    assert_contains!(rows, String::from("second"));
}

// A single token wider than the screen must hard-wrap without losing chars.
#[test]
fn long_token_wraps() {
    let area = Rect::new(0, 5, 20, 1);
    let mut scenario = TestScenario::new(20, 6, area);
    let long = "A".repeat(45); // > 2 lines at width 20
    let lines = vec![long.clone().into()];
    scenario.run_insert(lines);
    let screen = scenario.term.backend().vt100().screen();
    // Count total A's on the screen
    let mut count_a = 0usize;
    for row in 0..6 {
        for col in 0..20 {
            if let Some(cell) = screen.cell(row, col)
                && let Some(ch) = cell.contents().chars().next()
                && ch == 'A'
            {
                count_a += 1;
            }
        }
    }
    assert_eq!(
        count_a,
        long.len(),
        "wrapped content did not preserve all characters"
    );
}

// Wide glyphs (emoji, CJK) must survive insertion intact.
#[test]
fn emoji_and_cjk() {
    let area = Rect::new(0, 5, 20, 1);
    let mut scenario = TestScenario::new(20, 6, area);
    let text = String::from("😀😀😀😀😀 你好世界");
    let lines = vec![text.clone().into()];
    scenario.run_insert(lines);
    let rows = scenario.term.backend().vt100().screen().contents();
    for ch in text.chars().filter(|c| !c.is_whitespace()) {
        assert!(
            rows.contains(ch),
            "missing character {ch:?} in reconstructed screen"
        );
    }
}

// Styled and unstyled spans on one line must render as contiguous text.
#[test]
fn mixed_ansi_spans() {
    let area = Rect::new(0, 5, 20, 1);
    let mut scenario = TestScenario::new(20, 6, area);
    let line = vec!["red".red(), "+plain".into()].into();
    scenario.run_insert(vec![line]);
    let rows = scenario.term.backend().vt100().screen().contents();
    assert_contains!(rows, String::from("red+plain"));
}

// After inserting history the cursor must be restored to the origin.
#[test]
fn cursor_restoration() {
    let area = Rect::new(0, 5, 20, 1);
    let mut scenario = TestScenario::new(20, 6, area);
    let lines = vec!["x".into()];
    scenario.run_insert(lines);
    assert_eq!(scenario.term.last_known_cursor_pos, (0, 0).into());
}

#[test]
fn word_wrap_no_mid_word_split() {
    // Screen of 40x10; viewport is the last row
    let area = Rect::new(0, 9, 40, 1);
    let mut scenario = TestScenario::new(40, 10, area);
    let sample = "Years passed, and Willowmere thrived in peace and friendship. Mira’s herb garden flourished with both ordinary and enchanted plants, and travelers spoke of the kindness of the woman who tended them.";
    scenario.run_insert(vec![sample.into()]);
    let joined = scenario.term.backend().vt100().screen().contents();
    assert!(
        !joined.contains("bo\nth"),
        "word 'both' should not be split across lines:\n{joined}"
    );
}

#[test]
fn em_dash_and_space_word_wrap() {
    // Repro from report: ensure we break before "inside", not mid-word.
    let area = Rect::new(0, 9, 40, 1);
    let mut scenario = TestScenario::new(40, 10, area);
    let sample = "Mara found an old key on the shore. Curious, she opened a tarnished box half-buried in sand—and inside lay a single, glowing seed.";
    scenario.run_insert(vec![sample.into()]);
    let joined = scenario.term.backend().vt100().screen().contents();
    assert!(
        !joined.contains("insi\nde"),
        "word 'inside' should not be split across lines:\n{joined}"
    );
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/tui/tests/suite/status_indicator.rs
codex-rs/tui/tests/suite/status_indicator.rs
//! Regression test: ensure that `StatusIndicatorWidget` sanitises ANSI escape //! sequences so that no raw `\x1b` bytes are written into the backing //! buffer. Rendering logic is tricky to unit‑test end‑to‑end, therefore we //! verify the *public* contract of `ansi_escape_line()` which the widget now //! relies on. use codex_ansi_escape::ansi_escape_line; #[test] fn ansi_escape_line_strips_escape_sequences() { let text_in_ansi_red = "\x1b[31mRED\x1b[0m"; // The returned line must contain three printable glyphs and **no** raw // escape bytes. let line = ansi_escape_line(text_in_ansi_red); let combined: String = line .spans .iter() .map(|span| span.content.to_string()) .collect(); assert_eq!(combined, "RED"); }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/codex-client/src/lib.rs
codex-rs/codex-client/src/lib.rs
//! Crate root for the Codex HTTP client: wires the private implementation
//! modules together and re-exports the public surface — the HTTP client and
//! request builder, transport/stream error types, retry policy helpers, SSE
//! streaming, telemetry hooks, and the transport abstraction.

mod default_client;
mod error;
mod request;
mod retry;
mod sse;
mod telemetry;
mod transport;

pub use crate::default_client::CodexHttpClient;
pub use crate::default_client::CodexRequestBuilder;
pub use crate::error::StreamError;
pub use crate::error::TransportError;
pub use crate::request::Request;
pub use crate::request::Response;
pub use crate::retry::RetryOn;
pub use crate::retry::RetryPolicy;
pub use crate::retry::backoff;
pub use crate::retry::run_with_retry;
pub use crate::sse::sse_stream;
pub use crate::telemetry::RequestTelemetry;
pub use crate::transport::ByteStream;
pub use crate::transport::HttpTransport;
pub use crate::transport::ReqwestTransport;
pub use crate::transport::StreamResponse;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/codex-client/src/telemetry.rs
codex-rs/codex-client/src/telemetry.rs
use crate::error::TransportError;
use http::StatusCode;
use std::time::Duration;

/// API specific telemetry.
pub trait RequestTelemetry: Send + Sync {
    /// Reports the outcome of one request attempt: `status` when a response
    /// was received, `error` when the transport failed, and how long the
    /// attempt took. `attempt` is presumably the retry counter — confirm
    /// 0- vs 1-based against the caller.
    fn on_request(
        &self,
        attempt: u64,
        status: Option<StatusCode>,
        error: Option<&TransportError>,
        duration: Duration,
    );
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/codex-client/src/default_client.rs
codex-rs/codex-client/src/default_client.rs
use http::Error as HttpError;
use opentelemetry::global;
use opentelemetry::propagation::Injector;
use reqwest::IntoUrl;
use reqwest::Method;
use reqwest::Response;
use reqwest::header::HeaderMap;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;
use serde::Serialize;
use std::collections::HashMap;
use std::fmt::Display;
use std::time::Duration;
use tracing::Span;
use tracing_opentelemetry::OpenTelemetrySpanExt;

/// Thin wrapper around `reqwest::Client` whose builders inject W3C trace
/// context headers and emit debug logs on completion/failure.
#[derive(Clone, Debug)]
pub struct CodexHttpClient {
    inner: reqwest::Client,
}

impl CodexHttpClient {
    pub fn new(inner: reqwest::Client) -> Self {
        Self { inner }
    }

    /// Shorthand for `request(Method::GET, url)`.
    pub fn get<U>(&self, url: U) -> CodexRequestBuilder
    where
        U: IntoUrl,
    {
        self.request(Method::GET, url)
    }

    /// Shorthand for `request(Method::POST, url)`.
    pub fn post<U>(&self, url: U) -> CodexRequestBuilder
    where
        U: IntoUrl,
    {
        self.request(Method::POST, url)
    }

    /// Starts a request, capturing method and URL up front so `send` can log
    /// them even when the request fails.
    pub fn request<U>(&self, method: Method, url: U) -> CodexRequestBuilder
    where
        U: IntoUrl,
    {
        let url_str = url.as_str().to_string();
        CodexRequestBuilder::new(self.inner.request(method.clone(), url), method, url_str)
    }
}

/// Builder mirroring `reqwest::RequestBuilder`, retaining method/URL for
/// structured logging in `send`.
#[must_use = "requests are not sent unless `send` is awaited"]
#[derive(Debug)]
pub struct CodexRequestBuilder {
    builder: reqwest::RequestBuilder,
    method: Method,
    url: String,
}

impl CodexRequestBuilder {
    fn new(builder: reqwest::RequestBuilder, method: Method, url: String) -> Self {
        Self {
            builder,
            method,
            url,
        }
    }

    // Applies `f` to the inner builder while preserving method/URL metadata.
    fn map(self, f: impl FnOnce(reqwest::RequestBuilder) -> reqwest::RequestBuilder) -> Self {
        Self {
            builder: f(self.builder),
            method: self.method,
            url: self.url,
        }
    }

    pub fn headers(self, headers: HeaderMap) -> Self {
        self.map(|builder| builder.headers(headers))
    }

    pub fn header<K, V>(self, key: K, value: V) -> Self
    where
        HeaderName: TryFrom<K>,
        <HeaderName as TryFrom<K>>::Error: Into<HttpError>,
        HeaderValue: TryFrom<V>,
        <HeaderValue as TryFrom<V>>::Error: Into<HttpError>,
    {
        self.map(|builder| builder.header(key, value))
    }

    pub fn bearer_auth<T>(self, token: T) -> Self
    where
        T: Display,
    {
        self.map(|builder| builder.bearer_auth(token))
    }

    pub fn timeout(self, timeout: Duration) -> Self {
        self.map(|builder| builder.timeout(timeout))
    }

    pub fn json<T>(self, value: &T) -> Self
    where
        T: ?Sized + Serialize,
    {
        self.map(|builder| builder.json(value))
    }

    /// Sends the request with the current span's trace context attached as
    /// headers, logging outcome (including upstream request ids) at DEBUG.
    pub async fn send(self) -> Result<Response, reqwest::Error> {
        // Inject trace propagation headers just before dispatch so they
        // reflect the span active at send time.
        let headers = trace_headers();
        match self.builder.headers(headers).send().await {
            Ok(response) => {
                let request_ids = Self::extract_request_ids(&response);
                tracing::debug!(
                    method = %self.method,
                    url = %self.url,
                    status = %response.status(),
                    request_ids = ?request_ids,
                    version = ?response.version(),
                    "Request completed"
                );
                Ok(response)
            }
            Err(error) => {
                let status = error.status();
                tracing::debug!(
                    method = %self.method,
                    url = %self.url,
                    status = status.map(|s| s.as_u16()),
                    error = %error,
                    "Request failed"
                );
                Err(error)
            }
        }
    }

    // Collects well-known correlation-id headers for debugging; headers with
    // non-UTF-8 values are skipped.
    fn extract_request_ids(response: &Response) -> HashMap<String, String> {
        ["cf-ray", "x-request-id", "x-oai-request-id"]
            .iter()
            .filter_map(|&name| {
                let header_name = HeaderName::from_static(name);
                let value = response.headers().get(header_name)?;
                let value = value.to_str().ok()?.to_owned();
                Some((name.to_owned(), value))
            })
            .collect()
    }
}

// Adapter letting the OpenTelemetry propagator write into a reqwest
// `HeaderMap`; invalid header names/values are silently dropped.
struct HeaderMapInjector<'a>(&'a mut HeaderMap);

impl<'a> Injector for HeaderMapInjector<'a> {
    fn set(&mut self, key: &str, value: String) {
        if let (Ok(name), Ok(val)) = (
            HeaderName::from_bytes(key.as_bytes()),
            HeaderValue::from_str(&value),
        ) {
            self.0.insert(name, val);
        }
    }
}

// Builds the propagation headers for the currently active tracing span.
fn trace_headers() -> HeaderMap {
    let mut headers = HeaderMap::new();
    global::get_text_map_propagator(|prop| {
        prop.inject_context(
            &Span::current().context(),
            &mut HeaderMapInjector(&mut headers),
        );
    });
    headers
}

#[cfg(test)]
mod tests {
    use super::*;
    use opentelemetry::propagation::Extractor;
    use opentelemetry::propagation::TextMapPropagator;
    use opentelemetry::trace::TraceContextExt;
    use opentelemetry::trace::TracerProvider;
    use opentelemetry_sdk::propagation::TraceContextPropagator;
    use opentelemetry_sdk::trace::SdkTracerProvider;
    use tracing::trace_span;
    use tracing_subscriber::layer::SubscriberExt;
    use tracing_subscriber::util::SubscriberInitExt;

    // Round-trips: inject headers from a live span, then extract them and
    // verify trace/span ids survive intact.
    #[test]
    fn inject_trace_headers_uses_current_span_context() {
        global::set_text_map_propagator(TraceContextPropagator::new());
        let provider = SdkTracerProvider::builder().build();
        let tracer = provider.tracer("test-tracer");
        let subscriber = tracing_subscriber::registry()
            .with(tracing_opentelemetry::layer().with_tracer(tracer));
        let _guard = subscriber.set_default();
        let span = trace_span!("client_request");
        let _entered = span.enter();
        let span_context = span.context().span().span_context().clone();
        let headers = trace_headers();
        let extractor = HeaderMapExtractor(&headers);
        let extracted = TraceContextPropagator::new().extract(&extractor);
        let extracted_span = extracted.span();
        let extracted_context = extracted_span.span_context();
        assert!(extracted_context.is_valid());
        assert_eq!(extracted_context.trace_id(), span_context.trace_id());
        assert_eq!(extracted_context.span_id(), span_context.span_id());
    }

    struct HeaderMapExtractor<'a>(&'a HeaderMap);

    impl<'a> Extractor for HeaderMapExtractor<'a> {
        fn get(&self, key: &str) -> Option<&str> {
            self.0.get(key).and_then(|value| value.to_str().ok())
        }

        fn keys(&self) -> Vec<&str> {
            self.0.keys().map(HeaderName::as_str).collect()
        }
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/codex-client/src/error.rs
codex-rs/codex-client/src/error.rs
use http::HeaderMap;
use http::StatusCode;
use thiserror::Error;

/// Errors surfaced by the HTTP transport layer.
#[derive(Debug, Error)]
pub enum TransportError {
    /// Non-success HTTP response, carrying the status plus whatever headers
    /// and body text were available.
    #[error("http {status}: {body:?}")]
    Http {
        status: StatusCode,
        headers: Option<HeaderMap>,
        body: Option<String>,
    },
    /// The retry budget was exhausted without a terminal result.
    #[error("retry limit reached")]
    RetryLimit,
    /// The request timed out.
    #[error("timeout")]
    Timeout,
    /// Any other network-level failure, flattened to a message string.
    #[error("network error: {0}")]
    Network(String),
    /// The request could not be constructed in the first place.
    #[error("request build error: {0}")]
    Build(String),
}

/// Errors produced while consuming a streaming (e.g. SSE) response body.
#[derive(Debug, Error)]
pub enum StreamError {
    /// The underlying stream yielded an error or ended prematurely.
    #[error("stream failed: {0}")]
    Stream(String),
    /// No data arrived within the configured idle window.
    #[error("timeout")]
    Timeout,
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/codex-client/src/transport.rs
codex-rs/codex-client/src/transport.rs
use crate::default_client::CodexHttpClient;
use crate::default_client::CodexRequestBuilder;
use crate::error::TransportError;
use crate::request::Request;
use crate::request::Response;
use async_trait::async_trait;
use bytes::Bytes;
use futures::StreamExt;
use futures::stream::BoxStream;
use http::HeaderMap;
use http::Method;
use http::StatusCode;
use tracing::Level;
use tracing::enabled;
use tracing::trace;

/// A boxed stream of raw response-body chunks.
pub type ByteStream = BoxStream<'static, Result<Bytes, TransportError>>;

/// A streaming HTTP response: status and headers up front, body as a lazy
/// stream of chunks.
pub struct StreamResponse {
    pub status: StatusCode,
    pub headers: HeaderMap,
    pub bytes: ByteStream,
}

/// Abstraction over the HTTP client so transports can be swapped (e.g. for
/// tests) behind a common interface.
#[async_trait]
pub trait HttpTransport: Send + Sync {
    /// Executes `req` and buffers the entire response body.
    async fn execute(&self, req: Request) -> Result<Response, TransportError>;
    /// Executes `req` and exposes the response body as a byte stream.
    async fn stream(&self, req: Request) -> Result<StreamResponse, TransportError>;
}

/// `HttpTransport` implementation backed by `reqwest` via `CodexHttpClient`.
#[derive(Clone, Debug)]
pub struct ReqwestTransport {
    client: CodexHttpClient,
}

impl ReqwestTransport {
    pub fn new(client: reqwest::Client) -> Self {
        Self {
            client: CodexHttpClient::new(client),
        }
    }

    // Translates a transport-level `Request` into a concrete builder.
    // NOTE(review): an unparseable method silently falls back to GET.
    fn build(&self, req: Request) -> Result<CodexRequestBuilder, TransportError> {
        let mut builder = self
            .client
            .request(
                Method::from_bytes(req.method.as_str().as_bytes()).unwrap_or(Method::GET),
                &req.url,
            )
            .headers(req.headers);
        if let Some(timeout) = req.timeout {
            builder = builder.timeout(timeout);
        }
        if let Some(body) = req.body {
            builder = builder.json(&body);
        }
        Ok(builder)
    }

    // Maps reqwest failures onto the transport error taxonomy.
    fn map_error(err: reqwest::Error) -> TransportError {
        if err.is_timeout() {
            TransportError::Timeout
        } else {
            TransportError::Network(err.to_string())
        }
    }
}

#[async_trait]
impl HttpTransport for ReqwestTransport {
    async fn execute(&self, req: Request) -> Result<Response, TransportError> {
        // Request body is only formatted when TRACE logging is active.
        if enabled!(Level::TRACE) {
            trace!(
                "{} to {}: {}",
                req.method,
                req.url,
                req.body.as_ref().unwrap_or_default()
            );
        }
        let builder = self.build(req)?;
        let resp = builder.send().await.map_err(Self::map_error)?;
        let status = resp.status();
        let headers = resp.headers().clone();
        let bytes = resp.bytes().await.map_err(Self::map_error)?;
        // Non-2xx responses become `TransportError::Http` with the body
        // attached when it is valid UTF-8.
        if !status.is_success() {
            let body = String::from_utf8(bytes.to_vec()).ok();
            return Err(TransportError::Http {
                status,
                headers: Some(headers),
                body,
            });
        }
        Ok(Response {
            status,
            headers,
            body: bytes,
        })
    }

    async fn stream(&self, req: Request) -> Result<StreamResponse, TransportError> {
        if enabled!(Level::TRACE) {
            trace!(
                "{} to {}: {}",
                req.method,
                req.url,
                req.body.as_ref().unwrap_or_default()
            );
        }
        let builder = self.build(req)?;
        let resp = builder.send().await.map_err(Self::map_error)?;
        let status = resp.status();
        let headers = resp.headers().clone();
        // Error statuses are fully drained into text so the caller gets a
        // diagnostic body instead of a stream.
        if !status.is_success() {
            let body = resp.text().await.ok();
            return Err(TransportError::Http {
                status,
                headers: Some(headers),
                body,
            });
        }
        let stream = resp
            .bytes_stream()
            .map(|result| result.map_err(Self::map_error));
        Ok(StreamResponse {
            status,
            headers,
            bytes: Box::pin(stream),
        })
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/codex-client/src/request.rs
codex-rs/codex-client/src/request.rs
use bytes::Bytes;
use http::Method;
use reqwest::header::HeaderMap;
use serde::Serialize;
use serde_json::Value;
use std::time::Duration;

/// Transport-agnostic description of an HTTP request.
#[derive(Debug, Clone)]
pub struct Request {
    pub method: Method,
    pub url: String,
    pub headers: HeaderMap,
    // JSON body, if any; populated via `with_json`.
    pub body: Option<Value>,
    // Per-request timeout; `None` defers to the client's default.
    pub timeout: Option<Duration>,
}

impl Request {
    /// Creates a request with empty headers, no body and no timeout.
    pub fn new(method: Method, url: String) -> Self {
        Self {
            method,
            url,
            headers: HeaderMap::new(),
            body: None,
            timeout: None,
        }
    }

    /// Attaches `body` serialized as JSON.
    ///
    /// NOTE(review): serialization failures are silently discarded (`ok()`),
    /// leaving `body` as `None` — confirm callers are fine with that.
    pub fn with_json<T: Serialize>(mut self, body: &T) -> Self {
        self.body = serde_json::to_value(body).ok();
        self
    }
}

/// A fully buffered HTTP response.
#[derive(Debug, Clone)]
pub struct Response {
    pub status: http::StatusCode,
    pub headers: HeaderMap,
    pub body: Bytes,
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/codex-client/src/retry.rs
codex-rs/codex-client/src/retry.rs
use crate::error::TransportError;
use crate::request::Request;
use rand::Rng;
use std::future::Future;
use std::time::Duration;
use tokio::time::sleep;

/// Retry configuration: attempt budget, base backoff delay, and which
/// failure classes are eligible for retry.
#[derive(Debug, Clone)]
pub struct RetryPolicy {
    pub max_attempts: u64,
    pub base_delay: Duration,
    pub retry_on: RetryOn,
}

/// Flags selecting which error classes may be retried.
#[derive(Debug, Clone)]
pub struct RetryOn {
    pub retry_429: bool,
    pub retry_5xx: bool,
    pub retry_transport: bool,
}

impl RetryOn {
    /// Returns `true` when `err` is retryable and the attempt budget has not
    /// been spent.
    pub fn should_retry(&self, err: &TransportError, attempt: u64, max_attempts: u64) -> bool {
        // No further retries once `attempt` reaches the budget.
        if attempt >= max_attempts {
            return false;
        }
        match err {
            TransportError::Http { status, .. } => {
                (self.retry_429 && status.as_u16() == 429)
                    || (self.retry_5xx && status.is_server_error())
            }
            TransportError::Timeout | TransportError::Network(_) => self.retry_transport,
            _ => false,
        }
    }
}

/// Exponential backoff with ±10% jitter: `base * 2^(attempt - 1)`.
/// `attempt == 0` returns `base` unmodified (and without jitter).
pub fn backoff(base: Duration, attempt: u64) -> Duration {
    if attempt == 0 {
        return base;
    }
    // Saturating arithmetic avoids overflow for large attempt counts.
    let exp = 2u64.saturating_pow(attempt as u32 - 1);
    let millis = base.as_millis() as u64;
    let raw = millis.saturating_mul(exp);
    let jitter: f64 = rand::rng().random_range(0.9..1.1);
    Duration::from_millis((raw as f64 * jitter) as u64)
}

/// Drives `op` with retry: attempts are numbered `0..=max_attempts`; a fresh
/// `Request` is built per attempt via `make_req`. A retryable failure sleeps
/// `backoff(base_delay, attempt + 1)` and tries again; any other failure
/// (including one on the final attempt) is returned immediately.
pub async fn run_with_retry<T, F, Fut>(
    policy: RetryPolicy,
    mut make_req: impl FnMut() -> Request,
    op: F,
) -> Result<T, TransportError>
where
    F: Fn(Request, u64) -> Fut,
    Fut: Future<Output = Result<T, TransportError>>,
{
    for attempt in 0..=policy.max_attempts {
        let req = make_req();
        match op(req, attempt).await {
            Ok(resp) => return Ok(resp),
            Err(err)
                if policy
                    .retry_on
                    .should_retry(&err, attempt, policy.max_attempts) =>
            {
                sleep(backoff(policy.base_delay, attempt + 1)).await;
            }
            Err(err) => return Err(err),
        }
    }
    // Defensive fallback: the final attempt's error is normally returned
    // above (should_retry is false once the budget is spent).
    Err(TransportError::RetryLimit)
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/codex-client/src/sse.rs
codex-rs/codex-client/src/sse.rs
use crate::error::StreamError;
use crate::transport::ByteStream;
use eventsource_stream::Eventsource;
use futures::StreamExt;
use tokio::sync::mpsc;
use tokio::time::Duration;
use tokio::time::timeout;

/// Minimal SSE helper that forwards raw `data:` frames as UTF-8 strings.
///
/// Errors and idle timeouts are sent as `Err(StreamError)` before the task exits.
pub fn sse_stream(
    stream: ByteStream,
    idle_timeout: Duration,
    tx: mpsc::Sender<Result<String, StreamError>>,
) {
    // Parsing runs on a detached task; the channel is the only link back.
    tokio::spawn(async move {
        let mut stream = stream
            .map(|res| res.map_err(|e| StreamError::Stream(e.to_string())))
            .eventsource();
        loop {
            match timeout(idle_timeout, stream.next()).await {
                // An event arrived within the idle window: forward its data.
                Ok(Some(Ok(ev))) => {
                    if tx.send(Ok(ev.data.clone())).await.is_err() {
                        // Receiver dropped — stop quietly.
                        return;
                    }
                }
                // Parse/transport error: surface it, then stop.
                Ok(Some(Err(e))) => {
                    let _ = tx.send(Err(StreamError::Stream(e.to_string()))).await;
                    return;
                }
                // Upstream ended without explicit completion: treat as error.
                Ok(None) => {
                    let _ = tx
                        .send(Err(StreamError::Stream(
                            "stream closed before completion".into(),
                        )))
                        .await;
                    return;
                }
                // Nothing arrived within `idle_timeout`.
                Err(_) => {
                    let _ = tx.send(Err(StreamError::Timeout)).await;
                    return;
                }
            }
        }
    });
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/string/src/lib.rs
codex-rs/utils/string/src/lib.rs
/// Truncates `s` to at most `maxb` bytes, never splitting a UTF-8 character:
/// returns the longest prefix whose byte length fits the budget.
#[inline]
pub fn take_bytes_at_char_boundary(s: &str, maxb: usize) -> &str {
    if s.len() <= maxb {
        return s;
    }
    // Walk the char boundaries in order and keep the last one that still
    // fits; if even the first character is too wide, cut at 0.
    let cut = s
        .char_indices()
        .map(|(idx, ch)| idx + ch.len_utf8())
        .take_while(|&end| end <= maxb)
        .last()
        .unwrap_or(0);
    &s[..cut]
}

/// Returns the longest suffix of `s` that fits within `maxb` bytes, never
/// splitting a UTF-8 character.
#[inline]
pub fn take_last_bytes_at_char_boundary(s: &str, maxb: usize) -> &str {
    if s.len() <= maxb {
        return s;
    }
    // Consume characters from the back, spending the remaining byte budget
    // until the next character no longer fits.
    let mut budget = maxb;
    let mut start = s.len();
    for (idx, ch) in s.char_indices().rev() {
        let width = ch.len_utf8();
        if width > budget {
            break;
        }
        budget -= width;
        start = idx;
        if start == 0 {
            break;
        }
    }
    &s[start..]
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/image/src/lib.rs
codex-rs/utils/image/src/lib.rs
use std::num::NonZeroUsize;
use std::path::Path;
use std::sync::LazyLock;

use crate::error::ImageProcessingError;
use base64::Engine;
use base64::engine::general_purpose::STANDARD as BASE64_STANDARD;
use codex_utils_cache::BlockingLruCache;
use codex_utils_cache::sha1_digest;
use image::ColorType;
use image::DynamicImage;
use image::GenericImageView;
use image::ImageEncoder;
use image::ImageFormat;
use image::codecs::jpeg::JpegEncoder;
use image::codecs::png::PngEncoder;
use image::imageops::FilterType;

/// Maximum width used when resizing images before uploading.
pub const MAX_WIDTH: u32 = 2048;
/// Maximum height used when resizing images before uploading.
pub const MAX_HEIGHT: u32 = 768;

pub mod error;

/// An image encoded as PNG or JPEG bytes along with its dimensions.
#[derive(Debug, Clone)]
pub struct EncodedImage {
    pub bytes: Vec<u8>,
    pub mime: String,
    pub width: u32,
    pub height: u32,
}

impl EncodedImage {
    /// Renders the image as a `data:<mime>;base64,<payload>` URL.
    pub fn into_data_url(self) -> String {
        let encoded = BASE64_STANDARD.encode(&self.bytes);
        format!("data:{};base64,{}", self.mime, encoded)
    }
}

// Content-addressed LRU cache (keyed by SHA-1 of the file bytes) of
// previously processed images.
static IMAGE_CACHE: LazyLock<BlockingLruCache<[u8; 20], EncodedImage>> =
    LazyLock::new(|| BlockingLruCache::new(NonZeroUsize::new(32).unwrap_or(NonZeroUsize::MIN)));

/// Loads the image at `path` and returns it encoded so that it fits within
/// `MAX_WIDTH` x `MAX_HEIGHT`. Results are cached by content hash, so a file
/// whose bytes change is reprocessed.
pub fn load_and_resize_to_fit(path: &Path) -> Result<EncodedImage, ImageProcessingError> {
    let path_buf = path.to_path_buf();
    let file_bytes = read_file_bytes(path, &path_buf)?;
    let key = sha1_digest(&file_bytes);
    IMAGE_CACHE.get_or_try_insert_with(key, move || {
        // Only PNG/JPEG inputs may be passed through unmodified; any other
        // format gets re-encoded below.
        let format = match image::guess_format(&file_bytes) {
            Ok(ImageFormat::Png) => Some(ImageFormat::Png),
            Ok(ImageFormat::Jpeg) => Some(ImageFormat::Jpeg),
            _ => None,
        };
        let dynamic = image::load_from_memory(&file_bytes).map_err(|source| {
            ImageProcessingError::Decode {
                path: path_buf.clone(),
                source,
            }
        })?;
        let (width, height) = dynamic.dimensions();
        let encoded = if width <= MAX_WIDTH && height <= MAX_HEIGHT {
            if let Some(format) = format {
                // Fits and is already PNG/JPEG: keep original bytes intact.
                let mime = format_to_mime(format);
                EncodedImage {
                    bytes: file_bytes,
                    mime,
                    width,
                    height,
                }
            } else {
                // Fits but needs a supported container: re-encode to PNG.
                let (bytes, output_format) = encode_image(&dynamic, ImageFormat::Png)?;
                let mime = format_to_mime(output_format);
                EncodedImage {
                    bytes,
                    mime,
                    width,
                    height,
                }
            }
        } else {
            // Too large: downscale to fit the bounds, then re-encode.
            let resized = dynamic.resize(MAX_WIDTH, MAX_HEIGHT, FilterType::Triangle);
            let target_format = format.unwrap_or(ImageFormat::Png);
            let (bytes, output_format) = encode_image(&resized, target_format)?;
            let mime = format_to_mime(output_format);
            EncodedImage {
                bytes,
                mime,
                width: resized.width(),
                height: resized.height(),
            }
        };
        Ok(encoded)
    })
}

// Reads the file's bytes without panicking when called from within a Tokio
// runtime.
fn read_file_bytes(path: &Path, path_for_error: &Path) -> Result<Vec<u8>, ImageProcessingError> {
    match tokio::runtime::Handle::try_current() {
        // If we're inside a Tokio runtime, avoid block_on (it panics on worker threads).
        // Use block_in_place and do a standard blocking read safely.
        Ok(_) => tokio::task::block_in_place(|| std::fs::read(path)).map_err(|source| {
            ImageProcessingError::Read {
                path: path_for_error.to_path_buf(),
                source,
            }
        }),
        // Outside a runtime, just read synchronously.
        Err(_) => std::fs::read(path).map_err(|source| ImageProcessingError::Read {
            path: path_for_error.to_path_buf(),
            source,
        }),
    }
}

// Encodes to PNG (RGBA8) or JPEG (quality 85). Any other preferred format is
// coerced to PNG up front, which is why the final match arm is unreachable.
fn encode_image(
    image: &DynamicImage,
    preferred_format: ImageFormat,
) -> Result<(Vec<u8>, ImageFormat), ImageProcessingError> {
    let target_format = match preferred_format {
        ImageFormat::Jpeg => ImageFormat::Jpeg,
        _ => ImageFormat::Png,
    };
    let mut buffer = Vec::new();
    match target_format {
        ImageFormat::Png => {
            let rgba = image.to_rgba8();
            let encoder = PngEncoder::new(&mut buffer);
            encoder
                .write_image(
                    rgba.as_raw(),
                    image.width(),
                    image.height(),
                    ColorType::Rgba8.into(),
                )
                .map_err(|source| ImageProcessingError::Encode {
                    format: target_format,
                    source,
                })?;
        }
        ImageFormat::Jpeg => {
            let mut encoder = JpegEncoder::new_with_quality(&mut buffer, 85);
            encoder
                .encode_image(image)
                .map_err(|source| ImageProcessingError::Encode {
                    format: target_format,
                    source,
                })?;
        }
        _ => unreachable!("unsupported target_format should have been handled earlier"),
    }
    Ok((buffer, target_format))
}

// Maps the (PNG/JPEG-only) output format to its MIME type.
fn format_to_mime(format: ImageFormat) -> String {
    match format {
        ImageFormat::Jpeg => "image/jpeg".to_string(),
        _ => "image/png".to_string(),
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use image::GenericImageView;
    use image::ImageBuffer;
    use image::Rgba;
    use tempfile::NamedTempFile;

    #[tokio::test(flavor = "multi_thread")]
    async fn returns_original_image_when_within_bounds() {
        let temp_file = NamedTempFile::new().expect("temp file");
        let image = ImageBuffer::from_pixel(64, 32, Rgba([10u8, 20, 30, 255]));
        image
            .save_with_format(temp_file.path(), ImageFormat::Png)
            .expect("write png to temp file");
        let original_bytes = std::fs::read(temp_file.path()).expect("read written image");
        let encoded = load_and_resize_to_fit(temp_file.path()).expect("process image");
        assert_eq!(encoded.width, 64);
        assert_eq!(encoded.height, 32);
        assert_eq!(encoded.mime, "image/png");
        assert_eq!(encoded.bytes, original_bytes);
    }

    #[tokio::test(flavor = "multi_thread")]
    async fn downscales_large_image() {
        let temp_file = NamedTempFile::new().expect("temp file");
        let image = ImageBuffer::from_pixel(4096, 2048, Rgba([200u8, 10, 10, 255]));
        image
            .save_with_format(temp_file.path(), ImageFormat::Png)
            .expect("write png to temp file");
        let processed = load_and_resize_to_fit(temp_file.path()).expect("process image");
        assert!(processed.width <= MAX_WIDTH);
        assert!(processed.height <= MAX_HEIGHT);
        let loaded =
            image::load_from_memory(&processed.bytes).expect("read resized bytes back into image");
        assert_eq!(loaded.dimensions(), (processed.width, processed.height));
    }

    #[tokio::test(flavor = "multi_thread")]
    async fn fails_cleanly_for_invalid_images() {
        let temp_file = NamedTempFile::new().expect("temp file");
        std::fs::write(temp_file.path(), b"not an image").expect("write bytes");
        let err = load_and_resize_to_fit(temp_file.path()).expect_err("invalid image should fail");
        match err {
            ImageProcessingError::Decode { .. } => {}
            _ => panic!("unexpected error variant"),
        }
    }

    #[tokio::test(flavor = "multi_thread")]
    async fn reprocesses_updated_file_contents() {
        {
            IMAGE_CACHE.clear();
        }
        let temp_file = NamedTempFile::new().expect("temp file");
        let first_image = ImageBuffer::from_pixel(32, 16, Rgba([20u8, 120, 220, 255]));
        first_image
            .save_with_format(temp_file.path(), ImageFormat::Png)
            .expect("write initial image");
        let first = load_and_resize_to_fit(temp_file.path()).expect("process first image");
        let second_image = ImageBuffer::from_pixel(96, 48, Rgba([50u8, 60, 70, 255]));
        second_image
            .save_with_format(temp_file.path(), ImageFormat::Png)
            .expect("write updated image");
        let second = load_and_resize_to_fit(temp_file.path()).expect("process updated image");
        assert_eq!(first.width, 32);
        assert_eq!(first.height, 16);
        assert_eq!(second.width, 96);
        assert_eq!(second.height, 48);
        assert_ne!(second.bytes, first.bytes);
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/image/src/error.rs
codex-rs/utils/image/src/error.rs
use image::ImageError;
use image::ImageFormat;
use std::path::PathBuf;
use thiserror::Error;

/// Failures that can occur while reading, decoding or encoding an image.
#[derive(Debug, Error)]
pub enum ImageProcessingError {
    /// The file could not be read from disk.
    #[error("failed to read image at {path}: {source}")]
    Read {
        path: PathBuf,
        #[source]
        source: std::io::Error,
    },
    /// The bytes could not be decoded as an image.
    #[error("failed to decode image at {path}: {source}")]
    Decode {
        path: PathBuf,
        #[source]
        source: image::ImageError,
    },
    /// Re-encoding to `format` failed.
    #[error("failed to encode image as {format:?}: {source}")]
    Encode {
        format: ImageFormat,
        #[source]
        source: image::ImageError,
    },
}

impl ImageProcessingError {
    /// `true` when the failure stems from malformed image data (a decoding
    /// error), as opposed to I/O or encoding problems.
    pub fn is_invalid_image(&self) -> bool {
        matches!(
            self,
            ImageProcessingError::Decode {
                source: ImageError::Decoding(_),
                ..
            }
        )
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/cache/src/lib.rs
codex-rs/utils/cache/src/lib.rs
use std::borrow::Borrow;
use std::hash::Hash;
use std::num::NonZeroUsize;

use lru::LruCache;
use sha1::Digest;
use sha1::Sha1;
use tokio::sync::Mutex;
use tokio::sync::MutexGuard;

/// A minimal LRU cache protected by a Tokio mutex.
/// Calls outside a Tokio runtime are no-ops.
pub struct BlockingLruCache<K, V> {
    inner: Mutex<LruCache<K, V>>,
}

impl<K, V> BlockingLruCache<K, V>
where
    K: Eq + Hash,
{
    /// Creates a cache with the provided non-zero capacity.
    #[must_use]
    pub fn new(capacity: NonZeroUsize) -> Self {
        Self {
            inner: Mutex::new(LruCache::new(capacity)),
        }
    }

    /// Returns a clone of the cached value for `key`, or computes and inserts it.
    ///
    /// Outside a Tokio runtime the cache is bypassed: `value()` is computed
    /// and returned without being stored.
    pub fn get_or_insert_with(&self, key: K, value: impl FnOnce() -> V) -> V
    where
        V: Clone,
    {
        if let Some(mut guard) = lock_if_runtime(&self.inner) {
            if let Some(v) = guard.get(&key) {
                return v.clone();
            }
            let v = value();
            // Insert and return a clone to keep ownership in the cache.
            guard.put(key, v.clone());
            return v;
        }
        value()
    }

    /// Like `get_or_insert_with`, but the value factory may fail.
    pub fn get_or_try_insert_with<E>(
        &self,
        key: K,
        value: impl FnOnce() -> Result<V, E>,
    ) -> Result<V, E>
    where
        V: Clone,
    {
        if let Some(mut guard) = lock_if_runtime(&self.inner) {
            if let Some(v) = guard.get(&key) {
                return Ok(v.clone());
            }
            let v = value()?;
            guard.put(key, v.clone());
            return Ok(v);
        }
        value()
    }

    /// Builds a cache if `capacity` is non-zero, returning `None` otherwise.
    #[must_use]
    pub fn try_with_capacity(capacity: usize) -> Option<Self> {
        NonZeroUsize::new(capacity).map(Self::new)
    }

    /// Returns a clone of the cached value corresponding to `key`, if present.
    pub fn get<Q>(&self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
        V: Clone,
    {
        let mut guard = lock_if_runtime(&self.inner)?;
        guard.get(key).cloned()
    }

    /// Inserts `value` for `key`, returning the previous entry if it existed.
    pub fn insert(&self, key: K, value: V) -> Option<V> {
        let mut guard = lock_if_runtime(&self.inner)?;
        guard.put(key, value)
    }

    /// Removes the entry for `key` if it exists, returning it.
    pub fn remove<Q>(&self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        let mut guard = lock_if_runtime(&self.inner)?;
        guard.pop(key)
    }

    /// Clears all entries from the cache.
    pub fn clear(&self) {
        if let Some(mut guard) = lock_if_runtime(&self.inner) {
            guard.clear();
        }
    }

    /// Executes `callback` with a mutable reference to the underlying cache.
    ///
    /// Outside a runtime, `callback` runs against a throwaway unbounded
    /// cache, so its effects are not persisted.
    pub fn with_mut<R>(&self, callback: impl FnOnce(&mut LruCache<K, V>) -> R) -> R {
        if let Some(mut guard) = lock_if_runtime(&self.inner) {
            callback(&mut guard)
        } else {
            let mut disabled = LruCache::unbounded();
            callback(&mut disabled)
        }
    }

    /// Provides direct access to the cache guard when a Tokio runtime is available.
    pub fn blocking_lock(&self) -> Option<MutexGuard<'_, LruCache<K, V>>> {
        lock_if_runtime(&self.inner)
    }
}

// Acquires the mutex only when running inside a Tokio runtime; uses
// block_in_place so blocking on the lock does not stall the worker thread.
fn lock_if_runtime<K, V>(m: &Mutex<LruCache<K, V>>) -> Option<MutexGuard<'_, LruCache<K, V>>>
where
    K: Eq + Hash,
{
    tokio::runtime::Handle::try_current().ok()?;
    Some(tokio::task::block_in_place(|| m.blocking_lock()))
}

/// Computes the SHA-1 digest of `bytes`.
///
/// Useful for content-based cache keys when you want to avoid staleness
/// caused by path-only keys.
#[must_use]
pub fn sha1_digest(bytes: &[u8]) -> [u8; 20] {
    let mut hasher = Sha1::new();
    hasher.update(bytes);
    let result = hasher.finalize();
    let mut out = [0; 20];
    out.copy_from_slice(&result);
    out
}

#[cfg(test)]
mod tests {
    use super::BlockingLruCache;
    use std::num::NonZeroUsize;

    #[tokio::test(flavor = "multi_thread")]
    async fn stores_and_retrieves_values() {
        let cache = BlockingLruCache::new(NonZeroUsize::new(2).expect("capacity"));
        assert!(cache.get(&"first").is_none());
        cache.insert("first", 1);
        assert_eq!(cache.get(&"first"), Some(1));
    }

    #[tokio::test(flavor = "multi_thread")]
    async fn evicts_least_recently_used() {
        let cache = BlockingLruCache::new(NonZeroUsize::new(2).expect("capacity"));
        cache.insert("a", 1);
        cache.insert("b", 2);
        // Touch "a" so "b" becomes the eviction candidate.
        assert_eq!(cache.get(&"a"), Some(1));
        cache.insert("c", 3);
        assert!(cache.get(&"b").is_none());
        assert_eq!(cache.get(&"a"), Some(1));
        assert_eq!(cache.get(&"c"), Some(3));
    }

    // Without a Tokio runtime every operation is a no-op.
    #[test]
    fn disabled_without_runtime() {
        let cache = BlockingLruCache::new(NonZeroUsize::new(2).expect("capacity"));
        cache.insert("first", 1);
        assert!(cache.get(&"first").is_none());
        assert_eq!(cache.get_or_insert_with("first", || 2), 2);
        assert!(cache.get(&"first").is_none());
        assert!(cache.remove(&"first").is_none());
        cache.clear();
        let result = cache.with_mut(|inner| {
            inner.put("tmp", 3);
            inner.get(&"tmp").cloned()
        });
        assert_eq!(result, Some(3));
        assert!(cache.get(&"tmp").is_none());
        assert!(cache.blocking_lock().is_none());
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/cargo-bin/src/lib.rs
codex-rs/utils/cargo-bin/src/lib.rs
use std::ffi::OsString;
use std::path::PathBuf;

/// Errors from resolving the path of a test binary under Cargo or Buck2.
#[derive(Debug, thiserror::Error)]
pub enum CargoBinError {
    #[error("failed to read current exe")]
    CurrentExe {
        #[source]
        source: std::io::Error,
    },
    #[error("failed to read current directory")]
    CurrentDir {
        #[source]
        source: std::io::Error,
    },
    #[error("CARGO_BIN_EXE env var {key} resolved to {path:?}, but it does not exist")]
    ResolvedPathDoesNotExist { key: String, path: PathBuf },
    #[error("could not locate binary {name:?}; tried env vars {env_keys:?}; {fallback}")]
    NotFound {
        name: String,
        env_keys: Vec<String>,
        fallback: String,
    },
}

/// Returns an absolute path to a binary target built for the current test run.
///
/// In `cargo test`, `CARGO_BIN_EXE_*` env vars are absolute, but Buck2 may set
/// them to project-relative paths (e.g. `buck-out/...`). Those paths break if a
/// test later changes its working directory. This helper makes the path
/// absolute up-front so callers can safely `chdir` afterwards.
pub fn cargo_bin(name: &str) -> Result<PathBuf, CargoBinError> {
    // Environment variables take precedence over assert_cmd's resolution.
    let env_keys = cargo_bin_env_keys(name);
    for key in &env_keys {
        if let Some(value) = std::env::var_os(key) {
            return resolve_bin_from_env(key, value);
        }
    }
    match assert_cmd::Command::cargo_bin(name) {
        Ok(cmd) => {
            let abs = absolutize_from_buck_or_cwd(PathBuf::from(cmd.get_program()))?;
            if abs.exists() {
                Ok(abs)
            } else {
                Err(CargoBinError::ResolvedPathDoesNotExist {
                    key: "assert_cmd::Command::cargo_bin".to_owned(),
                    path: abs,
                })
            }
        }
        Err(err) => Err(CargoBinError::NotFound {
            name: name.to_owned(),
            env_keys,
            fallback: format!("assert_cmd fallback failed: {err}"),
        }),
    }
}

// Candidate env-var names for `name`, covering dash and underscore forms.
fn cargo_bin_env_keys(name: &str) -> Vec<String> {
    let mut keys = Vec::with_capacity(2);
    keys.push(format!("CARGO_BIN_EXE_{name}"));
    // Cargo replaces dashes in target names when exporting env vars.
    let underscore_name = name.replace('-', "_");
    if underscore_name != name {
        keys.push(format!("CARGO_BIN_EXE_{underscore_name}"));
    }
    keys
}

// Absolutizes the env-provided path and verifies the file exists.
fn resolve_bin_from_env(key: &str, value: OsString) -> Result<PathBuf, CargoBinError> {
    let abs = absolutize_from_buck_or_cwd(PathBuf::from(value))?;
    if abs.exists() {
        Ok(abs)
    } else {
        Err(CargoBinError::ResolvedPathDoesNotExist {
            key: key.to_owned(),
            path: abs,
        })
    }
}

// Makes a relative path absolute against the Buck project root when one can
// be found, otherwise against the current working directory.
fn absolutize_from_buck_or_cwd(path: PathBuf) -> Result<PathBuf, CargoBinError> {
    if path.is_absolute() {
        return Ok(path);
    }
    if let Some(root) = buck_project_root().map_err(|source| CargoBinError::CurrentExe { source })?
    {
        return Ok(root.join(path));
    }
    Ok(std::env::current_dir()
        .map_err(|source| CargoBinError::CurrentDir { source })?
        .join(path))
}

/// Best-effort attempt to find the Buck project root for the currently running
/// process.
///
/// Prefer this over `env!("CARGO_MANIFEST_DIR")` when running under Buck2: our
/// Buck generator sets `CARGO_MANIFEST_DIR="."` for compilation, which makes
/// `env!("CARGO_MANIFEST_DIR")` unusable for locating workspace files.
pub fn buck_project_root() -> Result<Option<PathBuf>, std::io::Error> {
    if let Some(root) = std::env::var_os("BUCK_PROJECT_ROOT") {
        let root = PathBuf::from(root);
        if root.is_absolute() {
            return Ok(Some(root));
        }
    }
    // Fall back to deriving the project root from the location of the test
    // runner executable:
    //   <project>/buck-out/v2/gen/.../__tests__/test-binary
    let exe = std::env::current_exe()?;
    for ancestor in exe.ancestors() {
        if ancestor.file_name().is_some_and(|name| name == "buck-out") {
            return Ok(ancestor.parent().map(PathBuf::from));
        }
    }
    Ok(None)
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/json-to-toml/src/lib.rs
codex-rs/utils/json-to-toml/src/lib.rs
use serde_json::Value as JsonValue; use toml::Value as TomlValue; /// Convert a `serde_json::Value` into a semantically equivalent `toml::Value`. pub fn json_to_toml(v: JsonValue) -> TomlValue { match v { JsonValue::Null => TomlValue::String(String::new()), JsonValue::Bool(b) => TomlValue::Boolean(b), JsonValue::Number(n) => { if let Some(i) = n.as_i64() { TomlValue::Integer(i) } else if let Some(f) = n.as_f64() { TomlValue::Float(f) } else { TomlValue::String(n.to_string()) } } JsonValue::String(s) => TomlValue::String(s), JsonValue::Array(arr) => TomlValue::Array(arr.into_iter().map(json_to_toml).collect()), JsonValue::Object(map) => { let tbl = map .into_iter() .map(|(k, v)| (k, json_to_toml(v))) .collect::<toml::value::Table>(); TomlValue::Table(tbl) } } } #[cfg(test)] mod tests { use super::*; use pretty_assertions::assert_eq; use serde_json::json; #[test] fn json_number_to_toml() { let json_value = json!(123); assert_eq!(TomlValue::Integer(123), json_to_toml(json_value)); } #[test] fn json_array_to_toml() { let json_value = json!([true, 1]); assert_eq!( TomlValue::Array(vec![TomlValue::Boolean(true), TomlValue::Integer(1)]), json_to_toml(json_value) ); } #[test] fn json_bool_to_toml() { let json_value = json!(false); assert_eq!(TomlValue::Boolean(false), json_to_toml(json_value)); } #[test] fn json_float_to_toml() { let json_value = json!(1.25); assert_eq!(TomlValue::Float(1.25), json_to_toml(json_value)); } #[test] fn json_null_to_toml() { let json_value = serde_json::Value::Null; assert_eq!(TomlValue::String(String::new()), json_to_toml(json_value)); } #[test] fn json_object_nested() { let json_value = json!({ "outer": { "inner": 2 } }); let expected = { let mut inner = toml::value::Table::new(); inner.insert("inner".into(), TomlValue::Integer(2)); let mut outer = toml::value::Table::new(); outer.insert("outer".into(), TomlValue::Table(inner)); TomlValue::Table(outer) }; assert_eq!(json_to_toml(json_value), expected); } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/readiness/src/lib.rs
codex-rs/utils/readiness/src/lib.rs
//! Readiness flag with token-based authorization and async waiting (Tokio).
use std::collections::HashSet;
use std::fmt;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicI32;
use std::sync::atomic::Ordering;
use std::time::Duration;

use tokio::sync::Mutex;
use tokio::sync::watch;
use tokio::time;

/// Opaque subscription token returned by `subscribe()`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct Token(i32);

/// Upper bound on waiting for the token-set lock; exceeding it surfaces
/// `ReadinessError::TokenLockFailed` instead of blocking indefinitely.
const LOCK_TIMEOUT: Duration = Duration::from_millis(1000);

#[async_trait::async_trait]
pub trait Readiness: Send + Sync + 'static {
    /// Returns true if the flag is currently marked ready. At least one token needs to be marked
    /// as ready before.
    /// `true` is not reversible: once ready, the flag stays ready forever.
    fn is_ready(&self) -> bool;

    /// Subscribe to readiness and receive an authorization token.
    ///
    /// If the flag is already ready, returns `FlagAlreadyReady`.
    async fn subscribe(&self) -> Result<Token, errors::ReadinessError>;

    /// Attempt to mark the flag ready, validated by the provided token.
    ///
    /// Returns `true` iff:
    /// - `token` is currently subscribed, and
    /// - the flag was not already ready.
    async fn mark_ready(&self, token: Token) -> Result<bool, errors::ReadinessError>;

    /// Asynchronously wait until the flag becomes ready.
    async fn wait_ready(&self);
}

pub struct ReadinessFlag {
    /// Atomic for cheap reads.
    ready: AtomicBool,
    /// Used to generate the next i32 token.
    next_id: AtomicI32,
    /// Set of active subscriptions.
    tokens: Mutex<HashSet<Token>>,
    /// Broadcasts readiness to async waiters.
    tx: watch::Sender<bool>,
}

impl ReadinessFlag {
    /// Create a new, not-yet-ready flag.
    pub fn new() -> Self {
        // The initial receiver is dropped; waiters subscribe on demand in
        // `wait_ready`, which is why broadcast errors are ignored elsewhere.
        let (tx, _rx) = watch::channel(false);
        Self {
            ready: AtomicBool::new(false),
            next_id: AtomicI32::new(1), // Reserve 0.
            tokens: Mutex::new(HashSet::new()),
            tx,
        }
    }

    /// Runs `f` with exclusive access to the token set, bounded by
    /// `LOCK_TIMEOUT`. A timeout maps to `TokenLockFailed` rather than
    /// waiting forever on a contended (or leaked) lock.
    async fn with_tokens<R>(
        &self,
        f: impl FnOnce(&mut HashSet<Token>) -> R,
    ) -> Result<R, errors::ReadinessError> {
        let mut guard = time::timeout(LOCK_TIMEOUT, self.tokens.lock())
            .await
            .map_err(|_| errors::ReadinessError::TokenLockFailed)?;
        Ok(f(&mut guard))
    }

    /// Reads the ready bit. Acquire pairs with the Release/AcqRel writes in
    /// `mark_ready` / `is_ready`.
    fn load_ready(&self) -> bool {
        self.ready.load(Ordering::Acquire)
    }
}

impl Default for ReadinessFlag {
    fn default() -> Self {
        Self::new()
    }
}

impl fmt::Debug for ReadinessFlag {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ReadinessFlag")
            .field("ready", &self.load_ready())
            .finish()
    }
}

#[async_trait::async_trait]
impl Readiness for ReadinessFlag {
    fn is_ready(&self) -> bool {
        if self.load_ready() {
            return true;
        }
        // Side effect: with zero live subscriptions there is nobody left to
        // authorize readiness, so the flag auto-promotes to ready here (see
        // the `is_ready_without_subscribers_marks_flag_ready` test). Uses
        // try_lock because this method is sync; on contention we just fall
        // through to the plain atomic read below.
        if let Ok(tokens) = self.tokens.try_lock()
            && tokens.is_empty()
        {
            // swap(AcqRel) ensures only one caller observes the transition
            // and broadcasts.
            let was_ready = self.ready.swap(true, Ordering::AcqRel);
            drop(tokens);
            if !was_ready {
                let _ = self.tx.send(true);
            }
            return true;
        }
        self.load_ready()
    }

    async fn subscribe(&self) -> Result<Token, errors::ReadinessError> {
        if self.load_ready() {
            return Err(errors::ReadinessError::FlagAlreadyReady);
        }
        // Generate a token; ensure it's not 0.
        let token = Token(self.next_id.fetch_add(1, Ordering::Relaxed));
        // Recheck readiness while holding the lock so mark_ready can't flip the flag between the
        // check above and inserting the token.
        let inserted = self
            .with_tokens(|tokens| {
                if self.load_ready() {
                    return false;
                }
                tokens.insert(token);
                true
            })
            .await?;
        if !inserted {
            return Err(errors::ReadinessError::FlagAlreadyReady);
        }
        Ok(token)
    }

    async fn mark_ready(&self, token: Token) -> Result<bool, errors::ReadinessError> {
        if self.load_ready() {
            return Ok(false);
        }
        if token.0 == 0 {
            return Ok(false); // Never authorize.
        }
        let marked = self
            .with_tokens(|set| {
                if !set.remove(&token) {
                    return false; // invalid or already used
                }
                // Flip the flag while still holding the lock so subscribe's
                // locked recheck cannot race past it.
                self.ready.store(true, Ordering::Release);
                set.clear(); // no further tokens needed once ready
                true
            })
            .await?;
        if !marked {
            return Ok(false);
        }
        // Best-effort broadcast; ignore error if there are no receivers.
        let _ = self.tx.send(true);
        Ok(true)
    }

    async fn wait_ready(&self) {
        // Note: this call may itself flip the flag if there are no
        // subscribers (see `is_ready`).
        if self.is_ready() {
            return;
        }
        let mut rx = self.tx.subscribe();
        // Fast-path check before awaiting.
        if *rx.borrow() {
            return;
        }
        // Await changes until true is observed. `changed()` erroring means the
        // sender was dropped, which also terminates the wait.
        while rx.changed().await.is_ok() {
            if *rx.borrow() {
                break;
            }
        }
    }
}

mod errors {
    use thiserror::Error;

    #[derive(Debug, Error)]
    pub enum ReadinessError {
        #[error("Failed to acquire readiness token lock")]
        TokenLockFailed,
        #[error("Flag is already ready. Impossible to subscribe")]
        FlagAlreadyReady,
    }
}

#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use super::Readiness;
    use super::ReadinessFlag;
    use super::Token;
    use super::errors::ReadinessError;
    use assert_matches::assert_matches;

    #[tokio::test]
    async fn subscribe_and_mark_ready_roundtrip() -> Result<(), ReadinessError> {
        let flag = ReadinessFlag::new();
        let token = flag.subscribe().await?;
        assert!(flag.mark_ready(token).await?);
        assert!(flag.is_ready());
        Ok(())
    }

    #[tokio::test]
    async fn subscribe_after_ready_returns_none() -> Result<(), ReadinessError> {
        let flag = ReadinessFlag::new();
        let token = flag.subscribe().await?;
        assert!(flag.mark_ready(token).await?);
        assert!(flag.subscribe().await.is_err());
        Ok(())
    }

    #[tokio::test]
    async fn mark_ready_rejects_unknown_token() -> Result<(), ReadinessError> {
        let flag = ReadinessFlag::new();
        // Unknown token does not authorize...
        assert!(!flag.mark_ready(Token(42)).await?);
        assert!(!flag.load_ready());
        // ...but is_ready auto-promotes since nothing is subscribed.
        assert!(flag.is_ready());
        Ok(())
    }

    #[tokio::test]
    async fn wait_ready_unblocks_after_mark_ready() -> Result<(), ReadinessError> {
        let flag = Arc::new(ReadinessFlag::new());
        let token = flag.subscribe().await?;
        let waiter = {
            let flag = Arc::clone(&flag);
            tokio::spawn(async move {
                flag.wait_ready().await;
            })
        };
        assert!(flag.mark_ready(token).await?);
        waiter.await.expect("waiting task should not panic");
        Ok(())
    }

    #[tokio::test]
    async fn mark_ready_twice_uses_single_token() -> Result<(), ReadinessError> {
        let flag = ReadinessFlag::new();
        let token = flag.subscribe().await?;
        assert!(flag.mark_ready(token).await?);
        assert!(!flag.mark_ready(token).await?);
        Ok(())
    }

    #[tokio::test]
    async fn is_ready_without_subscribers_marks_flag_ready() -> Result<(), ReadinessError> {
        let flag = ReadinessFlag::new();
        assert!(flag.is_ready());
        assert!(flag.is_ready());
        assert_matches!(
            flag.subscribe().await,
            Err(ReadinessError::FlagAlreadyReady)
        );
        Ok(())
    }

    #[tokio::test]
    async fn subscribe_returns_error_when_lock_is_held() {
        let flag = ReadinessFlag::new();
        // Hold the token lock so subscribe's locked section times out.
        let _guard = flag
            .tokens
            .try_lock()
            .expect("initial lock acquisition should succeed");
        let err = flag
            .subscribe()
            .await
            .expect_err("contended subscribe should report a lock failure");
        assert_matches!(err, ReadinessError::TokenLockFailed);
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/git/src/errors.rs
codex-rs/utils/git/src/errors.rs
use std::path::PathBuf;
use std::process::ExitStatus;
use std::string::FromUtf8Error;

use thiserror::Error;
use walkdir::Error as WalkdirError;

/// Errors returned while managing git worktree snapshots.
#[derive(Debug, Error)]
pub enum GitToolingError {
    /// A spawned `git` subcommand exited with a non-success status; `stderr`
    /// carries the captured diagnostic output.
    #[error("git command `{command}` failed with status {status}: {stderr}")]
    GitCommand {
        command: String,
        status: ExitStatus,
        stderr: String,
    },
    /// A `git` subcommand succeeded but produced output that was not valid
    /// UTF-8.
    #[error("git command `{command}` produced non-UTF-8 output")]
    GitOutputUtf8 {
        command: String,
        #[source]
        source: FromUtf8Error,
    },
    /// The provided path does not point inside a git repository.
    #[error("{path:?} is not a git repository")]
    NotAGitRepository { path: PathBuf },
    /// The supplied path was not relative to the repository root as required.
    #[error("path {path:?} must be relative to the repository root")]
    NonRelativePath { path: PathBuf },
    /// The supplied path resolves to a location outside the repository root.
    #[error("path {path:?} escapes the repository root")]
    PathEscapesRepository { path: PathBuf },
    /// Stripping a worktree prefix from a path failed.
    #[error("failed to process path inside worktree")]
    PathPrefix(#[from] std::path::StripPrefixError),
    /// Error surfaced while walking a directory tree.
    #[error(transparent)]
    Walkdir(#[from] WalkdirError),
    /// Underlying filesystem I/O error.
    #[error(transparent)]
    Io(#[from] std::io::Error),
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/git/src/ghost_commits.rs
codex-rs/utils/git/src/ghost_commits.rs
use std::collections::BTreeMap; use std::collections::HashSet; use std::ffi::OsString; use std::fs; use std::io; use std::path::Component; use std::path::Path; use std::path::PathBuf; use tempfile::Builder; use crate::GhostCommit; use crate::GitToolingError; use crate::operations::apply_repo_prefix_to_force_include; use crate::operations::ensure_git_repository; use crate::operations::normalize_relative_path; use crate::operations::repo_subdir; use crate::operations::resolve_head; use crate::operations::resolve_repository_root; use crate::operations::run_git_for_status; use crate::operations::run_git_for_stdout; use crate::operations::run_git_for_stdout_all; /// Default commit message used for ghost commits when none is provided. const DEFAULT_COMMIT_MESSAGE: &str = "codex snapshot"; /// Default threshold for ignoring large untracked directories. const DEFAULT_IGNORE_LARGE_UNTRACKED_DIRS: i64 = 200; /// Default threshold (10 MiB) for excluding large untracked files from ghost snapshots. const DEFAULT_IGNORE_LARGE_UNTRACKED_FILES: i64 = 10 * 1024 * 1024; /// Directories that should always be ignored when capturing ghost snapshots, /// even if they are not listed in .gitignore. /// /// These are typically large dependency or build trees that are not useful /// for undo and can cause snapshots to grow without bound. const DEFAULT_IGNORED_DIR_NAMES: &[&str] = &[ "node_modules", ".venv", "venv", "env", ".env", "dist", "build", ".pytest_cache", ".mypy_cache", ".cache", ".tox", "__pycache__", ]; /// Options to control ghost commit creation. pub struct CreateGhostCommitOptions<'a> { pub repo_path: &'a Path, pub message: Option<&'a str>, pub force_include: Vec<PathBuf>, pub ghost_snapshot: GhostSnapshotConfig, } /// Options to control ghost commit restoration. 
pub struct RestoreGhostCommitOptions<'a> { pub repo_path: &'a Path, pub ghost_snapshot: GhostSnapshotConfig, } #[derive(Debug, Clone, PartialEq, Eq)] pub struct GhostSnapshotConfig { pub ignore_large_untracked_files: Option<i64>, pub ignore_large_untracked_dirs: Option<i64>, pub disable_warnings: bool, } impl Default for GhostSnapshotConfig { fn default() -> Self { Self { ignore_large_untracked_files: Some(DEFAULT_IGNORE_LARGE_UNTRACKED_FILES), ignore_large_untracked_dirs: Some(DEFAULT_IGNORE_LARGE_UNTRACKED_DIRS), disable_warnings: false, } } } /// Summary produced alongside a ghost snapshot. #[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct GhostSnapshotReport { pub large_untracked_dirs: Vec<LargeUntrackedDir>, pub ignored_untracked_files: Vec<IgnoredUntrackedFile>, } /// Directory containing a large amount of untracked content. #[derive(Debug, Clone, PartialEq, Eq)] pub struct LargeUntrackedDir { pub path: PathBuf, pub file_count: i64, } /// Untracked file excluded from the snapshot because of its size. #[derive(Debug, Clone, PartialEq, Eq)] pub struct IgnoredUntrackedFile { pub path: PathBuf, pub byte_size: i64, } impl<'a> CreateGhostCommitOptions<'a> { /// Creates options scoped to the provided repository path. pub fn new(repo_path: &'a Path) -> Self { Self { repo_path, message: None, force_include: Vec::new(), ghost_snapshot: GhostSnapshotConfig::default(), } } /// Sets a custom commit message for the ghost commit. pub fn message(mut self, message: &'a str) -> Self { self.message = Some(message); self } pub fn ghost_snapshot(mut self, ghost_snapshot: GhostSnapshotConfig) -> Self { self.ghost_snapshot = ghost_snapshot; self } /// Exclude untracked files larger than `bytes` from the snapshot commit. /// /// These files are still treated as untracked for preservation purposes (i.e. they will not be /// deleted by undo), but they will not be captured in the snapshot tree. 
pub fn ignore_large_untracked_files(mut self, bytes: i64) -> Self { if bytes > 0 { self.ghost_snapshot.ignore_large_untracked_files = Some(bytes); } else { self.ghost_snapshot.ignore_large_untracked_files = None; } self } /// Supplies the entire force-include path list at once. pub fn force_include<I>(mut self, paths: I) -> Self where I: IntoIterator<Item = PathBuf>, { self.force_include = paths.into_iter().collect(); self } /// Adds a single path to the force-include list. pub fn push_force_include<P>(mut self, path: P) -> Self where P: Into<PathBuf>, { self.force_include.push(path.into()); self } } impl<'a> RestoreGhostCommitOptions<'a> { /// Creates restore options scoped to the provided repository path. pub fn new(repo_path: &'a Path) -> Self { Self { repo_path, ghost_snapshot: GhostSnapshotConfig::default(), } } pub fn ghost_snapshot(mut self, ghost_snapshot: GhostSnapshotConfig) -> Self { self.ghost_snapshot = ghost_snapshot; self } /// Exclude untracked files larger than `bytes` from undo cleanup. /// /// These files are treated as "always preserve" to avoid deleting large local artifacts. pub fn ignore_large_untracked_files(mut self, bytes: i64) -> Self { if bytes > 0 { self.ghost_snapshot.ignore_large_untracked_files = Some(bytes); } else { self.ghost_snapshot.ignore_large_untracked_files = None; } self } /// Ignore untracked directories that contain at least `file_count` untracked files. 
pub fn ignore_large_untracked_dirs(mut self, file_count: i64) -> Self { if file_count > 0 { self.ghost_snapshot.ignore_large_untracked_dirs = Some(file_count); } else { self.ghost_snapshot.ignore_large_untracked_dirs = None; } self } } fn detect_large_untracked_dirs( files: &[PathBuf], dirs: &[PathBuf], threshold: Option<i64>, ) -> Vec<LargeUntrackedDir> { let Some(threshold) = threshold else { return Vec::new(); }; if threshold <= 0 { return Vec::new(); } let mut counts: BTreeMap<PathBuf, i64> = BTreeMap::new(); let mut sorted_dirs: Vec<&PathBuf> = dirs.iter().collect(); sorted_dirs.sort_by(|a, b| { let a_components = a.components().count(); let b_components = b.components().count(); b_components.cmp(&a_components).then_with(|| a.cmp(b)) }); for file in files { let mut key: Option<PathBuf> = None; for dir in &sorted_dirs { if file.starts_with(dir.as_path()) { key = Some((*dir).clone()); break; } } let key = key.unwrap_or_else(|| { file.parent() .map(PathBuf::from) .unwrap_or_else(|| PathBuf::from(".")) }); let entry = counts.entry(key).or_insert(0); *entry += 1; } let mut result: Vec<LargeUntrackedDir> = counts .into_iter() .filter(|(_, count)| *count >= threshold) .map(|(path, file_count)| LargeUntrackedDir { path, file_count }) .collect(); result.sort_by(|a, b| { b.file_count .cmp(&a.file_count) .then_with(|| a.path.cmp(&b.path)) }); result } fn to_session_relative_path(path: &Path, repo_prefix: Option<&Path>) -> PathBuf { match repo_prefix { Some(prefix) => path .strip_prefix(prefix) .map(PathBuf::from) .unwrap_or_else(|_| path.to_path_buf()), None => path.to_path_buf(), } } /// Create a ghost commit capturing the current state of the repository's working tree. pub fn create_ghost_commit( options: &CreateGhostCommitOptions<'_>, ) -> Result<GhostCommit, GitToolingError> { create_ghost_commit_with_report(options).map(|(commit, _)| commit) } /// Compute a report describing the working tree for a ghost snapshot without creating a commit. 
pub fn capture_ghost_snapshot_report( options: &CreateGhostCommitOptions<'_>, ) -> Result<GhostSnapshotReport, GitToolingError> { ensure_git_repository(options.repo_path)?; let repo_root = resolve_repository_root(options.repo_path)?; let repo_prefix = repo_subdir(repo_root.as_path(), options.repo_path); let force_include = prepare_force_include(repo_prefix.as_deref(), &options.force_include)?; let existing_untracked = capture_existing_untracked( repo_root.as_path(), repo_prefix.as_deref(), options.ghost_snapshot.ignore_large_untracked_files, options.ghost_snapshot.ignore_large_untracked_dirs, &force_include, )?; let warning_ignored_files = existing_untracked .ignored_untracked_files .iter() .map(|file| IgnoredUntrackedFile { path: to_session_relative_path(file.path.as_path(), repo_prefix.as_deref()), byte_size: file.byte_size, }) .collect::<Vec<_>>(); let warning_ignored_dirs = existing_untracked .ignored_large_untracked_dirs .iter() .map(|dir| LargeUntrackedDir { path: to_session_relative_path(dir.path.as_path(), repo_prefix.as_deref()), file_count: dir.file_count, }) .collect::<Vec<_>>(); Ok(GhostSnapshotReport { large_untracked_dirs: warning_ignored_dirs, ignored_untracked_files: warning_ignored_files, }) } /// Create a ghost commit capturing the current state of the repository's working tree along with a report. 
pub fn create_ghost_commit_with_report( options: &CreateGhostCommitOptions<'_>, ) -> Result<(GhostCommit, GhostSnapshotReport), GitToolingError> { ensure_git_repository(options.repo_path)?; let repo_root = resolve_repository_root(options.repo_path)?; let repo_prefix = repo_subdir(repo_root.as_path(), options.repo_path); let parent = resolve_head(repo_root.as_path())?; let force_include = prepare_force_include(repo_prefix.as_deref(), &options.force_include)?; let status_snapshot = capture_status_snapshot( repo_root.as_path(), repo_prefix.as_deref(), options.ghost_snapshot.ignore_large_untracked_files, options.ghost_snapshot.ignore_large_untracked_dirs, &force_include, )?; let existing_untracked = status_snapshot.untracked; let warning_ignored_files = existing_untracked .ignored_untracked_files .iter() .map(|file| IgnoredUntrackedFile { path: to_session_relative_path(file.path.as_path(), repo_prefix.as_deref()), byte_size: file.byte_size, }) .collect::<Vec<_>>(); let large_untracked_dirs = existing_untracked .ignored_large_untracked_dirs .iter() .map(|dir| LargeUntrackedDir { path: to_session_relative_path(dir.path.as_path(), repo_prefix.as_deref()), file_count: dir.file_count, }) .collect::<Vec<_>>(); let index_tempdir = Builder::new().prefix("codex-git-index-").tempdir()?; let index_path = index_tempdir.path().join("index"); let base_env = vec![( OsString::from("GIT_INDEX_FILE"), OsString::from(index_path.as_os_str()), )]; // Use a temporary index so snapshotting does not disturb the user's index state. // Example plumbing sequence: // GIT_INDEX_FILE=/tmp/index git read-tree HEAD // GIT_INDEX_FILE=/tmp/index git add --all -- <paths> // GIT_INDEX_FILE=/tmp/index git write-tree // GIT_INDEX_FILE=/tmp/index git commit-tree <tree> -p <parent> -m "codex snapshot" // Pre-populate the temporary index with HEAD so unchanged tracked files // are included in the snapshot tree. 
if let Some(parent_sha) = parent.as_deref() { run_git_for_status( repo_root.as_path(), vec![OsString::from("read-tree"), OsString::from(parent_sha)], Some(base_env.as_slice()), )?; } let mut index_paths = status_snapshot.tracked_paths; index_paths.extend(existing_untracked.untracked_files_for_index.iter().cloned()); let index_paths = dedupe_paths(index_paths); // Stage tracked + new files into the temp index so write-tree reflects the working tree. // We use `git add --all` to make deletions show up in the snapshot tree too. add_paths_to_index(repo_root.as_path(), base_env.as_slice(), &index_paths)?; if !force_include.is_empty() { let mut args = Vec::with_capacity(force_include.len() + 2); args.push(OsString::from("add")); args.push(OsString::from("--force")); args.extend( force_include .iter() .map(|path| OsString::from(path.as_os_str())), ); run_git_for_status(repo_root.as_path(), args, Some(base_env.as_slice()))?; } let tree_id = run_git_for_stdout( repo_root.as_path(), vec![OsString::from("write-tree")], Some(base_env.as_slice()), )?; let mut commit_env = base_env; commit_env.extend(default_commit_identity()); let message = options.message.unwrap_or(DEFAULT_COMMIT_MESSAGE); let commit_args = { let mut result = vec![OsString::from("commit-tree"), OsString::from(&tree_id)]; if let Some(parent) = parent.as_deref() { result.extend([OsString::from("-p"), OsString::from(parent)]); } result.extend([OsString::from("-m"), OsString::from(message)]); result }; // `git commit-tree` writes a detached commit object without updating refs, // which keeps snapshots out of the user's branch history. // Retrieve commit ID. 
let commit_id = run_git_for_stdout( repo_root.as_path(), commit_args, Some(commit_env.as_slice()), )?; let ghost_commit = GhostCommit::new( commit_id, parent, merge_preserved_untracked_files( existing_untracked.files, &existing_untracked.ignored_untracked_files, ), merge_preserved_untracked_dirs( existing_untracked.dirs, &existing_untracked.ignored_large_untracked_dirs, ), ); Ok(( ghost_commit, GhostSnapshotReport { large_untracked_dirs, ignored_untracked_files: warning_ignored_files, }, )) } /// Restore the working tree to match the provided ghost commit. pub fn restore_ghost_commit(repo_path: &Path, commit: &GhostCommit) -> Result<(), GitToolingError> { restore_ghost_commit_with_options(&RestoreGhostCommitOptions::new(repo_path), commit) } /// Restore the working tree using the provided options. pub fn restore_ghost_commit_with_options( options: &RestoreGhostCommitOptions<'_>, commit: &GhostCommit, ) -> Result<(), GitToolingError> { ensure_git_repository(options.repo_path)?; let repo_root = resolve_repository_root(options.repo_path)?; let repo_prefix = repo_subdir(repo_root.as_path(), options.repo_path); let current_untracked = capture_existing_untracked( repo_root.as_path(), repo_prefix.as_deref(), options.ghost_snapshot.ignore_large_untracked_files, options.ghost_snapshot.ignore_large_untracked_dirs, &[], )?; restore_to_commit_inner(repo_root.as_path(), repo_prefix.as_deref(), commit.id())?; remove_new_untracked( repo_root.as_path(), commit.preexisting_untracked_files(), commit.preexisting_untracked_dirs(), current_untracked, ) } /// Restore the working tree to match the given commit ID. 
pub fn restore_to_commit(repo_path: &Path, commit_id: &str) -> Result<(), GitToolingError> { ensure_git_repository(repo_path)?; let repo_root = resolve_repository_root(repo_path)?; let repo_prefix = repo_subdir(repo_root.as_path(), repo_path); restore_to_commit_inner(repo_root.as_path(), repo_prefix.as_deref(), commit_id) } /// Restores the working tree and index to the given commit using `git restore`. /// The repository root and optional repository-relative prefix limit the restore scope. fn restore_to_commit_inner( repo_root: &Path, repo_prefix: Option<&Path>, commit_id: &str, ) -> Result<(), GitToolingError> { // `git restore` resets the working tree to the snapshot commit. // We intentionally avoid --staged to preserve user's staged changes. // While this might leave some Codex-staged changes in the index (if Codex ran `git add`), // it prevents data loss for users who use the index as a save point. // Data safety > cleanliness. // Example: // git restore --source <commit> --worktree -- <prefix> let mut restore_args = vec![ OsString::from("restore"), OsString::from("--source"), OsString::from(commit_id), OsString::from("--worktree"), OsString::from("--"), ]; if let Some(prefix) = repo_prefix { restore_args.push(prefix.as_os_str().to_os_string()); } else { restore_args.push(OsString::from(".")); } run_git_for_status(repo_root, restore_args, None)?; Ok(()) } #[derive(Default)] struct UntrackedSnapshot { files: Vec<PathBuf>, dirs: Vec<PathBuf>, untracked_files_for_index: Vec<PathBuf>, ignored_untracked_files: Vec<IgnoredUntrackedFile>, ignored_large_untracked_dirs: Vec<LargeUntrackedDir>, ignored_large_untracked_dir_files: Vec<PathBuf>, } #[derive(Default)] struct StatusSnapshot { tracked_paths: Vec<PathBuf>, untracked: UntrackedSnapshot, } /// Captures the working tree status under `repo_root`, optionally limited by `repo_prefix`. /// Returns the result as a `StatusSnapshot`. 
fn capture_status_snapshot( repo_root: &Path, repo_prefix: Option<&Path>, ignore_large_untracked_files: Option<i64>, ignore_large_untracked_dirs: Option<i64>, force_include: &[PathBuf], ) -> Result<StatusSnapshot, GitToolingError> { // Ask git for the zero-delimited porcelain status so we can enumerate // tracked, untracked, and ignored entries (including ones filtered by prefix). // This keeps the snapshot consistent without multiple git invocations. let mut args = vec![ OsString::from("status"), OsString::from("--porcelain=2"), OsString::from("-z"), OsString::from("--untracked-files=all"), ]; if let Some(prefix) = repo_prefix { args.push(OsString::from("--")); args.push(prefix.as_os_str().to_os_string()); } let output = run_git_for_stdout_all(repo_root, args, None)?; if output.is_empty() { return Ok(StatusSnapshot::default()); } let mut snapshot = StatusSnapshot::default(); let mut untracked_files_for_dir_scan: Vec<PathBuf> = Vec::new(); let mut expect_rename_source = false; for entry in output.split('\0') { if entry.is_empty() { continue; } if expect_rename_source { let normalized = normalize_relative_path(Path::new(entry))?; snapshot.tracked_paths.push(normalized); expect_rename_source = false; continue; } let record_type = entry.as_bytes().first().copied().unwrap_or(b' '); match record_type { b'?' | b'!' => { let mut parts = entry.splitn(2, ' '); let code = parts.next(); let path_part = parts.next(); let (Some(code), Some(path_part)) = (code, path_part) else { continue; }; if path_part.is_empty() { continue; } let normalized = normalize_relative_path(Path::new(path_part))?; if should_ignore_for_snapshot(&normalized) { continue; } let absolute = repo_root.join(&normalized); let is_dir = absolute.is_dir(); if is_dir { snapshot.untracked.dirs.push(normalized); } else if code == "?" 
{ untracked_files_for_dir_scan.push(normalized.clone()); if let Some(threshold) = ignore_large_untracked_files && threshold > 0 && !is_force_included(&normalized, force_include) && let Ok(Some(byte_size)) = untracked_file_size(&absolute) && byte_size > threshold { snapshot .untracked .ignored_untracked_files .push(IgnoredUntrackedFile { path: normalized, byte_size, }); } else { snapshot.untracked.files.push(normalized.clone()); snapshot .untracked .untracked_files_for_index .push(normalized); } } else { snapshot.untracked.files.push(normalized); } } b'1' => { if let Some(path) = extract_status_path_after_fields(entry, 8) { let normalized = normalize_relative_path(Path::new(path))?; snapshot.tracked_paths.push(normalized); } } b'2' => { if let Some(path) = extract_status_path_after_fields(entry, 9) { let normalized = normalize_relative_path(Path::new(path))?; snapshot.tracked_paths.push(normalized); } expect_rename_source = true; } b'u' => { if let Some(path) = extract_status_path_after_fields(entry, 10) { let normalized = normalize_relative_path(Path::new(path))?; snapshot.tracked_paths.push(normalized); } } _ => {} } } if let Some(threshold) = ignore_large_untracked_dirs && threshold > 0 { let ignored_large_untracked_dirs = detect_large_untracked_dirs( &untracked_files_for_dir_scan, &snapshot.untracked.dirs, Some(threshold), ) .into_iter() .filter(|entry| !entry.path.as_os_str().is_empty() && entry.path != Path::new(".")) .collect::<Vec<_>>(); if !ignored_large_untracked_dirs.is_empty() { let ignored_dir_paths = ignored_large_untracked_dirs .iter() .map(|entry| entry.path.as_path()) .collect::<Vec<_>>(); snapshot .untracked .files .retain(|path| !ignored_dir_paths.iter().any(|dir| path.starts_with(dir))); snapshot .untracked .dirs .retain(|path| !ignored_dir_paths.iter().any(|dir| path.starts_with(dir))); snapshot .untracked .untracked_files_for_index .retain(|path| !ignored_dir_paths.iter().any(|dir| path.starts_with(dir))); 
snapshot.untracked.ignored_untracked_files.retain(|file| { !ignored_dir_paths .iter() .any(|dir| file.path.starts_with(dir)) }); snapshot.untracked.ignored_large_untracked_dir_files = untracked_files_for_dir_scan .into_iter() .filter(|path| ignored_dir_paths.iter().any(|dir| path.starts_with(dir))) .collect(); snapshot.untracked.ignored_large_untracked_dirs = ignored_large_untracked_dirs; } } Ok(snapshot) } /// Captures the untracked and ignored entries under `repo_root`, optionally limited by `repo_prefix`. /// Returns the result as an `UntrackedSnapshot`. fn capture_existing_untracked( repo_root: &Path, repo_prefix: Option<&Path>, ignore_large_untracked_files: Option<i64>, ignore_large_untracked_dirs: Option<i64>, force_include: &[PathBuf], ) -> Result<UntrackedSnapshot, GitToolingError> { Ok(capture_status_snapshot( repo_root, repo_prefix, ignore_large_untracked_files, ignore_large_untracked_dirs, force_include, )? .untracked) } fn extract_status_path_after_fields(record: &str, fields_before_path: i64) -> Option<&str> { if fields_before_path <= 0 { return None; } let mut spaces = 0_i64; for (idx, byte) in record.as_bytes().iter().enumerate() { if *byte == b' ' { spaces += 1; if spaces == fields_before_path { return record.get((idx + 1)..).filter(|path| !path.is_empty()); } } } None } fn should_ignore_for_snapshot(path: &Path) -> bool { path.components().any(|component| { if let Component::Normal(name) = component && let Some(name_str) = name.to_str() { return DEFAULT_IGNORED_DIR_NAMES .iter() .any(|ignored| ignored == &name_str); } false }) } fn prepare_force_include( repo_prefix: Option<&Path>, force_include: &[PathBuf], ) -> Result<Vec<PathBuf>, GitToolingError> { let normalized_force = force_include .iter() .map(PathBuf::as_path) .map(normalize_relative_path) .collect::<Result<Vec<_>, _>>()?; Ok(apply_repo_prefix_to_force_include( repo_prefix, &normalized_force, )) } fn is_force_included(path: &Path, force_include: &[PathBuf]) -> bool { force_include .iter() 
.any(|candidate| path.starts_with(candidate.as_path()))
}

/// Returns the on-disk size in bytes of the entry at `path` without following
/// symlinks.
///
/// Returns `Ok(None)` when the metadata cannot be read (e.g. the entry vanished
/// between listing and stat); sizes that do not fit in `i64` saturate to
/// `i64::MAX`.
fn untracked_file_size(path: &Path) -> io::Result<Option<i64>> {
    let Ok(metadata) = fs::symlink_metadata(path) else {
        return Ok(None);
    };
    let Ok(len_i64) = i64::try_from(metadata.len()) else {
        return Ok(Some(i64::MAX));
    };
    Ok(Some(len_i64))
}

/// Stages `paths` into the index with `git add --all --`, invoked once per
/// chunk of 64 paths. No-op for an empty path list.
fn add_paths_to_index(
    repo_root: &Path,
    env: &[(OsString, OsString)],
    paths: &[PathBuf],
) -> Result<(), GitToolingError> {
    if paths.is_empty() {
        return Ok(());
    }
    let chunk_size = usize::try_from(64_i64).unwrap_or(1);
    for chunk in paths.chunks(chunk_size) {
        let mut args = vec![
            OsString::from("add"),
            OsString::from("--all"),
            OsString::from("--"),
        ];
        args.extend(chunk.iter().map(|path| path.as_os_str().to_os_string()));
        // Chunk the argv to avoid oversized command lines on large repos.
        run_git_for_status(repo_root, args, Some(env))?;
    }
    Ok(())
}

/// Removes duplicate paths while preserving first-seen order.
fn dedupe_paths(paths: Vec<PathBuf>) -> Vec<PathBuf> {
    let mut seen = HashSet::new();
    let mut result = Vec::new();
    for path in paths {
        if seen.insert(path.clone()) {
            result.push(path);
        }
    }
    result
}

/// Appends the paths of ignored untracked files to the preserved-file list so
/// restore logic treats them as preexisting. Returns `files` unchanged when
/// `ignored` is empty.
fn merge_preserved_untracked_files(
    mut files: Vec<PathBuf>,
    ignored: &[IgnoredUntrackedFile],
) -> Vec<PathBuf> {
    if ignored.is_empty() {
        return files;
    }
    files.extend(ignored.iter().map(|entry| entry.path.clone()));
    files
}

/// Appends the paths of ignored large untracked dirs to the preserved-dir list,
/// skipping paths already present (unlike the file variant, which may duplicate).
fn merge_preserved_untracked_dirs(
    mut dirs: Vec<PathBuf>,
    ignored_large_dirs: &[LargeUntrackedDir],
) -> Vec<PathBuf> {
    if ignored_large_dirs.is_empty() {
        return dirs;
    }
    for entry in ignored_large_dirs {
        if dirs.iter().any(|dir| dir == &entry.path) {
            continue;
        }
        dirs.push(entry.path.clone());
    }
    dirs
}

/// Removes untracked files and directories that were not present when the snapshot was captured.
fn remove_new_untracked(
    repo_root: &Path,
    preserved_files: &[PathBuf],
    preserved_dirs: &[PathBuf],
    current: UntrackedSnapshot,
) -> Result<(), GitToolingError> {
    if current.files.is_empty() && current.dirs.is_empty() {
        return Ok(());
    }
    // Set lookup for exact file matches; linear prefix scan for directories.
    let preserved_file_set: HashSet<PathBuf> = preserved_files.iter().cloned().collect();
    let preserved_dirs_vec: Vec<PathBuf> = preserved_dirs.to_vec();
    for path in current.files {
        if should_preserve(&path, &preserved_file_set, &preserved_dirs_vec) {
            continue;
        }
        remove_path(&repo_root.join(&path))?;
    }
    for dir in current.dirs {
        if should_preserve(&dir, &preserved_file_set, &preserved_dirs_vec) {
            continue;
        }
        remove_path(&repo_root.join(&dir))?;
    }
    Ok(())
}

/// Determines whether an untracked path should be kept because it existed in the snapshot.
fn should_preserve(
    path: &Path,
    preserved_files: &HashSet<PathBuf>,
    preserved_dirs: &[PathBuf],
) -> bool {
    if preserved_files.contains(path) {
        return true;
    }
    // A path inside any preserved directory is preserved transitively.
    preserved_dirs
        .iter()
        .any(|dir| path.starts_with(dir.as_path()))
}

/// Deletes the file or directory at the provided path, ignoring if it is already absent.
fn remove_path(path: &Path) -> Result<(), GitToolingError> {
    // symlink_metadata: do not follow symlinks, so a link to a directory is
    // removed as a file rather than recursively deleting its target.
    match fs::symlink_metadata(path) {
        Ok(metadata) => {
            if metadata.is_dir() {
                fs::remove_dir_all(path)?;
            } else {
                fs::remove_file(path)?;
            }
        }
        Err(err) => {
            if err.kind() == io::ErrorKind::NotFound {
                // Already gone — treat as success.
                return Ok(());
            }
            return Err(err.into());
        }
    }
    Ok(())
}

/// Returns the default author and committer identity for ghost commits.
/// Builds the GIT_AUTHOR_* / GIT_COMMITTER_* environment pairs used for every
/// ghost commit, so snapshots never depend on the user's git identity config.
fn default_commit_identity() -> Vec<(OsString, OsString)> {
    vec![
        (
            OsString::from("GIT_AUTHOR_NAME"),
            OsString::from("Codex Snapshot"),
        ),
        (
            OsString::from("GIT_AUTHOR_EMAIL"),
            OsString::from("snapshot@codex.local"),
        ),
        (
            OsString::from("GIT_COMMITTER_NAME"),
            OsString::from("Codex Snapshot"),
        ),
        (
            OsString::from("GIT_COMMITTER_EMAIL"),
            OsString::from("snapshot@codex.local"),
        ),
    ]
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::operations::run_git_for_stdout;
    use assert_matches::assert_matches;
    use pretty_assertions::assert_eq;
    use std::fs::File;
    use std::process::Command;
    use walkdir::WalkDir;

    /// Runs a git command in the test repository and asserts success.
    fn run_git_in(repo_path: &Path, args: &[&str]) {
        let status = Command::new("git")
            .current_dir(repo_path)
            .args(args)
            .status()
            .expect("git command");
        assert!(status.success(), "git command failed: {args:?}");
    }

    /// Runs a git command and returns its trimmed stdout output.
    fn run_git_stdout(repo_path: &Path, args: &[&str]) -> String {
        let output = Command::new("git")
            .current_dir(repo_path)
            .args(args)
            .output()
            .expect("git command");
        assert!(output.status.success(), "git command failed: {args:?}");
        String::from_utf8_lossy(&output.stdout).trim().to_string()
    }

    /// Initializes a repository with consistent settings for cross-platform tests.
    fn init_test_repo(repo: &Path) {
        run_git_in(repo, &["init", "--initial-branch=main"]);
        // Disable CRLF conversion so byte-level assertions match on Windows too.
        run_git_in(repo, &["config", "core.autocrlf", "false"]);
    }

    /// Creates a file with the given logical length without writing its
    /// contents (via `set_len`); rejects negative sizes with InvalidInput.
    fn create_sparse_file(path: &Path, bytes: i64) -> io::Result<()> {
        let file_len =
            u64::try_from(bytes).map_err(|_| io::Error::from(io::ErrorKind::InvalidInput))?;
        let file = File::create(path)?;
        file.set_len(file_len)?;
        Ok(())
    }

    // NOTE(review): the test module continues past this chunk; the following
    // attribute/doc pair belongs to a test defined on the next (truncated) span.
    #[test]
    /// Verifies a ghost commit can be created and restored end to end.
fn create_and_restore_roundtrip() -> Result<(), GitToolingError> { let temp = tempfile::tempdir()?; let repo = temp.path(); init_test_repo(repo); std::fs::write(repo.join("tracked.txt"), "initial\n")?; std::fs::write(repo.join("delete-me.txt"), "to be removed\n")?; run_git_in(repo, &["add", "tracked.txt", "delete-me.txt"]); run_git_in( repo, &[ "-c", "user.name=Tester", "-c", "user.email=test@example.com", "commit", "-m", "init", ], );
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
true
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/git/src/branch.rs
codex-rs/utils/git/src/branch.rs
use std::ffi::OsString;
use std::path::Path;

use crate::GitToolingError;
use crate::operations::ensure_git_repository;
use crate::operations::resolve_head;
use crate::operations::resolve_repository_root;
use crate::operations::run_git_for_stdout;

/// Returns the merge-base commit between `HEAD` and the latest version between local
/// and remote of the provided branch, if both exist.
///
/// The function mirrors `git merge-base HEAD <branch>` but returns `Ok(None)` when
/// the repository has no `HEAD` yet or when the branch cannot be resolved.
pub fn merge_base_with_head(
    repo_path: &Path,
    branch: &str,
) -> Result<Option<String>, GitToolingError> {
    ensure_git_repository(repo_path)?;
    let repo_root = resolve_repository_root(repo_path)?;
    // No HEAD (e.g. fresh repo with no commits): there is nothing to merge-base against.
    let head = match resolve_head(repo_root.as_path())? {
        Some(head) => head,
        None => return Ok(None),
    };
    let Some(branch_ref) = resolve_branch_ref(repo_root.as_path(), branch)? else {
        return Ok(None);
    };
    // Prefer the upstream tip when the remote tracking branch has commits the
    // local branch lacks, so the merge-base reflects the newer remote history.
    let preferred_ref =
        if let Some(upstream) = resolve_upstream_if_remote_ahead(repo_root.as_path(), branch)? {
            // Fall back to the local ref if the upstream name fails to resolve.
            resolve_branch_ref(repo_root.as_path(), &upstream)?.unwrap_or(branch_ref)
        } else {
            branch_ref
        };
    let merge_base = run_git_for_stdout(
        repo_root.as_path(),
        vec![
            OsString::from("merge-base"),
            OsString::from(head),
            OsString::from(preferred_ref),
        ],
        None,
    )?;
    Ok(Some(merge_base))
}

/// Resolves `branch` to a commit id via `git rev-parse --verify`.
///
/// A failing git command (typically an unknown ref) maps to `Ok(None)`; any
/// other tooling error is propagated.
fn resolve_branch_ref(repo_root: &Path, branch: &str) -> Result<Option<String>, GitToolingError> {
    let rev = run_git_for_stdout(
        repo_root,
        vec![
            OsString::from("rev-parse"),
            OsString::from("--verify"),
            OsString::from(branch),
        ],
        None,
    );
    match rev {
        Ok(rev) => Ok(Some(rev)),
        Err(GitToolingError::GitCommand { .. }) => Ok(None),
        Err(other) => Err(other),
    }
}

/// Returns the upstream ref name for `branch` when the remote tracking branch
/// has at least one commit the local branch lacks; `Ok(None)` otherwise
/// (including when no upstream is configured).
fn resolve_upstream_if_remote_ahead(
    repo_root: &Path,
    branch: &str,
) -> Result<Option<String>, GitToolingError> {
    // Ask git for the symbolic upstream name (e.g. "origin/main"); a failing
    // command means no upstream is configured for this branch.
    let upstream = match run_git_for_stdout(
        repo_root,
        vec![
            OsString::from("rev-parse"),
            OsString::from("--abbrev-ref"),
            OsString::from("--symbolic-full-name"),
            OsString::from(format!("{branch}@{{upstream}}")),
        ],
        None,
    ) {
        Ok(name) => {
            let trimmed = name.trim();
            if trimmed.is_empty() {
                return Ok(None);
            }
            trimmed.to_string()
        }
        Err(GitToolingError::GitCommand { .. }) => return Ok(None),
        Err(other) => return Err(other),
    };
    // "<left> <right>": left = commits only on the local branch, right =
    // commits only on the upstream.
    let counts = match run_git_for_stdout(
        repo_root,
        vec![
            OsString::from("rev-list"),
            OsString::from("--left-right"),
            OsString::from("--count"),
            OsString::from(format!("{branch}...{upstream}")),
        ],
        None,
    ) {
        Ok(counts) => counts,
        Err(GitToolingError::GitCommand { .. }) => return Ok(None),
        Err(other) => return Err(other),
    };
    let mut parts = counts.split_whitespace();
    // Local-only count is parsed but intentionally unused.
    let _left: i64 = parts.next().unwrap_or("0").parse().unwrap_or(0);
    let right: i64 = parts.next().unwrap_or("0").parse().unwrap_or(0);
    if right > 0 { Ok(Some(upstream)) } else { Ok(None) }
}

#[cfg(test)]
mod tests {
    use super::merge_base_with_head;
    use crate::GitToolingError;
    use pretty_assertions::assert_eq;
    use std::path::Path;
    use std::process::Command;
    use tempfile::tempdir;

    /// Runs a git command in the test repository and asserts success.
    fn run_git_in(repo_path: &Path, args: &[&str]) {
        let status = Command::new("git")
            .current_dir(repo_path)
            .args(args)
            .status()
            .expect("git command");
        assert!(status.success(), "git command failed: {args:?}");
    }

    /// Runs a git command and returns its trimmed stdout output.
    fn run_git_stdout(repo_path: &Path, args: &[&str]) -> String {
        let output = Command::new("git")
            .current_dir(repo_path)
            .args(args)
            .output()
            .expect("git command");
        assert!(output.status.success(), "git command failed: {args:?}");
        String::from_utf8_lossy(&output.stdout).trim().to_string()
    }

    /// Initializes a repository with deterministic settings for the tests.
    fn init_test_repo(repo_path: &Path) {
        run_git_in(repo_path, &["init", "--initial-branch=main"]);
        run_git_in(repo_path, &["config", "core.autocrlf", "false"]);
    }

    /// Commits staged changes with a fixed test identity.
    fn commit(repo_path: &Path, message: &str) {
        run_git_in(
            repo_path,
            &[
                "-c",
                "user.name=Tester",
                "-c",
                "user.email=test@example.com",
                "commit",
                "-m",
                message,
            ],
        );
    }

    /// Diverged local branches: the result must match plain `git merge-base`.
    #[test]
    fn merge_base_returns_shared_commit() -> Result<(), GitToolingError> {
        let temp = tempdir()?;
        let repo = temp.path();
        init_test_repo(repo);
        std::fs::write(repo.join("base.txt"), "base\n")?;
        run_git_in(repo, &["add", "base.txt"]);
        commit(repo, "base commit");
        run_git_in(repo, &["checkout", "-b", "feature"]);
        std::fs::write(repo.join("feature.txt"), "feature change\n")?;
        run_git_in(repo, &["add", "feature.txt"]);
        commit(repo, "feature commit");
        run_git_in(repo, &["checkout", "main"]);
        std::fs::write(repo.join("main.txt"), "main change\n")?;
        run_git_in(repo, &["add", "main.txt"]);
        commit(repo, "main commit");
        run_git_in(repo, &["checkout", "feature"]);
        let expected = run_git_stdout(repo, &["merge-base", "HEAD", "main"]);
        let merge_base = merge_base_with_head(repo, "main")?;
        assert_eq!(merge_base, Some(expected));
        Ok(())
    }

    /// Rewritten local `main` tracking a remote that still has the original
    /// history: merge-base must be computed against `origin/main`, not the
    /// rewritten local branch.
    #[test]
    fn merge_base_prefers_upstream_when_remote_ahead() -> Result<(), GitToolingError> {
        let temp = tempdir()?;
        let repo = temp.path().join("repo");
        let remote = temp.path().join("remote.git");
        std::fs::create_dir_all(&repo)?;
        std::fs::create_dir_all(&remote)?;
        run_git_in(&remote, &["init", "--bare"]);
        run_git_in(&repo, &["init", "--initial-branch=main"]);
        run_git_in(&repo, &["config", "core.autocrlf", "false"]);
        std::fs::write(repo.join("base.txt"), "base\n")?;
        run_git_in(&repo, &["add", "base.txt"]);
        commit(&repo, "base commit");
        run_git_in(
            &repo,
            &["remote", "add", "origin", remote.to_str().unwrap()],
        );
        run_git_in(&repo, &["push", "-u", "origin", "main"]);
        run_git_in(&repo, &["checkout", "-b", "feature"]);
        std::fs::write(repo.join("feature.txt"), "feature change\n")?;
        run_git_in(&repo, &["add", "feature.txt"]);
        commit(&repo, "feature commit");
        // Replace local main with unrelated history while origin keeps the old one.
        run_git_in(&repo, &["checkout", "--orphan", "rewrite"]);
        run_git_in(&repo, &["rm", "-rf", "."]);
        std::fs::write(repo.join("new-main.txt"), "rewritten main\n")?;
        run_git_in(&repo, &["add", "new-main.txt"]);
        commit(&repo, "rewrite main");
        run_git_in(&repo, &["branch", "-M", "rewrite", "main"]);
        run_git_in(&repo, &["branch", "--set-upstream-to=origin/main", "main"]);
        run_git_in(&repo, &["checkout", "feature"]);
        run_git_in(&repo, &["fetch", "origin"]);
        let expected = run_git_stdout(&repo, &["merge-base", "HEAD", "origin/main"]);
        let merge_base = merge_base_with_head(&repo, "main")?;
        assert_eq!(merge_base, Some(expected));
        Ok(())
    }

    /// An unresolvable branch name yields `None` rather than an error.
    #[test]
    fn merge_base_returns_none_when_branch_missing() -> Result<(), GitToolingError> {
        let temp = tempdir()?;
        let repo = temp.path();
        init_test_repo(repo);
        std::fs::write(repo.join("tracked.txt"), "tracked\n")?;
        run_git_in(repo, &["add", "tracked.txt"]);
        commit(repo, "initial");
        let merge_base = merge_base_with_head(repo, "missing-branch")?;
        assert_eq!(merge_base, None);
        Ok(())
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/git/src/lib.rs
codex-rs/utils/git/src/lib.rs
//! Public surface of the codex-git tooling crate: patch application, branch
//! merge-base helpers, and ghost-commit snapshot/restore, plus the
//! [`GhostCommit`] value shared across them.
use std::fmt;
use std::path::PathBuf;

// Internal modules; everything external goes through the re-exports below.
mod apply;
mod branch;
mod errors;
mod ghost_commits;
mod operations;
mod platform;

pub use apply::ApplyGitRequest;
pub use apply::ApplyGitResult;
pub use apply::apply_git_patch;
pub use apply::extract_paths_from_patch;
pub use apply::parse_git_apply_output;
pub use apply::stage_paths;
pub use branch::merge_base_with_head;
pub use errors::GitToolingError;
pub use ghost_commits::CreateGhostCommitOptions;
pub use ghost_commits::GhostSnapshotConfig;
pub use ghost_commits::GhostSnapshotReport;
pub use ghost_commits::IgnoredUntrackedFile;
pub use ghost_commits::LargeUntrackedDir;
pub use ghost_commits::RestoreGhostCommitOptions;
pub use ghost_commits::capture_ghost_snapshot_report;
pub use ghost_commits::create_ghost_commit;
pub use ghost_commits::create_ghost_commit_with_report;
pub use ghost_commits::restore_ghost_commit;
pub use ghost_commits::restore_ghost_commit_with_options;
pub use ghost_commits::restore_to_commit;
pub use platform::create_symlink;

use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;

// Commit IDs are carried as plain strings throughout this crate.
type CommitID = String;

/// Details of a ghost commit created from a repository state.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema, TS)]
pub struct GhostCommit {
    // NOTE: field order is part of the serialized (serde/TS) representation.
    id: CommitID,
    parent: Option<CommitID>,
    preexisting_untracked_files: Vec<PathBuf>,
    preexisting_untracked_dirs: Vec<PathBuf>,
}

impl GhostCommit {
    /// Create a new ghost commit wrapper from a raw commit ID and optional parent.
    pub fn new(
        id: CommitID,
        parent: Option<CommitID>,
        preexisting_untracked_files: Vec<PathBuf>,
        preexisting_untracked_dirs: Vec<PathBuf>,
    ) -> Self {
        Self {
            id,
            parent,
            preexisting_untracked_files,
            preexisting_untracked_dirs,
        }
    }

    /// Commit ID for the snapshot.
    pub fn id(&self) -> &str {
        &self.id
    }

    /// Parent commit ID, if the repository had a `HEAD` at creation time.
    pub fn parent(&self) -> Option<&str> {
        self.parent.as_deref()
    }

    /// Untracked or ignored files that already existed when the snapshot was captured.
    pub fn preexisting_untracked_files(&self) -> &[PathBuf] {
        &self.preexisting_untracked_files
    }

    /// Untracked or ignored directories that already existed when the snapshot was captured.
    pub fn preexisting_untracked_dirs(&self) -> &[PathBuf] {
        &self.preexisting_untracked_dirs
    }
}

/// Displays a ghost commit as its bare commit ID.
impl fmt::Display for GhostCommit {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.id)
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/git/src/platform.rs
codex-rs/utils/git/src/platform.rs
use std::path::Path;

use crate::GitToolingError;

/// Recreates a symlink at `destination` pointing to `link_target`.
///
/// On Unix a single `symlink` call covers both file and directory links, so
/// the original `_source` entry is never consulted.
#[cfg(unix)]
pub fn create_symlink(
    _source: &Path,
    link_target: &Path,
    destination: &Path,
) -> Result<(), GitToolingError> {
    std::os::unix::fs::symlink(link_target, destination)?;
    Ok(())
}

/// Recreates a symlink at `destination` pointing to `link_target`.
///
/// Windows distinguishes file and directory symlinks, so the kind of the
/// existing `source` entry is inspected without following it and mirrored onto
/// the new link.
#[cfg(windows)]
pub fn create_symlink(
    source: &Path,
    link_target: &Path,
    destination: &Path,
) -> Result<(), GitToolingError> {
    use std::os::windows::fs::FileTypeExt;

    let source_type = std::fs::symlink_metadata(source)?.file_type();
    if source_type.is_symlink_dir() {
        std::os::windows::fs::symlink_dir(link_target, destination)?;
    } else {
        std::os::windows::fs::symlink_file(link_target, destination)?;
    }
    Ok(())
}

#[cfg(not(any(unix, windows)))]
compile_error!("codex-git symlink support is only implemented for Unix and Windows");
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/git/src/apply.rs
codex-rs/utils/git/src/apply.rs
//! Helpers for applying unified diffs using the system `git` binary. //! //! The entry point is [`apply_git_patch`], which writes a diff to a temporary //! file, shells out to `git apply` with the right flags, and then parses the //! command’s output into structured details. Callers can opt into dry-run //! mode via [`ApplyGitRequest::preflight`] and inspect the resulting paths to //! learn what would change before applying for real. use once_cell::sync::Lazy; use regex::Regex; use std::ffi::OsStr; use std::io; use std::path::Path; use std::path::PathBuf; /// Parameters for invoking [`apply_git_patch`]. #[derive(Debug, Clone)] pub struct ApplyGitRequest { pub cwd: PathBuf, pub diff: String, pub revert: bool, pub preflight: bool, } /// Result of running [`apply_git_patch`], including paths gleaned from stdout/stderr. #[derive(Debug, Clone)] pub struct ApplyGitResult { pub exit_code: i32, pub applied_paths: Vec<String>, pub skipped_paths: Vec<String>, pub conflicted_paths: Vec<String>, pub stdout: String, pub stderr: String, pub cmd_for_log: String, } /// Apply a unified diff to the target repository by shelling out to `git apply`. /// /// When [`ApplyGitRequest::preflight`] is `true`, this behaves like `git apply --check` and /// leaves the working tree untouched while still parsing the command output for diagnostics. pub fn apply_git_patch(req: &ApplyGitRequest) -> io::Result<ApplyGitResult> { let git_root = resolve_git_root(&req.cwd)?; // Write unified diff into a temporary file let (tmpdir, patch_path) = write_temp_patch(&req.diff)?; // Keep tmpdir alive until function end to ensure the file exists let _guard = tmpdir; if req.revert && !req.preflight { // Stage WT paths first to avoid index mismatch on revert. 
stage_paths(&git_root, &req.diff)?; } // Build git args let mut args: Vec<String> = vec!["apply".into(), "--3way".into()]; if req.revert { args.push("-R".into()); } // Optional: additional git config via env knob (defaults OFF) let mut cfg_parts: Vec<String> = Vec::new(); if let Ok(cfg) = std::env::var("CODEX_APPLY_GIT_CFG") { for pair in cfg.split(',') { let p = pair.trim(); if p.is_empty() || !p.contains('=') { continue; } cfg_parts.push("-c".into()); cfg_parts.push(p.to_string()); } } args.push(patch_path.to_string_lossy().to_string()); // Optional preflight: dry-run only; do not modify working tree if req.preflight { let mut check_args = vec!["apply".to_string(), "--check".to_string()]; if req.revert { check_args.push("-R".to_string()); } check_args.push(patch_path.to_string_lossy().to_string()); let rendered = render_command_for_log(&git_root, &cfg_parts, &check_args); let (c_code, c_out, c_err) = run_git(&git_root, &cfg_parts, &check_args)?; let (mut applied_paths, mut skipped_paths, mut conflicted_paths) = parse_git_apply_output(&c_out, &c_err); applied_paths.sort(); applied_paths.dedup(); skipped_paths.sort(); skipped_paths.dedup(); conflicted_paths.sort(); conflicted_paths.dedup(); return Ok(ApplyGitResult { exit_code: c_code, applied_paths, skipped_paths, conflicted_paths, stdout: c_out, stderr: c_err, cmd_for_log: rendered, }); } let cmd_for_log = render_command_for_log(&git_root, &cfg_parts, &args); let (code, stdout, stderr) = run_git(&git_root, &cfg_parts, &args)?; let (mut applied_paths, mut skipped_paths, mut conflicted_paths) = parse_git_apply_output(&stdout, &stderr); applied_paths.sort(); applied_paths.dedup(); skipped_paths.sort(); skipped_paths.dedup(); conflicted_paths.sort(); conflicted_paths.dedup(); Ok(ApplyGitResult { exit_code: code, applied_paths, skipped_paths, conflicted_paths, stdout, stderr, cmd_for_log, }) } fn resolve_git_root(cwd: &Path) -> io::Result<PathBuf> { let out = std::process::Command::new("git") .arg("rev-parse") 
.arg("--show-toplevel") .current_dir(cwd) .output()?; let code = out.status.code().unwrap_or(-1); if code != 0 { return Err(io::Error::other(format!( "not a git repository (exit {}): {}", code, String::from_utf8_lossy(&out.stderr) ))); } let root = String::from_utf8_lossy(&out.stdout).trim().to_string(); Ok(PathBuf::from(root)) } fn write_temp_patch(diff: &str) -> io::Result<(tempfile::TempDir, PathBuf)> { let dir = tempfile::tempdir()?; let path = dir.path().join("patch.diff"); std::fs::write(&path, diff)?; Ok((dir, path)) } fn run_git(cwd: &Path, git_cfg: &[String], args: &[String]) -> io::Result<(i32, String, String)> { let mut cmd = std::process::Command::new("git"); for p in git_cfg { cmd.arg(p); } for a in args { cmd.arg(a); } let out = cmd.current_dir(cwd).output()?; let code = out.status.code().unwrap_or(-1); let stdout = String::from_utf8_lossy(&out.stdout).into_owned(); let stderr = String::from_utf8_lossy(&out.stderr).into_owned(); Ok((code, stdout, stderr)) } fn quote_shell(s: &str) -> String { let simple = s .chars() .all(|c| c.is_ascii_alphanumeric() || "-_.:/@%+".contains(c)); if simple { s.to_string() } else { format!("'{}'", s.replace('\'', "'\\''")) } } fn render_command_for_log(cwd: &Path, git_cfg: &[String], args: &[String]) -> String { let mut parts: Vec<String> = Vec::new(); parts.push("git".to_string()); for a in git_cfg { parts.push(quote_shell(a)); } for a in args { parts.push(quote_shell(a)); } format!( "(cd {} && {})", quote_shell(&cwd.display().to_string()), parts.join(" ") ) } /// Collect every path referenced by the diff headers inside `diff --git` sections. pub fn extract_paths_from_patch(diff_text: &str) -> Vec<String> { static RE: Lazy<Regex> = Lazy::new(|| { Regex::new(r"(?m)^diff --git a/(.*?) 
b/(.*)$") .unwrap_or_else(|e| panic!("invalid regex: {e}")) }); let mut set = std::collections::BTreeSet::new(); for caps in RE.captures_iter(diff_text) { if let Some(a) = caps.get(1).map(|m| m.as_str()) && a != "/dev/null" && !a.trim().is_empty() { set.insert(a.to_string()); } if let Some(b) = caps.get(2).map(|m| m.as_str()) && b != "/dev/null" && !b.trim().is_empty() { set.insert(b.to_string()); } } set.into_iter().collect() } /// Stage only the files that actually exist on disk for the given diff. pub fn stage_paths(git_root: &Path, diff: &str) -> io::Result<()> { let paths = extract_paths_from_patch(diff); let mut existing: Vec<String> = Vec::new(); for p in paths { let joined = git_root.join(&p); if std::fs::symlink_metadata(&joined).is_ok() { existing.push(p); } } if existing.is_empty() { return Ok(()); } let mut cmd = std::process::Command::new("git"); cmd.arg("add"); cmd.arg("--"); for p in &existing { cmd.arg(OsStr::new(p)); } let out = cmd.current_dir(git_root).output()?; let _code = out.status.code().unwrap_or(-1); // We do not hard fail staging; best-effort is OK. Return Ok even on non-zero. Ok(()) } // ============ Parser ported from VS Code (TS) ============ /// Parse `git apply` output into applied/skipped/conflicted path groupings. 
pub fn parse_git_apply_output( stdout: &str, stderr: &str, ) -> (Vec<String>, Vec<String>, Vec<String>) { let combined = [stdout, stderr] .iter() .filter(|s| !s.is_empty()) .cloned() .collect::<Vec<&str>>() .join("\n"); let mut applied = std::collections::BTreeSet::new(); let mut skipped = std::collections::BTreeSet::new(); let mut conflicted = std::collections::BTreeSet::new(); let mut last_seen_path: Option<String> = None; fn add(set: &mut std::collections::BTreeSet<String>, raw: &str) { let trimmed = raw.trim(); if trimmed.is_empty() { return; } let first = trimmed.chars().next().unwrap_or('\0'); let last = trimmed.chars().last().unwrap_or('\0'); let unquoted = if (first == '"' || first == '\'') && last == first && trimmed.len() >= 2 { &trimmed[1..trimmed.len() - 1] } else { trimmed }; if !unquoted.is_empty() { set.insert(unquoted.to_string()); } } static APPLIED_CLEAN: Lazy<Regex> = Lazy::new(|| regex_ci("^Applied patch(?: to)?\\s+(?P<path>.+?)\\s+cleanly\\.?$")); static APPLIED_CONFLICTS: Lazy<Regex> = Lazy::new(|| regex_ci("^Applied patch(?: to)?\\s+(?P<path>.+?)\\s+with conflicts\\.?$")); static APPLYING_WITH_REJECTS: Lazy<Regex> = Lazy::new(|| { regex_ci("^Applying patch\\s+(?P<path>.+?)\\s+with\\s+\\d+\\s+rejects?\\.{0,3}$") }); static CHECKING_PATCH: Lazy<Regex> = Lazy::new(|| regex_ci("^Checking patch\\s+(?P<path>.+?)\\.\\.\\.$")); static UNMERGED_LINE: Lazy<Regex> = Lazy::new(|| regex_ci("^U\\s+(?P<path>.+)$")); static PATCH_FAILED: Lazy<Regex> = Lazy::new(|| regex_ci("^error:\\s+patch failed:\\s+(?P<path>.+?)(?::\\d+)?(?:\\s|$)")); static DOES_NOT_APPLY: Lazy<Regex> = Lazy::new(|| regex_ci("^error:\\s+(?P<path>.+?):\\s+patch does not apply$")); static THREE_WAY_START: Lazy<Regex> = Lazy::new(|| { regex_ci("^(?:Performing three-way merge|Falling back to three-way merge)\\.\\.\\.$") }); static THREE_WAY_FAILED: Lazy<Regex> = Lazy::new(|| regex_ci("^Failed to perform three-way merge\\.\\.\\.$")); static FALLBACK_DIRECT: Lazy<Regex> = Lazy::new(|| 
regex_ci("^Falling back to direct application\\.\\.\\.$")); static LACKS_BLOB: Lazy<Regex> = Lazy::new(|| { regex_ci( "^(?:error: )?repository lacks the necessary blob to (?:perform|fall back on) 3-?way merge\\.?$", ) }); static INDEX_MISMATCH: Lazy<Regex> = Lazy::new(|| regex_ci("^error:\\s+(?P<path>.+?):\\s+does not match index\\b")); static NOT_IN_INDEX: Lazy<Regex> = Lazy::new(|| regex_ci("^error:\\s+(?P<path>.+?):\\s+does not exist in index\\b")); static ALREADY_EXISTS_WT: Lazy<Regex> = Lazy::new(|| { regex_ci("^error:\\s+(?P<path>.+?)\\s+already exists in (?:the )?working directory\\b") }); static FILE_EXISTS: Lazy<Regex> = Lazy::new(|| regex_ci("^error:\\s+patch failed:\\s+(?P<path>.+?)\\s+File exists")); static RENAMED_DELETED: Lazy<Regex> = Lazy::new(|| regex_ci("^error:\\s+path\\s+(?P<path>.+?)\\s+has been renamed\\/deleted")); static CANNOT_APPLY_BINARY: Lazy<Regex> = Lazy::new(|| { regex_ci( "^error:\\s+cannot apply binary patch to\\s+['\\\"]?(?P<path>.+?)['\\\"]?\\s+without full index line$", ) }); static BINARY_DOES_NOT_APPLY: Lazy<Regex> = Lazy::new(|| { regex_ci("^error:\\s+binary patch does not apply to\\s+['\\\"]?(?P<path>.+?)['\\\"]?$") }); static BINARY_INCORRECT_RESULT: Lazy<Regex> = Lazy::new(|| { regex_ci( "^error:\\s+binary patch to\\s+['\\\"]?(?P<path>.+?)['\\\"]?\\s+creates incorrect result\\b", ) }); static CANNOT_READ_CURRENT: Lazy<Regex> = Lazy::new(|| { regex_ci("^error:\\s+cannot read the current contents of\\s+['\\\"]?(?P<path>.+?)['\\\"]?$") }); static SKIPPED_PATCH: Lazy<Regex> = Lazy::new(|| regex_ci("^Skipped patch\\s+['\\\"]?(?P<path>.+?)['\\\"]\\.$")); static CANNOT_MERGE_BINARY_WARN: Lazy<Regex> = Lazy::new(|| { regex_ci( "^warning:\\s*Cannot merge binary files:\\s+(?P<path>.+?)\\s+\\(ours\\s+vs\\.\\s+theirs\\)", ) }); for raw_line in combined.lines() { let line = raw_line.trim(); if line.is_empty() { continue; } // === "Checking patch <path>..." 
tracking === if let Some(c) = CHECKING_PATCH.captures(line) { if let Some(m) = c.name("path") { last_seen_path = Some(m.as_str().to_string()); } continue; } // === Status lines === if let Some(c) = APPLIED_CLEAN.captures(line) { if let Some(m) = c.name("path") { add(&mut applied, m.as_str()); let p = applied.iter().next_back().cloned(); if let Some(p) = p { conflicted.remove(&p); skipped.remove(&p); last_seen_path = Some(p); } } continue; } if let Some(c) = APPLIED_CONFLICTS.captures(line) { if let Some(m) = c.name("path") { add(&mut conflicted, m.as_str()); let p = conflicted.iter().next_back().cloned(); if let Some(p) = p { applied.remove(&p); skipped.remove(&p); last_seen_path = Some(p); } } continue; } if let Some(c) = APPLYING_WITH_REJECTS.captures(line) { if let Some(m) = c.name("path") { add(&mut conflicted, m.as_str()); let p = conflicted.iter().next_back().cloned(); if let Some(p) = p { applied.remove(&p); skipped.remove(&p); last_seen_path = Some(p); } } continue; } // === “U <path>” after conflicts === if let Some(c) = UNMERGED_LINE.captures(line) { if let Some(m) = c.name("path") { add(&mut conflicted, m.as_str()); let p = conflicted.iter().next_back().cloned(); if let Some(p) = p { applied.remove(&p); skipped.remove(&p); last_seen_path = Some(p); } } continue; } // === Early hints === if PATCH_FAILED.is_match(line) || DOES_NOT_APPLY.is_match(line) { if let Some(c) = PATCH_FAILED .captures(line) .or_else(|| DOES_NOT_APPLY.captures(line)) && let Some(m) = c.name("path") { add(&mut skipped, m.as_str()); last_seen_path = Some(m.as_str().to_string()); } continue; } // === Ignore narration === if THREE_WAY_START.is_match(line) || FALLBACK_DIRECT.is_match(line) { continue; } // === 3-way failed entirely; attribute to last_seen_path === if THREE_WAY_FAILED.is_match(line) || LACKS_BLOB.is_match(line) { if let Some(p) = last_seen_path.clone() { add(&mut skipped, &p); applied.remove(&p); conflicted.remove(&p); } continue; } // === Skips / I/O problems === if let 
Some(c) = INDEX_MISMATCH .captures(line) .or_else(|| NOT_IN_INDEX.captures(line)) .or_else(|| ALREADY_EXISTS_WT.captures(line)) .or_else(|| FILE_EXISTS.captures(line)) .or_else(|| RENAMED_DELETED.captures(line)) .or_else(|| CANNOT_APPLY_BINARY.captures(line)) .or_else(|| BINARY_DOES_NOT_APPLY.captures(line)) .or_else(|| BINARY_INCORRECT_RESULT.captures(line)) .or_else(|| CANNOT_READ_CURRENT.captures(line)) .or_else(|| SKIPPED_PATCH.captures(line)) { if let Some(m) = c.name("path") { add(&mut skipped, m.as_str()); let p_now = skipped.iter().next_back().cloned(); if let Some(p) = p_now { applied.remove(&p); conflicted.remove(&p); last_seen_path = Some(p); } } continue; } // === Warnings that imply conflicts === if let Some(c) = CANNOT_MERGE_BINARY_WARN.captures(line) { if let Some(m) = c.name("path") { add(&mut conflicted, m.as_str()); let p = conflicted.iter().next_back().cloned(); if let Some(p) = p { applied.remove(&p); skipped.remove(&p); last_seen_path = Some(p); } } continue; } } // Final precedence: conflicts > applied > skipped for p in conflicted.iter() { applied.remove(p); skipped.remove(p); } for p in applied.iter() { skipped.remove(p); } ( applied.into_iter().collect(), skipped.into_iter().collect(), conflicted.into_iter().collect(), ) } fn regex_ci(pat: &str) -> Regex { Regex::new(&format!("(?i){pat}")).unwrap_or_else(|e| panic!("invalid regex: {e}")) } #[cfg(test)] mod tests { use super::*; use std::path::Path; use std::sync::Mutex; use std::sync::OnceLock; fn env_lock() -> &'static Mutex<()> { static LOCK: OnceLock<Mutex<()>> = OnceLock::new(); LOCK.get_or_init(|| Mutex::new(())) } fn run(cwd: &Path, args: &[&str]) -> (i32, String, String) { let out = std::process::Command::new(args[0]) .args(&args[1..]) .current_dir(cwd) .output() .expect("spawn ok"); ( out.status.code().unwrap_or(-1), String::from_utf8_lossy(&out.stdout).into_owned(), String::from_utf8_lossy(&out.stderr).into_owned(), ) } fn init_repo() -> tempfile::TempDir { let dir = 
tempfile::tempdir().expect("tempdir"); let root = dir.path(); // git init and minimal identity let _ = run(root, &["git", "init"]); let _ = run(root, &["git", "config", "user.email", "codex@example.com"]); let _ = run(root, &["git", "config", "user.name", "Codex"]); dir } fn read_file_normalized(path: &Path) -> String { std::fs::read_to_string(path) .expect("read file") .replace("\r\n", "\n") } #[test] fn apply_add_success() { let _g = env_lock().lock().unwrap(); let repo = init_repo(); let root = repo.path(); let diff = "diff --git a/hello.txt b/hello.txt\nnew file mode 100644\n--- /dev/null\n+++ b/hello.txt\n@@ -0,0 +1,2 @@\n+hello\n+world\n"; let req = ApplyGitRequest { cwd: root.to_path_buf(), diff: diff.to_string(), revert: false, preflight: false, }; let r = apply_git_patch(&req).expect("run apply"); assert_eq!(r.exit_code, 0, "exit code 0"); // File exists now assert!(root.join("hello.txt").exists()); } #[test] fn apply_modify_conflict() { let _g = env_lock().lock().unwrap(); let repo = init_repo(); let root = repo.path(); // seed file and commit std::fs::write(root.join("file.txt"), "line1\nline2\nline3\n").unwrap(); let _ = run(root, &["git", "add", "file.txt"]); let _ = run(root, &["git", "commit", "-m", "seed"]); // local edit (unstaged) std::fs::write(root.join("file.txt"), "line1\nlocal2\nline3\n").unwrap(); // patch wants to change the same line differently let diff = "diff --git a/file.txt b/file.txt\n--- a/file.txt\n+++ b/file.txt\n@@ -1,3 +1,3 @@\n line1\n-line2\n+remote2\n line3\n"; let req = ApplyGitRequest { cwd: root.to_path_buf(), diff: diff.to_string(), revert: false, preflight: false, }; let r = apply_git_patch(&req).expect("run apply"); assert_ne!(r.exit_code, 0, "non-zero exit on conflict"); } #[test] fn apply_modify_skipped_missing_index() { let _g = env_lock().lock().unwrap(); let repo = init_repo(); let root = repo.path(); // Try to modify a file that is not in the index let diff = "diff --git a/ghost.txt b/ghost.txt\n--- 
a/ghost.txt\n+++ b/ghost.txt\n@@ -1,1 +1,1 @@\n-old\n+new\n"; let req = ApplyGitRequest { cwd: root.to_path_buf(), diff: diff.to_string(), revert: false, preflight: false, }; let r = apply_git_patch(&req).expect("run apply"); assert_ne!(r.exit_code, 0, "non-zero exit on missing index"); } #[test] fn apply_then_revert_success() { let _g = env_lock().lock().unwrap(); let repo = init_repo(); let root = repo.path(); // Seed file and commit original content std::fs::write(root.join("file.txt"), "orig\n").unwrap(); let _ = run(root, &["git", "add", "file.txt"]); let _ = run(root, &["git", "commit", "-m", "seed"]); // Forward patch: orig -> ORIG let diff = "diff --git a/file.txt b/file.txt\n--- a/file.txt\n+++ b/file.txt\n@@ -1,1 +1,1 @@\n-orig\n+ORIG\n"; let apply_req = ApplyGitRequest { cwd: root.to_path_buf(), diff: diff.to_string(), revert: false, preflight: false, }; let res_apply = apply_git_patch(&apply_req).expect("apply ok"); assert_eq!(res_apply.exit_code, 0, "forward apply succeeded"); let after_apply = read_file_normalized(&root.join("file.txt")); assert_eq!(after_apply, "ORIG\n"); // Revert patch: ORIG -> orig (stage paths first; engine handles it) let revert_req = ApplyGitRequest { cwd: root.to_path_buf(), diff: diff.to_string(), revert: true, preflight: false, }; let res_revert = apply_git_patch(&revert_req).expect("revert ok"); assert_eq!(res_revert.exit_code, 0, "revert apply succeeded"); let after_revert = read_file_normalized(&root.join("file.txt")); assert_eq!(after_revert, "orig\n"); } #[test] fn revert_preflight_does_not_stage_index() { let _g = env_lock().lock().unwrap(); let repo = init_repo(); let root = repo.path(); // Seed repo and apply forward patch so the working tree reflects the change. 
std::fs::write(root.join("file.txt"), "orig\n").unwrap(); let _ = run(root, &["git", "add", "file.txt"]); let _ = run(root, &["git", "commit", "-m", "seed"]); let diff = "diff --git a/file.txt b/file.txt\n--- a/file.txt\n+++ b/file.txt\n@@ -1,1 +1,1 @@\n-orig\n+ORIG\n"; let apply_req = ApplyGitRequest { cwd: root.to_path_buf(), diff: diff.to_string(), revert: false, preflight: false, }; let res_apply = apply_git_patch(&apply_req).expect("apply ok"); assert_eq!(res_apply.exit_code, 0, "forward apply succeeded"); let (commit_code, _, commit_err) = run(root, &["git", "commit", "-am", "apply change"]); assert_eq!(commit_code, 0, "commit applied change: {commit_err}"); let (_code_before, staged_before, _stderr_before) = run(root, &["git", "diff", "--cached", "--name-only"]); let preflight_req = ApplyGitRequest { cwd: root.to_path_buf(), diff: diff.to_string(), revert: true, preflight: true, }; let res_preflight = apply_git_patch(&preflight_req).expect("preflight ok"); assert_eq!(res_preflight.exit_code, 0, "revert preflight succeeded"); let (_code_after, staged_after, _stderr_after) = run(root, &["git", "diff", "--cached", "--name-only"]); assert_eq!( staged_after.trim(), staged_before.trim(), "preflight should not stage new paths", ); let after_preflight = read_file_normalized(&root.join("file.txt")); assert_eq!(after_preflight, "ORIG\n"); } #[test] fn preflight_blocks_partial_changes() { let _g = env_lock().lock().unwrap(); let repo = init_repo(); let root = repo.path(); // Build a multi-file diff: one valid add (ok.txt) and one invalid modify (ghost.txt) let diff = "diff --git a/ok.txt b/ok.txt\nnew file mode 100644\n--- /dev/null\n+++ b/ok.txt\n@@ -0,0 +1,2 @@\n+alpha\n+beta\n\n\ diff --git a/ghost.txt b/ghost.txt\n--- a/ghost.txt\n+++ b/ghost.txt\n@@ -1,1 +1,1 @@\n-old\n+new\n"; // 1) With preflight enabled, nothing should be changed (even though ok.txt could be added) let req1 = ApplyGitRequest { cwd: root.to_path_buf(), diff: diff.to_string(), revert: false, 
preflight: true, }; let r1 = apply_git_patch(&req1).expect("preflight apply"); assert_ne!(r1.exit_code, 0, "preflight reports failure"); assert!( !root.join("ok.txt").exists(), "preflight must prevent adding ok.txt" ); assert!( r1.cmd_for_log.contains("--check"), "preflight path recorded --check" ); // 2) Without preflight, we should see no --check in the executed command let req2 = ApplyGitRequest { cwd: root.to_path_buf(), diff: diff.to_string(), revert: false, preflight: false, }; let r2 = apply_git_patch(&req2).expect("direct apply"); assert_ne!(r2.exit_code, 0, "apply is expected to fail overall"); assert!( !r2.cmd_for_log.contains("--check"), "non-preflight path should not use --check" ); } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/git/src/operations.rs
codex-rs/utils/git/src/operations.rs
use std::ffi::OsStr; use std::ffi::OsString; use std::path::Component; use std::path::Path; use std::path::PathBuf; use std::process::Command; use crate::GitToolingError; pub(crate) fn ensure_git_repository(path: &Path) -> Result<(), GitToolingError> { match run_git_for_stdout( path, vec![ OsString::from("rev-parse"), OsString::from("--is-inside-work-tree"), ], None, ) { Ok(output) if output.trim() == "true" => Ok(()), Ok(_) => Err(GitToolingError::NotAGitRepository { path: path.to_path_buf(), }), Err(GitToolingError::GitCommand { status, .. }) if status.code() == Some(128) => { Err(GitToolingError::NotAGitRepository { path: path.to_path_buf(), }) } Err(err) => Err(err), } } pub(crate) fn resolve_head(path: &Path) -> Result<Option<String>, GitToolingError> { match run_git_for_stdout( path, vec![ OsString::from("rev-parse"), OsString::from("--verify"), OsString::from("HEAD"), ], None, ) { Ok(sha) => Ok(Some(sha)), Err(GitToolingError::GitCommand { status, .. }) if status.code() == Some(128) => Ok(None), Err(other) => Err(other), } } pub(crate) fn normalize_relative_path(path: &Path) -> Result<PathBuf, GitToolingError> { let mut result = PathBuf::new(); let mut saw_component = false; for component in path.components() { saw_component = true; match component { Component::Normal(part) => result.push(part), Component::CurDir => {} Component::ParentDir => { if !result.pop() { return Err(GitToolingError::PathEscapesRepository { path: path.to_path_buf(), }); } } Component::RootDir | Component::Prefix(_) => { return Err(GitToolingError::NonRelativePath { path: path.to_path_buf(), }); } } } if !saw_component { return Err(GitToolingError::NonRelativePath { path: path.to_path_buf(), }); } Ok(result) } pub(crate) fn resolve_repository_root(path: &Path) -> Result<PathBuf, GitToolingError> { let root = run_git_for_stdout( path, vec![ OsString::from("rev-parse"), OsString::from("--show-toplevel"), ], None, )?; Ok(PathBuf::from(root)) } pub(crate) fn 
apply_repo_prefix_to_force_include( prefix: Option<&Path>, paths: &[PathBuf], ) -> Vec<PathBuf> { if paths.is_empty() { return Vec::new(); } match prefix { Some(prefix) => paths.iter().map(|path| prefix.join(path)).collect(), None => paths.to_vec(), } } pub(crate) fn repo_subdir(repo_root: &Path, repo_path: &Path) -> Option<PathBuf> { if repo_root == repo_path { return None; } repo_path .strip_prefix(repo_root) .ok() .and_then(non_empty_path) .or_else(|| { let repo_root_canon = repo_root.canonicalize().ok()?; let repo_path_canon = repo_path.canonicalize().ok()?; repo_path_canon .strip_prefix(&repo_root_canon) .ok() .and_then(non_empty_path) }) } fn non_empty_path(path: &Path) -> Option<PathBuf> { if path.as_os_str().is_empty() { None } else { Some(path.to_path_buf()) } } pub(crate) fn run_git_for_status<I, S>( dir: &Path, args: I, env: Option<&[(OsString, OsString)]>, ) -> Result<(), GitToolingError> where I: IntoIterator<Item = S>, S: AsRef<OsStr>, { run_git(dir, args, env)?; Ok(()) } pub(crate) fn run_git_for_stdout<I, S>( dir: &Path, args: I, env: Option<&[(OsString, OsString)]>, ) -> Result<String, GitToolingError> where I: IntoIterator<Item = S>, S: AsRef<OsStr>, { let run = run_git(dir, args, env)?; String::from_utf8(run.output.stdout) .map(|value| value.trim().to_string()) .map_err(|source| GitToolingError::GitOutputUtf8 { command: run.command, source, }) } /// Executes `git` and returns the full stdout without trimming so callers /// can parse delimiter-sensitive output, propagating UTF-8 errors with context. pub(crate) fn run_git_for_stdout_all<I, S>( dir: &Path, args: I, env: Option<&[(OsString, OsString)]>, ) -> Result<String, GitToolingError> where I: IntoIterator<Item = S>, S: AsRef<OsStr>, { // Keep the raw stdout untouched so callers can parse delimiter-sensitive // output (e.g. NUL-separated paths) without trimming artefacts. let run = run_git(dir, args, env)?; // Propagate UTF-8 conversion failures with the command context for debugging. 
String::from_utf8(run.output.stdout).map_err(|source| GitToolingError::GitOutputUtf8 { command: run.command, source, }) } fn run_git<I, S>( dir: &Path, args: I, env: Option<&[(OsString, OsString)]>, ) -> Result<GitRun, GitToolingError> where I: IntoIterator<Item = S>, S: AsRef<OsStr>, { let iterator = args.into_iter(); let (lower, upper) = iterator.size_hint(); let mut args_vec = Vec::with_capacity(upper.unwrap_or(lower)); for arg in iterator { args_vec.push(OsString::from(arg.as_ref())); } let command_string = build_command_string(&args_vec); let mut command = Command::new("git"); command.current_dir(dir); if let Some(envs) = env { for (key, value) in envs { command.env(key, value); } } command.args(&args_vec); let output = command.output()?; if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); return Err(GitToolingError::GitCommand { command: command_string, status: output.status, stderr, }); } Ok(GitRun { command: command_string, output, }) } fn build_command_string(args: &[OsString]) -> String { if args.is_empty() { return "git".to_string(); } let joined = args .iter() .map(|arg| arg.to_string_lossy().into_owned()) .collect::<Vec<_>>() .join(" "); format!("git {joined}") } struct GitRun { command: String, output: std::process::Output, }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/pty/src/lib.rs
codex-rs/utils/pty/src/lib.rs
use core::fmt;
use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::sync::Mutex as StdMutex;
use std::time::Duration;

#[cfg(windows)]
mod win;

use anyhow::Result;
#[cfg(not(windows))]
use portable_pty::native_pty_system;
use portable_pty::CommandBuilder;
use portable_pty::MasterPty;
use portable_pty::PtySize;
use portable_pty::SlavePty;
use tokio::sync::broadcast;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
use tokio::sync::Mutex as TokioMutex;
use tokio::task::JoinHandle;

/// Holds both halves of a PTY pair so they stay open for the session lifetime.
pub struct PtyPairWrapper {
    // Retained, never used directly; see the comment on `ExecCommandSession::_pair`.
    pub _slave: Option<Box<dyn SlavePty + Send>>,
    pub _master: Box<dyn MasterPty + Send>,
}

/// A live PTY-backed child process plus the channels and background tasks
/// that pump its I/O.
#[derive(Debug)]
pub struct ExecCommandSession {
    // Bytes sent here are written to the child's stdin by the writer task.
    writer_tx: mpsc::Sender<Vec<u8>>,
    // Broadcast of raw output chunks read from the PTY master.
    output_tx: broadcast::Sender<Vec<u8>>,
    // Taken (at most once) by `terminate` to kill the child.
    killer: StdMutex<Option<Box<dyn portable_pty::ChildKiller + Send + Sync>>>,
    // Background task handles; taken and aborted on `terminate`.
    reader_handle: StdMutex<Option<JoinHandle<()>>>,
    writer_handle: StdMutex<Option<JoinHandle<()>>>,
    wait_handle: StdMutex<Option<JoinHandle<()>>>,
    // Flipped to true by the wait task once the child has exited.
    exit_status: Arc<AtomicBool>,
    // Exit code recorded by the wait task (None while still running).
    exit_code: Arc<StdMutex<Option<i32>>>,
    // PtyPair must be preserved because the process will receive Control+C if the
    // slave is closed
    _pair: StdMutex<PtyPairWrapper>,
}

impl fmt::Debug for PtyPairWrapper {
    // The boxed trait objects have no useful Debug output; render nothing.
    fn fmt(&self, _: &mut fmt::Formatter<'_>) -> fmt::Result {
        Ok(())
    }
}

impl ExecCommandSession {
    /// Bundle the channels, child killer, task handles, and PTY pair into a
    /// session. Returns the session together with the output receiver that
    /// was subscribed *before* the reader task started (so no chunk is lost).
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        writer_tx: mpsc::Sender<Vec<u8>>,
        output_tx: broadcast::Sender<Vec<u8>>,
        initial_output_rx: broadcast::Receiver<Vec<u8>>,
        killer: Box<dyn portable_pty::ChildKiller + Send + Sync>,
        reader_handle: JoinHandle<()>,
        writer_handle: JoinHandle<()>,
        wait_handle: JoinHandle<()>,
        exit_status: Arc<AtomicBool>,
        exit_code: Arc<StdMutex<Option<i32>>>,
        pair: PtyPairWrapper,
    ) -> (Self, broadcast::Receiver<Vec<u8>>) {
        (
            Self {
                writer_tx,
                output_tx,
                killer: StdMutex::new(Some(killer)),
                reader_handle: StdMutex::new(Some(reader_handle)),
                writer_handle: StdMutex::new(Some(writer_handle)),
                wait_handle: StdMutex::new(Some(wait_handle)),
                exit_status,
                exit_code,
                _pair: StdMutex::new(pair),
            },
            initial_output_rx,
        )
    }

    /// Channel used to feed bytes to the child's stdin.
    pub fn writer_sender(&self) -> mpsc::Sender<Vec<u8>> {
        self.writer_tx.clone()
    }

    /// New subscription to the child's output stream (from this point onward).
    pub fn output_receiver(&self) -> broadcast::Receiver<Vec<u8>> {
        self.output_tx.subscribe()
    }

    /// Whether the child process has exited.
    pub fn has_exited(&self) -> bool {
        self.exit_status.load(std::sync::atomic::Ordering::SeqCst)
    }

    /// Exit code of the child, once it has exited; `None` while running or if
    /// the mutex is poisoned.
    pub fn exit_code(&self) -> Option<i32> {
        self.exit_code.lock().ok().and_then(|guard| *guard)
    }

    /// Kill the child (at most once) and abort the reader/writer/wait tasks.
    /// Idempotent: each resource is `take`n, so repeated calls are no-ops.
    pub fn terminate(&self) {
        if let Ok(mut killer_opt) = self.killer.lock() {
            if let Some(mut killer) = killer_opt.take() {
                let _ = killer.kill();
            }
        }
        if let Ok(mut h) = self.reader_handle.lock() {
            if let Some(handle) = h.take() {
                handle.abort();
            }
        }
        if let Ok(mut h) = self.writer_handle.lock() {
            if let Some(handle) = h.take() {
                handle.abort();
            }
        }
        if let Ok(mut h) = self.wait_handle.lock() {
            if let Some(handle) = h.take() {
                handle.abort();
            }
        }
    }
}

impl Drop for ExecCommandSession {
    // Make sure the child and pump tasks do not outlive the session.
    fn drop(&mut self) {
        self.terminate();
    }
}

/// Everything a caller needs after spawning: the session, the initial output
/// subscription, and a oneshot that yields the child's exit code.
#[derive(Debug)]
pub struct SpawnedPty {
    pub session: ExecCommandSession,
    pub output_rx: broadcast::Receiver<Vec<u8>>,
    pub exit_rx: oneshot::Receiver<i32>,
}

/// Whether a ConPTY-backed PTY is available (always true off Windows).
#[allow(unreachable_code)]
pub fn conpty_supported() -> bool {
    // Annotation required because `win` can't be compiled on other OS.
    #[cfg(windows)]
    return win::conpty_supported();
    true
}

// Windows uses the in-crate ConPTY system; elsewhere defer to portable-pty.
#[cfg(windows)]
fn platform_native_pty_system() -> Box<dyn portable_pty::PtySystem + Send> {
    Box::new(win::ConPtySystem::default())
}

#[cfg(not(windows))]
fn platform_native_pty_system() -> Box<dyn portable_pty::PtySystem + Send> {
    native_pty_system()
}

/// Spawn `program` under a fresh PTY and wire up background tasks that pump
/// its output, feed its stdin, and wait for its exit.
///
/// `env` fully replaces the child's environment (`env_clear` first); `arg0`
/// optionally overrides the executable name seen by the child.
///
/// # Errors
/// Fails if `program` is empty, the PTY cannot be opened, or the child
/// cannot be spawned.
pub async fn spawn_pty_process(
    program: &str,
    args: &[String],
    cwd: &Path,
    env: &HashMap<String, String>,
    arg0: &Option<String>,
) -> Result<SpawnedPty> {
    if program.is_empty() {
        anyhow::bail!("missing program for PTY spawn");
    }
    let pty_system = platform_native_pty_system();
    // Fixed 24x80 initial size; NOTE(review): no resize path is visible here.
    let pair = pty_system.openpty(PtySize {
        rows: 24,
        cols: 80,
        pixel_width: 0,
        pixel_height: 0,
    })?;

    let mut command_builder = CommandBuilder::new(arg0.as_ref().unwrap_or(&program.to_string()));
    command_builder.cwd(cwd);
    // Start from an empty environment so only the caller-provided vars apply.
    command_builder.env_clear();
    for arg in args {
        command_builder.arg(arg);
    }
    for (key, value) in env {
        command_builder.env(key, value);
    }

    let mut child = pair.slave.spawn_command(command_builder)?;
    let killer = child.clone_killer();

    let (writer_tx, mut writer_rx) = mpsc::channel::<Vec<u8>>(128);
    let (output_tx, _) = broadcast::channel::<Vec<u8>>(256);
    // Subscribe before starting the reader thread.
    let initial_output_rx = output_tx.subscribe();

    // Blocking reader: copies PTY output chunks into the broadcast channel
    // until EOF or a hard error.
    let mut reader = pair.master.try_clone_reader()?;
    let output_tx_clone = output_tx.clone();
    let reader_handle: JoinHandle<()> = tokio::task::spawn_blocking(move || {
        let mut buf = [0u8; 8_192];
        loop {
            match reader.read(&mut buf) {
                Ok(0) => break,
                Ok(n) => {
                    // Send failures (no subscribers) are deliberately ignored.
                    let _ = output_tx_clone.send(buf[..n].to_vec());
                }
                Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
                Err(ref e) if e.kind() == ErrorKind::WouldBlock => {
                    // Non-blocking PTY read: back off briefly and retry.
                    std::thread::sleep(Duration::from_millis(5));
                    continue;
                }
                Err(_) => break,
            }
        }
    });

    // Async writer: drains the mpsc channel into the child's stdin.
    let writer = pair.master.take_writer()?;
    let writer = Arc::new(TokioMutex::new(writer));
    let writer_handle: JoinHandle<()> = tokio::spawn({
        let writer = Arc::clone(&writer);
        async move {
            while let Some(bytes) = writer_rx.recv().await {
                let mut guard = writer.lock().await;
                use std::io::Write;
                // Best-effort: write errors are dropped, mirroring reader behavior.
                let _ = guard.write_all(&bytes);
                let _ = guard.flush();
            }
        }
    });

    // Blocking waiter: records the exit code, flips the exit flag, and
    // notifies the oneshot receiver.
    let (exit_tx, exit_rx) = oneshot::channel::<i32>();
    let exit_status = Arc::new(AtomicBool::new(false));
    let wait_exit_status = Arc::clone(&exit_status);
    let exit_code = Arc::new(StdMutex::new(None));
    let wait_exit_code = Arc::clone(&exit_code);
    let wait_handle: JoinHandle<()> = tokio::task::spawn_blocking(move || {
        let code = match child.wait() {
            Ok(status) => status.exit_code() as i32,
            Err(_) => -1,
        };
        wait_exit_status.store(true, std::sync::atomic::Ordering::SeqCst);
        if let Ok(mut guard) = wait_exit_code.lock() {
            *guard = Some(code);
        }
        let _ = exit_tx.send(code);
    });

    let pair = PtyPairWrapper {
        _slave: if cfg!(windows) {
            // Keep the slave handle alive on Windows to prevent the process from receiving Control+C
            Some(pair.slave)
        } else {
            None
        },
        _master: pair.master,
    };
    let (session, output_rx) = ExecCommandSession::new(
        writer_tx,
        output_tx,
        initial_output_rx,
        killer,
        reader_handle,
        writer_handle,
        wait_handle,
        exit_status,
        exit_code,
        pair,
    );
    Ok(SpawnedPty {
        session,
        output_rx,
        exit_rx,
    })
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/pty/src/win/mod.rs
codex-rs/utils/pty/src/win/mod.rs
#![allow(clippy::unwrap_used)] // This file is copied from https://github.com/wezterm/wezterm (MIT license). // Copyright (c) 2018-Present Wez Furlong // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. 
use anyhow::Context as _; use filedescriptor::OwnedHandle; use portable_pty::Child; use portable_pty::ChildKiller; use portable_pty::ExitStatus; use std::io::Error as IoError; use std::io::Result as IoResult; use std::os::windows::io::AsRawHandle; use std::pin::Pin; use std::sync::Mutex; use std::task::Context; use std::task::Poll; use winapi::shared::minwindef::DWORD; use winapi::um::minwinbase::STILL_ACTIVE; use winapi::um::processthreadsapi::*; use winapi::um::synchapi::WaitForSingleObject; use winapi::um::winbase::INFINITE; pub mod conpty; mod procthreadattr; mod psuedocon; pub use conpty::ConPtySystem; pub use psuedocon::conpty_supported; #[derive(Debug)] pub struct WinChild { proc: Mutex<OwnedHandle>, } impl WinChild { fn is_complete(&mut self) -> IoResult<Option<ExitStatus>> { let mut status: DWORD = 0; let proc = self.proc.lock().unwrap().try_clone().unwrap(); let res = unsafe { GetExitCodeProcess(proc.as_raw_handle() as _, &mut status) }; if res != 0 { if status == STILL_ACTIVE { Ok(None) } else { Ok(Some(ExitStatus::with_exit_code(status))) } } else { Ok(None) } } fn do_kill(&mut self) -> IoResult<()> { let proc = self.proc.lock().unwrap().try_clone().unwrap(); let res = unsafe { TerminateProcess(proc.as_raw_handle() as _, 1) }; let err = IoError::last_os_error(); if res != 0 { Err(err) } else { Ok(()) } } } impl ChildKiller for WinChild { fn kill(&mut self) -> IoResult<()> { self.do_kill().ok(); Ok(()) } fn clone_killer(&self) -> Box<dyn ChildKiller + Send + Sync> { let proc = self.proc.lock().unwrap().try_clone().unwrap(); Box::new(WinChildKiller { proc }) } } #[derive(Debug)] pub struct WinChildKiller { proc: OwnedHandle, } impl ChildKiller for WinChildKiller { fn kill(&mut self) -> IoResult<()> { let res = unsafe { TerminateProcess(self.proc.as_raw_handle() as _, 1) }; let err = IoError::last_os_error(); if res != 0 { Err(err) } else { Ok(()) } } fn clone_killer(&self) -> Box<dyn ChildKiller + Send + Sync> { let proc = self.proc.try_clone().unwrap(); 
Box::new(WinChildKiller { proc }) } } impl Child for WinChild { fn try_wait(&mut self) -> IoResult<Option<ExitStatus>> { self.is_complete() } fn wait(&mut self) -> IoResult<ExitStatus> { if let Ok(Some(status)) = self.try_wait() { return Ok(status); } let proc = self.proc.lock().unwrap().try_clone().unwrap(); unsafe { WaitForSingleObject(proc.as_raw_handle() as _, INFINITE); } let mut status: DWORD = 0; let res = unsafe { GetExitCodeProcess(proc.as_raw_handle() as _, &mut status) }; if res != 0 { Ok(ExitStatus::with_exit_code(status)) } else { Err(IoError::last_os_error()) } } fn process_id(&self) -> Option<u32> { let res = unsafe { GetProcessId(self.proc.lock().unwrap().as_raw_handle() as _) }; if res == 0 { None } else { Some(res) } } fn as_raw_handle(&self) -> Option<std::os::windows::io::RawHandle> { let proc = self.proc.lock().unwrap(); Some(proc.as_raw_handle()) } } impl std::future::Future for WinChild { type Output = anyhow::Result<ExitStatus>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<anyhow::Result<ExitStatus>> { match self.is_complete() { Ok(Some(status)) => Poll::Ready(Ok(status)), Err(err) => Poll::Ready(Err(err).context("Failed to retrieve process exit status")), Ok(None) => { let proc = self.proc.lock().unwrap().try_clone()?; let waker = cx.waker().clone(); std::thread::spawn(move || { unsafe { WaitForSingleObject(proc.as_raw_handle() as _, INFINITE); } waker.wake(); }); Poll::Pending } } } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/pty/src/win/psuedocon.rs
codex-rs/utils/pty/src/win/psuedocon.rs
#![allow(clippy::expect_used)] #![allow(clippy::upper_case_acronyms)] // This file is copied from https://github.com/wezterm/wezterm (MIT license). // Copyright (c) 2018-Present Wez Furlong // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. 
use super::WinChild; use crate::win::procthreadattr::ProcThreadAttributeList; use anyhow::bail; use anyhow::ensure; use anyhow::Error; use filedescriptor::FileDescriptor; use filedescriptor::OwnedHandle; use lazy_static::lazy_static; use portable_pty::cmdbuilder::CommandBuilder; use shared_library::shared_library; use std::env; use std::ffi::OsStr; use std::ffi::OsString; use std::io::Error as IoError; use std::mem; use std::os::windows::ffi::OsStrExt; use std::os::windows::ffi::OsStringExt; use std::os::windows::io::AsRawHandle; use std::os::windows::io::FromRawHandle; use std::path::Path; use std::ptr; use std::sync::Mutex; use winapi::shared::minwindef::DWORD; use winapi::shared::ntdef::NTSTATUS; use winapi::shared::ntstatus::STATUS_SUCCESS; use winapi::shared::winerror::HRESULT; use winapi::shared::winerror::S_OK; use winapi::um::handleapi::*; use winapi::um::processthreadsapi::*; use winapi::um::winbase::CREATE_UNICODE_ENVIRONMENT; use winapi::um::winbase::EXTENDED_STARTUPINFO_PRESENT; use winapi::um::winbase::STARTF_USESTDHANDLES; use winapi::um::winbase::STARTUPINFOEXW; use winapi::um::wincon::COORD; use winapi::um::winnt::HANDLE; use winapi::um::winnt::OSVERSIONINFOW; pub type HPCON = HANDLE; pub const PSEUDOCONSOLE_RESIZE_QUIRK: DWORD = 0x2; #[allow(dead_code)] pub const PSEUDOCONSOLE_PASSTHROUGH_MODE: DWORD = 0x8; // https://learn.microsoft.com/en-gb/windows/console/createpseudoconsole // https://learn.microsoft.com/en-gb/windows/release-health/release-information const MIN_CONPTY_BUILD: u32 = 17_763; shared_library!(ConPtyFuncs, pub fn CreatePseudoConsole( size: COORD, hInput: HANDLE, hOutput: HANDLE, flags: DWORD, hpc: *mut HPCON ) -> HRESULT, pub fn ResizePseudoConsole(hpc: HPCON, size: COORD) -> HRESULT, pub fn ClosePseudoConsole(hpc: HPCON), ); shared_library!(Ntdll, pub fn RtlGetVersion( version_info: *mut OSVERSIONINFOW ) -> NTSTATUS, ); fn load_conpty() -> ConPtyFuncs { let kernel = ConPtyFuncs::open(Path::new("kernel32.dll")).expect( "this system 
does not support conpty. Windows 10 October 2018 or newer is required", ); if let Ok(sideloaded) = ConPtyFuncs::open(Path::new("conpty.dll")) { sideloaded } else { kernel } } lazy_static! { static ref CONPTY: ConPtyFuncs = load_conpty(); } pub fn conpty_supported() -> bool { windows_build_number().is_some_and(|build| build >= MIN_CONPTY_BUILD) } fn windows_build_number() -> Option<u32> { let ntdll = Ntdll::open(Path::new("ntdll.dll")).ok()?; let mut info: OSVERSIONINFOW = unsafe { mem::zeroed() }; info.dwOSVersionInfoSize = mem::size_of::<OSVERSIONINFOW>() as u32; let status = unsafe { (ntdll.RtlGetVersion)(&mut info) }; if status == STATUS_SUCCESS { Some(info.dwBuildNumber) } else { None } } pub struct PsuedoCon { con: HPCON, } unsafe impl Send for PsuedoCon {} unsafe impl Sync for PsuedoCon {} impl Drop for PsuedoCon { fn drop(&mut self) { unsafe { (CONPTY.ClosePseudoConsole)(self.con) }; } } impl PsuedoCon { pub fn new(size: COORD, input: FileDescriptor, output: FileDescriptor) -> Result<Self, Error> { let mut con: HPCON = INVALID_HANDLE_VALUE; let result = unsafe { (CONPTY.CreatePseudoConsole)( size, input.as_raw_handle() as _, output.as_raw_handle() as _, PSEUDOCONSOLE_RESIZE_QUIRK, &mut con, ) }; ensure!( result == S_OK, "failed to create psuedo console: HRESULT {result}" ); Ok(Self { con }) } pub fn resize(&self, size: COORD) -> Result<(), Error> { let result = unsafe { (CONPTY.ResizePseudoConsole)(self.con, size) }; ensure!( result == S_OK, "failed to resize console to {}x{}: HRESULT: {}", size.X, size.Y, result ); Ok(()) } pub fn spawn_command(&self, cmd: CommandBuilder) -> anyhow::Result<WinChild> { let mut si: STARTUPINFOEXW = unsafe { mem::zeroed() }; si.StartupInfo.cb = mem::size_of::<STARTUPINFOEXW>() as u32; si.StartupInfo.dwFlags = STARTF_USESTDHANDLES; si.StartupInfo.hStdInput = INVALID_HANDLE_VALUE; si.StartupInfo.hStdOutput = INVALID_HANDLE_VALUE; si.StartupInfo.hStdError = INVALID_HANDLE_VALUE; let mut attrs = 
ProcThreadAttributeList::with_capacity(1)?; attrs.set_pty(self.con)?; si.lpAttributeList = attrs.as_mut_ptr(); let mut pi: PROCESS_INFORMATION = unsafe { mem::zeroed() }; let (mut exe, mut cmdline) = build_cmdline(&cmd)?; let cmd_os = OsString::from_wide(&cmdline); let cwd = resolve_current_directory(&cmd); let mut env_block = build_environment_block(&cmd); let res = unsafe { CreateProcessW( exe.as_mut_ptr(), cmdline.as_mut_ptr(), ptr::null_mut(), ptr::null_mut(), 0, EXTENDED_STARTUPINFO_PRESENT | CREATE_UNICODE_ENVIRONMENT, env_block.as_mut_ptr() as *mut _, cwd.as_ref().map_or(ptr::null(), std::vec::Vec::as_ptr), &mut si.StartupInfo, &mut pi, ) }; if res == 0 { let err = IoError::last_os_error(); let msg = format!( "CreateProcessW `{:?}` in cwd `{:?}` failed: {}", cmd_os, cwd.as_ref().map(|c| OsString::from_wide(c)), err ); log::error!("{msg}"); bail!("{msg}"); } let _main_thread = unsafe { OwnedHandle::from_raw_handle(pi.hThread as _) }; let proc = unsafe { OwnedHandle::from_raw_handle(pi.hProcess as _) }; Ok(WinChild { proc: Mutex::new(proc), }) } } fn resolve_current_directory(cmd: &CommandBuilder) -> Option<Vec<u16>> { let home = cmd .get_env("USERPROFILE") .and_then(|path| Path::new(path).is_dir().then(|| path.to_owned())); let cwd = cmd .get_cwd() .and_then(|path| Path::new(path).is_dir().then(|| path.to_owned())); let dir = cwd.or(home)?; let mut wide = Vec::new(); if Path::new(&dir).is_relative() { if let Ok(current_dir) = env::current_dir() { wide.extend(current_dir.join(&dir).as_os_str().encode_wide()); } else { wide.extend(dir.encode_wide()); } } else { wide.extend(dir.encode_wide()); } wide.push(0); Some(wide) } fn build_environment_block(cmd: &CommandBuilder) -> Vec<u16> { let mut block = Vec::new(); for (key, value) in cmd.iter_full_env_as_str() { block.extend(OsStr::new(key).encode_wide()); block.push(b'=' as u16); block.extend(OsStr::new(value).encode_wide()); block.push(0); } block.push(0); block } fn build_cmdline(cmd: &CommandBuilder) -> 
anyhow::Result<(Vec<u16>, Vec<u16>)> { let exe_os: OsString = if cmd.is_default_prog() { cmd.get_env("ComSpec") .unwrap_or(OsStr::new("cmd.exe")) .to_os_string() } else { let argv = cmd.get_argv(); let Some(first) = argv.first() else { anyhow::bail!("missing program name"); }; search_path(cmd, first) }; let mut cmdline = Vec::new(); append_quoted(&exe_os, &mut cmdline); for arg in cmd.get_argv().iter().skip(1) { cmdline.push(' ' as u16); ensure!( !arg.encode_wide().any(|c| c == 0), "invalid encoding for command line argument {arg:?}" ); append_quoted(arg, &mut cmdline); } cmdline.push(0); let mut exe: Vec<u16> = exe_os.encode_wide().collect(); exe.push(0); Ok((exe, cmdline)) } fn search_path(cmd: &CommandBuilder, exe: &OsStr) -> OsString { if let Some(path) = cmd.get_env("PATH") { let extensions = cmd.get_env("PATHEXT").unwrap_or(OsStr::new(".EXE")); for path in env::split_paths(path) { let candidate = path.join(exe); if candidate.exists() { return candidate.into_os_string(); } for ext in env::split_paths(extensions) { let ext = ext.to_str().unwrap_or(""); let path = path .join(exe) .with_extension(ext.strip_prefix('.').unwrap_or(ext)); if path.exists() { return path.into_os_string(); } } } } exe.to_os_string() } fn append_quoted(arg: &OsStr, cmdline: &mut Vec<u16>) { if !arg.is_empty() && !arg.encode_wide().any(|c| { c == ' ' as u16 || c == '\t' as u16 || c == '\n' as u16 || c == '\x0b' as u16 || c == '\"' as u16 }) { cmdline.extend(arg.encode_wide()); return; } cmdline.push('"' as u16); let arg: Vec<_> = arg.encode_wide().collect(); let mut i = 0; while i < arg.len() { let mut num_backslashes = 0; while i < arg.len() && arg[i] == '\\' as u16 { i += 1; num_backslashes += 1; } if i == arg.len() { for _ in 0..num_backslashes * 2 { cmdline.push('\\' as u16); } break; } else if arg[i] == b'"' as u16 { for _ in 0..num_backslashes * 2 + 1 { cmdline.push('\\' as u16); } cmdline.push(arg[i]); } else { for _ in 0..num_backslashes { cmdline.push('\\' as u16); } 
cmdline.push(arg[i]); } i += 1; } cmdline.push('"' as u16); } #[cfg(test)] mod tests { use super::windows_build_number; use super::MIN_CONPTY_BUILD; #[test] fn windows_build_number_returns_value() { // We can't stably check the version of the GH workers, but we can // at least check that this. let version = windows_build_number().unwrap(); assert!(version > MIN_CONPTY_BUILD); } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/pty/src/win/conpty.rs
codex-rs/utils/pty/src/win/conpty.rs
#![allow(clippy::unwrap_used)] // This file is copied from https://github.com/wezterm/wezterm (MIT license). // Copyright (c) 2018-Present Wez Furlong // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. 
use crate::win::psuedocon::PsuedoCon;
use anyhow::Error;
use filedescriptor::FileDescriptor;
use filedescriptor::Pipe;
use portable_pty::cmdbuilder::CommandBuilder;
use portable_pty::Child;
use portable_pty::MasterPty;
use portable_pty::PtyPair;
use portable_pty::PtySize;
use portable_pty::PtySystem;
use portable_pty::SlavePty;
use std::sync::Arc;
use std::sync::Mutex;
use winapi::um::wincon::COORD;

/// `PtySystem` implementation backed by Windows ConPTY.
#[derive(Default)]
pub struct ConPtySystem {}

impl PtySystem for ConPtySystem {
    /// Allocates a pseudo console plus the pipe pair that feeds its input
    /// and drains its output, returning master/slave halves that share the
    /// same `Inner` state behind a mutex.
    fn openpty(&self, size: PtySize) -> anyhow::Result<PtyPair> {
        let stdin = Pipe::new()?;
        let stdout = Pipe::new()?;

        // NOTE(review): rows/cols are narrowed with `as i16`; values above
        // i16::MAX would wrap — presumably terminal sizes never get that
        // large. TODO confirm.
        let con = PsuedoCon::new(
            COORD {
                X: size.cols as i16,
                Y: size.rows as i16,
            },
            stdin.read,
            stdout.write,
        )?;

        let master = ConPtyMasterPty {
            inner: Arc::new(Mutex::new(Inner {
                con,
                readable: stdout.read,
                writable: Some(stdin.write),
                size,
            })),
        };

        let slave = ConPtySlavePty {
            inner: master.inner.clone(),
        };

        Ok(PtyPair {
            master: Box::new(master),
            slave: Box::new(slave),
        })
    }
}

/// State shared between the master and slave halves.
struct Inner {
    con: PsuedoCon,
    // Read side of the console output pipe.
    readable: FileDescriptor,
    // Write side of the console input pipe; taken at most once.
    writable: Option<FileDescriptor>,
    size: PtySize,
}

impl Inner {
    // Resizes the underlying console and records the new size on success.
    pub fn resize(
        &mut self,
        num_rows: u16,
        num_cols: u16,
        pixel_width: u16,
        pixel_height: u16,
    ) -> Result<(), Error> {
        self.con.resize(COORD {
            X: num_cols as i16,
            Y: num_rows as i16,
        })?;
        self.size = PtySize {
            rows: num_rows,
            cols: num_cols,
            pixel_width,
            pixel_height,
        };
        Ok(())
    }
}

#[derive(Clone)]
pub struct ConPtyMasterPty {
    inner: Arc<Mutex<Inner>>,
}

pub struct ConPtySlavePty {
    inner: Arc<Mutex<Inner>>,
}

impl MasterPty for ConPtyMasterPty {
    fn resize(&self, size: PtySize) -> anyhow::Result<()> {
        let mut inner = self.inner.lock().unwrap();
        inner.resize(size.rows, size.cols, size.pixel_width, size.pixel_height)
    }

    fn get_size(&self) -> Result<PtySize, Error> {
        let inner = self.inner.lock().unwrap();
        Ok(inner.size)
    }

    fn try_clone_reader(&self) -> anyhow::Result<Box<dyn std::io::Read + Send>> {
        Ok(Box::new(self.inner.lock().unwrap().readable.try_clone()?))
    }

    // The writer can only be taken once; subsequent calls fail.
    fn take_writer(&self) -> anyhow::Result<Box<dyn std::io::Write + Send>> {
        Ok(Box::new(
            self.inner
                .lock()
                .unwrap()
                .writable
                .take()
                .ok_or_else(|| anyhow::anyhow!("writer already taken"))?,
        ))
    }
}

impl SlavePty for ConPtySlavePty {
    /// Spawns `cmd` attached to the shared pseudo console.
    fn spawn_command(&self, cmd: CommandBuilder) -> anyhow::Result<Box<dyn Child + Send + Sync>> {
        let inner = self.inner.lock().unwrap();
        let child = inner.con.spawn_command(cmd)?;
        Ok(Box::new(child))
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/pty/src/win/procthreadattr.rs
codex-rs/utils/pty/src/win/procthreadattr.rs
#![allow(clippy::uninit_vec)] // This file is copied from https://github.com/wezterm/wezterm (MIT license). // Copyright (c) 2018-Present Wez Furlong // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. 
use super::psuedocon::HPCON; use anyhow::ensure; use anyhow::Error; use std::io::Error as IoError; use std::mem; use std::ptr; use winapi::shared::minwindef::DWORD; use winapi::um::processthreadsapi::*; const PROC_THREAD_ATTRIBUTE_PSEUDOCONSOLE: usize = 0x00020016; pub struct ProcThreadAttributeList { data: Vec<u8>, } impl ProcThreadAttributeList { pub fn with_capacity(num_attributes: DWORD) -> Result<Self, Error> { let mut bytes_required: usize = 0; unsafe { InitializeProcThreadAttributeList( ptr::null_mut(), num_attributes, 0, &mut bytes_required, ) }; let mut data = Vec::with_capacity(bytes_required); unsafe { data.set_len(bytes_required) }; let attr_ptr = data.as_mut_slice().as_mut_ptr() as *mut _; let res = unsafe { InitializeProcThreadAttributeList(attr_ptr, num_attributes, 0, &mut bytes_required) }; ensure!( res != 0, "InitializeProcThreadAttributeList failed: {}", IoError::last_os_error() ); Ok(Self { data }) } pub fn as_mut_ptr(&mut self) -> LPPROC_THREAD_ATTRIBUTE_LIST { self.data.as_mut_slice().as_mut_ptr() as *mut _ } pub fn set_pty(&mut self, con: HPCON) -> Result<(), Error> { let res = unsafe { UpdateProcThreadAttribute( self.as_mut_ptr(), 0, PROC_THREAD_ATTRIBUTE_PSEUDOCONSOLE, con, mem::size_of::<HPCON>(), ptr::null_mut(), ptr::null_mut(), ) }; ensure!( res != 0, "UpdateProcThreadAttribute failed: {}", IoError::last_os_error() ); Ok(()) } } impl Drop for ProcThreadAttributeList { fn drop(&mut self) { unsafe { DeleteProcThreadAttributeList(self.as_mut_ptr()) }; } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/utils/absolute-path/src/lib.rs
codex-rs/utils/absolute-path/src/lib.rs
use path_absolutize::Absolutize;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use serde::de::Error as SerdeError;
use std::cell::RefCell;
use std::path::Display;
use std::path::Path;
use std::path::PathBuf;
use ts_rs::TS;

/// A path that is guaranteed to be absolute and normalized (though it is not
/// guaranteed to be canonicalized or exist on the filesystem).
///
/// IMPORTANT: When deserializing an `AbsolutePathBuf`, a base path must be set
/// using [AbsolutePathBufGuard::new]. If no base path is set, the
/// deserialization will fail unless the path being deserialized is already
/// absolute.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, JsonSchema, TS)]
pub struct AbsolutePathBuf(PathBuf);

impl AbsolutePathBuf {
    /// Absolutizes `path` against `base_path` (purely lexical; no
    /// filesystem access is required).
    pub fn resolve_path_against_base<P: AsRef<Path>, B: AsRef<Path>>(
        path: P,
        base_path: B,
    ) -> std::io::Result<Self> {
        let absolute_path = path.as_ref().absolutize_from(base_path.as_ref())?;
        Ok(Self(absolute_path.into_owned()))
    }

    // NOTE(review): despite the name, a relative input is absolutized
    // against the process's current directory rather than rejected —
    // confirm this is intended at call sites.
    pub fn from_absolute_path<P: AsRef<Path>>(path: P) -> std::io::Result<Self> {
        let absolute_path = path.as_ref().absolutize()?;
        Ok(Self(absolute_path.into_owned()))
    }

    /// The current working directory as an `AbsolutePathBuf`.
    pub fn current_dir() -> std::io::Result<Self> {
        let current_dir = std::env::current_dir()?;
        Self::from_absolute_path(current_dir)
    }

    /// Joins `path` onto this path, re-normalizing the result.
    pub fn join<P: AsRef<Path>>(&self, path: P) -> std::io::Result<Self> {
        Self::resolve_path_against_base(path, &self.0)
    }

    /// Parent directory, if any (None at a filesystem root).
    pub fn parent(&self) -> Option<Self> {
        self.0.parent().map(|p| {
            #[expect(clippy::expect_used)]
            Self::from_absolute_path(p).expect("parent of AbsolutePathBuf must be absolute")
        })
    }

    pub fn as_path(&self) -> &Path {
        &self.0
    }

    pub fn into_path_buf(self) -> PathBuf {
        self.0
    }

    pub fn to_path_buf(&self) -> PathBuf {
        self.0.clone()
    }

    pub fn to_string_lossy(&self) -> std::borrow::Cow<'_, str> {
        self.0.to_string_lossy()
    }

    pub fn display(&self) -> Display<'_> {
        self.0.display()
    }
}

impl AsRef<Path> for AbsolutePathBuf {
    fn as_ref(&self) -> &Path {
        &self.0
    }
}

impl From<AbsolutePathBuf> for PathBuf {
    fn from(path: AbsolutePathBuf) -> Self {
        path.into_path_buf()
    }
}

impl TryFrom<&Path> for AbsolutePathBuf {
    type Error = std::io::Error;

    fn try_from(value: &Path) -> Result<Self, Self::Error> {
        Self::from_absolute_path(value)
    }
}

impl TryFrom<PathBuf> for AbsolutePathBuf {
    type Error = std::io::Error;

    fn try_from(value: PathBuf) -> Result<Self, Self::Error> {
        Self::from_absolute_path(value)
    }
}

impl TryFrom<&str> for AbsolutePathBuf {
    type Error = std::io::Error;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        Self::from_absolute_path(value)
    }
}

impl TryFrom<String> for AbsolutePathBuf {
    type Error = std::io::Error;

    fn try_from(value: String) -> Result<Self, Self::Error> {
        Self::from_absolute_path(value)
    }
}

thread_local! {
    // Base path consulted by `Deserialize`; set only while an
    // `AbsolutePathBufGuard` is alive on this thread.
    static ABSOLUTE_PATH_BASE: RefCell<Option<PathBuf>> = const { RefCell::new(None) };
}

/// Ensure this guard is held while deserializing `AbsolutePathBuf` values to
/// provide a base path for resolving relative paths. Because this relies on
/// thread-local storage, the deserialization must be single-threaded and
/// occur on the same thread that created the guard.
// NOTE(review): guards do not nest — dropping an inner guard clears the
// base set by an outer one. Fine for single-guard usage; confirm nesting is
// never needed before relying on it.
pub struct AbsolutePathBufGuard;

impl AbsolutePathBufGuard {
    pub fn new(base_path: &Path) -> Self {
        ABSOLUTE_PATH_BASE.with(|cell| {
            *cell.borrow_mut() = Some(base_path.to_path_buf());
        });
        Self
    }
}

impl Drop for AbsolutePathBufGuard {
    fn drop(&mut self) {
        ABSOLUTE_PATH_BASE.with(|cell| {
            *cell.borrow_mut() = None;
        });
    }
}

impl<'de> Deserialize<'de> for AbsolutePathBuf {
    // Relative paths resolve against the guard-provided base; absolute
    // paths work without a guard; relative paths without a guard error out.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let path = PathBuf::deserialize(deserializer)?;
        ABSOLUTE_PATH_BASE.with(|cell| match cell.borrow().as_deref() {
            Some(base) => {
                Ok(Self::resolve_path_against_base(path, base).map_err(SerdeError::custom)?)
            }
            None if path.is_absolute() => {
                Self::from_absolute_path(path).map_err(SerdeError::custom)
            }
            None => Err(SerdeError::custom(
                "AbsolutePathBuf deserialized without a base path",
            )),
        })
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    #[test]
    fn create_with_absolute_path_ignores_base_path() {
        let base_dir = tempdir().expect("base dir");
        let absolute_dir = tempdir().expect("absolute dir");
        let base_path = base_dir.path();
        let absolute_path = absolute_dir.path().join("file.txt");
        let abs_path_buf =
            AbsolutePathBuf::resolve_path_against_base(absolute_path.clone(), base_path)
                .expect("failed to create");
        assert_eq!(abs_path_buf.as_path(), absolute_path.as_path());
    }

    #[test]
    fn relative_path_is_resolved_against_base_path() {
        let temp_dir = tempdir().expect("base dir");
        let base_dir = temp_dir.path();
        let abs_path_buf = AbsolutePathBuf::resolve_path_against_base("file.txt", base_dir)
            .expect("failed to create");
        assert_eq!(abs_path_buf.as_path(), base_dir.join("file.txt").as_path());
    }

    #[test]
    fn guard_used_in_deserialization() {
        let temp_dir = tempdir().expect("base dir");
        let base_dir = temp_dir.path();
        let relative_path = "subdir/file.txt";
        let abs_path_buf = {
            let _guard = AbsolutePathBufGuard::new(base_dir);
            serde_json::from_str::<AbsolutePathBuf>(&format!(r#""{relative_path}""#))
                .expect("failed to deserialize")
        };
        assert_eq!(
            abs_path_buf.as_path(),
            base_dir.join(relative_path).as_path()
        );
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/elapsed.rs
codex-rs/common/src/elapsed.rs
use std::time::Duration;
use std::time::Instant;

/// Returns a string representing the elapsed time since `start_time` like
/// "1m 15s" or "1.50s".
pub fn format_elapsed(start_time: Instant) -> String {
    format_duration(start_time.elapsed())
}

/// Convert a [`std::time::Duration`] into a human-readable, compact string.
///
/// Formatting rules:
/// * < 1 s   -> "{milli}ms"
/// * < 60 s  -> "{sec:.2}s" (two decimal places)
/// * >= 60 s -> "{min}m {sec:02}s"
pub fn format_duration(duration: Duration) -> String {
    format_elapsed_millis(duration.as_millis() as i64)
}

/// Renders a millisecond count using the rules of [`format_duration`].
fn format_elapsed_millis(millis: i64) -> String {
    match millis {
        ms if ms < 1_000 => format!("{ms}ms"),
        ms if ms < 60_000 => format!("{:.2}s", ms as f64 / 1000.0),
        ms => {
            let (minutes, seconds) = (ms / 60_000, (ms % 60_000) / 1000);
            format!("{minutes}m {seconds:02}s")
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_format_duration_subsecond() {
        // Sub-second durations render as whole milliseconds, zero included.
        assert_eq!(format_duration(Duration::from_millis(250)), "250ms");
        assert_eq!(format_duration(Duration::from_millis(0)), "0ms");
    }

    #[test]
    fn test_format_duration_seconds() {
        // 1s (inclusive) to 60s (exclusive) uses two decimal places.
        assert_eq!(format_duration(Duration::from_millis(1_500)), "1.50s");
        // 59.999s rounds up to 60.00s.
        assert_eq!(format_duration(Duration::from_millis(59_999)), "60.00s");
    }

    #[test]
    fn test_format_duration_minutes() {
        // A minute or more is rendered as "Xm YYs" with zero-padded seconds.
        assert_eq!(format_duration(Duration::from_millis(75_000)), "1m 15s");
        assert_eq!(format_duration(Duration::from_millis(60_000)), "1m 00s");
        assert_eq!(format_duration(Duration::from_millis(3_601_000)), "60m 01s");
    }

    #[test]
    fn test_format_duration_one_hour_has_space() {
        assert_eq!(format_duration(Duration::from_millis(3_600_000)), "60m 00s");
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/lib.rs
codex-rs/common/src/lib.rs
#[cfg(feature = "cli")] mod approval_mode_cli_arg; #[cfg(feature = "elapsed")] pub mod elapsed; #[cfg(feature = "cli")] pub use approval_mode_cli_arg::ApprovalModeCliArg; #[cfg(feature = "cli")] mod sandbox_mode_cli_arg; #[cfg(feature = "cli")] pub use sandbox_mode_cli_arg::SandboxModeCliArg; #[cfg(feature = "cli")] pub mod format_env_display; #[cfg(any(feature = "cli", test))] mod config_override; #[cfg(feature = "cli")] pub use config_override::CliConfigOverrides; mod sandbox_summary; #[cfg(feature = "sandbox_summary")] pub use sandbox_summary::summarize_sandbox_policy; mod config_summary; pub use config_summary::create_config_summary_entries; // Shared fuzzy matcher (used by TUI selection popups and other UI filtering) pub mod fuzzy_match; // Shared approval presets (AskForApproval + Sandbox) used by TUI and MCP server // Not to be confused with AskForApproval, which we should probably rename to EscalationPolicy. pub mod approval_presets; // Shared OSS provider utilities used by TUI and exec pub mod oss;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/oss.rs
codex-rs/common/src/oss.rs
//! OSS provider utilities shared between TUI and exec. use codex_core::LMSTUDIO_OSS_PROVIDER_ID; use codex_core::OLLAMA_OSS_PROVIDER_ID; use codex_core::config::Config; /// Returns the default model for a given OSS provider. pub fn get_default_model_for_oss_provider(provider_id: &str) -> Option<&'static str> { match provider_id { LMSTUDIO_OSS_PROVIDER_ID => Some(codex_lmstudio::DEFAULT_OSS_MODEL), OLLAMA_OSS_PROVIDER_ID => Some(codex_ollama::DEFAULT_OSS_MODEL), _ => None, } } /// Ensures the specified OSS provider is ready (models downloaded, service reachable). pub async fn ensure_oss_provider_ready( provider_id: &str, config: &Config, ) -> Result<(), std::io::Error> { match provider_id { LMSTUDIO_OSS_PROVIDER_ID => { codex_lmstudio::ensure_oss_ready(config) .await .map_err(|e| std::io::Error::other(format!("OSS setup failed: {e}")))?; } OLLAMA_OSS_PROVIDER_ID => { codex_ollama::ensure_oss_ready(config) .await .map_err(|e| std::io::Error::other(format!("OSS setup failed: {e}")))?; } _ => { // Unknown provider, skip setup } } Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_get_default_model_for_provider_lmstudio() { let result = get_default_model_for_oss_provider(LMSTUDIO_OSS_PROVIDER_ID); assert_eq!(result, Some(codex_lmstudio::DEFAULT_OSS_MODEL)); } #[test] fn test_get_default_model_for_provider_ollama() { let result = get_default_model_for_oss_provider(OLLAMA_OSS_PROVIDER_ID); assert_eq!(result, Some(codex_ollama::DEFAULT_OSS_MODEL)); } #[test] fn test_get_default_model_for_provider_unknown() { let result = get_default_model_for_oss_provider("unknown-provider"); assert_eq!(result, None); } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/approval_mode_cli_arg.rs
codex-rs/common/src/approval_mode_cli_arg.rs
//! Standard type to use with the `--approval-mode` CLI option. //! Available when the `cli` feature is enabled for the crate. use clap::ValueEnum; use codex_core::protocol::AskForApproval; #[derive(Clone, Copy, Debug, ValueEnum)] #[value(rename_all = "kebab-case")] pub enum ApprovalModeCliArg { /// Only run "trusted" commands (e.g. ls, cat, sed) without asking for user /// approval. Will escalate to the user if the model proposes a command that /// is not in the "trusted" set. Untrusted, /// Run all commands without asking for user approval. /// Only asks for approval if a command fails to execute, in which case it /// will escalate to the user to ask for un-sandboxed execution. OnFailure, /// The model decides when to ask the user for approval. OnRequest, /// Never ask for user approval /// Execution failures are immediately returned to the model. Never, } impl From<ApprovalModeCliArg> for AskForApproval { fn from(value: ApprovalModeCliArg) -> Self { match value { ApprovalModeCliArg::Untrusted => AskForApproval::UnlessTrusted, ApprovalModeCliArg::OnFailure => AskForApproval::OnFailure, ApprovalModeCliArg::OnRequest => AskForApproval::OnRequest, ApprovalModeCliArg::Never => AskForApproval::Never, } } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/fuzzy_match.rs
codex-rs/common/src/fuzzy_match.rs
/// Simple case-insensitive subsequence matcher used for fuzzy filtering.
///
/// On success, returns the character positions of the matched characters in
/// the ORIGINAL `haystack` together with a score where smaller is better.
///
/// Unicode correctness: matching happens on a lowercased copy of `haystack`
/// and `needle`, while a side table maps each lowercased character back to
/// the original character index in `haystack`. Lowercasing can expand one
/// character into several (e.g. ß → ss, İ → i̇); the side table is what keeps
/// the returned indices safe to use with `str::chars().enumerate()` when
/// highlighting.
pub fn fuzzy_match(haystack: &str, needle: &str) -> Option<(Vec<usize>, i32)> {
    // An empty needle trivially matches, with the worst possible score.
    if needle.is_empty() {
        return Some((Vec::new(), i32::MAX));
    }

    // Case-fold the haystack, remembering which original character produced
    // each folded character.
    let mut folded: Vec<char> = Vec::new();
    let mut fold_to_orig: Vec<usize> = Vec::new();
    for (orig_idx, ch) in haystack.chars().enumerate() {
        for low in ch.to_lowercase() {
            folded.push(low);
            fold_to_orig.push(orig_idx);
        }
    }

    let needle_folded: Vec<char> = needle.to_lowercase().chars().collect();

    // Greedy left-to-right subsequence scan over the folded haystack.
    let mut matched_orig: Vec<usize> = Vec::with_capacity(needle_folded.len());
    let mut last_fold_pos: Option<usize> = None;
    let mut scan = 0usize;
    for &nc in &needle_folded {
        // Next occurrence of `nc` at or after `scan`; bail if none remains.
        let rel = folded[scan..].iter().position(|&c| c == nc)?;
        let pos = scan + rel;
        scan = pos + 1;
        matched_orig.push(fold_to_orig[pos]);
        last_fold_pos = Some(pos);
    }

    // Folded position of the first hit: the first folded char mapping back to
    // the same original index as the first matched character.
    let first_fold_pos = match matched_orig.first() {
        None => 0usize,
        Some(&orig) => fold_to_orig.iter().position(|&o| o == orig).unwrap_or(0),
    };

    // Score: extra span between first and last hit beyond the needle length
    // (clamped at zero); prefix matches get a strong -100 bonus.
    let last_fold_pos = last_fold_pos.unwrap_or(first_fold_pos);
    let span = (last_fold_pos as i32 - first_fold_pos as i32 + 1) - (needle_folded.len() as i32);
    let mut score = span.max(0);
    if first_fold_pos == 0 {
        score -= 100;
    }

    matched_orig.sort_unstable();
    matched_orig.dedup();
    Some((matched_orig, score))
}

/// Convenience wrapper returning only the (sorted, deduped) match indices.
pub fn fuzzy_indices(haystack: &str, needle: &str) -> Option<Vec<usize>> {
    let (mut indices, _) = fuzzy_match(haystack, needle)?;
    indices.sort_unstable();
    indices.dedup();
    Some(indices)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn ascii_basic_indices() {
        let (idx, score) = fuzzy_match("hello", "hl").expect("expected a match");
        assert_eq!(idx, vec![0, 2]);
        // 'h' at 0, 'l' at 2 -> window 1; start-of-string bonus applies (-100)
        assert_eq!(score, -99);
    }

    #[test]
    fn unicode_dotted_i_istanbul_highlighting() {
        let (idx, score) = fuzzy_match("İstanbul", "is").expect("expected a match");
        assert_eq!(idx, vec![0, 1]);
        // Matches at lowered positions 0 and 2 -> window 1; start-of-string bonus applies
        assert_eq!(score, -99);
    }

    #[test]
    fn unicode_german_sharp_s_casefold() {
        assert!(fuzzy_match("straße", "strasse").is_none());
    }

    #[test]
    fn prefer_contiguous_match_over_spread() {
        let (_idx_a, score_a) = fuzzy_match("abc", "abc").expect("expected a match");
        let (_idx_b, score_b) = fuzzy_match("a-b-c", "abc").expect("expected a match");
        // Contiguous window -> 0; start-of-string bonus -> -100
        assert_eq!(score_a, -100);
        // Spread over 5 chars for 3-letter needle -> window 2; with bonus -> -98
        assert_eq!(score_b, -98);
        assert!(score_a < score_b);
    }

    #[test]
    fn start_of_string_bonus_applies() {
        let (_idx_a, score_a) = fuzzy_match("file_name", "file").expect("expected a match");
        let (_idx_b, score_b) = fuzzy_match("my_file_name", "file").expect("expected a match");
        // Start-of-string contiguous -> window 0; bonus -> -100
        assert_eq!(score_a, -100);
        // Non-prefix contiguous -> window 0; no bonus -> 0
        assert_eq!(score_b, 0);
        assert!(score_a < score_b);
    }

    #[test]
    fn empty_needle_matches_with_max_score_and_no_indices() {
        let (idx, score) = fuzzy_match("anything", "").expect("empty needle should match");
        assert!(idx.is_empty());
        assert_eq!(score, i32::MAX);
    }

    #[test]
    fn case_insensitive_matching_basic() {
        let (idx, score) = fuzzy_match("FooBar", "foO").expect("expected a match");
        assert_eq!(idx, vec![0, 1, 2]);
        // Contiguous prefix match (case-insensitive) -> window 0 with bonus
        assert_eq!(score, -100);
    }

    #[test]
    fn indices_are_deduped_for_multichar_lowercase_expansion() {
        let needle = "\u{0069}\u{0307}"; // "i" + combining dot above
        let (idx, score) = fuzzy_match("İ", needle).expect("expected a match");
        assert_eq!(idx, vec![0]);
        // Lowercasing 'İ' expands to two chars; contiguous prefix -> window 0 with bonus
        assert_eq!(score, -100);
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/config_summary.rs
codex-rs/common/src/config_summary.rs
use codex_core::WireApi; use codex_core::config::Config; use crate::sandbox_summary::summarize_sandbox_policy; /// Build a list of key/value pairs summarizing the effective configuration. pub fn create_config_summary_entries(config: &Config, model: &str) -> Vec<(&'static str, String)> { let mut entries = vec![ ("workdir", config.cwd.display().to_string()), ("model", model.to_string()), ("provider", config.model_provider_id.clone()), ("approval", config.approval_policy.value().to_string()), ( "sandbox", summarize_sandbox_policy(config.sandbox_policy.get()), ), ]; if config.model_provider.wire_api == WireApi::Responses { let reasoning_effort = config .model_reasoning_effort .map(|effort| effort.to_string()); entries.push(( "reasoning effort", reasoning_effort.unwrap_or_else(|| "none".to_string()), )); entries.push(( "reasoning summaries", config.model_reasoning_summary.to_string(), )); } entries }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/sandbox_mode_cli_arg.rs
codex-rs/common/src/sandbox_mode_cli_arg.rs
//! Standard type to use with the `--sandbox` (`-s`) CLI option. //! //! This mirrors the variants of [`codex_core::protocol::SandboxPolicy`], but //! without any of the associated data so it can be expressed as a simple flag //! on the command-line. Users that need to tweak the advanced options for //! `workspace-write` can continue to do so via `-c` overrides or their //! `config.toml`. use clap::ValueEnum; use codex_protocol::config_types::SandboxMode; #[derive(Clone, Copy, Debug, ValueEnum)] #[value(rename_all = "kebab-case")] pub enum SandboxModeCliArg { ReadOnly, WorkspaceWrite, DangerFullAccess, } impl From<SandboxModeCliArg> for SandboxMode { fn from(value: SandboxModeCliArg) -> Self { match value { SandboxModeCliArg::ReadOnly => SandboxMode::ReadOnly, SandboxModeCliArg::WorkspaceWrite => SandboxMode::WorkspaceWrite, SandboxModeCliArg::DangerFullAccess => SandboxMode::DangerFullAccess, } } } #[cfg(test)] mod tests { use super::*; use pretty_assertions::assert_eq; #[test] fn maps_cli_args_to_protocol_modes() { assert_eq!(SandboxMode::ReadOnly, SandboxModeCliArg::ReadOnly.into()); assert_eq!( SandboxMode::WorkspaceWrite, SandboxModeCliArg::WorkspaceWrite.into() ); assert_eq!( SandboxMode::DangerFullAccess, SandboxModeCliArg::DangerFullAccess.into() ); } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/approval_presets.rs
codex-rs/common/src/approval_presets.rs
use codex_core::protocol::AskForApproval; use codex_core::protocol::SandboxPolicy; /// A simple preset pairing an approval policy with a sandbox policy. #[derive(Debug, Clone)] pub struct ApprovalPreset { /// Stable identifier for the preset. pub id: &'static str, /// Display label shown in UIs. pub label: &'static str, /// Short human description shown next to the label in UIs. pub description: &'static str, /// Approval policy to apply. pub approval: AskForApproval, /// Sandbox policy to apply. pub sandbox: SandboxPolicy, } /// Built-in list of approval presets that pair approval and sandbox policy. /// /// Keep this UI-agnostic so it can be reused by both TUI and MCP server. pub fn builtin_approval_presets() -> Vec<ApprovalPreset> { vec![ ApprovalPreset { id: "read-only", label: "Read Only", description: "Requires approval to edit files and run commands.", approval: AskForApproval::OnRequest, sandbox: SandboxPolicy::ReadOnly, }, ApprovalPreset { id: "auto", label: "Agent", description: "Read and edit files, and run commands.", approval: AskForApproval::OnRequest, sandbox: SandboxPolicy::new_workspace_write_policy(), }, ApprovalPreset { id: "full-access", label: "Agent (full access)", description: "Codex can edit files outside this workspace and run commands with network access. Exercise caution when using.", approval: AskForApproval::Never, sandbox: SandboxPolicy::DangerFullAccess, }, ] }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/sandbox_summary.rs
codex-rs/common/src/sandbox_summary.rs
use codex_core::protocol::NetworkAccess;
use codex_core::protocol::SandboxPolicy;

/// Render a one-line, human-readable summary of a sandbox policy.
pub fn summarize_sandbox_policy(sandbox_policy: &SandboxPolicy) -> String {
    match sandbox_policy {
        SandboxPolicy::DangerFullAccess => "danger-full-access".to_string(),
        SandboxPolicy::ReadOnly => "read-only".to_string(),
        SandboxPolicy::ExternalSandbox { network_access } => {
            let mut out = "external-sandbox".to_string();
            if matches!(network_access, NetworkAccess::Enabled) {
                out.push_str(" (network access enabled)");
            }
            out
        }
        SandboxPolicy::WorkspaceWrite {
            writable_roots,
            network_access,
            exclude_tmpdir_env_var,
            exclude_slash_tmp,
        } => {
            // The workdir is always writable; /tmp and $TMPDIR can be
            // excluded via the policy flags. Extra writable roots follow.
            let mut writable: Vec<String> = vec!["workdir".to_string()];
            if !*exclude_slash_tmp {
                writable.push("/tmp".to_string());
            }
            if !*exclude_tmpdir_env_var {
                writable.push("$TMPDIR".to_string());
            }
            writable.extend(
                writable_roots
                    .iter()
                    .map(|p| p.to_string_lossy().to_string()),
            );

            let mut out = format!("workspace-write [{}]", writable.join(", "));
            if *network_access {
                out.push_str(" (network access enabled)");
            }
            out
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use codex_utils_absolute_path::AbsolutePathBuf;
    use pretty_assertions::assert_eq;

    #[test]
    fn summarizes_external_sandbox_without_network_access_suffix() {
        let summary = summarize_sandbox_policy(&SandboxPolicy::ExternalSandbox {
            network_access: NetworkAccess::Restricted,
        });
        assert_eq!(summary, "external-sandbox");
    }

    #[test]
    fn summarizes_external_sandbox_with_enabled_network() {
        let summary = summarize_sandbox_policy(&SandboxPolicy::ExternalSandbox {
            network_access: NetworkAccess::Enabled,
        });
        assert_eq!(summary, "external-sandbox (network access enabled)");
    }

    #[test]
    fn workspace_write_summary_still_includes_network_access() {
        let root = if cfg!(windows) { "C:\\repo" } else { "/repo" };
        let writable_root = AbsolutePathBuf::try_from(root).unwrap();
        let summary = summarize_sandbox_policy(&SandboxPolicy::WorkspaceWrite {
            writable_roots: vec![writable_root.clone()],
            network_access: true,
            exclude_tmpdir_env_var: true,
            exclude_slash_tmp: true,
        });
        assert_eq!(
            summary,
            format!(
                "workspace-write [workdir, {}] (network access enabled)",
                writable_root.to_string_lossy()
            )
        );
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/format_env_display.rs
codex-rs/common/src/format_env_display.rs
use std::collections::HashMap;

/// Render environment configuration as a comma-separated list of
/// `NAME=*****` pairs, with all values masked.
///
/// * `env` — explicit key/value pairs; rendered first, in sorted key order so
///   output is deterministic regardless of `HashMap` iteration order.
/// * `env_vars` — variable names to pass through; rendered after `env` in the
///   order given.
///
/// Returns `"-"` when there is nothing to display.
pub fn format_env_display(env: Option<&HashMap<String, String>>, env_vars: &[String]) -> String {
    let mut parts: Vec<String> = Vec::new();

    if let Some(map) = env {
        let mut pairs: Vec<_> = map.iter().collect();
        pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
        parts.extend(pairs.into_iter().map(|(key, _)| format!("{key}=*****")));
    }

    // `extend` over an empty slice is a no-op, so no emptiness guard needed.
    parts.extend(env_vars.iter().map(|var| format!("{var}=*****")));

    if parts.is_empty() {
        "-".to_string()
    } else {
        parts.join(", ")
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn returns_dash_when_empty() {
        assert_eq!(format_env_display(None, &[]), "-");
        let empty_map = HashMap::new();
        assert_eq!(format_env_display(Some(&empty_map), &[]), "-");
    }

    #[test]
    fn formats_sorted_env_pairs() {
        let mut env = HashMap::new();
        env.insert("B".to_string(), "two".to_string());
        env.insert("A".to_string(), "one".to_string());
        assert_eq!(format_env_display(Some(&env), &[]), "A=*****, B=*****");
    }

    // Renamed from `formats_env_vars_with_dollar_prefix`: the output never
    // contained a dollar prefix — names are masked as `NAME=*****`.
    #[test]
    fn formats_env_vars_as_masked_pairs() {
        let vars = vec!["TOKEN".to_string(), "PATH".to_string()];
        assert_eq!(format_env_display(None, &vars), "TOKEN=*****, PATH=*****");
    }

    #[test]
    fn combines_env_pairs_and_vars() {
        let mut env = HashMap::new();
        env.insert("HOME".to_string(), "/tmp".to_string());
        let vars = vec!["TOKEN".to_string()];
        assert_eq!(
            format_env_display(Some(&env), &vars),
            "HOME=*****, TOKEN=*****"
        );
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/common/src/config_override.rs
codex-rs/common/src/config_override.rs
//! Support for `-c key=value` overrides shared across Codex CLI tools.
//!
//! This module provides a [`CliConfigOverrides`] struct that can be embedded
//! into a `clap`-derived CLI struct using `#[clap(flatten)]`. Each occurrence
//! of `-c key=value` (or `--config key=value`) will be collected as a raw
//! string. Helper methods are provided to convert the raw strings into
//! key/value pairs as well as to apply them onto a mutable
//! `toml::Value` representing the configuration tree.

use clap::ArgAction;
use clap::Parser;
use serde::de::Error as SerdeError;
use toml::Value;

/// CLI option that captures arbitrary configuration overrides specified as
/// `-c key=value`. It intentionally keeps both halves **unparsed** so that the
/// calling code can decide how to interpret the right-hand side.
#[derive(Parser, Debug, Default, Clone)]
pub struct CliConfigOverrides {
    /// Override a configuration value that would otherwise be loaded from
    /// `~/.codex/config.toml`. Use a dotted path (`foo.bar.baz`) to override
    /// nested values. The `value` portion is parsed as TOML. If it fails to
    /// parse as TOML, the raw string is used as a literal.
    ///
    /// Examples:
    ///  - `-c model="o3"`
    ///  - `-c 'sandbox_permissions=["disk-full-read-access"]'`
    ///  - `-c shell_environment_policy.inherit=all`
    #[arg(
        short = 'c',
        long = "config",
        value_name = "key=value",
        action = ArgAction::Append,
        global = true,
    )]
    pub raw_overrides: Vec<String>,
}

impl CliConfigOverrides {
    /// Parse the raw strings captured from the CLI into a list of `(path,
    /// value)` tuples where `value` is a `toml::Value`.
    ///
    /// Fails with a human-readable message when an entry has no `=` or an
    /// empty key; collection short-circuits on the first error.
    pub fn parse_overrides(&self) -> Result<Vec<(String, Value)>, String> {
        self.raw_overrides
            .iter()
            .map(|s| {
                // Only split on the *first* '=' so values are free to contain
                // the character.
                let mut parts = s.splitn(2, '=');
                let key = match parts.next() {
                    Some(k) => k.trim(),
                    None => return Err("Override missing key".to_string()),
                };
                let value_str = parts
                    .next()
                    .ok_or_else(|| format!("Invalid override (missing '='): {s}"))?
                    .trim();

                if key.is_empty() {
                    return Err(format!("Empty key in override: {s}"));
                }

                // Attempt to parse as TOML. If that fails, treat it as a raw
                // string. This allows convenient usage such as
                // `-c model=o3` without the quotes.
                let value: Value = match parse_toml_value(value_str) {
                    Ok(v) => v,
                    Err(_) => {
                        // Strip leading/trailing quotes if present.
                        let trimmed = value_str.trim().trim_matches(|c| c == '"' || c == '\'');
                        Value::String(trimmed.to_string())
                    }
                };

                Ok((key.to_string(), value))
            })
            .collect()
    }

    /// Apply all parsed overrides onto `target`. Intermediate objects will be
    /// created as necessary. Values located at the destination path will be
    /// replaced.
    pub fn apply_on_value(&self, target: &mut Value) -> Result<(), String> {
        let overrides = self.parse_overrides()?;
        for (path, value) in overrides {
            apply_single_override(target, &path, value);
        }
        Ok(())
    }
}

/// Apply a single override onto `root`, creating intermediate tables as
/// necessary. Any non-table value encountered along the dotted path is
/// replaced by a fresh table.
fn apply_single_override(root: &mut Value, path: &str, value: Value) {
    use toml::value::Table;

    let parts: Vec<&str> = path.split('.').collect();
    let mut current = root;

    for (i, part) in parts.iter().enumerate() {
        let is_last = i == parts.len() - 1;

        if is_last {
            // Final path segment: insert (or overwrite) the value here.
            match current {
                Value::Table(tbl) => {
                    tbl.insert((*part).to_string(), value);
                }
                _ => {
                    let mut tbl = Table::new();
                    tbl.insert((*part).to_string(), value);
                    *current = Value::Table(tbl);
                }
            }
            return;
        }

        // Traverse or create intermediate table.
        match current {
            Value::Table(tbl) => {
                current = tbl
                    .entry((*part).to_string())
                    .or_insert_with(|| Value::Table(Table::new()));
            }
            _ => {
                // Clobber a non-table intermediate with an empty table, then
                // descend into the newly created entry.
                *current = Value::Table(Table::new());
                if let Value::Table(tbl) = current {
                    current = tbl
                        .entry((*part).to_string())
                        .or_insert_with(|| Value::Table(Table::new()));
                }
            }
        }
    }
}

/// Parse a bare TOML value by wrapping it in a sentinel assignment
/// (`_x_ = <raw>`) so the TOML parser accepts it, then extracting the value.
fn parse_toml_value(raw: &str) -> Result<Value, toml::de::Error> {
    let wrapped = format!("_x_ = {raw}");
    let table: toml::Table = toml::from_str(&wrapped)?;
    table
        .get("_x_")
        .cloned()
        .ok_or_else(|| SerdeError::custom("missing sentinel key"))
}

#[cfg(all(test, feature = "cli"))]
mod tests {
    use super::*;

    #[test]
    fn parses_basic_scalar() {
        let v = parse_toml_value("42").expect("parse");
        assert_eq!(v.as_integer(), Some(42));
    }

    #[test]
    fn parses_bool() {
        let true_literal = parse_toml_value("true").expect("parse");
        assert_eq!(true_literal.as_bool(), Some(true));

        let false_literal = parse_toml_value("false").expect("parse");
        assert_eq!(false_literal.as_bool(), Some(false));
    }

    #[test]
    fn fails_on_unquoted_string() {
        // Bare words are not valid TOML values; callers fall back to treating
        // them as literal strings.
        assert!(parse_toml_value("hello").is_err());
    }

    #[test]
    fn parses_array() {
        let v = parse_toml_value("[1, 2, 3]").expect("parse");
        let arr = v.as_array().expect("array");
        assert_eq!(arr.len(), 3);
    }

    #[test]
    fn parses_inline_table() {
        let v = parse_toml_value("{a = 1, b = 2}").expect("parse");
        let tbl = v.as_table().expect("table");
        assert_eq!(tbl.get("a").unwrap().as_integer(), Some(1));
        assert_eq!(tbl.get("b").unwrap().as_integer(), Some(2));
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/build.rs
codex-rs/execpolicy-legacy/build.rs
// Build script for the legacy execpolicy crate.
fn main() {
    // Ask Cargo to rerun the build (and thus recompile the crate) whenever
    // the default policy source changes; it is embedded via `include_str!`
    // in the crate root.
    println!("cargo:rerun-if-changed=src/default.policy");
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/valid_exec.rs
codex-rs/execpolicy-legacy/src/valid_exec.rs
use crate::arg_type::ArgType; use crate::error::Result; use serde::Serialize; /// exec() invocation that has been accepted by a `Policy`. #[derive(Clone, Debug, Default, Eq, PartialEq, Serialize)] pub struct ValidExec { pub program: String, pub flags: Vec<MatchedFlag>, pub opts: Vec<MatchedOpt>, pub args: Vec<MatchedArg>, /// If non-empty, a prioritized list of paths to try instead of `program`. /// For example, `/bin/ls` is harder to compromise than whatever `ls` /// happens to be in the user's `$PATH`, so `/bin/ls` would be included for /// `ls`. The caller is free to disregard this list and use `program`. pub system_path: Vec<String>, } impl ValidExec { pub fn new(program: &str, args: Vec<MatchedArg>, system_path: &[&str]) -> Self { Self { program: program.to_string(), flags: vec![], opts: vec![], args, system_path: system_path.iter().map(|&s| s.to_string()).collect(), } } /// Whether a possible side effect of running this command includes writing /// a file. pub fn might_write_files(&self) -> bool { self.opts.iter().any(|opt| opt.r#type.might_write_file()) || self.args.iter().any(|opt| opt.r#type.might_write_file()) } } #[derive(Clone, Debug, Eq, PartialEq, Serialize)] pub struct MatchedArg { pub index: usize, pub r#type: ArgType, pub value: String, } impl MatchedArg { pub fn new(index: usize, r#type: ArgType, value: &str) -> Result<Self> { r#type.validate(value)?; Ok(Self { index, r#type, value: value.to_string(), }) } } /// A match for an option declared with opt() in a .policy file. #[derive(Clone, Debug, Eq, PartialEq, Serialize)] pub struct MatchedOpt { /// Name of the option that was matched. pub name: String, /// Value supplied for the option. pub value: String, /// Type of the value supplied for the option. 
pub r#type: ArgType, } impl MatchedOpt { pub fn new(name: &str, value: &str, r#type: ArgType) -> Result<Self> { r#type.validate(value)?; Ok(Self { name: name.to_string(), value: value.to_string(), r#type, }) } pub fn name(&self) -> &str { &self.name } } #[derive(Clone, Debug, Eq, PartialEq, Serialize)] pub struct MatchedFlag { /// Name of the flag that was matched. pub name: String, } impl MatchedFlag { pub fn new(name: &str) -> Self { Self { name: name.to_string(), } } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/lib.rs
codex-rs/execpolicy-legacy/src/lib.rs
#![allow(clippy::type_complexity)] #![allow(clippy::too_many_arguments)] #[macro_use] extern crate starlark; mod arg_matcher; mod arg_resolver; mod arg_type; mod error; mod exec_call; mod execv_checker; mod opt; mod policy; mod policy_parser; mod program; mod sed_command; mod valid_exec; pub use arg_matcher::ArgMatcher; pub use arg_resolver::PositionalArg; pub use arg_type::ArgType; pub use error::Error; pub use error::Result; pub use exec_call::ExecCall; pub use execv_checker::ExecvChecker; pub use opt::Opt; pub use policy::Policy; pub use policy_parser::PolicyParser; pub use program::Forbidden; pub use program::MatchedExec; pub use program::NegativeExamplePassedCheck; pub use program::PositiveExampleFailedCheck; pub use program::ProgramSpec; pub use sed_command::parse_sed_command; pub use valid_exec::MatchedArg; pub use valid_exec::MatchedFlag; pub use valid_exec::MatchedOpt; pub use valid_exec::ValidExec; const DEFAULT_POLICY: &str = include_str!("default.policy"); pub fn get_default_policy() -> starlark::Result<Policy> { let parser = PolicyParser::new("#default", DEFAULT_POLICY); parser.parse() }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/program.rs
codex-rs/execpolicy-legacy/src/program.rs
use serde::Serialize;
use std::collections::HashMap;
use std::collections::HashSet;

use crate::ArgType;
use crate::ExecCall;
use crate::arg_matcher::ArgMatcher;
use crate::arg_resolver::PositionalArg;
use crate::arg_resolver::resolve_observed_args_with_patterns;
use crate::error::Error;
use crate::error::Result;
use crate::opt::Opt;
use crate::opt::OptMeta;
use crate::valid_exec::MatchedFlag;
use crate::valid_exec::MatchedOpt;
use crate::valid_exec::ValidExec;

/// Declarative description of one program in a policy: which options and
/// positional-argument patterns are allowed, plus example invocations used to
/// sanity-check the spec itself.
#[derive(Debug)]
pub struct ProgramSpec {
    pub program: String,
    pub system_path: Vec<String>,
    // NOTE(review): `option_bundling` and `combined_format` are not consulted
    // by `check()` below — presumably used by the policy parser or a planned
    // feature; confirm before removing.
    pub option_bundling: bool,
    pub combined_format: bool,
    pub allowed_options: HashMap<String, Opt>,
    pub arg_patterns: Vec<ArgMatcher>,
    // When set, every otherwise-valid invocation is reported as Forbidden
    // with this reason (see `check()`).
    forbidden: Option<String>,
    // Names of the options in `allowed_options` marked `required`; derived in
    // `new()`.
    required_options: HashSet<String>,
    // Example invocations that are expected to pass `check()`.
    should_match: Vec<Vec<String>>,
    // Example invocations that are expected to fail `check()`.
    should_not_match: Vec<Vec<String>>,
}

impl ProgramSpec {
    /// Build a spec; `required_options` is derived from the `required` flag
    /// on each entry of `allowed_options`.
    pub fn new(
        program: String,
        system_path: Vec<String>,
        option_bundling: bool,
        combined_format: bool,
        allowed_options: HashMap<String, Opt>,
        arg_patterns: Vec<ArgMatcher>,
        forbidden: Option<String>,
        should_match: Vec<Vec<String>>,
        should_not_match: Vec<Vec<String>>,
    ) -> Self {
        let required_options = allowed_options
            .iter()
            .filter_map(|(name, opt)| {
                if opt.required {
                    Some(name.clone())
                } else {
                    None
                }
            })
            .collect();
        Self {
            program,
            system_path,
            option_bundling,
            combined_format,
            allowed_options,
            arg_patterns,
            forbidden,
            required_options,
            should_match,
            should_not_match,
        }
    }
}

/// Outcome of checking an exec call against a `ProgramSpec`.
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum MatchedExec {
    Match { exec: ValidExec },
    Forbidden { cause: Forbidden, reason: String },
}

/// The part of an invocation that caused it to be forbidden.
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub enum Forbidden {
    Program {
        program: String,
        exec_call: ExecCall,
    },
    Arg {
        arg: String,
        exec_call: ExecCall,
    },
    Exec {
        exec: ValidExec,
    },
}

impl ProgramSpec {
    // TODO(mbolin): The idea is that there should be a set of rules defined for
    // a program and the args should be checked against the rules to determine
    // if the program should be allowed to run.
    /// Validate `exec_call` against this spec.
    ///
    /// Walks the argument list left to right as a small state machine:
    /// an option that takes a value puts the loop into an "expecting value"
    /// state consumed by the following argument. Returns `Err` for unknown
    /// options, a dangling option value, `--`, or missing required options;
    /// otherwise returns `Ok(Match)` — or `Ok(Forbidden)` when the spec
    /// itself is marked forbidden.
    pub fn check(&self, exec_call: &ExecCall) -> Result<MatchedExec> {
        // (option name, expected value type) when the previous arg was an
        // option that takes a value.
        let mut expecting_option_value: Option<(String, ArgType)> = None;
        let mut args = Vec::<PositionalArg>::new();
        let mut matched_flags = Vec::<MatchedFlag>::new();
        let mut matched_opts = Vec::<MatchedOpt>::new();
        for (index, arg) in exec_call.args.iter().enumerate() {
            if let Some(expected) = expecting_option_value {
                // If we are expecting an option value, then the next argument
                // should be the value for the option.
                // This had better not be another option!
                let (name, arg_type) = expected;
                if arg.starts_with("-") {
                    return Err(Error::OptionFollowedByOptionInsteadOfValue {
                        program: self.program.clone(),
                        option: name,
                        value: arg.clone(),
                    });
                }
                // `MatchedOpt::new` validates the value against the type.
                matched_opts.push(MatchedOpt::new(&name, arg, arg_type)?);
                expecting_option_value = None;
            } else if arg == "--" {
                return Err(Error::DoubleDashNotSupportedYet {
                    program: self.program.clone(),
                });
            } else if arg.starts_with("-") {
                match self.allowed_options.get(arg) {
                    Some(opt) => {
                        match &opt.meta {
                            OptMeta::Flag => {
                                matched_flags.push(MatchedFlag { name: arg.clone() });
                                // A flag does not expect an argument: continue.
                                continue;
                            }
                            OptMeta::Value(arg_type) => {
                                // Defer: the NEXT argument is this option's
                                // value.
                                expecting_option_value = Some((arg.clone(), arg_type.clone()));
                                continue;
                            }
                        }
                    }
                    None => {
                        // It could be an --option=value style flag...
                    }
                }
                // Fell through the match above: option is not allowed.
                return Err(Error::UnknownOption {
                    program: self.program.clone(),
                    option: arg.clone(),
                });
            } else {
                // Plain positional argument; pattern-checked after the loop.
                args.push(PositionalArg {
                    index,
                    value: arg.clone(),
                });
            }
        }
        // The list ended while an option was still waiting for its value.
        if let Some(expected) = expecting_option_value {
            let (name, _arg_type) = expected;
            return Err(Error::OptionMissingValue {
                program: self.program.clone(),
                option: name,
            });
        }
        let matched_args =
            resolve_observed_args_with_patterns(&self.program, args, &self.arg_patterns)?;
        // Verify all required options are present.
        let matched_opt_names: HashSet<String> = matched_opts
            .iter()
            .map(|opt| opt.name().to_string())
            .collect();
        if !matched_opt_names.is_superset(&self.required_options) {
            // Report the missing options in deterministic (sorted) order.
            let mut options = self
                .required_options
                .difference(&matched_opt_names)
                .map(String::from)
                .collect::<Vec<_>>();
            options.sort();
            return Err(Error::MissingRequiredOptions {
                program: self.program.clone(),
                options,
            });
        }
        let exec = ValidExec {
            program: self.program.clone(),
            flags: matched_flags,
            opts: matched_opts,
            args: matched_args,
            system_path: self.system_path.clone(),
        };
        // A spec-level `forbidden` reason downgrades an otherwise valid match.
        match &self.forbidden {
            Some(reason) => Ok(MatchedExec::Forbidden {
                cause: Forbidden::Exec { exec },
                reason: reason.clone(),
            }),
            None => Ok(MatchedExec::Match { exec }),
        }
    }

    /// Run every `should_match` example through `check()` and collect the
    /// ones that unexpectedly FAILED.
    pub fn verify_should_match_list(&self) -> Vec<PositiveExampleFailedCheck> {
        let mut violations = Vec::new();
        for good in &self.should_match {
            let exec_call = ExecCall {
                program: self.program.clone(),
                args: good.clone(),
            };
            match self.check(&exec_call) {
                Ok(_) => {}
                Err(error) => {
                    violations.push(PositiveExampleFailedCheck {
                        program: self.program.clone(),
                        args: good.clone(),
                        error,
                    });
                }
            }
        }
        violations
    }

    /// Run every `should_not_match` example through `check()` and collect the
    /// ones that unexpectedly PASSED.
    pub fn verify_should_not_match_list(&self) -> Vec<NegativeExamplePassedCheck> {
        let mut violations = Vec::new();
        for bad in &self.should_not_match {
            let exec_call = ExecCall {
                program: self.program.clone(),
                args: bad.clone(),
            };
            if self.check(&exec_call).is_ok() {
                violations.push(NegativeExamplePassedCheck {
                    program: self.program.clone(),
                    args: bad.clone(),
                });
            }
        }
        violations
    }
}

/// A `should_match` example that failed `check()`, with the error it hit.
#[derive(Debug, Eq, PartialEq)]
pub struct PositiveExampleFailedCheck {
    pub program: String,
    pub args: Vec<String>,
    pub error: Error,
}

/// A `should_not_match` example that passed `check()` when it should not have.
#[derive(Debug, Eq, PartialEq)]
pub struct NegativeExamplePassedCheck {
    pub program: String,
    pub args: Vec<String>,
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/policy_parser.rs
codex-rs/execpolicy-legacy/src/policy_parser.rs
#![allow(clippy::needless_lifetimes)]
use crate::Opt;
use crate::Policy;
use crate::ProgramSpec;
use crate::arg_matcher::ArgMatcher;
use crate::opt::OptMeta;
use log::info;
use multimap::MultiMap;
use regex_lite::Regex;
use starlark::any::ProvidesStaticType;
use starlark::environment::GlobalsBuilder;
use starlark::environment::LibraryExtension;
use starlark::environment::Module;
use starlark::eval::Evaluator;
use starlark::syntax::AstModule;
use starlark::syntax::Dialect;
use starlark::values::Heap;
use starlark::values::list::UnpackList;
use starlark::values::none::NoneType;
use std::cell::RefCell;
use std::collections::HashMap;

/// Parses a Starlark policy document into a [`Policy`].
pub struct PolicyParser {
    /// Label for the policy (e.g. its file path); used in error reporting.
    policy_source: String,
    /// Raw Starlark text to evaluate.
    unparsed_policy: String,
}

impl PolicyParser {
    pub fn new(policy_source: &str, unparsed_policy: &str) -> Self {
        Self {
            policy_source: policy_source.to_string(),
            unparsed_policy: unparsed_policy.to_string(),
        }
    }

    /// Evaluates the policy as a Starlark module.
    ///
    /// The module environment exposes the `ARG_*` matcher constants plus the
    /// builtins registered in `policy_builtins` (`define_program`,
    /// `forbid_substrings`, `forbid_program_regex`, `opt`, `flag`), which
    /// record their effects into a shared `PolicyBuilder` via `eval.extra`.
    pub fn parse(&self) -> starlark::Result<Policy> {
        // Extended dialect with f-strings enabled for convenience in policies.
        let mut dialect = Dialect::Extended.clone();
        dialect.enable_f_strings = true;
        let ast = AstModule::parse(&self.policy_source, self.unparsed_policy.clone(), &dialect)?;
        let globals = GlobalsBuilder::extended_by(&[LibraryExtension::Typing])
            .with(policy_builtins)
            .build();
        let module = Module::new();
        let heap = Heap::new();
        // Pre-register the argument-matcher constants used by policy files.
        module.set("ARG_OPAQUE_VALUE", heap.alloc(ArgMatcher::OpaqueNonFile));
        module.set("ARG_RFILE", heap.alloc(ArgMatcher::ReadableFile));
        module.set("ARG_WFILE", heap.alloc(ArgMatcher::WriteableFile));
        module.set("ARG_RFILES", heap.alloc(ArgMatcher::ReadableFiles));
        module.set(
            "ARG_RFILES_OR_CWD",
            heap.alloc(ArgMatcher::ReadableFilesOrCwd),
        );
        module.set("ARG_POS_INT", heap.alloc(ArgMatcher::PositiveInteger));
        module.set("ARG_SED_COMMAND", heap.alloc(ArgMatcher::SedCommand));
        module.set(
            "ARG_UNVERIFIED_VARARGS",
            heap.alloc(ArgMatcher::UnverifiedVarargs),
        );

        let policy_builder = PolicyBuilder::new();
        {
            // Scope the evaluator so its borrow of `policy_builder` ends
            // before we consume the builder below.
            let mut eval = Evaluator::new(&module);
            eval.extra = Some(&policy_builder);
            eval.eval_module(ast, &globals)?;
        }
        let policy = policy_builder.build();
        policy.map_err(|e| starlark::Error::new_kind(starlark::ErrorKind::Other(e.into())))
    }
}

/// A regex that forbids matching program names, with a human-readable reason.
#[derive(Debug)]
pub struct ForbiddenProgramRegex {
    pub regex: regex_lite::Regex,
    pub reason: String,
}

/// Mutable accumulator the Starlark builtins write into while the policy
/// module evaluates. `RefCell` because builtins only get `&self` via
/// `eval.extra`.
#[derive(Debug, ProvidesStaticType)]
struct PolicyBuilder {
    programs: RefCell<MultiMap<String, ProgramSpec>>,
    forbidden_program_regexes: RefCell<Vec<ForbiddenProgramRegex>>,
    forbidden_substrings: RefCell<Vec<String>>,
}

impl PolicyBuilder {
    fn new() -> Self {
        Self {
            programs: RefCell::new(MultiMap::new()),
            forbidden_program_regexes: RefCell::new(Vec::new()),
            forbidden_substrings: RefCell::new(Vec::new()),
        }
    }

    /// Consumes the builder and assembles the final [`Policy`].
    fn build(self) -> Result<Policy, regex_lite::Error> {
        let programs = self.programs.into_inner();
        let forbidden_program_regexes = self.forbidden_program_regexes.into_inner();
        let forbidden_substrings = self.forbidden_substrings.into_inner();
        Policy::new(programs, forbidden_program_regexes, forbidden_substrings)
    }

    /// Records a spec; multiple specs per program name are allowed
    /// (`MultiMap`), tried in order at check time.
    fn add_program_spec(&self, program_spec: ProgramSpec) {
        info!("adding program spec: {program_spec:?}");
        let name = program_spec.program.clone();
        let mut programs = self.programs.borrow_mut();
        programs.insert(name, program_spec);
    }

    fn add_forbidden_substrings(&self, substrings: &[String]) {
        let mut forbidden_substrings = self.forbidden_substrings.borrow_mut();
        forbidden_substrings.extend_from_slice(substrings);
    }

    fn add_forbidden_program_regex(&self, regex: Regex, reason: String) {
        let mut forbidden_program_regexes = self.forbidden_program_regexes.borrow_mut();
        forbidden_program_regexes.push(ForbiddenProgramRegex { regex, reason });
    }
}

/// Builtin functions exposed to policy files. Each retrieves the shared
/// `PolicyBuilder` from `eval.extra` and records its effect.
#[starlark_module]
fn policy_builtins(builder: &mut GlobalsBuilder) {
    /// `define_program(program, ...)`: registers a `ProgramSpec`.
    fn define_program<'v>(
        program: String,
        system_path: Option<UnpackList<String>>,
        option_bundling: Option<bool>,
        combined_format: Option<bool>,
        options: Option<UnpackList<Opt>>,
        args: Option<UnpackList<ArgMatcher>>,
        forbidden: Option<String>,
        should_match: Option<UnpackList<UnpackList<String>>>,
        should_not_match: Option<UnpackList<UnpackList<String>>>,
        eval: &mut Evaluator,
    ) -> anyhow::Result<NoneType> {
        // Normalize all optional Starlark arguments to concrete defaults.
        let option_bundling = option_bundling.unwrap_or(false);
        let system_path = system_path.map_or_else(Vec::new, |v| v.items.to_vec());
        let combined_format = combined_format.unwrap_or(false);
        let options = options.map_or_else(Vec::new, |v| v.items.to_vec());
        let args = args.map_or_else(Vec::new, |v| v.items.to_vec());

        // Index options by name, rejecting duplicates.
        let mut allowed_options = HashMap::<String, Opt>::new();
        for opt in options {
            let name = opt.name().to_string();
            if allowed_options
                .insert(opt.name().to_string(), opt)
                .is_some()
            {
                return Err(anyhow::format_err!("duplicate flag: {name}"));
            }
        }

        let program_spec = ProgramSpec::new(
            program,
            system_path,
            option_bundling,
            combined_format,
            allowed_options,
            args,
            forbidden,
            // Flatten list-of-lists of example invocations into Vec<Vec<String>>.
            should_match
                .map_or_else(Vec::new, |v| v.items.to_vec())
                .into_iter()
                .map(|v| v.items.to_vec())
                .collect(),
            should_not_match
                .map_or_else(Vec::new, |v| v.items.to_vec())
                .into_iter()
                .map(|v| v.items.to_vec())
                .collect(),
        );
        // `extra` is always set by `PolicyParser::parse` before evaluation.
        #[expect(clippy::unwrap_used)]
        let policy_builder = eval
            .extra
            .as_ref()
            .unwrap()
            .downcast_ref::<PolicyBuilder>()
            .unwrap();
        policy_builder.add_program_spec(program_spec);
        Ok(NoneType)
    }

    /// `forbid_substrings([...])`: forbids args containing any listed substring.
    fn forbid_substrings(
        strings: UnpackList<String>,
        eval: &mut Evaluator,
    ) -> anyhow::Result<NoneType> {
        #[expect(clippy::unwrap_used)]
        let policy_builder = eval
            .extra
            .as_ref()
            .unwrap()
            .downcast_ref::<PolicyBuilder>()
            .unwrap();
        policy_builder.add_forbidden_substrings(&strings.items.to_vec());
        Ok(NoneType)
    }

    /// `forbid_program_regex(regex, reason)`: forbids programs matching `regex`.
    fn forbid_program_regex(
        regex: String,
        reason: String,
        eval: &mut Evaluator,
    ) -> anyhow::Result<NoneType> {
        #[expect(clippy::unwrap_used)]
        let policy_builder = eval
            .extra
            .as_ref()
            .unwrap()
            .downcast_ref::<PolicyBuilder>()
            .unwrap();
        // Compile eagerly so a bad regex fails at policy-load time.
        let compiled_regex = regex_lite::Regex::new(&regex)?;
        policy_builder.add_forbidden_program_regex(compiled_regex, reason);
        Ok(NoneType)
    }

    /// `opt(name, type, required=False)`: an option that takes a value.
    fn opt(name: String, r#type: ArgMatcher, required: Option<bool>) -> anyhow::Result<Opt> {
        Ok(Opt::new(
            name,
            OptMeta::Value(r#type.arg_type()),
            required.unwrap_or(false),
        ))
    }

    /// `flag(name)`: an option that takes no value.
    fn flag(name: String) -> anyhow::Result<Opt> {
        Ok(Opt::new(name, OptMeta::Flag, false))
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/sed_command.rs
codex-rs/execpolicy-legacy/src/sed_command.rs
use crate::error::Error; use crate::error::Result; pub fn parse_sed_command(sed_command: &str) -> Result<()> { // For now, we parse only commands like `122,202p`. if let Some(stripped) = sed_command.strip_suffix("p") && let Some((first, rest)) = stripped.split_once(",") && first.parse::<u64>().is_ok() && rest.parse::<u64>().is_ok() { return Ok(()); } Err(Error::SedCommandNotProvablySafe { command: sed_command.to_string(), }) }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/arg_resolver.rs
codex-rs/execpolicy-legacy/src/arg_resolver.rs
use serde::Serialize;

use crate::arg_matcher::ArgMatcher;
use crate::arg_matcher::ArgMatcherCardinality;
use crate::error::Error;
use crate::error::Result;
use crate::valid_exec::MatchedArg;

/// A non-option argument together with its original index in the arg list.
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct PositionalArg {
    pub index: usize,
    pub value: String,
}

/// Matches observed positional `args` against the spec's `arg_patterns`,
/// producing one `MatchedArg` per argument or an error describing the
/// mismatch.
pub fn resolve_observed_args_with_patterns(
    program: &str,
    args: Vec<PositionalArg>,
    arg_patterns: &Vec<ArgMatcher>,
) -> Result<Vec<MatchedArg>> {
    // Naive matching implementation. Among `arg_patterns`, there is allowed to
    // be at most one vararg pattern. Assuming `arg_patterns` is non-empty, we
    // end up with either:
    //
    // - all `arg_patterns` in `prefix_patterns`
    // - `arg_patterns` split across `prefix_patterns` (which could be empty),
    //   one `vararg_pattern`, and `suffix_patterns` (which could also empty).
    //
    // From there, we start by matching everything in `prefix_patterns`.
    // Then we calculate how many positional args should be matched by
    // `suffix_patterns` and use that to determine how many args are left to
    // be matched by `vararg_pattern` (which could be zero).
    //
    // After associating positional args with `vararg_pattern`, we match the
    // `suffix_patterns` with the remaining args.
    let ParitionedArgs {
        num_prefix_args,
        num_suffix_args,
        prefix_patterns,
        suffix_patterns,
        vararg_pattern,
    } = partition_args(program, arg_patterns)?;

    let mut matched_args = Vec::<MatchedArg>::new();

    // Match the fixed-cardinality prefix patterns against the leading args.
    let prefix = get_range_checked(&args, 0..num_prefix_args)?;
    let mut prefix_arg_index = 0;
    for pattern in prefix_patterns {
        // Prefix patterns must have exact cardinality by construction.
        let n = pattern
            .cardinality()
            .is_exact()
            .ok_or(Error::InternalInvariantViolation {
                message: "expected exact cardinality".to_string(),
            })?;
        for positional_arg in &prefix[prefix_arg_index..prefix_arg_index + n] {
            let matched_arg = MatchedArg::new(
                positional_arg.index,
                pattern.arg_type(),
                &positional_arg.value.clone(),
            )?;
            matched_args.push(matched_arg);
        }
        prefix_arg_index += n;
    }

    // Guard the subtraction below: the suffix cannot need more args than exist.
    if num_suffix_args > args.len() {
        return Err(Error::NotEnoughArgs {
            program: program.to_string(),
            args,
            arg_patterns: arg_patterns.clone(),
        });
    }
    let initial_suffix_args_index = args.len() - num_suffix_args;
    if prefix_arg_index > initial_suffix_args_index {
        return Err(Error::PrefixOverlapsSuffix {});
    }

    // Everything between the prefix and the suffix belongs to the vararg
    // pattern (possibly an empty slice).
    if let Some(pattern) = vararg_pattern {
        let vararg = get_range_checked(&args, prefix_arg_index..initial_suffix_args_index)?;
        match pattern.cardinality() {
            ArgMatcherCardinality::One => {
                // partition_args only stores non-exact patterns as varargs.
                return Err(Error::InternalInvariantViolation {
                    message: "vararg pattern should not have cardinality of one".to_string(),
                });
            }
            ArgMatcherCardinality::AtLeastOne => {
                if vararg.is_empty() {
                    return Err(Error::VarargMatcherDidNotMatchAnything {
                        program: program.to_string(),
                        matcher: pattern,
                    });
                } else {
                    for positional_arg in vararg {
                        let matched_arg = MatchedArg::new(
                            positional_arg.index,
                            pattern.arg_type(),
                            &positional_arg.value.clone(),
                        )?;
                        matched_args.push(matched_arg);
                    }
                }
            }
            ArgMatcherCardinality::ZeroOrMore => {
                for positional_arg in vararg {
                    let matched_arg = MatchedArg::new(
                        positional_arg.index,
                        pattern.arg_type(),
                        &positional_arg.value.clone(),
                    )?;
                    matched_args.push(matched_arg);
                }
            }
        }
    }

    // Match the fixed-cardinality suffix patterns against the trailing args.
    let suffix = get_range_checked(&args, initial_suffix_args_index..args.len())?;
    let mut suffix_arg_index = 0;
    for pattern in suffix_patterns {
        let n = pattern
            .cardinality()
            .is_exact()
            .ok_or(Error::InternalInvariantViolation {
                message: "expected exact cardinality".to_string(),
            })?;
        for positional_arg in &suffix[suffix_arg_index..suffix_arg_index + n] {
            let matched_arg = MatchedArg::new(
                positional_arg.index,
                pattern.arg_type(),
                &positional_arg.value.clone(),
            )?;
            matched_args.push(matched_arg);
        }
        suffix_arg_index += n;
    }

    // Any args not consumed by prefix/vararg/suffix are unexpected.
    if matched_args.len() < args.len() {
        let extra_args = get_range_checked(&args, matched_args.len()..args.len())?;
        Err(Error::UnexpectedArguments {
            program: program.to_string(),
            args: extra_args.to_vec(),
        })
    } else {
        Ok(matched_args)
    }
}

/// Result of splitting `arg_patterns` around the (at most one) vararg.
#[derive(Default)]
struct ParitionedArgs {
    num_prefix_args: usize,
    num_suffix_args: usize,
    prefix_patterns: Vec<ArgMatcher>,
    suffix_patterns: Vec<ArgMatcher>,
    vararg_pattern: Option<ArgMatcher>,
}

/// Splits `arg_patterns` into exact-cardinality prefix patterns, an optional
/// single vararg pattern, and exact-cardinality suffix patterns. Errors if
/// more than one vararg pattern is present.
fn partition_args(program: &str, arg_patterns: &Vec<ArgMatcher>) -> Result<ParitionedArgs> {
    let mut in_prefix = true;
    let mut partitioned_args = ParitionedArgs::default();
    for pattern in arg_patterns {
        match pattern.cardinality().is_exact() {
            Some(n) => {
                if in_prefix {
                    partitioned_args.prefix_patterns.push(pattern.clone());
                    partitioned_args.num_prefix_args += n;
                } else {
                    partitioned_args.suffix_patterns.push(pattern.clone());
                    partitioned_args.num_suffix_args += n;
                }
            }
            None => match partitioned_args.vararg_pattern {
                None => {
                    // First vararg seen: everything after it is suffix.
                    partitioned_args.vararg_pattern = Some(pattern.clone());
                    in_prefix = false;
                }
                Some(existing_pattern) => {
                    return Err(Error::MultipleVarargPatterns {
                        program: program.to_string(),
                        first: existing_pattern,
                        second: pattern.clone(),
                    });
                }
            },
        }
    }
    Ok(partitioned_args)
}

/// Slices `vec[range]`, returning a descriptive error instead of panicking
/// when the range is inverted or out of bounds.
fn get_range_checked<T>(vec: &[T], range: std::ops::Range<usize>) -> Result<&[T]> {
    if range.start > range.end {
        Err(Error::RangeStartExceedsEnd {
            start: range.start,
            end: range.end,
        })
    } else if range.end > vec.len() {
        Err(Error::RangeEndOutOfBounds {
            end: range.end,
            len: vec.len(),
        })
    } else {
        Ok(&vec[range])
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/policy.rs
codex-rs/execpolicy-legacy/src/policy.rs
use multimap::MultiMap;
use regex_lite::Error as RegexError;
use regex_lite::Regex;

use crate::ExecCall;
use crate::Forbidden;
use crate::MatchedExec;
use crate::NegativeExamplePassedCheck;
use crate::ProgramSpec;
use crate::error::Error;
use crate::error::Result;
use crate::policy_parser::ForbiddenProgramRegex;
use crate::program::PositiveExampleFailedCheck;

/// A fully-parsed exec policy: per-program specs plus global prohibitions.
pub struct Policy {
    /// Specs keyed by program name; multiple specs per program are tried in
    /// insertion order.
    programs: MultiMap<String, ProgramSpec>,
    forbidden_program_regexes: Vec<ForbiddenProgramRegex>,
    /// Single alternation regex built from all forbidden substrings, or
    /// `None` when no substrings were configured.
    forbidden_substrings_pattern: Option<Regex>,
}

impl Policy {
    /// Assembles a policy, compiling the forbidden substrings into one
    /// escaped alternation regex `(<s1>|<s2>|...)`.
    pub fn new(
        programs: MultiMap<String, ProgramSpec>,
        forbidden_program_regexes: Vec<ForbiddenProgramRegex>,
        forbidden_substrings: Vec<String>,
    ) -> std::result::Result<Self, RegexError> {
        let forbidden_substrings_pattern = if forbidden_substrings.is_empty() {
            None
        } else {
            // Escape each substring so it matches literally inside the regex.
            let escaped_substrings = forbidden_substrings
                .iter()
                .map(|s| regex_lite::escape(s))
                .collect::<Vec<_>>()
                .join("|");
            Some(Regex::new(&format!("({escaped_substrings})"))?)
        };
        Ok(Self {
            programs,
            forbidden_program_regexes,
            forbidden_substrings_pattern,
        })
    }

    /// Checks an exec call against the policy.
    ///
    /// Order of evaluation: forbidden program regexes, then forbidden
    /// substrings in any arg (both reported as `Ok(Forbidden)`), then each
    /// spec registered for the program. The first spec that matches wins;
    /// if none match, the error from the last spec tried is returned
    /// (or `NoSpecForProgram` when there are no specs at all).
    pub fn check(&self, exec_call: &ExecCall) -> Result<MatchedExec> {
        let ExecCall { program, args } = &exec_call;
        for ForbiddenProgramRegex { regex, reason } in &self.forbidden_program_regexes {
            if regex.is_match(program) {
                return Ok(MatchedExec::Forbidden {
                    cause: Forbidden::Program {
                        program: program.clone(),
                        exec_call: exec_call.clone(),
                    },
                    reason: reason.clone(),
                });
            }
        }

        for arg in args {
            if let Some(regex) = &self.forbidden_substrings_pattern
                && regex.is_match(arg)
            {
                return Ok(MatchedExec::Forbidden {
                    cause: Forbidden::Arg {
                        arg: arg.clone(),
                        exec_call: exec_call.clone(),
                    },
                    reason: format!("arg `{arg}` contains forbidden substring"),
                });
            }
        }

        // Default error if no spec is registered for this program.
        let mut last_err = Err(Error::NoSpecForProgram {
            program: program.clone(),
        });
        if let Some(spec_list) = self.programs.get_vec(program) {
            for spec in spec_list {
                match spec.check(exec_call) {
                    Ok(matched_exec) => return Ok(matched_exec),
                    Err(err) => {
                        // Keep trying other specs; remember the latest failure.
                        last_err = Err(err);
                    }
                }
            }
        }

        last_err
    }

    /// Self-test: returns every `should_match` example (across all specs)
    /// that fails its own spec's check.
    pub fn check_each_good_list_individually(&self) -> Vec<PositiveExampleFailedCheck> {
        let mut violations = Vec::new();
        for (_program, spec) in self.programs.flat_iter() {
            violations.extend(spec.verify_should_match_list());
        }
        violations
    }

    /// Self-test: returns every `should_not_match` example that its own
    /// spec's check unexpectedly accepts.
    pub fn check_each_bad_list_individually(&self) -> Vec<NegativeExamplePassedCheck> {
        let mut violations = Vec::new();
        for (_program, spec) in self.programs.flat_iter() {
            violations.extend(spec.verify_should_not_match_list());
        }
        violations
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/error.rs
codex-rs/execpolicy-legacy/src/error.rs
use std::path::PathBuf; use serde::Serialize; use crate::arg_matcher::ArgMatcher; use crate::arg_resolver::PositionalArg; use serde_with::DisplayFromStr; use serde_with::serde_as; pub type Result<T> = std::result::Result<T, Error>; #[serde_as] #[derive(Debug, Eq, PartialEq, Serialize)] #[serde(tag = "type")] pub enum Error { NoSpecForProgram { program: String, }, OptionMissingValue { program: String, option: String, }, OptionFollowedByOptionInsteadOfValue { program: String, option: String, value: String, }, UnknownOption { program: String, option: String, }, UnexpectedArguments { program: String, args: Vec<PositionalArg>, }, DoubleDashNotSupportedYet { program: String, }, MultipleVarargPatterns { program: String, first: ArgMatcher, second: ArgMatcher, }, RangeStartExceedsEnd { start: usize, end: usize, }, RangeEndOutOfBounds { end: usize, len: usize, }, PrefixOverlapsSuffix {}, NotEnoughArgs { program: String, args: Vec<PositionalArg>, arg_patterns: Vec<ArgMatcher>, }, InternalInvariantViolation { message: String, }, VarargMatcherDidNotMatchAnything { program: String, matcher: ArgMatcher, }, EmptyFileName {}, LiteralValueDidNotMatch { expected: String, actual: String, }, InvalidPositiveInteger { value: String, }, MissingRequiredOptions { program: String, options: Vec<String>, }, SedCommandNotProvablySafe { command: String, }, ReadablePathNotInReadableFolders { file: PathBuf, folders: Vec<PathBuf>, }, WriteablePathNotInWriteableFolders { file: PathBuf, folders: Vec<PathBuf>, }, CannotCheckRelativePath { file: PathBuf, }, CannotCanonicalizePath { file: String, #[serde_as(as = "DisplayFromStr")] error: std::io::ErrorKind, }, }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/exec_call.rs
codex-rs/execpolicy-legacy/src/exec_call.rs
use std::fmt::Display; use serde::Serialize; #[derive(Clone, Debug, Eq, PartialEq, Serialize)] pub struct ExecCall { pub program: String, pub args: Vec<String>, } impl ExecCall { pub fn new(program: &str, args: &[&str]) -> Self { Self { program: program.to_string(), args: args.iter().map(|&s| s.into()).collect(), } } } impl Display for ExecCall { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.program)?; for arg in &self.args { write!(f, " {arg}")?; } Ok(()) } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/arg_matcher.rs
codex-rs/execpolicy-legacy/src/arg_matcher.rs
#![allow(clippy::needless_lifetimes)]
use crate::arg_type::ArgType;
use crate::starlark::values::ValueLike;
use allocative::Allocative;
use derive_more::derive::Display;
use starlark::any::ProvidesStaticType;
use starlark::values::AllocValue;
use starlark::values::Heap;
use starlark::values::NoSerialize;
use starlark::values::StarlarkValue;
use starlark::values::UnpackValue;
use starlark::values::Value;
use starlark::values::starlark_value;
use starlark::values::string::StarlarkStr;

/// Patterns that lists of arguments should be compared against.
// NOTE(review): `#[display("{}", self)]` formats `self` with Display, which
// looks self-referential — confirm derive_more resolves this without
// recursing at runtime.
#[derive(Clone, Debug, Display, Eq, PartialEq, NoSerialize, ProvidesStaticType, Allocative)]
#[display("{}", self)]
pub enum ArgMatcher {
    /// Literal string value.
    Literal(String),

    /// We cannot say what type of value this should match, but it is *not* a
    /// file path.
    OpaqueNonFile,

    /// Required readable file.
    ReadableFile,

    /// Required writeable file.
    WriteableFile,

    /// Non-empty list of readable files.
    ReadableFiles,

    /// Non-empty list of readable files, or empty list, implying readable cwd.
    ReadableFilesOrCwd,

    /// Positive integer, like one that is required for `head -n`.
    PositiveInteger,

    /// Bespoke matcher for safe sed commands.
    SedCommand,

    /// Matches an arbitrary number of arguments without attributing any
    /// particular meaning to them. Caller is responsible for interpreting
    /// them.
    UnverifiedVarargs,
}

impl ArgMatcher {
    /// How many positional args this matcher consumes; drives the
    /// prefix/vararg/suffix partitioning in the arg resolver.
    pub fn cardinality(&self) -> ArgMatcherCardinality {
        match self {
            ArgMatcher::Literal(_)
            | ArgMatcher::OpaqueNonFile
            | ArgMatcher::ReadableFile
            | ArgMatcher::WriteableFile
            | ArgMatcher::PositiveInteger
            | ArgMatcher::SedCommand => ArgMatcherCardinality::One,
            ArgMatcher::ReadableFiles => ArgMatcherCardinality::AtLeastOne,
            ArgMatcher::ReadableFilesOrCwd | ArgMatcher::UnverifiedVarargs => {
                ArgMatcherCardinality::ZeroOrMore
            }
        }
    }

    /// The per-argument type each matched arg is tagged with. Note the
    /// multi-file matchers collapse to `ArgType::ReadableFile` per element.
    pub fn arg_type(&self) -> ArgType {
        match self {
            ArgMatcher::Literal(value) => ArgType::Literal(value.clone()),
            ArgMatcher::OpaqueNonFile => ArgType::OpaqueNonFile,
            ArgMatcher::ReadableFile => ArgType::ReadableFile,
            ArgMatcher::WriteableFile => ArgType::WriteableFile,
            ArgMatcher::ReadableFiles => ArgType::ReadableFile,
            ArgMatcher::ReadableFilesOrCwd => ArgType::ReadableFile,
            ArgMatcher::PositiveInteger => ArgType::PositiveInteger,
            ArgMatcher::SedCommand => ArgType::SedCommand,
            ArgMatcher::UnverifiedVarargs => ArgType::Unknown,
        }
    }
}

/// How many positional args a matcher consumes.
pub enum ArgMatcherCardinality {
    One,
    AtLeastOne,
    ZeroOrMore,
}

impl ArgMatcherCardinality {
    /// `Some(n)` when the matcher consumes exactly `n` args; `None` for
    /// variable-length matchers.
    pub fn is_exact(&self) -> Option<usize> {
        match self {
            ArgMatcherCardinality::One => Some(1),
            ArgMatcherCardinality::AtLeastOne => None,
            ArgMatcherCardinality::ZeroOrMore => None,
        }
    }
}

impl<'v> AllocValue<'v> for ArgMatcher {
    fn alloc_value(self, heap: &'v Heap) -> Value<'v> {
        heap.alloc_simple(self)
    }
}

#[starlark_value(type = "ArgMatcher")]
impl<'v> StarlarkValue<'v> for ArgMatcher {
    type Canonical = ArgMatcher;
}

impl<'v> UnpackValue<'v> for ArgMatcher {
    type Error = starlark::Error;

    /// Allows policy files to pass either an `ARG_*` constant or a bare
    /// string (interpreted as `ArgMatcher::Literal`).
    fn unpack_value_impl(value: Value<'v>) -> starlark::Result<Option<Self>> {
        if let Some(str) = value.downcast_ref::<StarlarkStr>() {
            Ok(Some(ArgMatcher::Literal(str.as_str().to_string())))
        } else {
            Ok(value.downcast_ref::<ArgMatcher>().cloned())
        }
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/opt.rs
codex-rs/execpolicy-legacy/src/opt.rs
#![allow(clippy::needless_lifetimes)] use crate::ArgType; use crate::starlark::values::ValueLike; use allocative::Allocative; use derive_more::derive::Display; use starlark::any::ProvidesStaticType; use starlark::values::AllocValue; use starlark::values::Heap; use starlark::values::NoSerialize; use starlark::values::StarlarkValue; use starlark::values::UnpackValue; use starlark::values::Value; use starlark::values::starlark_value; /// Command line option that takes a value. #[derive(Clone, Debug, Display, PartialEq, Eq, ProvidesStaticType, NoSerialize, Allocative)] #[display("opt({})", opt)] pub struct Opt { /// The option as typed on the command line, e.g., `-h` or `--help`. If /// it can be used in the `--name=value` format, then this should be /// `--name` (though this is subject to change). pub opt: String, pub meta: OptMeta, pub required: bool, } /// When defining an Opt, use as specific an OptMeta as possible. #[derive(Clone, Debug, Display, PartialEq, Eq, ProvidesStaticType, NoSerialize, Allocative)] #[display("{}", self)] pub enum OptMeta { /// Option does not take a value. Flag, /// Option takes a single value matching the specified type. Value(ArgType), } impl Opt { pub fn new(opt: String, meta: OptMeta, required: bool) -> Self { Self { opt, meta, required, } } pub fn name(&self) -> &str { &self.opt } } #[starlark_value(type = "Opt")] impl<'v> StarlarkValue<'v> for Opt { type Canonical = Opt; } impl<'v> UnpackValue<'v> for Opt { type Error = starlark::Error; fn unpack_value_impl(value: Value<'v>) -> starlark::Result<Option<Self>> { // TODO(mbolin): It fels like this should be doable without cloning? // Cannot simply consume the value? Ok(value.downcast_ref::<Opt>().cloned()) } } impl<'v> AllocValue<'v> for Opt { fn alloc_value(self, heap: &'v Heap) -> Value<'v> { heap.alloc_simple(self) } } #[starlark_value(type = "OptMeta")] impl<'v> StarlarkValue<'v> for OptMeta { type Canonical = OptMeta; }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/main.rs
codex-rs/execpolicy-legacy/src/main.rs
use anyhow::Result;
use clap::Parser;
use clap::Subcommand;
use codex_execpolicy_legacy::ExecCall;
use codex_execpolicy_legacy::MatchedExec;
use codex_execpolicy_legacy::Policy;
use codex_execpolicy_legacy::PolicyParser;
use codex_execpolicy_legacy::ValidExec;
use codex_execpolicy_legacy::get_default_policy;
use serde::Deserialize;
use serde::Serialize;
use serde::de;
use starlark::Error as StarlarkError;
use std::path::PathBuf;
use std::str::FromStr;

// Exit codes used when --require-safe is passed; 0 is used otherwise.
const MATCHED_BUT_WRITES_FILES_EXIT_CODE: i32 = 12;
const MIGHT_BE_SAFE_EXIT_CODE: i32 = 13;
const FORBIDDEN_EXIT_CODE: i32 = 14;

/// CLI for checking exec calls against an exec policy.
#[derive(Parser, Deserialize, Debug)]
#[command(version, about, long_about = None)]
pub struct Args {
    /// If the command fails the policy, exit with 13, but print parseable JSON
    /// to stdout.
    #[clap(long)]
    pub require_safe: bool,

    /// Path to the policy file.
    #[clap(long, short = 'p')]
    pub policy: Option<PathBuf>,

    #[command(subcommand)]
    pub command: Command,
}

#[derive(Clone, Debug, Deserialize, Subcommand)]
pub enum Command {
    /// Checks the command as if the arguments were the inputs to execv(3).
    Check {
        #[arg(trailing_var_arg = true)]
        command: Vec<String>,
    },

    /// Checks the command encoded as a JSON object.
    #[clap(name = "check-json")]
    CheckJson {
        /// JSON object with "program" (str) and "args" (list[str]) fields.
        #[serde(deserialize_with = "deserialize_from_json")]
        exec: ExecArg,
    },
}

/// Program + args as parsed from CLI input or JSON.
#[derive(Clone, Debug, Deserialize)]
pub struct ExecArg {
    pub program: String,
    #[serde(default)]
    pub args: Vec<String>,
}

/// Loads the policy (from --policy or the built-in default), checks the
/// requested command, prints the JSON verdict, and exits with the matching
/// exit code.
fn main() -> Result<()> {
    env_logger::init();
    let args = Args::parse();
    let policy = match args.policy {
        Some(policy) => {
            let policy_source = policy.to_string_lossy().to_string();
            let unparsed_policy = std::fs::read_to_string(policy)?;
            let parser = PolicyParser::new(&policy_source, &unparsed_policy);
            parser.parse()
        }
        None => get_default_policy(),
    };
    let policy = policy.map_err(StarlarkError::into_anyhow)?;
    let exec = match args.command {
        Command::Check { command } => match command.split_first() {
            // First trailing arg is the program; the rest are its args.
            Some((first, rest)) => ExecArg {
                program: first.to_string(),
                args: rest.to_vec(),
            },
            None => {
                eprintln!("no command provided");
                std::process::exit(1);
            }
        },
        Command::CheckJson { exec } => exec,
    };

    let (output, exit_code) = check_command(&policy, exec, args.require_safe);
    let json = serde_json::to_string(&output)?;
    println!("{json}");
    std::process::exit(exit_code);
}

/// Maps a policy verdict to (JSON output, exit code). When `check` (i.e.
/// --require-safe) is false, the exit code is always 0 and only the JSON
/// conveys the verdict.
fn check_command(
    policy: &Policy,
    ExecArg { program, args }: ExecArg,
    check: bool,
) -> (Output, i32) {
    let exec_call = ExecCall { program, args };
    match policy.check(&exec_call) {
        Ok(MatchedExec::Match { exec }) => {
            if exec.might_write_files() {
                let exit_code = if check {
                    MATCHED_BUT_WRITES_FILES_EXIT_CODE
                } else {
                    0
                };
                (Output::Match { r#match: exec }, exit_code)
            } else {
                (Output::Safe { r#match: exec }, 0)
            }
        }
        Ok(MatchedExec::Forbidden { reason, cause }) => {
            let exit_code = if check { FORBIDDEN_EXIT_CODE } else { 0 };
            (Output::Forbidden { reason, cause }, exit_code)
        }
        Err(err) => {
            // A check error means "unverified", not necessarily unsafe.
            let exit_code = if check { MIGHT_BE_SAFE_EXIT_CODE } else { 0 };
            (Output::Unverified { error: err }, exit_code)
        }
    }
}

/// JSON shape printed to stdout, tagged by a "result" field.
#[derive(Debug, Serialize)]
#[serde(tag = "result")]
pub enum Output {
    /// The command is verified as safe.
    #[serde(rename = "safe")]
    Safe { r#match: ValidExec },

    /// The command has matched a rule in the policy, but the caller should
    /// decide whether it is "safe" given the files it wants to write.
    #[serde(rename = "match")]
    Match { r#match: ValidExec },

    /// The user is forbidden from running the command.
    #[serde(rename = "forbidden")]
    Forbidden {
        reason: String,
        cause: codex_execpolicy_legacy::Forbidden,
    },

    /// The safety of the command could not be verified.
    #[serde(rename = "unverified")]
    Unverified {
        error: codex_execpolicy_legacy::Error,
    },
}

/// Serde helper: the CLI receives the exec spec as a JSON *string*, which is
/// then parsed into an `ExecArg`.
fn deserialize_from_json<'de, D>(deserializer: D) -> Result<ExecArg, D::Error>
where
    D: de::Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    let decoded = serde_json::from_str(&s)
        .map_err(|e| serde::de::Error::custom(format!("JSON parse error: {e}")))?;
    Ok(decoded)
}

impl FromStr for ExecArg {
    type Err = anyhow::Error;

    /// Allows clap to parse the JSON argument directly into an `ExecArg`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        serde_json::from_str(s).map_err(Into::into)
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/execv_checker.rs
codex-rs/execpolicy-legacy/src/execv_checker.rs
use std::borrow::Cow; use std::ffi::OsString; use std::path::Path; use std::path::PathBuf; use crate::ArgType; use crate::Error::CannotCanonicalizePath; use crate::Error::CannotCheckRelativePath; use crate::Error::ReadablePathNotInReadableFolders; use crate::Error::WriteablePathNotInWriteableFolders; use crate::ExecCall; use crate::MatchedExec; use crate::Policy; use crate::Result; use crate::ValidExec; use path_absolutize::*; macro_rules! check_file_in_folders { ($file:expr, $folders:expr, $error:ident) => { if !$folders.iter().any(|folder| $file.starts_with(folder)) { return Err($error { file: $file.clone(), folders: $folders.to_vec(), }); } }; } pub struct ExecvChecker { execv_policy: Policy, } impl ExecvChecker { pub fn new(execv_policy: Policy) -> Self { Self { execv_policy } } pub fn r#match(&self, exec_call: &ExecCall) -> Result<MatchedExec> { self.execv_policy.check(exec_call) } /// The caller is responsible for ensuring readable_folders and /// writeable_folders are in canonical form. 
pub fn check( &self, valid_exec: ValidExec, cwd: &Option<OsString>, readable_folders: &[PathBuf], writeable_folders: &[PathBuf], ) -> Result<String> { for (arg_type, value) in valid_exec .args .into_iter() .map(|arg| (arg.r#type, arg.value)) .chain( valid_exec .opts .into_iter() .map(|opt| (opt.r#type, opt.value)), ) { match arg_type { ArgType::ReadableFile => { let readable_file = ensure_absolute_path(&value, cwd)?; check_file_in_folders!( readable_file, readable_folders, ReadablePathNotInReadableFolders ); } ArgType::WriteableFile => { let writeable_file = ensure_absolute_path(&value, cwd)?; check_file_in_folders!( writeable_file, writeable_folders, WriteablePathNotInWriteableFolders ); } ArgType::OpaqueNonFile | ArgType::Unknown | ArgType::PositiveInteger | ArgType::SedCommand | ArgType::Literal(_) => { continue; } } } let mut program = valid_exec.program.to_string(); for system_path in valid_exec.system_path { if is_executable_file(&system_path) { program = system_path; break; } } Ok(program) } } fn ensure_absolute_path(path: &str, cwd: &Option<OsString>) -> Result<PathBuf> { let file = PathBuf::from(path); let result = if file.is_relative() { match cwd { Some(cwd) => file.absolutize_from(cwd), None => return Err(CannotCheckRelativePath { file }), } } else { file.absolutize() }; result .map(Cow::into_owned) .map_err(|error| CannotCanonicalizePath { file: path.to_string(), error: error.kind(), }) } fn is_executable_file(path: &str) -> bool { let file_path = Path::new(path); if let Ok(metadata) = std::fs::metadata(file_path) { #[cfg(unix)] { use std::os::unix::fs::PermissionsExt; let permissions = metadata.permissions(); // Check if the file is executable (by checking the executable bit for the owner) return metadata.is_file() && (permissions.mode() & 0o111 != 0); } #[cfg(windows)] { // TODO(mbolin): Check against PATHEXT environment variable. 
return metadata.is_file(); } } false } #[cfg(test)] mod tests { use tempfile::TempDir; use super::*; use crate::MatchedArg; use crate::PolicyParser; use anyhow::Result; use anyhow::anyhow; fn setup(fake_cp: &Path) -> ExecvChecker { let source = format!( r#" define_program( program="cp", args=[ARG_RFILE, ARG_WFILE], system_path=[{fake_cp:?}] ) "# ); let parser = PolicyParser::new("#test", &source); let policy = parser.parse().unwrap(); ExecvChecker::new(policy) } #[test] fn test_check_valid_input_files() -> Result<()> { let temp_dir = TempDir::new()?; // Create an executable file that can be used with the system_path arg. let fake_cp = temp_dir.path().join("cp"); #[cfg(unix)] { use std::os::unix::fs::PermissionsExt; let fake_cp_file = std::fs::File::create(&fake_cp)?; let mut permissions = fake_cp_file.metadata()?.permissions(); permissions.set_mode(0o755); std::fs::set_permissions(&fake_cp, permissions)?; } #[cfg(windows)] { std::fs::File::create(&fake_cp)?; } // Create root_path and reference to files under the root. let root_path = temp_dir.path().to_path_buf(); let source_path = root_path.join("source"); let dest_path = root_path.join("dest"); let cp = fake_cp.to_str().unwrap().to_string(); let root = root_path.to_str().unwrap().to_string(); let source = source_path.to_str().unwrap().to_string(); let dest = dest_path.to_str().unwrap().to_string(); let cwd = Some(root_path.clone().into()); let checker = setup(&fake_cp); let exec_call = ExecCall { program: "cp".into(), args: vec![source, dest.clone()], }; let valid_exec = match checker.r#match(&exec_call).map_err(|e| anyhow!("{e:?}"))? { MatchedExec::Match { exec } => exec, unexpected => panic!("Expected a safe exec but got {unexpected:?}"), }; // No readable or writeable folders specified. assert_eq!( checker.check(valid_exec.clone(), &cwd, &[], &[]), Err(ReadablePathNotInReadableFolders { file: source_path, folders: vec![] }), ); // Only readable folders specified. 
assert_eq!( checker.check( valid_exec.clone(), &cwd, std::slice::from_ref(&root_path), &[] ), Err(WriteablePathNotInWriteableFolders { file: dest_path.clone(), folders: vec![] }), ); // Both readable and writeable folders specified. assert_eq!( checker.check( valid_exec, &cwd, std::slice::from_ref(&root_path), std::slice::from_ref(&root_path) ), Ok(cp.clone()), ); // Args are the readable and writeable folders, not files within the // folders. let exec_call_folders_as_args = ExecCall { program: "cp".into(), args: vec![root.clone(), root], }; let valid_exec_call_folders_as_args = match checker .r#match(&exec_call_folders_as_args) .map_err(|e| anyhow!("{e:?}"))? { MatchedExec::Match { exec } => exec, _ => panic!("Expected a safe exec"), }; assert_eq!( checker.check( valid_exec_call_folders_as_args, &cwd, std::slice::from_ref(&root_path), std::slice::from_ref(&root_path) ), Ok(cp), ); // Specify a parent of a readable folder as input. let exec_with_parent_of_readable_folder = ValidExec { program: "cp".into(), args: vec![ MatchedArg::new( 0, ArgType::ReadableFile, root_path.parent().unwrap().to_str().unwrap(), ) .map_err(|e| anyhow!("{e:?}"))?, MatchedArg::new(1, ArgType::WriteableFile, &dest).map_err(|e| anyhow!("{e:?}"))?, ], ..Default::default() }; assert_eq!( checker.check( exec_with_parent_of_readable_folder, &cwd, std::slice::from_ref(&root_path), std::slice::from_ref(&dest_path) ), Err(ReadablePathNotInReadableFolders { file: root_path.parent().unwrap().to_path_buf(), folders: vec![root_path.clone()] }), ); Ok(()) } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/src/arg_type.rs
codex-rs/execpolicy-legacy/src/arg_type.rs
#![allow(clippy::needless_lifetimes)] use crate::error::Error; use crate::error::Result; use crate::sed_command::parse_sed_command; use allocative::Allocative; use derive_more::derive::Display; use serde::Serialize; use starlark::any::ProvidesStaticType; use starlark::values::StarlarkValue; use starlark::values::starlark_value; #[derive(Debug, Clone, Display, Eq, PartialEq, ProvidesStaticType, Allocative, Serialize)] #[display("{}", self)] pub enum ArgType { Literal(String), /// We cannot say what this argument represents, but it is *not* a file path. OpaqueNonFile, /// A file (or directory) that can be expected to be read as part of this command. ReadableFile, /// A file (or directory) that can be expected to be written as part of this command. WriteableFile, /// Positive integer, like one that is required for `head -n`. PositiveInteger, /// Bespoke arg type for a safe sed command. SedCommand, /// Type is unknown: it may or may not be a file. Unknown, } impl ArgType { pub fn validate(&self, value: &str) -> Result<()> { match self { ArgType::Literal(literal_value) => { if value != *literal_value { Err(Error::LiteralValueDidNotMatch { expected: literal_value.clone(), actual: value.to_string(), }) } else { Ok(()) } } ArgType::ReadableFile => { if value.is_empty() { Err(Error::EmptyFileName {}) } else { Ok(()) } } ArgType::WriteableFile => { if value.is_empty() { Err(Error::EmptyFileName {}) } else { Ok(()) } } ArgType::OpaqueNonFile | ArgType::Unknown => Ok(()), ArgType::PositiveInteger => match value.parse::<u64>() { Ok(0) => Err(Error::InvalidPositiveInteger { value: value.to_string(), }), Ok(_) => Ok(()), Err(_) => Err(Error::InvalidPositiveInteger { value: value.to_string(), }), }, ArgType::SedCommand => parse_sed_command(value), } } pub fn might_write_file(&self) -> bool { match self { ArgType::WriteableFile | ArgType::Unknown => true, ArgType::Literal(_) | ArgType::OpaqueNonFile | ArgType::PositiveInteger | ArgType::ReadableFile | ArgType::SedCommand => false, 
} } } #[starlark_value(type = "ArgType")] impl<'v> StarlarkValue<'v> for ArgType { type Canonical = ArgType; }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/all.rs
codex-rs/execpolicy-legacy/tests/all.rs
// Single integration test binary that aggregates all test modules. // The submodules live in `tests/suite/`. mod suite;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/suite/cp.rs
codex-rs/execpolicy-legacy/tests/suite/cp.rs
extern crate codex_execpolicy_legacy;

use codex_execpolicy_legacy::ArgMatcher;
use codex_execpolicy_legacy::ArgType;
use codex_execpolicy_legacy::Error;
use codex_execpolicy_legacy::ExecCall;
use codex_execpolicy_legacy::MatchedArg;
use codex_execpolicy_legacy::MatchedExec;
use codex_execpolicy_legacy::Policy;
use codex_execpolicy_legacy::Result;
use codex_execpolicy_legacy::ValidExec;
use codex_execpolicy_legacy::get_default_policy;

/// Loads the default policy shared by every `cp` test.
#[expect(clippy::expect_used)]
fn setup() -> Policy {
    get_default_policy().expect("failed to load default policy")
}

/// `cp` with no arguments leaves both arg matchers unsatisfied.
#[test]
fn test_cp_no_args() {
    let policy = setup();
    let call = ExecCall::new("cp", &[]);
    let expected = Err(Error::NotEnoughArgs {
        program: "cp".to_string(),
        args: vec![],
        arg_patterns: vec![ArgMatcher::ReadableFiles, ArgMatcher::WriteableFile],
    });
    assert_eq!(expected, policy.check(&call))
}

/// A single argument leaves the vararg readable-files matcher empty.
#[test]
fn test_cp_one_arg() {
    let policy = setup();
    let call = ExecCall::new("cp", &["foo/bar"]);
    let expected = Err(Error::VarargMatcherDidNotMatchAnything {
        program: "cp".to_string(),
        matcher: ArgMatcher::ReadableFiles,
    });
    assert_eq!(expected, policy.check(&call));
}

/// One source plus one destination: index 0 is readable, index 1 writeable.
#[test]
fn test_cp_one_file() -> Result<()> {
    let policy = setup();
    let call = ExecCall::new("cp", &["foo/bar", "../baz"]);
    let expected = MatchedExec::Match {
        exec: ValidExec::new(
            "cp",
            vec![
                MatchedArg::new(0, ArgType::ReadableFile, "foo/bar")?,
                MatchedArg::new(1, ArgType::WriteableFile, "../baz")?,
            ],
            &["/bin/cp", "/usr/bin/cp"],
        ),
    };
    assert_eq!(Ok(expected), policy.check(&call));
    Ok(())
}

/// With several arguments, all but the last are readable sources and the
/// final one is the writeable destination.
#[test]
fn test_cp_multiple_files() -> Result<()> {
    let policy = setup();
    let call = ExecCall::new("cp", &["foo", "bar", "baz"]);
    let expected = MatchedExec::Match {
        exec: ValidExec::new(
            "cp",
            vec![
                MatchedArg::new(0, ArgType::ReadableFile, "foo")?,
                MatchedArg::new(1, ArgType::ReadableFile, "bar")?,
                MatchedArg::new(2, ArgType::WriteableFile, "baz")?,
            ],
            &["/bin/cp", "/usr/bin/cp"],
        ),
    };
    assert_eq!(Ok(expected), policy.check(&call));
    Ok(())
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/suite/ls.rs
codex-rs/execpolicy-legacy/tests/suite/ls.rs
extern crate codex_execpolicy_legacy;

use codex_execpolicy_legacy::ArgType;
use codex_execpolicy_legacy::Error;
use codex_execpolicy_legacy::ExecCall;
use codex_execpolicy_legacy::MatchedArg;
use codex_execpolicy_legacy::MatchedExec;
use codex_execpolicy_legacy::MatchedFlag;
use codex_execpolicy_legacy::Policy;
use codex_execpolicy_legacy::Result;
use codex_execpolicy_legacy::ValidExec;
use codex_execpolicy_legacy::get_default_policy;

/// Loads the default policy shared by every `ls` test.
#[expect(clippy::expect_used)]
fn setup() -> Policy {
    get_default_policy().expect("failed to load default policy")
}

/// Bare `ls` matches with no flags or args.
#[test]
fn test_ls_no_args() {
    let policy = setup();
    let call = ExecCall::new("ls", &[]);
    let expected = MatchedExec::Match {
        exec: ValidExec::new("ls", vec![], &["/bin/ls", "/usr/bin/ls"]),
    };
    assert_eq!(Ok(expected), policy.check(&call));
}

/// Known flags given separately are each recorded on the match.
#[test]
fn test_ls_dash_a_dash_l() {
    let policy = setup();
    let args = &["-a", "-l"];
    let call = ExecCall::new("ls", args);
    let expected = MatchedExec::Match {
        exec: ValidExec {
            program: "ls".into(),
            flags: vec![MatchedFlag::new("-a"), MatchedFlag::new("-l")],
            system_path: vec!["/bin/ls".into(), "/usr/bin/ls".into()],
            ..Default::default()
        },
    };
    assert_eq!(Ok(expected), policy.check(&call));
}

#[test]
fn test_ls_dash_z() {
    let policy = setup();
    // -z is currently an invalid option for ls, but it has so many options,
    // perhaps it will get added at some point...
    let call = ExecCall::new("ls", &["-z"]);
    let expected = Err(Error::UnknownOption {
        program: "ls".into(),
        option: "-z".into(),
    });
    assert_eq!(expected, policy.check(&call));
}

#[test]
fn test_ls_dash_al() {
    let policy = setup();
    // This currently fails, but it should pass once option_bundling=True is
    // implemented.
    let call = ExecCall::new("ls", &["-al"]);
    let expected = Err(Error::UnknownOption {
        program: "ls".into(),
        option: "-al".into(),
    });
    assert_eq!(expected, policy.check(&call));
}

/// A single positional argument is treated as a readable file.
#[test]
fn test_ls_one_file_arg() -> Result<()> {
    let policy = setup();
    let call = ExecCall::new("ls", &["foo"]);
    let expected = MatchedExec::Match {
        exec: ValidExec::new(
            "ls",
            vec![MatchedArg::new(0, ArgType::ReadableFile, "foo")?],
            &["/bin/ls", "/usr/bin/ls"],
        ),
    };
    assert_eq!(Ok(expected), policy.check(&call));
    Ok(())
}

/// Several positional arguments are all treated as readable files.
#[test]
fn test_ls_multiple_file_args() -> Result<()> {
    let policy = setup();
    let call = ExecCall::new("ls", &["foo", "bar", "baz"]);
    let expected = MatchedExec::Match {
        exec: ValidExec::new(
            "ls",
            vec![
                MatchedArg::new(0, ArgType::ReadableFile, "foo")?,
                MatchedArg::new(1, ArgType::ReadableFile, "bar")?,
                MatchedArg::new(2, ArgType::ReadableFile, "baz")?,
            ],
            &["/bin/ls", "/usr/bin/ls"],
        ),
    };
    assert_eq!(Ok(expected), policy.check(&call));
    Ok(())
}

/// Flags and files mix: arg indices reflect positions in the original argv.
#[test]
fn test_ls_multiple_flags_and_file_args() -> Result<()> {
    let policy = setup();
    let call = ExecCall::new("ls", &["-l", "-a", "foo", "bar", "baz"]);
    let expected = MatchedExec::Match {
        exec: ValidExec {
            program: "ls".into(),
            flags: vec![MatchedFlag::new("-l"), MatchedFlag::new("-a")],
            args: vec![
                MatchedArg::new(2, ArgType::ReadableFile, "foo")?,
                MatchedArg::new(3, ArgType::ReadableFile, "bar")?,
                MatchedArg::new(4, ArgType::ReadableFile, "baz")?,
            ],
            system_path: vec!["/bin/ls".into(), "/usr/bin/ls".into()],
            ..Default::default()
        },
    };
    assert_eq!(Ok(expected), policy.check(&call));
    Ok(())
}

#[test]
fn test_flags_after_file_args() -> Result<()> {
    let policy = setup();
    // TODO(mbolin): While this is "safe" in that it will not do anything bad
    // to the user's machine, it will fail because apparently `ls` does not
    // allow flags after file arguments (as some commands do). We should
    // extend define_program() to make this part of the configuration so that
    // this command is disallowed.
    let call = ExecCall::new("ls", &["foo", "-l"]);
    let expected = MatchedExec::Match {
        exec: ValidExec {
            program: "ls".into(),
            flags: vec![MatchedFlag::new("-l")],
            args: vec![MatchedArg::new(0, ArgType::ReadableFile, "foo")?],
            system_path: vec!["/bin/ls".into(), "/usr/bin/ls".into()],
            ..Default::default()
        },
    };
    assert_eq!(Ok(expected), policy.check(&call));
    Ok(())
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/suite/pwd.rs
codex-rs/execpolicy-legacy/tests/suite/pwd.rs
extern crate codex_execpolicy_legacy;

// NOTE(review): this import looks redundant — `vec!` comes from the prelude.
// Kept as-is to avoid touching the file's import surface.
use std::vec;

use codex_execpolicy_legacy::Error;
use codex_execpolicy_legacy::ExecCall;
use codex_execpolicy_legacy::MatchedExec;
use codex_execpolicy_legacy::MatchedFlag;
use codex_execpolicy_legacy::Policy;
use codex_execpolicy_legacy::PositionalArg;
use codex_execpolicy_legacy::ValidExec;
use codex_execpolicy_legacy::get_default_policy;

/// Loads the default policy shared by every `pwd` test.
#[expect(clippy::expect_used)]
fn setup() -> Policy {
    get_default_policy().expect("failed to load default policy")
}

/// Bare `pwd` is accepted.
#[test]
fn test_pwd_no_args() {
    let policy = setup();
    let call = ExecCall::new("pwd", &[]);
    let expected = MatchedExec::Match {
        exec: ValidExec {
            program: "pwd".into(),
            ..Default::default()
        },
    };
    assert_eq!(Ok(expected), policy.check(&call));
}

/// `pwd -L` is a known flag.
#[test]
fn test_pwd_capital_l() {
    let policy = setup();
    let call = ExecCall::new("pwd", &["-L"]);
    let expected = MatchedExec::Match {
        exec: ValidExec {
            program: "pwd".into(),
            flags: vec![MatchedFlag::new("-L")],
            ..Default::default()
        },
    };
    assert_eq!(Ok(expected), policy.check(&call));
}

/// `pwd -P` is a known flag.
#[test]
fn test_pwd_capital_p() {
    let policy = setup();
    let call = ExecCall::new("pwd", &["-P"]);
    let expected = MatchedExec::Match {
        exec: ValidExec {
            program: "pwd".into(),
            flags: vec![MatchedFlag::new("-P")],
            ..Default::default()
        },
    };
    assert_eq!(Ok(expected), policy.check(&call));
}

/// `pwd` accepts no positional arguments.
#[test]
fn test_pwd_extra_args() {
    let policy = setup();
    let call = ExecCall::new("pwd", &["foo", "bar"]);
    let expected = Err(Error::UnexpectedArguments {
        program: "pwd".to_string(),
        args: vec![
            PositionalArg {
                index: 0,
                value: "foo".to_string(),
            },
            PositionalArg {
                index: 1,
                value: "bar".to_string(),
            },
        ],
    });
    assert_eq!(expected, policy.check(&call));
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/suite/literal.rs
codex-rs/execpolicy-legacy/tests/suite/literal.rs
use codex_execpolicy_legacy::ArgType;
use codex_execpolicy_legacy::Error;
use codex_execpolicy_legacy::ExecCall;
use codex_execpolicy_legacy::MatchedArg;
use codex_execpolicy_legacy::MatchedExec;
use codex_execpolicy_legacy::PolicyParser;
use codex_execpolicy_legacy::Result;
use codex_execpolicy_legacy::ValidExec;

extern crate codex_execpolicy_legacy;

/// Bare strings in a policy's `args` list act as literal matchers: the
/// exact token must appear in that position, or the call is rejected.
#[test]
fn test_invalid_subcommand() -> Result<()> {
    let unparsed_policy = r#"
define_program(
    program="fake_executable",
    args=["subcommand", "sub-subcommand"],
)
"#;
    let parser = PolicyParser::new("test_invalid_subcommand", unparsed_policy);
    let policy = parser.parse().expect("failed to parse policy");

    // Exact literals in both positions: the call matches.
    let valid_call = ExecCall::new("fake_executable", &["subcommand", "sub-subcommand"]);
    let expected_match = MatchedExec::Match {
        exec: ValidExec::new(
            "fake_executable",
            vec![
                MatchedArg::new(0, ArgType::Literal("subcommand".to_string()), "subcommand")?,
                MatchedArg::new(
                    1,
                    ArgType::Literal("sub-subcommand".to_string()),
                    "sub-subcommand",
                )?,
            ],
            &[],
        ),
    };
    assert_eq!(Ok(expected_match), policy.check(&valid_call));

    // A mismatched second token is rejected with the expected literal.
    let invalid_call = ExecCall::new("fake_executable", &["subcommand", "not-a-real-subcommand"]);
    assert_eq!(
        Err(Error::LiteralValueDidNotMatch {
            expected: "sub-subcommand".to_string(),
            actual: "not-a-real-subcommand".to_string()
        }),
        policy.check(&invalid_call)
    );
    Ok(())
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/suite/good.rs
codex-rs/execpolicy-legacy/tests/suite/good.rs
use codex_execpolicy_legacy::PositiveExampleFailedCheck;
use codex_execpolicy_legacy::get_default_policy;

/// Every example on the default policy's "good" list must be accepted.
#[test]
fn verify_everything_in_good_list_is_allowed() {
    let policy = get_default_policy().expect("failed to load default policy");
    let failures = policy.check_each_good_list_individually();
    assert_eq!(Vec::<PositiveExampleFailedCheck>::new(), failures);
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/suite/bad.rs
codex-rs/execpolicy-legacy/tests/suite/bad.rs
use codex_execpolicy_legacy::NegativeExamplePassedCheck;
use codex_execpolicy_legacy::get_default_policy;

/// Every example on the default policy's "bad" list must be rejected.
#[test]
fn verify_everything_in_bad_list_is_rejected() {
    let policy = get_default_policy().expect("failed to load default policy");
    let failures = policy.check_each_bad_list_individually();
    assert_eq!(Vec::<NegativeExamplePassedCheck>::new(), failures);
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/suite/mod.rs
codex-rs/execpolicy-legacy/tests/suite/mod.rs
// Aggregates all former standalone integration tests as modules. mod bad; mod cp; mod good; mod head; mod literal; mod ls; mod parse_sed_command; mod pwd; mod sed;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/suite/head.rs
codex-rs/execpolicy-legacy/tests/suite/head.rs
use codex_execpolicy_legacy::ArgMatcher;
use codex_execpolicy_legacy::ArgType;
use codex_execpolicy_legacy::Error;
use codex_execpolicy_legacy::ExecCall;
use codex_execpolicy_legacy::MatchedArg;
use codex_execpolicy_legacy::MatchedExec;
use codex_execpolicy_legacy::MatchedOpt;
use codex_execpolicy_legacy::Policy;
use codex_execpolicy_legacy::Result;
use codex_execpolicy_legacy::ValidExec;
use codex_execpolicy_legacy::get_default_policy;

extern crate codex_execpolicy_legacy;

/// Loads the default policy shared by every `head` test.
#[expect(clippy::expect_used)]
fn setup() -> Policy {
    get_default_policy().expect("failed to load default policy")
}

#[test]
fn test_head_no_args() {
    let policy = setup();
    let call = ExecCall::new("head", &[]);
    // It is actually valid to call `head` without arguments: it will read
    // from stdin instead of from a file. Though recall that a command
    // rejected by the policy is not "unsafe:" it just means that this
    // library cannot *guarantee* that the command is safe.
    //
    // If we start verifying individual components of a shell command, such
    // as: `find . -name | head -n 10`, then it might be important to allow
    // the no-arg case.
    let expected = Err(Error::VarargMatcherDidNotMatchAnything {
        program: "head".to_string(),
        matcher: ArgMatcher::ReadableFiles,
    });
    assert_eq!(expected, policy.check(&call))
}

/// A lone filename argument is a readable file.
#[test]
fn test_head_one_file_no_flags() -> Result<()> {
    let policy = setup();
    let call = ExecCall::new("head", &["src/extension.ts"]);
    let expected = MatchedExec::Match {
        exec: ValidExec::new(
            "head",
            vec![MatchedArg::new(
                0,
                ArgType::ReadableFile,
                "src/extension.ts",
            )?],
            &["/bin/head", "/usr/bin/head"],
        ),
    };
    assert_eq!(Ok(expected), policy.check(&call));
    Ok(())
}

/// `-n 100` is recorded as an option with a positive-integer value; the file
/// arg keeps its original argv index (2).
#[test]
fn test_head_one_flag_one_file() -> Result<()> {
    let policy = setup();
    let call = ExecCall::new("head", &["-n", "100", "src/extension.ts"]);
    let expected = MatchedExec::Match {
        exec: ValidExec {
            program: "head".to_string(),
            flags: vec![],
            opts: vec![
                MatchedOpt::new("-n", "100", ArgType::PositiveInteger).expect("should validate"),
            ],
            args: vec![MatchedArg::new(
                2,
                ArgType::ReadableFile,
                "src/extension.ts",
            )?],
            system_path: vec!["/bin/head".to_string(), "/usr/bin/head".to_string()],
        },
    };
    assert_eq!(Ok(expected), policy.check(&call));
    Ok(())
}

/// `-n 0` is rejected: the count must be strictly positive.
#[test]
fn test_head_invalid_n_as_0() {
    let policy = setup();
    let call = ExecCall::new("head", &["-n", "0", "src/extension.ts"]);
    let expected = Err(Error::InvalidPositiveInteger {
        value: "0".to_string(),
    });
    assert_eq!(expected, policy.check(&call))
}

/// A non-integral float for `-n` is rejected.
#[test]
fn test_head_invalid_n_as_nonint_float() {
    let policy = setup();
    let call = ExecCall::new("head", &["-n", "1.5", "src/extension.ts"]);
    let expected = Err(Error::InvalidPositiveInteger {
        value: "1.5".to_string(),
    });
    assert_eq!(expected, policy.check(&call))
}

/// Even a float with an integral value is rejected for `-n`.
#[test]
fn test_head_invalid_n_as_float() {
    let policy = setup();
    let call = ExecCall::new("head", &["-n", "1.0", "src/extension.ts"]);
    let expected = Err(Error::InvalidPositiveInteger {
        value: "1.0".to_string(),
    });
    assert_eq!(expected, policy.check(&call))
}

/// `-1` starts with a dash, so it is treated as another option rather than
/// the value of `-n`.
#[test]
fn test_head_invalid_n_as_negative_int() {
    let policy = setup();
    let call = ExecCall::new("head", &["-n", "-1", "src/extension.ts"]);
    let expected = Err(Error::OptionFollowedByOptionInsteadOfValue {
        program: "head".to_string(),
        option: "-n".to_string(),
        value: "-1".to_string(),
    });
    assert_eq!(expected, policy.check(&call))
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/suite/sed.rs
codex-rs/execpolicy-legacy/tests/suite/sed.rs
extern crate codex_execpolicy_legacy;

use codex_execpolicy_legacy::ArgType;
use codex_execpolicy_legacy::Error;
use codex_execpolicy_legacy::ExecCall;
use codex_execpolicy_legacy::MatchedArg;
use codex_execpolicy_legacy::MatchedExec;
use codex_execpolicy_legacy::MatchedFlag;
use codex_execpolicy_legacy::MatchedOpt;
use codex_execpolicy_legacy::Policy;
use codex_execpolicy_legacy::Result;
use codex_execpolicy_legacy::ValidExec;
use codex_execpolicy_legacy::get_default_policy;

/// Loads the default policy shared by every `sed` test.
#[expect(clippy::expect_used)]
fn setup() -> Policy {
    get_default_policy().expect("failed to load default policy")
}

/// A positional range-print script (`122,202p`) is matched as a SedCommand.
#[test]
fn test_sed_print_specific_lines() -> Result<()> {
    let policy = setup();
    let call = ExecCall::new("sed", &["-n", "122,202p", "hello.txt"]);
    let expected = MatchedExec::Match {
        exec: ValidExec {
            program: "sed".to_string(),
            flags: vec![MatchedFlag::new("-n")],
            args: vec![
                MatchedArg::new(1, ArgType::SedCommand, "122,202p")?,
                MatchedArg::new(2, ArgType::ReadableFile, "hello.txt")?,
            ],
            system_path: vec!["/usr/bin/sed".to_string()],
            ..Default::default()
        },
    };
    assert_eq!(Ok(expected), policy.check(&call));
    Ok(())
}

/// The same script passed via `-e` is recorded as an option value instead.
#[test]
fn test_sed_print_specific_lines_with_e_flag() -> Result<()> {
    let policy = setup();
    let call = ExecCall::new("sed", &["-n", "-e", "122,202p", "hello.txt"]);
    let expected = MatchedExec::Match {
        exec: ValidExec {
            program: "sed".to_string(),
            flags: vec![MatchedFlag::new("-n")],
            opts: vec![
                MatchedOpt::new("-e", "122,202p", ArgType::SedCommand).expect("should validate"),
            ],
            args: vec![MatchedArg::new(3, ArgType::ReadableFile, "hello.txt")?],
            system_path: vec!["/usr/bin/sed".to_string()],
        },
    };
    assert_eq!(Ok(expected), policy.check(&call));
    Ok(())
}

/// A substitution script is not on the provably-safe list and is rejected.
#[test]
fn test_sed_reject_dangerous_command() {
    let policy = setup();
    let call = ExecCall::new("sed", &["-e", "s/y/echo hi/e", "hello.txt"]);
    let expected = Err(Error::SedCommandNotProvablySafe {
        command: "s/y/echo hi/e".to_string(),
    });
    assert_eq!(expected, policy.check(&call));
}

/// A bare script argument without `-n`/`-e` fails the required-options check.
#[test]
fn test_sed_verify_e_or_pattern_is_required() {
    let policy = setup();
    let call = ExecCall::new("sed", &["122,202p"]);
    let expected = Err(Error::MissingRequiredOptions {
        program: "sed".to_string(),
        options: vec!["-e".to_string()],
    });
    assert_eq!(expected, policy.check(&call));
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/execpolicy-legacy/tests/suite/parse_sed_command.rs
codex-rs/execpolicy-legacy/tests/suite/parse_sed_command.rs
use codex_execpolicy_legacy::Error; use codex_execpolicy_legacy::parse_sed_command; #[test] fn parses_simple_print_command() { assert_eq!(parse_sed_command("122,202p"), Ok(())); } #[test] fn rejects_malformed_print_command() { assert_eq!( parse_sed_command("122,202"), Err(Error::SedCommandNotProvablySafe { command: "122,202".to_string(), }) ); assert_eq!( parse_sed_command("122202"), Err(Error::SedCommandNotProvablySafe { command: "122202".to_string(), }) ); }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/src/error_code.rs
codex-rs/app-server/src/error_code.rs
/// JSON-RPC 2.0 error code for a malformed/invalid request object.
pub(crate) const INVALID_REQUEST_ERROR_CODE: i64 = -32600;

/// JSON-RPC 2.0 error code for an internal server error.
pub(crate) const INTERNAL_ERROR_CODE: i64 = -32603;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/src/lib.rs
codex-rs/app-server/src/lib.rs
#![deny(clippy::print_stdout, clippy::print_stderr)] use codex_common::CliConfigOverrides; use codex_core::config::Config; use std::io::ErrorKind; use std::io::Result as IoResult; use std::path::PathBuf; use crate::message_processor::MessageProcessor; use crate::outgoing_message::OutgoingMessage; use crate::outgoing_message::OutgoingMessageSender; use codex_app_server_protocol::JSONRPCMessage; use codex_feedback::CodexFeedback; use tokio::io::AsyncBufReadExt; use tokio::io::AsyncWriteExt; use tokio::io::BufReader; use tokio::io::{self}; use tokio::sync::mpsc; use toml::Value as TomlValue; use tracing::debug; use tracing::error; use tracing::info; use tracing_subscriber::EnvFilter; use tracing_subscriber::Layer; use tracing_subscriber::layer::SubscriberExt; use tracing_subscriber::util::SubscriberInitExt; mod bespoke_event_handling; mod codex_message_processor; mod config_api; mod error_code; mod fuzzy_file_search; mod message_processor; mod models; mod outgoing_message; /// Size of the bounded channels used to communicate between tasks. The value /// is a balance between throughput and memory usage – 128 messages should be /// plenty for an interactive CLI. const CHANNEL_CAPACITY: usize = 128; pub async fn run_main( codex_linux_sandbox_exe: Option<PathBuf>, cli_config_overrides: CliConfigOverrides, ) -> IoResult<()> { // Set up channels. let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY); let (outgoing_tx, mut outgoing_rx) = mpsc::channel::<OutgoingMessage>(CHANNEL_CAPACITY); // Task: read from stdin, push to `incoming_tx`. let stdin_reader_handle = tokio::spawn({ async move { let stdin = io::stdin(); let reader = BufReader::new(stdin); let mut lines = reader.lines(); while let Some(line) = lines.next_line().await.unwrap_or_default() { match serde_json::from_str::<JSONRPCMessage>(&line) { Ok(msg) => { if incoming_tx.send(msg).await.is_err() { // Receiver gone – nothing left to do. 
break; } } Err(e) => error!("Failed to deserialize JSONRPCMessage: {e}"), } } debug!("stdin reader finished (EOF)"); } }); // Parse CLI overrides once and derive the base Config eagerly so later // components do not need to work with raw TOML values. let cli_kv_overrides = cli_config_overrides.parse_overrides().map_err(|e| { std::io::Error::new( ErrorKind::InvalidInput, format!("error parsing -c overrides: {e}"), ) })?; let config = Config::load_with_cli_overrides(cli_kv_overrides.clone()) .await .map_err(|e| { std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}")) })?; let feedback = CodexFeedback::new(); let otel = codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION")).map_err(|e| { std::io::Error::new( ErrorKind::InvalidData, format!("error loading otel config: {e}"), ) })?; // Install a simple subscriber so `tracing` output is visible. Users can // control the log level with `RUST_LOG`. let stderr_fmt = tracing_subscriber::fmt::layer() .with_writer(std::io::stderr) .with_span_events(tracing_subscriber::fmt::format::FmtSpan::FULL) .with_filter(EnvFilter::from_default_env()); let feedback_layer = feedback.logger_layer(); let feedback_metadata_layer = feedback.metadata_layer(); let otel_logger_layer = otel.as_ref().and_then(|o| o.logger_layer()); let otel_tracing_layer = otel.as_ref().and_then(|o| o.tracing_layer()); let _ = tracing_subscriber::registry() .with(stderr_fmt) .with(feedback_layer) .with(feedback_metadata_layer) .with(otel_logger_layer) .with(otel_tracing_layer) .try_init(); // Task: process incoming messages. 
let processor_handle = tokio::spawn({ let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx); let cli_overrides: Vec<(String, TomlValue)> = cli_kv_overrides.clone(); let mut processor = MessageProcessor::new( outgoing_message_sender, codex_linux_sandbox_exe, std::sync::Arc::new(config), cli_overrides, feedback.clone(), ); async move { while let Some(msg) = incoming_rx.recv().await { match msg { JSONRPCMessage::Request(r) => processor.process_request(r).await, JSONRPCMessage::Response(r) => processor.process_response(r).await, JSONRPCMessage::Notification(n) => processor.process_notification(n).await, JSONRPCMessage::Error(e) => processor.process_error(e), } } info!("processor task exited (channel closed)"); } }); // Task: write outgoing messages to stdout. let stdout_writer_handle = tokio::spawn(async move { let mut stdout = io::stdout(); while let Some(outgoing_message) = outgoing_rx.recv().await { let Ok(value) = serde_json::to_value(outgoing_message) else { error!("Failed to convert OutgoingMessage to JSON value"); continue; }; match serde_json::to_string(&value) { Ok(mut json) => { json.push('\n'); if let Err(e) = stdout.write_all(json.as_bytes()).await { error!("Failed to write to stdout: {e}"); break; } } Err(e) => error!("Failed to serialize JSONRPCMessage: {e}"), } } info!("stdout writer exited (channel closed)"); }); // Wait for all tasks to finish. The typical exit path is the stdin reader // hitting EOF which, once it drops `incoming_tx`, propagates shutdown to // the processor and then to the stdout task. let _ = tokio::join!(stdin_reader_handle, processor_handle, stdout_writer_handle); Ok(()) }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/src/bespoke_event_handling.rs
codex-rs/app-server/src/bespoke_event_handling.rs
use crate::codex_message_processor::ApiVersion; use crate::codex_message_processor::PendingInterrupts; use crate::codex_message_processor::TurnSummary; use crate::codex_message_processor::TurnSummaryStore; use crate::outgoing_message::OutgoingMessageSender; use codex_app_server_protocol::AccountRateLimitsUpdatedNotification; use codex_app_server_protocol::AgentMessageDeltaNotification; use codex_app_server_protocol::ApplyPatchApprovalParams; use codex_app_server_protocol::ApplyPatchApprovalResponse; use codex_app_server_protocol::ApprovalDecision; use codex_app_server_protocol::CodexErrorInfo as V2CodexErrorInfo; use codex_app_server_protocol::CommandAction as V2ParsedCommand; use codex_app_server_protocol::CommandExecutionOutputDeltaNotification; use codex_app_server_protocol::CommandExecutionRequestApprovalParams; use codex_app_server_protocol::CommandExecutionRequestApprovalResponse; use codex_app_server_protocol::CommandExecutionStatus; use codex_app_server_protocol::ContextCompactedNotification; use codex_app_server_protocol::DeprecationNoticeNotification; use codex_app_server_protocol::ErrorNotification; use codex_app_server_protocol::ExecCommandApprovalParams; use codex_app_server_protocol::ExecCommandApprovalResponse; use codex_app_server_protocol::ExecPolicyAmendment as V2ExecPolicyAmendment; use codex_app_server_protocol::FileChangeOutputDeltaNotification; use codex_app_server_protocol::FileChangeRequestApprovalParams; use codex_app_server_protocol::FileChangeRequestApprovalResponse; use codex_app_server_protocol::FileUpdateChange; use codex_app_server_protocol::InterruptConversationResponse; use codex_app_server_protocol::ItemCompletedNotification; use codex_app_server_protocol::ItemStartedNotification; use codex_app_server_protocol::McpToolCallError; use codex_app_server_protocol::McpToolCallResult; use codex_app_server_protocol::McpToolCallStatus; use codex_app_server_protocol::PatchApplyStatus; use codex_app_server_protocol::PatchChangeKind as 
V2PatchChangeKind; use codex_app_server_protocol::RawResponseItemCompletedNotification; use codex_app_server_protocol::ReasoningSummaryPartAddedNotification; use codex_app_server_protocol::ReasoningSummaryTextDeltaNotification; use codex_app_server_protocol::ReasoningTextDeltaNotification; use codex_app_server_protocol::ServerNotification; use codex_app_server_protocol::ServerRequestPayload; use codex_app_server_protocol::TerminalInteractionNotification; use codex_app_server_protocol::ThreadItem; use codex_app_server_protocol::ThreadTokenUsage; use codex_app_server_protocol::ThreadTokenUsageUpdatedNotification; use codex_app_server_protocol::Turn; use codex_app_server_protocol::TurnCompletedNotification; use codex_app_server_protocol::TurnDiffUpdatedNotification; use codex_app_server_protocol::TurnError; use codex_app_server_protocol::TurnInterruptResponse; use codex_app_server_protocol::TurnPlanStep; use codex_app_server_protocol::TurnPlanUpdatedNotification; use codex_app_server_protocol::TurnStatus; use codex_core::CodexConversation; use codex_core::parse_command::shlex_join; use codex_core::protocol::ApplyPatchApprovalRequestEvent; use codex_core::protocol::Event; use codex_core::protocol::EventMsg; use codex_core::protocol::ExecApprovalRequestEvent; use codex_core::protocol::ExecCommandEndEvent; use codex_core::protocol::FileChange as CoreFileChange; use codex_core::protocol::McpToolCallBeginEvent; use codex_core::protocol::McpToolCallEndEvent; use codex_core::protocol::Op; use codex_core::protocol::ReviewDecision; use codex_core::protocol::TokenCountEvent; use codex_core::protocol::TurnDiffEvent; use codex_core::review_format::format_review_findings_block; use codex_core::review_prompts; use codex_protocol::ConversationId; use codex_protocol::plan_tool::UpdatePlanArgs; use codex_protocol::protocol::ReviewOutputEvent; use std::collections::HashMap; use std::convert::TryFrom; use std::path::PathBuf; use std::sync::Arc; use tokio::sync::oneshot; use 
tracing::error; type JsonValue = serde_json::Value; pub(crate) async fn apply_bespoke_event_handling( event: Event, conversation_id: ConversationId, conversation: Arc<CodexConversation>, outgoing: Arc<OutgoingMessageSender>, pending_interrupts: PendingInterrupts, turn_summary_store: TurnSummaryStore, api_version: ApiVersion, ) { let Event { id: event_turn_id, msg, } = event; match msg { EventMsg::TaskComplete(_ev) => { handle_turn_complete( conversation_id, event_turn_id, &outgoing, &turn_summary_store, ) .await; } EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent { call_id, turn_id, changes, reason, grant_root, }) => match api_version { ApiVersion::V1 => { let params = ApplyPatchApprovalParams { conversation_id, call_id, file_changes: changes.clone(), reason, grant_root, }; let rx = outgoing .send_request(ServerRequestPayload::ApplyPatchApproval(params)) .await; tokio::spawn(async move { on_patch_approval_response(event_turn_id, rx, conversation).await; }); } ApiVersion::V2 => { // Until we migrate the core to be aware of a first class FileChangeItem // and emit the corresponding EventMsg, we repurpose the call_id as the item_id. 
let item_id = call_id.clone(); let patch_changes = convert_patch_changes(&changes); let first_start = { let mut map = turn_summary_store.lock().await; let summary = map.entry(conversation_id).or_default(); summary.file_change_started.insert(item_id.clone()) }; if first_start { let item = ThreadItem::FileChange { id: item_id.clone(), changes: patch_changes.clone(), status: PatchApplyStatus::InProgress, }; let notification = ItemStartedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item, }; outgoing .send_server_notification(ServerNotification::ItemStarted(notification)) .await; } let params = FileChangeRequestApprovalParams { thread_id: conversation_id.to_string(), turn_id: turn_id.clone(), item_id: item_id.clone(), reason, grant_root, }; let rx = outgoing .send_request(ServerRequestPayload::FileChangeRequestApproval(params)) .await; tokio::spawn(async move { on_file_change_request_approval_response( event_turn_id, conversation_id, item_id, patch_changes, rx, conversation, outgoing, turn_summary_store, ) .await; }); } }, EventMsg::ExecApprovalRequest(ExecApprovalRequestEvent { call_id, turn_id, command, cwd, reason, proposed_execpolicy_amendment, parsed_cmd, }) => match api_version { ApiVersion::V1 => { let params = ExecCommandApprovalParams { conversation_id, call_id, command, cwd, reason, parsed_cmd, }; let rx = outgoing .send_request(ServerRequestPayload::ExecCommandApproval(params)) .await; tokio::spawn(async move { on_exec_approval_response(event_turn_id, rx, conversation).await; }); } ApiVersion::V2 => { let item_id = call_id.clone(); let command_actions = parsed_cmd .iter() .cloned() .map(V2ParsedCommand::from) .collect::<Vec<_>>(); let command_string = shlex_join(&command); let proposed_execpolicy_amendment_v2 = proposed_execpolicy_amendment.map(V2ExecPolicyAmendment::from); let params = CommandExecutionRequestApprovalParams { thread_id: conversation_id.to_string(), turn_id: turn_id.clone(), // Until we migrate the 
core to be aware of a first class CommandExecutionItem // and emit the corresponding EventMsg, we repurpose the call_id as the item_id. item_id: item_id.clone(), reason, proposed_execpolicy_amendment: proposed_execpolicy_amendment_v2, }; let rx = outgoing .send_request(ServerRequestPayload::CommandExecutionRequestApproval( params, )) .await; tokio::spawn(async move { on_command_execution_request_approval_response( event_turn_id, conversation_id, item_id, command_string, cwd, command_actions, rx, conversation, outgoing, ) .await; }); } }, // TODO(celia): properly construct McpToolCall TurnItem in core. EventMsg::McpToolCallBegin(begin_event) => { let notification = construct_mcp_tool_call_notification( begin_event, conversation_id.to_string(), event_turn_id.clone(), ) .await; outgoing .send_server_notification(ServerNotification::ItemStarted(notification)) .await; } EventMsg::McpToolCallEnd(end_event) => { let notification = construct_mcp_tool_call_end_notification( end_event, conversation_id.to_string(), event_turn_id.clone(), ) .await; outgoing .send_server_notification(ServerNotification::ItemCompleted(notification)) .await; } EventMsg::AgentMessageContentDelta(event) => { let notification = AgentMessageDeltaNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item_id: event.item_id, delta: event.delta, }; outgoing .send_server_notification(ServerNotification::AgentMessageDelta(notification)) .await; } EventMsg::ContextCompacted(..) 
=> { let notification = ContextCompactedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), }; outgoing .send_server_notification(ServerNotification::ContextCompacted(notification)) .await; } EventMsg::DeprecationNotice(event) => { let notification = DeprecationNoticeNotification { summary: event.summary, details: event.details, }; outgoing .send_server_notification(ServerNotification::DeprecationNotice(notification)) .await; } EventMsg::ReasoningContentDelta(event) => { let notification = ReasoningSummaryTextDeltaNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item_id: event.item_id, delta: event.delta, summary_index: event.summary_index, }; outgoing .send_server_notification(ServerNotification::ReasoningSummaryTextDelta( notification, )) .await; } EventMsg::ReasoningRawContentDelta(event) => { let notification = ReasoningTextDeltaNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item_id: event.item_id, delta: event.delta, content_index: event.content_index, }; outgoing .send_server_notification(ServerNotification::ReasoningTextDelta(notification)) .await; } EventMsg::AgentReasoningSectionBreak(event) => { let notification = ReasoningSummaryPartAddedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item_id: event.item_id, summary_index: event.summary_index, }; outgoing .send_server_notification(ServerNotification::ReasoningSummaryPartAdded( notification, )) .await; } EventMsg::TokenCount(token_count_event) => { handle_token_count_event(conversation_id, event_turn_id, token_count_event, &outgoing) .await; } EventMsg::Error(ev) => { let turn_error = TurnError { message: ev.message, codex_error_info: ev.codex_error_info.map(V2CodexErrorInfo::from), additional_details: None, }; handle_error(conversation_id, turn_error.clone(), &turn_summary_store).await; outgoing 
.send_server_notification(ServerNotification::Error(ErrorNotification { error: turn_error, will_retry: false, thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), })) .await; } EventMsg::StreamError(ev) => { // We don't need to update the turn summary store for stream errors as they are intermediate error states for retries, // but we notify the client. let turn_error = TurnError { message: ev.message, codex_error_info: ev.codex_error_info.map(V2CodexErrorInfo::from), additional_details: ev.additional_details, }; outgoing .send_server_notification(ServerNotification::Error(ErrorNotification { error: turn_error, will_retry: true, thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), })) .await; } EventMsg::ViewImageToolCall(view_image_event) => { let item = ThreadItem::ImageView { id: view_image_event.call_id.clone(), path: view_image_event.path.to_string_lossy().into_owned(), }; let started = ItemStartedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item: item.clone(), }; outgoing .send_server_notification(ServerNotification::ItemStarted(started)) .await; let completed = ItemCompletedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item, }; outgoing .send_server_notification(ServerNotification::ItemCompleted(completed)) .await; } EventMsg::EnteredReviewMode(review_request) => { let review = review_request .user_facing_hint .unwrap_or_else(|| review_prompts::user_facing_hint(&review_request.target)); let item = ThreadItem::EnteredReviewMode { id: event_turn_id.clone(), review, }; let started = ItemStartedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item: item.clone(), }; outgoing .send_server_notification(ServerNotification::ItemStarted(started)) .await; let completed = ItemCompletedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item, }; outgoing 
.send_server_notification(ServerNotification::ItemCompleted(completed)) .await; } EventMsg::ItemStarted(item_started_event) => { let item: ThreadItem = item_started_event.item.clone().into(); let notification = ItemStartedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item, }; outgoing .send_server_notification(ServerNotification::ItemStarted(notification)) .await; } EventMsg::ItemCompleted(item_completed_event) => { let item: ThreadItem = item_completed_event.item.clone().into(); let notification = ItemCompletedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item, }; outgoing .send_server_notification(ServerNotification::ItemCompleted(notification)) .await; } EventMsg::ExitedReviewMode(review_event) => { let review = match review_event.review_output { Some(output) => render_review_output_text(&output), None => REVIEW_FALLBACK_MESSAGE.to_string(), }; let item = ThreadItem::ExitedReviewMode { id: event_turn_id.clone(), review, }; let started = ItemStartedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item: item.clone(), }; outgoing .send_server_notification(ServerNotification::ItemStarted(started)) .await; let completed = ItemCompletedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item, }; outgoing .send_server_notification(ServerNotification::ItemCompleted(completed)) .await; } EventMsg::RawResponseItem(raw_response_item_event) => { maybe_emit_raw_response_item_completed( api_version, conversation_id, &event_turn_id, raw_response_item_event.item, outgoing.as_ref(), ) .await; } EventMsg::PatchApplyBegin(patch_begin_event) => { // Until we migrate the core to be aware of a first class FileChangeItem // and emit the corresponding EventMsg, we repurpose the call_id as the item_id. 
let item_id = patch_begin_event.call_id.clone(); let first_start = { let mut map = turn_summary_store.lock().await; let summary = map.entry(conversation_id).or_default(); summary.file_change_started.insert(item_id.clone()) }; if first_start { let item = ThreadItem::FileChange { id: item_id.clone(), changes: convert_patch_changes(&patch_begin_event.changes), status: PatchApplyStatus::InProgress, }; let notification = ItemStartedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item, }; outgoing .send_server_notification(ServerNotification::ItemStarted(notification)) .await; } } EventMsg::PatchApplyEnd(patch_end_event) => { // Until we migrate the core to be aware of a first class FileChangeItem // and emit the corresponding EventMsg, we repurpose the call_id as the item_id. let item_id = patch_end_event.call_id.clone(); let status = if patch_end_event.success { PatchApplyStatus::Completed } else { PatchApplyStatus::Failed }; let changes = convert_patch_changes(&patch_end_event.changes); complete_file_change_item( conversation_id, item_id, changes, status, event_turn_id.clone(), outgoing.as_ref(), &turn_summary_store, ) .await; } EventMsg::ExecCommandBegin(exec_command_begin_event) => { let item_id = exec_command_begin_event.call_id.clone(); let command_actions = exec_command_begin_event .parsed_cmd .into_iter() .map(V2ParsedCommand::from) .collect::<Vec<_>>(); let command = shlex_join(&exec_command_begin_event.command); let cwd = exec_command_begin_event.cwd; let process_id = exec_command_begin_event.process_id; let item = ThreadItem::CommandExecution { id: item_id, command, cwd, process_id, status: CommandExecutionStatus::InProgress, command_actions, aggregated_output: None, exit_code: None, duration_ms: None, }; let notification = ItemStartedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item, }; outgoing .send_server_notification(ServerNotification::ItemStarted(notification)) .await; } 
EventMsg::ExecCommandOutputDelta(exec_command_output_delta_event) => { let item_id = exec_command_output_delta_event.call_id.clone(); let delta = String::from_utf8_lossy(&exec_command_output_delta_event.chunk).to_string(); // The underlying EventMsg::ExecCommandOutputDelta is used for shell, unified_exec, // and apply_patch tool calls. We represent apply_patch with the FileChange item, and // everything else with the CommandExecution item. // // We need to detect which item type it is so we can emit the right notification. // We already have state tracking FileChange items on item/started, so let's use that. let is_file_change = { let map = turn_summary_store.lock().await; map.get(&conversation_id) .is_some_and(|summary| summary.file_change_started.contains(&item_id)) }; if is_file_change { let notification = FileChangeOutputDeltaNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item_id, delta, }; outgoing .send_server_notification(ServerNotification::FileChangeOutputDelta( notification, )) .await; } else { let notification = CommandExecutionOutputDeltaNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item_id, delta, }; outgoing .send_server_notification(ServerNotification::CommandExecutionOutputDelta( notification, )) .await; } } EventMsg::TerminalInteraction(terminal_event) => { let item_id = terminal_event.call_id.clone(); let notification = TerminalInteractionNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item_id, process_id: terminal_event.process_id, stdin: terminal_event.stdin, }; outgoing .send_server_notification(ServerNotification::TerminalInteraction(notification)) .await; } EventMsg::ExecCommandEnd(exec_command_end_event) => { let ExecCommandEndEvent { call_id, command, cwd, parsed_cmd, process_id, aggregated_output, exit_code, duration, .. 
} = exec_command_end_event; let status = if exit_code == 0 { CommandExecutionStatus::Completed } else { CommandExecutionStatus::Failed }; let command_actions = parsed_cmd .into_iter() .map(V2ParsedCommand::from) .collect::<Vec<_>>(); let aggregated_output = if aggregated_output.is_empty() { None } else { Some(aggregated_output) }; let duration_ms = i64::try_from(duration.as_millis()).unwrap_or(i64::MAX); let item = ThreadItem::CommandExecution { id: call_id, command: shlex_join(&command), cwd, process_id, status, command_actions, aggregated_output, exit_code: Some(exit_code), duration_ms: Some(duration_ms), }; let notification = ItemCompletedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.clone(), item, }; outgoing .send_server_notification(ServerNotification::ItemCompleted(notification)) .await; } // If this is a TurnAborted, reply to any pending interrupt requests. EventMsg::TurnAborted(turn_aborted_event) => { let pending = { let mut map = pending_interrupts.lock().await; map.remove(&conversation_id).unwrap_or_default() }; if !pending.is_empty() { for (rid, ver) in pending { match ver { ApiVersion::V1 => { let response = InterruptConversationResponse { abort_reason: turn_aborted_event.reason.clone(), }; outgoing.send_response(rid, response).await; } ApiVersion::V2 => { let response = TurnInterruptResponse {}; outgoing.send_response(rid, response).await; } } } } handle_turn_interrupted( conversation_id, event_turn_id, &outgoing, &turn_summary_store, ) .await; } EventMsg::TurnDiff(turn_diff_event) => { handle_turn_diff( conversation_id, &event_turn_id, turn_diff_event, api_version, outgoing.as_ref(), ) .await; } EventMsg::PlanUpdate(plan_update_event) => { handle_turn_plan_update( conversation_id, &event_turn_id, plan_update_event, api_version, outgoing.as_ref(), ) .await; } _ => {} } } async fn handle_turn_diff( conversation_id: ConversationId, event_turn_id: &str, turn_diff_event: TurnDiffEvent, api_version: ApiVersion, outgoing: 
&OutgoingMessageSender, ) { if let ApiVersion::V2 = api_version { let notification = TurnDiffUpdatedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.to_string(), diff: turn_diff_event.unified_diff, }; outgoing .send_server_notification(ServerNotification::TurnDiffUpdated(notification)) .await; } } async fn handle_turn_plan_update( conversation_id: ConversationId, event_turn_id: &str, plan_update_event: UpdatePlanArgs, api_version: ApiVersion, outgoing: &OutgoingMessageSender, ) { if let ApiVersion::V2 = api_version { let notification = TurnPlanUpdatedNotification { thread_id: conversation_id.to_string(), turn_id: event_turn_id.to_string(), explanation: plan_update_event.explanation, plan: plan_update_event .plan .into_iter() .map(TurnPlanStep::from) .collect(), }; outgoing .send_server_notification(ServerNotification::TurnPlanUpdated(notification)) .await; } } async fn emit_turn_completed_with_status( conversation_id: ConversationId, event_turn_id: String, status: TurnStatus, error: Option<TurnError>, outgoing: &OutgoingMessageSender, ) { let notification = TurnCompletedNotification { thread_id: conversation_id.to_string(), turn: Turn { id: event_turn_id, items: vec![], error, status, }, }; outgoing .send_server_notification(ServerNotification::TurnCompleted(notification)) .await; } async fn complete_file_change_item( conversation_id: ConversationId, item_id: String, changes: Vec<FileUpdateChange>, status: PatchApplyStatus, turn_id: String, outgoing: &OutgoingMessageSender, turn_summary_store: &TurnSummaryStore, ) { { let mut map = turn_summary_store.lock().await; if let Some(summary) = map.get_mut(&conversation_id) { summary.file_change_started.remove(&item_id); } } let item = ThreadItem::FileChange { id: item_id, changes, status, }; let notification = ItemCompletedNotification { thread_id: conversation_id.to_string(), turn_id, item, }; outgoing .send_server_notification(ServerNotification::ItemCompleted(notification)) .await; } 
#[allow(clippy::too_many_arguments)] async fn complete_command_execution_item( conversation_id: ConversationId, turn_id: String, item_id: String, command: String, cwd: PathBuf, process_id: Option<String>, command_actions: Vec<V2ParsedCommand>, status: CommandExecutionStatus, outgoing: &OutgoingMessageSender, ) { let item = ThreadItem::CommandExecution { id: item_id, command, cwd, process_id, status, command_actions, aggregated_output: None, exit_code: None, duration_ms: None, };
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
true
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/src/config_api.rs
codex-rs/app-server/src/config_api.rs
use crate::error_code::INTERNAL_ERROR_CODE;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use codex_app_server_protocol::ConfigBatchWriteParams;
use codex_app_server_protocol::ConfigReadParams;
use codex_app_server_protocol::ConfigReadResponse;
use codex_app_server_protocol::ConfigValueWriteParams;
use codex_app_server_protocol::ConfigWriteErrorCode;
use codex_app_server_protocol::ConfigWriteResponse;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_core::config::ConfigService;
use codex_core::config::ConfigServiceError;
use serde_json::json;
use std::path::PathBuf;
use toml::Value as TomlValue;

/// Thin JSON-RPC-facing adapter over [`ConfigService`]: it forwards each
/// config operation to the service and translates service-level errors into
/// `JSONRPCErrorError` values the wire protocol understands.
#[derive(Clone)]
pub(crate) struct ConfigApi {
    service: ConfigService,
}

impl ConfigApi {
    /// Build an adapter backed by a [`ConfigService`] rooted at `codex_home`,
    /// with the parsed `-c` CLI overrides applied on top.
    pub(crate) fn new(codex_home: PathBuf, cli_overrides: Vec<(String, TomlValue)>) -> Self {
        let service = ConfigService::new(codex_home, cli_overrides);
        Self { service }
    }

    /// Read config values; service errors become JSON-RPC errors.
    pub(crate) async fn read(
        &self,
        params: ConfigReadParams,
    ) -> Result<ConfigReadResponse, JSONRPCErrorError> {
        let result = self.service.read(params).await;
        result.map_err(map_error)
    }

    /// Write a single config value; service errors become JSON-RPC errors.
    pub(crate) async fn write_value(
        &self,
        params: ConfigValueWriteParams,
    ) -> Result<ConfigWriteResponse, JSONRPCErrorError> {
        let result = self.service.write_value(params).await;
        result.map_err(map_error)
    }

    /// Apply several config writes in one batch; service errors become
    /// JSON-RPC errors.
    pub(crate) async fn batch_write(
        &self,
        params: ConfigBatchWriteParams,
    ) -> Result<ConfigWriteResponse, JSONRPCErrorError> {
        let result = self.service.batch_write(params).await;
        result.map_err(map_error)
    }
}

/// Translate a [`ConfigServiceError`] into a JSON-RPC error: write-specific
/// failures keep their machine-readable code, everything else is reported as
/// an internal error.
fn map_error(err: ConfigServiceError) -> JSONRPCErrorError {
    match err.write_error_code() {
        Some(code) => config_write_error(code, err.to_string()),
        None => JSONRPCErrorError {
            code: INTERNAL_ERROR_CODE,
            message: err.to_string(),
            data: None,
        },
    }
}

/// Build an invalid-request JSON-RPC error carrying the structured
/// `config_write_error_code` in its `data` payload so clients can branch on
/// the failure kind without parsing the message text.
fn config_write_error(code: ConfigWriteErrorCode, message: impl Into<String>) -> JSONRPCErrorError {
    let data = json!({
        "config_write_error_code": code,
    });
    JSONRPCErrorError {
        code: INVALID_REQUEST_ERROR_CODE,
        message: message.into(),
        data: Some(data),
    }
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/src/message_processor.rs
codex-rs/app-server/src/message_processor.rs
use std::path::PathBuf; use std::sync::Arc; use crate::codex_message_processor::CodexMessageProcessor; use crate::config_api::ConfigApi; use crate::error_code::INVALID_REQUEST_ERROR_CODE; use crate::outgoing_message::OutgoingMessageSender; use codex_app_server_protocol::ClientInfo; use codex_app_server_protocol::ClientRequest; use codex_app_server_protocol::ConfigBatchWriteParams; use codex_app_server_protocol::ConfigReadParams; use codex_app_server_protocol::ConfigValueWriteParams; use codex_app_server_protocol::InitializeResponse; use codex_app_server_protocol::JSONRPCError; use codex_app_server_protocol::JSONRPCErrorError; use codex_app_server_protocol::JSONRPCNotification; use codex_app_server_protocol::JSONRPCRequest; use codex_app_server_protocol::JSONRPCResponse; use codex_app_server_protocol::RequestId; use codex_core::AuthManager; use codex_core::ConversationManager; use codex_core::config::Config; use codex_core::default_client::USER_AGENT_SUFFIX; use codex_core::default_client::get_codex_user_agent; use codex_feedback::CodexFeedback; use codex_protocol::protocol::SessionSource; use toml::Value as TomlValue; pub(crate) struct MessageProcessor { outgoing: Arc<OutgoingMessageSender>, codex_message_processor: CodexMessageProcessor, config_api: ConfigApi, initialized: bool, } impl MessageProcessor { /// Create a new `MessageProcessor`, retaining a handle to the outgoing /// `Sender` so handlers can enqueue messages to be written to stdout. 
pub(crate) fn new( outgoing: OutgoingMessageSender, codex_linux_sandbox_exe: Option<PathBuf>, config: Arc<Config>, cli_overrides: Vec<(String, TomlValue)>, feedback: CodexFeedback, ) -> Self { let outgoing = Arc::new(outgoing); let auth_manager = AuthManager::shared( config.codex_home.clone(), false, config.cli_auth_credentials_store_mode, ); let conversation_manager = Arc::new(ConversationManager::new( auth_manager.clone(), SessionSource::VSCode, )); let codex_message_processor = CodexMessageProcessor::new( auth_manager, conversation_manager, outgoing.clone(), codex_linux_sandbox_exe, Arc::clone(&config), cli_overrides.clone(), feedback, ); let config_api = ConfigApi::new(config.codex_home.clone(), cli_overrides); Self { outgoing, codex_message_processor, config_api, initialized: false, } } pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) { let request_id = request.id.clone(); let request_json = match serde_json::to_value(&request) { Ok(request_json) => request_json, Err(err) => { let error = JSONRPCErrorError { code: INVALID_REQUEST_ERROR_CODE, message: format!("Invalid request: {err}"), data: None, }; self.outgoing.send_error(request_id, error).await; return; } }; let codex_request = match serde_json::from_value::<ClientRequest>(request_json) { Ok(codex_request) => codex_request, Err(err) => { let error = JSONRPCErrorError { code: INVALID_REQUEST_ERROR_CODE, message: format!("Invalid request: {err}"), data: None, }; self.outgoing.send_error(request_id, error).await; return; } }; match codex_request { // Handle Initialize internally so CodexMessageProcessor does not have to concern // itself with the `initialized` bool. 
ClientRequest::Initialize { request_id, params } => { if self.initialized { let error = JSONRPCErrorError { code: INVALID_REQUEST_ERROR_CODE, message: "Already initialized".to_string(), data: None, }; self.outgoing.send_error(request_id, error).await; return; } else { let ClientInfo { name, title: _title, version, } = params.client_info; let user_agent_suffix = format!("{name}; {version}"); if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() { *suffix = Some(user_agent_suffix); } let user_agent = get_codex_user_agent(); let response = InitializeResponse { user_agent }; self.outgoing.send_response(request_id, response).await; self.initialized = true; return; } } _ => { if !self.initialized { let error = JSONRPCErrorError { code: INVALID_REQUEST_ERROR_CODE, message: "Not initialized".to_string(), data: None, }; self.outgoing.send_error(request_id, error).await; return; } } } match codex_request { ClientRequest::ConfigRead { request_id, params } => { self.handle_config_read(request_id, params).await; } ClientRequest::ConfigValueWrite { request_id, params } => { self.handle_config_value_write(request_id, params).await; } ClientRequest::ConfigBatchWrite { request_id, params } => { self.handle_config_batch_write(request_id, params).await; } other => { self.codex_message_processor.process_request(other).await; } } } pub(crate) async fn process_notification(&self, notification: JSONRPCNotification) { // Currently, we do not expect to receive any notifications from the // client, so we just log them. tracing::info!("<- notification: {:?}", notification); } /// Handle a standalone JSON-RPC response originating from the peer. pub(crate) async fn process_response(&mut self, response: JSONRPCResponse) { tracing::info!("<- response: {:?}", response); let JSONRPCResponse { id, result, .. } = response; self.outgoing.notify_client_response(id, result).await } /// Handle an error object received from the peer. 
pub(crate) fn process_error(&mut self, err: JSONRPCError) { tracing::error!("<- error: {:?}", err); } async fn handle_config_read(&self, request_id: RequestId, params: ConfigReadParams) { match self.config_api.read(params).await { Ok(response) => self.outgoing.send_response(request_id, response).await, Err(error) => self.outgoing.send_error(request_id, error).await, } } async fn handle_config_value_write( &self, request_id: RequestId, params: ConfigValueWriteParams, ) { match self.config_api.write_value(params).await { Ok(response) => self.outgoing.send_response(request_id, response).await, Err(error) => self.outgoing.send_error(request_id, error).await, } } async fn handle_config_batch_write( &self, request_id: RequestId, params: ConfigBatchWriteParams, ) { match self.config_api.batch_write(params).await { Ok(response) => self.outgoing.send_response(request_id, response).await, Err(error) => self.outgoing.send_error(request_id, error).await, } } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/src/codex_message_processor.rs
codex-rs/app-server/src/codex_message_processor.rs
use crate::bespoke_event_handling::apply_bespoke_event_handling; use crate::error_code::INTERNAL_ERROR_CODE; use crate::error_code::INVALID_REQUEST_ERROR_CODE; use crate::fuzzy_file_search::run_fuzzy_file_search; use crate::models::supported_models; use crate::outgoing_message::OutgoingMessageSender; use crate::outgoing_message::OutgoingNotification; use chrono::DateTime; use chrono::Utc; use codex_app_server_protocol::Account; use codex_app_server_protocol::AccountLoginCompletedNotification; use codex_app_server_protocol::AccountUpdatedNotification; use codex_app_server_protocol::AddConversationListenerParams; use codex_app_server_protocol::AddConversationSubscriptionResponse; use codex_app_server_protocol::ArchiveConversationParams; use codex_app_server_protocol::ArchiveConversationResponse; use codex_app_server_protocol::AskForApproval; use codex_app_server_protocol::AuthMode; use codex_app_server_protocol::AuthStatusChangeNotification; use codex_app_server_protocol::CancelLoginAccountParams; use codex_app_server_protocol::CancelLoginAccountResponse; use codex_app_server_protocol::CancelLoginAccountStatus; use codex_app_server_protocol::CancelLoginChatGptResponse; use codex_app_server_protocol::ClientRequest; use codex_app_server_protocol::CommandExecParams; use codex_app_server_protocol::ConversationGitInfo; use codex_app_server_protocol::ConversationSummary; use codex_app_server_protocol::ExecOneOffCommandResponse; use codex_app_server_protocol::FeedbackUploadParams; use codex_app_server_protocol::FeedbackUploadResponse; use codex_app_server_protocol::FuzzyFileSearchParams; use codex_app_server_protocol::FuzzyFileSearchResponse; use codex_app_server_protocol::GetAccountParams; use codex_app_server_protocol::GetAccountRateLimitsResponse; use codex_app_server_protocol::GetAccountResponse; use codex_app_server_protocol::GetAuthStatusParams; use codex_app_server_protocol::GetAuthStatusResponse; use codex_app_server_protocol::GetConversationSummaryParams; use 
codex_app_server_protocol::GetConversationSummaryResponse; use codex_app_server_protocol::GetUserAgentResponse; use codex_app_server_protocol::GetUserSavedConfigResponse; use codex_app_server_protocol::GitDiffToRemoteResponse; use codex_app_server_protocol::GitInfo as ApiGitInfo; use codex_app_server_protocol::InputItem as WireInputItem; use codex_app_server_protocol::InterruptConversationParams; use codex_app_server_protocol::JSONRPCErrorError; use codex_app_server_protocol::ListConversationsParams; use codex_app_server_protocol::ListConversationsResponse; use codex_app_server_protocol::ListMcpServerStatusParams; use codex_app_server_protocol::ListMcpServerStatusResponse; use codex_app_server_protocol::LoginAccountParams; use codex_app_server_protocol::LoginApiKeyParams; use codex_app_server_protocol::LoginApiKeyResponse; use codex_app_server_protocol::LoginChatGptCompleteNotification; use codex_app_server_protocol::LoginChatGptResponse; use codex_app_server_protocol::LogoutAccountResponse; use codex_app_server_protocol::LogoutChatGptResponse; use codex_app_server_protocol::McpServerOauthLoginCompletedNotification; use codex_app_server_protocol::McpServerOauthLoginParams; use codex_app_server_protocol::McpServerOauthLoginResponse; use codex_app_server_protocol::McpServerStatus; use codex_app_server_protocol::ModelListParams; use codex_app_server_protocol::ModelListResponse; use codex_app_server_protocol::NewConversationParams; use codex_app_server_protocol::NewConversationResponse; use codex_app_server_protocol::RemoveConversationListenerParams; use codex_app_server_protocol::RemoveConversationSubscriptionResponse; use codex_app_server_protocol::RequestId; use codex_app_server_protocol::ResumeConversationParams; use codex_app_server_protocol::ResumeConversationResponse; use codex_app_server_protocol::ReviewDelivery as ApiReviewDelivery; use codex_app_server_protocol::ReviewStartParams; use codex_app_server_protocol::ReviewStartResponse; use 
codex_app_server_protocol::ReviewTarget as ApiReviewTarget; use codex_app_server_protocol::SandboxMode; use codex_app_server_protocol::SendUserMessageParams; use codex_app_server_protocol::SendUserMessageResponse; use codex_app_server_protocol::SendUserTurnParams; use codex_app_server_protocol::SendUserTurnResponse; use codex_app_server_protocol::ServerNotification; use codex_app_server_protocol::SessionConfiguredNotification; use codex_app_server_protocol::SetDefaultModelParams; use codex_app_server_protocol::SetDefaultModelResponse; use codex_app_server_protocol::SkillsListParams; use codex_app_server_protocol::SkillsListResponse; use codex_app_server_protocol::Thread; use codex_app_server_protocol::ThreadArchiveParams; use codex_app_server_protocol::ThreadArchiveResponse; use codex_app_server_protocol::ThreadItem; use codex_app_server_protocol::ThreadListParams; use codex_app_server_protocol::ThreadListResponse; use codex_app_server_protocol::ThreadResumeParams; use codex_app_server_protocol::ThreadResumeResponse; use codex_app_server_protocol::ThreadStartParams; use codex_app_server_protocol::ThreadStartResponse; use codex_app_server_protocol::ThreadStartedNotification; use codex_app_server_protocol::Turn; use codex_app_server_protocol::TurnError; use codex_app_server_protocol::TurnInterruptParams; use codex_app_server_protocol::TurnStartParams; use codex_app_server_protocol::TurnStartResponse; use codex_app_server_protocol::TurnStartedNotification; use codex_app_server_protocol::TurnStatus; use codex_app_server_protocol::UserInfoResponse; use codex_app_server_protocol::UserInput as V2UserInput; use codex_app_server_protocol::UserSavedConfig; use codex_app_server_protocol::build_turns_from_event_msgs; use codex_backend_client::Client as BackendClient; use codex_core::AuthManager; use codex_core::CodexConversation; use codex_core::ConversationManager; use codex_core::Cursor as RolloutCursor; use codex_core::INTERACTIVE_SESSION_SOURCES; use 
codex_core::InitialHistory; use codex_core::NewConversation; use codex_core::RolloutRecorder; use codex_core::SessionMeta; use codex_core::auth::CLIENT_ID; use codex_core::auth::login_with_api_key; use codex_core::config::Config; use codex_core::config::ConfigOverrides; use codex_core::config::ConfigService; use codex_core::config::edit::ConfigEditsBuilder; use codex_core::config::types::McpServerTransportConfig; use codex_core::default_client::get_codex_user_agent; use codex_core::exec::ExecParams; use codex_core::exec_env::create_env; use codex_core::features::Feature; use codex_core::find_conversation_path_by_id_str; use codex_core::git_info::git_diff_to_remote; use codex_core::mcp::collect_mcp_snapshot; use codex_core::mcp::group_tools_by_server; use codex_core::parse_cursor; use codex_core::protocol::EventMsg; use codex_core::protocol::Op; use codex_core::protocol::ReviewDelivery as CoreReviewDelivery; use codex_core::protocol::ReviewRequest; use codex_core::protocol::ReviewTarget as CoreReviewTarget; use codex_core::protocol::SessionConfiguredEvent; use codex_core::read_head_for_summary; use codex_core::sandboxing::SandboxPermissions; use codex_feedback::CodexFeedback; use codex_login::ServerOptions as LoginServerOptions; use codex_login::ShutdownHandle; use codex_login::run_login_server; use codex_protocol::ConversationId; use codex_protocol::config_types::ForcedLoginMethod; use codex_protocol::items::TurnItem; use codex_protocol::models::ResponseItem; use codex_protocol::protocol::GitInfo as CoreGitInfo; use codex_protocol::protocol::McpAuthStatus as CoreMcpAuthStatus; use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot; use codex_protocol::protocol::RolloutItem; use codex_protocol::protocol::SessionMetaLine; use codex_protocol::protocol::USER_MESSAGE_BEGIN; use codex_protocol::user_input::UserInput as CoreInputItem; use codex_rmcp_client::perform_oauth_login_return_url; use codex_utils_json_to_toml::json_to_toml; use 
std::collections::HashMap; use std::collections::HashSet; use std::ffi::OsStr; use std::io::Error as IoError; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; use std::sync::atomic::AtomicBool; use std::sync::atomic::Ordering; use std::time::Duration; use tokio::select; use tokio::sync::Mutex; use tokio::sync::oneshot; use toml::Value as TomlValue; use tracing::error; use tracing::info; use tracing::warn; use uuid::Uuid; type PendingInterruptQueue = Vec<(RequestId, ApiVersion)>; pub(crate) type PendingInterrupts = Arc<Mutex<HashMap<ConversationId, PendingInterruptQueue>>>; /// Per-conversation accumulation of the latest states e.g. error message while a turn runs. #[derive(Default, Clone)] pub(crate) struct TurnSummary { pub(crate) file_change_started: HashSet<String>, pub(crate) last_error: Option<TurnError>, } pub(crate) type TurnSummaryStore = Arc<Mutex<HashMap<ConversationId, TurnSummary>>>; const THREAD_LIST_DEFAULT_LIMIT: usize = 25; const THREAD_LIST_MAX_LIMIT: usize = 100; // Duration before a ChatGPT login attempt is abandoned. const LOGIN_CHATGPT_TIMEOUT: Duration = Duration::from_secs(10 * 60); struct ActiveLogin { shutdown_handle: ShutdownHandle, login_id: Uuid, } #[derive(Clone, Copy, Debug)] enum CancelLoginError { NotFound(Uuid), } impl Drop for ActiveLogin { fn drop(&mut self) { self.shutdown_handle.shutdown(); } } /// Handles JSON-RPC messages for Codex conversations. pub(crate) struct CodexMessageProcessor { auth_manager: Arc<AuthManager>, conversation_manager: Arc<ConversationManager>, outgoing: Arc<OutgoingMessageSender>, codex_linux_sandbox_exe: Option<PathBuf>, config: Arc<Config>, cli_overrides: Vec<(String, TomlValue)>, conversation_listeners: HashMap<Uuid, oneshot::Sender<()>>, active_login: Arc<Mutex<Option<ActiveLogin>>>, // Queue of pending interrupt requests per conversation. We reply when TurnAborted arrives. 
pending_interrupts: PendingInterrupts, turn_summary_store: TurnSummaryStore, pending_fuzzy_searches: Arc<Mutex<HashMap<String, Arc<AtomicBool>>>>, feedback: CodexFeedback, } #[derive(Clone, Copy, Debug)] pub(crate) enum ApiVersion { V1, V2, } impl CodexMessageProcessor { async fn conversation_from_thread_id( &self, thread_id: &str, ) -> Result<(ConversationId, Arc<CodexConversation>), JSONRPCErrorError> { // Resolve conversation id from v2 thread id string. let conversation_id = ConversationId::from_string(thread_id).map_err(|err| JSONRPCErrorError { code: INVALID_REQUEST_ERROR_CODE, message: format!("invalid thread id: {err}"), data: None, })?; let conversation = self .conversation_manager .get_conversation(conversation_id) .await .map_err(|_| JSONRPCErrorError { code: INVALID_REQUEST_ERROR_CODE, message: format!("conversation not found: {conversation_id}"), data: None, })?; Ok((conversation_id, conversation)) } pub fn new( auth_manager: Arc<AuthManager>, conversation_manager: Arc<ConversationManager>, outgoing: Arc<OutgoingMessageSender>, codex_linux_sandbox_exe: Option<PathBuf>, config: Arc<Config>, cli_overrides: Vec<(String, TomlValue)>, feedback: CodexFeedback, ) -> Self { Self { auth_manager, conversation_manager, outgoing, codex_linux_sandbox_exe, config, cli_overrides, conversation_listeners: HashMap::new(), active_login: Arc::new(Mutex::new(None)), pending_interrupts: Arc::new(Mutex::new(HashMap::new())), turn_summary_store: Arc::new(Mutex::new(HashMap::new())), pending_fuzzy_searches: Arc::new(Mutex::new(HashMap::new())), feedback, } } async fn load_latest_config(&self) -> Result<Config, JSONRPCErrorError> { Config::load_with_cli_overrides(self.cli_overrides.clone()) .await .map_err(|err| JSONRPCErrorError { code: INTERNAL_ERROR_CODE, message: format!("failed to reload config: {err}"), data: None, }) } fn review_request_from_target( target: ApiReviewTarget, ) -> Result<(ReviewRequest, String), JSONRPCErrorError> { fn invalid_request(message: String) -> 
JSONRPCErrorError { JSONRPCErrorError { code: INVALID_REQUEST_ERROR_CODE, message, data: None, } } let cleaned_target = match target { ApiReviewTarget::UncommittedChanges => ApiReviewTarget::UncommittedChanges, ApiReviewTarget::BaseBranch { branch } => { let branch = branch.trim().to_string(); if branch.is_empty() { return Err(invalid_request("branch must not be empty".to_string())); } ApiReviewTarget::BaseBranch { branch } } ApiReviewTarget::Commit { sha, title } => { let sha = sha.trim().to_string(); if sha.is_empty() { return Err(invalid_request("sha must not be empty".to_string())); } let title = title .map(|t| t.trim().to_string()) .filter(|t| !t.is_empty()); ApiReviewTarget::Commit { sha, title } } ApiReviewTarget::Custom { instructions } => { let trimmed = instructions.trim().to_string(); if trimmed.is_empty() { return Err(invalid_request( "instructions must not be empty".to_string(), )); } ApiReviewTarget::Custom { instructions: trimmed, } } }; let core_target = match cleaned_target { ApiReviewTarget::UncommittedChanges => CoreReviewTarget::UncommittedChanges, ApiReviewTarget::BaseBranch { branch } => CoreReviewTarget::BaseBranch { branch }, ApiReviewTarget::Commit { sha, title } => CoreReviewTarget::Commit { sha, title }, ApiReviewTarget::Custom { instructions } => CoreReviewTarget::Custom { instructions }, }; let hint = codex_core::review_prompts::user_facing_hint(&core_target); let review_request = ReviewRequest { target: core_target, user_facing_hint: Some(hint.clone()), }; Ok((review_request, hint)) } pub async fn process_request(&mut self, request: ClientRequest) { match request { ClientRequest::Initialize { .. 
} => { panic!("Initialize should be handled in MessageProcessor"); } // === v2 Thread/Turn APIs === ClientRequest::ThreadStart { request_id, params } => { self.thread_start(request_id, params).await; } ClientRequest::ThreadResume { request_id, params } => { self.thread_resume(request_id, params).await; } ClientRequest::ThreadArchive { request_id, params } => { self.thread_archive(request_id, params).await; } ClientRequest::ThreadList { request_id, params } => { self.thread_list(request_id, params).await; } ClientRequest::SkillsList { request_id, params } => { self.skills_list(request_id, params).await; } ClientRequest::TurnStart { request_id, params } => { self.turn_start(request_id, params).await; } ClientRequest::TurnInterrupt { request_id, params } => { self.turn_interrupt(request_id, params).await; } ClientRequest::ReviewStart { request_id, params } => { self.review_start(request_id, params).await; } ClientRequest::NewConversation { request_id, params } => { // Do not tokio::spawn() to process new_conversation() // asynchronously because we need to ensure the conversation is // created before processing any subsequent messages. 
self.process_new_conversation(request_id, params).await; } ClientRequest::GetConversationSummary { request_id, params } => { self.get_conversation_summary(request_id, params).await; } ClientRequest::ListConversations { request_id, params } => { self.handle_list_conversations(request_id, params).await; } ClientRequest::ModelList { request_id, params } => { let outgoing = self.outgoing.clone(); let conversation_manager = self.conversation_manager.clone(); let config = self.config.clone(); tokio::spawn(async move { Self::list_models(outgoing, conversation_manager, config, request_id, params) .await; }); } ClientRequest::McpServerOauthLogin { request_id, params } => { self.mcp_server_oauth_login(request_id, params).await; } ClientRequest::McpServerStatusList { request_id, params } => { self.list_mcp_server_status(request_id, params).await; } ClientRequest::LoginAccount { request_id, params } => { self.login_v2(request_id, params).await; } ClientRequest::LogoutAccount { request_id, params: _, } => { self.logout_v2(request_id).await; } ClientRequest::CancelLoginAccount { request_id, params } => { self.cancel_login_v2(request_id, params).await; } ClientRequest::GetAccount { request_id, params } => { self.get_account(request_id, params).await; } ClientRequest::ResumeConversation { request_id, params } => { self.handle_resume_conversation(request_id, params).await; } ClientRequest::ArchiveConversation { request_id, params } => { self.archive_conversation(request_id, params).await; } ClientRequest::SendUserMessage { request_id, params } => { self.send_user_message(request_id, params).await; } ClientRequest::SendUserTurn { request_id, params } => { self.send_user_turn(request_id, params).await; } ClientRequest::InterruptConversation { request_id, params } => { self.interrupt_conversation(request_id, params).await; } ClientRequest::AddConversationListener { request_id, params } => { self.add_conversation_listener(request_id, params).await; } 
ClientRequest::RemoveConversationListener { request_id, params } => { self.remove_conversation_listener(request_id, params).await; } ClientRequest::GitDiffToRemote { request_id, params } => { self.git_diff_to_origin(request_id, params.cwd).await; } ClientRequest::LoginApiKey { request_id, params } => { self.login_api_key_v1(request_id, params).await; } ClientRequest::LoginChatGpt { request_id, params: _, } => { self.login_chatgpt_v1(request_id).await; } ClientRequest::CancelLoginChatGpt { request_id, params } => { self.cancel_login_chatgpt(request_id, params.login_id).await; } ClientRequest::LogoutChatGpt { request_id, params: _, } => { self.logout_v1(request_id).await; } ClientRequest::GetAuthStatus { request_id, params } => { self.get_auth_status(request_id, params).await; } ClientRequest::GetUserSavedConfig { request_id, params: _, } => { self.get_user_saved_config(request_id).await; } ClientRequest::SetDefaultModel { request_id, params } => { self.set_default_model(request_id, params).await; } ClientRequest::GetUserAgent { request_id, params: _, } => { self.get_user_agent(request_id).await; } ClientRequest::UserInfo { request_id, params: _, } => { self.get_user_info(request_id).await; } ClientRequest::FuzzyFileSearch { request_id, params } => { self.fuzzy_file_search(request_id, params).await; } ClientRequest::OneOffCommandExec { request_id, params } => { self.exec_one_off_command(request_id, params).await; } ClientRequest::ExecOneOffCommand { request_id, params } => { self.exec_one_off_command(request_id, params.into()).await; } ClientRequest::ConfigRead { .. } | ClientRequest::ConfigValueWrite { .. } | ClientRequest::ConfigBatchWrite { .. 
} => { warn!("Config request reached CodexMessageProcessor unexpectedly"); } ClientRequest::GetAccountRateLimits { request_id, params: _, } => { self.get_account_rate_limits(request_id).await; } ClientRequest::FeedbackUpload { request_id, params } => { self.upload_feedback(request_id, params).await; } } } async fn login_v2(&mut self, request_id: RequestId, params: LoginAccountParams) { match params { LoginAccountParams::ApiKey { api_key } => { self.login_api_key_v2(request_id, LoginApiKeyParams { api_key }) .await; } LoginAccountParams::Chatgpt => { self.login_chatgpt_v2(request_id).await; } } } async fn login_api_key_common( &mut self, params: &LoginApiKeyParams, ) -> std::result::Result<(), JSONRPCErrorError> { if matches!( self.config.forced_login_method, Some(ForcedLoginMethod::Chatgpt) ) { return Err(JSONRPCErrorError { code: INVALID_REQUEST_ERROR_CODE, message: "API key login is disabled. Use ChatGPT login instead.".to_string(), data: None, }); } // Cancel any active login attempt. 
{ let mut guard = self.active_login.lock().await; if let Some(active) = guard.take() { drop(active); } } match login_with_api_key( &self.config.codex_home, &params.api_key, self.config.cli_auth_credentials_store_mode, ) { Ok(()) => { self.auth_manager.reload(); Ok(()) } Err(err) => Err(JSONRPCErrorError { code: INTERNAL_ERROR_CODE, message: format!("failed to save api key: {err}"), data: None, }), } } async fn login_api_key_v1(&mut self, request_id: RequestId, params: LoginApiKeyParams) { match self.login_api_key_common(&params).await { Ok(()) => { self.outgoing .send_response(request_id, LoginApiKeyResponse {}) .await; let payload = AuthStatusChangeNotification { auth_method: self.auth_manager.auth().map(|auth| auth.mode), }; self.outgoing .send_server_notification(ServerNotification::AuthStatusChange(payload)) .await; } Err(error) => { self.outgoing.send_error(request_id, error).await; } } } async fn login_api_key_v2(&mut self, request_id: RequestId, params: LoginApiKeyParams) { match self.login_api_key_common(&params).await { Ok(()) => { let response = codex_app_server_protocol::LoginAccountResponse::ApiKey {}; self.outgoing.send_response(request_id, response).await; let payload_login_completed = AccountLoginCompletedNotification { login_id: None, success: true, error: None, }; self.outgoing .send_server_notification(ServerNotification::AccountLoginCompleted( payload_login_completed, )) .await; let payload_v2 = AccountUpdatedNotification { auth_mode: self.auth_manager.auth().map(|auth| auth.mode), }; self.outgoing .send_server_notification(ServerNotification::AccountUpdated(payload_v2)) .await; } Err(error) => { self.outgoing.send_error(request_id, error).await; } } } // Build options for a ChatGPT login attempt; performs validation. 
async fn login_chatgpt_common( &self, ) -> std::result::Result<LoginServerOptions, JSONRPCErrorError> { let config = self.config.as_ref(); if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) { return Err(JSONRPCErrorError { code: INVALID_REQUEST_ERROR_CODE, message: "ChatGPT login is disabled. Use API key login instead.".to_string(), data: None, }); } Ok(LoginServerOptions { open_browser: false, ..LoginServerOptions::new( config.codex_home.clone(), CLIENT_ID.to_string(), config.forced_chatgpt_workspace_id.clone(), config.cli_auth_credentials_store_mode, ) }) } // Deprecated in favor of login_chatgpt_v2. async fn login_chatgpt_v1(&mut self, request_id: RequestId) { match self.login_chatgpt_common().await { Ok(opts) => match run_login_server(opts) { Ok(server) => { let login_id = Uuid::new_v4(); let shutdown_handle = server.cancel_handle(); // Replace active login if present. { let mut guard = self.active_login.lock().await; if let Some(existing) = guard.take() { drop(existing); } *guard = Some(ActiveLogin { shutdown_handle: shutdown_handle.clone(), login_id, }); } // Spawn background task to monitor completion. let outgoing_clone = self.outgoing.clone(); let active_login = self.active_login.clone(); let auth_manager = self.auth_manager.clone(); let auth_url = server.auth_url.clone(); tokio::spawn(async move { let (success, error_msg) = match tokio::time::timeout( LOGIN_CHATGPT_TIMEOUT, server.block_until_done(), ) .await { Ok(Ok(())) => (true, None), Ok(Err(err)) => (false, Some(format!("Login server error: {err}"))), Err(_elapsed) => { shutdown_handle.shutdown(); (false, Some("Login timed out".to_string())) } }; let payload = LoginChatGptCompleteNotification { login_id, success, error: error_msg.clone(), }; outgoing_clone .send_server_notification(ServerNotification::LoginChatGptComplete( payload, )) .await; if success { auth_manager.reload(); // Notify clients with the actual current auth mode. 
let current_auth_method = auth_manager.auth().map(|a| a.mode); let payload = AuthStatusChangeNotification { auth_method: current_auth_method, }; outgoing_clone .send_server_notification(ServerNotification::AuthStatusChange( payload, )) .await; } // Clear the active login if it matches this attempt. It may have been replaced or cancelled. let mut guard = active_login.lock().await; if guard.as_ref().map(|l| l.login_id) == Some(login_id) { *guard = None; } }); let response = LoginChatGptResponse { login_id, auth_url }; self.outgoing.send_response(request_id, response).await; } Err(err) => { let error = JSONRPCErrorError { code: INTERNAL_ERROR_CODE, message: format!("failed to start login server: {err}"), data: None, }; self.outgoing.send_error(request_id, error).await; } }, Err(err) => { self.outgoing.send_error(request_id, err).await; } } } async fn login_chatgpt_v2(&mut self, request_id: RequestId) { match self.login_chatgpt_common().await { Ok(opts) => match run_login_server(opts) { Ok(server) => { let login_id = Uuid::new_v4(); let shutdown_handle = server.cancel_handle(); // Replace active login if present. { let mut guard = self.active_login.lock().await; if let Some(existing) = guard.take() { drop(existing); } *guard = Some(ActiveLogin { shutdown_handle: shutdown_handle.clone(), login_id, }); } // Spawn background task to monitor completion. let outgoing_clone = self.outgoing.clone(); let active_login = self.active_login.clone(); let auth_manager = self.auth_manager.clone(); let auth_url = server.auth_url.clone(); tokio::spawn(async move { let (success, error_msg) = match tokio::time::timeout( LOGIN_CHATGPT_TIMEOUT, server.block_until_done(), ) .await { Ok(Ok(())) => (true, None), Ok(Err(err)) => (false, Some(format!("Login server error: {err}"))), Err(_elapsed) => { shutdown_handle.shutdown(); (false, Some("Login timed out".to_string())) } }; let payload_v2 = AccountLoginCompletedNotification {
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
true
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/src/main.rs
codex-rs/app-server/src/main.rs
use codex_app_server::run_main;
use codex_arg0::arg0_dispatch_or_else;
use codex_common::CliConfigOverrides;

/// Binary entry point: routes through arg0 dispatch (so the executable can
/// re-exec as the Linux sandbox helper) before starting the app server with
/// default CLI overrides.
fn main() -> anyhow::Result<()> {
    arg0_dispatch_or_else(|sandbox_exe| async move {
        let overrides = CliConfigOverrides::default();
        run_main(sandbox_exe, overrides).await?;
        Ok(())
    })
}
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/src/outgoing_message.rs
codex-rs/app-server/src/outgoing_message.rs
use std::collections::HashMap; use std::sync::atomic::AtomicI64; use std::sync::atomic::Ordering; use codex_app_server_protocol::JSONRPCErrorError; use codex_app_server_protocol::RequestId; use codex_app_server_protocol::Result; use codex_app_server_protocol::ServerNotification; use codex_app_server_protocol::ServerRequest; use codex_app_server_protocol::ServerRequestPayload; use serde::Serialize; use tokio::sync::Mutex; use tokio::sync::mpsc; use tokio::sync::oneshot; use tracing::warn; use crate::error_code::INTERNAL_ERROR_CODE; #[cfg(test)] use codex_protocol::account::PlanType; /// Sends messages to the client and manages request callbacks. pub(crate) struct OutgoingMessageSender { next_request_id: AtomicI64, sender: mpsc::Sender<OutgoingMessage>, request_id_to_callback: Mutex<HashMap<RequestId, oneshot::Sender<Result>>>, } impl OutgoingMessageSender { pub(crate) fn new(sender: mpsc::Sender<OutgoingMessage>) -> Self { Self { next_request_id: AtomicI64::new(0), sender, request_id_to_callback: Mutex::new(HashMap::new()), } } pub(crate) async fn send_request( &self, request: ServerRequestPayload, ) -> oneshot::Receiver<Result> { let id = RequestId::Integer(self.next_request_id.fetch_add(1, Ordering::Relaxed)); let outgoing_message_id = id.clone(); let (tx_approve, rx_approve) = oneshot::channel(); { let mut request_id_to_callback = self.request_id_to_callback.lock().await; request_id_to_callback.insert(id, tx_approve); } let outgoing_message = OutgoingMessage::Request(request.request_with_id(outgoing_message_id.clone())); if let Err(err) = self.sender.send(outgoing_message).await { warn!("failed to send request {outgoing_message_id:?} to client: {err:?}"); let mut request_id_to_callback = self.request_id_to_callback.lock().await; request_id_to_callback.remove(&outgoing_message_id); } rx_approve } pub(crate) async fn notify_client_response(&self, id: RequestId, result: Result) { let entry = { let mut request_id_to_callback = 
self.request_id_to_callback.lock().await; request_id_to_callback.remove_entry(&id) }; match entry { Some((id, sender)) => { if let Err(err) = sender.send(result) { warn!("could not notify callback for {id:?} due to: {err:?}"); } } None => { warn!("could not find callback for {id:?}"); } } } pub(crate) async fn send_response<T: Serialize>(&self, id: RequestId, response: T) { match serde_json::to_value(response) { Ok(result) => { let outgoing_message = OutgoingMessage::Response(OutgoingResponse { id, result }); if let Err(err) = self.sender.send(outgoing_message).await { warn!("failed to send response to client: {err:?}"); } } Err(err) => { self.send_error( id, JSONRPCErrorError { code: INTERNAL_ERROR_CODE, message: format!("failed to serialize response: {err}"), data: None, }, ) .await; } } } pub(crate) async fn send_server_notification(&self, notification: ServerNotification) { if let Err(err) = self .sender .send(OutgoingMessage::AppServerNotification(notification)) .await { warn!("failed to send server notification to client: {err:?}"); } } /// All notifications should be migrated to [`ServerNotification`] and /// [`OutgoingMessage::Notification`] should be removed. pub(crate) async fn send_notification(&self, notification: OutgoingNotification) { let outgoing_message = OutgoingMessage::Notification(notification); if let Err(err) = self.sender.send(outgoing_message).await { warn!("failed to send notification to client: {err:?}"); } } pub(crate) async fn send_error(&self, id: RequestId, error: JSONRPCErrorError) { let outgoing_message = OutgoingMessage::Error(OutgoingError { id, error }); if let Err(err) = self.sender.send(outgoing_message).await { warn!("failed to send error to client: {err:?}"); } } } /// Outgoing message from the server to the client. 
#[derive(Debug, Clone, Serialize)] #[serde(untagged)] pub(crate) enum OutgoingMessage { Request(ServerRequest), Notification(OutgoingNotification), /// AppServerNotification is specific to the case where this is run as an /// "app server" as opposed to an MCP server. AppServerNotification(ServerNotification), Response(OutgoingResponse), Error(OutgoingError), } #[derive(Debug, Clone, PartialEq, Serialize)] pub(crate) struct OutgoingNotification { pub method: String, #[serde(default, skip_serializing_if = "Option::is_none")] pub params: Option<serde_json::Value>, } #[derive(Debug, Clone, PartialEq, Serialize)] pub(crate) struct OutgoingResponse { pub id: RequestId, pub result: Result, } #[derive(Debug, Clone, PartialEq, Serialize)] pub(crate) struct OutgoingError { pub error: JSONRPCErrorError, pub id: RequestId, } #[cfg(test)] mod tests { use codex_app_server_protocol::AccountLoginCompletedNotification; use codex_app_server_protocol::AccountRateLimitsUpdatedNotification; use codex_app_server_protocol::AccountUpdatedNotification; use codex_app_server_protocol::AuthMode; use codex_app_server_protocol::LoginChatGptCompleteNotification; use codex_app_server_protocol::RateLimitSnapshot; use codex_app_server_protocol::RateLimitWindow; use pretty_assertions::assert_eq; use serde_json::json; use uuid::Uuid; use super::*; #[test] fn verify_server_notification_serialization() { let notification = ServerNotification::LoginChatGptComplete(LoginChatGptCompleteNotification { login_id: Uuid::nil(), success: true, error: None, }); let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification); assert_eq!( json!({ "method": "loginChatGptComplete", "params": { "loginId": Uuid::nil(), "success": true, "error": null, }, }), serde_json::to_value(jsonrpc_notification) .expect("ensure the strum macros serialize the method field correctly"), "ensure the strum macros serialize the method field correctly" ); } #[test] fn 
verify_account_login_completed_notification_serialization() { let notification = ServerNotification::AccountLoginCompleted(AccountLoginCompletedNotification { login_id: Some(Uuid::nil().to_string()), success: true, error: None, }); let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification); assert_eq!( json!({ "method": "account/login/completed", "params": { "loginId": Uuid::nil().to_string(), "success": true, "error": null, }, }), serde_json::to_value(jsonrpc_notification) .expect("ensure the notification serializes correctly"), "ensure the notification serializes correctly" ); } #[test] fn verify_account_rate_limits_notification_serialization() { let notification = ServerNotification::AccountRateLimitsUpdated(AccountRateLimitsUpdatedNotification { rate_limits: RateLimitSnapshot { primary: Some(RateLimitWindow { used_percent: 25, window_duration_mins: Some(15), resets_at: Some(123), }), secondary: None, credits: None, plan_type: Some(PlanType::Plus), }, }); let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification); assert_eq!( json!({ "method": "account/rateLimits/updated", "params": { "rateLimits": { "primary": { "usedPercent": 25, "windowDurationMins": 15, "resetsAt": 123 }, "secondary": null, "credits": null, "planType": "plus" } }, }), serde_json::to_value(jsonrpc_notification) .expect("ensure the notification serializes correctly"), "ensure the notification serializes correctly" ); } #[test] fn verify_account_updated_notification_serialization() { let notification = ServerNotification::AccountUpdated(AccountUpdatedNotification { auth_mode: Some(AuthMode::ApiKey), }); let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification); assert_eq!( json!({ "method": "account/updated", "params": { "authMode": "apikey" }, }), serde_json::to_value(jsonrpc_notification) .expect("ensure the notification serializes correctly"), "ensure the notification serializes correctly" ); } }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/src/fuzzy_file_search.rs
codex-rs/app-server/src/fuzzy_file_search.rs
use std::num::NonZero; use std::num::NonZeroUsize; use std::path::PathBuf; use std::sync::Arc; use std::sync::atomic::AtomicBool; use codex_app_server_protocol::FuzzyFileSearchResult; use codex_file_search as file_search; use tokio::task::JoinSet; use tracing::warn; const LIMIT_PER_ROOT: usize = 50; const MAX_THREADS: usize = 12; const COMPUTE_INDICES: bool = true; pub(crate) async fn run_fuzzy_file_search( query: String, roots: Vec<String>, cancellation_flag: Arc<AtomicBool>, ) -> Vec<FuzzyFileSearchResult> { if roots.is_empty() { return Vec::new(); } #[expect(clippy::expect_used)] let limit_per_root = NonZero::new(LIMIT_PER_ROOT).expect("LIMIT_PER_ROOT should be a valid non-zero usize"); let cores = std::thread::available_parallelism() .map(std::num::NonZero::get) .unwrap_or(1); let threads = cores.min(MAX_THREADS); let threads_per_root = (threads / roots.len()).max(1); let threads = NonZero::new(threads_per_root).unwrap_or(NonZeroUsize::MIN); let mut files: Vec<FuzzyFileSearchResult> = Vec::new(); let mut join_set = JoinSet::new(); for root in roots { let search_dir = PathBuf::from(&root); let query = query.clone(); let cancel_flag = cancellation_flag.clone(); join_set.spawn_blocking(move || { match file_search::run( query.as_str(), limit_per_root, &search_dir, Vec::new(), threads, cancel_flag, COMPUTE_INDICES, true, ) { Ok(res) => Ok((root, res)), Err(err) => Err((root, err)), } }); } while let Some(res) = join_set.join_next().await { match res { Ok(Ok((root, res))) => { for m in res.matches { let path = m.path; let file_name = file_search::file_name_from_path(&path); let result = FuzzyFileSearchResult { root: root.clone(), path, file_name, score: m.score, indices: m.indices, }; files.push(result); } } Ok(Err((root, err))) => { warn!("fuzzy-file-search in dir '{root}' failed: {err}"); } Err(err) => { warn!("fuzzy-file-search join_next failed: {err}"); } } } files.sort_by(file_search::cmp_by_score_desc_then_path_asc::< FuzzyFileSearchResult, _, _, >(|f| f.score, 
|f| f.path.as_str())); files }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/src/models.rs
codex-rs/app-server/src/models.rs
use std::sync::Arc; use codex_app_server_protocol::Model; use codex_app_server_protocol::ReasoningEffortOption; use codex_core::ConversationManager; use codex_core::config::Config; use codex_protocol::openai_models::ModelPreset; use codex_protocol::openai_models::ReasoningEffortPreset; pub async fn supported_models( conversation_manager: Arc<ConversationManager>, config: &Config, ) -> Vec<Model> { conversation_manager .list_models(config) .await .into_iter() .map(model_from_preset) .collect() } fn model_from_preset(preset: ModelPreset) -> Model { Model { id: preset.id.to_string(), model: preset.model.to_string(), display_name: preset.display_name.to_string(), description: preset.description.to_string(), supported_reasoning_efforts: reasoning_efforts_from_preset( preset.supported_reasoning_efforts, ), default_reasoning_effort: preset.default_reasoning_effort, is_default: preset.is_default, } } fn reasoning_efforts_from_preset( efforts: Vec<ReasoningEffortPreset>, ) -> Vec<ReasoningEffortOption> { efforts .iter() .map(|preset| ReasoningEffortOption { reasoning_effort: preset.effort, description: preset.description.to_string(), }) .collect() }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/tests/all.rs
codex-rs/app-server/tests/all.rs
// Single integration test binary that aggregates all test modules. // The submodules live in `tests/suite/`. mod suite;
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false
openai/codex
https://github.com/openai/codex/blob/279283fe02bf0ce7f93a160db34dd8cf9c8f42c8/codex-rs/app-server/tests/suite/config.rs
codex-rs/app-server/tests/suite/config.rs
use anyhow::Result; use app_test_support::McpProcess; use app_test_support::test_tmp_path; use app_test_support::to_response; use codex_app_server_protocol::GetUserSavedConfigResponse; use codex_app_server_protocol::JSONRPCResponse; use codex_app_server_protocol::Profile; use codex_app_server_protocol::RequestId; use codex_app_server_protocol::SandboxSettings; use codex_app_server_protocol::Tools; use codex_app_server_protocol::UserSavedConfig; use codex_core::protocol::AskForApproval; use codex_protocol::config_types::ForcedLoginMethod; use codex_protocol::config_types::ReasoningSummary; use codex_protocol::config_types::SandboxMode; use codex_protocol::config_types::Verbosity; use codex_protocol::openai_models::ReasoningEffort; use pretty_assertions::assert_eq; use std::collections::HashMap; use std::path::Path; use tempfile::TempDir; use tokio::time::timeout; const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10); fn create_config_toml(codex_home: &Path) -> std::io::Result<()> { let writable_root = test_tmp_path(); let config_toml = codex_home.join("config.toml"); std::fs::write( config_toml, format!( r#" model = "gpt-5.1-codex-max" approval_policy = "on-request" sandbox_mode = "workspace-write" model_reasoning_summary = "detailed" model_reasoning_effort = "high" model_verbosity = "medium" profile = "test" forced_chatgpt_workspace_id = "12345678-0000-0000-0000-000000000000" forced_login_method = "chatgpt" [sandbox_workspace_write] writable_roots = [{}] network_access = true exclude_tmpdir_env_var = true exclude_slash_tmp = true [tools] web_search = false view_image = true [profiles.test] model = "gpt-4o" approval_policy = "on-request" model_reasoning_effort = "high" model_reasoning_summary = "detailed" model_verbosity = "medium" model_provider = "openai" chatgpt_base_url = "https://api.chatgpt.com" "#, serde_json::json!(writable_root) ), ) } #[tokio::test(flavor = "multi_thread", worker_threads = 4)] async fn 
get_config_toml_parses_all_fields() -> Result<()> { let codex_home = TempDir::new()?; create_config_toml(codex_home.path())?; let mut mcp = McpProcess::new(codex_home.path()).await?; timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??; let request_id = mcp.send_get_user_saved_config_request().await?; let resp: JSONRPCResponse = timeout( DEFAULT_READ_TIMEOUT, mcp.read_stream_until_response_message(RequestId::Integer(request_id)), ) .await??; let config: GetUserSavedConfigResponse = to_response(resp)?; let writable_root = test_tmp_path(); let expected = GetUserSavedConfigResponse { config: UserSavedConfig { approval_policy: Some(AskForApproval::OnRequest), sandbox_mode: Some(SandboxMode::WorkspaceWrite), sandbox_settings: Some(SandboxSettings { writable_roots: vec![writable_root], network_access: Some(true), exclude_tmpdir_env_var: Some(true), exclude_slash_tmp: Some(true), }), forced_chatgpt_workspace_id: Some("12345678-0000-0000-0000-000000000000".into()), forced_login_method: Some(ForcedLoginMethod::Chatgpt), model: Some("gpt-5.1-codex-max".into()), model_reasoning_effort: Some(ReasoningEffort::High), model_reasoning_summary: Some(ReasoningSummary::Detailed), model_verbosity: Some(Verbosity::Medium), tools: Some(Tools { web_search: Some(false), view_image: Some(true), }), profile: Some("test".to_string()), profiles: HashMap::from([( "test".into(), Profile { model: Some("gpt-4o".into()), approval_policy: Some(AskForApproval::OnRequest), model_reasoning_effort: Some(ReasoningEffort::High), model_reasoning_summary: Some(ReasoningSummary::Detailed), model_verbosity: Some(Verbosity::Medium), model_provider: Some("openai".into()), chatgpt_base_url: Some("https://api.chatgpt.com".into()), }, )]), }, }; assert_eq!(config, expected); Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn get_config_toml_empty() -> Result<()> { let codex_home = TempDir::new()?; let mut mcp = McpProcess::new(codex_home.path()).await?; timeout(DEFAULT_READ_TIMEOUT, 
mcp.initialize()).await??; let request_id = mcp.send_get_user_saved_config_request().await?; let resp: JSONRPCResponse = timeout( DEFAULT_READ_TIMEOUT, mcp.read_stream_until_response_message(RequestId::Integer(request_id)), ) .await??; let config: GetUserSavedConfigResponse = to_response(resp)?; let expected = GetUserSavedConfigResponse { config: UserSavedConfig { approval_policy: None, sandbox_mode: None, sandbox_settings: None, forced_chatgpt_workspace_id: None, forced_login_method: None, model: None, model_reasoning_effort: None, model_reasoning_summary: None, model_verbosity: None, tools: None, profile: None, profiles: HashMap::new(), }, }; assert_eq!(config, expected); Ok(()) }
rust
Apache-2.0
279283fe02bf0ce7f93a160db34dd8cf9c8f42c8
2026-01-04T15:31:59.292600Z
false