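"""Agent session state: event queueing, trajectory logging, and detached uploads."""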
import asyncio
import json
import logging
import subprocess
import sys
import uuid
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from pathlib import Path
from typing import Any, Optional
from agent.config import Config
from agent.context_manager.manager import ContextManager
logger = logging.getLogger(__name__)
# Local max-token lookup — avoids litellm.get_max_tokens() which can hang
# on network calls for certain providers (known litellm issue).
_MAX_TOKENS_MAP: dict[str, int] = {
# Anthropic
"anthropic/claude-opus-4-5-20251101": 200_000,
"anthropic/claude-sonnet-4-5-20250929": 200_000,
"anthropic/claude-sonnet-4-20250514": 200_000,
"anthropic/claude-haiku-3-5-20241022": 200_000,
"anthropic/claude-3-5-sonnet-20241022": 200_000,
"anthropic/claude-3-opus-20240229": 200_000,
"huggingface/novita/MiniMaxAI/MiniMax-M2.1": 196_608,
"huggingface/novita/moonshotai/Kimi-K2.5": 262_144,
"huggingface/novita/zai-org/GLM-5": 200_000,
}
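# Conservative fallback for models missing from the map above.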
_DEFAULT_MAX_TOKENS = 200_000
def _get_max_tokens_safe(model_name: str) -> int:
"""Return the max context window for a model without network calls."""
tokens = _MAX_TOKENS_MAP.get(model_name)
if tokens:
return tokens
    # Fallback: ask litellm directly. Note that this can still block on
    # network lookups for some providers, so prefer adding new models to
    # the map above.
    try:
        from litellm import get_max_tokens

        result = get_max_tokens(model_name)
        if isinstance(result, int) and result > 0:
return result
logger.warning(
f"get_max_tokens returned {result} for {model_name}, using default"
)
return _DEFAULT_MAX_TOKENS
except Exception as e:
logger.warning(f"get_max_tokens failed for {model_name}, using default: {e}")
return _DEFAULT_MAX_TOKENS
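# Illustrative behavior given the map above (not an exhaustive contract):
#   _get_max_tokens_safe("huggingface/novita/zai-org/GLM-5")  # -> 200_000 (mapped)
#   _get_max_tokens_safe("unknown/model")  # -> litellm lookup, else _DEFAULT_MAX_TOKENS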
class OpType(Enum):
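    """Operation types for requests submitted to the session."""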
USER_INPUT = "user_input"
EXEC_APPROVAL = "exec_approval"
INTERRUPT = "interrupt"
UNDO = "undo"
COMPACT = "compact"
SHUTDOWN = "shutdown"
@dataclass
class Event:
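    """A single event sent to the client and recorded in the session trajectory."""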
event_type: str
data: Optional[dict[str, Any]] = None
class Session:
"""
Maintains agent session state
Similar to Session in codex-rs/core/src/codex.rs
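
    Example (illustrative sketch; the event type and payload are made up)::

        queue: asyncio.Queue = asyncio.Queue()
        session = Session(event_queue=queue)  # falls back to a default Config
        # from within an async function:
        await session.send_event(Event("status", {"msg": "ready"}))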
"""
def __init__(
self,
event_queue: asyncio.Queue,
config: Config | None = None,
tool_router=None,
context_manager: ContextManager | None = None,
):
        self.tool_router = tool_router
        tool_specs = tool_router.get_tool_specs_for_llm() if tool_router else []
        self.event_queue = event_queue
        self.session_id = str(uuid.uuid4())
        # Resolve the config first: it is used below and may be None.
        self.config = config or Config(
            model_name="anthropic/claude-sonnet-4-5-20250929",
        )
        self.context_manager = context_manager or ContextManager(
            max_context=_get_max_tokens_safe(self.config.model_name),
            compact_size=0.1,
            untouched_messages=5,
            tool_specs=tool_specs,
        )
self.is_running = True
self.current_task: asyncio.Task | None = None
self.pending_approval: Optional[dict[str, Any]] = None
# User's HF OAuth token — set by session_manager after construction
self.hf_token: Optional[str] = None
# Session folder for file isolation
self.session_folder: Optional[str] = None
# Session trajectory logging
self.logged_events: list[dict] = []
self.session_start_time = datetime.now().isoformat()
self.turn_count: int = 0
self.last_auto_save_turn: int = 0
# Generated files tracking
self.generated_files: list[dict] = []
# Thinking chain tracking
self.thinking_steps: list[dict] = []
self.current_thinking_step: dict | None = None
self.execution_plan: list[dict] = []
async def send_event(self, event: Event) -> None:
"""Send event back to client and log to trajectory"""
await self.event_queue.put(event)
# Log event to trajectory
self.logged_events.append(
{
"timestamp": datetime.now().isoformat(),
"event_type": event.event_type,
"data": event.data,
}
)
def interrupt(self) -> None:
"""Interrupt current running task"""
if self.current_task and not self.current_task.done():
self.current_task.cancel()
def increment_turn(self) -> None:
"""Increment turn counter (called after each user interaction)"""
self.turn_count += 1
async def auto_save_if_needed(self) -> None:
"""Check if auto-save should trigger and save if so (completely non-blocking)"""
if not self.config.save_sessions:
return
interval = self.config.auto_save_interval
if interval <= 0:
return
turns_since_last_save = self.turn_count - self.last_auto_save_turn
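        # e.g. with auto_save_interval=5, saves fire on turns 5, 10, 15, ...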
if turns_since_last_save >= interval:
logger.info(f"Auto-saving session (turn {self.turn_count})...")
# Fire-and-forget save - returns immediately
self.save_and_upload_detached(self.config.session_dataset_repo)
self.last_auto_save_turn = self.turn_count
def add_generated_file(self, file_data: dict) -> None:
"""Add a generated file to the session tracking."""
self.generated_files.append(file_data)
def get_generated_files(self) -> list[dict]:
"""Get all generated files for this session."""
return self.generated_files
def add_thinking_step(self, step: dict) -> None:
"""Add a thinking step to the chain."""
self.thinking_steps.append(step)
self.current_thinking_step = step
def update_thinking_step(self, step_id: str, updates: dict) -> None:
"""Update a thinking step with new data."""
for step in self.thinking_steps:
if step.get("id") == step_id:
step.update(updates)
break
def set_execution_plan(self, plan: list[dict]) -> None:
"""Set the execution plan for the current turn."""
self.execution_plan = plan
def clear_thinking_chain(self) -> None:
"""Clear the thinking chain for a new turn."""
self.thinking_steps = []
self.current_thinking_step = None
self.execution_plan = []
def get_trajectory(self) -> dict:
"""Serialize complete session trajectory for logging"""
return {
"session_id": self.session_id,
"session_start_time": self.session_start_time,
"session_end_time": datetime.now().isoformat(),
"model_name": self.config.model_name,
"messages": [msg.model_dump() for msg in self.context_manager.items],
"events": self.logged_events,
}
def save_trajectory_local(
self,
directory: str = "session_logs",
upload_status: str = "pending",
dataset_url: Optional[str] = None,
) -> Optional[str]:
"""
Save trajectory to local JSON file as backup with upload status
Args:
directory: Directory to save logs (default: "session_logs")
upload_status: Status of upload attempt ("pending", "success", "failed")
dataset_url: URL of dataset if upload succeeded
Returns:
Path to saved file if successful, None otherwise
"""
try:
log_dir = Path(directory)
log_dir.mkdir(parents=True, exist_ok=True)
trajectory = self.get_trajectory()
# Add upload metadata
trajectory["upload_status"] = upload_status
trajectory["upload_url"] = dataset_url
trajectory["last_save_time"] = datetime.now().isoformat()
filename = f"session_{self.session_id}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json"
filepath = log_dir / filename
            with open(filepath, "w", encoding="utf-8") as f:
json.dump(trajectory, f, indent=2)
return str(filepath)
except Exception as e:
logger.error(f"Failed to save session locally: {e}")
return None
def save_and_upload_detached(self, repo_id: str) -> Optional[str]:
"""
Save session locally and spawn detached subprocess for upload (fire-and-forget)
Args:
repo_id: HuggingFace dataset repo ID
Returns:
Path to local save file
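
        Note:
            The child process runs ``session_uploader.py upload <local_path> <repo_id>``
            fully detached, so this call does not wait on the network.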
"""
# Save locally first (fast, synchronous)
local_path = self.save_trajectory_local(upload_status="pending")
if not local_path:
return None
# Spawn detached subprocess for upload (fire-and-forget)
try:
uploader_script = Path(__file__).parent / "session_uploader.py"
# Use Popen with detached process
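            # start_new_session=True runs the child under setsid() (POSIX), so the
            # upload can outlive this process if it exits first.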
subprocess.Popen(
[sys.executable, str(uploader_script), "upload", local_path, repo_id],
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
start_new_session=True, # Detach from parent
)
except Exception as e:
logger.warning(f"Failed to spawn upload subprocess: {e}")
return local_path
@staticmethod
def retry_failed_uploads_detached(
directory: str = "session_logs", repo_id: Optional[str] = None
) -> None:
"""
Spawn detached subprocess to retry failed/pending uploads (fire-and-forget)
Args:
directory: Directory containing session logs
repo_id: Target dataset repo ID
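
        No-op when repo_id is falsy; spawn failures are logged, not raised.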
"""
if not repo_id:
return
try:
uploader_script = Path(__file__).parent / "session_uploader.py"
# Spawn detached subprocess for retry
subprocess.Popen(
[sys.executable, str(uploader_script), "retry", directory, repo_id],
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
start_new_session=True, # Detach from parent
)
except Exception as e:
logger.warning(f"Failed to spawn retry subprocess: {e}")