Aksel Joonas Reedi committed on
Commit
6a518b4
·
2 Parent(s): 7fa8e8d bc8323d

comprehensive session logging

Browse files

Add session trajectory logging with race-condition-free uploads

.gitignore CHANGED
@@ -15,4 +15,5 @@ wheels/
15
  *.csv
16
  /logs
17
  hf-agent-leaderboard/
18
- .cursor/
 
 
15
  *.csv
16
  /logs
17
  hf-agent-leaderboard/
18
+ .cursor/
19
+ session_logs/
agent/config.py CHANGED
@@ -19,6 +19,9 @@ class Config(BaseModel):
19
 
20
  model_name: str
21
  mcpServers: dict[str, MCPServerConfig] = {}
 
 
 
22
 
23
 
24
  def substitute_env_vars(obj: Any) -> Any:
 
19
 
20
  model_name: str
21
  mcpServers: dict[str, MCPServerConfig] = {}
22
+ save_sessions: bool = True
23
+ session_dataset_repo: str = "smolagents/hf-agent-sessions"
24
+ auto_save_interval: int = 3 # Save every N user turns (0 = disabled)
25
 
26
 
27
  def substitute_env_vars(obj: Any) -> Any:
agent/core/agent_loop.py CHANGED
@@ -255,6 +255,11 @@ class Handlers:
255
  data={"history_size": len(session.context_manager.items)},
256
  )
257
  )
 
 
 
 
 
258
  return final_response
259
 
260
  @staticmethod
@@ -414,6 +419,14 @@ class Handlers:
414
  @staticmethod
415
  async def shutdown(session: Session) -> bool:
416
  """Handle shutdown (like shutdown in codex.rs:1329)"""
 
 
 
 
 
 
 
 
417
  session.is_running = False
418
  await session.send_event(Event(event_type="shutdown"))
419
  return True
@@ -474,26 +487,47 @@ async def submission_loop(
474
  session = Session(event_queue, config=config, tool_router=tool_router)
475
  print("Agent loop started")
476
 
477
- # Main processing loop
478
- async with tool_router:
479
- # Emit ready event after initialization
480
- await session.send_event(
481
- Event(event_type="ready", data={"message": "Agent initialized"})
482
  )
483
 
484
- while session.is_running:
485
- submission = await submission_queue.get()
 
 
 
 
 
486
 
487
- try:
488
- should_continue = await process_submission(session, submission)
489
- if not should_continue:
 
 
 
 
 
 
490
  break
491
- except asyncio.CancelledError:
492
- break
493
- except Exception as e:
494
- print(f" Error in agent loop: {e}")
495
- await session.send_event(
496
- Event(event_type="error", data={"error": str(e)})
497
- )
498
 
499
- print("🛑 Agent loop exited")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
255
  data={"history_size": len(session.context_manager.items)},
256
  )
257
  )
258
+
259
+ # Increment turn counter and check for auto-save
260
+ session.increment_turn()
261
+ await session.auto_save_if_needed()
262
+
263
  return final_response
264
 
265
  @staticmethod
 
419
  @staticmethod
420
  async def shutdown(session: Session) -> bool:
421
  """Handle shutdown (like shutdown in codex.rs:1329)"""
422
+ # Save session trajectory if enabled (fire-and-forget, returns immediately)
423
+ if session.config.save_sessions:
424
+ print("💾 Saving session...")
425
+ repo_id = session.config.session_dataset_repo
426
+ local_path = session.save_and_upload_detached(repo_id)
427
+ if local_path:
428
+ print("✅ Session saved locally, upload in progress")
429
+
430
  session.is_running = False
431
  await session.send_event(Event(event_type="shutdown"))
432
  return True
 
487
  session = Session(event_queue, config=config, tool_router=tool_router)
488
  print("Agent loop started")
489
 
490
+ # Retry any failed uploads from previous sessions (fire-and-forget)
491
+ if config and config.save_sessions:
492
+ Session.retry_failed_uploads_detached(
493
+ directory="session_logs", repo_id=config.session_dataset_repo
 
494
  )
495
 
496
+ try:
497
+ # Main processing loop
498
+ async with tool_router:
499
+ # Emit ready event after initialization
500
+ await session.send_event(
501
+ Event(event_type="ready", data={"message": "Agent initialized"})
502
+ )
503
 
504
+ while session.is_running:
505
+ submission = await submission_queue.get()
506
+
507
+ try:
508
+ should_continue = await process_submission(session, submission)
509
+ if not should_continue:
510
+ break
511
+ except asyncio.CancelledError:
512
+ print("\n⚠️ Agent loop cancelled")
513
  break
514
+ except Exception as e:
515
+ print(f"❌ Error in agent loop: {e}")
516
+ await session.send_event(
517
+ Event(event_type="error", data={"error": str(e)})
518
+ )
 
 
519
 
520
+ print("🛑 Agent loop exited")
521
+
522
+ finally:
523
+ # Emergency save if session saving is enabled and shutdown wasn't called properly
524
+ if session.config.save_sessions and session.is_running:
525
+ print("\n💾 Emergency save: preserving session before exit...")
526
+ try:
527
+ local_path = session.save_and_upload_detached(
528
+ session.config.session_dataset_repo
529
+ )
530
+ if local_path:
531
+ print("✅ Emergency save successful, upload in progress")
532
+ except Exception as e:
533
+ print(f"❌ Emergency save failed: {e}")
agent/core/session.py CHANGED
@@ -1,7 +1,12 @@
1
  import asyncio
 
 
 
2
  import uuid
3
  from dataclasses import dataclass
 
4
  from enum import Enum
 
5
  from typing import Any, Optional
6
 
7
  from litellm import get_max_tokens
@@ -55,11 +60,176 @@ class Session:
55
  self.current_task: asyncio.Task | None = None
56
  self.pending_approval: Optional[dict[str, Any]] = None
57
 
 
 
 
 
 
 
58
  async def send_event(self, event: Event) -> None:
59
- """Send event back to client"""
60
  await self.event_queue.put(event)
61
 
 
 
 
 
 
 
 
 
 
62
  def interrupt(self) -> None:
63
  """Interrupt current running task"""
64
  if self.current_task and not self.current_task.done():
65
  self.current_task.cancel()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import asyncio
2
+ import json
3
+ import subprocess
4
+ import sys
5
  import uuid
6
  from dataclasses import dataclass
7
+ from datetime import datetime
8
  from enum import Enum
9
+ from pathlib import Path
10
  from typing import Any, Optional
11
 
12
  from litellm import get_max_tokens
 
60
  self.current_task: asyncio.Task | None = None
61
  self.pending_approval: Optional[dict[str, Any]] = None
62
 
63
+ # Session trajectory logging
64
+ self.logged_events: list[dict] = []
65
+ self.session_start_time = datetime.now().isoformat()
66
+ self.turn_count: int = 0
67
+ self.last_auto_save_turn: int = 0
68
+
69
  async def send_event(self, event: Event) -> None:
70
+ """Send event back to client and log to trajectory"""
71
  await self.event_queue.put(event)
72
 
73
+ # Log event to trajectory
74
+ self.logged_events.append(
75
+ {
76
+ "timestamp": datetime.now().isoformat(),
77
+ "event_type": event.event_type,
78
+ "data": event.data,
79
+ }
80
+ )
81
+
82
  def interrupt(self) -> None:
83
  """Interrupt current running task"""
84
  if self.current_task and not self.current_task.done():
85
  self.current_task.cancel()
86
+
87
+ def increment_turn(self) -> None:
88
+ """Increment turn counter (called after each user interaction)"""
89
+ self.turn_count += 1
90
+
91
+ async def auto_save_if_needed(self) -> None:
92
+ """Check if auto-save should trigger and save if so (completely non-blocking)"""
93
+ if not self.config.save_sessions:
94
+ return
95
+
96
+ interval = self.config.auto_save_interval
97
+ if interval <= 0:
98
+ return
99
+
100
+ turns_since_last_save = self.turn_count - self.last_auto_save_turn
101
+ if turns_since_last_save >= interval:
102
+ print(f"\n💾 Auto-saving session (turn {self.turn_count})...")
103
+ # Fire-and-forget save - returns immediately
104
+ self.save_and_upload_detached(self.config.session_dataset_repo)
105
+ self.last_auto_save_turn = self.turn_count
106
+
107
+ def get_trajectory(self) -> dict:
108
+ """Serialize complete session trajectory for logging"""
109
+ return {
110
+ "session_id": self.session_id,
111
+ "session_start_time": self.session_start_time,
112
+ "session_end_time": datetime.now().isoformat(),
113
+ "model_name": self.config.model_name,
114
+ "messages": [msg.model_dump() for msg in self.context_manager.items],
115
+ "events": self.logged_events,
116
+ }
117
+
118
+ def save_trajectory_local(
119
+ self,
120
+ directory: str = "session_logs",
121
+ upload_status: str = "pending",
122
+ dataset_url: Optional[str] = None,
123
+ ) -> Optional[str]:
124
+ """
125
+ Save trajectory to local JSON file as backup with upload status
126
+
127
+ Args:
128
+ directory: Directory to save logs (default: "session_logs")
129
+ upload_status: Status of upload attempt ("pending", "success", "failed")
130
+ dataset_url: URL of dataset if upload succeeded
131
+
132
+ Returns:
133
+ Path to saved file if successful, None otherwise
134
+ """
135
+ try:
136
+ log_dir = Path(directory)
137
+ log_dir.mkdir(parents=True, exist_ok=True)
138
+
139
+ trajectory = self.get_trajectory()
140
+
141
+ # Add upload metadata
142
+ trajectory["upload_status"] = upload_status
143
+ trajectory["upload_url"] = dataset_url
144
+ trajectory["last_save_time"] = datetime.now().isoformat()
145
+
146
+ filename = f"session_{self.session_id}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json"
147
+ filepath = log_dir / filename
148
+
149
+ with open(filepath, "w") as f:
150
+ json.dump(trajectory, f, indent=2)
151
+
152
+ return str(filepath)
153
+ except Exception as e:
154
+ print(f"Failed to save session locally: {e}")
155
+ return None
156
+
157
+ def update_local_save_status(
158
+ self, filepath: str, upload_status: str, dataset_url: Optional[str] = None
159
+ ) -> bool:
160
+ """Update the upload status of an existing local save file"""
161
+ try:
162
+ with open(filepath, "r") as f:
163
+ data = json.load(f)
164
+
165
+ data["upload_status"] = upload_status
166
+ data["upload_url"] = dataset_url
167
+ data["last_save_time"] = datetime.now().isoformat()
168
+
169
+ with open(filepath, "w") as f:
170
+ json.dump(data, f, indent=2)
171
+
172
+ return True
173
+ except Exception as e:
174
+ print(f"Failed to update local save status: {e}")
175
+ return False
176
+
177
+ def save_and_upload_detached(self, repo_id: str) -> Optional[str]:
178
+ """
179
+ Save session locally and spawn detached subprocess for upload (fire-and-forget)
180
+
181
+ Args:
182
+ repo_id: HuggingFace dataset repo ID
183
+
184
+ Returns:
185
+ Path to local save file
186
+ """
187
+ # Save locally first (fast, synchronous)
188
+ local_path = self.save_trajectory_local(upload_status="pending")
189
+ if not local_path:
190
+ return None
191
+
192
+ # Spawn detached subprocess for upload (fire-and-forget)
193
+ try:
194
+ uploader_script = Path(__file__).parent / "session_uploader.py"
195
+
196
+ # Use Popen with detached process
197
+ subprocess.Popen(
198
+ [sys.executable, str(uploader_script), "upload", local_path, repo_id],
199
+ stdin=subprocess.DEVNULL,
200
+ stdout=subprocess.DEVNULL,
201
+ stderr=subprocess.DEVNULL,
202
+ start_new_session=True, # Detach from parent
203
+ )
204
+ except Exception as e:
205
+ print(f"⚠️ Failed to spawn upload subprocess: {e}")
206
+
207
+ return local_path
208
+
209
+ @staticmethod
210
+ def retry_failed_uploads_detached(
211
+ directory: str = "session_logs", repo_id: Optional[str] = None
212
+ ) -> None:
213
+ """
214
+ Spawn detached subprocess to retry failed/pending uploads (fire-and-forget)
215
+
216
+ Args:
217
+ directory: Directory containing session logs
218
+ repo_id: Target dataset repo ID
219
+ """
220
+ if not repo_id:
221
+ return
222
+
223
+ try:
224
+ uploader_script = Path(__file__).parent / "session_uploader.py"
225
+
226
+ # Spawn detached subprocess for retry
227
+ subprocess.Popen(
228
+ [sys.executable, str(uploader_script), "retry", directory, repo_id],
229
+ stdin=subprocess.DEVNULL,
230
+ stdout=subprocess.DEVNULL,
231
+ stderr=subprocess.DEVNULL,
232
+ start_new_session=True, # Detach from parent
233
+ )
234
+ except Exception as e:
235
+ print(f"⚠️ Failed to spawn retry subprocess: {e}")
agent/core/session_uploader.py ADDED
@@ -0,0 +1,194 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Standalone script for uploading session trajectories to HuggingFace.
4
+ This runs as a separate process to avoid blocking the main agent.
5
+ Uses individual file uploads to avoid race conditions.
6
+ """
7
+
8
+ import json
9
+ import os
10
+ import sys
11
+ from datetime import datetime
12
+ from pathlib import Path
13
+
14
+
15
def upload_session_as_file(
    session_file: str, repo_id: str, max_retries: int = 3
) -> bool:
    """
    Upload a single session as an individual JSONL file (no race conditions)

    Each session becomes its own file under sessions/YYYY-MM-DD/<id>.jsonl,
    so concurrent uploaders never rewrite a shared file.

    Args:
        session_file: Path to local session JSON file
        repo_id: HuggingFace dataset repo ID
        max_retries: Number of retry attempts (exponential backoff between them)

    Returns:
        True if successful, False otherwise
    """
    # Hoisted from mid-function: keep all imports at the top of the unit.
    import tempfile
    import time

    try:
        from huggingface_hub import HfApi
    except ImportError:
        print("Error: huggingface_hub library not available", file=sys.stderr)
        return False

    try:
        # Load session data
        with open(session_file, "r") as f:
            data = json.load(f)

        # Skip sessions that were already uploaded successfully (idempotent).
        if data.get("upload_status") == "success":
            return True

        hf_token = os.getenv("HF_TOKEN")
        if not hf_token:
            # Fix: report the reason instead of failing silently.
            print("Error: HF_TOKEN not set; marking upload as failed", file=sys.stderr)
            data["upload_status"] = "failed"
            with open(session_file, "w") as f:
                json.dump(data, f, indent=2)
            return False

        # Store messages and events as JSON strings to avoid schema conflicts
        # across sessions with differing message structures.
        session_row = {
            "session_id": data["session_id"],
            "session_start_time": data["session_start_time"],
            "session_end_time": data["session_end_time"],
            "model_name": data["model_name"],
            "messages": json.dumps(data["messages"]),
            "events": json.dumps(data["events"]),
        }

        # Write the single-record JSONL payload to a temp file.
        with tempfile.NamedTemporaryFile(
            mode="w", suffix=".jsonl", delete=False
        ) as tmp:
            json.dump(session_row, tmp)
            tmp.write("\n")  # fix: JSONL records are newline-terminated
            tmp_path = tmp.name

        try:
            # Unique path in repo: sessions/YYYY-MM-DD/session_id.jsonl
            session_id = data["session_id"]
            date_str = datetime.fromisoformat(data["session_start_time"]).strftime(
                "%Y-%m-%d"
            )
            repo_path = f"sessions/{date_str}/{session_id}.jsonl"

            api = HfApi()
            for attempt in range(max_retries):
                try:
                    # exist_ok=True already makes this idempotent; a failure
                    # here is a real auth/network error, so let it fall into
                    # the retry handling instead of swallowing it (fix: the
                    # old inner try/except hid fatal errors).
                    api.create_repo(
                        repo_id=repo_id,
                        repo_type="dataset",
                        private=True,
                        token=hf_token,
                        exist_ok=True,
                    )

                    # Upload the session file
                    api.upload_file(
                        path_or_fileobj=tmp_path,
                        path_in_repo=repo_path,
                        repo_id=repo_id,
                        repo_type="dataset",
                        token=hf_token,
                        commit_message=f"Add session {session_id}",
                    )

                    # Update local status to success
                    data["upload_status"] = "success"
                    data["upload_url"] = f"https://huggingface.co/datasets/{repo_id}"
                    with open(session_file, "w") as f:
                        json.dump(data, f, indent=2)
                    return True

                except Exception:
                    if attempt < max_retries - 1:
                        time.sleep(2**attempt)  # exponential backoff
                    else:
                        # Final attempt failed — record it for the retry sweep.
                        data["upload_status"] = "failed"
                        with open(session_file, "w") as f:
                            json.dump(data, f, indent=2)
                        return False

        finally:
            # Clean up temp file regardless of outcome.
            try:
                os.unlink(tmp_path)
            except Exception:
                pass

    except Exception as e:
        print(f"Error uploading session: {e}", file=sys.stderr)
        return False
140
+
141
+
142
def retry_failed_uploads(directory: str, repo_id: str):
    """Retry all failed/pending uploads in a directory

    Best-effort sweep: problems with one file are reported to stderr but
    never abort processing of the remaining files (fix: the previous bare
    `except: pass` silently hid corrupt/unreadable session files).
    """
    log_dir = Path(directory)
    if not log_dir.exists():
        return

    for filepath in log_dir.glob("session_*.json"):
        try:
            with open(filepath, "r") as f:
                data = json.load(f)

            # Only retry sessions that never made it to the hub.
            if data.get("upload_status", "unknown") in ("pending", "failed"):
                upload_session_as_file(str(filepath), repo_id)

        except Exception as e:
            print(f"Warning: could not process {filepath}: {e}", file=sys.stderr)
163
+
164
+
165
if __name__ == "__main__":
    # Tiny CLI, invoked as a detached subprocess by the agent:
    #   session_uploader.py upload <session_file> <repo_id>
    #   session_uploader.py retry  <directory>    <repo_id>
    argv = sys.argv
    if len(argv) < 3:
        print("Usage: session_uploader.py <command> <args...>")
        sys.exit(1)

    command = argv[1]

    if command == "upload":
        if len(argv) < 4:
            print("Usage: session_uploader.py upload <session_file> <repo_id>")
            sys.exit(1)
        ok = upload_session_as_file(argv[2], argv[3])
        sys.exit(0 if ok else 1)

    elif command == "retry":
        if len(argv) < 4:
            print("Usage: session_uploader.py retry <directory> <repo_id>")
            sys.exit(1)
        retry_failed_uploads(argv[2], argv[3])
        sys.exit(0)

    else:
        print(f"Unknown command: {command}")
        sys.exit(1)
agent/main.py CHANGED
@@ -415,8 +415,7 @@ async def main():
415
  )
416
  await submission_queue.put(shutdown_submission)
417
 
418
- # Wait for tasks to complete
419
- await asyncio.wait_for(agent_task, timeout=2.0)
420
  listener_task.cancel()
421
 
422
  print("✨ Goodbye!\n")
 
415
  )
416
  await submission_queue.put(shutdown_submission)
417
 
418
+ await asyncio.wait_for(agent_task, timeout=5.0)
 
419
  listener_task.cancel()
420
 
421
  print("✨ Goodbye!\n")
configs/main_agent_config.json CHANGED
@@ -1,5 +1,7 @@
1
  {
2
- "model_name": "anthropic/claude-sonnet-4-5-20250929",
 
 
3
  "mcpServers": {
4
  "hf-mcp-server": {
5
  "transport": "http",
 
1
  {
2
+ "model_name": "anthropic/claude-opus-4-5-20251101",
3
+ "save_sessions": true,
4
+ "session_dataset_repo": "smolagents/hf-agent-sessions",
5
  "mcpServers": {
6
  "hf-mcp-server": {
7
  "transport": "http",
pyproject.toml CHANGED
@@ -5,6 +5,7 @@ description = "Add your description here"
5
  readme = "README.md"
6
  requires-python = ">=3.12"
7
  dependencies = [
 
8
  # Core dependencies (always required)
9
  "pydantic>=2.12.3",
10
  "python-dotenv>=1.2.1",
@@ -22,6 +23,7 @@ agent = [
22
  "thefuzz>=0.22.1",
23
  "nbconvert>=7.16.6",
24
  "nbformat>=5.10.4",
 
25
  ]
26
 
27
  # Evaluation/benchmarking dependencies
 
5
  readme = "README.md"
6
  requires-python = ">=3.12"
7
  dependencies = [
8
+ "datasets>=4.4.1",
9
  # Core dependencies (always required)
10
  "pydantic>=2.12.3",
11
  "python-dotenv>=1.2.1",
 
23
  "thefuzz>=0.22.1",
24
  "nbconvert>=7.16.6",
25
  "nbformat>=5.10.4",
26
+ "datasets>=4.3.0", # For session logging to HF datasets
27
  ]
28
 
29
  # Evaluation/benchmarking dependencies
uv.lock CHANGED
@@ -902,12 +902,14 @@ name = "hf-agent"
902
  version = "0.1.0"
903
  source = { virtual = "." }
904
  dependencies = [
 
905
  { name = "pydantic" },
906
  { name = "python-dotenv" },
907
  ]
908
 
909
  [package.optional-dependencies]
910
  agent = [
 
911
  { name = "fastmcp" },
912
  { name = "huggingface-hub" },
913
  { name = "litellm" },
@@ -946,6 +948,8 @@ eval = [
946
 
947
  [package.metadata]
948
  requires-dist = [
 
 
949
  { name = "datasets", marker = "extra == 'eval'", specifier = ">=4.3.0" },
950
  { name = "fastmcp", marker = "extra == 'agent'", specifier = ">=2.4.0" },
951
  { name = "hf-agent", extras = ["agent", "eval", "dev"], marker = "extra == 'all'" },
 
902
  version = "0.1.0"
903
  source = { virtual = "." }
904
  dependencies = [
905
+ { name = "datasets" },
906
  { name = "pydantic" },
907
  { name = "python-dotenv" },
908
  ]
909
 
910
  [package.optional-dependencies]
911
  agent = [
912
+ { name = "datasets" },
913
  { name = "fastmcp" },
914
  { name = "huggingface-hub" },
915
  { name = "litellm" },
 
948
 
949
  [package.metadata]
950
  requires-dist = [
951
+ { name = "datasets", specifier = ">=4.4.1" },
952
+ { name = "datasets", marker = "extra == 'agent'", specifier = ">=4.3.0" },
953
  { name = "datasets", marker = "extra == 'eval'", specifier = ">=4.3.0" },
954
  { name = "fastmcp", marker = "extra == 'agent'", specifier = ">=2.4.0" },
955
  { name = "hf-agent", extras = ["agent", "eval", "dev"], marker = "extra == 'all'" },