Spaces:
Running
Running
Commit
·
bcbd2ec
1
Parent(s):
39f0c54
Security: Fix critical vulnerabilities before public release
Browse files- Restrict CORS to specific allowed origins (was wildcard *)
- Add path traversal protection in agentgraph.py
- Remove error detail leakage in 46 API endpoints (no longer expose str(e))
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
- backend/app.py +12 -3
- backend/routers/agentgraph.py +49 -19
- backend/routers/graph_comparison.py +5 -5
- backend/routers/knowledge_graphs.py +13 -13
- backend/routers/methods.py +5 -5
- backend/routers/observability.py +9 -9
- backend/routers/temporal_graphs.py +3 -3
- backend/routers/traces.py +12 -12
backend/app.py
CHANGED
|
@@ -47,13 +47,22 @@ logger = logging.getLogger("agent_monitoring_server")
|
|
| 47 |
# Create FastAPI app
|
| 48 |
app = FastAPI(title="Agent Monitoring System", version="1.0.0")
|
| 49 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 50 |
# Add CORS middleware (first, so it's outermost)
|
| 51 |
app.add_middleware(
|
| 52 |
CORSMiddleware,
|
| 53 |
-
allow_origins=["*"],
|
| 54 |
allow_credentials=True,
|
| 55 |
-
allow_methods=["*"],
|
| 56 |
-
allow_headers=["*"],
|
| 57 |
)
|
| 58 |
|
| 59 |
# IMPORTANT: Middleware runs in REVERSE order of add_middleware() calls!
|
|
|
|
| 47 |
# Create FastAPI app
|
| 48 |
app = FastAPI(title="Agent Monitoring System", version="1.0.0")
|
| 49 |
|
| 50 |
+
# Define allowed CORS origins (security fix: restrict from wildcard)
|
| 51 |
+
ALLOWED_ORIGINS = [
|
| 52 |
+
"http://localhost:3001",
|
| 53 |
+
"http://localhost:5280",
|
| 54 |
+
"http://localhost:7860",
|
| 55 |
+
"https://holistic-ai-agentgraph.hf.space",
|
| 56 |
+
"https://huggingface.co",
|
| 57 |
+
]
|
| 58 |
+
|
| 59 |
# Add CORS middleware (first, so it's outermost)
|
| 60 |
app.add_middleware(
|
| 61 |
CORSMiddleware,
|
| 62 |
+
allow_origins=ALLOWED_ORIGINS,
|
| 63 |
allow_credentials=True,
|
| 64 |
+
allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
|
| 65 |
+
allow_headers=["Content-Type", "Authorization"],
|
| 66 |
)
|
| 67 |
|
| 68 |
# IMPORTANT: Middleware runs in REVERSE order of add_middleware() calls!
|
backend/routers/agentgraph.py
CHANGED
|
@@ -1,15 +1,29 @@
|
|
| 1 |
from fastapi import APIRouter, Request
|
| 2 |
from fastapi.responses import HTMLResponse, FileResponse, JSONResponse
|
| 3 |
-
import os
|
| 4 |
|
| 5 |
router = APIRouter()
|
| 6 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 7 |
@router.get("/agentgraph", response_class=HTMLResponse)
|
| 8 |
async def agentgraph_interface(request: Request):
|
| 9 |
"""Serve the React-based AgentGraph interface (requires authentication)"""
|
| 10 |
# Serve the built React app from the new location
|
| 11 |
-
dist_path = "frontend/dist/index.html"
|
| 12 |
-
if os.path.exists(dist_path):
|
| 13 |
with open(dist_path, 'r') as f:
|
| 14 |
content = f.read()
|
| 15 |
return HTMLResponse(content=content)
|
|
@@ -20,29 +34,45 @@ async def agentgraph_interface(request: Request):
|
|
| 20 |
status_code=503
|
| 21 |
)
|
| 22 |
|
|
|
|
| 23 |
@router.get("/agentgraph/{path:path}")
|
| 24 |
async def agentgraph_assets(path: str):
|
| 25 |
-
"""Serve static assets for the React app"""
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
return JSONResponse(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 31 |
|
| 32 |
@router.get("/assets/{path:path}")
|
| 33 |
async def serve_assets(path: str):
|
| 34 |
-
"""Serve React assets from /assets/ path"""
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
return JSONResponse(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 40 |
|
| 41 |
@router.get("/vite.svg")
|
| 42 |
async def serve_vite_svg():
|
| 43 |
"""Serve the vite.svg favicon"""
|
| 44 |
-
file_path = "frontend/dist/vite.svg"
|
| 45 |
-
if os.path.exists(file_path):
|
| 46 |
return FileResponse(file_path)
|
| 47 |
-
|
| 48 |
-
return JSONResponse(content={"error": "Favicon not found"}, status_code=404)
|
|
|
|
| 1 |
from fastapi import APIRouter, Request
|
| 2 |
from fastapi.responses import HTMLResponse, FileResponse, JSONResponse
|
| 3 |
+
from pathlib import Path
|
| 4 |
|
| 5 |
router = APIRouter()
|
| 6 |
|
| 7 |
+
# Security: Define base directories for path traversal protection
|
| 8 |
+
DIST_DIR = Path("frontend/dist").resolve()
|
| 9 |
+
ASSETS_DIR = Path("frontend/dist/assets").resolve()
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def is_safe_path(base_dir: Path, requested_path: Path) -> bool:
|
| 13 |
+
"""Check if the requested path is within the allowed base directory"""
|
| 14 |
+
try:
|
| 15 |
+
resolved = requested_path.resolve()
|
| 16 |
+
return str(resolved).startswith(str(base_dir))
|
| 17 |
+
except (OSError, ValueError):
|
| 18 |
+
return False
|
| 19 |
+
|
| 20 |
+
|
| 21 |
@router.get("/agentgraph", response_class=HTMLResponse)
|
| 22 |
async def agentgraph_interface(request: Request):
|
| 23 |
"""Serve the React-based AgentGraph interface (requires authentication)"""
|
| 24 |
# Serve the built React app from the new location
|
| 25 |
+
dist_path = DIST_DIR / "index.html"
|
| 26 |
+
if dist_path.exists():
|
| 27 |
with open(dist_path, 'r') as f:
|
| 28 |
content = f.read()
|
| 29 |
return HTMLResponse(content=content)
|
|
|
|
| 34 |
status_code=503
|
| 35 |
)
|
| 36 |
|
| 37 |
+
|
| 38 |
@router.get("/agentgraph/{path:path}")
|
| 39 |
async def agentgraph_assets(path: str):
|
| 40 |
+
"""Serve static assets for the React app with path traversal protection"""
|
| 41 |
+
requested_path = (DIST_DIR / path).resolve()
|
| 42 |
+
|
| 43 |
+
# Security: Prevent path traversal attacks
|
| 44 |
+
if not is_safe_path(DIST_DIR, requested_path):
|
| 45 |
+
return JSONResponse(
|
| 46 |
+
content={"error": "Access denied"},
|
| 47 |
+
status_code=403
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
if requested_path.is_file():
|
| 51 |
+
return FileResponse(requested_path)
|
| 52 |
+
return JSONResponse(content={"error": "File not found"}, status_code=404)
|
| 53 |
+
|
| 54 |
|
| 55 |
@router.get("/assets/{path:path}")
|
| 56 |
async def serve_assets(path: str):
|
| 57 |
+
"""Serve React assets from /assets/ path with path traversal protection"""
|
| 58 |
+
requested_path = (ASSETS_DIR / path).resolve()
|
| 59 |
+
|
| 60 |
+
# Security: Prevent path traversal attacks
|
| 61 |
+
if not is_safe_path(ASSETS_DIR, requested_path):
|
| 62 |
+
return JSONResponse(
|
| 63 |
+
content={"error": "Access denied"},
|
| 64 |
+
status_code=403
|
| 65 |
+
)
|
| 66 |
+
|
| 67 |
+
if requested_path.is_file():
|
| 68 |
+
return FileResponse(requested_path)
|
| 69 |
+
return JSONResponse(content={"error": "Asset not found"}, status_code=404)
|
| 70 |
+
|
| 71 |
|
| 72 |
@router.get("/vite.svg")
|
| 73 |
async def serve_vite_svg():
|
| 74 |
"""Serve the vite.svg favicon"""
|
| 75 |
+
file_path = DIST_DIR / "vite.svg"
|
| 76 |
+
if file_path.exists():
|
| 77 |
return FileResponse(file_path)
|
| 78 |
+
return JSONResponse(content={"error": "Favicon not found"}, status_code=404)
|
|
|
backend/routers/graph_comparison.py
CHANGED
|
@@ -103,7 +103,7 @@ async def list_available_graphs(db: Session = Depends(get_db)):
|
|
| 103 |
|
| 104 |
except Exception as e:
|
| 105 |
logger.error(f"Error listing graphs: {str(e)}")
|
| 106 |
-
raise HTTPException(status_code=500, detail=str(e))
|
| 107 |
|
| 108 |
def _format_graph_data(graph: KnowledgeGraph) -> Dict[str, Any]:
|
| 109 |
"""Helper function to format graph data consistently"""
|
|
@@ -250,7 +250,7 @@ async def compare_graphs(
|
|
| 250 |
raise
|
| 251 |
except Exception as e:
|
| 252 |
logger.error(f"Error comparing graphs {graph1_id} and {graph2_id}: {str(e)}")
|
| 253 |
-
raise HTTPException(status_code=500, detail=str(e))
|
| 254 |
|
| 255 |
@router.get("/compare/{graph1_id}/{graph2_id}")
|
| 256 |
async def get_comparison(
|
|
@@ -359,7 +359,7 @@ async def get_graph_details(graph_id: int, db: Session = Depends(get_db)):
|
|
| 359 |
raise
|
| 360 |
except Exception as e:
|
| 361 |
logger.error(f"Error getting graph details for {graph_id}: {str(e)}")
|
| 362 |
-
raise HTTPException(status_code=500, detail=str(e))
|
| 363 |
|
| 364 |
@router.get("/cache/info")
|
| 365 |
async def get_cache_info():
|
|
@@ -381,7 +381,7 @@ async def get_cache_info():
|
|
| 381 |
|
| 382 |
except Exception as e:
|
| 383 |
logger.error(f"Error getting cache info: {str(e)}")
|
| 384 |
-
raise HTTPException(status_code=500, detail=str(e))
|
| 385 |
|
| 386 |
@router.delete("/cache/clear")
|
| 387 |
async def clear_cache():
|
|
@@ -408,4 +408,4 @@ async def clear_cache():
|
|
| 408 |
raise
|
| 409 |
except Exception as e:
|
| 410 |
logger.error(f"Error clearing cache: {str(e)}")
|
| 411 |
-
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
| 103 |
|
| 104 |
except Exception as e:
|
| 105 |
logger.error(f"Error listing graphs: {str(e)}")
|
| 106 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while listing graphs")
|
| 107 |
|
| 108 |
def _format_graph_data(graph: KnowledgeGraph) -> Dict[str, Any]:
|
| 109 |
"""Helper function to format graph data consistently"""
|
|
|
|
| 250 |
raise
|
| 251 |
except Exception as e:
|
| 252 |
logger.error(f"Error comparing graphs {graph1_id} and {graph2_id}: {str(e)}")
|
| 253 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while comparing graphs")
|
| 254 |
|
| 255 |
@router.get("/compare/{graph1_id}/{graph2_id}")
|
| 256 |
async def get_comparison(
|
|
|
|
| 359 |
raise
|
| 360 |
except Exception as e:
|
| 361 |
logger.error(f"Error getting graph details for {graph_id}: {str(e)}")
|
| 362 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while getting graph details")
|
| 363 |
|
| 364 |
@router.get("/cache/info")
|
| 365 |
async def get_cache_info():
|
|
|
|
| 381 |
|
| 382 |
except Exception as e:
|
| 383 |
logger.error(f"Error getting cache info: {str(e)}")
|
| 384 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while getting cache info")
|
| 385 |
|
| 386 |
@router.delete("/cache/clear")
|
| 387 |
async def clear_cache():
|
|
|
|
| 408 |
raise
|
| 409 |
except Exception as e:
|
| 410 |
logger.error(f"Error clearing cache: {str(e)}")
|
| 411 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while clearing cache")
|
backend/routers/knowledge_graphs.py
CHANGED
|
@@ -54,7 +54,7 @@ async def get_knowledge_graphs(db: Session = Depends(get_db_session)):
|
|
| 54 |
except Exception as e:
|
| 55 |
raise HTTPException(
|
| 56 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 57 |
-
detail=
|
| 58 |
)
|
| 59 |
|
| 60 |
|
|
@@ -86,7 +86,7 @@ async def get_latest_knowledge_graph(db: Session = Depends(get_db_session)):
|
|
| 86 |
raise e
|
| 87 |
raise HTTPException(
|
| 88 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 89 |
-
detail=
|
| 90 |
)
|
| 91 |
|
| 92 |
|
|
@@ -112,7 +112,7 @@ async def download_latest_knowledge_graph(db: Session = Depends(get_db_session))
|
|
| 112 |
raise e
|
| 113 |
raise HTTPException(
|
| 114 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 115 |
-
detail=
|
| 116 |
)
|
| 117 |
|
| 118 |
|
|
@@ -153,7 +153,7 @@ async def get_knowledge_graph(graph_id: str, db: Session = Depends(get_db_sessio
|
|
| 153 |
logger.error(f"Database error fetching graph {graph_id}: {str(e)}")
|
| 154 |
raise HTTPException(
|
| 155 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 156 |
-
detail=
|
| 157 |
)
|
| 158 |
|
| 159 |
|
|
@@ -168,7 +168,7 @@ async def get_platform_stats(db: Session = Depends(get_db_session)):
|
|
| 168 |
except Exception as e:
|
| 169 |
raise HTTPException(
|
| 170 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 171 |
-
detail=
|
| 172 |
)
|
| 173 |
|
| 174 |
|
|
@@ -500,7 +500,7 @@ def get_knowledge_graph(
|
|
| 500 |
raise e
|
| 501 |
raise HTTPException(
|
| 502 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 503 |
-
detail=
|
| 504 |
)
|
| 505 |
|
| 506 |
|
|
@@ -529,7 +529,7 @@ def download_knowledge_graph(
|
|
| 529 |
raise e
|
| 530 |
raise HTTPException(
|
| 531 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 532 |
-
detail=
|
| 533 |
)
|
| 534 |
|
| 535 |
|
|
@@ -588,7 +588,7 @@ async def download_knowledge_graph_by_id_or_filename(
|
|
| 588 |
logger.error(f"Error downloading knowledge graph: {str(e)}")
|
| 589 |
raise HTTPException(
|
| 590 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 591 |
-
detail=
|
| 592 |
)
|
| 593 |
|
| 594 |
|
|
@@ -780,7 +780,7 @@ async def delete_knowledge_graph_by_id(
|
|
| 780 |
logger.error(f"Error deleting knowledge graph: {str(e)}")
|
| 781 |
raise HTTPException(
|
| 782 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 783 |
-
detail=
|
| 784 |
)
|
| 785 |
|
| 786 |
# Helper function to sanitize JSON data
|
|
@@ -911,7 +911,7 @@ async def analyze_knowledge_graph(kg_id: str, background_tasks: BackgroundTasks,
|
|
| 911 |
raise http_ex
|
| 912 |
except Exception as e:
|
| 913 |
logger.error(f"Error scheduling causal analysis: {str(e)}")
|
| 914 |
-
raise HTTPException(status_code=500, detail=
|
| 915 |
|
| 916 |
@router.get("/knowledge-graphs/{kg_id}/status")
|
| 917 |
async def get_knowledge_graph_status(kg_id: str, session: Session = Depends(get_db)):
|
|
@@ -1117,7 +1117,7 @@ async def get_stage_results(kg_id: str, stage: str, session: Session = Depends(g
|
|
| 1117 |
|
| 1118 |
except Exception as e:
|
| 1119 |
logger.error(f"Error retrieving stage results: {str(e)}")
|
| 1120 |
-
raise HTTPException(status_code=500, detail=
|
| 1121 |
|
| 1122 |
@router.delete("/knowledge-graphs/{kg_id}/stage-results/{stage}")
|
| 1123 |
async def clear_stage_results(kg_id: str, stage: str, session: Session = Depends(get_db)):
|
|
@@ -1210,7 +1210,7 @@ async def clear_stage_results(kg_id: str, stage: str, session: Session = Depends
|
|
| 1210 |
except Exception as e:
|
| 1211 |
session.rollback()
|
| 1212 |
logger.error(f"Error clearing stage {stage} for KG {kg_id}: {str(e)}")
|
| 1213 |
-
raise HTTPException(status_code=500, detail=
|
| 1214 |
|
| 1215 |
@router.put("/knowledge-graphs/{kg_id}/update-prompt-reconstruction")
|
| 1216 |
async def update_prompt_reconstruction(kg_id: str, session: Session = Depends(get_db)):
|
|
@@ -1266,4 +1266,4 @@ async def reset_knowledge_graph(kg_id: str, session: Session = Depends(get_db)):
|
|
| 1266 |
}
|
| 1267 |
except Exception as e:
|
| 1268 |
logger.error(f"Error resetting knowledge graph: {str(e)}")
|
| 1269 |
-
raise HTTPException(status_code=500, detail=
|
|
|
|
| 54 |
except Exception as e:
|
| 55 |
raise HTTPException(
|
| 56 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 57 |
+
detail="An internal error occurred while fetching knowledge graphs"
|
| 58 |
)
|
| 59 |
|
| 60 |
|
|
|
|
| 86 |
raise e
|
| 87 |
raise HTTPException(
|
| 88 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 89 |
+
detail="An internal error occurred while retrieving knowledge graph"
|
| 90 |
)
|
| 91 |
|
| 92 |
|
|
|
|
| 112 |
raise e
|
| 113 |
raise HTTPException(
|
| 114 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 115 |
+
detail="An internal error occurred while retrieving knowledge graph"
|
| 116 |
)
|
| 117 |
|
| 118 |
|
|
|
|
| 153 |
logger.error(f"Database error fetching graph {graph_id}: {str(e)}")
|
| 154 |
raise HTTPException(
|
| 155 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 156 |
+
detail="An internal error occurred while fetching knowledge graph"
|
| 157 |
)
|
| 158 |
|
| 159 |
|
|
|
|
| 168 |
except Exception as e:
|
| 169 |
raise HTTPException(
|
| 170 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 171 |
+
detail="An internal error occurred while fetching platform statistics"
|
| 172 |
)
|
| 173 |
|
| 174 |
|
|
|
|
| 500 |
raise e
|
| 501 |
raise HTTPException(
|
| 502 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 503 |
+
detail="An internal error occurred while retrieving knowledge graph"
|
| 504 |
)
|
| 505 |
|
| 506 |
|
|
|
|
| 529 |
raise e
|
| 530 |
raise HTTPException(
|
| 531 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 532 |
+
detail="An internal error occurred while retrieving knowledge graph"
|
| 533 |
)
|
| 534 |
|
| 535 |
|
|
|
|
| 588 |
logger.error(f"Error downloading knowledge graph: {str(e)}")
|
| 589 |
raise HTTPException(
|
| 590 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 591 |
+
detail="An internal error occurred while downloading knowledge graph"
|
| 592 |
)
|
| 593 |
|
| 594 |
|
|
|
|
| 780 |
logger.error(f"Error deleting knowledge graph: {str(e)}")
|
| 781 |
raise HTTPException(
|
| 782 |
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 783 |
+
detail="An internal error occurred while deleting knowledge graph"
|
| 784 |
)
|
| 785 |
|
| 786 |
# Helper function to sanitize JSON data
|
|
|
|
| 911 |
raise http_ex
|
| 912 |
except Exception as e:
|
| 913 |
logger.error(f"Error scheduling causal analysis: {str(e)}")
|
| 914 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while scheduling causal analysis")
|
| 915 |
|
| 916 |
@router.get("/knowledge-graphs/{kg_id}/status")
|
| 917 |
async def get_knowledge_graph_status(kg_id: str, session: Session = Depends(get_db)):
|
|
|
|
| 1117 |
|
| 1118 |
except Exception as e:
|
| 1119 |
logger.error(f"Error retrieving stage results: {str(e)}")
|
| 1120 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while retrieving stage results")
|
| 1121 |
|
| 1122 |
@router.delete("/knowledge-graphs/{kg_id}/stage-results/{stage}")
|
| 1123 |
async def clear_stage_results(kg_id: str, stage: str, session: Session = Depends(get_db)):
|
|
|
|
| 1210 |
except Exception as e:
|
| 1211 |
session.rollback()
|
| 1212 |
logger.error(f"Error clearing stage {stage} for KG {kg_id}: {str(e)}")
|
| 1213 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while clearing stage results")
|
| 1214 |
|
| 1215 |
@router.put("/knowledge-graphs/{kg_id}/update-prompt-reconstruction")
|
| 1216 |
async def update_prompt_reconstruction(kg_id: str, session: Session = Depends(get_db)):
|
|
|
|
| 1266 |
}
|
| 1267 |
except Exception as e:
|
| 1268 |
logger.error(f"Error resetting knowledge graph: {str(e)}")
|
| 1269 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while resetting knowledge graph")
|
backend/routers/methods.py
CHANGED
|
@@ -48,7 +48,7 @@ async def get_available_methods(method_type: Optional[str] = None, schema_type:
|
|
| 48 |
}
|
| 49 |
}
|
| 50 |
except Exception as e:
|
| 51 |
-
raise HTTPException(status_code=500, detail=
|
| 52 |
|
| 53 |
|
| 54 |
# Removed separate production/baseline endpoints - use unified /available endpoint with filtering
|
|
@@ -72,7 +72,7 @@ async def get_default_method() -> Dict[str, Any]:
|
|
| 72 |
"method_info": method_info
|
| 73 |
}
|
| 74 |
except Exception as e:
|
| 75 |
-
raise HTTPException(status_code=500, detail=
|
| 76 |
|
| 77 |
|
| 78 |
@router.get("/{method_name}")
|
|
@@ -102,7 +102,7 @@ async def get_method_info(method_name: str) -> Dict[str, Any]:
|
|
| 102 |
except HTTPException:
|
| 103 |
raise
|
| 104 |
except Exception as e:
|
| 105 |
-
raise HTTPException(status_code=500, detail=
|
| 106 |
|
| 107 |
|
| 108 |
@router.post("/{method_name}/validate")
|
|
@@ -133,7 +133,7 @@ async def validate_method(method_name: str) -> Dict[str, Any]:
|
|
| 133 |
"method_info": validation_result["method_info"]
|
| 134 |
}
|
| 135 |
except Exception as e:
|
| 136 |
-
raise HTTPException(status_code=500, detail=
|
| 137 |
|
| 138 |
|
| 139 |
@router.get("/{method_name}/schema-compatibility")
|
|
@@ -163,7 +163,7 @@ async def get_method_schema_compatibility(method_name: str) -> Dict[str, Any]:
|
|
| 163 |
except HTTPException:
|
| 164 |
raise
|
| 165 |
except Exception as e:
|
| 166 |
-
raise HTTPException(status_code=500, detail=
|
| 167 |
|
| 168 |
|
| 169 |
# Removed separate filter endpoints - use unified /available endpoint with query parameters
|
|
|
|
| 48 |
}
|
| 49 |
}
|
| 50 |
except Exception as e:
|
| 51 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while retrieving methods")
|
| 52 |
|
| 53 |
|
| 54 |
# Removed separate production/baseline endpoints - use unified /available endpoint with filtering
|
|
|
|
| 72 |
"method_info": method_info
|
| 73 |
}
|
| 74 |
except Exception as e:
|
| 75 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while retrieving default method")
|
| 76 |
|
| 77 |
|
| 78 |
@router.get("/{method_name}")
|
|
|
|
| 102 |
except HTTPException:
|
| 103 |
raise
|
| 104 |
except Exception as e:
|
| 105 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while retrieving method info")
|
| 106 |
|
| 107 |
|
| 108 |
@router.post("/{method_name}/validate")
|
|
|
|
| 133 |
"method_info": validation_result["method_info"]
|
| 134 |
}
|
| 135 |
except Exception as e:
|
| 136 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while validating method")
|
| 137 |
|
| 138 |
|
| 139 |
@router.get("/{method_name}/schema-compatibility")
|
|
|
|
| 163 |
except HTTPException:
|
| 164 |
raise
|
| 165 |
except Exception as e:
|
| 166 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while retrieving compatibility info")
|
| 167 |
|
| 168 |
|
| 169 |
# Removed separate filter endpoints - use unified /available endpoint with query parameters
|
backend/routers/observability.py
CHANGED
|
@@ -150,7 +150,7 @@ def test_langfuse_connection(public_key: str, secret_key: str, host: Optional[st
|
|
| 150 |
return True
|
| 151 |
except Exception as e:
|
| 152 |
logger.error(f"Failed to connect to Langfuse: {str(e)}")
|
| 153 |
-
raise HTTPException(status_code=400, detail=
|
| 154 |
|
| 155 |
def test_langsmith_connection(api_key: str) -> bool:
|
| 156 |
"""Test LangSmith connection by listing projects"""
|
|
@@ -161,7 +161,7 @@ def test_langsmith_connection(api_key: str) -> bool:
|
|
| 161 |
return True
|
| 162 |
except Exception as e:
|
| 163 |
logger.error(f"Failed to connect to LangSmith: {str(e)}")
|
| 164 |
-
raise HTTPException(status_code=400, detail=
|
| 165 |
|
| 166 |
def get_connection_projects(platform: str, public_key: str, secret_key: str, host: Optional[str]) -> List[Dict]:
|
| 167 |
"""Get projects for a platform connection"""
|
|
@@ -309,7 +309,7 @@ def fetch_langsmith_traces(connection: ObservabilityConnection, db: Session, pro
|
|
| 309 |
logger.info(f"Found {len(projects)} projects")
|
| 310 |
except Exception as e:
|
| 311 |
logger.error(f"Error listing projects: {e}")
|
| 312 |
-
raise HTTPException(status_code=500, detail=
|
| 313 |
|
| 314 |
# Export runs from specific project only
|
| 315 |
all_traces = []
|
|
@@ -426,7 +426,7 @@ def fetch_langsmith_traces(connection: ObservabilityConnection, db: Session, pro
|
|
| 426 |
|
| 427 |
except Exception as e:
|
| 428 |
logger.error(f"Error fetching LangSmith traces: {str(e)}")
|
| 429 |
-
raise HTTPException(status_code=500, detail=
|
| 430 |
|
| 431 |
# Request/Response Models
|
| 432 |
class ConnectionRequest(BaseModel):
|
|
@@ -674,7 +674,7 @@ async def fetch_traces_by_connection(
|
|
| 674 |
|
| 675 |
except Exception as e:
|
| 676 |
logger.error(f"Failed to fetch traces from connection {connection_id}: {str(e)}")
|
| 677 |
-
raise HTTPException(status_code=500, detail=
|
| 678 |
|
| 679 |
@router.post("/connections/{connection_id}/import")
|
| 680 |
async def import_traces_by_connection(
|
|
@@ -837,7 +837,7 @@ async def import_traces_by_connection(
|
|
| 837 |
|
| 838 |
except Exception as e:
|
| 839 |
logger.error(f"Failed to import traces from connection {connection_id}: {str(e)}")
|
| 840 |
-
raise HTTPException(status_code=500, detail=
|
| 841 |
|
| 842 |
@router.get("/traces/{trace_id}/download")
|
| 843 |
async def download_trace_by_id(trace_id: str, db: Session = Depends(get_db)): # noqa: B008
|
|
@@ -859,7 +859,7 @@ async def get_resource_usage():
|
|
| 859 |
return {"cpu_usage": cpu_usage, "memory_usage": memory_usage}
|
| 860 |
except Exception as e:
|
| 861 |
logger.error(f"Error retrieving resource usage: {str(e)}")
|
| 862 |
-
raise HTTPException(status_code=500, detail=
|
| 863 |
|
| 864 |
@router.post("/clean-up")
|
| 865 |
async def clean_up(session: Session = Depends(get_db)): # noqa: B008
|
|
@@ -870,7 +870,7 @@ async def clean_up(session: Session = Depends(get_db)): # noqa: B008
|
|
| 870 |
return {"success": True, "message": "Resources cleaned up successfully"}
|
| 871 |
except Exception as e:
|
| 872 |
logger.error(f"Error cleaning up resources: {str(e)}")
|
| 873 |
-
raise HTTPException(status_code=500, detail=
|
| 874 |
|
| 875 |
@router.get("/environment")
|
| 876 |
async def get_environment():
|
|
@@ -994,4 +994,4 @@ async def cleanup_stuck_tasks():
|
|
| 994 |
return {"success": True, "cleaned_tasks": cleaned_tasks}
|
| 995 |
except Exception as e:
|
| 996 |
logger.error(f"Error cleaning up stuck tasks: {str(e)}")
|
| 997 |
-
raise HTTPException(status_code=500, detail=
|
|
|
|
| 150 |
return True
|
| 151 |
except Exception as e:
|
| 152 |
logger.error(f"Failed to connect to Langfuse: {str(e)}")
|
| 153 |
+
raise HTTPException(status_code=400, detail="Failed to connect to Langfuse") from e
|
| 154 |
|
| 155 |
def test_langsmith_connection(api_key: str) -> bool:
|
| 156 |
"""Test LangSmith connection by listing projects"""
|
|
|
|
| 161 |
return True
|
| 162 |
except Exception as e:
|
| 163 |
logger.error(f"Failed to connect to LangSmith: {str(e)}")
|
| 164 |
+
raise HTTPException(status_code=400, detail="Failed to connect to LangSmith") from e
|
| 165 |
|
| 166 |
def get_connection_projects(platform: str, public_key: str, secret_key: str, host: Optional[str]) -> List[Dict]:
|
| 167 |
"""Get projects for a platform connection"""
|
|
|
|
| 309 |
logger.info(f"Found {len(projects)} projects")
|
| 310 |
except Exception as e:
|
| 311 |
logger.error(f"Error listing projects: {e}")
|
| 312 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while listing projects") from e
|
| 313 |
|
| 314 |
# Export runs from specific project only
|
| 315 |
all_traces = []
|
|
|
|
| 426 |
|
| 427 |
except Exception as e:
|
| 428 |
logger.error(f"Error fetching LangSmith traces: {str(e)}")
|
| 429 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while fetching traces") from e
|
| 430 |
|
| 431 |
# Request/Response Models
|
| 432 |
class ConnectionRequest(BaseModel):
|
|
|
|
| 674 |
|
| 675 |
except Exception as e:
|
| 676 |
logger.error(f"Failed to fetch traces from connection {connection_id}: {str(e)}")
|
| 677 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while fetching traces") from e
|
| 678 |
|
| 679 |
@router.post("/connections/{connection_id}/import")
|
| 680 |
async def import_traces_by_connection(
|
|
|
|
| 837 |
|
| 838 |
except Exception as e:
|
| 839 |
logger.error(f"Failed to import traces from connection {connection_id}: {str(e)}")
|
| 840 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while importing traces") from e
|
| 841 |
|
| 842 |
@router.get("/traces/{trace_id}/download")
|
| 843 |
async def download_trace_by_id(trace_id: str, db: Session = Depends(get_db)): # noqa: B008
|
|
|
|
| 859 |
return {"cpu_usage": cpu_usage, "memory_usage": memory_usage}
|
| 860 |
except Exception as e:
|
| 861 |
logger.error(f"Error retrieving resource usage: {str(e)}")
|
| 862 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while retrieving resource usage") from e
|
| 863 |
|
| 864 |
@router.post("/clean-up")
|
| 865 |
async def clean_up(session: Session = Depends(get_db)): # noqa: B008
|
|
|
|
| 870 |
return {"success": True, "message": "Resources cleaned up successfully"}
|
| 871 |
except Exception as e:
|
| 872 |
logger.error(f"Error cleaning up resources: {str(e)}")
|
| 873 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while cleaning up resources") from e
|
| 874 |
|
| 875 |
@router.get("/environment")
|
| 876 |
async def get_environment():
|
|
|
|
| 994 |
return {"success": True, "cleaned_tasks": cleaned_tasks}
|
| 995 |
except Exception as e:
|
| 996 |
logger.error(f"Error cleaning up stuck tasks: {str(e)}")
|
| 997 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while cleaning up stuck tasks") from e
|
backend/routers/temporal_graphs.py
CHANGED
|
@@ -118,7 +118,7 @@ async def get_temporal_graph_data(trace_id: str, processing_run_id: str = None,
|
|
| 118 |
raise
|
| 119 |
except Exception as e:
|
| 120 |
logger.error(f"Error getting temporal graph data for trace {trace_id}: {str(e)}")
|
| 121 |
-
raise HTTPException(status_code=500, detail=
|
| 122 |
|
| 123 |
|
| 124 |
@router.get("/traces-with-windows")
|
|
@@ -195,7 +195,7 @@ async def get_traces_with_windows(db: Session = Depends(get_db)):
|
|
| 195 |
|
| 196 |
except Exception as e:
|
| 197 |
logger.error(f"Error getting traces with windows: {str(e)}")
|
| 198 |
-
raise HTTPException(status_code=500, detail=
|
| 199 |
|
| 200 |
|
| 201 |
@router.get("/trace/{trace_id}/processing-runs")
|
|
@@ -264,4 +264,4 @@ async def get_processing_runs_for_trace(trace_id: str, db: Session = Depends(get
|
|
| 264 |
|
| 265 |
except Exception as e:
|
| 266 |
logger.error(f"Error getting processing runs for trace {trace_id}: {str(e)}")
|
| 267 |
-
raise HTTPException(status_code=500, detail=
|
|
|
|
| 118 |
raise
|
| 119 |
except Exception as e:
|
| 120 |
logger.error(f"Error getting temporal graph data for trace {trace_id}: {str(e)}")
|
| 121 |
+
raise HTTPException(status_code=500, detail="An internal error occurred")
|
| 122 |
|
| 123 |
|
| 124 |
@router.get("/traces-with-windows")
|
|
|
|
| 195 |
|
| 196 |
except Exception as e:
|
| 197 |
logger.error(f"Error getting traces with windows: {str(e)}")
|
| 198 |
+
raise HTTPException(status_code=500, detail="An internal error occurred")
|
| 199 |
|
| 200 |
|
| 201 |
@router.get("/trace/{trace_id}/processing-runs")
|
|
|
|
| 264 |
|
| 265 |
except Exception as e:
|
| 266 |
logger.error(f"Error getting processing runs for trace {trace_id}: {str(e)}")
|
| 267 |
+
raise HTTPException(status_code=500, detail="An internal error occurred")
|
backend/routers/traces.py
CHANGED
|
@@ -247,7 +247,7 @@ async def upload_trace(
|
|
| 247 |
except Exception as e:
|
| 248 |
logger = logging.getLogger("agent_monitoring_server")
|
| 249 |
logger.error(f"Error uploading trace: {str(e)}")
|
| 250 |
-
raise HTTPException(status_code=500, detail=
|
| 251 |
|
| 252 |
@router.get("/{trace_id}")
|
| 253 |
async def get_trace_by_id(trace_id: str, db: Session = Depends(get_db)):
|
|
@@ -516,7 +516,7 @@ async def get_knowledge_graphs_for_trace_id(trace_id: str, db: Session = Depends
|
|
| 516 |
except Exception as e:
|
| 517 |
logger = logging.getLogger("agent_monitoring_server")
|
| 518 |
logger.error(f"Error retrieving knowledge graphs for trace {trace_id}: {str(e)}")
|
| 519 |
-
raise HTTPException(status_code=500, detail=
|
| 520 |
|
| 521 |
@router.get("/{trace_id}/content")
|
| 522 |
async def get_trace_content(trace_id: str, db: Session = Depends(get_db)):
|
|
@@ -646,7 +646,7 @@ async def regenerate_trace_metadata(
|
|
| 646 |
except Exception as e:
|
| 647 |
logger = logging.getLogger("agent_monitoring_server")
|
| 648 |
logger.error(f"Error regenerating metadata for trace {trace_id}: {str(e)}")
|
| 649 |
-
raise HTTPException(status_code=500, detail=
|
| 650 |
|
| 651 |
|
| 652 |
@router.post("/{trace_id}/fix-long-lines")
|
|
@@ -705,7 +705,7 @@ async def fix_long_lines(
|
|
| 705 |
except Exception as e:
|
| 706 |
logger = logging.getLogger("agent_monitoring_server")
|
| 707 |
logger.error(f"Error applying line splitting to trace {trace_id}: {str(e)}")
|
| 708 |
-
raise HTTPException(status_code=500, detail=
|
| 709 |
|
| 710 |
@router.get("/{trace_id}/content-numbered")
|
| 711 |
async def get_trace_content_numbered(trace_id: str, db: Session = Depends(get_db)):
|
|
@@ -761,7 +761,7 @@ async def create_context_document(
|
|
| 761 |
except ValueError as e:
|
| 762 |
raise HTTPException(status_code=400, detail=str(e))
|
| 763 |
except Exception as e:
|
| 764 |
-
raise HTTPException(status_code=500, detail=
|
| 765 |
|
| 766 |
|
| 767 |
@router.get("/{trace_id}/context")
|
|
@@ -777,7 +777,7 @@ async def get_context_documents(
|
|
| 777 |
except ValueError as e:
|
| 778 |
raise HTTPException(status_code=404, detail=str(e))
|
| 779 |
except Exception as e:
|
| 780 |
-
raise HTTPException(status_code=500, detail=str(e))
|
| 781 |
|
| 782 |
|
| 783 |
@router.put("/{trace_id}/context/{context_id}")
|
|
@@ -803,7 +803,7 @@ async def update_context_document(
|
|
| 803 |
except ValueError as e:
|
| 804 |
raise HTTPException(status_code=404, detail=str(e))
|
| 805 |
except Exception as e:
|
| 806 |
-
raise HTTPException(status_code=500, detail=str(e))
|
| 807 |
|
| 808 |
|
| 809 |
@router.delete("/{trace_id}/context/{context_id}")
|
|
@@ -823,7 +823,7 @@ async def delete_context_document(
|
|
| 823 |
except ValueError as e:
|
| 824 |
raise HTTPException(status_code=404, detail=str(e))
|
| 825 |
except Exception as e:
|
| 826 |
-
raise HTTPException(status_code=500, detail=str(e))
|
| 827 |
|
| 828 |
|
| 829 |
@router.post("/{trace_id}/context/upload")
|
|
@@ -871,7 +871,7 @@ async def upload_context_file(
|
|
| 871 |
except ValueError as e:
|
| 872 |
raise HTTPException(status_code=400, detail=str(e))
|
| 873 |
except Exception as e:
|
| 874 |
-
raise HTTPException(status_code=500, detail=str(e))
|
| 875 |
|
| 876 |
|
| 877 |
@router.post("/{trace_id}/context/auto-generate")
|
|
@@ -918,7 +918,7 @@ async def auto_generate_context_documents_endpoint(
|
|
| 918 |
except Exception as e:
|
| 919 |
logger = logging.getLogger("agent_monitoring_server")
|
| 920 |
logger.error(f"Error auto-generating context documents for trace {trace_id}: {str(e)}")
|
| 921 |
-
raise HTTPException(status_code=500, detail=str(e))
|
| 922 |
|
| 923 |
@router.get("/{trace_id}/enhanced-statistics")
|
| 924 |
async def get_enhanced_trace_statistics(trace_id: str, db: Session = Depends(get_db)):
|
|
@@ -1062,7 +1062,7 @@ async def get_enhanced_trace_statistics(trace_id: str, db: Session = Depends(get
|
|
| 1062 |
except Exception as e:
|
| 1063 |
logger = logging.getLogger("agent_monitoring_server")
|
| 1064 |
logger.error(f"Error generating enhanced statistics for trace {trace_id}: {str(e)}")
|
| 1065 |
-
raise HTTPException(status_code=500, detail=str(e))
|
| 1066 |
|
| 1067 |
class ChunkingConfig(BaseModel):
|
| 1068 |
min_chunk_size: Optional[int] = None
|
|
@@ -1132,5 +1132,5 @@ async def process_trace(
|
|
| 1132 |
logger.error(f"Error starting trace processing task: {e}")
|
| 1133 |
raise HTTPException(
|
| 1134 |
status_code=500,
|
| 1135 |
-
detail=str(e)
|
| 1136 |
)
|
|
|
|
| 247 |
except Exception as e:
|
| 248 |
logger = logging.getLogger("agent_monitoring_server")
|
| 249 |
logger.error(f"Error uploading trace: {str(e)}")
|
| 250 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while uploading trace")
|
| 251 |
|
| 252 |
@router.get("/{trace_id}")
|
| 253 |
async def get_trace_by_id(trace_id: str, db: Session = Depends(get_db)):
|
|
|
|
| 516 |
except Exception as e:
|
| 517 |
logger = logging.getLogger("agent_monitoring_server")
|
| 518 |
logger.error(f"Error retrieving knowledge graphs for trace {trace_id}: {str(e)}")
|
| 519 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while retrieving knowledge graphs")
|
| 520 |
|
| 521 |
@router.get("/{trace_id}/content")
|
| 522 |
async def get_trace_content(trace_id: str, db: Session = Depends(get_db)):
|
|
|
|
| 646 |
except Exception as e:
|
| 647 |
logger = logging.getLogger("agent_monitoring_server")
|
| 648 |
logger.error(f"Error regenerating metadata for trace {trace_id}: {str(e)}")
|
| 649 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while regenerating metadata")
|
| 650 |
|
| 651 |
|
| 652 |
@router.post("/{trace_id}/fix-long-lines")
|
|
|
|
| 705 |
except Exception as e:
|
| 706 |
logger = logging.getLogger("agent_monitoring_server")
|
| 707 |
logger.error(f"Error applying line splitting to trace {trace_id}: {str(e)}")
|
| 708 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while applying line splitting")
|
| 709 |
|
| 710 |
@router.get("/{trace_id}/content-numbered")
|
| 711 |
async def get_trace_content_numbered(trace_id: str, db: Session = Depends(get_db)):
|
|
|
|
| 761 |
except ValueError as e:
|
| 762 |
raise HTTPException(status_code=400, detail=str(e))
|
| 763 |
except Exception as e:
|
| 764 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while creating context document")
|
| 765 |
|
| 766 |
|
| 767 |
@router.get("/{trace_id}/context")
|
|
|
|
| 777 |
except ValueError as e:
|
| 778 |
raise HTTPException(status_code=404, detail=str(e))
|
| 779 |
except Exception as e:
|
| 780 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while retrieving context documents")
|
| 781 |
|
| 782 |
|
| 783 |
@router.put("/{trace_id}/context/{context_id}")
|
|
|
|
| 803 |
except ValueError as e:
|
| 804 |
raise HTTPException(status_code=404, detail=str(e))
|
| 805 |
except Exception as e:
|
| 806 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while updating context document")
|
| 807 |
|
| 808 |
|
| 809 |
@router.delete("/{trace_id}/context/{context_id}")
|
|
|
|
| 823 |
except ValueError as e:
|
| 824 |
raise HTTPException(status_code=404, detail=str(e))
|
| 825 |
except Exception as e:
|
| 826 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while deleting context document")
|
| 827 |
|
| 828 |
|
| 829 |
@router.post("/{trace_id}/context/upload")
|
|
|
|
| 871 |
except ValueError as e:
|
| 872 |
raise HTTPException(status_code=400, detail=str(e))
|
| 873 |
except Exception as e:
|
| 874 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while uploading context file")
|
| 875 |
|
| 876 |
|
| 877 |
@router.post("/{trace_id}/context/auto-generate")
|
|
|
|
| 918 |
except Exception as e:
|
| 919 |
logger = logging.getLogger("agent_monitoring_server")
|
| 920 |
logger.error(f"Error auto-generating context documents for trace {trace_id}: {str(e)}")
|
| 921 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while auto-generating context documents")
|
| 922 |
|
| 923 |
@router.get("/{trace_id}/enhanced-statistics")
|
| 924 |
async def get_enhanced_trace_statistics(trace_id: str, db: Session = Depends(get_db)):
|
|
|
|
| 1062 |
except Exception as e:
|
| 1063 |
logger = logging.getLogger("agent_monitoring_server")
|
| 1064 |
logger.error(f"Error generating enhanced statistics for trace {trace_id}: {str(e)}")
|
| 1065 |
+
raise HTTPException(status_code=500, detail="An internal error occurred while generating enhanced statistics")
|
| 1066 |
|
| 1067 |
class ChunkingConfig(BaseModel):
|
| 1068 |
min_chunk_size: Optional[int] = None
|
|
|
|
| 1132 |
logger.error(f"Error starting trace processing task: {e}")
|
| 1133 |
raise HTTPException(
|
| 1134 |
status_code=500,
|
| 1135 |
+
detail="An internal error occurred while starting trace processing"
|
| 1136 |
)
|