# NOTE(review): removed non-Python residue here (scraped page chrome: "Spaces: Running",
# file size, commit hashes, and a line-number gutter) — it was extraction junk, not source code.
"""
Medical Query Router for RAG AI Advisor
"""
import asyncio
from fastapi import APIRouter, HTTPException
from fastapi.responses import StreamingResponse
import sys
import os
# Add src to path for imports
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
from core.agent import safe_run_agent, safe_run_agent_streaming
router = APIRouter(tags=["medical"])
@router.get("/ask")
async def ask(query: str, session_id: str = "default"):
    """
    Answer a medical question; the agent decides which tools to invoke.

    Args:
        query: The medical question or query
        session_id: Optional session identifier for conversation continuity (default: "default")

    Returns:
        dict with the agent's response text and the session identifier.

    Raises:
        HTTPException: 500 wrapping any failure raised by the agent run.
    """
    try:
        answer = await safe_run_agent(user_input=query, session_id=session_id)
    except Exception as e:
        # Convert any agent failure into a standard FastAPI 500 error.
        raise HTTPException(
            status_code=500,
            detail=f"Error processing medical query: {str(e)}"
        )
    return {"response": answer, "session_id": session_id}
@router.get("/ask/stream")
async def ask_stream(query: str, session_id: str = "default"):
    """
    Stream the agent's answer to a medical query as it is produced.

    Args:
        query: The medical question or query
        session_id: Optional session identifier for conversation continuity (default: "default")

    Returns:
        StreamingResponse emitting markdown text chunks.
    """
    # Accumulate at least this many characters before flushing a chunk,
    # so the client sees smoother output instead of tiny fragments.
    flush_threshold = 10

    async def event_stream():
        buffer = ""
        try:
            async for piece in safe_run_agent_streaming(user_input=query, session_id=session_id):
                buffer += piece
                if len(buffer) < flush_threshold:
                    continue
                yield buffer
                buffer = ""
                # Brief pause keeps the stream pacing smooth on the client side.
                await asyncio.sleep(0.01)
            # Flush whatever is left after the agent finishes.
            if buffer:
                yield buffer
        except Exception as e:
            # Headers are already sent once streaming starts, so report
            # failures in-band rather than via an HTTP error status.
            yield f"Error: {str(e)}"

    return StreamingResponse(event_stream(), media_type="text/markdown")