# Hugging Face file-viewer chrome (not part of the module): uploaded by moazx,
# "Initial commit", revision 2a8faae, 1.75 kB.
"""
Medical Query Router for RAG AI Advisor
"""
import asyncio
from fastapi import APIRouter, HTTPException
from fastapi.responses import StreamingResponse
import sys
import os
# Add src to path for imports
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
from core.agent import safe_run_agent, safe_run_agent_streaming
router = APIRouter(tags=["medical"])
@router.get("/ask")
async def ask(query: str):
"""
Process a medical query - agent decides which tools to use
"""
try:
response = await safe_run_agent(user_input=query)
return {"response": response}
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Error processing medical query: {str(e)}"
)
@router.get("/ask/stream")
async def ask_stream(query: str):
"""
Process a medical query with streaming response - agent decides which tools to use
"""
async def event_stream():
try:
chunk_buffer = ""
async for chunk in safe_run_agent_streaming(user_input=query):
chunk_buffer += chunk
# Send chunks in reasonable sizes for smoother streaming
if len(chunk_buffer) >= 10: # Adjust this value as needed
yield chunk_buffer
chunk_buffer = ""
await asyncio.sleep(0.01) # Small delay for smoother streaming
# Send any remaining content
if chunk_buffer:
yield chunk_buffer
except Exception as e:
yield f"Error: {str(e)}"
return StreamingResponse(event_stream(), media_type="text/markdown")