File size: 1,647 Bytes
cff1a2a
 
 
 
 
 
72bff80
 
cff1a2a
 
72bff80
 
cff1a2a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
from fastapi import FastAPI, HTTPException, Body
from pydantic import BaseModel
from typing import List, Dict, Optional, Any
from agents.graph import app as agent_app
# from ingestion.pipeline import IngestionPipeline # Optional: Trigger via API

from api.plans import router as plans_router

# Application instance: serves the multi-agent insurance advisory system.
app = FastAPI(title="Insurance Advisory AI Agent", version="1.0.0")

# Mount the plan-management endpoints (defined in api/plans.py) onto the app.
app.include_router(plans_router)

class ChatRequest(BaseModel):
    """Request payload for POST /chat."""
    # The user's current message/question.
    message: str
    # Prior conversation turns, oldest first; defaults to an empty history.
    # (Pydantic copies the default per-instance, so the [] default is safe here.)
    chat_history: Optional[List[str]] = []

class ChatResponse(BaseModel):
    """Response payload for POST /chat."""
    # The agent's final answer text.
    answer: str
    # Intent label assigned by the routing step of the agent graph.
    intent: str
    # Retrieved context passages used to ground the answer, if any.
    context_used: Optional[List[str]] = None

@app.get("/")
def health_check():
    """Liveness probe: report that the advisory service is up."""
    payload = dict(status="active", system="Insurance Advisory Agent")
    return payload

@app.post("/chat", response_model=ChatResponse)
async def chat_endpoint(request: ChatRequest):
    """
    Main chat endpoint. Routes query through the Multi-Agent Graph.
    """
    # Seed the graph state from the request; the remaining fields start
    # empty and are populated by the agent nodes during the run.
    state = {
        "input": request.message,
        "chat_history": request.chat_history or [],
        "intent": "",
        "context": [],
        "answer": "",
        "metadata_filters": {},
    }

    try:
        # Run the multi-agent graph and shape its output for the client.
        result = agent_app.invoke(state)
        return ChatResponse(
            answer=result.get("answer", "No response generated."),
            intent=result.get("intent", "unknown"),
            context_used=result.get("context", []),
        )
    except Exception as exc:
        # Surface any agent failure to the client as an HTTP 500.
        raise HTTPException(status_code=500, detail=str(exc))

if __name__ == "__main__":
    # Dev entry point: serve the API on all interfaces at port 8000.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)