trygithubactions / api /routers /context_analysis.py
subashpoudel's picture
Made changes in ideation
f054586
raw
history blame
1.87 kB
import ast
import json
from fastapi import APIRouter, Depends
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from api.stored_data import stored_data
from src.genai.context_analysis_agent.agent import IntroductionChatbot
# Router mounted by the main FastAPI app; exposes the context-analysis endpoint.
router = APIRouter()
class UserMessage(BaseModel):
    """Request body for POST /context-analysis."""
    message: str  # raw user chat message forwarded to the context-analysis agent
# Module-level chatbot instance shared by every request.
# NOTE(review): this is mutable shared state (messages, reset()) across
# concurrent requests — confirm single-session usage or add per-session state.
context_analysis_graph = IntroductionChatbot()
## ---------------------- Passing in json ------------------------
## ---------------------- Passing in json ------------------------
@router.post("/context-analysis")
def context_analysis(msg: UserMessage):
    """Stream the agent's reply as newline-delimited JSON.

    Each chunk from the chat generator is emitted immediately as
    ``{"streamed_response": chunk}``, one JSON object per line. After the
    stream ends, a final payload carries the full accumulated response, a
    ``complete`` flag, and — when the conversation is complete — the
    extracted business details (also persisted into ``stored_data``).
    """
    def event_generator():
        accumulated_response = ""
        # Stream tokens or partial chunks from the chat generator.
        for chunk in context_analysis_graph.chat(msg.message):
            accumulated_response += chunk
            payload = {
                "streamed_response": chunk,
            }
            yield json.dumps(payload) + "\n"  # JSON per line
        # After streaming finished, inspect the last message to decide
        # whether the conversation reached completion.
        last_response = context_analysis_graph.messages[-1]["content"]
        if context_analysis_graph.is_complete(last_response):
            details = context_analysis_graph.extract_details()
            # Normalize `details` to a plain dict. The `str` case must be
            # checked FIRST: the previous code called `.model_dump()` on
            # anything that wasn't a dict, which raised AttributeError
            # whenever extract_details() returned a stringified dict,
            # making the string-handling branch unreachable.
            if isinstance(details, str):
                # literal_eval parses Python-literal dicts without
                # executing arbitrary code (unlike eval).
                details = ast.literal_eval(details)
            elif not isinstance(details, dict):
                # Assume a pydantic model; convert it to a dict.
                details = details.model_dump()
            stored_data["business_details"] = details
            # Clear conversation state so the next session starts fresh.
            context_analysis_graph.reset()
            final_payload = {
                "response": accumulated_response,
                "complete": True,
                "business_details": details
            }
        else:
            final_payload = {
                "response": accumulated_response,
                "complete": False
            }
        yield json.dumps(final_payload) + "\n"
    return StreamingResponse(event_generator(), media_type="text/event-stream")