# Author: Mohammad Wasil
# Final update (commit e1550b3)
import os
import sys
import time
import uuid
from contextlib import asynccontextmanager

import httpx
from fastapi import FastAPI, HTTPException, Request, Response
from fastapi.concurrency import run_in_threadpool
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from loguru import logger
from prometheus_client import CONTENT_TYPE_LATEST, generate_latest, make_asgi_app
from starlette.responses import StreamingResponse

from agent import SupportAgent
from schemas import ChatRequest, ChatResponse
# Opt out of anonymized telemetry before dependent libraries read the flag.
# (Which library consumes this is not visible here — presumably a vector-store
# client pulled in by SupportAgent; confirm against agent.py.)
os.environ["ANONYMIZED_TELEMETRY"] = "False"
# Loguru Setup: drop the default sink, then log INFO+ to stdout with a
# colorized timestamp/level prefix.
logger.remove()
logger.add(sys.stdout, format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level}</level> | {message}", level="INFO")
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan hook.

    Builds the SupportAgent exactly once at startup and publishes it on
    ``app.state.agent``. Initialization failures are logged but do not abort
    startup: routes check for the attribute and answer 503 until it exists.
    """
    logger.info("Initializing SmartCoffee Agent for Hugging Face...")
    try:
        # Single shared agent instance for the whole process.
        app.state.agent = SupportAgent()
    except Exception as e:
        logger.error(f"Critical Startup Error: {e}")
    else:
        logger.success("SupportAgent successfully initialized.")
    yield
    logger.info("Shutting down...")
app = FastAPI(title="SmartCoffee AI 2026", lifespan=lifespan)
"""
Adding the dashaboard for monitoring
"""
@app.api_route("/grafana/{path:path}", methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
async def grafana_proxy(path: str, request: Request):
    """Reverse-proxy requests under ``/grafana/*`` to the local Grafana server.

    FIX: the previous version returned ``StreamingResponse(resp.aiter_raw())``
    after the ``async with httpx.AsyncClient`` block — the client (and its
    transport) was already closed by the time Starlette iterated the body.
    ``client.request`` buffers the upstream body, so we return a plain
    ``Response`` built from ``resp.content`` instead.
    """
    async with httpx.AsyncClient(follow_redirects=True, timeout=30.0) as client:
        upstream = await client.request(
            request.method,
            f"http://127.0.0.1:3000/{path}",
            params=request.query_params,
            # Drop hop-specific request headers: httpx sets its own Host and
            # recomputes Content-Length for the forwarded body.
            headers={
                k: v
                for k, v in request.headers.items()
                if k.lower() not in ("host", "content-length")
            },
            content=await request.body(),
        )
    # Body is fully buffered, so the response can be built after the client
    # closes. Strip headers that no longer describe the (decoded, re-framed)
    # payload; Starlette recomputes Content-Length itself.
    return Response(
        content=upstream.content,
        status_code=upstream.status_code,
        headers={
            k: v
            for k, v in upstream.headers.items()
            if k.lower() not in ("content-encoding", "transfer-encoding", "content-length")
        },
    )
# CORS: wide-open origins/methods/headers. Note allow_credentials is left at
# its default (False), which is what permits the "*" origin wildcard.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
# 1. ADDED: Health Check Route (Fixes the 404 error)
@app.get("/health")
async def health():
    """Liveness probe: server time plus whether the agent finished loading."""
    agent_ready = hasattr(app.state, "agent")
    return {"status": "healthy", "time": time.time(), "agent_loaded": agent_ready}
# 2. Metrics Route
@app.get("/metrics")
def metrics():
    """Expose Prometheus metrics in the text exposition format."""
    payload = generate_latest()
    return Response(payload, media_type=CONTENT_TYPE_LATEST)
# 3. Core Chat API (Check that frontend sends 'question' and 'session_id')
@app.post("/api/v1/chat", response_model=ChatResponse)
async def chat(request: ChatRequest):
    """Answer a support question with the shared SupportAgent.

    Assigns a fresh session id when the client sends the "default" placeholder,
    returns 503 while the agent is unavailable, and 500 on agent failures.

    FIX: ``SupportAgent.run`` is a synchronous call; invoking it directly in an
    ``async def`` route blocked the event loop for every concurrent request.
    It now runs on Starlette's threadpool via ``run_in_threadpool``.
    """
    if request.session_id == "default":
        request.session_id = f"hf_{uuid.uuid4().hex[:12]}"
    if not hasattr(app.state, 'agent'):
        raise HTTPException(status_code=503, detail="Agent logic is still initializing or failed to load.")
    try:
        # Calls the .run() method you shared in your agent.py, off the loop.
        result = await run_in_threadpool(
            app.state.agent.run, request.question, session_id=request.session_id
        )
        return ChatResponse(
            question=request.question,
            answer=result["answer"],
            session_id=request.session_id,
            timestamp=result.get("timestamp", time.time())
        )
    except Exception as e:
        logger.error(f"Chat Execution Error: {e}")
        raise HTTPException(status_code=500, detail=str(e))
# 4. Sync Route (Add if your JS CONFIG.API_ENDPOINT still uses /chat/sync)
@app.post("/api/v1/chat/sync", response_model=ChatResponse)
async def chat_sync(request: ChatRequest):
    """Alias for /api/v1/chat kept for frontends still pointing at /chat/sync."""
    response = await chat(request)
    return response
# 5. Static Files (MOUNTED LAST to prevent route hijacking)
# FIX: the former `app.mount("/metrics", make_asgi_app())` after this line was
# unreachable dead code — both the @app.get("/metrics") route above and this
# catch-all "/" mount are registered first and shadow it — so it was removed.
app.mount("/", StaticFiles(directory="frontend", html=True), name="static")