Spaces:
Running
Running
File size: 1,342 Bytes
8e453ef 966632b 8e453ef 966632b 8e453ef 966632b 8e453ef 966632b 8e453ef |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 |
from fastapi import FastAPI
from pydantic import BaseModel
from typing import Any, Dict, List
import os
from process_aware_rag import ProcessAwareRAG
# Single FastAPI application instance; all route decorators below register on it.
app = FastAPI(title="Legal Process Guide API", version="1.0.0")
class Message(BaseModel):
    """One chat turn exchanged between the user and the assistant."""

    # Expected values are "user" or "assistant" — not validated here.
    role: str
    content: str
class QueryRequest(BaseModel):
    """Request body for /query: the new question plus prior chat turns."""

    query: str
    # Past conversation supplied by the client; defaults to empty
    # (pydantic deep-copies mutable defaults, so sharing is not an issue).
    history: List[Message] = []
class QueryResponse(BaseModel):
    """Response body for /query: answer text plus diagnostic dictionaries."""

    response: str
    classification: Dict[str, Any]
    debug_info: Dict[str, Any]
@app.on_event("startup")
def startup_event() -> None:
    """Build the RAG pipeline once and stash it on application state.

    Constructed a single time for the whole app lifecycle; request
    handlers read it back through ``app.state.rag``.
    """
    # NOTE(review): @app.on_event is deprecated in newer FastAPI in favor
    # of lifespan handlers — consider migrating when upgrading.
    app.state.rag = ProcessAwareRAG()
@app.get("/health")
def health() -> Dict[str, str]:
    """Liveness probe: always reports that the service is up."""
    return dict(status="ok")
@app.post("/query", response_model=QueryResponse)
def query_endpoint(req: QueryRequest) -> QueryResponse:
    """Answer a query, taking prior chat history into account.

    The query string and the history (serialized to plain dicts) are
    forwarded to the RAG pipeline; its result dict is repackaged into
    the response model.
    """
    # Pydantic v2 renamed .dict() to .model_dump() (and v3 removes the
    # old name); support both so the endpoint survives an upgrade.
    history = [
        m.model_dump() if hasattr(m, "model_dump") else m.dict()
        for m in req.history
    ]
    result = app.state.rag.process_query(req.query, history)
    return QueryResponse(
        response=result["response"],
        classification=result["classification"],
        debug_info=result["debug_info"],
    )
# For local run: uvicorn api:app --host 0.0.0.0 --port 8000
if __name__ == "__main__":
    import uvicorn

    # PORT env var overrides the default (common on PaaS hosts).
    port = int(os.environ.get("PORT", "8000"))
    uvicorn.run("api:app", host="0.0.0.0", port=port, reload=False)