"""Legal RAG Backend — FastAPI service entry point."""
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from model_loader import predictVerdict, getConfidence
from rag_service import evaluateCase
import uvicorn

# FastAPI application instance for the Legal RAG backend.
app = FastAPI(title="Legal RAG Backend", version="1.0.0")
class PredictRequest(BaseModel):
    """Request body shared by the predict and explain handlers."""

    text: str  # free-form case text to classify / explain
class PredictResponse(BaseModel):
    """Response for the predict handler: verdict label plus confidence."""

    verdict: str       # label produced by predictVerdict
    confidence: float  # score produced by getConfidence
class ExplainResponse(BaseModel):
    """Response for the explain handler.

    Field names mirror the keys of the dict returned by
    rag_service.evaluateCase (see the explain handler).
    """

    verdict: str           # final verdict after RAG evaluation
    legalBertVerdict: str  # verdict from the Legal-BERT classifier alone
    confidence: float
    explanation: str       # generated natural-language rationale
    retrievedChunks: dict  # retrieved context; schema defined by rag_service — confirm there
    prompt: str            # full prompt sent to the generator
@app.get("/health")
async def healthCheck():
    """Liveness probe; returns a static OK payload.

    NOTE(review): the handler had no route decorator in the pasted source,
    so it was never registered. The path "/health" is an assumed
    convention — confirm against the deployment's health-check config.
    """
    return {"status": "ok"}
@app.post("/predict", response_model=PredictResponse)
async def predict(request: PredictRequest):
    """Classify the submitted case text with the base model.

    Calls model_loader twice — once for the verdict label, once for the
    confidence score — and wraps both in a PredictResponse. Any failure
    is surfaced to the client as HTTP 500 carrying the error message.

    NOTE(review): the handler had no route decorator in the pasted source,
    so it was unreachable; path "/predict" is inferred from the handler
    name — confirm against the client code.
    """
    try:
        verdictResult = predictVerdict(request.text)
        confidenceScore = getConfidence(request.text)
        return PredictResponse(verdict=verdictResult, confidence=confidenceScore)
    except Exception as e:
        # Chain the cause so the original traceback survives in server logs.
        raise HTTPException(status_code=500, detail=str(e)) from e
@app.post("/explain", response_model=ExplainResponse)
async def explain(request: PredictRequest):
    """Run the full RAG evaluation pipeline on the submitted case text.

    Delegates to rag_service.evaluateCase and maps its result dict onto
    ExplainResponse field-for-field. Any failure (including a missing key
    in the result dict) is surfaced to the client as HTTP 500.

    NOTE(review): the handler had no route decorator in the pasted source,
    so it was unreachable; path "/explain" is inferred from the handler
    name — confirm against the client code.
    """
    try:
        result = evaluateCase(request.text)
        return ExplainResponse(
            verdict=result["verdict"],
            legalBertVerdict=result["legalBertVerdict"],
            confidence=result["confidence"],
            explanation=result["explanation"],
            retrievedChunks=result["retrievedChunks"],
            prompt=result["prompt"],
        )
    except Exception as e:
        # Chain the cause so the original traceback survives in server logs.
        raise HTTPException(status_code=500, detail=str(e)) from e
if __name__ == "__main__":
    # Run the ASGI server directly when executed as a script.
    # Port 7860 is the conventional Hugging Face Spaces port — the
    # "Spaces" residue at the top of the paste suggests that deployment.
    uvicorn.run(app, host="0.0.0.0", port=7860)