File size: 2,066 Bytes
6e641d9
270f7bb
9844436
 
 
270f7bb
9844436
 
 
9bcd4bc
052c000
 
 
 
 
 
 
 
270f7bb
9bcd4bc
9844436
 
dbbebbe
 
270f7bb
052c000
 
dbbebbe
9bcd4bc
270f7bb
 
93a05a4
052c000
93a05a4
dbbebbe
 
270f7bb
 
9bcd4bc
93a05a4
bdcb450
dbbebbe
 
 
 
 
9bcd4bc
dbbebbe
270f7bb
dbbebbe
9bcd4bc
9844436
 
 
 
 
 
 
 
 
 
 
 
 
 
9bcd4bc
9844436
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
import os
import logging
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from huggingface_hub import snapshot_download
from app.api.routes import router
from app.core.config import settings


# --- Hugging Face cache configuration --------------------------------------
# Point every HF cache location at /tmp so downloads land on a writable
# path (useful for containers / read-only root filesystems).
HF_CACHE_DIR = "/tmp/hf_cache"
os.makedirs(HF_CACHE_DIR, exist_ok=True)

os.environ["HF_HOME"] = HF_CACHE_DIR
os.environ["TRANSFORMERS_CACHE"] = HF_CACHE_DIR  # legacy alias, superseded by HF_HOME
os.environ["HF_HUB_CACHE"] = HF_CACHE_DIR
os.environ["HF_DATASETS_CACHE"] = HF_CACHE_DIR
os.environ["HF_METRICS_CACHE"] = HF_CACHE_DIR


logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
logger.info("Downloading model and dataset from Hugging Face Hub...")

# Fail fast at startup if the Hub token is missing rather than erroring
# mid-download. NOTE(review): assumes both repos below require auth —
# if they are public, anonymous access could be a fallback; confirm.
HF_TOKEN = os.getenv("HF_TOKEN")
if not HF_TOKEN:
    raise RuntimeError("HF_TOKEN environment variable not set")


# Fetch the LegalBERT model snapshot into a fixed local directory.
MODEL_PATH = snapshot_download(
    repo_id="negi2725/legalBert",
    token=HF_TOKEN,
    local_dir="/tmp/legalbert_model",
    # Deprecated and a no-op on recent huggingface_hub (files are always
    # copied when local_dir is set); kept for compatibility with older versions.
    local_dir_use_symlinks=False,
)

# Fetch the FAISS index files (stored as a Hub *dataset* repo).
FAISS_INDEX_PATH = snapshot_download(
    repo_id="negi2725/dataRag",
    repo_type="dataset",
    token=HF_TOKEN,
    local_dir="/tmp/faiss_indexes",
)

# Lazy %-style args: formatting is skipped entirely if INFO is disabled.
logger.info("FAISS index files downloaded to: %s", FAISS_INDEX_PATH)
logger.info("Model files downloaded to: %s", MODEL_PATH)


# Publish the resolved paths via the environment — presumably consumed by
# app.core.config / the route handlers; verify against those modules.
os.environ["MODEL_PATH"] = MODEL_PATH
os.environ["FAISS_INDEX_PATH"] = FAISS_INDEX_PATH


# FastAPI application instance; this metadata is surfaced in the generated
# OpenAPI schema and interactive docs (/docs, /redoc).
app = FastAPI(
    title="Legal RAG Analysis API",
    description="FastAPI backend for legal case analysis using RAG system with LegalBERT predictions and Gemini AI evaluation",
    version="1.0.0"
)

# CORS: fully open to any origin.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# contradictory under the CORS spec — browsers reject credentialed responses
# carrying a wildcard Access-Control-Allow-Origin, and Starlette will not
# emit the wildcard in that case. If cookies/credentials are actually needed,
# replace "*" with an explicit list of trusted origins.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


# Mount all API endpoints from app.api.routes under the /api/v1 prefix.
app.include_router(router, prefix="/api/v1")

@app.get("/")
async def root():
    """Root endpoint: identify the API and report its version."""
    payload = {"message": "Legal RAG Analysis API", "version": "1.0.0"}
    return payload

@app.get("/health")
async def health_check():
    """Liveness probe: confirm the service process is up and responding."""
    status_report = {"status": "healthy", "message": "API is running"}
    return status_report

if __name__ == "__main__":
    # Development entry point: bind to all interfaces on port 5000 with
    # auto-reload enabled (the app is passed as an import string, as
    # reload requires).
    server_options = {"host": "0.0.0.0", "port": 5000, "reload": True}
    uvicorn.run("main:app", **server_options)