Spaces:
Runtime error
Runtime error
File size: 3,470 Bytes
import os
import sys

import mlflow
import mlflow.pyfunc
import pandas as pd
from dotenv import load_dotenv

# Load .env BEFORE anything else so MODEL_URI / GCP credentials are
# visible to the handler at import time.
load_dotenv()
class FastApiHandler:
    """Handler for rent price prediction using an MLflow pipeline model.

    Lifecycle: on construction it configures Google Cloud credentials
    (needed when the model artifact lives on GCS) and loads the MLflow
    pyfunc model exactly once, so every request reuses the same model.
    """

    def __init__(
        self,
    ):
        # Set by load_model(); kept as attributes so predict()/handle()
        # can check readiness explicitly.
        self.model = None
        self.model_uri = None
        self._configure_gcp_credentials()
        self.load_model()  # Load once at startup

    # -----------------------------------------------------------
    # Configure Google Cloud authentication
    # -----------------------------------------------------------
    @staticmethod
    def _configure_gcp_credentials():
        """Loads GCP credentials from HF ENV or system ENV.

        Priority:
        1. GOOGLE_APPLICATION_CREDENTIALS_JSON (Hugging Face Spaces
           secret holding the raw service-account JSON) — written to a
           temp file and exposed via GOOGLE_APPLICATION_CREDENTIALS.
        2. An already-set GOOGLE_APPLICATION_CREDENTIALS path.
        3. Nothing — warn; GCS-backed model loading may fail later.
        """
        # Hugging Face Spaces: JSON secret
        creds_json = os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON")
        if creds_json:
            print("🔐 Configuring GCP credentials from ENV JSON...")
            # Google client libraries only accept a file path, so the
            # secret's contents must be materialized on disk.
            with open("/tmp/gcp_creds.json", "w") as f:
                f.write(creds_json)
            os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "/tmp/gcp_creds.json"
        # Local dev or Docker with .env
        elif os.getenv("GOOGLE_APPLICATION_CREDENTIALS"):
            print("🔐 Using GOOGLE_APPLICATION_CREDENTIALS from environment")
        else:
            print("⚠️ WARNING: No GCP credentials provided! GCS model loading may fail.")

    # -----------------------------------------------------------
    # Load the MLflow model
    # -----------------------------------------------------------
    def load_model(self):
        """Load the MLflow pyfunc model pointed to by the MODEL_URI env var.

        Raises:
            RuntimeError: if MODEL_URI is not set.
        """
        self.model_uri = os.getenv("MODEL_URI")
        if not self.model_uri:
            raise RuntimeError("MODEL_URI env var not set")
        print(f"📦 Loading MLflow model: {self.model_uri}")
        self.model = mlflow.pyfunc.load_model(self.model_uri)
        print("✅ Model loaded successfully")

    # -----------------------------------------------------------
    # Predict
    # -----------------------------------------------------------
    def predict(self, model_params: dict) -> float:
        """Return a single price prediction for one feature dict.

        Args:
            model_params: feature name -> value mapping for one listing.

        Raises:
            RuntimeError: if the model has not been loaded.
        """
        if self.model is None:
            raise RuntimeError("Model not loaded")
        # Single-row DataFrame: the pyfunc model expects tabular input.
        df = pd.DataFrame([model_params])
        preds = self.model.predict(df)
        return float(preds[0])

    def explain_prediction(self, model_params: dict) -> dict:
        """Return an explanation dict for one prediction.

        Unwraps the custom RentPricePipeline python model to reach its
        explain_predictions() method, which plain pyfunc predict() hides.

        Raises:
            RuntimeError: if the model has not been loaded.
        """
        if self.model is None:
            raise RuntimeError("Model not loaded")
        df = pd.DataFrame([model_params])
        # 🔥 Unwrap the custom RentPricePipeline
        python_model = self.model.unwrap_python_model()
        explanation = python_model.explain_predictions(df)
        return explanation

    # -----------------------------------------------------------
    # FastAPI-compatible handler
    # -----------------------------------------------------------
    def handle(self, params: dict) -> dict:
        """Handle one request payload; never raises to the caller.

        Returns either {"prediction": float, "inputs": dict} on success
        or {"error": str} on any failure (missing key or predict error).
        """
        if "model_params" not in params:
            return {"error": "Missing 'model_params' in request"}
        try:
            prediction = self.predict(params["model_params"])
        except Exception as e:
            # Convert any prediction failure into a JSON-safe error body.
            return {"error": str(e)}
        return {
            "prediction": prediction,
            "inputs": params["model_params"]
        }