# "Spaces: Runtime error" — Hugging Face Spaces status banner captured with the paste; not part of the source.
# Standard library
import os
import sys

# Third-party
import mlflow
import mlflow.pyfunc
import pandas as pd
from dotenv import load_dotenv

# Read the .env file into the environment before anything else consults it
# (credentials and MODEL_URI are resolved from env vars at import time below).
load_dotenv()

# Ensure the project root (which contains 'src') is in sys.path
# project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
# if project_root not in sys.path:
#     sys.path.insert(0, project_root)
class FastApiHandler:
    """Handler for rent price prediction using an MLflow pyfunc pipeline model.

    Configures GCP credentials and loads the model once at construction time,
    then serves predictions through the FastAPI-facing `handle` entry point.
    """

    def __init__(self):
        self.model = None       # mlflow.pyfunc model, set by load_model()
        self.model_uri = None   # resolved from the MODEL_URI env var
        self._configure_gcp_credentials()
        self.load_model()  # Load once at startup so requests never pay the cost

    # -----------------------------------------------------------
    # Configure Google Cloud authentication
    # -----------------------------------------------------------
    def _configure_gcp_credentials(self):
        """Load GCP credentials from a JSON env secret or a key-file path.

        Priority:
        1. GOOGLE_APPLICATION_CREDENTIALS_JSON (Hugging Face Spaces secret):
           written to /tmp/gcp_creds.json and GOOGLE_APPLICATION_CREDENTIALS
           is pointed at that file.
        2. GOOGLE_APPLICATION_CREDENTIALS already set (local dev / Docker).
        3. Neither: warn only — GCS model loading may fail later.
        """
        # BUG FIX: the original definition was missing `self`, so the
        # `self._configure_gcp_credentials()` call in __init__ raised
        # TypeError at startup.
        # NOTE(review): the original log prefixes were mojibake-mangled emoji
        # (encoding damage); replaced with plain-text messages.
        creds_json = os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON")
        if creds_json:
            print("Configuring GCP credentials from ENV JSON...")
            with open("/tmp/gcp_creds.json", "w") as f:
                f.write(creds_json)
            os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "/tmp/gcp_creds.json"
        # Local dev or Docker with .env
        elif os.getenv("GOOGLE_APPLICATION_CREDENTIALS"):
            print("Using GOOGLE_APPLICATION_CREDENTIALS from environment")
        else:
            print("WARNING: No GCP credentials provided! GCS model loading may fail.")

    # -----------------------------------------------------------
    # Load the MLflow model
    # -----------------------------------------------------------
    def load_model(self):
        """Load the pyfunc model from the URI in the MODEL_URI env var.

        Raises:
            RuntimeError: if MODEL_URI is not set.
        """
        self.model_uri = os.getenv("MODEL_URI")
        if not self.model_uri:
            raise RuntimeError("MODEL_URI env var not set")
        print(f"Loading MLflow model: {self.model_uri}")
        self.model = mlflow.pyfunc.load_model(self.model_uri)
        print("Model loaded successfully")

    # -----------------------------------------------------------
    # Predict
    # -----------------------------------------------------------
    def predict(self, model_params: dict) -> float:
        """Return the model's prediction for a single feature dict.

        Raises:
            RuntimeError: if the model has not been loaded.
        """
        if self.model is None:
            raise RuntimeError("Model not loaded")
        # The pyfunc interface expects a DataFrame; wrap the single record.
        df = pd.DataFrame([model_params])
        preds = self.model.predict(df)
        return float(preds[0])

    def explain_prediction(self, model_params: dict) -> dict:
        """Return an explanation for one prediction via the wrapped pipeline.

        Raises:
            RuntimeError: if the model has not been loaded.
        """
        if self.model is None:
            raise RuntimeError("Model not loaded")
        df = pd.DataFrame([model_params])
        # Unwrap the custom RentPricePipeline to reach its explain method,
        # which the generic pyfunc predict() API does not expose.
        python_model = self.model.unwrap_python_model()
        return python_model.explain_predictions(df)

    # -----------------------------------------------------------
    # FastAPI-compatible handler
    # -----------------------------------------------------------
    def handle(self, params: dict) -> dict:
        """Validate the request dict and return a prediction or error payload.

        Always returns a JSON-serializable dict so the endpoint never 500s
        on a prediction failure.
        """
        if "model_params" not in params:
            return {"error": "Missing 'model_params' in request"}
        try:
            prediction = self.predict(params["model_params"])
        except Exception as e:
            return {"error": str(e)}
        return {
            "prediction": prediction,
            "inputs": params["model_params"],
        }