from flask import Flask, request, jsonify, Response
import os
import joblib
import pandas as pd
from typing import Any, Optional
app = Flask(__name__)
# You can override this in Space Settings → Environment variables
MODEL_PATH = os.getenv("MODEL_PATH", "best_model_random_forest.joblib")
# Lazy-load state, populated by load_model_if_needed():
_model: Optional[Any] = None  # raw object or dict bundle as loaded from disk
_pipe: Optional[Any] = None  # estimator/pipeline to call .predict()
_model_error: Optional[str] = None  # human-readable load-failure reason, if any
def load_model_if_needed():
    """Lazily load the model so the app can boot even if the model is missing.

    On success, fills the module globals ``_model`` and ``_pipe``; on failure,
    records a human-readable message in ``_model_error``. Once either outcome
    has been reached, subsequent calls are no-ops.
    """
    global _model, _pipe, _model_error
    already_resolved = _pipe is not None or _model_error is not None
    if already_resolved:
        return
    try:
        if not os.path.exists(MODEL_PATH):
            _model_error = f"Model file not found at '{MODEL_PATH}'. Upload it or set MODEL_PATH."
            return
        loaded = joblib.load(MODEL_PATH)
        _model = loaded
        # A dict bundle may wrap the estimator under "pipeline"; otherwise the
        # loaded object itself is the estimator.
        if isinstance(loaded, dict) and "pipeline" in loaded:
            _pipe = loaded["pipeline"]
        else:
            _pipe = loaded
    except Exception as e:
        _model_error = f"Failed to load model from '{MODEL_PATH}': {e}"
# --- Readiness / health -----------------------------------------------------------
@app.route("/", methods=["GET"])
def root_html():
    """Serve a minimal landing page so the platform health probe gets a 200."""
    page = (
        "<!doctype html><html><head><meta charset='utf-8'><title>Backend</title></head>"
        "<body><h1>Backend running ✅</h1><p>See <code>/health</code> and <code>/predict</code>.</p></body></html>"
    )
    return Response(page, mimetype="text/html", status=200)
@app.route("/__ping__", methods=["GET"])
def ping_plain():
    """Bare-bones plaintext liveness probe."""
    return Response("ok", status=200, mimetype="text/plain")
@app.route("/health", methods=["GET"])
def health():
    """Report readiness: model path, load status, and any load error."""
    load_model_if_needed()
    healthy = _pipe is not None and _model_error is None
    return jsonify(
        {
            "status": "ok" if healthy else "degraded",
            "model_path": MODEL_PATH,
            "model_error": _model_error,
        }
    )
# --- Inference endpoint -----------------------------------------------------------
@app.route("/predict", methods=["POST"])
def predict():
    """Run inference on a JSON payload.

    Accepts a single object, a list of objects, or {"records": [...]}.
    Returns {"predictions": [...]} on success. Errors come back as JSON:
    500 when the model is unavailable, 400 for bad or empty input.
    """
    load_model_if_needed()
    if _pipe is None:
        return jsonify({"error": f"Model not available. Details: {_model_error}"}), 500
    # silent=True: malformed JSON yields None (handled below) instead of an
    # uncaught parse error that Flask would render as an HTML error page.
    data = request.get_json(force=True, silent=True)
    if data is None:
        return jsonify({"error": "Request body must be valid JSON"}), 400
    # Accept single object, list of objects, or {"records":[...]}
    if isinstance(data, dict) and "records" in data:
        df = pd.DataFrame(data["records"])
    elif isinstance(data, list):
        df = pd.DataFrame(data)
    elif isinstance(data, dict):
        df = pd.DataFrame([data])
    else:
        return jsonify({"error": "Unsupported payload format"}), 400
    if df.empty:
        return jsonify({"error": "No records to score"}), 400
    try:
        preds = _pipe.predict(df)
        # Coerce each prediction rather than assuming float(): a classifier
        # (e.g. a random forest with string class labels) would crash float().
        predictions = [_to_jsonable(p) for p in preds]
        return jsonify({"predictions": predictions})
    except Exception as e:
        return jsonify({"error": str(e)}), 400

def _to_jsonable(value):
    """Coerce one model prediction to a JSON-serializable scalar.

    Numeric outputs become Python floats (the original contract); anything
    non-numeric (e.g. string class labels) is passed through, unwrapping
    numpy scalars via .item() so jsonify can serialize them.
    """
    try:
        return float(value)
    except (TypeError, ValueError):
        return value.item() if hasattr(value, "item") else value
# Local dev only; Spaces imports app:app with gunicorn
if __name__ == "__main__":
    # Hugging Face expects apps to run on the $PORT (defaults to 7860)
    port = int(os.getenv("PORT", 7860))
    print(f"✅ Starting Flask on port {port} for Hugging Face Spaces")
    # Bind to all interfaces so the container's mapped port is reachable.
    app.run(host="0.0.0.0", port=port, debug=False)