Rizwan9 commited on
Commit
8fd21e5
·
verified ·
1 Parent(s): 64e2334

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +49 -33
app.py CHANGED
@@ -1,67 +1,83 @@
1
  from flask import Flask, request, jsonify
 
2
  import joblib
3
  import pandas as pd
4
- import os
5
 
6
  app = Flask(__name__)
7
 
8
- # Default model path (can be changed via Space environment variable)
9
  MODEL_PATH = os.getenv("MODEL_PATH", "best_model_random_forest.joblib")
10
 
11
- # Load model if it exists
12
- if not os.path.exists(MODEL_PATH):
13
- raise FileNotFoundError(
14
- f"❌ Model file not found: {MODEL_PATH}. Please ensure it's uploaded to the Space root directory."
15
- )
16
 
17
- model_bundle = joblib.load(MODEL_PATH)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
- # Support both raw model or model dict with a pipeline
20
- pipe = model_bundle["pipeline"] if isinstance(model_bundle, dict) and "pipeline" in model_bundle else model_bundle
21
 
22
  @app.route("/", methods=["GET"])
23
  def home():
 
24
  return jsonify({
25
  "status": "ok",
26
- "message": "Backend Flask API is running successfully 🎉",
27
- "model_loaded": True,
28
- "model_path": MODEL_PATH
 
29
  })
30
 
 
31
  @app.route("/health", methods=["GET"])
32
  def health():
33
- return jsonify({"status": "ok", "model_path": MODEL_PATH})
 
 
 
34
 
35
  @app.route("/predict", methods=["POST"])
36
  def predict():
37
- """
38
- Example input:
39
- {
40
- "feature1": value1,
41
- "feature2": value2,
42
- ...
43
- }
44
- or
45
- [
46
- {"feature1": value1, "feature2": value2},
47
- {"feature1": value3, "feature2": value4}
48
- ]
49
- """
50
  data = request.get_json(force=True)
51
 
52
- # Normalize input to a DataFrame
53
- df = pd.DataFrame([data]) if isinstance(data, dict) else pd.DataFrame(data)
 
 
 
 
 
 
 
54
 
55
  try:
56
- preds = pipe.predict(df)
57
- # Convert numpy types to native Python floats
58
  predictions = [float(p) for p in preds]
59
  return jsonify({"predictions": predictions})
60
  except Exception as e:
61
  return jsonify({"error": str(e)}), 400
62
 
63
 
64
- # Entry point for local development; Spaces uses gunicorn automatically
65
  if __name__ == "__main__":
66
- port = int(os.getenv("PORT", 7860)) # HF Spaces use port 7860 by default
67
  app.run(host="0.0.0.0", port=port, debug=False)
 
1
  from flask import Flask, request, jsonify
2
+ import os
3
  import joblib
4
  import pandas as pd
5
+ from typing import Any, Optional
6
 
7
  app = Flask(__name__)
8
 
 
9
  MODEL_PATH = os.getenv("MODEL_PATH", "best_model_random_forest.joblib")
10
 
11
+ _model: Optional[Any] = None # lazy-loaded
12
+ _pipe: Optional[Any] = None
13
+ _model_error: Optional[str] = None
14
+
 
15
 
16
def load_model_if_needed():
    """Lazily load the model bundle (and its pipeline) exactly once.

    On any failure the error text is recorded in ``_model_error`` rather
    than raised, so the web app still boots and can report the problem
    from its ``/`` and ``/health`` endpoints. A prior success (``_pipe``
    set) or a prior failure (``_model_error`` set) short-circuits.
    """
    global _model, _pipe, _model_error

    # Guard clause: already loaded, or a previous attempt already failed.
    if _pipe is not None or _model_error is not None:
        return

    try:
        if not os.path.exists(MODEL_PATH):
            _model_error = f"Model file not found at '{MODEL_PATH}'. Upload it or set MODEL_PATH."
            return

        bundle = joblib.load(MODEL_PATH)
        _model = bundle
        # The artifact may be either a bare estimator or a dict bundle
        # carrying the fitted pipeline under the "pipeline" key.
        if isinstance(bundle, dict) and "pipeline" in bundle:
            _pipe = bundle["pipeline"]
        else:
            _pipe = bundle
    except Exception as exc:
        # Swallow deliberately: / and /health surface this message instead.
        _model_error = f"Failed to load model from '{MODEL_PATH}': {exc}"
33
 
 
 
34
 
35
@app.route("/", methods=["GET"])
def home():
    """Landing endpoint: report service status and model availability."""
    load_model_if_needed()
    model_ready = _pipe is not None and _model_error is None
    payload = {
        "status": "ok",
        "framework": "flask",
        "model_path": MODEL_PATH,
        "model_loaded": model_ready,
        "model_error": _model_error,
    }
    return jsonify(payload)
45
 
46
+
47
@app.route("/health", methods=["GET"])
def health():
    """Health probe: "ok" when the model is usable, "degraded" otherwise."""
    load_model_if_needed()
    if _pipe is not None and _model_error is None:
        status = "ok"
    else:
        status = "degraded"
    return jsonify({"status": status, "model_path": MODEL_PATH, "model_error": _model_error})
52
+
53
 
54
@app.route("/predict", methods=["POST"])
def predict():
    """Run model inference on a JSON payload.

    Accepted payload shapes:
      * a single feature object:  {"f1": v1, "f2": v2}
      * a list of feature objects
      * a wrapper object:         {"records": [...]}

    Returns ``{"predictions": [...]}`` on success; a JSON ``{"error": ...}``
    with status 400 for bad input, or 500 when the model is unavailable.
    """
    load_model_if_needed()
    if _pipe is None:
        return jsonify({"error": f"Model not available. Details: {_model_error}"}), 500

    # silent=True: malformed JSON yields None (handled by the format check
    # below) instead of Flask aborting with an HTML error page, keeping
    # every error response in this API's JSON shape.
    data = request.get_json(force=True, silent=True)

    # Accept single object, list of objects, or {"records":[...]}
    try:
        if isinstance(data, dict) and "records" in data:
            df = pd.DataFrame(data["records"])
        elif isinstance(data, list):
            df = pd.DataFrame(data)
        elif isinstance(data, dict):
            df = pd.DataFrame([data])
        else:
            return jsonify({"error": "Unsupported payload format"}), 400
    except Exception as e:
        # e.g. {"records": 5} or rows pandas cannot tabulate — previously
        # this raised outside any handler and produced an HTML 500.
        return jsonify({"error": f"Could not build a DataFrame from payload: {e}"}), 400

    if df.empty:
        return jsonify({"error": "Empty payload: no rows to predict on"}), 400

    try:
        preds = _pipe.predict(df)
        # Convert numpy scalars to native Python floats for JSON encoding.
        predictions = [float(p) for p in preds]
        return jsonify({"predictions": predictions})
    except Exception as e:
        return jsonify({"error": str(e)}), 400
78
 
79
 
80
# Local development entry point; on Spaces the app is served by gunicorn
# and this block never runs.
if __name__ == "__main__":
    # NOTE(review): HF Spaces conventionally expose port 7860 — confirm
    # that defaulting to 5000 here is intended for this deployment.
    serve_port = int(os.getenv("PORT", 5000))
    app.run(host="0.0.0.0", port=serve_port, debug=False)