Rizwan9 committed on
Commit
32c3d1a
·
verified ·
1 Parent(s): 8fd21e5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -8
app.py CHANGED
@@ -1,4 +1,4 @@
1
- from flask import Flask, request, jsonify
2
  import os
3
  import joblib
4
  import pandas as pd
@@ -6,15 +6,16 @@ from typing import Any, Optional
6
 
7
  app = Flask(__name__)
8
 
 
9
  MODEL_PATH = os.getenv("MODEL_PATH", "best_model_random_forest.joblib")
10
 
11
- _model: Optional[Any] = None # lazy-loaded
12
- _pipe: Optional[Any] = None
13
  _model_error: Optional[str] = None
14
 
15
 
16
  def load_model_if_needed():
17
- """Load the model/pipeline once, on demand. Keeps the app booting even if loading fails."""
18
  global _model, _pipe, _model_error
19
  if _pipe is not None or _model_error is not None:
20
  return
@@ -28,10 +29,16 @@ def load_model_if_needed():
28
  _pipe = _model["pipeline"] if isinstance(_model, dict) and "pipeline" in _model else _model
29
 
30
  except Exception as e:
31
- # Capture the error so the app keeps running and can report it on / and /health
32
  _model_error = f"Failed to load model from '{MODEL_PATH}': {e}"
33
 
34
 
 
 
 
 
 
 
 
35
  @app.route("/", methods=["GET"])
36
  def home():
37
  load_model_if_needed()
@@ -43,7 +50,6 @@ def home():
43
  "model_error": _model_error
44
  })
45
 
46
-
47
  @app.route("/health", methods=["GET"])
48
  def health():
49
  load_model_if_needed()
@@ -51,6 +57,8 @@ def health():
51
  return jsonify({"status": status, "model_path": MODEL_PATH, "model_error": _model_error})
52
 
53
 
 
 
54
  @app.route("/predict", methods=["POST"])
55
  def predict():
56
  load_model_if_needed()
@@ -77,7 +85,9 @@ def predict():
77
  return jsonify({"error": str(e)}), 400
78
 
79
 
80
- # Local dev only; Spaces runs gunicorn and ignores this block.
81
  if __name__ == "__main__":
82
- port = int(os.getenv("PORT", 5000)) # Spaces Server SDK often binds to 5000
 
 
83
  app.run(host="0.0.0.0", port=port, debug=False)
 
1
+ from flask import Flask, request, jsonify, Response
2
  import os
3
  import joblib
4
  import pandas as pd
 
6
 
7
  app = Flask(__name__)
8
 
9
+ # You can override this in Space Settings → Environment variables
10
  MODEL_PATH = os.getenv("MODEL_PATH", "best_model_random_forest.joblib")
11
 
12
+ _model: Optional[Any] = None # raw object or dict bundle
13
+ _pipe: Optional[Any] = None # the actual estimator/pipeline to call .predict()
14
  _model_error: Optional[str] = None
15
 
16
 
17
  def load_model_if_needed():
18
+ """Load the model lazily so the app can boot even if the model is missing."""
19
  global _model, _pipe, _model_error
20
  if _pipe is not None or _model_error is not None:
21
  return
 
29
  _pipe = _model["pipeline"] if isinstance(_model, dict) and "pipeline" in _model else _model
30
 
31
  except Exception as e:
 
32
  _model_error = f"Failed to load model from '{MODEL_PATH}': {e}"
33
 
34
 
35
+ # --- Readiness / health endpoints -------------------------------------------------
36
+
37
@app.route("/__ping__", methods=["GET"])
def ping_plain():
    """Minimal liveness probe for platform health checks.

    Deliberately dependency-free: no model loading, no JSON encoding —
    just a tiny plain-text 200 so the platform can poll it cheaply.
    """
    probe_reply = Response("ok", mimetype="text/plain", status=200)
    return probe_reply
41
+
42
  @app.route("/", methods=["GET"])
43
  def home():
44
  load_model_if_needed()
 
50
  "model_error": _model_error
51
  })
52
 
 
53
  @app.route("/health", methods=["GET"])
54
  def health():
55
  load_model_if_needed()
 
57
  return jsonify({"status": status, "model_path": MODEL_PATH, "model_error": _model_error})
58
 
59
 
60
+ # --- Inference endpoint -----------------------------------------------------------
61
+
62
  @app.route("/predict", methods=["POST"])
63
  def predict():
64
  load_model_if_needed()
 
85
  return jsonify({"error": str(e)}), 400
86
 
87
 
88
# Local development entry point only; in production, Spaces runs gunicorn,
# which imports app:app directly and never executes this branch.
if __name__ == "__main__":
    # 7860 is Hugging Face's conventional port; a PORT env var overrides it
    # for local runs, while the platform manages ports itself in production.
    dev_port = int(os.getenv("PORT", 7860))
    print(f"Starting Flask dev server on http://0.0.0.0:{dev_port}")
    app.run(host="0.0.0.0", port=dev_port, debug=False)