kmsmohamedansar committed on
Commit
4804eae
·
verified ·
1 Parent(s): a722107

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -8
app.py CHANGED
@@ -3,17 +3,17 @@ import pickle
3
  import numpy as np
4
  from sklearn.preprocessing import StandardScaler
5
 
6
- # Load the model at startup
7
  with open("rf_model.pkl", "rb") as f:
8
  MODEL = pickle.load(f)
9
 
10
- # Prediction function
11
  def predict(f1, f2, f3, f4, f5):
12
  X = np.array([[f1, f2, f3, f4, f5]])
13
  X_scaled = StandardScaler().fit_transform(X)
14
  return int(MODEL.predict(X_scaled)[0])
15
 
16
- # Build the interface
17
  demo = gr.Interface(
18
  fn=predict,
19
  inputs=[
@@ -24,11 +24,15 @@ demo = gr.Interface(
24
  gr.Slider(0, 10, step=1, value=4, label="Feature 5"),
25
  ],
26
  outputs=gr.Label(label="Prediction"),
27
- title="TaskMaster Job Scheduler",
28
- description="RandomForest inference on 5 synthetic features",
29
  )
30
 
31
  if __name__ == "__main__":
32
- # Bind to 0.0.0.0 so Spaces can route traffic
33
- # block=True keeps the Python process alive (no immediate exit)
34
- demo.launch(server_name="0.0.0.0", server_port=7860, block=True)
 
 
 
 
 
3
  import numpy as np
4
  from sklearn.preprocessing import StandardScaler
5
 
6
+ # 1) Load your model once at startup
7
  with open("rf_model.pkl", "rb") as f:
8
  MODEL = pickle.load(f)
9
 
10
+ # 2) A simple predict function
11
  def predict(f1, f2, f3, f4, f5):
12
  X = np.array([[f1, f2, f3, f4, f5]])
13
  X_scaled = StandardScaler().fit_transform(X)
14
  return int(MODEL.predict(X_scaled)[0])
15
 
16
+ # 3) Build your Gradio interface
17
  demo = gr.Interface(
18
  fn=predict,
19
  inputs=[
 
24
  gr.Slider(0, 10, step=1, value=4, label="Feature 5"),
25
  ],
26
  outputs=gr.Label(label="Prediction"),
27
+ title="TaskMaster Job Scheduler",
28
+ description="Enter five feature values to get a RandomForest prediction."
29
  )
30
 
31
  if __name__ == "__main__":
32
+ # This tells Gradio to bind to all interfaces, on port 7860,
33
+ # and to BLOCK the Python thread (so the container stays up).
34
+ demo.launch(
35
+ server_name="0.0.0.0",
36
+ server_port=7860,
37
+ prevent_thread_lock=False
38
+ )