SagarChhabriya commited on
Commit
33ad2a3
·
verified ·
1 Parent(s): 65ca3fe

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +180 -451
app.py CHANGED
@@ -1,452 +1,181 @@
1
- # ### app.py for vercel deployment #####
2
-
3
- # from fastapi import FastAPI, HTTPException
4
- # from pydantic import BaseModel
5
- # import joblib
6
- # import numpy as np
7
- # # import pandas as pd
8
- # import os
9
-
10
- # app = FastAPI(title="ML Model API", version="1.0.0")
11
-
12
- # # Load model and feature names
13
- # try:
14
- # model = joblib.load('model/model.joblib')
15
- # feature_names = joblib.load('model/feature_names.joblib')
16
- # print("Model loaded successfully!")
17
- # except Exception as e:
18
- # print(f"Error loading model: {e}")
19
- # model = None
20
- # feature_names = []
21
-
22
- # # Define input schema
23
- # class PredictionInput(BaseModel):
24
- # sepal_length: float
25
- # sepal_width: float
26
- # petal_length: float
27
- # petal_width: float
28
-
29
- # class BatchPredictionInput(BaseModel):
30
- # data: list[list[float]] # List of feature arrays
31
-
32
- # @app.get("/")
33
- # async def root():
34
- # return {
35
- # "message": "ML Model API is running!",
36
- # "endpoints": {
37
- # "health": "/health",
38
- # "single_prediction": "/predict",
39
- # "batch_prediction": "/predict-batch",
40
- # "model_info": "/model-info"
41
- # }
42
- # }
43
-
44
- # @app.get("/health")
45
- # async def health_check():
46
- # return {
47
- # "status": "healthy",
48
- # "model_loaded": model is not None,
49
- # "model_type": "RandomForestClassifier" if model else "None"
50
- # }
51
-
52
- # @app.get("/model-info")
53
- # async def model_info():
54
- # if not model:
55
- # raise HTTPException(status_code=500, detail="Model not loaded")
56
-
57
- # return {
58
- # "model_type": str(type(model).__name__),
59
- # "feature_names": feature_names,
60
- # "n_features": len(feature_names),
61
- # "n_classes": getattr(model, 'n_classes_', 'Unknown')
62
- # }
63
-
64
- # @app.post("/predict")
65
- # async def predict_single(input_data: PredictionInput):
66
- # if not model:
67
- # raise HTTPException(status_code=500, detail="Model not loaded")
68
-
69
- # try:
70
- # # Convert input to array
71
- # features = np.array([
72
- # input_data.sepal_length,
73
- # input_data.sepal_width,
74
- # input_data.petal_length,
75
- # input_data.petal_width
76
- # ]).reshape(1, -1)
77
-
78
- # # Make prediction
79
- # prediction = model.predict(features)
80
- # probabilities = model.predict_proba(features)
81
-
82
- # return {
83
- # "prediction": int(prediction[0]),
84
- # "probabilities": probabilities[0].tolist(),
85
- # "class_names": ["setosa", "versicolor", "virginica"], # Replace with your class names
86
- # "input_features": input_data.dict()
87
- # }
88
- # except Exception as e:
89
- # raise HTTPException(status_code=400, detail=f"Prediction error: {str(e)}")
90
-
91
- # @app.post("/predict-batch")
92
- # async def predict_batch(input_data: BatchPredictionInput):
93
- # if not model:
94
- # raise HTTPException(status_code=500, detail="Model not loaded")
95
-
96
- # try:
97
- # # Convert to numpy array
98
- # features = np.array(input_data.data)
99
-
100
- # # Validate input shape
101
- # if features.shape[1] != len(feature_names):
102
- # raise HTTPException(
103
- # status_code=400,
104
- # detail=f"Expected {len(feature_names)} features, got {features.shape[1]}"
105
- # )
106
-
107
- # # Make predictions
108
- # predictions = model.predict(features)
109
- # probabilities = model.predict_proba(features)
110
-
111
- # return {
112
- # "predictions": predictions.tolist(),
113
- # "probabilities": probabilities.tolist(),
114
- # "batch_size": len(predictions)
115
- # }
116
- # except Exception as e:
117
- # raise HTTPException(status_code=400, detail=f"Batch prediction error: {str(e)}")
118
-
119
- # # For local development
120
- # if __name__ == "__main__":
121
- # import uvicorn
122
- # uvicorn.run(app, host="0.0.0.0", port=8000)
123
-
124
-
125
- ############ Via HuggingFace ################
126
- # from fastapi import FastAPI, HTTPException
127
- # from pydantic import BaseModel
128
- # from huggingface_hub import hf_hub_download
129
- # import joblib
130
- # import numpy as np
131
- # import os
132
-
133
- # app = FastAPI(title="ML Model API on Hugging Face", version="1.0.0")
134
-
135
- # # Hugging Face model repository
136
- # HF_REPO_ID = "SagarChhabriya/ml-model-api"
137
- # MODEL_FILENAME = "model.joblib"
138
- # FEATURE_NAMES_FILENAME = "feature_names.joblib"
139
-
140
- # # Load model from Hugging Face Hub
141
- # def load_model_from_hf():
142
- # try:
143
- # print("📥 Downloading model from Hugging Face Hub...")
144
-
145
- # # Download model file
146
- # model_path = hf_hub_download(
147
- # repo_id=HF_REPO_ID,
148
- # filename=MODEL_FILENAME
149
- # )
150
-
151
- # # Download feature names file
152
- # feature_names_path = hf_hub_download(
153
- # repo_id=HF_REPO_ID,
154
- # filename=FEATURE_NAMES_FILENAME
155
- # )
156
-
157
- # # Load files
158
- # model = joblib.load(model_path)
159
- # feature_names = joblib.load(feature_names_path)
160
-
161
- # print("Model and feature names loaded successfully from Hugging Face!")
162
- # return model, feature_names
163
-
164
- # except Exception as e:
165
- # print(f"Error loading from Hugging Face: {e}")
166
- # return None, []
167
-
168
- # # Load model on startup
169
- # model, feature_names = load_model_from_hf()
170
-
171
- # # Define input schema
172
- # class PredictionInput(BaseModel):
173
- # sepal_length: float
174
- # sepal_width: float
175
- # petal_length: float
176
- # petal_width: float
177
-
178
- # class BatchPredictionInput(BaseModel):
179
- # data: list[list[float]] # List of feature arrays
180
-
181
- # @app.get("/")
182
- # async def root():
183
- # return {
184
- # "message": "ML Model API deployed on Hugging Face Spaces! 🚀",
185
- # "endpoints": {
186
- # "health": "/health",
187
- # "single_prediction": "/predict",
188
- # "batch_prediction": "/predict-batch",
189
- # "model_info": "/model-info",
190
- # "docs": "/docs"
191
- # },
192
- # "model_source": "Hugging Face Hub",
193
- # "repository": HF_REPO_ID
194
- # }
195
-
196
- # @app.get("/health")
197
- # async def health_check():
198
- # return {
199
- # "status": "healthy",
200
- # "model_loaded": model is not None,
201
- # "model_type": "RandomForestClassifier" if model else "None",
202
- # "features_loaded": len(feature_names) > 0
203
- # }
204
-
205
- # @app.get("/model-info")
206
- # async def model_info():
207
- # if not model:
208
- # raise HTTPException(status_code=500, detail="Model not loaded")
209
-
210
- # return {
211
- # "model_type": str(type(model).__name__),
212
- # "feature_names": feature_names,
213
- # "n_features": len(feature_names),
214
- # "n_classes": getattr(model, 'n_classes_', 'Unknown'),
215
- # "source": HF_REPO_ID
216
- # }
217
-
218
- # @app.post("/predict")
219
- # async def predict_single(input_data: PredictionInput):
220
- # if not model:
221
- # raise HTTPException(status_code=500, detail="Model not loaded")
222
-
223
- # try:
224
- # # Convert input to array
225
- # features = np.array([
226
- # input_data.sepal_length,
227
- # input_data.sepal_width,
228
- # input_data.petal_length,
229
- # input_data.petal_width
230
- # ]).reshape(1, -1)
231
-
232
- # # Make prediction
233
- # prediction = model.predict(features)
234
- # probabilities = model.predict_proba(features)
235
-
236
- # return {
237
- # "prediction": int(prediction[0]),
238
- # "probabilities": probabilities[0].tolist(),
239
- # "class_names": ["setosa", "versicolor", "virginica"],
240
- # "input_features": input_data.dict(),
241
- # "model_source": HF_REPO_ID
242
- # }
243
- # except Exception as e:
244
- # raise HTTPException(status_code=400, detail=f"Prediction error: {str(e)}")
245
-
246
- # @app.post("/predict-batch")
247
- # async def predict_batch(input_data: BatchPredictionInput):
248
- # if not model:
249
- # raise HTTPException(status_code=500, detail="Model not loaded")
250
-
251
- # try:
252
- # # Convert to numpy array
253
- # features = np.array(input_data.data)
254
-
255
- # # Validate input shape
256
- # if features.shape[1] != len(feature_names):
257
- # raise HTTPException(
258
- # status_code=400,
259
- # detail=f"Expected {len(feature_names)} features, got {features.shape[1]}"
260
- # )
261
-
262
- # # Make predictions
263
- # predictions = model.predict(features)
264
- # probabilities = model.predict_proba(features)
265
-
266
- # return {
267
- # "predictions": predictions.tolist(),
268
- # "probabilities": probabilities.tolist(),
269
- # "batch_size": len(predictions),
270
- # "feature_names": feature_names,
271
- # "model_source": HF_REPO_ID
272
- # }
273
- # except Exception as e:
274
- # raise HTTPException(status_code=400, detail=f"Batch prediction error: {str(e)}")
275
-
276
-
277
-
278
-
279
-
280
-
281
-
282
-
283
-
284
- ############################ HuggingFace + GitHub ###############################
285
- from fastapi import FastAPI, HTTPException
286
- from pydantic import BaseModel
287
- import joblib
288
- import numpy as np
289
- import requests
290
- import tempfile
291
- import os
292
-
293
- app = FastAPI(title="ML Model API", version="1.0.0")
294
-
295
- # GitHub raw content URLs - using YOUR actual GitHub repository
296
- GITHUB_MODEL_URL = "https://raw.githubusercontent.com/SagarChhabriya/ml-model-api/main/model/model.joblib"
297
- GITHUB_FEATURES_URL = "https://raw.githubusercontent.com/SagarChhabriya/ml-model-api/main/model/feature_names.joblib"
298
-
299
- # Load model from GitHub
300
- def load_model_from_github():
301
- try:
302
- print("📥 Downloading model from GitHub...")
303
-
304
- # Download model file
305
- model_response = requests.get(GITHUB_MODEL_URL)
306
- model_response.raise_for_status() # Raise error if download fails
307
-
308
- # Download feature names file
309
- features_response = requests.get(GITHUB_FEATURES_URL)
310
- features_response.raise_for_status()
311
-
312
- # Save to temporary files
313
- with tempfile.NamedTemporaryFile(delete=False, suffix='.joblib') as model_tmp:
314
- model_tmp.write(model_response.content)
315
- model_path = model_tmp.name
316
-
317
- with tempfile.NamedTemporaryFile(delete=False, suffix='.joblib') as features_tmp:
318
- features_tmp.write(features_response.content)
319
- features_path = features_tmp.name
320
-
321
- # Load the files
322
- model = joblib.load(model_path)
323
- feature_names = joblib.load(features_path)
324
-
325
- # Clean up temporary files
326
- os.unlink(model_path)
327
- os.unlink(features_path)
328
-
329
- print("✅ Model loaded successfully from GitHub!")
330
- print(f"📊 Model type: {type(model).__name__}")
331
- print(f"📈 Features: {feature_names}")
332
-
333
- return model, feature_names
334
-
335
- except Exception as e:
336
- print(f"❌ Error loading from GitHub: {e}")
337
- return None, []
338
-
339
- # Load model on startup
340
- model, feature_names = load_model_from_github()
341
-
342
- # Define input schema
343
- class PredictionInput(BaseModel):
344
- sepal_length: float
345
- sepal_width: float
346
- petal_length: float
347
- petal_width: float
348
-
349
- class BatchPredictionInput(BaseModel):
350
- data: list[list[float]]
351
-
352
- @app.get("/")
353
- async def root():
354
- return {
355
- "message": "ML Model API deployed on Hugging Face Spaces! 🚀",
356
- "endpoints": {
357
- "health": "/health",
358
- "single_prediction": "/predict",
359
- "batch_prediction": "/predict-batch",
360
- "model_info": "/model-info",
361
- "docs": "/docs"
362
- },
363
- "model_loaded": model is not None,
364
- "model_source": "GitHub"
365
- }
366
-
367
- @app.get("/health")
368
- async def health_check():
369
- return {
370
- "status": "healthy" if model else "unhealthy",
371
- "model_loaded": model is not None,
372
- "model_type": "RandomForestClassifier" if model else "None"
373
- }
374
-
375
- @app.get("/model-info")
376
- async def model_info():
377
- if not model:
378
- raise HTTPException(status_code=500, detail="Model not loaded")
379
-
380
- return {
381
- "model_type": str(type(model).__name__),
382
- "feature_names": feature_names,
383
- "n_features": len(feature_names),
384
- "n_classes": getattr(model, 'n_classes_', 'Unknown')
385
- }
386
-
387
- @app.post("/predict")
388
- async def predict_single(input_data: PredictionInput):
389
- if not model:
390
- raise HTTPException(status_code=500, detail="Model not loaded")
391
-
392
- try:
393
- # Convert input to array
394
- features = np.array([
395
- input_data.sepal_length,
396
- input_data.sepal_width,
397
- input_data.petal_length,
398
- input_data.petal_width
399
- ]).reshape(1, -1)
400
-
401
- # Make prediction
402
- prediction = model.predict(features)
403
- probabilities = model.predict_proba(features)
404
-
405
- return {
406
- "prediction": int(prediction[0]),
407
- "probabilities": probabilities[0].tolist(),
408
- "class_names": ["setosa", "versicolor", "virginica"],
409
- "input_features": input_data.dict()
410
- }
411
- except Exception as e:
412
- raise HTTPException(status_code=400, detail=f"Prediction error: {str(e)}")
413
-
414
- @app.post("/predict-batch")
415
- async def predict_batch(input_data: BatchPredictionInput):
416
- if not model:
417
- raise HTTPException(status_code=500, detail="Model not loaded")
418
-
419
- try:
420
- # Convert to numpy array
421
- features = np.array(input_data.data)
422
-
423
- # Validate input shape
424
- if features.shape[1] != len(feature_names):
425
- raise HTTPException(
426
- status_code=400,
427
- detail=f"Expected {len(feature_names)} features, got {features.shape[1]}"
428
- )
429
-
430
- # Make predictions
431
- predictions = model.predict(features)
432
- probabilities = model.predict_proba(features)
433
-
434
- return {
435
- "predictions": predictions.tolist(),
436
- "probabilities": probabilities.tolist(),
437
- "batch_size": len(predictions)
438
- }
439
- except Exception as e:
440
- raise HTTPException(status_code=400, detail=f"Batch prediction error: {str(e)}")
441
-
442
- @app.get("/debug")
443
- async def debug():
444
- """Debug endpoint to check model loading status"""
445
- return {
446
- "model_loaded": model is not None,
447
- "features_loaded": len(feature_names) > 0 if feature_names else False,
448
- "feature_names": feature_names if feature_names else "Not loaded",
449
- "model_type": str(type(model).__name__) if model else "Not loaded",
450
- "github_model_url": GITHUB_MODEL_URL,
451
- "github_features_url": GITHUB_FEATURES_URL
452
  }
 
1
+ ############################ HuggingFace + GitHub ###############################
2
+ from fastapi import FastAPI, HTTPException
3
+ from pydantic import BaseModel
4
+ import joblib
5
+ import numpy as np
6
+ import requests
7
+ import tempfile
8
+ import os
9
+ from fastapi.middleware.cors import CORSMiddleware
10
+
11
+
12
+
13
# FastAPI application instance.
app = FastAPI(title="ML Model API", version="1.0.0")

# Enable CORS so browser clients on other origins (e.g. a Hugging Face
# Space frontend) can call this API.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# ineffective in browsers — credentialed requests require an explicit origin
# in the Access-Control-Allow-Origin header. Confirm whether credentials are
# actually needed here.
app.add_middleware(
    CORSMiddleware,
    allow_credentials=True,
    allow_origins=["*"],    # all origins
    allow_methods=["*"],    # all HTTP methods
    allow_headers=["*"],    # all request headers
)
23
+
24
+ # GitHub raw content URLs - using YOUR actual GitHub repository
25
+ GITHUB_MODEL_URL = "https://raw.githubusercontent.com/SagarChhabriya/ml-model-api/main/model/model.joblib"
26
+ GITHUB_FEATURES_URL = "https://raw.githubusercontent.com/SagarChhabriya/ml-model-api/main/model/feature_names.joblib"
27
+
28
+ # Load model from GitHub
29
+ def load_model_from_github():
30
+ try:
31
+ print("📥 Downloading model from GitHub...")
32
+
33
+ # Download model file
34
+ model_response = requests.get(GITHUB_MODEL_URL)
35
+ model_response.raise_for_status() # Raise error if download fails
36
+
37
+ # Download feature names file
38
+ features_response = requests.get(GITHUB_FEATURES_URL)
39
+ features_response.raise_for_status()
40
+
41
+ # Save to temporary files
42
+ with tempfile.NamedTemporaryFile(delete=False, suffix='.joblib') as model_tmp:
43
+ model_tmp.write(model_response.content)
44
+ model_path = model_tmp.name
45
+
46
+ with tempfile.NamedTemporaryFile(delete=False, suffix='.joblib') as features_tmp:
47
+ features_tmp.write(features_response.content)
48
+ features_path = features_tmp.name
49
+
50
+ # Load the files
51
+ model = joblib.load(model_path)
52
+ feature_names = joblib.load(features_path)
53
+
54
+ # Clean up temporary files
55
+ os.unlink(model_path)
56
+ os.unlink(features_path)
57
+
58
+ print(" Model loaded successfully from GitHub!")
59
+ print(f"📊 Model type: {type(model).__name__}")
60
+ print(f"📈 Features: {feature_names}")
61
+
62
+ return model, feature_names
63
+
64
+ except Exception as e:
65
+ print(f"❌ Error loading from GitHub: {e}")
66
+ return None, []
67
+
68
+ # Load model on startup
69
+ model, feature_names = load_model_from_github()
70
+
71
# ---- Request schemas -------------------------------------------------------

class PredictionInput(BaseModel):
    """A single iris sample: the four raw feature measurements."""
    sepal_length: float
    sepal_width: float
    petal_length: float
    petal_width: float


class BatchPredictionInput(BaseModel):
    """A batch of samples; each inner list is one row of feature values."""
    data: list[list[float]]
80
+
81
+ @app.get("/")
82
+ async def root():
83
+ return {
84
+ "message": "ML Model API deployed on Hugging Face Spaces! 🚀",
85
+ "endpoints": {
86
+ "health": "/health",
87
+ "single_prediction": "/predict",
88
+ "batch_prediction": "/predict-batch",
89
+ "model_info": "/model-info",
90
+ "docs": "/docs"
91
+ },
92
+ "model_loaded": model is not None,
93
+ "model_source": "GitHub"
94
+ }
95
+
96
+ @app.get("/health")
97
+ async def health_check():
98
+ return {
99
+ "status": "healthy" if model else "unhealthy",
100
+ "model_loaded": model is not None,
101
+ "model_type": "RandomForestClassifier" if model else "None"
102
+ }
103
+
104
+ @app.get("/model-info")
105
+ async def model_info():
106
+ if not model:
107
+ raise HTTPException(status_code=500, detail="Model not loaded")
108
+
109
+ return {
110
+ "model_type": str(type(model).__name__),
111
+ "feature_names": feature_names,
112
+ "n_features": len(feature_names),
113
+ "n_classes": getattr(model, 'n_classes_', 'Unknown')
114
+ }
115
+
116
+ @app.post("/predict")
117
+ async def predict_single(input_data: PredictionInput):
118
+ if not model:
119
+ raise HTTPException(status_code=500, detail="Model not loaded")
120
+
121
+ try:
122
+ # Convert input to array
123
+ features = np.array([
124
+ input_data.sepal_length,
125
+ input_data.sepal_width,
126
+ input_data.petal_length,
127
+ input_data.petal_width
128
+ ]).reshape(1, -1)
129
+
130
+ # Make prediction
131
+ prediction = model.predict(features)
132
+ probabilities = model.predict_proba(features)
133
+
134
+ return {
135
+ "prediction": int(prediction[0]),
136
+ "probabilities": probabilities[0].tolist(),
137
+ "class_names": ["setosa", "versicolor", "virginica"],
138
+ "input_features": input_data.dict()
139
+ }
140
+ except Exception as e:
141
+ raise HTTPException(status_code=400, detail=f"Prediction error: {str(e)}")
142
+
143
+ @app.post("/predict-batch")
144
+ async def predict_batch(input_data: BatchPredictionInput):
145
+ if not model:
146
+ raise HTTPException(status_code=500, detail="Model not loaded")
147
+
148
+ try:
149
+ # Convert to numpy array
150
+ features = np.array(input_data.data)
151
+
152
+ # Validate input shape
153
+ if features.shape[1] != len(feature_names):
154
+ raise HTTPException(
155
+ status_code=400,
156
+ detail=f"Expected {len(feature_names)} features, got {features.shape[1]}"
157
+ )
158
+
159
+ # Make predictions
160
+ predictions = model.predict(features)
161
+ probabilities = model.predict_proba(features)
162
+
163
+ return {
164
+ "predictions": predictions.tolist(),
165
+ "probabilities": probabilities.tolist(),
166
+ "batch_size": len(predictions)
167
+ }
168
+ except Exception as e:
169
+ raise HTTPException(status_code=400, detail=f"Batch prediction error: {str(e)}")
170
+
171
+ @app.get("/debug")
172
+ async def debug():
173
+ """Debug endpoint to check model loading status"""
174
+ return {
175
+ "model_loaded": model is not None,
176
+ "features_loaded": len(feature_names) > 0 if feature_names else False,
177
+ "feature_names": feature_names if feature_names else "Not loaded",
178
+ "model_type": str(type(model).__name__) if model else "Not loaded",
179
+ "github_model_url": GITHUB_MODEL_URL,
180
+ "github_features_url": GITHUB_FEATURES_URL
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
181
  }