Upload app.py with huggingface_hub
Browse files
app.py
CHANGED
|
@@ -5,6 +5,7 @@ import xgboost as xgb
|
|
| 5 |
import numpy as np
import pickle
from huggingface_hub import hf_hub_download

# FastAPI application object; routes and startup hooks are registered on it below.
# NOTE(review): FastAPI itself is imported above this chunk — not visible here.
app = FastAPI(title="Headache Predictor API")
|
@@ -15,15 +16,22 @@ model = None
|
|
| 15 |
async def load_model():
    """Download the XGBoost model pickle from the Hugging Face Hub and load it.

    Stores the unpickled model in the module-level ``model`` global. On any
    failure the global is left unchanged (``None`` at startup) and the error
    is reported to stdout — best-effort so the app still starts.
    """
    global model
    try:
        # The default HF cache (~/.cache/huggingface) is frequently read-only
        # inside containers (e.g. HF Spaces), which makes the download fail.
        # Use an explicitly writable cache directory instead.
        import os
        cache_dir = "/tmp/hf_cache"
        os.makedirs(cache_dir, exist_ok=True)

        model_path = hf_hub_download(
            repo_id="emp-admin/headache-predictor-xgboost",
            filename="model.pkl",
            cache_dir=cache_dir,
        )
        # NOTE(review): pickle.load is only acceptable because the repo is
        # owner-controlled; never unpickle artifacts from untrusted sources.
        with open(model_path, 'rb') as f:
            model = pickle.load(f)
        print("✅ Model loaded successfully")
    except Exception as e:
        print(f"❌ Error loading model: {e}")
        # Surface the full traceback instead of swallowing it silently.
        import traceback
        traceback.print_exc()
|
|
|
|
|
|
|
| 27 |
|
| 28 |
class PredictionRequest(BaseModel):
    """Request body for prediction endpoints.

    ``features`` is a flat list of numeric feature values; ordering must
    match what the model was trained on — presumably defined by the training
    pipeline (TODO confirm against the model repo).
    """
    # Flat numeric feature vector supplied by the caller.
    features: list[float]
|
|
|
|
| 5 |
import numpy as np
|
| 6 |
import pickle
|
| 7 |
from huggingface_hub import hf_hub_download
|
| 8 |
+
import os
|
| 9 |
|
| 10 |
app = FastAPI(title="Headache Predictor API")
|
| 11 |
|
|
|
|
| 16 |
async def load_model():
    """Download the XGBoost model pickle from the Hugging Face Hub and load it.

    Stores the unpickled model in the module-level ``model`` global. On any
    failure the global is left unchanged (``None`` at startup), the error is
    printed, and the full traceback is emitted — best-effort so the app
    still starts even if the model cannot be fetched.
    """
    global model
    try:
        # Set cache directory to a writable location: the default HF cache
        # (~/.cache/huggingface) is frequently read-only inside containers
        # (e.g. HF Spaces), which makes the download fail.
        cache_dir = "/tmp/hf_cache"
        os.makedirs(cache_dir, exist_ok=True)

        model_path = hf_hub_download(
            repo_id="emp-admin/headache-predictor-xgboost",
            filename="model.pkl",
            cache_dir=cache_dir,
        )
        # NOTE(review): pickle.load is only acceptable because the repo is
        # owner-controlled; never unpickle artifacts from untrusted sources.
        with open(model_path, 'rb') as f:
            model = pickle.load(f)
        print("✅ Model loaded successfully")
    except Exception as e:
        print(f"❌ Error loading model: {e}")
        # Surface the full traceback instead of swallowing it silently.
        import traceback
        traceback.print_exc()
|
| 35 |
|
| 36 |
class PredictionRequest(BaseModel):
    """Request body for prediction endpoints.

    ``features`` is a flat list of numeric feature values; ordering must
    match what the model was trained on — presumably defined by the training
    pipeline (TODO confirm against the model repo).
    """
    # Flat numeric feature vector supplied by the caller.
    features: list[float]