# NOTE(review): the following header lines were Hugging Face Spaces file-viewer
# chrome (runtime status, file size, commit hashes, line-number gutter) that was
# scraped into this file; they were not part of the program and broke parsing.
import os
import traceback
from typing import List

import joblib
import numpy as np
from fastapi import FastAPI
from huggingface_hub import hf_hub_download
from pydantic import BaseModel
# Redirect the Hugging Face cache into /tmp — Spaces containers typically only
# allow writes under /tmp, so the default ~/.cache location would fail.
# Set a different cache directory
os.environ["HF_HOME"] = "/tmp/hf_cache"
app = FastAPI()
# Download model
# Fetch the pre-trained random-forest sleep-stage classifier from the Hub at
# startup, then deserialize it once so every request reuses the same model.
# cache_dir is passed explicitly as well, since HF_HOME was set after the
# huggingface_hub import — TODO confirm both are needed.
model_path = hf_hub_download(
repo_id="haseebnawazz/sleep_stage_classifier-RF",
filename="class_balanced_RF_model.joblib",
cache_dir="/tmp/hf_cache"
)
# NOTE(review): joblib.load unpickles arbitrary code — safe only because the
# model repo is trusted/first-party.
model = joblib.load(model_path)
class FeatureInput(BaseModel):
    """Request body for /predict: one flat feature vector for the classifier."""
    features: List[float]
@app.post("/predict")
def predict(input: FeatureInput):
    """Classify one feature vector with the loaded random-forest model.

    Returns {"prediction": [...]} on success, or {"error": message} on failure
    (broad catch is intentional: this is the top-level request boundary).
    """
    try:
        # Model expects a 2-D array of shape (n_samples, n_features).
        features = np.array(input.features).reshape(1, -1)
        prediction = model.predict(features).tolist()
        return {"prediction": prediction}
    except Exception as e:
        # BUG FIX: `traceback` was used here without being imported, so any
        # model error raised a NameError that masked the real failure.
        error_message = traceback.format_exc()
        print("[SERVER ERROR]:", error_message)  # This will show in Hugging Face logs
        return {"error": str(e)}
@app.get("/")
async def read_root():
    """Landing endpoint: static welcome payload, also usable as a liveness check."""
    greeting = {"message": "Welcome to the API"}
    return greeting
@app.get("/logs")
async def get_logs():
    """Placeholder endpoint: returns a static stand-in for container logs."""
    payload = {"logs": "container logs here"}
    return payload