Spaces:
Sleeping
Sleeping
from fastapi import FastAPI
from pydantic import BaseModel
from huggingface_hub import hf_hub_download
import joblib
import pandas as pd

app = FastAPI(title="Food Surplus Predictor API")

# Fetch the serialized estimator from the Hugging Face Hub once at import
# time and keep it in memory so every request reuses the same model object.
model_path = hf_hub_download(
    repo_id="BeeBasic/food-for-all",
    filename="best_model.joblib",
    repo_type="model",
)
model = joblib.load(model_path)
# Input schema for a single canteen/date record.
class CanteenInput(BaseModel):
    """One canteen on one calendar date to predict surplus for."""

    canteen_id: str
    canteen_name: str
    day: int          # presumably the day of month — TODO confirm with caller
    month: int
    year: int
    day_of_week: int  # NOTE(review): numbering convention (0- vs 1-based) not shown here — confirm
class RequestBody(BaseModel):
    """Request payload: a batch of canteen records to score in one call."""

    data: list[CanteenInput]
@app.get("/")
def home():
    """Health-check / index endpoint confirming the service is up.

    NOTE(review): the route decorator was absent in the pasted source even
    though the response message marks this as the running-API index; the
    path "/" is assumed — confirm against the frontend.
    """
    return {"message": "Food Surplus Prediction API is running!"}
@app.post("/predict")
def predict_surplus(request: RequestBody):
    """Predict the food surplus for each canteen record in the request.

    Converts the payload to a DataFrame, one-hot encodes the categorical
    canteen columns, aligns the encoded frame with the feature set the model
    was trained on, and returns the original rows with a
    ``predicted_surplus`` column appended.

    NOTE(review): the route decorator was absent in the pasted source; the
    path "/predict" is assumed — confirm against the frontend.
    """
    # Convert input records to a DataFrame. (Pydantic v1 API; switch to
    # model_dump() if the project moves to Pydantic v2.)
    df = pd.DataFrame([canteen.dict() for canteen in request.data])

    # One-hot encode categorical columns.
    df_encoded = pd.get_dummies(df, columns=["canteen_id", "canteen_name"])

    # Align columns with the features seen at training time. scikit-learn
    # estimators expose ``feature_names_in_``; ``feature_names_`` is kept as
    # a fallback for whatever attribute the saved model actually carries.
    model_features = getattr(model, "feature_names_in_", None)
    if model_features is None:
        model_features = getattr(model, "feature_names_", None)
    # Explicit None check: ``feature_names_in_`` is a numpy array, whose
    # truthiness is ambiguous and would raise in a bare ``if``.
    if model_features is not None:
        for col in model_features:
            if col not in df_encoded.columns:
                df_encoded[col] = 0  # category unseen in this batch
        df_encoded = df_encoded[list(model_features)]

    # Predict and attach results to the original (un-encoded) rows.
    df["predicted_surplus"] = model.predict(df_encoded)
    return df.to_dict(orient="records")
@app.get("/fetch_data")
def fetch_data(date: str):
    """
    Temporary endpoint so your frontend doesn't explode.
    Replace this with an actual DB lookup later if you want real data.
    """
    # NOTE(review): the route decorator was absent in the pasted source even
    # though the docstring calls this an endpoint; the path "/fetch_data" is
    # assumed — confirm against the frontend.
    # You can later connect this to your stored predictions or history table.
    sample_response = {
        "date": date,
        "canteen_id": "C002",
        "canteen_name": "Anna University Mess",
        "predicted_surplus": 24.0,
    }
    return sample_response