Upload folder using huggingface_hub
Browse files- Dockerfile +2 -0
- app.py +5 -12
Dockerfile
CHANGED
|
@@ -14,6 +14,8 @@ RUN pip install --no-cache-dir -r requirements.txt
|
|
| 14 |
# Copy the Flask application and the serialized model
|
| 15 |
COPY app.py .
|
| 16 |
COPY tuned_xgb_sales_forecaster.pkl .
|
|
|
|
|
|
|
| 17 |
|
| 18 |
# Expose the port the Flask app will run on
|
| 19 |
EXPOSE 7860
|
|
|
|
| 14 |
# Copy the Flask application and the serialized model
|
| 15 |
COPY app.py .
|
| 16 |
COPY tuned_xgb_sales_forecaster.pkl .
|
| 17 |
+
COPY tuned_xgb_sales_forecaster.json .
|
| 18 |
+
COPY SuperKart.csv .
|
| 19 |
|
| 20 |
# Expose the port the Flask app will run on
|
| 21 |
EXPOSE 7860
|
app.py
CHANGED
|
@@ -1,12 +1,5 @@
|
|
| 1 |
# app.py — SuperKart Sales Forecaster Backend
|
| 2 |
|
| 3 |
-
import os
|
| 4 |
-
import joblib
|
| 5 |
-
import numpy as np
|
| 6 |
-
import pandas as pd
|
| 7 |
-
from flask import Flask, request, jsonify
|
| 8 |
-
from xgboost import XGBRegressor
|
| 9 |
-
|
| 10 |
# ---------------------------------------------------------
|
| 11 |
# Flask App Setup
|
| 12 |
# ---------------------------------------------------------
|
|
@@ -37,16 +30,16 @@ model_pipeline = None
|
|
| 37 |
try:
|
| 38 |
if os.path.exists(MODEL_PKL):
|
| 39 |
model_pipeline = joblib.load(MODEL_PKL)
|
| 40 |
-
print(f"
|
| 41 |
elif os.path.exists(MODEL_JSON):
|
| 42 |
xgb_model = XGBRegressor()
|
| 43 |
xgb_model.load_model(MODEL_JSON)
|
| 44 |
model_pipeline = xgb_model
|
| 45 |
-
print(f"
|
| 46 |
else:
|
| 47 |
raise FileNotFoundError("No model file found in backend_files/")
|
| 48 |
except Exception as e:
|
| 49 |
-
print(f"
|
| 50 |
|
| 51 |
# ---------------------------------------------------------
|
| 52 |
# Root Route (Required by Hugging Face Spaces)
|
|
@@ -92,7 +85,7 @@ def predict_sales():
|
|
| 92 |
})
|
| 93 |
|
| 94 |
except Exception as e:
|
| 95 |
-
print(f"
|
| 96 |
return jsonify({'error': f'Prediction failed: {str(e)}'}), 400
|
| 97 |
|
| 98 |
# ---------------------------------------------------------
|
|
@@ -100,5 +93,5 @@ def predict_sales():
|
|
| 100 |
# ---------------------------------------------------------
|
| 101 |
if __name__ == '__main__':
|
| 102 |
port = int(os.environ.get("PORT", 7860)) # Hugging Face uses 7860
|
| 103 |
-
print(f"
|
| 104 |
app.run(host='0.0.0.0', port=port)
|
|
|
|
| 1 |
# app.py — SuperKart Sales Forecaster Backend
|
| 2 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 3 |
# ---------------------------------------------------------
|
| 4 |
# Flask App Setup
|
| 5 |
# ---------------------------------------------------------
|
|
|
|
| 30 |
try:
|
| 31 |
if os.path.exists(MODEL_PKL):
|
| 32 |
model_pipeline = joblib.load(MODEL_PKL)
|
| 33 |
+
print(f"Model pipeline loaded successfully from {MODEL_PKL}")
|
| 34 |
elif os.path.exists(MODEL_JSON):
|
| 35 |
xgb_model = XGBRegressor()
|
| 36 |
xgb_model.load_model(MODEL_JSON)
|
| 37 |
model_pipeline = xgb_model
|
| 38 |
+
print(f"XGBoost model loaded from {MODEL_JSON}")
|
| 39 |
else:
|
| 40 |
raise FileNotFoundError("No model file found in backend_files/")
|
| 41 |
except Exception as e:
|
| 42 |
+
print(f"CRITICAL ERROR: Unable to load model — {e}")
|
| 43 |
|
| 44 |
# ---------------------------------------------------------
|
| 45 |
# Root Route (Required by Hugging Face Spaces)
|
|
|
|
| 85 |
})
|
| 86 |
|
| 87 |
except Exception as e:
|
| 88 |
+
print(f"Prediction error: {e}")
|
| 89 |
return jsonify({'error': f'Prediction failed: {str(e)}'}), 400
|
| 90 |
|
| 91 |
# ---------------------------------------------------------
|
|
|
|
| 93 |
# ---------------------------------------------------------
|
| 94 |
if __name__ == '__main__':
|
| 95 |
port = int(os.environ.get("PORT", 7860)) # Hugging Face uses 7860
|
| 96 |
+
print(f"Starting {app.name} on port {port} ...")
|
| 97 |
app.run(host='0.0.0.0', port=port)
|