Spaces:
Runtime error
Runtime error
File size: 1,700 Bytes
# app.py
import joblib
import numpy as np
import pandas as pd
from flask import Flask, request, jsonify
# Filename of the serialized model pipeline loaded at startup.
MODEL_FILE = 'tuned_xgb_sales_forecaster.pkl'

# The 10 feature columns the trained pipeline expects, in this exact order.
FEATURE_COLS = [
    'Product_Weight',
    'Product_Sugar_Content',
    'Product_Allocated_Area',
    'Product_Type',
    'Product_MRP',
    'Store_Size',
    'Store_Location_City_Type',
    'Store_Type',
    'Store_Age',
    'Product_Category_Simplified',
]
# --- Load the Model Pipeline ---
# The pipeline is deserialized once at import time so every request shares
# the same in-memory model. On failure we keep a None sentinel (checked by
# the /predict handler) instead of crashing the whole server at startup.
try:
    model_pipeline = joblib.load(MODEL_FILE)
except Exception as e:
    print(f"CRITICAL ERROR: Model not found: {e}. Check Dockerfile.")
    model_pipeline = None
else:
    print("Model loaded successfully.")

app = Flask(__name__)
@app.route('/predict', methods=['POST'])
def predict_sales():
    """Predict sales revenue for one record POSTed as JSON.

    Expects a JSON object (or a list of objects) whose keys are the
    entries of FEATURE_COLS. Returns a JSON response with the model's
    first prediction mapped back from log scale to the original scale,
    or an error payload with HTTP 500 (model missing) / 400 (bad input).
    """
    if model_pipeline is None:
        return jsonify({'error': 'Server setup error: Model not loaded.'}), 500
    try:
        data = request.get_json(force=True)
        # A single JSON object of scalars cannot be turned into a DataFrame
        # directly (pandas raises "If using all scalar values, you must pass
        # an index"); wrap it in a one-row list first.
        if isinstance(data, dict):
            data = [data]
        # Ensure input data matches the feature columns (names and order).
        input_df = pd.DataFrame(data, columns=FEATURE_COLS)
        # Prediction on log scale (the pipeline was fit on log-transformed
        # sales, per the inverse transform below).
        log_prediction = model_pipeline.predict(input_df)
        # Inverse transformation: sales = exp(y) - 1. Cast the numpy scalar
        # to a plain float: jsonify cannot serialize numpy types.
        prediction_original_scale = float(np.expm1(log_prediction)[0])
        response = {
            'status': 'success',
            'predicted_sales_revenue': round(prediction_original_scale, 2)
        }
        return jsonify(response)
    except Exception as e:
        return jsonify({'error': f'Prediction logic failed: {str(e)}'}), 400
# Entry point when executed directly (as inside the Docker container):
# bind to all interfaces on port 5000 so the container port mapping works.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000)
|