# NOTE: the metadata that previously sat above this line (file size, commit
# hashes, and a line-number gutter) was extraction residue from a GitHub
# blame view, not part of the program, and has been commented out.
# backend_files/app.py
import numpy as np
import joblib
import pandas as pd
from flask import Flask, request, jsonify
import sys
import custom_transformers
# Patch sys.modules so unpickling can resolve classes that were defined in
# __main__ when the model was trained and saved: pickle records classes as
# "<module>.<qualname>", so aliasing this module as __main__ and attaching
# the custom transformer names lets joblib.load find them. This MUST run
# before joblib.load below.
sys.modules['__main__'] = sys.modules[__name__]
sys.modules['__main__'].ProductPrefixExtractor = custom_transformers.ProductPrefixExtractor
sys.modules['__main__'].StoreAgeBinner = custom_transformers.StoreAgeBinner
sys.modules['__main__'].store_age_transformer = custom_transformers.store_age_transformer
sys.modules['__main__'].convert_year_to_age = custom_transformers.convert_year_to_age
# Initialize the Flask application (the string argument is only a display/import name).
sales_predictor_api = Flask("Retail Sales Prediction API")
# Load the trained model pipeline once at import time so every request reuses it.
# NOTE(review): assumes the .joblib file is in the process's working directory —
# confirm against the deployment layout.
model = joblib.load("SuperKart_sales_rf_tuned_prediction_model_p1_0.joblib")
# Define a route for the home page (GET request)
@sales_predictor_api.get('/')
def home():
    """Handle GET / by returning a plain-text welcome banner for the API."""
    greeting = "Welcome to the Retail Sales Prediction API!"
    return greeting
# Single prediction endpoint (POST)
@sales_predictor_api.route('/v1/sales', methods=['POST'])
def predict_sales():
    """
    Predict sales for a single record.

    Expects a JSON body whose keys match the model's training features
    (see the dict below). Returns {'Predicted_Sales': <float>} on success,
    or {'error': <message>} with HTTP 400 on any failure.
    """
    try:
        # Parse inside the try, with silent=True, so a missing/invalid JSON
        # body or wrong Content-Type yields our JSON error shape instead of
        # Flask's default HTML error page (the original called get_json()
        # before the try, so such requests bypassed the handler entirely).
        data = request.get_json(silent=True)
        if data is None:
            return jsonify({'error': 'Request body must be valid JSON'}), 400
        # Build the single-row feature mapping; a missing key raises
        # KeyError, which the boundary handler below turns into a 400.
        features = {
            'Store_Establishment_Year': data['Store_Establishment_Year'],
            'Product_MRP': data['Product_MRP'],
            'Product_Weight': data['Product_Weight'],
            'Store_Id': data['Store_Id'],
            'Product_Type': data['Product_Type'],
            'Product_Sugar_Content': data['Product_Sugar_Content'],
            'Store_Location_City_Type': data['Store_Location_City_Type'],
            'Store_Size': data['Store_Size'],
            'Product_Allocated_Area': data['Product_Allocated_Area'],
            'Product_Id': data['Product_Id'],
            'Store_Type': data['Store_Type'],
        }
        # Model pipeline expects a DataFrame; wrap the dict as one row.
        input_df = pd.DataFrame([features])
        predicted_sales = model.predict(input_df)[0]
        # numpy scalar -> plain float, rounded, so jsonify can serialize it.
        predicted_sales = round(float(predicted_sales), 2)
        return jsonify({'Predicted_Sales': predicted_sales})
    except Exception as e:
        # Top-level boundary: surface any failure to the client as JSON 400.
        return jsonify({'error': str(e)}), 400
# Batch prediction endpoint (POST)
@sales_predictor_api.route('/v1/salesbatch', methods=['POST'])
def predict_sales_batch():
    """
    Predict sales for a batch of records.

    Expects a multipart-form upload named 'file': a CSV containing all model
    features plus a 'Product_Id' column. Returns a JSON object mapping each
    Product_Id to its rounded prediction, or {'error': <message>} with 400.
    """
    try:
        # Explicit guard: request.files['file'] on a missing upload raises a
        # bare KeyError whose str() is just "'file'" — useless to the client.
        file = request.files.get('file')
        if file is None:
            return jsonify({"error": "Missing uploaded file under form field 'file'"}), 400
        input_df = pd.read_csv(file)
        # Fail early with a clear message if the ID column is absent
        # (the original comment said 'id' but the code requires 'Product_Id').
        if 'Product_Id' not in input_df.columns:
            return jsonify({"error": "CSV must contain a 'Product_Id' column"}), 400
        # Predict for all rows, then round each value for JSON serialization.
        preds = model.predict(input_df).tolist()
        preds_rounded = [round(float(p), 2) for p in preds]
        # Map Product_Id -> prediction. NOTE: duplicate Product_Id values
        # collapse to the last row's prediction, as in the original behavior.
        ids = input_df['Product_Id'].tolist()
        results = dict(zip(ids, preds_rounded))
        return jsonify(results)
    except Exception as e:
        # Top-level boundary: surface any failure to the client as JSON 400.
        return jsonify({"error": str(e)}), 400
# Run the built-in development server when executed directly (not under a
# WSGI server such as gunicorn). debug=True enables the reloader and the
# interactive debugger — development only; do not enable in production.
if __name__ == "__main__":
    sales_predictor_api.run(debug=True)
# (stripped a stray trailing "|" gutter character left over from extraction)