import os
from datetime import datetime

import joblib
import numpy as np
import pandas as pd
from flask import Flask, request, jsonify
# Initialize the Flask application; routes below are registered on this object.
sales_predictor_api = Flask("Super Kart Predictor")
# Load the trained machine learning model once at startup so every request
# reuses it instead of re-reading the file from disk.
# NOTE(review): joblib.load deserializes pickled code — only ship trusted
# model artifacts alongside this service.
model = joblib.load("SuperKart_model_v1_0.joblib")
# Define a route for the home page (GET request)
@sales_predictor_api.get('/')
def home():
    """Landing route for the Retail Sales Prediction API.

    Serves a plain-text greeting so a deployment can be smoke-tested
    with a simple GET /.
    """
    greeting = "Welcome to the Retail Product Sales Prediction API!"
    return greeting
# Define an endpoint for single product prediction (POST request)
@sales_predictor_api.post('/v1/sales')
def predict_sales():
    """
    Predict sales for a single product-store combination.

    Expects a JSON body with keys: product_weight, product_allocated_area,
    product_mrp, store_establishment_year, product_sugar_content,
    product_type, store_size, store_location_city_type, store_type.

    Returns:
        200 with JSON {'predicted_sales': float rounded to 2 decimals,
        'features_used': [column names]}, or 400 with {'error': message}
        on a missing/malformed body, missing fields, or prediction failure.
    """
    try:
        # silent=True makes a missing or malformed JSON body yield None
        # (instead of raising), so we can return a clear 400 message
        # rather than an opaque "'NoneType' object is not subscriptable".
        data = request.get_json(silent=True)
        if data is None:
            return jsonify({'error': 'Request body must be valid JSON'}), 400

        # Training-column name -> incoming JSON key. dict order is
        # preserved (Python 3.7+), so the DataFrame column order matches
        # what the model was trained on.
        field_map = {
            'Product_Weight': 'product_weight',
            'Product_Allocated_Area': 'product_allocated_area',
            'Product_MRP': 'product_mrp',
            'Store_Establishment_Year': 'store_establishment_year',
            'Product_Sugar_Content': 'product_sugar_content',
            'Product_Type': 'product_type',
            'Store_Size': 'store_size',
            'Store_Location_City_Type': 'store_location_city_type',
            'Store_Type': 'store_type',
        }

        # Report every missing field at once instead of failing on the
        # first KeyError with an unhelpful message.
        missing = [key for key in field_map.values() if key not in data]
        if missing:
            return jsonify({'error': f'Missing required fields: {missing}'}), 400

        # Single-row frame with only the features used in training.
        input_data = pd.DataFrame(
            [{col: data[key] for col, key in field_map.items()}]
        )

        # model.predict returns an array-like; take the lone row's value.
        predicted_sales = model.predict(input_data)[0]

        return jsonify({
            'predicted_sales': round(float(predicted_sales), 2),
            'features_used': list(input_data.columns)
        })
    except Exception as e:
        # Boundary catch-all: surface the failure as a 400 payload rather
        # than letting Flask return an opaque 500.
        return jsonify({'error': str(e)}), 400
# Define an endpoint for batch prediction (POST request)
@sales_predictor_api.post('/v1/salesbatch')
def predict_sales_batch():
    """
    Batch-predict sales from an uploaded CSV file.

    Expects a multipart form upload under the 'file' key containing a CSV
    with one row per product-store combination and at least the nine
    training columns (Product_Weight, Product_Allocated_Area, Product_MRP,
    Store_Establishment_Year, Product_Sugar_Content, Product_Type,
    Store_Size, Store_Location_City_Type, Store_Type).

    Returns:
        200 with JSON {'predictions': [...], 'features_used': [...]}, or
        400 with {'error': message} on a missing file, missing columns,
        an empty CSV, or prediction failure.
    """
    try:
        # Check if file was uploaded
        if 'file' not in request.files:
            return jsonify({'error': 'No file uploaded'}), 400
        file = request.files['file']

        # Read CSV file
        input_data = pd.read_csv(file)

        # Only the columns used in training are fed to the model.
        required_columns = [
            'Product_Weight',
            'Product_Allocated_Area',
            'Product_MRP',
            'Store_Establishment_Year',
            'Product_Sugar_Content',
            'Product_Type',
            'Store_Size',
            'Store_Location_City_Type',
            'Store_Type'
        ]

        # Verify all required columns are present before predicting.
        missing_cols = [col for col in required_columns if col not in input_data.columns]
        if missing_cols:
            return jsonify({'error': f'Missing required columns: {missing_cols}'}), 400

        # Guard against a header-only upload; model.predict on an empty
        # frame would otherwise fail with a cryptic error.
        if input_data.empty:
            return jsonify({'error': 'Uploaded CSV contains no data rows'}), 400

        # Make predictions
        predictions = model.predict(input_data[required_columns])

        # Pair predictions with rows POSITIONALLY. The previous
        # iterrows()-based loop indexed predictions[i] with the frame's
        # index *labels*, which silently depends on read_csv producing a
        # default 0..n-1 index; zip + itertuples has no such assumption.
        results = [
            {
                'predicted_sales': round(float(pred), 2),
                'product_type': row.Product_Type,
                'store_type': row.Store_Type,
            }
            for pred, row in zip(predictions, input_data.itertuples(index=False))
        ]

        return jsonify({
            'predictions': results,
            'features_used': required_columns
        })
    except Exception as e:
        # Boundary catch-all: report the failure as a 400 payload.
        return jsonify({'error': str(e)}), 400
# Run the Flask application
# Run the Flask application (development entry point; use a WSGI server
# such as gunicorn in production).
if __name__ == '__main__':
    import os

    # SECURITY: Flask's debug mode exposes the Werkzeug interactive
    # debugger, which allows arbitrary code execution — never leave it
    # enabled on a server bound to 0.0.0.0. Opt in explicitly via the
    # FLASK_DEBUG environment variable when developing locally.
    debug_mode = os.environ.get('FLASK_DEBUG', '').lower() in ('1', 'true', 'yes')
    sales_predictor_api.run(host='0.0.0.0', port=5000, debug=debug_mode)