# Flask API for the SuperKart retail sales prediction model.
# (Repository uploaded via huggingface_hub by DD009; commit adb1438, verified.)
import os
from datetime import datetime

import joblib
import numpy as np
import pandas as pd
from flask import Flask, request, jsonify
# Initialize the Flask application (the name string is used for logging only).
sales_predictor_api = Flask("Super Kart Predictor")
# Load the trained machine learning model once at import time so every request
# reuses the same in-memory estimator. The .joblib file must be readable from
# the process's working directory — presumably a scikit-learn pipeline that
# accepts the nine feature columns used below (TODO confirm against training code).
model = joblib.load("SuperKart_model_v1_0.joblib")
# Define a route for the home page (GET request)
@sales_predictor_api.get('/')
def home():
    """Welcome endpoint: returns a plain-text greeting for the API root."""
    greeting = "Welcome to the Retail Product Sales Prediction API!"
    return greeting
# Define an endpoint for single product prediction (POST request)
@sales_predictor_api.post('/v1/sales')
def predict_sales():
    """
    Endpoint for predicting sales for a single product-store combination.

    Expects a JSON payload with the nine product/store fields listed below.
    Returns JSON with 'predicted_sales' (rounded to 2 decimals) and
    'features_used', or a 400 error with a descriptive message when the
    body is not JSON, fields are missing, or the model rejects the input.
    """
    # Map JSON payload keys -> feature names the model was trained with.
    field_map = {
        'product_weight': 'Product_Weight',
        'product_allocated_area': 'Product_Allocated_Area',
        'product_mrp': 'Product_MRP',
        'store_establishment_year': 'Store_Establishment_Year',
        'product_sugar_content': 'Product_Sugar_Content',
        'product_type': 'Product_Type',
        'store_size': 'Store_Size',
        'store_location_city_type': 'Store_Location_City_Type',
        'store_type': 'Store_Type',
    }
    # silent=True yields None for a missing/invalid JSON body instead of raising,
    # so the client gets a clear message rather than a raw TypeError string.
    data = request.get_json(silent=True)
    if data is None:
        return jsonify({'error': 'Request body must be valid JSON'}), 400
    # Report all missing fields at once instead of failing on the first KeyError.
    missing = [key for key in field_map if key not in data]
    if missing:
        return jsonify({'error': f'Missing required fields: {missing}'}), 400
    # Single-row frame; dict insertion order preserves the training column order.
    input_data = pd.DataFrame([{feat: data[key] for key, feat in field_map.items()}])
    try:
        # Narrow try: only the model call can now fail (e.g. unseen categories).
        predicted_sales = model.predict(input_data)[0]
    except Exception as e:
        return jsonify({'error': str(e)}), 400
    return jsonify({
        'predicted_sales': round(float(predicted_sales), 2),
        'features_used': list(input_data.columns)
    })
# Define an endpoint for batch prediction (POST request)
@sales_predictor_api.post('/v1/salesbatch')
def predict_sales_batch():
    """
    Endpoint for batch predictions from an uploaded CSV file.

    Expects a multipart upload under the 'file' key containing one row per
    product-store combination with the nine training columns. Returns JSON
    with per-row predictions plus the feature list, or a 400 error when the
    upload is absent, unparseable, empty, or missing required columns.
    """
    # Validate the upload itself before touching pandas.
    if 'file' not in request.files:
        return jsonify({'error': 'No file uploaded'}), 400
    file = request.files['file']
    if file.filename == '':
        # Browsers submit an empty-named part when no file was chosen.
        return jsonify({'error': 'No file selected'}), 400
    try:
        input_data = pd.read_csv(file)
    except Exception as e:
        return jsonify({'error': f'Could not parse CSV file: {e}'}), 400
    # Feature columns the model was trained with, in training order.
    required_columns = [
        'Product_Weight',
        'Product_Allocated_Area',
        'Product_MRP',
        'Store_Establishment_Year',
        'Product_Sugar_Content',
        'Product_Type',
        'Store_Size',
        'Store_Location_City_Type',
        'Store_Type',
    ]
    missing_cols = [col for col in required_columns if col not in input_data.columns]
    if missing_cols:
        return jsonify({'error': f'Missing required columns: {missing_cols}'}), 400
    if input_data.empty:
        return jsonify({'error': 'CSV file contains no data rows'}), 400
    try:
        predictions = model.predict(input_data[required_columns])
    except Exception as e:
        return jsonify({'error': str(e)}), 400
    # zip keeps predictions aligned with rows positionally; the original
    # predictions[i] relied on the DataFrame index label matching position,
    # which breaks for any non-default index.
    results = [
        {
            'predicted_sales': round(float(pred), 2),
            'product_type': row['Product_Type'],
            'store_type': row['Store_Type'],
        }
        for pred, (_, row) in zip(predictions, input_data.iterrows())
    ]
    return jsonify({
        'predictions': results,
        'features_used': required_columns
    })
# Run the Flask application (development entry point only; use a WSGI server
# such as gunicorn in production).
if __name__ == '__main__':
    # SECURITY: Flask's debug mode enables the Werkzeug interactive debugger,
    # which allows arbitrary code execution — never leave it on for a server
    # bound to 0.0.0.0. Debug is now opt-in via the FLASK_DEBUG env var.
    import os
    debug_mode = os.environ.get('FLASK_DEBUG', '').lower() in ('1', 'true')
    sales_predictor_api.run(host='0.0.0.0', port=5000, debug=debug_mode)