# Source: Hugging Face Space by PSstark
# Uploaded via huggingface_hub (revision 53a5e3d, verified)
import numpy as np
import joblib
import pandas as pd
from flask import Flask, request, jsonify
# Initialize Flask app (the string is the Flask app name, not a route).
superkart_api = Flask("SuperKart Sales Prediction API")
# Load the trained model once at import time so every request reuses it.
# NOTE(review): the filename says "rental_price_prediction_model" but this
# API predicts SuperKart sales — confirm the artifact is the right model.
model = joblib.load("backend_files/rental_price_prediction_model_v1_0.joblib")
# Root endpoint
@superkart_api.get('/')
def home():
    """Landing endpoint: confirms the API is up and reachable."""
    greeting = "Welcome to the SuperKart Sales Prediction API!"
    return greeting
# Single prediction endpoint
@superkart_api.post('/v1/sales')
def predict_sales():
    """Predict sales for a single product/store record supplied as JSON.

    Expects a JSON body containing the raw feature fields (Product_Weight,
    Product_Sugar_Content, ..., Store_Establishment_Year). Derives the
    Store_Age feature before calling the model.

    Returns:
        JSON object of the form {'Predicted Sales (₹)': <float, 2 dp>}.
    """
    data = request.get_json()
    # BUG FIX: the original read every field from an undefined name
    # `property_data` (NameError on each request) and then discarded the
    # assembled dict, passing the raw JSON — without the derived
    # Store_Age column — straight to the model.
    sample = {
        'Product_Weight': data['Product_Weight'],
        'Product_Sugar_Content': data['Product_Sugar_Content'],
        'Product_Allocated_Area': data['Product_Allocated_Area'],
        'Product_Type': data['Product_Type'],
        'Product_MRP': data['Product_MRP'],
        'Store_Size': data['Store_Size'],
        'Store_Location_City_Type': data['Store_Location_City_Type'],
        'Store_Type': data['Store_Type'],
        # Derived feature: store age as of 2025.
        'Store_Age': 2025 - data['Store_Establishment_Year'],
    }
    # One-row DataFrame with exactly the features the model expects.
    input_df = pd.DataFrame([sample])
    predicted_sales = model.predict(input_df)[0]
    predicted_sales = round(float(predicted_sales), 2)
    return jsonify({'Predicted Sales (₹)': predicted_sales})
# Batch prediction endpoint
@superkart_api.post('/v1/salesbatch')
def predict_sales_batch():
file = request.files['file']
input_df = pd.read_csv(file)
# Predict for all rows
predicted_sales = model.predict(input_df)
predicted_sales = [round(float(s), 2) for s in predicted_sales]
# If you have Product_Id column to identify rows
if 'Product_Id' in input_df.columns:
ids = input_df['Product_Id'].tolist()
else:
ids = list(range(1, len(predicted_sales) + 1))
result = dict(zip(ids, predicted_sales))
return jsonify(result)
# Run the app
# Run the development server when executed directly.
# NOTE(review): debug=True enables the Werkzeug debugger/reloader —
# confirm this is never deployed with debug on in production.
if __name__ == '__main__':
    superkart_api.run(debug=True)