# Source provenance (was raw Hugging Face page residue that broke the file):
# uploaded by deepacsr via huggingface_hub, commit 2f70b04 (verified).
# Import necessary libraries
import numpy as np
import joblib # For loading the serialized model
import pandas as pd # For data manipulation
import logging # Added to generate logs for debugging purpose
from flask import Flask, request, jsonify # For creating the Flask API
# Initialize the Flask application
# (the string argument is the app's import name, shown in Flask logs/errors)
product_sales_predictor_api = Flask("SuperKart Product Sales Predictor")
# Load the trained machine learning model
# NOTE(review): loaded once at import time; the .joblib file must be present
# in the working directory or the app fails to start — confirm deployment layout.
model = joblib.load("SuperKart_Sales_prediction_model_v1_0.joblib")
# Define a route for the home page (GET request)
@product_sales_predictor_api.get('/')
def home():
    """
    Handle GET requests to the root URL ('/').

    Returns a plain welcome message so callers (and health checks) can
    confirm the service is up.
    """
    # Logging setup retained from debugging, but guarded: the original code
    # attached a brand-new FileHandler on *every* request, which leaks file
    # handles and duplicates each log line once per prior request. Attach
    # the handler at most once.
    logger = product_sales_predictor_api.logger
    if not any(isinstance(h, logging.FileHandler) for h in logger.handlers):
        logger.addHandler(logging.FileHandler('app.log'))
        logger.setLevel(logging.INFO)
    logger.info('GET INVOKED')
    return "Welcome to the SuperKart Product Sales Prediction API!"
# Define an endpoint for single Product Sales prediction (POST request)
@product_sales_predictor_api.post('/v1/ProductSale')
def predict_Product_Sales():
    """
    Handle POST requests to the '/v1/ProductSale' endpoint.

    Expects a JSON payload containing the product/store features the model
    was trained on and returns the predicted sales figure as JSON under the
    key 'Predicted Sales'. Malformed or incomplete payloads get a 400 with
    an explanatory error instead of an opaque 500.
    """
    # Debug traces retained from the original troubleshooting session.
    product_sales_predictor_api.logger.info('single prediction entered')
    print(">>> product endpoint invoked!", flush=True)

    # Get the JSON data from the request body
    product_data = request.get_json()
    if product_data is None:
        # Body was missing or not valid JSON — client error, not a crash.
        return jsonify({'error': 'Request body must be JSON'}), 400

    # Features the model expects; kept in one place so the list is easy to
    # keep in sync with the training pipeline.
    feature_names = [
        'Product_Type',
        'Product_MRP',
        'Product_Weight',
        'Product_Sugar_Content',
        'Product_Allocated_Area',
        'Store_Size',
        'Store_Location_City_Type',
        'Store_Type',
    ]
    try:
        sample = {name: product_data[name] for name in feature_names}
    except KeyError as missing:
        # Name the missing field instead of letting KeyError become a 500.
        return jsonify({'error': f'Missing required field: {missing.args[0]}'}), 400

    # Convert the extracted data into a single-row Pandas DataFrame
    input_data = pd.DataFrame([sample])
    # Make the prediction
    predicted_sales = model.predict(input_data)[0]
    # Cast to a plain float: model.predict returns a numpy scalar, which
    # jsonify cannot reliably serialize (the batch endpoint already does
    # this conversion — this makes both endpoints consistent).
    return jsonify({'Predicted Sales': float(predicted_sales)})
# Define an endpoint for batch prediction (POST request)
@product_sales_predictor_api.post('/v1/batchsales')
def predict_sales_batch():
    """
    Handle POST requests to '/v1/batchsales'.

    Expects a multipart/form-data upload with a CSV file under the key
    'file' and returns the model's predictions for every row as JSON under
    the key 'predictions'. A missing upload yields a 400; unexpected
    failures are reported as a 500 with the error message.
    """
    # Debug prints retained from the original troubleshooting session.
    print(">>> Batch endpoint invoked!", flush=True)
    try:
        file = request.files.get('file')
        print(">>> File received:", file is not None, flush=True)
        if file is None:
            # A missing upload is a client error — report 400 rather than
            # letting pd.read_csv(None) blow up into a 500.
            return jsonify({"error": "No file uploaded under key 'file'"}), 400

        input_data = pd.read_csv(file)
        print(">>> CSV loaded. Columns:", list(input_data.columns), flush=True)

        # Drop columns that were not used while building the model.
        drop_cols = [
            'Product_Id',
            'Store_Id',
            'Store_Establishment_Year',
            'Product_Store_Sales_Total',
        ]
        input_data = input_data.drop(columns=[c for c in drop_cols if c in input_data.columns])
        print(">>> After column drop:", list(input_data.columns), flush=True)

        # Predict, converting numpy scalars to plain floats for JSON.
        predictions = [float(p) for p in model.predict(input_data)]
        print(">>> Predictions completed", flush=True)
        return jsonify({"predictions": predictions})
    except Exception as e:
        # Top-level boundary: surface the failure to the client instead of
        # Flask's opaque 500 page.
        print(">>> ERROR:", str(e), flush=True)
        return jsonify({"error": str(e)}), 500