File size: 3,961 Bytes
97cb699
 
 
 
51abe73
97cb699
 
ea20b75
97cb699
 
 
 
 
 
 
7816e88
97cb699
 
 
 
 
73932dd
4d289ae
2f70b04
73932dd
a94c7e0
 
51abe73
a94c7e0
97cb699
 
2f70b04
97cb699
 
 
 
 
 
 
 
28a5377
4d289ae
 
28a5377
434cbf4
 
97cb699
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5638b5f
97cb699
809f64e
145d050
 
 
03d40c5
2f70b04
145d050
 
03d40c5
2f70b04
145d050
 
 
 
 
 
 
 
03d40c5
145d050
 
03d40c5
145d050
03d40c5
145d050
97cb699
145d050
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
# Import necessary libraries
import numpy as np
import joblib  # For loading the serialized model
import pandas as pd  # For data manipulation
import logging # Added to generate logs for debugging purpose
from flask import Flask, request, jsonify  # For creating the Flask API


# Initialize the Flask application
product_sales_predictor_api = Flask("SuperKart Product Sales Predictor")

# Load the trained machine learning model once at import time so every
# request reuses the same in-memory estimator. NOTE(review): the path is
# relative, so the .joblib file must be in the process's working
# directory when the app starts — confirm against the deployment setup.
model = joblib.load("SuperKart_Sales_prediction_model_v1_0.joblib")

# Define a route for the home page (GET request)
@product_sales_predictor_api.get('/')
def home():
    """
    Handle GET requests to the root URL ('/') of the API.

    Returns a simple welcome message so callers (and health checks)
    can confirm the service is up.
    """
    logger = product_sales_predictor_api.logger

    # Debug logging retained from the original troubleshooting session.
    # BUG FIX: the original code added a fresh FileHandler('app.log') on
    # every request, leaking file handles and causing each log record to
    # be written once per handler. Install the handler only once.
    if not any(isinstance(h, logging.FileHandler) for h in logger.handlers):
        handler = logging.FileHandler('app.log')
        logger.addHandler(handler)
        logger.setLevel(logging.INFO)
    logger.info('GET INVOKED')

    return "Welcome to the SuperKart Product Sales Prediction API!"


# Define an endpoint for single Product Sales prediction (POST request)
@product_sales_predictor_api.post('/v1/ProductSale')
def predict_Product_Sales():
    """
    Handle POST requests to the '/v1/ProductSale' endpoint.

    Expects a JSON payload containing the product/store feature fields
    listed below and returns the predicted sales figure as JSON.
    Malformed JSON or a missing field now yields a JSON 400 response
    instead of an unhandled 500 error page.
    """

    # All types of logging enabled when issue was faced in endpoint access.
    # Code retained as it is after debugging.
    product_sales_predictor_api.logger.info('single prediction entered')

    print(">>> product endpoint invoked!", flush=True)

    # Get the JSON data from the request body; silent=True returns None
    # (instead of raising) when the body is absent or not valid JSON.
    product_data = request.get_json(silent=True)
    if product_data is None:
        return jsonify({'error': 'Request body must be valid JSON'}), 400

    # The exact feature set the model was trained on, in training order.
    feature_names = [
        'Product_Type',
        'Product_MRP',
        'Product_Weight',
        'Product_Sugar_Content',
        'Product_Allocated_Area',
        'Store_Size',
        'Store_Location_City_Type',
        'Store_Type',
    ]

    # Extract the relevant features; report the first missing key as a 400
    # instead of letting KeyError surface as a 500.
    try:
        sample = {name: product_data[name] for name in feature_names}
    except KeyError as missing:
        return jsonify({'error': f'Missing required field: {missing.args[0]}'}), 400

    # Convert the extracted data into a single-row Pandas DataFrame
    input_data = pd.DataFrame([sample])

    # Make the prediction for the single row
    predicted_sales = model.predict(input_data)[0]

    # Cast to a plain Python float: model.predict usually yields a numpy
    # scalar, which jsonify cannot serialize on current Flask versions.
    return jsonify({'Predicted Sales': float(predicted_sales)})


# Define an endpoint for batch prediction (POST request)
@product_sales_predictor_api.post('/v1/batchsales')
def predict_sales_batch():
    """
    Handle POST requests to the '/v1/batchsales' endpoint.

    Expects a multipart upload with a CSV file under the form key
    'file', drops the columns that were not used while building the
    model, and returns the predicted sales for every row as JSON.
    Returns a JSON 400 when no file is uploaded and a JSON 500 for
    any other failure.
    """
    print(">>> Batch endpoint invoked!", flush=True)
    try:
        file = request.files.get('file')
        print(">>> File received:", file is not None, flush=True)

        # BUG FIX: pd.read_csv(None) raises a cryptic error that was
        # previously returned as a 500; report the missing upload clearly.
        if file is None:
            return jsonify({"error": "No file uploaded under form key 'file'"}), 400

        # All messages used for debugging retained as it is
        input_data = pd.read_csv(file)
        print(">>> CSV loaded. Columns:", list(input_data.columns), flush=True)

        # Drop the columns that were not used while building the model;
        # only drop the ones actually present in this upload.
        drop_cols = [
            'Product_Id',
            'Store_Id',
            'Store_Establishment_Year',
            'Product_Store_Sales_Total'
        ]
        input_data = input_data.drop(columns=[c for c in drop_cols if c in input_data.columns])
        print(">>> After column drop:", list(input_data.columns), flush=True)

        # Predict for every row and cast numpy scalars to plain floats so
        # the list is JSON serializable.
        predictions = model.predict(input_data)
        predictions = [float(p) for p in predictions]

        print(">>> Predictions completed", flush=True)

        return jsonify({"predictions": predictions})

    except Exception as e:
        # Top-level boundary: surface the failure as a JSON 500 for the client.
        print(">>> ERROR:", str(e), flush=True)
        return jsonify({"error": str(e)}), 500