File size: 5,556 Bytes
3f187d4
bd726b0
 
 
 
 
 
3f187d4
 
 
 
bf10d29
3f187d4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113

# Import necessary libraries
import numpy as np
import joblib  # For loading the serialized model
import pandas as pd  # For data manipulation
from flask import Flask, request, jsonify  # For creating the Flask API

# Initialize the Flask application
# NOTE: the string here is the Flask app's import name / display name,
# not a route or hostname.
product_store_sales_predictor_api = Flask("SuperKart Product Store Sales Predictor")

# Load the trained machine learning model
# Loaded eagerly at import time so the first request does not pay the
# deserialization cost. The path is resolved relative to the current
# working directory — the .joblib file must sit next to this script
# (or the process must be started from its directory).
model = joblib.load("superkart_sales_prediction_model_v1_0.joblib")

# Define a route for the home page (GET request)
@product_store_sales_predictor_api.get('/')
def home():
    """Serve the API root: respond to GET '/' with a static welcome banner."""
    # Flask wraps the bare string in a 200 text/html response.
    return "Welcome to SuperKart Product Store Sales Predictor API!"

# Define an endpoint for single product prediction (POST request)
@product_store_sales_predictor_api.post('/v1/productsales')
def predict_product_sales():
    """
    Handle POST requests to the '/v1/productsales' endpoint.

    Expects a JSON object carrying every one-hot-encoded product/store
    feature the model was trained on, and returns the predicted sales
    as a JSON response.

    Returns:
        200 with {'Predicted Product Store Sales Total': <float>} on success,
        400 with {'error': ...} if the body is not a JSON object or any
        required feature is missing.
    """
    # Column names must match the model's training frame exactly,
    # including the dummy-encoded category columns.
    expected_features = [
        'Product_Weight',
        'Product_Allocated_Area',
        'Product_MRP',
        'Store_Age',
        'Product_Identifier',
        'Product_Sugar_Content_No Sugar',
        'Product_Sugar_Content_Regular',
        'Product_Sugar_Content_reg',
        'Product_Type_Breads',
        'Product_Type_Breakfast',
        'Product_Type_Canned',
        'Product_Type_Dairy',
        'Product_Type_Frozen Foods',
        'Product_Type_Fruits and Vegetables',
        'Product_Type_Hard Drinks',
        'Product_Type_Health and Hygiene',
        'Product_Type_Household',
        'Product_Type_Meat',
        'Product_Type_Others',
        'Product_Type_Seafood',
        'Product_Type_Snack Foods',
        'Product_Type_Soft Drinks',
        'Product_Type_Starchy Foods',
        'Store_Id_OUT002',
        'Store_Id_OUT003',
        'Store_Id_OUT004',
        'Store_Size_Medium',
        'Store_Size_Small',
        'Store_Location_City_Type_Tier 2',
        'Store_Location_City_Type_Tier 3',
        'Store_Type_Food Mart',
        'Store_Type_Supermarket Type1',
        'Store_Type_Supermarket Type2',
    ]

    # Get the JSON data from the request body
    product_data = request.get_json()
    if not isinstance(product_data, dict):
        return jsonify({'error': 'Request body must be a JSON object'}), 400

    # Report every missing feature at once instead of letting a bare
    # KeyError surface as an opaque 500 response.
    missing = [name for name in expected_features if name not in product_data]
    if missing:
        return jsonify({'error': f'Missing required fields: {missing}'}), 400

    # Extract the expected features in training order
    sample = {name: product_data[name] for name in expected_features}

    # Convert the extracted data into a single-row Pandas DataFrame
    input_data = pd.DataFrame([sample])

    # Make the prediction
    predicted_sales = model.predict(input_data)[0]

    # Cast the NumPy scalar to a native Python float before rounding:
    # jsonify cannot serialize NumPy float32/float64 values directly.
    predicted_sales = round(float(predicted_sales), 2)

    # Return the predicted sales value
    return jsonify({'Predicted Product Store Sales Total': predicted_sales})


# Define an endpoint for batch prediction (POST request)
@product_store_sales_predictor_api.post('/v1/productsalesbatch')
def predict_product_sales_batch():
    """
    Handle POST requests to the '/v1/productsalesbatch' endpoint.

    Expects a multipart upload under the form field 'file' containing a
    CSV of product/store rows, and returns a JSON object mapping each
    row's Product_Id to its predicted sales value.

    Returns:
        200 with {product_id: predicted_sales, ...} on success,
        400 with {'error': ...} if the upload or Product_Id column is missing.
    """
    # Guard against a missing upload instead of letting request.files['file']
    # raise a KeyError (which would surface as an opaque 500).
    file = request.files.get('file')
    if file is None:
        return jsonify({'error': "No file uploaded under form field 'file'"}), 400

    # Read the CSV file into a Pandas DataFrame
    input_data = pd.read_csv(file)

    # NOTE(review): assumes the CSV carries a 'Product_Id' column alongside
    # the model's feature columns — confirm against the training pipeline.
    if 'Product_Id' not in input_data.columns:
        return jsonify({'error': "CSV must contain a 'Product_Id' column"}), 400

    # Make predictions for all rows; .tolist() converts NumPy scalars to
    # native Python numbers so they are JSON-serializable.
    predicted_sales = model.predict(input_data).tolist()

    # Pair each Product_Id with its prediction
    product_ids = input_data['Product_Id'].tolist()
    output_dict = dict(zip(product_ids, predicted_sales))

    # Return the predictions dictionary as a JSON response
    # (jsonify used explicitly for consistency with the single-row endpoint).
    return jsonify(output_dict)

# Run the Flask application in debug mode if this script is executed directly
if __name__ == '__main__':
    # NOTE(review): debug=True enables the auto-reloader and the Werkzeug
    # interactive debugger — suitable for local development only, never
    # for a production deployment.
    product_store_sales_predictor_api.run(debug=True)