VRS1503 committed on
Commit
dcadc8d
·
verified ·
1 Parent(s): 4a4de02

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +70 -70
app.py CHANGED
@@ -1,36 +1,36 @@
1
- import os
2
- import joblib
3
- from flask import Flask, request, jsonify
4
- import pandas as pd
5
- import numpy as np
6
- import warnings
7
- import logging
8
- warnings.filterwarnings("ignore")
9
 
10
  # Initialize flask app with a name
11
- sales_prediction = Flask(__name__)
12
 
13
  # Configure logging
14
- logging.basicConfig(level=logging.DEBUG)
15
 
16
  # Load the trained model pipeline
17
  try:
18
- model = joblib.load("SuperKart_Sales_Prediction_Model.joblib")
19
- logging.info("Model loaded successfully.")
20
- except Exception as e:
21
- logging.error(f"Error loading model: {e}")
22
- raise
23
 
24
- # Define an endpoint for making predictions
25
- @sales_prediction.get('/')
26
  def home():
27
  """
28
  This function handles GET requests to the root URL ('/') of the API.
29
  It returns a simple welcome message.
30
  """
31
- return "Welcome to the SuperKart Sales Prediction App!"
32
 
33
- # Define an endpoint for single property prediction (POST request)
34
  @sales_prediction.post('/v1/predict')
35
  def predict_sales():
36
  """
@@ -40,53 +40,53 @@ def predict_sales():
40
  """
41
  # Get the JSON data from the request body
42
  try:
43
- business_data = request.get_json()
44
- logging.debug(f"Received data: {business_data}")
45
- except Exception as e:
46
- logging.error(f"Error decoding JSON: {e}")
47
- return jsonify({'error': f'Invalid JSON: {e}'}), 400
48
 
49
  # Extract relevant features from the JSON data
50
  try:
51
  business_data_sample = {
52
- 'Product_Weight': business_data['Product_Weight'],
53
- 'Product_Sugar_Content': business_data['Product_Sugar_Content'],
54
- 'Product_Type': business_data['Product_Type'],
55
- 'Product_Allocated_Area': business_data['Product_Allocated_Area'],
56
- 'Product_MRP': business_data['Product_MRP'],
57
- 'Store_Size': business_data['Store_Size'],
58
- 'Store_Location_City_Type': business_data['Store_Location_City_Type'],
59
- 'Store_Type': business_data['Store_Type'],
60
- 'Store_Current_Age': business_data['Store_Current_Age'] # Keep Store_Current_Age
61
  }
62
- except KeyError as e:
63
- logging.error(f"Missing key in JSON data: {e}")
64
- return jsonify({'error': f'Missing key: {e}'}), 400
65
- except TypeError as e:
66
- logging.error(f"Error with data types: {e}")
67
- return jsonify({'error': f'Incorrect data type: {e}'}), 400
68
 
69
  # Convert the extracted data into a Pandas DataFrame
70
  try:
71
- business_df = pd.DataFrame(business_data_sample, index=[0])
72
- logging.debug(f"DataFrame: {business_df.head().to_string()}")
73
- except Exception as e:
74
- logging.error(f"Error creating DataFrame: {e}")
75
- return jsonify({'error': f'Error creating DataFrame: {e}'}), 500
76
 
77
  # Make predictions using the loaded model
78
- try:
79
- prediction = model.predict(business_df)
80
- logging.debug(f"Prediction: {prediction}")
81
- except Exception as e:
82
- logging.error(f"Error during prediction: {e}")
83
- return jsonify({'error': f'Prediction error: {e}'}), 500
84
 
85
  # Return the prediction as a JSON response
86
- return jsonify({'prediction': list(prediction)})
87
 
88
  # Define an endpoint for batch prediction (POST request)
89
- @sales_prediction.post('/v1/batch_predict')
90
  def batch_predict():
91
  """
92
  This function handles POST requests to the '/v1/batch_predict' endpoint.
@@ -95,31 +95,31 @@ def batch_predict():
95
  """
96
  # Get the uploaded CSV file from the request
97
  try:
98
- file = request.files['file']
99
- logging.debug(f"Received file: {file.filename}")
100
- except Exception as e:
101
- logging.error(f"Error getting file: {e}")
102
- return jsonify({'error': f'Error getting file: {e}'}), 400
103
 
104
  # Read the CSV file into a Pandas DataFrame
105
  try:
106
- df = pd.read_csv(file)
107
- logging.debug(f"DataFrame shape: {df.shape}")
108
- except Exception as e:
109
- logging.error(f"Error reading CSV: {e}")
110
- return jsonify({'error': f'Error reading CSV file: {e}'}), 400
111
 
112
  # Make predictions using the loaded model
113
  try:
114
- prediction = model.predict(df)
115
- logging.debug(f"Prediction: {prediction}")
116
- except Exception as e:
117
- logging.error(f"Error during prediction: {e}")
118
- return jsonify({'error': f'Prediction error: {e}'}), 500
119
 
120
  # Return the prediction as a JSON response
121
- return jsonify({'prediction': list(prediction)})
122
 
123
  # Run the Flask app if this script is executed
124
- if __name__ == '_main_':
125
- sales_prediction.run(debug=True)
 
1
import os  # OS utilities (not used directly below; kept for environment access)
import joblib  # Used to deserialize the trained model pipeline from disk
from flask import Flask, request, jsonify  # Web framework: app object, request parsing, JSON responses
import pandas as pd  # DataFrame construction for model input
import numpy as np  # Numerical support (required transitively by the model pipeline)
import warnings  # Control over Python warning output
import logging  # Application event/error logging
warnings.filterwarnings("ignore")  # Suppress library warnings for cleaner output

# Initialize flask app with a name
sales_prediction = Flask(__name__)  # Flask application instance; __name__ lets Flask locate resources

# Configure logging
logging.basicConfig(level=logging.DEBUG)  # Emit DEBUG-and-above messages

# Load the trained model pipeline
try:
    model = joblib.load("SuperKart_Sales_Prediction_Model.joblib")  # Fitted pipeline saved with joblib
    logging.info("Model loaded successfully.")
except Exception as e:
    logging.error(f"Error loading model: {e}")  # Record the failure before aborting
    raise  # Re-raise so the app does not start without a usable model
23
 
24
# Define the root endpoint (GET request) — returns a welcome message when accessed
@sales_prediction.get('/')
def home():
    """
    This function handles GET requests to the root URL ('/') of the API.
    It returns a simple welcome message.
    """
    return "Welcome to the SuperKart Sales Prediction App!"
32
 
33
# Define an endpoint for single prediction (POST request)
@sales_prediction.post('/v1/predict')
def predict_sales():
    """
    Handle POST requests to the '/v1/predict' endpoint.

    Expects a JSON body containing the nine feature fields used by the
    trained pipeline, runs a single prediction, and returns it as JSON.

    Returns:
        200: {'prediction': [<value>]} on success.
        400: {'error': ...} for malformed JSON, missing keys, or wrong types.
        500: {'error': ...} for DataFrame-construction or model failures.
    """
    # Get the JSON data from the request body
    try:
        business_data = request.get_json()
        logging.debug(f"Received data: {business_data}")  # Log the raw payload for debugging
    except Exception as e:
        logging.error(f"Error decoding JSON: {e}")
        return jsonify({'error': f'Invalid JSON: {e}'}), 400

    # Extract relevant features from the JSON data; a missing key is a client error
    try:
        business_data_sample = {
            'Product_Weight': business_data['Product_Weight'],
            'Product_Sugar_Content': business_data['Product_Sugar_Content'],
            'Product_Type': business_data['Product_Type'],
            'Product_Allocated_Area': business_data['Product_Allocated_Area'],
            'Product_MRP': business_data['Product_MRP'],
            'Store_Size': business_data['Store_Size'],
            'Store_Location_City_Type': business_data['Store_Location_City_Type'],
            'Store_Type': business_data['Store_Type'],
            'Store_Current_Age': business_data['Store_Current_Age']
        }
    except KeyError as e:
        logging.error(f"Missing key in JSON data: {e}")
        return jsonify({'error': f'Missing key: {e}'}), 400
    except TypeError as e:
        # Raised when the body is not a JSON object (e.g. None or a list)
        logging.error(f"Error with data types: {e}")
        return jsonify({'error': f'Incorrect data type: {e}'}), 400

    # Convert the extracted data into a single-row Pandas DataFrame for the pipeline
    try:
        business_df = pd.DataFrame(business_data_sample, index=[0])
        logging.debug(f"DataFrame: {business_df.head().to_string()}")
    except Exception as e:
        logging.error(f"Error creating DataFrame: {e}")
        return jsonify({'error': f'Error creating DataFrame: {e}'}), 500

    # Make predictions using the loaded model
    try:
        prediction = model.predict(business_df)
        logging.debug(f"Prediction: {prediction}")  # Log the model output for debugging
    except Exception as e:
        logging.error(f"Error during prediction: {e}")
        return jsonify({'error': f'Prediction error: {e}'}), 500

    # Return the prediction as a JSON response.
    # .tolist() converts numpy scalars to native Python types; plain list()
    # leaves numpy types in place, which Flask's JSON encoder cannot serialize.
    return jsonify({'prediction': np.asarray(prediction).tolist()})
87
 
88
# Define an endpoint for batch prediction (POST request)
@sales_prediction.post('/v1/batch_predict')
def batch_predict():
    """
    Handle POST requests to the '/v1/batch_predict' endpoint.

    Expects a CSV file uploaded under the form field 'file' whose columns
    match the pipeline's expected features, and returns one prediction per row.

    Returns:
        200: {'prediction': [<value>, ...]} on success.
        400: {'error': ...} when the file is missing or unreadable as CSV.
        500: {'error': ...} when the model fails on the parsed data.
    """
    # Get the uploaded CSV file from the request
    try:
        file = request.files['file']
        logging.debug(f"Received file: {file.filename}")  # Log the filename for debugging
    except Exception as e:
        logging.error(f"Error getting file: {e}")
        return jsonify({'error': f'Error getting file: {e}'}), 400

    # Read the CSV file into a Pandas DataFrame
    try:
        df = pd.read_csv(file)
        logging.debug(f"DataFrame shape: {df.shape}")  # Row/column count aids debugging
    except Exception as e:
        logging.error(f"Error reading CSV: {e}")
        return jsonify({'error': f'Error reading CSV file: {e}'}), 400

    # Make predictions using the loaded model
    try:
        prediction = model.predict(df)
        logging.debug(f"Prediction: {prediction}")  # Log the model output for debugging
    except Exception as e:
        logging.error(f"Error during prediction: {e}")
        return jsonify({'error': f'Prediction error: {e}'}), 500

    # Return the predictions as a JSON response.
    # .tolist() converts numpy scalars to native Python types; plain list()
    # leaves numpy types in place, which Flask's JSON encoder cannot serialize.
    return jsonify({'prediction': np.asarray(prediction).tolist()})
122
 
123
# Run the Flask app only when this script is executed directly
if __name__ == '__main__':  # Fixed: was '_main_', which never matches, so the server never started
    sales_prediction.run(debug=True)  # NOTE(review): debug=True enables the interactive debugger — disable in production