simnid committed on
Commit
45d99fc
·
verified ·
1 Parent(s): c3b0f5c

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. Dockerfile +16 -0
  2. app.py +93 -0
  3. requirements.txt +8 -0
  4. superkart_sales_model.pkl +3 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Base image: slim Python 3.9 keeps the final image small
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy only the requirements file first so the pip-install layer is cached
# and not rebuilt every time application code changes
COPY requirements.txt .

# Install dependencies from the requirements file without using cache to reduce image size
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Copy the rest of the application code (changes here no longer invalidate the pip layer)
COPY . .

# Define the command to start the application using Gunicorn with 4 worker processes
# - `-w 4`: Uses 4 worker processes for handling requests
# - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
# - `app:superkart_sales_api`: Runs the Flask instance named `superkart_sales_api` in app.py
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:superkart_sales_api"]
app.py ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Import necessary libraries
import numpy as np  # For numerical operations
import joblib  # For loading the serialized model
import pandas as pd  # For data manipulation
from flask import Flask, request, jsonify  # For creating the Flask API

# Initialize the Flask application.
# NOTE: the variable name `superkart_sales_api` is part of the deployment
# contract — the Dockerfile runs gunicorn against `app:superkart_sales_api`.
superkart_sales_api = Flask("SuperKart Sales Predictor")

# Load the trained machine learning model once at import time so all request
# handlers share a single in-memory model; if the .pkl file is missing the
# process fails fast at startup rather than on the first request.
model = joblib.load("superkart_sales_model.pkl")
# Root route: a lightweight liveness/welcome endpoint (GET request)
@superkart_sales_api.get('/')
def home():
    """Handle GET requests to the root URL ('/').

    Returns a plain-text welcome message so callers can quickly verify
    that the service is up.
    """
    welcome_message = "Welcome to the SuperKart Sales Prediction API!"
    return welcome_message
21
+
22
# Features the model expects for a single prediction, in a fixed order.
_SINGLE_PREDICTION_FEATURES = [
    'Product_Weight',
    'Product_Allocated_Area',
    'Product_MRP',
    'Product_Sugar_Content',
    'Product_Type',
    'Store_Size',
    'Store_Location_City_Type',
    'Store_Type',
    'Store_Age',
    'Product_Category_Code',
]

# Define an endpoint for single product-store prediction (POST request)
@superkart_sales_api.post('/v1/predict')
def predict_sales():
    """Handle POST requests to the '/v1/predict' endpoint.

    Expects a JSON payload containing the product and store fields listed
    in ``_SINGLE_PREDICTION_FEATURES`` and returns the predicted sales
    total as a JSON response.

    Returns:
        200 with ``{'Predicted Sales Total': <float>}`` on success.
        400 with an error message when the body is not valid JSON or a
        required field is missing (previously an unhandled KeyError
        surfaced as an opaque 500).
    """
    # silent=True returns None instead of raising on a non-JSON body
    input_data = request.get_json(silent=True)
    if input_data is None:
        return jsonify({'error': 'Request body must be valid JSON'}), 400

    # Validate up front so the client receives an actionable error message
    missing = [f for f in _SINGLE_PREDICTION_FEATURES if f not in input_data]
    if missing:
        return jsonify({'error': f"Missing required fields: {', '.join(missing)}"}), 400

    # Build a one-row DataFrame with the features in the expected order
    sample = {feature: input_data[feature] for feature in _SINGLE_PREDICTION_FEATURES}
    input_df = pd.DataFrame([sample])

    # Predict, then convert the NumPy scalar to a plain rounded float so it
    # serializes cleanly to JSON
    predicted_sales = round(float(model.predict(input_df)[0]), 2)

    # Return the predicted sales
    return jsonify({'Predicted Sales Total': predicted_sales})
+
59
# Define an endpoint for batch prediction (POST request)
@superkart_sales_api.post('/v1/predictbatch')
def predict_sales_batch():
    """Handle POST requests to the '/v1/predictbatch' endpoint.

    Expects a CSV file upload (multipart form field ``file``) containing
    product-store details for multiple items and returns a JSON object
    mapping item identifiers to predicted sales totals.

    Returns:
        200 with ``{<item_id_or_index>: <float>, ...}`` on success.
        400 with an error message when no file was uploaded.
    """
    # Reject requests without an upload instead of letting
    # request.files['file'] raise a framework-level BadRequest
    file = request.files.get('file')
    if file is None:
        return jsonify({'error': "Missing uploaded file under form field 'file'"}), 400

    # Read the CSV file into a Pandas DataFrame
    input_data = pd.read_csv(file)

    # BUG FIX: 'item_id' is an identifier, not a model feature — the single
    # prediction endpoint shows the model's feature set does not include it.
    # Drop it before predicting so the model only sees training features
    # (previously the full frame, item_id included, was passed through).
    feature_frame = input_data.drop(columns=['item_id'], errors='ignore')

    # Predict for every row and round to plain 2-decimal floats
    predicted_sales = [round(float(sale), 2) for sale in model.predict(feature_frame)]

    # Key the predictions by 'item_id' when present, otherwise by row index.
    # Keys are stringified explicitly — JSON object keys are strings anyway.
    if 'item_id' in input_data.columns:
        keys = [str(item) for item in input_data['item_id']]
    else:
        keys = [f"item_{i}" for i in range(len(predicted_sales))]

    # jsonify for consistency with the single-prediction endpoint
    return jsonify(dict(zip(keys, predicted_sales)))
+
91
# Run the Flask development server when executed directly.
# (The Docker image serves the app via gunicorn instead — see Dockerfile CMD.)
if __name__ == '__main__':
    import os

    # SECURITY: debug=True enables the Werkzeug interactive debugger, which
    # allows arbitrary code execution if the port is exposed. Default it off
    # and require an explicit opt-in via the FLASK_DEBUG environment variable.
    debug_mode = os.environ.get('FLASK_DEBUG', '0') == '1'
    superkart_sales_api.run(debug=debug_mode, host='0.0.0.0', port=5000)
requirements.txt ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ joblib==1.4.2
5
+ Werkzeug==2.2.2
6
+ flask==2.2.2
7
+ gunicorn==20.1.0
8
+ requests==2.28.1
superkart_sales_model.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:53a34a781f6fdcd956499cbd58f5703061920b1c9663941cd622722604aabb4c
3
+ size 74376321