DD009 committed on
Commit
2168d7e
·
verified ·
1 Parent(s): 62105fb

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. Dockerfile +16 -0
  2. SuperKart_model_v1_0.joblib +3 -0
  3. app.py +120 -0
  4. requirements.txt +11 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy all files from the current directory to the container's working directory
COPY . .

# Install dependencies from the requirements file without using cache to reduce image size
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Start the application with Gunicorn:
# - `-w 4`: use 4 worker processes for handling requests
# - `-b 0.0.0.0:7860`: bind the server to port 7860 on all network interfaces
# - `app:sales_predictor_api`: app.py defines the Flask instance as
#   `sales_predictor_api`; the previous target `app:SuperKart_predictor_api`
#   does not exist in app.py, so gunicorn would fail to import the app.
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:sales_predictor_api"]
SuperKart_model_v1_0.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fe6646c64c50003d7be9e910550c26c0a3da5c1007f1b3c5e453864646bbf19c
3
+ size 259782
app.py ADDED
@@ -0,0 +1,120 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import numpy as np
import joblib
import pandas as pd
from flask import Flask, request, jsonify
from datetime import datetime

# Initialize the Flask application.
# Gunicorn (see Dockerfile) serves this instance as `app:sales_predictor_api`.
sales_predictor_api = Flask("Retail Sales Predictor")

# Load the trained machine learning model once, at import time.
# NOTE(review): the code previously loaded
# "retail_sales_prediction_model_v1_0.joblib", but the only model artifact
# shipped alongside this file is "SuperKart_model_v1_0.joblib" — loading the
# former raises FileNotFoundError at startup. Confirm the deployed filename.
model = joblib.load("SuperKart_model_v1_0.joblib")
12
+
13
# Route: landing page (GET)
@sales_predictor_api.get('/')
def home():
    """Return a plain-text greeting confirming the API is reachable."""
    return "Welcome to the Retail Product Sales Prediction API!"
20
+
21
# Route: single-record sales prediction (POST)
@sales_predictor_api.post('/v1/sales')
def predict_sales():
    """
    Predict sales for one product/store combination.

    Expects a JSON body with product and store attributes; responds with the
    predicted sales figure plus the derived features used to compute it.
    On any failure, returns a JSON error payload with HTTP 400.
    """
    try:
        payload = request.get_json()

        # Derived features (the +1e-6 terms guard against division by zero).
        age_of_store = datetime.now().year - payload['store_establishment_year']
        density = payload['product_weight'] / (payload['product_allocated_area'] + 1e-6)
        unit_price = payload['product_mrp'] / (payload['product_weight'] + 1e-6)

        # Bucket the product by weight: <=10 Small, <=15 Medium, else Large.
        weight = payload['product_weight']
        if weight <= 10:
            size_bucket = 'Small'
        elif weight <= 15:
            size_bucket = 'Medium'
        else:
            size_bucket = 'Large'

        tier_size = f"{payload['store_location_city_type']}_{payload['store_size']}"

        # Assemble a single-row frame in the column layout the model expects.
        feature_row = {
            'Product_Weight': payload['product_weight'],
            'Product_Sugar_Content': payload['product_sugar_content'],
            'Product_Allocated_Area': payload['product_allocated_area'],
            'Product_Type': payload['product_type'],
            'Product_MRP': payload['product_mrp'],
            'Store_Id': payload['store_id'],
            'Store_Establishment_Year': payload['store_establishment_year'],
            'Store_Size': payload['store_size'],
            'Store_Location_City_Type': payload['store_location_city_type'],
            'Store_Type': payload['store_type'],
            'Store_Age': age_of_store,
            'Product_Density': density,
            'Price_Per_Unit_Weight': unit_price,
            'Product_Size_Category': size_bucket,
            'Store_Tier_Size': tier_size,
        }
        prediction = model.predict(pd.DataFrame([feature_row]))[0]

        # Echo the derived features back alongside the prediction.
        return jsonify({
            'predicted_sales': round(float(prediction), 2),
            'store_age': age_of_store,
            'product_density': round(density, 2),
            'price_per_weight': round(unit_price, 2)
        })

    except Exception as e:
        return jsonify({'error': str(e)}), 400
72
+
73
# Define an endpoint for batch prediction (POST request)
@sales_predictor_api.post('/v1/salesbatch')
def predict_sales_batch():
    """
    Batch-predict sales from an uploaded CSV file.

    Expects a multipart upload under the form field 'file' containing one
    row per product-store combination. Returns a JSON list with one
    prediction record per input row; any failure returns HTTP 400 with an
    error message.
    """
    try:
        # Check if a file was uploaded
        if 'file' not in request.files:
            return jsonify({'error': 'No file uploaded'}), 400

        file = request.files['file']

        # Read the CSV file into a DataFrame
        input_data = pd.read_csv(file)

        # Derived features (mirrors the single-prediction endpoint;
        # +1e-6 guards against division by zero).
        current_year = datetime.now().year
        input_data['Store_Age'] = current_year - input_data['Store_Establishment_Year']
        input_data['Product_Density'] = input_data['Product_Weight'] / (input_data['Product_Allocated_Area'] + 1e-6)
        input_data['Price_Per_Unit_Weight'] = input_data['Product_MRP'] / (input_data['Product_Weight'] + 1e-6)
        input_data['Product_Size_Category'] = input_data['Product_Weight'].apply(
            lambda w: 'Small' if w <= 10 else ('Medium' if w <= 15 else 'Large'))
        input_data['Store_Tier_Size'] = input_data['Store_Location_City_Type'] + '_' + input_data['Store_Size']

        # Make predictions for every row at once
        predictions = model.predict(input_data)

        def _native(value):
            # Values read via read_csv are numpy scalars (e.g. int64) for
            # numeric columns; json.dumps/jsonify cannot serialize those, so
            # .item() converts them to the equivalent built-in Python type.
            # String values pass through unchanged.
            return value.item() if hasattr(value, 'item') else value

        # Build one result record per input row. Enumerate positions
        # explicitly: `predictions` is positional, while iterrows() yields the
        # DataFrame *label* index (equal only for a default RangeIndex).
        results = []
        for pos, (_, row) in enumerate(input_data.iterrows()):
            results.append({
                'product_id': _native(row['Product_Id']),
                'store_id': _native(row['Store_Id']),
                'predicted_sales': round(float(predictions[pos]), 2),
                'product_type': _native(row['Product_Type']),
                'store_type': _native(row['Store_Type'])
            })

        return jsonify({'predictions': results})

    except Exception as e:
        return jsonify({'error': str(e)}), 400
117
+
118
# Local development entry point. Production runs under gunicorn (see
# Dockerfile), so this branch — and debug=True — only applies when the file
# is executed directly with `python app.py`.
if __name__ == '__main__':
    sales_predictor_api.run(host='0.0.0.0', port=5000, debug=True)
requirements.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ xgboost==2.1.4
5
+ joblib==1.4.2
6
+ Werkzeug==2.2.2
7
+ flask==2.2.2
8
+ gunicorn==20.1.0
9
+ requests==2.28.1
10
+ uvicorn[standard]
11
+ streamlit==1.43.2