dpanchali committed on
Commit
16fa2da
·
verified ·
1 Parent(s): c70701a

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. Dockerfile +16 -0
  2. app.py +86 -0
  3. final_sales_forecasting_model.joblib +3 -0
  4. requirements.txt +10 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy only the dependency manifest first so the (slow) pip install layer
# is cached and only re-runs when requirements.txt itself changes,
# not on every application-code edit.
COPY requirements.txt .

# Install dependencies without using cache to reduce image size
RUN pip install --no-cache-dir -r requirements.txt

# Now copy the rest of the application (app.py, model artifact, ...)
COPY . .

# Define the command to start the application using Gunicorn
# - `-w 4`: Uses 4 worker processes for handling requests
# - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
# - `app:app`: Runs the Flask app (`app.py` contains the Flask instance named `app`)
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:app"]
app.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import joblib
import pandas as pd
from flask import Flask, request, jsonify

# Initialize the Flask application (served by gunicorn as `app:app`).
app = Flask(__name__)

# Load the trained sales forecasting pipeline once at import time so every
# request reuses the same in-memory model (each gunicorn worker loads its own
# copy). NOTE(review): assumes the pipeline bundles all preprocessing, since
# the endpoints pass raw feature values straight to `model.predict`.
model = joblib.load("final_sales_forecasting_model.joblib")
10
+
11
# Root endpoint — doubles as a trivial liveness check.
@app.route('/')
def home():
    """Return a plain-text welcome message for the API root."""
    return "Welcome to the SuperKart Sales Forecasting API"
15
+
16
# Endpoint: predict sales for a single product-store combination.
@app.route('/predict_single', methods=['POST'])
def predict_single():
    """Predict sales for one product/store record supplied as a JSON body.

    The JSON keys must match the feature columns the pipeline was trained on.
    Returns {'predicted_sales': <number>} on success, HTTP 400 naming the
    missing field when a required key is absent, or HTTP 500 with the error
    text on any other failure.
    """
    data = request.get_json()

    # Feature names must match the columns of the original training data X
    # (same names, same order) so the fitted pipeline can transform them.
    feature_names = (
        'Product_Id', 'Product_Weight', 'Product_Sugar_Content',
        'Product_Allocated_Area', 'Product_Type', 'Product_MRP',
        'Store_Id', 'Store_Establishment_Year', 'Store_Size',
        'Store_Location_City_Type', 'Store_Type',
    )
    try:
        # A missing key raises KeyError here and surfaces as a 400 below,
        # preserving the one-missing-field-at-a-time error reporting.
        sample = {name: data[name] for name in feature_names}

        # The pipeline expects a DataFrame, not a bare dict.
        input_data = pd.DataFrame([sample])

        # Predict and unwrap the single result to a plain Python scalar.
        prediction = model.predict(input_data).tolist()[0]

        return jsonify({'predicted_sales': prediction})

    except KeyError as e:
        return jsonify({'error': f'Missing data for key: {e}'}), 400
    except Exception as e:
        # Boundary catch-all: report the failure instead of a bare 500 page.
        return jsonify({'error': str(e)}), 500
52
+
53
+
54
# Endpoint: predict sales for a batch of product-store rows uploaded as CSV.
@app.route('/predict_batch', methods=['POST'])
def predict_batch():
    """Predict sales for every row of an uploaded CSV file.

    The multipart form field must be named 'file'; the CSV columns must match
    the training feature columns. Returns {'predicted_sales': [...]} on
    success, 400 for a missing/unselected file, 500 on any processing error.
    """
    # Guard: the multipart request must actually carry a 'file' part.
    if 'file' not in request.files:
        return jsonify({'error': 'No file part in the request'}), 400

    upload = request.files['file']

    # A browser submits an empty part with no filename when nothing was chosen.
    if upload.filename == '':
        return jsonify({'error': 'No selected file'}), 400

    # Defensive guard kept from the original flow for a falsy file object.
    if not upload:
        return jsonify({'error': 'Something went wrong with file upload'}), 500

    try:
        # Parse the upload and run the whole frame through the pipeline.
        frame = pd.read_csv(upload)
        predictions = model.predict(frame).tolist()
        return jsonify({'predicted_sales': predictions})
    except Exception as e:
        # Boundary catch-all: surface the error text rather than a bare 500.
        return jsonify({'error': str(e)}), 500
82
+
83
+
84
# Local development entry point only — in the container the app is served by
# gunicorn (see Dockerfile CMD), which imports `app` and never runs this block.
if __name__ == '__main__':
    # debug=True enables the auto-reloader and the interactive Werkzeug
    # debugger; never expose this mode to untrusted networks.
    app.run(debug=True)
final_sales_forecasting_model.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d3de540996696fa424339c00294222bc600489164d994d93c7e5a3483dccd6e6
3
+ size 65134690
requirements.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ xgboost==2.1.4
5
+ joblib==1.4.2
6
+ Werkzeug==2.2.2
7
+ flask==2.2.2
8
+ gunicorn==20.1.0
9
+ requests==2.28.1
10
+ uvicorn[standard]