siddhesh1981 committed on
Commit
68ba08d
·
verified ·
1 Parent(s): d4851a6

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. Dockerfile +18 -0
  2. app.py +70 -0
  3. requirements.txt +10 -0
  4. sales_prediction_model_v1_0.joblib +3 -0
Dockerfile ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy all files from the current directory to the container's working directory
COPY . .

# Install dependencies from the requirements file without using cache to reduce image size
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Define the command to start the application using Gunicorn with 4 worker processes
# - `-w 4`: Uses 4 worker processes for handling requests
#   (FIX: the flag was documented here but missing from the original CMD)
# - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
# - `app:sales_prediction_api`: Runs the Flask instance named `sales_prediction_api`
#   defined in app.py (not `app` — the module name is `app`, the object is not)
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:sales_prediction_api"]
app.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import pandas as pd
import numpy as np
import joblib
from flask import Flask, jsonify, request

# Initialize flask app

# The instance name is what gunicorn targets (see Dockerfile: app:sales_prediction_api).
sales_prediction_api = Flask("Forecasted Product Sales Predictor")

# load the model

# Loaded once at import time so every request reuses the same fitted pipeline.
model = joblib.load('sales_prediction_model_v1_0.joblib')

# create home endpoint

@sales_prediction_api.get('/')
def home():
    """Landing route: returns a greeting that confirms the API is up."""
    return "Welcome to the Superkart product sales forecast API"

20
+ # create health check endpoint
21
+
22
+ #@sales_prediction_api.get('/health')
23
+ #def health_check():
24
+ # return jsonify({"status": "ok"}), 200
25
+
26
+ # create endpoint for single row data processing
27
+
@sales_prediction_api.post('/v1/data')
def predict_data():
    """Predict forecasted sales for a single product/store record.

    Expects a JSON body containing the ten feature fields listed in
    ``required_fields``. Returns ``{'prediction': <value>}`` on success,
    or a JSON error payload with HTTP 400 on malformed input.
    """
    data = request.get_json(silent=True)

    # FIX: a missing/non-JSON body or a missing key previously raised
    # TypeError/KeyError and surfaced as an opaque HTTP 500 — validate
    # and answer with an explicit 400 instead.
    if not isinstance(data, dict):
        return jsonify({'error': 'request body must be a JSON object'}), 400

    # Feature columns the model pipeline expects, in a fixed order.
    required_fields = (
        'Product_Weight',
        'Product_Sugar_Content',
        'Product_Allocated_Area',
        'Product_Type',
        'Product_MRP',
        'Store_Id',
        'Store_Establishment_Year',
        'Store_Size',
        'Store_Location_City_Type',
        'Store_Type',
    )

    missing = [field for field in required_fields if field not in data]
    if missing:
        return jsonify({'error': f'missing fields: {missing}'}), 400

    # One-row frame with only the expected columns (extra JSON keys ignored).
    df = pd.DataFrame([{field: data[field] for field in required_fields}])

    # predict() returns a 1-element array for the single input row.
    prediction = model.predict(df).tolist()[0]

    return jsonify({'prediction': prediction})

# create endpoint for batch processing

@sales_prediction_api.post('/v1/databatch')
def predict_data_batch():
    """Predict forecasted sales for a batch of records uploaded as CSV.

    Expects a multipart upload under form key ``file`` whose CSV has a
    ``Product_Id`` column plus the model's feature columns. Returns a
    JSON object mapping Product_Id -> prediction, or a JSON error with
    HTTP 400 when the upload is missing or malformed.
    """
    # FIX: request.files['file'] raised an unhandled 400 KeyError page;
    # answer with an explicit JSON error instead.
    if 'file' not in request.files:
        return jsonify({'error': "missing file upload under form key 'file'"}), 400

    df_input = pd.read_csv(request.files['file'])

    # FIX: dropping a nonexistent column previously crashed with a 500.
    if 'Product_Id' not in df_input.columns:
        return jsonify({'error': "CSV must contain a 'Product_Id' column"}), 400

    # Product_Id identifies rows but is not a model feature — drop it
    # before predicting.
    predictionlist = model.predict(df_input.drop(['Product_Id'], axis=1)).tolist()
    idlist = df_input['Product_Id'].tolist()

    # jsonify for consistency with the single-record endpoint (a bare
    # dict return is serialized the same way by Flask, but implicitly).
    return jsonify(dict(zip(idlist, predictionlist)))

if __name__ == '__main__':
    # BUG FIX: the original called app.run(debug=True), but no name `app`
    # exists in this module — the Flask instance is `sales_prediction_api`,
    # so running the script directly raised NameError.
    # debug=True is for local development only; production runs under
    # gunicorn (see Dockerfile CMD), which never executes this branch.
    sales_prediction_api.run(debug=True)
requirements.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ xgboost==2.1.4
5
+ joblib==1.4.2
6
+ Werkzeug==2.2.2
7
+ flask==2.2.2
8
+ gunicorn==20.1.0
9
+ requests==2.28.1
10
+ feature-engine==1.6.2
sales_prediction_model_v1_0.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7e870687b65519cff02bcf9d2ad5e5e6b123a0c2c72ad62fb67bb406a4ea83b8
3
+ size 5812730