CodingBuddy committed on
Commit
6e01b8a
·
verified ·
1 Parent(s): c93fa90

Upload folder using huggingface_hub

Browse files
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Use a slim Python base to keep the final image small
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy only the dependency manifest first so the pip-install layer is
# cached by Docker and not rebuilt on every source-code change
COPY requirements.txt .

# Install dependencies without using the pip cache to reduce image size
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application source into the image
COPY . .

# Start the application using Gunicorn with 4 worker processes
# - `-w 4`: Uses 4 worker processes for handling requests
# - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
# - `app:app`: Runs the Flask app (`app.py` contains the Flask instance named `app`)
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:app"]
SuperKart_model_deployment_model_v1_0.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d406fadaa32d6c1161a883700a42286723514c051c8fc29786969e5ad9278244
3
+ size 8537345
app.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import joblib
import numpy as np  # FIX: np.exp was used below without numpy ever being imported (NameError at request time)
import pandas as pd
from flask import Flask, request, jsonify

# Initialize Flask app with a name
app = Flask("SuperKart sales prediction app backend")

# Load the trained sales prediction model once at startup
model = joblib.load("SuperKart_model_deployment_model_v1_0.joblib")

# The exact feature columns the model expects, in order.
FEATURE_COLUMNS = [
    'Product_Weight',
    'Product_Sugar_Content',
    'Product_Allocated_Area',
    'Product_Type',
    'Product_MRP',
    'Store_Id',
    'Store_Establishment_Year',
    'Store_Size',
    'Store_Location_City_Type',
    'Store_Type',
    'Product_Store_Sales_Total',
]


# Define a route for the home page
@app.get('/')
def home():
    """Health-check / landing route for the API."""
    return "Welcome to the SuperKart Sales Prediction API"


# Define an endpoint to predict sales of a single product in a store.
# NOTE(review): the function name is a leftover from a churn service; the
# public interface is the route path, so the name is kept for compatibility.
@app.post('/v1/sales')
def predict_churn():
    """Predict the sales of a single product in a store.

    Expects a JSON object containing every feature in FEATURE_COLUMNS.
    Returns {'Predicted_Sale': <float>} on success, or an HTTP 400 with an
    error message when the body is not an object or fields are missing
    (previously a missing key raised an unhandled KeyError -> HTTP 500).
    """
    # Get JSON data from the request
    store_data = request.get_json(silent=True)
    if not isinstance(store_data, dict):
        return jsonify({'error': 'Request body must be a JSON object'}), 400

    # Validate input up front so a bad request yields a 400, not a 500
    missing = [col for col in FEATURE_COLUMNS if col not in store_data]
    if missing:
        return jsonify({'error': f'Missing fields: {missing}'}), 400

    # Extract the relevant store features from the input data
    request_data = {col: store_data[col] for col in FEATURE_COLUMNS}

    # Convert the extracted data into a single-row DataFrame
    input_data = pd.DataFrame([request_data])

    # Make a sales prediction using the trained model; take the first
    # (only) element of the returned array
    prediction = model.predict(input_data).tolist()[0]

    # The model presumably predicts log-sales (np.exp inverts that
    # transform) — TODO confirm against the training pipeline
    predicted_sales = round(float(np.exp(prediction)), 2)

    # Return the prediction as a JSON response
    return jsonify({'Predicted_Sale': predicted_sales})
requirements.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ xgboost==2.1.4
5
+ joblib==1.4.2
6
+ Werkzeug==2.2.2
7
+ flask==2.2.2
8
+ gunicorn==20.1.0
9
+ requests==2.28.1
10
+ uvicorn[standard]
11
+ streamlit==1.43.2