UncloudMe committed on
Commit
6b07176
·
verified ·
1 Parent(s): 9240583

Upload folder using huggingface_hub

Browse files
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy all files from the current directory to the container's working directory
COPY . .

# Install dependencies from the requirements file without using cache to reduce image size
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Define the command to start the application using Gunicorn with 4 worker processes
# - `-w 4`: Uses 4 worker processes for handling requests
# - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
# - `app:SK_Sales_Forecast_api`: Runs the Flask instance named `SK_Sales_Forecast_api` defined in `app.py`
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:SK_Sales_Forecast_api"]
SuperKart_Sales_Forecast_model_v1_0.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a874074e877e30b5ab16dfe0fe5ddcb1fd001ecca468fbe745f12a7fb3b4c714
3
+ size 206602
app.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Import necessary libraries
import os
from datetime import datetime

import joblib  # For loading the serialized model
import numpy as np
import pandas as pd  # For data manipulation
from flask import Flask, request, jsonify  # For creating the Flask API

# Initialize the Flask application
SK_Sales_Forecast_api = Flask("SK_Sales_Backend")

# Resolve the model path. The Docker image runs `COPY . .` into /app and this
# repo stores the .joblib next to app.py, so the original hard-coded
# "deployment/..." path fails inside the container. Prefer the original
# location for backward compatibility, then fall back to the repo root.
_MODEL_FILENAME = "SuperKart_Sales_Forecast_model_v1_0.joblib"
_MODEL_PATH = os.path.join("deployment", _MODEL_FILENAME)
if not os.path.exists(_MODEL_PATH):
    _MODEL_PATH = _MODEL_FILENAME

# Load the trained machine learning model
model = joblib.load(_MODEL_PATH)
14
# Health-check / landing route (GET request)
@SK_Sales_Forecast_api.get('/')
def home():
    """Handle GET requests to the root URL ('/').

    Returns a plain-text welcome message so callers can verify that the
    API is reachable.
    """
    return "Welcome to the SuperKart Sales Forecast API!"
23
+
24
# Define an endpoint for single product sales prediction (POST request)
@SK_Sales_Forecast_api.post('/salespredict')
def predict_product_sale():
    """
    Handle POST requests to the '/salespredict' endpoint.

    Expects a JSON payload containing the product and store fields listed
    below and returns the predicted sale amount as a JSON response with
    the key 'Predicted Sale'.
    """
    # Get the JSON data from the request body
    product_data = request.get_json()

    # Extract relevant features from the JSON data
    sample = {
        'Product_Id': product_data['Product_Id'],
        'Product_Weight': product_data['Product_Weight'],
        'Product_Sugar_Content': product_data['Product_Sugar_Content'],
        'Product_Allocated_Area': product_data['Product_Allocated_Area'],
        'Product_Type': product_data['Product_Type'],
        'Product_MRP': product_data['Product_MRP'],
        'Store_Establishment_Year': product_data['Store_Establishment_Year'],
        'Store_Size': product_data['Store_Size'],
        'Store_Location_City_Type': product_data['Store_Location_City_Type'],
        'Store_Type': product_data['Store_Type']
    }

    # Convert the extracted data into a single-row Pandas DataFrame
    input_data = pd.DataFrame([sample])

    # Derive Product_Code (first two characters of the Id) and drop the raw
    # Id, matching the feature set the model expects
    input_data["Product_Code"] = input_data["Product_Id"].str[:2]
    input_data.drop("Product_Id", axis=1, inplace=True)

    # Derive Store_Age from the establishment year and drop the raw year
    current_year = datetime.now().year
    input_data["Store_Age"] = current_year - input_data["Store_Establishment_Year"]
    input_data.drop("Store_Establishment_Year", axis=1, inplace=True)

    # Make the prediction; cast the numpy scalar to a native float so
    # Flask's JSON encoder can serialize it reliably
    predicted_sale = float(model.predict(input_data)[0])

    # Return the predicted sale amount
    return jsonify({'Predicted Sale': predicted_sale})
67
+
68
+
69
# Define an endpoint for batch prediction (POST request)
@SK_Sales_Forecast_api.post('/salespredictbatch')
def predict_product_sale_batch():
    """
    Handle POST requests to the '/salespredictbatch' endpoint.

    Expects an uploaded CSV file (form field 'file') containing one row per
    product and returns a JSON object mapping each Product_Id to its
    predicted sale amount.
    """
    # Get the uploaded CSV file from the request
    file = request.files['file']

    # Read the CSV file into a Pandas DataFrame
    input_data = pd.read_csv(file)

    # Derive Product_Code (first two characters of the Id); keep the raw Ids
    # for the response keys before dropping the column
    input_data["Product_Code"] = input_data["Product_Id"].str[:2]
    product_ids = input_data['Product_Id'].tolist()
    input_data.drop("Product_Id", axis=1, inplace=True)

    # Derive Store_Age from the establishment year and drop the raw year
    current_year = datetime.now().year
    input_data["Store_Age"] = current_year - input_data["Store_Establishment_Year"]
    input_data.drop("Store_Establishment_Year", axis=1, inplace=True)

    # Make predictions for all rows; .tolist() yields native Python floats
    predicted_sales = model.predict(input_data).tolist()

    # Map each Product_Id to its predicted sale amount
    output_dict = dict(zip(product_ids, predicted_sales))

    # Flask serializes the returned dict as a JSON response
    return output_dict
103
+
104
+ # Run the Flask application in debug mode if this script is executed directly
105
+ if __name__ == '__main__':
106
+ SK_Sales_Forecast_api.run(debug=True)
requirements.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ xgboost==2.1.4
5
+ joblib==1.4.2
6
+ Werkzeug==2.2.2
7
+ flask==2.2.2
8
+ gunicorn==20.1.0
9
+ requests==2.28.1
10
+ uvicorn[standard]
11
+ streamlit==1.43.2