Hugo014 committed on
Commit
1a3aee9
·
verified ·
1 Parent(s): e970c48

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. Dockerfile +2 -5
  2. app.py +94 -18
  3. requirements.txt +0 -2
Dockerfile CHANGED
@@ -6,11 +6,8 @@ WORKDIR /app
6
  # Copy all files from the current directory to the container's working directory
7
  COPY . .
8
 
9
- # Install dependencies from the requirements file without using cache to reduce image size
10
  RUN pip install --no-cache-dir --upgrade -r requirements.txt
11
 
12
- # Define the command to start the application using Gunicorn with 4 worker processes
13
- # - `-w 4`: Uses 4 worker processes for handling requests
14
- # - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
15
- # - `app:app`: Runs the Flask app (assuming `app.py` contains the Flask instance named `app`)
16
  CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:super_kart_api"]
 
6
  # Copy all files from the current directory to the container's working directory
7
  COPY . .
8
 
9
+ # Install dependencies
10
  RUN pip install --no-cache-dir --upgrade -r requirements.txt
11
 
12
+ # Start with Gunicorn
 
 
 
13
  CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:super_kart_api"]
app.py CHANGED
@@ -1,14 +1,13 @@
1
- # Import necessary libraries
2
  import numpy as np
3
- import joblib # For loading the serialized model
4
- import pandas as pd # For data manipulation
5
- from flask import Flask, request, jsonify # For creating the Flask API
6
 
7
  # Initialize the Flask application
8
  super_kart_api = Flask("Super Kart Price Predictor")
9
 
10
- # Load the trained machine learning model (updated path to match deployment structure)
11
- model_path = "super_kart_model_v1_0.joblib"
12
  try:
13
  model = joblib.load(model_path)
14
  print(f"Model loaded successfully from {model_path}")
@@ -36,7 +35,6 @@ def predict_sales():
36
  input_data = request.get_json()
37
 
38
  # Extract relevant features from the JSON data
39
- # Note: Exclude Product_Id and Store_Id if they are not used in prediction
40
  sample = {
41
  'Product_Weight': input_data['Product_Weight'],
42
  'Product_Sugar_Content': input_data['Product_Sugar_Content'],
@@ -48,13 +46,14 @@ def predict_sales():
48
  'Store_Location_City_Type': input_data['Store_Location_City_Type'],
49
  'Store_Type': input_data['Store_Type']
50
  }
51
- # Convert the extracted data into a Pandas DataFrame
 
52
  features_df = pd.DataFrame([sample])
53
 
54
- # Apply one-hot encoding for nominal columns (matching training)
55
  features_df = pd.get_dummies(features_df, columns=['Product_Type', 'Store_Type'], drop_first=True)
56
 
57
- # Apply ordinal encoding (based on provided orders)
58
  sugar_mapping = {'No Sugar': 0, 'Low Sugar': 1, 'Regular': 2}
59
  size_mapping = {'Small': 0, 'Medium': 1, 'High': 2}
60
  city_mapping = {'Tier 3': 0, 'Tier 2': 1, 'Tier 1': 2}
@@ -63,19 +62,96 @@ def predict_sales():
63
  features_df['Store_Size'] = features_df['Store_Size'].map(size_mapping)
64
  features_df['Store_Location_City_Type'] = features_df['Store_Location_City_Type'].map(city_mapping)
65
 
66
- # Make prediction (assuming direct sales prediction; adjust if log-transformed)
67
  predicted_sales = model.predict(features_df)[0]
68
-
69
- # If your model predicts log(sales), uncomment and use this instead:
70
- # predicted_log_sales = model.predict(features_df)[0]
71
- # predicted_sales = np.exp(predicted_log_sales)
72
-
73
- # Convert to Python float and round to 2 decimals
74
  predicted_sales = round(float(predicted_sales), 2)
75
 
76
  # Return the predicted sales total
77
  return jsonify({'Predicted Sales Total (in dollars)': predicted_sales})
78
 
79
- # Run the app (for testing locally; remove or adjust for production)
80
  if __name__ == '__main__':
81
  super_kart_api.run(debug=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import numpy as np
2
+ import joblib
3
+ import pandas as pd
4
+ from flask import Flask, request, jsonify
5
 
6
  # Initialize the Flask application
7
  super_kart_api = Flask("Super Kart Price Predictor")
8
 
9
+ # Load the trained machine learning model
10
+ model_path = "backend_files/super_kart_model_v1_0.joblib"
11
  try:
12
  model = joblib.load(model_path)
13
  print(f"Model loaded successfully from {model_path}")
 
35
  input_data = request.get_json()
36
 
37
  # Extract relevant features from the JSON data
 
38
  sample = {
39
  'Product_Weight': input_data['Product_Weight'],
40
  'Product_Sugar_Content': input_data['Product_Sugar_Content'],
 
46
  'Store_Location_City_Type': input_data['Store_Location_City_Type'],
47
  'Store_Type': input_data['Store_Type']
48
  }
49
+
50
+ # Convert to DataFrame
51
  features_df = pd.DataFrame([sample])
52
 
53
+ # Apply one-hot encoding
54
  features_df = pd.get_dummies(features_df, columns=['Product_Type', 'Store_Type'], drop_first=True)
55
 
56
+ # Apply ordinal encoding
57
  sugar_mapping = {'No Sugar': 0, 'Low Sugar': 1, 'Regular': 2}
58
  size_mapping = {'Small': 0, 'Medium': 1, 'High': 2}
59
  city_mapping = {'Tier 3': 0, 'Tier 2': 1, 'Tier 1': 2}
 
62
  features_df['Store_Size'] = features_df['Store_Size'].map(size_mapping)
63
  features_df['Store_Location_City_Type'] = features_df['Store_Location_City_Type'].map(city_mapping)
64
 
65
+ # Make prediction
66
  predicted_sales = model.predict(features_df)[0]
 
 
 
 
 
 
67
  predicted_sales = round(float(predicted_sales), 2)
68
 
69
  # Return the predicted sales total
70
  return jsonify({'Predicted Sales Total (in dollars)': predicted_sales})
71
 
72
+ # Run the app (for testing locally)
73
  if __name__ == '__main__':
74
  super_kart_api.run(debug=True)
75
+
76
+ # %%writefile backend_files/app.py
77
+ # # Import necessary libraries
78
+ # import numpy as np
79
+ # import joblib # For loading the serialized model
80
+ # import pandas as pd # For data manipulation
81
+ # from flask import Flask, request, jsonify # For creating the Flask API
82
+
83
+ # # Initialize the Flask application
84
+ # super_kart_api = Flask("Super Kart Price Predictor")
85
+
86
+ # # Load the trained machine learning model (updated path to match deployment structure)
87
+ # model_path = "super_kart_model_v1_0.joblib"
88
+ # try:
89
+ # model = joblib.load(model_path)
90
+ # print(f"Model loaded successfully from {model_path}")
91
+ # except FileNotFoundError:
92
+ # raise FileNotFoundError(f"Model file not found at {model_path}. Ensure it's included in the deployment.")
93
+
94
+ # # Define a route for the home page (GET request)
95
+ # @super_kart_api.get('/')
96
+ # def home():
97
+ # """
98
+ # This function handles GET requests to the root URL ('/') of the API.
99
+ # It returns a simple welcome message.
100
+ # """
101
+ # return "Welcome to the Super Kart Price Prediction API!"
102
+
103
+ # # Define an endpoint for single product sales prediction (POST request)
104
+ # @super_kart_api.post('/v1/sales')
105
+ # def predict_sales():
106
+ # """
107
+ # This function handles POST requests to the '/v1/sales' endpoint.
108
+ # It expects a JSON payload containing product and store details and returns
109
+ # the predicted sales total as a JSON response.
110
+ # """
111
+ # # Get the JSON data from the request body
112
+ # input_data = request.get_json()
113
+
114
+ # # Extract relevant features from the JSON data
115
+ # # Note: Exclude Product_Id and Store_Id if they are not used in prediction
116
+ # sample = {
117
+ # 'Product_Weight': input_data['Product_Weight'],
118
+ # 'Product_Sugar_Content': input_data['Product_Sugar_Content'],
119
+ # 'Product_Allocated_Area': input_data['Product_Allocated_Area'],
120
+ # 'Product_Type': input_data['Product_Type'],
121
+ # 'Product_MRP': input_data['Product_MRP'],
122
+ # 'Store_Establishment_Year': input_data['Store_Establishment_Year'],
123
+ # 'Store_Size': input_data['Store_Size'],
124
+ # 'Store_Location_City_Type': input_data['Store_Location_City_Type'],
125
+ # 'Store_Type': input_data['Store_Type']
126
+ # }
127
+ # # Convert the extracted data into a Pandas DataFrame
128
+ # features_df = pd.DataFrame([sample])
129
+
130
+ # # Apply one-hot encoding for nominal columns (matching training)
131
+ # features_df = pd.get_dummies(features_df, columns=['Product_Type', 'Store_Type'], drop_first=True)
132
+
133
+ # # Apply ordinal encoding (based on provided orders)
134
+ # sugar_mapping = {'No Sugar': 0, 'Low Sugar': 1, 'Regular': 2}
135
+ # size_mapping = {'Small': 0, 'Medium': 1, 'High': 2}
136
+ # city_mapping = {'Tier 3': 0, 'Tier 2': 1, 'Tier 1': 2}
137
+
138
+ # features_df['Product_Sugar_Content'] = features_df['Product_Sugar_Content'].map(sugar_mapping)
139
+ # features_df['Store_Size'] = features_df['Store_Size'].map(size_mapping)
140
+ # features_df['Store_Location_City_Type'] = features_df['Store_Location_City_Type'].map(city_mapping)
141
+
142
+ # # Make prediction (assuming direct sales prediction; adjust if log-transformed)
143
+ # predicted_sales = model.predict(features_df)[0]
144
+
145
+ # # If your model predicts log(sales), uncomment and use this instead:
146
+ # # predicted_log_sales = model.predict(features_df)[0]
147
+ # # predicted_sales = np.exp(predicted_log_sales)
148
+
149
+ # # Convert to Python float and round to 2 decimals
150
+ # predicted_sales = round(float(predicted_sales), 2)
151
+
152
+ # # Return the predicted sales total
153
+ # return jsonify({'Predicted Sales Total (in dollars)': predicted_sales})
154
+
155
+ # # Run the app (for testing locally; remove or adjust for production)
156
+ # if __name__ == '__main__':
157
+ # super_kart_api.run(debug=True)
requirements.txt CHANGED
@@ -7,5 +7,3 @@ Werkzeug==3.1.3
7
  flask==3.1.1
8
  gunicorn==20.1.0
9
  requests==2.28.1
10
- uvicorn[standard]
11
- streamlit==1.43.2
 
7
  flask==3.1.1
8
  gunicorn==20.1.0
9
  requests==2.28.1