Anusha3 commited on
Commit
d725bda
·
verified ·
1 Parent(s): d70e5d1

Upload folder using huggingface_hub

Browse files
backend_files/Dockerfile CHANGED
@@ -13,4 +13,4 @@ RUN pip install --no-cache-dir --upgrade -r requirements.txt
13
  # - `-w 4`: Uses 4 worker processes for handling requests
14
  # - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
15
  # - `app:app`: Runs the Flask app (assuming `app.py` contains the Flask instance named `app`)
16
- CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:rental_price_predictor_api"]
 
13
  # - `-w 4`: Uses 4 worker processes for handling requests
14
  # - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
15
  # - `app:superkart_sales_api`: Runs the Flask app (the Flask instance named `superkart_sales_api` defined in `app.py`)
16
+ CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:superkart_sales_api"]
backend_files/app.py CHANGED
@@ -20,75 +20,75 @@ def home():
20
  """
21
  return "Welcome to the SuperKart Sales Prediction API!"
22
 
23
- # Define an endpoint for single sales prediction (POST request)
 
24
  @superkart_sales_api.post('/v1/sales')
25
  def predict_sales():
26
  """
27
- This function handles POST requests to the '/v1/sales' endpoint.
28
- It expects a JSON payload containing product and store details
29
- and returns the predicted sales as a JSON response.
30
  """
31
- # Get the JSON data from the request body
32
- product_data = request.get_json()
33
-
34
- # Extract relevant features from the JSON data
35
- sample = {
36
- 'Product_Weight': product_data['Product_Weight'],
37
- 'Product_Allocated_Area': product_data['Product_Allocated_Area'],
38
- 'Product_MRP': product_data['Product_MRP'],
39
- 'Store_Establishment_Year': product_data['Store_Establishment_Year'],
40
- 'Product_Sugar_Content': product_data['Product_Sugar_Content'],
41
- 'Product_Type': product_data['Product_Type'],
42
- 'Store_Size': product_data['Store_Size'],
43
- 'Store_Location_City_Type': product_data['Store_Location_City_Type'],
44
- 'Store_Type': product_data['Store_Type']
45
- }
46
-
47
- # Convert the extracted data into a Pandas DataFrame
48
- input_data = pd.DataFrame([sample])
49
-
50
- # Make prediction
51
- predicted_sales = model.predict(input_data)[0]
52
-
53
- # Convert predicted_sales to Python float
54
- predicted_sales = round(float(predicted_sales), 2)
55
-
56
- # Return the prediction as JSON
57
- return jsonify({'Predicted Sales (units)': predicted_sales})
58
-
59
-
60
- # Define an endpoint for batch prediction (POST request)
 
 
 
 
 
61
  @superkart_sales_api.post('/v1/salesbatch')
62
  def predict_sales_batch():
63
  """
64
- This function handles POST requests to the '/v1/salesbatch' endpoint.
65
- It expects a CSV file containing product details for multiple products
66
- and returns the predicted sales as a dictionary in the JSON response.
67
  """
68
- # Get the uploaded CSV file from the request
69
- file = request.files['file']
70
-
71
- # Read the CSV file into a Pandas DataFrame
72
- input_data = pd.read_csv(file)
73
 
74
- # Make predictions for all rows in the DataFrame
75
- predicted_sales = model.predict(input_data).tolist()
76
 
77
- # Round and convert to float
78
- predicted_sales = [round(float(sale), 2) for sale in predicted_sales]
 
79
 
80
- # If the CSV has an 'id' column, use it as keys; else just index
81
- if 'id' in input_data.columns:
82
- ids = input_data['id'].tolist()
83
- else:
84
- ids = list(range(1, len(predicted_sales) + 1))
85
 
86
- output_dict = dict(zip(ids, predicted_sales))
87
 
88
- # Return the predictions dictionary as a JSON response
89
- return jsonify(output_dict)
90
 
91
-
92
- # Run the Flask application in debug mode if this script is executed directly
 
93
  if __name__ == '__main__':
94
- superkart_sales_api.run(debug=True)
 
 
20
  """
21
  return "Welcome to the SuperKart Sales Prediction API!"
22
 
23
+ # Endpoint for Single Prediction
24
+ # -------------------------------
25
  @superkart_sales_api.post('/v1/sales')
26
  def predict_sales():
27
  """
28
+ Predict sales for a single product-outlet combination
 
 
29
  """
30
+ try:
31
+ # Get JSON data from request
32
+ data = request.get_json()
33
+
34
+ # Extract relevant features
35
+ sample = {
36
+ 'Product_Weight': data['Product_Weight'],
37
+ 'Product_Allocated_Area': data['Product_Allocated_Area'],
38
+ 'Product_MRP': data['Product_MRP'],
39
+ 'Store_Establishment_Year': data['Store_Establishment_Year'],
40
+ 'Product_Sugar_Content': data['Product_Sugar_Content'],
41
+ 'Store_Size': data['Store_Size'],
42
+ 'Store_Location_City_Type': data['Store_Location_City_Type'],
43
+ 'Store_Type': data['Store_Type'],
44
+ 'Product_Type': data['Product_Type']
45
+
46
+ }
47
+
48
+ # Convert to DataFrame
49
+ input_df = pd.DataFrame([sample])
50
+
51
+ # Make prediction
52
+ prediction = model.predict(input_df)[0]
53
+
54
+ # Convert to float and round
55
+ prediction = round(float(prediction), 2)
56
+
57
+ return jsonify({"Predicted Sales": prediction})
58
+
59
+ except Exception as e:
60
+ return jsonify({"error": str(e)}), 500
61
+
62
+ # -------------------------------
63
+ # Endpoint for Batch Prediction
64
+ # -------------------------------
65
  @superkart_sales_api.post('/v1/salesbatch')
66
  def predict_sales_batch():
67
  """
68
+ Predict sales for multiple rows from a CSV file
 
 
69
  """
70
+ try:
71
+ # Get uploaded file
72
+ file = request.files['file']
 
 
73
 
74
+ # Read into DataFrame
75
+ input_df = pd.read_csv(file)
76
 
77
+ # Make predictions
78
+ predictions = model.predict(input_df).tolist()
79
+ predictions = [round(float(p), 2) for p in predictions]
80
 
81
+ # Return predictions in a dict format with row index as key
82
+ output_dict = {str(i): predictions[i] for i in range(len(predictions))}
 
 
 
83
 
84
+ return jsonify(output_dict)
85
 
86
+ except Exception as e:
87
+ return jsonify({"error": str(e)}), 500
88
 
89
+ # -------------------------------
90
+ # Run App
91
+ # -------------------------------
92
  if __name__ == '__main__':
93
+ superkart_sales_api.run(debug=True, host="0.0.0.0", port=7860)
94
+
backend_files/superkart_sales_prediction_model_v1_0.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9fcee8aeaaff84451c98d1846e7b158bfaf565d370611ce5b29d254d1cc9fe7d
3
+ size 207980
frontend_files/Dockerfile CHANGED
@@ -11,6 +11,7 @@ COPY . .
11
  RUN pip3 install -r requirements.txt
12
 
13
  # Define the command to run the Streamlit app on port 8501 and make it accessible externally
14
- CMD ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0", "--server.enableXsrfProtection=false"]
 
15
 
16
  # NOTE: Disable XSRF protection for easier external access in order to make batch predictions
 
11
  RUN pip3 install -r requirements.txt
12
 
13
  # Define the command to run the Streamlit app on port 8501 and make it accessible externally
14
+ #CMD ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0", "--server.enableXsrfProtection=false"]
15
+ CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0", "--server.enableXsrfProtection=false"]
16
 
17
  # NOTE: Disable XSRF protection for easier external access in order to make batch predictions
frontend_files/app.py CHANGED
@@ -1,4 +1,3 @@
1
-
2
  import streamlit as st
3
  import pandas as pd
4
  import requests
@@ -13,35 +12,42 @@ st.title("Superkart Sales Prediction App")
13
  # Section for online prediction
14
  st.subheader("Online Prediction")
15
 
16
- # Collect user input for product & store features
17
- product_weight = st.number_input("Product Weight (grams)", min_value=0.0, value=500.0, step=50.0)
18
- product_visibility = st.number_input("Product Visibility", min_value=0.0, max_value=1.0, step=0.01, value=0.05)
19
- product_mrp = st.number_input("Product MRP (₹)", min_value=0.0, value=120.0, step=1.0)
20
 
21
- outlet_establishment_year = st.number_input("Outlet Establishment Year", min_value=1950, max_value=2030, value=2000, step=1)
22
- outlet_size = st.selectbox("Outlet Size", ["Small", "Medium", "High"])
23
- outlet_location_type = st.selectbox("Outlet Location Type", ["Tier 1", "Tier 2", "Tier 3"])
24
- outlet_type = st.selectbox("Outlet Type", ["Grocery Store", "Supermarket Type1", "Supermarket Type2", "Supermarket Type3"])
 
 
 
 
 
 
 
 
 
 
25
 
26
  # Convert user input into a DataFrame
27
  input_data = pd.DataFrame([{
28
-
29
- 'Product_Weight': product_weight,
30
- 'Product_Visibility': product_visibility,
31
- 'Product_MRP': product_mrp,
32
- 'Outlet_Establishment_Year': outlet_establishment_year,
33
- 'Outlet_Size': outlet_size,
34
- 'Outlet_Location_Type': outlet_location_type,
35
- 'Outlet_Type': outlet_type
 
36
  }])
37
 
38
  # Make prediction when the "Predict" button is clicked
39
  if st.button("Predict Sales"):
40
  response = requests.post(
41
- "https://<username>-Superkart_Docker_space.hf.space/v1/sales",
42
  json=input_data.to_dict(orient='records')[0]
43
  ) # Send data to backend API
44
-
45
  if response.status_code == 200:
46
  prediction = response.json()['Predicted Sales']
47
  st.success(f"Predicted Sales: {prediction}")
@@ -58,13 +64,14 @@ uploaded_file = st.file_uploader("Upload CSV file for batch prediction", type=["
58
  if uploaded_file is not None:
59
  if st.button("Predict Batch Sales"):
60
  response = requests.post(
61
- "https://<username>-Superkart_Docker_space.hf.space/v1/salesbatch",
62
- files={"file": uploaded_file}
63
- ) # Send file to backend API
64
-
65
  if response.status_code == 200:
66
  predictions = response.json()
67
  st.success(" Batch predictions completed!")
68
  st.write(predictions) # Display the predictions
69
  else:
70
  st.error("Error making batch prediction.")
 
 
 
 
 
1
  import streamlit as st
2
  import pandas as pd
3
  import requests
 
12
  # Section for online prediction
13
  st.subheader("Online Prediction")
14
 
 
 
 
 
15
 
16
+ # Numeric inputs
17
+ Product_Weight = st.number_input("Product Weight (in grams)", min_value=0.0, value=500.0)
18
+ Product_Allocated_Area = st.number_input("Allocated Area (sq ft)", min_value=0.0, value=100.0)
19
+ Product_MRP = st.number_input("Product MRP", min_value=0.0, value=50.0)
20
+ Store_Establishment_Year = st.number_input("Store Establishment Year", min_value=1900, max_value=2025, value=2000)
21
+
22
+ # Categorical inputs
23
+ Product_Sugar_Content = st.selectbox("Product Sugar Content", ["Low", "Medium", "High"])
24
+ Product_Type = st.selectbox("Product Type", ["Food", "Beverage", "Snack", "Other"])
25
+ Store_Size = st.selectbox("Store Size", ["Small", "Medium", "Large"])
26
+ Store_Location_City_Type = st.selectbox("City Type", ["Tier 1", "Tier 2", "Tier 3"])
27
+ Store_Type = st.selectbox("Store Type", ["Mall", "Standalone", "Supermarket", "Other"])
28
+
29
+
30
 
31
  # Convert user input into a DataFrame
32
  input_data = pd.DataFrame([{
33
+ 'Product_Weight': Product_Weight,
34
+ 'Product_Allocated_Area': Product_Allocated_Area,
35
+ 'Product_MRP': Product_MRP,
36
+ 'Store_Establishment_Year': Store_Establishment_Year,
37
+ 'Product_Sugar_Content': Product_Sugar_Content,
38
+ 'Product_Type': Product_Type,
39
+ 'Store_Size': Store_Size,
40
+ 'Store_Location_City_Type': Store_Location_City_Type,
41
+ 'Store_Type': Store_Type
42
  }])
43
 
44
  # Make prediction when the "Predict" button is clicked
45
  if st.button("Predict Sales"):
46
  response = requests.post(
47
+ "https://Anusha3-Superkart-Backend-Docker-space.hf.space/v1/sales",
48
  json=input_data.to_dict(orient='records')[0]
49
  ) # Send data to backend API
50
+
51
  if response.status_code == 200:
52
  prediction = response.json()['Predicted Sales']
53
  st.success(f"Predicted Sales: {prediction}")
 
64
  if uploaded_file is not None:
65
  if st.button("Predict Batch Sales"):
66
  response = requests.post(
67
+ "https://Anusha3-Superkart-Backend-Docker-space.hf.space/v1/salesbatch", files={"file": uploaded_file} ) # Send file to backend API
68
+
 
 
69
  if response.status_code == 200:
70
  predictions = response.json()
71
  st.success(" Batch predictions completed!")
72
  st.write(predictions) # Display the predictions
73
  else:
74
  st.error("Error making batch prediction.")
75
+
76
+
77
+