pkulkar commited on
Commit
5a326b9
·
verified ·
1 Parent(s): ae938bd

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. Dockerfile +9 -12
  2. app.py +94 -61
  3. requirements.txt +0 -6
Dockerfile CHANGED
@@ -1,19 +1,16 @@
1
-
2
- # Use the official Python image
3
- FROM python:3.9
4
 
5
  # Set the working directory inside the container
6
  WORKDIR /app
7
 
8
- # Copy the requirements file and install dependencies
9
- COPY requirements.txt .
10
- RUN pip install --no-cache-dir -r requirements.txt
11
-
12
- # Copy the rest of the app files
13
  COPY . .
14
 
15
- # Expose Streamlit's default port
16
- EXPOSE 8501
17
 
18
- # Run the Streamlit app
19
- CMD ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0"]
 
 
 
 
1
+ FROM python:3.9-slim
 
 
2
 
3
  # Set the working directory inside the container
4
  WORKDIR /app
5
 
6
+ # Copy all files from the current directory to the container's working directory
 
 
 
 
7
  COPY . .
8
 
9
+ # Install dependencies from the requirements file without using cache to reduce image size
10
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
11
 
12
+ # Define the command to start the application using Gunicorn with 4 worker processes
13
+ # - `-w 4`: Uses 4 worker processes for handling requests
14
+ # - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
15
+ # - `app:app`: Runs the Flask app (assuming `app.py` contains the Flask instance named `app`)
16
+ CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:sales_forecaster_api"]
app.py CHANGED
@@ -1,61 +1,94 @@
1
- import streamlit as st
2
- import requests
3
- import json
4
-
5
- st.title("SuperKart Sales Forecaster")
6
- st.write("Enter the details of the product and store to get a sales forecast.")
7
-
8
- # Create input fields for the user
9
- product_weight = st.number_input("Product Weight", min_value=0.0, format="%f")
10
- product_sugar_content = st.selectbox("Product Sugar Content", ['Low Sugar', 'Regular', 'No Sugar'])
11
- product_allocated_area = st.number_input("Product Allocated Area", min_value=0.0, format="%f")
12
- product_type = st.selectbox("Product Type", ['Dairy', 'Soft Drinks', 'Meat', 'Fruits and Vegetables', 'Household', 'Baking Goods', 'Snack Foods', 'Frozen Foods', 'Breakfast', 'Health and Hygiene', 'Hard Drinks', 'Canned', 'Bread', 'Starchy Foods', 'Others', 'Seafood'])
13
- product_mrp = st.number_input("Product MRP", min_value=0.0, format="%f")
14
- store_id = st.selectbox("Store ID", [f"Store_{i}" for i in range(1, 11)])
15
- store_establishment_year = st.number_input("Store Establishment Year", min_value=1900, max_value=2024, step=1)
16
- store_size = st.selectbox("Store Size", ['Medium', 'High', 'Low'])
17
- store_location_city_type = st.selectbox("Store Location City Type", ['Tier 1', 'Tier 3', 'Tier 2'])
18
- store_type = st.selectbox("Store Type", ['Supermarket Type 1', 'Supermarket Type 2', 'Departmental Store', 'Food Mart'])
19
-
20
- # Prepare the data to be sent to the API
21
- input_data = {
22
- 'Product_Weight': product_weight,
23
- 'Product_Sugar_Content': product_sugar_content,
24
- 'Product_Allocated_Area': product_allocated_area,
25
- 'Product_Type': product_type,
26
- 'Product_MRP': product_mrp,
27
- 'Store_Id': store_id,
28
- 'Store_Establishment_Year': store_establishment_year,
29
- 'Store_Size': store_size,
30
- 'Store_Location_City_Type': store_location_city_type,
31
- 'Store_Type': store_type,
32
- }
33
-
34
- if st.button("Predict Sales"):
35
- # Send the data to the Flask API
36
- try:
37
- response = requests.post("https://pkulkar-SalesForcasterFrontend.hf.space/v1/sales", json=input_data)
38
- if response.status_code == 200:
39
- prediction = response.json()
40
- st.success(f"Predicted Sales: {prediction['Predicted Price (in dollars)']:.2f}")
41
- else:
42
- st.error(f"Error predicting sales: {response.status_code} - {response.text}")
43
- except requests.exceptions.RequestException as e:
44
- st.error(f"Error connecting to the API: {e}")
45
-
46
- # Section for batch prediction
47
- st.subheader("Batch Prediction")
48
-
49
- # Allow users to upload a CSV file for batch prediction
50
- uploaded_file = st.file_uploader("Upload CSV file for batch prediction", type=["csv"])
51
-
52
- # Make batch prediction when the "Predict Batch" button is clicked
53
- if uploaded_file is not None:
54
- if st.button("Predict Sales Batch"):
55
- response = requests.post("https://pkulkar-SalesForcasterFrontend.hf.space/v1/salesbatch", files={"file": uploaded_file}) # Send file to Flask API
56
- if response.status_code == 200:
57
- predictions = response.json()
58
- st.success("Batch predictions completed!")
59
- st.write(predictions) # Display the predictions
60
- else:
61
- st.error("Error making batch prediction.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Import necessary libraries
import numpy as np
import joblib  # For loading the serialized model
import pandas as pd  # For data manipulation
from flask import Flask, request, jsonify  # For creating the Flask API

# Initialize the Flask application.
# gunicorn is pointed at this object via "app:sales_forecaster_api"
# (see Dockerfile), so the variable name is part of the deploy contract.
sales_forecaster_api = Flask("SuperKart Sales Forecast")

# Load the trained machine learning model once at import time.
# The model file is copied into the image's working directory (/app) by
# `COPY . .`, so load it by its relative path. The previous absolute path
# (/content/drive/MyDrive/...) was a Google Colab Drive mount that does
# not exist inside the Docker container and would crash the app on startup.
model = joblib.load("sales_prediction_v1_0.joblib")
12
+
13
# Route: home page (GET)
@sales_forecaster_api.get('/')
def home():
    """Handle GET requests to the root URL ('/') with a plain welcome message."""
    return "Welcome to the SuperKart Sales Forecast API!"
21
+
22
# Define an endpoint for a single sales prediction (POST request)
@sales_forecaster_api.post('/v1/sales')
def predict_sales_price():
    """
    Handle POST requests to the '/v1/sales' endpoint.

    Expects a JSON payload containing the product and store details listed
    in `feature_names` below, and returns the predicted sales figure
    (in dollars) as a JSON response.
    """
    # Get the JSON data from the request body
    product_data = request.get_json()

    # Features the model was trained on, in the order the payload provides
    # them. A missing key raises KeyError (surfaced by Flask as a 500),
    # matching the original behavior.
    feature_names = [
        'Product_Weight',
        'Product_Sugar_Content',
        'Product_Allocated_Area',
        'Product_Type',
        'Product_MRP',
        'Store_Id',
        'Store_Establishment_Year',
        'Store_Size',
        'Store_Location_City_Type',
        'Store_Type',
    ]
    sample = {name: product_data[name] for name in feature_names}

    # The model expects a DataFrame with one row per sample
    input_data = pd.DataFrame([sample])

    # The model predicts on a log scale (hence np.exp below)
    predicted_log_price = model.predict(input_data)[0]

    # Convert the log prediction back to an actual price and round to
    # cents. float() is required because np.exp returns a NumPy scalar,
    # which Flask's jsonify cannot serialize directly.
    predicted_price = round(float(np.exp(predicted_log_price)), 2)

    # Return the actual price
    return jsonify({'Predicted Price (in dollars)': predicted_price})
63
+
64
+
65
# Define an endpoint for batch sales prediction (POST request)
@sales_forecaster_api.post('/v1/salesbatch')
def predict_sales_price_batch():
    """
    Handle POST requests to the '/v1/salesbatch' endpoint.

    Expects an uploaded CSV file (multipart form field 'file') with one row
    of product/store features per record, and returns a JSON object mapping
    each record's id to its predicted sales figure (in dollars).
    """
    # Get the uploaded CSV file from the request
    file = request.files['file']

    # Read the CSV file into a Pandas DataFrame
    input_data = pd.read_csv(file)

    # The model predicts on a log scale; convert each prediction back to
    # an actual price and round to cents. float() avoids NumPy scalar
    # types, which jsonify cannot serialize.
    predicted_prices = [
        round(float(np.exp(log_price)), 2)
        for log_price in model.predict(input_data)
    ]

    # Key the predictions by the 'id' column.
    # NOTE(review): assumes the uploaded CSV contains an 'id' column and
    # that the model pipeline tolerates (or drops) it during prediction —
    # confirm against the training pipeline.
    record_ids = input_data['id'].tolist()
    output_dict = dict(zip(record_ids, predicted_prices))

    # Return via jsonify for consistency with the single-prediction endpoint
    return jsonify(output_dict)
91
+
92
# Entry point for direct execution only: starts Flask's built-in
# development server with debug mode on. In the container the app is
# served by gunicorn instead (see Dockerfile), so this never runs there.
if __name__ == '__main__':
    sales_forecaster_api.run(debug=True)
requirements.txt CHANGED
@@ -3,9 +3,3 @@ numpy==2.0.2
3
  scikit-learn==1.6.1
4
  xgboost==2.1.4
5
  joblib==1.4.2
6
- Werkzeug==2.2.2
7
- flask==2.2.2
8
- gunicorn==20.1.0
9
- requests==2.32.3
10
- uvicorn[standard]
11
- streamlit==1.43.2
 
3
  scikit-learn==1.6.1
4
  xgboost==2.1.4
5
  joblib==1.4.2