nrajwani committed on
Commit
cb7d5d6
·
verified ·
1 Parent(s): 652945c

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. Dockerfile +9 -9
  2. app.py +91 -56
  3. requirements.txt +8 -0
Dockerfile CHANGED
@@ -1,16 +1,16 @@
1
- # Use a minimal base image with Python 3.9 installed
2
  FROM python:3.9-slim
3
 
4
- # Set the working directory inside the container to /app
5
  WORKDIR /app
6
 
7
- # Copy all files from the current directory on the host to the container's /app directory
8
  COPY . .
9
 
10
- # Install Python dependencies listed in requirements.txt
11
- RUN pip3 install -r requirements.txt
12
 
13
- # Define the command to run the Streamlit app on port 8501 and make it accessible externally
14
- CMD ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0", "--server.enableXsrfProtection=false"]
15
-
16
- # NOTE: Disable XSRF protection for easier external access in order to make batch predictions
 
 
 
1
  FROM python:3.9-slim
2
 
3
+ # Set the working directory inside the container
4
  WORKDIR /app
5
 
6
+ # Copy all files from the current directory to the container's working directory
7
  COPY . .
8
 
9
+ # Install dependencies from the requirements file without using cache to reduce image size
10
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
11
 
12
+ # Define the command to start the application using Gunicorn with 4 worker processes
13
+ # - `-w 4`: Uses 4 worker processes for handling requests
14
+ # - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
15
+ # - `app:app`: Runs the Flask app (assuming `app.py` contains the Flask instance named `app`)
16
+ CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:sales_predictor_api"]
app.py CHANGED
@@ -1,56 +1,91 @@
1
- import streamlit as st
2
- import pandas as pd
3
- import requests
4
-
5
- # Set the title of the Streamlit app
6
- st.title("Superkart Sales Prediction")
7
-
8
- # Section for online prediction
9
- st.subheader("Online Prediction")
10
-
11
- # Collect user input for property features
12
- store_type = st.selectbox("Store Type", ["Supermarket Type1", "Supermarket Type2", "Food Mart", "Departmental Store"])
13
- store_location_city_type = st.selectbox("Store Location City Type", ['Tier 2' 'Tier 1' 'Tier 3'])
14
- store_size = st.selectbox("Store Size", ['Medium' 'High' 'Small'])
15
- store_id = st.selectbox("Store Id", ['OUT004' 'OUT003' 'OUT001' 'OUT002'])
16
- product_sugar_content = st.number_input("Product Sugar Content", ['Low Sugar', 'Regular', 'No Sugar'])
17
- product_type = st.selectbox("Product Type", ['Frozen Foods', 'Dairy', 'Canned', 'Baking Goods', 'Health and Hygiene', 'Snack Foods', 'Meat', 'Household', 'Hard Drinks', 'Fruits and Vegetables', 'Breads', 'Soft Drinks', 'Breakfast', 'Others', 'Starchy Foods', 'Seafood'])
18
-
19
- # user_name = 'nrajwani'
20
- # repo_id = "nrajwani/SalesPredictionBackend"
21
-
22
- # Convert user input into a DataFrame
23
- input_data = pd.DataFrame([{
24
- 'store_type': store_type,
25
- 'store_location_city_type': store_location_city_type,
26
- 'store_size': store_size,
27
- 'store_id': store_id,
28
- 'product_sugar_content': product_sugar_content,
29
- 'product_type': product_type
30
- }])
31
-
32
- # Make prediction when the "Predict" button is clicked
33
- if st.button("Predict"):
34
- response = requests.post("https://<username>-<repo_id>.hf.space/v1/sales", json=input_data.to_dict(orient='records')[0]) # Send data to Flask API
35
- if response.status_code == 200:
36
- prediction = response.json()['Predicted Sales (in dollars)']
37
- st.success(f"Predicted Sales (in dollars): {prediction}")
38
- else:
39
- st.error("Error making prediction.")
40
-
41
- # Section for batch prediction
42
- st.subheader("Batch Prediction")
43
-
44
- # Allow users to upload a CSV file for batch prediction
45
- uploaded_file = st.file_uploader("Upload CSV file for batch prediction", type=["csv"])
46
-
47
- # Make batch prediction when the "Predict Batch" button is clicked
48
- if uploaded_file is not None:
49
- if st.button("Predict Batch"):
50
- response = requests.post("https://<username>-<repo_id>.hf.space/v1/salesbatch", files={"file": uploaded_file}) # Send file to Flask API
51
- if response.status_code == 200:
52
- predictions = response.json()
53
- st.success("Batch predictions completed!")
54
- st.write(predictions) # Display the predictions
55
- else:
56
- st.error("Error making batch prediction.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Import necessary libraries
2
+ import numpy as np
3
+ import joblib # For loading the serialized model
4
+ import pandas as pd # For data manipulation
5
+ from flask import Flask, request, jsonify # For creating the Flask API
6
+
7
+ # Initialize the Flask application
8
+ sales_predictor_api = Flask("Superkart Sales Predictor")
9
+
10
+ # Load the trained machine learning model
11
+ model = joblib.load("superkart_sales_prediction_model_v1_0.joblib")
12
+
13
+ # Define a route for the home page (GET request)
14
+ @sales_predictor_api.get('/')
15
+ def home():
16
+ """
17
+ This function handles GET requests to the root URL ('/') of the API.
18
+ It returns a simple welcome message.
19
+ """
20
+ return "Welcome to the Superkart Sales Prediction API!"
21
+
22
+ # Define an endpoint for single property prediction (POST request)
23
+ @sales_predictor_api.post('/v1/sales')
24
+ def predict_sales():
25
+ """
26
+ This function handles POST requests to the '/v1/sales' endpoint.
27
+ It expects a JSON payload containing property details and returns
28
+ the predicted rental price as a JSON response.
29
+ """
30
+ # Get the JSON data from the request body
31
+ data = request.get_json()
32
+
33
+ # Extract relevant features from the JSON data
34
+ sample = {
35
+ 'product_sugar_content': data['Product_Sugar_Content'],
36
+ 'product_type': data['Product_Type'],
37
+ # 'store_establishment_year': data['Store_Establishment_Year'],
38
+ 'store_size': data['Store_Size'],
39
+ 'store_location_city_type': data['Store_Location_City_Type'],
40
+ 'store_type': data['Store_Type'],
41
+ 'store_id': data['Store_Id']
42
+ }
43
+
44
+ # Convert the extracted data into a Pandas DataFrame
45
+ input_data = pd.DataFrame([sample])
46
+
47
+ # Make prediction (get log_price)
48
+ predicted_log_sales = model.predict(input_data)[0]
49
+
50
+ # Calculate actual price
51
+ predicted_sales = np.exp(predicted_log_sales)
52
+
53
+ # Convert predicted_price to Python float
54
+ predicted_sales = round(float(predicted_sales), 2)
55
+ # The conversion above is needed as we convert the model prediction (log price) to actual price using np.exp, which returns predictions as NumPy float32 values.
56
+ # When we send this value directly within a JSON response, Flask's jsonify function encounters a datatype error
57
+
58
+ # Return the actual price
59
+ return jsonify({'Predicted Price (in dollars)': predicted_sales})
60
+
61
+
62
+ # Define an endpoint for batch prediction (POST request)
63
+ @sales_predictor_api.post('/v1/salesbatch')
64
+ def predict_sales_price_batch():
65
+ """
66
+ This function handles POST requests to the '/v1/salesbatch' endpoint.
67
+ It expects a CSV file containing property details for multiple properties
68
+ and returns the predicted rental prices as a dictionary in the JSON response.
69
+ """
70
+ # Get the uploaded CSV file from the request
71
+ file = request.files['file']
72
+
73
+ # Read the CSV file into a Pandas DataFrame
74
+ input_data = pd.read_csv(file)
75
+
76
+ # Make predictions for all properties in the DataFrame (get log_prices)
77
+ predicted_log_sales = model.predict(input_data).tolist()
78
+
79
+ # Calculate actual prices
80
+ predicted_sales = [round(float(np.exp(log_sales)), 2) for log_sales in predicted_log_sales]
81
+
82
+ # Create a dictionary of predictions with property IDs as keys
83
+ store_ids = input_data['Store_Id'].tolist() # Assuming 'id' is the property ID column
84
+ output_dict = dict(zip(store_ids, predicted_sales)) # Use actual prices
85
+
86
+ # Return the predictions dictionary as a JSON response
87
+ return output_dict
88
+
89
+ # Run the Flask application in debug mode if this script is executed directly
90
+ if __name__ == '__main__':
91
+ sales_predictor_api.run(debug=True)
requirements.txt CHANGED
@@ -1,3 +1,11 @@
1
  pandas==2.2.2
 
 
 
 
 
 
 
2
  requests==2.28.1
 
3
  streamlit==1.43.2
 
1
  pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ xgboost==2.1.4
5
+ joblib==1.4.2
6
+ Werkzeug==2.2.2
7
+ flask==2.2.2
8
+ gunicorn==20.1.0
9
  requests==2.28.1
10
+ uvicorn[standard]
11
  streamlit==1.43.2