Tamilvelan committed on
Commit
a3a1b68
·
verified ·
1 Parent(s): a65c2db

Upload folder using huggingface_hub

Browse files
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.9-slim
2
+
3
+ # Set the working directory inside the container
4
+ WORKDIR /app
5
+
6
+ # Copy all files from the current directory to the container's working directory
7
+ COPY . .
8
+
9
+ # Install dependencies from the requirements file without using cache to reduce image size
10
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
11
+
12
+ # Define the command to start the application using Gunicorn with 4 worker processes
13
+ # - `-w 4`: Uses 4 worker processes for handling requests
14
+ # - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
15
+ # - `app:app`: Runs the Flask app (assuming `superkart_app.py` contains the Flask instance named `app`)
16
+ CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "superkart_app:superkart_sales_predictor_api"]
Store_sales_prediction_model_v1_0.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c87fba09a66a8c846efb84df0699cbe47e3f39b6da17efc6740eb80eaab5c1ec
3
+ size 208397
requirements.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ xgboost==2.1.4
5
+ joblib==1.4.2
6
+ Werkzeug==2.2.2
7
+ flask==2.2.2
8
+ gunicorn==20.1.0
9
+ requests==2.28.1
10
+ uvicorn[standard]
11
+ streamlit==1.43.2
superkart_app.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Import necessary libraries
2
+ import numpy as np
3
+ import joblib # For loading the serialized model
4
+ import pandas as pd # For data manipulation
5
+ from flask import Flask, request, jsonify # For creating the Flask API
6
+
7
+ # Initialize the Flask application
8
+ superkart_sales_predictor_api = Flask("SuperKart Sales Predictor")
9
+
10
+ # Load the trained machine learning model
11
+ # NOTE: Ensure the joblib file name matches your saved model file
12
+ model = joblib.load("Store_sales_prediction_model_v1_0.joblib")
13
+
14
+ # Define a route for the home page (GET request)
15
+ @superkart_sales_predictor_api.get('/')
16
+ def home():
17
+ """
18
+ This function handles GET requests to the root URL ('/') of the API.
19
+ It returns a simple welcome message for the SuperKart API New.
20
+ """
21
+ return "Welcome to the SuperKart Sales Prediction API!"
22
+
23
+ # Define an endpoint for single product-store prediction (POST request)
24
+ @superkart_sales_predictor_api.post('/v1/sales')
25
+ def predict_sales_total():
26
+ """
27
+ This function handles POST requests to the '/v1/sales' endpoint.
28
+ It expects a JSON payload containing product and store details and returns
29
+ the predicted sales total as a JSON response.
30
+ """
31
+ # Get the JSON data from the request body
32
+ product_store_data = request.get_json()
33
+
34
+ # Extract ALL relevant features from the JSON data.
35
+ # Features must match the exact order and names used during model training (X_train columns).
36
+ # NOTE: This list MUST be updated to include all features (including engineered/encoded ones)
37
+ sample = {
38
+ # Numerical/Engineered Features
39
+ 'Product_Weight': product_store_data['Product_Weight'],
40
+ 'Product_MRP': product_store_data['Product_MRP'],
41
+ 'Store_Age': product_store_data['Store_Age'], # Engineered
42
+ 'Product_Allocated_Area_Log': product_store_data['Product_Allocated_Area_Log'], # Engineered/Transformed
43
+
44
+ # Ordinal Encoded Features (if used Label Encoding 0, 1, 2, 3...)
45
+ 'Store_Size_Encoded': product_store_data['Store_Size_Encoded'],
46
+ 'Store_Location_City_Type_Encoded': product_store_data['Store_Location_City_Type_Encoded'],
47
+
48
+ # Nominal/One-Hot Encoded Features (must match model's columns)
49
+ # Assuming we pass the raw categorical values and let the Pipeline handle encoding.
50
+ # If your pipeline expects raw data, this is fine. If it expects encoded data,
51
+ # you need to manually encode or structure the input differently.
52
+ # For simplicity, we pass the original features here and rely on a comprehensive Pipeline:
53
+ 'Product_Sugar_Content': product_store_data['Product_Sugar_Content'],
54
+ 'Product_Type': product_store_data['Product_Type'],
55
+ 'Store_Type': product_store_data['Store_Type'],
56
+ }
57
+
58
+ # Convert the extracted data into a Pandas DataFrame
59
+ input_data = pd.DataFrame([sample])
60
+
61
+ # Make prediction (predicted_sales_total)
62
+ # Since our target was NOT log-transformed, we use the prediction directly.
63
+ predicted_sales_total = model.predict(input_data)[0]
64
+
65
+ # Predicted Sales total does not need np.exp or log price conversion.
66
+ # Convert prediction to Python float and round
67
+ predicted_sales_total = round(float(predicted_sales_total), 2)
68
+ # The conversion to float() is still needed to avoid JSON serialization errors from NumPy types.
69
+
70
+ # Return the predicted sales
71
+ return jsonify({'Predicted Sales Total (in dollars)': predicted_sales_total})
72
+
73
+ # Run the Flask application in debug mode if this script is executed directly
74
+ if __name__ == '__main__':
75
+ # You might need to specify host='0.0.0.0' for deployment environments like Docker/Hugging Face
76
+ superkart_sales_predictor_api.run(debug=True)