src/Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy only the dependency manifest first so the expensive pip-install layer
# is cached and only rebuilt when requirements.txt itself changes
# (copying all sources first invalidated the cache on every code edit).
COPY requirements.txt .

# Install dependencies without the pip cache to keep the image small
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Now copy the application code and model artifact into the image
COPY . .

# Start the API with Gunicorn:
# - `-w 4`: 4 worker processes for handling requests
# - `-b 0.0.0.0:7860`: bind to port 7860 on all network interfaces
# - `app:super_kart_predictor_api`: the Flask instance named
#   `super_kart_predictor_api` defined in app.py
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:super_kart_predictor_api"]
src/README.md ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: SuperKartPredictionBackend
3
+ emoji: 😻
4
+ colorFrom: pink
5
+ colorTo: red
6
+ sdk: docker
7
+ pinned: false
8
+ ---
9
+
10
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
src/app.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Third-party imports
import numpy as np
import joblib  # deserializes the persisted scikit-learn model
import pandas as pd  # builds the DataFrames the model consumes
from flask import Flask, request, jsonify  # web framework for the REST API

# Flask application instance; Gunicorn serves it as `app:super_kart_predictor_api`
# (see the Dockerfile CMD).
super_kart_predictor_api = Flask("Super Kart Price Predictor")

# Load the trained model once at startup so every request reuses the same
# in-memory estimator instead of re-reading the file per call.
model = joblib.load("super_kart_prediction_model_v1.joblib")
12
+
13
# Define a route for the home page (GET request)
@super_kart_predictor_api.get('/')
def home():
    """
    Handle GET requests to the root URL ('/').

    Returns a plain-text welcome message so callers can verify the
    service is up.
    """
    # Bug fix: the message previously referred to an unrelated
    # "Airbnb Rental Price Prediction API" (copy-paste leftover).
    return "Welcome to the Super Kart Sale Total Prediction API!"
21
+
22
# Define an endpoint for single product sale total prediction (POST request)
@super_kart_predictor_api.post('/v1/saletotal')
def predict_sale_total_price():
    """
    Handle POST requests to the '/v1/saletotal' endpoint.

    Expects a JSON payload containing the ten product/store features
    listed in ``feature_names`` below and returns the predicted sale
    total as a JSON response. A missing field raises KeyError, which
    Flask reports as an HTTP 500.
    """
    # Parse the JSON request body
    product_data = request.get_json()

    # Feature names the model was trained on; the single-row DataFrame
    # below is built in this order.
    feature_names = [
        'Product_Weight',
        'Product_Sugar_Content',
        'Product_Allocated_Area',
        'Product_Type',
        'Product_MRP',
        'Store_Id',
        'Store_Establishment_Year',
        'Store_Size',
        'Store_Location_City_Type',
        'Store_Type',
    ]
    sample = {name: product_data[name] for name in feature_names}

    # Convert the extracted data into a single-row Pandas DataFrame
    input_data = pd.DataFrame([sample])

    # Predict and cast to a built-in float: model.predict returns a NumPy
    # scalar, which Flask's JSON encoder cannot serialize reliably.
    predicted_sale_total_price = float(model.predict(input_data)[0])

    # Return the price as JSON
    return jsonify({'Predicted total revenue generated by the sale': predicted_sale_total_price})
55
+
56
+
57
# Define an endpoint for batch prediction (POST request)
# (Function name kept as-is for endpoint-name stability, although it is a
# misnomer — it predicts sale totals, not rental prices.)
@super_kart_predictor_api.post('/v1/saletotalbatch')
def predict_rental_price_batch():
    """
    Handle POST requests to the '/v1/saletotalbatch' endpoint.

    Expects a multipart upload named 'file' containing a CSV of product
    rows (including a 'Product_Id' column) and returns a JSON object
    mapping each Product_Id to its predicted sale total.
    """
    # Get the uploaded CSV file from the request
    file = request.files['file']

    # Read the CSV file into a Pandas DataFrame
    input_data = pd.read_csv(file)

    # NOTE(review): the entire DataFrame — including 'Product_Id' — is fed
    # to model.predict, whereas the single-item endpoint omits Product_Id.
    # Presumably the pipeline selects/drops columns by name; confirm.
    predicted_sale_totals = model.predict(input_data).tolist()

    # Map each product's ID to its prediction
    product_ids = input_data['Product_Id'].tolist()
    output_dict = dict(zip(product_ids, predicted_sale_totals))

    # Use jsonify for consistency with the single-prediction endpoint
    # (previously returned a raw dict and relied on Flask's implicit
    # dict-to-JSON conversion).
    return jsonify(output_dict)
80
+
81
# Development entry point: run Flask's built-in server in debug mode when the
# script is executed directly. In the container, Gunicorn serves the app
# instead (see the Dockerfile CMD).
# NOTE(review): debug=True enables the interactive debugger — never expose it
# in production; confirm this path is dev-only.
if __name__ == '__main__':
    super_kart_predictor_api.run(debug=True)
src/requirements.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ xgboost==2.1.4
5
+ joblib==1.4.2
6
+ Werkzeug==2.2.2
7
+ flask==2.2.2
8
+ gunicorn==20.1.0
9
+ requests==2.28.1
10
+ uvicorn[standard]
11
+ streamlit==1.43.2
src/super_kart_prediction_model_v1.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2406718cd02c0602a49715b956afefd4ac44c8a0f888a06c80b260333943d643
3
+ size 1074348