BabuRayapati committed on
Commit
9d4b81e
·
verified ·
1 Parent(s): 2fc4617

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. Dockerfile +16 -0
  2. app.py +105 -0
  3. extraalearn_model_v1_0.joblib +3 -0
  4. requirements.txt +11 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy all files from the current directory to the container's working directory
COPY . .

# Install dependencies from the requirements file without using cache to reduce image size
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Define the command to start the application using Gunicorn with 4 worker processes
# - `-w 4`: Uses 4 worker processes for handling requests
# - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
# - `app:extraalearn_predictor_api`: module `app` (app.py), application object
#   `extraalearn_predictor_api` — this is the Flask instance actually defined in
#   app.py. (BUG FIX: the original target `app:rental_price_predictor_api` does
#   not exist in app.py, so Gunicorn would fail to import the application.)
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:extraalearn_predictor_api"]
app.py ADDED
@@ -0,0 +1,105 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Import necessary libraries
import numpy as np
import joblib  # For loading the serialized model
import pandas as pd  # For data manipulation
from flask import Flask, request, jsonify  # For creating the Flask API

# Initialize the Flask application.
# NOTE(review): the Dockerfile's Gunicorn CMD must reference this exact object
# name (`app:extraalearn_predictor_api`) — verify they agree.
extraalearn_predictor_api = Flask("Extraalearn Predictor")

# Load the trained machine learning model once at import time (shared by all
# request handlers). The .joblib file must sit next to this script.
model = joblib.load("extraalearn_model_v1_0.joblib")
12
+
13
# Health-check / landing route (GET request)
@extraalearn_predictor_api.get('/')
def home():
    """Return a plain-text welcome message for GET requests to '/'.

    Useful as a quick liveness check that the API is up.
    """
    return "Welcome to the ExtraaLearn Prediction API!"
21
+
22
# Define an endpoint for single-record prediction (POST request)
@extraalearn_predictor_api.post('/v1/extraalearn')
def predict_rental_price():
    """
    Handle POST requests to the '/v1/extraalearn' endpoint.

    Expects a JSON payload containing the ExtraaLearn lead attributes listed
    in ``sample`` below (lower-case keys) and returns the model's prediction
    as a JSON response.

    Returns:
        JSON object of the form {'Predicted Price (in dollars)': <float>}.

    Raises:
        KeyError (surfaced as HTTP 500): if any expected key is absent
        from the request payload.
    """
    # Get the JSON data from the request body
    data = request.get_json()

    # Extract relevant features from the JSON data.
    # BUG FIX: the original had `sample =` on its own line with the dict
    # literal starting after a blank line — a SyntaxError. The assignment and
    # the literal are now a single statement.
    sample = {
        'ID': data['ID'],
        'Age': data['age'],
        'Current_Occupation': data['current_occupation'],
        'First_Interaction': data['first_interaction'],
        'Profile_Completed': data['profile_completed'],
        'Website_Visits': data['website_visits'],
        'Time_Spent_On_Website': data['time_spent_on_website'],
        'Page_Views_Per_Visit': data['page_views_per_visit'],
        'Last_Activity': data['last_activity'],
        'Print_Media_Type1': data['print_media_type1'],
        'Print_Media_Type2': data['print_media_type2'],
        'Digital_Media': data['digital_media'],
        'Educational_Channels': data['educational_channels'],
        'Referral': data['referral'],
        'Status': data['status'],
    }

    # Convert the extracted data into a single-row Pandas DataFrame
    input_data = pd.DataFrame([sample])

    # Make prediction. The code treats the model output as a log-scaled price.
    # NOTE(review): the model/file names suggest an ExtraaLearn lead-status
    # model, not a price regressor — confirm np.exp is the right inverse
    # transform for this model's target.
    predicted_log_price = model.predict(input_data)[0]

    # Convert the log-scale prediction back to the original scale
    predicted_price = np.exp(predicted_log_price)

    # Cast to a built-in float and round: np.exp returns a NumPy scalar,
    # which Flask's jsonify cannot serialize directly.
    predicted_price = round(float(predicted_price), 2)

    # Return the prediction as JSON
    return jsonify({'Predicted Price (in dollars)': predicted_price})
72
+
73
+
74
# Define an endpoint for batch prediction (POST request)
@extraalearn_predictor_api.post('/v1/extraalearnbatch')
def predict_rental_price_batch():
    """
    Handle POST requests to the '/v1/extraalearnbatch' endpoint.

    Expects a multipart upload under form field 'file': a CSV whose columns
    match the model's training features and include an 'ID' column. Returns
    a JSON object mapping each row's ID to its prediction.

    Returns:
        JSON object of the form {<ID>: <float>, ...}.
    """
    # Get the uploaded CSV file from the request; a missing 'file' field
    # raises and Flask responds with HTTP 400.
    file = request.files['file']

    # Read the CSV file into a Pandas DataFrame
    input_data = pd.read_csv(file)

    # Make predictions for all rows; output treated as log-scaled values.
    # NOTE(review): confirm the model truly predicts a log-scaled target —
    # the naming ('status') hints at a classifier, where np.exp would be wrong.
    predicted_log_prices = model.predict(input_data).tolist()

    # Convert back to the original scale as plain rounded Python floats so
    # the values are JSON-serializable.
    predicted_status = [round(float(np.exp(log_price)), 2)
                        for log_price in predicted_log_prices]

    # Key each prediction by the row's 'ID' column (.tolist() also converts
    # NumPy scalars to built-in types).
    ids = input_data['ID'].tolist()
    output_dict = dict(zip(ids, predicted_status))

    # CONSISTENCY FIX: return via jsonify like the single-record endpoint,
    # instead of a bare dict, so both handlers build responses the same way.
    return jsonify(output_dict)
102
+
103
# Run the Flask development server (debug mode) only when this script is
# executed directly; under Gunicorn (see Dockerfile CMD) this guard is skipped.
if __name__ == '__main__':
    extraalearn_predictor_api.run(debug=True)
extraalearn_model_v1_0.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1dc9ab9fac09a2b8663913336d98c12b35e22918d66659a91a25ccd3421846f5
3
+ size 272846
requirements.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ xgboost==2.1.4
5
+ joblib==1.4.2
6
+ Werkzeug==2.2.2
7
+ flask==2.2.2
8
+ gunicorn==20.1.0
9
+ requests==2.28.1
10
+ uvicorn[standard]
11
+ streamlit==1.43.2