HarishMaths committed on
Commit
5575414
·
verified ·
1 Parent(s): c70be70

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. Dockerfile +16 -0
  2. app.py +55 -0
  3. learn_model.joblib +3 -0
  4. requirements.txt +13 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy all files from the current directory to the container's working directory
COPY . .

# Install dependencies from the requirements file without using cache to reduce image size
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Define the command to start the application using Gunicorn with 4 worker processes
# - `-w 4`: Uses 4 worker processes for handling requests
# - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
# - `app:learn_api`: Runs the Flask instance named `learn_api` defined in app.py
#   (was `app:superkart_api` — that name does not exist in app.py, so Gunicorn
#   would fail to find the application object at startup)
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:learn_api"]
app.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# app.py — Flask API serving the ExtraaLearn lead-conversion model.
#
# Fixes applied:
#  * Route decorators referenced `superkart_api`, a name that is never
#    defined (the Flask instance is `learn_api`), so importing this module
#    raised NameError and the service could never start.
#  * The model file is uploaded at the repository root (see the commit's
#    file list) and the Dockerfile copies the repo into the working
#    directory, so the model is loaded from "learn_model.joblib" rather
#    than the non-existent "backend_files/" subdirectory.

# Import necessary libraries
import numpy as np
import joblib  # For loading the serialized model
import pandas as pd  # For data manipulation
from flask import Flask, request, jsonify  # For creating the Flask API

# Initialize Flask app with a name
learn_api = Flask("ExtraaLearn")

# Load the trained lead-conversion model from the repository root.
model = joblib.load("learn_model.joblib")

# Feature keys the model expects, in the order they are read from the
# incoming JSON payload.
_FEATURE_KEYS = [
    'age',
    'current_occupation',
    'first_interaction',
    'profile_completed',
    'website_visits',
    'time_spent_on_website',
    'page_views_per_visit',
    'last_activity',
    'print_media_type1',
    'print_media_type2',
    'digital_media',
    'educational_channels',
    'referral',
]


@learn_api.get('/')
def home():
    """Landing endpoint — confirms the service is up."""
    return "Welcome to the Lead Prediction System"


@learn_api.post('/v1/predict')
def predict_sales():
    """Predict lead conversion for a single record.

    Expects a JSON body containing every key in ``_FEATURE_KEYS``.
    Returns a JSON object ``{"Lead": <prediction>}``.

    NOTE(review): a missing key raises KeyError, which Flask surfaces as a
    500 response — acceptable for a demo, but consider validating input
    and returning a 400 with a descriptive message.
    """
    # Get JSON data from the request
    data = request.get_json()

    # Extract the relevant features from the input payload.
    sample = {key: data[key] for key in _FEATURE_KEYS}

    # Convert the extracted data into a single-row DataFrame, as the
    # scikit-learn pipeline expects tabular input.
    input_data = pd.DataFrame([sample])

    # Make a prediction using the trained model.
    prediction = model.predict(input_data).tolist()[0]

    # Return the prediction as a JSON response.
    return jsonify({'Lead': prediction})


# Run the Flask development server when executed directly (Gunicorn is
# used in the container instead — see the Dockerfile).
if __name__ == '__main__':
    learn_api.run(debug=True)
learn_model.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:61a5bec65ff29b6f5e022d4a7c9b4c9c952c7e2adda476decb4f7ecbc873773a
3
+ size 197596
requirements.txt ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ seaborn==0.13.2
5
+ joblib==1.4.2
6
+ xgboost==2.1.4
7
+ # joblib==1.4.2 is already pinned above — duplicate entry removed
8
+ Werkzeug==2.2.2
9
+ flask==2.2.2
10
+ gunicorn==20.1.0
11
+ requests==2.32.3
12
+ uvicorn[standard]
13
+ streamlit==1.43.2