KarmaLoop committed on
Commit
6dc5182
·
verified ·
1 Parent(s): 5877ecc

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. Dockerfile +16 -0
  2. app.py +67 -0
  3. requirements.txt +11 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM python:3.9-slim

# Set the working directory inside the container
WORKDIR /app

# Copy all files from the current directory to the container's working directory
COPY . .

# Install dependencies from the requirements file without using cache to reduce image size
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Define the command to start the application using Gunicorn with 4 worker processes
# - `-w 4`: Uses 4 worker processes for handling requests
# - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
# - `app:sale_predictor_api`: app.py defines its Flask instance as
#   `sale_predictor_api` (not `churn_predictor_api`); the old target did not
#   exist, so Gunicorn would fail at startup with "Failed to find attribute".
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:sale_predictor_api"]
app.py ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import joblib
import pandas as pd
from flask import Flask, request, jsonify

# Initialize Flask app with a name
sale_predictor_api = Flask("Store sale Predictor")

# Load the trained model once at process startup so the request handlers can
# call `model.predict`. Without this, every prediction endpoint raised a
# NameError because `model` was never defined anywhere in the module, even
# though joblib was imported for exactly this purpose.
# NOTE(review): confirm the artifact filename matches the file shipped in the
# image (COPY . . in the Dockerfile copies the build context as-is).
model = joblib.load("model.pkl")
6
+
7
+
8
# Root route: a plain landing message confirming the service is reachable.
@sale_predictor_api.get('/')
def home():
    """Return a static welcome string for the API's landing page."""
    welcome_message = "Welcome to the Sale Prediction API!"
    return welcome_message
12
+
13
# Endpoint: predict the sale outcome for a single store/product record.
@sale_predictor_api.post('/v1/store')
def predict_sale():
    """Predict the sale outcome for one record posted as JSON.

    The request body must contain every feature field listed below; the
    response is a JSON object of the form {"Prediction": "sale" | "not sale"}.
    Raises KeyError (-> HTTP 500) if a required field is missing.
    """
    payload = request.get_json()

    # Feature columns the model expects, in a fixed order.
    feature_names = (
        'Product_Weight',
        'Product_Sugar_Content',
        'Product_Allocated_Area',
        'Product_Type',
        'Product_MRP',
        'Store_Establishment_Year',
        'Store_Size',
        'Store_Location_City_Type',
        'Store_Type',
    )

    # Pull exactly the expected fields out of the payload and wrap them in a
    # single-row DataFrame for the model.
    frame = pd.DataFrame([{name: payload[name] for name in feature_names}])

    # predict() returns an array-like with one entry for our one row.
    raw_prediction = model.predict(frame).tolist()[0]

    # Translate the numeric class into a human-readable label.
    label = "sale" if raw_prediction == 1 else "not sale"

    return jsonify({'Prediction': label})
43
+
44
# Endpoint: predict sale outcomes for a whole batch uploaded as a CSV file.
@sale_predictor_api.post('/v1/storebatch')
def predict_sale_batch():
    """Predict sale outcomes for every row of an uploaded CSV.

    The multipart upload field `file` must hold a CSV with a `CustomerId`
    column plus the model's feature columns. Returns a JSON object mapping
    each CustomerId to its predicted label.
    """
    uploaded = request.files['file']

    # Parse the uploaded file straight into a DataFrame.
    batch_frame = pd.read_csv(uploaded)

    # Score every row (identifier column excluded), then map each raw class
    # to its human-readable label.
    raw_predictions = model.predict(batch_frame.drop("CustomerId", axis=1)).tolist()
    labels = ['sale' if outcome == 1 else "Not sale" for outcome in raw_predictions]

    # Pair each customer id with its label; Flask serializes the dict as JSON.
    customer_ids = batch_frame.CustomerId.values.tolist()
    return dict(zip(customer_ids, labels))
64
+
65
# Run the Flask development server in debug mode when executed directly.
if __name__ == '__main__':
    # Bug fix: this module's Flask instance is `sale_predictor_api`; there is
    # no name `app`, so the original `app.run(debug=True)` raised NameError.
    sale_predictor_api.run(debug=True)
requirements.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ xgboost==2.1.4
5
+ joblib==1.4.2
6
+ Werkzeug==2.2.2
7
+ flask==2.2.2
8
+ gunicorn==20.1.0
9
+ requests==2.28.1
10
+ uvicorn[standard]
11
+ streamlit==1.43.2