dpanchali committed on
Commit
4c1b22b
·
verified ·
1 Parent(s): d91bd5b

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. Dockerfile +2 -2
  2. app.py +69 -54
  3. final_sales_forecasting_model.joblib +3 -0
Dockerfile CHANGED
@@ -11,6 +11,6 @@ RUN pip install --no-cache-dir -r requirements.txt
11
 
12
  # Define the command to start the application using Gunicorn with 4 worker processes
13
  # - `-w 4`: Uses 4 worker processes for handling requests
14
- # - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
15
  # - `app:app`: Runs the Flask app (assuming `app.py` contains the Flask instance named `app`)
16
- CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:app"]
 
11
 
12
  # Define the command to start the application using Gunicorn with 4 worker processes
13
  # - `-w 4`: Uses 4 worker processes for handling requests
14
+ # - `-b 0.0.0.0:5000`: Binds the server to port 5000 on all network interfaces (matching the Flask app port)
15
  # - `app:app`: Runs the Flask app (assuming `app.py` contains the Flask instance named `app`)
16
+ CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:5000", "app:app"]
app.py CHANGED
@@ -2,70 +2,85 @@ import joblib
2
  import pandas as pd
3
  from flask import Flask, request, jsonify
4
 
5
- # Initialize Flask app with a name
6
- app = Flask("Telecom Customer Churn Predictor")
7
 
8
- # Load the trained churn prediction model
9
- model = joblib.load("churn_prediction_model_v1_0.joblib")
10
 
11
  # Define a route for the home page
12
- @app.get('/')
13
  def home():
14
- return "Welcome to the Telecom Customer Churn Prediction API"
15
 
16
- # Define an endpoint to predict churn for a single customer
17
- @app.post('/v1/customer')
18
- def predict_churn():
19
  # Get JSON data from the request
20
- customer_data = request.get_json()
21
-
22
- # Extract relevant customer features from the input data
23
- sample = {
24
- 'SeniorCitizen': customer_data['SeniorCitizen'],
25
- 'Partner': customer_data['Partner'],
26
- 'Dependents': customer_data['Dependents'],
27
- 'tenure': customer_data['tenure'],
28
- 'PhoneService': customer_data['PhoneService'],
29
- 'InternetService': customer_data['InternetService'],
30
- 'Contract': customer_data['Contract'],
31
- 'PaymentMethod': customer_data['PaymentMethod'],
32
- 'MonthlyCharges': customer_data['MonthlyCharges'],
33
- 'TotalCharges': customer_data['TotalCharges']
34
- }
35
-
36
- # Convert the extracted data into a DataFrame
37
- input_data = pd.DataFrame([sample])
38
-
39
- # Make a churn prediction using the trained model
40
- prediction = model.predict(input_data).tolist()[0]
41
-
42
- # Map prediction result to a human-readable label
43
- prediction_label = "churn" if prediction == 1 else "not churn"
44
-
45
- # Return the prediction as a JSON response
46
- return jsonify({'Prediction': prediction_label})
47
-
48
- # Define an endpoint to predict churn for a batch of customers
49
- @app.post('/v1/customerbatch')
50
- def predict_churn_batch():
51
- # Get the uploaded CSV file from the request
 
 
 
 
 
 
 
 
 
52
  file = request.files['file']
53
 
54
- # Read the file into a DataFrame
55
- input_data = pd.read_csv(file)
 
 
 
 
 
 
 
 
 
56
 
57
- # Make predictions for the batch data and convert raw predictions into a readable format
58
- predictions = [
59
- 'Churn' if x == 1
60
- else "Not Churn"
61
- for x in model.predict(input_data.drop("customerID",axis=1)).tolist()
62
- ]
63
 
64
- cust_id_list = input_data.customerID.values.tolist()
65
- output_dict = dict(zip(cust_id_list, predictions))
 
 
66
 
67
- return output_dict
68
 
69
- # Run the Flask app in debug mode
70
  if __name__ == '__main__':
71
- app.run(debug=True)
 
 
2
  import pandas as pd
3
  from flask import Flask, request, jsonify
4
 
5
# Initialize Flask app
app = Flask(__name__)

# Load the trained sales forecasting model pipeline once at import time so all
# workers share a single in-memory copy.
# NOTE(review): this commit adds final_sales_forecasting_model.joblib at the
# repository root (see "Files changed"), but this path points into
# backend_files/ — confirm the deployed image actually places the model there,
# otherwise loading fails at startup.
model = joblib.load("backend_files/final_sales_forecasting_model.joblib") # Adjust path as needed
10
 
11
# Define a route for the home page
@app.route('/')
def home():
    """Return a plain-text welcome banner for the API root."""
    banner = "Welcome to the SuperKart Sales Forecasting API"
    return banner
15
 
16
# Define an endpoint to predict sales for a single product-store combination
@app.route('/predict_single', methods=['POST'])
def predict_single():
    """Predict sales for one product-store record supplied as JSON.

    Expects a JSON object whose keys match the training columns listed in
    ``sample`` below. Returns ``{'predicted_sales': <value>}`` on success,
    or ``{'error': ...}`` with status 400 (missing/invalid input) or
    500 (pipeline failure).
    """
    # silent=True makes get_json() return None instead of raising on a
    # missing or malformed JSON body, so we can answer with an explicit 400
    # rather than letting a TypeError surface as a misleading 500.
    data = request.get_json(silent=True)
    if data is None:
        return jsonify({'error': 'Request body must be valid JSON'}), 400

    # Extract relevant features from the input data, ensuring correct order and names
    # The keys in the dictionary should match the column names in your original training data X
    try:
        sample = {
            'Product_Id': data['Product_Id'],
            'Product_Weight': data['Product_Weight'],
            'Product_Sugar_Content': data['Product_Sugar_Content'],
            'Product_Allocated_Area': data['Product_Allocated_Area'],
            'Product_Type': data['Product_Type'],
            'Product_MRP': data['Product_MRP'],
            'Store_Id': data['Store_Id'],
            'Store_Establishment_Year': data['Store_Establishment_Year'],
            'Store_Size': data['Store_Size'],
            'Store_Location_City_Type': data['Store_Location_City_Type'],
            'Store_Type': data['Store_Type']
        }

        # Convert the extracted data into a single-row DataFrame
        input_data = pd.DataFrame([sample])

        # Make a sales prediction using the trained model pipeline
        prediction = model.predict(input_data).tolist()[0]

        # Return the prediction as a JSON response
        return jsonify({'predicted_sales': prediction})

    except KeyError as e:
        # A required feature was absent from the JSON payload.
        return jsonify({'error': f'Missing data for key: {e}'}), 400
    except Exception as e:
        # Anything raised by pandas or the model pipeline itself.
        return jsonify({'error': str(e)}), 500
52
+
53
+
54
# Define an endpoint to predict sales for a batch of product-store combinations from a CSV file
@app.route('/predict_batch', methods=['POST'])
def predict_batch():
    """Predict sales for every row of an uploaded CSV file."""
    # Guard clauses: reject requests without a usable file upload first.
    if 'file' not in request.files:
        return jsonify({'error': 'No file part in the request'}), 400

    upload = request.files['file']

    # If the user does not select a file, the browser submits an empty file without a filename.
    if upload.filename == '':
        return jsonify({'error': 'No selected file'}), 400

    if not upload:
        return jsonify({'error': 'Something went wrong with file upload'}), 500

    try:
        # Parse the CSV into a DataFrame and run the pipeline over every row.
        frame = pd.read_csv(upload)
        results = model.predict(frame).tolist()
        return jsonify({'predicted_sales': results})
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500
82
 
 
83
 
 
84
# Entry point for local development only — in the Docker image, gunicorn
# imports `app:app` directly and this block never runs.
if __name__ == '__main__':
    # Run the Flask app
    # NOTE(review): debug=True enables the interactive debugger/reloader and
    # must not be used when this script is launched directly in production.
    app.run(debug=True, host='0.0.0.0', port=5000) # Run on all available interfaces and port 5000
final_sales_forecasting_model.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d3de540996696fa424339c00294222bc600489164d994d93c7e5a3483dccd6e6
3
+ size 65134690