# ExtraaLearn_API / app.py
# Uploaded by vijayendras via huggingface_hub (commit d47760e, verified)
# Import necessary libraries
import numpy as np
import joblib # For loading the serialized model
import pandas as pd # For data manipulation
from flask import Flask, request, jsonify # For creating the Flask API
# Initialize the Flask application; this object is referenced by the
# route decorators below and by the __main__ launcher at the bottom.
extraalearn_api = Flask("ExtraaLearn")
# Load the serialized, pre-trained model at import time.
# NOTE(review): path is relative — assumes the .joblib file sits in the
# process working directory; confirm against the deployment layout.
model = joblib.load("extraalearn_model.joblib")
# Landing route: lets callers confirm the service is up.
@extraalearn_api.get('/')
def home():
    """Return a plain-text welcome message for the root URL."""
    greeting = "Welcome to the ExtraaLearn System"
    return greeting
# Endpoint to predict conversion for a single lead.
@extraalearn_api.post('/v1/predict')
def predict_sales():
    """Predict the 'Sales' outcome for one lead.

    Expects a JSON body whose lower-snake-case keys carry the lead's
    features. Returns ``{"Sales": <prediction>}`` on success, or a
    JSON error with HTTP 400 when the body is not valid JSON or any
    required field is missing.
    """
    # silent=True yields None instead of raising on a malformed body,
    # so we can answer with a clean 400 rather than an opaque 500.
    data = request.get_json(silent=True)
    if data is None:
        return jsonify({'error': 'Request body must be valid JSON'}), 400

    # Model feature name -> expected key in the incoming JSON payload.
    feature_keys = {
        'Age': 'age',
        'Current_Occupation': 'current_occupation',
        'First_Interaction': 'first_interaction',
        'Profile_Completed': 'profile_completed',
        'Website_Visits': 'website_visits',
        'Time_Spent_on_Website': 'time_spent_on_website',
        'Page_Views_Per_Visit': 'page_views_per_visit',
        'Last_Activity': 'last_activity',
        'Print_Media_Type1': 'print_media_type1',
        'Print_Media_Type2': 'print_media_type2',
        'Digital_Media': 'digital_media',
        'Educational_Channels': 'educational_channels',
        'Referral': 'referral',
    }

    # Report every missing field at once instead of failing with a
    # KeyError on the first one.
    missing = [key for key in feature_keys.values() if key not in data]
    if missing:
        return jsonify({'error': f"Missing fields: {', '.join(missing)}"}), 400

    # Build a single-row DataFrame so the model sees named columns.
    sample = {feature: data[key] for feature, key in feature_keys.items()}
    input_data = pd.DataFrame([sample])

    prediction = model.predict(input_data)[0]
    # model.predict typically returns a numpy array; numpy scalars are
    # not JSON-serializable by every Flask version, so convert to a
    # native Python type when possible.
    if hasattr(prediction, 'item'):
        prediction = prediction.item()
    return jsonify({'Sales': prediction})
# Launch the development server when this file is executed directly.
# NOTE(review): debug=True enables the interactive debugger and
# auto-reload — dev-only; confirm this is not the production entry point.
if __name__ == '__main__':
    extraalearn_api.run(debug=True)