# superkart1/app.py
# Uploaded via huggingface_hub by Shivam174 (commit 198e21d, verified).
import joblib
import pandas as pd
from flask import Flask, request, jsonify

# Initialize the Flask application.
# Fix: app name corrected from "Superkar" to "Superkart" (typo).
app = Flask("Superkart Model Prediction")

# Load the pre-trained SuperKart prediction model from disk at startup.
# NOTE(review): comments elsewhere call this a "churn" model, but the features
# used below are product/store attributes — this predicts product sales figures.
model = joblib.load("super_kart_prediction_model_v1_0.joblib")
# Root endpoint: plain-text welcome message (doubles as a liveness check).
@app.get('/')
def home():
    """Return a short welcome string for the API root."""
    return "Welcome to the Superkart Model Prediction API"
# Endpoint: predict the sales figure for a single product/store record.
@app.post('/v1/product')
def predict_churn():
    """Predict sales for one product from a JSON request body.

    Expects a JSON object containing every feature field listed below.
    Returns {"Prediction": <value rounded to 2 decimals>} on success,
    or a 400 JSON error naming any missing fields.
    """
    data = request.get_json()
    if data is None:
        # Robustness fix: a non-JSON body previously caused an unhandled error.
        return jsonify({'error': 'Request body must be valid JSON'}), 400

    # Feature fields the model was trained on, in the expected column order.
    required_fields = (
        'Product_Weight',
        'Product_Allocated_Area',
        'Product_MRP',
        'Store_Establishment_Year',
        'Store_Size',
        'Store_Location_City_Type',
        'Product_Sugar_Content',
        'Product_Type',
        'Store_Type',
    )

    # Robustness fix: a missing key previously raised KeyError (HTTP 500);
    # respond with an explicit 400 listing exactly what is missing.
    missing = [field for field in required_fields if field not in data]
    if missing:
        return jsonify({'error': f"Missing fields: {', '.join(missing)}"}), 400

    # Build a single-row DataFrame for the model.
    input_data = pd.DataFrame([{field: data[field] for field in required_fields}])
    prediction = model.predict(input_data).tolist()[0]
    return jsonify({'Prediction': round(prediction, 2)})
# Endpoint: predict sales for a batch of products uploaded as a CSV file.
@app.post('/v1/batch')
def predict_churn_batch():
    """Predict sales for every row of an uploaded CSV.

    Expects a multipart form upload under the key 'file': a CSV with a
    'Product_Id' column plus the model's feature columns. Returns a JSON
    object mapping each Product_Id to its prediction, or a 400 JSON error
    when the upload or required column is missing.
    """
    # Robustness fix: a missing upload previously raised KeyError (HTTP 500).
    file = request.files.get('file')
    if file is None:
        return jsonify({'error': "No file uploaded under form key 'file'"}), 400

    input_data = pd.read_csv(file)
    if 'Product_Id' not in input_data.columns:
        # Robustness fix: a missing column previously raised (HTTP 500).
        return jsonify({'error': "CSV must contain a 'Product_Id' column"}), 400

    # Drop the identifier column before predicting (the model was not trained
    # on it), then pair each Product_Id with its prediction.
    predictions = model.predict(input_data.drop('Product_Id', axis=1)).tolist()
    product_ids = input_data['Product_Id'].tolist()
    return jsonify(dict(zip(product_ids, predictions)))
# Run the Flask development server when this file is executed directly.
if __name__ == '__main__':
    # NOTE(review): debug=True enables the interactive debugger and reloader —
    # for local development only; do not enable in production.
    app.run(debug=True)