# Backend / app.py
# Uploaded by laxmikantdeshpande via huggingface_hub (commit eb245f7, verified)
import joblib
import pandas as pd
import xgboost as xgb
from xgboost import XGBRegressor
import numpy as np
from flask import Flask, request, jsonify
# Create the Flask application instance for this module.
app = Flask(__name__)
# Load the trained SuperKart sales prediction model from disk at import time.
# NOTE(review): the old comment called this a "churn" model, but the filename
# and the routes below indicate a sales regressor — confirm against training code.
model = joblib.load("superkart_prediction_model_v1_0.joblib")
# Define a route for the home page
# Root route: a simple liveness/welcome banner for the API.
@app.get('/')
def home():
    """Return a plain-text welcome message for the Superkart Sales Forecast API."""
    welcome_message = "Welcome to the Superkart Sales Forecast API"
    return welcome_message
# Endpoint: predict the sales forecast for a single product/store record.
@app.route('/v1/product', methods=['POST'])
def predict_forecast():
    """
    Handle POST requests to the '/v1/product' endpoint.

    Expects a JSON payload containing the product/store features listed in
    ``required_fields`` and returns the predicted sales figure as JSON.

    Returns:
        200 with ``{'Predicted Sales': <float>}`` on success.
        400 with ``{'error': ...}`` when the body is not JSON or fields are missing.
    """
    required_fields = [
        'Product_Id', 'Product_Weight', 'Product_Sugar_Content',
        'Product_Allocated_Area', 'Product_Type', 'Product_MRP',
        'Store_Id', 'Store_Establishment_Year', 'Store_Size',
        'Store_Location_City_Type', 'Store_Type',
    ]
    superkart_data = request.get_json(silent=True)
    if superkart_data is None:
        # Previously a non-JSON body crashed with an unhandled exception (HTTP 500).
        return jsonify({'error': 'Request body must be valid JSON'}), 400
    missing = [field for field in required_fields if field not in superkart_data]
    if missing:
        # Previously a missing key raised KeyError (HTTP 500); report it instead.
        return jsonify({'error': f'Missing fields: {missing}'}), 400
    # Build a one-row DataFrame in the feature order the model expects.
    sample = {field: superkart_data[field] for field in required_fields}
    input_data = pd.DataFrame([sample])
    predicted_sales_value = model.predict(input_data)[0]
    # Clip to a bounded range before rounding, matching the original behavior.
    # NOTE(review): the batch endpoint applies np.exp() to the model output
    # (treating it as log-sales) but this endpoint does not — confirm which
    # matches the model's training target.
    predicted_sales = np.clip(predicted_sales_value, -100, 100)
    predicted_sales = round(float(predicted_sales), 2)
    return jsonify({'Predicted Sales': predicted_sales})
# Endpoint: predict sales forecasts for a batch of products uploaded as CSV.
@app.route('/v1/superKartBatch', methods=['POST'])
def predict_churn_batch():
    """
    Handle POST requests to the '/v1/superKartBatch' endpoint.

    Expects a multipart upload with a 'file' field containing a CSV of
    product/store feature rows, and returns a JSON object mapping product
    ids to predicted sales values.

    Returns:
        200 with ``{<product_id>: <predicted_sales>, ...}`` on success.
        400 with ``{'error': ...}`` when the file upload is missing.
    """
    if 'file' not in request.files:
        # Previously a missing upload raised an unhandled exception (HTTP 500).
        return jsonify({'error': "Missing 'file' upload"}), 400
    input_data = pd.read_csv(request.files['file'])
    # The model output is treated as log-sales here: clip before exponentiating
    # so exp() cannot overflow.
    # NOTE(review): the single-record endpoint does NOT apply exp — confirm
    # which matches the model's training target.
    predicted_log_sales = model.predict(input_data)
    safe_log_sales = np.clip(predicted_log_sales, -100, 100)
    # Bug fix: the original computed clipped exp values and then immediately
    # overwrote them with an UNCLIPPED exp, defeating the overflow guard.
    predicted_sales = [round(float(np.exp(value)), 2) for value in safe_log_sales]
    # Prefer an explicit id column when present; otherwise synthesize
    # positional ids so every prediction still gets a key.
    if 'ProductId' in input_data.columns:
        product_id_list = input_data['ProductId'].tolist()
    elif 'Product_Id' in input_data.columns:
        product_id_list = input_data['Product_Id'].tolist()
    else:
        product_id_list = [f"Product_{i}" for i in range(len(predicted_sales))]
    return jsonify(dict(zip(product_id_list, predicted_sales)))