# harikbab02's picture
# Upload folder using huggingface_hub
# b44786d verified
# Importing necessary libraries
from flask import Flask, request, jsonify # Flask framework and JSON utilities
from flask_cors import CORS # To handle Cross-Origin Resource Sharing (CORS)
import joblib # For loading saved model/pipeline objects
import numpy as np
import pandas as pd
import logging # For logging API activity
# Configure root logging: DEBUG level (verbose — consider INFO in production)
# with timestamp, severity, and message in each record.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
# Initialize the Flask app
app = Flask(__name__)
# Enable CORS for the '/predict' endpoint only, allowing requests from any origin
# (required so browser-based frontends hosted elsewhere can call the API).
CORS(app, resources={r"/predict": {"origins": "*"}})
# Load the models, preprocessing pipeline, and feature list at startup.
# Pre-bind every name to None so that a failed load produces a clear
# AttributeError (surfaced as JSON by /predict) instead of a NameError.
model_gradboost = None  # Tuned Gradient Boosting model
model_rndmfrst = None   # Tuned Random Forest model
pipeline = None         # Preprocessing pipeline
feature_names = None    # Ordered feature list (optional, if used)
try:
    model_gradboost = joblib.load('gradboost_RSCV.joblib')
    model_rndmfrst = joblib.load('RndmFrstReg_RSCV.joblib')
    pipeline = joblib.load('pipeline.joblib')
    feature_names = joblib.load('feature_names.joblib')
except Exception as ex:
    # Boundary handler: the app still starts so the status endpoint works,
    # but predictions will fail until the artifacts are present.
    logging.error(f'Exception in loading joblib file: {ex}')
# Names of the input features /predict requires, in the column order the
# preprocessing pipeline was fitted on.
required_features = (
    'Product_Weight Product_Sugar_Content Product_Allocated_Area '
    'Product_Type Product_MRP Store_Size Store_Location_City_Type '
    'Store_Type Age_Of_Store'
).split()
# Root endpoint used as a simple liveness/status check.
@app.get('/')
def home():
    """Return a plain-text greeting so callers can confirm the API is up."""
    logging.debug("Accessed endpoint of Home page")
    welcome_message = "Welcome to Superkart Prediction system"
    return welcome_message
# Prediction endpoint to handle POST requests
@app.route('/predict', methods=['POST'])
def predict():
    """Predict product sales from a JSON payload with both trained models.

    Expects a JSON object containing every name in ``required_features``.
    Returns ``{'gradientBoosting': float, 'randomForest': float}`` on
    success; error payloads now carry proper HTTP status codes
    (400 for bad/missing input, 500 for server-side failures) instead of
    the original always-200 responses.
    """
    try:
        # silent=True returns None on malformed/absent JSON instead of
        # raising, so bad requests get a clean 400 rather than a 500.
        data = request.get_json(silent=True)
        logging.debug(f"Input received: {data}")
        # Reject empty or unparseable payloads.
        if not data:
            return jsonify({'Error': 'No data provided for prediction'}), 400
        # Validate all required features are present before reading values.
        feature_missing = [feature for feature in required_features if feature not in data]
        if feature_missing:
            logging.error(f"Exception feature missing: {feature_missing}")
            return jsonify({'Exception': f"Feature missing {feature_missing}"}), 400
        # Build a single-row DataFrame in the column order the pipeline
        # was fitted on; numeric fields are coerced to float (a bad value
        # raises ValueError, caught below as a 500).
        feature_for_prediction = pd.DataFrame([{
            'Product_Weight': float(data['Product_Weight']),
            'Product_Sugar_Content': data['Product_Sugar_Content'],
            'Product_Allocated_Area': float(data['Product_Allocated_Area']),
            'Product_Type': data['Product_Type'],
            'Product_MRP': float(data['Product_MRP']),
            'Store_Size': data['Store_Size'],
            'Store_Location_City_Type': data['Store_Location_City_Type'],
            'Store_Type': data['Store_Type'],
            'Age_Of_Store': float(data['Age_Of_Store'])
        }], columns=required_features)
        # Transform input using the preprocessing pipeline
        features_scaled = pipeline.transform(feature_for_prediction)
        logging.debug(f"Features scaled: {features_scaled}")
        # Predict using both models
        prediction_gradboost = model_gradboost.predict(features_scaled)
        prediction_randfrst = model_rndmfrst.predict(features_scaled)
        # Log predictions
        logging.debug(f"Prediction gradboost: {prediction_gradboost}")
        logging.debug(f"Prediction randmfrst: {prediction_randfrst}")
        # Return predictions as JSON (cast from numpy scalars to plain floats
        # so jsonify can serialize them)
        return jsonify({
            'gradientBoosting': float(prediction_gradboost[0]),
            'randomForest': float(prediction_randfrst[0])
        })
    except Exception as ex:
        # Top-level boundary: log the full traceback, answer with a 500.
        logging.exception('Exception during prediction')
        return jsonify({'Exception': str(ex)}), 500
# Run the Flask app on port 7860 (as required by Hugging Face Docker Spaces)
if __name__ == '__main__':
    # Bind to 0.0.0.0 so the containerized app is reachable through the
    # mapped port; debug=False to avoid the reloader/debugger in deployment.
    app.run(host='0.0.0.0', port=7860, debug=False)