File size: 4,056 Bytes
b44786d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104

# Importing necessary libraries
from flask import Flask, request, jsonify         # Flask framework and JSON utilities
from flask_cors import CORS                       # To handle Cross-Origin Resource Sharing (CORS)
import joblib                                     # For loading saved model/pipeline objects
import numpy as np
import pandas as pd
import logging                                    # For logging API activity

# Set logging level and format
# DEBUG level logs every request payload — fine for a demo Space, but consider
# INFO in production since payloads are logged verbatim below.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')

# Initialize the Flask app
app = Flask(__name__)

# Enable CORS for the '/predict' endpoint only (root endpoint stays same-origin);
# "*" allows any origin — acceptable for a public demo, tighten for production.
CORS(app, resources={r"/predict": {"origins": "*"}})

# Load the models, pipeline, and feature list from joblib files that must sit
# next to this script in the working directory.
try:
    model_gradboost = joblib.load('gradboost_RSCV.joblib')         # Tuned Gradient Boosting model
    model_rndmfrst = joblib.load('RndmFrstReg_RSCV.joblib')        # Tuned Random Forest model
    pipeline = joblib.load('pipeline.joblib')                      # Preprocessing pipeline
    feature_names = joblib.load('feature_names.joblib')            # Ordered feature list (optional, if used)

except Exception as ex:
    # NOTE(review): the error is logged but NOT re-raised, so the app still
    # starts with these names undefined; the first /predict call will then
    # fail with a NameError caught by its handler. Best-effort startup appears
    # intentional — confirm, otherwise re-raise here to fail fast.
    logging.error(f'Exception in loading joblib file: {ex}')

# List of required input features for prediction; also fixes the column order
# of the DataFrame handed to the preprocessing pipeline in /predict.
required_features = [
    'Product_Weight',
    'Product_Sugar_Content',
    'Product_Allocated_Area',
    'Product_Type',
    'Product_MRP',
    'Store_Size',
    'Store_Location_City_Type',
    'Store_Type',
    'Age_Of_Store'
]

# Default endpoint for status check
@app.get('/')
def home():
    """Root endpoint: simple liveness check returning a welcome string."""
    # Record the hit so API activity shows up in the debug log.
    logging.debug("Accessed endpoint of Home page")
    greeting = "Welcome to Superkart Prediction system"
    return greeting

# Prediction endpoint to handle POST requests
@app.route('/predict', methods=['POST'])
def predict():
    """Predict sales for one product/store record using both trained models.

    Expects a JSON object containing every key in ``required_features``.
    Returns JSON with keys ``gradientBoosting`` and ``randomForest`` on
    success (HTTP 200); error responses carry a descriptive JSON body with
    HTTP 400 (bad/missing input) or 500 (unexpected server failure).
    """
    try:
        # silent=True makes Flask return None for a missing or malformed JSON
        # body instead of raising its own HTML error page, so the client
        # always gets a JSON response from this endpoint.
        data = request.get_json(silent=True)
        logging.debug(f"Input received: {data}")

        # If no (or unparseable) data sent — client error, not 200.
        if not data:
            return jsonify({'Error': 'No data provided for prediction'}), 400

        # Check for missing features in a single pass over the required list.
        feature_missing = [feature for feature in required_features if feature not in data]
        if feature_missing:
            logging.error(f"Exception feature missing: {feature_missing}")
            return jsonify({'Exception': f"Feature missing {feature_missing}"}), 400

        # Convert input JSON to a one-row DataFrame; column order is pinned to
        # required_features so it matches what the pipeline was fitted on.
        feature_for_prediction = pd.DataFrame([{
            'Product_Weight': float(data['Product_Weight']),
            'Product_Sugar_Content': data['Product_Sugar_Content'],
            'Product_Allocated_Area': float(data['Product_Allocated_Area']),
            'Product_Type': data['Product_Type'],
            'Product_MRP': float(data['Product_MRP']),
            'Store_Size': data['Store_Size'],
            'Store_Location_City_Type': data['Store_Location_City_Type'],
            'Store_Type': data['Store_Type'],
            'Age_Of_Store': float(data['Age_Of_Store'])
        }], columns=required_features)

        # Transform input using the preprocessing pipeline
        features_scaled = pipeline.transform(feature_for_prediction)
        logging.debug(f"Features scaled: {features_scaled}")

        # Predict using both models
        prediction_gradboost = model_gradboost.predict(features_scaled)
        prediction_randfrst = model_rndmfrst.predict(features_scaled)

        # Log predictions
        logging.debug(f"Prediction gradboost: {prediction_gradboost}")
        logging.debug(f"Prediction randmfrst: {prediction_randfrst}")

        # Return predictions as JSON (first — and only — row of each result)
        return jsonify({
            'gradientBoosting': float(prediction_gradboost[0]),
            'randomForest': float(prediction_randfrst[0])
        })

    except (TypeError, ValueError) as ex:
        # Non-numeric values for numeric fields (float() failures) are the
        # client's fault — report them as 400, not 500.
        logging.error(f'Exception: {ex}')
        return jsonify({'Exception': str(ex)}), 400
    except Exception as ex:
        # Anything else (e.g. models failed to load at startup) is a server
        # error; surface it with a 500 status instead of a misleading 200.
        logging.error(f'Exception: {ex}')
        return jsonify({'Exception': str(ex)}), 500

# Run the Flask app on port 7860 (as required by Hugging Face Docker Spaces)
if __name__ == '__main__':
    # host='0.0.0.0' exposes the server outside the container;
    # debug=False avoids the auto-reloader and interactive debugger in deployment.
    app.run(host='0.0.0.0', port=7860, debug=False)