# Docker-project / app.py
# Uploaded via huggingface_hub (user: vamshf, commit: e526624 verified)
from flask import Flask, jsonify, request

import joblib
import pandas as pd

# Initialize the Flask app that serves the SuperKart sales-forecast model.
superKart_Sales_forecast = Flask("SuperKart Sales Forecast")

# Load the serialized model at startup. On failure we keep the app alive
# with loaded_model = None so the /predict route can return a clean HTTP 500
# instead of the process crashing at import time.
try:
    loaded_model = joblib.load('tuned_random_forest_model.pkl')
    print("Model loaded successfully!")
except Exception as e:
    print(f"Error loading model: {e}")
    loaded_model = None  # Set model to None if loading fails
@superKart_Sales_forecast.route('/predict', methods=['POST'])
def predict():
    """Predict sales for rows POSTed as JSON to /predict.

    Expects the request body to be a JSON list of dictionaries, one per row,
    whose keys match the feature columns used during training (excluding the
    target). Returns a JSON list of predictions on success, or a JSON error
    payload with HTTP 500 (model unavailable) / 400 (bad input).
    """
    if loaded_model is None:
        # Model failed to load at startup; nothing we can predict with.
        return jsonify({'error': 'Model not loaded'}), 500
    try:
        # force=True parses the body as JSON even if the client omitted the
        # application/json Content-Type header.
        data = request.get_json(force=True)

        # NOTE(review): this assumes the incoming columns already match the
        # order/names of X_train at training time — TODO: persist the
        # training column order alongside the model and reindex here.
        input_df = pd.DataFrame(data)

        predictions = loaded_model.predict(input_df)
        # numpy arrays are not JSON-serializable; convert to a plain list.
        return jsonify(predictions.tolist())
    except Exception as e:
        # Malformed JSON or a schema mismatch is a client-side error.
        return jsonify({'error': str(e)}), 400
# To run the Flask superKart_Sales_forecast (for local testing)
#if __name__ == '__main__':
# # This will run the server locally on port 5000
# # In a production environment, you would use a production-ready server like Gunicorn or uWSGI
# superKart_Sales_forecast.run(debug=True, host='0.0.0.0', port=5000)