# SuperKart Sales Prediction API (Flask app).
# NOTE(review): this file was extracted from a Hugging Face Spaces page; the
# original "Spaces / Sleeping" lines were page chrome, not code.
import numpy as np
import joblib
import pandas as pd
from flask import Flask, request, jsonify

# Initialize the Flask application that serves the prediction endpoints.
superkart_api = Flask("superkart_sales_api")
# Load the trained model (must be in the same folder as app.py).
try:
    # A relative path is used, so the process's working directory must be the
    # app directory — TODO confirm this holds in the deployment environment.
    model = joblib.load("superkart_prediction.joblib")
    print("β Model loaded successfully.")
except Exception as e:
    print("β Model load failed:", e)
    # Bare `raise` re-raises the active exception with its original traceback
    # intact (fix: was `raise e`, which rebinds the traceback).
    raise
# Health check to show the backend is running.
# NOTE(review): the route decorator was lost in extraction; "/" is the
# conventional health-check path — confirm against the deployed API.
@superkart_api.route("/", methods=["GET"])
def home():
    """Return a plain-text banner confirming the API is up."""
    return "β You are on Sales Prediction API for SuperKart"
# Single-record prediction endpoint.
# NOTE(review): the route decorator was lost in extraction; "/predict" is a
# conventional guess — confirm against the deployed API.
@superkart_api.route("/predict", methods=["POST"])
def predict_sales():
    """Predict sales for one product/store record supplied as JSON.

    Expects a JSON object containing every field listed in
    ``required_fields``. Returns ``{'Predicted_Sales': <value>}`` on success,
    or an ``{'error': ...}`` payload with HTTP 400 (bad/missing input) or
    HTTP 500 (prediction failure).
    """
    try:
        data = request.get_json()
        if data is None:
            return jsonify({'error': "No JSON payload received"}), 400
        print("Raw incoming data:", data)

        required_fields = [
            'Product_Id_char',
            'Product_Weight',
            'Product_Sugar_Content',
            'Product_Allocated_Area',
            'Product_MRP',
            'Store_Size',
            'Store_Location_City_Type',
            'Store_Type',
            'Store_Age_Years',
            'Product_Type_Category',
        ]
        missing_fields = [f for f in required_fields if f not in data]
        if missing_fields:
            return jsonify({'error': f"Missing fields: {missing_fields}"}), 400

        sample = {
            'Product_Id_char': data['Product_Id_char'],
            'Product_Weight': float(data['Product_Weight']),
            'Product_Sugar_Content': data['Product_Sugar_Content'],
            # log1p is applied here, presumably mirroring a transform used at
            # training time — TODO confirm against the training pipeline.
            'Product_Allocated_Area': np.log1p(float(data['Product_Allocated_Area'])),
            'Product_MRP': float(data['Product_MRP']),
            'Store_Size': data['Store_Size'],
            'Store_Location_City_Type': data['Store_Location_City_Type'],
            'Store_Type': data['Store_Type'],
            'Store_Age_Years': int(data['Store_Age_Years']),
            'Product_Type_Category': data['Product_Type_Category'],
        }
        input_df = pd.DataFrame([sample])
        print("Transformed input for model:\n", input_df)

        # NOTE(review): the batch endpoint applies np.exp() to model output
        # (implying the model predicts log-sales) while this endpoint returns
        # the raw prediction — confirm which is correct and make them agree.
        prediction = model.predict(input_df).tolist()[0]
        return jsonify({'Predicted_Sales': prediction})
    except Exception as e:
        print("β Error during prediction:", str(e))
        return jsonify({'error': f"Prediction failed: {str(e)}"}), 500
# BATCH SALES PREDICTION
# NOTE(review): the original comment said "Corrected decorator to use the
# `superkart_api` instance" but the decorator itself was lost in extraction;
# the path "/predict_batch" is a conventional guess — confirm.
@superkart_api.route("/predict_batch", methods=["POST"])
def predict_sales_batch():
    """Predict sales for every row of an uploaded CSV file.

    Expects a multipart/form-data request with the CSV under key ``'file'``.
    Returns ``{"predictions": {<id or row index>: <sales>}}`` with HTTP 200,
    or ``{"error": ...}`` with HTTP 400 on any failure.
    """
    try:
        file = request.files.get("file")
        if file is None:
            return jsonify({"error": "No CSV file uploaded under key 'file'"}), 400
        df = pd.read_csv(file)

        # Model output is treated as log-sales here and inverted with exp()
        # — note the single-record endpoint does not do this; confirm which
        # behavior is intended.
        log_preds = model.predict(df).tolist()
        predictions = [round(float(np.exp(p)), 2) for p in log_preds]

        # Key results by the first recognized id column, else by row index.
        id_col = next((c for c in ("id", "ID", "Product_Id") if c in df.columns), None)
        if id_col:
            ids = df[id_col].astype(str).tolist()
            result = dict(zip(ids, predictions))
        else:
            result = {str(i): predictions[i] for i in range(len(predictions))}
        return jsonify({"predictions": result}), 200
    except Exception as e:
        return jsonify({"error": str(e)}), 400
# Local testing entry point.
if __name__ == '__main__':
    # debug=True is intended for local runs only; host 0.0.0.0 / port 7860
    # presumably matches the Hugging Face Spaces convention — confirm.
    superkart_api.run(debug=True, host='0.0.0.0', port=7860)