# SuperKart sales prediction API — Flask service wrapping a trained
# regression pipeline. (Pasted from a hosted Space; platform chrome removed.)
import joblib
import pandas as pd
from flask import Flask, request, jsonify
# Create the Flask application that serves the prediction endpoints.
# NOTE: Flask's first argument is normally __name__; the original passes a
# display string, which Flask accepts as the import name.
SuperKart_predictor_api = Flask("SuperKart Predictor")

# Load the trained sales-prediction pipeline once at startup so every
# request reuses the same fitted model object.
model = joblib.load("SuperKart_prediction_model_v1_0.joblib")
# NOTE(review): the paste announced "a route for the home page" but carried
# no @route line — the decorator was evidently stripped. Restored here;
# confirm the path against the deployed app.
@SuperKart_predictor_api.route("/")
def home():
    """Landing/health-check endpoint: returns a static welcome message."""
    return "Welcome to the Super Kart Sales Prediction!"
# NOTE(review): route decorator restored — the source comment announced an
# endpoint but the @route line was lost in the paste. Path and method are
# assumed; confirm against the client that calls this service.
@SuperKart_predictor_api.route("/predict", methods=["POST"])
def predict_sales():
    """Predict sales for a single record posted as JSON.

    Expects a JSON object containing the nine feature keys listed below
    (KeyError propagates if one is missing). Returns a JSON response of
    the form {"SalesPrediction": <predicted value>}.
    """
    # Parse the JSON body of the request.
    sales_data = request.get_json()

    # Extract exactly the features the model was trained on, in a fixed
    # order, ignoring any extra keys the caller may have sent.
    data_info = {
        'Product_Weight': sales_data['Product_Weight'],
        'Product_Sugar_Content': sales_data['Product_Sugar_Content'],
        'Product_Allocated_Area': sales_data['Product_Allocated_Area'],
        'Product_Type': sales_data['Product_Type'],
        'Product_MRP': sales_data['Product_MRP'],
        'Store_Id': sales_data['Store_Id'],
        'Store_Size': sales_data['Store_Size'],
        'Store_Location_City_Type': sales_data['Store_Location_City_Type'],
        'Store_Type': sales_data['Store_Type'],
    }

    # Single-row DataFrame: the fitted pipeline expects tabular input.
    input_data = pd.DataFrame([data_info])

    # Enforce numeric dtypes — JSON clients may deliver these as strings.
    numeric_cols = ["Product_Weight", "Product_Allocated_Area", "Product_MRP"]
    input_data[numeric_cols] = input_data[numeric_cols].astype(float)

    # model.predict returns an array-like; take the first (only) value.
    # (Removed dead code from the original: an unused mid-function
    # `import datetime` / `current_year`, and commented-out leftovers.)
    predicted_sales = model.predict(input_data).tolist()[0]

    # Return the prediction as a JSON response.
    return jsonify({'SalesPrediction': predicted_sales})
# NOTE(review): route decorator restored — the source comment announced an
# endpoint but the @route line was lost in the paste. Path and method are
# assumed; confirm against the client. The function name keeps the original
# "churn" wording (a template leftover) to avoid interface changes.
@SuperKart_predictor_api.route("/predict_batch", methods=["POST"])
def predict_churn_batch():
    """Predict sales for a batch of records uploaded as a CSV file.

    Assumes the CSV contains the same feature columns as the training
    data for this project. If a Product_Id column is present it keys the
    response mapping; otherwise the row index is used.
    """
    # Read the uploaded CSV straight into a DataFrame.
    file = request.files['file']
    input_data = pd.read_csv(file)

    # Product_Id is an identifier, not a feature: keep it for the response
    # but drop it before prediction.
    if "Product_Id" in input_data.columns:
        product_ids = input_data["Product_Id"].copy()
        X = input_data.drop(columns=["Product_Id"])
    else:
        product_ids = None
        X = input_data

    # Run the batch through the model.
    predictions = model.predict(X).tolist()

    # Map identifiers (or row indices) to their predictions.
    if product_ids is not None:
        output_dict = dict(zip(product_ids.tolist(), predictions))
    else:
        output_dict = dict(zip(input_data.index.tolist(), predictions))
    return jsonify(output_dict)
# Script entry point: serve on all interfaces, using the PORT environment
# variable when set (7860 otherwise — presumably the hosting platform's
# default port; confirm against the deployment config).
if __name__ == "__main__":
    import os

    port = int(os.environ.get("PORT", 7860))
    SuperKart_predictor_api.run(host="0.0.0.0", port=port)