deepacsr committed on
Commit
145d050
·
verified ·
1 Parent(s): 809f64e

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +21 -36
app.py CHANGED
# Define an endpoint for batch prediction (POST request)
@product_sales_predictor_api.post('/v1/ProductBatchSales')
def predict_sales_batch():
    """
    Handle POST requests to the '/v1/ProductBatchSales' endpoint.

    Expects a multipart-form CSV upload (field name 'file') containing
    details for multiple products, and returns the predicted product
    sales as a {Product_Id: prediction} dictionary in the JSON response.

    Returns:
        A JSON object mapping each Product_Id to its predicted sales,
        or a JSON error object with HTTP 400 when no file was uploaded.
    """
    print(">>> Batch endpoint invoked!", flush=True)
    product_sales_predictor_api.logger.info('Batch endpoint invoked')

    # Validate the upload explicitly: request.files['file'] raises an
    # unhandled KeyError when the field is absent, which surfaces as an
    # opaque 400 HTML page instead of a JSON error.
    file = request.files.get('file')
    if file is None:
        return jsonify({"error": "Missing 'file' upload field"}), 400

    # Read the uploaded CSV file into a Pandas DataFrame
    input_data = pd.read_csv(file)

    # Save Product_Id for mapping predictions back in the output
    product_ids = input_data['Product_Id'].tolist()

    # Columns the model does NOT need at inference time
    drop_cols = [
        'Product_Id',
        'Store_Id',
        'Store_Establishment_Year',
        'Product_Store_Sales_Total',  # target column
    ]

    # Drop only the columns that actually exist in the CSV
    input_data = input_data.drop(
        columns=[c for c in drop_cols if c in input_data.columns]
    )

    # input_data now contains only the features used for training
    predictions = model.predict(input_data)

    # Convert NumPy scalar types to plain Python floats so that
    # jsonify can serialize them.
    predictions = [float(p) for p in predictions]

    # Pair each Product_Id with its predicted sales value
    output = dict(zip(product_ids, predictions))

    return jsonify(output)


# Run the Flask application in debug mode if this script is executed directly.
# NOTE(review): debug=True must not be used in production — confirm this
# entry point is development-only.
if __name__ == '__main__':
    product_sales_predictor_api.run(debug=True)
 
# Define an endpoint for batch prediction (POST request)

@product_sales_predictor_api.post('/v1/ProductBatchSales')
def predict_sales_batch():
    """
    Handle POST requests to the '/v1/ProductBatchSales' endpoint.

    Expects a multipart-form CSV upload (field name 'file') with one row
    per product. Non-feature columns are dropped before the model is
    applied.

    Returns:
        {"predictions": [...]} with HTTP 200 on success,
        {"error": ...} with HTTP 400 when no file was uploaded,
        {"error": ...} with HTTP 500 on any other failure.
    """
    print(">>> Batch endpoint invoked!", flush=True)
    try:
        file = request.files.get('file')
        print(">>> File received:", file is not None, flush=True)

        # Fail fast with a client error: without this check,
        # pd.read_csv(None) raises and the broad except below would
        # mis-report a missing upload as a 500 server error.
        if file is None:
            return jsonify({"error": "Missing 'file' upload field"}), 400

        input_data = pd.read_csv(file)
        print(">>> CSV loaded. Columns:", list(input_data.columns), flush=True)

        # Columns the model was not trained on (includes the target).
        drop_cols = [
            'Product_Id',
            'Store_Id',
            'Store_Establishment_Year',
            'Product_Store_Sales_Total'
        ]
        # Drop only the columns that actually exist in the CSV.
        input_data = input_data.drop(
            columns=[c for c in drop_cols if c in input_data.columns]
        )
        print(">>> After column drop:", list(input_data.columns), flush=True)

        predictions = model.predict(input_data)
        # Convert NumPy scalars to plain floats so jsonify can serialize them.
        predictions = [float(p) for p in predictions]

        print(">>> Predictions completed", flush=True)

        return jsonify({"predictions": predictions})

    except Exception as e:
        # Broad catch at the API boundary so failures come back as JSON
        # instead of an HTML 500 page. NOTE(review): echoing str(e) may
        # leak internal details to clients — confirm this is acceptable.
        print(">>> ERROR:", str(e), flush=True)
        return jsonify({"error": str(e)}), 500