DD009 committed on
Commit
adb1438
·
verified ·
1 Parent(s): 222d983

Upload folder using huggingface_hub

Browse files
Files changed (2) hide show
  1. SuperKart_model_v1_0.joblib +2 -2
  2. app.py +38 -44
SuperKart_model_v1_0.joblib CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fe6646c64c50003d7be9e910550c26c0a3da5c1007f1b3c5e453864646bbf19c
3
- size 259782
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a08df3146149137d37d160f03eefa1551eaf5d25846acfb8fe5642cab851026f
3
+ size 260541
app.py CHANGED
@@ -28,45 +28,29 @@ def predict_sales():
28
  try:
29
  # Get the JSON data from the request body
30
  data = request.get_json()
31
-
32
- # Calculate derived features
33
- current_year = datetime.now().year
34
- store_age = current_year - data['store_establishment_year']
35
- product_density = data['product_weight'] / (data['product_allocated_area'] + 1e-6)
36
- price_per_weight = data['product_mrp'] / (data['product_weight'] + 1e-6)
37
- product_size = 'Small' if data['product_weight'] <= 10 else ('Medium' if data['product_weight'] <= 15 else 'Large')
38
- store_tier_size = f"{data['store_location_city_type']}_{data['store_size']}"
39
-
40
- # Prepare input data
41
  input_data = pd.DataFrame([{
42
  'Product_Weight': data['product_weight'],
43
- 'Product_Sugar_Content': data['product_sugar_content'],
44
  'Product_Allocated_Area': data['product_allocated_area'],
45
- 'Product_Type': data['product_type'],
46
  'Product_MRP': data['product_mrp'],
47
- 'Store_Id': data['store_id'],
48
  'Store_Establishment_Year': data['store_establishment_year'],
 
 
49
  'Store_Size': data['store_size'],
50
  'Store_Location_City_Type': data['store_location_city_type'],
51
- 'Store_Type': data['store_type'],
52
- 'Store_Age': store_age,
53
- 'Product_Density': product_density,
54
- 'Price_Per_Unit_Weight': price_per_weight,
55
- 'Product_Size_Category': product_size,
56
- 'Store_Tier_Size': store_tier_size
57
  }])
58
-
59
  # Make prediction
60
  predicted_sales = model.predict(input_data)[0]
61
-
62
  # Return the predicted sales
63
  return jsonify({
64
  'predicted_sales': round(float(predicted_sales), 2),
65
- 'store_age': store_age,
66
- 'product_density': round(product_density, 2),
67
- 'price_per_weight': round(price_per_weight, 2)
68
  })
69
-
70
  except Exception as e:
71
  return jsonify({'error': str(e)}), 400
72
 
@@ -81,37 +65,47 @@ def predict_sales_batch():
81
  # Check if file was uploaded
82
  if 'file' not in request.files:
83
  return jsonify({'error': 'No file uploaded'}), 400
84
-
85
  file = request.files['file']
86
-
87
  # Read CSV file
88
  input_data = pd.read_csv(file)
89
-
90
- # Calculate derived features
91
- current_year = datetime.now().year
92
- input_data['Store_Age'] = current_year - input_data['Store_Establishment_Year']
93
- input_data['Product_Density'] = input_data['Product_Weight'] / (input_data['Product_Allocated_Area'] + 1e-6)
94
- input_data['Price_Per_Unit_Weight'] = input_data['Product_MRP'] / (input_data['Product_Weight'] + 1e-6)
95
- input_data['Product_Size_Category'] = input_data['Product_Weight'].apply(
96
- lambda x: 'Small' if x <= 10 else ('Medium' if x <= 15 else 'Large'))
97
- input_data['Store_Tier_Size'] = input_data['Store_Location_City_Type'] + '_' + input_data['Store_Size']
98
-
 
 
 
 
 
 
 
 
 
99
  # Make predictions
100
- predictions = model.predict(input_data)
101
-
102
  # Prepare results
103
  results = []
104
  for i, row in input_data.iterrows():
105
  results.append({
106
- 'product_id': row['Product_Id'],
107
- 'store_id': row['Store_Id'],
108
  'predicted_sales': round(float(predictions[i]), 2),
109
  'product_type': row['Product_Type'],
110
  'store_type': row['Store_Type']
111
  })
112
-
113
- return jsonify({'predictions': results})
114
-
 
 
 
115
  except Exception as e:
116
  return jsonify({'error': str(e)}), 400
117
 
 
28
  try:
29
  # Get the JSON data from the request body
30
  data = request.get_json()
31
+
32
+ # Prepare input data with only the features used in training
 
 
 
 
 
 
 
 
33
  input_data = pd.DataFrame([{
34
  'Product_Weight': data['product_weight'],
 
35
  'Product_Allocated_Area': data['product_allocated_area'],
 
36
  'Product_MRP': data['product_mrp'],
 
37
  'Store_Establishment_Year': data['store_establishment_year'],
38
+ 'Product_Sugar_Content': data['product_sugar_content'],
39
+ 'Product_Type': data['product_type'],
40
  'Store_Size': data['store_size'],
41
  'Store_Location_City_Type': data['store_location_city_type'],
42
+ 'Store_Type': data['store_type']
 
 
 
 
 
43
  }])
44
+
45
  # Make prediction
46
  predicted_sales = model.predict(input_data)[0]
47
+
48
  # Return the predicted sales
49
  return jsonify({
50
  'predicted_sales': round(float(predicted_sales), 2),
51
+ 'features_used': list(input_data.columns)
 
 
52
  })
53
+
54
  except Exception as e:
55
  return jsonify({'error': str(e)}), 400
56
 
 
65
  # Check if file was uploaded
66
  if 'file' not in request.files:
67
  return jsonify({'error': 'No file uploaded'}), 400
68
+
69
  file = request.files['file']
70
+
71
  # Read CSV file
72
  input_data = pd.read_csv(file)
73
+
74
+ # Ensure we only keep the columns used in training
75
+ required_columns = [
76
+ 'Product_Weight',
77
+ 'Product_Allocated_Area',
78
+ 'Product_MRP',
79
+ 'Store_Establishment_Year',
80
+ 'Product_Sugar_Content',
81
+ 'Product_Type',
82
+ 'Store_Size',
83
+ 'Store_Location_City_Type',
84
+ 'Store_Type'
85
+ ]
86
+
87
+ # Verify all required columns are present
88
+ missing_cols = [col for col in required_columns if col not in input_data.columns]
89
+ if missing_cols:
90
+ return jsonify({'error': f'Missing required columns: {missing_cols}'}), 400
91
+
92
  # Make predictions
93
+ predictions = model.predict(input_data[required_columns])
94
+
95
  # Prepare results
96
  results = []
97
  for i, row in input_data.iterrows():
98
  results.append({
 
 
99
  'predicted_sales': round(float(predictions[i]), 2),
100
  'product_type': row['Product_Type'],
101
  'store_type': row['Store_Type']
102
  })
103
+
104
+ return jsonify({
105
+ 'predictions': results,
106
+ 'features_used': required_columns
107
+ })
108
+
109
  except Exception as e:
110
  return jsonify({'error': str(e)}), 400
111