RathodHarish committed on
Commit
546ac07
·
verified ·
1 Parent(s): b980344

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -36
app.py CHANGED
@@ -100,14 +100,12 @@ def get_folder_id(folder_name):
100
  LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
101
 
102
  # Salesforce report creation
103
- def create_salesforce_reports(df, reminders_df):
104
  if sf is None or not LABOPS_REPORTS_FOLDER_ID:
105
  logging.error("Cannot create Salesforce reports: No connection or folder ID")
106
  return
107
  try:
108
  timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
109
- current_date = datetime.now()
110
- next_30_days = current_date + timedelta(days=30)
111
  reports = [
112
  {
113
  "reportMetadata": {
@@ -128,10 +126,7 @@ def create_salesforce_reports(df, reminders_df):
128
  "reportType": {"type": "CustomEntity", "value": "SmartLog__c"},
129
  "reportFormat": "TABULAR",
130
  "reportBooleanFilter": None,
131
- "reportFilters": [
132
- {"column": "AMC_Date__c", "operator": "greaterOrEqual", "value": current_date.strftime('%Y-%m-%d')},
133
- {"column": "AMC_Date__c", "operator": "lessOrEqual", "value": next_30_days.strftime('%Y-%m-%d')}
134
- ],
135
  "detailColumns": ["SmartLog__c.Device_Id__c", "SmartLog__c.AMC_Date__c"],
136
  "folderId": LABOPS_REPORTS_FOLDER_ID
137
  }
@@ -139,7 +134,7 @@ def create_salesforce_reports(df, reminders_df):
139
  ]
140
  for report in reports:
141
  sf.restful('analytics/reports', method='POST', json=report)
142
- logging.info("Salesforce reports created successfully with AMC reminders filter")
143
  except Exception as e:
144
  logging.error(f"Failed to create Salesforce reports: {str(e)}")
145
 
@@ -156,7 +151,6 @@ def save_to_salesforce(df, reminders_df):
156
  reminder_device_ids = set(reminders_df['device_id']) if not reminders_df.empty else set()
157
  logging.info(f"Processing {len(df)} records for Salesforce")
158
 
159
- # Process all records from filtered_df
160
  for idx, row in df.iterrows():
161
  status = str(row['status']).lower()
162
  log_type = str(row['log_type']).lower()
@@ -189,29 +183,6 @@ def save_to_salesforce(df, reminders_df):
189
  }
190
  records.append(record)
191
 
192
- # Process reminders_df explicitly to ensure AMC reminders are saved
193
- if not reminders_df.empty:
194
- logging.info(f"Processing {len(reminders_df)} AMC reminder records for Salesforce")
195
- for idx, row in reminders_df.iterrows():
196
- status = str(row['status']).lower()
197
- log_type = str(row['log_type']).lower()
198
- status_mapped = picklist_mapping['Status__c'].get(status, status_values[0] if status_values else 'Active')
199
- log_type_mapped = picklist_mapping['Log_Type__c'].get(log_type, log_type_values[0] if log_type_values else 'Smart Log')
200
-
201
- amc_date_str = row['amc_date'].strftime('%Y-%m-%d') if pd.notna(row['amc_date']) else None
202
- if amc_date_str:
203
- record = {
204
- 'Device_Id__c': str(row['device_id'])[:50],
205
- 'Log_Type__c': log_type_mapped,
206
- 'Status__c': status_mapped,
207
- 'Timestamp__c': row['timestamp'].isoformat() if pd.notna(row['timestamp']) else None,
208
- 'Usage_Hours__c': float(row['usage_hours']) if pd.notna(row['usage_hours']) else 0.0,
209
- 'Downtime__c': float(row['downtime']) if pd.notna(row['downtime']) else 0.0,
210
- 'AMC_Date__c': amc_date_str
211
- }
212
- records.append(record)
213
- logging.info(f"Added AMC reminder for Device ID {row['device_id']} with AMC Date {amc_date_str}")
214
-
215
  if records:
216
  batch_size = 100
217
  for i in range(0, len(records), batch_size):
@@ -406,7 +377,7 @@ def create_anomaly_alerts_chart(anomalies_df):
406
  x='date',
407
  y='anomaly_count',
408
  title="Anomaly Alerts Over Time",
409
- labels={"date": "Date", "anomaly_count": "Number of Anomalies"} # Fixed syntax error here
410
  )
411
  fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
412
  return fig
@@ -611,16 +582,16 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
611
  anomaly_alerts_chart = create_anomaly_alerts_chart(anomalies_df)
612
  device_cards = generate_device_cards(filtered_df)
613
 
614
- # Save to Salesforce and create reports
615
  save_to_salesforce(filtered_df, reminders_df)
616
- create_salesforce_reports(filtered_df, reminders_df)
617
 
618
  elapsed_time = time.time() - start_time
619
  logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
620
  if elapsed_time > 3:
621
  logging.warning(f"Processing time exceeded 3 seconds: {elapsed_time:.2f} seconds")
622
 
623
- return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_output, anomaly_alerts_output, downtime_chart_output, anomaly_output, amc_output, insights_output, pdf_output, df, current_modified_time)
624
  except Exception as e:
625
  logging.error(f"Failed to process file: {str(e)}")
626
  return f"Error: {str(e)}", "<p>Error processing data.</p>", None, '<p>Error processing data.</p>', None, None, None, None, "", "", "", None, cached_df_state, last_modified_state
 
100
  LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
101
 
102
  # Salesforce report creation
103
+ def create_salesforce_reports(df):
104
  if sf is None or not LABOPS_REPORTS_FOLDER_ID:
105
  logging.error("Cannot create Salesforce reports: No connection or folder ID")
106
  return
107
  try:
108
  timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
 
 
109
  reports = [
110
  {
111
  "reportMetadata": {
 
126
  "reportType": {"type": "CustomEntity", "value": "SmartLog__c"},
127
  "reportFormat": "TABULAR",
128
  "reportBooleanFilter": None,
129
+ "reportFilters": [],
 
 
 
130
  "detailColumns": ["SmartLog__c.Device_Id__c", "SmartLog__c.AMC_Date__c"],
131
  "folderId": LABOPS_REPORTS_FOLDER_ID
132
  }
 
134
  ]
135
  for report in reports:
136
  sf.restful('analytics/reports', method='POST', json=report)
137
+ logging.info("Salesforce reports created successfully")
138
  except Exception as e:
139
  logging.error(f"Failed to create Salesforce reports: {str(e)}")
140
 
 
151
  reminder_device_ids = set(reminders_df['device_id']) if not reminders_df.empty else set()
152
  logging.info(f"Processing {len(df)} records for Salesforce")
153
 
 
154
  for idx, row in df.iterrows():
155
  status = str(row['status']).lower()
156
  log_type = str(row['log_type']).lower()
 
183
  }
184
  records.append(record)
185
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
186
  if records:
187
  batch_size = 100
188
  for i in range(0, len(records), batch_size):
 
377
  x='date',
378
  y='anomaly_count',
379
  title="Anomaly Alerts Over Time",
380
+ labels={"date": "Date", "anomaly_count": "Number of Anomalies"}
381
  )
382
  fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
383
  return fig
 
582
  anomaly_alerts_chart = create_anomaly_alerts_chart(anomalies_df)
583
  device_cards = generate_device_cards(filtered_df)
584
 
585
+ # Save to Salesforce after all other processing
586
  save_to_salesforce(filtered_df, reminders_df)
587
+ create_salesforce_reports(filtered_df)
588
 
589
  elapsed_time = time.time() - start_time
590
  logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
591
  if elapsed_time > 3:
592
  logging.warning(f"Processing time exceeded 3 seconds: {elapsed_time:.2f} seconds")
593
 
594
+ return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights, None, df, current_modified_time)
595
  except Exception as e:
596
  logging.error(f"Failed to process file: {str(e)}")
597
  return f"Error: {str(e)}", "<p>Error processing data.</p>", None, '<p>Error processing data.</p>', None, None, None, None, "", "", "", None, cached_df_state, last_modified_state