RathodHarish committed on
Commit
e8bdc91
·
verified ·
1 Parent(s): 9f62323

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +129 -93
app.py CHANGED
@@ -1,5 +1,5 @@
1
  """
2
- LabOps Log Analyzer Dashboard with CSV file upload, PDF generation, and Salesforce integration
3
  """
4
  import gradio as gr
5
  import pandas as pd
@@ -13,6 +13,9 @@ from concurrent.futures import ThreadPoolExecutor
13
  from simple_salesforce import Salesforce
14
  import os
15
  import json
 
 
 
16
 
17
  # Configure logging
18
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
@@ -30,6 +33,14 @@ except Exception as e:
30
  logging.error(f"Failed to connect to Salesforce: {str(e)}")
31
  sf = None
32
 
 
 
 
 
 
 
 
 
33
  # Try to import reportlab
34
  try:
35
  from reportlab.lib.pagesizes import letter
@@ -118,6 +129,59 @@ def get_folder_id(folder_name):
118
  # Cache the folder ID at startup
119
  LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
120
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
121
  # Create Salesforce reports (Usage and AMC Reminders)
122
  def create_salesforce_reports(df):
123
  if sf is None:
@@ -130,46 +194,25 @@ def create_salesforce_reports(df):
130
  usage_report_metadata = {
131
  "reportMetadata": {
132
  "name": f"SmartLog_Usage_Report_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
133
- "developerName": f"SmartLog_Usage_Report_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
134
- "reportType": {
135
- "type": "CustomObject",
136
- "value": "SmartLog__c"
137
- },
138
  "reportFormat": "SUMMARY",
139
- "reportBooleanFilter": None,
140
  "reportFilters": [
141
- {
142
- "column": "SmartLog__c.Status__c",
143
- "operator": "equals",
144
- "value": "Active"
145
- },
146
- {
147
- "column": "SmartLog__c.Timestamp__c",
148
- "operator": "greaterOrEqual",
149
- "value": "THIS_MONTH"
150
- }
151
  ],
152
- "aggregates": ["s!SmartLog__c.Usage_Hours__c", "s!SmartLog__c.Downtime__c"],
153
- "groupingsDown": [
154
- {
155
- "name": "Device_Id__c",
156
- "field": "SmartLog__c.Device_Id__c",
157
- "sortOrder": "Asc",
158
- "sortAggregate": None,
159
- "dateGranularity": "None"
160
- }
161
  ],
162
- "detailColumns": [
163
- "SmartLog__c.Device_Id__c",
164
- "SmartLog__c.Log_Type__c",
165
- "SmartLog__c.Status__c",
166
- "SmartLog__c.Timestamp__c",
167
- "SmartLog__c.Usage_Hours__c",
168
- "SmartLog__c.Downtime__c",
169
- "SmartLog__c.AMC_Date__c"
170
  ],
171
- "folderId": LABOPS_REPORTS_FOLDER_ID,
172
- "currency": None
173
  }
174
  }
175
  usage_result = sf.restful('analytics/reports', method='POST', json=usage_report_metadata)
@@ -180,37 +223,19 @@ def create_salesforce_reports(df):
180
  amc_report_metadata = {
181
  "reportMetadata": {
182
  "name": f"SmartLog_AMC_Reminders_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
183
- "developerName": f"SmartLog_AMC_Reminders_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
184
- "reportType": {
185
- "type": "CustomObject",
186
- "value": "SmartLog__c"
187
- },
188
  "reportFormat": "TABULAR",
189
- "reportBooleanFilter": None,
190
  "reportFilters": [
191
- {
192
- "column": "SmartLog__c.Status__c",
193
- "operator": "equals",
194
- "value": "Active"
195
- },
196
- {
197
- "column": "SmartLog__c.AMC_Date__c",
198
- "operator": "greaterOrEqual",
199
- "value": "TODAY"
200
- },
201
- {
202
- "column": "SmartLog__c.AMC_Date__c",
203
- "operator": "lessOrEqual",
204
- "value": "NEXT_N_DAYS:30"
205
- }
206
  ],
207
- "detailColumns": [
208
- "SmartLog__c.Device_Id__c",
209
- "SmartLog__c.AMC_Date__c",
210
- "SmartLog__c.Status__c"
211
  ],
212
- "folderId": LABOPS_REPORTS_FOLDER_ID,
213
- "currency": None
214
  }
215
  }
216
  amc_result = sf.restful('analytics/reports', method='POST', json=amc_report_metadata)
@@ -234,14 +259,14 @@ def save_to_salesforce(df, summary, anomalies, amc_reminders, insights):
234
  # Validate and map picklist values
235
  status = str(row['status'])
236
  log_type = str(row['log_type'])
237
-
238
  # Map Status__c
239
  if status not in status_values:
240
  status = picklist_mapping['Status__c'].get(status.lower(), status_values[0] if status_values else None)
241
  if status is None:
242
  logging.warning(f"Skipping record with invalid Status__c: {row['status']}")
243
  continue
244
-
245
  # Map Log_Type__c
246
  if log_type not in log_type_values:
247
  log_type = picklist_mapping['Log_Type__c'].get(log_type.lower(), log_type_values[0] if log_type_values else None)
@@ -267,7 +292,7 @@ def save_to_salesforce(df, summary, anomalies, amc_reminders, insights):
267
  'AMC_Date__c': amc_date_str
268
  }
269
  records.append(record)
270
-
271
  # Bulk insert to reduce API calls
272
  if records:
273
  sf.bulk.SmartLog__c.insert(records)
@@ -316,25 +341,25 @@ def detect_anomalies(df, progress=gr.Progress()):
316
  logging.error(f"Anomaly detection failed: {str(e)}")
317
  return f"Anomaly detection failed: {str(e)}"
318
 
319
- # AMC reminders (identify records for display)
320
  def check_amc_reminders(df, current_date, progress=gr.Progress()):
321
  progress(0.6, "Checking AMC reminders...")
322
  try:
323
  if "device_id" not in df.columns or "amc_date" not in df.columns:
324
- return "AMC reminders require 'device_id' and 'amc_date' columns."
325
  df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
326
  current_date = pd.to_datetime(current_date)
327
  df["days_to_amc"] = (df["amc_date"] - current_date).dt.days
328
  reminders = df[(df["days_to_amc"] >= 0) & (df["days_to_amc"] <= 30)][["device_id", "amc_date"]]
329
  if reminders.empty:
330
- return "No AMC reminders due within the next 30 days."
331
  reminder_lines = ["Upcoming AMC Reminders:"]
332
  for _, row in reminders.head(5).iterrows():
333
  reminder_lines.append(f"- Device ID: {row['device_id']}, AMC Date: {row['amc_date']}")
334
- return "\n".join(reminder_lines)
335
  except Exception as e:
336
  logging.error(f"AMC reminder generation failed: {str(e)}")
337
- return f"AMC reminder generation failed: {str(e)}"
338
 
339
  # Dashboard insights
340
  def generate_dashboard_insights(df, progress=gr.Progress()):
@@ -379,7 +404,7 @@ def create_usage_chart(df, progress=gr.Progress()):
379
  return None
380
 
381
  # Generate PDF content
382
- def generate_pdf_content(summary, preview, anomalies, amc_reminders, insights):
383
  if not reportlab_available:
384
  return None
385
  try:
@@ -411,6 +436,10 @@ def generate_pdf_content(summary, preview, anomalies, amc_reminders, insights):
411
  story.append(safe_paragraph(amc_reminders or "No AMC reminders.", styles['Normal']))
412
  story.append(Spacer(1, 12))
413
 
 
 
 
 
414
  story.append(Paragraph("Dashboard Insights", styles['Heading2']))
415
  story.append(safe_paragraph(insights or "No insights generated.", styles['Normal']))
416
 
@@ -426,13 +455,13 @@ async def process_logs(file_obj, progress=gr.Progress()):
426
  try:
427
  progress(0, "Starting file processing...")
428
  if not file_obj:
429
- return "No file uploaded.", "No data to preview.", None, "No anomalies detected.", "No AMC reminders.", "No insights generated.", None, "No Salesforce data saved.", "No report created."
430
 
431
  file_name = file_obj.name
432
  logging.info(f"Processing file: {file_name}")
433
-
434
  if not file_name.endswith(".csv"):
435
- return "Please upload a CSV file.", "", None, "", "", "", None, "", ""
436
 
437
  required_columns = ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
438
  dtypes = {
@@ -446,11 +475,11 @@ async def process_logs(file_obj, progress=gr.Progress()):
446
  df = pd.read_csv(file_obj, dtype=dtypes)
447
  missing_columns = [col for col in required_columns if col not in df.columns]
448
  if missing_columns:
449
- return f"Missing columns: {missing_columns}", None, None, None, None, None, None, None, None
450
  df["timestamp"] = pd.to_datetime(df["timestamp"], errors='coerce')
451
  df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
452
  if df.empty:
453
- return "No data available.", None, None, None, None, None, None, None, None
454
 
455
  with ThreadPoolExecutor() as executor:
456
  future_summary = executor.submit(summarize_logs, df)
@@ -462,7 +491,8 @@ async def process_logs(file_obj, progress=gr.Progress()):
462
 
463
  summary = f"Step 1: Summary Report\n{future_summary.result()}"
464
  anomalies = f"Anomaly Detection\n{future_anomalies.result()}"
465
- amc_reminders = f"AMC Reminders\n{future_amc.result()}"
 
466
  insights = f"Dashboard Insights (AI)\n{future_insights.result()}"
467
  chart = future_chart.result()
468
  report_result = future_reports.result()
@@ -478,13 +508,14 @@ async def process_logs(file_obj, progress=gr.Progress()):
478
  preview = "\n".join(preview_lines)
479
 
480
  salesforce_result = save_to_salesforce(df, summary, anomalies, amc_reminders, insights)
481
- pdf_file = generate_pdf_content(summary, preview, anomalies, amc_reminders, insights)
 
482
 
483
  progress(1.0, "Done!")
484
- return summary, preview, chart, anomalies, amc_reminders, insights, pdf_file, salesforce_result, report_result
485
  except Exception as e:
486
  logging.error(f"Failed to process file: {str(e)}")
487
- return f"Error: {str(e)}", None, None, None, None, None, None, None, None
488
 
489
  # Gradio Interface
490
  try:
@@ -498,7 +529,7 @@ try:
498
  .dashboard-section ul {margin: 2px 0; padding-left: 20px;}
499
  """) as iface:
500
  gr.Markdown("<h1>LabOps Log Analyzer Dashboard (Hugging Face AI)</h1>")
501
- gr.Markdown("Upload a CSV file to analyze and generate Salesforce reports.")
502
 
503
  with gr.Row():
504
  with gr.Column(scale=1):
@@ -508,39 +539,43 @@ try:
508
  with gr.Column(scale=2):
509
  with gr.Group(elem_classes="dashboard-container"):
510
  gr.Markdown("<div class='dashboard-title'>Analysis Results</div>")
511
-
512
  with gr.Group(elem_classes="dashboard-section"):
513
  gr.Markdown("### Step 1: Summary Report")
514
  summary_output = gr.Markdown()
515
-
516
  with gr.Group(elem_classes="dashboard-section"):
517
  gr.Markdown("### Step 2: Log Preview")
518
  preview_output = gr.Markdown()
519
-
520
  with gr.Group(elem_classes="dashboard-section"):
521
  gr.Markdown("### Step 3: Usage Chart")
522
  chart_output = gr.Plot()
523
-
524
  with gr.Group(elem_classes="dashboard-section"):
525
  gr.Markdown("### Step 4: Anomaly Detection")
526
  anomaly_output = gr.Markdown()
527
-
528
  with gr.Group(elem_classes="dashboard-section"):
529
  gr.Markdown("### Step 5: AMC Reminders")
530
  amc_output = gr.Markdown()
531
-
532
  with gr.Group(elem_classes="dashboard-section"):
533
  gr.Markdown("### Step 6: Insights (AI)")
534
  insights_output = gr.Markdown()
535
-
 
 
 
 
536
  with gr.Group(elem_classes="dashboard-section"):
537
  gr.Markdown("### Salesforce Integration")
538
  salesforce_output = gr.Markdown()
539
-
540
  with gr.Group(elem_classes="dashboard-section"):
541
  gr.Markdown("### Salesforce Reports")
542
  report_output = gr.Markdown()
543
-
544
  with gr.Group(elem_classes="dashboard-section"):
545
  gr.Markdown("### Download Report")
546
  pdf_output = gr.File(label="Download Analysis Report as PDF")
@@ -557,7 +592,8 @@ try:
557
  insights_output,
558
  pdf_output,
559
  salesforce_output,
560
- report_output
 
561
  ]
562
  )
563
 
@@ -574,4 +610,4 @@ if __name__ == "__main__":
574
  except Exception as e:
575
  logging.error(f"Failed to launch Gradio interface: {str(e)}")
576
  print(f"Error launching app: {str(e)}")
577
- raise e
 
1
  """
2
+ LabOps Log Analyzer Dashboard with CSV file upload, PDF generation, Salesforce integration, and AMC reminder email alerts
3
  """
4
  import gradio as gr
5
  import pandas as pd
 
13
  from simple_salesforce import Salesforce
14
  import os
15
  import json
16
+ import smtplib
17
+ from email.mime.text import MIMEText
18
+ from email.mime.multipart import MIMEMultipart
19
 
20
  # Configure logging
21
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 
33
  logging.error(f"Failed to connect to Salesforce: {str(e)}")
34
  sf = None
35
 
36
# Email configuration (read from environment variables).
# NOTE(review): the previous version passed the secret *values* (an email
# address and a plaintext password) as the environment-variable *names* to
# os.getenv(). That both committed live credentials to source control and
# always yielded None at runtime (no env var is named after the password).
# Read conventional variable names instead and keep secrets out of the repo;
# the leaked password must be rotated.
SMTP_SERVER = os.getenv('SMTP_SERVER', 'smtp.gmail.com')
SMTP_PORT = int(os.getenv('SMTP_PORT', '587'))  # 587 = SMTP submission with STARTTLS
SMTP_USERNAME = os.getenv('SMTP_USERNAME')  # e.g., your-email@gmail.com
SMTP_PASSWORD = os.getenv('SMTP_PASSWORD')  # app-specific password if using Gmail
EMAIL_FROM = os.getenv('EMAIL_FROM', SMTP_USERNAME)
EMAIL_TO = os.getenv('EMAIL_TO', 'harishkumarr@sathkrutha.com')
43
+
44
  # Try to import reportlab
45
  try:
46
  from reportlab.lib.pagesizes import letter
 
129
  # Cache the folder ID at startup
130
  LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
131
 
132
# Send AMC reminder emails
def send_amc_reminder_emails(reminders_df):
    """Send one AMC reminder email per row of *reminders_df*.

    Parameters
    ----------
    reminders_df : pandas.DataFrame
        Rows with 'device_id' and 'amc_date' (datetime-like) columns,
        typically the DataFrame produced by check_amc_reminders().

    Returns
    -------
    str
        Human-readable status: one line per email sent, or an explanation
        of why nothing was sent. Never raises; failures are logged and
        reported in the return string.
    """
    if reminders_df.empty:
        logging.info("No AMC reminders to send via email.")
        return "No AMC reminder emails sent (no reminders found)."

    if not all([SMTP_USERNAME, SMTP_PASSWORD, EMAIL_FROM]):
        logging.error("SMTP credentials not configured. Please set SMTP_USERNAME, SMTP_PASSWORD, and EMAIL_FROM environment variables.")
        return "Failed to send emails: SMTP credentials not configured."

    try:
        email_results = []
        # Use the SMTP connection as a context manager so the socket is
        # closed even if starttls/login/sendmail raises partway through.
        # (The original only called server.quit() on the success path,
        # leaking the connection on any exception.)
        with smtplib.SMTP(SMTP_SERVER, SMTP_PORT) as server:
            server.starttls()
            server.login(SMTP_USERNAME, SMTP_PASSWORD)

            for _, row in reminders_df.iterrows():
                device_id = row['device_id']
                amc_date = row['amc_date'].strftime('%Y-%m-%d')

                # Build the reminder message for this device.
                msg = MIMEMultipart()
                msg['From'] = EMAIL_FROM
                msg['To'] = EMAIL_TO
                msg['Subject'] = f"AMC Reminder for Device {device_id}"

                body = f"""
Dear Harish Kumar,

This is a reminder that the Annual Maintenance Contract (AMC) for the following device is due:

- Device ID: {device_id}
- AMC Date: {amc_date}

Please schedule the maintenance at your earliest convenience.

Best regards,
LabOps Team
"""
                msg.attach(MIMEText(body, 'plain'))

                server.sendmail(EMAIL_FROM, EMAIL_TO, msg.as_string())
                logging.info(f"AMC reminder email sent for Device ID {device_id} to {EMAIL_TO}")
                email_results.append(f"Sent AMC reminder for Device ID {device_id}")

        return "\n".join(email_results) if email_results else "No emails sent."
    except Exception as e:
        logging.error(f"Failed to send AMC reminder emails: {str(e)}")
        return f"Failed to send AMC reminder emails: {str(e)}"
184
+
185
  # Create Salesforce reports (Usage and AMC Reminders)
186
  def create_salesforce_reports(df):
187
  if sf is None:
 
194
  usage_report_metadata = {
195
  "reportMetadata": {
196
  "name": f"SmartLog_Usage_Report_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
197
+ "reportType": "SmartLog__c",
 
 
 
 
198
  "reportFormat": "SUMMARY",
 
199
  "reportFilters": [
200
+ {"column": "Status__c", "operator": "equals", "value": "Active"},
201
+ {"column": "Timestamp__c", "operator": "greaterOrEqual", "value": "THIS_MONTH"}
 
 
 
 
 
 
 
 
202
  ],
203
+ "reportColumns": [
204
+ {"field": "Device_Id__c"},
205
+ {"field": "Log_Type__c"},
206
+ {"field": "Status__c"},
207
+ {"field": "Timestamp__c"},
208
+ {"field": "Usage_Hours__c", "aggregateTypes": ["Sum"]},
209
+ {"field": "Downtime__c", "aggregateTypes": ["Sum"]},
210
+ {"field": "AMC_Date__c"}
 
211
  ],
212
+ "groupingsDown": [
213
+ {"field": "Device_Id__c", "sortOrder": "Asc", "dateGranularity": "None"}
 
 
 
 
 
 
214
  ],
215
+ "folderId": LABOPS_REPORTS_FOLDER_ID
 
216
  }
217
  }
218
  usage_result = sf.restful('analytics/reports', method='POST', json=usage_report_metadata)
 
223
  amc_report_metadata = {
224
  "reportMetadata": {
225
  "name": f"SmartLog_AMC_Reminders_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
226
+ "reportType": "SmartLog__c",
 
 
 
 
227
  "reportFormat": "TABULAR",
 
228
  "reportFilters": [
229
+ {"column": "Status__c", "operator": "equals", "value": "Active"},
230
+ {"column": "AMC_Date__c", "operator": "greaterOrEqual", "value": "TODAY"},
231
+ {"column": "AMC_Date__c", "operator": "lessOrEqual", "value": "NEXT_N_DAYS:30"}
 
 
 
 
 
 
 
 
 
 
 
 
232
  ],
233
+ "reportColumns": [
234
+ {"field": "Device_Id__c"},
235
+ {"field": "AMC_Date__c"},
236
+ {"field": "Status__c"}
237
  ],
238
+ "folderId": LABOPS_REPORTS_FOLDER_ID
 
239
  }
240
  }
241
  amc_result = sf.restful('analytics/reports', method='POST', json=amc_report_metadata)
 
259
  # Validate and map picklist values
260
  status = str(row['status'])
261
  log_type = str(row['log_type'])
262
+
263
  # Map Status__c
264
  if status not in status_values:
265
  status = picklist_mapping['Status__c'].get(status.lower(), status_values[0] if status_values else None)
266
  if status is None:
267
  logging.warning(f"Skipping record with invalid Status__c: {row['status']}")
268
  continue
269
+
270
  # Map Log_Type__c
271
  if log_type not in log_type_values:
272
  log_type = picklist_mapping['Log_Type__c'].get(log_type.lower(), log_type_values[0] if log_type_values else None)
 
292
  'AMC_Date__c': amc_date_str
293
  }
294
  records.append(record)
295
+
296
  # Bulk insert to reduce API calls
297
  if records:
298
  sf.bulk.SmartLog__c.insert(records)
 
341
  logging.error(f"Anomaly detection failed: {str(e)}")
342
  return f"Anomaly detection failed: {str(e)}"
343
 
344
# AMC reminders (identify records for display and email)
def check_amc_reminders(df, current_date, progress=gr.Progress()):
    """Find devices whose AMC date falls within the next 30 days.

    Parameters
    ----------
    df : pandas.DataFrame
        Log data; must contain 'device_id' and 'amc_date' columns.
    current_date : datetime-like
        Reference date for the 30-day window.
    progress : gr.Progress
        Gradio progress reporter.

    Returns
    -------
    tuple[str, pandas.DataFrame]
        (display text, DataFrame of ['device_id', 'amc_date'] reminder
        rows — empty on error or when nothing is due).
    """
    progress(0.6, "Checking AMC reminders...")
    try:
        if "device_id" not in df.columns or "amc_date" not in df.columns:
            return "AMC reminders require 'device_id' and 'amc_date' columns.", pd.DataFrame()
        # Work on a private copy: this function runs inside a
        # ThreadPoolExecutor concurrently with other tasks reading the
        # same DataFrame, so mutating the shared df in place (as the
        # original did by rewriting 'amc_date' and adding 'days_to_amc')
        # is a data race.
        work = df[["device_id", "amc_date"]].copy()
        work["amc_date"] = pd.to_datetime(work["amc_date"], errors='coerce')
        reference = pd.to_datetime(current_date)
        work["days_to_amc"] = (work["amc_date"] - reference).dt.days
        # Due window: today through 30 days out, inclusive.
        reminders = work[(work["days_to_amc"] >= 0) & (work["days_to_amc"] <= 30)][["device_id", "amc_date"]]
        if reminders.empty:
            return "No AMC reminders due within the next 30 days.", reminders
        reminder_lines = ["Upcoming AMC Reminders:"]
        for _, row in reminders.head(5).iterrows():  # cap display at 5 rows
            reminder_lines.append(f"- Device ID: {row['device_id']}, AMC Date: {row['amc_date']}")
        return "\n".join(reminder_lines), reminders
    except Exception as e:
        logging.error(f"AMC reminder generation failed: {str(e)}")
        return f"AMC reminder generation failed: {str(e)}", pd.DataFrame()
363
 
364
  # Dashboard insights
365
  def generate_dashboard_insights(df, progress=gr.Progress()):
 
404
  return None
405
 
406
  # Generate PDF content
407
+ def generate_pdf_content(summary, preview, anomalies, amc_reminders, insights, email_status):
408
  if not reportlab_available:
409
  return None
410
  try:
 
436
  story.append(safe_paragraph(amc_reminders or "No AMC reminders.", styles['Normal']))
437
  story.append(Spacer(1, 12))
438
 
439
+ story.append(Paragraph("Email Notification Status", styles['Heading2']))
440
+ story.append(safe_paragraph(email_status or "No emails sent.", styles['Normal']))
441
+ story.append(Spacer(1, 12))
442
+
443
  story.append(Paragraph("Dashboard Insights", styles['Heading2']))
444
  story.append(safe_paragraph(insights or "No insights generated.", styles['Normal']))
445
 
 
455
  try:
456
  progress(0, "Starting file processing...")
457
  if not file_obj:
458
+ return "No file uploaded.", "No data to preview.", None, "No anomalies detected.", "No AMC reminders.", "No insights generated.", None, "No Salesforce data saved.", "No report created.", "No emails sent."
459
 
460
  file_name = file_obj.name
461
  logging.info(f"Processing file: {file_name}")
462
+
463
  if not file_name.endswith(".csv"):
464
+ return "Please upload a CSV file.", "", None, "", "", "", None, "", "", ""
465
 
466
  required_columns = ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
467
  dtypes = {
 
475
  df = pd.read_csv(file_obj, dtype=dtypes)
476
  missing_columns = [col for col in required_columns if col not in df.columns]
477
  if missing_columns:
478
+ return f"Missing columns: {missing_columns}", None, None, None, None, None, None, None, None, None
479
  df["timestamp"] = pd.to_datetime(df["timestamp"], errors='coerce')
480
  df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
481
  if df.empty:
482
+ return "No data available.", None, None, None, None, None, None, None, None, None
483
 
484
  with ThreadPoolExecutor() as executor:
485
  future_summary = executor.submit(summarize_logs, df)
 
491
 
492
  summary = f"Step 1: Summary Report\n{future_summary.result()}"
493
  anomalies = f"Anomaly Detection\n{future_anomalies.result()}"
494
+ amc_reminders, reminders_df = future_amc.result() # Get both display text and DataFrame
495
+ amc_reminders = f"AMC Reminders\n{amc_reminders}"
496
  insights = f"Dashboard Insights (AI)\n{future_insights.result()}"
497
  chart = future_chart.result()
498
  report_result = future_reports.result()
 
508
  preview = "\n".join(preview_lines)
509
 
510
  salesforce_result = save_to_salesforce(df, summary, anomalies, amc_reminders, insights)
511
+ email_status = send_amc_reminder_emails(reminders_df)
512
+ pdf_file = generate_pdf_content(summary, preview, anomalies, amc_reminders, insights, email_status)
513
 
514
  progress(1.0, "Done!")
515
+ return summary, preview, chart, anomalies, amc_reminders, insights, pdf_file, salesforce_result, report_result, email_status
516
  except Exception as e:
517
  logging.error(f"Failed to process file: {str(e)}")
518
+ return f"Error: {str(e)}", None, None, None, None, None, None, None, None, None
519
 
520
  # Gradio Interface
521
  try:
 
529
  .dashboard-section ul {margin: 2px 0; padding-left: 20px;}
530
  """) as iface:
531
  gr.Markdown("<h1>LabOps Log Analyzer Dashboard (Hugging Face AI)</h1>")
532
+ gr.Markdown("Upload a CSV file to analyze, generate Salesforce reports, and send AMC reminder emails.")
533
 
534
  with gr.Row():
535
  with gr.Column(scale=1):
 
539
  with gr.Column(scale=2):
540
  with gr.Group(elem_classes="dashboard-container"):
541
  gr.Markdown("<div class='dashboard-title'>Analysis Results</div>")
542
+
543
  with gr.Group(elem_classes="dashboard-section"):
544
  gr.Markdown("### Step 1: Summary Report")
545
  summary_output = gr.Markdown()
546
+
547
  with gr.Group(elem_classes="dashboard-section"):
548
  gr.Markdown("### Step 2: Log Preview")
549
  preview_output = gr.Markdown()
550
+
551
  with gr.Group(elem_classes="dashboard-section"):
552
  gr.Markdown("### Step 3: Usage Chart")
553
  chart_output = gr.Plot()
554
+
555
  with gr.Group(elem_classes="dashboard-section"):
556
  gr.Markdown("### Step 4: Anomaly Detection")
557
  anomaly_output = gr.Markdown()
558
+
559
  with gr.Group(elem_classes="dashboard-section"):
560
  gr.Markdown("### Step 5: AMC Reminders")
561
  amc_output = gr.Markdown()
562
+
563
  with gr.Group(elem_classes="dashboard-section"):
564
  gr.Markdown("### Step 6: Insights (AI)")
565
  insights_output = gr.Markdown()
566
+
567
+ with gr.Group(elem_classes="dashboard-section"):
568
+ gr.Markdown("### Step 7: Email Notification Status")
569
+ email_output = gr.Markdown()
570
+
571
  with gr.Group(elem_classes="dashboard-section"):
572
  gr.Markdown("### Salesforce Integration")
573
  salesforce_output = gr.Markdown()
574
+
575
  with gr.Group(elem_classes="dashboard-section"):
576
  gr.Markdown("### Salesforce Reports")
577
  report_output = gr.Markdown()
578
+
579
  with gr.Group(elem_classes="dashboard-section"):
580
  gr.Markdown("### Download Report")
581
  pdf_output = gr.File(label="Download Analysis Report as PDF")
 
592
  insights_output,
593
  pdf_output,
594
  salesforce_output,
595
+ report_output,
596
+ email_output
597
  ]
598
  )
599
 
 
610
  except Exception as e:
611
  logging.error(f"Failed to launch Gradio interface: {str(e)}")
612
  print(f"Error launching app: {str(e)}")
613
+ raise e