RathodHarish committed on
Commit
714897b
·
verified ·
1 Parent(s): 848f896

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -25
app.py CHANGED
@@ -1,5 +1,5 @@
1
  """
2
- LabOps Log Analyzer Dashboard with CSV file upload, PDF generation, Salesforce integration, and AMC reminder via Salesforce trigger
3
  """
4
  import gradio as gr
5
  import pandas as pd
@@ -94,7 +94,7 @@ picklist_mapping = {
94
  'maintenance': 'Smart Log',
95
  'cell': 'Cell Analysis',
96
  'uv': 'UV Verification',
97
- 'weight log': 'Smart Log' # Added mapping for 'weight log'
98
  }
99
  }
100
 
@@ -119,12 +119,14 @@ def get_folder_id(folder_name):
119
  # Cache the folder ID at startup
120
  LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
121
 
122
- # Updated Salesforce report creation with minimal metadata
123
  def create_salesforce_reports(df):
124
  if sf is None:
125
- return "Salesforce connection not available."
 
126
  if not LABOPS_REPORTS_FOLDER_ID:
127
- return "Cannot create reports: 'LabOps Reports' folder not found in Salesforce."
 
128
 
129
  try:
130
  # Usage Report (Tabular Report for simplicity)
@@ -164,16 +166,14 @@ def create_salesforce_reports(df):
164
  amc_result = sf.restful('analytics/reports', method='POST', json=amc_report_metadata)
165
  amc_report_id = amc_result['id']
166
  logging.info(f"AMC Reminders Report created: {amc_report_id}")
167
-
168
- return f"Usage Report ID: {usage_report_id}, AMC Reminders Report ID: {amc_report_id}"
169
  except Exception as e:
170
  logging.error(f"Failed to create Salesforce reports: {str(e)}")
171
- return f"Failed to create reports: {str(e)}"
172
 
173
- # Save results to Salesforce SmartLog__c
174
  def save_to_salesforce(df, summary, anomalies, amc_reminders, insights):
175
  if sf is None:
176
- return "Salesforce connection not available."
 
177
  try:
178
  records = []
179
  current_date = datetime.now()
@@ -232,10 +232,8 @@ def save_to_salesforce(df, summary, anomalies, amc_reminders, insights):
232
  if records:
233
  sf.bulk.SmartLog__c.insert(records)
234
  logging.info(f"Saved {len(records)} records to Salesforce")
235
- return f"Saved {len(records)} records to Salesforce."
236
  except Exception as e:
237
  logging.error(f"Failed to save to Salesforce: {str(e)}")
238
- return f"Failed to save to Salesforce: {str(e)}"
239
 
240
  # Summarize logs
241
  def summarize_logs(df):
@@ -335,7 +333,7 @@ def create_usage_chart(df):
335
  logging.error(f"Failed to create usage chart: {str(e)}")
336
  return None
337
 
338
- # Generate PDF content (removed email_status since email is handled by Salesforce)
339
  def generate_pdf_content(summary, preview, anomalies, amc_reminders, insights):
340
  if not reportlab_available:
341
  return None
@@ -383,13 +381,13 @@ async def process_logs(file_obj):
383
  try:
384
  start_time = datetime.now()
385
  if not file_obj:
386
- return "No file uploaded.", "No data to preview.", None, "No anomalies detected.", "No AMC reminders.", "No insights generated.", None, "No Salesforce data saved.", "No report created."
387
 
388
  file_name = file_obj.name
389
  logging.info(f"Processing file: {file_name}")
390
 
391
  if not file_name.endswith(".csv"):
392
- return "Please upload a CSV file.", "", None, "", "", "", None, "", ""
393
 
394
  required_columns = ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
395
  dtypes = {
@@ -403,11 +401,11 @@ async def process_logs(file_obj):
403
  df = pd.read_csv(file_obj, dtype=dtypes)
404
  missing_columns = [col for col in required_columns if col not in df.columns]
405
  if missing_columns:
406
- return f"Missing columns: {missing_columns}", None, None, None, None, None, None, None, None
407
  df["timestamp"] = pd.to_datetime(df["timestamp"], errors='coerce')
408
  df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
409
  if df.empty:
410
- return "No data available.", None, None, None, None, None, None, None, None
411
 
412
  # Run tasks concurrently
413
  with ThreadPoolExecutor(max_workers=4) as executor:
@@ -424,7 +422,6 @@ async def process_logs(file_obj):
424
  amc_reminders = f"AMC Reminders\n{amc_reminders}"
425
  insights = f"Dashboard Insights (AI)\n{future_insights.result()}"
426
  chart = future_chart.result()
427
- report_result = future_reports.result()
428
 
429
  preview_lines = ["Step 2: Log Preview (First 5 Rows)"]
430
  for idx, row in df.head(5).iterrows():
@@ -436,15 +433,16 @@ async def process_logs(file_obj):
436
  )
437
  preview = "\n".join(preview_lines)
438
 
439
- salesforce_result = save_to_salesforce(df, summary, anomalies, amc_reminders, insights)
 
440
  pdf_file = generate_pdf_content(summary, preview, anomalies, amc_reminders, insights)
441
 
442
  elapsed_time = (datetime.now() - start_time).total_seconds()
443
- logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
444
- return summary, preview, chart, anomalies, amc_reminders, insights, pdf_file, salesforce_result, report_result
445
  except Exception as e:
446
  logging.error(f"Failed to process file: {str(e)}")
447
- return f"Error: {str(e)}", None, None, None, None, None, None, None, None
448
 
449
  # Gradio Interface
450
  try:
@@ -507,9 +505,7 @@ try:
507
  anomaly_output,
508
  amc_output,
509
  insights_output,
510
- pdf_output,
511
- salesforce_output,
512
- report_output
513
  ]
514
  )
515
 
 
1
  """
2
+ LabOps Log Analyzer Dashboard with CSV file upload, PDF generation, and backend Salesforce integration
3
  """
4
  import gradio as gr
5
  import pandas as pd
 
94
  'maintenance': 'Smart Log',
95
  'cell': 'Cell Analysis',
96
  'uv': 'UV Verification',
97
+ 'weight log': 'Smart Log'
98
  }
99
  }
100
 
 
119
  # Cache the folder ID at startup
120
  LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
121
 
122
+ # Salesforce report creation (runs in backend, result not displayed)
123
  def create_salesforce_reports(df):
124
  if sf is None:
125
+ logging.info("Salesforce connection not available for report creation.")
126
+ return
127
  if not LABOPS_REPORTS_FOLDER_ID:
128
+ logging.info("Cannot create reports: 'LabOps Reports' folder not found in Salesforce.")
129
+ return
130
 
131
  try:
132
  # Usage Report (Tabular Report for simplicity)
 
166
  amc_result = sf.restful('analytics/reports', method='POST', json=amc_report_metadata)
167
  amc_report_id = amc_result['id']
168
  logging.info(f"AMC Reminders Report created: {amc_report_id}")
 
 
169
  except Exception as e:
170
  logging.error(f"Failed to create Salesforce reports: {str(e)}")
 
171
 
172
+ # Save results to Salesforce SmartLog__c (runs in backend, result not displayed)
173
  def save_to_salesforce(df, summary, anomalies, amc_reminders, insights):
174
  if sf is None:
175
+ logging.info("Salesforce connection not available for saving records.")
176
+ return
177
  try:
178
  records = []
179
  current_date = datetime.now()
 
232
  if records:
233
  sf.bulk.SmartLog__c.insert(records)
234
  logging.info(f"Saved {len(records)} records to Salesforce")
 
235
  except Exception as e:
236
  logging.error(f"Failed to save to Salesforce: {str(e)}")
 
237
 
238
  # Summarize logs
239
  def summarize_logs(df):
 
333
  logging.error(f"Failed to create usage chart: {str(e)}")
334
  return None
335
 
336
+ # Generate PDF content
337
  def generate_pdf_content(summary, preview, anomalies, amc_reminders, insights):
338
  if not reportlab_available:
339
  return None
 
381
  try:
382
  start_time = datetime.now()
383
  if not file_obj:
384
+ return "No file uploaded.", "No data to preview.", None, "No anomalies detected.", "No AMC reminders.", "No insights generated.", None
385
 
386
  file_name = file_obj.name
387
  logging.info(f"Processing file: {file_name}")
388
 
389
  if not file_name.endswith(".csv"):
390
+ return "Please upload a CSV file.", "", None, "", "", "", None
391
 
392
  required_columns = ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
393
  dtypes = {
 
401
  df = pd.read_csv(file_obj, dtype=dtypes)
402
  missing_columns = [col for col in required_columns if col not in df.columns]
403
  if missing_columns:
404
+ return f"Missing columns: {missing_columns}", None, None, None, None, None, None
405
  df["timestamp"] = pd.to_datetime(df["timestamp"], errors='coerce')
406
  df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
407
  if df.empty:
408
+ return "No data available.", None, None, None, None, None, None
409
 
410
  # Run tasks concurrently
411
  with ThreadPoolExecutor(max_workers=4) as executor:
 
422
  amc_reminders = f"AMC Reminders\n{amc_reminders}"
423
  insights = f"Dashboard Insights (AI)\n{future_insights.result()}"
424
  chart = future_chart.result()
 
425
 
426
  preview_lines = ["Step 2: Log Preview (First 5 Rows)"]
427
  for idx, row in df.head(5).iterrows():
 
433
  )
434
  preview = "\n".join(preview_lines)
435
 
436
+ # Save to Salesforce in the backend
437
+ save_to_salesforce(df, summary, anomalies, amc_reminders, insights)
438
  pdf_file = generate_pdf_content(summary, preview, anomalies, amc_reminders, insights)
439
 
440
  elapsed_time = (datetime.now() - start_time).total_seconds()
441
+ logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
442
+ return summary, preview, chart, anomalies, amc_reminders, insights, pdf_file
443
  except Exception as e:
444
  logging.error(f"Failed to process file: {str(e)}")
445
+ return f"Error: {str(e)}", None, None, None, None, None, None
446
 
447
  # Gradio Interface
448
  try:
 
505
  anomaly_output,
506
  amc_output,
507
  insights_output,
508
+ pdf_output
 
 
509
  ]
510
  )
511