RathodHarish committed
Commit 6ad3673 · verified · 1 Parent(s): 0ee3423

Update app.py

Files changed (1)
  1. app.py +76 -111
app.py CHANGED
@@ -17,7 +17,8 @@ import functools
 # Configure logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 
-# Salesforce configuration
+# Salesforce configuration (Disabled for now)
+"""
 try:
     sf = Salesforce(
         username='multi-devicelabopsdashboard@sathkrutha.com',
@@ -29,6 +30,8 @@ try:
 except Exception as e:
     logging.error(f"Failed to connect to Salesforce: {str(e)}")
     sf = None
+"""
+sf = None  # Temporarily disable Salesforce
 
 # Try to import reportlab
 try:
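The connection block above is disabled by turning it into a module-level string literal and forcing `sf = None`. An environment flag would make the same toggle switchable without editing code; a minimal sketch, not part of this commit, assuming the client is simple-salesforce (`ENABLE_SALESFORCE` and the `SF_*` variables are hypothetical):

```python
import logging
import os

sf = None
if os.getenv("ENABLE_SALESFORCE", "0") == "1":  # hypothetical opt-in flag
    try:
        from simple_salesforce import Salesforce
        sf = Salesforce(
            username=os.environ["SF_USERNAME"],     # hypothetical env vars
            password=os.environ["SF_PASSWORD"],     # instead of hard-coded
            security_token=os.environ["SF_TOKEN"],  # credentials
        )
    except Exception as e:
        logging.error(f"Failed to connect to Salesforce: {e}")
        sf = None
```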
@@ -50,9 +53,9 @@ try:
         "summarization",
         model="t5-small",
         device=device,
-        max_length=50,
+        max_length=30,  # Reduced for faster inference
         min_length=10,
-        num_beams=2
+        num_beams=1  # Reduced for faster inference
     )
     logging.info(f"Hugging Face model preloaded on {'GPU' if device == 0 else 'CPU'}")
 except Exception as e:
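The pipeline change trades summary length for latency: `num_beams=1` switches beam search to greedy decoding and `max_length=30` caps the generation budget; both are standard `generate()` arguments that the `transformers` pipeline forwards at call time. A standalone sketch for measuring the effect on CPU (assumes `transformers` and `torch` are installed; `t5-small` downloads on first run):

```python
import time

from transformers import pipeline

summarizer = pipeline("summarization", model="t5-small", device=-1)  # -1 = CPU

text = "Maintenance logs: 12 devices. Most used: DEV-007."  # sample prompt
for beams in (2, 1):  # old setting (beam search) vs. new setting (greedy)
    start = time.time()
    out = summarizer(text, max_length=30, min_length=10, do_sample=False, num_beams=beams)
    print(f"num_beams={beams}: {time.time() - start:.2f}s -> {out[0]['summary_text']}")
```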
@@ -118,7 +121,11 @@ def get_folder_id(folder_name):
 
 LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
 
-# Salesforce report creation
+# Salesforce report creation (Disabled for now)
+def create_salesforce_reports(df):
+    logging.info("Salesforce report creation skipped for optimization")
+    return
+"""
 def create_salesforce_reports(df):
     if sf is None or not LABOPS_REPORTS_FOLDER_ID:
         return
@@ -155,8 +162,13 @@ def create_salesforce_reports(df):
         logging.info("Salesforce reports created")
     except Exception as e:
         logging.error(f"Failed to create Salesforce reports: {str(e)}")
+"""
 
-# Save to Salesforce
+# Save to Salesforce (Disabled for now)
+def save_to_salesforce(df, reminders_df):
+    logging.info("Salesforce save operation skipped for optimization")
+    return
+"""
 def save_to_salesforce(df, reminders_df):
     if sf is None:
         logging.error("No Salesforce connection available")
@@ -217,15 +229,31 @@ def save_to_salesforce(df, reminders_df):
         logging.warning("No records to save to Salesforce")
     except Exception as e:
         logging.error(f"Failed to save to Salesforce: {str(e)}")
+"""
+
+# Cache summarization results
+def cache_summary(func):
+    @functools.wraps(func)
+    def wrapper(df, *args, **kwargs):
+        cache_key = f"{id(df)}_{func.__name__}"
+        if not hasattr(wrapper, 'cache'):
+            wrapper.cache = {}
+        if cache_key in wrapper.cache:
+            return wrapper.cache[cache_key]
+        result = func(df, *args, **kwargs)
+        wrapper.cache[cache_key] = result
+        return result
+    return wrapper
 
 # Summarize logs
+@cache_summary
 def summarize_logs(df):
     start_time = time.time()
     try:
         total_devices = df["device_id"].nunique()
         most_used = df.groupby("device_id")["usage_hours"].sum().idxmax() if not df.empty else "N/A"
         prompt = f"Maintenance logs: {total_devices} devices. Most used: {most_used}."
-        summary = summarizer(prompt, max_length=50, min_length=10, do_sample=False)[0]["summary_text"]
+        summary = summarizer(prompt, max_length=30, min_length=10, do_sample=False)[0]["summary_text"]
         logging.info(f"Summary generation took {time.time() - start_time:.2f} seconds")
         return summary
     except Exception as e:
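One caveat on the new `cache_summary`: it keys entries on `id(df)`, which is only unique while that exact DataFrame object is alive; a re-uploaded but identical CSV misses the cache, and a recycled id after garbage collection could return stale text. A content-hash key is a sturdier variant (sketch, not from this commit):

```python
import functools

import pandas as pd

def cache_summary_by_content(func):
    @functools.wraps(func)
    def wrapper(df, *args, **kwargs):
        # Key on the data itself rather than on the object's identity.
        cache_key = (pd.util.hash_pandas_object(df, index=True).sum(), func.__name__)
        if cache_key not in wrapper.cache:
            wrapper.cache[cache_key] = func(df, *args, **kwargs)
        return wrapper.cache[cache_key]
    wrapper.cache = {}
    return wrapper

@cache_summary_by_content
def describe(df):
    return f"{df['device_id'].nunique()} devices"

df = pd.DataFrame({"device_id": ["A", "B", "A"]})
print(describe(df))         # computed: "2 devices"
print(describe(df.copy()))  # cache hit: same content, different id()
```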
@@ -239,9 +267,9 @@ def detect_anomalies(df):
     if "usage_hours" not in df.columns or "downtime" not in df.columns:
         return "Anomaly detection requires 'usage_hours' and 'downtime' columns.", pd.DataFrame()
     features = df[["usage_hours", "downtime"]].fillna(0)
-    if len(features) > 200:
-        features = features.sample(n=200, random_state=42)
-    iso_forest = IsolationForest(contamination=0.1, random_state=42, n_estimators=50)
+    if len(features) > 100:  # Further reduced sample size
+        features = features.sample(n=100, random_state=42)
+    iso_forest = IsolationForest(contamination=0.1, random_state=42, n_estimators=30)  # Further reduced n_estimators
     df["anomaly"] = iso_forest.fit_predict(features)
     anomalies = df[df["anomaly"] == -1][["device_id", "usage_hours", "downtime", "timestamp"]]
     if anomalies.empty:
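Worth flagging in this hunk (true of the old 200-row version too): when sampling kicks in, `fit_predict(features)` returns one label per *sampled* row, so assigning it to `df["anomaly"]` on the full frame raises a length mismatch for datasets above the cutoff. A fit-on-sample, predict-on-everything sketch avoids that:

```python
import pandas as pd
from sklearn.ensemble import IsolationForest

df = pd.DataFrame({"usage_hours": range(500), "downtime": [0] * 499 + [500]})
features = df[["usage_hours", "downtime"]].fillna(0)

sample = features.sample(n=100, random_state=42) if len(features) > 100 else features
iso_forest = IsolationForest(contamination=0.1, random_state=42, n_estimators=30)
iso_forest.fit(sample)                         # train on the subsample only
df["anomaly"] = iso_forest.predict(features)   # one label per row (-1 = anomaly)
print((df["anomaly"] == -1).sum(), "rows flagged")
```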
@@ -273,13 +301,14 @@ def check_amc_reminders(df, current_date):
         return f"AMC reminder generation failed: {str(e)}", pd.DataFrame()
 
 # Dashboard insights
+@cache_summary
 def generate_dashboard_insights(df):
     start_time = time.time()
     try:
         total_devices = df["device_id"].nunique()
         avg_usage = df["usage_hours"].mean() if "usage_hours" in df.columns else 0
         prompt = f"Insights: {total_devices} devices, avg usage {avg_usage:.2f} hours."
-        insights = summarizer(prompt, max_length=50, min_length=10, do_sample=False)[0]["summary_text"]
+        insights = summarizer(prompt, max_length=30, min_length=10, do_sample=False)[0]["summary_text"]
         logging.info(f"Insights generation took {time.time() - start_time:.2f} seconds")
         return insights
     except Exception as e:
@@ -300,7 +329,7 @@ def cache_dataframe(func):
         return result
     return wrapper
 
-# Create usage chart
+# Create usage chart (Only this chart will be generated to save time)
 @cache_dataframe
 def create_usage_chart(df):
     try:
@@ -322,90 +351,18 @@ def create_usage_chart(df):
         logging.error(f"Failed to create usage chart: {str(e)}")
         return None
 
-# Create downtime chart
-@cache_dataframe
+# Skipped other chart functions to save time
 def create_downtime_chart(df):
-    try:
-        downtime_data = df.groupby("device_id")["downtime"].sum().reset_index()
-        if len(downtime_data) > 5:
-            downtime_data = downtime_data.nlargest(5, "downtime")
-        fig = px.bar(
-            downtime_data,
-            x="device_id",
-            y="downtime",
-            title="Downtime per Device",
-            labels={"device_id": "Device ID", "downtime": "Downtime (Hours)"}
-        )
-        fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
-        return fig
-    except Exception as e:
-        logging.error(f"Failed to create downtime chart: {str(e)}")
-        return None
+    return None
 
-# Create daily log trends chart
-@cache_dataframe
 def create_daily_log_trends_chart(df):
-    try:
-        df['date'] = df['timestamp'].dt.date
-        daily_logs = df.groupby('date').size().reset_index(name='log_count')
-        fig = px.line(
-            daily_logs,
-            x='date',
-            y='log_count',
-            title="Daily Log Trends",
-            labels={"date": "Date", "log_count": "Number of Logs"}
-        )
-        fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
-        return fig
-    except Exception as e:
-        logging.error(f"Failed to create daily log trends chart: {str(e)}")
-        return None
+    return None
 
-# Create weekly uptime chart
-@cache_dataframe
 def create_weekly_uptime_chart(df):
-    try:
-        df['week'] = df['timestamp'].dt.isocalendar().week
-        df['year'] = df['timestamp'].dt.year
-        weekly_data = df.groupby(['year', 'week']).agg({
-            'usage_hours': 'sum',
-            'downtime': 'sum'
-        }).reset_index()
-        weekly_data['uptime_percent'] = (weekly_data['usage_hours'] / (weekly_data['usage_hours'] + weekly_data['downtime'])) * 100
-        weekly_data['year_week'] = weekly_data['year'].astype(str) + '-W' + weekly_data['week'].astype(str)
-        fig = px.bar(
-            weekly_data,
-            x='year_week',
-            y='uptime_percent',
-            title="Weekly Uptime Percentage",
-            labels={"year_week": "Year-Week", "uptime_percent": "Uptime %"}
-        )
-        fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
-        return fig
-    except Exception as e:
-        logging.error(f"Failed to create weekly uptime chart: {str(e)}")
-        return None
+    return None
 
-# Create anomaly alerts chart
-@cache_dataframe
 def create_anomaly_alerts_chart(anomalies_df):
-    try:
-        if anomalies_df.empty:
-            return None
-        anomalies_df['date'] = anomalies_df['timestamp'].dt.date
-        anomaly_counts = anomalies_df.groupby('date').size().reset_index(name='anomaly_count')
-        fig = px.scatter(
-            anomaly_counts,
-            x='date',
-            y='anomaly_count',
-            title="Anomaly Alerts Over Time",
-            labels={"date": "Date", "anomaly_count": "Number of Anomalies"}
-        )
-        fig.update_layout(title_font_size=16, margin=dict(l=20, r=20, t=40, b=20))
-        return fig
-    except Exception as e:
-        logging.error(f"Failed to create anomaly alerts chart: {str(e)}")
-        return None
+    return None
 
 # Generate device cards
 def generate_device_cards(df):
@@ -440,8 +397,8 @@ def generate_device_cards(df):
         logging.error(f"Failed to generate device cards: {str(e)}")
         return f'<p>Error generating device cards: {str(e)}</p>'
 
-# Generate PDF content
-def generate_pdf_content(summary, preview_df, anomalies, amc_reminders, insights, device_cards_html, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart):
+# Generate PDF content (Simplified to reduce time)
+def generate_pdf_content(summary, preview_df, anomalies, amc_reminders, insights, device_cards_html):
     if not reportlab_available:
         return None
     try:
@@ -500,9 +457,6 @@ def generate_pdf_content(summary, preview_df, anomalies, amc_reminders, insights
         story.append(safe_paragraph(insights, styles['Normal']))
         story.append(Spacer(1, 12))
 
-        story.append(Paragraph("Charts", styles['Heading2']))
-        story.append(Paragraph("[Chart placeholders - see dashboard for visuals]", styles['Normal']))
-
         doc.build(story)
         logging.info(f"PDF generated at {pdf_path}")
         return pdf_path
@@ -511,8 +465,9 @@ def generate_pdf_content(summary, preview_df, anomalies, amc_reminders, insights
         return None
 
 # Main processing function
-async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_range, last_modified_state):
+async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_range, last_modified_state, progress=gr.Progress()):
     start_time = time.time()
+    progress(0, desc="Starting processing...")
     try:
         if not file_obj:
             return "No file uploaded.", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, "No anomalies detected.", "No AMC reminders.", "No insights generated.", None, last_modified_state
@@ -526,6 +481,7 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
         if not file_path.endswith(".csv"):
             return "Please upload a CSV file.", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, last_modified_state
 
+        progress(0.1, desc="Loading CSV file...")
         required_columns = ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
         dtypes = {
             "device_id": "string",
@@ -536,14 +492,15 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
             "amc_date": "string"
         }
         df = pd.read_csv(file_path, dtype=dtypes, usecols=required_columns)
-        if len(df) > 10000:  # Early exit for large datasets
-            df = df.sample(n=10000, random_state=42)
-            logging.warning("Dataset too large, sampled to 10,000 rows")
+        if len(df) > 5000:  # More aggressive sampling
+            df = df.sample(n=5000, random_state=42)
+            logging.warning("Dataset too large, sampled to 5,000 rows")
 
         missing_columns = [col for col in required_columns if col not in df.columns]
         if missing_columns:
             return f"Missing columns: {missing_columns}", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, None, None, None, None, last_modified_state
 
+        progress(0.2, desc="Processing timestamps...")
         df["timestamp"] = pd.to_datetime(df["timestamp"], errors='coerce')
         df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
         if df["timestamp"].dt.tz is None:
@@ -552,7 +509,7 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
             return "No data available.", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, None, None, None, None, last_modified_state
 
         # Apply filters
-        filtered_df = df.copy()
+        filtered_df = df
         if lab_site_filter and lab_site_filter != 'All' and 'lab_site' in filtered_df.columns:
            filtered_df = filtered_df[filtered_df['lab_site'] == lab_site_filter]
         if equipment_type_filter and equipment_type_filter != 'All' and 'equipment_type' in filtered_df.columns:
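Dropping `.copy()` avoids duplicating the whole frame in memory, and each boolean-mask filter below returns a new DataFrame anyway, so the uploaded `df` stays intact. (One subtlety: if no filter applies, `filtered_df` *is* `df`, so `detect_anomalies` adding its `anomaly` column also touches the original; harmless here since `df` is not reused.) For example:

```python
import pandas as pd

df = pd.DataFrame({"lab_site": ["A", "B"], "usage_hours": [1.0, 2.0]})
filtered_df = df                                           # alias: nothing copied yet
filtered_df = filtered_df[filtered_df["lab_site"] == "A"]  # mask -> brand-new frame
print(len(df), len(filtered_df))                           # 2 1 -- original intact
```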
@@ -568,43 +525,51 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
             return "No data after applying filters.", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, None, None, None, None, last_modified_state
 
         # Generate table for preview
+        progress(0.3, desc="Generating log preview...")
         preview_df = filtered_df[['device_id', 'log_type', 'status', 'timestamp', 'usage_hours', 'downtime', 'amc_date']].head(5)
         preview_html = preview_df.to_html(index=False, classes='table table-striped', border=0)
 
         # Run tasks concurrently
-        with ThreadPoolExecutor(max_workers=8) as executor:
+        progress(0.4, desc="Running analysis tasks...")
+        with ThreadPoolExecutor(max_workers=4) as executor:  # Reduced workers to avoid overhead
             future_summary = executor.submit(summarize_logs, filtered_df)
             future_anomalies = executor.submit(detect_anomalies, filtered_df)
             future_amc = executor.submit(check_amc_reminders, filtered_df, datetime.now())
             future_insights = executor.submit(generate_dashboard_insights, filtered_df)
             future_usage_chart = executor.submit(create_usage_chart, filtered_df)
-            future_downtime_chart = executor.submit(create_downtime_chart, filtered_df)
-            future_daily_log_chart = executor.submit(create_daily_log_trends_chart, filtered_df)
-            future_weekly_uptime_chart = executor.submit(create_weekly_uptime_chart, filtered_df)
             future_device_cards = executor.submit(generate_device_cards, filtered_df)
-            future_reports = executor.submit(create_salesforce_reports, filtered_df)
 
+        progress(0.5, desc="Collecting summary results...")
         summary = f"Step 1: Summary Report\n{future_summary.result()}"
+        progress(0.6, desc="Collecting anomaly detection results...")
         anomalies, anomalies_df = future_anomalies.result()
         anomalies = f"Anomaly Detection\n{anomalies}"
+        progress(0.7, desc="Collecting AMC reminders...")
         amc_reminders, reminders_df = future_amc.result()
         amc_reminders = f"AMC Reminders\n{amc_reminders}"
+        progress(0.8, desc="Collecting insights...")
         insights = f"Dashboard Insights (AI)\n{future_insights.result()}"
+        progress(0.9, desc="Generating chart...")
         usage_chart = future_usage_chart.result()
-        downtime_chart = future_downtime_chart.result()
-        daily_log_chart = future_daily_log_chart.result()
-        weekly_uptime_chart = future_weekly_uptime_chart.result()
-        anomaly_alerts_chart = create_anomaly_alerts_chart(anomalies_df)
+        downtime_chart = None
+        daily_log_chart = None
+        weekly_uptime_chart = None
+        anomaly_alerts_chart = None
         device_cards = future_device_cards.result()
 
-        save_to_salesforce(filtered_df, reminders_df)
-        pdf_file = generate_pdf_content(summary, preview_df, anomalies, amc_reminders, insights, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart)
+        # Skip Salesforce operations
+        # save_to_salesforce(filtered_df, reminders_df)
+        # create_salesforce_reports(filtered_df)
+
+        progress(0.95, desc="Generating PDF...")
+        pdf_file = generate_pdf_content(summary, preview_df, anomalies, amc_reminders, insights, device_cards)
 
         elapsed_time = time.time() - start_time
         logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
-        if elapsed_time > 30:
-            logging.warning(f"Processing time exceeded 30 seconds: {elapsed_time:.2f} seconds")
+        if elapsed_time > 10:
+            logging.warning(f"Processing time exceeded 10 seconds: {elapsed_time:.2f} seconds")
 
+        progress(1.0, desc="Processing complete!")
         return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights, pdf_file, current_modified_time)
     except Exception as e:
         logging.error(f"Failed to process file: {str(e)}")
 