lavanya121 committed on
Commit
ecaef26
·
verified ·
1 Parent(s): cd5689e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -10
app.py CHANGED
@@ -54,7 +54,7 @@ def detect_anomalies(df):
54
  logging.error(f"Anomaly detection failed: {str(e)}")
55
  return f"Anomaly detection failed: {str(e)}", pd.DataFrame()
56
 
57
- # Updated AMC reminders function
58
  def check_amc_reminders(df, current_date):
59
  try:
60
  if "device_id" not in df.columns or "amc_date" not in df.columns:
@@ -65,8 +65,7 @@ def check_amc_reminders(df, current_date):
65
  reminders = df[(df["days_to_amc"] >= 0) & (df["days_to_amc"] <= 30)][["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]]
66
  if reminders.empty:
67
  return "No AMC reminders due within the next 30 days.", reminders
68
- reminder_text = "\n".join([f"- Device ID: {row['device_id']}, AMC Date: {row['amc_date'].strftime('%Y-%m-%d')}" for _, row in reminders.iterrows()])
69
- return reminder_text, reminders
70
  except Exception as e:
71
  logging.error(f"AMC reminder generation failed: {str(e)}")
72
  return f"AMC reminder generation failed: {str(e)}", pd.DataFrame()
@@ -354,7 +353,7 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
354
  else:
355
  df = cached_df_state
356
 
357
- # Apply filters for analyses other than AMC reminders
358
  filtered_df = df.copy()
359
  if lab_site_filter and lab_site_filter != 'All' and 'lab_site' in filtered_df.columns:
360
  filtered_df = filtered_df[filtered_df['lab_site'] == lab_site_filter]
@@ -367,10 +366,12 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
367
  end_date = today + pd.Timedelta(days=days_end) + pd.Timedelta(days=1) - pd.Timedelta(seconds=1)
368
  start_date = start_date.tz_convert('Asia/Kolkata') if start_date.tzinfo else start_date.tz_localize('Asia/Kolkata')
369
  end_date = end_date.tz_convert('Asia/Kolkata') if end_date.tzinfo else end_date.tz_localize('Asia/Kolkata')
370
- logging.info(f"Date range filter: start_date={start_date}, end_date={end_date}")
371
  logging.info(f"Before date filter: {len(filtered_df)} rows")
372
  filtered_df = filtered_df[(filtered_df['timestamp'] >= start_date) & (filtered_df['timestamp'] <= end_date)]
373
  logging.info(f"After date filter: {len(filtered_df)} rows")
 
 
374
 
375
  if filtered_df.empty:
376
  return "No data after applying filters.", "<p>No data after filters.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, df, current_modified_time
@@ -382,8 +383,7 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
382
  # Run critical tasks concurrently
383
  with ThreadPoolExecutor(max_workers=2) as executor:
384
  future_anomalies = executor.submit(detect_anomalies, filtered_df)
385
- # Use the unfiltered df for AMC reminders to avoid timestamp-based filtering
386
- future_amc = executor.submit(check_amc_reminders, df, datetime.now())
387
 
388
  summary = f"Step 1: Summary Report\n{summarize_logs(filtered_df)}"
389
  anomalies, anomalies_df = future_anomalies.result()
@@ -405,7 +405,7 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
405
  if elapsed_time > 3:
406
  logging.warning(f"Processing time exceeded 3 seconds: {elapsed_time:.2f} seconds")
407
 
408
- return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights, None, df, current_modified_time)
409
  except Exception as e:
410
  logging.error(f"Failed to process file: {str(e)}")
411
  return f"Error: {str(e)}", "<p>Error processing data.</p>", None, '<p>Error processing data.</p>', None, None, None, None, "", "", "", None, cached_df_state, last_modified_state
@@ -454,7 +454,7 @@ try:
454
  .table tr:nth-child(even) {background-color: #f9f9f9;}
455
  """) as iface:
456
  gr.Markdown("<h1>LabOps Log Analyzer Dashboard</h1>")
457
- gr.Markdown("Upload a CSV file to analyze. Click 'Analyze' to refresh the dashboard. Use 'Export PDF' for report download.")
458
 
459
  last_modified_state = gr.State(value=None)
460
  current_file_state = gr.State(value=None)
@@ -467,7 +467,7 @@ try:
467
  gr.Markdown("### Filters")
468
  lab_site_filter = gr.Dropdown(label="Lab Site", choices=['All'], value='All', interactive=True)
469
  equipment_type_filter = gr.Dropdown(label="Equipment Type", choices=['All'], value='All', interactive=True)
470
- date_range_filter = gr.Slider(label="Date Range (Days from Today, e.g., -7 to 0 means last 7 days)", minimum=-365, maximum=0, step=1, value=[-7, 0])
471
  submit_button = gr.Button("Analyze", variant="primary")
472
  pdf_button = gr.Button("Export PDF", variant="secondary")
473
 
 
54
  logging.error(f"Anomaly detection failed: {str(e)}")
55
  return f"Anomaly detection failed: {str(e)}", pd.DataFrame()
56
 
57
+ # AMC reminders
58
  def check_amc_reminders(df, current_date):
59
  try:
60
  if "device_id" not in df.columns or "amc_date" not in df.columns:
 
65
  reminders = df[(df["days_to_amc"] >= 0) & (df["days_to_amc"] <= 30)][["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]]
66
  if reminders.empty:
67
  return "No AMC reminders due within the next 30 days.", reminders
68
+ return "\n".join([f"- Device ID: {row['device_id']}, AMC Date: {row['amc_date']}" for _, row in reminders.head(5).iterrows()]), reminders
 
69
  except Exception as e:
70
  logging.error(f"AMC reminder generation failed: {str(e)}")
71
  return f"AMC reminder generation failed: {str(e)}", pd.DataFrame()
 
353
  else:
354
  df = cached_df_state
355
 
356
+ # Apply filters
357
  filtered_df = df.copy()
358
  if lab_site_filter and lab_site_filter != 'All' and 'lab_site' in filtered_df.columns:
359
  filtered_df = filtered_df[filtered_df['lab_site'] == lab_site_filter]
 
366
  end_date = today + pd.Timedelta(days=days_end) + pd.Timedelta(days=1) - pd.Timedelta(seconds=1)
367
  start_date = start_date.tz_convert('Asia/Kolkata') if start_date.tzinfo else start_date.tz_localize('Asia/Kolkata')
368
  end_date = end_date.tz_convert('Asia/Kolkata') if end_date.tzinfo else end_date.tz_localize('Asia/Kolkata')
369
+ logging.info(f"Date range filter applied: start_date={start_date}, end_date={end_date}")
370
  logging.info(f"Before date filter: {len(filtered_df)} rows")
371
  filtered_df = filtered_df[(filtered_df['timestamp'] >= start_date) & (filtered_df['timestamp'] <= end_date)]
372
  logging.info(f"After date filter: {len(filtered_df)} rows")
373
+ if days_start > days_end:
374
+ logging.warning("Start date is after end date; results may be empty or unexpected.")
375
 
376
  if filtered_df.empty:
377
  return "No data after applying filters.", "<p>No data after filters.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, df, current_modified_time
 
383
  # Run critical tasks concurrently
384
  with ThreadPoolExecutor(max_workers=2) as executor:
385
  future_anomalies = executor.submit(detect_anomalies, filtered_df)
386
+ future_amc = executor.submit(check_amc_reminders, filtered_df, datetime.now())
 
387
 
388
  summary = f"Step 1: Summary Report\n{summarize_logs(filtered_df)}"
389
  anomalies, anomalies_df = future_anomalies.result()
 
405
  if elapsed_time > 3:
406
  logging.warning(f"Processing time exceeded 3 seconds: {elapsed_time:.2f} seconds")
407
 
408
+ return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_output, anomaly_alerts_output, downtime_chart, anomalies, amc_reminders, insights, None, df, current_modified_time)
409
  except Exception as e:
410
  logging.error(f"Failed to process file: {str(e)}")
411
  return f"Error: {str(e)}", "<p>Error processing data.</p>", None, '<p>Error processing data.</p>', None, None, None, None, "", "", "", None, cached_df_state, last_modified_state
 
454
  .table tr:nth-child(even) {background-color: #f9f9f9;}
455
  """) as iface:
456
  gr.Markdown("<h1>LabOps Log Analyzer Dashboard</h1>")
457
+ gr.Markdown("Upload a CSV file to analyze. Click 'Analyze' to refresh the dashboard. Use 'Export PDF' for report download. Date Range accepts [start, end] days (e.g., [-7, -3] for last 7 to 3 days ago).")
458
 
459
  last_modified_state = gr.State(value=None)
460
  current_file_state = gr.State(value=None)
 
467
  gr.Markdown("### Filters")
468
  lab_site_filter = gr.Dropdown(label="Lab Site", choices=['All'], value='All', interactive=True)
469
  equipment_type_filter = gr.Dropdown(label="Equipment Type", choices=['All'], value='All', interactive=True)
470
+ date_range_filter = gr.Slider(label="Date Range (Days from Today, e.g., [-7, -3] means 7 to 3 days ago)", minimum=-365, maximum=0, step=1, value=[-7, 0], interactive=True)
471
  submit_button = gr.Button("Analyze", variant="primary")
472
  pdf_button = gr.Button("Export PDF", variant="secondary")
473