RathodHarish committed on
Commit c26f5ce · verified · 1 parent: b8aab66

Update app.py

Files changed (1): app.py +40 -45
app.py CHANGED
@@ -231,12 +231,12 @@ def generate_device_cards(df):
         health_color = {'Healthy': 'green', 'Unhealthy': 'red', 'Warning': 'orange', 'Unknown': 'gray'}.get(row['health'], 'gray')
         timestamp_str = str(row['timestamp']) if pd.notna(row['timestamp']) else 'Unknown'
         cards_html += f"""
-        <div style="border: 1px solid #e0e0e0; padding: 10px; border-radius: 5px; width: 200px;">
-            <h4>Device: {row['device_id']}</h4>
-            <p><b>Health:</b> <span style="color: {health_color}">{row['health']}</span></p>
-            <p><b>Usage Count:</b> {row['count']}</p>
-            <p><b>Last Log:</b> {timestamp_str}</p>
-        </div>
+        <div style="border: 1px solid #e0e0e0; padding: 10px; border-radius: 5px; width: 200px;">
+            <h4>Device: {row['device_id']}</h4>
+            <p><b>Health:</b> <span style="color: {health_color}">{row['health']}</span></p>
+            <p><b>Usage Count:</b> {row['count']}</p>
+            <p><b>Last Log:</b> {timestamp_str}</p>
+        </div>
         """
     cards_html += '</div>'
     return cards_html
@@ -314,21 +314,21 @@ def generate_pdf_content(summary, preview_df, anomalies, amc_reminders, insights
         logging.error(f"Failed to generate PDF: {str(e)}")
         return None
 
-# Main processing function (Updated)
-async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_range, last_modified_state, cached_df_state, cached_filtered_df_state):
+# Main processing function
+async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_range, cached_df_state, last_modified_state):
     start_time = time.time()
     try:
         if not file_obj:
-            return "No file uploaded.", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, "No anomalies detected.", "No AMC reminders.", "No insights generated.", None, last_modified_state, cached_df_state, None
+            return "No file uploaded.", "<p>No data available.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "No anomalies detected.", "No AMC reminders.", "No insights generated.", None, cached_df_state, last_modified_state
 
         file_path = file_obj.name
         current_modified_time = os.path.getmtime(file_path)
 
-        # Load or use cached original dataframe
+        # Read file only if it's new or modified
         if cached_df_state is None or current_modified_time != last_modified_state:
-            logging.info(f"Processing file: {file_path}")
+            logging.info(f"Processing new or modified file: {file_path}")
             if not file_path.endswith(".csv"):
-                return "Please upload a CSV file.", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, last_modified_state, cached_df_state, None
+                return "Please upload a CSV file.", "<p>Invalid file format.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, cached_df_state, last_modified_state
 
             required_columns = ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
             dtypes = {
@@ -342,19 +342,18 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
             df = pd.read_csv(file_path, dtype=dtypes)
             missing_columns = [col for col in required_columns if col not in df.columns]
             if missing_columns:
-                return f"Missing columns: {missing_columns}", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, None, None, None, None, last_modified_state, cached_df_state, None
+                return f"Missing columns: {missing_columns}", "<p>Missing required columns.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, cached_df_state, last_modified_state
 
             df["timestamp"] = pd.to_datetime(df["timestamp"], errors='coerce')
             df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
             if df["timestamp"].dt.tz is None:
                 df["timestamp"] = df["timestamp"].dt.tz_localize('UTC').dt.tz_convert('Asia/Kolkata')
             if df.empty:
-                return "No data available.", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, None, None, None, None, last_modified_state, df, None
-            cached_df_state = df
+                return "No data available.", "<p>No data available.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, df, current_modified_time
         else:
             df = cached_df_state
 
-        # Always apply current filters to the original dataframe
+        # Apply filters
         filtered_df = df.copy()
         if lab_site_filter and lab_site_filter != 'All' and 'lab_site' in filtered_df.columns:
             filtered_df = filtered_df[filtered_df['lab_site'] == lab_site_filter]
@@ -362,37 +361,34 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
             filtered_df = filtered_df[filtered_df['equipment_type'] == equipment_type_filter]
         if date_range and len(date_range) == 2:
             days_start, days_end = date_range
-            today = pd.to_datetime(datetime.now().date()).tz_localize('Asia/Kolkata')
+            today = pd.to_datetime(datetime.now()).tz_localize('Asia/Kolkata')
             start_date = today + pd.Timedelta(days=days_start)
             end_date = today + pd.Timedelta(days=days_end) + pd.Timedelta(days=1) - pd.Timedelta(seconds=1)
+            start_date = start_date.tz_convert('Asia/Kolkata') if start_date.tzinfo else start_date.tz_localize('Asia/Kolkata')
+            end_date = end_date.tz_convert('Asia/Kolkata') if end_date.tzinfo else end_date.tz_localize('Asia/Kolkata')
+            logging.info(f"Date range filter: start_date={start_date}, end_date={end_date}")
+            logging.info(f"Before date filter: {len(filtered_df)} rows")
             filtered_df = filtered_df[(filtered_df['timestamp'] >= start_date) & (filtered_df['timestamp'] <= end_date)]
+            logging.info(f"After date filter: {len(filtered_df)} rows")
 
         if filtered_df.empty:
-            return "No data after applying filters.", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, None, None, None, None, last_modified_state, cached_df_state, None
+            return "No data after applying filters.", "<p>No data after filters.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, df, current_modified_time
 
         # Generate table for preview
         preview_df = filtered_df[['device_id', 'log_type', 'status', 'timestamp', 'usage_hours', 'downtime', 'amc_date']].head(5)
         preview_html = preview_df.to_html(index=False, classes='table table-striped', border=0)
 
         # Run critical tasks concurrently
-        try:
-            with ThreadPoolExecutor(max_workers=2) as executor:
-                future_anomalies = executor.submit(detect_anomalies, filtered_df)
-                future_amc = executor.submit(check_amc_reminders, filtered_df, datetime.now())
-
-            summary = f"Step 1: Summary Report\n{summarize_logs(filtered_df)}"
-            anomalies, anomalies_df = future_anomalies.result()
-            anomalies = f"Anomaly Detection\n{anomalies}"
-            amc_reminders, reminders_df = future_amc.result()
-            amc_reminders = f"AMC Reminders\n{amc_reminders}"
-            insights = f"Dashboard Insights\n{generate_dashboard_insights(filtered_df)}"
-        except Exception as e:
-            logging.error(f"Concurrent task execution failed: {str(e)}")
-            summary = "Failed to generate summary due to processing error."
-            anomalies = "Anomaly detection failed due to processing error."
-            amc_reminders = "AMC reminders failed due to processing error."
-            insights = "Insights generation failed due to processing error."
-            anomalies_df = pd.DataFrame()
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            future_anomalies = executor.submit(detect_anomalies, filtered_df)
+            future_amc = executor.submit(check_amc_reminders, filtered_df, datetime.now())
+
+        summary = f"Step 1: Summary Report\n{summarize_logs(filtered_df)}"
+        anomalies, anomalies_df = future_anomalies.result()
+        anomalies = f"Anomaly Detection\n{anomalies}"
+        amc_reminders, reminders_df = future_amc.result()
+        amc_reminders = f"AMC Reminders\n{amc_reminders}"
+        insights = f"Dashboard Insights\n{generate_dashboard_insights(filtered_df)}"
 
         # Generate charts sequentially
         usage_chart = create_usage_chart(filtered_df)
@@ -407,10 +403,10 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
         if elapsed_time > 3:
             logging.warning(f"Processing time exceeded 3 seconds: {elapsed_time:.2f} seconds")
 
-        return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights, None, current_modified_time, cached_df_state, filtered_df)
+        return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights, None, df, current_modified_time)
     except Exception as e:
         logging.error(f"Failed to process file: {str(e)}")
-        return f"Error: {str(e)}", pd.DataFrame(), None, '<p>Error processing data.</p>', None, None, None, None, None, None, None, None, last_modified_state, cached_df_state, None
+        return f"Error: {str(e)}", "<p>Error processing data.</p>", None, '<p>Error processing data.</p>', None, None, None, None, "", "", "", None, cached_df_state, last_modified_state
 
 # Generate PDF separately
 async def generate_pdf(summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights):
@@ -461,7 +457,6 @@ try:
     last_modified_state = gr.State(value=None)
     current_file_state = gr.State(value=None)
     cached_df_state = gr.State(value=None)
-    cached_filtered_df_state = gr.State(value=None)
 
     with gr.Row():
         with gr.Column(scale=1):
@@ -470,9 +465,9 @@ try:
             gr.Markdown("### Filters")
             lab_site_filter = gr.Dropdown(label="Lab Site", choices=['All'], value='All', interactive=True)
             equipment_type_filter = gr.Dropdown(label="Equipment Type", choices=['All'], value='All', interactive=True)
-            date_range_filter = gr.Slider(label="Date Range (Days from Today)", minimum=-365, maximum=0, step=1, value=[-30, 0])
-            submit_button = gr.Button("Analyze", variant="primary")
-            pdf_button = gr.Button("Export PDF", variant="secondary")
+            date_range_filter = gr.Slider(label="Date Range (Days from Today, e.g., -7 to 0 means last 7 days)", minimum=-365, maximum=0, step=1, value=[-7, 0])
+            submit_button = gr.Button("Analyze", variant="primary")
+            pdf_button = gr.Button("Export PDF", variant="secondary")
 
         with gr.Column(scale=2):
             with gr.Group(elem_classes="dashboard-container"):
@@ -520,8 +515,8 @@ try:
 
     submit_button.click(
        fn=process_logs,
-        inputs=[file_input, lab_site_filter, equipment_type_filter, date_range_filter, last_modified_state, cached_df_state, cached_filtered_df_state],
-        outputs=[summary_output, preview_output, usage_chart_output, device_cards_output, daily_log_trends_output, weekly_uptime_output, anomaly_alerts_output, downtime_chart_output, anomaly_output, amc_output, insights_output, pdf_output, last_modified_state, cached_df_state, cached_filtered_df_state]
+        inputs=[file_input, lab_site_filter, equipment_type_filter, date_range_filter, cached_df_state, last_modified_state],
+        outputs=[summary_output, preview_output, usage_chart_output, device_cards_output, daily_log_trends_output, weekly_uptime_output, anomaly_alerts_output, downtime_chart_output, anomaly_output, amc_output, insights_output, pdf_output, cached_df_state, last_modified_state]
     )
 
     pdf_button.click(
@@ -530,7 +525,7 @@ try:
         outputs=[pdf_output]
     )
 
-    logging.info("Gradio interface initialized successfully")
+    logging.info("Gradio interface initialized successfully")
 except Exception as e:
     logging.error(f"Failed to initialize Gradio interface: {str(e)}")
     raise e
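The core of this change is the caching contract in process_logs: the CSV is re-read only when the file's modification time differs from the value held in Gradio state, and every return path now hands the DataFrame and mtime back so cached_df_state and last_modified_state stay in sync. A minimal sketch of that pattern in isolation (the load_logs helper and its column name are illustrative, not part of app.py):

import os
import pandas as pd

def load_logs(file_path, cached_df=None, last_mtime=None):
    """Re-read the CSV only when its mtime changes; otherwise reuse the cache.

    Returns (df, mtime) so the caller can store both, mirroring how
    process_logs returns cached_df_state and last_modified_state to Gradio.
    """
    current_mtime = os.path.getmtime(file_path)
    if cached_df is None or current_mtime != last_mtime:
        df = pd.read_csv(file_path)
        df["timestamp"] = pd.to_datetime(df["timestamp"], errors="coerce")
        return df, current_mtime
    return cached_df, last_mtime

# Usage: thread the returned pair back in on the next call.
# df, mtime = load_logs("logs.csv")
# df, mtime = load_logs("logs.csv", cached_df=df, last_mtime=mtime)  # cache hit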
 
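The date filter also changes: today is now localized from datetime.now() rather than datetime.now().date(), and both bounds are defensively re-localized before the range comparison, since pandas raises a TypeError when a tz-naive bound is compared against a tz-aware timestamp column. A self-contained illustration of that guard (the sample frame is made up for the example):

import pandas as pd
from datetime import datetime

# Illustrative frame: tz-aware timestamps, as app.py produces via
# tz_localize('UTC').tz_convert('Asia/Kolkata').
df = pd.DataFrame({
    "timestamp": pd.to_datetime(["2024-01-01 08:00", "2024-06-15 12:30"])
                   .tz_localize("UTC").tz_convert("Asia/Kolkata")
})

days_start, days_end = -7, 0
today = pd.to_datetime(datetime.now()).tz_localize("Asia/Kolkata")
start_date = today + pd.Timedelta(days=days_start)
end_date = today + pd.Timedelta(days=days_end) + pd.Timedelta(days=1) - pd.Timedelta(seconds=1)

# The guard from the commit: ensure both bounds are tz-aware in Asia/Kolkata
# before comparing; naive-vs-aware comparisons raise TypeError in pandas.
start_date = start_date.tz_convert("Asia/Kolkata") if start_date.tzinfo else start_date.tz_localize("Asia/Kolkata")
end_date = end_date.tz_convert("Asia/Kolkata") if end_date.tzinfo else end_date.tz_localize("Asia/Kolkata")

mask = (df["timestamp"] >= start_date) & (df["timestamp"] <= end_date)
print(df[mask])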