RathodHarish committed on
Commit
f070cb5
·
verified ·
1 Parent(s): 64799ba

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -16
app.py CHANGED
@@ -5,6 +5,7 @@ import logging
5
  import plotly.express as px
6
  import plotly.graph_objects as go
7
  from sklearn.ensemble import IsolationForest
 
8
  import os
9
  import io
10
  import time
@@ -25,7 +26,7 @@ except ImportError:
25
  logging.warning("reportlab module not found. PDF generation disabled.")
26
  reportlab_available = False
27
 
28
- # Summarize logs (without Hugging Face)
29
  def summarize_logs(df):
30
  try:
31
  total_devices = df["device_id"].nunique()
@@ -41,7 +42,7 @@ def detect_anomalies(df):
41
  if "usage_hours" not in df.columns or "downtime" not in df.columns:
42
  return "Anomaly detection requires 'usage_hours' and 'downtime' columns.", pd.DataFrame()
43
  features = df[["usage_hours", "downtime"]].fillna(0)
44
- if len(features) > 50: # Reduced sample size
45
  features = features.sample(n=50, random_state=42)
46
  iso_forest = IsolationForest(contamination=0.1, random_state=42)
47
  df["anomaly"] = iso_forest.fit_predict(features)
@@ -69,7 +70,7 @@ def check_amc_reminders(df, current_date):
69
  logging.error(f"AMC reminder generation failed: {str(e)}")
70
  return f"AMC reminder generation failed: {str(e)}", pd.DataFrame()
71
 
72
- # Dashboard insights (without Hugging Face)
73
  def generate_dashboard_insights(df):
74
  try:
75
  total_devices = df["device_id"].nunique()
@@ -374,18 +375,26 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
374
  preview_html = preview_df.to_html(index=False, classes='table table-striped', border=0)
375
 
376
  # Run critical tasks concurrently
377
- with ThreadPoolExecutor(max_workers=2) as executor: # Reduced workers
378
- future_anomalies = executor.submit(detect_anomalies, filtered_df)
379
- future_amc = executor.submit(check_amc_reminders, filtered_df, datetime.now())
380
-
381
- summary = f"Step 1: Summary Report\n{summarize_logs(filtered_df)}"
382
- anomalies, anomalies_df = future_anomalies.result()
383
- anomalies = f"Anomaly Detection\n{anomalies}"
384
- amc_reminders, reminders_df = future_amc.result()
385
- amc_reminders = f"AMC Reminders\n{amc_reminders}"
386
- insights = f"Dashboard Insights\n{generate_dashboard_insights(filtered_df)}"
387
-
388
- # Generate charts sequentially to avoid contention
 
 
 
 
 
 
 
 
389
  usage_chart = create_usage_chart(filtered_df)
390
  downtime_chart = create_downtime_chart(filtered_df)
391
  daily_log_chart = create_daily_log_trends_chart(filtered_df)
@@ -395,7 +404,7 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
395
 
396
  elapsed_time = time.time() - start_time
397
  logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
398
- if elapsed_time > 3: # Very strict threshold
399
  logging.warning(f"Processing time exceeded 3 seconds: {elapsed_time:.2f} seconds")
400
 
401
  return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights, None, current_modified_time, df, filtered_df)
 
5
  import plotly.express as px
6
  import plotly.graph_objects as go
7
  from sklearn.ensemble import IsolationForest
8
+ from concurrent.futures import ThreadPoolExecutor # Added missing import
9
  import os
10
  import io
11
  import time
 
26
  logging.warning("reportlab module not found. PDF generation disabled.")
27
  reportlab_available = False
28
 
29
+ # Summarize logs
30
  def summarize_logs(df):
31
  try:
32
  total_devices = df["device_id"].nunique()
 
42
  if "usage_hours" not in df.columns or "downtime" not in df.columns:
43
  return "Anomaly detection requires 'usage_hours' and 'downtime' columns.", pd.DataFrame()
44
  features = df[["usage_hours", "downtime"]].fillna(0)
45
+ if len(features) > 50:
46
  features = features.sample(n=50, random_state=42)
47
  iso_forest = IsolationForest(contamination=0.1, random_state=42)
48
  df["anomaly"] = iso_forest.fit_predict(features)
 
70
  logging.error(f"AMC reminder generation failed: {str(e)}")
71
  return f"AMC reminder generation failed: {str(e)}", pd.DataFrame()
72
 
73
+ # Dashboard insights
74
  def generate_dashboard_insights(df):
75
  try:
76
  total_devices = df["device_id"].nunique()
 
375
  preview_html = preview_df.to_html(index=False, classes='table table-striped', border=0)
376
 
377
  # Run critical tasks concurrently
378
+ try:
379
+ with ThreadPoolExecutor(max_workers=2) as executor:
380
+ future_anomalies = executor.submit(detect_anomalies, filtered_df)
381
+ future_amc = executor.submit(check_amc_reminders, filtered_df, datetime.now())
382
+
383
+ summary = f"Step 1: Summary Report\n{summarize_logs(filtered_df)}"
384
+ anomalies, anomalies_df = future_anomalies.result()
385
+ anomalies = f"Anomaly Detection\n{anomalies}"
386
+ amc_reminders, reminders_df = future_amc.result()
387
+ amc_reminders = f"AMC Reminders\n{amc_reminders}"
388
+ insights = f"Dashboard Insights\n{generate_dashboard_insights(filtered_df)}"
389
+ except Exception as e:
390
+ logging.error(f"Concurrent task execution failed: {str(e)}")
391
+ summary = "Failed to generate summary due to processing error."
392
+ anomalies = "Anomaly detection failed due to processing error."
393
+ amc_reminders = "AMC reminders failed due to processing error."
394
+ insights = "Insights generation failed due to processing error."
395
+ anomalies_df = pd.DataFrame()
396
+
397
+ # Generate charts sequentially
398
  usage_chart = create_usage_chart(filtered_df)
399
  downtime_chart = create_downtime_chart(filtered_df)
400
  daily_log_chart = create_daily_log_trends_chart(filtered_df)
 
404
 
405
  elapsed_time = time.time() - start_time
406
  logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
407
+ if elapsed_time > 3:
408
  logging.warning(f"Processing time exceeded 3 seconds: {elapsed_time:.2f} seconds")
409
 
410
  return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights, None, current_modified_time, df, filtered_df)