Update app.py
app.py
CHANGED
@@ -10,10 +10,24 @@ import os
 import io
 import time
 import asyncio
+from simple_salesforce import Salesforce

 # Configure logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

+# Salesforce configuration
+try:
+    sf = Salesforce(
+        username='multi-devicelabopsdashboard@sathkrutha.com',
+        password='Team@1234',
+        security_token=os.getenv('SF_SECURITY_TOKEN', ''),
+        domain='login'
+    )
+    logging.info("Salesforce connection established")
+except Exception as e:
+    logging.error(f"Failed to connect to Salesforce: {str(e)}")
+    sf = None
+
 # Try to import reportlab
 try:
     from reportlab.lib.pagesizes import letter
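
A note on the hunk above: the username and password are committed in source, and only the security token comes from the environment. A minimal sketch of the same connection with every credential read from environment variables — the SF_USERNAME and SF_PASSWORD names are illustrative, not part of this commit:

import os
import logging
from simple_salesforce import Salesforce

def connect_salesforce():
    # Return a client, or None so callers can degrade gracefully,
    # mirroring the sf = None fallback in the commit.
    try:
        return Salesforce(
            username=os.environ['SF_USERNAME'],    # illustrative env var
            password=os.environ['SF_PASSWORD'],    # illustrative env var
            security_token=os.getenv('SF_SECURITY_TOKEN', ''),
            domain='login',
        )
    except Exception as e:
        logging.error(f"Failed to connect to Salesforce: {e}")
        return None

sf = connect_salesforce()
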
@@ -26,6 +40,166 @@ except ImportError:
     logging.warning("reportlab module not found. PDF generation disabled.")
     reportlab_available = False

+# Cache picklist values at startup
+def get_picklist_values(field_name):
+    if sf is None:
+        return []
+    try:
+        obj_desc = sf.SmartLog__c.describe()
+        for field in obj_desc['fields']:
+            if field['name'] == field_name:
+                return [value['value'] for value in field['picklistValues'] if value['active']]
+        return []
+    except Exception as e:
+        logging.error(f"Failed to fetch picklist values for {field_name}: {str(e)}")
+        return []
+
+status_values = get_picklist_values('Status__c') or ["Active", "Inactive", "Pending"]
+log_type_values = get_picklist_values('Log_Type__c') or ["Smart Log", "Cell Analysis", "UV Verification"]
+logging.info(f"Valid Status__c values: {status_values}")
+logging.info(f"Valid Log_Type__c values: {log_type_values}")
+
+# Map invalid picklist values
+picklist_mapping = {
+    'Status__c': {
+        'normal': 'Active',
+        'error': 'Inactive',
+        'warning': 'Pending',
+        'ok': 'Active',
+        'failed': 'Inactive'
+    },
+    'Log_Type__c': {
+        'maint': 'Smart Log',
+        'error': 'Cell Analysis',
+        'ops': 'UV Verification',
+        'maintenance': 'Smart Log',
+        'cell': 'Cell Analysis',
+        'uv': 'UV Verification',
+        'weight log': 'Smart Log'
+    }
+}
+
+# Cache folder ID for Salesforce reports
+def get_folder_id(folder_name):
+    if sf is None:
+        return None
+    try:
+        query = f"SELECT Id FROM Folder WHERE Name = '{folder_name}' AND Type = 'Report'"
+        result = sf.query(query)
+        if result['totalSize'] > 0:
+            folder_id = result['records'][0]['Id']
+            logging.info(f"Found folder ID for '{folder_name}': {folder_id}")
+            return folder_id
+        else:
+            logging.error(f"Folder '{folder_name}' not found in Salesforce.")
+            return None
+    except Exception as e:
+        logging.error(f"Failed to fetch folder ID for '{folder_name}': {str(e)}")
+        return None
+
+LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
+
+# Salesforce report creation
+def create_salesforce_reports(df):
+    if sf is None or not LABOPS_REPORTS_FOLDER_ID:
+        logging.error("Cannot create Salesforce reports: No connection or folder ID")
+        return
+    try:
+        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+        reports = [
+            {
+                "reportMetadata": {
+                    "name": f"SmartLog_Usage_Report_{timestamp}",
+                    "developerName": f"SmartLog_Usage_Report_{timestamp}",
+                    "reportType": {"type": "CustomEntity", "value": "SmartLog__c"},
+                    "reportFormat": "TABULAR",
+                    "reportBooleanFilter": None,
+                    "reportFilters": [],
+                    "detailColumns": ["SmartLog__c.Device_Id__c", "SmartLog__c.Usage_Hours__c"],
+                    "folderId": LABOPS_REPORTS_FOLDER_ID
+                }
+            },
+            {
+                "reportMetadata": {
+                    "name": f"SmartLog_AMC_Reminders_{timestamp}",
+                    "developerName": f"SmartLog_AMC_Reminders_{timestamp}",
+                    "reportType": {"type": "CustomEntity", "value": "SmartLog__c"},
+                    "reportFormat": "TABULAR",
+                    "reportBooleanFilter": None,
+                    "reportFilters": [],
+                    "detailColumns": ["SmartLog__c.Device_Id__c", "SmartLog__c.AMC_Date__c"],
+                    "folderId": LABOPS_REPORTS_FOLDER_ID
+                }
+            }
+        ]
+        for report in reports:
+            sf.restful('analytics/reports', method='POST', json=report)
+        logging.info("Salesforce reports created successfully")
+    except Exception as e:
+        logging.error(f"Failed to create Salesforce reports: {str(e)}")
+
+# Save to Salesforce
+def save_to_salesforce(df, reminders_df):
+    if sf is None:
+        logging.error("No Salesforce connection available")
+        return
+    try:
+        logging.info("Starting Salesforce save operation")
+        current_date = datetime.now()
+        next_30_days = current_date + timedelta(days=30)
+        records = []
+        reminder_device_ids = set(reminders_df['device_id']) if not reminders_df.empty else set()
+        logging.info(f"Processing {len(df)} records for Salesforce")
+
+        for idx, row in df.iterrows():
+            status = str(row['status']).lower()
+            log_type = str(row['log_type']).lower()
+            status_mapped = picklist_mapping['Status__c'].get(status, status_values[0] if status_values else 'Active')
+            log_type_mapped = picklist_mapping['Log_Type__c'].get(log_type, log_type_values[0] if log_type_values else 'Smart Log')
+
+            if not status_mapped or not log_type_mapped:
+                logging.warning(f"Skipping record {idx}: Invalid status ({status}) or log_type ({log_type})")
+                continue
+
+            amc_date_str = None
+            if pd.notna(row['amc_date']):
+                try:
+                    amc_date = pd.to_datetime(row['amc_date']).strftime('%Y-%m-%d')
+                    amc_date_str = amc_date
+                    amc_date_dt = datetime.strptime(amc_date, '%Y-%m-%d')
+                    if status_mapped == "Active" and current_date.date() <= amc_date_dt.date() <= next_30_days.date():
+                        logging.info(f"AMC Reminder for Device ID {row['device_id']}: {amc_date}")
+                except Exception as e:
+                    logging.warning(f"Invalid AMC date for Device ID {row['device_id']}: {str(e)}")
+
+            record = {
+                'Device_Id__c': str(row['device_id'])[:50],
+                'Log_Type__c': log_type_mapped,
+                'Status__c': status_mapped,
+                'Timestamp__c': row['timestamp'].isoformat() if pd.notna(row['timestamp']) else None,
+                'Usage_Hours__c': float(row['usage_hours']) if pd.notna(row['usage_hours']) else 0.0,
+                'Downtime__c': float(row['downtime']) if pd.notna(row['downtime']) else 0.0,
+                'AMC_Date__c': amc_date_str
+            }
+            records.append(record)
+
+        if records:
+            batch_size = 100
+            for i in range(0, len(records), batch_size):
+                batch = records[i:i + batch_size]
+                try:
+                    result = sf.bulk.SmartLog__c.insert(batch)
+                    logging.info(f"Saved {len(batch)} records to Salesforce in batch {i//batch_size + 1}")
+                    for res in result:
+                        if not res['success']:
+                            logging.error(f"Failed to save record: {res['errors']}")
+                except Exception as e:
+                    logging.error(f"Failed to save batch {i//batch_size + 1}: {str(e)}")
+        else:
+            logging.warning("No records to save to Salesforce")
+    except Exception as e:
+        logging.error(f"Failed to save to Salesforce: {str(e)}")
+
 # Summarize logs
 def summarize_logs(df):
     try:
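
The describe-driven defaults and the mapping table above combine into a single normalization rule inside save_to_salesforce: lowercase the raw CSV value, look it up in picklist_mapping, and fall back to the first active picklist value. A self-contained sketch of that rule (values copied from the hunk):

picklist_mapping = {
    'Status__c': {'normal': 'Active', 'error': 'Inactive', 'warning': 'Pending',
                  'ok': 'Active', 'failed': 'Inactive'}
}
status_values = ["Active", "Inactive", "Pending"]  # describe() result, or the fallback list

def normalize_status(raw):
    # Unmapped values fall back to the org's first active picklist entry.
    default = status_values[0] if status_values else 'Active'
    return picklist_mapping['Status__c'].get(str(raw).lower(), default)

print(normalize_status('ERROR'))    # Inactive (explicit mapping, case-folded)
print(normalize_status('offline'))  # Active (fallback)
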
@@ -65,8 +239,7 @@ def check_amc_reminders(df, current_date):
         reminders = df[(df["days_to_amc"] >= 0) & (df["days_to_amc"] <= 30)][["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]]
         if reminders.empty:
             return "No AMC reminders due within the next 30 days.", reminders
-
-        return "\n".join([f"- Device ID: {row['device_id']}, AMC Date: {row['amc_date']}" for _, row in reminders.iterrows()]), reminders
+        return "\n".join([f"- Device ID: {row['device_id']}, AMC Date: {row['amc_date']}" for _, row in reminders.head(5).iterrows()]), reminders
     except Exception as e:
         logging.error(f"AMC reminder generation failed: {str(e)}")
         return f"AMC reminder generation failed: {str(e)}", pd.DataFrame()
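
check_amc_reminders filters on a days_to_amc column that is computed outside the visible hunks. A plausible sketch of that derivation, assuming amc_date is already parsed to datetimes — the column-building line is an assumption, only the 0–30 day window appears in the commit:

import pandas as pd

df = pd.DataFrame({
    "device_id": ["D-1", "D-2"],
    "amc_date": pd.to_datetime(["2024-06-15", "2025-01-10"]),
})
current_date = pd.Timestamp("2024-06-01")

# Days until each device's AMC date; negative means the date has passed.
df["days_to_amc"] = (df["amc_date"] - current_date).dt.days
print(df[(df["days_to_amc"] >= 0) & (df["days_to_amc"] <= 30)])  # only D-1, due in 14 days
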
@@ -232,12 +405,12 @@ def generate_device_cards(df):
         health_color = {'Healthy': 'green', 'Unhealthy': 'red', 'Warning': 'orange', 'Unknown': 'gray'}.get(row['health'], 'gray')
         timestamp_str = str(row['timestamp']) if pd.notna(row['timestamp']) else 'Unknown'
         cards_html += f"""
-
-
-
-
-
-
+        <div style="border: 1px solid #e0e0e0; padding: 10px; border-radius: 5px; width: 200px;">
+            <h4>Device: {row['device_id']}</h4>
+            <p><b>Health:</b> <span style="color: {health_color}">{row['health']}</span></p>
+            <p><b>Usage Count:</b> {row['count']}</p>
+            <p><b>Last Log:</b> {timestamp_str}</p>
+        </div>
         """
     cards_html += '</div>'
     return cards_html
@@ -315,25 +488,21 @@ def generate_pdf_content(summary, preview_df, anomalies, amc_reminders, insights
         logging.error(f"Failed to generate PDF: {str(e)}")
         return None

-# Main processing function
-async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_range,
+# Main processing function
+async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_range, cached_df_state, last_modified_state):
     start_time = time.time()
     try:
         if not file_obj:
-            return "No file uploaded.",
-
-        # Check if date range is provided
-        if not date_range or len(date_range) != 2:
-            return "Please provide a valid date range.", pd.DataFrame(), None, '<p>No device cards available.</p>', None, None, None, None, "No anomalies detected.", "No AMC reminders.", "No insights generated.", None, last_modified_state, cached_df_state, None
+            return "No file uploaded.", "<p>No data available.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "No anomalies detected.", "No AMC reminders.", "No insights generated.", None, cached_df_state, last_modified_state

         file_path = file_obj.name
         current_modified_time = os.path.getmtime(file_path)

-        #
+        # Read file only if it's new or modified
         if cached_df_state is None or current_modified_time != last_modified_state:
-            logging.info(f"Processing file: {file_path}")
+            logging.info(f"Processing new or modified file: {file_path}")
             if not file_path.endswith(".csv"):
-                return "Please upload a CSV file.",
+                return "Please upload a CSV file.", "<p>Invalid file format.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, cached_df_state, last_modified_state

             required_columns = ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
             dtypes = {
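
process_logs now takes cached_df_state and last_modified_state as parameters and returns their updated values, which is how Gradio persists values between clicks: a gr.State listed in both inputs and outputs is fed the previous value and stores the returned one. A stripped-down sketch of that round-trip plus the mtime check (component names are illustrative; the file_obj.name access follows the commit's own usage):

import os
import gradio as gr

def process(file_obj, cached, last_mtime):
    if file_obj is None:
        return "No file uploaded.", cached, last_mtime
    mtime = os.path.getmtime(file_obj.name)
    if cached is None or mtime != last_mtime:
        cached = open(file_obj.name).read()  # stand-in for pd.read_csv
        last_mtime = mtime                   # remember what was parsed
    return f"{len(cached)} bytes processed", cached, last_mtime

with gr.Blocks() as demo:
    file_input = gr.File()
    summary = gr.Textbox()
    cached_state = gr.State(value=None)
    mtime_state = gr.State(value=None)
    # State components appear in both inputs and outputs so the returned
    # values are visible to the next click.
    gr.Button("Analyze").click(fn=process,
                               inputs=[file_input, cached_state, mtime_state],
                               outputs=[summary, cached_state, mtime_state])
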
@@ -347,19 +516,18 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
             df = pd.read_csv(file_path, dtype=dtypes)
             missing_columns = [col for col in required_columns if col not in df.columns]
             if missing_columns:
-                return f"Missing columns: {missing_columns}",
+                return f"Missing columns: {missing_columns}", "<p>Missing required columns.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, cached_df_state, last_modified_state

             df["timestamp"] = pd.to_datetime(df["timestamp"], errors='coerce')
             df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
             if df["timestamp"].dt.tz is None:
                 df["timestamp"] = df["timestamp"].dt.tz_localize('UTC').dt.tz_convert('Asia/Kolkata')
             if df.empty:
-                return "No data available.",
-            cached_df_state = df
+                return "No data available.", "<p>No data available.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, df, current_modified_time
         else:
             df = cached_df_state

-        #
+        # Apply filters
         filtered_df = df.copy()
         if lab_site_filter and lab_site_filter != 'All' and 'lab_site' in filtered_df.columns:
             filtered_df = filtered_df[filtered_df['lab_site'] == lab_site_filter]
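
The timestamp handling above hinges on the pandas distinction between tz_localize (attach a zone to naive timestamps without moving the clock) and tz_convert (shift already-aware timestamps to another zone) — the same split the new start_date/end_date lines in the next hunk rely on. A two-step illustration:

import pandas as pd

ts = pd.Series(pd.to_datetime(["2024-06-01 08:00"]))   # naive
aware = ts.dt.tz_localize('UTC')             # attach a zone; clock unchanged
local = aware.dt.tz_convert('Asia/Kolkata')  # shift an aware series by +05:30
print(local.iloc[0])  # 2024-06-01 13:30:00+05:30
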
@@ -367,37 +535,34 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
             filtered_df = filtered_df[filtered_df['equipment_type'] == equipment_type_filter]
         if date_range and len(date_range) == 2:
             days_start, days_end = date_range
-            today = pd.to_datetime(datetime.now()
+            today = pd.to_datetime(datetime.now()).tz_localize('Asia/Kolkata')
             start_date = today + pd.Timedelta(days=days_start)
             end_date = today + pd.Timedelta(days=days_end) + pd.Timedelta(days=1) - pd.Timedelta(seconds=1)
+            start_date = start_date.tz_convert('Asia/Kolkata') if start_date.tzinfo else start_date.tz_localize('Asia/Kolkata')
+            end_date = end_date.tz_convert('Asia/Kolkata') if end_date.tzinfo else end_date.tz_localize('Asia/Kolkata')
+            logging.info(f"Date range filter: start_date={start_date}, end_date={end_date}")
+            logging.info(f"Before date filter: {len(filtered_df)} rows")
             filtered_df = filtered_df[(filtered_df['timestamp'] >= start_date) & (filtered_df['timestamp'] <= end_date)]
+            logging.info(f"After date filter: {len(filtered_df)} rows")

         if filtered_df.empty:
-            return "No data after applying filters.",
+            return "No data after applying filters.", "<p>No data after filters.</p>", None, '<p>No device cards available.</p>', None, None, None, None, "", "", "", None, df, current_modified_time

         # Generate table for preview
         preview_df = filtered_df[['device_id', 'log_type', 'status', 'timestamp', 'usage_hours', 'downtime', 'amc_date']].head(5)
         preview_html = preview_df.to_html(index=False, classes='table table-striped', border=0)

         # Run critical tasks concurrently
-
-
-
-
-
-
-
-
-
-
-        insights = f"Dashboard Insights\n{generate_dashboard_insights(filtered_df)}"
-        except Exception as e:
-            logging.error(f"Concurrent task execution failed: {str(e)}")
-            summary = "Failed to generate summary due to processing error."
-            anomalies = "Anomaly detection failed due to processing error."
-            amc_reminders = "AMC reminders failed due to processing error."
-            insights = "Insights generation failed due to processing error."
-            anomalies_df = pd.DataFrame()
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            future_anomalies = executor.submit(detect_anomalies, filtered_df)
+            future_amc = executor.submit(check_amc_reminders, filtered_df, datetime.now())
+
+            summary = f"Step 1: Summary Report\n{summarize_logs(filtered_df)}"
+            anomalies, anomalies_df = future_anomalies.result()
+            anomalies = f"Anomaly Detection\n{anomalies}"
+            amc_reminders, reminders_df = future_amc.result()
+            amc_reminders = f"AMC Reminders\n{amc_reminders}"
+            insights = f"Dashboard Insights\n{generate_dashboard_insights(filtered_df)}"

         # Generate charts sequentially
         usage_chart = create_usage_chart(filtered_df)
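
The concurrent block above assumes ThreadPoolExecutor is already imported elsewhere in app.py (from concurrent.futures import ThreadPoolExecutor); the import is not visible in these hunks. The pattern in miniature, with stand-ins for detect_anomalies and check_amc_reminders:

import time
from concurrent.futures import ThreadPoolExecutor

def detect_anomalies(df):
    time.sleep(0.2)
    return "no anomalies", df

def check_amc_reminders(df, now):
    time.sleep(0.2)
    return "no reminders", df

with ThreadPoolExecutor(max_workers=2) as executor:
    # Submit both helpers, keep working on the main thread, and block
    # on .result() only when the values are actually needed.
    f_anom = executor.submit(detect_anomalies, [1, 2])
    f_amc = executor.submit(check_amc_reminders, [1, 2], time.time())
    summary = "summary built on the main thread meanwhile"
    anomalies, _ = f_anom.result()
    reminders, _ = f_amc.result()
print(summary, "|", anomalies, "|", reminders)
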
@@ -407,15 +572,19 @@
         anomaly_alerts_chart = create_anomaly_alerts_chart(anomalies_df)
         device_cards = generate_device_cards(filtered_df)

+        # Save to Salesforce after all other processing
+        save_to_salesforce(filtered_df, reminders_df)
+        create_salesforce_reports(filtered_df)
+
         elapsed_time = time.time() - start_time
         logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
         if elapsed_time > 3:
             logging.warning(f"Processing time exceeded 3 seconds: {elapsed_time:.2f} seconds")

-        return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights, None,
+        return (summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights, None, df, current_modified_time)
     except Exception as e:
         logging.error(f"Failed to process file: {str(e)}")
-        return f"Error: {str(e)}",
+        return f"Error: {str(e)}", "<p>Error processing data.</p>", None, '<p>Error processing data.</p>', None, None, None, None, "", "", "", None, cached_df_state, last_modified_state

 # Generate PDF separately
 async def generate_pdf(summary, preview_html, usage_chart, device_cards, daily_log_chart, weekly_uptime_chart, anomaly_alerts_chart, downtime_chart, anomalies, amc_reminders, insights):
@@ -466,7 +635,6 @@ try:
         last_modified_state = gr.State(value=None)
         current_file_state = gr.State(value=None)
         cached_df_state = gr.State(value=None)
-        cached_filtered_df_state = gr.State(value=None)

         with gr.Row():
             with gr.Column(scale=1):
@@ -475,9 +643,9 @@ try:
                 gr.Markdown("### Filters")
                 lab_site_filter = gr.Dropdown(label="Lab Site", choices=['All'], value='All', interactive=True)
                 equipment_type_filter = gr.Dropdown(label="Equipment Type", choices=['All'], value='All', interactive=True)
-                date_range_filter = gr.Slider(label="Date Range (Days from Today)", minimum=-365, maximum=0, step=1, value=[-
-
-
+                date_range_filter = gr.Slider(label="Date Range (Days from Today, e.g., -7 to 0 means last 7 days)", minimum=-365, maximum=0, step=1, value=[-7, 0])
+                submit_button = gr.Button("Analyze", variant="primary")
+                pdf_button = gr.Button("Export PDF", variant="secondary")

             with gr.Column(scale=2):
                 with gr.Group(elem_classes="dashboard-container"):
@@ -514,7 +682,7 @@ try:
                     insights_output = gr.Markdown()
                 with gr.Group(elem_classes="dashboard-section"):
                     gr.Markdown("### Export Report")
-                    pdf_output = gr.File(label="Download Report as PDF")
+                    pdf_output = gr.File(label="Download Status Report as PDF")

         file_input.change(
             fn=update_filters,
@@ -525,8 +693,8 @@ try:

         submit_button.click(
             fn=process_logs,
-            inputs=[file_input, lab_site_filter, equipment_type_filter, date_range_filter,
-            outputs=[summary_output, preview_output, usage_chart_output, device_cards_output, daily_log_trends_output, weekly_uptime_output, anomaly_alerts_output, downtime_chart_output, anomaly_output, amc_output, insights_output, pdf_output,
+            inputs=[file_input, lab_site_filter, equipment_type_filter, date_range_filter, cached_df_state, last_modified_state],
+            outputs=[summary_output, preview_output, usage_chart_output, device_cards_output, daily_log_trends_output, weekly_uptime_output, anomaly_alerts_output, downtime_chart_output, anomaly_output, amc_output, insights_output, pdf_output, cached_df_state, last_modified_state]
         )

         pdf_button.click(
@@ -535,7 +703,7 @@ try:
             outputs=[pdf_output]
         )

-
+        logging.info("Gradio interface initialized successfully")
 except Exception as e:
     logging.error(f"Failed to initialize Gradio interface: {str(e)}")
     raise e
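
One last aside on get_folder_id: it interpolates folder_name directly into SOQL. That is harmless with the constant 'LabOps Reports', but simple_salesforce has no parameter binding, so if the name ever came from user input the usual defence is to escape the string literal. A sketch — soql_quote is not part of the commit:

def soql_quote(value):
    # Escape the two characters that can break out of a SOQL string literal.
    return value.replace("\\", "\\\\").replace("'", "\\'")

folder_name = "LabOps Reports"
query = f"SELECT Id FROM Folder WHERE Name = '{soql_quote(folder_name)}' AND Type = 'Report'"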