sanjaybhargavneela1929 committed on
Commit
c1f6656
·
verified ·
1 Parent(s): 24d22c3

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +577 -0
app.py ADDED
@@ -0,0 +1,577 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ LabOps Log Analyzer Dashboard with CSV file upload, PDF generation, and Salesforce integration
3
+ """
4
+ import gradio as gr
5
+ import pandas as pd
6
+ from datetime import datetime, timedelta
7
+ import logging
8
+ import plotly.express as px
9
+ from sklearn.ensemble import IsolationForest
10
+ from transformers import pipeline
11
+ import torch
12
+ from concurrent.futures import ThreadPoolExecutor
13
+ from simple_salesforce import Salesforce
14
+ import os
15
+ import json
16
+
17
# Configure logging
# Root-logger setup (timestamp - level - message); all logging calls below use it.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Salesforce configuration
# Credentials come from environment variables. On any failure `sf` is set to
# None, and every Salesforce-dependent helper below checks for that sentinel
# and degrades gracefully instead of crashing the app.
try:
    sf = Salesforce(
        username=os.getenv('SF_USERNAME'),
        password=os.getenv('SF_PASSWORD'),
        security_token=os.getenv('SF_SECURITY_TOKEN'),
        domain='login'  # production login endpoint (simple_salesforce convention)
    )
    logging.info("Salesforce connection established")
except Exception as e:
    logging.error(f"Failed to connect to Salesforce: {str(e)}")
    sf = None  # sentinel: Salesforce features disabled
32
+
33
# Try to import reportlab
# PDF generation is optional: when reportlab is missing the app still runs,
# and generate_pdf_content() checks this flag and returns None.
try:
    from reportlab.lib.pagesizes import letter
    from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer
    from reportlab.lib.styles import getSampleStyleSheet
    reportlab_available = True
    logging.info("reportlab module successfully imported")
except ImportError:
    logging.warning("reportlab module not found. PDF generation disabled.")
    reportlab_available = False
43
+
44
# Preload Hugging Face model
# The summarization pipeline is loaded once at startup (GPU when available)
# so per-request calls in summarize_logs()/generate_dashboard_insights() are
# fast. A load failure is fatal: the app cannot summarize without the model.
logging.info("Preloading Hugging Face model...")
try:
    device = 0 if torch.cuda.is_available() else -1  # 0 = first GPU, -1 = CPU
    summarizer = pipeline(
        "summarization",
        model="facebook/bart-large-cnn",
        device=device,
        max_length=50,   # short outputs keep dashboard text compact
        min_length=10,
        num_beams=4
    )
    logging.info(f"Hugging Face model preloaded on {'GPU' if device == 0 else 'CPU'}")
except Exception as e:
    logging.error(f"Failed to preload model: {str(e)}")
    raise e
60
+
61
# Fetch valid picklist values from Salesforce
def get_picklist_values(field_name):
    """Return the active picklist values for `field_name` on SmartLog__c.

    Returns an empty list when the Salesforce connection is unavailable, the
    field is not found on the object, or the describe call fails.
    """
    if sf is None:
        return []
    try:
        described = sf.SmartLog__c.describe()
        match = next((f for f in described['fields'] if f['name'] == field_name), None)
        if match is None:
            return []
        return [entry['value'] for entry in match['picklistValues'] if entry['active']]
    except Exception as e:
        logging.error(f"Failed to fetch picklist values for {field_name}: {str(e)}")
        return []
74
+
75
# Cache picklist values at startup
# get_picklist_values() returns [] (falsy) when Salesforce is unreachable,
# so the `or` fallbacks supply hard-coded defaults in that case.
status_values = get_picklist_values('Status__c') or ["Active", "Inactive", "Pending"]
log_type_values = get_picklist_values('Log_Type__c') or ["Smart Log", "Cell Analysis", "UV Verification"]
logging.info(f"Valid Status__c values: {status_values}")
logging.info(f"Valid Log_Type__c values: {log_type_values}")
80
+
81
# Map invalid picklist values to valid ones
# save_to_salesforce() lower-cases a raw CSV value and looks it up here when
# the value is not already one of the org's valid picklist entries.
picklist_mapping = {
    'Status__c': {
        'normal': 'Active',
        'error': 'Inactive',
        'warning': 'Pending',
        'ok': 'Active',
        'failed': 'Inactive'
    },
    'Log_Type__c': {
        'maint': 'Smart Log',
        'error': 'Cell Analysis',
        'ops': 'UV Verification',
        'maintenance': 'Smart Log',
        'cell': 'Cell Analysis',
        'uv': 'UV Verification'
    }
}
99
+
100
# Fetch folder ID for "LabOps Reports"
def get_folder_id(folder_name):
    """Return the Id of the named Salesforce report folder, or None.

    The folder name is escaped before being interpolated into the SOQL query
    so names containing apostrophes or backslashes cannot break the query
    (the original interpolated the raw string). Returns None when Salesforce
    is unavailable, the folder does not exist, or the query fails.
    """
    if sf is None:
        return None
    try:
        # Escape backslashes first, then single quotes (SOQL string escaping).
        safe_name = folder_name.replace("\\", "\\\\").replace("'", "\\'")
        query = f"SELECT Id FROM Folder WHERE Name = '{safe_name}' AND Type = 'Report'"
        result = sf.query(query)
        if result['totalSize'] > 0:
            folder_id = result['records'][0]['Id']
            logging.info(f"Found folder ID for '{folder_name}': {folder_id}")
            return folder_id
        else:
            logging.error(f"Folder '{folder_name}' not found in Salesforce.")
            return None
    except Exception as e:
        logging.error(f"Failed to fetch folder ID for '{folder_name}': {str(e)}")
        return None
117
+
118
# Cache the folder ID at startup (None when the folder or connection is missing;
# create_salesforce_reports() refuses to run in that case).
LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
120
+
121
# Create Salesforce reports (Usage and AMC Reminders)
def create_salesforce_reports(df):
    """Create two Salesforce reports via the Analytics REST API.

    - A SUMMARY "Usage" report: Active SmartLog__c rows this month, grouped
      by device, aggregating usage hours and downtime.
    - A TABULAR "AMC Reminders" report: Active rows whose AMC date falls
      within the next 30 days.

    `df` is accepted for signature symmetry with the other pipeline steps but
    is not consulted — the reports are defined entirely by server-side
    filters. Returns a human-readable status string with the new report IDs,
    or an error message.
    """
    if sf is None:
        return "Salesforce connection not available."
    if not LABOPS_REPORTS_FOLDER_ID:
        return "Cannot create reports: 'LabOps Reports' folder not found in Salesforce."

    try:
        # One shared timestamp per invocation so each report's name and
        # developerName always match (the original made separate
        # datetime.now() calls that could straddle a second boundary).
        stamp = datetime.now().strftime('%Y%m%d_%H%M%S')

        # Usage Report (Summary Report)
        usage_report_metadata = {
            "reportMetadata": {
                "name": f"SmartLog_Usage_Report_{stamp}",
                "developerName": f"SmartLog_Usage_Report_{stamp}",
                "reportType": {
                    "type": "CustomObject",
                    "value": "SmartLog__c"
                },
                "reportFormat": "SUMMARY",
                "reportBooleanFilter": None,
                "reportFilters": [
                    {
                        "column": "SmartLog__c.Status__c",
                        "operator": "equals",
                        "value": "Active"
                    },
                    {
                        "column": "SmartLog__c.Timestamp__c",
                        "operator": "greaterOrEqual",
                        "value": "THIS_MONTH"
                    }
                ],
                # s! prefix = sum aggregate in the Analytics API.
                "aggregates": ["s!SmartLog__c.Usage_Hours__c", "s!SmartLog__c.Downtime__c"],
                "groupingsDown": [
                    {
                        "name": "Device_Id__c",
                        "field": "SmartLog__c.Device_Id__c",
                        "sortOrder": "Asc",
                        "sortAggregate": None,
                        "dateGranularity": "None"
                    }
                ],
                "detailColumns": [
                    "SmartLog__c.Device_Id__c",
                    "SmartLog__c.Log_Type__c",
                    "SmartLog__c.Status__c",
                    "SmartLog__c.Timestamp__c",
                    "SmartLog__c.Usage_Hours__c",
                    "SmartLog__c.Downtime__c",
                    "SmartLog__c.AMC_Date__c"
                ],
                "folderId": LABOPS_REPORTS_FOLDER_ID,
                "currency": None
            }
        }
        usage_result = sf.restful('analytics/reports', method='POST', json=usage_report_metadata)
        usage_report_id = usage_result['id']
        logging.info(f"Usage Report created: {usage_report_id}")

        # AMC Reminders Report (Tabular Report)
        amc_report_metadata = {
            "reportMetadata": {
                "name": f"SmartLog_AMC_Reminders_{stamp}",
                "developerName": f"SmartLog_AMC_Reminders_{stamp}",
                "reportType": {
                    "type": "CustomObject",
                    "value": "SmartLog__c"
                },
                "reportFormat": "TABULAR",
                "reportBooleanFilter": None,
                "reportFilters": [
                    {
                        "column": "SmartLog__c.Status__c",
                        "operator": "equals",
                        "value": "Active"
                    },
                    {
                        "column": "SmartLog__c.AMC_Date__c",
                        "operator": "greaterOrEqual",
                        "value": "TODAY"
                    },
                    {
                        "column": "SmartLog__c.AMC_Date__c",
                        "operator": "lessOrEqual",
                        "value": "NEXT_N_DAYS:30"
                    }
                ],
                "detailColumns": [
                    "SmartLog__c.Device_Id__c",
                    "SmartLog__c.AMC_Date__c",
                    "SmartLog__c.Status__c"
                ],
                "folderId": LABOPS_REPORTS_FOLDER_ID,
                "currency": None
            }
        }
        amc_result = sf.restful('analytics/reports', method='POST', json=amc_report_metadata)
        amc_report_id = amc_result['id']
        logging.info(f"AMC Reminders Report created: {amc_report_id}")

        return f"Usage Report ID: {usage_report_id}, AMC Reminders Report ID: {amc_report_id}"
    except Exception as e:
        logging.error(f"Failed to create Salesforce reports: {str(e)}")
        return f"Failed to create reports: {str(e)}"
224
+
225
# Save results to Salesforce SmartLog__c
def save_to_salesforce(df, summary, anomalies, amc_reminders, insights):
    """Insert up to 100 log rows into SmartLog__c via the Bulk API.

    Picklist fields are validated against the org's cached active values and
    translated through `picklist_mapping`; rows that cannot be mapped are
    skipped with a warning. The `summary`/`anomalies`/`amc_reminders`/
    `insights` arguments are part of the pipeline's call signature but are
    not persisted here. Always returns a status string (the original fell
    through and returned None when no valid records remained).
    """
    if sf is None:
        return "Salesforce connection not available."
    try:
        records = []
        current_date = datetime.now()
        next_30_days = current_date + timedelta(days=30)
        for _, row in df.head(100).iterrows():  # cap uploads at 100 rows
            status = str(row['status'])
            log_type = str(row['log_type'])

            # Map Status__c onto a valid picklist value, or skip the row.
            if status not in status_values:
                status = picklist_mapping['Status__c'].get(status.lower(), status_values[0] if status_values else None)
                if status is None:
                    logging.warning(f"Skipping record with invalid Status__c: {row['status']}")
                    continue

            # Map Log_Type__c likewise.
            if log_type not in log_type_values:
                log_type = picklist_mapping['Log_Type__c'].get(log_type.lower(), log_type_values[0] if log_type_values else None)
                if log_type is None:
                    logging.warning(f"Skipping record with invalid Log_Type__c: {row['log_type']}")
                    continue

            # Salesforce date fields expect YYYY-MM-DD strings.
            amc_date_str = row['amc_date'].strftime('%Y-%m-%d') if pd.notna(row['amc_date']) else None
            if amc_date_str:
                amc_date = datetime.strptime(amc_date_str, '%Y-%m-%d')
                # Log rows that will appear in the AMC Reminders report.
                if status == "Active" and current_date.date() <= amc_date.date() <= next_30_days.date():
                    logging.info(f"Record qualifies for AMC Reminders: Device ID {row['device_id']}, AMC Date {amc_date_str}")

            records.append({
                'Device_Id__c': str(row['device_id'])[:50],  # truncate to field length
                'Log_Type__c': log_type,
                'Status__c': status,
                'Timestamp__c': row['timestamp'].isoformat() if pd.notna(row['timestamp']) else None,
                'Usage_Hours__c': float(row['usage_hours']) if pd.notna(row['usage_hours']) else 0.0,
                'Downtime__c': float(row['downtime']) if pd.notna(row['downtime']) else 0.0,
                'AMC_Date__c': amc_date_str
            })

        # Bulk insert to reduce API calls
        if records:
            sf.bulk.SmartLog__c.insert(records)
            logging.info(f"Saved {len(records)} records to Salesforce")
            return f"Saved {len(records)} records to Salesforce."
        # Explicit status when every row was skipped (bug fix: was None).
        return "No valid records to save to Salesforce."
    except Exception as e:
        logging.error(f"Failed to save to Salesforce: {str(e)}")
        return f"Failed to save to Salesforce: {str(e)}"
279
+
280
# Summarize logs
def summarize_logs(df, progress=gr.Progress()):
    """Produce a short AI-generated summary of the uploaded logs.

    Builds a one-line prompt from the device count and the most-used device,
    then runs it through the preloaded summarizer. Returns the summary text,
    or an error message on failure.
    """
    progress(0.1, "Generating summary report...")
    try:
        total_devices = df["device_id"].nunique()
        if df.empty:
            most_used = "N/A"
        else:
            most_used = df.groupby("device_id")["usage_hours"].sum().idxmax()
        prompt = f"Maintenance logs: {total_devices} devices. Most used: {most_used}."
        output = summarizer(prompt, max_length=50, min_length=10, do_sample=False)
        logging.info("Summary generated successfully")
        return output[0]["summary_text"]
    except Exception as e:
        logging.error(f"Summary generation failed: {str(e)}")
        return f"Failed to generate summary: {str(e)}"
293
+
294
# Anomaly detection
def detect_anomalies(df, progress=gr.Progress()):
    """Flag anomalous (usage_hours, downtime) pairs with an IsolationForest.

    Works on a copy so the caller's DataFrame is never mutated (the original
    wrote an 'anomaly' column back into the shared frame when len(df) <= 1000,
    racing the other pipeline steps that read the same frame). Samples down
    to 1000 rows for speed. Returns a human-readable report string listing at
    most five anomalies.
    """
    progress(0.4, "Detecting anomalies...")
    try:
        if "usage_hours" not in df.columns or "downtime" not in df.columns:
            return "Anomaly detection requires 'usage_hours' and 'downtime' columns."
        # sample() already returns a new frame; copy() covers the small case.
        df = df.sample(n=1000, random_state=42) if len(df) > 1000 else df.copy()
        features = df[["usage_hours", "downtime"]].fillna(0)
        iso_forest = IsolationForest(contamination=0.1, random_state=42, n_jobs=-1)
        df["anomaly"] = iso_forest.fit_predict(features)  # -1 marks an outlier
        anomalies = df[df["anomaly"] == -1][["device_id", "usage_hours", "downtime", "timestamp"]]
        if anomalies.empty:
            return "No anomalies detected."
        anomaly_lines = ["Detected Anomalies:"]
        for _, row in anomalies.head(5).iterrows():
            anomaly_lines.append(
                f"- Device ID: {row['device_id']}, Usage Hours: {row['usage_hours']}, "
                f"Downtime: {row['downtime']}, Timestamp: {row['timestamp']}"
            )
        return "\n".join(anomaly_lines)
    except Exception as e:
        logging.error(f"Anomaly detection failed: {str(e)}")
        return f"Anomaly detection failed: {str(e)}"
318
+
319
# AMC reminders (identify records for display)
def check_amc_reminders(df, current_date, progress=gr.Progress()):
    """List devices whose AMC date falls within 30 days of `current_date`.

    Operates on a copy so the caller's DataFrame is not mutated (the original
    wrote the converted 'amc_date' and a new 'days_to_amc' column back into
    the shared frame, racing the other pipeline steps). Returns a report
    string listing at most five upcoming reminders.
    """
    progress(0.6, "Checking AMC reminders...")
    try:
        if "device_id" not in df.columns or "amc_date" not in df.columns:
            return "AMC reminders require 'device_id' and 'amc_date' columns."
        df = df.copy()  # avoid side effects on the shared frame
        df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
        current_date = pd.to_datetime(current_date)
        df["days_to_amc"] = (df["amc_date"] - current_date).dt.days
        reminders = df[(df["days_to_amc"] >= 0) & (df["days_to_amc"] <= 30)][["device_id", "amc_date"]]
        if reminders.empty:
            return "No AMC reminders due within the next 30 days."
        reminder_lines = ["Upcoming AMC Reminders:"]
        for _, row in reminders.head(5).iterrows():
            reminder_lines.append(f"- Device ID: {row['device_id']}, AMC Date: {row['amc_date']}")
        return "\n".join(reminder_lines)
    except Exception as e:
        logging.error(f"AMC reminder generation failed: {str(e)}")
        return f"AMC reminder generation failed: {str(e)}"
338
+
339
# Dashboard insights
def generate_dashboard_insights(df, progress=gr.Progress()):
    """Generate a one-line AI insight from device count and average usage."""
    progress(0.8, "Generating dashboard insights...")
    try:
        device_count = df["device_id"].nunique()
        mean_usage = df["usage_hours"].mean() if "usage_hours" in df.columns else 0
        prompt = f"Insights: {device_count} devices, avg usage {mean_usage:.2f} hours."
        output = summarizer(prompt, max_length=50, min_length=10, do_sample=False)
        return output[0]["summary_text"]
    except Exception as e:
        logging.error(f"Dashboard insights generation failed: {str(e)}")
        return f"Dashboard insights generation failed: {str(e)}"
351
+
352
# Create usage chart
def create_usage_chart(df, progress=gr.Progress()):
    """Build a bar chart of total usage hours for the top-5 devices.

    Returns a plotly Figure, or None when chart creation fails.
    """
    progress(0.9, "Creating usage chart...")
    try:
        totals = df.groupby("device_id")["usage_hours"].sum().reset_index()
        if len(totals) > 5:
            totals = totals.nlargest(5, "usage_hours")
        palette = ['#FF6B6B', '#4ECDC4', '#45B7D1', '#96CEB4']
        fig = px.bar(
            totals,
            x="device_id",
            y="usage_hours",
            title="Usage Hours per Device",
            labels={"device_id": "Device ID", "usage_hours": "Usage Hours"},
            color="device_id",
            color_discrete_sequence=palette
        )
        fig.update_layout(
            title_font_size=16,
            margin=dict(l=20, r=20, t=40, b=20),
            plot_bgcolor="white",
            paper_bgcolor="white",
            font=dict(size=12)
        )
        return fig
    except Exception as e:
        logging.error(f"Failed to create usage chart: {str(e)}")
        return None
380
+
381
# Generate PDF content
def generate_pdf_content(summary, preview, anomalies, amc_reminders, insights):
    """Render the analysis sections into a timestamped PDF and return its path.

    Returns None when reportlab is unavailable or rendering fails.
    """
    if not reportlab_available:
        return None
    try:
        pdf_path = f"analysis_report_{datetime.now().strftime('%Y%m%d_%H%M%S')}.pdf"
        doc = SimpleDocTemplate(pdf_path, pagesize=letter)
        styles = getSampleStyleSheet()

        def safe_paragraph(text, style):
            # Newlines must become <br/> for reportlab's mini-HTML markup.
            return Paragraph(str(text).replace('\n', '<br/>'), style) if text else Paragraph("", style)

        story = [
            Paragraph("LabOps Log Analysis Report", styles['Title']),
            Paragraph(f"Generated on {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}", styles['Normal']),
            Spacer(1, 12),
        ]
        sections = [
            ("Summary Report", summary or "No summary available."),
            ("Log Preview", preview or "No preview available."),
            ("Anomaly Detection", anomalies or "No anomalies detected."),
            ("AMC Reminders", amc_reminders or "No AMC reminders."),
            ("Dashboard Insights", insights or "No insights generated."),
        ]
        for index, (heading, body) in enumerate(sections):
            story.append(Paragraph(heading, styles['Heading2']))
            story.append(safe_paragraph(body, styles['Normal']))
            if index < len(sections) - 1:  # no trailing spacer after the last section
                story.append(Spacer(1, 12))

        doc.build(story)
        logging.info(f"PDF generated at {pdf_path}")
        return pdf_path
    except Exception as e:
        logging.error(f"Failed to generate PDF: {str(e)}")
        return None
423
+
424
# Main Gradio function
async def process_logs(file_obj, progress=gr.Progress()):
    """End-to-end pipeline for one uploaded CSV.

    Validates the upload, parses it with a fixed schema, then fans the
    independent analysis steps out to worker threads. Returns a 9-tuple in
    the exact order the Gradio outputs are wired: (summary, preview, chart,
    anomalies, amc_reminders, insights, pdf_file, salesforce_result,
    report_result).
    """
    try:
        progress(0, "Starting file processing...")
        # Guard: nothing uploaded — placeholder text for every output slot.
        if not file_obj:
            return "No file uploaded.", "No data to preview.", None, "No anomalies detected.", "No AMC reminders.", "No insights generated.", None, "No Salesforce data saved.", "No report created."

        file_name = file_obj.name
        logging.info(f"Processing file: {file_name}")

        if not file_name.endswith(".csv"):
            return "Please upload a CSV file.", "", None, "", "", "", None, "", ""

        # Expected schema; explicit dtypes keep parsing predictable.
        required_columns = ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
        dtypes = {
            "device_id": "string",
            "log_type": "string",
            "status": "string",
            "usage_hours": "float32",
            "downtime": "float32",
            "amc_date": "string"
        }
        df = pd.read_csv(file_obj, dtype=dtypes)
        missing_columns = [col for col in required_columns if col not in df.columns]
        if missing_columns:
            return f"Missing columns: {missing_columns}", None, None, None, None, None, None, None, None
        # Lenient date parsing: unparseable cells become NaT.
        df["timestamp"] = pd.to_datetime(df["timestamp"], errors='coerce')
        df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')
        if df.empty:
            return "No data available.", None, None, None, None, None, None, None, None

        # Run the independent analysis steps concurrently; .result() blocks
        # until each finishes.
        with ThreadPoolExecutor() as executor:
            future_summary = executor.submit(summarize_logs, df)
            future_anomalies = executor.submit(detect_anomalies, df)
            future_amc = executor.submit(check_amc_reminders, df, datetime.now())
            future_insights = executor.submit(generate_dashboard_insights, df)
            future_chart = executor.submit(create_usage_chart, df)
            future_reports = executor.submit(create_salesforce_reports, df)

            summary = f"Step 1: Summary Report\n{future_summary.result()}"
            anomalies = f"Anomaly Detection\n{future_anomalies.result()}"
            amc_reminders = f"AMC Reminders\n{future_amc.result()}"
            insights = f"Dashboard Insights (AI)\n{future_insights.result()}"
            chart = future_chart.result()
            report_result = future_reports.result()

        # Human-readable preview of the first five rows.
        preview_lines = ["Step 2: Log Preview (First 5 Rows)"]
        for idx, row in df.head(5).iterrows():
            preview_lines.append(
                f"Row {idx + 1}: Device ID: {row['device_id']}, "
                f"Log Type: {row['log_type']}, Status: {row['status']}, "
                f"Timestamp: {row['timestamp']}, Usage Hours: {row['usage_hours']}, "
                f"Downtime: {row['downtime']}, AMC Date: {row['amc_date']}"
            )
        preview = "\n".join(preview_lines)

        # Persist rows and render the PDF after all analysis text is ready.
        salesforce_result = save_to_salesforce(df, summary, anomalies, amc_reminders, insights)
        pdf_file = generate_pdf_content(summary, preview, anomalies, amc_reminders, insights)

        progress(1.0, "Done!")
        return summary, preview, chart, anomalies, amc_reminders, insights, pdf_file, salesforce_result, report_result
    except Exception as e:
        logging.error(f"Failed to process file: {str(e)}")
        return f"Error: {str(e)}", None, None, None, None, None, None, None, None
488
+
489
# Gradio Interface
# Built at import time so `iface` exists for the __main__ launch block below.
try:
    logging.info("Initializing Gradio interface...")
    # Custom CSS for the dashboard card layout.
    with gr.Blocks(css="""
    .dashboard-container {border: 1px solid #e0e0e0; padding: 10px; border-radius: 5px;}
    .dashboard-title {font-size: 24px; font-weight: bold; margin-bottom: 5px;}
    .dashboard-section {margin-bottom: 20px;}
    .dashboard-section h3 {font-size: 18px; margin-bottom: 2px;}
    .dashboard-section p {margin: 1px 0; line-height: 1.2;}
    .dashboard-section ul {margin: 2px 0; padding-left: 20px;}
    """) as iface:
        gr.Markdown("<h1>LabOps Log Analyzer Dashboard (Hugging Face AI)</h1>")
        gr.Markdown("Upload a CSV file to analyze and generate Salesforce reports.")

        with gr.Row():
            # Left column: upload control + trigger button.
            with gr.Column(scale=1):
                file_input = gr.File(label="Upload Logs (CSV)", file_types=[".csv"])
                submit_button = gr.Button("Analyze", variant="primary")

            # Right column: one dashboard card per pipeline output.
            with gr.Column(scale=2):
                with gr.Group(elem_classes="dashboard-container"):
                    gr.Markdown("<div class='dashboard-title'>Analysis Results</div>")

                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 1: Summary Report")
                        summary_output = gr.Markdown()

                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 2: Log Preview")
                        preview_output = gr.Markdown()

                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 3: Usage Chart")
                        chart_output = gr.Plot()

                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 4: Anomaly Detection")
                        anomaly_output = gr.Markdown()

                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 5: AMC Reminders")
                        amc_output = gr.Markdown()

                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Step 6: Insights (AI)")
                        insights_output = gr.Markdown()

                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Salesforce Integration")
                        salesforce_output = gr.Markdown()

                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Salesforce Reports")
                        report_output = gr.Markdown()

                    with gr.Group(elem_classes="dashboard-section"):
                        gr.Markdown("### Download Report")
                        pdf_output = gr.File(label="Download Analysis Report as PDF")

        # Wire the button to the pipeline; output order must match the
        # 9-tuple returned by process_logs().
        submit_button.click(
            fn=process_logs,
            inputs=[file_input],
            outputs=[
                summary_output,
                preview_output,
                chart_output,
                anomaly_output,
                amc_output,
                insights_output,
                pdf_output,
                salesforce_output,
                report_output
            ]
        )

    logging.info("Gradio interface initialized successfully")
except Exception as e:
    logging.error(f"Failed to initialize Gradio interface: {str(e)}")
    raise e
568
+
569
if __name__ == "__main__":
    # Launch on all interfaces, port 7860 (the Hugging Face Spaces default).
    try:
        logging.info("Launching Gradio interface...")
        iface.launch(server_name="0.0.0.0", server_port=7860, debug=True, share=False)
        logging.info("Gradio interface launched successfully")
    except Exception as e:
        logging.error(f"Failed to launch Gradio interface: {str(e)}")
        print(f"Error launching app: {str(e)}")
        raise e