RathodHarish committed on
Commit
b99e0b1
·
verified ·
1 Parent(s): f7efdb5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +160 -159
app.py CHANGED
@@ -1,165 +1,166 @@
1
- from flask import Flask, request, jsonify
2
- import requests
3
  import pandas as pd
4
- import numpy as np
5
- from sklearn.ensemble import IsolationForest
6
- from datetime import datetime, timedelta
7
- from reportlab.lib.pagesizes import letter
8
- from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer
9
- from reportlab.lib.styles import getSampleStyleSheet
10
- import io
11
- import base64
12
  import os
13
 
14
# Flask application instance.
app = Flask(__name__)

# Salesforce credentials — read from the environment so real secrets never
# live in source control; the literal values below are placeholders only.
SF_USERNAME = os.getenv('SF_USERNAME', 'YOUR_USERNAME')
SF_PASSWORD = os.getenv('SF_PASSWORD', 'YOUR_PASSWORD_WITH_TOKEN')
SF_AUTH_URL = 'https://login.salesforce.com/services/oauth2/token'
20
-
21
# Get Salesforce access token
def get_sf_access_token():
    """Authenticate against Salesforce using the OAuth2 password grant.

    Returns:
        tuple: ``(access_token, instance_url)`` on success, or
        ``(None, None)`` on any failure (network error, bad credentials,
        malformed response).
    """
    try:
        response = requests.post(
            SF_AUTH_URL,
            data={
                'grant_type': 'password',
                'username': SF_USERNAME,
                'password': SF_PASSWORD,
            },
            timeout=30,  # don't hang the request thread on a slow auth server
        )
        response.raise_for_status()
        # Parse the body once instead of calling response.json() twice.
        payload = response.json()
        return payload['access_token'], payload['instance_url']
    except Exception as e:
        print(f"Error getting Salesforce token: {e}")
        return None, None
34
-
35
# Anomaly detection using Isolation Forest
def detect_anomalies(log_data):
    """Flag anomalous usage logs with an Isolation Forest model.

    Args:
        log_data: list of log record dicts; each record is expected to
            carry ``Id``, ``Usage__c`` and ``Timestamp__c`` keys.

    Returns:
        list[dict]: one ``{'Id': ..., 'is_anomaly': bool}`` per record,
        or an empty list when the input is empty or anything fails.
    """
    try:
        frame = pd.DataFrame(log_data)
        if frame.empty:
            return []
        # Convert timestamps to a numeric feature the model can use.
        frame['Timestamp__c'] = pd.to_datetime(frame['Timestamp__c'])
        frame['Timestamp_numeric'] = frame['Timestamp__c'].apply(
            lambda ts: ts.timestamp()
        )
        feature_matrix = frame[['Usage__c', 'Timestamp_numeric']]
        forest = IsolationForest(contamination=0.1, random_state=42)
        forest.fit(feature_matrix)
        # IsolationForest labels outliers as -1.
        frame['is_anomaly'] = forest.predict(feature_matrix) == -1
        return frame[['Id', 'is_anomaly']].to_dict('records')
    except Exception as e:
        print(f"Error in anomaly detection: {e}")
        return []
51
-
52
# Dashboard data endpoint
def _soql_quote(value):
    """Escape backslashes and single quotes so *value* is safe inside a
    single-quoted SOQL string literal (prevents SOQL injection)."""
    return value.replace('\\', '\\\\').replace("'", "\\'")


@app.route('/api/dashboard', methods=['GET'])
def get_dashboard_data():
    """Return device status, anomaly flags and chart data as JSON.

    Query params: ``lab`` and ``equipmentType`` (optional filters) and
    ``dateRange`` (number of days, default 7).  Responds 500 with an
    ``error`` key when Salesforce auth or any query fails.
    """
    lab = request.args.get('lab', '')
    equipment_type = request.args.get('equipmentType', '')
    date_range = request.args.get('dateRange', '7')
    # LAST_N_DAYS takes a bare integer; reject anything else up front so
    # user input can never alter the query structure.
    if not date_range.isdigit():
        date_range = '7'

    access_token, instance_url = get_sf_access_token()
    if not access_token:
        return jsonify({'error': 'Failed to authenticate with Salesforce'}), 500

    # Shared by every query below; params= lets requests URL-encode the SOQL
    # correctly instead of embedding it raw in the URL.
    headers = {'Authorization': f'Bearer {access_token}'}
    query_url = f'{instance_url}/services/data/v52.0/query'

    # Fetch devices; user-supplied filter values are escaped before
    # interpolation into the SOQL string.
    soql = ('SELECT Id, Name, Status__c, Usage__c, Last_Log_Timestamp__c, '
            'Lab__c, Type FROM Equipment__c WHERE ')
    if lab:
        soql += f"Lab__c = '{_soql_quote(lab)}' AND "
    if equipment_type:
        soql += f"Type = '{_soql_quote(equipment_type)}' AND "
    soql += f"Last_Log_Timestamp__c = LAST_N_DAYS:{date_range} LIMIT 100"

    try:
        response = requests.get(query_url, params={'q': soql},
                                headers=headers, timeout=30)
        response.raise_for_status()
        devices = response.json()['records']

        # Fetch raw logs for anomaly detection.
        log_soql = ('SELECT Id, Usage__c, Timestamp__c, Equipment__c '
                    'FROM SmartLog__c WHERE ')
        if lab:
            log_soql += f"Equipment__r.Lab__c = '{_soql_quote(lab)}' AND "
        if equipment_type:
            log_soql += f"Equipment__r.Type = '{_soql_quote(equipment_type)}' AND "
        log_soql += f"Timestamp__c = LAST_N_DAYS:{date_range}"

        log_response = requests.get(query_url, params={'q': log_soql},
                                    headers=headers, timeout=30)
        log_response.raise_for_status()
        logs = log_response.json()['records']

        # Mark a device anomalous if any of its logs was flagged.
        anomalies = detect_anomalies(logs)
        anomaly_map = {a['Id']: a['is_anomaly'] for a in anomalies}
        devices_with_anomalies = [
            {**d, 'isAnomaly': any(l['Equipment__c'] == d['Id']
                                   and anomaly_map.get(l['Id'], False)
                                   for l in logs)}
            for d in devices
        ]

        # Daily log counts for the trend chart.
        chart_soql = (f"SELECT CALENDAR_DATE(Timestamp__c) logDate, COUNT(Id) logCount "
                      f"FROM SmartLog__c WHERE Timestamp__c = LAST_N_DAYS:{date_range} "
                      f"GROUP BY CALENDAR_DATE(Timestamp__c)")
        chart_response = requests.get(query_url, params={'q': chart_soql},
                                      headers=headers, timeout=30)
        chart_response.raise_for_status()
        chart_data = chart_response.json()['records']
        labels = [str(r['logDate']) for r in chart_data]
        data = [r['logCount'] for r in chart_data]

        # Weekly uptime %: share of 'Active' logs among all logs in the window.
        uptime_soql = (f"SELECT Equipment__c, COUNT(Id) upCount FROM SmartLog__c "
                       f"WHERE Status__c = 'Active' AND Timestamp__c = LAST_N_DAYS:{date_range} "
                       f"GROUP BY Equipment__c")
        uptime_response = requests.get(query_url, params={'q': uptime_soql},
                                       headers=headers, timeout=30)
        uptime_response.raise_for_status()
        uptime_data = uptime_response.json()['records']
        total_logs = sum(r['logCount'] for r in chart_data)
        uptime = sum(r['upCount'] for r in uptime_data) / total_logs * 100 if total_logs > 0 else 0

        return jsonify({
            'devices': devices_with_anomalies,
            'chartData': {
                'dailyTrends': {'labels': labels, 'data': data},
                'weeklyUptime': round(uptime, 2),
            }
        })
    except Exception as e:
        print(f"Error fetching dashboard data: {e}")
        return jsonify({'error': 'Failed to fetch data'}), 500
122
-
123
# PDF generation endpoint
@app.route('/api/generate-pdf', methods=['POST'])
def generate_pdf():
    """Render the posted dashboard rows into a PDF, returned base64-encoded.

    Body: JSON object with a ``dashboardData`` list of device dicts.
    Responds 500 with an ``error`` key if rendering fails.
    """
    # get_json(silent=True) tolerates a missing or invalid JSON body instead
    # of letting Flask abort with a 400 before we can respond.
    payload = request.get_json(silent=True) or {}
    dashboard_data = payload.get('dashboardData', [])
    try:
        output = io.BytesIO()
        doc = SimpleDocTemplate(output, pagesize=letter)
        styles = getSampleStyleSheet()
        elements = [Paragraph("LabOps Monthly Report", styles['Title']), Spacer(1, 12)]

        for device in dashboard_data:
            # .get() keeps one malformed record from failing the whole report.
            text = (
                f"Device: {device.get('Name', 'N/A')}<br/>"
                f"Health: {device.get('Status__c', 'N/A')}<br/>"
                f"Usage: {device.get('Usage__c', 'N/A')}<br/>"
                f"Last Log: {device.get('Last_Log_Timestamp__c', 'N/A')}<br/>"
                f"Anomaly: {'Yes' if device.get('isAnomaly') else 'No'}"
            )
            elements.append(Paragraph(text, styles['Normal']))
            elements.append(Spacer(1, 12))

        doc.build(elements)
        pdf_base64 = base64.b64encode(output.getvalue()).decode('utf-8')
        output.close()
        return jsonify({'pdf': pdf_base64})
    except Exception as e:
        print(f"Error generating PDF: {e}")
        return jsonify({'error': 'Failed to generate PDF'}), 500
145
-
146
# AMC expiry checker
@app.route('/api/check-amc', methods=['POST'])
def check_amc_expiry():
    """List equipment whose AMC expires within the next 14 days.

    Responds with ``{'status': 'Success', 'expiringAMCs': [...]}`` or a
    500 ``error`` payload when auth or the query fails.
    """
    access_token, instance_url = get_sf_access_token()
    if not access_token:
        return jsonify({'error': 'Failed to authenticate with Salesforce'}), 500

    try:
        # Cutoff two weeks out, formatted as a SOQL date literal.
        cutoff = datetime.now() + timedelta(days=14)
        expiry_date = cutoff.strftime('%Y-%m-%d')
        soql = (
            "SELECT Id, Name, AMC_Expiry_Date__c, Lab__c FROM Equipment__c "
            f"WHERE AMC_Expiry_Date__c <= {expiry_date}"
        )
        response = requests.get(
            f'{instance_url}/services/data/v52.0/query?q={soql}',
            headers={'Authorization': f'Bearer {access_token}'},
        )
        response.raise_for_status()
        equipment = response.json()['records']
        return jsonify({'status': 'Success', 'expiringAMCs': equipment})
    except Exception as e:
        print(f"Error checking AMC: {e}")
        return jsonify({'error': 'Failed to check AMC expiries'}), 500
163
-
164
# Run the development server when executed directly.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
 
2
  import pandas as pd
3
+ import plotly.express as px
4
+ from weasyprint import HTML
5
+ import datetime
 
 
 
 
 
6
  import os
7
 
8
# Data loading with detailed error handling.
def load_data():
    """Load and validate the log CSVs and the equipment master CSV.

    Returns:
        tuple: ``(logs_df, equipment_df)`` on success, or
        ``(None, error_message)`` when any file is missing, has an
        unexpected schema, or contains unparseable dates.
    """
    log_columns = ["device_id", "timestamp", "status", "metrics"]
    equipment_columns = ["device_id", "type", "lab_id", "amc_expiry"]
    try:
        # Single source of truth for the file layout — the original kept two
        # overlapping lists (required_files and log_files) that could drift.
        log_files = [
            "data/smartlog.csv",
            "data/cell_analysis.csv",
            "data/weight_log.csv",
            "data/uv_verification.csv",
        ]
        equipment_file = "data/equipment.csv"
        required_files = log_files + [equipment_file]

        missing_files = [file for file in required_files if not os.path.exists(file)]
        if missing_files:
            return None, f"Missing files: {', '.join(missing_files)}"

        # Load each log file, validating its schema before concatenation.
        logs_list = []
        for file in log_files:
            try:
                df = pd.read_csv(file)
                if not all(col in df.columns for col in log_columns):
                    return None, f"Invalid columns in {file}. Expected: device_id, timestamp, status, metrics"
                logs_list.append(df)
            except Exception as e:
                return None, f"Error reading {file}: {str(e)}"

        logs = pd.concat(logs_list, ignore_index=True)
        # errors='coerce' turns bad timestamps into NaT so we can report them.
        logs["timestamp"] = pd.to_datetime(logs["timestamp"], errors='coerce')
        if logs["timestamp"].isna().any():
            return None, "Error parsing timestamps in logs"

        # Equipment master data.
        equipment = pd.read_csv(equipment_file)
        if not all(col in equipment.columns for col in equipment_columns):
            return None, "Invalid columns in equipment.csv. Expected: device_id, type, lab_id, amc_expiry"
        equipment["amc_expiry"] = pd.to_datetime(equipment["amc_expiry"], errors='coerce')
        if equipment["amc_expiry"].isna().any():
            return None, "Error parsing amc_expiry in equipment.csv"

        return logs, equipment
    except Exception as e:
        return None, f"Unexpected error: {str(e)}"
55
+
56
# Generate PDF report
def generate_pdf_report(data):
    """Render *data* as an HTML table into a timestamped PDF under reports/.

    Args:
        data: pandas DataFrame of the filtered logs.

    Returns:
        str: path of the PDF file that was written.
    """
    # Ensure the output directory exists before writing.
    os.makedirs("reports", exist_ok=True)
    html = f"""
    <h1>LabOps Dashboard Report</h1>
    <h2>Filtered Data</h2>
    {data.to_html()}
    """
    timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
    filename = f"reports/report_{timestamp}.pdf"
    HTML(string=html).write_pdf(filename)
    return filename
67
+
68
# Main dashboard function
def render_dashboard(lab, device_type, date_start, date_end):
    """Build every dashboard output for the selected filters.

    Returns a 6-tuple matching the Gradio outputs: device-card HTML,
    downtime figure, usage figure, AMC reminder text, report file path,
    and a debug/status string.
    """
    logs, equipment = load_data()

    # load_data signals failure as (None, <error message>) — the second
    # element is the message in that case, not a DataFrame.
    if logs is None:
        return f"<div style='color: red;'>Error: {equipment}</div>", None, None, f"Data loading failed: {equipment}", None, f"Error: {equipment}"

    # Narrow the logs by lab, device type, and the date window.
    selection = logs
    if lab != "All":
        lab_devices = equipment.loc[equipment["lab_id"] == lab, "device_id"]
        selection = selection[selection["device_id"].isin(lab_devices)]
    if device_type != "All":
        typed_devices = equipment.loc[equipment["type"] == device_type, "device_id"]
        selection = selection[selection["device_id"].isin(typed_devices)]
    window_start = pd.to_datetime(date_start).date()
    window_end = pd.to_datetime(date_end).date()
    in_window = (selection["timestamp"].dt.date >= window_start) & \
                (selection["timestamp"].dt.date <= window_end)
    selection = selection[in_window]

    # Device Cards (FR-002): one card per device, colored by latest status.
    card_fragments = []
    for device_id in selection["device_id"].unique():
        device_rows = selection[selection["device_id"] == device_id]
        if device_rows.empty:
            continue
        latest_status = device_rows["status"].iloc[-1]
        color = "green" if latest_status == "UP" else "red"
        card_fragments.append(f"""
        <div style='border: 1px solid {color}; padding: 10px; margin: 10px; border-radius: 5px;'>
            <h3>Device: {device_id}</h3>
            <p>Status: <span style='color: {color};'>{latest_status}</span></p>
            <p>Last Log: {device_rows['timestamp'].iloc[-1]}</p>
            <p>Usage Count: {len(device_rows)}</p>
        </div>
        """)
    device_cards = "".join(card_fragments)

    # Charts (FR-003): downtime event counts and mean daily usage.
    downtime = (
        selection[selection["status"] == "DOWN"]
        .groupby("device_id")
        .size()
        .reset_index(name="downtime_count")
    )
    downtime_fig = px.bar(downtime, x="device_id", y="downtime_count",
                          title="Downtime Events by Device")

    usage = (
        selection.groupby([selection["timestamp"].dt.date, "device_id"])["metrics"]
        .mean()
        .reset_index()
    )
    usage_fig = px.line(usage, x="timestamp", y="metrics", color="device_id",
                        title="Daily Usage Metrics")

    # AMC Reminders (FR-004): contracts expiring within the next two weeks.
    today = datetime.datetime.now().date()
    two_weeks = today + datetime.timedelta(days=14)
    amc_alerts = equipment[equipment["amc_expiry"].dt.date <= two_weeks]
    if amc_alerts.empty:
        amc_text = "No AMC expirations within 2 weeks."
    else:
        amc_text = amc_alerts[['device_id', 'type', 'lab_id', 'amc_expiry']].to_string()

    # PDF Export (FR-004)
    report_file = generate_pdf_report(selection)

    return device_cards, downtime_fig, usage_fig, amc_text, report_file, "Data loaded successfully"
121
+
122
# Populate dropdown choices up front, falling back to bare defaults when
# the data cannot be loaded (equipment_or_error then holds the message).
logs, equipment_or_error = load_data()
if logs is None:
    lab_choices = ["All"]
    device_type_choices = ["All"]
    initial_error = f"Error loading data: {equipment_or_error}"
else:
    lab_choices = ["All"] + list(equipment_or_error["lab_id"].unique())
    device_type_choices = ["All"] + list(equipment_or_error["type"].unique())
    initial_error = "Data loaded successfully"

# Gradio interface
with gr.Blocks(css=".gradio-container {max-width: 100%;}") as demo:
    gr.Markdown("# LabOps Dashboard")

    # Filter controls
    with gr.Row():
        lab = gr.Dropdown(choices=lab_choices, label="Select Lab", value="All")
        device_type = gr.Dropdown(choices=device_type_choices, label="Device Type", value="All")
    with gr.Row():
        date_start = gr.DateTime(
            label="Start Date",
            value=logs["timestamp"].min() if logs is not None else "2025-05-30",
        )
        date_end = gr.DateTime(
            label="End Date",
            value=logs["timestamp"].max() if logs is not None else "2025-05-30",
        )

    refresh_btn = gr.Button("Refresh")

    # Output widgets
    device_cards = gr.HTML(label="Device Status")
    downtime_plot = gr.Plot(label="Downtime Trends")
    usage_plot = gr.Plot(label="Usage Trends")
    amc_reminders = gr.Textbox(label="AMC Expiry Reminders")
    report_download = gr.File(label="Download Report")
    debug_info = gr.Textbox(label="Debug Info", value=initial_error)

    input_components = [lab, device_type, date_start, date_end]
    outputs = [device_cards, downtime_plot, usage_plot, amc_reminders, report_download, debug_info]

    # Re-render whenever any filter changes, and on explicit refresh.
    for input_component in input_components:
        input_component.change(fn=render_dashboard, inputs=input_components, outputs=outputs)
    refresh_btn.click(fn=render_dashboard, inputs=input_components, outputs=outputs)

demo.launch()