lavanya121 committed on
Commit
22a086d
·
verified ·
1 Parent(s): 357a70f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +149 -156
app.py CHANGED
@@ -1,180 +1,173 @@
1
- from flask import Flask, request, jsonify
2
  import pandas as pd
3
- from datetime import datetime, timedelta
4
  import json
 
5
  import logging
6
- import sys
 
7
 
8
- app = Flask(__name__)
9
-
10
- # Configure logging to diagnose issues
11
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
12
- logger = logging.getLogger(__name__)
13
 
14
# Try to load Hugging Face summarization pipeline with fallback.
# NOTE(review): if the model cannot be loaded (no network, missing weights),
# the app still starts and summarize_logs() returns a fixed "unavailable"
# message instead of crashing.
try:
    from transformers import pipeline
    # t5-small: lightweight text2text model; framework="pt" pins PyTorch.
    summarizer = pipeline("text2text-generation", model="t5-small", framework="pt")
    logger.info("Hugging Face model 't5-small' loaded successfully.")
except Exception as e:
    logger.error(f"Failed to load Hugging Face model: {str(e)}")
    summarizer = None  # Fallback to skip summarization if model fails
22
 
23
# Helper function to calculate days until AMC expiry
def days_until_expiry(expiry_date_str):
    """Return the number of whole days until *expiry_date_str* (YYYY-MM-DD).

    Returns None (after logging a warning) when the string is not a valid
    YYYY-MM-DD date.

    Uses calendar-date arithmetic: subtracting ``datetime.now()`` (which
    carries a time-of-day component) from the parsed midnight datetime
    silently shaved a day off the result for any time after midnight.
    """
    try:
        expiry_date = datetime.strptime(expiry_date_str, "%Y-%m-%d").date()
        # Compare dates, not datetimes, so "today" counts as 0 days, not -1.
        return (expiry_date - datetime.now().date()).days
    except ValueError as e:
        logger.warning(f"Invalid date format for AMC expiry: {expiry_date_str}, error: {str(e)}")
        return None
32
-
33
# Helper function to detect anomalies (rule-based, simplified)
def detect_anomalies(logs, usage_threshold=7):
    """Scan *logs* (list of dicts) and return a list of anomaly records.

    Rules:
      1. status == "ERROR"                          -> high severity
      2. usage_hours > usage_threshold (default 7)  -> high severity
      3. status == "DOWN" with zero usage_hours     -> medium severity

    The usage-spike threshold was previously hard-coded; it is now a
    backward-compatible keyword parameter. Malformed entries are logged
    and skipped rather than aborting the whole scan.
    """

    def _record(log, issue, severity):
        # All three rules emit the same record shape; build it in one place.
        return {
            "device_id": log.get("device_id", "Unknown"),
            "issue": issue,
            "detected_on": log.get("timestamp", "N/A"),
            "severity": severity,
        }

    anomalies = []
    for log in logs:
        try:
            # Rule 1: Flag ERROR status as high severity
            if log.get("status") == "ERROR":
                anomalies.append(_record(log, "ERROR status detected", "high"))
            # Rule 2: Flag usage spikes above the threshold
            if log.get("usage_hours", 0) > usage_threshold:
                anomalies.append(_record(log, "Usage spike", "high"))
            # Rule 3: Flag downtime (usage_hours = 0 with DOWN status)
            if log.get("status") == "DOWN" and log.get("usage_hours", 0) == 0:
                anomalies.append(_record(log, "Unplanned downtime", "medium"))
        except Exception as e:
            logger.error(f"Error processing log entry {log}: {str(e)}")
    return anomalies
65
-
66
# Build 30-day AMC expiry reminders from raw log dicts.
def generate_amc_reminders(logs):
    """Return reminder records for devices whose AMC expires within 30 days.

    Entries whose expiry date is missing, unparseable, already past, or
    more than 30 days away are skipped; per-entry failures are logged.
    """
    reminders = []
    for log in logs:
        try:
            remaining = days_until_expiry(log.get("amc_expiry", ""))
            # Guard clause: only future expiries within the 30-day window.
            if remaining is None or not (0 < remaining <= 30):
                continue
            reminders.append({
                "device_id": log.get("device_id", "Unknown"),
                "amc_expiry": log.get("amc_expiry", "N/A"),
                "days_remaining": remaining,
                "alert": f"AMC expires in {remaining} days",
            })
        except Exception as e:
            logger.error(f"Error processing AMC for log {log}: {str(e)}")
    return reminders
82
-
83
# Produce a natural-language summary of the logs via the HF pipeline.
def summarize_logs(logs, prompt):
    """Return a model-generated summary of *logs*, or a fallback message.

    Falls back to a fixed string when the module-level ``summarizer`` was
    never loaded, and to an error string when generation itself fails.
    """
    try:
        if summarizer is None:
            logger.warning("Summarizer model not available, returning basic summary.")
            return "Summary unavailable: Model not loaded. Please check logs for details."

        # Flatten each log dict into one descriptive line for the model.
        lines = []
        for log in logs:
            lines.append(
                f"Device {log.get('device_id', 'Unknown')} ({log.get('log_type', 'N/A')}): "
                f"Status {log.get('status', 'N/A')}, Usage {log.get('usage_hours', 0)} hours, "
                f"Timestamp {log.get('timestamp', 'N/A')}, AMC Expiry {log.get('amc_expiry', 'N/A')}"
            )
        input_text = f"{prompt}\n\nLogs:\n" + "\n".join(lines)

        # Deterministic generation; take the first candidate.
        result = summarizer(input_text, max_length=150, min_length=50, do_sample=False)
        summary = result[0]["generated_text"]
        return summary
    except Exception as e:
        logger.error(f"Error generating summary: {str(e)}")
        return f"Summary unavailable: {str(e)}"
100
 
101
# Flask endpoint: accept a JSON payload of logs, return full analysis.
@app.route("/process-logs", methods=["POST"])
def process_logs():
    """POST /process-logs

    Expects a JSON body with a "logs" list (and an optional "prompt").
    Responds with summary metrics, anomalies, AMC reminders and a
    formatted maintenance report; 400 on bad input, 500 on internal error.
    """
    try:
        data = request.get_json()

        # Reject payloads with no "logs" key before doing any work.
        if not data or "logs" not in data:
            logger.error("Invalid or missing logs in request.")
            return jsonify({"error": "No logs provided in the request"}), 400

        logs = data.get("logs", [])
        prompt = data.get("prompt", "Summarize downtime and usage patterns for SmartLab-1 from May 1 to May 14")

        if not logs:
            logger.error("Empty logs list provided.")
            return jsonify({"error": "Logs list is empty"}), 400

        # Tabulate the logs; reject shapes pandas cannot interpret.
        try:
            df = pd.DataFrame(logs)
        except Exception as e:
            logger.error(f"Failed to convert logs to DataFrame: {str(e)}")
            return jsonify({"error": f"Invalid log format: {str(e)}"}), 400

        # Aggregate metrics, degrading to zero/"N/A" on any failure.
        try:
            total_devices = len(df["device_id"].unique())
            avg_uptime = len(df[df["status"] == "OK"]) / len(df) * 100 if len(df) > 0 else 0
            downtime_events = len(df[df["status"] == "DOWN"])
            most_used_device = df.groupby("device_id")["usage_hours"].sum().idxmax() if not df.empty else "N/A"
        except Exception as e:
            logger.error(f"Error calculating summary metrics: {str(e)}")
            total_devices, avg_uptime, downtime_events, most_used_device = 0, 0, 0, "N/A"

        summary = {
            "total_devices": total_devices,
            "avg_uptime": f"{avg_uptime:.1f}%",
            "downtime_events": downtime_events,
            "most_used_device": most_used_device
        }
        anomalies = detect_anomalies(logs)
        amc_reminders = generate_amc_reminders(logs)
        text_summary = summarize_logs(logs, prompt)

        # Pre-render the bullet lists so the report template stays readable.
        anomaly_lines = chr(10).join(
            f"- {a['device_id']}: {a['issue']} on {a['detected_on']} ({a['severity']} severity)"
            for a in anomalies
        ) or "No anomalies detected"
        amc_lines = chr(10).join(
            f"- {r['device_id']}: AMC expires on {r['amc_expiry']} ({r['days_remaining']} days remaining)"
            for r in amc_reminders
        ) or "No AMC expirations within 30 days"

        report = f"""
SmartLab-1 Maintenance Report (May 1–14, 2025)
Generated on: {datetime.now().strftime('%Y-%m-%d')}

1. Summary
- Total Devices: {total_devices}
- Average Uptime: {avg_uptime:.1f}%
- Downtime Events: {downtime_events}
- Most Used Device: {most_used_device}

2. Anomalies Detected
{anomaly_lines}

3. AMC Alerts
{amc_lines}

4. AI-Generated Summary
{text_summary}
"""

        logger.info("Successfully processed logs and generated response.")
        return jsonify({
            "summary": summary,
            "anomalies": anomalies,
            "amc_reminders": amc_reminders,
            "maintenance_report": report
        })
    except Exception as e:
        logger.error(f"Unexpected error in process_logs: {str(e)}")
        return jsonify({"error": f"Internal server error: {str(e)}"}), 500
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
177
 
178
# Script entry point: run the Flask development server on all interfaces.
if __name__ == "__main__":
    logger.info("Starting Flask app with Python version %s" % sys.version)
    app.run(debug=True, host="0.0.0.0", port=5000)
 
 
 
 
 
 
 
1
+ import gradio as gr
2
  import pandas as pd
3
+ from datetime import datetime
4
  import json
5
+ from transformers import pipeline
6
  import logging
7
+ import os
8
+ import plotly.express as px
9
 
10
+ # Configure logging for debugging
 
 
11
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 
12
 
13
# Load Hugging Face summarization model once at import time.
# Unlike the summarize path, a load failure is fatal: the dashboard is
# useless without the model, so we log and re-raise to abort startup.
try:
    logging.info("Attempting to load Hugging Face model...")
    summarizer = pipeline("text2text-generation", model="google/flan-t5-base")
    logging.info("Hugging Face model loaded successfully")
except Exception as e:
    logging.error(f"Failed to load model: {str(e)}")
    # Bare `raise` (not `raise e`) keeps the original traceback intact.
    raise
21
 
22
# Format summary prompt and generate report via the HF model.
def summarize_logs(df, lab_name, start_date, end_date):
    """Generate a natural-language summary of the filtered log DataFrame.

    Parameters:
        df: pandas DataFrame with at least ``device_id`` and ``usage_hours``
            columns (assumed from usage here — confirm against upload schema).
        lab_name, start_date, end_date: strings interpolated into the prompt.

    Returns the model output string, or a fixed failure message on any error.

    The previous version also assigned placeholder ``avg_uptime``/
    ``downtime_events`` values that were never used; they are removed.
    """
    try:
        total_devices = df["device_id"].nunique()
        # Most-used device by total usage hours; guard against an empty frame.
        most_used = df.groupby("device_id")["usage_hours"].sum().idxmax() if not df.empty else "N/A"

        prompt = (
            f"Summarize maintenance and usage logs for lab {lab_name} "
            f"from {start_date} to {end_date}. "
            f"There were {total_devices} devices. "
            f"The most used device was {most_used}."
        )
        # Deterministic generation; take the first candidate's text.
        summary = summarizer(prompt, max_length=200, do_sample=False)[0]["generated_text"]
        logging.info("Summary generated successfully")
        return summary
    except Exception as e:
        logging.error(f"Summary generation failed: {str(e)}")
        return "Failed to generate summary."
42
 
43
# Render total usage hours per device as a Plotly bar chart.
def create_usage_chart(df):
    """Return a Plotly bar figure of summed usage hours per device.

    Returns None (after logging) if the chart cannot be built, e.g. when
    the expected columns are missing from *df*.
    """
    try:
        # One row per device with its total usage across the filtered range.
        per_device = df.groupby("device_id")["usage_hours"].sum().reset_index()

        fig = px.bar(
            per_device,
            x="device_id",
            y="usage_hours",
            title="Usage Hours per Device",
            labels={"device_id": "Device ID", "usage_hours": "Usage Hours"},
            color="usage_hours",
            color_continuous_scale="Blues",
        )

        # Flat white styling so the chart blends into the dashboard panel.
        fig.update_layout(
            title_font_size=16,
            margin=dict(l=20, r=20, t=40, b=20),
            plot_bgcolor="white",
            paper_bgcolor="white",
            font=dict(size=12),
        )
        return fig
    except Exception as e:
        logging.error(f"Failed to create usage chart: {str(e)}")
        return None
67
 
68
# Main Gradio function: load file -> filter by date -> summarize and plot.
def process_logs(file_obj, lab_site, start_date, end_date):
    """Handle a dashboard submission.

    Parameters mirror the Gradio inputs: an uploaded file object or path
    (CSV/JSON), the lab-site label, and an inclusive date range given as
    YYYY-MM-DD strings.

    Always returns a 3-tuple ``(summary_text, preview_markdown, chart)``
    matching the three output widgets; ``chart`` is None on any failure.
    Previously two error paths returned None for the preview slot while
    the other early exits returned "No data to preview." — all exits now
    use the same placeholder so the Markdown widget is never left blank.
    """
    try:
        if file_obj is None:
            logging.warning("No file uploaded, returning empty results")
            return "No file uploaded.", "No data to preview.", None

        # Gradio may hand us a tempfile wrapper or a plain path string.
        file_name = file_obj.name if hasattr(file_obj, 'name') else file_obj
        logging.info(f"Processing file: {file_name}")

        if file_name.endswith(".json"):
            df = pd.read_json(file_name)
        elif file_name.endswith(".csv"):
            df = pd.read_csv(file_name)
        else:
            logging.error("Unsupported file format")
            return "Unsupported file format. Please upload a CSV or JSON file.", "No data to preview.", None

        logging.info(f"File loaded successfully with {len(df)} rows")

        # Convert timestamp to datetime and keep rows inside the range.
        try:
            df["timestamp"] = pd.to_datetime(df["timestamp"])
            start_date = pd.to_datetime(start_date)
            end_date = pd.to_datetime(end_date)
            df = df[(df["timestamp"] >= start_date) & (df["timestamp"] <= end_date)]
            logging.info(f"Filtered to {len(df)} rows within date range {start_date} to {end_date}")
        except Exception as e:
            logging.error(f"Date filtering failed: {str(e)}")
            return f"Failed to filter data by date: {str(e)}", "No data to preview.", None

        if df.empty:
            logging.warning("No data within the specified date range")
            return "No data available for the specified date range.", "No data to preview.", None

        summary = summarize_logs(df, lab_site, start_date, end_date)
        preview = df.head().to_markdown() if not df.empty else "No data available."
        chart = create_usage_chart(df)

        return summary, preview, chart
    except Exception as e:
        logging.error(f"Failed to process file: {str(e)}")
        return f"Failed to process file: {str(e)}", "No data to preview.", None
112
+
113
# Gradio Interface with Dashboard Layout.
# Built at import time so `iface` exists for the __main__ launch below;
# any failure here is fatal and re-raised to abort startup.
try:
    logging.info("Initializing Gradio Blocks interface...")
    with gr.Blocks(css="""
    .dashboard-container {border: 1px solid #e0e0e0; padding: 10px; border-radius: 5px; background-color: #f9f9f9;}
    .dashboard-title {font-size: 24px; font-weight: bold; margin-bottom: 10px;}
    .dashboard-section {margin-bottom: 15px;}
    .dashboard-section h3 {font-size: 18px; margin-bottom: 5px;}
    """) as iface:
        gr.Markdown("<h1>LabOps Log Analyzer Dashboard (Hugging Face AI)</h1>")
        gr.Markdown("Upload a CSV or JSON file containing lab equipment logs to analyze usage.")

        with gr.Row():
            # Left column: all user inputs.
            with gr.Column(scale=1):
                file_input = gr.File(label="Upload Logs (CSV or JSON)", file_types=[".csv", ".json"])
                lab_site_input = gr.Textbox(label="Lab Site", placeholder="e.g., Lab A")
                start_date_input = gr.Textbox(label="Start Date (YYYY-MM-DD)", placeholder="e.g., 2025-01-01")
                end_date_input = gr.Textbox(label="End Date (YYYY-MM-DD)", placeholder="e.g., 2025-01-31")
                submit_button = gr.Button("Submit", variant="primary")

            # Right column: the three analysis panels.
            with gr.Column(scale=2):
                with gr.Group(elem_classes="dashboard-container"):
                    gr.Markdown("<div class='dashboard-title'>Analysis Dashboard</div>")

                    with gr.Row():
                        with gr.Column(scale=1):
                            with gr.Group(elem_classes="dashboard-section"):
                                gr.Markdown("### Summary Report")
                                summary_output = gr.Textbox(lines=5)

                    with gr.Row():
                        with gr.Column(scale=1):
                            with gr.Group(elem_classes="dashboard-section"):
                                gr.Markdown("### Usage Chart")
                                chart_output = gr.Plot()

                        with gr.Column(scale=1):
                            with gr.Group(elem_classes="dashboard-section"):
                                gr.Markdown("### Log Preview")
                                preview_output = gr.Markdown()

        # Wire the submit button to the processing pipeline; output order
        # must match the 3-tuple returned by process_logs.
        submit_button.click(
            fn=process_logs,
            inputs=[file_input, lab_site_input, start_date_input, end_date_input],
            outputs=[summary_output, preview_output, chart_output]
        )

    logging.info("Gradio interface initialized successfully")
except Exception as e:
    logging.error(f"Failed to initialize Gradio interface: {str(e)}")
    # Bare `raise` (not `raise e`) keeps the original traceback intact.
    raise
164
 
165
# Script entry point: start the Gradio server on all interfaces.
if __name__ == "__main__":
    try:
        logging.info("Launching Gradio interface...")
        # launch() blocks while the server runs, so the success log below
        # is only reached after the server shuts down.
        iface.launch(server_name="0.0.0.0", server_port=7860, debug=True, share=False)
        logging.info("Gradio interface launched successfully")
    except Exception as e:
        logging.error(f"Failed to launch Gradio interface: {str(e)}")
        print(f"Error launching app: {str(e)}")
        # Bare `raise` (not `raise e`) keeps the original traceback intact.
        raise