Update app.py
app.py CHANGED
@@ -4,89 +4,101 @@ import matplotlib.pyplot as plt
 from fpdf import FPDF
 import io
 from datetime import datetime, timedelta
+import sys
+
+# Check library versions for debugging
+debug_msg = "Library Versions:\n"
+try:
+    debug_msg += f"Python: {sys.version}\n"
+    debug_msg += f"Gradio: {gr.__version__}\n"
+    debug_msg += f"Pandas: {pd.__version__}\n"
+    debug_msg += f"Matplotlib: {matplotlib.__version__}\n"
+    debug_msg += f"FPDF: {FPDF.FPDF_VERSION}\n"
+except Exception as e:
+    debug_msg += f"Error checking library versions: {str(e)}\n"

 # Global DataFrame to store the CSV data
 df = pd.DataFrame()

 def upload_csv(file):
     global df
-
+    debug_msg_local = debug_msg + "\nStarting CSV upload process...\n"
     try:
         if file is None:
-            return ["All"], ["All"], ["All"], f"{
+            return ["All"], ["All"], ["All"], f"{debug_msg_local}No file uploaded. Please upload a CSV file.", "All", "All", "All", None, None, None, None

         # Read the CSV file
-
+        debug_msg_local += "Reading CSV file...\n"
         df = pd.read_csv(file)

         if df.empty:
-            return ["All"], ["All"], ["All"], f"{
+            return ["All"], ["All"], ["All"], f"{debug_msg_local}The uploaded CSV file is empty.", "All", "All", "All", None, None, None, None

         # Debug: Show the CSV content and column names
-
+        debug_msg_local += f"CSV Columns: {', '.join(df.columns)}\nRaw CSV Content:\n{df.to_string()}\n\n"

         # Define required columns
         required_columns = {'DeviceID', 'Lab', 'Type', 'Timestamp', 'Status', 'UsageCount'}
         if not required_columns.issubset(df.columns):
             missing_cols = required_columns - set(df.columns)
-            return ["All"], ["All"], ["All"], f"{
+            return ["All"], ["All"], ["All"], f"{debug_msg_local}Error: CSV is missing required columns: {', '.join(missing_cols)}", "All", "All", "All", None, None, None, None

         # Debug: Check data types and sample values
-
+        debug_msg_local += f"Data Types:\n{df.dtypes}\n\nSample Values:\n{df.head().to_string()}\n\n"

         # Check for empty or all-NaN columns
         if df['Lab'].dropna().empty:
-
+            debug_msg_local += "Error: Lab column is empty or contains only NaN values.\n"
         if df['Type'].dropna().empty:
-
+            debug_msg_local += "Error: Type column is empty or contains only NaN values.\n"
         if df['Lab'].dropna().empty or df['Type'].dropna().empty:
-            return ["All"], ["All"], ["All"],
+            return ["All"], ["All"], ["All"], debug_msg_local, "All", "All", "All", None, None, None, None

         # Convert Timestamp to datetime with error handling
-
+        debug_msg_local += "Converting Timestamp column...\n"
         try:
             df['Timestamp'] = pd.to_datetime(df['Timestamp'], errors='coerce')
-
+            debug_msg_local += f"Timestamps after conversion:\n{df['Timestamp'].to_string()}\n\n"
             timestamps_invalid = df['Timestamp'].isna().all()
             if timestamps_invalid:
-
+                debug_msg_local += "Warning: All Timestamp values are invalid or unparseable. Date range filtering will be disabled.\n"
         except Exception as e:
-            return ["All"], ["All"], ["All"], f"{
+            return ["All"], ["All"], ["All"], f"{debug_msg_local}Error: Failed to parse Timestamp column: {str(e)}", "All", "All", "All", None, None, None, None

         # Extract unique values for dropdowns
-
+        debug_msg_local += "Extracting unique values for dropdowns...\n"
         labs = ['All'] + sorted([str(lab) for lab in df['Lab'].dropna().unique()])
         types = ['All'] + sorted([str(v) for v in df['Type'].dropna().unique()])
-
+        debug_msg_local += f"Lab options: {', '.join(labs)}\nType options: {', '.join(types)}\n\n"

         # Extract date range for filter
         if timestamps_invalid:
             date_ranges = ['All']
-
+            debug_msg_local += "Date range dropdown disabled due to invalid timestamps.\n"
         else:
             min_date = df['Timestamp'].min()
             max_date = df['Timestamp'].max()
             if pd.isna(min_date) or pd.isna(max_date):
                 date_ranges = ['All']
-
+                debug_msg_local += "Warning: Could not determine date range due to invalid timestamps.\n"
             else:
                 min_date_str = min_date.strftime('%Y-%m-%d')
                 max_date_str = max_date.strftime('%Y-%m-%d')
                 date_ranges = ['All', f"{min_date_str} to {max_date_str}"]
-
+                debug_msg_local += f"Date Range: {min_date_str} to {max_date_str}\n"

         # Automatically trigger filter_and_visualize after upload with default filters
-
+        debug_msg_local += "Triggering initial visualization with default filters...\n"
         try:
             device_cards, plot_daily, plot_uptime, anomaly_text, filter_msg = filter_and_visualize("All", "All", "All")
-
+            debug_msg_local += f"Initial Filter Result: {filter_msg}\n"
         except Exception as e:
-
+            debug_msg_local += f"Initial Filter Error: {str(e)}\n"
             device_cards, plot_daily, plot_uptime, anomaly_text = None, None, None, None

-        return labs, types, date_ranges,
+        return labs, types, date_ranges, debug_msg_local, "All", "All", "All", device_cards, plot_daily, plot_uptime, anomaly_text
     except Exception as e:
-        return ["All"], ["All"], ["All"], f"{
+        return ["All"], ["All"], ["All"], f"{debug_msg_local}Failed to process CSV: {str(e)}", "All", "All", "All", None, None, None, None

 def filter_and_visualize(selected_lab, selected_type, selected_date_range):
     global df
@@ -295,49 +307,52 @@ def download_pdf(selected_lab, selected_type, selected_date_range):
     return None

 # Build the Gradio interface
-
-gr.
+try:
+    with gr.Blocks() as demo:
+        gr.Markdown("🧪 **Multi-Device LabOps Dashboard**\nMonitor smart lab devices, visualize logs, and generate PDF reports.")

-
-
-
-
-
-
-
+        with gr.Row():
+            csv_input = gr.File(label="Upload Device Logs CSV", file_types=[".csv"])
+
+        with gr.Row():
+            lab_dropdown = gr.Dropdown(label="Filter by Lab", choices=["All"], value="All")
+            type_dropdown = gr.Dropdown(label="Filter by Equipment Type", choices=["All"], value="All")
+            date_dropdown = gr.Dropdown(label="Filter by Date Range", choices=["All"], value="All")

-
-
+        with gr.Row():
+            submit_btn = gr.Button("Submit Filters")

-
-
-
-
+        with gr.Row():
+            device_cards = gr.DataFrame(label="Device Cards (Usage, Last Log)")
+            plot_daily = gr.Image(label="Daily Log Trends")
+            plot_uptime = gr.Image(label="Weekly Uptime %")

-
-
-
-
-
-
+        anomaly_output = gr.Textbox(label="Anomaly Alerts")
+
+        with gr.Row():
+            download_btn = gr.Button("Download PDF Report")
+
+        error_box = gr.Textbox(label="Status/Error Message", visible=True, interactive=False)

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        # Connect the components
+        csv_input.change(
+            fn=upload_csv,
+            inputs=csv_input,
+            outputs=[lab_dropdown, type_dropdown, date_dropdown, error_box, lab_dropdown, type_dropdown, date_dropdown, device_cards, plot_daily, plot_uptime, anomaly_output]
+        )
+
+        submit_btn.click(
+            fn=filter_and_visualize,
+            inputs=[lab_dropdown, type_dropdown, date_dropdown],
+            outputs=[device_cards, plot_daily, plot_uptime, anomaly_output, error_box]
+        )
+
+        download_btn.click(
+            fn=download_pdf,
+            inputs=[lab_dropdown, type_dropdown, date_dropdown],
+            outputs=gr.File(label="Download PDF")
+        )

-demo.launch()
+    demo.launch()
+except Exception as e:
+    print(f"Error launching Gradio interface: {str(e)}")