Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -81,6 +81,38 @@ def fetch_salesforce_data(sf, row_limit=10000, progress=gr.Progress()):
|
|
| 81 |
logging.error(f"Failed to fetch Salesforce data: {str(e)}")
|
| 82 |
raise e
|
| 83 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 84 |
# Format summary prompt and generate report
|
| 85 |
def summarize_logs(df, progress=gr.Progress()):
|
| 86 |
progress(0.1, "Generating summary report...")
|
|
@@ -199,17 +231,21 @@ def create_usage_chart(df, progress=gr.Progress()):
|
|
| 199 |
return None
|
| 200 |
|
| 201 |
# Main Gradio function
|
| 202 |
-
async def process_logs(progress=gr.Progress()):
|
| 203 |
try:
|
| 204 |
-
progress(0, "Starting
|
| 205 |
|
| 206 |
-
#
|
| 207 |
-
|
| 208 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 209 |
|
| 210 |
if df.empty:
|
| 211 |
-
logging.warning("No data
|
| 212 |
-
return "No data available
|
| 213 |
|
| 214 |
# Step 1: Summary Report
|
| 215 |
progress(0.2, "Generating summary...")
|
|
@@ -240,8 +276,8 @@ async def process_logs(progress=gr.Progress()):
|
|
| 240 |
progress(1.0, "Processing complete!")
|
| 241 |
return summary, preview, chart, anomalies, amc_reminders, insights
|
| 242 |
except Exception as e:
|
| 243 |
-
logging.error(f"Failed to process
|
| 244 |
-
return f"Failed to process
|
| 245 |
|
| 246 |
# Gradio Interface with Step-by-Step Layout
|
| 247 |
try:
|
|
@@ -256,11 +292,12 @@ try:
|
|
| 256 |
.dashboard-section ul {margin: 2px 0; padding-left: 20px;}
|
| 257 |
""") as iface:
|
| 258 |
gr.Markdown("<h1>LabOps Log Analyzer Dashboard (Salesforce + Hugging Face AI)</h1>")
|
| 259 |
-
gr.Markdown("
|
| 260 |
|
| 261 |
with gr.Row():
|
| 262 |
with gr.Column(scale=1):
|
| 263 |
-
|
|
|
|
| 264 |
|
| 265 |
with gr.Column(scale=2):
|
| 266 |
with gr.Group(elem_classes="dashboard-container"):
|
|
@@ -298,7 +335,7 @@ try:
|
|
| 298 |
|
| 299 |
submit_button.click(
|
| 300 |
fn=process_logs,
|
| 301 |
-
inputs=[],
|
| 302 |
outputs=[summary_output, preview_output, chart_output, anomaly_output, amc_output, insights_output]
|
| 303 |
)
|
| 304 |
|
|
|
|
| 81 |
logging.error(f"Failed to fetch Salesforce data: {str(e)}")
|
| 82 |
raise e
|
| 83 |
|
# Load data from CSV file
def load_csv_data(file_obj, row_limit=10000, progress=gr.Progress()):
    """Load lab equipment logs from an uploaded CSV file into a DataFrame.

    Parameters:
        file_obj: Uploaded file object (e.g. a gradio File, which exposes the
            temp path via ``.name``) or a plain path string.
        row_limit: Maximum number of rows to read — bounds memory on huge uploads.
        progress: Gradio progress tracker used for UI status updates.

    Returns:
        pandas.DataFrame restricted to the expected log columns, with
        ``timestamp`` and ``amc_date`` parsed to datetimes (invalid values
        become NaT rather than raising).

    Raises:
        ValueError: If the file does not have a .csv extension.
        Exception: Any pandas read/parse error is logged and re-raised.
    """
    progress(0.05, "Loading CSV file...")
    try:
        # gradio's File component wraps a temp file; fall back to treating
        # file_obj itself as a path string.
        file_name = file_obj.name if hasattr(file_obj, 'name') else file_obj
        logging.info(f"Processing CSV file: {file_name}")

        # Case-insensitive extension check so e.g. "LOGS.CSV" is accepted too.
        if not file_name.lower().endswith(".csv"):
            logging.error("Unsupported file format")
            raise ValueError("Unsupported file format. Please upload a CSV file.")

        # Read only the columns we use and pre-declare compact dtypes so a
        # wide or messy CSV can't blow up memory; nrows enforces row_limit.
        usecols = ["device_id", "log_type", "status", "timestamp", "usage_hours", "downtime", "amc_date"]
        dtypes = {
            "device_id": "string",
            "log_type": "string",
            "status": "string",
            "usage_hours": "float32",
            "downtime": "float32",
            "amc_date": "string"
        }
        df = pd.read_csv(file_name, usecols=usecols, dtype=dtypes, nrows=row_limit)

        # Convert timestamps; errors='coerce' turns bad values into NaT
        # instead of aborting the whole load.
        df["timestamp"] = pd.to_datetime(df["timestamp"], errors='coerce')
        df["amc_date"] = pd.to_datetime(df["amc_date"], errors='coerce')

        logging.info(f"File loaded successfully with {len(df)} rows (limited to {row_limit} rows)")
        return df
    except Exception as e:
        logging.error(f"Failed to load CSV: {str(e)}")
        # Bare raise preserves the original traceback (unlike `raise e`,
        # which re-anchors it at this line).
        raise
| 116 |
# Format summary prompt and generate report
|
| 117 |
def summarize_logs(df, progress=gr.Progress()):
|
| 118 |
progress(0.1, "Generating summary report...")
|
|
|
|
| 231 |
return None
|
| 232 |
|
| 233 |
# Main Gradio function
|
| 234 |
+
async def process_logs(file_obj=None, progress=gr.Progress()):
|
| 235 |
try:
|
| 236 |
+
progress(0, "Starting data processing...")
|
| 237 |
|
| 238 |
+
# Load data: prioritize CSV if uploaded, otherwise fetch from Salesforce
|
| 239 |
+
if file_obj:
|
| 240 |
+
df = load_csv_data(file_obj, row_limit=10000, progress=progress)
|
| 241 |
+
else:
|
| 242 |
+
progress(0.05, "No CSV uploaded, fetching from Salesforce...")
|
| 243 |
+
sf = connect_to_salesforce()
|
| 244 |
+
df = fetch_salesforce_data(sf, row_limit=10000, progress=progress)
|
| 245 |
|
| 246 |
if df.empty:
|
| 247 |
+
logging.warning("No data available")
|
| 248 |
+
return "No data available.", "No data to preview.", None, "No anomalies detected.", "No AMC reminders.", "No insights generated."
|
| 249 |
|
| 250 |
# Step 1: Summary Report
|
| 251 |
progress(0.2, "Generating summary...")
|
|
|
|
| 276 |
progress(1.0, "Processing complete!")
|
| 277 |
return summary, preview, chart, anomalies, amc_reminders, insights
|
| 278 |
except Exception as e:
|
| 279 |
+
logging.error(f"Failed to process data: {str(e)}")
|
| 280 |
+
return f"Failed to process data: {str(e)}", None, None, None, None, None
|
| 281 |
|
| 282 |
# Gradio Interface with Step-by-Step Layout
|
| 283 |
try:
|
|
|
|
| 292 |
.dashboard-section ul {margin: 2px 0; padding-left: 20px;}
|
| 293 |
""") as iface:
|
| 294 |
gr.Markdown("<h1>LabOps Log Analyzer Dashboard (Salesforce + Hugging Face AI)</h1>")
|
| 295 |
+
gr.Markdown("Upload a CSV file or fetch lab equipment logs from Salesforce to analyze usage.")
|
| 296 |
|
| 297 |
with gr.Row():
|
| 298 |
with gr.Column(scale=1):
|
| 299 |
+
file_input = gr.File(label="Upload Logs (CSV)", file_types=[".csv"])
|
| 300 |
+
submit_button = gr.Button("Analyze Data", variant="primary")
|
| 301 |
|
| 302 |
with gr.Column(scale=2):
|
| 303 |
with gr.Group(elem_classes="dashboard-container"):
|
|
|
|
| 335 |
|
| 336 |
submit_button.click(
|
| 337 |
fn=process_logs,
|
| 338 |
+
inputs=[file_input],
|
| 339 |
outputs=[summary_output, preview_output, chart_output, anomaly_output, amc_output, insights_output]
|
| 340 |
)
|
| 341 |
|