hmgill committed on
Commit
3c46cb0
·
verified ·
1 Parent(s): f412687

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -22
app.py CHANGED
@@ -12,7 +12,6 @@ from pathlib import Path
12
  from PIL import Image
13
 
14
  # --- Safe Input Mocking ---
15
- # Fix: Mock input to prevent hanging on server consoles
16
  builtins.input = lambda *args: "y"
17
 
18
  # GenAI & ADK Imports
@@ -29,7 +28,7 @@ except ImportError as e:
29
  Sam3Model = None
30
  Sam3Processor = None
31
  root_agent = None
32
- AnalysisDeps = None # FIX: Correctly indented inside except block
33
 
34
  # Optional: Distinctipy for better colors
35
  try:
@@ -100,6 +99,32 @@ def load_models():
100
  return f"⚠️ Model load failed: {e}"
101
 
102
  # --- Helpers ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
103
  def load_excel_data(logs_text):
104
  placeholder = pd.DataFrame({"Status": ["No Data Available"]})
105
  candidates = glob.glob("/tmp/*.xlsx") + glob.glob("*.xlsx")
@@ -134,15 +159,18 @@ def get_available_layers():
134
  files = glob.glob("/tmp/data_*.npz")
135
  layers = []
136
  for f in files:
137
- name = os.path.basename(f).replace("data_", "").replace(".npz", "")
138
- layers.append(name)
139
- return sorted(layers)
 
 
140
 
141
  def update_opacity_sliders(layers):
142
  updates = []
143
  for i in range(4):
144
  if i < len(layers):
145
- layer_name = layers[i].replace("_", " ").title()
 
146
  updates.append(gr.update(visible=True, label=f"{layer_name} Opacity", value=0.6))
147
  else:
148
  updates.append(gr.update(visible=False))
@@ -177,7 +205,8 @@ def generate_overlay(image_path_str, selected_layers, layer_opacities=None, forc
177
  base_w, base_h = base_img.size
178
 
179
  for file_path in all_layer_files:
180
- layer_name = os.path.basename(file_path).replace("data_", "").replace(".npz", "")
 
181
 
182
  # Skip if already cached
183
  if layer_name in MASK_CACHE["layers"]:
@@ -269,13 +298,11 @@ async def run_analysis(image_path_str, user_prompt, session_id_state):
269
  yield [{"role": "assistant", "content": error_msg}], None, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
270
  return
271
 
272
- # FIX: Safety check for AnalysisDeps availability
273
  if AnalysisDeps is None:
274
  error_msg = "❌ Project imports failed. 'AnalysisDeps' is missing. Check your 'cellemetry' package installation."
275
  yield [{"role": "assistant", "content": error_msg}], None, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
276
  return
277
 
278
- # FIX: Wrap Agent/Runner initialization in try/except to catch crashes here
279
  try:
280
  deps = AnalysisDeps(
281
  sam_model=MODEL_CACHE["model"],
@@ -315,12 +342,11 @@ async def run_analysis(image_path_str, user_prompt, session_id_state):
315
 
316
  logs = [f"🔄 **Starting analysis** on {MODEL_CACHE['device']}..."]
317
 
318
- # FIX: Encode spaces for Markdown
319
  display_path = image_path_str.replace(" ", "%20")
320
 
321
  def yield_status(log_list):
322
  full_log = "\n\n".join(log_list)
323
- # Use display_path
324
  user_msg = f"![](file={display_path})\n\n{user_prompt}"
325
  return [{"role": "user", "content": user_msg}, {"role": "assistant", "content": full_log}]
326
 
@@ -369,7 +395,6 @@ async def run_analysis(image_path_str, user_prompt, session_id_state):
369
  completion_msg = f"\n\n---\n\n✨ **Analysis finished!** Found {len(layers)} layer(s). Results are now available in the Segmentation and Quantitative Results tabs."
370
  full_log_text += completion_msg
371
 
372
- # Use display_path here too
373
  final_user_msg = f"![](file={display_path})\n\n{user_prompt}"
374
  final_history = [{"role": "user", "content": final_user_msg}, {"role": "assistant", "content": full_log_text}]
375
  slider_updates = update_opacity_sliders(layers)
@@ -396,26 +421,21 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
396
  if image_path and (not session_id or files):
397
  if not user_text: user_text = "Analyze this microscopy image."
398
 
399
- # FIX: Encode spaces for Markdown
400
  display_path = image_path.replace(" ", "%20")
401
 
402
  history.append({"role": "user", "content": f"![](file={display_path})\n\n{user_text}"})
403
  history.append({"role": "assistant", "content": "🔄 Starting analysis (Model loading may take a moment)..."})
404
 
405
- # Yield 1: Set initial visibility state ONCE
406
  yield history, session_id, image_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *empty_slider_updates, None, gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)
407
 
408
  final_result = None
409
- # FIX: Catch potential generator crashes here to prevent asyncio loop death
410
  try:
411
  async for result in run_analysis(image_path, user_text, session_id):
412
  final_result = result
413
  updated_history = result[0].copy()
414
  if files and len(updated_history) > 0:
415
- # Ensure the user message in history also uses the encoded path
416
  updated_history[0] = {"role": "user", "content": f"![](file={display_path})\n\n{user_text}"}
417
 
418
- # Yield Loop: Pass gr.update() to prevent flickering
419
  yield (updated_history, result[1], image_path, *result[2:], None, gr.update(), gr.update(), gr.update())
420
  except Exception as e:
421
  history.append({"role": "assistant", "content": f"❌ Critical Error: {str(e)}"})
@@ -427,7 +447,6 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
427
  if files and len(updated_history) > 0:
428
  updated_history[0] = {"role": "user", "content": f"![](file={display_path})\n\n{user_text}"}
429
 
430
- # Yield Final: Show Results
431
  yield (updated_history, final_result[1], image_path, *final_result[2:], None, gr.update(visible=False), gr.update(visible=False), gr.update(visible=True))
432
  return
433
 
@@ -445,7 +464,6 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
445
  content = types.Content(role="user", parts=[types.Part.from_text(text=user_text)])
446
  accumulated_response = ""
447
 
448
- # 1. Stream response text
449
  try:
450
  async for event in ACTIVE_RUNNER.run_async(user_id="demo_user", session_id=session_id, new_message=content):
451
  if event.content and event.content.parts:
@@ -459,15 +477,12 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
459
  yield history, session_id, current_img_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *empty_slider_updates, None, gr.update(), gr.update(), gr.update()
460
  return
461
 
462
- # 2. REFRESH DATA (Tables, Overlays, Layers)
463
  report_file, df_m, df_s, df_r = load_excel_data("")
464
  layers = get_available_layers()
465
 
466
- # Force overlay generation with new layers (using force_reload=True to clear cache)
467
  new_overlay = generate_overlay(current_img_path, layers, force_reload=True)
468
  slider_updates = update_opacity_sliders(layers)
469
 
470
- # 3. Yield FINAL update with new data
471
  yield (
472
  history,
473
  session_id,
 
12
  from PIL import Image
13
 
14
  # --- Safe Input Mocking ---
 
15
  builtins.input = lambda *args: "y"
16
 
17
  # GenAI & ADK Imports
 
28
  Sam3Model = None
29
  Sam3Processor = None
30
  root_agent = None
31
+ AnalysisDeps = None
32
 
33
  # Optional: Distinctipy for better colors
34
  try:
 
99
  return f"⚠️ Model load failed: {e}"
100
 
101
  # --- Helpers ---
102
+ def clean_layer_name(filename):
103
+ """
104
+ Converts 'data_blue_nuclei.npz' -> 'Nuclei'.
105
+ Removes standard color names and underscores.
106
+ """
107
+ # Remove prefix/suffix
108
+ raw = os.path.basename(filename).replace("data_", "").replace(".npz", "")
109
+
110
+ # Split into parts
111
+ parts = raw.split('_')
112
+
113
+ # Filter out common colors
114
+ colors = {
115
+ 'blue', 'green', 'red', 'yellow', 'cyan', 'magenta',
116
+ 'orange', 'purple', 'white', 'black', 'gray', 'grey',
117
+ 'pink', 'brown', 'lime', 'teal'
118
+ }
119
+
120
+ cleaned_parts = [p for p in parts if p.lower() not in colors]
121
+
122
+ # Join back and Title Case
123
+ if not cleaned_parts:
124
+ return raw.replace("_", " ").title()
125
+
126
+ return " ".join(cleaned_parts).title()
127
+
128
  def load_excel_data(logs_text):
129
  placeholder = pd.DataFrame({"Status": ["No Data Available"]})
130
  candidates = glob.glob("/tmp/*.xlsx") + glob.glob("*.xlsx")
 
159
  files = glob.glob("/tmp/data_*.npz")
160
  layers = []
161
  for f in files:
162
+ # UPDATED: Use the clean name helper
163
+ layers.append(clean_layer_name(f))
164
+
165
+ # Return unique sorted layers
166
+ return sorted(list(set(layers)))
167
 
168
  def update_opacity_sliders(layers):
169
  updates = []
170
  for i in range(4):
171
  if i < len(layers):
172
+ # Layer name is already clean from get_available_layers
173
+ layer_name = layers[i]
174
  updates.append(gr.update(visible=True, label=f"{layer_name} Opacity", value=0.6))
175
  else:
176
  updates.append(gr.update(visible=False))
 
205
  base_w, base_h = base_img.size
206
 
207
  for file_path in all_layer_files:
208
+ # UPDATED: Use clean name as key
209
+ layer_name = clean_layer_name(file_path)
210
 
211
  # Skip if already cached
212
  if layer_name in MASK_CACHE["layers"]:
 
298
  yield [{"role": "assistant", "content": error_msg}], None, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
299
  return
300
 
 
301
  if AnalysisDeps is None:
302
  error_msg = "❌ Project imports failed. 'AnalysisDeps' is missing. Check your 'cellemetry' package installation."
303
  yield [{"role": "assistant", "content": error_msg}], None, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
304
  return
305
 
 
306
  try:
307
  deps = AnalysisDeps(
308
  sam_model=MODEL_CACHE["model"],
 
342
 
343
  logs = [f"🔄 **Starting analysis** on {MODEL_CACHE['device']}..."]
344
 
345
+ # Encode spaces for Markdown
346
  display_path = image_path_str.replace(" ", "%20")
347
 
348
  def yield_status(log_list):
349
  full_log = "\n\n".join(log_list)
 
350
  user_msg = f"![](file={display_path})\n\n{user_prompt}"
351
  return [{"role": "user", "content": user_msg}, {"role": "assistant", "content": full_log}]
352
 
 
395
  completion_msg = f"\n\n---\n\n✨ **Analysis finished!** Found {len(layers)} layer(s). Results are now available in the Segmentation and Quantitative Results tabs."
396
  full_log_text += completion_msg
397
 
 
398
  final_user_msg = f"![](file={display_path})\n\n{user_prompt}"
399
  final_history = [{"role": "user", "content": final_user_msg}, {"role": "assistant", "content": full_log_text}]
400
  slider_updates = update_opacity_sliders(layers)
 
421
  if image_path and (not session_id or files):
422
  if not user_text: user_text = "Analyze this microscopy image."
423
 
 
424
  display_path = image_path.replace(" ", "%20")
425
 
426
  history.append({"role": "user", "content": f"![](file={display_path})\n\n{user_text}"})
427
  history.append({"role": "assistant", "content": "🔄 Starting analysis (Model loading may take a moment)..."})
428
 
 
429
  yield history, session_id, image_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *empty_slider_updates, None, gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)
430
 
431
  final_result = None
 
432
  try:
433
  async for result in run_analysis(image_path, user_text, session_id):
434
  final_result = result
435
  updated_history = result[0].copy()
436
  if files and len(updated_history) > 0:
 
437
  updated_history[0] = {"role": "user", "content": f"![](file={display_path})\n\n{user_text}"}
438
 
 
439
  yield (updated_history, result[1], image_path, *result[2:], None, gr.update(), gr.update(), gr.update())
440
  except Exception as e:
441
  history.append({"role": "assistant", "content": f"❌ Critical Error: {str(e)}"})
 
447
  if files and len(updated_history) > 0:
448
  updated_history[0] = {"role": "user", "content": f"![](file={display_path})\n\n{user_text}"}
449
 
 
450
  yield (updated_history, final_result[1], image_path, *final_result[2:], None, gr.update(visible=False), gr.update(visible=False), gr.update(visible=True))
451
  return
452
 
 
464
  content = types.Content(role="user", parts=[types.Part.from_text(text=user_text)])
465
  accumulated_response = ""
466
 
 
467
  try:
468
  async for event in ACTIVE_RUNNER.run_async(user_id="demo_user", session_id=session_id, new_message=content):
469
  if event.content and event.content.parts:
 
477
  yield history, session_id, current_img_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *empty_slider_updates, None, gr.update(), gr.update(), gr.update()
478
  return
479
 
 
480
  report_file, df_m, df_s, df_r = load_excel_data("")
481
  layers = get_available_layers()
482
 
 
483
  new_overlay = generate_overlay(current_img_path, layers, force_reload=True)
484
  slider_updates = update_opacity_sliders(layers)
485
 
 
486
  yield (
487
  history,
488
  session_id,