hmgill committed on
Commit
0b0bc00
·
verified ·
1 Parent(s): 06e2930

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +32 -26
app.py CHANGED
@@ -28,7 +28,7 @@ except ImportError as e:
28
  Sam3Model = None
29
  Sam3Processor = None
30
  root_agent = None
31
- AnalysisDeps = None # Indented correctly to prevent overwriting imports [cite: 2]
32
 
33
  # Optional: Distinctipy for better colors
34
  try:
@@ -256,15 +256,15 @@ def generate_overlay(image_path_str, selected_layers, layer_opacities=None, forc
256
 
257
  # --- Core Logic ---
258
  async def run_analysis(image_path_str, user_prompt, session_id_state):
259
- waiting_df = pd.DataFrame({"Status": ["Waiting..."]})
260
- empty_slider_updates = [gr.update()] * 4
261
 
262
  if not MODEL_CACHE["loaded"]:
263
- yield [], None, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
264
  load_models()
265
 
266
  if not image_path_str:
267
- yield [], None, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
268
  return
269
 
270
  # Cleanup
@@ -281,12 +281,12 @@ async def run_analysis(image_path_str, user_prompt, session_id_state):
281
 
282
  if MODEL_CACHE["model"] is None:
283
  error_msg = "❌ Model failed to load. Please check logs."
284
- yield [{"role": "assistant", "content": error_msg}], None, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
285
  return
286
 
287
  if AnalysisDeps is None:
288
  error_msg = "❌ Project imports failed. 'AnalysisDeps' is missing. Check your 'cellemetry' package installation."
289
- yield [{"role": "assistant", "content": error_msg}], None, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
290
  return
291
 
292
  try:
@@ -314,7 +314,7 @@ async def run_analysis(image_path_str, user_prompt, session_id_state):
314
  except Exception as e:
315
  error_msg = f"❌ Agent Initialization Failed: {str(e)}"
316
  print(error_msg)
317
- yield [{"role": "assistant", "content": error_msg}], None, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
318
  return
319
 
320
  image_bytes = image_path.read_bytes()
@@ -328,7 +328,6 @@ async def run_analysis(image_path_str, user_prompt, session_id_state):
328
 
329
  logs = [f"πŸ”„ **Starting analysis** on {MODEL_CACHE['device']}..."]
330
 
331
- # Encode spaces for Markdown
332
  display_path = image_path_str.replace(" ", "%20")
333
 
334
  def yield_status(log_list):
@@ -336,7 +335,8 @@ async def run_analysis(image_path_str, user_prompt, session_id_state):
336
  user_msg = f"![](file={display_path})\n\n{user_prompt}"
337
  return [{"role": "user", "content": user_msg}, {"role": "assistant", "content": full_log}]
338
 
339
- yield yield_status(logs), session_id, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
 
340
 
341
  try:
342
  async for event in ACTIVE_RUNNER.run_async(user_id="demo_user", session_id=session.id, new_message=content):
@@ -345,7 +345,7 @@ async def run_analysis(image_path_str, user_prompt, session_id_state):
345
  if event.get_function_calls():
346
  for fc in event.get_function_calls():
347
  logs.append(f"πŸ”§ **{author}**: Calling `{fc.name}`")
348
- yield yield_status(logs), session_id, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
349
 
350
  if event.content and event.content.parts:
351
  for part in event.content.parts:
@@ -360,15 +360,15 @@ async def run_analysis(image_path_str, user_prompt, session_id_state):
360
  logs[-1] = f"βœ… **{author}**: {part.text}"
361
  else:
362
  logs.append(f"βœ… **{author}**: {part.text}")
363
- yield yield_status(logs), session_id, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
364
 
365
  except Exception as e:
366
  logs.append(f"❌ Error: {e}")
367
- yield yield_status(logs), session_id, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
368
  return
369
 
370
  logs.append("\nβœ… **Analysis Complete!** Loading results...")
371
- yield yield_status(logs), session_id, None, [], None, waiting_df, waiting_df, waiting_df, *empty_slider_updates
372
 
373
  await asyncio.sleep(0.5)
374
 
@@ -385,6 +385,7 @@ async def run_analysis(image_path_str, user_prompt, session_id_state):
385
  final_history = [{"role": "user", "content": final_user_msg}, {"role": "assistant", "content": full_log_text}]
386
  slider_updates = update_opacity_sliders(layers)
387
 
 
388
  yield final_history, session_id, initial_overlay, gr.CheckboxGroup(choices=layers, value=layers), report_file, df_m, df_s, df_r, *slider_updates
389
 
390
  async def unified_chat_handler(message, history, session_id, current_img_path):
@@ -401,9 +402,10 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
401
  image_path = current_img_path
402
 
403
  waiting_df = pd.DataFrame({"Status": ["Waiting..."]})
404
- empty_slider_updates = [gr.update()] * 4
 
405
 
406
- # CASE 1: INITIAL ANALYSIS (Show Loading Overlay)
407
  if image_path and (not session_id or files):
408
  if not user_text: user_text = "Analyze this microscopy image."
409
 
@@ -413,7 +415,7 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
413
  history.append({"role": "assistant", "content": "πŸ”„ Starting analysis (Model loading may take a moment)..."})
414
 
415
  # Show Loading, Hide Results
416
- yield history, session_id, image_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *empty_slider_updates, None, gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)
417
 
418
  final_result = None
419
  try:
@@ -423,10 +425,11 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
423
  if files and len(updated_history) > 0:
424
  updated_history[0] = {"role": "user", "content": f"![](file={display_path})\n\n{user_text}"}
425
 
 
426
  yield (updated_history, result[1], image_path, *result[2:], None, gr.update(), gr.update(), gr.update())
427
  except Exception as e:
428
  history.append({"role": "assistant", "content": f"❌ Critical Error: {str(e)}"})
429
- yield history, session_id, image_path, None, gr.CheckboxGroup(), None, waiting_df, waiting_df, waiting_df, *empty_slider_updates, None, gr.update(visible=False), gr.update(visible=False), gr.update(visible=True)
430
  return
431
 
432
  if final_result:
@@ -438,17 +441,18 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
438
  yield (updated_history, final_result[1], image_path, *final_result[2:], None, gr.update(visible=False), gr.update(visible=False), gr.update(visible=True))
439
  return
440
 
441
- # CASE 2: FOLLOW-UP ANALYSIS (No Loading Overlay)
442
  elif session_id and user_text:
443
  history.append({"role": "user", "content": user_text})
444
  history.append({"role": "assistant", "content": "πŸ’­ Thinking..."})
445
 
446
- # [cite_start]Don't show loading overlay for follow-up questions [cite: 71]
447
- yield history, session_id, current_img_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *empty_slider_updates, None, gr.update(), gr.update(), gr.update()
 
448
 
449
  if not ACTIVE_RUNNER:
450
  history[-1]["content"] = "⚠️ Session expired or Agent not initialized."
451
- yield history, None, current_img_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *empty_slider_updates, None, gr.update(), gr.update(), gr.update()
452
  return
453
 
454
  content = types.Content(role="user", parts=[types.Part.from_text(text=user_text)])
@@ -461,10 +465,11 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
461
  if hasattr(part, 'text') and part.text:
462
  accumulated_response += part.text
463
  history[-1]["content"] = accumulated_response
464
- yield history, session_id, current_img_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *empty_slider_updates, None, gr.update(), gr.update(), gr.update()
 
465
  except Exception as e:
466
  history[-1]["content"] = f"❌ Error: {e}"
467
- yield history, session_id, current_img_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *empty_slider_updates, None, gr.update(), gr.update(), gr.update()
468
  return
469
 
470
  report_file, df_m, df_s, df_r = load_excel_data("")
@@ -473,6 +478,7 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
473
  new_overlay = generate_overlay(current_img_path, layers, force_reload=True)
474
  slider_updates = update_opacity_sliders(layers)
475
 
 
476
  yield (
477
  history,
478
  session_id,
@@ -494,7 +500,7 @@ async def unified_chat_handler(message, history, session_id, current_img_path):
494
  history = [{"role": "assistant", "content": "πŸ‘‹ Welcome! Upload a microscopy image and describe what you'd like to analyze."}]
495
  else:
496
  history.append({"role": "assistant", "content": "⚠️ Please provide a question or upload a new image."})
497
- yield history, session_id, current_img_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *empty_slider_updates, None, gr.update(), gr.update(), gr.update()
498
 
499
  # --- UI Layout ---
500
 
@@ -579,7 +585,7 @@ with gr.Blocks(title="Cellemetry Agent", css=custom_css) as demo:
579
  <div style="text-align: center;">
580
  <div style="border: 8px solid #f3f3f3; border-top: 8px solid #3498db; border-radius: 50%; width: 60px; height: 60px; animation: spin 1s linear infinite; margin: 0 auto 20px;"></div>
581
  <h3 style="color: #555; margin: 0;">βš™οΈ Analyzing</h3>
582
- <p style="color: #888; margin-top: 10px;">Please wait while your image is processed...</p>
583
  </div>
584
  <style>@keyframes spin { 0% { transform: rotate(0deg); } 100% { transform: rotate(360deg); } }</style>
585
  </div>
 
28
  Sam3Model = None
29
  Sam3Processor = None
30
  root_agent = None
31
+ AnalysisDeps = None
32
 
33
  # Optional: Distinctipy for better colors
34
  try:
 
256
 
257
  # --- Core Logic ---
258
  async def run_analysis(image_path_str, user_prompt, session_id_state):
259
+ # FIX: Use gr.skip() for updates to prevent UI jitter during streaming
260
+ skipped_updates = [gr.skip()] * 4
261
 
262
  if not MODEL_CACHE["loaded"]:
263
+ yield [], None, None, [], None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates
264
  load_models()
265
 
266
  if not image_path_str:
267
+ yield [], None, None, [], None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates
268
  return
269
 
270
  # Cleanup
 
281
 
282
  if MODEL_CACHE["model"] is None:
283
  error_msg = "❌ Model failed to load. Please check logs."
284
+ yield [{"role": "assistant", "content": error_msg}], None, None, [], None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates
285
  return
286
 
287
  if AnalysisDeps is None:
288
  error_msg = "❌ Project imports failed. 'AnalysisDeps' is missing. Check your 'cellemetry' package installation."
289
+ yield [{"role": "assistant", "content": error_msg}], None, None, [], None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates
290
  return
291
 
292
  try:
 
314
  except Exception as e:
315
  error_msg = f"❌ Agent Initialization Failed: {str(e)}"
316
  print(error_msg)
317
+ yield [{"role": "assistant", "content": error_msg}], None, None, [], None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates
318
  return
319
 
320
  image_bytes = image_path.read_bytes()
 
328
 
329
  logs = [f"πŸ”„ **Starting analysis** on {MODEL_CACHE['device']}..."]
330
 
 
331
  display_path = image_path_str.replace(" ", "%20")
332
 
333
  def yield_status(log_list):
 
335
  user_msg = f"![](file={display_path})\n\n{user_prompt}"
336
  return [{"role": "user", "content": user_msg}, {"role": "assistant", "content": full_log}]
337
 
338
+ # FIX: Yield skips instead of updates
339
+ yield yield_status(logs), session_id, None, [], None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates
340
 
341
  try:
342
  async for event in ACTIVE_RUNNER.run_async(user_id="demo_user", session_id=session.id, new_message=content):
 
345
  if event.get_function_calls():
346
  for fc in event.get_function_calls():
347
  logs.append(f"πŸ”§ **{author}**: Calling `{fc.name}`")
348
+ yield yield_status(logs), session_id, None, [], None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates
349
 
350
  if event.content and event.content.parts:
351
  for part in event.content.parts:
 
360
  logs[-1] = f"βœ… **{author}**: {part.text}"
361
  else:
362
  logs.append(f"βœ… **{author}**: {part.text}")
363
+ yield yield_status(logs), session_id, None, [], None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates
364
 
365
  except Exception as e:
366
  logs.append(f"❌ Error: {e}")
367
+ yield yield_status(logs), session_id, None, [], None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates
368
  return
369
 
370
  logs.append("\nβœ… **Analysis Complete!** Loading results...")
371
+ yield yield_status(logs), session_id, None, [], None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates
372
 
373
  await asyncio.sleep(0.5)
374
 
 
385
  final_history = [{"role": "user", "content": final_user_msg}, {"role": "assistant", "content": full_log_text}]
386
  slider_updates = update_opacity_sliders(layers)
387
 
388
+ # Final yield is the ONLY one with real data
389
  yield final_history, session_id, initial_overlay, gr.CheckboxGroup(choices=layers, value=layers), report_file, df_m, df_s, df_r, *slider_updates
390
 
391
  async def unified_chat_handler(message, history, session_id, current_img_path):
 
402
  image_path = current_img_path
403
 
404
  waiting_df = pd.DataFrame({"Status": ["Waiting..."]})
405
+ # FIX: Prepare skips
406
+ skipped_updates = [gr.skip()] * 4
407
 
408
+ # CASE 1: INITIAL ANALYSIS
409
  if image_path and (not session_id or files):
410
  if not user_text: user_text = "Analyze this microscopy image."
411
 
 
415
  history.append({"role": "assistant", "content": "πŸ”„ Starting analysis (Model loading may take a moment)..."})
416
 
417
  # Show Loading, Hide Results
418
+ yield history, session_id, image_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *skipped_updates, None, gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)
419
 
420
  final_result = None
421
  try:
 
425
  if files and len(updated_history) > 0:
426
  updated_history[0] = {"role": "user", "content": f"![](file={display_path})\n\n{user_text}"}
427
 
428
+ # Pass through the skips/data from run_analysis
429
  yield (updated_history, result[1], image_path, *result[2:], None, gr.update(), gr.update(), gr.update())
430
  except Exception as e:
431
  history.append({"role": "assistant", "content": f"❌ Critical Error: {str(e)}"})
432
+ yield history, session_id, image_path, None, gr.CheckboxGroup(), None, gr.skip(), gr.skip(), gr.skip(), *skipped_updates, None, gr.update(visible=False), gr.update(visible=False), gr.update(visible=True)
433
  return
434
 
435
  if final_result:
 
441
  yield (updated_history, final_result[1], image_path, *final_result[2:], None, gr.update(visible=False), gr.update(visible=False), gr.update(visible=True))
442
  return
443
 
444
+ # CASE 2: FOLLOW-UP ANALYSIS
445
  elif session_id and user_text:
446
  history.append({"role": "user", "content": user_text})
447
  history.append({"role": "assistant", "content": "πŸ’­ Thinking..."})
448
 
449
+ # Don't show loading overlay for follow-ups
450
+ # FIX: Send gr.skip() to all result components to prevent jitter
451
+ yield history, session_id, current_img_path, gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), *skipped_updates, None, gr.update(), gr.update(), gr.update()
452
 
453
  if not ACTIVE_RUNNER:
454
  history[-1]["content"] = "⚠️ Session expired or Agent not initialized."
455
+ yield history, None, current_img_path, gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), *skipped_updates, None, gr.update(), gr.update(), gr.update()
456
  return
457
 
458
  content = types.Content(role="user", parts=[types.Part.from_text(text=user_text)])
 
465
  if hasattr(part, 'text') and part.text:
466
  accumulated_response += part.text
467
  history[-1]["content"] = accumulated_response
468
+ # FIX: Keep sending gr.skip() during stream
469
+ yield history, session_id, current_img_path, gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), *skipped_updates, None, gr.update(), gr.update(), gr.update()
470
  except Exception as e:
471
  history[-1]["content"] = f"❌ Error: {e}"
472
+ yield history, session_id, current_img_path, gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), *skipped_updates, None, gr.update(), gr.update(), gr.update()
473
  return
474
 
475
  report_file, df_m, df_s, df_r = load_excel_data("")
 
478
  new_overlay = generate_overlay(current_img_path, layers, force_reload=True)
479
  slider_updates = update_opacity_sliders(layers)
480
 
481
+ # Final yield updates the components
482
  yield (
483
  history,
484
  session_id,
 
500
  history = [{"role": "assistant", "content": "πŸ‘‹ Welcome! Upload a microscopy image and describe what you'd like to analyze."}]
501
  else:
502
  history.append({"role": "assistant", "content": "⚠️ Please provide a question or upload a new image."})
503
+ yield history, session_id, current_img_path, gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), *skipped_updates, None, gr.update(), gr.update(), gr.update()
504
 
505
  # --- UI Layout ---
506
 
 
585
  <div style="text-align: center;">
586
  <div style="border: 8px solid #f3f3f3; border-top: 8px solid #3498db; border-radius: 50%; width: 60px; height: 60px; animation: spin 1s linear infinite; margin: 0 auto 20px;"></div>
587
  <h3 style="color: #555; margin: 0;">βš™οΈ Analyzing</h3>
588
+ <p style="color: #888; margin-top: 10px;">Your image is being processed...</p>
589
  </div>
590
  <style>@keyframes spin { 0% { transform: rotate(0deg); } 100% { transform: rotate(360deg); } }</style>
591
  </div>