Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -349,35 +349,39 @@ async def multi_agent_conversation(task_message: str, log_queue: queue.Queue, ap
|
|
| 349 |
log_queue.put(("result", context.conversation_history))
|
| 350 |
|
| 351 |
# -------------------- Process Generator and Human Input --------------------
|
| 352 |
-
|
| 353 |
def process_conversation_generator(task_message: str, api_key: str,
|
| 354 |
human_event: threading.Event, human_input_queue: queue.Queue,
|
| 355 |
log_queue: queue.Queue) -> Generator[str, None, None]:
|
| 356 |
"""
|
| 357 |
-
Runs the conversation and yields log messages
|
|
|
|
| 358 |
"""
|
| 359 |
-
|
| 360 |
-
|
| 361 |
-
|
| 362 |
-
|
| 363 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 364 |
try:
|
| 365 |
-
msg = log_queue.
|
| 366 |
if isinstance(msg, tuple) and msg[0] == "result":
|
| 367 |
-
|
| 368 |
-
yield gr.Chatbot.update(value=
|
| 369 |
-
yield "Conversation complete."
|
| 370 |
-
break
|
| 371 |
else:
|
| 372 |
yield msg
|
| 373 |
except queue.Empty:
|
| 374 |
pass
|
| 375 |
-
|
| 376 |
-
# If human feedback is requested, yield an appropriate message.
|
| 377 |
if human_event.is_set():
|
| 378 |
yield "Waiting for human feedback..."
|
| 379 |
-
|
| 380 |
-
|
|
|
|
|
|
|
|
|
|
| 381 |
|
| 382 |
def get_human_feedback(placeholder_text: str, human_input_queue: queue.Queue) -> gr.Blocks:
|
| 383 |
"""
|
|
|
|
| 349 |
log_queue.put(("result", context.conversation_history))
|
| 350 |
|
| 351 |
# -------------------- Process Generator and Human Input --------------------
|
|
|
|
| 352 |
def process_conversation_generator(task_message: str, api_key: str,
                                   human_event: threading.Event, human_input_queue: queue.Queue,
                                   log_queue: queue.Queue) -> Generator[str, None, None]:
    """
    Run the multi-agent conversation in a background thread and yield log
    messages so the UI can stream updates without blocking.

    Args:
        task_message: Task prompt that seeds the conversation.
        api_key: API key forwarded to the conversation coroutine.
        human_event: Set by the conversation when human feedback is required.
        human_input_queue: Queue through which the UI delivers the human reply.
        log_queue: Queue the conversation writes log entries to; the final
            entry is a ``("result", conversation_history)`` tuple.

    Yields:
        Log strings for the UI, a Chatbot update carrying the final
        conversation history, and a closing "Conversation complete." message.
    """
    def run_conversation():
        # Drive the async conversation to completion inside this worker thread.
        asyncio.run(multi_agent_conversation(task_message, log_queue, api_key,
                                             human_event, human_input_queue))

    # Start the asynchronous conversation in a separate thread.
    conversation_thread = threading.Thread(target=run_conversation)
    conversation_thread.start()

    # Tracks whether the "waiting" notice has already been yielded for the
    # current feedback pause, so it is emitted once per wait instead of on
    # every 0.1 s poll cycle (the previous version spammed duplicates).
    announced_waiting = False

    # Continuously yield log messages until the conversation thread has
    # finished AND the queue has been fully drained.
    while conversation_thread.is_alive() or not log_queue.empty():
        try:
            msg = log_queue.get(timeout=0.1)
            if isinstance(msg, tuple) and msg[0] == "result":
                # Final message: update the chat box with the conversation
                # history. NOTE(review): gr.Chatbot.update is the Gradio 3.x
                # API; on Gradio 4+ this would be gr.update(...) — confirm
                # against the pinned gradio version.
                yield gr.Chatbot.update(value=msg[1], visible=True)
            else:
                yield msg
        except queue.Empty:
            # Sleep only when there was nothing to read. The previous version
            # slept unconditionally each iteration, throttling queue draining
            # to ~10 messages/second even while the queue was backed up.
            time.sleep(0.1)

        # If human feedback is requested, notify the UI (once per pause).
        if human_event.is_set():
            if not announced_waiting:
                yield "Waiting for human feedback..."
                announced_waiting = True
        else:
            announced_waiting = False

    yield "Conversation complete."
|
| 385 |
|
| 386 |
def get_human_feedback(placeholder_text: str, human_input_queue: queue.Queue) -> gr.Blocks:
|
| 387 |
"""
|