fagun18 committed on
Commit
ba4613e
·
verified ·
1 Parent(s): 337118a

Add clear error messages for LLM initialization failures

Browse files
src/webui/components/browser_use_agent_tab.py CHANGED
@@ -430,6 +430,44 @@ async def run_agent_task(
430
  llm_api_key,
431
  ollama_num_ctx if llm_provider_name == "ollama" else None,
432
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
433
 
434
  # Pass the webui_manager instance to the callback when wrapping it
435
  async def ask_callback_wrapper(
 
430
  llm_api_key,
431
  ollama_num_ctx if llm_provider_name == "ollama" else None,
432
  )
433
+
434
+ # Check if LLM was initialized successfully
435
+ if not main_llm:
436
+ error_msg = "❌ **LLM Initialization Failed**\n\n"
437
+ if not llm_provider_name or not llm_model_name:
438
+ error_msg += "**Issue:** No LLM provider or model selected.\n\n"
439
+ error_msg += "**Solution:**\n"
440
+ error_msg += "1. Go to **⚙️ AGENT CONFIG** tab\n"
441
+ error_msg += "2. Select **LLM Provider** (e.g., Google - Gemini)\n"
442
+ error_msg += "3. Enter your **API Key**\n"
443
+ error_msg += "4. Select a **Model** (e.g., gemini-2.0-flash-exp)\n"
444
+ error_msg += "5. Come back and try again!\n"
445
+ elif not llm_api_key:
446
+ error_msg += "**Issue:** API Key is missing.\n\n"
447
+ error_msg += "**Solution:**\n"
448
+ error_msg += "1. Go to **⚙️ AGENT CONFIG** tab\n"
449
+ error_msg += "2. Enter your **LLM API Key**\n"
450
+ error_msg += "3. Make sure it's valid and has credits\n"
451
+ error_msg += "4. Come back and try again!\n"
452
+ else:
453
+ error_msg += "**Issue:** Failed to initialize the LLM.\n\n"
454
+ error_msg += "**Possible causes:**\n"
455
+ error_msg += "- Invalid API key\n"
456
+ error_msg += "- API key has no credits\n"
457
+ error_msg += "- Network connection issue\n"
458
+ error_msg += "- Model name is incorrect\n\n"
459
+ error_msg += "**Solution:** Check your settings in **⚙️ AGENT CONFIG** tab\n"
460
+
461
+ webui_manager.bu_chat_history.append({"role": "assistant", "content": error_msg})
462
+ yield {
463
+ user_input_comp: gr.update(interactive=True, placeholder="Fix LLM settings and try again..."),
464
+ run_button_comp: gr.update(value="▶️ Submit Task", interactive=True),
465
+ stop_button_comp: gr.update(interactive=False),
466
+ pause_resume_button_comp: gr.update(interactive=False),
467
+ clear_button_comp: gr.update(interactive=True),
468
+ chatbot_comp: gr.update(value=webui_manager.bu_chat_history),
469
+ }
470
+ return
471
 
472
  # Pass the webui_manager instance to the callback when wrapping it
473
  async def ask_callback_wrapper(