import gradio as gr

from scripts.chatbot_logic import ProjectGuidanceChatbot

# Initialize the chatbot with its project configuration files.
chatbot = ProjectGuidanceChatbot(
    roadmap_file="roadmap.yaml",
    rules_file="rules.yaml",
    config_file="configs/chatbot_config.yaml",
    code_templates_dir="scripts/code_templates",
)


def respond(message, chat_history):
    """Handle one user turn: query the chatbot and append (user, bot) to history.

    Returns an empty string (to clear the textbox) and the updated history.
    """
    bot_message = chatbot.process_query(message)
    chat_history.append((message, bot_message))
    return "", chat_history


def switch_model(model_key):
    """Switch the active LLM model and return the refreshed greeting.

    If the switch fails (the backend returns an "Error:" string), a
    non-blocking Gradio warning toast is raised. Note: ``gr.Warning`` is a
    function that displays a toast — it is NOT a component and must not be
    returned as an output value.
    """
    model_switch_result = chatbot.switch_llm_model(model_key)
    if isinstance(model_switch_result, str) and "Error:" in model_switch_result:
        gr.Warning(model_switch_result)  # show the error as a toast
    return chatbot.get_chatbot_greeting()


with gr.Blocks() as demo:
    chatbot_greeting_md = gr.Markdown(chatbot.get_chatbot_greeting())
    gr.Markdown(f"# {chatbot.chatbot_config.get('name', 'Project Guidance Chatbot')}")

    # (label, value) pairs: label is the human-readable model name, value is
    # the key handed to switch_llm_model().
    model_choices = [
        (model["name"], key)
        for key, model in chatbot.available_models_config.items()
    ]
    # The dropdown's initial value must be a choice *value* (the model key),
    # not the display name, because choices are (label, value) tuples.
    # Assumes active_model_info carries the same 'name' as its config entry —
    # TODO confirm against ProjectGuidanceChatbot.
    active_key = None
    if chatbot.active_model_info:
        active_key = next(
            (
                key
                for key, model in chatbot.available_models_config.items()
                if model["name"] == chatbot.active_model_info["name"]
            ),
            None,
        )
    model_dropdown = gr.Dropdown(
        choices=model_choices,
        value=active_key,
        label="Select LLM Model",
    )
    # Errors are surfaced via the gr.Warning toast inside switch_model, so the
    # only output to update is the greeting Markdown.
    model_dropdown.change(
        fn=switch_model,
        inputs=model_dropdown,
        outputs=chatbot_greeting_md,
    )

    chatbot_ui = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot_ui])
    msg.submit(respond, [msg, chatbot_ui], [msg, chatbot_ui])

demo.launch()