# Hugging Face Space app (page-status residue removed; Space status was "Sleeping")
# Standard library
import os

# Third-party
import gradio as gr
from smolagents import CodeAgent, InferenceClientModel, MCPClient, ToolCollection
# Function to initialize MCP client and agent
def initialize_agent(api_key, model_name):
    """Create an MCP client and a CodeAgent backed by the chosen model.

    Args:
        api_key: Inference-provider token (e.g. a Hugging Face token).
        model_name: One of "Qwen", "Gemma", "Llama"; any other value falls
            back to the Gemma model ID.

    Returns:
        (mcp_client, agent, error): on success ``error`` is None; on failure
        ``mcp_client`` and ``agent`` are None and ``error`` is a message
        string for the UI.
    """
    # Validate the key BEFORE opening any connection, so a missing key
    # cannot leak an open MCP client (the original created the client first).
    if not api_key:
        return None, None, "Please provide a valid API key."

    mcp_client = None
    try:
        # Explicit "transport" key avoids smolagents' FutureWarning about
        # the default transport changing.
        mcp_client = MCPClient(
            {
                "url": "https://wd101-oneservertorulethemall.hf.space/gradio_api/mcp/sse",
                "transport": "sse",
            }
        )
        tools = mcp_client.get_tools()

        # Map UI model names to inference model IDs.
        model_configs = {
            "Qwen": "Qwen/Qwen2-72B-Instruct",
            "Gemma": "google/gemma-2-27b-it",
            "Llama": "meta-llama/Llama-3-8b",
        }
        model = InferenceClientModel(
            provider="nebius",
            token=api_key,
            # Unknown selections default to Gemma (the dropdown's default).
            model=model_configs.get(model_name, "google/gemma-2-27b-it"),
        )
        agent = CodeAgent(tools=[*tools], model=model)
        return mcp_client, agent, None
    except Exception as e:
        # Don't leak a half-initialized MCP connection if setup fails
        # after the client was created.
        if mcp_client is not None:
            try:
                mcp_client.disconnect()
            except Exception:
                pass  # best-effort cleanup; report the original error below
        return None, None, f"Failed to initialize agent: {str(e)}"
# Function to handle chatbot interaction
def chatbot_function(user_message, chat_history, api_key, model_name):
    """Handle one chat turn: run the agent and append the exchange.

    Args:
        user_message: Text the user submitted (may be empty).
        chat_history: List of (user, bot) tuples held in gr.State.
        api_key: Inference-provider token forwarded to initialize_agent.
        model_name: Model selection forwarded to initialize_agent.

    Returns:
        (chat_history, ""): the updated history and an empty string that
        clears the message textbox.
    """
    # Early exit for empty input: no point opening an MCP connection and
    # building an agent just to ask for a message (the original did).
    if not user_message:
        chat_history.append((user_message, "Please enter a message."))
        return chat_history, ""

    # A fresh client/agent per turn; torn down in `finally` below.
    mcp_client, agent, error = initialize_agent(api_key, model_name)
    if error:
        chat_history.append((user_message, error))
        # Defensive: on error the client is expected to be None, but
        # disconnect if one was returned anyway.
        if mcp_client:
            mcp_client.disconnect()
        return chat_history, ""

    try:
        response = str(agent.run(user_message))
        chat_history.append((user_message, response))
    except Exception as e:
        # Surface the failure in the chat rather than crashing the UI.
        chat_history.append((user_message, f"Error processing message: {str(e)}"))
    finally:
        # Always release the MCP connection for this turn.
        if mcp_client:
            mcp_client.disconnect()
    return chat_history, ""
# ---- Gradio interface ----
with gr.Blocks() as demo:
    gr.Markdown("# Hackathon Chatbot with MCP Tools")
    gr.Markdown("Enter your API key (e.g., Hugging Face token), select a model, and start chatting!")

    # Credentials and model choice side by side.
    with gr.Row():
        api_key_input = gr.Textbox(
            label="API Key",
            type="password",
            placeholder="Enter your API key here",
        )
        model_dropdown = gr.Dropdown(
            choices=["Qwen", "Gemma", "Llama"],
            label="Select Model",
            value="Gemma",
        )

    # Conversation widgets.
    chatbot = gr.Chatbot(label="Chat History")
    user_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")
    submit_button = gr.Button("Send")

    # Per-session chat history lives in Gradio state.
    chat_history = gr.State([])

    # Route the Send button through the chat handler; the second output
    # clears the message textbox after each turn.
    submit_button.click(
        fn=chatbot_function,
        inputs=[user_input, chat_history, api_key_input, model_dropdown],
        outputs=[chatbot, user_input],
    )

# Launch the Gradio app
demo.launch()