File size: 3,286 Bytes
837a945
521f62d
837a945
521f62d
837a945
 
521f62d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cfac8d5
 
521f62d
 
 
 
 
 
cfac8d5
521f62d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
837a945
 
521f62d
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
import gradio as gr
import os

from smolagents import InferenceClientModel, CodeAgent, ToolCollection, MCPClient


# Function to initialize MCP client and agent
def initialize_agent(api_key, model_name):
    """Create an MCP client and a CodeAgent wired to its tools.

    Args:
        api_key: Inference provider token (e.g. a Hugging Face token).
        model_name: One of "Qwen", "Gemma", "Llama"; unknown names fall
            back to the Gemma model ID.

    Returns:
        Tuple ``(mcp_client, agent, error)``: on success ``error`` is
        None; on failure the first two are None and ``error`` is a
        human-readable message.
    """
    # Validate the key BEFORE opening the MCP connection. The original
    # order created the client first, then returned (None, None, error),
    # leaking a live connection the caller could never disconnect.
    if not api_key:
        return None, None, "Please provide a valid API key."

    mcp_client = None
    try:
        # Explicit "transport" key avoids smolagents' FutureWarning about
        # inferring the transport from the URL.
        mcp_client = MCPClient(
            {
                "url": "https://wd101-oneservertorulethemall.hf.space/gradio_api/mcp/sse",
                "transport": "sse",
            }
        )
        tools = mcp_client.get_tools()

        # Map friendly dropdown names to hub model IDs.
        model_configs = {
            "Qwen": "Qwen/Qwen2-72B-Instruct",
            "Gemma": "google/gemma-2-27b-it",
            "Llama": "meta-llama/Llama-3-8b",
        }

        model = InferenceClientModel(
            provider='nebius',
            token=api_key,
            model=model_configs.get(model_name, "google/gemma-2-27b-it"),  # default to Gemma
        )
        agent = CodeAgent(tools=[*tools], model=model)
        return mcp_client, agent, None
    except Exception as e:
        # Don't leak a half-initialized connection on failure; the
        # disconnect itself is best-effort.
        if mcp_client:
            try:
                mcp_client.disconnect()
            except Exception:
                pass
        return None, None, f"Failed to initialize agent: {str(e)}"


# Function to handle chatbot interaction
def chatbot_function(user_message, chat_history, api_key, model_name):
    """Run one chat turn: build an agent, produce a reply, tear down.

    Appends the (message, reply) pair to ``chat_history`` and returns the
    updated history plus an empty string to clear the input textbox.
    """
    client, agent, init_error = initialize_agent(api_key, model_name)

    # Initialization failed: surface the error in the transcript and bail.
    if init_error:
        chat_history.append((user_message, init_error))
        if client:
            client.disconnect()
        return chat_history, ""

    try:
        reply = (
            str(agent.run(user_message))
            if user_message
            else "Please enter a message."
        )
        chat_history.append((user_message, reply))
    except Exception as exc:
        chat_history.append((user_message, f"Error processing message: {str(exc)}"))
    finally:
        # Always release this turn's MCP connection.
        if client:
            client.disconnect()

    return chat_history, ""


# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Hackathon Chatbot with MCP Tools")
    gr.Markdown("Enter your API key (e.g., Hugging Face token), select a model, and start chatting!")

    # Credentials row: token field plus model picker.
    with gr.Row():
        api_key_input = gr.Textbox(
            label="API Key",
            type="password",
            placeholder="Enter your API key here",
        )
        model_dropdown = gr.Dropdown(
            choices=["Qwen", "Gemma", "Llama"],
            label="Select Model",
            value="Gemma",
        )

    # Conversation area: transcript, message box, and send button.
    chatbot = gr.Chatbot(label="Chat History")
    user_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")
    submit_button = gr.Button("Send")

    # Per-session transcript state, threaded through each turn.
    chat_history = gr.State([])

    # One click runs a full agent round-trip and clears the input box.
    submit_button.click(
        fn=chatbot_function,
        inputs=[user_input, chat_history, api_key_input, model_dropdown],
        outputs=[chatbot, user_input],
    )

# Launch the Gradio app
demo.launch()