File size: 1,128 Bytes
b465f36
 
 
 
08fb142
b465f36
 
 
08fb142
 
b465f36
 
 
 
 
 
08fb142
 
 
 
 
 
 
b465f36
08fb142
b465f36
 
 
 
 
 
08fb142
 
 
b465f36
08fb142
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import gradio as gr
import os
from smolagents import InferenceClientModel, CodeAgent, MCPClient

# Keep a handle to the MCP client at module scope so the finally-block can
# close the connection even if any later setup step raises.
mcp_client = None

try:
    # Connect to the remote Gradio MCP server over SSE and pull its tool list.
    mcp_client = MCPClient(
        {
            "url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse",
            "transport": "sse",
        },
        structured_output=True,
    )
    tools = mcp_client.get_tools()

    # Hugging Face Inference API model that drives the code agent; the extra
    # authorized imports let generated code parse/encode tool payloads.
    model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    agent = CodeAgent(
        tools=list(tools),
        model=model,
        additional_authorized_imports=["json", "ast", "urllib", "base64"],
    )

    def chat_with_agent(message, history):
        """Run the agent on *message* and return its answer as plain text.

        Any failure is surfaced to the chat UI as an error string rather
        than crashing the interface.
        """
        try:
            return str(agent.run(message))
        except Exception as e:
            return f"Error: {str(e)}"

    # Simple chat UI wired to the agent; blocks here until the server stops.
    demo = gr.ChatInterface(
        fn=chat_with_agent,
        examples=["Analyze the sentiment of the following text 'This is awesome'"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    demo.launch()

except Exception as e:
    # Top-level boundary: report setup/launch failures instead of crashing.
    print(f"Error initializing: {e}")
finally:
    # Always tear down the MCP connection, whether or not setup succeeded.
    if mcp_client:
        mcp_client.disconnect()