File size: 1,650 Bytes
dea2d74
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
# import gradio as gr
# import os

# from mcp import StdioServerParameters
# from smolagents import InferenceClientModel, CodeAgent, ToolCollection, MCPClient
# mcp_client = MCPClient(
#     {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse","transport":"sse"} # This is the MCP Client we created in the previous section
# )
# tools = mcp_client.get_tools()

# model = InferenceClientModel(token=os.getenv("HF_TOKEN"))
# agent = CodeAgent(tools=[*tools], model=model)

# demo = gr.ChatInterface(
#     fn=lambda message, history: str(agent.run(message)),
#     type="messages",
#     examples=["Prime factorization of 68"],
#     title="Agent with MCP Tools",
#     description="This is a simple agent that uses MCP tools to answer questions."
# )

# demo.launch()

import gradio as gr
import os

from smolagents import InferenceClientModel, CodeAgent, MCPClient


# Connect to a remote MCP server, expose its tools to a smolagents CodeAgent,
# and serve the agent behind a Gradio chat UI.  The MCP client is disconnected
# on shutdown even if startup fails partway through.
mcp_client = None  # bound up front so `finally` cannot hit a NameError if MCPClient(...) raises
try:
    mcp_client = MCPClient(
        {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    # NOTE(review): assumes HF_TOKEN is set in the environment; if unset,
    # getenv returns None and inference calls will fail with an auth error.
    model = InferenceClientModel(token=os.getenv("HF_TOKEN"))
    # Extra authorized imports let the generated agent code parse/transform
    # tool output (JSON payloads, URLs, base64 blobs).
    agent = CodeAgent(
        tools=[*tools],
        model=model,
        additional_authorized_imports=["json", "ast", "urllib", "base64"],
    )

    demo = gr.ChatInterface(
        # The agent is stateless across turns here: each message is run fresh
        # and `history` is ignored; the result is stringified for display.
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["Analyze the sentiment of the following text 'This is awesome'"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    demo.launch()
finally:
    # Disconnect only if the client was actually created; otherwise a
    # constructor failure would be masked by a NameError here.
    if mcp_client is not None:
        mcp_client.disconnect()