File size: 1,326 Bytes
19cb828
 
 
 
 
 
d7ce8d1
a5869c5
258fe38
a5869c5
 
258fe38
a5869c5
 
 
 
 
 
 
19cb828
 
 
 
ae2078d
 
19cb828
f2e646e
 
19cb828
 
 
 
 
 
 
 
 
 
 
 
 
 
a5869c5
19cb828
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
import gradio as gr

from mcp.client.stdio import StdioServerParameters
from smolagents import InferenceClientModel, CodeAgent, ToolCollection
from smolagents.mcp_client import MCPClient

from mcp.server.fastmcp import FastMCP


# In-process MCP server; tools decorated below are exposed under this name.
mcp_server = FastMCP("my-mcp-client")

@mcp_server.tool()
async def process_text(text: str) -> str:
    """Process the input text by uppercasing it.

    Args:
        text: The text to process.

    Returns:
        The transformed string, e.g. "Processed: HELLO" for "hello".
    """
    processed_text = f"Processed: {text.upper()}"
    print(processed_text)  # echoed to stdout for visibility while serving
    return processed_text

# Pre-bind so the finally-clause is safe even if MCPClient(...) raises;
# previously a failed connection triggered a NameError in `finally`,
# masking the original exception.
mcp_client = None
try:
    # Connect to a remote MCP server over SSE.
    # Known-good public example: https://abidlabs-mcp-tools2.hf.space/gradio_api/mcp/sse
    # Local development:         http://localhost:7860/gradio_api/mcp/sse
    mcp_client = MCPClient(
        {"url": "https://applemuncy-mcp-sentiment.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    model = InferenceClientModel()
    agent = CodeAgent(tools=[*tools], model=model)

    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["Prime factorization of 68"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    # Also expose this app's own tools as an MCP server.
    demo.launch(mcp_server=True)
finally:
    # Only disconnect if the client was actually constructed.
    if mcp_client is not None:
        mcp_client.disconnect()