File size: 898 Bytes
5d76269
0893c4f
5d76269
 
0893c4f
5d76269
 
 
0893c4f
5d76269
 
0893c4f
5d76269
 
0893c4f
5d76269
 
0893c4f
5d76269
 
 
 
 
 
 
 
 
0893c4f
5d76269
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
import gradio as gr
import os
from mcp import MCPClient
from smolagents import InferenceClientModel, CodeAgent

# ---------------------------------------------------------------------------
# Gradio chat front-end for a smolagents CodeAgent whose tools come from a
# remote MCP (Model Context Protocol) server.
# ---------------------------------------------------------------------------

# Connection settings are read from the environment so the app can be
# configured without code changes; the URL default is a placeholder that
# must be replaced for a real deployment.
MCP_SERVER_URL = os.environ.get("MCP_SERVER_URL", "https://your-mcp-server-url/gradio_api/mcp/sse")
HF_TOKEN = os.environ.get("HUGGINGFACE_API_TOKEN")

# Connect to the MCP server and fetch the tool definitions it exposes.
mcp_client = MCPClient({"url": MCP_SERVER_URL})
tools = mcp_client.get_tools()

# Load model (Hugging Face Inference API; HF_TOKEN may be None for
# anonymous/rate-limited access).
model = InferenceClientModel(token=HF_TOKEN)

# Create agent that answers by writing/executing code with the MCP tools.
agent = CodeAgent(tools=tools, model=model)


def _respond(message, history):
    """Run the agent on the user's message and return its answer as a string.

    ``history`` is supplied by gr.ChatInterface but is intentionally unused:
    the agent manages its own context per run.
    """
    return str(agent.run(message))


# Gradio chat interface
demo = gr.ChatInterface(
    fn=_respond,
    title="🧠 MCP Agent",
    description="Ask anything. Agent will use tools from MCP server to answer.",
    examples=["Get tags for bert-base-uncased", "Sentiment of 'I love this model'", "Translate 'Bonjour'"],
)

# Launch app. demo.launch() blocks until the server shuts down and can
# raise (e.g. port already in use); the try/finally guarantees the MCP
# connection is closed either way. The original only disconnected after a
# clean return from launch(), leaking the connection on any exception.
if __name__ == "__main__":
    try:
        demo.launch()
    finally:
        mcp_client.disconnect()