| | import gradio as gr |
| | import os |
| | from mcp import MCPClient |
| | from smolagents import InferenceClientModel, CodeAgent |
| |
|
| | |
# URL of the MCP server's SSE endpoint; falls back to a placeholder when the
# MCP_SERVER_URL env var is unset (the placeholder must be replaced to work).
MCP_SERVER_URL = os.environ.get("MCP_SERVER_URL", "https://your-mcp-server-url/gradio_api/mcp/sse")
# Hugging Face API token for the inference backend; None if the env var is unset.
HF_TOKEN = os.environ.get("HUGGINGFACE_API_TOKEN")
| |
|
# Connect to the MCP server and fetch its advertised tool list.
# The transport is stated explicitly: the URL points at an SSE endpoint
# (".../mcp/sse"), and recent smolagents releases warn — and default to
# "streamable-http" — when the transport is left to be inferred.
mcp_client = MCPClient({"url": MCP_SERVER_URL, "transport": "sse"})
tools = mcp_client.get_tools()
| |
|
| | |
| | model = InferenceClientModel(token=HF_TOKEN) |
| |
|
| | |
| | agent = CodeAgent(tools=tools, model=model) |
| |
|
| | |
def _answer(message, history):
    """Run the agent on the latest user message and return its reply as text.

    Gradio supplies the chat history, but it is intentionally not forwarded:
    the agent only ever sees the current message.
    """
    return str(agent.run(message))


# Chat UI: every submitted message is routed through the agent.
demo = gr.ChatInterface(
    fn=_answer,
    title="🧠 MCP Agent",
    description="Ask anything. Agent will use tools from MCP server to answer.",
    examples=["Get tags for bert-base-uncased", "Sentiment of 'I love this model'", "Translate 'Bonjour'"],
)
| |
|
| | |
if __name__ == "__main__":
    # launch() blocks until the Gradio server shuts down. The try/finally
    # guarantees the MCP connection is closed even if launch() raises
    # (the original only disconnected on a clean return, leaking the
    # connection on error or Ctrl-C).
    try:
        demo.launch()
    finally:
        mcp_client.disconnect()
| |
|