"""Gradio chat demo: a smolagents CodeAgent using tools exposed by a remote MCP server.

Requires the HUGGINGFACEHUB_API_TOKEN environment variable (loaded via .env if
present) to authenticate against the Hugging Face Hub for inference.
"""

import os

import gradio as gr
from dotenv import load_dotenv
from huggingface_hub import login
from mcp.client.stdio import StdioServerParameters
from smolagents import CodeAgent, InferenceClientModel, ToolCollection
from smolagents.mcp_client import MCPClient

load_dotenv()

# Fail fast with a clear message instead of an opaque auth error later.
hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
if not hf_token:
    raise ValueError("HUGGINGFACEHUB_API_TOKEN env var is not set!")
login(token=hf_token)

# Initialize before the try block so the finally clause is safe even when
# MCPClient(...) itself raises (otherwise disconnect() hits a NameError that
# masks the original connection error).
mcp_client = None
try:
    mcp_client = MCPClient(
        ## Try this working example on the hub:
        # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
        {"url": "https://baon2024-gradiomcpserver2.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    model = InferenceClientModel()
    # NOTE(review): presumably non-MCP tools could also be appended to this
    # list under their own names — confirm against smolagents docs.
    agent = CodeAgent(tools=[*tools], model=model)

    demo = gr.ChatInterface(
        # agent.run may return non-string results; coerce for the chat UI.
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["Prime factorization of 68"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )
    demo.launch()
finally:
    # Only tear down a connection that was actually established.
    if mcp_client is not None:
        mcp_client.disconnect()