Spaces:
Sleeping
Sleeping
import os

import gradio as gr
from dotenv import load_dotenv
from mcp import StdioServerParameters
from smolagents import CodeAgent, InferenceClientModel, MCPClient

# Load .env before anything reads HF_TOKEN from the environment.
load_dotenv()

# Bind the name up front so the `finally` clause cannot raise NameError
# (and mask the real exception) if MCPClient() itself fails to connect.
mcp_client = None
try:
    # Connect to the remote Gradio MCP server over SSE.
    mcp_client = MCPClient(
        {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    # Inference model authenticated with the HF token from the environment.
    model = InferenceClientModel(token=os.getenv("HF_TOKEN"))
    agent = CodeAgent(tools=[*tools], model=model)

    # Simple chat UI: each user message is run through the agent and the
    # result is stringified for display.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["prime factorization of 68"],
        title="agent with mcp server",
        description="this is a simple agent that uses MCP tools to answer questions",
    )
    demo.launch()
finally:
    # Disconnect only if the client was actually created.
    if mcp_client is not None:
        mcp_client.disconnect()