Hugging Face Space (status: Sleeping) — Gradio app source follows.
"""Gradio chat UI backed by a smolagents CodeAgent with MCP tools.

Connects to a remote MCP server over SSE, exposes its tools to a
CodeAgent, and serves a ChatInterface. If setup fails for any reason,
a fallback UI explaining the failure is served instead. The MCP client
is disconnected when the app exits.
"""
import os

import gradio as gr
from smolagents import InferenceClientModel, CodeAgent, MCPClient

mcp_client = None  # module scope so the `finally` block can always disconnect it
try:
    mcp_client = MCPClient(
        {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse", "transport": "sse"},
        structured_output=False,
    )
    tools = mcp_client.get_tools()

    # Token comes from the environment; None lets the client fall back to
    # its default authentication resolution.
    model = InferenceClientModel(token=os.getenv("HF_TOKEN_MCP"))
    agent = CodeAgent(
        tools=[*tools],
        model=model,
        additional_authorized_imports=["json", "ast", "urllib", "base64"],
    )

    def respond(message, history):
        """Run the agent on the user's message and return its answer as text."""
        return str(agent.run(message))

    demo = gr.ChatInterface(
        fn=respond,
        type="messages",
        examples=["Analyze the sentiment of the following text 'This is awesome'"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    if __name__ == "__main__":
        # launch() blocks until the server stops; disconnect runs afterwards,
        # so the guard must stay inside the try block.
        demo.launch()
except Exception as e:
    # Broad catch is deliberate: any setup failure degrades to a fallback
    # UI instead of crashing the Space.
    print(f"Error: {e}")
    demo = gr.ChatInterface(
        fn=lambda message, history: "MCP client connection failed. Please check the configuration.",
        type="messages",
        title="MCP Client (Connection Failed)",
        description="Could not connect to MCP server.",
    )
    if __name__ == "__main__":
        demo.launch()
finally:
    # Release the MCP connection whether startup succeeded or not.
    if mcp_client is not None:
        mcp_client.disconnect()