# Source: Hugging Face Space "Agent with MCP Tools" — app.py (file size: 1,367 bytes)
import gradio as gr
import os

from smolagents import InferenceClientModel, CodeAgent, MCPClient

mcp_client = None
try:
    # Connect to the remote MCP tool server over SSE. structured_output=False
    # keeps tool results as plain text, which the CodeAgent consumes directly.
    mcp_client = MCPClient(
        {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse", "transport": "sse"},
        structured_output=False,
    )
    tools = mcp_client.get_tools()

    # HF_TOKEN_MCP must be set in the environment (e.g. as a Space secret).
    model = InferenceClientModel(token=os.getenv("HF_TOKEN_MCP"))
    agent = CodeAgent(
        tools=[*tools],
        model=model,
        additional_authorized_imports=["json", "ast", "urllib", "base64"],
    )

    def respond(message, history):
        """Run the agent on the user's message and return its answer as a string.

        `history` is required by the gr.ChatInterface callback signature but is
        not used: the agent is stateless across turns here.
        """
        return str(agent.run(message))

    demo = gr.ChatInterface(
        fn=respond,
        type="messages",
        examples=["Analyze the sentiment of the following text 'This is awesome'"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    # launch() is called inside the try so the finally-block disconnect only
    # runs once the server has shut down.
    if __name__ == "__main__":
        demo.launch()
except Exception as e:
    # Best-effort fallback: keep the Space alive with a stub UI rather than
    # crashing when the MCP server or model client cannot be reached.
    print(f"Error: {e}")
    demo = gr.ChatInterface(
        fn=lambda message, history: "MCP client connection failed. Please check the configuration.",
        type="messages",
        title="MCP Client (Connection Failed)",
        description="Could not connect to MCP server.",
    )
    if __name__ == "__main__":
        demo.launch()
finally:
    # Release the SSE connection whether startup succeeded or not.
    if mcp_client is not None:
        mcp_client.disconnect()