Spaces:
Runtime error
import os

import gradio as gr
from smolagents import CodeAgent, InferenceClientModel, MCPClient

# MCP Server URL - Replace with your remote endpoint
MCP_SERVER_URL = "https://waysaheadglobal-model-context-protocol.hf.space/sse"

# Bind before the try-block: if MCPClient(...) itself raises, the finally
# clause below would otherwise hit a NameError that masks the real error
# (a likely cause of the Space's "Runtime error" status).
mcp_client = None
try:
    # Connect to the MCP server and load the tools it exposes.
    mcp_client = MCPClient({"url": MCP_SERVER_URL})
    tools = mcp_client.get_tools()

    # Load the LLM via the Hugging Face Inference API using your HF token.
    # NOTE(review): assumes HUGGINGFACE_API_TOKEN is set in the Space secrets —
    # if unset, InferenceClientModel receives token=None.
    model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    agent = CodeAgent(tools=[*tools], model=model)

    # Gradio chat UI: every user message is routed through the agent and the
    # agent's answer is rendered as plain text.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        title="IMDL MCP Agent 🧠",
        description="Ask anything from the PostgreSQL data tools",
        examples=[
            "Show me KPIs for May 2025",
            "Generate brand-wise revenue trends",
            "How many events happened last quarter?",
        ],
    )
    demo.launch()
finally:
    # Disconnect only if the client was actually created (see note above the
    # try-block); a bare mcp_client.disconnect() here is unsafe.
    if mcp_client is not None:
        mcp_client.disconnect()