File size: 942 Bytes
6da64fa
 
 
 
 
41d59a1
6da64fa
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
import gradio as gr
import os
from smolagents import InferenceClientModel, CodeAgent, MCPClient

# MCP Server URL - Replace with your remote endpoint
MCP_SERVER_URL = "https://waysaheadglobal-model-context-protocol.hf.space/sse"

# Connect to the MCP server, build the agent, and serve a chat UI.
# mcp_client is pre-bound to None so the finally-clause cannot raise
# NameError when MCPClient(...) itself fails (e.g. server unreachable) —
# previously that NameError would mask the real connection exception.
mcp_client = None
try:
    # Discover the tools exposed by the remote MCP server.
    mcp_client = MCPClient({"url": MCP_SERVER_URL})
    tools = mcp_client.get_tools()

    # Load LLM model using your Hugging Face token.
    # NOTE(review): if HUGGINGFACE_API_TOKEN is unset this passes token=None
    # and fails later with an opaque auth error — consider failing fast here.
    model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    agent = CodeAgent(tools=[*tools], model=model)

    # Gradio Chat Interface: each message is handed to the agent and the
    # agent's result is rendered as plain text.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        title="IMDL MCP Agent 🧠",
        description="Ask anything from the PostgreSQL data tools",
        examples=["Show me KPIs for May 2025", "Generate brand-wise revenue trends", "How many events happened last quarter?"],
    )

    demo.launch()
finally:
    # Only disconnect a client that was actually created.
    if mcp_client is not None:
        mcp_client.disconnect()