WaysAheadGlobal committed on
Commit
6da64fa
·
verified ·
1 Parent(s): 8b331cc

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +27 -0
app.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Gradio chat UI that answers questions via a CodeAgent wired to a remote MCP server."""
import gradio as gr
import os
from smolagents import InferenceClientModel, CodeAgent, MCPClient

# MCP Server URL - Replace with your remote endpoint
MCP_SERVER_URL = "https://waysaheadglobal-mcp.hf.space/gradio_api/mcp/sse"

# Pre-bind so the `finally` clause cannot raise NameError (masking the real
# error) if MCPClient construction itself fails.
mcp_client = None
try:
    # Load tools from MCP Server
    mcp_client = MCPClient({"url": MCP_SERVER_URL})
    tools = mcp_client.get_tools()

    # Load LLM model using your Hugging Face token
    model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    agent = CodeAgent(tools=[*tools], model=model)

    # Gradio Chat Interface: each user message is forwarded to the agent;
    # conversation history is accepted by the callback but not used.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        title="IMDL MCP Agent 🧠",
        description="Ask anything from the PostgreSQL data tools",
        examples=["Show me KPIs for May 2025", "Generate brand-wise revenue trends", "How many events happened last quarter?"],
    )

    # Blocks until the Gradio server shuts down.
    demo.launch()
finally:
    # Release the MCP connection even if setup or launch raised; guard
    # against the case where the client was never successfully created.
    if mcp_client is not None:
        mcp_client.disconnect()