wlchee committed on
Commit
9634b9a
·
verified ·
1 Parent(s): 5830c77

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -31
app.py CHANGED
@@ -1,39 +1,19 @@
1
  import gradio as gr
2
  import os
3
 
4
- from mcp.client.stdio import StdioServerParameters
5
- from smolagents import InferenceClientModel, CodeAgent, ToolCollection
6
- from smolagents.mcp_client import MCPClient
7
- from huggingface_hub import login
8
 
9
- # Retrieve API keys securely
10
- hf_api_key = os.getenv('mcp2') # Hugging Face API key from secret
11
 
12
- # Check if the Hugging Face API key is available
13
- if not hf_api_key:
14
- raise ValueError("Hugging Face API key is not set. Please check your secrets.")
15
-
16
- # Log in to Hugging Face using the API key
17
- login(hf_api_key)
18
-
19
- mcp_client = None
20
-
21
- # Initialize MCPClient with URL
22
  try:
23
  mcp_client = MCPClient(
24
- {"url": "https://wlchee-mcp-sentiment.hf.space/gradio_api/mcp/sse"},
25
  )
26
-
27
- # Retrieve tools from the MCP client
28
  tools = mcp_client.get_tools()
29
- if not tools:
30
- raise Exception("No tools retrieved from the MCP client.")
31
-
32
- # Create the model and agent
33
- model = InferenceClientModel(model_id="Qwen/Qwen2.5-Omni-7B")
34
  agent = CodeAgent(tools=[*tools], model=model)
35
 
36
- # Define the Gradio interface
37
  demo = gr.ChatInterface(
38
  fn=lambda message, history: str(agent.run(message)),
39
  type="messages",
@@ -42,10 +22,6 @@ try:
42
  description="This is a simple agent that uses MCP tools to answer questions.",
43
  )
44
 
45
- # Launch the interface
46
- demo.launch(share=True)
47
-
48
  finally:
49
- # Ensure MCPClient disconnects on script termination
50
- if mcp_client:
51
- mcp_client.disconnect()
 
1
import gradio as gr
import os

from mcp import StdioServerParameters
from smolagents import InferenceClientModel, CodeAgent, ToolCollection, MCPClient

# Initialize to None so the `finally` clause can tell whether the client was
# ever created. Without this, an exception raised inside MCPClient(...) would
# make `mcp_client.disconnect()` fail with a NameError that masks the real
# error (the previous revision of this file had exactly this guard).
mcp_client = None

try:
    # Connect to the MCP server exposed over SSE by the
    # wlchee-mcp-sentiment Space.
    mcp_client = MCPClient(
        {"url": "https://wlchee-mcp-sentiment.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    # The inference token is read from the `mcp2` secret / environment
    # variable; os.getenv returns None if it is unset, in which case the
    # client falls back to its default credential resolution.
    model = InferenceClientModel(token=os.getenv("mcp2"))
    agent = CodeAgent(tools=[*tools], model=model)

    # Chat UI: each user message is handed to the agent and the agent's
    # answer is rendered as the reply.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        # NOTE(review): the diff hunk hides new-file lines 20-21 here
        # (likely `title=` and/or `examples=` kwargs) — restore them from
        # the complete file before deploying.
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    demo.launch()

finally:
    # Disconnect only if the client was actually created.
    if mcp_client is not None:
        mcp_client.disconnect()