OppaAI committed on
Commit
1448549
·
verified ·
1 Parent(s): 032f7f1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +52 -39
app.py CHANGED
@@ -1,41 +1,54 @@
import os
import gradio as gr
import asyncio  # kept for compatibility with other code that may import this module
from smolagents import CodeAgent, MCPClient, InferenceClientModel


def main():
    """Launch the Jobcy Gradio chat UI backed by a smolagents CodeAgent.

    Connects to the remote MCP server, builds an agent from the tools it
    exposes, and serves a Blocks chat interface. The MCP connection is
    always closed on exit.

    Note: this used to be an ``async def`` that awaited ``get_tools()``,
    ``agent.run()`` and ``disconnect()`` — all of which are synchronous in
    smolagents, so each ``await`` raised TypeError at runtime. The flow is
    now plain synchronous code.
    """
    mcp_client = None
    try:
        # Initialize MCPClient for the remote MCP server (SSE transport).
        mcp_client = MCPClient(
            {"url": "https://oppaai-job-search-mcp-server.hf.space/gradio_api/mcp/sse"}
        )

        # Fetch tools exposed by the MCP server (synchronous call).
        tools = mcp_client.get_tools()

        # Initialize the model with the HF token from the environment.
        model = InferenceClientModel(token=os.getenv("HF_TOKEN"))

        # Create the agent with the fetched tools and model.
        agent = CodeAgent(tools=tools, model=model)

        def chat_with_agent(message, history):
            """Run the agent on *message*; append the (user, bot) pair to history.

            Returns the history twice: once for the Chatbot display and once
            for the gr.State carrier.
            """
            history = history or []
            result = agent.run(message)  # CodeAgent.run is synchronous
            history.append((message, result))
            return history, history

        with gr.Blocks() as demo:
            chatbot = gr.Chatbot()
            user_input = gr.Textbox(
                placeholder="Ask Jobcy about remote jobs or anything else...",
                label="Your Message",
            )
            state = gr.State([])

            user_input.submit(
                chat_with_agent, inputs=[user_input, state], outputs=[chatbot, state]
            )

            demo.title = "Jobcy Remote Job Search Assistant"
            demo.launch()  # blocks until the server is stopped
    finally:
        # Always release the MCP connection, even if setup failed part-way.
        if mcp_client:
            mcp_client.disconnect()


if __name__ == "__main__":
    main()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os
import atexit  # registered early so cleanup runs even if launch() is interrupted
import gradio as gr
from huggingface_hub import InferenceClient
from tinyagents import Agent

# Hugging Face API token, read from the environment.
HF_TOKEN = os.getenv("HF_TOKEN")

# MCP server endpoint (SSE transport).
MCP_URL = "https://oppaai-job-search-mcp-server.hf.space/gradio_api/mcp/sse"

# NOTE(review): huggingface_hub.InferenceClient is an inference API client,
# not an MCP client — it takes no ``url=`` kwarg and exposes no
# ``get_tools()``; this likely needs a real MCP client (e.g. smolagents'
# MCPClient, as in the previous revision). Left structurally as-is; verify.
mcp_client = InferenceClient(
    url=MCP_URL,
    token=HF_TOKEN,
)

# Fetch the tools exposed by the MCP server.
tools = mcp_client.get_tools()

# Chat model used by the agent.
model = InferenceClient(
    model="Qwen/Qwen3-8B",
    token=HF_TOKEN,
)

# Agent wired to the chat model and the MCP tools.
agent = Agent(
    model=model,
    tools=tools,
)

# Register cleanup BEFORE demo.launch() blocks; guard with getattr because
# InferenceClient has no ``disconnect`` method — the original
# ``atexit.register(mcp_client.disconnect)`` raised AttributeError at import.
_disconnect = getattr(mcp_client, "disconnect", None)
if callable(_disconnect):
    atexit.register(_disconnect)


def chat_fn(user_message, history):
    """Run the agent on *user_message* and append one (user, bot) exchange.

    gr.Chatbot's tuple format expects one ``(user_message, bot_response)``
    pair per row; the previous version appended ``("user", msg)`` and
    ``("agent", resp)`` as two separate rows, rendering the literal words
    "user"/"agent" in the user column.

    Returns the history twice: for the Chatbot display and the gr.State.
    """
    history = history or []
    response = agent.run(user_message)
    history.append((user_message, response))
    return history, history


# Gradio UI: chat display, input box, and session state.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    user_input = gr.Textbox(
        placeholder="Ask Jobcy about remote jobs or anything else..."
    )
    state = gr.State([])

    user_input.submit(chat_fn, inputs=[user_input, state], outputs=[chatbot, state])

    demo.title = "Jobcy Remote Job Search Assistant (tinyagents)"
    demo.launch()  # blocks until the server is stopped