OppaAI committed on
Commit
f043f8f
·
verified ·
1 Parent(s): 7b513dd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +91 -42
app.py CHANGED
@@ -1,54 +1,103 @@
1
  import os
 
 
2
  import gradio as gr
3
- from huggingface_hub import InferenceClient
4
- from tinyagents import Agent
5
 
6
- # Set your Hugging Face API token
7
- HF_TOKEN = os.getenv("HF_TOKEN")
 
 
 
 
8
 
9
- # Define the MCP server URL
10
  MCP_URL = "https://oppaai-job-search-mcp-server.hf.space/gradio_api/mcp/sse"
11
 
12
- # Initialize the MCP client
13
- mcp_client = InferenceClient(
14
- url=MCP_URL,
15
- token=HF_TOKEN
16
- )
17
-
18
- # Fetch the tools exposed by the MCP server
19
- tools = mcp_client.get_tools()
20
-
21
- # Define the model to be used
22
- model = InferenceClient(
23
- model="Qwen/Qwen3-30B-A3B",
24
- token=HF_TOKEN
25
- )
26
-
27
- # Create the agent
28
- agent = Agent(
29
- model=model,
30
- tools=tools
31
- )
32
-
33
- # Define the function to handle user input and generate responses
34
- def chat_fn(user_message, history):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
35
  history = history or []
36
- history.append(("user", user_message))
37
- response = agent.run(user_message)
38
- history.append(("agent", response))
 
 
 
39
  return history, history
40
 
41
- # Set up the Gradio interface
42
- with gr.Blocks() as demo:
43
- chatbot = gr.Chatbot()
44
- user_input = gr.Textbox(placeholder="Ask Jobcy about remote jobs or anything else...")
45
- state = gr.State([])
 
 
 
 
46
 
47
- user_input.submit(chat_fn, inputs=[user_input, state], outputs=[chatbot, state])
 
48
 
49
- demo.title = "Jobcy Remote Job Search Assistant (tinyagents)"
50
- demo.launch()
51
 
52
- # Disconnect the MCP client when done
53
- import atexit
54
- atexit.register(mcp_client.disconnect)
 
1
  import os
2
+ import asyncio
3
+ import json
4
  import gradio as gr
5
+ from contextlib import AsyncExitStack
6
+ from typing import Optional
7
 
8
+ from huggingface_hub import AsyncInferenceClient
9
+ from mcp import ClientSession
10
+ from mcp.http import MCPServerHTTP # Use HTTP MCP client for remote server
11
+
12
+ SYSTEM_PROMPT = """Your name is Jobcy. You are an AI assistant designed to help users find remote jobs by searching through job listings from various sources, including the Jobicy API and other platforms.
13
+ You will list the job listings in a structured format, including the job title, company, location, and a google search link."""
14
 
 
15
  MCP_URL = "https://oppaai-job-search-mcp-server.hf.space/gradio_api/mcp/sse"
16
 
17
+ class TinyToolCallingAgent:
18
+ def __init__(self):
19
+ self.mcp_client_session: Optional[ClientSession] = None
20
+ self.exit_stack = AsyncExitStack()
21
+ self.model = AsyncInferenceClient(model="Qwen/Qwen3-8B", provider="hf-inference", token=os.getenv("HF_TOKEN"))
22
+ self.system_prompt = SYSTEM_PROMPT
23
+ self.tools = []
24
+
25
+ async def connect_to_server(self):
26
+ # Connect to MCP server over HTTP SSE URL
27
+ server = MCPServerHTTP(url=MCP_URL)
28
+ self.mcp_client_session = await server.connect()
29
+ response = await self.mcp_client_session.list_tools()
30
+ self.tools = response.tools
31
+ print("Connected to MCP server with tools:", [tool.name for tool in self.tools])
32
+
33
+ async def _call_model(self, messages, tools=None):
34
+ return await self.model.chat_completion(messages, max_tokens=1000, tools=tools)
35
+
36
+ async def process_query(self, query: str) -> str:
37
+ messages = [
38
+ {"role": "system", "content": self.system_prompt},
39
+ {"role": "user", "content": query},
40
+ ]
41
+
42
+ tools = [
43
+ {
44
+ "type": "function",
45
+ "function": {"name": tool.name, "description": tool.description, "parameters": tool.inputSchema},
46
+ }
47
+ for tool in self.tools
48
+ ]
49
+
50
+ response = await self._call_model(messages, tools=tools)
51
+ final_text = []
52
+ message = response.choices[0].message
53
+
54
+ if message.tool_calls:
55
+ if message.content:
56
+ messages.append({"role": "assistant", "content": message.content})
57
+ for tool_call in message.tool_calls:
58
+ tool_name = tool_call.function.name
59
+ tool_args = json.loads(tool_call.function.arguments)
60
+ result = await self.mcp_client_session.call_tool(tool_name, tool_args)
61
+ final_text.append(f"[Calling tool {tool_name} with args {tool_args}]")
62
+ messages.append({"role": "user", "content": result.content[0].text})
63
+ response = await self._call_model(messages)
64
+ final_text.append(response.choices[0].message.content)
65
+ elif message.content:
66
+ final_text.append(message.content)
67
+
68
+ return "\n".join(final_text)
69
+
70
+ async def cleanup(self):
71
+ if self.mcp_client_session:
72
+ await self.mcp_client_session.aclose()
73
+ await self.exit_stack.aclose()
74
+
75
+ agent = TinyToolCallingAgent()
76
+
77
+ async def gradio_chat(user_input, history):
78
  history = history or []
79
+ try:
80
+ response = await agent.process_query(user_input)
81
+ except Exception as e:
82
+ response = f"Error: {str(e)}"
83
+ history.append(("User", user_input))
84
+ history.append(("Jobcy", response))
85
  return history, history
86
 
87
+ async def main():
88
+ await agent.connect_to_server()
89
+
90
+ with gr.Blocks() as demo:
91
+ chatbot = gr.Chatbot()
92
+ user_input = gr.Textbox(placeholder="Ask Jobcy about remote jobs...", label="Your Message")
93
+ state = gr.State([])
94
+
95
+ user_input.submit(gradio_chat, inputs=[user_input, state], outputs=[chatbot, state])
96
 
97
+ demo.title = "Jobcy Remote Job Search Assistant"
98
+ demo.launch()
99
 
100
+ await agent.cleanup()
 
101
 
102
+ if __name__ == "__main__":
103
+ asyncio.run(main())