OppaAI committed on
Commit
032f7f1
·
verified ·
1 Parent(s): 1327560

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -48
app.py CHANGED
@@ -1,51 +1,41 @@
1
  import os
2
  import gradio as gr
3
  import asyncio
4
- from smolagents import Agent
5
- from smolagents.mcp import MCPServerHTTP
6
- from smolagents.models import HuggingFaceModel
7
- from smolagents.providers import HuggingFaceProvider
8
-
9
- HF_TOKEN = os.getenv("HF_TOKEN")
10
-
11
- # MCP Server SSE URL
12
- SSE_URL = "https://oppaai-job-search-mcp-server.hf.space/gradio_api/mcp/sse"
13
- server = MCPServerHTTP(url=SSE_URL)
14
-
15
- # Use HF Qwen3-8B model from Hugging Face Hub
16
- hf_model = HuggingFaceModel(
17
- model_name="Qwen/Qwen-3-8B",
18
- provider=HuggingFaceProvider(
19
- api_token=HF_TOKEN
20
- )
21
- )
22
-
23
- # Create Agent with MCP Server
24
- agent = Agent(
25
- model=hf_model,
26
- mcp_servers=[server],
27
- instructions="""
28
- Your name is Jobcy. You are an AI assistant designed to help users find remote jobs by searching through job listings from various sources, including the Jobicy API and other platforms.
29
- You will list the job listings in a structured format, including the job title, company, location, and the Google search link.
30
- """,
31
- stream=False
32
- )
33
-
34
- async def chat_with_agent(user_input, history):
35
- history = history or []
36
- history.append(("user", user_input))
37
-
38
- result = await agent.run(user_input)
39
-
40
- history.append(("agent", result.output)) # result.output contains the answer text
41
- return history, history
42
-
43
- with gr.Blocks() as demo:
44
- chatbot = gr.Chatbot()
45
- user_input = gr.Textbox(placeholder="Ask Jobcy about remote jobs or anything else...", label="Your Message")
46
- state = gr.State([])
47
-
48
- user_input.submit(chat_with_agent, inputs=[user_input, state], outputs=[chatbot, state])
49
-
50
- demo.title = "Jobcy Remote Job Search Assistant"
51
- demo.launch()
 
1
import os
import gradio as gr
from smolagents import CodeAgent, MCPClient, InferenceClientModel


def main():
    """Launch Jobcy, a Gradio chat UI backed by an MCP job-search server.

    Connects to the remote MCP server over SSE, builds a smolagents
    CodeAgent using the server's tools and a Hugging Face Inference
    model, and serves a Gradio Blocks chat interface. Blocks until the
    Gradio server is stopped; always disconnects the MCP client on exit.
    """
    mcp_client = None
    try:
        # Connect to the MCP server exposed by the job-search Space.
        # NOTE: MCPClient.get_tools() and CodeAgent.run() are synchronous
        # in smolagents — the previous `await` calls on them would raise
        # TypeError at runtime, so this function is deliberately sync.
        mcp_client = MCPClient(
            {"url": "https://oppaai-job-search-mcp-server.hf.space/gradio_api/mcp/sse"}
        )
        tools = mcp_client.get_tools()

        # Serverless HF Inference model; requires HF_TOKEN in the environment.
        model = InferenceClientModel(token=os.getenv("HF_TOKEN"))

        # Agent that can call the MCP server's job-search tools.
        agent = CodeAgent(tools=tools, model=model)

        def chat_with_agent(message, history):
            """Run one agent turn and append the (user, answer) pair to history."""
            history = history or []
            result = agent.run(message)  # blocking call; returns the answer
            # Coerce to str: the Chatbot component expects text, and the
            # agent's final answer is not guaranteed to be a string.
            history.append((message, str(result)))
            return history, history

        # Title goes in the Blocks constructor — assigning demo.title after
        # construction has no effect on the rendered page.
        with gr.Blocks(title="Jobcy Remote Job Search Assistant") as demo:
            chatbot = gr.Chatbot()
            user_input = gr.Textbox(
                placeholder="Ask Jobcy about remote jobs or anything else...",
                label="Your Message",
            )
            state = gr.State([])

            user_input.submit(
                chat_with_agent,
                inputs=[user_input, state],
                outputs=[chatbot, state],
            )

        demo.launch()
    finally:
        # Always close the MCP connection, even if setup or launch fails.
        if mcp_client is not None:
            mcp_client.disconnect()


if __name__ == "__main__":
    main()