OppaAI committed on
Commit
c2bf8ca
·
verified ·
1 Parent(s): 74d73fb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -28
app.py CHANGED
@@ -1,37 +1,49 @@
1
- import os
2
  import gradio as gr
3
- from smolagents import MCPClient, InferenceClientModel, CodeAgent
4
-
5
- # Your MCP server URL (replace with your actual HF space MCP endpoint)
6
- MCP_URL = "https://oppaai-job-search-mcp-server.hf.space/gradio_api/mcp/sse"
7
-
8
- # Initialize MCP client to connect to your MCP server
9
- mcp_client = MCPClient({"url": MCP_URL, "transport": "streamable-http"})
10
-
11
- # Fetch available tools from MCP server
12
- tools = mcp_client.get_tools()
 
 
 
 
 
 
13
 
14
- # Initialize your LLM model from Hugging Face (requires HF token for private/protected models)
15
- llm_model = InferenceClientModel(
16
- model_id="HuggingFaceH4/zephyr-7b-beta",
17
- token=os.getenv("HUGGINGFACE_TOKEN"), # Set this environment variable if needed
 
 
 
 
 
18
  )
19
 
20
- # Create an agent that uses the tools and the LLM
21
- agent = CodeAgent(tools=tools, model=llm_model, stream_outputs=True)
 
22
 
 
 
23
 
24
- def respond(message, history):
25
- # Agent processes the input message, uses tools if needed, else LLM
26
- reply = agent.run(message)
27
- return reply
28
 
 
 
 
 
29
 
30
- demo = gr.ChatInterface(
31
- fn=respond,
32
- title="Job Search Assistant (Agent)",
33
- description="Ask me about job listings or general questions — I’ll use the job search tool when needed!",
34
- )
35
 
36
- if __name__ == "__main__":
37
- demo.launch()
 
1
import os
import re

import gradio as gr
from smolagents import Agent
from smolagents.mcp import MCPServerHTTP
from smolagents.models import HuggingFaceModel
from smolagents.providers import HuggingFaceProvider
7
+
8
# MCP Server SSE URL (Gradio MCP endpoint of the job-search Space).
SSE_URL = "https://oppaai-job-search-mcp-server.hf.space/gradio_api/mcp/sse"
server = MCPServerHTTP(url=SSE_URL)

# Hugging Face hosted model used by the agent.
hf_model = HuggingFaceModel(
    # Fixed repo id: "Qwen/Qwen-3-8B" does not exist on the Hub; the
    # published repository is "Qwen/Qwen3-8B".
    model_name="Qwen/Qwen3-8B",
    provider=HuggingFaceProvider(
        # Read the token from the environment. The previous code passed the
        # literal string "HF_TOKEN" as the credential, which can never
        # authenticate; os.getenv returns None when the variable is unset,
        # falling back to anonymous access.
        api_token=os.getenv("HF_TOKEN")
    ),
)
19
 
20
# System prompt for the assistant, kept as a named module-level constant so
# the Agent construction below stays compact.
JOBCY_INSTRUCTIONS = """
Your name is Jobcy. You are an AI assistant designed to help users to find remote jobs by searching through job listings from various sources, including the Jobicy API and other platforms.
You will list the job listings in a structured format, including the job title, company, location, and the google search link.
"""

# Agent wired to the MCP job-search server; streaming disabled so run()
# returns a complete result object.
agent = Agent(
    model=hf_model,
    mcp_servers=[server],
    instructions=JOBCY_INSTRUCTIONS,
    stream=False,
)
30
 
31
def chat_with_agent(user_input, history):
    """Run the agent on *user_input* and append the exchange to the chat history.

    Args:
        user_input: The user's message text.
        history: Gradio Chatbot history as a list of
            (user_message, bot_message) pairs, or None on first call.

    Returns:
        The updated history twice — once for the Chatbot display and once
        for the gr.State component.
    """
    history = history or []

    result = agent.run(user_input)
    # Strip any chain-of-thought the model leaks inside <think>...</think>
    # tags before showing the reply.
    cleaned_output = re.sub(r"<think>.*?</think>", "", result.output, flags=re.DOTALL).strip()

    # BUG FIX: gr.Chatbot's tuple format expects (user_message, bot_message)
    # pairs. The previous code appended ("user", text) and ("agent", text) as
    # two separate rows, which rendered the literal words "user" and "agent"
    # as chat messages. Append one pair per exchange instead.
    history.append((user_input, cleaned_output))
    return history, history
 
 
40
 
41
# Gradio UI: a chatbot display, a textbox for input, and a State holding the
# conversation history between submits.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    user_input = gr.Textbox(
        placeholder="Ask Jobcy about remote jobs or anything else...",
        label="Your Message",
    )
    state = gr.State([])

    # Enter-to-submit: the handler returns (chatbot history, new state).
    user_input.submit(chat_with_agent, inputs=[user_input, state], outputs=[chatbot, state])

demo.title = "Jobcy Remote Job Search Assistant"

# FIX: restore the __main__ guard (present in the previous revision) so that
# importing this module — e.g. from tests or another Space entry point —
# does not start the Gradio server; `python app.py` still launches it.
if __name__ == "__main__":
    demo.launch()