# app.py — OppaAI Hugging Face Space (commit fddd1c8, 1.84 kB)
import asyncio
import re
import os
import gradio as gr
from pydantic_ai import Agent
from pydantic_ai.mcp import MCPServerHTTP
from pydantic_ai.models.gemini import GeminiModel
from pydantic_ai.providers.google_gla import GoogleGLAProvider
# SSE endpoint of the remote job-search MCP server (a Gradio-hosted MCP Space).
SSE_URL = "https://oppaai-job-search-mcp-server.hf.space/gradio_api/mcp/sse"
server = MCPServerHTTP(url=SSE_URL)
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")  # Make sure your GEMINI_API_KEY is set in environment variables
# Google Gemini 2.0 Flash via the Generative Language API (GLA) provider.
# NOTE(review): GEMINI_API_KEY is None when the env var is unset — construction
# still succeeds here and requests fail later; confirm the Space secret exists.
model = GeminiModel(
    'gemini-2.0-flash',
    provider=GoogleGLAProvider(api_key=GEMINI_API_KEY)
)
# Agent wired to the MCP server so the model can call its job-search tools.
agent = Agent(
    model=model,
    mcp_servers=[server],
    instructions="""
Your name is Jobcy. You are an AI assistant designed to help users to find remote jobs by searching through job listings from various sources, including the Jobicy API and other platforms.
You will list the job listings in a structured format, including the job title, company, location, and the google search link.
"""
)
async def chat_with_agent(user_input, history):
    """Run one chat turn: send *user_input* to the agent and append the
    exchange to *history*.

    Returns the updated history twice — once for the gr.Chatbot display
    and once for the gr.State that round-trips it between turns.
    """
    history = history or []
    result = await agent.run(user_input)
    # agent.run returns an AgentRunResult wrapper, not the reply text;
    # newer pydantic_ai exposes the payload as .output, older releases as .data.
    reply = result.output if hasattr(result, "output") else result.data
    # gr.Chatbot (tuple format) renders (user_message, bot_message) pairs —
    # one pair per exchange, not separate role-labelled tuples.
    history.append((user_input, str(reply)))
    return history, history
async def main():
    """Build the Gradio chat UI and serve it while the MCP connection is open."""
    # The MCP server session must stay alive for the whole lifetime of the UI:
    # agent.run() inside the submit handler needs this context to be active.
    async with agent.run_mcp_servers():
        with gr.Blocks() as demo:
            chatbot = gr.Chatbot()
            user_input = gr.Textbox(placeholder="Ask Jobcy about remote jobs or anything else...", label="Your Message")
            state = gr.State([])  # per-session chat history round-tripped through the handler
            user_input.submit(chat_with_agent, inputs=[user_input, state], outputs=[chatbot, state])
        demo.title = "Jobcy Remote Job Search Assistant"
        # NOTE(review): launch() blocks this coroutine until the server stops,
        # which conveniently keeps run_mcp_servers() open — but it also blocks
        # the event loop started by asyncio.run(); confirm Gradio dispatches
        # the async callback on its own loop in this deployment.
        demo.launch(share=True)
if __name__ == "__main__":
    # Entry point: run the async app (MCP session + Gradio UI) on a fresh event loop.
    asyncio.run(main())