|
|
""" |
|
|
File: web_app/module_agent_web_search.py |
|
|
Description: Gradio module for the Agent Web Search functionality. |
|
|
Author: Didier Guillevic |
|
|
Date: 2025-10-20 |
|
|
""" |
|
|
|
|
|
import gradio as gr |
|
|
|
|
|
from google.adk.agents import Agent |
|
|
from google.adk.runners import Runner |
|
|
from google.adk.sessions import InMemorySessionService |
|
|
from google.adk.tools import google_search |
|
|
from google.genai import types |
|
|
|
|
|
import asyncio |
|
|
import uuid |
|
|
|
|
|
# Name under which sessions are registered with the ADK session service.
APP_NAME = "google_search_agent"
# Fixed session id: a fresh InMemorySessionService is created per request,
# so reusing one constant id does not collide across users.
SESSION_ID = "1234"

# Gemini model backing the search agent.
model = "gemini-2.5-flash"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Single module-level agent shared by all requests; per-request state lives in
# the session created by setup_session_and_runner, not in the agent itself.
root_agent = Agent(
    name="basic_search_agent",
    model=model,
    description=(
        "Agent to answer questions with the option to call Google Search "
        "if needed for up-to-date information."
    ),
    instruction=(
        "I can answer your questions from my own knowledge or by searching the "
        "web using Google Search. Just ask me anything!"
    ),
    # Built-in ADK tool that lets the model issue Google Search queries.
    tools=[google_search]
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def setup_session_and_runner(user_id: str):
    """Create a fresh in-memory session and a runner bound to the shared agent.

    Args:
        user_id: Identifier of the user who owns the session.

    Returns:
        A ``(session, runner)`` pair ready for ``runner.run_async``.
    """
    # A brand-new service per call: no history is shared between requests.
    service = InMemorySessionService()
    new_session = await service.create_session(
        app_name=APP_NAME,
        user_id=user_id,
        session_id=SESSION_ID,
    )
    agent_runner = Runner(
        agent=root_agent,
        app_name=APP_NAME,
        session_service=service,
    )
    return new_session, agent_runner
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def call_agent_async(query: str, user_id: str):
    """Run the search agent on *query* and collect its final answer.

    Args:
        query: The user's question.
        user_id: Identifier used for session bookkeeping.

    Returns:
        tuple: ``(final_response, rendered_content)`` where ``final_response``
        is the agent's answer text (``""`` if none was produced) and
        ``rendered_content`` is the HTML search-suggestion snippet from the
        grounding metadata, or ``None`` when the agent did not search.
    """
    content = types.Content(role='user', parts=[types.Part(text=query)])
    _session, runner = await setup_session_and_runner(user_id=user_id)
    events = runner.run_async(
        user_id=user_id,
        session_id=SESSION_ID,
        new_message=content
    )

    final_response = ""
    rendered_content = None

    async for event in events:
        if event.is_final_response():
            # Guard against final events that carry no text parts (e.g. pure
            # tool-call results) to avoid an AttributeError on `.parts[0]`.
            if event.content and event.content.parts and event.content.parts[0].text:
                final_response = event.content.parts[0].text

            # Only overwrite rendered_content when grounding data is actually
            # present: the previous code reset it to None on every ungrounded
            # event, discarding grounding captured from an earlier event.
            if (
                event.grounding_metadata and
                event.grounding_metadata.search_entry_point and
                event.grounding_metadata.search_entry_point.rendered_content
            ):
                rendered_content = event.grounding_metadata.search_entry_point.rendered_content

    return final_response, rendered_content
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def call_agent_streaming(query: str, user_id: str):
    """Stream the agent's answer for *query* as it is generated.

    Async generator yielding ``(accumulated_text, rendered_content, user_id)``
    triples suitable as Gradio streaming outputs. ``rendered_content`` is None
    for intermediate chunks and holds the grounding HTML (if any) once the
    final response has been seen.
    """
    content = types.Content(role='user', parts=[types.Part(text=query)])
    session, runner = await setup_session_and_runner(user_id=user_id)
    events = runner.run_async(
        user_id=user_id,
        session_id=SESSION_ID,
        new_message=content
    )

    accumulated_response = ""   # full answer text seen so far
    rendered_content = None     # grounding HTML, set only on the final event

    async for event in events:
        # Intermediate (and final) text chunks: append and re-yield the whole
        # accumulated answer so the UI shows a growing response.
        if event.content and event.content.parts and event.content.parts[0].text:
            new_text = event.content.parts[0].text
            accumulated_response += new_text
            yield accumulated_response, None, user_id

        if event.is_final_response():
            # Attach the Google Search "suggestions" snippet when the answer
            # was grounded via the search tool.
            if (
                event.grounding_metadata and
                event.grounding_metadata.search_entry_point and
                event.grounding_metadata.search_entry_point.rendered_content
            ):
                rendered_content = event.grounding_metadata.search_entry_point.rendered_content

            yield accumulated_response, rendered_content, user_id

    # Fallback: ensure at least one terminal yield when no grounding content
    # was ever produced (NOTE(review): this can duplicate the final yield
    # above when the final response had no grounding — confirm intended).
    if rendered_content is None:
        yield accumulated_response, None, user_id
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def agent_web_search(query: str, user_id=None):
    """Answer *query* with the Google-Search-enabled agent (blocking call).

    Args:
        query (str): The user query.
        user_id (str, optional): Session owner id; a fresh UUID is minted
            when None. Defaults to None.

    Returns:
        tuple: The agent's response (str), the rendered grounding content
        (str or None), and the user_id (str).
    """
    effective_id = str(uuid.uuid4()) if user_id is None else user_id

    answer, grounding_html = asyncio.run(call_agent_async(query, effective_id))
    return answer, grounding_html, effective_id
|
|
|
|
|
|
|
|
async def agent_web_search_streaming(query: str, current_user_id: str | None):
    """Return the streaming async generator for *query*.

    Mints a fresh UUID when no user id has been established yet; otherwise the
    existing id is reused so the caller keeps a stable session identity.
    """
    effective_id = current_user_id if current_user_id is not None else str(uuid.uuid4())

    return call_agent_streaming(query, effective_id)
|
|
|
|
|
|
|
|
# Gradio UI: query box, per-session state, submit/clear buttons, answer and
# grounding panels, plus clickable example queries.
with gr.Blocks() as demo:
    gr.Markdown(
        """
        **Agent with Google Search tool**: be patient :-) Currently looking into (async) streaming support...
        """
    )

    with gr.Row():
        input_text = gr.Textbox(
            lines=2,
            placeholder="Enter your query here...",
            label="Query",
            render=True
        )

    # Per-browser-session user id; populated by the first submit (the handler
    # returns it as an output) and reused on subsequent submits.
    user_id = gr.State(None)

    with gr.Row():
        submit_button = gr.Button("Submit", variant="primary")
        clear_button = gr.Button("Clear", variant="secondary")

    with gr.Row():
        # The agent's answer, rendered as Markdown.
        output_text = gr.Markdown(
            label="Agent Response",
            render=True
        )

    with gr.Row():
        # Google search-suggestions HTML snippet from the grounding metadata.
        grounding = gr.HTML(
            label="Grounding Content",
            render=True
        )

    with gr.Accordion("Examples", open=False):
        examples = gr.Examples(
            examples=[
                ["What is the prime number factorization of 15?",],
                ["Who won the Nobel Peace Prize in 2025?",],
                ["What is the weather like tomorrow in Montreal, Canada?",],
                ["What are the latest news about Graph Neural Networks?",],
            ],
            inputs=[input_text,],
            cache_examples=False,
            label="Click to use an example"
        )

    # Blocking (non-streaming) handler; also writes the user_id back to state.
    submit_button.click(
        fn=agent_web_search,
        inputs=[input_text, user_id],
        outputs=[output_text, grounding, user_id]
    )
    # Clears the visible widgets. NOTE(review): user_id state is not reset
    # here — confirm keeping the session identity after Clear is intended.
    clear_button.click(
        fn=lambda : ('', '', None),
        inputs=None,
        outputs=[input_text, output_text, grounding]
    )
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Standalone launch; mcp_server=True additionally exposes the app's
    # functions over Gradio's MCP server support.
    demo.launch(mcp_server=True)