# Tools/Modules/AI_Web_Search.py
# (uploaded via huggingface_hub, revision 588592f)
"""
AI Web Search Module.
AI-powered web search using Pollinations API with Perplexity and Gemini models.
Query optimization is ALWAYS enabled for best results.
Depth levels:
- fast: Gemini with Google Search - Quick, reliable answers
- normal: Perplexity Sonar - Balanced speed and quality
- deep: Perplexity Sonar Reasoning - Deep analysis with reasoning chain
"""
from __future__ import annotations
from typing import Annotated, Literal
import gradio as gr
from app import _log_call_end, _log_call_start, _truncate_for_log
from ._docstrings import autodoc
from ._pollinations_client import PollinationsClient
from ._query_optimizer import get_optimizer
# Single source of truth for the LLM-facing tool description.
# Consumed by the @autodoc decorator on AI_Web_Search and reused as the
# Gradio `api_description` in build_interface, so edit it in one place only.
TOOL_SUMMARY = (
    "AI-powered web search using Perplexity or Gemini with built-in web search. "
    "Returns synthesized answers with source citations. "
    "Use for complex questions requiring current information and analysis. "
    "Query optimization is automatically applied for best results."
)
@autodoc(
    summary=TOOL_SUMMARY,
)
def AI_Web_Search(
    query: Annotated[str, "The search query or question."],
    depth: Annotated[
        Literal["fast", "normal", "deep"],
        "Search depth: 'fast' (Gemini + Google Search), 'normal' (Perplexity Sonar), 'deep' (Perplexity Sonar Reasoning).",
    ] = "normal",
    detailed: Annotated[bool, "Request a comprehensive answer with more detail."] = False,
) -> str:
    """
    Run an AI-powered web search with automatic query optimization.

    Backed by the Pollinations API; ``depth`` picks the model:
    - fast: Gemini with Google Search - Best for quick facts
    - normal: Perplexity Sonar - Balanced for general research
    - deep: Perplexity Sonar Reasoning - Best for complex analysis

    The query is ALWAYS rewritten for AI search via the shared optimizer
    (SC-CoT, Mistral → HF fallback chain); if the optimizer raises, the
    original query is used unchanged.

    Returns a synthesized answer with numbered citations and source URLs,
    or a human-readable error string if the search itself fails.
    """
    _log_call_start("AI_Web_Search", query=query, depth=depth, detailed=detailed)

    # Guard clause: reject empty or whitespace-only input up front.
    if not query or not query.strip():
        result = "No search query provided. Please enter a question or search term."
        _log_call_end("AI_Web_Search", _truncate_for_log(result))
        return result

    # Optimization is best-effort: any failure falls back to the raw query.
    original_query = query
    optimization_metadata = None
    try:
        query, optimization_metadata = get_optimizer().optimize_for_ai_search(query)
    except Exception as exc:
        print(f"[AI_Web_Search] Query optimization failed: {exc}", flush=True)

    try:
        result_data = PollinationsClient().web_search_sync(query, depth, detailed)

        # Assemble the textual report line by line.
        output: list[str] = []

        # Surface optimizer details only when the query actually changed.
        if optimization_metadata and optimization_metadata.get("original_query") != optimization_metadata.get("optimized_query"):
            output.extend(
                [
                    f"Optimized query: {query}",
                    f"Original query: {original_query}",
                    f"Optimizer: {optimization_metadata.get('provider', 'unknown')}",
                    "",
                ]
            )

        output.extend(
            [
                f"Query: {result_data['query']}",
                f"Model: {result_data['model']}",
                f"Depth: {depth}",
                "",
                "Answer:",
                result_data["answer"] or "No answer generated.",
            ]
        )

        if result_data["sources"]:
            output.extend(["", "Sources:"])
            output.extend(f" {i}. {source}" for i, source in enumerate(result_data["sources"], 1))
        else:
            output.extend(["", "(No sources provided)"])

        result = "\n".join(output)
        _log_call_end("AI_Web_Search", _truncate_for_log(result))
        return result
    except Exception as exc:
        # Tool-boundary handler: report the failure as the tool's result text.
        error_msg = f"Search failed: {exc}"
        _log_call_end("AI_Web_Search", error_msg)
        return error_msg
def build_interface() -> gr.Interface:
    """Construct and return the Gradio interface wrapping AI_Web_Search."""
    # Inputs mirror the AI_Web_Search signature: query, depth, detailed.
    query_box = gr.Textbox(
        label="Query",
        placeholder="Ask a question or enter a search topic...",
        max_lines=3,
        info="Your question or search query (will be optimized automatically)",
    )
    depth_radio = gr.Radio(
        label="Search Depth",
        choices=["fast", "normal", "deep"],
        value="normal",
        info="fast: Gemini + Google Search | normal: Perplexity Sonar | deep: Perplexity Reasoning",
    )
    detailed_toggle = gr.Checkbox(
        label="Detailed Answer",
        value=False,
        info="Request a comprehensive answer with more detail",
    )
    results_box = gr.Textbox(
        label="AI Search Results",
        interactive=False,
        lines=20,
        max_lines=30,
    )
    return gr.Interface(
        fn=AI_Web_Search,
        inputs=[query_box, depth_radio, detailed_toggle],
        outputs=results_box,
        title="AI Web Search",
        description=(
            "<div style='text-align:center'>"
            "AI-powered web search with automatic query optimization. "
            "Uses Perplexity Sonar or Gemini with built-in web search to provide "
            "direct answers with source citations. Query optimization is always enabled."
            "</div>"
        ),
        api_description=TOOL_SUMMARY,
        flagging_mode="never",
        submit_btn="Search",
    )
# Public API: the tool function and its Gradio interface factory.
__all__ = ["AI_Web_Search", "build_interface"]