|
|
|
|
|
|
|
|
import json
import logging
import sys
from datetime import datetime, timezone
from pathlib import Path

from aiohttp import web
|
|
|
|
|
|
|
|
# Make the repository root (three levels up) importable so the project-local
# `services` package resolves when this file is run as a standalone script.
sys.path.insert(0, str(Path(__file__).parent.parent.parent))


# Project-local factory for the Serper-backed search client; must come AFTER
# the sys.path manipulation above.
from services.web_search import get_search_service


# Module-wide logging: INFO level, module-scoped logger per stdlib convention.
logging.basicConfig(level=logging.INFO)

logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
class SearchServer:
    """JSON-RPC-style HTTP server exposing web search via the Serper API (serper.dev).

    Accepts POST bodies of the form ``{"method": <name>, "params": {...}}`` and
    dispatches on ``method``:

    - ``"health"``: liveness probe; returns ``{"result": "ok"}``.
    - ``"search.query"``: general web search. Params: ``q`` (required, str),
      ``max_results`` (optional int, default 5).
    - ``"search.news"``: news search. Same params; results also carry ``date``.

    Unknown methods and missing ``q`` yield HTTP 400 with an ``error`` field.
    """

    def __init__(self):
        # Backend comes from the project-level factory (services.web_search);
        # assumed to expose async search() / search_news() returning dicts.
        self.search_service = get_search_service()
        logger.info("Search MCP Server initialized with Serper API (serper.dev)")

    @staticmethod
    def _format_results(raw_results, confidence, include_date=False):
        """Normalize raw search-service results into the RPC response shape.

        Args:
            raw_results: iterable of dicts from the search service (keys
                ``body``/``title``/``source``/``url`` and, for news, ``date``).
            confidence: fixed confidence score attached to every result.
            include_date: include the ``date`` field (news results only).

        Returns:
            list[dict]: entries with text/title/source/url/ts/confidence
            (plus date when requested). ``ts`` is a single timezone-aware
            UTC ISO-8601 timestamp shared by all entries in the response.
        """
        # One timestamp per response; datetime.utcnow() is deprecated (3.12+)
        # and naive, so use an aware UTC time instead.
        ts = datetime.now(timezone.utc).isoformat()
        formatted = []
        for item in raw_results:
            entry = {
                "text": item.get("body", ""),
                "title": item.get("title", ""),
                "source": item.get("source", ""),
                "url": item.get("url", ""),
                "ts": ts,
                "confidence": confidence,
            }
            if include_date:
                entry["date"] = item.get("date", "")
            formatted.append(entry)
        return formatted

    async def handle_rpc(self, request):
        """Handle one JSON-RPC POST: parse, dispatch on 'method', respond.

        Args:
            request: aiohttp request whose body is the JSON-RPC envelope.

        Returns:
            aiohttp.web.Response with ``{"result": ...}`` on success or
            ``{"error": ...}`` with status 400 on client errors.
        """
        try:
            data = await request.json()
        except (json.JSONDecodeError, ValueError):
            # A malformed body is a client error (400), not a server crash (500).
            return web.json_response({"error": "Invalid JSON body"}, status=400)

        method = data.get("method")
        params = data.get("params", {})

        if method == "health":
            return web.json_response({"result": "ok"})

        if method == "search.query":
            q = params.get("q", "")
            max_results = params.get("max_results", 5)
            if not q:
                return web.json_response({"error": "Query parameter 'q' is required"}, status=400)
            # Lazy %-style args keep formatting off the hot path when INFO is disabled.
            logger.info("Search query: '%s'", q)
            raw = await self.search_service.search(q, max_results=max_results)
            results = self._format_results(raw, confidence=0.8)
            logger.info("Returning %d search results", len(results))
            return web.json_response({"result": results})

        if method == "search.news":
            q = params.get("q", "")
            max_results = params.get("max_results", 5)
            if not q:
                return web.json_response({"error": "Query parameter 'q' is required"}, status=400)
            logger.info("News search query: '%s'", q)
            raw = await self.search_service.search_news(q, max_results=max_results)
            # News results carry a slightly higher fixed confidence and a date.
            results = self._format_results(raw, confidence=0.85, include_date=True)
            logger.info("Returning %d news results", len(results))
            return web.json_response({"result": results})

        return web.json_response({"error": f"Unknown method: {method}"}, status=400)
|
|
|
|
|
# Application is wired at import time so external runners (e.g. an aiohttp
# worker that imports `app`) can use it without executing the __main__ block.
app = web.Application()


# NOTE(review): SearchServer() invokes get_search_service() at import time;
# presumably construction is cheap, but confirm it performs no network I/O.
server = SearchServer()


# Single JSON-RPC endpoint; all methods are dispatched inside handle_rpc.
app.router.add_post("/rpc", server.handle_rpc)


if __name__ == "__main__":
    # Standalone mode: blocking run on port 9001 (all interfaces by default).
    web.run_app(app, port=9001)