Hugging Face Space (status: sleeping) — POC class.
Commit diff for tools/http_search.py (+47 −30), shown below as old/new sides.
@@ -1,60 +1,77 @@
|
|
| 1 |
from typing import Dict, Any
|
| 2 |
from smolagents.tools import Tool
|
| 3 |
import requests
|
| 4 |
-
import json
|
| 5 |
|
| 6 |
|
| 7 |
class HttpSearchTool(Tool):
|
| 8 |
name = "http_search"
|
| 9 |
description = (
|
| 10 |
-
"
|
| 11 |
-
"
|
| 12 |
)
|
| 13 |
inputs = {
|
| 14 |
"query": {
|
| 15 |
"type": "string",
|
| 16 |
-
"description": "
|
| 17 |
}
|
| 18 |
}
|
| 19 |
output_type = "string"
|
| 20 |
|
| 21 |
-
def __init__(self,
|
| 22 |
"""
|
| 23 |
Args:
|
| 24 |
-
|
| 25 |
-
query_param: nom du paramètre utilisé pour la requête (ex: 'q', 'query', ...).
|
| 26 |
-
extra_params: dict de paramètres fixes à ajouter à chaque requête (clé API, type de moteur, etc.).
|
| 27 |
-
headers: headers HTTP à envoyer (optionnels).
|
| 28 |
"""
|
| 29 |
super().__init__()
|
| 30 |
-
self.
|
| 31 |
-
self.
|
| 32 |
-
self.extra_params = extra_params or {}
|
| 33 |
-
self.headers = headers or {}
|
| 34 |
|
| 35 |
def forward(self, query: str) -> str:
|
| 36 |
-
params = {
|
| 37 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 38 |
|
| 39 |
try:
|
| 40 |
-
resp = requests.get(self.base_url, params=params,
|
| 41 |
resp.raise_for_status()
|
| 42 |
except Exception as e:
|
| 43 |
-
return f"Erreur lors de l'appel HTTP à
|
| 44 |
|
| 45 |
-
# On suppose une réponse JSON
|
| 46 |
try:
|
| 47 |
data = resp.json()
|
| 48 |
except Exception:
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
return
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
from typing import Dict, Any
|
| 2 |
from smolagents.tools import Tool
|
| 3 |
import requests
|
|
|
|
| 4 |
|
| 5 |
|
| 6 |
class HttpSearchTool(Tool):
    """Search Wikipedia through its OpenSearch HTTP API.

    Returns a human-readable (French) listing of results: title, link and,
    when available, a short excerpt. All failures are reported as message
    strings rather than raised, so the agent always gets usable text back.
    """

    name = "http_search"
    description = (
        "Effectue une recherche de pages Wikipédia et renvoie une liste de résultats "
        "lisibles (titre, lien, extrait)."
    )
    inputs = {
        "query": {
            "type": "string",
            "description": "Le terme à rechercher sur Wikipédia.",
        }
    }
    output_type = "string"

    def __init__(self, language: str = "en", limit: int = 5, timeout: float = 15):
        """Configure the tool for one Wikipedia language edition.

        Args:
            language: Wikipedia language code ('en', 'fr', ...); used to build
                the API endpoint URL.
            limit: maximum number of results to request (default 5, as before).
            timeout: HTTP timeout in seconds for each request (default 15).
        """
        super().__init__()
        self.language = language
        self.limit = limit
        self.timeout = timeout
        self.base_url = f"https://{language}.wikipedia.org/w/api.php"

    def forward(self, query: str) -> str:
        """Run an OpenSearch query and format the hits as readable text.

        Args:
            query: the term to search for.

        Returns:
            A formatted results listing, or an error / "no results" message.
            Never raises.
        """
        params = {
            "action": "opensearch",
            "search": query,
            "limit": self.limit,
            "namespace": 0,  # main/article namespace only
            "format": "json",
        }

        try:
            resp = requests.get(self.base_url, params=params, timeout=self.timeout)
            resp.raise_for_status()
        # RequestException covers connection errors, timeouts and the
        # HTTPError raised by raise_for_status() — narrower than Exception,
        # so genuine programming errors are no longer swallowed.
        except requests.RequestException as e:
            return f"Erreur lors de l'appel HTTP à Wikipédia : {e}"

        try:
            data = resp.json()
        # requests' JSONDecodeError subclasses ValueError.
        except ValueError:
            return "Impossible de décoder la réponse de Wikipédia en JSON."

        # OpenSearch response format: [search_term, titles[], descriptions[], urls[]]
        if len(data) != 4 or not data[1]:
            return (
                f'Aucun résultat trouvé sur Wikipédia ({self.language}) '
                f'pour la requête "{query}".'
            )

        titles, descriptions, urls = data[1], data[2], data[3]

        lines = [
            f'Voici quelques résultats Wikipédia ({self.language}) pour la requête "{query}" :',
            "",
        ]

        for idx, (title, desc, url) in enumerate(
            zip(titles, descriptions, urls), start=1
        ):
            desc = (desc or "").strip()
            if len(desc) > 220:
                # Keep entries compact: 217 chars + "..." == 220 total.
                desc = desc[:217].rstrip() + "..."
            lines.append(f"{idx}. {title}")
            lines.append(f" Lien : {url}")
            if desc:
                lines.append(f" Résumé : {desc}")
            lines.append("")

        return "\n".join(lines).strip()