Spaces:
Sleeping
Sleeping
web
Browse files
agent.py
CHANGED
|
@@ -4,7 +4,7 @@ from langgraph.prebuilt import create_react_agent
|
|
| 4 |
from tools import (
|
| 5 |
wikipedia_search_tool, arxiv_search_tool,
|
| 6 |
audio_transcriber_tool, excel_tool, analyze_code_tool, image_tool,
|
| 7 |
-
add_tool, subtract_tool, multiply_tool, divide_tool
|
| 8 |
)
|
| 9 |
|
| 10 |
# ββββββββββββββββββββββββββββββββ Config ββββββββββββββββββββββββββββββββ
|
|
@@ -33,7 +33,7 @@ IMPORTANT:
|
|
| 33 |
TOOLS = [
|
| 34 |
wikipedia_search_tool, arxiv_search_tool,
|
| 35 |
audio_transcriber_tool, excel_tool, analyze_code_tool, image_tool,
|
| 36 |
-
add_tool, subtract_tool, multiply_tool, divide_tool
|
| 37 |
]
|
| 38 |
|
| 39 |
# βββββββββββββββββββββββββββββ Agent Wrapper βββββββββββββββββββββββββββββ
|
|
|
|
| 4 |
from tools import (
|
| 5 |
wikipedia_search_tool, arxiv_search_tool,
|
| 6 |
audio_transcriber_tool, excel_tool, analyze_code_tool, image_tool,
|
| 7 |
+
add_tool, subtract_tool, multiply_tool, divide_tool, web_search_tool
|
| 8 |
)
|
| 9 |
|
| 10 |
# ββββββββββββββββββββββββββββββββ Config ββββββββββββββββββββββββββββββββ
|
|
|
|
| 33 |
TOOLS = [
|
| 34 |
wikipedia_search_tool, arxiv_search_tool,
|
| 35 |
audio_transcriber_tool, excel_tool, analyze_code_tool, image_tool,
|
| 36 |
+
add_tool, subtract_tool, multiply_tool, divide_tool, web_search_tool
|
| 37 |
]
|
| 38 |
|
| 39 |
# βββββββββββββββββββββββββββββ Agent Wrapper βββββββββββββββββββββββββββββ
|
tools.py
CHANGED
|
@@ -262,13 +262,15 @@ def wikipedia_search_tool(wiki_query: str) -> str:
|
|
| 262 |
summary_text = summary_data.get("extract")
|
| 263 |
if not summary_text:
|
| 264 |
summary_text = summary_data.get("description", "No summary available.")
|
| 265 |
-
print(
|
| 266 |
return f"Title: {first_title}\n\n{summary_text}"
|
| 267 |
|
| 268 |
|
| 269 |
except requests.exceptions.RequestException as e:
|
|
|
|
| 270 |
return f"Wikipedia search error: {e}"
|
| 271 |
except Exception as e:
|
|
|
|
| 272 |
return f"Unexpected error in wikipedia_search_tool: {e}"
|
| 273 |
|
| 274 |
@tool
|
|
@@ -442,54 +444,52 @@ def divide_tool(a: float, b: float) -> str:
|
|
| 442 |
result = a / b
|
| 443 |
return f"Division result: {a} Γ· {b} = {result}"
|
| 444 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 445 |
|
|
|
|
| 446 |
|
|
|
|
| 447 |
|
| 448 |
-
|
| 449 |
-
|
| 450 |
-
|
| 451 |
-
|
| 452 |
-
|
| 453 |
-
|
| 454 |
-
|
| 455 |
-
|
| 456 |
-
|
| 457 |
-
|
| 458 |
-
|
| 459 |
-
|
| 460 |
-
|
| 461 |
-
|
| 462 |
-
|
| 463 |
-
|
| 464 |
-
|
| 465 |
-
#
|
| 466 |
-
|
| 467 |
-
|
| 468 |
-
|
| 469 |
-
|
| 470 |
-
|
| 471 |
-
#
|
| 472 |
-
|
| 473 |
-
|
| 474 |
-
#
|
| 475 |
-
|
| 476 |
-
|
| 477 |
-
|
| 478 |
-
|
| 479 |
-
|
| 480 |
-
|
| 481 |
-
#
|
| 482 |
-
|
| 483 |
-
|
| 484 |
-
#
|
| 485 |
-
|
| 486 |
-
|
| 487 |
-
|
| 488 |
-
|
| 489 |
-
# # Successful response (no exception and no rateβlimit text)
|
| 490 |
-
# break
|
| 491 |
-
|
| 492 |
-
# return {
|
| 493 |
-
# "web_search_query": None,
|
| 494 |
-
# "web_search_result": result_text
|
| 495 |
-
# }
|
|
|
|
| 262 |
summary_text = summary_data.get("extract")
|
| 263 |
if not summary_text:
|
| 264 |
summary_text = summary_data.get("description", "No summary available.")
|
| 265 |
+
print("Submitted wiki successfully")
|
| 266 |
return f"Title: {first_title}\n\n{summary_text}"
|
| 267 |
|
| 268 |
|
| 269 |
except requests.exceptions.RequestException as e:
|
| 270 |
+
print("Wikipedia search error: ", e)
|
| 271 |
return f"Wikipedia search error: {e}"
|
| 272 |
except Exception as e:
|
| 273 |
+
print("Unexpected error in wikipedia_search_tool: ", e)
|
| 274 |
return f"Unexpected error in wikipedia_search_tool: {e}"
|
| 275 |
|
| 276 |
@tool
|
|
|
|
| 444 |
result = a / b
|
| 445 |
return f"Division result: {a} Γ· {b} = {result}"
|
| 446 |
|
| 447 |
@tool
def web_search_tool(query: str) -> str:
    """
    TOOL NAME: Web Search Tool

    Purpose: When the user asks for current information, recent news, or topics
    not covered by Wikipedia, use this tool.

    Input: A string describing what to search for on the web.

    Example usage:
    - "What's the latest news about AI?"
    - "Current stock price of Tesla"
    - "Recent developments in renewable energy"
    """
    print("reached web_search_tool")
    if not query:
        return "No search query provided."

    ddg = DDGS()
    max_retries = 5
    retry_delay_s = 4  # DuckDuckGo throttles aggressively; back off between attempts
    result_text = ""

    for attempt in range(1, max_retries + 1):
        try:
            result_text = str(ddg.text(query, max_results=5))
        except Exception as e:
            # Network error or timeout — retry up to max_retries, then give up
            # with an error string (tools report failures as text, not raises).
            if attempt == max_retries:
                return f"Error during DuckDuckGo search: {e}"
            print(f"web_search_tool: exception '{e}', retrying in {retry_delay_s} seconds ({attempt}/{max_retries})")
            time.sleep(retry_delay_s)
            continue

        # DuckDuckGo signals throttling inline in the response body rather than
        # raising, so check the text for its rate-limit marker.
        if "202 Ratelimit" in result_text and attempt < max_retries:
            print(f"web_search_tool: received '202 Ratelimit', retrying in {retry_delay_s} seconds ({attempt}/{max_retries})")
            time.sleep(retry_delay_s)
            continue

        # Successful response — or the final attempt was still rate-limited,
        # in which case we return the rate-limited text as-is (best effort,
        # matching the original behavior).
        break

    print("Submitted web search successfully")
    return result_text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|