Lasdw committed on
Commit
eae50b4
·
1 Parent(s): 5a47613

Improved search capabilities

Browse files
Files changed (1) hide show
  1. agent.py +10 -17
agent.py CHANGED
@@ -25,7 +25,7 @@ import html2text
25
  from apify_client import ApifyClient
26
  from langchain_community.document_loaders import WikipediaLoader
27
  from langchain_community.document_loaders import ArxivLoader
28
- from langchain_community.utilities.tavily_search import TavilySearchAPIWrapper # For Tavily search
29
 
30
  load_dotenv()
31
 
@@ -315,7 +315,7 @@ def format_search_results(results: List[Dict], query: str) -> str:
315
  formatted_results += f" {result['snippet']}\n"
316
  formatted_results += "\n"
317
 
318
- return str(items)
319
 
320
  def fallback_search(query: str) -> str:
321
  """Fallback search method using DuckDuckGo when Apify is not available"""
@@ -449,11 +449,11 @@ def tavily_search(query: str, search_depth: str = "basic") -> str:
449
 
450
  print(f"Searching Tavily for: {query} (depth: {search_depth})")
451
 
452
- # Initialize the Tavily search wrapper
453
- search = TavilySearchAPIWrapper()
454
 
455
  # Execute the search
456
- results = search.results(query, search_depth=search_depth)
457
 
458
  if not results:
459
  return f"No Tavily search results found for '{query}'. Try refining your search."
@@ -552,7 +552,8 @@ IMPORTANT: You MUST strictly follow the ReAct pattern (Reasoning, Action, Observ
552
  4. Based on the observation, continue with another thought
553
  5. This cycle repeats until you have enough information to provide a final answer
554
 
555
- NEVER fake or simulate tool output yourself. ALWAYS wait for the real observation from the tool.
 
556
 
557
  The way you use the tools is by specifying a json blob.
558
  Specifically, this json should have an `action` key (with the name of the tool to use) and an `action_input` key (with the input to the tool going here).
@@ -713,14 +714,6 @@ def assistant(state: AgentState) -> Dict[str, Any]:
713
  # Combine system message with user messages
714
  messages = [system_msg] + user_messages
715
 
716
- # Print the full context of messages being sent to the LLM
717
- print("\n=== INPUT TO LLM ===")
718
- for i, msg in enumerate(messages):
719
- msg_type = type(msg).__name__
720
- content_preview = msg.content + "..." if len(msg.content) > 150 else msg.content
721
- print(f"Message {i} ({msg_type}): {content_preview}")
722
- print("=== END INPUT ===\n")
723
-
724
  # Get response from the assistant
725
  response = chat_with_tools.invoke(messages, stop=["Observation:"])
726
  print(f"Assistant response type: {type(response)}")
@@ -1186,7 +1179,7 @@ def create_agent_graph() -> StateGraph:
1186
  builder.add_edge("tavily_search", "assistant")
1187
  builder.add_edge("arxiv_search", "assistant")
1188
 
1189
- # Compile with a reasonable recursion limit to prevent infinite loops
1190
  return builder.compile()
1191
 
1192
  # Main agent class that integrates with your existing app.py
@@ -1216,7 +1209,7 @@ class TurboNerd:
1216
 
1217
  try:
1218
  # Set a reasonable recursion limit
1219
- result = self.graph.invoke(initial_state, config={"recursion_limit": 15})
1220
 
1221
  # Print the final state for debugging
1222
  print(f"Final state keys: {result.keys()}")
@@ -1240,7 +1233,7 @@ class TurboNerd:
1240
  # Example usage:
1241
  if __name__ == "__main__":
1242
  agent = TurboNerd(max_execution_time=60)
1243
- response = agent("When was a picture of St. Thomas Aquinas first added to the Wikipedia page on the Principle of double effect? Use Tavily Search")
1244
  print("\nFinal Response:")
1245
  print(response)
1246
 
 
25
  from apify_client import ApifyClient
26
  from langchain_community.document_loaders import WikipediaLoader
27
  from langchain_community.document_loaders import ArxivLoader
28
+ from langchain_community.tools.tavily_search import TavilySearchResults # For Tavily search
29
 
30
  load_dotenv()
31
 
 
315
  formatted_results += f" {result['snippet']}\n"
316
  formatted_results += "\n"
317
 
318
+ return formatted_results
319
 
320
  def fallback_search(query: str) -> str:
321
  """Fallback search method using DuckDuckGo when Apify is not available"""
 
449
 
450
  print(f"Searching Tavily for: {query} (depth: {search_depth})")
451
 
452
+ # Initialize the Tavily search tool
453
+ search = TavilySearchResults(api_key=tavily_api_key)
454
 
455
  # Execute the search
456
+ results = search.invoke({"query": query, "search_depth": search_depth})
457
 
458
  if not results:
459
  return f"No Tavily search results found for '{query}'. Try refining your search."
 
552
  4. Based on the observation, continue with another thought
553
  5. This cycle repeats until you have enough information to provide a final answer
554
 
555
+ NEVER fake or simulate tool output yourself. You can try to use the tools multiple times if needed and try using multiple tools if needed.
556
+ Give preference to using Tavily Search and Wikipedia Search before using web_search or webpage_scrape.
557
 
558
  The way you use the tools is by specifying a json blob.
559
  Specifically, this json should have an `action` key (with the name of the tool to use) and an `action_input` key (with the input to the tool going here).
 
714
  # Combine system message with user messages
715
  messages = [system_msg] + user_messages
716
 
 
 
 
 
 
 
 
 
717
  # Get response from the assistant
718
  response = chat_with_tools.invoke(messages, stop=["Observation:"])
719
  print(f"Assistant response type: {type(response)}")
 
1179
  builder.add_edge("tavily_search", "assistant")
1180
  builder.add_edge("arxiv_search", "assistant")
1181
 
1182
+ # Compile the graph
1183
  return builder.compile()
1184
 
1185
  # Main agent class that integrates with your existing app.py
 
1209
 
1210
  try:
1211
  # Set a reasonable recursion limit
1212
+ result = self.graph.invoke(initial_state, {"recursion_limit": 100})
1213
 
1214
  # Print the final state for debugging
1215
  print(f"Final state keys: {result.keys()}")
 
1233
  # Example usage:
1234
  if __name__ == "__main__":
1235
  agent = TurboNerd(max_execution_time=60)
1236
+ response = agent("What is the last sentence of albert einstein's wikipedia page?")
1237
  print("\nFinal Response:")
1238
  print(response)
1239