sebastianfrench committed on
Commit
5d8f022
·
1 Parent(s): d1d61ce

format tools

Browse files
Files changed (3) hide show
  1. agents/search_agent.py +2 -0
  2. graphs/evaluation.py +7 -18
  3. tools/search.py +27 -45
agents/search_agent.py CHANGED
@@ -17,6 +17,8 @@ class SearchAgent:
17
  state = workflow.invoke({
18
  "messages":messages,
19
  }, config={"callbacks": [langfuse_handler]})
 
 
20
  return state["answer"]
21
 
22
  if __name__ == "__main__":
 
17
  state = workflow.invoke({
18
  "messages":messages,
19
  }, config={"callbacks": [langfuse_handler]})
20
+
21
+ print(state["external_information"])
22
  return state["answer"]
23
 
24
  if __name__ == "__main__":
graphs/evaluation.py CHANGED
@@ -1,5 +1,5 @@
1
  from models.models import groq_model, anthropic_model
2
- from tools.search import arxiv_search, taivily_search, serper_search
3
  from langgraph.graph import StateGraph, START, END, MessagesState
4
  from langchain_core.messages import HumanMessage, SystemMessage
5
  from typing import List, TypedDict
@@ -7,7 +7,7 @@ from langgraph.prebuilt import ToolNode
7
 
8
 
9
  tools = [
10
- taivily_search,
11
  serper_search,
12
  ]
13
 
@@ -24,6 +24,9 @@ bound_model_llama = groq_model.bind_tools(tools)
24
  bound_model_antrhropic = anthropic_model.bind_tools(tools)
25
 
26
  def call_node(state: EvaluationState):
 
 
 
27
  question = state["messages"][-1].content
28
  state["question"] = question
29
  response = bound_model_llama.invoke(state["messages"])
@@ -61,21 +64,11 @@ def map_answer(state: EvaluationState):
61
  Map the answer to the final answer
62
  """
63
  answer = anthropic_model.invoke("Map the answer, I want only the number, string or list. ANSWER:"+ state["answer"])
64
- print(answer.content)
65
  return {
66
  "answer": answer.content
67
  }
68
 
69
- def map_tool_answer(state: EvaluationState):
70
- """
71
- Map the tool answer to the final answer
72
- """
73
- last_message = state["messages"][-1]
74
- state["external_information"] = last_message.content
75
-
76
- return state
77
-
78
-
79
  def build_workflow():
80
  """
81
  Build search workflow
@@ -83,16 +76,12 @@ def build_workflow():
83
  workflow = StateGraph(EvaluationState)
84
  workflow.add_node("agent", call_node)
85
  workflow.add_node("action", tool_node)
86
- workflow.add_node("map_tool_answer", map_tool_answer)
87
  workflow.add_node("parse_response", parse_response)
88
  workflow.add_node("map_answer", map_answer)
89
- """ workflow.add_node("action",tool_node)
90
- workflow.add_node("answer", parse_response) """
91
 
92
  workflow.add_edge(START,"agent")
93
  workflow.add_edge("agent", "action")
94
- workflow.add_edge("action", "map_tool_answer")
95
- workflow.add_edge("map_tool_answer", "parse_response")
96
  workflow.add_edge("parse_response", "map_answer")
97
  workflow.add_edge("map_answer", END)
98
 
 
1
  from models.models import groq_model, anthropic_model
2
+ from tools.search import taivily_search, serper_search
3
  from langgraph.graph import StateGraph, START, END, MessagesState
4
  from langchain_core.messages import HumanMessage, SystemMessage
5
  from typing import List, TypedDict
 
7
 
8
 
9
  tools = [
10
+ #taivily_search,
11
  serper_search,
12
  ]
13
 
 
24
  bound_model_antrhropic = anthropic_model.bind_tools(tools)
25
 
26
  def call_node(state: EvaluationState):
27
+ """
28
+ This node call the model with the question and the tools
29
+ """
30
  question = state["messages"][-1].content
31
  state["question"] = question
32
  response = bound_model_llama.invoke(state["messages"])
 
64
  Map the answer to the final answer
65
  """
66
  answer = anthropic_model.invoke("Map the answer, I want only the number, string or list. ANSWER:"+ state["answer"])
67
+
68
  return {
69
  "answer": answer.content
70
  }
71
 
 
 
 
 
 
 
 
 
 
 
72
  def build_workflow():
73
  """
74
  Build search workflow
 
76
  workflow = StateGraph(EvaluationState)
77
  workflow.add_node("agent", call_node)
78
  workflow.add_node("action", tool_node)
 
79
  workflow.add_node("parse_response", parse_response)
80
  workflow.add_node("map_answer", map_answer)
 
 
81
 
82
  workflow.add_edge(START,"agent")
83
  workflow.add_edge("agent", "action")
84
+ workflow.add_edge("action", "parse_response")
 
85
  workflow.add_edge("parse_response", "map_answer")
86
  workflow.add_edge("map_answer", END)
87
 
tools/search.py CHANGED
@@ -1,32 +1,18 @@
1
  from langchain_core.tools import tool
2
  from langchain_community.tools.tavily_search import TavilySearchResults
3
  from langchain_community.utilities import GoogleSerperAPIWrapper
4
- from langchain_community.document_loaders import WikipediaLoader
5
- from langchain_community.document_loaders import ArxivLoader
6
  from dotenv import load_dotenv
 
 
 
 
 
 
7
 
8
  load_dotenv()
9
 
10
  @tool
11
- def wikipedia_search(query: str) -> str:
12
- """Search Wikipedia for a query and return maximum 1 result.
13
- Args:
14
- query: The search query."""
15
- query = "Mercedes Sosa"
16
- search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
17
-
18
- formatted_search_docs = "\n\n---\n\n".join(
19
- [
20
- f'<Document source="{doc.metadata["source"]}"/>\n{doc.page_content}\n</Document>'
21
- for doc in search_docs
22
- if "Closed-ended question" not in doc.metadata.get("title", "")
23
- ]
24
- )
25
-
26
- return {"wiki_results": formatted_search_docs}
27
-
28
- @tool
29
- def taivily_search(query: str) -> str:
30
  """Tavily is a search engine optimized for LLMs, aimed at efficient, quick and persistent search results. Tavily take care of all the burden of searching, scraping, filtering and extracting the most relevant information from online sources.
31
  Args:
32
  query: The search query."""
@@ -35,39 +21,35 @@ def taivily_search(query: str) -> str:
35
 
36
  formatted_search_docs = "\n\n---\n\n".join(
37
  [
38
- f'<Document source="{doc["url"]}""/>\n{doc["content"]}\n</Document>'
39
  for doc in search_docs
40
  ]
41
  )
42
 
43
- return {"web_results": formatted_search_docs}
44
-
45
-
46
- @tool
47
- def arxiv_search(query: str) -> str:
48
- """Search Arxiv for a query and return maximum 3 result.
49
- Args:
50
- query: The search query."""
51
- search_docs = ArxivLoader(query=query, load_max_docs=3).load()
52
- print(search_docs)
53
- formatted_search_docs = "\n\n---\n\n".join(
54
- [
55
- f'<Document source="{doc.metadata["Title"]}""/>\n{doc.page_content[:1000]}\n</Document>'
56
- for doc in search_docs
57
- ]
58
  )
59
- return {"arxiv_results": formatted_search_docs}
60
 
61
  @tool
62
- def serper_search(query: str) -> str:
63
  """
64
- Search Google for a query and return maximum 2 result.
65
- Args: query: The search query.
 
66
  """
67
- search_docs = GoogleSerperAPIWrapper(k=2)
68
  result = search_docs.run(query)
69
 
70
- return {"google_results": result}
 
 
 
 
 
 
 
 
71
 
72
-
73
-
 
1
  from langchain_core.tools import tool
2
  from langchain_community.tools.tavily_search import TavilySearchResults
3
  from langchain_community.utilities import GoogleSerperAPIWrapper
 
 
4
  from dotenv import load_dotenv
5
+ from typing import Annotated
6
+ from typing_extensions import Annotated
7
+ from langchain_core.tools.base import InjectedToolCallId
8
+ from langchain_core.runnables import RunnableConfig
9
+ from langgraph.types import Command
10
+ from langchain_core.messages import ToolMessage
11
 
12
  load_dotenv()
13
 
14
  @tool
15
+ def taivily_search(query: str, tool_call_id: Annotated[str, InjectedToolCallId], config: RunnableConfig):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  """Tavily is a search engine optimized for LLMs, aimed at efficient, quick and persistent search results. Tavily take care of all the burden of searching, scraping, filtering and extracting the most relevant information from online sources.
17
  Args:
18
  query: The search query."""
 
21
 
22
  formatted_search_docs = "\n\n---\n\n".join(
23
  [
24
+ f"""# Taivily \n source="{doc["url"]}"" \n ## Content \n {doc["content"]} \n---"""
25
  for doc in search_docs
26
  ]
27
  )
28
 
29
+ return Command(
30
+ update={
31
+ "external_information": f"{config.get('external_information', '')}\n\n---\n\n{formatted_search_docs}",
32
+ "messages": [ToolMessage(content=formatted_search_docs, tool_call_id=tool_call_id)]
33
+ }
 
 
 
 
 
 
 
 
 
 
34
  )
 
35
 
36
  @tool
37
+ def serper_search(query: str, tool_call_id: Annotated[str, InjectedToolCallId], config: RunnableConfig) -> Command:
38
  """
39
+ lightning-fast Google search results in 1-2 seconds.
40
+ Args:
41
+ query: The search query.
42
  """
43
+ search_docs = GoogleSerperAPIWrapper(k=8)
44
  result = search_docs.run(query)
45
 
46
+ formatted_result = f"""# Google Search Results \n\n## Results \n\n{result}"""
47
+
48
+ return Command(
49
+ update={
50
+ "external_information": f"{config.get('external_information', '')}\n\n---\n\n{formatted_result}",
51
+ "messages": [ToolMessage(content=formatted_result, tool_call_id=tool_call_id)]
52
+ }
53
+ )
54
+
55