grshot committed on
Commit
b549b82
·
1 Parent(s): 7eb4491

Adjust Tools and introduce wiki search

Browse files
Files changed (1) hide show
  1. agent.py +29 -6
agent.py CHANGED
@@ -1,5 +1,7 @@
1
  import os
2
 
 
 
3
  # --- Langchain / Langraph ---
4
  from langchain_community.tools.tavily_search import TavilySearchResults
5
  from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
@@ -18,7 +20,7 @@ from langgraph.prebuilt import ToolNode, tools_condition
18
 
19
 
20
  @tool
21
- def search_web_sources(query: str) -> str:
22
  """
23
  Perform a web search using Tavily and return up to 3 relevant documents.
24
  This tool is useful for answering research-based queries that require
@@ -30,14 +32,35 @@ def search_web_sources(query: str) -> str:
30
  Returns:
31
  str: Formatted web search results with metadata and content.
32
  """
33
- search_docs = TavilySearchResults(max_results=3).invoke(query=query)
 
34
  formatted_search_docs = "\n\n---\n\n".join(
35
  [
36
  f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
37
  for doc in search_docs
38
  ]
39
  )
40
- return formatted_search_docs
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
41
 
42
 
43
  @tool
@@ -74,7 +97,7 @@ def build_agent_graph(provider: str = "groq"):
74
  groq_api_key = os.environ.get("GROQ_API_KEY")
75
 
76
  # Define toolset
77
- tools = [search_web_sources, run_python_code]
78
 
79
  # Instantiate LLM
80
  llm = ChatGroq(model="qwen-qwq-32b", temperature=0, api_key=groq_api_key)
@@ -83,10 +106,10 @@ def build_agent_graph(provider: str = "groq"):
83
  llm_with_tools = llm.bind_tools(tools)
84
 
85
  # Assistant: reasoning step that plans next action
86
- def assistant_node(state: MessagesState):
87
  messages = state["messages"]
88
  response = llm_with_tools.invoke(messages)
89
- return {"messages": add_messages(state, [response])}
90
 
91
  # Stubbed retriever node for future integration
92
  def retriever_node(state: MessagesState):
 
1
  import os
2
 
3
+ from langchain_community.document_loaders import WikipediaLoader
4
+
5
  # --- Langchain / Langraph ---
6
  from langchain_community.tools.tavily_search import TavilySearchResults
7
  from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
 
20
 
21
 
22
@tool
def search_web_sources(query: str) -> dict:
    """
    Perform a web search using Tavily and return up to 3 relevant documents.

    This tool is useful for answering research-based queries that require
    up-to-date information from the web.

    Args:
        query: The search query to forward to Tavily.

    Returns:
        dict: ``{"web_results": <str>}`` — the retrieved documents joined with
        ``---`` separators, each wrapped in a ``<Document>`` tag carrying its
        source URL and (optional) page metadata.
    """
    tavily_tool = TavilySearchResults(max_results=3)
    search_docs = tavily_tool.invoke({"query": query})
    # NOTE(review): this assumes each result exposes `.metadata` / `.page_content`
    # (Document-like objects); TavilySearchResults often returns plain dicts —
    # confirm against the installed langchain_community version.
    formatted_search_docs = "\n\n---\n\n".join(
        [
            f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
            for doc in search_docs
        ]
    )
    return {"web_results": formatted_search_docs}
44
+
45
+
46
@tool
def search_wikipedia(query: str) -> dict:
    """
    Search Wikipedia via LangChain's WikipediaLoader and return up to 2 articles.

    Args:
        query: The topic to look up on Wikipedia.

    Returns:
        dict: ``{"wiki_results": <str>}`` — the loaded articles joined with
        ``---`` separators, each wrapped in a ``<WikipediaArticle>`` tag, or a
        human-readable message when nothing was found / an error occurred.
    """
    try:
        loader = WikipediaLoader(query=query, lang="en", load_max_docs=2)
        docs = loader.load()
        if not docs:
            return {"wiki_results": "No content found."}
        formatted_docs = "---".join(
            [
                # Label each article with its own title (falling back to the
                # query) instead of stamping the query on every document.
                f'<WikipediaArticle title="{doc.metadata.get("title", query)}">{doc.page_content}</WikipediaArticle>'
                for doc in docs
            ]
        )
        return {"wiki_results": formatted_docs}
    except Exception as e:
        # Deliberate best-effort: surface the failure to the LLM as text
        # rather than crashing the agent graph.
        return {"wiki_results": f"Error fetching Wikipedia article: {e}"}
64
 
65
 
66
  @tool
 
97
  groq_api_key = os.environ.get("GROQ_API_KEY")
98
 
99
  # Define toolset
100
+ tools = [search_web_sources, search_wikipedia, run_python_code]
101
 
102
  # Instantiate LLM
103
  llm = ChatGroq(model="qwen-qwq-32b", temperature=0, api_key=groq_api_key)
 
106
  llm_with_tools = llm.bind_tools(tools)
107
 
108
    # Assistant: reasoning step that plans next action
    def assistant_node(state: MessagesState) -> dict:
        """Invoke the tool-bound LLM on the conversation so far.

        Args:
            state: LangGraph message state; only ``state["messages"]`` is read.

        Returns:
            dict: ``{"messages": response}`` — the LLM's reply (which may carry
            tool calls) for LangGraph to merge into the state.
        """
        messages = state["messages"]
        # Closure over `llm_with_tools` defined earlier in build_agent_graph.
        response = llm_with_tools.invoke(messages)
        return {"messages": response}
113
 
114
  # Stubbed retriever node for future integration
115
  def retriever_node(state: MessagesState):