abtsousa committed on
Commit
538782c
·
1 Parent(s): 3c439f4

Add langchain-tavily dependency and implement TavilySearch tool

Browse files
Files changed (5) hide show
  1. pyproject.toml +1 -0
  2. tools/__init__.py +6 -7
  3. tools/search.py +4 -0
  4. tools/wikipedia.py +0 -1
  5. uv.lock +17 -0
pyproject.toml CHANGED
@@ -12,6 +12,7 @@ dependencies = [
12
  "ipywidgets>=8.1.7",
13
  "langchain-community>=0.3.27",
14
  "langchain-google-genai>=2.1.9",
 
15
  "langchain[google-genai,googlegenai,openai]>=0.3.26",
16
  "langgraph>=0.4.8",
17
  "matplotlib>=3.10.5",
 
12
  "ipywidgets>=8.1.7",
13
  "langchain-community>=0.3.27",
14
  "langchain-google-genai>=2.1.9",
15
+ "langchain-tavily>=0.2.11",
16
  "langchain[google-genai,googlegenai,openai]>=0.3.26",
17
  "langgraph>=0.4.8",
18
  "matplotlib>=3.10.5",
tools/__init__.py CHANGED
@@ -1,8 +1,6 @@
1
- from .wikipedia import fetch_wikipedia_content
 
2
  from langchain_core.tools import BaseTool
3
- from langchain_community.document_loaders import WikipediaLoader
4
- from langchain_core.tools import render_text_description_and_args
5
- from langchain_core.tools import tool
6
 
7
  def get_all_tools() -> list[BaseTool]:
8
  """
@@ -11,9 +9,10 @@ def get_all_tools() -> list[BaseTool]:
11
  Returns:
12
  List of BaseTool instances ready for use with LangChain agents
13
  """
14
- tools = []
15
-
16
- tools.append(fetch_wikipedia_content)
 
17
 
18
  return tools
19
 
 
1
+ from .wikipedia import wiki_search
2
+ from .search import web_search
3
  from langchain_core.tools import BaseTool
 
 
 
4
 
5
  def get_all_tools() -> list[BaseTool]:
6
  """
 
9
  Returns:
10
  List of BaseTool instances ready for use with LangChain agents
11
  """
12
+ tools = [
13
+ wiki_search,
14
+ web_search
15
+ ]
16
 
17
  return tools
18
 
tools/search.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ from langchain_core.tools import tool
2
+ from langchain_tavily import TavilySearch
3
+
4
+ web_search = TavilySearch(max_results=3, topic="general")
tools/wikipedia.py CHANGED
@@ -1,6 +1,5 @@
1
  from langchain_core.tools import tool
2
  from langchain_community.document_loaders import WikipediaLoader
3
- from typing import Optional
4
 
5
  @tool
6
  def wiki_search(query: str) -> str:
 
1
  from langchain_core.tools import tool
2
  from langchain_community.document_loaders import WikipediaLoader
 
3
 
4
  @tool
5
  def wiki_search(query: str) -> str:
uv.lock CHANGED
@@ -1260,6 +1260,21 @@ wheels = [
1260
  { url = "https://files.pythonhosted.org/packages/ac/f2/a6a73beec15e90605e6a24c4498a8592d79a72c8e81c18ed0f5e9b7308e9/langchain_openai-0.3.29-py3-none-any.whl", hash = "sha256:71ae6791b3e017ec892a8062f993edc882c6665fd8385aa66e9dc3bff8205996", size = 74316, upload-time = "2025-08-08T15:12:30.794Z" },
1261
  ]
1262
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1263
  [[package]]
1264
  name = "langchain-text-splitters"
1265
  version = "0.3.9"
@@ -1781,6 +1796,7 @@ dependencies = [
1781
  { name = "langchain", extra = ["google-genai", "openai"] },
1782
  { name = "langchain-community" },
1783
  { name = "langchain-google-genai" },
 
1784
  { name = "langgraph" },
1785
  { name = "matplotlib" },
1786
  { name = "openinference-instrumentation-langchain" },
@@ -1803,6 +1819,7 @@ requires-dist = [
1803
  { name = "langchain", extras = ["google-genai", "googlegenai", "openai"], specifier = ">=0.3.26" },
1804
  { name = "langchain-community", specifier = ">=0.3.27" },
1805
  { name = "langchain-google-genai", specifier = ">=2.1.9" },
 
1806
  { name = "langgraph", specifier = ">=0.4.8" },
1807
  { name = "matplotlib", specifier = ">=3.10.5" },
1808
  { name = "openinference-instrumentation-langchain", specifier = ">=0.1.43" },
 
1260
  { url = "https://files.pythonhosted.org/packages/ac/f2/a6a73beec15e90605e6a24c4498a8592d79a72c8e81c18ed0f5e9b7308e9/langchain_openai-0.3.29-py3-none-any.whl", hash = "sha256:71ae6791b3e017ec892a8062f993edc882c6665fd8385aa66e9dc3bff8205996", size = 74316, upload-time = "2025-08-08T15:12:30.794Z" },
1261
  ]
1262
 
1263
+ [[package]]
1264
+ name = "langchain-tavily"
1265
+ version = "0.2.11"
1266
+ source = { registry = "https://pypi.org/simple" }
1267
+ dependencies = [
1268
+ { name = "aiohttp" },
1269
+ { name = "langchain" },
1270
+ { name = "langchain-core" },
1271
+ { name = "requests" },
1272
+ ]
1273
+ sdist = { url = "https://files.pythonhosted.org/packages/17/bb/63ce4058684dddf525af3c8e5dcfab15c5f17515d20241ef6e726ac9e8b7/langchain_tavily-0.2.11.tar.gz", hash = "sha256:ab4f5d0f7fcb276a3905aef2e38c21a334b6cbfc86b405a3238fdc9c6eae1290", size = 22382, upload-time = "2025-07-25T17:26:33.41Z" }
1274
+ wheels = [
1275
+ { url = "https://files.pythonhosted.org/packages/a8/5a/9326f125b4d3055a96200a5035016efe1aac46149cdafc7182e56710fcfe/langchain_tavily-0.2.11-py3-none-any.whl", hash = "sha256:358317c18fbb26500bca665301450e38945f1f4f6a6f4e06406c7674a76c8d5c", size = 26187, upload-time = "2025-07-25T17:26:32.324Z" },
1276
+ ]
1277
+
1278
  [[package]]
1279
  name = "langchain-text-splitters"
1280
  version = "0.3.9"
 
1796
  { name = "langchain", extra = ["google-genai", "openai"] },
1797
  { name = "langchain-community" },
1798
  { name = "langchain-google-genai" },
1799
+ { name = "langchain-tavily" },
1800
  { name = "langgraph" },
1801
  { name = "matplotlib" },
1802
  { name = "openinference-instrumentation-langchain" },
 
1819
  { name = "langchain", extras = ["google-genai", "googlegenai", "openai"], specifier = ">=0.3.26" },
1820
  { name = "langchain-community", specifier = ">=0.3.27" },
1821
  { name = "langchain-google-genai", specifier = ">=2.1.9" },
1822
+ { name = "langchain-tavily", specifier = ">=0.2.11" },
1823
  { name = "langgraph", specifier = ">=0.4.8" },
1824
  { name = "matplotlib", specifier = ">=3.10.5" },
1825
  { name = "openinference-instrumentation-langchain", specifier = ">=0.1.43" },