Amalia committed on
Commit
37f62c4
·
1 Parent(s): bb0c381

add more tools

Browse files
Files changed (2) hide show
  1. app.py +41 -1
  2. requirements.txt +2 -1
app.py CHANGED
@@ -4,6 +4,8 @@ import requests
4
  import inspect
5
  import pandas as pd
6
  from smolagents import CodeAgent, tool, InferenceClientModel
 
 
7
 
8
  # (Keep Constants as is)
9
  # --- Constants ---
@@ -36,6 +38,45 @@ def subtract(a: int, b: int) -> int:
36
  """
37
  return a - b
38
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39
 
40
  # --- Basic Agent Definition ---
41
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
@@ -45,7 +86,6 @@ class BasicAgent:
45
  self.agent = CodeAgent(
46
  model=InferenceClientModel(model_id=model_id, token=os.getenv("HF_TOKEN")),
47
  tools=[multiply, add, subtract],
48
- verbose=True,
49
  max_steps=10,
50
  )
51
  print("BasicAgent initialized.")
 
4
  import inspect
5
  import pandas as pd
6
  from smolagents import CodeAgent, tool, InferenceClientModel
7
+ from langchain_community.tools.tavily_search import TavilySearchResults
8
+ from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
9
 
10
  # (Keep Constants as is)
11
  # --- Constants ---
 
38
  """
39
  return a - b
40
 
41
@tool
def web_search(query: str) -> str:
    """Search Tavily for a query and return maximum 3 results.

    Args:
        query: The search query.
    """
    # `invoke` takes the tool input positionally (or as an input dict); the
    # original `.invoke(query=query)` raises TypeError.
    search_docs = TavilySearchResults(max_results=3).invoke({"query": query})
    # NOTE(review): TavilySearchResults returns a list of dicts with "url"
    # and "content" keys (not Document objects), so the original
    # `doc.metadata[...]` / `doc.page_content` access would raise
    # AttributeError — confirm against the installed langchain version.
    formatted_search_docs = "\n\n---\n\n".join(
        f'<Document source="{doc["url"]}"/>\n{doc["content"]}\n</Document>'
        for doc in search_docs
    )
    # Return a plain string to match the declared return type; smolagents
    # tools are expected to return the annotated type.
    return formatted_search_docs
54
+
55
@tool
def arxiv_search(query: str, load_max_docs: int = 3) -> str:
    """Search Arxiv for a query and return maximum `load_max_docs` results.

    Args:
        query: The search query.
        load_max_docs: The maximum number of documents to load (default 3).
    """
    search_docs = ArxivLoader(query=query, load_max_docs=load_max_docs).load()
    # Each result is a langchain Document; surface the key metadata plus the
    # page content in a simple XML-ish wrapper the agent can read. The
    # original f-string misplaced the quotes so Summary ended up inside the
    # Authors attribute value.
    formatted_search_docs = "\n\n---\n\n".join(
        f'<Document Title="{doc.metadata["Title"]}" '
        f'Published="{doc.metadata["Published"]}" '
        f'Authors="{doc.metadata["Authors"]}" '
        f'Summary="{doc.metadata["Summary"]}"/>\n'
        f'{doc.page_content}\n</Document>'
        for doc in search_docs
    )
    # Return a plain string: the original `Dict[str, str]` annotation is a
    # NameError (typing.Dict is never imported in this file), and the other
    # tools return `str`.
    return formatted_search_docs
70
+
71
@tool
def wikipedia_search(query: str, load_max_docs: int = 3) -> str:
    """Search Wikipedia for a query and return formatted page content.

    Args:
        query: The search query.
        load_max_docs: The maximum number of documents to load (default 3).
    """
    search_docs = WikipediaLoader(query=query, load_max_docs=load_max_docs).load()
    # Format the Documents into a string: the original returned a dict of raw
    # Document objects despite the declared `-> str` return type, and
    # inconsistent with the other search tools.
    formatted_search_docs = "\n\n---\n\n".join(
        f'<Document source="{doc.metadata.get("source", "")}" '
        f'title="{doc.metadata.get("title", "")}"/>\n'
        f'{doc.page_content}\n</Document>'
        for doc in search_docs
    )
    return formatted_search_docs
80
 
81
  # --- Basic Agent Definition ---
82
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
 
86
  self.agent = CodeAgent(
87
  model=InferenceClientModel(model_id=model_id, token=os.getenv("HF_TOKEN")),
88
  tools=[multiply, add, subtract],
 
89
  max_steps=10,
90
  )
91
  print("BasicAgent initialized.")
requirements.txt CHANGED
@@ -1,3 +1,4 @@
1
  gradio
2
  requests
3
- smolagents
 
 
1
  gradio
2
  requests
3
+ smolagents
4
+ langchain-community
+ wikipedia
+ arxiv