Update agent.py
agent.py CHANGED
@@ -6,7 +6,7 @@ from serpapi import GoogleSearch
 import pandas as pd
 import faiss
 from langgraph import Graph, LLM, tool  # or other graph library
-
+from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
 
 # ─── 1) Load & embed all documents at startup ───
 # 1a) Read CSV of docs
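Note: the new import pulls WikipediaLoader and ArxivLoader from langchain_community, and those loaders rely on optional backing packages. A likely install line (the commit pins nothing; these are the usual upstream package names):

    pip install langchain-community wikipedia arxiv pymupdf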
@@ -95,7 +95,26 @@ def web_search(query: str, num_results: int = 5) -> str:
     results = search.get_dict().get("organic_results", [])
     snippets = [r.get("snippet", "") for r in results]
     return "\n".join(f"- {s}" for s in snippets)
-
+
+@tool
+def wiki_search(query: str) -> str:
+    """
+    Search Wikipedia for up to 2 pages matching 'query',
+    and return their contents.
+    """
+    # load up to 2 pages
+    pages = WikipediaLoader(query=query, load_max_docs=2).load()
+    # format as plain text
+    return "\n\n---\n\n".join(doc.page_content for doc in pages)
+
+@tool
+def arxiv_search(query: str) -> str:
+    """
+    Search ArXiv for up to 3 abstracts matching 'query',
+    and return their first 1000 characters.
+    """
+    papers = ArxivLoader(query=query, load_max_docs=3).load()
+    return "\n\n---\n\n".join(doc.page_content[:1000] for doc in papers)
 
 # 2) Build your graph
 def build_graph(provider: str = "huggingface") -> Graph:
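For reference, the two new tools can be smoke-tested outside the graph. A minimal sketch, assuming the packages above are installed and network access is available; the queries are illustrative placeholders, not from the commit:

    from langchain_community.document_loaders import WikipediaLoader, ArxivLoader

    # Mirrors wiki_search: fetch up to 2 Wikipedia pages for a query.
    pages = WikipediaLoader(query="vector search", load_max_docs=2).load()
    print(len(pages), "wiki pages;", pages[0].metadata.get("title"))

    # Mirrors arxiv_search: fetch up to 3 papers and truncate each body.
    papers = ArxivLoader(query="retrieval augmented generation", load_max_docs=3).load()
    print(len(papers), "arxiv docs;", papers[0].page_content[:200])

One caveat: ArxivLoader's load() returns parsed full-paper text, so arxiv_search truncates paper bodies rather than abstracts despite its docstring; the loader's get_summaries_as_docs() is the abstract-only variant.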
@@ -107,7 +126,9 @@ def build_graph(provider: str = "huggingface") -> Graph:
     tools = [
         calculator,
         retrieve_docs,
-        web_search,
+        web_search,
+        wiki_search,
+        arxiv_search,  # add more tools here
     ]
     llm_with_tools = llm.with_tools(tools)
 
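Aside: with_tools belongs to the placeholder Graph/LLM API imported at the top ("or other graph library"). If this were wired to a real LangChain chat model instead, the equivalent binding would be roughly the following sketch (an assumption, not part of the commit):

    # Hypothetical equivalent with an actual LangChain chat model llm:
    llm_with_tools = llm.bind_tools(
        [calculator, retrieve_docs, web_search, wiki_search, arxiv_search]
    )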
@@ -118,11 +139,15 @@ def build_graph(provider: str = "huggingface") -> Graph:
     # tool nodes
     graph.add_node("calc", calculator)
     graph.add_node("retrieve", retrieve_docs)
-    graph.add_node("web_search", web_search)
+    graph.add_node("web_search", web_search)
+    graph.add_node("wiki", wiki_search)
+    graph.add_node("arxiv", arxiv_search)
     # allow the LLM to call any tool:
     graph.add_edge("ask", "calc")  # allow ask -> calc
     graph.add_edge("ask", "retrieve")
     graph.add_edge("ask", "web_search")
+    graph.add_edge("ask", "wiki")
+    graph.add_edge("ask", "arxiv")
 
     graph.set_start("ask")
     return graph
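With the wiki and arxiv nodes and edges in place, driving the finished graph would look roughly like the sketch below. Graph/LLM is a stand-in API here, so the run call and the sample question are illustrative assumptions, not part of the commit:

    # Hypothetical driver for the graph built above (placeholder API).
    graph = build_graph(provider="huggingface")
    answer = graph.run("What does recent arXiv work say about FAISS-based retrieval?")
    print(answer)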