import os

from datasets import load_dataset
from dotenv import load_dotenv
from huggingface_hub import login
from langchain.tools.retriever import create_retriever_tool
from langchain_community.document_loaders import ArxivLoader, WikipediaLoader
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_community.vectorstores import Chroma
from langchain_core.documents import Document
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.tools import tool
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_groq import ChatGroq
from langchain_huggingface import (ChatHuggingFace, HuggingFaceEmbeddings,
                                   HuggingFaceEndpoint)
from langgraph.graph import START, MessagesState, StateGraph
from langgraph.prebuilt import ToolNode, tools_condition

# Load .env first so the token is available before logging in to the Hub
load_dotenv()
login(token=os.environ["HUGGINGFACE_TOKEN"])
@tool
def calculator(query: str) -> dict:
    """Perform basic arithmetic operations based on the provided query.

    Args:
        query: A mathematical query as a string, e.g., '2 + 2' or '5 * 6'."""
    try:
        # Evaluate the arithmetic expression with builtins stripped so the
        # query cannot call arbitrary Python functions
        result = eval(query, {"__builtins__": {}}, {})
        return {"calculator_result": str(result)}
    except Exception as e:
        return {"error": f"Error evaluating the expression: {e}"}
@tool
def wiki_search(query: str) -> dict:
    """Search Wikipedia for a query and return a maximum of 2 results.

    Args:
        query: The search query."""
    search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
    formatted_search_docs = "\n\n---\n\n".join(
        [
            f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}">\n{doc.page_content}\n</Document>'
            for doc in search_docs
        ]
    )
    return {"wiki_results": formatted_search_docs}
@tool
def web_search(query: str) -> dict:
    """Search Tavily for a query and return a maximum of 3 results.

    Args:
        query: The search query."""
    # TavilySearchResults returns a list of dicts with "url" and "content" keys
    search_results = TavilySearchResults(max_results=3).invoke(query)
    formatted_search_docs = "\n\n---\n\n".join(
        [
            f'<Document source="{result["url"]}">\n{result["content"]}\n</Document>'
            for result in search_results
        ]
    )
    return {"web_results": formatted_search_docs}
@tool
def arxiv_search(query: str) -> dict:
    """Search arXiv for a query and return a maximum of 3 results.

    Args:
        query: The search query."""
    search_docs = ArxivLoader(query=query, load_max_docs=3).load()
    # ArxivLoader metadata carries "Title"/"Published" keys rather than "source"
    formatted_search_docs = "\n\n---\n\n".join(
        [
            f'<Document source="{doc.metadata.get("Title", "")}" published="{doc.metadata.get("Published", "")}">\n{doc.page_content[:1000]}\n</Document>'
            for doc in search_docs
        ]
    )
    return {"arxiv_results": formatted_search_docs}
system_prompt = """You are a helpful assistant tasked with answering questions using a set of tools.
Now, I will ask you a question. Report your thoughts, and finish your answer with the following template:
FINAL ANSWER: [YOUR FINAL ANSWER].
YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated list of numbers and/or strings. If you are asked for a number, don't use comma to write your number neither use units such as $ or percent sign unless specified otherwise. If you are asked for a string, don't use articles, neither abbreviations (e.g. for cities), and write the digits in plain text unless specified otherwise. If you are asked for a comma separated list, apply the above rules depending of whether the element to be put in the list is a number or a string.
Your answer should only start with "FINAL ANSWER: ", then follows with the answer. """
# System message
sys_msg = SystemMessage(content=system_prompt)
# Build a retriever: prepare the embeddings model for the vector store
embeddings = HuggingFaceEmbeddings(
    model_name="sentence-transformers/all-MiniLM-L6-v2"
)  # dim=384

# Load the GAIA validation dataset (gated on the Hub; requires the login above)
dataset = load_dataset("gaia-benchmark/GAIA", name="2023_level1", split="validation")
# Extract questions and their answers
documents = []
for entry in dataset:
question = entry["Question"]
answer = entry["Final answer"]
# Create a document with both the question and the answer as metadata
metadata = {
"task_id": entry["task_id"],
"steps": entry["Annotator Metadata"]["Steps"],
"tools": entry["Annotator Metadata"]["Tools"],
"answer": answer,
}
# Add the question to the list of documents
documents.append(Document(page_content=question, metadata=metadata))
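# Hedged sanity check on the prepared corpus (field names follow the GAIA
# schema accessed above):
#   print(len(documents), documents[0].metadata["task_id"])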
# Insert the documents into Chroma
vectorstore = Chroma.from_documents(
documents=documents,
embedding=embeddings,
collection_name="gaia_validation",
persist_directory="./chroma_store",
)
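# Hedged sanity check: similarity_search is standard Chroma API; k=1 keeps the
# output small.
#   hits = vectorstore.similarity_search("example question", k=1)
#   print(hits[0].page_content, "->", hits[0].metadata["answer"])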
# Use a distinct variable name so the imported create_retriever_tool helper
# is not shadowed
retriever_tool = create_retriever_tool(
    retriever=vectorstore.as_retriever(),
    name="question_search",  # no spaces: safer for providers' tool-name schemas
    description="A tool to retrieve similar questions from a vector store.",
)
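# retriever_tool can also be invoked standalone, e.g.
#   print(retriever_tool.invoke("some question"))
# It is not added to `tools` below; the graph's retriever node queries the
# vector store directly instead.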
tools = [
calculator,
wiki_search,
web_search,
    arxiv_search,
]
# Build graph function
def build_graph(provider: str = "groq"):
    """Build the agent graph for the chosen LLM provider."""
    if provider == "google":
        # Google Gemini
        llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
    elif provider == "groq":
        # Groq https://console.groq.com/docs/models
        llm = ChatGroq(
            model="qwen-qwq-32b", temperature=0
        )  # alternatives: qwen-qwq-32b, gemma2-9b-it
    elif provider == "huggingface":
        # Hugging Face Inference endpoint (HuggingFaceEndpoint takes
        # endpoint_url, not url)
        llm = ChatHuggingFace(
            llm=HuggingFaceEndpoint(
                endpoint_url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
                temperature=0,
            ),
        )
    else:
        raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
# Bind tools to LLM
llm_with_tools = llm.bind_tools(tools)
# Node
def assistant(state: MessagesState):
"""Assistant node"""
return {"messages": [llm_with_tools.invoke(state["messages"])]}
    def retriever(state: MessagesState):
        """Retriever node: prepend the system prompt and a similar solved question."""
        similar = vectorstore.similarity_search(state["messages"][0].content)
        if not similar:
            # Nothing similar found; just prepend the system prompt
            return {"messages": [sys_msg] + state["messages"]}
        example_msg = HumanMessage(
            content=(
                "Here I provide a similar question and answer for reference:\n\n"
                f"Question: {similar[0].page_content}\n"
                f"Answer: {similar[0].metadata['answer']}"
            ),
        )
        return {"messages": [sys_msg] + state["messages"] + [example_msg]}
builder = StateGraph(MessagesState)
builder.add_node("retriever", retriever)
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode(tools))
builder.add_edge(START, "retriever")
builder.add_edge("retriever", "assistant")
builder.add_conditional_edges(
"assistant",
tools_condition,
)
builder.add_edge("tools", "assistant")
# Compile graph
return builder.compile()
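# Optional inspection sketch: compiled LangGraph graphs expose get_graph(),
# and draw_mermaid() renders the wiring as Mermaid source without extra deps:
#   print(build_graph().get_graph().draw_mermaid())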
# test
if __name__ == "__main__":
question = "When was a picture of St. Thomas Aquinas first added to the Wikipedia page on the Principle of double effect?"
# Build the graph
graph = build_graph(provider="groq")
# Run the graph
messages = [HumanMessage(content=question)]
messages = graph.invoke({"messages": messages})
for m in messages["messages"]:
m.pretty_print()
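    # Hedged convenience: the system prompt asks for "FINAL ANSWER: ...", so the
    # last message can be parsed like this (assumes the model followed the template)
    final = messages["messages"][-1].content
    if "FINAL ANSWER:" in final:
        print("Parsed answer:", final.split("FINAL ANSWER:")[-1].strip())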