"""LangGraph ReAct-style agent: a Groq-hosted LLM with Wikipedia and
DuckDuckGo search tools, wired into a StateGraph that loops
assistant -> tools -> assistant until no more tool calls are emitted."""

import os

from dotenv import load_dotenv
from langchain_community.document_loaders import WikipediaLoader
from langchain_community.tools import DuckDuckGoSearchResults
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.tools import tool
from langchain_groq import ChatGroq
from langgraph.graph import START, MessagesState, StateGraph
from langgraph.prebuilt import ToolNode, tools_condition

# Load GROQ_API_KEY (and any other secrets) from a local .env file.
load_dotenv()


@tool
def wiki_search(query: str) -> str:
    """Search Wikipedia for a query and return maximum 2 results.

    Args:
        query: The search query.

    Returns:
        The page contents of up to 2 matching articles, separated by
        ``---`` dividers.
    """
    search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
    # Return a plain string so the declared `-> str` contract holds
    # (the original wrapped this in a dict, contradicting the annotation).
    return "\n\n---\n\n".join(
        f"\n{doc.page_content}\n" for doc in search_docs
    )


@tool
def web_search(query: str) -> str:
    """Search DuckDuckGo for a query and return maximum 3 results.

    Args:
        query: The search query.

    Returns:
        The raw search-results string produced by DuckDuckGo.
    """
    # Return the string directly instead of a dict, matching `-> str`.
    return DuckDuckGoSearchResults(max_results=3).invoke(query)


system_prompt = "You are a helpful assistant"
sys_msg = SystemMessage(content=system_prompt)

tools = [
    wiki_search,
    web_search,
]

llm = ChatGroq(model="qwen-qwq-32b", temperature=0)
llm_with_tools = llm.bind_tools(tools)


def assistant(state: MessagesState):
    """Assistant node: invoke the tool-bound LLM on the running conversation.

    The system prompt is prepended on every call (the original built
    `sys_msg` but never passed it to the model, so it had no effect).
    """
    return {"messages": [llm_with_tools.invoke([sys_msg] + state["messages"])]}


# Graph wiring: START -> assistant; assistant conditionally routes to the
# tools node (when the LLM emitted tool calls) or to END; tools feed their
# results back into the assistant.
builder = StateGraph(MessagesState)
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode(tools))
builder.add_edge(START, "assistant")
builder.add_conditional_edges(
    "assistant",
    tools_condition,
)
builder.add_edge("tools", "assistant")
graph = builder.compile()


def get_answer(query: str) -> str:
    """Run the agent graph on a single question and return the final answer.

    Args:
        query: The user's question.

    Returns:
        The text content of the last message produced by the graph.
    """
    result = graph.invoke({"messages": [HumanMessage(content=query)]})
    # Bug fix: the original referenced an undefined name (`result` was never
    # assigned) and used ["content"] indexing; messages are objects exposing
    # a `.content` attribute.
    return result["messages"][-1].content


if __name__ == "__main__":
    question = (
        "In the video https://www.youtube.com/watch?v=L1vXCYZAYYM, what "
        "is the highest number of bird species to be on camera simultaneously?"
    )
    result = graph.invoke({"messages": [HumanMessage(content=question)]})
    for message in result["messages"]:
        message.pretty_print()