| | import os |
| | from typing import TypedDict, Annotated |
| | from dotenv import load_dotenv |
| | from langgraph.graph.message import add_messages |
| | from langchain_core.messages import AnyMessage, HumanMessage, AIMessage |
| | from langgraph.prebuilt import ToolNode |
| | from langgraph.graph import START, StateGraph, MessagesState |
| | from langgraph.prebuilt import tools_condition |
| | from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace |
| | from langchain_google_genai import ChatGoogleGenerativeAI |
| | from langchain_groq import ChatGroq |
| | from langchain_openai import ChatOpenAI |
| |
|
| | from tools import ( |
| | divide, |
| | multiply, |
| | modulus, |
| | add, |
| | subtract, |
| | power, |
| | square_root, |
| | web_search, |
| | wiki_search, |
| | arxiv_search, |
| | ) |
| |
|
| | |
# Load provider credentials (e.g. OPENROUTER_API_KEY, HF/Groq tokens) from a
# local .env file into the environment before any LLM client is constructed.
load_dotenv()
| |
|
| |
|
def buildAgent(provider="huggingface"):
    """Build and compile a tool-calling ReAct-style agent graph.

    The graph alternates between an ``assistant`` node (one LLM step) and a
    ``tools`` node (executes any tool calls the model emitted), looping until
    the model produces a reply with no tool calls.

    Args:
        provider: LLM backend to use — one of ``"huggingface"``, ``"groq"``,
            ``"google"``, or ``"openrouter"``. Defaults to ``"huggingface"``.

    Returns:
        A compiled LangGraph graph; invoke it with ``{"messages": [...]}``.

    Raises:
        ValueError: If ``provider`` is not a recognized provider name.
    """
    if provider == "huggingface":
        llm = ChatHuggingFace(
            llm=HuggingFaceEndpoint(repo_id="Qwen/Qwen2.5-Coder-32B-Instruct"),
        )
    elif provider == "groq":
        llm = ChatGroq(model="qwen-qwq-32b")
    elif provider == "google":
        # ChatGoogleGenerativeAI was imported at module level but unreachable;
        # expose it as an explicit provider option (reads GOOGLE_API_KEY from env).
        llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash")
    elif provider == "openrouter":
        llm = ChatOpenAI(
            base_url="https://openrouter.ai/api/v1",
            api_key=os.environ.get("OPENROUTER_API_KEY"),
            model="google/gemini-2.0-flash-exp",
        )
    else:
        # Previously an unknown provider left `llm` unbound and the function
        # crashed later with a confusing NameError at bind_tools(); fail fast.
        raise ValueError(f"Unknown provider: {provider!r}")

    tools = [
        multiply,
        add,
        subtract,
        divide,
        modulus,
        power,
        square_root,
        web_search,
        wiki_search,
        arxiv_search,
    ]

    # Let the model emit structured tool calls for the tools above.
    chat_with_tools = llm.bind_tools(tools)

    def assistant(state: MessagesState):
        # One LLM step: append the model's reply (which may request tools).
        return {
            "messages": [chat_with_tools.invoke(state["messages"])],
        }

    builder = StateGraph(MessagesState)

    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))

    builder.add_edge(START, "assistant")
    # tools_condition routes to the "tools" node when the last AI message
    # contains tool calls, and to END otherwise.
    builder.add_conditional_edges(
        "assistant",
        tools_condition,
    )
    # After tools execute, hand their results back to the assistant.
    builder.add_edge("tools", "assistant")
    return builder.compile()
| |
|
| |
|
if __name__ == "__main__":
    # Quick manual smoke test: run one question through the Groq-backed agent
    # and pretty-print the full message trace.
    question = "When was a picture of St. Thomas Aquinas first added to the Wikipedia page on the Principle of double effect?"
    agent = buildAgent(provider="groq")
    initial_messages = [HumanMessage(content=question)]
    print(initial_messages)
    final_state = agent.invoke({"messages": initial_messages})
    for message in final_state["messages"]:
        message.pretty_print()
| |
|