from typing import TypedDict, Annotated
from os import getenv

from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, SystemMessage, HumanMessage, AIMessage
from langgraph.graph import StateGraph, START, END, MessagesState
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace

from tool import add, substract, multiply, divide, DuckDuckGoSearchTool, WikipediaSearchTool, ArxivSearchTool, PubmedSearchTool

# Load the system prompt from disk once at import time.
with open("prompt.txt", "r", encoding="utf-8") as f:
    system_prompt = f.read()

# System message prepended to every model invocation (not stored in graph state).
sys_msg = SystemMessage(content=system_prompt)

HUGGINGFACEHUB_API_TOKEN = getenv("HUGGINGFACEHUB_API_TOKEN")

# Chat model backed by a Hugging Face inference endpoint.
llm = HuggingFaceEndpoint(
    repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
    huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
)
chat = ChatHuggingFace(llm=llm, verbose=True)

# NOTE(review): `substract` is spelled this way in the `tool` module; renaming
# it here would break the import — fix it at the source if desired. The search
# tools are bound exactly as imported; presumably they are tool instances or
# decorated callables rather than bare classes — confirm in `tool.py`.
tools = [
    add,
    substract,
    multiply,
    divide,
    DuckDuckGoSearchTool,
    WikipediaSearchTool,
    ArxivSearchTool,
    PubmedSearchTool,
]
chat_with_tools = chat.bind_tools(tools)


def simple_graph():
    """Build and compile a tool-calling agent graph.

    The graph loops between the assistant node (tool-bound LLM) and a
    ToolNode: `tools_condition` routes to "tools" whenever the last AI
    message requests a tool call, and to END otherwise.

    Returns:
        The compiled LangGraph runnable.
    """

    def assistant(state: MessagesState):
        """Assistant node: invoke the tool-bound model on the conversation.

        The system message is prepended to the model INPUT only; it is not
        written back into graph state, so it does not accumulate in the
        message history on every turn (the original returned it as part of
        the output, which `add_messages` would append repeatedly).
        """
        return {"messages": [chat_with_tools.invoke([sys_msg] + state["messages"])]}

    builder = StateGraph(MessagesState)
    # BUG FIX: the original registered the raw `chat` model here, leaving the
    # `assistant` node function unused — without bound tools the model could
    # never emit tool calls, so the "tools" branch was unreachable and the
    # system prompt was never applied.
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))

    builder.add_edge(START, "assistant")
    # Route to "tools" when the last AI message contains tool calls, else END.
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")

    return builder.compile()