import os

from langchain_core.messages import HumanMessage, SystemMessage
from langchain_google_genai import ChatGoogleGenerativeAI
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import ToolNode, tools_condition

from tools import *

# Tools exposed to the model.
# NOTE(review): `substract` looks like a typo for `subtract`, but the name is
# defined in the external `tools` module — rename it there first, then here.
tools = [add, substract, multiply, divide, web_search]


class AgentState(MessagesState):
    """Graph state for the agent.

    Inherits the ``messages`` key from ``MessagesState`` — both
    ``tools_condition`` and ``ToolNode`` read/append ``messages``, so the
    state must carry it.  The ``question`` key is kept for callers that
    still populate it.
    """

    question: str


# Lazily-built tool-calling model.  Built on first use so that merely
# importing this module does not require GOOGLE_API_KEY to be configured.
_chat_with_tools = None


def _get_chat_with_tools():
    """Return the Gemini chat model with tools bound, constructing it once.

    ``bind_tools`` returns a NEW runnable; the original code discarded that
    return value, so the model never actually had the tools attached.
    """
    global _chat_with_tools
    if _chat_with_tools is None:
        llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
        _chat_with_tools = llm.bind_tools(tools)
    return _chat_with_tools


def assistant(state: AgentState):
    """LLM node: respond to the conversation so far, possibly calling tools.

    Returns a partial state update; LangGraph appends the resulting
    AIMessage to ``messages``, which ``tools_condition`` then inspects for
    tool calls to decide whether to route to the ``tools`` node.
    """
    return {"messages": [_get_chat_with_tools().invoke(state["messages"])]}


def build_agent():
    """Compile and return the ReAct-style agent graph.

    Layout: START -> assistant -> (tools -> assistant)* -> END
    """
    builder = StateGraph(AgentState)

    # Nodes: the LLM step and the tool executor.
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))

    # Edges: after the assistant, route to "tools" when the latest AI
    # message requests a tool, otherwise end; tool results loop back to
    # the assistant for the next reasoning step.
    builder.add_edge(START, "assistant")
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")

    return builder.compile()