File size: 2,651 Bytes
2a6057d
ec639fe
2a6057d
 
 
 
 
 
edf5eaf
2a6057d
32aa30b
a52d511
bb4ec09
2a6057d
ec639fe
2a6057d
 
 
 
 
 
 
 
33dc421
2a6057d
bb4ec09
 
 
 
2a6057d
 
 
 
 
0538019
 
 
ec639fe
0538019
 
edf5eaf
2a6057d
 
349e20b
2a6057d
 
349e20b
 
2a6057d
 
 
 
 
 
 
 
 
0538019
2a6057d
57a3c14
2a6057d
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
import os
from pathlib import Path
from dotenv import load_dotenv
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import tools_condition
from langgraph.prebuilt import ToolNode
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.messages import SystemMessage, HumanMessage

from tools.math_tools import add, subtract, multiply, divide, modulus, power, sqrt
from tools.search_tools import search_wikipedia, web_search, arxiv_search
from tools.image_video_tools import query_image
from tools.file_tools import analyze_excel_file, execute_python_code, analyze_csv_file, save_and_read_file, download_file_from_url, extract_text_from_image

# Load the agent's system prompt once at import time.
# Explicit UTF-8 avoids locale-dependent decoding: read_text() with no
# argument falls back to the platform's default encoding.
system_prompt = Path("system_prompt.txt").read_text(encoding="utf-8")

def build_graph():
    """Build and compile the tool-calling agent graph.

    The graph is a ReAct-style loop: the ``assistant`` node invokes the
    Gemini LLM (with tools bound); ``tools_condition`` routes to the
    ``tools`` node whenever the response contains tool calls, and tool
    results are fed back to the assistant until no calls remain.

    Returns:
        The compiled LangGraph runnable (invoke with ``{"messages": [...]}``).
    """
    # BUG FIX: load_dotenv was imported but never called, so GOOGLE_API_KEY
    # defined only in a .env file was silently missing (os.getenv -> None).
    load_dotenv()

    llm = ChatGoogleGenerativeAI(
        model="gemini-2.0-flash-001",
        temperature=0.8,
        max_tokens=None,
        timeout=None,
        max_retries=2,
        google_api_key=os.getenv("GOOGLE_API_KEY"),  # read from environment
    )

    # Every tool the assistant may call, grouped by domain.
    tools = [
        add, subtract, multiply, divide, modulus, power, sqrt,   # math
        web_search, arxiv_search, search_wikipedia,              # search
        query_image,                                             # vision
        analyze_excel_file, execute_python_code, analyze_csv_file,
        save_and_read_file, download_file_from_url,
        extract_text_from_image,                                 # files
    ]

    llm_with_tools = llm.bind_tools(tools)

    def assistant(state: MessagesState):
        """Assistant node: ensure the system prompt is present, then call the LLM."""
        messages = state["messages"]
        # Prepend the system message only if the conversation lacks one,
        # so repeated loop iterations don't duplicate it.
        if not any(isinstance(m, SystemMessage) for m in messages):
            messages = [SystemMessage(content=system_prompt)] + messages
        response = llm_with_tools.invoke(messages)
        return {"messages": [response]}

    builder = StateGraph(MessagesState)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))
    builder.add_edge(START, "assistant")
    # tools_condition routes to "tools" when the LLM emitted tool calls,
    # otherwise to END.
    builder.add_conditional_edges(
        "assistant",
        tools_condition,
    )
    builder.add_edge("tools", "assistant")

    # Compile graph into an executable runnable.
    return builder.compile()


if __name__ == "__main__":
    # Demo run: compile the agent, ask one question, and pretty-print the
    # full message transcript (including any intermediate tool calls).
    demo_question = "Who did the actor who played Ray in the Polish-language version of Everybody Loves Raymond play in Magda M.? Give only the first name."
    agent = build_graph()
    result = agent.invoke({"messages": [HumanMessage(content=demo_question)]})
    for message in result["messages"]:
        message.pretty_print()