File size: 2,102 Bytes
5a7d4ca
9332d1a
5a7d4ca
83da958
5a7d4ca
 
825032d
20e8a32
5a7d4ca
83da958
 
5a7d4ca
 
 
 
 
 
 
 
 
9332d1a
5a7d4ca
 
 
 
 
 
 
 
 
 
20e8a32
825032d
20e8a32
83da958
20e8a32
148189b
83da958
 
5a7d4ca
 
9332d1a
 
 
 
5a7d4ca
9332d1a
5a7d4ca
 
20e8a32
5a7d4ca
 
 
 
 
20e8a32
5a7d4ca
 
 
148189b
 
20e8a32
5a7d4ca
9332d1a
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
# imports
from typing import TypedDict, Annotated, Optional
from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, SystemMessage
from langchain_openai import AzureChatOpenAI
from langgraph.graph import START, StateGraph
from tools import duckduck_tool, wiki_RAG_tool, image_analyser_tool, audio_transcriber_tool, python_script_opener
from langgraph.prebuilt import ToolNode, tools_condition

import prompts_lib as my_prompts

import os
from dotenv import load_dotenv  

# load environment variables from a local .env file (Azure OpenAI credentials below)
load_dotenv()  # reads .env into os.environ; no-op if the file is absent

# define state
class State(TypedDict):
    """Graph state shared across nodes.

    messages: conversation history; the `add_messages` reducer appends
        returned messages instead of overwriting the list.
    file_path: optional path to an input file attached to the task
        (consumed by the assistant node to notify the LLM).
    """
    messages: Annotated[list[AnyMessage], add_messages]
    file_path: Optional[str]

# create llm interface — Azure OpenAI chat model, configured entirely from
# environment variables (loaded above via load_dotenv). temperature=0 for
# deterministic tool-calling behavior.
# NOTE(review): os.environ.get returns None for missing keys — presumably the
# constructor raises later; verify required variables are set in deployment.
llm = AzureChatOpenAI(
    deployment_name = os.environ.get("AZURE_OPENAI_DEPLOYMENT_NAME"),
    openai_api_key = os.environ.get("AZURE_OPENAI_API_KEY"),
    azure_endpoint = os.environ.get("AZURE_OPENAI_ENDPOINT"),
    openai_api_version = os.environ.get("OPENAI_API_VERSION"),
    temperature=0
    )

# build tool list and bind it to the model so the LLM can emit tool calls
tools = [duckduck_tool, wiki_RAG_tool, image_analyser_tool, audio_transcriber_tool, python_script_opener]
chat_w_tools = llm.bind_tools(tools)

# load system prompt (variant 2 from the local prompts library)
system_prompt = my_prompts.system_prompt2
system_message = SystemMessage(content=system_prompt)

# define nodes
def assistant(state: State):
    """LLM node: invoke the tool-bound chat model on the conversation.

    Builds the prompt as [system message] + history, appending a one-off
    notice about an attached file when `file_path` is set. The notice is
    added to a LOCAL list only — the original code appended it to
    state["messages"] in place, which bypassed the `add_messages` reducer
    and duplicated the notice every time the assistant node re-ran after
    a tool call.

    Returns a partial state update; `add_messages` merges the new AI
    message into the history.
    """
    prompt: list[AnyMessage] = [system_message] + state["messages"]

    file_path = state.get("file_path")
    if file_path:
        # runtime string preserved from the original implementation
        prompt.append(SystemMessage(content=f"File path provided: {file_path}"))

    return {
        "messages": [chat_w_tools.invoke(prompt)],
    }


# define graph
builder = StateGraph(State)

# add nodes
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode(tools))

# define edges
builder.add_edge(START, "assistant")
builder.add_conditional_edges("assistant", tools_condition,
                               {"tools": "tools", "__end__": "__end__"})
builder.add_edge("tools", "assistant")
# compile gtaph
# agent = builder.compile()

class Agent:
    """Thin wrapper holding the compiled LangGraph workflow.

    NOTE(review): despite the name, `self.builder` stores the COMPILED
    graph (builder.compile()), not the StateGraph builder — callers
    presumably run `agent.builder.invoke(...)`; consider renaming in a
    coordinated change.
    """

    def __init__(self):
        # compile the module-level builder once per Agent instance
        self.builder = builder.compile()