File size: 1,923 Bytes
d360421
 
e1ecc57
 
 
 
 
 
 
 
 
 
 
d360421
 
 
 
 
e1ecc57
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain_core.messages import AnyMessage, HumanMessage
from langchain_core.runnables import RunnableConfig
from langgraph.prebuilt import create_react_agent
from langgraph.prebuilt.chat_agent_executor import AgentState

from langchain_community.tools import DuckDuckGoSearchRun
from langchain_experimental.utilities import PythonREPL
from langchain_community.tools import WikipediaQueryRun
from langchain_community.utilities import WikipediaAPIWrapper

# Populate os.environ from a local .env file before reading credentials.
load_dotenv()

# Endpoint and credentials for the OpenAI-compatible gateway; any of these
# may be None if the variable is missing from the environment / .env file.
BASE_URL = os.getenv("BASE_URL")
OKTA_ACCESS_TOKEN = os.getenv("OKTA_ACCESS_TOKEN")
SUBSCRIPTION_KEY = os.getenv("SUBSCRIPTION_KEY")

# Chat model routed through a corporate gateway: the Okta access token is
# supplied as the API key, and the subscription key is sent as an extra
# header on every request. "GPT_4_1" is the gateway's model identifier.
chat = ChatOpenAI(
    api_key=OKTA_ACCESS_TOKEN,
    base_url=BASE_URL,
    model="GPT_4_1",
    default_headers={"Subscription-Key": SUBSCRIPTION_KEY},
)

# Tool instances for the agent.
# NOTE(review): python_repl is instantiated here but not included in the
# tool list passed to create_react_agent below — confirm whether it should
# be registered or removed.
duckduckgo = DuckDuckGoSearchRun()
python_repl = PythonREPL()
wikipedia = WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper())

def prompt(state: AgentState, config: RunnableConfig) -> list[AnyMessage]:
    """Build the message list sent to the model on each agent step.

    Prepends a GAIA-benchmark system prompt (optionally mentioning a file
    path from the run config) to the conversation history.

    Args:
        state: Agent state; only ``state["messages"]`` is read.
        config: Runnable config; ``config["configurable"]["file_path"]``
            (optional) is the path of a file referenced by the question.

    Returns:
        A system message dict followed by the existing messages.
    """
    # RunnableConfig's "configurable" key is optional, so guard both lookups
    # instead of assuming config["configurable"] exists (the original raised
    # KeyError when it was absent).
    file_path = (config.get("configurable") or {}).get("file_path", "")
    # Plain string literals: the original used f-strings with no
    # placeholders, which produce byte-identical text.
    system_msg = (
        "You are an AI assistant evaluated on the GAIA benchmark. "
        "Answer questions using only verified information. "
        "Use the available tools to find answers when needed. "
        "If you do not have enough information, reply: 'I do not have enough information to answer the question.' "
        "Provide only the direct answer, with no extra explanation, formatting, or restating the question. "
        "For example, if asked 'What is the capital of France?', answer 'Paris'. "
    )
    if file_path:
        system_msg += f"If a file is referenced, use the file path: {file_path}."
    return [{"role": "system", "content": system_msg}] + state["messages"]

# ReAct-style agent wired to the gateway chat model, the search/Wikipedia
# tools, and the dynamic system prompt above.
# NOTE(review): python_repl (defined above) is not in this tool list —
# confirm whether the omission is intentional.
react_agent = create_react_agent(
    model=chat,
    tools=[duckduckgo, wikipedia],
    prompt=prompt,
)