from typing import Annotated, TypedDict
import os

from langchain_google_genai.chat_models import ChatGoogleGenerativeAI
from langchain_openai.chat_models import ChatOpenAI
from langchain_anthropic.chat_models import ChatAnthropic
from langchain_groq.chat_models import ChatGroq
from langchain_core.messages import AnyMessage, HumanMessage, SystemMessage
from langgraph.graph import StateGraph, START
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition
from langfuse import Langfuse
from langfuse.langchain import CallbackHandler

from tools import *  # noqa: F403 -- supplies the tool callables bound in add_tools()


class AgentState(TypedDict):
    """Graph state: the conversation history, merged via the add_messages reducer."""
    messages: Annotated[list[AnyMessage], add_messages]


# for local testing
# from dotenv import load_dotenv
# load_dotenv()


def _require_env(name: str) -> str:
    """Return the value of environment variable *name*.

    Raises:
        ValueError: if the variable is unset.
    """
    value = os.getenv(name)
    if value is None:
        raise ValueError(f"{name} is not set in environment variables")
    return value


class Gaia_Agent:
    """LangGraph tool-calling agent, configurable across several LLM providers.

    Provider and credentials come from environment variables (MODEL_FAMILY,
    MODEL_NAME, LLM_API_KEY, LANGFUSE_*). Tracing goes to Langfuse.
    """

    def __init__(self):
        model_family = _require_env("MODEL_FAMILY").lower()
        model_name = _require_env("MODEL_NAME")
        llm_api_key = _require_env("LLM_API_KEY")
        langfuse_host = _require_env("LANGFUSE_HOST")
        langfuse_secret_key = _require_env("LANGFUSE_SECRET_KEY")
        langfuse_public_key = _require_env("LANGFUSE_PUBLIC_KEY")

        # Setting the llm for the requested provider family.
        if model_family in ("gemini", "google"):
            self.llm = ChatGoogleGenerativeAI(
                model=model_name, google_api_key=llm_api_key, verbose=True
            )
        elif model_family in ("openai", "gpt"):
            self.llm = ChatOpenAI(model=model_name, api_key=llm_api_key, verbose=True)
        elif model_family in ("anthropic", "claude"):
            self.llm = ChatAnthropic(
                model_name=model_name, api_key=llm_api_key, verbose=True
            )
        elif model_family == "groq":
            self.llm = ChatGroq(model=model_name, temperature=0, api_key=llm_api_key)
        else:
            raise ValueError("model family not an acceptable value!!")

        # Instantiating Langfuse registers the client globally; CallbackHandler
        # then picks it up for tracing. The instance itself is not needed here,
        # so the original's unused local binding is dropped.
        Langfuse(
            public_key=langfuse_public_key,
            secret_key=langfuse_secret_key,
            host=langfuse_host,
        )
        self.langfuse_handler = CallbackHandler()

    def add_tools(self):
        """Collect the project's tool functions (wildcard-imported from tools)
        and bind them to the LLM for tool calling."""
        self.tools_list = [
            calculator,
            Web_Search,
            Arxiv_Search,
            Wikipedia_Search,
            get_yt_video_info_metadata,
            get_yt_video_transcript,
            analyze_excel_file,
            read_file,
            save_file_temp,
            analyze_image,
            transcribe_audio_file,
            execute_code_file,
        ]
        self.llm_with_tools = self.llm.bind_tools(tools=self.tools_list)

    def build_graph(self):
        """Compile and return the assistant<->tools LangGraph state graph."""

        def assistant(state: AgentState) -> AgentState:
            """Assistant node: invoke the tool-bound LLM on the history."""
            try:
                response = self.llm_with_tools.invoke(state["messages"])
                # add_messages merges by message id, so returning only the new
                # message is equivalent to (and cleaner than) re-sending the
                # whole history as the original did.
                return {"messages": [response]}
            except Exception as e:
                print(f"encountered an error: {e}")
                raise

        graph_builder = StateGraph(AgentState)
        self.add_tools()

        # nodes
        graph_builder.add_node("assistant", assistant)
        graph_builder.add_node("tools", ToolNode(tools=self.tools_list))

        # edges: assistant routes to tools when it emits tool calls, else END.
        graph_builder.add_edge(START, "assistant")
        graph_builder.add_edge("tools", "assistant")
        graph_builder.add_conditional_edges("assistant", tools_condition)

        self.graph = graph_builder.compile()
        return self.graph

    def Run_Agent(self, prompt: str, sys_prompt_file: str):
        """Run the agent on *prompt* with the system prompt read from
        *sys_prompt_file*; return the final graph state.

        Raises:
            OSError: if the system-prompt file cannot be read.
        """
        # BUG FIX: nothing ever called build_graph(), so self.graph did not
        # exist when Run_Agent was invoked directly (as __main__ does).
        # Build lazily on first use.
        if not hasattr(self, "graph"):
            self.build_graph()
        with open(sys_prompt_file, "r", encoding="utf-8") as f:
            system_prompt = f.read()
        sys_message = SystemMessage(content=system_prompt)
        message = HumanMessage(content=prompt)
        response = self.graph.invoke(
            input={"messages": [sys_message, message]},
            config={"callbacks": [self.langfuse_handler]},
        )
        return response


if __name__ == "__main__":
    agent = Gaia_Agent()
    prompt = (
        "If Eliud Kipchoge could maintain his record-making marathon pace "
        "indefinitely, how many thousand hours would it take him to run the "
        "distance between the Earth and the Moon its closest approach? Please "
        "use the minimum perigee value on the Wikipedia page for the Moon when "
        "carrying out your calculation. Round your result to the nearest 1000 "
        "hours and do not use any comma separators if necessary."
    )
    answer = agent.Run_Agent(prompt, "sys_prompt_v2.txt")
    print(answer["messages"][-1].content)