import json
from typing import Optional

from langchain_core.messages import SystemMessage, HumanMessage
from langchain_openai.chat_models import ChatOpenAI
from langfuse import Langfuse, get_client
from langfuse.langchain import CallbackHandler
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import tools_condition
from langgraph.prebuilt import ToolNode
class Agent:
    """
    A LangGraph-based agent that answers questions with an OpenAI chat
    model and a set of tools, optionally traced through Langfuse.
    """

    def __init__(
        self,
        model: str,
        tools: list,
        system_prompt_path: str,
        openai_api_key: Optional[str] = None,
        langfuse_callback_handler: Optional[CallbackHandler] = None,
    ):
        """
        Initialize the agent object.

        :param model: The OpenAI model name to use.
        :param tools: List of tools the agent can call.
        :param system_prompt_path: Path to the system prompt text file.
        :param openai_api_key: OpenAI API key for authentication; if None,
            the ChatOpenAI client resolves it from its usual sources.
        :param langfuse_callback_handler: Optional Langfuse callback handler
            for tracking and logging interactions.
        """
        self.chat_model = ChatOpenAI(
            model=model,
            api_key=openai_api_key,
        )
        # Read the system prompt once at construction time.
        with open(system_prompt_path, "r") as file:
            self.system_prompt = file.read()
        self.tools = tools
        if langfuse_callback_handler is not None:
            self.chat_model.callbacks = [langfuse_callback_handler]
        # Disable parallel tool calls so tools execute one at a time and the
        # tool/assistant message sequence stays deterministic.
        self.chat_model_with_tools = self.chat_model.bind_tools(
            tools=tools,
            parallel_tool_calls=False,
        )
        self.graph = self.__build_graph()

    def __call__(self, question: str) -> tuple[str, str]:
        """
        Ask the agent a question and return its reasoning and answer.

        :param question: The question to ask the agent.
        :return: A ``(reasoning, answer)`` tuple parsed from the JSON body of
            the final message produced by the graph.
        :raises json.JSONDecodeError: If the final message content is not
            valid JSON.
        :raises KeyError: If the JSON reply lacks a "reasoning" or "answer"
            field.
        """
        final_state = self.graph.invoke(
            input={
                "messages": [
                    SystemMessage(content=self.system_prompt),
                    HumanMessage(content=question),
                ]
            },
            config={
                # Reuse the model's callbacks (e.g. Langfuse) for this run.
                "callbacks": self.chat_model.callbacks
            },
        )
        reply = json.loads(final_state["messages"][-1].content)
        return reply["reasoning"], reply["answer"]

    def __build_graph(self):
        """
        Build and compile the assistant/tools LangGraph state machine.

        :return: The compiled graph.
        """
        builder = StateGraph(MessagesState)
        # Define nodes: the assistant invokes the model; "tools" executes
        # any tool calls the model emits.
        builder.add_node("assistant", self.__assistant)
        builder.add_node("tools", ToolNode(self.tools))
        # Define edges: start at the assistant; tools_condition routes to
        # "tools" when the last message has tool calls, otherwise to END;
        # after running, tools loop back to the assistant.
        builder.add_edge(START, "assistant")
        builder.add_conditional_edges(
            "assistant",
            tools_condition,
        )
        builder.add_edge("tools", "assistant")
        return builder.compile()

    def __assistant(self, state: MessagesState) -> MessagesState:
        """
        Invoke the tool-bound chat model on the conversation so far.

        :param state: The current state of the agent.
        :return: State update appending the model's response message.
        """
        response = self.chat_model_with_tools.invoke(state["messages"])
        return {"messages": [response]}
if __name__ == "__main__":
    import os

    from langchain_community.tools import DuckDuckGoSearchResults
    from tools import multiply, add, subtract, divide, modulus

    # Initialize the Langfuse singleton with constructor arguments; the
    # CallbackHandler created below picks up this configured client, so no
    # separate client reference is needed here.
    Langfuse(
        public_key=os.environ.get("LANGFUSE_PUBLIC_KEY"),
        secret_key=os.environ.get("LANGFUSE_SECRET_KEY"),
        host='https://cloud.langfuse.com'
    )
    # Initialize the Langfuse handler used to trace the agent's run.
    langfuse_handler = CallbackHandler()
    # Arithmetic tools plus web search.
    tools = [multiply, add, subtract, divide, modulus]
    tools.append(
        DuckDuckGoSearchResults()
    )
    agent = Agent(
        model="gpt-4o",
        tools=tools,
        system_prompt_path="prompts/system_prompt.txt",
        openai_api_key=os.environ.get("OPENAI_API_KEY"),
        langfuse_callback_handler=langfuse_handler
    )
    # response is a (reasoning, answer) tuple.
    response = agent(
        question="""
        Search for Tom Cruise and summarize the results for me.
        """
    )
    print(response)