# Solution by toni5rovic (commit 0b7930f)
from langchain_core.messages import SystemMessage
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_openai import ChatOpenAI
from typing import Optional, TypedDict, Annotated
from langgraph.graph.message import add_messages
from langfuse.langchain import CallbackHandler
from langfuse import get_client
from tools import tools
import os
# Graph state carried through one agent run. The `messages` channel uses the
# add_messages reducer, so node updates are appended to the history rather
# than overwriting it; the remaining keys ride along unchanged.
AgentState = TypedDict(
    "AgentState",
    {
        "task_id": str,
        "question": str,
        "file_name": str,
        "file_path": Optional[str],
        "file_mime": Optional[str],
        "messages": Annotated[list, add_messages],
        "response": Optional[str],
    },
)
# Langfuse observability client; get_client() reads credentials from the
# LANGFUSE_* environment variables.
langfuse = get_client()

# Verify the connection up front so misconfigured credentials surface at
# startup instead of as silent tracing failures later. auth_check() can
# itself raise (network error, missing credentials) — catch that so a
# broken Langfuse setup degrades to a warning instead of aborting import,
# matching the intent of the print-based handling below.
try:
    if langfuse.auth_check():
        print("Langfuse client is authenticated and ready!")
    else:
        print("Authentication failed. Please check your credentials and host.")
except Exception as exc:
    print(f"Langfuse auth check failed: {exc}")

# Callback handler passed to graph.invoke() so runs are traced in Langfuse.
langfuse_handler = CallbackHandler()
# GAIA-benchmark-style prompt: forces a terse, exactly formatted
# "FINAL ANSWER: ..." line so the answer can be extracted and scored
# automatically. NOTE(review): this string is consumed by the LLM at
# runtime — keep its wording and format stable.
system_prompt = """You are a general AI assistant. I will ask you a question.
Report your thoughts, and finish your answer with the following template: FINAL ANSWER: [YOUR FINAL ANSWER].
YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated list of numbers and/or strings.
If you are asked for a number, don't use comma to write your number neither use units such as $ or percent sign unless specified otherwise.
If you are asked for a string, don't use articles, neither abbreviations (e.g. for cities), and write the digits in plain text unless specified otherwise.
If you are asked for a comma separated list, apply the above rules depending of whether the element to be put in the list is a number or a string.
"""
# System message prepended to every conversation in AgentIzzy.__call__.
sys_msg = SystemMessage(content=system_prompt)
class AgentIzzy:
    """ReAct-style LangGraph agent.

    A single LLM "assistant" node with tool-calling loops through a
    ToolNode until the model stops emitting tool calls, then the last
    message's content is returned as the answer.
    """

    def __init__(self):
        # Deterministic (temperature=0) GPT-4o; a couple of retries cover
        # transient API failures.
        llm = ChatOpenAI(
            model="gpt-4o",
            temperature=0,
            max_tokens=None,
            timeout=None,
            max_retries=2,
        )
        llm_with_tools = llm.bind_tools(tools)

        def assistant(state: MessagesState):
            """One LLM step over the accumulated message history."""
            response = llm_with_tools.invoke(state["messages"])
            print("LLM returned:", response)
            return {"messages": [response]}

        builder = StateGraph(MessagesState)
        builder.add_node("assistant", assistant)
        builder.add_node("tools", ToolNode(tools))
        builder.add_edge(START, "assistant")
        # tools_condition routes to "tools" when the assistant emitted tool
        # calls, otherwise to END.
        builder.add_conditional_edges(
            "assistant",
            tools_condition,
        )
        builder.add_edge("tools", "assistant")
        self.graph = builder.compile()
        # To visualize: self.graph.get_graph().draw_mermaid_png()

    def __call__(self, state_input: dict) -> str:
        """Run the graph on ``state_input`` and return the final answer text.

        ``state_input`` must contain a "question" key; an optional
        "messages" key seeds the conversation history. The caller's dict
        is left unmodified (the original implementation mutated it in
        place when injecting the system prompt).
        """
        question = state_input["question"]
        print(f"Agent received question: {question}")
        # Build a fresh state with the system prompt prepended instead of
        # mutating the caller's dict.
        state = {
            **state_input,
            "messages": [sys_msg] + list(state_input.get("messages", [])),
        }
        output = self.graph.invoke(
            state, config={"callbacks": [langfuse_handler]}
        )
        answer = output["messages"][-1].content
        print(
            "Agent response (first 50 chars):",
            answer[:50] + "..." if len(answer) > 50 else answer,
        )
        return answer