Julia Ostheimer
committed on
Commit
·
3051bba
1
Parent(s):
8a8026b
Add langfuse tracing additionally when invoking llm_with_tools and graph
Browse files
app.py
CHANGED
|
@@ -9,6 +9,9 @@ from langgraph.checkpoint.memory import MemorySaver
|
|
| 9 |
from langgraph.graph import MessagesState, StateGraph, END
|
| 10 |
from langgraph.prebuilt import ToolNode, tools_condition
|
| 11 |
from langgraph.prebuilt import ToolNode
|
|
|
|
|
|
|
|
|
|
| 12 |
import structlog
|
| 13 |
|
| 14 |
from qdrant_client import QdrantClient
|
|
@@ -21,6 +24,21 @@ from ingestion.main import ingest_document
|
|
| 21 |
|
| 22 |
from config import app_settings
|
| 23 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
# Create a logger instance
|
| 25 |
logger = structlog.get_logger(__name__)
|
| 26 |
|
|
@@ -65,7 +83,7 @@ def retrieve(query: str):
|
|
| 65 |
def query_or_respond(state: MessagesState):
|
| 66 |
"""Generate tool call for retrieval or respond."""
|
| 67 |
llm_with_tools = llm.bind_tools([retrieve])
|
| 68 |
-
response = llm_with_tools.invoke(state["messages"])
|
| 69 |
# MessagesState appends messages to state instead of overwriting
|
| 70 |
return {"messages": [response]}
|
| 71 |
|
|
@@ -116,7 +134,7 @@ def bot(message, history) -> list[Any]:
|
|
| 116 |
|
| 117 |
# create text response
|
| 118 |
# TODO: see how state can be set in chat interface
|
| 119 |
-
config = {"configurable": {"thread_id": "abc123"}}
|
| 120 |
response = graph.invoke(
|
| 121 |
{"messages": [{"role": "user", "content": message.get("text")}]},
|
| 122 |
config=config,
|
|
|
|
| 9 |
from langgraph.graph import MessagesState, StateGraph, END
|
| 10 |
from langgraph.prebuilt import ToolNode, tools_condition
|
| 11 |
from langgraph.prebuilt import ToolNode
|
| 12 |
+
from langfuse import Langfuse, get_client
|
| 13 |
+
from langfuse.langchain import CallbackHandler
|
| 14 |
+
|
| 15 |
import structlog
|
| 16 |
|
| 17 |
from qdrant_client import QdrantClient
|
|
|
|
| 24 |
|
| 25 |
from config import app_settings
|
| 26 |
|
| 27 |
+
|
| 28 |
+
# Initialize Langfuse client with constructor arguments
|
| 29 |
+
Langfuse(
|
| 30 |
+
public_key=app_settings.langfuse_public_api_key,
|
| 31 |
+
secret_key=app_settings.langfuse_secret_api_key,
|
| 32 |
+
host=app_settings.langfuse_host
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
# Get the configured client instance
|
| 36 |
+
langfuse = get_client()
|
| 37 |
+
|
| 38 |
+
# Initialize the Langfuse handler
|
| 39 |
+
langfuse_handler = CallbackHandler()
|
| 40 |
+
|
| 41 |
+
|
| 42 |
# Create a logger instance
|
| 43 |
logger = structlog.get_logger(__name__)
|
| 44 |
|
|
|
|
| 83 |
def query_or_respond(state: MessagesState):
|
| 84 |
"""Generate tool call for retrieval or respond."""
|
| 85 |
llm_with_tools = llm.bind_tools([retrieve])
|
| 86 |
+
response = llm_with_tools.invoke(state["messages"], config={"callbacks": [langfuse_handler]})
|
| 87 |
# MessagesState appends messages to state instead of overwriting
|
| 88 |
return {"messages": [response]}
|
| 89 |
|
|
|
|
| 134 |
|
| 135 |
# create text response
|
| 136 |
# TODO: see how state can be set in chat interface
|
| 137 |
+
config = {"configurable": {"thread_id": "abc123"}, "callbacks": [langfuse_handler]}
|
| 138 |
response = graph.invoke(
|
| 139 |
{"messages": [{"role": "user", "content": message.get("text")}]},
|
| 140 |
config=config,
|