Julia Ostheimer committed on
Commit ·
8a8026b
1
Parent(s): 6b02b62
Add Langfuse tracing via Langchain callback when generating answer
Browse files- conversation/generate.py +17 -1
conversation/generate.py
CHANGED
|
@@ -8,6 +8,8 @@ from langchain_core.prompts import (
|
|
| 8 |
)
|
| 9 |
from langchain_core.runnables import RunnableParallel
|
| 10 |
from langgraph.graph import MessagesState
|
|
|
|
|
|
|
| 11 |
from pydantic import BaseModel
|
| 12 |
|
| 13 |
from config import app_settings
|
|
@@ -19,6 +21,19 @@ llm = init_chat_model(
|
|
| 19 |
model_provider="openai",
|
| 20 |
api_key=app_settings.llm_api_key
|
| 21 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 22 |
|
| 23 |
|
| 24 |
# RAG answer synthesis prompt
|
|
@@ -82,5 +97,6 @@ def generate(state: MessagesState):
|
|
| 82 |
"question": conversation_messages[-1].content,
|
| 83 |
"chat_history": conversation_messages,
|
| 84 |
"context": docs_content,
|
| 85 |
-
})
|
|
|
|
| 86 |
return {"messages": [response]}
|
|
|
|
| 8 |
)
|
| 9 |
from langchain_core.runnables import RunnableParallel
|
| 10 |
from langgraph.graph import MessagesState
|
| 11 |
+
from langfuse import Langfuse, get_client
|
| 12 |
+
from langfuse.langchain import CallbackHandler
|
| 13 |
from pydantic import BaseModel
|
| 14 |
|
| 15 |
from config import app_settings
|
|
|
|
| 21 |
model_provider="openai",
|
| 22 |
api_key=app_settings.llm_api_key
|
| 23 |
)
|
| 24 |
+
|
| 25 |
+
# Initialize Langfuse client with constructor arguments
|
| 26 |
+
Langfuse(
|
| 27 |
+
public_key=app_settings.langfuse_public_api_key,
|
| 28 |
+
secret_key=app_settings.langfuse_secret_api_key,
|
| 29 |
+
host=app_settings.langfuse_host
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
# Get the configured client instance
|
| 33 |
+
langfuse = get_client()
|
| 34 |
+
|
| 35 |
+
# Initialize the Langfuse handler
|
| 36 |
+
langfuse_handler = CallbackHandler()
|
| 37 |
|
| 38 |
|
| 39 |
# RAG answer synthesis prompt
|
|
|
|
| 97 |
"question": conversation_messages[-1].content,
|
| 98 |
"chat_history": conversation_messages,
|
| 99 |
"context": docs_content,
|
| 100 |
+
}, config={"callbacks": [langfuse_handler]})
|
| 101 |
+
|
| 102 |
return {"messages": [response]}
|