Humanlearning committed on
Commit
549edad
·
1 Parent(s): 9765233

+ updated for langgraph

Browse files
Files changed (1) hide show
  1. app.py +8 -3
app.py CHANGED
@@ -8,6 +8,7 @@ from langraph_agent import build_graph
8
  import asyncio
9
  import aiohttp
10
  from langfuse.langchain import CallbackHandler
 
11
 
12
  # Initialize Langfuse CallbackHandler for LangGraph/Langchain (tracing)
13
  langfuse_handler = CallbackHandler()
@@ -23,10 +24,14 @@ class BasicAgent:
23
  self.agent = build_graph()
24
  print("BasicAgent initialized.")
25
  async def aquery(self, question: str) -> str:
 
 
 
26
  print(f"Agent received question (first 50 chars): {question[:50]}...")
27
- response = await self.agent.invoke(question, config={"callbacks": [langfuse_handler]})
28
- print(f"Agent returning fixed answer: {response}")
29
- return response
 
30
 
31
  # Global cache for answers (in-memory)
32
  cached_answers = None
 
8
  import asyncio
9
  import aiohttp
10
  from langfuse.langchain import CallbackHandler
11
+ from langchain_core.messages import HumanMessage
12
 
13
  # Initialize Langfuse CallbackHandler for LangGraph/Langchain (tracing)
14
  langfuse_handler = CallbackHandler()
 
24
  self.agent = build_graph()
25
  print("BasicAgent initialized.")
26
  async def aquery(self, question: str) -> str:
27
+ messages = [HumanMessage(content=question)]
28
+ # messages = self.agent.invoke({"messages": messages}, config={"callbacks": [langfuse_handler]})
29
+
30
  print(f"Agent received question (first 50 chars): {question[:50]}...")
31
+ response = await self.agent.invoke({"messages": messages}, config={"callbacks": [langfuse_handler]})
32
+ answer = response['messages'][-1].content
33
+ print(f"Agent returning fixed answer: {answer}")
34
+ return answer
35
 
36
  # Global cache for answers (in-memory)
37
  cached_answers = None