cryogenic22 committed on
Commit
ddc19ea
·
verified ·
1 Parent(s): 028c9cb

Update components/chat.py

Browse files
Files changed (1) hide show
  1. components/chat.py +6 -13
components/chat.py CHANGED
@@ -1,7 +1,6 @@
1
  # components/chat.py
2
  import streamlit as st
3
  from langchain_core.messages import HumanMessage, AIMessage
4
- import asyncio
5
 
6
  def display_chat_interface():
7
  """Display chat interface component"""
@@ -22,21 +21,15 @@ def display_chat_interface():
22
  if prompt := st.chat_input("Ask about the RFPs..."):
23
  try:
24
  with st.spinner("Analyzing documents..."):
25
- # Use asyncio to handle the async call
26
- async def get_response():
27
- response = await st.session_state.qa_system.acall({
28
- "input": prompt,
29
- "chat_history": st.session_state.chat_history
30
- })
31
- return response
32
-
33
- # Run the async function
34
- response = asyncio.run(get_response())
35
 
36
- if response and 'output' in response:
37
  # Add messages to chat history
38
  st.session_state.chat_history.append(HumanMessage(content=prompt))
39
- st.session_state.chat_history.append(AIMessage(content=response['output']))
40
 
41
  # Force refresh to show new messages
42
  st.rerun()
 
1
  # components/chat.py
2
  import streamlit as st
3
  from langchain_core.messages import HumanMessage, AIMessage
 
4
 
5
  def display_chat_interface():
6
  """Display chat interface component"""
 
21
  if prompt := st.chat_input("Ask about the RFPs..."):
22
  try:
23
  with st.spinner("Analyzing documents..."):
24
+ # Pass the question through the chain
25
+ response = st.session_state.qa_system.invoke({
26
+ "question": prompt # Changed from "input" to "question"
27
+ })
 
 
 
 
 
 
28
 
29
+ if response:
30
  # Add messages to chat history
31
  st.session_state.chat_history.append(HumanMessage(content=prompt))
32
+ st.session_state.chat_history.append(AIMessage(content=response))
33
 
34
  # Force refresh to show new messages
35
  st.rerun()