cryogenic22 committed on
Commit
50657bf
·
verified ·
1 Parent(s): 7333afe

Update components/chat.py

Browse files
Files changed (1) hide show
  1. components/chat.py +7 -13
components/chat.py CHANGED
@@ -1,3 +1,4 @@
 
1
  import streamlit as st
2
  from langchain.schema import HumanMessage, AIMessage
3
 
@@ -14,23 +15,16 @@ def display_chat_interface():
14
 
15
  # Chat input
16
  if prompt := st.chat_input("Ask about the RFPs..."):
17
- # Add user message to chat history
18
- current_msg = HumanMessage(content=prompt)
19
- st.session_state.chat_history.append(current_msg)
20
-
21
  try:
22
  with st.spinner("Analyzing documents..."):
23
- # Pass only the messages list without formatting
24
- response = st.session_state.qa_system.invoke({
25
- "input": current_msg.content,
26
- "chat_history": st.session_state.chat_history[:-1] # Exclude current message
27
- })
28
-
29
- # Add AI response to chat history
30
- st.session_state.chat_history.append(AIMessage(content=response["output"]))
31
 
32
  except Exception as e:
33
  st.error(f"Error generating response: {e}")
34
- # Print more detailed error information
35
  import traceback
36
  st.error(f"Detailed error: {traceback.format_exc()}")
 
1
+ # components/chat.py
2
  import streamlit as st
3
  from langchain.schema import HumanMessage, AIMessage
4
 
 
15
 
16
  # Chat input
17
  if prompt := st.chat_input("Ask about the RFPs..."):
 
 
 
 
18
  try:
19
  with st.spinner("Analyzing documents..."):
20
+ # Get response from QA system
21
+ response = st.session_state.qa_system({"question": prompt})
22
+
23
+ # Add messages to chat history
24
+ st.session_state.chat_history.append(HumanMessage(content=prompt))
25
+ st.session_state.chat_history.append(AIMessage(content=response['answer']))
 
 
26
 
27
  except Exception as e:
28
  st.error(f"Error generating response: {e}")
 
29
  import traceback
30
  st.error(f"Detailed error: {traceback.format_exc()}")