cryogenic22 committed on
Commit
26ba544
·
verified ·
1 Parent(s): e48cda9

Update components/chat.py

Browse files
Files changed (1) hide show
  1. components/chat.py +10 -19
components/chat.py CHANGED
@@ -1,15 +1,5 @@
1
  import streamlit as st
2
- from langchain.schema import HumanMessage, AIMessage, SystemMessage
3
-
4
def format_chat_history(chat_history):
    """Format chat history for QA system.

    Converts prior turns into SystemMessage entries labelled by role,
    excluding the most recent message (the current query).

    Args:
        chat_history: Sequence of langchain HumanMessage/AIMessage objects.

    Returns:
        list: SystemMessage objects, one per recognized prior turn.
    """
    formatted = []
    # Skip the last entry — it is the in-flight user message, not history.
    for msg in chat_history[:-1]:
        if isinstance(msg, HumanMessage):
            entry = SystemMessage(content=f"Human: {msg.content}")
        elif isinstance(msg, AIMessage):
            entry = SystemMessage(content=f"Assistant: {msg.content}")
        else:
            # Unrecognized message types are silently dropped,
            # matching the original behavior.
            continue
        formatted.append(entry)
    return formatted
13
 
14
  def display_chat_interface():
15
  """Display chat interface component"""
@@ -25,21 +15,22 @@ def display_chat_interface():
25
  # Chat input
26
  if prompt := st.chat_input("Ask about the RFPs..."):
27
  # Add user message to chat history
28
- st.session_state.chat_history.append(HumanMessage(content=prompt))
 
29
 
30
  try:
31
  with st.spinner("Analyzing documents..."):
32
- # Create formatted history that Langchain expects
33
- formatted_history = format_chat_history(st.session_state.chat_history)
34
-
35
- # Make the query with formatted history
36
  response = st.session_state.qa_system.invoke({
37
- "input": prompt,
38
- "chat_history": formatted_history
39
  })
40
 
41
  # Add AI response to chat history
42
  st.session_state.chat_history.append(AIMessage(content=response["output"]))
43
 
44
  except Exception as e:
45
- st.error(f"Error generating response: {e}")
 
 
 
 
1
  import streamlit as st
2
+ from langchain.schema import HumanMessage, AIMessage
 
 
 
 
 
 
 
 
 
 
3
 
4
  def display_chat_interface():
5
  """Display chat interface component"""
 
15
  # Chat input
16
  if prompt := st.chat_input("Ask about the RFPs..."):
17
  # Add user message to chat history
18
+ current_msg = HumanMessage(content=prompt)
19
+ st.session_state.chat_history.append(current_msg)
20
 
21
  try:
22
  with st.spinner("Analyzing documents..."):
23
+ # Pass only the messages list without formatting
 
 
 
24
  response = st.session_state.qa_system.invoke({
25
+ "input": current_msg.content,
26
+ "chat_history": st.session_state.chat_history[:-1] # Exclude current message
27
  })
28
 
29
  # Add AI response to chat history
30
  st.session_state.chat_history.append(AIMessage(content=response["output"]))
31
 
32
  except Exception as e:
33
+ st.error(f"Error generating response: {e}")
34
+ # Print more detailed error information
35
+ import traceback
36
+ st.error(f"Detailed error: {traceback.format_exc()}")