cryogenic22 commited on
Commit
e48cda9
·
verified ·
1 Parent(s): f1462f3

Update components/chat.py

Browse files
Files changed (1) hide show
  1. components/chat.py +14 -7
components/chat.py CHANGED
@@ -1,38 +1,45 @@
1
- # src/components/chat.py
2
  import streamlit as st
3
- from langchain.schema import HumanMessage, AIMessage
4
 
5
  def format_chat_history(chat_history):
6
  """Format chat history for QA system"""
7
  formatted_history = []
8
- for msg in chat_history[:-1]:
9
  if isinstance(msg, HumanMessage):
10
- formatted_history.append({"role": "user", "content": msg.content})
11
- else:
12
- formatted_history.append({"role": "assistant", "content": msg.content})
13
  return formatted_history
14
 
15
  def display_chat_interface():
16
  """Display chat interface component"""
17
  st.markdown("### 💬 RFP Analysis Chat")
18
 
 
19
  for message in st.session_state.chat_history:
20
  if isinstance(message, HumanMessage):
21
  st.markdown(f"🧑‍💼 **You**: {message.content}")
22
  else:
23
  st.markdown(f"🤖 **Assistant**: {message.content}")
24
 
 
25
  if prompt := st.chat_input("Ask about the RFPs..."):
 
26
  st.session_state.chat_history.append(HumanMessage(content=prompt))
27
 
28
  try:
29
  with st.spinner("Analyzing documents..."):
 
30
  formatted_history = format_chat_history(st.session_state.chat_history)
 
 
31
  response = st.session_state.qa_system.invoke({
32
  "input": prompt,
33
  "chat_history": formatted_history
34
  })
35
 
 
36
  st.session_state.chat_history.append(AIMessage(content=response["output"]))
 
37
  except Exception as e:
38
- st.error(f"Error generating response: {e}")
 
 
1
  import streamlit as st
2
+ from langchain.schema import HumanMessage, AIMessage, SystemMessage
3
 
4
def format_chat_history(chat_history):
    """Convert prior chat turns into SystemMessage entries for the QA chain.

    The most recent message is excluded: it is the current question and is
    passed to the chain separately as ``input``.

    Args:
        chat_history: Sequence of langchain message objects
            (HumanMessage / AIMessage, possibly others).

    Returns:
        list: One ``SystemMessage`` per earlier Human/AI turn, prefixed
        "Human: " or "Assistant: ". Messages of any other type are skipped.
    """
    role_labels = ((HumanMessage, "Human"), (AIMessage, "Assistant"))
    history = []
    for message in chat_history[:-1]:  # Exclude the most recent message
        for msg_type, label in role_labels:
            if isinstance(message, msg_type):
                history.append(SystemMessage(content=f"{label}: {message.content}"))
                break
    return history
13
 
14
def display_chat_interface():
    """Render the RFP chat panel: history, input box, and QA responses.

    Reads and mutates ``st.session_state.chat_history`` (a list of
    HumanMessage/AIMessage objects) and queries
    ``st.session_state.qa_system`` — both must be initialized by the
    caller before this runs.
    """
    st.markdown("### 💬 RFP Analysis Chat")

    # Replay the conversation so far. Streamlit reruns the whole script on
    # every interaction, so this loop redraws history each run.
    for message in st.session_state.chat_history:
        if isinstance(message, HumanMessage):
            st.markdown(f"🧑‍💼 **You**: {message.content}")
        else:
            st.markdown(f"🤖 **Assistant**: {message.content}")

    # Submitting chat_input triggers a rerun in which `prompt` is truthy.
    if prompt := st.chat_input("Ask about the RFPs..."):
        st.session_state.chat_history.append(HumanMessage(content=prompt))
        # The history loop above already ran this rerun, so render the new
        # user turn now; otherwise it would only appear after the NEXT
        # interaction forces another rerun.
        st.markdown(f"🧑‍💼 **You**: {prompt}")

        try:
            with st.spinner("Analyzing documents..."):
                # Prior turns only ([:-1] inside the helper); the current
                # question goes to the chain as "input".
                formatted_history = format_chat_history(st.session_state.chat_history)

                response = st.session_state.qa_system.invoke({
                    "input": prompt,
                    "chat_history": formatted_history
                })

            answer = response["output"]
            st.session_state.chat_history.append(AIMessage(content=answer))
            # Render the reply in this run too, for the same rerun reason.
            st.markdown(f"🤖 **Assistant**: {answer}")
        except Exception as e:
            # Remove the just-appended user turn so a failed call does not
            # leave the history ending in an unanswered HumanMessage, which
            # would pollute formatted history on the next question.
            st.session_state.chat_history.pop()
            st.error(f"Error generating response: {e}")