cryogenic22 committed on
Commit
a8872f2
·
verified ·
1 Parent(s): f15234c

Create components/chat.py

Browse files
Files changed (1) hide show
  1. components/chat.py +38 -0
components/chat.py ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # src/components/chat.py
2
+ import streamlit as st
3
+ from langchain.schema import HumanMessage, AIMessage
4
+
5
def format_chat_history(chat_history):
    """Convert LangChain message objects into role/content dicts for the QA system.

    The final entry is excluded: callers append the current prompt before
    invoking the chain, and that prompt is passed separately as "input".
    Any message that is not a HumanMessage is treated as an assistant turn.
    """
    return [
        {
            "role": "user" if isinstance(entry, HumanMessage) else "assistant",
            "content": entry.content,
        }
        for entry in chat_history[:-1]
    ]
14
+
15
def display_chat_interface():
    """Render the RFP chat panel: prior turns, an input box, and the QA call.

    Reads and mutates ``st.session_state.chat_history`` (a list of LangChain
    Human/AI messages) and invokes ``st.session_state.qa_system`` — both are
    assumed to be initialized by the caller before this component runs.
    """
    st.markdown("### 💬 RFP Analysis Chat")

    # Replay the conversation so far; anything that is not a HumanMessage
    # is rendered as an assistant turn.
    for message in st.session_state.chat_history:
        if isinstance(message, HumanMessage):
            st.markdown(f"🧑‍💼 **You**: {message.content}")
        else:
            st.markdown(f"🤖 **Assistant**: {message.content}")

    if prompt := st.chat_input("Ask about the RFPs..."):
        st.session_state.chat_history.append(HumanMessage(content=prompt))

        try:
            with st.spinner("Analyzing documents..."):
                # format_chat_history drops the prompt we just appended;
                # the prompt itself is passed separately as "input".
                formatted_history = format_chat_history(st.session_state.chat_history)
                response = st.session_state.qa_system.invoke({
                    "input": prompt,
                    "chat_history": formatted_history
                })

            # NOTE(review): assumes the chain exposes its answer under the
            # "output" key — confirm against the qa_system's return schema.
            st.session_state.chat_history.append(AIMessage(content=response["output"]))
        except Exception as e:
            # Fix: remove the dangling user turn so the history never ends
            # with an unanswered HumanMessage after a failed QA call (the
            # original left it in, skewing every subsequent formatted history).
            st.session_state.chat_history.pop()
            st.error(f"Error generating response: {e}")