meesamraza commited on
Commit
8796412
·
verified ·
1 Parent(s): a09f616

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -59
app.py CHANGED
@@ -1,5 +1,6 @@
1
  import os
2
  import logging
 
3
  from dotenv import load_dotenv
4
  import streamlit as st
5
  from PyPDF2 import PdfReader
@@ -9,8 +10,6 @@ from langchain_community.vectorstores import FAISS
9
  from langchain.memory import ConversationBufferMemory
10
  from langchain.chains import ConversationalRetrievalChain
11
  from langchain_groq import ChatGroq
12
- import time
13
- import io
14
 
15
  # --------------------------
16
  # Load environment variables
@@ -63,19 +62,13 @@ def get_vectorstore(text_chunks):
63
  # Conversation chain
64
  # --------------------------
65
  def get_conversation_chain(vectorstore):
66
- try:
67
- llm = ChatGroq(model="llama-3.3-70b-versatile", temperature=0.5)
68
- memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
69
- conversation_chain = ConversationalRetrievalChain.from_llm(
70
- llm=llm,
71
- retriever=vectorstore.as_retriever(),
72
- memory=memory
73
- )
74
- logging.info("βœ… Conversation chain created successfully.")
75
- return conversation_chain
76
- except Exception as e:
77
- logging.error(f"Error creating conversation chain: {e}")
78
- st.error("An error occurred while setting up the conversation chain.")
79
 
80
  # --------------------------
81
  # Handle user input
@@ -85,35 +78,14 @@ def handle_userinput(user_question):
85
  start_time = time.time()
86
  with st.spinner("🤖 Thinking..."):
87
  response = st.session_state.conversation({'question': user_question})
88
- st.session_state.chat_history = response['chat_history']
89
  elapsed_time = round(time.time() - start_time, 2)
90
 
91
- # Display chat history in a chat-like format
92
- for i, message in enumerate(st.session_state.chat_history):
93
- if i % 2 == 0:
94
- st.markdown(f"🧑 **You:** {message.content}")
95
- else:
96
- st.markdown(f"🤖 **Bot:** {message.content}")
97
-
98
- # Stats
99
  st.info(f"⏱ Response Time: {elapsed_time}s | 📄 Words: {len(response['answer'].split())}")
100
  else:
101
  st.warning("⚠ Please process the documents first.")
102
 
103
- # --------------------------
104
- # Export chat
105
- # --------------------------
106
- def export_chat():
107
- if st.session_state.chat_history:
108
- chat_text = "\n".join([f"{'User' if i % 2 == 0 else 'Bot'}: {m.content}" for i, m in enumerate(st.session_state.chat_history)])
109
- buffer = io.BytesIO(chat_text.encode())
110
- st.download_button(
111
- label="💾 Download Chat",
112
- data=buffer,
113
- file_name="chat_history.txt",
114
- mime="text/plain"
115
- )
116
-
117
  # --------------------------
118
  # Main Streamlit App
119
  # --------------------------
@@ -125,8 +97,6 @@ def main():
125
  # Session state initialization
126
  if "conversation" not in st.session_state:
127
  st.session_state.conversation = None
128
- if "chat_history" not in st.session_state:
129
- st.session_state.chat_history = None
130
  if "pages_processed" not in st.session_state:
131
  st.session_state.pages_processed = 0
132
 
@@ -149,11 +119,6 @@ def main():
149
  else:
150
  st.warning("Please upload at least one PDF.")
151
 
152
- # Clear chat
153
- if st.button("🗑 Clear Chat"):
154
- st.session_state.chat_history = None
155
- st.success("Chat cleared.")
156
-
157
  # Main Chat Section
158
  st.subheader("💬 Ask a Question")
159
  user_question = st.text_input("Type your question here...")
@@ -163,19 +128,5 @@ def main():
163
  else:
164
  st.warning("Please enter a question before submitting.")
165
 
166
- # Chat History
167
- if st.session_state.chat_history:
168
- st.subheader("📝 Chat History")
169
- for i, message in enumerate(st.session_state.chat_history):
170
- role = "User" if i % 2 == 0 else "Bot"
171
- st.markdown(f"**{role}:** {message.content}")
172
-
173
- # Export chat
174
- export_chat()
175
-
176
- # Footer Branding
177
- st.markdown("---")
178
- st.markdown("**Made with ❤️ by Meesam Raza | Powered by LLaMA 3.3 & Groq AI**")
179
-
180
  if __name__ == '__main__':
181
  main()
 
1
  import os
2
  import logging
3
+ import time
4
  from dotenv import load_dotenv
5
  import streamlit as st
6
  from PyPDF2 import PdfReader
 
10
  from langchain.memory import ConversationBufferMemory
11
  from langchain.chains import ConversationalRetrievalChain
12
  from langchain_groq import ChatGroq
 
 
13
 
14
  # --------------------------
15
  # Load environment variables
 
62
  # Conversation chain
63
  # --------------------------
64
  def get_conversation_chain(vectorstore):
65
+ llm = ChatGroq(model="llama-3.3-70b-versatile", temperature=0.5)
66
+ memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
67
+ return ConversationalRetrievalChain.from_llm(
68
+ llm=llm,
69
+ retriever=vectorstore.as_retriever(),
70
+ memory=memory
71
+ )
 
 
 
 
 
 
72
 
73
  # --------------------------
74
  # Handle user input
 
78
  start_time = time.time()
79
  with st.spinner("🤖 Thinking..."):
80
  response = st.session_state.conversation({'question': user_question})
 
81
  elapsed_time = round(time.time() - start_time, 2)
82
 
83
+ # Show response only (no chat history)
84
+ st.markdown(f"**🤖 Bot:** {response['answer']}")
 
 
 
 
 
 
85
  st.info(f"⏱ Response Time: {elapsed_time}s | 📄 Words: {len(response['answer'].split())}")
86
  else:
87
  st.warning("⚠ Please process the documents first.")
88
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
89
  # --------------------------
90
  # Main Streamlit App
91
  # --------------------------
 
97
  # Session state initialization
98
  if "conversation" not in st.session_state:
99
  st.session_state.conversation = None
 
 
100
  if "pages_processed" not in st.session_state:
101
  st.session_state.pages_processed = 0
102
 
 
119
  else:
120
  st.warning("Please upload at least one PDF.")
121
 
 
 
 
 
 
122
  # Main Chat Section
123
  st.subheader("💬 Ask a Question")
124
  user_question = st.text_input("Type your question here...")
 
128
  else:
129
  st.warning("Please enter a question before submitting.")
130
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
131
  if __name__ == '__main__':
132
  main()