rairo committed on
Commit
d5886f6
·
verified ·
1 Parent(s): 2a32d5d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -19
app.py CHANGED
@@ -10,7 +10,12 @@ import os
10
 
11
 
12
 
13
- GOOGLE_API_KEY = os.environ['GOOGLE_API_KEY']
 
 
 
 
 
14
 
15
 
16
  # Processing pdfs
@@ -44,30 +49,33 @@ def get_vectorstore(text_chunks):
44
 
45
  # Handling user questions
46
  def handle_userinput(question):
47
- response = st.session_state.conversation({"question": question})
48
- st.session_state.chat_history = response['chat_history']
49
- st.write(response) # Return only the answer from the response
 
 
 
 
50
 
51
- # Storing conversations as chain of outputs
52
  def get_conversation_chain(vectorstore):
53
  llm = ChatGoogleGenerativeAI(model='gemini-2.0-flash-exp')
54
  memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
55
  conversation_chain = ConversationalRetrievalChain.from_llm(
56
  llm=llm,
57
  retriever=vectorstore.as_retriever(),
58
- memory=memory
59
  )
60
  return conversation_chain
61
 
62
-
63
  def main():
64
- load_dotenv()
65
  st.set_page_config(page_title="Chat with multiple pdfs", page_icon=":books:")
66
 
67
  if "conversation" not in st.session_state:
68
  st.session_state.conversation = None
69
  if "chat_history" not in st.session_state:
70
  st.session_state.chat_history = None
 
 
71
 
72
  st.header("Chat with multiple PDFs :books:")
73
 
@@ -82,17 +90,10 @@ def main():
82
  if st.button("Process"):
83
  with st.spinner("Processing"):
84
  raw_text = get_pdf_text(pdf_docs)
85
-
86
- #convert to chunks
87
  text_chunks = get_text_chunks(raw_text)
88
- st.write(text_chunks)
89
-
90
- #embeddings
91
- vectorstore = get_vectorstore(text_chunks)
92
-
93
-
94
- #create conversation chain
95
- st.session_state.conversation = get_conversation_chain(vectorstore)
96
-
97
 
98
 
 
 
 
10
 
11
 
12
 
13
+ load_dotenv() # Load environment variables at the beginning
14
+ GOOGLE_API_KEY = os.environ.get('GOOGLE_API_KEY') #Use .get to handle if the variable is not present
15
+
16
+ if not GOOGLE_API_KEY:
17
+ st.error("GOOGLE_API_KEY environment variable not set.")
18
+ st.stop()
19
 
20
 
21
  # Processing pdfs
 
49
 
50
  # Handling user questions
51
  def handle_userinput(question):
52
+ if st.session_state.conversation: #Check if conversation is initialised
53
+ response = st.session_state.conversation({"question": question})
54
+ st.session_state.chat_history = response['chat_history']
55
+ answer = response['answer'] # Extract the 'answer' from the response
56
+ st.write(answer)
57
+ else:
58
+ st.write("Please process the documents first.") #Inform user to process documents
59
 
 
60
  def get_conversation_chain(vectorstore):
61
  llm = ChatGoogleGenerativeAI(model='gemini-2.0-flash-exp')
62
  memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
63
  conversation_chain = ConversationalRetrievalChain.from_llm(
64
  llm=llm,
65
  retriever=vectorstore.as_retriever(),
66
+ memory=memory,
67
  )
68
  return conversation_chain
69
 
 
70
  def main():
 
71
  st.set_page_config(page_title="Chat with multiple pdfs", page_icon=":books:")
72
 
73
  if "conversation" not in st.session_state:
74
  st.session_state.conversation = None
75
  if "chat_history" not in st.session_state:
76
  st.session_state.chat_history = None
77
+ if "vectorstore" not in st.session_state: #Store vectorstore in session state
78
+ st.session_state.vectorstore = None
79
 
80
  st.header("Chat with multiple PDFs :books:")
81
 
 
90
  if st.button("Process"):
91
  with st.spinner("Processing"):
92
  raw_text = get_pdf_text(pdf_docs)
 
 
93
  text_chunks = get_text_chunks(raw_text)
94
+ st.session_state.vectorstore = get_vectorstore(text_chunks) #Store in session state
95
+ st.session_state.conversation = get_conversation_chain(st.session_state.vectorstore) #Use stored vectorstore
 
 
 
 
 
 
 
96
 
97
 
98
+ if __name__ == "__main__":
99
+ main()