nikhmr1235 committed on
Commit
22dd68c
·
verified ·
1 Parent(s): 9cc9353

add conversation history (multi-turn conversation support)

Browse files
Files changed (1) hide show
  1. app.py +66 -37
app.py CHANGED
@@ -6,6 +6,10 @@ import fitz
6
  from langchain_community.vectorstores import Chroma
7
  from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings
8
  from langchain.text_splitter import RecursiveCharacterTextSplitter
 
 
 
 
9
  from langchain.prompts import PromptTemplate
10
  from langchain_core.runnables import RunnablePassthrough
11
  from langchain_core.output_parsers import StrOutputParser
@@ -159,30 +163,40 @@ async def chat_with_pdf(message, history, state: SessionState):
159
  retriever = state.db.as_retriever()
160
  llm = ChatGoogleGenerativeAI(model=LLM_MODEL, temperature=0.7, google_api_key=google_api_key)
161
 
162
- prompt_template = PromptTemplate(
163
- template="""
164
- You are a helpful assistant for a PDF document.
165
- Answer the user's question based on the following context.
166
- If you don't know the answer, just say that you don't know, don't try to make up an answer.
167
- ----------------
168
- Context: {context}
169
- Question: {question}
170
- """,
171
- input_variables=["context", "question"],
172
  )
173
 
174
- rag_chain = (
175
- {"context": retriever, "question": RunnablePassthrough()}
176
- | prompt_template
177
- | llm
178
- | StrOutputParser()
179
- )
 
 
 
180
 
181
- response = await rag_chain.ainvoke(message)
182
- yield response
 
 
 
 
 
 
 
 
 
183
 
184
  with gr.Blocks(title="PDF Chatbot") as demo:
185
- state = gr.State(value=SessionState())
186
 
187
  gr.Markdown(
188
  """
@@ -190,27 +204,42 @@ with gr.Blocks(title="PDF Chatbot") as demo:
190
  Upload a PDF to start a conversation with your document.
191
  """
192
  )
193
-
194
- file_upload_input = gr.File(
195
- file_types=[".pdf"],
196
- label="Upload your PDF document",
197
- interactive=True
198
- )
199
 
200
- chat_interface = gr.ChatInterface(
201
- fn=chat_with_pdf,
202
- additional_inputs=[state],
203
- chatbot=gr.Chatbot(type="messages"),
204
- textbox=gr.Textbox(placeholder="Type your question here...", scale=7, interactive=False),
205
- examples=[["What is the main topic of the document?"], ["Summarize the key findings."], ["Who are the authors?"]],
206
- title="Chat Interface",
207
- theme="soft"
208
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
209
 
210
  file_upload_input.upload(
211
- fn=process_pdf,
212
  inputs=[file_upload_input, state],
213
- outputs=[file_upload_input, chat_interface.textbox, state]
214
  )
215
 
216
- demo.launch()
 
6
  from langchain_community.vectorstores import Chroma
7
  from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings
8
  from langchain.text_splitter import RecursiveCharacterTextSplitter
9
+ from langchain.chains import create_history_aware_retriever, create_retrieval_chain
10
+ from langchain.chains.combine_documents import create_stuff_documents_chain
11
+ from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
12
+ from langchain_core.messages import HumanMessage, AIMessage
13
  from langchain.prompts import PromptTemplate
14
  from langchain_core.runnables import RunnablePassthrough
15
  from langchain_core.output_parsers import StrOutputParser
 
163
  retriever = state.db.as_retriever()
164
  llm = ChatGoogleGenerativeAI(model=LLM_MODEL, temperature=0.7, google_api_key=google_api_key)
165
 
166
+ condenser_prompt = ChatPromptTemplate.from_messages([
167
+ ("system", "Given a chat history and the latest user question which might reference context in the chat history, formulate a standalone question which can be understood without the chat history. Do NOT answer the question, just reformulate it if needed and otherwise return it as is."),
168
+ MessagesPlaceholder(variable_name="chat_history"),
169
+ ("human", "{input}"),
170
+ ])
171
+
172
+ history_aware_retriever = create_history_aware_retriever(
173
+ llm, retriever, condenser_prompt
 
 
174
  )
175
 
176
+ qa_prompt = ChatPromptTemplate.from_messages([
177
+ ("system", "You are a helpful assistant for a PDF document. Answer the user's question based on the following context. If you don't know the answer, just say that you don't know, don't try to make up an answer.\n\n{context}"),
178
+ MessagesPlaceholder(variable_name="chat_history"),
179
+ ("human", "{input}"),
180
+ ])
181
+
182
+ question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)
183
+
184
+ rag_chain = create_retrieval_chain(history_aware_retriever, question_answer_chain)
185
 
186
+ chat_history_for_chain = []
187
+ for user_msg, ai_msg in history:
188
+ chat_history_for_chain.append(HumanMessage(content=user_msg))
189
+ chat_history_for_chain.append(AIMessage(content=ai_msg))
190
+
191
+ response = await rag_chain.ainvoke({
192
+ "chat_history": chat_history_for_chain,
193
+ "input": message
194
+ })
195
+
196
+ yield response["answer"]
197
 
198
  with gr.Blocks(title="PDF Chatbot") as demo:
199
+ state = gr.State()
200
 
201
  gr.Markdown(
202
  """
 
204
  Upload a PDF to start a conversation with your document.
205
  """
206
  )
 
 
 
 
 
 
207
 
208
+ with gr.Row():
209
+ file_upload_input = gr.File(
210
+ file_types=[".pdf"],
211
+ label="Upload your PDF document",
212
+ interactive=True
213
+ )
214
+
215
+ with gr.Row(visible=False) as chat_row:
216
+ chat_interface = gr.ChatInterface(
217
+ fn=chat_with_pdf,
218
+ additional_inputs=[state],
219
+ chatbot=gr.Chatbot(type="messages"),
220
+ textbox=gr.Textbox(placeholder="Type your question here...", scale=7),
221
+ examples=[["What is the main topic of the document?"], ["Summarize the key findings."], ["Who are the authors?"]],
222
+ title="Chat Interface",
223
+ theme="soft",
224
+ type="messages"
225
+ )
226
+
227
+ async def process_and_show_chat(file, state):
228
+ gr.Info("Processing your PDF, please wait...")
229
+ try:
230
+ new_state = SessionState()
231
+ await process_pdf(file, new_state)
232
+ gr.Info("PDF processed successfully! You can now chat with it.")
233
+ return gr.update(visible=True), gr.update(interactive=False), new_state
234
+ except Exception:
235
+ # The exception is already a gr.Error, so it will be displayed in the UI.
236
+ # We just need to return the correct UI updates.
237
+ return gr.update(visible=False), gr.update(interactive=True), state
238
 
239
  file_upload_input.upload(
240
+ fn=process_and_show_chat,
241
  inputs=[file_upload_input, state],
242
+ outputs=[chat_row, file_upload_input, state]
243
  )
244
 
245
+ demo.launch()