Entreprenerdly committed on
Commit
1145323
·
verified ·
1 Parent(s): b14beaa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -8
app.py CHANGED
@@ -75,7 +75,7 @@ def main(message: str):
75
  ChatOpenAI(temperature=0),
76
  vectorstore.as_retriever(),
77
  memory=memory,
78
- return_source_documents=True # This enables returning the source of the answer
79
  )
80
 
81
  cl.Message(content=f"Selected paper: {selected_paper.title}\nLink: {selected_paper.entry_id}\nYou can now ask questions about this paper. Type 'new search' when you want to search for a different paper.").send()
@@ -92,13 +92,17 @@ def main(message: str):
92
  # Answer questions about the selected paper
93
  response = qa_chain({"question": message})
94
  answer = response["answer"]
95
- sources = "\n".join([f"- {doc.metadata['source']}" for doc in response["source_documents"]])
96
-
97
- # Store only the answer in memory
98
- memory.save_context({"question": message}, {"answer": answer})
99
-
100
- # Send the answer and sources as a message
101
- cl.Message(content=f"{answer}\n\nSources:\n{sources}").send()
 
 
 
 
102
 
103
  if __name__ == "__main__":
104
  cl.run()
 
75
  ChatOpenAI(temperature=0),
76
  vectorstore.as_retriever(),
77
  memory=memory,
78
+ return_source_documents=True # Ensure source documents are returned
79
  )
80
 
81
  cl.Message(content=f"Selected paper: {selected_paper.title}\nLink: {selected_paper.entry_id}\nYou can now ask questions about this paper. Type 'new search' when you want to search for a different paper.").send()
 
92
  # Answer questions about the selected paper
93
  response = qa_chain({"question": message})
94
  answer = response["answer"]
95
+ source_documents = response.get("source_documents", [])
96
+
97
+ # Create a message with the answer and sources
98
+ if source_documents:
99
+ sources = "\n".join([f"- {doc.metadata['source']}" for doc in source_documents])
100
+ answer += f"\n\nSources:\n{sources}"
101
+
102
+ cl.Message(content=answer).send()
103
+
104
+ # Store only the answer in memory to prevent errors
105
+ qa_chain.memory.save_context({"question": message}, {"answer": answer})
106
 
107
  if __name__ == "__main__":
108
  cl.run()