Entreprenerdly committed on
Commit
fece03e
·
verified ·
1 Parent(s): 1145323

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -11
app.py CHANGED
@@ -11,7 +11,7 @@ import os
11
  # Set your OpenAI API key
12
  os.environ["OPENAI_API_KEY"] = "sk-proj-vFPqdrr801blzZCRBjztT3BlbkFJJJeQVcc62PA40cQ1S9Zv"
13
 
14
- # Initialize global variables
15
  selected_paper = None
16
  qa_chain = None
17
  papers = []
@@ -28,7 +28,6 @@ def main(message: str):
28
  global selected_paper, qa_chain, papers, state
29
 
30
  if state == "SEARCH":
31
- # Search for papers
32
  search = arxiv.Search(
33
  query=message,
34
  max_results=5,
@@ -36,12 +35,11 @@ def main(message: str):
36
  )
37
 
38
  papers = list(search.results())
39
-
40
  if not papers:
41
  cl.Message(content="No papers found. Please try another search query.").send()
42
  return
43
 
44
- # Create a numbered list of papers with links
45
  paper_list = "\n".join([f"{i+1}. {paper.title} - {paper.authors[0]}\nLink: {paper.entry_id}" for i, paper in enumerate(papers)])
46
  cl.Message(content=f"Please select a paper by entering its number:\n\n{paper_list}\n\nEnter the number of the paper you want to select:").send()
47
  state = "SELECT"
@@ -58,7 +56,6 @@ def main(message: str):
58
  cl.Message(content="Invalid input. Please enter a number.").send()
59
  return
60
 
61
- # Download and process the selected paper
62
  paper_text = selected_paper.summary
63
 
64
  # Split the text into chunks
@@ -75,7 +72,7 @@ def main(message: str):
75
  ChatOpenAI(temperature=0),
76
  vectorstore.as_retriever(),
77
  memory=memory,
78
- return_source_documents=True # Ensure source documents are returned
79
  )
80
 
81
  cl.Message(content=f"Selected paper: {selected_paper.title}\nLink: {selected_paper.entry_id}\nYou can now ask questions about this paper. Type 'new search' when you want to search for a different paper.").send()
@@ -92,16 +89,16 @@ def main(message: str):
92
  # Answer questions about the selected paper
93
  response = qa_chain({"question": message})
94
  answer = response["answer"]
95
- source_documents = response.get("source_documents", [])
96
 
97
- # Create a message with the answer and sources
98
- if source_documents:
99
- sources = "\n".join([f"- {doc.metadata['source']}" for doc in source_documents])
100
  answer += f"\n\nSources:\n{sources}"
101
 
 
102
  cl.Message(content=answer).send()
103
 
104
- # Store only the answer in memory to prevent errors
105
  qa_chain.memory.save_context({"question": message}, {"answer": answer})
106
 
107
  if __name__ == "__main__":
 
11
  # Set your OpenAI API key
12
  os.environ["OPENAI_API_KEY"] = "sk-proj-vFPqdrr801blzZCRBjztT3BlbkFJJJeQVcc62PA40cQ1S9Zv"
13
 
14
+ # Global variables
15
  selected_paper = None
16
  qa_chain = None
17
  papers = []
 
28
  global selected_paper, qa_chain, papers, state
29
 
30
  if state == "SEARCH":
 
31
  search = arxiv.Search(
32
  query=message,
33
  max_results=5,
 
35
  )
36
 
37
  papers = list(search.results())
38
+
39
  if not papers:
40
  cl.Message(content="No papers found. Please try another search query.").send()
41
  return
42
 
 
43
  paper_list = "\n".join([f"{i+1}. {paper.title} - {paper.authors[0]}\nLink: {paper.entry_id}" for i, paper in enumerate(papers)])
44
  cl.Message(content=f"Please select a paper by entering its number:\n\n{paper_list}\n\nEnter the number of the paper you want to select:").send()
45
  state = "SELECT"
 
56
  cl.Message(content="Invalid input. Please enter a number.").send()
57
  return
58
 
 
59
  paper_text = selected_paper.summary
60
 
61
  # Split the text into chunks
 
72
  ChatOpenAI(temperature=0),
73
  vectorstore.as_retriever(),
74
  memory=memory,
75
+ return_source_documents=True
76
  )
77
 
78
  cl.Message(content=f"Selected paper: {selected_paper.title}\nLink: {selected_paper.entry_id}\nYou can now ask questions about this paper. Type 'new search' when you want to search for a different paper.").send()
 
89
  # Answer questions about the selected paper
90
  response = qa_chain({"question": message})
91
  answer = response["answer"]
 
92
 
93
+ # Handling the sources and formatting the response
94
+ sources = "\n".join([f"- {doc.metadata['source']}" for doc in response.get("source_documents", [])])
95
+ if sources:
96
  answer += f"\n\nSources:\n{sources}"
97
 
98
+ # Send the response with sources
99
  cl.Message(content=answer).send()
100
 
101
+ # Store the chat history in memory without storing the sources
102
  qa_chain.memory.save_context({"question": message}, {"answer": answer})
103
 
104
  if __name__ == "__main__":