goketech committed on
Commit
5dd61ce
·
verified ·
1 Parent(s): 7a29fd8

Create 💊drug_and_medicine_chatbot.py

Browse files
pages/💊drug_and_medicine_chatbot.py ADDED
@@ -0,0 +1,331 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #Import the necessary libraries
2
+ import utils
3
+ import streamlit as st
4
+ from streaming import StreamHandler
5
+
6
+ from langchain_openai import ChatOpenAI
7
+ from langchain.chains.conversation.base import ConversationChain
8
+ from langchain.memory.buffer import ConversationBufferMemory
9
+ from langchain.memory import ConversationSummaryMemory
10
+ from langchain.chains.conversational_retrieval.base import ConversationalRetrievalChain
11
+ from langchain.prompts import PromptTemplate
12
+ from langchain_community.document_loaders import TextLoader
13
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
14
+ from langchain_chroma import Chroma
15
+ from langchain_openai import OpenAIEmbeddings
16
+
# Page chrome. This file is pages/💊drug_and_medicine_chatbot.py, so the page
# title/icon are aligned with the filename (the pasted original said
# "Sickle cell chatbot" / 🧠, copied from the sickle-cell page).
st.set_page_config(page_title="Drug and Medicine chatbot", page_icon="💊")
st.header('Sickle cell chatbot')  # NOTE(review): header/body text still describe sickle cell — confirm intended branding for this page
st.write('Enhancing Chatbot Interactions through sickle cell training')
class ContextChatbot:
    """Streamlit RAG chatbot page answering questions from local text docs.

    The knowledge base is the set of .txt files under ./Docs, embedded into a
    persistent Chroma vector store and queried through a
    ConversationalRetrievalChain.  Conversation history is kept as a running
    summary produced by a dedicated summariser LLM.
    """

    def __init__(self):
        # Renders the model-selection UI and returns the chosen OpenAI chat
        # model name (see utils.configure_openai).
        self.openai_model = utils.configure_openai()

    @st.cache_resource
    def setup_chain(_self):
        """Build and cache the retrieval-augmented QA chain.

        The leading underscore in `_self` is the Streamlit convention that
        excludes the instance from st.cache_resource's hash, so the chain is
        constructed only once per process.

        Raises:
            RuntimeError: if none of the source documents could be loaded.
        """
        # Source documents for the vector store.  Paths (including the
        # spellings "Diagonosis" / "Manging") must match the files on disk
        # byte-for-byte — NOTE(review): confirm against the actual filenames.
        file_paths = [
            './Docs/Prenatal Diagonosis.txt',
            './Docs/Sickle Cell Anaemia Yoruba.txt',
            './Docs/Sickle Cell Anemia.txt',
            './Docs/Sickle cell in Africa.txt',
            './Docs/sickle-cell-disease-report.txt',
            './Docs/Blood Basics.txt',
            './Docs/Sickle Cell Basic Info.txt',
            './Docs/Manging Sicklecell.txt',
            './Docs/SickleCell_nutrition.txt',
            './Docs/Lifestyle Advice.txt'
        ]

        # Load every document we can; report (but tolerate) individual
        # failures so one bad file does not take the whole page down.
        documents = []
        for path in file_paths:
            try:
                loader = TextLoader(path)
                documents.extend(loader.load())
                print(f"Successfully loaded: {path}")
            except Exception as e:
                print(f"Error loading {path}: {e}")

        # Fail fast if nothing loaded: an empty vector store would silently
        # answer every question with "I don't know".
        if not documents:
            raise RuntimeError("No documents were loaded. Please check the file paths and file contents.")

        # Split into overlapping chunks sized for the embedding model.
        text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=650, chunk_overlap=60, add_start_index=True
        )
        openai_embedding_model = OpenAIEmbeddings(
            model="text-embedding-ada-002",
        )
        all_splits = text_splitter.split_documents(documents)
        vectorstore = Chroma.from_documents(
            documents=all_splits,
            embedding=openai_embedding_model,
            persist_directory='./Vectorstore/',
        )

        # Prompt used by the "stuff" combine-docs chain.
        prompt_template = """
Use the following pieces of context to answer the user's question about sickle cell.
If you don't know the answer, just say that you don't know, don't try to make up an answer.
----------------
{context}
Question: {question}
"""
        PROMPT = PromptTemplate(
            template=prompt_template, input_variables=["context", "question"]
        )
        chain_type_kwargs = {"prompt": PROMPT}

        # A separate fixed model maintains the running conversation summary;
        # the user-selected model answers the questions.
        summarizer = ChatOpenAI(model_name="gpt-4o", temperature=0, streaming=True)
        llm = ChatOpenAI(model_name=_self.openai_model, temperature=0, streaming=True)

        qa = ConversationalRetrievalChain.from_llm(
            llm=llm,
            chain_type="stuff",
            # output_key='answer' is needed because return_source_documents=True
            # makes the chain multi-output.
            memory=ConversationSummaryMemory(
                llm=summarizer,
                memory_key='chat_history',
                input_key='question',
                output_key='answer',
                return_messages=True,
            ),
            # Fix: the number of results belongs in search_kwargs; a bare
            # `k=5` keyword to as_retriever() is not a retriever field and
            # does not limit the search.
            retriever=vectorstore.as_retriever(search_type="mmr", search_kwargs={"k": 5}),
            return_source_documents=True,
            combine_docs_chain_kwargs=chain_type_kwargs,
        )
        return qa

    @utils.enable_chat_history
    def main(self):
        """Chat UI loop: read a user message, stream the answer, store it."""
        chain = self.setup_chain()
        user_query = st.chat_input(placeholder="Ask me anything!")
        if user_query:
            utils.display_msg(user_query, 'user')
            with st.chat_message("assistant"):
                # Stream tokens into the placeholder as they arrive.
                st_cb = StreamHandler(st.empty())
                result = chain.invoke(
                    {"question": user_query},
                    {"callbacks": [st_cb]},
                )
                response = result["answer"]
                st.session_state.messages.append({"role": "assistant", "content": response})
# Entry point: build the chatbot page and hand control to Streamlit's
# script loop.
if __name__ == "__main__":
    ContextChatbot().main()
# (Removed: an accidental second, near-identical paste of this entire page
# script.  It re-ran the module imports, called st.set_page_config a second
# time — which Streamlit allows only once per page run — redefined
# ContextChatbot, and re-executed main().  The single copy above is the
# whole page.)
# (Removed: an accidental third, byte-identical paste of this entire page
# script.  It repeated the imports, the disallowed second/third
# st.set_page_config call, the ContextChatbot definition, and the
# main() invocation.  The single copy above is the whole page.)