goketech committed on
Commit
7a29fd8
·
verified ·
1 Parent(s): e0cd0d2

Create 😋_food_and_nutrition_chatbot.py

Browse files
pages/😋_food_and_nutrition_chatbot.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #Import the necessary libraries
2
+ import utils
3
+ import streamlit as st
4
+ from streaming import StreamHandler
5
+
6
+ from langchain_openai import ChatOpenAI
7
+ from langchain.chains.conversation.base import ConversationChain
8
+ from langchain.memory.buffer import ConversationBufferMemory
9
+ from langchain.memory import ConversationSummaryMemory
10
+ from langchain.chains.conversational_retrieval.base import ConversationalRetrievalChain
11
+ from langchain.prompts import PromptTemplate
12
+ from langchain_community.document_loaders import TextLoader
13
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
14
+ from langchain_chroma import Chroma
15
+ from langchain_openai import OpenAIEmbeddings
16
+
# Streamlit page configuration — must be the first Streamlit call in the script.
# NOTE(review): the page title says "Food and Nutrition chatbot" while the header
# below says "Sickle cell chatbot" — confirm which branding is intended.
st.set_page_config(page_title="Food and Nutrition chatbot", page_icon="😋")
st.header('Sickle cell chatbot')
st.write('Enhancing Chatbot Interactions through sickle cell training')
class ContextChatbot:
    """Retrieval-augmented Streamlit chatbot for sickle-cell questions.

    Loads a fixed set of local text documents, indexes them in a Chroma
    vector store, and answers user questions through a LangChain
    ``ConversationalRetrievalChain`` whose conversation history is
    summarized by a separate LLM.
    """

    def __init__(self):
        # Resolve the chat model name (and API credentials) via the shared helper.
        self.openai_model = utils.configure_openai()

    # ``_self`` (leading underscore) tells st.cache_resource not to hash the
    # instance, so the expensive chain is built once per session/server.
    @st.cache_resource
    def setup_chain(_self):
        """Build and cache the retrieval QA chain over the local documents.

        Returns:
            ConversationalRetrievalChain: chain expecting ``{"question": ...}``
            input and producing an ``answer`` key (plus source documents).

        Raises:
            RuntimeError: if none of the document files could be loaded.
        """
        file_paths = [
            './Docs/Prenatal Diagonosis.txt',
            './Docs/Sickle Cell Anaemia Yoruba.txt',
            './Docs/Sickle Cell Anemia.txt',
            './Docs/Sickle cell in Africa.txt',
            './Docs/sickle-cell-disease-report.txt',
            './Docs/Blood Basics.txt',
            './Docs/Sickle Cell Basic Info.txt',
            './Docs/Manging Sicklecell.txt',
            './Docs/SickleCell_nutrition.txt',
            './Docs/Lifestyle Advice.txt'
        ]

        # Load documents best-effort: a missing/unreadable file is reported
        # on stdout but does not abort the whole setup.
        documents = []
        for path in file_paths:
            try:
                loader = TextLoader(path)
                documents.extend(loader.load())
                print(f"Successfully loaded: {path}")
            except Exception as e:
                print(f"Error loading {path}: {e}")

        # With zero documents the retriever would be useless — fail loudly.
        if not documents:
            raise RuntimeError("No documents were loaded. Please check the file paths and file contents.")

        text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=650, chunk_overlap=60, add_start_index=True
        )
        openai_embedding_model = OpenAIEmbeddings(
            model="text-embedding-ada-002",
        )
        all_splits = text_splitter.split_documents(documents)
        vectorstore = Chroma.from_documents(
            documents=all_splits,
            embedding=openai_embedding_model,
            persist_directory='./Vectorstore/',
        )

        prompt_template = """
        Use the following pieces of context to answer the user's question about sickle cell.
        If you don't know the answer, just say that you don't know, don't try to make up an answer.
        ----------------
        {context}
        Question: {question}
        """

        PROMPT = PromptTemplate(
            template=prompt_template, input_variables=["context", "question"]
        )
        chain_type_kwargs = {"prompt": PROMPT}

        # Two LLMs: a fixed gpt-4o summarizes the running conversation for
        # memory; the user-configured model generates the actual answers.
        summarizer = ChatOpenAI(model_name="gpt-4o", temperature=0, streaming=True)
        llm = ChatOpenAI(model_name=_self.openai_model, temperature=0, streaming=True)

        qa = ConversationalRetrievalChain.from_llm(
            llm=llm,
            chain_type="stuff",
            memory=ConversationSummaryMemory(
                llm=summarizer,
                memory_key='chat_history',
                input_key='question',
                output_key='answer',
                return_messages=True,
            ),
            # MMR retrieval for diverse context chunks.
            retriever=vectorstore.as_retriever(k=5, search_type="mmr"),
            return_source_documents=True,
            combine_docs_chain_kwargs=chain_type_kwargs,
        )
        return qa

    @utils.enable_chat_history
    def main(self):
        """Render the chat UI: take a user question, stream the answer back,
        and append the final answer to the session's message history."""
        chain = self.setup_chain()
        user_query = st.chat_input(placeholder="Ask me anything!")
        if user_query:
            utils.display_msg(user_query, 'user')
            with st.chat_message("assistant"):
                # StreamHandler writes tokens into the placeholder as they arrive.
                st_cb = StreamHandler(st.empty())
                result = chain.invoke(
                    {"question": user_query},
                    {"callbacks": [st_cb]},
                )
                response = result["answer"]
                st.session_state.messages.append({"role": "assistant", "content": response})
# Script entry point: build the chatbot page and start the chat loop.
if __name__ == "__main__":
    ContextChatbot().main()
# SickleCell