Update app.py
Browse files
app.py
CHANGED
|
@@ -70,7 +70,6 @@ def generate_response(rag_chain, input_text):
|
|
| 70 |
|
| 71 |
return response
|
| 72 |
|
| 73 |
-
### Ken 12/11/2024 ADD START
|
| 74 |
def get_pdf(uploaded_file):
|
| 75 |
temp_file = "./temp.pdf"
|
| 76 |
if uploaded_file :
|
|
@@ -84,14 +83,13 @@ def get_pdf(uploaded_file):
|
|
| 84 |
loader = PyPDFLoader(temp_file)
|
| 85 |
docs = loader.load()
|
| 86 |
return docs
|
| 87 |
-
### Ken 12/11/2024 ADD END
|
| 88 |
|
| 89 |
|
| 90 |
def main() -> None:
|
| 91 |
|
| 92 |
st.title("🧠 This is a RAG Chatbot with Ollama and Langchain !!!")
|
| 93 |
|
| 94 |
-
st.write("The LLM model Llama-3.2 is used")
|
| 95 |
st.write("You can upload a PDF to chat with !!!")
|
| 96 |
|
| 97 |
with st.sidebar:
|
|
@@ -100,9 +98,7 @@ def main() -> None:
|
|
| 100 |
|
| 101 |
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
|
| 102 |
|
| 103 |
-
### Ken 12/11/2024 ADD START
|
| 104 |
raw_text = get_pdf(docs)
|
| 105 |
-
### Ken 12/11/2024 ADD END
|
| 106 |
|
| 107 |
#chunks = text_splitter.split_documents(docs)
|
| 108 |
chunks = text_splitter.split_documents(raw_text)
|
|
@@ -140,7 +136,7 @@ def main() -> None:
|
|
| 140 |
|
| 141 |
prompt = ChatPromptTemplate.from_template(prompt)
|
| 142 |
|
| 143 |
-
model = ChatOllama(model="llama3.2:latest")
|
| 144 |
|
| 145 |
rag_chain = (
|
| 146 |
{"context": retriever|format_docs, "question": RunnablePassthrough()}
|
|
@@ -171,3 +167,4 @@ def main() -> None:
|
|
| 171 |
|
| 172 |
if __name__ == "__main__":
|
| 173 |
main()
|
|
|
|
|
|
| 70 |
|
| 71 |
return response
|
| 72 |
|
|
|
|
| 73 |
def get_pdf(uploaded_file):
|
| 74 |
temp_file = "./temp.pdf"
|
| 75 |
if uploaded_file :
|
|
|
|
| 83 |
loader = PyPDFLoader(temp_file)
|
| 84 |
docs = loader.load()
|
| 85 |
return docs
|
|
|
|
| 86 |
|
| 87 |
|
| 88 |
def main() -> None:
|
| 89 |
|
| 90 |
st.title("🧠 This is a RAG Chatbot with Ollama and Langchain !!!")
|
| 91 |
|
| 92 |
+
st.write("The LLM model Llama-3.2 is used")
|
| 93 |
st.write("You can upload a PDF to chat with !!!")
|
| 94 |
|
| 95 |
with st.sidebar:
|
|
|
|
| 98 |
|
| 99 |
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
|
| 100 |
|
|
|
|
| 101 |
raw_text = get_pdf(docs)
|
|
|
|
| 102 |
|
| 103 |
#chunks = text_splitter.split_documents(docs)
|
| 104 |
chunks = text_splitter.split_documents(raw_text)
|
|
|
|
| 136 |
|
| 137 |
prompt = ChatPromptTemplate.from_template(prompt)
|
| 138 |
|
| 139 |
+
model = ChatOllama(model="llama3.2:latest")
|
| 140 |
|
| 141 |
rag_chain = (
|
| 142 |
{"context": retriever|format_docs, "question": RunnablePassthrough()}
|
|
|
|
| 167 |
|
| 168 |
if __name__ == "__main__":
|
| 169 |
main()
|
| 170 |
+
|