Update app.py

app.py CHANGED
@@ -4,12 +4,8 @@ from langchain.text_splitter import RecursiveCharacterTextSplitter
 import os
 import google.generativeai as genai
 from langchain_google_genai import GoogleGenerativeAIEmbeddings
-from langchain_google_genai import GoogleGenerativeAI
-from langchain_google_genai.chat_models import ChatGoogleGenerativeAI
-from langchain_google_genai.embeddings import GoogleGenerativeAIEmbeddings
-from langchain_google_genai.llms import GoogleGenerativeAI
-from langchain.vectorstores import FAISS
 from langchain_google_genai import ChatGoogleGenerativeAI
+from langchain.vectorstores import FAISS
 from langchain.chains.question_answering import load_qa_chain
 from langchain.prompts import PromptTemplate
 from dotenv import load_dotenv
@@ -20,13 +16,12 @@ genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
 
 
 def get_pdf_text(pdf_docs):
-    text=""
+    text = ""
     for pdf in pdf_docs:
-        pdf_reader= PdfReader(pdf)
+        pdf_reader = PdfReader(pdf)
         for page in pdf_reader.pages:
-            text+= page.extract_text()
-    return
-
+            text += page.extract_text()
+    return text
 
 
 def get_text_chunks(text):
@@ -36,13 +31,12 @@ def get_text_chunks(text):
 
 
 def get_vector_store(text_chunks):
-    embeddings = GoogleGenerativeAIEmbeddings(model = "models/embedding-001")
+    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
     vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
     vector_store.save_local("faiss_index")
 
 
 def get_conversational_chain():
-
     prompt_template = """
     Answer the question as detailed as possible from the provided context, make sure to provide all the details, if the answer is not in
     provided context just say, "answer is not available in the context", don't provide the wrong answer\n\n
@@ -52,35 +46,32 @@ def get_conversational_chain():
     Answer:
     """
 
-    model = ChatGoogleGenerativeAI(model="gemini-pro",
-                                   temperature=0.3)
-
-    prompt = PromptTemplate(template = prompt_template, input_variables = ["context", "question"])
+    model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
+    prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
     chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
-
     return chain
 
 
-
 def user_input(user_question):
-    embeddings = GoogleGenerativeAIEmbeddings(model = "models/embedding-001")
-
-    new_db = FAISS.load_local("faiss_index", embeddings)
+    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
+
+    faiss_index_path = "faiss_index/index.faiss"
+    if os.path.exists(faiss_index_path):
+        new_db = FAISS.load_local("faiss_index", embeddings)
+    else:
+        st.error(f"Error: Could not find FAISS index file at {faiss_index_path}")
+        return
+
     docs = new_db.similarity_search(user_question)
 
     chain = get_conversational_chain()
 
-
-    response = chain(
-        {"input_documents":docs, "question": user_question}
-        , return_only_outputs=True)
+    response = chain({"input_documents": docs, "question": user_question}, return_only_outputs=True)
 
     print(response)
     st.write("Reply: ", response["output_text"])
 
 
-
-
 def main():
     st.set_page_config("Chat PDF")
     st.header("Chat with PDF 💁")
@@ -97,10 +88,13 @@ def main():
         with st.spinner("Processing..."):
             raw_text = get_pdf_text(pdf_docs)
             text_chunks = get_text_chunks(raw_text)
+
+            if not os.path.exists("faiss_index"):
+                os.makedirs("faiss_index")
+
             get_vector_store(text_chunks)
             st.success("Done")
 
 
-
 if __name__ == "__main__":
-  main()
+    main()
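
A minimal sketch of the index round trip this change touches, for sanity-checking outside Streamlit. It assumes GOOGLE_API_KEY is set in the environment and that langchain, langchain-google-genai, and faiss-cpu are installed; the sample chunks and the query string are illustrative stand-ins, not taken from the app.

import os
from langchain.vectorstores import FAISS
from langchain_google_genai import GoogleGenerativeAIEmbeddings

# Illustrative stand-ins for the output of get_text_chunks().
chunks = [
    "The contract runs from January through December.",
    "Payment is due within 30 days of invoicing.",
]

embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")

# Build and persist the index, as get_vector_store() does.
# Recent langchain releases create the target directory inside
# save_local() themselves, so the os.makedirs() guard added in main()
# may be redundant, though it is harmless.
store = FAISS.from_texts(chunks, embedding=embeddings)
store.save_local("faiss_index")

# Reload and query, mirroring the guarded path added to user_input().
# Note: some newer langchain releases also require passing
# allow_dangerous_deserialization=True to load_local().
if os.path.exists("faiss_index/index.faiss"):
    new_db = FAISS.load_local("faiss_index", embeddings)
    docs = new_db.similarity_search("When is payment due?")
    print(docs[0].page_content)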
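
The reworked chain construction and call can be exercised the same way, continuing from the docs produced above. The prompt here is a shortened illustrative stand-in for the app's full template, using the same two input variables; gemini-pro access through the same GOOGLE_API_KEY is assumed.

from langchain.chains.question_answering import load_qa_chain
from langchain.prompts import PromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI

# Shortened stand-in for the app's prompt template.
prompt = PromptTemplate(
    template="Context:\n{context}\n\nQuestion:\n{question}\n\nAnswer:",
    input_variables=["context", "question"],
)

model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)

# Same call shape as the consolidated line in user_input().
response = chain(
    {"input_documents": docs, "question": "When is payment due?"},
    return_only_outputs=True,
)
print(response["output_text"])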