stevafernandes committed on
Commit
b8ba0c8
·
verified ·
1 Parent(s): 64e3014

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +95 -0
app.py ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ from PyPDF2 import PdfReader
3
+ import os
4
+
5
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
6
+ from langchain_google_genai import GoogleGenerativeAIEmbeddings
7
+ from langchain_community.vectorstores import FAISS
8
+ from langchain_google_genai import ChatGoogleGenerativeAI
9
+ from langchain.chains.question_answering import load_qa_chain
10
+ from langchain.prompts import PromptTemplate
11
+
12
# --- Configuration ---
# API key is read from the environment; an empty string means "not set",
# which main() treats as a fatal configuration error.
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY", "")

# The PDF to index is expected to sit in the current working directory.
PDF_FILE = "Librarianship.pdf" # Change this to your PDF filename
17
+
18
+
19
def get_pdf_text(pdf_file):
    """Extract and concatenate the text of every page of a PDF.

    Args:
        pdf_file: Path to the PDF file on disk.

    Returns:
        One string containing the extractable text of all pages, in order.
    """
    with open(pdf_file, "rb") as handle:
        reader = PdfReader(handle)
        page_texts = [page.extract_text() for page in reader.pages]
    # extract_text() can return None/"" for image-only pages; skip those.
    return "".join(chunk for chunk in page_texts if chunk)
28
+
29
+
30
def get_text_chunks(text):
    """Split raw document text into overlapping chunks for embedding.

    Chunk size (10k chars) and overlap (1k chars) are fixed to keep each
    chunk within the embedding model's practical context budget.
    """
    splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)
    chunks = splitter.split_text(text)
    return chunks
33
+
34
+
35
def get_vector_store(text_chunks, api_key):
    """Embed the text chunks and persist a FAISS index to /tmp/faiss_index.

    The saved index is reloaded later by user_input(), so the document is
    embedded only once per session rather than per question.
    """
    embedder = GoogleGenerativeAIEmbeddings(
        model="models/embedding-001", google_api_key=api_key
    )
    index = FAISS.from_texts(text_chunks, embedding=embedder)
    index.save_local("/tmp/faiss_index")
39
+
40
+
41
def get_conversational_chain(api_key):
    """Build a "stuff"-type QA chain backed by Gemini 2.0 Flash.

    The prompt confines the model to the retrieved PDF context and tells it
    to reply "I don't know." when the answer is absent; temperature 0 keeps
    replies deterministic.
    """
    prompt_template = """
    You are a helpful assistant that only answers based on the context provided from the PDF document.
    Do not use any external knowledge or assumptions. If the answer is not found in the context below, reply with "I don't know."
    Context:
    {context}
    Question:
    {question}
    Answer:
    """
    llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0, google_api_key=api_key)
    qa_prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
    return load_qa_chain(llm, chain_type="stuff", prompt=qa_prompt)
55
+
56
+
57
def user_input(user_question, api_key):
    """Answer one question against the persisted FAISS index and render the reply.

    Reloads the index from /tmp/faiss_index, retrieves the most similar
    chunks, runs the QA chain over them, and writes the answer to the page.
    """
    embedder = GoogleGenerativeAIEmbeddings(model="models/embedding-001", google_api_key=api_key)
    # allow_dangerous_deserialization is required to reload a pickled local
    # index; acceptable here because this app wrote the index itself.
    store = FAISS.load_local("/tmp/faiss_index", embedder, allow_dangerous_deserialization=True)
    matches = store.similarity_search(user_question)
    qa_chain = get_conversational_chain(api_key)
    result = qa_chain({"input_documents": matches, "question": user_question}, return_only_outputs=True)
    st.write("Reply: ", result["output_text"])
64
+
65
+
66
def main():
    """Streamlit entry point: index the bundled PDF once, then answer questions.

    The vector index is built a single time per session (tracked via
    st.session_state["pdf_processed"]) and reused for every question.
    Stops with a clear error if the API key or the PDF file is missing.
    """
    st.set_page_config(page_title="Chat PDF")
    st.header("Retrieval-Augmented Generation - Gemini 2.0")
    st.markdown("---")

    api_key = GOOGLE_API_KEY

    if not api_key:
        st.error("Please set the GOOGLE_API_KEY environment variable.")
        st.stop()

    # Fix: fail with a readable message instead of an unhandled
    # FileNotFoundError when the bundled PDF is absent from the directory.
    if not os.path.exists(PDF_FILE):
        st.error(f"PDF file '{PDF_FILE}' not found in the working directory.")
        st.stop()

    if "pdf_processed" not in st.session_state:
        st.session_state["pdf_processed"] = False

    # Build the vector store only once per session; reruns skip this branch.
    if not st.session_state["pdf_processed"]:
        with st.spinner("Processing PDF..."):
            raw_text = get_pdf_text(PDF_FILE)
            text_chunks = get_text_chunks(raw_text)
            get_vector_store(text_chunks, api_key)
            st.session_state["pdf_processed"] = True
            st.success("PDF processed! You can now ask questions.")

    st.subheader("Ask a question about your PDF")
    user_question = st.text_input("Ask a question")
    if user_question:
        user_input(user_question, api_key)
92
+
93
+
94
# Run the Streamlit app only when executed directly (not when imported).
if __name__ == "__main__":
    main()