suriya7 committed on
Commit
6a81938
·
verified ·
1 Parent(s): ae6002e

Update app.py

Browse files
Files changed (1)
  1. app.py +3 -6
app.py CHANGED
@@ -10,8 +10,6 @@ from langchain.chains.question_answering import load_qa_chain
10
  from langchain.prompts import PromptTemplate
11
  from dotenv import load_dotenv
12
 
13
- load_dotenv()
14
- os.getenv("GOOGLE_API_KEY")
15
  genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
16
 
17
 
@@ -33,7 +31,7 @@ def get_text_chunks(text):
33
 
34
  def get_vector_store(text_chunks):
35
  embeddings = GoogleGenerativeAIEmbeddings(model = "models/embedding-001")
36
- vector_store = FAISS.from_texts(text_chunks, embedding=embeddings,allow_dangerous_deserialization= True)
37
  vector_store.save_local("faiss_index")
38
 
39
 
@@ -44,12 +42,11 @@ def get_conversational_chain():
44
  provided context just say, "answer is not available in the context", don't provide the wrong answer\n\n
45
  Context:\n {context}?\n
46
  Question: \n{question}\n
47
-
48
  Answer:
49
  """
50
 
51
  model = ChatGoogleGenerativeAI(model="gemini-pro",
52
- temperature=0.3)
53
 
54
  prompt = PromptTemplate(template = prompt_template, input_variables = ["context", "question"])
55
  chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
@@ -79,7 +76,7 @@ def user_input(user_question):
79
 
80
  def main():
81
  st.set_page_config("Chat PDF")
82
- st.header("Chat with PDF using Gemini💁")
83
 
84
  user_question = st.text_input("Ask a Question from the PDF Files")
85
 
 
10
  from langchain.prompts import PromptTemplate
11
  from dotenv import load_dotenv
12
 
 
 
13
  genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
14
 
15
 
 
31
 
32
  def get_vector_store(text_chunks):
33
  embeddings = GoogleGenerativeAIEmbeddings(model = "models/embedding-001")
34
+ vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
35
  vector_store.save_local("faiss_index")
36
 
37
 
 
42
  provided context just say, "answer is not available in the context", don't provide the wrong answer\n\n
43
  Context:\n {context}?\n
44
  Question: \n{question}\n
 
45
  Answer:
46
  """
47
 
48
  model = ChatGoogleGenerativeAI(model="gemini-pro",
49
+ temperature=0.1)
50
 
51
  prompt = PromptTemplate(template = prompt_template, input_variables = ["context", "question"])
52
  chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
 
76
 
77
  def main():
78
  st.set_page_config("Chat PDF")
79
+ st.header("QnA with Multiple PDF files💁")
80
 
81
  user_question = st.text_input("Ask a Question from the PDF Files")
82