Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -30,7 +30,7 @@ def get_pdf_text(pdf_docs):
|
|
| 30 |
|
| 31 |
|
| 32 |
def get_text_chunks(text):
|
| 33 |
-
text_splitter = RecursiveCharacterTextSplitter(chunk_size=
|
| 34 |
chunks = text_splitter.split_text(text)
|
| 35 |
return chunks
|
| 36 |
|
|
@@ -45,7 +45,7 @@ def get_conversational_chain():
|
|
| 45 |
|
| 46 |
prompt_template = """
|
| 47 |
Answer the question as detailed as possible from the provided context, make sure to provide all the details, if the answer is not in
|
| 48 |
-
provided context just say, "
|
| 49 |
Context:\n {context}?\n
|
| 50 |
Question: \n{question}\n
|
| 51 |
|
|
@@ -53,7 +53,7 @@ def get_conversational_chain():
|
|
| 53 |
"""
|
| 54 |
|
| 55 |
model = ChatGoogleGenerativeAI(model="gemini-pro",
|
| 56 |
-
temperature=0.
|
| 57 |
|
| 58 |
prompt = PromptTemplate(template = prompt_template, input_variables = ["context", "question"])
|
| 59 |
chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
|
|
|
|
| 30 |
|
| 31 |
|
| 32 |
def get_text_chunks(text):
    """Split raw document text into overlapping chunks for embedding.

    Args:
        text: The full extracted text (e.g. concatenated PDF pages).

    Returns:
        list[str]: Chunks of at most 1000 characters, each overlapping
        the previous one by 800 characters.
    """
    # NOTE(review): an 800-char overlap on 1000-char chunks stores each
    # span roughly 5x over — confirm this heavy duplication is intentional
    # (it inflates the vector store and embedding cost).
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=800)
    return splitter.split_text(text)
|
| 36 |
|
|
|
|
| 45 |
|
| 46 |
prompt_template = """
|
| 47 |
Answer the question as detailed as possible from the provided context, make sure to provide all the details, if the answer is not in
|
| 48 |
+
provided context just say, "I could not find the answer, can you please try different words", don't provide the wrong answer\n\n
|
| 49 |
Context:\n {context}?\n
|
| 50 |
Question: \n{question}\n
|
| 51 |
|
|
|
|
| 53 |
"""
|
| 54 |
|
| 55 |
model = ChatGoogleGenerativeAI(model="gemini-pro",
|
| 56 |
+
temperature=0.1, google_api_key='<REDACTED — SECURITY: a real Google API key was committed here in plaintext; it is now public and must be revoked immediately. Load it from an environment variable or secrets manager instead, e.g. os.environ["GOOGLE_API_KEY"]>')
|
| 57 |
|
| 58 |
prompt = PromptTemplate(template = prompt_template, input_variables = ["context", "question"])
|
| 59 |
chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
|