# SOLOBot / app.py
# (Hugging Face Spaces page header preserved as comments so the file parses:
#  "Sasidu's picture" / "Update app.py" / commit 09b5eb0)
#dotenv package to load the API key
from dotenv import load_dotenv
import textwrap
from langchain import PromptTemplate
from langchain.chains.question_answering import load_qa_chain
from langchain.document_loaders import PyPDFDirectoryLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma
from langchain_google_genai import GoogleGenerativeAIEmbeddings
from langchain_google_genai import ChatGoogleGenerativeAI
import os
import streamlit as st
import google.generativeai as genai
# --- Startup: build the retrieval index (module-level, as in the original script). ---

# Load GOOGLE_API_KEY (and any other settings) from a .env file into os.environ.
load_dotenv()

# Load every PDF in the "pdfs" directory and split it into page-level documents.
loader = PyPDFDirectoryLoader("pdfs")
data = loader.load_and_split()

# Join all page contents into one string, then chunk it for embedding.
# NOTE: the original referenced `text_splitter` and `context` before defining
# either, which raised NameError at import time — both are defined here.
context = "\n\n".join(str(page.page_content) for page in data)
text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=200)
texts = text_splitter.split_text(context)

# Embed the chunks with Gemini embeddings and index them in an in-memory
# Chroma store, exposed as a retriever for the QA flow in main().
embeddings = GoogleGenerativeAIEmbeddings(
    model="models/embedding-001",
    google_api_key=os.environ["GOOGLE_API_KEY"],
)
vector_index = Chroma.from_texts(texts, embeddings).as_retriever()
def get_qa_chain():
    """Build and return a "stuff"-type question-answering chain backed by Gemini Pro.

    The chain answers a question strictly from the context documents it is
    given, and is prompted to say the answer is unavailable rather than guess.

    Returns:
        A LangChain QA chain; call it with
        ``{"input_documents": docs, "question": q}``.
    """
    prompt_template = """
Please answer the question in as much detail as possible based on the provided context.
Ensure to include all relevant details. If the answer is not available in the provided context,
kindly respond with "The answer is not available in the context." Please avoid providing incorrect answers.
Context:
{context}
Question:
{question}
Answer:
"""
    prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
    # Initialize the model. Read the key from the environment (loaded by
    # load_dotenv), matching the embeddings setup above. The original used
    # Colab-only `userdata.get('GeminiProKey')`, which is undefined outside
    # Google Colab and raised NameError on every call.
    model = ChatGoogleGenerativeAI(
        model="gemini-pro",
        temperature=0.3,
        google_api_key=os.environ["GOOGLE_API_KEY"],
    )
    # "stuff" chain type: all retrieved documents are stuffed into one prompt.
    chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
    return chain
def main():
    """Streamlit entry point: read a question, retrieve context, show the answer."""
    st.title("SOLO Bot")

    user_question = st.text_input("Question: ")
    if not user_question:
        # Nothing typed yet — render only the title and input box.
        return

    # Pull the chunks most relevant to the question, then run the QA chain
    # over exactly those documents.
    relevant_docs = vector_index.get_relevant_documents(user_question)
    qa_chain = get_qa_chain()
    result = qa_chain(
        {"input_documents": relevant_docs, "question": user_question},
        return_only_outputs=True,
    )

    st.header("Answer")
    st.write(result["output_text"])


if __name__ == "__main__":
    main()