"""Streamlit chatbot that answers questions over a local FAISS vector store
using a LaMini-Flan-T5 model wrapped in a LangChain RetrievalQA chain."""

import streamlit as st
import torch
from transformers import T5Tokenizer, T5ForConditionalGeneration, pipeline
from langchain_community.embeddings import SentenceTransformerEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.chains import RetrievalQA
from langchain_community.llms import HuggingFacePipeline
# from constants import CHROMA_SETTINGS
from streamlit_chat import message
import safetensors  # noqa: F401  (kept: ensures safetensors is installed for model loading)

# Local checkpoint directory for the seq2seq model; loaded once at import time.
checkpoint = "LaMini-Flan-T5-77M"
tokenizer = T5Tokenizer.from_pretrained(checkpoint)
base_model = T5ForConditionalGeneration.from_pretrained(
    checkpoint,
    device_map="auto",
    torch_dtype=torch.float32,
)


@st.cache_resource
def llm_pipeline():
    """Build the text2text-generation pipeline once and wrap it for LangChain.

    Returns:
        HuggingFacePipeline: LangChain-compatible LLM backed by the local model.
    """
    pipe = pipeline(
        "text2text-generation",
        model=base_model,
        tokenizer=tokenizer,
        do_sample=True,  # FIX: temperature is ignored under greedy decoding
        temperature=0.5,
    )
    return HuggingFacePipeline(pipeline=pipe)


@st.cache_resource
def qa_llm():
    """Assemble the RetrievalQA chain over the persisted FAISS index.

    Returns:
        RetrievalQA: chain that retrieves from "vector_data" and answers with
        the local LLM, returning source documents alongside the result.
    """
    llm = llm_pipeline()
    embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
    # NOTE(review): recent langchain versions require
    # allow_dangerous_deserialization=True here — only enable for indexes you
    # built yourself; confirm against the installed langchain version.
    db = FAISS.load_local("vector_data", embeddings)
    # db = Chroma(persist_directory="db", embedding_function=embeddings, client_settings=CHROMA_SETTINGS)
    retriever = db.as_retriever()
    qa = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=retriever,
        return_source_documents=True,
    )
    return qa


def process_answer(instruction):
    """Run the QA chain and return the generated answer string.

    Args:
        instruction: query payload for the chain, e.g. {'query': 'question'}.

    Returns:
        str: the chain's 'result' field.
    """
    qa = qa_llm()
    generated_text = qa(instruction)
    return generated_text["result"]


# Display conversation history using Streamlit messages
def display_conversation(history):
    """Render alternating user/bot turns from session-state history."""
    for i in range(len(history["generated"])):
        message(history["past"][i], is_user=True, key=str(i) + "_user")
        message(history["generated"][i], key=str(i))


def main():
    """Streamlit entry point: render the chat UI and handle submissions."""
    st.title('Chat with Your Data 🦜📄')
    with st.expander("About the Chatbot"):
        st.markdown(
            """ This is a Generative AI powered Chatbot that interacts with you and you can ask followup questions. 
            """
        )

    # FIX: the input must live inside the form — otherwise clear_on_submit
    # cannot clear it and pressing Enter does not submit.
    with st.form(key='my_form', clear_on_submit=True):
        user_input = st.text_input(
            "Question:", placeholder="Ask about your PDF", key='input'
        )
        submit_button = st.form_submit_button(label='Send')

    # Initialize session state for generated responses and past messages
    if "generated" not in st.session_state:
        st.session_state["generated"] = ["I am ready to help you"]
    if "past" not in st.session_state:
        st.session_state["past"] = ["Hey there!👋"]

    # FIX: original condition `submit_button and user_input or user_input`
    # parsed as `(submit_button and user_input) or user_input`, so an answer
    # was generated on every rerun while the box was non-empty. Require an
    # actual Send click with non-empty text.
    if submit_button and user_input:
        st.session_state['past'].append(user_input)
        with st.spinner('Generating response...'):
            answer = process_answer({'query': user_input})
        st.session_state['generated'].append(answer)

    if st.session_state["generated"]:
        display_conversation(st.session_state)


if __name__ == '__main__':
    main()