"""Streamlit RAG chatbot: upload a PDF, index it into Chroma, chat via Gemini.

Flow: upload PDF -> load & chunk -> embed into a Chroma vector store ->
build a RetrievalQA chain -> answer questions, keeping history in
st.session_state so Streamlit reruns do not re-index the document.
"""

import os
import tempfile

import streamlit as st
from dotenv import load_dotenv
from langchain.chains import RetrievalQA
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma
from langchain_google_genai import ChatGoogleGenerativeAI
from pydantic import SecretStr

load_dotenv()
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

# Created once at module import so Streamlit reruns reuse the same model.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# ---------------------------- SETUP ----------------------------
st.title("📄 LangChain RAG Chatbot")

# Session state survives Streamlit's top-to-bottom reruns.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
if "qa_chain" not in st.session_state:
    st.session_state.qa_chain = None

# ---------------------------- FILE UPLOAD ----------------------------
st.subheader("Upload your PDF")
pdf_file = st.file_uploader("Upload", type="pdf")

# Only index once per session: skip if a chain already exists.
if pdf_file is not None and st.session_state.qa_chain is None:
    with st.spinner("🔍 Processing document..."):
        # PyPDFLoader needs a filesystem path, so spill the upload to a temp file.
        with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp_file:
            tmp_file.write(pdf_file.read())
            tmp_path = tmp_file.name

        try:
            # FIX: the original used load_and_split() AND split_documents(),
            # chunking the text twice (first with default settings, then with
            # the configured splitter). Load pages once, split once.
            loader = PyPDFLoader(tmp_path)
            documents = loader.load()
        finally:
            # FIX: the delete=False temp file was never removed (disk leak
            # on every upload). Clean it up as soon as loading finishes.
            os.unlink(tmp_path)

        splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
        chunks = splitter.split_documents(documents)

        # Vector store (persisted locally so the index can be reused).
        vectordb = Chroma.from_documents(
            chunks,
            embeddings,
            persist_directory="./chroma_db",
        )
        retriever = vectordb.as_retriever()

        # FIX: fail loudly when the API key is missing instead of building a
        # chain with api_key=None that errors only on the first question.
        if not GOOGLE_API_KEY:
            st.error("GOOGLE_API_KEY is not set. Add it to your .env file.")
            st.stop()

        # QA chain: retriever feeds context to the Gemini LLM.
        llm = ChatGoogleGenerativeAI(
            model="gemini-2.0-flash",
            api_key=SecretStr(GOOGLE_API_KEY),
        )
        qa_chain = RetrievalQA.from_chain_type(llm=llm, retriever=retriever)

        # Store in session so later reruns skip re-indexing.
        st.session_state.qa_chain = qa_chain
        st.success("✅ Document processed and indexed!")

# ---------------------------- CHAT ----------------------------
if st.session_state.qa_chain:
    st.subheader("💬 Ask a question")
    question = st.text_input("You:", key="user_input")

    if question:
        with st.spinner("🤖 Generating answer..."):
            # FIX: Chain.run() is deprecated; invoke() takes {"query": ...}
            # and returns a dict with the answer under "result".
            response = st.session_state.qa_chain.invoke({"query": question})
            answer = response["result"]
        st.session_state.chat_history.append({"user": question, "bot": answer})

    # Display chat history (oldest first).
    for chat in st.session_state.chat_history:
        st.markdown(f"🧑 **You:** {chat['user']}")
        st.markdown(f"🤖 **Bot:** {chat['bot']}")

    # Reset button: drop history and the chain so a new PDF can be indexed.
    if st.button("🔄 Reset Chat"):
        st.session_state.chat_history = []
        st.session_state.qa_chain = None
        st.rerun()
else:
    st.info("📂 Please upload a PDF to begin.")