Spaces:
Sleeping
Sleeping
| import streamlit as st | |
| from pypdf import PdfReader | |
| import io | |
| from gemini_kit import get_llm | |
| from langchain_core.messages import HumanMessage | |
# One-time initialisation of the Streamlit session state this app relies on:
#   pdf      -> text extracted from the uploaded PDF ("" until extracted)
#   messages -> chat transcript as a list of single-key dicts
#   extract  -> whether the next uploaded file still needs extraction
_defaults = {"pdf": "", "messages": [], "extract": True}
for _key, _value in _defaults.items():
    if _key not in st.session_state:
        st.session_state[_key] = _value
def upload_pdf():
    """Render the PDF uploader and extract the file's text into session state.

    Extraction runs once per uploaded file, guarded by the ``extract`` flag;
    removing the file re-arms the flag so the next upload is re-extracted.
    Stores the concatenated page text in ``st.session_state.pdf``.
    """
    uploaded_file = st.file_uploader("Choose a PDF file", type="pdf")
    # `and` (not bitwise `&`) so the flag check short-circuits correctly.
    if uploaded_file is not None and st.session_state.extract:
        st.write("Waiting for pdf to be extracted ...")
        pdf_reader = PdfReader(io.BytesIO(uploaded_file.read()))
        # extract_text() may return None for image-only pages; treat as "".
        text = "".join(page.extract_text() or "" for page in pdf_reader.pages)
        st.session_state.pdf = text
        st.session_state.extract = False
        st.write("PDF Text Extracted. You can chat now!!")
    if uploaded_file is None:
        # File was removed: re-arm extraction for the next upload.
        st.session_state.extract = True
def chatbot_ui():
    """Render the chat UI: accept input, generate a reply, display history.

    Streamlit reruns the whole script on every interaction, so the lingering
    ``text_input`` value would be re-appended each rerun (which also made
    "Clear Chat" ineffective while text remained in the box). The clear
    button is therefore handled *before* any append, and a new user turn is
    only recorded when it differs from the last one.
    """
    user_input = st.text_input("You: ", "")
    # Handle "Clear Chat" first so a clear press does not race with the
    # append below on the same rerun.
    if st.button("Clear Chat"):
        st.session_state.messages = []
    if user_input:
        last_user = next(
            (m["user"] for m in reversed(st.session_state.messages) if "user" in m),
            None,
        )
        if user_input != last_user:
            st.session_state.messages.append({"user": user_input})
            if st.session_state.pdf:
                response = generate_response(st.session_state.pdf, user_input)
            else:
                response = "Please upload a PDF to get started."
            st.session_state.messages.append({"Assistant": response})
    for message in st.session_state.messages:
        if "user" in message:
            st.markdown(f"**You:** {message['user']}")
        else:
            st.markdown(f"**Assistant:** {message['Assistant']}")
def generate_response(pdf, user_input):
    """Ask the LLM to answer *user_input* using the extracted PDF text.

    Returns the model's reply string, or a human-readable error message when
    the call fails (e.g. quota exhausted or the PDF text is too large for
    the model's context window).
    """
    prompt = f"This is the text extracted from the pdf: {pdf}. The user query is {user_input}."
    model = get_llm()
    try:
        reply = model.invoke(prompt).content
    except Exception as exc:
        # Best-effort: surface the failure to the user instead of crashing.
        reply = "Error occurred. This might be due to exhaustion of LLM quota or your PDF might be much bigger. The exact error: " + str(exc)
    return reply
def main():
    """Lay out the page: title, then the PDF upload and chatbot sections."""
    st.title("NCERT PDF Based AI Assistant")
    # Upload section — extracts the PDF text into session state.
    st.header("Upload a PDF")
    upload_pdf()
    # Chat section — answers questions against the extracted text.
    st.header("Chatbot")
    chatbot_ui()


if __name__ == "__main__":
    main()