Surat96 committed
Commit 8e9090e · verified · 1 Parent(s): c6d1798

Delete app1.py

Files changed (1)
  1. app1.py +0 -107
app1.py DELETED
@@ -1,107 +0,0 @@
- import streamlit as st
- from PyPDF2 import PdfReader
- from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter
- import os, getpass
- from langchain_google_genai import GoogleGenerativeAIEmbeddings
- import google.generativeai as genai
- from langchain.vectorstores import FAISS
- from langchain_google_genai import ChatGoogleGenerativeAI
- from langchain.chains.question_answering import load_qa_chain
- from langchain.prompts import PromptTemplate
- from langchain.chains import ConversationalRetrievalChain
- from langchain.memory import ConversationBufferMemory
-
-
- #Gemini Key
- genai.configure(api_key=os.environ['GOOGLE_API_KEY'])
-
-
- def get_pdf_text(pdf_docs):
-     text=""
-     for pdf in pdf_docs:
-         pdf_reader= PdfReader(pdf)
-         for page in pdf_reader.pages:
-             text+= page.extract_text()
-     return text
-
-
- def get_text_chunks(text):
-     #RecursiveCharacterTextSplitter CharacterTextSplitter separator="\n",
-     text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=1000, length_function=len)#
-     chunks = text_splitter.split_text(text)
-     return chunks
-
-
- def get_vector_store(text_chunks):
-     embeddings = GoogleGenerativeAIEmbeddings(model = "models/embedding-001")
-     vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
-     return vector_store
-
-
- def get_conversational_chain(Fvs):
-
-     prompt_template = """
-     Answer the question as detailed as possible from the provided context, make sure to provide all the details, if the answer is not in
-     provided context just say, "answer is not available in the context", don't provide the wrong answer\n\n
-     Context:\n {context}?\n
-     Question: \n{question}\n
-
-     Answer:
-     """
-
-     model = ChatGoogleGenerativeAI(model="gemini-1.5-pro",temperature=0.3)
-     prompt = PromptTemplate(template = prompt_template, input_variables = ["context", "question"])
-     memory = ConversationBufferMemory(memory_key = "chat_history", return_messages=True)
-     chain = ConversationalRetrievalChain.from_llm(llm=model,retriever=Fvs.as_retriever(), memory=memory)
-
-     return chain
-
-
-
- def user_input(user_question):
-     response = st.session_state.conversation({'question': user_question})
-     st.session_state.chatHistory = response['chat_history']
-     for i, message in enumerate(st.session_state.chatHistory):
-         if i%2 == 0:
-             st.write("Human: ", message.content)
-         else:
-             st.write("Bot: ", message.content)
-
- ## streamlit app
- st.set_page_config("Chat With Multiple PDF")
- st.header("Chat with Multiple PDF :books:")
-
- user_question = st.text_input("Ask a Question from the PDF Files")
- submit=st.button("Ask the question")
-
- ## If ask button is clicked
- if submit:
-     if "conversation" not in st.session_state:
-         st.session_state.conversation = None
-     if "chatHistory" not in st.session_state:
-         st.session_state.chatHistory = None
-     if user_question:
-         user_input(user_question)
-
- with st.sidebar:
-     st.title("Menu:")
-     pdf_docs = st.file_uploader("Upload your PDF Files", accept_multiple_files=True)
-     if st.button("Submit & Process"):
-         with st.spinner("Processing..."):
-             raw_text = get_pdf_text(pdf_docs)
-             text_chunks = get_text_chunks(raw_text)
-             Fvs = get_vector_store(text_chunks)
-             st.session_state.conversation = get_conversational_chain(Fvs)
-             st.success("Done")
-
-     if st.button("Clear Chat Window", use_container_width=True, type="primary"):
-         st.session_state.history = []
-         st.rerun()
-
- footer = """
- ---
- #### Made By [Surat Banerjee](https://www.linkedin.com/in/surat-banerjee/)
- For Any Queries, Reach out on [Portfolio](https://suratbanerjee.wixsite.com/myportfoliods)
- """
-
- st.markdown(footer, unsafe_allow_html=True)