Azmathussainthebo committed on
Commit
68c2078
·
verified ·
1 Parent(s): c32677f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +80 -12
app.py CHANGED
@@ -1,4 +1,44 @@
 
 
 
1
  import os
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  import streamlit as st
3
  from PyPDF2 import PdfReader
4
  from langchain.text_splitter import CharacterTextSplitter
@@ -6,11 +46,20 @@ from langchain_cohere import CohereEmbeddings
6
  from langchain.vectorstores import FAISS
7
  from langchain.memory import ConversationBufferMemory
8
  from langchain.chains import ConversationalRetrievalChain
9
- from langchain_openai import ChatOpenAI
10
 
11
  # Load environment variables
12
- cohere_api_key = os.environ.get("COHERE_API_KEY")
13
- openai_api_key = os.environ.get("OPENAI_API_KEY")
 
 
 
 
 
 
 
 
 
14
 
15
  # Function to extract text from PDF files
16
  def get_pdf_text(pdf_docs):
@@ -40,20 +89,36 @@ def get_vectorstore(text_chunks):
40
 
41
  # Function to set up the conversational retrieval chain
42
  def get_conversation_chain(vectorstore):
43
- llm = ChatOpenAI(model="gpt-4", temperature=0.5, openai_api_key=openai_api_key)
44
- memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
45
- conversation_chain = ConversationalRetrievalChain.from_llm(
46
- llm=llm,
47
- retriever=vectorstore.as_retriever(),
48
- memory=memory
49
- )
50
- return conversation_chain
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
51
 
52
  # Handle user input
53
  def handle_userinput(user_question):
54
  if st.session_state.conversation is not None:
55
  response = st.session_state.conversation({'question': user_question})
56
  st.session_state.chat_history = response['chat_history']
 
57
  for i, message in enumerate(st.session_state.chat_history):
58
  if i % 2 == 0:
59
  st.write(f"*User:* {message.content}")
@@ -65,6 +130,7 @@ def handle_userinput(user_question):
65
  # Main function to run the Streamlit app
66
  def main():
67
  st.set_page_config(page_title="Chat with multiple PDFs", page_icon=":books:")
 
68
  if "conversation" not in st.session_state:
69
  st.session_state.conversation = None
70
  if "chat_history" not in st.session_state:
@@ -77,7 +143,9 @@ def main():
77
 
78
  with st.sidebar:
79
  st.subheader("Your documents")
80
- pdf_docs = st.file_uploader("Upload your PDFs here and click on 'Process'", accept_multiple_files=True)
 
 
81
  if st.button("Process"):
82
  with st.spinner("Processing..."):
83
  raw_text = get_pdf_text(pdf_docs)
 
1
+ # Requires the langchain_groq package (install with: pip install langchain_groq)
2
+ # Put GROQ_API_KEY in your .env file (e.g. GROQ_API_KEY=your_groq_api_key_here)
3
+ from dotenv import load_dotenv
4
  import os
5
+
6
+ # Load environment variables
7
+ load_dotenv()
8
+
9
+ # Access the Groq API key
10
+ groq_api_key = os.getenv("GROQ_API_KEY")
11
+ from langchain_groq import ChatGroq
12
+ from langchain.memory import ConversationBufferMemory
13
+ from langchain.chains import ConversationalRetrievalChain
14
+
15
+ def get_conversation_chain(vectorstore):
16
+ try:
17
+ # Initialize Groq's Chat model
18
+ llm = ChatGroq(
19
+ model="llama2-70b-4096", # You can use other models like "mixtral-8x7b-32768"
20
+ temperature=0.5,
21
+ groq_api_key=groq_api_key
22
+ )
23
+
24
+ # Set up memory for the conversation
25
+ memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
26
+
27
+ # Create the conversational retrieval chain
28
+ conversation_chain = ConversationalRetrievalChain.from_llm(
29
+ llm=llm,
30
+ retriever=vectorstore.as_retriever(),
31
+ memory=memory
32
+ )
33
+
34
+ logging.info("Conversation chain created successfully.")
35
+ return conversation_chain
36
+ except Exception as e:
37
+ logging.error(f"Error creating conversation chain: {e}")
38
+ st.error("An error occurred while setting up the conversation chain.")
39
+ import os
40
+ import logging
41
+ from dotenv import load_dotenv
42
  import streamlit as st
43
  from PyPDF2 import PdfReader
44
  from langchain.text_splitter import CharacterTextSplitter
 
46
  from langchain.vectorstores import FAISS
47
  from langchain.memory import ConversationBufferMemory
48
  from langchain.chains import ConversationalRetrievalChain
49
+ from langchain_groq import ChatGroq
50
 
51
  # Load environment variables
52
+ load_dotenv()
53
+
54
+ # Set up logging
55
+ logging.basicConfig(
56
+ level=logging.INFO,
57
+ format='%(asctime)s - %(levelname)s - %(message)s'
58
+ )
59
+
60
+ # Access API keys
61
+ cohere_api_key = os.getenv("COHERE_API_KEY")
62
+ groq_api_key = os.getenv("GROQ_API_KEY")
63
 
64
  # Function to extract text from PDF files
65
  def get_pdf_text(pdf_docs):
 
89
 
90
  # Function to set up the conversational retrieval chain
91
def get_conversation_chain(vectorstore):
    """Wire a Groq chat model, buffer memory, and the supplied vector store
    into a ConversationalRetrievalChain; failures are logged and shown in
    the Streamlit UI (returning None in that case)."""
    try:
        chain = ConversationalRetrievalChain.from_llm(
            llm=ChatGroq(
                model="llama2-70b-4096",  # You can use other models like "mixtral-8x7b-32768"
                temperature=0.5,
                groq_api_key=groq_api_key,
            ),
            retriever=vectorstore.as_retriever(),
            memory=ConversationBufferMemory(
                memory_key='chat_history', return_messages=True
            ),
        )
    except Exception as e:
        logging.error(f"Error creating conversation chain: {e}")
        st.error("An error occurred while setting up the conversation chain.")
    else:
        logging.info("Conversation chain created successfully.")
        return chain
115
 
116
  # Handle user input
117
  def handle_userinput(user_question):
118
  if st.session_state.conversation is not None:
119
  response = st.session_state.conversation({'question': user_question})
120
  st.session_state.chat_history = response['chat_history']
121
+
122
  for i, message in enumerate(st.session_state.chat_history):
123
  if i % 2 == 0:
124
  st.write(f"*User:* {message.content}")
 
130
  # Main function to run the Streamlit app
131
  def main():
132
  st.set_page_config(page_title="Chat with multiple PDFs", page_icon=":books:")
133
+
134
  if "conversation" not in st.session_state:
135
  st.session_state.conversation = None
136
  if "chat_history" not in st.session_state:
 
143
 
144
  with st.sidebar:
145
  st.subheader("Your documents")
146
+ pdf_docs = st.file_uploader(
147
+ "Upload your PDFs here and click on 'Process'", accept_multiple_files=True
148
+ )
149
  if st.button("Process"):
150
  with st.spinner("Processing..."):
151
  raw_text = get_pdf_text(pdf_docs)