Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -4,11 +4,11 @@ from dotenv import load_dotenv
|
|
| 4 |
import streamlit as st
|
| 5 |
from PyPDF2 import PdfReader
|
| 6 |
from langchain.text_splitter import CharacterTextSplitter
|
| 7 |
-
from
|
| 8 |
-
from
|
| 9 |
from langchain.memory import ConversationBufferMemory
|
| 10 |
from langchain.chains import ConversationalRetrievalChain
|
| 11 |
-
from
|
| 12 |
|
| 13 |
# Load environment variables
|
| 14 |
load_dotenv()
|
|
@@ -46,24 +46,24 @@ def get_vectorstore(text_chunks):
|
|
| 46 |
logging.info("Vectorstore created successfully.")
|
| 47 |
return vectorstore
|
| 48 |
except Exception as e:
|
| 49 |
-
logging.error(f"Error creating vectorstore: {e}")
|
| 50 |
-
st.error("An error occurred while creating the vectorstore
|
| 51 |
return None
|
| 52 |
|
| 53 |
def get_conversation_chain(vectorstore):
|
| 54 |
"""Set up the conversational retrieval chain."""
|
| 55 |
try:
|
| 56 |
-
|
| 57 |
conversation_chain = ConversationalRetrievalChain.from_llm(
|
| 58 |
-
llm=
|
| 59 |
retriever=vectorstore.as_retriever(),
|
| 60 |
memory=ConversationBufferMemory(memory_key='chat_history', return_messages=True)
|
| 61 |
)
|
| 62 |
logging.info("Conversation chain created successfully.")
|
| 63 |
return conversation_chain
|
| 64 |
except Exception as e:
|
| 65 |
-
logging.error(f"Error creating conversation chain: {e}")
|
| 66 |
-
st.error("An error occurred while setting up the conversation chain
|
| 67 |
return None
|
| 68 |
|
| 69 |
def handle_userinput(user_question):
|
|
|
|
| 4 |
import streamlit as st
|
| 5 |
from PyPDF2 import PdfReader
|
| 6 |
from langchain.text_splitter import CharacterTextSplitter
|
| 7 |
+
from langchain_community.vectorstores import FAISS
|
| 8 |
+
from langchain_community.embeddings import SentenceTransformerEmbeddings
|
| 9 |
from langchain.memory import ConversationBufferMemory
|
| 10 |
from langchain.chains import ConversationalRetrievalChain
|
| 11 |
+
from langchain.llms import OpenAI
|
| 12 |
|
| 13 |
# Load environment variables
|
| 14 |
load_dotenv()
|
|
|
|
| 46 |
logging.info("Vectorstore created successfully.")
|
| 47 |
return vectorstore
|
| 48 |
except Exception as e:
|
| 49 |
+
logging.error(f"Error creating vectorstore: {e}", exc_info=True)
|
| 50 |
+
st.error(f"An error occurred while creating the vectorstore: {e}")
|
| 51 |
return None
|
| 52 |
|
| 53 |
def get_conversation_chain(vectorstore):
    """Set up the conversational retrieval chain."""
    # Build an OpenAI-backed retrieval chain over the supplied FAISS
    # vectorstore, with a buffer memory so follow-up questions keep context.
    try:
        language_model = OpenAI(model_name="text-davinci-003", temperature=0.5)
        memory = ConversationBufferMemory(
            memory_key='chat_history',
            return_messages=True,
        )
        chain = ConversationalRetrievalChain.from_llm(
            llm=language_model,
            retriever=vectorstore.as_retriever(),
            memory=memory,
        )
        logging.info("Conversation chain created successfully.")
        return chain
    except Exception as e:
        # Surface the failure both in the app log and the Streamlit UI,
        # then signal failure to the caller with None.
        logging.error(f"Error creating conversation chain: {e}", exc_info=True)
        st.error(f"An error occurred while setting up the conversation chain: {e}")
        return None
|
| 68 |
|
| 69 |
def handle_userinput(user_question):
|