Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -6,7 +6,7 @@ from langchain_community.vectorstores import FAISS
|
|
| 6 |
from langchain.memory import ConversationBufferMemory
|
| 7 |
from langchain.chains import ConversationalRetrievalChain
|
| 8 |
from htmlTemplates import css, bot_template, user_template
|
| 9 |
-
from
|
| 10 |
import os
|
| 11 |
from langchain_huggingface import HuggingFaceEmbeddings
|
| 12 |
|
|
@@ -39,20 +39,27 @@ def get_vectorstore(text_chunks):
|
|
| 39 |
|
| 40 |
|
| 41 |
def get_conversation_chain(vectorstore):
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 45 |
model_kwargs={"temperature": 0.5, "max_length": 512}
|
| 46 |
)
|
|
|
|
| 47 |
memory = ConversationBufferMemory(
|
| 48 |
-
memory_key='chat_history', return_messages=True
|
|
|
|
|
|
|
| 49 |
conversation_chain = ConversationalRetrievalChain.from_llm(
|
| 50 |
llm=llm,
|
| 51 |
retriever=vectorstore.as_retriever(),
|
| 52 |
memory=memory
|
| 53 |
)
|
| 54 |
-
return conversation_chain
|
| 55 |
|
|
|
|
| 56 |
|
| 57 |
def handle_userinput(user_question):
|
| 58 |
if st.session_state.conversation is None:
|
|
|
|
| 6 |
from langchain.memory import ConversationBufferMemory
|
| 7 |
from langchain.chains import ConversationalRetrievalChain
|
| 8 |
from htmlTemplates import css, bot_template, user_template
|
| 9 |
+
from langchain_community.llms import HuggingFaceEndpoint
|
| 10 |
import os
|
| 11 |
from langchain_huggingface import HuggingFaceEmbeddings
|
| 12 |
|
|
|
|
| 39 |
|
| 40 |
|
| 41 |
def get_conversation_chain(vectorstore):
    """Build a conversational retrieval chain over *vectorstore*.

    Wires together a Hugging Face Inference-API LLM, a chat-history buffer
    memory, and the supplied vector store's retriever into a single
    ``ConversationalRetrievalChain``.

    Args:
        vectorstore: A vector store exposing ``as_retriever()``
            (a FAISS store elsewhere in this app).

    Returns:
        A ``ConversationalRetrievalChain`` ready to be called with
        ``{"question": ...}``.
    """
    # Re-export the token into the environment; falls back to "" when unset
    # (the endpoint request will then fail with an auth error at call time,
    # not here).
    os.environ["HUGGINGFACEHUB_API_TOKEN"] = os.getenv("HUGGINGFACEHUB_API_TOKEN", "")

    # FIX: HuggingFaceEndpoint validates its kwargs at construction —
    # recognized sampling parameters such as `temperature` must be passed
    # explicitly, NOT inside model_kwargs, or pydantic validation raises
    # ("Parameters {'temperature'} should be specified explicitly").
    # `max_length` is not an explicit field, so it stays in model_kwargs.
    llm = HuggingFaceEndpoint(
        repo_id="google/flan-t5-base",  # NOTE(review): seq2seq model — confirm the Inference API serves it for this task
        huggingfacehub_api_token=os.environ["HUGGINGFACEHUB_API_TOKEN"],
        temperature=0.5,
        model_kwargs={"max_length": 512},
    )

    # memory_key must match the chain's expected 'chat_history' input;
    # return_messages=True stores Message objects as chat chains require.
    memory = ConversationBufferMemory(
        memory_key='chat_history', return_messages=True
    )

    conversation_chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory,
    )
    return conversation_chain
|
| 63 |
|
| 64 |
def handle_userinput(user_question):
|
| 65 |
if st.session_state.conversation is None:
|