Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -13,7 +13,9 @@
|
|
| 13 |
# Could not import tiktoken python package. This is needed in order for OpenAIEmbeddings to work. Please install it with `pip install tiktoken`.
|
| 14 |
# run the app using the following command in anaconda VS Code terminal
|
| 15 |
# streamlit run app.py
|
| 16 |
-
|
|
|
|
|
|
|
| 17 |
|
| 18 |
import streamlit as st
|
| 19 |
from dotenv import load_dotenv
|
|
@@ -28,6 +30,14 @@ from langchain.memory import ConversationBufferMemory
|
|
| 28 |
from langchain.chains import ConversationalRetrievalChain
|
| 29 |
from htmlTemplates import css, bot_template, user_template
|
| 30 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 31 |
def get_pdf_text(pdf_docs):
|
| 32 |
text =""
|
| 33 |
for pdf in pdf_docs:
|
|
@@ -132,17 +142,18 @@ def main():
|
|
| 132 |
# Chose to use the best embedding model - instructor_xl ranked higher than OpenAI's embeddings on the Hugging Face leaderboard
|
| 133 |
# https://huggingface.co/spaces/mteb/leaderboard
|
| 134 |
|
|
|
|
| 135 |
vectorstore = get_vectorstore(text_chunks)
|
|
|
|
| 136 |
|
|
|
|
| 137 |
# create conversation chain
|
| 138 |
st.session_state.conversation = get_conversation_chain(vectorstore)
|
| 139 |
#conversation = get_conversation_chain(vectorstore)
|
| 140 |
-
|
|
|
|
| 141 |
#st.session_state.conversation
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
|
| 145 |
-
|
| 146 |
|
| 147 |
if __name__ == '__main__':
|
| 148 |
main()
|
|
|
|
| 13 |
# Could not import tiktoken python package. This is needed in order for OpenAIEmbeddings to work. Please install it with `pip install tiktoken`.
|
| 14 |
# run the app using the following command in anaconda VS Code terminal
|
| 15 |
# streamlit run app.py
|
| 16 |
+
import os
|
| 17 |
+
import time
|
| 18 |
+
from loguru import logger
|
| 19 |
|
| 20 |
import streamlit as st
|
| 21 |
from dotenv import load_dotenv
|
|
|
|
| 30 |
from langchain.chains import ConversationalRetrievalChain
|
| 31 |
from htmlTemplates import css, bot_template, user_template
|
| 32 |
|
| 33 |
+
os.environ["TZ"] = "Asia/Shanghai"
|
| 34 |
+
try:
|
| 35 |
+
time.tzset()
|
| 36 |
+
except Exception:
|
| 37 |
+
... # Windows
|
| 38 |
+
logger.warning("Windows, cant set time.tzset()")
|
| 39 |
+
|
| 40 |
+
|
| 41 |
def get_pdf_text(pdf_docs):
|
| 42 |
text =""
|
| 43 |
for pdf in pdf_docs:
|
|
|
|
| 142 |
# Chose to use the best embedding model - instructor_xl ranked higher than OpenAI's embeddings on the Hugging Face leaderboard
|
| 143 |
# https://huggingface.co/spaces/mteb/leaderboard
|
| 144 |
|
| 145 |
+
logger.info("Start get_vectorstore")
|
| 146 |
vectorstore = get_vectorstore(text_chunks)
|
| 147 |
+
logger.info("Done get_vectorstore")
|
| 148 |
|
| 149 |
+
logger.info("Start create conversation chain")
|
| 150 |
# create conversation chain
|
| 151 |
st.session_state.conversation = get_conversation_chain(vectorstore)
|
| 152 |
#conversation = get_conversation_chain(vectorstore)
|
| 153 |
+
logger.info("Done create conversation chain")
|
| 154 |
+
|
| 155 |
#st.session_state.conversation
|
| 156 |
+
|
|
|
|
|
|
|
|
|
|
| 157 |
|
| 158 |
if __name__ == '__main__':
|
| 159 |
main()
|