Spaces:
Runtime error
issue fixed
Browse files
app.py
CHANGED
|
@@ -15,7 +15,6 @@ from utils import get_qa_chain
|
|
| 15 |
load_dotenv()
|
| 16 |
|
| 17 |
openai.api_key = os.environ['OPENAI_API_KEY']
|
| 18 |
-
Path('docs').mkdir(parents=True, exist_ok=True)
|
| 19 |
|
| 20 |
if 'messages' not in st.session_state:
|
| 21 |
st.session_state.messages = []
|
|
@@ -38,7 +37,7 @@ def set_status():
|
|
| 38 |
def process_uploaded_file(uploaded_file):
|
| 39 |
if 'context' not in st.session_state:
|
| 40 |
logger.info(f'file uploaded {uploaded_file}')
|
| 41 |
-
upath = f'
|
| 42 |
logger.info(f'file saved to {upath}')
|
| 43 |
|
| 44 |
with open(upath, 'wb') as hndl:
|
|
|
|
| 15 |
load_dotenv()
|
| 16 |
|
| 17 |
openai.api_key = os.environ['OPENAI_API_KEY']
|
|
|
|
| 18 |
|
| 19 |
if 'messages' not in st.session_state:
|
| 20 |
st.session_state.messages = []
|
|
|
|
| 37 |
def process_uploaded_file(uploaded_file):
|
| 38 |
if 'context' not in st.session_state:
|
| 39 |
logger.info(f'file uploaded {uploaded_file}')
|
| 40 |
+
upath = f'docs/{uploaded_file.name}'
|
| 41 |
logger.info(f'file saved to {upath}')
|
| 42 |
|
| 43 |
with open(upath, 'wb') as hndl:
|
config.py
CHANGED
|
@@ -1,9 +1,12 @@
|
|
| 1 |
from pathlib import Path
|
| 2 |
-
|
| 3 |
|
| 4 |
class Config:
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
|
|
|
|
|
|
|
|
|
|
|
from pathlib import Path


class Config:
    """Central configuration for document ingestion and the chat model."""

    # Text-splitting parameters consumed by RecursiveCharacterTextSplitter
    # in utils.create_vectordb (see the utils.py hunk of this change).
    chunk_size = 1000
    chunk_overlap = 100

    # Directory where uploaded documents are saved, and the path of the
    # serialized vector store inside it.
    vectorstore_dir = 'docs'
    vectorstore_path = f'{vectorstore_dir}/vectorstore.pkl'

    # OpenAI chat model name passed to the QA chain.
    chatgpt_model_name = 'gpt-3.5-turbo'


# Remove any stale serialized vector store at import time so it gets rebuilt
# from freshly uploaded documents.
# BUG FIX: the previous code called unlink() on `vectorstore_dir`, which is a
# DIRECTORY.  Path.unlink() removes files only and raises IsADirectoryError
# when the directory exists, crashing the app on startup; deleting the pickle
# file itself is the evident intent (missing_ok=True keeps first-run safe).
Path(Config.vectorstore_path).unlink(missing_ok=True)
|
utils.py
CHANGED
|
@@ -43,8 +43,8 @@ def create_vectordb(file_path):
|
|
| 43 |
|
| 44 |
print('Splitting text...')
|
| 45 |
text_splitter = RecursiveCharacterTextSplitter(
|
| 46 |
-
chunk_size=1000,
|
| 47 |
-
chunk_overlap=100,
|
| 48 |
length_function=len,
|
| 49 |
)
|
| 50 |
documents = text_splitter.split_documents(raw_documents)
|
|
|
|
| 43 |
|
| 44 |
print('Splitting text...')
|
| 45 |
text_splitter = RecursiveCharacterTextSplitter(
|
| 46 |
+
chunk_size=Config.chunk_size,
|
| 47 |
+
chunk_overlap=Config.chunk_overlap,
|
| 48 |
length_function=len,
|
| 49 |
)
|
| 50 |
documents = text_splitter.split_documents(raw_documents)
|