Spaces:
Sleeping
Sleeping
Davide Panza
committed on
Update app/backend/text_processing.py
Browse files
app/backend/text_processing.py
CHANGED
|
@@ -1,11 +1,9 @@
|
|
| 1 |
from nltk.tokenize import sent_tokenize
|
| 2 |
import nltk
|
| 3 |
import streamlit as st
|
|
|
|
| 4 |
|
| 5 |
-
|
| 6 |
-
nltk.data.find("tokenizers/punkt")
|
| 7 |
-
except LookupError:
|
| 8 |
-
nltk.download("punkt")
|
| 9 |
|
| 10 |
def text_chunking(text, max_words=750, min_words=400, overlap_sentences=5):
|
| 11 |
"""
|
|
|
|
| 1 |
from nltk.tokenize import sent_tokenize
|
| 2 |
import nltk
|
| 3 |
import streamlit as st
|
| 4 |
+
import os
|
| 5 |
|
| 6 |
+
nltk.data.path.append(os.path.join(os.path.dirname(__file__), "..", "nltk_data"))
|
|
|
|
|
|
|
|
|
|
| 7 |
|
| 8 |
def text_chunking(text, max_words=750, min_words=400, overlap_sentences=5):
|
| 9 |
"""
|