Chiquitin committed on
Commit
a6416c8
·
1 Parent(s): 9040523

fix last version of sentence segmenter

Browse files
Files changed (1) hide show
  1. src/tokenizer.py +4 -2
src/tokenizer.py CHANGED
@@ -190,13 +190,14 @@ class SentenceSegmenter:
190
 
191
  for article in texts:
192
  doc = self.nlp(article)
193
- for idx, sent in enumerate(doc.sents):
194
 
 
195
  if idx == 0:
196
  # Article opener
197
  sentence_candidates.append(1)
198
  sentence_boundaries.append(1)
199
- elif sent.text.endswith("\n"):
200
  # Paragraph break candidate
201
  sentence_candidates.append(1)
202
  sentence_boundaries.append(0)
@@ -204,6 +205,7 @@ class SentenceSegmenter:
204
  sentence_candidates.append(0)
205
  sentence_boundaries.append(0)
206
 
 
207
  sentences.append(sent.text.replace('\n', '').strip())
208
  sentence_masking.append(1)
209
 
 
190
 
191
  for article in texts:
192
  doc = self.nlp(article)
193
+ last_was_jump = False
194
 
195
+ for idx, sent in enumerate(doc.sents):
196
  if idx == 0:
197
  # Article opener
198
  sentence_candidates.append(1)
199
  sentence_boundaries.append(1)
200
+ elif last_was_jump:
201
  # Paragraph break candidate
202
  sentence_candidates.append(1)
203
  sentence_boundaries.append(0)
 
205
  sentence_candidates.append(0)
206
  sentence_boundaries.append(0)
207
 
208
+ last_was_jump = sent.text.endswith("\n")
209
  sentences.append(sent.text.replace('\n', '').strip())
210
  sentence_masking.append(1)
211