MrPotato committed on
Commit
b670385
·
1 Parent(s): 4193f56

batched tokenization

Browse files
Files changed (1) hide show
  1. docbank.py +17 -17
docbank.py CHANGED
@@ -277,23 +277,23 @@ class Docbank(datasets.GeneratorBasedBuilder):
277
  # )
278
  #print(processed)
279
 
280
- for chunk_id, index in enumerate(range(0, len(tokens), self.CHUNK_SIZE)):
281
- split_tokens = tokens[index:index + self.CHUNK_SIZE]
282
- split_bboxes = bboxes[index:index + self.CHUNK_SIZE]
283
- # split_rgbs = rgbs[index:index + self.CHUNK_SIZE]
284
- # split_fonts = fonts[index:index + self.CHUNK_SIZE]
285
- split_labels = labels[index:index + self.CHUNK_SIZE]
286
 
287
  #tokenized = self.TOKENIZER(processed['words'], boxes=processed['boxes'])
288
 
289
- yield key, {
290
- "id": f"{f_id}_{chunk_id}",
291
- 'words': split_tokens,
292
- "bbox": split_bboxes,
293
- # "RGBs": split_rgbs,
294
- # "fonts": split_fonts,
295
- #"image": image,
296
- "original_image": original_image,
297
- "labels": split_labels
298
- }
299
- key += 1
 
277
  # )
278
  #print(processed)
279
 
280
+ # for chunk_id, index in enumerate(range(0, len(tokens), self.CHUNK_SIZE)):
281
+ # split_tokens = tokens[index:index + self.CHUNK_SIZE]
282
+ # split_bboxes = bboxes[index:index + self.CHUNK_SIZE]
283
+ # # split_rgbs = rgbs[index:index + self.CHUNK_SIZE]
284
+ # # split_fonts = fonts[index:index + self.CHUNK_SIZE]
285
+ # split_labels = labels[index:index + self.CHUNK_SIZE]
286
 
287
  #tokenized = self.TOKENIZER(processed['words'], boxes=processed['boxes'])
288
 
289
+ yield key, {
290
+ "id": f"file_{f_id}",
291
+ 'words': tokens,
292
+ "bbox": bboxes,
293
+ # "RGBs": split_rgbs,
294
+ # "fonts": split_fonts,
295
+ #"image": image,
296
+ "original_image": original_image,
297
+ "labels": labels
298
+ }
299
+ key += 1