MrPotato committed on
Commit
1dd7648
·
1 Parent(s): 8b9c46e
Files changed (1) hide show
  1. docbank.py +3 -3
docbank.py CHANGED
@@ -211,7 +211,7 @@ class Docbank(datasets.GeneratorBasedBuilder):
211
  # print(filepath)
212
  key = 0
213
  for f in filepath:
214
- #print(f)
215
  f_id = f['id']
216
  f_fp_txt = f['filepath_txt']
217
  f_fp_img = f['filepath_img']
@@ -252,7 +252,7 @@ class Docbank(datasets.GeneratorBasedBuilder):
252
  except:
253
  continue
254
 
255
- #print('Processing...')
256
  processed = self.TOKENIZER(
257
  tokens,
258
  boxes=bboxes,
@@ -261,7 +261,7 @@ class Docbank(datasets.GeneratorBasedBuilder):
261
  return_offsets_mapping=False,
262
  return_attention_mask=False,
263
  )
264
- #print(processed)
265
 
266
  for chunk_id, index in enumerate(range(0, len(processed['input_ids']), self.CHUNK_SIZE)):
267
  split_tokens = processed['input_ids'][index:index + self.CHUNK_SIZE]
 
211
  # print(filepath)
212
  key = 0
213
  for f in filepath:
214
+ print(f)
215
  f_id = f['id']
216
  f_fp_txt = f['filepath_txt']
217
  f_fp_img = f['filepath_img']
 
252
  except:
253
  continue
254
 
255
+ print('Processing...')
256
  processed = self.TOKENIZER(
257
  tokens,
258
  boxes=bboxes,
 
261
  return_offsets_mapping=False,
262
  return_attention_mask=False,
263
  )
264
+ print(processed)
265
 
266
  for chunk_id, index in enumerate(range(0, len(processed['input_ids']), self.CHUNK_SIZE)):
267
  split_tokens = processed['input_ids'][index:index + self.CHUNK_SIZE]