currentfear committed (verified)
Commit f071fed · 1 Parent(s): 52b2268

Training done

preprocessor_config.json CHANGED
@@ -19,8 +19,8 @@
   "processor_class": "DonutProcessor",
   "resample": 2,
   "rescale_factor": 0.00392156862745098,
-  "size": {
-    "height": 1280,
-    "width": 960
-  }
+  "size": [
+    960,
+    1280
+  ]
 }
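
The removed mapping and the added list describe the same target resolution; below is a minimal sketch, assuming the transformers DonutImageProcessor accepts the legacy list form of `size` in (width, height) order and normalizes it to a height/width mapping internally. The comparison is illustrative, not part of this commit.

from transformers import DonutImageProcessor

# Assumption: a list-valued `size` is interpreted as the legacy (width, height)
# order, so [960, 1280] and {"height": 1280, "width": 960} describe the same
# 960x1280 target size after normalization.
old_style = DonutImageProcessor(size={"height": 1280, "width": 960})
new_style = DonutImageProcessor(size=[960, 1280])

print(old_style.size)  # {"height": 1280, "width": 960}
print(new_style.size)  # expected to resolve to the same mapping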
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -163,18 +163,11 @@
163
  "eos_token": "</s>",
164
  "extra_special_tokens": {},
165
  "mask_token": "<mask>",
166
- "max_length": 768,
167
  "model_max_length": 1000000000000000019884624838656,
168
- "pad_to_multiple_of": null,
169
  "pad_token": "<pad>",
170
- "pad_token_type_id": 0,
171
- "padding_side": "right",
172
  "processor_class": "DonutProcessor",
173
  "sep_token": "</s>",
174
  "sp_model_kwargs": {},
175
- "stride": 0,
176
  "tokenizer_class": "XLMRobertaTokenizer",
177
- "truncation_side": "right",
178
- "truncation_strategy": "longest_first",
179
  "unk_token": "<unk>"
180
  }
 
163
  "eos_token": "</s>",
164
  "extra_special_tokens": {},
165
  "mask_token": "<mask>",
 
166
  "model_max_length": 1000000000000000019884624838656,
 
167
  "pad_token": "<pad>",
 
 
168
  "processor_class": "DonutProcessor",
169
  "sep_token": "</s>",
170
  "sp_model_kwargs": {},
 
171
  "tokenizer_class": "XLMRobertaTokenizer",
 
 
172
  "unk_token": "<unk>"
173
  }
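
The keys removed here (max_length, pad_to_multiple_of, pad_token_type_id, padding_side, stride, truncation_side, truncation_strategy) are per-call encoding arguments rather than persistent tokenizer configuration. A minimal sketch follows, assuming a local checkout of this repo as the load path and reusing the deleted values as explicit call-time arguments; it is an illustration, not part of the commit.

from transformers import AutoTokenizer

# Assumption: the tokenizer is loaded from a local checkout of this repo.
tokenizer = AutoTokenizer.from_pretrained("./")  # hypothetical path

# The removed settings can still be requested per call; the values below
# mirror the deleted config entries.
encoding = tokenizer(
    "example input text",
    max_length=768,              # previously "max_length": 768
    padding="max_length",        # padding side defaults to the right
    truncation="longest_first",  # previously "truncation_strategy": "longest_first"
    stride=0,                    # previously "stride": 0
)
print(len(encoding["input_ids"]))  # 768 when padded to max_length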