cochi1706 committed on
Commit
bb13fba
·
verified ·
1 Parent(s): 807943c

Upload tokenizer

Browse files
Files changed (2) hide show
  1. tokenizer.json +2 -2
  2. tokenizer_config.json +7 -0
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:574de68a0f63f2004784a421c7d42c2b2786c05cb38542d2ed3525757a1f7fde
3
- size 11422932
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
3
+ size 11422654
tokenizer_config.json CHANGED
@@ -231,9 +231,16 @@
231
  "eos_token": "<|im_end|>",
232
  "errors": "replace",
233
  "extra_special_tokens": {},
 
234
  "model_max_length": 131072,
 
235
  "pad_token": "<|endoftext|>",
 
 
236
  "split_special_tokens": false,
 
237
  "tokenizer_class": "Qwen2Tokenizer",
 
 
238
  "unk_token": null
239
  }
 
231
  "eos_token": "<|im_end|>",
232
  "errors": "replace",
233
  "extra_special_tokens": {},
234
+ "max_length": 512,
235
  "model_max_length": 131072,
236
+ "pad_to_multiple_of": null,
237
  "pad_token": "<|endoftext|>",
238
+ "pad_token_type_id": 0,
239
+ "padding_side": "right",
240
  "split_special_tokens": false,
241
+ "stride": 0,
242
  "tokenizer_class": "Qwen2Tokenizer",
243
+ "truncation_side": "right",
244
+ "truncation_strategy": "longest_first",
245
  "unk_token": null
246
  }