TheS3b committed on
Commit
905f6a0
·
verified ·
1 Parent(s): 8be2afd

Upload tokenizer

Browse files
Files changed (2) hide show
  1. tokenizer.json +2 -2
  2. tokenizer_config.json +4 -0
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
3
- size 11422654
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:352a863cd2761388ccc58f1432467ba6a1037bf12df9069889b142fa246471f6
3
+ size 11422752
tokenizer_config.json CHANGED
@@ -231,9 +231,13 @@
231
  "eos_token": "<|endoftext|>",
232
  "errors": "replace",
233
  "extra_special_tokens": {},
 
234
  "model_max_length": 131072,
235
  "pad_token": "<|endoftext|>",
236
  "split_special_tokens": false,
 
237
  "tokenizer_class": "Qwen2Tokenizer",
 
 
238
  "unk_token": null
239
  }
 
231
  "eos_token": "<|endoftext|>",
232
  "errors": "replace",
233
  "extra_special_tokens": {},
234
+ "max_length": 512,
235
  "model_max_length": 131072,
236
  "pad_token": "<|endoftext|>",
237
  "split_special_tokens": false,
238
+ "stride": 0,
239
  "tokenizer_class": "Qwen2Tokenizer",
240
+ "truncation_side": "right",
241
+ "truncation_strategy": "longest_first",
242
  "unk_token": null
243
  }