abcorrea committed
Commit 67001d7 · verified · 1 Parent(s): 2760bb7

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json +2 -14
  2. tokenizer_config.json +2 -2
special_tokens_map.json CHANGED
@@ -14,18 +14,6 @@
     "<|image_pad|>",
     "<|video_pad|>"
   ],
-  "eos_token": {
-    "content": "<|im_end|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "eos_token": "<|im_end|>",
+  "pad_token": "<|fim_pad|>"
 }
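
Note: the removed dict entries are the serialized form of an AddedToken, which spells out the matching flags explicitly; the plain-string form that replaces them leaves those flags to the tokenizer's defaults. A minimal sketch of what the old dict encoded, using the AddedToken class from transformers (shown only for illustration):

    from transformers import AddedToken

    # Equivalent of the removed "eos_token" dict: each flag below mirrors
    # one key of the old JSON object.
    eos = AddedToken(
        "<|im_end|>",
        lstrip=False,      # keep whitespace to the left of the token
        rstrip=False,      # keep whitespace to the right of the token
        normalized=False,  # match the raw token, before normalization
        single_word=False, # allow matches adjacent to other characters
    )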
tokenizer_config.json CHANGED
@@ -231,8 +231,8 @@
   "eos_token": "<|im_end|>",
   "errors": "replace",
   "extra_special_tokens": {},
-  "model_max_length": 131072,
-  "pad_token": "<|endoftext|>",
+  "model_max_length": 32768,
+  "pad_token": "<|fim_pad|>",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",
   "unk_token": null