Upload tokenizer
Files changed:
- tokenizer.json (+10 -1)
- tokenizer_config.json (+4 -0)
tokenizer.json

@@ -1,7 +1,16 @@
 {
   "version": "1.0",
   "truncation": null,
-  "padding": null,
+  "padding": {
+    "strategy": {
+      "Fixed": 2986
+    },
+    "direction": "Left",
+    "pad_to_multiple_of": null,
+    "pad_id": 151644,
+    "pad_type_id": 0,
+    "pad_token": "<|im_start|>"
+  },
   "added_tokens": [
     {
       "id": 151643,
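For reference, a padding block like the one added above can be produced with the `tokenizers` library's enable_padding call. This is a minimal sketch, not part of the commit; the file paths are assumptions.

from tokenizers import Tokenizer

# Load the serialized tokenizer (path assumed for illustration).
tokenizer = Tokenizer.from_file("tokenizer.json")

# Enable fixed-length left padding matching the committed values:
# strategy Fixed(2986), direction Left, pad token <|im_start|> (id 151644).
tokenizer.enable_padding(
    direction="left",
    pad_id=151644,
    pad_type_id=0,
    pad_token="<|im_start|>",
    length=2986,
)

# Re-serializing writes a "padding" block like the one in the diff above.
tokenizer.save("tokenizer.json")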
tokenizer_config.json

@@ -35,8 +35,12 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|endoftext|>",
   "errors": "replace",
+  "max_length": 2986,
   "model_max_length": 32768,
+  "pad_to_multiple_of": null,
   "pad_token": "<|im_start|>",
+  "pad_token_type_id": 0,
+  "padding_side": "left",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",
   "unk_token": null
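At load time, transformers picks up padding_side and pad_token from tokenizer_config.json. A minimal sketch of the resulting behavior, assuming the files are loaded through AutoTokenizer; the repo id is a placeholder, not this repository's actual name.

from transformers import AutoTokenizer

# Placeholder repo id for wherever these tokenizer files are uploaded.
tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

# padding_side="left" and pad_token="<|im_start|>" come from
# tokenizer_config.json, so padding is prepended rather than appended.
batch = tokenizer(
    ["Hello world"],
    padding="max_length",
    max_length=2986,
)
print(len(batch["input_ids"][0]))  # 2986, filled on the left with id 151644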