Upload tokenizer
Browse files
- tokenizer.json +1 -0
- tokenizer_config.json +0 -1
tokenizer.json
CHANGED
|
@@ -175,6 +175,7 @@
|
|
| 175 |
"end_of_word_suffix": null,
|
| 176 |
"fuse_unk": true,
|
| 177 |
"byte_fallback": true,
|
|
|
|
| 178 |
"vocab": {
|
| 179 |
"<unk>": 0,
|
| 180 |
"<s>": 1,
|
|
|
|
| 175 |
"end_of_word_suffix": null,
|
| 176 |
"fuse_unk": true,
|
| 177 |
"byte_fallback": true,
|
| 178 |
+
"ignore_merges": false,
|
| 179 |
"vocab": {
|
| 180 |
"<unk>": 0,
|
| 181 |
"<s>": 1,
|
tokenizer_config.json
CHANGED
|
@@ -36,7 +36,6 @@
|
|
| 36 |
"chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
|
| 37 |
"clean_up_tokenization_spaces": false,
|
| 38 |
"eos_token": "</s>",
|
| 39 |
-
"legacy": true,
|
| 40 |
"model_max_length": 1000000000000000019884624838656,
|
| 41 |
"pad_token": "</s>",
|
| 42 |
"sp_model_kwargs": {},
|
|
|
|
| 36 |
"chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
|
| 37 |
"clean_up_tokenization_spaces": false,
|
| 38 |
"eos_token": "</s>",
|
|
|
|
| 39 |
"model_max_length": 1000000000000000019884624838656,
|
| 40 |
"pad_token": "</s>",
|
| 41 |
"sp_model_kwargs": {},
|