Add chat_template for eval compatibility
Browse files — tokenizer_config.json: +3 -2
tokenizer_config.json
CHANGED
|
@@ -219,5 +219,6 @@
|
|
| 219 |
"model_max_length": 1000000000000000019884624838656,
|
| 220 |
"pad_token": null,
|
| 221 |
"tokenizer_class": "GPTNeoXTokenizer",
|
| 222 |
-
"unk_token": "<|endoftext|>"
|
| 223 |
-
}
|
|
|
|
|
|
| 219 |
"model_max_length": 1000000000000000019884624838656,
|
| 220 |
"pad_token": null,
|
| 221 |
"tokenizer_class": "GPTNeoXTokenizer",
|
| 222 |
+
"unk_token": "<|endoftext|>",
|
| 223 |
+
"chat_template": "{{ bos_token }}{% for message in messages %}{% if message[\"role\"] == \"system\" %}{{ \"<|system|>\n\" + message[\"content\"] + \"\n\" }}{% elif message[\"role\"] == \"user\" %}{{ \"<|user|>\n\" + message[\"content\"] + \"\n\" }}{% elif message[\"role\"] == \"assistant\" %}{% if not loop.last %}{{ \"<|assistant|>\n\" + message[\"content\"] + eos_token + \"\n\" }}{% else %}{{ \"<|assistant|>\n\" + message[\"content\"] + eos_token }}{% endif %}{% endif %}{% if loop.last and add_generation_prompt %}{{ \"<|assistant|>\n\" }}{% endif %}{% endfor %}"
|
| 224 |
+
}
|