{
  "tokenizer_class": "PreTrainedTokenizerFast",
  "bos_token": "<BOS>",
  "eos_token": "<EOS>",
  "unk_token": "<unk>",
  "pad_token": "<pad>",
  "additional_special_tokens": [
    "<SYSTEM>",
    "</SYSTEM>",
    "<USER>",
    "</USER>",
    "<ASSISTANT>",
    "</ASSISTANT>"
  ],
  "clean_up_tokenization_spaces": true,
  "model_max_length": 2048,
  "tokenizer_file": "tokenizer_v2.json"
}