{ "added_tokens_decoder": { "0": { "content": "<|pad|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "1": { "content": "<|bos|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "2": { "content": "<|eos|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "3": { "content": "<|user|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "4": { "content": "<|assistant|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "5": { "content": "<|sep|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "6": { "content": "<|unk|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true } }, "additional_special_tokens": [ "<|user|>", "<|assistant|>", "<|sep|>" ], "bos_token": "<|bos|>", "clean_up_tokenization_spaces": false, "eos_token": "<|eos|>", "extra_special_tokens": {}, "model_max_length": 1000000000000000019884624838656, "pad_token": "<|pad|>", "tokenizer_class": "PreTrainedTokenizerFast" }