{ "add_bos_token": false, "add_prefix_space": false, "added_tokens_decoder": { "0": { "content": "", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": true }, "1": { "content": "", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": true }, "2": { "content": "", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": true }, "4": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "5": { "content": "<|endoftext|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": true }, "100000": { "content": "<|startofpoem|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "100001": { "content": "<|endofpoem|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "100002": { "content": "<|topic|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "100003": { "content": "<|continue|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true } }, "additional_special_tokens": [ "<|startofpoem|>", "<|endofpoem|>", "<|topic|>", "<|continue|>" ], "bos_token": "", "clean_up_tokenization_spaces": true, "eos_token": "", "errors": "replace", "extra_special_tokens": {}, "mask_token": "", "model_max_length": 1000000000000000019884624838656, "pad_token": "", "tokenizer_class": "GPT2Tokenizer", "unk_token": "<|endoftext|>" }