{ "add_prefix_space": false, "additional_special_tokens": null, "backend": "tokenizers", "bos_token": "<|endoftext|>", "clean_up_tokenization_spaces": false, "eos_token": "<|endoftext|>", "errors": "replace", "extra_special_tokens": [ "<|endoftext|>", "<|im_start|>", "<|im_end|>", "", "", "", "", "", "", "", "", "", "", "", "", "", "" ], "is_local": true, "model_max_length": 1000000000000000019884624838656, "pad_token": null, "tokenizer_class": "GPT2Tokenizer", "unk_token": "<|endoftext|>", "vocab_size": 49152 }