{
  "tokenizer_class": "PreTrainedTokenizerFast",
  "model_max_length": 1000000000,
  "bos_token": "",
  "eos_token": "",
  "unk_token": "",
  "pad_token": "",
  "mask_token": "",
  "clean_up_tokenization_spaces": false
}