{ "add_prefix_space": false, "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "unk_token": "<|endoftext|>", "pad_token": "<|endoftext|>", "clean_up_tokenization_spaces": true, "model_max_length": 1000000000000000019884624838656, "tokenizer_class": "GPT2Tokenizer", "vocab_size": 49152 }