{ "tokenizer_class": "SentencePieceTokenizer", "model_max_length": 1024, "vocab_size": 32000, "unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "<pad>" }