{
  "tokenizer_class": "LlamaTokenizer",
  "vocab_size": 50000,
  "model_max_length": 2048
}