{
  "backend": "tokenizers",
  "eos_token": "<|endoftext|>",
  "extra_special_tokens": [
    "[SIMPLE]",
    "[ENHANCED]"
  ],
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "[PAD]",
  "tokenizer_class": "TokenizersBackend"
}