{
  "backend": "tokenizers",
  "bos_token": "<[begin▁of▁sentence]>",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<[end▁of▁sentence]>",
  "is_local": true,
  "model_max_length": 32768,
  "pad_token": "<[PAD▁TOKEN]>",
  "padding_side": "left",
  "sep_token": "<[SEP▁TOKEN]>",
  "tokenizer_class": "TokenizersBackend"
}