{
  "backend": "tokenizers",
  "bos_token": "<|bos|>",
  "eos_token": "<|eos|>",
  "extra_special_tokens": [
    "<|pad|>",
    "<|unk|>",
    "<|bos|>",
    "<|eos|>",
    "<|start_of_speech|>",
    "<|end_of_speech|>",
    "<|start_of_text|>",
    "<|end_of_text|>",
    "<|speech|>",
    "<|sep|>",
    "<|male|>",
    "<|female|>",
    "<|other|>",
    "<|accent_us|>",
    "<|accent_uk|>",
    "<|accent_in|>",
    "<|start_of_mix|>",
    "<|end_of_mix|>",
    "<|start_of_phonemes|>",
    "<|end_of_phonemes|>",
    "<|puck|>",
    "<|kore|>",
    "<|conv|>"
  ],
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<|pad|>",
  "tokenizer_class": "TokenizersBackend",
  "unk_token": "<|unk|>"
}