{
  "tokenizer_class": "PreTrainedTokenizerFast",
  "model_input_names": [
    "input_ids"
  ],
  "special_tokens": {
    "bos_token": "<s>",
    "eos_token": "</s>",
    "unk_token": "<unk>",
    "pad_token": "<pad>"
  },
  "vocab_size": 20000,
  "tokenizer_file": "tokenizer.model"
}