{
  "tokenizer_class": "PreTrainedTokenizerFast",
  "auto_map": {
    "AutoTokenizer": [
      "transformers",
      "PreTrainedTokenizerFast"
    ]
  },
  "bos_token": "<s>",
  "eos_token": "</s>",
  "unk_token": "<unk>",
  "pad_token": "<unk>",
  "model_max_length": 512,
  "special_tokens_map_file": null,
  "name_or_path": "mELECTRA",
  "tokenizer_type": "SentencePiece"
}