```json
{
  "tokenizer_class": "GPT2Tokenizer",
  "vocab_size": 50257,
  "model_max_length": 512,
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "unk_token": "<|endoftext|>",
  "pad_token": "<|endoftext|>",
  "add_prefix_space": false,
  "do_lower_case": false
}
```
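This file has the shape of a Hugging Face `tokenizer_config.json`: it names the tokenizer class, maps every special token (`bos`, `eos`, `unk`, `pad`) to GPT-2's single special token `<|endoftext|>`, and caps inputs at `model_max_length = 512`. Below is a minimal sketch of how such a config is consumed, assuming the `transformers` library and a local model directory (the path `./my-model` is hypothetical) that also contains the `vocab.json` and `merges.txt` files GPT2Tokenizer needs:

```python
from transformers import GPT2Tokenizer

# Hypothetical local directory holding this tokenizer_config.json
# alongside vocab.json and merges.txt.
tokenizer = GPT2Tokenizer.from_pretrained("./my-model")

# The config maps all special tokens to <|endoftext|>, so these coincide.
assert tokenizer.eos_token == tokenizer.pad_token == "<|endoftext|>"

# model_max_length (512 here) is picked up from the config and used
# as the truncation limit when truncation is enabled.
enc = tokenizer(
    "Hello world",
    truncation=True,
    max_length=tokenizer.model_max_length,
)
print(enc["input_ids"])
```

Reusing `<|endoftext|>` as the pad token is a common workaround for GPT-2, which ships with no dedicated padding token; setting it in the config avoids having to call `tokenizer.pad_token = tokenizer.eos_token` at load time.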