{ "version": "1.0", "model_type": "GPT-2", "tokenizer_class": "GPT2Tokenizer", "vocab_file": "vocab.json", "merges_file": "merges.txt", "special_tokens_map": { "pad_token": "", "unk_token": "", "cls_token": "", "sep_token": "", "mask_token": "" } }