{
  "add_prefix_space": false,
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "model_max_length": 1024,
  "name_or_path": "gpt2",
  "special_tokens_map_file": "special_tokens_map.json",
  "tokenizer_class": "GPT2Tokenizer",
  "chat_template": "System: {system}\nUser: {user}\nAssistant:"
}