{"errors": "replace", "unk_token": "<|endoftext|>", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "pad_token": null, "add_prefix_space": false, "add_bos_token": false, "model_max_length": 1024, "tokenizer_file": "/home/ryan/.cache/huggingface/hub/models--gpt2/snapshots/607a30d783dfa663caf39e06633721c8d4cfcd7e/tokenizer.json", "name_or_path": "gpt2"}