{
"model_type": "gpt2",
"tokenizer_class": "GPT2Tokenizer",
"bos_token": "<go>",
"eos_token": "</s>",
"unk_token": "<unk>",
"pad_token": "<pad>",
"model_max_length": 1024,
"do_lower_case": false,
"special_tokens_map_file": "./special_tokens_map.json",
"tokenizer_file": "./tokenizer.json"
}