Tokenizer configuration (320 Bytes, commit b3608e9):
{
  "tokenizer_class": "GPT2Tokenizer",
  "model_max_length": 1024,
  "pad_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "bos_token": "<|endoftext|>",
  "unk_token": "<|endoftext|>",
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<think>",
    "<think/>",
    "<CONTINUE>"
  ]
}
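
This configuration maps every standard special token (pad, eos, bos, unk) to GPT-2's <|endoftext|>, caps inputs at 1024 tokens via model_max_length, and registers the ChatML chat markers plus thinking-trace tokens as additional special tokens so they encode to single ids rather than being split by BPE. A minimal loading sketch with Hugging Face transformers follows; the checkpoint path "./checkpoint" is a placeholder, not something named in the config.

# A minimal sketch, assuming this tokenizer_config.json sits in a local
# checkpoint directory; "./checkpoint" is a hypothetical path.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")

# pad/eos/bos/unk all resolve to the same <|endoftext|> string (one shared id).
assert tok.pad_token == tok.eos_token == tok.bos_token == "<|endoftext|>"
assert tok.model_max_length == 1024

# Additional special tokens stay atomic: each encodes to exactly one id
# instead of being broken apart by the GPT-2 BPE merges.
for token in ["<|im_start|>", "<|im_end|>", "<think>", "<think/>", "<CONTINUE>"]:
    ids = tok.encode(token, add_special_tokens=False)
    print(token, ids)  # expect a single-element list per token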