{
  "clean_up_tokenization_spaces": true,
  "model_max_length": 2048,
  "special_tokens": [
    "<|pad|>",
    "<|endoftext|>",
    "<|beginoftext|>",
    "<|unk|>"
  ],
  "tokenizer_class": "PreTrainedTokenizerFast"
}
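
For context, a tokenizer configured this way can be loaded with the Hugging Face transformers library. A minimal sketch, assuming this tokenizer_config.json sits in a local model directory (the path below is hypothetical) alongside the tokenizer.json file that PreTrainedTokenizerFast requires:

```python
from transformers import AutoTokenizer

# Hypothetical local directory containing this tokenizer_config.json
# together with its companion tokenizer.json.
tokenizer = AutoTokenizer.from_pretrained("./my-model")

# model_max_length comes from the config above.
print(tokenizer.model_max_length)  # 2048

# Tokenize a sample string with the fast tokenizer.
print(tokenizer.tokenize("hello world"))
```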