{
    "tokenizer_class": "BertTokenizer",
    "do_lower_case": false,
    "strip_accents": false,
    "max_len": 512,
    "init_inputs": [],
    "model_max_length": 512,
    "special_tokens_map_file": null,
    "tokenizer_file": "tokenizer.json",
    "name_or_path": "",
    "tokenizer_type": "BPE"
}