{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<pad>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50264": {
      "content": "<mask>",
      "lstrip": true,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": true,
  "cls_token": "<s>",
  "eos_token": "</s>",
  "errors": "replace",
  "mask_token": "<mask>",
  "model_max_length": 512,
  "pad_token": "<pad>",
  "sep_token": "</s>",
  "tokenizer_class": "RobertaTokenizer",
  "trim_offsets": true,
  "unk_token": "<unk>"
}