kushaltatariya committed on
Commit
00c08c9
·
verified ·
1 Parent(s): 21c062b

Upload tokenizer

Browse files
special_tokens_map.json CHANGED
@@ -1,6 +1,9 @@
1
  {
2
- "bos_token": "<s>",
3
- "eos_token": "</s>",
4
- "mask_token": "<mask>",
5
- "unk_token": "<unk>"
 
 
 
6
  }
 
1
  {
2
+ "bos_token": "[BOS]",
3
+ "cls_token": "[CLS]",
4
+ "eos_token": "[EOS]",
5
+ "mask_token": "[MASK]",
6
+ "pad_token": "[PAD]",
7
+ "sep_token": "[SEP]",
8
+ "unk_token": "[UNK]"
9
  }
tokenization.model CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:72b0445395e318088698aaef5c4d9b059fe017170e20cc2a3bd8b06d1efabde4
3
- size 436065
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dce53b01d512c2780cbe68d4e8305d5b00a85ea2e3f44dd5af096a431ad03600
3
+ size 436094
tokenizer.json CHANGED
@@ -5,7 +5,7 @@
5
  "added_tokens": [
6
  {
7
  "id": 0,
8
- "content": "<unk>",
9
  "single_word": false,
10
  "lstrip": false,
11
  "rstrip": false,
@@ -14,7 +14,7 @@
14
  },
15
  {
16
  "id": 1,
17
- "content": "<s>",
18
  "single_word": false,
19
  "lstrip": false,
20
  "rstrip": false,
@@ -23,7 +23,7 @@
23
  },
24
  {
25
  "id": 2,
26
- "content": "</s>",
27
  "single_word": false,
28
  "lstrip": false,
29
  "rstrip": false,
@@ -32,7 +32,34 @@
32
  },
33
  {
34
  "id": 3,
35
- "content": "<mask>",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  "single_word": false,
37
  "lstrip": false,
38
  "rstrip": false,
@@ -76,22 +103,34 @@
76
  },
77
  "model": {
78
  "type": "Unigram",
79
- "unk_id": 0,
80
  "vocab": [
81
  [
82
- "<unk>",
 
 
 
 
 
 
 
 
83
  0.0
84
  ],
85
  [
86
- "<s>",
87
  0.0
88
  ],
89
  [
90
- "</s>",
91
  0.0
92
  ],
93
  [
94
- "<mask>",
 
 
 
 
95
  0.0
96
  ],
97
  [
@@ -47738,18 +47777,6 @@
47738
  "▁Albany",
47739
  -12.655906677246094
47740
  ],
47741
- [
47742
- "▁Bedding",
47743
- -12.655911445617676
47744
- ],
47745
- [
47746
- "▁Ezinma",
47747
- -12.655913352966309
47748
- ],
47749
- [
47750
- "▁Bennani",
47751
- -12.655914306640623
47752
- ],
47753
  [
47754
  "Ò",
47755
  -12.70510959625244
 
5
  "added_tokens": [
6
  {
7
  "id": 0,
8
+ "content": "[PAD]",
9
  "single_word": false,
10
  "lstrip": false,
11
  "rstrip": false,
 
14
  },
15
  {
16
  "id": 1,
17
+ "content": "[UNK]",
18
  "single_word": false,
19
  "lstrip": false,
20
  "rstrip": false,
 
23
  },
24
  {
25
  "id": 2,
26
+ "content": "[BOS]",
27
  "single_word": false,
28
  "lstrip": false,
29
  "rstrip": false,
 
32
  },
33
  {
34
  "id": 3,
35
+ "content": "[EOS]",
36
+ "single_word": false,
37
+ "lstrip": false,
38
+ "rstrip": false,
39
+ "normalized": false,
40
+ "special": true
41
+ },
42
+ {
43
+ "id": 4,
44
+ "content": "[CLS]",
45
+ "single_word": false,
46
+ "lstrip": false,
47
+ "rstrip": false,
48
+ "normalized": false,
49
+ "special": true
50
+ },
51
+ {
52
+ "id": 5,
53
+ "content": "[SEP]",
54
+ "single_word": false,
55
+ "lstrip": false,
56
+ "rstrip": false,
57
+ "normalized": false,
58
+ "special": true
59
+ },
60
+ {
61
+ "id": 6,
62
+ "content": "[MASK]",
63
  "single_word": false,
64
  "lstrip": false,
65
  "rstrip": false,
 
103
  },
104
  "model": {
105
  "type": "Unigram",
106
+ "unk_id": 1,
107
  "vocab": [
108
  [
109
+ "[PAD]",
110
+ 0.0
111
+ ],
112
+ [
113
+ "[UNK]",
114
+ 0.0
115
+ ],
116
+ [
117
+ "[BOS]",
118
  0.0
119
  ],
120
  [
121
+ "[EOS]",
122
  0.0
123
  ],
124
  [
125
+ "[CLS]",
126
  0.0
127
  ],
128
  [
129
+ "[SEP]",
130
+ 0.0
131
+ ],
132
+ [
133
+ "[MASK]",
134
  0.0
135
  ],
136
  [
 
47777
  "▁Albany",
47778
  -12.655906677246094
47779
  ],
 
 
 
 
 
 
 
 
 
 
 
 
47780
  [
47781
  "Ò",
47782
  -12.70510959625244
tokenizer_config.json CHANGED
@@ -4,7 +4,7 @@
4
  "add_prefix_space": true,
5
  "added_tokens_decoder": {
6
  "0": {
7
- "content": "<unk>",
8
  "lstrip": false,
9
  "normalized": false,
10
  "rstrip": false,
@@ -12,7 +12,7 @@
12
  "special": true
13
  },
14
  "1": {
15
- "content": "<s>",
16
  "lstrip": false,
17
  "normalized": false,
18
  "rstrip": false,
@@ -20,7 +20,7 @@
20
  "special": true
21
  },
22
  "2": {
23
- "content": "</s>",
24
  "lstrip": false,
25
  "normalized": false,
26
  "rstrip": false,
@@ -28,7 +28,31 @@
28
  "special": true
29
  },
30
  "3": {
31
- "content": "<mask>",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  "lstrip": false,
33
  "normalized": false,
34
  "rstrip": false,
@@ -36,18 +60,24 @@
36
  "special": false
37
  }
38
  },
39
- "bos_token": "<s>",
40
- "bos_token_id": 1,
41
  "clean_up_tokenization_spaces": false,
42
- "eos_token": "</s>",
43
- "eos_token_id": 2,
 
 
44
  "legacy": true,
45
- "mask_token": "<mask>",
46
- "mask_token_id": 3,
47
  "model_max_length": 1000000000000000019884624838656,
 
 
 
 
48
  "sp_model_kwargs": {},
49
  "spaces_between_special_tokens": false,
50
  "tokenizer_class": "HfSentencePieceTokenizer",
51
- "unk_token": "<unk>",
52
- "unk_token_id": 0
53
  }
 
4
  "add_prefix_space": true,
5
  "added_tokens_decoder": {
6
  "0": {
7
+ "content": "[PAD]",
8
  "lstrip": false,
9
  "normalized": false,
10
  "rstrip": false,
 
12
  "special": true
13
  },
14
  "1": {
15
+ "content": "[UNK]",
16
  "lstrip": false,
17
  "normalized": false,
18
  "rstrip": false,
 
20
  "special": true
21
  },
22
  "2": {
23
+ "content": "[BOS]",
24
  "lstrip": false,
25
  "normalized": false,
26
  "rstrip": false,
 
28
  "special": true
29
  },
30
  "3": {
31
+ "content": "[EOS]",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false,
36
+ "special": true
37
+ },
38
+ "4": {
39
+ "content": "[CLS]",
40
+ "lstrip": false,
41
+ "normalized": false,
42
+ "rstrip": false,
43
+ "single_word": false,
44
+ "special": true
45
+ },
46
+ "5": {
47
+ "content": "[SEP]",
48
+ "lstrip": false,
49
+ "normalized": false,
50
+ "rstrip": false,
51
+ "single_word": false,
52
+ "special": true
53
+ },
54
+ "6": {
55
+ "content": "[MASK]",
56
  "lstrip": false,
57
  "normalized": false,
58
  "rstrip": false,
 
60
  "special": false
61
  }
62
  },
63
+ "bos_token": "[BOS]",
64
+ "bos_token_id": 2,
65
  "clean_up_tokenization_spaces": false,
66
+ "cls_token": "[CLS]",
67
+ "cls_token_id": 4,
68
+ "eos_token": "[EOS]",
69
+ "eos_token_id": 3,
70
  "legacy": true,
71
+ "mask_token": "[MASK]",
72
+ "mask_token_id": 6,
73
  "model_max_length": 1000000000000000019884624838656,
74
+ "pad_token": "[PAD]",
75
+ "pad_token_id": 0,
76
+ "sep_token": "[SEP]",
77
+ "sep_token_id": 5,
78
  "sp_model_kwargs": {},
79
  "spaces_between_special_tokens": false,
80
  "tokenizer_class": "HfSentencePieceTokenizer",
81
+ "unk_token": "[UNK]",
82
+ "unk_token_id": 1
83
  }