sadassa17 committed on
Commit
545ae05
·
1 Parent(s): f6f149c

Upload folder using huggingface_hub

Browse files
bert/config.json ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "madlag/bert-large-uncased-whole-word-masking-finetuned-squadv2",
3
+ "architectures": [
4
+ "BertForQuestionAnswering"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "classifier_dropout": null,
8
+ "gradient_checkpointing": false,
9
+ "hidden_act": "gelu",
10
+ "hidden_dropout_prob": 0.1,
11
+ "hidden_size": 1024,
12
+ "initializer_range": 0.02,
13
+ "intermediate_size": 4096,
14
+ "layer_norm_eps": 1e-12,
15
+ "max_position_embeddings": 512,
16
+ "model_type": "bert",
17
+ "num_attention_heads": 16,
18
+ "num_hidden_layers": 24,
19
+ "pad_token_id": 0,
20
+ "position_embedding_type": "absolute",
21
+ "torch_dtype": "float32",
22
+ "transformers_version": "4.30.2",
23
+ "type_vocab_size": 2,
24
+ "use_cache": true,
25
+ "vocab_size": 30522
26
+ }
bert/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6f5bc82b2230fc37f8b7a0ee803ee1f16b4c5c7105877d357f88ab22c70b7458
3
+ size 1336515565
bert_onnx_pruned/config.json ADDED
@@ -0,0 +1,265 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "madlag/bert-large-uncased-wwm-squadv2-x2.63-f82.6-d16-hybrid-v1",
3
+ "architectures": [
4
+ "BertForQuestionAnswering"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "classifier_dropout": null,
8
+ "gradient_checkpointing": false,
9
+ "hidden_act": "gelu",
10
+ "hidden_dropout_prob": 0.1,
11
+ "hidden_size": 1024,
12
+ "initializer_range": 0.02,
13
+ "intermediate_size": 4096,
14
+ "layer_norm_eps": 1e-12,
15
+ "max_position_embeddings": 512,
16
+ "model_type": "bert",
17
+ "num_attention_heads": 16,
18
+ "num_hidden_layers": 24,
19
+ "pad_token_id": 0,
20
+ "position_embedding_type": "absolute",
21
+ "pruned_heads": {
22
+ "0": [
23
+ 0,
24
+ 1,
25
+ 3,
26
+ 4,
27
+ 5,
28
+ 8,
29
+ 9,
30
+ 13,
31
+ 15
32
+ ],
33
+ "1": [
34
+ 0,
35
+ 1,
36
+ 3,
37
+ 5,
38
+ 7,
39
+ 9,
40
+ 10,
41
+ 13,
42
+ 14
43
+ ],
44
+ "2": [
45
+ 0,
46
+ 1,
47
+ 4,
48
+ 6,
49
+ 7,
50
+ 8,
51
+ 9,
52
+ 10,
53
+ 11,
54
+ 12,
55
+ 13,
56
+ 14,
57
+ 15
58
+ ],
59
+ "3": [
60
+ 0,
61
+ 3,
62
+ 5,
63
+ 6,
64
+ 7,
65
+ 8,
66
+ 9,
67
+ 11,
68
+ 13,
69
+ 14,
70
+ 15
71
+ ],
72
+ "4": [
73
+ 0,
74
+ 2,
75
+ 3,
76
+ 4,
77
+ 5,
78
+ 6,
79
+ 7,
80
+ 9,
81
+ 10,
82
+ 11,
83
+ 12,
84
+ 14
85
+ ],
86
+ "5": [
87
+ 1,
88
+ 3,
89
+ 4,
90
+ 5,
91
+ 6,
92
+ 7,
93
+ 8,
94
+ 9,
95
+ 12,
96
+ 13,
97
+ 14,
98
+ 15
99
+ ],
100
+ "6": [
101
+ 0,
102
+ 1,
103
+ 2,
104
+ 3,
105
+ 4,
106
+ 7,
107
+ 8,
108
+ 9,
109
+ 10,
110
+ 11,
111
+ 12,
112
+ 15
113
+ ],
114
+ "7": [
115
+ 0,
116
+ 3,
117
+ 4,
118
+ 5,
119
+ 8,
120
+ 9,
121
+ 10,
122
+ 11,
123
+ 12,
124
+ 13,
125
+ 14
126
+ ],
127
+ "8": [
128
+ 3,
129
+ 4,
130
+ 5,
131
+ 7,
132
+ 8,
133
+ 9,
134
+ 10,
135
+ 11,
136
+ 12
137
+ ],
138
+ "9": [
139
+ 0,
140
+ 1,
141
+ 2,
142
+ 3,
143
+ 5,
144
+ 6,
145
+ 7,
146
+ 9,
147
+ 10,
148
+ 13,
149
+ 14,
150
+ 15
151
+ ],
152
+ "10": [
153
+ 1,
154
+ 2,
155
+ 4,
156
+ 5,
157
+ 6,
158
+ 8,
159
+ 11,
160
+ 13
161
+ ],
162
+ "11": [
163
+ 0,
164
+ 2,
165
+ 5,
166
+ 6,
167
+ 7,
168
+ 8,
169
+ 10,
170
+ 12,
171
+ 15
172
+ ],
173
+ "12": [
174
+ 0,
175
+ 2,
176
+ 6,
177
+ 8,
178
+ 9,
179
+ 11,
180
+ 13
181
+ ],
182
+ "13": [
183
+ 2,
184
+ 6,
185
+ 10,
186
+ 12,
187
+ 15
188
+ ],
189
+ "14": [
190
+ 1,
191
+ 5,
192
+ 6,
193
+ 10,
194
+ 11,
195
+ 15
196
+ ],
197
+ "15": [
198
+ 0,
199
+ 9
200
+ ],
201
+ "16": [
202
+ 5,
203
+ 7
204
+ ],
205
+ "17": [
206
+ 1,
207
+ 4,
208
+ 8,
209
+ 12,
210
+ 14
211
+ ],
212
+ "18": [
213
+ 3,
214
+ 4,
215
+ 11
216
+ ],
217
+ "19": [
218
+ 0,
219
+ 5,
220
+ 12
221
+ ],
222
+ "20": [
223
+ 0,
224
+ 4,
225
+ 10,
226
+ 12
227
+ ],
228
+ "21": [
229
+ 0,
230
+ 2,
231
+ 3,
232
+ 4,
233
+ 8,
234
+ 11,
235
+ 12,
236
+ 15
237
+ ],
238
+ "22": [
239
+ 0,
240
+ 1,
241
+ 3,
242
+ 4,
243
+ 7,
244
+ 9,
245
+ 10,
246
+ 11,
247
+ 13,
248
+ 15
249
+ ],
250
+ "23": [
251
+ 2,
252
+ 4,
253
+ 8,
254
+ 9,
255
+ 10,
256
+ 13,
257
+ 14,
258
+ 15
259
+ ]
260
+ },
261
+ "transformers_version": "4.30.2",
262
+ "type_vocab_size": 2,
263
+ "use_cache": true,
264
+ "vocab_size": 30522
265
+ }
bert_onnx_pruned/model.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d217ca0146005fd6942dbe3770da96821d548efb447f68ef27a4ad2a7915687e
3
+ size 1137529058
bert_onnx_pruned/optimized.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a8ecdff75f917b2f4805c7d6600009361c82a3d451a7403b057982379e1788a8
3
+ size 1137065077
bert_onnx_pruned/optimized_fp16.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ee222cdfc402b8a18a5be4b1e27bddc73a03157bac91f4b173223711e48620d6
3
+ size 579297550
bert_onnx_pruned/special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
7
+ }
bert_onnx_pruned/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
bert_onnx_pruned/tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "clean_up_tokenization_spaces": true,
3
+ "cls_token": "[CLS]",
4
+ "do_basic_tokenize": true,
5
+ "do_lower_case": true,
6
+ "mask_token": "[MASK]",
7
+ "model_max_length": 512,
8
+ "never_split": null,
9
+ "pad_token": "[PAD]",
10
+ "sep_token": "[SEP]",
11
+ "strip_accents": null,
12
+ "tokenize_chinese_chars": true,
13
+ "tokenizer_class": "BertTokenizer",
14
+ "unk_token": "[UNK]"
15
+ }
bert_onnx_pruned/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
bert_pruned/config.json ADDED
@@ -0,0 +1,266 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "madlag/bert-large-uncased-wwm-squadv2-x2.63-f82.6-d16-hybrid-v1",
3
+ "architectures": [
4
+ "BertForQuestionAnswering"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "classifier_dropout": null,
8
+ "gradient_checkpointing": false,
9
+ "hidden_act": "gelu",
10
+ "hidden_dropout_prob": 0.1,
11
+ "hidden_size": 1024,
12
+ "initializer_range": 0.02,
13
+ "intermediate_size": 4096,
14
+ "layer_norm_eps": 1e-12,
15
+ "max_position_embeddings": 512,
16
+ "model_type": "bert",
17
+ "num_attention_heads": 16,
18
+ "num_hidden_layers": 24,
19
+ "pad_token_id": 0,
20
+ "position_embedding_type": "absolute",
21
+ "pruned_heads": {
22
+ "0": [
23
+ 0,
24
+ 1,
25
+ 3,
26
+ 4,
27
+ 5,
28
+ 8,
29
+ 9,
30
+ 13,
31
+ 15
32
+ ],
33
+ "1": [
34
+ 0,
35
+ 1,
36
+ 3,
37
+ 5,
38
+ 7,
39
+ 9,
40
+ 10,
41
+ 13,
42
+ 14
43
+ ],
44
+ "2": [
45
+ 0,
46
+ 1,
47
+ 4,
48
+ 6,
49
+ 7,
50
+ 8,
51
+ 9,
52
+ 10,
53
+ 11,
54
+ 12,
55
+ 13,
56
+ 14,
57
+ 15
58
+ ],
59
+ "3": [
60
+ 0,
61
+ 3,
62
+ 5,
63
+ 6,
64
+ 7,
65
+ 8,
66
+ 9,
67
+ 11,
68
+ 13,
69
+ 14,
70
+ 15
71
+ ],
72
+ "4": [
73
+ 0,
74
+ 2,
75
+ 3,
76
+ 4,
77
+ 5,
78
+ 6,
79
+ 7,
80
+ 9,
81
+ 10,
82
+ 11,
83
+ 12,
84
+ 14
85
+ ],
86
+ "5": [
87
+ 1,
88
+ 3,
89
+ 4,
90
+ 5,
91
+ 6,
92
+ 7,
93
+ 8,
94
+ 9,
95
+ 12,
96
+ 13,
97
+ 14,
98
+ 15
99
+ ],
100
+ "6": [
101
+ 0,
102
+ 1,
103
+ 2,
104
+ 3,
105
+ 4,
106
+ 7,
107
+ 8,
108
+ 9,
109
+ 10,
110
+ 11,
111
+ 12,
112
+ 15
113
+ ],
114
+ "7": [
115
+ 0,
116
+ 3,
117
+ 4,
118
+ 5,
119
+ 8,
120
+ 9,
121
+ 10,
122
+ 11,
123
+ 12,
124
+ 13,
125
+ 14
126
+ ],
127
+ "8": [
128
+ 3,
129
+ 4,
130
+ 5,
131
+ 7,
132
+ 8,
133
+ 9,
134
+ 10,
135
+ 11,
136
+ 12
137
+ ],
138
+ "9": [
139
+ 0,
140
+ 1,
141
+ 2,
142
+ 3,
143
+ 5,
144
+ 6,
145
+ 7,
146
+ 9,
147
+ 10,
148
+ 13,
149
+ 14,
150
+ 15
151
+ ],
152
+ "10": [
153
+ 1,
154
+ 2,
155
+ 4,
156
+ 5,
157
+ 6,
158
+ 8,
159
+ 11,
160
+ 13
161
+ ],
162
+ "11": [
163
+ 0,
164
+ 2,
165
+ 5,
166
+ 6,
167
+ 7,
168
+ 8,
169
+ 10,
170
+ 12,
171
+ 15
172
+ ],
173
+ "12": [
174
+ 0,
175
+ 2,
176
+ 6,
177
+ 8,
178
+ 9,
179
+ 11,
180
+ 13
181
+ ],
182
+ "13": [
183
+ 2,
184
+ 6,
185
+ 10,
186
+ 12,
187
+ 15
188
+ ],
189
+ "14": [
190
+ 1,
191
+ 5,
192
+ 6,
193
+ 10,
194
+ 11,
195
+ 15
196
+ ],
197
+ "15": [
198
+ 0,
199
+ 9
200
+ ],
201
+ "16": [
202
+ 5,
203
+ 7
204
+ ],
205
+ "17": [
206
+ 1,
207
+ 4,
208
+ 8,
209
+ 12,
210
+ 14
211
+ ],
212
+ "18": [
213
+ 3,
214
+ 4,
215
+ 11
216
+ ],
217
+ "19": [
218
+ 0,
219
+ 5,
220
+ 12
221
+ ],
222
+ "20": [
223
+ 0,
224
+ 4,
225
+ 10,
226
+ 12
227
+ ],
228
+ "21": [
229
+ 0,
230
+ 2,
231
+ 3,
232
+ 4,
233
+ 8,
234
+ 11,
235
+ 12,
236
+ 15
237
+ ],
238
+ "22": [
239
+ 0,
240
+ 1,
241
+ 3,
242
+ 4,
243
+ 7,
244
+ 9,
245
+ 10,
246
+ 11,
247
+ 13,
248
+ 15
249
+ ],
250
+ "23": [
251
+ 2,
252
+ 4,
253
+ 8,
254
+ 9,
255
+ 10,
256
+ 13,
257
+ 14,
258
+ 15
259
+ ]
260
+ },
261
+ "torch_dtype": "float32",
262
+ "transformers_version": "4.30.2",
263
+ "type_vocab_size": 2,
264
+ "use_cache": true,
265
+ "vocab_size": 30522
266
+ }
bert_pruned/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2a10ec3a2b02361fa9eac70d2362105a1281d044ad79d7d0584cbe6a2d0fcc63
3
+ size 1137140205