Update config.json

config.json CHANGED (+8 -2)

@@ -18,10 +18,16 @@
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
   "pad_token_id": 0,
+  "pooler_fc_size": 768,
+  "pooler_num_attention_heads": 12,
+  "pooler_num_fc_layers": 3,
+  "pooler_size_per_head": 128,
+  "pooler_type": "first_token_transform",
   "position_embedding_type": "absolute",
-  "torch_dtype": "
-  "transformers_version": "4.
+  "torch_dtype": "float32",
+  "transformers_version": "4.37.1",
   "type_vocab_size": 2,
   "use_cache": true,
   "vocab_size": 21128
+
 }
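
When the file is loaded through the transformers library, each of these keys becomes an attribute on the config object, and the torch_dtype string is parsed into an actual torch.dtype. A minimal sketch of reading the updated file back, assuming a hypothetical local directory ./checkpoint that contains this config.json:

```python
from transformers import AutoConfig

# "./checkpoint" is a hypothetical local directory holding the updated config.json.
config = AutoConfig.from_pretrained("./checkpoint")

# The added pooler_* keys are not standard BertConfig arguments, so they are
# simply kept as extra attributes on the loaded config object.
print(config.pooler_fc_size)   # 768
print(config.pooler_type)      # "first_token_transform"

# When torch is installed, the "float32" string is converted to torch.float32.
print(config.torch_dtype)
```

Note that the library stamps transformers_version with its own running version whenever a config is saved, so this field mainly records which release wrote the file.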