Upload config
config.json  CHANGED  +2 -7
@@ -1,11 +1,7 @@
 {
-  "architectures": [
-    "ProSSTForMaskedLM"
-  ],
   "attention_probs_dropout_prob": 0.1,
   "auto_map": {
-    "AutoConfig": "configuration_prosst.ProSSTConfig",
-    "AutoModelForMaskedLM": "modeling_prosst.ProSSTForMaskedLM"
+    "AutoConfig": "configuration_prosst.ProSSTConfig"
   },
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
@@ -35,9 +31,8 @@
   "position_embedding_type": "relative",
   "relative_attention": true,
   "scale_hidden": 1,
-  "ss_vocab_size":
+  "ss_vocab_size": 4099,
   "token_dropout": true,
-  "torch_dtype": "float32",
   "transformers_version": "4.38.2",
   "type_vocab_size": 0,
   "vocab_size": 25
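For context, a minimal sketch of how a config with an auto_map entry like this is typically loaded through transformers; the repo id below is a placeholder assumption, not taken from this commit:

    from transformers import AutoConfig

    # auto_map points AutoConfig at the custom ProSSTConfig class shipped in
    # configuration_prosst.py, so loading it needs trust_remote_code=True.
    # After this change only AutoConfig resolves via auto_map; the
    # AutoModelForMaskedLM mapping was removed in this commit.
    config = AutoConfig.from_pretrained(
        "your-org/prosst-checkpoint",  # placeholder repo id
        trust_remote_code=True,
    )
    print(config.ss_vocab_size)  # 4099, as set by this commit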