Update config.json
config.json  (changed: +8 lines, −2 lines)
@@ -80,7 +80,10 @@
     "vocab_size": 49408,
     "use_attention_bias": false,
     "use_glu_in_ff": true,
-    "ff_post_act_layer_norm": false
+    "ff_post_act_layer_norm": false,
+    "use_pre_branch_norm": true,
+    "use_post_branch_norm": false,
+    "use_post_main_norm": false
   },
   "text_config_dict": null,
   "transformers_version": null,
@@ -156,7 +159,10 @@
     "torchscript": false,
     "use_bfloat16": false,
     "use_glu_in_ff": true,
-    "ff_post_act_layer_norm": false
+    "ff_post_act_layer_norm": false,
+    "use_pre_branch_norm": true,
+    "use_post_branch_norm": false,
+    "use_post_main_norm": false
   },
   "vision_config_dict": null
 }