Update config.json
Browse files — config.json: +4 −2
config.json
CHANGED
|
@@ -78,7 +78,8 @@
|
|
| 78 |
"transformers_version": "4.16.0.dev0",
|
| 79 |
"use_bfloat16": false,
|
| 80 |
"vocab_size": 49408,
|
| 81 |
-      "use_attention_bias": false
|
|
|
|
| 82 |
},
|
| 83 |
"text_config_dict": null,
|
| 84 |
"transformers_version": null,
|
|
@@ -152,7 +153,8 @@
|
|
| 152 |
"top_p": 1.0,
|
| 153 |
"torch_dtype": null,
|
| 154 |
"torchscript": false,
|
| 155 |
-      "use_bfloat16": false
|
|
|
|
| 156 |
},
|
| 157 |
"vision_config_dict": null
|
| 158 |
}
|
|
|
|
| 78 |
"transformers_version": "4.16.0.dev0",
|
| 79 |
"use_bfloat16": false,
|
| 80 |
"vocab_size": 49408,
|
| 81 |
+      "use_attention_bias": false,
|
| 82 |
+      "use_glu_in_ff": true
|
| 83 |
},
|
| 84 |
"text_config_dict": null,
|
| 85 |
"transformers_version": null,
|
|
|
|
| 153 |
"top_p": 1.0,
|
| 154 |
"torch_dtype": null,
|
| 155 |
"torchscript": false,
|
| 156 |
+      "use_bfloat16": false,
|
| 157 |
+      "use_glu_in_ff": true
|
| 158 |
},
|
| 159 |
"vision_config_dict": null
|
| 160 |
}
|