Update config.json
Apply renamed kwargs according to https://github.com/huggingface/transformers/pull/22575
config.json CHANGED (+2, -2)

@@ -4,7 +4,7 @@
   "architectures": [
     "GPTBigCodeForCausalLM"
   ],
-  "attention_softmax_in_fp32":
+  "attention_softmax_in_fp32": false,
   "multi_query": true,
   "attn_pdrop": 0.1,
   "bos_token_id": 49152,
@@ -20,7 +20,7 @@
   "n_positions": 2048,
   "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
-  "
+  "scale_attention_softmax_in_fp32": false,
   "scale_attn_weights": true,
   "summary_activation": null,
   "summary_first_dropout": 0.1,
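As a quick sanity check that the renamed keys are picked up after this commit, the updated file can be read back through `GPTBigCodeConfig`. A minimal sketch, assuming a local checkout of this repo where `config.json` sits in the working directory (the file path is the only assumption here):

```python
from transformers import GPTBigCodeConfig

# Load the updated config.json from a local checkout of this repo
# (the path is an assumption; adjust it to wherever the file lives).
config = GPTBigCodeConfig.from_json_file("config.json")

# The two kwargs renamed per transformers PR #22575, as set by this commit.
print(config.attention_softmax_in_fp32)        # False
print(config.scale_attention_softmax_in_fp32)  # False
```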