Update config.json (#3)
Update config.json (35ec1ecba463e7a9b3086ac20d49fe8e8e4ac54c)
Co-authored-by: Sedrick Keh <sedrickkeh@users.noreply.huggingface.co>
config.json CHANGED (+1 -3)

@@ -4,7 +4,6 @@
   ],
   "model_type": "openlm",
   "params": null,
-  "params_args_dict": {
   "apply_qk_norm": true,
   "attn_activation": null,
   "attn_name": "auto",
@@ -29,8 +28,7 @@
   "qk_norm": true,
   "seq_len": 2048,
   "vocab_size": 50432,
-    "weight_tying": false
-  },
+  "weight_tying": false,
   "torch_dtype": "float32",
   "transformers_version": "4.40.2"
 }
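In effect, the commit flattens the config: the params_args_dict wrapper is removed so its keys sit at the top level, and weight_tying gains the trailing comma it now needs before torch_dtype. A sketch of the resulting config.json, assuming only what the two hunks show (keys elided between the hunks are marked with "..."):

{
  ...
  "model_type": "openlm",
  "params": null,
  "apply_qk_norm": true,
  "attn_activation": null,
  "attn_name": "auto",
  ...
  "qk_norm": true,
  "seq_len": 2048,
  "vocab_size": 50432,
  "weight_tying": false,
  "torch_dtype": "float32",
  "transformers_version": "4.40.2"
}

With the wrapper gone, anything reading the config reaches fields such as seq_len or vocab_size directly at the top level rather than through the nested params_args_dict object.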