Upload config.json
config.json  CHANGED  (+2 -1)
@@ -6,6 +6,7 @@
   "attention_dropout": 0.0,
   "bos_token_id": 32000,
   "eos_token_id": 32001,
+  "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
@@ -22,7 +23,7 @@
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "transformers_version": "4.
+  "transformers_version": "4.45.1",
   "use_cache": true,
   "vocab_size": 32008
 }
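The new head_dim key makes the per-head dimension explicit instead of leaving it to be derived from the other size fields, which is presumably why the transformers_version pin moves to 4.45.1. A minimal sketch to sanity-check the uploaded file, assuming config.json sits in the working directory; num_attention_heads is not part of this diff, so the 32-head figure below is only an inference from 4096 / 128:

import json

# Load the uploaded config (assumed to be in the current directory).
with open("config.json") as f:
    cfg = json.load(f)

# head_dim is now explicit; with hidden_size 4096 this implies
# 4096 / 128 = 32 attention heads *if* head_dim * num_heads == hidden_size
# (num_attention_heads itself is outside this diff, so that is an inference).
assert cfg["head_dim"] == 128
assert cfg["hidden_size"] % cfg["head_dim"] == 0

print(cfg["transformers_version"])         # 4.45.1
print(cfg["hidden_size"] // cfg["head_dim"]) # 32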