Update config.json
#9
by csy0225 - opened
config.json CHANGED (+0 -1)

@@ -23,7 +23,6 @@
   "torch_dtype": "bfloat16",
   "use_qk_norm": true,
   "moe_layers_enum": "3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44",
-  "use_mfa": false,
   "num_attention_heads": 64,
   "num_attention_groups": 8,
   "head_dim": 128,
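For reviewers, a minimal sketch of how one might sanity-check the edited file locally: it assumes a checked-out copy of this repo with `config.json` in the working directory, parses it with the standard library, and confirms the key this PR removes is gone while the neighboring attention settings are untouched.

```python
import json

# Load the edited config from a local checkout of this repo
# (path is a placeholder for wherever the file lives locally).
with open("config.json") as f:
    cfg = json.load(f)

# The PR removes exactly one line: "use_mfa": false.
assert "use_mfa" not in cfg, "expected 'use_mfa' to be removed by this PR"

# Spot-check the context lines the diff leaves unchanged.
assert cfg["num_attention_heads"] == 64
assert cfg["num_attention_groups"] == 8
assert cfg["head_dim"] == 128

print("config.json OK, torch_dtype =", cfg["torch_dtype"])
```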