Update config.json
config.json CHANGED (+29 -29)
@@ -22,34 +22,34 @@
   "model_type": "llama4_text",
   "moe_layers": [],
   "no_rope_layers": [
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0
   ],
   "num_attention_heads": 16,
   "num_experts_per_tok": 1,
@@ -67,6 +67,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.52.4",
   "use_cache": true,
-  "use_qk_norm":
+  "use_qk_norm": false,
   "vocab_size": 262144
 }
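For reference, a minimal sketch of checking the fields this commit touches after pulling the revision. It only reads the JSON directly; the working-directory path is an assumption, and the 28-entry count is taken from the diff above.

# A minimal sketch, not part of this repo: verify the fields changed
# by this commit, assuming config.json sits in the current directory.
import json

with open("config.json") as f:
    cfg = json.load(f)

# The commit sets all 28 entries of no_rope_layers to 0 and gives
# use_qk_norm the value false.
assert cfg["no_rope_layers"] == [0] * 28
assert cfg["use_qk_norm"] is False
print("config.json matches this commit")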