if001 committed on
Commit
2468727
·
verified ·
1 Parent(s): d62149e
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -24,7 +24,7 @@
24
  "moe_intermediate_size": 2048,
25
  "moe_layer_freq": 1,
26
  "n_group": 8,
27
- "n_routed_experts": 256,
28
  "n_shared_experts": 1,
29
  "norm_topk_prob": true,
30
  "num_attention_heads": 128,
 
24
  "moe_intermediate_size": 2048,
25
  "moe_layer_freq": 1,
26
  "n_group": 8,
27
+ "n_routed_experts": 3,
28
  "n_shared_experts": 1,
29
  "norm_topk_prob": true,
30
  "num_attention_heads": 128,