{
"d_model": 768,
"d_intermediate": 0,
"n_layer": 16,
"vocab_size": 10002,
"ssm_cfg": {
"d_state": 16,
"d_conv": 4,
"expand": 2,
"layer": "Mamba2"
},
"attn_layer_idx": [],
"attn_cfg": {},
"rms_norm": true,
"residual_in_fp32": true,
"fused_add_norm": true,
"pad_vocab_size_multiple": 8,
"tie_embeddings": true
}
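
These fields match the MambaConfig dataclass from the mamba_ssm library one-to-one, so the file can plausibly be loaded directly into it. The sketch below is an assumption-laden illustration, not a confirmed usage from this repo: the path "config.json" is hypothetical, and the fused kernels require a CUDA device.

import json

import torch
from mamba_ssm.models.config_mamba import MambaConfig
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

# Parse the JSON above into the library's config dataclass.
# d_intermediate = 0 means pure SSM blocks with no interleaved MLP,
# and ssm_cfg["layer"] = "Mamba2" selects the Mamba-2 mixer.
with open("config.json") as f:  # assumed path
    config = MambaConfig(**json.load(f))

# Build the 16-layer language model. Internally, vocab_size (10002)
# is padded up to the next multiple of pad_vocab_size_multiple (8),
# i.e. 10008, and tie_embeddings shares the LM head with the
# input embedding matrix.
model = MambaLMHeadModel(config, device="cuda", dtype=torch.bfloat16)

input_ids = torch.randint(0, 10002, (1, 128), device="cuda")
logits = model(input_ids).logits  # shape: (1, 128, 10008)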