{
  "d_model": 768,
  "d_intermediate": 0,
  "n_layer": 16,
  "vocab_size": 10002,
  "ssm_cfg": {
    "d_state": 16,
    "d_conv": 4,
    "expand": 2,
    "layer": "Mamba2"
  },
  "attn_layer_idx": [],
  "attn_cfg": {},
  "rms_norm": true,
  "residual_in_fp32": true,
  "fused_add_norm": true,
  "pad_vocab_size_multiple": 8,
  "tie_embeddings": true
}
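
These keys match the `MambaConfig` dataclass shipped with the `mamba_ssm` package (state-spaces/mamba), so the file can be loaded into a model directly. A minimal sketch, assuming that package is the target and that the JSON above is saved at a hypothetical `config.json` path:

```python
import json

from mamba_ssm.models.config_mamba import MambaConfig
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

# Hypothetical path; assumes the JSON above was saved as config.json.
with open("config.json") as f:
    cfg_dict = json.load(f)

# MambaConfig fields mirror the JSON keys one-to-one.
config = MambaConfig(**cfg_dict)

# Builds a 16-layer language model using the Mamba-2 mixer selected by
# ssm_cfg["layer"]. d_intermediate = 0 means pure SSM blocks with no
# interleaved MLP, and the empty attn_layer_idx mixes in no attention layers.
model = MambaLMHeadModel(config)
```

One detail worth noting: with `pad_vocab_size_multiple` set to 8, the model rounds `vocab_size` up internally from 10002 to 10008 so the embedding dimension is kernel-friendly, and `tie_embeddings` shares that (padded) embedding matrix with the output head.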