{
  "architectures": [
    "Qwen3_5ForCausalLM"
  ],
  "model_type": "qwen3_5",
  "quantization": {
    "group_size": 64,
    "bits": 4,
    "mode": "affine",
    "language_model.model.embed_tokens": {
      "bits": 8,
      "group_size": 64
    },
    "language_model.model.layers.0.linear_attn.in_proj_qkv": {
      "bits": 8,
      "group_size": 64
    },
    "language_model.model.layers.0.linear_attn.in_proj_z": {
      "bits": 8,
      "group_size": 64
    },
    "language_model.model.layers.0.linear_attn.in_proj_b": {
      "bits": 8,
      "group_size": 64
    },
    "language_model.model.layers.0.linear_attn.in_proj_a": {
      "bits": 4,
      "group_size": 64
    },
    "language_model.model.layers.0.linear_attn.out_proj": {
      "bits": 8,
      "group_size": 64
    },
    "language_model.model.layers.0.mlp.gate_proj": {
      "bits": 8,
      "group_size": 64
    },
    "language_model.model.layers.0.mlp.down_proj": {
      "bits": 8,
      "group_size": 64
    },
    "language_model.model.layers.0.mlp.up_proj": {
      "bits": 8,
      "group_size": 64
    },
| "language_model.model.layers.1.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.linear_attn.in_proj_z": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.mlp.down_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.mlp.up_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.mlp.gate_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.mlp.up_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.self_attn.q_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.self_attn.k_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.self_attn.q_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.self_attn.k_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.self_attn.q_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.self_attn.k_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.mlp.down_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.self_attn.q_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.self_attn.k_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.self_attn.q_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.self_attn.k_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.self_attn.q_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.self_attn.k_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.mlp.gate_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.mlp.down_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.mlp.up_proj": { |
| "bits": 8, |
| "group_size": 64 |
| } |
| }, |
| "quantization_config": { |
| "group_size": 64, |
| "bits": 4, |
| "mode": "affine", |
| "language_model.model.embed_tokens": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.0.linear_attn.in_proj_qkv": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.0.linear_attn.in_proj_z": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.0.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.0.linear_attn.in_proj_a": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.0.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.0.mlp.gate_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.0.mlp.down_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.0.mlp.up_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.linear_attn.in_proj_z": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.mlp.down_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.1.mlp.up_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.mlp.gate_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.2.mlp.up_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.self_attn.q_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.self_attn.k_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.3.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.4.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.5.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.6.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.self_attn.q_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.self_attn.k_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.7.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.8.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.9.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.10.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.self_attn.q_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.self_attn.k_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.11.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.12.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.13.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.mlp.down_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.14.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.self_attn.q_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.self_attn.k_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.15.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.16.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.17.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.18.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.self_attn.q_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.self_attn.k_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.19.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.linear_attn.out_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.20.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.21.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.linear_attn.in_proj_qkv": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.linear_attn.in_proj_z": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.linear_attn.in_proj_b": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.linear_attn.in_proj_a": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.linear_attn.out_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.mlp.gate_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.mlp.down_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.22.mlp.up_proj": { |
| "bits": 4, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.self_attn.q_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.self_attn.k_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.self_attn.v_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.self_attn.o_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.mlp.gate_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.mlp.down_proj": { |
| "bits": 8, |
| "group_size": 64 |
| }, |
| "language_model.model.layers.23.mlp.up_proj": { |
| "bits": 8, |
| "group_size": 64 |
| } |
| }, |
| "tie_word_embeddings": true, |
| "transformers_version": "4.57.0.dev0", |
| "attention_bias": false, |
| "attention_dropout": 0.0, |
| "attn_output_gate": true, |
| "dtype": "bfloat16", |
| "eos_token_id": 248044, |
| "full_attention_interval": 4, |
| "head_dim": 256, |
| "hidden_act": "silu", |
| "hidden_size": 2048, |
| "initializer_range": 0.02, |
| "intermediate_size": 6144, |
| "layer_types": [ |
| "linear_attention", |
| "linear_attention", |
| "linear_attention", |
| "full_attention", |
| "linear_attention", |
| "linear_attention", |
| "linear_attention", |
| "full_attention", |
| "linear_attention", |
| "linear_attention", |
| "linear_attention", |
| "full_attention", |
| "linear_attention", |
| "linear_attention", |
| "linear_attention", |
| "full_attention", |
| "linear_attention", |
| "linear_attention", |
| "linear_attention", |
| "full_attention", |
| "linear_attention", |
| "linear_attention", |
| "linear_attention", |
| "full_attention" |
| ], |
| "linear_conv_kernel_dim": 4, |
| "linear_key_head_dim": 128, |
| "linear_num_key_heads": 16, |
| "linear_num_value_heads": 16, |
| "linear_value_head_dim": 128, |
| "max_position_embeddings": 262144, |
| "mlp_only_layers": [], |
| "mtp_num_hidden_layers": 1, |
| "mtp_use_dedicated_embeddings": false, |
| "num_attention_heads": 8, |
| "num_hidden_layers": 24, |
| "num_key_value_heads": 2, |
| "rms_norm_eps": 1e-06, |
| "use_cache": true, |
| "vocab_size": 248320, |
| "mamba_ssm_dtype": "float32", |
| "rope_parameters": { |
| "rope_theta": 10000000, |
| "partial_rotary_factor": 0.25, |
| "type": "default" |
| } |
| } |
|
|