Step-3.5-Flash-FP8 / config.json
{
"architectures": [
"Step3p5ForCausalLM"
],
"model_type": "step3p5",
"auto_map": {
"AutoConfig": "configuration_step3p5.Step3p5Config",
"AutoModelForCausalLM": "modeling_step3p5.Step3p5ForCausalLM"
},
"rope_scaling": {
"rope_type": "llama3",
"factor": 2.0,
"original_max_position_embeddings": 131072,
"low_freq_factor": 1.0,
"high_freq_factor": 32.0
},
"yarn_only_types": ["full_attention"],
"hidden_size": 4096,
"intermediate_size": 11264,
"num_hidden_layers": 45,
"max_seq_len": 262144,
"vocab_size": 128896,
"torch_dtype": "bfloat16",
"use_qk_norm": true,
"moe_layers_enum": "3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44",
"num_attention_heads": 64,
"num_attention_groups": 8,
"head_dim": 128,
"use_moe": true,
"moe_num_experts": 288,
"moe_top_k": 8,
"moe_intermediate_size": 1280,
"share_expert_dim": 1280,
"moe_layer_offset": 0,
"moe_every_n_layer": 1,
"norm_expert_weight": true,
"moe_router_activation": "sigmoid",
"moe_router_scaling_factor": 3.0,
"att_impl_type": "GQA",
"rope_theta": [
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0
],
"use_head_wise_attn_gate": true,
"sliding_window": 512,
"use_moe_router_bias": true,
"need_fp32_gate": true,
"sink": false,
"layer_types": [
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention"
],
"use_rope_layers": [],
"num_nextn_predict_layers": 3,
"partial_rotary_factors": [
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0
],
"eos_token_id": [
1,
2,
128007
],
"bos_token_id": 0,
"attention_other_setting": {
"attention_type": "sliding_attention",
"num_attention_heads": 96,
"num_attention_groups": 8,
"head_dim": 128,
"true_head_dim": 128
},
"swiglu_limits": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
7,
7,
0.0,
0.0,
0.0
],
"swiglu_limits_shared": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
16,
0.0,
0.0,
0.0
],
"zero_centered": true,
"max_position_embeddings": 262144,
"quantization_config": {
"quant_method": "fp8",
"activation_scheme": "dynamic",
"fmt": "e4m3",
"weight_block_size": [
128,
128
],
"modules_to_not_convert": [
"lm_head",
"model.embed_tokens",
"model.norm",
"model.layers.0.self_attn.g_proj",
"model.layers.0.self_attn.qkv_proj",
"model.layers.0.self_attn.o_proj",
"model.layers.0.mlp.gate_up_proj",
"model.layers.0.mlp.down_proj",
"model.layers.1.self_attn.g_proj",
"model.layers.1.self_attn.qkv_proj",
"model.layers.1.self_attn.o_proj",
"model.layers.1.mlp.gate_up_proj",
"model.layers.1.mlp.down_proj",
"model.layers.2.self_attn.g_proj",
"model.layers.2.self_attn.qkv_proj",
"model.layers.2.self_attn.o_proj",
"model.layers.2.mlp.gate_up_proj",
"model.layers.2.mlp.down_proj",
"model.layers.3.self_attn.g_proj",
"model.layers.3.self_attn.qkv_proj",
"model.layers.3.self_attn.o_proj",
"model.layers.3.moe.gate",
"model.layers.3.share_expert.gate_up_proj",
"model.layers.3.share_expert.down_proj",
"model.layers.4.self_attn.g_proj",
"model.layers.4.self_attn.qkv_proj",
"model.layers.4.self_attn.o_proj",
"model.layers.4.moe.gate",
"model.layers.4.share_expert.gate_up_proj",
"model.layers.4.share_expert.down_proj",
"model.layers.5.self_attn.g_proj",
"model.layers.5.self_attn.qkv_proj",
"model.layers.5.self_attn.o_proj",
"model.layers.5.moe.gate",
"model.layers.5.share_expert.gate_up_proj",
"model.layers.5.share_expert.down_proj",
"model.layers.6.self_attn.g_proj",
"model.layers.6.self_attn.qkv_proj",
"model.layers.6.self_attn.o_proj",
"model.layers.6.moe.gate",
"model.layers.6.share_expert.gate_up_proj",
"model.layers.6.share_expert.down_proj",
"model.layers.7.self_attn.g_proj",
"model.layers.7.self_attn.qkv_proj",
"model.layers.7.self_attn.o_proj",
"model.layers.7.moe.gate",
"model.layers.7.share_expert.gate_up_proj",
"model.layers.7.share_expert.down_proj",
"model.layers.8.self_attn.g_proj",
"model.layers.8.self_attn.qkv_proj",
"model.layers.8.self_attn.o_proj",
"model.layers.8.moe.gate",
"model.layers.8.share_expert.gate_up_proj",
"model.layers.8.share_expert.down_proj",
"model.layers.9.self_attn.g_proj",
"model.layers.9.self_attn.qkv_proj",
"model.layers.9.self_attn.o_proj",
"model.layers.9.moe.gate",
"model.layers.9.share_expert.gate_up_proj",
"model.layers.9.share_expert.down_proj",
"model.layers.10.self_attn.g_proj",
"model.layers.10.self_attn.qkv_proj",
"model.layers.10.self_attn.o_proj",
"model.layers.10.moe.gate",
"model.layers.10.share_expert.gate_up_proj",
"model.layers.10.share_expert.down_proj",
"model.layers.11.self_attn.g_proj",
"model.layers.11.self_attn.qkv_proj",
"model.layers.11.self_attn.o_proj",
"model.layers.11.moe.gate",
"model.layers.11.share_expert.gate_up_proj",
"model.layers.11.share_expert.down_proj",
"model.layers.12.self_attn.g_proj",
"model.layers.12.self_attn.qkv_proj",
"model.layers.12.self_attn.o_proj",
"model.layers.12.moe.gate",
"model.layers.12.share_expert.gate_up_proj",
"model.layers.12.share_expert.down_proj",
"model.layers.13.self_attn.g_proj",
"model.layers.13.self_attn.qkv_proj",
"model.layers.13.self_attn.o_proj",
"model.layers.13.moe.gate",
"model.layers.13.share_expert.gate_up_proj",
"model.layers.13.share_expert.down_proj",
"model.layers.14.self_attn.g_proj",
"model.layers.14.self_attn.qkv_proj",
"model.layers.14.self_attn.o_proj",
"model.layers.14.moe.gate",
"model.layers.14.share_expert.gate_up_proj",
"model.layers.14.share_expert.down_proj",
"model.layers.15.self_attn.g_proj",
"model.layers.15.self_attn.qkv_proj",
"model.layers.15.self_attn.o_proj",
"model.layers.15.moe.gate",
"model.layers.15.share_expert.gate_up_proj",
"model.layers.15.share_expert.down_proj",
"model.layers.16.self_attn.g_proj",
"model.layers.16.self_attn.qkv_proj",
"model.layers.16.self_attn.o_proj",
"model.layers.16.moe.gate",
"model.layers.16.share_expert.gate_up_proj",
"model.layers.16.share_expert.down_proj",
"model.layers.17.self_attn.g_proj",
"model.layers.17.self_attn.qkv_proj",
"model.layers.17.self_attn.o_proj",
"model.layers.17.moe.gate",
"model.layers.17.share_expert.gate_up_proj",
"model.layers.17.share_expert.down_proj",
"model.layers.18.self_attn.g_proj",
"model.layers.18.self_attn.qkv_proj",
"model.layers.18.self_attn.o_proj",
"model.layers.18.moe.gate",
"model.layers.18.share_expert.gate_up_proj",
"model.layers.18.share_expert.down_proj",
"model.layers.19.self_attn.g_proj",
"model.layers.19.self_attn.qkv_proj",
"model.layers.19.self_attn.o_proj",
"model.layers.19.moe.gate",
"model.layers.19.share_expert.gate_up_proj",
"model.layers.19.share_expert.down_proj",
"model.layers.20.self_attn.g_proj",
"model.layers.20.self_attn.qkv_proj",
"model.layers.20.self_attn.o_proj",
"model.layers.20.moe.gate",
"model.layers.20.share_expert.gate_up_proj",
"model.layers.20.share_expert.down_proj",
"model.layers.21.self_attn.g_proj",
"model.layers.21.self_attn.qkv_proj",
"model.layers.21.self_attn.o_proj",
"model.layers.21.moe.gate",
"model.layers.21.share_expert.gate_up_proj",
"model.layers.21.share_expert.down_proj",
"model.layers.22.self_attn.g_proj",
"model.layers.22.self_attn.qkv_proj",
"model.layers.22.self_attn.o_proj",
"model.layers.22.moe.gate",
"model.layers.22.share_expert.gate_up_proj",
"model.layers.22.share_expert.down_proj",
"model.layers.23.self_attn.g_proj",
"model.layers.23.self_attn.qkv_proj",
"model.layers.23.self_attn.o_proj",
"model.layers.23.moe.gate",
"model.layers.23.share_expert.gate_up_proj",
"model.layers.23.share_expert.down_proj",
"model.layers.24.self_attn.g_proj",
"model.layers.24.self_attn.qkv_proj",
"model.layers.24.self_attn.o_proj",
"model.layers.24.moe.gate",
"model.layers.24.share_expert.gate_up_proj",
"model.layers.24.share_expert.down_proj",
"model.layers.25.self_attn.g_proj",
"model.layers.25.self_attn.qkv_proj",
"model.layers.25.self_attn.o_proj",
"model.layers.25.moe.gate",
"model.layers.25.share_expert.gate_up_proj",
"model.layers.25.share_expert.down_proj",
"model.layers.26.self_attn.g_proj",
"model.layers.26.self_attn.qkv_proj",
"model.layers.26.self_attn.o_proj",
"model.layers.26.moe.gate",
"model.layers.26.share_expert.gate_up_proj",
"model.layers.26.share_expert.down_proj",
"model.layers.27.self_attn.g_proj",
"model.layers.27.self_attn.qkv_proj",
"model.layers.27.self_attn.o_proj",
"model.layers.27.moe.gate",
"model.layers.27.share_expert.gate_up_proj",
"model.layers.27.share_expert.down_proj",
"model.layers.28.self_attn.g_proj",
"model.layers.28.self_attn.qkv_proj",
"model.layers.28.self_attn.o_proj",
"model.layers.28.moe.gate",
"model.layers.28.share_expert.gate_up_proj",
"model.layers.28.share_expert.down_proj",
"model.layers.29.self_attn.g_proj",
"model.layers.29.self_attn.qkv_proj",
"model.layers.29.self_attn.o_proj",
"model.layers.29.moe.gate",
"model.layers.29.share_expert.gate_up_proj",
"model.layers.29.share_expert.down_proj",
"model.layers.30.self_attn.g_proj",
"model.layers.30.self_attn.qkv_proj",
"model.layers.30.self_attn.o_proj",
"model.layers.30.moe.gate",
"model.layers.30.share_expert.gate_up_proj",
"model.layers.30.share_expert.down_proj",
"model.layers.31.self_attn.g_proj",
"model.layers.31.self_attn.qkv_proj",
"model.layers.31.self_attn.o_proj",
"model.layers.31.moe.gate",
"model.layers.31.share_expert.gate_up_proj",
"model.layers.31.share_expert.down_proj",
"model.layers.32.self_attn.g_proj",
"model.layers.32.self_attn.qkv_proj",
"model.layers.32.self_attn.o_proj",
"model.layers.32.moe.gate",
"model.layers.32.share_expert.gate_up_proj",
"model.layers.32.share_expert.down_proj",
"model.layers.33.self_attn.g_proj",
"model.layers.33.self_attn.qkv_proj",
"model.layers.33.self_attn.o_proj",
"model.layers.33.moe.gate",
"model.layers.33.share_expert.gate_up_proj",
"model.layers.33.share_expert.down_proj",
"model.layers.34.self_attn.g_proj",
"model.layers.34.self_attn.qkv_proj",
"model.layers.34.self_attn.o_proj",
"model.layers.34.moe.gate",
"model.layers.34.share_expert.gate_up_proj",
"model.layers.34.share_expert.down_proj",
"model.layers.35.self_attn.g_proj",
"model.layers.35.self_attn.qkv_proj",
"model.layers.35.self_attn.o_proj",
"model.layers.35.moe.gate",
"model.layers.35.share_expert.gate_up_proj",
"model.layers.35.share_expert.down_proj",
"model.layers.36.self_attn.g_proj",
"model.layers.36.self_attn.qkv_proj",
"model.layers.36.self_attn.o_proj",
"model.layers.36.moe.gate",
"model.layers.36.share_expert.gate_up_proj",
"model.layers.36.share_expert.down_proj",
"model.layers.37.self_attn.g_proj",
"model.layers.37.self_attn.qkv_proj",
"model.layers.37.self_attn.o_proj",
"model.layers.37.moe.gate",
"model.layers.37.share_expert.gate_up_proj",
"model.layers.37.share_expert.down_proj",
"model.layers.38.self_attn.g_proj",
"model.layers.38.self_attn.qkv_proj",
"model.layers.38.self_attn.o_proj",
"model.layers.38.moe.gate",
"model.layers.38.share_expert.gate_up_proj",
"model.layers.38.share_expert.down_proj",
"model.layers.39.self_attn.g_proj",
"model.layers.39.self_attn.qkv_proj",
"model.layers.39.self_attn.o_proj",
"model.layers.39.moe.gate",
"model.layers.39.share_expert.gate_up_proj",
"model.layers.39.share_expert.down_proj",
"model.layers.40.self_attn.g_proj",
"model.layers.40.self_attn.qkv_proj",
"model.layers.40.self_attn.o_proj",
"model.layers.40.moe.gate",
"model.layers.40.share_expert.gate_up_proj",
"model.layers.40.share_expert.down_proj",
"model.layers.41.self_attn.g_proj",
"model.layers.41.self_attn.qkv_proj",
"model.layers.41.self_attn.o_proj",
"model.layers.41.moe.gate",
"model.layers.41.share_expert.gate_up_proj",
"model.layers.41.share_expert.down_proj",
"model.layers.42.self_attn.g_proj",
"model.layers.42.self_attn.qkv_proj",
"model.layers.42.self_attn.o_proj",
"model.layers.42.moe.gate",
"model.layers.42.share_expert.gate_up_proj",
"model.layers.42.share_expert.down_proj",
"model.layers.43.self_attn.g_proj",
"model.layers.43.self_attn.qkv_proj",
"model.layers.43.self_attn.o_proj",
"model.layers.43.moe.gate",
"model.layers.43.share_expert.gate_up_proj",
"model.layers.43.share_expert.down_proj",
"model.layers.44.self_attn.g_proj",
"model.layers.44.self_attn.qkv_proj",
"model.layers.44.self_attn.o_proj",
"model.layers.44.moe.gate",
"model.layers.44.share_expert.gate_up_proj",
"model.layers.44.share_expert.down_proj",
"model.layers.45.mtp_block.self_attn.g_proj",
"model.layers.45.mtp_block.self_attn.qkv_proj",
"model.layers.45.mtp_block.self_attn.o_proj",
"model.layers.45.mtp_block.mlp.gate_up_proj",
"model.layers.45.mtp_block.mlp.down_proj",
"model.layers.46.mtp_block.self_attn.g_proj",
"model.layers.46.mtp_block.self_attn.qkv_proj",
"model.layers.46.mtp_block.self_attn.o_proj",
"model.layers.46.mtp_block.mlp.gate_up_proj",
"model.layers.46.mtp_block.mlp.down_proj",
"model.layers.47.mtp_block.self_attn.g_proj",
"model.layers.47.mtp_block.self_attn.qkv_proj",
"model.layers.47.mtp_block.self_attn.o_proj",
"model.layers.47.mtp_block.mlp.gate_up_proj",
"model.layers.47.mtp_block.mlp.down_proj"
]
}
}
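
The config encodes a strict periodic layout: every fourth decoder layer (0, 4, 8, ..., 44) uses full attention with a RoPE base of 5,000,000 and a 0.5 partial-rotary factor, while the remaining layers use sliding-window attention (window 512) with the standard 10,000 base and full rotary; MoE routing (288 experts, top-8) is active on layers 3-44, and the per-layer lists also cover the 3 multi-token-prediction blocks (layers 45-47). A minimal sketch, assuming only a local copy of this config.json, that checks those relationships:

import json

# Consistency check for the periodic layer layout described above.
# Assumes config.json sits in the working directory.
with open("config.json") as f:
    cfg = json.load(f)

n_layers = cfg["num_hidden_layers"]        # 45 decoder layers
n_mtp = cfg["num_nextn_predict_layers"]    # 3 extra multi-token-prediction blocks
assert len(cfg["layer_types"]) == n_layers + n_mtp == 48

moe_layers = {int(i) for i in cfg["moe_layers_enum"].split(",")}

for idx, (ltype, theta, rot) in enumerate(
    zip(cfg["layer_types"], cfg["rope_theta"], cfg["partial_rotary_factors"])
):
    if idx % 4 == 0:
        # every 4th layer: global attention, large RoPE base, half rotary
        assert (ltype, theta, rot) == ("full_attention", 5000000.0, 0.5)
    else:
        # other layers: sliding-window attention (window 512), standard RoPE
        assert (ltype, theta, rot) == ("sliding_attention", 10000.0, 1.0)
    # MoE is routed on layers 3-44 only; layers 0-2 and the MTP blocks use dense MLPs
    assert (idx in moe_layers) == (3 <= idx < n_layers)

print("layer pattern consistent with config.json")

Since auto_map points at custom modeling code (configuration_step3p5 / modeling_step3p5), loading this checkpoint through transformers requires trust_remote_code=True. Note also that quantization_config excludes lm_head, the embeddings, model.norm, every attention projection, the MoE gates, the shared experts, the first three dense layers, and the MTP blocks from conversion, so effectively only the routed-expert weights are stored in block-wise (128x128) FP8 with dynamic activation scaling.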