Step-3.5-Flash / config.json
{
"architectures": [
"Step3p5ForCausalLM"
],
"model_type": "step3p5",
"auto_map": {
"AutoConfig": "configuration_step3p5.Step3p5Config",
"AutoModelForCausalLM": "modeling_step3p5.Step3p5ForCausalLM"
},
"rope_scaling": {
"rope_type": "llama3",
"factor": 2.0,
"original_max_position_embeddings": 131072,
"low_freq_factor": 1.0,
"high_freq_factor": 32.0
},
"yarn_only_types": ["full_attention"],
"hidden_size": 4096,
"intermediate_size": 11264,
"num_hidden_layers": 45,
"max_seq_len": 262144,
"vocab_size": 128896,
"torch_dtype": "bfloat16",
"use_qk_norm": true,
"moe_layers_enum": "3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44",
"num_attention_heads": 64,
"num_attention_groups": 8,
"head_dim": 128,
"use_moe": true,
"moe_num_experts": 288,
"moe_top_k": 8,
"moe_intermediate_size": 1280,
"share_expert_dim": 1280,
"moe_layer_offset": 0,
"moe_every_n_layer": 1,
"norm_expert_weight": true,
"moe_router_activation": "sigmoid",
"moe_router_scaling_factor": 3.0,
"att_impl_type": "GQA",
"rope_theta": [
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0,
5000000.0,
10000.0,
10000.0,
10000.0
],
"use_head_wise_attn_gate": true,
"sliding_window": 512,
"use_moe_router_bias": true,
"need_fp32_gate": true,
"sink": false,
"layer_types": [
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention"
],
"use_rope_layers": [],
"num_nextn_predict_layers": 3,
"partial_rotary_factors": [
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0,
0.5,
1.0,
1.0,
1.0
],
"eos_token_id": [
1,
2,
128007
],
"bos_token_id": 0,
"attention_other_setting": {
"attention_type": "sliding_attention",
"num_attention_heads": 96,
"num_attention_groups": 8,
"head_dim": 128,
"true_head_dim": 128
},
"swiglu_limits": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
7.0,
7.0,
0.0,
0.0,
0.0
],
"swiglu_limits_shared": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
16.0,
0.0,
0.0,
0.0
],
"zero_centered": true,
"max_position_embeddings": 262144
}
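
This checkpoint ships custom modeling code: "auto_map" routes AutoConfig to configuration_step3p5.Step3p5Config and AutoModelForCausalLM to modeling_step3p5.Step3p5ForCausalLM, so loading through transformers requires trust_remote_code=True. A minimal loading sketch follows, assuming the standard transformers Auto classes; the repo id "stepfun-ai/Step-3.5-Flash" is an assumption, and device_map="auto" additionally assumes accelerate is installed:

```python
# Minimal loading sketch. The repo id below is an assumption, not confirmed
# by this file; substitute the actual Hub id of this checkpoint.
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "stepfun-ai/Step-3.5-Flash"  # hypothetical repo id

# trust_remote_code=True is required: auto_map points the Auto classes at the
# custom configuration_step3p5 / modeling_step3p5 files shipped in the repo.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.model_type)         # "step3p5"
print(config.num_hidden_layers)  # 45

model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype="bfloat16",  # matches the torch_dtype declared above
    device_map="auto",       # requires accelerate
)
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
```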
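
The per-layer arrays encode one repeating 1-in-4 pattern: every fourth layer (0, 4, 8, ...) uses global full_attention with rope_theta 5000000.0 and a partial rotary factor of 0.5, while the other three layers in each group use 512-token sliding_attention (per sliding_window) with rope_theta 10000.0 and full rotary embedding. The arrays hold 48 entries against 45 hidden layers, which presumably covers the 3 extra layers implied by num_nextn_predict_layers. A small sketch that checks this layout straight from the file, using only the standard library:

```python
# Sanity-check the interleaved attention layout declared in config.json
# (assumes the file sits in the current directory).
import json

with open("config.json") as f:
    cfg = json.load(f)

n = cfg["num_hidden_layers"]      # 45 transformer layers
layer_types = cfg["layer_types"]  # 48 entries: presumably 45 + 3 MTP layers
assert len(layer_types) == n + cfg["num_nextn_predict_layers"]

for i, (kind, theta, rot) in enumerate(
    zip(layer_types, cfg["rope_theta"], cfg["partial_rotary_factors"])
):
    if i % 4 == 0:
        # Every fourth layer: global attention, long-context base frequency,
        # rotary embedding applied to only half of each head dimension.
        assert (kind, theta, rot) == ("full_attention", 5000000.0, 0.5)
    else:
        # Remaining layers: 512-token sliding window, standard base, full rotary.
        assert (kind, theta, rot) == ("sliding_attention", 10000.0, 1.0)

# MoE covers layers 3..44 (moe_layers_enum); the first three layers stay dense.
moe_layers = {int(x) for x in cfg["moe_layers_enum"].split(",")}
assert moe_layers == set(range(3, n))
print(f"{n} layers, {len(moe_layers)} MoE layers, pattern verified")
```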
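
The rope_scaling block uses the "llama3" frequency-dependent rule with factor 2.0 over an original 131072-token context, which lines up with the max_position_embeddings (and max_seq_len) of 262144; the yarn_only_types key suggests the scaling is applied only to the full_attention layers. Below is a sketch of that rule under these parameters, paraphrasing the generic llama3 scaling as implemented in transformers rather than this repo's custom code:

```python
# Illustration of the "llama3" rope scaling rule parameterized above.
# This paraphrases the generic transformers implementation; the repo's
# custom code may differ in detail.
import math

factor = 2.0             # rope_scaling.factor
low_freq_factor = 1.0    # rope_scaling.low_freq_factor
high_freq_factor = 32.0  # rope_scaling.high_freq_factor
orig_ctx = 131072        # rope_scaling.original_max_position_embeddings

low_freq_wavelen = orig_ctx / low_freq_factor    # longer wavelengths: fully scaled
high_freq_wavelen = orig_ctx / high_freq_factor  # shorter wavelengths: untouched

def scale_inv_freq(inv_freq: float) -> float:
    wavelen = 2 * math.pi / inv_freq
    if wavelen < high_freq_wavelen:
        return inv_freq           # high frequency: keep as-is
    if wavelen > low_freq_wavelen:
        return inv_freq / factor  # low frequency: stretch by the full factor
    # mid band: interpolate smoothly between the two regimes
    smooth = (orig_ctx / wavelen - low_freq_factor) / (high_freq_factor - low_freq_factor)
    return (1 - smooth) * inv_freq / factor + smooth * inv_freq
```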