{
    "architectures": [
        "LongcatFlashForCausalLM"
    ],
    "attention_bias": false,
    "attention_dropout": 0.0,
    "attention_method": "MLA",
    "auto_map": {
        "AutoConfig": "configuration_longcat_flash.LongcatFlashConfig",
        "AutoModel": "modeling_longcat_flash.LongcatFlashModel",
        "AutoModelForCausalLM": "modeling_longcat_flash.LongcatFlashForCausalLM"
    },
    "bos_token_id": 1,
    "eos_token_id": 2,
    "expert_ffn_hidden_size": 2048,
    "ffn_hidden_size": 12288,
    "hidden_size": 6144,
    "kv_lora_rank": 512,
    "max_position_embeddings": 131072,
    "mla_scale_kv_lora": true,
    "mla_scale_q_lora": true,
    "model_type": "longcat_flash",
    "moe_topk": 12,
    "n_routed_experts": 512,
    "num_attention_heads": 64,
    "num_layers": 28,
    "q_lora_rank": 1536,
    "qk_nope_head_dim": 128,
    "qk_rope_head_dim": 64,
    "rms_norm_eps": 1e-05,
    "rope_theta": 10000000.0,
    "routed_scaling_factor": 6.0,
    "use_cache": true,
    "v_head_dim": 128,
    "vocab_size": 131072,
    "zero_expert_num": 256,
    "zero_expert_type": "identity"
}