{
  "model_name": "Qwen3-5M-MoE-2exp-active",
  "model_type": "Qwen3MoeForCausalLM",
  "tokenizer": "gpt2",
  "dtype": "bfloat16",
  "vocab_size": 50257,
  "hidden_size": 128,
  "num_layers": 8,
  "num_attention_heads": 4,
  "num_key_value_heads": 2,
  "head_dim": 32,
  "moe_intermediate_size": 1024,
  "num_experts": 64,
  "num_experts_per_tok": 2,
  "sliding_window": 512,
  "max_position_embeddings": 8192,
  "rope_theta": 500000,
  "layer_types": [
    "sliding_attention",
    "sliding_attention",
    "full_attention",
    "sliding_attention",
    "sliding_attention",
    "full_attention",
    "sliding_attention",
    "sliding_attention"
  ],
  "max_window_layers": 6,
  "parameters_total": 208220928,
  "parameters_active": 13186816,
  "active_ratio": 0.03125,
  "positional_encoding": "rope",
  "normalization": "rmsnorm",
  "activation": "swiglu",
  "tie_word_embeddings": true
}
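
As a sanity check on the parameter figures, the sketch below re-derives `parameters_total` and `parameters_active` from the hyperparameters above, assuming the standard Qwen3-MoE layout (bias-free q/k/v/o projections, per-head-dim q/k RMSNorm, a linear routing gate, SwiGLU experts, tied embeddings, every layer sparse). It is an illustrative estimate, not the script that produced the config: the total reproduces 208,220,928 exactly, the active count lands within about a thousand parameters of the listed value (the small gap depends on how router and norm weights are attributed), and note that the listed `active_ratio` of 0.03125 equals `num_experts_per_tok / num_experts` (2/64) rather than the parameter-level active fraction (~6.3%).

```python
"""Re-derive the parameter counts from the config above (illustrative sketch)."""

HIDDEN = 128          # hidden_size
LAYERS = 8            # num_layers
HEADS = 4             # num_attention_heads
KV_HEADS = 2          # num_key_value_heads
HEAD_DIM = 32         # head_dim
MOE_FFN = 1024        # moe_intermediate_size
EXPERTS = 64          # num_experts
TOP_K = 2             # num_experts_per_tok
VOCAB = 50257         # vocab_size

# Attention: q/k/v/o projections (assumed bias-free) plus q_norm/k_norm over head_dim.
attn = (
    HIDDEN * HEADS * HEAD_DIM        # q_proj
    + HIDDEN * KV_HEADS * HEAD_DIM   # k_proj
    + HIDDEN * KV_HEADS * HEAD_DIM   # v_proj
    + HEADS * HEAD_DIM * HIDDEN      # o_proj
    + 2 * HEAD_DIM                   # q_norm, k_norm (RMSNorm weights)
)

expert = 3 * HIDDEN * MOE_FFN        # one SwiGLU expert: gate_proj, up_proj, down_proj
router = HIDDEN * EXPERTS            # routing gate
norms = 2 * HIDDEN                   # input / post-attention RMSNorm per layer

layer_total = attn + router + EXPERTS * expert + norms
layer_active = attn + router + TOP_K * expert + norms

embeddings = VOCAB * HIDDEN          # tied with lm_head, so counted once
final_norm = HIDDEN

total = LAYERS * layer_total + embeddings + final_norm
active = LAYERS * layer_active + embeddings + final_norm

print(f"total parameters : {total:,}")          # 208,220,928 -- matches the config
print(f"active per token : {active:,}")         # ~13.19M, close to parameters_active
print(f"active fraction  : {active / total:.4f}")
```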