{
  "architectures": [
    "MiniMoE"
  ],
  "auto_map": {
    "AutoConfig": "mini_moe.MiniMoEConfig",
    "AutoModelForCausalLM": "mini_moe.MiniMoE"
  },
  "dim": 1024,
  "drop_rate": 0.0,
  "dtype": "float32",
  "model_type": "mini-moe",
  "num_attention_kv_heads": 8,
  "num_attention_q_heads": 16,
  "num_expert": 8,
  "num_layers": 12,
  "qkv_bias": false,
  "rope_base": 10000,
  "top_k": 4,
  "transformers_version": "4.56.2",
  "use_aux_loss": false,
  "vocab_size": 32000
}
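
Since `auto_map` points `AutoConfig` and `AutoModelForCausalLM` at custom classes in the repo's `mini_moe.py`, loading this model through transformers requires `trust_remote_code=True`. A minimal sketch, assuming a hypothetical repo id `your-org/mini-moe` (any placeholder path containing this config and `mini_moe.py` would do):

```python
from transformers import AutoConfig, AutoModelForCausalLM

# "your-org/mini-moe" is a placeholder repo id, not from the config itself.
# trust_remote_code=True lets transformers import MiniMoEConfig and MiniMoE
# from the mini_moe.py module named in auto_map.
config = AutoConfig.from_pretrained("your-org/mini-moe", trust_remote_code=True)
print(config.num_expert, config.top_k)  # 8 experts, 4 routed per token

model = AutoModelForCausalLM.from_pretrained(
    "your-org/mini-moe", trust_remote_code=True
)
```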