{
  "vocab_size": 10000,
  "dim": 1536,
  "num_layers": 3,
  "num_heads": 12,
  "max_recursion": 5,
  "num_experts": 5,
  "ffn_expansion": 4,
  "max_position_embeddings": 2048,
  "model_type": "MoR",
  "architecture": "MixtureOfRecursions",
  "hidden_act": "gelu"
}
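
A minimal sketch of how this config might be loaded and validated, assuming it is saved as `config.json`. The `MoRConfig` dataclass and the derived quantities below are illustrative assumptions, not part of the MoR codebase:

```python
import json
from dataclasses import dataclass


@dataclass
class MoRConfig:
    # Hypothetical helper mirroring the fields of config.json.
    vocab_size: int
    dim: int
    num_layers: int
    num_heads: int
    max_recursion: int
    num_experts: int
    ffn_expansion: int
    max_position_embeddings: int
    model_type: str
    architecture: str
    hidden_act: str


with open("config.json") as f:
    cfg = MoRConfig(**json.load(f))

# Quantities implied by the fields (assumed, standard transformer conventions):
head_dim = cfg.dim // cfg.num_heads      # 1536 / 12 = 128 per attention head
ffn_inner = cfg.dim * cfg.ffn_expansion  # 1536 * 4 = 6144 FFN hidden width

assert cfg.dim % cfg.num_heads == 0, "dim must divide evenly across heads"
print(cfg.architecture, head_dim, ffn_inner)
```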