{
  "architectures": [
    "AfmoeForCausalLM"
  ],
  "n_layers": 1,
  "vocab_size": 200192,
  "hidden_size": 3,
  "num_attention_heads": 4
}
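For reference, a minimal sketch of how this config could be read and inspected in Python, assuming the JSON above is saved locally as config.json (the path is an assumption; adjust it to wherever the file lives):

import json

# Minimal sketch: load the config shown above and inspect a few fields.
# "config.json" is an assumed local path, not part of the original listing.
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

print(config["architectures"])        # ["AfmoeForCausalLM"]
print(config["n_layers"])             # 1
print(config["vocab_size"])           # 200192
print(config["hidden_size"])          # 3
print(config["num_attention_heads"])  # 4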