{
    "architectures": ["SPECLMHeadModel"],
    "model_type": "spec-1-mini",
    "n_ctx": 1024,
    "n_embd": 768,
    "n_head": 12,
    "n_layer": 12,
    "vocab_size": 50257,
    "activation_function": "gelu",
    "initializer_range": 0.02,
    "layer_norm_epsilon": 1e-5
}
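
For reference, a minimal sketch of reading and sanity-checking this config with plain Python. The "config.json" path and the derived per-head dimension (n_embd / n_head) are illustrative assumptions, not fields of the file itself.

    import json

    # Load the model configuration (path "config.json" assumed for illustration).
    with open("config.json") as f:
        cfg = json.load(f)

    # Basic sanity check derived from the fields above: the embedding
    # dimension must split evenly across the attention heads.
    assert cfg["n_embd"] % cfg["n_head"] == 0, "n_embd must be divisible by n_head"
    head_dim = cfg["n_embd"] // cfg["n_head"]  # 768 / 12 = 64 dims per head

    print(f'{cfg["model_type"]}: {cfg["n_layer"]} layers, '
          f'{cfg["n_head"]} heads x {head_dim} dims, '
          f'context {cfg["n_ctx"]}, vocab {cfg["vocab_size"]}')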