{
    "dim": 3072,
    "n_layers": 28,
    "n_heads": 24,
    "n_kv_heads": 8,
    "vocab_size": 128256,
    "ffn_dim_multiplier": 1.0,
    "multiple_of": 256,
    "norm_eps": 1e-05,
    "rope_theta": 500000.0,
    "use_scaled_rope": true,
    "quantization_args": {
        "group_size": 32
    },
    "lora_args": {
        "rank": 16,
        "scale": 2.0
    }
}
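These fields match the shape of a Llama-style `params.json` (grouped-query attention, RoPE with scaling, plus group-wise quantization and LoRA adapter settings). As a minimal sketch of how the derived dimensions fall out of this file, the snippet below loads the JSON and computes the head size, the query-to-KV head ratio, and the FFN hidden size. The filename `params.json` and the FFN sizing rule (the 2/3 · 4d convention from Meta's Llama reference code, rounded up to `multiple_of`) are assumptions, not something stated in the file itself.

```python
# Hypothetical sketch: parse the config above and derive model dimensions.
# Assumes the file is saved as params.json and follows Llama conventions.
import json
from dataclasses import dataclass


@dataclass
class ModelArgs:
    dim: int
    n_layers: int
    n_heads: int
    n_kv_heads: int
    vocab_size: int
    ffn_dim_multiplier: float
    multiple_of: int
    norm_eps: float
    rope_theta: float
    use_scaled_rope: bool
    quantization_args: dict
    lora_args: dict


with open("params.json") as f:  # assumed filename
    args = ModelArgs(**json.load(f))

# Per-head dimension and grouped-query attention ratio.
head_dim = args.dim // args.n_heads          # 3072 / 24 = 128
kv_groups = args.n_heads // args.n_kv_heads  # 24 / 8 = 3 query heads per KV head

# FFN hidden size, assuming the Llama-style 2/3 * 4d rule,
# scaled by ffn_dim_multiplier and rounded up to a multiple of multiple_of.
hidden = int(args.ffn_dim_multiplier * int(2 * 4 * args.dim / 3))
hidden = args.multiple_of * ((hidden + args.multiple_of - 1) // args.multiple_of)

print(head_dim, kv_groups, hidden)  # 128 3 8192
```

Under these assumptions the config describes a 28-layer model with 128-dim heads, 3:1 grouped-query attention, an 8192-wide FFN, 32-element quantization groups, and rank-16 LoRA adapters with scale 2.0.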