{
    "lora_layers": 8,
    "num_layers": 26,
    "lora_parameters": {
        "rank": 16,
        "scale": 2.0,
        "dropout": 0.05,
        "keys": ["self_attn.q_proj", "self_attn.v_proj"]
    }
}
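
To make the fields concrete, here is a minimal Python sketch of how such a config might be consumed. The file name adapter_config.json and the convention of applying LoRA to the last lora_layers of the model's num_layers transformer blocks are assumptions for illustration, not something the file itself guarantees.

import json

# Minimal sketch: load and interpret the config above.
# Assumptions (not stated in the file itself): the file is saved as
# "adapter_config.json", and adapters are applied to the *last*
# `lora_layers` of the model's `num_layers` blocks, a common LoRA convention.
with open("adapter_config.json") as f:
    config = json.load(f)

lora = config["lora_parameters"]
first = config["num_layers"] - config["lora_layers"]  # 26 - 8 = 18

print(f"LoRA on layers {first}..{config['num_layers'] - 1} "
      f"({config['lora_layers']} of {config['num_layers']} blocks)")
print(f"rank={lora['rank']}  scale={lora['scale']}  dropout={lora['dropout']}")
print("target projections:", ", ".join(lora["keys"]))

Under these assumptions, the config adapts layers 18 through 25 with rank-16 adapters on the query and value projections only, which keeps the trainable parameter count small relative to full fine-tuning.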