{
"step": 160,
"lora_params": {
"lora_rank": 64,
"lora_alpha": 128,
"target_modules": [
"attn.to_q",
"attn.to_k",
"attn.to_v",
"attn.to_out.0",
"attn.add_q_proj",
"attn.add_k_proj",
"attn.add_v_proj",
"attn.to_add_out",
"ff.linear_in",
"ff.linear_out",
"ff_context.linear_in",
"ff_context.linear_out",
"attn.to_qkv_mlp_proj"
]
}
}
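
The fields are standard LoRA hyperparameters saved alongside a checkpoint: `step` is the training step at which it was written, and the adapter is rank 64 with alpha 128 (an effective scaling of alpha/rank = 2.0), targeting what appear to be the attention and feed-forward projections of a diffusers-style transformer. A minimal sketch of consuming this metadata, assuming the adapter is rebuilt with Hugging Face `peft` (the file itself only records hyperparameters; the filename `lora_params.json` is hypothetical):

```python
import json

from peft import LoraConfig

# Load the saved training metadata (filename is an assumption).
with open("lora_params.json") as f:
    meta = json.load(f)

params = meta["lora_params"]

# Rebuild an equivalent adapter config. The scaling applied to each
# LoRA delta is lora_alpha / r = 128 / 64 = 2.0.
config = LoraConfig(
    r=params["lora_rank"],
    lora_alpha=params["lora_alpha"],
    target_modules=params["target_modules"],
)
```

The `target_modules` strings are matched as suffixes against module names in the wrapped model, so the same config applies to every transformer block that contains these projections.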