The LoRA adapter configuration (commit d49653d, 169 bytes):

```json
{
"peft_type": "LORA",
"task_type": "CAUSAL_LM",
"r": 16,
"lora_alpha": 32,
"lora_dropout": 0.05,
"bias": "none",
"target_modules": ["q_proj", "v_proj"]
}
```
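This JSON matches the format the Hugging Face `peft` library writes out as `adapter_config.json` when an adapter is saved. Below is a minimal sketch of building the same configuration in code; the base model name is a placeholder assumption (any LLaMA-family model whose attention layers expose `q_proj`/`v_proj` would fit), not something named in the original.

```python
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

# Placeholder base model (assumption): a LLaMA-style architecture whose
# attention projections are named q_proj / v_proj, matching target_modules.
base_model = AutoModelForCausalLM.from_pretrained(
    "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
)

lora_config = LoraConfig(
    task_type="CAUSAL_LM",                 # "task_type" in the JSON
    r=16,                                  # LoRA rank
    lora_alpha=32,                         # scaling; effective scale = alpha / r = 2.0
    lora_dropout=0.05,                     # dropout on the LoRA branch during training
    bias="none",                           # bias parameters stay frozen
    target_modules=["q_proj", "v_proj"],   # attach adapters to the q/v projections
)

model = get_peft_model(base_model, lora_config)
model.print_trainable_parameters()

# save_pretrained() writes adapter_config.json (the JSON shown above)
# alongside the trained adapter weights.
model.save_pretrained("lora-adapter")
```

With rank 16 on only the query and value projections, the trainable adapter is a small fraction of the base model's parameters, which is the usual trade-off this kind of config is tuned around.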