dzur658 — Upload adapter and adapter config (commit b213c2f, verified) — 392 Bytes
{
  "model": "Qwen/Qwen3-1.7B",
  "num_layers": 28,
  "lora_parameters": {
    "rank": 8,
    "scale": 16,
    "dropout": 0.0,
    "keys": [
      "self_attn.q_proj",
      "self_attn.k_proj",
      "self_attn.v_proj",
      "self_attn.o_proj",
      "mlp.gate_proj",
      "mlp.up_proj",
      "mlp.down_proj"
    ]
  }
}
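
This is the adapter config format used by mlx-lm LoRA fine-tuning: rank-8 LoRA matrices with a scale of 16 and no dropout, applied to the attention and MLP projection keys listed above across 28 layers of Qwen/Qwen3-1.7B. As a minimal sketch of how such a config is typically consumed with mlx-lm (assuming the adapter weights sit next to this file in a local directory named "adapters"; the directory name and prompt are illustrative, not part of this repo):

from mlx_lm import load, generate

# Load the base model named in the config and apply the LoRA adapter;
# mlx-lm reads adapter_config.json and the adapter weights from the
# directory given as adapter_path ("adapters" is an assumed path).
model, tokenizer = load("Qwen/Qwen3-1.7B", adapter_path="adapters")

prompt = "Explain LoRA fine-tuning in one sentence."
print(generate(model, tokenizer, prompt=prompt, max_tokens=128))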