{
"base_model": "Qwen/Qwen3-8B",
"dtype": "bfloat16",
"lora": {
"r": 16,
"alpha": 32,
"dropout": 0.05,
"targets": [
"q_proj",
"k_proj",
"v_proj",
"o_proj"
]
},
"max_seq_length": 384
}