{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "unsloth/qwen2.5-1.5b-instruct-bnb-4bit",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 16,
  "lora_dropout": 0,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 16,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "layers.21.mlp.up_proj",
    "layers.26.self_attn.v_proj",
    "layers.22.mlp.gate_proj",
    "layers.22.mlp.up_proj",
    "layers.23.self_attn.q_proj",
    "layers.22.self_attn.o_proj",
    "layers.22.self_attn.k_proj",
    "layers.22.self_attn.v_proj",
    "layers.21.mlp.down_proj",
    "layers.24.self_attn.o_proj",
    "layers.24.self_attn.k_proj",
    "layers.23.self_attn.o_proj",
    "layers.26.self_attn.o_proj",
    "layers.24.self_attn.q_proj",
    "layers.27.mlp.down_proj",
    "layers.24.mlp.down_proj",
    "layers.27.mlp.up_proj",
    "layers.23.mlp.down_proj",
    "layers.25.self_attn.o_proj",
    "layers.27.self_attn.k_proj",
    "layers.23.self_attn.v_proj",
    "layers.21.self_attn.k_proj",
    "layers.24.mlp.gate_proj",
    "layers.24.self_attn.v_proj",
    "layers.21.mlp.gate_proj",
    "layers.22.self_attn.q_proj",
    "layers.24.mlp.up_proj",
    "layers.23.mlp.gate_proj",
    "layers.26.self_attn.q_proj",
    "layers.26.self_attn.k_proj",
    "layers.27.self_attn.v_proj",
    "layers.22.mlp.down_proj",
    "layers.27.self_attn.o_proj",
    "layers.21.self_attn.o_proj",
    "layers.27.mlp.gate_proj",
    "layers.25.mlp.gate_proj",
    "layers.21.self_attn.v_proj",
    "layers.26.mlp.up_proj",
    "layers.21.self_attn.q_proj",
    "layers.25.self_attn.v_proj",
    "layers.26.mlp.gate_proj",
    "layers.26.mlp.down_proj",
    "layers.23.mlp.up_proj",
    "layers.23.self_attn.k_proj",
    "layers.25.self_attn.q_proj",
    "layers.25.mlp.up_proj",
    "layers.27.self_attn.q_proj",
    "layers.25.mlp.down_proj",
    "layers.25.self_attn.k_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "use_rslora": false
}
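
This is a PEFT `adapter_config.json` for a plain LoRA adapter (`r=16`, `lora_alpha=16`, no DoRA or rsLoRA) whose `target_modules` cover only the attention and MLP projections of decoder layers 21 through 27 of the 4-bit Qwen2.5-1.5B-Instruct base. Below is a minimal sketch of loading it for inference; the adapter directory `./qwen25-lora` is a hypothetical path standing in for wherever this config and its weights live.

```python
# Minimal sketch: load the adapter described by this config with PEFT.
# Assumes transformers, peft, and bitsandbytes are installed; the base
# model is bnb-4bit quantized. "./qwen25-lora" is a hypothetical path --
# substitute your own adapter directory or Hub repo id.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "unsloth/qwen2.5-1.5b-instruct-bnb-4bit"  # base_model_name_or_path above

tokenizer = AutoTokenizer.from_pretrained(base_id)
base = AutoModelForCausalLM.from_pretrained(base_id, device_map="auto")

# inference_mode is true in the config, so load for generation only.
model = PeftModel.from_pretrained(base, "./qwen25-lora")
model.eval()

inputs = tokenizer("Hello!", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=32)[0]))
```

Because only layers 21 to 27 appear in `target_modules`, the LoRA deltas touch just the top quarter of the decoder stack; the remaining layers run the frozen 4-bit base weights unchanged.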