{
  "auto_mapping": null,
  "base_model_name_or_path": "meta-llama/Llama-3.2-1B-Instruct",
  "fill_orthonormal": false,
  "inference_mode": true,
  "off_diag": 1,
  "pattern": "banded",
  "peft_type": "SVFT",
  "rank": null,
  "revision": null,
  "target_modules": [
    "q_proj",
    "k_proj"
  ],
  "task_type": "CAUSAL_LM"
}