{
    "default": {
        "peft_type": "LORA",
        "auto_mapping": null,
        "base_model_name_or_path": "meta-llama/Llama-2-70b-hf",
        "revision": null,
        "task_type": "CAUSAL_LM",
        "inference_mode": false,
        "r": 8,
        "target_modules": [
            "q_proj",
            "v_proj"
        ],
        "lora_alpha": 32,
        "lora_dropout": 0.05,
        "fan_in_fan_out": false,
        "bias": "none",
        "modules_to_save": null,
        "init_lora_weights": true,
        "layers_to_transform": null,
        "layers_pattern": null
    }
}