{
  "auto_mapping": null,
  "base_model_name_or_path": "gpt2-medium",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "lora_alpha": 32,
  "lora_dropout": 0.1,
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
  "revision": null,
  "target_modules": [
    "transformer.h.0.attn.c_attn",
    "transformer.h.1.attn.c_attn",
    "transformer.h.2.attn.c_attn",
    "transformer.h.3.attn.c_attn",
    "transformer.h.4.attn.c_attn",
    "transformer.h.5.attn.c_attn",
    "transformer.h.6.attn.c_attn",
    "transformer.h.7.attn.c_attn",
    "transformer.h.8.attn.c_attn",
    "transformer.h.9.attn.c_attn",
    "transformer.h.10.attn.c_attn",
    "transformer.h.11.attn.c_attn",
    "transformer.h.12.attn.c_attn",
    "transformer.h.13.attn.c_attn",
    "transformer.h.14.attn.c_attn",
    "transformer.h.15.attn.c_attn",
    "transformer.h.16.attn.c_attn",
    "transformer.h.17.attn.c_attn",
    "transformer.h.18.attn.c_attn",
    "transformer.h.19.attn.c_attn",
    "transformer.h.20.attn.c_attn",
    "transformer.h.21.attn.c_attn",
    "transformer.h.22.attn.c_attn",
    "transformer.h.23.attn.c_attn"
  ],
  "task_type": "CAUSAL_LM"
}