{
  "config": {
    "alpha": 512,
    "architecture": "lora",
    "attn_matrices": [
      "q",
      "v"
    ],
    "composition_mode": "add",
    "dropout": 0.1,
    "init_weights": "lora",
    "intermediate_lora": false,
    "leave_out": [],
    "output_lora": false,
    "r": 256,
    "selfattn_lora": true,
    "use_gating": false
  },
  "config_id": "5545afacf2f279e5",
  "hidden_size": 2048,
  "model_class": "LlamaForCausalLM",
  "model_name": "meta-llama/Llama-3.2-1B-Instruct",
  "model_type": "llama",
  "name": "llama23-1b-instruct-lora256",
  "version": "adapters.1.0.0"
}
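
For reference, a minimal sketch of how an equivalent LoRA configuration could be built and attached to the base model with the `adapters` library. This is an illustrative assumption, not part of the config file itself: it assumes the `adapters` and `transformers` packages are installed, loads the base checkpoint named above via `AutoModelForCausalLM`, and reuses the adapter name from this file.

# Minimal sketch mirroring the "config" block above (assumed usage, not shipped with this file).
import adapters
from adapters import LoRAConfig
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-3.2-1B-Instruct")
adapters.init(model)  # enable adapter support on the plain Hugging Face model

lora_config = LoRAConfig(
    r=256,
    alpha=512,
    dropout=0.1,
    attn_matrices=["q", "v"],  # apply LoRA to the query and value projections
    selfattn_lora=True,
    intermediate_lora=False,
    output_lora=False,
    composition_mode="add",
    init_weights="lora",
    use_gating=False,
    leave_out=[],
)

model.add_adapter("llama23-1b-instruct-lora256", config=lora_config)
model.train_adapter("llama23-1b-instruct-lora256")  # freeze base weights, train only the LoRA modules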