{
  "lora_layers": 8,
  "num_layers": 26,
  "lora_parameters": {
    "rank": 16,
    "scale": 2.0,
    "dropout": 0.05,
    "keys": ["self_attn.q_proj", "self_attn.v_proj"]
  }
}