{
  "lora_parameters": {
    "rank": 4,
    "alpha": 4,
    "dropout": 0.1,
    "scale": 2.0
  },
  "num_layers": 4
}