{
  "pooling": {
    "dropout": 0,
    "hidden_act": "gelu"
  },
  "inject_adapter": "hra",
  "hra": {
    "r": 8,
    "apply_GS": false,
    "suffix": ["hra_u"]
  },
  "oft": {
    "block_size": 16,
    "is_coft": true,
    "block_share": false,
    "eps": 1e-5,
    "suffix": ["oft_R"]
  },
  "lora": {
    "lora_r": 8,
    "lora_alpha": 32,
    "merge_weights": false,
    "lora_dropout": 0,
    "suffix": ["lora_A", "lora_B"]
  },
  "vocab_size": 128100
}