HRA/nlu/experiments/glue/config.json
{
"pooling": {
"dropout": 0,
"hidden_act": "gelu"
},
"inject_adapter": "hra",
"hra": {
"r": 8,
"apply_GS": false,
"suffix": ["hra_u"]
},
"oft": {
"block_size": 16,
"is_coft": true,
"block_share": false,
"eps": 1e-5,
"suffix": ["oft_R"]
},
"lora": {
"lora_r": 8,
"lora_alpha": 32,
"merge_weights": false,
"lora_dropout": 0,
"suffix": ["lora_A", "lora_B"]
},
"vocab_size": 128100
}
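
For reference, here is a minimal sketch of how a training script might consume this file: parse the JSON, read "inject_adapter" to find the active adapter ("hra" here), and hand that adapter's block of hyperparameters to the model builder. The loader below is purely illustrative and not taken from this repository; function and path names are assumptions.

import json

# Hypothetical helper: selects the hyperparameters for the adapter
# named by the "inject_adapter" key in the experiment config.
def load_adapter_config(path: str) -> dict:
    with open(path) as f:
        cfg = json.load(f)
    adapter_name = cfg["inject_adapter"]   # e.g. "hra"
    adapter_cfg = cfg[adapter_name]        # that adapter's hyperparameter block
    return {"name": adapter_name, **adapter_cfg}

if __name__ == "__main__":
    # Assumed relative path; adjust to where the config actually lives.
    cfg = load_adapter_config("experiments/glue/config.json")
    # With this file, prints: hra {'r': 8, 'apply_GS': False, 'suffix': ['hra_u']}
    print(cfg["name"], {k: v for k, v in cfg.items() if k != "name"})

Keeping all three adapter blocks (hra, oft, lora) in one file while switching via "inject_adapter" lets experiments swap adapters by changing a single key rather than maintaining separate config files.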