{
"config": {
"alpha": 1,
"architecture": "lora",
"attn_matrices": [
"k",
"v"
],
"composition_mode": "scale",
"dropout": 0.0,
"init_weights": "ia3",
"intermediate_lora": true,
"leave_out": [],
"output_lora": false,
"r": 1,
"selfattn_lora": true,
"use_gating": false
},
"hidden_size": 768,
"model_class": "RobertaAdapterModel",
"model_name": "roberta-base",
"model_type": "roberta",
"name": "F_adapter_ia3_classification_C_30_adapter",
"version": "0.1.2"
}