{
"architectures": [
"SpotRnaModel"
],
"bos_token_id": 1,
"conv_dropout": 0.25,
"dtype": "float32",
"eos_token_id": 2,
"fc_dropout": 0.5,
"hidden_act": "relu",
"id2label": null,
"input_channels": 8,
"label2id": null,
"mask_token_id": 4,
"model_type": "spotrna",
"module_configs": [
{
"blstm_hidden_size": 200,
"conv_channels": 48,
"dilation_cycle": 5,
"fc_act": null,
"fc_hidden_size": 512,
"hidden_act": "relu",
"num_blstm_blocks": 0,
"num_conv_blocks": 16,
"num_fc_blocks": 2,
"output_act": "relu",
"use_dilation": false
},
{
"blstm_hidden_size": 200,
"conv_channels": 64,
"dilation_cycle": 5,
"fc_act": null,
"fc_hidden_size": 512,
"hidden_act": "relu",
"num_blstm_blocks": 0,
"num_conv_blocks": 20,
"num_fc_blocks": 1,
"output_act": "relu",
"use_dilation": false
},
{
"blstm_hidden_size": 200,
"conv_channels": 64,
"dilation_cycle": 5,
"fc_act": null,
"fc_hidden_size": 512,
"hidden_act": "relu",
"num_blstm_blocks": 0,
"num_conv_blocks": 30,
"num_fc_blocks": 1,
"output_act": "relu",
"use_dilation": false
},
{
"blstm_hidden_size": 200,
"conv_channels": 64,
"dilation_cycle": 5,
"fc_act": null,
"fc_hidden_size": 512,
"hidden_act": "elu",
"num_blstm_blocks": 1,
"num_conv_blocks": 30,
"num_fc_blocks": 0,
"output_act": "relu",
"use_dilation": false
},
{
"blstm_hidden_size": 200,
"conv_channels": 64,
"dilation_cycle": 5,
"fc_act": "elu",
"fc_hidden_size": 512,
"hidden_act": "relu",
"num_blstm_blocks": 0,
"num_conv_blocks": 30,
"num_fc_blocks": 1,
"output_act": "elu",
"use_dilation": true
}
],
"null_token_id": 5,
"num_labels": 1,
"pad_token_id": 0,
"threshold": 0.335,
"tie_word_embeddings": true,
"transformers_version": "5.7.0",
"unk_token_id": 3,
"vocab_size": 5
}