{
"_name_or_path": "/large_storage/goodarzilab/userspace/mohsen/biofm/saved_models/EnCodon/CodonBERT_L2048_l6_a8_b32_r0.0001_mlm0.2_wd0.01_g2_euk_adapt-4pxzs58g/checkpoint-90000",
"architectures": [
"EnCodon"
],
"attention_probs_dropout_prob": 0.1,
"attention_type": "self",
"auto_map": {
"AutoConfig": "configuration_encodon.EnCodonConfig",
"AutoModelForMaskedLM": "modeling_encodon.EnCodon",
"AutoModelForSequenceClassification": "cdsfm.encodon.modeling_encodon.EnCodonForSequenceTask",
"AutoTokenizer": "cdsfm.encodon.tokenization_encodon.EnCodonTokenizer"
},
"classifier_dropout": 0.1,
"dilation_rates": null,
"gamma_init": 1.5763586678760644,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"initializer_range": 0.02,
"intermediate_size": 4096,
"layer_norm_eps": 1e-12,
"lm_type": "distilled",
"max_position_embeddings": 2048,
"model_type": "encodon",
"num_attention_heads": 8,
"num_divs": 0,
"num_hidden_layers": 6,
"pad_token_id": 3,
"pooler_activation": "tanh",
"position_embedding_type": "rotary",
"rotary_theta": 10000.0,
"segment_lengths": null,
"torch_dtype": "float32",
"transformers_version": "4.44.2",
"type_vocab_size": 2,
"use_cache": true,
"use_flash_attn": true,
"use_nsp": false,
"use_rotary_emb": true,
"vocab_size": 69
}