{
"_name_or_path": "/large_storage/goodarzilab/userspace/mohsen/biofm/saved_models/EnCodon/CodonBERT_L2048_l12_a16_b16_r1e-05_mlm0.2_wd0.01_g2_euk_adapt-w8cn6xy0/checkpoint-150000",
"architectures": [
"EnCodon"
],
"attention_probs_dropout_prob": 0.1,
"attention_type": "self",
"auto_map": {
"AutoConfig": "configuration_encodon.EnCodonConfig",
"AutoModelForMaskedLM": "modeling_encodon.EnCodon",
"AutoModelForSequenceClassification": "cdsfm.encodon.modeling_encodon.EnCodonForSequenceTask",
"AutoTokenizer": "cdsfm.encodon.tokenization_encodon.EnCodonTokenizer"
},
"classifier_dropout": 0.1,
"dilation_rates": null,
"gamma_init": 1.782709687623856,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 2048,
"initializer_range": 0.02,
"intermediate_size": 8192,
"layer_norm_eps": 1e-12,
"lm_type": "distilled",
"max_position_embeddings": 2048,
"model_type": "encodon",
"num_attention_heads": 16,
"num_divs": 0,
"num_hidden_layers": 12,
"pad_token_id": 3,
"pooler_activation": "tanh",
"position_embedding_type": "rotary",
"rotary_theta": 10000.0,
"segment_lengths": null,
"torch_dtype": "float32",
"transformers_version": "4.44.2",
"type_vocab_size": 2,
"use_cache": true,
"use_flash_attn": true,
"use_nsp": false,
"use_rotary_emb": true,
"vocab_size": 69
}