decodon-200M-euk / config.json
{
"_name_or_path": "/large_storage/goodarzilab/userspace/mohsen/biofm/saved_models/DeCodon/CodonGPT_L2048_l12_a16_b32_r5e-05_wd0.01_g2_euk_adapt_taxid-yzhnq4du/checkpoint-60000",
"architectures": [
"DeCodon"
],
"attention_probs_dropout_prob": 0.1,
"attention_type": "self",
"auto_map": {
"AutoConfig": "configuration_decodon.DeCodonConfig",
"AutoModelForCausalLM": "modeling_decodon.DeCodon",
"AutoTokenizer": "cdsfm.decodon.tokenization_decodon.DeCodonTokenizer"
},
"classifier_dropout": 0.1,
"dilation_rates": null,
"gamma_init": 1.782709687623856,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"initializer_range": 0.02,
"intermediate_size": 4096,
"is_decoder": true,
"layer_norm_eps": 1e-12,
"lm_type": "distilled",
"max_position_embeddings": 2048,
"model_type": "decodon",
"num_attention_heads": 16,
"num_hidden_layers": 12,
"pad_token_id": 3,
"position_embedding_type": "rotary",
"rotary_theta": 10000.0,
"segment_lengths": null,
"torch_dtype": "float32",
"transformers_version": "4.44.2",
"type_vocab_size": 2,
"use_cache": true,
"use_flash_attn": true,
"use_rotary_emb": true,
"vocab_size": 1736
}
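
The `auto_map` entries above point at custom `configuration_decodon` / `modeling_decodon` code shipped with the repo, so loading through the `Auto*` classes requires `trust_remote_code=True`. Below is a minimal loading sketch; the repo id `mohsennp/decodon-200M-euk` is an assumption (substitute the actual hub path), and the tokenizer mapping resolves into the `cdsfm` package, which must be installed for that call to succeed.

```python
# Minimal loading sketch (assumptions: the hub repo id below is hypothetical;
# the custom DeCodon classes referenced in auto_map live in the repo, hence
# trust_remote_code=True).
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "mohsennp/decodon-200M-euk"  # hypothetical repo id; adjust as needed

config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
assert config.model_type == "decodon"
# Matches the fields above: hidden_size=1024, num_hidden_layers=12, num_attention_heads=16
print(config.hidden_size, config.num_hidden_layers, config.num_attention_heads)

# The AutoTokenizer mapping resolves to cdsfm.decodon.tokenization_decodon,
# so the cdsfm package must be importable for this to work.
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)

model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
model.eval()
```

Note that `"use_flash_attn": true` suggests the custom modeling code may attempt to use FlashAttention kernels; whether it falls back gracefully on hardware without flash-attn installed depends on the repo's own modeling code.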