ChemMRL / config.json
{
"architectures": [
"ModChemBertModel"
],
"attention_bias": false,
"attention_dropout": 0.1,
"auto_map": {
"AutoConfig": "configuration_modchembert.ModChemBertConfig",
"AutoModel": "modeling_modchembert.ModChemBertModel",
"AutoModelForMaskedLM": "modeling_modchembert.ModChemBertForMaskedLM",
"AutoModelForSequenceClassification": "modeling_modchembert.ModChemBertForSequenceClassification"
},
"bos_token_id": 0,
"classifier_activation": "gelu",
"classifier_bias": false,
"classifier_dropout": 0.0,
"classifier_pooling": "max_seq_mha",
"classifier_pooling_attention_dropout": 0.1,
"classifier_pooling_last_k": 5,
"classifier_pooling_num_attention_heads": 4,
"cls_token_id": 0,
"decoder_bias": true,
"deterministic_flash_attn": false,
"dtype": "bfloat16",
"embedding_dropout": 0.1,
"eos_token_id": 1,
"global_attn_every_n_layers": 3,
"global_rope_theta": 160000.0,
"hidden_activation": "gelu",
"hidden_size": 1024,
"initializer_cutoff_factor": 2.0,
"initializer_range": 0.02,
"intermediate_size": 1536,
"layer_norm_eps": 1e-05,
"local_attention": 8,
"local_rope_theta": 10000.0,
"max_position_embeddings": 512,
"mlp_bias": false,
"mlp_dropout": 0.1,
"model_type": "modchembert",
"norm_bias": false,
"norm_eps": 1e-05,
"num_attention_heads": 16,
"num_hidden_layers": 22,
"pad_token_id": 2,
"position_embedding_type": "absolute",
"repad_logits_with_grad": false,
"sep_token_id": 1,
"sparse_pred_ignore_index": -100,
"sparse_prediction": false,
"transformers_version": "4.57.1",
"vocab_size": 2362
}
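
The `auto_map` entries above point to custom `ModChemBert` classes shipped with the repository, so loading the model requires `trust_remote_code=True`. Below is a minimal usage sketch; the repo id `"eacortes/ChemMRL"` and the example SMILES string are assumptions (substitute the actual Hugging Face repo path), while the dtype, hidden size, and sequence-length values mirror the config fields above.

```python
# Minimal loading sketch for this config (repo id is a placeholder, not confirmed by the file).
import torch
from transformers import AutoConfig, AutoModel, AutoTokenizer

repo_id = "eacortes/ChemMRL"  # hypothetical repo path; replace with the real one

# trust_remote_code=True is needed because auto_map resolves to custom classes
# (configuration_modchembert.ModChemBertConfig, modeling_modchembert.ModChemBertModel).
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModel.from_pretrained(
    repo_id,
    trust_remote_code=True,
    dtype=torch.bfloat16,  # matches "dtype": "bfloat16" in this config
)

# Encode one SMILES string; max_position_embeddings is 512, so truncate to that length.
inputs = tokenizer(
    "CC(=O)Oc1ccccc1C(=O)O",  # example molecule (aspirin), chosen for illustration only
    return_tensors="pt",
    truncation=True,
    max_length=512,
)
with torch.no_grad():
    outputs = model(**inputs)

# Hidden states have width 1024, matching "hidden_size": 1024.
print(outputs.last_hidden_state.shape)
```

For fill-mask or sequence-classification use, `AutoModelForMaskedLM` and `AutoModelForSequenceClassification` resolve through the same `auto_map` to `ModChemBertForMaskedLM` and `ModChemBertForSequenceClassification`, again with `trust_remote_code=True`.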