moebertyc4stacked / config.json
{
"_name_or_path": "./12l-10_stacked",
"architectures": [
"CustomTransformerForMaskedLM"
],
"attention_probs_dropout_prob": 0.1,
"attn_out_bias": false,
"attn_out_dropout_prob": 0.0,
"attn_qkv_bias": false,
"auto_map": {
"AutoConfig": "model.CustomTransformerConfig",
"AutoModel": "model.CustomTransformerModel",
"AutoModelForMaskedLM": "model.CustomTransformerForMaskedLM",
"AutoModelForSequenceClassification": "model.CustomTransformerForSequenceClassification"
},
"bos_token_id": 1,
"classifier_dropout": null,
"context_len": 128,
"deterministic_fa2": false,
"embedding_size": 768,
"eos_token_id": 2,
"ffn_dim_multiplier": null,
"ffn_hidden_dims": 1536,
"global_attn_every_n_layers": 3,
"hidden_size": 768,
"layernorm_eps": 1e-06,
"local_attn_rotary_emb_base": -1,
"local_attn_rotary_emb_dim": null,
"mask_token_id": 3,
"model_type": "custom_transformer",
"moe_aux_loss_coef": 0.01,
"moe_eps": 1e-06,
"moe_num_experts": 15,
"moe_routed_experts": 1,
"moe_shared_experts": 1,
"num_attention_heads": 12,
"num_dims": 768,
"num_heads": 12,
"num_kv_heads": 12,
"num_layers": 12,
"pad_token_id": 0,
"rope_theta": 100000.0,
"rotary_emb_base": 10000,
"rotary_emb_dim": 64,
"rotary_emb_interleaved": false,
"rotary_emb_scale_base": null,
"sliding_window": 128,
"torch_dtype": "float32",
"transformers_version": "4.48.0",
"use_cache": false,
"use_fa2": true,
"use_flash": true,
"use_lossfreebalance": true,
"use_moe": true,
"use_sdpa_attn_mask": false,
"vocab_size": 50368
}
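
The auto_map block routes the transformers Auto classes to custom code shipped alongside this config (a model.py defining CustomTransformerConfig and friends), so the checkpoint cannot be loaded with the built-in architectures alone; trust_remote_code=True is required. Below is a minimal loading sketch. The repo id "mjerome89/moebertyc4stacked" is inferred from the page header and may differ; everything else follows directly from the fields above.

from transformers import AutoConfig, AutoModelForMaskedLM

# trust_remote_code=True is needed because auto_map points at
# model.CustomTransformerConfig / model.CustomTransformerForMaskedLM
# in the repo rather than a built-in architecture.
config = AutoConfig.from_pretrained("mjerome89/moebertyc4stacked", trust_remote_code=True)
model = AutoModelForMaskedLM.from_pretrained(
    "mjerome89/moebertyc4stacked",
    trust_remote_code=True,
)

# A few fields worth sanity-checking after load: 12 layers, 12 heads,
# 768 hidden dims, and a MoE FFN with top-1 routing over 15 experts
# plus 1 shared expert.
print(config.num_layers, config.num_heads, config.hidden_size, config.moe_num_experts)

Note that weights are stored in float32 (torch_dtype) and use_cache is disabled, as expected for an encoder-style masked LM; the special-token ids (pad 0, bos 1, eos 2, mask 3) must match whatever tokenizer ships with the repo.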