{
  "model_type": "eve-moe",
  "architectures": ["EveMoEForCausalLM"],
  "auto_map": {
    "AutoConfig": "configuration_eve.EveConfig",
    "AutoModelForCausalLM": "modeling_eve.EveMoEForCausalLM"
  },
  "vocab_size": 50304,
  "n_layer": 12,
  "n_embd": 512,
  "n_head": 8,
  "head_dim": 64,
  "block_size": 2048,
  "num_experts": 8,
  "top_k": 2,
  "expert_intermediate_size": 1408,
  "shared_expert_intermediate_size": 1408,
  "router_aux_loss_coef": 0.01,
  "rope_theta": 10000.0,
  "use_checkpointing": false,
  "tie_word_embeddings": true,
  "eos_token_id": 50256,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.46.0"
}
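
The "auto_map" block points AutoConfig and AutoModelForCausalLM at custom code files (configuration_eve.py and modeling_eve.py) shipped in the same repository, so the model must be loaded with trust_remote_code=True. A minimal loading sketch follows; the repo id "your-org/eve-moe" is a placeholder, and the attribute access assumes the custom EveConfig exposes the JSON keys as attributes, as PretrainedConfig subclasses conventionally do:

import torch
from transformers import AutoConfig, AutoModelForCausalLM

MODEL_ID = "your-org/eve-moe"  # placeholder; substitute the real local path or hub id

# auto_map resolves EveConfig / EveMoEForCausalLM from the repo's own
# Python files rather than from the transformers library itself, hence
# trust_remote_code=True on every from_pretrained call.
config = AutoConfig.from_pretrained(MODEL_ID, trust_remote_code=True)

model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in the config
)

# Sanity checks implied by the config values (assuming EveConfig keeps
# the JSON field names as attributes):
assert config.n_head * config.head_dim == config.n_embd  # 8 * 64 == 512
assert config.top_k <= config.num_experts                # each token routed to 2 of 8 experts

Note that only top_k of the num_experts expert MLPs (plus the always-on shared expert sized by "shared_expert_intermediate_size") run per token, so compute per token is far lower than the total parameter count suggests. The "router_aux_loss_coef" of 0.01 presumably scales a load-balancing auxiliary loss during training, as is standard for top-k MoE routers, and would have no effect at inference; the exact behavior lives in modeling_eve.py, which is not shown here.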