Eve-2-MoE-272M / config.json
anthonym21 · Upload config.json with huggingface_hub · commit 939099f (verified)
{
  "architecture": "Eve-2-MoE",
  "vocab_size": 50304,
  "n_layer": 12,
  "n_embd": 512,
  "n_head": 8,
  "head_dim": 64,
  "block_size": 2048,
  "num_experts": 8,
  "top_k": 2,
  "expert_intermediate_size": 1408,
  "shared_expert_intermediate_size": 1408,
  "rope_theta": 10000.0,
  "auto_map": {
    "AutoConfig": "configuration_eve.EveConfig",
    "AutoModelForCausalLM": "modeling_eve.DeepSeekMoE"
  },
  "architectures": [
    "DeepSeekMoE"
  ]
}
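
The fields above pin down the full architecture and roughly account for the "272M" in the repo name. Below is a minimal sketch of that arithmetic; it assumes (none of this is stated in the config itself) DeepSeek-style gated experts with gate/up/down projections, one always-active shared expert, a linear router, bias-free attention projections, negligible norm parameters, and an embedding table tied to the LM head.

```python
# Rough parameter estimate for the Eve-2-MoE-272M config above.
cfg = {
    "vocab_size": 50304,
    "n_layer": 12,
    "n_embd": 512,
    "num_experts": 8,
    "expert_intermediate_size": 1408,
    "shared_expert_intermediate_size": 1408,
}

d = cfg["n_embd"]

# Token embeddings (assumed tied with the LM head, so counted once).
embed = cfg["vocab_size"] * d

# Attention: Q, K, V and output projections, each d x d (n_head * head_dim == n_embd).
attn = 4 * d * d

# A gated MLP (gate, up, down projections) with the given hidden size.
def mlp_params(hidden: int) -> int:
    return 3 * d * hidden

# Router that scores the routed experts.
router = d * cfg["num_experts"]

per_layer = (
    attn
    + router
    + cfg["num_experts"] * mlp_params(cfg["expert_intermediate_size"])
    + mlp_params(cfg["shared_expert_intermediate_size"])
)

total = embed + cfg["n_layer"] * per_layer
print(f"approx. parameters: {total / 1e6:.1f}M")  # lands near 272M under these assumptions
```

Separately, because `auto_map` points at `configuration_eve.EveConfig` and `modeling_eve.DeepSeekMoE`, the checkpoint ships its own modeling code, so loading it through `transformers` requires `trust_remote_code=True`. A hypothetical loading call (the repo id is inferred from this page, not confirmed):

```python
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "anthonym21/Eve-2-MoE-272M",  # assumed repo id; adjust to the actual path
    trust_remote_code=True,       # needed because of the custom auto_map classes
)
```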