{
  "model_type": "eve-moe",
  "architectures": ["EveMoEForCausalLM"],
  "auto_map": {
    "AutoConfig": "configuration_eve.EveConfig",
    "AutoModelForCausalLM": "modeling_eve.EveMoEForCausalLM"
  },
  "vocab_size": 50304,
  "n_layer": 12,
  "n_embd": 512,
  "n_head": 8,
  "head_dim": 64,
  "block_size": 2048,
  "num_experts": 8,
  "top_k": 2,
  "expert_intermediate_size": 1408,
  "shared_expert_intermediate_size": 1408,
  "router_aux_loss_coef": 0.01,
  "rope_theta": 10000.0,
  "use_checkpointing": false,
  "tie_word_embeddings": true,
  "eos_token_id": 50256,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.46.0"
}
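Because the config declares an `auto_map` pointing at custom classes in `configuration_eve.py` and `modeling_eve.py`, loading it through `transformers` requires `trust_remote_code=True`. Below is a minimal sketch; the local path `./eve-moe` is a hypothetical checkpoint directory assumed to contain this `config.json` next to the two Python files named in `auto_map`:

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

# "./eve-moe" is a hypothetical local directory holding config.json,
# configuration_eve.py, and modeling_eve.py. The auto_map entry routes
# AutoConfig / AutoModelForCausalLM to those custom classes, which is
# why trust_remote_code=True is required.
config = AutoConfig.from_pretrained("./eve-moe", trust_remote_code=True)

model = AutoModelForCausalLM.from_pretrained(
    "./eve-moe",
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in config.json
)

# Quick sanity check of the MoE routing settings: each token should be
# dispatched to top_k of num_experts routed experts.
print(config.num_experts, config.top_k)  # expected: 8 2
```

Note that `tie_word_embeddings: true` means the input embedding and output head share weights, so the head contributes no extra parameters, and `head_dim` (64) times `n_head` (8) equals `n_embd` (512), so the attention projections preserve the model width.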