moe-v9-86k/final_model/config.json
{
  "activation_function": "gelu",
  "architectures": [
    "MoEGPTForCausalLM"
  ],
  "aux_loss_alpha": 0.01,
  "bias": false,
  "block_size": 2048,
  "bos_token_id": 128000,
  "capacity_factor": 1.25,
  "dropout": 0.0,
  "dtype": "float32",
  "eos_token_id": 128001,
  "eval_capacity_factor": 2.0,
  "hidden_size": 768,
  "initializer_range": 0.1,
  "intermediate_size": 2048,
  "layer_norm_epsilon": 1e-05,
  "max_position_embeddings": 2048,
  "model_type": "moe_gpt",
  "moe_layer_frequency": 2,
  "n_embd": 768,
  "n_experts": 8,
  "n_experts_active": 2,
  "n_head": 12,
  "n_layer": 12,
  "n_positions": 2048,
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 128001,
  "rope_theta": 10000.0,
  "router_z_loss_alpha": 0.001,
  "transformers_version": "4.57.1",
  "use_cache": true,
  "use_noisy_gating": true,
  "use_rope": true,
  "vocab_size": 128256
}
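
For reference, a minimal sketch (plain Python, standard library only) that loads this config and checks that the duplicated GPT-2-style keys (n_embd, n_head, n_layer, n_positions, block_size) agree with their standard-named counterparts (hidden_size, num_attention_heads, num_hidden_layers, max_position_embeddings). The local path final_model/config.json is assumed from the page header; adjust it to wherever the file lives in your checkout.

import json

# Path assumed from the page header; adjust to your local copy.
CONFIG_PATH = "final_model/config.json"

with open(CONFIG_PATH) as f:
    cfg = json.load(f)

# The config carries both GPT-2-style and standard transformers key names;
# they should describe the same architecture.
aliases = {
    "n_embd": "hidden_size",
    "n_head": "num_attention_heads",
    "n_layer": "num_hidden_layers",
    "n_positions": "max_position_embeddings",
    "block_size": "max_position_embeddings",
}
for old, new in aliases.items():
    assert cfg[old] == cfg[new], f"{old} != {new}"

# MoE routing facts, straight from the file: 8 experts per MoE layer,
# top-2 routing, and an MoE block every 2nd transformer layer.
print(f"experts={cfg['n_experts']}, active={cfg['n_experts_active']}, "
      f"moe_every={cfg['moe_layer_frequency']} layers")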