LeanMixtral-8x7B / config.json
{
  "architectures": [
    "LeanMixtralForCausalLM"
  ],
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoModelForCausalLM": "modeling_lean_mixtral.LeanMixtralForCausalLM"
  },
  "bos_token_id": 1,
  "dtype": "bfloat16",
  "eos_token_id": 2,
  "head_dim": null,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "leanmix_compressed_kv_layers": [
    3,
    8,
    13,
    14,
    16,
    22,
    23,
    24,
    25,
    26,
    27,
    28
  ],
  "leanmix_kv_decoder_depth": {
    "13": 2,
    "14": 2,
    "16": 2,
    "22": 2,
    "23": 2,
    "24": 2,
    "25": 2,
    "26": 2,
    "27": 2,
    "28": 2,
    "3": 2,
    "8": 2
  },
  "leanmix_kv_decoder_hidden_dim": {
    "13": 128,
    "14": 128,
    "16": 128,
    "22": 128,
    "23": 128,
    "24": 128,
    "25": 128,
    "26": 128,
    "27": 128,
    "28": 128,
    "3": 128,
    "8": 128
  },
  "leanmix_kv_fixed_alpha": {
    "13": 1.0,
    "14": 1.0,
    "16": 1.0,
    "22": 1.0,
    "23": 1.0,
    "24": 1.0,
    "25": 1.0,
    "26": 1.0,
    "27": 1.0,
    "28": 1.0,
    "3": 1.0,
    "8": 1.0
  },
  "leanmix_kv_input_dims": {
    "13": 1024,
    "14": 1024,
    "16": 1024,
    "22": 1024,
    "23": 1024,
    "24": 1024,
    "25": 1024,
    "26": 1024,
    "27": 1024,
    "28": 1024,
    "3": 1024,
    "8": 1024
  },
  "leanmix_kv_key_dims": {
    "13": 4,
    "14": 4,
    "16": 4,
    "22": 4,
    "23": 4,
    "24": 4,
    "25": 4,
    "26": 4,
    "27": 4,
    "28": 4,
    "3": 4,
    "8": 4
  },
  "leanmix_kv_residual_decoder": {
    "13": false,
    "14": false,
    "16": false,
    "22": false,
    "23": false,
    "24": false,
    "25": false,
    "26": false,
    "27": false,
    "28": false,
    "3": false,
    "8": false
  },
  "leanmix_kv_value_dims": {
    "13": 4,
    "14": 4,
    "16": 4,
    "22": 4,
    "23": 4,
    "24": 4,
    "25": 4,
    "26": 4,
    "27": 4,
    "28": 4,
    "3": 4,
    "8": 4
  },
  "max_position_embeddings": 32768,
  "model_type": "mixtral",
  "num_attention_heads": 32,
  "num_experts_per_tok": 2,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "num_local_experts": 8,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_theta": 1000000.0,
  "router_aux_loss_coef": 0.02,
  "router_jitter_noise": 0.0,
  "sliding_window": null,
  "tie_word_embeddings": false,
  "transformers_version": "4.57.6",
  "use_cache": true,
  "vocab_size": 32000
}
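
Because `auto_map` routes `AutoModelForCausalLM` to the custom class `modeling_lean_mixtral.LeanMixtralForCausalLM`, the checkpoint ships its own modeling code and must be loaded with `trust_remote_code=True`. A minimal loading sketch, assuming the repo id `miike-ai/LeanMixtral-8x7B` (inferred from this page's header):

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "miike-ai/LeanMixtral-8x7B"  # assumed from the page header

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches "dtype": "bfloat16" in this config
    trust_remote_code=True,      # required: auto_map points at modeling_lean_mixtral.py
)

inputs = tokenizer("The capital of France is", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

The `leanmix_*` fields describe per-layer KV-cache compression: 12 of the 32 layers store keys and values in a 4-dimensional latent per token instead of the full `num_key_value_heads * head_dim = 8 * 128 = 1024` width (matching `leanmix_kv_input_dims`), with a 2-layer decoder of hidden dim 128 reconstructing them at read time. A back-of-the-envelope sketch of the implied cache footprint, assuming `leanmix_kv_key_dims`/`leanmix_kv_value_dims` are per-token stored widths:

```python
import json

with open("config.json") as f:
    cfg = json.load(f)

n_layers = cfg["num_hidden_layers"]                          # 32
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]  # 4096 // 32 = 128
kv_width = cfg["num_key_value_heads"] * head_dim             # 8 * 128 = 1024 per K or V

compressed = set(cfg["leanmix_compressed_kv_layers"])
# JSON object keys are strings, so the per-layer maps index by str(layer).
k_dims = {int(k): v for k, v in cfg["leanmix_kv_key_dims"].items()}
v_dims = {int(k): v for k, v in cfg["leanmix_kv_value_dims"].items()}

baseline = n_layers * 2 * kv_width  # values cached per token, uncompressed
lean = sum(
    k_dims[l] + v_dims[l] if l in compressed else 2 * kv_width
    for l in range(n_layers)
)
print(f"per-token KV cache: {baseline} -> {lean} values ({lean / baseline:.1%})")
# per-token KV cache: 65536 -> 41056 values (62.6%)
```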