minimind-3v / config.json
{
  "architectures": [
    "MiniMindVLM"
  ],
  "auto_map": {
    "AutoConfig": "model_vlm.VLMConfig",
    "AutoModelForCausalLM": "model_vlm.MiniMindVLM"
  },
  "bos_token_id": 1,
  "dropout": 0.0,
  "dtype": "bfloat16",
  "eos_token_id": 2,
  "flash_attn": true,
  "head_dim": 96,
  "hidden_act": "silu",
  "hidden_size": 768,
  "image_hidden_size": 768,
  "image_ids": [
    12
  ],
  "image_special_token": "<|image_pad|>",
  "image_token_len": 64,
  "inference_rope_scaling": false,
  "intermediate_size": 2432,
  "max_position_embeddings": 32768,
  "max_seq_len": 8192,
  "model_type": "minimind-v",
  "moe_intermediate_size": 2432,
  "norm_topk_prob": true,
  "num_attention_heads": 8,
  "num_experts": 4,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 8,
  "num_key_value_heads": 4,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "router_aux_loss_coef": 0.0005,
  "transformers_version": "4.57.6",
  "use_moe": false,
  "vocab_size": 6400
}
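
The `auto_map` entries point the transformers auto classes at a `model_vlm.py` module shipped in the same repository, so loading this model requires `trust_remote_code=True`. Below is a minimal sketch of loading the config and model and sanity-checking the attention geometry spelled out above; the repo id `jingyaogong/minimind-3v` is taken from this file's origin repository, and everything else uses the standard transformers auto-class API.

```python
# Minimal sketch: load a model whose config.json uses auto_map / remote code.
# Assumes the repo jingyaogong/minimind-3v ships model_vlm.py next to this
# config.json; trust_remote_code=True lets transformers import that module.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "jingyaogong/minimind-3v"  # origin repository of this config

config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)

# Sanity-check the geometry from the JSON above:
# 8 attention heads x 96 dims per head = 768, the hidden size, and the
# 4 key/value heads (grouped-query attention) evenly divide the 8 query heads.
assert config.num_attention_heads * config.head_dim == config.hidden_size
assert config.num_attention_heads % config.num_key_value_heads == 0

model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype="bfloat16",  # matches the "dtype" field in the config
)
```

The `image_special_token` and `image_token_len` fields suggest that each image is represented in the prompt by 64 `<|image_pad|>` placeholder tokens whose embeddings the vision encoder's outputs are substituted into; that logic lives in `model_vlm.py`, not in this file. Note also that although the MoE fields (`num_experts`, `moe_intermediate_size`, `router_aux_loss_coef`) are populated, `use_moe` is `false`, so this checkpoint runs as a dense 8-layer model.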