{
"architectures": [
"MiniLLM"
],
"auto_map": {
"AutoConfig": "mini_llm.MiniLLMConfig",
"AutoModelForCausalLM": "mini_llm.MiniLLM"
},
"dim": 1024,
"drop_rate": 0.0,
"dtype": "float32",
"model_type": "mini-llm",
"num_attention_kv_heads": 32,
"num_attention_q_heads": 64,
"num_layers": 12,
"qkv_bias": true,
"rope_base": 10000,
"transformers_version": "4.56.2",
"vocab_size": 32000
}
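The `auto_map` block is the Hugging Face custom-code hook: it tells `AutoConfig` and `AutoModelForCausalLM` to import `MiniLLMConfig` and `MiniLLM` from a `mini_llm.py` module shipped alongside this file, which is why loading the checkpoint requires `trust_remote_code=True`. The repo's actual `mini_llm.py` is not shown here, so the following is only a minimal sketch of how the config class could mirror these fields; the parameter names and defaults are taken from the JSON above, everything else is an assumption.

```python
from transformers import PretrainedConfig


class MiniLLMConfig(PretrainedConfig):
    # Must match the "model_type" field in config.json.
    model_type = "mini-llm"

    def __init__(
        self,
        dim=1024,                   # hidden size
        num_layers=12,              # number of transformer blocks
        num_attention_q_heads=64,   # query heads; with 32 KV heads this is
        num_attention_kv_heads=32,  # grouped-query attention (2 Q per KV head)
        vocab_size=32000,
        drop_rate=0.0,
        qkv_bias=True,
        rope_base=10000,
        **kwargs,
    ):
        self.dim = dim
        self.num_layers = num_layers
        self.num_attention_q_heads = num_attention_q_heads
        self.num_attention_kv_heads = num_attention_kv_heads
        self.vocab_size = vocab_size
        self.drop_rate = drop_rate
        self.qkv_bias = qkv_bias
        self.rope_base = rope_base
        super().__init__(**kwargs)


# Serializing the defaults reproduces the custom fields listed above
# (plus model_type and the transformers version that wrote the file).
print(MiniLLMConfig().to_json_string())

# Loading the checkpoint itself executes mini_llm.py from the repo, so the
# remote-code flag must be passed ("user/mini-llm" is a placeholder repo id):
# model = AutoModelForCausalLM.from_pretrained("user/mini-llm",
#                                              trust_remote_code=True)
```

Because `save_pretrained` only writes fields that differ from the base `PretrainedConfig` defaults, keeping the class defaults in sync with the intended architecture is what makes this `config.json` round-trip cleanly.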