{
  "architectures": [
    "MiniLLM"
  ],
  "auto_map": {
    "AutoConfig": "mini_llm.MiniLLMConfig",
    "AutoModelForCausalLM": "mini_llm.MiniLLM"
  },
  "dim": 1024,
  "drop_rate": 0.0,
  "dtype": "float32",
  "model_type": "mini-llm",
  "num_attention_kv_heads": 32,
  "num_attention_q_heads": 64,
  "num_layers": 12,
  "qkv_bias": true,
  "rope_base": 10000,
  "transformers_version": "4.56.2",
  "vocab_size": 32000
}