{ "vocab_size": 128256, "hidden_size": 3072, "num_hidden_layers": 28, "num_attention_heads": 24, "latent_state_dim": 384, "n_experts": 3, "n_coherence_heads": 8, "expert_intermediate": 1536, "state_injection_layers": [ 4, 8, 12, 16, 20, 24 ], "alpha_inject": 0.03, "use_state_in_attention": true, "use_state_in_ffn": true, "state_fusion_method": "concat" }