{
  "vocab_size": 32000,
  "hidden_size": 4096,
  "num_hidden_layers": 32,
  "latent_state_dim": 256,
  "n_experts": 2,
  "n_coherence_heads": 4,
  "expert_intermediate": 1024,
  "state_injection_layers": [8, 16],
  "alpha_inject": 0.02
}