{
  "vocab_size": 32000,
  "block_size": 1024,
  "n_layer": 16,
  "n_head": 16,
  "n_embd": 1024,
  "dropout": 0.1,
  "bias": true,
  "use_rope": true,
  "rope_base": 10000.0,
  "use_rmsnorm": true,
  "rms_norm_eps": 1e-05,
  "mlp_type": "swiglu",
  "ffn_hidden_mult": 2.6666666667
}