{
  "vocab_size": 50096,
  "n_layer": 18,
  "hidden_size": 768,
  "intermediate_size": 3072,
  "n_head": 12,
  "context_size": 512,
  "rms_norm_eps": 1e-6,
  "dropout": 0.1,
  "bos_token_id": 1,
  "eos_token_id": 2
}