{
  "n_layer": 28,
  "n_head": 24,
  "n_kv_head": 8,
  "n_embd": 3072,
  "n_intermediate": 8192,
  "vocab_size": 128256,
  "max_seq_len": 2048,
  "rope_theta": 500000.0
}