{
  "vocab_size": 8192,
  "n_layer": 8,
  "n_head": 8,
  "n_embd": 512,
  "block_size": 1024,
  "rope_base": 10000.0,
  "mlp_mult": 4,
  "dropout": 0.0,
  "tie_embeddings": true,
  "arch": "from_scratch_gpt"
}