{
  "model_type": "llm-1b-lab",
  "architectures": [
    "LLMModel"
  ],
  "vocab_size": 32000,
  "hidden_dim": 2048,
  "num_layers": 22,
  "num_heads": 16,
  "num_kv_heads": 4,
  "intermediate_dim": 5632,
  "max_seq_len": 2048,
  "dropout": 0.0,
  "rope_theta": 10000.0,
  "norm_eps": 1e-06,
  "weight_tying": true
}