{
"model_type": "llm-1b-lab",
"architectures": [
"LLMModel"
],
"vocab_size": 32000,
"hidden_dim": 2048,
"num_layers": 22,
"num_heads": 16,
"num_kv_heads": 4,
"intermediate_dim": 5632,
"max_seq_len": 2048,
"dropout": 0.0,
"rope_theta": 10000.0,
"norm_eps": 1e-06,
"weight_tying": true
}