{
"model_type": "tiny-llama",
"hidden_dim": 768,
"n_heads": 12,
"n_layer": 12,
"intermediate_dim": 1024,
"max_position_embedding": 256,
"n_kv_heads": 4,
"vocab_size": 32000,
"padding_idx": 2,
"rms_norm_eps": 1e-5,
"attention_dropout": 0.1,
"rope_theta": 10000
}