{
"model_type": "multiscale_transformer",
"architectures": [
"MultiScaleForCausalLM"
],
"vocab_size": 258,
"d_model": 320,
"n_heads": 5,
"d_ff": 864,
"n_layers_per_scale": 4,
"n_cross_attn_layers": 1,
"max_seq_len": 512,
"dropout": 0.0,
"bias": false,
"rope_theta": 10000.0,
"downsample_factors": [
1,
2,
4
],
"num_parameters": 22506560
} |
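
For reference, a minimal Python sketch that loads this config and derives a few quantities from it. The head dimension and the reported parameter count follow directly from the fields above; treating each entry of `downsample_factors` as a sequence-length divisor per scale is an interpretation of the field name, not something the file itself states.

```python
import json

# Load the config shown above (assumes it was saved as config.json).
with open("config.json") as f:
    cfg = json.load(f)

# Per-head dimension: d_model must divide evenly among the attention heads.
assert cfg["d_model"] % cfg["n_heads"] == 0
head_dim = cfg["d_model"] // cfg["n_heads"]
print(f"head_dim = {head_dim}")  # 320 / 5 = 64

# Sequence length at each scale, assuming each entry in downsample_factors
# divides the token sequence (an interpretation, not stated by the config).
for factor in cfg["downsample_factors"]:
    print(f"factor {factor}: seq_len = {cfg['max_seq_len'] // factor}")
# -> 512, 256, 128

# Reported parameter count, useful as a check against any reimplementation.
print(f"{cfg['num_parameters'] / 1e6:.2f}M parameters")  # 22.51M
```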