{
"architectures": ["SPTForCausalLM"],
"model_type": "spt",
"n_layers": 12,
"vocab_size": 97,
"hidden_size": 512,
"n_attn_heads": 16,
"n_kv_heads": 16,
"intermediate_size": 2048,
"max_len": 2048,
"residual": true,
"normalise": true,
"bos_token_id": 95,
"eos_token_id": 95,
"pad_token_id": 95,
"unk_token_id": 96
}