{
"architectures": [
"VSBForCausalLM"
],
"d_e": 768,
"hidden_size": 1536,
"max_position_embeddings": 131072,
"model_type": "vsbssm",
"num_hidden_layers": 16,
"r": 384,
"torch_dtype": "float32",
"transformers_version": "4.55.4",
"vocab_size": 50257
}
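A minimal sketch of how this config could be inspected, assuming the JSON above is saved locally as config.json. Note that "vsbssm" is not a built-in transformers model type, so resolving the config (or model) through the library would require the repo's custom code via trust_remote_code=True; the repo id below is hypothetical.

import json

from transformers import AutoConfig

# Inspect the raw JSON directly; no custom model code is needed for this.
with open("config.json") as f:
    cfg = json.load(f)
print(cfg["model_type"], cfg["hidden_size"], cfg["num_hidden_layers"])
# vsbssm 1536 16

# Resolving the config through transformers must be allowed to run the
# repo's custom code, since "vsbssm" is not a registered model type.
# "someuser/vsb-ssm" is a hypothetical repo id.
config = AutoConfig.from_pretrained("someuser/vsb-ssm", trust_remote_code=True)
print(config.d_e, config.r)  # model-specific fields: 768, 384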