{
"architectures": [
"NovaForCausalLM"
],
"block_size": 256,
"model_type": "nova",
"n_embd": 640,
"n_head": 8,
"n_layer": 4,
"torch_dtype": "float32",
"transformers_version": "4.55.4",
"vocab_size": 6000
}