config.json (213 bytes):
{
  "architectures": [
    "LLaMAModel"
  ],
  "d_model": 128,
  "model_type": "llama",
  "num_heads": 4,
  "num_layers": 2,
  "torch_dtype": "float32",
  "transformers_version": "4.41.2",
  "vocab_size": 50257
}
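As a minimal sketch of how such a file is consumed (assuming it is saved as config.json inside a local model directory; the "./tiny-llama" path below is a placeholder), it can be loaded with transformers' AutoConfig, which dispatches on the "model_type" field. Non-standard keys such as "d_model" are not dropped; PretrainedConfig stores unrecognized keys as plain attributes on the resulting config object.

from transformers import AutoConfig

# Load config.json from a local directory; "./tiny-llama" is a placeholder path.
config = AutoConfig.from_pretrained("./tiny-llama")

print(config.model_type)  # "llama"  -> dispatched to LlamaConfig
print(config.vocab_size)  # 50257    -> overrides the LLaMA default
print(config.d_model)     # 128      -> non-standard key, kept as an attribute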