{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "model_type": "llama",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.40.1",
  "hidden_size": 5120,
  "intermediate_size": 13824,
  "num_attention_heads": 40,
  "num_hidden_layers": 40,
  "num_key_value_heads": 8,
  "vocab_size": 151665
}
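
For reference, a minimal sketch of inspecting this config with Python's standard library, assuming the JSON above is saved locally as config.json (a hypothetical path); the derived values follow directly from the fields shown:

import json

# Load the config shown above (assumes it was saved as "config.json").
with open("config.json") as f:
    cfg = json.load(f)

# Per-head dimension: 5120 hidden units / 40 attention heads = 128.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]

# Grouped-query attention: 40 query heads share 8 KV heads,
# i.e. 5 query heads per KV head.
gqa_ratio = cfg["num_attention_heads"] // cfg["num_key_value_heads"]

print(f"head_dim={head_dim}, gqa_ratio={gqa_ratio}")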