{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "model_type": "llama",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.40.1",
  "hidden_size": 5120,
  "intermediate_size": 13824,
  "num_attention_heads": 40,
  "num_hidden_layers": 40,
  "num_key_value_heads": 8,
  "vocab_size": 151665
}
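
For reference, a minimal sketch of reading this configuration with the transformers library; the local path "config.json" is an assumption (any directory containing the file would also work via AutoConfig.from_pretrained), and fields not listed above are filled in with LlamaConfig defaults:

# Sketch: load the config.json shown above into a LlamaConfig object.
# Assumes the file has been saved locally as "config.json".
from transformers import LlamaConfig

config = LlamaConfig.from_json_file("config.json")

print(config.hidden_size)           # 5120
print(config.num_attention_heads)   # 40
print(config.num_key_value_heads)   # 8 -> grouped-query attention, 5 query heads per KV head
print(config.vocab_size)            # 151665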