File size: 320 Bytes
{
  "architectures": ["LlamaForCausalLM"],
  "model_type": "llama",
  "hidden_size": 4096,
  "intermediate_size": 14336,
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "tie_word_embeddings": false,
  "rms_norm_eps": 1e-6,
  "max_position_embeddings": 2048,
  "vocab_size": 32000,
  "torch_dtype": "float16"
}
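
This is a Hugging Face transformers-style model configuration for a Llama-architecture causal LM. A minimal sketch of loading and inspecting it with AutoConfig, assuming the JSON above is saved as config.json inside a local directory (the filename follows the transformers convention; the "./model" path is a hypothetical placeholder):

from transformers import AutoConfig

# Load the config from a local directory containing config.json.
# "./model" is a stand-in path; point it at wherever the file lives.
config = AutoConfig.from_pretrained("./model")

print(config.model_type)               # "llama"
print(config.hidden_size)              # 4096
print(config.num_hidden_layers)        # 32
print(config.max_position_embeddings)  # 2048 (maximum context length)

# Per-head dimension is derived from the fields above:
# hidden_size / num_attention_heads = 4096 / 32 = 128
print(config.hidden_size // config.num_attention_heads)  # 128

Loading through AutoConfig rather than hard-coding LlamaConfig lets the "model_type" field select the right config class, which is how transformers dispatches on files like this one.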