{
"vocab_size": 4096,
"max_position_embeddings": 256,
"hidden_size": 256,
"num_attention_heads": 8,
"num_hidden_layers": 8,
"intermediate_size": 682,
"rms_norm_eps": 1e-05,
"model_type": "llama",
"architectures": [
"LlamaForCausalLM"
],
"device": "cuda"
}
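
The config follows the Hugging Face `transformers` Llama format, so it can be instantiated directly with `LlamaConfig` and `LlamaForCausalLM` (note that `"device"` is not a standard `LlamaConfig` field and is presumably consumed by the surrounding training code). A minimal sketch, assuming `transformers` is installed:

```python
# Sketch: build the tiny Llama-style model described by this config.json.
from transformers import LlamaConfig, LlamaForCausalLM

config = LlamaConfig(
    vocab_size=4096,
    max_position_embeddings=256,
    hidden_size=256,
    num_attention_heads=8,
    num_hidden_layers=8,
    intermediate_size=682,
    rms_norm_eps=1e-05,
)

model = LlamaForCausalLM(config)

# Rough parameter count for this configuration.
print(sum(p.numel() for p in model.parameters()))
```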