File size: 272 Bytes (commit f5d0286)
{
  "model_type": "llama",
  "architectures": [
    "FastLanguageModel"
  ],
  "max_seq_length": 2048,
  "hidden_size": 4096,
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "r": 16,
  "lora_alpha": 16,
  "lora_dropout": 0,
  "use_gradient_checkpointing": true
}
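The fields in this config mix model architecture values (hidden_size, num_attention_heads, num_hidden_layers) with LoRA fine-tuning settings (r, lora_alpha, lora_dropout) and Unsloth-specific options (max_seq_length, use_gradient_checkpointing). As a rough illustration of how these values map onto code, here is a minimal sketch using Unsloth's FastLanguageModel; the base model name and target_modules list are assumptions for the example and are not stored in this file.

# Minimal sketch: wiring the config values above into Unsloth's API.
# The model_name and target_modules below are illustrative assumptions,
# not values taken from the config file.
from unsloth import FastLanguageModel

model, tokenizer = FastLanguageModel.from_pretrained(
    model_name="unsloth/llama-2-7b",   # assumed base model (hidden_size 4096, 32 layers/heads)
    max_seq_length=2048,               # "max_seq_length" from the config
    load_in_4bit=True,                 # common Unsloth setting; not specified in the config
)

model = FastLanguageModel.get_peft_model(
    model,
    r=16,                              # "r": LoRA rank
    lora_alpha=16,                     # "lora_alpha": LoRA scaling factor
    lora_dropout=0,                    # "lora_dropout"
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],  # assumed attention projections
    use_gradient_checkpointing=True,   # "use_gradient_checkpointing"
)

After this setup, the returned model carries the LoRA adapters and can be passed to a standard trainer; only the adapter weights (rank-16 matrices) are updated during fine-tuning.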