{
"model": "unsloth/gemma-2-9b",
"training_type": "SFT and DPO",
"max_seq_length": 1024,
"dtype": "float16",
"training_args": {
"sft": {
"learning_rate": 2e-06,
"batch_size": 4,
"gradient_accumulation_steps": 4,
"num_train_epochs": 3,
"scheduler": "cosine",
"optim": "adamw_8bit",
"seed": 2802
}
}
}