{
"model_name": "codellama/CodeLlama-7b-hf",
"sequence_length": 4096,
"lora_config": {
"rank": 64,
"alpha": 32,
"dropout": 0.1
},
"training_config": {
"batch_size": 1,
"accumulation_steps": 4,
"epochs": 4,
"learning_rate": 0.0002
},
"dataset_statistics": {
"total_processed": 2653,
"training_examples": 778,
"evaluation_examples": 195,
"discarded_examples": 1680,
"retention_rate": "36.7%"
}
}