{
"bf16": {
"enabled": true
},
"train_micro_batch_size_per_gpu": 1,
"train_batch_size": "auto",
"gradient_accumulation_steps": 1,
"gradient_clipping": 0.05,
"zero_optimization": {
"stage": 2,
"offload_optimizer": {
"device": "none"
},
"overlap_comm": true,
"contiguous_gradients": true,
"sub_group_size": 1e9,
"reduce_bucket_size": 5e8,
"allgather_partitions": true,
"allgather_bucket_size": 2e8,
"reduce_scatter": true
},
"steps_per_print": 100,
"wall_clock_breakdown": false
}