{
"achieved_tflops_per_gpu": 7.528449024090184,
"achieved_tflops_per_gpu_theoretical": 555.6163483029995,
"epoch": 7.0,
"loss_nan_ranks": 0,
"loss_rank_avg": 0.0703798308968544,
"mfu_percent": 2.412964430798136,
"mfu_percent_theoretical": 178.08216291762807,
"total_flos": 3.5396290752681083e+18,
"train_loss": 0.038640074608406225,
"train_runtime": 58770.888,
"train_samples_per_second": 2.619,
"train_steps_per_second": 0.164,
"valid_targets_mean": 1910.0,
"valid_targets_min": 393
}