{
"achieved_tflops_per_gpu": 4.074660110932522,
"achieved_tflops_per_gpu_theoretical": 159.2315144526889,
"epoch": 5.0,
"loss_nan_ranks": 0,
"loss_rank_avg": 0.40154755115509033,
"mfu_percent": 0.28796184529558455,
"mfu_percent_theoretical": 11.253110562027485,
"total_flos": 1.0927339449708708e+18,
"train_loss": 0.387720340224588,
"train_runtime": 16761.1211,
"train_samples_per_second": 2.983,
"train_steps_per_second": 0.047,
"valid_targets_mean": 4463.1,
"valid_targets_min": 880
}