{
    "achieved_tflops_per_gpu": 5.760825905539882,
    "achieved_tflops_per_gpu_theoretical": 460.7550205439509,
    "epoch": 7.0,
    "loss_nan_ranks": 0,
    "loss_rank_avg": 0.05363672971725464,
    "mfu_percent": 0.5824899803377029,
    "mfu_percent_theoretical": 46.58796972132971,
    "total_flos": 4.5166057635687956e+18,
    "train_loss": 0.18181621480833796,
    "train_runtime": 98002.5659,
    "train_samples_per_second": 2.181,
    "train_steps_per_second": 0.136,
    "valid_targets_mean": 5394.5,
    "valid_targets_min": 3513
}