{
    "achieved_tflops_per_gpu": 0.0036721295667827185,
    "achieved_tflops_per_gpu_theoretical": 928.2858502135481,
    "epoch": 7.0,
    "loss_nan_ranks": 0,
    "loss_rank_avg": 0.08077868074178696,
    "mfu_percent": 0.0002595144570164465,
    "mfu_percent_theoretical": 65.60324029777725,
    "total_flos": 1429777853448192.0,
    "train_loss": 0.1377790669734474,
    "train_runtime": 24334.9572,
    "train_samples_per_second": 4.375,
    "train_steps_per_second": 0.274,
    "valid_targets_mean": 1517.8,
    "valid_targets_min": 393
}