{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 3093,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009699321047526674,
      "grad_norm": 1.020737016686291,
      "learning_rate": 1.0752688172043011e-06,
      "loss": 0.3895,
      "step": 10
    },
    {
      "epoch": 0.019398642095053348,
      "grad_norm": 0.7534117886463607,
      "learning_rate": 2.1505376344086023e-06,
      "loss": 0.3209,
      "step": 20
    },
    {
      "epoch": 0.029097963142580018,
      "grad_norm": 0.4377375711052428,
      "learning_rate": 3.225806451612903e-06,
      "loss": 0.2648,
      "step": 30
    },
    {
      "epoch": 0.038797284190106696,
      "grad_norm": 0.3125953720371267,
      "learning_rate": 4.3010752688172045e-06,
      "loss": 0.2353,
      "step": 40
    },
    {
      "epoch": 0.04849660523763336,
      "grad_norm": 0.26931955183857703,
      "learning_rate": 5.376344086021506e-06,
      "loss": 0.2211,
      "step": 50
    },
    {
      "epoch": 0.058195926285160036,
      "grad_norm": 0.26461533217537725,
      "learning_rate": 6.451612903225806e-06,
      "loss": 0.2065,
      "step": 60
    },
    {
      "epoch": 0.06789524733268672,
      "grad_norm": 0.8264176079441389,
      "learning_rate": 7.526881720430108e-06,
      "loss": 0.2,
      "step": 70
    },
    {
      "epoch": 0.07759456838021339,
      "grad_norm": 0.2983222843388067,
      "learning_rate": 8.602150537634409e-06,
      "loss": 0.1932,
      "step": 80
    },
    {
      "epoch": 0.08729388942774007,
      "grad_norm": 0.581254385336681,
      "learning_rate": 9.67741935483871e-06,
      "loss": 0.1888,
      "step": 90
    },
    {
      "epoch": 0.09699321047526673,
      "grad_norm": 0.3121423374050484,
      "learning_rate": 9.999865664319414e-06,
      "loss": 0.186,
      "step": 100
    },
    {
      "epoch": 0.1066925315227934,
      "grad_norm": 0.2650187228146991,
      "learning_rate": 9.999207711016081e-06,
      "loss": 0.182,
      "step": 110
    },
    {
      "epoch": 0.11639185257032007,
      "grad_norm": 0.26944373713294145,
      "learning_rate": 9.998001538251283e-06,
      "loss": 0.1779,
      "step": 120
    },
    {
      "epoch": 0.12609117361784675,
      "grad_norm": 0.32148775806687896,
      "learning_rate": 9.99624727829546e-06,
      "loss": 0.1743,
      "step": 130
    },
    {
      "epoch": 0.13579049466537343,
      "grad_norm": 0.23147855380574123,
      "learning_rate": 9.99394512352298e-06,
      "loss": 0.176,
      "step": 140
    },
    {
      "epoch": 0.1454898157129001,
      "grad_norm": 0.5006127662496982,
      "learning_rate": 9.991095326391061e-06,
      "loss": 0.1741,
      "step": 150
    },
    {
      "epoch": 0.15518913676042678,
      "grad_norm": 0.4320980518392451,
      "learning_rate": 9.98769819941207e-06,
      "loss": 0.1702,
      "step": 160
    },
    {
      "epoch": 0.16488845780795344,
      "grad_norm": 0.3889487478872514,
      "learning_rate": 9.983754115119262e-06,
      "loss": 0.1727,
      "step": 170
    },
    {
      "epoch": 0.17458777885548013,
      "grad_norm": 0.27135352703963794,
      "learning_rate": 9.97926350602593e-06,
      "loss": 0.1701,
      "step": 180
    },
    {
      "epoch": 0.1842870999030068,
      "grad_norm": 0.25440030820854326,
      "learning_rate": 9.97422686457796e-06,
      "loss": 0.1678,
      "step": 190
    },
    {
      "epoch": 0.19398642095053345,
      "grad_norm": 0.236255441704788,
      "learning_rate": 9.968644743099848e-06,
      "loss": 0.1641,
      "step": 200
    },
    {
      "epoch": 0.20368574199806014,
      "grad_norm": 0.45878670927299864,
      "learning_rate": 9.96251775373412e-06,
      "loss": 0.167,
      "step": 210
    },
    {
      "epoch": 0.2133850630455868,
      "grad_norm": 0.22778682431164834,
      "learning_rate": 9.955846568374201e-06,
      "loss": 0.1655,
      "step": 220
    },
    {
      "epoch": 0.22308438409311349,
      "grad_norm": 0.23943633515604867,
      "learning_rate": 9.948631918590746e-06,
      "loss": 0.1617,
      "step": 230
    },
    {
      "epoch": 0.23278370514064015,
      "grad_norm": 0.24350940452858558,
      "learning_rate": 9.940874595551403e-06,
      "loss": 0.1626,
      "step": 240
    },
    {
      "epoch": 0.24248302618816683,
      "grad_norm": 0.22335875945360728,
      "learning_rate": 9.932575449934063e-06,
      "loss": 0.1644,
      "step": 250
    },
    {
      "epoch": 0.2521823472356935,
      "grad_norm": 0.23496059802926855,
      "learning_rate": 9.923735391833564e-06,
      "loss": 0.1618,
      "step": 260
    },
    {
      "epoch": 0.2618816682832202,
      "grad_norm": 0.22613625236616156,
      "learning_rate": 9.914355390661897e-06,
      "loss": 0.1588,
      "step": 270
    },
    {
      "epoch": 0.27158098933074687,
      "grad_norm": 0.28310779926466306,
      "learning_rate": 9.904436475041892e-06,
      "loss": 0.1599,
      "step": 280
    },
    {
      "epoch": 0.2812803103782735,
      "grad_norm": 0.23716946411046663,
      "learning_rate": 9.893979732694422e-06,
      "loss": 0.1608,
      "step": 290
    },
    {
      "epoch": 0.2909796314258002,
      "grad_norm": 0.22939002583419776,
      "learning_rate": 9.882986310319124e-06,
      "loss": 0.1571,
      "step": 300
    },
    {
      "epoch": 0.3006789524733269,
      "grad_norm": 0.1829019556630992,
      "learning_rate": 9.871457413468645e-06,
      "loss": 0.1602,
      "step": 310
    },
    {
      "epoch": 0.31037827352085356,
      "grad_norm": 0.2862249824823463,
      "learning_rate": 9.859394306416443e-06,
      "loss": 0.1587,
      "step": 320
    },
    {
      "epoch": 0.3200775945683802,
      "grad_norm": 0.1916670538928815,
      "learning_rate": 9.846798312018147e-06,
      "loss": 0.1593,
      "step": 330
    },
    {
      "epoch": 0.3297769156159069,
      "grad_norm": 0.19602376121876022,
      "learning_rate": 9.833670811566485e-06,
      "loss": 0.1555,
      "step": 340
    },
    {
      "epoch": 0.3394762366634336,
      "grad_norm": 0.24177047491392098,
      "learning_rate": 9.820013244639817e-06,
      "loss": 0.1575,
      "step": 350
    },
    {
      "epoch": 0.34917555771096026,
      "grad_norm": 0.1876075373970573,
      "learning_rate": 9.80582710894426e-06,
      "loss": 0.1573,
      "step": 360
    },
    {
      "epoch": 0.3588748787584869,
      "grad_norm": 0.2927139950691107,
      "learning_rate": 9.791113960149458e-06,
      "loss": 0.1549,
      "step": 370
    },
    {
      "epoch": 0.3685741998060136,
      "grad_norm": 0.24672228818761555,
      "learning_rate": 9.775875411717981e-06,
      "loss": 0.1559,
      "step": 380
    },
    {
      "epoch": 0.37827352085354027,
      "grad_norm": 0.25633769481093877,
      "learning_rate": 9.760113134728383e-06,
      "loss": 0.156,
      "step": 390
    },
    {
      "epoch": 0.3879728419010669,
      "grad_norm": 0.22368063952475076,
      "learning_rate": 9.743828857691964e-06,
      "loss": 0.1549,
      "step": 400
    },
    {
      "epoch": 0.3976721629485936,
      "grad_norm": 0.42577732642751726,
      "learning_rate": 9.727024366363208e-06,
      "loss": 0.1546,
      "step": 410
    },
    {
      "epoch": 0.4073714839961203,
      "grad_norm": 0.30290230685059993,
      "learning_rate": 9.709701503543954e-06,
      "loss": 0.1541,
      "step": 420
    },
    {
      "epoch": 0.41707080504364696,
      "grad_norm": 0.19793407217815825,
      "learning_rate": 9.691862168881325e-06,
      "loss": 0.153,
      "step": 430
    },
    {
      "epoch": 0.4267701260911736,
      "grad_norm": 0.21683658128298464,
      "learning_rate": 9.673508318659399e-06,
      "loss": 0.1514,
      "step": 440
    },
    {
      "epoch": 0.4364694471387003,
      "grad_norm": 0.2542785033568332,
      "learning_rate": 9.65464196558468e-06,
      "loss": 0.1507,
      "step": 450
    },
    {
      "epoch": 0.44616876818622697,
      "grad_norm": 0.18399976352178382,
      "learning_rate": 9.635265178565386e-06,
      "loss": 0.1529,
      "step": 460
    },
    {
      "epoch": 0.45586808923375366,
      "grad_norm": 0.16248682082363208,
      "learning_rate": 9.615380082484573e-06,
      "loss": 0.1524,
      "step": 470
    },
    {
      "epoch": 0.4655674102812803,
      "grad_norm": 0.24408305032393154,
      "learning_rate": 9.594988857967107e-06,
      "loss": 0.1519,
      "step": 480
    },
    {
      "epoch": 0.475266731328807,
      "grad_norm": 0.24001150201316104,
      "learning_rate": 9.574093741140549e-06,
      "loss": 0.1503,
      "step": 490
    },
    {
      "epoch": 0.48496605237633367,
      "grad_norm": 0.21337935060277946,
      "learning_rate": 9.552697023389923e-06,
      "loss": 0.15,
      "step": 500
    },
    {
      "epoch": 0.49466537342386036,
      "grad_norm": 0.17464159632335755,
      "learning_rate": 9.530801051106449e-06,
      "loss": 0.1505,
      "step": 510
    },
    {
      "epoch": 0.504364694471387,
      "grad_norm": 0.16833615600888802,
      "learning_rate": 9.508408225430237e-06,
      "loss": 0.1512,
      "step": 520
    },
    {
      "epoch": 0.5140640155189137,
      "grad_norm": 0.18834381819632562,
      "learning_rate": 9.485521001986961e-06,
      "loss": 0.1438,
      "step": 530
    },
    {
      "epoch": 0.5237633365664404,
      "grad_norm": 0.20243272397310594,
      "learning_rate": 9.46214189061859e-06,
      "loss": 0.1513,
      "step": 540
    },
    {
      "epoch": 0.533462657613967,
      "grad_norm": 0.200191598724974,
      "learning_rate": 9.438273455108145e-06,
      "loss": 0.1489,
      "step": 550
    },
    {
      "epoch": 0.5431619786614937,
      "grad_norm": 0.16174114410966278,
      "learning_rate": 9.41391831289855e-06,
      "loss": 0.1496,
      "step": 560
    },
    {
      "epoch": 0.5528612997090203,
      "grad_norm": 0.3626796052816055,
      "learning_rate": 9.38907913480561e-06,
      "loss": 0.1457,
      "step": 570
    },
    {
      "epoch": 0.562560620756547,
      "grad_norm": 0.18785993468756543,
      "learning_rate": 9.36375864472511e-06,
      "loss": 0.1443,
      "step": 580
    },
    {
      "epoch": 0.5722599418040737,
      "grad_norm": 0.17772800871532357,
      "learning_rate": 9.337959619334125e-06,
      "loss": 0.1476,
      "step": 590
    },
    {
      "epoch": 0.5819592628516004,
      "grad_norm": 0.15601447310629252,
      "learning_rate": 9.31168488778652e-06,
      "loss": 0.1476,
      "step": 600
    },
    {
      "epoch": 0.5916585838991271,
      "grad_norm": 0.19821208464194934,
      "learning_rate": 9.284937331402697e-06,
      "loss": 0.1468,
      "step": 610
    },
    {
      "epoch": 0.6013579049466538,
      "grad_norm": 0.19029097473240159,
      "learning_rate": 9.25771988335363e-06,
      "loss": 0.1482,
      "step": 620
    },
    {
      "epoch": 0.6110572259941804,
      "grad_norm": 0.18175837679945547,
      "learning_rate": 9.230035528339212e-06,
      "loss": 0.1455,
      "step": 630
    },
    {
      "epoch": 0.6207565470417071,
      "grad_norm": 0.18340667052442786,
      "learning_rate": 9.201887302260943e-06,
      "loss": 0.1485,
      "step": 640
    },
    {
      "epoch": 0.6304558680892337,
      "grad_norm": 0.15339041315406826,
      "learning_rate": 9.173278291889016e-06,
      "loss": 0.1466,
      "step": 650
    },
    {
      "epoch": 0.6401551891367604,
      "grad_norm": 0.15699088899673416,
      "learning_rate": 9.14421163452381e-06,
      "loss": 0.1466,
      "step": 660
    },
    {
      "epoch": 0.6498545101842871,
      "grad_norm": 0.24981887731533495,
      "learning_rate": 9.11469051765186e-06,
      "loss": 0.1464,
      "step": 670
    },
    {
      "epoch": 0.6595538312318138,
      "grad_norm": 0.16649549015322454,
      "learning_rate": 9.084718178596301e-06,
      "loss": 0.1452,
      "step": 680
    },
    {
      "epoch": 0.6692531522793405,
      "grad_norm": 0.16481654938724005,
      "learning_rate": 9.054297904161868e-06,
      "loss": 0.1439,
      "step": 690
    },
    {
      "epoch": 0.6789524733268671,
      "grad_norm": 0.24499406154651857,
      "learning_rate": 9.02343303027446e-06,
      "loss": 0.1465,
      "step": 700
    },
    {
      "epoch": 0.6886517943743938,
      "grad_norm": 0.1496332910263056,
      "learning_rate": 8.992126941615314e-06,
      "loss": 0.1458,
      "step": 710
    },
    {
      "epoch": 0.6983511154219205,
      "grad_norm": 0.16548997088070388,
      "learning_rate": 8.960383071249837e-06,
      "loss": 0.1451,
      "step": 720
    },
    {
      "epoch": 0.7080504364694471,
      "grad_norm": 0.20683623207389146,
      "learning_rate": 8.928204900251136e-06,
      "loss": 0.1452,
      "step": 730
    },
    {
      "epoch": 0.7177497575169738,
      "grad_norm": 0.20126316747636905,
      "learning_rate": 8.895595957318277e-06,
      "loss": 0.146,
      "step": 740
    },
    {
      "epoch": 0.7274490785645005,
      "grad_norm": 0.14334846077395094,
      "learning_rate": 8.862559818389322e-06,
      "loss": 0.1418,
      "step": 750
    },
    {
      "epoch": 0.7371483996120272,
      "grad_norm": 0.17612864303411396,
      "learning_rate": 8.829100106249189e-06,
      "loss": 0.1429,
      "step": 760
    },
    {
      "epoch": 0.7468477206595538,
      "grad_norm": 0.15223827251186925,
      "learning_rate": 8.795220490132369e-06,
      "loss": 0.1461,
      "step": 770
    },
    {
      "epoch": 0.7565470417070805,
      "grad_norm": 0.17586006541625654,
      "learning_rate": 8.760924685320558e-06,
      "loss": 0.1426,
      "step": 780
    },
    {
      "epoch": 0.7662463627546072,
      "grad_norm": 0.2495465089144099,
      "learning_rate": 8.726216452735233e-06,
      "loss": 0.1427,
      "step": 790
    },
    {
      "epoch": 0.7759456838021338,
      "grad_norm": 0.16231569101956547,
      "learning_rate": 8.691099598525222e-06,
      "loss": 0.1425,
      "step": 800
    },
    {
      "epoch": 0.7856450048496605,
      "grad_norm": 0.20643117512106057,
      "learning_rate": 8.655577973649322e-06,
      "loss": 0.1447,
      "step": 810
    },
    {
      "epoch": 0.7953443258971872,
      "grad_norm": 0.19353894695637477,
      "learning_rate": 8.61965547345399e-06,
      "loss": 0.1431,
      "step": 820
    },
    {
      "epoch": 0.8050436469447139,
      "grad_norm": 0.1632307697067024,
      "learning_rate": 8.583336037246187e-06,
      "loss": 0.1436,
      "step": 830
    },
    {
      "epoch": 0.8147429679922406,
      "grad_norm": 0.17216990615447947,
      "learning_rate": 8.54662364786137e-06,
      "loss": 0.1447,
      "step": 840
    },
    {
      "epoch": 0.8244422890397672,
      "grad_norm": 0.1651724707314526,
      "learning_rate": 8.509522331226751e-06,
      "loss": 0.139,
      "step": 850
    },
    {
      "epoch": 0.8341416100872939,
      "grad_norm": 0.1568458339839126,
      "learning_rate": 8.47203615591979e-06,
      "loss": 0.1427,
      "step": 860
    },
    {
      "epoch": 0.8438409311348206,
      "grad_norm": 0.17852618591230168,
      "learning_rate": 8.434169232722043e-06,
      "loss": 0.1404,
      "step": 870
    },
    {
      "epoch": 0.8535402521823472,
      "grad_norm": 0.20639355600204873,
      "learning_rate": 8.395925714168356e-06,
      "loss": 0.1412,
      "step": 880
    },
    {
      "epoch": 0.8632395732298739,
      "grad_norm": 0.15073817817287644,
      "learning_rate": 8.357309794091508e-06,
      "loss": 0.1413,
      "step": 890
    },
    {
      "epoch": 0.8729388942774006,
      "grad_norm": 0.15904000752733416,
      "learning_rate": 8.318325707162293e-06,
      "loss": 0.1413,
      "step": 900
    },
    {
      "epoch": 0.8826382153249273,
      "grad_norm": 0.1552685969985047,
      "learning_rate": 8.278977728425157e-06,
      "loss": 0.1419,
      "step": 910
    },
    {
      "epoch": 0.8923375363724539,
      "grad_norm": 0.15706230396222723,
      "learning_rate": 8.239270172829379e-06,
      "loss": 0.1416,
      "step": 920
    },
    {
      "epoch": 0.9020368574199806,
      "grad_norm": 0.17255106752775495,
      "learning_rate": 8.199207394755892e-06,
      "loss": 0.1407,
      "step": 930
    },
    {
      "epoch": 0.9117361784675073,
      "grad_norm": 0.2129566296657695,
      "learning_rate": 8.158793787539782e-06,
      "loss": 0.1426,
      "step": 940
    },
    {
      "epoch": 0.9214354995150339,
      "grad_norm": 0.1520917555155708,
      "learning_rate": 8.118033782988496e-06,
      "loss": 0.1425,
      "step": 950
    },
    {
      "epoch": 0.9311348205625606,
      "grad_norm": 0.14387577079486796,
      "learning_rate": 8.076931850895858e-06,
      "loss": 0.1406,
      "step": 960
    },
    {
      "epoch": 0.9408341416100873,
      "grad_norm": 0.156372446996249,
      "learning_rate": 8.0354924985519e-06,
      "loss": 0.1404,
      "step": 970
    },
    {
      "epoch": 0.950533462657614,
      "grad_norm": 0.13415353564550458,
      "learning_rate": 7.993720270248583e-06,
      "loss": 0.1367,
      "step": 980
    },
    {
      "epoch": 0.9602327837051406,
      "grad_norm": 0.15260385024231266,
      "learning_rate": 7.951619746781474e-06,
      "loss": 0.1404,
      "step": 990
    },
    {
      "epoch": 0.9699321047526673,
      "grad_norm": 0.14296007598794033,
      "learning_rate": 7.909195544947398e-06,
      "loss": 0.1379,
      "step": 1000
    },
    {
      "epoch": 0.979631425800194,
      "grad_norm": 0.1439203772713178,
      "learning_rate": 7.866452317038164e-06,
      "loss": 0.1407,
      "step": 1010
    },
    {
      "epoch": 0.9893307468477207,
      "grad_norm": 0.15601557591631698,
      "learning_rate": 7.823394750330386e-06,
      "loss": 0.1378,
      "step": 1020
    },
    {
      "epoch": 0.9990300678952473,
      "grad_norm": 0.1521155491141262,
      "learning_rate": 7.780027566571467e-06,
      "loss": 0.1381,
      "step": 1030
    },
    {
      "epoch": 1.008729388942774,
      "grad_norm": 0.18883092497874396,
      "learning_rate": 7.736355521461812e-06,
      "loss": 0.1289,
      "step": 1040
    },
    {
      "epoch": 1.0184287099903007,
      "grad_norm": 0.16791508973001082,
      "learning_rate": 7.692383404133302e-06,
      "loss": 0.1251,
      "step": 1050
    },
    {
      "epoch": 1.0281280310378274,
      "grad_norm": 0.1441774496377216,
      "learning_rate": 7.648116036624125e-06,
      "loss": 0.1256,
      "step": 1060
    },
    {
      "epoch": 1.037827352085354,
      "grad_norm": 0.13109750689397975,
      "learning_rate": 7.6035582733499805e-06,
      "loss": 0.1255,
      "step": 1070
    },
    {
      "epoch": 1.0475266731328807,
      "grad_norm": 0.14171432637868595,
      "learning_rate": 7.5587150005717256e-06,
      "loss": 0.1241,
      "step": 1080
    },
    {
      "epoch": 1.0572259941804074,
      "grad_norm": 0.14162368666044384,
      "learning_rate": 7.5135911358595615e-06,
      "loss": 0.1273,
      "step": 1090
    },
    {
      "epoch": 1.066925315227934,
      "grad_norm": 0.13418678548273563,
      "learning_rate": 7.468191627553752e-06,
      "loss": 0.1264,
      "step": 1100
    },
    {
      "epoch": 1.0766246362754608,
      "grad_norm": 0.13196187275552038,
      "learning_rate": 7.42252145422199e-06,
      "loss": 0.1293,
      "step": 1110
    },
    {
      "epoch": 1.0863239573229875,
      "grad_norm": 0.14552608723341326,
      "learning_rate": 7.376585624113438e-06,
      "loss": 0.1244,
      "step": 1120
    },
    {
      "epoch": 1.0960232783705142,
      "grad_norm": 0.14422441886749568,
      "learning_rate": 7.330389174609516e-06,
      "loss": 0.1264,
      "step": 1130
    },
    {
      "epoch": 1.1057225994180406,
      "grad_norm": 0.13834555525544687,
      "learning_rate": 7.283937171671498e-06,
      "loss": 0.128,
      "step": 1140
    },
    {
      "epoch": 1.1154219204655673,
      "grad_norm": 0.13679812310695327,
      "learning_rate": 7.2372347092849744e-06,
      "loss": 0.1268,
      "step": 1150
    },
    {
      "epoch": 1.125121241513094,
      "grad_norm": 0.14580868475877667,
      "learning_rate": 7.190286908901234e-06,
      "loss": 0.127,
      "step": 1160
    },
    {
      "epoch": 1.1348205625606207,
      "grad_norm": 0.14759352493891592,
      "learning_rate": 7.143098918875643e-06,
      "loss": 0.1239,
      "step": 1170
    },
    {
      "epoch": 1.1445198836081474,
      "grad_norm": 0.16306728409304952,
      "learning_rate": 7.095675913903067e-06,
      "loss": 0.1252,
      "step": 1180
    },
    {
      "epoch": 1.154219204655674,
      "grad_norm": 0.13619389268479554,
      "learning_rate": 7.048023094450412e-06,
      "loss": 0.1255,
      "step": 1190
    },
    {
      "epoch": 1.1639185257032008,
      "grad_norm": 0.14865648794317585,
      "learning_rate": 7.0001456861863236e-06,
      "loss": 0.125,
      "step": 1200
    },
    {
      "epoch": 1.1736178467507274,
      "grad_norm": 0.1352158410960604,
      "learning_rate": 6.952048939408156e-06,
      "loss": 0.126,
      "step": 1210
    },
    {
      "epoch": 1.1833171677982541,
      "grad_norm": 0.14931606937805553,
      "learning_rate": 6.903738128466189e-06,
      "loss": 0.1244,
      "step": 1220
    },
    {
      "epoch": 1.1930164888457808,
      "grad_norm": 0.15591971567787935,
      "learning_rate": 6.8552185511852555e-06,
      "loss": 0.1257,
      "step": 1230
    },
    {
      "epoch": 1.2027158098933075,
      "grad_norm": 0.15987884349569545,
      "learning_rate": 6.806495528283772e-06,
      "loss": 0.1247,
      "step": 1240
    },
    {
      "epoch": 1.2124151309408342,
      "grad_norm": 0.173600355468448,
      "learning_rate": 6.75757440279026e-06,
      "loss": 0.1266,
      "step": 1250
    },
    {
      "epoch": 1.2221144519883609,
      "grad_norm": 0.15555585861546212,
      "learning_rate": 6.708460539457418e-06,
      "loss": 0.1259,
      "step": 1260
    },
    {
      "epoch": 1.2318137730358876,
      "grad_norm": 0.13216223386632905,
      "learning_rate": 6.659159324173823e-06,
      "loss": 0.1264,
      "step": 1270
    },
    {
      "epoch": 1.2415130940834143,
      "grad_norm": 0.13732331224860048,
      "learning_rate": 6.6096761633733065e-06,
      "loss": 0.1212,
      "step": 1280
    },
    {
      "epoch": 1.251212415130941,
      "grad_norm": 0.14550514337128806,
      "learning_rate": 6.5600164834420754e-06,
      "loss": 0.1232,
      "step": 1290
    },
    {
      "epoch": 1.2609117361784676,
      "grad_norm": 0.1395324855091477,
      "learning_rate": 6.510185730123646e-06,
      "loss": 0.1246,
      "step": 1300
    },
    {
      "epoch": 1.270611057225994,
      "grad_norm": 0.15143922372224142,
      "learning_rate": 6.460189367921663e-06,
      "loss": 0.1222,
      "step": 1310
    },
    {
      "epoch": 1.2803103782735208,
      "grad_norm": 0.14430877735904407,
      "learning_rate": 6.410032879500647e-06,
      "loss": 0.1237,
      "step": 1320
    },
    {
      "epoch": 1.2900096993210475,
      "grad_norm": 0.15435173824185056,
      "learning_rate": 6.35972176508477e-06,
      "loss": 0.1246,
      "step": 1330
    },
    {
      "epoch": 1.2997090203685742,
      "grad_norm": 0.14283758578201033,
      "learning_rate": 6.309261541854679e-06,
      "loss": 0.1246,
      "step": 1340
    },
    {
      "epoch": 1.3094083414161009,
      "grad_norm": 0.13781861198691472,
      "learning_rate": 6.258657743342486e-06,
      "loss": 0.1214,
      "step": 1350
    },
    {
      "epoch": 1.3191076624636275,
      "grad_norm": 0.13748643897048052,
      "learning_rate": 6.207915918824952e-06,
      "loss": 0.1261,
      "step": 1360
    },
    {
      "epoch": 1.3288069835111542,
      "grad_norm": 0.14790322688240057,
      "learning_rate": 6.157041632714945e-06,
      "loss": 0.1255,
      "step": 1370
    },
    {
      "epoch": 1.338506304558681,
      "grad_norm": 0.13278715076830586,
      "learning_rate": 6.106040463951237e-06,
      "loss": 0.1236,
      "step": 1380
    },
    {
      "epoch": 1.3482056256062076,
      "grad_norm": 0.13946856628394763,
      "learning_rate": 6.0549180053867114e-06,
      "loss": 0.1198,
      "step": 1390
    },
    {
      "epoch": 1.3579049466537343,
      "grad_norm": 0.12852192262109208,
      "learning_rate": 6.003679863175052e-06,
      "loss": 0.1241,
      "step": 1400
    },
    {
      "epoch": 1.367604267701261,
      "grad_norm": 0.14143420891866582,
      "learning_rate": 5.952331656155951e-06,
      "loss": 0.1263,
      "step": 1410
    },
    {
      "epoch": 1.3773035887487877,
      "grad_norm": 0.1282297410386555,
      "learning_rate": 5.900879015238948e-06,
      "loss": 0.126,
      "step": 1420
    },
    {
      "epoch": 1.3870029097963141,
      "grad_norm": 0.13873942443095202,
      "learning_rate": 5.849327582785943e-06,
      "loss": 0.1269,
      "step": 1430
    },
    {
      "epoch": 1.3967022308438408,
      "grad_norm": 0.13656175147704938,
      "learning_rate": 5.797683011992432e-06,
      "loss": 0.1203,
      "step": 1440
    },
    {
      "epoch": 1.4064015518913675,
      "grad_norm": 0.1525890999931462,
      "learning_rate": 5.745950966267586e-06,
      "loss": 0.1248,
      "step": 1450
    },
    {
      "epoch": 1.4161008729388942,
      "grad_norm": 0.1378029168356388,
      "learning_rate": 5.694137118613185e-06,
      "loss": 0.1208,
      "step": 1460
    },
    {
      "epoch": 1.4258001939864209,
      "grad_norm": 0.1349917266146262,
      "learning_rate": 5.642247151001515e-06,
      "loss": 0.1219,
      "step": 1470
    },
    {
      "epoch": 1.4354995150339476,
      "grad_norm": 0.13532958442842166,
      "learning_rate": 5.590286753752269e-06,
      "loss": 0.1246,
      "step": 1480
    },
    {
      "epoch": 1.4451988360814743,
      "grad_norm": 0.13590965633443314,
      "learning_rate": 5.5382616249085476e-06,
      "loss": 0.1238,
      "step": 1490
    },
    {
      "epoch": 1.454898157129001,
      "grad_norm": 0.1302293751307389,
      "learning_rate": 5.486177469611999e-06,
      "loss": 0.1218,
      "step": 1500
    },
    {
      "epoch": 1.4645974781765276,
      "grad_norm": 0.13212636179350218,
      "learning_rate": 5.434039999477182e-06,
      "loss": 0.1217,
      "step": 1510
    },
    {
      "epoch": 1.4742967992240543,
      "grad_norm": 0.15765907573882082,
      "learning_rate": 5.381854931965238e-06,
      "loss": 0.1212,
      "step": 1520
    },
    {
      "epoch": 1.483996120271581,
      "grad_norm": 0.13087502137063067,
      "learning_rate": 5.32962798975689e-06,
      "loss": 0.1221,
      "step": 1530
    },
    {
      "epoch": 1.4936954413191077,
      "grad_norm": 0.1809256405824013,
      "learning_rate": 5.2773649001248965e-06,
      "loss": 0.1201,
      "step": 1540
    },
    {
      "epoch": 1.5033947623666344,
      "grad_norm": 0.12904018631952519,
      "learning_rate": 5.2250713943059826e-06,
      "loss": 0.1222,
      "step": 1550
    },
    {
      "epoch": 1.513094083414161,
      "grad_norm": 0.1293740910081746,
      "learning_rate": 5.172753206872363e-06,
      "loss": 0.1209,
      "step": 1560
    },
    {
      "epoch": 1.5227934044616878,
      "grad_norm": 0.30179091402220243,
      "learning_rate": 5.120416075102855e-06,
      "loss": 0.1257,
      "step": 1570
    },
    {
      "epoch": 1.5324927255092144,
      "grad_norm": 0.1385162792200683,
      "learning_rate": 5.068065738353748e-06,
      "loss": 0.124,
      "step": 1580
    },
    {
      "epoch": 1.5421920465567411,
      "grad_norm": 0.1255449193902108,
      "learning_rate": 5.015707937429398e-06,
      "loss": 0.1192,
      "step": 1590
    },
    {
      "epoch": 1.5518913676042678,
      "grad_norm": 0.15408459889574322,
      "learning_rate": 4.9633484139526975e-06,
      "loss": 0.1225,
      "step": 1600
    },
    {
      "epoch": 1.5615906886517945,
      "grad_norm": 0.13856983999023262,
      "learning_rate": 4.910992909735432e-06,
      "loss": 0.1201,
      "step": 1610
    },
    {
      "epoch": 1.5712900096993212,
      "grad_norm": 0.12207611538378375,
      "learning_rate": 4.8586471661486345e-06,
      "loss": 0.1227,
      "step": 1620
    },
    {
      "epoch": 1.5809893307468477,
      "grad_norm": 0.1266119474511833,
      "learning_rate": 4.80631692349297e-06,
      "loss": 0.1232,
      "step": 1630
    },
    {
      "epoch": 1.5906886517943744,
      "grad_norm": 0.14230780661161255,
      "learning_rate": 4.754007920369252e-06,
      "loss": 0.1213,
      "step": 1640
    },
    {
      "epoch": 1.600387972841901,
      "grad_norm": 0.13743035777356707,
      "learning_rate": 4.7017258930491474e-06,
      "loss": 0.1194,
      "step": 1650
    },
    {
      "epoch": 1.6100872938894277,
      "grad_norm": 0.1386159724833024,
      "learning_rate": 4.649476574846113e-06,
      "loss": 0.1206,
      "step": 1660
    },
    {
      "epoch": 1.6197866149369544,
      "grad_norm": 0.12379147527691,
      "learning_rate": 4.597265695486685e-06,
      "loss": 0.122,
      "step": 1670
    },
    {
      "epoch": 1.629485935984481,
      "grad_norm": 0.13597192298584537,
      "learning_rate": 4.545098980482151e-06,
      "loss": 0.1217,
      "step": 1680
    },
    {
      "epoch": 1.6391852570320078,
      "grad_norm": 0.1281728134758131,
      "learning_rate": 4.492982150500677e-06,
      "loss": 0.1222,
      "step": 1690
    },
    {
      "epoch": 1.6488845780795345,
      "grad_norm": 0.14452895293267878,
      "learning_rate": 4.44092092073997e-06,
      "loss": 0.1203,
      "step": 1700
    },
    {
      "epoch": 1.658583899127061,
      "grad_norm": 0.734748232446473,
      "learning_rate": 4.388921000300553e-06,
      "loss": 0.1195,
      "step": 1710
    },
    {
      "epoch": 1.6682832201745876,
      "grad_norm": 0.13007786192950554,
      "learning_rate": 4.336988091559688e-06,
      "loss": 0.1179,
      "step": 1720
    },
    {
      "epoch": 1.6779825412221143,
      "grad_norm": 0.12183531013572281,
      "learning_rate": 4.285127889546049e-06,
      "loss": 0.1214,
      "step": 1730
    },
    {
      "epoch": 1.687681862269641,
      "grad_norm": 0.13118954244252978,
      "learning_rate": 4.233346081315197e-06,
      "loss": 0.1197,
      "step": 1740
    },
    {
      "epoch": 1.6973811833171677,
      "grad_norm": 0.13481540424732633,
      "learning_rate": 4.181648345325934e-06,
      "loss": 0.1173,
      "step": 1750
    },
    {
      "epoch": 1.7070805043646944,
      "grad_norm": 0.13368272764953815,
      "learning_rate": 4.13004035081759e-06,
      "loss": 0.1177,
      "step": 1760
    },
    {
      "epoch": 1.716779825412221,
      "grad_norm": 0.1405384765881231,
      "learning_rate": 4.078527757188333e-06,
      "loss": 0.1193,
      "step": 1770
    },
    {
      "epoch": 1.7264791464597478,
      "grad_norm": 0.1492903635536138,
      "learning_rate": 4.02711621337455e-06,
      "loss": 0.1237,
      "step": 1780
    },
    {
      "epoch": 1.7361784675072744,
      "grad_norm": 0.13618911961601582,
      "learning_rate": 3.9758113572313735e-06,
      "loss": 0.1189,
      "step": 1790
    },
    {
      "epoch": 1.7458777885548011,
      "grad_norm": 0.12113555025079864,
      "learning_rate": 3.924618814914435e-06,
      "loss": 0.119,
      "step": 1800
    },
    {
      "epoch": 1.7555771096023278,
      "grad_norm": 0.12580610174287832,
      "learning_rate": 3.873544200262882e-06,
      "loss": 0.1202,
      "step": 1810
    },
    {
      "epoch": 1.7652764306498545,
      "grad_norm": 0.12160650948969177,
      "learning_rate": 3.822593114183777e-06,
      "loss": 0.1203,
      "step": 1820
    },
    {
      "epoch": 1.7749757516973812,
      "grad_norm": 0.11538378695593902,
      "learning_rate": 3.7717711440378695e-06,
      "loss": 0.1197,
      "step": 1830
    },
    {
      "epoch": 1.7846750727449079,
      "grad_norm": 0.12474760124557903,
      "learning_rate": 3.7210838630268986e-06,
      "loss": 0.1185,
      "step": 1840
    },
    {
      "epoch": 1.7943743937924346,
      "grad_norm": 0.12741060176067462,
      "learning_rate": 3.670536829582424e-06,
      "loss": 0.1191,
      "step": 1850
    },
    {
      "epoch": 1.8040737148399613,
      "grad_norm": 0.12351598125613471,
      "learning_rate": 3.6201355867562725e-06,
      "loss": 0.1191,
      "step": 1860
    },
    {
      "epoch": 1.813773035887488,
      "grad_norm": 0.14188987252511706,
      "learning_rate": 3.569885661612691e-06,
      "loss": 0.1195,
      "step": 1870
    },
    {
      "epoch": 1.8234723569350146,
      "grad_norm": 0.13634330984512036,
      "learning_rate": 3.5197925646222387e-06,
      "loss": 0.1204,
      "step": 1880
    },
    {
      "epoch": 1.8331716779825413,
      "grad_norm": 0.2373069723174238,
      "learning_rate": 3.4698617890574972e-06,
      "loss": 0.1187,
      "step": 1890
    },
    {
      "epoch": 1.842870999030068,
      "grad_norm": 0.13261383960841805,
      "learning_rate": 3.4200988103906747e-06,
      "loss": 0.1181,
      "step": 1900
    },
    {
      "epoch": 1.8525703200775947,
      "grad_norm": 0.13269073521254607,
      "learning_rate": 3.3705090856931626e-06,
      "loss": 0.1189,
      "step": 1910
    },
    {
      "epoch": 1.8622696411251214,
      "grad_norm": 0.11857183586240762,
      "learning_rate": 3.3210980530370974e-06,
      "loss": 0.1163,
      "step": 1920
    },
    {
      "epoch": 1.871968962172648,
      "grad_norm": 0.12514649915266685,
      "learning_rate": 3.2718711308990226e-06,
      "loss": 0.1173,
      "step": 1930
    },
    {
      "epoch": 1.8816682832201745,
      "grad_norm": 0.13278636126865803,
      "learning_rate": 3.2228337175656856e-06,
      "loss": 0.1175,
      "step": 1940
    },
    {
      "epoch": 1.8913676042677012,
      "grad_norm": 0.1526458979660249,
      "learning_rate": 3.1739911905420617e-06,
      "loss": 0.1157,
      "step": 1950
    },
    {
      "epoch": 1.901066925315228,
      "grad_norm": 0.12356959999521902,
      "learning_rate": 3.1253489059616448e-06,
      "loss": 0.1206,
      "step": 1960
    },
    {
      "epoch": 1.9107662463627546,
      "grad_norm": 0.12931045840893188,
      "learning_rate": 3.0769121979990845e-06,
      "loss": 0.1176,
      "step": 1970
    },
    {
      "epoch": 1.9204655674102813,
      "grad_norm": 0.13043271523994618,
      "learning_rate": 3.028686378285245e-06,
      "loss": 0.1188,
      "step": 1980
    },
    {
      "epoch": 1.930164888457808,
      "grad_norm": 0.1511393413072713,
      "learning_rate": 2.9806767353247127e-06,
      "loss": 0.1199,
      "step": 1990
    },
    {
      "epoch": 1.9398642095053347,
      "grad_norm": 0.1313911423759091,
      "learning_rate": 2.9328885339158554e-06,
      "loss": 0.118,
      "step": 2000
    },
    {
      "epoch": 1.9495635305528611,
      "grad_norm": 0.1264515550648061,
      "learning_rate": 2.8853270145734846e-06,
      "loss": 0.1185,
      "step": 2010
    },
    {
      "epoch": 1.9592628516003878,
      "grad_norm": 0.12231144466512428,
      "learning_rate": 2.837997392954165e-06,
      "loss": 0.116,
      "step": 2020
    },
    {
      "epoch": 1.9689621726479145,
      "grad_norm": 0.14521506304206808,
      "learning_rate": 2.7909048592842602e-06,
      "loss": 0.1152,
      "step": 2030
    },
    {
      "epoch": 1.9786614936954412,
      "grad_norm": 0.126711825889384,
      "learning_rate": 2.7440545777907747e-06,
      "loss": 0.1159,
      "step": 2040
    },
    {
      "epoch": 1.9883608147429679,
      "grad_norm": 0.1681227374854468,
      "learning_rate": 2.697451686135031e-06,
      "loss": 0.1186,
      "step": 2050
    },
    {
      "epoch": 1.9980601357904946,
      "grad_norm": 0.13008493981139174,
      "learning_rate": 2.6511012948492625e-06,
      "loss": 0.1172,
      "step": 2060
    },
    {
      "epoch": 2.0077594568380213,
      "grad_norm": 0.1413967651108439,
      "learning_rate": 2.6050084867761953e-06,
      "loss": 0.1045,
      "step": 2070
    },
    {
      "epoch": 2.017458777885548,
      "grad_norm": 0.1549924427730225,
      "learning_rate": 2.5591783165116563e-06,
      "loss": 0.105,
      "step": 2080
    },
    {
      "epoch": 2.0271580989330746,
      "grad_norm": 0.13493394435352007,
      "learning_rate": 2.51361580985027e-06,
      "loss": 0.1038,
      "step": 2090
    },
    {
      "epoch": 2.0368574199806013,
      "grad_norm": 0.14278275935536047,
      "learning_rate": 2.4683259632343363e-06,
      "loss": 0.101,
      "step": 2100
    },
    {
      "epoch": 2.046556741028128,
      "grad_norm": 0.12912222827072362,
      "learning_rate": 2.42331374320591e-06,
      "loss": 0.1023,
      "step": 2110
    },
    {
      "epoch": 2.0562560620756547,
      "grad_norm": 0.13486110619768377,
      "learning_rate": 2.3785840858621556e-06,
      "loss": 0.1038,
      "step": 2120
    },
    {
      "epoch": 2.0659553831231814,
      "grad_norm": 0.12322475049858893,
      "learning_rate": 2.334141896314057e-06,
      "loss": 0.1015,
      "step": 2130
    },
    {
      "epoch": 2.075654704170708,
      "grad_norm": 0.12313226259292803,
      "learning_rate": 2.2899920481485192e-06,
      "loss": 0.1006,
      "step": 2140
    },
    {
      "epoch": 2.0853540252182348,
      "grad_norm": 0.13084594004574415,
      "learning_rate": 2.246139382893915e-06,
      "loss": 0.103,
      "step": 2150
    },
    {
      "epoch": 2.0950533462657615,
      "grad_norm": 0.11766251876256842,
      "learning_rate": 2.2025887094891657e-06,
      "loss": 0.1021,
      "step": 2160
    },
    {
      "epoch": 2.104752667313288,
      "grad_norm": 0.12060852673739084,
      "learning_rate": 2.1593448037563795e-06,
      "loss": 0.1012,
      "step": 2170
    },
    {
      "epoch": 2.114451988360815,
      "grad_norm": 0.12567405538629176,
      "learning_rate": 2.116412407877138e-06,
      "loss": 0.0989,
      "step": 2180
    },
    {
      "epoch": 2.1241513094083415,
      "grad_norm": 0.1365525172002471,
      "learning_rate": 2.0737962298724513e-06,
      "loss": 0.1028,
      "step": 2190
    },
    {
      "epoch": 2.133850630455868,
      "grad_norm": 0.13749139478988584,
      "learning_rate": 2.0315009430864762e-06,
      "loss": 0.0981,
      "step": 2200
    },
    {
      "epoch": 2.143549951503395,
      "grad_norm": 0.12483408419665704,
      "learning_rate": 1.989531185674037e-06,
      "loss": 0.1006,
      "step": 2210
    },
    {
      "epoch": 2.1532492725509216,
      "grad_norm": 0.12889816969916387,
      "learning_rate": 1.9478915600919877e-06,
      "loss": 0.0994,
      "step": 2220
    },
    {
      "epoch": 2.1629485935984483,
      "grad_norm": 0.13722687929380498,
      "learning_rate": 1.9065866325945099e-06,
      "loss": 0.1006,
      "step": 2230
    },
    {
      "epoch": 2.172647914645975,
      "grad_norm": 0.12731978697579938,
      "learning_rate": 1.8656209327323704e-06,
      "loss": 0.1032,
      "step": 2240
    },
    {
      "epoch": 2.1823472356935016,
      "grad_norm": 0.12992560572466144,
      "learning_rate": 1.824998952856198e-06,
      "loss": 0.0995,
      "step": 2250
    },
    {
      "epoch": 2.1920465567410283,
      "grad_norm": 0.12461701158829513,
      "learning_rate": 1.784725147623853e-06,
      "loss": 0.1014,
      "step": 2260
    },
    {
      "epoch": 2.2017458777885546,
      "grad_norm": 0.12197881380183824,
      "learning_rate": 1.7448039335119272e-06,
      "loss": 0.1002,
      "step": 2270
    },
    {
      "epoch": 2.2114451988360813,
      "grad_norm": 0.12027153954201142,
      "learning_rate": 1.7052396883314154e-06,
      "loss": 0.1007,
      "step": 2280
    },
    {
      "epoch": 2.221144519883608,
      "grad_norm": 0.14042894762171168,
      "learning_rate": 1.6660367507476539e-06,
      "loss": 0.1032,
      "step": 2290
    },
    {
      "epoch": 2.2308438409311346,
      "grad_norm": 0.1304473662542888,
      "learning_rate": 1.627199419804522e-06,
      "loss": 0.0974,
      "step": 2300
    },
    {
      "epoch": 2.2405431619786613,
      "grad_norm": 0.13385797988412498,
      "learning_rate": 1.5887319544530182e-06,
      "loss": 0.0993,
      "step": 2310
    },
    {
      "epoch": 2.250242483026188,
      "grad_norm": 0.12152842920119761,
      "learning_rate": 1.5506385730842062e-06,
      "loss": 0.0994,
      "step": 2320
    },
    {
      "epoch": 2.2599418040737147,
      "grad_norm": 0.15097037450611273,
      "learning_rate": 1.5129234530666232e-06,
      "loss": 0.0996,
      "step": 2330
    },
    {
      "epoch": 2.2696411251212414,
      "grad_norm": 0.13232561112805757,
      "learning_rate": 1.4755907302881927e-06,
      "loss": 0.1021,
      "step": 2340
    },
    {
      "epoch": 2.279340446168768,
      "grad_norm": 0.13689860207073964,
      "learning_rate": 1.4386444987026705e-06,
      "loss": 0.1008,
      "step": 2350
    },
    {
      "epoch": 2.2890397672162948,
      "grad_norm": 0.12697227778436346,
      "learning_rate": 1.4020888098806924e-06,
      "loss": 0.1012,
      "step": 2360
    },
    {
      "epoch": 2.2987390882638215,
      "grad_norm": 0.1306372848154384,
      "learning_rate": 1.3659276725654863e-06,
      "loss": 0.0994,
      "step": 2370
    },
    {
      "epoch": 2.308438409311348,
      "grad_norm": 0.1417516277044696,
      "learning_rate": 1.3301650522332566e-06,
      "loss": 0.1021,
      "step": 2380
    },
    {
      "epoch": 2.318137730358875,
      "grad_norm": 0.12583750650057704,
      "learning_rate": 1.2948048706583284e-06,
      "loss": 0.1011,
      "step": 2390
    },
    {
      "epoch": 2.3278370514064015,
      "grad_norm": 0.12210301430641879,
      "learning_rate": 1.2598510054830888e-06,
      "loss": 0.1026,
      "step": 2400
    },
    {
      "epoch": 2.337536372453928,
      "grad_norm": 0.14086208814152623,
      "learning_rate": 1.2253072897927437e-06,
      "loss": 0.1004,
      "step": 2410
    },
    {
      "epoch": 2.347235693501455,
      "grad_norm": 0.12702340474503365,
      "learning_rate": 1.1911775116949958e-06,
      "loss": 0.1026,
      "step": 2420
    },
    {
      "epoch": 2.3569350145489816,
      "grad_norm": 0.1325904746306228,
      "learning_rate": 1.1574654139046171e-06,
      "loss": 0.1004,
      "step": 2430
    },
    {
      "epoch": 2.3666343355965083,
      "grad_norm": 0.1232419911088865,
      "learning_rate": 1.1241746933330338e-06,
      "loss": 0.1021,
      "step": 2440
    },
    {
      "epoch": 2.376333656644035,
      "grad_norm": 0.11926188716205782,
      "learning_rate": 1.0913090006829085e-06,
      "loss": 0.1004,
      "step": 2450
    },
    {
      "epoch": 2.3860329776915616,
      "grad_norm": 0.1309045847253476,
      "learning_rate": 1.0588719400478004e-06,
      "loss": 0.0977,
      "step": 2460
    },
    {
      "epoch": 2.3957322987390883,
      "grad_norm": 0.13504405075095502,
      "learning_rate": 1.026867068516943e-06,
      "loss": 0.1021,
      "step": 2470
    },
    {
      "epoch": 2.405431619786615,
      "grad_norm": 0.1326298312074988,
      "learning_rate": 9.952978957851622e-07,
      "loss": 0.098,
      "step": 2480
    },
    {
      "epoch": 2.4151309408341417,
      "grad_norm": 0.13381141022665327,
      "learning_rate": 9.641678837679985e-07,
      "loss": 0.1,
      "step": 2490
    },
    {
      "epoch": 2.4248302618816684,
      "grad_norm": 0.12764187585028094,
      "learning_rate": 9.334804462220748e-07,
      "loss": 0.0968,
      "step": 2500
    },
    {
      "epoch": 2.434529582929195,
      "grad_norm": 0.12451918444105284,
      "learning_rate": 9.032389483707332e-07,
      "loss": 0.0985,
      "step": 2510
    },
    {
      "epoch": 2.4442289039767218,
      "grad_norm": 0.1231163492324899,
      "learning_rate": 8.734467065350022e-07,
      "loss": 0.1011,
      "step": 2520
    },
    {
      "epoch": 2.4539282250242485,
      "grad_norm": 0.11884056049855944,
      "learning_rate": 8.441069877699287e-07,
      "loss": 0.0988,
      "step": 2530
    },
    {
      "epoch": 2.463627546071775,
      "grad_norm": 0.12149227620777672,
      "learning_rate": 8.152230095063051e-07,
      "loss": 0.1002,
      "step": 2540
    },
    {
      "epoch": 2.473326867119302,
      "grad_norm": 0.12036233836849276,
      "learning_rate": 7.867979391978398e-07,
      "loss": 0.1007,
      "step": 2550
    },
    {
      "epoch": 2.4830261881668285,
      "grad_norm": 0.12618686097566623,
      "learning_rate": 7.588348939738116e-07,
      "loss": 0.0986,
      "step": 2560
    },
    {
      "epoch": 2.492725509214355,
      "grad_norm": 0.12028662540848738,
      "learning_rate": 7.31336940297247e-07,
      "loss": 0.0992,
      "step": 2570
    },
    {
      "epoch": 2.502424830261882,
      "grad_norm": 0.11545036923542605,
      "learning_rate": 7.043070936286395e-07,
      "loss": 0.0978,
      "step": 2580
    },
    {
      "epoch": 2.5121241513094086,
      "grad_norm": 0.12019777054976608,
      "learning_rate": 6.777483180952732e-07,
      "loss": 0.1012,
      "step": 2590
    },
    {
      "epoch": 2.5218234723569353,
      "grad_norm": 0.11782278703441862,
      "learning_rate": 6.516635261661775e-07,
      "loss": 0.0974,
      "step": 2600
    },
    {
      "epoch": 2.531522793404462,
      "grad_norm": 0.12063704346978395,
      "learning_rate": 6.260555783327366e-07,
      "loss": 0.0966,
      "step": 2610
    },
    {
      "epoch": 2.541222114451988,
      "grad_norm": 0.13985159244922432,
      "learning_rate": 6.009272827950042e-07,
      "loss": 0.0999,
      "step": 2620
    },
    {
      "epoch": 2.550921435499515,
      "grad_norm": 0.1344038669131746,
      "learning_rate": 5.762813951537582e-07,
      "loss": 0.101,
      "step": 2630
    },
    {
      "epoch": 2.5606207565470416,
      "grad_norm": 0.12488293218956574,
      "learning_rate": 5.521206181083111e-07,
      "loss": 0.0978,
      "step": 2640
    },
    {
      "epoch": 2.5703200775945683,
      "grad_norm": 0.12688963896793887,
      "learning_rate": 5.28447601160132e-07,
      "loss": 0.0965,
      "step": 2650
    },
    {
      "epoch": 2.580019398642095,
      "grad_norm": 0.12551768173775268,
      "learning_rate": 5.052649403223015e-07,
      "loss": 0.0976,
      "step": 2660
    },
    {
      "epoch": 2.5897187196896216,
      "grad_norm": 0.11519577314916575,
      "learning_rate": 4.825751778348259e-07,
      "loss": 0.0981,
      "step": 2670
    },
    {
      "epoch": 2.5994180407371483,
      "grad_norm": 0.11984916882145599,
      "learning_rate": 4.6038080188585135e-07,
      "loss": 0.0992,
      "step": 2680
    },
    {
      "epoch": 2.609117361784675,
      "grad_norm": 0.1175248528788484,
      "learning_rate": 4.38684246338808e-07,
      "loss": 0.0987,
      "step": 2690
    },
    {
      "epoch": 2.6188166828322017,
      "grad_norm": 0.13159038738087242,
      "learning_rate": 4.1748789046551055e-07,
      "loss": 0.0978,
      "step": 2700
    },
    {
      "epoch": 2.6285160038797284,
      "grad_norm": 0.12093588925930802,
      "learning_rate": 3.967940586852409e-07,
      "loss": 0.0976,
      "step": 2710
    },
    {
      "epoch": 2.638215324927255,
      "grad_norm": 0.11669957994474343,
      "learning_rate": 3.7660502030985203e-07,
      "loss": 0.0988,
      "step": 2720
    },
    {
      "epoch": 2.6479146459747818,
      "grad_norm": 0.11770882115889388,
      "learning_rate": 3.569229892949133e-07,
      "loss": 0.0999,
      "step": 2730
    },
    {
      "epoch": 2.6576139670223085,
      "grad_norm": 0.11899985694439451,
      "learning_rate": 3.3775012399692055e-07,
      "loss": 0.0989,
      "step": 2740
    },
    {
      "epoch": 2.667313288069835,
      "grad_norm": 0.12610632399439725,
      "learning_rate": 3.1908852693661116e-07,
      "loss": 0.0994,
      "step": 2750
    },
    {
      "epoch": 2.677012609117362,
      "grad_norm": 0.11520791632519826,
      "learning_rate": 3.0094024456840176e-07,
      "loss": 0.0987,
      "step": 2760
    },
    {
      "epoch": 2.6867119301648885,
      "grad_norm": 0.12311171355367961,
      "learning_rate": 2.833072670559661e-07,
      "loss": 0.0981,
      "step": 2770
    },
    {
      "epoch": 2.696411251212415,
      "grad_norm": 0.12430861035520042,
      "learning_rate": 2.6619152805399286e-07,
      "loss": 0.098,
      "step": 2780
    },
    {
      "epoch": 2.706110572259942,
      "grad_norm": 0.1256391775818187,
      "learning_rate": 2.49594904496141e-07,
      "loss": 0.0972,
      "step": 2790
    },
    {
      "epoch": 2.7158098933074686,
      "grad_norm": 0.1132674415609218,
      "learning_rate": 2.3351921638921193e-07,
      "loss": 0.0986,
      "step": 2800
    },
    {
      "epoch": 2.7255092143549953,
      "grad_norm": 0.12235997624262486,
      "learning_rate": 2.1796622661356238e-07,
      "loss": 0.1005,
      "step": 2810
    },
    {
      "epoch": 2.735208535402522,
      "grad_norm": 0.11873323598735666,
      "learning_rate": 2.0293764072978618e-07,
      "loss": 0.1017,
      "step": 2820
    },
    {
      "epoch": 2.7449078564500486,
      "grad_norm": 0.11502932974467346,
      "learning_rate": 1.8843510679168341e-07,
      "loss": 0.0969,
      "step": 2830
    },
    {
      "epoch": 2.7546071774975753,
      "grad_norm": 0.11893095537270826,
      "learning_rate": 1.744602151655289e-07,
      "loss": 0.0967,
      "step": 2840
    },
    {
      "epoch": 2.7643064985451016,
      "grad_norm": 0.11664278880666168,
      "learning_rate": 1.6101449835567273e-07,
      "loss": 0.097,
      "step": 2850
    },
    {
      "epoch": 2.7740058195926283,
      "grad_norm": 0.11762507721842773,
      "learning_rate": 1.4809943083648194e-07,
      "loss": 0.0966,
      "step": 2860
    },
    {
      "epoch": 2.783705140640155,
      "grad_norm": 0.11533591737174845,
      "learning_rate": 1.3571642889064984e-07,
      "loss": 0.0965,
      "step": 2870
    },
    {
      "epoch": 2.7934044616876816,
      "grad_norm": 0.11593673764859609,
      "learning_rate": 1.2386685045388313e-07,
      "loss": 0.0934,
      "step": 2880
    },
    {
      "epoch": 2.8031037827352083,
      "grad_norm": 0.11623880074957675,
      "learning_rate": 1.1255199496599034e-07,
      "loss": 0.1002,
      "step": 2890
    },
    {
      "epoch": 2.812803103782735,
      "grad_norm": 0.11273238019620804,
      "learning_rate": 1.0177310322838251e-07,
      "loss": 0.0984,
      "step": 2900
    },
    {
      "epoch": 2.8225024248302617,
      "grad_norm": 0.11740279413666126,
      "learning_rate": 9.153135726800599e-08,
      "loss": 0.0991,
      "step": 2910
    },
    {
      "epoch": 2.8322017458777884,
      "grad_norm": 0.1279718420944925,
      "learning_rate": 8.182788020771826e-08,
      "loss": 0.0995,
      "step": 2920
    },
    {
      "epoch": 2.841901066925315,
      "grad_norm": 0.1128016665542334,
      "learning_rate": 7.266373614312927e-08,
      "loss": 0.0997,
      "step": 2930
    },
    {
      "epoch": 2.8516003879728418,
      "grad_norm": 0.115197840258161,
      "learning_rate": 6.403993002590425e-08,
      "loss": 0.099,
      "step": 2940
    },
    {
      "epoch": 2.8612997090203685,
      "grad_norm": 0.11734253101361407,
      "learning_rate": 5.595740755356627e-08,
      "loss": 0.0961,
      "step": 2950
    },
    {
      "epoch": 2.870999030067895,
      "grad_norm": 0.11746949240153405,
      "learning_rate": 4.841705506578587e-08,
      "loss": 0.0984,
      "step": 2960
    },
    {
      "epoch": 2.880698351115422,
      "grad_norm": 0.11752266068577234,
      "learning_rate": 4.1419699447186045e-08,
      "loss": 0.0995,
      "step": 2970
    },
    {
      "epoch": 2.8903976721629485,
      "grad_norm": 0.11630585391542382,
      "learning_rate": 3.4966108036662006e-08,
      "loss": 0.0987,
      "step": 2980
    },
    {
      "epoch": 2.900096993210475,
      "grad_norm": 0.12834768725602652,
      "learning_rate": 2.9056988543239018e-08,
      "loss": 0.1002,
      "step": 2990
    },
    {
      "epoch": 2.909796314258002,
      "grad_norm": 0.11104514790150172,
      "learning_rate": 2.3692988968458398e-08,
      "loss": 0.0966,
      "step": 3000
    },
    {
      "epoch": 2.9194956353055286,
      "grad_norm": 0.1262237252529426,
      "learning_rate": 1.8874697535319897e-08,
      "loss": 0.0965,
      "step": 3010
    },
    {
      "epoch": 2.9291949563530553,
      "grad_norm": 0.11707823484874674,
      "learning_rate": 1.4602642623777752e-08,
      "loss": 0.0992,
      "step": 3020
    },
    {
      "epoch": 2.938894277400582,
      "grad_norm": 0.11846337746129845,
      "learning_rate": 1.0877292712792586e-08,
      "loss": 0.0954,
      "step": 3030
    },
    {
      "epoch": 2.9485935984481086,
      "grad_norm": 0.1144938540856074,
      "learning_rate": 7.699056328964726e-09,
      "loss": 0.0985,
      "step": 3040
    },
    {
      "epoch": 2.9582929194956353,
      "grad_norm": 0.1287666028823606,
      "learning_rate": 5.06828200172893e-09,
      "loss": 0.1001,
      "step": 3050
    },
    {
      "epoch": 2.967992240543162,
      "grad_norm": 0.11479880891480539,
      "learning_rate": 2.9852582251355124e-09,
      "loss": 0.0979,
      "step": 3060
    },
    {
      "epoch": 2.9776915615906887,
      "grad_norm": 0.13479592940306032,
      "learning_rate": 1.4502134262156519e-09,
      "loss": 0.0973,
      "step": 3070
    },
    {
      "epoch": 2.9873908826382154,
      "grad_norm": 0.14403422269717034,
      "learning_rate": 4.6331593993032e-10,
      "loss": 0.0973,
      "step": 3080
    },
    {
      "epoch": 2.997090203685742,
      "grad_norm": 0.11557815982586303,
      "learning_rate": 2.467399070893439e-11,
      "loss": 0.0964,
      "step": 3090
    },
    {
      "epoch": 3.0,
      "step": 3093,
      "total_flos": 3.528412807299072e+16,
      "train_loss": 0.12713232355097756,
      "train_runtime": 177593.1778,
      "train_samples_per_second": 3.064,
      "train_steps_per_second": 0.017
    }
  ],
  "logging_steps": 10,
  "max_steps": 3093,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.528412807299072e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}