{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 3086,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
| "log_history": [ | |
| { | |
| "epoch": 0.0032404406999351912, | |
| "grad_norm": 13.30365312552572, | |
| "learning_rate": 1.4563106796116507e-07, | |
| "loss": 0.5576, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.0064808813998703824, | |
| "grad_norm": 14.204922777466475, | |
| "learning_rate": 3.074433656957929e-07, | |
| "loss": 0.5525, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.009721322099805573, | |
| "grad_norm": 9.138075119736614, | |
| "learning_rate": 4.6925566343042074e-07, | |
| "loss": 0.5424, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.012961762799740765, | |
| "grad_norm": 5.051726436122705, | |
| "learning_rate": 6.310679611650486e-07, | |
| "loss": 0.5232, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.016202203499675955, | |
| "grad_norm": 3.46444651483391, | |
| "learning_rate": 7.928802588996765e-07, | |
| "loss": 0.4552, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.019442644199611146, | |
| "grad_norm": 1.6233777031065288, | |
| "learning_rate": 9.546925566343043e-07, | |
| "loss": 0.3873, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.02268308489954634, | |
| "grad_norm": 1.1762000179825056, | |
| "learning_rate": 1.1165048543689322e-06, | |
| "loss": 0.3333, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.02592352559948153, | |
| "grad_norm": 0.9558551467742782, | |
| "learning_rate": 1.27831715210356e-06, | |
| "loss": 0.2982, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.02916396629941672, | |
| "grad_norm": 0.9422801874700582, | |
| "learning_rate": 1.440129449838188e-06, | |
| "loss": 0.2797, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.03240440699935191, | |
| "grad_norm": 0.8214754997776478, | |
| "learning_rate": 1.6019417475728158e-06, | |
| "loss": 0.2596, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.0356448476992871, | |
| "grad_norm": 0.9532933362931321, | |
| "learning_rate": 1.7637540453074433e-06, | |
| "loss": 0.2563, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.03888528839922229, | |
| "grad_norm": 0.9390014348115954, | |
| "learning_rate": 1.925566343042071e-06, | |
| "loss": 0.2492, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.04212572909915749, | |
| "grad_norm": 0.8940098493565622, | |
| "learning_rate": 2.0873786407766993e-06, | |
| "loss": 0.2451, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.04536616979909268, | |
| "grad_norm": 0.956678618390739, | |
| "learning_rate": 2.249190938511327e-06, | |
| "loss": 0.2408, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.04860661049902787, | |
| "grad_norm": 0.9610225933792697, | |
| "learning_rate": 2.411003236245955e-06, | |
| "loss": 0.236, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.05184705119896306, | |
| "grad_norm": 0.9269340800650678, | |
| "learning_rate": 2.5728155339805826e-06, | |
| "loss": 0.2246, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.05508749189889825, | |
| "grad_norm": 0.9180868113485474, | |
| "learning_rate": 2.7346278317152108e-06, | |
| "loss": 0.2213, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.05832793259883344, | |
| "grad_norm": 0.9786641616216852, | |
| "learning_rate": 2.8964401294498386e-06, | |
| "loss": 0.2166, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.06156837329876863, | |
| "grad_norm": 0.980551217904519, | |
| "learning_rate": 3.058252427184466e-06, | |
| "loss": 0.2119, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.06480881399870382, | |
| "grad_norm": 0.8626959550478579, | |
| "learning_rate": 3.220064724919094e-06, | |
| "loss": 0.2106, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.06804925469863901, | |
| "grad_norm": 0.8489552811589715, | |
| "learning_rate": 3.381877022653722e-06, | |
| "loss": 0.2112, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.0712896953985742, | |
| "grad_norm": 0.8916159222304701, | |
| "learning_rate": 3.54368932038835e-06, | |
| "loss": 0.2042, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.07453013609850939, | |
| "grad_norm": 0.9086960370554139, | |
| "learning_rate": 3.7055016181229774e-06, | |
| "loss": 0.1979, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.07777057679844458, | |
| "grad_norm": 0.8413464808652309, | |
| "learning_rate": 3.867313915857606e-06, | |
| "loss": 0.1992, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.08101101749837979, | |
| "grad_norm": 0.8989628261518818, | |
| "learning_rate": 4.029126213592233e-06, | |
| "loss": 0.1968, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.08425145819831498, | |
| "grad_norm": 0.9955980487760288, | |
| "learning_rate": 4.190938511326861e-06, | |
| "loss": 0.188, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.08749189889825017, | |
| "grad_norm": 0.9206294362335926, | |
| "learning_rate": 4.352750809061489e-06, | |
| "loss": 0.1869, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.09073233959818536, | |
| "grad_norm": 0.8143016067643809, | |
| "learning_rate": 4.514563106796117e-06, | |
| "loss": 0.1873, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.09397278029812055, | |
| "grad_norm": 0.840197832466951, | |
| "learning_rate": 4.676375404530745e-06, | |
| "loss": 0.1832, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.09721322099805574, | |
| "grad_norm": 0.8031811731017143, | |
| "learning_rate": 4.838187702265373e-06, | |
| "loss": 0.1792, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.10045366169799093, | |
| "grad_norm": 0.7566604882873896, | |
| "learning_rate": 5e-06, | |
| "loss": 0.1741, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.10369410239792612, | |
| "grad_norm": 0.6800771883014759, | |
| "learning_rate": 4.999840024540205e-06, | |
| "loss": 0.1684, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.10693454309786131, | |
| "grad_norm": 0.5814963126209718, | |
| "learning_rate": 4.999360118634534e-06, | |
| "loss": 0.1685, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.1101749837977965, | |
| "grad_norm": 0.5139919383970122, | |
| "learning_rate": 4.998560343701524e-06, | |
| "loss": 0.1656, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.11341542449773169, | |
| "grad_norm": 0.4719697466406009, | |
| "learning_rate": 4.997440802096665e-06, | |
| "loss": 0.1662, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.11665586519766688, | |
| "grad_norm": 0.5441236604152835, | |
| "learning_rate": 4.996001637099303e-06, | |
| "loss": 0.1571, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.11989630589760207, | |
| "grad_norm": 0.5924545330957118, | |
| "learning_rate": 4.994243032894303e-06, | |
| "loss": 0.1618, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.12313674659753726, | |
| "grad_norm": 0.4776889270665497, | |
| "learning_rate": 4.992165214548479e-06, | |
| "loss": 0.165, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.12637718729747247, | |
| "grad_norm": 0.4865661174720737, | |
| "learning_rate": 4.989768447981786e-06, | |
| "loss": 0.1631, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.12961762799740764, | |
| "grad_norm": 0.39533484163777743, | |
| "learning_rate": 4.987053039933294e-06, | |
| "loss": 0.1631, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.13285806869734285, | |
| "grad_norm": 0.4194333976090903, | |
| "learning_rate": 4.984019337921919e-06, | |
| "loss": 0.1596, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.13609850939727802, | |
| "grad_norm": 0.417606467091398, | |
| "learning_rate": 4.980667730201964e-06, | |
| "loss": 0.1622, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.13933895009721323, | |
| "grad_norm": 0.4135543560101031, | |
| "learning_rate": 4.976998645713417e-06, | |
| "loss": 0.1626, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.1425793907971484, | |
| "grad_norm": 0.3863220511507732, | |
| "learning_rate": 4.973012554027059e-06, | |
| "loss": 0.1563, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.1458198314970836, | |
| "grad_norm": 0.4057624631990764, | |
| "learning_rate": 4.968709965284373e-06, | |
| "loss": 0.1601, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.14906027219701878, | |
| "grad_norm": 0.43679600565915516, | |
| "learning_rate": 4.964091430132246e-06, | |
| "loss": 0.1571, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.152300712896954, | |
| "grad_norm": 0.410392605815529, | |
| "learning_rate": 4.959157539652507e-06, | |
| "loss": 0.1578, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.15554115359688916, | |
| "grad_norm": 0.4269387014050368, | |
| "learning_rate": 4.9539089252862745e-06, | |
| "loss": 0.1564, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.15878159429682437, | |
| "grad_norm": 0.3867586094696986, | |
| "learning_rate": 4.948346258753147e-06, | |
| "loss": 0.157, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.16202203499675957, | |
| "grad_norm": 0.371324732751771, | |
| "learning_rate": 4.942470251965232e-06, | |
| "loss": 0.1555, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.16526247569669475, | |
| "grad_norm": 0.38710422014101686, | |
| "learning_rate": 4.936281656936038e-06, | |
| "loss": 0.1536, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.16850291639662995, | |
| "grad_norm": 0.396987101322912, | |
| "learning_rate": 4.929781265684237e-06, | |
| "loss": 0.1572, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.17174335709656513, | |
| "grad_norm": 0.4019858569286597, | |
| "learning_rate": 4.92296991013229e-06, | |
| "loss": 0.1578, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.17498379779650033, | |
| "grad_norm": 0.38858464136588106, | |
| "learning_rate": 4.915848461999987e-06, | |
| "loss": 0.155, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.1782242384964355, | |
| "grad_norm": 0.4114170865581837, | |
| "learning_rate": 4.908417832692879e-06, | |
| "loss": 0.1539, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.18146467919637072, | |
| "grad_norm": 0.40190769617100247, | |
| "learning_rate": 4.900678973185638e-06, | |
| "loss": 0.1493, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.1847051198963059, | |
| "grad_norm": 0.3776026088033294, | |
| "learning_rate": 4.892632873900349e-06, | |
| "loss": 0.1571, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.1879455605962411, | |
| "grad_norm": 0.3872354376930627, | |
| "learning_rate": 4.884280564579761e-06, | |
| "loss": 0.1546, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.19118600129617627, | |
| "grad_norm": 0.40614075304432395, | |
| "learning_rate": 4.8756231141554925e-06, | |
| "loss": 0.1523, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.19442644199611148, | |
| "grad_norm": 0.3841160702323512, | |
| "learning_rate": 4.866661630611231e-06, | |
| "loss": 0.1497, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.19766688269604665, | |
| "grad_norm": 0.37858054721421236, | |
| "learning_rate": 4.8573972608409395e-06, | |
| "loss": 0.1569, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.20090732339598186, | |
| "grad_norm": 0.4017068446743905, | |
| "learning_rate": 4.847831190502068e-06, | |
| "loss": 0.1506, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.20414776409591703, | |
| "grad_norm": 0.386610704005529, | |
| "learning_rate": 4.837964643863818e-06, | |
| "loss": 0.1552, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.20738820479585224, | |
| "grad_norm": 0.3611796883060248, | |
| "learning_rate": 4.827798883650455e-06, | |
| "loss": 0.1528, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.21062864549578741, | |
| "grad_norm": 0.36233797273951096, | |
| "learning_rate": 4.817335210879712e-06, | |
| "loss": 0.1537, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.21386908619572262, | |
| "grad_norm": 0.355034084310162, | |
| "learning_rate": 4.8065749646962815e-06, | |
| "loss": 0.1482, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.21710952689565782, | |
| "grad_norm": 0.3714493090743044, | |
| "learning_rate": 4.795519522200424e-06, | |
| "loss": 0.1524, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.220349967595593, | |
| "grad_norm": 0.3595872341149548, | |
| "learning_rate": 4.784170298271739e-06, | |
| "loss": 0.1537, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.2235904082955282, | |
| "grad_norm": 0.3657197570676038, | |
| "learning_rate": 4.772528745388079e-06, | |
| "loss": 0.1496, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.22683084899546338, | |
| "grad_norm": 0.39757465881991577, | |
| "learning_rate": 4.760596353439665e-06, | |
| "loss": 0.1476, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.23007128969539858, | |
| "grad_norm": 0.3725684613588135, | |
| "learning_rate": 4.748374649538406e-06, | |
| "loss": 0.1505, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.23331173039533376, | |
| "grad_norm": 0.373958535247375, | |
| "learning_rate": 4.735865197822465e-06, | |
| "loss": 0.1521, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.23655217109526896, | |
| "grad_norm": 0.3895028323791198, | |
| "learning_rate": 4.7230695992560725e-06, | |
| "loss": 0.1477, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.23979261179520414, | |
| "grad_norm": 0.362297190858576, | |
| "learning_rate": 4.709989491424641e-06, | |
| "loss": 0.1487, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.24303305249513935, | |
| "grad_norm": 0.3501236992769411, | |
| "learning_rate": 4.69662654832518e-06, | |
| "loss": 0.1511, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.24627349319507452, | |
| "grad_norm": 0.396141744926481, | |
| "learning_rate": 4.682982480152066e-06, | |
| "loss": 0.1545, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.24951393389500973, | |
| "grad_norm": 0.386092381571303, | |
| "learning_rate": 4.669059033078159e-06, | |
| "loss": 0.1548, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.25275437459494493, | |
| "grad_norm": 0.37841627886408896, | |
| "learning_rate": 4.65485798903134e-06, | |
| "loss": 0.1467, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.2559948152948801, | |
| "grad_norm": 0.3790829553236213, | |
| "learning_rate": 4.640381165466447e-06, | |
| "loss": 0.1458, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.2592352559948153, | |
| "grad_norm": 0.3676425416717676, | |
| "learning_rate": 4.625630415132688e-06, | |
| "loss": 0.1519, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.26247569669475046, | |
| "grad_norm": 0.4020941562750785, | |
| "learning_rate": 4.610607625836515e-06, | |
| "loss": 0.1476, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.2657161373946857, | |
| "grad_norm": 0.36503070299017704, | |
| "learning_rate": 4.595314720200028e-06, | |
| "loss": 0.1469, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.26895657809462087, | |
| "grad_norm": 0.3621589576589696, | |
| "learning_rate": 4.579753655414916e-06, | |
| "loss": 0.1469, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.27219701879455604, | |
| "grad_norm": 0.38775207709700354, | |
| "learning_rate": 4.563926422991973e-06, | |
| "loss": 0.1507, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.2754374594944913, | |
| "grad_norm": 0.3808349919379576, | |
| "learning_rate": 4.5478350485062275e-06, | |
| "loss": 0.1505, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.27867790019442645, | |
| "grad_norm": 0.41092711058175224, | |
| "learning_rate": 4.531481591337706e-06, | |
| "loss": 0.1465, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.28191834089436163, | |
| "grad_norm": 0.3677672974804986, | |
| "learning_rate": 4.51486814440787e-06, | |
| "loss": 0.1497, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.2851587815942968, | |
| "grad_norm": 0.3623506682005845, | |
| "learning_rate": 4.497996833911771e-06, | |
| "loss": 0.1494, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.28839922229423204, | |
| "grad_norm": 0.37070206947839884, | |
| "learning_rate": 4.48086981904593e-06, | |
| "loss": 0.1543, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.2916396629941672, | |
| "grad_norm": 0.3639901651185149, | |
| "learning_rate": 4.463489291732011e-06, | |
| "loss": 0.1414, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.2948801036941024, | |
| "grad_norm": 0.3559919942818163, | |
| "learning_rate": 4.445857476336292e-06, | |
| "loss": 0.1486, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.29812054439403757, | |
| "grad_norm": 0.38283982373110453, | |
| "learning_rate": 4.427976629384994e-06, | |
| "loss": 0.1465, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.3013609850939728, | |
| "grad_norm": 0.37801217199677106, | |
| "learning_rate": 4.409849039275486e-06, | |
| "loss": 0.1468, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.304601425793908, | |
| "grad_norm": 0.4096620960705283, | |
| "learning_rate": 4.391477025983418e-06, | |
| "loss": 0.1479, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.30784186649384315, | |
| "grad_norm": 0.3604056272858565, | |
| "learning_rate": 4.372862940765811e-06, | |
| "loss": 0.1496, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.31108230719377833, | |
| "grad_norm": 0.3538536423133527, | |
| "learning_rate": 4.354009165860136e-06, | |
| "loss": 0.1482, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.31432274789371356, | |
| "grad_norm": 0.3904102444609031, | |
| "learning_rate": 4.3349181141794414e-06, | |
| "loss": 0.1435, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.31756318859364874, | |
| "grad_norm": 0.3859901702462496, | |
| "learning_rate": 4.315592229003544e-06, | |
| "loss": 0.1456, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.3208036292935839, | |
| "grad_norm": 0.35852358880201146, | |
| "learning_rate": 4.2960339836663364e-06, | |
| "loss": 0.145, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.32404406999351915, | |
| "grad_norm": 0.3565242267972036, | |
| "learning_rate": 4.276245881239253e-06, | |
| "loss": 0.1466, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.3272845106934543, | |
| "grad_norm": 0.3433337357670066, | |
| "learning_rate": 4.2562304542109186e-06, | |
| "loss": 0.1439, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.3305249513933895, | |
| "grad_norm": 0.38264147572948926, | |
| "learning_rate": 4.235990264163048e-06, | |
| "loss": 0.1442, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.3337653920933247, | |
| "grad_norm": 0.36500282997002936, | |
| "learning_rate": 4.215527901442609e-06, | |
| "loss": 0.1433, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.3370058327932599, | |
| "grad_norm": 0.3846660553174048, | |
| "learning_rate": 4.194845984830308e-06, | |
| "loss": 0.1474, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.3402462734931951, | |
| "grad_norm": 0.37675710340654855, | |
| "learning_rate": 4.173947161205443e-06, | |
| "loss": 0.149, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.34348671419313026, | |
| "grad_norm": 0.36661580250234177, | |
| "learning_rate": 4.152834105207146e-06, | |
| "loss": 0.1448, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.34672715489306544, | |
| "grad_norm": 0.37636552154818864, | |
| "learning_rate": 4.131509518892093e-06, | |
| "loss": 0.1454, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.34996759559300067, | |
| "grad_norm": 0.3581018731822282, | |
| "learning_rate": 4.109976131388682e-06, | |
| "loss": 0.1469, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.35320803629293585, | |
| "grad_norm": 0.3387824669850879, | |
| "learning_rate": 4.088236698547767e-06, | |
| "loss": 0.1453, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.356448476992871, | |
| "grad_norm": 0.3691931165432145, | |
| "learning_rate": 4.06629400258996e-06, | |
| "loss": 0.1466, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.3596889176928062, | |
| "grad_norm": 0.34829825491346456, | |
| "learning_rate": 4.0441508517495605e-06, | |
| "loss": 0.1449, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.36292935839274143, | |
| "grad_norm": 0.38098751196774483, | |
| "learning_rate": 4.021810079915159e-06, | |
| "loss": 0.1466, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.3661697990926766, | |
| "grad_norm": 0.3742527627607329, | |
| "learning_rate": 3.9992745462669515e-06, | |
| "loss": 0.1467, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.3694102397926118, | |
| "grad_norm": 0.3905850349665511, | |
| "learning_rate": 3.976547134910826e-06, | |
| "loss": 0.1452, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.37265068049254696, | |
| "grad_norm": 0.332721575400441, | |
| "learning_rate": 3.953630754509245e-06, | |
| "loss": 0.1478, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.3758911211924822, | |
| "grad_norm": 0.34472432311127355, | |
| "learning_rate": 3.930528337909002e-06, | |
| "loss": 0.1466, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.37913156189241737, | |
| "grad_norm": 0.3742114947748869, | |
| "learning_rate": 3.907242841765874e-06, | |
| "loss": 0.1475, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.38237200259235254, | |
| "grad_norm": 0.363030396921668, | |
| "learning_rate": 3.883777246166219e-06, | |
| "loss": 0.1386, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.3856124432922878, | |
| "grad_norm": 0.36210921666900014, | |
| "learning_rate": 3.860134554245597e-06, | |
| "loss": 0.1412, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.38885288399222295, | |
| "grad_norm": 0.3599182096747789, | |
| "learning_rate": 3.836317791804415e-06, | |
| "loss": 0.1408, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.39209332469215813, | |
| "grad_norm": 0.40331300021325894, | |
| "learning_rate": 3.81233000692069e-06, | |
| "loss": 0.1428, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.3953337653920933, | |
| "grad_norm": 0.34914858021538103, | |
| "learning_rate": 3.7881742695599567e-06, | |
| "loss": 0.1439, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.39857420609202854, | |
| "grad_norm": 0.3637051905577843, | |
| "learning_rate": 3.763853671182367e-06, | |
| "loss": 0.1445, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.4018146467919637, | |
| "grad_norm": 0.3560295518397911, | |
| "learning_rate": 3.7393713243470475e-06, | |
| "loss": 0.1426, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.4050550874918989, | |
| "grad_norm": 0.37115109403035373, | |
| "learning_rate": 3.7147303623137508e-06, | |
| "loss": 0.1438, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.40829552819183407, | |
| "grad_norm": 0.36713310156131235, | |
| "learning_rate": 3.6899339386418635e-06, | |
| "loss": 0.1433, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.4115359688917693, | |
| "grad_norm": 0.3739701552036908, | |
| "learning_rate": 3.664985226786807e-06, | |
| "loss": 0.1415, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.4147764095917045, | |
| "grad_norm": 0.4018803004726532, | |
| "learning_rate": 3.6398874196939015e-06, | |
| "loss": 0.1424, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.41801685029163965, | |
| "grad_norm": 0.3856228913000613, | |
| "learning_rate": 3.614643729389731e-06, | |
| "loss": 0.1434, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.42125729099157483, | |
| "grad_norm": 0.3590367113459008, | |
| "learning_rate": 3.5892573865710665e-06, | |
| "loss": 0.1447, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.42449773169151006, | |
| "grad_norm": 0.35395438465776946, | |
| "learning_rate": 3.5637316401913997e-06, | |
| "loss": 0.1432, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.42773817239144524, | |
| "grad_norm": 0.35628640020346514, | |
| "learning_rate": 3.5380697570451413e-06, | |
| "loss": 0.1404, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.4309786130913804, | |
| "grad_norm": 0.36552890562756624, | |
| "learning_rate": 3.5122750213495355e-06, | |
| "loss": 0.1456, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.43421905379131565, | |
| "grad_norm": 0.37173141387082576, | |
| "learning_rate": 3.486350734324346e-06, | |
| "loss": 0.1435, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.4374594944912508, | |
| "grad_norm": 0.3539914127253098, | |
| "learning_rate": 3.460300213769361e-06, | |
| "loss": 0.1427, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.440699935191186, | |
| "grad_norm": 0.3619461607830295, | |
| "learning_rate": 3.4341267936397843e-06, | |
| "loss": 0.1403, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.4439403758911212, | |
| "grad_norm": 0.35261727478654553, | |
| "learning_rate": 3.4078338236195506e-06, | |
| "loss": 0.1419, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.4471808165910564, | |
| "grad_norm": 0.3320165163284001, | |
| "learning_rate": 3.3814246686926367e-06, | |
| "loss": 0.1422, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.4504212572909916, | |
| "grad_norm": 0.3505007893913486, | |
| "learning_rate": 3.354902708712402e-06, | |
| "loss": 0.1415, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.45366169799092676, | |
| "grad_norm": 0.366809997754886, | |
| "learning_rate": 3.3282713379690418e-06, | |
| "loss": 0.1423, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.45690213869086194, | |
| "grad_norm": 0.37510389340397243, | |
| "learning_rate": 3.301533964755179e-06, | |
| "loss": 0.144, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.46014257939079717, | |
| "grad_norm": 0.3751915143045206, | |
| "learning_rate": 3.2746940109296744e-06, | |
| "loss": 0.143, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.46338302009073234, | |
| "grad_norm": 0.34874611917779574, | |
| "learning_rate": 3.2477549114796892e-06, | |
| "loss": 0.1433, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.4666234607906675, | |
| "grad_norm": 0.34401160771032996, | |
| "learning_rate": 3.220720114081081e-06, | |
| "loss": 0.1429, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.4698639014906027, | |
| "grad_norm": 0.34265687524195426, | |
| "learning_rate": 3.193593078657165e-06, | |
| "loss": 0.1447, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.47310434219053793, | |
| "grad_norm": 0.36594432029338536, | |
| "learning_rate": 3.1663772769359134e-06, | |
| "loss": 0.1417, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.4763447828904731, | |
| "grad_norm": 0.4010466659928031, | |
| "learning_rate": 3.1390761920056423e-06, | |
| "loss": 0.1449, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.4795852235904083, | |
| "grad_norm": 0.38421005144265497, | |
| "learning_rate": 3.111693317869241e-06, | |
| "loss": 0.1415, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.48282566429034346, | |
| "grad_norm": 0.3405551521124088, | |
| "learning_rate": 3.084232158997017e-06, | |
| "loss": 0.1416, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.4860661049902787, | |
| "grad_norm": 0.34368244054627933, | |
| "learning_rate": 3.0566962298781822e-06, | |
| "loss": 0.1366, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.48930654569021387, | |
| "grad_norm": 0.3475737652432212, | |
| "learning_rate": 3.0290890545710732e-06, | |
| "loss": 0.1427, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.49254698639014904, | |
| "grad_norm": 0.3915309068687966, | |
| "learning_rate": 3.0014141662521423e-06, | |
| "loss": 0.142, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.4957874270900843, | |
| "grad_norm": 0.38245602033100184, | |
| "learning_rate": 2.973675106763776e-06, | |
| "loss": 0.1482, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.49902786779001945, | |
| "grad_norm": 0.3660718954078658, | |
| "learning_rate": 2.9458754261610113e-06, | |
| "loss": 0.1382, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.5022683084899546, | |
| "grad_norm": 0.3560275044859087, | |
| "learning_rate": 2.9180186822571953e-06, | |
| "loss": 0.1397, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.5055087491898899, | |
| "grad_norm": 0.3476891353623016, | |
| "learning_rate": 2.8901084401686635e-06, | |
| "loss": 0.1399, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.508749189889825, | |
| "grad_norm": 0.36360266266143876, | |
| "learning_rate": 2.8621482718584615e-06, | |
| "loss": 0.1434, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.5119896305897602, | |
| "grad_norm": 0.3607127979390933, | |
| "learning_rate": 2.8341417556792163e-06, | |
| "loss": 0.1391, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.5152300712896954, | |
| "grad_norm": 0.36135212438772696, | |
| "learning_rate": 2.8060924759151687e-06, | |
| "loss": 0.1369, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.5184705119896306, | |
| "grad_norm": 0.36165014897143566, | |
| "learning_rate": 2.7780040223234623e-06, | |
| "loss": 0.1428, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.5217109526895658, | |
| "grad_norm": 0.3809583977124813, | |
| "learning_rate": 2.749879989674718e-06, | |
| "loss": 0.1422, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.5249513933895009, | |
| "grad_norm": 0.3385971069870571, | |
| "learning_rate": 2.72172397729298e-06, | |
| "loss": 0.1356, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.5281918340894362, | |
| "grad_norm": 0.3446592011239239, | |
| "learning_rate": 2.6935395885950705e-06, | |
| "loss": 0.1421, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.5314322747893714, | |
| "grad_norm": 0.3824673660290881, | |
| "learning_rate": 2.665330430629421e-06, | |
| "loss": 0.1397, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.5346727154893065, | |
| "grad_norm": 0.3717938840273103, | |
| "learning_rate": 2.6371001136144458e-06, | |
| "loss": 0.1388, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.5379131561892417, | |
| "grad_norm": 0.3442850639482792, | |
| "learning_rate": 2.6088522504764983e-06, | |
| "loss": 0.1412, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.541153596889177, | |
| "grad_norm": 0.35531304056249186, | |
| "learning_rate": 2.580590456387497e-06, | |
| "loss": 0.1392, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.5443940375891121, | |
| "grad_norm": 0.3659296727759621, | |
| "learning_rate": 2.5523183483022406e-06, | |
| "loss": 0.142, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.5476344782890473, | |
| "grad_norm": 0.3598598578768378, | |
| "learning_rate": 2.5240395444955255e-06, | |
| "loss": 0.14, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.5508749189889826, | |
| "grad_norm": 0.37315624702714517, | |
| "learning_rate": 2.495757664099063e-06, | |
| "loss": 0.1422, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.5541153596889177, | |
| "grad_norm": 0.38147815062716345, | |
| "learning_rate": 2.4674763266383096e-06, | |
| "loss": 0.1399, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.5573558003888529, | |
| "grad_norm": 0.3514332109313319, | |
| "learning_rate": 2.439199151569236e-06, | |
| "loss": 0.1389, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.560596241088788, | |
| "grad_norm": 0.3466932777873452, | |
| "learning_rate": 2.41092975781511e-06, | |
| "loss": 0.1364, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.5638366817887233, | |
| "grad_norm": 0.34717040583506037, | |
| "learning_rate": 2.382671763303342e-06, | |
| "loss": 0.1356, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.5670771224886585, | |
| "grad_norm": 0.37026146298749535, | |
| "learning_rate": 2.3544287845024643e-06, | |
| "loss": 0.1399, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.5703175631885936, | |
| "grad_norm": 0.3785438614418004, | |
| "learning_rate": 2.326204435959291e-06, | |
| "loss": 0.1442, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.5735580038885288, | |
| "grad_norm": 0.3809262572423141, | |
| "learning_rate": 2.2980023298363334e-06, | |
| "loss": 0.142, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.5767984445884641, | |
| "grad_norm": 0.3736610534336298, | |
| "learning_rate": 2.2698260754495057e-06, | |
| "loss": 0.1386, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.5800388852883992, | |
| "grad_norm": 0.37243260617527046, | |
| "learning_rate": 2.2416792788062073e-06, | |
| "loss": 0.1379, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.5832793259883344, | |
| "grad_norm": 0.3653198160876884, | |
| "learning_rate": 2.213565542143828e-06, | |
| "loss": 0.1394, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.5865197666882696, | |
| "grad_norm": 0.3456465195181901, | |
| "learning_rate": 2.1854884634687267e-06, | |
| "loss": 0.1361, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.5897602073882048, | |
| "grad_norm": 0.35698643784051837, | |
| "learning_rate": 2.1574516360957588e-06, | |
| "loss": 0.134, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.59300064808814, | |
| "grad_norm": 0.33396274798278874, | |
| "learning_rate": 2.129458648188405e-06, | |
| "loss": 0.1378, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.5962410887880751, | |
| "grad_norm": 0.35679763524806046, | |
| "learning_rate": 2.1015130822995553e-06, | |
| "loss": 0.1387, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.5994815294880104, | |
| "grad_norm": 0.3748507983654056, | |
| "learning_rate": 2.0736185149130117e-06, | |
| "loss": 0.1353, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.6027219701879456, | |
| "grad_norm": 0.3680336654813566, | |
| "learning_rate": 2.0457785159857673e-06, | |
| "loss": 0.142, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.6059624108878807, | |
| "grad_norm": 0.3461769829954041, | |
| "learning_rate": 2.0179966484911268e-06, | |
| "loss": 0.1371, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.609202851587816, | |
| "grad_norm": 0.35213432555022733, | |
| "learning_rate": 1.990276467962711e-06, | |
| "loss": 0.1377, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.6124432922877512, | |
| "grad_norm": 0.3838660410707078, | |
| "learning_rate": 1.9626215220394213e-06, | |
| "loss": 0.1403, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.6156837329876863, | |
| "grad_norm": 0.394603150983814, | |
| "learning_rate": 1.9350353500114072e-06, | |
| "loss": 0.1369, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.6189241736876215, | |
| "grad_norm": 0.3327444517938039, | |
| "learning_rate": 1.907521482367114e-06, | |
| "loss": 0.1406, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.6221646143875567, | |
| "grad_norm": 0.3704170243923683, | |
| "learning_rate": 1.8800834403414437e-06, | |
| "loss": 0.137, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.6254050550874919, | |
| "grad_norm": 0.40349804036311127, | |
| "learning_rate": 1.8527247354651048e-06, | |
| "loss": 0.1362, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.6286454957874271, | |
| "grad_norm": 0.364607317668594, | |
| "learning_rate": 1.8254488691152145e-06, | |
| "loss": 0.1378, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.6318859364873622, | |
| "grad_norm": 0.38797616750444036, | |
| "learning_rate": 1.7982593320671793e-06, | |
| "loss": 0.1387, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.6351263771872975, | |
| "grad_norm": 0.3943004101702645, | |
| "learning_rate": 1.7711596040479523e-06, | |
| "loss": 0.1378, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.6383668178872327, | |
| "grad_norm": 0.34986766543947917, | |
| "learning_rate": 1.744153153290693e-06, | |
| "loss": 0.1382, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.6416072585871678, | |
| "grad_norm": 0.35689489846966777, | |
| "learning_rate": 1.717243436090904e-06, | |
| "loss": 0.1398, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.6448476992871031, | |
| "grad_norm": 0.358287736012017, | |
| "learning_rate": 1.6904338963640913e-06, | |
| "loss": 0.1342, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.6480881399870383, | |
| "grad_norm": 0.35009264406089535, | |
| "learning_rate": 1.6637279652050103e-06, | |
| "loss": 0.1384, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.6513285806869734, | |
| "grad_norm": 0.36264463552008447, | |
| "learning_rate": 1.6371290604485532e-06, | |
| "loss": 0.1345, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.6545690213869086, | |
| "grad_norm": 0.37898011262102793, | |
| "learning_rate": 1.610640586232336e-06, | |
| "loss": 0.136, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.6578094620868438, | |
| "grad_norm": 0.3486402988937132, | |
| "learning_rate": 1.5842659325610326e-06, | |
| "loss": 0.1339, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.661049902786779, | |
| "grad_norm": 0.3494806553044058, | |
| "learning_rate": 1.5580084748725193e-06, | |
| "loss": 0.132, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.6642903434867142, | |
| "grad_norm": 0.3376036564372793, | |
| "learning_rate": 1.5318715736058926e-06, | |
| "loss": 0.1419, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.6675307841866494, | |
| "grad_norm": 0.3619041141435384, | |
| "learning_rate": 1.5058585737713883e-06, | |
| "loss": 0.1377, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.6707712248865846, | |
| "grad_norm": 0.37485938510309685, | |
| "learning_rate": 1.4799728045222956e-06, | |
| "loss": 0.1373, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.6740116655865198, | |
| "grad_norm": 0.3596734953623062, | |
| "learning_rate": 1.4542175787288818e-06, | |
| "loss": 0.1396, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.6772521062864549, | |
| "grad_norm": 0.3531801559136217, | |
| "learning_rate": 1.428596192554421e-06, | |
| "loss": 0.139, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.6804925469863902, | |
| "grad_norm": 0.35403583546745904, | |
| "learning_rate": 1.4031119250333393e-06, | |
| "loss": 0.1386, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.6837329876863253, | |
| "grad_norm": 0.3499312200329266, | |
| "learning_rate": 1.3777680376515667e-06, | |
| "loss": 0.138, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.6869734283862605, | |
| "grad_norm": 0.3655179914285288, | |
| "learning_rate": 1.3525677739291338e-06, | |
| "loss": 0.1374, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.6902138690861958, | |
| "grad_norm": 0.36296013901637053, | |
| "learning_rate": 1.3275143590050622e-06, | |
| "loss": 0.1391, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.6934543097861309, | |
| "grad_norm": 0.33352013543504444, | |
| "learning_rate": 1.3026109992246084e-06, | |
| "loss": 0.1356, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.6966947504860661, | |
| "grad_norm": 0.34615241767967, | |
| "learning_rate": 1.2778608817289167e-06, | |
| "loss": 0.1365, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.6999351911860013, | |
| "grad_norm": 0.36999888059560476, | |
| "learning_rate": 1.2532671740471292e-06, | |
| "loss": 0.1411, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.7031756318859365, | |
| "grad_norm": 0.34402496133806365, | |
| "learning_rate": 1.2288330236910025e-06, | |
| "loss": 0.1396, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.7064160725858717, | |
| "grad_norm": 0.35636630900488314, | |
| "learning_rate": 1.204561557752085e-06, | |
| "loss": 0.1394, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.7096565132858069, | |
| "grad_norm": 0.36181518745129104, | |
| "learning_rate": 1.1804558825015172e-06, | |
| "loss": 0.1365, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.712896953985742, | |
| "grad_norm": 0.3563465165177732, | |
| "learning_rate": 1.1565190829924838e-06, | |
| "loss": 0.1362, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.7161373946856773, | |
| "grad_norm": 0.33363782347377735, | |
| "learning_rate": 1.13275422266539e-06, | |
| "loss": 0.1398, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.7193778353856124, | |
| "grad_norm": 0.36414316020816895, | |
| "learning_rate": 1.109164342955803e-06, | |
| "loss": 0.1383, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.7226182760855476, | |
| "grad_norm": 0.36241713354650357, | |
| "learning_rate": 1.0857524629052057e-06, | |
| "loss": 0.1373, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.7258587167854829, | |
| "grad_norm": 0.35575126832820747, | |
| "learning_rate": 1.0625215787746185e-06, | |
| "loss": 0.1376, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.729099157485418, | |
| "grad_norm": 0.3581511044472468, | |
| "learning_rate": 1.0394746636611364e-06, | |
| "loss": 0.1327, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.7323395981853532, | |
| "grad_norm": 0.3487467990800773, | |
| "learning_rate": 1.016614667117434e-06, | |
| "loss": 0.1362, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.7355800388852884, | |
| "grad_norm": 0.36540009173824517, | |
| "learning_rate": 9.939445147742786e-07, | |
| "loss": 0.1408, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.7388204795852236, | |
| "grad_norm": 0.3504860844231358, | |
| "learning_rate": 9.714671079661048e-07, | |
| "loss": 0.1383, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.7420609202851588, | |
| "grad_norm": 0.3546645739584433, | |
| "learning_rate": 9.491853233597037e-07, | |
| "loss": 0.1367, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.7453013609850939, | |
| "grad_norm": 0.36305507134438186, | |
| "learning_rate": 9.271020125860658e-07, | |
| "loss": 0.1367, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.7485418016850292, | |
| "grad_norm": 0.3632120141566518, | |
| "learning_rate": 9.052200018754276e-07, | |
| "loss": 0.1364, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.7517822423849644, | |
| "grad_norm": 0.3222399957976063, | |
| "learning_rate": 8.835420916955686e-07, | |
| "loss": 0.1376, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.7550226830848995, | |
| "grad_norm": 0.3648037126049122, | |
| "learning_rate": 8.620710563934063e-07, | |
| "loss": 0.1374, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.7582631237848347, | |
| "grad_norm": 0.38493254190484144, | |
| "learning_rate": 8.408096438399371e-07, | |
| "loss": 0.1369, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.76150356448477, | |
| "grad_norm": 0.335337634563654, | |
| "learning_rate": 8.19760575078562e-07, | |
| "loss": 0.1353, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.7647440051847051, | |
| "grad_norm": 0.34948986108995744, | |
| "learning_rate": 7.989265439768412e-07, | |
| "loss": 0.1336, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.7679844458846403, | |
| "grad_norm": 0.3533447598580161, | |
| "learning_rate": 7.783102168817406e-07, | |
| "loss": 0.1338, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.7712248865845756, | |
| "grad_norm": 0.35564500184932724, | |
| "learning_rate": 7.579142322783837e-07, | |
| "loss": 0.1371, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.7744653272845107, | |
| "grad_norm": 0.3565193952902089, | |
| "learning_rate": 7.37741200452384e-07, | |
| "loss": 0.1333, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.7777057679844459, | |
| "grad_norm": 0.38945562323117644, | |
| "learning_rate": 7.177937031557733e-07, | |
| "loss": 0.1384, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.780946208684381, | |
| "grad_norm": 0.33188297048475185, | |
| "learning_rate": 6.98074293276595e-07, | |
| "loss": 0.1304, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.7841866493843163, | |
| "grad_norm": 0.33600376376796814, | |
| "learning_rate": 6.785854945121773e-07, | |
| "loss": 0.1371, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.7874270900842515, | |
| "grad_norm": 0.3733419224635097, | |
| "learning_rate": 6.593298010461541e-07, | |
| "loss": 0.1357, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.7906675307841866, | |
| "grad_norm": 0.360994575329127, | |
| "learning_rate": 6.403096772292611e-07, | |
| "loss": 0.1388, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.7939079714841218, | |
| "grad_norm": 0.3540220491931335, | |
| "learning_rate": 6.215275572639387e-07, | |
| "loss": 0.1383, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.7971484121840571, | |
| "grad_norm": 0.3421614423791235, | |
| "learning_rate": 6.029858448928091e-07, | |
| "loss": 0.1374, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.8003888528839922, | |
| "grad_norm": 0.352474503469002, | |
| "learning_rate": 5.846869130910413e-07, | |
| "loss": 0.1394, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.8036292935839274, | |
| "grad_norm": 0.33788431994154117, | |
| "learning_rate": 5.666331037626585e-07, | |
| "loss": 0.1345, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.8068697342838627, | |
| "grad_norm": 0.32025075334711434, | |
| "learning_rate": 5.488267274408207e-07, | |
| "loss": 0.1349, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.8101101749837978, | |
| "grad_norm": 0.36081495813728437, | |
| "learning_rate": 5.312700629921181e-07, | |
| "loss": 0.1325, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.813350615683733, | |
| "grad_norm": 0.3416845659191573, | |
| "learning_rate": 5.139653573249253e-07, | |
| "loss": 0.1333, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.8165910563836681, | |
| "grad_norm": 0.3831793796512933, | |
| "learning_rate": 4.969148251018385e-07, | |
| "loss": 0.1342, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.8198314970836034, | |
| "grad_norm": 0.3578750131718832, | |
| "learning_rate": 4.801206484562446e-07, | |
| "loss": 0.1352, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.8230719377835386, | |
| "grad_norm": 0.355502152277739, | |
| "learning_rate": 4.6358497671304623e-07, | |
| "loss": 0.1357, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.8263123784834737, | |
| "grad_norm": 0.3510295514913444, | |
| "learning_rate": 4.473099261135977e-07, | |
| "loss": 0.1388, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.829552819183409, | |
| "grad_norm": 0.32078925808882724, | |
| "learning_rate": 4.312975795448593e-07, | |
| "loss": 0.1364, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.8327932598833442, | |
| "grad_norm": 0.3692542497286855, | |
| "learning_rate": 4.1554998627283655e-07, | |
| "loss": 0.1315, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.8360337005832793, | |
| "grad_norm": 0.37137933087389124, | |
| "learning_rate": 4.000691616803076e-07, | |
| "loss": 0.1413, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.8392741412832145, | |
| "grad_norm": 0.3882652809375332, | |
| "learning_rate": 3.8485708700889913e-07, | |
| "loss": 0.1364, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.8425145819831497, | |
| "grad_norm": 0.36297859765424056, | |
| "learning_rate": 3.699157091055225e-07, | |
| "loss": 0.1346, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.8457550226830849, | |
| "grad_norm": 0.3565105681283779, | |
| "learning_rate": 3.552469401732181e-07, | |
| "loss": 0.1379, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.8489954633830201, | |
| "grad_norm": 0.33430669724867546, | |
| "learning_rate": 3.408526575264298e-07, | |
| "loss": 0.1407, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.8522359040829552, | |
| "grad_norm": 0.3705327571148099, | |
| "learning_rate": 3.267347033507456e-07, | |
| "loss": 0.1341, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.8554763447828905, | |
| "grad_norm": 0.3473212789683562, | |
| "learning_rate": 3.128948844671334e-07, | |
| "loss": 0.1354, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.8587167854828257, | |
| "grad_norm": 0.35963054694538577, | |
| "learning_rate": 2.993349721007041e-07, | |
| "loss": 0.136, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.8619572261827608, | |
| "grad_norm": 0.37555057368085243, | |
| "learning_rate": 2.860567016540322e-07, | |
| "loss": 0.1379, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.8651976668826961, | |
| "grad_norm": 0.38221465588612524, | |
| "learning_rate": 2.730617724850523e-07, | |
| "loss": 0.138, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.8684381075826313, | |
| "grad_norm": 0.39323307490910586, | |
| "learning_rate": 2.603518476895794e-07, | |
| "loss": 0.1372, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.8716785482825664, | |
| "grad_norm": 0.32668418769533897, | |
| "learning_rate": 2.479285538884649e-07, | |
| "loss": 0.1315, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.8749189889825016, | |
| "grad_norm": 0.36967939498979446, | |
| "learning_rate": 2.3579348101941817e-07, | |
| "loss": 0.1395, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.8781594296824368, | |
| "grad_norm": 0.4148617877855533, | |
| "learning_rate": 2.239481821335296e-07, | |
| "loss": 0.1346, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.881399870382372, | |
| "grad_norm": 0.3507803299933488, | |
| "learning_rate": 2.1239417319650707e-07, | |
| "loss": 0.135, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.8846403110823072, | |
| "grad_norm": 0.3431423389326874, | |
| "learning_rate": 2.0113293289466484e-07, | |
| "loss": 0.1386, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.8878807517822424, | |
| "grad_norm": 0.3556269315719522, | |
| "learning_rate": 1.901659024456784e-07, | |
| "loss": 0.1374, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.8911211924821776, | |
| "grad_norm": 0.3581655711093919, | |
| "learning_rate": 1.7949448541413923e-07, | |
| "loss": 0.1349, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.8943616331821128, | |
| "grad_norm": 0.3539219505292951, | |
| "learning_rate": 1.6912004753192463e-07, | |
| "loss": 0.1372, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.8976020738820479, | |
| "grad_norm": 0.36487534167481467, | |
| "learning_rate": 1.5904391652341034e-07, | |
| "loss": 0.1398, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.9008425145819832, | |
| "grad_norm": 0.3547252654216283, | |
| "learning_rate": 1.492673819355489e-07, | |
| "loss": 0.137, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.9040829552819183, | |
| "grad_norm": 0.3624466589823807, | |
| "learning_rate": 1.3979169497283369e-07, | |
| "loss": 0.1344, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.9073233959818535, | |
| "grad_norm": 0.3597593530124065, | |
| "learning_rate": 1.3061806833716773e-07, | |
| "loss": 0.1351, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.9105638366817888, | |
| "grad_norm": 0.36978575366992134, | |
| "learning_rate": 1.2174767607266214e-07, | |
| "loss": 0.1381, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.9138042773817239, | |
| "grad_norm": 0.3394271179919748, | |
| "learning_rate": 1.1318165341538096e-07, | |
| "loss": 0.1409, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.9170447180816591, | |
| "grad_norm": 0.3886245251582624, | |
| "learning_rate": 1.0492109664805572e-07, | |
| "loss": 0.1357, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.9202851587815943, | |
| "grad_norm": 0.357080659087376, | |
| "learning_rate": 9.69670629597802e-08, | |
| "loss": 0.1367, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.9235255994815295, | |
| "grad_norm": 0.3725663246448965, | |
| "learning_rate": 8.932057031071113e-08, | |
| "loss": 0.1367, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.9267660401814647, | |
| "grad_norm": 0.3534059663914867, | |
| "learning_rate": 8.198259730178987e-08, | |
| "loss": 0.1372, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.9300064808813999, | |
| "grad_norm": 0.38942343747420805, | |
| "learning_rate": 7.495408304950213e-08, | |
| "loss": 0.1362, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.933246921581335, | |
| "grad_norm": 0.3482911345014878, | |
| "learning_rate": 6.823592706568738e-08, | |
| "loss": 0.1364, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.9364873622812703, | |
| "grad_norm": 0.3962969062391369, | |
| "learning_rate": 6.182898914241897e-08, | |
| "loss": 0.1397, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.9397278029812054, | |
| "grad_norm": 0.3486632243006723, | |
| "learning_rate": 5.5734089241969434e-08, | |
| "loss": 0.1355, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.9429682436811406, | |
| "grad_norm": 0.3414359245355013, | |
| "learning_rate": 4.995200739186967e-08, | |
| "loss": 0.1353, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.9462086843810759, | |
| "grad_norm": 0.3577826479652381, | |
| "learning_rate": 4.448348358508187e-08, | |
| "loss": 0.1349, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.949449125081011, | |
| "grad_norm": 0.39657239893518126, | |
| "learning_rate": 3.932921768529413e-08, | |
| "loss": 0.1391, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.9526895657809462, | |
| "grad_norm": 0.34868268785953926, | |
| "learning_rate": 3.448986933735299e-08, | |
| "loss": 0.1408, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.9559300064808814, | |
| "grad_norm": 0.37214717947029213, | |
| "learning_rate": 2.996605788283924e-08, | |
| "loss": 0.1376, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.9591704471808166, | |
| "grad_norm": 0.37280976502774077, | |
| "learning_rate": 2.5758362280807192e-08, | |
| "loss": 0.1323, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.9624108878807518, | |
| "grad_norm": 0.3529393622845644, | |
| "learning_rate": 2.1867321033687827e-08, | |
| "loss": 0.1349, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.9656513285806869, | |
| "grad_norm": 0.344394233275576, | |
| "learning_rate": 1.8293432118371146e-08, | |
| "loss": 0.1358, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.9688917692806222, | |
| "grad_norm": 0.3857968624258438, | |
| "learning_rate": 1.5037152922475207e-08, | |
| "loss": 0.1406, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.9721322099805574, | |
| "grad_norm": 0.37594692090340304, | |
| "learning_rate": 1.2098900185808781e-08, | |
| "loss": 0.1362, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.9753726506804925, | |
| "grad_norm": 0.345262397252086, | |
| "learning_rate": 9.479049947038743e-09, | |
| "loss": 0.1355, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 0.9786130913804277, | |
| "grad_norm": 0.39601712390863364, | |
| "learning_rate": 7.177937495561616e-09, | |
| "loss": 0.134, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 0.981853532080363, | |
| "grad_norm": 0.3544450146581611, | |
| "learning_rate": 5.1958573285959546e-09, | |
| "loss": 0.1367, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 0.9850939727802981, | |
| "grad_norm": 0.36321038651711207, | |
| "learning_rate": 3.533063113490276e-09, | |
| "loss": 0.1353, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 0.9883344134802333, | |
| "grad_norm": 0.34666506280478593, | |
| "learning_rate": 2.189767655259578e-09, | |
| "loss": 0.132, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.9915748541801686, | |
| "grad_norm": 0.34052736841522746, | |
| "learning_rate": 1.1661428693512944e-09, | |
| "loss": 0.1306, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 0.9948152948801037, | |
| "grad_norm": 0.31547647192990236, | |
| "learning_rate": 4.623197596417828e-10, | |
| "loss": 0.1314, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 0.9980557355800389, | |
| "grad_norm": 0.3819807577926737, | |
| "learning_rate": 7.838840167168071e-11, | |
| "loss": 0.1368, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 3086, | |
| "total_flos": 190452489388032.0, | |
| "train_loss": 0.15608682801641588, | |
| "train_runtime": 8265.837, | |
| "train_samples_per_second": 47.787, | |
| "train_steps_per_second": 0.373 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 3086, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 5000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 190452489388032.0, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |