{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 2286,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0, "eval_loss": 2.1641297340393066, "eval_runtime": 4.2967, "eval_samples_per_second": 4.655, "eval_steps_per_second": 0.465, "step": 0},
    {"epoch": 0.0008750820389411508, "grad_norm": 6.37307071685791, "learning_rate": 2.915451895043732e-07, "loss": 1.6434, "step": 1},
    {"epoch": 0.008750820389411508, "grad_norm": 4.0545759201049805, "learning_rate": 2.915451895043732e-06, "loss": 1.6965, "step": 10},
    {"epoch": 0.017501640778823015, "grad_norm": 2.363191604614258, "learning_rate": 5.830903790087464e-06, "loss": 1.5272, "step": 20},
    {"epoch": 0.02625246116823452, "grad_norm": 1.6348848342895508, "learning_rate": 8.746355685131195e-06, "loss": 1.389, "step": 30},
    {"epoch": 0.03500328155764603, "grad_norm": 1.6716902256011963, "learning_rate": 1.1661807580174927e-05, "loss": 1.348, "step": 40},
    {"epoch": 0.04375410194705753, "grad_norm": 1.510228157043457, "learning_rate": 1.457725947521866e-05, "loss": 1.2997, "step": 50},
    {"epoch": 0.05250492233646904, "grad_norm": 1.6153247356414795, "learning_rate": 1.749271137026239e-05, "loss": 1.2665, "step": 60},
    {"epoch": 0.06125574272588055, "grad_norm": 1.446387767791748, "learning_rate": 2.0408163265306123e-05, "loss": 1.237, "step": 70},
    {"epoch": 0.07000656311529206, "grad_norm": 1.3039863109588623, "learning_rate": 2.3323615160349855e-05, "loss": 1.2341, "step": 80},
    {"epoch": 0.07875738350470357, "grad_norm": 1.4866383075714111, "learning_rate": 2.6239067055393584e-05, "loss": 1.2338, "step": 90},
    {"epoch": 0.08750820389411507, "grad_norm": 1.358859658241272, "learning_rate": 2.915451895043732e-05, "loss": 1.2331, "step": 100},
    {"epoch": 0.09625902428352658, "grad_norm": 1.847506046295166, "learning_rate": 3.206997084548105e-05, "loss": 1.1628, "step": 110},
    {"epoch": 0.10500984467293809, "grad_norm": 1.7742356061935425, "learning_rate": 3.498542274052478e-05, "loss": 1.1846, "step": 120},
    {"epoch": 0.1137606650623496, "grad_norm": 1.3415714502334595, "learning_rate": 3.790087463556852e-05, "loss": 1.1699, "step": 130},
    {"epoch": 0.1225114854517611, "grad_norm": 1.5319396257400513, "learning_rate": 4.0816326530612245e-05, "loss": 1.181, "step": 140},
    {"epoch": 0.1312623058411726, "grad_norm": 1.3915642499923706, "learning_rate": 4.373177842565598e-05, "loss": 1.189, "step": 150},
    {"epoch": 0.14001312623058412, "grad_norm": 1.175917625427246, "learning_rate": 4.664723032069971e-05, "loss": 1.1605, "step": 160},
    {"epoch": 0.14876394661999562, "grad_norm": 1.3125519752502441, "learning_rate": 4.956268221574344e-05, "loss": 1.1734, "step": 170},
    {"epoch": 0.15751476700940714, "grad_norm": 1.2966923713684082, "learning_rate": 5.247813411078717e-05, "loss": 1.2075, "step": 180},
    {"epoch": 0.16626558739881864, "grad_norm": 1.2630099058151245, "learning_rate": 5.539358600583091e-05, "loss": 1.1791, "step": 190},
    {"epoch": 0.17501640778823013, "grad_norm": 1.306665301322937, "learning_rate": 5.830903790087464e-05, "loss": 1.1469, "step": 200},
    {"epoch": 0.18376722817764166, "grad_norm": 1.19467031955719, "learning_rate": 6.122448979591838e-05, "loss": 1.185, "step": 210},
    {"epoch": 0.19251804856705315, "grad_norm": 1.416387677192688, "learning_rate": 6.41399416909621e-05, "loss": 1.2061, "step": 220},
    {"epoch": 0.20126886895646467, "grad_norm": 1.3546264171600342, "learning_rate": 6.705539358600583e-05, "loss": 1.2016, "step": 230},
    {"epoch": 0.21001968934587617, "grad_norm": 1.2417850494384766, "learning_rate": 6.997084548104956e-05, "loss": 1.1797, "step": 240},
    {"epoch": 0.2187705097352877, "grad_norm": 1.2703570127487183, "learning_rate": 7.28862973760933e-05, "loss": 1.2059, "step": 250},
    {"epoch": 0.2275213301246992, "grad_norm": 1.2479429244995117, "learning_rate": 7.580174927113704e-05, "loss": 1.1958, "step": 260},
    {"epoch": 0.23627215051411068, "grad_norm": 1.507601261138916, "learning_rate": 7.871720116618077e-05, "loss": 1.2151, "step": 270},
    {"epoch": 0.2450229709035222, "grad_norm": 1.2083854675292969, "learning_rate": 8.163265306122449e-05, "loss": 1.2202, "step": 280},
    {"epoch": 0.2537737912929337, "grad_norm": 1.2939566373825073, "learning_rate": 8.454810495626822e-05, "loss": 1.1916, "step": 290},
    {"epoch": 0.2625246116823452, "grad_norm": 1.2871382236480713, "learning_rate": 8.746355685131195e-05, "loss": 1.222, "step": 300},
    {"epoch": 0.27127543207175675, "grad_norm": 1.1558256149291992, "learning_rate": 9.037900874635569e-05, "loss": 1.2094, "step": 310},
    {"epoch": 0.28002625246116825, "grad_norm": 1.0638883113861084, "learning_rate": 9.329446064139942e-05, "loss": 1.2, "step": 320},
    {"epoch": 0.28877707285057974, "grad_norm": 1.1072458028793335, "learning_rate": 9.620991253644317e-05, "loss": 1.224, "step": 330},
    {"epoch": 0.29752789323999124, "grad_norm": 1.1332110166549683, "learning_rate": 9.912536443148688e-05, "loss": 1.2266, "step": 340},
    {"epoch": 0.30627871362940273, "grad_norm": 1.0537033081054688, "learning_rate": 9.999872800049233e-05, "loss": 1.2173, "step": 350},
    {"epoch": 0.3150295340188143, "grad_norm": 1.090928316116333, "learning_rate": 9.999249795462235e-05, "loss": 1.2416, "step": 360},
    {"epoch": 0.3237803544082258, "grad_norm": 1.0084835290908813, "learning_rate": 9.998107687592326e-05, "loss": 1.2094, "step": 370},
    {"epoch": 0.3325311747976373, "grad_norm": 1.82576584815979, "learning_rate": 9.996446595031691e-05, "loss": 1.2246, "step": 380},
    {"epoch": 0.34128199518704877, "grad_norm": 1.2305560111999512, "learning_rate": 9.994266690261928e-05, "loss": 1.2647, "step": 390},
    {"epoch": 0.35003281557646027, "grad_norm": 1.170067548751831, "learning_rate": 9.991568199636148e-05, "loss": 1.1829, "step": 400},
    {"epoch": 0.3587836359658718, "grad_norm": 1.0299819707870483, "learning_rate": 9.988351403355454e-05, "loss": 1.2471, "step": 410},
    {"epoch": 0.3675344563552833, "grad_norm": 0.9742593765258789, "learning_rate": 9.98461663543987e-05, "loss": 1.2049, "step": 420},
    {"epoch": 0.3762852767446948, "grad_norm": 0.9377045631408691, "learning_rate": 9.980364283693638e-05, "loss": 1.1963, "step": 430},
    {"epoch": 0.3850360971341063, "grad_norm": 1.0425899028778076, "learning_rate": 9.97559478966496e-05, "loss": 1.2167, "step": 440},
    {"epoch": 0.39378691752351785, "grad_norm": 0.9749513864517212, "learning_rate": 9.97030864860014e-05, "loss": 1.2166, "step": 450},
    {"epoch": 0.40253773791292935, "grad_norm": 1.0552780628204346, "learning_rate": 9.964506409392176e-05, "loss": 1.2228, "step": 460},
    {"epoch": 0.41128855830234085, "grad_norm": 1.0261808633804321, "learning_rate": 9.958188674523748e-05, "loss": 1.2252, "step": 470},
    {"epoch": 0.42003937869175234, "grad_norm": 0.9530945420265198, "learning_rate": 9.951356100004665e-05, "loss": 1.1845, "step": 480},
    {"epoch": 0.42879019908116384, "grad_norm": 0.8990481495857239, "learning_rate": 9.94400939530375e-05, "loss": 1.2283, "step": 490},
    {"epoch": 0.4375410194705754, "grad_norm": 0.9713592529296875, "learning_rate": 9.936149323275169e-05, "loss": 1.1945, "step": 500},
    {"epoch": 0.4462918398599869, "grad_norm": 1.003854513168335, "learning_rate": 9.927776700079216e-05, "loss": 1.2322, "step": 510},
    {"epoch": 0.4550426602493984, "grad_norm": 0.935400664806366, "learning_rate": 9.918892395097572e-05, "loss": 1.2272, "step": 520},
    {"epoch": 0.4637934806388099, "grad_norm": 1.075519323348999, "learning_rate": 9.90949733084303e-05, "loss": 1.2069, "step": 530},
    {"epoch": 0.47254430102822137, "grad_norm": 0.9949712157249451, "learning_rate": 9.8995924828637e-05, "loss": 1.2083, "step": 540},
    {"epoch": 0.4812951214176329, "grad_norm": 0.8942195773124695, "learning_rate": 9.889178879641717e-05, "loss": 1.2137, "step": 550},
    {"epoch": 0.4900459418070444, "grad_norm": 0.9525908827781677, "learning_rate": 9.878257602486445e-05, "loss": 1.2348, "step": 560},
    {"epoch": 0.4987967621964559, "grad_norm": 0.895167350769043, "learning_rate": 9.8668297854222e-05, "loss": 1.2034, "step": 570},
    {"epoch": 0.5075475825858674, "grad_norm": 1.0244312286376953, "learning_rate": 9.854896615070495e-05, "loss": 1.1942, "step": 580},
    {"epoch": 0.516298402975279, "grad_norm": 0.9835163950920105, "learning_rate": 9.842459330526826e-05, "loss": 1.2182, "step": 590},
    {"epoch": 0.5250492233646904, "grad_norm": 0.9238565564155579, "learning_rate": 9.829519223232006e-05, "loss": 1.1992, "step": 600},
    {"epoch": 0.533800043754102, "grad_norm": 0.9063233733177185, "learning_rate": 9.81607763683808e-05, "loss": 1.2135, "step": 610},
    {"epoch": 0.5425508641435135, "grad_norm": 0.9810764193534851, "learning_rate": 9.802135967068782e-05, "loss": 1.1909, "step": 620},
    {"epoch": 0.5513016845329249, "grad_norm": 0.9388274550437927, "learning_rate": 9.787695661574632e-05, "loss": 1.1693, "step": 630},
    {"epoch": 0.5600525049223365, "grad_norm": 0.933283805847168, "learning_rate": 9.772758219782601e-05, "loss": 1.1857, "step": 640},
    {"epoch": 0.5688033253117479, "grad_norm": 0.947989284992218, "learning_rate": 9.757325192740425e-05, "loss": 1.1901, "step": 650},
    {"epoch": 0.5775541457011595, "grad_norm": 0.8272686004638672, "learning_rate": 9.741398182955548e-05, "loss": 1.2087, "step": 660},
    {"epoch": 0.586304966090571, "grad_norm": 0.8907813429832458, "learning_rate": 9.724978844228717e-05, "loss": 1.201, "step": 670},
    {"epoch": 0.5950557864799825, "grad_norm": 0.8374966382980347, "learning_rate": 9.708068881482271e-05, "loss": 1.2028, "step": 680},
    {"epoch": 0.603806606869394, "grad_norm": 0.9283761382102966, "learning_rate": 9.690670050583095e-05, "loss": 1.1995, "step": 690},
    {"epoch": 0.6125574272588055, "grad_norm": 0.8573452234268188, "learning_rate": 9.672784158160307e-05, "loss": 1.1372, "step": 700},
    {"epoch": 0.621308247648217, "grad_norm": 0.8831506371498108, "learning_rate": 9.654413061417655e-05, "loss": 1.1626, "step": 710},
    {"epoch": 0.6300590680376286, "grad_norm": 0.9180812239646912, "learning_rate": 9.635558667940683e-05, "loss": 1.1466, "step": 720},
    {"epoch": 0.63880988842704, "grad_norm": 0.8657775521278381, "learning_rate": 9.616222935498646e-05, "loss": 1.2052, "step": 730},
    {"epoch": 0.6475607088164516, "grad_norm": 0.9488115310668945, "learning_rate": 9.596407871841229e-05, "loss": 1.1651, "step": 740},
    {"epoch": 0.656311529205863, "grad_norm": 0.8592752814292908, "learning_rate": 9.57611553449006e-05, "loss": 1.1953, "step": 750},
    {"epoch": 0.6650623495952745, "grad_norm": 0.889445960521698, "learning_rate": 9.555348030525076e-05, "loss": 1.1647, "step": 760},
    {"epoch": 0.6738131699846861, "grad_norm": 0.8997176885604858, "learning_rate": 9.534107516365725e-05, "loss": 1.167, "step": 770},
    {"epoch": 0.6825639903740975, "grad_norm": 0.8208065032958984, "learning_rate": 9.512396197547053e-05, "loss": 1.1631, "step": 780},
    {"epoch": 0.6913148107635091, "grad_norm": 0.8419785499572754, "learning_rate": 9.490216328490692e-05, "loss": 1.1857, "step": 790},
    {"epoch": 0.7000656311529205, "grad_norm": 0.8125676512718201, "learning_rate": 9.467570212270767e-05, "loss": 1.2426, "step": 800},
    {"epoch": 0.7088164515423321, "grad_norm": 0.9222508072853088, "learning_rate": 9.444460200374748e-05, "loss": 1.1452, "step": 810},
    {"epoch": 0.7175672719317436, "grad_norm": 0.8932722210884094, "learning_rate": 9.420888692459291e-05, "loss": 1.1647, "step": 820},
    {"epoch": 0.7263180923211551, "grad_norm": 0.817932665348053, "learning_rate": 9.396858136101066e-05, "loss": 1.1618, "step": 830},
    {"epoch": 0.7350689127105666, "grad_norm": 0.8717408180236816, "learning_rate": 9.372371026542595e-05, "loss": 1.1517, "step": 840},
    {"epoch": 0.7438197330999782, "grad_norm": 0.8578230142593384, "learning_rate": 9.347429906433174e-05, "loss": 1.2009, "step": 850},
    {"epoch": 0.7525705534893896, "grad_norm": 0.8276301622390747, "learning_rate": 9.322037365564845e-05, "loss": 1.1627, "step": 860},
    {"epoch": 0.7613213738788012, "grad_norm": 0.7934173941612244, "learning_rate": 9.296196040603475e-05, "loss": 1.1735, "step": 870},
    {"epoch": 0.7700721942682126, "grad_norm": 0.8270857930183411, "learning_rate": 9.269908614814995e-05, "loss": 1.149, "step": 880},
    {"epoch": 0.7788230146576242, "grad_norm": 1.0632795095443726, "learning_rate": 9.243177817786757e-05, "loss": 1.1937, "step": 890},
    {"epoch": 0.7875738350470357, "grad_norm": 0.8449203968048096, "learning_rate": 9.216006425144118e-05, "loss": 1.1787, "step": 900},
    {"epoch": 0.7963246554364471, "grad_norm": 0.7634311318397522, "learning_rate": 9.188397258262221e-05, "loss": 1.1445, "step": 910},
    {"epoch": 0.8050754758258587, "grad_norm": 0.7856956720352173, "learning_rate": 9.160353183973046e-05, "loss": 1.1519, "step": 920},
    {"epoch": 0.8138262962152701, "grad_norm": 0.8162206411361694, "learning_rate": 9.131877114267711e-05, "loss": 1.1282, "step": 930},
    {"epoch": 0.8225771166046817, "grad_norm": 0.8263719081878662, "learning_rate": 9.102972005994122e-05, "loss": 1.1127, "step": 940},
    {"epoch": 0.8313279369940932, "grad_norm": 0.831969141960144, "learning_rate": 9.073640860549931e-05, "loss": 1.1446, "step": 950},
    {"epoch": 0.8400787573835047, "grad_norm": 0.8127765655517578, "learning_rate": 9.043886723570884e-05, "loss": 1.1649, "step": 960},
    {"epoch": 0.8488295777729162, "grad_norm": 0.7642777562141418, "learning_rate": 9.013712684614582e-05, "loss": 1.1656, "step": 970},
    {"epoch": 0.8575803981623277, "grad_norm": 0.8071634769439697, "learning_rate": 8.983121876839657e-05, "loss": 1.1414, "step": 980},
    {"epoch": 0.8663312185517392, "grad_norm": 0.7789682149887085, "learning_rate": 8.95211747668046e-05, "loss": 1.1127, "step": 990},
    {"epoch": 0.8750820389411508, "grad_norm": 0.7899104356765747, "learning_rate": 8.920702703517204e-05, "loss": 1.1218, "step": 1000},
    {"epoch": 0.8838328593305622, "grad_norm": 0.726874053478241, "learning_rate": 8.888880819341702e-05, "loss": 1.1345, "step": 1010},
    {"epoch": 0.8925836797199738, "grad_norm": 0.8020316958427429, "learning_rate": 8.856655128418637e-05, "loss": 1.1229, "step": 1020},
    {"epoch": 0.9013345001093852, "grad_norm": 0.8466740250587463, "learning_rate": 8.824028976942465e-05, "loss": 1.1451, "step": 1030},
    {"epoch": 0.9100853204987968, "grad_norm": 0.7729008793830872, "learning_rate": 8.791005752689958e-05, "loss": 1.1204, "step": 1040},
    {"epoch": 0.9188361408882083, "grad_norm": 0.8055347204208374, "learning_rate": 8.757588884668434e-05, "loss": 1.1372, "step": 1050},
    {"epoch": 0.9275869612776197, "grad_norm": 0.8180489540100098, "learning_rate": 8.723781842759695e-05, "loss": 1.1632, "step": 1060},
    {"epoch": 0.9363377816670313, "grad_norm": 0.816849946975708, "learning_rate": 8.689588137359732e-05, "loss": 1.1226, "step": 1070},
    {"epoch": 0.9450886020564427, "grad_norm": 0.8028609752655029, "learning_rate": 8.655011319014216e-05, "loss": 1.1559, "step": 1080},
    {"epoch": 0.9538394224458543, "grad_norm": 0.8719303011894226, "learning_rate": 8.620054978049829e-05, "loss": 1.1522, "step": 1090},
    {"epoch": 0.9625902428352658, "grad_norm": 0.766177237033844, "learning_rate": 8.584722744201447e-05, "loss": 1.1186, "step": 1100},
    {"epoch": 0.9713410632246773, "grad_norm": 0.7790910005569458, "learning_rate": 8.549018286235255e-05, "loss": 1.1227, "step": 1110},
    {"epoch": 0.9800918836140888, "grad_norm": 0.7803717255592346, "learning_rate": 8.512945311567786e-05, "loss": 1.1186, "step": 1120},
    {"epoch": 0.9888427040035004, "grad_norm": 0.7084935307502747, "learning_rate": 8.476507565880957e-05, "loss": 1.1085, "step": 1130},
    {"epoch": 0.9975935243929118, "grad_norm": 0.7696670889854431, "learning_rate": 8.439708832733142e-05, "loss": 1.1123, "step": 1140},
    {"epoch": 1.006125574272588, "grad_norm": 0.9948705434799194, "learning_rate": 8.402552933166288e-05, "loss": 0.9984, "step": 1150},
    {"epoch": 1.0148763946619996, "grad_norm": 0.8488800525665283, "learning_rate": 8.36504372530916e-05, "loss": 0.9603, "step": 1160},
    {"epoch": 1.023627215051411, "grad_norm": 0.8748326897621155, "learning_rate": 8.32718510397673e-05, "loss": 0.9535, "step": 1170},
    {"epoch": 1.0323780354408225, "grad_norm": 0.8636799454689026, "learning_rate": 8.288981000265748e-05, "loss": 0.9705, "step": 1180},
    {"epoch": 1.041128855830234, "grad_norm": 0.7722444534301758, "learning_rate": 8.250435381146549e-05, "loss": 0.9411, "step": 1190},
    {"epoch": 1.0498796762196456, "grad_norm": 0.792931318283081, "learning_rate": 8.211552249051149e-05, "loss": 0.9455, "step": 1200},
    {"epoch": 1.0586304966090572, "grad_norm": 0.8179608583450317, "learning_rate": 8.17233564145764e-05, "loss": 0.9418, "step": 1210},
    {"epoch": 1.0673813169984685, "grad_norm": 0.8081513047218323, "learning_rate": 8.132789630470952e-05, "loss": 0.9451, "step": 1220},
    {"epoch": 1.07613213738788, "grad_norm": 0.8941613435745239, "learning_rate": 8.092918322400027e-05, "loss": 0.9336, "step": 1230},
    {"epoch": 1.0848829577772916, "grad_norm": 0.8505945801734924, "learning_rate": 8.052725857331429e-05, "loss": 0.9432, "step": 1240},
    {"epoch": 1.0936337781667032, "grad_norm": 0.7953508496284485, "learning_rate": 8.012216408699455e-05, "loss": 0.9608, "step": 1250},
    {"epoch": 1.1023845985561147, "grad_norm": 0.9065775871276855, "learning_rate": 7.971394182852789e-05, "loss": 0.9583, "step": 1260},
    {"epoch": 1.1111354189455263, "grad_norm": 0.8752979040145874, "learning_rate": 7.930263418617713e-05, "loss": 0.9752, "step": 1270},
    {"epoch": 1.1198862393349376, "grad_norm": 0.8173210024833679, "learning_rate": 7.888828386857985e-05, "loss": 0.9505, "step": 1280},
    {"epoch": 1.1286370597243491, "grad_norm": 0.7964232563972473, "learning_rate": 7.84709339003135e-05, "loss": 0.9346, "step": 1290},
    {"epoch": 1.1373878801137607, "grad_norm": 0.8364039063453674, "learning_rate": 7.805062761742799e-05, "loss": 0.9606, "step": 1300},
    {"epoch": 1.1461387005031722, "grad_norm": 0.8122743964195251, "learning_rate": 7.76274086629458e-05, "loss": 0.9455, "step": 1310},
    {"epoch": 1.1548895208925836, "grad_norm": 0.8229530453681946, "learning_rate": 7.720132098233031e-05, "loss": 0.9465, "step": 1320},
    {"epoch": 1.1636403412819951, "grad_norm": 0.7859545946121216, "learning_rate": 7.677240881892258e-05, "loss": 0.9421, "step": 1330},
    {"epoch": 1.1723911616714067, "grad_norm": 0.8277194499969482, "learning_rate": 7.634071670934734e-05, "loss": 0.9541, "step": 1340},
    {"epoch": 1.1811419820608182, "grad_norm": 0.7695391774177551, "learning_rate": 7.590628947888847e-05, "loss": 0.9433, "step": 1350},
    {"epoch": 1.1898928024502298, "grad_norm": 0.82956862449646, "learning_rate": 7.546917223683453e-05, "loss": 0.9395, "step": 1360},
    {"epoch": 1.1986436228396413, "grad_norm": 0.8398957252502441, "learning_rate": 7.502941037179474e-05, "loss": 0.9432, "step": 1370},
    {"epoch": 1.2073944432290526, "grad_norm": 0.8232219219207764, "learning_rate": 7.4587049546986e-05, "loss": 0.9887, "step": 1380},
    {"epoch": 1.2161452636184642, "grad_norm": 0.7886401414871216, "learning_rate": 7.414213569549145e-05, "loss": 0.959, "step": 1390},
    {"epoch": 1.2248960840078758, "grad_norm": 0.7588061690330505, "learning_rate": 7.369471501549087e-05, "loss": 0.9651, "step": 1400},
    {"epoch": 1.2336469043972873, "grad_norm": 0.8234547972679138, "learning_rate": 7.324483396546371e-05, "loss": 0.9935, "step": 1410},
    {"epoch": 1.2423977247866986, "grad_norm": 0.7623049020767212, "learning_rate": 7.279253925936498e-05, "loss": 0.95, "step": 1420},
    {"epoch": 1.2511485451761102, "grad_norm": 0.7172533869743347, "learning_rate": 7.233787786177464e-05, "loss": 0.9441, "step": 1430},
    {"epoch": 1.2598993655655217, "grad_norm": 0.7973024249076843, "learning_rate": 7.188089698302098e-05, "loss": 0.9682, "step": 1440},
    {"epoch": 1.2686501859549333, "grad_norm": 0.7969169020652771, "learning_rate": 7.14216440742785e-05, "loss": 0.9541, "step": 1450},
    {"epoch": 1.2774010063443448, "grad_norm": 0.8296008110046387, "learning_rate": 7.096016682264074e-05, "loss": 0.9597, "step": 1460},
    {"epoch": 1.2861518267337564, "grad_norm": 0.8538773655891418, "learning_rate": 7.049651314616862e-05, "loss": 0.9225, "step": 1470},
    {"epoch": 1.2949026471231677, "grad_norm": 0.8068443536758423, "learning_rate": 7.003073118891487e-05, "loss": 0.9282, "step": 1480},
    {"epoch": 1.3036534675125793, "grad_norm": 0.8286350965499878, "learning_rate": 6.956286931592481e-05, "loss": 0.9645, "step": 1490},
    {"epoch": 1.3124042879019908, "grad_norm": 0.796650767326355, "learning_rate": 6.909297610821443e-05, "loss": 0.934, "step": 1500},
    {"epoch": 1.3211551082914024, "grad_norm": 0.7479636073112488, "learning_rate": 6.862110035772589e-05, "loss": 0.9319, "step": 1510},
    {"epoch": 1.3299059286808137, "grad_norm": 0.7734546065330505, "learning_rate": 6.81472910622611e-05, "loss": 0.9577, "step": 1520},
    {"epoch": 1.3386567490702252, "grad_norm": 0.821553647518158, "learning_rate": 6.7671597420394e-05, "loss": 0.966, "step": 1530},
    {"epoch": 1.3474075694596368, "grad_norm": 0.8141497373580933, "learning_rate": 6.719406882636196e-05, "loss": 0.9593, "step": 1540},
    {"epoch": 1.3561583898490484, "grad_norm": 0.7738481163978577, "learning_rate": 6.671475486493691e-05, "loss": 0.9474, "step": 1550},
    {"epoch": 1.36490921023846, "grad_norm": 0.7933306694030762, "learning_rate": 6.62337053062766e-05, "loss": 0.9297, "step": 1560},
    {"epoch": 1.3736600306278715, "grad_norm": 0.7921035885810852, "learning_rate": 6.575097010075664e-05, "loss": 0.9283, "step": 1570},
    {"epoch": 1.382410851017283, "grad_norm": 0.7895304560661316, "learning_rate": 6.526659937378392e-05, "loss": 0.9559, "step": 1580},
    {"epoch": 1.3911616714066943, "grad_norm": 0.8518770933151245, "learning_rate": 6.478064342059164e-05, "loss": 0.9571, "step": 1590},
    {"epoch": 1.3999124917961059, "grad_norm": 0.8479484915733337, "learning_rate": 6.429315270101701e-05, "loss": 0.9578, "step": 1600},
    {"epoch": 1.4086633121855174, "grad_norm": 0.763520359992981, "learning_rate": 6.380417783426153e-05, "loss": 0.9304, "step": 1610},
    {"epoch": 1.4174141325749288, "grad_norm": 0.7657537460327148, "learning_rate": 6.3313769593635e-05, "loss": 0.9541, "step": 1620},
    {"epoch": 1.4261649529643403, "grad_norm": 0.7461577653884888, "learning_rate": 6.282197890128338e-05, "loss": 0.9331, "step": 1630},
    {"epoch": 1.4349157733537519, "grad_norm": 0.7814643383026123, "learning_rate": 6.23288568229012e-05, "loss": 0.9431, "step": 1640},
    {"epoch": 1.4436665937431634, "grad_norm": 0.7600048184394836, "learning_rate": 6.183445456242904e-05, "loss": 0.9549, "step": 1650},
    {"epoch": 1.452417414132575, "grad_norm": 0.7684770226478577, "learning_rate": 6.13388234567369e-05, "loss": 0.9074, "step": 1660},
    {"epoch": 1.4611682345219865, "grad_norm": 0.7696465849876404, "learning_rate": 6.084201497029333e-05, "loss": 0.9449, "step": 1670},
    {"epoch": 1.469919054911398, "grad_norm": 0.818397045135498, "learning_rate": 6.034408068982172e-05, "loss": 0.9396, "step": 1680},
    {"epoch": 1.4786698753008094, "grad_norm": 0.7443113923072815, "learning_rate": 5.984507231894364e-05, "loss": 0.928, "step": 1690},
    {"epoch": 1.487420695690221, "grad_norm": 0.8166981339454651, "learning_rate": 5.934504167281022e-05, "loss": 0.9323, "step": 1700},
    {"epoch": 1.4961715160796325, "grad_norm": 0.8331442475318909, "learning_rate": 5.884404067272174e-05, "loss": 0.9458, "step": 1710},
    {"epoch": 1.5049223364690438, "grad_norm": 0.7990359663963318, "learning_rate": 5.834212134073644e-05, "loss": 0.9094, "step": 1720},
    {"epoch": 1.5136731568584554, "grad_norm": 0.768055260181427, "learning_rate": 5.7839335794268666e-05, "loss": 0.9385, "step": 1730},
    {"epoch": 1.522423977247867, "grad_norm": 0.7425386309623718, "learning_rate": 5.7335736240677174e-05, "loss": 0.9446, "step": 1740},
    {"epoch": 1.5311747976372785, "grad_norm": 0.7698354721069336, "learning_rate": 5.683137497184424e-05, "loss": 0.9057, "step": 1750},
    {"epoch": 1.53992561802669, "grad_norm": 0.8186885118484497, "learning_rate": 5.6326304358745694e-05, "loss": 0.9758, "step": 1760},
    {"epoch": 1.5486764384161016, "grad_norm": 0.8006178736686707, "learning_rate": 5.5820576846013086e-05, "loss": 0.9424, "step": 1770},
    {"epoch": 1.5574272588055131, "grad_norm": 0.7572160363197327, "learning_rate": 5.531424494648789e-05, "loss": 0.945, "step": 1780},
    {"epoch": 1.5661780791949247, "grad_norm": 0.8019212484359741, "learning_rate": 5.480736123576886e-05, "loss": 0.9528, "step": 1790},
    {"epoch": 1.574928899584336, "grad_norm": 0.7867367267608643, "learning_rate": 5.4299978346752756e-05, "loss": 0.9499, "step": 1800},
    {"epoch": 1.5836797199737476, "grad_norm": 0.7404739856719971, "learning_rate": 5.379214896416907e-05, "loss": 0.9177, "step": 1810},
    {"epoch": 1.592430540363159, "grad_norm": 0.712374746799469, "learning_rate": 5.328392581910961e-05, "loss": 0.9212, "step": 1820},
    {"epoch": 1.6011813607525704, "grad_norm": 0.7724602222442627, "learning_rate": 5.277536168355293e-05, "loss": 0.9402, "step": 1830},
    {"epoch": 1.609932181141982, "grad_norm": 0.8452624678611755, "learning_rate": 5.226650936488474e-05, "loss": 0.9193, "step": 1840},
    {"epoch": 1.6186830015313936, "grad_norm": 0.7993892431259155, "learning_rate": 5.1757421700414645e-05, "loss": 0.9328, "step": 1850},
    {"epoch": 1.627433821920805, "grad_norm": 0.7931053042411804, "learning_rate": 5.124815155188966e-05, "loss": 0.9759, "step": 1860},
    {"epoch": 1.6361846423102167, "grad_norm": 0.7516879439353943, "learning_rate": 5.0738751800005226e-05, "loss": 0.9229, "step": 1870},
    {"epoch": 1.6449354626996282, "grad_norm": 0.6935803890228271, "learning_rate": 5.022927533891434e-05, "loss": 0.9234, "step": 1880},
    {"epoch": 1.6536862830890398, "grad_norm": 0.8120819926261902, "learning_rate": 4.9719775070735196e-05, "loss": 0.9582, "step": 1890},
    {"epoch": 1.662437103478451, "grad_norm": 0.781491219997406, "learning_rate": 4.921030390005802e-05, "loss": 0.9177, "step": 1900},
    {"epoch": 1.6711879238678626, "grad_norm": 0.7723197340965271, "learning_rate": 4.870091472845166e-05, "loss": 0.9382, "step": 1910},
    {"epoch": 1.679938744257274, "grad_norm": 0.751848578453064, "learning_rate": 4.819166044897047e-05, "loss": 0.9148, "step": 1920},
    {"epoch": 1.6886895646466855, "grad_norm": 0.8265687823295593, "learning_rate": 4.768259394066216e-05, "loss": 0.9386, "step": 1930},
    {"epoch": 1.697440385036097, "grad_norm": 0.7489150762557983, "learning_rate": 4.7173768063076956e-05, "loss": 0.9337, "step": 1940},
    {"epoch": 1.7061912054255086, "grad_norm": 0.8112419247627258, "learning_rate": 4.666523565077889e-05, "loss": 0.917, "step": 1950},
    {"epoch": 1.7149420258149202, "grad_norm": 0.7611903548240662, "learning_rate": 4.615704950785965e-05, "loss": 0.9338, "step": 1960},
    {"epoch": 1.7236928462043317, "grad_norm": 0.7738552093505859, "learning_rate": 4.5649262402455704e-05, "loss": 0.9359, "step": 1970},
    {"epoch": 1.7324436665937433, "grad_norm": 0.7741129994392395, "learning_rate": 4.5141927061268844e-05, "loss": 0.9209, "step": 1980},
    {"epoch": 1.7411944869831548, "grad_norm": 0.7815123200416565, "learning_rate": 4.463509616409144e-05, "loss": 0.916, "step": 1990},
    {"epoch": 1.7499453073725662, "grad_norm": 0.6793907284736633, "learning_rate": 4.412882233833629e-05, "loss": 0.9042, "step": 2000},
    {"epoch": 1.7586961277619777, "grad_norm": 0.711877167224884, "learning_rate": 4.362315815357197e-05, "loss": 0.9072, "step": 2010},
    {"epoch": 1.767446948151389, "grad_norm": 0.7671152353286743, "learning_rate": 4.311815611606423e-05, "loss": 0.9226, "step": 2020},
    {"epoch": 1.7761977685408006, "grad_norm": 0.7221760153770447, "learning_rate": 4.261386866332397e-05, "loss": 0.9077, "step": 2030},
    {"epoch": 1.7849485889302121, "grad_norm": 0.7419185042381287, "learning_rate": 4.211034815866228e-05, "loss": 0.9017, "step": 2040},
    {"epoch": 1.7936994093196237, "grad_norm": 0.740652859210968, "learning_rate": 4.1607646885753206e-05, "loss": 0.9159, "step": 2050},
    {"epoch": 1.8024502297090352, "grad_norm": 0.7923645377159119, "learning_rate": 4.110581704320495e-05, "loss": 0.9135, "step": 2060},
    {"epoch": 1.8112010500984468, "grad_norm": 0.7528581023216248, "learning_rate": 4.060491073913957e-05, "loss": 0.9291, "step": 2070},
    {"epoch": 1.8199518704878583, "grad_norm": 0.7974929809570312, "learning_rate": 4.010497998578239e-05, "loss": 0.9208, "step": 2080},
    {"epoch": 1.8287026908772699, "grad_norm": 0.7388021349906921, "learning_rate": 3.960607669406129e-05, "loss": 0.9155, "step": 2090},
    {"epoch": 1.8374535112666812, "grad_norm": 0.7927631735801697, "learning_rate": 3.910825266821629e-05, "loss": 0.9156, "step": 2100},
    {"epoch": 1.8462043316560928, "grad_norm": 0.7545862793922424, "learning_rate": 3.861155960042057e-05, "loss": 0.877, "step": 2110},
    {"epoch": 1.8549551520455043, "grad_norm": 0.8433863520622253, "learning_rate": 3.8116049065412864e-05, "loss": 0.9107, "step": 2120},
    {"epoch": 1.8637059724349156, "grad_norm": 0.7610001564025879, "learning_rate": 3.762177251514218e-05, "loss": 0.9068, "step": 2130},
    {"epoch": 1.8724567928243272, "grad_norm": 0.7452635765075684, "learning_rate": 3.712878127342515e-05, "loss": 0.9113, "step": 2140},
    {"epoch": 1.8812076132137387, "grad_norm": 0.7530847787857056, "learning_rate": 3.663712653061682e-05, "loss": 0.8881, "step": 2150},
    {"epoch": 1.8899584336031503, "grad_norm": 0.7940835356712341, "learning_rate": 3.614685933829525e-05, "loss": 0.8807, "step": 2160},
    {"epoch": 1.8987092539925619, "grad_norm": 0.7291942834854126, "learning_rate": 3.5658030603960436e-05, "loss": 0.9112, "step": 2170},
    {"epoch": 1.9074600743819734, "grad_norm": 0.7321741580963135, "learning_rate": 3.5170691085748324e-05, "loss": 0.8835, "step": 2180},
    {"epoch": 1.916210894771385, "grad_norm": 0.7166718244552612, "learning_rate": 3.468489138716029e-05, "loss": 0.9115, "step": 2190},
    {"epoch": 1.9249617151607963, "grad_norm": 0.741908848285675, "learning_rate": 3.42006819518086e-05, "loss": 0.9198, "step": 2200},
    {"epoch": 1.9337125355502078, "grad_norm": 0.8034552931785583, "learning_rate": 3.371811305817858e-05, "loss": 0.8952, "step": 2210},
    {"epoch": 1.9424633559396194, "grad_norm": 0.6716712713241577, "learning_rate": 3.32372348144079e-05, "loss": 0.9197, "step": 2220},
    {"epoch": 1.9512141763290307, "grad_norm": 0.6731935739517212, "learning_rate": 3.275809715308349e-05, "loss": 0.8949, "step": 2230},
    {"epoch": 1.9599649967184423, "grad_norm": 0.7521949410438538, "learning_rate": 3.228074982605673e-05, "loss": 0.8949, "step": 2240},
    {"epoch": 1.9687158171078538, "grad_norm": 0.6889147162437439, "learning_rate": 3.180524239927749e-05, "loss": 0.8941, "step": 2250},
    {"epoch": 1.9774666374972654, "grad_norm": 0.7169541716575623, "learning_rate": 3.1331624247647276e-05, "loss": 0.8922, "step": 2260},
    {"epoch": 1.986217457886677, "grad_norm": 0.7812347412109375, "learning_rate": 3.0859944549892325e-05, "loss": 0.8915, "step": 2270},
    {"epoch": 1.9949682782760885, "grad_norm": 0.7609400153160095, "learning_rate": 3.039025228345725e-05, "loss": 0.9227, "step": 2280}
  ],
| "logging_steps": 10, | |
| "max_steps": 3426, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500.0, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.5506578379505664e+17, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |