{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1871,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005344735435595938,
      "grad_norm": 23.61518103763694,
      "learning_rate": 5.319148936170213e-08,
      "loss": 1.3621,
      "step": 1
    },
    {
      "epoch": 0.002672367717797969,
      "grad_norm": 23.52338386367116,
      "learning_rate": 2.6595744680851066e-07,
      "loss": 1.3454,
      "step": 5
    },
    {
      "epoch": 0.005344735435595938,
      "grad_norm": 15.902737248388277,
      "learning_rate": 5.319148936170213e-07,
      "loss": 1.3168,
      "step": 10
    },
    {
      "epoch": 0.008017103153393906,
      "grad_norm": 11.62038726971463,
      "learning_rate": 7.97872340425532e-07,
      "loss": 1.1663,
      "step": 15
    },
    {
      "epoch": 0.010689470871191877,
      "grad_norm": 10.50913201186014,
      "learning_rate": 1.0638297872340427e-06,
      "loss": 1.0566,
      "step": 20
    },
    {
      "epoch": 0.013361838588989846,
      "grad_norm": 3.560582254809462,
      "learning_rate": 1.3297872340425533e-06,
      "loss": 0.9328,
      "step": 25
    },
    {
      "epoch": 0.016034206306787813,
      "grad_norm": 3.318390385053047,
      "learning_rate": 1.595744680851064e-06,
      "loss": 0.8966,
      "step": 30
    },
    {
      "epoch": 0.01870657402458578,
      "grad_norm": 2.9993400845793654,
      "learning_rate": 1.8617021276595745e-06,
      "loss": 0.8769,
      "step": 35
    },
    {
      "epoch": 0.021378941742383754,
      "grad_norm": 2.9199745023125967,
      "learning_rate": 2.1276595744680853e-06,
      "loss": 0.8607,
      "step": 40
    },
    {
      "epoch": 0.024051309460181722,
      "grad_norm": 2.9991036135574545,
      "learning_rate": 2.393617021276596e-06,
      "loss": 0.8516,
      "step": 45
    },
    {
      "epoch": 0.02672367717797969,
      "grad_norm": 2.834663670092906,
      "learning_rate": 2.6595744680851065e-06,
      "loss": 0.8328,
      "step": 50
    },
    {
      "epoch": 0.02939604489577766,
      "grad_norm": 3.071674166085022,
      "learning_rate": 2.9255319148936174e-06,
      "loss": 0.8212,
      "step": 55
    },
    {
      "epoch": 0.032068412613575625,
      "grad_norm": 3.0481603346640704,
      "learning_rate": 3.191489361702128e-06,
      "loss": 0.8242,
      "step": 60
    },
    {
      "epoch": 0.034740780331373594,
      "grad_norm": 3.0857566743029907,
      "learning_rate": 3.457446808510639e-06,
      "loss": 0.8045,
      "step": 65
    },
    {
      "epoch": 0.03741314804917156,
      "grad_norm": 3.2871683892802004,
      "learning_rate": 3.723404255319149e-06,
      "loss": 0.7966,
      "step": 70
    },
    {
      "epoch": 0.04008551576696953,
      "grad_norm": 3.2034895167525277,
      "learning_rate": 3.98936170212766e-06,
      "loss": 0.7806,
      "step": 75
    },
    {
      "epoch": 0.04275788348476751,
      "grad_norm": 3.226417198218916,
      "learning_rate": 4.255319148936171e-06,
      "loss": 0.7729,
      "step": 80
    },
    {
      "epoch": 0.045430251202565476,
      "grad_norm": 3.170786690584355,
      "learning_rate": 4.521276595744681e-06,
      "loss": 0.7828,
      "step": 85
    },
    {
      "epoch": 0.048102618920363445,
      "grad_norm": 3.0895839104025717,
      "learning_rate": 4.787234042553192e-06,
      "loss": 0.7667,
      "step": 90
    },
    {
      "epoch": 0.050774986638161414,
      "grad_norm": 3.192938409188057,
      "learning_rate": 5.053191489361703e-06,
      "loss": 0.7595,
      "step": 95
    },
    {
      "epoch": 0.05344735435595938,
      "grad_norm": 3.206609902202038,
      "learning_rate": 5.319148936170213e-06,
      "loss": 0.7555,
      "step": 100
    },
    {
      "epoch": 0.05611972207375735,
      "grad_norm": 3.175589519512037,
      "learning_rate": 5.5851063829787235e-06,
      "loss": 0.7457,
      "step": 105
    },
    {
      "epoch": 0.05879208979155532,
      "grad_norm": 3.0405783301327975,
      "learning_rate": 5.851063829787235e-06,
      "loss": 0.7452,
      "step": 110
    },
    {
      "epoch": 0.06146445750935329,
      "grad_norm": 2.8250036844985584,
      "learning_rate": 6.117021276595745e-06,
      "loss": 0.7378,
      "step": 115
    },
    {
      "epoch": 0.06413682522715125,
      "grad_norm": 3.1222586810818775,
      "learning_rate": 6.382978723404256e-06,
      "loss": 0.7337,
      "step": 120
    },
    {
      "epoch": 0.06680919294494922,
      "grad_norm": 2.8888701910636425,
      "learning_rate": 6.648936170212767e-06,
      "loss": 0.7321,
      "step": 125
    },
    {
      "epoch": 0.06948156066274719,
      "grad_norm": 3.0481545986847034,
      "learning_rate": 6.914893617021278e-06,
      "loss": 0.7356,
      "step": 130
    },
    {
      "epoch": 0.07215392838054516,
      "grad_norm": 2.9221465817003316,
      "learning_rate": 7.1808510638297875e-06,
      "loss": 0.7163,
      "step": 135
    },
    {
      "epoch": 0.07482629609834313,
      "grad_norm": 2.823797619258334,
      "learning_rate": 7.446808510638298e-06,
      "loss": 0.7179,
      "step": 140
    },
    {
      "epoch": 0.0774986638161411,
      "grad_norm": 2.974797988982762,
      "learning_rate": 7.71276595744681e-06,
      "loss": 0.7083,
      "step": 145
    },
    {
      "epoch": 0.08017103153393906,
      "grad_norm": 3.1868031201090252,
      "learning_rate": 7.97872340425532e-06,
      "loss": 0.7223,
      "step": 150
    },
    {
      "epoch": 0.08284339925173703,
      "grad_norm": 3.313855919178953,
      "learning_rate": 8.24468085106383e-06,
      "loss": 0.7223,
      "step": 155
    },
    {
      "epoch": 0.08551576696953501,
      "grad_norm": 2.917840787459801,
      "learning_rate": 8.510638297872341e-06,
      "loss": 0.7195,
      "step": 160
    },
    {
      "epoch": 0.08818813468733298,
      "grad_norm": 2.770307352801897,
      "learning_rate": 8.776595744680852e-06,
      "loss": 0.7235,
      "step": 165
    },
    {
      "epoch": 0.09086050240513095,
      "grad_norm": 2.765197627364069,
      "learning_rate": 9.042553191489362e-06,
      "loss": 0.7094,
      "step": 170
    },
    {
      "epoch": 0.09353287012292892,
      "grad_norm": 2.9219350077214283,
      "learning_rate": 9.308510638297872e-06,
      "loss": 0.6998,
      "step": 175
    },
    {
      "epoch": 0.09620523784072689,
      "grad_norm": 3.1415213219167293,
      "learning_rate": 9.574468085106385e-06,
      "loss": 0.7102,
      "step": 180
    },
    {
      "epoch": 0.09887760555852486,
      "grad_norm": 2.7242573109350645,
      "learning_rate": 9.840425531914895e-06,
      "loss": 0.7228,
      "step": 185
    },
    {
      "epoch": 0.10154997327632283,
      "grad_norm": 2.859985126301972,
      "learning_rate": 9.99996515575899e-06,
      "loss": 0.7254,
      "step": 190
    },
    {
      "epoch": 0.1042223409941208,
      "grad_norm": 2.665798747814334,
      "learning_rate": 9.99957316362496e-06,
      "loss": 0.7109,
      "step": 195
    },
    {
      "epoch": 0.10689470871191876,
      "grad_norm": 2.9076197629295817,
      "learning_rate": 9.998745658315924e-06,
      "loss": 0.6989,
      "step": 200
    },
    {
      "epoch": 0.10956707642971673,
      "grad_norm": 2.8125731981919047,
      "learning_rate": 9.997482711915926e-06,
      "loss": 0.718,
      "step": 205
    },
    {
      "epoch": 0.1122394441475147,
      "grad_norm": 2.6725574630640594,
      "learning_rate": 9.99578443444032e-06,
      "loss": 0.7112,
      "step": 210
    },
    {
      "epoch": 0.11491181186531267,
      "grad_norm": 2.8593331201166152,
      "learning_rate": 9.993650973826177e-06,
      "loss": 0.7091,
      "step": 215
    },
    {
      "epoch": 0.11758417958311064,
      "grad_norm": 2.6883306424483506,
      "learning_rate": 9.991082515919402e-06,
      "loss": 0.6982,
      "step": 220
    },
    {
      "epoch": 0.12025654730090861,
      "grad_norm": 2.786493474222006,
      "learning_rate": 9.988079284458547e-06,
      "loss": 0.6956,
      "step": 225
    },
    {
      "epoch": 0.12292891501870658,
      "grad_norm": 2.576508494305329,
      "learning_rate": 9.98464154105532e-06,
      "loss": 0.6903,
      "step": 230
    },
    {
      "epoch": 0.12560128273650453,
      "grad_norm": 2.7285477172023342,
      "learning_rate": 9.980769585171795e-06,
      "loss": 0.69,
      "step": 235
    },
    {
      "epoch": 0.1282736504543025,
      "grad_norm": 2.6675103293265545,
      "learning_rate": 9.976463754094321e-06,
      "loss": 0.6978,
      "step": 240
    },
    {
      "epoch": 0.13094601817210047,
      "grad_norm": 2.7330300721548664,
      "learning_rate": 9.971724422904154e-06,
      "loss": 0.6841,
      "step": 245
    },
    {
      "epoch": 0.13361838588989844,
      "grad_norm": 2.7271057104317804,
      "learning_rate": 9.966552004444772e-06,
      "loss": 0.6969,
      "step": 250
    },
    {
      "epoch": 0.1362907536076964,
      "grad_norm": 2.578197568780461,
      "learning_rate": 9.960946949285915e-06,
      "loss": 0.6991,
      "step": 255
    },
    {
      "epoch": 0.13896312132549438,
      "grad_norm": 2.610755637023348,
      "learning_rate": 9.954909745684339e-06,
      "loss": 0.7001,
      "step": 260
    },
    {
      "epoch": 0.14163548904329235,
      "grad_norm": 2.656700773061203,
      "learning_rate": 9.948440919541277e-06,
      "loss": 0.6793,
      "step": 265
    },
    {
      "epoch": 0.14430785676109031,
      "grad_norm": 2.714010328000571,
      "learning_rate": 9.94154103435664e-06,
      "loss": 0.6854,
      "step": 270
    },
    {
      "epoch": 0.14698022447888828,
      "grad_norm": 2.5795427101760686,
      "learning_rate": 9.934210691179918e-06,
      "loss": 0.6705,
      "step": 275
    },
    {
      "epoch": 0.14965259219668625,
      "grad_norm": 2.644406518293481,
      "learning_rate": 9.926450528557828e-06,
      "loss": 0.6872,
      "step": 280
    },
    {
      "epoch": 0.15232495991448422,
      "grad_norm": 2.626026852267855,
      "learning_rate": 9.918261222478687e-06,
      "loss": 0.6921,
      "step": 285
    },
    {
      "epoch": 0.1549973276322822,
      "grad_norm": 2.675148415559724,
      "learning_rate": 9.909643486313533e-06,
      "loss": 0.6629,
      "step": 290
    },
    {
      "epoch": 0.15766969535008016,
      "grad_norm": 2.6288509142094734,
      "learning_rate": 9.900598070753981e-06,
      "loss": 0.6742,
      "step": 295
    },
    {
      "epoch": 0.16034206306787813,
      "grad_norm": 2.607213205803097,
      "learning_rate": 9.891125763746824e-06,
      "loss": 0.6734,
      "step": 300
    },
    {
      "epoch": 0.1630144307856761,
      "grad_norm": 2.699217392639012,
      "learning_rate": 9.881227390425404e-06,
      "loss": 0.6725,
      "step": 305
    },
    {
      "epoch": 0.16568679850347406,
      "grad_norm": 2.500608790665497,
      "learning_rate": 9.87090381303772e-06,
      "loss": 0.6782,
      "step": 310
    },
    {
      "epoch": 0.16835916622127206,
      "grad_norm": 2.4988371088586443,
      "learning_rate": 9.860155930871341e-06,
      "loss": 0.6643,
      "step": 315
    },
    {
      "epoch": 0.17103153393907003,
      "grad_norm": 2.49890305345079,
      "learning_rate": 9.848984680175049e-06,
      "loss": 0.6561,
      "step": 320
    },
    {
      "epoch": 0.173703901656868,
      "grad_norm": 2.4436899486571733,
      "learning_rate": 9.837391034077286e-06,
      "loss": 0.6623,
      "step": 325
    },
    {
      "epoch": 0.17637626937466597,
      "grad_norm": 2.4799571818750605,
      "learning_rate": 9.825376002501393e-06,
      "loss": 0.6649,
      "step": 330
    },
    {
      "epoch": 0.17904863709246394,
      "grad_norm": 2.5658352982346204,
      "learning_rate": 9.812940632077629e-06,
      "loss": 0.6666,
      "step": 335
    },
    {
      "epoch": 0.1817210048102619,
      "grad_norm": 2.5038692575935007,
      "learning_rate": 9.800086006051996e-06,
      "loss": 0.6681,
      "step": 340
    },
    {
      "epoch": 0.18439337252805987,
      "grad_norm": 2.4140997941308586,
      "learning_rate": 9.786813244191885e-06,
      "loss": 0.6603,
      "step": 345
    },
    {
      "epoch": 0.18706574024585784,
      "grad_norm": 2.4461106422469387,
      "learning_rate": 9.773123502688532e-06,
      "loss": 0.6615,
      "step": 350
    },
    {
      "epoch": 0.1897381079636558,
      "grad_norm": 2.4373544953494926,
      "learning_rate": 9.759017974056292e-06,
      "loss": 0.6496,
      "step": 355
    },
    {
      "epoch": 0.19241047568145378,
      "grad_norm": 2.48009393879711,
      "learning_rate": 9.744497887028774e-06,
      "loss": 0.6507,
      "step": 360
    },
    {
      "epoch": 0.19508284339925175,
      "grad_norm": 2.420896783498181,
      "learning_rate": 9.729564506451791e-06,
      "loss": 0.6499,
      "step": 365
    },
    {
      "epoch": 0.19775521111704972,
      "grad_norm": 2.4342500952426223,
      "learning_rate": 9.714219133173194e-06,
      "loss": 0.6545,
      "step": 370
    },
    {
      "epoch": 0.20042757883484769,
      "grad_norm": 2.726231096446901,
      "learning_rate": 9.698463103929542e-06,
      "loss": 0.6384,
      "step": 375
    },
    {
      "epoch": 0.20309994655264565,
      "grad_norm": 2.365009266839175,
      "learning_rate": 9.682297791229668e-06,
      "loss": 0.6583,
      "step": 380
    },
    {
      "epoch": 0.20577231427044362,
      "grad_norm": 2.390159536711186,
      "learning_rate": 9.665724603235115e-06,
      "loss": 0.6303,
      "step": 385
    },
    {
      "epoch": 0.2084446819882416,
      "grad_norm": 2.527701453079277,
      "learning_rate": 9.648744983637471e-06,
      "loss": 0.6569,
      "step": 390
    },
    {
      "epoch": 0.21111704970603956,
      "grad_norm": 2.417799402235962,
      "learning_rate": 9.631360411532609e-06,
      "loss": 0.6325,
      "step": 395
    },
    {
      "epoch": 0.21378941742383753,
      "grad_norm": 2.4529437190653884,
      "learning_rate": 9.61357240129185e-06,
      "loss": 0.6577,
      "step": 400
    },
    {
      "epoch": 0.2164617851416355,
      "grad_norm": 2.2689373207444725,
      "learning_rate": 9.59538250243003e-06,
      "loss": 0.6297,
      "step": 405
    },
    {
      "epoch": 0.21913415285943347,
      "grad_norm": 2.5536672740881223,
      "learning_rate": 9.576792299470537e-06,
      "loss": 0.6344,
      "step": 410
    },
    {
      "epoch": 0.22180652057723144,
      "grad_norm": 2.592880004163201,
      "learning_rate": 9.557803411807283e-06,
      "loss": 0.6298,
      "step": 415
    },
    {
      "epoch": 0.2244788882950294,
      "grad_norm": 2.3977529842883167,
      "learning_rate": 9.538417493563621e-06,
      "loss": 0.6235,
      "step": 420
    },
    {
      "epoch": 0.22715125601282737,
      "grad_norm": 2.409144104508164,
      "learning_rate": 9.518636233448276e-06,
      "loss": 0.6312,
      "step": 425
    },
    {
      "epoch": 0.22982362373062534,
      "grad_norm": 2.345148626579648,
      "learning_rate": 9.498461354608228e-06,
      "loss": 0.6335,
      "step": 430
    },
    {
      "epoch": 0.2324959914484233,
      "grad_norm": 2.7130782463024823,
      "learning_rate": 9.47789461447861e-06,
      "loss": 0.6402,
      "step": 435
    },
    {
      "epoch": 0.23516835916622128,
      "grad_norm": 2.4306200930423363,
      "learning_rate": 9.456937804629623e-06,
      "loss": 0.6199,
      "step": 440
    },
    {
      "epoch": 0.23784072688401925,
      "grad_norm": 2.3667962521395154,
      "learning_rate": 9.435592750610469e-06,
      "loss": 0.6249,
      "step": 445
    },
    {
      "epoch": 0.24051309460181722,
      "grad_norm": 2.3445164292760503,
      "learning_rate": 9.413861311790327e-06,
      "loss": 0.635,
      "step": 450
    },
    {
      "epoch": 0.2431854623196152,
      "grad_norm": 2.408243915012667,
      "learning_rate": 9.391745381196382e-06,
      "loss": 0.6199,
      "step": 455
    },
    {
      "epoch": 0.24585783003741316,
      "grad_norm": 2.7759339062456228,
      "learning_rate": 9.369246885348926e-06,
      "loss": 0.6226,
      "step": 460
    },
    {
      "epoch": 0.24853019775521112,
      "grad_norm": 2.3972288359532863,
      "learning_rate": 9.346367784093538e-06,
      "loss": 0.616,
      "step": 465
    },
    {
      "epoch": 0.25120256547300907,
      "grad_norm": 2.4374485416456135,
      "learning_rate": 9.32311007043036e-06,
      "loss": 0.6081,
      "step": 470
    },
    {
      "epoch": 0.25387493319080706,
      "grad_norm": 2.3942250427385177,
      "learning_rate": 9.299475770340492e-06,
      "loss": 0.6166,
      "step": 475
    },
    {
      "epoch": 0.256547300908605,
      "grad_norm": 2.4289153537652948,
      "learning_rate": 9.275466942609495e-06,
      "loss": 0.6334,
      "step": 480
    },
    {
      "epoch": 0.259219668626403,
      "grad_norm": 2.367943437037186,
      "learning_rate": 9.251085678648072e-06,
      "loss": 0.608,
      "step": 485
    },
    {
      "epoch": 0.26189203634420094,
      "grad_norm": 2.498532655161112,
      "learning_rate": 9.226334102309862e-06,
      "loss": 0.6058,
      "step": 490
    },
    {
      "epoch": 0.26456440406199894,
      "grad_norm": 2.390835195956499,
      "learning_rate": 9.201214369706448e-06,
      "loss": 0.6195,
      "step": 495
    },
    {
      "epoch": 0.2672367717797969,
      "grad_norm": 2.3875744730291304,
      "learning_rate": 9.17572866901953e-06,
      "loss": 0.6111,
      "step": 500
    },
    {
      "epoch": 0.2699091394975949,
      "grad_norm": 2.433008431887238,
      "learning_rate": 9.14987922031031e-06,
      "loss": 0.6147,
      "step": 505
    },
    {
      "epoch": 0.2725815072153928,
      "grad_norm": 2.304360833496131,
      "learning_rate": 9.123668275326113e-06,
      "loss": 0.6007,
      "step": 510
    },
    {
      "epoch": 0.2752538749331908,
      "grad_norm": 2.455168124864067,
      "learning_rate": 9.097098117304223e-06,
      "loss": 0.5946,
      "step": 515
    },
    {
      "epoch": 0.27792624265098875,
      "grad_norm": 2.3757853193488483,
      "learning_rate": 9.070171060773007e-06,
      "loss": 0.5796,
      "step": 520
    },
    {
      "epoch": 0.28059861036878675,
      "grad_norm": 2.438156658306187,
      "learning_rate": 9.042889451350274e-06,
      "loss": 0.6095,
      "step": 525
    },
    {
      "epoch": 0.2832709780865847,
      "grad_norm": 2.3933478156476267,
      "learning_rate": 9.015255665538972e-06,
      "loss": 0.6138,
      "step": 530
    },
    {
      "epoch": 0.2859433458043827,
      "grad_norm": 2.4102250220370918,
      "learning_rate": 8.987272110520154e-06,
      "loss": 0.5949,
      "step": 535
    },
    {
      "epoch": 0.28861571352218063,
      "grad_norm": 2.3405736149434766,
      "learning_rate": 8.958941223943292e-06,
      "loss": 0.609,
      "step": 540
    },
    {
      "epoch": 0.2912880812399786,
      "grad_norm": 2.508552589080363,
      "learning_rate": 8.930265473713939e-06,
      "loss": 0.6083,
      "step": 545
    },
    {
      "epoch": 0.29396044895777657,
      "grad_norm": 2.362031389976222,
      "learning_rate": 8.901247357778742e-06,
      "loss": 0.5972,
      "step": 550
    },
    {
      "epoch": 0.29663281667557456,
      "grad_norm": 2.400704295368671,
      "learning_rate": 8.871889403907853e-06,
      "loss": 0.5953,
      "step": 555
    },
    {
      "epoch": 0.2993051843933725,
      "grad_norm": 2.4672308114305914,
      "learning_rate": 8.842194169474727e-06,
      "loss": 0.5975,
      "step": 560
    },
    {
      "epoch": 0.3019775521111705,
      "grad_norm": 2.4217091412097798,
      "learning_rate": 8.812164241233354e-06,
      "loss": 0.5865,
      "step": 565
    },
    {
      "epoch": 0.30464991982896844,
      "grad_norm": 2.4016221326031246,
      "learning_rate": 8.781802235092927e-06,
      "loss": 0.5853,
      "step": 570
    },
    {
      "epoch": 0.30732228754676644,
      "grad_norm": 2.4448063152595947,
      "learning_rate": 8.751110795889966e-06,
      "loss": 0.5846,
      "step": 575
    },
    {
      "epoch": 0.3099946552645644,
      "grad_norm": 2.4101255235456365,
      "learning_rate": 8.72009259715793e-06,
      "loss": 0.5826,
      "step": 580
    },
    {
      "epoch": 0.3126670229823624,
      "grad_norm": 2.3229216437391367,
      "learning_rate": 8.688750340894324e-06,
      "loss": 0.6094,
      "step": 585
    },
    {
      "epoch": 0.3153393907001603,
      "grad_norm": 2.364943931043124,
      "learning_rate": 8.657086757325328e-06,
      "loss": 0.5968,
      "step": 590
    },
    {
      "epoch": 0.3180117584179583,
      "grad_norm": 2.4893765104704952,
      "learning_rate": 8.625104604667965e-06,
      "loss": 0.5721,
      "step": 595
    },
    {
      "epoch": 0.32068412613575625,
      "grad_norm": 2.4524834063054386,
      "learning_rate": 8.592806668889835e-06,
      "loss": 0.567,
      "step": 600
    },
    {
      "epoch": 0.32335649385355425,
      "grad_norm": 2.4028879963718324,
      "learning_rate": 8.560195763466428e-06,
      "loss": 0.5931,
      "step": 605
    },
    {
      "epoch": 0.3260288615713522,
      "grad_norm": 2.3843093171732157,
      "learning_rate": 8.527274729136042e-06,
      "loss": 0.5843,
      "step": 610
    },
    {
      "epoch": 0.3287012292891502,
      "grad_norm": 2.3502440673042577,
      "learning_rate": 8.494046433652327e-06,
      "loss": 0.5738,
      "step": 615
    },
    {
      "epoch": 0.33137359700694813,
      "grad_norm": 2.284954690172926,
      "learning_rate": 8.460513771534475e-06,
      "loss": 0.559,
      "step": 620
    },
    {
      "epoch": 0.3340459647247461,
      "grad_norm": 2.3612010873776414,
      "learning_rate": 8.426679663815073e-06,
      "loss": 0.5918,
      "step": 625
    },
    {
      "epoch": 0.3367183324425441,
      "grad_norm": 2.161417560267832,
      "learning_rate": 8.392547057785662e-06,
      "loss": 0.5483,
      "step": 630
    },
    {
      "epoch": 0.33939070016034206,
      "grad_norm": 2.4200810566750306,
      "learning_rate": 8.358118926739984e-06,
      "loss": 0.5514,
      "step": 635
    },
    {
      "epoch": 0.34206306787814006,
      "grad_norm": 2.497752095251993,
      "learning_rate": 8.323398269714994e-06,
      "loss": 0.5798,
      "step": 640
    },
    {
      "epoch": 0.344735435595938,
      "grad_norm": 2.4247160367906413,
      "learning_rate": 8.288388111229601e-06,
      "loss": 0.5579,
      "step": 645
    },
    {
      "epoch": 0.347407803313736,
      "grad_norm": 2.4629425257040034,
      "learning_rate": 8.25309150102121e-06,
      "loss": 0.5599,
      "step": 650
    },
    {
      "epoch": 0.35008017103153394,
      "grad_norm": 2.3769509731214855,
      "learning_rate": 8.217511513780056e-06,
      "loss": 0.5806,
      "step": 655
    },
    {
      "epoch": 0.35275253874933193,
      "grad_norm": 2.327564053302344,
      "learning_rate": 8.181651248881364e-06,
      "loss": 0.5533,
      "step": 660
    },
    {
      "epoch": 0.3554249064671299,
      "grad_norm": 2.621051043502927,
      "learning_rate": 8.145513830115367e-06,
      "loss": 0.5521,
      "step": 665
    },
    {
      "epoch": 0.35809727418492787,
      "grad_norm": 2.2454195387127065,
      "learning_rate": 8.109102405415195e-06,
      "loss": 0.5569,
      "step": 670
    },
    {
      "epoch": 0.3607696419027258,
      "grad_norm": 2.4656771958068373,
      "learning_rate": 8.072420146582649e-06,
      "loss": 0.5661,
      "step": 675
    },
    {
      "epoch": 0.3634420096205238,
      "grad_norm": 2.3668021071793977,
      "learning_rate": 8.035470249011916e-06,
      "loss": 0.5713,
      "step": 680
    },
    {
      "epoch": 0.36611437733832175,
      "grad_norm": 2.461540400642502,
      "learning_rate": 7.998255931411208e-06,
      "loss": 0.548,
      "step": 685
    },
    {
      "epoch": 0.36878674505611975,
      "grad_norm": 2.3787889057132805,
      "learning_rate": 7.960780435522387e-06,
      "loss": 0.5583,
      "step": 690
    },
    {
      "epoch": 0.3714591127739177,
      "grad_norm": 2.43169471297202,
      "learning_rate": 7.923047025838573e-06,
      "loss": 0.5624,
      "step": 695
    },
    {
      "epoch": 0.3741314804917157,
      "grad_norm": 2.5825683747421486,
      "learning_rate": 7.885058989319776e-06,
      "loss": 0.5597,
      "step": 700
    },
    {
      "epoch": 0.3768038482095136,
      "grad_norm": 2.2942237910435876,
      "learning_rate": 7.846819635106569e-06,
      "loss": 0.5468,
      "step": 705
    },
    {
      "epoch": 0.3794762159273116,
      "grad_norm": 2.309892265310505,
      "learning_rate": 7.808332294231824e-06,
      "loss": 0.5421,
      "step": 710
    },
    {
      "epoch": 0.38214858364510956,
      "grad_norm": 2.5953061845305445,
      "learning_rate": 7.769600319330553e-06,
      "loss": 0.5466,
      "step": 715
    },
    {
      "epoch": 0.38482095136290756,
      "grad_norm": 2.290660724345896,
      "learning_rate": 7.73062708434785e-06,
      "loss": 0.5549,
      "step": 720
    },
    {
      "epoch": 0.3874933190807055,
      "grad_norm": 2.4134172892827466,
      "learning_rate": 7.691415984244998e-06,
      "loss": 0.5573,
      "step": 725
    },
    {
      "epoch": 0.3901656867985035,
      "grad_norm": 2.43710642014278,
      "learning_rate": 7.651970434703724e-06,
      "loss": 0.528,
      "step": 730
    },
    {
      "epoch": 0.39283805451630144,
      "grad_norm": 2.4549602694531165,
      "learning_rate": 7.612293871828662e-06,
      "loss": 0.5209,
      "step": 735
    },
    {
      "epoch": 0.39551042223409943,
      "grad_norm": 2.472644139489206,
      "learning_rate": 7.572389751848037e-06,
      "loss": 0.5274,
      "step": 740
    },
    {
      "epoch": 0.3981827899518974,
      "grad_norm": 2.3768528891485317,
      "learning_rate": 7.532261550812585e-06,
      "loss": 0.5218,
      "step": 745
    },
    {
      "epoch": 0.40085515766969537,
      "grad_norm": 2.525268737076288,
      "learning_rate": 7.491912764292764e-06,
      "loss": 0.5285,
      "step": 750
    },
    {
      "epoch": 0.4035275253874933,
      "grad_norm": 2.3135932700157515,
      "learning_rate": 7.451346907074245e-06,
      "loss": 0.5383,
      "step": 755
    },
    {
      "epoch": 0.4061998931052913,
      "grad_norm": 2.2820011368190456,
      "learning_rate": 7.4105675128517456e-06,
      "loss": 0.5386,
      "step": 760
    },
    {
      "epoch": 0.40887226082308925,
      "grad_norm": 2.6272733311462844,
      "learning_rate": 7.369578133921205e-06,
      "loss": 0.5403,
      "step": 765
    },
    {
      "epoch": 0.41154462854088725,
      "grad_norm": 2.450459988951531,
      "learning_rate": 7.3283823408703466e-06,
      "loss": 0.5329,
      "step": 770
    },
    {
      "epoch": 0.4142169962586852,
      "grad_norm": 2.467816462124768,
      "learning_rate": 7.2869837222676445e-06,
      "loss": 0.5153,
      "step": 775
    },
    {
      "epoch": 0.4168893639764832,
      "grad_norm": 2.3951509707530962,
      "learning_rate": 7.245385884349716e-06,
      "loss": 0.5419,
      "step": 780
    },
    {
      "epoch": 0.4195617316942811,
      "grad_norm": 2.345171358544047,
      "learning_rate": 7.203592450707193e-06,
      "loss": 0.5221,
      "step": 785
    },
    {
      "epoch": 0.4222340994120791,
      "grad_norm": 2.37034917287765,
      "learning_rate": 7.161607061969061e-06,
      "loss": 0.5269,
      "step": 790
    },
    {
      "epoch": 0.42490646712987706,
      "grad_norm": 2.3541847590463774,
      "learning_rate": 7.119433375485527e-06,
      "loss": 0.51,
      "step": 795
    },
    {
      "epoch": 0.42757883484767506,
      "grad_norm": 2.4956945735597746,
      "learning_rate": 7.0770750650094335e-06,
      "loss": 0.529,
      "step": 800
    },
    {
      "epoch": 0.430251202565473,
      "grad_norm": 2.228633956760255,
      "learning_rate": 7.034535820376225e-06,
      "loss": 0.5209,
      "step": 805
    },
    {
      "epoch": 0.432923570283271,
      "grad_norm": 2.3668330274270675,
      "learning_rate": 6.991819347182536e-06,
      "loss": 0.5217,
      "step": 810
    },
    {
      "epoch": 0.43559593800106894,
      "grad_norm": 2.440206726959897,
      "learning_rate": 6.948929366463397e-06,
      "loss": 0.5188,
      "step": 815
    },
    {
      "epoch": 0.43826830571886694,
      "grad_norm": 2.281413045254601,
      "learning_rate": 6.9058696143680895e-06,
      "loss": 0.5048,
      "step": 820
    },
    {
      "epoch": 0.4409406734366649,
      "grad_norm": 2.2528776393400407,
      "learning_rate": 6.862643841834686e-06,
      "loss": 0.4963,
      "step": 825
    },
    {
      "epoch": 0.4436130411544629,
      "grad_norm": 2.298335166855743,
      "learning_rate": 6.8192558142633215e-06,
      "loss": 0.5167,
      "step": 830
    },
    {
      "epoch": 0.4462854088722608,
      "grad_norm": 2.3617346093820353,
      "learning_rate": 6.77570931118817e-06,
      "loss": 0.5038,
      "step": 835
    },
    {
      "epoch": 0.4489577765900588,
      "grad_norm": 2.3822373788381577,
      "learning_rate": 6.732008125948223e-06,
      "loss": 0.5025,
      "step": 840
    },
    {
      "epoch": 0.45163014430785675,
      "grad_norm": 2.5581676340917046,
      "learning_rate": 6.688156065356845e-06,
      "loss": 0.5054,
      "step": 845
    },
    {
      "epoch": 0.45430251202565475,
      "grad_norm": 2.5460356769272354,
      "learning_rate": 6.644156949370162e-06,
      "loss": 0.517,
      "step": 850
    },
    {
      "epoch": 0.4569748797434527,
      "grad_norm": 2.3347577676503635,
      "learning_rate": 6.600014610754306e-06,
      "loss": 0.503,
      "step": 855
    },
    {
      "epoch": 0.4596472474612507,
      "grad_norm": 2.457882693998114,
      "learning_rate": 6.555732894751548e-06,
      "loss": 0.5051,
      "step": 860
    },
    {
      "epoch": 0.4623196151790486,
      "grad_norm": 2.616065387749319,
      "learning_rate": 6.511315658745323e-06,
      "loss": 0.501,
      "step": 865
    },
    {
      "epoch": 0.4649919828968466,
      "grad_norm": 2.348479251904561,
      "learning_rate": 6.466766771924231e-06,
      "loss": 0.5113,
      "step": 870
    },
    {
      "epoch": 0.46766435061464456,
      "grad_norm": 2.4230859156724414,
      "learning_rate": 6.422090114944982e-06,
      "loss": 0.4974,
      "step": 875
    },
    {
      "epoch": 0.47033671833244256,
      "grad_norm": 2.2514326581575785,
      "learning_rate": 6.377289579594355e-06,
      "loss": 0.4922,
      "step": 880
    },
    {
      "epoch": 0.4730090860502405,
      "grad_norm": 2.3642088954621685,
      "learning_rate": 6.332369068450175e-06,
      "loss": 0.5041,
      "step": 885
    },
    {
      "epoch": 0.4756814537680385,
      "grad_norm": 2.508894790656293,
      "learning_rate": 6.28733249454138e-06,
      "loss": 0.5047,
      "step": 890
    },
    {
      "epoch": 0.47835382148583644,
      "grad_norm": 2.3371495180507122,
      "learning_rate": 6.242183781007132e-06,
      "loss": 0.5009,
      "step": 895
    },
    {
      "epoch": 0.48102618920363444,
      "grad_norm": 2.3182291284740013,
      "learning_rate": 6.196926860755088e-06,
      "loss": 0.4884,
      "step": 900
    },
    {
      "epoch": 0.4836985569214324,
      "grad_norm": 2.436532921181038,
      "learning_rate": 6.151565676118805e-06,
      "loss": 0.4961,
      "step": 905
    },
    {
      "epoch": 0.4863709246392304,
      "grad_norm": 2.4599123658932647,
      "learning_rate": 6.106104178514309e-06,
      "loss": 0.5068,
      "step": 910
    },
    {
      "epoch": 0.4890432923570283,
      "grad_norm": 2.328143651976841,
      "learning_rate": 6.0605463280958995e-06,
      "loss": 0.4843,
      "step": 915
    },
    {
      "epoch": 0.4917156600748263,
      "grad_norm": 2.2758994279070808,
      "learning_rate": 6.014896093411181e-06,
      "loss": 0.4856,
      "step": 920
    },
    {
      "epoch": 0.49438802779262425,
      "grad_norm": 2.3653301550613484,
      "learning_rate": 5.9691574510553505e-06,
      "loss": 0.483,
      "step": 925
    },
    {
      "epoch": 0.49706039551042225,
      "grad_norm": 2.4152543932592248,
      "learning_rate": 5.923334385324809e-06,
      "loss": 0.4684,
      "step": 930
    },
    {
      "epoch": 0.4997327632282202,
      "grad_norm": 2.304808572699325,
      "learning_rate": 5.877430887870081e-06,
      "loss": 0.4765,
      "step": 935
    },
    {
      "epoch": 0.5024051309460181,
      "grad_norm": 2.357394956162013,
      "learning_rate": 5.831450957348106e-06,
      "loss": 0.4848,
      "step": 940
    },
    {
      "epoch": 0.5050774986638161,
      "grad_norm": 2.31372123260529,
      "learning_rate": 5.7853985990739115e-06,
      "loss": 0.4801,
      "step": 945
    },
    {
      "epoch": 0.5077498663816141,
      "grad_norm": 2.3349204861070225,
      "learning_rate": 5.739277824671711e-06,
      "loss": 0.4679,
      "step": 950
    },
    {
      "epoch": 0.5104222340994121,
      "grad_norm": 2.365618083997351,
      "learning_rate": 5.693092651725457e-06,
      "loss": 0.4858,
      "step": 955
    },
    {
      "epoch": 0.51309460181721,
      "grad_norm": 2.289899872529692,
      "learning_rate": 5.646847103428859e-06,
      "loss": 0.4733,
      "step": 960
    },
    {
      "epoch": 0.515766969535008,
      "grad_norm": 2.4225787265464374,
      "learning_rate": 5.600545208234927e-06,
      "loss": 0.4693,
      "step": 965
    },
    {
      "epoch": 0.518439337252806,
      "grad_norm": 2.409714687251355,
      "learning_rate": 5.5541909995050554e-06,
      "loss": 0.4768,
      "step": 970
    },
    {
      "epoch": 0.521111704970604,
      "grad_norm": 2.438137208132051,
      "learning_rate": 5.507788515157677e-06,
      "loss": 0.4688,
      "step": 975
    },
    {
      "epoch": 0.5237840726884019,
      "grad_norm": 2.3126195307394086,
      "learning_rate": 5.46134179731651e-06,
      "loss": 0.4838,
      "step": 980
    },
    {
      "epoch": 0.5264564404061999,
      "grad_norm": 2.431507168156365,
      "learning_rate": 5.414854891958464e-06,
      "loss": 0.4633,
      "step": 985
    },
    {
      "epoch": 0.5291288081239979,
      "grad_norm": 2.2665425075110117,
      "learning_rate": 5.368331848561178e-06,
      "loss": 0.4665,
      "step": 990
    },
    {
      "epoch": 0.5318011758417959,
      "grad_norm": 2.1898447124315097,
      "learning_rate": 5.321776719750283e-06,
      "loss": 0.4574,
      "step": 995
    },
    {
      "epoch": 0.5344735435595938,
      "grad_norm": 2.3240831234078048,
      "learning_rate": 5.275193560946372e-06,
      "loss": 0.4762,
      "step": 1000
    },
    {
      "epoch": 0.5371459112773918,
      "grad_norm": 2.5172989799788827,
      "learning_rate": 5.228586430011732e-06,
      "loss": 0.4608,
      "step": 1005
    },
    {
      "epoch": 0.5398182789951897,
      "grad_norm": 2.4058254619350015,
      "learning_rate": 5.181959386896862e-06,
      "loss": 0.4697,
      "step": 1010
    },
    {
      "epoch": 0.5424906467129877,
      "grad_norm": 2.393865052227799,
      "learning_rate": 5.135316493286818e-06,
      "loss": 0.4547,
      "step": 1015
    },
    {
      "epoch": 0.5451630144307856,
      "grad_norm": 2.279248494233032,
      "learning_rate": 5.088661812247389e-06,
      "loss": 0.4568,
      "step": 1020
    },
    {
      "epoch": 0.5478353821485836,
      "grad_norm": 2.304039661249158,
      "learning_rate": 5.041999407871168e-06,
      "loss": 0.4687,
      "step": 1025
    },
    {
      "epoch": 0.5505077498663816,
      "grad_norm": 2.2364242864738078,
      "learning_rate": 4.995333344923531e-06,
      "loss": 0.4618,
      "step": 1030
    },
    {
      "epoch": 0.5531801175841796,
      "grad_norm": 2.3385130367436093,
      "learning_rate": 4.948667688488552e-06,
      "loss": 0.4538,
      "step": 1035
    },
    {
      "epoch": 0.5558524853019775,
      "grad_norm": 2.1973220897038828,
      "learning_rate": 4.9020065036148885e-06,
      "loss": 0.4596,
      "step": 1040
    },
    {
      "epoch": 0.5585248530197755,
      "grad_norm": 2.339222114519132,
      "learning_rate": 4.85535385496169e-06,
      "loss": 0.4639,
      "step": 1045
    },
    {
      "epoch": 0.5611972207375735,
      "grad_norm": 2.343134563122755,
      "learning_rate": 4.808713806444506e-06,
      "loss": 0.4465,
      "step": 1050
    },
    {
      "epoch": 0.5638695884553715,
      "grad_norm": 2.1561902388155403,
      "learning_rate": 4.762090420881289e-06,
      "loss": 0.4629,
      "step": 1055
    },
    {
      "epoch": 0.5665419561731694,
      "grad_norm": 2.4064924815064734,
      "learning_rate": 4.715487759638486e-06,
      "loss": 0.4464,
      "step": 1060
    },
    {
      "epoch": 0.5692143238909674,
      "grad_norm": 2.267003038981367,
      "learning_rate": 4.66890988227724e-06,
      "loss": 0.4605,
      "step": 1065
    },
    {
      "epoch": 0.5718866916087654,
      "grad_norm": 2.313840406512139,
      "learning_rate": 4.622360846199772e-06,
      "loss": 0.4516,
      "step": 1070
    },
    {
      "epoch": 0.5745590593265634,
      "grad_norm": 2.291001157001646,
      "learning_rate": 4.575844706295938e-06,
      "loss": 0.4326,
      "step": 1075
    },
    {
      "epoch": 0.5772314270443613,
      "grad_norm": 2.358917294812099,
      "learning_rate": 4.529365514590002e-06,
      "loss": 0.4323,
      "step": 1080
    },
    {
      "epoch": 0.5799037947621593,
      "grad_norm": 2.4267897696158505,
      "learning_rate": 4.482927319887669e-06,
      "loss": 0.4475,
      "step": 1085
    },
    {
      "epoch": 0.5825761624799572,
      "grad_norm": 2.344155005554764,
      "learning_rate": 4.436534167423395e-06,
      "loss": 0.4382,
      "step": 1090
    },
    {
      "epoch": 0.5852485301977552,
      "grad_norm": 2.1861870199235125,
      "learning_rate": 4.390190098508001e-06,
      "loss": 0.4254,
      "step": 1095
    },
    {
      "epoch": 0.5879208979155531,
      "grad_norm": 2.2411000170176716,
      "learning_rate": 4.343899150176635e-06,
      "loss": 0.4392,
      "step": 1100
    },
    {
      "epoch": 0.5905932656333511,
      "grad_norm": 2.4188630340463066,
      "learning_rate": 4.2976653548371115e-06,
      "loss": 0.4374,
      "step": 1105
    },
    {
      "epoch": 0.5932656333511491,
      "grad_norm": 2.26211287377474,
      "learning_rate": 4.251492739918641e-06,
      "loss": 0.4334,
      "step": 1110
    },
    {
      "epoch": 0.5959380010689471,
      "grad_norm": 2.3942328827051083,
      "learning_rate": 4.205385327521002e-06,
      "loss": 0.4438,
      "step": 1115
    },
    {
      "epoch": 0.598610368786745,
      "grad_norm": 2.1138070718766526,
      "learning_rate": 4.159347134064177e-06,
      "loss": 0.427,
      "step": 1120
    },
    {
      "epoch": 0.601282736504543,
      "grad_norm": 2.3128400895172065,
      "learning_rate": 4.113382169938488e-06,
      "loss": 0.4386,
      "step": 1125
    },
    {
      "epoch": 0.603955104222341,
      "grad_norm": 2.3090402861553008,
      "learning_rate": 4.067494439155236e-06,
      "loss": 0.4446,
      "step": 1130
    },
    {
      "epoch": 0.606627471940139,
      "grad_norm": 2.38557316695186,
      "learning_rate": 4.021687938997923e-06,
      "loss": 0.4283,
      "step": 1135
    },
    {
      "epoch": 0.6092998396579369,
      "grad_norm": 2.3834734986558086,
      "learning_rate": 3.975966659674048e-06,
      "loss": 0.4318,
      "step": 1140
    },
    {
      "epoch": 0.6119722073757349,
      "grad_norm": 2.468805695724146,
      "learning_rate": 3.930334583967514e-06,
      "loss": 0.4319,
      "step": 1145
    },
    {
      "epoch": 0.6146445750935329,
      "grad_norm": 2.2597303719856754,
      "learning_rate": 3.884795686891692e-06,
      "loss": 0.4192,
      "step": 1150
    },
    {
      "epoch": 0.6173169428113309,
      "grad_norm": 2.220457602337495,
      "learning_rate": 3.839353935343156e-06,
      "loss": 0.4387,
      "step": 1155
    },
    {
      "epoch": 0.6199893105291288,
      "grad_norm": 2.2115364704480553,
      "learning_rate": 3.794013287756125e-06,
      "loss": 0.4119,
      "step": 1160
    },
    {
      "epoch": 0.6226616782469268,
      "grad_norm": 2.3325364212519215,
      "learning_rate": 3.748777693757646e-06,
      "loss": 0.4315,
      "step": 1165
    },
    {
      "epoch": 0.6253340459647247,
      "grad_norm": 2.305852431544535,
      "learning_rate": 3.7036510938235394e-06,
      "loss": 0.4379,
      "step": 1170
    },
    {
      "epoch": 0.6280064136825227,
      "grad_norm": 2.315692835056829,
      "learning_rate": 3.658637418935146e-06,
      "loss": 0.4214,
      "step": 1175
    },
    {
      "epoch": 0.6306787814003206,
      "grad_norm": 2.2325154851397655,
      "learning_rate": 3.613740590236895e-06,
      "loss": 0.4232,
      "step": 1180
    },
    {
      "epoch": 0.6333511491181186,
      "grad_norm": 2.346809809533785,
      "learning_rate": 3.56896451869474e-06,
      "loss": 0.4096,
      "step": 1185
    },
    {
      "epoch": 0.6360235168359166,
      "grad_norm": 2.2155776617857343,
      "learning_rate": 3.524313104755468e-06,
      "loss": 0.4257,
      "step": 1190
    },
    {
      "epoch": 0.6386958845537146,
      "grad_norm": 2.301115466602763,
      "learning_rate": 3.4797902380069305e-06,
      "loss": 0.4161,
      "step": 1195
    },
    {
      "epoch": 0.6413682522715125,
      "grad_norm": 2.267239712852779,
      "learning_rate": 3.4353997968392295e-06,
      "loss": 0.4134,
      "step": 1200
    },
    {
      "epoch": 0.6440406199893105,
      "grad_norm": 2.314495503471501,
      "learning_rate": 3.3911456481068613e-06,
      "loss": 0.4246,
      "step": 1205
    },
    {
      "epoch": 0.6467129877071085,
      "grad_norm": 2.2423488636530333,
      "learning_rate": 3.3470316467918785e-06,
      "loss": 0.4019,
      "step": 1210
    },
    {
      "epoch": 0.6493853554249065,
      "grad_norm": 2.350376857517849,
      "learning_rate": 3.3030616356680854e-06,
      "loss": 0.4085,
      "step": 1215
    },
    {
      "epoch": 0.6520577231427044,
      "grad_norm": 2.17587476305692,
      "learning_rate": 3.2592394449662867e-06,
      "loss": 0.4053,
      "step": 1220
    },
    {
      "epoch": 0.6547300908605024,
      "grad_norm": 2.19222340194859,
      "learning_rate": 3.2155688920406415e-06,
      "loss": 0.4085,
      "step": 1225
    },
    {
      "epoch": 0.6574024585783004,
      "grad_norm": 2.3348158981976534,
      "learning_rate": 3.172053781036132e-06,
      "loss": 0.4088,
      "step": 1230
    },
    {
      "epoch": 0.6600748262960984,
      "grad_norm": 2.2791255065148777,
      "learning_rate": 3.1286979025571817e-06,
      "loss": 0.4162,
      "step": 1235
    },
    {
      "epoch": 0.6627471940138963,
      "grad_norm": 2.184701125254012,
      "learning_rate": 3.0855050333374574e-06,
      "loss": 0.4146,
      "step": 1240
    },
    {
      "epoch": 0.6654195617316943,
      "grad_norm": 2.1462756307983004,
      "learning_rate": 3.042478935910881e-06,
      "loss": 0.4066,
      "step": 1245
    },
    {
      "epoch": 0.6680919294494923,
      "grad_norm": 2.249540103185502,
      "learning_rate": 2.9996233582838686e-06,
      "loss": 0.4123,
      "step": 1250
    },
    {
      "epoch": 0.6707642971672902,
      "grad_norm": 2.1365843130945543,
      "learning_rate": 2.956942033608843e-06,
      "loss": 0.3986,
      "step": 1255
    },
    {
      "epoch": 0.6734366648850882,
      "grad_norm": 2.2244554395915035,
      "learning_rate": 2.914438679859046e-06,
      "loss": 0.4022,
      "step": 1260
    },
    {
      "epoch": 0.6761090326028861,
      "grad_norm": 2.1473916197507403,
      "learning_rate": 2.8721169995046503e-06,
      "loss": 0.4198,
      "step": 1265
    },
    {
      "epoch": 0.6787814003206841,
      "grad_norm": 2.2135391990804245,
      "learning_rate": 2.829980679190254e-06,
      "loss": 0.3989,
      "step": 1270
    },
    {
      "epoch": 0.6814537680384821,
      "grad_norm": 2.283258519501861,
      "learning_rate": 2.788033389413729e-06,
      "loss": 0.4021,
      "step": 1275
    },
    {
      "epoch": 0.6841261357562801,
      "grad_norm": 2.3358515645118736,
      "learning_rate": 2.7462787842064753e-06,
      "loss": 0.4121,
      "step": 1280
    },
    {
      "epoch": 0.686798503474078,
      "grad_norm": 2.314456542436578,
      "learning_rate": 2.7047205008151332e-06,
      "loss": 0.4009,
      "step": 1285
    },
    {
      "epoch": 0.689470871191876,
      "grad_norm": 2.1923477921909176,
      "learning_rate": 2.6633621593847387e-06,
      "loss": 0.4021,
      "step": 1290
    },
    {
      "epoch": 0.692143238909674,
      "grad_norm": 2.2966892672617463,
      "learning_rate": 2.6222073626433587e-06,
      "loss": 0.395,
      "step": 1295
    },
    {
      "epoch": 0.694815606627472,
      "grad_norm": 2.2799392689553,
      "learning_rate": 2.5812596955882756e-06,
      "loss": 0.3949,
      "step": 1300
    },
    {
      "epoch": 0.6974879743452699,
      "grad_norm": 2.2131564006746123,
      "learning_rate": 2.540522725173692e-06,
      "loss": 0.4001,
      "step": 1305
    },
    {
      "epoch": 0.7001603420630679,
      "grad_norm": 2.2620859852805197,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.3967,
      "step": 1310
    },
    {
      "epoch": 0.7028327097808659,
      "grad_norm": 2.0891899787268278,
      "learning_rate": 2.459695050004688e-06,
      "loss": 0.3837,
      "step": 1315
    },
    {
      "epoch": 0.7055050774986639,
      "grad_norm": 2.256364852897407,
      "learning_rate": 2.4196113861548233e-06,
      "loss": 0.3826,
      "step": 1320
    },
    {
      "epoch": 0.7081774452164618,
      "grad_norm": 2.087597061423158,
      "learning_rate": 2.379752500141222e-06,
      "loss": 0.3904,
      "step": 1325
    },
    {
      "epoch": 0.7108498129342598,
      "grad_norm": 2.148214107255344,
      "learning_rate": 2.3401218640742894e-06,
      "loss": 0.3925,
      "step": 1330
    },
    {
      "epoch": 0.7135221806520577,
      "grad_norm": 2.252603568294772,
      "learning_rate": 2.3007229301815643e-06,
      "loss": 0.3843,
      "step": 1335
    },
    {
      "epoch": 0.7161945483698557,
      "grad_norm": 2.5431488127516837,
      "learning_rate": 2.2615591305069846e-06,
      "loss": 0.3899,
      "step": 1340
    },
    {
      "epoch": 0.7188669160876536,
      "grad_norm": 2.14235725429711,
      "learning_rate": 2.2226338766119366e-06,
      "loss": 0.3892,
      "step": 1345
    },
    {
      "epoch": 0.7215392838054516,
      "grad_norm": 2.397791424619344,
      "learning_rate": 2.1839505592780658e-06,
      "loss": 0.3968,
      "step": 1350
    },
    {
      "epoch": 0.7242116515232496,
      "grad_norm": 2.244839587602645,
      "learning_rate": 2.145512548211902e-06,
      "loss": 0.3661,
      "step": 1355
    },
    {
      "epoch": 0.7268840192410476,
      "grad_norm": 2.081746722492156,
      "learning_rate": 2.1073231917513336e-06,
      "loss": 0.3805,
      "step": 1360
    },
    {
      "epoch": 0.7295563869588455,
      "grad_norm": 2.1548041790438806,
      "learning_rate": 2.069385816573928e-06,
      "loss": 0.3739,
      "step": 1365
    },
    {
      "epoch": 0.7322287546766435,
      "grad_norm": 2.2617609726198387,
      "learning_rate": 2.0317037274071412e-06,
      "loss": 0.3797,
      "step": 1370
    },
    {
      "epoch": 0.7349011223944415,
      "grad_norm": 2.2088291878484694,
      "learning_rate": 1.99428020674045e-06,
      "loss": 0.3678,
      "step": 1375
    },
    {
      "epoch": 0.7375734901122395,
      "grad_norm": 2.1855267811178645,
      "learning_rate": 1.9571185145394117e-06,
      "loss": 0.3873,
      "step": 1380
    },
    {
      "epoch": 0.7402458578300374,
      "grad_norm": 2.1932190918187278,
      "learning_rate": 1.9202218879616824e-06,
      "loss": 0.3976,
      "step": 1385
    },
    {
      "epoch": 0.7429182255478354,
      "grad_norm": 2.1791679190224142,
      "learning_rate": 1.8835935410750372e-06,
      "loss": 0.37,
      "step": 1390
    },
    {
      "epoch": 0.7455905932656334,
      "grad_norm": 2.2552371440070043,
      "learning_rate": 1.8472366645773892e-06,
      "loss": 0.3728,
      "step": 1395
    },
    {
      "epoch": 0.7482629609834314,
      "grad_norm": 2.146094844771457,
      "learning_rate": 1.8111544255188402e-06,
      "loss": 0.3875,
      "step": 1400
    },
    {
      "epoch": 0.7509353287012293,
      "grad_norm": 2.018056735648775,
      "learning_rate": 1.7753499670258106e-06,
      "loss": 0.3763,
      "step": 1405
    },
    {
      "epoch": 0.7536076964190273,
      "grad_norm": 2.076212339323707,
      "learning_rate": 1.7398264080272371e-06,
      "loss": 0.3849,
      "step": 1410
    },
    {
      "epoch": 0.7562800641368252,
      "grad_norm": 2.174554150416714,
      "learning_rate": 1.7045868429828745e-06,
      "loss": 0.3856,
      "step": 1415
    },
    {
      "epoch": 0.7589524318546232,
      "grad_norm": 2.151663250597911,
      "learning_rate": 1.6696343416137495e-06,
      "loss": 0.3861,
      "step": 1420
    },
    {
      "epoch": 0.7616247995724211,
      "grad_norm": 2.3360649402814984,
      "learning_rate": 1.6349719486347533e-06,
      "loss": 0.3821,
      "step": 1425
    },
    {
      "epoch": 0.7642971672902191,
      "grad_norm": 2.0553410775677854,
      "learning_rate": 1.6006026834894068e-06,
      "loss": 0.3669,
      "step": 1430
    },
    {
      "epoch": 0.7669695350080171,
      "grad_norm": 2.1657944021551616,
      "learning_rate": 1.5665295400868513e-06,
      "loss": 0.3813,
      "step": 1435
    },
    {
      "epoch": 0.7696419027258151,
      "grad_norm": 2.3170759830967325,
      "learning_rate": 1.5327554865410415e-06,
      "loss": 0.3723,
      "step": 1440
    },
    {
      "epoch": 0.772314270443613,
      "grad_norm": 2.1126770007065683,
      "learning_rate": 1.499283464912188e-06,
      "loss": 0.3706,
      "step": 1445
    },
    {
      "epoch": 0.774986638161411,
      "grad_norm": 2.372914223040231,
      "learning_rate": 1.4661163909504855e-06,
      "loss": 0.3771,
      "step": 1450
    },
    {
      "epoch": 0.777659005879209,
      "grad_norm": 2.3328613136831127,
      "learning_rate": 1.4332571538421136e-06,
      "loss": 0.3674,
      "step": 1455
    },
    {
      "epoch": 0.780331373597007,
      "grad_norm": 2.140027640464256,
      "learning_rate": 1.4007086159575595e-06,
      "loss": 0.3755,
      "step": 1460
    },
    {
      "epoch": 0.7830037413148049,
      "grad_norm": 2.02535020358291,
      "learning_rate": 1.3684736126022812e-06,
      "loss": 0.3614,
      "step": 1465
    },
    {
      "epoch": 0.7856761090326029,
      "grad_norm": 2.234825491106363,
      "learning_rate": 1.3365549517697234e-06,
      "loss": 0.3727,
      "step": 1470
    },
    {
      "epoch": 0.7883484767504009,
      "grad_norm": 2.127621543244066,
      "learning_rate": 1.3049554138967052e-06,
      "loss": 0.3668,
      "step": 1475
    },
    {
      "epoch": 0.7910208444681989,
      "grad_norm": 1.9966970197186302,
      "learning_rate": 1.2736777516212267e-06,
      "loss": 0.3792,
      "step": 1480
    },
    {
      "epoch": 0.7936932121859968,
      "grad_norm": 2.1994020174447764,
      "learning_rate": 1.2427246895426826e-06,
      "loss": 0.3632,
      "step": 1485
    },
    {
      "epoch": 0.7963655799037948,
      "grad_norm": 2.325490564770392,
      "learning_rate": 1.2120989239845149e-06,
      "loss": 0.3784,
      "step": 1490
    },
    {
      "epoch": 0.7990379476215927,
      "grad_norm": 2.0389410793019094,
      "learning_rate": 1.1818031227593491e-06,
      "loss": 0.3688,
      "step": 1495
    },
    {
      "epoch": 0.8017103153393907,
      "grad_norm": 2.237406582848762,
      "learning_rate": 1.1518399249365924e-06,
      "loss": 0.3675,
      "step": 1500
    },
    {
      "epoch": 0.8043826830571886,
      "grad_norm": 2.0566468097872934,
      "learning_rate": 1.1222119406125426e-06,
      "loss": 0.3585,
      "step": 1505
    },
    {
      "epoch": 0.8070550507749866,
      "grad_norm": 2.205571320640559,
      "learning_rate": 1.0929217506830292e-06,
      "loss": 0.3647,
      "step": 1510
    },
    {
      "epoch": 0.8097274184927846,
      "grad_norm": 2.1023679887919635,
      "learning_rate": 1.0639719066185867e-06,
      "loss": 0.3715,
      "step": 1515
    },
    {
      "epoch": 0.8123997862105826,
      "grad_norm": 2.0717279006060316,
      "learning_rate": 1.0353649302421982e-06,
      "loss": 0.3591,
      "step": 1520
    },
    {
      "epoch": 0.8150721539283805,
      "grad_norm": 2.088163234375785,
      "learning_rate": 1.007103313509617e-06,
      "loss": 0.3569,
      "step": 1525
    },
    {
      "epoch": 0.8177445216461785,
      "grad_norm": 2.2753464325922796,
      "learning_rate": 9.791895182922911e-07,
      "loss": 0.3633,
      "step": 1530
    },
    {
      "epoch": 0.8204168893639765,
      "grad_norm": 2.122716745185861,
      "learning_rate": 9.516259761629148e-07,
      "loss": 0.3652,
      "step": 1535
    },
    {
      "epoch": 0.8230892570817745,
      "grad_norm": 2.047095013957556,
      "learning_rate": 9.244150881836117e-07,
      "loss": 0.36,
      "step": 1540
    },
    {
      "epoch": 0.8257616247995724,
      "grad_norm": 1.944446706318155,
      "learning_rate": 8.975592246967713e-07,
      "loss": 0.3653,
      "step": 1545
    },
    {
      "epoch": 0.8284339925173704,
      "grad_norm": 2.038292799471268,
      "learning_rate": 8.710607251185799e-07,
      "loss": 0.3579,
      "step": 1550
    },
    {
      "epoch": 0.8311063602351684,
      "grad_norm": 2.111895969332577,
      "learning_rate": 8.449218977352281e-07,
      "loss": 0.3489,
      "step": 1555
    },
    {
      "epoch": 0.8337787279529664,
      "grad_norm": 2.0806458377180546,
      "learning_rate": 8.191450195018313e-07,
      "loss": 0.3612,
      "step": 1560
    },
    {
      "epoch": 0.8364510956707643,
      "grad_norm": 2.2095760944892677,
      "learning_rate": 7.937323358440935e-07,
      "loss": 0.368,
      "step": 1565
    },
    {
      "epoch": 0.8391234633885623,
      "grad_norm": 2.009589157938905,
      "learning_rate": 7.686860604627022e-07,
      "loss": 0.3697,
      "step": 1570
    },
    {
      "epoch": 0.8417958311063602,
      "grad_norm": 2.0176043666039076,
      "learning_rate": 7.440083751404902e-07,
      "loss": 0.3479,
      "step": 1575
    },
    {
      "epoch": 0.8444681988241582,
      "grad_norm": 2.2398694013369362,
      "learning_rate": 7.197014295523879e-07,
      "loss": 0.3658,
      "step": 1580
    },
    {
      "epoch": 0.8471405665419561,
      "grad_norm": 2.2248765967199224,
      "learning_rate": 6.957673410781617e-07,
      "loss": 0.3612,
      "step": 1585
    },
    {
      "epoch": 0.8498129342597541,
      "grad_norm": 2.1687807466267004,
      "learning_rate": 6.722081946179631e-07,
      "loss": 0.3537,
      "step": 1590
    },
    {
      "epoch": 0.8524853019775521,
      "grad_norm": 2.152399358341962,
      "learning_rate": 6.490260424107231e-07,
      "loss": 0.3529,
      "step": 1595
    },
    {
      "epoch": 0.8551576696953501,
      "grad_norm": 2.071713282983931,
      "learning_rate": 6.262229038553752e-07,
      "loss": 0.3541,
      "step": 1600
    },
    {
      "epoch": 0.857830037413148,
      "grad_norm": 2.0120433825076542,
      "learning_rate": 6.038007653349437e-07,
      "loss": 0.3672,
      "step": 1605
    },
    {
      "epoch": 0.860502405130946,
      "grad_norm": 2.091504907657452,
      "learning_rate": 5.817615800435167e-07,
      "loss": 0.3475,
      "step": 1610
    },
    {
      "epoch": 0.863174772848744,
      "grad_norm": 2.0847773360899837,
      "learning_rate": 5.60107267816098e-07,
      "loss": 0.3556,
      "step": 1615
    },
    {
      "epoch": 0.865847140566542,
      "grad_norm": 2.017553941427195,
      "learning_rate": 5.388397149613683e-07,
      "loss": 0.3713,
      "step": 1620
    },
    {
      "epoch": 0.8685195082843399,
      "grad_norm": 2.3647834736903244,
      "learning_rate": 5.179607740973764e-07,
      "loss": 0.3631,
      "step": 1625
    },
    {
      "epoch": 0.8711918760021379,
      "grad_norm": 2.041063738988079,
      "learning_rate": 4.974722639901503e-07,
      "loss": 0.3573,
      "step": 1630
    },
    {
      "epoch": 0.8738642437199359,
      "grad_norm": 1.9173830344630454,
      "learning_rate": 4.773759693952662e-07,
      "loss": 0.3483,
      "step": 1635
    },
    {
      "epoch": 0.8765366114377339,
      "grad_norm": 2.189591381428806,
      "learning_rate": 4.576736409023813e-07,
      "loss": 0.3617,
      "step": 1640
    },
    {
      "epoch": 0.8792089791555318,
      "grad_norm": 2.075882031385073,
      "learning_rate": 4.383669947827368e-07,
      "loss": 0.3558,
      "step": 1645
    },
    {
      "epoch": 0.8818813468733298,
      "grad_norm": 1.9859526858374763,
      "learning_rate": 4.194577128396521e-07,
      "loss": 0.3531,
      "step": 1650
    },
    {
      "epoch": 0.8845537145911277,
      "grad_norm": 2.169320189826052,
      "learning_rate": 4.009474422620269e-07,
      "loss": 0.3597,
      "step": 1655
    },
    {
      "epoch": 0.8872260823089257,
      "grad_norm": 2.0308766276062156,
      "learning_rate": 3.828377954808538e-07,
      "loss": 0.3471,
      "step": 1660
    },
    {
      "epoch": 0.8898984500267236,
      "grad_norm": 2.0198511100844097,
      "learning_rate": 3.651303500287534e-07,
      "loss": 0.3576,
      "step": 1665
    },
    {
      "epoch": 0.8925708177445216,
      "grad_norm": 2.268439126603322,
      "learning_rate": 3.4782664840256387e-07,
      "loss": 0.3529,
      "step": 1670
    },
    {
      "epoch": 0.8952431854623196,
      "grad_norm": 1.9847511752450961,
      "learning_rate": 3.3092819792896913e-07,
      "loss": 0.3451,
      "step": 1675
    },
    {
      "epoch": 0.8979155531801176,
      "grad_norm": 2.1019998352962164,
      "learning_rate": 3.1443647063319425e-07,
      "loss": 0.3453,
      "step": 1680
    },
    {
      "epoch": 0.9005879208979155,
      "grad_norm": 2.2350648983618453,
      "learning_rate": 2.9835290311078123e-07,
      "loss": 0.3627,
      "step": 1685
    },
    {
      "epoch": 0.9032602886157135,
      "grad_norm": 1.9841704743012314,
      "learning_rate": 2.8267889640244516e-07,
      "loss": 0.3535,
      "step": 1690
    },
    {
      "epoch": 0.9059326563335115,
      "grad_norm": 2.0356033672039544,
      "learning_rate": 2.6741581587202747e-07,
| "loss": 0.3442, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.9086050240513095, | |
| "grad_norm": 2.1092832232487826, | |
| "learning_rate": 2.525649910875627e-07, | |
| "loss": 0.3509, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.9112773917691074, | |
| "grad_norm": 2.131995008854108, | |
| "learning_rate": 2.3812771570545846e-07, | |
| "loss": 0.3511, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.9139497594869054, | |
| "grad_norm": 2.129385304232537, | |
| "learning_rate": 2.2410524735780205e-07, | |
| "loss": 0.3451, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.9166221272047034, | |
| "grad_norm": 2.0136786336582877, | |
| "learning_rate": 2.104988075428127e-07, | |
| "loss": 0.3525, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.9192944949225014, | |
| "grad_norm": 2.2858760868766765, | |
| "learning_rate": 1.9730958151843282e-07, | |
| "loss": 0.3484, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.9219668626402993, | |
| "grad_norm": 1.964639607321287, | |
| "learning_rate": 1.845387181990821e-07, | |
| "loss": 0.3441, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.9246392303580973, | |
| "grad_norm": 1.9899066828394374, | |
| "learning_rate": 1.7218733005557707e-07, | |
| "loss": 0.3562, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.9273115980758952, | |
| "grad_norm": 2.1880261263868483, | |
| "learning_rate": 1.6025649301821877e-07, | |
| "loss": 0.3596, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.9299839657936932, | |
| "grad_norm": 2.107871540829437, | |
| "learning_rate": 1.4874724638307303e-07, | |
| "loss": 0.3464, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.9326563335114911, | |
| "grad_norm": 2.1515927738306635, | |
| "learning_rate": 1.376605927214364e-07, | |
| "loss": 0.3391, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.9353287012292891, | |
| "grad_norm": 1.9419157718954796, | |
| "learning_rate": 1.2699749779249926e-07, | |
| "loss": 0.3578, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.9380010689470871, | |
| "grad_norm": 2.0515419127475285, | |
| "learning_rate": 1.1675889045922151e-07, | |
| "loss": 0.3468, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.9406734366648851, | |
| "grad_norm": 1.8780773791377043, | |
| "learning_rate": 1.0694566260742001e-07, | |
| "loss": 0.354, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.943345804382683, | |
| "grad_norm": 2.156801824942324, | |
| "learning_rate": 9.755866906807188e-08, | |
| "loss": 0.3529, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.946018172100481, | |
| "grad_norm": 2.049338756194342, | |
| "learning_rate": 8.859872754285403e-08, | |
| "loss": 0.3551, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.948690539818279, | |
| "grad_norm": 1.8992043254542215, | |
| "learning_rate": 8.006661853291298e-08, | |
| "loss": 0.3615, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.951362907536077, | |
| "grad_norm": 1.945897478392813, | |
| "learning_rate": 7.196308527087192e-08, | |
| "loss": 0.3489, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.9540352752538749, | |
| "grad_norm": 1.934834420924745, | |
| "learning_rate": 6.428883365609261e-08, | |
| "loss": 0.3485, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.9567076429716729, | |
| "grad_norm": 2.278947830724746, | |
| "learning_rate": 5.704453219318118e-08, | |
| "loss": 0.3465, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.9593800106894709, | |
| "grad_norm": 2.019238988360205, | |
| "learning_rate": 5.023081193375357e-08, | |
| "loss": 0.3483, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.9620523784072689, | |
| "grad_norm": 2.077457220463081, | |
| "learning_rate": 4.384826642146844e-08, | |
| "loss": 0.3512, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.9647247461250668, | |
| "grad_norm": 1.90275137904516, | |
| "learning_rate": 3.7897451640321326e-08, | |
| "loss": 0.3336, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.9673971138428648, | |
| "grad_norm": 2.2013361831715756, | |
| "learning_rate": 3.2378885966211636e-08, | |
| "loss": 0.3578, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.9700694815606627, | |
| "grad_norm": 1.798115382458221, | |
| "learning_rate": 2.7293050121788843e-08, | |
| "loss": 0.3356, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.9727418492784607, | |
| "grad_norm": 2.204761981011997, | |
| "learning_rate": 2.264038713457706e-08, | |
| "loss": 0.3564, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.9754142169962586, | |
| "grad_norm": 2.114746372544894, | |
| "learning_rate": 1.842130229838035e-08, | |
| "loss": 0.3449, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.9780865847140566, | |
| "grad_norm": 2.2465786265310825, | |
| "learning_rate": 1.4636163137980441e-08, | |
| "loss": 0.3496, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.9807589524318546, | |
| "grad_norm": 2.4079788695162128, | |
| "learning_rate": 1.1285299377118974e-08, | |
| "loss": 0.3567, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.9834313201496526, | |
| "grad_norm": 1.8850228312648118, | |
| "learning_rate": 8.369002909777713e-09, | |
| "loss": 0.3492, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.9861036878674505, | |
| "grad_norm": 2.1554910144187702, | |
| "learning_rate": 5.88752777474999e-09, | |
| "loss": 0.3491, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.9887760555852485, | |
| "grad_norm": 2.0257545592692794, | |
| "learning_rate": 3.841090133511749e-09, | |
| "loss": 0.3442, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.9914484233030465, | |
| "grad_norm": 2.169390747670639, | |
| "learning_rate": 2.229868251391598e-09, | |
| "loss": 0.3502, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.9941207910208445, | |
| "grad_norm": 2.169432604667664, | |
| "learning_rate": 1.054002482043237e-09, | |
| "loss": 0.361, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.9967931587386424, | |
| "grad_norm": 1.9430919338940775, | |
| "learning_rate": 3.1359525521801326e-10, | |
| "loss": 0.3459, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.9994655264564404, | |
| "grad_norm": 1.988798803083866, | |
| "learning_rate": 8.711067840949661e-12, | |
| "loss": 0.3484, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_runtime": 3.393, | |
| "eval_samples_per_second": 2.947, | |
| "eval_steps_per_second": 0.884, | |
| "step": 1871 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 1871, | |
| "total_flos": 195874667888640.0, | |
| "train_loss": 0.5146575008465732, | |
| "train_runtime": 16798.2789, | |
| "train_samples_per_second": 1.782, | |
| "train_steps_per_second": 0.111 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 1871, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 195874667888640.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
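
The listing above appears to be a Hugging Face Trainer `trainer_state.json` (an assumption based on its `log_history`, `logging_steps`, and `total_flos` fields). A minimal sketch of how such a file could be consumed follows; the file path `trainer_state.json` is hypothetical, and only the standard-library `json` module is used.

```python
# Minimal sketch (assumption: the document is a Hugging Face Trainer
# trainer_state.json saved to the hypothetical path below).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Entries carrying a "loss" key are the periodic training logs
# (one every `logging_steps` = 5 optimizer steps in this run).
train_logs = [e for e in state["log_history"] if "loss" in e]

first, last = train_logs[0], train_logs[-1]
print(f"logged points: {len(train_logs)}")
print(f"loss at step {first['step']}: {first['loss']:.4f}")
print(f"loss at step {last['step']}: {last['loss']:.4f}")

# The final log_history entry holds the run-level summary,
# including the mean training loss reported as "train_loss".
summary = state["log_history"][-1]
print(f"mean train_loss: {summary.get('train_loss')}")
```

Run against this log, the sketch would report the loss falling from roughly 1.36 at step 1 to about 0.35 near step 1870, with the summary entry's `train_loss` of ~0.515 as the mean over the whole epoch.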