| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 1.0, | |
| "eval_steps": 500, | |
| "global_step": 2144, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0023320895522388058, | |
| "grad_norm": 2.1731144958946356, | |
| "learning_rate": 2.3148148148148148e-06, | |
| "loss": 0.8497, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.0046641791044776115, | |
| "grad_norm": 1.4455418381410732, | |
| "learning_rate": 4.6296296296296296e-06, | |
| "loss": 0.8271, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.006996268656716418, | |
| "grad_norm": 1.6554188288129872, | |
| "learning_rate": 6.944444444444445e-06, | |
| "loss": 0.816, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.009328358208955223, | |
| "grad_norm": 1.0605788457441088, | |
| "learning_rate": 9.259259259259259e-06, | |
| "loss": 0.7542, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.01166044776119403, | |
| "grad_norm": 0.720337911055749, | |
| "learning_rate": 1.1574074074074075e-05, | |
| "loss": 0.7312, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.013992537313432836, | |
| "grad_norm": 0.6053048303655763, | |
| "learning_rate": 1.388888888888889e-05, | |
| "loss": 0.6962, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.01632462686567164, | |
| "grad_norm": 0.5365339422370746, | |
| "learning_rate": 1.6203703703703704e-05, | |
| "loss": 0.6915, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.018656716417910446, | |
| "grad_norm": 0.5138446295825095, | |
| "learning_rate": 1.8518518518518518e-05, | |
| "loss": 0.6777, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.020988805970149255, | |
| "grad_norm": 0.5294732057067889, | |
| "learning_rate": 2.0833333333333336e-05, | |
| "loss": 0.6738, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.02332089552238806, | |
| "grad_norm": 0.5216249762051252, | |
| "learning_rate": 2.314814814814815e-05, | |
| "loss": 0.649, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.025652985074626867, | |
| "grad_norm": 0.4635569420456706, | |
| "learning_rate": 2.5462962962962965e-05, | |
| "loss": 0.6445, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.027985074626865673, | |
| "grad_norm": 0.5711576603157688, | |
| "learning_rate": 2.777777777777778e-05, | |
| "loss": 0.6442, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.03031716417910448, | |
| "grad_norm": 0.5406532747721863, | |
| "learning_rate": 3.0092592592592593e-05, | |
| "loss": 0.6333, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.03264925373134328, | |
| "grad_norm": 0.4921611180106655, | |
| "learning_rate": 3.240740740740741e-05, | |
| "loss": 0.609, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.034981343283582086, | |
| "grad_norm": 0.5585104564168789, | |
| "learning_rate": 3.472222222222222e-05, | |
| "loss": 0.6166, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.03731343283582089, | |
| "grad_norm": 0.6060789427946596, | |
| "learning_rate": 3.7037037037037037e-05, | |
| "loss": 0.6241, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.039645522388059705, | |
| "grad_norm": 0.5313035000332463, | |
| "learning_rate": 3.935185185185186e-05, | |
| "loss": 0.6184, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.04197761194029851, | |
| "grad_norm": 0.6556913799681943, | |
| "learning_rate": 4.166666666666667e-05, | |
| "loss": 0.6211, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.044309701492537316, | |
| "grad_norm": 0.5778296341723107, | |
| "learning_rate": 4.3981481481481486e-05, | |
| "loss": 0.6079, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.04664179104477612, | |
| "grad_norm": 0.5268598973302592, | |
| "learning_rate": 4.62962962962963e-05, | |
| "loss": 0.6038, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.04897388059701493, | |
| "grad_norm": 0.509339973453908, | |
| "learning_rate": 4.8611111111111115e-05, | |
| "loss": 0.6305, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.051305970149253734, | |
| "grad_norm": 0.5496370287982213, | |
| "learning_rate": 4.999989285883431e-05, | |
| "loss": 0.627, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.05363805970149254, | |
| "grad_norm": 0.5434194242110924, | |
| "learning_rate": 4.999868753243865e-05, | |
| "loss": 0.6263, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.055970149253731345, | |
| "grad_norm": 0.6282164391718019, | |
| "learning_rate": 4.999614302517356e-05, | |
| "loss": 0.6202, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.05830223880597015, | |
| "grad_norm": 0.5814513203649924, | |
| "learning_rate": 4.9992259488494784e-05, | |
| "loss": 0.6049, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.06063432835820896, | |
| "grad_norm": 0.6315852165763514, | |
| "learning_rate": 4.99870371535606e-05, | |
| "loss": 0.6043, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.06296641791044776, | |
| "grad_norm": 0.6282922843004739, | |
| "learning_rate": 4.9980476331218066e-05, | |
| "loss": 0.6484, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.06529850746268656, | |
| "grad_norm": 0.5875693979221333, | |
| "learning_rate": 4.997257741198456e-05, | |
| "loss": 0.6189, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.06763059701492537, | |
| "grad_norm": 0.6058703738937499, | |
| "learning_rate": 4.996334086602444e-05, | |
| "loss": 0.6036, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.06996268656716417, | |
| "grad_norm": 0.5426212351193616, | |
| "learning_rate": 4.9952767243121146e-05, | |
| "loss": 0.619, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.07229477611940298, | |
| "grad_norm": 0.5276707044683704, | |
| "learning_rate": 4.994085717264443e-05, | |
| "loss": 0.6042, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.07462686567164178, | |
| "grad_norm": 0.48342313898689165, | |
| "learning_rate": 4.992761136351291e-05, | |
| "loss": 0.6084, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.07695895522388059, | |
| "grad_norm": 0.43727574748121845, | |
| "learning_rate": 4.99130306041519e-05, | |
| "loss": 0.6086, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.07929104477611941, | |
| "grad_norm": 0.5139967015316651, | |
| "learning_rate": 4.989711576244639e-05, | |
| "loss": 0.5885, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.08162313432835822, | |
| "grad_norm": 0.5169853218760131, | |
| "learning_rate": 4.9879867785689506e-05, | |
| "loss": 0.5792, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.08395522388059702, | |
| "grad_norm": 0.4547424820277734, | |
| "learning_rate": 4.986128770052603e-05, | |
| "loss": 0.5979, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.08628731343283583, | |
| "grad_norm": 0.5278283023520385, | |
| "learning_rate": 4.9841376612891365e-05, | |
| "loss": 0.5921, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.08861940298507463, | |
| "grad_norm": 0.4592914703740021, | |
| "learning_rate": 4.9820135707945634e-05, | |
| "loss": 0.5895, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.09095149253731344, | |
| "grad_norm": 0.5604812331209644, | |
| "learning_rate": 4.97975662500032e-05, | |
| "loss": 0.6123, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.09328358208955224, | |
| "grad_norm": 0.4963710392742468, | |
| "learning_rate": 4.9773669582457364e-05, | |
| "loss": 0.5949, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.09561567164179105, | |
| "grad_norm": 0.5341113967257587, | |
| "learning_rate": 4.974844712770044e-05, | |
| "loss": 0.6115, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.09794776119402986, | |
| "grad_norm": 0.5356900218427062, | |
| "learning_rate": 4.972190038703905e-05, | |
| "loss": 0.6062, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.10027985074626866, | |
| "grad_norm": 0.48271662493768214, | |
| "learning_rate": 4.96940309406048e-05, | |
| "loss": 0.5994, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.10261194029850747, | |
| "grad_norm": 0.4656684615513963, | |
| "learning_rate": 4.966484044726024e-05, | |
| "loss": 0.5919, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.10494402985074627, | |
| "grad_norm": 0.45763728379025154, | |
| "learning_rate": 4.963433064450002e-05, | |
| "loss": 0.5897, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.10727611940298508, | |
| "grad_norm": 0.4790657281481012, | |
| "learning_rate": 4.9602503348347625e-05, | |
| "loss": 0.5823, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.10960820895522388, | |
| "grad_norm": 0.49515386205808626, | |
| "learning_rate": 4.956936045324716e-05, | |
| "loss": 0.5709, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.11194029850746269, | |
| "grad_norm": 0.5030427108835758, | |
| "learning_rate": 4.953490393195063e-05, | |
| "loss": 0.6161, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.1142723880597015, | |
| "grad_norm": 0.45710707251035176, | |
| "learning_rate": 4.9499135835400526e-05, | |
| "loss": 0.5983, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.1166044776119403, | |
| "grad_norm": 0.4321697797530169, | |
| "learning_rate": 4.9462058292607735e-05, | |
| "loss": 0.5831, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.11893656716417911, | |
| "grad_norm": 0.48082763546682483, | |
| "learning_rate": 4.942367351052481e-05, | |
| "loss": 0.5788, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.12126865671641791, | |
| "grad_norm": 0.4447073632938014, | |
| "learning_rate": 4.938398377391461e-05, | |
| "loss": 0.5882, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.12360074626865672, | |
| "grad_norm": 0.42261904193138444, | |
| "learning_rate": 4.9342991445214334e-05, | |
| "loss": 0.5844, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.1259328358208955, | |
| "grad_norm": 0.4455650933452113, | |
| "learning_rate": 4.930069896439485e-05, | |
| "loss": 0.5829, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.12826492537313433, | |
| "grad_norm": 0.4101033046902724, | |
| "learning_rate": 4.925710884881551e-05, | |
| "loss": 0.5796, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.13059701492537312, | |
| "grad_norm": 0.48901968042593585, | |
| "learning_rate": 4.921222369307427e-05, | |
| "loss": 0.5914, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.13292910447761194, | |
| "grad_norm": 0.4900379168491669, | |
| "learning_rate": 4.916604616885328e-05, | |
| "loss": 0.5865, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.13526119402985073, | |
| "grad_norm": 0.4823958325202437, | |
| "learning_rate": 4.9118579024759854e-05, | |
| "loss": 0.5901, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.13759328358208955, | |
| "grad_norm": 0.4431212546352274, | |
| "learning_rate": 4.9069825086162865e-05, | |
| "loss": 0.568, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.13992537313432835, | |
| "grad_norm": 0.43555586427340276, | |
| "learning_rate": 4.901978725502454e-05, | |
| "loss": 0.5751, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.14225746268656717, | |
| "grad_norm": 0.44959729986609015, | |
| "learning_rate": 4.89684685097278e-05, | |
| "loss": 0.5648, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.14458955223880596, | |
| "grad_norm": 0.4775655291361343, | |
| "learning_rate": 4.891587190489891e-05, | |
| "loss": 0.5876, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.14692164179104478, | |
| "grad_norm": 0.44367799195931545, | |
| "learning_rate": 4.886200057122568e-05, | |
| "loss": 0.59, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.14925373134328357, | |
| "grad_norm": 0.46351014448368566, | |
| "learning_rate": 4.880685771527114e-05, | |
| "loss": 0.578, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.1515858208955224, | |
| "grad_norm": 0.4000400178625835, | |
| "learning_rate": 4.8750446619282646e-05, | |
| "loss": 0.5782, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.15391791044776118, | |
| "grad_norm": 0.42361868610958603, | |
| "learning_rate": 4.869277064099654e-05, | |
| "loss": 0.5816, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.15625, | |
| "grad_norm": 0.41929276911015007, | |
| "learning_rate": 4.8633833213438254e-05, | |
| "loss": 0.5787, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.15858208955223882, | |
| "grad_norm": 0.4278081032230215, | |
| "learning_rate": 4.8573637844718e-05, | |
| "loss": 0.6021, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.1609141791044776, | |
| "grad_norm": 0.4435981265670742, | |
| "learning_rate": 4.851218811782195e-05, | |
| "loss": 0.5851, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.16324626865671643, | |
| "grad_norm": 0.4610464853647174, | |
| "learning_rate": 4.844948769039896e-05, | |
| "loss": 0.5843, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.16557835820895522, | |
| "grad_norm": 0.4990418525776248, | |
| "learning_rate": 4.8385540294542855e-05, | |
| "loss": 0.5777, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.16791044776119404, | |
| "grad_norm": 0.41390141315910106, | |
| "learning_rate": 4.83203497365703e-05, | |
| "loss": 0.5833, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.17024253731343283, | |
| "grad_norm": 0.42444589701648866, | |
| "learning_rate": 4.825391989679422e-05, | |
| "loss": 0.5779, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.17257462686567165, | |
| "grad_norm": 0.4048755132488697, | |
| "learning_rate": 4.818625472929286e-05, | |
| "loss": 0.5692, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.17490671641791045, | |
| "grad_norm": 0.44573994111066584, | |
| "learning_rate": 4.811735826167436e-05, | |
| "loss": 0.5605, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.17723880597014927, | |
| "grad_norm": 0.44870138659062336, | |
| "learning_rate": 4.8047234594837143e-05, | |
| "loss": 0.572, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.17957089552238806, | |
| "grad_norm": 0.44214350416809645, | |
| "learning_rate": 4.7975887902725696e-05, | |
| "loss": 0.5602, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.18190298507462688, | |
| "grad_norm": 0.4261029084271384, | |
| "learning_rate": 4.7903322432082185e-05, | |
| "loss": 0.5736, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.18423507462686567, | |
| "grad_norm": 0.36636721217653195, | |
| "learning_rate": 4.78295425021937e-05, | |
| "loss": 0.5547, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.1865671641791045, | |
| "grad_norm": 0.47458322491073895, | |
| "learning_rate": 4.775455250463507e-05, | |
| "loss": 0.5788, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.18889925373134328, | |
| "grad_norm": 0.42726396052859394, | |
| "learning_rate": 4.767835690300759e-05, | |
| "loss": 0.577, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.1912313432835821, | |
| "grad_norm": 0.4153229184095994, | |
| "learning_rate": 4.760096023267322e-05, | |
| "loss": 0.5827, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.1935634328358209, | |
| "grad_norm": 0.39439465691893505, | |
| "learning_rate": 4.752236710048472e-05, | |
| "loss": 0.5647, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.1958955223880597, | |
| "grad_norm": 0.47858737477245633, | |
| "learning_rate": 4.744258218451135e-05, | |
| "loss": 0.5787, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.1982276119402985, | |
| "grad_norm": 0.42698704619750955, | |
| "learning_rate": 4.736161023376051e-05, | |
| "loss": 0.5513, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.20055970149253732, | |
| "grad_norm": 0.43352489747133166, | |
| "learning_rate": 4.7279456067895e-05, | |
| "loss": 0.5879, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.20289179104477612, | |
| "grad_norm": 0.4366534779661185, | |
| "learning_rate": 4.719612457694616e-05, | |
| "loss": 0.5726, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.20522388059701493, | |
| "grad_norm": 0.41581245276436585, | |
| "learning_rate": 4.71116207210228e-05, | |
| "loss": 0.5577, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.20755597014925373, | |
| "grad_norm": 0.39278696980143335, | |
| "learning_rate": 4.7025949530016e-05, | |
| "loss": 0.5491, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.20988805970149255, | |
| "grad_norm": 0.4229385366750606, | |
| "learning_rate": 4.6939116103299655e-05, | |
| "loss": 0.5679, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.21222014925373134, | |
| "grad_norm": 0.43710561997855185, | |
| "learning_rate": 4.685112560942699e-05, | |
| "loss": 0.5661, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.21455223880597016, | |
| "grad_norm": 0.5250968554881381, | |
| "learning_rate": 4.676198328582288e-05, | |
| "loss": 0.5692, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.21688432835820895, | |
| "grad_norm": 0.3953624442644497, | |
| "learning_rate": 4.6671694438472154e-05, | |
| "loss": 0.5807, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.21921641791044777, | |
| "grad_norm": 0.40181633683171075, | |
| "learning_rate": 4.6580264441603724e-05, | |
| "loss": 0.5676, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.22154850746268656, | |
| "grad_norm": 0.44021613584779223, | |
| "learning_rate": 4.648769873737071e-05, | |
| "loss": 0.5733, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.22388059701492538, | |
| "grad_norm": 0.40102131427870424, | |
| "learning_rate": 4.6394002835526535e-05, | |
| "loss": 0.5731, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.22621268656716417, | |
| "grad_norm": 0.4266724170549558, | |
| "learning_rate": 4.6299182313096916e-05, | |
| "loss": 0.5909, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.228544776119403, | |
| "grad_norm": 0.4125686640546716, | |
| "learning_rate": 4.6203242814047946e-05, | |
| "loss": 0.5255, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.23087686567164178, | |
| "grad_norm": 0.4641709249366716, | |
| "learning_rate": 4.610619004895017e-05, | |
| "loss": 0.5711, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.2332089552238806, | |
| "grad_norm": 0.3968357488226857, | |
| "learning_rate": 4.6008029794638596e-05, | |
| "loss": 0.5452, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.2355410447761194, | |
| "grad_norm": 0.403217768357308, | |
| "learning_rate": 4.590876789386893e-05, | |
| "loss": 0.551, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.23787313432835822, | |
| "grad_norm": 0.4010193919066992, | |
| "learning_rate": 4.580841025496974e-05, | |
| "loss": 0.5595, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.240205223880597, | |
| "grad_norm": 0.41662390187739595, | |
| "learning_rate": 4.570696285149084e-05, | |
| "loss": 0.563, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.24253731343283583, | |
| "grad_norm": 0.36311719630385086, | |
| "learning_rate": 4.560443172184763e-05, | |
| "loss": 0.5652, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.24486940298507462, | |
| "grad_norm": 0.41407920938729176, | |
| "learning_rate": 4.55008229689618e-05, | |
| "loss": 0.5503, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.24720149253731344, | |
| "grad_norm": 0.4405772717510258, | |
| "learning_rate": 4.539614275989793e-05, | |
| "loss": 0.5569, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.24953358208955223, | |
| "grad_norm": 0.4201353925492403, | |
| "learning_rate": 4.529039732549653e-05, | |
| "loss": 0.5584, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.251865671641791, | |
| "grad_norm": 0.3945439648388656, | |
| "learning_rate": 4.5183592960003104e-05, | |
| "loss": 0.5454, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.25419776119402987, | |
| "grad_norm": 0.3904200336576666, | |
| "learning_rate": 4.507573602069351e-05, | |
| "loss": 0.559, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.25652985074626866, | |
| "grad_norm": 0.46072885500157207, | |
| "learning_rate": 4.496683292749555e-05, | |
| "loss": 0.5536, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.25886194029850745, | |
| "grad_norm": 0.42578053791450554, | |
| "learning_rate": 4.485689016260686e-05, | |
| "loss": 0.5469, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.26119402985074625, | |
| "grad_norm": 0.37965084505723456, | |
| "learning_rate": 4.4745914270109055e-05, | |
| "loss": 0.5719, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.2635261194029851, | |
| "grad_norm": 0.3871506155938371, | |
| "learning_rate": 4.463391185557822e-05, | |
| "loss": 0.5621, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.2658582089552239, | |
| "grad_norm": 0.34520167862600565, | |
| "learning_rate": 4.4520889585691705e-05, | |
| "loss": 0.5674, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.2681902985074627, | |
| "grad_norm": 0.3710748999904097, | |
| "learning_rate": 4.440685418783135e-05, | |
| "loss": 0.546, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.27052238805970147, | |
| "grad_norm": 0.3933601502845124, | |
| "learning_rate": 4.429181244968301e-05, | |
| "loss": 0.5513, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.2728544776119403, | |
| "grad_norm": 0.4185563085937644, | |
| "learning_rate": 4.417577121883256e-05, | |
| "loss": 0.5516, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.2751865671641791, | |
| "grad_norm": 0.4498108663647128, | |
| "learning_rate": 4.4058737402358295e-05, | |
| "loss": 0.5352, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.2775186567164179, | |
| "grad_norm": 0.4227714398109878, | |
| "learning_rate": 4.394071796641983e-05, | |
| "loss": 0.555, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.2798507462686567, | |
| "grad_norm": 0.5187752318511273, | |
| "learning_rate": 4.38217199358434e-05, | |
| "loss": 0.5638, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.28218283582089554, | |
| "grad_norm": 0.4010448762418649, | |
| "learning_rate": 4.3701750393703786e-05, | |
| "loss": 0.5313, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.28451492537313433, | |
| "grad_norm": 0.38989169644191773, | |
| "learning_rate": 4.3580816480902656e-05, | |
| "loss": 0.5557, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.2868470149253731, | |
| "grad_norm": 0.3551587559272834, | |
| "learning_rate": 4.345892539574359e-05, | |
| "loss": 0.5629, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.2891791044776119, | |
| "grad_norm": 0.3752271018747494, | |
| "learning_rate": 4.3336084393503545e-05, | |
| "loss": 0.5395, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.29151119402985076, | |
| "grad_norm": 0.3805402923746079, | |
| "learning_rate": 4.3212300786001045e-05, | |
| "loss": 0.5424, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.29384328358208955, | |
| "grad_norm": 0.3872910865097459, | |
| "learning_rate": 4.308758194116094e-05, | |
| "loss": 0.5652, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.29617537313432835, | |
| "grad_norm": 0.3596225684825542, | |
| "learning_rate": 4.296193528257586e-05, | |
| "loss": 0.569, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.29850746268656714, | |
| "grad_norm": 0.3602254845503326, | |
| "learning_rate": 4.283536828906436e-05, | |
| "loss": 0.5612, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.300839552238806, | |
| "grad_norm": 0.36726034960137754, | |
| "learning_rate": 4.270788849422572e-05, | |
| "loss": 0.5456, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.3031716417910448, | |
| "grad_norm": 0.42290471610215613, | |
| "learning_rate": 4.2579503485991567e-05, | |
| "loss": 0.554, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.30550373134328357, | |
| "grad_norm": 0.3619033441543854, | |
| "learning_rate": 4.245022090617418e-05, | |
| "loss": 0.5575, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.30783582089552236, | |
| "grad_norm": 0.37187893768522473, | |
| "learning_rate": 4.2320048450011684e-05, | |
| "loss": 0.5567, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.3101679104477612, | |
| "grad_norm": 0.3536084758242916, | |
| "learning_rate": 4.218899386570994e-05, | |
| "loss": 0.5407, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.3125, | |
| "grad_norm": 0.36137231978307244, | |
| "learning_rate": 4.205706495398143e-05, | |
| "loss": 0.5629, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.3148320895522388, | |
| "grad_norm": 0.3929037572466559, | |
| "learning_rate": 4.192426956758085e-05, | |
| "loss": 0.533, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.31716417910447764, | |
| "grad_norm": 0.3409013299580602, | |
| "learning_rate": 4.179061561083777e-05, | |
| "loss": 0.5423, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.31949626865671643, | |
| "grad_norm": 0.3875302006330962, | |
| "learning_rate": 4.165611103918612e-05, | |
| "loss": 0.5624, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.3218283582089552, | |
| "grad_norm": 0.4117426045707057, | |
| "learning_rate": 4.1520763858690644e-05, | |
| "loss": 0.5422, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.324160447761194, | |
| "grad_norm": 0.3822435014923177, | |
| "learning_rate": 4.138458212557038e-05, | |
| "loss": 0.5597, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.32649253731343286, | |
| "grad_norm": 0.40859751039743775, | |
| "learning_rate": 4.124757394571914e-05, | |
| "loss": 0.5375, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.32882462686567165, | |
| "grad_norm": 0.35376270605093485, | |
| "learning_rate": 4.110974747422299e-05, | |
| "loss": 0.5441, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.33115671641791045, | |
| "grad_norm": 0.40517007949766226, | |
| "learning_rate": 4.097111091487486e-05, | |
| "loss": 0.5404, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.33348880597014924, | |
| "grad_norm": 0.40133799439007284, | |
| "learning_rate": 4.083167251968625e-05, | |
| "loss": 0.567, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.3358208955223881, | |
| "grad_norm": 0.40796007686855523, | |
| "learning_rate": 4.069144058839605e-05, | |
| "loss": 0.5536, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.3381529850746269, | |
| "grad_norm": 0.3586891053467589, | |
| "learning_rate": 4.055042346797643e-05, | |
| "loss": 0.5491, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.34048507462686567, | |
| "grad_norm": 0.38971100356762045, | |
| "learning_rate": 4.040862955213615e-05, | |
| "loss": 0.5595, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.34281716417910446, | |
| "grad_norm": 0.3663608610076594, | |
| "learning_rate": 4.026606728082082e-05, | |
| "loss": 0.5483, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.3451492537313433, | |
| "grad_norm": 0.35456132002369006, | |
| "learning_rate": 4.012274513971061e-05, | |
| "loss": 0.5642, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.3474813432835821, | |
| "grad_norm": 0.3685315896394436, | |
| "learning_rate": 3.997867165971512e-05, | |
| "loss": 0.5382, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.3498134328358209, | |
| "grad_norm": 0.36917897331690136, | |
| "learning_rate": 3.9833855416465624e-05, | |
| "loss": 0.5493, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.3521455223880597, | |
| "grad_norm": 0.40099943476775346, | |
| "learning_rate": 3.968830502980459e-05, | |
| "loss": 0.544, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.35447761194029853, | |
| "grad_norm": 0.38203410425672707, | |
| "learning_rate": 3.954202916327264e-05, | |
| "loss": 0.5414, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.3568097014925373, | |
| "grad_norm": 0.3649022039094651, | |
| "learning_rate": 3.939503652359287e-05, | |
| "loss": 0.5428, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.3591417910447761, | |
| "grad_norm": 0.39137773057483016, | |
| "learning_rate": 3.924733586015257e-05, | |
| "loss": 0.5394, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.3614738805970149, | |
| "grad_norm": 0.35114090355932165, | |
| "learning_rate": 3.9098935964482476e-05, | |
| "loss": 0.5411, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.36380597014925375, | |
| "grad_norm": 0.3473596414558861, | |
| "learning_rate": 3.894984566973346e-05, | |
| "loss": 0.549, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.36613805970149255, | |
| "grad_norm": 0.34411846026118503, | |
| "learning_rate": 3.880007385015075e-05, | |
| "loss": 0.5382, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.36847014925373134, | |
| "grad_norm": 0.3389415203112235, | |
| "learning_rate": 3.864962942054572e-05, | |
| "loss": 0.5639, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.37080223880597013, | |
| "grad_norm": 0.351582240594775, | |
| "learning_rate": 3.849852133576527e-05, | |
| "loss": 0.5304, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.373134328358209, | |
| "grad_norm": 0.36495268124322017, | |
| "learning_rate": 3.834675859015876e-05, | |
| "loss": 0.5549, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.37546641791044777, | |
| "grad_norm": 0.3896220021553757, | |
| "learning_rate": 3.819435021704274e-05, | |
| "loss": 0.5446, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.37779850746268656, | |
| "grad_norm": 0.45143305097848657, | |
| "learning_rate": 3.804130528816312e-05, | |
| "loss": 0.5457, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.38013059701492535, | |
| "grad_norm": 0.3513699985563104, | |
| "learning_rate": 3.7887632913155355e-05, | |
| "loss": 0.5344, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.3824626865671642, | |
| "grad_norm": 0.35958918118902816, | |
| "learning_rate": 3.77333422390021e-05, | |
| "loss": 0.5267, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.384794776119403, | |
| "grad_norm": 0.3871250664530478, | |
| "learning_rate": 3.75784424494888e-05, | |
| "loss": 0.5364, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.3871268656716418, | |
| "grad_norm": 0.3627174602408319, | |
| "learning_rate": 3.7422942764657054e-05, | |
| "loss": 0.5472, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.3894589552238806, | |
| "grad_norm": 0.37549355388736905, | |
| "learning_rate": 3.726685244025578e-05, | |
| "loss": 0.5503, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.3917910447761194, | |
| "grad_norm": 0.4381992071243207, | |
| "learning_rate": 3.711018076719034e-05, | |
| "loss": 0.5438, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.3941231343283582, | |
| "grad_norm": 0.36668027314474627, | |
| "learning_rate": 3.695293707096947e-05, | |
| "loss": 0.5537, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.396455223880597, | |
| "grad_norm": 0.3806449160418856, | |
| "learning_rate": 3.679513071115025e-05, | |
| "loss": 0.5461, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.3987873134328358, | |
| "grad_norm": 0.35429888945629073, | |
| "learning_rate": 3.663677108078094e-05, | |
| "loss": 0.551, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.40111940298507465, | |
| "grad_norm": 0.3558912409357651, | |
| "learning_rate": 3.647786760584194e-05, | |
| "loss": 0.5434, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.40345149253731344, | |
| "grad_norm": 0.3527211756527707, | |
| "learning_rate": 3.6318429744684676e-05, | |
| "loss": 0.5545, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.40578358208955223, | |
| "grad_norm": 0.3656847568112223, | |
| "learning_rate": 3.615846698746869e-05, | |
| "loss": 0.5468, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.408115671641791, | |
| "grad_norm": 0.383186699265511, | |
| "learning_rate": 3.599798885559667e-05, | |
| "loss": 0.5589, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.41044776119402987, | |
| "grad_norm": 0.37392311284389557, | |
| "learning_rate": 3.583700490114776e-05, | |
| "loss": 0.5273, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.41277985074626866, | |
| "grad_norm": 0.3635317613741542, | |
| "learning_rate": 3.5675524706309014e-05, | |
| "loss": 0.5273, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.41511194029850745, | |
| "grad_norm": 0.38305068761455124, | |
| "learning_rate": 3.5513557882805e-05, | |
| "loss": 0.5559, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.41744402985074625, | |
| "grad_norm": 0.3870612561938992, | |
| "learning_rate": 3.5351114071325696e-05, | |
| "loss": 0.5422, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.4197761194029851, | |
| "grad_norm": 0.38347742537836593, | |
| "learning_rate": 3.518820294095267e-05, | |
| "loss": 0.5446, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.4221082089552239, | |
| "grad_norm": 0.35092427031052703, | |
| "learning_rate": 3.50248341885835e-05, | |
| "loss": 0.5326, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.4244402985074627, | |
| "grad_norm": 0.3561353515285881, | |
| "learning_rate": 3.486101753835468e-05, | |
| "loss": 0.5403, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.42677238805970147, | |
| "grad_norm": 0.3085374698690792, | |
| "learning_rate": 3.469676274106271e-05, | |
| "loss": 0.5315, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.4291044776119403, | |
| "grad_norm": 0.33075051926007265, | |
| "learning_rate": 3.453207957358377e-05, | |
| "loss": 0.5337, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.4314365671641791, | |
| "grad_norm": 0.3515295495723763, | |
| "learning_rate": 3.436697783829178e-05, | |
| "loss": 0.5304, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.4337686567164179, | |
| "grad_norm": 0.3132788673571813, | |
| "learning_rate": 3.420146736247487e-05, | |
| "loss": 0.5161, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.4361007462686567, | |
| "grad_norm": 0.3739603265493346, | |
| "learning_rate": 3.4035557997750506e-05, | |
| "loss": 0.5497, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.43843283582089554, | |
| "grad_norm": 0.34545120023484394, | |
| "learning_rate": 3.386925961947906e-05, | |
| "loss": 0.5439, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.44076492537313433, | |
| "grad_norm": 0.3757040378506225, | |
| "learning_rate": 3.370258212617602e-05, | |
| "loss": 0.5588, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.4430970149253731, | |
| "grad_norm": 0.3402839804931963, | |
| "learning_rate": 3.353553543892277e-05, | |
| "loss": 0.5335, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.4454291044776119, | |
| "grad_norm": 0.33230952626406335, | |
| "learning_rate": 3.336812950077611e-05, | |
| "loss": 0.5388, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.44776119402985076, | |
| "grad_norm": 0.35261211656761277, | |
| "learning_rate": 3.320037427617639e-05, | |
| "loss": 0.5375, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.45009328358208955, | |
| "grad_norm": 0.3599414963399435, | |
| "learning_rate": 3.30322797503544e-05, | |
| "loss": 0.547, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.45242537313432835, | |
| "grad_norm": 0.35470640352572197, | |
| "learning_rate": 3.2863855928737026e-05, | |
| "loss": 0.5199, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.45475746268656714, | |
| "grad_norm": 0.3452568269058605, | |
| "learning_rate": 3.2695112836351703e-05, | |
| "loss": 0.5382, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.457089552238806, | |
| "grad_norm": 0.3362066514765104, | |
| "learning_rate": 3.252606051722972e-05, | |
| "loss": 0.5563, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.4594216417910448, | |
| "grad_norm": 0.34701565335688916, | |
| "learning_rate": 3.235670903380832e-05, | |
| "loss": 0.532, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.46175373134328357, | |
| "grad_norm": 0.3518883926304785, | |
| "learning_rate": 3.218706846633183e-05, | |
| "loss": 0.5305, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.46408582089552236, | |
| "grad_norm": 0.33370892763193366, | |
| "learning_rate": 3.201714891225156e-05, | |
| "loss": 0.5288, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.4664179104477612, | |
| "grad_norm": 0.3265309643482067, | |
| "learning_rate": 3.1846960485624886e-05, | |
| "loss": 0.5261, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.46875, | |
| "grad_norm": 0.3428336788771024, | |
| "learning_rate": 3.1676513316513156e-05, | |
| "loss": 0.546, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.4710820895522388, | |
| "grad_norm": 0.32497034232497185, | |
| "learning_rate": 3.150581755037877e-05, | |
| "loss": 0.5327, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.47341417910447764, | |
| "grad_norm": 0.32932560895962015, | |
| "learning_rate": 3.133488334748125e-05, | |
| "loss": 0.542, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.47574626865671643, | |
| "grad_norm": 0.34317416669371287, | |
| "learning_rate": 3.1163720882272516e-05, | |
| "loss": 0.5257, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.4780783582089552, | |
| "grad_norm": 0.33195906720998813, | |
| "learning_rate": 3.0992340342791246e-05, | |
| "loss": 0.5362, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.480410447761194, | |
| "grad_norm": 0.3234621303310154, | |
| "learning_rate": 3.08207519300565e-05, | |
| "loss": 0.5298, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.48274253731343286, | |
| "grad_norm": 0.3439747189931589, | |
| "learning_rate": 3.064896585746045e-05, | |
| "loss": 0.5216, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.48507462686567165, | |
| "grad_norm": 0.3225618210162447, | |
| "learning_rate": 3.047699235016056e-05, | |
| "loss": 0.5271, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.48740671641791045, | |
| "grad_norm": 0.4481757291622055, | |
| "learning_rate": 3.030484164447085e-05, | |
| "loss": 0.5185, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.48973880597014924, | |
| "grad_norm": 0.34946082293471153, | |
| "learning_rate": 3.0132523987252658e-05, | |
| "loss": 0.5363, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.4920708955223881, | |
| "grad_norm": 0.3337244432348151, | |
| "learning_rate": 2.9960049635304755e-05, | |
| "loss": 0.5128, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.4944029850746269, | |
| "grad_norm": 0.33744807849843744, | |
| "learning_rate": 2.9787428854752736e-05, | |
| "loss": 0.5225, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.49673507462686567, | |
| "grad_norm": 0.3589752854354772, | |
| "learning_rate": 2.961467192043807e-05, | |
| "loss": 0.5323, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.49906716417910446, | |
| "grad_norm": 0.3680177579706306, | |
| "learning_rate": 2.9441789115306402e-05, | |
| "loss": 0.5453, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.5013992537313433, | |
| "grad_norm": 0.39008642388483844, | |
| "learning_rate": 2.926879072979558e-05, | |
| "loss": 0.5363, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.503731343283582, | |
| "grad_norm": 0.35182913635444735, | |
| "learning_rate": 2.9095687061223058e-05, | |
| "loss": 0.5345, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.5060634328358209, | |
| "grad_norm": 0.3687130437242019, | |
| "learning_rate": 2.8922488413173053e-05, | |
| "loss": 0.5326, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.5083955223880597, | |
| "grad_norm": 0.3896087742442836, | |
| "learning_rate": 2.874920509488319e-05, | |
| "loss": 0.5439, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.5107276119402985, | |
| "grad_norm": 0.35004354808221283, | |
| "learning_rate": 2.8575847420630887e-05, | |
| "loss": 0.5215, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.5130597014925373, | |
| "grad_norm": 0.34400548959145894, | |
| "learning_rate": 2.8402425709119435e-05, | |
| "loss": 0.5449, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.5153917910447762, | |
| "grad_norm": 0.34279814513241086, | |
| "learning_rate": 2.8228950282863776e-05, | |
| "loss": 0.5352, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.5177238805970149, | |
| "grad_norm": 0.33066562290386337, | |
| "learning_rate": 2.8055431467576106e-05, | |
| "loss": 0.5277, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.5200559701492538, | |
| "grad_norm": 0.36541005581441127, | |
| "learning_rate": 2.788187959155124e-05, | |
| "loss": 0.5208, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.5223880597014925, | |
| "grad_norm": 0.31819346746550703, | |
| "learning_rate": 2.7708304985051868e-05, | |
| "loss": 0.5477, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.5247201492537313, | |
| "grad_norm": 0.3463253117162502, | |
| "learning_rate": 2.7534717979693647e-05, | |
| "loss": 0.5243, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.5270522388059702, | |
| "grad_norm": 0.32466560863468996, | |
| "learning_rate": 2.7361128907830253e-05, | |
| "loss": 0.5266, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.5293843283582089, | |
| "grad_norm": 0.32661440076143755, | |
| "learning_rate": 2.7187548101938353e-05, | |
| "loss": 0.5166, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.5317164179104478, | |
| "grad_norm": 0.35797548941766444, | |
| "learning_rate": 2.7013985894002623e-05, | |
| "loss": 0.5153, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.5340485074626866, | |
| "grad_norm": 0.3747723691950687, | |
| "learning_rate": 2.6840452614900726e-05, | |
| "loss": 0.54, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.5363805970149254, | |
| "grad_norm": 0.31227276183327857, | |
| "learning_rate": 2.6666958593788405e-05, | |
| "loss": 0.5214, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.5387126865671642, | |
| "grad_norm": 0.32028332057260017, | |
| "learning_rate": 2.649351415748466e-05, | |
| "loss": 0.5385, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.5410447761194029, | |
| "grad_norm": 0.31721311054218543, | |
| "learning_rate": 2.6320129629857093e-05, | |
| "loss": 0.5316, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.5433768656716418, | |
| "grad_norm": 0.32367205608069305, | |
| "learning_rate": 2.6146815331207358e-05, | |
| "loss": 0.5105, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.5457089552238806, | |
| "grad_norm": 0.32785600244724733, | |
| "learning_rate": 2.597358157765692e-05, | |
| "loss": 0.5349, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.5480410447761194, | |
| "grad_norm": 0.3418244281371759, | |
| "learning_rate": 2.5800438680532974e-05, | |
| "loss": 0.5193, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.5503731343283582, | |
| "grad_norm": 0.31552567017064354, | |
| "learning_rate": 2.56273969457547e-05, | |
| "loss": 0.5206, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.5527052238805971, | |
| "grad_norm": 0.3397428895495846, | |
| "learning_rate": 2.545446667321984e-05, | |
| "loss": 0.5379, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.5550373134328358, | |
| "grad_norm": 0.3286049050973758, | |
| "learning_rate": 2.528165815619162e-05, | |
| "loss": 0.5222, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.5573694029850746, | |
| "grad_norm": 0.3331787192546376, | |
| "learning_rate": 2.5108981680686035e-05, | |
| "loss": 0.5255, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.5597014925373134, | |
| "grad_norm": 0.3311603874036119, | |
| "learning_rate": 2.4936447524859625e-05, | |
| "loss": 0.5168, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.5620335820895522, | |
| "grad_norm": 0.3303918399172089, | |
| "learning_rate": 2.4764065958397715e-05, | |
| "loss": 0.5174, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.5643656716417911, | |
| "grad_norm": 0.33363993303401823, | |
| "learning_rate": 2.459184724190308e-05, | |
| "loss": 0.5097, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.5666977611940298, | |
| "grad_norm": 0.3338211217947914, | |
| "learning_rate": 2.441980162628527e-05, | |
| "loss": 0.5226, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.5690298507462687, | |
| "grad_norm": 0.3505424652287924, | |
| "learning_rate": 2.4247939352150386e-05, | |
| "loss": 0.5325, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.5713619402985075, | |
| "grad_norm": 0.34944994698597065, | |
| "learning_rate": 2.4076270649191573e-05, | |
| "loss": 0.5409, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.5736940298507462, | |
| "grad_norm": 0.32340035702891917, | |
| "learning_rate": 2.390480573558012e-05, | |
| "loss": 0.5273, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.5760261194029851, | |
| "grad_norm": 0.31938992678180467, | |
| "learning_rate": 2.3733554817357246e-05, | |
| "loss": 0.5244, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.5783582089552238, | |
| "grad_norm": 0.3166139465080639, | |
| "learning_rate": 2.3562528087826573e-05, | |
| "loss": 0.5131, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.5806902985074627, | |
| "grad_norm": 0.3487335569590694, | |
| "learning_rate": 2.339173572694746e-05, | |
| "loss": 0.5192, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.5830223880597015, | |
| "grad_norm": 0.30683594258607133, | |
| "learning_rate": 2.3221187900729003e-05, | |
| "loss": 0.5262, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.5853544776119403, | |
| "grad_norm": 0.3575092291210576, | |
| "learning_rate": 2.3050894760624982e-05, | |
| "loss": 0.5305, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.5876865671641791, | |
| "grad_norm": 0.34137275146006896, | |
| "learning_rate": 2.2880866442929544e-05, | |
| "loss": 0.5171, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.590018656716418, | |
| "grad_norm": 0.3314783873014681, | |
| "learning_rate": 2.271111306817396e-05, | |
| "loss": 0.5362, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 0.5923507462686567, | |
| "grad_norm": 0.3160634415556086, | |
| "learning_rate": 2.254164474052416e-05, | |
| "loss": 0.5177, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.5946828358208955, | |
| "grad_norm": 0.3014157107337593, | |
| "learning_rate": 2.237247154717932e-05, | |
| "loss": 0.5112, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 0.5970149253731343, | |
| "grad_norm": 0.3291310606063492, | |
| "learning_rate": 2.2203603557771447e-05, | |
| "loss": 0.5116, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.5993470149253731, | |
| "grad_norm": 0.32560419347412506, | |
| "learning_rate": 2.2035050823766008e-05, | |
| "loss": 0.5212, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 0.601679104477612, | |
| "grad_norm": 0.3472024703269534, | |
| "learning_rate": 2.186682337786365e-05, | |
| "loss": 0.526, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.6040111940298507, | |
| "grad_norm": 0.30442706575237044, | |
| "learning_rate": 2.1698931233403013e-05, | |
| "loss": 0.5089, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 0.6063432835820896, | |
| "grad_norm": 0.33335137810186144, | |
| "learning_rate": 2.153138438376473e-05, | |
| "loss": 0.5131, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.6086753731343284, | |
| "grad_norm": 0.3210060843108937, | |
| "learning_rate": 2.136419280177655e-05, | |
| "loss": 0.5089, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 0.6110074626865671, | |
| "grad_norm": 0.3177567286650887, | |
| "learning_rate": 2.119736643911979e-05, | |
| "loss": 0.5301, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.613339552238806, | |
| "grad_norm": 0.2990196515604672, | |
| "learning_rate": 2.1030915225736947e-05, | |
| "loss": 0.52, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 0.6156716417910447, | |
| "grad_norm": 0.3276526936140588, | |
| "learning_rate": 2.0864849069240645e-05, | |
| "loss": 0.5268, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.6180037313432836, | |
| "grad_norm": 0.2971098930268874, | |
| "learning_rate": 2.0699177854323902e-05, | |
| "loss": 0.5203, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 0.6203358208955224, | |
| "grad_norm": 0.30828873420690545, | |
| "learning_rate": 2.0533911442171805e-05, | |
| "loss": 0.5181, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.6226679104477612, | |
| "grad_norm": 0.32910014680345584, | |
| "learning_rate": 2.036905966987449e-05, | |
| "loss": 0.5239, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 0.625, | |
| "grad_norm": 0.3169543755211694, | |
| "learning_rate": 2.0204632349841667e-05, | |
| "loss": 0.509, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.6273320895522388, | |
| "grad_norm": 0.2992926255801442, | |
| "learning_rate": 2.0040639269218532e-05, | |
| "loss": 0.5133, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 0.6296641791044776, | |
| "grad_norm": 0.3356585986496801, | |
| "learning_rate": 1.9877090189303182e-05, | |
| "loss": 0.534, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.6319962686567164, | |
| "grad_norm": 0.3119369573784347, | |
| "learning_rate": 1.9713994844965657e-05, | |
| "loss": 0.512, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 0.6343283582089553, | |
| "grad_norm": 0.3135149938329128, | |
| "learning_rate": 1.9551362944068462e-05, | |
| "loss": 0.5212, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.636660447761194, | |
| "grad_norm": 0.3294648855188193, | |
| "learning_rate": 1.938920416688874e-05, | |
| "loss": 0.5193, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 0.6389925373134329, | |
| "grad_norm": 0.32539933928427905, | |
| "learning_rate": 1.922752816554204e-05, | |
| "loss": 0.5125, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.6413246268656716, | |
| "grad_norm": 0.31904607968778304, | |
| "learning_rate": 1.9066344563407856e-05, | |
| "loss": 0.5208, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 0.6436567164179104, | |
| "grad_norm": 0.3111959523904429, | |
| "learning_rate": 1.890566295455678e-05, | |
| "loss": 0.4994, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.6459888059701493, | |
| "grad_norm": 0.3275382050930323, | |
| "learning_rate": 1.874549290317946e-05, | |
| "loss": 0.5024, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 0.648320895522388, | |
| "grad_norm": 0.31189886724164534, | |
| "learning_rate": 1.858584394301728e-05, | |
| "loss": 0.5262, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.6506529850746269, | |
| "grad_norm": 0.342658228039591, | |
| "learning_rate": 1.8426725576794918e-05, | |
| "loss": 0.5429, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 0.6529850746268657, | |
| "grad_norm": 0.32330617361285674, | |
| "learning_rate": 1.8268147275654707e-05, | |
| "loss": 0.5189, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.6553171641791045, | |
| "grad_norm": 0.29687021132266317, | |
| "learning_rate": 1.8110118478592915e-05, | |
| "loss": 0.5065, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 0.6576492537313433, | |
| "grad_norm": 0.30079266830219414, | |
| "learning_rate": 1.7952648591897858e-05, | |
| "loss": 0.5206, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.659981343283582, | |
| "grad_norm": 0.3096917346984741, | |
| "learning_rate": 1.7795746988590027e-05, | |
| "loss": 0.5098, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 0.6623134328358209, | |
| "grad_norm": 0.3241152516622816, | |
| "learning_rate": 1.7639423007864252e-05, | |
| "loss": 0.5017, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.6646455223880597, | |
| "grad_norm": 0.30514910474437695, | |
| "learning_rate": 1.7483685954533692e-05, | |
| "loss": 0.5108, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 0.6669776119402985, | |
| "grad_norm": 0.32357387534514104, | |
| "learning_rate": 1.7328545098476106e-05, | |
| "loss": 0.5075, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.6693097014925373, | |
| "grad_norm": 0.3125919347018114, | |
| "learning_rate": 1.717400967408196e-05, | |
| "loss": 0.5129, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 0.6716417910447762, | |
| "grad_norm": 0.3017747173268348, | |
| "learning_rate": 1.702008887970491e-05, | |
| "loss": 0.51, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.6739738805970149, | |
| "grad_norm": 0.3038952541237383, | |
| "learning_rate": 1.6866791877114165e-05, | |
| "loss": 0.5165, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 0.6763059701492538, | |
| "grad_norm": 0.301385951224946, | |
| "learning_rate": 1.671412779094926e-05, | |
| "loss": 0.5137, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.6786380597014925, | |
| "grad_norm": 0.29572393831965715, | |
| "learning_rate": 1.656210570817685e-05, | |
| "loss": 0.5145, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 0.6809701492537313, | |
| "grad_norm": 0.3342752067990135, | |
| "learning_rate": 1.6410734677549872e-05, | |
| "loss": 0.508, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.6833022388059702, | |
| "grad_norm": 0.3453403341497158, | |
| "learning_rate": 1.6260023709068932e-05, | |
| "loss": 0.5146, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 0.6856343283582089, | |
| "grad_norm": 0.3171036782377084, | |
| "learning_rate": 1.6109981773446036e-05, | |
| "loss": 0.5102, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.6879664179104478, | |
| "grad_norm": 0.29917424397521875, | |
| "learning_rate": 1.5960617801570555e-05, | |
| "loss": 0.5086, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 0.6902985074626866, | |
| "grad_norm": 0.30931917309200585, | |
| "learning_rate": 1.58119406839777e-05, | |
| "loss": 0.5131, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.6926305970149254, | |
| "grad_norm": 0.29550887254843045, | |
| "learning_rate": 1.566395927031932e-05, | |
| "loss": 0.5029, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 0.6949626865671642, | |
| "grad_norm": 0.30550523923571943, | |
| "learning_rate": 1.5516682368837133e-05, | |
| "loss": 0.5096, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.6972947761194029, | |
| "grad_norm": 0.31911694408903835, | |
| "learning_rate": 1.5370118745838453e-05, | |
| "loss": 0.5287, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 0.6996268656716418, | |
| "grad_norm": 0.3050301639561124, | |
| "learning_rate": 1.5224277125174388e-05, | |
| "loss": 0.5162, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.7019589552238806, | |
| "grad_norm": 0.3041739601013223, | |
| "learning_rate": 1.5079166187720561e-05, | |
| "loss": 0.5148, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 0.7042910447761194, | |
| "grad_norm": 0.32709042336575056, | |
| "learning_rate": 1.4934794570860416e-05, | |
| "loss": 0.5314, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.7066231343283582, | |
| "grad_norm": 0.3271425613420151, | |
| "learning_rate": 1.4791170867971132e-05, | |
| "loss": 0.5096, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 0.7089552238805971, | |
| "grad_norm": 0.31173665895625186, | |
| "learning_rate": 1.464830362791204e-05, | |
| "loss": 0.5089, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.7112873134328358, | |
| "grad_norm": 0.31280945225700124, | |
| "learning_rate": 1.450620135451585e-05, | |
| "loss": 0.5141, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 0.7136194029850746, | |
| "grad_norm": 0.3029633492207749, | |
| "learning_rate": 1.4364872506082425e-05, | |
| "loss": 0.5206, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.7159514925373134, | |
| "grad_norm": 0.3120512711303495, | |
| "learning_rate": 1.4224325494875385e-05, | |
| "loss": 0.5288, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 0.7182835820895522, | |
| "grad_norm": 0.31204261621369056, | |
| "learning_rate": 1.4084568686621314e-05, | |
| "loss": 0.5117, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.7206156716417911, | |
| "grad_norm": 0.30972458717271706, | |
| "learning_rate": 1.3945610400011851e-05, | |
| "loss": 0.5234, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 0.7229477611940298, | |
| "grad_norm": 0.3038198863306939, | |
| "learning_rate": 1.3807458906208546e-05, | |
| "loss": 0.5309, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.7252798507462687, | |
| "grad_norm": 0.30351276635634267, | |
| "learning_rate": 1.3670122428350521e-05, | |
| "loss": 0.5096, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 0.7276119402985075, | |
| "grad_norm": 0.3009416107835106, | |
| "learning_rate": 1.3533609141065008e-05, | |
| "loss": 0.5037, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.7299440298507462, | |
| "grad_norm": 0.3115101175293925, | |
| "learning_rate": 1.3397927169980773e-05, | |
| "loss": 0.5092, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 0.7322761194029851, | |
| "grad_norm": 0.3050023046127322, | |
| "learning_rate": 1.326308459124447e-05, | |
| "loss": 0.5093, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.7346082089552238, | |
| "grad_norm": 0.31586860196893074, | |
| "learning_rate": 1.3129089431039931e-05, | |
| "loss": 0.5122, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 0.7369402985074627, | |
| "grad_norm": 0.31512946760011284, | |
| "learning_rate": 1.299594966511038e-05, | |
| "loss": 0.5238, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.7392723880597015, | |
| "grad_norm": 0.31489040782644456, | |
| "learning_rate": 1.2863673218283783e-05, | |
| "loss": 0.5091, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 0.7416044776119403, | |
| "grad_norm": 0.3190396380815969, | |
| "learning_rate": 1.2732267964001033e-05, | |
| "loss": 0.503, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.7439365671641791, | |
| "grad_norm": 0.31926216711658545, | |
| "learning_rate": 1.26017417238474e-05, | |
| "loss": 0.5138, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 0.746268656716418, | |
| "grad_norm": 0.30703349595902396, | |
| "learning_rate": 1.2472102267086904e-05, | |
| "loss": 0.5111, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.7486007462686567, | |
| "grad_norm": 0.3303603139088595, | |
| "learning_rate": 1.2343357310199925e-05, | |
| "loss": 0.5283, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 0.7509328358208955, | |
| "grad_norm": 0.32035527267812985, | |
| "learning_rate": 1.2215514516423813e-05, | |
| "loss": 0.5248, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.7532649253731343, | |
| "grad_norm": 0.2960384532117532, | |
| "learning_rate": 1.2088581495296852e-05, | |
| "loss": 0.5056, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 0.7555970149253731, | |
| "grad_norm": 0.2951510172831845, | |
| "learning_rate": 1.1962565802205255e-05, | |
| "loss": 0.505, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.757929104477612, | |
| "grad_norm": 0.29152330525669484, | |
| "learning_rate": 1.1837474937933464e-05, | |
| "loss": 0.5124, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 0.7602611940298507, | |
| "grad_norm": 0.29251949876960087, | |
| "learning_rate": 1.1713316348217673e-05, | |
| "loss": 0.5077, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.7625932835820896, | |
| "grad_norm": 0.29528274341042277, | |
| "learning_rate": 1.1590097423302684e-05, | |
| "loss": 0.4907, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 0.7649253731343284, | |
| "grad_norm": 0.30185100341646487, | |
| "learning_rate": 1.1467825497501954e-05, | |
| "loss": 0.4986, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.7672574626865671, | |
| "grad_norm": 0.32878665265624396, | |
| "learning_rate": 1.1346507848761077e-05, | |
| "loss": 0.5096, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 0.769589552238806, | |
| "grad_norm": 0.33863639658890127, | |
| "learning_rate": 1.1226151698224597e-05, | |
| "loss": 0.5211, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.7719216417910447, | |
| "grad_norm": 0.2962877335836816, | |
| "learning_rate": 1.1106764209806127e-05, | |
| "loss": 0.5125, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 0.7742537313432836, | |
| "grad_norm": 0.3158878505358477, | |
| "learning_rate": 1.0988352489762006e-05, | |
| "loss": 0.5259, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.7765858208955224, | |
| "grad_norm": 0.2881382914118729, | |
| "learning_rate": 1.0870923586268245e-05, | |
| "loss": 0.5075, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 0.7789179104477612, | |
| "grad_norm": 0.31719158675261994, | |
| "learning_rate": 1.0754484489001085e-05, | |
| "loss": 0.5112, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.78125, | |
| "grad_norm": 0.30964447847533355, | |
| "learning_rate": 1.0639042128720847e-05, | |
| "loss": 0.517, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 0.7835820895522388, | |
| "grad_norm": 0.3001723609524999, | |
| "learning_rate": 1.052460337685951e-05, | |
| "loss": 0.4999, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.7859141791044776, | |
| "grad_norm": 0.29546723704143385, | |
| "learning_rate": 1.0411175045111602e-05, | |
| "loss": 0.4981, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 0.7882462686567164, | |
| "grad_norm": 0.3075763476131655, | |
| "learning_rate": 1.0298763885028839e-05, | |
| "loss": 0.5112, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.7905783582089553, | |
| "grad_norm": 0.29591361609011235, | |
| "learning_rate": 1.018737658761817e-05, | |
| "loss": 0.5044, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.792910447761194, | |
| "grad_norm": 0.2933462196983581, | |
| "learning_rate": 1.0077019782943584e-05, | |
| "loss": 0.5151, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.7952425373134329, | |
| "grad_norm": 0.3018435924556085, | |
| "learning_rate": 9.967700039731427e-06, | |
| "loss": 0.5062, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.7975746268656716, | |
| "grad_norm": 0.28856623086950894, | |
| "learning_rate": 9.859423864979441e-06, | |
| "loss": 0.491, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.7999067164179104, | |
| "grad_norm": 0.29990971775965447, | |
| "learning_rate": 9.752197703569422e-06, | |
| "loss": 0.5248, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.8022388059701493, | |
| "grad_norm": 0.3031154798133516, | |
| "learning_rate": 9.646027937883622e-06, | |
| "loss": 0.5099, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.804570895522388, | |
| "grad_norm": 0.29656753451458473, | |
| "learning_rate": 9.54092088742485e-06, | |
| "loss": 0.5278, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.8069029850746269, | |
| "grad_norm": 0.3087030815128696, | |
| "learning_rate": 9.436882808440334e-06, | |
| "loss": 0.5091, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.8092350746268657, | |
| "grad_norm": 0.30044194171207367, | |
| "learning_rate": 9.333919893549294e-06, | |
| "loss": 0.5049, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.8115671641791045, | |
| "grad_norm": 0.30610765770429715, | |
| "learning_rate": 9.232038271374377e-06, | |
| "loss": 0.4965, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.8138992537313433, | |
| "grad_norm": 0.3174108731530412, | |
| "learning_rate": 9.131244006176846e-06, | |
| "loss": 0.5118, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.816231343283582, | |
| "grad_norm": 0.30939560544024536, | |
| "learning_rate": 9.031543097495638e-06, | |
| "loss": 0.4984, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.8185634328358209, | |
| "grad_norm": 0.29094442523658076, | |
| "learning_rate": 8.93294147979023e-06, | |
| "loss": 0.4945, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.8208955223880597, | |
| "grad_norm": 0.2928095193861828, | |
| "learning_rate": 8.835445022087426e-06, | |
| "loss": 0.5115, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.8232276119402985, | |
| "grad_norm": 0.28058171311068625, | |
| "learning_rate": 8.739059527631999e-06, | |
| "loss": 0.5119, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.8255597014925373, | |
| "grad_norm": 0.3094890046076664, | |
| "learning_rate": 8.6437907335413e-06, | |
| "loss": 0.5141, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.8278917910447762, | |
| "grad_norm": 0.2810800661440279, | |
| "learning_rate": 8.549644310463717e-06, | |
| "loss": 0.5167, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.8302238805970149, | |
| "grad_norm": 0.29240889207305526, | |
| "learning_rate": 8.456625862241193e-06, | |
| "loss": 0.5135, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.8325559701492538, | |
| "grad_norm": 0.292684734782098, | |
| "learning_rate": 8.364740925575643e-06, | |
| "loss": 0.5125, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.8348880597014925, | |
| "grad_norm": 0.2959795436894085, | |
| "learning_rate": 8.273994969699394e-06, | |
| "loss": 0.5057, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.8372201492537313, | |
| "grad_norm": 0.2954465382619872, | |
| "learning_rate": 8.184393396049675e-06, | |
| "loss": 0.5069, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.8395522388059702, | |
| "grad_norm": 0.3011785107092073, | |
| "learning_rate": 8.095941537947057e-06, | |
| "loss": 0.5176, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.8418843283582089, | |
| "grad_norm": 0.2912325453859299, | |
| "learning_rate": 8.008644660278051e-06, | |
| "loss": 0.4892, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.8442164179104478, | |
| "grad_norm": 0.3048172340443361, | |
| "learning_rate": 7.922507959181673e-06, | |
| "loss": 0.512, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.8465485074626866, | |
| "grad_norm": 0.2930943668283754, | |
| "learning_rate": 7.837536561740225e-06, | |
| "loss": 0.5033, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.8488805970149254, | |
| "grad_norm": 0.29936839076020144, | |
| "learning_rate": 7.753735525674059e-06, | |
| "loss": 0.5104, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.8512126865671642, | |
| "grad_norm": 0.2951288880142476, | |
| "learning_rate": 7.671109839040547e-06, | |
| "loss": 0.5126, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.8535447761194029, | |
| "grad_norm": 0.2790075544816064, | |
| "learning_rate": 7.58966441993719e-06, | |
| "loss": 0.494, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.8558768656716418, | |
| "grad_norm": 0.29641024588363307, | |
| "learning_rate": 7.509404116208868e-06, | |
| "loss": 0.5077, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.8582089552238806, | |
| "grad_norm": 0.3230633177700583, | |
| "learning_rate": 7.430333705159286e-06, | |
| "loss": 0.5401, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.8605410447761194, | |
| "grad_norm": 0.302196461115966, | |
| "learning_rate": 7.352457893266627e-06, | |
| "loss": 0.531, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.8628731343283582, | |
| "grad_norm": 0.29794221249791447, | |
| "learning_rate": 7.275781315903374e-06, | |
| "loss": 0.5137, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.8652052238805971, | |
| "grad_norm": 0.29632779382150637, | |
| "learning_rate": 7.20030853706046e-06, | |
| "loss": 0.5108, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.8675373134328358, | |
| "grad_norm": 0.29830642010980857, | |
| "learning_rate": 7.126044049075548e-06, | |
| "loss": 0.5163, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.8698694029850746, | |
| "grad_norm": 0.3124152954120725, | |
| "learning_rate": 7.052992272365681e-06, | |
| "loss": 0.5073, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.8722014925373134, | |
| "grad_norm": 0.30181418504381746, | |
| "learning_rate": 6.9811575551641224e-06, | |
| "loss": 0.4966, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.8745335820895522, | |
| "grad_norm": 0.29846707519728344, | |
| "learning_rate": 6.910544173261588e-06, | |
| "loss": 0.5003, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.8768656716417911, | |
| "grad_norm": 0.2951342743446569, | |
| "learning_rate": 6.8411563297516995e-06, | |
| "loss": 0.4912, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.8791977611940298, | |
| "grad_norm": 0.30200825628656813, | |
| "learning_rate": 6.772998154780832e-06, | |
| "loss": 0.5068, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.8815298507462687, | |
| "grad_norm": 0.30227641385772225, | |
| "learning_rate": 6.706073705302254e-06, | |
| "loss": 0.5067, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.8838619402985075, | |
| "grad_norm": 0.300849841970548, | |
| "learning_rate": 6.6403869648346634e-06, | |
| "loss": 0.5056, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.8861940298507462, | |
| "grad_norm": 0.29748535547416677, | |
| "learning_rate": 6.575941843225068e-06, | |
| "loss": 0.4889, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.8885261194029851, | |
| "grad_norm": 0.3017981544449824, | |
| "learning_rate": 6.5127421764160685e-06, | |
| "loss": 0.4947, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.8908582089552238, | |
| "grad_norm": 0.3026018984518875, | |
| "learning_rate": 6.450791726217538e-06, | |
| "loss": 0.5149, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.8931902985074627, | |
| "grad_norm": 0.3081030135585167, | |
| "learning_rate": 6.390094180082694e-06, | |
| "loss": 0.5079, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.8955223880597015, | |
| "grad_norm": 0.3041553330020546, | |
| "learning_rate": 6.330653150888617e-06, | |
| "loss": 0.509, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.8978544776119403, | |
| "grad_norm": 0.30475667040751553, | |
| "learning_rate": 6.272472176721207e-06, | |
| "loss": 0.5124, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.9001865671641791, | |
| "grad_norm": 0.29416760390485075, | |
| "learning_rate": 6.215554720664598e-06, | |
| "loss": 0.5163, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.902518656716418, | |
| "grad_norm": 0.27884692839413544, | |
| "learning_rate": 6.159904170594982e-06, | |
| "loss": 0.5069, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.9048507462686567, | |
| "grad_norm": 0.30054576904592345, | |
| "learning_rate": 6.105523838979022e-06, | |
| "loss": 0.5212, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.9071828358208955, | |
| "grad_norm": 0.30887707397671693, | |
| "learning_rate": 6.052416962676621e-06, | |
| "loss": 0.5012, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.9095149253731343, | |
| "grad_norm": 0.2901477305872294, | |
| "learning_rate": 6.000586702748301e-06, | |
| "loss": 0.4987, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.9118470149253731, | |
| "grad_norm": 0.3119574760410935, | |
| "learning_rate": 5.950036144267021e-06, | |
| "loss": 0.504, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.914179104477612, | |
| "grad_norm": 0.27442255971280144, | |
| "learning_rate": 5.900768296134551e-06, | |
| "loss": 0.4997, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.9165111940298507, | |
| "grad_norm": 0.31250294548724994, | |
| "learning_rate": 5.852786090902383e-06, | |
| "loss": 0.5129, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.9188432835820896, | |
| "grad_norm": 0.293909840986113, | |
| "learning_rate": 5.8060923845971825e-06, | |
| "loss": 0.506, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.9211753731343284, | |
| "grad_norm": 0.3001956020740078, | |
| "learning_rate": 5.760689956550763e-06, | |
| "loss": 0.5, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.9235074626865671, | |
| "grad_norm": 0.28174848937085023, | |
| "learning_rate": 5.7165815092346825e-06, | |
| "loss": 0.4912, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.925839552238806, | |
| "grad_norm": 0.3020312165517983, | |
| "learning_rate": 5.673769668099364e-06, | |
| "loss": 0.5031, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.9281716417910447, | |
| "grad_norm": 0.2943021791326706, | |
| "learning_rate": 5.632256981417845e-06, | |
| "loss": 0.5051, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.9305037313432836, | |
| "grad_norm": 0.2876667152171628, | |
| "learning_rate": 5.59204592013407e-06, | |
| "loss": 0.4933, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.9328358208955224, | |
| "grad_norm": 0.31254382539787895, | |
| "learning_rate": 5.553138877715833e-06, | |
| "loss": 0.5189, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.9351679104477612, | |
| "grad_norm": 0.2927974593197605, | |
| "learning_rate": 5.515538170012309e-06, | |
| "loss": 0.5028, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.9375, | |
| "grad_norm": 0.2909113245834731, | |
| "learning_rate": 5.479246035116201e-06, | |
| "loss": 0.495, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.9398320895522388, | |
| "grad_norm": 0.3194578057854513, | |
| "learning_rate": 5.444264633230531e-06, | |
| "loss": 0.5072, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.9421641791044776, | |
| "grad_norm": 0.2944404152818133, | |
| "learning_rate": 5.410596046540051e-06, | |
| "loss": 0.4952, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.9444962686567164, | |
| "grad_norm": 0.3115430812347992, | |
| "learning_rate": 5.378242279087314e-06, | |
| "loss": 0.501, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.9468283582089553, | |
| "grad_norm": 0.29847488276777256, | |
| "learning_rate": 5.347205256653387e-06, | |
| "loss": 0.5064, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.949160447761194, | |
| "grad_norm": 0.2890456115548558, | |
| "learning_rate": 5.317486826643219e-06, | |
| "loss": 0.486, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.9514925373134329, | |
| "grad_norm": 0.29211240702780633, | |
| "learning_rate": 5.28908875797568e-06, | |
| "loss": 0.5082, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.9538246268656716, | |
| "grad_norm": 0.3070505504678916, | |
| "learning_rate": 5.262012740978269e-06, | |
| "loss": 0.5097, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.9561567164179104, | |
| "grad_norm": 0.2773411345027328, | |
| "learning_rate": 5.236260387286509e-06, | |
| "loss": 0.4962, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.9584888059701493, | |
| "grad_norm": 0.29799985399558016, | |
| "learning_rate": 5.2118332297480105e-06, | |
| "loss": 0.5128, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.960820895522388, | |
| "grad_norm": 0.287169325155614, | |
| "learning_rate": 5.1887327223312296e-06, | |
| "loss": 0.4866, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.9631529850746269, | |
| "grad_norm": 0.31835555206048355, | |
| "learning_rate": 5.166960240038937e-06, | |
| "loss": 0.504, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.9654850746268657, | |
| "grad_norm": 0.3095841539651552, | |
| "learning_rate": 5.1465170788263595e-06, | |
| "loss": 0.5117, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.9678171641791045, | |
| "grad_norm": 0.3262964021293147, | |
| "learning_rate": 5.1274044555240525e-06, | |
| "loss": 0.502, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.9701492537313433, | |
| "grad_norm": 0.29662543157502314, | |
| "learning_rate": 5.109623507765466e-06, | |
| "loss": 0.5042, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.972481343283582, | |
| "grad_norm": 0.29199443293251653, | |
| "learning_rate": 5.093175293919228e-06, | |
| "loss": 0.5081, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.9748134328358209, | |
| "grad_norm": 0.29856223815679944, | |
| "learning_rate": 5.07806079302615e-06, | |
| "loss": 0.5128, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.9771455223880597, | |
| "grad_norm": 0.2812213514715703, | |
| "learning_rate": 5.064280904740953e-06, | |
| "loss": 0.5097, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.9794776119402985, | |
| "grad_norm": 0.29489687258433694, | |
| "learning_rate": 5.051836449278715e-06, | |
| "loss": 0.4999, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.9818097014925373, | |
| "grad_norm": 0.30265017359808566, | |
| "learning_rate": 5.040728167366057e-06, | |
| "loss": 0.4966, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.9841417910447762, | |
| "grad_norm": 0.2991437683499533, | |
| "learning_rate": 5.030956720197035e-06, | |
| "loss": 0.4991, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.9864738805970149, | |
| "grad_norm": 0.2993425987702054, | |
| "learning_rate": 5.022522689393809e-06, | |
| "loss": 0.5269, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.9888059701492538, | |
| "grad_norm": 0.28083575851271286, | |
| "learning_rate": 5.015426576972003e-06, | |
| "loss": 0.5034, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.9911380597014925, | |
| "grad_norm": 0.2846014329272288, | |
| "learning_rate": 5.009668805310832e-06, | |
| "loss": 0.5001, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.9934701492537313, | |
| "grad_norm": 0.297526890423307, | |
| "learning_rate": 5.005249717127964e-06, | |
| "loss": 0.5124, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.9958022388059702, | |
| "grad_norm": 0.3006228142618389, | |
| "learning_rate": 5.002169575459111e-06, | |
| "loss": 0.5046, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.9981343283582089, | |
| "grad_norm": 0.3155643221180713, | |
| "learning_rate": 5.000428563642382e-06, | |
| "loss": 0.5086, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 2144, | |
| "total_flos": 488578517827584.0, | |
| "train_loss": 0.5429407808540473, | |
| "train_runtime": 24734.1783, | |
| "train_samples_per_second": 1.387, | |
| "train_steps_per_second": 0.087 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 2144, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 488578517827584.0, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
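
The state above appears to follow the schema that Hugging Face's `transformers` `Trainer` writes to `trainer_state.json`: each per-step entry in `log_history` carries `loss`, `grad_norm`, and `learning_rate`, while the final entry holds aggregate run statistics (`train_loss`, `train_runtime`, `total_flos`). As a minimal sketch only (assuming the blob has been saved verbatim as `trainer_state.json`; the path and the printed summary are illustrative and not part of the original log), it can be loaded and summarised like this:

```python
import json

# Assumed path: the conventional filename Trainer uses when saving its state.
STATE_PATH = "trainer_state.json"

with open(STATE_PATH) as f:
    state = json.load(f)

# Per-step entries carry "loss"; the last entry carries aggregate run statistics instead.
step_logs = [entry for entry in state["log_history"] if "loss" in entry]
final_entry = state["log_history"][-1]

print(f"logged points      : {len(step_logs)}")
print(f"first logged loss  : {step_logs[0]['loss']:.4f} (step {step_logs[0]['step']})")
print(f"last logged loss   : {step_logs[-1]['loss']:.4f} (step {step_logs[-1]['step']})")
print(f"reported train_loss: {final_entry.get('train_loss')}")
print(f"steps / epochs     : {state['max_steps']} / {state['num_train_epochs']}")
```

On this log, the late-epoch per-step losses sit around 0.49-0.53, while the mean `train_loss` reported for the whole run is about 0.543.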