{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1073,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004659832246039142,
      "grad_norm": 393.7763816895652,
      "learning_rate": 7.763975155279503e-07,
      "loss": 11.0125,
      "step": 5
    },
    {
      "epoch": 0.009319664492078284,
      "grad_norm": 583.1849322019833,
      "learning_rate": 1.5527950310559006e-06,
      "loss": 10.1288,
      "step": 10
    },
    {
      "epoch": 0.013979496738117428,
      "grad_norm": 284.55800044683906,
      "learning_rate": 2.329192546583851e-06,
      "loss": 5.8118,
      "step": 15
    },
    {
      "epoch": 0.01863932898415657,
      "grad_norm": 29.083713656887006,
      "learning_rate": 3.1055900621118013e-06,
      "loss": 1.7313,
      "step": 20
    },
    {
      "epoch": 0.023299161230195712,
      "grad_norm": 25.326606656183195,
      "learning_rate": 3.881987577639752e-06,
      "loss": 1.153,
      "step": 25
    },
    {
      "epoch": 0.027958993476234855,
      "grad_norm": 3.559209733302196,
      "learning_rate": 4.658385093167702e-06,
      "loss": 0.9355,
      "step": 30
    },
    {
      "epoch": 0.032618825722273995,
      "grad_norm": 1.6561992560161718,
      "learning_rate": 5.4347826086956525e-06,
      "loss": 0.7933,
      "step": 35
    },
    {
      "epoch": 0.03727865796831314,
      "grad_norm": 0.9314770858583151,
      "learning_rate": 6.2111801242236025e-06,
      "loss": 0.7262,
      "step": 40
    },
    {
      "epoch": 0.04193849021435228,
      "grad_norm": 0.7999144565956597,
      "learning_rate": 6.9875776397515525e-06,
      "loss": 0.6711,
      "step": 45
    },
    {
      "epoch": 0.046598322460391424,
      "grad_norm": 0.5623767156723083,
      "learning_rate": 7.763975155279503e-06,
      "loss": 0.6428,
      "step": 50
    },
    {
      "epoch": 0.05125815470643057,
      "grad_norm": 0.5523446689343642,
      "learning_rate": 8.540372670807453e-06,
      "loss": 0.6097,
      "step": 55
    },
    {
      "epoch": 0.05591798695246971,
      "grad_norm": 0.47631559008687474,
      "learning_rate": 9.316770186335403e-06,
      "loss": 0.5772,
      "step": 60
    },
    {
      "epoch": 0.06057781919850885,
      "grad_norm": 0.48238602877384157,
      "learning_rate": 1.0093167701863353e-05,
      "loss": 0.5638,
      "step": 65
    },
    {
      "epoch": 0.06523765144454799,
      "grad_norm": 0.40800058691336943,
      "learning_rate": 1.0869565217391305e-05,
      "loss": 0.5616,
      "step": 70
    },
    {
      "epoch": 0.06989748369058714,
      "grad_norm": 0.521882380108243,
      "learning_rate": 1.1645962732919255e-05,
      "loss": 0.5455,
      "step": 75
    },
    {
      "epoch": 0.07455731593662628,
      "grad_norm": 0.44631422380650543,
      "learning_rate": 1.2422360248447205e-05,
      "loss": 0.5379,
      "step": 80
    },
    {
      "epoch": 0.07921714818266543,
      "grad_norm": 0.4344573507311684,
      "learning_rate": 1.3198757763975155e-05,
      "loss": 0.5303,
      "step": 85
    },
    {
      "epoch": 0.08387698042870456,
      "grad_norm": 0.5489393869010802,
      "learning_rate": 1.3975155279503105e-05,
      "loss": 0.5414,
      "step": 90
    },
    {
      "epoch": 0.08853681267474371,
      "grad_norm": 0.4469200854990802,
      "learning_rate": 1.4751552795031057e-05,
      "loss": 0.5242,
      "step": 95
    },
    {
      "epoch": 0.09319664492078285,
      "grad_norm": 1.0603828069489791,
      "learning_rate": 1.5527950310559007e-05,
      "loss": 0.5109,
      "step": 100
    },
    {
      "epoch": 0.097856477166822,
      "grad_norm": 0.44260212655195375,
      "learning_rate": 1.630434782608696e-05,
      "loss": 0.5065,
      "step": 105
    },
    {
      "epoch": 0.10251630941286113,
      "grad_norm": 0.44817804333877553,
      "learning_rate": 1.7080745341614907e-05,
      "loss": 0.5088,
      "step": 110
    },
    {
      "epoch": 0.10717614165890028,
      "grad_norm": 0.5288716489545858,
      "learning_rate": 1.785714285714286e-05,
      "loss": 0.5067,
      "step": 115
    },
    {
      "epoch": 0.11183597390493942,
      "grad_norm": 0.5048641535944616,
      "learning_rate": 1.8633540372670807e-05,
      "loss": 0.4939,
      "step": 120
    },
    {
      "epoch": 0.11649580615097857,
      "grad_norm": 0.6084150968283868,
      "learning_rate": 1.940993788819876e-05,
      "loss": 0.4912,
      "step": 125
    },
    {
      "epoch": 0.1211556383970177,
      "grad_norm": 0.6148918698831792,
      "learning_rate": 2.0186335403726707e-05,
      "loss": 0.4979,
      "step": 130
    },
    {
      "epoch": 0.12581547064305684,
      "grad_norm": 0.8205827657670669,
      "learning_rate": 2.096273291925466e-05,
      "loss": 0.4979,
      "step": 135
    },
    {
      "epoch": 0.13047530288909598,
      "grad_norm": 0.7679297495970169,
      "learning_rate": 2.173913043478261e-05,
      "loss": 0.4832,
      "step": 140
    },
    {
      "epoch": 0.13513513513513514,
      "grad_norm": 0.5041302878945089,
      "learning_rate": 2.2515527950310562e-05,
      "loss": 0.51,
      "step": 145
    },
    {
      "epoch": 0.13979496738117428,
      "grad_norm": 0.5530583276412717,
      "learning_rate": 2.329192546583851e-05,
      "loss": 0.4844,
      "step": 150
    },
    {
      "epoch": 0.14445479962721341,
      "grad_norm": 0.5301947255391686,
      "learning_rate": 2.4068322981366462e-05,
      "loss": 0.4927,
      "step": 155
    },
    {
      "epoch": 0.14911463187325255,
      "grad_norm": 0.48036659403384335,
      "learning_rate": 2.484472049689441e-05,
      "loss": 0.4742,
      "step": 160
    },
    {
      "epoch": 0.15377446411929171,
      "grad_norm": 0.46573263299920226,
      "learning_rate": 2.5621118012422362e-05,
      "loss": 0.4852,
      "step": 165
    },
    {
      "epoch": 0.15843429636533085,
      "grad_norm": 0.4355300742627608,
      "learning_rate": 2.639751552795031e-05,
      "loss": 0.4616,
      "step": 170
    },
    {
      "epoch": 0.16309412861137,
      "grad_norm": 0.4575025952824799,
      "learning_rate": 2.7173913043478262e-05,
      "loss": 0.4754,
      "step": 175
    },
    {
      "epoch": 0.16775396085740912,
      "grad_norm": 0.42748121226122626,
      "learning_rate": 2.795031055900621e-05,
      "loss": 0.469,
      "step": 180
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 0.6608945458634785,
      "learning_rate": 2.8726708074534165e-05,
      "loss": 0.4735,
      "step": 185
    },
    {
      "epoch": 0.17707362534948742,
      "grad_norm": 0.48638685585860075,
      "learning_rate": 2.9503105590062114e-05,
      "loss": 0.4829,
      "step": 190
    },
    {
      "epoch": 0.18173345759552656,
      "grad_norm": 0.5745988215163209,
      "learning_rate": 3.0279503105590062e-05,
      "loss": 0.4724,
      "step": 195
    },
    {
      "epoch": 0.1863932898415657,
      "grad_norm": 0.6023582420428638,
      "learning_rate": 3.1055900621118014e-05,
      "loss": 0.4741,
      "step": 200
    },
    {
      "epoch": 0.19105312208760486,
      "grad_norm": 0.635176285320767,
      "learning_rate": 3.183229813664597e-05,
      "loss": 0.4641,
      "step": 205
    },
    {
      "epoch": 0.195712954333644,
      "grad_norm": 0.7710044760027541,
      "learning_rate": 3.260869565217392e-05,
      "loss": 0.478,
      "step": 210
    },
    {
      "epoch": 0.20037278657968313,
      "grad_norm": 0.5984872920907087,
      "learning_rate": 3.3385093167701865e-05,
      "loss": 0.4603,
      "step": 215
    },
    {
      "epoch": 0.20503261882572227,
      "grad_norm": 0.5291576074653929,
      "learning_rate": 3.4161490683229814e-05,
      "loss": 0.4649,
      "step": 220
    },
    {
      "epoch": 0.2096924510717614,
      "grad_norm": 0.5743958987143609,
      "learning_rate": 3.493788819875777e-05,
      "loss": 0.4473,
      "step": 225
    },
    {
      "epoch": 0.21435228331780057,
      "grad_norm": 0.7717358450196297,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.4608,
      "step": 230
    },
    {
      "epoch": 0.2190121155638397,
      "grad_norm": 0.6749709484604279,
      "learning_rate": 3.6490683229813665e-05,
      "loss": 0.4585,
      "step": 235
    },
    {
      "epoch": 0.22367194780987884,
      "grad_norm": 0.5624777506954426,
      "learning_rate": 3.7267080745341614e-05,
      "loss": 0.4563,
      "step": 240
    },
    {
      "epoch": 0.22833178005591798,
      "grad_norm": 0.5960914355686585,
      "learning_rate": 3.804347826086957e-05,
      "loss": 0.462,
      "step": 245
    },
    {
      "epoch": 0.23299161230195714,
      "grad_norm": 0.5901408817500576,
      "learning_rate": 3.881987577639752e-05,
      "loss": 0.4543,
      "step": 250
    },
    {
      "epoch": 0.23765144454799628,
      "grad_norm": 0.5483953262361844,
      "learning_rate": 3.9596273291925465e-05,
      "loss": 0.4625,
      "step": 255
    },
    {
      "epoch": 0.2423112767940354,
      "grad_norm": 0.6755309362164663,
      "learning_rate": 4.0372670807453414e-05,
      "loss": 0.4579,
      "step": 260
    },
    {
      "epoch": 0.24697110904007455,
      "grad_norm": 0.510280095507134,
      "learning_rate": 4.114906832298137e-05,
      "loss": 0.4564,
      "step": 265
    },
    {
      "epoch": 0.2516309412861137,
      "grad_norm": 0.6620506218445115,
      "learning_rate": 4.192546583850932e-05,
      "loss": 0.4551,
      "step": 270
    },
    {
      "epoch": 0.25629077353215285,
      "grad_norm": 0.5995404221607764,
      "learning_rate": 4.270186335403727e-05,
      "loss": 0.4608,
      "step": 275
    },
    {
      "epoch": 0.26095060577819196,
      "grad_norm": 0.6548164752439984,
      "learning_rate": 4.347826086956522e-05,
      "loss": 0.4534,
      "step": 280
    },
    {
      "epoch": 0.2656104380242311,
      "grad_norm": 0.6323789306558022,
      "learning_rate": 4.425465838509317e-05,
      "loss": 0.4577,
      "step": 285
    },
    {
      "epoch": 0.2702702702702703,
      "grad_norm": 0.6752638258379858,
      "learning_rate": 4.5031055900621124e-05,
      "loss": 0.46,
      "step": 290
    },
    {
      "epoch": 0.2749301025163094,
      "grad_norm": 0.6469562014535761,
      "learning_rate": 4.580745341614907e-05,
      "loss": 0.4646,
      "step": 295
    },
    {
      "epoch": 0.27958993476234856,
      "grad_norm": 1.0717105490316798,
      "learning_rate": 4.658385093167702e-05,
      "loss": 0.4667,
      "step": 300
    },
    {
      "epoch": 0.2842497670083877,
      "grad_norm": 0.5460080137202862,
      "learning_rate": 4.736024844720497e-05,
      "loss": 0.4543,
      "step": 305
    },
    {
      "epoch": 0.28890959925442683,
      "grad_norm": 0.7653644710191666,
      "learning_rate": 4.8136645962732924e-05,
      "loss": 0.4465,
      "step": 310
    },
    {
      "epoch": 0.293569431500466,
      "grad_norm": 0.6197872261778846,
      "learning_rate": 4.891304347826087e-05,
      "loss": 0.4556,
      "step": 315
    },
    {
      "epoch": 0.2982292637465051,
      "grad_norm": 0.6617082986942046,
      "learning_rate": 4.968944099378882e-05,
      "loss": 0.4406,
      "step": 320
    },
    {
      "epoch": 0.30288909599254427,
      "grad_norm": 0.6046029165724706,
      "learning_rate": 4.994822229892993e-05,
      "loss": 0.4387,
      "step": 325
    },
    {
      "epoch": 0.30754892823858343,
      "grad_norm": 0.6521768830488405,
      "learning_rate": 4.986192613047981e-05,
      "loss": 0.4492,
      "step": 330
    },
    {
      "epoch": 0.31220876048462254,
      "grad_norm": 0.7536190791331463,
      "learning_rate": 4.977562996202969e-05,
      "loss": 0.4496,
      "step": 335
    },
    {
      "epoch": 0.3168685927306617,
      "grad_norm": 0.7952129333481134,
      "learning_rate": 4.968933379357957e-05,
      "loss": 0.4511,
      "step": 340
    },
    {
      "epoch": 0.32152842497670087,
      "grad_norm": 0.6096740811145186,
      "learning_rate": 4.9603037625129445e-05,
      "loss": 0.4457,
      "step": 345
    },
    {
      "epoch": 0.32618825722274,
      "grad_norm": 0.5166618990153663,
      "learning_rate": 4.951674145667933e-05,
      "loss": 0.4595,
      "step": 350
    },
    {
      "epoch": 0.33084808946877914,
      "grad_norm": 0.6273578507717906,
      "learning_rate": 4.94304452882292e-05,
      "loss": 0.4454,
      "step": 355
    },
    {
      "epoch": 0.33550792171481825,
      "grad_norm": 0.6394584015921028,
      "learning_rate": 4.934414911977908e-05,
      "loss": 0.4432,
      "step": 360
    },
    {
      "epoch": 0.3401677539608574,
      "grad_norm": 0.5368723662365096,
      "learning_rate": 4.9257852951328965e-05,
      "loss": 0.4398,
      "step": 365
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 0.5882218874107019,
      "learning_rate": 4.917155678287884e-05,
      "loss": 0.4533,
      "step": 370
    },
    {
      "epoch": 0.3494874184529357,
      "grad_norm": 0.5846593984649311,
      "learning_rate": 4.908526061442872e-05,
      "loss": 0.44,
      "step": 375
    },
    {
      "epoch": 0.35414725069897485,
      "grad_norm": 0.7811317528955376,
      "learning_rate": 4.89989644459786e-05,
      "loss": 0.4551,
      "step": 380
    },
    {
      "epoch": 0.35880708294501396,
      "grad_norm": 0.5923274246131052,
      "learning_rate": 4.891266827752848e-05,
      "loss": 0.4573,
      "step": 385
    },
    {
      "epoch": 0.3634669151910531,
      "grad_norm": 0.5109644015646022,
      "learning_rate": 4.882637210907836e-05,
      "loss": 0.4409,
      "step": 390
    },
    {
      "epoch": 0.3681267474370923,
      "grad_norm": 0.7676159217550929,
      "learning_rate": 4.874007594062824e-05,
      "loss": 0.4473,
      "step": 395
    },
    {
      "epoch": 0.3727865796831314,
      "grad_norm": 0.4593386933689315,
      "learning_rate": 4.865377977217811e-05,
      "loss": 0.4319,
      "step": 400
    },
    {
      "epoch": 0.37744641192917056,
      "grad_norm": 0.5622256565032335,
      "learning_rate": 4.8567483603728e-05,
      "loss": 0.4508,
      "step": 405
    },
    {
      "epoch": 0.3821062441752097,
      "grad_norm": 0.4696400190402633,
      "learning_rate": 4.8481187435277875e-05,
      "loss": 0.4463,
      "step": 410
    },
    {
      "epoch": 0.38676607642124883,
      "grad_norm": 0.5414428583287468,
      "learning_rate": 4.839489126682776e-05,
      "loss": 0.4328,
      "step": 415
    },
    {
      "epoch": 0.391425908667288,
      "grad_norm": 0.5930633933722221,
      "learning_rate": 4.830859509837763e-05,
      "loss": 0.451,
      "step": 420
    },
    {
      "epoch": 0.3960857409133271,
      "grad_norm": 0.558893296619698,
      "learning_rate": 4.822229892992751e-05,
      "loss": 0.4485,
      "step": 425
    },
    {
      "epoch": 0.40074557315936626,
      "grad_norm": 0.5557744764476551,
      "learning_rate": 4.8136002761477395e-05,
      "loss": 0.4352,
      "step": 430
    },
    {
      "epoch": 0.40540540540540543,
      "grad_norm": 0.4906439618823238,
      "learning_rate": 4.804970659302727e-05,
      "loss": 0.4481,
      "step": 435
    },
    {
      "epoch": 0.41006523765144454,
      "grad_norm": 0.5240735838628965,
      "learning_rate": 4.796341042457715e-05,
      "loss": 0.4428,
      "step": 440
    },
    {
      "epoch": 0.4147250698974837,
      "grad_norm": 0.4564128761613534,
      "learning_rate": 4.787711425612703e-05,
      "loss": 0.4368,
      "step": 445
    },
    {
      "epoch": 0.4193849021435228,
      "grad_norm": 0.581280738885634,
      "learning_rate": 4.779081808767691e-05,
      "loss": 0.4278,
      "step": 450
    },
    {
      "epoch": 0.424044734389562,
      "grad_norm": 0.5169419812689359,
      "learning_rate": 4.770452191922679e-05,
      "loss": 0.4344,
      "step": 455
    },
    {
      "epoch": 0.42870456663560114,
      "grad_norm": 0.4305845729197184,
      "learning_rate": 4.761822575077667e-05,
      "loss": 0.4313,
      "step": 460
    },
    {
      "epoch": 0.43336439888164024,
      "grad_norm": 0.4908203275214332,
      "learning_rate": 4.753192958232654e-05,
      "loss": 0.4283,
      "step": 465
    },
    {
      "epoch": 0.4380242311276794,
      "grad_norm": 0.4493706647323888,
      "learning_rate": 4.744563341387643e-05,
      "loss": 0.4412,
      "step": 470
    },
    {
      "epoch": 0.4426840633737186,
      "grad_norm": 0.39345083714107254,
      "learning_rate": 4.7359337245426306e-05,
      "loss": 0.4303,
      "step": 475
    },
    {
      "epoch": 0.4473438956197577,
      "grad_norm": 0.5501276418980581,
      "learning_rate": 4.7273041076976184e-05,
      "loss": 0.4426,
      "step": 480
    },
    {
      "epoch": 0.45200372786579684,
      "grad_norm": 0.6229767049077679,
      "learning_rate": 4.718674490852606e-05,
      "loss": 0.4352,
      "step": 485
    },
    {
      "epoch": 0.45666356011183595,
      "grad_norm": 0.44254941541331244,
      "learning_rate": 4.710044874007594e-05,
      "loss": 0.4298,
      "step": 490
    },
    {
      "epoch": 0.4613233923578751,
      "grad_norm": 0.514815810859903,
      "learning_rate": 4.7014152571625826e-05,
      "loss": 0.4357,
      "step": 495
    },
    {
      "epoch": 0.4659832246039143,
      "grad_norm": 0.5567992142298348,
      "learning_rate": 4.6927856403175704e-05,
      "loss": 0.4384,
      "step": 500
    },
    {
      "epoch": 0.4706430568499534,
      "grad_norm": 0.4363929130790873,
      "learning_rate": 4.684156023472558e-05,
      "loss": 0.4252,
      "step": 505
    },
    {
      "epoch": 0.47530288909599255,
      "grad_norm": 0.5087363298381216,
      "learning_rate": 4.675526406627546e-05,
      "loss": 0.4337,
      "step": 510
    },
    {
      "epoch": 0.47996272134203166,
      "grad_norm": 0.6543857019168989,
      "learning_rate": 4.666896789782534e-05,
      "loss": 0.4314,
      "step": 515
    },
    {
      "epoch": 0.4846225535880708,
      "grad_norm": 0.5980592672335253,
      "learning_rate": 4.658267172937522e-05,
      "loss": 0.4357,
      "step": 520
    },
    {
      "epoch": 0.48928238583411,
      "grad_norm": 0.531867864536261,
      "learning_rate": 4.64963755609251e-05,
      "loss": 0.4347,
      "step": 525
    },
    {
      "epoch": 0.4939422180801491,
      "grad_norm": 0.5310185013058427,
      "learning_rate": 4.641007939247497e-05,
      "loss": 0.4316,
      "step": 530
    },
    {
      "epoch": 0.49860205032618826,
      "grad_norm": 0.4238291511703444,
      "learning_rate": 4.632378322402486e-05,
      "loss": 0.4316,
      "step": 535
    },
    {
      "epoch": 0.5032618825722274,
      "grad_norm": 0.4228811468689937,
      "learning_rate": 4.6237487055574736e-05,
      "loss": 0.4216,
      "step": 540
    },
    {
      "epoch": 0.5079217148182665,
      "grad_norm": 0.6259236324395676,
      "learning_rate": 4.6151190887124615e-05,
      "loss": 0.4284,
      "step": 545
    },
    {
      "epoch": 0.5125815470643057,
      "grad_norm": 0.39359727948299666,
      "learning_rate": 4.606489471867449e-05,
      "loss": 0.4281,
      "step": 550
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 0.41683164417503143,
      "learning_rate": 4.597859855022437e-05,
      "loss": 0.4224,
      "step": 555
    },
    {
      "epoch": 0.5219012115563839,
      "grad_norm": 0.4671813774085083,
      "learning_rate": 4.589230238177425e-05,
      "loss": 0.4438,
      "step": 560
    },
    {
      "epoch": 0.5265610438024231,
      "grad_norm": 0.4869338335176134,
      "learning_rate": 4.5806006213324134e-05,
      "loss": 0.4207,
      "step": 565
    },
    {
      "epoch": 0.5312208760484622,
      "grad_norm": 0.4777106729946363,
      "learning_rate": 4.5719710044874006e-05,
      "loss": 0.4208,
      "step": 570
    },
    {
      "epoch": 0.5358807082945014,
      "grad_norm": 0.42255511031927767,
      "learning_rate": 4.563341387642389e-05,
      "loss": 0.4267,
      "step": 575
    },
    {
      "epoch": 0.5405405405405406,
      "grad_norm": 0.4480045558727776,
      "learning_rate": 4.554711770797377e-05,
      "loss": 0.4263,
      "step": 580
    },
    {
      "epoch": 0.5452003727865797,
      "grad_norm": 0.531614297394263,
      "learning_rate": 4.546082153952365e-05,
      "loss": 0.4237,
      "step": 585
    },
    {
      "epoch": 0.5498602050326188,
      "grad_norm": 0.40047577017376945,
      "learning_rate": 4.5374525371073526e-05,
      "loss": 0.4202,
      "step": 590
    },
    {
      "epoch": 0.554520037278658,
      "grad_norm": 0.3731646867407999,
      "learning_rate": 4.5288229202623404e-05,
      "loss": 0.4222,
      "step": 595
    },
    {
      "epoch": 0.5591798695246971,
      "grad_norm": 0.42320189483428194,
      "learning_rate": 4.520193303417328e-05,
      "loss": 0.4225,
      "step": 600
    },
    {
      "epoch": 0.5638397017707363,
      "grad_norm": 0.37857425890061425,
      "learning_rate": 4.511563686572317e-05,
      "loss": 0.4206,
      "step": 605
    },
    {
      "epoch": 0.5684995340167754,
      "grad_norm": 0.44956632131740865,
      "learning_rate": 4.5029340697273045e-05,
      "loss": 0.4319,
      "step": 610
    },
    {
      "epoch": 0.5731593662628145,
      "grad_norm": 0.33299976089643685,
      "learning_rate": 4.4943044528822923e-05,
      "loss": 0.4185,
      "step": 615
    },
    {
      "epoch": 0.5778191985088537,
      "grad_norm": 0.48980362627915386,
      "learning_rate": 4.48567483603728e-05,
      "loss": 0.4158,
      "step": 620
    },
    {
      "epoch": 0.5824790307548928,
      "grad_norm": 0.42606856224911005,
      "learning_rate": 4.477045219192268e-05,
      "loss": 0.4376,
      "step": 625
    },
    {
      "epoch": 0.587138863000932,
      "grad_norm": 0.42335712525850244,
      "learning_rate": 4.4684156023472565e-05,
      "loss": 0.428,
      "step": 630
    },
    {
      "epoch": 0.5917986952469712,
      "grad_norm": 0.5664985838812401,
      "learning_rate": 4.4597859855022436e-05,
      "loss": 0.4332,
      "step": 635
    },
    {
      "epoch": 0.5964585274930102,
      "grad_norm": 0.5423679684051941,
      "learning_rate": 4.4511563686572315e-05,
      "loss": 0.4233,
      "step": 640
    },
    {
      "epoch": 0.6011183597390494,
      "grad_norm": 0.4909932835209279,
      "learning_rate": 4.44252675181222e-05,
      "loss": 0.4227,
      "step": 645
    },
    {
      "epoch": 0.6057781919850885,
      "grad_norm": 0.46291917403602095,
      "learning_rate": 4.433897134967208e-05,
      "loss": 0.432,
      "step": 650
    },
    {
      "epoch": 0.6104380242311277,
      "grad_norm": 0.3685674838282931,
      "learning_rate": 4.4252675181221956e-05,
      "loss": 0.4293,
      "step": 655
    },
    {
      "epoch": 0.6150978564771669,
      "grad_norm": 0.4434735847938499,
      "learning_rate": 4.4166379012771834e-05,
      "loss": 0.411,
      "step": 660
    },
    {
      "epoch": 0.6197576887232059,
      "grad_norm": 0.4405490059116583,
      "learning_rate": 4.408008284432171e-05,
      "loss": 0.4119,
      "step": 665
    },
    {
      "epoch": 0.6244175209692451,
      "grad_norm": 0.41398202712429283,
      "learning_rate": 4.39937866758716e-05,
      "loss": 0.4104,
      "step": 670
    },
    {
      "epoch": 0.6290773532152842,
      "grad_norm": 0.430975620374675,
      "learning_rate": 4.3907490507421476e-05,
      "loss": 0.4187,
      "step": 675
    },
    {
      "epoch": 0.6337371854613234,
      "grad_norm": 0.46409316440230736,
      "learning_rate": 4.382119433897135e-05,
      "loss": 0.4223,
      "step": 680
    },
    {
      "epoch": 0.6383970177073626,
      "grad_norm": 0.5073889239648276,
      "learning_rate": 4.373489817052123e-05,
      "loss": 0.4318,
      "step": 685
    },
    {
      "epoch": 0.6430568499534017,
      "grad_norm": 0.44601647033035413,
      "learning_rate": 4.364860200207111e-05,
      "loss": 0.4139,
      "step": 690
    },
    {
      "epoch": 0.6477166821994408,
      "grad_norm": 0.35725981872190504,
      "learning_rate": 4.356230583362099e-05,
      "loss": 0.4188,
      "step": 695
    },
    {
      "epoch": 0.65237651444548,
      "grad_norm": 0.36000996597376966,
      "learning_rate": 4.347600966517087e-05,
      "loss": 0.4276,
      "step": 700
    },
    {
      "epoch": 0.6570363466915191,
      "grad_norm": 0.36954899452412027,
      "learning_rate": 4.3389713496720745e-05,
      "loss": 0.421,
      "step": 705
    },
    {
      "epoch": 0.6616961789375583,
      "grad_norm": 0.5096149762957498,
      "learning_rate": 4.330341732827063e-05,
      "loss": 0.4216,
      "step": 710
    },
    {
      "epoch": 0.6663560111835974,
      "grad_norm": 0.47369514199125173,
      "learning_rate": 4.321712115982051e-05,
      "loss": 0.41,
      "step": 715
    },
    {
      "epoch": 0.6710158434296365,
      "grad_norm": 0.4878910411937167,
      "learning_rate": 4.3130824991370387e-05,
      "loss": 0.4203,
      "step": 720
    },
    {
      "epoch": 0.6756756756756757,
      "grad_norm": 0.4569581502802404,
      "learning_rate": 4.3044528822920265e-05,
      "loss": 0.4192,
      "step": 725
    },
    {
      "epoch": 0.6803355079217148,
      "grad_norm": 0.49002047470680643,
      "learning_rate": 4.295823265447014e-05,
      "loss": 0.4141,
      "step": 730
    },
    {
      "epoch": 0.684995340167754,
      "grad_norm": 0.3774018667206795,
      "learning_rate": 4.287193648602002e-05,
      "loss": 0.4181,
      "step": 735
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 0.409232068582716,
      "learning_rate": 4.27856403175699e-05,
      "loss": 0.4116,
      "step": 740
    },
    {
      "epoch": 0.6943150046598322,
      "grad_norm": 0.4119369239899899,
      "learning_rate": 4.269934414911978e-05,
      "loss": 0.4075,
      "step": 745
    },
    {
      "epoch": 0.6989748369058714,
      "grad_norm": 0.46673879834661386,
      "learning_rate": 4.261304798066966e-05,
      "loss": 0.4121,
      "step": 750
    },
    {
      "epoch": 0.7036346691519105,
      "grad_norm": 0.4296650734748086,
      "learning_rate": 4.252675181221954e-05,
      "loss": 0.4148,
      "step": 755
    },
    {
      "epoch": 0.7082945013979497,
      "grad_norm": 0.4528637619615586,
      "learning_rate": 4.244045564376942e-05,
      "loss": 0.418,
      "step": 760
    },
    {
      "epoch": 0.7129543336439889,
      "grad_norm": 0.45440139377397915,
      "learning_rate": 4.23541594753193e-05,
      "loss": 0.4019,
      "step": 765
    },
    {
      "epoch": 0.7176141658900279,
      "grad_norm": 0.4262388354810109,
      "learning_rate": 4.2267863306869176e-05,
      "loss": 0.4109,
      "step": 770
    },
    {
      "epoch": 0.7222739981360671,
      "grad_norm": 0.3735445076148594,
      "learning_rate": 4.2181567138419054e-05,
      "loss": 0.4237,
      "step": 775
    },
    {
      "epoch": 0.7269338303821062,
      "grad_norm": 0.3512403469101282,
      "learning_rate": 4.209527096996894e-05,
      "loss": 0.4081,
      "step": 780
    },
    {
      "epoch": 0.7315936626281454,
      "grad_norm": 0.42653954827852203,
      "learning_rate": 4.200897480151881e-05,
      "loss": 0.4153,
      "step": 785
    },
    {
      "epoch": 0.7362534948741846,
      "grad_norm": 0.3914875070569304,
      "learning_rate": 4.1922678633068695e-05,
      "loss": 0.4187,
      "step": 790
    },
    {
      "epoch": 0.7409133271202236,
      "grad_norm": 0.4234510192995893,
      "learning_rate": 4.1836382464618573e-05,
      "loss": 0.413,
      "step": 795
    },
    {
      "epoch": 0.7455731593662628,
      "grad_norm": 0.5028008649488866,
      "learning_rate": 4.175008629616845e-05,
      "loss": 0.4109,
      "step": 800
    },
    {
      "epoch": 0.750232991612302,
      "grad_norm": 0.43119126147507647,
      "learning_rate": 4.166379012771833e-05,
      "loss": 0.4111,
      "step": 805
    },
    {
      "epoch": 0.7548928238583411,
      "grad_norm": 0.4089878198633868,
      "learning_rate": 4.157749395926821e-05,
      "loss": 0.4292,
      "step": 810
    },
    {
      "epoch": 0.7595526561043803,
      "grad_norm": 0.3681227264794929,
      "learning_rate": 4.1491197790818086e-05,
      "loss": 0.4037,
      "step": 815
    },
    {
      "epoch": 0.7642124883504194,
      "grad_norm": 0.5171306938048109,
      "learning_rate": 4.140490162236797e-05,
      "loss": 0.4193,
      "step": 820
    },
    {
      "epoch": 0.7688723205964585,
      "grad_norm": 0.48484868999243286,
      "learning_rate": 4.131860545391785e-05,
      "loss": 0.4109,
      "step": 825
    },
    {
      "epoch": 0.7735321528424977,
      "grad_norm": 0.4211067902596038,
      "learning_rate": 4.123230928546773e-05,
      "loss": 0.4059,
      "step": 830
    },
    {
      "epoch": 0.7781919850885368,
      "grad_norm": 0.4173617020873044,
      "learning_rate": 4.1146013117017606e-05,
      "loss": 0.4133,
      "step": 835
    },
    {
      "epoch": 0.782851817334576,
      "grad_norm": 0.4473614921871721,
      "learning_rate": 4.1059716948567484e-05,
      "loss": 0.416,
      "step": 840
    },
    {
      "epoch": 0.7875116495806151,
      "grad_norm": 0.5002445860246322,
      "learning_rate": 4.097342078011737e-05,
      "loss": 0.4244,
      "step": 845
    },
    {
      "epoch": 0.7921714818266542,
      "grad_norm": 0.40909509810722117,
      "learning_rate": 4.088712461166724e-05,
      "loss": 0.4136,
      "step": 850
    },
    {
      "epoch": 0.7968313140726934,
      "grad_norm": 0.5201852684542898,
      "learning_rate": 4.080082844321712e-05,
      "loss": 0.4159,
      "step": 855
    },
    {
      "epoch": 0.8014911463187325,
      "grad_norm": 0.4337009244379661,
      "learning_rate": 4.0714532274767004e-05,
      "loss": 0.4227,
      "step": 860
    },
    {
      "epoch": 0.8061509785647717,
      "grad_norm": 0.3559548453016289,
      "learning_rate": 4.062823610631688e-05,
      "loss": 0.4035,
      "step": 865
    },
    {
      "epoch": 0.8108108108108109,
      "grad_norm": 0.35878882029881315,
      "learning_rate": 4.054193993786676e-05,
      "loss": 0.4181,
      "step": 870
    },
    {
      "epoch": 0.8154706430568499,
      "grad_norm": 0.4404901144250474,
      "learning_rate": 4.045564376941664e-05,
      "loss": 0.4137,
      "step": 875
    },
    {
      "epoch": 0.8201304753028891,
      "grad_norm": 1.6876260516310506,
      "learning_rate": 4.036934760096652e-05,
      "loss": 0.4047,
      "step": 880
    },
    {
      "epoch": 0.8247903075489282,
      "grad_norm": 0.5529229555727502,
      "learning_rate": 4.02830514325164e-05,
      "loss": 0.4207,
      "step": 885
    },
    {
      "epoch": 0.8294501397949674,
      "grad_norm": 0.48256966462788353,
      "learning_rate": 4.019675526406628e-05,
      "loss": 0.4066,
      "step": 890
    },
    {
      "epoch": 0.8341099720410066,
      "grad_norm": 0.4515642693340339,
      "learning_rate": 4.011045909561615e-05,
      "loss": 0.4257,
      "step": 895
    },
    {
      "epoch": 0.8387698042870456,
      "grad_norm": 0.4292958614741706,
      "learning_rate": 4.0024162927166037e-05,
      "loss": 0.4084,
      "step": 900
    },
    {
      "epoch": 0.8434296365330848,
      "grad_norm": 0.33751313521608023,
      "learning_rate": 3.9937866758715915e-05,
      "loss": 0.41,
      "step": 905
    },
    {
      "epoch": 0.848089468779124,
      "grad_norm": 0.47962015646284906,
      "learning_rate": 3.98515705902658e-05,
      "loss": 0.4156,
      "step": 910
    },
    {
      "epoch": 0.8527493010251631,
      "grad_norm": 0.5814212414939525,
      "learning_rate": 3.976527442181567e-05,
      "loss": 0.4086,
      "step": 915
    },
    {
      "epoch": 0.8574091332712023,
      "grad_norm": 0.4802999638737839,
      "learning_rate": 3.967897825336555e-05,
      "loss": 0.4148,
      "step": 920
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 0.42361968430118924,
      "learning_rate": 3.9592682084915434e-05,
      "loss": 0.4017,
      "step": 925
    },
    {
      "epoch": 0.8667287977632805,
      "grad_norm": 0.5522937674561437,
      "learning_rate": 3.950638591646531e-05,
      "loss": 0.4011,
      "step": 930
    },
    {
      "epoch": 0.8713886300093197,
      "grad_norm": 0.4797515397612485,
      "learning_rate": 3.942008974801519e-05,
      "loss": 0.4131,
      "step": 935
    },
    {
      "epoch": 0.8760484622553588,
      "grad_norm": 0.3808360282381303,
      "learning_rate": 3.933379357956507e-05,
      "loss": 0.4085,
      "step": 940
    },
    {
      "epoch": 0.880708294501398,
      "grad_norm": 0.4555417585180783,
      "learning_rate": 3.924749741111495e-05,
      "loss": 0.4107,
      "step": 945
    },
    {
      "epoch": 0.8853681267474371,
      "grad_norm": 0.4732696767275139,
      "learning_rate": 3.916120124266483e-05,
      "loss": 0.4184,
      "step": 950
    },
    {
      "epoch": 0.8900279589934762,
      "grad_norm": 0.4097283294228305,
      "learning_rate": 3.9074905074214704e-05,
      "loss": 0.4173,
      "step": 955
    },
    {
      "epoch": 0.8946877912395154,
      "grad_norm": 0.3745876353455248,
      "learning_rate": 3.898860890576458e-05,
      "loss": 0.4125,
      "step": 960
    },
    {
      "epoch": 0.8993476234855545,
      "grad_norm": 0.40878733524270594,
      "learning_rate": 3.890231273731447e-05,
      "loss": 0.4041,
      "step": 965
    },
    {
      "epoch": 0.9040074557315937,
      "grad_norm": 0.3008792047141135,
      "learning_rate": 3.8816016568864345e-05,
      "loss": 0.4021,
      "step": 970
    },
    {
      "epoch": 0.9086672879776329,
      "grad_norm": 0.3526833420716075,
      "learning_rate": 3.8729720400414224e-05,
      "loss": 0.4208,
      "step": 975
    },
    {
      "epoch": 0.9133271202236719,
      "grad_norm": 0.37028855032759117,
      "learning_rate": 3.86434242319641e-05,
      "loss": 0.408,
      "step": 980
    },
    {
      "epoch": 0.9179869524697111,
      "grad_norm": 0.4913153733782135,
      "learning_rate": 3.855712806351398e-05,
      "loss": 0.4093,
      "step": 985
    },
    {
      "epoch": 0.9226467847157502,
      "grad_norm": 0.5480655158571721,
      "learning_rate": 3.8470831895063865e-05,
      "loss": 0.407,
      "step": 990
    },
    {
      "epoch": 0.9273066169617894,
      "grad_norm": 0.48127533426756625,
      "learning_rate": 3.838453572661374e-05,
      "loss": 0.4027,
      "step": 995
    },
    {
      "epoch": 0.9319664492078286,
      "grad_norm": 0.3798213155156242,
      "learning_rate": 3.8298239558163615e-05,
      "loss": 0.4058,
      "step": 1000
    },
    {
      "epoch": 0.9366262814538676,
      "grad_norm": 1.1135651155036537,
      "learning_rate": 3.82119433897135e-05,
      "loss": 0.4115,
      "step": 1005
    },
    {
      "epoch": 0.9412861136999068,
      "grad_norm": 0.4170458508497875,
      "learning_rate": 3.812564722126338e-05,
      "loss": 0.4042,
      "step": 1010
    },
    {
      "epoch": 0.9459459459459459,
      "grad_norm": 0.4736269267410847,
      "learning_rate": 3.8039351052813256e-05,
      "loss": 0.4128,
      "step": 1015
    },
    {
      "epoch": 0.9506057781919851,
      "grad_norm": 0.4901205065036653,
      "learning_rate": 3.7953054884363134e-05,
      "loss": 0.4064,
      "step": 1020
    },
    {
      "epoch": 0.9552656104380243,
      "grad_norm": 0.4063392622861729,
      "learning_rate": 3.786675871591301e-05,
      "loss": 0.4098,
      "step": 1025
    },
    {
      "epoch": 0.9599254426840633,
      "grad_norm": 0.35724872799277996,
      "learning_rate": 3.77804625474629e-05,
      "loss": 0.4149,
      "step": 1030
    },
    {
      "epoch": 0.9645852749301025,
      "grad_norm": 0.4922293048787546,
      "learning_rate": 3.7694166379012776e-05,
      "loss": 0.4073,
      "step": 1035
    },
    {
      "epoch": 0.9692451071761417,
      "grad_norm": 0.44273425501260094,
      "learning_rate": 3.7607870210562654e-05,
      "loss": 0.4043,
      "step": 1040
    },
    {
      "epoch": 0.9739049394221808,
      "grad_norm": 0.31431106549197013,
      "learning_rate": 3.752157404211253e-05,
      "loss": 0.3969,
      "step": 1045
    },
    {
      "epoch": 0.97856477166822,
      "grad_norm": 0.4761750617414784,
      "learning_rate": 3.743527787366241e-05,
      "loss": 0.4264,
      "step": 1050
    },
    {
      "epoch": 0.983224603914259,
      "grad_norm": 0.4217457177926442,
      "learning_rate": 3.734898170521229e-05,
      "loss": 0.4041,
      "step": 1055
    },
    {
      "epoch": 0.9878844361602982,
      "grad_norm": 0.40400582621732106,
      "learning_rate": 3.7262685536762174e-05,
      "loss": 0.4034,
      "step": 1060
    },
    {
      "epoch": 0.9925442684063374,
      "grad_norm": 0.43061222816006545,
      "learning_rate": 3.7176389368312045e-05,
      "loss": 0.4083,
      "step": 1065
    },
    {
      "epoch": 0.9972041006523765,
      "grad_norm": 0.5435194886769118,
      "learning_rate": 3.709009319986193e-05,
      "loss": 0.4082,
      "step": 1070
    }
  ],
  "logging_steps": 5,
  "max_steps": 3219,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9.186429923093381e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}