{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2422,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00041288191577208916,
      "grad_norm": 24.94086805103786,
      "learning_rate": 4.1152263374485605e-08,
      "loss": 1.4345,
      "step": 1
    },
    {
      "epoch": 0.002064409578860446,
      "grad_norm": 24.898038168161193,
      "learning_rate": 2.05761316872428e-07,
      "loss": 1.4101,
      "step": 5
    },
    {
      "epoch": 0.004128819157720892,
      "grad_norm": 15.938300194174698,
      "learning_rate": 4.11522633744856e-07,
      "loss": 1.3943,
      "step": 10
    },
    {
      "epoch": 0.006193228736581338,
      "grad_norm": 8.846886408597587,
      "learning_rate": 6.17283950617284e-07,
      "loss": 1.2771,
      "step": 15
    },
    {
      "epoch": 0.008257638315441783,
      "grad_norm": 9.948610282503696,
      "learning_rate": 8.23045267489712e-07,
      "loss": 1.1572,
      "step": 20
    },
    {
      "epoch": 0.01032204789430223,
      "grad_norm": 4.744946112205854,
      "learning_rate": 1.02880658436214e-06,
      "loss": 1.0575,
      "step": 25
    },
    {
      "epoch": 0.012386457473162676,
      "grad_norm": 3.3793530185545926,
      "learning_rate": 1.234567901234568e-06,
      "loss": 0.9708,
      "step": 30
    },
    {
      "epoch": 0.014450867052023121,
      "grad_norm": 3.4379702794812124,
      "learning_rate": 1.440329218106996e-06,
      "loss": 0.9668,
      "step": 35
    },
    {
      "epoch": 0.016515276630883566,
      "grad_norm": 3.110466178670075,
      "learning_rate": 1.646090534979424e-06,
      "loss": 0.9508,
      "step": 40
    },
    {
      "epoch": 0.018579686209744012,
      "grad_norm": 3.0934122499344614,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 0.9496,
      "step": 45
    },
    {
      "epoch": 0.02064409578860446,
      "grad_norm": 3.2046299727793572,
      "learning_rate": 2.05761316872428e-06,
      "loss": 0.9256,
      "step": 50
    },
    {
      "epoch": 0.022708505367464906,
      "grad_norm": 2.9857050496874358,
      "learning_rate": 2.263374485596708e-06,
      "loss": 0.9146,
      "step": 55
    },
    {
      "epoch": 0.02477291494632535,
      "grad_norm": 3.010697976532333,
      "learning_rate": 2.469135802469136e-06,
      "loss": 0.9185,
      "step": 60
    },
    {
      "epoch": 0.026837324525185797,
      "grad_norm": 2.9986835294430274,
      "learning_rate": 2.674897119341564e-06,
      "loss": 0.9008,
      "step": 65
    },
    {
      "epoch": 0.028901734104046242,
      "grad_norm": 3.2527007862069186,
      "learning_rate": 2.880658436213992e-06,
      "loss": 0.8855,
      "step": 70
    },
    {
      "epoch": 0.030966143682906688,
      "grad_norm": 3.2007644630686833,
      "learning_rate": 3.08641975308642e-06,
      "loss": 0.8783,
      "step": 75
    },
    {
      "epoch": 0.03303055326176713,
      "grad_norm": 3.196196143650837,
      "learning_rate": 3.292181069958848e-06,
      "loss": 0.8775,
      "step": 80
    },
    {
      "epoch": 0.03509496284062758,
      "grad_norm": 3.004102067065696,
      "learning_rate": 3.4979423868312762e-06,
      "loss": 0.8808,
      "step": 85
    },
    {
      "epoch": 0.037159372419488024,
      "grad_norm": 3.118135767316795,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.8775,
      "step": 90
    },
    {
      "epoch": 0.03922378199834847,
      "grad_norm": 3.0856528450327354,
      "learning_rate": 3.909465020576132e-06,
      "loss": 0.8634,
      "step": 95
    },
    {
      "epoch": 0.04128819157720892,
      "grad_norm": 2.9700335979493167,
      "learning_rate": 4.11522633744856e-06,
      "loss": 0.872,
      "step": 100
    },
    {
      "epoch": 0.04335260115606936,
      "grad_norm": 3.1350056589977533,
      "learning_rate": 4.3209876543209875e-06,
      "loss": 0.8706,
      "step": 105
    },
    {
      "epoch": 0.04541701073492981,
      "grad_norm": 3.2497160658033852,
      "learning_rate": 4.526748971193416e-06,
      "loss": 0.8539,
      "step": 110
    },
    {
      "epoch": 0.047481420313790254,
      "grad_norm": 3.382546616433861,
      "learning_rate": 4.732510288065844e-06,
      "loss": 0.8573,
      "step": 115
    },
    {
      "epoch": 0.0495458298926507,
      "grad_norm": 3.1741955682207124,
      "learning_rate": 4.938271604938272e-06,
      "loss": 0.8433,
      "step": 120
    },
    {
      "epoch": 0.051610239471511145,
      "grad_norm": 3.13507489217465,
      "learning_rate": 5.1440329218107e-06,
      "loss": 0.8546,
      "step": 125
    },
    {
      "epoch": 0.053674649050371594,
      "grad_norm": 3.182938519722128,
      "learning_rate": 5.349794238683128e-06,
      "loss": 0.8505,
      "step": 130
    },
    {
      "epoch": 0.05573905862923204,
      "grad_norm": 3.3061978280140916,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.8596,
      "step": 135
    },
    {
      "epoch": 0.057803468208092484,
      "grad_norm": 3.172304485577032,
      "learning_rate": 5.761316872427984e-06,
      "loss": 0.8454,
      "step": 140
    },
    {
      "epoch": 0.05986787778695293,
      "grad_norm": 3.222279073856588,
      "learning_rate": 5.967078189300412e-06,
      "loss": 0.8552,
      "step": 145
    },
    {
      "epoch": 0.061932287365813375,
      "grad_norm": 3.026540545603275,
      "learning_rate": 6.17283950617284e-06,
      "loss": 0.8365,
      "step": 150
    },
    {
      "epoch": 0.06399669694467382,
      "grad_norm": 3.1724013332685304,
      "learning_rate": 6.3786008230452675e-06,
      "loss": 0.8397,
      "step": 155
    },
    {
      "epoch": 0.06606110652353427,
      "grad_norm": 3.4516704362207222,
      "learning_rate": 6.584362139917696e-06,
      "loss": 0.8346,
      "step": 160
    },
    {
      "epoch": 0.06812551610239471,
      "grad_norm": 3.1199583778413285,
      "learning_rate": 6.790123456790124e-06,
      "loss": 0.8296,
      "step": 165
    },
    {
      "epoch": 0.07018992568125516,
      "grad_norm": 3.448087429701013,
      "learning_rate": 6.9958847736625525e-06,
      "loss": 0.8384,
      "step": 170
    },
    {
      "epoch": 0.07225433526011561,
      "grad_norm": 3.072968814322973,
      "learning_rate": 7.201646090534981e-06,
      "loss": 0.846,
      "step": 175
    },
    {
      "epoch": 0.07431874483897605,
      "grad_norm": 3.1647169459938076,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.8256,
      "step": 180
    },
    {
      "epoch": 0.0763831544178365,
      "grad_norm": 3.105263189462399,
      "learning_rate": 7.613168724279836e-06,
      "loss": 0.8011,
      "step": 185
    },
    {
      "epoch": 0.07844756399669695,
      "grad_norm": 2.9946543968073294,
      "learning_rate": 7.818930041152263e-06,
      "loss": 0.8214,
      "step": 190
    },
    {
      "epoch": 0.0805119735755574,
      "grad_norm": 3.25501723509638,
      "learning_rate": 8.024691358024692e-06,
      "loss": 0.8201,
      "step": 195
    },
    {
      "epoch": 0.08257638315441784,
      "grad_norm": 3.0087568189695237,
      "learning_rate": 8.23045267489712e-06,
      "loss": 0.8139,
      "step": 200
    },
    {
      "epoch": 0.08464079273327828,
      "grad_norm": 3.0911976211351817,
      "learning_rate": 8.43621399176955e-06,
      "loss": 0.8242,
      "step": 205
    },
    {
      "epoch": 0.08670520231213873,
      "grad_norm": 3.310286578361364,
      "learning_rate": 8.641975308641975e-06,
      "loss": 0.8364,
      "step": 210
    },
    {
      "epoch": 0.08876961189099918,
      "grad_norm": 2.986473336622259,
      "learning_rate": 8.847736625514404e-06,
      "loss": 0.8316,
      "step": 215
    },
    {
      "epoch": 0.09083402146985962,
      "grad_norm": 3.102015428130825,
      "learning_rate": 9.053497942386832e-06,
      "loss": 0.8128,
      "step": 220
    },
    {
      "epoch": 0.09289843104872006,
      "grad_norm": 2.906374211555821,
      "learning_rate": 9.25925925925926e-06,
      "loss": 0.7945,
      "step": 225
    },
    {
      "epoch": 0.09496284062758051,
      "grad_norm": 3.0506200713500826,
      "learning_rate": 9.465020576131688e-06,
      "loss": 0.8158,
      "step": 230
    },
    {
      "epoch": 0.09702725020644096,
      "grad_norm": 3.0384361806454017,
      "learning_rate": 9.670781893004116e-06,
      "loss": 0.8228,
      "step": 235
    },
    {
      "epoch": 0.0990916597853014,
      "grad_norm": 3.100843008960617,
      "learning_rate": 9.876543209876543e-06,
      "loss": 0.8099,
      "step": 240
    },
    {
      "epoch": 0.10115606936416185,
      "grad_norm": 2.935521796714598,
      "learning_rate": 9.999979213327006e-06,
      "loss": 0.8108,
      "step": 245
    },
    {
      "epoch": 0.10322047894302229,
      "grad_norm": 3.361172390200279,
      "learning_rate": 9.999745365240722e-06,
      "loss": 0.7923,
      "step": 250
    },
    {
      "epoch": 0.10528488852188274,
      "grad_norm": 2.915556545903911,
      "learning_rate": 9.999251697919687e-06,
      "loss": 0.8045,
      "step": 255
    },
    {
      "epoch": 0.10734929810074319,
      "grad_norm": 3.212286307862521,
      "learning_rate": 9.998498237018056e-06,
      "loss": 0.8112,
      "step": 260
    },
    {
      "epoch": 0.10941370767960364,
      "grad_norm": 3.191428090938715,
      "learning_rate": 9.997485021690553e-06,
      "loss": 0.8001,
      "step": 265
    },
    {
      "epoch": 0.11147811725846409,
      "grad_norm": 2.838764527991799,
      "learning_rate": 9.996212104590426e-06,
      "loss": 0.8074,
      "step": 270
    },
    {
      "epoch": 0.11354252683732452,
      "grad_norm": 2.9644134550569943,
      "learning_rate": 9.994679551866712e-06,
      "loss": 0.8033,
      "step": 275
    },
    {
      "epoch": 0.11560693641618497,
      "grad_norm": 3.118728479888768,
      "learning_rate": 9.992887443160804e-06,
      "loss": 0.7905,
      "step": 280
    },
    {
      "epoch": 0.11767134599504542,
      "grad_norm": 2.8425828854985737,
      "learning_rate": 9.990835871602305e-06,
      "loss": 0.7987,
      "step": 285
    },
    {
      "epoch": 0.11973575557390587,
      "grad_norm": 3.091935940986862,
      "learning_rate": 9.988524943804196e-06,
      "loss": 0.7919,
      "step": 290
    },
    {
      "epoch": 0.12180016515276632,
      "grad_norm": 2.9471270363090976,
      "learning_rate": 9.985954779857292e-06,
      "loss": 0.7885,
      "step": 295
    },
    {
      "epoch": 0.12386457473162675,
      "grad_norm": 2.761224162387467,
      "learning_rate": 9.983125513324e-06,
      "loss": 0.784,
      "step": 300
    },
    {
      "epoch": 0.1259289843104872,
      "grad_norm": 2.779726805954528,
      "learning_rate": 9.98003729123138e-06,
      "loss": 0.7762,
      "step": 305
    },
    {
      "epoch": 0.12799339388934763,
      "grad_norm": 2.8806127663541625,
      "learning_rate": 9.976690274063507e-06,
      "loss": 0.7995,
      "step": 310
    },
    {
      "epoch": 0.13005780346820808,
      "grad_norm": 3.0861309652893163,
      "learning_rate": 9.973084635753125e-06,
      "loss": 0.8002,
      "step": 315
    },
    {
      "epoch": 0.13212221304706853,
      "grad_norm": 2.886159344081087,
      "learning_rate": 9.969220563672614e-06,
      "loss": 0.7742,
      "step": 320
    },
    {
      "epoch": 0.13418662262592898,
      "grad_norm": 2.9262415184999835,
      "learning_rate": 9.965098258624253e-06,
      "loss": 0.7807,
      "step": 325
    },
    {
      "epoch": 0.13625103220478943,
      "grad_norm": 2.9922747460068324,
      "learning_rate": 9.960717934829777e-06,
      "loss": 0.7582,
      "step": 330
    },
    {
      "epoch": 0.13831544178364988,
      "grad_norm": 2.9066701618612596,
      "learning_rate": 9.956079819919254e-06,
      "loss": 0.7849,
      "step": 335
    },
    {
      "epoch": 0.14037985136251033,
      "grad_norm": 2.9577891790066766,
      "learning_rate": 9.951184154919253e-06,
      "loss": 0.7676,
      "step": 340
    },
    {
      "epoch": 0.14244426094137078,
      "grad_norm": 2.7242674149455923,
      "learning_rate": 9.946031194240317e-06,
      "loss": 0.7613,
      "step": 345
    },
    {
      "epoch": 0.14450867052023122,
      "grad_norm": 2.738187421683147,
      "learning_rate": 9.940621205663745e-06,
      "loss": 0.7476,
      "step": 350
    },
    {
      "epoch": 0.14657308009909165,
      "grad_norm": 2.959447667783807,
      "learning_rate": 9.934954470327671e-06,
      "loss": 0.757,
      "step": 355
    },
    {
      "epoch": 0.1486374896779521,
      "grad_norm": 2.9344152274329693,
      "learning_rate": 9.92903128271246e-06,
      "loss": 0.7559,
      "step": 360
    },
    {
      "epoch": 0.15070189925681254,
      "grad_norm": 3.0358566235662554,
      "learning_rate": 9.9228519506254e-06,
      "loss": 0.7671,
      "step": 365
    },
    {
      "epoch": 0.152766308835673,
      "grad_norm": 2.917481842444454,
      "learning_rate": 9.916416795184716e-06,
      "loss": 0.7423,
      "step": 370
    },
    {
      "epoch": 0.15483071841453344,
      "grad_norm": 2.858632389269523,
      "learning_rate": 9.90972615080287e-06,
      "loss": 0.7625,
      "step": 375
    },
    {
      "epoch": 0.1568951279933939,
      "grad_norm": 2.7940389389869997,
      "learning_rate": 9.902780365169186e-06,
      "loss": 0.7344,
      "step": 380
    },
    {
      "epoch": 0.15895953757225434,
      "grad_norm": 2.966296190863466,
      "learning_rate": 9.895579799231791e-06,
      "loss": 0.7431,
      "step": 385
    },
    {
      "epoch": 0.1610239471511148,
      "grad_norm": 2.734933032890328,
      "learning_rate": 9.888124827178847e-06,
      "loss": 0.7543,
      "step": 390
    },
    {
      "epoch": 0.16308835672997524,
      "grad_norm": 3.0106930594709183,
      "learning_rate": 9.880415836419108e-06,
      "loss": 0.7322,
      "step": 395
    },
    {
      "epoch": 0.16515276630883569,
      "grad_norm": 3.1160711879023277,
      "learning_rate": 9.872453227561795e-06,
      "loss": 0.7217,
      "step": 400
    },
    {
      "epoch": 0.1672171758876961,
      "grad_norm": 3.025574483100628,
      "learning_rate": 9.864237414395768e-06,
      "loss": 0.7234,
      "step": 405
    },
    {
      "epoch": 0.16928158546655656,
      "grad_norm": 3.0800133917199024,
      "learning_rate": 9.855768823868026e-06,
      "loss": 0.7451,
      "step": 410
    },
    {
      "epoch": 0.171345995045417,
      "grad_norm": 2.878566581474389,
      "learning_rate": 9.847047896061526e-06,
      "loss": 0.7479,
      "step": 415
    },
    {
      "epoch": 0.17341040462427745,
      "grad_norm": 3.168491396422218,
      "learning_rate": 9.838075084172303e-06,
      "loss": 0.7258,
      "step": 420
    },
    {
      "epoch": 0.1754748142031379,
      "grad_norm": 2.815061772988165,
      "learning_rate": 9.828850854485929e-06,
      "loss": 0.7055,
      "step": 425
    },
    {
      "epoch": 0.17753922378199835,
      "grad_norm": 2.6873037207867356,
      "learning_rate": 9.819375686353277e-06,
      "loss": 0.7253,
      "step": 430
    },
    {
      "epoch": 0.1796036333608588,
      "grad_norm": 2.8648079803750077,
      "learning_rate": 9.809650072165606e-06,
      "loss": 0.7218,
      "step": 435
    },
    {
      "epoch": 0.18166804293971925,
      "grad_norm": 2.6485478231791446,
      "learning_rate": 9.799674517328985e-06,
      "loss": 0.7211,
      "step": 440
    },
    {
      "epoch": 0.1837324525185797,
      "grad_norm": 2.9086971891718365,
      "learning_rate": 9.78944954023802e-06,
      "loss": 0.7113,
      "step": 445
    },
    {
      "epoch": 0.18579686209744012,
      "grad_norm": 2.777331603392391,
      "learning_rate": 9.778975672248909e-06,
      "loss": 0.7236,
      "step": 450
    },
    {
      "epoch": 0.18786127167630057,
      "grad_norm": 2.896862579927988,
      "learning_rate": 9.768253457651853e-06,
      "loss": 0.7239,
      "step": 455
    },
    {
      "epoch": 0.18992568125516102,
      "grad_norm": 2.788383349290103,
      "learning_rate": 9.757283453642741e-06,
      "loss": 0.7083,
      "step": 460
    },
    {
      "epoch": 0.19199009083402147,
      "grad_norm": 2.7799217861819816,
      "learning_rate": 9.746066230294216e-06,
      "loss": 0.7008,
      "step": 465
    },
    {
      "epoch": 0.19405450041288191,
      "grad_norm": 3.0828748770941603,
      "learning_rate": 9.734602370526042e-06,
      "loss": 0.6949,
      "step": 470
    },
    {
      "epoch": 0.19611890999174236,
      "grad_norm": 2.8583233928158145,
      "learning_rate": 9.722892470074811e-06,
      "loss": 0.6923,
      "step": 475
    },
    {
      "epoch": 0.1981833195706028,
      "grad_norm": 2.850888921108626,
      "learning_rate": 9.710937137462992e-06,
      "loss": 0.7056,
      "step": 480
    },
    {
      "epoch": 0.20024772914946326,
      "grad_norm": 2.8804381008997697,
      "learning_rate": 9.698736993967295e-06,
      "loss": 0.7134,
      "step": 485
    },
    {
      "epoch": 0.2023121387283237,
      "grad_norm": 2.5675849510127473,
      "learning_rate": 9.686292673586397e-06,
      "loss": 0.6854,
      "step": 490
    },
    {
      "epoch": 0.20437654830718416,
      "grad_norm": 2.752726728070582,
      "learning_rate": 9.673604823007994e-06,
      "loss": 0.6944,
      "step": 495
    },
    {
      "epoch": 0.20644095788604458,
      "grad_norm": 2.8340105200706507,
      "learning_rate": 9.660674101575188e-06,
      "loss": 0.6959,
      "step": 500
    },
    {
      "epoch": 0.20850536746490503,
      "grad_norm": 2.6749836979567325,
      "learning_rate": 9.647501181252228e-06,
      "loss": 0.7181,
      "step": 505
    },
    {
      "epoch": 0.21056977704376548,
      "grad_norm": 2.871887495235821,
      "learning_rate": 9.634086746589597e-06,
      "loss": 0.6855,
      "step": 510
    },
    {
      "epoch": 0.21263418662262593,
      "grad_norm": 3.022510499989922,
      "learning_rate": 9.62043149468842e-06,
      "loss": 0.6786,
      "step": 515
    },
    {
      "epoch": 0.21469859620148637,
      "grad_norm": 2.798631786402729,
      "learning_rate": 9.60653613516426e-06,
      "loss": 0.6878,
      "step": 520
    },
    {
      "epoch": 0.21676300578034682,
      "grad_norm": 2.7665584034808557,
      "learning_rate": 9.592401390110224e-06,
      "loss": 0.6689,
      "step": 525
    },
    {
      "epoch": 0.21882741535920727,
      "grad_norm": 2.6972597849127995,
      "learning_rate": 9.578027994059448e-06,
      "loss": 0.6938,
      "step": 530
    },
    {
      "epoch": 0.22089182493806772,
      "grad_norm": 2.876624388604322,
      "learning_rate": 9.563416693946927e-06,
      "loss": 0.6831,
      "step": 535
    },
    {
      "epoch": 0.22295623451692817,
      "grad_norm": 2.760614779991045,
      "learning_rate": 9.548568249070688e-06,
      "loss": 0.6721,
      "step": 540
    },
    {
      "epoch": 0.2250206440957886,
      "grad_norm": 2.711332679097759,
      "learning_rate": 9.533483431052347e-06,
      "loss": 0.6734,
      "step": 545
    },
    {
      "epoch": 0.22708505367464904,
      "grad_norm": 2.9419038213760573,
      "learning_rate": 9.518163023797001e-06,
      "loss": 0.6826,
      "step": 550
    },
    {
      "epoch": 0.2291494632535095,
      "grad_norm": 2.708449916455307,
      "learning_rate": 9.502607823452492e-06,
      "loss": 0.6909,
      "step": 555
    },
    {
      "epoch": 0.23121387283236994,
      "grad_norm": 2.6744723555279712,
      "learning_rate": 9.486818638368037e-06,
      "loss": 0.6727,
      "step": 560
    },
    {
      "epoch": 0.2332782824112304,
      "grad_norm": 2.740519766192663,
      "learning_rate": 9.47079628905222e-06,
      "loss": 0.6643,
      "step": 565
    },
    {
      "epoch": 0.23534269199009084,
      "grad_norm": 2.6127044855477544,
      "learning_rate": 9.45454160813035e-06,
      "loss": 0.641,
      "step": 570
    },
    {
      "epoch": 0.23740710156895128,
      "grad_norm": 2.5898360039465547,
      "learning_rate": 9.438055440301198e-06,
      "loss": 0.654,
      "step": 575
    },
    {
      "epoch": 0.23947151114781173,
      "grad_norm": 2.594315428105211,
      "learning_rate": 9.421338642293096e-06,
      "loss": 0.6612,
      "step": 580
    },
    {
      "epoch": 0.24153592072667218,
      "grad_norm": 2.675279309210325,
      "learning_rate": 9.404392082819418e-06,
      "loss": 0.6835,
      "step": 585
    },
    {
      "epoch": 0.24360033030553263,
      "grad_norm": 2.720481221371884,
      "learning_rate": 9.387216642533436e-06,
      "loss": 0.638,
      "step": 590
    },
    {
      "epoch": 0.24566473988439305,
      "grad_norm": 2.7274539164197567,
      "learning_rate": 9.369813213982557e-06,
      "loss": 0.6742,
      "step": 595
    },
    {
      "epoch": 0.2477291494632535,
      "grad_norm": 2.881159963630694,
      "learning_rate": 9.352182701561932e-06,
      "loss": 0.643,
      "step": 600
    },
    {
      "epoch": 0.24979355904211395,
      "grad_norm": 2.672785271513205,
      "learning_rate": 9.334326021467473e-06,
      "loss": 0.6518,
      "step": 605
    },
    {
      "epoch": 0.2518579686209744,
      "grad_norm": 2.6806455710314365,
      "learning_rate": 9.316244101648229e-06,
      "loss": 0.644,
      "step": 610
    },
    {
      "epoch": 0.2539223781998348,
      "grad_norm": 2.5531072290720296,
      "learning_rate": 9.297937881758165e-06,
      "loss": 0.6738,
      "step": 615
    },
    {
      "epoch": 0.25598678777869527,
      "grad_norm": 2.8562519058356335,
      "learning_rate": 9.279408313107342e-06,
      "loss": 0.6497,
      "step": 620
    },
    {
      "epoch": 0.2580511973575557,
      "grad_norm": 2.9118129566241273,
      "learning_rate": 9.260656358612461e-06,
      "loss": 0.6544,
      "step": 625
    },
    {
      "epoch": 0.26011560693641617,
      "grad_norm": 2.6582669873812117,
      "learning_rate": 9.241682992746847e-06,
      "loss": 0.6492,
      "step": 630
    },
    {
      "epoch": 0.2621800165152766,
      "grad_norm": 3.135088950824475,
      "learning_rate": 9.222489201489792e-06,
      "loss": 0.6324,
      "step": 635
    },
    {
      "epoch": 0.26424442609413706,
      "grad_norm": 2.5674033607777282,
      "learning_rate": 9.203075982275323e-06,
      "loss": 0.6461,
      "step": 640
    },
    {
      "epoch": 0.2663088356729975,
      "grad_norm": 2.615329482313224,
      "learning_rate": 9.18344434394037e-06,
      "loss": 0.6322,
      "step": 645
    },
    {
      "epoch": 0.26837324525185796,
      "grad_norm": 2.6662637432037144,
      "learning_rate": 9.163595306672337e-06,
      "loss": 0.6245,
      "step": 650
    },
    {
      "epoch": 0.2704376548307184,
      "grad_norm": 2.788831071229617,
      "learning_rate": 9.143529901956089e-06,
      "loss": 0.6395,
      "step": 655
    },
    {
      "epoch": 0.27250206440957886,
      "grad_norm": 2.7354638041171344,
      "learning_rate": 9.12324917252035e-06,
      "loss": 0.6239,
      "step": 660
    },
    {
      "epoch": 0.2745664739884393,
      "grad_norm": 2.6074213157412682,
      "learning_rate": 9.102754172283513e-06,
      "loss": 0.6146,
      "step": 665
    },
    {
      "epoch": 0.27663088356729976,
      "grad_norm": 2.6642140176865485,
      "learning_rate": 9.082045966298871e-06,
      "loss": 0.6207,
      "step": 670
    },
    {
      "epoch": 0.2786952931461602,
      "grad_norm": 2.7423993871208547,
      "learning_rate": 9.061125630699276e-06,
      "loss": 0.6244,
      "step": 675
    },
    {
      "epoch": 0.28075970272502065,
      "grad_norm": 2.639661943202539,
      "learning_rate": 9.039994252641214e-06,
      "loss": 0.6212,
      "step": 680
    },
    {
      "epoch": 0.2828241123038811,
      "grad_norm": 2.7917609886441443,
      "learning_rate": 9.018652930248299e-06,
      "loss": 0.6279,
      "step": 685
    },
    {
      "epoch": 0.28488852188274155,
      "grad_norm": 2.629871903545716,
      "learning_rate": 8.997102772554226e-06,
      "loss": 0.6229,
      "step": 690
    },
    {
      "epoch": 0.286952931461602,
      "grad_norm": 2.625664182045412,
      "learning_rate": 8.975344899445119e-06,
      "loss": 0.6177,
      "step": 695
    },
    {
      "epoch": 0.28901734104046245,
      "grad_norm": 2.80144553747866,
      "learning_rate": 8.953380441601352e-06,
      "loss": 0.6225,
      "step": 700
    },
    {
      "epoch": 0.2910817506193229,
      "grad_norm": 2.672640679384628,
      "learning_rate": 8.931210540438778e-06,
      "loss": 0.6169,
      "step": 705
    },
    {
      "epoch": 0.2931461601981833,
      "grad_norm": 2.709692648262891,
      "learning_rate": 8.908836348049421e-06,
      "loss": 0.6198,
      "step": 710
    },
    {
      "epoch": 0.29521056977704374,
      "grad_norm": 2.681528317767069,
      "learning_rate": 8.886259027141603e-06,
      "loss": 0.6149,
      "step": 715
    },
    {
      "epoch": 0.2972749793559042,
      "grad_norm": 3.03293644975451,
      "learning_rate": 8.863479750979523e-06,
      "loss": 0.6048,
      "step": 720
    },
    {
      "epoch": 0.29933938893476464,
      "grad_norm": 2.670868153191048,
      "learning_rate": 8.840499703322286e-06,
      "loss": 0.6244,
      "step": 725
    },
    {
      "epoch": 0.3014037985136251,
      "grad_norm": 2.7608395704880646,
      "learning_rate": 8.817320078362388e-06,
      "loss": 0.6035,
      "step": 730
    },
    {
      "epoch": 0.30346820809248554,
      "grad_norm": 2.62588384982977,
      "learning_rate": 8.793942080663658e-06,
      "loss": 0.6172,
      "step": 735
    },
    {
      "epoch": 0.305532617671346,
      "grad_norm": 2.5889068256927006,
      "learning_rate": 8.77036692509866e-06,
      "loss": 0.6128,
      "step": 740
    },
    {
      "epoch": 0.30759702725020643,
      "grad_norm": 2.8092080143766216,
      "learning_rate": 8.746595836785561e-06,
      "loss": 0.6218,
      "step": 745
    },
    {
      "epoch": 0.3096614368290669,
      "grad_norm": 2.6419072436310205,
      "learning_rate": 8.722630051024467e-06,
      "loss": 0.5877,
      "step": 750
    },
    {
      "epoch": 0.31172584640792733,
      "grad_norm": 2.4997349328580363,
      "learning_rate": 8.698470813233223e-06,
      "loss": 0.6091,
      "step": 755
    },
    {
      "epoch": 0.3137902559867878,
      "grad_norm": 2.6466710278909074,
      "learning_rate": 8.674119378882707e-06,
      "loss": 0.6087,
      "step": 760
    },
    {
      "epoch": 0.31585466556564823,
      "grad_norm": 2.9297154437714363,
      "learning_rate": 8.649577013431569e-06,
      "loss": 0.592,
      "step": 765
    },
    {
      "epoch": 0.3179190751445087,
      "grad_norm": 2.549997409403058,
      "learning_rate": 8.624844992260483e-06,
      "loss": 0.6023,
      "step": 770
    },
    {
      "epoch": 0.3199834847233691,
      "grad_norm": 2.7080008761741827,
      "learning_rate": 8.599924600605865e-06,
      "loss": 0.6087,
      "step": 775
    },
    {
      "epoch": 0.3220478943022296,
      "grad_norm": 2.567143331934158,
      "learning_rate": 8.574817133493085e-06,
      "loss": 0.5847,
      "step": 780
    },
    {
      "epoch": 0.32411230388109,
      "grad_norm": 3.0113192932757418,
      "learning_rate": 8.549523895669167e-06,
      "loss": 0.5908,
      "step": 785
    },
    {
      "epoch": 0.3261767134599505,
      "grad_norm": 2.7819521209578206,
      "learning_rate": 8.52404620153499e-06,
      "loss": 0.5953,
      "step": 790
    },
    {
      "epoch": 0.3282411230388109,
      "grad_norm": 2.747216748021648,
      "learning_rate": 8.498385375076979e-06,
      "loss": 0.6114,
      "step": 795
    },
    {
      "epoch": 0.33030553261767137,
      "grad_norm": 2.8476886670194523,
      "learning_rate": 8.472542749798302e-06,
      "loss": 0.5923,
      "step": 800
    },
    {
      "epoch": 0.33236994219653176,
      "grad_norm": 3.078148326221217,
      "learning_rate": 8.44651966864958e-06,
      "loss": 0.5854,
      "step": 805
    },
    {
      "epoch": 0.3344343517753922,
      "grad_norm": 2.7069355571781553,
      "learning_rate": 8.420317483959086e-06,
      "loss": 0.5963,
      "step": 810
    },
    {
      "epoch": 0.33649876135425266,
      "grad_norm": 2.651998023029158,
      "learning_rate": 8.39393755736248e-06,
      "loss": 0.578,
      "step": 815
    },
    {
      "epoch": 0.3385631709331131,
      "grad_norm": 2.6251615353567117,
      "learning_rate": 8.367381259732042e-06,
      "loss": 0.587,
      "step": 820
    },
    {
      "epoch": 0.34062758051197356,
      "grad_norm": 2.5308341360997595,
      "learning_rate": 8.340649971105444e-06,
      "loss": 0.5732,
      "step": 825
    },
    {
      "epoch": 0.342691990090834,
      "grad_norm": 2.823077213728183,
      "learning_rate": 8.313745080614017e-06,
      "loss": 0.5672,
      "step": 830
    },
    {
      "epoch": 0.34475639966969446,
      "grad_norm": 2.659469137057744,
      "learning_rate": 8.286667986410578e-06,
      "loss": 0.5858,
      "step": 835
    },
    {
      "epoch": 0.3468208092485549,
      "grad_norm": 2.845018948853141,
      "learning_rate": 8.259420095596766e-06,
      "loss": 0.5662,
      "step": 840
    },
    {
      "epoch": 0.34888521882741536,
      "grad_norm": 2.6028478895400116,
      "learning_rate": 8.232002824149917e-06,
      "loss": 0.5606,
      "step": 845
    },
    {
      "epoch": 0.3509496284062758,
      "grad_norm": 2.664709732227379,
      "learning_rate": 8.204417596849492e-06,
      "loss": 0.5807,
      "step": 850
    },
    {
      "epoch": 0.35301403798513625,
      "grad_norm": 2.6109659834823478,
      "learning_rate": 8.176665847203023e-06,
      "loss": 0.5678,
      "step": 855
    },
    {
      "epoch": 0.3550784475639967,
      "grad_norm": 2.576771155175357,
      "learning_rate": 8.148749017371627e-06,
      "loss": 0.5656,
      "step": 860
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 2.6688801593883817,
      "learning_rate": 8.120668558095056e-06,
      "loss": 0.5749,
      "step": 865
    },
    {
      "epoch": 0.3592072667217176,
      "grad_norm": 2.801881409304358,
      "learning_rate": 8.092425928616317e-06,
      "loss": 0.5603,
      "step": 870
    },
    {
      "epoch": 0.36127167630057805,
      "grad_norm": 2.7158675577145512,
      "learning_rate": 8.064022596605825e-06,
      "loss": 0.5878,
      "step": 875
    },
    {
      "epoch": 0.3633360858794385,
      "grad_norm": 2.7112475864205208,
      "learning_rate": 8.03546003808515e-06,
      "loss": 0.5697,
      "step": 880
    },
    {
      "epoch": 0.36540049545829895,
      "grad_norm": 2.757081302105068,
      "learning_rate": 8.006739737350303e-06,
      "loss": 0.5549,
      "step": 885
    },
    {
      "epoch": 0.3674649050371594,
      "grad_norm": 2.6798695533176,
      "learning_rate": 7.977863186894598e-06,
      "loss": 0.5621,
      "step": 890
    },
    {
      "epoch": 0.36952931461601984,
      "grad_norm": 2.901794116790705,
      "learning_rate": 7.948831887331113e-06,
      "loss": 0.5531,
      "step": 895
    },
    {
      "epoch": 0.37159372419488024,
      "grad_norm": 2.6209628611019333,
      "learning_rate": 7.91964734731468e-06,
      "loss": 0.5529,
      "step": 900
    },
    {
      "epoch": 0.3736581337737407,
      "grad_norm": 2.631776999408232,
      "learning_rate": 7.890311083463511e-06,
      "loss": 0.5341,
      "step": 905
    },
    {
      "epoch": 0.37572254335260113,
      "grad_norm": 2.6460839052940424,
      "learning_rate": 7.860824620280367e-06,
      "loss": 0.5589,
      "step": 910
    },
    {
      "epoch": 0.3777869529314616,
      "grad_norm": 2.7027858769480604,
      "learning_rate": 7.831189490073343e-06,
      "loss": 0.5662,
      "step": 915
    },
    {
      "epoch": 0.37985136251032203,
      "grad_norm": 2.686204866691535,
      "learning_rate": 7.80140723287624e-06,
      "loss": 0.5557,
      "step": 920
    },
    {
      "epoch": 0.3819157720891825,
      "grad_norm": 2.537669294604416,
      "learning_rate": 7.771479396368533e-06,
      "loss": 0.5392,
      "step": 925
    },
    {
      "epoch": 0.38398018166804293,
      "grad_norm": 2.634245727933428,
      "learning_rate": 7.741407535794939e-06,
      "loss": 0.5476,
      "step": 930
    },
    {
      "epoch": 0.3860445912469034,
      "grad_norm": 2.63998365984595,
      "learning_rate": 7.711193213884602e-06,
      "loss": 0.5411,
      "step": 935
    },
    {
      "epoch": 0.38810900082576383,
      "grad_norm": 2.5066641406139163,
      "learning_rate": 7.680838000769889e-06,
      "loss": 0.5436,
      "step": 940
    },
    {
      "epoch": 0.3901734104046243,
      "grad_norm": 2.4698510563626384,
      "learning_rate": 7.650343473904776e-06,
      "loss": 0.536,
      "step": 945
    },
    {
      "epoch": 0.3922378199834847,
      "grad_norm": 2.5102808206533753,
      "learning_rate": 7.619711217982899e-06,
      "loss": 0.5391,
      "step": 950
    },
    {
      "epoch": 0.3943022295623452,
      "grad_norm": 2.648139467398325,
      "learning_rate": 7.5889428248551854e-06,
      "loss": 0.5368,
      "step": 955
    },
    {
      "epoch": 0.3963666391412056,
      "grad_norm": 2.6648738415553765,
      "learning_rate": 7.558039893447131e-06,
      "loss": 0.5496,
      "step": 960
    },
    {
      "epoch": 0.39843104872006607,
      "grad_norm": 2.6157509498551526,
      "learning_rate": 7.527004029675722e-06,
      "loss": 0.5318,
      "step": 965
    },
    {
      "epoch": 0.4004954582989265,
      "grad_norm": 2.5975829607560574,
      "learning_rate": 7.495836846365965e-06,
      "loss": 0.5352,
      "step": 970
    },
    {
      "epoch": 0.40255986787778697,
      "grad_norm": 2.92450937390704,
      "learning_rate": 7.464539963167091e-06,
      "loss": 0.5177,
      "step": 975
    },
    {
      "epoch": 0.4046242774566474,
      "grad_norm": 2.7355952122295264,
      "learning_rate": 7.433115006468373e-06,
      "loss": 0.5322,
      "step": 980
    },
    {
      "epoch": 0.40668868703550787,
      "grad_norm": 2.5829736114469184,
      "learning_rate": 7.40156360931462e-06,
      "loss": 0.5298,
      "step": 985
    },
    {
      "epoch": 0.4087530966143683,
      "grad_norm": 2.705958846647064,
      "learning_rate": 7.369887411321305e-06,
      "loss": 0.5477,
      "step": 990
    },
    {
      "epoch": 0.4108175061932287,
      "grad_norm": 2.4846959730474145,
      "learning_rate": 7.338088058589362e-06,
      "loss": 0.5324,
      "step": 995
    },
    {
      "epoch": 0.41288191577208916,
      "grad_norm": 2.5252160694956762,
      "learning_rate": 7.306167203619647e-06,
      "loss": 0.5335,
      "step": 1000
    },
    {
      "epoch": 0.4149463253509496,
      "grad_norm": 2.509240890701763,
      "learning_rate": 7.27412650522706e-06,
      "loss": 0.5219,
      "step": 1005
    },
    {
      "epoch": 0.41701073492981006,
      "grad_norm": 2.567378202198639,
      "learning_rate": 7.241967628454345e-06,
      "loss": 0.5142,
      "step": 1010
    },
    {
      "epoch": 0.4190751445086705,
      "grad_norm": 2.479116521394592,
      "learning_rate": 7.20969224448556e-06,
      "loss": 0.5198,
      "step": 1015
    },
    {
      "epoch": 0.42113955408753095,
      "grad_norm": 2.456783739959803,
      "learning_rate": 7.1773020305592355e-06,
      "loss": 0.5141,
      "step": 1020
    },
    {
      "epoch": 0.4232039636663914,
      "grad_norm": 2.6855797799019356,
      "learning_rate": 7.1447986698812115e-06,
      "loss": 0.5307,
      "step": 1025
    },
    {
      "epoch": 0.42526837324525185,
      "grad_norm": 2.5323199215752203,
      "learning_rate": 7.112183851537166e-06,
      "loss": 0.5256,
      "step": 1030
    },
    {
      "epoch": 0.4273327828241123,
      "grad_norm": 2.735418738176186,
      "learning_rate": 7.079459270404842e-06,
      "loss": 0.5112,
      "step": 1035
    },
    {
      "epoch": 0.42939719240297275,
      "grad_norm": 2.6703449875287735,
      "learning_rate": 7.046626627065974e-06,
      "loss": 0.5099,
      "step": 1040
    },
    {
      "epoch": 0.4314616019818332,
      "grad_norm": 2.4631394786044747,
      "learning_rate": 7.013687627717905e-06,
      "loss": 0.4978,
      "step": 1045
    },
    {
      "epoch": 0.43352601156069365,
      "grad_norm": 2.8328555379260196,
      "learning_rate": 6.980643984084927e-06,
      "loss": 0.5179,
      "step": 1050
    },
    {
      "epoch": 0.4355904211395541,
      "grad_norm": 2.504430665948721,
      "learning_rate": 6.947497413329333e-06,
      "loss": 0.5199,
      "step": 1055
    },
    {
      "epoch": 0.43765483071841454,
      "grad_norm": 2.5711442943268663,
      "learning_rate": 6.914249637962174e-06,
      "loss": 0.5133,
      "step": 1060
    },
    {
      "epoch": 0.439719240297275,
      "grad_norm": 2.5576807334846747,
      "learning_rate": 6.880902385753749e-06,
      "loss": 0.51,
      "step": 1065
    },
    {
      "epoch": 0.44178364987613544,
      "grad_norm": 2.4378114750971833,
      "learning_rate": 6.847457389643823e-06,
      "loss": 0.4966,
      "step": 1070
    },
    {
      "epoch": 0.4438480594549959,
      "grad_norm": 2.57922332477375,
      "learning_rate": 6.8139163876515694e-06,
      "loss": 0.5018,
      "step": 1075
    },
    {
      "epoch": 0.44591246903385634,
      "grad_norm": 2.4923563722799145,
      "learning_rate": 6.780281122785243e-06,
      "loss": 0.5089,
      "step": 1080
    },
    {
      "epoch": 0.4479768786127168,
      "grad_norm": 2.380488675820629,
      "learning_rate": 6.74655334295162e-06,
      "loss": 0.4969,
      "step": 1085
    },
    {
      "epoch": 0.4500412881915772,
      "grad_norm": 2.622744707083321,
      "learning_rate": 6.712734800865146e-06,
      "loss": 0.5006,
      "step": 1090
    },
    {
      "epoch": 0.45210569777043763,
      "grad_norm": 2.5279298037448514,
      "learning_rate": 6.67882725395687e-06,
      "loss": 0.4919,
      "step": 1095
    },
    {
      "epoch": 0.4541701073492981,
      "grad_norm": 2.5696353167432076,
      "learning_rate": 6.644832464283105e-06,
      "loss": 0.5006,
      "step": 1100
    },
    {
      "epoch": 0.45623451692815853,
      "grad_norm": 2.6472120693562675,
      "learning_rate": 6.6107521984338654e-06,
      "loss": 0.5222,
      "step": 1105
    },
    {
      "epoch": 0.458298926507019,
      "grad_norm": 2.5641259833492915,
      "learning_rate": 6.576588227441063e-06,
      "loss": 0.5021,
      "step": 1110
    },
    {
      "epoch": 0.4603633360858794,
      "grad_norm": 2.508674684822535,
      "learning_rate": 6.5423423266864715e-06,
      "loss": 0.5019,
      "step": 1115
    },
    {
      "epoch": 0.4624277456647399,
      "grad_norm": 2.5803067400324546,
      "learning_rate": 6.508016275809472e-06,
      "loss": 0.5116,
      "step": 1120
    },
    {
      "epoch": 0.4644921552436003,
      "grad_norm": 2.4783246238450296,
      "learning_rate": 6.473611858614557e-06,
      "loss": 0.4939,
      "step": 1125
    },
    {
      "epoch": 0.4665565648224608,
      "grad_norm": 2.3961470840043986,
      "learning_rate": 6.439130862978653e-06,
      "loss": 0.4964,
      "step": 1130
    },
    {
      "epoch": 0.4686209744013212,
      "grad_norm": 2.523268688947089,
      "learning_rate": 6.4045750807581875e-06,
      "loss": 0.4878,
      "step": 1135
    },
    {
      "epoch": 0.47068538398018167,
      "grad_norm": 2.464908065215536,
      "learning_rate": 6.369946307695994e-06,
      "loss": 0.5004,
      "step": 1140
    },
    {
      "epoch": 0.4727497935590421,
      "grad_norm": 2.4978854529114556,
      "learning_rate": 6.335246343327983e-06,
      "loss": 0.4928,
      "step": 1145
    },
    {
      "epoch": 0.47481420313790257,
      "grad_norm": 2.5910220265362405,
      "learning_rate": 6.300476990889622e-06,
      "loss": 0.4859,
      "step": 1150
    },
    {
      "epoch": 0.476878612716763,
      "grad_norm": 2.6474515998209314,
      "learning_rate": 6.2656400572222374e-06,
      "loss": 0.4968,
      "step": 1155
    },
    {
      "epoch": 0.47894302229562347,
      "grad_norm": 2.5111508875593627,
      "learning_rate": 6.230737352679115e-06,
      "loss": 0.4825,
      "step": 1160
    },
    {
      "epoch": 0.4810074318744839,
      "grad_norm": 2.6001140755401067,
      "learning_rate": 6.1957706910314195e-06,
      "loss": 0.4903,
      "step": 1165
    },
    {
      "epoch": 0.48307184145334436,
      "grad_norm": 2.4405093084108853,
      "learning_rate": 6.160741889373948e-06,
      "loss": 0.4738,
      "step": 1170
    },
    {
      "epoch": 0.4851362510322048,
      "grad_norm": 2.6122999506914817,
      "learning_rate": 6.12565276803069e-06,
      "loss": 0.4851,
      "step": 1175
    },
    {
      "epoch": 0.48720066061106526,
      "grad_norm": 2.5491172182391284,
      "learning_rate": 6.090505150460239e-06,
      "loss": 0.4969,
      "step": 1180
    },
    {
      "epoch": 0.48926507018992565,
      "grad_norm": 2.5615374385855256,
      "learning_rate": 6.055300863161035e-06,
      "loss": 0.4778,
      "step": 1185
    },
    {
      "epoch": 0.4913294797687861,
      "grad_norm": 2.4469903993231514,
      "learning_rate": 6.020041735576444e-06,
      "loss": 0.4787,
      "step": 1190
    },
    {
      "epoch": 0.49339388934764655,
      "grad_norm": 2.4860974782023346,
      "learning_rate": 5.9847295999996835e-06,
      "loss": 0.4914,
      "step": 1195
    },
    {
      "epoch": 0.495458298926507,
      "grad_norm": 2.4900103589842395,
      "learning_rate": 5.949366291478621e-06,
      "loss": 0.4906,
      "step": 1200
    },
    {
      "epoch": 0.49752270850536745,
      "grad_norm": 2.5283147963068964,
      "learning_rate": 5.9139536477203945e-06,
      "loss": 0.4759,
      "step": 1205
    },
    {
      "epoch": 0.4995871180842279,
      "grad_norm": 2.573137486004109,
      "learning_rate": 5.878493508995924e-06,
      "loss": 0.4768,
      "step": 1210
    },
    {
      "epoch": 0.5016515276630884,
      "grad_norm": 2.579476813837634,
      "learning_rate": 5.842987718044275e-06,
      "loss": 0.4679,
      "step": 1215
    },
    {
      "epoch": 0.5037159372419489,
      "grad_norm": 2.497492462831134,
      "learning_rate": 5.807438119976904e-06,
      "loss": 0.4698,
      "step": 1220
    },
    {
      "epoch": 0.5057803468208093,
      "grad_norm": 2.590655156263669,
      "learning_rate": 5.771846562181761e-06,
      "loss": 0.4728,
      "step": 1225
    },
    {
      "epoch": 0.5078447563996696,
      "grad_norm": 2.5177297962486493,
      "learning_rate": 5.736214894227304e-06,
      "loss": 0.473,
      "step": 1230
    },
    {
      "epoch": 0.5099091659785301,
      "grad_norm": 2.6021365054723775,
      "learning_rate": 5.700544967766373e-06,
      "loss": 0.4753,
      "step": 1235
    },
    {
      "epoch": 0.5119735755573905,
      "grad_norm": 2.604951347166713,
      "learning_rate": 5.6648386364399645e-06,
      "loss": 0.47,
      "step": 1240
    },
    {
      "epoch": 0.514037985136251,
      "grad_norm": 2.4157020679627794,
      "learning_rate": 5.629097755780913e-06,
      "loss": 0.4711,
      "step": 1245
    },
    {
      "epoch": 0.5161023947151114,
      "grad_norm": 2.3659395254457167,
      "learning_rate": 5.59332418311746e-06,
      "loss": 0.4773,
      "step": 1250
    },
    {
      "epoch": 0.5181668042939719,
      "grad_norm": 2.499361991412955,
      "learning_rate": 5.557519777476733e-06,
      "loss": 0.4527,
      "step": 1255
    },
    {
      "epoch": 0.5202312138728323,
      "grad_norm": 2.5585105365426513,
      "learning_rate": 5.521686399488145e-06,
      "loss": 0.4573,
      "step": 1260
    },
    {
      "epoch": 0.5222956234516928,
      "grad_norm": 2.4814508055210567,
      "learning_rate": 5.485825911286697e-06,
      "loss": 0.4691,
      "step": 1265
    },
    {
      "epoch": 0.5243600330305532,
      "grad_norm": 2.6081291748684716,
      "learning_rate": 5.449940176416213e-06,
      "loss": 0.4526,
      "step": 1270
    },
    {
      "epoch": 0.5264244426094137,
      "grad_norm": 2.587126119981003,
      "learning_rate": 5.414031059732502e-06,
      "loss": 0.4605,
      "step": 1275
    },
    {
      "epoch": 0.5284888521882741,
      "grad_norm": 2.5458768564010765,
      "learning_rate": 5.378100427306439e-06,
      "loss": 0.4467,
      "step": 1280
    },
    {
      "epoch": 0.5305532617671346,
      "grad_norm": 2.422344978356131,
      "learning_rate": 5.3421501463269976e-06,
      "loss": 0.4563,
      "step": 1285
    },
    {
      "epoch": 0.532617671345995,
      "grad_norm": 2.481246363328885,
      "learning_rate": 5.30618208500422e-06,
      "loss": 0.4496,
      "step": 1290
    },
    {
      "epoch": 0.5346820809248555,
      "grad_norm": 2.5964265890691274,
      "learning_rate": 5.270198112472128e-06,
      "loss": 0.4487,
      "step": 1295
    },
    {
      "epoch": 0.5367464905037159,
      "grad_norm": 2.375375823310664,
      "learning_rate": 5.234200098691597e-06,
      "loss": 0.4492,
      "step": 1300
    },
    {
      "epoch": 0.5388109000825764,
      "grad_norm": 2.482312531712164,
      "learning_rate": 5.19818991435317e-06,
      "loss": 0.4524,
      "step": 1305
    },
    {
      "epoch": 0.5408753096614368,
      "grad_norm": 2.6268917122173154,
      "learning_rate": 5.162169430779861e-06,
      "loss": 0.4343,
      "step": 1310
    },
    {
      "epoch": 0.5429397192402973,
      "grad_norm": 2.543083790336233,
      "learning_rate": 5.126140519829888e-06,
      "loss": 0.4738,
      "step": 1315
    },
    {
      "epoch": 0.5450041288191577,
      "grad_norm": 2.4547121862030865,
      "learning_rate": 5.090105053799418e-06,
      "loss": 0.4604,
      "step": 1320
    },
    {
      "epoch": 0.5470685383980182,
      "grad_norm": 2.5806078709462295,
      "learning_rate": 5.054064905325262e-06,
      "loss": 0.4571,
      "step": 1325
    },
    {
      "epoch": 0.5491329479768786,
      "grad_norm": 2.538503365518751,
      "learning_rate": 5.018021947287556e-06,
      "loss": 0.4432,
      "step": 1330
    },
    {
      "epoch": 0.5511973575557391,
      "grad_norm": 2.4205037183193157,
      "learning_rate": 4.981978052712446e-06,
      "loss": 0.4545,
      "step": 1335
    },
    {
      "epoch": 0.5532617671345995,
      "grad_norm": 2.5910957054221817,
      "learning_rate": 4.9459350946747405e-06,
      "loss": 0.4584,
      "step": 1340
    },
    {
      "epoch": 0.55532617671346,
      "grad_norm": 2.423593643872453,
      "learning_rate": 4.909894946200583e-06,
      "loss": 0.454,
      "step": 1345
    },
    {
      "epoch": 0.5573905862923204,
      "grad_norm": 2.491439793223575,
      "learning_rate": 4.873859480170113e-06,
      "loss": 0.4535,
      "step": 1350
    },
    {
      "epoch": 0.5594549958711809,
      "grad_norm": 2.6251023268801106,
      "learning_rate": 4.83783056922014e-06,
      "loss": 0.4475,
      "step": 1355
    },
    {
      "epoch": 0.5615194054500413,
      "grad_norm": 2.379012672081149,
      "learning_rate": 4.801810085646831e-06,
      "loss": 0.4427,
      "step": 1360
    },
    {
      "epoch": 0.5635838150289018,
      "grad_norm": 2.438307211574731,
      "learning_rate": 4.765799901308405e-06,
      "loss": 0.4392,
      "step": 1365
    },
    {
      "epoch": 0.5656482246077622,
      "grad_norm": 2.4743966890129534,
      "learning_rate": 4.729801887527872e-06,
      "loss": 0.4376,
      "step": 1370
    },
    {
      "epoch": 0.5677126341866227,
      "grad_norm": 2.4438402043185636,
      "learning_rate": 4.6938179149957815e-06,
      "loss": 0.4448,
      "step": 1375
    },
    {
      "epoch": 0.5697770437654831,
      "grad_norm": 2.3233363056641587,
      "learning_rate": 4.657849853673004e-06,
      "loss": 0.4332,
      "step": 1380
    },
    {
      "epoch": 0.5718414533443436,
      "grad_norm": 2.3513159156694536,
      "learning_rate": 4.621899572693564e-06,
      "loss": 0.4286,
      "step": 1385
    },
    {
      "epoch": 0.573905862923204,
      "grad_norm": 2.452790907750448,
      "learning_rate": 4.585968940267499e-06,
      "loss": 0.4329,
      "step": 1390
    },
    {
      "epoch": 0.5759702725020645,
      "grad_norm": 2.4876165011978544,
      "learning_rate": 4.550059823583788e-06,
      "loss": 0.4368,
      "step": 1395
    },
    {
      "epoch": 0.5780346820809249,
      "grad_norm": 2.4110190508295926,
      "learning_rate": 4.514174088713305e-06,
      "loss": 0.4294,
      "step": 1400
    },
    {
      "epoch": 0.5800990916597853,
      "grad_norm": 2.4790636104654724,
      "learning_rate": 4.478313600511856e-06,
      "loss": 0.438,
      "step": 1405
    },
    {
      "epoch": 0.5821635012386458,
      "grad_norm": 2.5304633815274062,
      "learning_rate": 4.442480222523269e-06,
      "loss": 0.4383,
      "step": 1410
    },
    {
      "epoch": 0.5842279108175062,
      "grad_norm": 2.365309103378621,
      "learning_rate": 4.4066758168825414e-06,
      "loss": 0.4383,
      "step": 1415
    },
    {
      "epoch": 0.5862923203963666,
      "grad_norm": 2.4670833952729687,
      "learning_rate": 4.370902244219088e-06,
      "loss": 0.4389,
      "step": 1420
    },
    {
      "epoch": 0.588356729975227,
      "grad_norm": 2.3390938676909734,
      "learning_rate": 4.335161363560037e-06,
      "loss": 0.4291,
      "step": 1425
    },
    {
      "epoch": 0.5904211395540875,
      "grad_norm": 2.362644766829775,
      "learning_rate": 4.299455032233631e-06,
      "loss": 0.4288,
      "step": 1430
    },
    {
      "epoch": 0.5924855491329479,
      "grad_norm": 2.445730007800526,
      "learning_rate": 4.263785105772696e-06,
      "loss": 0.4335,
      "step": 1435
    },
    {
      "epoch": 0.5945499587118084,
      "grad_norm": 2.303973634970439,
      "learning_rate": 4.22815343781824e-06,
      "loss": 0.4366,
      "step": 1440
    },
    {
      "epoch": 0.5966143682906688,
      "grad_norm": 2.391887494831502,
      "learning_rate": 4.192561880023099e-06,
      "loss": 0.4261,
      "step": 1445
    },
    {
      "epoch": 0.5986787778695293,
      "grad_norm": 2.293302291662098,
      "learning_rate": 4.157012281955726e-06,
      "loss": 0.424,
      "step": 1450
    },
    {
      "epoch": 0.6007431874483897,
      "grad_norm": 2.3712447663578655,
      "learning_rate": 4.1215064910040795e-06,
      "loss": 0.4314,
      "step": 1455
    },
    {
      "epoch": 0.6028075970272502,
      "grad_norm": 2.381812200033987,
      "learning_rate": 4.086046352279606e-06,
      "loss": 0.4227,
      "step": 1460
    },
    {
      "epoch": 0.6048720066061106,
      "grad_norm": 2.4743735981326114,
      "learning_rate": 4.050633708521381e-06,
      "loss": 0.4158,
      "step": 1465
    },
    {
      "epoch": 0.6069364161849711,
      "grad_norm": 2.396322395044398,
      "learning_rate": 4.015270400000317e-06,
      "loss": 0.42,
      "step": 1470
    },
    {
      "epoch": 0.6090008257638315,
      "grad_norm": 2.565114051952279,
      "learning_rate": 3.979958264423558e-06,
      "loss": 0.4155,
      "step": 1475
    },
    {
      "epoch": 0.611065235342692,
      "grad_norm": 2.4934554445972954,
      "learning_rate": 3.944699136838965e-06,
      "loss": 0.4126,
      "step": 1480
    },
    {
      "epoch": 0.6131296449215524,
      "grad_norm": 2.4162398419293187,
      "learning_rate": 3.909494849539761e-06,
      "loss": 0.4213,
      "step": 1485
    },
    {
      "epoch": 0.6151940545004129,
      "grad_norm": 2.464180248251729,
      "learning_rate": 3.874347231969312e-06,
      "loss": 0.4235,
      "step": 1490
    },
    {
      "epoch": 0.6172584640792733,
      "grad_norm": 2.4941582294474434,
      "learning_rate": 3.839258110626053e-06,
      "loss": 0.4307,
      "step": 1495
    },
    {
      "epoch": 0.6193228736581338,
      "grad_norm": 2.276754213405921,
      "learning_rate": 3.8042293089685813e-06,
      "loss": 0.4065,
      "step": 1500
    },
    {
      "epoch": 0.6213872832369942,
      "grad_norm": 2.324659425092549,
      "learning_rate": 3.769262647320886e-06,
      "loss": 0.4179,
      "step": 1505
    },
    {
      "epoch": 0.6234516928158547,
      "grad_norm": 2.3013999943076815,
      "learning_rate": 3.7343599427777634e-06,
      "loss": 0.4157,
      "step": 1510
    },
    {
      "epoch": 0.6255161023947151,
      "grad_norm": 2.3853366654087633,
      "learning_rate": 3.69952300911038e-06,
      "loss": 0.4087,
      "step": 1515
    },
    {
      "epoch": 0.6275805119735756,
      "grad_norm": 2.3274220274703206,
      "learning_rate": 3.6647536566720186e-06,
      "loss": 0.4203,
      "step": 1520
    },
    {
      "epoch": 0.629644921552436,
      "grad_norm": 2.4392433402643947,
      "learning_rate": 3.630053692304005e-06,
      "loss": 0.4091,
      "step": 1525
    },
    {
      "epoch": 0.6317093311312965,
      "grad_norm": 2.4428540121878894,
      "learning_rate": 3.5954249192418133e-06,
      "loss": 0.4255,
      "step": 1530
    },
    {
      "epoch": 0.6337737407101569,
      "grad_norm": 2.3810770043048692,
      "learning_rate": 3.560869137021349e-06,
      "loss": 0.4103,
      "step": 1535
    },
    {
      "epoch": 0.6358381502890174,
      "grad_norm": 2.301863999851205,
      "learning_rate": 3.526388141385444e-06,
      "loss": 0.4155,
      "step": 1540
    },
    {
      "epoch": 0.6379025598678778,
      "grad_norm": 2.400363519907276,
      "learning_rate": 3.4919837241905308e-06,
      "loss": 0.4224,
      "step": 1545
    },
    {
      "epoch": 0.6399669694467383,
      "grad_norm": 2.477498090820749,
      "learning_rate": 3.4576576733135284e-06,
      "loss": 0.3938,
      "step": 1550
    },
    {
      "epoch": 0.6420313790255987,
      "grad_norm": 2.707387721012179,
      "learning_rate": 3.4234117725589385e-06,
      "loss": 0.4111,
      "step": 1555
    },
    {
      "epoch": 0.6440957886044592,
      "grad_norm": 2.341459106750255,
      "learning_rate": 3.3892478015661362e-06,
      "loss": 0.4139,
      "step": 1560
    },
    {
      "epoch": 0.6461601981833196,
      "grad_norm": 2.4202740830209137,
      "learning_rate": 3.355167535716897e-06,
      "loss": 0.4127,
      "step": 1565
    },
    {
      "epoch": 0.64822460776218,
      "grad_norm": 2.3816575196973577,
      "learning_rate": 3.321172746043131e-06,
      "loss": 0.4002,
      "step": 1570
    },
    {
      "epoch": 0.6502890173410405,
      "grad_norm": 2.454435864594131,
      "learning_rate": 3.2872651991348548e-06,
      "loss": 0.403,
      "step": 1575
    },
    {
      "epoch": 0.652353426919901,
      "grad_norm": 2.591163202406022,
      "learning_rate": 3.253446657048382e-06,
      "loss": 0.4072,
      "step": 1580
    },
    {
      "epoch": 0.6544178364987614,
      "grad_norm": 2.4282921305894676,
      "learning_rate": 3.2197188772147593e-06,
      "loss": 0.3869,
      "step": 1585
    },
    {
      "epoch": 0.6564822460776218,
      "grad_norm": 2.4682745998960827,
      "learning_rate": 3.186083612348434e-06,
      "loss": 0.4028,
      "step": 1590
    },
    {
      "epoch": 0.6585466556564823,
      "grad_norm": 2.376999029803135,
      "learning_rate": 3.1525426103561774e-06,
      "loss": 0.4064,
      "step": 1595
    },
    {
      "epoch": 0.6606110652353427,
      "grad_norm": 2.370838413822066,
      "learning_rate": 3.119097614246252e-06,
      "loss": 0.3978,
      "step": 1600
    },
    {
      "epoch": 0.6626754748142032,
      "grad_norm": 2.362260369144417,
      "learning_rate": 3.0857503620378284e-06,
      "loss": 0.3905,
      "step": 1605
    },
    {
      "epoch": 0.6647398843930635,
      "grad_norm": 2.421231306606393,
      "learning_rate": 3.052502586670669e-06,
      "loss": 0.4053,
      "step": 1610
    },
    {
      "epoch": 0.666804293971924,
      "grad_norm": 2.395970410200359,
      "learning_rate": 3.0193560159150747e-06,
      "loss": 0.3943,
      "step": 1615
    },
    {
      "epoch": 0.6688687035507844,
      "grad_norm": 2.5057121725436917,
      "learning_rate": 2.986312372282097e-06,
      "loss": 0.4182,
      "step": 1620
    },
    {
      "epoch": 0.6709331131296449,
      "grad_norm": 2.3776161687385207,
      "learning_rate": 2.9533733729340274e-06,
      "loss": 0.4077,
      "step": 1625
    },
    {
      "epoch": 0.6729975227085053,
      "grad_norm": 2.5915693629163217,
      "learning_rate": 2.920540729595159e-06,
      "loss": 0.4085,
      "step": 1630
    },
    {
      "epoch": 0.6750619322873658,
      "grad_norm": 2.443160283840937,
      "learning_rate": 2.8878161484628364e-06,
      "loss": 0.4028,
      "step": 1635
    },
    {
      "epoch": 0.6771263418662262,
      "grad_norm": 2.420398270818125,
      "learning_rate": 2.8552013301187898e-06,
      "loss": 0.3972,
      "step": 1640
    },
    {
      "epoch": 0.6791907514450867,
      "grad_norm": 2.4196367974051265,
      "learning_rate": 2.8226979694407657e-06,
      "loss": 0.4067,
      "step": 1645
    },
    {
      "epoch": 0.6812551610239471,
      "grad_norm": 2.438274119472032,
      "learning_rate": 2.790307755514442e-06,
      "loss": 0.4032,
      "step": 1650
    },
    {
      "epoch": 0.6833195706028076,
      "grad_norm": 3.0023110501423944,
      "learning_rate": 2.7580323715456564e-06,
      "loss": 0.4121,
      "step": 1655
    },
    {
      "epoch": 0.685383980181668,
      "grad_norm": 2.3597852522402474,
      "learning_rate": 2.7258734947729428e-06,
      "loss": 0.3938,
      "step": 1660
    },
    {
      "epoch": 0.6874483897605285,
      "grad_norm": 2.4413156249499854,
      "learning_rate": 2.6938327963803545e-06,
      "loss": 0.3925,
      "step": 1665
    },
    {
      "epoch": 0.6895127993393889,
      "grad_norm": 2.441149846551036,
      "learning_rate": 2.6619119414106385e-06,
      "loss": 0.3902,
      "step": 1670
    },
    {
      "epoch": 0.6915772089182494,
      "grad_norm": 2.4274483132890685,
      "learning_rate": 2.6301125886786965e-06,
      "loss": 0.3933,
      "step": 1675
    },
    {
      "epoch": 0.6936416184971098,
      "grad_norm": 2.514301355730002,
      "learning_rate": 2.5984363906853814e-06,
      "loss": 0.3847,
      "step": 1680
    },
    {
      "epoch": 0.6957060280759703,
      "grad_norm": 2.23025942549343,
      "learning_rate": 2.566884993531628e-06,
      "loss": 0.3868,
      "step": 1685
    },
    {
      "epoch": 0.6977704376548307,
      "grad_norm": 2.3910909958649764,
      "learning_rate": 2.5354600368329108e-06,
      "loss": 0.379,
      "step": 1690
    },
    {
      "epoch": 0.6998348472336912,
      "grad_norm": 2.5413951969823554,
      "learning_rate": 2.5041631536340374e-06,
| "loss": 0.3909, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.7018992568125516, | |
| "grad_norm": 2.360003867942285, | |
| "learning_rate": 2.472995970324282e-06, | |
| "loss": 0.3888, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.7039636663914121, | |
| "grad_norm": 2.4621248065886516, | |
| "learning_rate": 2.4419601065528703e-06, | |
| "loss": 0.3803, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.7060280759702725, | |
| "grad_norm": 2.4221633580536865, | |
| "learning_rate": 2.4110571751448154e-06, | |
| "loss": 0.3804, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.708092485549133, | |
| "grad_norm": 2.3044799343595685, | |
| "learning_rate": 2.3802887820171012e-06, | |
| "loss": 0.3709, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.7101568951279934, | |
| "grad_norm": 2.3101555840305585, | |
| "learning_rate": 2.349656526095224e-06, | |
| "loss": 0.382, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.7122213047068539, | |
| "grad_norm": 2.439524803446898, | |
| "learning_rate": 2.3191619992301144e-06, | |
| "loss": 0.3969, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.7142857142857143, | |
| "grad_norm": 2.3771584605894174, | |
| "learning_rate": 2.2888067861153983e-06, | |
| "loss": 0.3792, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.7163501238645748, | |
| "grad_norm": 2.5666012183468587, | |
| "learning_rate": 2.2585924642050638e-06, | |
| "loss": 0.3802, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.7184145334434352, | |
| "grad_norm": 2.3145141740784307, | |
| "learning_rate": 2.2285206036314684e-06, | |
| "loss": 0.3835, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.7204789430222956, | |
| "grad_norm": 2.520653602273766, | |
| "learning_rate": 2.1985927671237605e-06, | |
| "loss": 0.3766, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.7225433526011561, | |
| "grad_norm": 2.470460484865602, | |
| "learning_rate": 2.168810509926659e-06, | |
| "loss": 0.3897, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.7246077621800165, | |
| "grad_norm": 2.408249418201136, | |
| "learning_rate": 2.1391753797196328e-06, | |
| "loss": 0.3952, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.726672171758877, | |
| "grad_norm": 2.4402970653227247, | |
| "learning_rate": 2.1096889165364894e-06, | |
| "loss": 0.375, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.7287365813377374, | |
| "grad_norm": 2.4710285995197188, | |
| "learning_rate": 2.0803526526853206e-06, | |
| "loss": 0.3748, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.7308009909165979, | |
| "grad_norm": 2.2841746369286633, | |
| "learning_rate": 2.0511681126688883e-06, | |
| "loss": 0.3799, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.7328654004954583, | |
| "grad_norm": 2.46102171882109, | |
| "learning_rate": 2.022136813105401e-06, | |
| "loss": 0.3729, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.7349298100743188, | |
| "grad_norm": 2.2829340264643156, | |
| "learning_rate": 1.9932602626496994e-06, | |
| "loss": 0.3722, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.7369942196531792, | |
| "grad_norm": 2.4439441720908, | |
| "learning_rate": 1.9645399619148515e-06, | |
| "loss": 0.3774, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.7390586292320397, | |
| "grad_norm": 2.363486720634483, | |
| "learning_rate": 1.9359774033941757e-06, | |
| "loss": 0.3775, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.7411230388109001, | |
| "grad_norm": 2.2893262909558065, | |
| "learning_rate": 1.9075740713836854e-06, | |
| "loss": 0.3851, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.7431874483897605, | |
| "grad_norm": 2.381579584567696, | |
| "learning_rate": 1.8793314419049446e-06, | |
| "loss": 0.3736, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.7452518579686209, | |
| "grad_norm": 2.3588892144435714, | |
| "learning_rate": 1.8512509826283752e-06, | |
| "loss": 0.3647, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.7473162675474814, | |
| "grad_norm": 2.468444814623241, | |
| "learning_rate": 1.8233341527969777e-06, | |
| "loss": 0.3731, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.7493806771263418, | |
| "grad_norm": 2.4216200751888217, | |
| "learning_rate": 1.7955824031505097e-06, | |
| "loss": 0.3741, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.7514450867052023, | |
| "grad_norm": 2.3925423466955884, | |
| "learning_rate": 1.7679971758500836e-06, | |
| "loss": 0.3652, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.7535094962840627, | |
| "grad_norm": 2.386508421187067, | |
| "learning_rate": 1.7405799044032356e-06, | |
| "loss": 0.3685, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.7555739058629232, | |
| "grad_norm": 2.439989821363405, | |
| "learning_rate": 1.7133320135894233e-06, | |
| "loss": 0.3653, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.7576383154417836, | |
| "grad_norm": 2.3039350711469186, | |
| "learning_rate": 1.686254919385985e-06, | |
| "loss": 0.3783, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.7597027250206441, | |
| "grad_norm": 2.216200410510777, | |
| "learning_rate": 1.6593500288945574e-06, | |
| "loss": 0.3646, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.7617671345995045, | |
| "grad_norm": 2.4989495842816405, | |
| "learning_rate": 1.6326187402679577e-06, | |
| "loss": 0.3741, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.763831544178365, | |
| "grad_norm": 2.4154932431289673, | |
| "learning_rate": 1.6060624426375222e-06, | |
| "loss": 0.3698, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.7658959537572254, | |
| "grad_norm": 2.473649546182009, | |
| "learning_rate": 1.5796825160409168e-06, | |
| "loss": 0.3672, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.7679603633360859, | |
| "grad_norm": 2.355038901564694, | |
| "learning_rate": 1.5534803313504215e-06, | |
| "loss": 0.3566, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.7700247729149463, | |
| "grad_norm": 2.3588413583977625, | |
| "learning_rate": 1.5274572502016982e-06, | |
| "loss": 0.3711, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.7720891824938068, | |
| "grad_norm": 2.401065534914905, | |
| "learning_rate": 1.5016146249230229e-06, | |
| "loss": 0.3736, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.7741535920726672, | |
| "grad_norm": 2.342195370497388, | |
| "learning_rate": 1.4759537984650114e-06, | |
| "loss": 0.3626, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.7762180016515277, | |
| "grad_norm": 2.3601036223952634, | |
| "learning_rate": 1.4504761043308346e-06, | |
| "loss": 0.3534, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.7782824112303881, | |
| "grad_norm": 2.3058771526048405, | |
| "learning_rate": 1.425182866506918e-06, | |
| "loss": 0.3633, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.7803468208092486, | |
| "grad_norm": 2.308152081660379, | |
| "learning_rate": 1.4000753993941369e-06, | |
| "loss": 0.3723, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.782411230388109, | |
| "grad_norm": 2.4604302877257664, | |
| "learning_rate": 1.3751550077395181e-06, | |
| "loss": 0.3523, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.7844756399669695, | |
| "grad_norm": 2.551739458655361, | |
| "learning_rate": 1.3504229865684326e-06, | |
| "loss": 0.3706, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.7865400495458299, | |
| "grad_norm": 2.4143979686568104, | |
| "learning_rate": 1.3258806211172954e-06, | |
| "loss": 0.3588, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.7886044591246903, | |
| "grad_norm": 2.364426767604513, | |
| "learning_rate": 1.3015291867667757e-06, | |
| "loss": 0.3508, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.7906688687035508, | |
| "grad_norm": 2.3233651206895147, | |
| "learning_rate": 1.2773699489755343e-06, | |
| "loss": 0.3571, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.7927332782824112, | |
| "grad_norm": 2.3196733264987675, | |
| "learning_rate": 1.2534041632144407e-06, | |
| "loss": 0.3405, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.7947976878612717, | |
| "grad_norm": 2.347467115991222, | |
| "learning_rate": 1.2296330749013408e-06, | |
| "loss": 0.3502, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.7968620974401321, | |
| "grad_norm": 2.3004029534314174, | |
| "learning_rate": 1.2060579193363431e-06, | |
| "loss": 0.3564, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.7989265070189926, | |
| "grad_norm": 2.5374233309055083, | |
| "learning_rate": 1.1826799216376128e-06, | |
| "loss": 0.3778, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.800990916597853, | |
| "grad_norm": 2.4431483706896007, | |
| "learning_rate": 1.1595002966777152e-06, | |
| "loss": 0.356, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.8030553261767135, | |
| "grad_norm": 2.35423434746266, | |
| "learning_rate": 1.136520249020478e-06, | |
| "loss": 0.3491, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.8051197357555739, | |
| "grad_norm": 2.3767268596250672, | |
| "learning_rate": 1.113740972858398e-06, | |
| "loss": 0.348, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.8071841453344344, | |
| "grad_norm": 2.4631890684934965, | |
| "learning_rate": 1.0911636519505791e-06, | |
| "loss": 0.3671, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.8092485549132948, | |
| "grad_norm": 2.201423417502954, | |
| "learning_rate": 1.068789459561223e-06, | |
| "loss": 0.3459, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.8113129644921553, | |
| "grad_norm": 2.36077824921438, | |
| "learning_rate": 1.0466195583986487e-06, | |
| "loss": 0.3639, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.8133773740710157, | |
| "grad_norm": 2.354003763601073, | |
| "learning_rate": 1.024655100554882e-06, | |
| "loss": 0.3643, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.8154417836498762, | |
| "grad_norm": 2.4101822702917004, | |
| "learning_rate": 1.002897227445777e-06, | |
| "loss": 0.3617, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.8175061932287366, | |
| "grad_norm": 2.4833584786174163, | |
| "learning_rate": 9.813470697517009e-07, | |
| "loss": 0.3501, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.8195706028075971, | |
| "grad_norm": 2.3627937000377393, | |
| "learning_rate": 9.600057473587876e-07, | |
| "loss": 0.3671, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.8216350123864574, | |
| "grad_norm": 2.415763997145714, | |
| "learning_rate": 9.388743693007247e-07, | |
| "loss": 0.3541, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.8236994219653179, | |
| "grad_norm": 2.4671751077694406, | |
| "learning_rate": 9.1795403370113e-07, | |
| "loss": 0.3535, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.8257638315441783, | |
| "grad_norm": 2.3775799288896287, | |
| "learning_rate": 8.972458277164886e-07, | |
| "loss": 0.3516, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.8278282411230388, | |
| "grad_norm": 2.2938470233646604, | |
| "learning_rate": 8.76750827479651e-07, | |
| "loss": 0.3398, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.8298926507018992, | |
| "grad_norm": 2.2933382256010497, | |
| "learning_rate": 8.564700980439122e-07, | |
| "loss": 0.3608, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.8319570602807597, | |
| "grad_norm": 2.3382609787220785, | |
| "learning_rate": 8.364046933276642e-07, | |
| "loss": 0.3666, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.8340214698596201, | |
| "grad_norm": 2.44152050789561, | |
| "learning_rate": 8.165556560596316e-07, | |
| "loss": 0.3485, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.8360858794384806, | |
| "grad_norm": 2.3858823443159034, | |
| "learning_rate": 7.969240177246779e-07, | |
| "loss": 0.3547, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.838150289017341, | |
| "grad_norm": 2.453983334202833, | |
| "learning_rate": 7.775107985102087e-07, | |
| "loss": 0.3651, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.8402146985962015, | |
| "grad_norm": 2.5029504947887875, | |
| "learning_rate": 7.583170072531543e-07, | |
| "loss": 0.3509, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.8422791081750619, | |
| "grad_norm": 2.482629951124892, | |
| "learning_rate": 7.393436413875411e-07, | |
| "loss": 0.3476, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.8443435177539224, | |
| "grad_norm": 2.283216056409345, | |
| "learning_rate": 7.205916868926604e-07, | |
| "loss": 0.3526, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.8464079273327828, | |
| "grad_norm": 2.3590631737094845, | |
| "learning_rate": 7.020621182418347e-07, | |
| "loss": 0.34, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.8484723369116433, | |
| "grad_norm": 2.6201296500024553, | |
| "learning_rate": 6.837558983517723e-07, | |
| "loss": 0.3535, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.8505367464905037, | |
| "grad_norm": 2.4227114258373095, | |
| "learning_rate": 6.656739785325278e-07, | |
| "loss": 0.3507, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.8526011560693642, | |
| "grad_norm": 2.390954329942295, | |
| "learning_rate": 6.478172984380687e-07, | |
| "loss": 0.3532, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.8546655656482246, | |
| "grad_norm": 2.556923085001048, | |
| "learning_rate": 6.301867860174443e-07, | |
| "loss": 0.3434, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.856729975227085, | |
| "grad_norm": 2.3169295549044335, | |
| "learning_rate": 6.127833574665642e-07, | |
| "loss": 0.3459, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.8587943848059455, | |
| "grad_norm": 2.3497867089138644, | |
| "learning_rate": 5.956079171805818e-07, | |
| "loss": 0.3415, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.860858794384806, | |
| "grad_norm": 2.1735667040509137, | |
| "learning_rate": 5.78661357706905e-07, | |
| "loss": 0.3379, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.8629232039636664, | |
| "grad_norm": 2.5779842919745355, | |
| "learning_rate": 5.619445596988026e-07, | |
| "loss": 0.3425, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.8649876135425268, | |
| "grad_norm": 2.401992025146321, | |
| "learning_rate": 5.45458391869651e-07, | |
| "loss": 0.3489, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.8670520231213873, | |
| "grad_norm": 2.3105243423015587, | |
| "learning_rate": 5.29203710947781e-07, | |
| "loss": 0.335, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.8691164327002477, | |
| "grad_norm": 2.5027833045418943, | |
| "learning_rate": 5.131813616319641e-07, | |
| "loss": 0.35, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.8711808422791082, | |
| "grad_norm": 2.492666189587263, | |
| "learning_rate": 4.973921765475093e-07, | |
| "loss": 0.3419, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.8732452518579686, | |
| "grad_norm": 2.476326337718451, | |
| "learning_rate": 4.818369762030001e-07, | |
| "loss": 0.3437, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.8753096614368291, | |
| "grad_norm": 2.4895934072647923, | |
| "learning_rate": 4.6651656894765307e-07, | |
| "loss": 0.345, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.8773740710156895, | |
| "grad_norm": 2.432351776306209, | |
| "learning_rate": 4.5143175092931257e-07, | |
| "loss": 0.3457, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.87943848059455, | |
| "grad_norm": 2.5446256871769894, | |
| "learning_rate": 4.3658330605307496e-07, | |
| "loss": 0.3501, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.8815028901734104, | |
| "grad_norm": 2.400237240106358, | |
| "learning_rate": 4.2197200594055135e-07, | |
| "loss": 0.3417, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.8835672997522709, | |
| "grad_norm": 2.420863056343347, | |
| "learning_rate": 4.0759860988977715e-07, | |
| "loss": 0.3336, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.8856317093311313, | |
| "grad_norm": 2.5855831775345317, | |
| "learning_rate": 3.934638648357414e-07, | |
| "loss": 0.3473, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.8876961189099918, | |
| "grad_norm": 2.4571094742538633, | |
| "learning_rate": 3.795685053115805e-07, | |
| "loss": 0.3425, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.8897605284888522, | |
| "grad_norm": 2.318969751255526, | |
| "learning_rate": 3.659132534104054e-07, | |
| "loss": 0.339, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.8918249380677127, | |
| "grad_norm": 2.464561015161846, | |
| "learning_rate": 3.5249881874777235e-07, | |
| "loss": 0.3415, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.8938893476465731, | |
| "grad_norm": 2.5739185699856653, | |
| "learning_rate": 3.3932589842481434e-07, | |
| "loss": 0.3549, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.8959537572254336, | |
| "grad_norm": 2.3924860465104554, | |
| "learning_rate": 3.2639517699200765e-07, | |
| "loss": 0.3501, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.898018166804294, | |
| "grad_norm": 2.4561049545962983, | |
| "learning_rate": 3.1370732641360424e-07, | |
| "loss": 0.3382, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.9000825763831544, | |
| "grad_norm": 2.4531360622529985, | |
| "learning_rate": 3.0126300603270763e-07, | |
| "loss": 0.3386, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.9021469859620148, | |
| "grad_norm": 2.3726671856862698, | |
| "learning_rate": 2.89062862537009e-07, | |
| "loss": 0.3382, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.9042113955408753, | |
| "grad_norm": 2.4508531032953704, | |
| "learning_rate": 2.7710752992518906e-07, | |
| "loss": 0.3475, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.9062758051197357, | |
| "grad_norm": 2.344783384097571, | |
| "learning_rate": 2.653976294739596e-07, | |
| "loss": 0.3435, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.9083402146985962, | |
| "grad_norm": 2.3832943118751495, | |
| "learning_rate": 2.539337697057853e-07, | |
| "loss": 0.3386, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.9104046242774566, | |
| "grad_norm": 2.2656029102160917, | |
| "learning_rate": 2.4271654635726003e-07, | |
| "loss": 0.326, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.9124690338563171, | |
| "grad_norm": 2.531439971555802, | |
| "learning_rate": 2.3174654234814854e-07, | |
| "loss": 0.3336, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.9145334434351775, | |
| "grad_norm": 2.526736652879497, | |
| "learning_rate": 2.2102432775109151e-07, | |
| "loss": 0.3431, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.916597853014038, | |
| "grad_norm": 2.499234877151574, | |
| "learning_rate": 2.1055045976198286e-07, | |
| "loss": 0.3274, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.9186622625928984, | |
| "grad_norm": 2.347953826058282, | |
| "learning_rate": 2.0032548267101547e-07, | |
| "loss": 0.3415, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.9207266721717589, | |
| "grad_norm": 2.2781319110873923, | |
| "learning_rate": 1.9034992783439398e-07, | |
| "loss": 0.3313, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.9227910817506193, | |
| "grad_norm": 2.4985215803253777, | |
| "learning_rate": 1.8062431364672394e-07, | |
| "loss": 0.347, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.9248554913294798, | |
| "grad_norm": 2.3523550820055807, | |
| "learning_rate": 1.7114914551407125e-07, | |
| "loss": 0.3405, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.9269199009083402, | |
| "grad_norm": 2.35149253079431, | |
| "learning_rate": 1.619249158276981e-07, | |
| "loss": 0.3396, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.9289843104872006, | |
| "grad_norm": 2.5391046088636724, | |
| "learning_rate": 1.529521039384757e-07, | |
| "loss": 0.3382, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.9310487200660611, | |
| "grad_norm": 2.436611958199382, | |
| "learning_rate": 1.4423117613197535e-07, | |
| "loss": 0.3507, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.9331131296449215, | |
| "grad_norm": 2.368900058293189, | |
| "learning_rate": 1.357625856042344e-07, | |
| "loss": 0.3436, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.935177539223782, | |
| "grad_norm": 2.483713260867796, | |
| "learning_rate": 1.275467724382068e-07, | |
| "loss": 0.3493, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.9372419488026424, | |
| "grad_norm": 2.3378032035177556, | |
| "learning_rate": 1.1958416358089308e-07, | |
| "loss": 0.3366, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.9393063583815029, | |
| "grad_norm": 2.427445932737846, | |
| "learning_rate": 1.1187517282115478e-07, | |
| "loss": 0.3357, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.9413707679603633, | |
| "grad_norm": 2.45979849427157, | |
| "learning_rate": 1.0442020076820992e-07, | |
| "loss": 0.3432, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.9434351775392238, | |
| "grad_norm": 2.4102972284673485, | |
| "learning_rate": 9.72196348308152e-08, | |
| "loss": 0.3421, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.9454995871180842, | |
| "grad_norm": 2.4147926771751127, | |
| "learning_rate": 9.027384919713167e-08, | |
| "loss": 0.3449, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.9475639966969447, | |
| "grad_norm": 2.492908131419299, | |
| "learning_rate": 8.358320481528404e-08, | |
| "loss": 0.3349, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.9496284062758051, | |
| "grad_norm": 2.445696827488344, | |
| "learning_rate": 7.714804937459964e-08, | |
| "loss": 0.3327, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.9516928158546656, | |
| "grad_norm": 2.4859237981346434, | |
| "learning_rate": 7.096871728754173e-08, | |
| "loss": 0.3369, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.953757225433526, | |
| "grad_norm": 2.4755163895408483, | |
| "learning_rate": 6.504552967233014e-08, | |
| "loss": 0.3341, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.9558216350123865, | |
| "grad_norm": 2.4903780675355947, | |
| "learning_rate": 5.9378794336255595e-08, | |
| "loss": 0.3259, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.9578860445912469, | |
| "grad_norm": 2.3608897794698986, | |
| "learning_rate": 5.396880575968266e-08, | |
| "loss": 0.34, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.9599504541701074, | |
| "grad_norm": 2.349066629113589, | |
| "learning_rate": 4.881584508074688e-08, | |
| "loss": 0.3285, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.9620148637489678, | |
| "grad_norm": 2.475193042134275, | |
| "learning_rate": 4.39201800807465e-08, | |
| "loss": 0.339, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.9640792733278283, | |
| "grad_norm": 2.3656425365762384, | |
| "learning_rate": 3.92820651702247e-08, | |
| "loss": 0.3358, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.9661436829066887, | |
| "grad_norm": 2.441572724635035, | |
| "learning_rate": 3.4901741375747975e-08, | |
| "loss": 0.3471, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.9682080924855492, | |
| "grad_norm": 2.3678044079850413, | |
| "learning_rate": 3.077943632738556e-08, | |
| "loss": 0.3499, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.9702725020644096, | |
| "grad_norm": 2.630292946781589, | |
| "learning_rate": 2.6915364246875574e-08, | |
| "loss": 0.341, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.9723369116432701, | |
| "grad_norm": 2.388897601971072, | |
| "learning_rate": 2.3309725936493387e-08, | |
| "loss": 0.3437, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.9744013212221305, | |
| "grad_norm": 2.4665523794028785, | |
| "learning_rate": 1.9962708768620498e-08, | |
| "loss": 0.3453, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.976465730800991, | |
| "grad_norm": 2.448813469296534, | |
| "learning_rate": 1.687448667600067e-08, | |
| "loss": 0.3408, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.9785301403798513, | |
| "grad_norm": 2.3718128749961576, | |
| "learning_rate": 1.4045220142708816e-08, | |
| "loss": 0.35, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.9805945499587118, | |
| "grad_norm": 2.4841177332012325, | |
| "learning_rate": 1.1475056195804358e-08, | |
| "loss": 0.3518, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.9826589595375722, | |
| "grad_norm": 2.493744247830282, | |
| "learning_rate": 9.164128397695648e-09, | |
| "loss": 0.332, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.9847233691164327, | |
| "grad_norm": 2.2819847476640107, | |
| "learning_rate": 7.112556839196649e-09, | |
| "loss": 0.3301, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 0.9867877786952931, | |
| "grad_norm": 2.4247247484873173, | |
| "learning_rate": 5.3204481332880255e-09, | |
| "loss": 0.3433, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.9888521882741536, | |
| "grad_norm": 2.4373319136694285, | |
| "learning_rate": 3.787895409574915e-09, | |
| "loss": 0.3388, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 0.990916597853014, | |
| "grad_norm": 2.3711173222964037, | |
| "learning_rate": 2.514978309447469e-09, | |
| "loss": 0.3433, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.9929810074318745, | |
| "grad_norm": 2.3426000581900652, | |
| "learning_rate": 1.501762981945265e-09, | |
| "loss": 0.3368, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 0.9950454170107349, | |
| "grad_norm": 2.35822764782016, | |
| "learning_rate": 7.483020803145114e-10, | |
| "loss": 0.3396, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.9971098265895953, | |
| "grad_norm": 2.4725534953927233, | |
| "learning_rate": 2.5463475927745007e-10, | |
| "loss": 0.3401, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 0.9991742361684558, | |
| "grad_norm": 2.407515233988016, | |
| "learning_rate": 2.078667299343362e-11, | |
| "loss": 0.3298, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_runtime": 3.3946, | |
| "eval_samples_per_second": 2.946, | |
| "eval_steps_per_second": 0.884, | |
| "step": 2422 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 2422, | |
| "total_flos": 253558763028480.0, | |
| "train_loss": 0.530042272082643, | |
| "train_runtime": 21564.4437, | |
| "train_samples_per_second": 1.797, | |
| "train_steps_per_second": 0.112 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 2422, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 253558763028480.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
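
A quick way to inspect this log: the structure matches the `trainer_state.json` checkpoint file that Hugging Face `transformers.Trainer` writes, so the loss curve and learning-rate schedule can be recovered directly from `log_history`. Below is a minimal sketch, assuming the JSON above is saved as `trainer_state.json` and that matplotlib is available; the filename and the plotting choices are illustrative additions, not part of the original log.

```python
# Minimal sketch: plot the logged training loss against the global step.
# Assumption: the log above is saved locally as "trainer_state.json".
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss; the trailing entries hold
# eval metrics and the run summary ("train_loss", "train_runtime", ...) instead.
points = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
steps, losses = zip(*points)

plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.title(f"epoch {state['epoch']}, {state['max_steps']} steps")
plt.show()
```

The same filter pattern works for other logged quantities (e.g. `"learning_rate"` or `"grad_norm"`) by swapping the key used in the list comprehension.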