{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.537744641192917,
  "eval_steps": 500,
  "global_step": 1650,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004659832246039142,
      "grad_norm": 59.78214918326703,
      "learning_rate": 7.763975155279503e-07,
      "loss": 10.6865,
      "step": 5
    },
    {
      "epoch": 0.009319664492078284,
      "grad_norm": 61.61821429517621,
      "learning_rate": 1.5527950310559006e-06,
      "loss": 10.4783,
      "step": 10
    },
    {
      "epoch": 0.013979496738117428,
      "grad_norm": 100.18886340338842,
      "learning_rate": 2.329192546583851e-06,
      "loss": 8.8595,
      "step": 15
    },
    {
      "epoch": 0.01863932898415657,
      "grad_norm": 28.80282471816165,
      "learning_rate": 3.1055900621118013e-06,
      "loss": 2.9699,
      "step": 20
    },
    {
      "epoch": 0.023299161230195712,
      "grad_norm": 2.9469993742141027,
      "learning_rate": 3.881987577639752e-06,
      "loss": 1.3152,
      "step": 25
    },
    {
      "epoch": 0.027958993476234855,
      "grad_norm": 1.273876748177396,
      "learning_rate": 4.658385093167702e-06,
      "loss": 0.9979,
      "step": 30
    },
    {
      "epoch": 0.032618825722273995,
      "grad_norm": 1.1847847745298432,
      "learning_rate": 5.4347826086956525e-06,
      "loss": 0.8462,
      "step": 35
    },
    {
      "epoch": 0.03727865796831314,
      "grad_norm": 0.6184161495875878,
      "learning_rate": 6.2111801242236025e-06,
      "loss": 0.7823,
      "step": 40
    },
    {
      "epoch": 0.04193849021435228,
      "grad_norm": 0.4611063105597401,
      "learning_rate": 6.9875776397515525e-06,
      "loss": 0.7195,
      "step": 45
    },
    {
      "epoch": 0.046598322460391424,
      "grad_norm": 0.3903179820498685,
      "learning_rate": 7.763975155279503e-06,
      "loss": 0.6868,
      "step": 50
    },
    {
      "epoch": 0.05125815470643057,
      "grad_norm": 0.44815192407139737,
      "learning_rate": 8.540372670807453e-06,
      "loss": 0.6483,
      "step": 55
    },
    {
      "epoch": 0.05591798695246971,
      "grad_norm": 0.3363352375310443,
      "learning_rate": 9.316770186335403e-06,
      "loss": 0.61,
      "step": 60
    },
    {
      "epoch": 0.06057781919850885,
      "grad_norm": 0.31214409078055283,
      "learning_rate": 1.0093167701863353e-05,
      "loss": 0.5932,
      "step": 65
    },
    {
      "epoch": 0.06523765144454799,
      "grad_norm": 0.32957450493165075,
      "learning_rate": 1.0869565217391305e-05,
      "loss": 0.5886,
      "step": 70
    },
    {
      "epoch": 0.06989748369058714,
      "grad_norm": 0.3311294918877853,
      "learning_rate": 1.1645962732919255e-05,
      "loss": 0.5704,
      "step": 75
    },
    {
      "epoch": 0.07455731593662628,
      "grad_norm": 0.3181163049876225,
      "learning_rate": 1.2422360248447205e-05,
      "loss": 0.5604,
      "step": 80
    },
    {
      "epoch": 0.07921714818266543,
      "grad_norm": 0.3310250029185392,
      "learning_rate": 1.3198757763975155e-05,
      "loss": 0.5508,
      "step": 85
    },
    {
      "epoch": 0.08387698042870456,
      "grad_norm": 0.2864578039803888,
      "learning_rate": 1.3975155279503105e-05,
      "loss": 0.5606,
      "step": 90
    },
    {
      "epoch": 0.08853681267474371,
      "grad_norm": 0.2527763382663063,
      "learning_rate": 1.4751552795031057e-05,
      "loss": 0.542,
      "step": 95
    },
    {
      "epoch": 0.09319664492078285,
      "grad_norm": 0.30977172064299785,
      "learning_rate": 1.5527950310559007e-05,
      "loss": 0.5272,
      "step": 100
    },
    {
      "epoch": 0.097856477166822,
      "grad_norm": 0.3503262906800675,
      "learning_rate": 1.630434782608696e-05,
      "loss": 0.5214,
      "step": 105
    },
    {
      "epoch": 0.10251630941286113,
      "grad_norm": 0.3032318880335728,
      "learning_rate": 1.7080745341614907e-05,
      "loss": 0.5229,
      "step": 110
    },
    {
      "epoch": 0.10717614165890028,
      "grad_norm": 0.32192998759757896,
      "learning_rate": 1.785714285714286e-05,
      "loss": 0.5201,
      "step": 115
    },
    {
      "epoch": 0.11183597390493942,
      "grad_norm": 0.37017049044979194,
      "learning_rate": 1.8633540372670807e-05,
      "loss": 0.5067,
      "step": 120
    },
    {
      "epoch": 0.11649580615097857,
      "grad_norm": 0.27625624649694025,
      "learning_rate": 1.940993788819876e-05,
      "loss": 0.5026,
      "step": 125
    },
    {
      "epoch": 0.1211556383970177,
      "grad_norm": 0.32732522406326287,
      "learning_rate": 2.0186335403726707e-05,
      "loss": 0.5083,
      "step": 130
    },
    {
      "epoch": 0.12581547064305684,
      "grad_norm": 0.3974302327759709,
      "learning_rate": 2.096273291925466e-05,
      "loss": 0.5069,
      "step": 135
    },
    {
      "epoch": 0.13047530288909598,
      "grad_norm": 0.49055099062465235,
      "learning_rate": 2.173913043478261e-05,
      "loss": 0.4918,
      "step": 140
    },
    {
      "epoch": 0.13513513513513514,
      "grad_norm": 0.3509510737287038,
      "learning_rate": 2.2515527950310562e-05,
      "loss": 0.5182,
      "step": 145
    },
    {
      "epoch": 0.13979496738117428,
      "grad_norm": 0.4060738145091598,
      "learning_rate": 2.329192546583851e-05,
      "loss": 0.4924,
      "step": 150
    },
    {
      "epoch": 0.14445479962721341,
      "grad_norm": 0.42238178931670933,
      "learning_rate": 2.4068322981366462e-05,
      "loss": 0.5005,
      "step": 155
    },
    {
      "epoch": 0.14911463187325255,
      "grad_norm": 0.42361270461040995,
      "learning_rate": 2.484472049689441e-05,
      "loss": 0.4809,
      "step": 160
    },
    {
      "epoch": 0.15377446411929171,
      "grad_norm": 0.4419148082648927,
      "learning_rate": 2.5621118012422362e-05,
      "loss": 0.4922,
      "step": 165
    },
    {
      "epoch": 0.15843429636533085,
      "grad_norm": 0.37817457175825797,
      "learning_rate": 2.639751552795031e-05,
      "loss": 0.4682,
      "step": 170
    },
    {
      "epoch": 0.16309412861137,
      "grad_norm": 0.4612740179437555,
      "learning_rate": 2.7173913043478262e-05,
      "loss": 0.4812,
      "step": 175
    },
    {
      "epoch": 0.16775396085740912,
      "grad_norm": 0.4027204736632852,
      "learning_rate": 2.795031055900621e-05,
      "loss": 0.4743,
      "step": 180
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 0.3662916369622068,
      "learning_rate": 2.8726708074534165e-05,
      "loss": 0.4771,
      "step": 185
    },
    {
      "epoch": 0.17707362534948742,
      "grad_norm": 0.44549022951820444,
      "learning_rate": 2.9503105590062114e-05,
      "loss": 0.4872,
      "step": 190
    },
    {
      "epoch": 0.18173345759552656,
      "grad_norm": 0.4421692278535386,
      "learning_rate": 3.0279503105590062e-05,
      "loss": 0.4768,
      "step": 195
    },
    {
      "epoch": 0.1863932898415657,
      "grad_norm": 0.4592171701659634,
      "learning_rate": 3.1055900621118014e-05,
      "loss": 0.4782,
      "step": 200
    },
    {
      "epoch": 0.19105312208760486,
      "grad_norm": 0.5338610618981041,
      "learning_rate": 3.183229813664597e-05,
      "loss": 0.4677,
      "step": 205
    },
    {
      "epoch": 0.195712954333644,
      "grad_norm": 0.679375515050287,
      "learning_rate": 3.260869565217392e-05,
      "loss": 0.4817,
      "step": 210
    },
    {
      "epoch": 0.20037278657968313,
      "grad_norm": 0.5075771202954662,
      "learning_rate": 3.3385093167701865e-05,
      "loss": 0.4632,
      "step": 215
    },
    {
      "epoch": 0.20503261882572227,
      "grad_norm": 0.5271972882853628,
      "learning_rate": 3.4161490683229814e-05,
      "loss": 0.4674,
      "step": 220
    },
    {
      "epoch": 0.2096924510717614,
      "grad_norm": 0.45927485782401883,
      "learning_rate": 3.493788819875777e-05,
      "loss": 0.4496,
      "step": 225
    },
    {
      "epoch": 0.21435228331780057,
      "grad_norm": 0.3875430276374643,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.4628,
      "step": 230
    },
    {
      "epoch": 0.2190121155638397,
      "grad_norm": 0.43592470909651004,
      "learning_rate": 3.6490683229813665e-05,
      "loss": 0.4604,
      "step": 235
    },
    {
      "epoch": 0.22367194780987884,
      "grad_norm": 0.45541861707423287,
      "learning_rate": 3.7267080745341614e-05,
      "loss": 0.4578,
      "step": 240
    },
    {
      "epoch": 0.22833178005591798,
      "grad_norm": 0.5415802082513514,
      "learning_rate": 3.804347826086957e-05,
      "loss": 0.4628,
      "step": 245
    },
    {
      "epoch": 0.23299161230195714,
      "grad_norm": 0.48756789908879733,
      "learning_rate": 3.881987577639752e-05,
      "loss": 0.4551,
      "step": 250
    },
    {
      "epoch": 0.23765144454799628,
      "grad_norm": 0.48641029082339876,
      "learning_rate": 3.9596273291925465e-05,
      "loss": 0.4636,
      "step": 255
    },
    {
      "epoch": 0.2423112767940354,
      "grad_norm": 0.44167537560377024,
      "learning_rate": 4.0372670807453414e-05,
      "loss": 0.4584,
      "step": 260
    },
    {
      "epoch": 0.24697110904007455,
      "grad_norm": 0.49523500628083084,
      "learning_rate": 4.114906832298137e-05,
      "loss": 0.457,
      "step": 265
    },
    {
      "epoch": 0.2516309412861137,
      "grad_norm": 0.46362590765498735,
      "learning_rate": 4.192546583850932e-05,
      "loss": 0.4553,
      "step": 270
    },
    {
      "epoch": 0.25629077353215285,
      "grad_norm": 0.41387594745783063,
      "learning_rate": 4.270186335403727e-05,
      "loss": 0.4606,
      "step": 275
    },
    {
      "epoch": 0.26095060577819196,
      "grad_norm": 0.4327868844195844,
      "learning_rate": 4.347826086956522e-05,
      "loss": 0.4529,
      "step": 280
    },
    {
      "epoch": 0.2656104380242311,
      "grad_norm": 0.425878793618421,
      "learning_rate": 4.425465838509317e-05,
      "loss": 0.457,
      "step": 285
    },
    {
      "epoch": 0.2702702702702703,
      "grad_norm": 0.49167640847055744,
      "learning_rate": 4.5031055900621124e-05,
      "loss": 0.4586,
      "step": 290
    },
    {
      "epoch": 0.2749301025163094,
      "grad_norm": 0.6223762427590684,
      "learning_rate": 4.580745341614907e-05,
      "loss": 0.4627,
      "step": 295
    },
    {
      "epoch": 0.27958993476234856,
      "grad_norm": 0.6138447122463503,
      "learning_rate": 4.658385093167702e-05,
      "loss": 0.4643,
      "step": 300
    },
    {
      "epoch": 0.2842497670083877,
      "grad_norm": 0.5410077879825527,
      "learning_rate": 4.736024844720497e-05,
      "loss": 0.4531,
      "step": 305
    },
    {
      "epoch": 0.28890959925442683,
      "grad_norm": 0.5248106064020833,
      "learning_rate": 4.8136645962732924e-05,
      "loss": 0.4453,
      "step": 310
    },
    {
      "epoch": 0.293569431500466,
      "grad_norm": 0.4576312238038398,
      "learning_rate": 4.891304347826087e-05,
      "loss": 0.4531,
      "step": 315
    },
    {
      "epoch": 0.2982292637465051,
      "grad_norm": 0.7616375740291316,
      "learning_rate": 4.968944099378882e-05,
      "loss": 0.4387,
      "step": 320
    },
    {
      "epoch": 0.30288909599254427,
      "grad_norm": 0.5230246761762287,
      "learning_rate": 4.994822229892993e-05,
      "loss": 0.4367,
      "step": 325
    },
    {
      "epoch": 0.30754892823858343,
      "grad_norm": 0.4333888474147187,
      "learning_rate": 4.986192613047981e-05,
      "loss": 0.4468,
      "step": 330
    },
    {
      "epoch": 0.31220876048462254,
      "grad_norm": 0.6558069118015527,
      "learning_rate": 4.977562996202969e-05,
      "loss": 0.447,
      "step": 335
    },
    {
      "epoch": 0.3168685927306617,
      "grad_norm": 0.4781469048268466,
      "learning_rate": 4.968933379357957e-05,
      "loss": 0.4487,
      "step": 340
    },
    {
      "epoch": 0.32152842497670087,
      "grad_norm": 0.4582486158349481,
      "learning_rate": 4.9603037625129445e-05,
      "loss": 0.4433,
      "step": 345
    },
    {
      "epoch": 0.32618825722274,
      "grad_norm": 0.4234664780262906,
      "learning_rate": 4.951674145667933e-05,
      "loss": 0.4568,
      "step": 350
    },
    {
      "epoch": 0.33084808946877914,
      "grad_norm": 0.4766441062632173,
      "learning_rate": 4.94304452882292e-05,
      "loss": 0.4431,
      "step": 355
    },
    {
      "epoch": 0.33550792171481825,
      "grad_norm": 0.5294140666427822,
      "learning_rate": 4.934414911977908e-05,
      "loss": 0.4405,
      "step": 360
    },
    {
      "epoch": 0.3401677539608574,
      "grad_norm": 0.3838225157756671,
      "learning_rate": 4.9257852951328965e-05,
      "loss": 0.437,
      "step": 365
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 0.4289011151413574,
      "learning_rate": 4.917155678287884e-05,
      "loss": 0.4498,
      "step": 370
    },
    {
      "epoch": 0.3494874184529357,
      "grad_norm": 0.4739139367556648,
      "learning_rate": 4.908526061442872e-05,
      "loss": 0.4364,
      "step": 375
    },
    {
      "epoch": 0.35414725069897485,
      "grad_norm": 0.6531685734098429,
      "learning_rate": 4.89989644459786e-05,
      "loss": 0.4507,
      "step": 380
    },
    {
      "epoch": 0.35880708294501396,
      "grad_norm": 0.38736077721702633,
      "learning_rate": 4.891266827752848e-05,
      "loss": 0.4526,
      "step": 385
    },
    {
      "epoch": 0.3634669151910531,
      "grad_norm": 0.39074723361967706,
      "learning_rate": 4.882637210907836e-05,
      "loss": 0.4372,
      "step": 390
    },
    {
      "epoch": 0.3681267474370923,
      "grad_norm": 0.4998720672786241,
      "learning_rate": 4.874007594062824e-05,
      "loss": 0.4432,
      "step": 395
    },
    {
      "epoch": 0.3727865796831314,
      "grad_norm": 0.41449696680501236,
      "learning_rate": 4.865377977217811e-05,
      "loss": 0.4284,
      "step": 400
    },
    {
      "epoch": 0.37744641192917056,
      "grad_norm": 0.42798981510455325,
      "learning_rate": 4.8567483603728e-05,
      "loss": 0.4466,
      "step": 405
    },
    {
      "epoch": 0.3821062441752097,
      "grad_norm": 0.453510571603012,
      "learning_rate": 4.8481187435277875e-05,
      "loss": 0.4425,
      "step": 410
    },
    {
      "epoch": 0.38676607642124883,
      "grad_norm": 0.5613852564226437,
      "learning_rate": 4.839489126682776e-05,
      "loss": 0.4296,
      "step": 415
    },
    {
      "epoch": 0.391425908667288,
      "grad_norm": 0.4991311823287778,
      "learning_rate": 4.830859509837763e-05,
      "loss": 0.4477,
      "step": 420
    },
    {
      "epoch": 0.3960857409133271,
      "grad_norm": 0.41710093216407557,
      "learning_rate": 4.822229892992751e-05,
      "loss": 0.4451,
      "step": 425
    },
    {
      "epoch": 0.40074557315936626,
      "grad_norm": 0.44852195468081424,
      "learning_rate": 4.8136002761477395e-05,
      "loss": 0.4322,
      "step": 430
    },
    {
      "epoch": 0.40540540540540543,
      "grad_norm": 0.5873258297489329,
      "learning_rate": 4.804970659302727e-05,
      "loss": 0.445,
      "step": 435
    },
    {
      "epoch": 0.41006523765144454,
      "grad_norm": 0.5301184440251494,
      "learning_rate": 4.796341042457715e-05,
      "loss": 0.439,
      "step": 440
    },
    {
      "epoch": 0.4147250698974837,
      "grad_norm": 0.554152067322795,
      "learning_rate": 4.787711425612703e-05,
      "loss": 0.4337,
      "step": 445
    },
    {
      "epoch": 0.4193849021435228,
      "grad_norm": 0.4875794890348032,
      "learning_rate": 4.779081808767691e-05,
      "loss": 0.4245,
      "step": 450
    },
    {
      "epoch": 0.424044734389562,
      "grad_norm": 0.4690783572871423,
      "learning_rate": 4.770452191922679e-05,
      "loss": 0.4314,
      "step": 455
    },
    {
      "epoch": 0.42870456663560114,
      "grad_norm": 0.3725289513240759,
      "learning_rate": 4.761822575077667e-05,
      "loss": 0.4283,
      "step": 460
    },
    {
      "epoch": 0.43336439888164024,
      "grad_norm": 0.4830268598668616,
      "learning_rate": 4.753192958232654e-05,
      "loss": 0.4255,
      "step": 465
    },
    {
      "epoch": 0.4380242311276794,
      "grad_norm": 0.43173494250112954,
      "learning_rate": 4.744563341387643e-05,
      "loss": 0.4378,
      "step": 470
    },
    {
      "epoch": 0.4426840633737186,
      "grad_norm": 0.43237002431737065,
      "learning_rate": 4.7359337245426306e-05,
      "loss": 0.4277,
      "step": 475
    },
    {
      "epoch": 0.4473438956197577,
      "grad_norm": 0.41385681702794824,
      "learning_rate": 4.7273041076976184e-05,
      "loss": 0.4394,
      "step": 480
    },
    {
      "epoch": 0.45200372786579684,
      "grad_norm": 0.40157124060011173,
      "learning_rate": 4.718674490852606e-05,
      "loss": 0.432,
      "step": 485
    },
    {
      "epoch": 0.45666356011183595,
      "grad_norm": 0.39938983254093463,
      "learning_rate": 4.710044874007594e-05,
      "loss": 0.4264,
      "step": 490
    },
    {
      "epoch": 0.4613233923578751,
      "grad_norm": 0.39732323279012777,
      "learning_rate": 4.7014152571625826e-05,
      "loss": 0.4321,
      "step": 495
    },
    {
      "epoch": 0.4659832246039143,
      "grad_norm": 0.4747358464791143,
      "learning_rate": 4.6927856403175704e-05,
      "loss": 0.435,
      "step": 500
    },
    {
      "epoch": 0.4706430568499534,
      "grad_norm": 0.3698718174123855,
      "learning_rate": 4.684156023472558e-05,
      "loss": 0.4221,
      "step": 505
    },
    {
      "epoch": 0.47530288909599255,
      "grad_norm": 0.4305572996344627,
      "learning_rate": 4.675526406627546e-05,
      "loss": 0.4303,
      "step": 510
    },
    {
      "epoch": 0.47996272134203166,
      "grad_norm": 0.6085259797324423,
      "learning_rate": 4.666896789782534e-05,
      "loss": 0.4281,
      "step": 515
    },
    {
      "epoch": 0.4846225535880708,
      "grad_norm": 0.5730318489213171,
      "learning_rate": 4.658267172937522e-05,
      "loss": 0.4321,
      "step": 520
    },
    {
      "epoch": 0.48928238583411,
      "grad_norm": 0.4332245949035479,
      "learning_rate": 4.64963755609251e-05,
      "loss": 0.4309,
      "step": 525
    },
    {
      "epoch": 0.4939422180801491,
      "grad_norm": 0.508102013567185,
      "learning_rate": 4.641007939247497e-05,
      "loss": 0.428,
      "step": 530
    },
    {
      "epoch": 0.49860205032618826,
      "grad_norm": 0.34669842614662666,
      "learning_rate": 4.632378322402486e-05,
      "loss": 0.4283,
      "step": 535
    },
    {
      "epoch": 0.5032618825722274,
      "grad_norm": 0.3889254150420956,
      "learning_rate": 4.6237487055574736e-05,
      "loss": 0.4178,
      "step": 540
    },
    {
      "epoch": 0.5079217148182665,
      "grad_norm": 0.49239466237923585,
      "learning_rate": 4.6151190887124615e-05,
      "loss": 0.4244,
      "step": 545
    },
    {
      "epoch": 0.5125815470643057,
      "grad_norm": 0.4397316581317278,
      "learning_rate": 4.606489471867449e-05,
      "loss": 0.4245,
      "step": 550
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 0.3573410355093717,
      "learning_rate": 4.597859855022437e-05,
      "loss": 0.4192,
      "step": 555
    },
    {
      "epoch": 0.5219012115563839,
      "grad_norm": 0.43267928489519075,
      "learning_rate": 4.589230238177425e-05,
      "loss": 0.4397,
      "step": 560
    },
    {
      "epoch": 0.5265610438024231,
      "grad_norm": 0.4821719775119577,
      "learning_rate": 4.5806006213324134e-05,
      "loss": 0.4177,
      "step": 565
    },
    {
      "epoch": 0.5312208760484622,
      "grad_norm": 0.5349367976109402,
      "learning_rate": 4.5719710044874006e-05,
      "loss": 0.4175,
      "step": 570
    },
    {
      "epoch": 0.5358807082945014,
      "grad_norm": 0.47146780494171403,
      "learning_rate": 4.563341387642389e-05,
      "loss": 0.4234,
      "step": 575
    },
    {
      "epoch": 0.5405405405405406,
      "grad_norm": 0.3724800991591985,
      "learning_rate": 4.554711770797377e-05,
      "loss": 0.4238,
      "step": 580
    },
    {
      "epoch": 0.5452003727865797,
      "grad_norm": 0.46072368721402923,
      "learning_rate": 4.546082153952365e-05,
      "loss": 0.4209,
      "step": 585
    },
    {
      "epoch": 0.5498602050326188,
      "grad_norm": 0.40398358486961483,
      "learning_rate": 4.5374525371073526e-05,
      "loss": 0.4171,
      "step": 590
    },
    {
      "epoch": 0.554520037278658,
      "grad_norm": 0.3510727726291796,
      "learning_rate": 4.5288229202623404e-05,
      "loss": 0.4191,
      "step": 595
    },
    {
      "epoch": 0.5591798695246971,
      "grad_norm": 0.38603847092047144,
      "learning_rate": 4.520193303417328e-05,
      "loss": 0.4194,
      "step": 600
    },
    {
      "epoch": 0.5638397017707363,
      "grad_norm": 0.3689646251475895,
      "learning_rate": 4.511563686572317e-05,
      "loss": 0.4182,
      "step": 605
    },
    {
      "epoch": 0.5684995340167754,
      "grad_norm": 0.408796996489251,
      "learning_rate": 4.5029340697273045e-05,
      "loss": 0.4293,
      "step": 610
    },
    {
      "epoch": 0.5731593662628145,
      "grad_norm": 0.32130017833210983,
      "learning_rate": 4.4943044528822923e-05,
      "loss": 0.4159,
      "step": 615
    },
    {
      "epoch": 0.5778191985088537,
      "grad_norm": 0.3772249822258766,
      "learning_rate": 4.48567483603728e-05,
      "loss": 0.4131,
      "step": 620
    },
    {
      "epoch": 0.5824790307548928,
      "grad_norm": 0.35575898721323274,
      "learning_rate": 4.477045219192268e-05,
      "loss": 0.4344,
      "step": 625
    },
    {
      "epoch": 0.587138863000932,
      "grad_norm": 0.38751558564769756,
      "learning_rate": 4.4684156023472565e-05,
      "loss": 0.425,
      "step": 630
    },
    {
      "epoch": 0.5917986952469712,
      "grad_norm": 0.4663549933535284,
      "learning_rate": 4.4597859855022436e-05,
      "loss": 0.4296,
      "step": 635
    },
    {
      "epoch": 0.5964585274930102,
      "grad_norm": 0.4662472899566146,
      "learning_rate": 4.4511563686572315e-05,
      "loss": 0.4202,
      "step": 640
    },
    {
      "epoch": 0.6011183597390494,
      "grad_norm": 0.39559722373002326,
      "learning_rate": 4.44252675181222e-05,
      "loss": 0.4197,
      "step": 645
    },
    {
      "epoch": 0.6057781919850885,
      "grad_norm": 0.4348073933035661,
      "learning_rate": 4.433897134967208e-05,
      "loss": 0.4284,
      "step": 650
    },
    {
      "epoch": 0.6104380242311277,
      "grad_norm": 0.4064242144273277,
      "learning_rate": 4.4252675181221956e-05,
      "loss": 0.4262,
      "step": 655
    },
    {
      "epoch": 0.6150978564771669,
      "grad_norm": 0.3413504824042106,
      "learning_rate": 4.4166379012771834e-05,
      "loss": 0.408,
      "step": 660
    },
    {
      "epoch": 0.6197576887232059,
      "grad_norm": 0.4625137937805045,
      "learning_rate": 4.408008284432171e-05,
      "loss": 0.4091,
      "step": 665
    },
    {
      "epoch": 0.6244175209692451,
      "grad_norm": 0.4365632325921953,
      "learning_rate": 4.39937866758716e-05,
      "loss": 0.4076,
      "step": 670
    },
    {
      "epoch": 0.6290773532152842,
      "grad_norm": 0.43623665649462795,
      "learning_rate": 4.3907490507421476e-05,
      "loss": 0.4158,
      "step": 675
    },
    {
      "epoch": 0.6337371854613234,
      "grad_norm": 0.4462282373794903,
      "learning_rate": 4.382119433897135e-05,
      "loss": 0.419,
      "step": 680
    },
    {
      "epoch": 0.6383970177073626,
      "grad_norm": 0.4909062825850483,
      "learning_rate": 4.373489817052123e-05,
      "loss": 0.4284,
      "step": 685
    },
    {
      "epoch": 0.6430568499534017,
      "grad_norm": 0.4261067060277007,
      "learning_rate": 4.364860200207111e-05,
      "loss": 0.4108,
      "step": 690
    },
    {
      "epoch": 0.6477166821994408,
      "grad_norm": 0.36806589117120897,
      "learning_rate": 4.356230583362099e-05,
      "loss": 0.4154,
      "step": 695
    },
    {
      "epoch": 0.65237651444548,
      "grad_norm": 0.34856157577107083,
      "learning_rate": 4.347600966517087e-05,
      "loss": 0.4246,
      "step": 700
    },
    {
      "epoch": 0.6570363466915191,
      "grad_norm": 0.35691903362815613,
      "learning_rate": 4.3389713496720745e-05,
      "loss": 0.4178,
      "step": 705
    },
    {
      "epoch": 0.6616961789375583,
      "grad_norm": 0.4185903662996916,
      "learning_rate": 4.330341732827063e-05,
      "loss": 0.4184,
      "step": 710
    },
    {
      "epoch": 0.6663560111835974,
      "grad_norm": 0.36798569357808036,
      "learning_rate": 4.321712115982051e-05,
      "loss": 0.4067,
      "step": 715
    },
    {
      "epoch": 0.6710158434296365,
      "grad_norm": 0.3788649720605493,
      "learning_rate": 4.3130824991370387e-05,
      "loss": 0.417,
      "step": 720
    },
    {
      "epoch": 0.6756756756756757,
      "grad_norm": 0.33269308559456673,
      "learning_rate": 4.3044528822920265e-05,
      "loss": 0.4158,
      "step": 725
    },
    {
      "epoch": 0.6803355079217148,
      "grad_norm": 0.41281885956590714,
      "learning_rate": 4.295823265447014e-05,
      "loss": 0.4107,
      "step": 730
    },
    {
      "epoch": 0.684995340167754,
      "grad_norm": 0.35535706301183545,
      "learning_rate": 4.287193648602002e-05,
      "loss": 0.4149,
      "step": 735
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 0.3277576836962773,
      "learning_rate": 4.27856403175699e-05,
      "loss": 0.4086,
      "step": 740
    },
    {
      "epoch": 0.6943150046598322,
      "grad_norm": 0.40279395134887447,
      "learning_rate": 4.269934414911978e-05,
      "loss": 0.4043,
      "step": 745
    },
    {
      "epoch": 0.6989748369058714,
      "grad_norm": 0.43373381741082817,
      "learning_rate": 4.261304798066966e-05,
      "loss": 0.4089,
      "step": 750
    },
    {
      "epoch": 0.7036346691519105,
      "grad_norm": 0.36906806847778895,
      "learning_rate": 4.252675181221954e-05,
      "loss": 0.4113,
      "step": 755
    },
    {
      "epoch": 0.7082945013979497,
      "grad_norm": 0.41940285749229916,
      "learning_rate": 4.244045564376942e-05,
      "loss": 0.4145,
      "step": 760
    },
    {
      "epoch": 0.7129543336439889,
      "grad_norm": 0.35669549672804485,
      "learning_rate": 4.23541594753193e-05,
      "loss": 0.3988,
      "step": 765
    },
    {
      "epoch": 0.7176141658900279,
      "grad_norm": 0.3590318963506859,
      "learning_rate": 4.2267863306869176e-05,
      "loss": 0.4075,
      "step": 770
    },
    {
      "epoch": 0.7222739981360671,
      "grad_norm": 0.40077547109104333,
      "learning_rate": 4.2181567138419054e-05,
      "loss": 0.4202,
      "step": 775
    },
    {
      "epoch": 0.7269338303821062,
      "grad_norm": 0.37047082422653643,
      "learning_rate": 4.209527096996894e-05,
      "loss": 0.4049,
      "step": 780
    },
    {
      "epoch": 0.7315936626281454,
      "grad_norm": 0.3795501401345936,
      "learning_rate": 4.200897480151881e-05,
      "loss": 0.4121,
      "step": 785
    },
    {
      "epoch": 0.7362534948741846,
      "grad_norm": 0.4503984514039821,
      "learning_rate": 4.1922678633068695e-05,
      "loss": 0.4152,
      "step": 790
    },
    {
      "epoch": 0.7409133271202236,
      "grad_norm": 0.44284905780951017,
      "learning_rate": 4.1836382464618573e-05,
      "loss": 0.4098,
      "step": 795
    },
    {
      "epoch": 0.7455731593662628,
      "grad_norm": 0.44691031160954076,
      "learning_rate": 4.175008629616845e-05,
      "loss": 0.4078,
      "step": 800
    },
    {
      "epoch": 0.750232991612302,
      "grad_norm": 0.3735786310769167,
      "learning_rate": 4.166379012771833e-05,
      "loss": 0.4077,
      "step": 805
    },
    {
      "epoch": 0.7548928238583411,
      "grad_norm": 0.3384286028508436,
      "learning_rate": 4.157749395926821e-05,
      "loss": 0.4257,
      "step": 810
    },
    {
      "epoch": 0.7595526561043803,
      "grad_norm": 0.38501244831095327,
      "learning_rate": 4.1491197790818086e-05,
      "loss": 0.4005,
      "step": 815
    },
    {
      "epoch": 0.7642124883504194,
      "grad_norm": 0.6658921381907594,
      "learning_rate": 4.140490162236797e-05,
      "loss": 0.4152,
      "step": 820
    },
    {
      "epoch": 0.7688723205964585,
      "grad_norm": 0.5108682931365331,
      "learning_rate": 4.131860545391785e-05,
      "loss": 0.4078,
      "step": 825
    },
    {
      "epoch": 0.7735321528424977,
      "grad_norm": 0.3959267206623357,
      "learning_rate": 4.123230928546773e-05,
      "loss": 0.4025,
      "step": 830
    },
    {
      "epoch": 0.7781919850885368,
      "grad_norm": 0.40541297925684416,
      "learning_rate": 4.1146013117017606e-05,
      "loss": 0.4095,
      "step": 835
    },
    {
      "epoch": 0.782851817334576,
      "grad_norm": 0.4483501398906087,
      "learning_rate": 4.1059716948567484e-05,
      "loss": 0.4127,
      "step": 840
    },
    {
      "epoch": 0.7875116495806151,
      "grad_norm": 0.4577963072705479,
      "learning_rate": 4.097342078011737e-05,
      "loss": 0.4204,
      "step": 845
    },
    {
      "epoch": 0.7921714818266542,
      "grad_norm": 0.3657002203284406,
      "learning_rate": 4.088712461166724e-05,
      "loss": 0.4105,
      "step": 850
    },
    {
      "epoch": 0.7968313140726934,
      "grad_norm": 0.5219753962445839,
      "learning_rate": 4.080082844321712e-05,
      "loss": 0.4121,
      "step": 855
    },
    {
      "epoch": 0.8014911463187325,
      "grad_norm": 0.4051529596962449,
      "learning_rate": 4.0714532274767004e-05,
      "loss": 0.4189,
      "step": 860
    },
    {
      "epoch": 0.8061509785647717,
      "grad_norm": 0.3645428936825488,
      "learning_rate": 4.062823610631688e-05,
      "loss": 0.4003,
      "step": 865
    },
    {
      "epoch": 0.8108108108108109,
      "grad_norm": 0.31752471962451867,
      "learning_rate": 4.054193993786676e-05,
      "loss": 0.415,
      "step": 870
    },
    {
      "epoch": 0.8154706430568499,
      "grad_norm": 0.3601862957329616,
      "learning_rate": 4.045564376941664e-05,
      "loss": 0.4112,
      "step": 875
    },
    {
      "epoch": 0.8201304753028891,
      "grad_norm": 0.36233941550256943,
      "learning_rate": 4.036934760096652e-05,
      "loss": 0.4027,
      "step": 880
    },
    {
      "epoch": 0.8247903075489282,
      "grad_norm": 0.41290807812840935,
      "learning_rate": 4.02830514325164e-05,
      "loss": 0.4159,
      "step": 885
    },
    {
      "epoch": 0.8294501397949674,
      "grad_norm": 0.42156370600929094,
      "learning_rate": 4.019675526406628e-05,
      "loss": 0.4026,
      "step": 890
    },
    {
      "epoch": 0.8341099720410066,
      "grad_norm": 0.369144224526896,
      "learning_rate": 4.011045909561615e-05,
      "loss": 0.4216,
      "step": 895
    },
    {
      "epoch": 0.8387698042870456,
      "grad_norm": 0.35301766518057,
      "learning_rate": 4.0024162927166037e-05,
      "loss": 0.4047,
      "step": 900
    },
    {
      "epoch": 0.8434296365330848,
      "grad_norm": 0.30727777017232927,
      "learning_rate": 3.9937866758715915e-05,
      "loss": 0.4061,
      "step": 905
    },
    {
      "epoch": 0.848089468779124,
      "grad_norm": 0.4595118441196378,
      "learning_rate": 3.98515705902658e-05,
      "loss": 0.4121,
      "step": 910
    },
    {
      "epoch": 0.8527493010251631,
      "grad_norm": 0.5868283893854741,
      "learning_rate": 3.976527442181567e-05,
      "loss": 0.4051,
      "step": 915
    },
    {
      "epoch": 0.8574091332712023,
      "grad_norm": 0.4408220457405354,
      "learning_rate": 3.967897825336555e-05,
      "loss": 0.4113,
      "step": 920
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 0.329430445296284,
      "learning_rate": 3.9592682084915434e-05,
      "loss": 0.398,
      "step": 925
    },
    {
      "epoch": 0.8667287977632805,
      "grad_norm": 0.4772951342481958,
      "learning_rate": 3.950638591646531e-05,
      "loss": 0.398,
      "step": 930
    },
    {
      "epoch": 0.8713886300093197,
      "grad_norm": 0.39009508489962785,
      "learning_rate": 3.942008974801519e-05,
      "loss": 0.4091,
      "step": 935
    },
    {
      "epoch": 0.8760484622553588,
      "grad_norm": 0.31283951592729364,
      "learning_rate": 3.933379357956507e-05,
      "loss": 0.4051,
      "step": 940
    },
    {
      "epoch": 0.880708294501398,
      "grad_norm": 0.38362272885626797,
      "learning_rate": 3.924749741111495e-05,
      "loss": 0.4072,
      "step": 945
    },
    {
      "epoch": 0.8853681267474371,
      "grad_norm": 0.4294812901025146,
      "learning_rate": 3.916120124266483e-05,
      "loss": 0.4148,
      "step": 950
    },
    {
      "epoch": 0.8900279589934762,
      "grad_norm": 0.4001848027163216,
      "learning_rate": 3.9074905074214704e-05,
      "loss": 0.4136,
      "step": 955
    },
    {
      "epoch": 0.8946877912395154,
      "grad_norm": 0.34026388057585993,
      "learning_rate": 3.898860890576458e-05,
      "loss": 0.4093,
      "step": 960
    },
    {
      "epoch": 0.8993476234855545,
      "grad_norm": 0.40668203338539827,
      "learning_rate": 3.890231273731447e-05,
      "loss": 0.4008,
      "step": 965
    },
    {
      "epoch": 0.9040074557315937,
      "grad_norm": 0.3294333705662259,
      "learning_rate": 3.8816016568864345e-05,
      "loss": 0.3987,
      "step": 970
    },
    {
      "epoch": 0.9086672879776329,
      "grad_norm": 0.342795942254618,
      "learning_rate": 3.8729720400414224e-05,
      "loss": 0.4176,
      "step": 975
    },
    {
      "epoch": 0.9133271202236719,
      "grad_norm": 0.39628105185451024,
      "learning_rate": 3.86434242319641e-05,
      "loss": 0.4047,
      "step": 980
    },
    {
      "epoch": 0.9179869524697111,
      "grad_norm": 0.46631619804138,
      "learning_rate": 3.855712806351398e-05,
      "loss": 0.4068,
      "step": 985
    },
    {
      "epoch": 0.9226467847157502,
      "grad_norm": 0.43261058596610696,
      "learning_rate": 3.8470831895063865e-05,
      "loss": 0.4038,
      "step": 990
    },
    {
      "epoch": 0.9273066169617894,
      "grad_norm": 0.37541980348762116,
      "learning_rate": 3.838453572661374e-05,
      "loss": 0.3995,
      "step": 995
    },
    {
      "epoch": 0.9319664492078286,
      "grad_norm": 0.4184847694680633,
      "learning_rate": 3.8298239558163615e-05,
      "loss": 0.4025,
      "step": 1000
    },
    {
      "epoch": 0.9366262814538676,
      "grad_norm": 0.3857591059837171,
      "learning_rate": 3.82119433897135e-05,
      "loss": 0.4075,
      "step": 1005
    },
    {
      "epoch": 0.9412861136999068,
      "grad_norm": 0.39800461331863696,
      "learning_rate": 3.812564722126338e-05,
      "loss": 0.4,
      "step": 1010
    },
    {
      "epoch": 0.9459459459459459,
      "grad_norm": 0.4143464836173206,
      "learning_rate": 3.8039351052813256e-05,
      "loss": 0.4083,
      "step": 1015
    },
    {
      "epoch": 0.9506057781919851,
      "grad_norm": 0.3920202302045243,
      "learning_rate": 3.7953054884363134e-05,
      "loss": 0.4023,
      "step": 1020
    },
    {
      "epoch": 0.9552656104380243,
      "grad_norm": 0.37828355875661107,
      "learning_rate": 3.786675871591301e-05,
      "loss": 0.4056,
      "step": 1025
    },
    {
      "epoch": 0.9599254426840633,
      "grad_norm": 0.29419352669408066,
      "learning_rate": 3.77804625474629e-05,
      "loss": 0.4107,
      "step": 1030
    },
    {
      "epoch": 0.9645852749301025,
      "grad_norm": 0.3997612657539297,
      "learning_rate": 3.7694166379012776e-05,
      "loss": 0.4031,
      "step": 1035
    },
    {
      "epoch": 0.9692451071761417,
      "grad_norm": 0.38608182016741566,
      "learning_rate": 3.7607870210562654e-05,
      "loss": 0.4005,
      "step": 1040
    },
    {
      "epoch": 0.9739049394221808,
      "grad_norm": 0.28600031302754375,
      "learning_rate": 3.752157404211253e-05,
      "loss": 0.3933,
      "step": 1045
    },
    {
      "epoch": 0.97856477166822,
      "grad_norm": 0.38107411335734614,
      "learning_rate": 3.743527787366241e-05,
      "loss": 0.4225,
      "step": 1050
    },
    {
      "epoch": 0.983224603914259,
      "grad_norm": 0.39170807411989667,
      "learning_rate": 3.734898170521229e-05,
      "loss": 0.4004,
      "step": 1055
    },
    {
      "epoch": 0.9878844361602982,
      "grad_norm": 0.38467780448535566,
      "learning_rate": 3.7262685536762174e-05,
      "loss": 0.3997,
      "step": 1060
    },
    {
      "epoch": 0.9925442684063374,
      "grad_norm": 0.4020140968286371,
      "learning_rate": 3.7176389368312045e-05,
      "loss": 0.4037,
      "step": 1065
    },
    {
      "epoch": 0.9972041006523765,
      "grad_norm": 0.46309836470033766,
      "learning_rate": 3.709009319986193e-05,
      "loss": 0.4041,
      "step": 1070
    },
    {
      "epoch": 1.0018639328984156,
      "grad_norm": 0.3901703886810142,
      "learning_rate": 3.700379703141181e-05,
      "loss": 0.369,
      "step": 1075
    },
    {
      "epoch": 1.0065237651444547,
      "grad_norm": 0.3010234352246036,
      "learning_rate": 3.6917500862961687e-05,
      "loss": 0.3448,
      "step": 1080
    },
    {
      "epoch": 1.011183597390494,
      "grad_norm": 0.4157638937857459,
      "learning_rate": 3.6831204694511565e-05,
      "loss": 0.3518,
      "step": 1085
    },
    {
      "epoch": 1.015843429636533,
      "grad_norm": 0.3603522336344581,
      "learning_rate": 3.674490852606144e-05,
      "loss": 0.3586,
      "step": 1090
    },
    {
      "epoch": 1.0205032618825722,
      "grad_norm": 0.32822298799590405,
      "learning_rate": 3.665861235761132e-05,
      "loss": 0.3447,
      "step": 1095
    },
    {
      "epoch": 1.0251630941286114,
      "grad_norm": 0.35038116243974443,
      "learning_rate": 3.6572316189161206e-05,
      "loss": 0.337,
      "step": 1100
    },
    {
      "epoch": 1.0298229263746506,
      "grad_norm": 0.2841436076230565,
      "learning_rate": 3.6486020020711085e-05,
      "loss": 0.343,
      "step": 1105
    },
    {
      "epoch": 1.0344827586206897,
      "grad_norm": 0.3116345416367525,
      "learning_rate": 3.639972385226096e-05,
      "loss": 0.3432,
      "step": 1110
    },
    {
      "epoch": 1.0391425908667289,
      "grad_norm": 0.3151855230791428,
      "learning_rate": 3.631342768381084e-05,
      "loss": 0.3431,
      "step": 1115
    },
    {
      "epoch": 1.0438024231127678,
      "grad_norm": 0.35993766510002373,
      "learning_rate": 3.622713151536072e-05,
      "loss": 0.343,
      "step": 1120
    },
    {
      "epoch": 1.048462255358807,
      "grad_norm": 0.32743934813623815,
      "learning_rate": 3.6140835346910604e-05,
      "loss": 0.347,
      "step": 1125
    },
    {
      "epoch": 1.0531220876048462,
      "grad_norm": 0.3000370756870265,
      "learning_rate": 3.6054539178460476e-05,
      "loss": 0.3433,
      "step": 1130
    },
    {
      "epoch": 1.0577819198508853,
      "grad_norm": 0.38781790959885876,
      "learning_rate": 3.5968243010010354e-05,
      "loss": 0.3594,
      "step": 1135
    },
    {
      "epoch": 1.0624417520969245,
      "grad_norm": 0.32014554787819977,
      "learning_rate": 3.588194684156024e-05,
      "loss": 0.3503,
      "step": 1140
    },
    {
      "epoch": 1.0671015843429636,
      "grad_norm": 0.370735809498283,
      "learning_rate": 3.579565067311012e-05,
      "loss": 0.3417,
      "step": 1145
    },
    {
      "epoch": 1.0717614165890028,
      "grad_norm": 0.3024601758163409,
      "learning_rate": 3.5709354504659995e-05,
      "loss": 0.3385,
      "step": 1150
    },
    {
      "epoch": 1.076421248835042,
      "grad_norm": 0.3396899881369013,
      "learning_rate": 3.5623058336209874e-05,
      "loss": 0.3496,
      "step": 1155
    },
    {
      "epoch": 1.0810810810810811,
      "grad_norm": 0.28870357807743696,
      "learning_rate": 3.553676216775975e-05,
      "loss": 0.3406,
      "step": 1160
    },
    {
      "epoch": 1.0857409133271203,
      "grad_norm": 0.28568156626690694,
      "learning_rate": 3.545046599930964e-05,
      "loss": 0.3523,
      "step": 1165
    },
    {
      "epoch": 1.0904007455731595,
      "grad_norm": 0.27294319365310815,
      "learning_rate": 3.536416983085951e-05,
      "loss": 0.3537,
      "step": 1170
    },
    {
      "epoch": 1.0950605778191984,
      "grad_norm": 0.36694769383304165,
      "learning_rate": 3.5277873662409386e-05,
      "loss": 0.3403,
      "step": 1175
    },
    {
      "epoch": 1.0997204100652376,
      "grad_norm": 0.33630445896051475,
      "learning_rate": 3.519157749395927e-05,
      "loss": 0.3465,
      "step": 1180
    },
    {
      "epoch": 1.1043802423112767,
      "grad_norm": 0.4121586071124287,
      "learning_rate": 3.510528132550915e-05,
      "loss": 0.3436,
      "step": 1185
    },
    {
      "epoch": 1.109040074557316,
      "grad_norm": 0.3107462600176289,
      "learning_rate": 3.501898515705903e-05,
      "loss": 0.3455,
      "step": 1190
    },
    {
      "epoch": 1.113699906803355,
      "grad_norm": 0.32616980094028847,
      "learning_rate": 3.4932688988608906e-05,
      "loss": 0.3569,
      "step": 1195
    },
    {
      "epoch": 1.1183597390493942,
      "grad_norm": 0.5721259222478717,
      "learning_rate": 3.4846392820158784e-05,
      "loss": 0.3462,
      "step": 1200
    },
    {
      "epoch": 1.1230195712954334,
      "grad_norm": 0.39091963037590294,
      "learning_rate": 3.476009665170867e-05,
      "loss": 0.3458,
      "step": 1205
    },
    {
      "epoch": 1.1276794035414726,
      "grad_norm": 0.3647869778433765,
      "learning_rate": 3.467380048325855e-05,
      "loss": 0.3463,
      "step": 1210
    },
    {
      "epoch": 1.1323392357875117,
      "grad_norm": 0.3278226928089527,
      "learning_rate": 3.458750431480842e-05,
      "loss": 0.3511,
      "step": 1215
    },
    {
      "epoch": 1.1369990680335509,
      "grad_norm": 0.36472407535793705,
      "learning_rate": 3.4501208146358304e-05,
      "loss": 0.3507,
      "step": 1220
    },
    {
      "epoch": 1.14165890027959,
      "grad_norm": 0.3162625401383028,
      "learning_rate": 3.441491197790818e-05,
      "loss": 0.3465,
      "step": 1225
    },
    {
      "epoch": 1.146318732525629,
      "grad_norm": 0.2935130903926503,
      "learning_rate": 3.432861580945806e-05,
      "loss": 0.3474,
      "step": 1230
    },
    {
      "epoch": 1.1509785647716682,
      "grad_norm": 0.34052282866497324,
      "learning_rate": 3.424231964100794e-05,
      "loss": 0.3463,
      "step": 1235
    },
    {
      "epoch": 1.1556383970177073,
      "grad_norm": 0.36877838244359007,
      "learning_rate": 3.415602347255782e-05,
      "loss": 0.3453,
      "step": 1240
    },
    {
      "epoch": 1.1602982292637465,
      "grad_norm": 0.2733712910566977,
      "learning_rate": 3.40697273041077e-05,
      "loss": 0.3519,
      "step": 1245
    },
    {
      "epoch": 1.1649580615097856,
      "grad_norm": 0.37390448821752814,
      "learning_rate": 3.398343113565758e-05,
      "loss": 0.3457,
      "step": 1250
    },
    {
      "epoch": 1.1696178937558248,
      "grad_norm": 0.2862398262715606,
      "learning_rate": 3.389713496720746e-05,
      "loss": 0.3254,
      "step": 1255
    },
    {
      "epoch": 1.174277726001864,
      "grad_norm": 0.27627811665115726,
      "learning_rate": 3.381083879875734e-05,
      "loss": 0.3449,
      "step": 1260
    },
    {
      "epoch": 1.1789375582479031,
      "grad_norm": 0.2984551216682011,
      "learning_rate": 3.3724542630307215e-05,
      "loss": 0.3412,
      "step": 1265
    },
    {
      "epoch": 1.1835973904939423,
      "grad_norm": 0.3117052887160424,
      "learning_rate": 3.363824646185709e-05,
      "loss": 0.3501,
      "step": 1270
    },
    {
      "epoch": 1.1882572227399812,
      "grad_norm": 0.26523368148938603,
      "learning_rate": 3.355195029340698e-05,
      "loss": 0.3443,
      "step": 1275
    },
    {
      "epoch": 1.1929170549860204,
      "grad_norm": 0.3235527702018126,
      "learning_rate": 3.346565412495685e-05,
      "loss": 0.3544,
      "step": 1280
    },
    {
      "epoch": 1.1975768872320596,
      "grad_norm": 0.33231065381424213,
      "learning_rate": 3.3379357956506735e-05,
      "loss": 0.3557,
      "step": 1285
    },
    {
      "epoch": 1.2022367194780987,
      "grad_norm": 0.33109896147037454,
      "learning_rate": 3.329306178805661e-05,
      "loss": 0.3391,
      "step": 1290
    },
    {
      "epoch": 1.206896551724138,
      "grad_norm": 0.29911644384621444,
      "learning_rate": 3.320676561960649e-05,
      "loss": 0.3439,
      "step": 1295
    },
    {
      "epoch": 1.211556383970177,
      "grad_norm": 0.4166015301847235,
      "learning_rate": 3.312046945115637e-05,
      "loss": 0.352,
      "step": 1300
    },
    {
      "epoch": 1.2162162162162162,
      "grad_norm": 0.40766415919978594,
      "learning_rate": 3.303417328270625e-05,
      "loss": 0.3509,
      "step": 1305
    },
    {
      "epoch": 1.2208760484622554,
      "grad_norm": 0.32382906649763094,
      "learning_rate": 3.2947877114256126e-05,
      "loss": 0.34,
      "step": 1310
    },
    {
      "epoch": 1.2255358807082946,
      "grad_norm": 0.30767189965326197,
      "learning_rate": 3.286158094580601e-05,
      "loss": 0.3602,
      "step": 1315
    },
    {
      "epoch": 1.2301957129543337,
      "grad_norm": 0.31880798888411954,
      "learning_rate": 3.277528477735589e-05,
      "loss": 0.3458,
      "step": 1320
    },
    {
      "epoch": 1.2348555452003729,
      "grad_norm": 0.31313877181562205,
      "learning_rate": 3.268898860890577e-05,
      "loss": 0.3526,
      "step": 1325
    },
    {
      "epoch": 1.2395153774464118,
      "grad_norm": 0.2890512077214384,
      "learning_rate": 3.2602692440455645e-05,
      "loss": 0.3408,
      "step": 1330
    },
    {
      "epoch": 1.244175209692451,
      "grad_norm": 0.2836812272018177,
      "learning_rate": 3.2516396272005524e-05,
      "loss": 0.3434,
      "step": 1335
    },
    {
      "epoch": 1.2488350419384902,
      "grad_norm": 0.4540592435912553,
      "learning_rate": 3.243010010355541e-05,
      "loss": 0.3492,
      "step": 1340
    },
    {
      "epoch": 1.2534948741845293,
      "grad_norm": 0.3702760451037345,
      "learning_rate": 3.234380393510528e-05,
      "loss": 0.3368,
      "step": 1345
    },
    {
      "epoch": 1.2581547064305685,
      "grad_norm": 0.2917660806063631,
      "learning_rate": 3.225750776665516e-05,
      "loss": 0.3437,
      "step": 1350
    },
    {
      "epoch": 1.2628145386766076,
      "grad_norm": 0.3005932483580072,
      "learning_rate": 3.217121159820504e-05,
      "loss": 0.3533,
      "step": 1355
    },
    {
      "epoch": 1.2674743709226468,
      "grad_norm": 0.31926938268885113,
      "learning_rate": 3.208491542975492e-05,
      "loss": 0.3433,
      "step": 1360
    },
    {
      "epoch": 1.272134203168686,
      "grad_norm": 0.31462085002303564,
      "learning_rate": 3.19986192613048e-05,
      "loss": 0.3484,
      "step": 1365
    },
    {
      "epoch": 1.2767940354147251,
      "grad_norm": 0.3668129958844129,
      "learning_rate": 3.191232309285468e-05,
      "loss": 0.3473,
      "step": 1370
    },
    {
      "epoch": 1.281453867660764,
      "grad_norm": 0.36011114846832903,
      "learning_rate": 3.1826026924404556e-05,
      "loss": 0.3374,
      "step": 1375
    },
    {
      "epoch": 1.2861136999068035,
      "grad_norm": 0.3707136617543373,
      "learning_rate": 3.173973075595444e-05,
      "loss": 0.3519,
      "step": 1380
    },
    {
      "epoch": 1.2907735321528424,
      "grad_norm": 0.31634559612246044,
      "learning_rate": 3.165343458750431e-05,
      "loss": 0.3532,
      "step": 1385
    },
    {
      "epoch": 1.2954333643988816,
      "grad_norm": 0.36512675923388166,
      "learning_rate": 3.156713841905419e-05,
      "loss": 0.344,
      "step": 1390
    },
    {
      "epoch": 1.3000931966449207,
      "grad_norm": 0.2724467755048477,
      "learning_rate": 3.1480842250604076e-05,
      "loss": 0.3483,
      "step": 1395
    },
    {
      "epoch": 1.30475302889096,
      "grad_norm": 0.360595305928704,
      "learning_rate": 3.1394546082153954e-05,
      "loss": 0.3423,
      "step": 1400
    },
    {
      "epoch": 1.309412861136999,
      "grad_norm": 0.331462059138007,
      "learning_rate": 3.130824991370383e-05,
      "loss": 0.3533,
      "step": 1405
    },
    {
      "epoch": 1.3140726933830382,
      "grad_norm": 0.3123108183369381,
      "learning_rate": 3.122195374525371e-05,
      "loss": 0.3412,
      "step": 1410
    },
    {
      "epoch": 1.3187325256290774,
      "grad_norm": 0.30675615259167105,
      "learning_rate": 3.113565757680359e-05,
      "loss": 0.3486,
      "step": 1415
    },
    {
      "epoch": 1.3233923578751166,
      "grad_norm": 0.3247753979750987,
      "learning_rate": 3.1049361408353474e-05,
      "loss": 0.336,
      "step": 1420
    },
    {
      "epoch": 1.3280521901211557,
      "grad_norm": 0.24573704166768065,
      "learning_rate": 3.096306523990335e-05,
      "loss": 0.3439,
      "step": 1425
    },
    {
      "epoch": 1.3327120223671947,
      "grad_norm": 0.2784691576334795,
      "learning_rate": 3.0876769071453223e-05,
      "loss": 0.3523,
      "step": 1430
    },
    {
      "epoch": 1.337371854613234,
      "grad_norm": 0.3279920083531312,
      "learning_rate": 3.079047290300311e-05,
      "loss": 0.3504,
      "step": 1435
    },
    {
      "epoch": 1.342031686859273,
      "grad_norm": 0.630072707291437,
      "learning_rate": 3.070417673455299e-05,
      "loss": 0.3508,
      "step": 1440
    },
    {
      "epoch": 1.3466915191053122,
      "grad_norm": 0.3062935559964653,
      "learning_rate": 3.061788056610287e-05,
      "loss": 0.3534,
      "step": 1445
    },
    {
      "epoch": 1.3513513513513513,
      "grad_norm": 0.34136097201146864,
      "learning_rate": 3.053158439765274e-05,
      "loss": 0.3395,
      "step": 1450
    },
    {
      "epoch": 1.3560111835973905,
      "grad_norm": 0.3051224256041293,
      "learning_rate": 3.0445288229202625e-05,
      "loss": 0.3469,
      "step": 1455
    },
    {
      "epoch": 1.3606710158434296,
      "grad_norm": 0.2975038729445838,
      "learning_rate": 3.0358992060752506e-05,
      "loss": 0.3529,
      "step": 1460
    },
    {
      "epoch": 1.3653308480894688,
      "grad_norm": 0.31823310979778036,
      "learning_rate": 3.027269589230238e-05,
      "loss": 0.347,
      "step": 1465
    },
    {
      "epoch": 1.369990680335508,
      "grad_norm": 0.2896430196330622,
      "learning_rate": 3.018639972385226e-05,
      "loss": 0.3439,
      "step": 1470
    },
    {
      "epoch": 1.3746505125815471,
      "grad_norm": 0.352875453228656,
      "learning_rate": 3.010010355540214e-05,
      "loss": 0.346,
      "step": 1475
    },
    {
      "epoch": 1.3793103448275863,
      "grad_norm": 0.25417462359338994,
      "learning_rate": 3.001380738695202e-05,
      "loss": 0.3302,
      "step": 1480
    },
    {
      "epoch": 1.3839701770736252,
      "grad_norm": 0.2981082945058558,
      "learning_rate": 2.99275112185019e-05,
      "loss": 0.3493,
      "step": 1485
    },
    {
      "epoch": 1.3886300093196646,
      "grad_norm": 0.3626842220779335,
      "learning_rate": 2.984121505005178e-05,
      "loss": 0.3522,
      "step": 1490
    },
    {
      "epoch": 1.3932898415657036,
      "grad_norm": 0.34072090786926984,
      "learning_rate": 2.9754918881601657e-05,
      "loss": 0.3605,
      "step": 1495
    },
    {
      "epoch": 1.3979496738117427,
      "grad_norm": 0.36566049127137545,
      "learning_rate": 2.966862271315154e-05,
      "loss": 0.3572,
      "step": 1500
    },
    {
      "epoch": 1.402609506057782,
      "grad_norm": 0.30367298168910534,
      "learning_rate": 2.9582326544701417e-05,
      "loss": 0.3503,
      "step": 1505
    },
    {
      "epoch": 1.407269338303821,
      "grad_norm": 0.2828621496023037,
      "learning_rate": 2.9496030376251292e-05,
      "loss": 0.3454,
      "step": 1510
    },
    {
      "epoch": 1.4119291705498602,
      "grad_norm": 0.3069391721232517,
      "learning_rate": 2.9409734207801177e-05,
      "loss": 0.3503,
      "step": 1515
    },
    {
      "epoch": 1.4165890027958994,
      "grad_norm": 0.37676918497607415,
      "learning_rate": 2.9323438039351052e-05,
      "loss": 0.3483,
      "step": 1520
    },
    {
      "epoch": 1.4212488350419386,
      "grad_norm": 0.3439297031393127,
      "learning_rate": 2.9237141870900937e-05,
      "loss": 0.3447,
      "step": 1525
    },
    {
      "epoch": 1.4259086672879777,
      "grad_norm": 0.3359348980856527,
      "learning_rate": 2.9150845702450812e-05,
      "loss": 0.3434,
      "step": 1530
    },
    {
      "epoch": 1.4305684995340169,
      "grad_norm": 0.3706215198106378,
      "learning_rate": 2.906454953400069e-05,
      "loss": 0.3481,
      "step": 1535
    },
    {
      "epoch": 1.4352283317800558,
      "grad_norm": 0.26527833144434804,
      "learning_rate": 2.897825336555057e-05,
      "loss": 0.3443,
      "step": 1540
    },
    {
      "epoch": 1.439888164026095,
      "grad_norm": 0.35150178535932075,
      "learning_rate": 2.889195719710045e-05,
      "loss": 0.3446,
      "step": 1545
    },
    {
      "epoch": 1.4445479962721341,
      "grad_norm": 0.34068866122527197,
      "learning_rate": 2.8805661028650328e-05,
      "loss": 0.341,
      "step": 1550
    },
    {
      "epoch": 1.4492078285181733,
      "grad_norm": 0.44905663737611096,
      "learning_rate": 2.871936486020021e-05,
      "loss": 0.3386,
      "step": 1555
    },
    {
      "epoch": 1.4538676607642125,
      "grad_norm": 0.31579368636055327,
      "learning_rate": 2.8633068691750088e-05,
      "loss": 0.341,
      "step": 1560
    },
    {
      "epoch": 1.4585274930102516,
      "grad_norm": 0.31166330874399784,
      "learning_rate": 2.854677252329997e-05,
      "loss": 0.3434,
      "step": 1565
    },
    {
      "epoch": 1.4631873252562908,
      "grad_norm": 0.3677885578765502,
      "learning_rate": 2.8460476354849848e-05,
      "loss": 0.3502,
      "step": 1570
    },
    {
      "epoch": 1.46784715750233,
      "grad_norm": 0.3763718138729198,
      "learning_rate": 2.8374180186399723e-05,
      "loss": 0.3501,
      "step": 1575
    },
    {
      "epoch": 1.4725069897483691,
      "grad_norm": 0.3319071868875067,
      "learning_rate": 2.8287884017949608e-05,
      "loss": 0.3513,
      "step": 1580
    },
    {
      "epoch": 1.477166821994408,
      "grad_norm": 0.35758179709780585,
      "learning_rate": 2.8201587849499482e-05,
      "loss": 0.3444,
      "step": 1585
    },
    {
      "epoch": 1.4818266542404475,
      "grad_norm": 0.33577991103004173,
      "learning_rate": 2.811529168104936e-05,
      "loss": 0.3545,
      "step": 1590
    },
    {
      "epoch": 1.4864864864864864,
      "grad_norm": 0.3025741672240685,
      "learning_rate": 2.8028995512599242e-05,
      "loss": 0.3453,
      "step": 1595
    },
    {
      "epoch": 1.4911463187325256,
      "grad_norm": 0.3828036069686848,
      "learning_rate": 2.794269934414912e-05,
      "loss": 0.3472,
      "step": 1600
    },
    {
      "epoch": 1.4958061509785647,
      "grad_norm": 0.3043778519192715,
      "learning_rate": 2.7856403175699002e-05,
      "loss": 0.3507,
      "step": 1605
    },
    {
      "epoch": 1.500465983224604,
      "grad_norm": 0.3382233040513506,
      "learning_rate": 2.777010700724888e-05,
      "loss": 0.3461,
      "step": 1610
    },
    {
      "epoch": 1.505125815470643,
      "grad_norm": 0.33027493167325966,
      "learning_rate": 2.768381083879876e-05,
      "loss": 0.3412,
      "step": 1615
    },
    {
      "epoch": 1.5097856477166822,
      "grad_norm": 0.28284106409517795,
      "learning_rate": 2.759751467034864e-05,
      "loss": 0.3434,
      "step": 1620
    },
    {
      "epoch": 1.5144454799627214,
      "grad_norm": 0.27285762390023743,
      "learning_rate": 2.751121850189852e-05,
      "loss": 0.3381,
      "step": 1625
    },
    {
      "epoch": 1.5191053122087603,
      "grad_norm": 0.2856683184270901,
      "learning_rate": 2.7424922333448393e-05,
      "loss": 0.3312,
      "step": 1630
    },
    {
      "epoch": 1.5237651444547997,
      "grad_norm": 0.32602889682943975,
      "learning_rate": 2.7338626164998278e-05,
      "loss": 0.3384,
      "step": 1635
    },
    {
      "epoch": 1.5284249767008387,
      "grad_norm": 0.3112657240375754,
      "learning_rate": 2.7252329996548153e-05,
      "loss": 0.3412,
      "step": 1640
    },
    {
      "epoch": 1.533084808946878,
      "grad_norm": 0.26091256859148315,
      "learning_rate": 2.7166033828098038e-05,
      "loss": 0.3566,
      "step": 1645
    },
    {
      "epoch": 1.537744641192917,
      "grad_norm": 0.3123052376838657,
      "learning_rate": 2.7079737659647913e-05,
      "loss": 0.3455,
      "step": 1650
    }
  ],
  "logging_steps": 5,
  "max_steps": 3219,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 550,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.412694515424166e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}