| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.0251630941286114, |
| "eval_steps": 500, |
| "global_step": 1100, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.004659832246039142, |
| "grad_norm": 59.78214918326703, |
| "learning_rate": 7.763975155279503e-07, |
| "loss": 10.6865, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.009319664492078284, |
| "grad_norm": 61.61821429517621, |
| "learning_rate": 1.5527950310559006e-06, |
| "loss": 10.4783, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.013979496738117428, |
| "grad_norm": 100.18886340338842, |
| "learning_rate": 2.329192546583851e-06, |
| "loss": 8.8595, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.01863932898415657, |
| "grad_norm": 28.80282471816165, |
| "learning_rate": 3.1055900621118013e-06, |
| "loss": 2.9699, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.023299161230195712, |
| "grad_norm": 2.9469993742141027, |
| "learning_rate": 3.881987577639752e-06, |
| "loss": 1.3152, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.027958993476234855, |
| "grad_norm": 1.273876748177396, |
| "learning_rate": 4.658385093167702e-06, |
| "loss": 0.9979, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.032618825722273995, |
| "grad_norm": 1.1847847745298432, |
| "learning_rate": 5.4347826086956525e-06, |
| "loss": 0.8462, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.03727865796831314, |
| "grad_norm": 0.6184161495875878, |
| "learning_rate": 6.2111801242236025e-06, |
| "loss": 0.7823, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.04193849021435228, |
| "grad_norm": 0.4611063105597401, |
| "learning_rate": 6.9875776397515525e-06, |
| "loss": 0.7195, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.046598322460391424, |
| "grad_norm": 0.3903179820498685, |
| "learning_rate": 7.763975155279503e-06, |
| "loss": 0.6868, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.05125815470643057, |
| "grad_norm": 0.44815192407139737, |
| "learning_rate": 8.540372670807453e-06, |
| "loss": 0.6483, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.05591798695246971, |
| "grad_norm": 0.3363352375310443, |
| "learning_rate": 9.316770186335403e-06, |
| "loss": 0.61, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.06057781919850885, |
| "grad_norm": 0.31214409078055283, |
| "learning_rate": 1.0093167701863353e-05, |
| "loss": 0.5932, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.06523765144454799, |
| "grad_norm": 0.32957450493165075, |
| "learning_rate": 1.0869565217391305e-05, |
| "loss": 0.5886, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.06989748369058714, |
| "grad_norm": 0.3311294918877853, |
| "learning_rate": 1.1645962732919255e-05, |
| "loss": 0.5704, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.07455731593662628, |
| "grad_norm": 0.3181163049876225, |
| "learning_rate": 1.2422360248447205e-05, |
| "loss": 0.5604, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.07921714818266543, |
| "grad_norm": 0.3310250029185392, |
| "learning_rate": 1.3198757763975155e-05, |
| "loss": 0.5508, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.08387698042870456, |
| "grad_norm": 0.2864578039803888, |
| "learning_rate": 1.3975155279503105e-05, |
| "loss": 0.5606, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.08853681267474371, |
| "grad_norm": 0.2527763382663063, |
| "learning_rate": 1.4751552795031057e-05, |
| "loss": 0.542, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.09319664492078285, |
| "grad_norm": 0.30977172064299785, |
| "learning_rate": 1.5527950310559007e-05, |
| "loss": 0.5272, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.097856477166822, |
| "grad_norm": 0.3503262906800675, |
| "learning_rate": 1.630434782608696e-05, |
| "loss": 0.5214, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.10251630941286113, |
| "grad_norm": 0.3032318880335728, |
| "learning_rate": 1.7080745341614907e-05, |
| "loss": 0.5229, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.10717614165890028, |
| "grad_norm": 0.32192998759757896, |
| "learning_rate": 1.785714285714286e-05, |
| "loss": 0.5201, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.11183597390493942, |
| "grad_norm": 0.37017049044979194, |
| "learning_rate": 1.8633540372670807e-05, |
| "loss": 0.5067, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.11649580615097857, |
| "grad_norm": 0.27625624649694025, |
| "learning_rate": 1.940993788819876e-05, |
| "loss": 0.5026, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.1211556383970177, |
| "grad_norm": 0.32732522406326287, |
| "learning_rate": 2.0186335403726707e-05, |
| "loss": 0.5083, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.12581547064305684, |
| "grad_norm": 0.3974302327759709, |
| "learning_rate": 2.096273291925466e-05, |
| "loss": 0.5069, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.13047530288909598, |
| "grad_norm": 0.49055099062465235, |
| "learning_rate": 2.173913043478261e-05, |
| "loss": 0.4918, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.13513513513513514, |
| "grad_norm": 0.3509510737287038, |
| "learning_rate": 2.2515527950310562e-05, |
| "loss": 0.5182, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.13979496738117428, |
| "grad_norm": 0.4060738145091598, |
| "learning_rate": 2.329192546583851e-05, |
| "loss": 0.4924, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.14445479962721341, |
| "grad_norm": 0.42238178931670933, |
| "learning_rate": 2.4068322981366462e-05, |
| "loss": 0.5005, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.14911463187325255, |
| "grad_norm": 0.42361270461040995, |
| "learning_rate": 2.484472049689441e-05, |
| "loss": 0.4809, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.15377446411929171, |
| "grad_norm": 0.4419148082648927, |
| "learning_rate": 2.5621118012422362e-05, |
| "loss": 0.4922, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.15843429636533085, |
| "grad_norm": 0.37817457175825797, |
| "learning_rate": 2.639751552795031e-05, |
| "loss": 0.4682, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.16309412861137, |
| "grad_norm": 0.4612740179437555, |
| "learning_rate": 2.7173913043478262e-05, |
| "loss": 0.4812, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.16775396085740912, |
| "grad_norm": 0.4027204736632852, |
| "learning_rate": 2.795031055900621e-05, |
| "loss": 0.4743, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.1724137931034483, |
| "grad_norm": 0.3662916369622068, |
| "learning_rate": 2.8726708074534165e-05, |
| "loss": 0.4771, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.17707362534948742, |
| "grad_norm": 0.44549022951820444, |
| "learning_rate": 2.9503105590062114e-05, |
| "loss": 0.4872, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.18173345759552656, |
| "grad_norm": 0.4421692278535386, |
| "learning_rate": 3.0279503105590062e-05, |
| "loss": 0.4768, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.1863932898415657, |
| "grad_norm": 0.4592171701659634, |
| "learning_rate": 3.1055900621118014e-05, |
| "loss": 0.4782, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.19105312208760486, |
| "grad_norm": 0.5338610618981041, |
| "learning_rate": 3.183229813664597e-05, |
| "loss": 0.4677, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.195712954333644, |
| "grad_norm": 0.679375515050287, |
| "learning_rate": 3.260869565217392e-05, |
| "loss": 0.4817, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.20037278657968313, |
| "grad_norm": 0.5075771202954662, |
| "learning_rate": 3.3385093167701865e-05, |
| "loss": 0.4632, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.20503261882572227, |
| "grad_norm": 0.5271972882853628, |
| "learning_rate": 3.4161490683229814e-05, |
| "loss": 0.4674, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.2096924510717614, |
| "grad_norm": 0.45927485782401883, |
| "learning_rate": 3.493788819875777e-05, |
| "loss": 0.4496, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.21435228331780057, |
| "grad_norm": 0.3875430276374643, |
| "learning_rate": 3.571428571428572e-05, |
| "loss": 0.4628, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.2190121155638397, |
| "grad_norm": 0.43592470909651004, |
| "learning_rate": 3.6490683229813665e-05, |
| "loss": 0.4604, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.22367194780987884, |
| "grad_norm": 0.45541861707423287, |
| "learning_rate": 3.7267080745341614e-05, |
| "loss": 0.4578, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.22833178005591798, |
| "grad_norm": 0.5415802082513514, |
| "learning_rate": 3.804347826086957e-05, |
| "loss": 0.4628, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.23299161230195714, |
| "grad_norm": 0.48756789908879733, |
| "learning_rate": 3.881987577639752e-05, |
| "loss": 0.4551, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.23765144454799628, |
| "grad_norm": 0.48641029082339876, |
| "learning_rate": 3.9596273291925465e-05, |
| "loss": 0.4636, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.2423112767940354, |
| "grad_norm": 0.44167537560377024, |
| "learning_rate": 4.0372670807453414e-05, |
| "loss": 0.4584, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.24697110904007455, |
| "grad_norm": 0.49523500628083084, |
| "learning_rate": 4.114906832298137e-05, |
| "loss": 0.457, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.2516309412861137, |
| "grad_norm": 0.46362590765498735, |
| "learning_rate": 4.192546583850932e-05, |
| "loss": 0.4553, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.25629077353215285, |
| "grad_norm": 0.41387594745783063, |
| "learning_rate": 4.270186335403727e-05, |
| "loss": 0.4606, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.26095060577819196, |
| "grad_norm": 0.4327868844195844, |
| "learning_rate": 4.347826086956522e-05, |
| "loss": 0.4529, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.2656104380242311, |
| "grad_norm": 0.425878793618421, |
| "learning_rate": 4.425465838509317e-05, |
| "loss": 0.457, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.2702702702702703, |
| "grad_norm": 0.49167640847055744, |
| "learning_rate": 4.5031055900621124e-05, |
| "loss": 0.4586, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.2749301025163094, |
| "grad_norm": 0.6223762427590684, |
| "learning_rate": 4.580745341614907e-05, |
| "loss": 0.4627, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.27958993476234856, |
| "grad_norm": 0.6138447122463503, |
| "learning_rate": 4.658385093167702e-05, |
| "loss": 0.4643, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.2842497670083877, |
| "grad_norm": 0.5410077879825527, |
| "learning_rate": 4.736024844720497e-05, |
| "loss": 0.4531, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.28890959925442683, |
| "grad_norm": 0.5248106064020833, |
| "learning_rate": 4.8136645962732924e-05, |
| "loss": 0.4453, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.293569431500466, |
| "grad_norm": 0.4576312238038398, |
| "learning_rate": 4.891304347826087e-05, |
| "loss": 0.4531, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.2982292637465051, |
| "grad_norm": 0.7616375740291316, |
| "learning_rate": 4.968944099378882e-05, |
| "loss": 0.4387, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.30288909599254427, |
| "grad_norm": 0.5230246761762287, |
| "learning_rate": 4.994822229892993e-05, |
| "loss": 0.4367, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.30754892823858343, |
| "grad_norm": 0.4333888474147187, |
| "learning_rate": 4.986192613047981e-05, |
| "loss": 0.4468, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.31220876048462254, |
| "grad_norm": 0.6558069118015527, |
| "learning_rate": 4.977562996202969e-05, |
| "loss": 0.447, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.3168685927306617, |
| "grad_norm": 0.4781469048268466, |
| "learning_rate": 4.968933379357957e-05, |
| "loss": 0.4487, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.32152842497670087, |
| "grad_norm": 0.4582486158349481, |
| "learning_rate": 4.9603037625129445e-05, |
| "loss": 0.4433, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.32618825722274, |
| "grad_norm": 0.4234664780262906, |
| "learning_rate": 4.951674145667933e-05, |
| "loss": 0.4568, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.33084808946877914, |
| "grad_norm": 0.4766441062632173, |
| "learning_rate": 4.94304452882292e-05, |
| "loss": 0.4431, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.33550792171481825, |
| "grad_norm": 0.5294140666427822, |
| "learning_rate": 4.934414911977908e-05, |
| "loss": 0.4405, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.3401677539608574, |
| "grad_norm": 0.3838225157756671, |
| "learning_rate": 4.9257852951328965e-05, |
| "loss": 0.437, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.3448275862068966, |
| "grad_norm": 0.4289011151413574, |
| "learning_rate": 4.917155678287884e-05, |
| "loss": 0.4498, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.3494874184529357, |
| "grad_norm": 0.4739139367556648, |
| "learning_rate": 4.908526061442872e-05, |
| "loss": 0.4364, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.35414725069897485, |
| "grad_norm": 0.6531685734098429, |
| "learning_rate": 4.89989644459786e-05, |
| "loss": 0.4507, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.35880708294501396, |
| "grad_norm": 0.38736077721702633, |
| "learning_rate": 4.891266827752848e-05, |
| "loss": 0.4526, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.3634669151910531, |
| "grad_norm": 0.39074723361967706, |
| "learning_rate": 4.882637210907836e-05, |
| "loss": 0.4372, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.3681267474370923, |
| "grad_norm": 0.4998720672786241, |
| "learning_rate": 4.874007594062824e-05, |
| "loss": 0.4432, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.3727865796831314, |
| "grad_norm": 0.41449696680501236, |
| "learning_rate": 4.865377977217811e-05, |
| "loss": 0.4284, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.37744641192917056, |
| "grad_norm": 0.42798981510455325, |
| "learning_rate": 4.8567483603728e-05, |
| "loss": 0.4466, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.3821062441752097, |
| "grad_norm": 0.453510571603012, |
| "learning_rate": 4.8481187435277875e-05, |
| "loss": 0.4425, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.38676607642124883, |
| "grad_norm": 0.5613852564226437, |
| "learning_rate": 4.839489126682776e-05, |
| "loss": 0.4296, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.391425908667288, |
| "grad_norm": 0.4991311823287778, |
| "learning_rate": 4.830859509837763e-05, |
| "loss": 0.4477, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.3960857409133271, |
| "grad_norm": 0.41710093216407557, |
| "learning_rate": 4.822229892992751e-05, |
| "loss": 0.4451, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.40074557315936626, |
| "grad_norm": 0.44852195468081424, |
| "learning_rate": 4.8136002761477395e-05, |
| "loss": 0.4322, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.40540540540540543, |
| "grad_norm": 0.5873258297489329, |
| "learning_rate": 4.804970659302727e-05, |
| "loss": 0.445, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.41006523765144454, |
| "grad_norm": 0.5301184440251494, |
| "learning_rate": 4.796341042457715e-05, |
| "loss": 0.439, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.4147250698974837, |
| "grad_norm": 0.554152067322795, |
| "learning_rate": 4.787711425612703e-05, |
| "loss": 0.4337, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.4193849021435228, |
| "grad_norm": 0.4875794890348032, |
| "learning_rate": 4.779081808767691e-05, |
| "loss": 0.4245, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.424044734389562, |
| "grad_norm": 0.4690783572871423, |
| "learning_rate": 4.770452191922679e-05, |
| "loss": 0.4314, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.42870456663560114, |
| "grad_norm": 0.3725289513240759, |
| "learning_rate": 4.761822575077667e-05, |
| "loss": 0.4283, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.43336439888164024, |
| "grad_norm": 0.4830268598668616, |
| "learning_rate": 4.753192958232654e-05, |
| "loss": 0.4255, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.4380242311276794, |
| "grad_norm": 0.43173494250112954, |
| "learning_rate": 4.744563341387643e-05, |
| "loss": 0.4378, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.4426840633737186, |
| "grad_norm": 0.43237002431737065, |
| "learning_rate": 4.7359337245426306e-05, |
| "loss": 0.4277, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.4473438956197577, |
| "grad_norm": 0.41385681702794824, |
| "learning_rate": 4.7273041076976184e-05, |
| "loss": 0.4394, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.45200372786579684, |
| "grad_norm": 0.40157124060011173, |
| "learning_rate": 4.718674490852606e-05, |
| "loss": 0.432, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.45666356011183595, |
| "grad_norm": 0.39938983254093463, |
| "learning_rate": 4.710044874007594e-05, |
| "loss": 0.4264, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.4613233923578751, |
| "grad_norm": 0.39732323279012777, |
| "learning_rate": 4.7014152571625826e-05, |
| "loss": 0.4321, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.4659832246039143, |
| "grad_norm": 0.4747358464791143, |
| "learning_rate": 4.6927856403175704e-05, |
| "loss": 0.435, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.4706430568499534, |
| "grad_norm": 0.3698718174123855, |
| "learning_rate": 4.684156023472558e-05, |
| "loss": 0.4221, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.47530288909599255, |
| "grad_norm": 0.4305572996344627, |
| "learning_rate": 4.675526406627546e-05, |
| "loss": 0.4303, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.47996272134203166, |
| "grad_norm": 0.6085259797324423, |
| "learning_rate": 4.666896789782534e-05, |
| "loss": 0.4281, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.4846225535880708, |
| "grad_norm": 0.5730318489213171, |
| "learning_rate": 4.658267172937522e-05, |
| "loss": 0.4321, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.48928238583411, |
| "grad_norm": 0.4332245949035479, |
| "learning_rate": 4.64963755609251e-05, |
| "loss": 0.4309, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.4939422180801491, |
| "grad_norm": 0.508102013567185, |
| "learning_rate": 4.641007939247497e-05, |
| "loss": 0.428, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.49860205032618826, |
| "grad_norm": 0.34669842614662666, |
| "learning_rate": 4.632378322402486e-05, |
| "loss": 0.4283, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.5032618825722274, |
| "grad_norm": 0.3889254150420956, |
| "learning_rate": 4.6237487055574736e-05, |
| "loss": 0.4178, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.5079217148182665, |
| "grad_norm": 0.49239466237923585, |
| "learning_rate": 4.6151190887124615e-05, |
| "loss": 0.4244, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.5125815470643057, |
| "grad_norm": 0.4397316581317278, |
| "learning_rate": 4.606489471867449e-05, |
| "loss": 0.4245, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.5172413793103449, |
| "grad_norm": 0.3573410355093717, |
| "learning_rate": 4.597859855022437e-05, |
| "loss": 0.4192, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.5219012115563839, |
| "grad_norm": 0.43267928489519075, |
| "learning_rate": 4.589230238177425e-05, |
| "loss": 0.4397, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.5265610438024231, |
| "grad_norm": 0.4821719775119577, |
| "learning_rate": 4.5806006213324134e-05, |
| "loss": 0.4177, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.5312208760484622, |
| "grad_norm": 0.5349367976109402, |
| "learning_rate": 4.5719710044874006e-05, |
| "loss": 0.4175, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.5358807082945014, |
| "grad_norm": 0.47146780494171403, |
| "learning_rate": 4.563341387642389e-05, |
| "loss": 0.4234, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.5405405405405406, |
| "grad_norm": 0.3724800991591985, |
| "learning_rate": 4.554711770797377e-05, |
| "loss": 0.4238, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.5452003727865797, |
| "grad_norm": 0.46072368721402923, |
| "learning_rate": 4.546082153952365e-05, |
| "loss": 0.4209, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.5498602050326188, |
| "grad_norm": 0.40398358486961483, |
| "learning_rate": 4.5374525371073526e-05, |
| "loss": 0.4171, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.554520037278658, |
| "grad_norm": 0.3510727726291796, |
| "learning_rate": 4.5288229202623404e-05, |
| "loss": 0.4191, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.5591798695246971, |
| "grad_norm": 0.38603847092047144, |
| "learning_rate": 4.520193303417328e-05, |
| "loss": 0.4194, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.5638397017707363, |
| "grad_norm": 0.3689646251475895, |
| "learning_rate": 4.511563686572317e-05, |
| "loss": 0.4182, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.5684995340167754, |
| "grad_norm": 0.408796996489251, |
| "learning_rate": 4.5029340697273045e-05, |
| "loss": 0.4293, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.5731593662628145, |
| "grad_norm": 0.32130017833210983, |
| "learning_rate": 4.4943044528822923e-05, |
| "loss": 0.4159, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.5778191985088537, |
| "grad_norm": 0.3772249822258766, |
| "learning_rate": 4.48567483603728e-05, |
| "loss": 0.4131, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.5824790307548928, |
| "grad_norm": 0.35575898721323274, |
| "learning_rate": 4.477045219192268e-05, |
| "loss": 0.4344, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.587138863000932, |
| "grad_norm": 0.38751558564769756, |
| "learning_rate": 4.4684156023472565e-05, |
| "loss": 0.425, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.5917986952469712, |
| "grad_norm": 0.4663549933535284, |
| "learning_rate": 4.4597859855022436e-05, |
| "loss": 0.4296, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.5964585274930102, |
| "grad_norm": 0.4662472899566146, |
| "learning_rate": 4.4511563686572315e-05, |
| "loss": 0.4202, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.6011183597390494, |
| "grad_norm": 0.39559722373002326, |
| "learning_rate": 4.44252675181222e-05, |
| "loss": 0.4197, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.6057781919850885, |
| "grad_norm": 0.4348073933035661, |
| "learning_rate": 4.433897134967208e-05, |
| "loss": 0.4284, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.6104380242311277, |
| "grad_norm": 0.4064242144273277, |
| "learning_rate": 4.4252675181221956e-05, |
| "loss": 0.4262, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.6150978564771669, |
| "grad_norm": 0.3413504824042106, |
| "learning_rate": 4.4166379012771834e-05, |
| "loss": 0.408, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.6197576887232059, |
| "grad_norm": 0.4625137937805045, |
| "learning_rate": 4.408008284432171e-05, |
| "loss": 0.4091, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.6244175209692451, |
| "grad_norm": 0.4365632325921953, |
| "learning_rate": 4.39937866758716e-05, |
| "loss": 0.4076, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.6290773532152842, |
| "grad_norm": 0.43623665649462795, |
| "learning_rate": 4.3907490507421476e-05, |
| "loss": 0.4158, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.6337371854613234, |
| "grad_norm": 0.4462282373794903, |
| "learning_rate": 4.382119433897135e-05, |
| "loss": 0.419, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.6383970177073626, |
| "grad_norm": 0.4909062825850483, |
| "learning_rate": 4.373489817052123e-05, |
| "loss": 0.4284, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.6430568499534017, |
| "grad_norm": 0.4261067060277007, |
| "learning_rate": 4.364860200207111e-05, |
| "loss": 0.4108, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.6477166821994408, |
| "grad_norm": 0.36806589117120897, |
| "learning_rate": 4.356230583362099e-05, |
| "loss": 0.4154, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.65237651444548, |
| "grad_norm": 0.34856157577107083, |
| "learning_rate": 4.347600966517087e-05, |
| "loss": 0.4246, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.6570363466915191, |
| "grad_norm": 0.35691903362815613, |
| "learning_rate": 4.3389713496720745e-05, |
| "loss": 0.4178, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.6616961789375583, |
| "grad_norm": 0.4185903662996916, |
| "learning_rate": 4.330341732827063e-05, |
| "loss": 0.4184, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.6663560111835974, |
| "grad_norm": 0.36798569357808036, |
| "learning_rate": 4.321712115982051e-05, |
| "loss": 0.4067, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.6710158434296365, |
| "grad_norm": 0.3788649720605493, |
| "learning_rate": 4.3130824991370387e-05, |
| "loss": 0.417, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.6756756756756757, |
| "grad_norm": 0.33269308559456673, |
| "learning_rate": 4.3044528822920265e-05, |
| "loss": 0.4158, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.6803355079217148, |
| "grad_norm": 0.41281885956590714, |
| "learning_rate": 4.295823265447014e-05, |
| "loss": 0.4107, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.684995340167754, |
| "grad_norm": 0.35535706301183545, |
| "learning_rate": 4.287193648602002e-05, |
| "loss": 0.4149, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.6896551724137931, |
| "grad_norm": 0.3277576836962773, |
| "learning_rate": 4.27856403175699e-05, |
| "loss": 0.4086, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.6943150046598322, |
| "grad_norm": 0.40279395134887447, |
| "learning_rate": 4.269934414911978e-05, |
| "loss": 0.4043, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.6989748369058714, |
| "grad_norm": 0.43373381741082817, |
| "learning_rate": 4.261304798066966e-05, |
| "loss": 0.4089, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.7036346691519105, |
| "grad_norm": 0.36906806847778895, |
| "learning_rate": 4.252675181221954e-05, |
| "loss": 0.4113, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.7082945013979497, |
| "grad_norm": 0.41940285749229916, |
| "learning_rate": 4.244045564376942e-05, |
| "loss": 0.4145, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.7129543336439889, |
| "grad_norm": 0.35669549672804485, |
| "learning_rate": 4.23541594753193e-05, |
| "loss": 0.3988, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.7176141658900279, |
| "grad_norm": 0.3590318963506859, |
| "learning_rate": 4.2267863306869176e-05, |
| "loss": 0.4075, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.7222739981360671, |
| "grad_norm": 0.40077547109104333, |
| "learning_rate": 4.2181567138419054e-05, |
| "loss": 0.4202, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.7269338303821062, |
| "grad_norm": 0.37047082422653643, |
| "learning_rate": 4.209527096996894e-05, |
| "loss": 0.4049, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.7315936626281454, |
| "grad_norm": 0.3795501401345936, |
| "learning_rate": 4.200897480151881e-05, |
| "loss": 0.4121, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.7362534948741846, |
| "grad_norm": 0.4503984514039821, |
| "learning_rate": 4.1922678633068695e-05, |
| "loss": 0.4152, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.7409133271202236, |
| "grad_norm": 0.44284905780951017, |
| "learning_rate": 4.1836382464618573e-05, |
| "loss": 0.4098, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.7455731593662628, |
| "grad_norm": 0.44691031160954076, |
| "learning_rate": 4.175008629616845e-05, |
| "loss": 0.4078, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.750232991612302, |
| "grad_norm": 0.3735786310769167, |
| "learning_rate": 4.166379012771833e-05, |
| "loss": 0.4077, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.7548928238583411, |
| "grad_norm": 0.3384286028508436, |
| "learning_rate": 4.157749395926821e-05, |
| "loss": 0.4257, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.7595526561043803, |
| "grad_norm": 0.38501244831095327, |
| "learning_rate": 4.1491197790818086e-05, |
| "loss": 0.4005, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.7642124883504194, |
| "grad_norm": 0.6658921381907594, |
| "learning_rate": 4.140490162236797e-05, |
| "loss": 0.4152, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.7688723205964585, |
| "grad_norm": 0.5108682931365331, |
| "learning_rate": 4.131860545391785e-05, |
| "loss": 0.4078, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.7735321528424977, |
| "grad_norm": 0.3959267206623357, |
| "learning_rate": 4.123230928546773e-05, |
| "loss": 0.4025, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.7781919850885368, |
| "grad_norm": 0.40541297925684416, |
| "learning_rate": 4.1146013117017606e-05, |
| "loss": 0.4095, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.782851817334576, |
| "grad_norm": 0.4483501398906087, |
| "learning_rate": 4.1059716948567484e-05, |
| "loss": 0.4127, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.7875116495806151, |
| "grad_norm": 0.4577963072705479, |
| "learning_rate": 4.097342078011737e-05, |
| "loss": 0.4204, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.7921714818266542, |
| "grad_norm": 0.3657002203284406, |
| "learning_rate": 4.088712461166724e-05, |
| "loss": 0.4105, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.7968313140726934, |
| "grad_norm": 0.5219753962445839, |
| "learning_rate": 4.080082844321712e-05, |
| "loss": 0.4121, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.8014911463187325, |
| "grad_norm": 0.4051529596962449, |
| "learning_rate": 4.0714532274767004e-05, |
| "loss": 0.4189, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.8061509785647717, |
| "grad_norm": 0.3645428936825488, |
| "learning_rate": 4.062823610631688e-05, |
| "loss": 0.4003, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.8108108108108109, |
| "grad_norm": 0.31752471962451867, |
| "learning_rate": 4.054193993786676e-05, |
| "loss": 0.415, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.8154706430568499, |
| "grad_norm": 0.3601862957329616, |
| "learning_rate": 4.045564376941664e-05, |
| "loss": 0.4112, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.8201304753028891, |
| "grad_norm": 0.36233941550256943, |
| "learning_rate": 4.036934760096652e-05, |
| "loss": 0.4027, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.8247903075489282, |
| "grad_norm": 0.41290807812840935, |
| "learning_rate": 4.02830514325164e-05, |
| "loss": 0.4159, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.8294501397949674, |
| "grad_norm": 0.42156370600929094, |
| "learning_rate": 4.019675526406628e-05, |
| "loss": 0.4026, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.8341099720410066, |
| "grad_norm": 0.369144224526896, |
| "learning_rate": 4.011045909561615e-05, |
| "loss": 0.4216, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.8387698042870456, |
| "grad_norm": 0.35301766518057, |
| "learning_rate": 4.0024162927166037e-05, |
| "loss": 0.4047, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.8434296365330848, |
| "grad_norm": 0.30727777017232927, |
| "learning_rate": 3.9937866758715915e-05, |
| "loss": 0.4061, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.848089468779124, |
| "grad_norm": 0.4595118441196378, |
| "learning_rate": 3.98515705902658e-05, |
| "loss": 0.4121, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.8527493010251631, |
| "grad_norm": 0.5868283893854741, |
| "learning_rate": 3.976527442181567e-05, |
| "loss": 0.4051, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.8574091332712023, |
| "grad_norm": 0.4408220457405354, |
| "learning_rate": 3.967897825336555e-05, |
| "loss": 0.4113, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.8620689655172413, |
| "grad_norm": 0.329430445296284, |
| "learning_rate": 3.9592682084915434e-05, |
| "loss": 0.398, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.8667287977632805, |
| "grad_norm": 0.4772951342481958, |
| "learning_rate": 3.950638591646531e-05, |
| "loss": 0.398, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.8713886300093197, |
| "grad_norm": 0.39009508489962785, |
| "learning_rate": 3.942008974801519e-05, |
| "loss": 0.4091, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.8760484622553588, |
| "grad_norm": 0.31283951592729364, |
| "learning_rate": 3.933379357956507e-05, |
| "loss": 0.4051, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.880708294501398, |
| "grad_norm": 0.38362272885626797, |
| "learning_rate": 3.924749741111495e-05, |
| "loss": 0.4072, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.8853681267474371, |
| "grad_norm": 0.4294812901025146, |
| "learning_rate": 3.916120124266483e-05, |
| "loss": 0.4148, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.8900279589934762, |
| "grad_norm": 0.4001848027163216, |
| "learning_rate": 3.9074905074214704e-05, |
| "loss": 0.4136, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.8946877912395154, |
| "grad_norm": 0.34026388057585993, |
| "learning_rate": 3.898860890576458e-05, |
| "loss": 0.4093, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.8993476234855545, |
| "grad_norm": 0.40668203338539827, |
| "learning_rate": 3.890231273731447e-05, |
| "loss": 0.4008, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.9040074557315937, |
| "grad_norm": 0.3294333705662259, |
| "learning_rate": 3.8816016568864345e-05, |
| "loss": 0.3987, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.9086672879776329, |
| "grad_norm": 0.342795942254618, |
| "learning_rate": 3.8729720400414224e-05, |
| "loss": 0.4176, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.9133271202236719, |
| "grad_norm": 0.39628105185451024, |
| "learning_rate": 3.86434242319641e-05, |
| "loss": 0.4047, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.9179869524697111, |
| "grad_norm": 0.46631619804138, |
| "learning_rate": 3.855712806351398e-05, |
| "loss": 0.4068, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.9226467847157502, |
| "grad_norm": 0.43261058596610696, |
| "learning_rate": 3.8470831895063865e-05, |
| "loss": 0.4038, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.9273066169617894, |
| "grad_norm": 0.37541980348762116, |
| "learning_rate": 3.838453572661374e-05, |
| "loss": 0.3995, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.9319664492078286, |
| "grad_norm": 0.4184847694680633, |
| "learning_rate": 3.8298239558163615e-05, |
| "loss": 0.4025, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.9366262814538676, |
| "grad_norm": 0.3857591059837171, |
| "learning_rate": 3.82119433897135e-05, |
| "loss": 0.4075, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.9412861136999068, |
| "grad_norm": 0.39800461331863696, |
| "learning_rate": 3.812564722126338e-05, |
| "loss": 0.4, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.9459459459459459, |
| "grad_norm": 0.4143464836173206, |
| "learning_rate": 3.8039351052813256e-05, |
| "loss": 0.4083, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.9506057781919851, |
| "grad_norm": 0.3920202302045243, |
| "learning_rate": 3.7953054884363134e-05, |
| "loss": 0.4023, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.9552656104380243, |
| "grad_norm": 0.37828355875661107, |
| "learning_rate": 3.786675871591301e-05, |
| "loss": 0.4056, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.9599254426840633, |
| "grad_norm": 0.29419352669408066, |
| "learning_rate": 3.77804625474629e-05, |
| "loss": 0.4107, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.9645852749301025, |
| "grad_norm": 0.3997612657539297, |
| "learning_rate": 3.7694166379012776e-05, |
| "loss": 0.4031, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.9692451071761417, |
| "grad_norm": 0.38608182016741566, |
| "learning_rate": 3.7607870210562654e-05, |
| "loss": 0.4005, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.9739049394221808, |
| "grad_norm": 0.28600031302754375, |
| "learning_rate": 3.752157404211253e-05, |
| "loss": 0.3933, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.97856477166822, |
| "grad_norm": 0.38107411335734614, |
| "learning_rate": 3.743527787366241e-05, |
| "loss": 0.4225, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.983224603914259, |
| "grad_norm": 0.39170807411989667, |
| "learning_rate": 3.734898170521229e-05, |
| "loss": 0.4004, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.9878844361602982, |
| "grad_norm": 0.38467780448535566, |
| "learning_rate": 3.7262685536762174e-05, |
| "loss": 0.3997, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.9925442684063374, |
| "grad_norm": 0.4020140968286371, |
| "learning_rate": 3.7176389368312045e-05, |
| "loss": 0.4037, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.9972041006523765, |
| "grad_norm": 0.46309836470033766, |
| "learning_rate": 3.709009319986193e-05, |
| "loss": 0.4041, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.0018639328984156, |
| "grad_norm": 0.3901703886810142, |
| "learning_rate": 3.700379703141181e-05, |
| "loss": 0.369, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.0065237651444547, |
| "grad_norm": 0.3010234352246036, |
| "learning_rate": 3.6917500862961687e-05, |
| "loss": 0.3448, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.011183597390494, |
| "grad_norm": 0.4157638937857459, |
| "learning_rate": 3.6831204694511565e-05, |
| "loss": 0.3518, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.015843429636533, |
| "grad_norm": 0.3603522336344581, |
| "learning_rate": 3.674490852606144e-05, |
| "loss": 0.3586, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.0205032618825722, |
| "grad_norm": 0.32822298799590405, |
| "learning_rate": 3.665861235761132e-05, |
| "loss": 0.3447, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.0251630941286114, |
| "grad_norm": 0.35038116243974443, |
| "learning_rate": 3.6572316189161206e-05, |
| "loss": 0.337, |
| "step": 1100 |
| } |
| ], |
| "logging_steps": 5, |
| "max_steps": 3219, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 550, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 9.4183398014609e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |