| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.0, |
| "eval_steps": 500, |
| "global_step": 2146, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.004659832246039142, |
| "grad_norm": 63.97114436632525, |
| "learning_rate": 7.763975155279503e-07, |
| "loss": 11.009, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.009319664492078284, |
| "grad_norm": 97.6207743013609, |
| "learning_rate": 1.5527950310559006e-06, |
| "loss": 10.0558, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.013979496738117428, |
| "grad_norm": 68.74620690530702, |
| "learning_rate": 2.329192546583851e-06, |
| "loss": 5.5709, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.01863932898415657, |
| "grad_norm": 7.329953761956908, |
| "learning_rate": 3.1055900621118013e-06, |
| "loss": 1.674, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.023299161230195712, |
| "grad_norm": 2.631209184742148, |
| "learning_rate": 3.881987577639752e-06, |
| "loss": 1.1266, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.027958993476234855, |
| "grad_norm": 1.2736389968892712, |
| "learning_rate": 4.658385093167702e-06, |
| "loss": 0.9104, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.032618825722273995, |
| "grad_norm": 0.7641614678643728, |
| "learning_rate": 5.4347826086956525e-06, |
| "loss": 0.7886, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.03727865796831314, |
| "grad_norm": 0.6854613522496884, |
| "learning_rate": 6.2111801242236025e-06, |
| "loss": 0.7361, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.04193849021435228, |
| "grad_norm": 0.5502087380767395, |
| "learning_rate": 6.9875776397515525e-06, |
| "loss": 0.6884, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.046598322460391424, |
| "grad_norm": 0.4674581907162505, |
| "learning_rate": 7.763975155279503e-06, |
| "loss": 0.6644, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.05125815470643057, |
| "grad_norm": 0.40309163851709934, |
| "learning_rate": 8.540372670807453e-06, |
| "loss": 0.6316, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.05591798695246971, |
| "grad_norm": 0.3284620469689577, |
| "learning_rate": 9.316770186335403e-06, |
| "loss": 0.5978, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.06057781919850885, |
| "grad_norm": 0.30466734688995895, |
| "learning_rate": 1.0093167701863353e-05, |
| "loss": 0.5837, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.06523765144454799, |
| "grad_norm": 0.3095353132100952, |
| "learning_rate": 1.0869565217391305e-05, |
| "loss": 0.5808, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.06989748369058714, |
| "grad_norm": 0.2742862834044995, |
| "learning_rate": 1.1645962732919255e-05, |
| "loss": 0.5639, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.07455731593662628, |
| "grad_norm": 0.28624072357525304, |
| "learning_rate": 1.2422360248447205e-05, |
| "loss": 0.5552, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.07921714818266543, |
| "grad_norm": 0.35136842750239394, |
| "learning_rate": 1.3198757763975155e-05, |
| "loss": 0.5465, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.08387698042870456, |
| "grad_norm": 0.3554715302423071, |
| "learning_rate": 1.3975155279503105e-05, |
| "loss": 0.5572, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.08853681267474371, |
| "grad_norm": 0.30333751588191904, |
| "learning_rate": 1.4751552795031057e-05, |
| "loss": 0.5389, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.09319664492078285, |
| "grad_norm": 0.34447253483888585, |
| "learning_rate": 1.5527950310559007e-05, |
| "loss": 0.5248, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.097856477166822, |
| "grad_norm": 0.3509763920502021, |
| "learning_rate": 1.630434782608696e-05, |
| "loss": 0.5197, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.10251630941286113, |
| "grad_norm": 0.3047065908098556, |
| "learning_rate": 1.7080745341614907e-05, |
| "loss": 0.5215, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.10717614165890028, |
| "grad_norm": 0.35772122594235833, |
| "learning_rate": 1.785714285714286e-05, |
| "loss": 0.519, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.11183597390493942, |
| "grad_norm": 0.4237939965410238, |
| "learning_rate": 1.8633540372670807e-05, |
| "loss": 0.5057, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.11649580615097857, |
| "grad_norm": 0.3052118953673648, |
| "learning_rate": 1.940993788819876e-05, |
| "loss": 0.5022, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.1211556383970177, |
| "grad_norm": 0.48506740368145884, |
| "learning_rate": 2.0186335403726707e-05, |
| "loss": 0.5083, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.12581547064305684, |
| "grad_norm": 0.4067639562809268, |
| "learning_rate": 2.096273291925466e-05, |
| "loss": 0.5074, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.13047530288909598, |
| "grad_norm": 0.44239635913158043, |
| "learning_rate": 2.173913043478261e-05, |
| "loss": 0.4919, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.13513513513513514, |
| "grad_norm": 0.3510658763415586, |
| "learning_rate": 2.2515527950310562e-05, |
| "loss": 0.5188, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.13979496738117428, |
| "grad_norm": 0.44553534657252714, |
| "learning_rate": 2.329192546583851e-05, |
| "loss": 0.4929, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.14445479962721341, |
| "grad_norm": 0.44028135611068503, |
| "learning_rate": 2.4068322981366462e-05, |
| "loss": 0.5013, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.14911463187325255, |
| "grad_norm": 0.4559159420874923, |
| "learning_rate": 2.484472049689441e-05, |
| "loss": 0.4821, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.15377446411929171, |
| "grad_norm": 0.47309971975261556, |
| "learning_rate": 2.5621118012422362e-05, |
| "loss": 0.4933, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.15843429636533085, |
| "grad_norm": 0.4022989429457791, |
| "learning_rate": 2.639751552795031e-05, |
| "loss": 0.4694, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.16309412861137, |
| "grad_norm": 0.48867133460334755, |
| "learning_rate": 2.7173913043478262e-05, |
| "loss": 0.483, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.16775396085740912, |
| "grad_norm": 0.4405332554716524, |
| "learning_rate": 2.795031055900621e-05, |
| "loss": 0.4759, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.1724137931034483, |
| "grad_norm": 0.4170098031366455, |
| "learning_rate": 2.8726708074534165e-05, |
| "loss": 0.4792, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.17707362534948742, |
| "grad_norm": 0.5595553852386751, |
| "learning_rate": 2.9503105590062114e-05, |
| "loss": 0.4891, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.18173345759552656, |
| "grad_norm": 0.6802692412088472, |
| "learning_rate": 3.0279503105590062e-05, |
| "loss": 0.4796, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.1863932898415657, |
| "grad_norm": 0.5052964875363417, |
| "learning_rate": 3.1055900621118014e-05, |
| "loss": 0.482, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.19105312208760486, |
| "grad_norm": 1.0027813070497937, |
| "learning_rate": 3.183229813664597e-05, |
| "loss": 0.471, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.195712954333644, |
| "grad_norm": 0.6324877868930993, |
| "learning_rate": 3.260869565217392e-05, |
| "loss": 0.484, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.20037278657968313, |
| "grad_norm": 0.5481580441751476, |
| "learning_rate": 3.3385093167701865e-05, |
| "loss": 0.4658, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.20503261882572227, |
| "grad_norm": 0.5572577049127609, |
| "learning_rate": 3.4161490683229814e-05, |
| "loss": 0.4701, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.2096924510717614, |
| "grad_norm": 0.5062995178514071, |
| "learning_rate": 3.493788819875777e-05, |
| "loss": 0.4521, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.21435228331780057, |
| "grad_norm": 0.49686277193314815, |
| "learning_rate": 3.571428571428572e-05, |
| "loss": 0.4654, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.2190121155638397, |
| "grad_norm": 0.4384456812240532, |
| "learning_rate": 3.6490683229813665e-05, |
| "loss": 0.4629, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.22367194780987884, |
| "grad_norm": 0.40181315075678486, |
| "learning_rate": 3.7267080745341614e-05, |
| "loss": 0.4604, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.22833178005591798, |
| "grad_norm": 0.4680413353176079, |
| "learning_rate": 3.804347826086957e-05, |
| "loss": 0.4658, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.23299161230195714, |
| "grad_norm": 0.4121568463339279, |
| "learning_rate": 3.881987577639752e-05, |
| "loss": 0.458, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.23765144454799628, |
| "grad_norm": 0.46257624459214514, |
| "learning_rate": 3.9596273291925465e-05, |
| "loss": 0.4664, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.2423112767940354, |
| "grad_norm": 0.4208718113851754, |
| "learning_rate": 4.0372670807453414e-05, |
| "loss": 0.4616, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.24697110904007455, |
| "grad_norm": 0.4920194796206926, |
| "learning_rate": 4.114906832298137e-05, |
| "loss": 0.4601, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.2516309412861137, |
| "grad_norm": 0.44517914619453425, |
| "learning_rate": 4.192546583850932e-05, |
| "loss": 0.4585, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.25629077353215285, |
| "grad_norm": 0.4272832211849931, |
| "learning_rate": 4.270186335403727e-05, |
| "loss": 0.4645, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.26095060577819196, |
| "grad_norm": 0.46232694776349703, |
| "learning_rate": 4.347826086956522e-05, |
| "loss": 0.4566, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.2656104380242311, |
| "grad_norm": 0.47318397108506427, |
| "learning_rate": 4.425465838509317e-05, |
| "loss": 0.4606, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.2702702702702703, |
| "grad_norm": 0.5203346361695503, |
| "learning_rate": 4.5031055900621124e-05, |
| "loss": 0.4627, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.2749301025163094, |
| "grad_norm": 0.6727002183656305, |
| "learning_rate": 4.580745341614907e-05, |
| "loss": 0.4669, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.27958993476234856, |
| "grad_norm": 0.7920690776017782, |
| "learning_rate": 4.658385093167702e-05, |
| "loss": 0.4696, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.2842497670083877, |
| "grad_norm": 0.5549969347516743, |
| "learning_rate": 4.736024844720497e-05, |
| "loss": 0.4577, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.28890959925442683, |
| "grad_norm": 0.49025638861057025, |
| "learning_rate": 4.8136645962732924e-05, |
| "loss": 0.4493, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.293569431500466, |
| "grad_norm": 0.48227724645400744, |
| "learning_rate": 4.891304347826087e-05, |
| "loss": 0.4576, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.2982292637465051, |
| "grad_norm": 0.6932156419493832, |
| "learning_rate": 4.968944099378882e-05, |
| "loss": 0.4429, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.30288909599254427, |
| "grad_norm": 0.5932544065676107, |
| "learning_rate": 4.994822229892993e-05, |
| "loss": 0.4406, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.30754892823858343, |
| "grad_norm": 0.44595884846414735, |
| "learning_rate": 4.986192613047981e-05, |
| "loss": 0.4514, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.31220876048462254, |
| "grad_norm": 0.7093678389168179, |
| "learning_rate": 4.977562996202969e-05, |
| "loss": 0.4522, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.3168685927306617, |
| "grad_norm": 0.3992271710082444, |
| "learning_rate": 4.968933379357957e-05, |
| "loss": 0.4527, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.32152842497670087, |
| "grad_norm": 0.4322675744616485, |
| "learning_rate": 4.9603037625129445e-05, |
| "loss": 0.4474, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.32618825722274, |
| "grad_norm": 0.4894749100842417, |
| "learning_rate": 4.951674145667933e-05, |
| "loss": 0.4611, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.33084808946877914, |
| "grad_norm": 0.45545284883251785, |
| "learning_rate": 4.94304452882292e-05, |
| "loss": 0.447, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.33550792171481825, |
| "grad_norm": 0.5018284119539658, |
| "learning_rate": 4.934414911977908e-05, |
| "loss": 0.4449, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.3401677539608574, |
| "grad_norm": 0.4101489952466933, |
| "learning_rate": 4.9257852951328965e-05, |
| "loss": 0.4411, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.3448275862068966, |
| "grad_norm": 0.4492613666448338, |
| "learning_rate": 4.917155678287884e-05, |
| "loss": 0.4542, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.3494874184529357, |
| "grad_norm": 0.5065139889539029, |
| "learning_rate": 4.908526061442872e-05, |
| "loss": 0.441, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.35414725069897485, |
| "grad_norm": 0.5142632297631841, |
| "learning_rate": 4.89989644459786e-05, |
| "loss": 0.4559, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.35880708294501396, |
| "grad_norm": 0.38956464528036483, |
| "learning_rate": 4.891266827752848e-05, |
| "loss": 0.4576, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.3634669151910531, |
| "grad_norm": 0.48099181069040514, |
| "learning_rate": 4.882637210907836e-05, |
| "loss": 0.4415, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.3681267474370923, |
| "grad_norm": 0.4527262541512366, |
| "learning_rate": 4.874007594062824e-05, |
| "loss": 0.4479, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.3727865796831314, |
| "grad_norm": 0.45728910187809607, |
| "learning_rate": 4.865377977217811e-05, |
| "loss": 0.4326, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.37744641192917056, |
| "grad_norm": 0.5325567837367734, |
| "learning_rate": 4.8567483603728e-05, |
| "loss": 0.4511, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.3821062441752097, |
| "grad_norm": 0.47530962989623887, |
| "learning_rate": 4.8481187435277875e-05, |
| "loss": 0.4472, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.38676607642124883, |
| "grad_norm": 0.5492535133414717, |
| "learning_rate": 4.839489126682776e-05, |
| "loss": 0.4335, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.391425908667288, |
| "grad_norm": 0.5374577291241773, |
| "learning_rate": 4.830859509837763e-05, |
| "loss": 0.4519, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.3960857409133271, |
| "grad_norm": 0.3671416917960664, |
| "learning_rate": 4.822229892992751e-05, |
| "loss": 0.4493, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.40074557315936626, |
| "grad_norm": 0.47641546071577745, |
| "learning_rate": 4.8136002761477395e-05, |
| "loss": 0.4363, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.40540540540540543, |
| "grad_norm": 0.5060064177084717, |
| "learning_rate": 4.804970659302727e-05, |
| "loss": 0.4493, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.41006523765144454, |
| "grad_norm": 0.4421952331981267, |
| "learning_rate": 4.796341042457715e-05, |
| "loss": 0.4438, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.4147250698974837, |
| "grad_norm": 0.4912038645027838, |
| "learning_rate": 4.787711425612703e-05, |
| "loss": 0.4381, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.4193849021435228, |
| "grad_norm": 0.49994262075500673, |
| "learning_rate": 4.779081808767691e-05, |
| "loss": 0.4287, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.424044734389562, |
| "grad_norm": 0.46833774296806413, |
| "learning_rate": 4.770452191922679e-05, |
| "loss": 0.4356, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.42870456663560114, |
| "grad_norm": 0.37814791573621137, |
| "learning_rate": 4.761822575077667e-05, |
| "loss": 0.4325, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.43336439888164024, |
| "grad_norm": 0.4979334392926385, |
| "learning_rate": 4.753192958232654e-05, |
| "loss": 0.4296, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.4380242311276794, |
| "grad_norm": 0.45595286465505486, |
| "learning_rate": 4.744563341387643e-05, |
| "loss": 0.4424, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.4426840633737186, |
| "grad_norm": 0.38484174975814034, |
| "learning_rate": 4.7359337245426306e-05, |
| "loss": 0.4317, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.4473438956197577, |
| "grad_norm": 0.5318530012205278, |
| "learning_rate": 4.7273041076976184e-05, |
| "loss": 0.4437, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.45200372786579684, |
| "grad_norm": 0.5500470128648078, |
| "learning_rate": 4.718674490852606e-05, |
| "loss": 0.4364, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.45666356011183595, |
| "grad_norm": 0.46022150094529285, |
| "learning_rate": 4.710044874007594e-05, |
| "loss": 0.4307, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.4613233923578751, |
| "grad_norm": 0.4453333531177356, |
| "learning_rate": 4.7014152571625826e-05, |
| "loss": 0.4366, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.4659832246039143, |
| "grad_norm": 0.5514063928798473, |
| "learning_rate": 4.6927856403175704e-05, |
| "loss": 0.4396, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.4706430568499534, |
| "grad_norm": 0.39385926743432437, |
| "learning_rate": 4.684156023472558e-05, |
| "loss": 0.4262, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.47530288909599255, |
| "grad_norm": 0.5044340425827932, |
| "learning_rate": 4.675526406627546e-05, |
| "loss": 0.4348, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.47996272134203166, |
| "grad_norm": 0.6595607098484944, |
| "learning_rate": 4.666896789782534e-05, |
| "loss": 0.4327, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.4846225535880708, |
| "grad_norm": 0.6115798965441747, |
| "learning_rate": 4.658267172937522e-05, |
| "loss": 0.4364, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.48928238583411, |
| "grad_norm": 0.44577176108357613, |
| "learning_rate": 4.64963755609251e-05, |
| "loss": 0.4352, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.4939422180801491, |
| "grad_norm": 0.5381399519172929, |
| "learning_rate": 4.641007939247497e-05, |
| "loss": 0.4323, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.49860205032618826, |
| "grad_norm": 0.37136037243594944, |
| "learning_rate": 4.632378322402486e-05, |
| "loss": 0.4325, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.5032618825722274, |
| "grad_norm": 0.3892034994234061, |
| "learning_rate": 4.6237487055574736e-05, |
| "loss": 0.4225, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.5079217148182665, |
| "grad_norm": 0.49386732668387484, |
| "learning_rate": 4.6151190887124615e-05, |
| "loss": 0.4287, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.5125815470643057, |
| "grad_norm": 0.4168272091126582, |
| "learning_rate": 4.606489471867449e-05, |
| "loss": 0.4289, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.5172413793103449, |
| "grad_norm": 0.3582043405694146, |
| "learning_rate": 4.597859855022437e-05, |
| "loss": 0.4231, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.5219012115563839, |
| "grad_norm": 0.41696696369770525, |
| "learning_rate": 4.589230238177425e-05, |
| "loss": 0.4443, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.5265610438024231, |
| "grad_norm": 0.4861469903998086, |
| "learning_rate": 4.5806006213324134e-05, |
| "loss": 0.4216, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.5312208760484622, |
| "grad_norm": 0.5140354855395411, |
| "learning_rate": 4.5719710044874006e-05, |
| "loss": 0.4214, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.5358807082945014, |
| "grad_norm": 0.41866668365773924, |
| "learning_rate": 4.563341387642389e-05, |
| "loss": 0.4276, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.5405405405405406, |
| "grad_norm": 0.3584845668041098, |
| "learning_rate": 4.554711770797377e-05, |
| "loss": 0.4275, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.5452003727865797, |
| "grad_norm": 0.49174805356075396, |
| "learning_rate": 4.546082153952365e-05, |
| "loss": 0.4246, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.5498602050326188, |
| "grad_norm": 0.41185105098746255, |
| "learning_rate": 4.5374525371073526e-05, |
| "loss": 0.421, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.554520037278658, |
| "grad_norm": 0.36837391509005896, |
| "learning_rate": 4.5288229202623404e-05, |
| "loss": 0.423, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.5591798695246971, |
| "grad_norm": 0.38520965444801625, |
| "learning_rate": 4.520193303417328e-05, |
| "loss": 0.4233, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.5638397017707363, |
| "grad_norm": 0.36649198023585905, |
| "learning_rate": 4.511563686572317e-05, |
| "loss": 0.4219, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.5684995340167754, |
| "grad_norm": 0.4164679504509725, |
| "learning_rate": 4.5029340697273045e-05, |
| "loss": 0.433, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.5731593662628145, |
| "grad_norm": 0.3214873987463138, |
| "learning_rate": 4.4943044528822923e-05, |
| "loss": 0.4197, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.5778191985088537, |
| "grad_norm": 0.44600645286913126, |
| "learning_rate": 4.48567483603728e-05, |
| "loss": 0.4168, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.5824790307548928, |
| "grad_norm": 0.37318395537297955, |
| "learning_rate": 4.477045219192268e-05, |
| "loss": 0.4385, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.587138863000932, |
| "grad_norm": 0.3604844647747845, |
| "learning_rate": 4.4684156023472565e-05, |
| "loss": 0.429, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.5917986952469712, |
| "grad_norm": 0.49391135959870375, |
| "learning_rate": 4.4597859855022436e-05, |
| "loss": 0.4336, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.5964585274930102, |
| "grad_norm": 0.5003007414297862, |
| "learning_rate": 4.4511563686572315e-05, |
| "loss": 0.4244, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.6011183597390494, |
| "grad_norm": 0.4136262369903074, |
| "learning_rate": 4.44252675181222e-05, |
| "loss": 0.4239, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.6057781919850885, |
| "grad_norm": 0.4100037093770799, |
| "learning_rate": 4.433897134967208e-05, |
| "loss": 0.4329, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.6104380242311277, |
| "grad_norm": 0.40473278171973515, |
| "learning_rate": 4.4252675181221956e-05, |
| "loss": 0.4302, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.6150978564771669, |
| "grad_norm": 0.3581991814954745, |
| "learning_rate": 4.4166379012771834e-05, |
| "loss": 0.4122, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.6197576887232059, |
| "grad_norm": 0.4380544935583559, |
| "learning_rate": 4.408008284432171e-05, |
| "loss": 0.4127, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.6244175209692451, |
| "grad_norm": 0.4100795719599541, |
| "learning_rate": 4.39937866758716e-05, |
| "loss": 0.4115, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.6290773532152842, |
| "grad_norm": 0.40367865374562034, |
| "learning_rate": 4.3907490507421476e-05, |
| "loss": 0.4201, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.6337371854613234, |
| "grad_norm": 0.4255661108043704, |
| "learning_rate": 4.382119433897135e-05, |
| "loss": 0.4232, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.6383970177073626, |
| "grad_norm": 0.43799906091812546, |
| "learning_rate": 4.373489817052123e-05, |
| "loss": 0.4326, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.6430568499534017, |
| "grad_norm": 0.4582804945285276, |
| "learning_rate": 4.364860200207111e-05, |
| "loss": 0.4147, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.6477166821994408, |
| "grad_norm": 0.3745759468060015, |
| "learning_rate": 4.356230583362099e-05, |
| "loss": 0.4199, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.65237651444548, |
| "grad_norm": 0.35894331983296773, |
| "learning_rate": 4.347600966517087e-05, |
| "loss": 0.4283, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.6570363466915191, |
| "grad_norm": 0.4107553298840247, |
| "learning_rate": 4.3389713496720745e-05, |
| "loss": 0.4219, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.6616961789375583, |
| "grad_norm": 0.4827554934119169, |
| "learning_rate": 4.330341732827063e-05, |
| "loss": 0.4225, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.6663560111835974, |
| "grad_norm": 0.3805082888445549, |
| "learning_rate": 4.321712115982051e-05, |
| "loss": 0.411, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.6710158434296365, |
| "grad_norm": 0.3764900820609077, |
| "learning_rate": 4.3130824991370387e-05, |
| "loss": 0.4214, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.6756756756756757, |
| "grad_norm": 0.3457535674984788, |
| "learning_rate": 4.3044528822920265e-05, |
| "loss": 0.42, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.6803355079217148, |
| "grad_norm": 0.44229272421355714, |
| "learning_rate": 4.295823265447014e-05, |
| "loss": 0.415, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.684995340167754, |
| "grad_norm": 0.3405071649598411, |
| "learning_rate": 4.287193648602002e-05, |
| "loss": 0.4188, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.6896551724137931, |
| "grad_norm": 0.3420276540344356, |
| "learning_rate": 4.27856403175699e-05, |
| "loss": 0.4125, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.6943150046598322, |
| "grad_norm": 0.3714271443540526, |
| "learning_rate": 4.269934414911978e-05, |
| "loss": 0.4082, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.6989748369058714, |
| "grad_norm": 0.4440957065820136, |
| "learning_rate": 4.261304798066966e-05, |
| "loss": 0.4132, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.7036346691519105, |
| "grad_norm": 0.38502725849051433, |
| "learning_rate": 4.252675181221954e-05, |
| "loss": 0.4155, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.7082945013979497, |
| "grad_norm": 0.4101980956792011, |
| "learning_rate": 4.244045564376942e-05, |
| "loss": 0.4187, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.7129543336439889, |
| "grad_norm": 0.3949116271091045, |
| "learning_rate": 4.23541594753193e-05, |
| "loss": 0.4027, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.7176141658900279, |
| "grad_norm": 0.3778946135235895, |
| "learning_rate": 4.2267863306869176e-05, |
| "loss": 0.4117, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.7222739981360671, |
| "grad_norm": 0.3840085266541027, |
| "learning_rate": 4.2181567138419054e-05, |
| "loss": 0.4244, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.7269338303821062, |
| "grad_norm": 0.35414793238950554, |
| "learning_rate": 4.209527096996894e-05, |
| "loss": 0.409, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.7315936626281454, |
| "grad_norm": 0.4017361351425298, |
| "learning_rate": 4.200897480151881e-05, |
| "loss": 0.4161, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.7362534948741846, |
| "grad_norm": 0.4577917995677147, |
| "learning_rate": 4.1922678633068695e-05, |
| "loss": 0.4194, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.7409133271202236, |
| "grad_norm": 0.4449028327643515, |
| "learning_rate": 4.1836382464618573e-05, |
| "loss": 0.4139, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.7455731593662628, |
| "grad_norm": 0.4278118467092459, |
| "learning_rate": 4.175008629616845e-05, |
| "loss": 0.4119, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.750232991612302, |
| "grad_norm": 0.41286072521941974, |
| "learning_rate": 4.166379012771833e-05, |
| "loss": 0.4118, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.7548928238583411, |
| "grad_norm": 0.36674422319093963, |
| "learning_rate": 4.157749395926821e-05, |
| "loss": 0.4301, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.7595526561043803, |
| "grad_norm": 0.3466794250376395, |
| "learning_rate": 4.1491197790818086e-05, |
| "loss": 0.4043, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.7642124883504194, |
| "grad_norm": 0.5419218965239389, |
| "learning_rate": 4.140490162236797e-05, |
| "loss": 0.4199, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.7688723205964585, |
| "grad_norm": 0.4446206400997879, |
| "learning_rate": 4.131860545391785e-05, |
| "loss": 0.4119, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.7735321528424977, |
| "grad_norm": 0.3679372318535966, |
| "learning_rate": 4.123230928546773e-05, |
| "loss": 0.4067, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.7781919850885368, |
| "grad_norm": 0.3883744139143354, |
| "learning_rate": 4.1146013117017606e-05, |
| "loss": 0.4139, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.782851817334576, |
| "grad_norm": 0.4406909830090548, |
| "learning_rate": 4.1059716948567484e-05, |
| "loss": 0.4169, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.7875116495806151, |
| "grad_norm": 0.46114038026303233, |
| "learning_rate": 4.097342078011737e-05, |
| "loss": 0.4249, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.7921714818266542, |
| "grad_norm": 0.3915360251493529, |
| "learning_rate": 4.088712461166724e-05, |
| "loss": 0.4145, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.7968313140726934, |
| "grad_norm": 0.5167680703260195, |
| "learning_rate": 4.080082844321712e-05, |
| "loss": 0.4166, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.8014911463187325, |
| "grad_norm": 0.3754422426152857, |
| "learning_rate": 4.0714532274767004e-05, |
| "loss": 0.4234, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.8061509785647717, |
| "grad_norm": 0.3476171939553642, |
| "learning_rate": 4.062823610631688e-05, |
| "loss": 0.4044, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.8108108108108109, |
| "grad_norm": 0.362219475724925, |
| "learning_rate": 4.054193993786676e-05, |
| "loss": 0.4189, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.8154706430568499, |
| "grad_norm": 0.3820875006288019, |
| "learning_rate": 4.045564376941664e-05, |
| "loss": 0.4145, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.8201304753028891, |
| "grad_norm": 0.5867270655165092, |
| "learning_rate": 4.036934760096652e-05, |
| "loss": 0.4057, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.8247903075489282, |
| "grad_norm": 0.4109568389025995, |
| "learning_rate": 4.02830514325164e-05, |
| "loss": 0.42, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.8294501397949674, |
| "grad_norm": 0.4342697112855388, |
| "learning_rate": 4.019675526406628e-05, |
| "loss": 0.4065, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.8341099720410066, |
| "grad_norm": 0.3703770039740134, |
| "learning_rate": 4.011045909561615e-05, |
| "loss": 0.426, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.8387698042870456, |
| "grad_norm": 0.44291526775558954, |
| "learning_rate": 4.0024162927166037e-05, |
| "loss": 0.4087, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.8434296365330848, |
| "grad_norm": 0.33257023554753873, |
| "learning_rate": 3.9937866758715915e-05, |
| "loss": 0.4104, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.848089468779124, |
| "grad_norm": 0.5210603832824299, |
| "learning_rate": 3.98515705902658e-05, |
| "loss": 0.4164, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.8527493010251631, |
| "grad_norm": 0.607238634824021, |
| "learning_rate": 3.976527442181567e-05, |
| "loss": 0.4094, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.8574091332712023, |
| "grad_norm": 0.4484844822862872, |
| "learning_rate": 3.967897825336555e-05, |
| "loss": 0.4157, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.8620689655172413, |
| "grad_norm": 0.3688615067553498, |
| "learning_rate": 3.9592682084915434e-05, |
| "loss": 0.4029, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.8667287977632805, |
| "grad_norm": 0.5024805631475334, |
| "learning_rate": 3.950638591646531e-05, |
| "loss": 0.4017, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.8713886300093197, |
| "grad_norm": 0.382016518381038, |
| "learning_rate": 3.942008974801519e-05, |
| "loss": 0.4135, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.8760484622553588, |
| "grad_norm": 0.30605957215975665, |
| "learning_rate": 3.933379357956507e-05, |
| "loss": 0.4091, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.880708294501398, |
| "grad_norm": 0.3877784507885669, |
| "learning_rate": 3.924749741111495e-05, |
| "loss": 0.4113, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.8853681267474371, |
| "grad_norm": 0.48733905055236465, |
| "learning_rate": 3.916120124266483e-05, |
| "loss": 0.4191, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.8900279589934762, |
| "grad_norm": 0.43034903356385523, |
| "learning_rate": 3.9074905074214704e-05, |
| "loss": 0.4182, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.8946877912395154, |
| "grad_norm": 0.3390149226125637, |
| "learning_rate": 3.898860890576458e-05, |
| "loss": 0.4134, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.8993476234855545, |
| "grad_norm": 0.3977054990731856, |
| "learning_rate": 3.890231273731447e-05, |
| "loss": 0.405, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.9040074557315937, |
| "grad_norm": 0.31681352931839235, |
| "learning_rate": 3.8816016568864345e-05, |
| "loss": 0.4028, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.9086672879776329, |
| "grad_norm": 0.32371549087627044, |
| "learning_rate": 3.8729720400414224e-05, |
| "loss": 0.4215, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.9133271202236719, |
| "grad_norm": 0.404454156562585, |
| "learning_rate": 3.86434242319641e-05, |
| "loss": 0.4088, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.9179869524697111, |
| "grad_norm": 0.5071586018047269, |
| "learning_rate": 3.855712806351398e-05, |
| "loss": 0.4138, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.9226467847157502, |
| "grad_norm": 0.4612419638937902, |
| "learning_rate": 3.8470831895063865e-05, |
| "loss": 0.4084, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.9273066169617894, |
| "grad_norm": 0.38416516599213146, |
| "learning_rate": 3.838453572661374e-05, |
| "loss": 0.4041, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.9319664492078286, |
| "grad_norm": 0.44592015311937055, |
| "learning_rate": 3.8298239558163615e-05, |
| "loss": 0.4072, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.9366262814538676, |
| "grad_norm": 0.39712989940263566, |
| "learning_rate": 3.82119433897135e-05, |
| "loss": 0.412, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.9412861136999068, |
| "grad_norm": 0.38203797576686194, |
| "learning_rate": 3.812564722126338e-05, |
| "loss": 0.4043, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.9459459459459459, |
| "grad_norm": 0.424117954981762, |
| "learning_rate": 3.8039351052813256e-05, |
| "loss": 0.4128, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.9506057781919851, |
| "grad_norm": 0.42226979399269843, |
| "learning_rate": 3.7953054884363134e-05, |
| "loss": 0.4064, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.9552656104380243, |
| "grad_norm": 0.36275341625519136, |
| "learning_rate": 3.786675871591301e-05, |
| "loss": 0.41, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.9599254426840633, |
| "grad_norm": 0.2989486198553155, |
| "learning_rate": 3.77804625474629e-05, |
| "loss": 0.415, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.9645852749301025, |
| "grad_norm": 0.4490357688676246, |
| "learning_rate": 3.7694166379012776e-05, |
| "loss": 0.4075, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.9692451071761417, |
| "grad_norm": 0.4117148830526542, |
| "learning_rate": 3.7607870210562654e-05, |
| "loss": 0.405, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.9739049394221808, |
| "grad_norm": 0.3029562636088919, |
| "learning_rate": 3.752157404211253e-05, |
| "loss": 0.3974, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.97856477166822, |
| "grad_norm": 0.45571770500867936, |
| "learning_rate": 3.743527787366241e-05, |
| "loss": 0.4274, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.983224603914259, |
| "grad_norm": 0.3893154421399455, |
| "learning_rate": 3.734898170521229e-05, |
| "loss": 0.4046, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.9878844361602982, |
| "grad_norm": 0.4126071992656825, |
| "learning_rate": 3.7262685536762174e-05, |
| "loss": 0.4039, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.9925442684063374, |
| "grad_norm": 0.3618208759302852, |
| "learning_rate": 3.7176389368312045e-05, |
| "loss": 0.4085, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.9972041006523765, |
| "grad_norm": 0.37089340832251017, |
| "learning_rate": 3.709009319986193e-05, |
| "loss": 0.4083, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.0018639328984156, |
| "grad_norm": 0.3466767328048872, |
| "learning_rate": 3.700379703141181e-05, |
| "loss": 0.3726, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.0065237651444547, |
| "grad_norm": 0.32629710042784293, |
| "learning_rate": 3.6917500862961687e-05, |
| "loss": 0.3473, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.011183597390494, |
| "grad_norm": 0.4255519477332497, |
| "learning_rate": 3.6831204694511565e-05, |
| "loss": 0.3538, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.015843429636533, |
| "grad_norm": 0.37257752212763834, |
| "learning_rate": 3.674490852606144e-05, |
| "loss": 0.3605, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.0205032618825722, |
| "grad_norm": 0.3188706073756023, |
| "learning_rate": 3.665861235761132e-05, |
| "loss": 0.3462, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.0251630941286114, |
| "grad_norm": 0.3323491863961747, |
| "learning_rate": 3.6572316189161206e-05, |
| "loss": 0.3373, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.0298229263746506, |
| "grad_norm": 0.3147904606802217, |
| "learning_rate": 3.6486020020711085e-05, |
| "loss": 0.3435, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.0344827586206897, |
| "grad_norm": 0.31148716928303666, |
| "learning_rate": 3.639972385226096e-05, |
| "loss": 0.3444, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.0391425908667289, |
| "grad_norm": 0.3115874216444653, |
| "learning_rate": 3.631342768381084e-05, |
| "loss": 0.3439, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.0438024231127678, |
| "grad_norm": 0.3850573772968161, |
| "learning_rate": 3.622713151536072e-05, |
| "loss": 0.3445, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.048462255358807, |
| "grad_norm": 0.331203932647263, |
| "learning_rate": 3.6140835346910604e-05, |
| "loss": 0.3479, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.0531220876048462, |
| "grad_norm": 0.3247246142084058, |
| "learning_rate": 3.6054539178460476e-05, |
| "loss": 0.3442, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.0577819198508853, |
| "grad_norm": 0.39344991446138056, |
| "learning_rate": 3.5968243010010354e-05, |
| "loss": 0.3609, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.0624417520969245, |
| "grad_norm": 0.3313029115877748, |
| "learning_rate": 3.588194684156024e-05, |
| "loss": 0.3519, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.0671015843429636, |
| "grad_norm": 0.3505328862414304, |
| "learning_rate": 3.579565067311012e-05, |
| "loss": 0.3432, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.0717614165890028, |
| "grad_norm": 0.30763353170600233, |
| "learning_rate": 3.5709354504659995e-05, |
| "loss": 0.3397, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.076421248835042, |
| "grad_norm": 0.33720507780205905, |
| "learning_rate": 3.5623058336209874e-05, |
| "loss": 0.3508, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.0810810810810811, |
| "grad_norm": 0.29987986518950155, |
| "learning_rate": 3.553676216775975e-05, |
| "loss": 0.342, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.0857409133271203, |
| "grad_norm": 0.2918824556630535, |
| "learning_rate": 3.545046599930964e-05, |
| "loss": 0.354, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.0904007455731595, |
| "grad_norm": 0.28884108622026256, |
| "learning_rate": 3.536416983085951e-05, |
| "loss": 0.3555, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.0950605778191984, |
| "grad_norm": 0.3492325417202242, |
| "learning_rate": 3.5277873662409386e-05, |
| "loss": 0.3419, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.0997204100652376, |
| "grad_norm": 0.3374356989327302, |
| "learning_rate": 3.519157749395927e-05, |
| "loss": 0.3479, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.1043802423112767, |
| "grad_norm": 0.40694167907836565, |
| "learning_rate": 3.510528132550915e-05, |
| "loss": 0.345, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.109040074557316, |
| "grad_norm": 0.29634913375582494, |
| "learning_rate": 3.501898515705903e-05, |
| "loss": 0.347, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.113699906803355, |
| "grad_norm": 0.33725135215696544, |
| "learning_rate": 3.4932688988608906e-05, |
| "loss": 0.3584, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.1183597390493942, |
| "grad_norm": 0.36441882351153176, |
| "learning_rate": 3.4846392820158784e-05, |
| "loss": 0.3475, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.1230195712954334, |
| "grad_norm": 0.43771868179170575, |
| "learning_rate": 3.476009665170867e-05, |
| "loss": 0.3476, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.1276794035414726, |
| "grad_norm": 0.36715913285296337, |
| "learning_rate": 3.467380048325855e-05, |
| "loss": 0.3456, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.1323392357875117, |
| "grad_norm": 0.3210278084887568, |
| "learning_rate": 3.458750431480842e-05, |
| "loss": 0.3528, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.1369990680335509, |
| "grad_norm": 0.40978152129347956, |
| "learning_rate": 3.4501208146358304e-05, |
| "loss": 0.3517, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.14165890027959, |
| "grad_norm": 0.3501037455697221, |
| "learning_rate": 3.441491197790818e-05, |
| "loss": 0.3474, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.146318732525629, |
| "grad_norm": 0.2949808119143393, |
| "learning_rate": 3.432861580945806e-05, |
| "loss": 0.3487, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.1509785647716682, |
| "grad_norm": 0.33537243519091453, |
| "learning_rate": 3.424231964100794e-05, |
| "loss": 0.348, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.1556383970177073, |
| "grad_norm": 0.34215531389067516, |
| "learning_rate": 3.415602347255782e-05, |
| "loss": 0.3465, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.1602982292637465, |
| "grad_norm": 0.28579102346746615, |
| "learning_rate": 3.40697273041077e-05, |
| "loss": 0.3535, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.1649580615097856, |
| "grad_norm": 0.38985713423130514, |
| "learning_rate": 3.398343113565758e-05, |
| "loss": 0.3476, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.1696178937558248, |
| "grad_norm": 0.2757491186716454, |
| "learning_rate": 3.389713496720746e-05, |
| "loss": 0.3265, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.174277726001864, |
| "grad_norm": 0.26622935533794223, |
| "learning_rate": 3.381083879875734e-05, |
| "loss": 0.3463, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.1789375582479031, |
| "grad_norm": 0.2994815102975485, |
| "learning_rate": 3.3724542630307215e-05, |
| "loss": 0.3429, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.1835973904939423, |
| "grad_norm": 0.30900186681072417, |
| "learning_rate": 3.363824646185709e-05, |
| "loss": 0.3522, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.1882572227399812, |
| "grad_norm": 0.28156243744828824, |
| "learning_rate": 3.355195029340698e-05, |
| "loss": 0.3458, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.1929170549860204, |
| "grad_norm": 0.34048483109754407, |
| "learning_rate": 3.346565412495685e-05, |
| "loss": 0.3567, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.1975768872320596, |
| "grad_norm": 0.3482879519870774, |
| "learning_rate": 3.3379357956506735e-05, |
| "loss": 0.3578, |
| "step": 1285 |
| }, |
| { |
| "epoch": 1.2022367194780987, |
| "grad_norm": 0.3433620665119787, |
| "learning_rate": 3.329306178805661e-05, |
| "loss": 0.3408, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.206896551724138, |
| "grad_norm": 0.33173956917096376, |
| "learning_rate": 3.320676561960649e-05, |
| "loss": 0.3456, |
| "step": 1295 |
| }, |
| { |
| "epoch": 1.211556383970177, |
| "grad_norm": 0.4271653931916803, |
| "learning_rate": 3.312046945115637e-05, |
| "loss": 0.3561, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.2162162162162162, |
| "grad_norm": 0.38709260099877685, |
| "learning_rate": 3.303417328270625e-05, |
| "loss": 0.3527, |
| "step": 1305 |
| }, |
| { |
| "epoch": 1.2208760484622554, |
| "grad_norm": 0.29041529382376463, |
| "learning_rate": 3.2947877114256126e-05, |
| "loss": 0.3422, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.2255358807082946, |
| "grad_norm": 0.31574510099421743, |
| "learning_rate": 3.286158094580601e-05, |
| "loss": 0.3619, |
| "step": 1315 |
| }, |
| { |
| "epoch": 1.2301957129543337, |
| "grad_norm": 0.2779026955554108, |
| "learning_rate": 3.277528477735589e-05, |
| "loss": 0.348, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.2348555452003729, |
| "grad_norm": 0.3117138832842607, |
| "learning_rate": 3.268898860890577e-05, |
| "loss": 0.3548, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.2395153774464118, |
| "grad_norm": 0.34556214123661766, |
| "learning_rate": 3.2602692440455645e-05, |
| "loss": 0.3427, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.244175209692451, |
| "grad_norm": 0.3207908460587471, |
| "learning_rate": 3.2516396272005524e-05, |
| "loss": 0.3457, |
| "step": 1335 |
| }, |
| { |
| "epoch": 1.2488350419384902, |
| "grad_norm": 0.4579638822801661, |
| "learning_rate": 3.243010010355541e-05, |
| "loss": 0.3511, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.2534948741845293, |
| "grad_norm": 0.3706933498980916, |
| "learning_rate": 3.234380393510528e-05, |
| "loss": 0.3385, |
| "step": 1345 |
| }, |
| { |
| "epoch": 1.2581547064305685, |
| "grad_norm": 0.28751428494705217, |
| "learning_rate": 3.225750776665516e-05, |
| "loss": 0.3454, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.2628145386766076, |
| "grad_norm": 0.29088848949940943, |
| "learning_rate": 3.217121159820504e-05, |
| "loss": 0.3554, |
| "step": 1355 |
| }, |
| { |
| "epoch": 1.2674743709226468, |
| "grad_norm": 0.32043873791461, |
| "learning_rate": 3.208491542975492e-05, |
| "loss": 0.3448, |
| "step": 1360 |
| }, |
| { |
| "epoch": 1.272134203168686, |
| "grad_norm": 0.32135003427013953, |
| "learning_rate": 3.19986192613048e-05, |
| "loss": 0.3504, |
| "step": 1365 |
| }, |
| { |
| "epoch": 1.2767940354147251, |
| "grad_norm": 0.34836051639597065, |
| "learning_rate": 3.191232309285468e-05, |
| "loss": 0.3491, |
| "step": 1370 |
| }, |
| { |
| "epoch": 1.281453867660764, |
| "grad_norm": 0.3083433720315085, |
| "learning_rate": 3.1826026924404556e-05, |
| "loss": 0.339, |
| "step": 1375 |
| }, |
| { |
| "epoch": 1.2861136999068035, |
| "grad_norm": 0.3294061695691229, |
| "learning_rate": 3.173973075595444e-05, |
| "loss": 0.3536, |
| "step": 1380 |
| }, |
| { |
| "epoch": 1.2907735321528424, |
| "grad_norm": 0.3243247321823703, |
| "learning_rate": 3.165343458750431e-05, |
| "loss": 0.3529, |
| "step": 1385 |
| }, |
| { |
| "epoch": 1.2954333643988816, |
| "grad_norm": 0.34216987102113755, |
| "learning_rate": 3.156713841905419e-05, |
| "loss": 0.3455, |
| "step": 1390 |
| }, |
| { |
| "epoch": 1.3000931966449207, |
| "grad_norm": 0.2719200783387942, |
| "learning_rate": 3.1480842250604076e-05, |
| "loss": 0.3494, |
| "step": 1395 |
| }, |
| { |
| "epoch": 1.30475302889096, |
| "grad_norm": 0.3505609559139422, |
| "learning_rate": 3.1394546082153954e-05, |
| "loss": 0.3437, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.309412861136999, |
| "grad_norm": 0.33365123410554454, |
| "learning_rate": 3.130824991370383e-05, |
| "loss": 0.3544, |
| "step": 1405 |
| }, |
| { |
| "epoch": 1.3140726933830382, |
| "grad_norm": 0.3189436936052233, |
| "learning_rate": 3.122195374525371e-05, |
| "loss": 0.3424, |
| "step": 1410 |
| }, |
| { |
| "epoch": 1.3187325256290774, |
| "grad_norm": 0.2863006966606347, |
| "learning_rate": 3.113565757680359e-05, |
| "loss": 0.3503, |
| "step": 1415 |
| }, |
| { |
| "epoch": 1.3233923578751166, |
| "grad_norm": 0.28826221521063, |
| "learning_rate": 3.1049361408353474e-05, |
| "loss": 0.3376, |
| "step": 1420 |
| }, |
| { |
| "epoch": 1.3280521901211557, |
| "grad_norm": 0.24894240427324923, |
| "learning_rate": 3.096306523990335e-05, |
| "loss": 0.3455, |
| "step": 1425 |
| }, |
| { |
| "epoch": 1.3327120223671947, |
| "grad_norm": 0.27568016054738054, |
| "learning_rate": 3.0876769071453223e-05, |
| "loss": 0.3541, |
| "step": 1430 |
| }, |
| { |
| "epoch": 1.337371854613234, |
| "grad_norm": 0.35064905716882083, |
| "learning_rate": 3.079047290300311e-05, |
| "loss": 0.3519, |
| "step": 1435 |
| }, |
| { |
| "epoch": 1.342031686859273, |
| "grad_norm": 0.3212183182772905, |
| "learning_rate": 3.070417673455299e-05, |
| "loss": 0.3505, |
| "step": 1440 |
| }, |
| { |
| "epoch": 1.3466915191053122, |
| "grad_norm": 0.28916536258878456, |
| "learning_rate": 3.061788056610287e-05, |
| "loss": 0.3553, |
| "step": 1445 |
| }, |
| { |
| "epoch": 1.3513513513513513, |
| "grad_norm": 0.3235421441470868, |
| "learning_rate": 3.053158439765274e-05, |
| "loss": 0.3411, |
| "step": 1450 |
| }, |
| { |
| "epoch": 1.3560111835973905, |
| "grad_norm": 0.2968602289573508, |
| "learning_rate": 3.0445288229202625e-05, |
| "loss": 0.3488, |
| "step": 1455 |
| }, |
| { |
| "epoch": 1.3606710158434296, |
| "grad_norm": 0.3184864286908833, |
| "learning_rate": 3.0358992060752506e-05, |
| "loss": 0.3548, |
| "step": 1460 |
| }, |
| { |
| "epoch": 1.3653308480894688, |
| "grad_norm": 0.3229863708803619, |
| "learning_rate": 3.027269589230238e-05, |
| "loss": 0.3487, |
| "step": 1465 |
| }, |
| { |
| "epoch": 1.369990680335508, |
| "grad_norm": 0.3221853882096656, |
| "learning_rate": 3.018639972385226e-05, |
| "loss": 0.3463, |
| "step": 1470 |
| }, |
| { |
| "epoch": 1.3746505125815471, |
| "grad_norm": 0.36492627157017027, |
| "learning_rate": 3.010010355540214e-05, |
| "loss": 0.3479, |
| "step": 1475 |
| }, |
| { |
| "epoch": 1.3793103448275863, |
| "grad_norm": 0.26851057138257656, |
| "learning_rate": 3.001380738695202e-05, |
| "loss": 0.3317, |
| "step": 1480 |
| }, |
| { |
| "epoch": 1.3839701770736252, |
| "grad_norm": 0.2833599124258928, |
| "learning_rate": 2.99275112185019e-05, |
| "loss": 0.349, |
| "step": 1485 |
| }, |
| { |
| "epoch": 1.3886300093196646, |
| "grad_norm": 0.3540057666942934, |
| "learning_rate": 2.984121505005178e-05, |
| "loss": 0.3541, |
| "step": 1490 |
| }, |
| { |
| "epoch": 1.3932898415657036, |
| "grad_norm": 0.37206165635102434, |
| "learning_rate": 2.9754918881601657e-05, |
| "loss": 0.3625, |
| "step": 1495 |
| }, |
| { |
| "epoch": 1.3979496738117427, |
| "grad_norm": 0.3421043062141437, |
| "learning_rate": 2.966862271315154e-05, |
| "loss": 0.3588, |
| "step": 1500 |
| }, |
| { |
| "epoch": 1.402609506057782, |
| "grad_norm": 0.3495424193811505, |
| "learning_rate": 2.9582326544701417e-05, |
| "loss": 0.3531, |
| "step": 1505 |
| }, |
| { |
| "epoch": 1.407269338303821, |
| "grad_norm": 0.30714455983322153, |
| "learning_rate": 2.9496030376251292e-05, |
| "loss": 0.347, |
| "step": 1510 |
| }, |
| { |
| "epoch": 1.4119291705498602, |
| "grad_norm": 0.271764182801288, |
| "learning_rate": 2.9409734207801177e-05, |
| "loss": 0.3504, |
| "step": 1515 |
| }, |
| { |
| "epoch": 1.4165890027958994, |
| "grad_norm": 0.345977791976646, |
| "learning_rate": 2.9323438039351052e-05, |
| "loss": 0.35, |
| "step": 1520 |
| }, |
| { |
| "epoch": 1.4212488350419386, |
| "grad_norm": 0.3090209854200775, |
| "learning_rate": 2.9237141870900937e-05, |
| "loss": 0.3457, |
| "step": 1525 |
| }, |
| { |
| "epoch": 1.4259086672879777, |
| "grad_norm": 0.2902474757739303, |
| "learning_rate": 2.9150845702450812e-05, |
| "loss": 0.3448, |
| "step": 1530 |
| }, |
| { |
| "epoch": 1.4305684995340169, |
| "grad_norm": 0.32236612838715284, |
| "learning_rate": 2.906454953400069e-05, |
| "loss": 0.3497, |
| "step": 1535 |
| }, |
| { |
| "epoch": 1.4352283317800558, |
| "grad_norm": 0.32048885565576973, |
| "learning_rate": 2.897825336555057e-05, |
| "loss": 0.346, |
| "step": 1540 |
| }, |
| { |
| "epoch": 1.439888164026095, |
| "grad_norm": 0.36763831382668277, |
| "learning_rate": 2.889195719710045e-05, |
| "loss": 0.3462, |
| "step": 1545 |
| }, |
| { |
| "epoch": 1.4445479962721341, |
| "grad_norm": 0.3667395673175459, |
| "learning_rate": 2.8805661028650328e-05, |
| "loss": 0.3426, |
| "step": 1550 |
| }, |
| { |
| "epoch": 1.4492078285181733, |
| "grad_norm": 0.43244599196751526, |
| "learning_rate": 2.871936486020021e-05, |
| "loss": 0.3398, |
| "step": 1555 |
| }, |
| { |
| "epoch": 1.4538676607642125, |
| "grad_norm": 0.32356353846643177, |
| "learning_rate": 2.8633068691750088e-05, |
| "loss": 0.3425, |
| "step": 1560 |
| }, |
| { |
| "epoch": 1.4585274930102516, |
| "grad_norm": 0.3412037069282531, |
| "learning_rate": 2.854677252329997e-05, |
| "loss": 0.3451, |
| "step": 1565 |
| }, |
| { |
| "epoch": 1.4631873252562908, |
| "grad_norm": 0.3590171697330432, |
| "learning_rate": 2.8460476354849848e-05, |
| "loss": 0.3524, |
| "step": 1570 |
| }, |
| { |
| "epoch": 1.46784715750233, |
| "grad_norm": 0.3940544933936819, |
| "learning_rate": 2.8374180186399723e-05, |
| "loss": 0.3519, |
| "step": 1575 |
| }, |
| { |
| "epoch": 1.4725069897483691, |
| "grad_norm": 0.3182855473229679, |
| "learning_rate": 2.8287884017949608e-05, |
| "loss": 0.3533, |
| "step": 1580 |
| }, |
| { |
| "epoch": 1.477166821994408, |
| "grad_norm": 0.3568443752239294, |
| "learning_rate": 2.8201587849499482e-05, |
| "loss": 0.346, |
| "step": 1585 |
| }, |
| { |
| "epoch": 1.4818266542404475, |
| "grad_norm": 0.34974506261722194, |
| "learning_rate": 2.811529168104936e-05, |
| "loss": 0.3564, |
| "step": 1590 |
| }, |
| { |
| "epoch": 1.4864864864864864, |
| "grad_norm": 0.31142218614639605, |
| "learning_rate": 2.8028995512599242e-05, |
| "loss": 0.3473, |
| "step": 1595 |
| }, |
| { |
| "epoch": 1.4911463187325256, |
| "grad_norm": 0.3634657161601137, |
| "learning_rate": 2.794269934414912e-05, |
| "loss": 0.3485, |
| "step": 1600 |
| }, |
| { |
| "epoch": 1.4958061509785647, |
| "grad_norm": 0.29200266930057117, |
| "learning_rate": 2.7856403175699002e-05, |
| "loss": 0.3529, |
| "step": 1605 |
| }, |
| { |
| "epoch": 1.500465983224604, |
| "grad_norm": 0.3440371768426119, |
| "learning_rate": 2.777010700724888e-05, |
| "loss": 0.3477, |
| "step": 1610 |
| }, |
| { |
| "epoch": 1.505125815470643, |
| "grad_norm": 0.32373391689242287, |
| "learning_rate": 2.768381083879876e-05, |
| "loss": 0.3431, |
| "step": 1615 |
| }, |
| { |
| "epoch": 1.5097856477166822, |
| "grad_norm": 0.26950475692133025, |
| "learning_rate": 2.759751467034864e-05, |
| "loss": 0.3453, |
| "step": 1620 |
| }, |
| { |
| "epoch": 1.5144454799627214, |
| "grad_norm": 0.2826210501773821, |
| "learning_rate": 2.751121850189852e-05, |
| "loss": 0.3397, |
| "step": 1625 |
| }, |
| { |
| "epoch": 1.5191053122087603, |
| "grad_norm": 0.3102161140774324, |
| "learning_rate": 2.7424922333448393e-05, |
| "loss": 0.3328, |
| "step": 1630 |
| }, |
| { |
| "epoch": 1.5237651444547997, |
| "grad_norm": 0.33760697921958377, |
| "learning_rate": 2.7338626164998278e-05, |
| "loss": 0.3403, |
| "step": 1635 |
| }, |
| { |
| "epoch": 1.5284249767008387, |
| "grad_norm": 0.29166453965201583, |
| "learning_rate": 2.7252329996548153e-05, |
| "loss": 0.3434, |
| "step": 1640 |
| }, |
| { |
| "epoch": 1.533084808946878, |
| "grad_norm": 0.2798133072967164, |
| "learning_rate": 2.7166033828098038e-05, |
| "loss": 0.3586, |
| "step": 1645 |
| }, |
| { |
| "epoch": 1.537744641192917, |
| "grad_norm": 0.32261771570684744, |
| "learning_rate": 2.7079737659647913e-05, |
| "loss": 0.3473, |
| "step": 1650 |
| }, |
| { |
| "epoch": 1.5424044734389561, |
| "grad_norm": 0.32981956943151086, |
| "learning_rate": 2.699344149119779e-05, |
| "loss": 0.3543, |
| "step": 1655 |
| }, |
| { |
| "epoch": 1.5470643056849953, |
| "grad_norm": 0.32701364952130885, |
| "learning_rate": 2.6907145322747673e-05, |
| "loss": 0.3488, |
| "step": 1660 |
| }, |
| { |
| "epoch": 1.5517241379310345, |
| "grad_norm": 0.3332712178312528, |
| "learning_rate": 2.682084915429755e-05, |
| "loss": 0.3398, |
| "step": 1665 |
| }, |
| { |
| "epoch": 1.5563839701770736, |
| "grad_norm": 0.348283845697873, |
| "learning_rate": 2.673455298584743e-05, |
| "loss": 0.3447, |
| "step": 1670 |
| }, |
| { |
| "epoch": 1.5610438024231128, |
| "grad_norm": 0.3437847787547748, |
| "learning_rate": 2.664825681739731e-05, |
| "loss": 0.352, |
| "step": 1675 |
| }, |
| { |
| "epoch": 1.565703634669152, |
| "grad_norm": 0.35408777756271287, |
| "learning_rate": 2.6561960648947186e-05, |
| "loss": 0.3437, |
| "step": 1680 |
| }, |
| { |
| "epoch": 1.570363466915191, |
| "grad_norm": 0.34896651367726, |
| "learning_rate": 2.647566448049707e-05, |
| "loss": 0.3398, |
| "step": 1685 |
| }, |
| { |
| "epoch": 1.5750232991612303, |
| "grad_norm": 0.32694492129697167, |
| "learning_rate": 2.6389368312046945e-05, |
| "loss": 0.3521, |
| "step": 1690 |
| }, |
| { |
| "epoch": 1.5796831314072692, |
| "grad_norm": 0.2909691544432443, |
| "learning_rate": 2.6303072143596824e-05, |
| "loss": 0.3561, |
| "step": 1695 |
| }, |
| { |
| "epoch": 1.5843429636533086, |
| "grad_norm": 0.38721758891012714, |
| "learning_rate": 2.6216775975146705e-05, |
| "loss": 0.3486, |
| "step": 1700 |
| }, |
| { |
| "epoch": 1.5890027958993476, |
| "grad_norm": 0.311267472576106, |
| "learning_rate": 2.6130479806696584e-05, |
| "loss": 0.3458, |
| "step": 1705 |
| }, |
| { |
| "epoch": 1.5936626281453867, |
| "grad_norm": 0.3757471184213451, |
| "learning_rate": 2.6044183638246462e-05, |
| "loss": 0.3518, |
| "step": 1710 |
| }, |
| { |
| "epoch": 1.598322460391426, |
| "grad_norm": 0.2688206025720871, |
| "learning_rate": 2.5957887469796343e-05, |
| "loss": 0.341, |
| "step": 1715 |
| }, |
| { |
| "epoch": 1.602982292637465, |
| "grad_norm": 0.3092285021432447, |
| "learning_rate": 2.587159130134622e-05, |
| "loss": 0.3451, |
| "step": 1720 |
| }, |
| { |
| "epoch": 1.6076421248835042, |
| "grad_norm": 0.28400015059271655, |
| "learning_rate": 2.5785295132896096e-05, |
| "loss": 0.3451, |
| "step": 1725 |
| }, |
| { |
| "epoch": 1.6123019571295434, |
| "grad_norm": 0.3445231051932557, |
| "learning_rate": 2.569899896444598e-05, |
| "loss": 0.339, |
| "step": 1730 |
| }, |
| { |
| "epoch": 1.6169617893755825, |
| "grad_norm": 0.38314927018404166, |
| "learning_rate": 2.5612702795995856e-05, |
| "loss": 0.3501, |
| "step": 1735 |
| }, |
| { |
| "epoch": 1.6216216216216215, |
| "grad_norm": 0.29012666606461185, |
| "learning_rate": 2.552640662754574e-05, |
| "loss": 0.3442, |
| "step": 1740 |
| }, |
| { |
| "epoch": 1.6262814538676609, |
| "grad_norm": 0.3288769112109895, |
| "learning_rate": 2.5440110459095616e-05, |
| "loss": 0.3512, |
| "step": 1745 |
| }, |
| { |
| "epoch": 1.6309412861136998, |
| "grad_norm": 0.3868687049946097, |
| "learning_rate": 2.5353814290645494e-05, |
| "loss": 0.3498, |
| "step": 1750 |
| }, |
| { |
| "epoch": 1.6356011183597392, |
| "grad_norm": 0.34707374673996927, |
| "learning_rate": 2.5267518122195376e-05, |
| "loss": 0.3419, |
| "step": 1755 |
| }, |
| { |
| "epoch": 1.6402609506057781, |
| "grad_norm": 0.38650219982852274, |
| "learning_rate": 2.5181221953745254e-05, |
| "loss": 0.3502, |
| "step": 1760 |
| }, |
| { |
| "epoch": 1.6449207828518173, |
| "grad_norm": 0.30442621914305384, |
| "learning_rate": 2.5094925785295132e-05, |
| "loss": 0.352, |
| "step": 1765 |
| }, |
| { |
| "epoch": 1.6495806150978565, |
| "grad_norm": 0.295831906556078, |
| "learning_rate": 2.5008629616845014e-05, |
| "loss": 0.3451, |
| "step": 1770 |
| }, |
| { |
| "epoch": 1.6542404473438956, |
| "grad_norm": 0.41956198605419515, |
| "learning_rate": 2.4922333448394892e-05, |
| "loss": 0.338, |
| "step": 1775 |
| }, |
| { |
| "epoch": 1.6589002795899348, |
| "grad_norm": 0.34229517285638916, |
| "learning_rate": 2.483603727994477e-05, |
| "loss": 0.3475, |
| "step": 1780 |
| }, |
| { |
| "epoch": 1.6635601118359737, |
| "grad_norm": 0.31924807835467234, |
| "learning_rate": 2.4749741111494652e-05, |
| "loss": 0.3446, |
| "step": 1785 |
| }, |
| { |
| "epoch": 1.6682199440820131, |
| "grad_norm": 0.3284589327531291, |
| "learning_rate": 2.466344494304453e-05, |
| "loss": 0.3438, |
| "step": 1790 |
| }, |
| { |
| "epoch": 1.672879776328052, |
| "grad_norm": 0.28703634586528093, |
| "learning_rate": 2.457714877459441e-05, |
| "loss": 0.3366, |
| "step": 1795 |
| }, |
| { |
| "epoch": 1.6775396085740915, |
| "grad_norm": 0.2932607124323887, |
| "learning_rate": 2.4490852606144287e-05, |
| "loss": 0.3366, |
| "step": 1800 |
| }, |
| { |
| "epoch": 1.6821994408201304, |
| "grad_norm": 0.296583913776671, |
| "learning_rate": 2.440455643769417e-05, |
| "loss": 0.34, |
| "step": 1805 |
| }, |
| { |
| "epoch": 1.6868592730661698, |
| "grad_norm": 0.2687683226730502, |
| "learning_rate": 2.4318260269244047e-05, |
| "loss": 0.3434, |
| "step": 1810 |
| }, |
| { |
| "epoch": 1.6915191053122087, |
| "grad_norm": 0.25347808663410265, |
| "learning_rate": 2.4231964100793925e-05, |
| "loss": 0.331, |
| "step": 1815 |
| }, |
| { |
| "epoch": 1.696178937558248, |
| "grad_norm": 0.2713693960239594, |
| "learning_rate": 2.4145667932343803e-05, |
| "loss": 0.3426, |
| "step": 1820 |
| }, |
| { |
| "epoch": 1.700838769804287, |
| "grad_norm": 0.26766349202361095, |
| "learning_rate": 2.4059371763893685e-05, |
| "loss": 0.3468, |
| "step": 1825 |
| }, |
| { |
| "epoch": 1.7054986020503262, |
| "grad_norm": 0.2761857212120324, |
| "learning_rate": 2.3973075595443563e-05, |
| "loss": 0.3524, |
| "step": 1830 |
| }, |
| { |
| "epoch": 1.7101584342963654, |
| "grad_norm": 0.3533387971020311, |
| "learning_rate": 2.388677942699344e-05, |
| "loss": 0.347, |
| "step": 1835 |
| }, |
| { |
| "epoch": 1.7148182665424043, |
| "grad_norm": 0.3304465470183926, |
| "learning_rate": 2.3800483258543323e-05, |
| "loss": 0.3433, |
| "step": 1840 |
| }, |
| { |
| "epoch": 1.7194780987884437, |
| "grad_norm": 0.2838212354941664, |
| "learning_rate": 2.37141870900932e-05, |
| "loss": 0.3487, |
| "step": 1845 |
| }, |
| { |
| "epoch": 1.7241379310344827, |
| "grad_norm": 0.27182912167501216, |
| "learning_rate": 2.3627890921643083e-05, |
| "loss": 0.3307, |
| "step": 1850 |
| }, |
| { |
| "epoch": 1.728797763280522, |
| "grad_norm": 0.3463275642546345, |
| "learning_rate": 2.3541594753192957e-05, |
| "loss": 0.3407, |
| "step": 1855 |
| }, |
| { |
| "epoch": 1.733457595526561, |
| "grad_norm": 0.33680897836510765, |
| "learning_rate": 2.345529858474284e-05, |
| "loss": 0.341, |
| "step": 1860 |
| }, |
| { |
| "epoch": 1.7381174277726001, |
| "grad_norm": 0.33727330449578286, |
| "learning_rate": 2.3369002416292717e-05, |
| "loss": 0.3405, |
| "step": 1865 |
| }, |
| { |
| "epoch": 1.7427772600186393, |
| "grad_norm": 0.3100575835627644, |
| "learning_rate": 2.32827062478426e-05, |
| "loss": 0.3394, |
| "step": 1870 |
| }, |
| { |
| "epoch": 1.7474370922646785, |
| "grad_norm": 0.2826440437227791, |
| "learning_rate": 2.3196410079392474e-05, |
| "loss": 0.348, |
| "step": 1875 |
| }, |
| { |
| "epoch": 1.7520969245107176, |
| "grad_norm": 0.32083683767973736, |
| "learning_rate": 2.3110113910942355e-05, |
| "loss": 0.3425, |
| "step": 1880 |
| }, |
| { |
| "epoch": 1.7567567567567568, |
| "grad_norm": 0.29227267913176436, |
| "learning_rate": 2.3023817742492234e-05, |
| "loss": 0.3482, |
| "step": 1885 |
| }, |
| { |
| "epoch": 1.761416589002796, |
| "grad_norm": 0.2698620187901574, |
| "learning_rate": 2.2937521574042115e-05, |
| "loss": 0.3457, |
| "step": 1890 |
| }, |
| { |
| "epoch": 1.766076421248835, |
| "grad_norm": 0.3151861063948914, |
| "learning_rate": 2.285122540559199e-05, |
| "loss": 0.3431, |
| "step": 1895 |
| }, |
| { |
| "epoch": 1.7707362534948743, |
| "grad_norm": 0.3538300373789918, |
| "learning_rate": 2.276492923714187e-05, |
| "loss": 0.3353, |
| "step": 1900 |
| }, |
| { |
| "epoch": 1.7753960857409132, |
| "grad_norm": 0.27314570126621124, |
| "learning_rate": 2.267863306869175e-05, |
| "loss": 0.3347, |
| "step": 1905 |
| }, |
| { |
| "epoch": 1.7800559179869526, |
| "grad_norm": 0.260828982118303, |
| "learning_rate": 2.259233690024163e-05, |
| "loss": 0.3476, |
| "step": 1910 |
| }, |
| { |
| "epoch": 1.7847157502329916, |
| "grad_norm": 0.293875378087096, |
| "learning_rate": 2.250604073179151e-05, |
| "loss": 0.3308, |
| "step": 1915 |
| }, |
| { |
| "epoch": 1.7893755824790307, |
| "grad_norm": 0.3118963241797147, |
| "learning_rate": 2.2419744563341388e-05, |
| "loss": 0.3469, |
| "step": 1920 |
| }, |
| { |
| "epoch": 1.7940354147250699, |
| "grad_norm": 0.25482474228286517, |
| "learning_rate": 2.233344839489127e-05, |
| "loss": 0.3435, |
| "step": 1925 |
| }, |
| { |
| "epoch": 1.798695246971109, |
| "grad_norm": 0.29389984211878534, |
| "learning_rate": 2.2247152226441148e-05, |
| "loss": 0.34, |
| "step": 1930 |
| }, |
| { |
| "epoch": 1.8033550792171482, |
| "grad_norm": 0.29489566146760227, |
| "learning_rate": 2.2160856057991026e-05, |
| "loss": 0.345, |
| "step": 1935 |
| }, |
| { |
| "epoch": 1.8080149114631874, |
| "grad_norm": 0.2852643696421498, |
| "learning_rate": 2.2074559889540904e-05, |
| "loss": 0.3327, |
| "step": 1940 |
| }, |
| { |
| "epoch": 1.8126747437092265, |
| "grad_norm": 0.26648292779453114, |
| "learning_rate": 2.1988263721090786e-05, |
| "loss": 0.3495, |
| "step": 1945 |
| }, |
| { |
| "epoch": 1.8173345759552655, |
| "grad_norm": 0.27853154206944286, |
| "learning_rate": 2.1901967552640664e-05, |
| "loss": 0.333, |
| "step": 1950 |
| }, |
| { |
| "epoch": 1.8219944082013049, |
| "grad_norm": 0.3338574731573854, |
| "learning_rate": 2.1815671384190542e-05, |
| "loss": 0.341, |
| "step": 1955 |
| }, |
| { |
| "epoch": 1.8266542404473438, |
| "grad_norm": 0.3269171924058155, |
| "learning_rate": 2.172937521574042e-05, |
| "loss": 0.3447, |
| "step": 1960 |
| }, |
| { |
| "epoch": 1.8313140726933832, |
| "grad_norm": 0.30017033096794987, |
| "learning_rate": 2.1643079047290302e-05, |
| "loss": 0.3458, |
| "step": 1965 |
| }, |
| { |
| "epoch": 1.8359739049394221, |
| "grad_norm": 0.2850784608066985, |
| "learning_rate": 2.155678287884018e-05, |
| "loss": 0.3434, |
| "step": 1970 |
| }, |
| { |
| "epoch": 1.8406337371854613, |
| "grad_norm": 0.2656541385829881, |
| "learning_rate": 2.147048671039006e-05, |
| "loss": 0.3516, |
| "step": 1975 |
| }, |
| { |
| "epoch": 1.8452935694315005, |
| "grad_norm": 0.2471203909463743, |
| "learning_rate": 2.1384190541939937e-05, |
| "loss": 0.3418, |
| "step": 1980 |
| }, |
| { |
| "epoch": 1.8499534016775396, |
| "grad_norm": 0.2650899753963081, |
| "learning_rate": 2.129789437348982e-05, |
| "loss": 0.3377, |
| "step": 1985 |
| }, |
| { |
| "epoch": 1.8546132339235788, |
| "grad_norm": 0.2647940247061497, |
| "learning_rate": 2.1211598205039697e-05, |
| "loss": 0.3564, |
| "step": 1990 |
| }, |
| { |
| "epoch": 1.8592730661696177, |
| "grad_norm": 0.3227812765644576, |
| "learning_rate": 2.1125302036589575e-05, |
| "loss": 0.3421, |
| "step": 1995 |
| }, |
| { |
| "epoch": 1.8639328984156571, |
| "grad_norm": 0.297267058414044, |
| "learning_rate": 2.1039005868139457e-05, |
| "loss": 0.3409, |
| "step": 2000 |
| }, |
| { |
| "epoch": 1.868592730661696, |
| "grad_norm": 0.2805203477278729, |
| "learning_rate": 2.0952709699689335e-05, |
| "loss": 0.3344, |
| "step": 2005 |
| }, |
| { |
| "epoch": 1.8732525629077355, |
| "grad_norm": 0.2885697842925641, |
| "learning_rate": 2.0866413531239216e-05, |
| "loss": 0.3499, |
| "step": 2010 |
| }, |
| { |
| "epoch": 1.8779123951537744, |
| "grad_norm": 0.2864142255057866, |
| "learning_rate": 2.078011736278909e-05, |
| "loss": 0.336, |
| "step": 2015 |
| }, |
| { |
| "epoch": 1.8825722273998136, |
| "grad_norm": 0.27559826462867176, |
| "learning_rate": 2.0693821194338973e-05, |
| "loss": 0.3308, |
| "step": 2020 |
| }, |
| { |
| "epoch": 1.8872320596458527, |
| "grad_norm": 0.3245194476026471, |
| "learning_rate": 2.060752502588885e-05, |
| "loss": 0.337, |
| "step": 2025 |
| }, |
| { |
| "epoch": 1.8918918918918919, |
| "grad_norm": 0.24340386122312743, |
| "learning_rate": 2.0521228857438733e-05, |
| "loss": 0.3441, |
| "step": 2030 |
| }, |
| { |
| "epoch": 1.896551724137931, |
| "grad_norm": 0.24501266752590212, |
| "learning_rate": 2.0434932688988608e-05, |
| "loss": 0.3371, |
| "step": 2035 |
| }, |
| { |
| "epoch": 1.9012115563839702, |
| "grad_norm": 0.24503608734019988, |
| "learning_rate": 2.034863652053849e-05, |
| "loss": 0.3398, |
| "step": 2040 |
| }, |
| { |
| "epoch": 1.9058713886300094, |
| "grad_norm": 0.24066133214971583, |
| "learning_rate": 2.0262340352088367e-05, |
| "loss": 0.3556, |
| "step": 2045 |
| }, |
| { |
| "epoch": 1.9105312208760483, |
| "grad_norm": 0.2932258663979882, |
| "learning_rate": 2.017604418363825e-05, |
| "loss": 0.3461, |
| "step": 2050 |
| }, |
| { |
| "epoch": 1.9151910531220877, |
| "grad_norm": 0.2923717720171897, |
| "learning_rate": 2.0089748015188127e-05, |
| "loss": 0.3407, |
| "step": 2055 |
| }, |
| { |
| "epoch": 1.9198508853681266, |
| "grad_norm": 0.2440018819032557, |
| "learning_rate": 2.0003451846738005e-05, |
| "loss": 0.3481, |
| "step": 2060 |
| }, |
| { |
| "epoch": 1.924510717614166, |
| "grad_norm": 0.3006737152563499, |
| "learning_rate": 1.9917155678287887e-05, |
| "loss": 0.3375, |
| "step": 2065 |
| }, |
| { |
| "epoch": 1.929170549860205, |
| "grad_norm": 0.26381943626428767, |
| "learning_rate": 1.9830859509837765e-05, |
| "loss": 0.345, |
| "step": 2070 |
| }, |
| { |
| "epoch": 1.9338303821062441, |
| "grad_norm": 0.2559642563887702, |
| "learning_rate": 1.9744563341387643e-05, |
| "loss": 0.3463, |
| "step": 2075 |
| }, |
| { |
| "epoch": 1.9384902143522833, |
| "grad_norm": 0.2854146175559252, |
| "learning_rate": 1.9658267172937522e-05, |
| "loss": 0.3415, |
| "step": 2080 |
| }, |
| { |
| "epoch": 1.9431500465983225, |
| "grad_norm": 0.3551942925927518, |
| "learning_rate": 1.9571971004487403e-05, |
| "loss": 0.3411, |
| "step": 2085 |
| }, |
| { |
| "epoch": 1.9478098788443616, |
| "grad_norm": 0.30092223062194995, |
| "learning_rate": 1.948567483603728e-05, |
| "loss": 0.3472, |
| "step": 2090 |
| }, |
| { |
| "epoch": 1.9524697110904008, |
| "grad_norm": 0.30697657331539163, |
| "learning_rate": 1.939937866758716e-05, |
| "loss": 0.3295, |
| "step": 2095 |
| }, |
| { |
| "epoch": 1.95712954333644, |
| "grad_norm": 0.2980503359064689, |
| "learning_rate": 1.9313082499137038e-05, |
| "loss": 0.3468, |
| "step": 2100 |
| }, |
| { |
| "epoch": 1.961789375582479, |
| "grad_norm": 0.3059325972042569, |
| "learning_rate": 1.922678633068692e-05, |
| "loss": 0.3412, |
| "step": 2105 |
| }, |
| { |
| "epoch": 1.9664492078285183, |
| "grad_norm": 0.2960890819228813, |
| "learning_rate": 1.9140490162236798e-05, |
| "loss": 0.3466, |
| "step": 2110 |
| }, |
| { |
| "epoch": 1.9711090400745572, |
| "grad_norm": 0.2851735506449098, |
| "learning_rate": 1.9054193993786676e-05, |
| "loss": 0.354, |
| "step": 2115 |
| }, |
| { |
| "epoch": 1.9757688723205966, |
| "grad_norm": 0.2643197569523461, |
| "learning_rate": 1.8967897825336554e-05, |
| "loss": 0.342, |
| "step": 2120 |
| }, |
| { |
| "epoch": 1.9804287045666356, |
| "grad_norm": 0.2544038073538777, |
| "learning_rate": 1.8881601656886436e-05, |
| "loss": 0.3393, |
| "step": 2125 |
| }, |
| { |
| "epoch": 1.9850885368126747, |
| "grad_norm": 0.2713719078498063, |
| "learning_rate": 1.8795305488436314e-05, |
| "loss": 0.3428, |
| "step": 2130 |
| }, |
| { |
| "epoch": 1.9897483690587139, |
| "grad_norm": 0.3067483498066776, |
| "learning_rate": 1.8709009319986192e-05, |
| "loss": 0.3315, |
| "step": 2135 |
| }, |
| { |
| "epoch": 1.994408201304753, |
| "grad_norm": 0.2733634463212891, |
| "learning_rate": 1.8622713151536074e-05, |
| "loss": 0.3385, |
| "step": 2140 |
| }, |
| { |
| "epoch": 1.9990680335507922, |
| "grad_norm": 0.2646728305606801, |
| "learning_rate": 1.8536416983085952e-05, |
| "loss": 0.3432, |
| "step": 2145 |
| } |
| ], |
| "logging_steps": 5, |
| "max_steps": 3219, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.8376286011392e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
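
A minimal sketch of how one might inspect this log: assuming the raw state above is saved as `trainer_state.json` (the standard Hugging Face Trainer checkpoint file) and that matplotlib is available, the `log_history` entries (each carrying `step`, `loss`, `learning_rate`, `grad_norm`) can be plotted as a loss curve. The file path and plotting library are assumptions for illustration, not part of the training run itself.

```python
# Sketch only: load a trainer_state.json like the one above and plot loss vs. step.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that logged a training loss (eval entries may lack it).
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"epoch {state['epoch']} of {state['num_train_epochs']}")
plt.show()
```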
|
|