{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.961363636363636,
  "eval_steps": 500,
  "global_step": 3063,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.022727272727272728,
      "grad_norm": 0.026781119406223297,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.922,
      "step": 10
    },
    {
      "epoch": 0.045454545454545456,
      "grad_norm": 0.024741673842072487,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.8923,
      "step": 20
    },
    {
      "epoch": 0.06818181818181818,
      "grad_norm": 0.02995569072663784,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.9527,
      "step": 30
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 0.029402069747447968,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.9525,
      "step": 40
    },
    {
      "epoch": 0.11363636363636363,
      "grad_norm": 0.030222086235880852,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.868,
      "step": 50
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 0.026196401566267014,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.8827,
      "step": 60
    },
    {
      "epoch": 0.1590909090909091,
      "grad_norm": 0.029812786728143692,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.9446,
      "step": 70
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 0.02471482940018177,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.9012,
      "step": 80
    },
    {
      "epoch": 0.20454545454545456,
      "grad_norm": 0.032019663602113724,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.9249,
      "step": 90
    },
    {
      "epoch": 0.22727272727272727,
      "grad_norm": 0.03502049297094345,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.9458,
      "step": 100
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.03877561166882515,
      "learning_rate": 4.4e-06,
      "loss": 0.9368,
      "step": 110
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 0.03687964007258415,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.9022,
      "step": 120
    },
    {
      "epoch": 0.29545454545454547,
      "grad_norm": 0.03736700490117073,
      "learning_rate": 5.2e-06,
      "loss": 0.9155,
      "step": 130
    },
    {
      "epoch": 0.3181818181818182,
      "grad_norm": 0.04338955879211426,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.8981,
      "step": 140
    },
    {
      "epoch": 0.3409090909090909,
      "grad_norm": 0.03441348299384117,
      "learning_rate": 6e-06,
      "loss": 0.8779,
      "step": 150
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.04274949058890343,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.9095,
      "step": 160
    },
    {
      "epoch": 0.38636363636363635,
      "grad_norm": 0.04366679862141609,
      "learning_rate": 6.800000000000001e-06,
      "loss": 0.8781,
      "step": 170
    },
    {
      "epoch": 0.4090909090909091,
      "grad_norm": 0.041996780782938004,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.9327,
      "step": 180
    },
    {
      "epoch": 0.4318181818181818,
      "grad_norm": 0.050978969782590866,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.864,
      "step": 190
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 0.04162677377462387,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.9109,
      "step": 200
    },
    {
      "epoch": 0.4772727272727273,
      "grad_norm": 0.041858162730932236,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.8516,
      "step": 210
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.04480082169175148,
      "learning_rate": 8.8e-06,
      "loss": 0.8642,
      "step": 220
    },
    {
      "epoch": 0.5227272727272727,
      "grad_norm": 0.035324063152074814,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.8552,
      "step": 230
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.04161485657095909,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.8037,
      "step": 240
    },
    {
      "epoch": 0.5681818181818182,
      "grad_norm": 0.04798528924584389,
      "learning_rate": 1e-05,
      "loss": 0.8653,
      "step": 250
    },
    {
      "epoch": 0.5909090909090909,
      "grad_norm": 0.03867500275373459,
      "learning_rate": 9.999688185881378e-06,
      "loss": 0.8552,
      "step": 260
    },
    {
      "epoch": 0.6136363636363636,
      "grad_norm": 0.04236117750406265,
      "learning_rate": 9.998752782416726e-06,
      "loss": 0.8153,
      "step": 270
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 0.036168813705444336,
      "learning_rate": 9.997193906274847e-06,
      "loss": 0.8214,
      "step": 280
    },
    {
      "epoch": 0.6590909090909091,
      "grad_norm": 0.042327072471380234,
      "learning_rate": 9.995011751887578e-06,
      "loss": 0.8484,
      "step": 290
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.03896569833159447,
      "learning_rate": 9.992206591425538e-06,
      "loss": 0.8249,
      "step": 300
    },
    {
      "epoch": 0.7045454545454546,
      "grad_norm": 0.042347826063632965,
      "learning_rate": 9.988778774764182e-06,
      "loss": 0.8279,
      "step": 310
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.037002451717853546,
      "learning_rate": 9.984728729440162e-06,
      "loss": 0.8321,
      "step": 320
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.03902013972401619,
      "learning_rate": 9.980056960598003e-06,
      "loss": 0.8043,
      "step": 330
    },
    {
      "epoch": 0.7727272727272727,
      "grad_norm": 0.03872638940811157,
      "learning_rate": 9.974764050927098e-06,
      "loss": 0.8564,
      "step": 340
    },
    {
      "epoch": 0.7954545454545454,
      "grad_norm": 0.03531708940863609,
      "learning_rate": 9.968850660589034e-06,
      "loss": 0.7862,
      "step": 350
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.03553822636604309,
      "learning_rate": 9.962317527135248e-06,
      "loss": 0.8055,
      "step": 360
    },
    {
      "epoch": 0.8409090909090909,
      "grad_norm": 0.034433577209711075,
      "learning_rate": 9.955165465415041e-06,
      "loss": 0.8178,
      "step": 370
    },
    {
      "epoch": 0.8636363636363636,
      "grad_norm": 0.03880152851343155,
      "learning_rate": 9.947395367473945e-06,
      "loss": 0.7978,
      "step": 380
    },
    {
      "epoch": 0.8863636363636364,
      "grad_norm": 0.035060495138168335,
      "learning_rate": 9.939008202442448e-06,
      "loss": 0.8052,
      "step": 390
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.029547426849603653,
      "learning_rate": 9.930005016415148e-06,
      "loss": 0.8159,
      "step": 400
    },
    {
      "epoch": 0.9318181818181818,
      "grad_norm": 0.03689836710691452,
      "learning_rate": 9.920386932320243e-06,
      "loss": 0.8229,
      "step": 410
    },
    {
      "epoch": 0.9545454545454546,
      "grad_norm": 0.035304516553878784,
      "learning_rate": 9.910155149779505e-06,
      "loss": 0.8144,
      "step": 420
    },
    {
      "epoch": 0.9772727272727273,
      "grad_norm": 0.03637601435184479,
      "learning_rate": 9.899310944958634e-06,
      "loss": 0.8451,
      "step": 430
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.032583512365818024,
      "learning_rate": 9.887855670408098e-06,
      "loss": 0.8083,
      "step": 440
    },
    {
      "epoch": 1.0227272727272727,
      "grad_norm": 0.037623800337314606,
      "learning_rate": 9.87579075489443e-06,
      "loss": 0.8463,
      "step": 450
    },
    {
      "epoch": 1.0454545454545454,
      "grad_norm": 0.0319579653441906,
      "learning_rate": 9.863117703222032e-06,
      "loss": 0.8575,
      "step": 460
    },
    {
      "epoch": 1.0681818181818181,
      "grad_norm": 0.03368237987160683,
      "learning_rate": 9.849838096045475e-06,
      "loss": 0.8193,
      "step": 470
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 0.03865516185760498,
      "learning_rate": 9.835953589672364e-06,
      "loss": 0.7893,
      "step": 480
    },
    {
      "epoch": 1.1136363636363635,
      "grad_norm": 0.0340314581990242,
      "learning_rate": 9.82146591585675e-06,
      "loss": 0.8193,
      "step": 490
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 0.03837134689092636,
      "learning_rate": 9.806376881583122e-06,
      "loss": 0.7955,
      "step": 500
    },
    {
      "epoch": 1.1590909090909092,
      "grad_norm": 0.03679390251636505,
      "learning_rate": 9.790688368841054e-06,
      "loss": 0.8371,
      "step": 510
    },
    {
      "epoch": 1.1818181818181819,
      "grad_norm": 0.03493216633796692,
      "learning_rate": 9.774402334390454e-06,
      "loss": 0.8137,
      "step": 520
    },
    {
      "epoch": 1.2045454545454546,
      "grad_norm": 0.0344357043504715,
      "learning_rate": 9.757520809517515e-06,
      "loss": 0.8007,
      "step": 530
    },
    {
      "epoch": 1.2272727272727273,
      "grad_norm": 0.036025360226631165,
      "learning_rate": 9.740045899781353e-06,
      "loss": 0.8015,
      "step": 540
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.03330492600798607,
      "learning_rate": 9.721979784751403e-06,
      "loss": 0.8023,
      "step": 550
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 0.03598447144031525,
      "learning_rate": 9.703324717735556e-06,
      "loss": 0.8281,
      "step": 560
    },
    {
      "epoch": 1.2954545454545454,
      "grad_norm": 0.03607608377933502,
      "learning_rate": 9.684083025499125e-06,
      "loss": 0.8004,
      "step": 570
    },
    {
      "epoch": 1.3181818181818181,
      "grad_norm": 0.03628605976700783,
      "learning_rate": 9.664257107974633e-06,
      "loss": 0.7802,
      "step": 580
    },
    {
      "epoch": 1.3409090909090908,
      "grad_norm": 0.036778271198272705,
      "learning_rate": 9.643849437962476e-06,
      "loss": 0.8156,
      "step": 590
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 0.03817847743630409,
      "learning_rate": 9.622862560822515e-06,
      "loss": 0.8253,
      "step": 600
    },
    {
      "epoch": 1.3863636363636362,
      "grad_norm": 0.039363328367471695,
      "learning_rate": 9.601299094156584e-06,
      "loss": 0.8231,
      "step": 610
    },
    {
      "epoch": 1.4090909090909092,
      "grad_norm": 0.04012521356344223,
      "learning_rate": 9.579161727482027e-06,
      "loss": 0.7971,
      "step": 620
    },
    {
      "epoch": 1.4318181818181819,
      "grad_norm": 0.03911637142300606,
      "learning_rate": 9.556453221896234e-06,
      "loss": 0.7919,
      "step": 630
    },
    {
      "epoch": 1.4545454545454546,
      "grad_norm": 0.037754397839307785,
      "learning_rate": 9.53317640973227e-06,
      "loss": 0.8145,
      "step": 640
    },
    {
      "epoch": 1.4772727272727273,
      "grad_norm": 0.03671397641301155,
      "learning_rate": 9.509334194205597e-06,
      "loss": 0.7797,
      "step": 650
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.03528128191828728,
      "learning_rate": 9.484929549051987e-06,
      "loss": 0.8019,
      "step": 660
    },
    {
      "epoch": 1.5227272727272727,
      "grad_norm": 0.038603346794843674,
      "learning_rate": 9.459965518156607e-06,
      "loss": 0.8291,
      "step": 670
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.03947769105434418,
      "learning_rate": 9.434445215174372e-06,
      "loss": 0.8228,
      "step": 680
    },
    {
      "epoch": 1.5681818181818183,
      "grad_norm": 0.037381611764431,
      "learning_rate": 9.408371823141597e-06,
      "loss": 0.8238,
      "step": 690
    },
    {
      "epoch": 1.5909090909090908,
      "grad_norm": 0.03954509273171425,
      "learning_rate": 9.381748594078983e-06,
      "loss": 0.8188,
      "step": 700
    },
    {
      "epoch": 1.6136363636363638,
      "grad_norm": 0.038289956748485565,
      "learning_rate": 9.354578848586011e-06,
      "loss": 0.7787,
      "step": 710
    },
    {
      "epoch": 1.6363636363636362,
      "grad_norm": 0.038311704993247986,
      "learning_rate": 9.326865975426782e-06,
      "loss": 0.786,
      "step": 720
    },
    {
      "epoch": 1.6590909090909092,
      "grad_norm": 0.03961798921227455,
      "learning_rate": 9.298613431107339e-06,
      "loss": 0.7887,
      "step": 730
    },
    {
      "epoch": 1.6818181818181817,
      "grad_norm": 0.038849055767059326,
      "learning_rate": 9.269824739444568e-06,
      "loss": 0.8028,
      "step": 740
    },
    {
      "epoch": 1.7045454545454546,
      "grad_norm": 0.04103654995560646,
      "learning_rate": 9.240503491126676e-06,
      "loss": 0.8409,
      "step": 750
    },
    {
      "epoch": 1.7272727272727273,
      "grad_norm": 0.03809870034456253,
      "learning_rate": 9.210653343265341e-06,
      "loss": 0.809,
      "step": 760
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.037855364382267,
      "learning_rate": 9.180278018939581e-06,
      "loss": 0.8192,
      "step": 770
    },
    {
      "epoch": 1.7727272727272727,
      "grad_norm": 0.04093025252223015,
      "learning_rate": 9.149381306731392e-06,
      "loss": 0.8421,
      "step": 780
    },
    {
      "epoch": 1.7954545454545454,
      "grad_norm": 0.036524541676044464,
      "learning_rate": 9.117967060253205e-06,
      "loss": 0.7747,
      "step": 790
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.04049857705831528,
      "learning_rate": 9.086039197667255e-06,
      "loss": 0.8201,
      "step": 800
    },
    {
      "epoch": 1.8409090909090908,
      "grad_norm": 0.0417928546667099,
      "learning_rate": 9.053601701196871e-06,
      "loss": 0.7924,
      "step": 810
    },
    {
      "epoch": 1.8636363636363638,
      "grad_norm": 0.04128099977970123,
      "learning_rate": 9.020658616629804e-06,
      "loss": 0.8143,
      "step": 820
    },
    {
      "epoch": 1.8863636363636362,
      "grad_norm": 0.040873561054468155,
      "learning_rate": 8.987214052813605e-06,
      "loss": 0.8141,
      "step": 830
    },
    {
      "epoch": 1.9090909090909092,
      "grad_norm": 0.04162426292896271,
      "learning_rate": 8.953272181143149e-06,
      "loss": 0.7927,
      "step": 840
    },
    {
      "epoch": 1.9318181818181817,
      "grad_norm": 0.039612602442502975,
      "learning_rate": 8.918837235040357e-06,
      "loss": 0.8134,
      "step": 850
    },
    {
      "epoch": 1.9545454545454546,
      "grad_norm": 0.039780184626579285,
      "learning_rate": 8.883913509426176e-06,
      "loss": 0.7678,
      "step": 860
    },
    {
      "epoch": 1.9772727272727273,
      "grad_norm": 0.0440000519156456,
      "learning_rate": 8.848505360184895e-06,
      "loss": 0.79,
      "step": 870
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.04121037945151329,
      "learning_rate": 8.812617203620853e-06,
      "loss": 0.7499,
      "step": 880
    },
    {
      "epoch": 2.022727272727273,
      "grad_norm": 0.04250968620181084,
      "learning_rate": 8.776253515907611e-06,
      "loss": 0.8203,
      "step": 890
    },
    {
      "epoch": 2.0454545454545454,
      "grad_norm": 0.04543096572160721,
      "learning_rate": 8.739418832529665e-06,
      "loss": 0.8114,
      "step": 900
    },
    {
      "epoch": 2.0681818181818183,
      "grad_norm": 0.0420922227203846,
      "learning_rate": 8.70211774771675e-06,
      "loss": 0.8247,
      "step": 910
    },
    {
      "epoch": 2.090909090909091,
      "grad_norm": 0.04692165553569794,
      "learning_rate": 8.664354913870813e-06,
      "loss": 0.8278,
      "step": 920
    },
    {
      "epoch": 2.1136363636363638,
      "grad_norm": 0.040763940662145615,
      "learning_rate": 8.626135040985762e-06,
      "loss": 0.7801,
      "step": 930
    },
    {
      "epoch": 2.1363636363636362,
      "grad_norm": 0.046543512493371964,
      "learning_rate": 8.587462896059986e-06,
      "loss": 0.8104,
      "step": 940
    },
    {
      "epoch": 2.159090909090909,
      "grad_norm": 0.04344585910439491,
      "learning_rate": 8.548343302501796e-06,
      "loss": 0.8122,
      "step": 950
    },
    {
      "epoch": 2.1818181818181817,
      "grad_norm": 0.046868693083524704,
      "learning_rate": 8.508781139527831e-06,
      "loss": 0.8149,
      "step": 960
    },
    {
      "epoch": 2.2045454545454546,
      "grad_norm": 0.04557256028056145,
      "learning_rate": 8.46878134155448e-06,
      "loss": 0.7924,
      "step": 970
    },
    {
      "epoch": 2.227272727272727,
      "grad_norm": 0.04453985020518303,
      "learning_rate": 8.428348897582442e-06,
      "loss": 0.7918,
      "step": 980
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.04629656299948692,
      "learning_rate": 8.387488850574473e-06,
      "loss": 0.7705,
      "step": 990
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 0.0446246862411499,
      "learning_rate": 8.346206296826387e-06,
      "loss": 0.7967,
      "step": 1000
    },
    {
      "epoch": 2.2954545454545454,
      "grad_norm": 0.044356152415275574,
      "learning_rate": 8.30450638533143e-06,
      "loss": 0.7817,
      "step": 1010
    },
    {
      "epoch": 2.3181818181818183,
      "grad_norm": 0.043874483555555344,
      "learning_rate": 8.262394317138062e-06,
      "loss": 0.784,
      "step": 1020
    },
    {
      "epoch": 2.340909090909091,
      "grad_norm": 0.044783398509025574,
      "learning_rate": 8.219875344701255e-06,
      "loss": 0.787,
      "step": 1030
    },
    {
      "epoch": 2.3636363636363638,
      "grad_norm": 0.04361894354224205,
      "learning_rate": 8.176954771227372e-06,
      "loss": 0.7436,
      "step": 1040
    },
    {
      "epoch": 2.3863636363636362,
      "grad_norm": 0.04469903185963631,
      "learning_rate": 8.133637950012732e-06,
      "loss": 0.817,
      "step": 1050
    },
    {
      "epoch": 2.409090909090909,
      "grad_norm": 0.04548465088009834,
      "learning_rate": 8.089930283775906e-06,
      "loss": 0.7791,
      "step": 1060
    },
    {
      "epoch": 2.4318181818181817,
      "grad_norm": 0.04449579119682312,
      "learning_rate": 8.04583722398386e-06,
      "loss": 0.8076,
      "step": 1070
    },
    {
      "epoch": 2.4545454545454546,
      "grad_norm": 0.05069056153297424,
      "learning_rate": 8.001364270172029e-06,
      "loss": 0.7932,
      "step": 1080
    },
    {
      "epoch": 2.4772727272727275,
      "grad_norm": 0.04452840983867645,
      "learning_rate": 7.95651696925837e-06,
      "loss": 0.7752,
      "step": 1090
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.044558193534612656,
      "learning_rate": 7.911300914851523e-06,
      "loss": 0.7854,
      "step": 1100
    },
    {
      "epoch": 2.5227272727272725,
      "grad_norm": 0.044422492384910583,
      "learning_rate": 7.865721746553151e-06,
      "loss": 0.7784,
      "step": 1110
    },
    {
      "epoch": 2.5454545454545454,
      "grad_norm": 0.04627745598554611,
      "learning_rate": 7.819785149254534e-06,
      "loss": 0.7476,
      "step": 1120
    },
    {
      "epoch": 2.5681818181818183,
      "grad_norm": 0.04218703508377075,
      "learning_rate": 7.773496852427504e-06,
      "loss": 0.8409,
      "step": 1130
    },
    {
      "epoch": 2.590909090909091,
      "grad_norm": 0.050779055804014206,
      "learning_rate": 7.72686262940986e-06,
      "loss": 0.8242,
      "step": 1140
    },
    {
      "epoch": 2.6136363636363638,
      "grad_norm": 0.04828439652919769,
      "learning_rate": 7.679888296685256e-06,
      "loss": 0.7958,
      "step": 1150
    },
    {
      "epoch": 2.6363636363636362,
      "grad_norm": 0.05190173536539078,
      "learning_rate": 7.632579713157755e-06,
      "loss": 0.8086,
      "step": 1160
    },
    {
      "epoch": 2.659090909090909,
      "grad_norm": 0.046120744198560715,
      "learning_rate": 7.58494277942107e-06,
      "loss": 0.83,
      "step": 1170
    },
    {
      "epoch": 2.6818181818181817,
      "grad_norm": 0.051556602120399475,
      "learning_rate": 7.5369834370226005e-06,
      "loss": 0.8245,
      "step": 1180
    },
    {
      "epoch": 2.7045454545454546,
      "grad_norm": 0.045309435576200485,
      "learning_rate": 7.488707667722381e-06,
      "loss": 0.7797,
      "step": 1190
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 0.046404462307691574,
      "learning_rate": 7.440121492746992e-06,
      "loss": 0.7935,
      "step": 1200
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.04626648873090744,
      "learning_rate": 7.391230972038567e-06,
      "loss": 0.8127,
      "step": 1210
    },
    {
      "epoch": 2.7727272727272725,
      "grad_norm": 0.049298033118247986,
      "learning_rate": 7.342042203498952e-06,
      "loss": 0.8366,
      "step": 1220
    },
    {
      "epoch": 2.7954545454545454,
      "grad_norm": 0.047652631998062134,
      "learning_rate": 7.292561322229152e-06,
      "loss": 0.7797,
      "step": 1230
    },
    {
      "epoch": 2.8181818181818183,
      "grad_norm": 0.05071092024445534,
      "learning_rate": 7.2427944997641186e-06,
      "loss": 0.7936,
      "step": 1240
    },
    {
      "epoch": 2.840909090909091,
      "grad_norm": 0.04736604541540146,
      "learning_rate": 7.192747943303008e-06,
      "loss": 0.8147,
      "step": 1250
    },
    {
      "epoch": 2.8636363636363638,
      "grad_norm": 0.044138215482234955,
      "learning_rate": 7.142427894934975e-06,
      "loss": 0.7978,
      "step": 1260
    },
    {
      "epoch": 2.8863636363636362,
      "grad_norm": 0.04737458750605583,
      "learning_rate": 7.0918406308606336e-06,
      "loss": 0.8327,
      "step": 1270
    },
    {
      "epoch": 2.909090909090909,
      "grad_norm": 0.05238456651568413,
      "learning_rate": 7.040992460609246e-06,
      "loss": 0.816,
      "step": 1280
    },
    {
      "epoch": 2.9318181818181817,
      "grad_norm": 0.04763491824269295,
      "learning_rate": 6.98988972625177e-06,
      "loss": 0.8347,
      "step": 1290
    },
    {
      "epoch": 2.9545454545454546,
      "grad_norm": 0.045320168137550354,
      "learning_rate": 6.938538801609834e-06,
      "loss": 0.79,
      "step": 1300
    },
    {
      "epoch": 2.9772727272727275,
      "grad_norm": 0.04802026227116585,
      "learning_rate": 6.886946091460762e-06,
      "loss": 0.8046,
      "step": 1310
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.047994792461395264,
      "learning_rate": 6.835118030738731e-06,
      "loss": 0.8179,
      "step": 1320
    },
    {
      "epoch": 3.022727272727273,
      "grad_norm": 0.04660705476999283,
      "learning_rate": 6.78306108373217e-06,
      "loss": 0.7756,
      "step": 1330
    },
    {
      "epoch": 3.0454545454545454,
      "grad_norm": 0.04617464914917946,
      "learning_rate": 6.730781743277498e-06,
      "loss": 0.79,
      "step": 1340
    },
    {
      "epoch": 3.0681818181818183,
      "grad_norm": 0.0491408072412014,
      "learning_rate": 6.678286529949303e-06,
      "loss": 0.819,
      "step": 1350
    },
    {
      "epoch": 3.090909090909091,
      "grad_norm": 0.046425145119428635,
      "learning_rate": 6.625581991247054e-06,
      "loss": 0.7697,
      "step": 1360
    },
    {
      "epoch": 3.1136363636363638,
      "grad_norm": 0.046251095831394196,
      "learning_rate": 6.5726747007784655e-06,
      "loss": 0.7601,
      "step": 1370
    },
    {
      "epoch": 3.1363636363636362,
      "grad_norm": 0.04971086233854294,
      "learning_rate": 6.5195712574395945e-06,
      "loss": 0.784,
      "step": 1380
    },
    {
      "epoch": 3.159090909090909,
      "grad_norm": 0.05220796912908554,
      "learning_rate": 6.466278284591793e-06,
      "loss": 0.7537,
      "step": 1390
    },
    {
      "epoch": 3.1818181818181817,
      "grad_norm": 0.04714221879839897,
      "learning_rate": 6.4128024292356065e-06,
      "loss": 0.8391,
      "step": 1400
    },
    {
      "epoch": 3.2045454545454546,
      "grad_norm": 0.051231637597084045,
      "learning_rate": 6.3591503611817155e-06,
      "loss": 0.8392,
      "step": 1410
    },
    {
      "epoch": 3.227272727272727,
      "grad_norm": 0.0514928475022316,
      "learning_rate": 6.305328772219044e-06,
      "loss": 0.7629,
      "step": 1420
    },
    {
      "epoch": 3.25,
      "grad_norm": 0.05148511752486229,
      "learning_rate": 6.251344375280124e-06,
      "loss": 0.8342,
      "step": 1430
    },
    {
      "epoch": 3.2727272727272725,
      "grad_norm": 0.050781212747097015,
      "learning_rate": 6.197203903603814e-06,
      "loss": 0.7909,
      "step": 1440
    },
    {
      "epoch": 3.2954545454545454,
      "grad_norm": 0.05220821499824524,
      "learning_rate": 6.142914109895499e-06,
      "loss": 0.8147,
      "step": 1450
    },
    {
      "epoch": 3.3181818181818183,
      "grad_norm": 0.055957481265068054,
      "learning_rate": 6.088481765484849e-06,
      "loss": 0.8109,
      "step": 1460
    },
    {
      "epoch": 3.340909090909091,
      "grad_norm": 0.047921840101480484,
      "learning_rate": 6.033913659481261e-06,
      "loss": 0.7386,
      "step": 1470
    },
    {
      "epoch": 3.3636363636363638,
      "grad_norm": 0.05010027065873146,
      "learning_rate": 5.979216597927088e-06,
      "loss": 0.7778,
      "step": 1480
    },
    {
      "epoch": 3.3863636363636362,
      "grad_norm": 0.05108246952295303,
      "learning_rate": 5.924397402948745e-06,
      "loss": 0.7815,
      "step": 1490
    },
    {
      "epoch": 3.409090909090909,
      "grad_norm": 0.05210183188319206,
      "learning_rate": 5.86946291190582e-06,
      "loss": 0.8016,
      "step": 1500
    },
    {
      "epoch": 3.4318181818181817,
      "grad_norm": 0.052292466163635254,
      "learning_rate": 5.8144199765382736e-06,
      "loss": 0.7615,
      "step": 1510
    },
    {
      "epoch": 3.4545454545454546,
      "grad_norm": 0.05555827170610428,
      "learning_rate": 5.759275462111858e-06,
      "loss": 0.773,
      "step": 1520
    },
    {
      "epoch": 3.4772727272727275,
      "grad_norm": 0.05111211910843849,
      "learning_rate": 5.704036246561836e-06,
      "loss": 0.8308,
      "step": 1530
    },
    {
      "epoch": 3.5,
      "grad_norm": 0.05257695913314819,
      "learning_rate": 5.648709219635134e-06,
      "loss": 0.8092,
      "step": 1540
    },
    {
      "epoch": 3.5227272727272725,
      "grad_norm": 0.056300047785043716,
      "learning_rate": 5.5933012820310095e-06,
      "loss": 0.8009,
      "step": 1550
    },
    {
      "epoch": 3.5454545454545454,
      "grad_norm": 0.05202391743659973,
      "learning_rate": 5.537819344540349e-06,
      "loss": 0.8005,
      "step": 1560
    },
    {
      "epoch": 3.5681818181818183,
      "grad_norm": 0.0530916303396225,
      "learning_rate": 5.482270327183732e-06,
      "loss": 0.8038,
      "step": 1570
    },
    {
      "epoch": 3.590909090909091,
      "grad_norm": 0.05197259411215782,
      "learning_rate": 5.42666115834831e-06,
      "loss": 0.7946,
      "step": 1580
    },
    {
      "epoch": 3.6136363636363638,
      "grad_norm": 0.052419282495975494,
      "learning_rate": 5.37099877392367e-06,
      "loss": 0.8152,
      "step": 1590
    },
    {
      "epoch": 3.6363636363636362,
      "grad_norm": 0.06443214416503906,
      "learning_rate": 5.315290116436752e-06,
      "loss": 0.8069,
      "step": 1600
    },
    {
      "epoch": 3.659090909090909,
      "grad_norm": 0.05957184359431267,
      "learning_rate": 5.259542134185925e-06,
      "loss": 0.8128,
      "step": 1610
    },
    {
      "epoch": 3.6818181818181817,
      "grad_norm": 0.0548897311091423,
      "learning_rate": 5.203761780374372e-06,
      "loss": 0.7734,
      "step": 1620
    },
    {
      "epoch": 3.7045454545454546,
      "grad_norm": 0.05778632313013077,
      "learning_rate": 5.147956012242837e-06,
      "loss": 0.8028,
      "step": 1630
    },
    {
      "epoch": 3.7272727272727275,
      "grad_norm": 0.057780634611845016,
      "learning_rate": 5.092131790201882e-06,
      "loss": 0.7897,
      "step": 1640
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.05503814294934273,
      "learning_rate": 5.0362960769637415e-06,
      "loss": 0.7838,
      "step": 1650
    },
    {
      "epoch": 3.7727272727272725,
      "grad_norm": 0.05687331035733223,
      "learning_rate": 4.980455836673902e-06,
      "loss": 0.7852,
      "step": 1660
    },
    {
      "epoch": 3.7954545454545454,
      "grad_norm": 0.059677232056856155,
      "learning_rate": 4.924618034042488e-06,
      "loss": 0.8021,
      "step": 1670
    },
    {
      "epoch": 3.8181818181818183,
      "grad_norm": 0.056579746305942535,
      "learning_rate": 4.868789633475583e-06,
      "loss": 0.8155,
      "step": 1680
    },
    {
      "epoch": 3.840909090909091,
      "grad_norm": 0.055581267923116684,
      "learning_rate": 4.812977598206595e-06,
      "loss": 0.8065,
      "step": 1690
    },
    {
      "epoch": 3.8636363636363638,
      "grad_norm": 0.047986991703510284,
      "learning_rate": 4.757188889427761e-06,
      "loss": 0.8285,
      "step": 1700
    },
    {
      "epoch": 3.8863636363636362,
      "grad_norm": 0.059346918016672134,
      "learning_rate": 4.701430465421897e-06,
      "loss": 0.7768,
      "step": 1710
    },
    {
      "epoch": 3.909090909090909,
      "grad_norm": 0.059216298162937164,
      "learning_rate": 4.645709280694545e-06,
      "loss": 0.8109,
      "step": 1720
    },
    {
      "epoch": 3.9318181818181817,
      "grad_norm": 0.05952894687652588,
      "learning_rate": 4.590032285106541e-06,
      "loss": 0.8026,
      "step": 1730
    },
    {
      "epoch": 3.9545454545454546,
      "grad_norm": 0.057186175137758255,
      "learning_rate": 4.534406423007212e-06,
      "loss": 0.8187,
      "step": 1740
    },
    {
      "epoch": 3.9772727272727275,
      "grad_norm": 0.059881679713726044,
      "learning_rate": 4.478838632368221e-06,
      "loss": 0.8422,
      "step": 1750
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.057068806141614914,
      "learning_rate": 4.423335843918233e-06,
      "loss": 0.804,
      "step": 1760
    },
    {
      "epoch": 4.0227272727272725,
      "grad_norm": 0.05267658457159996,
      "learning_rate": 4.367904980278475e-06,
      "loss": 0.7534,
      "step": 1770
    },
    {
      "epoch": 4.045454545454546,
      "grad_norm": 0.06152062863111496,
      "learning_rate": 4.312552955099299e-06,
      "loss": 0.8301,
      "step": 1780
    },
    {
      "epoch": 4.068181818181818,
      "grad_norm": 0.05504327639937401,
      "learning_rate": 4.257286672197888e-06,
      "loss": 0.8144,
      "step": 1790
    },
    {
      "epoch": 4.090909090909091,
      "grad_norm": 0.05590361729264259,
      "learning_rate": 4.202113024697156e-06,
      "loss": 0.7977,
      "step": 1800
    },
    {
      "epoch": 4.113636363636363,
      "grad_norm": 0.06019588187336922,
      "learning_rate": 4.1470388941660076e-06,
      "loss": 0.8213,
      "step": 1810
    },
    {
      "epoch": 4.136363636363637,
      "grad_norm": 0.05402204021811485,
      "learning_rate": 4.092071149761035e-06,
      "loss": 0.7839,
      "step": 1820
    },
    {
      "epoch": 4.159090909090909,
      "grad_norm": 0.05966588109731674,
      "learning_rate": 4.037216647369745e-06,
      "loss": 0.7739,
      "step": 1830
    },
    {
      "epoch": 4.181818181818182,
      "grad_norm": 0.06286252290010452,
      "learning_rate": 3.982482228755468e-06,
      "loss": 0.8067,
      "step": 1840
    },
    {
      "epoch": 4.204545454545454,
      "grad_norm": 0.05303211137652397,
      "learning_rate": 3.9278747207039995e-06,
      "loss": 0.7935,
      "step": 1850
    },
    {
      "epoch": 4.2272727272727275,
      "grad_norm": 0.06371833384037018,
      "learning_rate": 3.873400934172137e-06,
      "loss": 0.7752,
      "step": 1860
    },
    {
      "epoch": 4.25,
      "grad_norm": 0.06287337839603424,
      "learning_rate": 3.8190676634381775e-06,
      "loss": 0.8394,
      "step": 1870
    },
    {
      "epoch": 4.2727272727272725,
      "grad_norm": 0.059799861162900925,
      "learning_rate": 3.7648816852544873e-06,
      "loss": 0.7587,
      "step": 1880
    },
    {
      "epoch": 4.295454545454546,
      "grad_norm": 0.05993163585662842,
      "learning_rate": 3.710849758002282e-06,
      "loss": 0.7891,
      "step": 1890
    },
    {
      "epoch": 4.318181818181818,
      "grad_norm": 0.05344055965542793,
      "learning_rate": 3.6569786208486692e-06,
      "loss": 0.8,
      "step": 1900
    },
    {
      "epoch": 4.340909090909091,
      "grad_norm": 0.05599065497517586,
      "learning_rate": 3.6032749929061078e-06,
      "loss": 0.8135,
      "step": 1910
    },
    {
      "epoch": 4.363636363636363,
      "grad_norm": 0.0619407594203949,
      "learning_rate": 3.5497455723943673e-06,
      "loss": 0.785,
      "step": 1920
    },
    {
      "epoch": 4.386363636363637,
      "grad_norm": 0.06416480988264084,
      "learning_rate": 3.4963970358050756e-06,
      "loss": 0.8161,
      "step": 1930
    },
    {
      "epoch": 4.409090909090909,
      "grad_norm": 0.05641242861747742,
      "learning_rate": 3.443236037068999e-06,
      "loss": 0.8188,
      "step": 1940
    },
    {
      "epoch": 4.431818181818182,
      "grad_norm": 0.05751458555459976,
      "learning_rate": 3.3902692067261256e-06,
      "loss": 0.801,
      "step": 1950
    },
    {
      "epoch": 4.454545454545454,
      "grad_norm": 0.05939492955803871,
      "learning_rate": 3.3375031510986606e-06,
      "loss": 0.775,
      "step": 1960
    },
    {
      "epoch": 4.4772727272727275,
      "grad_norm": 0.059105709195137024,
      "learning_rate": 3.2849444514670587e-06,
      "loss": 0.7853,
      "step": 1970
    },
    {
      "epoch": 4.5,
      "grad_norm": 0.06332360953092575,
      "learning_rate": 3.2325996632491597e-06,
      "loss": 0.7645,
      "step": 1980
    },
    {
      "epoch": 4.5227272727272725,
      "grad_norm": 0.05962349474430084,
      "learning_rate": 3.180475315182563e-06,
      "loss": 0.8033,
      "step": 1990
    },
    {
      "epoch": 4.545454545454545,
      "grad_norm": 0.05617254227399826,
      "learning_rate": 3.1285779085103313e-06,
      "loss": 0.7815,
      "step": 2000
    },
    {
      "epoch": 4.568181818181818,
      "grad_norm": 0.06171968951821327,
      "learning_rate": 3.07691391617011e-06,
      "loss": 0.7809,
      "step": 2010
    },
    {
      "epoch": 4.590909090909091,
      "grad_norm": 0.060661982744932175,
      "learning_rate": 3.0254897819867974e-06,
      "loss": 0.8506,
      "step": 2020
    },
    {
      "epoch": 4.613636363636363,
      "grad_norm": 0.05857427418231964,
      "learning_rate": 2.974311919868821e-06,
      "loss": 0.7433,
      "step": 2030
    },
    {
      "epoch": 4.636363636363637,
      "grad_norm": 0.06344863772392273,
      "learning_rate": 2.9233867130081677e-06,
      "loss": 0.8371,
      "step": 2040
    },
    {
      "epoch": 4.659090909090909,
      "grad_norm": 0.056676462292671204,
      "learning_rate": 2.87272051308424e-06,
      "loss": 0.8027,
      "step": 2050
    },
    {
      "epoch": 4.681818181818182,
      "grad_norm": 0.06252826750278473,
      "learning_rate": 2.822319639471619e-06,
      "loss": 0.8059,
      "step": 2060
    },
    {
      "epoch": 4.704545454545455,
      "grad_norm": 0.06604735553264618,
      "learning_rate": 2.7721903784519056e-06,
      "loss": 0.7642,
      "step": 2070
    },
    {
      "epoch": 4.7272727272727275,
      "grad_norm": 0.06098971143364906,
      "learning_rate": 2.7223389824296354e-06,
      "loss": 0.7714,
      "step": 2080
    },
    {
      "epoch": 4.75,
      "grad_norm": 0.05798983573913574,
      "learning_rate": 2.6727716691524474e-06,
      "loss": 0.8064,
      "step": 2090
    },
    {
      "epoch": 4.7727272727272725,
      "grad_norm": 0.05816567689180374,
      "learning_rate": 2.6234946209355906e-06,
      "loss": 0.7697,
      "step": 2100
    },
    {
      "epoch": 4.795454545454545,
      "grad_norm": 0.058813825249671936,
      "learning_rate": 2.574513983890803e-06,
      "loss": 0.8058,
      "step": 2110
    },
    {
      "epoch": 4.818181818181818,
      "grad_norm": 0.07025865465402603,
      "learning_rate": 2.525835867159754e-06,
      "loss": 0.8349,
      "step": 2120
    },
    {
      "epoch": 4.840909090909091,
      "grad_norm": 0.056820739060640335,
      "learning_rate": 2.4774663421520688e-06,
      "loss": 0.768,
      "step": 2130
    },
    {
      "epoch": 4.863636363636363,
      "grad_norm": 0.06224190443754196,
      "learning_rate": 2.429411441788072e-06,
      "loss": 0.7897,
      "step": 2140
    },
    {
      "epoch": 4.886363636363637,
      "grad_norm": 0.05832672491669655,
      "learning_rate": 2.381677159746324e-06,
      "loss": 0.8114,
      "step": 2150
    },
    {
      "epoch": 4.909090909090909,
      "grad_norm": 0.059900589287281036,
      "learning_rate": 2.334269449716058e-06,
      "loss": 0.7891,
      "step": 2160
    },
    {
      "epoch": 4.931818181818182,
      "grad_norm": 0.0635821595788002,
      "learning_rate": 2.287194224654602e-06,
      "loss": 0.8026,
      "step": 2170
    },
    {
      "epoch": 4.954545454545455,
      "grad_norm": 0.05612201988697052,
      "learning_rate": 2.24045735604988e-06,
      "loss": 0.7916,
      "step": 2180
    },
    {
      "epoch": 4.9772727272727275,
      "grad_norm": 0.05763179436326027,
      "learning_rate": 2.1940646731880887e-06,
      "loss": 0.7679,
      "step": 2190
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.05941809341311455,
      "learning_rate": 2.148021962426635e-06,
      "loss": 0.8058,
      "step": 2200
    },
    {
      "epoch": 5.0227272727272725,
      "grad_norm": 0.07011684030294418,
      "learning_rate": 2.102334966472429e-06,
      "loss": 0.8188,
      "step": 2210
    },
    {
      "epoch": 5.045454545454546,
      "grad_norm": 0.056112293154001236,
      "learning_rate": 2.057009383665621e-06,
      "loss": 0.7685,
      "step": 2220
    },
    {
      "epoch": 5.068181818181818,
      "grad_norm": 0.05940523371100426,
      "learning_rate": 2.012050867268873e-06,
      "loss": 0.7676,
      "step": 2230
    },
    {
      "epoch": 5.090909090909091,
      "grad_norm": 0.06017325818538666,
      "learning_rate": 1.967465024762251e-06,
      "loss": 0.7589,
      "step": 2240
    },
    {
      "epoch": 5.113636363636363,
      "grad_norm": 0.05958075076341629,
      "learning_rate": 1.9232574171438273e-06,
      "loss": 0.8143,
      "step": 2250
    },
    {
      "epoch": 5.136363636363637,
      "grad_norm": 0.059776924550533295,
      "learning_rate": 1.8794335582360873e-06,
      "loss": 0.8186,
      "step": 2260
    },
    {
      "epoch": 5.159090909090909,
      "grad_norm": 0.061050403863191605,
      "learning_rate": 1.8359989139982021e-06,
      "loss": 0.7815,
      "step": 2270
    },
    {
      "epoch": 5.181818181818182,
      "grad_norm": 0.05887814611196518,
      "learning_rate": 1.7929589018443016e-06,
      "loss": 0.8223,
      "step": 2280
    },
    {
      "epoch": 5.204545454545454,
      "grad_norm": 0.0575253888964653,
      "learning_rate": 1.7503188899677642e-06,
      "loss": 0.8012,
      "step": 2290
    },
    {
      "epoch": 5.2272727272727275,
      "grad_norm": 0.06290145218372345,
      "learning_rate": 1.7080841966716777e-06,
      "loss": 0.7403,
      "step": 2300
    },
    {
      "epoch": 5.25,
      "grad_norm": 0.06359874457120895,
      "learning_rate": 1.6662600897055132e-06,
      "loss": 0.8377,
      "step": 2310
    },
    {
      "epoch": 5.2727272727272725,
      "grad_norm": 0.06408390402793884,
      "learning_rate": 1.6248517856080848e-06,
      "loss": 0.7776,
      "step": 2320
    },
    {
      "epoch": 5.295454545454546,
      "grad_norm": 0.0607023611664772,
      "learning_rate": 1.5838644490569372e-06,
      "loss": 0.8408,
      "step": 2330
    },
    {
      "epoch": 5.318181818181818,
      "grad_norm": 0.06409065425395966,
      "learning_rate": 1.5433031922241538e-06,
      "loss": 0.8198,
      "step": 2340
    },
    {
      "epoch": 5.340909090909091,
      "grad_norm": 0.06282657384872437,
      "learning_rate": 1.5031730741387567e-06,
      "loss": 0.7881,
      "step": 2350
    },
    {
      "epoch": 5.363636363636363,
      "grad_norm": 0.06287471950054169,
      "learning_rate": 1.4634791000557058e-06,
      "loss": 0.7462,
      "step": 2360
    },
    {
      "epoch": 5.386363636363637,
      "grad_norm": 0.06724315136671066,
      "learning_rate": 1.4242262208316187e-06,
      "loss": 0.8024,
      "step": 2370
    },
    {
      "epoch": 5.409090909090909,
      "grad_norm": 0.061781395226716995,
      "learning_rate": 1.3854193323072707e-06,
      "loss": 0.7826,
      "step": 2380
    },
    {
      "epoch": 5.431818181818182,
      "grad_norm": 0.06064334139227867,
      "learning_rate": 1.3470632746969586e-06,
      "loss": 0.7883,
      "step": 2390
    },
    {
      "epoch": 5.454545454545454,
      "grad_norm": 0.05812542885541916,
      "learning_rate": 1.3091628319848016e-06,
      "loss": 0.7925,
      "step": 2400
    },
    {
      "epoch": 5.4772727272727275,
      "grad_norm": 0.059440333396196365,
      "learning_rate": 1.271722731328055e-06,
      "loss": 0.7692,
      "step": 2410
    },
    {
      "epoch": 5.5,
      "grad_norm": 0.06575117260217667,
      "learning_rate": 1.2347476424675142e-06,
      "loss": 0.7772,
      "step": 2420
    },
    {
      "epoch": 5.5227272727272725,
      "grad_norm": 0.06302516162395477,
      "learning_rate": 1.1982421771450775e-06,
      "loss": 0.7927,
      "step": 2430
    },
    {
      "epoch": 5.545454545454545,
      "grad_norm": 0.06540846079587936,
      "learning_rate": 1.1622108885285415e-06,
      "loss": 0.7724,
      "step": 2440
    },
    {
      "epoch": 5.568181818181818,
      "grad_norm": 0.06591441482305527,
      "learning_rate": 1.126658270643708e-06,
      "loss": 0.779,
      "step": 2450
    },
    {
      "epoch": 5.590909090909091,
      "grad_norm": 0.06068432703614235,
      "learning_rate": 1.0915887578138606e-06,
      "loss": 0.7768,
      "step": 2460
    },
    {
      "epoch": 5.613636363636363,
      "grad_norm": 0.0614112988114357,
      "learning_rate": 1.057006724106695e-06,
      "loss": 0.8061,
      "step": 2470
    },
    {
      "epoch": 5.636363636363637,
      "grad_norm": 0.06147473677992821,
      "learning_rate": 1.0229164827887512e-06,
      "loss": 0.805,
      "step": 2480
    },
    {
      "epoch": 5.659090909090909,
      "grad_norm": 0.06091708317399025,
      "learning_rate": 9.89322285787454e-07,
      "loss": 0.7883,
      "step": 2490
    },
    {
      "epoch": 5.681818181818182,
      "grad_norm": 0.05688236653804779,
      "learning_rate": 9.5622832316077e-07,
      "loss": 0.7763,
      "step": 2500
    },
    {
      "epoch": 5.704545454545455,
      "grad_norm": 0.06602354347705841,
      "learning_rate": 9.236387225746185e-07,
      "loss": 0.809,
      "step": 2510
    },
    {
      "epoch": 5.7272727272727275,
      "grad_norm": 0.06536935269832611,
      "learning_rate": 8.915575487880324e-07,
      "loss": 0.779,
      "step": 2520
    },
    {
      "epoch": 5.75,
      "grad_norm": 0.06449291855096817,
      "learning_rate": 8.599888031461801e-07,
      "loss": 0.7984,
      "step": 2530
    },
    {
      "epoch": 5.7727272727272725,
      "grad_norm": 0.06858620792627335,
      "learning_rate": 8.289364230813052e-07,
      "loss": 0.8298,
      "step": 2540
    },
    {
      "epoch": 5.795454545454545,
      "grad_norm": 0.05864429101347923,
      "learning_rate": 7.984042816216109e-07,
      "loss": 0.7846,
      "step": 2550
    },
    {
      "epoch": 5.818181818181818,
      "grad_norm": 0.059347003698349,
      "learning_rate": 7.683961869082146e-07,
      "loss": 0.8246,
      "step": 2560
    },
    {
      "epoch": 5.840909090909091,
      "grad_norm": 0.06505056470632553,
      "learning_rate": 7.389158817201541e-07,
      "loss": 0.811,
      "step": 2570
    },
    {
      "epoch": 5.863636363636363,
      "grad_norm": 0.06296805292367935,
      "learning_rate": 7.099670430075828e-07,
      "loss": 0.8004,
      "step": 2580
    },
    {
      "epoch": 5.886363636363637,
      "grad_norm": 0.06585711985826492,
      "learning_rate": 6.815532814331505e-07,
      "loss": 0.7814,
      "step": 2590
    },
    {
      "epoch": 5.909090909090909,
      "grad_norm": 0.06298382580280304,
      "learning_rate": 6.536781409216664e-07,
      "loss": 0.8115,
      "step": 2600
    },
    {
      "epoch": 5.931818181818182,
      "grad_norm": 0.06103487312793732,
      "learning_rate": 6.263450982180813e-07,
      "loss": 0.8036,
      "step": 2610
    },
    {
      "epoch": 5.954545454545455,
      "grad_norm": 0.06315191090106964,
      "learning_rate": 5.995575624538397e-07,
      "loss": 0.7637,
      "step": 2620
    },
    {
      "epoch": 5.9772727272727275,
      "grad_norm": 0.06248250976204872,
      "learning_rate": 5.733188747216845e-07,
      "loss": 0.7912,
      "step": 2630
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.06412005424499512,
      "learning_rate": 5.476323076589319e-07,
      "loss": 0.815,
      "step": 2640
    },
    {
      "epoch": 6.0227272727272725,
      "grad_norm": 0.060897987335920334,
      "learning_rate": 5.225010650392887e-07,
      "loss": 0.8012,
      "step": 2650
    },
    {
      "epoch": 6.045454545454546,
      "grad_norm": 0.06144240126013756,
      "learning_rate": 4.979282813732622e-07,
      "loss": 0.798,
      "step": 2660
    },
    {
      "epoch": 6.068181818181818,
      "grad_norm": 0.05944688990712166,
      "learning_rate": 4.739170215172051e-07,
      "loss": 0.7948,
      "step": 2670
    },
    {
      "epoch": 6.090909090909091,
      "grad_norm": 0.06139964610338211,
      "learning_rate": 4.5047028029104877e-07,
      "loss": 0.8287,
      "step": 2680
    },
    {
      "epoch": 6.113636363636363,
      "grad_norm": 0.06420081108808517,
      "learning_rate": 4.27590982104773e-07,
      "loss": 0.7973,
      "step": 2690
    },
    {
      "epoch": 6.136363636363637,
      "grad_norm": 0.06291543692350388,
      "learning_rate": 4.0528198059365863e-07,
      "loss": 0.7956,
      "step": 2700
    },
    {
      "epoch": 6.159090909090909,
      "grad_norm": 0.05762983486056328,
      "learning_rate": 3.8354605826235957e-07,
      "loss": 0.8054,
      "step": 2710
    },
    {
      "epoch": 6.181818181818182,
      "grad_norm": 0.05916955694556236,
      "learning_rate": 3.623859261378654e-07,
      "loss": 0.7634,
      "step": 2720
    },
    {
      "epoch": 6.204545454545454,
      "grad_norm": 0.064091257750988,
      "learning_rate": 3.4180422343135346e-07,
      "loss": 0.8345,
      "step": 2730
    },
    {
      "epoch": 6.2272727272727275,
      "grad_norm": 0.061927612870931625,
      "learning_rate": 3.2180351720902003e-07,
      "loss": 0.8129,
      "step": 2740
    },
    {
      "epoch": 6.25,
      "grad_norm": 0.06499668955802917,
      "learning_rate": 3.0238630207189867e-07,
      "loss": 0.8057,
      "step": 2750
    },
    {
      "epoch": 6.2727272727272725,
      "grad_norm": 0.06719426810741425,
      "learning_rate": 2.8355499984471737e-07,
      "loss": 0.7677,
      "step": 2760
    },
    {
      "epoch": 6.295454545454546,
      "grad_norm": 0.07048514485359192,
      "learning_rate": 2.6531195927384067e-07,
      "loss": 0.8099,
      "step": 2770
    },
    {
      "epoch": 6.318181818181818,
      "grad_norm": 0.06214950233697891,
      "learning_rate": 2.476594557343126e-07,
      "loss": 0.7769,
      "step": 2780
    },
    {
      "epoch": 6.340909090909091,
      "grad_norm": 0.06586616486310959,
      "learning_rate": 2.305996909460695e-07,
      "loss": 0.8022,
      "step": 2790
    },
    {
      "epoch": 6.363636363636363,
      "grad_norm": 0.06200889125466347,
      "learning_rate": 2.141347926993176e-07,
      "loss": 0.771,
      "step": 2800
    },
    {
      "epoch": 6.386363636363637,
      "grad_norm": 0.058896422386169434,
      "learning_rate": 1.9826681458915088e-07,
      "loss": 0.7611,
      "step": 2810
    },
    {
      "epoch": 6.409090909090909,
      "grad_norm": 0.06591584533452988,
      "learning_rate": 1.8299773575941483e-07,
      "loss": 0.8151,
      "step": 2820
    },
    {
      "epoch": 6.431818181818182,
      "grad_norm": 0.05796649307012558,
      "learning_rate": 1.6832946065585e-07,
      "loss": 0.8017,
      "step": 2830
    },
    {
      "epoch": 6.454545454545454,
      "grad_norm": 0.059807244688272476,
      "learning_rate": 1.542638187885681e-07,
      "loss": 0.812,
      "step": 2840
    },
    {
      "epoch": 6.4772727272727275,
      "grad_norm": 0.06900903582572937,
      "learning_rate": 1.4080256450385578e-07,
      "loss": 0.8105,
      "step": 2850
    },
    {
      "epoch": 6.5,
      "grad_norm": 0.06508809328079224,
      "learning_rate": 1.2794737676536993e-07,
      "loss": 0.7147,
      "step": 2860
    },
    {
      "epoch": 6.5227272727272725,
      "grad_norm": 0.06716930121183395,
      "learning_rate": 1.1569985894472446e-07,
      "loss": 0.7677,
      "step": 2870
    },
    {
      "epoch": 6.545454545454545,
      "grad_norm": 0.06471340358257294,
      "learning_rate": 1.0406153862150881e-07,
      "loss": 0.82,
      "step": 2880
    },
    {
      "epoch": 6.568181818181818,
      "grad_norm": 0.05981110781431198,
      "learning_rate": 9.30338673927611e-08,
      "loss": 0.7904,
      "step": 2890
    },
    {
      "epoch": 6.590909090909091,
      "grad_norm": 0.06340961158275604,
      "learning_rate": 8.261822069191461e-08,
      "loss": 0.8186,
      "step": 2900
    },
    {
      "epoch": 6.613636363636363,
      "grad_norm": 0.06290656328201294,
      "learning_rate": 7.281589761724828e-08,
      "loss": 0.7665,
      "step": 2910
    },
    {
      "epoch": 6.636363636363637,
      "grad_norm": 0.07086827605962753,
      "learning_rate": 6.362812076985359e-08,
      "loss": 0.7613,
      "step": 2920
    },
    {
      "epoch": 6.659090909090909,
      "grad_norm": 0.06620458513498306,
      "learning_rate": 5.505603610114651e-08,
      "loss": 0.7762,
      "step": 2930
    },
    {
      "epoch": 6.681818181818182,
      "grad_norm": 0.06518013775348663,
      "learning_rate": 4.7100712769937994e-08,
      "loss": 0.7707,
      "step": 2940
    },
    {
      "epoch": 6.704545454545455,
      "grad_norm": 0.06568311899900436,
      "learning_rate": 3.976314300908002e-08,
      "loss": 0.8052,
      "step": 2950
    },
    {
      "epoch": 6.7272727272727275,
      "grad_norm": 0.06033078953623772,
      "learning_rate": 3.3044242001712415e-08,
      "loss": 0.7725,
      "step": 2960
    },
    {
      "epoch": 6.75,
      "grad_norm": 0.060128528624773026,
      "learning_rate": 2.6944847767114146e-08,
      "loss": 0.7712,
      "step": 2970
    },
    {
      "epoch": 6.7727272727272725,
      "grad_norm": 0.06406668573617935,
      "learning_rate": 2.1465721056179168e-08,
      "loss": 0.8116,
      "step": 2980
    },
    {
      "epoch": 6.795454545454545,
      "grad_norm": 0.05837489292025566,
      "learning_rate": 1.6607545256533407e-08,
      "loss": 0.8084,
      "step": 2990
    },
    {
      "epoch": 6.818181818181818,
      "grad_norm": 0.06511975079774857,
      "learning_rate": 1.2370926307300746e-08,
      "loss": 0.8018,
      "step": 3000
    },
    {
      "epoch": 6.840909090909091,
      "grad_norm": 0.0646391287446022,
      "learning_rate": 8.756392623521237e-09,
      "loss": 0.7921,
      "step": 3010
    },
    {
      "epoch": 6.863636363636363,
      "grad_norm": 0.06122921034693718,
      "learning_rate": 5.7643950302488375e-09,
      "loss": 0.7981,
      "step": 3020
    },
    {
      "epoch": 6.886363636363637,
      "grad_norm": 0.06278009712696075,
      "learning_rate": 3.3953067063213772e-09,
      "loss": 0.7755,
      "step": 3030
    },
    {
      "epoch": 6.909090909090909,
      "grad_norm": 0.06424996256828308,
      "learning_rate": 1.649423137813355e-09,
      "loss": 0.7825,
      "step": 3040
    },
    {
      "epoch": 6.931818181818182,
      "grad_norm": 0.07108237594366074,
      "learning_rate": 5.26962081183191e-10,
      "loss": 0.8121,
      "step": 3050
    },
    {
      "epoch": 6.954545454545455,
      "grad_norm": 0.06434624642133713,
      "learning_rate": 2.806353611273238e-11,
      "loss": 0.8185,
      "step": 3060
    }
  ],
  "logging_steps": 10,
  "max_steps": 3063,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.098946440672051e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}