{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2032,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0004921259842519685,
      "grad_norm": 23.31243227887501,
      "learning_rate": 4.901960784313726e-08,
      "loss": 1.3041,
      "step": 1
    },
    {
      "epoch": 0.0024606299212598425,
      "grad_norm": 22.65150067024134,
      "learning_rate": 2.4509803921568627e-07,
      "loss": 1.3265,
      "step": 5
    },
    {
      "epoch": 0.004921259842519685,
      "grad_norm": 15.292894125437156,
      "learning_rate": 4.901960784313725e-07,
      "loss": 1.2787,
      "step": 10
    },
    {
      "epoch": 0.0073818897637795275,
      "grad_norm": 11.631090567819937,
      "learning_rate": 7.352941176470589e-07,
      "loss": 1.1562,
      "step": 15
    },
    {
      "epoch": 0.00984251968503937,
      "grad_norm": 9.646255722079733,
      "learning_rate": 9.80392156862745e-07,
      "loss": 1.034,
      "step": 20
    },
    {
      "epoch": 0.012303149606299213,
      "grad_norm": 3.4756736835120527,
      "learning_rate": 1.2254901960784314e-06,
      "loss": 0.9172,
      "step": 25
    },
    {
      "epoch": 0.014763779527559055,
      "grad_norm": 3.5367361650105713,
      "learning_rate": 1.4705882352941177e-06,
      "loss": 0.8905,
      "step": 30
    },
    {
      "epoch": 0.0172244094488189,
      "grad_norm": 2.9532159399291715,
      "learning_rate": 1.715686274509804e-06,
      "loss": 0.8669,
      "step": 35
    },
    {
      "epoch": 0.01968503937007874,
      "grad_norm": 2.987224307405508,
      "learning_rate": 1.96078431372549e-06,
      "loss": 0.8522,
      "step": 40
    },
    {
      "epoch": 0.02214566929133858,
      "grad_norm": 3.0327169155373643,
      "learning_rate": 2.2058823529411767e-06,
      "loss": 0.836,
      "step": 45
    },
    {
      "epoch": 0.024606299212598427,
      "grad_norm": 2.9696131563676604,
      "learning_rate": 2.450980392156863e-06,
      "loss": 0.8255,
      "step": 50
    },
    {
      "epoch": 0.027066929133858268,
      "grad_norm": 2.9107492411725255,
      "learning_rate": 2.696078431372549e-06,
      "loss": 0.8164,
      "step": 55
    },
    {
      "epoch": 0.02952755905511811,
      "grad_norm": 3.179766275534896,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 0.8096,
      "step": 60
    },
    {
      "epoch": 0.03198818897637795,
      "grad_norm": 2.977082299395809,
      "learning_rate": 3.1862745098039216e-06,
      "loss": 0.7963,
      "step": 65
    },
    {
      "epoch": 0.0344488188976378,
      "grad_norm": 3.205853591397459,
      "learning_rate": 3.431372549019608e-06,
      "loss": 0.7901,
      "step": 70
    },
    {
      "epoch": 0.036909448818897635,
      "grad_norm": 3.1315630768509353,
      "learning_rate": 3.6764705882352946e-06,
      "loss": 0.7767,
      "step": 75
    },
    {
      "epoch": 0.03937007874015748,
      "grad_norm": 3.096477288852399,
      "learning_rate": 3.92156862745098e-06,
      "loss": 0.7566,
      "step": 80
    },
    {
      "epoch": 0.041830708661417325,
      "grad_norm": 3.2283660154120506,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.7644,
      "step": 85
    },
    {
      "epoch": 0.04429133858267716,
      "grad_norm": 3.1008617146769186,
      "learning_rate": 4.411764705882353e-06,
      "loss": 0.7569,
      "step": 90
    },
    {
      "epoch": 0.04675196850393701,
      "grad_norm": 3.105327114732427,
      "learning_rate": 4.65686274509804e-06,
      "loss": 0.7492,
      "step": 95
    },
    {
      "epoch": 0.04921259842519685,
      "grad_norm": 3.225174397352076,
      "learning_rate": 4.901960784313726e-06,
      "loss": 0.7429,
      "step": 100
    },
    {
      "epoch": 0.05167322834645669,
      "grad_norm": 3.141738860566073,
      "learning_rate": 5.147058823529411e-06,
      "loss": 0.739,
      "step": 105
    },
    {
      "epoch": 0.054133858267716536,
      "grad_norm": 2.906670881803452,
      "learning_rate": 5.392156862745098e-06,
      "loss": 0.7288,
      "step": 110
    },
    {
      "epoch": 0.056594488188976375,
      "grad_norm": 3.09403070052093,
      "learning_rate": 5.637254901960784e-06,
      "loss": 0.728,
      "step": 115
    },
    {
      "epoch": 0.05905511811023622,
      "grad_norm": 2.9126008928013802,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.7061,
      "step": 120
    },
    {
      "epoch": 0.061515748031496065,
      "grad_norm": 2.9961256902373083,
      "learning_rate": 6.1274509803921575e-06,
      "loss": 0.7126,
      "step": 125
    },
    {
      "epoch": 0.0639763779527559,
      "grad_norm": 3.0577685284099676,
      "learning_rate": 6.372549019607843e-06,
      "loss": 0.6999,
      "step": 130
    },
    {
      "epoch": 0.06643700787401575,
      "grad_norm": 2.8718298238643536,
      "learning_rate": 6.61764705882353e-06,
      "loss": 0.7074,
      "step": 135
    },
    {
      "epoch": 0.0688976377952756,
      "grad_norm": 2.949080366187911,
      "learning_rate": 6.862745098039216e-06,
      "loss": 0.7309,
      "step": 140
    },
    {
      "epoch": 0.07135826771653543,
      "grad_norm": 2.823397926647436,
      "learning_rate": 7.107843137254903e-06,
      "loss": 0.6996,
      "step": 145
    },
    {
      "epoch": 0.07381889763779527,
      "grad_norm": 3.316451324017047,
      "learning_rate": 7.352941176470589e-06,
      "loss": 0.7016,
      "step": 150
    },
    {
      "epoch": 0.07627952755905512,
      "grad_norm": 2.8480650348548813,
      "learning_rate": 7.598039215686275e-06,
      "loss": 0.6951,
      "step": 155
    },
    {
      "epoch": 0.07874015748031496,
      "grad_norm": 2.915178234267035,
      "learning_rate": 7.84313725490196e-06,
      "loss": 0.6966,
      "step": 160
    },
    {
      "epoch": 0.0812007874015748,
      "grad_norm": 3.0408390144425046,
      "learning_rate": 8.088235294117648e-06,
      "loss": 0.6927,
      "step": 165
    },
    {
      "epoch": 0.08366141732283465,
      "grad_norm": 2.837154093525882,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.7011,
      "step": 170
    },
    {
      "epoch": 0.08612204724409449,
      "grad_norm": 2.7540641975037876,
      "learning_rate": 8.57843137254902e-06,
      "loss": 0.695,
      "step": 175
    },
    {
      "epoch": 0.08858267716535433,
      "grad_norm": 2.7716956770569583,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.6853,
      "step": 180
    },
    {
      "epoch": 0.09104330708661418,
      "grad_norm": 2.8990662989123637,
      "learning_rate": 9.068627450980392e-06,
      "loss": 0.6857,
      "step": 185
    },
    {
      "epoch": 0.09350393700787402,
      "grad_norm": 2.9398658838050964,
      "learning_rate": 9.31372549019608e-06,
      "loss": 0.6966,
      "step": 190
    },
    {
      "epoch": 0.09596456692913385,
      "grad_norm": 2.7073170749633344,
      "learning_rate": 9.558823529411766e-06,
      "loss": 0.6963,
      "step": 195
    },
    {
      "epoch": 0.0984251968503937,
      "grad_norm": 3.2719600595061333,
      "learning_rate": 9.803921568627451e-06,
      "loss": 0.6916,
      "step": 200
    },
    {
      "epoch": 0.10088582677165354,
      "grad_norm": 2.8574324742359645,
      "learning_rate": 9.999992616075212e-06,
      "loss": 0.6875,
      "step": 205
    },
    {
      "epoch": 0.10334645669291338,
      "grad_norm": 2.806608611714717,
      "learning_rate": 9.999734180997554e-06,
      "loss": 0.7001,
      "step": 210
    },
    {
      "epoch": 0.10580708661417323,
      "grad_norm": 2.780665218604847,
      "learning_rate": 9.999106571489132e-06,
      "loss": 0.6847,
      "step": 215
    },
    {
      "epoch": 0.10826771653543307,
      "grad_norm": 2.983241394670366,
      "learning_rate": 9.998109833891883e-06,
      "loss": 0.6922,
      "step": 220
    },
    {
      "epoch": 0.11072834645669291,
      "grad_norm": 2.865172691779986,
      "learning_rate": 9.996744041803731e-06,
      "loss": 0.6728,
      "step": 225
    },
    {
      "epoch": 0.11318897637795275,
      "grad_norm": 2.7203417188474606,
      "learning_rate": 9.995009296073138e-06,
      "loss": 0.6722,
      "step": 230
    },
    {
      "epoch": 0.1156496062992126,
      "grad_norm": 2.815878121246174,
      "learning_rate": 9.992905724791669e-06,
      "loss": 0.6714,
      "step": 235
    },
    {
      "epoch": 0.11811023622047244,
      "grad_norm": 2.5449098471373426,
      "learning_rate": 9.990433483284527e-06,
      "loss": 0.666,
      "step": 240
    },
    {
      "epoch": 0.12057086614173228,
      "grad_norm": 2.666741563905933,
      "learning_rate": 9.987592754099086e-06,
      "loss": 0.6719,
      "step": 245
    },
    {
      "epoch": 0.12303149606299213,
      "grad_norm": 2.62243520796961,
      "learning_rate": 9.984383746991416e-06,
      "loss": 0.6748,
      "step": 250
    },
    {
      "epoch": 0.12549212598425197,
      "grad_norm": 2.596847921065236,
      "learning_rate": 9.980806698910787e-06,
      "loss": 0.6756,
      "step": 255
    },
    {
      "epoch": 0.1279527559055118,
      "grad_norm": 2.6151984578687033,
      "learning_rate": 9.976861873982177e-06,
      "loss": 0.685,
      "step": 260
    },
    {
      "epoch": 0.13041338582677164,
      "grad_norm": 2.5709337687836404,
      "learning_rate": 9.972549563486776e-06,
      "loss": 0.6798,
      "step": 265
    },
    {
      "epoch": 0.1328740157480315,
      "grad_norm": 2.5668696343287998,
      "learning_rate": 9.967870085840463e-06,
      "loss": 0.6816,
      "step": 270
    },
    {
      "epoch": 0.13533464566929135,
      "grad_norm": 2.352396761416029,
      "learning_rate": 9.962823786570306e-06,
      "loss": 0.6706,
      "step": 275
    },
    {
      "epoch": 0.1377952755905512,
      "grad_norm": 2.444524340997986,
      "learning_rate": 9.95741103828905e-06,
      "loss": 0.6671,
      "step": 280
    },
    {
      "epoch": 0.14025590551181102,
      "grad_norm": 2.7130142131683086,
      "learning_rate": 9.951632240667592e-06,
      "loss": 0.6652,
      "step": 285
    },
    {
      "epoch": 0.14271653543307086,
      "grad_norm": 2.5508688935496684,
      "learning_rate": 9.945487820405487e-06,
      "loss": 0.6647,
      "step": 290
    },
    {
      "epoch": 0.1451771653543307,
      "grad_norm": 2.4817419773819935,
      "learning_rate": 9.938978231199419e-06,
      "loss": 0.6788,
      "step": 295
    },
    {
      "epoch": 0.14763779527559054,
      "grad_norm": 2.639378213034635,
      "learning_rate": 9.932103953709724e-06,
      "loss": 0.659,
      "step": 300
    },
    {
      "epoch": 0.1500984251968504,
      "grad_norm": 2.5183413163268487,
      "learning_rate": 9.924865495524884e-06,
      "loss": 0.674,
      "step": 305
    },
    {
      "epoch": 0.15255905511811024,
      "grad_norm": 2.8881622271285976,
      "learning_rate": 9.917263391124046e-06,
      "loss": 0.6464,
      "step": 310
    },
    {
      "epoch": 0.15501968503937008,
      "grad_norm": 2.6428697378846797,
      "learning_rate": 9.90929820183757e-06,
      "loss": 0.6494,
      "step": 315
    },
    {
      "epoch": 0.15748031496062992,
      "grad_norm": 3.1947867208966114,
      "learning_rate": 9.900970515805564e-06,
      "loss": 0.6528,
      "step": 320
    },
    {
      "epoch": 0.15994094488188976,
      "grad_norm": 2.5034057561523793,
      "learning_rate": 9.892280947934472e-06,
      "loss": 0.6516,
      "step": 325
    },
    {
      "epoch": 0.1624015748031496,
      "grad_norm": 2.572567301520793,
      "learning_rate": 9.883230139851656e-06,
      "loss": 0.6498,
      "step": 330
    },
    {
      "epoch": 0.16486220472440946,
      "grad_norm": 2.6660264292368563,
      "learning_rate": 9.873818759858034e-06,
      "loss": 0.6406,
      "step": 335
    },
    {
      "epoch": 0.1673228346456693,
      "grad_norm": 2.4560816689361253,
      "learning_rate": 9.864047502878717e-06,
      "loss": 0.651,
      "step": 340
    },
    {
      "epoch": 0.16978346456692914,
      "grad_norm": 2.678597823681308,
      "learning_rate": 9.853917090411713e-06,
      "loss": 0.6578,
      "step": 345
    },
    {
      "epoch": 0.17224409448818898,
      "grad_norm": 2.6171604231492,
      "learning_rate": 9.84342827047464e-06,
      "loss": 0.6348,
      "step": 350
    },
    {
      "epoch": 0.1747047244094488,
      "grad_norm": 2.551899353239778,
      "learning_rate": 9.832581817549497e-06,
      "loss": 0.6219,
      "step": 355
    },
    {
      "epoch": 0.17716535433070865,
      "grad_norm": 2.4276351136649765,
      "learning_rate": 9.821378532525479e-06,
      "loss": 0.642,
      "step": 360
    },
    {
      "epoch": 0.1796259842519685,
      "grad_norm": 2.422853104745474,
      "learning_rate": 9.809819242639841e-06,
      "loss": 0.6204,
      "step": 365
    },
    {
      "epoch": 0.18208661417322836,
      "grad_norm": 2.6470697866805684,
      "learning_rate": 9.79790480141681e-06,
      "loss": 0.6394,
      "step": 370
    },
    {
      "epoch": 0.1845472440944882,
      "grad_norm": 2.6994688633424917,
      "learning_rate": 9.785636088604571e-06,
      "loss": 0.6406,
      "step": 375
    },
    {
      "epoch": 0.18700787401574803,
      "grad_norm": 2.4061138828452693,
      "learning_rate": 9.773014010110298e-06,
      "loss": 0.6492,
      "step": 380
    },
    {
      "epoch": 0.18946850393700787,
      "grad_norm": 2.370123273088171,
      "learning_rate": 9.760039497933266e-06,
      "loss": 0.6414,
      "step": 385
    },
    {
      "epoch": 0.1919291338582677,
      "grad_norm": 2.4482559145385867,
      "learning_rate": 9.74671351009604e-06,
      "loss": 0.6536,
      "step": 390
    },
    {
      "epoch": 0.19438976377952755,
      "grad_norm": 2.55122301106893,
      "learning_rate": 9.733037030573725e-06,
      "loss": 0.6282,
      "step": 395
    },
    {
      "epoch": 0.1968503937007874,
      "grad_norm": 2.6881092845236494,
      "learning_rate": 9.719011069221316e-06,
      "loss": 0.6393,
      "step": 400
    },
    {
      "epoch": 0.19931102362204725,
      "grad_norm": 2.564008663360688,
      "learning_rate": 9.704636661699133e-06,
      "loss": 0.6136,
      "step": 405
    },
    {
      "epoch": 0.2017716535433071,
      "grad_norm": 2.6715128106285397,
      "learning_rate": 9.68991486939635e-06,
      "loss": 0.633,
      "step": 410
    },
    {
      "epoch": 0.20423228346456693,
      "grad_norm": 2.551797597800453,
      "learning_rate": 9.674846779352613e-06,
      "loss": 0.6156,
      "step": 415
    },
    {
      "epoch": 0.20669291338582677,
      "grad_norm": 2.3786438893017516,
      "learning_rate": 9.659433504177786e-06,
      "loss": 0.6106,
      "step": 420
    },
    {
      "epoch": 0.2091535433070866,
      "grad_norm": 2.3796641628030213,
      "learning_rate": 9.643676181969792e-06,
      "loss": 0.6195,
      "step": 425
    },
    {
      "epoch": 0.21161417322834647,
      "grad_norm": 2.5423570668621336,
      "learning_rate": 9.62757597623058e-06,
      "loss": 0.6079,
      "step": 430
    },
    {
      "epoch": 0.2140748031496063,
      "grad_norm": 2.474384235864706,
      "learning_rate": 9.611134075780209e-06,
      "loss": 0.6209,
      "step": 435
    },
    {
      "epoch": 0.21653543307086615,
      "grad_norm": 2.397344059591843,
      "learning_rate": 9.59435169466907e-06,
      "loss": 0.6243,
      "step": 440
    },
    {
      "epoch": 0.21899606299212598,
      "grad_norm": 3.1088975724873107,
      "learning_rate": 9.577230072088246e-06,
      "loss": 0.6127,
      "step": 445
    },
    {
      "epoch": 0.22145669291338582,
      "grad_norm": 2.4594892986106163,
      "learning_rate": 9.559770472277996e-06,
      "loss": 0.6106,
      "step": 450
    },
    {
      "epoch": 0.22391732283464566,
      "grad_norm": 2.9569346766183284,
      "learning_rate": 9.541974184434426e-06,
      "loss": 0.609,
      "step": 455
    },
    {
      "epoch": 0.2263779527559055,
      "grad_norm": 2.4846238281160047,
      "learning_rate": 9.523842522614285e-06,
      "loss": 0.6043,
      "step": 460
    },
    {
      "epoch": 0.22883858267716536,
      "grad_norm": 2.935509859202847,
      "learning_rate": 9.505376825637933e-06,
      "loss": 0.596,
      "step": 465
    },
    {
      "epoch": 0.2312992125984252,
      "grad_norm": 2.5763492949566857,
      "learning_rate": 9.486578456990494e-06,
      "loss": 0.6023,
      "step": 470
    },
    {
      "epoch": 0.23375984251968504,
      "grad_norm": 2.3536525504543886,
      "learning_rate": 9.467448804721171e-06,
      "loss": 0.5847,
      "step": 475
    },
    {
      "epoch": 0.23622047244094488,
      "grad_norm": 2.4574335684690167,
      "learning_rate": 9.447989281340753e-06,
      "loss": 0.6047,
      "step": 480
    },
    {
      "epoch": 0.23868110236220472,
      "grad_norm": 2.624076017146294,
      "learning_rate": 9.428201323717327e-06,
      "loss": 0.6062,
      "step": 485
    },
    {
      "epoch": 0.24114173228346455,
      "grad_norm": 2.4691502767185605,
      "learning_rate": 9.408086392970167e-06,
      "loss": 0.6054,
      "step": 490
    },
    {
      "epoch": 0.24360236220472442,
      "grad_norm": 2.414421822722432,
      "learning_rate": 9.387645974361858e-06,
      "loss": 0.5915,
      "step": 495
    },
    {
      "epoch": 0.24606299212598426,
      "grad_norm": 2.5729864933782407,
      "learning_rate": 9.36688157718862e-06,
      "loss": 0.577,
      "step": 500
    },
    {
      "epoch": 0.2485236220472441,
      "grad_norm": 2.433681147757855,
      "learning_rate": 9.345794734668866e-06,
      "loss": 0.5917,
      "step": 505
    },
    {
      "epoch": 0.25098425196850394,
      "grad_norm": 2.3154418135227623,
      "learning_rate": 9.324387003829993e-06,
      "loss": 0.5998,
      "step": 510
    },
    {
      "epoch": 0.2534448818897638,
      "grad_norm": 2.55257772483269,
      "learning_rate": 9.302659965393404e-06,
      "loss": 0.5857,
      "step": 515
    },
    {
      "epoch": 0.2559055118110236,
      "grad_norm": 2.4946365361225644,
      "learning_rate": 9.280615223657801e-06,
      "loss": 0.5827,
      "step": 520
    },
    {
      "epoch": 0.25836614173228345,
      "grad_norm": 2.500526915346629,
      "learning_rate": 9.258254406380718e-06,
      "loss": 0.5995,
      "step": 525
    },
    {
      "epoch": 0.2608267716535433,
      "grad_norm": 2.3817364291512018,
      "learning_rate": 9.23557916465833e-06,
      "loss": 0.5867,
      "step": 530
    },
    {
      "epoch": 0.2632874015748031,
      "grad_norm": 2.4753865562956174,
      "learning_rate": 9.212591172803541e-06,
      "loss": 0.5783,
      "step": 535
    },
    {
      "epoch": 0.265748031496063,
      "grad_norm": 2.5801050004231065,
      "learning_rate": 9.189292128222355e-06,
      "loss": 0.5813,
      "step": 540
    },
    {
      "epoch": 0.26820866141732286,
      "grad_norm": 2.584784920772415,
      "learning_rate": 9.165683751288537e-06,
      "loss": 0.5983,
      "step": 545
    },
    {
      "epoch": 0.2706692913385827,
      "grad_norm": 2.450271967521756,
      "learning_rate": 9.141767785216585e-06,
      "loss": 0.5769,
      "step": 550
    },
    {
      "epoch": 0.27312992125984253,
      "grad_norm": 2.6039636332560647,
      "learning_rate": 9.117545995933015e-06,
      "loss": 0.5718,
      "step": 555
    },
    {
      "epoch": 0.2755905511811024,
      "grad_norm": 2.601231973112569,
      "learning_rate": 9.093020171945966e-06,
      "loss": 0.5667,
      "step": 560
    },
    {
      "epoch": 0.2780511811023622,
      "grad_norm": 2.3467484299199004,
      "learning_rate": 9.068192124213135e-06,
      "loss": 0.5672,
      "step": 565
    },
    {
      "epoch": 0.28051181102362205,
      "grad_norm": 2.3680256671083555,
      "learning_rate": 9.043063686008066e-06,
      "loss": 0.57,
      "step": 570
    },
    {
      "epoch": 0.2829724409448819,
      "grad_norm": 2.4137455409633786,
      "learning_rate": 9.017636712784776e-06,
      "loss": 0.5712,
      "step": 575
    },
    {
      "epoch": 0.2854330708661417,
      "grad_norm": 2.4018818185261113,
      "learning_rate": 8.991913082040752e-06,
      "loss": 0.5711,
      "step": 580
    },
    {
      "epoch": 0.28789370078740156,
      "grad_norm": 2.3925227881413758,
      "learning_rate": 8.96589469317832e-06,
      "loss": 0.5744,
      "step": 585
    },
    {
      "epoch": 0.2903543307086614,
      "grad_norm": 2.419064751860148,
      "learning_rate": 8.9395834673644e-06,
      "loss": 0.5645,
      "step": 590
    },
    {
      "epoch": 0.29281496062992124,
      "grad_norm": 2.4183358211763117,
      "learning_rate": 8.912981347388634e-06,
      "loss": 0.5713,
      "step": 595
    },
    {
      "epoch": 0.2952755905511811,
      "grad_norm": 2.4035996214865385,
      "learning_rate": 8.886090297519956e-06,
      "loss": 0.5586,
      "step": 600
    },
    {
      "epoch": 0.29773622047244097,
      "grad_norm": 2.3898298313986746,
      "learning_rate": 8.85891230336153e-06,
      "loss": 0.5753,
      "step": 605
    },
    {
      "epoch": 0.3001968503937008,
      "grad_norm": 2.3838569096704174,
      "learning_rate": 8.83144937170415e-06,
      "loss": 0.5537,
      "step": 610
    },
    {
      "epoch": 0.30265748031496065,
      "grad_norm": 2.3434733442165694,
      "learning_rate": 8.803703530378059e-06,
      "loss": 0.5519,
      "step": 615
    },
    {
      "epoch": 0.3051181102362205,
      "grad_norm": 2.411303416705029,
      "learning_rate": 8.775676828103205e-06,
      "loss": 0.5566,
      "step": 620
    },
    {
      "epoch": 0.3075787401574803,
      "grad_norm": 2.274050270642361,
      "learning_rate": 8.747371334337983e-06,
      "loss": 0.5724,
      "step": 625
    },
    {
      "epoch": 0.31003937007874016,
      "grad_norm": 2.27730588028585,
      "learning_rate": 8.718789139126417e-06,
      "loss": 0.5455,
      "step": 630
    },
    {
      "epoch": 0.3125,
      "grad_norm": 2.5754926590587583,
      "learning_rate": 8.689932352943837e-06,
      "loss": 0.5429,
      "step": 635
    },
    {
      "epoch": 0.31496062992125984,
      "grad_norm": 2.4172029994058675,
      "learning_rate": 8.660803106541044e-06,
      "loss": 0.5479,
      "step": 640
    },
    {
      "epoch": 0.3174212598425197,
      "grad_norm": 2.5555378647777984,
      "learning_rate": 8.631403550786979e-06,
      "loss": 0.5484,
      "step": 645
    },
    {
      "epoch": 0.3198818897637795,
      "grad_norm": 2.3967597741633226,
      "learning_rate": 8.601735856509903e-06,
      "loss": 0.5469,
      "step": 650
    },
    {
      "epoch": 0.32234251968503935,
      "grad_norm": 2.373278113755569,
      "learning_rate": 8.571802214337107e-06,
      "loss": 0.5633,
      "step": 655
    },
    {
      "epoch": 0.3248031496062992,
      "grad_norm": 2.4117225792084174,
      "learning_rate": 8.541604834533159e-06,
      "loss": 0.5305,
      "step": 660
    },
    {
      "epoch": 0.32726377952755903,
      "grad_norm": 2.3453259515749987,
      "learning_rate": 8.511145946836704e-06,
      "loss": 0.5324,
      "step": 665
    },
    {
      "epoch": 0.3297244094488189,
      "grad_norm": 2.3475561192164185,
      "learning_rate": 8.48042780029581e-06,
      "loss": 0.5354,
      "step": 670
    },
    {
      "epoch": 0.33218503937007876,
      "grad_norm": 2.5000856827198885,
      "learning_rate": 8.449452663101918e-06,
      "loss": 0.5329,
      "step": 675
    },
    {
      "epoch": 0.3346456692913386,
      "grad_norm": 2.3458647990929706,
      "learning_rate": 8.418222822422348e-06,
      "loss": 0.5411,
      "step": 680
    },
    {
      "epoch": 0.33710629921259844,
      "grad_norm": 2.3189936115521057,
      "learning_rate": 8.386740584231431e-06,
      "loss": 0.5454,
      "step": 685
    },
    {
      "epoch": 0.3395669291338583,
      "grad_norm": 2.4777244895023682,
      "learning_rate": 8.355008273140222e-06,
      "loss": 0.5251,
      "step": 690
    },
    {
      "epoch": 0.3420275590551181,
      "grad_norm": 2.359411571149058,
      "learning_rate": 8.323028232224863e-06,
      "loss": 0.5302,
      "step": 695
    },
    {
      "epoch": 0.34448818897637795,
      "grad_norm": 2.5636980904043147,
      "learning_rate": 8.290802822853576e-06,
      "loss": 0.5339,
      "step": 700
    },
    {
      "epoch": 0.3469488188976378,
      "grad_norm": 2.275823789852264,
      "learning_rate": 8.258334424512293e-06,
      "loss": 0.5463,
      "step": 705
    },
    {
      "epoch": 0.3494094488188976,
      "grad_norm": 2.3400669632288045,
      "learning_rate": 8.22562543462897e-06,
      "loss": 0.5378,
      "step": 710
    },
    {
      "epoch": 0.35187007874015747,
      "grad_norm": 2.783424096566288,
      "learning_rate": 8.192678268396545e-06,
      "loss": 0.5234,
      "step": 715
    },
    {
      "epoch": 0.3543307086614173,
      "grad_norm": 2.642493538990398,
      "learning_rate": 8.159495358594627e-06,
      "loss": 0.5296,
      "step": 720
    },
    {
      "epoch": 0.35679133858267714,
      "grad_norm": 2.339987415840943,
      "learning_rate": 8.126079155409845e-06,
      "loss": 0.5226,
      "step": 725
    },
    {
      "epoch": 0.359251968503937,
      "grad_norm": 2.606591687658944,
      "learning_rate": 8.092432126254933e-06,
      "loss": 0.5024,
      "step": 730
    },
    {
      "epoch": 0.3617125984251969,
      "grad_norm": 2.3316758567132023,
      "learning_rate": 8.058556755586537e-06,
      "loss": 0.5232,
      "step": 735
    },
    {
      "epoch": 0.3641732283464567,
      "grad_norm": 2.2808248676254106,
      "learning_rate": 8.024455544721778e-06,
      "loss": 0.5242,
      "step": 740
    },
    {
      "epoch": 0.36663385826771655,
      "grad_norm": 2.339963303398186,
      "learning_rate": 7.990131011653545e-06,
      "loss": 0.5213,
      "step": 745
    },
    {
      "epoch": 0.3690944881889764,
      "grad_norm": 2.30834374014112,
      "learning_rate": 7.955585690864567e-06,
      "loss": 0.5116,
      "step": 750
    },
    {
      "epoch": 0.3715551181102362,
      "grad_norm": 2.341410358638227,
      "learning_rate": 7.920822133140285e-06,
      "loss": 0.5262,
      "step": 755
    },
    {
      "epoch": 0.37401574803149606,
      "grad_norm": 2.317753639182959,
      "learning_rate": 7.88584290538049e-06,
      "loss": 0.4944,
      "step": 760
    },
    {
      "epoch": 0.3764763779527559,
      "grad_norm": 2.362281420048018,
      "learning_rate": 7.850650590409795e-06,
      "loss": 0.5117,
      "step": 765
    },
    {
      "epoch": 0.37893700787401574,
      "grad_norm": 2.293068731009847,
      "learning_rate": 7.815247786786919e-06,
      "loss": 0.5222,
      "step": 770
    },
    {
      "epoch": 0.3813976377952756,
      "grad_norm": 2.4142944753553226,
      "learning_rate": 7.779637108612813e-06,
      "loss": 0.5184,
      "step": 775
    },
    {
      "epoch": 0.3838582677165354,
      "grad_norm": 2.295074364990194,
      "learning_rate": 7.743821185337634e-06,
      "loss": 0.5138,
      "step": 780
    },
    {
      "epoch": 0.38631889763779526,
      "grad_norm": 2.585308292041289,
      "learning_rate": 7.7078026615666e-06,
      "loss": 0.5001,
      "step": 785
    },
    {
      "epoch": 0.3887795275590551,
      "grad_norm": 2.2713254983647477,
      "learning_rate": 7.671584196864703e-06,
      "loss": 0.5147,
      "step": 790
    },
    {
      "epoch": 0.391240157480315,
      "grad_norm": 2.4376608902377654,
      "learning_rate": 7.635168465560343e-06,
      "loss": 0.507,
      "step": 795
    },
    {
      "epoch": 0.3937007874015748,
      "grad_norm": 2.4223733658282653,
      "learning_rate": 7.598558156547842e-06,
      "loss": 0.5063,
      "step": 800
    },
    {
      "epoch": 0.39616141732283466,
      "grad_norm": 2.35389116511011,
      "learning_rate": 7.561755973088917e-06,
      "loss": 0.5093,
      "step": 805
    },
    {
      "epoch": 0.3986220472440945,
      "grad_norm": 2.319034453879933,
      "learning_rate": 7.52476463261306e-06,
      "loss": 0.5098,
      "step": 810
    },
    {
      "epoch": 0.40108267716535434,
      "grad_norm": 2.272257773980295,
      "learning_rate": 7.487586866516897e-06,
      "loss": 0.4958,
      "step": 815
    },
    {
      "epoch": 0.4035433070866142,
      "grad_norm": 2.5366922080030396,
      "learning_rate": 7.450225419962498e-06,
      "loss": 0.502,
      "step": 820
    },
    {
      "epoch": 0.406003937007874,
      "grad_norm": 2.3870428918474103,
      "learning_rate": 7.412683051674681e-06,
      "loss": 0.5051,
      "step": 825
    },
    {
      "epoch": 0.40846456692913385,
      "grad_norm": 2.380345521994677,
      "learning_rate": 7.374962533737306e-06,
      "loss": 0.5016,
      "step": 830
    },
    {
      "epoch": 0.4109251968503937,
      "grad_norm": 2.427812569299047,
      "learning_rate": 7.3370666513885965e-06,
      "loss": 0.5037,
      "step": 835
    },
    {
      "epoch": 0.41338582677165353,
      "grad_norm": 2.4170189534645274,
      "learning_rate": 7.298998202815474e-06,
      "loss": 0.4844,
      "step": 840
    },
    {
      "epoch": 0.41584645669291337,
      "grad_norm": 2.3398171081039916,
      "learning_rate": 7.260759998946945e-06,
      "loss": 0.4994,
      "step": 845
    },
    {
      "epoch": 0.4183070866141732,
      "grad_norm": 2.3098501643887133,
      "learning_rate": 7.2223548632465424e-06,
      "loss": 0.4906,
      "step": 850
    },
    {
      "epoch": 0.42076771653543305,
      "grad_norm": 2.2992578109062816,
      "learning_rate": 7.183785631503851e-06,
      "loss": 0.4814,
      "step": 855
    },
    {
      "epoch": 0.42322834645669294,
      "grad_norm": 2.320153128563025,
      "learning_rate": 7.145055151625113e-06,
      "loss": 0.476,
      "step": 860
    },
    {
      "epoch": 0.4256889763779528,
      "grad_norm": 2.302377341714018,
      "learning_rate": 7.1061662834229375e-06,
      "loss": 0.4913,
      "step": 865
    },
    {
      "epoch": 0.4281496062992126,
      "grad_norm": 2.438546206078301,
      "learning_rate": 7.0671218984051385e-06,
      "loss": 0.4795,
      "step": 870
    },
    {
      "epoch": 0.43061023622047245,
      "grad_norm": 2.392813365504554,
      "learning_rate": 7.0279248795627156e-06,
      "loss": 0.4803,
      "step": 875
    },
    {
      "epoch": 0.4330708661417323,
      "grad_norm": 2.5633842472168253,
      "learning_rate": 6.988578121156956e-06,
      "loss": 0.4974,
      "step": 880
    },
    {
      "epoch": 0.43553149606299213,
      "grad_norm": 2.457385840882956,
      "learning_rate": 6.9490845285057505e-06,
      "loss": 0.4757,
      "step": 885
    },
    {
      "epoch": 0.43799212598425197,
      "grad_norm": 2.400558932262586,
      "learning_rate": 6.909447017769047e-06,
      "loss": 0.4851,
      "step": 890
    },
    {
      "epoch": 0.4404527559055118,
      "grad_norm": 2.193351075843019,
      "learning_rate": 6.869668515733536e-06,
      "loss": 0.4784,
      "step": 895
    },
    {
      "epoch": 0.44291338582677164,
      "grad_norm": 2.352406645221301,
      "learning_rate": 6.829751959596544e-06,
      "loss": 0.4777,
      "step": 900
    },
    {
      "epoch": 0.4453740157480315,
      "grad_norm": 2.253882575264478,
      "learning_rate": 6.789700296749141e-06,
      "loss": 0.4666,
      "step": 905
    },
    {
      "epoch": 0.4478346456692913,
      "grad_norm": 2.4407939968445866,
      "learning_rate": 6.749516484558518e-06,
      "loss": 0.4768,
      "step": 910
    },
    {
      "epoch": 0.45029527559055116,
      "grad_norm": 2.2491788412764264,
      "learning_rate": 6.709203490149615e-06,
      "loss": 0.4772,
      "step": 915
    },
    {
      "epoch": 0.452755905511811,
      "grad_norm": 2.4694365290945552,
      "learning_rate": 6.668764290186039e-06,
      "loss": 0.4634,
      "step": 920
    },
    {
      "epoch": 0.4552165354330709,
      "grad_norm": 2.3240221243347143,
      "learning_rate": 6.628201870650262e-06,
      "loss": 0.4604,
      "step": 925
    },
    {
      "epoch": 0.45767716535433073,
      "grad_norm": 2.3389829365748143,
      "learning_rate": 6.587519226623137e-06,
      "loss": 0.4588,
      "step": 930
    },
    {
      "epoch": 0.46013779527559057,
      "grad_norm": 2.32124589854166,
      "learning_rate": 6.546719362062763e-06,
      "loss": 0.4631,
      "step": 935
    },
    {
      "epoch": 0.4625984251968504,
      "grad_norm": 2.3451020281399337,
      "learning_rate": 6.50580528958265e-06,
      "loss": 0.463,
      "step": 940
    },
    {
      "epoch": 0.46505905511811024,
      "grad_norm": 2.3077274496575693,
      "learning_rate": 6.464780030229297e-06,
      "loss": 0.4611,
      "step": 945
    },
    {
      "epoch": 0.4675196850393701,
      "grad_norm": 2.2871501745688736,
      "learning_rate": 6.423646613259103e-06,
      "loss": 0.465,
      "step": 950
    },
    {
      "epoch": 0.4699803149606299,
      "grad_norm": 2.2800216781758533,
      "learning_rate": 6.382408075914698e-06,
      "loss": 0.4675,
      "step": 955
    },
    {
      "epoch": 0.47244094488188976,
      "grad_norm": 2.321774543216144,
      "learning_rate": 6.341067463200678e-06,
      "loss": 0.4662,
      "step": 960
    },
    {
      "epoch": 0.4749015748031496,
      "grad_norm": 2.301396790933567,
      "learning_rate": 6.299627827658757e-06,
      "loss": 0.4696,
      "step": 965
    },
    {
      "epoch": 0.47736220472440943,
      "grad_norm": 2.364754911702198,
      "learning_rate": 6.258092229142383e-06,
      "loss": 0.456,
      "step": 970
    },
    {
      "epoch": 0.47982283464566927,
      "grad_norm": 2.4175258302790024,
      "learning_rate": 6.216463734590797e-06,
      "loss": 0.4729,
      "step": 975
    },
    {
      "epoch": 0.4822834645669291,
      "grad_norm": 2.349960577391609,
      "learning_rate": 6.174745417802563e-06,
      "loss": 0.4544,
      "step": 980
    },
    {
      "epoch": 0.484744094488189,
      "grad_norm": 2.429920075114885,
      "learning_rate": 6.132940359208625e-06,
      "loss": 0.4593,
      "step": 985
    },
    {
      "epoch": 0.48720472440944884,
      "grad_norm": 2.239163716833317,
      "learning_rate": 6.09105164564483e-06,
      "loss": 0.4593,
      "step": 990
    },
    {
      "epoch": 0.4896653543307087,
      "grad_norm": 2.425828040930759,
      "learning_rate": 6.049082370124011e-06,
      "loss": 0.4327,
      "step": 995
    },
    {
      "epoch": 0.4921259842519685,
      "grad_norm": 2.262917647817437,
      "learning_rate": 6.007035631607605e-06,
      "loss": 0.4315,
      "step": 1000
    },
    {
      "epoch": 0.49458661417322836,
      "grad_norm": 2.352811780362867,
      "learning_rate": 5.964914534776814e-06,
      "loss": 0.445,
      "step": 1005
    },
    {
      "epoch": 0.4970472440944882,
      "grad_norm": 2.3459263663849095,
      "learning_rate": 5.9227221898033785e-06,
      "loss": 0.4454,
      "step": 1010
    },
    {
      "epoch": 0.49950787401574803,
      "grad_norm": 2.335644494714758,
      "learning_rate": 5.880461712119913e-06,
      "loss": 0.4511,
      "step": 1015
    },
    {
      "epoch": 0.5019685039370079,
      "grad_norm": 2.4237181972135864,
      "learning_rate": 5.838136222189874e-06,
      "loss": 0.4414,
      "step": 1020
    },
    {
      "epoch": 0.5044291338582677,
      "grad_norm": 2.3828455096271424,
      "learning_rate": 5.795748845277143e-06,
      "loss": 0.4461,
      "step": 1025
    },
    {
      "epoch": 0.5068897637795275,
      "grad_norm": 2.304602958345143,
      "learning_rate": 5.75330271121526e-06,
      "loss": 0.4477,
      "step": 1030
    },
    {
      "epoch": 0.5093503937007874,
      "grad_norm": 2.352277931816423,
      "learning_rate": 5.710800954176326e-06,
      "loss": 0.4437,
      "step": 1035
    },
    {
      "epoch": 0.5118110236220472,
      "grad_norm": 2.2359320701170966,
      "learning_rate": 5.668246712439579e-06,
      "loss": 0.4373,
      "step": 1040
    },
    {
      "epoch": 0.5142716535433071,
      "grad_norm": 2.256437860516146,
      "learning_rate": 5.625643128159658e-06,
      "loss": 0.4363,
      "step": 1045
    },
    {
      "epoch": 0.5167322834645669,
      "grad_norm": 2.1555198144458623,
      "learning_rate": 5.582993347134604e-06,
      "loss": 0.4356,
      "step": 1050
    },
    {
      "epoch": 0.5191929133858267,
      "grad_norm": 2.2635088277193125,
      "learning_rate": 5.540300518573564e-06,
      "loss": 0.4444,
      "step": 1055
    },
    {
      "epoch": 0.5216535433070866,
      "grad_norm": 2.3921742944394424,
      "learning_rate": 5.4975677948642704e-06,
      "loss": 0.4294,
      "step": 1060
    },
    {
      "epoch": 0.5241141732283464,
      "grad_norm": 2.2443372707986478,
      "learning_rate": 5.454798331340261e-06,
      "loss": 0.4444,
      "step": 1065
    },
    {
      "epoch": 0.5265748031496063,
      "grad_norm": 2.3139794779379805,
      "learning_rate": 5.4119952860479e-06,
      "loss": 0.4327,
      "step": 1070
    },
    {
      "epoch": 0.5290354330708661,
      "grad_norm": 2.2992274738499816,
      "learning_rate": 5.369161819513189e-06,
      "loss": 0.4346,
      "step": 1075
    },
    {
      "epoch": 0.531496062992126,
      "grad_norm": 2.457790310288889,
      "learning_rate": 5.3263010945083994e-06,
      "loss": 0.4344,
      "step": 1080
    },
    {
      "epoch": 0.5339566929133859,
      "grad_norm": 2.2742045912367277,
      "learning_rate": 5.283416275818531e-06,
      "loss": 0.43,
      "step": 1085
    },
    {
      "epoch": 0.5364173228346457,
      "grad_norm": 2.376014712384954,
      "learning_rate": 5.240510530007641e-06,
      "loss": 0.4242,
      "step": 1090
    },
    {
      "epoch": 0.5388779527559056,
      "grad_norm": 2.2883534114209656,
      "learning_rate": 5.1975870251850105e-06,
      "loss": 0.4289,
      "step": 1095
    },
    {
      "epoch": 0.5413385826771654,
      "grad_norm": 2.441355636112588,
      "learning_rate": 5.1546489307712345e-06,
      "loss": 0.4253,
      "step": 1100
    },
    {
      "epoch": 0.5437992125984252,
      "grad_norm": 2.1890329237409962,
      "learning_rate": 5.111699417264177e-06,
      "loss": 0.4227,
      "step": 1105
    },
    {
      "epoch": 0.5462598425196851,
      "grad_norm": 2.2703982193213474,
      "learning_rate": 5.06874165600488e-06,
      "loss": 0.4245,
      "step": 1110
    },
    {
      "epoch": 0.5487204724409449,
      "grad_norm": 2.405433035408274,
      "learning_rate": 5.025778818943391e-06,
      "loss": 0.417,
      "step": 1115
    },
    {
      "epoch": 0.5511811023622047,
      "grad_norm": 2.2132101107920534,
      "learning_rate": 4.982814078404543e-06,
      "loss": 0.4139,
      "step": 1120
    },
    {
      "epoch": 0.5536417322834646,
      "grad_norm": 2.268080735075916,
      "learning_rate": 4.939850606853724e-06,
      "loss": 0.4131,
      "step": 1125
    },
    {
      "epoch": 0.5561023622047244,
      "grad_norm": 2.3602331832748513,
      "learning_rate": 4.89689157666262e-06,
      "loss": 0.4224,
      "step": 1130
    },
    {
      "epoch": 0.5585629921259843,
      "grad_norm": 2.38297917623373,
      "learning_rate": 4.853940159874972e-06,
      "loss": 0.4134,
      "step": 1135
    },
    {
      "epoch": 0.5610236220472441,
      "grad_norm": 2.248181071176039,
      "learning_rate": 4.8109995279723556e-06,
      "loss": 0.412,
      "step": 1140
    },
    {
      "epoch": 0.5634842519685039,
      "grad_norm": 2.266008354369167,
      "learning_rate": 4.768072851640006e-06,
      "loss": 0.4091,
      "step": 1145
    },
    {
      "epoch": 0.5659448818897638,
      "grad_norm": 2.1519828496273186,
      "learning_rate": 4.7251633005326935e-06,
      "loss": 0.404,
      "step": 1150
    },
    {
      "epoch": 0.5684055118110236,
      "grad_norm": 2.176644707992909,
      "learning_rate": 4.682274043040682e-06,
      "loss": 0.4107,
      "step": 1155
    },
    {
      "epoch": 0.5708661417322834,
      "grad_norm": 2.395492504865672,
      "learning_rate": 4.639408246055781e-06,
      "loss": 0.3997,
      "step": 1160
    },
    {
      "epoch": 0.5733267716535433,
      "grad_norm": 2.348358233043625,
      "learning_rate": 4.596569074737501e-06,
      "loss": 0.4204,
      "step": 1165
    },
    {
      "epoch": 0.5757874015748031,
      "grad_norm": 2.330232940749561,
      "learning_rate": 4.5537596922793475e-06,
      "loss": 0.3882,
      "step": 1170
    },
    {
      "epoch": 0.578248031496063,
      "grad_norm": 2.161208383403752,
      "learning_rate": 4.510983259675252e-06,
      "loss": 0.4133,
      "step": 1175
    },
    {
      "epoch": 0.5807086614173228,
      "grad_norm": 2.1324298889752527,
      "learning_rate": 4.468242935486164e-06,
      "loss": 0.3971,
      "step": 1180
    },
    {
      "epoch": 0.5831692913385826,
      "grad_norm": 2.2519302159370906,
      "learning_rate": 4.425541875606837e-06,
      "loss": 0.3995,
      "step": 1185
    },
    {
      "epoch": 0.5856299212598425,
      "grad_norm": 2.4130036837118847,
      "learning_rate": 4.38288323303279e-06,
      "loss": 0.4152,
      "step": 1190
    },
    {
      "epoch": 0.5880905511811023,
      "grad_norm": 2.1744607103601314,
      "learning_rate": 4.340270157627496e-06,
      "loss": 0.3969,
      "step": 1195
    },
    {
      "epoch": 0.5905511811023622,
      "grad_norm": 2.317972305667509,
      "learning_rate": 4.29770579588981e-06,
      "loss": 0.4133,
      "step": 1200
    },
    {
      "epoch": 0.593011811023622,
      "grad_norm": 2.141942303749325,
      "learning_rate": 4.255193290721626e-06,
      "loss": 0.3968,
      "step": 1205
    },
    {
      "epoch": 0.5954724409448819,
      "grad_norm": 2.0672055368246878,
      "learning_rate": 4.2127357811958006e-06,
      "loss": 0.3936,
      "step": 1210
    },
    {
      "epoch": 0.5979330708661418,
      "grad_norm": 2.256440389057466,
      "learning_rate": 4.170336402324393e-06,
      "loss": 0.3988,
      "step": 1215
    },
    {
      "epoch": 0.6003937007874016,
      "grad_norm": 2.192195012979975,
      "learning_rate": 4.127998284827148e-06,
      "loss": 0.3923,
      "step": 1220
    },
    {
      "epoch": 0.6028543307086615,
      "grad_norm": 2.2572386075080493,
      "learning_rate": 4.085724554900359e-06,
      "loss": 0.3908,
      "step": 1225
    },
    {
      "epoch": 0.6053149606299213,
      "grad_norm": 2.3140141947994817,
      "learning_rate": 4.043518333986009e-06,
      "loss": 0.4003,
      "step": 1230
    },
    {
      "epoch": 0.6077755905511811,
      "grad_norm": 2.187606337961814,
      "learning_rate": 4.001382738541291e-06,
      "loss": 0.3866,
      "step": 1235
    },
    {
      "epoch": 0.610236220472441,
      "grad_norm": 2.372001940156093,
      "learning_rate": 3.9593208798085094e-06,
      "loss": 0.3964,
      "step": 1240
    },
    {
      "epoch": 0.6126968503937008,
      "grad_norm": 2.3150086846943148,
      "learning_rate": 3.9173358635853285e-06,
      "loss": 0.3972,
      "step": 1245
    },
    {
      "epoch": 0.6151574803149606,
      "grad_norm": 2.1690469819216185,
      "learning_rate": 3.875430789995454e-06,
      "loss": 0.395,
      "step": 1250
    },
    {
      "epoch": 0.6176181102362205,
      "grad_norm": 2.2589312032748348,
      "learning_rate": 3.833608753259729e-06,
      "loss": 0.3885,
      "step": 1255
    },
    {
      "epoch": 0.6200787401574803,
      "grad_norm": 2.216561553861059,
      "learning_rate": 3.791872841467643e-06,
      "loss": 0.3826,
      "step": 1260
    },
    {
      "epoch": 0.6225393700787402,
      "grad_norm": 2.2645021793003033,
      "learning_rate": 3.7502261363493346e-06,
      "loss": 0.3919,
      "step": 1265
    },
    {
      "epoch": 0.625,
      "grad_norm": 2.397270292319783,
      "learning_rate": 3.708671713048017e-06,
      "loss": 0.3815,
      "step": 1270
    },
    {
      "epoch": 0.6274606299212598,
      "grad_norm": 2.1965437050738292,
      "learning_rate": 3.6672126398929273e-06,
      "loss": 0.3878,
      "step": 1275
    },
    {
      "epoch": 0.6299212598425197,
      "grad_norm": 2.208748833631679,
      "learning_rate": 3.625851978172765e-06,
      "loss": 0.3843,
      "step": 1280
    },
    {
      "epoch": 0.6323818897637795,
      "grad_norm": 2.192586855405063,
      "learning_rate": 3.5845927819096405e-06,
      "loss": 0.3759,
      "step": 1285
    },
    {
      "epoch": 0.6348425196850394,
      "grad_norm": 2.093601284747752,
      "learning_rate": 3.543438097633577e-06,
      "loss": 0.3845,
      "step": 1290
    },
    {
      "epoch": 0.6373031496062992,
      "grad_norm": 2.1548335840642117,
      "learning_rate": 3.5023909641575604e-06,
      "loss": 0.3654,
      "step": 1295
    },
    {
      "epoch": 0.639763779527559,
      "grad_norm": 2.2141075323196615,
      "learning_rate": 3.4614544123531476e-06,
      "loss": 0.3847,
      "step": 1300
    },
    {
      "epoch": 0.6422244094488189,
      "grad_norm": 2.310653909281361,
      "learning_rate": 3.4206314649266813e-06,
      "loss": 0.3832,
      "step": 1305
    },
    {
      "epoch": 0.6446850393700787,
      "grad_norm": 2.155987784465937,
      "learning_rate": 3.3799251361960883e-06,
      "loss": 0.3795,
      "step": 1310
    },
    {
      "epoch": 0.6471456692913385,
      "grad_norm": 2.2490179123681924,
      "learning_rate": 3.339338431868311e-06,
      "loss": 0.3642,
      "step": 1315
    },
    {
      "epoch": 0.6496062992125984,
      "grad_norm": 2.138647573366281,
      "learning_rate": 3.29887434881737e-06,
      "loss": 0.396,
      "step": 1320
    },
    {
      "epoch": 0.6520669291338582,
      "grad_norm": 2.137277976513333,
      "learning_rate": 3.2585358748630725e-06,
      "loss": 0.3783,
      "step": 1325
    },
    {
      "epoch": 0.6545275590551181,
      "grad_norm": 2.348805202763454,
      "learning_rate": 3.2183259885504003e-06,
      "loss": 0.3596,
      "step": 1330
    },
    {
      "epoch": 0.656988188976378,
      "grad_norm": 2.100095406421258,
      "learning_rate": 3.1782476589295803e-06,
      "loss": 0.3694,
      "step": 1335
    },
    {
      "epoch": 0.6594488188976378,
      "grad_norm": 2.101459864885911,
      "learning_rate": 3.138303845336844e-06,
      "loss": 0.3773,
      "step": 1340
    },
    {
      "epoch": 0.6619094488188977,
      "grad_norm": 2.1450470538690185,
      "learning_rate": 3.098497497175925e-06,
      "loss": 0.3762,
      "step": 1345
    },
    {
      "epoch": 0.6643700787401575,
      "grad_norm": 2.116070303858192,
      "learning_rate": 3.0588315537002682e-06,
      "loss": 0.3712,
      "step": 1350
    },
    {
      "epoch": 0.6668307086614174,
      "grad_norm": 2.346922185113573,
      "learning_rate": 3.0193089437960043e-06,
      "loss": 0.376,
      "step": 1355
    },
    {
      "epoch": 0.6692913385826772,
      "grad_norm": 2.408659108286324,
      "learning_rate": 2.9799325857656856e-06,
      "loss": 0.3634,
      "step": 1360
    },
    {
      "epoch": 0.671751968503937,
      "grad_norm": 2.029972040437485,
      "learning_rate": 2.940705387112798e-06,
      "loss": 0.3452,
      "step": 1365
    },
    {
      "epoch": 0.6742125984251969,
      "grad_norm": 2.1424763238566533,
      "learning_rate": 2.901630244327075e-06,
      "loss": 0.3743,
      "step": 1370
    },
    {
      "epoch": 0.6766732283464567,
      "grad_norm": 2.2110942680398304,
      "learning_rate": 2.862710042670629e-06,
      "loss": 0.3771,
      "step": 1375
    },
    {
      "epoch": 0.6791338582677166,
      "grad_norm": 2.2684421059421447,
      "learning_rate": 2.8239476559649013e-06,
      "loss": 0.367,
      "step": 1380
    },
    {
      "epoch": 0.6815944881889764,
      "grad_norm": 2.0621583436640645,
      "learning_rate": 2.7853459463784643e-06,
      "loss": 0.3658,
      "step": 1385
    },
    {
      "epoch": 0.6840551181102362,
      "grad_norm": 2.242244798887825,
      "learning_rate": 2.7469077642156844e-06,
      "loss": 0.3605,
      "step": 1390
    },
    {
      "epoch": 0.6865157480314961,
      "grad_norm": 2.062160047060375,
      "learning_rate": 2.7086359477062542e-06,
      "loss": 0.3526,
      "step": 1395
    },
    {
      "epoch": 0.6889763779527559,
      "grad_norm": 2.29144280479743,
      "learning_rate": 2.6705333227956304e-06,
      "loss": 0.3665,
      "step": 1400
    },
    {
      "epoch": 0.6914370078740157,
      "grad_norm": 2.0387207809725307,
      "learning_rate": 2.6326027029363575e-06,
      "loss": 0.3649,
      "step": 1405
    },
    {
      "epoch": 0.6938976377952756,
      "grad_norm": 2.385677814762644,
      "learning_rate": 2.5948468888803323e-06,
      "loss": 0.3648,
      "step": 1410
    },
    {
      "epoch": 0.6963582677165354,
      "grad_norm": 2.1908900610604563,
      "learning_rate": 2.557268668472002e-06,
      "loss": 0.3675,
      "step": 1415
    },
    {
      "epoch": 0.6988188976377953,
      "grad_norm": 2.166961094130946,
      "learning_rate": 2.5198708164425046e-06,
      "loss": 0.367,
      "step": 1420
    },
    {
      "epoch": 0.7012795275590551,
      "grad_norm": 2.188441150276078,
      "learning_rate": 2.482656094204794e-06,
      "loss": 0.3718,
      "step": 1425
    },
    {
      "epoch": 0.7037401574803149,
      "grad_norm": 1.954385650261843,
      "learning_rate": 2.445627249649742e-06,
      "loss": 0.3531,
      "step": 1430
    },
    {
      "epoch": 0.7062007874015748,
      "grad_norm": 2.411031920640521,
      "learning_rate": 2.4087870169432263e-06,
      "loss": 0.3502,
      "step": 1435
    },
    {
      "epoch": 0.7086614173228346,
      "grad_norm": 2.1532752142595166,
      "learning_rate": 2.372138116324254e-06,
      "loss": 0.3638,
      "step": 1440
    },
    {
      "epoch": 0.7111220472440944,
      "grad_norm": 2.185262551284941,
      "learning_rate": 2.3356832539040976e-06,
      "loss": 0.3522,
      "step": 1445
    },
    {
      "epoch": 0.7135826771653543,
      "grad_norm": 2.1439278425241866,
      "learning_rate": 2.2994251214664754e-06,
      "loss": 0.3629,
      "step": 1450
    },
    {
      "epoch": 0.7160433070866141,
      "grad_norm": 1.99507705520623,
      "learning_rate": 2.263366396268806e-06,
      "loss": 0.3628,
      "step": 1455
    },
    {
      "epoch": 0.718503937007874,
      "grad_norm": 2.013315275148486,
      "learning_rate": 2.227509740844508e-06,
      "loss": 0.3503,
      "step": 1460
    },
    {
      "epoch": 0.7209645669291339,
      "grad_norm": 1.996243251169151,
      "learning_rate": 2.191857802806409e-06,
      "loss": 0.3487,
      "step": 1465
    },
    {
      "epoch": 0.7234251968503937,
      "grad_norm": 2.06614377414641,
      "learning_rate": 2.1564132146512494e-06,
      "loss": 0.3608,
      "step": 1470
    },
    {
      "epoch": 0.7258858267716536,
      "grad_norm": 2.051788916773039,
      "learning_rate": 2.1211785935652974e-06,
      "loss": 0.3526,
      "step": 1475
    },
    {
      "epoch": 0.7283464566929134,
      "grad_norm": 2.181094283410733,
      "learning_rate": 2.086156541231109e-06,
      "loss": 0.3536,
      "step": 1480
    },
    {
      "epoch": 0.7308070866141733,
      "grad_norm": 2.0484687331248628,
      "learning_rate": 2.05134964363541e-06,
      "loss": 0.3527,
      "step": 1485
    },
    {
      "epoch": 0.7332677165354331,
      "grad_norm": 1.9761991384957989,
      "learning_rate": 2.016760470878158e-06,
      "loss": 0.3538,
      "step": 1490
    },
    {
      "epoch": 0.7357283464566929,
      "grad_norm": 2.214422894487858,
      "learning_rate": 1.9823915769827672e-06,
      "loss": 0.3633,
      "step": 1495
    },
    {
      "epoch": 0.7381889763779528,
      "grad_norm": 2.0408630603692766,
      "learning_rate": 1.948245499707523e-06,
      "loss": 0.3472,
      "step": 1500
    },
    {
      "epoch": 0.7406496062992126,
      "grad_norm": 2.0996002302569314,
      "learning_rate": 1.9143247603581925e-06,
      "loss": 0.3522,
      "step": 1505
    },
    {
      "epoch": 0.7431102362204725,
      "grad_norm": 2.247669742538547,
      "learning_rate": 1.8806318636018666e-06,
      "loss": 0.3481,
      "step": 1510
    },
    {
      "epoch": 0.7455708661417323,
      "grad_norm": 2.4555870624560403,
      "learning_rate": 1.8471692972820027e-06,
      "loss": 0.3551,
      "step": 1515
    },
    {
      "epoch": 0.7480314960629921,
      "grad_norm": 2.134743767839275,
      "learning_rate": 1.8139395322347335e-06,
      "loss": 0.3493,
      "step": 1520
    },
    {
      "epoch": 0.750492125984252,
      "grad_norm": 2.1033701544382923,
      "learning_rate": 1.780945022106424e-06,
      "loss": 0.3588,
      "step": 1525
    },
    {
      "epoch": 0.7529527559055118,
      "grad_norm": 1.9612786140780765,
      "learning_rate": 1.7481882031724929e-06,
      "loss": 0.3479,
      "step": 1530
    },
    {
      "epoch": 0.7554133858267716,
      "grad_norm": 2.008623872712887,
      "learning_rate": 1.7156714941575292e-06,
      "loss": 0.3577,
      "step": 1535
    },
    {
      "epoch": 0.7578740157480315,
      "grad_norm": 2.2526169298505514,
      "learning_rate": 1.6833972960566868e-06,
      "loss": 0.3528,
      "step": 1540
    },
    {
      "epoch": 0.7603346456692913,
      "grad_norm": 1.9950398761316037,
      "learning_rate": 1.6513679919583975e-06,
      "loss": 0.3486,
      "step": 1545
    },
    {
      "epoch": 0.7627952755905512,
      "grad_norm": 1.9434259852096614,
      "learning_rate": 1.6195859468684199e-06,
      "loss": 0.3489,
      "step": 1550
    },
    {
      "epoch": 0.765255905511811,
      "grad_norm": 2.1701727992207465,
      "learning_rate": 1.588053507535195e-06,
      "loss": 0.3385,
      "step": 1555
    },
    {
      "epoch": 0.7677165354330708,
      "grad_norm": 2.168599644222399,
      "learning_rate": 1.5567730022765753e-06,
      "loss": 0.3416,
      "step": 1560
    },
    {
      "epoch": 0.7701771653543307,
      "grad_norm": 2.0981894375859165,
      "learning_rate": 1.5257467408078996e-06,
      "loss": 0.3458,
      "step": 1565
    },
    {
      "epoch": 0.7726377952755905,
      "grad_norm": 1.9614927382222864,
      "learning_rate": 1.494977014071441e-06,
      "loss": 0.3318,
      "step": 1570
    },
    {
      "epoch": 0.7750984251968503,
      "grad_norm": 2.166688955879591,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.3458,
      "step": 1575
    },
    {
      "epoch": 0.7775590551181102,
      "grad_norm": 2.0253836113110406,
      "learning_rate": 1.434216233685441e-06,
      "loss": 0.3538,
      "step": 1580
    },
    {
      "epoch": 0.78001968503937,
      "grad_norm": 1.8793981802734039,
      "learning_rate": 1.4042296665397187e-06,
      "loss": 0.3354,
      "step": 1585
    },
    {
      "epoch": 0.78248031496063,
      "grad_norm": 2.105809618033566,
      "learning_rate": 1.374508606802586e-06,
      "loss": 0.3258,
      "step": 1590
    },
    {
      "epoch": 0.7849409448818898,
      "grad_norm": 1.9824032732878212,
      "learning_rate": 1.3450552490417712e-06,
      "loss": 0.3281,
      "step": 1595
    },
    {
      "epoch": 0.7874015748031497,
      "grad_norm": 1.9366896041970953,
      "learning_rate": 1.3158717680582128e-06,
      "loss": 0.3414,
      "step": 1600
    },
    {
      "epoch": 0.7898622047244095,
      "grad_norm": 2.018035793577225,
      "learning_rate": 1.286960318725471e-06,
      "loss": 0.3404,
      "step": 1605
    },
    {
      "epoch": 0.7923228346456693,
      "grad_norm": 2.1710225067217985,
      "learning_rate": 1.2583230358306053e-06,
      "loss": 0.3516,
      "step": 1610
    },
    {
      "epoch": 0.7947834645669292,
      "grad_norm": 2.000479839139359,
      "learning_rate": 1.2299620339165607e-06,
      "loss": 0.3366,
      "step": 1615
    },
    {
      "epoch": 0.797244094488189,
      "grad_norm": 1.8749243087399245,
      "learning_rate": 1.201879407126012e-06,
      "loss": 0.3388,
      "step": 1620
    },
    {
      "epoch": 0.7997047244094488,
      "grad_norm": 2.069231321759092,
      "learning_rate": 1.1740772290467518e-06,
      "loss": 0.3431,
      "step": 1625
    },
    {
      "epoch": 0.8021653543307087,
      "grad_norm": 2.118336292087268,
      "learning_rate": 1.1465575525585743e-06,
      "loss": 0.3246,
      "step": 1630
    },
    {
      "epoch": 0.8046259842519685,
      "grad_norm": 1.9757583231325198,
      "learning_rate": 1.119322409681689e-06,
      "loss": 0.3372,
      "step": 1635
    },
    {
      "epoch": 0.8070866141732284,
      "grad_norm": 2.2589600684887405,
      "learning_rate": 1.0923738114266824e-06,
      "loss": 0.3417,
      "step": 1640
    },
    {
      "epoch": 0.8095472440944882,
      "grad_norm": 1.9963933143672985,
      "learning_rate": 1.0657137476460272e-06,
      "loss": 0.3425,
      "step": 1645
    },
    {
      "epoch": 0.812007874015748,
      "grad_norm": 1.893114692364661,
      "learning_rate": 1.0393441868871507e-06,
      "loss": 0.3305,
      "step": 1650
    },
    {
      "epoch": 0.8144685039370079,
      "grad_norm": 2.0572316488132896,
      "learning_rate": 1.0132670762470875e-06,
      "loss": 0.3391,
      "step": 1655
    },
    {
      "epoch": 0.8169291338582677,
      "grad_norm": 2.034904027560339,
      "learning_rate": 9.874843412286994e-07,
      "loss": 0.3394,
      "step": 1660
    },
    {
      "epoch": 0.8193897637795275,
      "grad_norm": 2.1863593987138388,
      "learning_rate": 9.619978855985017e-07,
      "loss": 0.3355,
      "step": 1665
    },
    {
      "epoch": 0.8218503937007874,
      "grad_norm": 2.0283259392336714,
      "learning_rate": 9.368095912460934e-07,
      "loss": 0.3343,
      "step": 1670
    },
    {
      "epoch": 0.8243110236220472,
      "grad_norm": 2.1337003379875363,
      "learning_rate": 9.119213180451974e-07,
      "loss": 0.3357,
      "step": 1675
    },
    {
      "epoch": 0.8267716535433071,
      "grad_norm": 2.1677182856182284,
      "learning_rate": 8.87334903716332e-07,
      "loss": 0.335,
      "step": 1680
    },
    {
      "epoch": 0.8292322834645669,
      "grad_norm": 2.0673251750486004,
      "learning_rate": 8.630521636911171e-07,
      "loss": 0.339,
      "step": 1685
    },
    {
      "epoch": 0.8316929133858267,
      "grad_norm": 1.9497478181644272,
      "learning_rate": 8.390748909782204e-07,
      "loss": 0.332,
      "step": 1690
    },
    {
      "epoch": 0.8341535433070866,
      "grad_norm": 1.908954894416974,
      "learning_rate": 8.154048560309669e-07,
      "loss": 0.3379,
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.8366141732283464, | |
| "grad_norm": 1.8808203335472995, | |
| "learning_rate": 7.920438066166097e-07, | |
| "loss": 0.3348, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.8390748031496063, | |
| "grad_norm": 1.9558013890045594, | |
| "learning_rate": 7.689934676872768e-07, | |
| "loss": 0.3367, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.8415354330708661, | |
| "grad_norm": 2.0773598203017283, | |
| "learning_rate": 7.462555412526062e-07, | |
| "loss": 0.3297, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.843996062992126, | |
| "grad_norm": 2.0479405714620467, | |
| "learning_rate": 7.238317062540661e-07, | |
| "loss": 0.3259, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.8464566929133859, | |
| "grad_norm": 2.2136763305185516, | |
| "learning_rate": 7.017236184409859e-07, | |
| "loss": 0.3376, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.8489173228346457, | |
| "grad_norm": 1.92785210881461, | |
| "learning_rate": 6.799329102482988e-07, | |
| "loss": 0.3206, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.8513779527559056, | |
| "grad_norm": 2.003246589538247, | |
| "learning_rate": 6.584611906760036e-07, | |
| "loss": 0.3222, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.8538385826771654, | |
| "grad_norm": 1.9366541542704103, | |
| "learning_rate": 6.373100451703601e-07, | |
| "loss": 0.3511, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.8562992125984252, | |
| "grad_norm": 1.9039707630588159, | |
| "learning_rate": 6.164810355068179e-07, | |
| "loss": 0.3354, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.8587598425196851, | |
| "grad_norm": 1.9298814444778931, | |
| "learning_rate": 5.959756996746996e-07, | |
| "loss": 0.3389, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.8612204724409449, | |
| "grad_norm": 2.004345071619692, | |
| "learning_rate": 5.757955517636365e-07, | |
| "loss": 0.3287, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.8636811023622047, | |
| "grad_norm": 2.1811819959990104, | |
| "learning_rate": 5.559420818517702e-07, | |
| "loss": 0.3351, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.8661417322834646, | |
| "grad_norm": 1.89963944872893, | |
| "learning_rate": 5.364167558957267e-07, | |
| "loss": 0.3236, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.8686023622047244, | |
| "grad_norm": 2.1366517896474013, | |
| "learning_rate": 5.172210156223745e-07, | |
| "loss": 0.3334, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.8710629921259843, | |
| "grad_norm": 2.0497253975585776, | |
| "learning_rate": 4.983562784223645e-07, | |
| "loss": 0.3193, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.8735236220472441, | |
| "grad_norm": 1.9256352142774849, | |
| "learning_rate": 4.798239372454738e-07, | |
| "loss": 0.3255, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.8759842519685039, | |
| "grad_norm": 1.8950883031373469, | |
| "learning_rate": 4.6162536049775387e-07, | |
| "loss": 0.3286, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.8784448818897638, | |
| "grad_norm": 1.972455244802642, | |
| "learning_rate": 4.437618919404851e-07, | |
| "loss": 0.3233, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.8809055118110236, | |
| "grad_norm": 1.8881967486267786, | |
| "learning_rate": 4.262348505909608e-07, | |
| "loss": 0.3258, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.8833661417322834, | |
| "grad_norm": 1.9153066210168417, | |
| "learning_rate": 4.0904553062508677e-07, | |
| "loss": 0.3268, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.8858267716535433, | |
| "grad_norm": 1.9315033383610785, | |
| "learning_rate": 3.9219520128182087e-07, | |
| "loss": 0.3217, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.8882874015748031, | |
| "grad_norm": 1.8809719451973854, | |
| "learning_rate": 3.756851067694606e-07, | |
| "loss": 0.3233, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.890748031496063, | |
| "grad_norm": 2.1555114490828773, | |
| "learning_rate": 3.5951646617376603e-07, | |
| "loss": 0.3307, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.8932086614173228, | |
| "grad_norm": 2.0120227245929616, | |
| "learning_rate": 3.436904733679436e-07, | |
| "loss": 0.3232, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.8956692913385826, | |
| "grad_norm": 1.9736031365392204, | |
| "learning_rate": 3.2820829692449984e-07, | |
| "loss": 0.3311, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.8981299212598425, | |
| "grad_norm": 1.8523947337945588, | |
| "learning_rate": 3.130710800289416e-07, | |
| "loss": 0.3148, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.9005905511811023, | |
| "grad_norm": 1.878892829538069, | |
| "learning_rate": 2.982799403953801e-07, | |
| "loss": 0.324, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.9030511811023622, | |
| "grad_norm": 1.987792475207628, | |
| "learning_rate": 2.8383597018398876e-07, | |
| "loss": 0.3102, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.905511811023622, | |
| "grad_norm": 1.9031414178003596, | |
| "learning_rate": 2.697402359203638e-07, | |
| "loss": 0.32, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.9079724409448819, | |
| "grad_norm": 1.8869500063866222, | |
| "learning_rate": 2.559937784167743e-07, | |
| "loss": 0.3155, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.9104330708661418, | |
| "grad_norm": 2.1187123599093463, | |
| "learning_rate": 2.4259761269530667e-07, | |
| "loss": 0.3182, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.9128937007874016, | |
| "grad_norm": 1.9267715394422684, | |
| "learning_rate": 2.2955272791291894e-07, | |
| "loss": 0.33, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.9153543307086615, | |
| "grad_norm": 1.8425029751605457, | |
| "learning_rate": 2.1686008728840301e-07, | |
| "loss": 0.3197, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.9178149606299213, | |
| "grad_norm": 2.0278203810787025, | |
| "learning_rate": 2.0452062803126005e-07, | |
| "loss": 0.3231, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.9202755905511811, | |
| "grad_norm": 1.7848707123264098, | |
| "learning_rate": 1.9253526127249787e-07, | |
| "loss": 0.3278, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.922736220472441, | |
| "grad_norm": 1.9327226468941292, | |
| "learning_rate": 1.8090487199735663e-07, | |
| "loss": 0.3232, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.9251968503937008, | |
| "grad_norm": 2.0573534052381253, | |
| "learning_rate": 1.6963031897995863e-07, | |
| "loss": 0.3208, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.9276574803149606, | |
| "grad_norm": 1.9992889422665294, | |
| "learning_rate": 1.5871243471990372e-07, | |
| "loss": 0.3279, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.9301181102362205, | |
| "grad_norm": 1.9663769138651053, | |
| "learning_rate": 1.4815202538079e-07, | |
| "loss": 0.3131, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.9325787401574803, | |
| "grad_norm": 1.9420232366381145, | |
| "learning_rate": 1.379498707306942e-07, | |
| "loss": 0.3154, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.9350393700787402, | |
| "grad_norm": 1.8761946587475562, | |
| "learning_rate": 1.28106724084594e-07, | |
| "loss": 0.3288, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.9375, | |
| "grad_norm": 1.9747460755326152, | |
| "learning_rate": 1.1862331224873902e-07, | |
| "loss": 0.321, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.9399606299212598, | |
| "grad_norm": 1.9629141327158952, | |
| "learning_rate": 1.0950033546699213e-07, | |
| "loss": 0.3232, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.9424212598425197, | |
| "grad_norm": 1.9231852739099, | |
| "learning_rate": 1.0073846736911697e-07, | |
| "loss": 0.3274, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.9448818897637795, | |
| "grad_norm": 1.8637963898259335, | |
| "learning_rate": 9.233835492104326e-08, | |
| "loss": 0.3148, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.9473425196850394, | |
| "grad_norm": 1.8554645087673565, | |
| "learning_rate": 8.430061837709058e-08, | |
| "loss": 0.3157, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.9498031496062992, | |
| "grad_norm": 1.9179859150173564, | |
| "learning_rate": 7.662585123417609e-08, | |
| "loss": 0.313, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.952263779527559, | |
| "grad_norm": 1.9997598136445862, | |
| "learning_rate": 6.931462018798407e-08, | |
| "loss": 0.327, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.9547244094488189, | |
| "grad_norm": 1.7891184752305465, | |
| "learning_rate": 6.236746509112824e-08, | |
| "loss": 0.3288, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.9571850393700787, | |
| "grad_norm": 1.9615522166928385, | |
| "learning_rate": 5.5784898913284754e-08, | |
| "loss": 0.3262, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.9596456692913385, | |
| "grad_norm": 1.9691371737883545, | |
| "learning_rate": 4.9567407703319247e-08, | |
| "loss": 0.3267, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.9621062992125984, | |
| "grad_norm": 1.898782902874794, | |
| "learning_rate": 4.3715450553393765e-08, | |
| "loss": 0.3249, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.9645669291338582, | |
| "grad_norm": 1.870173511604065, | |
| "learning_rate": 3.8229459565070074e-08, | |
| "loss": 0.3098, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.9670275590551181, | |
| "grad_norm": 1.9343349572257391, | |
| "learning_rate": 3.3109839817404564e-08, | |
| "loss": 0.3174, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.969488188976378, | |
| "grad_norm": 1.8713222491611672, | |
| "learning_rate": 2.8356969337035578e-08, | |
| "loss": 0.3258, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.9719488188976378, | |
| "grad_norm": 2.0105229467033623, | |
| "learning_rate": 2.3971199070271234e-08, | |
| "loss": 0.3218, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.9744094488188977, | |
| "grad_norm": 1.8419224922915125, | |
| "learning_rate": 1.99528528571763e-08, | |
| "loss": 0.3263, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.9768700787401575, | |
| "grad_norm": 1.9427348939895677, | |
| "learning_rate": 1.6302227407660744e-08, | |
| "loss": 0.3204, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.9793307086614174, | |
| "grad_norm": 1.9469887140438777, | |
| "learning_rate": 1.3019592279569503e-08, | |
| "loss": 0.3244, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.9817913385826772, | |
| "grad_norm": 2.0599655468724083, | |
| "learning_rate": 1.0105189858779507e-08, | |
| "loss": 0.3232, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.984251968503937, | |
| "grad_norm": 1.8816796183008666, | |
| "learning_rate": 7.559235341302872e-09, | |
| "loss": 0.3219, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.9867125984251969, | |
| "grad_norm": 1.857060246508601, | |
| "learning_rate": 5.381916717395186e-09, | |
| "loss": 0.3219, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.9891732283464567, | |
| "grad_norm": 1.978523180576824, | |
| "learning_rate": 3.573394757676596e-09, | |
| "loss": 0.3198, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.9916338582677166, | |
| "grad_norm": 1.9471876230350458, | |
| "learning_rate": 2.1338030012596488e-09, | |
| "loss": 0.3268, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.9940944881889764, | |
| "grad_norm": 1.92513808096097, | |
| "learning_rate": 1.0632477458888401e-09, | |
| "loss": 0.3286, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.9965551181102362, | |
| "grad_norm": 1.9350126459591657, | |
| "learning_rate": 3.618080400924484e-10, | |
| "loss": 0.3267, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.9990157480314961, | |
| "grad_norm": 1.8399795773652499, | |
| "learning_rate": 2.9535677343872637e-11, | |
| "loss": 0.3234, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_runtime": 3.3027, | |
| "eval_samples_per_second": 3.028, | |
| "eval_steps_per_second": 0.908, | |
| "step": 2032 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 2032, | |
| "total_flos": 212729730170880.0, | |
| "train_loss": 0.4842223212064251, | |
| "train_runtime": 18335.7154, | |
| "train_samples_per_second": 1.773, | |
| "train_steps_per_second": 0.111 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 2032, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 212729730170880.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
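
The structure above is the `trainer_state.json` that `transformers`' `Trainer` writes alongside each checkpoint. A minimal sketch of how one might load it and recover the loss and learning-rate trajectory from `log_history` (the file path is an assumption; point it at the actual checkpoint directory):

```python
import json

# Path is an assumption; Trainer writes trainer_state.json inside each
# checkpoint directory (e.g. checkpoint-2032/trainer_state.json).
with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry a "loss" key; the eval entry and the final
# run summary (which uses "train_loss") do not, so this filter skips them.
train_log = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in train_log]
losses = [e["loss"] for e in train_log]
lrs = [e["learning_rate"] for e in train_log]

print(f"logged points:   {len(train_log)}")
print(f"loss trajectory: {losses[0]:.4f} -> {losses[-1]:.4f}")
print(f"peak learning rate: {max(lrs):.3e}")

# The last log_history entry summarizes the run as a whole.
summary = state["log_history"][-1]
print(f"mean train loss: {summary['train_loss']:.4f}")
print(f"train runtime (s): {summary['train_runtime']:.1f}")
```

With this file, the sketch would report a loss falling from about 1.30 at step 1 to about 0.32 by step 2030, with the learning rate peaking near 1e-05 before its cosine-style decay toward zero.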