{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1656,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0006038647342995169, "grad_norm": 10.485260000044514, "learning_rate": 6.02409638554217e-08, "loss": 1.7287, "step": 1 },
    { "epoch": 0.0030193236714975845, "grad_norm": 10.032015562071633, "learning_rate": 3.0120481927710845e-07, "loss": 1.6893, "step": 5 },
    { "epoch": 0.006038647342995169, "grad_norm": 9.612514394248656, "learning_rate": 6.024096385542169e-07, "loss": 1.7063, "step": 10 },
    { "epoch": 0.009057971014492754, "grad_norm": 7.557592473361227, "learning_rate": 9.036144578313254e-07, "loss": 1.6866, "step": 15 },
    { "epoch": 0.012077294685990338, "grad_norm": 4.565478227553907, "learning_rate": 1.2048192771084338e-06, "loss": 1.6072, "step": 20 },
    { "epoch": 0.015096618357487922, "grad_norm": 2.9798309816553026, "learning_rate": 1.5060240963855425e-06, "loss": 1.5475, "step": 25 },
    { "epoch": 0.018115942028985508, "grad_norm": 2.7008857218275306, "learning_rate": 1.8072289156626508e-06, "loss": 1.4782, "step": 30 },
    { "epoch": 0.021135265700483092, "grad_norm": 2.2357152537880713, "learning_rate": 2.1084337349397595e-06, "loss": 1.3857, "step": 35 },
    { "epoch": 0.024154589371980676, "grad_norm": 1.9475475298214286, "learning_rate": 2.4096385542168676e-06, "loss": 1.3258, "step": 40 },
    { "epoch": 0.02717391304347826, "grad_norm": 1.4792226037165068, "learning_rate": 2.710843373493976e-06, "loss": 1.2592, "step": 45 },
    { "epoch": 0.030193236714975844, "grad_norm": 1.2792537099768388, "learning_rate": 3.012048192771085e-06, "loss": 1.2245, "step": 50 },
    { "epoch": 0.03321256038647343, "grad_norm": 1.2523748122313099, "learning_rate": 3.313253012048193e-06, "loss": 1.1959, "step": 55 },
    { "epoch": 0.036231884057971016, "grad_norm": 1.0864985697164489, "learning_rate": 3.6144578313253016e-06, "loss": 1.1785, "step": 60 },
    { "epoch": 0.0392512077294686, "grad_norm": 1.142977110301484, "learning_rate": 3.91566265060241e-06, "loss": 1.1762, "step": 65 },
    { "epoch": 0.042270531400966184, "grad_norm": 1.098649172466519, "learning_rate": 4.216867469879519e-06, "loss": 1.1607, "step": 70 },
    { "epoch": 0.04528985507246377, "grad_norm": 1.057865362960247, "learning_rate": 4.518072289156627e-06, "loss": 1.1589, "step": 75 },
    { "epoch": 0.04830917874396135, "grad_norm": 1.054325832096538, "learning_rate": 4.819277108433735e-06, "loss": 1.1344, "step": 80 },
    { "epoch": 0.051328502415458936, "grad_norm": 1.099271650362997, "learning_rate": 5.120481927710844e-06, "loss": 1.1252, "step": 85 },
    { "epoch": 0.05434782608695652, "grad_norm": 1.0998416744125088, "learning_rate": 5.421686746987952e-06, "loss": 1.1215, "step": 90 },
    { "epoch": 0.057367149758454104, "grad_norm": 1.1151773621303298, "learning_rate": 5.722891566265061e-06, "loss": 1.1135, "step": 95 },
    { "epoch": 0.06038647342995169, "grad_norm": 1.0965263618680452, "learning_rate": 6.02409638554217e-06, "loss": 1.1255, "step": 100 },
    { "epoch": 0.06340579710144928, "grad_norm": 1.1335389413366264, "learning_rate": 6.325301204819277e-06, "loss": 1.0936, "step": 105 },
    { "epoch": 0.06642512077294686, "grad_norm": 1.130790372670886, "learning_rate": 6.626506024096386e-06, "loss": 1.0998, "step": 110 },
    { "epoch": 0.06944444444444445, "grad_norm": 1.1733531035655729, "learning_rate": 6.927710843373494e-06, "loss": 1.1035, "step": 115 },
    { "epoch": 0.07246376811594203, "grad_norm": 1.3208668126357825, "learning_rate": 7.228915662650603e-06, "loss": 1.0642, "step": 120 },
    { "epoch": 0.07548309178743962, "grad_norm": 1.3106018142699076, "learning_rate": 7.530120481927712e-06, "loss": 1.0809, "step": 125 },
    { "epoch": 0.0785024154589372, "grad_norm": 1.1965232933467733, "learning_rate": 7.83132530120482e-06, "loss": 1.0739, "step": 130 },
    { "epoch": 0.08152173913043478, "grad_norm": 1.196930618773617, "learning_rate": 8.132530120481928e-06, "loss": 1.0711, "step": 135 },
    { "epoch": 0.08454106280193237, "grad_norm": 1.135659652675276, "learning_rate": 8.433734939759038e-06, "loss": 1.0489, "step": 140 },
    { "epoch": 0.08756038647342995, "grad_norm": 1.1334631389824636, "learning_rate": 8.734939759036145e-06, "loss": 1.0737, "step": 145 },
    { "epoch": 0.09057971014492754, "grad_norm": 1.2418129844310752, "learning_rate": 9.036144578313254e-06, "loss": 1.0458, "step": 150 },
    { "epoch": 0.09359903381642512, "grad_norm": 1.2437853442182127, "learning_rate": 9.337349397590362e-06, "loss": 1.036, "step": 155 },
    { "epoch": 0.0966183574879227, "grad_norm": 1.1287786590038726, "learning_rate": 9.63855421686747e-06, "loss": 1.0394, "step": 160 },
    { "epoch": 0.09963768115942029, "grad_norm": 1.1656244356824084, "learning_rate": 9.93975903614458e-06, "loss": 1.0357, "step": 165 },
    { "epoch": 0.10265700483091787, "grad_norm": 1.2290880544327538, "learning_rate": 9.999822178354131e-06, "loss": 1.0407, "step": 170 },
    { "epoch": 0.10567632850241546, "grad_norm": 1.1787986003632454, "learning_rate": 9.999099799595088e-06, "loss": 1.0202, "step": 175 },
    { "epoch": 0.10869565217391304, "grad_norm": 1.3579493877689446, "learning_rate": 9.997821830092095e-06, "loss": 0.9995, "step": 180 },
    { "epoch": 0.11171497584541062, "grad_norm": 1.236495272199702, "learning_rate": 9.995988411876328e-06, "loss": 1.0094, "step": 185 },
    { "epoch": 0.11473429951690821, "grad_norm": 1.1891336346777943, "learning_rate": 9.993599748710505e-06, "loss": 1.0038, "step": 190 },
    { "epoch": 0.11775362318840579, "grad_norm": 1.1457890107370514, "learning_rate": 9.990656106066257e-06, "loss": 0.9842, "step": 195 },
    { "epoch": 0.12077294685990338, "grad_norm": 1.197983166001558, "learning_rate": 9.9871578110946e-06, "loss": 0.9947, "step": 200 },
    { "epoch": 0.12379227053140096, "grad_norm": 1.2276842269473731, "learning_rate": 9.983105252589599e-06, "loss": 0.9743, "step": 205 },
    { "epoch": 0.12681159420289856, "grad_norm": 1.2305159106762475, "learning_rate": 9.978498880945138e-06, "loss": 0.9699, "step": 210 },
    { "epoch": 0.12983091787439613, "grad_norm": 1.287504685759514, "learning_rate": 9.97333920810488e-06, "loss": 0.9774, "step": 215 },
    { "epoch": 0.13285024154589373, "grad_norm": 1.2170202059428485, "learning_rate": 9.967626807505359e-06, "loss": 0.9723, "step": 220 },
    { "epoch": 0.1358695652173913, "grad_norm": 1.1891615466242549, "learning_rate": 9.961362314012258e-06, "loss": 0.9634, "step": 225 },
    { "epoch": 0.1388888888888889, "grad_norm": 1.259330792562172, "learning_rate": 9.954546423849842e-06, "loss": 0.9549, "step": 230 },
    { "epoch": 0.14190821256038647, "grad_norm": 1.2632205741447957, "learning_rate": 9.947179894523594e-06, "loss": 0.9464, "step": 235 },
    { "epoch": 0.14492753623188406, "grad_norm": 1.1804173499957151, "learning_rate": 9.93926354473601e-06, "loss": 0.9398, "step": 240 },
    { "epoch": 0.14794685990338163, "grad_norm": 1.1537804333636499, "learning_rate": 9.930798254295628e-06, "loss": 0.9653, "step": 245 },
    { "epoch": 0.15096618357487923, "grad_norm": 1.1421841081498953, "learning_rate": 9.921784964019234e-06, "loss": 0.9358, "step": 250 },
    { "epoch": 0.1539855072463768, "grad_norm": 1.1449230835912114, "learning_rate": 9.91222467562731e-06, "loss": 0.9491, "step": 255 },
    { "epoch": 0.1570048309178744, "grad_norm": 1.063437049402578, "learning_rate": 9.902118451632694e-06, "loss": 0.9465, "step": 260 },
    { "epoch": 0.16002415458937197, "grad_norm": 1.085929837294407, "learning_rate": 9.891467415222511e-06, "loss": 0.9275, "step": 265 },
    { "epoch": 0.16304347826086957, "grad_norm": 1.1524817196228028, "learning_rate": 9.880272750133328e-06, "loss": 0.9213, "step": 270 },
    { "epoch": 0.16606280193236714, "grad_norm": 1.1194662690657433, "learning_rate": 9.868535700519605e-06, "loss": 0.9315, "step": 275 },
    { "epoch": 0.16908212560386474, "grad_norm": 1.0905003429988187, "learning_rate": 9.856257570815415e-06, "loss": 0.9282, "step": 280 },
    { "epoch": 0.1721014492753623, "grad_norm": 1.1166663888676513, "learning_rate": 9.843439725589481e-06, "loss": 0.9281, "step": 285 },
    { "epoch": 0.1751207729468599, "grad_norm": 1.1630718200026984, "learning_rate": 9.83008358939351e-06, "loss": 0.9088, "step": 290 },
    { "epoch": 0.17814009661835747, "grad_norm": 1.1447207666491321, "learning_rate": 9.81619064660388e-06, "loss": 0.9035, "step": 295 },
    { "epoch": 0.18115942028985507, "grad_norm": 1.1556484647632965, "learning_rate": 9.801762441256663e-06, "loss": 0.9182, "step": 300 },
    { "epoch": 0.18417874396135267, "grad_norm": 1.091531593613017, "learning_rate": 9.786800576876026e-06, "loss": 0.907, "step": 305 },
    { "epoch": 0.18719806763285024, "grad_norm": 1.0623938281401049, "learning_rate": 9.77130671629602e-06, "loss": 0.9087, "step": 310 },
    { "epoch": 0.19021739130434784, "grad_norm": 1.1041557388108325, "learning_rate": 9.755282581475769e-06, "loss": 0.9097, "step": 315 },
    { "epoch": 0.1932367149758454, "grad_norm": 1.1073654251326603, "learning_rate": 9.738729953308104e-06, "loss": 0.9231, "step": 320 },
    { "epoch": 0.196256038647343, "grad_norm": 1.0823061373486982, "learning_rate": 9.72165067142163e-06, "loss": 0.908, "step": 325 },
    { "epoch": 0.19927536231884058, "grad_norm": 1.0339903077159918, "learning_rate": 9.70404663397628e-06, "loss": 0.9107, "step": 330 },
    { "epoch": 0.20229468599033817, "grad_norm": 1.162588874842489, "learning_rate": 9.68591979745235e-06, "loss": 0.8985, "step": 335 },
    { "epoch": 0.20531400966183574, "grad_norm": 1.1076782260164195, "learning_rate": 9.667272176433063e-06, "loss": 0.8979, "step": 340 },
    { "epoch": 0.20833333333333334, "grad_norm": 1.0465799767504245, "learning_rate": 9.648105843380674e-06, "loss": 0.9082, "step": 345 },
    { "epoch": 0.2113526570048309, "grad_norm": 1.1501313415042025, "learning_rate": 9.628422928406133e-06, "loss": 0.9, "step": 350 },
    { "epoch": 0.2143719806763285, "grad_norm": 1.0644785823217744, "learning_rate": 9.608225619032361e-06, "loss": 0.9029, "step": 355 },
    { "epoch": 0.21739130434782608, "grad_norm": 1.0757621081585198, "learning_rate": 9.587516159951118e-06, "loss": 0.8902, "step": 360 },
    { "epoch": 0.22041062801932368, "grad_norm": 1.0472929816511527, "learning_rate": 9.566296852773541e-06, "loss": 0.8921, "step": 365 },
    { "epoch": 0.22342995169082125, "grad_norm": 1.0276460031701986, "learning_rate": 9.544570055774348e-06, "loss": 0.902, "step": 370 },
    { "epoch": 0.22644927536231885, "grad_norm": 1.0354757235612004, "learning_rate": 9.522338183629737e-06, "loss": 0.9007, "step": 375 },
    { "epoch": 0.22946859903381642, "grad_norm": 1.0665922729683273, "learning_rate": 9.499603707149035e-06, "loss": 0.8954, "step": 380 },
    { "epoch": 0.23248792270531402, "grad_norm": 1.0107097936080194, "learning_rate": 9.476369153000076e-06, "loss": 0.8903, "step": 385 },
    { "epoch": 0.23550724637681159, "grad_norm": 1.0771246264884797, "learning_rate": 9.45263710342842e-06, "loss": 0.8959, "step": 390 },
    { "epoch": 0.23852657004830918, "grad_norm": 1.0423588321422452, "learning_rate": 9.428410195970337e-06, "loss": 0.8816, "step": 395 },
    { "epoch": 0.24154589371980675, "grad_norm": 1.0678506449777372, "learning_rate": 9.403691123159707e-06, "loss": 0.895, "step": 400 },
    { "epoch": 0.24456521739130435, "grad_norm": 1.081407003837132, "learning_rate": 9.378482632228745e-06, "loss": 0.8995, "step": 405 },
    { "epoch": 0.24758454106280192, "grad_norm": 0.9982667658619887, "learning_rate": 9.352787524802707e-06, "loss": 0.8921, "step": 410 },
    { "epoch": 0.2506038647342995, "grad_norm": 0.9993884156920768, "learning_rate": 9.326608656588502e-06, "loss": 0.8697, "step": 415 },
    { "epoch": 0.2536231884057971, "grad_norm": 0.9802873427819824, "learning_rate": 9.299948937057325e-06, "loss": 0.8783, "step": 420 },
    { "epoch": 0.25664251207729466, "grad_norm": 1.070955381882503, "learning_rate": 9.272811329121305e-06, "loss": 0.8712, "step": 425 },
    { "epoch": 0.25966183574879226, "grad_norm": 0.9929872238415203, "learning_rate": 9.245198848804197e-06, "loss": 0.8615, "step": 430 },
    { "epoch": 0.26268115942028986, "grad_norm": 1.0161446197547341, "learning_rate": 9.217114564906208e-06, "loss": 0.8741, "step": 435 },
    { "epoch": 0.26570048309178745, "grad_norm": 1.0336101978655394, "learning_rate": 9.188561598662921e-06, "loss": 0.8625, "step": 440 },
    { "epoch": 0.26871980676328505, "grad_norm": 0.987163288450998, "learning_rate": 9.159543123398416e-06, "loss": 0.8867, "step": 445 },
    { "epoch": 0.2717391304347826, "grad_norm": 0.9812717125356297, "learning_rate": 9.130062364172582e-06, "loss": 0.8765, "step": 450 },
    { "epoch": 0.2747584541062802, "grad_norm": 1.0149030855230246, "learning_rate": 9.1001225974227e-06, "loss": 0.8836, "step": 455 },
    { "epoch": 0.2777777777777778, "grad_norm": 0.9859822686466229, "learning_rate": 9.0697271505993e-06, "loss": 0.8863, "step": 460 },
    { "epoch": 0.2807971014492754, "grad_norm": 0.9863559376411042, "learning_rate": 9.038879401796358e-06, "loss": 0.8764, "step": 465 },
    { "epoch": 0.28381642512077293, "grad_norm": 0.9709580333992508, "learning_rate": 9.00758277937586e-06, "loss": 0.8708, "step": 470 },
    { "epoch": 0.28683574879227053, "grad_norm": 0.9672345556799925, "learning_rate": 8.975840761586772e-06, "loss": 0.8685, "step": 475 },
    { "epoch": 0.2898550724637681, "grad_norm": 1.0442405044492378, "learning_rate": 8.94365687617849e-06, "loss": 0.8714, "step": 480 },
    { "epoch": 0.2928743961352657, "grad_norm": 1.003497060733472, "learning_rate": 8.911034700008757e-06, "loss": 0.8723, "step": 485 },
    { "epoch": 0.29589371980676327, "grad_norm": 0.9912702297034715, "learning_rate": 8.87797785864615e-06, "loss": 0.8755, "step": 490 },
    { "epoch": 0.29891304347826086, "grad_norm": 0.9620747196303233, "learning_rate": 8.844490025967126e-06, "loss": 0.8674, "step": 495 },
    { "epoch": 0.30193236714975846, "grad_norm": 1.0470048364648747, "learning_rate": 8.810574923747729e-06, "loss": 0.8555, "step": 500 },
    { "epoch": 0.30495169082125606, "grad_norm": 0.9573341316350298, "learning_rate": 8.776236321249955e-06, "loss": 0.8806, "step": 505 },
    { "epoch": 0.3079710144927536, "grad_norm": 0.9951662459035352, "learning_rate": 8.741478034802835e-06, "loss": 0.8621, "step": 510 },
    { "epoch": 0.3109903381642512, "grad_norm": 0.9902413122683058, "learning_rate": 8.706303927378306e-06, "loss": 0.8673, "step": 515 },
    { "epoch": 0.3140096618357488, "grad_norm": 1.062455795370126, "learning_rate": 8.670717908161878e-06, "loss": 0.8728, "step": 520 },
    { "epoch": 0.3170289855072464, "grad_norm": 1.0382561615088126, "learning_rate": 8.634723932118184e-06, "loss": 0.8708, "step": 525 },
    { "epoch": 0.32004830917874394, "grad_norm": 0.9807369655332918, "learning_rate": 8.598325999551425e-06, "loss": 0.869, "step": 530 },
    { "epoch": 0.32306763285024154, "grad_norm": 1.041804239187294, "learning_rate": 8.56152815566078e-06, "loss": 0.8603, "step": 535 },
    { "epoch": 0.32608695652173914, "grad_norm": 0.98653685926571, "learning_rate": 8.524334490090848e-06, "loss": 0.8601, "step": 540 },
    { "epoch": 0.32910628019323673, "grad_norm": 1.0824551595193905, "learning_rate": 8.486749136477112e-06, "loss": 0.8527, "step": 545 },
    { "epoch": 0.3321256038647343, "grad_norm": 1.012559998288933, "learning_rate": 8.448776271986542e-06, "loss": 0.8484, "step": 550 },
    { "epoch": 0.3351449275362319, "grad_norm": 1.0472603651755694, "learning_rate": 8.41042011685336e-06, "loss": 0.8764, "step": 555 },
    { "epoch": 0.33816425120772947, "grad_norm": 1.0091109552760384, "learning_rate": 8.371684933909996e-06, "loss": 0.8778, "step": 560 },
    { "epoch": 0.34118357487922707, "grad_norm": 0.9679690923784716, "learning_rate": 8.33257502811334e-06, "loss": 0.8573, "step": 565 },
    { "epoch": 0.3442028985507246, "grad_norm": 1.0239715610365467, "learning_rate": 8.293094746066283e-06, "loss": 0.8759, "step": 570 },
    { "epoch": 0.3472222222222222, "grad_norm": 1.0215620182556282, "learning_rate": 8.253248475534656e-06, "loss": 0.8524, "step": 575 },
    { "epoch": 0.3502415458937198, "grad_norm": 1.0214058034537257, "learning_rate": 8.213040644959572e-06, "loss": 0.8479, "step": 580 },
    { "epoch": 0.3532608695652174, "grad_norm": 0.9901787487365526, "learning_rate": 8.172475722965263e-06, "loss": 0.8387, "step": 585 },
    { "epoch": 0.35628019323671495, "grad_norm": 0.9666966190379256, "learning_rate": 8.131558217862444e-06, "loss": 0.8389, "step": 590 },
    { "epoch": 0.35929951690821255, "grad_norm": 0.9919380582986378, "learning_rate": 8.090292677147268e-06, "loss": 0.8623, "step": 595 },
    { "epoch": 0.36231884057971014, "grad_norm": 0.9829820982309616, "learning_rate": 8.048683686995921e-06, "loss": 0.8483, "step": 600 },
    { "epoch": 0.36533816425120774, "grad_norm": 1.0033573699240617, "learning_rate": 8.006735871754932e-06, "loss": 0.8467, "step": 605 },
    { "epoch": 0.36835748792270534, "grad_norm": 0.9729124425349844, "learning_rate": 7.96445389342722e-06, "loss": 0.8482, "step": 610 },
    { "epoch": 0.3713768115942029, "grad_norm": 0.9846164642356084, "learning_rate": 7.921842451153982e-06, "loss": 0.8585, "step": 615 },
    { "epoch": 0.3743961352657005, "grad_norm": 1.0204270522909766, "learning_rate": 7.878906280692424e-06, "loss": 0.8609, "step": 620 },
    { "epoch": 0.3774154589371981, "grad_norm": 0.9508580696057062, "learning_rate": 7.835650153889449e-06, "loss": 0.8559, "step": 625 },
    { "epoch": 0.3804347826086957, "grad_norm": 1.0138140609913775, "learning_rate": 7.792078878151318e-06, "loss": 0.8369, "step": 630 },
    { "epoch": 0.3834541062801932, "grad_norm": 0.9931878326042032, "learning_rate": 7.748197295909359e-06, "loss": 0.8505, "step": 635 },
    { "epoch": 0.3864734299516908, "grad_norm": 1.0342608052140974, "learning_rate": 7.704010284081801e-06, "loss": 0.849, "step": 640 },
    { "epoch": 0.3894927536231884, "grad_norm": 0.9819364059644684, "learning_rate": 7.65952275353175e-06, "loss": 0.8368, "step": 645 },
    { "epoch": 0.392512077294686, "grad_norm": 1.0187861799083533, "learning_rate": 7.614739648521412e-06, "loss": 0.8391, "step": 650 },
    { "epoch": 0.39553140096618356, "grad_norm": 1.0055302486721571, "learning_rate": 7.56966594616259e-06, "loss": 0.8473, "step": 655 },
    { "epoch": 0.39855072463768115, "grad_norm": 1.0213194412236104, "learning_rate": 7.524306655863544e-06, "loss": 0.8584, "step": 660 },
    { "epoch": 0.40157004830917875, "grad_norm": 1.0457938971980807, "learning_rate": 7.478666818772252e-06, "loss": 0.8462, "step": 665 },
    { "epoch": 0.40458937198067635, "grad_norm": 0.9848170024807922, "learning_rate": 7.432751507216146e-06, "loss": 0.844, "step": 670 },
    { "epoch": 0.4076086956521739, "grad_norm": 1.0787598071037892, "learning_rate": 7.386565824138378e-06, "loss": 0.8326, "step": 675 },
    { "epoch": 0.4106280193236715, "grad_norm": 0.9847580752716315, "learning_rate": 7.3401149025306995e-06, "loss": 0.853, "step": 680 },
    { "epoch": 0.4136473429951691, "grad_norm": 1.0402445922973875, "learning_rate": 7.293403904862981e-06, "loss": 0.849, "step": 685 },
    { "epoch": 0.4166666666666667, "grad_norm": 0.952409187764052, "learning_rate": 7.246438022509465e-06, "loss": 0.8323, "step": 690 },
    { "epoch": 0.4196859903381642, "grad_norm": 1.0019800169099848, "learning_rate": 7.199222475171812e-06, "loss": 0.8508, "step": 695 },
    { "epoch": 0.4227053140096618, "grad_norm": 0.9868139541797347, "learning_rate": 7.151762510298985e-06, "loss": 0.8313, "step": 700 },
    { "epoch": 0.4257246376811594, "grad_norm": 0.9834600378684528, "learning_rate": 7.104063402504065e-06, "loss": 0.8431, "step": 705 },
    { "epoch": 0.428743961352657, "grad_norm": 0.9566341224700101, "learning_rate": 7.056130452978039e-06, "loss": 0.8291, "step": 710 },
    { "epoch": 0.43176328502415456, "grad_norm": 0.9504357134569014, "learning_rate": 7.0079689889006275e-06, "loss": 0.8176, "step": 715 },
    { "epoch": 0.43478260869565216, "grad_norm": 0.987828355029499, "learning_rate": 6.959584362848239e-06, "loss": 0.8282, "step": 720 },
    { "epoch": 0.43780193236714976, "grad_norm": 0.9731535769551426, "learning_rate": 6.910981952199097e-06, "loss": 0.8376, "step": 725 },
    { "epoch": 0.44082125603864736, "grad_norm": 1.0015295092351129, "learning_rate": 6.862167158535599e-06, "loss": 0.828, "step": 730 },
    { "epoch": 0.4438405797101449, "grad_norm": 0.9879634179543377, "learning_rate": 6.813145407044003e-06, "loss": 0.8194, "step": 735 },
    { "epoch": 0.4468599033816425, "grad_norm": 0.9753047696912267, "learning_rate": 6.763922145911474e-06, "loss": 0.8336, "step": 740 },
    { "epoch": 0.4498792270531401, "grad_norm": 0.9924657298914499, "learning_rate": 6.714502845720595e-06, "loss": 0.8329, "step": 745 },
    { "epoch": 0.4528985507246377, "grad_norm": 1.0659305402069452, "learning_rate": 6.664892998841361e-06, "loss": 0.8224, "step": 750 },
    { "epoch": 0.4559178743961353, "grad_norm": 0.9926149466224716, "learning_rate": 6.61509811882078e-06, "loss": 0.8204, "step": 755 },
    { "epoch": 0.45893719806763283, "grad_norm": 1.0312896682847614, "learning_rate": 6.565123739770102e-06, "loss": 0.8205, "step": 760 },
    { "epoch": 0.46195652173913043, "grad_norm": 0.9742444674619639, "learning_rate": 6.5149754157497645e-06, "loss": 0.8281, "step": 765 },
    { "epoch": 0.46497584541062803, "grad_norm": 0.9571473848092585, "learning_rate": 6.464658720152135e-06, "loss": 0.8164, "step": 770 },
    { "epoch": 0.46799516908212563, "grad_norm": 0.9959898314723186, "learning_rate": 6.41417924508208e-06, "loss": 0.843, "step": 775 },
    { "epoch": 0.47101449275362317, "grad_norm": 0.9960648835738334, "learning_rate": 6.363542600735486e-06, "loss": 0.8167, "step": 780 },
    { "epoch": 0.47403381642512077, "grad_norm": 1.0006502116357578, "learning_rate": 6.312754414775737e-06, "loss": 0.8306, "step": 785 },
    { "epoch": 0.47705314009661837, "grad_norm": 1.0341128294537794, "learning_rate": 6.261820331708275e-06, "loss": 0.8404, "step": 790 },
    { "epoch": 0.48007246376811596, "grad_norm": 1.013484766876777, "learning_rate": 6.210746012253277e-06, "loss": 0.8383, "step": 795 },
    { "epoch": 0.4830917874396135, "grad_norm": 1.0048584571797106, "learning_rate": 6.159537132716532e-06, "loss": 0.8128, "step": 800 },
    { "epoch": 0.4861111111111111, "grad_norm": 1.0066412617634983, "learning_rate": 6.108199384358595e-06, "loss": 0.8182, "step": 805 },
    { "epoch": 0.4891304347826087, "grad_norm": 0.9873011172531366, "learning_rate": 6.0567384727622566e-06, "loss": 0.8206, "step": 810 },
    { "epoch": 0.4921497584541063, "grad_norm": 1.0092839597226688, "learning_rate": 6.005160117198448e-06, "loss": 0.8345, "step": 815 },
    { "epoch": 0.49516908212560384, "grad_norm": 0.9625783040223157, "learning_rate": 5.953470049990605e-06, "loss": 0.8195, "step": 820 },
    { "epoch": 0.49818840579710144, "grad_norm": 1.0146672682921258, "learning_rate": 5.90167401587759e-06, "loss": 0.8222, "step": 825 },
    { "epoch": 0.501207729468599, "grad_norm": 0.9750234992861176, "learning_rate": 5.84977777137523e-06, "loss": 0.8149, "step": 830 },
    { "epoch": 0.5042270531400966, "grad_norm": 1.0388609954354933, "learning_rate": 5.797787084136556e-06, "loss": 0.8297, "step": 835 },
    { "epoch": 0.5072463768115942, "grad_norm": 1.0092212264949234, "learning_rate": 5.745707732310781e-06, "loss": 0.8255, "step": 840 },
    { "epoch": 0.5102657004830918, "grad_norm": 1.0014867558076073, "learning_rate": 5.693545503901149e-06, "loss": 0.8212, "step": 845 },
    { "epoch": 0.5132850241545893, "grad_norm": 0.9955486191780001, "learning_rate": 5.641306196121643e-06, "loss": 0.8202, "step": 850 },
    { "epoch": 0.5163043478260869, "grad_norm": 1.033006798193324, "learning_rate": 5.5889956147527156e-06, "loss": 0.8209, "step": 855 },
    { "epoch": 0.5193236714975845, "grad_norm": 1.0429153983324886, "learning_rate": 5.536619573496027e-06, "loss": 0.8194, "step": 860 },
    { "epoch": 0.5223429951690821, "grad_norm": 1.026989922350979, "learning_rate": 5.484183893328332e-06, "loss": 0.8199, "step": 865 },
    { "epoch": 0.5253623188405797, "grad_norm": 1.0238509806451253, "learning_rate": 5.431694401854545e-06, "loss": 0.7951, "step": 870 },
    { "epoch": 0.5283816425120773, "grad_norm": 0.9563995023957641, "learning_rate": 5.379156932660067e-06, "loss": 0.8185, "step": 875 },
    { "epoch": 0.5314009661835749, "grad_norm": 0.9676712123206674, "learning_rate": 5.326577324662459e-06, "loss": 0.8089, "step": 880 },
    { "epoch": 0.5344202898550725, "grad_norm": 0.9898039955174832, "learning_rate": 5.273961421462505e-06, "loss": 0.8238, "step": 885 },
    { "epoch": 0.5374396135265701, "grad_norm": 0.9452218463977323, "learning_rate": 5.221315070694775e-06, "loss": 0.8042, "step": 890 },
    { "epoch": 0.5404589371980676, "grad_norm": 1.0185049851273638, "learning_rate": 5.168644123377725e-06, "loss": 0.8017, "step": 895 },
    { "epoch": 0.5434782608695652, "grad_norm": 1.0173971075365176, "learning_rate": 5.1159544332634256e-06, "loss": 0.821, "step": 900 },
    { "epoch": 0.5464975845410628, "grad_norm": 1.0610338990705965, "learning_rate": 5.063251856186991e-06, "loss": 0.802, "step": 905 },
    { "epoch": 0.5495169082125604, "grad_norm": 0.978719611658541, "learning_rate": 5.010542249415761e-06, "loss": 0.8133, "step": 910 },
    { "epoch": 0.552536231884058, "grad_norm": 0.9889441669899723, "learning_rate": 4.95783147099835e-06, "loss": 0.8039, "step": 915 },
    { "epoch": 0.5555555555555556, "grad_norm": 1.0148896374998926, "learning_rate": 4.90512537911358e-06, "loss": 0.8146, "step": 920 },
    { "epoch": 0.5585748792270532, "grad_norm": 0.9891668368377164, "learning_rate": 4.852429831419428e-06, "loss": 0.8214, "step": 925 },
    { "epoch": 0.5615942028985508, "grad_norm": 1.041355982063113, "learning_rate": 4.799750684402006e-06, "loss": 0.8041, "step": 930 },
    { "epoch": 0.5646135265700483, "grad_norm": 1.0153214084697235, "learning_rate": 4.747093792724679e-06, "loss": 0.8121, "step": 935 },
    { "epoch": 0.5676328502415459, "grad_norm": 1.0183122535937061, "learning_rate": 4.6944650085774095e-06, "loss": 0.8002, "step": 940 },
    { "epoch": 0.5706521739130435, "grad_norm": 0.9957864079530732, "learning_rate": 4.641870181026322e-06, "loss": 0.8157, "step": 945 },
    { "epoch": 0.5736714975845411, "grad_norm": 1.0370461103314468, "learning_rate": 4.589315155363683e-06, "loss": 0.811, "step": 950 },
    { "epoch": 0.5766908212560387, "grad_norm": 1.004518929806163, "learning_rate": 4.53680577245824e-06, "loss": 0.8016, "step": 955 },
    { "epoch": 0.5797101449275363, "grad_norm": 0.9969753713374386, "learning_rate": 4.484347868106097e-06, "loss": 0.7989, "step": 960 },
    { "epoch": 0.5827294685990339, "grad_norm": 1.007321359295188, "learning_rate": 4.431947272382118e-06, "loss": 0.8094, "step": 965 },
    { "epoch": 0.5857487922705314, "grad_norm": 0.98791492635868, "learning_rate": 4.379609808992e-06, "loss": 0.8143, "step": 970 },
    { "epoch": 0.5887681159420289, "grad_norm": 0.9922336761644133, "learning_rate": 4.327341294625019e-06, "loss": 0.7996, "step": 975 },
    { "epoch": 0.5917874396135265, "grad_norm": 0.9725954746289334, "learning_rate": 4.275147538307594e-06, "loss": 0.8133, "step": 980 },
    { "epoch": 0.5948067632850241, "grad_norm": 0.9717320614269667, "learning_rate": 4.223034340757666e-06, "loss": 0.8005, "step": 985 },
    { "epoch": 0.5978260869565217, "grad_norm": 0.9389495916799526, "learning_rate": 4.171007493740023e-06, "loss": 0.7958, "step": 990 },
    { "epoch": 0.6008454106280193, "grad_norm": 0.9810023298322507, "learning_rate": 4.1190727794226175e-06, "loss": 0.8165, "step": 995 },
    { "epoch": 0.6038647342995169, "grad_norm": 0.9779941495613733, "learning_rate": 4.067235969733937e-06, "loss": 0.7976, "step": 1000 },
    { "epoch": 0.6068840579710145, "grad_norm": 0.9510093739942304, "learning_rate": 4.015502825721537e-06, "loss": 0.8073, "step": 1005 },
    { "epoch": 0.6099033816425121, "grad_norm": 0.9936696587970497, "learning_rate": 3.963879096911751e-06, "loss": 0.8068, "step": 1010 },
    { "epoch": 0.6129227053140096, "grad_norm": 0.9978910530135231, "learning_rate": 3.91237052067072e-06, "loss": 0.8062, "step": 1015 },
    { "epoch": 0.6159420289855072, "grad_norm": 0.9723677719807516, "learning_rate": 3.860982821566729e-06, "loss": 0.8178, "step": 1020 },
    { "epoch": 0.6189613526570048, "grad_norm": 0.9657700887809438, "learning_rate": 3.8097217107340107e-06, "loss": 0.7836, "step": 1025 },
    { "epoch": 0.6219806763285024, "grad_norm": 0.9716901570758927, "learning_rate": 3.7585928852380025e-06, "loss": 0.7911, "step": 1030 },
    { "epoch": 0.625, "grad_norm": 1.0126975602818444, "learning_rate": 3.7076020274421996e-06, "loss": 0.8127, "step": 1035 },
    { "epoch": 0.6280193236714976, "grad_norm": 1.0012384413238815, "learning_rate": 3.6567548043766157e-06, "loss": 0.7926, "step": 1040 },
    { "epoch": 0.6310386473429952, "grad_norm": 1.0533344104817726, "learning_rate": 3.6060568671079658e-06, "loss": 0.7971, "step": 1045 },
    { "epoch": 0.6340579710144928, "grad_norm": 0.9909170809609505, "learning_rate": 3.5555138501116247e-06, "loss": 0.8032, "step": 1050 },
    { "epoch": 0.6370772946859904, "grad_norm": 1.0058635009835104, "learning_rate": 3.5051313706453995e-06, "loss": 0.8088, "step": 1055 },
    { "epoch": 0.6400966183574879, "grad_norm": 0.982286478542063, "learning_rate": 3.4549150281252635e-06, "loss": 0.8021, "step": 1060 },
    { "epoch": 0.6431159420289855, "grad_norm": 0.9912609730677274, "learning_rate": 3.40487040350303e-06, "loss": 0.8048, "step": 1065 },
    { "epoch": 0.6461352657004831, "grad_norm": 1.0240454701942718, "learning_rate": 3.355003058646105e-06, "loss": 0.8219, "step": 1070 },
    { "epoch": 0.6491545893719807, "grad_norm": 0.955787854052142, "learning_rate": 3.305318535719343e-06, "loss": 0.7735, "step": 1075 },
    { "epoch": 0.6521739130434783, "grad_norm": 0.9556171904961169, "learning_rate": 3.2558223565691104e-06, "loss": 0.7955, "step": 1080 },
    { "epoch": 0.6551932367149759, "grad_norm": 0.9869405009720019, "learning_rate": 3.2065200221095905e-06, "loss": 0.7903, "step": 1085 },
    { "epoch": 0.6582125603864735, "grad_norm": 1.0184010512565573, "learning_rate": 3.1574170117114293e-06, "loss": 0.8059, "step": 1090 },
    { "epoch": 0.6612318840579711, "grad_norm": 0.9615842976570751, "learning_rate": 3.1085187825927555e-06, "loss": 0.7919, "step": 1095 },
    { "epoch": 0.6642512077294686, "grad_norm": 1.0051644960541348, "learning_rate": 3.0598307692126904e-06, "loss": 0.8025, "step": 1100 },
    { "epoch": 0.6672705314009661, "grad_norm": 0.9641208944556371, "learning_rate": 3.0113583826673655e-06, "loss": 0.799, "step": 1105 },
    { "epoch": 0.6702898550724637, "grad_norm": 0.9925774913072852, "learning_rate": 2.9631070100885373e-06, "loss": 0.8106, "step": 1110 },
    { "epoch": 0.6733091787439613, "grad_norm": 1.0107634066694202, "learning_rate": 2.915082014044883e-06, "loss": 0.8057, "step": 1115 },
    { "epoch": 0.6763285024154589, "grad_norm": 1.0046756211140373, "learning_rate": 2.867288731946004e-06, "loss": 0.8031, "step": 1120 },
    { "epoch": 0.6793478260869565, "grad_norm": 1.0053926491433522, "learning_rate": 2.8197324754492456e-06, "loss": 0.7921, "step": 1125 },
    { "epoch": 0.6823671497584541, "grad_norm": 1.006856299076646, "learning_rate": 2.7724185298693596e-06, "loss": 0.792, "step": 1130 },
    { "epoch": 0.6853864734299517, "grad_norm": 0.9902733067610593, "learning_rate": 2.7253521535911144e-06, "loss": 0.8113, "step": 1135 },
    { "epoch": 0.6884057971014492, "grad_norm": 1.022489282390403, "learning_rate": 2.678538577484871e-06, "loss": 0.802, "step": 1140 },
    { "epoch": 0.6914251207729468, "grad_norm": 1.0032934808608855, "learning_rate": 2.6319830043252616e-06, "loss": 0.7909, "step": 1145 },
    { "epoch": 0.6944444444444444, "grad_norm": 1.0127829062494287, "learning_rate": 2.5856906082129313e-06, "loss": 0.8063, "step": 1150 },
    { "epoch": 0.697463768115942, "grad_norm": 1.0334651022516967, "learning_rate": 2.53966653399952e-06, "loss": 0.7905, "step": 1155 },
    { "epoch": 0.7004830917874396, "grad_norm": 0.9947842946950315, "learning_rate": 2.4939158967158657e-06, "loss": 0.8053, "step": 1160 },
    { "epoch": 0.7035024154589372, "grad_norm": 1.0205258197932596, "learning_rate": 2.448443781003527e-06, "loss": 0.7921, "step": 1165 },
    { "epoch": 0.7065217391304348, "grad_norm": 1.016719694781447, "learning_rate": 2.403255240549693e-06, "loss": 0.7954, "step": 1170 },
    { "epoch": 0.7095410628019324, "grad_norm": 0.9765393946874324, "learning_rate": 2.3583552975255108e-06, "loss": 0.7831, "step": 1175 },
    { "epoch": 0.7125603864734299, "grad_norm": 1.0013834706558566, "learning_rate": 2.313748942027956e-06, "loss": 0.798, "step": 1180 },
    { "epoch": 0.7155797101449275, "grad_norm": 0.9832682717375602, "learning_rate": 2.269441131525213e-06, "loss": 0.7868, "step": 1185 },
    { "epoch": 0.7185990338164251, "grad_norm": 1.012871482321928, "learning_rate": 2.225436790305733e-06, "loss": 0.8016, "step": 1190 },
    { "epoch": 0.7216183574879227, "grad_norm": 1.0215595382277152, "learning_rate": 2.181740808930947e-06, "loss": 0.7958, "step": 1195 },
    { "epoch": 0.7246376811594203, "grad_norm": 0.9908102708938528, "learning_rate": 2.1383580436917452e-06, "loss": 0.7881, "step": 1200 },
    { "epoch": 0.7276570048309179, "grad_norm": 0.9703952014882963, "learning_rate": 2.0952933160687456e-06, "loss": 0.7935, "step": 1205 },
    { "epoch": 0.7306763285024155, "grad_norm": 1.0107034346751447, "learning_rate": 2.052551412196456e-06, "loss": 0.7904, "step": 1210 },
    { "epoch": 0.7336956521739131, "grad_norm": 0.9826589600845559, "learning_rate": 2.010137082331354e-06, "loss": 0.78, "step": 1215 },
    { "epoch": 0.7367149758454107, "grad_norm": 1.040060721769402, "learning_rate": 1.96805504032393e-06, "loss": 0.7876, "step": 1220 },
    { "epoch": 0.7397342995169082, "grad_norm": 0.9802643555891328, "learning_rate": 1.9263099630948274e-06, "loss": 0.7926, "step": 1225 },
    { "epoch": 0.7427536231884058, "grad_norm": 0.9845466368440403, "learning_rate": 1.8849064901150372e-06, "loss": 0.7882, "step": 1230 },
    { "epoch": 0.7457729468599034, "grad_norm": 0.9835806835545442, "learning_rate": 1.8438492228902893e-06, "loss": 0.7891, "step": 1235 },
    { "epoch": 0.748792270531401, "grad_norm": 1.0076367530435018, "learning_rate": 1.8031427244496357e-06, "loss": 0.7867, "step": 1240 },
    { "epoch": 0.7518115942028986, "grad_norm": 1.0313420481341204, "learning_rate": 1.7627915188383382e-06, "loss": 0.7982, "step": 1245 },
    { "epoch": 0.7548309178743962, "grad_norm": 0.9516216668137772, "learning_rate": 1.7228000906150672e-06, "loss": 0.7916, "step": 1250 },
    { "epoch": 0.7578502415458938, "grad_norm": 0.9955635624833065, "learning_rate": 1.6831728843534962e-06, "loss": 0.7871, "step": 1255 },
    { "epoch": 0.7608695652173914, "grad_norm": 1.0446368549655927, "learning_rate": 1.6439143041483352e-06, "loss": 0.7949, "step": 1260 },
    { "epoch": 0.7638888888888888, "grad_norm": 1.0107540263173203, "learning_rate": 1.6050287131258862e-06, "loss": 0.7782, "step": 1265 },
    { "epoch": 0.7669082125603864, "grad_norm": 1.0090639381521684, "learning_rate": 1.5665204329591066e-06, "loss": 0.7733, "step": 1270 },
    { "epoch": 0.769927536231884, "grad_norm": 1.017965866931342, "learning_rate": 1.528393743387328e-06, "loss": 0.7854, "step": 1275 },
    { "epoch": 0.7729468599033816, "grad_norm": 0.9757529990214369, "learning_rate": 1.4906528817406052e-06, "loss": 0.7896, "step": 1280 },
    { "epoch": 0.7759661835748792, "grad_norm": 0.9627515940764363, "learning_rate": 1.453302042468786e-06, "loss": 0.7743, "step": 1285 },
    { "epoch": 0.7789855072463768, "grad_norm": 0.9813369690634022, "learning_rate": 1.4163453766753537e-06, "loss": 0.7861, "step": 1290 },
    { "epoch": 0.7820048309178744, "grad_norm": 0.950962763947152, "learning_rate": 1.3797869916560692e-06, "loss": 0.7769, "step": 1295 },
    { "epoch": 0.785024154589372, "grad_norm": 1.0035116704753089, "learning_rate": 1.3436309504425137e-06, "loss": 0.7842, "step": 1300 },
    { "epoch": 0.7880434782608695, "grad_norm": 1.0316977087419814, "learning_rate": 1.3078812713505079e-06, "loss": 0.7889, "step": 1305 },
    { "epoch": 0.7910628019323671, "grad_norm": 0.9724293464755874, "learning_rate": 1.2725419275335404e-06, "loss": 0.792, "step": 1310 },
    { "epoch": 0.7940821256038647, "grad_norm": 1.024840109809241, "learning_rate": 1.237616846541192e-06, "loss": 0.7868, "step": 1315 },
    { "epoch": 0.7971014492753623, "grad_norm": 1.0132634017753674, "learning_rate": 1.2031099098826376e-06, "loss": 0.7905, "step": 1320 },
    { "epoch": 0.8001207729468599, "grad_norm": 0.9844574186374294, "learning_rate": 1.1690249525952569e-06, "loss": 0.7908, "step": 1325 },
    { "epoch": 0.8031400966183575, "grad_norm": 1.013907969102555, "learning_rate": 1.1353657628184217e-06, "loss": 0.7698, "step": 1330 },
    { "epoch": 0.8061594202898551, "grad_norm": 1.0118537572839486, "learning_rate": 1.1021360813724924e-06, "loss": 0.7926, "step": 1335 },
    { "epoch": 0.8091787439613527, "grad_norm": 0.9789094346489745, "learning_rate": 1.0693396013430552e-06, "loss": 0.7882, "step": 1340 },
    { "epoch": 0.8121980676328503, "grad_norm": 0.9834676458951446, "learning_rate": 1.036979967670494e-06, "loss": 0.7928, "step": 1345 },
    { "epoch": 0.8152173913043478, "grad_norm": 1.0061732844801357, "learning_rate": 1.0050607767448928e-06, "loss": 0.7744, "step": 1350 },
    { "epoch": 0.8182367149758454, "grad_norm": 1.0145249274790882, "learning_rate": 9.735855760063412e-07, "loss": 0.7907, "step": 1355 },
    { "epoch": 0.821256038647343, "grad_norm": 1.0014057594170862, "learning_rate": 9.425578635506721e-07, "loss": 0.7821, "step": 1360 },
    { "epoch": 0.8242753623188406, "grad_norm": 0.9863204913817585, "learning_rate": 9.119810877406998e-07, "loss": 0.786, "step": 1365 },
    { "epoch": 0.8272946859903382, "grad_norm": 0.9959450118029448, "learning_rate": 8.818586468229695e-07, "loss": 0.7838, "step": 1370 },
    { "epoch": 0.8303140096618358, "grad_norm": 1.0702182133587888, "learning_rate": 8.521938885500825e-07, "loss": 0.786, "step": 1375 },
    { "epoch": 0.8333333333333334, "grad_norm": 0.9968624582335651, "learning_rate": 8.229901098086335e-07, "loss": 0.7862, "step": 1380 },
    { "epoch": 0.836352657004831, "grad_norm": 0.9863986016846847, "learning_rate": 7.942505562528024e-07, "loss": 0.787, "step": 1385 },
    { "epoch": 0.8393719806763285, "grad_norm": 0.9545460547627316, "learning_rate": 7.659784219436373e-07, "loss": 0.7731, "step": 1390 },
    { "epoch": 0.842391304347826, "grad_norm": 1.007290245733153, "learning_rate": 7.381768489940678e-07, "loss": 0.7667, "step": 1395 },
    { "epoch": 0.8454106280193237, "grad_norm": 0.9929498673353992, "learning_rate": 7.108489272197089e-07, "loss": 0.7895, "step": 1400 },
    { "epoch": 0.8484299516908212, "grad_norm": 0.9848636431503859, "learning_rate": 6.839976937954479e-07, "loss": 0.7891, "step": 1405 },
    { "epoch": 0.8514492753623188, "grad_norm": 1.0038021231533398, "learning_rate": 6.576261329179123e-07, "loss": 0.7863, "step": 1410 },
    { "epoch": 0.8544685990338164, "grad_norm": 1.0021524457556812, "learning_rate": 6.317371754738044e-07, "loss": 0.7933, "step": 1415 },
    { "epoch": 0.857487922705314, "grad_norm": 0.9768075572854382, "learning_rate": 6.06333698714171e-07, "loss": 0.7771, "step": 1420 },
    { "epoch": 0.8605072463768116, "grad_norm": 0.9820365924428122, "learning_rate": 5.814185259346267e-07, "loss": 0.777, "step": 1425 },
    { "epoch": 0.8635265700483091, "grad_norm": 0.9872147557767675, "learning_rate": 5.56994426161584e-07, "loss": 0.791, "step": 1430 },
    { "epoch": 0.8665458937198067, "grad_norm": 0.9829200122907301, "learning_rate": 5.330641138445064e-07, "loss": 0.7764, "step": 1435 },
    { "epoch": 0.8695652173913043, "grad_norm": 0.9944086706773513, "learning_rate": 5.096302485542265e-07, "loss": 0.7891, "step": 1440 },
    { "epoch": 0.8725845410628019, "grad_norm": 1.0225322366517011, "learning_rate": 4.866954346873715e-07, "loss": 0.7897, "step": 1445 },
    { "epoch": 0.8756038647342995, "grad_norm": 1.0049159635351292, "learning_rate": 4.642622211769099e-07, "loss": 0.7868, "step": 1450 },
    { "epoch": 0.8786231884057971, "grad_norm": 0.9901193471531758, "learning_rate": 4.4233310120887387e-07, "loss": 0.7786, "step": 1455 },
    { "epoch": 0.8816425120772947, "grad_norm": 0.9957012743951548, "learning_rate": 4.209105119452628e-07, "loss": 0.7836, "step": 1460 },
    { "epoch": 0.8846618357487923, "grad_norm": 1.0004791381791707, "learning_rate": 3.999968342531918e-07, "loss": 0.768, "step": 1465 },
    { "epoch": 0.8876811594202898, "grad_norm": 0.9987769951972304, "learning_rate": 3.7959439244027727e-07, "loss": 0.7749, "step": 1470 },
    { "epoch": 0.8907004830917874, "grad_norm": 0.9862109297591363, "learning_rate": 3.5970545399632574e-07, "loss": 0.778, "step": 1475 },
    { "epoch": 0.893719806763285, "grad_norm": 0.9791302034502961, "learning_rate": 3.4033222934131914e-07, "loss": 0.7771, "step": 1480 },
    { "epoch": 0.8967391304347826, "grad_norm": 0.9625302334808536, "learning_rate": 3.214768715797656e-07, "loss": 0.7638, "step": 1485 },
    { "epoch": 0.8997584541062802, "grad_norm": 0.9748416151171142, "learning_rate": 3.0314147626139543e-07, "loss": 0.7845, "step": 1490 },
    { "epoch": 0.9027777777777778, "grad_norm": 0.9949692829145934, "learning_rate": 2.853280811482734e-07, "loss": 0.7722, "step": 1495 },
    { "epoch": 0.9057971014492754, "grad_norm": 0.9744535368779195, "learning_rate": 2.6803866598832216e-07, "loss": 0.7794, "step": 1500 },
    { "epoch": 0.908816425120773, "grad_norm": 0.992041943581358, "learning_rate": 2.5127515229529665e-07, "loss": 0.7836, "step": 1505 },
    { "epoch": 0.9118357487922706, "grad_norm": 0.9813156647468022, "learning_rate": 2.350394031352343e-07, "loss": 0.7863, "step": 1510 },
    { "epoch": 0.9148550724637681, "grad_norm": 0.9963647656973325, "learning_rate": 2.1933322291938897e-07, "loss": 0.782, "step": 1515 },
    { "epoch": 0.9178743961352657, "grad_norm": 1.0200768363247714, "learning_rate": 2.041583572037037e-07, "loss": 0.7924, "step": 1520 },
    { "epoch": 0.9208937198067633, "grad_norm": 0.9739653495159714, "learning_rate": 1.8951649249480287e-07, "loss": 0.7899, "step": 1525 },
    { "epoch": 0.9239130434782609, "grad_norm": 0.995954989126771, "learning_rate": 1.7540925606256088e-07, "loss": 0.8044, "step": 1530 },
    { "epoch": 0.9269323671497585, "grad_norm": 0.9939242624751221, "learning_rate": 1.6183821575925186e-07, "loss": 0.7777, "step": 1535 },
    { "epoch": 0.9299516908212561, "grad_norm": 0.9799525062498867, "learning_rate": 1.4880487984529846e-07, "loss": 0.7888, "step": 1540 },
    { "epoch": 0.9329710144927537, "grad_norm": 0.9911311370863874, "learning_rate": 1.363106968216482e-07, "loss": 0.7935, "step": 1545 },
    { "epoch": 0.9359903381642513, "grad_norm": 1.003564658664697, "learning_rate": 1.243570552687895e-07, "loss": 0.7938, "step": 1550 },
    { "epoch": 0.9390096618357487, "grad_norm": 0.9800326260440435, "learning_rate": 1.1294528369242663e-07, "loss": 0.7928, "step": 1555 },
    { "epoch": 0.9420289855072463, "grad_norm": 0.986565572978078, "learning_rate": 1.020766503758347e-07, "loss": 0.7906, "step": 1560 },
    { "epoch": 0.9450483091787439, "grad_norm": 0.966200390084136, "learning_rate": 9.175236323890058e-08, "loss": 0.7864, "step": 1565 },
    { "epoch": 0.9480676328502415, "grad_norm": 0.9843328895814176, "learning_rate": 8.197356970388148e-08, "loss": 0.7798, "step": 1570 },
    { "epoch": 0.9510869565217391, "grad_norm": 1.0058804827600163, "learning_rate": 7.274135656787917e-08, "loss": 0.7904, "step": 1575 },
    { "epoch": 0.9541062801932367, "grad_norm": 0.9570446950074213, "learning_rate": 6.405674988205602e-08, "loss": 0.7725, "step": 1580 },
    { "epoch": 0.9571256038647343, "grad_norm": 1.0097440052548536, "learning_rate": 5.592071483760397e-08, "loss": 0.7988, "step": 1585 },
    { "epoch": 0.9601449275362319, "grad_norm": 1.0050549132372466, "learning_rate": 4.833415565847155e-08, "loss": 0.7843, "step": 1590 },
    { "epoch": 0.9631642512077294, "grad_norm": 1.013353688401092, "learning_rate": 4.1297915500873034e-08, "loss": 0.7919, "step": 1595 },
    { "epoch": 0.966183574879227, "grad_norm": 0.977208847695192, "learning_rate": 3.481277635957903e-08, "loss": 0.7908, "step": 1600 },
    { "epoch": 0.9692028985507246, "grad_norm": 0.9806969063880059, "learning_rate": 2.88794589810093e-08, "loss": 0.7883, "step": 1605 },
    { "epoch": 0.9722222222222222, "grad_norm": 0.9884154444375535, "learning_rate": 2.3498622783128533e-08, "loss": 0.7756, "step": 1610 },
    { "epoch": 0.9752415458937198, "grad_norm": 1.0036601159352383, "learning_rate": 1.8670865782161042e-08, "loss": 0.785, "step": 1615 },
    { "epoch": 0.9782608695652174, "grad_norm": 1.0066570396591208, "learning_rate": 1.4396724526127282e-08, "loss": 0.784, "step": 1620 },
    { "epoch": 0.981280193236715, "grad_norm": 1.0110694707000738, "learning_rate": 1.067667403521433e-08, "loss": 0.7993, "step": 1625 },
    { "epoch": 0.9842995169082126, "grad_norm": 0.9862631356943906, "learning_rate": 7.51112774898144e-09, "loss": 0.7716, "step": 1630 },
    { "epoch": 0.9873188405797102, "grad_norm": 1.0008914135572442, "learning_rate": 4.900437480413467e-09, "loss": 0.7775, "step": 1635 },
    { "epoch": 0.9903381642512077, "grad_norm": 0.9753190697055402, "learning_rate": 2.844893376816593e-09, "loss": 0.7655, "step": 1640 },
    { "epoch": 0.9933574879227053, "grad_norm": 0.9719880320026482, "learning_rate": 1.3447238875774482e-09, "loss": 0.7747, "step": 1645 },
    { "epoch": 0.9963768115942029, "grad_norm": 0.9866118574069571, "learning_rate": 4.000957387700899e-10, "loss": 0.7889, "step": 1650 },
    { "epoch": 0.9993961352657005, "grad_norm": 1.0028543954306115, "learning_rate": 1.1113914626381672e-11, "loss": 0.7873, "step": 1655 },
    { "epoch": 1.0, "eval_runtime": 0.8993, "eval_samples_per_second": 11.12, "eval_steps_per_second": 3.336, "step": 1656 },
    { "epoch": 1.0, "step": 1656, "total_flos": 44008382398464.0, "train_loss": 0.0, "train_runtime": 0.0097, "train_samples_per_second": 2738278.668, "train_steps_per_second": 171187.643 }
  ],
| "logging_steps": 5, | |
| "max_steps": 1656, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 44008382398464.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |