| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.9996906897618311, |
| "eval_steps": 500, |
| "global_step": 1414, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0007069948301003049, |
| "grad_norm": 867.8281860351562, |
| "learning_rate": 1e-06, |
| "loss": 69.0521, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0014139896602006097, |
| "grad_norm": 522.1878662109375, |
| "learning_rate": 1.9244594481721914e-05, |
| "loss": 75.8844, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0021209844903009147, |
| "grad_norm": 447.0108337402344, |
| "learning_rate": 2.991699809439337e-05, |
| "loss": 74.97, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0028279793204012194, |
| "grad_norm": 5898.1640625, |
| "learning_rate": 3.748918896344382e-05, |
| "loss": 81.0476, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.0035349741505015246, |
| "grad_norm": 226.5022430419922, |
| "learning_rate": 4.336263650693704e-05, |
| "loss": 68.3264, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.004241968980601829, |
| "grad_norm": 112.70651245117188, |
| "learning_rate": 4.8161592576115274e-05, |
| "loss": 60.3818, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0049489638107021346, |
| "grad_norm": 93.56769561767578, |
| "learning_rate": 5.2219052119207004e-05, |
| "loss": 59.0934, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.005655958640802439, |
| "grad_norm": 241.02914428710938, |
| "learning_rate": 5.5733783445165726e-05, |
| "loss": 58.6418, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.006362953470902744, |
| "grad_norm": 431.45977783203125, |
| "learning_rate": 5.883399618878674e-05, |
| "loss": 58.1764, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.007069948301003049, |
| "grad_norm": 349.70782470703125, |
| "learning_rate": 6.160723098865897e-05, |
| "loss": 56.8595, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.007776943131103354, |
| "grad_norm": 48.183162689208984, |
| "learning_rate": 6.411592701928434e-05, |
| "loss": 56.9374, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.008483937961203659, |
| "grad_norm": 28.044233322143555, |
| "learning_rate": 6.64061870578372e-05, |
| "loss": 55.5306, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.009190932791303964, |
| "grad_norm": 30.87534523010254, |
| "learning_rate": 6.851302206154156e-05, |
| "loss": 55.0557, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.009897927621404269, |
| "grad_norm": 24.83538818359375, |
| "learning_rate": 7.046364660092892e-05, |
| "loss": 50.3177, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.010604922451504573, |
| "grad_norm": 28.679935455322266, |
| "learning_rate": 7.22796346013304e-05, |
| "loss": 54.1606, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.011311917281604878, |
| "grad_norm": 29.871997833251953, |
| "learning_rate": 7.397837792688765e-05, |
| "loss": 52.391, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.012018912111705183, |
| "grad_norm": 25.709745407104492, |
| "learning_rate": 7.557410199771932e-05, |
| "loss": 52.2417, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.012725906941805488, |
| "grad_norm": 25.342071533203125, |
| "learning_rate": 7.707859067050864e-05, |
| "loss": 55.4544, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.013432901771905793, |
| "grad_norm": 25.05597686767578, |
| "learning_rate": 7.850171487052752e-05, |
| "loss": 52.1423, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.014139896602006099, |
| "grad_norm": 23.790668487548828, |
| "learning_rate": 7.985182547038086e-05, |
| "loss": 52.9892, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.014846891432106402, |
| "grad_norm": 24.078054428100586, |
| "learning_rate": 8.113605021360038e-05, |
| "loss": 52.7495, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.015553886262206707, |
| "grad_norm": 22.381742477416992, |
| "learning_rate": 8.236052150100626e-05, |
| "loss": 52.8961, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.016260881092307014, |
| "grad_norm": 23.896352767944336, |
| "learning_rate": 8.353055350120496e-05, |
| "loss": 51.6978, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.016967875922407318, |
| "grad_norm": 22.911603927612305, |
| "learning_rate": 8.46507815395591e-05, |
| "loss": 51.3604, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.01767487075250762, |
| "grad_norm": 22.95720672607422, |
| "learning_rate": 8.572527301387408e-05, |
| "loss": 50.2589, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.018381865582607928, |
| "grad_norm": 24.270532608032227, |
| "learning_rate": 8.675761654326347e-05, |
| "loss": 53.2253, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.01908886041270823, |
| "grad_norm": 22.97658920288086, |
| "learning_rate": 8.77509942831801e-05, |
| "loss": 51.9815, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.019795855242808538, |
| "grad_norm": 24.958389282226562, |
| "learning_rate": 8.870824108265084e-05, |
| "loss": 52.542, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.02050285007290884, |
| "grad_norm": 22.157556533813477, |
| "learning_rate": 8.963189325601443e-05, |
| "loss": 49.7963, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.021209844903009145, |
| "grad_norm": 21.693815231323242, |
| "learning_rate": 9.052422908305234e-05, |
| "loss": 50.776, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.021916839733109452, |
| "grad_norm": 23.532215118408203, |
| "learning_rate": 9.138730266585143e-05, |
| "loss": 53.4715, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.022623834563209756, |
| "grad_norm": 23.66779899597168, |
| "learning_rate": 9.222297240860955e-05, |
| "loss": 51.4714, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.023330829393310062, |
| "grad_norm": 21.056167602539062, |
| "learning_rate": 9.30329251136777e-05, |
| "loss": 50.4526, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.024037824223410366, |
| "grad_norm": 20.85773468017578, |
| "learning_rate": 9.381869647944122e-05, |
| "loss": 49.2931, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.024744819053510673, |
| "grad_norm": 21.533872604370117, |
| "learning_rate": 9.458168862614404e-05, |
| "loss": 49.6295, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.025451813883610976, |
| "grad_norm": 23.687660217285156, |
| "learning_rate": 9.532318515223055e-05, |
| "loss": 51.6082, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.02615880871371128, |
| "grad_norm": 25.589447021484375, |
| "learning_rate": 9.604436412734158e-05, |
| "loss": 49.9036, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.026865803543811587, |
| "grad_norm": 23.40888786315918, |
| "learning_rate": 9.674630935224942e-05, |
| "loss": 49.4263, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.02757279837391189, |
| "grad_norm": 24.978803634643555, |
| "learning_rate": 9.743002015593493e-05, |
| "loss": 50.1287, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.028279793204012197, |
| "grad_norm": 27.688739776611328, |
| "learning_rate": 9.809641995210277e-05, |
| "loss": 49.186, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.0289867880341125, |
| "grad_norm": 28.714271545410156, |
| "learning_rate": 9.874636373899326e-05, |
| "loss": 49.1801, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.029693782864212804, |
| "grad_norm": 58.4798583984375, |
| "learning_rate": 9.938064469532229e-05, |
| "loss": 48.9665, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.03040077769431311, |
| "grad_norm": 54.95769119262695, |
| "learning_rate": 9.999999999999999e-05, |
| "loss": 51.0544, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.031107772524413414, |
| "grad_norm": 38.86529541015625, |
| "learning_rate": 0.0001, |
| "loss": 46.6882, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.03181476735451372, |
| "grad_norm": 49.0484619140625, |
| "learning_rate": 9.99277899343545e-05, |
| "loss": 45.8064, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.03252176218461403, |
| "grad_norm": 50.259254455566406, |
| "learning_rate": 9.985557986870897e-05, |
| "loss": 44.7941, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.03322875701471433, |
| "grad_norm": 44.155155181884766, |
| "learning_rate": 9.978336980306346e-05, |
| "loss": 45.6409, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.033935751844814635, |
| "grad_norm": 48.193572998046875, |
| "learning_rate": 9.971115973741795e-05, |
| "loss": 42.198, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.03464274667491494, |
| "grad_norm": 58.65378189086914, |
| "learning_rate": 9.963894967177244e-05, |
| "loss": 41.381, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.03534974150501524, |
| "grad_norm": 101.705810546875, |
| "learning_rate": 9.956673960612691e-05, |
| "loss": 41.7807, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.03605673633511555, |
| "grad_norm": 52.42247009277344, |
| "learning_rate": 9.94945295404814e-05, |
| "loss": 39.5665, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.036763731165215856, |
| "grad_norm": 56.90324020385742, |
| "learning_rate": 9.942231947483589e-05, |
| "loss": 37.5718, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.03747072599531616, |
| "grad_norm": 63.50370788574219, |
| "learning_rate": 9.935010940919037e-05, |
| "loss": 37.1649, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.03817772082541646, |
| "grad_norm": 81.95431518554688, |
| "learning_rate": 9.927789934354487e-05, |
| "loss": 36.9419, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.038884715655516766, |
| "grad_norm": 70.1633071899414, |
| "learning_rate": 9.920568927789935e-05, |
| "loss": 36.1688, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.039591710485617077, |
| "grad_norm": 55.53812026977539, |
| "learning_rate": 9.913347921225384e-05, |
| "loss": 34.972, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.04029870531571738, |
| "grad_norm": 56.5380859375, |
| "learning_rate": 9.906126914660831e-05, |
| "loss": 33.7794, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.04100570014581768, |
| "grad_norm": 63.752079010009766, |
| "learning_rate": 9.89890590809628e-05, |
| "loss": 32.1362, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.04171269497591799, |
| "grad_norm": 63.47700500488281, |
| "learning_rate": 9.891684901531729e-05, |
| "loss": 31.9549, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.04241968980601829, |
| "grad_norm": 64.81684875488281, |
| "learning_rate": 9.884463894967178e-05, |
| "loss": 32.808, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.0431266846361186, |
| "grad_norm": 64.31057739257812, |
| "learning_rate": 9.877242888402627e-05, |
| "loss": 30.4557, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.043833679466218904, |
| "grad_norm": 106.54881286621094, |
| "learning_rate": 9.870021881838075e-05, |
| "loss": 29.8655, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.04454067429631921, |
| "grad_norm": 83.85649108886719, |
| "learning_rate": 9.862800875273524e-05, |
| "loss": 30.1123, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.04524766912641951, |
| "grad_norm": 97.56901550292969, |
| "learning_rate": 9.855579868708971e-05, |
| "loss": 30.7334, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.045954663956519815, |
| "grad_norm": 47.09314727783203, |
| "learning_rate": 9.84835886214442e-05, |
| "loss": 28.8072, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.046661658786620125, |
| "grad_norm": 64.4630355834961, |
| "learning_rate": 9.841137855579869e-05, |
| "loss": 28.9196, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.04736865361672043, |
| "grad_norm": 86.87482452392578, |
| "learning_rate": 9.833916849015318e-05, |
| "loss": 28.0566, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.04807564844682073, |
| "grad_norm": 55.95505905151367, |
| "learning_rate": 9.826695842450767e-05, |
| "loss": 27.4693, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.048782643276921035, |
| "grad_norm": 45.14374923706055, |
| "learning_rate": 9.819474835886215e-05, |
| "loss": 27.5724, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.049489638107021346, |
| "grad_norm": 70.64986419677734, |
| "learning_rate": 9.812253829321663e-05, |
| "loss": 27.9396, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.05019663293712165, |
| "grad_norm": 45.24726104736328, |
| "learning_rate": 9.805032822757111e-05, |
| "loss": 25.0216, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.05090362776722195, |
| "grad_norm": 60.04133605957031, |
| "learning_rate": 9.797811816192561e-05, |
| "loss": 25.2692, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.051610622597322256, |
| "grad_norm": 39.81468200683594, |
| "learning_rate": 9.790590809628009e-05, |
| "loss": 24.609, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.05231761742742256, |
| "grad_norm": 47.94467544555664, |
| "learning_rate": 9.783369803063458e-05, |
| "loss": 26.2778, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.05302461225752287, |
| "grad_norm": 34.34703826904297, |
| "learning_rate": 9.776148796498907e-05, |
| "loss": 23.7904, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.05373160708762317, |
| "grad_norm": 53.06298828125, |
| "learning_rate": 9.768927789934354e-05, |
| "loss": 24.0161, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.05443860191772348, |
| "grad_norm": 50.771827697753906, |
| "learning_rate": 9.761706783369803e-05, |
| "loss": 24.3783, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.05514559674782378, |
| "grad_norm": 39.64093017578125, |
| "learning_rate": 9.754485776805252e-05, |
| "loss": 24.1985, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.055852591577924084, |
| "grad_norm": 40.369510650634766, |
| "learning_rate": 9.747264770240701e-05, |
| "loss": 24.0333, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.056559586408024394, |
| "grad_norm": 43.88887023925781, |
| "learning_rate": 9.740043763676149e-05, |
| "loss": 23.6668, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.0572665812381247, |
| "grad_norm": 42.15245056152344, |
| "learning_rate": 9.732822757111598e-05, |
| "loss": 24.157, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.057973576068225, |
| "grad_norm": 45.39596939086914, |
| "learning_rate": 9.725601750547047e-05, |
| "loss": 23.0294, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.058680570898325304, |
| "grad_norm": 41.89541244506836, |
| "learning_rate": 9.718380743982494e-05, |
| "loss": 22.6679, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.05938756572842561, |
| "grad_norm": 38.90410614013672, |
| "learning_rate": 9.711159737417943e-05, |
| "loss": 20.6634, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.06009456055852592, |
| "grad_norm": 44.17354965209961, |
| "learning_rate": 9.703938730853392e-05, |
| "loss": 22.3198, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.06080155538862622, |
| "grad_norm": 44.40744400024414, |
| "learning_rate": 9.696717724288841e-05, |
| "loss": 22.6737, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.061508550218726525, |
| "grad_norm": 49.779388427734375, |
| "learning_rate": 9.689496717724289e-05, |
| "loss": 21.4484, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.06221554504882683, |
| "grad_norm": 38.24262237548828, |
| "learning_rate": 9.682275711159738e-05, |
| "loss": 20.6562, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.06292253987892714, |
| "grad_norm": 34.11885070800781, |
| "learning_rate": 9.675054704595187e-05, |
| "loss": 21.33, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.06362953470902744, |
| "grad_norm": 37.97296142578125, |
| "learning_rate": 9.667833698030636e-05, |
| "loss": 19.8617, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.06433652953912775, |
| "grad_norm": 35.56360626220703, |
| "learning_rate": 9.660612691466084e-05, |
| "loss": 21.5979, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.06504352436922806, |
| "grad_norm": 39.539939880371094, |
| "learning_rate": 9.653391684901532e-05, |
| "loss": 19.6249, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.06575051919932835, |
| "grad_norm": 32.25932693481445, |
| "learning_rate": 9.646170678336981e-05, |
| "loss": 21.2756, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.06645751402942866, |
| "grad_norm": 38.31186294555664, |
| "learning_rate": 9.638949671772429e-05, |
| "loss": 20.3971, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.06716450885952896, |
| "grad_norm": 40.35636901855469, |
| "learning_rate": 9.631728665207878e-05, |
| "loss": 19.8489, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.06787150368962927, |
| "grad_norm": 31.688522338867188, |
| "learning_rate": 9.624507658643326e-05, |
| "loss": 19.9785, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.06857849851972958, |
| "grad_norm": 31.74046516418457, |
| "learning_rate": 9.617286652078775e-05, |
| "loss": 19.8483, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.06928549334982988, |
| "grad_norm": 36.49064636230469, |
| "learning_rate": 9.610065645514224e-05, |
| "loss": 20.2419, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.06999248817993019, |
| "grad_norm": 31.634632110595703, |
| "learning_rate": 9.602844638949672e-05, |
| "loss": 18.9565, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.07069948301003048, |
| "grad_norm": 37.62042236328125, |
| "learning_rate": 9.595623632385121e-05, |
| "loss": 19.4741, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.0714064778401308, |
| "grad_norm": 40.450321197509766, |
| "learning_rate": 9.588402625820568e-05, |
| "loss": 20.3956, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.0721134726702311, |
| "grad_norm": 36.62712478637695, |
| "learning_rate": 9.581181619256017e-05, |
| "loss": 18.8533, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.0728204675003314, |
| "grad_norm": 34.651710510253906, |
| "learning_rate": 9.573960612691466e-05, |
| "loss": 18.2028, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.07352746233043171, |
| "grad_norm": 39.63356018066406, |
| "learning_rate": 9.566739606126915e-05, |
| "loss": 19.2314, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.07423445716053201, |
| "grad_norm": 33.7768669128418, |
| "learning_rate": 9.559518599562364e-05, |
| "loss": 18.8913, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.07494145199063232, |
| "grad_norm": 33.89146041870117, |
| "learning_rate": 9.552297592997812e-05, |
| "loss": 19.5537, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.07564844682073263, |
| "grad_norm": 33.79356002807617, |
| "learning_rate": 9.545076586433261e-05, |
| "loss": 20.1578, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.07635544165083293, |
| "grad_norm": 28.928699493408203, |
| "learning_rate": 9.53785557986871e-05, |
| "loss": 19.4719, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.07706243648093324, |
| "grad_norm": 41.88840866088867, |
| "learning_rate": 9.530634573304159e-05, |
| "loss": 18.8494, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.07776943131103353, |
| "grad_norm": 36.432682037353516, |
| "learning_rate": 9.523413566739606e-05, |
| "loss": 19.1342, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.07847642614113384, |
| "grad_norm": 35.987552642822266, |
| "learning_rate": 9.516192560175055e-05, |
| "loss": 19.1107, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.07918342097123415, |
| "grad_norm": 44.01519012451172, |
| "learning_rate": 9.508971553610504e-05, |
| "loss": 18.7917, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.07989041580133445, |
| "grad_norm": 31.156208038330078, |
| "learning_rate": 9.501750547045952e-05, |
| "loss": 18.7651, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.08059741063143476, |
| "grad_norm": 27.615875244140625, |
| "learning_rate": 9.4945295404814e-05, |
| "loss": 17.4872, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.08130440546153506, |
| "grad_norm": 30.91271209716797, |
| "learning_rate": 9.48730853391685e-05, |
| "loss": 19.8748, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.08201140029163537, |
| "grad_norm": 33.25605773925781, |
| "learning_rate": 9.480087527352299e-05, |
| "loss": 19.1411, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.08271839512173568, |
| "grad_norm": 29.771507263183594, |
| "learning_rate": 9.472866520787746e-05, |
| "loss": 18.2639, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.08342538995183597, |
| "grad_norm": 32.23770523071289, |
| "learning_rate": 9.465645514223195e-05, |
| "loss": 18.257, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.08413238478193628, |
| "grad_norm": 36.424015045166016, |
| "learning_rate": 9.458424507658644e-05, |
| "loss": 17.0622, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.08483937961203658, |
| "grad_norm": 39.868797302246094, |
| "learning_rate": 9.451203501094092e-05, |
| "loss": 19.4193, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.08554637444213689, |
| "grad_norm": 29.13055992126465, |
| "learning_rate": 9.443982494529542e-05, |
| "loss": 19.4787, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.0862533692722372, |
| "grad_norm": 32.32482147216797, |
| "learning_rate": 9.43676148796499e-05, |
| "loss": 18.933, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.0869603641023375, |
| "grad_norm": 30.419885635375977, |
| "learning_rate": 9.429540481400438e-05, |
| "loss": 17.8486, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.08766735893243781, |
| "grad_norm": 28.050142288208008, |
| "learning_rate": 9.422319474835886e-05, |
| "loss": 20.1933, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.0883743537625381, |
| "grad_norm": 29.47435188293457, |
| "learning_rate": 9.415098468271335e-05, |
| "loss": 17.4255, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.08908134859263842, |
| "grad_norm": 29.57215118408203, |
| "learning_rate": 9.407877461706784e-05, |
| "loss": 20.1363, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.08978834342273873, |
| "grad_norm": 38.4536247253418, |
| "learning_rate": 9.400656455142233e-05, |
| "loss": 18.2052, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.09049533825283902, |
| "grad_norm": 28.376087188720703, |
| "learning_rate": 9.393435448577682e-05, |
| "loss": 19.2259, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.09120233308293933, |
| "grad_norm": 26.830381393432617, |
| "learning_rate": 9.38621444201313e-05, |
| "loss": 19.3953, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.09190932791303963, |
| "grad_norm": 27.166439056396484, |
| "learning_rate": 9.378993435448578e-05, |
| "loss": 18.9521, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.09261632274313994, |
| "grad_norm": 28.81890869140625, |
| "learning_rate": 9.371772428884026e-05, |
| "loss": 17.7015, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.09332331757324025, |
| "grad_norm": 32.495296478271484, |
| "learning_rate": 9.364551422319475e-05, |
| "loss": 19.0709, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.09403031240334055, |
| "grad_norm": 27.91164779663086, |
| "learning_rate": 9.357330415754924e-05, |
| "loss": 18.7305, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.09473730723344086, |
| "grad_norm": 32.352169036865234, |
| "learning_rate": 9.350109409190373e-05, |
| "loss": 17.947, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.09544430206354117, |
| "grad_norm": 41.09231948852539, |
| "learning_rate": 9.342888402625822e-05, |
| "loss": 18.0234, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.09615129689364146, |
| "grad_norm": 29.327669143676758, |
| "learning_rate": 9.335667396061269e-05, |
| "loss": 18.0342, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.09685829172374177, |
| "grad_norm": 31.431499481201172, |
| "learning_rate": 9.328446389496718e-05, |
| "loss": 18.1777, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.09756528655384207, |
| "grad_norm": 28.50454330444336, |
| "learning_rate": 9.321225382932166e-05, |
| "loss": 17.4591, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.09827228138394238, |
| "grad_norm": 29.735980987548828, |
| "learning_rate": 9.314004376367616e-05, |
| "loss": 17.5018, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.09897927621404269, |
| "grad_norm": 34.070980072021484, |
| "learning_rate": 9.306783369803064e-05, |
| "loss": 17.5913, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.09968627104414299, |
| "grad_norm": 30.582719802856445, |
| "learning_rate": 9.299562363238513e-05, |
| "loss": 17.0948, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.1003932658742433, |
| "grad_norm": 26.284706115722656, |
| "learning_rate": 9.292341356673962e-05, |
| "loss": 18.7132, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.1011002607043436, |
| "grad_norm": 33.66963195800781, |
| "learning_rate": 9.285120350109409e-05, |
| "loss": 18.8174, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.1018072555344439, |
| "grad_norm": 33.1568603515625, |
| "learning_rate": 9.277899343544858e-05, |
| "loss": 17.7703, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.10251425036454422, |
| "grad_norm": 24.135400772094727, |
| "learning_rate": 9.270678336980307e-05, |
| "loss": 17.4647, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.10322124519464451, |
| "grad_norm": 29.076414108276367, |
| "learning_rate": 9.263457330415756e-05, |
| "loss": 17.4553, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.10392824002474482, |
| "grad_norm": 35.003414154052734, |
| "learning_rate": 9.256236323851204e-05, |
| "loss": 16.8381, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.10463523485484512, |
| "grad_norm": 35.33264923095703, |
| "learning_rate": 9.249015317286652e-05, |
| "loss": 17.2061, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.10534222968494543, |
| "grad_norm": 25.581878662109375, |
| "learning_rate": 9.241794310722101e-05, |
| "loss": 18.4266, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.10604922451504574, |
| "grad_norm": 36.04518127441406, |
| "learning_rate": 9.234573304157549e-05, |
| "loss": 17.6767, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.10675621934514604, |
| "grad_norm": 25.03700828552246, |
| "learning_rate": 9.227352297592998e-05, |
| "loss": 18.6159, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.10746321417524635, |
| "grad_norm": 37.83554458618164, |
| "learning_rate": 9.220131291028447e-05, |
| "loss": 17.1416, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.10817020900534664, |
| "grad_norm": 32.96514129638672, |
| "learning_rate": 9.212910284463896e-05, |
| "loss": 18.1587, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.10887720383544695, |
| "grad_norm": 25.70465850830078, |
| "learning_rate": 9.205689277899343e-05, |
| "loss": 17.3172, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.10958419866554726, |
| "grad_norm": 24.10750961303711, |
| "learning_rate": 9.198468271334792e-05, |
| "loss": 17.2018, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.11029119349564756, |
| "grad_norm": 24.535715103149414, |
| "learning_rate": 9.191247264770241e-05, |
| "loss": 17.2166, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.11099818832574787, |
| "grad_norm": 24.88667869567871, |
| "learning_rate": 9.184026258205689e-05, |
| "loss": 20.2791, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.11170518315584817, |
| "grad_norm": 24.478866577148438, |
| "learning_rate": 9.176805251641139e-05, |
| "loss": 18.1817, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.11241217798594848, |
| "grad_norm": 30.161104202270508, |
| "learning_rate": 9.169584245076587e-05, |
| "loss": 17.7581, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.11311917281604879, |
| "grad_norm": 29.541017532348633, |
| "learning_rate": 9.162363238512036e-05, |
| "loss": 17.4958, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.11382616764614908, |
| "grad_norm": 32.381771087646484, |
| "learning_rate": 9.155142231947483e-05, |
| "loss": 16.9631, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.1145331624762494, |
| "grad_norm": 31.770675659179688, |
| "learning_rate": 9.147921225382932e-05, |
| "loss": 17.7155, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.11524015730634969, |
| "grad_norm": 30.45294952392578, |
| "learning_rate": 9.140700218818381e-05, |
| "loss": 19.232, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.11594715213645, |
| "grad_norm": 26.106555938720703, |
| "learning_rate": 9.13347921225383e-05, |
| "loss": 17.4079, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.11665414696655031, |
| "grad_norm": 25.87933921813965, |
| "learning_rate": 9.126258205689279e-05, |
| "loss": 17.6005, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.11736114179665061, |
| "grad_norm": 27.44808578491211, |
| "learning_rate": 9.119037199124727e-05, |
| "loss": 16.6392, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.11806813662675092, |
| "grad_norm": 25.52296257019043, |
| "learning_rate": 9.111816192560176e-05, |
| "loss": 17.2663, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.11877513145685122, |
| "grad_norm": 24.483516693115234, |
| "learning_rate": 9.104595185995623e-05, |
| "loss": 15.9337, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.11948212628695153, |
| "grad_norm": 23.002857208251953, |
| "learning_rate": 9.097374179431072e-05, |
| "loss": 16.215, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.12018912111705184, |
| "grad_norm": 26.76035499572754, |
| "learning_rate": 9.090153172866521e-05, |
| "loss": 16.72, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.12089611594715213, |
| "grad_norm": 29.699464797973633, |
| "learning_rate": 9.08293216630197e-05, |
| "loss": 17.7319, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.12160311077725244, |
| "grad_norm": 30.24201202392578, |
| "learning_rate": 9.075711159737419e-05, |
| "loss": 16.9052, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.12231010560735274, |
| "grad_norm": 48.706581115722656, |
| "learning_rate": 9.068490153172867e-05, |
| "loss": 17.2807, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.12301710043745305, |
| "grad_norm": 25.212533950805664, |
| "learning_rate": 9.061269146608315e-05, |
| "loss": 16.5931, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.12372409526755336, |
| "grad_norm": 29.620086669921875, |
| "learning_rate": 9.054048140043763e-05, |
| "loss": 16.2194, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.12443109009765366, |
| "grad_norm": 26.384183883666992, |
| "learning_rate": 9.046827133479213e-05, |
| "loss": 16.1872, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.12513808492775397, |
| "grad_norm": 25.466659545898438, |
| "learning_rate": 9.039606126914661e-05, |
| "loss": 16.7155, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.12584507975785428, |
| "grad_norm": 31.032506942749023, |
| "learning_rate": 9.03238512035011e-05, |
| "loss": 17.6649, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.1265520745879546, |
| "grad_norm": 24.821929931640625, |
| "learning_rate": 9.025164113785559e-05, |
| "loss": 17.3971, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.12725906941805487, |
| "grad_norm": 25.913917541503906, |
| "learning_rate": 9.017943107221006e-05, |
| "loss": 16.564, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.12796606424815518, |
| "grad_norm": 27.99454116821289, |
| "learning_rate": 9.010722100656455e-05, |
| "loss": 19.4556, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.1286730590782555, |
| "grad_norm": 23.89288902282715, |
| "learning_rate": 9.003501094091904e-05, |
| "loss": 17.549, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.1293800539083558, |
| "grad_norm": 26.861974716186523, |
| "learning_rate": 8.996280087527353e-05, |
| "loss": 16.8727, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.1300870487384561, |
| "grad_norm": 24.672622680664062, |
| "learning_rate": 8.989059080962801e-05, |
| "loss": 15.7491, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.1307940435685564, |
| "grad_norm": 24.098909378051758, |
| "learning_rate": 8.98183807439825e-05, |
| "loss": 16.9385, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.1315010383986567, |
| "grad_norm": 26.149381637573242, |
| "learning_rate": 8.974617067833699e-05, |
| "loss": 17.5179, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.13220803322875702, |
| "grad_norm": 33.0228271484375, |
| "learning_rate": 8.967396061269146e-05, |
| "loss": 18.6741, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.13291502805885733, |
| "grad_norm": 30.52547264099121, |
| "learning_rate": 8.960175054704595e-05, |
| "loss": 16.0168, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.13362202288895764, |
| "grad_norm": 24.271406173706055, |
| "learning_rate": 8.952954048140044e-05, |
| "loss": 17.562, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.13432901771905792, |
| "grad_norm": 31.642528533935547, |
| "learning_rate": 8.945733041575493e-05, |
| "loss": 16.4493, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.13503601254915823, |
| "grad_norm": 24.342866897583008, |
| "learning_rate": 8.938512035010941e-05, |
| "loss": 15.8088, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.13574300737925854, |
| "grad_norm": 33.57277297973633, |
| "learning_rate": 8.93129102844639e-05, |
| "loss": 18.1225, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.13645000220935885, |
| "grad_norm": 25.293317794799805, |
| "learning_rate": 8.924070021881839e-05, |
| "loss": 17.1738, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.13715699703945916, |
| "grad_norm": 25.74216079711914, |
| "learning_rate": 8.916849015317288e-05, |
| "loss": 17.5808, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.13786399186955944, |
| "grad_norm": 25.824739456176758, |
| "learning_rate": 8.909628008752736e-05, |
| "loss": 16.29, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.13857098669965975, |
| "grad_norm": 21.56652069091797, |
| "learning_rate": 8.902407002188184e-05, |
| "loss": 17.3717, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.13927798152976006, |
| "grad_norm": 27.956878662109375, |
| "learning_rate": 8.895185995623633e-05, |
| "loss": 16.8826, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.13998497635986037, |
| "grad_norm": 25.947261810302734, |
| "learning_rate": 8.88796498905908e-05, |
| "loss": 17.7311, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.14069197118996069, |
| "grad_norm": 26.35049057006836, |
| "learning_rate": 8.88074398249453e-05, |
| "loss": 16.7053, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.14139896602006097, |
| "grad_norm": 23.005706787109375, |
| "learning_rate": 8.873522975929978e-05, |
| "loss": 17.5012, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.14210596085016128, |
| "grad_norm": 24.649900436401367, |
| "learning_rate": 8.866301969365427e-05, |
| "loss": 16.9559, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.1428129556802616, |
| "grad_norm": 23.98031997680664, |
| "learning_rate": 8.859080962800876e-05, |
| "loss": 15.1581, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.1435199505103619, |
| "grad_norm": 27.078948974609375, |
| "learning_rate": 8.851859956236324e-05, |
| "loss": 16.7947, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.1442269453404622, |
| "grad_norm": 26.9580020904541, |
| "learning_rate": 8.844638949671773e-05, |
| "loss": 16.5367, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.1449339401705625, |
| "grad_norm": 22.63777732849121, |
| "learning_rate": 8.83741794310722e-05, |
| "loss": 16.4217, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.1456409350006628, |
| "grad_norm": 26.905527114868164, |
| "learning_rate": 8.83019693654267e-05, |
| "loss": 15.5522, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.1463479298307631, |
| "grad_norm": 23.968271255493164, |
| "learning_rate": 8.822975929978118e-05, |
| "loss": 14.9917, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.14705492466086342, |
| "grad_norm": 26.18745994567871, |
| "learning_rate": 8.815754923413567e-05, |
| "loss": 16.8354, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.14776191949096373, |
| "grad_norm": 24.03843879699707, |
| "learning_rate": 8.808533916849016e-05, |
| "loss": 16.5202, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.14846891432106402, |
| "grad_norm": 22.780261993408203, |
| "learning_rate": 8.801312910284464e-05, |
| "loss": 15.2645, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.14917590915116433, |
| "grad_norm": 24.574827194213867, |
| "learning_rate": 8.794091903719913e-05, |
| "loss": 16.0331, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.14988290398126464, |
| "grad_norm": 26.111984252929688, |
| "learning_rate": 8.786870897155362e-05, |
| "loss": 16.5817, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.15058989881136495, |
| "grad_norm": 20.612037658691406, |
| "learning_rate": 8.77964989059081e-05, |
| "loss": 17.2985, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.15129689364146526, |
| "grad_norm": 20.90341567993164, |
| "learning_rate": 8.772428884026258e-05, |
| "loss": 16.5036, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.15200388847156554, |
| "grad_norm": 20.831926345825195, |
| "learning_rate": 8.765207877461707e-05, |
| "loss": 15.8133, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.15271088330166585, |
| "grad_norm": 26.699604034423828, |
| "learning_rate": 8.757986870897156e-05, |
| "loss": 15.9572, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.15341787813176616, |
| "grad_norm": 27.643829345703125, |
| "learning_rate": 8.750765864332604e-05, |
| "loss": 16.6718, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.15412487296186647, |
| "grad_norm": 27.599285125732422, |
| "learning_rate": 8.743544857768053e-05, |
| "loss": 16.9441, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.15483186779196678, |
| "grad_norm": 24.13196563720703, |
| "learning_rate": 8.736323851203502e-05, |
| "loss": 17.9485, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.15553886262206706, |
| "grad_norm": 23.68052864074707, |
| "learning_rate": 8.72910284463895e-05, |
| "loss": 15.4747, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.15624585745216737, |
| "grad_norm": 25.603376388549805, |
| "learning_rate": 8.721881838074398e-05, |
| "loss": 16.7561, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.15695285228226769, |
| "grad_norm": 24.765710830688477, |
| "learning_rate": 8.714660831509847e-05, |
| "loss": 15.5788, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.157659847112368, |
| "grad_norm": 21.479066848754883, |
| "learning_rate": 8.707439824945296e-05, |
| "loss": 17.7476, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.1583668419424683, |
| "grad_norm": 20.06366539001465, |
| "learning_rate": 8.700218818380744e-05, |
| "loss": 15.1917, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.1590738367725686, |
| "grad_norm": 22.646343231201172, |
| "learning_rate": 8.692997811816194e-05, |
| "loss": 16.2958, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.1597808316026689, |
| "grad_norm": 22.053787231445312, |
| "learning_rate": 8.685776805251641e-05, |
| "loss": 16.4618, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.1604878264327692, |
| "grad_norm": 22.88997459411621, |
| "learning_rate": 8.67855579868709e-05, |
| "loss": 16.0649, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.16119482126286952, |
| "grad_norm": 22.48101806640625, |
| "learning_rate": 8.671334792122538e-05, |
| "loss": 15.7039, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.16190181609296983, |
| "grad_norm": 25.803606033325195, |
| "learning_rate": 8.664113785557987e-05, |
| "loss": 16.5133, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.1626088109230701, |
| "grad_norm": 22.281761169433594, |
| "learning_rate": 8.656892778993436e-05, |
| "loss": 16.2889, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.16331580575317042, |
| "grad_norm": 26.732994079589844, |
| "learning_rate": 8.649671772428885e-05, |
| "loss": 16.9495, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.16402280058327073, |
| "grad_norm": 22.665224075317383, |
| "learning_rate": 8.642450765864334e-05, |
| "loss": 17.289, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.16472979541337104, |
| "grad_norm": 21.641260147094727, |
| "learning_rate": 8.635229759299781e-05, |
| "loss": 16.0455, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.16543679024347135, |
| "grad_norm": 25.28057098388672, |
| "learning_rate": 8.62800875273523e-05, |
| "loss": 17.1432, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.16614378507357164, |
| "grad_norm": 25.27671241760254, |
| "learning_rate": 8.620787746170678e-05, |
| "loss": 17.2155, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.16685077990367195, |
| "grad_norm": 27.388463973999023, |
| "learning_rate": 8.613566739606127e-05, |
| "loss": 16.2077, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.16755777473377226, |
| "grad_norm": 21.950359344482422, |
| "learning_rate": 8.606345733041576e-05, |
| "loss": 17.8045, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.16826476956387257, |
| "grad_norm": 23.083728790283203, |
| "learning_rate": 8.599124726477025e-05, |
| "loss": 17.1346, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.16897176439397288, |
| "grad_norm": 24.17853355407715, |
| "learning_rate": 8.591903719912474e-05, |
| "loss": 15.6759, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.16967875922407316, |
| "grad_norm": 21.20966148376465, |
| "learning_rate": 8.584682713347921e-05, |
| "loss": 17.0444, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.17038575405417347, |
| "grad_norm": 22.764076232910156, |
| "learning_rate": 8.57746170678337e-05, |
| "loss": 15.188, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.17109274888427378, |
| "grad_norm": 21.44521713256836, |
| "learning_rate": 8.570240700218818e-05, |
| "loss": 16.7055, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.1717997437143741, |
| "grad_norm": 23.313913345336914, |
| "learning_rate": 8.563019693654267e-05, |
| "loss": 16.3735, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.1725067385444744, |
| "grad_norm": 21.593088150024414, |
| "learning_rate": 8.555798687089716e-05, |
| "loss": 14.0391, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.17321373337457469, |
| "grad_norm": 20.44053840637207, |
| "learning_rate": 8.548577680525165e-05, |
| "loss": 16.2655, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.173920728204675, |
| "grad_norm": 21.79986572265625, |
| "learning_rate": 8.541356673960614e-05, |
| "loss": 16.0105, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.1746277230347753, |
| "grad_norm": 31.781938552856445, |
| "learning_rate": 8.534135667396061e-05, |
| "loss": 16.5409, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.17533471786487562, |
| "grad_norm": 33.61064910888672, |
| "learning_rate": 8.52691466083151e-05, |
| "loss": 17.2873, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.17604171269497593, |
| "grad_norm": 22.703304290771484, |
| "learning_rate": 8.519693654266959e-05, |
| "loss": 15.2044, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.1767487075250762, |
| "grad_norm": 20.650047302246094, |
| "learning_rate": 8.512472647702408e-05, |
| "loss": 16.8391, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.17745570235517652, |
| "grad_norm": 25.48831558227539, |
| "learning_rate": 8.505251641137856e-05, |
| "loss": 16.8433, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.17816269718527683, |
| "grad_norm": 21.704578399658203, |
| "learning_rate": 8.498030634573304e-05, |
| "loss": 16.3913, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.17886969201537714, |
| "grad_norm": 23.87643814086914, |
| "learning_rate": 8.490809628008753e-05, |
| "loss": 15.8773, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.17957668684547745, |
| "grad_norm": 23.842498779296875, |
| "learning_rate": 8.483588621444201e-05, |
| "loss": 17.0596, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.18028368167557773, |
| "grad_norm": 24.21112823486328, |
| "learning_rate": 8.47636761487965e-05, |
| "loss": 16.4233, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.18099067650567804, |
| "grad_norm": 21.40100860595703, |
| "learning_rate": 8.469146608315099e-05, |
| "loss": 17.5836, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.18169767133577835, |
| "grad_norm": 22.97970199584961, |
| "learning_rate": 8.461925601750548e-05, |
| "loss": 17.3076, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.18240466616587866, |
| "grad_norm": 23.0300235748291, |
| "learning_rate": 8.454704595185995e-05, |
| "loss": 16.2172, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.18311166099597898, |
| "grad_norm": 21.156230926513672, |
| "learning_rate": 8.447483588621444e-05, |
| "loss": 14.648, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.18381865582607926, |
| "grad_norm": 22.934730529785156, |
| "learning_rate": 8.440262582056893e-05, |
| "loss": 17.373, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.18452565065617957, |
| "grad_norm": 23.799009323120117, |
| "learning_rate": 8.433041575492341e-05, |
| "loss": 15.3648, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.18523264548627988, |
| "grad_norm": 22.114885330200195, |
| "learning_rate": 8.425820568927791e-05, |
| "loss": 16.3895, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.1859396403163802, |
| "grad_norm": 26.017728805541992, |
| "learning_rate": 8.418599562363239e-05, |
| "loss": 15.9444, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.1866466351464805, |
| "grad_norm": 25.28594970703125, |
| "learning_rate": 8.411378555798688e-05, |
| "loss": 16.4829, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.1873536299765808, |
| "grad_norm": 21.194791793823242, |
| "learning_rate": 8.404157549234135e-05, |
| "loss": 15.6376, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.1880606248066811, |
| "grad_norm": 25.057748794555664, |
| "learning_rate": 8.396936542669584e-05, |
| "loss": 16.9039, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.1887676196367814, |
| "grad_norm": 22.067426681518555, |
| "learning_rate": 8.389715536105033e-05, |
| "loss": 16.105, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.1894746144668817, |
| "grad_norm": 23.50616455078125, |
| "learning_rate": 8.382494529540482e-05, |
| "loss": 15.3005, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.19018160929698202, |
| "grad_norm": 22.99486541748047, |
| "learning_rate": 8.375273522975931e-05, |
| "loss": 16.2793, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.19088860412708233, |
| "grad_norm": 28.1767578125, |
| "learning_rate": 8.368052516411379e-05, |
| "loss": 14.8503, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.19159559895718262, |
| "grad_norm": 21.05082893371582, |
| "learning_rate": 8.360831509846828e-05, |
| "loss": 14.3509, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.19230259378728293, |
| "grad_norm": 27.14815902709961, |
| "learning_rate": 8.353610503282275e-05, |
| "loss": 16.2584, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.19300958861738324, |
| "grad_norm": 24.146907806396484, |
| "learning_rate": 8.346389496717724e-05, |
| "loss": 16.2285, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.19371658344748355, |
| "grad_norm": 21.399860382080078, |
| "learning_rate": 8.339168490153173e-05, |
| "loss": 16.4681, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.19442357827758386, |
| "grad_norm": 23.532634735107422, |
| "learning_rate": 8.331947483588622e-05, |
| "loss": 17.9858, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.19513057310768414, |
| "grad_norm": 22.816146850585938, |
| "learning_rate": 8.324726477024071e-05, |
| "loss": 14.9456, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.19583756793778445, |
| "grad_norm": 19.56248664855957, |
| "learning_rate": 8.317505470459519e-05, |
| "loss": 15.6442, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.19654456276788476, |
| "grad_norm": 24.974672317504883, |
| "learning_rate": 8.310284463894968e-05, |
| "loss": 18.1326, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.19725155759798507, |
| "grad_norm": 24.85466766357422, |
| "learning_rate": 8.303063457330415e-05, |
| "loss": 14.3476, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.19795855242808538, |
| "grad_norm": 18.94011878967285, |
| "learning_rate": 8.295842450765865e-05, |
| "loss": 15.3077, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.19866554725818567, |
| "grad_norm": 18.339811325073242, |
| "learning_rate": 8.288621444201313e-05, |
| "loss": 15.5157, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.19937254208828598, |
| "grad_norm": 36.65610122680664, |
| "learning_rate": 8.281400437636762e-05, |
| "loss": 16.4754, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.20007953691838629, |
| "grad_norm": 20.609729766845703, |
| "learning_rate": 8.274179431072211e-05, |
| "loss": 14.7549, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.2007865317484866, |
| "grad_norm": 18.10039710998535, |
| "learning_rate": 8.266958424507658e-05, |
| "loss": 16.8433, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.2014935265785869, |
| "grad_norm": 22.75161361694336, |
| "learning_rate": 8.259737417943107e-05, |
| "loss": 14.9722, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.2022005214086872, |
| "grad_norm": 23.14825439453125, |
| "learning_rate": 8.252516411378556e-05, |
| "loss": 15.1921, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.2029075162387875, |
| "grad_norm": 32.537208557128906, |
| "learning_rate": 8.245295404814005e-05, |
| "loss": 17.0106, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.2036145110688878, |
| "grad_norm": 23.347766876220703, |
| "learning_rate": 8.238074398249453e-05, |
| "loss": 17.4193, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.20432150589898812, |
| "grad_norm": 20.956504821777344, |
| "learning_rate": 8.230853391684902e-05, |
| "loss": 16.9236, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.20502850072908843, |
| "grad_norm": 19.826589584350586, |
| "learning_rate": 8.223632385120351e-05, |
| "loss": 15.4246, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.2057354955591887, |
| "grad_norm": 20.190919876098633, |
| "learning_rate": 8.216411378555798e-05, |
| "loss": 15.6196, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.20644249038928902, |
| "grad_norm": 21.368432998657227, |
| "learning_rate": 8.209190371991247e-05, |
| "loss": 15.6062, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.20714948521938933, |
| "grad_norm": 19.60832405090332, |
| "learning_rate": 8.201969365426696e-05, |
| "loss": 14.8065, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.20785648004948964, |
| "grad_norm": 20.27286720275879, |
| "learning_rate": 8.194748358862145e-05, |
| "loss": 16.176, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.20856347487958996, |
| "grad_norm": 20.443204879760742, |
| "learning_rate": 8.187527352297593e-05, |
| "loss": 15.325, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.20927046970969024, |
| "grad_norm": 25.870630264282227, |
| "learning_rate": 8.180306345733042e-05, |
| "loss": 16.4023, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.20997746453979055, |
| "grad_norm": 20.376365661621094, |
| "learning_rate": 8.17308533916849e-05, |
| "loss": 15.4194, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.21068445936989086, |
| "grad_norm": 18.827817916870117, |
| "learning_rate": 8.16586433260394e-05, |
| "loss": 15.4671, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.21139145419999117, |
| "grad_norm": 24.46773910522461, |
| "learning_rate": 8.158643326039389e-05, |
| "loss": 16.0512, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.21209844903009148, |
| "grad_norm": 26.15793800354004, |
| "learning_rate": 8.151422319474836e-05, |
| "loss": 16.7677, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.21280544386019176, |
| "grad_norm": 23.193317413330078, |
| "learning_rate": 8.144201312910285e-05, |
| "loss": 17.2927, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.21351243869029207, |
| "grad_norm": 22.430593490600586, |
| "learning_rate": 8.136980306345733e-05, |
| "loss": 15.1819, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.21421943352039238, |
| "grad_norm": 18.35970115661621, |
| "learning_rate": 8.129759299781182e-05, |
| "loss": 15.0625, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.2149264283504927, |
| "grad_norm": 23.23087501525879, |
| "learning_rate": 8.12253829321663e-05, |
| "loss": 15.07, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.215633423180593, |
| "grad_norm": 19.622215270996094, |
| "learning_rate": 8.11531728665208e-05, |
| "loss": 17.0095, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.2163404180106933, |
| "grad_norm": 19.727991104125977, |
| "learning_rate": 8.108096280087528e-05, |
| "loss": 15.6499, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.2170474128407936, |
| "grad_norm": 24.34721565246582, |
| "learning_rate": 8.100875273522976e-05, |
| "loss": 15.6125, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.2177544076708939, |
| "grad_norm": 20.973655700683594, |
| "learning_rate": 8.093654266958425e-05, |
| "loss": 14.6315, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.21846140250099422, |
| "grad_norm": 24.30404281616211, |
| "learning_rate": 8.086433260393873e-05, |
| "loss": 12.6647, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.21916839733109453, |
| "grad_norm": 21.908384323120117, |
| "learning_rate": 8.079212253829321e-05, |
| "loss": 14.0689, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.2198753921611948, |
| "grad_norm": 19.879352569580078, |
| "learning_rate": 8.071991247264772e-05, |
| "loss": 17.1264, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.22058238699129512, |
| "grad_norm": 18.12099266052246, |
| "learning_rate": 8.06477024070022e-05, |
| "loss": 15.1719, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.22128938182139543, |
| "grad_norm": 19.50701141357422, |
| "learning_rate": 8.057549234135668e-05, |
| "loss": 15.1601, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.22199637665149574, |
| "grad_norm": 19.107711791992188, |
| "learning_rate": 8.050328227571116e-05, |
| "loss": 16.1114, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.22270337148159605, |
| "grad_norm": 21.994384765625, |
| "learning_rate": 8.043107221006565e-05, |
| "loss": 16.0056, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.22341036631169633, |
| "grad_norm": 21.152990341186523, |
| "learning_rate": 8.035886214442014e-05, |
| "loss": 15.4405, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.22411736114179664, |
| "grad_norm": 22.611316680908203, |
| "learning_rate": 8.028665207877463e-05, |
| "loss": 14.4719, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.22482435597189696, |
| "grad_norm": 21.554346084594727, |
| "learning_rate": 8.02144420131291e-05, |
| "loss": 15.4078, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.22553135080199727, |
| "grad_norm": 23.436702728271484, |
| "learning_rate": 8.014223194748359e-05, |
| "loss": 14.9444, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.22623834563209758, |
| "grad_norm": 18.821659088134766, |
| "learning_rate": 8.007002188183808e-05, |
| "loss": 15.274, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.22694534046219786, |
| "grad_norm": 21.983806610107422, |
| "learning_rate": 7.999781181619256e-05, |
| "loss": 14.9261, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.22765233529229817, |
| "grad_norm": 21.53936004638672, |
| "learning_rate": 7.992560175054705e-05, |
| "loss": 16.714, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.22835933012239848, |
| "grad_norm": 19.361005783081055, |
| "learning_rate": 7.985339168490154e-05, |
| "loss": 14.7466, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.2290663249524988, |
| "grad_norm": 18.576501846313477, |
| "learning_rate": 7.978118161925603e-05, |
| "loss": 16.1151, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.2297733197825991, |
| "grad_norm": 18.696819305419922, |
| "learning_rate": 7.97089715536105e-05, |
| "loss": 15.3406, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.23048031461269938, |
| "grad_norm": 22.205421447753906, |
| "learning_rate": 7.963676148796499e-05, |
| "loss": 14.5233, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.2311873094427997, |
| "grad_norm": 21.301319122314453, |
| "learning_rate": 7.956455142231948e-05, |
| "loss": 14.5574, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.2318943042729, |
| "grad_norm": 19.592769622802734, |
| "learning_rate": 7.949234135667396e-05, |
| "loss": 15.653, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.23260129910300031, |
| "grad_norm": 20.18784523010254, |
| "learning_rate": 7.942013129102846e-05, |
| "loss": 15.2137, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.23330829393310062, |
| "grad_norm": 27.54798698425293, |
| "learning_rate": 7.934792122538294e-05, |
| "loss": 16.9681, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.2340152887632009, |
| "grad_norm": 18.011117935180664, |
| "learning_rate": 7.927571115973742e-05, |
| "loss": 14.9344, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.23472228359330122, |
| "grad_norm": 17.401601791381836, |
| "learning_rate": 7.92035010940919e-05, |
| "loss": 15.7647, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.23542927842340153, |
| "grad_norm": 21.158891677856445, |
| "learning_rate": 7.913129102844639e-05, |
| "loss": 14.8844, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.23613627325350184, |
| "grad_norm": 19.250036239624023, |
| "learning_rate": 7.905908096280088e-05, |
| "loss": 16.1713, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.23684326808360215, |
| "grad_norm": 19.4791202545166, |
| "learning_rate": 7.898687089715537e-05, |
| "loss": 16.3514, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.23755026291370243, |
| "grad_norm": 18.960033416748047, |
| "learning_rate": 7.891466083150986e-05, |
| "loss": 14.6703, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.23825725774380274, |
| "grad_norm": 19.392698287963867, |
| "learning_rate": 7.884245076586433e-05, |
| "loss": 15.4453, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.23896425257390305, |
| "grad_norm": 24.39886474609375, |
| "learning_rate": 7.877024070021882e-05, |
| "loss": 16.2057, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.23967124740400336, |
| "grad_norm": 20.101104736328125, |
| "learning_rate": 7.86980306345733e-05, |
| "loss": 14.8668, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.24037824223410367, |
| "grad_norm": 19.75140380859375, |
| "learning_rate": 7.862582056892779e-05, |
| "loss": 14.1417, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.24108523706420396, |
| "grad_norm": 20.29349708557129, |
| "learning_rate": 7.855361050328228e-05, |
| "loss": 15.7627, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.24179223189430427, |
| "grad_norm": 19.623151779174805, |
| "learning_rate": 7.848140043763677e-05, |
| "loss": 16.0984, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.24249922672440458, |
| "grad_norm": 18.32193946838379, |
| "learning_rate": 7.840919037199126e-05, |
| "loss": 14.4583, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.2432062215545049, |
| "grad_norm": 23.00566291809082, |
| "learning_rate": 7.833698030634573e-05, |
| "loss": 15.3754, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.2439132163846052, |
| "grad_norm": 18.748294830322266, |
| "learning_rate": 7.826477024070022e-05, |
| "loss": 15.1513, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.24462021121470548, |
| "grad_norm": 19.001272201538086, |
| "learning_rate": 7.81925601750547e-05, |
| "loss": 13.934, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.2453272060448058, |
| "grad_norm": 18.504486083984375, |
| "learning_rate": 7.812035010940919e-05, |
| "loss": 14.2281, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.2460342008749061, |
| "grad_norm": 18.790077209472656, |
| "learning_rate": 7.804814004376369e-05, |
| "loss": 14.1004, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.2467411957050064, |
| "grad_norm": 18.546693801879883, |
| "learning_rate": 7.797592997811817e-05, |
| "loss": 15.2542, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.24744819053510672, |
| "grad_norm": 17.351430892944336, |
| "learning_rate": 7.790371991247266e-05, |
| "loss": 15.9018, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.248155185365207, |
| "grad_norm": 22.45965576171875, |
| "learning_rate": 7.783150984682713e-05, |
| "loss": 16.1705, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.24886218019530731, |
| "grad_norm": 24.87619400024414, |
| "learning_rate": 7.775929978118162e-05, |
| "loss": 15.8143, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.24956917502540762, |
| "grad_norm": 19.052438735961914, |
| "learning_rate": 7.768708971553611e-05, |
| "loss": 14.8811, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.25027616985550794, |
| "grad_norm": 16.649532318115234, |
| "learning_rate": 7.76148796498906e-05, |
| "loss": 14.9621, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.2509831646856082, |
| "grad_norm": 21.764617919921875, |
| "learning_rate": 7.754266958424508e-05, |
| "loss": 15.196, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.25169015951570856, |
| "grad_norm": 17.580827713012695, |
| "learning_rate": 7.747045951859957e-05, |
| "loss": 14.2718, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.25239715434580884, |
| "grad_norm": 22.757190704345703, |
| "learning_rate": 7.739824945295405e-05, |
| "loss": 16.2971, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.2531041491759092, |
| "grad_norm": 23.23011016845703, |
| "learning_rate": 7.732603938730853e-05, |
| "loss": 15.0298, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.25381114400600946, |
| "grad_norm": 18.563827514648438, |
| "learning_rate": 7.725382932166302e-05, |
| "loss": 13.9777, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.25451813883610974, |
| "grad_norm": 18.0550479888916, |
| "learning_rate": 7.718161925601751e-05, |
| "loss": 15.8373, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.2552251336662101, |
| "grad_norm": 18.1561279296875, |
| "learning_rate": 7.7109409190372e-05, |
| "loss": 14.195, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.25593212849631036, |
| "grad_norm": 20.923843383789062, |
| "learning_rate": 7.703719912472647e-05, |
| "loss": 14.4304, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.2566391233264107, |
| "grad_norm": 19.624542236328125, |
| "learning_rate": 7.696498905908096e-05, |
| "loss": 14.8553, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.257346118156511, |
| "grad_norm": 31.558462142944336, |
| "learning_rate": 7.689277899343545e-05, |
| "loss": 15.293, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.25805311298661127, |
| "grad_norm": 22.13149642944336, |
| "learning_rate": 7.682056892778993e-05, |
| "loss": 16.3153, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.2587601078167116, |
| "grad_norm": 20.600019454956055, |
| "learning_rate": 7.674835886214443e-05, |
| "loss": 13.9211, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.2594671026468119, |
| "grad_norm": 17.84665298461914, |
| "learning_rate": 7.667614879649891e-05, |
| "loss": 14.4503, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.2601740974769122, |
| "grad_norm": 21.022363662719727, |
| "learning_rate": 7.66039387308534e-05, |
| "loss": 15.0906, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.2608810923070125, |
| "grad_norm": 20.317344665527344, |
| "learning_rate": 7.653172866520787e-05, |
| "loss": 15.4168, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.2615880871371128, |
| "grad_norm": 21.427474975585938, |
| "learning_rate": 7.645951859956236e-05, |
| "loss": 13.9196, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.26229508196721313, |
| "grad_norm": 18.921598434448242, |
| "learning_rate": 7.638730853391685e-05, |
| "loss": 15.762, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.2630020767973134, |
| "grad_norm": 18.84344482421875, |
| "learning_rate": 7.631509846827134e-05, |
| "loss": 13.6822, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.26370907162741375, |
| "grad_norm": 24.704998016357422, |
| "learning_rate": 7.624288840262583e-05, |
| "loss": 15.9235, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.26441606645751403, |
| "grad_norm": 20.00655746459961, |
| "learning_rate": 7.617067833698031e-05, |
| "loss": 14.3952, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.2651230612876143, |
| "grad_norm": 24.082733154296875, |
| "learning_rate": 7.60984682713348e-05, |
| "loss": 15.3455, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.26583005611771465, |
| "grad_norm": 26.383352279663086, |
| "learning_rate": 7.602625820568927e-05, |
| "loss": 14.619, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.26653705094781494, |
| "grad_norm": 19.369657516479492, |
| "learning_rate": 7.595404814004376e-05, |
| "loss": 15.3833, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.2672440457779153, |
| "grad_norm": 17.416828155517578, |
| "learning_rate": 7.588183807439825e-05, |
| "loss": 15.9244, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.26795104060801556, |
| "grad_norm": 20.211816787719727, |
| "learning_rate": 7.580962800875274e-05, |
| "loss": 14.9873, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.26865803543811584, |
| "grad_norm": 18.542783737182617, |
| "learning_rate": 7.573741794310723e-05, |
| "loss": 15.0154, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.2693650302682162, |
| "grad_norm": 25.668907165527344, |
| "learning_rate": 7.56652078774617e-05, |
| "loss": 14.9847, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.27007202509831646, |
| "grad_norm": 17.961862564086914, |
| "learning_rate": 7.55929978118162e-05, |
| "loss": 15.7291, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.2707790199284168, |
| "grad_norm": 17.490516662597656, |
| "learning_rate": 7.552078774617067e-05, |
| "loss": 14.7697, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.2714860147585171, |
| "grad_norm": 20.74443817138672, |
| "learning_rate": 7.544857768052517e-05, |
| "loss": 13.9046, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.27219300958861736, |
| "grad_norm": 20.980619430541992, |
| "learning_rate": 7.537636761487966e-05, |
| "loss": 15.4803, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.2729000044187177, |
| "grad_norm": 16.939481735229492, |
| "learning_rate": 7.530415754923414e-05, |
| "loss": 14.944, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.273606999248818, |
| "grad_norm": 21.0670223236084, |
| "learning_rate": 7.523194748358863e-05, |
| "loss": 14.8068, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.2743139940789183, |
| "grad_norm": 19.005022048950195, |
| "learning_rate": 7.51597374179431e-05, |
| "loss": 14.6864, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.2750209889090186, |
| "grad_norm": 21.184141159057617, |
| "learning_rate": 7.50875273522976e-05, |
| "loss": 15.4141, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.2757279837391189, |
| "grad_norm": 24.792299270629883, |
| "learning_rate": 7.501531728665208e-05, |
| "loss": 15.6437, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.2764349785692192, |
| "grad_norm": 21.77752113342285, |
| "learning_rate": 7.494310722100657e-05, |
| "loss": 14.8935, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.2771419733993195, |
| "grad_norm": 17.924402236938477, |
| "learning_rate": 7.487089715536105e-05, |
| "loss": 14.8585, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.27784896822941985, |
| "grad_norm": 16.164682388305664, |
| "learning_rate": 7.479868708971554e-05, |
| "loss": 15.3977, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.27855596305952013, |
| "grad_norm": 20.66676139831543, |
| "learning_rate": 7.472647702407003e-05, |
| "loss": 13.9999, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.2792629578896204, |
| "grad_norm": 17.76395606994629, |
| "learning_rate": 7.46542669584245e-05, |
| "loss": 15.7318, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.27996995271972075, |
| "grad_norm": 20.148448944091797, |
| "learning_rate": 7.458205689277899e-05, |
| "loss": 16.1327, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.28067694754982103, |
| "grad_norm": 17.416706085205078, |
| "learning_rate": 7.450984682713348e-05, |
| "loss": 15.1453, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.28138394237992137, |
| "grad_norm": 19.03015899658203, |
| "learning_rate": 7.443763676148797e-05, |
| "loss": 14.2771, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.28209093721002165, |
| "grad_norm": 24.30375862121582, |
| "learning_rate": 7.436542669584245e-05, |
| "loss": 15.2704, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.28279793204012194, |
| "grad_norm": 15.849617004394531, |
| "learning_rate": 7.429321663019694e-05, |
| "loss": 13.9025, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.2835049268702223, |
| "grad_norm": 18.239795684814453, |
| "learning_rate": 7.422100656455143e-05, |
| "loss": 14.2617, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.28421192170032256, |
| "grad_norm": 16.995162963867188, |
| "learning_rate": 7.414879649890592e-05, |
| "loss": 15.2838, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.2849189165304229, |
| "grad_norm": 18.74176788330078, |
| "learning_rate": 7.40765864332604e-05, |
| "loss": 14.6324, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.2856259113605232, |
| "grad_norm": 19.847698211669922, |
| "learning_rate": 7.400437636761488e-05, |
| "loss": 14.863, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.28633290619062346, |
| "grad_norm": 22.41224479675293, |
| "learning_rate": 7.393216630196937e-05, |
| "loss": 15.3046, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.2870399010207238, |
| "grad_norm": 19.6437931060791, |
| "learning_rate": 7.385995623632385e-05, |
| "loss": 15.9613, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.2877468958508241, |
| "grad_norm": 18.622400283813477, |
| "learning_rate": 7.378774617067834e-05, |
| "loss": 14.8365, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.2884538906809244, |
| "grad_norm": 15.845386505126953, |
| "learning_rate": 7.371553610503283e-05, |
| "loss": 14.7285, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.2891608855110247, |
| "grad_norm": 17.305540084838867, |
| "learning_rate": 7.364332603938731e-05, |
| "loss": 12.5411, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.289867880341125, |
| "grad_norm": 21.858407974243164, |
| "learning_rate": 7.35711159737418e-05, |
| "loss": 14.1674, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.2905748751712253, |
| "grad_norm": 17.716867446899414, |
| "learning_rate": 7.349890590809628e-05, |
| "loss": 13.9771, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.2912818700013256, |
| "grad_norm": 19.153947830200195, |
| "learning_rate": 7.342669584245077e-05, |
| "loss": 15.4464, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.29198886483142594, |
| "grad_norm": 19.239585876464844, |
| "learning_rate": 7.335448577680525e-05, |
| "loss": 15.6705, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.2926958596615262, |
| "grad_norm": 20.419544219970703, |
| "learning_rate": 7.328227571115973e-05, |
| "loss": 14.868, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.2934028544916265, |
| "grad_norm": 24.518224716186523, |
| "learning_rate": 7.321006564551424e-05, |
| "loss": 13.4362, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.29410984932172685, |
| "grad_norm": 20.21552276611328, |
| "learning_rate": 7.313785557986871e-05, |
| "loss": 15.6206, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.29481684415182713, |
| "grad_norm": 20.633731842041016, |
| "learning_rate": 7.30656455142232e-05, |
| "loss": 14.4674, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.29552383898192747, |
| "grad_norm": 20.492298126220703, |
| "learning_rate": 7.299343544857768e-05, |
| "loss": 13.6936, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.29623083381202775, |
| "grad_norm": 16.64995765686035, |
| "learning_rate": 7.292122538293217e-05, |
| "loss": 14.5776, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.29693782864212803, |
| "grad_norm": 16.90037727355957, |
| "learning_rate": 7.284901531728666e-05, |
| "loss": 14.7451, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.29764482347222837, |
| "grad_norm": 22.07757568359375, |
| "learning_rate": 7.277680525164115e-05, |
| "loss": 15.1344, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.29835181830232865, |
| "grad_norm": 20.070301055908203, |
| "learning_rate": 7.270459518599564e-05, |
| "loss": 15.2919, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.299058813132429, |
| "grad_norm": 20.72273826599121, |
| "learning_rate": 7.263238512035011e-05, |
| "loss": 15.1353, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.2997658079625293, |
| "grad_norm": 18.998750686645508, |
| "learning_rate": 7.25601750547046e-05, |
| "loss": 13.6777, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.30047280279262956, |
| "grad_norm": 17.999582290649414, |
| "learning_rate": 7.248796498905908e-05, |
| "loss": 13.6477, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.3011797976227299, |
| "grad_norm": 25.615734100341797, |
| "learning_rate": 7.241575492341357e-05, |
| "loss": 15.8751, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.3018867924528302, |
| "grad_norm": 19.07990074157715, |
| "learning_rate": 7.234354485776806e-05, |
| "loss": 14.2716, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.3025937872829305, |
| "grad_norm": 18.45189094543457, |
| "learning_rate": 7.227133479212255e-05, |
| "loss": 15.6696, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.3033007821130308, |
| "grad_norm": 17.569032669067383, |
| "learning_rate": 7.219912472647702e-05, |
| "loss": 14.7068, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.3040077769431311, |
| "grad_norm": 17.7779483795166, |
| "learning_rate": 7.212691466083151e-05, |
| "loss": 15.4694, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.3047147717732314, |
| "grad_norm": 21.57255744934082, |
| "learning_rate": 7.2054704595186e-05, |
| "loss": 14.7624, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.3054217666033317, |
| "grad_norm": 18.774274826049805, |
| "learning_rate": 7.198249452954048e-05, |
| "loss": 15.298, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.30612876143343204, |
| "grad_norm": 19.423994064331055, |
| "learning_rate": 7.191028446389498e-05, |
| "loss": 15.2514, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.3068357562635323, |
| "grad_norm": 21.010740280151367, |
| "learning_rate": 7.183807439824946e-05, |
| "loss": 16.4023, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.3075427510936326, |
| "grad_norm": 18.57482147216797, |
| "learning_rate": 7.176586433260394e-05, |
| "loss": 14.6922, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.30824974592373294, |
| "grad_norm": 21.362197875976562, |
| "learning_rate": 7.169365426695842e-05, |
| "loss": 15.3559, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.3089567407538332, |
| "grad_norm": 18.94207763671875, |
| "learning_rate": 7.162144420131291e-05, |
| "loss": 15.7067, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.30966373558393356, |
| "grad_norm": 20.90492820739746, |
| "learning_rate": 7.15492341356674e-05, |
| "loss": 13.3837, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.31037073041403385, |
| "grad_norm": 19.057661056518555, |
| "learning_rate": 7.147702407002189e-05, |
| "loss": 16.2484, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.31107772524413413, |
| "grad_norm": 19.009706497192383, |
| "learning_rate": 7.140481400437638e-05, |
| "loss": 13.5528, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.31178472007423447, |
| "grad_norm": 16.633657455444336, |
| "learning_rate": 7.133260393873085e-05, |
| "loss": 14.4136, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.31249171490433475, |
| "grad_norm": 23.301849365234375, |
| "learning_rate": 7.126039387308534e-05, |
| "loss": 15.5324, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.3131987097344351, |
| "grad_norm": 19.16399574279785, |
| "learning_rate": 7.118818380743982e-05, |
| "loss": 14.37, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.31390570456453537, |
| "grad_norm": 20.813108444213867, |
| "learning_rate": 7.111597374179431e-05, |
| "loss": 15.9513, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.31461269939463565, |
| "grad_norm": 20.032018661499023, |
| "learning_rate": 7.10437636761488e-05, |
| "loss": 14.5105, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.315319694224736, |
| "grad_norm": 20.41152572631836, |
| "learning_rate": 7.097155361050329e-05, |
| "loss": 14.2337, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.3160266890548363, |
| "grad_norm": 18.890499114990234, |
| "learning_rate": 7.089934354485778e-05, |
| "loss": 14.4166, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.3167336838849366, |
| "grad_norm": 22.742734909057617, |
| "learning_rate": 7.082713347921225e-05, |
| "loss": 14.1206, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.3174406787150369, |
| "grad_norm": 19.84697723388672, |
| "learning_rate": 7.075492341356674e-05, |
| "loss": 13.6646, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.3181476735451372, |
| "grad_norm": 15.977441787719727, |
| "learning_rate": 7.068271334792122e-05, |
| "loss": 14.1537, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.3188546683752375, |
| "grad_norm": 20.601211547851562, |
| "learning_rate": 7.061050328227571e-05, |
| "loss": 16.1045, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.3195616632053378, |
| "grad_norm": 20.70127296447754, |
| "learning_rate": 7.053829321663021e-05, |
| "loss": 15.5461, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.32026865803543814, |
| "grad_norm": 20.077213287353516, |
| "learning_rate": 7.046608315098469e-05, |
| "loss": 15.0559, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.3209756528655384, |
| "grad_norm": 17.75333023071289, |
| "learning_rate": 7.039387308533918e-05, |
| "loss": 15.5746, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.3216826476956387, |
| "grad_norm": 19.279191970825195, |
| "learning_rate": 7.032166301969365e-05, |
| "loss": 14.1511, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.32238964252573904, |
| "grad_norm": 23.31242561340332, |
| "learning_rate": 7.024945295404814e-05, |
| "loss": 14.4471, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.3230966373558393, |
| "grad_norm": 20.544729232788086, |
| "learning_rate": 7.017724288840263e-05, |
| "loss": 14.2697, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.32380363218593966, |
| "grad_norm": 20.453166961669922, |
| "learning_rate": 7.010503282275712e-05, |
| "loss": 16.2543, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.32451062701603994, |
| "grad_norm": 17.394886016845703, |
| "learning_rate": 7.00328227571116e-05, |
| "loss": 15.5171, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.3252176218461402, |
| "grad_norm": 20.17839813232422, |
| "learning_rate": 6.996061269146609e-05, |
| "loss": 14.7051, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.32592461667624056, |
| "grad_norm": 17.22258758544922, |
| "learning_rate": 6.988840262582057e-05, |
| "loss": 14.1478, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.32663161150634085, |
| "grad_norm": 19.707887649536133, |
| "learning_rate": 6.981619256017505e-05, |
| "loss": 14.2404, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.3273386063364412, |
| "grad_norm": 21.5001163482666, |
| "learning_rate": 6.974398249452954e-05, |
| "loss": 14.6055, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.32804560116654147, |
| "grad_norm": 16.11020851135254, |
| "learning_rate": 6.967177242888403e-05, |
| "loss": 14.7407, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.32875259599664175, |
| "grad_norm": 17.926362991333008, |
| "learning_rate": 6.959956236323852e-05, |
| "loss": 13.7554, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.3294595908267421, |
| "grad_norm": 20.194194793701172, |
| "learning_rate": 6.9527352297593e-05, |
| "loss": 15.0067, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.33016658565684237, |
| "grad_norm": 20.20330810546875, |
| "learning_rate": 6.945514223194748e-05, |
| "loss": 14.5011, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.3308735804869427, |
| "grad_norm": 22.408048629760742, |
| "learning_rate": 6.938293216630197e-05, |
| "loss": 14.1764, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.331580575317043, |
| "grad_norm": 16.50922966003418, |
| "learning_rate": 6.931072210065645e-05, |
| "loss": 14.2467, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.3322875701471433, |
| "grad_norm": 19.757509231567383, |
| "learning_rate": 6.923851203501095e-05, |
| "loss": 15.6154, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.3329945649772436, |
| "grad_norm": 18.997314453125, |
| "learning_rate": 6.916630196936543e-05, |
| "loss": 14.8435, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.3337015598073439, |
| "grad_norm": 19.006284713745117, |
| "learning_rate": 6.909409190371992e-05, |
| "loss": 14.3226, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.33440855463744423, |
| "grad_norm": 20.591211318969727, |
| "learning_rate": 6.90218818380744e-05, |
| "loss": 14.5859, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.3351155494675445, |
| "grad_norm": 21.25404930114746, |
| "learning_rate": 6.894967177242888e-05, |
| "loss": 15.4005, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.3358225442976448, |
| "grad_norm": 19.690427780151367, |
| "learning_rate": 6.887746170678337e-05, |
| "loss": 13.4944, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.33652953912774514, |
| "grad_norm": 18.97504997253418, |
| "learning_rate": 6.880525164113786e-05, |
| "loss": 15.8937, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.3372365339578454, |
| "grad_norm": 20.747446060180664, |
| "learning_rate": 6.873304157549235e-05, |
| "loss": 15.5289, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.33794352878794576, |
| "grad_norm": 47.02959442138672, |
| "learning_rate": 6.866083150984683e-05, |
| "loss": 12.8834, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.33865052361804604, |
| "grad_norm": 19.15494728088379, |
| "learning_rate": 6.858862144420132e-05, |
| "loss": 14.4253, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.3393575184481463, |
| "grad_norm": 20.28019905090332, |
| "learning_rate": 6.851641137855579e-05, |
| "loss": 14.4871, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.34006451327824666, |
| "grad_norm": 21.266618728637695, |
| "learning_rate": 6.844420131291028e-05, |
| "loss": 16.2024, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.34077150810834694, |
| "grad_norm": 16.92910385131836, |
| "learning_rate": 6.837199124726477e-05, |
| "loss": 14.8702, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.3414785029384473, |
| "grad_norm": 17.67193603515625, |
| "learning_rate": 6.829978118161926e-05, |
| "loss": 14.5345, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.34218549776854756, |
| "grad_norm": 18.977420806884766, |
| "learning_rate": 6.822757111597375e-05, |
| "loss": 14.5069, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.34289249259864785, |
| "grad_norm": 21.56228256225586, |
| "learning_rate": 6.815536105032823e-05, |
| "loss": 14.5382, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.3435994874287482, |
| "grad_norm": 21.39179039001465, |
| "learning_rate": 6.808315098468272e-05, |
| "loss": 15.6628, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.34430648225884847, |
| "grad_norm": 19.451231002807617, |
| "learning_rate": 6.801094091903719e-05, |
| "loss": 14.3872, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.3450134770889488, |
| "grad_norm": 16.918291091918945, |
| "learning_rate": 6.79387308533917e-05, |
| "loss": 12.4315, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.3457204719190491, |
| "grad_norm": 15.275004386901855, |
| "learning_rate": 6.786652078774618e-05, |
| "loss": 13.0213, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.34642746674914937, |
| "grad_norm": 16.77974510192871, |
| "learning_rate": 6.779431072210066e-05, |
| "loss": 15.1569, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.3471344615792497, |
| "grad_norm": 17.43290901184082, |
| "learning_rate": 6.772210065645515e-05, |
| "loss": 13.2295, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.34784145640935, |
| "grad_norm": 18.017515182495117, |
| "learning_rate": 6.764989059080962e-05, |
| "loss": 13.2887, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.34854845123945033, |
| "grad_norm": 20.11395263671875, |
| "learning_rate": 6.757768052516411e-05, |
| "loss": 14.3822, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.3492554460695506, |
| "grad_norm": 16.92176628112793, |
| "learning_rate": 6.75054704595186e-05, |
| "loss": 13.756, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.3499624408996509, |
| "grad_norm": 17.108287811279297, |
| "learning_rate": 6.743326039387309e-05, |
| "loss": 14.2471, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.35066943572975123, |
| "grad_norm": 17.316476821899414, |
| "learning_rate": 6.736105032822757e-05, |
| "loss": 14.6229, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.3513764305598515, |
| "grad_norm": 17.58360481262207, |
| "learning_rate": 6.728884026258206e-05, |
| "loss": 15.1998, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.35208342538995185, |
| "grad_norm": 25.55293846130371, |
| "learning_rate": 6.721663019693655e-05, |
| "loss": 16.3773, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.35279042022005214, |
| "grad_norm": 17.25092124938965, |
| "learning_rate": 6.714442013129102e-05, |
| "loss": 13.9458, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.3534974150501524, |
| "grad_norm": 21.322893142700195, |
| "learning_rate": 6.707221006564551e-05, |
| "loss": 13.8305, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.35420440988025276, |
| "grad_norm": 22.437015533447266, |
| "learning_rate": 6.7e-05, |
| "loss": 14.1218, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.35491140471035304, |
| "grad_norm": 25.787944793701172, |
| "learning_rate": 6.692778993435449e-05, |
| "loss": 14.9072, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.3556183995404534, |
| "grad_norm": 20.496932983398438, |
| "learning_rate": 6.685557986870897e-05, |
| "loss": 15.5787, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.35632539437055366, |
| "grad_norm": 17.342716217041016, |
| "learning_rate": 6.678336980306346e-05, |
| "loss": 14.5554, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.35703238920065394, |
| "grad_norm": 17.56897735595703, |
| "learning_rate": 6.671115973741795e-05, |
| "loss": 15.3665, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.3577393840307543, |
| "grad_norm": 19.746797561645508, |
| "learning_rate": 6.663894967177244e-05, |
| "loss": 14.08, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.35844637886085456, |
| "grad_norm": 17.250167846679688, |
| "learning_rate": 6.656673960612693e-05, |
| "loss": 14.5058, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.3591533736909549, |
| "grad_norm": 16.402482986450195, |
| "learning_rate": 6.64945295404814e-05, |
| "loss": 15.3058, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.3598603685210552, |
| "grad_norm": 17.24100685119629, |
| "learning_rate": 6.642231947483589e-05, |
| "loss": 15.2186, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.36056736335115547, |
| "grad_norm": 17.75218963623047, |
| "learning_rate": 6.635010940919037e-05, |
| "loss": 14.2236, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.3612743581812558, |
| "grad_norm": 19.30592918395996, |
| "learning_rate": 6.627789934354486e-05, |
| "loss": 13.7971, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.3619813530113561, |
| "grad_norm": 16.735496520996094, |
| "learning_rate": 6.620568927789935e-05, |
| "loss": 12.8338, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.3626883478414564, |
| "grad_norm": 18.537858963012695, |
| "learning_rate": 6.613347921225383e-05, |
| "loss": 14.715, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.3633953426715567, |
| "grad_norm": 17.93909454345703, |
| "learning_rate": 6.606126914660832e-05, |
| "loss": 15.0655, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.364102337501657, |
| "grad_norm": 21.184032440185547, |
| "learning_rate": 6.59890590809628e-05, |
| "loss": 13.1322, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.36480933233175733, |
| "grad_norm": 22.548582077026367, |
| "learning_rate": 6.591684901531729e-05, |
| "loss": 14.5715, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.3655163271618576, |
| "grad_norm": 17.91143226623535, |
| "learning_rate": 6.584463894967177e-05, |
| "loss": 16.3511, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.36622332199195795, |
| "grad_norm": 21.47669219970703, |
| "learning_rate": 6.577242888402625e-05, |
| "loss": 14.7748, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.36693031682205823, |
| "grad_norm": 17.893421173095703, |
| "learning_rate": 6.570021881838076e-05, |
| "loss": 13.6889, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.3676373116521585, |
| "grad_norm": 18.86720848083496, |
| "learning_rate": 6.562800875273523e-05, |
| "loss": 14.3464, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.36834430648225885, |
| "grad_norm": 17.936094284057617, |
| "learning_rate": 6.555579868708972e-05, |
| "loss": 14.1864, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.36905130131235914, |
| "grad_norm": 17.583314895629883, |
| "learning_rate": 6.54835886214442e-05, |
| "loss": 14.6255, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.3697582961424595, |
| "grad_norm": 18.166719436645508, |
| "learning_rate": 6.541137855579869e-05, |
| "loss": 14.9096, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.37046529097255976, |
| "grad_norm": 15.307825088500977, |
| "learning_rate": 6.533916849015316e-05, |
| "loss": 13.7143, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.3711722858026601, |
| "grad_norm": 14.538102149963379, |
| "learning_rate": 6.526695842450767e-05, |
| "loss": 13.5959, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.3718792806327604, |
| "grad_norm": 17.80873680114746, |
| "learning_rate": 6.519474835886216e-05, |
| "loss": 13.6337, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.37258627546286066, |
| "grad_norm": 18.472633361816406, |
| "learning_rate": 6.512253829321663e-05, |
| "loss": 13.9751, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.373293270292961, |
| "grad_norm": 22.643638610839844, |
| "learning_rate": 6.505032822757112e-05, |
| "loss": 14.0875, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.3740002651230613, |
| "grad_norm": 17.975576400756836, |
| "learning_rate": 6.49781181619256e-05, |
| "loss": 14.5217, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.3747072599531616, |
| "grad_norm": 17.388160705566406, |
| "learning_rate": 6.490590809628009e-05, |
| "loss": 14.6185, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.3754142547832619, |
| "grad_norm": 20.172466278076172, |
| "learning_rate": 6.483369803063458e-05, |
| "loss": 14.3061, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.3761212496133622, |
| "grad_norm": 17.396696090698242, |
| "learning_rate": 6.476148796498907e-05, |
| "loss": 12.8709, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.3768282444434625, |
| "grad_norm": 160.06143188476562, |
| "learning_rate": 6.468927789934354e-05, |
| "loss": 15.2518, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.3775352392735628, |
| "grad_norm": 18.92376136779785, |
| "learning_rate": 6.461706783369803e-05, |
| "loss": 13.0073, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.37824223410366314, |
| "grad_norm": 19.5358943939209, |
| "learning_rate": 6.454485776805252e-05, |
| "loss": 14.3698, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.3789492289337634, |
| "grad_norm": 17.032445907592773, |
| "learning_rate": 6.4472647702407e-05, |
| "loss": 15.5133, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.3796562237638637, |
| "grad_norm": 18.888500213623047, |
| "learning_rate": 6.44004376367615e-05, |
| "loss": 13.7176, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.38036321859396405, |
| "grad_norm": 16.944372177124023, |
| "learning_rate": 6.432822757111598e-05, |
| "loss": 14.7796, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.38107021342406433, |
| "grad_norm": 17.16058921813965, |
| "learning_rate": 6.425601750547046e-05, |
| "loss": 15.3055, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.38177720825416467, |
| "grad_norm": 16.601852416992188, |
| "learning_rate": 6.418380743982494e-05, |
| "loss": 15.0371, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.38248420308426495, |
| "grad_norm": 17.420427322387695, |
| "learning_rate": 6.411159737417943e-05, |
| "loss": 14.7263, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.38319119791436523, |
| "grad_norm": 15.347443580627441, |
| "learning_rate": 6.403938730853392e-05, |
| "loss": 13.7447, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.38389819274446557, |
| "grad_norm": 16.60781478881836, |
| "learning_rate": 6.396717724288841e-05, |
| "loss": 15.4559, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.38460518757456585, |
| "grad_norm": 16.276700973510742, |
| "learning_rate": 6.38949671772429e-05, |
| "loss": 13.3962, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.3853121824046662, |
| "grad_norm": 22.067554473876953, |
| "learning_rate": 6.382275711159737e-05, |
| "loss": 13.5659, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.3860191772347665, |
| "grad_norm": 18.238330841064453, |
| "learning_rate": 6.375054704595186e-05, |
| "loss": 13.4307, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.38672617206486676, |
| "grad_norm": 18.11393165588379, |
| "learning_rate": 6.367833698030634e-05, |
| "loss": 13.1592, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.3874331668949671, |
| "grad_norm": 22.48624610900879, |
| "learning_rate": 6.360612691466083e-05, |
| "loss": 14.6767, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.3881401617250674, |
| "grad_norm": 20.461183547973633, |
| "learning_rate": 6.353391684901532e-05, |
| "loss": 13.5224, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.3888471565551677, |
| "grad_norm": 17.46296501159668, |
| "learning_rate": 6.346170678336981e-05, |
| "loss": 14.4498, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.389554151385268, |
| "grad_norm": 16.26678466796875, |
| "learning_rate": 6.33894967177243e-05, |
| "loss": 12.3649, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.3902611462153683, |
| "grad_norm": 16.18904685974121, |
| "learning_rate": 6.331728665207877e-05, |
| "loss": 14.502, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.3909681410454686, |
| "grad_norm": 15.722247123718262, |
| "learning_rate": 6.324507658643326e-05, |
| "loss": 15.1522, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.3916751358755689, |
| "grad_norm": 18.695825576782227, |
| "learning_rate": 6.317286652078774e-05, |
| "loss": 14.2403, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.39238213070566924, |
| "grad_norm": 17.641643524169922, |
| "learning_rate": 6.310065645514223e-05, |
| "loss": 12.1116, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.3930891255357695, |
| "grad_norm": 18.540403366088867, |
| "learning_rate": 6.302844638949673e-05, |
| "loss": 15.2668, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.3937961203658698, |
| "grad_norm": 19.0169620513916, |
| "learning_rate": 6.29562363238512e-05, |
| "loss": 16.3723, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.39450311519597014, |
| "grad_norm": 15.761820793151855, |
| "learning_rate": 6.28840262582057e-05, |
| "loss": 14.452, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.3952101100260704, |
| "grad_norm": 16.140188217163086, |
| "learning_rate": 6.281181619256017e-05, |
| "loss": 14.1948, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.39591710485617077, |
| "grad_norm": 17.851272583007812, |
| "learning_rate": 6.273960612691466e-05, |
| "loss": 14.1573, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.39662409968627105, |
| "grad_norm": 16.403974533081055, |
| "learning_rate": 6.266739606126915e-05, |
| "loss": 13.3192, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.39733109451637133, |
| "grad_norm": 20.728595733642578, |
| "learning_rate": 6.259518599562364e-05, |
| "loss": 16.3781, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.39803808934647167, |
| "grad_norm": 17.328998565673828, |
| "learning_rate": 6.252297592997813e-05, |
| "loss": 14.3546, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.39874508417657195, |
| "grad_norm": 17.004539489746094, |
| "learning_rate": 6.24507658643326e-05, |
| "loss": 14.7815, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.3994520790066723, |
| "grad_norm": 18.353988647460938, |
| "learning_rate": 6.23785557986871e-05, |
| "loss": 14.2092, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.40015907383677257, |
| "grad_norm": 18.1688175201416, |
| "learning_rate": 6.230634573304157e-05, |
| "loss": 14.9211, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.40086606866687285, |
| "grad_norm": 16.45387840270996, |
| "learning_rate": 6.223413566739606e-05, |
| "loss": 13.2728, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.4015730634969732, |
| "grad_norm": 15.667755126953125, |
| "learning_rate": 6.216192560175055e-05, |
| "loss": 13.6101, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.4022800583270735, |
| "grad_norm": 16.11159324645996, |
| "learning_rate": 6.208971553610504e-05, |
| "loss": 15.6139, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.4029870531571738, |
| "grad_norm": 16.811134338378906, |
| "learning_rate": 6.201750547045951e-05, |
| "loss": 13.7581, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.4036940479872741, |
| "grad_norm": 25.489770889282227, |
| "learning_rate": 6.1945295404814e-05, |
| "loss": 13.9163, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.4044010428173744, |
| "grad_norm": 17.47158432006836, |
| "learning_rate": 6.18730853391685e-05, |
| "loss": 14.9882, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.4051080376474747, |
| "grad_norm": 16.549089431762695, |
| "learning_rate": 6.180087527352298e-05, |
| "loss": 14.9192, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.405815032477575, |
| "grad_norm": 18.736282348632812, |
| "learning_rate": 6.172866520787747e-05, |
| "loss": 13.5939, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.40652202730767534, |
| "grad_norm": 15.82469367980957, |
| "learning_rate": 6.165645514223195e-05, |
| "loss": 15.7199, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.4072290221377756, |
| "grad_norm": 16.66925048828125, |
| "learning_rate": 6.158424507658644e-05, |
| "loss": 13.5893, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.4079360169678759, |
| "grad_norm": 16.197856903076172, |
| "learning_rate": 6.151203501094091e-05, |
| "loss": 12.5333, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.40864301179797624, |
| "grad_norm": 18.60298728942871, |
| "learning_rate": 6.143982494529542e-05, |
| "loss": 13.3609, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.4093500066280765, |
| "grad_norm": 18.529094696044922, |
| "learning_rate": 6.13676148796499e-05, |
| "loss": 14.9298, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.41005700145817686, |
| "grad_norm": 19.059642791748047, |
| "learning_rate": 6.129540481400438e-05, |
| "loss": 13.1881, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.41076399628827714, |
| "grad_norm": 17.82415199279785, |
| "learning_rate": 6.122319474835887e-05, |
| "loss": 12.5294, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.4114709911183774, |
| "grad_norm": 15.246479988098145, |
| "learning_rate": 6.115098468271335e-05, |
| "loss": 13.1343, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.41217798594847777, |
| "grad_norm": 18.232219696044922, |
| "learning_rate": 6.107877461706784e-05, |
| "loss": 14.1395, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.41288498077857805, |
| "grad_norm": 25.185590744018555, |
| "learning_rate": 6.100656455142232e-05, |
| "loss": 15.0905, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.4135919756086784, |
| "grad_norm": 25.201833724975586, |
| "learning_rate": 6.093435448577681e-05, |
| "loss": 15.0098, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.41429897043877867, |
| "grad_norm": 20.78860855102539, |
| "learning_rate": 6.08621444201313e-05, |
| "loss": 14.0662, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.41500596526887895, |
| "grad_norm": 32.28705978393555, |
| "learning_rate": 6.0789934354485774e-05, |
| "loss": 14.281, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.4157129600989793, |
| "grad_norm": 18.067970275878906, |
| "learning_rate": 6.071772428884027e-05, |
| "loss": 14.2242, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.41641995492907957, |
| "grad_norm": 21.324962615966797, |
| "learning_rate": 6.0645514223194746e-05, |
| "loss": 13.2133, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.4171269497591799, |
| "grad_norm": 14.73901081085205, |
| "learning_rate": 6.0573304157549235e-05, |
| "loss": 14.2005, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.4178339445892802, |
| "grad_norm": 20.540681838989258, |
| "learning_rate": 6.050109409190372e-05, |
| "loss": 14.2718, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.4185409394193805, |
| "grad_norm": 17.47296905517578, |
| "learning_rate": 6.042888402625821e-05, |
| "loss": 15.0917, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.4192479342494808, |
| "grad_norm": 17.222566604614258, |
| "learning_rate": 6.03566739606127e-05, |
| "loss": 13.2435, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.4199549290795811, |
| "grad_norm": 17.993486404418945, |
| "learning_rate": 6.028446389496718e-05, |
| "loss": 14.8301, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.42066192390968143, |
| "grad_norm": 15.780810356140137, |
| "learning_rate": 6.021225382932167e-05, |
| "loss": 13.1897, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.4213689187397817, |
| "grad_norm": 16.930824279785156, |
| "learning_rate": 6.0140043763676145e-05, |
| "loss": 13.8884, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.422075913569882, |
| "grad_norm": 17.779985427856445, |
| "learning_rate": 6.006783369803064e-05, |
| "loss": 13.1271, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.42278290839998234, |
| "grad_norm": 22.52481460571289, |
| "learning_rate": 5.999562363238512e-05, |
| "loss": 15.1317, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.4234899032300826, |
| "grad_norm": 17.568035125732422, |
| "learning_rate": 5.9923413566739606e-05, |
| "loss": 14.3153, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.42419689806018296, |
| "grad_norm": 15.473036766052246, |
| "learning_rate": 5.9851203501094096e-05, |
| "loss": 14.0021, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.42490389289028324, |
| "grad_norm": 19.565162658691406, |
| "learning_rate": 5.977899343544858e-05, |
| "loss": 15.8899, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.4256108877203835, |
| "grad_norm": 15.546586990356445, |
| "learning_rate": 5.970678336980307e-05, |
| "loss": 14.276, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.42631788255048386, |
| "grad_norm": 16.37678337097168, |
| "learning_rate": 5.963457330415755e-05, |
| "loss": 14.8217, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.42702487738058414, |
| "grad_norm": 17.48716163635254, |
| "learning_rate": 5.956236323851204e-05, |
| "loss": 13.0474, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.4277318722106845, |
| "grad_norm": 15.518293380737305, |
| "learning_rate": 5.9490153172866516e-05, |
| "loss": 13.5803, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.42843886704078477, |
| "grad_norm": 17.393667221069336, |
| "learning_rate": 5.941794310722101e-05, |
| "loss": 13.1692, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.42914586187088505, |
| "grad_norm": 17.597732543945312, |
| "learning_rate": 5.934573304157549e-05, |
| "loss": 14.37, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.4298528567009854, |
| "grad_norm": 20.43920135498047, |
| "learning_rate": 5.927352297592998e-05, |
| "loss": 14.6832, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.43055985153108567, |
| "grad_norm": 16.366361618041992, |
| "learning_rate": 5.9201312910284466e-05, |
| "loss": 13.5082, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.431266846361186, |
| "grad_norm": 20.55727195739746, |
| "learning_rate": 5.912910284463895e-05, |
| "loss": 14.3706, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.4319738411912863, |
| "grad_norm": 15.425431251525879, |
| "learning_rate": 5.905689277899344e-05, |
| "loss": 14.2575, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.4326808360213866, |
| "grad_norm": 15.034273147583008, |
| "learning_rate": 5.898468271334792e-05, |
| "loss": 14.42, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.4333878308514869, |
| "grad_norm": 15.592576026916504, |
| "learning_rate": 5.891247264770241e-05, |
| "loss": 14.2622, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.4340948256815872, |
| "grad_norm": 16.532777786254883, |
| "learning_rate": 5.8840262582056886e-05, |
| "loss": 14.1881, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.43480182051168753, |
| "grad_norm": 16.355937957763672, |
| "learning_rate": 5.876805251641138e-05, |
| "loss": 14.3817, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.4355088153417878, |
| "grad_norm": 17.28464126586914, |
| "learning_rate": 5.869584245076587e-05, |
| "loss": 14.5989, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.4362158101718881, |
| "grad_norm": 15.116608619689941, |
| "learning_rate": 5.862363238512035e-05, |
| "loss": 14.2519, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.43692280500198843, |
| "grad_norm": 15.99145793914795, |
| "learning_rate": 5.855142231947484e-05, |
| "loss": 13.1821, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.4376297998320887, |
| "grad_norm": 16.347198486328125, |
| "learning_rate": 5.847921225382932e-05, |
| "loss": 14.6225, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.43833679466218906, |
| "grad_norm": 20.394723892211914, |
| "learning_rate": 5.840700218818381e-05, |
| "loss": 12.4356, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.43904378949228934, |
| "grad_norm": 15.125354766845703, |
| "learning_rate": 5.833479212253829e-05, |
| "loss": 14.4433, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.4397507843223896, |
| "grad_norm": 20.564420700073242, |
| "learning_rate": 5.826258205689278e-05, |
| "loss": 13.1734, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.44045777915248996, |
| "grad_norm": 22.414443969726562, |
| "learning_rate": 5.819037199124727e-05, |
| "loss": 14.8843, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.44116477398259024, |
| "grad_norm": 17.679035186767578, |
| "learning_rate": 5.8118161925601754e-05, |
| "loss": 13.2975, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.4418717688126906, |
| "grad_norm": 17.960941314697266, |
| "learning_rate": 5.804595185995624e-05, |
| "loss": 13.4987, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.44257876364279086, |
| "grad_norm": 18.424537658691406, |
| "learning_rate": 5.797374179431072e-05, |
| "loss": 14.5378, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.44328575847289114, |
| "grad_norm": 21.95231819152832, |
| "learning_rate": 5.790153172866521e-05, |
| "loss": 14.2405, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.4439927533029915, |
| "grad_norm": 17.942035675048828, |
| "learning_rate": 5.782932166301969e-05, |
| "loss": 13.6413, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.44469974813309177, |
| "grad_norm": 16.451675415039062, |
| "learning_rate": 5.775711159737418e-05, |
| "loss": 13.9083, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.4454067429631921, |
| "grad_norm": 20.17315101623535, |
| "learning_rate": 5.768490153172867e-05, |
| "loss": 14.6052, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.4461137377932924, |
| "grad_norm": 19.259559631347656, |
| "learning_rate": 5.761269146608315e-05, |
| "loss": 14.11, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.44682073262339267, |
| "grad_norm": 17.016231536865234, |
| "learning_rate": 5.754048140043764e-05, |
| "loss": 13.2856, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.447527727453493, |
| "grad_norm": 18.402177810668945, |
| "learning_rate": 5.7468271334792124e-05, |
| "loss": 13.7489, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.4482347222835933, |
| "grad_norm": 17.117433547973633, |
| "learning_rate": 5.7396061269146614e-05, |
| "loss": 12.8285, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.44894171711369363, |
| "grad_norm": 17.498863220214844, |
| "learning_rate": 5.732385120350109e-05, |
| "loss": 13.031, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.4496487119437939, |
| "grad_norm": 18.746173858642578, |
| "learning_rate": 5.725164113785558e-05, |
| "loss": 13.4874, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.4503557067738942, |
| "grad_norm": 17.161046981811523, |
| "learning_rate": 5.7179431072210075e-05, |
| "loss": 12.6915, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.45106270160399453, |
| "grad_norm": 16.37310028076172, |
| "learning_rate": 5.710722100656455e-05, |
| "loss": 14.3346, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.4517696964340948, |
| "grad_norm": 15.53888988494873, |
| "learning_rate": 5.703501094091904e-05, |
| "loss": 13.9628, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.45247669126419515, |
| "grad_norm": 15.410018920898438, |
| "learning_rate": 5.696280087527352e-05, |
| "loss": 12.9064, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.45318368609429543, |
| "grad_norm": 14.989131927490234, |
| "learning_rate": 5.689059080962801e-05, |
| "loss": 12.2063, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.4538906809243957, |
| "grad_norm": 17.183135986328125, |
| "learning_rate": 5.6818380743982495e-05, |
| "loss": 14.2211, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.45459767575449606, |
| "grad_norm": 15.444555282592773, |
| "learning_rate": 5.6746170678336985e-05, |
| "loss": 12.9035, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.45530467058459634, |
| "grad_norm": 21.209993362426758, |
| "learning_rate": 5.667396061269146e-05, |
| "loss": 14.7551, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.4560116654146967, |
| "grad_norm": 17.231727600097656, |
| "learning_rate": 5.660175054704595e-05, |
| "loss": 14.44, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.45671866024479696, |
| "grad_norm": 14.78808879852295, |
| "learning_rate": 5.6529540481400446e-05, |
| "loss": 13.0656, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.45742565507489724, |
| "grad_norm": 16.139951705932617, |
| "learning_rate": 5.645733041575492e-05, |
| "loss": 13.5085, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.4581326499049976, |
| "grad_norm": 16.530834197998047, |
| "learning_rate": 5.638512035010941e-05, |
| "loss": 13.8217, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.45883964473509786, |
| "grad_norm": 15.445442199707031, |
| "learning_rate": 5.6312910284463894e-05, |
| "loss": 13.6222, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.4595466395651982, |
| "grad_norm": 18.647668838500977, |
| "learning_rate": 5.6240700218818384e-05, |
| "loss": 12.5635, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.4602536343952985, |
| "grad_norm": 16.22862434387207, |
| "learning_rate": 5.6168490153172866e-05, |
| "loss": 13.4586, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.46096062922539877, |
| "grad_norm": 14.387091636657715, |
| "learning_rate": 5.6096280087527356e-05, |
| "loss": 14.6621, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.4616676240554991, |
| "grad_norm": 18.270614624023438, |
| "learning_rate": 5.6024070021881845e-05, |
| "loss": 13.5773, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.4623746188855994, |
| "grad_norm": 14.584330558776855, |
| "learning_rate": 5.595185995623632e-05, |
| "loss": 12.9477, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.4630816137156997, |
| "grad_norm": 17.457223892211914, |
| "learning_rate": 5.587964989059082e-05, |
| "loss": 13.4352, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.4637886085458, |
| "grad_norm": 16.300119400024414, |
| "learning_rate": 5.580743982494529e-05, |
| "loss": 14.6178, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.4644956033759003, |
| "grad_norm": 16.14145278930664, |
| "learning_rate": 5.573522975929978e-05, |
| "loss": 14.3801, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.46520259820600063, |
| "grad_norm": 17.065845489501953, |
| "learning_rate": 5.5663019693654265e-05, |
| "loss": 14.7923, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.4659095930361009, |
| "grad_norm": 18.83047103881836, |
| "learning_rate": 5.5590809628008754e-05, |
| "loss": 15.783, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.46661658786620125, |
| "grad_norm": 17.221378326416016, |
| "learning_rate": 5.5518599562363244e-05, |
| "loss": 15.0286, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.46732358269630153, |
| "grad_norm": 16.273569107055664, |
| "learning_rate": 5.5446389496717727e-05, |
| "loss": 13.78, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.4680305775264018, |
| "grad_norm": 22.21025276184082, |
| "learning_rate": 5.5374179431072216e-05, |
| "loss": 14.7329, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.46873757235650215, |
| "grad_norm": 15.67103385925293, |
| "learning_rate": 5.530196936542669e-05, |
| "loss": 13.9941, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.46944456718660243, |
| "grad_norm": 18.95551872253418, |
| "learning_rate": 5.522975929978119e-05, |
| "loss": 13.7779, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.4701515620167028, |
| "grad_norm": 17.02660369873047, |
| "learning_rate": 5.5157549234135664e-05, |
| "loss": 13.7453, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.47085855684680306, |
| "grad_norm": 15.895587921142578, |
| "learning_rate": 5.508533916849015e-05, |
| "loss": 14.482, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.47156555167690334, |
| "grad_norm": 15.587700843811035, |
| "learning_rate": 5.501312910284464e-05, |
| "loss": 13.6505, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.4722725465070037, |
| "grad_norm": 15.575263977050781, |
| "learning_rate": 5.4940919037199125e-05, |
| "loss": 13.2174, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.47297954133710396, |
| "grad_norm": 16.528423309326172, |
| "learning_rate": 5.4868708971553615e-05, |
| "loss": 14.258, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.4736865361672043, |
| "grad_norm": 18.095470428466797, |
| "learning_rate": 5.47964989059081e-05, |
| "loss": 13.604, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.4743935309973046, |
| "grad_norm": 19.37974739074707, |
| "learning_rate": 5.472428884026259e-05, |
| "loss": 14.3852, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.47510052582740486, |
| "grad_norm": 17.618635177612305, |
| "learning_rate": 5.465207877461706e-05, |
| "loss": 13.0826, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.4758075206575052, |
| "grad_norm": 15.718649864196777, |
| "learning_rate": 5.457986870897156e-05, |
| "loss": 14.0187, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.4765145154876055, |
| "grad_norm": 15.932500839233398, |
| "learning_rate": 5.450765864332605e-05, |
| "loss": 13.1078, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.4772215103177058, |
| "grad_norm": 16.64781951904297, |
| "learning_rate": 5.4435448577680524e-05, |
| "loss": 13.7169, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.4779285051478061, |
| "grad_norm": 16.83970069885254, |
| "learning_rate": 5.4363238512035014e-05, |
| "loss": 12.9768, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.4786354999779064, |
| "grad_norm": 17.58168601989746, |
| "learning_rate": 5.4291028446389496e-05, |
| "loss": 13.2531, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.4793424948080067, |
| "grad_norm": 16.762788772583008, |
| "learning_rate": 5.4218818380743986e-05, |
| "loss": 12.9015, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.480049489638107, |
| "grad_norm": 17.409942626953125, |
| "learning_rate": 5.414660831509847e-05, |
| "loss": 14.3195, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.48075648446820735, |
| "grad_norm": 17.278255462646484, |
| "learning_rate": 5.407439824945296e-05, |
| "loss": 13.7849, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.48146347929830763, |
| "grad_norm": 15.23432731628418, |
| "learning_rate": 5.4002188183807434e-05, |
| "loss": 13.4574, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.4821704741284079, |
| "grad_norm": 15.967668533325195, |
| "learning_rate": 5.392997811816192e-05, |
| "loss": 14.7562, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.48287746895850825, |
| "grad_norm": 17.900205612182617, |
| "learning_rate": 5.385776805251642e-05, |
| "loss": 14.5042, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.48358446378860853, |
| "grad_norm": 20.11735725402832, |
| "learning_rate": 5.3785557986870895e-05, |
| "loss": 14.3391, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.48429145861870887, |
| "grad_norm": 18.700807571411133, |
| "learning_rate": 5.3713347921225384e-05, |
| "loss": 14.819, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.48499845344880915, |
| "grad_norm": 18.93809700012207, |
| "learning_rate": 5.364113785557987e-05, |
| "loss": 13.8124, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.48570544827890944, |
| "grad_norm": 16.458322525024414, |
| "learning_rate": 5.3568927789934357e-05, |
| "loss": 12.7255, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.4864124431090098, |
| "grad_norm": 18.722389221191406, |
| "learning_rate": 5.349671772428884e-05, |
| "loss": 12.7826, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.48711943793911006, |
| "grad_norm": 16.748310089111328, |
| "learning_rate": 5.342450765864333e-05, |
| "loss": 15.2382, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.4878264327692104, |
| "grad_norm": 17.008487701416016, |
| "learning_rate": 5.335229759299782e-05, |
| "loss": 13.056, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.4885334275993107, |
| "grad_norm": 18.254348754882812, |
| "learning_rate": 5.3280087527352294e-05, |
| "loss": 13.7845, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.48924042242941096, |
| "grad_norm": 18.541841506958008, |
| "learning_rate": 5.320787746170679e-05, |
| "loss": 12.7892, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.4899474172595113, |
| "grad_norm": 19.059993743896484, |
| "learning_rate": 5.3135667396061266e-05, |
| "loss": 13.726, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.4906544120896116, |
| "grad_norm": 17.068769454956055, |
| "learning_rate": 5.3063457330415755e-05, |
| "loss": 14.1355, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.4913614069197119, |
| "grad_norm": 16.517131805419922, |
| "learning_rate": 5.299124726477024e-05, |
| "loss": 13.899, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.4920684017498122, |
| "grad_norm": 18.925899505615234, |
| "learning_rate": 5.291903719912473e-05, |
| "loss": 13.7766, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.4927753965799125, |
| "grad_norm": 17.37291717529297, |
| "learning_rate": 5.284682713347922e-05, |
| "loss": 15.1303, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.4934823914100128, |
| "grad_norm": 17.28036117553711, |
| "learning_rate": 5.27746170678337e-05, |
| "loss": 13.8902, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.4941893862401131, |
| "grad_norm": 16.070369720458984, |
| "learning_rate": 5.270240700218819e-05, |
| "loss": 13.8348, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.49489638107021344, |
| "grad_norm": 15.887845039367676, |
| "learning_rate": 5.2630196936542665e-05, |
| "loss": 13.2942, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.4956033759003137, |
| "grad_norm": 18.914541244506836, |
| "learning_rate": 5.255798687089716e-05, |
| "loss": 12.6842, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.496310370730414, |
| "grad_norm": 19.618558883666992, |
| "learning_rate": 5.248577680525164e-05, |
| "loss": 14.2209, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.49701736556051435, |
| "grad_norm": 19.322799682617188, |
| "learning_rate": 5.2413566739606126e-05, |
| "loss": 13.0114, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.49772436039061463, |
| "grad_norm": 17.97751235961914, |
| "learning_rate": 5.2341356673960616e-05, |
| "loss": 15.5439, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.49843135522071497, |
| "grad_norm": 16.378734588623047, |
| "learning_rate": 5.22691466083151e-05, |
| "loss": 13.4201, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.49913835005081525, |
| "grad_norm": 19.820524215698242, |
| "learning_rate": 5.219693654266959e-05, |
| "loss": 13.839, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.49984534488091553, |
| "grad_norm": 16.8355655670166, |
| "learning_rate": 5.212472647702407e-05, |
| "loss": 15.2506, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.5005523397110159, |
| "grad_norm": 16.595901489257812, |
| "learning_rate": 5.205251641137856e-05, |
| "loss": 15.0275, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.5012593345411162, |
| "grad_norm": 15.100963592529297, |
| "learning_rate": 5.1980306345733036e-05, |
| "loss": 12.7207, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.5019663293712164, |
| "grad_norm": 19.29062271118164, |
| "learning_rate": 5.190809628008753e-05, |
| "loss": 14.1738, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.5026733242013168, |
| "grad_norm": 14.703215599060059, |
| "learning_rate": 5.183588621444202e-05, |
| "loss": 12.9587, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.5033803190314171, |
| "grad_norm": 20.79590606689453, |
| "learning_rate": 5.17636761487965e-05, |
| "loss": 13.2206, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.5040873138615174, |
| "grad_norm": 20.551342010498047, |
| "learning_rate": 5.1691466083150987e-05, |
| "loss": 14.2096, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.5047943086916177, |
| "grad_norm": 16.27815055847168, |
| "learning_rate": 5.161925601750547e-05, |
| "loss": 13.7653, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.505501303521718, |
| "grad_norm": 15.842145919799805, |
| "learning_rate": 5.154704595185996e-05, |
| "loss": 13.535, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.5062082983518184, |
| "grad_norm": 15.07165241241455, |
| "learning_rate": 5.147483588621444e-05, |
| "loss": 13.2622, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.5069152931819186, |
| "grad_norm": 17.643245697021484, |
| "learning_rate": 5.140262582056893e-05, |
| "loss": 13.8885, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.5076222880120189, |
| "grad_norm": 19.250755310058594, |
| "learning_rate": 5.1330415754923407e-05, |
| "loss": 13.8963, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.5083292828421192, |
| "grad_norm": 19.744308471679688, |
| "learning_rate": 5.12582056892779e-05, |
| "loss": 13.7372, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.5090362776722195, |
| "grad_norm": 18.203929901123047, |
| "learning_rate": 5.118599562363239e-05, |
| "loss": 13.5853, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.5097432725023199, |
| "grad_norm": 16.226526260375977, |
| "learning_rate": 5.111378555798687e-05, |
| "loss": 12.8439, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.5104502673324202, |
| "grad_norm": 15.358694076538086, |
| "learning_rate": 5.104157549234136e-05, |
| "loss": 13.431, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.5111572621625204, |
| "grad_norm": 17.199031829833984, |
| "learning_rate": 5.096936542669584e-05, |
| "loss": 13.7108, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.5118642569926207, |
| "grad_norm": 25.309284210205078, |
| "learning_rate": 5.089715536105033e-05, |
| "loss": 11.9808, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.512571251822721, |
| "grad_norm": 14.532613754272461, |
| "learning_rate": 5.082494529540481e-05, |
| "loss": 13.4445, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.5132782466528214, |
| "grad_norm": 15.828657150268555, |
| "learning_rate": 5.07527352297593e-05, |
| "loss": 13.6649, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.5139852414829217, |
| "grad_norm": 17.062015533447266, |
| "learning_rate": 5.068052516411379e-05, |
| "loss": 13.5041, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.514692236313022, |
| "grad_norm": 17.339509963989258, |
| "learning_rate": 5.0608315098468274e-05, |
| "loss": 14.0583, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.5153992311431222, |
| "grad_norm": 18.00756072998047, |
| "learning_rate": 5.053610503282276e-05, |
| "loss": 14.3492, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.5161062259732225, |
| "grad_norm": 15.429224967956543, |
| "learning_rate": 5.0463894967177246e-05, |
| "loss": 12.9663, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.5168132208033229, |
| "grad_norm": 20.52001190185547, |
| "learning_rate": 5.039168490153173e-05, |
| "loss": 12.6214, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.5175202156334232, |
| "grad_norm": 18.917030334472656, |
| "learning_rate": 5.031947483588622e-05, |
| "loss": 14.0433, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.5182272104635235, |
| "grad_norm": 18.37959098815918, |
| "learning_rate": 5.02472647702407e-05, |
| "loss": 13.7528, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.5189342052936238, |
| "grad_norm": 16.279067993164062, |
| "learning_rate": 5.017505470459518e-05, |
| "loss": 13.9975, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.5196412001237241, |
| "grad_norm": 15.994711875915527, |
| "learning_rate": 5.010284463894967e-05, |
| "loss": 12.3636, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.5203481949538244, |
| "grad_norm": 17.147029876708984, |
| "learning_rate": 5.0030634573304155e-05, |
| "loss": 13.8101, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.5210551897839247, |
| "grad_norm": 17.469881057739258, |
| "learning_rate": 4.9958424507658645e-05, |
| "loss": 12.6908, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.521762184614025, |
| "grad_norm": 17.51668930053711, |
| "learning_rate": 4.9886214442013134e-05, |
| "loss": 13.9891, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.5224691794441253, |
| "grad_norm": 15.285223007202148, |
| "learning_rate": 4.9814004376367617e-05, |
| "loss": 13.1897, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.5231761742742256, |
| "grad_norm": 16.82007598876953, |
| "learning_rate": 4.97417943107221e-05, |
| "loss": 13.274, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.523883169104326, |
| "grad_norm": 16.825483322143555, |
| "learning_rate": 4.966958424507659e-05, |
| "loss": 13.3952, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.5245901639344263, |
| "grad_norm": 14.855537414550781, |
| "learning_rate": 4.959737417943107e-05, |
| "loss": 11.8834, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.5252971587645265, |
| "grad_norm": 17.55827522277832, |
| "learning_rate": 4.9525164113785554e-05, |
| "loss": 13.2329, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.5260041535946268, |
| "grad_norm": 18.297609329223633, |
| "learning_rate": 4.945295404814004e-05, |
| "loss": 12.6993, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.5267111484247271, |
| "grad_norm": 16.5467472076416, |
| "learning_rate": 4.938074398249453e-05, |
| "loss": 13.5798, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.5274181432548275, |
| "grad_norm": 22.284879684448242, |
| "learning_rate": 4.9308533916849015e-05, |
| "loss": 13.9687, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.5281251380849278, |
| "grad_norm": 17.486873626708984, |
| "learning_rate": 4.9236323851203505e-05, |
| "loss": 14.5825, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.5288321329150281, |
| "grad_norm": 16.19643783569336, |
| "learning_rate": 4.916411378555799e-05, |
| "loss": 13.7465, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.5295391277451283, |
| "grad_norm": 18.479318618774414, |
| "learning_rate": 4.909190371991247e-05, |
| "loss": 12.8109, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.5302461225752286, |
| "grad_norm": 17.267440795898438, |
| "learning_rate": 4.901969365426696e-05, |
| "loss": 13.4236, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.530953117405329, |
| "grad_norm": 16.307403564453125, |
| "learning_rate": 4.894748358862144e-05, |
| "loss": 12.6858, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.5316601122354293, |
| "grad_norm": 14.105761528015137, |
| "learning_rate": 4.887527352297593e-05, |
| "loss": 14.5454, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.5323671070655296, |
| "grad_norm": 19.452232360839844, |
| "learning_rate": 4.880306345733042e-05, |
| "loss": 12.3663, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.5330741018956299, |
| "grad_norm": 16.186485290527344, |
| "learning_rate": 4.8730853391684904e-05, |
| "loss": 13.1737, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.5337810967257302, |
| "grad_norm": 15.852377891540527, |
| "learning_rate": 4.8658643326039386e-05, |
| "loss": 12.8635, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.5344880915558305, |
| "grad_norm": 16.142702102661133, |
| "learning_rate": 4.8586433260393876e-05, |
| "loss": 13.1662, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.5351950863859308, |
| "grad_norm": 17.95094871520996, |
| "learning_rate": 4.851422319474836e-05, |
| "loss": 12.9467, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.5359020812160311, |
| "grad_norm": 15.025976181030273, |
| "learning_rate": 4.844201312910284e-05, |
| "loss": 11.8638, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.5366090760461314, |
| "grad_norm": 16.33597183227539, |
| "learning_rate": 4.836980306345733e-05, |
| "loss": 14.2383, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.5373160708762317, |
| "grad_norm": 18.008317947387695, |
| "learning_rate": 4.829759299781182e-05, |
| "loss": 13.0342, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.5380230657063321, |
| "grad_norm": 18.021818161010742, |
| "learning_rate": 4.82253829321663e-05, |
| "loss": 13.9605, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.5387300605364324, |
| "grad_norm": 17.876670837402344, |
| "learning_rate": 4.815317286652079e-05, |
| "loss": 12.6084, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.5394370553665326, |
| "grad_norm": 17.02657699584961, |
| "learning_rate": 4.8080962800875275e-05, |
| "loss": 14.9515, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.5401440501966329, |
| "grad_norm": 19.892004013061523, |
| "learning_rate": 4.800875273522976e-05, |
| "loss": 14.049, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.5408510450267332, |
| "grad_norm": 15.48623275756836, |
| "learning_rate": 4.793654266958425e-05, |
| "loss": 12.996, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.5415580398568336, |
| "grad_norm": 15.053301811218262, |
| "learning_rate": 4.786433260393873e-05, |
| "loss": 11.4286, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.5422650346869339, |
| "grad_norm": 18.168964385986328, |
| "learning_rate": 4.779212253829322e-05, |
| "loss": 14.5376, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.5429720295170342, |
| "grad_norm": 15.425690650939941, |
| "learning_rate": 4.771991247264771e-05, |
| "loss": 12.4469, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.5436790243471344, |
| "grad_norm": 14.769143104553223, |
| "learning_rate": 4.764770240700219e-05, |
| "loss": 13.4108, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.5443860191772347, |
| "grad_norm": 15.65718936920166, |
| "learning_rate": 4.757549234135667e-05, |
| "loss": 12.8447, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.5450930140073351, |
| "grad_norm": 14.275728225708008, |
| "learning_rate": 4.750328227571116e-05, |
| "loss": 12.6218, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.5458000088374354, |
| "grad_norm": 15.515166282653809, |
| "learning_rate": 4.7431072210065645e-05, |
| "loss": 13.6347, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.5465070036675357, |
| "grad_norm": 17.69280242919922, |
| "learning_rate": 4.735886214442013e-05, |
| "loss": 12.6965, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.547213998497636, |
| "grad_norm": 16.940570831298828, |
| "learning_rate": 4.728665207877462e-05, |
| "loss": 14.2508, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.5479209933277362, |
| "grad_norm": 14.886067390441895, |
| "learning_rate": 4.721444201312911e-05, |
| "loss": 12.0344, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.5486279881578366, |
| "grad_norm": 14.371471405029297, |
| "learning_rate": 4.714223194748359e-05, |
| "loss": 12.4236, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.5493349829879369, |
| "grad_norm": 15.021502494812012, |
| "learning_rate": 4.707002188183808e-05, |
| "loss": 12.5939, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.5500419778180372, |
| "grad_norm": 16.001462936401367, |
| "learning_rate": 4.699781181619256e-05, |
| "loss": 12.8323, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.5507489726481375, |
| "grad_norm": 16.28582191467285, |
| "learning_rate": 4.6925601750547044e-05, |
| "loss": 14.4682, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.5514559674782378, |
| "grad_norm": 16.198036193847656, |
| "learning_rate": 4.6853391684901534e-05, |
| "loss": 12.7082, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.5521629623083382, |
| "grad_norm": 15.891390800476074, |
| "learning_rate": 4.6781181619256016e-05, |
| "loss": 12.4412, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.5528699571384385, |
| "grad_norm": 19.185029983520508, |
| "learning_rate": 4.6708971553610506e-05, |
| "loss": 13.2326, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.5535769519685387, |
| "grad_norm": 16.253828048706055, |
| "learning_rate": 4.663676148796499e-05, |
| "loss": 13.2036, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.554283946798639, |
| "grad_norm": 19.45000648498535, |
| "learning_rate": 4.656455142231948e-05, |
| "loss": 14.3846, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.5549909416287393, |
| "grad_norm": 15.610483169555664, |
| "learning_rate": 4.649234135667396e-05, |
| "loss": 11.9146, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.5556979364588397, |
| "grad_norm": 15.691834449768066, |
| "learning_rate": 4.642013129102844e-05, |
| "loss": 14.6285, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.55640493128894, |
| "grad_norm": 15.287580490112305, |
| "learning_rate": 4.634792122538293e-05, |
| "loss": 13.8478, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.5571119261190403, |
| "grad_norm": 16.664316177368164, |
| "learning_rate": 4.6275711159737415e-05, |
| "loss": 13.5122, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.5578189209491405, |
| "grad_norm": 17.661788940429688, |
| "learning_rate": 4.6203501094091905e-05, |
| "loss": 13.331, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.5585259157792408, |
| "grad_norm": 16.202898025512695, |
| "learning_rate": 4.6131291028446394e-05, |
| "loss": 12.4447, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.5592329106093412, |
| "grad_norm": 18.325153350830078, |
| "learning_rate": 4.605908096280088e-05, |
| "loss": 14.4831, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.5599399054394415, |
| "grad_norm": 17.563846588134766, |
| "learning_rate": 4.598687089715536e-05, |
| "loss": 12.4172, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.5606469002695418, |
| "grad_norm": 17.11208724975586, |
| "learning_rate": 4.591466083150985e-05, |
| "loss": 13.6366, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.5613538950996421, |
| "grad_norm": 16.057981491088867, |
| "learning_rate": 4.584245076586433e-05, |
| "loss": 12.2651, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.5620608899297423, |
| "grad_norm": 19.833967208862305, |
| "learning_rate": 4.5770240700218814e-05, |
| "loss": 15.0997, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.5627678847598427, |
| "grad_norm": 15.77823257446289, |
| "learning_rate": 4.5698030634573303e-05, |
| "loss": 12.1512, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.563474879589943, |
| "grad_norm": 18.81294059753418, |
| "learning_rate": 4.562582056892779e-05, |
| "loss": 12.9248, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.5641818744200433, |
| "grad_norm": 15.902440071105957, |
| "learning_rate": 4.5553610503282275e-05, |
| "loss": 12.3549, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.5648888692501436, |
| "grad_norm": 15.62425422668457, |
| "learning_rate": 4.5481400437636765e-05, |
| "loss": 13.4902, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.5655958640802439, |
| "grad_norm": 17.2721004486084, |
| "learning_rate": 4.540919037199125e-05, |
| "loss": 14.88, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.5663028589103443, |
| "grad_norm": 17.061033248901367, |
| "learning_rate": 4.533698030634573e-05, |
| "loss": 13.0052, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.5670098537404445, |
| "grad_norm": 15.536235809326172, |
| "learning_rate": 4.526477024070022e-05, |
| "loss": 13.4578, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.5677168485705448, |
| "grad_norm": 17.601093292236328, |
| "learning_rate": 4.51925601750547e-05, |
| "loss": 14.9889, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.5684238434006451, |
| "grad_norm": 15.94250202178955, |
| "learning_rate": 4.512035010940919e-05, |
| "loss": 13.9519, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.5691308382307454, |
| "grad_norm": 17.229337692260742, |
| "learning_rate": 4.504814004376368e-05, |
| "loss": 14.2852, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.5698378330608458, |
| "grad_norm": 19.297306060791016, |
| "learning_rate": 4.4975929978118164e-05, |
| "loss": 12.9865, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.5705448278909461, |
| "grad_norm": 17.727935791015625, |
| "learning_rate": 4.4903719912472646e-05, |
| "loss": 12.9064, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.5712518227210464, |
| "grad_norm": 15.566085815429688, |
| "learning_rate": 4.4831509846827136e-05, |
| "loss": 13.8597, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.5719588175511466, |
| "grad_norm": 17.039579391479492, |
| "learning_rate": 4.475929978118162e-05, |
| "loss": 12.2828, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.5726658123812469, |
| "grad_norm": 17.201379776000977, |
| "learning_rate": 4.46870897155361e-05, |
| "loss": 13.0608, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.5733728072113473, |
| "grad_norm": 17.841808319091797, |
| "learning_rate": 4.461487964989059e-05, |
| "loss": 14.4647, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.5740798020414476, |
| "grad_norm": 17.14201545715332, |
| "learning_rate": 4.454266958424508e-05, |
| "loss": 14.1853, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.5747867968715479, |
| "grad_norm": 14.405500411987305, |
| "learning_rate": 4.447045951859956e-05, |
| "loss": 13.2748, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.5754937917016482, |
| "grad_norm": 15.193861961364746, |
| "learning_rate": 4.439824945295405e-05, |
| "loss": 12.3332, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.5762007865317484, |
| "grad_norm": 14.01460075378418, |
| "learning_rate": 4.4326039387308535e-05, |
| "loss": 13.8787, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.5769077813618488, |
| "grad_norm": 15.624826431274414, |
| "learning_rate": 4.425382932166302e-05, |
| "loss": 13.7502, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.5776147761919491, |
| "grad_norm": 18.81705093383789, |
| "learning_rate": 4.418161925601751e-05, |
| "loss": 12.6278, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.5783217710220494, |
| "grad_norm": 15.476762771606445, |
| "learning_rate": 4.410940919037199e-05, |
| "loss": 12.5758, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.5790287658521497, |
| "grad_norm": 16.792394638061523, |
| "learning_rate": 4.403719912472648e-05, |
| "loss": 13.3397, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.57973576068225, |
| "grad_norm": 15.57129192352295, |
| "learning_rate": 4.396498905908097e-05, |
| "loss": 14.78, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.5804427555123504, |
| "grad_norm": 15.04116153717041, |
| "learning_rate": 4.389277899343545e-05, |
| "loss": 15.0815, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.5811497503424506, |
| "grad_norm": 14.832640647888184, |
| "learning_rate": 4.3820568927789933e-05, |
| "loss": 12.3045, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.5818567451725509, |
| "grad_norm": 16.435665130615234, |
| "learning_rate": 4.374835886214442e-05, |
| "loss": 13.0494, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.5825637400026512, |
| "grad_norm": 15.572066307067871, |
| "learning_rate": 4.3676148796498905e-05, |
| "loss": 12.4681, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.5832707348327515, |
| "grad_norm": 16.7429141998291, |
| "learning_rate": 4.360393873085339e-05, |
| "loss": 12.9876, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.5839777296628519, |
| "grad_norm": 15.126506805419922, |
| "learning_rate": 4.353172866520788e-05, |
| "loss": 14.0403, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.5846847244929522, |
| "grad_norm": 16.73342514038086, |
| "learning_rate": 4.345951859956237e-05, |
| "loss": 13.5968, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.5853917193230525, |
| "grad_norm": 16.196666717529297, |
| "learning_rate": 4.338730853391685e-05, |
| "loss": 12.7303, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.5860987141531527, |
| "grad_norm": 15.55926513671875, |
| "learning_rate": 4.331509846827133e-05, |
| "loss": 13.6608, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.586805708983253, |
| "grad_norm": 16.581199645996094, |
| "learning_rate": 4.324288840262582e-05, |
| "loss": 13.3497, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.5875127038133534, |
| "grad_norm": 18.875598907470703, |
| "learning_rate": 4.3170678336980304e-05, |
| "loss": 14.7135, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.5882196986434537, |
| "grad_norm": 17.710857391357422, |
| "learning_rate": 4.3098468271334794e-05, |
| "loss": 12.4087, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.588926693473554, |
| "grad_norm": 18.93647575378418, |
| "learning_rate": 4.3026258205689276e-05, |
| "loss": 14.614, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.5896336883036543, |
| "grad_norm": 17.00315284729004, |
| "learning_rate": 4.2954048140043766e-05, |
| "loss": 13.7616, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.5903406831337545, |
| "grad_norm": 19.230234146118164, |
| "learning_rate": 4.288183807439825e-05, |
| "loss": 13.9937, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.5910476779638549, |
| "grad_norm": 18.548187255859375, |
| "learning_rate": 4.280962800875274e-05, |
| "loss": 12.0333, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.5917546727939552, |
| "grad_norm": 20.600353240966797, |
| "learning_rate": 4.273741794310722e-05, |
| "loss": 13.4433, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.5924616676240555, |
| "grad_norm": 17.80040740966797, |
| "learning_rate": 4.26652078774617e-05, |
| "loss": 13.8373, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.5931686624541558, |
| "grad_norm": 15.60269832611084, |
| "learning_rate": 4.259299781181619e-05, |
| "loss": 13.4195, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.5938756572842561, |
| "grad_norm": 24.892202377319336, |
| "learning_rate": 4.2520787746170675e-05, |
| "loss": 12.2826, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.5945826521143565, |
| "grad_norm": 16.01949119567871, |
| "learning_rate": 4.2448577680525165e-05, |
| "loss": 14.2682, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.5952896469444567, |
| "grad_norm": 17.417011260986328, |
| "learning_rate": 4.2376367614879654e-05, |
| "loss": 14.0882, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.595996641774557, |
| "grad_norm": 17.576231002807617, |
| "learning_rate": 4.230415754923414e-05, |
| "loss": 13.4012, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.5967036366046573, |
| "grad_norm": 16.846078872680664, |
| "learning_rate": 4.223194748358862e-05, |
| "loss": 13.3299, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.5974106314347576, |
| "grad_norm": 18.340309143066406, |
| "learning_rate": 4.215973741794311e-05, |
| "loss": 13.7724, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.598117626264858, |
| "grad_norm": 16.44016456604004, |
| "learning_rate": 4.208752735229759e-05, |
| "loss": 13.7315, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.5988246210949583, |
| "grad_norm": 19.964340209960938, |
| "learning_rate": 4.2015317286652074e-05, |
| "loss": 15.7788, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.5995316159250585, |
| "grad_norm": 14.742883682250977, |
| "learning_rate": 4.1943107221006563e-05, |
| "loss": 13.6036, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.6002386107551588, |
| "grad_norm": 15.614455223083496, |
| "learning_rate": 4.187089715536105e-05, |
| "loss": 14.4348, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.6009456055852591, |
| "grad_norm": 20.77018928527832, |
| "learning_rate": 4.1798687089715536e-05, |
| "loss": 12.572, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.6016526004153595, |
| "grad_norm": 16.104507446289062, |
| "learning_rate": 4.1726477024070025e-05, |
| "loss": 14.0879, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.6023595952454598, |
| "grad_norm": 18.730669021606445, |
| "learning_rate": 4.165426695842451e-05, |
| "loss": 14.3006, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.6030665900755601, |
| "grad_norm": 16.253700256347656, |
| "learning_rate": 4.158205689277899e-05, |
| "loss": 12.9148, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.6037735849056604, |
| "grad_norm": 15.1681547164917, |
| "learning_rate": 4.150984682713348e-05, |
| "loss": 13.2516, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.6044805797357606, |
| "grad_norm": 16.01544952392578, |
| "learning_rate": 4.143763676148796e-05, |
| "loss": 13.3555, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.605187574565861, |
| "grad_norm": 18.961162567138672, |
| "learning_rate": 4.136542669584245e-05, |
| "loss": 11.8692, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.6058945693959613, |
| "grad_norm": 16.640958786010742, |
| "learning_rate": 4.129321663019694e-05, |
| "loss": 11.0722, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.6066015642260616, |
| "grad_norm": 17.749479293823242, |
| "learning_rate": 4.1221006564551424e-05, |
| "loss": 14.4611, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.6073085590561619, |
| "grad_norm": 18.945444107055664, |
| "learning_rate": 4.1148796498905906e-05, |
| "loss": 14.5662, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.6080155538862622, |
| "grad_norm": 16.346588134765625, |
| "learning_rate": 4.1076586433260396e-05, |
| "loss": 13.4941, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.6087225487163626, |
| "grad_norm": 18.323013305664062, |
| "learning_rate": 4.100437636761488e-05, |
| "loss": 12.2041, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.6094295435464628, |
| "grad_norm": 18.375028610229492, |
| "learning_rate": 4.093216630196936e-05, |
| "loss": 13.5977, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.6101365383765631, |
| "grad_norm": 16.597517013549805, |
| "learning_rate": 4.085995623632386e-05, |
| "loss": 13.6122, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.6108435332066634, |
| "grad_norm": 15.910663604736328, |
| "learning_rate": 4.078774617067834e-05, |
| "loss": 13.3603, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.6115505280367637, |
| "grad_norm": 20.180830001831055, |
| "learning_rate": 4.071553610503282e-05, |
| "loss": 12.6904, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.6122575228668641, |
| "grad_norm": 17.37298011779785, |
| "learning_rate": 4.064332603938731e-05, |
| "loss": 13.7586, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.6129645176969644, |
| "grad_norm": 19.180036544799805, |
| "learning_rate": 4.0571115973741795e-05, |
| "loss": 13.3099, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.6136715125270646, |
| "grad_norm": 16.653715133666992, |
| "learning_rate": 4.049890590809628e-05, |
| "loss": 14.2502, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.6143785073571649, |
| "grad_norm": 16.960859298706055, |
| "learning_rate": 4.042669584245077e-05, |
| "loss": 14.3287, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.6150855021872652, |
| "grad_norm": 16.48638153076172, |
| "learning_rate": 4.035448577680525e-05, |
| "loss": 12.0678, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.6157924970173656, |
| "grad_norm": 14.477529525756836, |
| "learning_rate": 4.028227571115974e-05, |
| "loss": 14.2217, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.6164994918474659, |
| "grad_norm": 16.016817092895508, |
| "learning_rate": 4.021006564551423e-05, |
| "loss": 13.3089, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.6172064866775662, |
| "grad_norm": 15.40858268737793, |
| "learning_rate": 4.013785557986871e-05, |
| "loss": 14.2675, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.6179134815076665, |
| "grad_norm": 17.52202796936035, |
| "learning_rate": 4.0065645514223193e-05, |
| "loss": 13.6666, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.6186204763377667, |
| "grad_norm": 16.316030502319336, |
| "learning_rate": 3.999343544857768e-05, |
| "loss": 13.3957, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.6193274711678671, |
| "grad_norm": 16.079437255859375, |
| "learning_rate": 3.9921225382932166e-05, |
| "loss": 12.3198, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.6200344659979674, |
| "grad_norm": 15.243290901184082, |
| "learning_rate": 3.984901531728665e-05, |
| "loss": 12.659, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.6207414608280677, |
| "grad_norm": 17.72751808166504, |
| "learning_rate": 3.977680525164114e-05, |
| "loss": 13.4509, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.621448455658168, |
| "grad_norm": 16.8057918548584, |
| "learning_rate": 3.970459518599563e-05, |
| "loss": 11.7917, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.6221554504882683, |
| "grad_norm": 16.497268676757812, |
| "learning_rate": 3.963238512035011e-05, |
| "loss": 13.8699, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.6228624453183687, |
| "grad_norm": 16.132577896118164, |
| "learning_rate": 3.956017505470459e-05, |
| "loss": 13.2419, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.6235694401484689, |
| "grad_norm": 17.70035171508789, |
| "learning_rate": 3.948796498905908e-05, |
| "loss": 13.3996, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.6242764349785692, |
| "grad_norm": 16.91880989074707, |
| "learning_rate": 3.9415754923413564e-05, |
| "loss": 13.4762, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.6249834298086695, |
| "grad_norm": 15.370857238769531, |
| "learning_rate": 3.9343544857768054e-05, |
| "loss": 13.7917, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.6256904246387698, |
| "grad_norm": 15.553352355957031, |
| "learning_rate": 3.9271334792122536e-05, |
| "loss": 11.7293, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.6263974194688702, |
| "grad_norm": 16.794897079467773, |
| "learning_rate": 3.9199124726477026e-05, |
| "loss": 14.1652, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.6271044142989705, |
| "grad_norm": 15.572064399719238, |
| "learning_rate": 3.912691466083151e-05, |
| "loss": 12.5518, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.6278114091290707, |
| "grad_norm": 15.595902442932129, |
| "learning_rate": 3.9054704595186e-05, |
| "loss": 13.406, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.628518403959171, |
| "grad_norm": 17.84473991394043, |
| "learning_rate": 3.898249452954048e-05, |
| "loss": 12.8387, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.6292253987892713, |
| "grad_norm": 17.220897674560547, |
| "learning_rate": 3.891028446389496e-05, |
| "loss": 14.2772, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.6299323936193717, |
| "grad_norm": 17.487550735473633, |
| "learning_rate": 3.883807439824945e-05, |
| "loss": 13.5513, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.630639388449472, |
| "grad_norm": 14.752918243408203, |
| "learning_rate": 3.8765864332603935e-05, |
| "loss": 13.3264, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.6313463832795723, |
| "grad_norm": 17.476144790649414, |
| "learning_rate": 3.8693654266958425e-05, |
| "loss": 13.0027, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.6320533781096725, |
| "grad_norm": 19.83499526977539, |
| "learning_rate": 3.8621444201312914e-05, |
| "loss": 13.8219, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.6327603729397728, |
| "grad_norm": 16.396671295166016, |
| "learning_rate": 3.85492341356674e-05, |
| "loss": 12.0832, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.6334673677698732, |
| "grad_norm": 17.362865447998047, |
| "learning_rate": 3.847702407002188e-05, |
| "loss": 13.1703, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.6341743625999735, |
| "grad_norm": 18.174884796142578, |
| "learning_rate": 3.840481400437637e-05, |
| "loss": 12.7365, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.6348813574300738, |
| "grad_norm": 15.047250747680664, |
| "learning_rate": 3.833260393873085e-05, |
| "loss": 12.9793, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.6355883522601741, |
| "grad_norm": 18.287412643432617, |
| "learning_rate": 3.8260393873085334e-05, |
| "loss": 13.649, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.6362953470902744, |
| "grad_norm": 15.475159645080566, |
| "learning_rate": 3.818818380743983e-05, |
| "loss": 13.8619, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.6370023419203747, |
| "grad_norm": 17.742231369018555, |
| "learning_rate": 3.811597374179431e-05, |
| "loss": 13.5366, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.637709336750475, |
| "grad_norm": 15.232536315917969, |
| "learning_rate": 3.8043763676148796e-05, |
| "loss": 12.6299, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.6384163315805753, |
| "grad_norm": 15.838523864746094, |
| "learning_rate": 3.7971553610503285e-05, |
| "loss": 12.1131, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.6391233264106756, |
| "grad_norm": 15.444711685180664, |
| "learning_rate": 3.789934354485777e-05, |
| "loss": 13.2765, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.6398303212407759, |
| "grad_norm": 15.970074653625488, |
| "learning_rate": 3.782713347921225e-05, |
| "loss": 12.0993, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.6405373160708763, |
| "grad_norm": 14.429159164428711, |
| "learning_rate": 3.775492341356674e-05, |
| "loss": 12.4137, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.6412443109009766, |
| "grad_norm": 17.08701515197754, |
| "learning_rate": 3.768271334792122e-05, |
| "loss": 12.2321, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.6419513057310768, |
| "grad_norm": 17.066822052001953, |
| "learning_rate": 3.761050328227571e-05, |
| "loss": 13.6998, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.6426583005611771, |
| "grad_norm": 15.362284660339355, |
| "learning_rate": 3.75382932166302e-05, |
| "loss": 11.0648, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.6433652953912774, |
| "grad_norm": 17.06339454650879, |
| "learning_rate": 3.7466083150984684e-05, |
| "loss": 13.2253, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.6440722902213778, |
| "grad_norm": 14.42895221710205, |
| "learning_rate": 3.7393873085339166e-05, |
| "loss": 12.1794, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.6447792850514781, |
| "grad_norm": 15.388317108154297, |
| "learning_rate": 3.7321663019693656e-05, |
| "loss": 12.4478, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.6454862798815784, |
| "grad_norm": 14.884230613708496, |
| "learning_rate": 3.724945295404814e-05, |
| "loss": 12.9629, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.6461932747116786, |
| "grad_norm": 14.791316986083984, |
| "learning_rate": 3.717724288840262e-05, |
| "loss": 13.0299, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.6469002695417789, |
| "grad_norm": 17.605379104614258, |
| "learning_rate": 3.710503282275712e-05, |
| "loss": 13.2825, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.6476072643718793, |
| "grad_norm": 17.050025939941406, |
| "learning_rate": 3.70328227571116e-05, |
| "loss": 13.9545, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.6483142592019796, |
| "grad_norm": 13.724760055541992, |
| "learning_rate": 3.696061269146608e-05, |
| "loss": 12.4339, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.6490212540320799, |
| "grad_norm": 15.219618797302246, |
| "learning_rate": 3.688840262582057e-05, |
| "loss": 12.0493, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.6497282488621802, |
| "grad_norm": 15.740148544311523, |
| "learning_rate": 3.6816192560175055e-05, |
| "loss": 12.2087, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.6504352436922805, |
| "grad_norm": 15.154293060302734, |
| "learning_rate": 3.674398249452954e-05, |
| "loss": 11.8399, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.6511422385223808, |
| "grad_norm": 21.17293930053711, |
| "learning_rate": 3.667177242888403e-05, |
| "loss": 13.5817, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.6518492333524811, |
| "grad_norm": 17.151151657104492, |
| "learning_rate": 3.659956236323851e-05, |
| "loss": 14.2616, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.6525562281825814, |
| "grad_norm": 15.818124771118164, |
| "learning_rate": 3.6527352297593e-05, |
| "loss": 12.9145, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.6532632230126817, |
| "grad_norm": 17.215805053710938, |
| "learning_rate": 3.645514223194749e-05, |
| "loss": 12.8664, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.653970217842782, |
| "grad_norm": 15.398895263671875, |
| "learning_rate": 3.638293216630197e-05, |
| "loss": 13.5099, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.6546772126728824, |
| "grad_norm": 16.104904174804688, |
| "learning_rate": 3.6310722100656454e-05, |
| "loss": 13.3627, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.6553842075029827, |
| "grad_norm": 16.67665672302246, |
| "learning_rate": 3.623851203501094e-05, |
| "loss": 13.7105, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.6560912023330829, |
| "grad_norm": 14.506566047668457, |
| "learning_rate": 3.6166301969365426e-05, |
| "loss": 12.5536, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.6567981971631832, |
| "grad_norm": 15.468367576599121, |
| "learning_rate": 3.609409190371991e-05, |
| "loss": 13.3451, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.6575051919932835, |
| "grad_norm": 17.965709686279297, |
| "learning_rate": 3.60218818380744e-05, |
| "loss": 14.0162, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.6582121868233839, |
| "grad_norm": 14.338950157165527, |
| "learning_rate": 3.594967177242889e-05, |
| "loss": 12.1867, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.6589191816534842, |
| "grad_norm": 14.590508460998535, |
| "learning_rate": 3.587746170678337e-05, |
| "loss": 13.2619, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.6596261764835845, |
| "grad_norm": 15.523008346557617, |
| "learning_rate": 3.580525164113785e-05, |
| "loss": 13.6548, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.6603331713136847, |
| "grad_norm": 17.361780166625977, |
| "learning_rate": 3.573304157549234e-05, |
| "loss": 11.8496, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.661040166143785, |
| "grad_norm": 14.874725341796875, |
| "learning_rate": 3.5660831509846824e-05, |
| "loss": 12.9375, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.6617471609738854, |
| "grad_norm": 15.033089637756348, |
| "learning_rate": 3.5588621444201314e-05, |
| "loss": 11.8268, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.6624541558039857, |
| "grad_norm": 16.188312530517578, |
| "learning_rate": 3.5516411378555796e-05, |
| "loss": 13.1623, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.663161150634086, |
| "grad_norm": 14.005878448486328, |
| "learning_rate": 3.5444201312910286e-05, |
| "loss": 13.1447, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.6638681454641863, |
| "grad_norm": 16.06510353088379, |
| "learning_rate": 3.537199124726477e-05, |
| "loss": 11.294, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.6645751402942865, |
| "grad_norm": 15.923260688781738, |
| "learning_rate": 3.529978118161926e-05, |
| "loss": 13.9193, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.6652821351243869, |
| "grad_norm": 16.1591796875, |
| "learning_rate": 3.522757111597374e-05, |
| "loss": 12.5801, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.6659891299544872, |
| "grad_norm": 18.498062133789062, |
| "learning_rate": 3.515536105032822e-05, |
| "loss": 13.0376, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.6666961247845875, |
| "grad_norm": 16.772415161132812, |
| "learning_rate": 3.508315098468271e-05, |
| "loss": 13.0875, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.6674031196146878, |
| "grad_norm": 15.799750328063965, |
| "learning_rate": 3.5010940919037195e-05, |
| "loss": 12.7625, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.6681101144447881, |
| "grad_norm": 14.897823333740234, |
| "learning_rate": 3.4938730853391685e-05, |
| "loss": 14.2135, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.6688171092748885, |
| "grad_norm": 13.589593887329102, |
| "learning_rate": 3.4866520787746174e-05, |
| "loss": 13.0602, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.6695241041049887, |
| "grad_norm": 15.410765647888184, |
| "learning_rate": 3.479431072210066e-05, |
| "loss": 12.3363, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.670231098935089, |
| "grad_norm": 13.951040267944336, |
| "learning_rate": 3.472210065645514e-05, |
| "loss": 13.05, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.6709380937651893, |
| "grad_norm": 18.58137321472168, |
| "learning_rate": 3.464989059080963e-05, |
| "loss": 12.2522, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.6716450885952896, |
| "grad_norm": 17.50958251953125, |
| "learning_rate": 3.457768052516411e-05, |
| "loss": 12.5067, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.67235208342539, |
| "grad_norm": 17.609472274780273, |
| "learning_rate": 3.4505470459518594e-05, |
| "loss": 12.2965, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.6730590782554903, |
| "grad_norm": 14.07121753692627, |
| "learning_rate": 3.443326039387309e-05, |
| "loss": 11.6223, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.6737660730855906, |
| "grad_norm": 17.91677474975586, |
| "learning_rate": 3.436105032822757e-05, |
| "loss": 13.2656, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.6744730679156908, |
| "grad_norm": 17.619646072387695, |
| "learning_rate": 3.4288840262582056e-05, |
| "loss": 12.0533, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.6751800627457911, |
| "grad_norm": 15.973097801208496, |
| "learning_rate": 3.4216630196936545e-05, |
| "loss": 12.6224, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.6758870575758915, |
| "grad_norm": 16.727569580078125, |
| "learning_rate": 3.414442013129103e-05, |
| "loss": 10.9571, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.6765940524059918, |
| "grad_norm": 15.502710342407227, |
| "learning_rate": 3.407221006564551e-05, |
| "loss": 11.7565, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.6773010472360921, |
| "grad_norm": 15.650954246520996, |
| "learning_rate": 3.4e-05, |
| "loss": 13.8277, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.6780080420661924, |
| "grad_norm": 15.571489334106445, |
| "learning_rate": 3.392778993435448e-05, |
| "loss": 13.1988, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.6787150368962926, |
| "grad_norm": 15.313315391540527, |
| "learning_rate": 3.385557986870897e-05, |
| "loss": 13.5607, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.679422031726393, |
| "grad_norm": 15.986994743347168, |
| "learning_rate": 3.378336980306346e-05, |
| "loss": 13.3168, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.6801290265564933, |
| "grad_norm": 16.067049026489258, |
| "learning_rate": 3.3711159737417944e-05, |
| "loss": 12.8027, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.6808360213865936, |
| "grad_norm": 19.818334579467773, |
| "learning_rate": 3.3638949671772426e-05, |
| "loss": 11.4662, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.6815430162166939, |
| "grad_norm": 17.01699447631836, |
| "learning_rate": 3.3566739606126916e-05, |
| "loss": 13.2136, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.6822500110467942, |
| "grad_norm": 15.616992950439453, |
| "learning_rate": 3.34945295404814e-05, |
| "loss": 12.6188, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.6829570058768946, |
| "grad_norm": 13.695746421813965, |
| "learning_rate": 3.342231947483588e-05, |
| "loss": 11.3794, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.6836640007069948, |
| "grad_norm": 15.043867111206055, |
| "learning_rate": 3.335010940919038e-05, |
| "loss": 11.3308, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.6843709955370951, |
| "grad_norm": 14.187246322631836, |
| "learning_rate": 3.327789934354486e-05, |
| "loss": 13.3129, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.6850779903671954, |
| "grad_norm": 16.36101722717285, |
| "learning_rate": 3.320568927789934e-05, |
| "loss": 14.5099, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.6857849851972957, |
| "grad_norm": 16.514877319335938, |
| "learning_rate": 3.313347921225383e-05, |
| "loss": 11.603, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.6864919800273961, |
| "grad_norm": 17.427112579345703, |
| "learning_rate": 3.3061269146608315e-05, |
| "loss": 12.8277, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.6871989748574964, |
| "grad_norm": 16.44808006286621, |
| "learning_rate": 3.29890590809628e-05, |
| "loss": 12.8675, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.6879059696875967, |
| "grad_norm": 16.135822296142578, |
| "learning_rate": 3.291684901531729e-05, |
| "loss": 11.6851, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.6886129645176969, |
| "grad_norm": 16.02741241455078, |
| "learning_rate": 3.284463894967177e-05, |
| "loss": 13.8546, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.6893199593477972, |
| "grad_norm": 17.733104705810547, |
| "learning_rate": 3.277242888402626e-05, |
| "loss": 13.0497, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.6900269541778976, |
| "grad_norm": 16.3909969329834, |
| "learning_rate": 3.270021881838075e-05, |
| "loss": 11.9032, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.6907339490079979, |
| "grad_norm": 15.314737319946289, |
| "learning_rate": 3.262800875273523e-05, |
| "loss": 13.2063, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.6914409438380982, |
| "grad_norm": 16.040050506591797, |
| "learning_rate": 3.2555798687089714e-05, |
| "loss": 12.6159, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.6921479386681985, |
| "grad_norm": 15.172967910766602, |
| "learning_rate": 3.24835886214442e-05, |
| "loss": 12.5432, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.6928549334982987, |
| "grad_norm": 14.832226753234863, |
| "learning_rate": 3.2411378555798686e-05, |
| "loss": 12.2323, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.6935619283283991, |
| "grad_norm": 16.011014938354492, |
| "learning_rate": 3.233916849015317e-05, |
| "loss": 12.7174, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.6942689231584994, |
| "grad_norm": 15.02815055847168, |
| "learning_rate": 3.226695842450766e-05, |
| "loss": 13.0848, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.6949759179885997, |
| "grad_norm": 16.430984497070312, |
| "learning_rate": 3.219474835886215e-05, |
| "loss": 12.228, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.6956829128187, |
| "grad_norm": 15.233168601989746, |
| "learning_rate": 3.212253829321663e-05, |
| "loss": 14.0058, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.6963899076488003, |
| "grad_norm": 15.990872383117676, |
| "learning_rate": 3.205032822757111e-05, |
| "loss": 13.9359, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.6970969024789007, |
| "grad_norm": 14.508731842041016, |
| "learning_rate": 3.19781181619256e-05, |
| "loss": 13.4266, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.6978038973090009, |
| "grad_norm": 15.588955879211426, |
| "learning_rate": 3.1905908096280084e-05, |
| "loss": 12.4986, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.6985108921391012, |
| "grad_norm": 16.39841651916504, |
| "learning_rate": 3.1833698030634574e-05, |
| "loss": 12.6671, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.6992178869692015, |
| "grad_norm": 15.241385459899902, |
| "learning_rate": 3.176148796498906e-05, |
| "loss": 14.0819, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.6999248817993018, |
| "grad_norm": 17.080127716064453, |
| "learning_rate": 3.1689277899343546e-05, |
| "loss": 12.5114, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.7006318766294022, |
| "grad_norm": 16.860857009887695, |
| "learning_rate": 3.161706783369803e-05, |
| "loss": 13.9591, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.7013388714595025, |
| "grad_norm": 16.064014434814453, |
| "learning_rate": 3.154485776805252e-05, |
| "loss": 13.746, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.7020458662896027, |
| "grad_norm": 16.772647857666016, |
| "learning_rate": 3.1472647702407e-05, |
| "loss": 12.8039, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.702752861119703, |
| "grad_norm": 14.531611442565918, |
| "learning_rate": 3.140043763676149e-05, |
| "loss": 11.8692, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.7034598559498033, |
| "grad_norm": 17.939926147460938, |
| "learning_rate": 3.132822757111597e-05, |
| "loss": 13.6893, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.7041668507799037, |
| "grad_norm": 16.083425521850586, |
| "learning_rate": 3.1256017505470455e-05, |
| "loss": 12.779, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.704873845610004, |
| "grad_norm": 15.42416000366211, |
| "learning_rate": 3.118380743982495e-05, |
| "loss": 12.7348, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.7055808404401043, |
| "grad_norm": 14.658026695251465, |
| "learning_rate": 3.1111597374179434e-05, |
| "loss": 11.8572, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.7062878352702046, |
| "grad_norm": 16.61526107788086, |
| "learning_rate": 3.103938730853392e-05, |
| "loss": 14.1659, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.7069948301003048, |
| "grad_norm": 16.476240158081055, |
| "learning_rate": 3.0967177242888406e-05, |
| "loss": 12.8316, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.7077018249304052, |
| "grad_norm": 15.872008323669434, |
| "learning_rate": 3.089496717724289e-05, |
| "loss": 11.1954, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.7084088197605055, |
| "grad_norm": 15.403485298156738, |
| "learning_rate": 3.082275711159737e-05, |
| "loss": 12.8882, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.7091158145906058, |
| "grad_norm": 16.67386817932129, |
| "learning_rate": 3.075054704595186e-05, |
| "loss": 12.7755, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.7098228094207061, |
| "grad_norm": 15.99714183807373, |
| "learning_rate": 3.067833698030635e-05, |
| "loss": 12.6526, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.7105298042508064, |
| "grad_norm": 13.916271209716797, |
| "learning_rate": 3.060612691466083e-05, |
| "loss": 13.3718, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.7112367990809068, |
| "grad_norm": 15.985695838928223, |
| "learning_rate": 3.053391684901532e-05, |
| "loss": 12.9525, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.711943793911007, |
| "grad_norm": 17.183103561401367, |
| "learning_rate": 3.0461706783369805e-05, |
| "loss": 12.8737, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.7126507887411073, |
| "grad_norm": 14.209635734558105, |
| "learning_rate": 3.038949671772429e-05, |
| "loss": 11.7585, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.7133577835712076, |
| "grad_norm": 15.675783157348633, |
| "learning_rate": 3.0317286652078777e-05, |
| "loss": 13.5172, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.7140647784013079, |
| "grad_norm": 15.367440223693848, |
| "learning_rate": 3.024507658643326e-05, |
| "loss": 11.7943, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.7147717732314083, |
| "grad_norm": 18.714014053344727, |
| "learning_rate": 3.0172866520787746e-05, |
| "loss": 12.9202, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.7154787680615086, |
| "grad_norm": 15.120142936706543, |
| "learning_rate": 3.0100656455142235e-05, |
| "loss": 13.5838, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.7161857628916088, |
| "grad_norm": 19.008926391601562, |
| "learning_rate": 3.002844638949672e-05, |
| "loss": 14.1862, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.7168927577217091, |
| "grad_norm": 14.798474311828613, |
| "learning_rate": 2.9956236323851207e-05, |
| "loss": 12.3924, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.7175997525518094, |
| "grad_norm": 16.070415496826172, |
| "learning_rate": 2.9884026258205693e-05, |
| "loss": 13.5143, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.7183067473819098, |
| "grad_norm": 17.342212677001953, |
| "learning_rate": 2.9811816192560176e-05, |
| "loss": 12.2586, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.7190137422120101, |
| "grad_norm": 16.9810791015625, |
| "learning_rate": 2.9739606126914662e-05, |
| "loss": 13.5872, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.7197207370421104, |
| "grad_norm": 15.211740493774414, |
| "learning_rate": 2.9667396061269148e-05, |
| "loss": 12.3832, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.7204277318722107, |
| "grad_norm": 14.690281867980957, |
| "learning_rate": 2.9595185995623637e-05, |
| "loss": 12.2162, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.7211347267023109, |
| "grad_norm": 14.952332496643066, |
| "learning_rate": 2.9522975929978123e-05, |
| "loss": 13.2556, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.7218417215324113, |
| "grad_norm": 14.745388984680176, |
| "learning_rate": 2.9450765864332606e-05, |
| "loss": 11.7899, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.7225487163625116, |
| "grad_norm": 16.4384822845459, |
| "learning_rate": 2.9378555798687092e-05, |
| "loss": 12.6083, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.7232557111926119, |
| "grad_norm": 15.447677612304688, |
| "learning_rate": 2.9306345733041578e-05, |
| "loss": 12.1423, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.7239627060227122, |
| "grad_norm": 15.585071563720703, |
| "learning_rate": 2.9234135667396064e-05, |
| "loss": 12.0851, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.7246697008528125, |
| "grad_norm": 14.9990873336792, |
| "learning_rate": 2.9161925601750547e-05, |
| "loss": 12.8785, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.7253766956829129, |
| "grad_norm": 19.603099822998047, |
| "learning_rate": 2.908971553610504e-05, |
| "loss": 12.1019, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.7260836905130131, |
| "grad_norm": 13.465499877929688, |
| "learning_rate": 2.9017505470459522e-05, |
| "loss": 13.0492, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.7267906853431134, |
| "grad_norm": 14.473004341125488, |
| "learning_rate": 2.894529540481401e-05, |
| "loss": 14.0964, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.7274976801732137, |
| "grad_norm": 14.698629379272461, |
| "learning_rate": 2.8873085339168494e-05, |
| "loss": 11.0322, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.728204675003314, |
| "grad_norm": 16.44681739807129, |
| "learning_rate": 2.8800875273522977e-05, |
| "loss": 13.5584, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.7289116698334144, |
| "grad_norm": 13.492003440856934, |
| "learning_rate": 2.8728665207877463e-05, |
| "loss": 13.2963, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.7296186646635147, |
| "grad_norm": 15.496600151062012, |
| "learning_rate": 2.865645514223195e-05, |
| "loss": 12.6854, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.7303256594936149, |
| "grad_norm": 19.080289840698242, |
| "learning_rate": 2.8584245076586435e-05, |
| "loss": 13.4954, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.7310326543237152, |
| "grad_norm": 14.68902587890625, |
| "learning_rate": 2.8512035010940925e-05, |
| "loss": 12.0387, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.7317396491538155, |
| "grad_norm": 14.314245223999023, |
| "learning_rate": 2.843982494529541e-05, |
| "loss": 13.6249, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.7324466439839159, |
| "grad_norm": 16.663728713989258, |
| "learning_rate": 2.8367614879649893e-05, |
| "loss": 13.3793, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.7331536388140162, |
| "grad_norm": 14.021472930908203, |
| "learning_rate": 2.829540481400438e-05, |
| "loss": 12.4353, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.7338606336441165, |
| "grad_norm": 14.659539222717285, |
| "learning_rate": 2.8223194748358865e-05, |
| "loss": 11.4481, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.7345676284742167, |
| "grad_norm": 14.972705841064453, |
| "learning_rate": 2.8150984682713348e-05, |
| "loss": 12.917, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.735274623304317, |
| "grad_norm": 14.24467945098877, |
| "learning_rate": 2.8078774617067834e-05, |
| "loss": 13.0419, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.7359816181344174, |
| "grad_norm": 17.544336318969727, |
| "learning_rate": 2.8006564551422323e-05, |
| "loss": 12.9449, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.7366886129645177, |
| "grad_norm": 15.078471183776855, |
| "learning_rate": 2.793435448577681e-05, |
| "loss": 13.5799, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.737395607794618, |
| "grad_norm": 15.030024528503418, |
| "learning_rate": 2.7862144420131295e-05, |
| "loss": 11.3935, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.7381026026247183, |
| "grad_norm": 17.29486846923828, |
| "learning_rate": 2.778993435448578e-05, |
| "loss": 12.6275, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.7388095974548186, |
| "grad_norm": 15.28783893585205, |
| "learning_rate": 2.7717724288840264e-05, |
| "loss": 11.2874, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.739516592284919, |
| "grad_norm": 20.103883743286133, |
| "learning_rate": 2.764551422319475e-05, |
| "loss": 11.5156, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.7402235871150192, |
| "grad_norm": 15.663703918457031, |
| "learning_rate": 2.7573304157549236e-05, |
| "loss": 13.4875, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.7409305819451195, |
| "grad_norm": 16.604524612426758, |
| "learning_rate": 2.750109409190372e-05, |
| "loss": 12.9352, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.7416375767752198, |
| "grad_norm": 13.528607368469238, |
| "learning_rate": 2.742888402625821e-05, |
| "loss": 12.3269, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.7423445716053202, |
| "grad_norm": 15.969796180725098, |
| "learning_rate": 2.7356673960612694e-05, |
| "loss": 13.7365, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.7430515664354205, |
| "grad_norm": 15.28542709350586, |
| "learning_rate": 2.728446389496718e-05, |
| "loss": 12.0605, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.7437585612655208, |
| "grad_norm": 14.971050262451172, |
| "learning_rate": 2.7212253829321666e-05, |
| "loss": 12.715, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.744465556095621, |
| "grad_norm": 14.354373931884766, |
| "learning_rate": 2.714004376367615e-05, |
| "loss": 12.9591, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.7451725509257213, |
| "grad_norm": 16.311098098754883, |
| "learning_rate": 2.7067833698030635e-05, |
| "loss": 11.784, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.7458795457558217, |
| "grad_norm": 15.502252578735352, |
| "learning_rate": 2.699562363238512e-05, |
| "loss": 11.6811, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.746586540585922, |
| "grad_norm": 16.668073654174805, |
| "learning_rate": 2.692341356673961e-05, |
| "loss": 12.4734, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.7472935354160223, |
| "grad_norm": 21.601903915405273, |
| "learning_rate": 2.6851203501094096e-05, |
| "loss": 13.3575, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.7480005302461226, |
| "grad_norm": 15.497323989868164, |
| "learning_rate": 2.6778993435448582e-05, |
| "loss": 13.4247, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.7487075250762228, |
| "grad_norm": 17.963159561157227, |
| "learning_rate": 2.6706783369803065e-05, |
| "loss": 11.312, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.7494145199063232, |
| "grad_norm": 17.467559814453125, |
| "learning_rate": 2.663457330415755e-05, |
| "loss": 12.6942, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.7501215147364235, |
| "grad_norm": 15.068920135498047, |
| "learning_rate": 2.6562363238512037e-05, |
| "loss": 13.5305, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.7508285095665238, |
| "grad_norm": 15.089822769165039, |
| "learning_rate": 2.649015317286652e-05, |
| "loss": 13.7907, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.7515355043966241, |
| "grad_norm": 14.680115699768066, |
| "learning_rate": 2.6417943107221013e-05, |
| "loss": 10.9175, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.7522424992267244, |
| "grad_norm": 16.606077194213867, |
| "learning_rate": 2.6345733041575495e-05, |
| "loss": 13.6108, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.7529494940568248, |
| "grad_norm": 16.429758071899414, |
| "learning_rate": 2.627352297592998e-05, |
| "loss": 13.0759, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.753656488886925, |
| "grad_norm": 14.46728515625, |
| "learning_rate": 2.6201312910284467e-05, |
| "loss": 11.7365, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.7543634837170253, |
| "grad_norm": 15.043597221374512, |
| "learning_rate": 2.6129102844638953e-05, |
| "loss": 13.0305, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.7550704785471256, |
| "grad_norm": 16.372493743896484, |
| "learning_rate": 2.6056892778993436e-05, |
| "loss": 12.9214, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.7557774733772259, |
| "grad_norm": 13.306395530700684, |
| "learning_rate": 2.5984682713347922e-05, |
| "loss": 11.733, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.7564844682073263, |
| "grad_norm": 17.534255981445312, |
| "learning_rate": 2.5912472647702408e-05, |
| "loss": 12.869, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.7571914630374266, |
| "grad_norm": 16.98424530029297, |
| "learning_rate": 2.5840262582056897e-05, |
| "loss": 12.9999, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.7578984578675269, |
| "grad_norm": 13.720797538757324, |
| "learning_rate": 2.5768052516411384e-05, |
| "loss": 10.7562, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.7586054526976271, |
| "grad_norm": 15.344558715820312, |
| "learning_rate": 2.5695842450765866e-05, |
| "loss": 12.3965, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.7593124475277274, |
| "grad_norm": 14.723435401916504, |
| "learning_rate": 2.5623632385120352e-05, |
| "loss": 12.8219, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.7600194423578278, |
| "grad_norm": 15.032986640930176, |
| "learning_rate": 2.5551422319474838e-05, |
| "loss": 12.2727, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.7607264371879281, |
| "grad_norm": 15.117292404174805, |
| "learning_rate": 2.5479212253829324e-05, |
| "loss": 13.233, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.7614334320180284, |
| "grad_norm": 15.652196884155273, |
| "learning_rate": 2.540700218818381e-05, |
| "loss": 12.6491, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.7621404268481287, |
| "grad_norm": 15.632040023803711, |
| "learning_rate": 2.5334792122538296e-05, |
| "loss": 12.8196, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.7628474216782289, |
| "grad_norm": 16.843076705932617, |
| "learning_rate": 2.526258205689278e-05, |
| "loss": 12.47, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.7635544165083293, |
| "grad_norm": 14.941967964172363, |
| "learning_rate": 2.519037199124727e-05, |
| "loss": 11.7957, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.7642614113384296, |
| "grad_norm": 15.734443664550781, |
| "learning_rate": 2.5118161925601754e-05, |
| "loss": 12.8278, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.7649684061685299, |
| "grad_norm": 13.94361400604248, |
| "learning_rate": 2.5045951859956237e-05, |
| "loss": 12.3736, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.7656754009986302, |
| "grad_norm": 18.560409545898438, |
| "learning_rate": 2.4973741794310723e-05, |
| "loss": 12.0621, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.7663823958287305, |
| "grad_norm": 16.558231353759766, |
| "learning_rate": 2.4901531728665213e-05, |
| "loss": 13.4814, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.7670893906588309, |
| "grad_norm": 19.616657257080078, |
| "learning_rate": 2.4829321663019695e-05, |
| "loss": 13.0437, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.7677963854889311, |
| "grad_norm": 14.448963165283203, |
| "learning_rate": 2.475711159737418e-05, |
| "loss": 12.7444, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.7685033803190314, |
| "grad_norm": 14.808650016784668, |
| "learning_rate": 2.4684901531728667e-05, |
| "loss": 13.2923, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.7692103751491317, |
| "grad_norm": 16.270593643188477, |
| "learning_rate": 2.4612691466083153e-05, |
| "loss": 12.4998, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.769917369979232, |
| "grad_norm": 16.274654388427734, |
| "learning_rate": 2.454048140043764e-05, |
| "loss": 11.7857, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.7706243648093324, |
| "grad_norm": 20.461088180541992, |
| "learning_rate": 2.4468271334792125e-05, |
| "loss": 13.5802, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.7713313596394327, |
| "grad_norm": 14.93079662322998, |
| "learning_rate": 2.439606126914661e-05, |
| "loss": 12.1022, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.772038354469533, |
| "grad_norm": 13.616817474365234, |
| "learning_rate": 2.4323851203501097e-05, |
| "loss": 11.7097, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.7727453492996332, |
| "grad_norm": 16.070207595825195, |
| "learning_rate": 2.4251641137855583e-05, |
| "loss": 13.3532, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.7734523441297335, |
| "grad_norm": 15.457805633544922, |
| "learning_rate": 2.4179431072210066e-05, |
| "loss": 13.265, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.7741593389598339, |
| "grad_norm": 15.124858856201172, |
| "learning_rate": 2.4107221006564555e-05, |
| "loss": 13.2484, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.7748663337899342, |
| "grad_norm": 15.762398719787598, |
| "learning_rate": 2.403501094091904e-05, |
| "loss": 12.2059, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.7755733286200345, |
| "grad_norm": 15.04523754119873, |
| "learning_rate": 2.3962800875273524e-05, |
| "loss": 13.4343, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.7762803234501348, |
| "grad_norm": 14.77971363067627, |
| "learning_rate": 2.389059080962801e-05, |
| "loss": 11.309, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.776987318280235, |
| "grad_norm": 17.487321853637695, |
| "learning_rate": 2.3818380743982496e-05, |
| "loss": 12.3208, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.7776943131103354, |
| "grad_norm": 16.295358657836914, |
| "learning_rate": 2.3746170678336982e-05, |
| "loss": 10.7923, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.7784013079404357, |
| "grad_norm": 14.476552963256836, |
| "learning_rate": 2.3673960612691468e-05, |
| "loss": 13.6797, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.779108302770536, |
| "grad_norm": 16.841829299926758, |
| "learning_rate": 2.3601750547045954e-05, |
| "loss": 12.7085, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.7798152976006363, |
| "grad_norm": 15.268396377563477, |
| "learning_rate": 2.352954048140044e-05, |
| "loss": 13.7208, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.7805222924307366, |
| "grad_norm": 19.5039119720459, |
| "learning_rate": 2.3457330415754926e-05, |
| "loss": 12.752, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.781229287260837, |
| "grad_norm": 17.3958740234375, |
| "learning_rate": 2.338512035010941e-05, |
| "loss": 13.2237, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.7819362820909372, |
| "grad_norm": 14.796465873718262, |
| "learning_rate": 2.33129102844639e-05, |
| "loss": 12.6552, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.7826432769210375, |
| "grad_norm": 15.115665435791016, |
| "learning_rate": 2.3240700218818384e-05, |
| "loss": 13.2209, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.7833502717511378, |
| "grad_norm": 16.425796508789062, |
| "learning_rate": 2.3168490153172867e-05, |
| "loss": 13.3238, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.7840572665812381, |
| "grad_norm": 16.225072860717773, |
| "learning_rate": 2.3096280087527353e-05, |
| "loss": 11.3711, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.7847642614113385, |
| "grad_norm": 14.445027351379395, |
| "learning_rate": 2.3024070021881843e-05, |
| "loss": 13.2492, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.7854712562414388, |
| "grad_norm": 18.562454223632812, |
| "learning_rate": 2.2951859956236325e-05, |
| "loss": 13.406, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.786178251071539, |
| "grad_norm": 15.524725914001465, |
| "learning_rate": 2.287964989059081e-05, |
| "loss": 14.6405, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.7868852459016393, |
| "grad_norm": 16.006057739257812, |
| "learning_rate": 2.2807439824945297e-05, |
| "loss": 12.8032, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.7875922407317396, |
| "grad_norm": 14.516286849975586, |
| "learning_rate": 2.2735229759299783e-05, |
| "loss": 12.244, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.78829923556184, |
| "grad_norm": 14.554705619812012, |
| "learning_rate": 2.266301969365427e-05, |
| "loss": 11.4593, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.7890062303919403, |
| "grad_norm": 16.04813003540039, |
| "learning_rate": 2.2590809628008755e-05, |
| "loss": 12.6374, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.7897132252220406, |
| "grad_norm": 16.26800537109375, |
| "learning_rate": 2.251859956236324e-05, |
| "loss": 12.4228, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.7904202200521409, |
| "grad_norm": 14.735860824584961, |
| "learning_rate": 2.2446389496717727e-05, |
| "loss": 12.6189, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.7911272148822411, |
| "grad_norm": 16.355247497558594, |
| "learning_rate": 2.2374179431072213e-05, |
| "loss": 12.1257, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.7918342097123415, |
| "grad_norm": 16.468690872192383, |
| "learning_rate": 2.2301969365426696e-05, |
| "loss": 11.1213, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.7925412045424418, |
| "grad_norm": 17.71234703063965, |
| "learning_rate": 2.2229759299781185e-05, |
| "loss": 12.2162, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.7932481993725421, |
| "grad_norm": 16.555984497070312, |
| "learning_rate": 2.2157549234135668e-05, |
| "loss": 12.296, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.7939551942026424, |
| "grad_norm": 12.559003829956055, |
| "learning_rate": 2.2085339168490154e-05, |
| "loss": 12.0311, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.7946621890327427, |
| "grad_norm": 18.56354331970215, |
| "learning_rate": 2.201312910284464e-05, |
| "loss": 13.0041, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.795369183862843, |
| "grad_norm": 15.111113548278809, |
| "learning_rate": 2.1940919037199126e-05, |
| "loss": 13.2092, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.7960761786929433, |
| "grad_norm": 17.982934951782227, |
| "learning_rate": 2.1868708971553612e-05, |
| "loss": 14.1011, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.7967831735230436, |
| "grad_norm": 14.821832656860352, |
| "learning_rate": 2.1796498905908098e-05, |
| "loss": 13.0946, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.7974901683531439, |
| "grad_norm": 14.307646751403809, |
| "learning_rate": 2.1724288840262584e-05, |
| "loss": 11.4379, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.7981971631832442, |
| "grad_norm": 15.424671173095703, |
| "learning_rate": 2.165207877461707e-05, |
| "loss": 12.6333, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.7989041580133446, |
| "grad_norm": 16.159860610961914, |
| "learning_rate": 2.1579868708971556e-05, |
| "loss": 11.3861, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.7996111528434449, |
| "grad_norm": 17.140806198120117, |
| "learning_rate": 2.150765864332604e-05, |
| "loss": 12.7795, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.8003181476735451, |
| "grad_norm": 15.116321563720703, |
| "learning_rate": 2.143544857768053e-05, |
| "loss": 13.9064, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.8010251425036454, |
| "grad_norm": 15.199187278747559, |
| "learning_rate": 2.1363238512035014e-05, |
| "loss": 10.8875, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.8017321373337457, |
| "grad_norm": 14.66556167602539, |
| "learning_rate": 2.1291028446389497e-05, |
| "loss": 12.2029, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.8024391321638461, |
| "grad_norm": 15.456762313842773, |
| "learning_rate": 2.1218818380743983e-05, |
| "loss": 13.1825, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.8031461269939464, |
| "grad_norm": 18.5980167388916, |
| "learning_rate": 2.1146608315098473e-05, |
| "loss": 13.0355, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.8038531218240467, |
| "grad_norm": 15.780075073242188, |
| "learning_rate": 2.1074398249452955e-05, |
| "loss": 12.517, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.804560116654147, |
| "grad_norm": 17.828128814697266, |
| "learning_rate": 2.100218818380744e-05, |
| "loss": 11.9271, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.8052671114842472, |
| "grad_norm": 13.664030075073242, |
| "learning_rate": 2.092997811816193e-05, |
| "loss": 12.1154, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.8059741063143476, |
| "grad_norm": 21.724184036254883, |
| "learning_rate": 2.0857768052516413e-05, |
| "loss": 11.6643, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.8066811011444479, |
| "grad_norm": 15.66214370727539, |
| "learning_rate": 2.07855579868709e-05, |
| "loss": 12.9989, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.8073880959745482, |
| "grad_norm": 14.08765697479248, |
| "learning_rate": 2.0713347921225385e-05, |
| "loss": 12.1448, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.8080950908046485, |
| "grad_norm": 13.611175537109375, |
| "learning_rate": 2.064113785557987e-05, |
| "loss": 11.826, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.8088020856347488, |
| "grad_norm": 13.81747817993164, |
| "learning_rate": 2.0568927789934357e-05, |
| "loss": 11.613, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.8095090804648492, |
| "grad_norm": 13.661465644836426, |
| "learning_rate": 2.0496717724288843e-05, |
| "loss": 12.3789, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.8102160752949494, |
| "grad_norm": 17.0196590423584, |
| "learning_rate": 2.0424507658643326e-05, |
| "loss": 11.9157, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.8109230701250497, |
| "grad_norm": 14.75942325592041, |
| "learning_rate": 2.0352297592997815e-05, |
| "loss": 12.264, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.81163006495515, |
| "grad_norm": 15.581911087036133, |
| "learning_rate": 2.0280087527352298e-05, |
| "loss": 12.006, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.8123370597852503, |
| "grad_norm": 16.459049224853516, |
| "learning_rate": 2.0207877461706784e-05, |
| "loss": 11.139, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.8130440546153507, |
| "grad_norm": 14.035103797912598, |
| "learning_rate": 2.013566739606127e-05, |
| "loss": 12.8145, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.813751049445451, |
| "grad_norm": 13.479121208190918, |
| "learning_rate": 2.0063457330415756e-05, |
| "loss": 11.6135, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.8144580442755512, |
| "grad_norm": 13.676473617553711, |
| "learning_rate": 1.9991247264770242e-05, |
| "loss": 12.0856, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.8151650391056515, |
| "grad_norm": 16.638296127319336, |
| "learning_rate": 1.9919037199124728e-05, |
| "loss": 12.1883, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.8158720339357518, |
| "grad_norm": 14.359823226928711, |
| "learning_rate": 1.9846827133479214e-05, |
| "loss": 12.3571, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.8165790287658522, |
| "grad_norm": 15.50848388671875, |
| "learning_rate": 1.97746170678337e-05, |
| "loss": 11.2356, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.8172860235959525, |
| "grad_norm": 16.52731704711914, |
| "learning_rate": 1.9702407002188186e-05, |
| "loss": 12.2212, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.8179930184260528, |
| "grad_norm": 15.665827751159668, |
| "learning_rate": 1.963019693654267e-05, |
| "loss": 11.4575, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.818700013256153, |
| "grad_norm": 13.586274147033691, |
| "learning_rate": 1.955798687089716e-05, |
| "loss": 10.5227, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.8194070080862533, |
| "grad_norm": 13.108797073364258, |
| "learning_rate": 1.9485776805251644e-05, |
| "loss": 11.4108, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.8201140029163537, |
| "grad_norm": 15.360386848449707, |
| "learning_rate": 1.9413566739606127e-05, |
| "loss": 11.9431, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.820820997746454, |
| "grad_norm": 15.90074348449707, |
| "learning_rate": 1.9341356673960613e-05, |
| "loss": 13.6355, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.8215279925765543, |
| "grad_norm": 15.171931266784668, |
| "learning_rate": 1.9269146608315103e-05, |
| "loss": 10.6912, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.8222349874066546, |
| "grad_norm": 14.833294868469238, |
| "learning_rate": 1.9196936542669585e-05, |
| "loss": 13.6622, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.8229419822367549, |
| "grad_norm": 14.614151954650879, |
| "learning_rate": 1.912472647702407e-05, |
| "loss": 13.8156, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.8236489770668552, |
| "grad_norm": 14.428544044494629, |
| "learning_rate": 1.905251641137856e-05, |
| "loss": 11.7255, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.8243559718969555, |
| "grad_norm": 14.791826248168945, |
| "learning_rate": 1.8980306345733043e-05, |
| "loss": 11.8131, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.8250629667270558, |
| "grad_norm": 15.477858543395996, |
| "learning_rate": 1.890809628008753e-05, |
| "loss": 12.7562, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.8257699615571561, |
| "grad_norm": 14.780648231506348, |
| "learning_rate": 1.8835886214442015e-05, |
| "loss": 11.6997, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.8264769563872564, |
| "grad_norm": 15.95986557006836, |
| "learning_rate": 1.87636761487965e-05, |
| "loss": 12.5469, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.8271839512173568, |
| "grad_norm": 16.474977493286133, |
| "learning_rate": 1.8691466083150987e-05, |
| "loss": 12.6622, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.827890946047457, |
| "grad_norm": 14.389117240905762, |
| "learning_rate": 1.8619256017505473e-05, |
| "loss": 11.4078, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.8285979408775573, |
| "grad_norm": 21.426359176635742, |
| "learning_rate": 1.8547045951859956e-05, |
| "loss": 13.2855, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.8293049357076576, |
| "grad_norm": 15.901067733764648, |
| "learning_rate": 1.8474835886214446e-05, |
| "loss": 12.2708, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.8300119305377579, |
| "grad_norm": 15.242277145385742, |
| "learning_rate": 1.8402625820568928e-05, |
| "loss": 13.9031, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.8307189253678583, |
| "grad_norm": 14.417196273803711, |
| "learning_rate": 1.8330415754923414e-05, |
| "loss": 12.5818, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.8314259201979586, |
| "grad_norm": 15.016650199890137, |
| "learning_rate": 1.82582056892779e-05, |
| "loss": 12.4367, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.8321329150280589, |
| "grad_norm": 15.281384468078613, |
| "learning_rate": 1.8185995623632386e-05, |
| "loss": 12.3963, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.8328399098581591, |
| "grad_norm": 14.031335830688477, |
| "learning_rate": 1.8113785557986872e-05, |
| "loss": 12.2232, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.8335469046882594, |
| "grad_norm": 15.91395092010498, |
| "learning_rate": 1.804157549234136e-05, |
| "loss": 13.4726, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.8342538995183598, |
| "grad_norm": 18.70294952392578, |
| "learning_rate": 1.7969365426695844e-05, |
| "loss": 13.1052, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.8349608943484601, |
| "grad_norm": 16.646923065185547, |
| "learning_rate": 1.789715536105033e-05, |
| "loss": 11.53, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.8356678891785604, |
| "grad_norm": 15.27785873413086, |
| "learning_rate": 1.7824945295404816e-05, |
| "loss": 12.2457, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.8363748840086607, |
| "grad_norm": 14.599854469299316, |
| "learning_rate": 1.77527352297593e-05, |
| "loss": 11.63, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.837081878838761, |
| "grad_norm": 15.050758361816406, |
| "learning_rate": 1.768052516411379e-05, |
| "loss": 11.4395, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.8377888736688613, |
| "grad_norm": 12.898660659790039, |
| "learning_rate": 1.7608315098468275e-05, |
| "loss": 10.9921, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.8384958684989616, |
| "grad_norm": 16.362031936645508, |
| "learning_rate": 1.7536105032822757e-05, |
| "loss": 11.6573, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.8392028633290619, |
| "grad_norm": 16.562292098999023, |
| "learning_rate": 1.7463894967177243e-05, |
| "loss": 13.4289, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.8399098581591622, |
| "grad_norm": 15.194472312927246, |
| "learning_rate": 1.7391684901531733e-05, |
| "loss": 12.5519, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.8406168529892625, |
| "grad_norm": 15.846282958984375, |
| "learning_rate": 1.7319474835886215e-05, |
| "loss": 12.8095, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.8413238478193629, |
| "grad_norm": 15.602988243103027, |
| "learning_rate": 1.72472647702407e-05, |
| "loss": 11.5179, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.8420308426494632, |
| "grad_norm": 15.674636840820312, |
| "learning_rate": 1.717505470459519e-05, |
| "loss": 13.1862, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.8427378374795634, |
| "grad_norm": 13.627044677734375, |
| "learning_rate": 1.7102844638949673e-05, |
| "loss": 11.5302, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.8434448323096637, |
| "grad_norm": 16.106996536254883, |
| "learning_rate": 1.703063457330416e-05, |
| "loss": 12.02, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.844151827139764, |
| "grad_norm": 15.40173053741455, |
| "learning_rate": 1.6958424507658645e-05, |
| "loss": 13.0771, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.8448588219698644, |
| "grad_norm": 13.60293197631836, |
| "learning_rate": 1.688621444201313e-05, |
| "loss": 11.2656, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.8455658167999647, |
| "grad_norm": 15.298815727233887, |
| "learning_rate": 1.6814004376367617e-05, |
| "loss": 12.9284, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.846272811630065, |
| "grad_norm": 14.378645896911621, |
| "learning_rate": 1.6741794310722103e-05, |
| "loss": 11.3607, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.8469798064601652, |
| "grad_norm": 17.42547035217285, |
| "learning_rate": 1.6669584245076586e-05, |
| "loss": 12.823, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.8476868012902655, |
| "grad_norm": 18.732912063598633, |
| "learning_rate": 1.6597374179431076e-05, |
| "loss": 12.9339, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.8483937961203659, |
| "grad_norm": 15.936331748962402, |
| "learning_rate": 1.6525164113785558e-05, |
| "loss": 13.8907, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.8491007909504662, |
| "grad_norm": 17.328195571899414, |
| "learning_rate": 1.6452954048140044e-05, |
| "loss": 12.764, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.8498077857805665, |
| "grad_norm": 15.351299285888672, |
| "learning_rate": 1.6380743982494534e-05, |
| "loss": 12.9472, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.8505147806106668, |
| "grad_norm": 16.695545196533203, |
| "learning_rate": 1.6308533916849016e-05, |
| "loss": 11.2182, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.851221775440767, |
| "grad_norm": 15.395929336547852, |
| "learning_rate": 1.6236323851203502e-05, |
| "loss": 13.5168, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.8519287702708674, |
| "grad_norm": 14.865396499633789, |
| "learning_rate": 1.616411378555799e-05, |
| "loss": 12.2607, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.8526357651009677, |
| "grad_norm": 14.395364761352539, |
| "learning_rate": 1.6091903719912474e-05, |
| "loss": 11.1264, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.853342759931068, |
| "grad_norm": 15.32667064666748, |
| "learning_rate": 1.601969365426696e-05, |
| "loss": 13.1768, |
| "step": 1207 |
| }, |
| { |
| "epoch": 0.8540497547611683, |
| "grad_norm": 18.79914093017578, |
| "learning_rate": 1.5947483588621443e-05, |
| "loss": 13.2718, |
| "step": 1208 |
| }, |
| { |
| "epoch": 0.8547567495912686, |
| "grad_norm": 14.390812873840332, |
| "learning_rate": 1.587527352297593e-05, |
| "loss": 11.8755, |
| "step": 1209 |
| }, |
| { |
| "epoch": 0.855463744421369, |
| "grad_norm": 15.825724601745605, |
| "learning_rate": 1.580306345733042e-05, |
| "loss": 11.9834, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.8561707392514692, |
| "grad_norm": 14.854561805725098, |
| "learning_rate": 1.57308533916849e-05, |
| "loss": 13.5324, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.8568777340815695, |
| "grad_norm": 16.236326217651367, |
| "learning_rate": 1.5658643326039387e-05, |
| "loss": 12.7902, |
| "step": 1212 |
| }, |
| { |
| "epoch": 0.8575847289116698, |
| "grad_norm": 14.993921279907227, |
| "learning_rate": 1.5586433260393873e-05, |
| "loss": 12.5322, |
| "step": 1213 |
| }, |
| { |
| "epoch": 0.8582917237417701, |
| "grad_norm": 13.216548919677734, |
| "learning_rate": 1.551422319474836e-05, |
| "loss": 12.2942, |
| "step": 1214 |
| }, |
| { |
| "epoch": 0.8589987185718705, |
| "grad_norm": 15.003395080566406, |
| "learning_rate": 1.5442013129102845e-05, |
| "loss": 14.2319, |
| "step": 1215 |
| }, |
| { |
| "epoch": 0.8597057134019708, |
| "grad_norm": 15.051974296569824, |
| "learning_rate": 1.536980306345733e-05, |
| "loss": 12.4653, |
| "step": 1216 |
| }, |
| { |
| "epoch": 0.860412708232071, |
| "grad_norm": 16.103477478027344, |
| "learning_rate": 1.5297592997811817e-05, |
| "loss": 12.6086, |
| "step": 1217 |
| }, |
| { |
| "epoch": 0.8611197030621713, |
| "grad_norm": 15.790787696838379, |
| "learning_rate": 1.5225382932166303e-05, |
| "loss": 11.7837, |
| "step": 1218 |
| }, |
| { |
| "epoch": 0.8618266978922716, |
| "grad_norm": 14.117712020874023, |
| "learning_rate": 1.515317286652079e-05, |
| "loss": 13.6153, |
| "step": 1219 |
| }, |
| { |
| "epoch": 0.862533692722372, |
| "grad_norm": 13.054887771606445, |
| "learning_rate": 1.5080962800875274e-05, |
| "loss": 12.3808, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.8632406875524723, |
| "grad_norm": 14.148192405700684, |
| "learning_rate": 1.5008752735229761e-05, |
| "loss": 12.9498, |
| "step": 1221 |
| }, |
| { |
| "epoch": 0.8639476823825726, |
| "grad_norm": 15.982940673828125, |
| "learning_rate": 1.4936542669584247e-05, |
| "loss": 13.6622, |
| "step": 1222 |
| }, |
| { |
| "epoch": 0.8646546772126729, |
| "grad_norm": 14.222307205200195, |
| "learning_rate": 1.4864332603938732e-05, |
| "loss": 12.2357, |
| "step": 1223 |
| }, |
| { |
| "epoch": 0.8653616720427731, |
| "grad_norm": 14.06337833404541, |
| "learning_rate": 1.4792122538293218e-05, |
| "loss": 11.9331, |
| "step": 1224 |
| }, |
| { |
| "epoch": 0.8660686668728735, |
| "grad_norm": 18.584463119506836, |
| "learning_rate": 1.4719912472647706e-05, |
| "loss": 12.0133, |
| "step": 1225 |
| }, |
| { |
| "epoch": 0.8667756617029738, |
| "grad_norm": 14.86030101776123, |
| "learning_rate": 1.464770240700219e-05, |
| "loss": 13.2664, |
| "step": 1226 |
| }, |
| { |
| "epoch": 0.8674826565330741, |
| "grad_norm": 14.793909072875977, |
| "learning_rate": 1.4575492341356674e-05, |
| "loss": 12.9823, |
| "step": 1227 |
| }, |
| { |
| "epoch": 0.8681896513631744, |
| "grad_norm": 15.59019947052002, |
| "learning_rate": 1.4503282275711162e-05, |
| "loss": 12.4917, |
| "step": 1228 |
| }, |
| { |
| "epoch": 0.8688966461932747, |
| "grad_norm": 13.743398666381836, |
| "learning_rate": 1.4431072210065648e-05, |
| "loss": 11.7762, |
| "step": 1229 |
| }, |
| { |
| "epoch": 0.8696036410233751, |
| "grad_norm": 16.514175415039062, |
| "learning_rate": 1.4358862144420132e-05, |
| "loss": 11.7951, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.8703106358534753, |
| "grad_norm": 14.362953186035156, |
| "learning_rate": 1.4286652078774618e-05, |
| "loss": 10.9248, |
| "step": 1231 |
| }, |
| { |
| "epoch": 0.8710176306835756, |
| "grad_norm": 16.149871826171875, |
| "learning_rate": 1.4214442013129106e-05, |
| "loss": 12.5736, |
| "step": 1232 |
| }, |
| { |
| "epoch": 0.8717246255136759, |
| "grad_norm": 15.02379322052002, |
| "learning_rate": 1.414223194748359e-05, |
| "loss": 12.475, |
| "step": 1233 |
| }, |
| { |
| "epoch": 0.8724316203437762, |
| "grad_norm": 15.37895679473877, |
| "learning_rate": 1.4070021881838075e-05, |
| "loss": 11.1181, |
| "step": 1234 |
| }, |
| { |
| "epoch": 0.8731386151738766, |
| "grad_norm": 14.252886772155762, |
| "learning_rate": 1.399781181619256e-05, |
| "loss": 13.0672, |
| "step": 1235 |
| }, |
| { |
| "epoch": 0.8738456100039769, |
| "grad_norm": 14.250099182128906, |
| "learning_rate": 1.3925601750547049e-05, |
| "loss": 12.467, |
| "step": 1236 |
| }, |
| { |
| "epoch": 0.8745526048340772, |
| "grad_norm": 15.835908889770508, |
| "learning_rate": 1.3853391684901533e-05, |
| "loss": 14.3098, |
| "step": 1237 |
| }, |
| { |
| "epoch": 0.8752595996641774, |
| "grad_norm": 15.358033180236816, |
| "learning_rate": 1.3781181619256019e-05, |
| "loss": 12.1643, |
| "step": 1238 |
| }, |
| { |
| "epoch": 0.8759665944942777, |
| "grad_norm": 14.915874481201172, |
| "learning_rate": 1.3708971553610507e-05, |
| "loss": 12.093, |
| "step": 1239 |
| }, |
| { |
| "epoch": 0.8766735893243781, |
| "grad_norm": 17.685922622680664, |
| "learning_rate": 1.3636761487964991e-05, |
| "loss": 11.1443, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.8773805841544784, |
| "grad_norm": 14.141484260559082, |
| "learning_rate": 1.3564551422319477e-05, |
| "loss": 11.1317, |
| "step": 1241 |
| }, |
| { |
| "epoch": 0.8780875789845787, |
| "grad_norm": 14.050369262695312, |
| "learning_rate": 1.3492341356673961e-05, |
| "loss": 12.0695, |
| "step": 1242 |
| }, |
| { |
| "epoch": 0.878794573814679, |
| "grad_norm": 14.12048053741455, |
| "learning_rate": 1.3420131291028449e-05, |
| "loss": 10.7497, |
| "step": 1243 |
| }, |
| { |
| "epoch": 0.8795015686447792, |
| "grad_norm": 15.856414794921875, |
| "learning_rate": 1.3347921225382933e-05, |
| "loss": 11.6056, |
| "step": 1244 |
| }, |
| { |
| "epoch": 0.8802085634748796, |
| "grad_norm": 15.296753883361816, |
| "learning_rate": 1.327571115973742e-05, |
| "loss": 12.4171, |
| "step": 1245 |
| }, |
| { |
| "epoch": 0.8809155583049799, |
| "grad_norm": 14.189363479614258, |
| "learning_rate": 1.3203501094091905e-05, |
| "loss": 11.1442, |
| "step": 1246 |
| }, |
| { |
| "epoch": 0.8816225531350802, |
| "grad_norm": 13.78650188446045, |
| "learning_rate": 1.313129102844639e-05, |
| "loss": 12.2327, |
| "step": 1247 |
| }, |
| { |
| "epoch": 0.8823295479651805, |
| "grad_norm": 14.706070899963379, |
| "learning_rate": 1.3059080962800877e-05, |
| "loss": 12.6943, |
| "step": 1248 |
| }, |
| { |
| "epoch": 0.8830365427952808, |
| "grad_norm": 17.17374038696289, |
| "learning_rate": 1.2986870897155362e-05, |
| "loss": 13.3784, |
| "step": 1249 |
| }, |
| { |
| "epoch": 0.8837435376253812, |
| "grad_norm": 15.617532730102539, |
| "learning_rate": 1.2914660831509848e-05, |
| "loss": 12.0446, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.8844505324554814, |
| "grad_norm": 15.952999114990234, |
| "learning_rate": 1.2842450765864334e-05, |
| "loss": 11.5787, |
| "step": 1251 |
| }, |
| { |
| "epoch": 0.8851575272855817, |
| "grad_norm": 14.995427131652832, |
| "learning_rate": 1.277024070021882e-05, |
| "loss": 11.7415, |
| "step": 1252 |
| }, |
| { |
| "epoch": 0.885864522115682, |
| "grad_norm": 17.298742294311523, |
| "learning_rate": 1.2698030634573306e-05, |
| "loss": 12.72, |
| "step": 1253 |
| }, |
| { |
| "epoch": 0.8865715169457823, |
| "grad_norm": 14.929863929748535, |
| "learning_rate": 1.2625820568927792e-05, |
| "loss": 12.582, |
| "step": 1254 |
| }, |
| { |
| "epoch": 0.8872785117758827, |
| "grad_norm": 12.553905487060547, |
| "learning_rate": 1.2553610503282278e-05, |
| "loss": 10.473, |
| "step": 1255 |
| }, |
| { |
| "epoch": 0.887985506605983, |
| "grad_norm": 14.32827377319336, |
| "learning_rate": 1.2481400437636762e-05, |
| "loss": 13.7437, |
| "step": 1256 |
| }, |
| { |
| "epoch": 0.8886925014360832, |
| "grad_norm": 15.110925674438477, |
| "learning_rate": 1.2409190371991248e-05, |
| "loss": 13.3395, |
| "step": 1257 |
| }, |
| { |
| "epoch": 0.8893994962661835, |
| "grad_norm": 13.540974617004395, |
| "learning_rate": 1.2336980306345734e-05, |
| "loss": 10.6814, |
| "step": 1258 |
| }, |
| { |
| "epoch": 0.8901064910962838, |
| "grad_norm": 13.67444896697998, |
| "learning_rate": 1.226477024070022e-05, |
| "loss": 12.2924, |
| "step": 1259 |
| }, |
| { |
| "epoch": 0.8908134859263842, |
| "grad_norm": 14.55646800994873, |
| "learning_rate": 1.2192560175054705e-05, |
| "loss": 12.3313, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.8915204807564845, |
| "grad_norm": 16.98577880859375, |
| "learning_rate": 1.2120350109409193e-05, |
| "loss": 12.2591, |
| "step": 1261 |
| }, |
| { |
| "epoch": 0.8922274755865848, |
| "grad_norm": 15.357362747192383, |
| "learning_rate": 1.2048140043763677e-05, |
| "loss": 11.7354, |
| "step": 1262 |
| }, |
| { |
| "epoch": 0.892934470416685, |
| "grad_norm": 14.358413696289062, |
| "learning_rate": 1.1975929978118163e-05, |
| "loss": 12.9059, |
| "step": 1263 |
| }, |
| { |
| "epoch": 0.8936414652467853, |
| "grad_norm": 14.79417896270752, |
| "learning_rate": 1.1903719912472649e-05, |
| "loss": 12.4349, |
| "step": 1264 |
| }, |
| { |
| "epoch": 0.8943484600768857, |
| "grad_norm": 15.485979080200195, |
| "learning_rate": 1.1831509846827135e-05, |
| "loss": 11.7835, |
| "step": 1265 |
| }, |
| { |
| "epoch": 0.895055454906986, |
| "grad_norm": 13.867639541625977, |
| "learning_rate": 1.1759299781181621e-05, |
| "loss": 10.9385, |
| "step": 1266 |
| }, |
| { |
| "epoch": 0.8957624497370863, |
| "grad_norm": 17.667858123779297, |
| "learning_rate": 1.1687089715536107e-05, |
| "loss": 11.9884, |
| "step": 1267 |
| }, |
| { |
| "epoch": 0.8964694445671866, |
| "grad_norm": 16.6583251953125, |
| "learning_rate": 1.1614879649890593e-05, |
| "loss": 12.0298, |
| "step": 1268 |
| }, |
| { |
| "epoch": 0.8971764393972869, |
| "grad_norm": 18.523691177368164, |
| "learning_rate": 1.1542669584245077e-05, |
| "loss": 11.216, |
| "step": 1269 |
| }, |
| { |
| "epoch": 0.8978834342273873, |
| "grad_norm": 19.020814895629883, |
| "learning_rate": 1.1470459518599563e-05, |
| "loss": 13.4564, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.8985904290574875, |
| "grad_norm": 15.264403343200684, |
| "learning_rate": 1.139824945295405e-05, |
| "loss": 11.9713, |
| "step": 1271 |
| }, |
| { |
| "epoch": 0.8992974238875878, |
| "grad_norm": 14.84862995147705, |
| "learning_rate": 1.1326039387308535e-05, |
| "loss": 11.2933, |
| "step": 1272 |
| }, |
| { |
| "epoch": 0.9000044187176881, |
| "grad_norm": 14.617576599121094, |
| "learning_rate": 1.125382932166302e-05, |
| "loss": 13.196, |
| "step": 1273 |
| }, |
| { |
| "epoch": 0.9007114135477884, |
| "grad_norm": 17.44744873046875, |
| "learning_rate": 1.1181619256017508e-05, |
| "loss": 12.2643, |
| "step": 1274 |
| }, |
| { |
| "epoch": 0.9014184083778888, |
| "grad_norm": 15.30611515045166, |
| "learning_rate": 1.1109409190371992e-05, |
| "loss": 11.58, |
| "step": 1275 |
| }, |
| { |
| "epoch": 0.9021254032079891, |
| "grad_norm": 15.377030372619629, |
| "learning_rate": 1.1037199124726478e-05, |
| "loss": 12.4231, |
| "step": 1276 |
| }, |
| { |
| "epoch": 0.9028323980380893, |
| "grad_norm": 13.975566864013672, |
| "learning_rate": 1.0964989059080966e-05, |
| "loss": 12.0694, |
| "step": 1277 |
| }, |
| { |
| "epoch": 0.9035393928681896, |
| "grad_norm": 16.075166702270508, |
| "learning_rate": 1.089277899343545e-05, |
| "loss": 12.8699, |
| "step": 1278 |
| }, |
| { |
| "epoch": 0.9042463876982899, |
| "grad_norm": 13.96810245513916, |
| "learning_rate": 1.0820568927789936e-05, |
| "loss": 11.9998, |
| "step": 1279 |
| }, |
| { |
| "epoch": 0.9049533825283903, |
| "grad_norm": 14.134039878845215, |
| "learning_rate": 1.0748358862144422e-05, |
| "loss": 11.1477, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.9056603773584906, |
| "grad_norm": 13.151305198669434, |
| "learning_rate": 1.0676148796498908e-05, |
| "loss": 11.9197, |
| "step": 1281 |
| }, |
| { |
| "epoch": 0.9063673721885909, |
| "grad_norm": 13.498329162597656, |
| "learning_rate": 1.0603938730853392e-05, |
| "loss": 12.8619, |
| "step": 1282 |
| }, |
| { |
| "epoch": 0.9070743670186912, |
| "grad_norm": 14.909584045410156, |
| "learning_rate": 1.0531728665207878e-05, |
| "loss": 11.9246, |
| "step": 1283 |
| }, |
| { |
| "epoch": 0.9077813618487914, |
| "grad_norm": 14.063223838806152, |
| "learning_rate": 1.0459518599562364e-05, |
| "loss": 12.7957, |
| "step": 1284 |
| }, |
| { |
| "epoch": 0.9084883566788918, |
| "grad_norm": 13.720608711242676, |
| "learning_rate": 1.038730853391685e-05, |
| "loss": 12.7551, |
| "step": 1285 |
| }, |
| { |
| "epoch": 0.9091953515089921, |
| "grad_norm": 14.793830871582031, |
| "learning_rate": 1.0315098468271335e-05, |
| "loss": 12.179, |
| "step": 1286 |
| }, |
| { |
| "epoch": 0.9099023463390924, |
| "grad_norm": 12.872452735900879, |
| "learning_rate": 1.0242888402625823e-05, |
| "loss": 11.0132, |
| "step": 1287 |
| }, |
| { |
| "epoch": 0.9106093411691927, |
| "grad_norm": 16.042049407958984, |
| "learning_rate": 1.0170678336980307e-05, |
| "loss": 11.8582, |
| "step": 1288 |
| }, |
| { |
| "epoch": 0.911316335999293, |
| "grad_norm": 21.02182388305664, |
| "learning_rate": 1.0098468271334793e-05, |
| "loss": 13.052, |
| "step": 1289 |
| }, |
| { |
| "epoch": 0.9120233308293934, |
| "grad_norm": 13.702282905578613, |
| "learning_rate": 1.002625820568928e-05, |
| "loss": 11.8454, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.9127303256594936, |
| "grad_norm": 13.618666648864746, |
| "learning_rate": 9.954048140043765e-06, |
| "loss": 11.8811, |
| "step": 1291 |
| }, |
| { |
| "epoch": 0.9134373204895939, |
| "grad_norm": 16.58145523071289, |
| "learning_rate": 9.881838074398251e-06, |
| "loss": 12.1977, |
| "step": 1292 |
| }, |
| { |
| "epoch": 0.9141443153196942, |
| "grad_norm": 12.983065605163574, |
| "learning_rate": 9.809628008752737e-06, |
| "loss": 10.1651, |
| "step": 1293 |
| }, |
| { |
| "epoch": 0.9148513101497945, |
| "grad_norm": 15.553836822509766, |
| "learning_rate": 9.737417943107223e-06, |
| "loss": 12.1087, |
| "step": 1294 |
| }, |
| { |
| "epoch": 0.9155583049798949, |
| "grad_norm": 12.707442283630371, |
| "learning_rate": 9.665207877461707e-06, |
| "loss": 11.662, |
| "step": 1295 |
| }, |
| { |
| "epoch": 0.9162652998099952, |
| "grad_norm": 16.751333236694336, |
| "learning_rate": 9.592997811816193e-06, |
| "loss": 11.397, |
| "step": 1296 |
| }, |
| { |
| "epoch": 0.9169722946400954, |
| "grad_norm": 14.83692455291748, |
| "learning_rate": 9.52078774617068e-06, |
| "loss": 10.7658, |
| "step": 1297 |
| }, |
| { |
| "epoch": 0.9176792894701957, |
| "grad_norm": 12.964903831481934, |
| "learning_rate": 9.448577680525165e-06, |
| "loss": 11.8422, |
| "step": 1298 |
| }, |
| { |
| "epoch": 0.918386284300296, |
| "grad_norm": 13.346761703491211, |
| "learning_rate": 9.37636761487965e-06, |
| "loss": 11.7921, |
| "step": 1299 |
| }, |
| { |
| "epoch": 0.9190932791303964, |
| "grad_norm": 15.598644256591797, |
| "learning_rate": 9.304157549234138e-06, |
| "loss": 11.7002, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.9198002739604967, |
| "grad_norm": 14.48563003540039, |
| "learning_rate": 9.231947483588622e-06, |
| "loss": 11.5056, |
| "step": 1301 |
| }, |
| { |
| "epoch": 0.920507268790597, |
| "grad_norm": 13.817610740661621, |
| "learning_rate": 9.159737417943108e-06, |
| "loss": 11.8712, |
| "step": 1302 |
| }, |
| { |
| "epoch": 0.9212142636206972, |
| "grad_norm": 16.314889907836914, |
| "learning_rate": 9.087527352297596e-06, |
| "loss": 12.5035, |
| "step": 1303 |
| }, |
| { |
| "epoch": 0.9219212584507975, |
| "grad_norm": 16.28759765625, |
| "learning_rate": 9.01531728665208e-06, |
| "loss": 12.0567, |
| "step": 1304 |
| }, |
| { |
| "epoch": 0.9226282532808979, |
| "grad_norm": 14.504642486572266, |
| "learning_rate": 8.943107221006566e-06, |
| "loss": 11.6086, |
| "step": 1305 |
| }, |
| { |
| "epoch": 0.9233352481109982, |
| "grad_norm": 15.890057563781738, |
| "learning_rate": 8.870897155361052e-06, |
| "loss": 13.1966, |
| "step": 1306 |
| }, |
| { |
| "epoch": 0.9240422429410985, |
| "grad_norm": 15.602300643920898, |
| "learning_rate": 8.798687089715538e-06, |
| "loss": 11.283, |
| "step": 1307 |
| }, |
| { |
| "epoch": 0.9247492377711988, |
| "grad_norm": 14.971783638000488, |
| "learning_rate": 8.726477024070022e-06, |
| "loss": 10.8825, |
| "step": 1308 |
| }, |
| { |
| "epoch": 0.9254562326012991, |
| "grad_norm": 14.330822944641113, |
| "learning_rate": 8.654266958424508e-06, |
| "loss": 10.4557, |
| "step": 1309 |
| }, |
| { |
| "epoch": 0.9261632274313994, |
| "grad_norm": 14.569314002990723, |
| "learning_rate": 8.582056892778994e-06, |
| "loss": 11.6277, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.9268702222614997, |
| "grad_norm": 16.473087310791016, |
| "learning_rate": 8.50984682713348e-06, |
| "loss": 12.8317, |
| "step": 1311 |
| }, |
| { |
| "epoch": 0.9275772170916, |
| "grad_norm": 14.927108764648438, |
| "learning_rate": 8.437636761487965e-06, |
| "loss": 11.2278, |
| "step": 1312 |
| }, |
| { |
| "epoch": 0.9282842119217003, |
| "grad_norm": 14.588191986083984, |
| "learning_rate": 8.365426695842451e-06, |
| "loss": 11.8304, |
| "step": 1313 |
| }, |
| { |
| "epoch": 0.9289912067518006, |
| "grad_norm": 14.352690696716309, |
| "learning_rate": 8.293216630196937e-06, |
| "loss": 12.0047, |
| "step": 1314 |
| }, |
| { |
| "epoch": 0.929698201581901, |
| "grad_norm": 13.969871520996094, |
| "learning_rate": 8.221006564551423e-06, |
| "loss": 11.8746, |
| "step": 1315 |
| }, |
| { |
| "epoch": 0.9304051964120013, |
| "grad_norm": 14.721321105957031, |
| "learning_rate": 8.148796498905909e-06, |
| "loss": 11.3543, |
| "step": 1316 |
| }, |
| { |
| "epoch": 0.9311121912421015, |
| "grad_norm": 13.538402557373047, |
| "learning_rate": 8.076586433260395e-06, |
| "loss": 12.1419, |
| "step": 1317 |
| }, |
| { |
| "epoch": 0.9318191860722018, |
| "grad_norm": 16.33662986755371, |
| "learning_rate": 8.004376367614881e-06, |
| "loss": 13.1276, |
| "step": 1318 |
| }, |
| { |
| "epoch": 0.9325261809023021, |
| "grad_norm": 14.316162109375, |
| "learning_rate": 7.932166301969365e-06, |
| "loss": 12.8216, |
| "step": 1319 |
| }, |
| { |
| "epoch": 0.9332331757324025, |
| "grad_norm": 17.847124099731445, |
| "learning_rate": 7.859956236323853e-06, |
| "loss": 11.2353, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.9339401705625028, |
| "grad_norm": 14.48947525024414, |
| "learning_rate": 7.787746170678337e-06, |
| "loss": 11.4874, |
| "step": 1321 |
| }, |
| { |
| "epoch": 0.9346471653926031, |
| "grad_norm": 14.466111183166504, |
| "learning_rate": 7.715536105032823e-06, |
| "loss": 12.4364, |
| "step": 1322 |
| }, |
| { |
| "epoch": 0.9353541602227033, |
| "grad_norm": 14.350505828857422, |
| "learning_rate": 7.64332603938731e-06, |
| "loss": 12.3382, |
| "step": 1323 |
| }, |
| { |
| "epoch": 0.9360611550528036, |
| "grad_norm": 17.1221866607666, |
| "learning_rate": 7.571115973741795e-06, |
| "loss": 13.8187, |
| "step": 1324 |
| }, |
| { |
| "epoch": 0.936768149882904, |
| "grad_norm": 14.196785926818848, |
| "learning_rate": 7.49890590809628e-06, |
| "loss": 11.4155, |
| "step": 1325 |
| }, |
| { |
| "epoch": 0.9374751447130043, |
| "grad_norm": 14.461777687072754, |
| "learning_rate": 7.426695842450766e-06, |
| "loss": 12.296, |
| "step": 1326 |
| }, |
| { |
| "epoch": 0.9381821395431046, |
| "grad_norm": 14.26052474975586, |
| "learning_rate": 7.354485776805253e-06, |
| "loss": 11.1558, |
| "step": 1327 |
| }, |
| { |
| "epoch": 0.9388891343732049, |
| "grad_norm": 15.175439834594727, |
| "learning_rate": 7.282275711159738e-06, |
| "loss": 12.9709, |
| "step": 1328 |
| }, |
| { |
| "epoch": 0.9395961292033052, |
| "grad_norm": 14.596419334411621, |
| "learning_rate": 7.210065645514224e-06, |
| "loss": 12.8197, |
| "step": 1329 |
| }, |
| { |
| "epoch": 0.9403031240334055, |
| "grad_norm": 14.158638000488281, |
| "learning_rate": 7.137855579868709e-06, |
| "loss": 12.6762, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.9410101188635058, |
| "grad_norm": 16.99709701538086, |
| "learning_rate": 7.065645514223194e-06, |
| "loss": 10.9333, |
| "step": 1331 |
| }, |
| { |
| "epoch": 0.9417171136936061, |
| "grad_norm": 15.491727828979492, |
| "learning_rate": 6.99343544857768e-06, |
| "loss": 11.3055, |
| "step": 1332 |
| }, |
| { |
| "epoch": 0.9424241085237064, |
| "grad_norm": 16.94435691833496, |
| "learning_rate": 6.921225382932166e-06, |
| "loss": 13.25, |
| "step": 1333 |
| }, |
| { |
| "epoch": 0.9431311033538067, |
| "grad_norm": 14.477254867553711, |
| "learning_rate": 6.8490153172866524e-06, |
| "loss": 11.0744, |
| "step": 1334 |
| }, |
| { |
| "epoch": 0.9438380981839071, |
| "grad_norm": 15.165593147277832, |
| "learning_rate": 6.7768052516411385e-06, |
| "loss": 10.9101, |
| "step": 1335 |
| }, |
| { |
| "epoch": 0.9445450930140074, |
| "grad_norm": 14.337298393249512, |
| "learning_rate": 6.704595185995624e-06, |
| "loss": 11.1425, |
| "step": 1336 |
| }, |
| { |
| "epoch": 0.9452520878441076, |
| "grad_norm": 15.68160629272461, |
| "learning_rate": 6.63238512035011e-06, |
| "loss": 12.3696, |
| "step": 1337 |
| }, |
| { |
| "epoch": 0.9459590826742079, |
| "grad_norm": 13.884684562683105, |
| "learning_rate": 6.560175054704596e-06, |
| "loss": 10.9927, |
| "step": 1338 |
| }, |
| { |
| "epoch": 0.9466660775043082, |
| "grad_norm": 16.531490325927734, |
| "learning_rate": 6.487964989059081e-06, |
| "loss": 11.8801, |
| "step": 1339 |
| }, |
| { |
| "epoch": 0.9473730723344086, |
| "grad_norm": 18.980274200439453, |
| "learning_rate": 6.415754923413567e-06, |
| "loss": 13.2553, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.9480800671645089, |
| "grad_norm": 14.567325592041016, |
| "learning_rate": 6.343544857768053e-06, |
| "loss": 12.6151, |
| "step": 1341 |
| }, |
| { |
| "epoch": 0.9487870619946092, |
| "grad_norm": 15.460344314575195, |
| "learning_rate": 6.271334792122538e-06, |
| "loss": 10.808, |
| "step": 1342 |
| }, |
| { |
| "epoch": 0.9494940568247094, |
| "grad_norm": 13.62412166595459, |
| "learning_rate": 6.199124726477024e-06, |
| "loss": 12.6647, |
| "step": 1343 |
| }, |
| { |
| "epoch": 0.9502010516548097, |
| "grad_norm": 14.015798568725586, |
| "learning_rate": 6.126914660831509e-06, |
| "loss": 11.7514, |
| "step": 1344 |
| }, |
| { |
| "epoch": 0.9509080464849101, |
| "grad_norm": 15.991093635559082, |
| "learning_rate": 6.054704595185995e-06, |
| "loss": 12.831, |
| "step": 1345 |
| }, |
| { |
| "epoch": 0.9516150413150104, |
| "grad_norm": 14.038092613220215, |
| "learning_rate": 5.982494529540482e-06, |
| "loss": 11.2131, |
| "step": 1346 |
| }, |
| { |
| "epoch": 0.9523220361451107, |
| "grad_norm": 19.56915855407715, |
| "learning_rate": 5.9102844638949674e-06, |
| "loss": 12.5214, |
| "step": 1347 |
| }, |
| { |
| "epoch": 0.953029030975211, |
| "grad_norm": 13.879122734069824, |
| "learning_rate": 5.8380743982494535e-06, |
| "loss": 11.75, |
| "step": 1348 |
| }, |
| { |
| "epoch": 0.9537360258053112, |
| "grad_norm": 14.805500984191895, |
| "learning_rate": 5.765864332603939e-06, |
| "loss": 12.5357, |
| "step": 1349 |
| }, |
| { |
| "epoch": 0.9544430206354116, |
| "grad_norm": 12.95010757446289, |
| "learning_rate": 5.693654266958425e-06, |
| "loss": 10.1176, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.9551500154655119, |
| "grad_norm": 14.349968910217285, |
| "learning_rate": 5.621444201312911e-06, |
| "loss": 10.9632, |
| "step": 1351 |
| }, |
| { |
| "epoch": 0.9558570102956122, |
| "grad_norm": 16.88902473449707, |
| "learning_rate": 5.549234135667396e-06, |
| "loss": 11.671, |
| "step": 1352 |
| }, |
| { |
| "epoch": 0.9565640051257125, |
| "grad_norm": 14.569315910339355, |
| "learning_rate": 5.477024070021882e-06, |
| "loss": 11.9923, |
| "step": 1353 |
| }, |
| { |
| "epoch": 0.9572709999558128, |
| "grad_norm": 14.235941886901855, |
| "learning_rate": 5.404814004376368e-06, |
| "loss": 11.1376, |
| "step": 1354 |
| }, |
| { |
| "epoch": 0.9579779947859132, |
| "grad_norm": 16.43010711669922, |
| "learning_rate": 5.332603938730853e-06, |
| "loss": 11.198, |
| "step": 1355 |
| }, |
| { |
| "epoch": 0.9586849896160134, |
| "grad_norm": 14.069321632385254, |
| "learning_rate": 5.260393873085339e-06, |
| "loss": 10.3045, |
| "step": 1356 |
| }, |
| { |
| "epoch": 0.9593919844461137, |
| "grad_norm": 15.589603424072266, |
| "learning_rate": 5.188183807439824e-06, |
| "loss": 12.1372, |
| "step": 1357 |
| }, |
| { |
| "epoch": 0.960098979276214, |
| "grad_norm": 15.514388084411621, |
| "learning_rate": 5.11597374179431e-06, |
| "loss": 12.2847, |
| "step": 1358 |
| }, |
| { |
| "epoch": 0.9608059741063143, |
| "grad_norm": 13.620744705200195, |
| "learning_rate": 5.043763676148797e-06, |
| "loss": 12.6984, |
| "step": 1359 |
| }, |
| { |
| "epoch": 0.9615129689364147, |
| "grad_norm": 14.314813613891602, |
| "learning_rate": 4.9715536105032825e-06, |
| "loss": 12.9155, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.962219963766515, |
| "grad_norm": 14.13175106048584, |
| "learning_rate": 4.8993435448577685e-06, |
| "loss": 10.4515, |
| "step": 1361 |
| }, |
| { |
| "epoch": 0.9629269585966153, |
| "grad_norm": 13.860292434692383, |
| "learning_rate": 4.827133479212254e-06, |
| "loss": 10.8381, |
| "step": 1362 |
| }, |
| { |
| "epoch": 0.9636339534267155, |
| "grad_norm": 15.983375549316406, |
| "learning_rate": 4.75492341356674e-06, |
| "loss": 12.8587, |
| "step": 1363 |
| }, |
| { |
| "epoch": 0.9643409482568158, |
| "grad_norm": 14.781173706054688, |
| "learning_rate": 4.682713347921226e-06, |
| "loss": 11.3576, |
| "step": 1364 |
| }, |
| { |
| "epoch": 0.9650479430869162, |
| "grad_norm": 14.570517539978027, |
| "learning_rate": 4.610503282275712e-06, |
| "loss": 11.6258, |
| "step": 1365 |
| }, |
| { |
| "epoch": 0.9657549379170165, |
| "grad_norm": 14.185210227966309, |
| "learning_rate": 4.538293216630197e-06, |
| "loss": 12.2246, |
| "step": 1366 |
| }, |
| { |
| "epoch": 0.9664619327471168, |
| "grad_norm": 13.819804191589355, |
| "learning_rate": 4.466083150984683e-06, |
| "loss": 12.2629, |
| "step": 1367 |
| }, |
| { |
| "epoch": 0.9671689275772171, |
| "grad_norm": 15.09450912475586, |
| "learning_rate": 4.393873085339168e-06, |
| "loss": 11.8637, |
| "step": 1368 |
| }, |
| { |
| "epoch": 0.9678759224073173, |
| "grad_norm": 12.672414779663086, |
| "learning_rate": 4.321663019693654e-06, |
| "loss": 10.5733, |
| "step": 1369 |
| }, |
| { |
| "epoch": 0.9685829172374177, |
| "grad_norm": 15.714422225952148, |
| "learning_rate": 4.24945295404814e-06, |
| "loss": 12.4989, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.969289912067518, |
| "grad_norm": 16.135835647583008, |
| "learning_rate": 4.177242888402626e-06, |
| "loss": 11.4435, |
| "step": 1371 |
| }, |
| { |
| "epoch": 0.9699969068976183, |
| "grad_norm": 15.989006996154785, |
| "learning_rate": 4.105032822757112e-06, |
| "loss": 12.7837, |
| "step": 1372 |
| }, |
| { |
| "epoch": 0.9707039017277186, |
| "grad_norm": 14.65039348602295, |
| "learning_rate": 4.0328227571115975e-06, |
| "loss": 11.1327, |
| "step": 1373 |
| }, |
| { |
| "epoch": 0.9714108965578189, |
| "grad_norm": 14.098581314086914, |
| "learning_rate": 3.9606126914660835e-06, |
| "loss": 13.5108, |
| "step": 1374 |
| }, |
| { |
| "epoch": 0.9721178913879193, |
| "grad_norm": 14.409170150756836, |
| "learning_rate": 3.8884026258205695e-06, |
| "loss": 11.501, |
| "step": 1375 |
| }, |
| { |
| "epoch": 0.9728248862180195, |
| "grad_norm": 15.440765380859375, |
| "learning_rate": 3.816192560175055e-06, |
| "loss": 11.2453, |
| "step": 1376 |
| }, |
| { |
| "epoch": 0.9735318810481198, |
| "grad_norm": 13.261435508728027, |
| "learning_rate": 3.7439824945295407e-06, |
| "loss": 11.9026, |
| "step": 1377 |
| }, |
| { |
| "epoch": 0.9742388758782201, |
| "grad_norm": 13.441767692565918, |
| "learning_rate": 3.6717724288840268e-06, |
| "loss": 12.8835, |
| "step": 1378 |
| }, |
| { |
| "epoch": 0.9749458707083204, |
| "grad_norm": 16.43163299560547, |
| "learning_rate": 3.599562363238512e-06, |
| "loss": 12.0741, |
| "step": 1379 |
| }, |
| { |
| "epoch": 0.9756528655384208, |
| "grad_norm": 13.344820022583008, |
| "learning_rate": 3.527352297592998e-06, |
| "loss": 11.7269, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.9763598603685211, |
| "grad_norm": 16.93538475036621, |
| "learning_rate": 3.455142231947484e-06, |
| "loss": 12.4897, |
| "step": 1381 |
| }, |
| { |
| "epoch": 0.9770668551986214, |
| "grad_norm": 15.433717727661133, |
| "learning_rate": 3.382932166301969e-06, |
| "loss": 12.7013, |
| "step": 1382 |
| }, |
| { |
| "epoch": 0.9777738500287216, |
| "grad_norm": 16.81320571899414, |
| "learning_rate": 3.3107221006564552e-06, |
| "loss": 12.175, |
| "step": 1383 |
| }, |
| { |
| "epoch": 0.9784808448588219, |
| "grad_norm": 14.076522827148438, |
| "learning_rate": 3.2385120350109413e-06, |
| "loss": 11.0918, |
| "step": 1384 |
| }, |
| { |
| "epoch": 0.9791878396889223, |
| "grad_norm": 15.008047103881836, |
| "learning_rate": 3.1663019693654264e-06, |
| "loss": 11.1541, |
| "step": 1385 |
| }, |
| { |
| "epoch": 0.9798948345190226, |
| "grad_norm": 16.03972053527832, |
| "learning_rate": 3.0940919037199125e-06, |
| "loss": 11.2622, |
| "step": 1386 |
| }, |
| { |
| "epoch": 0.9806018293491229, |
| "grad_norm": 12.36665153503418, |
| "learning_rate": 3.0218818380743985e-06, |
| "loss": 11.854, |
| "step": 1387 |
| }, |
| { |
| "epoch": 0.9813088241792232, |
| "grad_norm": 15.743560791015625, |
| "learning_rate": 2.9496717724288845e-06, |
| "loss": 11.1556, |
| "step": 1388 |
| }, |
| { |
| "epoch": 0.9820158190093234, |
| "grad_norm": 13.828715324401855, |
| "learning_rate": 2.8774617067833697e-06, |
| "loss": 12.3923, |
| "step": 1389 |
| }, |
| { |
| "epoch": 0.9827228138394238, |
| "grad_norm": 15.561683654785156, |
| "learning_rate": 2.8052516411378558e-06, |
| "loss": 12.3997, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.9834298086695241, |
| "grad_norm": 15.786792755126953, |
| "learning_rate": 2.7330415754923414e-06, |
| "loss": 12.4365, |
| "step": 1391 |
| }, |
| { |
| "epoch": 0.9841368034996244, |
| "grad_norm": 14.015902519226074, |
| "learning_rate": 2.660831509846827e-06, |
| "loss": 12.3683, |
| "step": 1392 |
| }, |
| { |
| "epoch": 0.9848437983297247, |
| "grad_norm": 12.718472480773926, |
| "learning_rate": 2.588621444201313e-06, |
| "loss": 11.3825, |
| "step": 1393 |
| }, |
| { |
| "epoch": 0.985550793159825, |
| "grad_norm": 14.474007606506348, |
| "learning_rate": 2.5164113785557986e-06, |
| "loss": 11.0584, |
| "step": 1394 |
| }, |
| { |
| "epoch": 0.9862577879899254, |
| "grad_norm": 15.664868354797363, |
| "learning_rate": 2.4442013129102846e-06, |
| "loss": 12.1448, |
| "step": 1395 |
| }, |
| { |
| "epoch": 0.9869647828200256, |
| "grad_norm": 13.61034107208252, |
| "learning_rate": 2.3719912472647702e-06, |
| "loss": 11.5551, |
| "step": 1396 |
| }, |
| { |
| "epoch": 0.9876717776501259, |
| "grad_norm": 14.565057754516602, |
| "learning_rate": 2.2997811816192563e-06, |
| "loss": 11.7505, |
| "step": 1397 |
| }, |
| { |
| "epoch": 0.9883787724802262, |
| "grad_norm": 16.881675720214844, |
| "learning_rate": 2.227571115973742e-06, |
| "loss": 11.632, |
| "step": 1398 |
| }, |
| { |
| "epoch": 0.9890857673103265, |
| "grad_norm": 17.384876251220703, |
| "learning_rate": 2.1553610503282275e-06, |
| "loss": 12.2454, |
| "step": 1399 |
| }, |
| { |
| "epoch": 0.9897927621404269, |
| "grad_norm": 14.5512056350708, |
| "learning_rate": 2.0831509846827135e-06, |
| "loss": 12.0764, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.9904997569705272, |
| "grad_norm": 15.20163345336914, |
| "learning_rate": 2.0109409190371995e-06, |
| "loss": 11.3819, |
| "step": 1401 |
| }, |
| { |
| "epoch": 0.9912067518006275, |
| "grad_norm": 13.974905967712402, |
| "learning_rate": 1.9387308533916847e-06, |
| "loss": 12.4306, |
| "step": 1402 |
| }, |
| { |
| "epoch": 0.9919137466307277, |
| "grad_norm": 13.457085609436035, |
| "learning_rate": 1.8665207877461708e-06, |
| "loss": 11.195, |
| "step": 1403 |
| }, |
| { |
| "epoch": 0.992620741460828, |
| "grad_norm": 12.736709594726562, |
| "learning_rate": 1.7943107221006566e-06, |
| "loss": 10.1447, |
| "step": 1404 |
| }, |
| { |
| "epoch": 0.9933277362909284, |
| "grad_norm": 14.890710830688477, |
| "learning_rate": 1.7221006564551424e-06, |
| "loss": 11.5008, |
| "step": 1405 |
| }, |
| { |
| "epoch": 0.9940347311210287, |
| "grad_norm": 14.965149879455566, |
| "learning_rate": 1.649890590809628e-06, |
| "loss": 10.8517, |
| "step": 1406 |
| }, |
| { |
| "epoch": 0.994741725951129, |
| "grad_norm": 13.528691291809082, |
| "learning_rate": 1.5776805251641138e-06, |
| "loss": 11.7516, |
| "step": 1407 |
| }, |
| { |
| "epoch": 0.9954487207812293, |
| "grad_norm": 13.84748363494873, |
| "learning_rate": 1.5054704595185996e-06, |
| "loss": 11.2027, |
| "step": 1408 |
| }, |
| { |
| "epoch": 0.9961557156113295, |
| "grad_norm": 14.555986404418945, |
| "learning_rate": 1.4332603938730853e-06, |
| "loss": 12.3957, |
| "step": 1409 |
| }, |
| { |
| "epoch": 0.9968627104414299, |
| "grad_norm": 15.274754524230957, |
| "learning_rate": 1.361050328227571e-06, |
| "loss": 12.8592, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.9975697052715302, |
| "grad_norm": 15.99559497833252, |
| "learning_rate": 1.2888402625820569e-06, |
| "loss": 11.2982, |
| "step": 1411 |
| }, |
| { |
| "epoch": 0.9982767001016305, |
| "grad_norm": 14.77381420135498, |
| "learning_rate": 1.2166301969365427e-06, |
| "loss": 12.3372, |
| "step": 1412 |
| }, |
| { |
| "epoch": 0.9989836949317308, |
| "grad_norm": 13.04557991027832, |
| "learning_rate": 1.1444201312910285e-06, |
| "loss": 11.7988, |
| "step": 1413 |
| }, |
| { |
| "epoch": 0.9996906897618311, |
| "grad_norm": 14.513769149780273, |
| "learning_rate": 1.0722100656455141e-06, |
| "loss": 11.4676, |
| "step": 1414 |
| }, |
| { |
| "epoch": 0.9996906897618311, |
| "step": 1414, |
| "total_flos": 0.0, |
| "train_loss": 15.723444231160123, |
| "train_runtime": 27373.0059, |
| "train_samples_per_second": 13.228, |
| "train_steps_per_second": 0.052 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 1414, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": false, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 0.0, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |