{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.96,
"eval_steps": 500,
"global_step": 600,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0016,
"grad_norm": 18.498022079467773,
"learning_rate": 0.0,
"loss": 11.9034,
"step": 1
},
{
"epoch": 0.0032,
"grad_norm": 19.59610366821289,
"learning_rate": 7.142857142857143e-07,
"loss": 12.361,
"step": 2
},
{
"epoch": 0.0048,
"grad_norm": 19.05824089050293,
"learning_rate": 1.4285714285714286e-06,
"loss": 12.3027,
"step": 3
},
{
"epoch": 0.0064,
"grad_norm": 18.656831741333008,
"learning_rate": 2.1428571428571427e-06,
"loss": 11.9369,
"step": 4
},
{
"epoch": 0.008,
"grad_norm": 19.260093688964844,
"learning_rate": 2.8571428571428573e-06,
"loss": 12.0124,
"step": 5
},
{
"epoch": 0.0096,
"grad_norm": 18.27457046508789,
"learning_rate": 3.5714285714285718e-06,
"loss": 11.8957,
"step": 6
},
{
"epoch": 0.0112,
"grad_norm": 18.090967178344727,
"learning_rate": 4.2857142857142855e-06,
"loss": 12.0549,
"step": 7
},
{
"epoch": 0.0128,
"grad_norm": 18.144412994384766,
"learning_rate": 5e-06,
"loss": 12.0455,
"step": 8
},
{
"epoch": 0.0144,
"grad_norm": 17.77107048034668,
"learning_rate": 4.999967697817016e-06,
"loss": 12.0381,
"step": 9
},
{
"epoch": 0.016,
"grad_norm": 19.326251983642578,
"learning_rate": 4.9998707921028104e-06,
"loss": 12.2511,
"step": 10
},
{
"epoch": 0.0176,
"grad_norm": 20.007396697998047,
"learning_rate": 4.999709285361594e-06,
"loss": 12.5214,
"step": 11
},
{
"epoch": 0.0192,
"grad_norm": 18.9156551361084,
"learning_rate": 4.999483181766986e-06,
"loss": 12.226,
"step": 12
},
{
"epoch": 0.0208,
"grad_norm": 16.71934700012207,
"learning_rate": 4.999192487161895e-06,
"loss": 11.63,
"step": 13
},
{
"epoch": 0.0224,
"grad_norm": 16.819419860839844,
"learning_rate": 4.998837209058379e-06,
"loss": 11.8643,
"step": 14
},
{
"epoch": 0.024,
"grad_norm": 17.38993263244629,
"learning_rate": 4.998417356637445e-06,
"loss": 12.0876,
"step": 15
},
{
"epoch": 0.0256,
"grad_norm": 17.411874771118164,
"learning_rate": 4.997932940748811e-06,
"loss": 11.9954,
"step": 16
},
{
"epoch": 0.0272,
"grad_norm": 17.002464294433594,
"learning_rate": 4.997383973910631e-06,
"loss": 12.0872,
"step": 17
},
{
"epoch": 0.0288,
"grad_norm": 16.964523315429688,
"learning_rate": 4.996770470309167e-06,
"loss": 11.9659,
"step": 18
},
{
"epoch": 0.0304,
"grad_norm": 17.12218475341797,
"learning_rate": 4.9960924457984225e-06,
"loss": 11.9041,
"step": 19
},
{
"epoch": 0.032,
"grad_norm": 15.89167308807373,
"learning_rate": 4.995349917899735e-06,
"loss": 11.5677,
"step": 20
},
{
"epoch": 0.0336,
"grad_norm": 17.404312133789062,
"learning_rate": 4.994542905801323e-06,
"loss": 12.0405,
"step": 21
},
{
"epoch": 0.0352,
"grad_norm": 17.553056716918945,
"learning_rate": 4.993671430357788e-06,
"loss": 11.9337,
"step": 22
},
{
"epoch": 0.0368,
"grad_norm": 17.339885711669922,
"learning_rate": 4.9927355140895775e-06,
"loss": 11.9247,
"step": 23
},
{
"epoch": 0.0384,
"grad_norm": 16.14508056640625,
"learning_rate": 4.991735181182401e-06,
"loss": 11.5418,
"step": 24
},
{
"epoch": 0.04,
"grad_norm": 14.626124382019043,
"learning_rate": 4.9906704574866105e-06,
"loss": 11.4515,
"step": 25
},
{
"epoch": 0.0416,
"grad_norm": 17.168630599975586,
"learning_rate": 4.989541370516523e-06,
"loss": 11.9094,
"step": 26
},
{
"epoch": 0.0432,
"grad_norm": 15.818598747253418,
"learning_rate": 4.98834794944972e-06,
"loss": 11.7249,
"step": 27
},
{
"epoch": 0.0448,
"grad_norm": 15.550960540771484,
"learning_rate": 4.987090225126285e-06,
"loss": 11.3948,
"step": 28
},
{
"epoch": 0.0464,
"grad_norm": 15.328129768371582,
"learning_rate": 4.985768230048011e-06,
"loss": 11.3551,
"step": 29
},
{
"epoch": 0.048,
"grad_norm": 13.792865753173828,
"learning_rate": 4.9843819983775575e-06,
"loss": 11.1959,
"step": 30
},
{
"epoch": 0.0496,
"grad_norm": 15.154494285583496,
"learning_rate": 4.982931565937575e-06,
"loss": 11.7186,
"step": 31
},
{
"epoch": 0.0512,
"grad_norm": 14.210274696350098,
"learning_rate": 4.98141697020977e-06,
"loss": 11.3324,
"step": 32
},
{
"epoch": 0.0528,
"grad_norm": 14.419412612915039,
"learning_rate": 4.979838250333941e-06,
"loss": 11.0964,
"step": 33
},
{
"epoch": 0.0544,
"grad_norm": 14.476765632629395,
"learning_rate": 4.978195447106965e-06,
"loss": 11.4052,
"step": 34
},
{
"epoch": 0.056,
"grad_norm": 13.166458129882812,
"learning_rate": 4.976488602981748e-06,
"loss": 11.1172,
"step": 35
},
{
"epoch": 0.0576,
"grad_norm": 13.77320384979248,
"learning_rate": 4.974717762066123e-06,
"loss": 11.2174,
"step": 36
},
{
"epoch": 0.0592,
"grad_norm": 13.610458374023438,
"learning_rate": 4.972882970121711e-06,
"loss": 11.0993,
"step": 37
},
{
"epoch": 0.0608,
"grad_norm": 13.38510513305664,
"learning_rate": 4.970984274562741e-06,
"loss": 11.0244,
"step": 38
},
{
"epoch": 0.0624,
"grad_norm": 13.513715744018555,
"learning_rate": 4.969021724454821e-06,
"loss": 11.0893,
"step": 39
},
{
"epoch": 0.064,
"grad_norm": 12.476004600524902,
"learning_rate": 4.966995370513675e-06,
"loss": 10.9256,
"step": 40
},
{
"epoch": 0.0656,
"grad_norm": 13.007659912109375,
"learning_rate": 4.9649052651038255e-06,
"loss": 10.897,
"step": 41
},
{
"epoch": 0.0672,
"grad_norm": 12.74197006225586,
"learning_rate": 4.962751462237248e-06,
"loss": 10.9702,
"step": 42
},
{
"epoch": 0.0688,
"grad_norm": 11.668892860412598,
"learning_rate": 4.9605340175719686e-06,
"loss": 10.6871,
"step": 43
},
{
"epoch": 0.0704,
"grad_norm": 12.722009658813477,
"learning_rate": 4.958252988410631e-06,
"loss": 10.7933,
"step": 44
},
{
"epoch": 0.072,
"grad_norm": 10.594651222229004,
"learning_rate": 4.955908433699013e-06,
"loss": 10.2903,
"step": 45
},
{
"epoch": 0.0736,
"grad_norm": 12.946322441101074,
"learning_rate": 4.9535004140245005e-06,
"loss": 11.141,
"step": 46
},
{
"epoch": 0.0752,
"grad_norm": 12.783811569213867,
"learning_rate": 4.9510289916145295e-06,
"loss": 10.779,
"step": 47
},
{
"epoch": 0.0768,
"grad_norm": 11.354564666748047,
"learning_rate": 4.94849423033497e-06,
"loss": 10.5837,
"step": 48
},
{
"epoch": 0.0784,
"grad_norm": 11.848569869995117,
"learning_rate": 4.94589619568848e-06,
"loss": 10.5706,
"step": 49
},
{
"epoch": 0.08,
"grad_norm": 9.897051811218262,
"learning_rate": 4.943234954812812e-06,
"loss": 10.2364,
"step": 50
},
{
"epoch": 0.0816,
"grad_norm": 11.316352844238281,
"learning_rate": 4.940510576479079e-06,
"loss": 10.6149,
"step": 51
},
{
"epoch": 0.0832,
"grad_norm": 11.829602241516113,
"learning_rate": 4.937723131089974e-06,
"loss": 10.5496,
"step": 52
},
{
"epoch": 0.0848,
"grad_norm": 10.069260597229004,
"learning_rate": 4.934872690677953e-06,
"loss": 10.0925,
"step": 53
},
{
"epoch": 0.0864,
"grad_norm": 11.005655288696289,
"learning_rate": 4.931959328903376e-06,
"loss": 10.3874,
"step": 54
},
{
"epoch": 0.088,
"grad_norm": 10.913771629333496,
"learning_rate": 4.928983121052597e-06,
"loss": 10.3699,
"step": 55
},
{
"epoch": 0.0896,
"grad_norm": 10.943140983581543,
"learning_rate": 4.925944144036027e-06,
"loss": 10.3604,
"step": 56
},
{
"epoch": 0.0912,
"grad_norm": 11.520155906677246,
"learning_rate": 4.922842476386137e-06,
"loss": 10.45,
"step": 57
},
{
"epoch": 0.0928,
"grad_norm": 11.597639083862305,
"learning_rate": 4.919678198255438e-06,
"loss": 10.6551,
"step": 58
},
{
"epoch": 0.0944,
"grad_norm": 11.26528549194336,
"learning_rate": 4.9164513914144005e-06,
"loss": 10.3729,
"step": 59
},
{
"epoch": 0.096,
"grad_norm": 11.043292045593262,
"learning_rate": 4.91316213924935e-06,
"loss": 10.2723,
"step": 60
},
{
"epoch": 0.0976,
"grad_norm": 10.670892715454102,
"learning_rate": 4.909810526760307e-06,
"loss": 10.3842,
"step": 61
},
{
"epoch": 0.0992,
"grad_norm": 12.231328010559082,
"learning_rate": 4.90639664055879e-06,
"loss": 10.4897,
"step": 62
},
{
"epoch": 0.1008,
"grad_norm": 10.968878746032715,
"learning_rate": 4.902920568865582e-06,
"loss": 10.225,
"step": 63
},
{
"epoch": 0.1024,
"grad_norm": 10.761625289916992,
"learning_rate": 4.899382401508446e-06,
"loss": 10.0817,
"step": 64
},
{
"epoch": 0.104,
"grad_norm": 9.868699073791504,
"learning_rate": 4.8957822299198045e-06,
"loss": 9.9514,
"step": 65
},
{
"epoch": 0.1056,
"grad_norm": 10.76175308227539,
"learning_rate": 4.892120147134378e-06,
"loss": 10.2281,
"step": 66
},
{
"epoch": 0.1072,
"grad_norm": 10.531647682189941,
"learning_rate": 4.888396247786784e-06,
"loss": 10.0424,
"step": 67
},
{
"epoch": 0.1088,
"grad_norm": 9.720768928527832,
"learning_rate": 4.884610628109082e-06,
"loss": 9.8735,
"step": 68
},
{
"epoch": 0.1104,
"grad_norm": 10.765035629272461,
"learning_rate": 4.880763385928298e-06,
"loss": 10.1977,
"step": 69
},
{
"epoch": 0.112,
"grad_norm": 10.57542896270752,
"learning_rate": 4.876854620663887e-06,
"loss": 9.9432,
"step": 70
},
{
"epoch": 0.1136,
"grad_norm": 10.321698188781738,
"learning_rate": 4.872884433325169e-06,
"loss": 9.9563,
"step": 71
},
{
"epoch": 0.1152,
"grad_norm": 10.044979095458984,
"learning_rate": 4.868852926508721e-06,
"loss": 9.9412,
"step": 72
},
{
"epoch": 0.1168,
"grad_norm": 10.604913711547852,
"learning_rate": 4.864760204395717e-06,
"loss": 10.0144,
"step": 73
},
{
"epoch": 0.1184,
"grad_norm": 9.378023147583008,
"learning_rate": 4.860606372749247e-06,
"loss": 9.8055,
"step": 74
},
{
"epoch": 0.12,
"grad_norm": 9.08212947845459,
"learning_rate": 4.856391538911572e-06,
"loss": 9.7179,
"step": 75
},
{
"epoch": 0.1216,
"grad_norm": 10.120437622070312,
"learning_rate": 4.8521158118013605e-06,
"loss": 9.7055,
"step": 76
},
{
"epoch": 0.1232,
"grad_norm": 9.448174476623535,
"learning_rate": 4.847779301910868e-06,
"loss": 9.7753,
"step": 77
},
{
"epoch": 0.1248,
"grad_norm": 9.994352340698242,
"learning_rate": 4.843382121303082e-06,
"loss": 9.8005,
"step": 78
},
{
"epoch": 0.1264,
"grad_norm": 10.175590515136719,
"learning_rate": 4.83892438360883e-06,
"loss": 9.7149,
"step": 79
},
{
"epoch": 0.128,
"grad_norm": 9.187248229980469,
"learning_rate": 4.83440620402384e-06,
"loss": 9.5077,
"step": 80
},
{
"epoch": 0.1296,
"grad_norm": 8.287508964538574,
"learning_rate": 4.829827699305759e-06,
"loss": 9.2745,
"step": 81
},
{
"epoch": 0.1312,
"grad_norm": 9.502992630004883,
"learning_rate": 4.825188987771149e-06,
"loss": 9.7212,
"step": 82
},
{
"epoch": 0.1328,
"grad_norm": 9.398665428161621,
"learning_rate": 4.820490189292415e-06,
"loss": 9.4711,
"step": 83
},
{
"epoch": 0.1344,
"grad_norm": 9.67027473449707,
"learning_rate": 4.815731425294716e-06,
"loss": 9.6444,
"step": 84
},
{
"epoch": 0.136,
"grad_norm": 9.999480247497559,
"learning_rate": 4.810912818752825e-06,
"loss": 9.6473,
"step": 85
},
{
"epoch": 0.1376,
"grad_norm": 10.192490577697754,
"learning_rate": 4.806034494187949e-06,
"loss": 9.5,
"step": 86
},
{
"epoch": 0.1392,
"grad_norm": 10.132280349731445,
"learning_rate": 4.8010965776645145e-06,
"loss": 9.5879,
"step": 87
},
{
"epoch": 0.1408,
"grad_norm": 10.07225227355957,
"learning_rate": 4.796099196786908e-06,
"loss": 9.4842,
"step": 88
},
{
"epoch": 0.1424,
"grad_norm": 9.903701782226562,
"learning_rate": 4.791042480696179e-06,
"loss": 9.5136,
"step": 89
},
{
"epoch": 0.144,
"grad_norm": 9.270772933959961,
"learning_rate": 4.785926560066703e-06,
"loss": 9.367,
"step": 90
},
{
"epoch": 0.1456,
"grad_norm": 8.604142189025879,
"learning_rate": 4.780751567102801e-06,
"loss": 9.1618,
"step": 91
},
{
"epoch": 0.1472,
"grad_norm": 9.69423770904541,
"learning_rate": 4.775517635535332e-06,
"loss": 9.2597,
"step": 92
},
{
"epoch": 0.1488,
"grad_norm": 10.679227828979492,
"learning_rate": 4.770224900618226e-06,
"loss": 9.3722,
"step": 93
},
{
"epoch": 0.1504,
"grad_norm": 9.498805046081543,
"learning_rate": 4.764873499124997e-06,
"loss": 9.2149,
"step": 94
},
{
"epoch": 0.152,
"grad_norm": 10.133721351623535,
"learning_rate": 4.759463569345205e-06,
"loss": 9.211,
"step": 95
},
{
"epoch": 0.1536,
"grad_norm": 10.703787803649902,
"learning_rate": 4.753995251080884e-06,
"loss": 9.2456,
"step": 96
},
{
"epoch": 0.1552,
"grad_norm": 9.839086532592773,
"learning_rate": 4.748468685642926e-06,
"loss": 9.0973,
"step": 97
},
{
"epoch": 0.1568,
"grad_norm": 8.186079978942871,
"learning_rate": 4.742884015847436e-06,
"loss": 8.9378,
"step": 98
},
{
"epoch": 0.1584,
"grad_norm": 9.491665840148926,
"learning_rate": 4.737241386012032e-06,
"loss": 9.0209,
"step": 99
},
{
"epoch": 0.16,
"grad_norm": 9.313272476196289,
"learning_rate": 4.731540941952126e-06,
"loss": 9.0591,
"step": 100
},
{
"epoch": 0.1616,
"grad_norm": 9.365504264831543,
"learning_rate": 4.725782830977145e-06,
"loss": 9.0187,
"step": 101
},
{
"epoch": 0.1632,
"grad_norm": 9.761168479919434,
"learning_rate": 4.719967201886734e-06,
"loss": 8.9663,
"step": 102
},
{
"epoch": 0.1648,
"grad_norm": 10.886737823486328,
"learning_rate": 4.714094204966903e-06,
"loss": 9.0523,
"step": 103
},
{
"epoch": 0.1664,
"grad_norm": 11.529026985168457,
"learning_rate": 4.708163991986152e-06,
"loss": 8.9862,
"step": 104
},
{
"epoch": 0.168,
"grad_norm": 11.003949165344238,
"learning_rate": 4.702176716191539e-06,
"loss": 8.9944,
"step": 105
},
{
"epoch": 0.1696,
"grad_norm": 10.637957572937012,
"learning_rate": 4.696132532304727e-06,
"loss": 8.7661,
"step": 106
},
{
"epoch": 0.1712,
"grad_norm": 10.600850105285645,
"learning_rate": 4.690031596517984e-06,
"loss": 8.6939,
"step": 107
},
{
"epoch": 0.1728,
"grad_norm": 10.38036823272705,
"learning_rate": 4.683874066490143e-06,
"loss": 8.8789,
"step": 108
},
{
"epoch": 0.1744,
"grad_norm": 10.32800006866455,
"learning_rate": 4.677660101342536e-06,
"loss": 8.7128,
"step": 109
},
{
"epoch": 0.176,
"grad_norm": 9.01140022277832,
"learning_rate": 4.671389861654873e-06,
"loss": 8.5539,
"step": 110
},
{
"epoch": 0.1776,
"grad_norm": 10.66911792755127,
"learning_rate": 4.665063509461098e-06,
"loss": 8.6381,
"step": 111
},
{
"epoch": 0.1792,
"grad_norm": 12.453883171081543,
"learning_rate": 4.658681208245198e-06,
"loss": 8.744,
"step": 112
},
{
"epoch": 0.1808,
"grad_norm": 10.25079345703125,
"learning_rate": 4.652243122936987e-06,
"loss": 8.4376,
"step": 113
},
{
"epoch": 0.1824,
"grad_norm": 13.820728302001953,
"learning_rate": 4.645749419907829e-06,
"loss": 8.7434,
"step": 114
},
{
"epoch": 0.184,
"grad_norm": 9.763056755065918,
"learning_rate": 4.639200266966351e-06,
"loss": 8.5522,
"step": 115
},
{
"epoch": 0.1856,
"grad_norm": 12.80479907989502,
"learning_rate": 4.632595833354105e-06,
"loss": 8.5777,
"step": 116
},
{
"epoch": 0.1872,
"grad_norm": 11.518301963806152,
"learning_rate": 4.625936289741187e-06,
"loss": 8.2751,
"step": 117
},
{
"epoch": 0.1888,
"grad_norm": 13.24626350402832,
"learning_rate": 4.619221808221833e-06,
"loss": 8.4951,
"step": 118
},
{
"epoch": 0.1904,
"grad_norm": 14.117487907409668,
"learning_rate": 4.612452562309975e-06,
"loss": 8.3492,
"step": 119
},
{
"epoch": 0.192,
"grad_norm": 12.98033618927002,
"learning_rate": 4.605628726934747e-06,
"loss": 8.2238,
"step": 120
},
{
"epoch": 0.1936,
"grad_norm": 11.61721420288086,
"learning_rate": 4.598750478435973e-06,
"loss": 8.2197,
"step": 121
},
{
"epoch": 0.1952,
"grad_norm": 12.619826316833496,
"learning_rate": 4.5918179945596055e-06,
"loss": 8.2005,
"step": 122
},
{
"epoch": 0.1968,
"grad_norm": 11.586774826049805,
"learning_rate": 4.584831454453135e-06,
"loss": 8.0181,
"step": 123
},
{
"epoch": 0.1984,
"grad_norm": 11.792032241821289,
"learning_rate": 4.577791038660959e-06,
"loss": 8.1414,
"step": 124
},
{
"epoch": 0.2,
"grad_norm": 14.116670608520508,
"learning_rate": 4.570696929119717e-06,
"loss": 7.9288,
"step": 125
},
{
"epoch": 0.2016,
"grad_norm": 13.249540328979492,
"learning_rate": 4.563549309153589e-06,
"loss": 7.8991,
"step": 126
},
{
"epoch": 0.2032,
"grad_norm": 13.10855770111084,
"learning_rate": 4.556348363469556e-06,
"loss": 7.7997,
"step": 127
},
{
"epoch": 0.2048,
"grad_norm": 13.13003921508789,
"learning_rate": 4.549094278152631e-06,
"loss": 7.7856,
"step": 128
},
{
"epoch": 0.2064,
"grad_norm": 11.22130012512207,
"learning_rate": 4.541787240661049e-06,
"loss": 7.8092,
"step": 129
},
{
"epoch": 0.208,
"grad_norm": 12.641518592834473,
"learning_rate": 4.534427439821416e-06,
"loss": 7.6641,
"step": 130
},
{
"epoch": 0.2096,
"grad_norm": 12.413702964782715,
"learning_rate": 4.527015065823841e-06,
"loss": 7.6125,
"step": 131
},
{
"epoch": 0.2112,
"grad_norm": 12.156352043151855,
"learning_rate": 4.519550310217013e-06,
"loss": 7.5519,
"step": 132
},
{
"epoch": 0.2128,
"grad_norm": 11.665326118469238,
"learning_rate": 4.512033365903251e-06,
"loss": 7.5902,
"step": 133
},
{
"epoch": 0.2144,
"grad_norm": 12.924529075622559,
"learning_rate": 4.504464427133527e-06,
"loss": 7.5706,
"step": 134
},
{
"epoch": 0.216,
"grad_norm": 15.648908615112305,
"learning_rate": 4.496843689502435e-06,
"loss": 7.3488,
"step": 135
},
{
"epoch": 0.2176,
"grad_norm": 13.633544921875,
"learning_rate": 4.489171349943144e-06,
"loss": 7.2841,
"step": 136
},
{
"epoch": 0.2192,
"grad_norm": 14.129197120666504,
"learning_rate": 4.481447606722309e-06,
"loss": 7.2605,
"step": 137
},
{
"epoch": 0.2208,
"grad_norm": 15.414569854736328,
"learning_rate": 4.473672659434941e-06,
"loss": 7.1375,
"step": 138
},
{
"epoch": 0.2224,
"grad_norm": 15.15605640411377,
"learning_rate": 4.465846708999258e-06,
"loss": 7.3157,
"step": 139
},
{
"epoch": 0.224,
"grad_norm": 13.083776473999023,
"learning_rate": 4.457969957651485e-06,
"loss": 7.1391,
"step": 140
},
{
"epoch": 0.2256,
"grad_norm": 14.658546447753906,
"learning_rate": 4.450042608940632e-06,
"loss": 6.8354,
"step": 141
},
{
"epoch": 0.2272,
"grad_norm": 13.068192481994629,
"learning_rate": 4.442064867723236e-06,
"loss": 6.9534,
"step": 142
},
{
"epoch": 0.2288,
"grad_norm": 13.683319091796875,
"learning_rate": 4.434036940158062e-06,
"loss": 6.7263,
"step": 143
},
{
"epoch": 0.2304,
"grad_norm": 12.352782249450684,
"learning_rate": 4.425959033700776e-06,
"loss": 6.9508,
"step": 144
},
{
"epoch": 0.232,
"grad_norm": 12.934060096740723,
"learning_rate": 4.417831357098591e-06,
"loss": 6.7608,
"step": 145
},
{
"epoch": 0.2336,
"grad_norm": 10.694926261901855,
"learning_rate": 4.409654120384863e-06,
"loss": 6.9133,
"step": 146
},
{
"epoch": 0.2352,
"grad_norm": 13.42801570892334,
"learning_rate": 4.401427534873669e-06,
"loss": 6.2184,
"step": 147
},
{
"epoch": 0.2368,
"grad_norm": 12.629581451416016,
"learning_rate": 4.393151813154345e-06,
"loss": 6.1405,
"step": 148
},
{
"epoch": 0.2384,
"grad_norm": 9.909472465515137,
"learning_rate": 4.384827169085993e-06,
"loss": 6.376,
"step": 149
},
{
"epoch": 0.24,
"grad_norm": 9.083218574523926,
"learning_rate": 4.3764538177919555e-06,
"loss": 6.387,
"step": 150
},
{
"epoch": 0.2416,
"grad_norm": 8.383381843566895,
"learning_rate": 4.3680319756542525e-06,
"loss": 6.7282,
"step": 151
},
{
"epoch": 0.2432,
"grad_norm": 8.961660385131836,
"learning_rate": 4.35956186030799e-06,
"loss": 6.2623,
"step": 152
},
{
"epoch": 0.2448,
"grad_norm": 9.286286354064941,
"learning_rate": 4.351043690635744e-06,
"loss": 6.0499,
"step": 153
},
{
"epoch": 0.2464,
"grad_norm": 9.719377517700195,
"learning_rate": 4.3424776867618935e-06,
"loss": 6.4369,
"step": 154
},
{
"epoch": 0.248,
"grad_norm": 7.7872700691223145,
"learning_rate": 4.333864070046938e-06,
"loss": 6.0176,
"step": 155
},
{
"epoch": 0.2496,
"grad_norm": 8.78665828704834,
"learning_rate": 4.325203063081776e-06,
"loss": 6.0201,
"step": 156
},
{
"epoch": 0.2512,
"grad_norm": 7.898557186126709,
"learning_rate": 4.3164948896819535e-06,
"loss": 6.1595,
"step": 157
},
{
"epoch": 0.2528,
"grad_norm": 9.21320629119873,
"learning_rate": 4.307739774881878e-06,
"loss": 6.3917,
"step": 158
},
{
"epoch": 0.2544,
"grad_norm": 6.031705379486084,
"learning_rate": 4.298937944929007e-06,
"loss": 5.6134,
"step": 159
},
{
"epoch": 0.256,
"grad_norm": 7.0851006507873535,
"learning_rate": 4.290089627277998e-06,
"loss": 5.4909,
"step": 160
},
{
"epoch": 0.2576,
"grad_norm": 7.172662258148193,
"learning_rate": 4.28119505058483e-06,
"loss": 6.1213,
"step": 161
},
{
"epoch": 0.2592,
"grad_norm": 7.241125583648682,
"learning_rate": 4.2722544447008995e-06,
"loss": 5.9029,
"step": 162
},
{
"epoch": 0.2608,
"grad_norm": 6.174571514129639,
"learning_rate": 4.263268040667075e-06,
"loss": 5.9515,
"step": 163
},
{
"epoch": 0.2624,
"grad_norm": 5.6988935470581055,
"learning_rate": 4.254236070707734e-06,
"loss": 5.8603,
"step": 164
},
{
"epoch": 0.264,
"grad_norm": 5.242663383483887,
"learning_rate": 4.245158768224748e-06,
"loss": 5.6214,
"step": 165
},
{
"epoch": 0.2656,
"grad_norm": 5.694828987121582,
"learning_rate": 4.236036367791471e-06,
"loss": 6.3081,
"step": 166
},
{
"epoch": 0.2672,
"grad_norm": 7.1838788986206055,
"learning_rate": 4.226869105146658e-06,
"loss": 5.4577,
"step": 167
},
{
"epoch": 0.2688,
"grad_norm": 8.755837440490723,
"learning_rate": 4.2176572171883865e-06,
"loss": 5.5286,
"step": 168
},
{
"epoch": 0.2704,
"grad_norm": 4.821361064910889,
"learning_rate": 4.208400941967928e-06,
"loss": 5.8776,
"step": 169
},
{
"epoch": 0.272,
"grad_norm": 5.043092250823975,
"learning_rate": 4.199100518683601e-06,
"loss": 5.6913,
"step": 170
},
{
"epoch": 0.2736,
"grad_norm": 5.266634941101074,
"learning_rate": 4.189756187674584e-06,
"loss": 5.8228,
"step": 171
},
{
"epoch": 0.2752,
"grad_norm": 5.798868179321289,
"learning_rate": 4.18036819041471e-06,
"loss": 5.6908,
"step": 172
},
{
"epoch": 0.2768,
"grad_norm": 5.337583065032959,
"learning_rate": 4.170936769506222e-06,
"loss": 6.0907,
"step": 173
},
{
"epoch": 0.2784,
"grad_norm": 4.3411102294921875,
"learning_rate": 4.161462168673508e-06,
"loss": 5.8087,
"step": 174
},
{
"epoch": 0.28,
"grad_norm": 6.649768829345703,
"learning_rate": 4.1519446327567995e-06,
"loss": 5.2901,
"step": 175
},
{
"epoch": 0.2816,
"grad_norm": 6.4602556228637695,
"learning_rate": 4.142384407705846e-06,
"loss": 5.274,
"step": 176
},
{
"epoch": 0.2832,
"grad_norm": 5.464128017425537,
"learning_rate": 4.132781740573559e-06,
"loss": 6.0373,
"step": 177
},
{
"epoch": 0.2848,
"grad_norm": 6.308886528015137,
"learning_rate": 4.123136879509626e-06,
"loss": 5.2471,
"step": 178
},
{
"epoch": 0.2864,
"grad_norm": 4.4718828201293945,
"learning_rate": 4.1134500737541026e-06,
"loss": 5.3702,
"step": 179
},
{
"epoch": 0.288,
"grad_norm": 3.863276958465576,
"learning_rate": 4.103721573630965e-06,
"loss": 5.3922,
"step": 180
},
{
"epoch": 0.2896,
"grad_norm": 4.226170063018799,
"learning_rate": 4.093951630541646e-06,
"loss": 5.4302,
"step": 181
},
{
"epoch": 0.2912,
"grad_norm": 3.5398764610290527,
"learning_rate": 4.084140496958539e-06,
"loss": 5.3444,
"step": 182
},
{
"epoch": 0.2928,
"grad_norm": 3.5163183212280273,
"learning_rate": 4.074288426418467e-06,
"loss": 5.3247,
"step": 183
},
{
"epoch": 0.2944,
"grad_norm": 5.308356761932373,
"learning_rate": 4.06439567351614e-06,
"loss": 5.6837,
"step": 184
},
{
"epoch": 0.296,
"grad_norm": 3.6820993423461914,
"learning_rate": 4.054462493897569e-06,
"loss": 5.1638,
"step": 185
},
{
"epoch": 0.2976,
"grad_norm": 3.9401638507843018,
"learning_rate": 4.0444891442534615e-06,
"loss": 5.498,
"step": 186
},
{
"epoch": 0.2992,
"grad_norm": 5.141748428344727,
"learning_rate": 4.034475882312593e-06,
"loss": 5.1034,
"step": 187
},
{
"epoch": 0.3008,
"grad_norm": 3.4670636653900146,
"learning_rate": 4.024422966835137e-06,
"loss": 5.2071,
"step": 188
},
{
"epoch": 0.3024,
"grad_norm": 3.7121994495391846,
"learning_rate": 4.014330657605984e-06,
"loss": 5.3885,
"step": 189
},
{
"epoch": 0.304,
"grad_norm": 5.863914489746094,
"learning_rate": 4.004199215428032e-06,
"loss": 5.5361,
"step": 190
},
{
"epoch": 0.3056,
"grad_norm": 3.343780517578125,
"learning_rate": 3.994028902115439e-06,
"loss": 5.4439,
"step": 191
},
{
"epoch": 0.3072,
"grad_norm": 4.041346073150635,
"learning_rate": 3.9838199804868635e-06,
"loss": 5.6031,
"step": 192
},
{
"epoch": 0.3088,
"grad_norm": 2.9033005237579346,
"learning_rate": 3.973572714358668e-06,
"loss": 5.3807,
"step": 193
},
{
"epoch": 0.3104,
"grad_norm": 3.026655435562134,
"learning_rate": 3.963287368538105e-06,
"loss": 5.6681,
"step": 194
},
{
"epoch": 0.312,
"grad_norm": 5.9403862953186035,
"learning_rate": 3.9529642088164736e-06,
"loss": 5.1567,
"step": 195
},
{
"epoch": 0.3136,
"grad_norm": 4.5110883712768555,
"learning_rate": 3.942603501962249e-06,
"loss": 5.3055,
"step": 196
},
{
"epoch": 0.3152,
"grad_norm": 4.24874210357666,
"learning_rate": 3.932205515714189e-06,
"loss": 5.3806,
"step": 197
},
{
"epoch": 0.3168,
"grad_norm": 4.06620979309082,
"learning_rate": 3.92177051877442e-06,
"loss": 5.5074,
"step": 198
},
{
"epoch": 0.3184,
"grad_norm": 3.4436845779418945,
"learning_rate": 3.9112987808014824e-06,
"loss": 5.3936,
"step": 199
},
{
"epoch": 0.32,
"grad_norm": 3.5736324787139893,
"learning_rate": 3.900790572403376e-06,
"loss": 5.3905,
"step": 200
},
{
"epoch": 0.3216,
"grad_norm": 2.719327926635742,
"learning_rate": 3.890246165130556e-06,
"loss": 5.3799,
"step": 201
},
{
"epoch": 0.3232,
"grad_norm": 2.68566632270813,
"learning_rate": 3.8796658314689205e-06,
"loss": 5.1272,
"step": 202
},
{
"epoch": 0.3248,
"grad_norm": 3.111459493637085,
"learning_rate": 3.86904984483277e-06,
"loss": 5.1238,
"step": 203
},
{
"epoch": 0.3264,
"grad_norm": 3.006056785583496,
"learning_rate": 3.858398479557739e-06,
"loss": 5.0996,
"step": 204
},
{
"epoch": 0.328,
"grad_norm": 6.713052749633789,
"learning_rate": 3.847712010893706e-06,
"loss": 5.1972,
"step": 205
},
{
"epoch": 0.3296,
"grad_norm": 4.517950534820557,
"learning_rate": 3.836990714997686e-06,
"loss": 5.0826,
"step": 206
},
{
"epoch": 0.3312,
"grad_norm": 3.7129902839660645,
"learning_rate": 3.826234868926686e-06,
"loss": 5.1656,
"step": 207
},
{
"epoch": 0.3328,
"grad_norm": 4.543206691741943,
"learning_rate": 3.815444750630555e-06,
"loss": 5.0371,
"step": 208
},
{
"epoch": 0.3344,
"grad_norm": 7.416889667510986,
"learning_rate": 3.8046206389447916e-06,
"loss": 5.46,
"step": 209
},
{
"epoch": 0.336,
"grad_norm": 5.088094711303711,
"learning_rate": 3.7937628135833453e-06,
"loss": 5.313,
"step": 210
},
{
"epoch": 0.3376,
"grad_norm": 2.3088865280151367,
"learning_rate": 3.782871555131386e-06,
"loss": 5.1569,
"step": 211
},
{
"epoch": 0.3392,
"grad_norm": 4.071847915649414,
"learning_rate": 3.7719471450380518e-06,
"loss": 5.2869,
"step": 212
},
{
"epoch": 0.3408,
"grad_norm": 5.414875030517578,
"learning_rate": 3.760989865609178e-06,
"loss": 4.9842,
"step": 213
},
{
"epoch": 0.3424,
"grad_norm": 5.795034885406494,
"learning_rate": 3.7500000000000005e-06,
"loss": 5.0864,
"step": 214
},
{
"epoch": 0.344,
"grad_norm": 3.6953725814819336,
"learning_rate": 3.738977832207839e-06,
"loss": 5.1704,
"step": 215
},
{
"epoch": 0.3456,
"grad_norm": 2.568744659423828,
"learning_rate": 3.7279236470647593e-06,
"loss": 5.4024,
"step": 216
},
{
"epoch": 0.3472,
"grad_norm": 2.6171984672546387,
"learning_rate": 3.7168377302302096e-06,
"loss": 5.4781,
"step": 217
},
{
"epoch": 0.3488,
"grad_norm": 4.188714981079102,
"learning_rate": 3.7057203681836407e-06,
"loss": 5.0629,
"step": 218
},
{
"epoch": 0.3504,
"grad_norm": 5.346961975097656,
"learning_rate": 3.6945718482171044e-06,
"loss": 4.7197,
"step": 219
},
{
"epoch": 0.352,
"grad_norm": 3.4702541828155518,
"learning_rate": 3.683392458427825e-06,
"loss": 5.3812,
"step": 220
},
{
"epoch": 0.3536,
"grad_norm": 2.4442174434661865,
"learning_rate": 3.6721824877107588e-06,
"loss": 5.0477,
"step": 221
},
{
"epoch": 0.3552,
"grad_norm": 3.310760259628296,
"learning_rate": 3.660942225751126e-06,
"loss": 4.8426,
"step": 222
},
{
"epoch": 0.3568,
"grad_norm": 2.951408624649048,
"learning_rate": 3.6496719630169254e-06,
"loss": 5.3388,
"step": 223
},
{
"epoch": 0.3584,
"grad_norm": 4.146093368530273,
"learning_rate": 3.638371990751428e-06,
"loss": 4.9851,
"step": 224
},
{
"epoch": 0.36,
"grad_norm": 2.433192729949951,
"learning_rate": 3.6270426009656524e-06,
"loss": 4.9225,
"step": 225
},
{
"epoch": 0.3616,
"grad_norm": 5.279270648956299,
"learning_rate": 3.615684086430815e-06,
"loss": 5.291,
"step": 226
},
{
"epoch": 0.3632,
"grad_norm": 2.825227737426758,
"learning_rate": 3.604296740670768e-06,
"loss": 5.0946,
"step": 227
},
{
"epoch": 0.3648,
"grad_norm": 2.6808815002441406,
"learning_rate": 3.592880857954413e-06,
"loss": 4.8898,
"step": 228
},
{
"epoch": 0.3664,
"grad_norm": 3.7098076343536377,
"learning_rate": 3.5814367332880953e-06,
"loss": 5.1529,
"step": 229
},
{
"epoch": 0.368,
"grad_norm": 4.077409267425537,
"learning_rate": 3.5699646624079824e-06,
"loss": 5.3046,
"step": 230
},
{
"epoch": 0.3696,
"grad_norm": 2.28322434425354,
"learning_rate": 3.5584649417724217e-06,
"loss": 5.1489,
"step": 231
},
{
"epoch": 0.3712,
"grad_norm": 2.7317070960998535,
"learning_rate": 3.5469378685542742e-06,
"loss": 5.1974,
"step": 232
},
{
"epoch": 0.3728,
"grad_norm": 3.298208475112915,
"learning_rate": 3.5353837406332464e-06,
"loss": 4.9805,
"step": 233
},
{
"epoch": 0.3744,
"grad_norm": 3.0187830924987793,
"learning_rate": 3.52380285658818e-06,
"loss": 5.1739,
"step": 234
},
{
"epoch": 0.376,
"grad_norm": 2.1207525730133057,
"learning_rate": 3.512195515689343e-06,
"loss": 5.0907,
"step": 235
},
{
"epoch": 0.3776,
"grad_norm": 2.5246315002441406,
"learning_rate": 3.500562017890695e-06,
"loss": 4.9551,
"step": 236
},
{
"epoch": 0.3792,
"grad_norm": 2.149975538253784,
"learning_rate": 3.4889026638221376e-06,
"loss": 5.0675,
"step": 237
},
{
"epoch": 0.3808,
"grad_norm": 2.6374351978302,
"learning_rate": 3.4772177547817387e-06,
"loss": 5.2944,
"step": 238
},
{
"epoch": 0.3824,
"grad_norm": 2.4661672115325928,
"learning_rate": 3.4655075927279576e-06,
"loss": 4.9949,
"step": 239
},
{
"epoch": 0.384,
"grad_norm": 2.271843910217285,
"learning_rate": 3.4537724802718294e-06,
"loss": 5.044,
"step": 240
},
{
"epoch": 0.3856,
"grad_norm": 2.4052512645721436,
"learning_rate": 3.442012720669155e-06,
"loss": 5.2165,
"step": 241
},
{
"epoch": 0.3872,
"grad_norm": 2.379415988922119,
"learning_rate": 3.430228617812661e-06,
"loss": 5.0385,
"step": 242
},
{
"epoch": 0.3888,
"grad_norm": 1.9934539794921875,
"learning_rate": 3.418420476224143e-06,
"loss": 4.865,
"step": 243
},
{
"epoch": 0.3904,
"grad_norm": 2.073207378387451,
"learning_rate": 3.4065886010466014e-06,
"loss": 4.816,
"step": 244
},
{
"epoch": 0.392,
"grad_norm": 3.5318527221679688,
"learning_rate": 3.3947332980363552e-06,
"loss": 4.7399,
"step": 245
},
{
"epoch": 0.3936,
"grad_norm": 3.798776626586914,
"learning_rate": 3.382854873555137e-06,
"loss": 5.1423,
"step": 246
},
{
"epoch": 0.3952,
"grad_norm": 3.180250644683838,
"learning_rate": 3.370953634562179e-06,
"loss": 4.8133,
"step": 247
},
{
"epoch": 0.3968,
"grad_norm": 4.3103156089782715,
"learning_rate": 3.3590298886062833e-06,
"loss": 5.2605,
"step": 248
},
{
"epoch": 0.3984,
"grad_norm": 2.771699905395508,
"learning_rate": 3.347083943817867e-06,
"loss": 4.6618,
"step": 249
},
{
"epoch": 0.4,
"grad_norm": 2.18292236328125,
"learning_rate": 3.3351161089010055e-06,
"loss": 4.9174,
"step": 250
},
{
"epoch": 0.4016,
"grad_norm": 4.050281047821045,
"learning_rate": 3.3231266931254546e-06,
"loss": 4.6317,
"step": 251
},
{
"epoch": 0.4032,
"grad_norm": 2.1576311588287354,
"learning_rate": 3.3111160063186553e-06,
"loss": 4.6405,
"step": 252
},
{
"epoch": 0.4048,
"grad_norm": 2.3556125164031982,
"learning_rate": 3.299084358857731e-06,
"loss": 4.8415,
"step": 253
},
{
"epoch": 0.4064,
"grad_norm": 3.075002908706665,
"learning_rate": 3.2870320616614626e-06,
"loss": 5.0271,
"step": 254
},
{
"epoch": 0.408,
"grad_norm": 2.8005783557891846,
"learning_rate": 3.2749594261822586e-06,
"loss": 4.9054,
"step": 255
},
{
"epoch": 0.4096,
"grad_norm": 2.2018191814422607,
"learning_rate": 3.2628667643981036e-06,
"loss": 4.7503,
"step": 256
},
{
"epoch": 0.4112,
"grad_norm": 3.0102760791778564,
"learning_rate": 3.250754388804495e-06,
"loss": 5.0094,
"step": 257
},
{
"epoch": 0.4128,
"grad_norm": 2.608316659927368,
"learning_rate": 3.238622612406373e-06,
"loss": 4.8735,
"step": 258
},
{
"epoch": 0.4144,
"grad_norm": 2.749432325363159,
"learning_rate": 3.226471748710025e-06,
"loss": 5.0013,
"step": 259
},
{
"epoch": 0.416,
"grad_norm": 2.818932056427002,
"learning_rate": 3.21430211171499e-06,
"loss": 5.4434,
"step": 260
},
{
"epoch": 0.4176,
"grad_norm": 5.755081653594971,
"learning_rate": 3.2021140159059398e-06,
"loss": 4.5975,
"step": 261
},
{
"epoch": 0.4192,
"grad_norm": 2.349569320678711,
"learning_rate": 3.189907776244556e-06,
"loss": 5.1069,
"step": 262
},
{
"epoch": 0.4208,
"grad_norm": 2.527462959289551,
"learning_rate": 3.1776837081613893e-06,
"loss": 4.875,
"step": 263
},
{
"epoch": 0.4224,
"grad_norm": 2.4022037982940674,
"learning_rate": 3.1654421275477045e-06,
"loss": 5.1679,
"step": 264
},
{
"epoch": 0.424,
"grad_norm": 5.29791784286499,
"learning_rate": 3.1531833507473246e-06,
"loss": 4.6665,
"step": 265
},
{
"epoch": 0.4256,
"grad_norm": 2.1718266010284424,
"learning_rate": 3.1409076945484513e-06,
"loss": 5.0353,
"step": 266
},
{
"epoch": 0.4272,
"grad_norm": 2.4552576541900635,
"learning_rate": 3.128615476175477e-06,
"loss": 5.2233,
"step": 267
},
{
"epoch": 0.4288,
"grad_norm": 6.326014995574951,
"learning_rate": 3.116307013280793e-06,
"loss": 4.5923,
"step": 268
},
{
"epoch": 0.4304,
"grad_norm": 2.048156261444092,
"learning_rate": 3.1039826239365754e-06,
"loss": 4.908,
"step": 269
},
{
"epoch": 0.432,
"grad_norm": 3.1258327960968018,
"learning_rate": 3.0916426266265676e-06,
"loss": 4.6048,
"step": 270
},
{
"epoch": 0.4336,
"grad_norm": 2.174611806869507,
"learning_rate": 3.0792873402378516e-06,
"loss": 4.8559,
"step": 271
},
{
"epoch": 0.4352,
"grad_norm": 1.987095594406128,
"learning_rate": 3.066917084052603e-06,
"loss": 4.7892,
"step": 272
},
{
"epoch": 0.4368,
"grad_norm": 5.872653484344482,
"learning_rate": 3.0545321777398463e-06,
"loss": 5.3549,
"step": 273
},
{
"epoch": 0.4384,
"grad_norm": 3.6800520420074463,
"learning_rate": 3.042132941347189e-06,
"loss": 4.9779,
"step": 274
},
{
"epoch": 0.44,
"grad_norm": 2.9514310359954834,
"learning_rate": 3.0297196952925533e-06,
"loss": 4.8666,
"step": 275
},
{
"epoch": 0.4416,
"grad_norm": 2.6596765518188477,
"learning_rate": 3.017292760355896e-06,
"loss": 4.7164,
"step": 276
},
{
"epoch": 0.4432,
"grad_norm": 2.4458515644073486,
"learning_rate": 3.0048524576709175e-06,
"loss": 4.6459,
"step": 277
},
{
"epoch": 0.4448,
"grad_norm": 2.106074571609497,
"learning_rate": 2.9923991087167657e-06,
"loss": 4.7868,
"step": 278
},
{
"epoch": 0.4464,
"grad_norm": 1.8992120027542114,
"learning_rate": 2.9799330353097245e-06,
"loss": 4.9395,
"step": 279
},
{
"epoch": 0.448,
"grad_norm": 4.042559623718262,
"learning_rate": 2.967454559594903e-06,
"loss": 5.0813,
"step": 280
},
{
"epoch": 0.4496,
"grad_norm": 4.273650646209717,
"learning_rate": 2.9549640040379043e-06,
"loss": 5.0635,
"step": 281
},
{
"epoch": 0.4512,
"grad_norm": 2.039167881011963,
"learning_rate": 2.9424616914164982e-06,
"loss": 4.9402,
"step": 282
},
{
"epoch": 0.4528,
"grad_norm": 3.257084369659424,
"learning_rate": 2.929947944812277e-06,
"loss": 5.2913,
"step": 283
},
{
"epoch": 0.4544,
"grad_norm": 2.763265371322632,
"learning_rate": 2.917423087602306e-06,
"loss": 4.8663,
"step": 284
},
{
"epoch": 0.456,
"grad_norm": 2.449984550476074,
"learning_rate": 2.904887443450769e-06,
"loss": 4.8823,
"step": 285
},
{
"epoch": 0.4576,
"grad_norm": 5.364999294281006,
"learning_rate": 2.8923413363006038e-06,
"loss": 4.6396,
"step": 286
},
{
"epoch": 0.4592,
"grad_norm": 2.5084285736083984,
"learning_rate": 2.8797850903651274e-06,
"loss": 4.8121,
"step": 287
},
{
"epoch": 0.4608,
"grad_norm": 1.8236719369888306,
"learning_rate": 2.8672190301196655e-06,
"loss": 5.1095,
"step": 288
},
{
"epoch": 0.4624,
"grad_norm": 5.353603839874268,
"learning_rate": 2.854643480293159e-06,
"loss": 4.6599,
"step": 289
},
{
"epoch": 0.464,
"grad_norm": 2.1391165256500244,
"learning_rate": 2.842058765859776e-06,
"loss": 4.8299,
"step": 290
},
{
"epoch": 0.4656,
"grad_norm": 2.8433682918548584,
"learning_rate": 2.8294652120305167e-06,
"loss": 4.7608,
"step": 291
},
{
"epoch": 0.4672,
"grad_norm": 3.439664602279663,
"learning_rate": 2.8168631442448046e-06,
"loss": 4.6247,
"step": 292
},
{
"epoch": 0.4688,
"grad_norm": 2.0286402702331543,
"learning_rate": 2.804252888162079e-06,
"loss": 5.0117,
"step": 293
},
{
"epoch": 0.4704,
"grad_norm": 1.8686045408248901,
"learning_rate": 2.791634769653381e-06,
"loss": 4.7583,
"step": 294
},
{
"epoch": 0.472,
"grad_norm": 2.822620391845703,
"learning_rate": 2.779009114792928e-06,
"loss": 4.8104,
"step": 295
},
{
"epoch": 0.4736,
"grad_norm": 2.264028787612915,
"learning_rate": 2.7663762498496905e-06,
"loss": 4.6987,
"step": 296
},
{
"epoch": 0.4752,
"grad_norm": 2.261293411254883,
"learning_rate": 2.753736501278961e-06,
"loss": 4.6871,
"step": 297
},
{
"epoch": 0.4768,
"grad_norm": 3.6320760250091553,
"learning_rate": 2.741090195713917e-06,
"loss": 4.7803,
"step": 298
},
{
"epoch": 0.4784,
"grad_norm": 2.178699254989624,
"learning_rate": 2.7284376599571776e-06,
"loss": 4.7506,
"step": 299
},
{
"epoch": 0.48,
"grad_norm": 2.638765811920166,
"learning_rate": 2.7157792209723654e-06,
"loss": 4.4671,
"step": 300
},
{
"epoch": 0.4816,
"grad_norm": 2.165921211242676,
"learning_rate": 2.7031152058756495e-06,
"loss": 4.6798,
"step": 301
},
{
"epoch": 0.4832,
"grad_norm": 2.7200875282287598,
"learning_rate": 2.6904459419272955e-06,
"loss": 4.7178,
"step": 302
},
{
"epoch": 0.4848,
"grad_norm": 3.2318854331970215,
"learning_rate": 2.6777717565232098e-06,
"loss": 4.8959,
"step": 303
},
{
"epoch": 0.4864,
"grad_norm": 2.586148738861084,
"learning_rate": 2.6650929771864776e-06,
"loss": 4.8196,
"step": 304
},
{
"epoch": 0.488,
"grad_norm": 1.7376669645309448,
"learning_rate": 2.652409931558898e-06,
"loss": 4.7363,
"step": 305
},
{
"epoch": 0.4896,
"grad_norm": 1.8759428262710571,
"learning_rate": 2.639722947392521e-06,
"loss": 4.8715,
"step": 306
},
{
"epoch": 0.4912,
"grad_norm": 1.9195789098739624,
"learning_rate": 2.6270323525411724e-06,
"loss": 4.9562,
"step": 307
},
{
"epoch": 0.4928,
"grad_norm": 2.1402273178100586,
"learning_rate": 2.614338474951987e-06,
"loss": 4.7405,
"step": 308
},
{
"epoch": 0.4944,
"grad_norm": 1.8862427473068237,
"learning_rate": 2.6016416426569285e-06,
"loss": 4.7351,
"step": 309
},
{
"epoch": 0.496,
"grad_norm": 4.3101677894592285,
"learning_rate": 2.5889421837643186e-06,
"loss": 5.1979,
"step": 310
},
{
"epoch": 0.4976,
"grad_norm": 2.146365165710449,
"learning_rate": 2.5762404264503538e-06,
"loss": 4.86,
"step": 311
},
{
"epoch": 0.4992,
"grad_norm": 2.066309928894043,
"learning_rate": 2.563536698950624e-06,
"loss": 4.9702,
"step": 312
},
{
"epoch": 0.5008,
"grad_norm": 1.9804738759994507,
"learning_rate": 2.5508313295516326e-06,
"loss": 4.8096,
"step": 313
},
{
"epoch": 0.5024,
"grad_norm": 2.8938088417053223,
"learning_rate": 2.538124646582315e-06,
"loss": 5.2568,
"step": 314
},
{
"epoch": 0.504,
"grad_norm": 1.8512638807296753,
"learning_rate": 2.52541697840555e-06,
"loss": 5.0433,
"step": 315
},
{
"epoch": 0.5056,
"grad_norm": 4.223806858062744,
"learning_rate": 2.512708653409674e-06,
"loss": 4.7329,
"step": 316
},
{
"epoch": 0.5072,
"grad_norm": 1.8683325052261353,
"learning_rate": 2.5e-06,
"loss": 5.0335,
"step": 317
},
{
"epoch": 0.5088,
"grad_norm": 5.908133506774902,
"learning_rate": 2.487291346590326e-06,
"loss": 4.6557,
"step": 318
},
{
"epoch": 0.5104,
"grad_norm": 5.461273670196533,
"learning_rate": 2.4745830215944512e-06,
"loss": 4.6582,
"step": 319
},
{
"epoch": 0.512,
"grad_norm": 3.9137203693389893,
"learning_rate": 2.4618753534176854e-06,
"loss": 5.0977,
"step": 320
},
{
"epoch": 0.5136,
"grad_norm": 2.8667490482330322,
"learning_rate": 2.449168670448368e-06,
"loss": 4.8414,
"step": 321
},
{
"epoch": 0.5152,
"grad_norm": 2.354121685028076,
"learning_rate": 2.436463301049378e-06,
"loss": 4.95,
"step": 322
},
{
"epoch": 0.5168,
"grad_norm": 3.92159366607666,
"learning_rate": 2.423759573549647e-06,
"loss": 4.6653,
"step": 323
},
{
"epoch": 0.5184,
"grad_norm": 4.691086292266846,
"learning_rate": 2.4110578162356814e-06,
"loss": 5.3596,
"step": 324
},
{
"epoch": 0.52,
"grad_norm": 3.4949967861175537,
"learning_rate": 2.3983583573430723e-06,
"loss": 4.9576,
"step": 325
},
{
"epoch": 0.5216,
"grad_norm": 2.329535484313965,
"learning_rate": 2.385661525048014e-06,
"loss": 4.9078,
"step": 326
},
{
"epoch": 0.5232,
"grad_norm": 1.949039340019226,
"learning_rate": 2.372967647458829e-06,
"loss": 4.8943,
"step": 327
},
{
"epoch": 0.5248,
"grad_norm": 1.8755134344100952,
"learning_rate": 2.3602770526074804e-06,
"loss": 4.661,
"step": 328
},
{
"epoch": 0.5264,
"grad_norm": 3.7387921810150146,
"learning_rate": 2.3475900684411027e-06,
"loss": 4.942,
"step": 329
},
{
"epoch": 0.528,
"grad_norm": 2.0674028396606445,
"learning_rate": 2.334907022813523e-06,
"loss": 4.9509,
"step": 330
},
{
"epoch": 0.5296,
"grad_norm": 3.522639036178589,
"learning_rate": 2.3222282434767915e-06,
"loss": 4.9442,
"step": 331
},
{
"epoch": 0.5312,
"grad_norm": 3.0063114166259766,
"learning_rate": 2.3095540580727054e-06,
"loss": 4.9701,
"step": 332
},
{
"epoch": 0.5328,
"grad_norm": 2.9866836071014404,
"learning_rate": 2.296884794124351e-06,
"loss": 4.8279,
"step": 333
},
{
"epoch": 0.5344,
"grad_norm": 2.5366246700286865,
"learning_rate": 2.2842207790276355e-06,
"loss": 4.9832,
"step": 334
},
{
"epoch": 0.536,
"grad_norm": 3.206937074661255,
"learning_rate": 2.2715623400428228e-06,
"loss": 4.6835,
"step": 335
},
{
"epoch": 0.5376,
"grad_norm": 4.108307361602783,
"learning_rate": 2.2589098042860838e-06,
"loss": 4.6455,
"step": 336
},
{
"epoch": 0.5392,
"grad_norm": 2.709421157836914,
"learning_rate": 2.24626349872104e-06,
"loss": 4.6987,
"step": 337
},
{
"epoch": 0.5408,
"grad_norm": 2.0390799045562744,
"learning_rate": 2.2336237501503103e-06,
"loss": 4.8034,
"step": 338
},
{
"epoch": 0.5424,
"grad_norm": 2.461397647857666,
"learning_rate": 2.2209908852070723e-06,
"loss": 4.6968,
"step": 339
},
{
"epoch": 0.544,
"grad_norm": 3.7129275798797607,
"learning_rate": 2.2083652303466196e-06,
"loss": 4.5782,
"step": 340
},
{
"epoch": 0.5456,
"grad_norm": 2.8389174938201904,
"learning_rate": 2.1957471118379213e-06,
"loss": 4.6995,
"step": 341
},
{
"epoch": 0.5472,
"grad_norm": 1.771793246269226,
"learning_rate": 2.1831368557551962e-06,
"loss": 4.7748,
"step": 342
},
{
"epoch": 0.5488,
"grad_norm": 4.029572486877441,
"learning_rate": 2.170534787969484e-06,
"loss": 5.0112,
"step": 343
},
{
"epoch": 0.5504,
"grad_norm": 5.098694801330566,
"learning_rate": 2.157941234140225e-06,
"loss": 5.0988,
"step": 344
},
{
"epoch": 0.552,
"grad_norm": 3.2921664714813232,
"learning_rate": 2.145356519706842e-06,
"loss": 4.7199,
"step": 345
},
{
"epoch": 0.5536,
"grad_norm": 3.225116729736328,
"learning_rate": 2.1327809698803354e-06,
"loss": 5.062,
"step": 346
},
{
"epoch": 0.5552,
"grad_norm": 2.9309229850769043,
"learning_rate": 2.120214909634873e-06,
"loss": 4.9291,
"step": 347
},
{
"epoch": 0.5568,
"grad_norm": 2.9291505813598633,
"learning_rate": 2.1076586636993975e-06,
"loss": 4.9831,
"step": 348
},
{
"epoch": 0.5584,
"grad_norm": 2.420208692550659,
"learning_rate": 2.0951125565492313e-06,
"loss": 4.9086,
"step": 349
},
{
"epoch": 0.56,
"grad_norm": 1.8131273984909058,
"learning_rate": 2.0825769123976954e-06,
"loss": 4.6759,
"step": 350
},
{
"epoch": 0.5616,
"grad_norm": 1.9805721044540405,
"learning_rate": 2.070052055187724e-06,
"loss": 4.7528,
"step": 351
},
{
"epoch": 0.5632,
"grad_norm": 2.3578004837036133,
"learning_rate": 2.057538308583502e-06,
"loss": 4.8435,
"step": 352
},
{
"epoch": 0.5648,
"grad_norm": 5.772895812988281,
"learning_rate": 2.045035995962097e-06,
"loss": 5.4256,
"step": 353
},
{
"epoch": 0.5664,
"grad_norm": 2.147087574005127,
"learning_rate": 2.0325454404050983e-06,
"loss": 5.0171,
"step": 354
},
{
"epoch": 0.568,
"grad_norm": 2.1338014602661133,
"learning_rate": 2.0200669646902755e-06,
"loss": 5.0236,
"step": 355
},
{
"epoch": 0.5696,
"grad_norm": 1.909696102142334,
"learning_rate": 2.0076008912832355e-06,
"loss": 4.8829,
"step": 356
},
{
"epoch": 0.5712,
"grad_norm": 4.729516983032227,
"learning_rate": 1.995147542329083e-06,
"loss": 4.891,
"step": 357
},
{
"epoch": 0.5728,
"grad_norm": 5.204637050628662,
"learning_rate": 1.9827072396441044e-06,
"loss": 4.8071,
"step": 358
},
{
"epoch": 0.5744,
"grad_norm": 9.13206672668457,
"learning_rate": 1.970280304707447e-06,
"loss": 4.3763,
"step": 359
},
{
"epoch": 0.576,
"grad_norm": 3.977590560913086,
"learning_rate": 1.957867058652812e-06,
"loss": 4.9224,
"step": 360
},
{
"epoch": 0.5776,
"grad_norm": 4.341916084289551,
"learning_rate": 1.9454678222601545e-06,
"loss": 4.727,
"step": 361
},
{
"epoch": 0.5792,
"grad_norm": 2.2049553394317627,
"learning_rate": 1.933082915947398e-06,
"loss": 4.9447,
"step": 362
},
{
"epoch": 0.5808,
"grad_norm": 5.574019908905029,
"learning_rate": 1.9207126597621496e-06,
"loss": 4.637,
"step": 363
},
{
"epoch": 0.5824,
"grad_norm": 4.348245143890381,
"learning_rate": 1.9083573733734328e-06,
"loss": 5.3625,
"step": 364
},
{
"epoch": 0.584,
"grad_norm": 1.8652396202087402,
"learning_rate": 1.8960173760634257e-06,
"loss": 4.9229,
"step": 365
},
{
"epoch": 0.5856,
"grad_norm": 2.5181169509887695,
"learning_rate": 1.8836929867192077e-06,
"loss": 4.9309,
"step": 366
},
{
"epoch": 0.5872,
"grad_norm": 1.853685975074768,
"learning_rate": 1.8713845238245231e-06,
"loss": 4.838,
"step": 367
},
{
"epoch": 0.5888,
"grad_norm": 3.2928309440612793,
"learning_rate": 1.8590923054515504e-06,
"loss": 4.6349,
"step": 368
},
{
"epoch": 0.5904,
"grad_norm": 2.0537586212158203,
"learning_rate": 1.846816649252676e-06,
"loss": 5.1173,
"step": 369
},
{
"epoch": 0.592,
"grad_norm": 3.0493924617767334,
"learning_rate": 1.8345578724522957e-06,
"loss": 5.1656,
"step": 370
},
{
"epoch": 0.5936,
"grad_norm": 1.8361153602600098,
"learning_rate": 1.8223162918386122e-06,
"loss": 4.7804,
"step": 371
},
{
"epoch": 0.5952,
"grad_norm": 2.6936750411987305,
"learning_rate": 1.8100922237554442e-06,
"loss": 4.7275,
"step": 372
},
{
"epoch": 0.5968,
"grad_norm": 3.0393989086151123,
"learning_rate": 1.7978859840940604e-06,
"loss": 4.6155,
"step": 373
},
{
"epoch": 0.5984,
"grad_norm": 2.4623372554779053,
"learning_rate": 1.7856978882850112e-06,
"loss": 4.7711,
"step": 374
},
{
"epoch": 0.6,
"grad_norm": 1.9320837259292603,
"learning_rate": 1.7735282512899757e-06,
"loss": 4.898,
"step": 375
},
{
"epoch": 0.6016,
"grad_norm": 3.3518543243408203,
"learning_rate": 1.7613773875936274e-06,
"loss": 4.5005,
"step": 376
},
{
"epoch": 0.6032,
"grad_norm": 5.2378458976745605,
"learning_rate": 1.7492456111955052e-06,
"loss": 5.2084,
"step": 377
},
{
"epoch": 0.6048,
"grad_norm": 3.670921802520752,
"learning_rate": 1.7371332356018972e-06,
"loss": 5.069,
"step": 378
},
{
"epoch": 0.6064,
"grad_norm": 2.2386555671691895,
"learning_rate": 1.725040573817742e-06,
"loss": 4.6441,
"step": 379
},
{
"epoch": 0.608,
"grad_norm": 1.9244714975357056,
"learning_rate": 1.7129679383385384e-06,
"loss": 4.7077,
"step": 380
},
{
"epoch": 0.6096,
"grad_norm": 4.245272159576416,
"learning_rate": 1.70091564114227e-06,
"loss": 5.0572,
"step": 381
},
{
"epoch": 0.6112,
"grad_norm": 1.7141362428665161,
"learning_rate": 1.688883993681345e-06,
"loss": 4.5579,
"step": 382
},
{
"epoch": 0.6128,
"grad_norm": 1.722749948501587,
"learning_rate": 1.6768733068745468e-06,
"loss": 4.619,
"step": 383
},
{
"epoch": 0.6144,
"grad_norm": 2.210688352584839,
"learning_rate": 1.6648838910989955e-06,
"loss": 4.7759,
"step": 384
},
{
"epoch": 0.616,
"grad_norm": 1.5690199136734009,
"learning_rate": 1.652916056182134e-06,
"loss": 4.6606,
"step": 385
},
{
"epoch": 0.6176,
"grad_norm": 1.748518466949463,
"learning_rate": 1.6409701113937182e-06,
"loss": 4.7198,
"step": 386
},
{
"epoch": 0.6192,
"grad_norm": 2.298208475112915,
"learning_rate": 1.6290463654378213e-06,
"loss": 4.9387,
"step": 387
},
{
"epoch": 0.6208,
"grad_norm": 3.15093994140625,
"learning_rate": 1.617145126444864e-06,
"loss": 4.7299,
"step": 388
},
{
"epoch": 0.6224,
"grad_norm": 2.0222041606903076,
"learning_rate": 1.6052667019636462e-06,
"loss": 4.6768,
"step": 389
},
{
"epoch": 0.624,
"grad_norm": 3.3168511390686035,
"learning_rate": 1.5934113989533992e-06,
"loss": 4.9,
"step": 390
},
{
"epoch": 0.6256,
"grad_norm": 2.9965100288391113,
"learning_rate": 1.581579523775858e-06,
"loss": 4.5604,
"step": 391
},
{
"epoch": 0.6272,
"grad_norm": 1.9449670314788818,
"learning_rate": 1.5697713821873401e-06,
"loss": 4.7794,
"step": 392
},
{
"epoch": 0.6288,
"grad_norm": 2.7728371620178223,
"learning_rate": 1.5579872793308456e-06,
"loss": 4.6898,
"step": 393
},
{
"epoch": 0.6304,
"grad_norm": 1.9621694087982178,
"learning_rate": 1.5462275197281717e-06,
"loss": 4.7706,
"step": 394
},
{
"epoch": 0.632,
"grad_norm": 3.6688954830169678,
"learning_rate": 1.5344924072720434e-06,
"loss": 5.0143,
"step": 395
},
{
"epoch": 0.6336,
"grad_norm": 1.637315034866333,
"learning_rate": 1.5227822452182617e-06,
"loss": 4.6788,
"step": 396
},
{
"epoch": 0.6352,
"grad_norm": 2.2460734844207764,
"learning_rate": 1.5110973361778637e-06,
"loss": 4.8856,
"step": 397
},
{
"epoch": 0.6368,
"grad_norm": 2.410999298095703,
"learning_rate": 1.499437982109305e-06,
"loss": 4.5615,
"step": 398
},
{
"epoch": 0.6384,
"grad_norm": 3.9339914321899414,
"learning_rate": 1.4878044843106582e-06,
"loss": 4.2681,
"step": 399
},
{
"epoch": 0.64,
"grad_norm": 2.127648115158081,
"learning_rate": 1.4761971434118207e-06,
"loss": 4.8065,
"step": 400
},
{
"epoch": 0.6416,
"grad_norm": 4.119774341583252,
"learning_rate": 1.4646162593667535e-06,
"loss": 4.431,
"step": 401
},
{
"epoch": 0.6432,
"grad_norm": 1.6970020532608032,
"learning_rate": 1.4530621314457255e-06,
"loss": 4.8007,
"step": 402
},
{
"epoch": 0.6448,
"grad_norm": 4.189131259918213,
"learning_rate": 1.44153505822758e-06,
"loss": 4.814,
"step": 403
},
{
"epoch": 0.6464,
"grad_norm": 2.214879035949707,
"learning_rate": 1.430035337592018e-06,
"loss": 4.5226,
"step": 404
},
{
"epoch": 0.648,
"grad_norm": 1.919060468673706,
"learning_rate": 1.4185632667119052e-06,
"loss": 4.6231,
"step": 405
},
{
"epoch": 0.6496,
"grad_norm": 4.288653373718262,
"learning_rate": 1.4071191420455873e-06,
"loss": 4.9075,
"step": 406
},
{
"epoch": 0.6512,
"grad_norm": 2.7182724475860596,
"learning_rate": 1.3957032593292319e-06,
"loss": 4.4283,
"step": 407
},
{
"epoch": 0.6528,
"grad_norm": 1.7719295024871826,
"learning_rate": 1.3843159135691859e-06,
"loss": 4.4012,
"step": 408
},
{
"epoch": 0.6544,
"grad_norm": 1.7264647483825684,
"learning_rate": 1.372957399034348e-06,
"loss": 4.6959,
"step": 409
},
{
"epoch": 0.656,
"grad_norm": 2.184691905975342,
"learning_rate": 1.3616280092485719e-06,
"loss": 4.6226,
"step": 410
},
{
"epoch": 0.6576,
"grad_norm": 3.437188148498535,
"learning_rate": 1.3503280369830756e-06,
"loss": 4.8675,
"step": 411
},
{
"epoch": 0.6592,
"grad_norm": 1.8734341859817505,
"learning_rate": 1.3390577742488747e-06,
"loss": 4.3827,
"step": 412
},
{
"epoch": 0.6608,
"grad_norm": 3.0375936031341553,
"learning_rate": 1.3278175122892416e-06,
"loss": 4.8572,
"step": 413
},
{
"epoch": 0.6624,
"grad_norm": 1.702372431755066,
"learning_rate": 1.3166075415721762e-06,
"loss": 4.5006,
"step": 414
},
{
"epoch": 0.664,
"grad_norm": 2.2045111656188965,
"learning_rate": 1.3054281517828964e-06,
"loss": 4.8768,
"step": 415
},
{
"epoch": 0.6656,
"grad_norm": 2.0702500343322754,
"learning_rate": 1.2942796318163595e-06,
"loss": 4.8211,
"step": 416
},
{
"epoch": 0.6672,
"grad_norm": 1.9724135398864746,
"learning_rate": 1.2831622697697915e-06,
"loss": 4.5302,
"step": 417
},
{
"epoch": 0.6688,
"grad_norm": 1.6440492868423462,
"learning_rate": 1.2720763529352415e-06,
"loss": 4.5905,
"step": 418
},
{
"epoch": 0.6704,
"grad_norm": 1.585909366607666,
"learning_rate": 1.261022167792161e-06,
"loss": 4.6495,
"step": 419
},
{
"epoch": 0.672,
"grad_norm": 2.205381155014038,
"learning_rate": 1.2500000000000007e-06,
"loss": 4.5457,
"step": 420
},
{
"epoch": 0.6736,
"grad_norm": 3.706343173980713,
"learning_rate": 1.239010134390823e-06,
"loss": 4.4435,
"step": 421
},
{
"epoch": 0.6752,
"grad_norm": 3.3786356449127197,
"learning_rate": 1.2280528549619487e-06,
"loss": 4.3729,
"step": 422
},
{
"epoch": 0.6768,
"grad_norm": 2.3838484287261963,
"learning_rate": 1.2171284448686151e-06,
"loss": 4.811,
"step": 423
},
{
"epoch": 0.6784,
"grad_norm": 2.8519136905670166,
"learning_rate": 1.2062371864166553e-06,
"loss": 4.7614,
"step": 424
},
{
"epoch": 0.68,
"grad_norm": 2.9189982414245605,
"learning_rate": 1.195379361055209e-06,
"loss": 4.8093,
"step": 425
},
{
"epoch": 0.6816,
"grad_norm": 2.42081356048584,
"learning_rate": 1.1845552493694462e-06,
"loss": 4.8662,
"step": 426
},
{
"epoch": 0.6832,
"grad_norm": 1.7331292629241943,
"learning_rate": 1.1737651310733144e-06,
"loss": 4.7654,
"step": 427
},
{
"epoch": 0.6848,
"grad_norm": 4.279774188995361,
"learning_rate": 1.1630092850023148e-06,
"loss": 5.0898,
"step": 428
},
{
"epoch": 0.6864,
"grad_norm": 4.148006439208984,
"learning_rate": 1.152287989106295e-06,
"loss": 5.1606,
"step": 429
},
{
"epoch": 0.688,
"grad_norm": 2.1305670738220215,
"learning_rate": 1.141601520442262e-06,
"loss": 4.7046,
"step": 430
},
{
"epoch": 0.6896,
"grad_norm": 3.6530191898345947,
"learning_rate": 1.1309501551672303e-06,
"loss": 4.547,
"step": 431
},
{
"epoch": 0.6912,
"grad_norm": 1.8911426067352295,
"learning_rate": 1.120334168531081e-06,
"loss": 4.8827,
"step": 432
},
{
"epoch": 0.6928,
"grad_norm": 1.975206732749939,
"learning_rate": 1.1097538348694458e-06,
"loss": 4.7574,
"step": 433
},
{
"epoch": 0.6944,
"grad_norm": 1.9579243659973145,
"learning_rate": 1.0992094275966256e-06,
"loss": 4.9134,
"step": 434
},
{
"epoch": 0.696,
"grad_norm": 3.8839848041534424,
"learning_rate": 1.0887012191985186e-06,
"loss": 4.5802,
"step": 435
},
{
"epoch": 0.6976,
"grad_norm": 2.57751727104187,
"learning_rate": 1.078229481225582e-06,
"loss": 4.6134,
"step": 436
},
{
"epoch": 0.6992,
"grad_norm": 2.4251739978790283,
"learning_rate": 1.0677944842858112e-06,
"loss": 4.7292,
"step": 437
},
{
"epoch": 0.7008,
"grad_norm": 5.846269607543945,
"learning_rate": 1.0573964980377517e-06,
"loss": 4.3419,
"step": 438
},
{
"epoch": 0.7024,
"grad_norm": 2.6203324794769287,
"learning_rate": 1.0470357911835264e-06,
"loss": 4.8273,
"step": 439
},
{
"epoch": 0.704,
"grad_norm": 2.2552034854888916,
"learning_rate": 1.0367126314618946e-06,
"loss": 4.6515,
"step": 440
},
{
"epoch": 0.7056,
"grad_norm": 2.922776937484741,
"learning_rate": 1.0264272856413327e-06,
"loss": 4.6767,
"step": 441
},
{
"epoch": 0.7072,
"grad_norm": 2.9808290004730225,
"learning_rate": 1.0161800195131372e-06,
"loss": 4.497,
"step": 442
},
{
"epoch": 0.7088,
"grad_norm": 1.8489466905593872,
"learning_rate": 1.005971097884561e-06,
"loss": 4.5688,
"step": 443
},
{
"epoch": 0.7104,
"grad_norm": 1.652039647102356,
"learning_rate": 9.95800784571969e-07,
"loss": 4.6027,
"step": 444
},
{
"epoch": 0.712,
"grad_norm": 4.572587013244629,
"learning_rate": 9.85669342394017e-07,
"loss": 4.8382,
"step": 445
},
{
"epoch": 0.7136,
"grad_norm": 3.124997615814209,
"learning_rate": 9.755770331648642e-07,
"loss": 4.7719,
"step": 446
},
{
"epoch": 0.7152,
"grad_norm": 5.704045295715332,
"learning_rate": 9.655241176874073e-07,
"loss": 4.9265,
"step": 447
},
{
"epoch": 0.7168,
"grad_norm": 5.373026371002197,
"learning_rate": 9.555108557465383e-07,
"loss": 5.0219,
"step": 448
},
{
"epoch": 0.7184,
"grad_norm": 1.6691179275512695,
"learning_rate": 9.455375061024319e-07,
"loss": 4.508,
"step": 449
},
{
"epoch": 0.72,
"grad_norm": 2.2384562492370605,
"learning_rate": 9.356043264838607e-07,
"loss": 4.77,
"step": 450
},
{
"epoch": 0.7216,
"grad_norm": 2.1309869289398193,
"learning_rate": 9.257115735815342e-07,
"loss": 4.6903,
"step": 451
},
{
"epoch": 0.7232,
"grad_norm": 3.213756799697876,
"learning_rate": 9.158595030414621e-07,
"loss": 4.821,
"step": 452
},
{
"epoch": 0.7248,
"grad_norm": 6.5527567863464355,
"learning_rate": 9.060483694583539e-07,
"loss": 5.0021,
"step": 453
},
{
"epoch": 0.7264,
"grad_norm": 4.408112525939941,
"learning_rate": 8.962784263690358e-07,
"loss": 4.9703,
"step": 454
},
{
"epoch": 0.728,
"grad_norm": 6.864251613616943,
"learning_rate": 8.86549926245898e-07,
"loss": 5.1402,
"step": 455
},
{
"epoch": 0.7296,
"grad_norm": 3.859757661819458,
"learning_rate": 8.768631204903738e-07,
"loss": 5.0039,
"step": 456
},
{
"epoch": 0.7312,
"grad_norm": 3.99857497215271,
"learning_rate": 8.672182594264422e-07,
"loss": 4.9078,
"step": 457
},
{
"epoch": 0.7328,
"grad_norm": 1.9100146293640137,
"learning_rate": 8.576155922941548e-07,
"loss": 4.639,
"step": 458
},
{
"epoch": 0.7344,
"grad_norm": 2.4521241188049316,
"learning_rate": 8.480553672432012e-07,
"loss": 4.6877,
"step": 459
},
{
"epoch": 0.736,
"grad_norm": 3.0663843154907227,
"learning_rate": 8.385378313264933e-07,
"loss": 4.4701,
"step": 460
},
{
"epoch": 0.7376,
"grad_norm": 1.9583539962768555,
"learning_rate": 8.29063230493779e-07,
"loss": 4.9015,
"step": 461
},
{
"epoch": 0.7392,
"grad_norm": 1.7349518537521362,
"learning_rate": 8.196318095852909e-07,
"loss": 4.8343,
"step": 462
},
{
"epoch": 0.7408,
"grad_norm": 2.2927472591400146,
"learning_rate": 8.102438123254169e-07,
"loss": 4.9412,
"step": 463
},
{
"epoch": 0.7424,
"grad_norm": 2.7737717628479004,
"learning_rate": 8.008994813163995e-07,
"loss": 5.143,
"step": 464
},
{
"epoch": 0.744,
"grad_norm": 2.0372653007507324,
"learning_rate": 7.91599058032072e-07,
"loss": 5.1091,
"step": 465
},
{
"epoch": 0.7456,
"grad_norm": 3.974228858947754,
"learning_rate": 7.823427828116148e-07,
"loss": 4.5839,
"step": 466
},
{
"epoch": 0.7472,
"grad_norm": 5.4034857749938965,
"learning_rate": 7.731308948533431e-07,
"loss": 4.3627,
"step": 467
},
{
"epoch": 0.7488,
"grad_norm": 2.078284978866577,
"learning_rate": 7.6396363220853e-07,
"loss": 4.8323,
"step": 468
},
{
"epoch": 0.7504,
"grad_norm": 2.102445602416992,
"learning_rate": 7.548412317752523e-07,
"loss": 4.6486,
"step": 469
},
{
"epoch": 0.752,
"grad_norm": 4.4255194664001465,
"learning_rate": 7.457639292922675e-07,
"loss": 4.6267,
"step": 470
},
{
"epoch": 0.7536,
"grad_norm": 3.551602363586426,
"learning_rate": 7.367319593329248e-07,
"loss": 4.7121,
"step": 471
},
{
"epoch": 0.7552,
"grad_norm": 1.8246209621429443,
"learning_rate": 7.277455552991011e-07,
"loss": 4.8659,
"step": 472
},
{
"epoch": 0.7568,
"grad_norm": 1.8444592952728271,
"learning_rate": 7.188049494151703e-07,
"loss": 5.0402,
"step": 473
},
{
"epoch": 0.7584,
"grad_norm": 1.9729137420654297,
"learning_rate": 7.099103727220024e-07,
"loss": 4.6823,
"step": 474
},
{
"epoch": 0.76,
"grad_norm": 3.780425786972046,
"learning_rate": 7.010620550709932e-07,
"loss": 5.2781,
"step": 475
},
{
"epoch": 0.7616,
"grad_norm": 2.4710910320281982,
"learning_rate": 6.922602251181221e-07,
"loss": 4.7637,
"step": 476
},
{
"epoch": 0.7632,
"grad_norm": 1.940919041633606,
"learning_rate": 6.835051103180468e-07,
"loss": 4.7931,
"step": 477
},
{
"epoch": 0.7648,
"grad_norm": 3.2940099239349365,
"learning_rate": 6.747969369182248e-07,
"loss": 5.0902,
"step": 478
},
{
"epoch": 0.7664,
"grad_norm": 2.7820544242858887,
"learning_rate": 6.661359299530626e-07,
"loss": 4.6809,
"step": 479
},
{
"epoch": 0.768,
"grad_norm": 2.9326586723327637,
"learning_rate": 6.575223132381067e-07,
"loss": 4.5542,
"step": 480
},
{
"epoch": 0.7696,
"grad_norm": 4.366663455963135,
"learning_rate": 6.489563093642564e-07,
"loss": 4.5281,
"step": 481
},
{
"epoch": 0.7712,
"grad_norm": 3.3846657276153564,
"learning_rate": 6.4043813969201e-07,
"loss": 4.503,
"step": 482
},
{
"epoch": 0.7728,
"grad_norm": 3.948380708694458,
"learning_rate": 6.319680243457483e-07,
"loss": 5.0057,
"step": 483
},
{
"epoch": 0.7744,
"grad_norm": 2.0668182373046875,
"learning_rate": 6.235461822080449e-07,
"loss": 4.7044,
"step": 484
},
{
"epoch": 0.776,
"grad_norm": 1.4763765335083008,
"learning_rate": 6.151728309140071e-07,
"loss": 4.7506,
"step": 485
},
{
"epoch": 0.7776,
"grad_norm": 2.3119959831237793,
"learning_rate": 6.068481868456558e-07,
"loss": 4.6829,
"step": 486
},
{
"epoch": 0.7792,
"grad_norm": 1.5761213302612305,
"learning_rate": 5.985724651263325e-07,
"loss": 4.7322,
"step": 487
},
{
"epoch": 0.7808,
"grad_norm": 2.6196329593658447,
"learning_rate": 5.903458796151382e-07,
"loss": 4.5597,
"step": 488
},
{
"epoch": 0.7824,
"grad_norm": 3.956088066101074,
"learning_rate": 5.821686429014093e-07,
"loss": 5.0486,
"step": 489
},
{
"epoch": 0.784,
"grad_norm": 2.1807007789611816,
"learning_rate": 5.740409662992244e-07,
"loss": 4.837,
"step": 490
},
{
"epoch": 0.7856,
"grad_norm": 1.5621287822723389,
"learning_rate": 5.659630598419391e-07,
"loss": 4.6682,
"step": 491
},
{
"epoch": 0.7872,
"grad_norm": 1.8808839321136475,
"learning_rate": 5.579351322767643e-07,
"loss": 4.7396,
"step": 492
},
{
"epoch": 0.7888,
"grad_norm": 3.365577220916748,
"learning_rate": 5.499573910593689e-07,
"loss": 4.9589,
"step": 493
},
{
"epoch": 0.7904,
"grad_norm": 5.085302829742432,
"learning_rate": 5.420300423485167e-07,
"loss": 4.3842,
"step": 494
},
{
"epoch": 0.792,
"grad_norm": 2.0497047901153564,
"learning_rate": 5.341532910007432e-07,
"loss": 4.7019,
"step": 495
},
{
"epoch": 0.7936,
"grad_norm": 2.7462823390960693,
"learning_rate": 5.263273405650601e-07,
"loss": 4.9158,
"step": 496
},
{
"epoch": 0.7952,
"grad_norm": 4.372661590576172,
"learning_rate": 5.185523932776923e-07,
"loss": 5.0582,
"step": 497
},
{
"epoch": 0.7968,
"grad_norm": 1.832248330116272,
"learning_rate": 5.108286500568562e-07,
"loss": 4.7084,
"step": 498
},
{
"epoch": 0.7984,
"grad_norm": 1.857485055923462,
"learning_rate": 5.031563104975654e-07,
"loss": 4.8461,
"step": 499
},
{
"epoch": 0.8,
"grad_norm": 1.9519814252853394,
"learning_rate": 4.95535572866474e-07,
"loss": 4.6815,
"step": 500
},
{
"epoch": 0.8016,
"grad_norm": 1.8420370817184448,
"learning_rate": 4.879666340967493e-07,
"loss": 4.6342,
"step": 501
},
{
"epoch": 0.8032,
"grad_norm": 1.6990652084350586,
"learning_rate": 4.804496897829883e-07,
"loss": 4.8412,
"step": 502
},
{
"epoch": 0.8048,
"grad_norm": 2.233738899230957,
"learning_rate": 4.7298493417616024e-07,
"loss": 4.6173,
"step": 503
},
{
"epoch": 0.8064,
"grad_norm": 4.957152366638184,
"learning_rate": 4.6557256017858485e-07,
"loss": 5.1067,
"step": 504
},
{
"epoch": 0.808,
"grad_norm": 2.38051438331604,
"learning_rate": 4.582127593389524e-07,
"loss": 4.8153,
"step": 505
},
{
"epoch": 0.8096,
"grad_norm": 2.2880401611328125,
"learning_rate": 4.5090572184736863e-07,
"loss": 4.7761,
"step": 506
},
{
"epoch": 0.8112,
"grad_norm": 4.986752510070801,
"learning_rate": 4.4365163653044415e-07,
"loss": 4.3973,
"step": 507
},
{
"epoch": 0.8128,
"grad_norm": 2.3544347286224365,
"learning_rate": 4.3645069084641195e-07,
"loss": 4.7925,
"step": 508
},
{
"epoch": 0.8144,
"grad_norm": 4.02407169342041,
"learning_rate": 4.293030708802834e-07,
"loss": 5.13,
"step": 509
},
{
"epoch": 0.816,
"grad_norm": 1.8454115390777588,
"learning_rate": 4.222089613390412e-07,
"loss": 4.7105,
"step": 510
},
{
"epoch": 0.8176,
"grad_norm": 1.984868049621582,
"learning_rate": 4.1516854554686485e-07,
"loss": 4.909,
"step": 511
},
{
"epoch": 0.8192,
"grad_norm": 2.2985498905181885,
"learning_rate": 4.0818200544039484e-07,
"loss": 4.6702,
"step": 512
},
{
"epoch": 0.8208,
"grad_norm": 1.8037238121032715,
"learning_rate": 4.012495215640272e-07,
"loss": 4.8734,
"step": 513
},
{
"epoch": 0.8224,
"grad_norm": 1.7107319831848145,
"learning_rate": 3.9437127306525295e-07,
"loss": 4.8498,
"step": 514
},
{
"epoch": 0.824,
"grad_norm": 4.2914252281188965,
"learning_rate": 3.875474376900254e-07,
"loss": 4.4105,
"step": 515
},
{
"epoch": 0.8256,
"grad_norm": 4.187788009643555,
"learning_rate": 3.8077819177816695e-07,
"loss": 4.4916,
"step": 516
},
{
"epoch": 0.8272,
"grad_norm": 1.9808950424194336,
"learning_rate": 3.740637102588143e-07,
"loss": 4.8758,
"step": 517
},
{
"epoch": 0.8288,
"grad_norm": 1.8685753345489502,
"learning_rate": 3.6740416664589634e-07,
"loss": 4.7956,
"step": 518
},
{
"epoch": 0.8304,
"grad_norm": 2.0504117012023926,
"learning_rate": 3.607997330336493e-07,
"loss": 4.7719,
"step": 519
},
{
"epoch": 0.832,
"grad_norm": 5.287473678588867,
"learning_rate": 3.5425058009217193e-07,
"loss": 5.3066,
"step": 520
},
{
"epoch": 0.8336,
"grad_norm": 2.0931029319763184,
"learning_rate": 3.4775687706301437e-07,
"loss": 4.8148,
"step": 521
},
{
"epoch": 0.8352,
"grad_norm": 2.222299098968506,
"learning_rate": 3.413187917548019e-07,
"loss": 4.8199,
"step": 522
},
{
"epoch": 0.8368,
"grad_norm": 3.6533384323120117,
"learning_rate": 3.3493649053890325e-07,
"loss": 4.508,
"step": 523
},
{
"epoch": 0.8384,
"grad_norm": 5.005606174468994,
"learning_rate": 3.2861013834512844e-07,
"loss": 5.1516,
"step": 524
},
{
"epoch": 0.84,
"grad_norm": 2.8732213973999023,
"learning_rate": 3.22339898657465e-07,
"loss": 4.7379,
"step": 525
},
{
"epoch": 0.8416,
"grad_norm": 2.044804811477661,
"learning_rate": 3.161259335098571e-07,
"loss": 4.7035,
"step": 526
},
{
"epoch": 0.8432,
"grad_norm": 3.1617050170898438,
"learning_rate": 3.0996840348201717e-07,
"loss": 5.0924,
"step": 527
},
{
"epoch": 0.8448,
"grad_norm": 1.7616994380950928,
"learning_rate": 3.0386746769527323e-07,
"loss": 4.7713,
"step": 528
},
{
"epoch": 0.8464,
"grad_norm": 3.1333634853363037,
"learning_rate": 2.97823283808461e-07,
"loss": 4.5649,
"step": 529
},
{
"epoch": 0.848,
"grad_norm": 3.0667476654052734,
"learning_rate": 2.9183600801384853e-07,
"loss": 4.9993,
"step": 530
},
{
"epoch": 0.8496,
"grad_norm": 3.067443370819092,
"learning_rate": 2.8590579503309706e-07,
"loss": 4.8349,
"step": 531
},
{
"epoch": 0.8512,
"grad_norm": 2.119407892227173,
"learning_rate": 2.8003279811326724e-07,
"loss": 4.8262,
"step": 532
},
{
"epoch": 0.8528,
"grad_norm": 1.7399306297302246,
"learning_rate": 2.742171690228562e-07,
"loss": 4.4655,
"step": 533
},
{
"epoch": 0.8544,
"grad_norm": 5.67263650894165,
"learning_rate": 2.684590580478749e-07,
"loss": 5.3852,
"step": 534
},
{
"epoch": 0.856,
"grad_norm": 1.9780325889587402,
"learning_rate": 2.6275861398796785e-07,
"loss": 4.6963,
"step": 535
},
{
"epoch": 0.8576,
"grad_norm": 2.746950626373291,
"learning_rate": 2.57115984152565e-07,
"loss": 4.4827,
"step": 536
},
{
"epoch": 0.8592,
"grad_norm": 2.1303367614746094,
"learning_rate": 2.515313143570744e-07,
"loss": 4.8962,
"step": 537
},
{
"epoch": 0.8608,
"grad_norm": 5.141050338745117,
"learning_rate": 2.4600474891911696e-07,
"loss": 4.473,
"step": 538
},
{
"epoch": 0.8624,
"grad_norm": 1.878696084022522,
"learning_rate": 2.405364306547955e-07,
"loss": 4.9083,
"step": 539
},
{
"epoch": 0.864,
"grad_norm": 3.4774856567382812,
"learning_rate": 2.3512650087500338e-07,
"loss": 5.1237,
"step": 540
},
{
"epoch": 0.8656,
"grad_norm": 1.5913909673690796,
"learning_rate": 2.297750993817746e-07,
"loss": 4.8378,
"step": 541
},
{
"epoch": 0.8672,
"grad_norm": 6.427492141723633,
"learning_rate": 2.2448236446466847e-07,
"loss": 5.2029,
"step": 542
},
{
"epoch": 0.8688,
"grad_norm": 3.322718620300293,
"learning_rate": 2.1924843289719862e-07,
"loss": 5.0555,
"step": 543
},
{
"epoch": 0.8704,
"grad_norm": 2.134530782699585,
"learning_rate": 2.140734399332975e-07,
"loss": 4.7253,
"step": 544
},
{
"epoch": 0.872,
"grad_norm": 2.1992297172546387,
"learning_rate": 2.0895751930382125e-07,
"loss": 4.987,
"step": 545
},
{
"epoch": 0.8736,
"grad_norm": 2.8758695125579834,
"learning_rate": 2.0390080321309236e-07,
"loss": 5.0055,
"step": 546
},
{
"epoch": 0.8752,
"grad_norm": 1.8449783325195312,
"learning_rate": 1.9890342233548605e-07,
"loss": 4.7254,
"step": 547
},
{
"epoch": 0.8768,
"grad_norm": 4.943190097808838,
"learning_rate": 1.9396550581205208e-07,
"loss": 5.1614,
"step": 548
},
{
"epoch": 0.8784,
"grad_norm": 2.6197075843811035,
"learning_rate": 1.8908718124717562e-07,
"loss": 4.6992,
"step": 549
},
{
"epoch": 0.88,
"grad_norm": 1.9276237487792969,
"learning_rate": 1.8426857470528414e-07,
"loss": 4.6464,
"step": 550
},
{
"epoch": 0.8816,
"grad_norm": 2.302633762359619,
"learning_rate": 1.7950981070758488e-07,
"loss": 5.0763,
"step": 551
},
{
"epoch": 0.8832,
"grad_norm": 1.8034027814865112,
"learning_rate": 1.7481101222885126e-07,
"loss": 4.8966,
"step": 552
},
{
"epoch": 0.8848,
"grad_norm": 2.367156744003296,
"learning_rate": 1.7017230069424101e-07,
"loss": 4.9089,
"step": 553
},
{
"epoch": 0.8864,
"grad_norm": 1.993738055229187,
"learning_rate": 1.6559379597616136e-07,
"loss": 4.9183,
"step": 554
},
{
"epoch": 0.888,
"grad_norm": 1.5899828672409058,
"learning_rate": 1.610756163911703e-07,
"loss": 4.6321,
"step": 555
},
{
"epoch": 0.8896,
"grad_norm": 1.7773946523666382,
"learning_rate": 1.5661787869691858e-07,
"loss": 4.7952,
"step": 556
},
{
"epoch": 0.8912,
"grad_norm": 1.9783583879470825,
"learning_rate": 1.5222069808913303e-07,
"loss": 4.9522,
"step": 557
},
{
"epoch": 0.8928,
"grad_norm": 7.031003475189209,
"learning_rate": 1.4788418819864037e-07,
"loss": 4.3112,
"step": 558
},
{
"epoch": 0.8944,
"grad_norm": 2.8149020671844482,
"learning_rate": 1.4360846108842836e-07,
"loss": 4.4774,
"step": 559
},
{
"epoch": 0.896,
"grad_norm": 5.359309673309326,
"learning_rate": 1.3939362725075344e-07,
"loss": 4.4685,
"step": 560
},
{
"epoch": 0.8976,
"grad_norm": 4.2358622550964355,
"learning_rate": 1.352397956042828e-07,
"loss": 4.582,
"step": 561
},
{
"epoch": 0.8992,
"grad_norm": 5.378741264343262,
"learning_rate": 1.3114707349127954e-07,
"loss": 4.4472,
"step": 562
},
{
"epoch": 0.9008,
"grad_norm": 1.6993236541748047,
"learning_rate": 1.271155666748311e-07,
"loss": 4.7247,
"step": 563
},
{
"epoch": 0.9024,
"grad_norm": 2.0421314239501953,
"learning_rate": 1.2314537933611425e-07,
"loss": 4.7235,
"step": 564
},
{
"epoch": 0.904,
"grad_norm": 1.6007124185562134,
"learning_rate": 1.1923661407170327e-07,
"loss": 4.7692,
"step": 565
},
{
"epoch": 0.9056,
"grad_norm": 3.892461061477661,
"learning_rate": 1.1538937189091825e-07,
"loss": 4.4143,
"step": 566
},
{
"epoch": 0.9072,
"grad_norm": 1.651253581047058,
"learning_rate": 1.1160375221321712e-07,
"loss": 4.7604,
"step": 567
},
{
"epoch": 0.9088,
"grad_norm": 2.3732411861419678,
"learning_rate": 1.0787985286562219e-07,
"loss": 5.0233,
"step": 568
},
{
"epoch": 0.9104,
"grad_norm": 1.8846756219863892,
"learning_rate": 1.0421777008019663e-07,
"loss": 4.7434,
"step": 569
},
{
"epoch": 0.912,
"grad_norm": 2.1072731018066406,
"learning_rate": 1.00617598491555e-07,
"loss": 4.6262,
"step": 570
},
{
"epoch": 0.9136,
"grad_norm": 3.2598724365234375,
"learning_rate": 9.707943113441837e-08,
"loss": 4.5572,
"step": 571
},
{
"epoch": 0.9152,
"grad_norm": 1.9958816766738892,
"learning_rate": 9.360335944121029e-08,
"loss": 4.7742,
"step": 572
},
{
"epoch": 0.9168,
"grad_norm": 2.2336180210113525,
"learning_rate": 9.018947323969379e-08,
"loss": 4.6511,
"step": 573
},
{
"epoch": 0.9184,
"grad_norm": 2.4841959476470947,
"learning_rate": 8.683786075065065e-08,
"loss": 4.8323,
"step": 574
},
{
"epoch": 0.92,
"grad_norm": 3.4365248680114746,
"learning_rate": 8.354860858560021e-08,
"loss": 4.4446,
"step": 575
},
{
"epoch": 0.9216,
"grad_norm": 1.628080129623413,
"learning_rate": 8.032180174456283e-08,
"loss": 4.8138,
"step": 576
},
{
"epoch": 0.9232,
"grad_norm": 2.9042820930480957,
"learning_rate": 7.715752361386303e-08,
"loss": 4.5392,
"step": 577
},
{
"epoch": 0.9248,
"grad_norm": 5.322256565093994,
"learning_rate": 7.405585596397314e-08,
"loss": 4.4101,
"step": 578
},
{
"epoch": 0.9264,
"grad_norm": 2.6332600116729736,
"learning_rate": 7.101687894740278e-08,
"loss": 4.4653,
"step": 579
},
{
"epoch": 0.928,
"grad_norm": 1.9815305471420288,
"learning_rate": 6.804067109662443e-08,
"loss": 4.8041,
"step": 580
},
{
"epoch": 0.9296,
"grad_norm": 1.8767225742340088,
"learning_rate": 6.512730932204698e-08,
"loss": 4.8275,
"step": 581
},
{
"epoch": 0.9312,
"grad_norm": 6.940856456756592,
"learning_rate": 6.227686891002671e-08,
"loss": 4.1987,
"step": 582
},
{
"epoch": 0.9328,
"grad_norm": 1.7364959716796875,
"learning_rate": 5.948942352092118e-08,
"loss": 4.781,
"step": 583
},
{
"epoch": 0.9344,
"grad_norm": 2.0267224311828613,
"learning_rate": 5.6765045187187614e-08,
"loss": 4.7623,
"step": 584
},
{
"epoch": 0.936,
"grad_norm": 1.9444948434829712,
"learning_rate": 5.4103804311520516e-08,
"loss": 4.7045,
"step": 585
},
{
"epoch": 0.9376,
"grad_norm": 1.6842372417449951,
"learning_rate": 5.150576966503063e-08,
"loss": 4.7922,
"step": 586
},
{
"epoch": 0.9392,
"grad_norm": 1.7049705982208252,
"learning_rate": 4.897100838547081e-08,
"loss": 4.4934,
"step": 587
},
{
"epoch": 0.9408,
"grad_norm": 1.873868703842163,
"learning_rate": 4.649958597549964e-08,
"loss": 4.7685,
"step": 588
},
{
"epoch": 0.9424,
"grad_norm": 2.288904905319214,
"learning_rate": 4.409156630098777e-08,
"loss": 4.8123,
"step": 589
},
{
"epoch": 0.944,
"grad_norm": 5.1646728515625,
"learning_rate": 4.174701158936895e-08,
"loss": 5.0946,
"step": 590
},
{
"epoch": 0.9456,
"grad_norm": 3.191342830657959,
"learning_rate": 3.946598242803162e-08,
"loss": 4.4934,
"step": 591
},
{
"epoch": 0.9472,
"grad_norm": 6.0603132247924805,
"learning_rate": 3.7248537762752666e-08,
"loss": 4.3802,
"step": 592
},
{
"epoch": 0.9488,
"grad_norm": 3.892263174057007,
"learning_rate": 3.5094734896174985e-08,
"loss": 5.0084,
"step": 593
},
{
"epoch": 0.9504,
"grad_norm": 3.3396358489990234,
"learning_rate": 3.300462948632593e-08,
"loss": 4.5399,
"step": 594
},
{
"epoch": 0.952,
"grad_norm": 2.3887295722961426,
"learning_rate": 3.097827554517929e-08,
"loss": 5.0534,
"step": 595
},
{
"epoch": 0.9536,
"grad_norm": 7.113802909851074,
"learning_rate": 2.9015725437259724e-08,
"loss": 4.074,
"step": 596
},
{
"epoch": 0.9552,
"grad_norm": 1.5704362392425537,
"learning_rate": 2.711702987828968e-08,
"loss": 4.6857,
"step": 597
},
{
"epoch": 0.9568,
"grad_norm": 2.9799883365631104,
"learning_rate": 2.5282237933877962e-08,
"loss": 4.5343,
"step": 598
},
{
"epoch": 0.9584,
"grad_norm": 1.7382383346557617,
"learning_rate": 2.351139701825267e-08,
"loss": 4.7438,
"step": 599
},
{
"epoch": 0.96,
"grad_norm": 2.191913366317749,
"learning_rate": 2.180455289303579e-08,
"loss": 4.9251,
"step": 600
}
],
"logging_steps": 1,
"max_steps": 625,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 200,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 4.20793011929088e+16,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}