{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 0,
"global_step": 1284,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.000778816199376947,
"grad_norm": 1.09375,
"learning_rate": 1e-05,
"loss": 2.4316,
"step": 1
},
{
"epoch": 0.001557632398753894,
"grad_norm": 1.046875,
"learning_rate": 9.992211838006231e-06,
"loss": 2.4405,
"step": 2
},
{
"epoch": 0.002336448598130841,
"grad_norm": 0.96484375,
"learning_rate": 9.984423676012462e-06,
"loss": 2.4311,
"step": 3
},
{
"epoch": 0.003115264797507788,
"grad_norm": 0.8046875,
"learning_rate": 9.976635514018693e-06,
"loss": 2.3039,
"step": 4
},
{
"epoch": 0.003894080996884735,
"grad_norm": 0.7265625,
"learning_rate": 9.968847352024923e-06,
"loss": 2.3803,
"step": 5
},
{
"epoch": 0.004672897196261682,
"grad_norm": 0.578125,
"learning_rate": 9.961059190031154e-06,
"loss": 2.1381,
"step": 6
},
{
"epoch": 0.005451713395638629,
"grad_norm": 0.53125,
"learning_rate": 9.953271028037384e-06,
"loss": 2.1556,
"step": 7
},
{
"epoch": 0.006230529595015576,
"grad_norm": 0.5390625,
"learning_rate": 9.945482866043615e-06,
"loss": 2.2054,
"step": 8
},
{
"epoch": 0.007009345794392523,
"grad_norm": 0.53515625,
"learning_rate": 9.937694704049845e-06,
"loss": 2.287,
"step": 9
},
{
"epoch": 0.00778816199376947,
"grad_norm": 0.486328125,
"learning_rate": 9.929906542056076e-06,
"loss": 2.2063,
"step": 10
},
{
"epoch": 0.008566978193146417,
"grad_norm": 0.494140625,
"learning_rate": 9.922118380062306e-06,
"loss": 2.2148,
"step": 11
},
{
"epoch": 0.009345794392523364,
"grad_norm": 0.47265625,
"learning_rate": 9.914330218068537e-06,
"loss": 2.1761,
"step": 12
},
{
"epoch": 0.010124610591900311,
"grad_norm": 0.4296875,
"learning_rate": 9.906542056074768e-06,
"loss": 2.0733,
"step": 13
},
{
"epoch": 0.010903426791277258,
"grad_norm": 0.419921875,
"learning_rate": 9.898753894080998e-06,
"loss": 2.0608,
"step": 14
},
{
"epoch": 0.011682242990654205,
"grad_norm": 0.515625,
"learning_rate": 9.890965732087229e-06,
"loss": 2.1368,
"step": 15
},
{
"epoch": 0.012461059190031152,
"grad_norm": 0.44140625,
"learning_rate": 9.883177570093458e-06,
"loss": 2.0968,
"step": 16
},
{
"epoch": 0.0132398753894081,
"grad_norm": 0.37890625,
"learning_rate": 9.87538940809969e-06,
"loss": 2.0202,
"step": 17
},
{
"epoch": 0.014018691588785047,
"grad_norm": 0.384765625,
"learning_rate": 9.86760124610592e-06,
"loss": 1.9876,
"step": 18
},
{
"epoch": 0.014797507788161994,
"grad_norm": 0.455078125,
"learning_rate": 9.859813084112151e-06,
"loss": 2.0873,
"step": 19
},
{
"epoch": 0.01557632398753894,
"grad_norm": 0.43359375,
"learning_rate": 9.85202492211838e-06,
"loss": 2.0571,
"step": 20
},
{
"epoch": 0.016355140186915886,
"grad_norm": 0.3828125,
"learning_rate": 9.844236760124612e-06,
"loss": 2.0243,
"step": 21
},
{
"epoch": 0.017133956386292833,
"grad_norm": 0.36328125,
"learning_rate": 9.836448598130843e-06,
"loss": 1.9485,
"step": 22
},
{
"epoch": 0.01791277258566978,
"grad_norm": 0.376953125,
"learning_rate": 9.828660436137073e-06,
"loss": 1.9487,
"step": 23
},
{
"epoch": 0.018691588785046728,
"grad_norm": 0.361328125,
"learning_rate": 9.820872274143302e-06,
"loss": 1.9066,
"step": 24
},
{
"epoch": 0.019470404984423675,
"grad_norm": 0.34765625,
"learning_rate": 9.813084112149533e-06,
"loss": 1.9057,
"step": 25
},
{
"epoch": 0.020249221183800622,
"grad_norm": 0.337890625,
"learning_rate": 9.805295950155765e-06,
"loss": 2.0126,
"step": 26
},
{
"epoch": 0.02102803738317757,
"grad_norm": 0.291015625,
"learning_rate": 9.797507788161996e-06,
"loss": 1.8838,
"step": 27
},
{
"epoch": 0.021806853582554516,
"grad_norm": 0.29296875,
"learning_rate": 9.789719626168224e-06,
"loss": 1.8832,
"step": 28
},
{
"epoch": 0.022585669781931463,
"grad_norm": 0.29296875,
"learning_rate": 9.781931464174455e-06,
"loss": 1.8523,
"step": 29
},
{
"epoch": 0.02336448598130841,
"grad_norm": 0.322265625,
"learning_rate": 9.774143302180686e-06,
"loss": 1.9109,
"step": 30
},
{
"epoch": 0.024143302180685357,
"grad_norm": 0.337890625,
"learning_rate": 9.766355140186918e-06,
"loss": 1.8996,
"step": 31
},
{
"epoch": 0.024922118380062305,
"grad_norm": 0.31640625,
"learning_rate": 9.758566978193147e-06,
"loss": 1.8779,
"step": 32
},
{
"epoch": 0.02570093457943925,
"grad_norm": 0.30859375,
"learning_rate": 9.750778816199377e-06,
"loss": 1.8864,
"step": 33
},
{
"epoch": 0.0264797507788162,
"grad_norm": 0.287109375,
"learning_rate": 9.742990654205608e-06,
"loss": 1.8224,
"step": 34
},
{
"epoch": 0.027258566978193146,
"grad_norm": 0.298828125,
"learning_rate": 9.73520249221184e-06,
"loss": 1.8337,
"step": 35
},
{
"epoch": 0.028037383177570093,
"grad_norm": 0.28515625,
"learning_rate": 9.727414330218069e-06,
"loss": 1.8816,
"step": 36
},
{
"epoch": 0.02881619937694704,
"grad_norm": 0.294921875,
"learning_rate": 9.7196261682243e-06,
"loss": 1.8338,
"step": 37
},
{
"epoch": 0.029595015576323987,
"grad_norm": 0.28125,
"learning_rate": 9.71183800623053e-06,
"loss": 1.8188,
"step": 38
},
{
"epoch": 0.030373831775700934,
"grad_norm": 0.25390625,
"learning_rate": 9.70404984423676e-06,
"loss": 1.6996,
"step": 39
},
{
"epoch": 0.03115264797507788,
"grad_norm": 0.259765625,
"learning_rate": 9.696261682242991e-06,
"loss": 1.7731,
"step": 40
},
{
"epoch": 0.031931464174454825,
"grad_norm": 0.255859375,
"learning_rate": 9.688473520249222e-06,
"loss": 1.7801,
"step": 41
},
{
"epoch": 0.03271028037383177,
"grad_norm": 0.255859375,
"learning_rate": 9.680685358255452e-06,
"loss": 1.7751,
"step": 42
},
{
"epoch": 0.03348909657320872,
"grad_norm": 0.2578125,
"learning_rate": 9.672897196261683e-06,
"loss": 1.7546,
"step": 43
},
{
"epoch": 0.03426791277258567,
"grad_norm": 0.28125,
"learning_rate": 9.665109034267914e-06,
"loss": 1.7484,
"step": 44
},
{
"epoch": 0.035046728971962614,
"grad_norm": 0.23828125,
"learning_rate": 9.657320872274144e-06,
"loss": 1.7439,
"step": 45
},
{
"epoch": 0.03582554517133956,
"grad_norm": 0.2412109375,
"learning_rate": 9.649532710280375e-06,
"loss": 1.7632,
"step": 46
},
{
"epoch": 0.03660436137071651,
"grad_norm": 0.24609375,
"learning_rate": 9.641744548286605e-06,
"loss": 1.7105,
"step": 47
},
{
"epoch": 0.037383177570093455,
"grad_norm": 0.228515625,
"learning_rate": 9.633956386292836e-06,
"loss": 1.7016,
"step": 48
},
{
"epoch": 0.0381619937694704,
"grad_norm": 0.2158203125,
"learning_rate": 9.626168224299066e-06,
"loss": 1.7276,
"step": 49
},
{
"epoch": 0.03894080996884735,
"grad_norm": 0.2490234375,
"learning_rate": 9.618380062305297e-06,
"loss": 1.7563,
"step": 50
},
{
"epoch": 0.0397196261682243,
"grad_norm": 0.2275390625,
"learning_rate": 9.610591900311527e-06,
"loss": 1.6692,
"step": 51
},
{
"epoch": 0.040498442367601244,
"grad_norm": 0.2314453125,
"learning_rate": 9.602803738317758e-06,
"loss": 1.6576,
"step": 52
},
{
"epoch": 0.04127725856697819,
"grad_norm": 0.2265625,
"learning_rate": 9.595015576323989e-06,
"loss": 1.6614,
"step": 53
},
{
"epoch": 0.04205607476635514,
"grad_norm": 0.2421875,
"learning_rate": 9.58722741433022e-06,
"loss": 1.6607,
"step": 54
},
{
"epoch": 0.042834890965732085,
"grad_norm": 0.287109375,
"learning_rate": 9.57943925233645e-06,
"loss": 1.6846,
"step": 55
},
{
"epoch": 0.04361370716510903,
"grad_norm": 0.2265625,
"learning_rate": 9.57165109034268e-06,
"loss": 1.5872,
"step": 56
},
{
"epoch": 0.04439252336448598,
"grad_norm": 0.3046875,
"learning_rate": 9.563862928348911e-06,
"loss": 1.5642,
"step": 57
},
{
"epoch": 0.045171339563862926,
"grad_norm": 0.2099609375,
"learning_rate": 9.556074766355141e-06,
"loss": 1.6665,
"step": 58
},
{
"epoch": 0.045950155763239874,
"grad_norm": 0.2060546875,
"learning_rate": 9.548286604361372e-06,
"loss": 1.5797,
"step": 59
},
{
"epoch": 0.04672897196261682,
"grad_norm": 0.2314453125,
"learning_rate": 9.540498442367601e-06,
"loss": 1.5996,
"step": 60
},
{
"epoch": 0.04750778816199377,
"grad_norm": 0.26953125,
"learning_rate": 9.532710280373833e-06,
"loss": 1.6497,
"step": 61
},
{
"epoch": 0.048286604361370715,
"grad_norm": 0.21484375,
"learning_rate": 9.524922118380064e-06,
"loss": 1.5932,
"step": 62
},
{
"epoch": 0.04906542056074766,
"grad_norm": 0.220703125,
"learning_rate": 9.517133956386294e-06,
"loss": 1.6371,
"step": 63
},
{
"epoch": 0.04984423676012461,
"grad_norm": 0.2431640625,
"learning_rate": 9.509345794392523e-06,
"loss": 1.5744,
"step": 64
},
{
"epoch": 0.050623052959501556,
"grad_norm": 0.275390625,
"learning_rate": 9.501557632398755e-06,
"loss": 1.6228,
"step": 65
},
{
"epoch": 0.0514018691588785,
"grad_norm": 0.2451171875,
"learning_rate": 9.493769470404986e-06,
"loss": 1.5503,
"step": 66
},
{
"epoch": 0.05218068535825545,
"grad_norm": 0.251953125,
"learning_rate": 9.485981308411217e-06,
"loss": 1.5528,
"step": 67
},
{
"epoch": 0.0529595015576324,
"grad_norm": 0.251953125,
"learning_rate": 9.478193146417445e-06,
"loss": 1.5563,
"step": 68
},
{
"epoch": 0.053738317757009345,
"grad_norm": 0.25390625,
"learning_rate": 9.470404984423676e-06,
"loss": 1.5813,
"step": 69
},
{
"epoch": 0.05451713395638629,
"grad_norm": 0.2412109375,
"learning_rate": 9.462616822429908e-06,
"loss": 1.559,
"step": 70
},
{
"epoch": 0.05529595015576324,
"grad_norm": 0.33984375,
"learning_rate": 9.454828660436139e-06,
"loss": 1.5393,
"step": 71
},
{
"epoch": 0.056074766355140186,
"grad_norm": 0.263671875,
"learning_rate": 9.447040498442368e-06,
"loss": 1.5641,
"step": 72
},
{
"epoch": 0.05685358255451713,
"grad_norm": 0.287109375,
"learning_rate": 9.439252336448598e-06,
"loss": 1.5193,
"step": 73
},
{
"epoch": 0.05763239875389408,
"grad_norm": 0.2275390625,
"learning_rate": 9.431464174454829e-06,
"loss": 1.5542,
"step": 74
},
{
"epoch": 0.05841121495327103,
"grad_norm": 0.28125,
"learning_rate": 9.423676012461061e-06,
"loss": 1.4928,
"step": 75
},
{
"epoch": 0.059190031152647975,
"grad_norm": 0.41015625,
"learning_rate": 9.41588785046729e-06,
"loss": 1.5632,
"step": 76
},
{
"epoch": 0.05996884735202492,
"grad_norm": 0.322265625,
"learning_rate": 9.40809968847352e-06,
"loss": 1.5595,
"step": 77
},
{
"epoch": 0.06074766355140187,
"grad_norm": 0.37890625,
"learning_rate": 9.400311526479751e-06,
"loss": 1.4473,
"step": 78
},
{
"epoch": 0.061526479750778816,
"grad_norm": 0.271484375,
"learning_rate": 9.392523364485983e-06,
"loss": 1.5442,
"step": 79
},
{
"epoch": 0.06230529595015576,
"grad_norm": 0.2255859375,
"learning_rate": 9.384735202492212e-06,
"loss": 1.5057,
"step": 80
},
{
"epoch": 0.0630841121495327,
"grad_norm": 0.326171875,
"learning_rate": 9.376947040498443e-06,
"loss": 1.5019,
"step": 81
},
{
"epoch": 0.06386292834890965,
"grad_norm": 0.3125,
"learning_rate": 9.369158878504673e-06,
"loss": 1.5154,
"step": 82
},
{
"epoch": 0.0646417445482866,
"grad_norm": 0.263671875,
"learning_rate": 9.361370716510904e-06,
"loss": 1.5008,
"step": 83
},
{
"epoch": 0.06542056074766354,
"grad_norm": 0.27734375,
"learning_rate": 9.353582554517135e-06,
"loss": 1.4242,
"step": 84
},
{
"epoch": 0.06619937694704049,
"grad_norm": 0.2294921875,
"learning_rate": 9.345794392523365e-06,
"loss": 1.4671,
"step": 85
},
{
"epoch": 0.06697819314641744,
"grad_norm": 0.26171875,
"learning_rate": 9.338006230529596e-06,
"loss": 1.4899,
"step": 86
},
{
"epoch": 0.06775700934579439,
"grad_norm": 0.28515625,
"learning_rate": 9.330218068535826e-06,
"loss": 1.5094,
"step": 87
},
{
"epoch": 0.06853582554517133,
"grad_norm": 0.2314453125,
"learning_rate": 9.322429906542057e-06,
"loss": 1.4435,
"step": 88
},
{
"epoch": 0.06931464174454828,
"grad_norm": 0.240234375,
"learning_rate": 9.314641744548287e-06,
"loss": 1.5046,
"step": 89
},
{
"epoch": 0.07009345794392523,
"grad_norm": 0.2353515625,
"learning_rate": 9.306853582554518e-06,
"loss": 1.4621,
"step": 90
},
{
"epoch": 0.07087227414330217,
"grad_norm": 0.2265625,
"learning_rate": 9.299065420560748e-06,
"loss": 1.4399,
"step": 91
},
{
"epoch": 0.07165109034267912,
"grad_norm": 0.2294921875,
"learning_rate": 9.291277258566979e-06,
"loss": 1.4086,
"step": 92
},
{
"epoch": 0.07242990654205607,
"grad_norm": 0.30859375,
"learning_rate": 9.28348909657321e-06,
"loss": 1.4368,
"step": 93
},
{
"epoch": 0.07320872274143302,
"grad_norm": 0.236328125,
"learning_rate": 9.27570093457944e-06,
"loss": 1.4726,
"step": 94
},
{
"epoch": 0.07398753894080996,
"grad_norm": 0.2578125,
"learning_rate": 9.26791277258567e-06,
"loss": 1.4645,
"step": 95
},
{
"epoch": 0.07476635514018691,
"grad_norm": 0.216796875,
"learning_rate": 9.260124610591901e-06,
"loss": 1.4618,
"step": 96
},
{
"epoch": 0.07554517133956386,
"grad_norm": 0.228515625,
"learning_rate": 9.252336448598132e-06,
"loss": 1.4265,
"step": 97
},
{
"epoch": 0.0763239875389408,
"grad_norm": 0.2216796875,
"learning_rate": 9.244548286604362e-06,
"loss": 1.4526,
"step": 98
},
{
"epoch": 0.07710280373831775,
"grad_norm": 0.2578125,
"learning_rate": 9.236760124610593e-06,
"loss": 1.3831,
"step": 99
},
{
"epoch": 0.0778816199376947,
"grad_norm": 0.23828125,
"learning_rate": 9.228971962616824e-06,
"loss": 1.3737,
"step": 100
},
{
"epoch": 0.07866043613707165,
"grad_norm": 0.34375,
"learning_rate": 9.221183800623054e-06,
"loss": 1.5024,
"step": 101
},
{
"epoch": 0.0794392523364486,
"grad_norm": 0.23046875,
"learning_rate": 9.213395638629285e-06,
"loss": 1.4223,
"step": 102
},
{
"epoch": 0.08021806853582554,
"grad_norm": 0.2470703125,
"learning_rate": 9.205607476635515e-06,
"loss": 1.4062,
"step": 103
},
{
"epoch": 0.08099688473520249,
"grad_norm": 0.2099609375,
"learning_rate": 9.197819314641744e-06,
"loss": 1.3908,
"step": 104
},
{
"epoch": 0.08177570093457943,
"grad_norm": 0.2431640625,
"learning_rate": 9.190031152647976e-06,
"loss": 1.4445,
"step": 105
},
{
"epoch": 0.08255451713395638,
"grad_norm": 0.236328125,
"learning_rate": 9.182242990654207e-06,
"loss": 1.373,
"step": 106
},
{
"epoch": 0.08333333333333333,
"grad_norm": 0.2578125,
"learning_rate": 9.174454828660438e-06,
"loss": 1.429,
"step": 107
},
{
"epoch": 0.08411214953271028,
"grad_norm": 0.267578125,
"learning_rate": 9.166666666666666e-06,
"loss": 1.434,
"step": 108
},
{
"epoch": 0.08489096573208722,
"grad_norm": 0.2265625,
"learning_rate": 9.158878504672899e-06,
"loss": 1.3876,
"step": 109
},
{
"epoch": 0.08566978193146417,
"grad_norm": 0.275390625,
"learning_rate": 9.15109034267913e-06,
"loss": 1.4046,
"step": 110
},
{
"epoch": 0.08644859813084112,
"grad_norm": 0.2431640625,
"learning_rate": 9.14330218068536e-06,
"loss": 1.3907,
"step": 111
},
{
"epoch": 0.08722741433021806,
"grad_norm": 0.2119140625,
"learning_rate": 9.135514018691589e-06,
"loss": 1.4062,
"step": 112
},
{
"epoch": 0.08800623052959501,
"grad_norm": 0.220703125,
"learning_rate": 9.12772585669782e-06,
"loss": 1.3729,
"step": 113
},
{
"epoch": 0.08878504672897196,
"grad_norm": 0.2412109375,
"learning_rate": 9.119937694704052e-06,
"loss": 1.3693,
"step": 114
},
{
"epoch": 0.0895638629283489,
"grad_norm": 0.24609375,
"learning_rate": 9.112149532710282e-06,
"loss": 1.3798,
"step": 115
},
{
"epoch": 0.09034267912772585,
"grad_norm": 0.2275390625,
"learning_rate": 9.104361370716511e-06,
"loss": 1.342,
"step": 116
},
{
"epoch": 0.0911214953271028,
"grad_norm": 0.2158203125,
"learning_rate": 9.096573208722742e-06,
"loss": 1.3721,
"step": 117
},
{
"epoch": 0.09190031152647975,
"grad_norm": 0.2421875,
"learning_rate": 9.088785046728972e-06,
"loss": 1.4059,
"step": 118
},
{
"epoch": 0.0926791277258567,
"grad_norm": 0.2490234375,
"learning_rate": 9.080996884735204e-06,
"loss": 1.3985,
"step": 119
},
{
"epoch": 0.09345794392523364,
"grad_norm": 0.26171875,
"learning_rate": 9.073208722741433e-06,
"loss": 1.4434,
"step": 120
},
{
"epoch": 0.09423676012461059,
"grad_norm": 0.2255859375,
"learning_rate": 9.065420560747664e-06,
"loss": 1.3973,
"step": 121
},
{
"epoch": 0.09501557632398754,
"grad_norm": 0.330078125,
"learning_rate": 9.057632398753894e-06,
"loss": 1.3723,
"step": 122
},
{
"epoch": 0.09579439252336448,
"grad_norm": 0.2490234375,
"learning_rate": 9.049844236760127e-06,
"loss": 1.392,
"step": 123
},
{
"epoch": 0.09657320872274143,
"grad_norm": 0.2490234375,
"learning_rate": 9.042056074766356e-06,
"loss": 1.4014,
"step": 124
},
{
"epoch": 0.09735202492211838,
"grad_norm": 0.265625,
"learning_rate": 9.034267912772586e-06,
"loss": 1.338,
"step": 125
},
{
"epoch": 0.09813084112149532,
"grad_norm": 0.2265625,
"learning_rate": 9.026479750778817e-06,
"loss": 1.3621,
"step": 126
},
{
"epoch": 0.09890965732087227,
"grad_norm": 0.208984375,
"learning_rate": 9.018691588785047e-06,
"loss": 1.3589,
"step": 127
},
{
"epoch": 0.09968847352024922,
"grad_norm": 0.2236328125,
"learning_rate": 9.010903426791278e-06,
"loss": 1.3531,
"step": 128
},
{
"epoch": 0.10046728971962617,
"grad_norm": 0.21875,
"learning_rate": 9.003115264797508e-06,
"loss": 1.2983,
"step": 129
},
{
"epoch": 0.10124610591900311,
"grad_norm": 0.2578125,
"learning_rate": 8.995327102803739e-06,
"loss": 1.3479,
"step": 130
},
{
"epoch": 0.10202492211838006,
"grad_norm": 0.375,
"learning_rate": 8.98753894080997e-06,
"loss": 1.4297,
"step": 131
},
{
"epoch": 0.102803738317757,
"grad_norm": 0.236328125,
"learning_rate": 8.9797507788162e-06,
"loss": 1.3263,
"step": 132
},
{
"epoch": 0.10358255451713395,
"grad_norm": 0.248046875,
"learning_rate": 8.97196261682243e-06,
"loss": 1.3771,
"step": 133
},
{
"epoch": 0.1043613707165109,
"grad_norm": 0.2236328125,
"learning_rate": 8.964174454828661e-06,
"loss": 1.3477,
"step": 134
},
{
"epoch": 0.10514018691588785,
"grad_norm": 0.2333984375,
"learning_rate": 8.956386292834892e-06,
"loss": 1.3549,
"step": 135
},
{
"epoch": 0.1059190031152648,
"grad_norm": 0.25,
"learning_rate": 8.948598130841122e-06,
"loss": 1.3199,
"step": 136
},
{
"epoch": 0.10669781931464174,
"grad_norm": 0.232421875,
"learning_rate": 8.940809968847353e-06,
"loss": 1.3526,
"step": 137
},
{
"epoch": 0.10747663551401869,
"grad_norm": 0.2294921875,
"learning_rate": 8.933021806853583e-06,
"loss": 1.3547,
"step": 138
},
{
"epoch": 0.10825545171339564,
"grad_norm": 0.2138671875,
"learning_rate": 8.925233644859814e-06,
"loss": 1.2997,
"step": 139
},
{
"epoch": 0.10903426791277258,
"grad_norm": 0.28515625,
"learning_rate": 8.917445482866045e-06,
"loss": 1.3711,
"step": 140
},
{
"epoch": 0.10981308411214953,
"grad_norm": 0.220703125,
"learning_rate": 8.909657320872275e-06,
"loss": 1.3632,
"step": 141
},
{
"epoch": 0.11059190031152648,
"grad_norm": 0.267578125,
"learning_rate": 8.901869158878506e-06,
"loss": 1.3293,
"step": 142
},
{
"epoch": 0.11137071651090343,
"grad_norm": 0.349609375,
"learning_rate": 8.894080996884736e-06,
"loss": 1.3401,
"step": 143
},
{
"epoch": 0.11214953271028037,
"grad_norm": 0.2421875,
"learning_rate": 8.886292834890967e-06,
"loss": 1.3405,
"step": 144
},
{
"epoch": 0.11292834890965732,
"grad_norm": 0.275390625,
"learning_rate": 8.878504672897197e-06,
"loss": 1.361,
"step": 145
},
{
"epoch": 0.11370716510903427,
"grad_norm": 0.302734375,
"learning_rate": 8.870716510903428e-06,
"loss": 1.3283,
"step": 146
},
{
"epoch": 0.11448598130841121,
"grad_norm": 0.22265625,
"learning_rate": 8.862928348909659e-06,
"loss": 1.3273,
"step": 147
},
{
"epoch": 0.11526479750778816,
"grad_norm": 0.28125,
"learning_rate": 8.855140186915887e-06,
"loss": 1.3738,
"step": 148
},
{
"epoch": 0.11604361370716511,
"grad_norm": 0.283203125,
"learning_rate": 8.84735202492212e-06,
"loss": 1.3686,
"step": 149
},
{
"epoch": 0.11682242990654206,
"grad_norm": 0.2373046875,
"learning_rate": 8.83956386292835e-06,
"loss": 1.3198,
"step": 150
},
{
"epoch": 0.117601246105919,
"grad_norm": 0.232421875,
"learning_rate": 8.83177570093458e-06,
"loss": 1.2868,
"step": 151
},
{
"epoch": 0.11838006230529595,
"grad_norm": 0.2275390625,
"learning_rate": 8.82398753894081e-06,
"loss": 1.279,
"step": 152
},
{
"epoch": 0.1191588785046729,
"grad_norm": 0.25390625,
"learning_rate": 8.81619937694704e-06,
"loss": 1.3339,
"step": 153
},
{
"epoch": 0.11993769470404984,
"grad_norm": 0.2373046875,
"learning_rate": 8.808411214953273e-06,
"loss": 1.3403,
"step": 154
},
{
"epoch": 0.12071651090342679,
"grad_norm": 0.236328125,
"learning_rate": 8.800623052959503e-06,
"loss": 1.3217,
"step": 155
},
{
"epoch": 0.12149532710280374,
"grad_norm": 0.2216796875,
"learning_rate": 8.792834890965732e-06,
"loss": 1.2762,
"step": 156
},
{
"epoch": 0.12227414330218069,
"grad_norm": 0.2158203125,
"learning_rate": 8.785046728971963e-06,
"loss": 1.283,
"step": 157
},
{
"epoch": 0.12305295950155763,
"grad_norm": 0.2412109375,
"learning_rate": 8.777258566978195e-06,
"loss": 1.3009,
"step": 158
},
{
"epoch": 0.12383177570093458,
"grad_norm": 0.2431640625,
"learning_rate": 8.769470404984425e-06,
"loss": 1.3085,
"step": 159
},
{
"epoch": 0.12461059190031153,
"grad_norm": 0.2353515625,
"learning_rate": 8.761682242990654e-06,
"loss": 1.2607,
"step": 160
},
{
"epoch": 0.12538940809968846,
"grad_norm": 0.263671875,
"learning_rate": 8.753894080996885e-06,
"loss": 1.3161,
"step": 161
},
{
"epoch": 0.1261682242990654,
"grad_norm": 0.251953125,
"learning_rate": 8.746105919003115e-06,
"loss": 1.3421,
"step": 162
},
{
"epoch": 0.12694704049844235,
"grad_norm": 0.259765625,
"learning_rate": 8.738317757009348e-06,
"loss": 1.3113,
"step": 163
},
{
"epoch": 0.1277258566978193,
"grad_norm": 0.259765625,
"learning_rate": 8.730529595015576e-06,
"loss": 1.3464,
"step": 164
},
{
"epoch": 0.12850467289719625,
"grad_norm": 0.26171875,
"learning_rate": 8.722741433021807e-06,
"loss": 1.3631,
"step": 165
},
{
"epoch": 0.1292834890965732,
"grad_norm": 0.28125,
"learning_rate": 8.714953271028038e-06,
"loss": 1.2891,
"step": 166
},
{
"epoch": 0.13006230529595014,
"grad_norm": 0.251953125,
"learning_rate": 8.707165109034268e-06,
"loss": 1.2854,
"step": 167
},
{
"epoch": 0.1308411214953271,
"grad_norm": 0.26171875,
"learning_rate": 8.699376947040499e-06,
"loss": 1.2571,
"step": 168
},
{
"epoch": 0.13161993769470404,
"grad_norm": 0.248046875,
"learning_rate": 8.69158878504673e-06,
"loss": 1.2204,
"step": 169
},
{
"epoch": 0.13239875389408098,
"grad_norm": 0.31640625,
"learning_rate": 8.68380062305296e-06,
"loss": 1.3122,
"step": 170
},
{
"epoch": 0.13317757009345793,
"grad_norm": 0.25390625,
"learning_rate": 8.67601246105919e-06,
"loss": 1.2584,
"step": 171
},
{
"epoch": 0.13395638629283488,
"grad_norm": 0.265625,
"learning_rate": 8.668224299065421e-06,
"loss": 1.2326,
"step": 172
},
{
"epoch": 0.13473520249221183,
"grad_norm": 0.2333984375,
"learning_rate": 8.660436137071652e-06,
"loss": 1.2475,
"step": 173
},
{
"epoch": 0.13551401869158877,
"grad_norm": 0.271484375,
"learning_rate": 8.652647975077882e-06,
"loss": 1.2502,
"step": 174
},
{
"epoch": 0.13629283489096572,
"grad_norm": 0.298828125,
"learning_rate": 8.644859813084113e-06,
"loss": 1.2808,
"step": 175
},
{
"epoch": 0.13707165109034267,
"grad_norm": 0.33203125,
"learning_rate": 8.637071651090343e-06,
"loss": 1.3443,
"step": 176
},
{
"epoch": 0.1378504672897196,
"grad_norm": 0.28515625,
"learning_rate": 8.629283489096574e-06,
"loss": 1.3287,
"step": 177
},
{
"epoch": 0.13862928348909656,
"grad_norm": 0.228515625,
"learning_rate": 8.621495327102804e-06,
"loss": 1.2805,
"step": 178
},
{
"epoch": 0.1394080996884735,
"grad_norm": 0.25390625,
"learning_rate": 8.613707165109035e-06,
"loss": 1.2764,
"step": 179
},
{
"epoch": 0.14018691588785046,
"grad_norm": 0.259765625,
"learning_rate": 8.605919003115266e-06,
"loss": 1.3146,
"step": 180
},
{
"epoch": 0.1409657320872274,
"grad_norm": 0.25390625,
"learning_rate": 8.598130841121496e-06,
"loss": 1.3003,
"step": 181
},
{
"epoch": 0.14174454828660435,
"grad_norm": 0.265625,
"learning_rate": 8.590342679127727e-06,
"loss": 1.2285,
"step": 182
},
{
"epoch": 0.1425233644859813,
"grad_norm": 0.234375,
"learning_rate": 8.582554517133957e-06,
"loss": 1.304,
"step": 183
},
{
"epoch": 0.14330218068535824,
"grad_norm": 0.2470703125,
"learning_rate": 8.574766355140188e-06,
"loss": 1.2756,
"step": 184
},
{
"epoch": 0.1440809968847352,
"grad_norm": 0.25390625,
"learning_rate": 8.566978193146418e-06,
"loss": 1.299,
"step": 185
},
{
"epoch": 0.14485981308411214,
"grad_norm": 0.27734375,
"learning_rate": 8.559190031152649e-06,
"loss": 1.3096,
"step": 186
},
{
"epoch": 0.14563862928348908,
"grad_norm": 0.2392578125,
"learning_rate": 8.55140186915888e-06,
"loss": 1.259,
"step": 187
},
{
"epoch": 0.14641744548286603,
"grad_norm": 0.240234375,
"learning_rate": 8.54361370716511e-06,
"loss": 1.2369,
"step": 188
},
{
"epoch": 0.14719626168224298,
"grad_norm": 0.25390625,
"learning_rate": 8.53582554517134e-06,
"loss": 1.3037,
"step": 189
},
{
"epoch": 0.14797507788161993,
"grad_norm": 0.25,
"learning_rate": 8.528037383177571e-06,
"loss": 1.2529,
"step": 190
},
{
"epoch": 0.14875389408099687,
"grad_norm": 0.25,
"learning_rate": 8.520249221183802e-06,
"loss": 1.2578,
"step": 191
},
{
"epoch": 0.14953271028037382,
"grad_norm": 0.263671875,
"learning_rate": 8.51246105919003e-06,
"loss": 1.2672,
"step": 192
},
{
"epoch": 0.15031152647975077,
"grad_norm": 0.255859375,
"learning_rate": 8.504672897196263e-06,
"loss": 1.2454,
"step": 193
},
{
"epoch": 0.15109034267912771,
"grad_norm": 0.279296875,
"learning_rate": 8.496884735202494e-06,
"loss": 1.308,
"step": 194
},
{
"epoch": 0.15186915887850466,
"grad_norm": 0.2490234375,
"learning_rate": 8.489096573208724e-06,
"loss": 1.2374,
"step": 195
},
{
"epoch": 0.1526479750778816,
"grad_norm": 0.2490234375,
"learning_rate": 8.481308411214953e-06,
"loss": 1.2592,
"step": 196
},
{
"epoch": 0.15342679127725856,
"grad_norm": 0.28515625,
"learning_rate": 8.473520249221184e-06,
"loss": 1.291,
"step": 197
},
{
"epoch": 0.1542056074766355,
"grad_norm": 0.291015625,
"learning_rate": 8.465732087227416e-06,
"loss": 1.2845,
"step": 198
},
{
"epoch": 0.15498442367601245,
"grad_norm": 0.2490234375,
"learning_rate": 8.457943925233646e-06,
"loss": 1.2694,
"step": 199
},
{
"epoch": 0.1557632398753894,
"grad_norm": 0.255859375,
"learning_rate": 8.450155763239875e-06,
"loss": 1.2699,
"step": 200
},
{
"epoch": 0.15654205607476634,
"grad_norm": 0.26171875,
"learning_rate": 8.442367601246106e-06,
"loss": 1.2881,
"step": 201
},
{
"epoch": 0.1573208722741433,
"grad_norm": 0.25390625,
"learning_rate": 8.434579439252338e-06,
"loss": 1.267,
"step": 202
},
{
"epoch": 0.15809968847352024,
"grad_norm": 0.26171875,
"learning_rate": 8.426791277258569e-06,
"loss": 1.2341,
"step": 203
},
{
"epoch": 0.1588785046728972,
"grad_norm": 0.291015625,
"learning_rate": 8.419003115264797e-06,
"loss": 1.3185,
"step": 204
},
{
"epoch": 0.15965732087227413,
"grad_norm": 0.259765625,
"learning_rate": 8.411214953271028e-06,
"loss": 1.2707,
"step": 205
},
{
"epoch": 0.16043613707165108,
"grad_norm": 0.2734375,
"learning_rate": 8.403426791277259e-06,
"loss": 1.2719,
"step": 206
},
{
"epoch": 0.16121495327102803,
"grad_norm": 0.255859375,
"learning_rate": 8.395638629283491e-06,
"loss": 1.2555,
"step": 207
},
{
"epoch": 0.16199376947040497,
"grad_norm": 0.248046875,
"learning_rate": 8.38785046728972e-06,
"loss": 1.2425,
"step": 208
},
{
"epoch": 0.16277258566978192,
"grad_norm": 0.27734375,
"learning_rate": 8.38006230529595e-06,
"loss": 1.2461,
"step": 209
},
{
"epoch": 0.16355140186915887,
"grad_norm": 0.298828125,
"learning_rate": 8.372274143302181e-06,
"loss": 1.2795,
"step": 210
},
{
"epoch": 0.16433021806853582,
"grad_norm": 0.25,
"learning_rate": 8.364485981308411e-06,
"loss": 1.2779,
"step": 211
},
{
"epoch": 0.16510903426791276,
"grad_norm": 0.302734375,
"learning_rate": 8.356697819314642e-06,
"loss": 1.2734,
"step": 212
},
{
"epoch": 0.1658878504672897,
"grad_norm": 0.28125,
"learning_rate": 8.348909657320873e-06,
"loss": 1.2449,
"step": 213
},
{
"epoch": 0.16666666666666666,
"grad_norm": 0.302734375,
"learning_rate": 8.341121495327103e-06,
"loss": 1.257,
"step": 214
},
{
"epoch": 0.1674454828660436,
"grad_norm": 0.3046875,
"learning_rate": 8.333333333333334e-06,
"loss": 1.2767,
"step": 215
},
{
"epoch": 0.16822429906542055,
"grad_norm": 0.3046875,
"learning_rate": 8.325545171339564e-06,
"loss": 1.2968,
"step": 216
},
{
"epoch": 0.1690031152647975,
"grad_norm": 0.291015625,
"learning_rate": 8.317757009345795e-06,
"loss": 1.2328,
"step": 217
},
{
"epoch": 0.16978193146417445,
"grad_norm": 0.28515625,
"learning_rate": 8.309968847352025e-06,
"loss": 1.2436,
"step": 218
},
{
"epoch": 0.1705607476635514,
"grad_norm": 0.296875,
"learning_rate": 8.302180685358256e-06,
"loss": 1.2235,
"step": 219
},
{
"epoch": 0.17133956386292834,
"grad_norm": 0.267578125,
"learning_rate": 8.294392523364487e-06,
"loss": 1.2381,
"step": 220
},
{
"epoch": 0.1721183800623053,
"grad_norm": 0.298828125,
"learning_rate": 8.286604361370717e-06,
"loss": 1.2541,
"step": 221
},
{
"epoch": 0.17289719626168223,
"grad_norm": 0.404296875,
"learning_rate": 8.278816199376948e-06,
"loss": 1.2764,
"step": 222
},
{
"epoch": 0.17367601246105918,
"grad_norm": 0.318359375,
"learning_rate": 8.271028037383178e-06,
"loss": 1.2649,
"step": 223
},
{
"epoch": 0.17445482866043613,
"grad_norm": 0.283203125,
"learning_rate": 8.263239875389409e-06,
"loss": 1.2461,
"step": 224
},
{
"epoch": 0.17523364485981308,
"grad_norm": 0.28515625,
"learning_rate": 8.25545171339564e-06,
"loss": 1.21,
"step": 225
},
{
"epoch": 0.17601246105919002,
"grad_norm": 0.296875,
"learning_rate": 8.24766355140187e-06,
"loss": 1.2469,
"step": 226
},
{
"epoch": 0.17679127725856697,
"grad_norm": 0.30078125,
"learning_rate": 8.2398753894081e-06,
"loss": 1.2342,
"step": 227
},
{
"epoch": 0.17757009345794392,
"grad_norm": 0.291015625,
"learning_rate": 8.232087227414331e-06,
"loss": 1.2226,
"step": 228
},
{
"epoch": 0.17834890965732086,
"grad_norm": 0.2890625,
"learning_rate": 8.224299065420562e-06,
"loss": 1.1948,
"step": 229
},
{
"epoch": 0.1791277258566978,
"grad_norm": 0.267578125,
"learning_rate": 8.216510903426792e-06,
"loss": 1.2353,
"step": 230
},
{
"epoch": 0.17990654205607476,
"grad_norm": 0.29296875,
"learning_rate": 8.208722741433023e-06,
"loss": 1.2207,
"step": 231
},
{
"epoch": 0.1806853582554517,
"grad_norm": 0.27734375,
"learning_rate": 8.200934579439253e-06,
"loss": 1.2106,
"step": 232
},
{
"epoch": 0.18146417445482865,
"grad_norm": 0.345703125,
"learning_rate": 8.193146417445484e-06,
"loss": 1.2797,
"step": 233
},
{
"epoch": 0.1822429906542056,
"grad_norm": 0.400390625,
"learning_rate": 8.185358255451715e-06,
"loss": 1.2652,
"step": 234
},
{
"epoch": 0.18302180685358255,
"grad_norm": 0.314453125,
"learning_rate": 8.177570093457945e-06,
"loss": 1.2072,
"step": 235
},
{
"epoch": 0.1838006230529595,
"grad_norm": 0.3359375,
"learning_rate": 8.169781931464174e-06,
"loss": 1.2417,
"step": 236
},
{
"epoch": 0.18457943925233644,
"grad_norm": 0.3359375,
"learning_rate": 8.161993769470406e-06,
"loss": 1.2582,
"step": 237
},
{
"epoch": 0.1853582554517134,
"grad_norm": 0.326171875,
"learning_rate": 8.154205607476637e-06,
"loss": 1.1986,
"step": 238
},
{
"epoch": 0.18613707165109034,
"grad_norm": 0.349609375,
"learning_rate": 8.146417445482867e-06,
"loss": 1.2504,
"step": 239
},
{
"epoch": 0.18691588785046728,
"grad_norm": 0.326171875,
"learning_rate": 8.138629283489096e-06,
"loss": 1.2665,
"step": 240
},
{
"epoch": 0.18769470404984423,
"grad_norm": 0.3046875,
"learning_rate": 8.130841121495327e-06,
"loss": 1.2107,
"step": 241
},
{
"epoch": 0.18847352024922118,
"grad_norm": 0.30859375,
"learning_rate": 8.123052959501559e-06,
"loss": 1.2142,
"step": 242
},
{
"epoch": 0.18925233644859812,
"grad_norm": 0.310546875,
"learning_rate": 8.11526479750779e-06,
"loss": 1.231,
"step": 243
},
{
"epoch": 0.19003115264797507,
"grad_norm": 0.384765625,
"learning_rate": 8.107476635514018e-06,
"loss": 1.2851,
"step": 244
},
{
"epoch": 0.19080996884735202,
"grad_norm": 0.31640625,
"learning_rate": 8.099688473520249e-06,
"loss": 1.1898,
"step": 245
},
{
"epoch": 0.19158878504672897,
"grad_norm": 0.30859375,
"learning_rate": 8.091900311526481e-06,
"loss": 1.2228,
"step": 246
},
{
"epoch": 0.1923676012461059,
"grad_norm": 0.337890625,
"learning_rate": 8.084112149532712e-06,
"loss": 1.2472,
"step": 247
},
{
"epoch": 0.19314641744548286,
"grad_norm": 0.26953125,
"learning_rate": 8.07632398753894e-06,
"loss": 1.1859,
"step": 248
},
{
"epoch": 0.1939252336448598,
"grad_norm": 0.2890625,
"learning_rate": 8.068535825545171e-06,
"loss": 1.203,
"step": 249
},
{
"epoch": 0.19470404984423675,
"grad_norm": 0.28125,
"learning_rate": 8.060747663551402e-06,
"loss": 1.1622,
"step": 250
},
{
"epoch": 0.1954828660436137,
"grad_norm": 0.291015625,
"learning_rate": 8.052959501557634e-06,
"loss": 1.2515,
"step": 251
},
{
"epoch": 0.19626168224299065,
"grad_norm": 0.263671875,
"learning_rate": 8.045171339563863e-06,
"loss": 1.2637,
"step": 252
},
{
"epoch": 0.1970404984423676,
"grad_norm": 0.27734375,
"learning_rate": 8.037383177570094e-06,
"loss": 1.2304,
"step": 253
},
{
"epoch": 0.19781931464174454,
"grad_norm": 0.265625,
"learning_rate": 8.029595015576324e-06,
"loss": 1.2169,
"step": 254
},
{
"epoch": 0.1985981308411215,
"grad_norm": 0.259765625,
"learning_rate": 8.021806853582555e-06,
"loss": 1.2489,
"step": 255
},
{
"epoch": 0.19937694704049844,
"grad_norm": 0.2578125,
"learning_rate": 8.014018691588785e-06,
"loss": 1.2245,
"step": 256
},
{
"epoch": 0.20015576323987538,
"grad_norm": 0.236328125,
"learning_rate": 8.006230529595016e-06,
"loss": 1.1915,
"step": 257
},
{
"epoch": 0.20093457943925233,
"grad_norm": 0.30078125,
"learning_rate": 7.998442367601246e-06,
"loss": 1.2595,
"step": 258
},
{
"epoch": 0.20171339563862928,
"grad_norm": 0.296875,
"learning_rate": 7.990654205607477e-06,
"loss": 1.2603,
"step": 259
},
{
"epoch": 0.20249221183800623,
"grad_norm": 0.31640625,
"learning_rate": 7.982866043613708e-06,
"loss": 1.2428,
"step": 260
},
{
"epoch": 0.20327102803738317,
"grad_norm": 0.283203125,
"learning_rate": 7.975077881619938e-06,
"loss": 1.1629,
"step": 261
},
{
"epoch": 0.20404984423676012,
"grad_norm": 0.271484375,
"learning_rate": 7.967289719626169e-06,
"loss": 1.2117,
"step": 262
},
{
"epoch": 0.20482866043613707,
"grad_norm": 0.271484375,
"learning_rate": 7.9595015576324e-06,
"loss": 1.2057,
"step": 263
},
{
"epoch": 0.205607476635514,
"grad_norm": 0.26953125,
"learning_rate": 7.95171339563863e-06,
"loss": 1.1638,
"step": 264
},
{
"epoch": 0.20638629283489096,
"grad_norm": 0.25390625,
"learning_rate": 7.94392523364486e-06,
"loss": 1.201,
"step": 265
},
{
"epoch": 0.2071651090342679,
"grad_norm": 0.24609375,
"learning_rate": 7.936137071651091e-06,
"loss": 1.1363,
"step": 266
},
{
"epoch": 0.20794392523364486,
"grad_norm": 0.265625,
"learning_rate": 7.928348909657322e-06,
"loss": 1.2264,
"step": 267
},
{
"epoch": 0.2087227414330218,
"grad_norm": 0.3671875,
"learning_rate": 7.920560747663552e-06,
"loss": 1.2166,
"step": 268
},
{
"epoch": 0.20950155763239875,
"grad_norm": 0.265625,
"learning_rate": 7.912772585669783e-06,
"loss": 1.2012,
"step": 269
},
{
"epoch": 0.2102803738317757,
"grad_norm": 0.27734375,
"learning_rate": 7.904984423676013e-06,
"loss": 1.1879,
"step": 270
},
{
"epoch": 0.21105919003115264,
"grad_norm": 0.298828125,
"learning_rate": 7.897196261682244e-06,
"loss": 1.171,
"step": 271
},
{
"epoch": 0.2118380062305296,
"grad_norm": 0.2421875,
"learning_rate": 7.889408099688474e-06,
"loss": 1.2093,
"step": 272
},
{
"epoch": 0.21261682242990654,
"grad_norm": 0.25390625,
"learning_rate": 7.881619937694705e-06,
"loss": 1.184,
"step": 273
},
{
"epoch": 0.21339563862928349,
"grad_norm": 0.25,
"learning_rate": 7.873831775700936e-06,
"loss": 1.2163,
"step": 274
},
{
"epoch": 0.21417445482866043,
"grad_norm": 0.255859375,
"learning_rate": 7.866043613707166e-06,
"loss": 1.2047,
"step": 275
},
{
"epoch": 0.21495327102803738,
"grad_norm": 0.255859375,
"learning_rate": 7.858255451713395e-06,
"loss": 1.1969,
"step": 276
},
{
"epoch": 0.21573208722741433,
"grad_norm": 0.2421875,
"learning_rate": 7.850467289719627e-06,
"loss": 1.1736,
"step": 277
},
{
"epoch": 0.21651090342679127,
"grad_norm": 0.306640625,
"learning_rate": 7.842679127725858e-06,
"loss": 1.1486,
"step": 278
},
{
"epoch": 0.21728971962616822,
"grad_norm": 0.255859375,
"learning_rate": 7.834890965732088e-06,
"loss": 1.1695,
"step": 279
},
{
"epoch": 0.21806853582554517,
"grad_norm": 0.2451171875,
"learning_rate": 7.827102803738317e-06,
"loss": 1.1845,
"step": 280
},
{
"epoch": 0.21884735202492211,
"grad_norm": 0.271484375,
"learning_rate": 7.81931464174455e-06,
"loss": 1.2186,
"step": 281
},
{
"epoch": 0.21962616822429906,
"grad_norm": 0.2734375,
"learning_rate": 7.81152647975078e-06,
"loss": 1.203,
"step": 282
},
{
"epoch": 0.220404984423676,
"grad_norm": 0.296875,
"learning_rate": 7.80373831775701e-06,
"loss": 1.1677,
"step": 283
},
{
"epoch": 0.22118380062305296,
"grad_norm": 0.2451171875,
"learning_rate": 7.79595015576324e-06,
"loss": 1.2208,
"step": 284
},
{
"epoch": 0.2219626168224299,
"grad_norm": 0.279296875,
"learning_rate": 7.78816199376947e-06,
"loss": 1.2453,
"step": 285
},
{
"epoch": 0.22274143302180685,
"grad_norm": 0.2734375,
"learning_rate": 7.780373831775702e-06,
"loss": 1.1733,
"step": 286
},
{
"epoch": 0.2235202492211838,
"grad_norm": 0.265625,
"learning_rate": 7.772585669781933e-06,
"loss": 1.2224,
"step": 287
},
{
"epoch": 0.22429906542056074,
"grad_norm": 0.2734375,
"learning_rate": 7.764797507788162e-06,
"loss": 1.2036,
"step": 288
},
{
"epoch": 0.2250778816199377,
"grad_norm": 0.2578125,
"learning_rate": 7.757009345794392e-06,
"loss": 1.1718,
"step": 289
},
{
"epoch": 0.22585669781931464,
"grad_norm": 0.275390625,
"learning_rate": 7.749221183800623e-06,
"loss": 1.1817,
"step": 290
},
{
"epoch": 0.2266355140186916,
"grad_norm": 0.2451171875,
"learning_rate": 7.741433021806855e-06,
"loss": 1.17,
"step": 291
},
{
"epoch": 0.22741433021806853,
"grad_norm": 0.2578125,
"learning_rate": 7.733644859813084e-06,
"loss": 1.2069,
"step": 292
},
{
"epoch": 0.22819314641744548,
"grad_norm": 0.333984375,
"learning_rate": 7.725856697819315e-06,
"loss": 1.1778,
"step": 293
},
{
"epoch": 0.22897196261682243,
"grad_norm": 0.2451171875,
"learning_rate": 7.718068535825545e-06,
"loss": 1.1839,
"step": 294
},
{
"epoch": 0.22975077881619937,
"grad_norm": 0.28125,
"learning_rate": 7.710280373831777e-06,
"loss": 1.2479,
"step": 295
},
{
"epoch": 0.23052959501557632,
"grad_norm": 0.251953125,
"learning_rate": 7.702492211838006e-06,
"loss": 1.1824,
"step": 296
},
{
"epoch": 0.23130841121495327,
"grad_norm": 0.2578125,
"learning_rate": 7.694704049844237e-06,
"loss": 1.1825,
"step": 297
},
{
"epoch": 0.23208722741433022,
"grad_norm": 0.251953125,
"learning_rate": 7.686915887850467e-06,
"loss": 1.1666,
"step": 298
},
{
"epoch": 0.23286604361370716,
"grad_norm": 0.25390625,
"learning_rate": 7.679127725856698e-06,
"loss": 1.2029,
"step": 299
},
{
"epoch": 0.2336448598130841,
"grad_norm": 0.26171875,
"learning_rate": 7.671339563862929e-06,
"loss": 1.1963,
"step": 300
},
{
"epoch": 0.23442367601246106,
"grad_norm": 0.251953125,
"learning_rate": 7.663551401869159e-06,
"loss": 1.1531,
"step": 301
},
{
"epoch": 0.235202492211838,
"grad_norm": 0.40625,
"learning_rate": 7.65576323987539e-06,
"loss": 1.2107,
"step": 302
},
{
"epoch": 0.23598130841121495,
"grad_norm": 0.25390625,
"learning_rate": 7.64797507788162e-06,
"loss": 1.1905,
"step": 303
},
{
"epoch": 0.2367601246105919,
"grad_norm": 0.2734375,
"learning_rate": 7.64018691588785e-06,
"loss": 1.1731,
"step": 304
},
{
"epoch": 0.23753894080996885,
"grad_norm": 0.294921875,
"learning_rate": 7.632398753894081e-06,
"loss": 1.1994,
"step": 305
},
{
"epoch": 0.2383177570093458,
"grad_norm": 0.26953125,
"learning_rate": 7.624610591900312e-06,
"loss": 1.2398,
"step": 306
},
{
"epoch": 0.23909657320872274,
"grad_norm": 0.265625,
"learning_rate": 7.616822429906543e-06,
"loss": 1.1605,
"step": 307
},
{
"epoch": 0.2398753894080997,
"grad_norm": 0.25,
"learning_rate": 7.609034267912772e-06,
"loss": 1.1686,
"step": 308
},
{
"epoch": 0.24065420560747663,
"grad_norm": 0.255859375,
"learning_rate": 7.601246105919004e-06,
"loss": 1.2693,
"step": 309
},
{
"epoch": 0.24143302180685358,
"grad_norm": 0.259765625,
"learning_rate": 7.593457943925234e-06,
"loss": 1.1991,
"step": 310
},
{
"epoch": 0.24221183800623053,
"grad_norm": 0.26953125,
"learning_rate": 7.585669781931465e-06,
"loss": 1.2153,
"step": 311
},
{
"epoch": 0.24299065420560748,
"grad_norm": 0.2578125,
"learning_rate": 7.5778816199376945e-06,
"loss": 1.2101,
"step": 312
},
{
"epoch": 0.24376947040498442,
"grad_norm": 0.28125,
"learning_rate": 7.570093457943926e-06,
"loss": 1.2022,
"step": 313
},
{
"epoch": 0.24454828660436137,
"grad_norm": 0.25390625,
"learning_rate": 7.5623052959501565e-06,
"loss": 1.1278,
"step": 314
},
{
"epoch": 0.24532710280373832,
"grad_norm": 0.2373046875,
"learning_rate": 7.554517133956387e-06,
"loss": 1.1688,
"step": 315
},
{
"epoch": 0.24610591900311526,
"grad_norm": 0.2490234375,
"learning_rate": 7.546728971962617e-06,
"loss": 1.1474,
"step": 316
},
{
"epoch": 0.2468847352024922,
"grad_norm": 0.29296875,
"learning_rate": 7.538940809968847e-06,
"loss": 1.194,
"step": 317
},
{
"epoch": 0.24766355140186916,
"grad_norm": 0.29296875,
"learning_rate": 7.531152647975079e-06,
"loss": 1.2028,
"step": 318
},
{
"epoch": 0.2484423676012461,
"grad_norm": 0.28515625,
"learning_rate": 7.523364485981309e-06,
"loss": 1.1992,
"step": 319
},
{
"epoch": 0.24922118380062305,
"grad_norm": 0.287109375,
"learning_rate": 7.515576323987539e-06,
"loss": 1.1323,
"step": 320
},
{
"epoch": 0.25,
"grad_norm": 0.279296875,
"learning_rate": 7.50778816199377e-06,
"loss": 1.1545,
"step": 321
},
{
"epoch": 0.2507788161993769,
"grad_norm": 0.26171875,
"learning_rate": 7.500000000000001e-06,
"loss": 1.1583,
"step": 322
},
{
"epoch": 0.2515576323987539,
"grad_norm": 0.2734375,
"learning_rate": 7.492211838006232e-06,
"loss": 1.1763,
"step": 323
},
{
"epoch": 0.2523364485981308,
"grad_norm": 0.25,
"learning_rate": 7.484423676012462e-06,
"loss": 1.1343,
"step": 324
},
{
"epoch": 0.2531152647975078,
"grad_norm": 0.259765625,
"learning_rate": 7.476635514018692e-06,
"loss": 1.1823,
"step": 325
},
{
"epoch": 0.2538940809968847,
"grad_norm": 0.255859375,
"learning_rate": 7.4688473520249225e-06,
"loss": 1.164,
"step": 326
},
{
"epoch": 0.2546728971962617,
"grad_norm": 0.244140625,
"learning_rate": 7.461059190031154e-06,
"loss": 1.1168,
"step": 327
},
{
"epoch": 0.2554517133956386,
"grad_norm": 0.283203125,
"learning_rate": 7.4532710280373844e-06,
"loss": 1.169,
"step": 328
},
{
"epoch": 0.2562305295950156,
"grad_norm": 0.23828125,
"learning_rate": 7.445482866043614e-06,
"loss": 1.1442,
"step": 329
},
{
"epoch": 0.2570093457943925,
"grad_norm": 0.25390625,
"learning_rate": 7.437694704049845e-06,
"loss": 1.146,
"step": 330
},
{
"epoch": 0.25778816199376947,
"grad_norm": 0.2431640625,
"learning_rate": 7.429906542056075e-06,
"loss": 1.1346,
"step": 331
},
{
"epoch": 0.2585669781931464,
"grad_norm": 0.255859375,
"learning_rate": 7.422118380062307e-06,
"loss": 1.1912,
"step": 332
},
{
"epoch": 0.25934579439252337,
"grad_norm": 0.267578125,
"learning_rate": 7.4143302180685364e-06,
"loss": 1.1627,
"step": 333
},
{
"epoch": 0.2601246105919003,
"grad_norm": 0.259765625,
"learning_rate": 7.406542056074767e-06,
"loss": 1.1527,
"step": 334
},
{
"epoch": 0.26090342679127726,
"grad_norm": 0.26171875,
"learning_rate": 7.3987538940809976e-06,
"loss": 1.2035,
"step": 335
},
{
"epoch": 0.2616822429906542,
"grad_norm": 0.25390625,
"learning_rate": 7.390965732087229e-06,
"loss": 1.1465,
"step": 336
},
{
"epoch": 0.26246105919003115,
"grad_norm": 0.265625,
"learning_rate": 7.383177570093458e-06,
"loss": 1.1626,
"step": 337
},
{
"epoch": 0.2632398753894081,
"grad_norm": 0.2734375,
"learning_rate": 7.375389408099689e-06,
"loss": 1.1806,
"step": 338
},
{
"epoch": 0.26401869158878505,
"grad_norm": 0.25,
"learning_rate": 7.36760124610592e-06,
"loss": 1.1713,
"step": 339
},
{
"epoch": 0.26479750778816197,
"grad_norm": 0.26171875,
"learning_rate": 7.35981308411215e-06,
"loss": 1.2006,
"step": 340
},
{
"epoch": 0.26557632398753894,
"grad_norm": 0.259765625,
"learning_rate": 7.35202492211838e-06,
"loss": 1.1805,
"step": 341
},
{
"epoch": 0.26635514018691586,
"grad_norm": 0.26953125,
"learning_rate": 7.3442367601246115e-06,
"loss": 1.1452,
"step": 342
},
{
"epoch": 0.26713395638629284,
"grad_norm": 0.265625,
"learning_rate": 7.336448598130842e-06,
"loss": 1.1526,
"step": 343
},
{
"epoch": 0.26791277258566976,
"grad_norm": 0.2734375,
"learning_rate": 7.328660436137073e-06,
"loss": 1.127,
"step": 344
},
{
"epoch": 0.26869158878504673,
"grad_norm": 0.259765625,
"learning_rate": 7.320872274143302e-06,
"loss": 1.1556,
"step": 345
},
{
"epoch": 0.26947040498442365,
"grad_norm": 0.291015625,
"learning_rate": 7.313084112149533e-06,
"loss": 1.1861,
"step": 346
},
{
"epoch": 0.2702492211838006,
"grad_norm": 0.25,
"learning_rate": 7.305295950155764e-06,
"loss": 1.1425,
"step": 347
},
{
"epoch": 0.27102803738317754,
"grad_norm": 0.259765625,
"learning_rate": 7.297507788161995e-06,
"loss": 1.1459,
"step": 348
},
{
"epoch": 0.2718068535825545,
"grad_norm": 0.25390625,
"learning_rate": 7.289719626168225e-06,
"loss": 1.1545,
"step": 349
},
{
"epoch": 0.27258566978193144,
"grad_norm": 0.275390625,
"learning_rate": 7.281931464174455e-06,
"loss": 1.1558,
"step": 350
},
{
"epoch": 0.2733644859813084,
"grad_norm": 0.26171875,
"learning_rate": 7.274143302180686e-06,
"loss": 1.1387,
"step": 351
},
{
"epoch": 0.27414330218068533,
"grad_norm": 0.2734375,
"learning_rate": 7.266355140186917e-06,
"loss": 1.1572,
"step": 352
},
{
"epoch": 0.2749221183800623,
"grad_norm": 0.2890625,
"learning_rate": 7.258566978193147e-06,
"loss": 1.1678,
"step": 353
},
{
"epoch": 0.2757009345794392,
"grad_norm": 0.263671875,
"learning_rate": 7.2507788161993775e-06,
"loss": 1.1525,
"step": 354
},
{
"epoch": 0.2764797507788162,
"grad_norm": 0.287109375,
"learning_rate": 7.242990654205608e-06,
"loss": 1.1488,
"step": 355
},
{
"epoch": 0.2772585669781931,
"grad_norm": 0.279296875,
"learning_rate": 7.2352024922118395e-06,
"loss": 1.1813,
"step": 356
},
{
"epoch": 0.2780373831775701,
"grad_norm": 0.294921875,
"learning_rate": 7.227414330218069e-06,
"loss": 1.2069,
"step": 357
},
{
"epoch": 0.278816199376947,
"grad_norm": 0.298828125,
"learning_rate": 7.2196261682243e-06,
"loss": 1.1785,
"step": 358
},
{
"epoch": 0.279595015576324,
"grad_norm": 0.294921875,
"learning_rate": 7.21183800623053e-06,
"loss": 1.1208,
"step": 359
},
{
"epoch": 0.2803738317757009,
"grad_norm": 0.279296875,
"learning_rate": 7.204049844236761e-06,
"loss": 1.1697,
"step": 360
},
{
"epoch": 0.2811526479750779,
"grad_norm": 0.275390625,
"learning_rate": 7.196261682242991e-06,
"loss": 1.1626,
"step": 361
},
{
"epoch": 0.2819314641744548,
"grad_norm": 0.28125,
"learning_rate": 7.188473520249222e-06,
"loss": 1.1269,
"step": 362
},
{
"epoch": 0.2827102803738318,
"grad_norm": 0.2578125,
"learning_rate": 7.180685358255453e-06,
"loss": 1.1108,
"step": 363
},
{
"epoch": 0.2834890965732087,
"grad_norm": 0.265625,
"learning_rate": 7.172897196261683e-06,
"loss": 1.1517,
"step": 364
},
{
"epoch": 0.2842679127725857,
"grad_norm": 0.255859375,
"learning_rate": 7.165109034267913e-06,
"loss": 1.1062,
"step": 365
},
{
"epoch": 0.2850467289719626,
"grad_norm": 0.2734375,
"learning_rate": 7.1573208722741435e-06,
"loss": 1.1129,
"step": 366
},
{
"epoch": 0.28582554517133957,
"grad_norm": 0.275390625,
"learning_rate": 7.149532710280375e-06,
"loss": 1.1512,
"step": 367
},
{
"epoch": 0.2866043613707165,
"grad_norm": 0.298828125,
"learning_rate": 7.1417445482866054e-06,
"loss": 1.1617,
"step": 368
},
{
"epoch": 0.28738317757009346,
"grad_norm": 0.24609375,
"learning_rate": 7.133956386292835e-06,
"loss": 1.1238,
"step": 369
},
{
"epoch": 0.2881619937694704,
"grad_norm": 0.29296875,
"learning_rate": 7.126168224299066e-06,
"loss": 1.1093,
"step": 370
},
{
"epoch": 0.28894080996884736,
"grad_norm": 0.26171875,
"learning_rate": 7.118380062305297e-06,
"loss": 1.1489,
"step": 371
},
{
"epoch": 0.2897196261682243,
"grad_norm": 0.265625,
"learning_rate": 7.110591900311528e-06,
"loss": 1.1719,
"step": 372
},
{
"epoch": 0.29049844236760125,
"grad_norm": 0.275390625,
"learning_rate": 7.1028037383177574e-06,
"loss": 1.1229,
"step": 373
},
{
"epoch": 0.29127725856697817,
"grad_norm": 0.2734375,
"learning_rate": 7.095015576323988e-06,
"loss": 1.1856,
"step": 374
},
{
"epoch": 0.29205607476635514,
"grad_norm": 0.265625,
"learning_rate": 7.0872274143302186e-06,
"loss": 1.1906,
"step": 375
},
{
"epoch": 0.29283489096573206,
"grad_norm": 0.255859375,
"learning_rate": 7.07943925233645e-06,
"loss": 1.1702,
"step": 376
},
{
"epoch": 0.29361370716510904,
"grad_norm": 0.28125,
"learning_rate": 7.07165109034268e-06,
"loss": 1.1306,
"step": 377
},
{
"epoch": 0.29439252336448596,
"grad_norm": 0.26953125,
"learning_rate": 7.06386292834891e-06,
"loss": 1.1312,
"step": 378
},
{
"epoch": 0.29517133956386293,
"grad_norm": 0.2734375,
"learning_rate": 7.056074766355141e-06,
"loss": 1.1323,
"step": 379
},
{
"epoch": 0.29595015576323985,
"grad_norm": 0.275390625,
"learning_rate": 7.048286604361371e-06,
"loss": 1.1478,
"step": 380
},
{
"epoch": 0.2967289719626168,
"grad_norm": 0.271484375,
"learning_rate": 7.040498442367601e-06,
"loss": 1.1548,
"step": 381
},
{
"epoch": 0.29750778816199375,
"grad_norm": 0.2734375,
"learning_rate": 7.0327102803738325e-06,
"loss": 1.1318,
"step": 382
},
{
"epoch": 0.2982866043613707,
"grad_norm": 0.294921875,
"learning_rate": 7.024922118380063e-06,
"loss": 1.1201,
"step": 383
},
{
"epoch": 0.29906542056074764,
"grad_norm": 0.28125,
"learning_rate": 7.017133956386294e-06,
"loss": 1.1333,
"step": 384
},
{
"epoch": 0.2998442367601246,
"grad_norm": 0.275390625,
"learning_rate": 7.009345794392523e-06,
"loss": 1.136,
"step": 385
},
{
"epoch": 0.30062305295950154,
"grad_norm": 0.30859375,
"learning_rate": 7.001557632398755e-06,
"loss": 1.1647,
"step": 386
},
{
"epoch": 0.3014018691588785,
"grad_norm": 0.29296875,
"learning_rate": 6.993769470404985e-06,
"loss": 1.1601,
"step": 387
},
{
"epoch": 0.30218068535825543,
"grad_norm": 0.28125,
"learning_rate": 6.985981308411216e-06,
"loss": 1.1871,
"step": 388
},
{
"epoch": 0.3029595015576324,
"grad_norm": 0.279296875,
"learning_rate": 6.978193146417446e-06,
"loss": 1.1279,
"step": 389
},
{
"epoch": 0.3037383177570093,
"grad_norm": 0.298828125,
"learning_rate": 6.970404984423676e-06,
"loss": 1.1028,
"step": 390
},
{
"epoch": 0.3045171339563863,
"grad_norm": 0.283203125,
"learning_rate": 6.962616822429908e-06,
"loss": 1.1061,
"step": 391
},
{
"epoch": 0.3052959501557632,
"grad_norm": 0.30078125,
"learning_rate": 6.954828660436138e-06,
"loss": 1.1496,
"step": 392
},
{
"epoch": 0.3060747663551402,
"grad_norm": 0.279296875,
"learning_rate": 6.947040498442368e-06,
"loss": 1.1106,
"step": 393
},
{
"epoch": 0.3068535825545171,
"grad_norm": 0.376953125,
"learning_rate": 6.9392523364485985e-06,
"loss": 1.0929,
"step": 394
},
{
"epoch": 0.3076323987538941,
"grad_norm": 0.28515625,
"learning_rate": 6.931464174454829e-06,
"loss": 1.1291,
"step": 395
},
{
"epoch": 0.308411214953271,
"grad_norm": 0.337890625,
"learning_rate": 6.9236760124610605e-06,
"loss": 1.1379,
"step": 396
},
{
"epoch": 0.309190031152648,
"grad_norm": 0.29296875,
"learning_rate": 6.91588785046729e-06,
"loss": 1.1239,
"step": 397
},
{
"epoch": 0.3099688473520249,
"grad_norm": 0.345703125,
"learning_rate": 6.908099688473521e-06,
"loss": 1.1976,
"step": 398
},
{
"epoch": 0.3107476635514019,
"grad_norm": 0.28515625,
"learning_rate": 6.900311526479751e-06,
"loss": 1.1175,
"step": 399
},
{
"epoch": 0.3115264797507788,
"grad_norm": 0.294921875,
"learning_rate": 6.892523364485983e-06,
"loss": 1.1357,
"step": 400
},
{
"epoch": 0.31230529595015577,
"grad_norm": 0.275390625,
"learning_rate": 6.8847352024922125e-06,
"loss": 1.143,
"step": 401
},
{
"epoch": 0.3130841121495327,
"grad_norm": 0.283203125,
"learning_rate": 6.876947040498443e-06,
"loss": 1.0793,
"step": 402
},
{
"epoch": 0.31386292834890966,
"grad_norm": 0.283203125,
"learning_rate": 6.869158878504674e-06,
"loss": 1.1154,
"step": 403
},
{
"epoch": 0.3146417445482866,
"grad_norm": 0.294921875,
"learning_rate": 6.861370716510904e-06,
"loss": 1.1352,
"step": 404
},
{
"epoch": 0.31542056074766356,
"grad_norm": 0.306640625,
"learning_rate": 6.853582554517134e-06,
"loss": 1.2069,
"step": 405
},
{
"epoch": 0.3161993769470405,
"grad_norm": 0.27734375,
"learning_rate": 6.845794392523365e-06,
"loss": 1.1682,
"step": 406
},
{
"epoch": 0.31697819314641745,
"grad_norm": 0.27734375,
"learning_rate": 6.838006230529596e-06,
"loss": 1.1398,
"step": 407
},
{
"epoch": 0.3177570093457944,
"grad_norm": 0.27734375,
"learning_rate": 6.8302180685358264e-06,
"loss": 1.1202,
"step": 408
},
{
"epoch": 0.31853582554517135,
"grad_norm": 0.275390625,
"learning_rate": 6.822429906542056e-06,
"loss": 1.1014,
"step": 409
},
{
"epoch": 0.31931464174454827,
"grad_norm": 0.267578125,
"learning_rate": 6.814641744548287e-06,
"loss": 1.0811,
"step": 410
},
{
"epoch": 0.32009345794392524,
"grad_norm": 0.28125,
"learning_rate": 6.806853582554518e-06,
"loss": 1.119,
"step": 411
},
{
"epoch": 0.32087227414330216,
"grad_norm": 0.283203125,
"learning_rate": 6.799065420560749e-06,
"loss": 1.1016,
"step": 412
},
{
"epoch": 0.32165109034267914,
"grad_norm": 0.265625,
"learning_rate": 6.791277258566978e-06,
"loss": 1.1058,
"step": 413
},
{
"epoch": 0.32242990654205606,
"grad_norm": 0.28515625,
"learning_rate": 6.783489096573209e-06,
"loss": 1.163,
"step": 414
},
{
"epoch": 0.32320872274143303,
"grad_norm": 0.3203125,
"learning_rate": 6.77570093457944e-06,
"loss": 1.1557,
"step": 415
},
{
"epoch": 0.32398753894080995,
"grad_norm": 0.3046875,
"learning_rate": 6.767912772585671e-06,
"loss": 1.1713,
"step": 416
},
{
"epoch": 0.3247663551401869,
"grad_norm": 0.30078125,
"learning_rate": 6.760124610591901e-06,
"loss": 1.1694,
"step": 417
},
{
"epoch": 0.32554517133956384,
"grad_norm": 0.318359375,
"learning_rate": 6.752336448598131e-06,
"loss": 1.1343,
"step": 418
},
{
"epoch": 0.3263239875389408,
"grad_norm": 0.298828125,
"learning_rate": 6.744548286604362e-06,
"loss": 1.1366,
"step": 419
},
{
"epoch": 0.32710280373831774,
"grad_norm": 0.275390625,
"learning_rate": 6.736760124610593e-06,
"loss": 1.1688,
"step": 420
},
{
"epoch": 0.3278816199376947,
"grad_norm": 0.310546875,
"learning_rate": 6.728971962616823e-06,
"loss": 1.119,
"step": 421
},
{
"epoch": 0.32866043613707163,
"grad_norm": 0.291015625,
"learning_rate": 6.7211838006230535e-06,
"loss": 1.128,
"step": 422
},
{
"epoch": 0.3294392523364486,
"grad_norm": 0.265625,
"learning_rate": 6.713395638629284e-06,
"loss": 1.0967,
"step": 423
},
{
"epoch": 0.3302180685358255,
"grad_norm": 0.28515625,
"learning_rate": 6.705607476635515e-06,
"loss": 1.1047,
"step": 424
},
{
"epoch": 0.3309968847352025,
"grad_norm": 0.310546875,
"learning_rate": 6.697819314641744e-06,
"loss": 1.141,
"step": 425
},
{
"epoch": 0.3317757009345794,
"grad_norm": 0.306640625,
"learning_rate": 6.690031152647976e-06,
"loss": 1.1289,
"step": 426
},
{
"epoch": 0.3325545171339564,
"grad_norm": 0.2734375,
"learning_rate": 6.682242990654206e-06,
"loss": 1.1256,
"step": 427
},
{
"epoch": 0.3333333333333333,
"grad_norm": 0.275390625,
"learning_rate": 6.674454828660437e-06,
"loss": 1.1155,
"step": 428
},
{
"epoch": 0.3341121495327103,
"grad_norm": 0.287109375,
"learning_rate": 6.666666666666667e-06,
"loss": 1.0946,
"step": 429
},
{
"epoch": 0.3348909657320872,
"grad_norm": 0.263671875,
"learning_rate": 6.658878504672898e-06,
"loss": 1.0643,
"step": 430
},
{
"epoch": 0.3356697819314642,
"grad_norm": 0.271484375,
"learning_rate": 6.651090342679129e-06,
"loss": 1.1185,
"step": 431
},
{
"epoch": 0.3364485981308411,
"grad_norm": 0.271484375,
"learning_rate": 6.643302180685359e-06,
"loss": 1.1338,
"step": 432
},
{
"epoch": 0.3372274143302181,
"grad_norm": 0.453125,
"learning_rate": 6.635514018691589e-06,
"loss": 1.1123,
"step": 433
},
{
"epoch": 0.338006230529595,
"grad_norm": 0.27734375,
"learning_rate": 6.6277258566978195e-06,
"loss": 1.1586,
"step": 434
},
{
"epoch": 0.338785046728972,
"grad_norm": 0.279296875,
"learning_rate": 6.619937694704051e-06,
"loss": 1.097,
"step": 435
},
{
"epoch": 0.3395638629283489,
"grad_norm": 0.279296875,
"learning_rate": 6.6121495327102815e-06,
"loss": 1.169,
"step": 436
},
{
"epoch": 0.34034267912772587,
"grad_norm": 0.2734375,
"learning_rate": 6.604361370716511e-06,
"loss": 1.1089,
"step": 437
},
{
"epoch": 0.3411214953271028,
"grad_norm": 0.3046875,
"learning_rate": 6.596573208722742e-06,
"loss": 1.1422,
"step": 438
},
{
"epoch": 0.34190031152647976,
"grad_norm": 0.2734375,
"learning_rate": 6.588785046728972e-06,
"loss": 1.0896,
"step": 439
},
{
"epoch": 0.3426791277258567,
"grad_norm": 0.267578125,
"learning_rate": 6.580996884735204e-06,
"loss": 1.113,
"step": 440
},
{
"epoch": 0.34345794392523366,
"grad_norm": 0.271484375,
"learning_rate": 6.5732087227414335e-06,
"loss": 1.1406,
"step": 441
},
{
"epoch": 0.3442367601246106,
"grad_norm": 0.267578125,
"learning_rate": 6.565420560747664e-06,
"loss": 1.1298,
"step": 442
},
{
"epoch": 0.34501557632398755,
"grad_norm": 0.388671875,
"learning_rate": 6.557632398753895e-06,
"loss": 1.1271,
"step": 443
},
{
"epoch": 0.34579439252336447,
"grad_norm": 0.265625,
"learning_rate": 6.549844236760126e-06,
"loss": 1.0878,
"step": 444
},
{
"epoch": 0.34657320872274144,
"grad_norm": 0.439453125,
"learning_rate": 6.542056074766355e-06,
"loss": 1.1161,
"step": 445
},
{
"epoch": 0.34735202492211836,
"grad_norm": 0.291015625,
"learning_rate": 6.534267912772586e-06,
"loss": 1.1092,
"step": 446
},
{
"epoch": 0.34813084112149534,
"grad_norm": 0.2578125,
"learning_rate": 6.526479750778817e-06,
"loss": 1.1226,
"step": 447
},
{
"epoch": 0.34890965732087226,
"grad_norm": 0.271484375,
"learning_rate": 6.5186915887850474e-06,
"loss": 1.0736,
"step": 448
},
{
"epoch": 0.34968847352024923,
"grad_norm": 0.283203125,
"learning_rate": 6.510903426791277e-06,
"loss": 1.134,
"step": 449
},
{
"epoch": 0.35046728971962615,
"grad_norm": 0.3125,
"learning_rate": 6.5031152647975086e-06,
"loss": 1.2143,
"step": 450
},
{
"epoch": 0.3512461059190031,
"grad_norm": 0.271484375,
"learning_rate": 6.495327102803739e-06,
"loss": 1.0704,
"step": 451
},
{
"epoch": 0.35202492211838005,
"grad_norm": 0.26953125,
"learning_rate": 6.48753894080997e-06,
"loss": 1.082,
"step": 452
},
{
"epoch": 0.352803738317757,
"grad_norm": 0.25,
"learning_rate": 6.479750778816199e-06,
"loss": 1.0886,
"step": 453
},
{
"epoch": 0.35358255451713394,
"grad_norm": 0.2734375,
"learning_rate": 6.47196261682243e-06,
"loss": 1.1513,
"step": 454
},
{
"epoch": 0.3543613707165109,
"grad_norm": 0.26171875,
"learning_rate": 6.464174454828661e-06,
"loss": 1.1224,
"step": 455
},
{
"epoch": 0.35514018691588783,
"grad_norm": 0.265625,
"learning_rate": 6.456386292834892e-06,
"loss": 1.1324,
"step": 456
},
{
"epoch": 0.3559190031152648,
"grad_norm": 0.27734375,
"learning_rate": 6.448598130841122e-06,
"loss": 1.1556,
"step": 457
},
{
"epoch": 0.35669781931464173,
"grad_norm": 0.271484375,
"learning_rate": 6.440809968847352e-06,
"loss": 1.0916,
"step": 458
},
{
"epoch": 0.3574766355140187,
"grad_norm": 0.265625,
"learning_rate": 6.433021806853583e-06,
"loss": 1.1021,
"step": 459
},
{
"epoch": 0.3582554517133956,
"grad_norm": 0.265625,
"learning_rate": 6.425233644859814e-06,
"loss": 1.1062,
"step": 460
},
{
"epoch": 0.3590342679127726,
"grad_norm": 0.283203125,
"learning_rate": 6.417445482866044e-06,
"loss": 1.1103,
"step": 461
},
{
"epoch": 0.3598130841121495,
"grad_norm": 0.26171875,
"learning_rate": 6.4096573208722745e-06,
"loss": 1.1087,
"step": 462
},
{
"epoch": 0.3605919003115265,
"grad_norm": 0.283203125,
"learning_rate": 6.401869158878505e-06,
"loss": 1.1616,
"step": 463
},
{
"epoch": 0.3613707165109034,
"grad_norm": 0.2392578125,
"learning_rate": 6.3940809968847365e-06,
"loss": 1.0948,
"step": 464
},
{
"epoch": 0.3621495327102804,
"grad_norm": 0.265625,
"learning_rate": 6.386292834890966e-06,
"loss": 1.0916,
"step": 465
},
{
"epoch": 0.3629283489096573,
"grad_norm": 0.2890625,
"learning_rate": 6.378504672897197e-06,
"loss": 1.1433,
"step": 466
},
{
"epoch": 0.3637071651090343,
"grad_norm": 0.2451171875,
"learning_rate": 6.370716510903427e-06,
"loss": 1.1007,
"step": 467
},
{
"epoch": 0.3644859813084112,
"grad_norm": 0.283203125,
"learning_rate": 6.362928348909658e-06,
"loss": 1.1347,
"step": 468
},
{
"epoch": 0.3652647975077882,
"grad_norm": 0.279296875,
"learning_rate": 6.355140186915888e-06,
"loss": 1.1021,
"step": 469
},
{
"epoch": 0.3660436137071651,
"grad_norm": 0.328125,
"learning_rate": 6.347352024922119e-06,
"loss": 1.0919,
"step": 470
},
{
"epoch": 0.36682242990654207,
"grad_norm": 0.302734375,
"learning_rate": 6.33956386292835e-06,
"loss": 1.1043,
"step": 471
},
{
"epoch": 0.367601246105919,
"grad_norm": 0.26953125,
"learning_rate": 6.33177570093458e-06,
"loss": 1.1112,
"step": 472
},
{
"epoch": 0.36838006230529596,
"grad_norm": 0.25390625,
"learning_rate": 6.32398753894081e-06,
"loss": 1.0829,
"step": 473
},
{
"epoch": 0.3691588785046729,
"grad_norm": 0.251953125,
"learning_rate": 6.3161993769470405e-06,
"loss": 1.122,
"step": 474
},
{
"epoch": 0.36993769470404986,
"grad_norm": 0.279296875,
"learning_rate": 6.308411214953272e-06,
"loss": 1.0871,
"step": 475
},
{
"epoch": 0.3707165109034268,
"grad_norm": 0.30078125,
"learning_rate": 6.3006230529595025e-06,
"loss": 1.1319,
"step": 476
},
{
"epoch": 0.37149532710280375,
"grad_norm": 0.24609375,
"learning_rate": 6.292834890965732e-06,
"loss": 1.1082,
"step": 477
},
{
"epoch": 0.37227414330218067,
"grad_norm": 0.25,
"learning_rate": 6.285046728971963e-06,
"loss": 1.1387,
"step": 478
},
{
"epoch": 0.37305295950155765,
"grad_norm": 0.259765625,
"learning_rate": 6.277258566978194e-06,
"loss": 1.1351,
"step": 479
},
{
"epoch": 0.37383177570093457,
"grad_norm": 0.275390625,
"learning_rate": 6.269470404984425e-06,
"loss": 1.0888,
"step": 480
},
{
"epoch": 0.37461059190031154,
"grad_norm": 0.25,
"learning_rate": 6.2616822429906544e-06,
"loss": 1.1236,
"step": 481
},
{
"epoch": 0.37538940809968846,
"grad_norm": 0.265625,
"learning_rate": 6.253894080996885e-06,
"loss": 1.0981,
"step": 482
},
{
"epoch": 0.37616822429906543,
"grad_norm": 0.263671875,
"learning_rate": 6.246105919003116e-06,
"loss": 1.11,
"step": 483
},
{
"epoch": 0.37694704049844235,
"grad_norm": 0.263671875,
"learning_rate": 6.238317757009347e-06,
"loss": 1.0955,
"step": 484
},
{
"epoch": 0.37772585669781933,
"grad_norm": 0.25,
"learning_rate": 6.230529595015577e-06,
"loss": 1.0866,
"step": 485
},
{
"epoch": 0.37850467289719625,
"grad_norm": 0.263671875,
"learning_rate": 6.222741433021807e-06,
"loss": 1.0676,
"step": 486
},
{
"epoch": 0.3792834890965732,
"grad_norm": 0.271484375,
"learning_rate": 6.214953271028038e-06,
"loss": 1.1425,
"step": 487
},
{
"epoch": 0.38006230529595014,
"grad_norm": 0.275390625,
"learning_rate": 6.207165109034268e-06,
"loss": 1.1145,
"step": 488
},
{
"epoch": 0.3808411214953271,
"grad_norm": 0.259765625,
"learning_rate": 6.199376947040498e-06,
"loss": 1.0896,
"step": 489
},
{
"epoch": 0.38161993769470404,
"grad_norm": 0.267578125,
"learning_rate": 6.1915887850467296e-06,
"loss": 1.0953,
"step": 490
},
{
"epoch": 0.382398753894081,
"grad_norm": 0.275390625,
"learning_rate": 6.18380062305296e-06,
"loss": 1.1272,
"step": 491
},
{
"epoch": 0.38317757009345793,
"grad_norm": 0.2734375,
"learning_rate": 6.176012461059191e-06,
"loss": 1.1509,
"step": 492
},
{
"epoch": 0.3839563862928349,
"grad_norm": 0.291015625,
"learning_rate": 6.16822429906542e-06,
"loss": 1.066,
"step": 493
},
{
"epoch": 0.3847352024922118,
"grad_norm": 0.36328125,
"learning_rate": 6.160436137071652e-06,
"loss": 1.0896,
"step": 494
},
{
"epoch": 0.3855140186915888,
"grad_norm": 0.279296875,
"learning_rate": 6.152647975077882e-06,
"loss": 1.1412,
"step": 495
},
{
"epoch": 0.3862928348909657,
"grad_norm": 0.298828125,
"learning_rate": 6.144859813084113e-06,
"loss": 1.0594,
"step": 496
},
{
"epoch": 0.3870716510903427,
"grad_norm": 0.2734375,
"learning_rate": 6.137071651090343e-06,
"loss": 1.0807,
"step": 497
},
{
"epoch": 0.3878504672897196,
"grad_norm": 0.275390625,
"learning_rate": 6.129283489096573e-06,
"loss": 1.1289,
"step": 498
},
{
"epoch": 0.3886292834890966,
"grad_norm": 0.271484375,
"learning_rate": 6.121495327102805e-06,
"loss": 1.0884,
"step": 499
},
{
"epoch": 0.3894080996884735,
"grad_norm": 0.26171875,
"learning_rate": 6.113707165109035e-06,
"loss": 1.0841,
"step": 500
},
{
"epoch": 0.3901869158878505,
"grad_norm": 0.255859375,
"learning_rate": 6.105919003115265e-06,
"loss": 1.1092,
"step": 501
},
{
"epoch": 0.3909657320872274,
"grad_norm": 0.28515625,
"learning_rate": 6.0981308411214955e-06,
"loss": 1.0951,
"step": 502
},
{
"epoch": 0.3917445482866044,
"grad_norm": 0.2578125,
"learning_rate": 6.090342679127726e-06,
"loss": 1.1021,
"step": 503
},
{
"epoch": 0.3925233644859813,
"grad_norm": 0.26953125,
"learning_rate": 6.0825545171339575e-06,
"loss": 1.1147,
"step": 504
},
{
"epoch": 0.39330218068535827,
"grad_norm": 0.25390625,
"learning_rate": 6.074766355140187e-06,
"loss": 1.1067,
"step": 505
},
{
"epoch": 0.3940809968847352,
"grad_norm": 0.27734375,
"learning_rate": 6.066978193146418e-06,
"loss": 1.0967,
"step": 506
},
{
"epoch": 0.39485981308411217,
"grad_norm": 0.287109375,
"learning_rate": 6.059190031152648e-06,
"loss": 1.1563,
"step": 507
},
{
"epoch": 0.3956386292834891,
"grad_norm": 0.279296875,
"learning_rate": 6.05140186915888e-06,
"loss": 1.1308,
"step": 508
},
{
"epoch": 0.39641744548286606,
"grad_norm": 0.26171875,
"learning_rate": 6.0436137071651095e-06,
"loss": 1.0991,
"step": 509
},
{
"epoch": 0.397196261682243,
"grad_norm": 0.287109375,
"learning_rate": 6.03582554517134e-06,
"loss": 1.0785,
"step": 510
},
{
"epoch": 0.39797507788161995,
"grad_norm": 0.265625,
"learning_rate": 6.028037383177571e-06,
"loss": 1.1087,
"step": 511
},
{
"epoch": 0.3987538940809969,
"grad_norm": 0.259765625,
"learning_rate": 6.020249221183801e-06,
"loss": 1.0754,
"step": 512
},
{
"epoch": 0.39953271028037385,
"grad_norm": 0.271484375,
"learning_rate": 6.012461059190031e-06,
"loss": 1.1314,
"step": 513
},
{
"epoch": 0.40031152647975077,
"grad_norm": 0.26171875,
"learning_rate": 6.004672897196262e-06,
"loss": 1.092,
"step": 514
},
{
"epoch": 0.40109034267912774,
"grad_norm": 0.25390625,
"learning_rate": 5.996884735202493e-06,
"loss": 1.1126,
"step": 515
},
{
"epoch": 0.40186915887850466,
"grad_norm": 0.26953125,
"learning_rate": 5.9890965732087235e-06,
"loss": 1.0716,
"step": 516
},
{
"epoch": 0.40264797507788164,
"grad_norm": 0.267578125,
"learning_rate": 5.981308411214953e-06,
"loss": 1.1171,
"step": 517
},
{
"epoch": 0.40342679127725856,
"grad_norm": 0.2578125,
"learning_rate": 5.973520249221184e-06,
"loss": 1.1155,
"step": 518
},
{
"epoch": 0.40420560747663553,
"grad_norm": 0.259765625,
"learning_rate": 5.965732087227415e-06,
"loss": 1.1126,
"step": 519
},
{
"epoch": 0.40498442367601245,
"grad_norm": 0.271484375,
"learning_rate": 5.957943925233646e-06,
"loss": 1.1491,
"step": 520
},
{
"epoch": 0.4057632398753894,
"grad_norm": 0.267578125,
"learning_rate": 5.9501557632398754e-06,
"loss": 1.1107,
"step": 521
},
{
"epoch": 0.40654205607476634,
"grad_norm": 0.26953125,
"learning_rate": 5.942367601246106e-06,
"loss": 1.0905,
"step": 522
},
{
"epoch": 0.4073208722741433,
"grad_norm": 0.271484375,
"learning_rate": 5.9345794392523374e-06,
"loss": 1.0634,
"step": 523
},
{
"epoch": 0.40809968847352024,
"grad_norm": 0.255859375,
"learning_rate": 5.926791277258568e-06,
"loss": 1.1295,
"step": 524
},
{
"epoch": 0.4088785046728972,
"grad_norm": 0.27734375,
"learning_rate": 5.919003115264798e-06,
"loss": 1.0411,
"step": 525
},
{
"epoch": 0.40965732087227413,
"grad_norm": 0.28125,
"learning_rate": 5.911214953271028e-06,
"loss": 1.1281,
"step": 526
},
{
"epoch": 0.4104361370716511,
"grad_norm": 0.26953125,
"learning_rate": 5.903426791277259e-06,
"loss": 1.1131,
"step": 527
},
{
"epoch": 0.411214953271028,
"grad_norm": 0.263671875,
"learning_rate": 5.89563862928349e-06,
"loss": 1.1341,
"step": 528
},
{
"epoch": 0.411993769470405,
"grad_norm": 0.255859375,
"learning_rate": 5.88785046728972e-06,
"loss": 1.0826,
"step": 529
},
{
"epoch": 0.4127725856697819,
"grad_norm": 0.28125,
"learning_rate": 5.8800623052959505e-06,
"loss": 1.1504,
"step": 530
},
{
"epoch": 0.4135514018691589,
"grad_norm": 0.314453125,
"learning_rate": 5.872274143302181e-06,
"loss": 1.061,
"step": 531
},
{
"epoch": 0.4143302180685358,
"grad_norm": 0.283203125,
"learning_rate": 5.864485981308412e-06,
"loss": 1.0797,
"step": 532
},
{
"epoch": 0.4151090342679128,
"grad_norm": 0.28515625,
"learning_rate": 5.856697819314641e-06,
"loss": 1.1074,
"step": 533
},
{
"epoch": 0.4158878504672897,
"grad_norm": 0.279296875,
"learning_rate": 5.848909657320873e-06,
"loss": 1.0644,
"step": 534
},
{
"epoch": 0.4166666666666667,
"grad_norm": 0.28125,
"learning_rate": 5.841121495327103e-06,
"loss": 1.1101,
"step": 535
},
{
"epoch": 0.4174454828660436,
"grad_norm": 0.263671875,
"learning_rate": 5.833333333333334e-06,
"loss": 1.1257,
"step": 536
},
{
"epoch": 0.4182242990654206,
"grad_norm": 0.294921875,
"learning_rate": 5.825545171339564e-06,
"loss": 1.1116,
"step": 537
},
{
"epoch": 0.4190031152647975,
"grad_norm": 0.265625,
"learning_rate": 5.817757009345795e-06,
"loss": 1.1159,
"step": 538
},
{
"epoch": 0.4197819314641745,
"grad_norm": 0.267578125,
"learning_rate": 5.809968847352026e-06,
"loss": 1.1104,
"step": 539
},
{
"epoch": 0.4205607476635514,
"grad_norm": 0.26171875,
"learning_rate": 5.802180685358256e-06,
"loss": 1.0959,
"step": 540
},
{
"epoch": 0.42133956386292837,
"grad_norm": 0.279296875,
"learning_rate": 5.794392523364486e-06,
"loss": 1.1298,
"step": 541
},
{
"epoch": 0.4221183800623053,
"grad_norm": 0.275390625,
"learning_rate": 5.7866043613707165e-06,
"loss": 1.1649,
"step": 542
},
{
"epoch": 0.42289719626168226,
"grad_norm": 0.255859375,
"learning_rate": 5.778816199376948e-06,
"loss": 1.1139,
"step": 543
},
{
"epoch": 0.4236760124610592,
"grad_norm": 0.26953125,
"learning_rate": 5.7710280373831785e-06,
"loss": 1.1372,
"step": 544
},
{
"epoch": 0.42445482866043616,
"grad_norm": 0.28515625,
"learning_rate": 5.763239875389408e-06,
"loss": 1.112,
"step": 545
},
{
"epoch": 0.4252336448598131,
"grad_norm": 0.267578125,
"learning_rate": 5.755451713395639e-06,
"loss": 1.1147,
"step": 546
},
{
"epoch": 0.42601246105919005,
"grad_norm": 0.2734375,
"learning_rate": 5.747663551401869e-06,
"loss": 1.0638,
"step": 547
},
{
"epoch": 0.42679127725856697,
"grad_norm": 0.26171875,
"learning_rate": 5.739875389408101e-06,
"loss": 1.0975,
"step": 548
},
{
"epoch": 0.42757009345794394,
"grad_norm": 0.263671875,
"learning_rate": 5.7320872274143305e-06,
"loss": 1.1088,
"step": 549
},
{
"epoch": 0.42834890965732086,
"grad_norm": 0.255859375,
"learning_rate": 5.724299065420561e-06,
"loss": 1.0805,
"step": 550
},
{
"epoch": 0.42912772585669784,
"grad_norm": 0.26171875,
"learning_rate": 5.716510903426792e-06,
"loss": 1.079,
"step": 551
},
{
"epoch": 0.42990654205607476,
"grad_norm": 0.24609375,
"learning_rate": 5.708722741433023e-06,
"loss": 1.0752,
"step": 552
},
{
"epoch": 0.43068535825545173,
"grad_norm": 0.279296875,
"learning_rate": 5.700934579439253e-06,
"loss": 1.1184,
"step": 553
},
{
"epoch": 0.43146417445482865,
"grad_norm": 0.275390625,
"learning_rate": 5.693146417445483e-06,
"loss": 1.1174,
"step": 554
},
{
"epoch": 0.4322429906542056,
"grad_norm": 0.26171875,
"learning_rate": 5.685358255451714e-06,
"loss": 1.0907,
"step": 555
},
{
"epoch": 0.43302180685358255,
"grad_norm": 0.267578125,
"learning_rate": 5.6775700934579444e-06,
"loss": 1.0976,
"step": 556
},
{
"epoch": 0.4338006230529595,
"grad_norm": 0.29296875,
"learning_rate": 5.669781931464174e-06,
"loss": 1.1129,
"step": 557
},
{
"epoch": 0.43457943925233644,
"grad_norm": 0.255859375,
"learning_rate": 5.661993769470406e-06,
"loss": 1.1075,
"step": 558
},
{
"epoch": 0.4353582554517134,
"grad_norm": 0.25390625,
"learning_rate": 5.654205607476636e-06,
"loss": 1.0929,
"step": 559
},
{
"epoch": 0.43613707165109034,
"grad_norm": 0.26953125,
"learning_rate": 5.646417445482867e-06,
"loss": 1.0931,
"step": 560
},
{
"epoch": 0.4369158878504673,
"grad_norm": 0.2578125,
"learning_rate": 5.6386292834890964e-06,
"loss": 1.1073,
"step": 561
},
{
"epoch": 0.43769470404984423,
"grad_norm": 0.263671875,
"learning_rate": 5.630841121495327e-06,
"loss": 1.0688,
"step": 562
},
{
"epoch": 0.4384735202492212,
"grad_norm": 0.259765625,
"learning_rate": 5.623052959501558e-06,
"loss": 1.1007,
"step": 563
},
{
"epoch": 0.4392523364485981,
"grad_norm": 0.3046875,
"learning_rate": 5.615264797507789e-06,
"loss": 1.1439,
"step": 564
},
{
"epoch": 0.4400311526479751,
"grad_norm": 0.3125,
"learning_rate": 5.607476635514019e-06,
"loss": 1.1485,
"step": 565
},
{
"epoch": 0.440809968847352,
"grad_norm": 0.279296875,
"learning_rate": 5.599688473520249e-06,
"loss": 1.1069,
"step": 566
},
{
"epoch": 0.441588785046729,
"grad_norm": 0.28515625,
"learning_rate": 5.591900311526481e-06,
"loss": 1.1664,
"step": 567
},
{
"epoch": 0.4423676012461059,
"grad_norm": 0.314453125,
"learning_rate": 5.584112149532711e-06,
"loss": 1.0452,
"step": 568
},
{
"epoch": 0.4431464174454829,
"grad_norm": 0.29296875,
"learning_rate": 5.576323987538941e-06,
"loss": 1.0428,
"step": 569
},
{
"epoch": 0.4439252336448598,
"grad_norm": 0.294921875,
"learning_rate": 5.5685358255451715e-06,
"loss": 1.0819,
"step": 570
},
{
"epoch": 0.4447040498442368,
"grad_norm": 0.25390625,
"learning_rate": 5.560747663551402e-06,
"loss": 1.0977,
"step": 571
},
{
"epoch": 0.4454828660436137,
"grad_norm": 0.255859375,
"learning_rate": 5.5529595015576335e-06,
"loss": 1.0981,
"step": 572
},
{
"epoch": 0.4462616822429907,
"grad_norm": 0.255859375,
"learning_rate": 5.545171339563863e-06,
"loss": 1.0833,
"step": 573
},
{
"epoch": 0.4470404984423676,
"grad_norm": 0.298828125,
"learning_rate": 5.537383177570094e-06,
"loss": 1.1432,
"step": 574
},
{
"epoch": 0.44781931464174457,
"grad_norm": 0.283203125,
"learning_rate": 5.529595015576324e-06,
"loss": 1.1184,
"step": 575
},
{
"epoch": 0.4485981308411215,
"grad_norm": 0.28515625,
"learning_rate": 5.521806853582555e-06,
"loss": 1.1122,
"step": 576
},
{
"epoch": 0.44937694704049846,
"grad_norm": 0.265625,
"learning_rate": 5.514018691588785e-06,
"loss": 1.0979,
"step": 577
},
{
"epoch": 0.4501557632398754,
"grad_norm": 0.2578125,
"learning_rate": 5.506230529595016e-06,
"loss": 1.0767,
"step": 578
},
{
"epoch": 0.45093457943925236,
"grad_norm": 0.287109375,
"learning_rate": 5.498442367601247e-06,
"loss": 1.1132,
"step": 579
},
{
"epoch": 0.4517133956386293,
"grad_norm": 0.2734375,
"learning_rate": 5.490654205607477e-06,
"loss": 1.0821,
"step": 580
},
{
"epoch": 0.45249221183800625,
"grad_norm": 0.28515625,
"learning_rate": 5.482866043613707e-06,
"loss": 1.0892,
"step": 581
},
{
"epoch": 0.4532710280373832,
"grad_norm": 0.251953125,
"learning_rate": 5.4750778816199375e-06,
"loss": 1.0887,
"step": 582
},
{
"epoch": 0.45404984423676015,
"grad_norm": 0.275390625,
"learning_rate": 5.467289719626169e-06,
"loss": 1.1131,
"step": 583
},
{
"epoch": 0.45482866043613707,
"grad_norm": 0.271484375,
"learning_rate": 5.4595015576323995e-06,
"loss": 1.1531,
"step": 584
},
{
"epoch": 0.45560747663551404,
"grad_norm": 0.328125,
"learning_rate": 5.451713395638629e-06,
"loss": 1.1275,
"step": 585
},
{
"epoch": 0.45638629283489096,
"grad_norm": 0.2490234375,
"learning_rate": 5.44392523364486e-06,
"loss": 1.075,
"step": 586
},
{
"epoch": 0.45716510903426794,
"grad_norm": 0.251953125,
"learning_rate": 5.436137071651091e-06,
"loss": 1.0589,
"step": 587
},
{
"epoch": 0.45794392523364486,
"grad_norm": 0.271484375,
"learning_rate": 5.428348909657322e-06,
"loss": 1.152,
"step": 588
},
{
"epoch": 0.45872274143302183,
"grad_norm": 0.2890625,
"learning_rate": 5.4205607476635515e-06,
"loss": 1.121,
"step": 589
},
{
"epoch": 0.45950155763239875,
"grad_norm": 0.26953125,
"learning_rate": 5.412772585669782e-06,
"loss": 1.0465,
"step": 590
},
{
"epoch": 0.4602803738317757,
"grad_norm": 0.302734375,
"learning_rate": 5.404984423676013e-06,
"loss": 1.0872,
"step": 591
},
{
"epoch": 0.46105919003115264,
"grad_norm": 0.26953125,
"learning_rate": 5.397196261682244e-06,
"loss": 1.0738,
"step": 592
},
{
"epoch": 0.4618380062305296,
"grad_norm": 0.275390625,
"learning_rate": 5.389408099688474e-06,
"loss": 1.0639,
"step": 593
},
{
"epoch": 0.46261682242990654,
"grad_norm": 0.2734375,
"learning_rate": 5.381619937694704e-06,
"loss": 1.0548,
"step": 594
},
{
"epoch": 0.4633956386292835,
"grad_norm": 0.294921875,
"learning_rate": 5.373831775700935e-06,
"loss": 1.1195,
"step": 595
},
{
"epoch": 0.46417445482866043,
"grad_norm": 0.271484375,
"learning_rate": 5.3660436137071654e-06,
"loss": 1.091,
"step": 596
},
{
"epoch": 0.4649532710280374,
"grad_norm": 0.267578125,
"learning_rate": 5.358255451713395e-06,
"loss": 1.0751,
"step": 597
},
{
"epoch": 0.4657320872274143,
"grad_norm": 0.32421875,
"learning_rate": 5.350467289719627e-06,
"loss": 1.1351,
"step": 598
},
{
"epoch": 0.4665109034267913,
"grad_norm": 0.27734375,
"learning_rate": 5.342679127725857e-06,
"loss": 1.1248,
"step": 599
},
{
"epoch": 0.4672897196261682,
"grad_norm": 0.283203125,
"learning_rate": 5.334890965732088e-06,
"loss": 1.1369,
"step": 600
},
{
"epoch": 0.4680685358255452,
"grad_norm": 0.294921875,
"learning_rate": 5.3271028037383174e-06,
"loss": 1.1005,
"step": 601
},
{
"epoch": 0.4688473520249221,
"grad_norm": 0.29296875,
"learning_rate": 5.319314641744549e-06,
"loss": 1.0866,
"step": 602
},
{
"epoch": 0.4696261682242991,
"grad_norm": 0.27734375,
"learning_rate": 5.311526479750779e-06,
"loss": 1.1354,
"step": 603
},
{
"epoch": 0.470404984423676,
"grad_norm": 0.267578125,
"learning_rate": 5.30373831775701e-06,
"loss": 1.0923,
"step": 604
},
{
"epoch": 0.471183800623053,
"grad_norm": 0.259765625,
"learning_rate": 5.29595015576324e-06,
"loss": 1.0961,
"step": 605
},
{
"epoch": 0.4719626168224299,
"grad_norm": 0.291015625,
"learning_rate": 5.28816199376947e-06,
"loss": 1.1166,
"step": 606
},
{
"epoch": 0.4727414330218069,
"grad_norm": 0.3046875,
"learning_rate": 5.280373831775702e-06,
"loss": 1.0968,
"step": 607
},
{
"epoch": 0.4735202492211838,
"grad_norm": 0.265625,
"learning_rate": 5.272585669781932e-06,
"loss": 1.0938,
"step": 608
},
{
"epoch": 0.4742990654205608,
"grad_norm": 0.2734375,
"learning_rate": 5.264797507788162e-06,
"loss": 1.1249,
"step": 609
},
{
"epoch": 0.4750778816199377,
"grad_norm": 0.283203125,
"learning_rate": 5.2570093457943925e-06,
"loss": 1.0835,
"step": 610
},
{
"epoch": 0.47585669781931467,
"grad_norm": 0.28125,
"learning_rate": 5.249221183800623e-06,
"loss": 1.1202,
"step": 611
},
{
"epoch": 0.4766355140186916,
"grad_norm": 0.279296875,
"learning_rate": 5.2414330218068545e-06,
"loss": 1.0893,
"step": 612
},
{
"epoch": 0.47741433021806856,
"grad_norm": 0.259765625,
"learning_rate": 5.233644859813084e-06,
"loss": 1.0992,
"step": 613
},
{
"epoch": 0.4781931464174455,
"grad_norm": 0.267578125,
"learning_rate": 5.225856697819315e-06,
"loss": 1.1145,
"step": 614
},
{
"epoch": 0.47897196261682246,
"grad_norm": 0.275390625,
"learning_rate": 5.218068535825545e-06,
"loss": 1.1492,
"step": 615
},
{
"epoch": 0.4797507788161994,
"grad_norm": 0.2734375,
"learning_rate": 5.210280373831777e-06,
"loss": 1.1045,
"step": 616
},
{
"epoch": 0.48052959501557635,
"grad_norm": 0.275390625,
"learning_rate": 5.2024922118380065e-06,
"loss": 1.1196,
"step": 617
},
{
"epoch": 0.48130841121495327,
"grad_norm": 0.26953125,
"learning_rate": 5.194704049844237e-06,
"loss": 1.1163,
"step": 618
},
{
"epoch": 0.48208722741433024,
"grad_norm": 0.255859375,
"learning_rate": 5.186915887850468e-06,
"loss": 1.0978,
"step": 619
},
{
"epoch": 0.48286604361370716,
"grad_norm": 0.28125,
"learning_rate": 5.179127725856698e-06,
"loss": 1.1349,
"step": 620
},
{
"epoch": 0.48364485981308414,
"grad_norm": 0.265625,
"learning_rate": 5.171339563862928e-06,
"loss": 1.1123,
"step": 621
},
{
"epoch": 0.48442367601246106,
"grad_norm": 0.26953125,
"learning_rate": 5.163551401869159e-06,
"loss": 1.0947,
"step": 622
},
{
"epoch": 0.48520249221183803,
"grad_norm": 0.267578125,
"learning_rate": 5.15576323987539e-06,
"loss": 1.0828,
"step": 623
},
{
"epoch": 0.48598130841121495,
"grad_norm": 0.279296875,
"learning_rate": 5.1479750778816205e-06,
"loss": 1.12,
"step": 624
},
{
"epoch": 0.4867601246105919,
"grad_norm": 0.27734375,
"learning_rate": 5.14018691588785e-06,
"loss": 1.0769,
"step": 625
},
{
"epoch": 0.48753894080996885,
"grad_norm": 0.26171875,
"learning_rate": 5.132398753894081e-06,
"loss": 1.1205,
"step": 626
},
{
"epoch": 0.4883177570093458,
"grad_norm": 0.26953125,
"learning_rate": 5.124610591900312e-06,
"loss": 1.0993,
"step": 627
},
{
"epoch": 0.48909657320872274,
"grad_norm": 0.26171875,
"learning_rate": 5.116822429906543e-06,
"loss": 1.1257,
"step": 628
},
{
"epoch": 0.4898753894080997,
"grad_norm": 0.2490234375,
"learning_rate": 5.1090342679127725e-06,
"loss": 1.0821,
"step": 629
},
{
"epoch": 0.49065420560747663,
"grad_norm": 0.2490234375,
"learning_rate": 5.101246105919003e-06,
"loss": 1.0628,
"step": 630
},
{
"epoch": 0.4914330218068536,
"grad_norm": 0.2578125,
"learning_rate": 5.0934579439252344e-06,
"loss": 1.0868,
"step": 631
},
{
"epoch": 0.49221183800623053,
"grad_norm": 0.251953125,
"learning_rate": 5.085669781931465e-06,
"loss": 1.114,
"step": 632
},
{
"epoch": 0.4929906542056075,
"grad_norm": 0.263671875,
"learning_rate": 5.077881619937695e-06,
"loss": 1.0533,
"step": 633
},
{
"epoch": 0.4937694704049844,
"grad_norm": 0.3125,
"learning_rate": 5.070093457943925e-06,
"loss": 1.1006,
"step": 634
},
{
"epoch": 0.4945482866043614,
"grad_norm": 0.2578125,
"learning_rate": 5.062305295950156e-06,
"loss": 1.0849,
"step": 635
},
{
"epoch": 0.4953271028037383,
"grad_norm": 0.34765625,
"learning_rate": 5.054517133956387e-06,
"loss": 1.1388,
"step": 636
},
{
"epoch": 0.4961059190031153,
"grad_norm": 0.263671875,
"learning_rate": 5.046728971962617e-06,
"loss": 1.0746,
"step": 637
},
{
"epoch": 0.4968847352024922,
"grad_norm": 0.2890625,
"learning_rate": 5.0389408099688476e-06,
"loss": 1.1102,
"step": 638
},
{
"epoch": 0.4976635514018692,
"grad_norm": 0.26171875,
"learning_rate": 5.031152647975078e-06,
"loss": 1.0893,
"step": 639
},
{
"epoch": 0.4984423676012461,
"grad_norm": 0.267578125,
"learning_rate": 5.023364485981309e-06,
"loss": 1.1255,
"step": 640
},
{
"epoch": 0.4992211838006231,
"grad_norm": 0.28515625,
"learning_rate": 5.0155763239875384e-06,
"loss": 1.1025,
"step": 641
},
{
"epoch": 0.5,
"grad_norm": 0.26171875,
"learning_rate": 5.00778816199377e-06,
"loss": 1.1117,
"step": 642
},
{
"epoch": 0.5007788161993769,
"grad_norm": 0.28125,
"learning_rate": 5e-06,
"loss": 1.047,
"step": 643
},
{
"epoch": 0.5015576323987538,
"grad_norm": 0.267578125,
"learning_rate": 4.992211838006231e-06,
"loss": 1.0693,
"step": 644
},
{
"epoch": 0.5023364485981309,
"grad_norm": 0.28515625,
"learning_rate": 4.9844236760124615e-06,
"loss": 1.06,
"step": 645
},
{
"epoch": 0.5031152647975078,
"grad_norm": 0.263671875,
"learning_rate": 4.976635514018692e-06,
"loss": 1.0656,
"step": 646
},
{
"epoch": 0.5038940809968847,
"grad_norm": 0.2734375,
"learning_rate": 4.968847352024923e-06,
"loss": 1.0838,
"step": 647
},
{
"epoch": 0.5046728971962616,
"grad_norm": 0.259765625,
"learning_rate": 4.961059190031153e-06,
"loss": 1.0558,
"step": 648
},
{
"epoch": 0.5054517133956387,
"grad_norm": 0.279296875,
"learning_rate": 4.953271028037384e-06,
"loss": 1.0737,
"step": 649
},
{
"epoch": 0.5062305295950156,
"grad_norm": 0.2890625,
"learning_rate": 4.945482866043614e-06,
"loss": 1.1116,
"step": 650
},
{
"epoch": 0.5070093457943925,
"grad_norm": 0.30078125,
"learning_rate": 4.937694704049845e-06,
"loss": 1.1337,
"step": 651
},
{
"epoch": 0.5077881619937694,
"grad_norm": 0.265625,
"learning_rate": 4.9299065420560755e-06,
"loss": 1.062,
"step": 652
},
{
"epoch": 0.5085669781931464,
"grad_norm": 0.26171875,
"learning_rate": 4.922118380062306e-06,
"loss": 1.0697,
"step": 653
},
{
"epoch": 0.5093457943925234,
"grad_norm": 0.265625,
"learning_rate": 4.914330218068537e-06,
"loss": 1.1211,
"step": 654
},
{
"epoch": 0.5101246105919003,
"grad_norm": 0.265625,
"learning_rate": 4.906542056074766e-06,
"loss": 1.0784,
"step": 655
},
{
"epoch": 0.5109034267912772,
"grad_norm": 0.279296875,
"learning_rate": 4.898753894080998e-06,
"loss": 1.0445,
"step": 656
},
{
"epoch": 0.5116822429906542,
"grad_norm": 0.283203125,
"learning_rate": 4.8909657320872275e-06,
"loss": 1.0834,
"step": 657
},
{
"epoch": 0.5124610591900312,
"grad_norm": 0.2734375,
"learning_rate": 4.883177570093459e-06,
"loss": 1.068,
"step": 658
},
{
"epoch": 0.5132398753894081,
"grad_norm": 0.267578125,
"learning_rate": 4.875389408099689e-06,
"loss": 1.0808,
"step": 659
},
{
"epoch": 0.514018691588785,
"grad_norm": 0.26171875,
"learning_rate": 4.86760124610592e-06,
"loss": 1.0896,
"step": 660
},
{
"epoch": 0.514797507788162,
"grad_norm": 0.259765625,
"learning_rate": 4.85981308411215e-06,
"loss": 1.0582,
"step": 661
},
{
"epoch": 0.5155763239875389,
"grad_norm": 0.3046875,
"learning_rate": 4.85202492211838e-06,
"loss": 1.1185,
"step": 662
},
{
"epoch": 0.5163551401869159,
"grad_norm": 0.2734375,
"learning_rate": 4.844236760124611e-06,
"loss": 1.0866,
"step": 663
},
{
"epoch": 0.5171339563862928,
"grad_norm": 0.3359375,
"learning_rate": 4.8364485981308415e-06,
"loss": 1.1043,
"step": 664
},
{
"epoch": 0.5179127725856698,
"grad_norm": 0.275390625,
"learning_rate": 4.828660436137072e-06,
"loss": 1.1199,
"step": 665
},
{
"epoch": 0.5186915887850467,
"grad_norm": 0.28125,
"learning_rate": 4.820872274143303e-06,
"loss": 1.116,
"step": 666
},
{
"epoch": 0.5194704049844237,
"grad_norm": 0.26953125,
"learning_rate": 4.813084112149533e-06,
"loss": 1.115,
"step": 667
},
{
"epoch": 0.5202492211838006,
"grad_norm": 0.279296875,
"learning_rate": 4.805295950155764e-06,
"loss": 1.1063,
"step": 668
},
{
"epoch": 0.5210280373831776,
"grad_norm": 0.47265625,
"learning_rate": 4.797507788161994e-06,
"loss": 1.0369,
"step": 669
},
{
"epoch": 0.5218068535825545,
"grad_norm": 0.28125,
"learning_rate": 4.789719626168225e-06,
"loss": 1.1221,
"step": 670
},
{
"epoch": 0.5225856697819314,
"grad_norm": 0.287109375,
"learning_rate": 4.7819314641744554e-06,
"loss": 1.0657,
"step": 671
},
{
"epoch": 0.5233644859813084,
"grad_norm": 0.2734375,
"learning_rate": 4.774143302180686e-06,
"loss": 1.1268,
"step": 672
},
{
"epoch": 0.5241433021806854,
"grad_norm": 0.28125,
"learning_rate": 4.766355140186917e-06,
"loss": 1.079,
"step": 673
},
{
"epoch": 0.5249221183800623,
"grad_norm": 0.267578125,
"learning_rate": 4.758566978193147e-06,
"loss": 1.0677,
"step": 674
},
{
"epoch": 0.5257009345794392,
"grad_norm": 0.27734375,
"learning_rate": 4.750778816199378e-06,
"loss": 1.0876,
"step": 675
},
{
"epoch": 0.5264797507788161,
"grad_norm": 0.27734375,
"learning_rate": 4.742990654205608e-06,
"loss": 1.0943,
"step": 676
},
{
"epoch": 0.5272585669781932,
"grad_norm": 0.2734375,
"learning_rate": 4.735202492211838e-06,
"loss": 1.0844,
"step": 677
},
{
"epoch": 0.5280373831775701,
"grad_norm": 0.27734375,
"learning_rate": 4.727414330218069e-06,
"loss": 1.095,
"step": 678
},
{
"epoch": 0.528816199376947,
"grad_norm": 0.26953125,
"learning_rate": 4.719626168224299e-06,
"loss": 1.0612,
"step": 679
},
{
"epoch": 0.5295950155763239,
"grad_norm": 0.26953125,
"learning_rate": 4.7118380062305305e-06,
"loss": 1.1176,
"step": 680
},
{
"epoch": 0.530373831775701,
"grad_norm": 0.287109375,
"learning_rate": 4.70404984423676e-06,
"loss": 1.1202,
"step": 681
},
{
"epoch": 0.5311526479750779,
"grad_norm": 0.306640625,
"learning_rate": 4.696261682242992e-06,
"loss": 1.0918,
"step": 682
},
{
"epoch": 0.5319314641744548,
"grad_norm": 0.283203125,
"learning_rate": 4.688473520249221e-06,
"loss": 1.1086,
"step": 683
},
{
"epoch": 0.5327102803738317,
"grad_norm": 0.330078125,
"learning_rate": 4.680685358255452e-06,
"loss": 1.0619,
"step": 684
},
{
"epoch": 0.5334890965732088,
"grad_norm": 0.2734375,
"learning_rate": 4.6728971962616825e-06,
"loss": 1.0663,
"step": 685
},
{
"epoch": 0.5342679127725857,
"grad_norm": 0.27734375,
"learning_rate": 4.665109034267913e-06,
"loss": 1.0539,
"step": 686
},
{
"epoch": 0.5350467289719626,
"grad_norm": 0.310546875,
"learning_rate": 4.657320872274144e-06,
"loss": 1.0999,
"step": 687
},
{
"epoch": 0.5358255451713395,
"grad_norm": 0.29296875,
"learning_rate": 4.649532710280374e-06,
"loss": 1.1015,
"step": 688
},
{
"epoch": 0.5366043613707165,
"grad_norm": 0.298828125,
"learning_rate": 4.641744548286605e-06,
"loss": 1.1132,
"step": 689
},
{
"epoch": 0.5373831775700935,
"grad_norm": 0.279296875,
"learning_rate": 4.633956386292835e-06,
"loss": 1.0831,
"step": 690
},
{
"epoch": 0.5381619937694704,
"grad_norm": 0.28125,
"learning_rate": 4.626168224299066e-06,
"loss": 1.0406,
"step": 691
},
{
"epoch": 0.5389408099688473,
"grad_norm": 0.28125,
"learning_rate": 4.6183800623052965e-06,
"loss": 1.05,
"step": 692
},
{
"epoch": 0.5397196261682243,
"grad_norm": 0.28125,
"learning_rate": 4.610591900311527e-06,
"loss": 1.0609,
"step": 693
},
{
"epoch": 0.5404984423676013,
"grad_norm": 0.279296875,
"learning_rate": 4.602803738317758e-06,
"loss": 1.084,
"step": 694
},
{
"epoch": 0.5412772585669782,
"grad_norm": 0.259765625,
"learning_rate": 4.595015576323988e-06,
"loss": 1.1124,
"step": 695
},
{
"epoch": 0.5420560747663551,
"grad_norm": 0.263671875,
"learning_rate": 4.587227414330219e-06,
"loss": 1.071,
"step": 696
},
{
"epoch": 0.5428348909657321,
"grad_norm": 0.427734375,
"learning_rate": 4.579439252336449e-06,
"loss": 1.1186,
"step": 697
},
{
"epoch": 0.543613707165109,
"grad_norm": 0.259765625,
"learning_rate": 4.57165109034268e-06,
"loss": 1.0411,
"step": 698
},
{
"epoch": 0.544392523364486,
"grad_norm": 0.28515625,
"learning_rate": 4.56386292834891e-06,
"loss": 1.1097,
"step": 699
},
{
"epoch": 0.5451713395638629,
"grad_norm": 0.26953125,
"learning_rate": 4.556074766355141e-06,
"loss": 1.0648,
"step": 700
},
{
"epoch": 0.5459501557632399,
"grad_norm": 0.28125,
"learning_rate": 4.548286604361371e-06,
"loss": 1.0922,
"step": 701
},
{
"epoch": 0.5467289719626168,
"grad_norm": 0.263671875,
"learning_rate": 4.540498442367602e-06,
"loss": 1.0709,
"step": 702
},
{
"epoch": 0.5475077881619937,
"grad_norm": 0.2734375,
"learning_rate": 4.532710280373832e-06,
"loss": 1.0943,
"step": 703
},
{
"epoch": 0.5482866043613707,
"grad_norm": 0.291015625,
"learning_rate": 4.524922118380063e-06,
"loss": 1.0856,
"step": 704
},
{
"epoch": 0.5490654205607477,
"grad_norm": 0.287109375,
"learning_rate": 4.517133956386293e-06,
"loss": 1.0642,
"step": 705
},
{
"epoch": 0.5498442367601246,
"grad_norm": 0.27734375,
"learning_rate": 4.509345794392524e-06,
"loss": 1.0893,
"step": 706
},
{
"epoch": 0.5506230529595015,
"grad_norm": 0.2734375,
"learning_rate": 4.501557632398754e-06,
"loss": 1.0904,
"step": 707
},
{
"epoch": 0.5514018691588785,
"grad_norm": 0.310546875,
"learning_rate": 4.493769470404985e-06,
"loss": 1.0527,
"step": 708
},
{
"epoch": 0.5521806853582555,
"grad_norm": 0.26953125,
"learning_rate": 4.485981308411215e-06,
"loss": 1.0768,
"step": 709
},
{
"epoch": 0.5529595015576324,
"grad_norm": 0.271484375,
"learning_rate": 4.478193146417446e-06,
"loss": 1.1179,
"step": 710
},
{
"epoch": 0.5537383177570093,
"grad_norm": 0.2890625,
"learning_rate": 4.4704049844236764e-06,
"loss": 1.0821,
"step": 711
},
{
"epoch": 0.5545171339563862,
"grad_norm": 0.26953125,
"learning_rate": 4.462616822429907e-06,
"loss": 1.0819,
"step": 712
},
{
"epoch": 0.5552959501557633,
"grad_norm": 0.298828125,
"learning_rate": 4.4548286604361376e-06,
"loss": 1.0693,
"step": 713
},
{
"epoch": 0.5560747663551402,
"grad_norm": 0.384765625,
"learning_rate": 4.447040498442368e-06,
"loss": 1.1305,
"step": 714
},
{
"epoch": 0.5568535825545171,
"grad_norm": 0.271484375,
"learning_rate": 4.439252336448599e-06,
"loss": 1.0839,
"step": 715
},
{
"epoch": 0.557632398753894,
"grad_norm": 0.29296875,
"learning_rate": 4.431464174454829e-06,
"loss": 1.1371,
"step": 716
},
{
"epoch": 0.5584112149532711,
"grad_norm": 0.291015625,
"learning_rate": 4.42367601246106e-06,
"loss": 1.08,
"step": 717
},
{
"epoch": 0.559190031152648,
"grad_norm": 0.30078125,
"learning_rate": 4.41588785046729e-06,
"loss": 1.0893,
"step": 718
},
{
"epoch": 0.5599688473520249,
"grad_norm": 0.328125,
"learning_rate": 4.40809968847352e-06,
"loss": 1.0644,
"step": 719
},
{
"epoch": 0.5607476635514018,
"grad_norm": 0.27734375,
"learning_rate": 4.4003115264797515e-06,
"loss": 1.1049,
"step": 720
},
{
"epoch": 0.5615264797507789,
"grad_norm": 0.26171875,
"learning_rate": 4.392523364485981e-06,
"loss": 1.1247,
"step": 721
},
{
"epoch": 0.5623052959501558,
"grad_norm": 0.25,
"learning_rate": 4.384735202492213e-06,
"loss": 1.097,
"step": 722
},
{
"epoch": 0.5630841121495327,
"grad_norm": 0.265625,
"learning_rate": 4.376947040498442e-06,
"loss": 1.1173,
"step": 723
},
{
"epoch": 0.5638629283489096,
"grad_norm": 0.263671875,
"learning_rate": 4.369158878504674e-06,
"loss": 1.0981,
"step": 724
},
{
"epoch": 0.5646417445482866,
"grad_norm": 0.259765625,
"learning_rate": 4.3613707165109035e-06,
"loss": 1.043,
"step": 725
},
{
"epoch": 0.5654205607476636,
"grad_norm": 0.3125,
"learning_rate": 4.353582554517134e-06,
"loss": 1.1311,
"step": 726
},
{
"epoch": 0.5661993769470405,
"grad_norm": 0.29296875,
"learning_rate": 4.345794392523365e-06,
"loss": 1.1001,
"step": 727
},
{
"epoch": 0.5669781931464174,
"grad_norm": 0.259765625,
"learning_rate": 4.338006230529595e-06,
"loss": 1.0973,
"step": 728
},
{
"epoch": 0.5677570093457944,
"grad_norm": 0.26171875,
"learning_rate": 4.330218068535826e-06,
"loss": 1.072,
"step": 729
},
{
"epoch": 0.5685358255451713,
"grad_norm": 0.2734375,
"learning_rate": 4.322429906542056e-06,
"loss": 1.0721,
"step": 730
},
{
"epoch": 0.5693146417445483,
"grad_norm": 0.32421875,
"learning_rate": 4.314641744548287e-06,
"loss": 1.138,
"step": 731
},
{
"epoch": 0.5700934579439252,
"grad_norm": 0.2734375,
"learning_rate": 4.3068535825545175e-06,
"loss": 1.0218,
"step": 732
},
{
"epoch": 0.5708722741433022,
"grad_norm": 0.2734375,
"learning_rate": 4.299065420560748e-06,
"loss": 1.0676,
"step": 733
},
{
"epoch": 0.5716510903426791,
"grad_norm": 0.287109375,
"learning_rate": 4.291277258566979e-06,
"loss": 1.1021,
"step": 734
},
{
"epoch": 0.572429906542056,
"grad_norm": 0.267578125,
"learning_rate": 4.283489096573209e-06,
"loss": 1.1013,
"step": 735
},
{
"epoch": 0.573208722741433,
"grad_norm": 0.2578125,
"learning_rate": 4.27570093457944e-06,
"loss": 1.108,
"step": 736
},
{
"epoch": 0.57398753894081,
"grad_norm": 0.259765625,
"learning_rate": 4.26791277258567e-06,
"loss": 1.0833,
"step": 737
},
{
"epoch": 0.5747663551401869,
"grad_norm": 0.283203125,
"learning_rate": 4.260124610591901e-06,
"loss": 1.0556,
"step": 738
},
{
"epoch": 0.5755451713395638,
"grad_norm": 0.490234375,
"learning_rate": 4.2523364485981315e-06,
"loss": 1.065,
"step": 739
},
{
"epoch": 0.5763239875389408,
"grad_norm": 0.271484375,
"learning_rate": 4.244548286604362e-06,
"loss": 1.0507,
"step": 740
},
{
"epoch": 0.5771028037383178,
"grad_norm": 0.25390625,
"learning_rate": 4.236760124610592e-06,
"loss": 1.0535,
"step": 741
},
{
"epoch": 0.5778816199376947,
"grad_norm": 0.267578125,
"learning_rate": 4.228971962616823e-06,
"loss": 1.0819,
"step": 742
},
{
"epoch": 0.5786604361370716,
"grad_norm": 0.25390625,
"learning_rate": 4.221183800623053e-06,
"loss": 1.0582,
"step": 743
},
{
"epoch": 0.5794392523364486,
"grad_norm": 0.291015625,
"learning_rate": 4.213395638629284e-06,
"loss": 1.1174,
"step": 744
},
{
"epoch": 0.5802180685358256,
"grad_norm": 0.287109375,
"learning_rate": 4.205607476635514e-06,
"loss": 1.0815,
"step": 745
},
{
"epoch": 0.5809968847352025,
"grad_norm": 0.345703125,
"learning_rate": 4.1978193146417454e-06,
"loss": 1.1157,
"step": 746
},
{
"epoch": 0.5817757009345794,
"grad_norm": 0.26953125,
"learning_rate": 4.190031152647975e-06,
"loss": 1.0659,
"step": 747
},
{
"epoch": 0.5825545171339563,
"grad_norm": 0.275390625,
"learning_rate": 4.182242990654206e-06,
"loss": 1.0546,
"step": 748
},
{
"epoch": 0.5833333333333334,
"grad_norm": 0.28515625,
"learning_rate": 4.174454828660436e-06,
"loss": 1.1203,
"step": 749
},
{
"epoch": 0.5841121495327103,
"grad_norm": 0.365234375,
"learning_rate": 4.166666666666667e-06,
"loss": 1.1135,
"step": 750
},
{
"epoch": 0.5848909657320872,
"grad_norm": 0.279296875,
"learning_rate": 4.1588785046728974e-06,
"loss": 1.0913,
"step": 751
},
{
"epoch": 0.5856697819314641,
"grad_norm": 0.29296875,
"learning_rate": 4.151090342679128e-06,
"loss": 1.0747,
"step": 752
},
{
"epoch": 0.5864485981308412,
"grad_norm": 0.30078125,
"learning_rate": 4.1433021806853586e-06,
"loss": 1.0685,
"step": 753
},
{
"epoch": 0.5872274143302181,
"grad_norm": 0.283203125,
"learning_rate": 4.135514018691589e-06,
"loss": 1.0961,
"step": 754
},
{
"epoch": 0.588006230529595,
"grad_norm": 0.279296875,
"learning_rate": 4.12772585669782e-06,
"loss": 1.0738,
"step": 755
},
{
"epoch": 0.5887850467289719,
"grad_norm": 0.28125,
"learning_rate": 4.11993769470405e-06,
"loss": 1.0801,
"step": 756
},
{
"epoch": 0.589563862928349,
"grad_norm": 0.287109375,
"learning_rate": 4.112149532710281e-06,
"loss": 1.0537,
"step": 757
},
{
"epoch": 0.5903426791277259,
"grad_norm": 0.2734375,
"learning_rate": 4.104361370716511e-06,
"loss": 1.0658,
"step": 758
},
{
"epoch": 0.5911214953271028,
"grad_norm": 0.27734375,
"learning_rate": 4.096573208722742e-06,
"loss": 1.0827,
"step": 759
},
{
"epoch": 0.5919003115264797,
"grad_norm": 0.27734375,
"learning_rate": 4.0887850467289725e-06,
"loss": 1.0808,
"step": 760
},
{
"epoch": 0.5926791277258567,
"grad_norm": 0.2890625,
"learning_rate": 4.080996884735203e-06,
"loss": 1.0706,
"step": 761
},
{
"epoch": 0.5934579439252337,
"grad_norm": 0.275390625,
"learning_rate": 4.073208722741434e-06,
"loss": 1.0792,
"step": 762
},
{
"epoch": 0.5942367601246106,
"grad_norm": 0.275390625,
"learning_rate": 4.065420560747663e-06,
"loss": 1.0838,
"step": 763
},
{
"epoch": 0.5950155763239875,
"grad_norm": 0.275390625,
"learning_rate": 4.057632398753895e-06,
"loss": 1.0684,
"step": 764
},
{
"epoch": 0.5957943925233645,
"grad_norm": 0.279296875,
"learning_rate": 4.0498442367601245e-06,
"loss": 1.1377,
"step": 765
},
{
"epoch": 0.5965732087227414,
"grad_norm": 0.294921875,
"learning_rate": 4.042056074766356e-06,
"loss": 1.0685,
"step": 766
},
{
"epoch": 0.5973520249221184,
"grad_norm": 0.267578125,
"learning_rate": 4.034267912772586e-06,
"loss": 1.0488,
"step": 767
},
{
"epoch": 0.5981308411214953,
"grad_norm": 0.3203125,
"learning_rate": 4.026479750778817e-06,
"loss": 1.1064,
"step": 768
},
{
"epoch": 0.5989096573208723,
"grad_norm": 0.291015625,
"learning_rate": 4.018691588785047e-06,
"loss": 1.0814,
"step": 769
},
{
"epoch": 0.5996884735202492,
"grad_norm": 0.2734375,
"learning_rate": 4.010903426791277e-06,
"loss": 1.0498,
"step": 770
},
{
"epoch": 0.6004672897196262,
"grad_norm": 0.287109375,
"learning_rate": 4.003115264797508e-06,
"loss": 1.0894,
"step": 771
},
{
"epoch": 0.6012461059190031,
"grad_norm": 0.275390625,
"learning_rate": 3.9953271028037385e-06,
"loss": 1.082,
"step": 772
},
{
"epoch": 0.6020249221183801,
"grad_norm": 0.26953125,
"learning_rate": 3.987538940809969e-06,
"loss": 1.0866,
"step": 773
},
{
"epoch": 0.602803738317757,
"grad_norm": 0.259765625,
"learning_rate": 3.9797507788162e-06,
"loss": 1.0848,
"step": 774
},
{
"epoch": 0.6035825545171339,
"grad_norm": 0.283203125,
"learning_rate": 3.97196261682243e-06,
"loss": 1.0735,
"step": 775
},
{
"epoch": 0.6043613707165109,
"grad_norm": 0.279296875,
"learning_rate": 3.964174454828661e-06,
"loss": 1.103,
"step": 776
},
{
"epoch": 0.6051401869158879,
"grad_norm": 0.2890625,
"learning_rate": 3.956386292834891e-06,
"loss": 1.0789,
"step": 777
},
{
"epoch": 0.6059190031152648,
"grad_norm": 0.267578125,
"learning_rate": 3.948598130841122e-06,
"loss": 1.0511,
"step": 778
},
{
"epoch": 0.6066978193146417,
"grad_norm": 0.30859375,
"learning_rate": 3.9408099688473525e-06,
"loss": 1.0715,
"step": 779
},
{
"epoch": 0.6074766355140186,
"grad_norm": 0.26953125,
"learning_rate": 3.933021806853583e-06,
"loss": 1.0493,
"step": 780
},
{
"epoch": 0.6082554517133957,
"grad_norm": 0.279296875,
"learning_rate": 3.925233644859814e-06,
"loss": 1.0764,
"step": 781
},
{
"epoch": 0.6090342679127726,
"grad_norm": 0.283203125,
"learning_rate": 3.917445482866044e-06,
"loss": 1.0719,
"step": 782
},
{
"epoch": 0.6098130841121495,
"grad_norm": 0.265625,
"learning_rate": 3.909657320872275e-06,
"loss": 1.0784,
"step": 783
},
{
"epoch": 0.6105919003115264,
"grad_norm": 0.279296875,
"learning_rate": 3.901869158878505e-06,
"loss": 1.113,
"step": 784
},
{
"epoch": 0.6113707165109035,
"grad_norm": 0.283203125,
"learning_rate": 3.894080996884735e-06,
"loss": 1.0998,
"step": 785
},
{
"epoch": 0.6121495327102804,
"grad_norm": 0.259765625,
"learning_rate": 3.8862928348909664e-06,
"loss": 1.0832,
"step": 786
},
{
"epoch": 0.6129283489096573,
"grad_norm": 0.26953125,
"learning_rate": 3.878504672897196e-06,
"loss": 1.0502,
"step": 787
},
{
"epoch": 0.6137071651090342,
"grad_norm": 0.26953125,
"learning_rate": 3.8707165109034276e-06,
"loss": 1.1065,
"step": 788
},
{
"epoch": 0.6144859813084113,
"grad_norm": 0.2578125,
"learning_rate": 3.862928348909657e-06,
"loss": 1.0664,
"step": 789
},
{
"epoch": 0.6152647975077882,
"grad_norm": 0.275390625,
"learning_rate": 3.855140186915889e-06,
"loss": 1.1309,
"step": 790
},
{
"epoch": 0.6160436137071651,
"grad_norm": 0.283203125,
"learning_rate": 3.8473520249221184e-06,
"loss": 1.0686,
"step": 791
},
{
"epoch": 0.616822429906542,
"grad_norm": 0.26171875,
"learning_rate": 3.839563862928349e-06,
"loss": 1.0899,
"step": 792
},
{
"epoch": 0.617601246105919,
"grad_norm": 0.265625,
"learning_rate": 3.8317757009345796e-06,
"loss": 1.0609,
"step": 793
},
{
"epoch": 0.618380062305296,
"grad_norm": 0.2734375,
"learning_rate": 3.82398753894081e-06,
"loss": 1.0837,
"step": 794
},
{
"epoch": 0.6191588785046729,
"grad_norm": 0.28125,
"learning_rate": 3.816199376947041e-06,
"loss": 1.0839,
"step": 795
},
{
"epoch": 0.6199376947040498,
"grad_norm": 0.279296875,
"learning_rate": 3.8084112149532717e-06,
"loss": 1.0824,
"step": 796
},
{
"epoch": 0.6207165109034268,
"grad_norm": 0.29296875,
"learning_rate": 3.800623052959502e-06,
"loss": 1.1292,
"step": 797
},
{
"epoch": 0.6214953271028038,
"grad_norm": 0.287109375,
"learning_rate": 3.7928348909657324e-06,
"loss": 1.0466,
"step": 798
},
{
"epoch": 0.6222741433021807,
"grad_norm": 0.29296875,
"learning_rate": 3.785046728971963e-06,
"loss": 1.1084,
"step": 799
},
{
"epoch": 0.6230529595015576,
"grad_norm": 0.279296875,
"learning_rate": 3.7772585669781935e-06,
"loss": 1.0646,
"step": 800
},
{
"epoch": 0.6238317757009346,
"grad_norm": 0.28515625,
"learning_rate": 3.7694704049844237e-06,
"loss": 1.0794,
"step": 801
},
{
"epoch": 0.6246105919003115,
"grad_norm": 0.263671875,
"learning_rate": 3.7616822429906547e-06,
"loss": 1.0691,
"step": 802
},
{
"epoch": 0.6253894080996885,
"grad_norm": 0.27734375,
"learning_rate": 3.753894080996885e-06,
"loss": 1.0927,
"step": 803
},
{
"epoch": 0.6261682242990654,
"grad_norm": 0.287109375,
"learning_rate": 3.746105919003116e-06,
"loss": 1.0596,
"step": 804
},
{
"epoch": 0.6269470404984424,
"grad_norm": 0.26953125,
"learning_rate": 3.738317757009346e-06,
"loss": 1.0388,
"step": 805
},
{
"epoch": 0.6277258566978193,
"grad_norm": 0.28515625,
"learning_rate": 3.730529595015577e-06,
"loss": 1.0832,
"step": 806
},
{
"epoch": 0.6285046728971962,
"grad_norm": 0.275390625,
"learning_rate": 3.722741433021807e-06,
"loss": 1.0831,
"step": 807
},
{
"epoch": 0.6292834890965732,
"grad_norm": 0.2890625,
"learning_rate": 3.7149532710280376e-06,
"loss": 1.1264,
"step": 808
},
{
"epoch": 0.6300623052959502,
"grad_norm": 0.310546875,
"learning_rate": 3.7071651090342682e-06,
"loss": 1.1208,
"step": 809
},
{
"epoch": 0.6308411214953271,
"grad_norm": 0.28125,
"learning_rate": 3.6993769470404988e-06,
"loss": 1.0782,
"step": 810
},
{
"epoch": 0.631619937694704,
"grad_norm": 0.267578125,
"learning_rate": 3.691588785046729e-06,
"loss": 1.1117,
"step": 811
},
{
"epoch": 0.632398753894081,
"grad_norm": 0.27734375,
"learning_rate": 3.68380062305296e-06,
"loss": 1.0617,
"step": 812
},
{
"epoch": 0.633177570093458,
"grad_norm": 0.283203125,
"learning_rate": 3.67601246105919e-06,
"loss": 1.0826,
"step": 813
},
{
"epoch": 0.6339563862928349,
"grad_norm": 0.294921875,
"learning_rate": 3.668224299065421e-06,
"loss": 1.0539,
"step": 814
},
{
"epoch": 0.6347352024922118,
"grad_norm": 0.271484375,
"learning_rate": 3.660436137071651e-06,
"loss": 1.0806,
"step": 815
},
{
"epoch": 0.6355140186915887,
"grad_norm": 0.271484375,
"learning_rate": 3.652647975077882e-06,
"loss": 1.0731,
"step": 816
},
{
"epoch": 0.6362928348909658,
"grad_norm": 0.279296875,
"learning_rate": 3.6448598130841123e-06,
"loss": 1.0908,
"step": 817
},
{
"epoch": 0.6370716510903427,
"grad_norm": 0.26171875,
"learning_rate": 3.637071651090343e-06,
"loss": 1.1305,
"step": 818
},
{
"epoch": 0.6378504672897196,
"grad_norm": 0.271484375,
"learning_rate": 3.6292834890965735e-06,
"loss": 1.0906,
"step": 819
},
{
"epoch": 0.6386292834890965,
"grad_norm": 0.275390625,
"learning_rate": 3.621495327102804e-06,
"loss": 1.0434,
"step": 820
},
{
"epoch": 0.6394080996884736,
"grad_norm": 0.302734375,
"learning_rate": 3.6137071651090346e-06,
"loss": 1.0498,
"step": 821
},
{
"epoch": 0.6401869158878505,
"grad_norm": 0.28125,
"learning_rate": 3.605919003115265e-06,
"loss": 1.1043,
"step": 822
},
{
"epoch": 0.6409657320872274,
"grad_norm": 0.255859375,
"learning_rate": 3.5981308411214953e-06,
"loss": 1.064,
"step": 823
},
{
"epoch": 0.6417445482866043,
"grad_norm": 0.2890625,
"learning_rate": 3.5903426791277263e-06,
"loss": 1.0921,
"step": 824
},
{
"epoch": 0.6425233644859814,
"grad_norm": 0.265625,
"learning_rate": 3.5825545171339564e-06,
"loss": 1.063,
"step": 825
},
{
"epoch": 0.6433021806853583,
"grad_norm": 0.291015625,
"learning_rate": 3.5747663551401874e-06,
"loss": 1.0995,
"step": 826
},
{
"epoch": 0.6440809968847352,
"grad_norm": 0.27734375,
"learning_rate": 3.5669781931464176e-06,
"loss": 1.0636,
"step": 827
},
{
"epoch": 0.6448598130841121,
"grad_norm": 0.294921875,
"learning_rate": 3.5591900311526486e-06,
"loss": 1.0945,
"step": 828
},
{
"epoch": 0.6456386292834891,
"grad_norm": 0.291015625,
"learning_rate": 3.5514018691588787e-06,
"loss": 1.0902,
"step": 829
},
{
"epoch": 0.6464174454828661,
"grad_norm": 0.27734375,
"learning_rate": 3.5436137071651093e-06,
"loss": 1.1284,
"step": 830
},
{
"epoch": 0.647196261682243,
"grad_norm": 0.263671875,
"learning_rate": 3.53582554517134e-06,
"loss": 1.075,
"step": 831
},
{
"epoch": 0.6479750778816199,
"grad_norm": 0.28125,
"learning_rate": 3.5280373831775704e-06,
"loss": 1.0573,
"step": 832
},
{
"epoch": 0.6487538940809969,
"grad_norm": 0.287109375,
"learning_rate": 3.5202492211838006e-06,
"loss": 1.0987,
"step": 833
},
{
"epoch": 0.6495327102803738,
"grad_norm": 0.2890625,
"learning_rate": 3.5124610591900315e-06,
"loss": 1.0735,
"step": 834
},
{
"epoch": 0.6503115264797508,
"grad_norm": 0.2734375,
"learning_rate": 3.5046728971962617e-06,
"loss": 1.0847,
"step": 835
},
{
"epoch": 0.6510903426791277,
"grad_norm": 0.296875,
"learning_rate": 3.4968847352024927e-06,
"loss": 1.149,
"step": 836
},
{
"epoch": 0.6518691588785047,
"grad_norm": 0.28515625,
"learning_rate": 3.489096573208723e-06,
"loss": 1.101,
"step": 837
},
{
"epoch": 0.6526479750778816,
"grad_norm": 0.283203125,
"learning_rate": 3.481308411214954e-06,
"loss": 1.032,
"step": 838
},
{
"epoch": 0.6534267912772586,
"grad_norm": 0.291015625,
"learning_rate": 3.473520249221184e-06,
"loss": 1.0793,
"step": 839
},
{
"epoch": 0.6542056074766355,
"grad_norm": 0.271484375,
"learning_rate": 3.4657320872274145e-06,
"loss": 1.0936,
"step": 840
},
{
"epoch": 0.6549844236760125,
"grad_norm": 0.265625,
"learning_rate": 3.457943925233645e-06,
"loss": 1.1204,
"step": 841
},
{
"epoch": 0.6557632398753894,
"grad_norm": 0.26953125,
"learning_rate": 3.4501557632398757e-06,
"loss": 1.0806,
"step": 842
},
{
"epoch": 0.6565420560747663,
"grad_norm": 0.28515625,
"learning_rate": 3.4423676012461062e-06,
"loss": 1.0755,
"step": 843
},
{
"epoch": 0.6573208722741433,
"grad_norm": 0.296875,
"learning_rate": 3.434579439252337e-06,
"loss": 1.0994,
"step": 844
},
{
"epoch": 0.6580996884735203,
"grad_norm": 0.271484375,
"learning_rate": 3.426791277258567e-06,
"loss": 1.0392,
"step": 845
},
{
"epoch": 0.6588785046728972,
"grad_norm": 0.275390625,
"learning_rate": 3.419003115264798e-06,
"loss": 1.0495,
"step": 846
},
{
"epoch": 0.6596573208722741,
"grad_norm": 0.267578125,
"learning_rate": 3.411214953271028e-06,
"loss": 1.0559,
"step": 847
},
{
"epoch": 0.660436137071651,
"grad_norm": 0.27734375,
"learning_rate": 3.403426791277259e-06,
"loss": 1.0836,
"step": 848
},
{
"epoch": 0.6612149532710281,
"grad_norm": 0.26953125,
"learning_rate": 3.395638629283489e-06,
"loss": 1.0457,
"step": 849
},
{
"epoch": 0.661993769470405,
"grad_norm": 0.267578125,
"learning_rate": 3.38785046728972e-06,
"loss": 1.0792,
"step": 850
},
{
"epoch": 0.6627725856697819,
"grad_norm": 0.28515625,
"learning_rate": 3.3800623052959503e-06,
"loss": 1.0884,
"step": 851
},
{
"epoch": 0.6635514018691588,
"grad_norm": 0.30078125,
"learning_rate": 3.372274143302181e-06,
"loss": 1.1039,
"step": 852
},
{
"epoch": 0.6643302180685359,
"grad_norm": 0.287109375,
"learning_rate": 3.3644859813084115e-06,
"loss": 1.0802,
"step": 853
},
{
"epoch": 0.6651090342679128,
"grad_norm": 0.275390625,
"learning_rate": 3.356697819314642e-06,
"loss": 1.0577,
"step": 854
},
{
"epoch": 0.6658878504672897,
"grad_norm": 0.296875,
"learning_rate": 3.348909657320872e-06,
"loss": 1.0829,
"step": 855
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.291015625,
"learning_rate": 3.341121495327103e-06,
"loss": 1.1011,
"step": 856
},
{
"epoch": 0.6674454828660437,
"grad_norm": 0.26953125,
"learning_rate": 3.3333333333333333e-06,
"loss": 1.09,
"step": 857
},
{
"epoch": 0.6682242990654206,
"grad_norm": 0.27734375,
"learning_rate": 3.3255451713395643e-06,
"loss": 1.0927,
"step": 858
},
{
"epoch": 0.6690031152647975,
"grad_norm": 0.265625,
"learning_rate": 3.3177570093457945e-06,
"loss": 1.1094,
"step": 859
},
{
"epoch": 0.6697819314641744,
"grad_norm": 0.310546875,
"learning_rate": 3.3099688473520254e-06,
"loss": 1.047,
"step": 860
},
{
"epoch": 0.6705607476635514,
"grad_norm": 0.36328125,
"learning_rate": 3.3021806853582556e-06,
"loss": 1.1213,
"step": 861
},
{
"epoch": 0.6713395638629284,
"grad_norm": 0.31640625,
"learning_rate": 3.294392523364486e-06,
"loss": 1.0847,
"step": 862
},
{
"epoch": 0.6721183800623053,
"grad_norm": 0.302734375,
"learning_rate": 3.2866043613707167e-06,
"loss": 1.0783,
"step": 863
},
{
"epoch": 0.6728971962616822,
"grad_norm": 0.265625,
"learning_rate": 3.2788161993769473e-06,
"loss": 1.0706,
"step": 864
},
{
"epoch": 0.6736760124610592,
"grad_norm": 0.32421875,
"learning_rate": 3.2710280373831774e-06,
"loss": 1.062,
"step": 865
},
{
"epoch": 0.6744548286604362,
"grad_norm": 0.390625,
"learning_rate": 3.2632398753894084e-06,
"loss": 1.1374,
"step": 866
},
{
"epoch": 0.6752336448598131,
"grad_norm": 0.28125,
"learning_rate": 3.2554517133956386e-06,
"loss": 1.0608,
"step": 867
},
{
"epoch": 0.67601246105919,
"grad_norm": 0.279296875,
"learning_rate": 3.2476635514018696e-06,
"loss": 1.1051,
"step": 868
},
{
"epoch": 0.676791277258567,
"grad_norm": 0.275390625,
"learning_rate": 3.2398753894080997e-06,
"loss": 1.0726,
"step": 869
},
{
"epoch": 0.677570093457944,
"grad_norm": 0.267578125,
"learning_rate": 3.2320872274143307e-06,
"loss": 1.0635,
"step": 870
},
{
"epoch": 0.6783489096573209,
"grad_norm": 0.275390625,
"learning_rate": 3.224299065420561e-06,
"loss": 1.0326,
"step": 871
},
{
"epoch": 0.6791277258566978,
"grad_norm": 0.275390625,
"learning_rate": 3.2165109034267914e-06,
"loss": 1.1105,
"step": 872
},
{
"epoch": 0.6799065420560748,
"grad_norm": 0.298828125,
"learning_rate": 3.208722741433022e-06,
"loss": 1.054,
"step": 873
},
{
"epoch": 0.6806853582554517,
"grad_norm": 0.38671875,
"learning_rate": 3.2009345794392525e-06,
"loss": 1.0666,
"step": 874
},
{
"epoch": 0.6814641744548287,
"grad_norm": 0.283203125,
"learning_rate": 3.193146417445483e-06,
"loss": 1.0701,
"step": 875
},
{
"epoch": 0.6822429906542056,
"grad_norm": 0.279296875,
"learning_rate": 3.1853582554517137e-06,
"loss": 1.0481,
"step": 876
},
{
"epoch": 0.6830218068535826,
"grad_norm": 0.310546875,
"learning_rate": 3.177570093457944e-06,
"loss": 1.1145,
"step": 877
},
{
"epoch": 0.6838006230529595,
"grad_norm": 0.263671875,
"learning_rate": 3.169781931464175e-06,
"loss": 1.0799,
"step": 878
},
{
"epoch": 0.6845794392523364,
"grad_norm": 0.27734375,
"learning_rate": 3.161993769470405e-06,
"loss": 1.0753,
"step": 879
},
{
"epoch": 0.6853582554517134,
"grad_norm": 0.28515625,
"learning_rate": 3.154205607476636e-06,
"loss": 1.0673,
"step": 880
},
{
"epoch": 0.6861370716510904,
"grad_norm": 0.27734375,
"learning_rate": 3.146417445482866e-06,
"loss": 1.1134,
"step": 881
},
{
"epoch": 0.6869158878504673,
"grad_norm": 0.28125,
"learning_rate": 3.138629283489097e-06,
"loss": 1.0301,
"step": 882
},
{
"epoch": 0.6876947040498442,
"grad_norm": 0.29296875,
"learning_rate": 3.1308411214953272e-06,
"loss": 1.0317,
"step": 883
},
{
"epoch": 0.6884735202492211,
"grad_norm": 0.28125,
"learning_rate": 3.123052959501558e-06,
"loss": 1.0738,
"step": 884
},
{
"epoch": 0.6892523364485982,
"grad_norm": 0.27734375,
"learning_rate": 3.1152647975077884e-06,
"loss": 1.1083,
"step": 885
},
{
"epoch": 0.6900311526479751,
"grad_norm": 0.3359375,
"learning_rate": 3.107476635514019e-06,
"loss": 1.1678,
"step": 886
},
{
"epoch": 0.690809968847352,
"grad_norm": 0.27734375,
"learning_rate": 3.099688473520249e-06,
"loss": 1.0648,
"step": 887
},
{
"epoch": 0.6915887850467289,
"grad_norm": 0.28125,
"learning_rate": 3.09190031152648e-06,
"loss": 1.0702,
"step": 888
},
{
"epoch": 0.692367601246106,
"grad_norm": 0.3203125,
"learning_rate": 3.08411214953271e-06,
"loss": 1.1229,
"step": 889
},
{
"epoch": 0.6931464174454829,
"grad_norm": 0.26171875,
"learning_rate": 3.076323987538941e-06,
"loss": 1.0841,
"step": 890
},
{
"epoch": 0.6939252336448598,
"grad_norm": 0.271484375,
"learning_rate": 3.0685358255451713e-06,
"loss": 1.0687,
"step": 891
},
{
"epoch": 0.6947040498442367,
"grad_norm": 0.2734375,
"learning_rate": 3.0607476635514023e-06,
"loss": 1.077,
"step": 892
},
{
"epoch": 0.6954828660436138,
"grad_norm": 0.298828125,
"learning_rate": 3.0529595015576325e-06,
"loss": 1.1504,
"step": 893
},
{
"epoch": 0.6962616822429907,
"grad_norm": 0.283203125,
"learning_rate": 3.045171339563863e-06,
"loss": 1.083,
"step": 894
},
{
"epoch": 0.6970404984423676,
"grad_norm": 0.291015625,
"learning_rate": 3.0373831775700936e-06,
"loss": 1.0675,
"step": 895
},
{
"epoch": 0.6978193146417445,
"grad_norm": 0.2734375,
"learning_rate": 3.029595015576324e-06,
"loss": 1.0504,
"step": 896
},
{
"epoch": 0.6985981308411215,
"grad_norm": 0.27734375,
"learning_rate": 3.0218068535825547e-06,
"loss": 1.0798,
"step": 897
},
{
"epoch": 0.6993769470404985,
"grad_norm": 0.2890625,
"learning_rate": 3.0140186915887853e-06,
"loss": 1.087,
"step": 898
},
{
"epoch": 0.7001557632398754,
"grad_norm": 0.287109375,
"learning_rate": 3.0062305295950155e-06,
"loss": 1.0555,
"step": 899
},
{
"epoch": 0.7009345794392523,
"grad_norm": 0.283203125,
"learning_rate": 2.9984423676012464e-06,
"loss": 1.0867,
"step": 900
},
{
"epoch": 0.7017133956386293,
"grad_norm": 0.30859375,
"learning_rate": 2.9906542056074766e-06,
"loss": 1.0536,
"step": 901
},
{
"epoch": 0.7024922118380063,
"grad_norm": 0.275390625,
"learning_rate": 2.9828660436137076e-06,
"loss": 1.0455,
"step": 902
},
{
"epoch": 0.7032710280373832,
"grad_norm": 0.265625,
"learning_rate": 2.9750778816199377e-06,
"loss": 1.069,
"step": 903
},
{
"epoch": 0.7040498442367601,
"grad_norm": 0.263671875,
"learning_rate": 2.9672897196261687e-06,
"loss": 1.0622,
"step": 904
},
{
"epoch": 0.7048286604361371,
"grad_norm": 0.306640625,
"learning_rate": 2.959501557632399e-06,
"loss": 1.0797,
"step": 905
},
{
"epoch": 0.705607476635514,
"grad_norm": 0.306640625,
"learning_rate": 2.9517133956386294e-06,
"loss": 1.1055,
"step": 906
},
{
"epoch": 0.706386292834891,
"grad_norm": 0.259765625,
"learning_rate": 2.94392523364486e-06,
"loss": 1.0461,
"step": 907
},
{
"epoch": 0.7071651090342679,
"grad_norm": 0.27734375,
"learning_rate": 2.9361370716510906e-06,
"loss": 1.0937,
"step": 908
},
{
"epoch": 0.7079439252336449,
"grad_norm": 0.287109375,
"learning_rate": 2.9283489096573207e-06,
"loss": 1.0891,
"step": 909
},
{
"epoch": 0.7087227414330218,
"grad_norm": 0.265625,
"learning_rate": 2.9205607476635517e-06,
"loss": 1.0761,
"step": 910
},
{
"epoch": 0.7095015576323987,
"grad_norm": 0.279296875,
"learning_rate": 2.912772585669782e-06,
"loss": 1.069,
"step": 911
},
{
"epoch": 0.7102803738317757,
"grad_norm": 0.265625,
"learning_rate": 2.904984423676013e-06,
"loss": 1.0475,
"step": 912
},
{
"epoch": 0.7110591900311527,
"grad_norm": 0.26953125,
"learning_rate": 2.897196261682243e-06,
"loss": 1.0553,
"step": 913
},
{
"epoch": 0.7118380062305296,
"grad_norm": 0.283203125,
"learning_rate": 2.889408099688474e-06,
"loss": 1.0753,
"step": 914
},
{
"epoch": 0.7126168224299065,
"grad_norm": 0.259765625,
"learning_rate": 2.881619937694704e-06,
"loss": 1.0559,
"step": 915
},
{
"epoch": 0.7133956386292835,
"grad_norm": 0.287109375,
"learning_rate": 2.8738317757009347e-06,
"loss": 1.0864,
"step": 916
},
{
"epoch": 0.7141744548286605,
"grad_norm": 0.28515625,
"learning_rate": 2.8660436137071652e-06,
"loss": 1.0914,
"step": 917
},
{
"epoch": 0.7149532710280374,
"grad_norm": 0.328125,
"learning_rate": 2.858255451713396e-06,
"loss": 1.1383,
"step": 918
},
{
"epoch": 0.7157320872274143,
"grad_norm": 0.287109375,
"learning_rate": 2.8504672897196264e-06,
"loss": 1.0673,
"step": 919
},
{
"epoch": 0.7165109034267912,
"grad_norm": 0.29296875,
"learning_rate": 2.842679127725857e-06,
"loss": 1.1143,
"step": 920
},
{
"epoch": 0.7172897196261683,
"grad_norm": 0.298828125,
"learning_rate": 2.834890965732087e-06,
"loss": 1.0952,
"step": 921
},
{
"epoch": 0.7180685358255452,
"grad_norm": 0.28125,
"learning_rate": 2.827102803738318e-06,
"loss": 1.0766,
"step": 922
},
{
"epoch": 0.7188473520249221,
"grad_norm": 0.298828125,
"learning_rate": 2.8193146417445482e-06,
"loss": 1.0662,
"step": 923
},
{
"epoch": 0.719626168224299,
"grad_norm": 0.396484375,
"learning_rate": 2.811526479750779e-06,
"loss": 1.0713,
"step": 924
},
{
"epoch": 0.7204049844236761,
"grad_norm": 0.26953125,
"learning_rate": 2.8037383177570094e-06,
"loss": 1.1135,
"step": 925
},
{
"epoch": 0.721183800623053,
"grad_norm": 0.30859375,
"learning_rate": 2.7959501557632403e-06,
"loss": 1.0157,
"step": 926
},
{
"epoch": 0.7219626168224299,
"grad_norm": 0.298828125,
"learning_rate": 2.7881619937694705e-06,
"loss": 1.0178,
"step": 927
},
{
"epoch": 0.7227414330218068,
"grad_norm": 0.322265625,
"learning_rate": 2.780373831775701e-06,
"loss": 1.0885,
"step": 928
},
{
"epoch": 0.7235202492211839,
"grad_norm": 0.279296875,
"learning_rate": 2.7725856697819316e-06,
"loss": 1.0762,
"step": 929
},
{
"epoch": 0.7242990654205608,
"grad_norm": 0.279296875,
"learning_rate": 2.764797507788162e-06,
"loss": 1.0769,
"step": 930
},
{
"epoch": 0.7250778816199377,
"grad_norm": 0.259765625,
"learning_rate": 2.7570093457943923e-06,
"loss": 1.0562,
"step": 931
},
{
"epoch": 0.7258566978193146,
"grad_norm": 0.2734375,
"learning_rate": 2.7492211838006233e-06,
"loss": 1.0934,
"step": 932
},
{
"epoch": 0.7266355140186916,
"grad_norm": 0.294921875,
"learning_rate": 2.7414330218068535e-06,
"loss": 1.093,
"step": 933
},
{
"epoch": 0.7274143302180686,
"grad_norm": 0.28125,
"learning_rate": 2.7336448598130845e-06,
"loss": 1.0988,
"step": 934
},
{
"epoch": 0.7281931464174455,
"grad_norm": 0.34765625,
"learning_rate": 2.7258566978193146e-06,
"loss": 1.0662,
"step": 935
},
{
"epoch": 0.7289719626168224,
"grad_norm": 0.275390625,
"learning_rate": 2.7180685358255456e-06,
"loss": 1.0791,
"step": 936
},
{
"epoch": 0.7297507788161994,
"grad_norm": 0.275390625,
"learning_rate": 2.7102803738317757e-06,
"loss": 1.0599,
"step": 937
},
{
"epoch": 0.7305295950155763,
"grad_norm": 0.271484375,
"learning_rate": 2.7024922118380063e-06,
"loss": 1.0767,
"step": 938
},
{
"epoch": 0.7313084112149533,
"grad_norm": 0.267578125,
"learning_rate": 2.694704049844237e-06,
"loss": 1.0604,
"step": 939
},
{
"epoch": 0.7320872274143302,
"grad_norm": 0.296875,
"learning_rate": 2.6869158878504674e-06,
"loss": 1.0616,
"step": 940
},
{
"epoch": 0.7328660436137072,
"grad_norm": 0.4140625,
"learning_rate": 2.6791277258566976e-06,
"loss": 1.0605,
"step": 941
},
{
"epoch": 0.7336448598130841,
"grad_norm": 0.28125,
"learning_rate": 2.6713395638629286e-06,
"loss": 1.0489,
"step": 942
},
{
"epoch": 0.7344236760124611,
"grad_norm": 0.2734375,
"learning_rate": 2.6635514018691587e-06,
"loss": 1.0966,
"step": 943
},
{
"epoch": 0.735202492211838,
"grad_norm": 0.296875,
"learning_rate": 2.6557632398753897e-06,
"loss": 1.1221,
"step": 944
},
{
"epoch": 0.735981308411215,
"grad_norm": 0.27734375,
"learning_rate": 2.64797507788162e-06,
"loss": 1.0719,
"step": 945
},
{
"epoch": 0.7367601246105919,
"grad_norm": 0.265625,
"learning_rate": 2.640186915887851e-06,
"loss": 1.047,
"step": 946
},
{
"epoch": 0.7375389408099688,
"grad_norm": 0.279296875,
"learning_rate": 2.632398753894081e-06,
"loss": 1.0382,
"step": 947
},
{
"epoch": 0.7383177570093458,
"grad_norm": 0.283203125,
"learning_rate": 2.6246105919003116e-06,
"loss": 1.0939,
"step": 948
},
{
"epoch": 0.7390965732087228,
"grad_norm": 0.275390625,
"learning_rate": 2.616822429906542e-06,
"loss": 1.0715,
"step": 949
},
{
"epoch": 0.7398753894080997,
"grad_norm": 0.267578125,
"learning_rate": 2.6090342679127727e-06,
"loss": 1.0566,
"step": 950
},
{
"epoch": 0.7406542056074766,
"grad_norm": 0.283203125,
"learning_rate": 2.6012461059190033e-06,
"loss": 1.0415,
"step": 951
},
{
"epoch": 0.7414330218068536,
"grad_norm": 0.291015625,
"learning_rate": 2.593457943925234e-06,
"loss": 1.082,
"step": 952
},
{
"epoch": 0.7422118380062306,
"grad_norm": 0.283203125,
"learning_rate": 2.585669781931464e-06,
"loss": 1.1205,
"step": 953
},
{
"epoch": 0.7429906542056075,
"grad_norm": 0.265625,
"learning_rate": 2.577881619937695e-06,
"loss": 1.0608,
"step": 954
},
{
"epoch": 0.7437694704049844,
"grad_norm": 0.2734375,
"learning_rate": 2.570093457943925e-06,
"loss": 1.1027,
"step": 955
},
{
"epoch": 0.7445482866043613,
"grad_norm": 0.271484375,
"learning_rate": 2.562305295950156e-06,
"loss": 1.0701,
"step": 956
},
{
"epoch": 0.7453271028037384,
"grad_norm": 0.2890625,
"learning_rate": 2.5545171339563862e-06,
"loss": 1.124,
"step": 957
},
{
"epoch": 0.7461059190031153,
"grad_norm": 0.279296875,
"learning_rate": 2.5467289719626172e-06,
"loss": 1.0619,
"step": 958
},
{
"epoch": 0.7468847352024922,
"grad_norm": 0.28515625,
"learning_rate": 2.5389408099688474e-06,
"loss": 1.1057,
"step": 959
},
{
"epoch": 0.7476635514018691,
"grad_norm": 0.26171875,
"learning_rate": 2.531152647975078e-06,
"loss": 1.078,
"step": 960
},
{
"epoch": 0.7484423676012462,
"grad_norm": 0.275390625,
"learning_rate": 2.5233644859813085e-06,
"loss": 1.0796,
"step": 961
},
{
"epoch": 0.7492211838006231,
"grad_norm": 0.29296875,
"learning_rate": 2.515576323987539e-06,
"loss": 1.097,
"step": 962
},
{
"epoch": 0.75,
"grad_norm": 0.287109375,
"learning_rate": 2.5077881619937692e-06,
"loss": 1.0719,
"step": 963
},
{
"epoch": 0.7507788161993769,
"grad_norm": 0.2734375,
"learning_rate": 2.5e-06,
"loss": 1.0844,
"step": 964
},
{
"epoch": 0.7515576323987538,
"grad_norm": 0.3046875,
"learning_rate": 2.4922118380062308e-06,
"loss": 1.045,
"step": 965
},
{
"epoch": 0.7523364485981309,
"grad_norm": 0.28125,
"learning_rate": 2.4844236760124613e-06,
"loss": 1.0713,
"step": 966
},
{
"epoch": 0.7531152647975078,
"grad_norm": 0.26953125,
"learning_rate": 2.476635514018692e-06,
"loss": 1.0719,
"step": 967
},
{
"epoch": 0.7538940809968847,
"grad_norm": 0.28125,
"learning_rate": 2.4688473520249225e-06,
"loss": 1.0782,
"step": 968
},
{
"epoch": 0.7546728971962616,
"grad_norm": 0.275390625,
"learning_rate": 2.461059190031153e-06,
"loss": 1.0728,
"step": 969
},
{
"epoch": 0.7554517133956387,
"grad_norm": 0.271484375,
"learning_rate": 2.453271028037383e-06,
"loss": 1.0609,
"step": 970
},
{
"epoch": 0.7562305295950156,
"grad_norm": 0.26953125,
"learning_rate": 2.4454828660436138e-06,
"loss": 1.0688,
"step": 971
},
{
"epoch": 0.7570093457943925,
"grad_norm": 0.275390625,
"learning_rate": 2.4376947040498443e-06,
"loss": 1.105,
"step": 972
},
{
"epoch": 0.7577881619937694,
"grad_norm": 0.28125,
"learning_rate": 2.429906542056075e-06,
"loss": 1.083,
"step": 973
},
{
"epoch": 0.7585669781931464,
"grad_norm": 0.271484375,
"learning_rate": 2.4221183800623055e-06,
"loss": 1.0465,
"step": 974
},
{
"epoch": 0.7593457943925234,
"grad_norm": 0.263671875,
"learning_rate": 2.414330218068536e-06,
"loss": 1.0561,
"step": 975
},
{
"epoch": 0.7601246105919003,
"grad_norm": 0.28515625,
"learning_rate": 2.4065420560747666e-06,
"loss": 1.0869,
"step": 976
},
{
"epoch": 0.7609034267912772,
"grad_norm": 0.2890625,
"learning_rate": 2.398753894080997e-06,
"loss": 1.1077,
"step": 977
},
{
"epoch": 0.7616822429906542,
"grad_norm": 0.267578125,
"learning_rate": 2.3909657320872277e-06,
"loss": 1.0795,
"step": 978
},
{
"epoch": 0.7624610591900312,
"grad_norm": 0.26953125,
"learning_rate": 2.3831775700934583e-06,
"loss": 1.0455,
"step": 979
},
{
"epoch": 0.7632398753894081,
"grad_norm": 0.37109375,
"learning_rate": 2.375389408099689e-06,
"loss": 1.1021,
"step": 980
},
{
"epoch": 0.764018691588785,
"grad_norm": 0.306640625,
"learning_rate": 2.367601246105919e-06,
"loss": 1.0782,
"step": 981
},
{
"epoch": 0.764797507788162,
"grad_norm": 0.3515625,
"learning_rate": 2.3598130841121496e-06,
"loss": 1.072,
"step": 982
},
{
"epoch": 0.7655763239875389,
"grad_norm": 0.28125,
"learning_rate": 2.35202492211838e-06,
"loss": 1.0908,
"step": 983
},
{
"epoch": 0.7663551401869159,
"grad_norm": 0.30859375,
"learning_rate": 2.3442367601246107e-06,
"loss": 1.0494,
"step": 984
},
{
"epoch": 0.7671339563862928,
"grad_norm": 0.283203125,
"learning_rate": 2.3364485981308413e-06,
"loss": 1.1092,
"step": 985
},
{
"epoch": 0.7679127725856698,
"grad_norm": 0.3125,
"learning_rate": 2.328660436137072e-06,
"loss": 1.0738,
"step": 986
},
{
"epoch": 0.7686915887850467,
"grad_norm": 0.283203125,
"learning_rate": 2.3208722741433024e-06,
"loss": 1.0395,
"step": 987
},
{
"epoch": 0.7694704049844237,
"grad_norm": 0.28125,
"learning_rate": 2.313084112149533e-06,
"loss": 1.0608,
"step": 988
},
{
"epoch": 0.7702492211838006,
"grad_norm": 0.279296875,
"learning_rate": 2.3052959501557635e-06,
"loss": 1.0857,
"step": 989
},
{
"epoch": 0.7710280373831776,
"grad_norm": 0.306640625,
"learning_rate": 2.297507788161994e-06,
"loss": 1.0482,
"step": 990
},
{
"epoch": 0.7718068535825545,
"grad_norm": 0.2890625,
"learning_rate": 2.2897196261682247e-06,
"loss": 1.0162,
"step": 991
},
{
"epoch": 0.7725856697819314,
"grad_norm": 0.271484375,
"learning_rate": 2.281931464174455e-06,
"loss": 1.0705,
"step": 992
},
{
"epoch": 0.7733644859813084,
"grad_norm": 0.28515625,
"learning_rate": 2.2741433021806854e-06,
"loss": 1.0807,
"step": 993
},
{
"epoch": 0.7741433021806854,
"grad_norm": 0.271484375,
"learning_rate": 2.266355140186916e-06,
"loss": 1.0967,
"step": 994
},
{
"epoch": 0.7749221183800623,
"grad_norm": 0.2890625,
"learning_rate": 2.2585669781931465e-06,
"loss": 1.095,
"step": 995
},
{
"epoch": 0.7757009345794392,
"grad_norm": 0.271484375,
"learning_rate": 2.250778816199377e-06,
"loss": 1.0682,
"step": 996
},
{
"epoch": 0.7764797507788161,
"grad_norm": 0.26953125,
"learning_rate": 2.2429906542056077e-06,
"loss": 1.0553,
"step": 997
},
{
"epoch": 0.7772585669781932,
"grad_norm": 0.283203125,
"learning_rate": 2.2352024922118382e-06,
"loss": 1.0609,
"step": 998
},
{
"epoch": 0.7780373831775701,
"grad_norm": 0.283203125,
"learning_rate": 2.2274143302180688e-06,
"loss": 1.0786,
"step": 999
},
{
"epoch": 0.778816199376947,
"grad_norm": 0.310546875,
"learning_rate": 2.2196261682242994e-06,
"loss": 1.1305,
"step": 1000
},
{
"epoch": 0.7795950155763239,
"grad_norm": 0.28125,
"learning_rate": 2.21183800623053e-06,
"loss": 1.0582,
"step": 1001
},
{
"epoch": 0.780373831775701,
"grad_norm": 0.28125,
"learning_rate": 2.20404984423676e-06,
"loss": 1.0592,
"step": 1002
},
{
"epoch": 0.7811526479750779,
"grad_norm": 0.2734375,
"learning_rate": 2.1962616822429906e-06,
"loss": 1.0731,
"step": 1003
},
{
"epoch": 0.7819314641744548,
"grad_norm": 0.26953125,
"learning_rate": 2.188473520249221e-06,
"loss": 1.0455,
"step": 1004
},
{
"epoch": 0.7827102803738317,
"grad_norm": 0.267578125,
"learning_rate": 2.1806853582554518e-06,
"loss": 1.0798,
"step": 1005
},
{
"epoch": 0.7834890965732088,
"grad_norm": 0.310546875,
"learning_rate": 2.1728971962616823e-06,
"loss": 1.1043,
"step": 1006
},
{
"epoch": 0.7842679127725857,
"grad_norm": 0.283203125,
"learning_rate": 2.165109034267913e-06,
"loss": 1.0699,
"step": 1007
},
{
"epoch": 0.7850467289719626,
"grad_norm": 0.30078125,
"learning_rate": 2.1573208722741435e-06,
"loss": 1.1013,
"step": 1008
},
{
"epoch": 0.7858255451713395,
"grad_norm": 0.2890625,
"learning_rate": 2.149532710280374e-06,
"loss": 1.0368,
"step": 1009
},
{
"epoch": 0.7866043613707165,
"grad_norm": 0.28515625,
"learning_rate": 2.1417445482866046e-06,
"loss": 1.0598,
"step": 1010
},
{
"epoch": 0.7873831775700935,
"grad_norm": 0.310546875,
"learning_rate": 2.133956386292835e-06,
"loss": 1.0682,
"step": 1011
},
{
"epoch": 0.7881619937694704,
"grad_norm": 0.294921875,
"learning_rate": 2.1261682242990657e-06,
"loss": 1.0824,
"step": 1012
},
{
"epoch": 0.7889408099688473,
"grad_norm": 0.275390625,
"learning_rate": 2.118380062305296e-06,
"loss": 1.109,
"step": 1013
},
{
"epoch": 0.7897196261682243,
"grad_norm": 0.283203125,
"learning_rate": 2.1105919003115264e-06,
"loss": 1.0754,
"step": 1014
},
{
"epoch": 0.7904984423676013,
"grad_norm": 0.298828125,
"learning_rate": 2.102803738317757e-06,
"loss": 1.0478,
"step": 1015
},
{
"epoch": 0.7912772585669782,
"grad_norm": 0.31640625,
"learning_rate": 2.0950155763239876e-06,
"loss": 1.1105,
"step": 1016
},
{
"epoch": 0.7920560747663551,
"grad_norm": 0.26171875,
"learning_rate": 2.087227414330218e-06,
"loss": 1.0793,
"step": 1017
},
{
"epoch": 0.7928348909657321,
"grad_norm": 0.31640625,
"learning_rate": 2.0794392523364487e-06,
"loss": 1.0593,
"step": 1018
},
{
"epoch": 0.793613707165109,
"grad_norm": 0.265625,
"learning_rate": 2.0716510903426793e-06,
"loss": 1.0566,
"step": 1019
},
{
"epoch": 0.794392523364486,
"grad_norm": 0.271484375,
"learning_rate": 2.06386292834891e-06,
"loss": 1.0603,
"step": 1020
},
{
"epoch": 0.7951713395638629,
"grad_norm": 0.28125,
"learning_rate": 2.0560747663551404e-06,
"loss": 1.0768,
"step": 1021
},
{
"epoch": 0.7959501557632399,
"grad_norm": 0.279296875,
"learning_rate": 2.048286604361371e-06,
"loss": 1.097,
"step": 1022
},
{
"epoch": 0.7967289719626168,
"grad_norm": 0.275390625,
"learning_rate": 2.0404984423676016e-06,
"loss": 1.0644,
"step": 1023
},
{
"epoch": 0.7975077881619937,
"grad_norm": 0.3046875,
"learning_rate": 2.0327102803738317e-06,
"loss": 1.1248,
"step": 1024
},
{
"epoch": 0.7982866043613707,
"grad_norm": 0.302734375,
"learning_rate": 2.0249221183800623e-06,
"loss": 1.0827,
"step": 1025
},
{
"epoch": 0.7990654205607477,
"grad_norm": 0.263671875,
"learning_rate": 2.017133956386293e-06,
"loss": 1.0708,
"step": 1026
},
{
"epoch": 0.7998442367601246,
"grad_norm": 0.27734375,
"learning_rate": 2.0093457943925234e-06,
"loss": 1.1151,
"step": 1027
},
{
"epoch": 0.8006230529595015,
"grad_norm": 0.287109375,
"learning_rate": 2.001557632398754e-06,
"loss": 1.0491,
"step": 1028
},
{
"epoch": 0.8014018691588785,
"grad_norm": 0.267578125,
"learning_rate": 1.9937694704049845e-06,
"loss": 1.0879,
"step": 1029
},
{
"epoch": 0.8021806853582555,
"grad_norm": 0.28125,
"learning_rate": 1.985981308411215e-06,
"loss": 1.0623,
"step": 1030
},
{
"epoch": 0.8029595015576324,
"grad_norm": 0.28125,
"learning_rate": 1.9781931464174457e-06,
"loss": 1.0547,
"step": 1031
},
{
"epoch": 0.8037383177570093,
"grad_norm": 0.271484375,
"learning_rate": 1.9704049844236762e-06,
"loss": 1.0587,
"step": 1032
},
{
"epoch": 0.8045171339563862,
"grad_norm": 0.26953125,
"learning_rate": 1.962616822429907e-06,
"loss": 1.0816,
"step": 1033
},
{
"epoch": 0.8052959501557633,
"grad_norm": 0.28515625,
"learning_rate": 1.9548286604361374e-06,
"loss": 1.061,
"step": 1034
},
{
"epoch": 0.8060747663551402,
"grad_norm": 0.287109375,
"learning_rate": 1.9470404984423675e-06,
"loss": 1.0838,
"step": 1035
},
{
"epoch": 0.8068535825545171,
"grad_norm": 0.267578125,
"learning_rate": 1.939252336448598e-06,
"loss": 1.0983,
"step": 1036
},
{
"epoch": 0.807632398753894,
"grad_norm": 0.291015625,
"learning_rate": 1.9314641744548286e-06,
"loss": 1.1367,
"step": 1037
},
{
"epoch": 0.8084112149532711,
"grad_norm": 0.3046875,
"learning_rate": 1.9236760124610592e-06,
"loss": 1.052,
"step": 1038
},
{
"epoch": 0.809190031152648,
"grad_norm": 0.2734375,
"learning_rate": 1.9158878504672898e-06,
"loss": 1.0481,
"step": 1039
},
{
"epoch": 0.8099688473520249,
"grad_norm": 0.283203125,
"learning_rate": 1.9080996884735203e-06,
"loss": 1.0942,
"step": 1040
},
{
"epoch": 0.8107476635514018,
"grad_norm": 0.28125,
"learning_rate": 1.900311526479751e-06,
"loss": 1.0693,
"step": 1041
},
{
"epoch": 0.8115264797507789,
"grad_norm": 0.287109375,
"learning_rate": 1.8925233644859815e-06,
"loss": 1.0747,
"step": 1042
},
{
"epoch": 0.8123052959501558,
"grad_norm": 0.283203125,
"learning_rate": 1.8847352024922118e-06,
"loss": 1.0865,
"step": 1043
},
{
"epoch": 0.8130841121495327,
"grad_norm": 0.271484375,
"learning_rate": 1.8769470404984424e-06,
"loss": 1.0707,
"step": 1044
},
{
"epoch": 0.8138629283489096,
"grad_norm": 0.279296875,
"learning_rate": 1.869158878504673e-06,
"loss": 1.0922,
"step": 1045
},
{
"epoch": 0.8146417445482866,
"grad_norm": 0.275390625,
"learning_rate": 1.8613707165109035e-06,
"loss": 1.094,
"step": 1046
},
{
"epoch": 0.8154205607476636,
"grad_norm": 0.267578125,
"learning_rate": 1.8535825545171341e-06,
"loss": 1.078,
"step": 1047
},
{
"epoch": 0.8161993769470405,
"grad_norm": 0.271484375,
"learning_rate": 1.8457943925233645e-06,
"loss": 1.1071,
"step": 1048
},
{
"epoch": 0.8169781931464174,
"grad_norm": 0.265625,
"learning_rate": 1.838006230529595e-06,
"loss": 1.0571,
"step": 1049
},
{
"epoch": 0.8177570093457944,
"grad_norm": 0.271484375,
"learning_rate": 1.8302180685358256e-06,
"loss": 1.0966,
"step": 1050
},
{
"epoch": 0.8185358255451713,
"grad_norm": 0.345703125,
"learning_rate": 1.8224299065420562e-06,
"loss": 1.0905,
"step": 1051
},
{
"epoch": 0.8193146417445483,
"grad_norm": 0.298828125,
"learning_rate": 1.8146417445482867e-06,
"loss": 1.0238,
"step": 1052
},
{
"epoch": 0.8200934579439252,
"grad_norm": 0.28125,
"learning_rate": 1.8068535825545173e-06,
"loss": 1.0568,
"step": 1053
},
{
"epoch": 0.8208722741433022,
"grad_norm": 0.279296875,
"learning_rate": 1.7990654205607477e-06,
"loss": 1.0483,
"step": 1054
},
{
"epoch": 0.8216510903426791,
"grad_norm": 0.275390625,
"learning_rate": 1.7912772585669782e-06,
"loss": 1.0698,
"step": 1055
},
{
"epoch": 0.822429906542056,
"grad_norm": 0.265625,
"learning_rate": 1.7834890965732088e-06,
"loss": 1.0753,
"step": 1056
},
{
"epoch": 0.823208722741433,
"grad_norm": 0.279296875,
"learning_rate": 1.7757009345794394e-06,
"loss": 1.0779,
"step": 1057
},
{
"epoch": 0.82398753894081,
"grad_norm": 0.265625,
"learning_rate": 1.76791277258567e-06,
"loss": 1.0729,
"step": 1058
},
{
"epoch": 0.8247663551401869,
"grad_norm": 0.283203125,
"learning_rate": 1.7601246105919003e-06,
"loss": 1.0586,
"step": 1059
},
{
"epoch": 0.8255451713395638,
"grad_norm": 0.275390625,
"learning_rate": 1.7523364485981308e-06,
"loss": 1.084,
"step": 1060
},
{
"epoch": 0.8263239875389408,
"grad_norm": 0.388671875,
"learning_rate": 1.7445482866043614e-06,
"loss": 1.0624,
"step": 1061
},
{
"epoch": 0.8271028037383178,
"grad_norm": 0.296875,
"learning_rate": 1.736760124610592e-06,
"loss": 1.1234,
"step": 1062
},
{
"epoch": 0.8278816199376947,
"grad_norm": 0.29296875,
"learning_rate": 1.7289719626168225e-06,
"loss": 1.0896,
"step": 1063
},
{
"epoch": 0.8286604361370716,
"grad_norm": 0.28125,
"learning_rate": 1.7211838006230531e-06,
"loss": 1.0311,
"step": 1064
},
{
"epoch": 0.8294392523364486,
"grad_norm": 0.275390625,
"learning_rate": 1.7133956386292835e-06,
"loss": 1.0578,
"step": 1065
},
{
"epoch": 0.8302180685358256,
"grad_norm": 0.2890625,
"learning_rate": 1.705607476635514e-06,
"loss": 1.1064,
"step": 1066
},
{
"epoch": 0.8309968847352025,
"grad_norm": 0.28515625,
"learning_rate": 1.6978193146417446e-06,
"loss": 1.0446,
"step": 1067
},
{
"epoch": 0.8317757009345794,
"grad_norm": 0.27734375,
"learning_rate": 1.6900311526479752e-06,
"loss": 1.1051,
"step": 1068
},
{
"epoch": 0.8325545171339563,
"grad_norm": 0.287109375,
"learning_rate": 1.6822429906542057e-06,
"loss": 1.0707,
"step": 1069
},
{
"epoch": 0.8333333333333334,
"grad_norm": 0.29296875,
"learning_rate": 1.674454828660436e-06,
"loss": 1.1383,
"step": 1070
},
{
"epoch": 0.8341121495327103,
"grad_norm": 0.275390625,
"learning_rate": 1.6666666666666667e-06,
"loss": 1.1132,
"step": 1071
},
{
"epoch": 0.8348909657320872,
"grad_norm": 0.26171875,
"learning_rate": 1.6588785046728972e-06,
"loss": 1.0633,
"step": 1072
},
{
"epoch": 0.8356697819314641,
"grad_norm": 0.27734375,
"learning_rate": 1.6510903426791278e-06,
"loss": 1.0344,
"step": 1073
},
{
"epoch": 0.8364485981308412,
"grad_norm": 0.28515625,
"learning_rate": 1.6433021806853584e-06,
"loss": 1.0874,
"step": 1074
},
{
"epoch": 0.8372274143302181,
"grad_norm": 0.275390625,
"learning_rate": 1.6355140186915887e-06,
"loss": 1.0907,
"step": 1075
},
{
"epoch": 0.838006230529595,
"grad_norm": 0.296875,
"learning_rate": 1.6277258566978193e-06,
"loss": 1.0898,
"step": 1076
},
{
"epoch": 0.8387850467289719,
"grad_norm": 0.279296875,
"learning_rate": 1.6199376947040499e-06,
"loss": 1.0697,
"step": 1077
},
{
"epoch": 0.839563862928349,
"grad_norm": 0.279296875,
"learning_rate": 1.6121495327102804e-06,
"loss": 1.0724,
"step": 1078
},
{
"epoch": 0.8403426791277259,
"grad_norm": 0.271484375,
"learning_rate": 1.604361370716511e-06,
"loss": 1.093,
"step": 1079
},
{
"epoch": 0.8411214953271028,
"grad_norm": 0.279296875,
"learning_rate": 1.5965732087227416e-06,
"loss": 1.0362,
"step": 1080
},
{
"epoch": 0.8419003115264797,
"grad_norm": 0.28515625,
"learning_rate": 1.588785046728972e-06,
"loss": 1.0821,
"step": 1081
},
{
"epoch": 0.8426791277258567,
"grad_norm": 0.275390625,
"learning_rate": 1.5809968847352025e-06,
"loss": 1.0582,
"step": 1082
},
{
"epoch": 0.8434579439252337,
"grad_norm": 0.2734375,
"learning_rate": 1.573208722741433e-06,
"loss": 1.0723,
"step": 1083
},
{
"epoch": 0.8442367601246106,
"grad_norm": 0.271484375,
"learning_rate": 1.5654205607476636e-06,
"loss": 1.1089,
"step": 1084
},
{
"epoch": 0.8450155763239875,
"grad_norm": 0.287109375,
"learning_rate": 1.5576323987538942e-06,
"loss": 1.0645,
"step": 1085
},
{
"epoch": 0.8457943925233645,
"grad_norm": 0.271484375,
"learning_rate": 1.5498442367601245e-06,
"loss": 1.1197,
"step": 1086
},
{
"epoch": 0.8465732087227414,
"grad_norm": 0.296875,
"learning_rate": 1.542056074766355e-06,
"loss": 1.0947,
"step": 1087
},
{
"epoch": 0.8473520249221184,
"grad_norm": 0.3125,
"learning_rate": 1.5342679127725857e-06,
"loss": 1.0725,
"step": 1088
},
{
"epoch": 0.8481308411214953,
"grad_norm": 0.267578125,
"learning_rate": 1.5264797507788162e-06,
"loss": 1.08,
"step": 1089
},
{
"epoch": 0.8489096573208723,
"grad_norm": 0.263671875,
"learning_rate": 1.5186915887850468e-06,
"loss": 1.0778,
"step": 1090
},
{
"epoch": 0.8496884735202492,
"grad_norm": 0.2734375,
"learning_rate": 1.5109034267912774e-06,
"loss": 1.1328,
"step": 1091
},
{
"epoch": 0.8504672897196262,
"grad_norm": 0.28515625,
"learning_rate": 1.5031152647975077e-06,
"loss": 1.0767,
"step": 1092
},
{
"epoch": 0.8512461059190031,
"grad_norm": 0.27734375,
"learning_rate": 1.4953271028037383e-06,
"loss": 1.0707,
"step": 1093
},
{
"epoch": 0.8520249221183801,
"grad_norm": 0.28515625,
"learning_rate": 1.4875389408099689e-06,
"loss": 1.0941,
"step": 1094
},
{
"epoch": 0.852803738317757,
"grad_norm": 0.298828125,
"learning_rate": 1.4797507788161994e-06,
"loss": 1.0418,
"step": 1095
},
{
"epoch": 0.8535825545171339,
"grad_norm": 0.2734375,
"learning_rate": 1.47196261682243e-06,
"loss": 1.0696,
"step": 1096
},
{
"epoch": 0.8543613707165109,
"grad_norm": 0.2734375,
"learning_rate": 1.4641744548286604e-06,
"loss": 1.03,
"step": 1097
},
{
"epoch": 0.8551401869158879,
"grad_norm": 0.310546875,
"learning_rate": 1.456386292834891e-06,
"loss": 1.0423,
"step": 1098
},
{
"epoch": 0.8559190031152648,
"grad_norm": 0.271484375,
"learning_rate": 1.4485981308411215e-06,
"loss": 1.0602,
"step": 1099
},
{
"epoch": 0.8566978193146417,
"grad_norm": 0.328125,
"learning_rate": 1.440809968847352e-06,
"loss": 1.0557,
"step": 1100
},
{
"epoch": 0.8574766355140186,
"grad_norm": 0.283203125,
"learning_rate": 1.4330218068535826e-06,
"loss": 1.1051,
"step": 1101
},
{
"epoch": 0.8582554517133957,
"grad_norm": 0.279296875,
"learning_rate": 1.4252336448598132e-06,
"loss": 1.0574,
"step": 1102
},
{
"epoch": 0.8590342679127726,
"grad_norm": 0.27734375,
"learning_rate": 1.4174454828660435e-06,
"loss": 1.0739,
"step": 1103
},
{
"epoch": 0.8598130841121495,
"grad_norm": 0.2734375,
"learning_rate": 1.4096573208722741e-06,
"loss": 1.0463,
"step": 1104
},
{
"epoch": 0.8605919003115264,
"grad_norm": 0.28125,
"learning_rate": 1.4018691588785047e-06,
"loss": 1.0939,
"step": 1105
},
{
"epoch": 0.8613707165109035,
"grad_norm": 0.26953125,
"learning_rate": 1.3940809968847352e-06,
"loss": 1.0699,
"step": 1106
},
{
"epoch": 0.8621495327102804,
"grad_norm": 0.271484375,
"learning_rate": 1.3862928348909658e-06,
"loss": 1.0692,
"step": 1107
},
{
"epoch": 0.8629283489096573,
"grad_norm": 0.287109375,
"learning_rate": 1.3785046728971962e-06,
"loss": 1.0325,
"step": 1108
},
{
"epoch": 0.8637071651090342,
"grad_norm": 0.265625,
"learning_rate": 1.3707165109034267e-06,
"loss": 1.0629,
"step": 1109
},
{
"epoch": 0.8644859813084113,
"grad_norm": 0.271484375,
"learning_rate": 1.3629283489096573e-06,
"loss": 1.0137,
"step": 1110
},
{
"epoch": 0.8652647975077882,
"grad_norm": 0.29296875,
"learning_rate": 1.3551401869158879e-06,
"loss": 1.1216,
"step": 1111
},
{
"epoch": 0.8660436137071651,
"grad_norm": 0.27734375,
"learning_rate": 1.3473520249221184e-06,
"loss": 1.0467,
"step": 1112
},
{
"epoch": 0.866822429906542,
"grad_norm": 0.43359375,
"learning_rate": 1.3395638629283488e-06,
"loss": 1.0949,
"step": 1113
},
{
"epoch": 0.867601246105919,
"grad_norm": 0.275390625,
"learning_rate": 1.3317757009345794e-06,
"loss": 1.0537,
"step": 1114
},
{
"epoch": 0.868380062305296,
"grad_norm": 0.26953125,
"learning_rate": 1.32398753894081e-06,
"loss": 1.0503,
"step": 1115
},
{
"epoch": 0.8691588785046729,
"grad_norm": 0.28515625,
"learning_rate": 1.3161993769470405e-06,
"loss": 1.048,
"step": 1116
},
{
"epoch": 0.8699376947040498,
"grad_norm": 0.267578125,
"learning_rate": 1.308411214953271e-06,
"loss": 1.0469,
"step": 1117
},
{
"epoch": 0.8707165109034268,
"grad_norm": 0.271484375,
"learning_rate": 1.3006230529595016e-06,
"loss": 1.035,
"step": 1118
},
{
"epoch": 0.8714953271028038,
"grad_norm": 0.265625,
"learning_rate": 1.292834890965732e-06,
"loss": 1.0704,
"step": 1119
},
{
"epoch": 0.8722741433021807,
"grad_norm": 0.27734375,
"learning_rate": 1.2850467289719625e-06,
"loss": 1.073,
"step": 1120
},
{
"epoch": 0.8730529595015576,
"grad_norm": 0.291015625,
"learning_rate": 1.2772585669781931e-06,
"loss": 1.093,
"step": 1121
},
{
"epoch": 0.8738317757009346,
"grad_norm": 0.28125,
"learning_rate": 1.2694704049844237e-06,
"loss": 1.0747,
"step": 1122
},
{
"epoch": 0.8746105919003115,
"grad_norm": 0.271484375,
"learning_rate": 1.2616822429906543e-06,
"loss": 1.0664,
"step": 1123
},
{
"epoch": 0.8753894080996885,
"grad_norm": 0.3046875,
"learning_rate": 1.2538940809968846e-06,
"loss": 1.133,
"step": 1124
},
{
"epoch": 0.8761682242990654,
"grad_norm": 0.275390625,
"learning_rate": 1.2461059190031154e-06,
"loss": 1.093,
"step": 1125
},
{
"epoch": 0.8769470404984424,
"grad_norm": 0.2734375,
"learning_rate": 1.238317757009346e-06,
"loss": 1.0688,
"step": 1126
},
{
"epoch": 0.8777258566978193,
"grad_norm": 0.28515625,
"learning_rate": 1.2305295950155765e-06,
"loss": 1.1059,
"step": 1127
},
{
"epoch": 0.8785046728971962,
"grad_norm": 0.2734375,
"learning_rate": 1.2227414330218069e-06,
"loss": 1.0934,
"step": 1128
},
{
"epoch": 0.8792834890965732,
"grad_norm": 0.287109375,
"learning_rate": 1.2149532710280374e-06,
"loss": 1.0335,
"step": 1129
},
{
"epoch": 0.8800623052959502,
"grad_norm": 0.271484375,
"learning_rate": 1.207165109034268e-06,
"loss": 1.0671,
"step": 1130
},
{
"epoch": 0.8808411214953271,
"grad_norm": 0.2734375,
"learning_rate": 1.1993769470404986e-06,
"loss": 1.0819,
"step": 1131
},
{
"epoch": 0.881619937694704,
"grad_norm": 0.30078125,
"learning_rate": 1.1915887850467291e-06,
"loss": 1.0344,
"step": 1132
},
{
"epoch": 0.882398753894081,
"grad_norm": 0.26953125,
"learning_rate": 1.1838006230529595e-06,
"loss": 1.0866,
"step": 1133
},
{
"epoch": 0.883177570093458,
"grad_norm": 0.28125,
"learning_rate": 1.17601246105919e-06,
"loss": 1.0435,
"step": 1134
},
{
"epoch": 0.8839563862928349,
"grad_norm": 0.267578125,
"learning_rate": 1.1682242990654206e-06,
"loss": 1.0919,
"step": 1135
},
{
"epoch": 0.8847352024922118,
"grad_norm": 0.27734375,
"learning_rate": 1.1604361370716512e-06,
"loss": 1.0809,
"step": 1136
},
{
"epoch": 0.8855140186915887,
"grad_norm": 0.29296875,
"learning_rate": 1.1526479750778818e-06,
"loss": 1.0739,
"step": 1137
},
{
"epoch": 0.8862928348909658,
"grad_norm": 0.267578125,
"learning_rate": 1.1448598130841123e-06,
"loss": 1.081,
"step": 1138
},
{
"epoch": 0.8870716510903427,
"grad_norm": 0.267578125,
"learning_rate": 1.1370716510903427e-06,
"loss": 1.0543,
"step": 1139
},
{
"epoch": 0.8878504672897196,
"grad_norm": 0.2890625,
"learning_rate": 1.1292834890965733e-06,
"loss": 1.0804,
"step": 1140
},
{
"epoch": 0.8886292834890965,
"grad_norm": 0.30078125,
"learning_rate": 1.1214953271028038e-06,
"loss": 1.1403,
"step": 1141
},
{
"epoch": 0.8894080996884736,
"grad_norm": 0.265625,
"learning_rate": 1.1137071651090344e-06,
"loss": 1.0689,
"step": 1142
},
{
"epoch": 0.8901869158878505,
"grad_norm": 0.263671875,
"learning_rate": 1.105919003115265e-06,
"loss": 1.1076,
"step": 1143
},
{
"epoch": 0.8909657320872274,
"grad_norm": 0.267578125,
"learning_rate": 1.0981308411214953e-06,
"loss": 1.0502,
"step": 1144
},
{
"epoch": 0.8917445482866043,
"grad_norm": 0.263671875,
"learning_rate": 1.0903426791277259e-06,
"loss": 1.0607,
"step": 1145
},
{
"epoch": 0.8925233644859814,
"grad_norm": 0.298828125,
"learning_rate": 1.0825545171339565e-06,
"loss": 1.1002,
"step": 1146
},
{
"epoch": 0.8933021806853583,
"grad_norm": 0.279296875,
"learning_rate": 1.074766355140187e-06,
"loss": 1.0828,
"step": 1147
},
{
"epoch": 0.8940809968847352,
"grad_norm": 0.498046875,
"learning_rate": 1.0669781931464176e-06,
"loss": 1.0308,
"step": 1148
},
{
"epoch": 0.8948598130841121,
"grad_norm": 0.263671875,
"learning_rate": 1.059190031152648e-06,
"loss": 1.0364,
"step": 1149
},
{
"epoch": 0.8956386292834891,
"grad_norm": 0.267578125,
"learning_rate": 1.0514018691588785e-06,
"loss": 1.059,
"step": 1150
},
{
"epoch": 0.8964174454828661,
"grad_norm": 0.267578125,
"learning_rate": 1.043613707165109e-06,
"loss": 1.0848,
"step": 1151
},
{
"epoch": 0.897196261682243,
"grad_norm": 0.271484375,
"learning_rate": 1.0358255451713396e-06,
"loss": 1.0739,
"step": 1152
},
{
"epoch": 0.8979750778816199,
"grad_norm": 0.3125,
"learning_rate": 1.0280373831775702e-06,
"loss": 1.094,
"step": 1153
},
{
"epoch": 0.8987538940809969,
"grad_norm": 0.267578125,
"learning_rate": 1.0202492211838008e-06,
"loss": 1.0604,
"step": 1154
},
{
"epoch": 0.8995327102803738,
"grad_norm": 0.267578125,
"learning_rate": 1.0124610591900311e-06,
"loss": 1.067,
"step": 1155
},
{
"epoch": 0.9003115264797508,
"grad_norm": 0.271484375,
"learning_rate": 1.0046728971962617e-06,
"loss": 1.0705,
"step": 1156
},
{
"epoch": 0.9010903426791277,
"grad_norm": 0.283203125,
"learning_rate": 9.968847352024923e-07,
"loss": 1.0592,
"step": 1157
},
{
"epoch": 0.9018691588785047,
"grad_norm": 0.296875,
"learning_rate": 9.890965732087228e-07,
"loss": 1.0604,
"step": 1158
},
{
"epoch": 0.9026479750778816,
"grad_norm": 0.26953125,
"learning_rate": 9.813084112149534e-07,
"loss": 1.039,
"step": 1159
},
{
"epoch": 0.9034267912772586,
"grad_norm": 0.283203125,
"learning_rate": 9.735202492211838e-07,
"loss": 1.059,
"step": 1160
},
{
"epoch": 0.9042056074766355,
"grad_norm": 0.2578125,
"learning_rate": 9.657320872274143e-07,
"loss": 1.0534,
"step": 1161
},
{
"epoch": 0.9049844236760125,
"grad_norm": 0.26171875,
"learning_rate": 9.579439252336449e-07,
"loss": 1.0425,
"step": 1162
},
{
"epoch": 0.9057632398753894,
"grad_norm": 0.291015625,
"learning_rate": 9.501557632398755e-07,
"loss": 1.0795,
"step": 1163
},
{
"epoch": 0.9065420560747663,
"grad_norm": 0.267578125,
"learning_rate": 9.423676012461059e-07,
"loss": 1.0471,
"step": 1164
},
{
"epoch": 0.9073208722741433,
"grad_norm": 0.287109375,
"learning_rate": 9.345794392523365e-07,
"loss": 1.0672,
"step": 1165
},
{
"epoch": 0.9080996884735203,
"grad_norm": 0.28515625,
"learning_rate": 9.267912772585671e-07,
"loss": 1.0938,
"step": 1166
},
{
"epoch": 0.9088785046728972,
"grad_norm": 0.26953125,
"learning_rate": 9.190031152647975e-07,
"loss": 1.0857,
"step": 1167
},
{
"epoch": 0.9096573208722741,
"grad_norm": 0.291015625,
"learning_rate": 9.112149532710281e-07,
"loss": 1.0945,
"step": 1168
},
{
"epoch": 0.910436137071651,
"grad_norm": 0.33203125,
"learning_rate": 9.034267912772586e-07,
"loss": 1.0503,
"step": 1169
},
{
"epoch": 0.9112149532710281,
"grad_norm": 0.298828125,
"learning_rate": 8.956386292834891e-07,
"loss": 1.1122,
"step": 1170
},
{
"epoch": 0.911993769470405,
"grad_norm": 0.267578125,
"learning_rate": 8.878504672897197e-07,
"loss": 1.0577,
"step": 1171
},
{
"epoch": 0.9127725856697819,
"grad_norm": 0.267578125,
"learning_rate": 8.800623052959501e-07,
"loss": 1.0803,
"step": 1172
},
{
"epoch": 0.9135514018691588,
"grad_norm": 0.26171875,
"learning_rate": 8.722741433021807e-07,
"loss": 1.0948,
"step": 1173
},
{
"epoch": 0.9143302180685359,
"grad_norm": 0.27734375,
"learning_rate": 8.644859813084113e-07,
"loss": 1.0465,
"step": 1174
},
{
"epoch": 0.9151090342679128,
"grad_norm": 0.28515625,
"learning_rate": 8.566978193146417e-07,
"loss": 1.1023,
"step": 1175
},
{
"epoch": 0.9158878504672897,
"grad_norm": 0.279296875,
"learning_rate": 8.489096573208723e-07,
"loss": 1.0748,
"step": 1176
},
{
"epoch": 0.9166666666666666,
"grad_norm": 0.27734375,
"learning_rate": 8.411214953271029e-07,
"loss": 1.105,
"step": 1177
},
{
"epoch": 0.9174454828660437,
"grad_norm": 0.27734375,
"learning_rate": 8.333333333333333e-07,
"loss": 1.0839,
"step": 1178
},
{
"epoch": 0.9182242990654206,
"grad_norm": 0.287109375,
"learning_rate": 8.255451713395639e-07,
"loss": 1.0916,
"step": 1179
},
{
"epoch": 0.9190031152647975,
"grad_norm": 0.294921875,
"learning_rate": 8.177570093457944e-07,
"loss": 1.0865,
"step": 1180
},
{
"epoch": 0.9197819314641744,
"grad_norm": 0.30078125,
"learning_rate": 8.099688473520249e-07,
"loss": 1.0364,
"step": 1181
},
{
"epoch": 0.9205607476635514,
"grad_norm": 0.263671875,
"learning_rate": 8.021806853582555e-07,
"loss": 1.0969,
"step": 1182
},
{
"epoch": 0.9213395638629284,
"grad_norm": 0.26171875,
"learning_rate": 7.94392523364486e-07,
"loss": 1.1009,
"step": 1183
},
{
"epoch": 0.9221183800623053,
"grad_norm": 0.259765625,
"learning_rate": 7.866043613707165e-07,
"loss": 1.0803,
"step": 1184
},
{
"epoch": 0.9228971962616822,
"grad_norm": 0.283203125,
"learning_rate": 7.788161993769471e-07,
"loss": 1.0411,
"step": 1185
},
{
"epoch": 0.9236760124610592,
"grad_norm": 0.265625,
"learning_rate": 7.710280373831776e-07,
"loss": 1.0423,
"step": 1186
},
{
"epoch": 0.9244548286604362,
"grad_norm": 0.2734375,
"learning_rate": 7.632398753894081e-07,
"loss": 1.0707,
"step": 1187
},
{
"epoch": 0.9252336448598131,
"grad_norm": 0.259765625,
"learning_rate": 7.554517133956387e-07,
"loss": 1.0505,
"step": 1188
},
{
"epoch": 0.92601246105919,
"grad_norm": 0.265625,
"learning_rate": 7.476635514018691e-07,
"loss": 1.0622,
"step": 1189
},
{
"epoch": 0.926791277258567,
"grad_norm": 0.333984375,
"learning_rate": 7.398753894080997e-07,
"loss": 1.0766,
"step": 1190
},
{
"epoch": 0.927570093457944,
"grad_norm": 0.28515625,
"learning_rate": 7.320872274143302e-07,
"loss": 1.0526,
"step": 1191
},
{
"epoch": 0.9283489096573209,
"grad_norm": 0.28125,
"learning_rate": 7.242990654205607e-07,
"loss": 1.0884,
"step": 1192
},
{
"epoch": 0.9291277258566978,
"grad_norm": 0.26171875,
"learning_rate": 7.165109034267913e-07,
"loss": 1.0597,
"step": 1193
},
{
"epoch": 0.9299065420560748,
"grad_norm": 0.28515625,
"learning_rate": 7.087227414330218e-07,
"loss": 1.1076,
"step": 1194
},
{
"epoch": 0.9306853582554517,
"grad_norm": 0.291015625,
"learning_rate": 7.009345794392523e-07,
"loss": 1.0614,
"step": 1195
},
{
"epoch": 0.9314641744548287,
"grad_norm": 0.259765625,
"learning_rate": 6.931464174454829e-07,
"loss": 1.0621,
"step": 1196
},
{
"epoch": 0.9322429906542056,
"grad_norm": 0.2734375,
"learning_rate": 6.853582554517134e-07,
"loss": 1.1084,
"step": 1197
},
{
"epoch": 0.9330218068535826,
"grad_norm": 0.271484375,
"learning_rate": 6.775700934579439e-07,
"loss": 1.0745,
"step": 1198
},
{
"epoch": 0.9338006230529595,
"grad_norm": 0.294921875,
"learning_rate": 6.697819314641744e-07,
"loss": 1.1045,
"step": 1199
},
{
"epoch": 0.9345794392523364,
"grad_norm": 0.275390625,
"learning_rate": 6.61993769470405e-07,
"loss": 1.0576,
"step": 1200
},
{
"epoch": 0.9353582554517134,
"grad_norm": 0.2890625,
"learning_rate": 6.542056074766355e-07,
"loss": 1.0172,
"step": 1201
},
{
"epoch": 0.9361370716510904,
"grad_norm": 0.275390625,
"learning_rate": 6.46417445482866e-07,
"loss": 1.062,
"step": 1202
},
{
"epoch": 0.9369158878504673,
"grad_norm": 0.287109375,
"learning_rate": 6.386292834890966e-07,
"loss": 1.0541,
"step": 1203
},
{
"epoch": 0.9376947040498442,
"grad_norm": 0.26953125,
"learning_rate": 6.308411214953271e-07,
"loss": 1.0774,
"step": 1204
},
{
"epoch": 0.9384735202492211,
"grad_norm": 0.26953125,
"learning_rate": 6.230529595015577e-07,
"loss": 1.0569,
"step": 1205
},
{
"epoch": 0.9392523364485982,
"grad_norm": 0.291015625,
"learning_rate": 6.152647975077883e-07,
"loss": 1.0601,
"step": 1206
},
{
"epoch": 0.9400311526479751,
"grad_norm": 0.2734375,
"learning_rate": 6.074766355140187e-07,
"loss": 1.0928,
"step": 1207
},
{
"epoch": 0.940809968847352,
"grad_norm": 0.279296875,
"learning_rate": 5.996884735202493e-07,
"loss": 1.11,
"step": 1208
},
{
"epoch": 0.9415887850467289,
"grad_norm": 0.30859375,
"learning_rate": 5.919003115264798e-07,
"loss": 1.1169,
"step": 1209
},
{
"epoch": 0.942367601246106,
"grad_norm": 0.267578125,
"learning_rate": 5.841121495327103e-07,
"loss": 1.061,
"step": 1210
},
{
"epoch": 0.9431464174454829,
"grad_norm": 0.2578125,
"learning_rate": 5.763239875389409e-07,
"loss": 1.0646,
"step": 1211
},
{
"epoch": 0.9439252336448598,
"grad_norm": 0.28125,
"learning_rate": 5.685358255451713e-07,
"loss": 1.075,
"step": 1212
},
{
"epoch": 0.9447040498442367,
"grad_norm": 0.28125,
"learning_rate": 5.607476635514019e-07,
"loss": 1.0536,
"step": 1213
},
{
"epoch": 0.9454828660436138,
"grad_norm": 0.263671875,
"learning_rate": 5.529595015576325e-07,
"loss": 1.026,
"step": 1214
},
{
"epoch": 0.9462616822429907,
"grad_norm": 0.2734375,
"learning_rate": 5.451713395638629e-07,
"loss": 1.0888,
"step": 1215
},
{
"epoch": 0.9470404984423676,
"grad_norm": 0.267578125,
"learning_rate": 5.373831775700935e-07,
"loss": 1.0602,
"step": 1216
},
{
"epoch": 0.9478193146417445,
"grad_norm": 0.271484375,
"learning_rate": 5.29595015576324e-07,
"loss": 1.0888,
"step": 1217
},
{
"epoch": 0.9485981308411215,
"grad_norm": 0.27734375,
"learning_rate": 5.218068535825545e-07,
"loss": 1.0759,
"step": 1218
},
{
"epoch": 0.9493769470404985,
"grad_norm": 0.330078125,
"learning_rate": 5.140186915887851e-07,
"loss": 1.0593,
"step": 1219
},
{
"epoch": 0.9501557632398754,
"grad_norm": 0.2734375,
"learning_rate": 5.062305295950156e-07,
"loss": 1.0613,
"step": 1220
},
{
"epoch": 0.9509345794392523,
"grad_norm": 0.271484375,
"learning_rate": 4.984423676012461e-07,
"loss": 1.0441,
"step": 1221
},
{
"epoch": 0.9517133956386293,
"grad_norm": 0.298828125,
"learning_rate": 4.906542056074767e-07,
"loss": 1.1101,
"step": 1222
},
{
"epoch": 0.9524922118380063,
"grad_norm": 0.267578125,
"learning_rate": 4.828660436137072e-07,
"loss": 1.0705,
"step": 1223
},
{
"epoch": 0.9532710280373832,
"grad_norm": 0.275390625,
"learning_rate": 4.7507788161993773e-07,
"loss": 1.0948,
"step": 1224
},
{
"epoch": 0.9540498442367601,
"grad_norm": 0.2734375,
"learning_rate": 4.6728971962616824e-07,
"loss": 1.052,
"step": 1225
},
{
"epoch": 0.9548286604361371,
"grad_norm": 0.271484375,
"learning_rate": 4.5950155763239876e-07,
"loss": 1.074,
"step": 1226
},
{
"epoch": 0.955607476635514,
"grad_norm": 0.31640625,
"learning_rate": 4.517133956386293e-07,
"loss": 1.0489,
"step": 1227
},
{
"epoch": 0.956386292834891,
"grad_norm": 0.2734375,
"learning_rate": 4.4392523364485984e-07,
"loss": 1.0756,
"step": 1228
},
{
"epoch": 0.9571651090342679,
"grad_norm": 0.265625,
"learning_rate": 4.3613707165109035e-07,
"loss": 1.0888,
"step": 1229
},
{
"epoch": 0.9579439252336449,
"grad_norm": 0.314453125,
"learning_rate": 4.2834890965732087e-07,
"loss": 1.0659,
"step": 1230
},
{
"epoch": 0.9587227414330218,
"grad_norm": 0.279296875,
"learning_rate": 4.2056074766355143e-07,
"loss": 1.0429,
"step": 1231
},
{
"epoch": 0.9595015576323987,
"grad_norm": 0.263671875,
"learning_rate": 4.1277258566978195e-07,
"loss": 1.0752,
"step": 1232
},
{
"epoch": 0.9602803738317757,
"grad_norm": 0.302734375,
"learning_rate": 4.0498442367601246e-07,
"loss": 1.1212,
"step": 1233
},
{
"epoch": 0.9610591900311527,
"grad_norm": 0.2734375,
"learning_rate": 3.97196261682243e-07,
"loss": 1.0996,
"step": 1234
},
{
"epoch": 0.9618380062305296,
"grad_norm": 0.2734375,
"learning_rate": 3.8940809968847354e-07,
"loss": 1.0702,
"step": 1235
},
{
"epoch": 0.9626168224299065,
"grad_norm": 0.265625,
"learning_rate": 3.8161993769470406e-07,
"loss": 1.0875,
"step": 1236
},
{
"epoch": 0.9633956386292835,
"grad_norm": 0.296875,
"learning_rate": 3.7383177570093457e-07,
"loss": 1.1282,
"step": 1237
},
{
"epoch": 0.9641744548286605,
"grad_norm": 0.26953125,
"learning_rate": 3.660436137071651e-07,
"loss": 1.0588,
"step": 1238
},
{
"epoch": 0.9649532710280374,
"grad_norm": 0.271484375,
"learning_rate": 3.5825545171339565e-07,
"loss": 1.0821,
"step": 1239
},
{
"epoch": 0.9657320872274143,
"grad_norm": 0.271484375,
"learning_rate": 3.5046728971962617e-07,
"loss": 1.0842,
"step": 1240
},
{
"epoch": 0.9665109034267912,
"grad_norm": 0.271484375,
"learning_rate": 3.426791277258567e-07,
"loss": 1.0429,
"step": 1241
},
{
"epoch": 0.9672897196261683,
"grad_norm": 0.26953125,
"learning_rate": 3.348909657320872e-07,
"loss": 1.0732,
"step": 1242
},
{
"epoch": 0.9680685358255452,
"grad_norm": 0.2734375,
"learning_rate": 3.2710280373831776e-07,
"loss": 1.0405,
"step": 1243
},
{
"epoch": 0.9688473520249221,
"grad_norm": 0.271484375,
"learning_rate": 3.193146417445483e-07,
"loss": 1.0512,
"step": 1244
},
{
"epoch": 0.969626168224299,
"grad_norm": 0.27734375,
"learning_rate": 3.1152647975077885e-07,
"loss": 1.0618,
"step": 1245
},
{
"epoch": 0.9704049844236761,
"grad_norm": 0.259765625,
"learning_rate": 3.0373831775700936e-07,
"loss": 1.0691,
"step": 1246
},
{
"epoch": 0.971183800623053,
"grad_norm": 0.27734375,
"learning_rate": 2.959501557632399e-07,
"loss": 1.075,
"step": 1247
},
{
"epoch": 0.9719626168224299,
"grad_norm": 0.275390625,
"learning_rate": 2.8816199376947044e-07,
"loss": 1.055,
"step": 1248
},
{
"epoch": 0.9727414330218068,
"grad_norm": 0.314453125,
"learning_rate": 2.8037383177570096e-07,
"loss": 1.1469,
"step": 1249
},
{
"epoch": 0.9735202492211839,
"grad_norm": 0.330078125,
"learning_rate": 2.7258566978193147e-07,
"loss": 1.1009,
"step": 1250
},
{
"epoch": 0.9742990654205608,
"grad_norm": 0.27734375,
"learning_rate": 2.64797507788162e-07,
"loss": 1.0503,
"step": 1251
},
{
"epoch": 0.9750778816199377,
"grad_norm": 0.275390625,
"learning_rate": 2.5700934579439255e-07,
"loss": 1.0675,
"step": 1252
},
{
"epoch": 0.9758566978193146,
"grad_norm": 0.25390625,
"learning_rate": 2.4922118380062307e-07,
"loss": 1.0565,
"step": 1253
},
{
"epoch": 0.9766355140186916,
"grad_norm": 0.28515625,
"learning_rate": 2.414330218068536e-07,
"loss": 1.092,
"step": 1254
},
{
"epoch": 0.9774143302180686,
"grad_norm": 0.26953125,
"learning_rate": 2.3364485981308412e-07,
"loss": 1.1218,
"step": 1255
},
{
"epoch": 0.9781931464174455,
"grad_norm": 0.46875,
"learning_rate": 2.2585669781931466e-07,
"loss": 1.1286,
"step": 1256
},
{
"epoch": 0.9789719626168224,
"grad_norm": 0.306640625,
"learning_rate": 2.1806853582554518e-07,
"loss": 1.1221,
"step": 1257
},
{
"epoch": 0.9797507788161994,
"grad_norm": 0.359375,
"learning_rate": 2.1028037383177572e-07,
"loss": 1.0966,
"step": 1258
},
{
"epoch": 0.9805295950155763,
"grad_norm": 0.30078125,
"learning_rate": 2.0249221183800623e-07,
"loss": 1.1302,
"step": 1259
},
{
"epoch": 0.9813084112149533,
"grad_norm": 0.275390625,
"learning_rate": 1.9470404984423677e-07,
"loss": 1.0315,
"step": 1260
},
{
"epoch": 0.9820872274143302,
"grad_norm": 0.27734375,
"learning_rate": 1.8691588785046729e-07,
"loss": 1.0503,
"step": 1261
},
{
"epoch": 0.9828660436137072,
"grad_norm": 0.2734375,
"learning_rate": 1.7912772585669783e-07,
"loss": 1.0564,
"step": 1262
},
{
"epoch": 0.9836448598130841,
"grad_norm": 0.28515625,
"learning_rate": 1.7133956386292834e-07,
"loss": 1.0584,
"step": 1263
},
{
"epoch": 0.9844236760124611,
"grad_norm": 0.291015625,
"learning_rate": 1.6355140186915888e-07,
"loss": 1.052,
"step": 1264
},
{
"epoch": 0.985202492211838,
"grad_norm": 0.26953125,
"learning_rate": 1.5576323987538942e-07,
"loss": 1.1076,
"step": 1265
},
{
"epoch": 0.985981308411215,
"grad_norm": 0.296875,
"learning_rate": 1.4797507788161994e-07,
"loss": 1.1232,
"step": 1266
},
{
"epoch": 0.9867601246105919,
"grad_norm": 0.2734375,
"learning_rate": 1.4018691588785048e-07,
"loss": 1.0714,
"step": 1267
},
{
"epoch": 0.9875389408099688,
"grad_norm": 0.2578125,
"learning_rate": 1.32398753894081e-07,
"loss": 1.0519,
"step": 1268
},
{
"epoch": 0.9883177570093458,
"grad_norm": 0.2734375,
"learning_rate": 1.2461059190031153e-07,
"loss": 1.0928,
"step": 1269
},
{
"epoch": 0.9890965732087228,
"grad_norm": 0.283203125,
"learning_rate": 1.1682242990654206e-07,
"loss": 1.0679,
"step": 1270
},
{
"epoch": 0.9898753894080997,
"grad_norm": 0.265625,
"learning_rate": 1.0903426791277259e-07,
"loss": 1.0732,
"step": 1271
},
{
"epoch": 0.9906542056074766,
"grad_norm": 0.26953125,
"learning_rate": 1.0124610591900312e-07,
"loss": 1.0715,
"step": 1272
},
{
"epoch": 0.9914330218068536,
"grad_norm": 0.28125,
"learning_rate": 9.345794392523364e-08,
"loss": 1.097,
"step": 1273
},
{
"epoch": 0.9922118380062306,
"grad_norm": 0.2734375,
"learning_rate": 8.566978193146417e-08,
"loss": 1.0834,
"step": 1274
},
{
"epoch": 0.9929906542056075,
"grad_norm": 0.271484375,
"learning_rate": 7.788161993769471e-08,
"loss": 1.0642,
"step": 1275
},
{
"epoch": 0.9937694704049844,
"grad_norm": 0.2734375,
"learning_rate": 7.009345794392524e-08,
"loss": 1.0663,
"step": 1276
},
{
"epoch": 0.9945482866043613,
"grad_norm": 0.267578125,
"learning_rate": 6.230529595015577e-08,
"loss": 1.0766,
"step": 1277
},
{
"epoch": 0.9953271028037384,
"grad_norm": 0.271484375,
"learning_rate": 5.4517133956386294e-08,
"loss": 1.0533,
"step": 1278
},
{
"epoch": 0.9961059190031153,
"grad_norm": 0.271484375,
"learning_rate": 4.672897196261682e-08,
"loss": 1.0599,
"step": 1279
},
{
"epoch": 0.9968847352024922,
"grad_norm": 0.3046875,
"learning_rate": 3.8940809968847356e-08,
"loss": 1.1097,
"step": 1280
},
{
"epoch": 0.9976635514018691,
"grad_norm": 0.26171875,
"learning_rate": 3.1152647975077883e-08,
"loss": 1.094,
"step": 1281
},
{
"epoch": 0.9984423676012462,
"grad_norm": 0.283203125,
"learning_rate": 2.336448598130841e-08,
"loss": 1.0052,
"step": 1282
},
{
"epoch": 0.9992211838006231,
"grad_norm": 0.271484375,
"learning_rate": 1.5576323987538942e-08,
"loss": 1.07,
"step": 1283
},
{
"epoch": 1.0,
"grad_norm": 0.28515625,
"learning_rate": 7.788161993769471e-09,
"loss": 1.0942,
"step": 1284
}
],
"logging_steps": 1.0,
"max_steps": 1284,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 0,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 4.2858138579406684e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}