{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 0,
"global_step": 820,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0012195121951219512,
"grad_norm": 0.412109375,
"learning_rate": 9.987804878048782e-06,
"loss": 1.7359,
"step": 1
},
{
"epoch": 0.0024390243902439024,
"grad_norm": 0.373046875,
"learning_rate": 9.975609756097561e-06,
"loss": 1.7645,
"step": 2
},
{
"epoch": 0.003658536585365854,
"grad_norm": 0.380859375,
"learning_rate": 9.963414634146342e-06,
"loss": 1.7766,
"step": 3
},
{
"epoch": 0.004878048780487805,
"grad_norm": 0.365234375,
"learning_rate": 9.951219512195124e-06,
"loss": 1.7373,
"step": 4
},
{
"epoch": 0.006097560975609756,
"grad_norm": 0.345703125,
"learning_rate": 9.939024390243903e-06,
"loss": 1.7009,
"step": 5
},
{
"epoch": 0.007317073170731708,
"grad_norm": 0.34765625,
"learning_rate": 9.926829268292684e-06,
"loss": 1.6796,
"step": 6
},
{
"epoch": 0.00853658536585366,
"grad_norm": 0.318359375,
"learning_rate": 9.914634146341463e-06,
"loss": 1.7517,
"step": 7
},
{
"epoch": 0.00975609756097561,
"grad_norm": 0.314453125,
"learning_rate": 9.902439024390245e-06,
"loss": 1.7498,
"step": 8
},
{
"epoch": 0.01097560975609756,
"grad_norm": 0.306640625,
"learning_rate": 9.890243902439026e-06,
"loss": 1.6965,
"step": 9
},
{
"epoch": 0.012195121951219513,
"grad_norm": 0.2578125,
"learning_rate": 9.878048780487805e-06,
"loss": 1.6836,
"step": 10
},
{
"epoch": 0.013414634146341463,
"grad_norm": 0.27734375,
"learning_rate": 9.865853658536586e-06,
"loss": 1.7479,
"step": 11
},
{
"epoch": 0.014634146341463415,
"grad_norm": 0.2392578125,
"learning_rate": 9.853658536585367e-06,
"loss": 1.6494,
"step": 12
},
{
"epoch": 0.015853658536585366,
"grad_norm": 0.2578125,
"learning_rate": 9.841463414634147e-06,
"loss": 1.6509,
"step": 13
},
{
"epoch": 0.01707317073170732,
"grad_norm": 0.248046875,
"learning_rate": 9.829268292682928e-06,
"loss": 1.6927,
"step": 14
},
{
"epoch": 0.018292682926829267,
"grad_norm": 0.2470703125,
"learning_rate": 9.817073170731707e-06,
"loss": 1.6383,
"step": 15
},
{
"epoch": 0.01951219512195122,
"grad_norm": 0.2333984375,
"learning_rate": 9.804878048780488e-06,
"loss": 1.5846,
"step": 16
},
{
"epoch": 0.020731707317073172,
"grad_norm": 0.2353515625,
"learning_rate": 9.79268292682927e-06,
"loss": 1.624,
"step": 17
},
{
"epoch": 0.02195121951219512,
"grad_norm": 0.2138671875,
"learning_rate": 9.780487804878049e-06,
"loss": 1.5879,
"step": 18
},
{
"epoch": 0.023170731707317073,
"grad_norm": 0.212890625,
"learning_rate": 9.76829268292683e-06,
"loss": 1.6408,
"step": 19
},
{
"epoch": 0.024390243902439025,
"grad_norm": 0.21875,
"learning_rate": 9.756097560975611e-06,
"loss": 1.6303,
"step": 20
},
{
"epoch": 0.025609756097560974,
"grad_norm": 0.22265625,
"learning_rate": 9.74390243902439e-06,
"loss": 1.6591,
"step": 21
},
{
"epoch": 0.026829268292682926,
"grad_norm": 0.208984375,
"learning_rate": 9.731707317073171e-06,
"loss": 1.5865,
"step": 22
},
{
"epoch": 0.02804878048780488,
"grad_norm": 0.1826171875,
"learning_rate": 9.719512195121953e-06,
"loss": 1.5393,
"step": 23
},
{
"epoch": 0.02926829268292683,
"grad_norm": 0.1875,
"learning_rate": 9.707317073170732e-06,
"loss": 1.6239,
"step": 24
},
{
"epoch": 0.03048780487804878,
"grad_norm": 0.177734375,
"learning_rate": 9.695121951219513e-06,
"loss": 1.541,
"step": 25
},
{
"epoch": 0.03170731707317073,
"grad_norm": 0.1689453125,
"learning_rate": 9.682926829268292e-06,
"loss": 1.52,
"step": 26
},
{
"epoch": 0.032926829268292684,
"grad_norm": 0.1953125,
"learning_rate": 9.670731707317074e-06,
"loss": 1.6195,
"step": 27
},
{
"epoch": 0.03414634146341464,
"grad_norm": 0.1748046875,
"learning_rate": 9.658536585365855e-06,
"loss": 1.4989,
"step": 28
},
{
"epoch": 0.03536585365853658,
"grad_norm": 0.1689453125,
"learning_rate": 9.646341463414634e-06,
"loss": 1.5094,
"step": 29
},
{
"epoch": 0.036585365853658534,
"grad_norm": 0.16015625,
"learning_rate": 9.634146341463415e-06,
"loss": 1.5214,
"step": 30
},
{
"epoch": 0.03780487804878049,
"grad_norm": 0.1669921875,
"learning_rate": 9.621951219512196e-06,
"loss": 1.5122,
"step": 31
},
{
"epoch": 0.03902439024390244,
"grad_norm": 0.166015625,
"learning_rate": 9.609756097560976e-06,
"loss": 1.5766,
"step": 32
},
{
"epoch": 0.04024390243902439,
"grad_norm": 0.16015625,
"learning_rate": 9.597560975609757e-06,
"loss": 1.527,
"step": 33
},
{
"epoch": 0.041463414634146344,
"grad_norm": 0.1650390625,
"learning_rate": 9.585365853658536e-06,
"loss": 1.4714,
"step": 34
},
{
"epoch": 0.042682926829268296,
"grad_norm": 0.1728515625,
"learning_rate": 9.573170731707317e-06,
"loss": 1.5223,
"step": 35
},
{
"epoch": 0.04390243902439024,
"grad_norm": 0.1708984375,
"learning_rate": 9.560975609756098e-06,
"loss": 1.5577,
"step": 36
},
{
"epoch": 0.045121951219512194,
"grad_norm": 0.1572265625,
"learning_rate": 9.548780487804878e-06,
"loss": 1.4361,
"step": 37
},
{
"epoch": 0.046341463414634146,
"grad_norm": 0.1708984375,
"learning_rate": 9.536585365853659e-06,
"loss": 1.4433,
"step": 38
},
{
"epoch": 0.0475609756097561,
"grad_norm": 0.1552734375,
"learning_rate": 9.52439024390244e-06,
"loss": 1.4276,
"step": 39
},
{
"epoch": 0.04878048780487805,
"grad_norm": 0.1533203125,
"learning_rate": 9.51219512195122e-06,
"loss": 1.467,
"step": 40
},
{
"epoch": 0.05,
"grad_norm": 0.173828125,
"learning_rate": 9.5e-06,
"loss": 1.5153,
"step": 41
},
{
"epoch": 0.05121951219512195,
"grad_norm": 0.1611328125,
"learning_rate": 9.487804878048782e-06,
"loss": 1.502,
"step": 42
},
{
"epoch": 0.0524390243902439,
"grad_norm": 0.1484375,
"learning_rate": 9.475609756097561e-06,
"loss": 1.4238,
"step": 43
},
{
"epoch": 0.05365853658536585,
"grad_norm": 0.1611328125,
"learning_rate": 9.463414634146342e-06,
"loss": 1.4485,
"step": 44
},
{
"epoch": 0.054878048780487805,
"grad_norm": 0.1591796875,
"learning_rate": 9.451219512195122e-06,
"loss": 1.4759,
"step": 45
},
{
"epoch": 0.05609756097560976,
"grad_norm": 0.1513671875,
"learning_rate": 9.439024390243903e-06,
"loss": 1.3978,
"step": 46
},
{
"epoch": 0.05731707317073171,
"grad_norm": 0.1435546875,
"learning_rate": 9.426829268292684e-06,
"loss": 1.4025,
"step": 47
},
{
"epoch": 0.05853658536585366,
"grad_norm": 0.140625,
"learning_rate": 9.414634146341463e-06,
"loss": 1.3911,
"step": 48
},
{
"epoch": 0.05975609756097561,
"grad_norm": 0.2041015625,
"learning_rate": 9.402439024390244e-06,
"loss": 1.3932,
"step": 49
},
{
"epoch": 0.06097560975609756,
"grad_norm": 0.150390625,
"learning_rate": 9.390243902439025e-06,
"loss": 1.4234,
"step": 50
},
{
"epoch": 0.06219512195121951,
"grad_norm": 0.1767578125,
"learning_rate": 9.378048780487805e-06,
"loss": 1.4094,
"step": 51
},
{
"epoch": 0.06341463414634146,
"grad_norm": 0.140625,
"learning_rate": 9.365853658536586e-06,
"loss": 1.3633,
"step": 52
},
{
"epoch": 0.06463414634146342,
"grad_norm": 0.14453125,
"learning_rate": 9.353658536585367e-06,
"loss": 1.3763,
"step": 53
},
{
"epoch": 0.06585365853658537,
"grad_norm": 0.1572265625,
"learning_rate": 9.341463414634148e-06,
"loss": 1.3517,
"step": 54
},
{
"epoch": 0.06707317073170732,
"grad_norm": 0.1416015625,
"learning_rate": 9.329268292682927e-06,
"loss": 1.3864,
"step": 55
},
{
"epoch": 0.06829268292682927,
"grad_norm": 0.1396484375,
"learning_rate": 9.317073170731709e-06,
"loss": 1.4261,
"step": 56
},
{
"epoch": 0.06951219512195123,
"grad_norm": 0.14453125,
"learning_rate": 9.30487804878049e-06,
"loss": 1.411,
"step": 57
},
{
"epoch": 0.07073170731707316,
"grad_norm": 0.2255859375,
"learning_rate": 9.292682926829269e-06,
"loss": 1.3049,
"step": 58
},
{
"epoch": 0.07195121951219512,
"grad_norm": 0.1865234375,
"learning_rate": 9.28048780487805e-06,
"loss": 1.4078,
"step": 59
},
{
"epoch": 0.07317073170731707,
"grad_norm": 0.1494140625,
"learning_rate": 9.268292682926831e-06,
"loss": 1.4062,
"step": 60
},
{
"epoch": 0.07439024390243902,
"grad_norm": 0.150390625,
"learning_rate": 9.25609756097561e-06,
"loss": 1.3901,
"step": 61
},
{
"epoch": 0.07560975609756097,
"grad_norm": 0.134765625,
"learning_rate": 9.243902439024392e-06,
"loss": 1.3779,
"step": 62
},
{
"epoch": 0.07682926829268293,
"grad_norm": 0.1298828125,
"learning_rate": 9.231707317073171e-06,
"loss": 1.3235,
"step": 63
},
{
"epoch": 0.07804878048780488,
"grad_norm": 0.1328125,
"learning_rate": 9.219512195121952e-06,
"loss": 1.3703,
"step": 64
},
{
"epoch": 0.07926829268292683,
"grad_norm": 0.13671875,
"learning_rate": 9.207317073170733e-06,
"loss": 1.3756,
"step": 65
},
{
"epoch": 0.08048780487804878,
"grad_norm": 0.1337890625,
"learning_rate": 9.195121951219513e-06,
"loss": 1.3887,
"step": 66
},
{
"epoch": 0.08170731707317073,
"grad_norm": 0.150390625,
"learning_rate": 9.182926829268294e-06,
"loss": 1.348,
"step": 67
},
{
"epoch": 0.08292682926829269,
"grad_norm": 0.13671875,
"learning_rate": 9.170731707317075e-06,
"loss": 1.3403,
"step": 68
},
{
"epoch": 0.08414634146341464,
"grad_norm": 0.1416015625,
"learning_rate": 9.158536585365854e-06,
"loss": 1.3239,
"step": 69
},
{
"epoch": 0.08536585365853659,
"grad_norm": 0.14453125,
"learning_rate": 9.146341463414635e-06,
"loss": 1.3273,
"step": 70
},
{
"epoch": 0.08658536585365853,
"grad_norm": 0.1396484375,
"learning_rate": 9.134146341463415e-06,
"loss": 1.3784,
"step": 71
},
{
"epoch": 0.08780487804878048,
"grad_norm": 0.1328125,
"learning_rate": 9.121951219512196e-06,
"loss": 1.3063,
"step": 72
},
{
"epoch": 0.08902439024390243,
"grad_norm": 0.1435546875,
"learning_rate": 9.109756097560977e-06,
"loss": 1.3335,
"step": 73
},
{
"epoch": 0.09024390243902439,
"grad_norm": 0.13671875,
"learning_rate": 9.097560975609757e-06,
"loss": 1.3193,
"step": 74
},
{
"epoch": 0.09146341463414634,
"grad_norm": 0.1357421875,
"learning_rate": 9.085365853658538e-06,
"loss": 1.3003,
"step": 75
},
{
"epoch": 0.09268292682926829,
"grad_norm": 0.1435546875,
"learning_rate": 9.073170731707319e-06,
"loss": 1.3387,
"step": 76
},
{
"epoch": 0.09390243902439024,
"grad_norm": 0.1298828125,
"learning_rate": 9.060975609756098e-06,
"loss": 1.2503,
"step": 77
},
{
"epoch": 0.0951219512195122,
"grad_norm": 0.1494140625,
"learning_rate": 9.04878048780488e-06,
"loss": 1.2954,
"step": 78
},
{
"epoch": 0.09634146341463415,
"grad_norm": 0.15625,
"learning_rate": 9.03658536585366e-06,
"loss": 1.3674,
"step": 79
},
{
"epoch": 0.0975609756097561,
"grad_norm": 0.1416015625,
"learning_rate": 9.02439024390244e-06,
"loss": 1.3554,
"step": 80
},
{
"epoch": 0.09878048780487805,
"grad_norm": 0.1494140625,
"learning_rate": 9.01219512195122e-06,
"loss": 1.3393,
"step": 81
},
{
"epoch": 0.1,
"grad_norm": 0.287109375,
"learning_rate": 9e-06,
"loss": 1.2831,
"step": 82
},
{
"epoch": 0.10121951219512196,
"grad_norm": 0.146484375,
"learning_rate": 8.987804878048781e-06,
"loss": 1.3079,
"step": 83
},
{
"epoch": 0.1024390243902439,
"grad_norm": 0.1328125,
"learning_rate": 8.975609756097562e-06,
"loss": 1.3021,
"step": 84
},
{
"epoch": 0.10365853658536585,
"grad_norm": 0.142578125,
"learning_rate": 8.963414634146342e-06,
"loss": 1.2823,
"step": 85
},
{
"epoch": 0.1048780487804878,
"grad_norm": 0.140625,
"learning_rate": 8.951219512195123e-06,
"loss": 1.2396,
"step": 86
},
{
"epoch": 0.10609756097560975,
"grad_norm": 0.1533203125,
"learning_rate": 8.939024390243904e-06,
"loss": 1.3479,
"step": 87
},
{
"epoch": 0.1073170731707317,
"grad_norm": 0.14453125,
"learning_rate": 8.926829268292683e-06,
"loss": 1.2762,
"step": 88
},
{
"epoch": 0.10853658536585366,
"grad_norm": 0.14453125,
"learning_rate": 8.914634146341465e-06,
"loss": 1.3385,
"step": 89
},
{
"epoch": 0.10975609756097561,
"grad_norm": 0.1416015625,
"learning_rate": 8.902439024390244e-06,
"loss": 1.3153,
"step": 90
},
{
"epoch": 0.11097560975609756,
"grad_norm": 0.1611328125,
"learning_rate": 8.890243902439025e-06,
"loss": 1.2956,
"step": 91
},
{
"epoch": 0.11219512195121951,
"grad_norm": 0.173828125,
"learning_rate": 8.878048780487806e-06,
"loss": 1.3069,
"step": 92
},
{
"epoch": 0.11341463414634147,
"grad_norm": 0.1484375,
"learning_rate": 8.865853658536586e-06,
"loss": 1.2872,
"step": 93
},
{
"epoch": 0.11463414634146342,
"grad_norm": 0.13671875,
"learning_rate": 8.853658536585367e-06,
"loss": 1.3395,
"step": 94
},
{
"epoch": 0.11585365853658537,
"grad_norm": 0.140625,
"learning_rate": 8.841463414634148e-06,
"loss": 1.2451,
"step": 95
},
{
"epoch": 0.11707317073170732,
"grad_norm": 0.1435546875,
"learning_rate": 8.829268292682927e-06,
"loss": 1.2895,
"step": 96
},
{
"epoch": 0.11829268292682926,
"grad_norm": 0.15625,
"learning_rate": 8.817073170731708e-06,
"loss": 1.2988,
"step": 97
},
{
"epoch": 0.11951219512195121,
"grad_norm": 0.1396484375,
"learning_rate": 8.80487804878049e-06,
"loss": 1.2686,
"step": 98
},
{
"epoch": 0.12073170731707317,
"grad_norm": 0.140625,
"learning_rate": 8.792682926829269e-06,
"loss": 1.2651,
"step": 99
},
{
"epoch": 0.12195121951219512,
"grad_norm": 0.134765625,
"learning_rate": 8.78048780487805e-06,
"loss": 1.2891,
"step": 100
},
{
"epoch": 0.12317073170731707,
"grad_norm": 0.1357421875,
"learning_rate": 8.76829268292683e-06,
"loss": 1.286,
"step": 101
},
{
"epoch": 0.12439024390243902,
"grad_norm": 0.140625,
"learning_rate": 8.75609756097561e-06,
"loss": 1.2882,
"step": 102
},
{
"epoch": 0.12560975609756098,
"grad_norm": 0.1328125,
"learning_rate": 8.743902439024391e-06,
"loss": 1.177,
"step": 103
},
{
"epoch": 0.12682926829268293,
"grad_norm": 0.1435546875,
"learning_rate": 8.731707317073171e-06,
"loss": 1.2681,
"step": 104
},
{
"epoch": 0.12804878048780488,
"grad_norm": 0.1474609375,
"learning_rate": 8.719512195121952e-06,
"loss": 1.2534,
"step": 105
},
{
"epoch": 0.12926829268292683,
"grad_norm": 0.146484375,
"learning_rate": 8.707317073170733e-06,
"loss": 1.2651,
"step": 106
},
{
"epoch": 0.13048780487804879,
"grad_norm": 0.1552734375,
"learning_rate": 8.695121951219512e-06,
"loss": 1.226,
"step": 107
},
{
"epoch": 0.13170731707317074,
"grad_norm": 0.1376953125,
"learning_rate": 8.682926829268294e-06,
"loss": 1.253,
"step": 108
},
{
"epoch": 0.1329268292682927,
"grad_norm": 0.146484375,
"learning_rate": 8.670731707317073e-06,
"loss": 1.2314,
"step": 109
},
{
"epoch": 0.13414634146341464,
"grad_norm": 0.140625,
"learning_rate": 8.658536585365854e-06,
"loss": 1.2127,
"step": 110
},
{
"epoch": 0.1353658536585366,
"grad_norm": 0.326171875,
"learning_rate": 8.646341463414635e-06,
"loss": 1.2699,
"step": 111
},
{
"epoch": 0.13658536585365855,
"grad_norm": 0.1484375,
"learning_rate": 8.634146341463415e-06,
"loss": 1.2047,
"step": 112
},
{
"epoch": 0.1378048780487805,
"grad_norm": 0.1416015625,
"learning_rate": 8.621951219512196e-06,
"loss": 1.2516,
"step": 113
},
{
"epoch": 0.13902439024390245,
"grad_norm": 0.1416015625,
"learning_rate": 8.609756097560977e-06,
"loss": 1.216,
"step": 114
},
{
"epoch": 0.1402439024390244,
"grad_norm": 0.1513671875,
"learning_rate": 8.597560975609756e-06,
"loss": 1.2471,
"step": 115
},
{
"epoch": 0.14146341463414633,
"grad_norm": 0.1474609375,
"learning_rate": 8.585365853658537e-06,
"loss": 1.289,
"step": 116
},
{
"epoch": 0.14268292682926828,
"grad_norm": 0.1533203125,
"learning_rate": 8.573170731707318e-06,
"loss": 1.2521,
"step": 117
},
{
"epoch": 0.14390243902439023,
"grad_norm": 0.146484375,
"learning_rate": 8.560975609756098e-06,
"loss": 1.1712,
"step": 118
},
{
"epoch": 0.14512195121951219,
"grad_norm": 0.1806640625,
"learning_rate": 8.548780487804879e-06,
"loss": 1.1608,
"step": 119
},
{
"epoch": 0.14634146341463414,
"grad_norm": 0.1416015625,
"learning_rate": 8.536585365853658e-06,
"loss": 1.201,
"step": 120
},
{
"epoch": 0.1475609756097561,
"grad_norm": 0.173828125,
"learning_rate": 8.52439024390244e-06,
"loss": 1.2613,
"step": 121
},
{
"epoch": 0.14878048780487804,
"grad_norm": 0.1494140625,
"learning_rate": 8.51219512195122e-06,
"loss": 1.2407,
"step": 122
},
{
"epoch": 0.15,
"grad_norm": 0.193359375,
"learning_rate": 8.5e-06,
"loss": 1.1373,
"step": 123
},
{
"epoch": 0.15121951219512195,
"grad_norm": 0.1484375,
"learning_rate": 8.487804878048781e-06,
"loss": 1.233,
"step": 124
},
{
"epoch": 0.1524390243902439,
"grad_norm": 0.13671875,
"learning_rate": 8.475609756097562e-06,
"loss": 1.1991,
"step": 125
},
{
"epoch": 0.15365853658536585,
"grad_norm": 0.1484375,
"learning_rate": 8.463414634146342e-06,
"loss": 1.1861,
"step": 126
},
{
"epoch": 0.1548780487804878,
"grad_norm": 0.1640625,
"learning_rate": 8.451219512195123e-06,
"loss": 1.2282,
"step": 127
},
{
"epoch": 0.15609756097560976,
"grad_norm": 0.146484375,
"learning_rate": 8.439024390243902e-06,
"loss": 1.2937,
"step": 128
},
{
"epoch": 0.1573170731707317,
"grad_norm": 0.1787109375,
"learning_rate": 8.426829268292683e-06,
"loss": 1.2342,
"step": 129
},
{
"epoch": 0.15853658536585366,
"grad_norm": 0.201171875,
"learning_rate": 8.414634146341464e-06,
"loss": 1.2159,
"step": 130
},
{
"epoch": 0.1597560975609756,
"grad_norm": 0.150390625,
"learning_rate": 8.402439024390244e-06,
"loss": 1.1729,
"step": 131
},
{
"epoch": 0.16097560975609757,
"grad_norm": 0.154296875,
"learning_rate": 8.390243902439025e-06,
"loss": 1.2317,
"step": 132
},
{
"epoch": 0.16219512195121952,
"grad_norm": 0.1513671875,
"learning_rate": 8.378048780487806e-06,
"loss": 1.2474,
"step": 133
},
{
"epoch": 0.16341463414634147,
"grad_norm": 0.1552734375,
"learning_rate": 8.365853658536585e-06,
"loss": 1.2129,
"step": 134
},
{
"epoch": 0.16463414634146342,
"grad_norm": 0.150390625,
"learning_rate": 8.353658536585366e-06,
"loss": 1.1949,
"step": 135
},
{
"epoch": 0.16585365853658537,
"grad_norm": 0.1552734375,
"learning_rate": 8.341463414634147e-06,
"loss": 1.2223,
"step": 136
},
{
"epoch": 0.16707317073170733,
"grad_norm": 0.1474609375,
"learning_rate": 8.329268292682927e-06,
"loss": 1.1877,
"step": 137
},
{
"epoch": 0.16829268292682928,
"grad_norm": 0.1533203125,
"learning_rate": 8.317073170731708e-06,
"loss": 1.2069,
"step": 138
},
{
"epoch": 0.16951219512195123,
"grad_norm": 0.1513671875,
"learning_rate": 8.304878048780487e-06,
"loss": 1.1599,
"step": 139
},
{
"epoch": 0.17073170731707318,
"grad_norm": 0.15234375,
"learning_rate": 8.292682926829268e-06,
"loss": 1.2284,
"step": 140
},
{
"epoch": 0.1719512195121951,
"grad_norm": 0.1865234375,
"learning_rate": 8.28048780487805e-06,
"loss": 1.172,
"step": 141
},
{
"epoch": 0.17317073170731706,
"grad_norm": 0.1513671875,
"learning_rate": 8.268292682926829e-06,
"loss": 1.1259,
"step": 142
},
{
"epoch": 0.174390243902439,
"grad_norm": 0.1884765625,
"learning_rate": 8.25609756097561e-06,
"loss": 1.2122,
"step": 143
},
{
"epoch": 0.17560975609756097,
"grad_norm": 0.1591796875,
"learning_rate": 8.243902439024391e-06,
"loss": 1.1928,
"step": 144
},
{
"epoch": 0.17682926829268292,
"grad_norm": 0.1455078125,
"learning_rate": 8.23170731707317e-06,
"loss": 1.1803,
"step": 145
},
{
"epoch": 0.17804878048780487,
"grad_norm": 0.1533203125,
"learning_rate": 8.219512195121952e-06,
"loss": 1.2284,
"step": 146
},
{
"epoch": 0.17926829268292682,
"grad_norm": 0.16015625,
"learning_rate": 8.207317073170731e-06,
"loss": 1.2206,
"step": 147
},
{
"epoch": 0.18048780487804877,
"grad_norm": 0.1669921875,
"learning_rate": 8.195121951219512e-06,
"loss": 1.1974,
"step": 148
},
{
"epoch": 0.18170731707317073,
"grad_norm": 0.216796875,
"learning_rate": 8.182926829268293e-06,
"loss": 1.1073,
"step": 149
},
{
"epoch": 0.18292682926829268,
"grad_norm": 0.158203125,
"learning_rate": 8.170731707317073e-06,
"loss": 1.2636,
"step": 150
},
{
"epoch": 0.18414634146341463,
"grad_norm": 0.1611328125,
"learning_rate": 8.158536585365854e-06,
"loss": 1.163,
"step": 151
},
{
"epoch": 0.18536585365853658,
"grad_norm": 0.158203125,
"learning_rate": 8.146341463414635e-06,
"loss": 1.0688,
"step": 152
},
{
"epoch": 0.18658536585365854,
"grad_norm": 0.15234375,
"learning_rate": 8.134146341463414e-06,
"loss": 1.1666,
"step": 153
},
{
"epoch": 0.1878048780487805,
"grad_norm": 0.16796875,
"learning_rate": 8.121951219512197e-06,
"loss": 1.2327,
"step": 154
},
{
"epoch": 0.18902439024390244,
"grad_norm": 0.1650390625,
"learning_rate": 8.109756097560977e-06,
"loss": 1.219,
"step": 155
},
{
"epoch": 0.1902439024390244,
"grad_norm": 0.154296875,
"learning_rate": 8.097560975609758e-06,
"loss": 1.1597,
"step": 156
},
{
"epoch": 0.19146341463414634,
"grad_norm": 0.1611328125,
"learning_rate": 8.085365853658537e-06,
"loss": 1.1774,
"step": 157
},
{
"epoch": 0.1926829268292683,
"grad_norm": 0.173828125,
"learning_rate": 8.073170731707318e-06,
"loss": 1.2422,
"step": 158
},
{
"epoch": 0.19390243902439025,
"grad_norm": 0.158203125,
"learning_rate": 8.0609756097561e-06,
"loss": 1.2281,
"step": 159
},
{
"epoch": 0.1951219512195122,
"grad_norm": 0.1552734375,
"learning_rate": 8.048780487804879e-06,
"loss": 1.1927,
"step": 160
},
{
"epoch": 0.19634146341463415,
"grad_norm": 0.15625,
"learning_rate": 8.03658536585366e-06,
"loss": 1.2419,
"step": 161
},
{
"epoch": 0.1975609756097561,
"grad_norm": 0.1748046875,
"learning_rate": 8.02439024390244e-06,
"loss": 1.1929,
"step": 162
},
{
"epoch": 0.19878048780487806,
"grad_norm": 0.1533203125,
"learning_rate": 8.01219512195122e-06,
"loss": 1.2225,
"step": 163
},
{
"epoch": 0.2,
"grad_norm": 0.150390625,
"learning_rate": 8.000000000000001e-06,
"loss": 1.1928,
"step": 164
},
{
"epoch": 0.20121951219512196,
"grad_norm": 0.17578125,
"learning_rate": 7.98780487804878e-06,
"loss": 1.1786,
"step": 165
},
{
"epoch": 0.20243902439024392,
"grad_norm": 0.173828125,
"learning_rate": 7.975609756097562e-06,
"loss": 1.1522,
"step": 166
},
{
"epoch": 0.20365853658536584,
"grad_norm": 0.2236328125,
"learning_rate": 7.963414634146343e-06,
"loss": 1.2015,
"step": 167
},
{
"epoch": 0.2048780487804878,
"grad_norm": 0.166015625,
"learning_rate": 7.951219512195122e-06,
"loss": 1.1963,
"step": 168
},
{
"epoch": 0.20609756097560974,
"grad_norm": 0.1650390625,
"learning_rate": 7.939024390243903e-06,
"loss": 1.1305,
"step": 169
},
{
"epoch": 0.2073170731707317,
"grad_norm": 0.1484375,
"learning_rate": 7.926829268292685e-06,
"loss": 1.1466,
"step": 170
},
{
"epoch": 0.20853658536585365,
"grad_norm": 0.146484375,
"learning_rate": 7.914634146341464e-06,
"loss": 1.1028,
"step": 171
},
{
"epoch": 0.2097560975609756,
"grad_norm": 0.181640625,
"learning_rate": 7.902439024390245e-06,
"loss": 1.2034,
"step": 172
},
{
"epoch": 0.21097560975609755,
"grad_norm": 0.1650390625,
"learning_rate": 7.890243902439026e-06,
"loss": 1.2,
"step": 173
},
{
"epoch": 0.2121951219512195,
"grad_norm": 0.1630859375,
"learning_rate": 7.878048780487806e-06,
"loss": 1.1766,
"step": 174
},
{
"epoch": 0.21341463414634146,
"grad_norm": 0.154296875,
"learning_rate": 7.865853658536587e-06,
"loss": 1.1024,
"step": 175
},
{
"epoch": 0.2146341463414634,
"grad_norm": 0.1494140625,
"learning_rate": 7.853658536585366e-06,
"loss": 1.1219,
"step": 176
},
{
"epoch": 0.21585365853658536,
"grad_norm": 0.15625,
"learning_rate": 7.841463414634147e-06,
"loss": 1.2105,
"step": 177
},
{
"epoch": 0.21707317073170732,
"grad_norm": 0.1591796875,
"learning_rate": 7.829268292682928e-06,
"loss": 1.1326,
"step": 178
},
{
"epoch": 0.21829268292682927,
"grad_norm": 0.16015625,
"learning_rate": 7.817073170731708e-06,
"loss": 1.1262,
"step": 179
},
{
"epoch": 0.21951219512195122,
"grad_norm": 0.1513671875,
"learning_rate": 7.804878048780489e-06,
"loss": 1.1273,
"step": 180
},
{
"epoch": 0.22073170731707317,
"grad_norm": 0.166015625,
"learning_rate": 7.79268292682927e-06,
"loss": 1.2313,
"step": 181
},
{
"epoch": 0.22195121951219512,
"grad_norm": 0.154296875,
"learning_rate": 7.78048780487805e-06,
"loss": 1.174,
"step": 182
},
{
"epoch": 0.22317073170731708,
"grad_norm": 0.15625,
"learning_rate": 7.76829268292683e-06,
"loss": 1.2163,
"step": 183
},
{
"epoch": 0.22439024390243903,
"grad_norm": 0.1552734375,
"learning_rate": 7.75609756097561e-06,
"loss": 1.2275,
"step": 184
},
{
"epoch": 0.22560975609756098,
"grad_norm": 0.2216796875,
"learning_rate": 7.743902439024391e-06,
"loss": 1.1812,
"step": 185
},
{
"epoch": 0.22682926829268293,
"grad_norm": 0.1796875,
"learning_rate": 7.731707317073172e-06,
"loss": 1.1234,
"step": 186
},
{
"epoch": 0.2280487804878049,
"grad_norm": 0.162109375,
"learning_rate": 7.719512195121951e-06,
"loss": 1.1762,
"step": 187
},
{
"epoch": 0.22926829268292684,
"grad_norm": 0.17578125,
"learning_rate": 7.707317073170732e-06,
"loss": 1.0488,
"step": 188
},
{
"epoch": 0.2304878048780488,
"grad_norm": 0.16015625,
"learning_rate": 7.695121951219514e-06,
"loss": 1.1321,
"step": 189
},
{
"epoch": 0.23170731707317074,
"grad_norm": 0.185546875,
"learning_rate": 7.682926829268293e-06,
"loss": 1.1436,
"step": 190
},
{
"epoch": 0.2329268292682927,
"grad_norm": 0.158203125,
"learning_rate": 7.670731707317074e-06,
"loss": 1.1194,
"step": 191
},
{
"epoch": 0.23414634146341465,
"grad_norm": 0.1787109375,
"learning_rate": 7.658536585365855e-06,
"loss": 1.2036,
"step": 192
},
{
"epoch": 0.23536585365853657,
"grad_norm": 0.1640625,
"learning_rate": 7.646341463414635e-06,
"loss": 1.107,
"step": 193
},
{
"epoch": 0.23658536585365852,
"grad_norm": 0.171875,
"learning_rate": 7.634146341463416e-06,
"loss": 1.1002,
"step": 194
},
{
"epoch": 0.23780487804878048,
"grad_norm": 0.1767578125,
"learning_rate": 7.621951219512196e-06,
"loss": 1.1532,
"step": 195
},
{
"epoch": 0.23902439024390243,
"grad_norm": 0.275390625,
"learning_rate": 7.609756097560976e-06,
"loss": 1.1992,
"step": 196
},
{
"epoch": 0.24024390243902438,
"grad_norm": 0.169921875,
"learning_rate": 7.5975609756097565e-06,
"loss": 1.139,
"step": 197
},
{
"epoch": 0.24146341463414633,
"grad_norm": 0.154296875,
"learning_rate": 7.5853658536585376e-06,
"loss": 1.1214,
"step": 198
},
{
"epoch": 0.2426829268292683,
"grad_norm": 0.171875,
"learning_rate": 7.573170731707318e-06,
"loss": 1.1419,
"step": 199
},
{
"epoch": 0.24390243902439024,
"grad_norm": 0.203125,
"learning_rate": 7.560975609756098e-06,
"loss": 1.1848,
"step": 200
},
{
"epoch": 0.2451219512195122,
"grad_norm": 0.166015625,
"learning_rate": 7.548780487804878e-06,
"loss": 1.1376,
"step": 201
},
{
"epoch": 0.24634146341463414,
"grad_norm": 0.298828125,
"learning_rate": 7.5365853658536594e-06,
"loss": 1.1308,
"step": 202
},
{
"epoch": 0.2475609756097561,
"grad_norm": 0.16796875,
"learning_rate": 7.52439024390244e-06,
"loss": 1.1523,
"step": 203
},
{
"epoch": 0.24878048780487805,
"grad_norm": 0.2001953125,
"learning_rate": 7.51219512195122e-06,
"loss": 1.1728,
"step": 204
},
{
"epoch": 0.25,
"grad_norm": 0.1708984375,
"learning_rate": 7.500000000000001e-06,
"loss": 1.1415,
"step": 205
},
{
"epoch": 0.25121951219512195,
"grad_norm": 0.173828125,
"learning_rate": 7.487804878048781e-06,
"loss": 1.2315,
"step": 206
},
{
"epoch": 0.2524390243902439,
"grad_norm": 0.1796875,
"learning_rate": 7.4756097560975615e-06,
"loss": 1.1279,
"step": 207
},
{
"epoch": 0.25365853658536586,
"grad_norm": 0.208984375,
"learning_rate": 7.463414634146342e-06,
"loss": 1.2102,
"step": 208
},
{
"epoch": 0.2548780487804878,
"grad_norm": 0.1669921875,
"learning_rate": 7.451219512195123e-06,
"loss": 1.1739,
"step": 209
},
{
"epoch": 0.25609756097560976,
"grad_norm": 0.240234375,
"learning_rate": 7.439024390243903e-06,
"loss": 1.1821,
"step": 210
},
{
"epoch": 0.2573170731707317,
"grad_norm": 0.1552734375,
"learning_rate": 7.426829268292683e-06,
"loss": 1.1521,
"step": 211
},
{
"epoch": 0.25853658536585367,
"grad_norm": 0.171875,
"learning_rate": 7.414634146341464e-06,
"loss": 1.2337,
"step": 212
},
{
"epoch": 0.2597560975609756,
"grad_norm": 0.1884765625,
"learning_rate": 7.402439024390245e-06,
"loss": 1.1151,
"step": 213
},
{
"epoch": 0.26097560975609757,
"grad_norm": 0.1806640625,
"learning_rate": 7.390243902439025e-06,
"loss": 1.0873,
"step": 214
},
{
"epoch": 0.2621951219512195,
"grad_norm": 0.185546875,
"learning_rate": 7.378048780487805e-06,
"loss": 1.15,
"step": 215
},
{
"epoch": 0.2634146341463415,
"grad_norm": 0.2041015625,
"learning_rate": 7.3658536585365855e-06,
"loss": 1.2472,
"step": 216
},
{
"epoch": 0.2646341463414634,
"grad_norm": 0.2470703125,
"learning_rate": 7.353658536585367e-06,
"loss": 1.1215,
"step": 217
},
{
"epoch": 0.2658536585365854,
"grad_norm": 0.1650390625,
"learning_rate": 7.341463414634147e-06,
"loss": 1.1456,
"step": 218
},
{
"epoch": 0.26707317073170733,
"grad_norm": 0.162109375,
"learning_rate": 7.329268292682927e-06,
"loss": 1.1391,
"step": 219
},
{
"epoch": 0.2682926829268293,
"grad_norm": 0.16015625,
"learning_rate": 7.317073170731707e-06,
"loss": 1.1412,
"step": 220
},
{
"epoch": 0.26951219512195124,
"grad_norm": 0.1630859375,
"learning_rate": 7.3048780487804885e-06,
"loss": 1.1411,
"step": 221
},
{
"epoch": 0.2707317073170732,
"grad_norm": 0.166015625,
"learning_rate": 7.292682926829269e-06,
"loss": 1.1139,
"step": 222
},
{
"epoch": 0.27195121951219514,
"grad_norm": 0.1650390625,
"learning_rate": 7.280487804878049e-06,
"loss": 1.0977,
"step": 223
},
{
"epoch": 0.2731707317073171,
"grad_norm": 0.15625,
"learning_rate": 7.268292682926829e-06,
"loss": 1.0902,
"step": 224
},
{
"epoch": 0.27439024390243905,
"grad_norm": 0.16015625,
"learning_rate": 7.25609756097561e-06,
"loss": 1.1449,
"step": 225
},
{
"epoch": 0.275609756097561,
"grad_norm": 0.1611328125,
"learning_rate": 7.243902439024391e-06,
"loss": 1.0953,
"step": 226
},
{
"epoch": 0.27682926829268295,
"grad_norm": 0.1591796875,
"learning_rate": 7.231707317073171e-06,
"loss": 1.1486,
"step": 227
},
{
"epoch": 0.2780487804878049,
"grad_norm": 0.1787109375,
"learning_rate": 7.219512195121952e-06,
"loss": 1.1352,
"step": 228
},
{
"epoch": 0.27926829268292686,
"grad_norm": 0.1708984375,
"learning_rate": 7.207317073170732e-06,
"loss": 1.1072,
"step": 229
},
{
"epoch": 0.2804878048780488,
"grad_norm": 0.1826171875,
"learning_rate": 7.1951219512195125e-06,
"loss": 1.121,
"step": 230
},
{
"epoch": 0.2817073170731707,
"grad_norm": 0.1708984375,
"learning_rate": 7.182926829268293e-06,
"loss": 1.149,
"step": 231
},
{
"epoch": 0.28292682926829266,
"grad_norm": 0.1767578125,
"learning_rate": 7.170731707317074e-06,
"loss": 1.1803,
"step": 232
},
{
"epoch": 0.2841463414634146,
"grad_norm": 0.1630859375,
"learning_rate": 7.158536585365854e-06,
"loss": 1.1377,
"step": 233
},
{
"epoch": 0.28536585365853656,
"grad_norm": 0.177734375,
"learning_rate": 7.146341463414634e-06,
"loss": 1.1672,
"step": 234
},
{
"epoch": 0.2865853658536585,
"grad_norm": 0.1787109375,
"learning_rate": 7.1341463414634146e-06,
"loss": 1.1429,
"step": 235
},
{
"epoch": 0.28780487804878047,
"grad_norm": 0.171875,
"learning_rate": 7.121951219512196e-06,
"loss": 1.0978,
"step": 236
},
{
"epoch": 0.2890243902439024,
"grad_norm": 0.2060546875,
"learning_rate": 7.109756097560976e-06,
"loss": 1.1666,
"step": 237
},
{
"epoch": 0.29024390243902437,
"grad_norm": 0.1708984375,
"learning_rate": 7.097560975609756e-06,
"loss": 1.0938,
"step": 238
},
{
"epoch": 0.2914634146341463,
"grad_norm": 0.166015625,
"learning_rate": 7.0853658536585364e-06,
"loss": 1.1061,
"step": 239
},
{
"epoch": 0.2926829268292683,
"grad_norm": 0.1591796875,
"learning_rate": 7.0731707317073175e-06,
"loss": 1.1368,
"step": 240
},
{
"epoch": 0.2939024390243902,
"grad_norm": 0.1806640625,
"learning_rate": 7.060975609756098e-06,
"loss": 1.219,
"step": 241
},
{
"epoch": 0.2951219512195122,
"grad_norm": 0.1669921875,
"learning_rate": 7.048780487804878e-06,
"loss": 1.0905,
"step": 242
},
{
"epoch": 0.29634146341463413,
"grad_norm": 0.1669921875,
"learning_rate": 7.036585365853658e-06,
"loss": 1.1605,
"step": 243
},
{
"epoch": 0.2975609756097561,
"grad_norm": 0.1728515625,
"learning_rate": 7.024390243902439e-06,
"loss": 1.1597,
"step": 244
},
{
"epoch": 0.29878048780487804,
"grad_norm": 0.208984375,
"learning_rate": 7.01219512195122e-06,
"loss": 1.0747,
"step": 245
},
{
"epoch": 0.3,
"grad_norm": 0.166015625,
"learning_rate": 7e-06,
"loss": 1.1406,
"step": 246
},
{
"epoch": 0.30121951219512194,
"grad_norm": 0.1708984375,
"learning_rate": 6.987804878048781e-06,
"loss": 1.1297,
"step": 247
},
{
"epoch": 0.3024390243902439,
"grad_norm": 0.1708984375,
"learning_rate": 6.975609756097561e-06,
"loss": 1.1169,
"step": 248
},
{
"epoch": 0.30365853658536585,
"grad_norm": 0.20703125,
"learning_rate": 6.9634146341463415e-06,
"loss": 1.1603,
"step": 249
},
{
"epoch": 0.3048780487804878,
"grad_norm": 0.1708984375,
"learning_rate": 6.951219512195122e-06,
"loss": 1.1133,
"step": 250
},
{
"epoch": 0.30609756097560975,
"grad_norm": 0.171875,
"learning_rate": 6.939024390243903e-06,
"loss": 1.1128,
"step": 251
},
{
"epoch": 0.3073170731707317,
"grad_norm": 0.1904296875,
"learning_rate": 6.926829268292683e-06,
"loss": 1.1305,
"step": 252
},
{
"epoch": 0.30853658536585366,
"grad_norm": 0.1669921875,
"learning_rate": 6.914634146341463e-06,
"loss": 1.1317,
"step": 253
},
{
"epoch": 0.3097560975609756,
"grad_norm": 0.1748046875,
"learning_rate": 6.902439024390244e-06,
"loss": 1.1611,
"step": 254
},
{
"epoch": 0.31097560975609756,
"grad_norm": 0.1748046875,
"learning_rate": 6.890243902439025e-06,
"loss": 1.1758,
"step": 255
},
{
"epoch": 0.3121951219512195,
"grad_norm": 0.185546875,
"learning_rate": 6.878048780487805e-06,
"loss": 1.1388,
"step": 256
},
{
"epoch": 0.31341463414634146,
"grad_norm": 0.1728515625,
"learning_rate": 6.865853658536586e-06,
"loss": 1.1002,
"step": 257
},
{
"epoch": 0.3146341463414634,
"grad_norm": 0.6484375,
"learning_rate": 6.853658536585367e-06,
"loss": 1.1417,
"step": 258
},
{
"epoch": 0.31585365853658537,
"grad_norm": 0.1708984375,
"learning_rate": 6.8414634146341474e-06,
"loss": 1.1438,
"step": 259
},
{
"epoch": 0.3170731707317073,
"grad_norm": 0.177734375,
"learning_rate": 6.829268292682928e-06,
"loss": 1.1272,
"step": 260
},
{
"epoch": 0.3182926829268293,
"grad_norm": 0.181640625,
"learning_rate": 6.817073170731709e-06,
"loss": 1.124,
"step": 261
},
{
"epoch": 0.3195121951219512,
"grad_norm": 0.259765625,
"learning_rate": 6.804878048780489e-06,
"loss": 1.1538,
"step": 262
},
{
"epoch": 0.3207317073170732,
"grad_norm": 0.169921875,
"learning_rate": 6.792682926829269e-06,
"loss": 1.0675,
"step": 263
},
{
"epoch": 0.32195121951219513,
"grad_norm": 0.1953125,
"learning_rate": 6.7804878048780495e-06,
"loss": 1.1493,
"step": 264
},
{
"epoch": 0.3231707317073171,
"grad_norm": 0.18359375,
"learning_rate": 6.768292682926831e-06,
"loss": 1.1708,
"step": 265
},
{
"epoch": 0.32439024390243903,
"grad_norm": 0.1875,
"learning_rate": 6.756097560975611e-06,
"loss": 1.1503,
"step": 266
},
{
"epoch": 0.325609756097561,
"grad_norm": 0.171875,
"learning_rate": 6.743902439024391e-06,
"loss": 1.0744,
"step": 267
},
{
"epoch": 0.32682926829268294,
"grad_norm": 0.267578125,
"learning_rate": 6.731707317073171e-06,
"loss": 1.1988,
"step": 268
},
{
"epoch": 0.3280487804878049,
"grad_norm": 0.1787109375,
"learning_rate": 6.7195121951219525e-06,
"loss": 1.1247,
"step": 269
},
{
"epoch": 0.32926829268292684,
"grad_norm": 0.185546875,
"learning_rate": 6.707317073170733e-06,
"loss": 1.1906,
"step": 270
},
{
"epoch": 0.3304878048780488,
"grad_norm": 0.1787109375,
"learning_rate": 6.695121951219513e-06,
"loss": 1.1284,
"step": 271
},
{
"epoch": 0.33170731707317075,
"grad_norm": 0.1943359375,
"learning_rate": 6.682926829268293e-06,
"loss": 1.1857,
"step": 272
},
{
"epoch": 0.3329268292682927,
"grad_norm": 0.1787109375,
"learning_rate": 6.670731707317074e-06,
"loss": 1.0967,
"step": 273
},
{
"epoch": 0.33414634146341465,
"grad_norm": 0.1943359375,
"learning_rate": 6.658536585365855e-06,
"loss": 1.1155,
"step": 274
},
{
"epoch": 0.3353658536585366,
"grad_norm": 0.17578125,
"learning_rate": 6.646341463414635e-06,
"loss": 1.1599,
"step": 275
},
{
"epoch": 0.33658536585365856,
"grad_norm": 0.1708984375,
"learning_rate": 6.634146341463415e-06,
"loss": 1.1203,
"step": 276
},
{
"epoch": 0.3378048780487805,
"grad_norm": 0.17578125,
"learning_rate": 6.621951219512196e-06,
"loss": 1.0625,
"step": 277
},
{
"epoch": 0.33902439024390246,
"grad_norm": 0.185546875,
"learning_rate": 6.6097560975609765e-06,
"loss": 1.1431,
"step": 278
},
{
"epoch": 0.3402439024390244,
"grad_norm": 0.181640625,
"learning_rate": 6.597560975609757e-06,
"loss": 1.1306,
"step": 279
},
{
"epoch": 0.34146341463414637,
"grad_norm": 0.1904296875,
"learning_rate": 6.585365853658538e-06,
"loss": 1.1914,
"step": 280
},
{
"epoch": 0.3426829268292683,
"grad_norm": 0.185546875,
"learning_rate": 6.573170731707318e-06,
"loss": 1.115,
"step": 281
},
{
"epoch": 0.3439024390243902,
"grad_norm": 0.1748046875,
"learning_rate": 6.560975609756098e-06,
"loss": 1.1587,
"step": 282
},
{
"epoch": 0.34512195121951217,
"grad_norm": 0.1708984375,
"learning_rate": 6.548780487804879e-06,
"loss": 1.105,
"step": 283
},
{
"epoch": 0.3463414634146341,
"grad_norm": 0.22265625,
"learning_rate": 6.53658536585366e-06,
"loss": 1.1598,
"step": 284
},
{
"epoch": 0.3475609756097561,
"grad_norm": 0.19140625,
"learning_rate": 6.52439024390244e-06,
"loss": 1.111,
"step": 285
},
{
"epoch": 0.348780487804878,
"grad_norm": 0.185546875,
"learning_rate": 6.51219512195122e-06,
"loss": 1.1084,
"step": 286
},
{
"epoch": 0.35,
"grad_norm": 0.1728515625,
"learning_rate": 6.5000000000000004e-06,
"loss": 1.1751,
"step": 287
},
{
"epoch": 0.35121951219512193,
"grad_norm": 0.2109375,
"learning_rate": 6.4878048780487815e-06,
"loss": 1.138,
"step": 288
},
{
"epoch": 0.3524390243902439,
"grad_norm": 0.189453125,
"learning_rate": 6.475609756097562e-06,
"loss": 1.1036,
"step": 289
},
{
"epoch": 0.35365853658536583,
"grad_norm": 0.2265625,
"learning_rate": 6.463414634146342e-06,
"loss": 1.1254,
"step": 290
},
{
"epoch": 0.3548780487804878,
"grad_norm": 0.1865234375,
"learning_rate": 6.451219512195122e-06,
"loss": 1.072,
"step": 291
},
{
"epoch": 0.35609756097560974,
"grad_norm": 0.1904296875,
"learning_rate": 6.439024390243903e-06,
"loss": 1.1864,
"step": 292
},
{
"epoch": 0.3573170731707317,
"grad_norm": 0.1845703125,
"learning_rate": 6.426829268292684e-06,
"loss": 1.0325,
"step": 293
},
{
"epoch": 0.35853658536585364,
"grad_norm": 0.177734375,
"learning_rate": 6.414634146341464e-06,
"loss": 1.1022,
"step": 294
},
{
"epoch": 0.3597560975609756,
"grad_norm": 0.177734375,
"learning_rate": 6.402439024390244e-06,
"loss": 1.1639,
"step": 295
},
{
"epoch": 0.36097560975609755,
"grad_norm": 0.1826171875,
"learning_rate": 6.390243902439025e-06,
"loss": 1.0428,
"step": 296
},
{
"epoch": 0.3621951219512195,
"grad_norm": 0.177734375,
"learning_rate": 6.3780487804878055e-06,
"loss": 1.1188,
"step": 297
},
{
"epoch": 0.36341463414634145,
"grad_norm": 0.1875,
"learning_rate": 6.365853658536586e-06,
"loss": 1.0602,
"step": 298
},
{
"epoch": 0.3646341463414634,
"grad_norm": 0.173828125,
"learning_rate": 6.353658536585367e-06,
"loss": 1.1376,
"step": 299
},
{
"epoch": 0.36585365853658536,
"grad_norm": 0.171875,
"learning_rate": 6.341463414634147e-06,
"loss": 1.1395,
"step": 300
},
{
"epoch": 0.3670731707317073,
"grad_norm": 0.1748046875,
"learning_rate": 6.329268292682927e-06,
"loss": 1.1327,
"step": 301
},
{
"epoch": 0.36829268292682926,
"grad_norm": 0.185546875,
"learning_rate": 6.317073170731708e-06,
"loss": 1.1258,
"step": 302
},
{
"epoch": 0.3695121951219512,
"grad_norm": 0.1962890625,
"learning_rate": 6.304878048780489e-06,
"loss": 1.0819,
"step": 303
},
{
"epoch": 0.37073170731707317,
"grad_norm": 0.2109375,
"learning_rate": 6.292682926829269e-06,
"loss": 1.1154,
"step": 304
},
{
"epoch": 0.3719512195121951,
"grad_norm": 0.17578125,
"learning_rate": 6.280487804878049e-06,
"loss": 1.0887,
"step": 305
},
{
"epoch": 0.37317073170731707,
"grad_norm": 0.1943359375,
"learning_rate": 6.2682926829268295e-06,
"loss": 1.1633,
"step": 306
},
{
"epoch": 0.374390243902439,
"grad_norm": 0.1962890625,
"learning_rate": 6.256097560975611e-06,
"loss": 1.1101,
"step": 307
},
{
"epoch": 0.375609756097561,
"grad_norm": 0.19921875,
"learning_rate": 6.243902439024391e-06,
"loss": 1.1236,
"step": 308
},
{
"epoch": 0.37682926829268293,
"grad_norm": 0.1796875,
"learning_rate": 6.231707317073171e-06,
"loss": 1.1329,
"step": 309
},
{
"epoch": 0.3780487804878049,
"grad_norm": 0.1826171875,
"learning_rate": 6.219512195121951e-06,
"loss": 1.1476,
"step": 310
},
{
"epoch": 0.37926829268292683,
"grad_norm": 0.181640625,
"learning_rate": 6.2073170731707325e-06,
"loss": 1.0952,
"step": 311
},
{
"epoch": 0.3804878048780488,
"grad_norm": 0.2158203125,
"learning_rate": 6.195121951219513e-06,
"loss": 1.0978,
"step": 312
},
{
"epoch": 0.38170731707317074,
"grad_norm": 0.193359375,
"learning_rate": 6.182926829268293e-06,
"loss": 1.0685,
"step": 313
},
{
"epoch": 0.3829268292682927,
"grad_norm": 0.18359375,
"learning_rate": 6.170731707317073e-06,
"loss": 1.1741,
"step": 314
},
{
"epoch": 0.38414634146341464,
"grad_norm": 0.1875,
"learning_rate": 6.158536585365854e-06,
"loss": 1.1777,
"step": 315
},
{
"epoch": 0.3853658536585366,
"grad_norm": 0.173828125,
"learning_rate": 6.1463414634146346e-06,
"loss": 1.1392,
"step": 316
},
{
"epoch": 0.38658536585365855,
"grad_norm": 0.1826171875,
"learning_rate": 6.134146341463415e-06,
"loss": 1.0785,
"step": 317
},
{
"epoch": 0.3878048780487805,
"grad_norm": 0.177734375,
"learning_rate": 6.121951219512196e-06,
"loss": 1.1306,
"step": 318
},
{
"epoch": 0.38902439024390245,
"grad_norm": 0.181640625,
"learning_rate": 6.109756097560976e-06,
"loss": 1.0437,
"step": 319
},
{
"epoch": 0.3902439024390244,
"grad_norm": 0.1796875,
"learning_rate": 6.0975609756097564e-06,
"loss": 1.167,
"step": 320
},
{
"epoch": 0.39146341463414636,
"grad_norm": 0.17578125,
"learning_rate": 6.085365853658537e-06,
"loss": 1.135,
"step": 321
},
{
"epoch": 0.3926829268292683,
"grad_norm": 0.19140625,
"learning_rate": 6.073170731707318e-06,
"loss": 1.119,
"step": 322
},
{
"epoch": 0.39390243902439026,
"grad_norm": 0.1669921875,
"learning_rate": 6.060975609756098e-06,
"loss": 1.1277,
"step": 323
},
{
"epoch": 0.3951219512195122,
"grad_norm": 0.181640625,
"learning_rate": 6.048780487804878e-06,
"loss": 1.1658,
"step": 324
},
{
"epoch": 0.39634146341463417,
"grad_norm": 0.17578125,
"learning_rate": 6.0365853658536585e-06,
"loss": 1.1558,
"step": 325
},
{
"epoch": 0.3975609756097561,
"grad_norm": 0.185546875,
"learning_rate": 6.02439024390244e-06,
"loss": 1.1281,
"step": 326
},
{
"epoch": 0.39878048780487807,
"grad_norm": 0.19140625,
"learning_rate": 6.01219512195122e-06,
"loss": 1.1541,
"step": 327
},
{
"epoch": 0.4,
"grad_norm": 0.2119140625,
"learning_rate": 6e-06,
"loss": 1.1222,
"step": 328
},
{
"epoch": 0.401219512195122,
"grad_norm": 0.177734375,
"learning_rate": 5.98780487804878e-06,
"loss": 1.0978,
"step": 329
},
{
"epoch": 0.4024390243902439,
"grad_norm": 0.1845703125,
"learning_rate": 5.9756097560975615e-06,
"loss": 1.1674,
"step": 330
},
{
"epoch": 0.4036585365853659,
"grad_norm": 0.19140625,
"learning_rate": 5.963414634146342e-06,
"loss": 1.0761,
"step": 331
},
{
"epoch": 0.40487804878048783,
"grad_norm": 0.2060546875,
"learning_rate": 5.951219512195122e-06,
"loss": 1.059,
"step": 332
},
{
"epoch": 0.4060975609756098,
"grad_norm": 0.185546875,
"learning_rate": 5.939024390243902e-06,
"loss": 1.1001,
"step": 333
},
{
"epoch": 0.4073170731707317,
"grad_norm": 0.17578125,
"learning_rate": 5.926829268292683e-06,
"loss": 1.0726,
"step": 334
},
{
"epoch": 0.40853658536585363,
"grad_norm": 0.185546875,
"learning_rate": 5.914634146341464e-06,
"loss": 1.1478,
"step": 335
},
{
"epoch": 0.4097560975609756,
"grad_norm": 0.1787109375,
"learning_rate": 5.902439024390244e-06,
"loss": 1.0873,
"step": 336
},
{
"epoch": 0.41097560975609754,
"grad_norm": 0.1767578125,
"learning_rate": 5.890243902439025e-06,
"loss": 1.0865,
"step": 337
},
{
"epoch": 0.4121951219512195,
"grad_norm": 0.1767578125,
"learning_rate": 5.878048780487805e-06,
"loss": 1.1033,
"step": 338
},
{
"epoch": 0.41341463414634144,
"grad_norm": 0.2021484375,
"learning_rate": 5.8658536585365855e-06,
"loss": 1.1067,
"step": 339
},
{
"epoch": 0.4146341463414634,
"grad_norm": 0.197265625,
"learning_rate": 5.853658536585366e-06,
"loss": 1.1376,
"step": 340
},
{
"epoch": 0.41585365853658535,
"grad_norm": 0.1796875,
"learning_rate": 5.841463414634147e-06,
"loss": 1.0712,
"step": 341
},
{
"epoch": 0.4170731707317073,
"grad_norm": 0.1962890625,
"learning_rate": 5.829268292682927e-06,
"loss": 1.0799,
"step": 342
},
{
"epoch": 0.41829268292682925,
"grad_norm": 0.189453125,
"learning_rate": 5.817073170731707e-06,
"loss": 1.1425,
"step": 343
},
{
"epoch": 0.4195121951219512,
"grad_norm": 0.189453125,
"learning_rate": 5.804878048780488e-06,
"loss": 1.1261,
"step": 344
},
{
"epoch": 0.42073170731707316,
"grad_norm": 0.197265625,
"learning_rate": 5.792682926829269e-06,
"loss": 1.1278,
"step": 345
},
{
"epoch": 0.4219512195121951,
"grad_norm": 0.1826171875,
"learning_rate": 5.780487804878049e-06,
"loss": 1.1108,
"step": 346
},
{
"epoch": 0.42317073170731706,
"grad_norm": 0.1796875,
"learning_rate": 5.768292682926829e-06,
"loss": 1.1388,
"step": 347
},
{
"epoch": 0.424390243902439,
"grad_norm": 0.1806640625,
"learning_rate": 5.7560975609756095e-06,
"loss": 1.096,
"step": 348
},
{
"epoch": 0.42560975609756097,
"grad_norm": 0.21484375,
"learning_rate": 5.7439024390243906e-06,
"loss": 1.0673,
"step": 349
},
{
"epoch": 0.4268292682926829,
"grad_norm": 0.26953125,
"learning_rate": 5.731707317073171e-06,
"loss": 1.1634,
"step": 350
},
{
"epoch": 0.42804878048780487,
"grad_norm": 0.181640625,
"learning_rate": 5.719512195121951e-06,
"loss": 1.1028,
"step": 351
},
{
"epoch": 0.4292682926829268,
"grad_norm": 0.1904296875,
"learning_rate": 5.707317073170731e-06,
"loss": 1.101,
"step": 352
},
{
"epoch": 0.4304878048780488,
"grad_norm": 0.18359375,
"learning_rate": 5.695121951219512e-06,
"loss": 1.1242,
"step": 353
},
{
"epoch": 0.4317073170731707,
"grad_norm": 0.1767578125,
"learning_rate": 5.682926829268293e-06,
"loss": 1.124,
"step": 354
},
{
"epoch": 0.4329268292682927,
"grad_norm": 0.2060546875,
"learning_rate": 5.670731707317073e-06,
"loss": 1.0535,
"step": 355
},
{
"epoch": 0.43414634146341463,
"grad_norm": 0.1845703125,
"learning_rate": 5.658536585365853e-06,
"loss": 1.1274,
"step": 356
},
{
"epoch": 0.4353658536585366,
"grad_norm": 0.1875,
"learning_rate": 5.646341463414634e-06,
"loss": 1.093,
"step": 357
},
{
"epoch": 0.43658536585365854,
"grad_norm": 0.18359375,
"learning_rate": 5.6341463414634145e-06,
"loss": 1.1121,
"step": 358
},
{
"epoch": 0.4378048780487805,
"grad_norm": 0.1923828125,
"learning_rate": 5.6219512195121965e-06,
"loss": 1.1554,
"step": 359
},
{
"epoch": 0.43902439024390244,
"grad_norm": 0.1865234375,
"learning_rate": 5.609756097560977e-06,
"loss": 1.1646,
"step": 360
},
{
"epoch": 0.4402439024390244,
"grad_norm": 0.1904296875,
"learning_rate": 5.597560975609757e-06,
"loss": 1.1567,
"step": 361
},
{
"epoch": 0.44146341463414634,
"grad_norm": 0.1982421875,
"learning_rate": 5.585365853658537e-06,
"loss": 1.1338,
"step": 362
},
{
"epoch": 0.4426829268292683,
"grad_norm": 0.1845703125,
"learning_rate": 5.573170731707318e-06,
"loss": 1.0403,
"step": 363
},
{
"epoch": 0.44390243902439025,
"grad_norm": 0.185546875,
"learning_rate": 5.560975609756099e-06,
"loss": 1.0957,
"step": 364
},
{
"epoch": 0.4451219512195122,
"grad_norm": 0.201171875,
"learning_rate": 5.548780487804879e-06,
"loss": 1.0949,
"step": 365
},
{
"epoch": 0.44634146341463415,
"grad_norm": 0.1904296875,
"learning_rate": 5.536585365853659e-06,
"loss": 1.1318,
"step": 366
},
{
"epoch": 0.4475609756097561,
"grad_norm": 0.18359375,
"learning_rate": 5.52439024390244e-06,
"loss": 1.1446,
"step": 367
},
{
"epoch": 0.44878048780487806,
"grad_norm": 0.1748046875,
"learning_rate": 5.5121951219512205e-06,
"loss": 1.1105,
"step": 368
},
{
"epoch": 0.45,
"grad_norm": 0.1787109375,
"learning_rate": 5.500000000000001e-06,
"loss": 1.0745,
"step": 369
},
{
"epoch": 0.45121951219512196,
"grad_norm": 0.1845703125,
"learning_rate": 5.487804878048781e-06,
"loss": 1.1463,
"step": 370
},
{
"epoch": 0.4524390243902439,
"grad_norm": 0.1728515625,
"learning_rate": 5.475609756097562e-06,
"loss": 1.1111,
"step": 371
},
{
"epoch": 0.45365853658536587,
"grad_norm": 0.1923828125,
"learning_rate": 5.463414634146342e-06,
"loss": 1.1183,
"step": 372
},
{
"epoch": 0.4548780487804878,
"grad_norm": 0.1826171875,
"learning_rate": 5.4512195121951226e-06,
"loss": 1.1308,
"step": 373
},
{
"epoch": 0.4560975609756098,
"grad_norm": 0.177734375,
"learning_rate": 5.439024390243904e-06,
"loss": 1.0861,
"step": 374
},
{
"epoch": 0.4573170731707317,
"grad_norm": 0.181640625,
"learning_rate": 5.426829268292684e-06,
"loss": 1.0827,
"step": 375
},
{
"epoch": 0.4585365853658537,
"grad_norm": 0.173828125,
"learning_rate": 5.414634146341464e-06,
"loss": 1.1014,
"step": 376
},
{
"epoch": 0.45975609756097563,
"grad_norm": 0.20703125,
"learning_rate": 5.4024390243902444e-06,
"loss": 1.1291,
"step": 377
},
{
"epoch": 0.4609756097560976,
"grad_norm": 0.185546875,
"learning_rate": 5.3902439024390255e-06,
"loss": 1.0899,
"step": 378
},
{
"epoch": 0.46219512195121953,
"grad_norm": 0.185546875,
"learning_rate": 5.378048780487806e-06,
"loss": 1.0797,
"step": 379
},
{
"epoch": 0.4634146341463415,
"grad_norm": 0.17578125,
"learning_rate": 5.365853658536586e-06,
"loss": 1.0599,
"step": 380
},
{
"epoch": 0.46463414634146344,
"grad_norm": 0.1982421875,
"learning_rate": 5.353658536585366e-06,
"loss": 1.1505,
"step": 381
},
{
"epoch": 0.4658536585365854,
"grad_norm": 0.283203125,
"learning_rate": 5.341463414634147e-06,
"loss": 1.0887,
"step": 382
},
{
"epoch": 0.46707317073170734,
"grad_norm": 0.1826171875,
"learning_rate": 5.329268292682928e-06,
"loss": 1.0973,
"step": 383
},
{
"epoch": 0.4682926829268293,
"grad_norm": 0.197265625,
"learning_rate": 5.317073170731708e-06,
"loss": 1.08,
"step": 384
},
{
"epoch": 0.4695121951219512,
"grad_norm": 0.1884765625,
"learning_rate": 5.304878048780488e-06,
"loss": 1.1254,
"step": 385
},
{
"epoch": 0.47073170731707314,
"grad_norm": 0.185546875,
"learning_rate": 5.292682926829269e-06,
"loss": 1.0845,
"step": 386
},
{
"epoch": 0.4719512195121951,
"grad_norm": 0.2236328125,
"learning_rate": 5.2804878048780495e-06,
"loss": 1.111,
"step": 387
},
{
"epoch": 0.47317073170731705,
"grad_norm": 0.1787109375,
"learning_rate": 5.26829268292683e-06,
"loss": 1.0589,
"step": 388
},
{
"epoch": 0.474390243902439,
"grad_norm": 0.1845703125,
"learning_rate": 5.25609756097561e-06,
"loss": 1.1279,
"step": 389
},
{
"epoch": 0.47560975609756095,
"grad_norm": 0.1826171875,
"learning_rate": 5.243902439024391e-06,
"loss": 1.0662,
"step": 390
},
{
"epoch": 0.4768292682926829,
"grad_norm": 0.1826171875,
"learning_rate": 5.231707317073171e-06,
"loss": 1.049,
"step": 391
},
{
"epoch": 0.47804878048780486,
"grad_norm": 0.1796875,
"learning_rate": 5.219512195121952e-06,
"loss": 1.0644,
"step": 392
},
{
"epoch": 0.4792682926829268,
"grad_norm": 0.1826171875,
"learning_rate": 5.207317073170733e-06,
"loss": 1.141,
"step": 393
},
{
"epoch": 0.48048780487804876,
"grad_norm": 0.1826171875,
"learning_rate": 5.195121951219513e-06,
"loss": 1.0814,
"step": 394
},
{
"epoch": 0.4817073170731707,
"grad_norm": 0.1875,
"learning_rate": 5.182926829268293e-06,
"loss": 1.0742,
"step": 395
},
{
"epoch": 0.48292682926829267,
"grad_norm": 0.19140625,
"learning_rate": 5.1707317073170735e-06,
"loss": 1.0399,
"step": 396
},
{
"epoch": 0.4841463414634146,
"grad_norm": 0.1962890625,
"learning_rate": 5.1585365853658546e-06,
"loss": 1.0634,
"step": 397
},
{
"epoch": 0.4853658536585366,
"grad_norm": 0.232421875,
"learning_rate": 5.146341463414635e-06,
"loss": 1.131,
"step": 398
},
{
"epoch": 0.4865853658536585,
"grad_norm": 0.2001953125,
"learning_rate": 5.134146341463415e-06,
"loss": 1.0793,
"step": 399
},
{
"epoch": 0.4878048780487805,
"grad_norm": 0.2578125,
"learning_rate": 5.121951219512195e-06,
"loss": 1.0662,
"step": 400
},
{
"epoch": 0.48902439024390243,
"grad_norm": 0.19921875,
"learning_rate": 5.1097560975609764e-06,
"loss": 1.178,
"step": 401
},
{
"epoch": 0.4902439024390244,
"grad_norm": 0.1943359375,
"learning_rate": 5.097560975609757e-06,
"loss": 1.0784,
"step": 402
},
{
"epoch": 0.49146341463414633,
"grad_norm": 0.205078125,
"learning_rate": 5.085365853658537e-06,
"loss": 1.1118,
"step": 403
},
{
"epoch": 0.4926829268292683,
"grad_norm": 0.1953125,
"learning_rate": 5.073170731707317e-06,
"loss": 1.1315,
"step": 404
},
{
"epoch": 0.49390243902439024,
"grad_norm": 0.193359375,
"learning_rate": 5.060975609756098e-06,
"loss": 1.0338,
"step": 405
},
{
"epoch": 0.4951219512195122,
"grad_norm": 0.2109375,
"learning_rate": 5.0487804878048785e-06,
"loss": 1.1451,
"step": 406
},
{
"epoch": 0.49634146341463414,
"grad_norm": 0.201171875,
"learning_rate": 5.036585365853659e-06,
"loss": 1.0775,
"step": 407
},
{
"epoch": 0.4975609756097561,
"grad_norm": 0.189453125,
"learning_rate": 5.024390243902439e-06,
"loss": 1.0834,
"step": 408
},
{
"epoch": 0.49878048780487805,
"grad_norm": 0.1884765625,
"learning_rate": 5.01219512195122e-06,
"loss": 1.0719,
"step": 409
},
{
"epoch": 0.5,
"grad_norm": 0.37109375,
"learning_rate": 5e-06,
"loss": 1.2225,
"step": 410
},
{
"epoch": 0.501219512195122,
"grad_norm": 0.181640625,
"learning_rate": 4.987804878048781e-06,
"loss": 1.0514,
"step": 411
},
{
"epoch": 0.5024390243902439,
"grad_norm": 0.1865234375,
"learning_rate": 4.975609756097562e-06,
"loss": 1.1518,
"step": 412
},
{
"epoch": 0.5036585365853659,
"grad_norm": 0.1962890625,
"learning_rate": 4.963414634146342e-06,
"loss": 1.1065,
"step": 413
},
{
"epoch": 0.5048780487804878,
"grad_norm": 0.1962890625,
"learning_rate": 4.951219512195122e-06,
"loss": 1.0973,
"step": 414
},
{
"epoch": 0.5060975609756098,
"grad_norm": 0.1875,
"learning_rate": 4.9390243902439025e-06,
"loss": 1.1072,
"step": 415
},
{
"epoch": 0.5073170731707317,
"grad_norm": 0.1787109375,
"learning_rate": 4.926829268292684e-06,
"loss": 1.0223,
"step": 416
},
{
"epoch": 0.5085365853658537,
"grad_norm": 0.2275390625,
"learning_rate": 4.914634146341464e-06,
"loss": 1.106,
"step": 417
},
{
"epoch": 0.5097560975609756,
"grad_norm": 0.1884765625,
"learning_rate": 4.902439024390244e-06,
"loss": 1.1571,
"step": 418
},
{
"epoch": 0.5109756097560976,
"grad_norm": 0.1962890625,
"learning_rate": 4.890243902439024e-06,
"loss": 1.1613,
"step": 419
},
{
"epoch": 0.5121951219512195,
"grad_norm": 0.181640625,
"learning_rate": 4.8780487804878055e-06,
"loss": 1.0864,
"step": 420
},
{
"epoch": 0.5134146341463415,
"grad_norm": 0.1865234375,
"learning_rate": 4.865853658536586e-06,
"loss": 1.0378,
"step": 421
},
{
"epoch": 0.5146341463414634,
"grad_norm": 0.1943359375,
"learning_rate": 4.853658536585366e-06,
"loss": 1.1185,
"step": 422
},
{
"epoch": 0.5158536585365854,
"grad_norm": 0.1923828125,
"learning_rate": 4.841463414634146e-06,
"loss": 1.0443,
"step": 423
},
{
"epoch": 0.5170731707317073,
"grad_norm": 0.1904296875,
"learning_rate": 4.829268292682927e-06,
"loss": 1.0556,
"step": 424
},
{
"epoch": 0.5182926829268293,
"grad_norm": 0.1875,
"learning_rate": 4.817073170731708e-06,
"loss": 1.1298,
"step": 425
},
{
"epoch": 0.5195121951219512,
"grad_norm": 0.1796875,
"learning_rate": 4.804878048780488e-06,
"loss": 1.065,
"step": 426
},
{
"epoch": 0.5207317073170732,
"grad_norm": 0.1826171875,
"learning_rate": 4.792682926829268e-06,
"loss": 1.1335,
"step": 427
},
{
"epoch": 0.5219512195121951,
"grad_norm": 0.1748046875,
"learning_rate": 4.780487804878049e-06,
"loss": 1.0745,
"step": 428
},
{
"epoch": 0.5231707317073171,
"grad_norm": 0.1923828125,
"learning_rate": 4.7682926829268295e-06,
"loss": 1.0771,
"step": 429
},
{
"epoch": 0.524390243902439,
"grad_norm": 0.1982421875,
"learning_rate": 4.75609756097561e-06,
"loss": 1.0682,
"step": 430
},
{
"epoch": 0.525609756097561,
"grad_norm": 0.318359375,
"learning_rate": 4.743902439024391e-06,
"loss": 1.1436,
"step": 431
},
{
"epoch": 0.526829268292683,
"grad_norm": 0.197265625,
"learning_rate": 4.731707317073171e-06,
"loss": 1.1088,
"step": 432
},
{
"epoch": 0.5280487804878049,
"grad_norm": 0.197265625,
"learning_rate": 4.719512195121951e-06,
"loss": 1.048,
"step": 433
},
{
"epoch": 0.5292682926829269,
"grad_norm": 0.17578125,
"learning_rate": 4.7073170731707316e-06,
"loss": 1.0735,
"step": 434
},
{
"epoch": 0.5304878048780488,
"grad_norm": 0.185546875,
"learning_rate": 4.695121951219513e-06,
"loss": 1.0368,
"step": 435
},
{
"epoch": 0.5317073170731708,
"grad_norm": 0.193359375,
"learning_rate": 4.682926829268293e-06,
"loss": 1.0993,
"step": 436
},
{
"epoch": 0.5329268292682927,
"grad_norm": 0.1875,
"learning_rate": 4.670731707317074e-06,
"loss": 1.0675,
"step": 437
},
{
"epoch": 0.5341463414634147,
"grad_norm": 0.2158203125,
"learning_rate": 4.658536585365854e-06,
"loss": 1.0814,
"step": 438
},
{
"epoch": 0.5353658536585366,
"grad_norm": 0.189453125,
"learning_rate": 4.6463414634146345e-06,
"loss": 1.1575,
"step": 439
},
{
"epoch": 0.5365853658536586,
"grad_norm": 0.1923828125,
"learning_rate": 4.634146341463416e-06,
"loss": 1.0666,
"step": 440
},
{
"epoch": 0.5378048780487805,
"grad_norm": 0.220703125,
"learning_rate": 4.621951219512196e-06,
"loss": 1.117,
"step": 441
},
{
"epoch": 0.5390243902439025,
"grad_norm": 0.212890625,
"learning_rate": 4.609756097560976e-06,
"loss": 1.1458,
"step": 442
},
{
"epoch": 0.5402439024390244,
"grad_norm": 0.1796875,
"learning_rate": 4.597560975609756e-06,
"loss": 1.1491,
"step": 443
},
{
"epoch": 0.5414634146341464,
"grad_norm": 0.1806640625,
"learning_rate": 4.5853658536585375e-06,
"loss": 1.1205,
"step": 444
},
{
"epoch": 0.5426829268292683,
"grad_norm": 0.193359375,
"learning_rate": 4.573170731707318e-06,
"loss": 1.0387,
"step": 445
},
{
"epoch": 0.5439024390243903,
"grad_norm": 0.1865234375,
"learning_rate": 4.560975609756098e-06,
"loss": 1.0962,
"step": 446
},
{
"epoch": 0.5451219512195122,
"grad_norm": 0.2158203125,
"learning_rate": 4.548780487804878e-06,
"loss": 1.1036,
"step": 447
},
{
"epoch": 0.5463414634146342,
"grad_norm": 0.185546875,
"learning_rate": 4.536585365853659e-06,
"loss": 1.0776,
"step": 448
},
{
"epoch": 0.5475609756097561,
"grad_norm": 0.19140625,
"learning_rate": 4.52439024390244e-06,
"loss": 1.1556,
"step": 449
},
{
"epoch": 0.5487804878048781,
"grad_norm": 0.185546875,
"learning_rate": 4.51219512195122e-06,
"loss": 1.0779,
"step": 450
},
{
"epoch": 0.55,
"grad_norm": 0.2177734375,
"learning_rate": 4.5e-06,
"loss": 1.0078,
"step": 451
},
{
"epoch": 0.551219512195122,
"grad_norm": 0.212890625,
"learning_rate": 4.487804878048781e-06,
"loss": 1.0247,
"step": 452
},
{
"epoch": 0.552439024390244,
"grad_norm": 0.1904296875,
"learning_rate": 4.4756097560975615e-06,
"loss": 1.1226,
"step": 453
},
{
"epoch": 0.5536585365853659,
"grad_norm": 0.19140625,
"learning_rate": 4.463414634146342e-06,
"loss": 1.0441,
"step": 454
},
{
"epoch": 0.5548780487804879,
"grad_norm": 0.283203125,
"learning_rate": 4.451219512195122e-06,
"loss": 1.0545,
"step": 455
},
{
"epoch": 0.5560975609756098,
"grad_norm": 0.20703125,
"learning_rate": 4.439024390243903e-06,
"loss": 1.0808,
"step": 456
},
{
"epoch": 0.5573170731707318,
"grad_norm": 0.197265625,
"learning_rate": 4.426829268292683e-06,
"loss": 1.0545,
"step": 457
},
{
"epoch": 0.5585365853658537,
"grad_norm": 0.2353515625,
"learning_rate": 4.414634146341464e-06,
"loss": 1.0619,
"step": 458
},
{
"epoch": 0.5597560975609757,
"grad_norm": 0.19140625,
"learning_rate": 4.402439024390245e-06,
"loss": 1.0775,
"step": 459
},
{
"epoch": 0.5609756097560976,
"grad_norm": 0.1953125,
"learning_rate": 4.390243902439025e-06,
"loss": 1.0985,
"step": 460
},
{
"epoch": 0.5621951219512196,
"grad_norm": 0.1845703125,
"learning_rate": 4.378048780487805e-06,
"loss": 1.0534,
"step": 461
},
{
"epoch": 0.5634146341463414,
"grad_norm": 0.181640625,
"learning_rate": 4.3658536585365854e-06,
"loss": 1.0525,
"step": 462
},
{
"epoch": 0.5646341463414634,
"grad_norm": 0.1806640625,
"learning_rate": 4.3536585365853665e-06,
"loss": 1.0208,
"step": 463
},
{
"epoch": 0.5658536585365853,
"grad_norm": 0.251953125,
"learning_rate": 4.341463414634147e-06,
"loss": 1.0627,
"step": 464
},
{
"epoch": 0.5670731707317073,
"grad_norm": 0.18359375,
"learning_rate": 4.329268292682927e-06,
"loss": 1.0544,
"step": 465
},
{
"epoch": 0.5682926829268292,
"grad_norm": 0.1953125,
"learning_rate": 4.317073170731707e-06,
"loss": 1.1068,
"step": 466
},
{
"epoch": 0.5695121951219512,
"grad_norm": 0.265625,
"learning_rate": 4.304878048780488e-06,
"loss": 1.0316,
"step": 467
},
{
"epoch": 0.5707317073170731,
"grad_norm": 0.1806640625,
"learning_rate": 4.292682926829269e-06,
"loss": 1.061,
"step": 468
},
{
"epoch": 0.5719512195121951,
"grad_norm": 0.19921875,
"learning_rate": 4.280487804878049e-06,
"loss": 1.117,
"step": 469
},
{
"epoch": 0.573170731707317,
"grad_norm": 0.1953125,
"learning_rate": 4.268292682926829e-06,
"loss": 1.137,
"step": 470
},
{
"epoch": 0.574390243902439,
"grad_norm": 0.2021484375,
"learning_rate": 4.25609756097561e-06,
"loss": 1.0557,
"step": 471
},
{
"epoch": 0.5756097560975609,
"grad_norm": 0.1962890625,
"learning_rate": 4.2439024390243905e-06,
"loss": 1.1299,
"step": 472
},
{
"epoch": 0.5768292682926829,
"grad_norm": 0.1904296875,
"learning_rate": 4.231707317073171e-06,
"loss": 1.0561,
"step": 473
},
{
"epoch": 0.5780487804878048,
"grad_norm": 0.1875,
"learning_rate": 4.219512195121951e-06,
"loss": 1.0669,
"step": 474
},
{
"epoch": 0.5792682926829268,
"grad_norm": 0.1845703125,
"learning_rate": 4.207317073170732e-06,
"loss": 1.0306,
"step": 475
},
{
"epoch": 0.5804878048780487,
"grad_norm": 0.1962890625,
"learning_rate": 4.195121951219512e-06,
"loss": 1.1473,
"step": 476
},
{
"epoch": 0.5817073170731707,
"grad_norm": 0.1953125,
"learning_rate": 4.182926829268293e-06,
"loss": 1.0403,
"step": 477
},
{
"epoch": 0.5829268292682926,
"grad_norm": 0.1962890625,
"learning_rate": 4.170731707317074e-06,
"loss": 1.0267,
"step": 478
},
{
"epoch": 0.5841463414634146,
"grad_norm": 0.19921875,
"learning_rate": 4.158536585365854e-06,
"loss": 1.1301,
"step": 479
},
{
"epoch": 0.5853658536585366,
"grad_norm": 0.2001953125,
"learning_rate": 4.146341463414634e-06,
"loss": 1.0481,
"step": 480
},
{
"epoch": 0.5865853658536585,
"grad_norm": 0.232421875,
"learning_rate": 4.1341463414634145e-06,
"loss": 1.1321,
"step": 481
},
{
"epoch": 0.5878048780487805,
"grad_norm": 0.189453125,
"learning_rate": 4.121951219512196e-06,
"loss": 1.0029,
"step": 482
},
{
"epoch": 0.5890243902439024,
"grad_norm": 0.1806640625,
"learning_rate": 4.109756097560976e-06,
"loss": 1.0561,
"step": 483
},
{
"epoch": 0.5902439024390244,
"grad_norm": 0.1953125,
"learning_rate": 4.097560975609756e-06,
"loss": 1.0826,
"step": 484
},
{
"epoch": 0.5914634146341463,
"grad_norm": 0.2216796875,
"learning_rate": 4.085365853658536e-06,
"loss": 1.0699,
"step": 485
},
{
"epoch": 0.5926829268292683,
"grad_norm": 0.1953125,
"learning_rate": 4.0731707317073175e-06,
"loss": 1.1371,
"step": 486
},
{
"epoch": 0.5939024390243902,
"grad_norm": 0.185546875,
"learning_rate": 4.0609756097560986e-06,
"loss": 1.0756,
"step": 487
},
{
"epoch": 0.5951219512195122,
"grad_norm": 0.201171875,
"learning_rate": 4.048780487804879e-06,
"loss": 1.0674,
"step": 488
},
{
"epoch": 0.5963414634146341,
"grad_norm": 0.201171875,
"learning_rate": 4.036585365853659e-06,
"loss": 1.0942,
"step": 489
},
{
"epoch": 0.5975609756097561,
"grad_norm": 0.2138671875,
"learning_rate": 4.024390243902439e-06,
"loss": 1.0999,
"step": 490
},
{
"epoch": 0.598780487804878,
"grad_norm": 0.1904296875,
"learning_rate": 4.01219512195122e-06,
"loss": 1.141,
"step": 491
},
{
"epoch": 0.6,
"grad_norm": 0.2197265625,
"learning_rate": 4.000000000000001e-06,
"loss": 1.1518,
"step": 492
},
{
"epoch": 0.6012195121951219,
"grad_norm": 0.333984375,
"learning_rate": 3.987804878048781e-06,
"loss": 1.0763,
"step": 493
},
{
"epoch": 0.6024390243902439,
"grad_norm": 0.2177734375,
"learning_rate": 3.975609756097561e-06,
"loss": 1.0236,
"step": 494
},
{
"epoch": 0.6036585365853658,
"grad_norm": 0.1923828125,
"learning_rate": 3.963414634146342e-06,
"loss": 1.0964,
"step": 495
},
{
"epoch": 0.6048780487804878,
"grad_norm": 0.1904296875,
"learning_rate": 3.9512195121951225e-06,
"loss": 1.0978,
"step": 496
},
{
"epoch": 0.6060975609756097,
"grad_norm": 0.19140625,
"learning_rate": 3.939024390243903e-06,
"loss": 1.0856,
"step": 497
},
{
"epoch": 0.6073170731707317,
"grad_norm": 0.193359375,
"learning_rate": 3.926829268292683e-06,
"loss": 1.1262,
"step": 498
},
{
"epoch": 0.6085365853658536,
"grad_norm": 0.1875,
"learning_rate": 3.914634146341464e-06,
"loss": 1.1766,
"step": 499
},
{
"epoch": 0.6097560975609756,
"grad_norm": 0.193359375,
"learning_rate": 3.902439024390244e-06,
"loss": 1.0285,
"step": 500
},
{
"epoch": 0.6109756097560975,
"grad_norm": 0.1904296875,
"learning_rate": 3.890243902439025e-06,
"loss": 1.0808,
"step": 501
},
{
"epoch": 0.6121951219512195,
"grad_norm": 0.1923828125,
"learning_rate": 3.878048780487805e-06,
"loss": 1.098,
"step": 502
},
{
"epoch": 0.6134146341463415,
"grad_norm": 0.201171875,
"learning_rate": 3.865853658536586e-06,
"loss": 1.1352,
"step": 503
},
{
"epoch": 0.6146341463414634,
"grad_norm": 0.21875,
"learning_rate": 3.853658536585366e-06,
"loss": 1.116,
"step": 504
},
{
"epoch": 0.6158536585365854,
"grad_norm": 0.1875,
"learning_rate": 3.8414634146341465e-06,
"loss": 1.0465,
"step": 505
},
{
"epoch": 0.6170731707317073,
"grad_norm": 0.197265625,
"learning_rate": 3.829268292682928e-06,
"loss": 1.0884,
"step": 506
},
{
"epoch": 0.6182926829268293,
"grad_norm": 0.21875,
"learning_rate": 3.817073170731708e-06,
"loss": 1.0589,
"step": 507
},
{
"epoch": 0.6195121951219512,
"grad_norm": 0.1904296875,
"learning_rate": 3.804878048780488e-06,
"loss": 1.038,
"step": 508
},
{
"epoch": 0.6207317073170732,
"grad_norm": 0.2060546875,
"learning_rate": 3.7926829268292688e-06,
"loss": 1.029,
"step": 509
},
{
"epoch": 0.6219512195121951,
"grad_norm": 0.197265625,
"learning_rate": 3.780487804878049e-06,
"loss": 1.1342,
"step": 510
},
{
"epoch": 0.6231707317073171,
"grad_norm": 0.185546875,
"learning_rate": 3.7682926829268297e-06,
"loss": 1.1217,
"step": 511
},
{
"epoch": 0.624390243902439,
"grad_norm": 0.1904296875,
"learning_rate": 3.75609756097561e-06,
"loss": 1.073,
"step": 512
},
{
"epoch": 0.625609756097561,
"grad_norm": 0.1953125,
"learning_rate": 3.7439024390243906e-06,
"loss": 1.0929,
"step": 513
},
{
"epoch": 0.6268292682926829,
"grad_norm": 0.21875,
"learning_rate": 3.731707317073171e-06,
"loss": 1.1077,
"step": 514
},
{
"epoch": 0.6280487804878049,
"grad_norm": 0.19140625,
"learning_rate": 3.7195121951219516e-06,
"loss": 1.0438,
"step": 515
},
{
"epoch": 0.6292682926829268,
"grad_norm": 0.1943359375,
"learning_rate": 3.707317073170732e-06,
"loss": 1.1266,
"step": 516
},
{
"epoch": 0.6304878048780488,
"grad_norm": 0.19140625,
"learning_rate": 3.6951219512195125e-06,
"loss": 1.0465,
"step": 517
},
{
"epoch": 0.6317073170731707,
"grad_norm": 0.181640625,
"learning_rate": 3.6829268292682928e-06,
"loss": 1.0808,
"step": 518
},
{
"epoch": 0.6329268292682927,
"grad_norm": 0.203125,
"learning_rate": 3.6707317073170734e-06,
"loss": 1.1272,
"step": 519
},
{
"epoch": 0.6341463414634146,
"grad_norm": 0.1923828125,
"learning_rate": 3.6585365853658537e-06,
"loss": 1.0783,
"step": 520
},
{
"epoch": 0.6353658536585366,
"grad_norm": 0.1923828125,
"learning_rate": 3.6463414634146344e-06,
"loss": 1.0423,
"step": 521
},
{
"epoch": 0.6365853658536585,
"grad_norm": 0.1943359375,
"learning_rate": 3.6341463414634146e-06,
"loss": 1.1099,
"step": 522
},
{
"epoch": 0.6378048780487805,
"grad_norm": 0.2314453125,
"learning_rate": 3.6219512195121953e-06,
"loss": 1.0481,
"step": 523
},
{
"epoch": 0.6390243902439025,
"grad_norm": 0.1962890625,
"learning_rate": 3.609756097560976e-06,
"loss": 1.0337,
"step": 524
},
{
"epoch": 0.6402439024390244,
"grad_norm": 0.1943359375,
"learning_rate": 3.5975609756097562e-06,
"loss": 1.0519,
"step": 525
},
{
"epoch": 0.6414634146341464,
"grad_norm": 0.19140625,
"learning_rate": 3.585365853658537e-06,
"loss": 1.1016,
"step": 526
},
{
"epoch": 0.6426829268292683,
"grad_norm": 0.1943359375,
"learning_rate": 3.573170731707317e-06,
"loss": 1.1309,
"step": 527
},
{
"epoch": 0.6439024390243903,
"grad_norm": 0.193359375,
"learning_rate": 3.560975609756098e-06,
"loss": 1.1033,
"step": 528
},
{
"epoch": 0.6451219512195122,
"grad_norm": 0.1875,
"learning_rate": 3.548780487804878e-06,
"loss": 1.09,
"step": 529
},
{
"epoch": 0.6463414634146342,
"grad_norm": 0.1923828125,
"learning_rate": 3.5365853658536588e-06,
"loss": 1.0413,
"step": 530
},
{
"epoch": 0.6475609756097561,
"grad_norm": 0.19921875,
"learning_rate": 3.524390243902439e-06,
"loss": 1.1486,
"step": 531
},
{
"epoch": 0.6487804878048781,
"grad_norm": 0.19921875,
"learning_rate": 3.5121951219512197e-06,
"loss": 1.072,
"step": 532
},
{
"epoch": 0.65,
"grad_norm": 0.1943359375,
"learning_rate": 3.5e-06,
"loss": 1.0675,
"step": 533
},
{
"epoch": 0.651219512195122,
"grad_norm": 0.2099609375,
"learning_rate": 3.4878048780487806e-06,
"loss": 1.0719,
"step": 534
},
{
"epoch": 0.6524390243902439,
"grad_norm": 0.2099609375,
"learning_rate": 3.475609756097561e-06,
"loss": 1.1005,
"step": 535
},
{
"epoch": 0.6536585365853659,
"grad_norm": 0.189453125,
"learning_rate": 3.4634146341463416e-06,
"loss": 1.1054,
"step": 536
},
{
"epoch": 0.6548780487804878,
"grad_norm": 0.1806640625,
"learning_rate": 3.451219512195122e-06,
"loss": 1.0733,
"step": 537
},
{
"epoch": 0.6560975609756098,
"grad_norm": 0.1884765625,
"learning_rate": 3.4390243902439025e-06,
"loss": 1.0682,
"step": 538
},
{
"epoch": 0.6573170731707317,
"grad_norm": 0.201171875,
"learning_rate": 3.4268292682926836e-06,
"loss": 1.055,
"step": 539
},
{
"epoch": 0.6585365853658537,
"grad_norm": 0.203125,
"learning_rate": 3.414634146341464e-06,
"loss": 1.0479,
"step": 540
},
{
"epoch": 0.6597560975609756,
"grad_norm": 0.1904296875,
"learning_rate": 3.4024390243902445e-06,
"loss": 1.0739,
"step": 541
},
{
"epoch": 0.6609756097560976,
"grad_norm": 0.1923828125,
"learning_rate": 3.3902439024390248e-06,
"loss": 1.0636,
"step": 542
},
{
"epoch": 0.6621951219512195,
"grad_norm": 0.1904296875,
"learning_rate": 3.3780487804878054e-06,
"loss": 1.0841,
"step": 543
},
{
"epoch": 0.6634146341463415,
"grad_norm": 0.19921875,
"learning_rate": 3.3658536585365857e-06,
"loss": 1.0363,
"step": 544
},
{
"epoch": 0.6646341463414634,
"grad_norm": 0.1923828125,
"learning_rate": 3.3536585365853664e-06,
"loss": 1.0939,
"step": 545
},
{
"epoch": 0.6658536585365854,
"grad_norm": 0.1826171875,
"learning_rate": 3.3414634146341466e-06,
"loss": 1.0318,
"step": 546
},
{
"epoch": 0.6670731707317074,
"grad_norm": 0.19140625,
"learning_rate": 3.3292682926829273e-06,
"loss": 1.0734,
"step": 547
},
{
"epoch": 0.6682926829268293,
"grad_norm": 0.2041015625,
"learning_rate": 3.3170731707317076e-06,
"loss": 1.1419,
"step": 548
},
{
"epoch": 0.6695121951219513,
"grad_norm": 0.21484375,
"learning_rate": 3.3048780487804882e-06,
"loss": 1.128,
"step": 549
},
{
"epoch": 0.6707317073170732,
"grad_norm": 0.1865234375,
"learning_rate": 3.292682926829269e-06,
"loss": 1.0225,
"step": 550
},
{
"epoch": 0.6719512195121952,
"grad_norm": 0.1923828125,
"learning_rate": 3.280487804878049e-06,
"loss": 1.0659,
"step": 551
},
{
"epoch": 0.6731707317073171,
"grad_norm": 0.2333984375,
"learning_rate": 3.26829268292683e-06,
"loss": 1.1373,
"step": 552
},
{
"epoch": 0.6743902439024391,
"grad_norm": 0.2138671875,
"learning_rate": 3.25609756097561e-06,
"loss": 1.0377,
"step": 553
},
{
"epoch": 0.675609756097561,
"grad_norm": 0.1904296875,
"learning_rate": 3.2439024390243908e-06,
"loss": 1.0002,
"step": 554
},
{
"epoch": 0.676829268292683,
"grad_norm": 0.189453125,
"learning_rate": 3.231707317073171e-06,
"loss": 1.0513,
"step": 555
},
{
"epoch": 0.6780487804878049,
"grad_norm": 0.19140625,
"learning_rate": 3.2195121951219517e-06,
"loss": 1.0748,
"step": 556
},
{
"epoch": 0.6792682926829269,
"grad_norm": 0.2080078125,
"learning_rate": 3.207317073170732e-06,
"loss": 1.1373,
"step": 557
},
{
"epoch": 0.6804878048780488,
"grad_norm": 0.19921875,
"learning_rate": 3.1951219512195126e-06,
"loss": 1.085,
"step": 558
},
{
"epoch": 0.6817073170731708,
"grad_norm": 0.1904296875,
"learning_rate": 3.182926829268293e-06,
"loss": 1.0097,
"step": 559
},
{
"epoch": 0.6829268292682927,
"grad_norm": 0.212890625,
"learning_rate": 3.1707317073170736e-06,
"loss": 1.1246,
"step": 560
},
{
"epoch": 0.6841463414634147,
"grad_norm": 0.1962890625,
"learning_rate": 3.158536585365854e-06,
"loss": 1.0998,
"step": 561
},
{
"epoch": 0.6853658536585366,
"grad_norm": 0.1923828125,
"learning_rate": 3.1463414634146345e-06,
"loss": 1.0452,
"step": 562
},
{
"epoch": 0.6865853658536586,
"grad_norm": 0.185546875,
"learning_rate": 3.1341463414634147e-06,
"loss": 1.0788,
"step": 563
},
{
"epoch": 0.6878048780487804,
"grad_norm": 0.1875,
"learning_rate": 3.1219512195121954e-06,
"loss": 1.0457,
"step": 564
},
{
"epoch": 0.6890243902439024,
"grad_norm": 0.1943359375,
"learning_rate": 3.1097560975609757e-06,
"loss": 1.0646,
"step": 565
},
{
"epoch": 0.6902439024390243,
"grad_norm": 0.1884765625,
"learning_rate": 3.0975609756097564e-06,
"loss": 1.0374,
"step": 566
},
{
"epoch": 0.6914634146341463,
"grad_norm": 0.197265625,
"learning_rate": 3.0853658536585366e-06,
"loss": 1.0734,
"step": 567
},
{
"epoch": 0.6926829268292682,
"grad_norm": 0.208984375,
"learning_rate": 3.0731707317073173e-06,
"loss": 1.0418,
"step": 568
},
{
"epoch": 0.6939024390243902,
"grad_norm": 0.1865234375,
"learning_rate": 3.060975609756098e-06,
"loss": 1.0873,
"step": 569
},
{
"epoch": 0.6951219512195121,
"grad_norm": 0.1865234375,
"learning_rate": 3.0487804878048782e-06,
"loss": 1.0968,
"step": 570
},
{
"epoch": 0.6963414634146341,
"grad_norm": 0.1943359375,
"learning_rate": 3.036585365853659e-06,
"loss": 1.1014,
"step": 571
},
{
"epoch": 0.697560975609756,
"grad_norm": 0.1943359375,
"learning_rate": 3.024390243902439e-06,
"loss": 1.0726,
"step": 572
},
{
"epoch": 0.698780487804878,
"grad_norm": 0.1875,
"learning_rate": 3.01219512195122e-06,
"loss": 1.0602,
"step": 573
},
{
"epoch": 0.7,
"grad_norm": 0.20703125,
"learning_rate": 3e-06,
"loss": 1.0828,
"step": 574
},
{
"epoch": 0.7012195121951219,
"grad_norm": 0.201171875,
"learning_rate": 2.9878048780487808e-06,
"loss": 1.1386,
"step": 575
},
{
"epoch": 0.7024390243902439,
"grad_norm": 0.193359375,
"learning_rate": 2.975609756097561e-06,
"loss": 1.104,
"step": 576
},
{
"epoch": 0.7036585365853658,
"grad_norm": 0.193359375,
"learning_rate": 2.9634146341463417e-06,
"loss": 1.0109,
"step": 577
},
{
"epoch": 0.7048780487804878,
"grad_norm": 0.208984375,
"learning_rate": 2.951219512195122e-06,
"loss": 1.0697,
"step": 578
},
{
"epoch": 0.7060975609756097,
"grad_norm": 0.197265625,
"learning_rate": 2.9390243902439026e-06,
"loss": 1.1323,
"step": 579
},
{
"epoch": 0.7073170731707317,
"grad_norm": 0.189453125,
"learning_rate": 2.926829268292683e-06,
"loss": 1.083,
"step": 580
},
{
"epoch": 0.7085365853658536,
"grad_norm": 0.1982421875,
"learning_rate": 2.9146341463414635e-06,
"loss": 1.0529,
"step": 581
},
{
"epoch": 0.7097560975609756,
"grad_norm": 0.232421875,
"learning_rate": 2.902439024390244e-06,
"loss": 1.0708,
"step": 582
},
{
"epoch": 0.7109756097560975,
"grad_norm": 0.193359375,
"learning_rate": 2.8902439024390245e-06,
"loss": 1.0725,
"step": 583
},
{
"epoch": 0.7121951219512195,
"grad_norm": 0.203125,
"learning_rate": 2.8780487804878047e-06,
"loss": 1.0535,
"step": 584
},
{
"epoch": 0.7134146341463414,
"grad_norm": 0.2041015625,
"learning_rate": 2.8658536585365854e-06,
"loss": 1.0558,
"step": 585
},
{
"epoch": 0.7146341463414634,
"grad_norm": 0.1845703125,
"learning_rate": 2.8536585365853657e-06,
"loss": 1.0615,
"step": 586
},
{
"epoch": 0.7158536585365853,
"grad_norm": 0.18359375,
"learning_rate": 2.8414634146341463e-06,
"loss": 1.0907,
"step": 587
},
{
"epoch": 0.7170731707317073,
"grad_norm": 0.189453125,
"learning_rate": 2.8292682926829266e-06,
"loss": 1.0498,
"step": 588
},
{
"epoch": 0.7182926829268292,
"grad_norm": 0.1904296875,
"learning_rate": 2.8170731707317073e-06,
"loss": 1.0522,
"step": 589
},
{
"epoch": 0.7195121951219512,
"grad_norm": 0.279296875,
"learning_rate": 2.8048780487804884e-06,
"loss": 1.1584,
"step": 590
},
{
"epoch": 0.7207317073170731,
"grad_norm": 0.1923828125,
"learning_rate": 2.7926829268292686e-06,
"loss": 1.1063,
"step": 591
},
{
"epoch": 0.7219512195121951,
"grad_norm": 0.21875,
"learning_rate": 2.7804878048780493e-06,
"loss": 1.0556,
"step": 592
},
{
"epoch": 0.723170731707317,
"grad_norm": 0.201171875,
"learning_rate": 2.7682926829268295e-06,
"loss": 1.0481,
"step": 593
},
{
"epoch": 0.724390243902439,
"grad_norm": 0.193359375,
"learning_rate": 2.7560975609756102e-06,
"loss": 1.0418,
"step": 594
},
{
"epoch": 0.725609756097561,
"grad_norm": 0.4765625,
"learning_rate": 2.7439024390243905e-06,
"loss": 1.0569,
"step": 595
},
{
"epoch": 0.7268292682926829,
"grad_norm": 0.1962890625,
"learning_rate": 2.731707317073171e-06,
"loss": 1.0622,
"step": 596
},
{
"epoch": 0.7280487804878049,
"grad_norm": 0.271484375,
"learning_rate": 2.719512195121952e-06,
"loss": 1.0188,
"step": 597
},
{
"epoch": 0.7292682926829268,
"grad_norm": 0.1923828125,
"learning_rate": 2.707317073170732e-06,
"loss": 1.126,
"step": 598
},
{
"epoch": 0.7304878048780488,
"grad_norm": 0.1953125,
"learning_rate": 2.6951219512195128e-06,
"loss": 1.0651,
"step": 599
},
{
"epoch": 0.7317073170731707,
"grad_norm": 0.1904296875,
"learning_rate": 2.682926829268293e-06,
"loss": 1.0791,
"step": 600
},
{
"epoch": 0.7329268292682927,
"grad_norm": 0.20703125,
"learning_rate": 2.6707317073170737e-06,
"loss": 1.1254,
"step": 601
},
{
"epoch": 0.7341463414634146,
"grad_norm": 0.1943359375,
"learning_rate": 2.658536585365854e-06,
"loss": 1.0697,
"step": 602
},
{
"epoch": 0.7353658536585366,
"grad_norm": 0.189453125,
"learning_rate": 2.6463414634146346e-06,
"loss": 1.1039,
"step": 603
},
{
"epoch": 0.7365853658536585,
"grad_norm": 0.1962890625,
"learning_rate": 2.634146341463415e-06,
"loss": 1.0711,
"step": 604
},
{
"epoch": 0.7378048780487805,
"grad_norm": 0.228515625,
"learning_rate": 2.6219512195121956e-06,
"loss": 1.0276,
"step": 605
},
{
"epoch": 0.7390243902439024,
"grad_norm": 0.1982421875,
"learning_rate": 2.609756097560976e-06,
"loss": 1.0611,
"step": 606
},
{
"epoch": 0.7402439024390244,
"grad_norm": 0.1884765625,
"learning_rate": 2.5975609756097565e-06,
"loss": 1.1051,
"step": 607
},
{
"epoch": 0.7414634146341463,
"grad_norm": 0.201171875,
"learning_rate": 2.5853658536585367e-06,
"loss": 1.0689,
"step": 608
},
{
"epoch": 0.7426829268292683,
"grad_norm": 0.208984375,
"learning_rate": 2.5731707317073174e-06,
"loss": 1.0563,
"step": 609
},
{
"epoch": 0.7439024390243902,
"grad_norm": 0.201171875,
"learning_rate": 2.5609756097560977e-06,
"loss": 1.0239,
"step": 610
},
{
"epoch": 0.7451219512195122,
"grad_norm": 0.2041015625,
"learning_rate": 2.5487804878048783e-06,
"loss": 1.1052,
"step": 611
},
{
"epoch": 0.7463414634146341,
"grad_norm": 0.1884765625,
"learning_rate": 2.5365853658536586e-06,
"loss": 1.1,
"step": 612
},
{
"epoch": 0.7475609756097561,
"grad_norm": 0.20703125,
"learning_rate": 2.5243902439024393e-06,
"loss": 1.1258,
"step": 613
},
{
"epoch": 0.748780487804878,
"grad_norm": 0.203125,
"learning_rate": 2.5121951219512195e-06,
"loss": 1.1154,
"step": 614
},
{
"epoch": 0.75,
"grad_norm": 0.2060546875,
"learning_rate": 2.5e-06,
"loss": 1.0448,
"step": 615
},
{
"epoch": 0.751219512195122,
"grad_norm": 0.2109375,
"learning_rate": 2.487804878048781e-06,
"loss": 1.0559,
"step": 616
},
{
"epoch": 0.7524390243902439,
"grad_norm": 0.2119140625,
"learning_rate": 2.475609756097561e-06,
"loss": 1.0948,
"step": 617
},
{
"epoch": 0.7536585365853659,
"grad_norm": 0.189453125,
"learning_rate": 2.463414634146342e-06,
"loss": 1.0541,
"step": 618
},
{
"epoch": 0.7548780487804878,
"grad_norm": 0.1923828125,
"learning_rate": 2.451219512195122e-06,
"loss": 1.0377,
"step": 619
},
{
"epoch": 0.7560975609756098,
"grad_norm": 0.20703125,
"learning_rate": 2.4390243902439027e-06,
"loss": 1.0511,
"step": 620
},
{
"epoch": 0.7573170731707317,
"grad_norm": 0.19921875,
"learning_rate": 2.426829268292683e-06,
"loss": 1.1331,
"step": 621
},
{
"epoch": 0.7585365853658537,
"grad_norm": 0.1923828125,
"learning_rate": 2.4146341463414637e-06,
"loss": 1.0521,
"step": 622
},
{
"epoch": 0.7597560975609756,
"grad_norm": 0.193359375,
"learning_rate": 2.402439024390244e-06,
"loss": 1.0358,
"step": 623
},
{
"epoch": 0.7609756097560976,
"grad_norm": 0.1884765625,
"learning_rate": 2.3902439024390246e-06,
"loss": 1.1193,
"step": 624
},
{
"epoch": 0.7621951219512195,
"grad_norm": 0.1875,
"learning_rate": 2.378048780487805e-06,
"loss": 1.0871,
"step": 625
},
{
"epoch": 0.7634146341463415,
"grad_norm": 0.21484375,
"learning_rate": 2.3658536585365855e-06,
"loss": 1.1028,
"step": 626
},
{
"epoch": 0.7646341463414634,
"grad_norm": 0.1875,
"learning_rate": 2.3536585365853658e-06,
"loss": 1.1165,
"step": 627
},
{
"epoch": 0.7658536585365854,
"grad_norm": 0.181640625,
"learning_rate": 2.3414634146341465e-06,
"loss": 1.0831,
"step": 628
},
{
"epoch": 0.7670731707317073,
"grad_norm": 0.212890625,
"learning_rate": 2.329268292682927e-06,
"loss": 1.0924,
"step": 629
},
{
"epoch": 0.7682926829268293,
"grad_norm": 0.20703125,
"learning_rate": 2.317073170731708e-06,
"loss": 1.1502,
"step": 630
},
{
"epoch": 0.7695121951219512,
"grad_norm": 0.189453125,
"learning_rate": 2.304878048780488e-06,
"loss": 1.059,
"step": 631
},
{
"epoch": 0.7707317073170732,
"grad_norm": 0.1865234375,
"learning_rate": 2.2926829268292687e-06,
"loss": 1.0582,
"step": 632
},
{
"epoch": 0.7719512195121951,
"grad_norm": 0.2470703125,
"learning_rate": 2.280487804878049e-06,
"loss": 1.0578,
"step": 633
},
{
"epoch": 0.7731707317073171,
"grad_norm": 0.1943359375,
"learning_rate": 2.2682926829268297e-06,
"loss": 1.027,
"step": 634
},
{
"epoch": 0.774390243902439,
"grad_norm": 0.19921875,
"learning_rate": 2.25609756097561e-06,
"loss": 1.1362,
"step": 635
},
{
"epoch": 0.775609756097561,
"grad_norm": 0.1884765625,
"learning_rate": 2.2439024390243906e-06,
"loss": 1.0586,
"step": 636
},
{
"epoch": 0.776829268292683,
"grad_norm": 0.1962890625,
"learning_rate": 2.231707317073171e-06,
"loss": 1.0916,
"step": 637
},
{
"epoch": 0.7780487804878049,
"grad_norm": 0.2041015625,
"learning_rate": 2.2195121951219515e-06,
"loss": 1.0785,
"step": 638
},
{
"epoch": 0.7792682926829269,
"grad_norm": 0.2119140625,
"learning_rate": 2.207317073170732e-06,
"loss": 1.0886,
"step": 639
},
{
"epoch": 0.7804878048780488,
"grad_norm": 0.2041015625,
"learning_rate": 2.1951219512195125e-06,
"loss": 1.063,
"step": 640
},
{
"epoch": 0.7817073170731708,
"grad_norm": 0.1923828125,
"learning_rate": 2.1829268292682927e-06,
"loss": 1.0256,
"step": 641
},
{
"epoch": 0.7829268292682927,
"grad_norm": 0.2021484375,
"learning_rate": 2.1707317073170734e-06,
"loss": 1.0759,
"step": 642
},
{
"epoch": 0.7841463414634147,
"grad_norm": 0.19921875,
"learning_rate": 2.1585365853658537e-06,
"loss": 1.0809,
"step": 643
},
{
"epoch": 0.7853658536585366,
"grad_norm": 0.193359375,
"learning_rate": 2.1463414634146343e-06,
"loss": 1.0953,
"step": 644
},
{
"epoch": 0.7865853658536586,
"grad_norm": 0.20703125,
"learning_rate": 2.1341463414634146e-06,
"loss": 1.0791,
"step": 645
},
{
"epoch": 0.7878048780487805,
"grad_norm": 0.201171875,
"learning_rate": 2.1219512195121953e-06,
"loss": 1.1424,
"step": 646
},
{
"epoch": 0.7890243902439025,
"grad_norm": 0.1923828125,
"learning_rate": 2.1097560975609755e-06,
"loss": 1.0963,
"step": 647
},
{
"epoch": 0.7902439024390244,
"grad_norm": 0.2001953125,
"learning_rate": 2.097560975609756e-06,
"loss": 1.0406,
"step": 648
},
{
"epoch": 0.7914634146341464,
"grad_norm": 0.1953125,
"learning_rate": 2.085365853658537e-06,
"loss": 1.0071,
"step": 649
},
{
"epoch": 0.7926829268292683,
"grad_norm": 0.1962890625,
"learning_rate": 2.073170731707317e-06,
"loss": 1.0457,
"step": 650
},
{
"epoch": 0.7939024390243903,
"grad_norm": 0.2021484375,
"learning_rate": 2.060975609756098e-06,
"loss": 1.1034,
"step": 651
},
{
"epoch": 0.7951219512195122,
"grad_norm": 0.220703125,
"learning_rate": 2.048780487804878e-06,
"loss": 1.1096,
"step": 652
},
{
"epoch": 0.7963414634146342,
"grad_norm": 0.1884765625,
"learning_rate": 2.0365853658536587e-06,
"loss": 1.0638,
"step": 653
},
{
"epoch": 0.7975609756097561,
"grad_norm": 0.1982421875,
"learning_rate": 2.0243902439024394e-06,
"loss": 1.0905,
"step": 654
},
{
"epoch": 0.7987804878048781,
"grad_norm": 0.2021484375,
"learning_rate": 2.0121951219512197e-06,
"loss": 1.0643,
"step": 655
},
{
"epoch": 0.8,
"grad_norm": 0.2177734375,
"learning_rate": 2.0000000000000003e-06,
"loss": 1.1126,
"step": 656
},
{
"epoch": 0.801219512195122,
"grad_norm": 0.201171875,
"learning_rate": 1.9878048780487806e-06,
"loss": 1.0715,
"step": 657
},
{
"epoch": 0.802439024390244,
"grad_norm": 0.1923828125,
"learning_rate": 1.9756097560975613e-06,
"loss": 1.0718,
"step": 658
},
{
"epoch": 0.8036585365853659,
"grad_norm": 0.216796875,
"learning_rate": 1.9634146341463415e-06,
"loss": 1.0326,
"step": 659
},
{
"epoch": 0.8048780487804879,
"grad_norm": 0.1953125,
"learning_rate": 1.951219512195122e-06,
"loss": 1.0687,
"step": 660
},
{
"epoch": 0.8060975609756098,
"grad_norm": 0.205078125,
"learning_rate": 1.9390243902439024e-06,
"loss": 1.027,
"step": 661
},
{
"epoch": 0.8073170731707318,
"grad_norm": 0.1953125,
"learning_rate": 1.926829268292683e-06,
"loss": 1.1043,
"step": 662
},
{
"epoch": 0.8085365853658537,
"grad_norm": 0.21875,
"learning_rate": 1.914634146341464e-06,
"loss": 1.061,
"step": 663
},
{
"epoch": 0.8097560975609757,
"grad_norm": 0.1953125,
"learning_rate": 1.902439024390244e-06,
"loss": 1.0429,
"step": 664
},
{
"epoch": 0.8109756097560976,
"grad_norm": 0.2333984375,
"learning_rate": 1.8902439024390245e-06,
"loss": 1.0563,
"step": 665
},
{
"epoch": 0.8121951219512196,
"grad_norm": 0.1982421875,
"learning_rate": 1.878048780487805e-06,
"loss": 1.0664,
"step": 666
},
{
"epoch": 0.8134146341463414,
"grad_norm": 0.251953125,
"learning_rate": 1.8658536585365854e-06,
"loss": 1.022,
"step": 667
},
{
"epoch": 0.8146341463414634,
"grad_norm": 0.2265625,
"learning_rate": 1.853658536585366e-06,
"loss": 1.0534,
"step": 668
},
{
"epoch": 0.8158536585365853,
"grad_norm": 0.205078125,
"learning_rate": 1.8414634146341464e-06,
"loss": 1.1069,
"step": 669
},
{
"epoch": 0.8170731707317073,
"grad_norm": 0.1845703125,
"learning_rate": 1.8292682926829268e-06,
"loss": 1.0231,
"step": 670
},
{
"epoch": 0.8182926829268292,
"grad_norm": 0.2392578125,
"learning_rate": 1.8170731707317073e-06,
"loss": 1.0328,
"step": 671
},
{
"epoch": 0.8195121951219512,
"grad_norm": 0.208984375,
"learning_rate": 1.804878048780488e-06,
"loss": 1.0963,
"step": 672
},
{
"epoch": 0.8207317073170731,
"grad_norm": 0.201171875,
"learning_rate": 1.7926829268292685e-06,
"loss": 1.0811,
"step": 673
},
{
"epoch": 0.8219512195121951,
"grad_norm": 0.2353515625,
"learning_rate": 1.780487804878049e-06,
"loss": 1.032,
"step": 674
},
{
"epoch": 0.823170731707317,
"grad_norm": 0.255859375,
"learning_rate": 1.7682926829268294e-06,
"loss": 1.1036,
"step": 675
},
{
"epoch": 0.824390243902439,
"grad_norm": 0.2109375,
"learning_rate": 1.7560975609756098e-06,
"loss": 1.0545,
"step": 676
},
{
"epoch": 0.8256097560975609,
"grad_norm": 0.19921875,
"learning_rate": 1.7439024390243903e-06,
"loss": 1.0845,
"step": 677
},
{
"epoch": 0.8268292682926829,
"grad_norm": 0.201171875,
"learning_rate": 1.7317073170731708e-06,
"loss": 1.088,
"step": 678
},
{
"epoch": 0.8280487804878048,
"grad_norm": 0.2041015625,
"learning_rate": 1.7195121951219512e-06,
"loss": 1.0467,
"step": 679
},
{
"epoch": 0.8292682926829268,
"grad_norm": 0.255859375,
"learning_rate": 1.707317073170732e-06,
"loss": 0.9774,
"step": 680
},
{
"epoch": 0.8304878048780487,
"grad_norm": 0.205078125,
"learning_rate": 1.6951219512195124e-06,
"loss": 1.0679,
"step": 681
},
{
"epoch": 0.8317073170731707,
"grad_norm": 0.212890625,
"learning_rate": 1.6829268292682928e-06,
"loss": 1.0333,
"step": 682
},
{
"epoch": 0.8329268292682926,
"grad_norm": 0.1875,
"learning_rate": 1.6707317073170733e-06,
"loss": 1.0098,
"step": 683
},
{
"epoch": 0.8341463414634146,
"grad_norm": 0.203125,
"learning_rate": 1.6585365853658538e-06,
"loss": 1.1388,
"step": 684
},
{
"epoch": 0.8353658536585366,
"grad_norm": 0.1982421875,
"learning_rate": 1.6463414634146345e-06,
"loss": 1.0618,
"step": 685
},
{
"epoch": 0.8365853658536585,
"grad_norm": 0.2373046875,
"learning_rate": 1.634146341463415e-06,
"loss": 1.076,
"step": 686
},
{
"epoch": 0.8378048780487805,
"grad_norm": 0.1953125,
"learning_rate": 1.6219512195121954e-06,
"loss": 1.0945,
"step": 687
},
{
"epoch": 0.8390243902439024,
"grad_norm": 0.1923828125,
"learning_rate": 1.6097560975609759e-06,
"loss": 1.0487,
"step": 688
},
{
"epoch": 0.8402439024390244,
"grad_norm": 0.19921875,
"learning_rate": 1.5975609756097563e-06,
"loss": 1.1462,
"step": 689
},
{
"epoch": 0.8414634146341463,
"grad_norm": 0.208984375,
"learning_rate": 1.5853658536585368e-06,
"loss": 1.0855,
"step": 690
},
{
"epoch": 0.8426829268292683,
"grad_norm": 0.30078125,
"learning_rate": 1.5731707317073172e-06,
"loss": 1.0957,
"step": 691
},
{
"epoch": 0.8439024390243902,
"grad_norm": 0.2001953125,
"learning_rate": 1.5609756097560977e-06,
"loss": 1.0581,
"step": 692
},
{
"epoch": 0.8451219512195122,
"grad_norm": 0.193359375,
"learning_rate": 1.5487804878048782e-06,
"loss": 1.0793,
"step": 693
},
{
"epoch": 0.8463414634146341,
"grad_norm": 0.2109375,
"learning_rate": 1.5365853658536586e-06,
"loss": 1.0879,
"step": 694
},
{
"epoch": 0.8475609756097561,
"grad_norm": 0.197265625,
"learning_rate": 1.5243902439024391e-06,
"loss": 1.0585,
"step": 695
},
{
"epoch": 0.848780487804878,
"grad_norm": 0.1962890625,
"learning_rate": 1.5121951219512196e-06,
"loss": 1.0186,
"step": 696
},
{
"epoch": 0.85,
"grad_norm": 0.2099609375,
"learning_rate": 1.5e-06,
"loss": 1.1329,
"step": 697
},
{
"epoch": 0.8512195121951219,
"grad_norm": 0.1943359375,
"learning_rate": 1.4878048780487805e-06,
"loss": 1.1513,
"step": 698
},
{
"epoch": 0.8524390243902439,
"grad_norm": 0.296875,
"learning_rate": 1.475609756097561e-06,
"loss": 1.1021,
"step": 699
},
{
"epoch": 0.8536585365853658,
"grad_norm": 0.197265625,
"learning_rate": 1.4634146341463414e-06,
"loss": 1.1114,
"step": 700
},
{
"epoch": 0.8548780487804878,
"grad_norm": 0.1953125,
"learning_rate": 1.451219512195122e-06,
"loss": 1.0979,
"step": 701
},
{
"epoch": 0.8560975609756097,
"grad_norm": 0.1904296875,
"learning_rate": 1.4390243902439024e-06,
"loss": 1.0381,
"step": 702
},
{
"epoch": 0.8573170731707317,
"grad_norm": 0.1923828125,
"learning_rate": 1.4268292682926828e-06,
"loss": 1.0386,
"step": 703
},
{
"epoch": 0.8585365853658536,
"grad_norm": 0.19140625,
"learning_rate": 1.4146341463414633e-06,
"loss": 1.0427,
"step": 704
},
{
"epoch": 0.8597560975609756,
"grad_norm": 0.19140625,
"learning_rate": 1.4024390243902442e-06,
"loss": 1.0731,
"step": 705
},
{
"epoch": 0.8609756097560975,
"grad_norm": 0.2001953125,
"learning_rate": 1.3902439024390246e-06,
"loss": 1.135,
"step": 706
},
{
"epoch": 0.8621951219512195,
"grad_norm": 0.25,
"learning_rate": 1.3780487804878051e-06,
"loss": 1.0106,
"step": 707
},
{
"epoch": 0.8634146341463415,
"grad_norm": 0.2109375,
"learning_rate": 1.3658536585365856e-06,
"loss": 1.0915,
"step": 708
},
{
"epoch": 0.8646341463414634,
"grad_norm": 0.26953125,
"learning_rate": 1.353658536585366e-06,
"loss": 1.0352,
"step": 709
},
{
"epoch": 0.8658536585365854,
"grad_norm": 0.185546875,
"learning_rate": 1.3414634146341465e-06,
"loss": 1.0295,
"step": 710
},
{
"epoch": 0.8670731707317073,
"grad_norm": 0.5078125,
"learning_rate": 1.329268292682927e-06,
"loss": 1.0741,
"step": 711
},
{
"epoch": 0.8682926829268293,
"grad_norm": 0.298828125,
"learning_rate": 1.3170731707317074e-06,
"loss": 1.0662,
"step": 712
},
{
"epoch": 0.8695121951219512,
"grad_norm": 0.2138671875,
"learning_rate": 1.304878048780488e-06,
"loss": 1.0697,
"step": 713
},
{
"epoch": 0.8707317073170732,
"grad_norm": 0.1943359375,
"learning_rate": 1.2926829268292684e-06,
"loss": 0.9954,
"step": 714
},
{
"epoch": 0.8719512195121951,
"grad_norm": 0.2021484375,
"learning_rate": 1.2804878048780488e-06,
"loss": 1.0151,
"step": 715
},
{
"epoch": 0.8731707317073171,
"grad_norm": 0.1982421875,
"learning_rate": 1.2682926829268293e-06,
"loss": 0.9893,
"step": 716
},
{
"epoch": 0.874390243902439,
"grad_norm": 0.1865234375,
"learning_rate": 1.2560975609756098e-06,
"loss": 1.0561,
"step": 717
},
{
"epoch": 0.875609756097561,
"grad_norm": 0.2275390625,
"learning_rate": 1.2439024390243904e-06,
"loss": 1.1524,
"step": 718
},
{
"epoch": 0.8768292682926829,
"grad_norm": 0.19921875,
"learning_rate": 1.231707317073171e-06,
"loss": 1.054,
"step": 719
},
{
"epoch": 0.8780487804878049,
"grad_norm": 0.240234375,
"learning_rate": 1.2195121951219514e-06,
"loss": 1.1322,
"step": 720
},
{
"epoch": 0.8792682926829268,
"grad_norm": 0.298828125,
"learning_rate": 1.2073170731707318e-06,
"loss": 1.0766,
"step": 721
},
{
"epoch": 0.8804878048780488,
"grad_norm": 0.1845703125,
"learning_rate": 1.1951219512195123e-06,
"loss": 1.0509,
"step": 722
},
{
"epoch": 0.8817073170731707,
"grad_norm": 0.1943359375,
"learning_rate": 1.1829268292682928e-06,
"loss": 1.0303,
"step": 723
},
{
"epoch": 0.8829268292682927,
"grad_norm": 0.208984375,
"learning_rate": 1.1707317073170732e-06,
"loss": 1.065,
"step": 724
},
{
"epoch": 0.8841463414634146,
"grad_norm": 0.1865234375,
"learning_rate": 1.158536585365854e-06,
"loss": 1.0904,
"step": 725
},
{
"epoch": 0.8853658536585366,
"grad_norm": 0.1923828125,
"learning_rate": 1.1463414634146344e-06,
"loss": 1.0988,
"step": 726
},
{
"epoch": 0.8865853658536585,
"grad_norm": 0.19921875,
"learning_rate": 1.1341463414634148e-06,
"loss": 1.0732,
"step": 727
},
{
"epoch": 0.8878048780487805,
"grad_norm": 0.1875,
"learning_rate": 1.1219512195121953e-06,
"loss": 1.0844,
"step": 728
},
{
"epoch": 0.8890243902439025,
"grad_norm": 0.19140625,
"learning_rate": 1.1097560975609758e-06,
"loss": 1.0841,
"step": 729
},
{
"epoch": 0.8902439024390244,
"grad_norm": 0.193359375,
"learning_rate": 1.0975609756097562e-06,
"loss": 1.1141,
"step": 730
},
{
"epoch": 0.8914634146341464,
"grad_norm": 0.2119140625,
"learning_rate": 1.0853658536585367e-06,
"loss": 1.0769,
"step": 731
},
{
"epoch": 0.8926829268292683,
"grad_norm": 0.220703125,
"learning_rate": 1.0731707317073172e-06,
"loss": 1.0601,
"step": 732
},
{
"epoch": 0.8939024390243903,
"grad_norm": 0.181640625,
"learning_rate": 1.0609756097560976e-06,
"loss": 1.0131,
"step": 733
},
{
"epoch": 0.8951219512195122,
"grad_norm": 0.1962890625,
"learning_rate": 1.048780487804878e-06,
"loss": 1.1302,
"step": 734
},
{
"epoch": 0.8963414634146342,
"grad_norm": 0.1953125,
"learning_rate": 1.0365853658536586e-06,
"loss": 1.0548,
"step": 735
},
{
"epoch": 0.8975609756097561,
"grad_norm": 0.248046875,
"learning_rate": 1.024390243902439e-06,
"loss": 1.052,
"step": 736
},
{
"epoch": 0.8987804878048781,
"grad_norm": 0.2158203125,
"learning_rate": 1.0121951219512197e-06,
"loss": 1.0466,
"step": 737
},
{
"epoch": 0.9,
"grad_norm": 0.1884765625,
"learning_rate": 1.0000000000000002e-06,
"loss": 1.0713,
"step": 738
},
{
"epoch": 0.901219512195122,
"grad_norm": 0.193359375,
"learning_rate": 9.878048780487806e-07,
"loss": 1.101,
"step": 739
},
{
"epoch": 0.9024390243902439,
"grad_norm": 0.1845703125,
"learning_rate": 9.75609756097561e-07,
"loss": 1.0413,
"step": 740
},
{
"epoch": 0.9036585365853659,
"grad_norm": 0.1884765625,
"learning_rate": 9.634146341463416e-07,
"loss": 1.0669,
"step": 741
},
{
"epoch": 0.9048780487804878,
"grad_norm": 0.1884765625,
"learning_rate": 9.51219512195122e-07,
"loss": 1.0594,
"step": 742
},
{
"epoch": 0.9060975609756098,
"grad_norm": 0.1826171875,
"learning_rate": 9.390243902439025e-07,
"loss": 1.0646,
"step": 743
},
{
"epoch": 0.9073170731707317,
"grad_norm": 0.21875,
"learning_rate": 9.26829268292683e-07,
"loss": 1.0803,
"step": 744
},
{
"epoch": 0.9085365853658537,
"grad_norm": 0.2099609375,
"learning_rate": 9.146341463414634e-07,
"loss": 1.1442,
"step": 745
},
{
"epoch": 0.9097560975609756,
"grad_norm": 0.193359375,
"learning_rate": 9.02439024390244e-07,
"loss": 1.079,
"step": 746
},
{
"epoch": 0.9109756097560976,
"grad_norm": 0.2197265625,
"learning_rate": 8.902439024390245e-07,
"loss": 1.0908,
"step": 747
},
{
"epoch": 0.9121951219512195,
"grad_norm": 0.185546875,
"learning_rate": 8.780487804878049e-07,
"loss": 1.0497,
"step": 748
},
{
"epoch": 0.9134146341463415,
"grad_norm": 0.1865234375,
"learning_rate": 8.658536585365854e-07,
"loss": 1.0826,
"step": 749
},
{
"epoch": 0.9146341463414634,
"grad_norm": 0.2001953125,
"learning_rate": 8.53658536585366e-07,
"loss": 1.1029,
"step": 750
},
{
"epoch": 0.9158536585365854,
"grad_norm": 0.2119140625,
"learning_rate": 8.414634146341464e-07,
"loss": 1.1093,
"step": 751
},
{
"epoch": 0.9170731707317074,
"grad_norm": 0.185546875,
"learning_rate": 8.292682926829269e-07,
"loss": 1.0173,
"step": 752
},
{
"epoch": 0.9182926829268293,
"grad_norm": 0.1943359375,
"learning_rate": 8.170731707317075e-07,
"loss": 1.0709,
"step": 753
},
{
"epoch": 0.9195121951219513,
"grad_norm": 0.197265625,
"learning_rate": 8.048780487804879e-07,
"loss": 1.1359,
"step": 754
},
{
"epoch": 0.9207317073170732,
"grad_norm": 0.189453125,
"learning_rate": 7.926829268292684e-07,
"loss": 1.0086,
"step": 755
},
{
"epoch": 0.9219512195121952,
"grad_norm": 0.2041015625,
"learning_rate": 7.804878048780489e-07,
"loss": 1.0692,
"step": 756
},
{
"epoch": 0.9231707317073171,
"grad_norm": 0.189453125,
"learning_rate": 7.682926829268293e-07,
"loss": 1.0772,
"step": 757
},
{
"epoch": 0.9243902439024391,
"grad_norm": 0.201171875,
"learning_rate": 7.560975609756098e-07,
"loss": 1.1352,
"step": 758
},
{
"epoch": 0.925609756097561,
"grad_norm": 0.205078125,
"learning_rate": 7.439024390243903e-07,
"loss": 1.0776,
"step": 759
},
{
"epoch": 0.926829268292683,
"grad_norm": 0.216796875,
"learning_rate": 7.317073170731707e-07,
"loss": 1.0599,
"step": 760
},
{
"epoch": 0.9280487804878049,
"grad_norm": 0.240234375,
"learning_rate": 7.195121951219512e-07,
"loss": 1.1065,
"step": 761
},
{
"epoch": 0.9292682926829269,
"grad_norm": 0.1962890625,
"learning_rate": 7.073170731707316e-07,
"loss": 1.1051,
"step": 762
},
{
"epoch": 0.9304878048780488,
"grad_norm": 0.2255859375,
"learning_rate": 6.951219512195123e-07,
"loss": 1.083,
"step": 763
},
{
"epoch": 0.9317073170731708,
"grad_norm": 0.1962890625,
"learning_rate": 6.829268292682928e-07,
"loss": 1.0656,
"step": 764
},
{
"epoch": 0.9329268292682927,
"grad_norm": 0.2001953125,
"learning_rate": 6.707317073170733e-07,
"loss": 1.0788,
"step": 765
},
{
"epoch": 0.9341463414634147,
"grad_norm": 0.232421875,
"learning_rate": 6.585365853658537e-07,
"loss": 1.0992,
"step": 766
},
{
"epoch": 0.9353658536585366,
"grad_norm": 0.1826171875,
"learning_rate": 6.463414634146342e-07,
"loss": 1.0471,
"step": 767
},
{
"epoch": 0.9365853658536586,
"grad_norm": 0.2001953125,
"learning_rate": 6.341463414634146e-07,
"loss": 1.078,
"step": 768
},
{
"epoch": 0.9378048780487804,
"grad_norm": 0.2099609375,
"learning_rate": 6.219512195121952e-07,
"loss": 1.0927,
"step": 769
},
{
"epoch": 0.9390243902439024,
"grad_norm": 0.20703125,
"learning_rate": 6.097560975609757e-07,
"loss": 1.0055,
"step": 770
},
{
"epoch": 0.9402439024390243,
"grad_norm": 0.2001953125,
"learning_rate": 5.975609756097562e-07,
"loss": 1.1183,
"step": 771
},
{
"epoch": 0.9414634146341463,
"grad_norm": 0.2080078125,
"learning_rate": 5.853658536585366e-07,
"loss": 1.0887,
"step": 772
},
{
"epoch": 0.9426829268292682,
"grad_norm": 0.1884765625,
"learning_rate": 5.731707317073172e-07,
"loss": 1.0923,
"step": 773
},
{
"epoch": 0.9439024390243902,
"grad_norm": 0.19921875,
"learning_rate": 5.609756097560977e-07,
"loss": 1.0986,
"step": 774
},
{
"epoch": 0.9451219512195121,
"grad_norm": 0.1884765625,
"learning_rate": 5.487804878048781e-07,
"loss": 1.0457,
"step": 775
},
{
"epoch": 0.9463414634146341,
"grad_norm": 0.189453125,
"learning_rate": 5.365853658536586e-07,
"loss": 1.0444,
"step": 776
},
{
"epoch": 0.947560975609756,
"grad_norm": 0.1953125,
"learning_rate": 5.24390243902439e-07,
"loss": 1.0999,
"step": 777
},
{
"epoch": 0.948780487804878,
"grad_norm": 0.189453125,
"learning_rate": 5.121951219512195e-07,
"loss": 1.0839,
"step": 778
},
{
"epoch": 0.95,
"grad_norm": 0.203125,
"learning_rate": 5.000000000000001e-07,
"loss": 1.0789,
"step": 779
},
{
"epoch": 0.9512195121951219,
"grad_norm": 0.2021484375,
"learning_rate": 4.878048780487805e-07,
"loss": 1.0361,
"step": 780
},
{
"epoch": 0.9524390243902439,
"grad_norm": 0.1826171875,
"learning_rate": 4.75609756097561e-07,
"loss": 1.037,
"step": 781
},
{
"epoch": 0.9536585365853658,
"grad_norm": 0.19921875,
"learning_rate": 4.634146341463415e-07,
"loss": 1.1276,
"step": 782
},
{
"epoch": 0.9548780487804878,
"grad_norm": 0.18359375,
"learning_rate": 4.51219512195122e-07,
"loss": 1.0421,
"step": 783
},
{
"epoch": 0.9560975609756097,
"grad_norm": 0.189453125,
"learning_rate": 4.3902439024390246e-07,
"loss": 1.0561,
"step": 784
},
{
"epoch": 0.9573170731707317,
"grad_norm": 0.2001953125,
"learning_rate": 4.26829268292683e-07,
"loss": 1.0926,
"step": 785
},
{
"epoch": 0.9585365853658536,
"grad_norm": 0.1962890625,
"learning_rate": 4.1463414634146344e-07,
"loss": 1.1011,
"step": 786
},
{
"epoch": 0.9597560975609756,
"grad_norm": 0.1904296875,
"learning_rate": 4.0243902439024396e-07,
"loss": 1.0871,
"step": 787
},
{
"epoch": 0.9609756097560975,
"grad_norm": 0.203125,
"learning_rate": 3.9024390243902443e-07,
"loss": 1.0532,
"step": 788
},
{
"epoch": 0.9621951219512195,
"grad_norm": 0.1923828125,
"learning_rate": 3.780487804878049e-07,
"loss": 1.1131,
"step": 789
},
{
"epoch": 0.9634146341463414,
"grad_norm": 0.19921875,
"learning_rate": 3.6585365853658536e-07,
"loss": 1.094,
"step": 790
},
{
"epoch": 0.9646341463414634,
"grad_norm": 0.283203125,
"learning_rate": 3.536585365853658e-07,
"loss": 1.0334,
"step": 791
},
{
"epoch": 0.9658536585365853,
"grad_norm": 0.1953125,
"learning_rate": 3.414634146341464e-07,
"loss": 1.0802,
"step": 792
},
{
"epoch": 0.9670731707317073,
"grad_norm": 0.2021484375,
"learning_rate": 3.2926829268292686e-07,
"loss": 1.0745,
"step": 793
},
{
"epoch": 0.9682926829268292,
"grad_norm": 0.3671875,
"learning_rate": 3.170731707317073e-07,
"loss": 1.0459,
"step": 794
},
{
"epoch": 0.9695121951219512,
"grad_norm": 0.1953125,
"learning_rate": 3.0487804878048784e-07,
"loss": 1.0958,
"step": 795
},
{
"epoch": 0.9707317073170731,
"grad_norm": 0.197265625,
"learning_rate": 2.926829268292683e-07,
"loss": 1.0859,
"step": 796
},
{
"epoch": 0.9719512195121951,
"grad_norm": 0.1943359375,
"learning_rate": 2.804878048780488e-07,
"loss": 1.1665,
"step": 797
},
{
"epoch": 0.973170731707317,
"grad_norm": 0.259765625,
"learning_rate": 2.682926829268293e-07,
"loss": 1.0844,
"step": 798
},
{
"epoch": 0.974390243902439,
"grad_norm": 0.1923828125,
"learning_rate": 2.5609756097560976e-07,
"loss": 1.1316,
"step": 799
},
{
"epoch": 0.975609756097561,
"grad_norm": 0.2041015625,
"learning_rate": 2.439024390243903e-07,
"loss": 0.9933,
"step": 800
},
{
"epoch": 0.9768292682926829,
"grad_norm": 0.189453125,
"learning_rate": 2.3170731707317074e-07,
"loss": 1.0578,
"step": 801
},
{
"epoch": 0.9780487804878049,
"grad_norm": 0.2041015625,
"learning_rate": 2.1951219512195123e-07,
"loss": 1.1175,
"step": 802
},
{
"epoch": 0.9792682926829268,
"grad_norm": 0.18359375,
"learning_rate": 2.0731707317073172e-07,
"loss": 1.0502,
"step": 803
},
{
"epoch": 0.9804878048780488,
"grad_norm": 0.1943359375,
"learning_rate": 1.9512195121951221e-07,
"loss": 1.0945,
"step": 804
},
{
"epoch": 0.9817073170731707,
"grad_norm": 0.203125,
"learning_rate": 1.8292682926829268e-07,
"loss": 1.0537,
"step": 805
},
{
"epoch": 0.9829268292682927,
"grad_norm": 0.2216796875,
"learning_rate": 1.707317073170732e-07,
"loss": 1.1431,
"step": 806
},
{
"epoch": 0.9841463414634146,
"grad_norm": 0.1923828125,
"learning_rate": 1.5853658536585366e-07,
"loss": 1.1126,
"step": 807
},
{
"epoch": 0.9853658536585366,
"grad_norm": 0.3046875,
"learning_rate": 1.4634146341463415e-07,
"loss": 1.078,
"step": 808
},
{
"epoch": 0.9865853658536585,
"grad_norm": 0.1875,
"learning_rate": 1.3414634146341465e-07,
"loss": 1.0876,
"step": 809
},
{
"epoch": 0.9878048780487805,
"grad_norm": 0.2578125,
"learning_rate": 1.2195121951219514e-07,
"loss": 1.0637,
"step": 810
},
{
"epoch": 0.9890243902439024,
"grad_norm": 0.27734375,
"learning_rate": 1.0975609756097562e-07,
"loss": 1.0522,
"step": 811
},
{
"epoch": 0.9902439024390244,
"grad_norm": 0.1845703125,
"learning_rate": 9.756097560975611e-08,
"loss": 1.0683,
"step": 812
},
{
"epoch": 0.9914634146341463,
"grad_norm": 0.185546875,
"learning_rate": 8.53658536585366e-08,
"loss": 1.093,
"step": 813
},
{
"epoch": 0.9926829268292683,
"grad_norm": 0.2158203125,
"learning_rate": 7.317073170731708e-08,
"loss": 1.1015,
"step": 814
},
{
"epoch": 0.9939024390243902,
"grad_norm": 0.201171875,
"learning_rate": 6.097560975609757e-08,
"loss": 1.117,
"step": 815
},
{
"epoch": 0.9951219512195122,
"grad_norm": 0.205078125,
"learning_rate": 4.8780487804878054e-08,
"loss": 1.1107,
"step": 816
},
{
"epoch": 0.9963414634146341,
"grad_norm": 0.2109375,
"learning_rate": 3.658536585365854e-08,
"loss": 1.129,
"step": 817
},
{
"epoch": 0.9975609756097561,
"grad_norm": 0.208984375,
"learning_rate": 2.4390243902439027e-08,
"loss": 1.1927,
"step": 818
},
{
"epoch": 0.998780487804878,
"grad_norm": 0.1875,
"learning_rate": 1.2195121951219513e-08,
"loss": 1.0641,
"step": 819
},
{
"epoch": 1.0,
"grad_norm": 0.2041015625,
"learning_rate": 0.0,
"loss": 1.1285,
"step": 820
}
],
"logging_steps": 1.0,
"max_steps": 820,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 0,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.657644303878193e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}