{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 337,
"global_step": 337,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.002967359050445104,
"grad_norm": 0.94921875,
"learning_rate": 1e-05,
"loss": 2.0677,
"step": 1
},
{
"epoch": 0.005934718100890208,
"grad_norm": 0.76953125,
"learning_rate": 9.97032640949555e-06,
"loss": 2.0863,
"step": 2
},
{
"epoch": 0.008902077151335312,
"grad_norm": 0.6640625,
"learning_rate": 9.940652818991098e-06,
"loss": 1.9814,
"step": 3
},
{
"epoch": 0.011869436201780416,
"grad_norm": 0.61328125,
"learning_rate": 9.910979228486648e-06,
"loss": 2.0,
"step": 4
},
{
"epoch": 0.01483679525222552,
"grad_norm": 0.59765625,
"learning_rate": 9.881305637982197e-06,
"loss": 1.9165,
"step": 5
},
{
"epoch": 0.017804154302670624,
"grad_norm": 0.54296875,
"learning_rate": 9.851632047477747e-06,
"loss": 1.8981,
"step": 6
},
{
"epoch": 0.020771513353115726,
"grad_norm": 0.55078125,
"learning_rate": 9.821958456973294e-06,
"loss": 1.8915,
"step": 7
},
{
"epoch": 0.02373887240356083,
"grad_norm": 0.5234375,
"learning_rate": 9.792284866468842e-06,
"loss": 1.8848,
"step": 8
},
{
"epoch": 0.026706231454005934,
"grad_norm": 0.486328125,
"learning_rate": 9.762611275964392e-06,
"loss": 1.7873,
"step": 9
},
{
"epoch": 0.02967359050445104,
"grad_norm": 0.486328125,
"learning_rate": 9.732937685459941e-06,
"loss": 1.8174,
"step": 10
},
{
"epoch": 0.032640949554896145,
"grad_norm": 0.478515625,
"learning_rate": 9.70326409495549e-06,
"loss": 1.8172,
"step": 11
},
{
"epoch": 0.03560830860534125,
"grad_norm": 0.41796875,
"learning_rate": 9.673590504451039e-06,
"loss": 1.7265,
"step": 12
},
{
"epoch": 0.03857566765578635,
"grad_norm": 0.416015625,
"learning_rate": 9.643916913946588e-06,
"loss": 1.6981,
"step": 13
},
{
"epoch": 0.04154302670623145,
"grad_norm": 0.38671875,
"learning_rate": 9.614243323442138e-06,
"loss": 1.7138,
"step": 14
},
{
"epoch": 0.04451038575667656,
"grad_norm": 0.359375,
"learning_rate": 9.584569732937687e-06,
"loss": 1.7191,
"step": 15
},
{
"epoch": 0.04747774480712166,
"grad_norm": 0.345703125,
"learning_rate": 9.554896142433235e-06,
"loss": 1.6637,
"step": 16
},
{
"epoch": 0.050445103857566766,
"grad_norm": 0.34375,
"learning_rate": 9.525222551928784e-06,
"loss": 1.6311,
"step": 17
},
{
"epoch": 0.05341246290801187,
"grad_norm": 0.330078125,
"learning_rate": 9.495548961424334e-06,
"loss": 1.6814,
"step": 18
},
{
"epoch": 0.05637982195845697,
"grad_norm": 0.361328125,
"learning_rate": 9.465875370919882e-06,
"loss": 1.7252,
"step": 19
},
{
"epoch": 0.05934718100890208,
"grad_norm": 0.337890625,
"learning_rate": 9.436201780415431e-06,
"loss": 1.677,
"step": 20
},
{
"epoch": 0.06231454005934718,
"grad_norm": 0.302734375,
"learning_rate": 9.406528189910979e-06,
"loss": 1.6096,
"step": 21
},
{
"epoch": 0.06528189910979229,
"grad_norm": 0.294921875,
"learning_rate": 9.376854599406528e-06,
"loss": 1.6227,
"step": 22
},
{
"epoch": 0.06824925816023739,
"grad_norm": 0.26953125,
"learning_rate": 9.347181008902078e-06,
"loss": 1.5964,
"step": 23
},
{
"epoch": 0.0712166172106825,
"grad_norm": 0.296875,
"learning_rate": 9.317507418397626e-06,
"loss": 1.5578,
"step": 24
},
{
"epoch": 0.07418397626112759,
"grad_norm": 0.271484375,
"learning_rate": 9.287833827893175e-06,
"loss": 1.5712,
"step": 25
},
{
"epoch": 0.0771513353115727,
"grad_norm": 0.271484375,
"learning_rate": 9.258160237388725e-06,
"loss": 1.5594,
"step": 26
},
{
"epoch": 0.08011869436201781,
"grad_norm": 0.271484375,
"learning_rate": 9.228486646884274e-06,
"loss": 1.6137,
"step": 27
},
{
"epoch": 0.0830860534124629,
"grad_norm": 0.275390625,
"learning_rate": 9.198813056379822e-06,
"loss": 1.5647,
"step": 28
},
{
"epoch": 0.08605341246290801,
"grad_norm": 0.267578125,
"learning_rate": 9.169139465875372e-06,
"loss": 1.5569,
"step": 29
},
{
"epoch": 0.08902077151335312,
"grad_norm": 0.236328125,
"learning_rate": 9.139465875370921e-06,
"loss": 1.5277,
"step": 30
},
{
"epoch": 0.09198813056379822,
"grad_norm": 0.25,
"learning_rate": 9.10979228486647e-06,
"loss": 1.5045,
"step": 31
},
{
"epoch": 0.09495548961424333,
"grad_norm": 0.275390625,
"learning_rate": 9.080118694362018e-06,
"loss": 1.5668,
"step": 32
},
{
"epoch": 0.09792284866468842,
"grad_norm": 0.3359375,
"learning_rate": 9.050445103857568e-06,
"loss": 1.541,
"step": 33
},
{
"epoch": 0.10089020771513353,
"grad_norm": 0.26953125,
"learning_rate": 9.020771513353116e-06,
"loss": 1.5232,
"step": 34
},
{
"epoch": 0.10385756676557864,
"grad_norm": 0.22265625,
"learning_rate": 8.991097922848665e-06,
"loss": 1.5146,
"step": 35
},
{
"epoch": 0.10682492581602374,
"grad_norm": 0.220703125,
"learning_rate": 8.961424332344215e-06,
"loss": 1.5174,
"step": 36
},
{
"epoch": 0.10979228486646884,
"grad_norm": 0.2080078125,
"learning_rate": 8.931750741839763e-06,
"loss": 1.4951,
"step": 37
},
{
"epoch": 0.11275964391691394,
"grad_norm": 0.2109375,
"learning_rate": 8.902077151335312e-06,
"loss": 1.4719,
"step": 38
},
{
"epoch": 0.11572700296735905,
"grad_norm": 0.212890625,
"learning_rate": 8.872403560830862e-06,
"loss": 1.4433,
"step": 39
},
{
"epoch": 0.11869436201780416,
"grad_norm": 0.30859375,
"learning_rate": 8.842729970326411e-06,
"loss": 1.4952,
"step": 40
},
{
"epoch": 0.12166172106824925,
"grad_norm": 0.20703125,
"learning_rate": 8.813056379821959e-06,
"loss": 1.5169,
"step": 41
},
{
"epoch": 0.12462908011869436,
"grad_norm": 0.2412109375,
"learning_rate": 8.783382789317508e-06,
"loss": 1.554,
"step": 42
},
{
"epoch": 0.12759643916913946,
"grad_norm": 0.216796875,
"learning_rate": 8.753709198813058e-06,
"loss": 1.4951,
"step": 43
},
{
"epoch": 0.13056379821958458,
"grad_norm": 0.21875,
"learning_rate": 8.724035608308606e-06,
"loss": 1.5065,
"step": 44
},
{
"epoch": 0.13353115727002968,
"grad_norm": 0.19921875,
"learning_rate": 8.694362017804155e-06,
"loss": 1.4919,
"step": 45
},
{
"epoch": 0.13649851632047477,
"grad_norm": 0.2021484375,
"learning_rate": 8.664688427299705e-06,
"loss": 1.5049,
"step": 46
},
{
"epoch": 0.1394658753709199,
"grad_norm": 0.1943359375,
"learning_rate": 8.635014836795252e-06,
"loss": 1.4194,
"step": 47
},
{
"epoch": 0.142433234421365,
"grad_norm": 0.2177734375,
"learning_rate": 8.605341246290802e-06,
"loss": 1.3983,
"step": 48
},
{
"epoch": 0.14540059347181009,
"grad_norm": 0.2099609375,
"learning_rate": 8.57566765578635e-06,
"loss": 1.4324,
"step": 49
},
{
"epoch": 0.14836795252225518,
"grad_norm": 0.1923828125,
"learning_rate": 8.5459940652819e-06,
"loss": 1.4409,
"step": 50
},
{
"epoch": 0.1513353115727003,
"grad_norm": 0.1953125,
"learning_rate": 8.516320474777449e-06,
"loss": 1.4665,
"step": 51
},
{
"epoch": 0.1543026706231454,
"grad_norm": 0.1796875,
"learning_rate": 8.486646884272998e-06,
"loss": 1.4116,
"step": 52
},
{
"epoch": 0.1572700296735905,
"grad_norm": 0.1943359375,
"learning_rate": 8.456973293768546e-06,
"loss": 1.4634,
"step": 53
},
{
"epoch": 0.16023738872403562,
"grad_norm": 0.1884765625,
"learning_rate": 8.427299703264096e-06,
"loss": 1.4233,
"step": 54
},
{
"epoch": 0.1632047477744807,
"grad_norm": 0.1884765625,
"learning_rate": 8.397626112759645e-06,
"loss": 1.4375,
"step": 55
},
{
"epoch": 0.1661721068249258,
"grad_norm": 0.185546875,
"learning_rate": 8.367952522255195e-06,
"loss": 1.4266,
"step": 56
},
{
"epoch": 0.16913946587537093,
"grad_norm": 0.201171875,
"learning_rate": 8.338278931750742e-06,
"loss": 1.4455,
"step": 57
},
{
"epoch": 0.17210682492581603,
"grad_norm": 0.1806640625,
"learning_rate": 8.308605341246292e-06,
"loss": 1.3704,
"step": 58
},
{
"epoch": 0.17507418397626112,
"grad_norm": 0.173828125,
"learning_rate": 8.278931750741841e-06,
"loss": 1.3802,
"step": 59
},
{
"epoch": 0.17804154302670624,
"grad_norm": 0.18359375,
"learning_rate": 8.24925816023739e-06,
"loss": 1.4001,
"step": 60
},
{
"epoch": 0.18100890207715134,
"grad_norm": 0.2001953125,
"learning_rate": 8.219584569732939e-06,
"loss": 1.4321,
"step": 61
},
{
"epoch": 0.18397626112759644,
"grad_norm": 0.232421875,
"learning_rate": 8.189910979228487e-06,
"loss": 1.4167,
"step": 62
},
{
"epoch": 0.18694362017804153,
"grad_norm": 0.1865234375,
"learning_rate": 8.160237388724036e-06,
"loss": 1.3671,
"step": 63
},
{
"epoch": 0.18991097922848665,
"grad_norm": 0.26171875,
"learning_rate": 8.130563798219586e-06,
"loss": 1.4385,
"step": 64
},
{
"epoch": 0.19287833827893175,
"grad_norm": 0.1875,
"learning_rate": 8.100890207715133e-06,
"loss": 1.4385,
"step": 65
},
{
"epoch": 0.19584569732937684,
"grad_norm": 0.1845703125,
"learning_rate": 8.071216617210683e-06,
"loss": 1.4083,
"step": 66
},
{
"epoch": 0.19881305637982197,
"grad_norm": 0.1962890625,
"learning_rate": 8.041543026706232e-06,
"loss": 1.3803,
"step": 67
},
{
"epoch": 0.20178041543026706,
"grad_norm": 0.2001953125,
"learning_rate": 8.011869436201782e-06,
"loss": 1.3954,
"step": 68
},
{
"epoch": 0.20474777448071216,
"grad_norm": 0.2138671875,
"learning_rate": 7.98219584569733e-06,
"loss": 1.3395,
"step": 69
},
{
"epoch": 0.20771513353115728,
"grad_norm": 0.1904296875,
"learning_rate": 7.95252225519288e-06,
"loss": 1.3295,
"step": 70
},
{
"epoch": 0.21068249258160238,
"grad_norm": 0.193359375,
"learning_rate": 7.922848664688429e-06,
"loss": 1.3906,
"step": 71
},
{
"epoch": 0.21364985163204747,
"grad_norm": 0.208984375,
"learning_rate": 7.893175074183978e-06,
"loss": 1.3739,
"step": 72
},
{
"epoch": 0.2166172106824926,
"grad_norm": 0.193359375,
"learning_rate": 7.863501483679526e-06,
"loss": 1.4093,
"step": 73
},
{
"epoch": 0.2195845697329377,
"grad_norm": 0.1962890625,
"learning_rate": 7.833827893175074e-06,
"loss": 1.3961,
"step": 74
},
{
"epoch": 0.22255192878338279,
"grad_norm": 0.2158203125,
"learning_rate": 7.804154302670623e-06,
"loss": 1.3947,
"step": 75
},
{
"epoch": 0.22551928783382788,
"grad_norm": 0.1806640625,
"learning_rate": 7.774480712166173e-06,
"loss": 1.3505,
"step": 76
},
{
"epoch": 0.228486646884273,
"grad_norm": 0.220703125,
"learning_rate": 7.744807121661722e-06,
"loss": 1.4362,
"step": 77
},
{
"epoch": 0.2314540059347181,
"grad_norm": 0.1962890625,
"learning_rate": 7.71513353115727e-06,
"loss": 1.3679,
"step": 78
},
{
"epoch": 0.2344213649851632,
"grad_norm": 0.2216796875,
"learning_rate": 7.68545994065282e-06,
"loss": 1.3595,
"step": 79
},
{
"epoch": 0.23738872403560832,
"grad_norm": 0.197265625,
"learning_rate": 7.655786350148369e-06,
"loss": 1.3918,
"step": 80
},
{
"epoch": 0.2403560830860534,
"grad_norm": 0.1826171875,
"learning_rate": 7.626112759643918e-06,
"loss": 1.4121,
"step": 81
},
{
"epoch": 0.2433234421364985,
"grad_norm": 0.1884765625,
"learning_rate": 7.5964391691394664e-06,
"loss": 1.3981,
"step": 82
},
{
"epoch": 0.24629080118694363,
"grad_norm": 0.2080078125,
"learning_rate": 7.566765578635016e-06,
"loss": 1.3622,
"step": 83
},
{
"epoch": 0.24925816023738873,
"grad_norm": 0.193359375,
"learning_rate": 7.537091988130565e-06,
"loss": 1.374,
"step": 84
},
{
"epoch": 0.2522255192878338,
"grad_norm": 0.1982421875,
"learning_rate": 7.507418397626114e-06,
"loss": 1.3944,
"step": 85
},
{
"epoch": 0.2551928783382789,
"grad_norm": 0.20703125,
"learning_rate": 7.477744807121662e-06,
"loss": 1.361,
"step": 86
},
{
"epoch": 0.258160237388724,
"grad_norm": 0.232421875,
"learning_rate": 7.4480712166172105e-06,
"loss": 1.333,
"step": 87
},
{
"epoch": 0.26112759643916916,
"grad_norm": 0.193359375,
"learning_rate": 7.41839762611276e-06,
"loss": 1.3303,
"step": 88
},
{
"epoch": 0.26409495548961426,
"grad_norm": 0.1865234375,
"learning_rate": 7.388724035608309e-06,
"loss": 1.3375,
"step": 89
},
{
"epoch": 0.26706231454005935,
"grad_norm": 0.1884765625,
"learning_rate": 7.359050445103858e-06,
"loss": 1.3534,
"step": 90
},
{
"epoch": 0.27002967359050445,
"grad_norm": 0.1962890625,
"learning_rate": 7.329376854599407e-06,
"loss": 1.3671,
"step": 91
},
{
"epoch": 0.27299703264094954,
"grad_norm": 0.181640625,
"learning_rate": 7.299703264094956e-06,
"loss": 1.3794,
"step": 92
},
{
"epoch": 0.27596439169139464,
"grad_norm": 0.1865234375,
"learning_rate": 7.270029673590505e-06,
"loss": 1.3515,
"step": 93
},
{
"epoch": 0.2789317507418398,
"grad_norm": 0.2021484375,
"learning_rate": 7.2403560830860545e-06,
"loss": 1.3377,
"step": 94
},
{
"epoch": 0.2818991097922849,
"grad_norm": 0.189453125,
"learning_rate": 7.210682492581603e-06,
"loss": 1.3657,
"step": 95
},
{
"epoch": 0.28486646884273,
"grad_norm": 0.2119140625,
"learning_rate": 7.181008902077153e-06,
"loss": 1.3964,
"step": 96
},
{
"epoch": 0.2878338278931751,
"grad_norm": 0.2197265625,
"learning_rate": 7.151335311572701e-06,
"loss": 1.2998,
"step": 97
},
{
"epoch": 0.29080118694362017,
"grad_norm": 0.2138671875,
"learning_rate": 7.12166172106825e-06,
"loss": 1.3535,
"step": 98
},
{
"epoch": 0.29376854599406527,
"grad_norm": 0.19140625,
"learning_rate": 7.091988130563799e-06,
"loss": 1.3267,
"step": 99
},
{
"epoch": 0.29673590504451036,
"grad_norm": 0.201171875,
"learning_rate": 7.062314540059347e-06,
"loss": 1.3391,
"step": 100
},
{
"epoch": 0.2997032640949555,
"grad_norm": 0.2021484375,
"learning_rate": 7.032640949554897e-06,
"loss": 1.3941,
"step": 101
},
{
"epoch": 0.3026706231454006,
"grad_norm": 0.2490234375,
"learning_rate": 7.0029673590504455e-06,
"loss": 1.3128,
"step": 102
},
{
"epoch": 0.3056379821958457,
"grad_norm": 0.181640625,
"learning_rate": 6.973293768545994e-06,
"loss": 1.3351,
"step": 103
},
{
"epoch": 0.3086053412462908,
"grad_norm": 0.17578125,
"learning_rate": 6.943620178041544e-06,
"loss": 1.3271,
"step": 104
},
{
"epoch": 0.3115727002967359,
"grad_norm": 0.1982421875,
"learning_rate": 6.913946587537092e-06,
"loss": 1.3283,
"step": 105
},
{
"epoch": 0.314540059347181,
"grad_norm": 0.189453125,
"learning_rate": 6.884272997032642e-06,
"loss": 1.294,
"step": 106
},
{
"epoch": 0.31750741839762614,
"grad_norm": 0.205078125,
"learning_rate": 6.85459940652819e-06,
"loss": 1.3383,
"step": 107
},
{
"epoch": 0.32047477744807124,
"grad_norm": 0.2119140625,
"learning_rate": 6.82492581602374e-06,
"loss": 1.3375,
"step": 108
},
{
"epoch": 0.32344213649851633,
"grad_norm": 0.1962890625,
"learning_rate": 6.795252225519289e-06,
"loss": 1.3399,
"step": 109
},
{
"epoch": 0.3264094955489614,
"grad_norm": 0.1884765625,
"learning_rate": 6.765578635014838e-06,
"loss": 1.345,
"step": 110
},
{
"epoch": 0.3293768545994065,
"grad_norm": 0.2021484375,
"learning_rate": 6.735905044510387e-06,
"loss": 1.2866,
"step": 111
},
{
"epoch": 0.3323442136498516,
"grad_norm": 0.189453125,
"learning_rate": 6.7062314540059345e-06,
"loss": 1.3115,
"step": 112
},
{
"epoch": 0.3353115727002967,
"grad_norm": 0.1845703125,
"learning_rate": 6.676557863501484e-06,
"loss": 1.3283,
"step": 113
},
{
"epoch": 0.33827893175074186,
"grad_norm": 0.1923828125,
"learning_rate": 6.646884272997033e-06,
"loss": 1.3087,
"step": 114
},
{
"epoch": 0.34124629080118696,
"grad_norm": 0.19140625,
"learning_rate": 6.617210682492582e-06,
"loss": 1.3121,
"step": 115
},
{
"epoch": 0.34421364985163205,
"grad_norm": 0.2021484375,
"learning_rate": 6.587537091988131e-06,
"loss": 1.3209,
"step": 116
},
{
"epoch": 0.34718100890207715,
"grad_norm": 0.185546875,
"learning_rate": 6.55786350148368e-06,
"loss": 1.3096,
"step": 117
},
{
"epoch": 0.35014836795252224,
"grad_norm": 0.1875,
"learning_rate": 6.528189910979229e-06,
"loss": 1.294,
"step": 118
},
{
"epoch": 0.35311572700296734,
"grad_norm": 0.197265625,
"learning_rate": 6.4985163204747785e-06,
"loss": 1.3233,
"step": 119
},
{
"epoch": 0.3560830860534125,
"grad_norm": 0.1923828125,
"learning_rate": 6.468842729970327e-06,
"loss": 1.3177,
"step": 120
},
{
"epoch": 0.3590504451038576,
"grad_norm": 0.1982421875,
"learning_rate": 6.439169139465876e-06,
"loss": 1.3206,
"step": 121
},
{
"epoch": 0.3620178041543027,
"grad_norm": 0.236328125,
"learning_rate": 6.409495548961425e-06,
"loss": 1.3653,
"step": 122
},
{
"epoch": 0.3649851632047478,
"grad_norm": 0.189453125,
"learning_rate": 6.379821958456974e-06,
"loss": 1.2995,
"step": 123
},
{
"epoch": 0.36795252225519287,
"grad_norm": 0.2109375,
"learning_rate": 6.3501483679525235e-06,
"loss": 1.3219,
"step": 124
},
{
"epoch": 0.37091988130563797,
"grad_norm": 0.287109375,
"learning_rate": 6.320474777448071e-06,
"loss": 1.3444,
"step": 125
},
{
"epoch": 0.37388724035608306,
"grad_norm": 0.251953125,
"learning_rate": 6.29080118694362e-06,
"loss": 1.2872,
"step": 126
},
{
"epoch": 0.3768545994065282,
"grad_norm": 0.2001953125,
"learning_rate": 6.2611275964391694e-06,
"loss": 1.3334,
"step": 127
},
{
"epoch": 0.3798219584569733,
"grad_norm": 0.2001953125,
"learning_rate": 6.231454005934718e-06,
"loss": 1.3167,
"step": 128
},
{
"epoch": 0.3827893175074184,
"grad_norm": 0.2041015625,
"learning_rate": 6.201780415430268e-06,
"loss": 1.3093,
"step": 129
},
{
"epoch": 0.3857566765578635,
"grad_norm": 0.2099609375,
"learning_rate": 6.172106824925816e-06,
"loss": 1.2717,
"step": 130
},
{
"epoch": 0.3887240356083086,
"grad_norm": 0.2021484375,
"learning_rate": 6.142433234421366e-06,
"loss": 1.3157,
"step": 131
},
{
"epoch": 0.3916913946587537,
"grad_norm": 0.19921875,
"learning_rate": 6.112759643916914e-06,
"loss": 1.3099,
"step": 132
},
{
"epoch": 0.39465875370919884,
"grad_norm": 0.1923828125,
"learning_rate": 6.083086053412464e-06,
"loss": 1.3178,
"step": 133
},
{
"epoch": 0.39762611275964393,
"grad_norm": 0.1806640625,
"learning_rate": 6.0534124629080126e-06,
"loss": 1.2858,
"step": 134
},
{
"epoch": 0.40059347181008903,
"grad_norm": 0.310546875,
"learning_rate": 6.023738872403562e-06,
"loss": 1.3109,
"step": 135
},
{
"epoch": 0.4035608308605341,
"grad_norm": 0.2138671875,
"learning_rate": 5.994065281899111e-06,
"loss": 1.3189,
"step": 136
},
{
"epoch": 0.4065281899109792,
"grad_norm": 0.1943359375,
"learning_rate": 5.964391691394659e-06,
"loss": 1.2719,
"step": 137
},
{
"epoch": 0.4094955489614243,
"grad_norm": 0.1884765625,
"learning_rate": 5.934718100890208e-06,
"loss": 1.2975,
"step": 138
},
{
"epoch": 0.4124629080118694,
"grad_norm": 0.20703125,
"learning_rate": 5.905044510385757e-06,
"loss": 1.2303,
"step": 139
},
{
"epoch": 0.41543026706231456,
"grad_norm": 0.2099609375,
"learning_rate": 5.875370919881306e-06,
"loss": 1.3048,
"step": 140
},
{
"epoch": 0.41839762611275966,
"grad_norm": 0.18359375,
"learning_rate": 5.845697329376855e-06,
"loss": 1.2735,
"step": 141
},
{
"epoch": 0.42136498516320475,
"grad_norm": 0.1806640625,
"learning_rate": 5.8160237388724035e-06,
"loss": 1.327,
"step": 142
},
{
"epoch": 0.42433234421364985,
"grad_norm": 0.2041015625,
"learning_rate": 5.786350148367953e-06,
"loss": 1.2812,
"step": 143
},
{
"epoch": 0.42729970326409494,
"grad_norm": 0.234375,
"learning_rate": 5.756676557863502e-06,
"loss": 1.3042,
"step": 144
},
{
"epoch": 0.43026706231454004,
"grad_norm": 0.19140625,
"learning_rate": 5.727002967359051e-06,
"loss": 1.2647,
"step": 145
},
{
"epoch": 0.4332344213649852,
"grad_norm": 0.205078125,
"learning_rate": 5.6973293768546e-06,
"loss": 1.3202,
"step": 146
},
{
"epoch": 0.4362017804154303,
"grad_norm": 0.1982421875,
"learning_rate": 5.667655786350149e-06,
"loss": 1.3324,
"step": 147
},
{
"epoch": 0.4391691394658754,
"grad_norm": 0.2001953125,
"learning_rate": 5.637982195845698e-06,
"loss": 1.2715,
"step": 148
},
{
"epoch": 0.4421364985163205,
"grad_norm": 0.185546875,
"learning_rate": 5.6083086053412475e-06,
"loss": 1.2783,
"step": 149
},
{
"epoch": 0.44510385756676557,
"grad_norm": 0.267578125,
"learning_rate": 5.578635014836796e-06,
"loss": 1.265,
"step": 150
},
{
"epoch": 0.44807121661721067,
"grad_norm": 0.193359375,
"learning_rate": 5.548961424332344e-06,
"loss": 1.2528,
"step": 151
},
{
"epoch": 0.45103857566765576,
"grad_norm": 0.2001953125,
"learning_rate": 5.5192878338278934e-06,
"loss": 1.3136,
"step": 152
},
{
"epoch": 0.4540059347181009,
"grad_norm": 0.2099609375,
"learning_rate": 5.489614243323442e-06,
"loss": 1.298,
"step": 153
},
{
"epoch": 0.456973293768546,
"grad_norm": 0.19140625,
"learning_rate": 5.459940652818992e-06,
"loss": 1.2735,
"step": 154
},
{
"epoch": 0.4599406528189911,
"grad_norm": 0.1962890625,
"learning_rate": 5.43026706231454e-06,
"loss": 1.3049,
"step": 155
},
{
"epoch": 0.4629080118694362,
"grad_norm": 0.201171875,
"learning_rate": 5.40059347181009e-06,
"loss": 1.2733,
"step": 156
},
{
"epoch": 0.4658753709198813,
"grad_norm": 0.2021484375,
"learning_rate": 5.370919881305638e-06,
"loss": 1.288,
"step": 157
},
{
"epoch": 0.4688427299703264,
"grad_norm": 0.189453125,
"learning_rate": 5.341246290801188e-06,
"loss": 1.3038,
"step": 158
},
{
"epoch": 0.47181008902077154,
"grad_norm": 0.1884765625,
"learning_rate": 5.3115727002967366e-06,
"loss": 1.2611,
"step": 159
},
{
"epoch": 0.47477744807121663,
"grad_norm": 0.193359375,
"learning_rate": 5.281899109792285e-06,
"loss": 1.3067,
"step": 160
},
{
"epoch": 0.47774480712166173,
"grad_norm": 0.2021484375,
"learning_rate": 5.252225519287835e-06,
"loss": 1.2886,
"step": 161
},
{
"epoch": 0.4807121661721068,
"grad_norm": 0.1923828125,
"learning_rate": 5.222551928783383e-06,
"loss": 1.2595,
"step": 162
},
{
"epoch": 0.4836795252225519,
"grad_norm": 0.1875,
"learning_rate": 5.192878338278933e-06,
"loss": 1.2751,
"step": 163
},
{
"epoch": 0.486646884272997,
"grad_norm": 0.1875,
"learning_rate": 5.163204747774481e-06,
"loss": 1.2961,
"step": 164
},
{
"epoch": 0.4896142433234421,
"grad_norm": 0.240234375,
"learning_rate": 5.133531157270029e-06,
"loss": 1.3248,
"step": 165
},
{
"epoch": 0.49258160237388726,
"grad_norm": 0.185546875,
"learning_rate": 5.103857566765579e-06,
"loss": 1.3029,
"step": 166
},
{
"epoch": 0.49554896142433236,
"grad_norm": 0.201171875,
"learning_rate": 5.0741839762611275e-06,
"loss": 1.2941,
"step": 167
},
{
"epoch": 0.49851632047477745,
"grad_norm": 0.1982421875,
"learning_rate": 5.044510385756677e-06,
"loss": 1.2685,
"step": 168
},
{
"epoch": 0.5014836795252225,
"grad_norm": 0.2001953125,
"learning_rate": 5.014836795252226e-06,
"loss": 1.2844,
"step": 169
},
{
"epoch": 0.5044510385756676,
"grad_norm": 0.2041015625,
"learning_rate": 4.985163204747775e-06,
"loss": 1.2717,
"step": 170
},
{
"epoch": 0.5074183976261127,
"grad_norm": 0.2060546875,
"learning_rate": 4.955489614243324e-06,
"loss": 1.2669,
"step": 171
},
{
"epoch": 0.5103857566765578,
"grad_norm": 0.203125,
"learning_rate": 4.925816023738873e-06,
"loss": 1.2924,
"step": 172
},
{
"epoch": 0.5133531157270029,
"grad_norm": 0.2138671875,
"learning_rate": 4.896142433234421e-06,
"loss": 1.2299,
"step": 173
},
{
"epoch": 0.516320474777448,
"grad_norm": 0.1943359375,
"learning_rate": 4.866468842729971e-06,
"loss": 1.287,
"step": 174
},
{
"epoch": 0.5192878338278932,
"grad_norm": 0.20703125,
"learning_rate": 4.836795252225519e-06,
"loss": 1.3162,
"step": 175
},
{
"epoch": 0.5222551928783383,
"grad_norm": 0.2041015625,
"learning_rate": 4.807121661721069e-06,
"loss": 1.2701,
"step": 176
},
{
"epoch": 0.5252225519287834,
"grad_norm": 0.2119140625,
"learning_rate": 4.7774480712166174e-06,
"loss": 1.2998,
"step": 177
},
{
"epoch": 0.5281899109792285,
"grad_norm": 0.197265625,
"learning_rate": 4.747774480712167e-06,
"loss": 1.2894,
"step": 178
},
{
"epoch": 0.5311572700296736,
"grad_norm": 0.2099609375,
"learning_rate": 4.718100890207716e-06,
"loss": 1.2528,
"step": 179
},
{
"epoch": 0.5341246290801187,
"grad_norm": 0.259765625,
"learning_rate": 4.688427299703264e-06,
"loss": 1.2502,
"step": 180
},
{
"epoch": 0.5370919881305638,
"grad_norm": 0.197265625,
"learning_rate": 4.658753709198813e-06,
"loss": 1.2745,
"step": 181
},
{
"epoch": 0.5400593471810089,
"grad_norm": 0.1865234375,
"learning_rate": 4.629080118694362e-06,
"loss": 1.2996,
"step": 182
},
{
"epoch": 0.543026706231454,
"grad_norm": 0.1875,
"learning_rate": 4.599406528189911e-06,
"loss": 1.2463,
"step": 183
},
{
"epoch": 0.5459940652818991,
"grad_norm": 0.1875,
"learning_rate": 4.5697329376854606e-06,
"loss": 1.2661,
"step": 184
},
{
"epoch": 0.5489614243323442,
"grad_norm": 0.2080078125,
"learning_rate": 4.540059347181009e-06,
"loss": 1.2275,
"step": 185
},
{
"epoch": 0.5519287833827893,
"grad_norm": 0.193359375,
"learning_rate": 4.510385756676558e-06,
"loss": 1.2486,
"step": 186
},
{
"epoch": 0.5548961424332344,
"grad_norm": 0.2099609375,
"learning_rate": 4.480712166172107e-06,
"loss": 1.2555,
"step": 187
},
{
"epoch": 0.5578635014836796,
"grad_norm": 0.21875,
"learning_rate": 4.451038575667656e-06,
"loss": 1.3215,
"step": 188
},
{
"epoch": 0.5608308605341247,
"grad_norm": 0.197265625,
"learning_rate": 4.4213649851632055e-06,
"loss": 1.2401,
"step": 189
},
{
"epoch": 0.5637982195845698,
"grad_norm": 0.2099609375,
"learning_rate": 4.391691394658754e-06,
"loss": 1.2612,
"step": 190
},
{
"epoch": 0.5667655786350149,
"grad_norm": 0.203125,
"learning_rate": 4.362017804154303e-06,
"loss": 1.2806,
"step": 191
},
{
"epoch": 0.56973293768546,
"grad_norm": 0.1982421875,
"learning_rate": 4.332344213649852e-06,
"loss": 1.2672,
"step": 192
},
{
"epoch": 0.5727002967359051,
"grad_norm": 0.1982421875,
"learning_rate": 4.302670623145401e-06,
"loss": 1.237,
"step": 193
},
{
"epoch": 0.5756676557863502,
"grad_norm": 0.1962890625,
"learning_rate": 4.27299703264095e-06,
"loss": 1.2676,
"step": 194
},
{
"epoch": 0.5786350148367952,
"grad_norm": 0.2158203125,
"learning_rate": 4.243323442136499e-06,
"loss": 1.2977,
"step": 195
},
{
"epoch": 0.5816023738872403,
"grad_norm": 0.205078125,
"learning_rate": 4.213649851632048e-06,
"loss": 1.2774,
"step": 196
},
{
"epoch": 0.5845697329376854,
"grad_norm": 0.20703125,
"learning_rate": 4.183976261127597e-06,
"loss": 1.2262,
"step": 197
},
{
"epoch": 0.5875370919881305,
"grad_norm": 0.2080078125,
"learning_rate": 4.154302670623146e-06,
"loss": 1.2861,
"step": 198
},
{
"epoch": 0.5905044510385756,
"grad_norm": 0.1865234375,
"learning_rate": 4.124629080118695e-06,
"loss": 1.2761,
"step": 199
},
{
"epoch": 0.5934718100890207,
"grad_norm": 0.2001953125,
"learning_rate": 4.094955489614243e-06,
"loss": 1.2924,
"step": 200
},
{
"epoch": 0.5964391691394659,
"grad_norm": 0.21875,
"learning_rate": 4.065281899109793e-06,
"loss": 1.2484,
"step": 201
},
{
"epoch": 0.599406528189911,
"grad_norm": 0.2080078125,
"learning_rate": 4.0356083086053414e-06,
"loss": 1.2271,
"step": 202
},
{
"epoch": 0.6023738872403561,
"grad_norm": 0.1923828125,
"learning_rate": 4.005934718100891e-06,
"loss": 1.284,
"step": 203
},
{
"epoch": 0.6053412462908012,
"grad_norm": 0.205078125,
"learning_rate": 3.97626112759644e-06,
"loss": 1.2713,
"step": 204
},
{
"epoch": 0.6083086053412463,
"grad_norm": 0.189453125,
"learning_rate": 3.946587537091989e-06,
"loss": 1.2357,
"step": 205
},
{
"epoch": 0.6112759643916914,
"grad_norm": 0.1904296875,
"learning_rate": 3.916913946587537e-06,
"loss": 1.3009,
"step": 206
},
{
"epoch": 0.6142433234421365,
"grad_norm": 0.1962890625,
"learning_rate": 3.887240356083086e-06,
"loss": 1.2574,
"step": 207
},
{
"epoch": 0.6172106824925816,
"grad_norm": 0.208984375,
"learning_rate": 3.857566765578635e-06,
"loss": 1.276,
"step": 208
},
{
"epoch": 0.6201780415430267,
"grad_norm": 0.212890625,
"learning_rate": 3.8278931750741846e-06,
"loss": 1.3123,
"step": 209
},
{
"epoch": 0.6231454005934718,
"grad_norm": 0.19921875,
"learning_rate": 3.7982195845697332e-06,
"loss": 1.315,
"step": 210
},
{
"epoch": 0.6261127596439169,
"grad_norm": 0.205078125,
"learning_rate": 3.7685459940652823e-06,
"loss": 1.2536,
"step": 211
},
{
"epoch": 0.629080118694362,
"grad_norm": 0.201171875,
"learning_rate": 3.738872403560831e-06,
"loss": 1.2606,
"step": 212
},
{
"epoch": 0.6320474777448071,
"grad_norm": 0.1923828125,
"learning_rate": 3.70919881305638e-06,
"loss": 1.25,
"step": 213
},
{
"epoch": 0.6350148367952523,
"grad_norm": 0.189453125,
"learning_rate": 3.679525222551929e-06,
"loss": 1.2494,
"step": 214
},
{
"epoch": 0.6379821958456974,
"grad_norm": 0.1943359375,
"learning_rate": 3.649851632047478e-06,
"loss": 1.2372,
"step": 215
},
{
"epoch": 0.6409495548961425,
"grad_norm": 0.310546875,
"learning_rate": 3.6201780415430273e-06,
"loss": 1.3252,
"step": 216
},
{
"epoch": 0.6439169139465876,
"grad_norm": 0.203125,
"learning_rate": 3.5905044510385763e-06,
"loss": 1.2716,
"step": 217
},
{
"epoch": 0.6468842729970327,
"grad_norm": 0.22265625,
"learning_rate": 3.560830860534125e-06,
"loss": 1.306,
"step": 218
},
{
"epoch": 0.6498516320474778,
"grad_norm": 0.201171875,
"learning_rate": 3.5311572700296736e-06,
"loss": 1.2718,
"step": 219
},
{
"epoch": 0.6528189910979229,
"grad_norm": 0.1923828125,
"learning_rate": 3.5014836795252227e-06,
"loss": 1.2342,
"step": 220
},
{
"epoch": 0.655786350148368,
"grad_norm": 0.19140625,
"learning_rate": 3.471810089020772e-06,
"loss": 1.2443,
"step": 221
},
{
"epoch": 0.658753709198813,
"grad_norm": 0.197265625,
"learning_rate": 3.442136498516321e-06,
"loss": 1.2524,
"step": 222
},
{
"epoch": 0.6617210682492581,
"grad_norm": 0.1923828125,
"learning_rate": 3.41246290801187e-06,
"loss": 1.292,
"step": 223
},
{
"epoch": 0.6646884272997032,
"grad_norm": 0.1962890625,
"learning_rate": 3.382789317507419e-06,
"loss": 1.2716,
"step": 224
},
{
"epoch": 0.6676557863501483,
"grad_norm": 0.193359375,
"learning_rate": 3.3531157270029673e-06,
"loss": 1.2327,
"step": 225
},
{
"epoch": 0.6706231454005934,
"grad_norm": 0.203125,
"learning_rate": 3.3234421364985163e-06,
"loss": 1.2138,
"step": 226
},
{
"epoch": 0.6735905044510386,
"grad_norm": 0.203125,
"learning_rate": 3.2937685459940654e-06,
"loss": 1.232,
"step": 227
},
{
"epoch": 0.6765578635014837,
"grad_norm": 0.1962890625,
"learning_rate": 3.2640949554896145e-06,
"loss": 1.2347,
"step": 228
},
{
"epoch": 0.6795252225519288,
"grad_norm": 0.2001953125,
"learning_rate": 3.2344213649851636e-06,
"loss": 1.2906,
"step": 229
},
{
"epoch": 0.6824925816023739,
"grad_norm": 0.1943359375,
"learning_rate": 3.2047477744807127e-06,
"loss": 1.2621,
"step": 230
},
{
"epoch": 0.685459940652819,
"grad_norm": 0.1826171875,
"learning_rate": 3.1750741839762617e-06,
"loss": 1.2336,
"step": 231
},
{
"epoch": 0.6884272997032641,
"grad_norm": 0.1962890625,
"learning_rate": 3.14540059347181e-06,
"loss": 1.2533,
"step": 232
},
{
"epoch": 0.6913946587537092,
"grad_norm": 0.21484375,
"learning_rate": 3.115727002967359e-06,
"loss": 1.2594,
"step": 233
},
{
"epoch": 0.6943620178041543,
"grad_norm": 0.2255859375,
"learning_rate": 3.086053412462908e-06,
"loss": 1.3164,
"step": 234
},
{
"epoch": 0.6973293768545994,
"grad_norm": 0.205078125,
"learning_rate": 3.056379821958457e-06,
"loss": 1.2366,
"step": 235
},
{
"epoch": 0.7002967359050445,
"grad_norm": 0.197265625,
"learning_rate": 3.0267062314540063e-06,
"loss": 1.2423,
"step": 236
},
{
"epoch": 0.7032640949554896,
"grad_norm": 0.1982421875,
"learning_rate": 2.9970326409495554e-06,
"loss": 1.2781,
"step": 237
},
{
"epoch": 0.7062314540059347,
"grad_norm": 0.185546875,
"learning_rate": 2.967359050445104e-06,
"loss": 1.2367,
"step": 238
},
{
"epoch": 0.7091988130563798,
"grad_norm": 0.1923828125,
"learning_rate": 2.937685459940653e-06,
"loss": 1.2378,
"step": 239
},
{
"epoch": 0.712166172106825,
"grad_norm": 0.189453125,
"learning_rate": 2.9080118694362018e-06,
"loss": 1.2291,
"step": 240
},
{
"epoch": 0.7151335311572701,
"grad_norm": 0.189453125,
"learning_rate": 2.878338278931751e-06,
"loss": 1.273,
"step": 241
},
{
"epoch": 0.7181008902077152,
"grad_norm": 0.2099609375,
"learning_rate": 2.8486646884273e-06,
"loss": 1.241,
"step": 242
},
{
"epoch": 0.7210682492581603,
"grad_norm": 0.1982421875,
"learning_rate": 2.818991097922849e-06,
"loss": 1.2248,
"step": 243
},
{
"epoch": 0.7240356083086054,
"grad_norm": 0.2109375,
"learning_rate": 2.789317507418398e-06,
"loss": 1.2397,
"step": 244
},
{
"epoch": 0.7270029673590505,
"grad_norm": 0.1982421875,
"learning_rate": 2.7596439169139467e-06,
"loss": 1.2308,
"step": 245
},
{
"epoch": 0.7299703264094956,
"grad_norm": 0.1962890625,
"learning_rate": 2.729970326409496e-06,
"loss": 1.244,
"step": 246
},
{
"epoch": 0.7329376854599406,
"grad_norm": 0.201171875,
"learning_rate": 2.700296735905045e-06,
"loss": 1.2553,
"step": 247
},
{
"epoch": 0.7359050445103857,
"grad_norm": 0.2197265625,
"learning_rate": 2.670623145400594e-06,
"loss": 1.2875,
"step": 248
},
{
"epoch": 0.7388724035608308,
"grad_norm": 0.205078125,
"learning_rate": 2.6409495548961426e-06,
"loss": 1.2506,
"step": 249
},
{
"epoch": 0.7418397626112759,
"grad_norm": 0.1923828125,
"learning_rate": 2.6112759643916917e-06,
"loss": 1.2325,
"step": 250
},
{
"epoch": 0.744807121661721,
"grad_norm": 0.197265625,
"learning_rate": 2.5816023738872403e-06,
"loss": 1.2208,
"step": 251
},
{
"epoch": 0.7477744807121661,
"grad_norm": 0.1865234375,
"learning_rate": 2.5519287833827894e-06,
"loss": 1.2257,
"step": 252
},
{
"epoch": 0.7507418397626113,
"grad_norm": 0.1943359375,
"learning_rate": 2.5222551928783385e-06,
"loss": 1.232,
"step": 253
},
{
"epoch": 0.7537091988130564,
"grad_norm": 0.2099609375,
"learning_rate": 2.4925816023738876e-06,
"loss": 1.27,
"step": 254
},
{
"epoch": 0.7566765578635015,
"grad_norm": 0.18359375,
"learning_rate": 2.4629080118694367e-06,
"loss": 1.2424,
"step": 255
},
{
"epoch": 0.7596439169139466,
"grad_norm": 0.2021484375,
"learning_rate": 2.4332344213649853e-06,
"loss": 1.2693,
"step": 256
},
{
"epoch": 0.7626112759643917,
"grad_norm": 0.19921875,
"learning_rate": 2.4035608308605344e-06,
"loss": 1.2664,
"step": 257
},
{
"epoch": 0.7655786350148368,
"grad_norm": 0.1943359375,
"learning_rate": 2.3738872403560835e-06,
"loss": 1.2501,
"step": 258
},
{
"epoch": 0.7685459940652819,
"grad_norm": 0.1923828125,
"learning_rate": 2.344213649851632e-06,
"loss": 1.2318,
"step": 259
},
{
"epoch": 0.771513353115727,
"grad_norm": 0.2109375,
"learning_rate": 2.314540059347181e-06,
"loss": 1.3029,
"step": 260
},
{
"epoch": 0.7744807121661721,
"grad_norm": 0.2080078125,
"learning_rate": 2.2848664688427303e-06,
"loss": 1.2724,
"step": 261
},
{
"epoch": 0.7774480712166172,
"grad_norm": 0.201171875,
"learning_rate": 2.255192878338279e-06,
"loss": 1.2878,
"step": 262
},
{
"epoch": 0.7804154302670623,
"grad_norm": 0.1982421875,
"learning_rate": 2.225519287833828e-06,
"loss": 1.2552,
"step": 263
},
{
"epoch": 0.7833827893175074,
"grad_norm": 0.201171875,
"learning_rate": 2.195845697329377e-06,
"loss": 1.2643,
"step": 264
},
{
"epoch": 0.7863501483679525,
"grad_norm": 0.2119140625,
"learning_rate": 2.166172106824926e-06,
"loss": 1.233,
"step": 265
},
{
"epoch": 0.7893175074183977,
"grad_norm": 0.1962890625,
"learning_rate": 2.136498516320475e-06,
"loss": 1.2309,
"step": 266
},
{
"epoch": 0.7922848664688428,
"grad_norm": 0.193359375,
"learning_rate": 2.106824925816024e-06,
"loss": 1.2229,
"step": 267
},
{
"epoch": 0.7952522255192879,
"grad_norm": 0.1953125,
"learning_rate": 2.077151335311573e-06,
"loss": 1.2481,
"step": 268
},
{
"epoch": 0.798219584569733,
"grad_norm": 0.19140625,
"learning_rate": 2.0474777448071216e-06,
"loss": 1.2295,
"step": 269
},
{
"epoch": 0.8011869436201781,
"grad_norm": 0.1884765625,
"learning_rate": 2.0178041543026707e-06,
"loss": 1.2315,
"step": 270
},
{
"epoch": 0.8041543026706232,
"grad_norm": 0.19140625,
"learning_rate": 1.98813056379822e-06,
"loss": 1.2581,
"step": 271
},
{
"epoch": 0.8071216617210683,
"grad_norm": 0.1953125,
"learning_rate": 1.9584569732937684e-06,
"loss": 1.1988,
"step": 272
},
{
"epoch": 0.8100890207715133,
"grad_norm": 0.19140625,
"learning_rate": 1.9287833827893175e-06,
"loss": 1.2688,
"step": 273
},
{
"epoch": 0.8130563798219584,
"grad_norm": 0.1923828125,
"learning_rate": 1.8991097922848666e-06,
"loss": 1.2592,
"step": 274
},
{
"epoch": 0.8160237388724035,
"grad_norm": 0.2578125,
"learning_rate": 1.8694362017804155e-06,
"loss": 1.262,
"step": 275
},
{
"epoch": 0.8189910979228486,
"grad_norm": 0.224609375,
"learning_rate": 1.8397626112759646e-06,
"loss": 1.2743,
"step": 276
},
{
"epoch": 0.8219584569732937,
"grad_norm": 0.2197265625,
"learning_rate": 1.8100890207715136e-06,
"loss": 1.2792,
"step": 277
},
{
"epoch": 0.8249258160237388,
"grad_norm": 0.22265625,
"learning_rate": 1.7804154302670625e-06,
"loss": 1.2854,
"step": 278
},
{
"epoch": 0.827893175074184,
"grad_norm": 0.208984375,
"learning_rate": 1.7507418397626114e-06,
"loss": 1.3017,
"step": 279
},
{
"epoch": 0.8308605341246291,
"grad_norm": 0.201171875,
"learning_rate": 1.7210682492581604e-06,
"loss": 1.2655,
"step": 280
},
{
"epoch": 0.8338278931750742,
"grad_norm": 0.1962890625,
"learning_rate": 1.6913946587537095e-06,
"loss": 1.2525,
"step": 281
},
{
"epoch": 0.8367952522255193,
"grad_norm": 0.1875,
"learning_rate": 1.6617210682492582e-06,
"loss": 1.247,
"step": 282
},
{
"epoch": 0.8397626112759644,
"grad_norm": 0.185546875,
"learning_rate": 1.6320474777448073e-06,
"loss": 1.2561,
"step": 283
},
{
"epoch": 0.8427299703264095,
"grad_norm": 0.1904296875,
"learning_rate": 1.6023738872403563e-06,
"loss": 1.2832,
"step": 284
},
{
"epoch": 0.8456973293768546,
"grad_norm": 0.203125,
"learning_rate": 1.572700296735905e-06,
"loss": 1.2847,
"step": 285
},
{
"epoch": 0.8486646884272997,
"grad_norm": 0.1953125,
"learning_rate": 1.543026706231454e-06,
"loss": 1.2585,
"step": 286
},
{
"epoch": 0.8516320474777448,
"grad_norm": 0.2099609375,
"learning_rate": 1.5133531157270031e-06,
"loss": 1.293,
"step": 287
},
{
"epoch": 0.8545994065281899,
"grad_norm": 0.2041015625,
"learning_rate": 1.483679525222552e-06,
"loss": 1.2895,
"step": 288
},
{
"epoch": 0.857566765578635,
"grad_norm": 0.1875,
"learning_rate": 1.4540059347181009e-06,
"loss": 1.2713,
"step": 289
},
{
"epoch": 0.8605341246290801,
"grad_norm": 0.193359375,
"learning_rate": 1.42433234421365e-06,
"loss": 1.2637,
"step": 290
},
{
"epoch": 0.8635014836795252,
"grad_norm": 0.21484375,
"learning_rate": 1.394658753709199e-06,
"loss": 1.2238,
"step": 291
},
{
"epoch": 0.8664688427299704,
"grad_norm": 0.2158203125,
"learning_rate": 1.364985163204748e-06,
"loss": 1.3296,
"step": 292
},
{
"epoch": 0.8694362017804155,
"grad_norm": 0.189453125,
"learning_rate": 1.335311572700297e-06,
"loss": 1.2612,
"step": 293
},
{
"epoch": 0.8724035608308606,
"grad_norm": 0.19921875,
"learning_rate": 1.3056379821958458e-06,
"loss": 1.2456,
"step": 294
},
{
"epoch": 0.8753709198813057,
"grad_norm": 0.2001953125,
"learning_rate": 1.2759643916913947e-06,
"loss": 1.2478,
"step": 295
},
{
"epoch": 0.8783382789317508,
"grad_norm": 0.193359375,
"learning_rate": 1.2462908011869438e-06,
"loss": 1.2667,
"step": 296
},
{
"epoch": 0.8813056379821959,
"grad_norm": 0.2041015625,
"learning_rate": 1.2166172106824927e-06,
"loss": 1.2313,
"step": 297
},
{
"epoch": 0.884272997032641,
"grad_norm": 0.1923828125,
"learning_rate": 1.1869436201780417e-06,
"loss": 1.2604,
"step": 298
},
{
"epoch": 0.887240356083086,
"grad_norm": 0.2001953125,
"learning_rate": 1.1572700296735906e-06,
"loss": 1.2672,
"step": 299
},
{
"epoch": 0.8902077151335311,
"grad_norm": 0.193359375,
"learning_rate": 1.1275964391691395e-06,
"loss": 1.253,
"step": 300
},
{
"epoch": 0.8931750741839762,
"grad_norm": 0.1962890625,
"learning_rate": 1.0979228486646885e-06,
"loss": 1.2247,
"step": 301
},
{
"epoch": 0.8961424332344213,
"grad_norm": 0.2119140625,
"learning_rate": 1.0682492581602374e-06,
"loss": 1.291,
"step": 302
},
{
"epoch": 0.8991097922848664,
"grad_norm": 0.1904296875,
"learning_rate": 1.0385756676557865e-06,
"loss": 1.2788,
"step": 303
},
{
"epoch": 0.9020771513353115,
"grad_norm": 0.1884765625,
"learning_rate": 1.0089020771513354e-06,
"loss": 1.2542,
"step": 304
},
{
"epoch": 0.9050445103857567,
"grad_norm": 0.1884765625,
"learning_rate": 9.792284866468842e-07,
"loss": 1.2522,
"step": 305
},
{
"epoch": 0.9080118694362018,
"grad_norm": 0.1953125,
"learning_rate": 9.495548961424333e-07,
"loss": 1.2059,
"step": 306
},
{
"epoch": 0.9109792284866469,
"grad_norm": 0.189453125,
"learning_rate": 9.198813056379823e-07,
"loss": 1.2572,
"step": 307
},
{
"epoch": 0.913946587537092,
"grad_norm": 0.1923828125,
"learning_rate": 8.902077151335312e-07,
"loss": 1.2355,
"step": 308
},
{
"epoch": 0.9169139465875371,
"grad_norm": 0.212890625,
"learning_rate": 8.605341246290802e-07,
"loss": 1.2088,
"step": 309
},
{
"epoch": 0.9198813056379822,
"grad_norm": 0.193359375,
"learning_rate": 8.308605341246291e-07,
"loss": 1.2711,
"step": 310
},
{
"epoch": 0.9228486646884273,
"grad_norm": 0.1845703125,
"learning_rate": 8.011869436201782e-07,
"loss": 1.2234,
"step": 311
},
{
"epoch": 0.9258160237388724,
"grad_norm": 0.205078125,
"learning_rate": 7.71513353115727e-07,
"loss": 1.3062,
"step": 312
},
{
"epoch": 0.9287833827893175,
"grad_norm": 0.2021484375,
"learning_rate": 7.41839762611276e-07,
"loss": 1.2521,
"step": 313
},
{
"epoch": 0.9317507418397626,
"grad_norm": 0.2099609375,
"learning_rate": 7.12166172106825e-07,
"loss": 1.2534,
"step": 314
},
{
"epoch": 0.9347181008902077,
"grad_norm": 0.1923828125,
"learning_rate": 6.82492581602374e-07,
"loss": 1.2616,
"step": 315
},
{
"epoch": 0.9376854599406528,
"grad_norm": 0.197265625,
"learning_rate": 6.528189910979229e-07,
"loss": 1.3005,
"step": 316
},
{
"epoch": 0.9406528189910979,
"grad_norm": 0.19921875,
"learning_rate": 6.231454005934719e-07,
"loss": 1.3083,
"step": 317
},
{
"epoch": 0.9436201780415431,
"grad_norm": 0.1845703125,
"learning_rate": 5.934718100890209e-07,
"loss": 1.2245,
"step": 318
},
{
"epoch": 0.9465875370919882,
"grad_norm": 0.1962890625,
"learning_rate": 5.637982195845697e-07,
"loss": 1.2227,
"step": 319
},
{
"epoch": 0.9495548961424333,
"grad_norm": 0.1904296875,
"learning_rate": 5.341246290801187e-07,
"loss": 1.2399,
"step": 320
},
{
"epoch": 0.9525222551928784,
"grad_norm": 0.1962890625,
"learning_rate": 5.044510385756677e-07,
"loss": 1.2081,
"step": 321
},
{
"epoch": 0.9554896142433235,
"grad_norm": 0.189453125,
"learning_rate": 4.7477744807121665e-07,
"loss": 1.2437,
"step": 322
},
{
"epoch": 0.9584569732937686,
"grad_norm": 0.22265625,
"learning_rate": 4.451038575667656e-07,
"loss": 1.1966,
"step": 323
},
{
"epoch": 0.9614243323442137,
"grad_norm": 0.1982421875,
"learning_rate": 4.1543026706231454e-07,
"loss": 1.2334,
"step": 324
},
{
"epoch": 0.9643916913946587,
"grad_norm": 0.201171875,
"learning_rate": 3.857566765578635e-07,
"loss": 1.2657,
"step": 325
},
{
"epoch": 0.9673590504451038,
"grad_norm": 0.1923828125,
"learning_rate": 3.560830860534125e-07,
"loss": 1.2559,
"step": 326
},
{
"epoch": 0.9703264094955489,
"grad_norm": 0.18359375,
"learning_rate": 3.2640949554896146e-07,
"loss": 1.2565,
"step": 327
},
{
"epoch": 0.973293768545994,
"grad_norm": 0.1845703125,
"learning_rate": 2.9673590504451043e-07,
"loss": 1.2533,
"step": 328
},
{
"epoch": 0.9762611275964391,
"grad_norm": 0.1875,
"learning_rate": 2.6706231454005935e-07,
"loss": 1.2565,
"step": 329
},
{
"epoch": 0.9792284866468842,
"grad_norm": 0.2490234375,
"learning_rate": 2.3738872403560833e-07,
"loss": 1.331,
"step": 330
},
{
"epoch": 0.9821958456973294,
"grad_norm": 0.2138671875,
"learning_rate": 2.0771513353115727e-07,
"loss": 1.2968,
"step": 331
},
{
"epoch": 0.9851632047477745,
"grad_norm": 0.1865234375,
"learning_rate": 1.7804154302670624e-07,
"loss": 1.2477,
"step": 332
},
{
"epoch": 0.9881305637982196,
"grad_norm": 0.189453125,
"learning_rate": 1.4836795252225522e-07,
"loss": 1.2617,
"step": 333
},
{
"epoch": 0.9910979228486647,
"grad_norm": 0.1904296875,
"learning_rate": 1.1869436201780416e-07,
"loss": 1.2204,
"step": 334
},
{
"epoch": 0.9940652818991098,
"grad_norm": 0.2265625,
"learning_rate": 8.902077151335312e-08,
"loss": 1.253,
"step": 335
},
{
"epoch": 0.9970326409495549,
"grad_norm": 0.208984375,
"learning_rate": 5.934718100890208e-08,
"loss": 1.2939,
"step": 336
},
{
"epoch": 1.0,
"grad_norm": 0.76171875,
"learning_rate": 2.967359050445104e-08,
"loss": 1.2321,
"step": 337
},
{
"epoch": 1.0,
"eval_loss": 1.2519956827163696,
"eval_runtime": 12.2277,
"eval_samples_per_second": 2.862,
"eval_steps_per_second": 0.409,
"step": 337
}
],
"logging_steps": 1.0,
"max_steps": 337,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 0,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.154519532014928e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}