{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 0,
"global_step": 666,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.003003003003003003,
"grad_norm": 0.380859375,
"learning_rate": 9.984984984984985e-06,
"loss": 1.8538,
"step": 1
},
{
"epoch": 0.006006006006006006,
"grad_norm": 0.41015625,
"learning_rate": 9.96996996996997e-06,
"loss": 1.9422,
"step": 2
},
{
"epoch": 0.009009009009009009,
"grad_norm": 0.44921875,
"learning_rate": 9.954954954954956e-06,
"loss": 1.94,
"step": 3
},
{
"epoch": 0.012012012012012012,
"grad_norm": 0.388671875,
"learning_rate": 9.93993993993994e-06,
"loss": 1.89,
"step": 4
},
{
"epoch": 0.015015015015015015,
"grad_norm": 0.35546875,
"learning_rate": 9.924924924924926e-06,
"loss": 1.7841,
"step": 5
},
{
"epoch": 0.018018018018018018,
"grad_norm": 0.455078125,
"learning_rate": 9.90990990990991e-06,
"loss": 1.8042,
"step": 6
},
{
"epoch": 0.021021021021021023,
"grad_norm": 0.390625,
"learning_rate": 9.894894894894896e-06,
"loss": 1.7175,
"step": 7
},
{
"epoch": 0.024024024024024024,
"grad_norm": 0.34765625,
"learning_rate": 9.879879879879881e-06,
"loss": 1.7793,
"step": 8
},
{
"epoch": 0.02702702702702703,
"grad_norm": 0.328125,
"learning_rate": 9.864864864864865e-06,
"loss": 1.7466,
"step": 9
},
{
"epoch": 0.03003003003003003,
"grad_norm": 0.314453125,
"learning_rate": 9.849849849849851e-06,
"loss": 1.7399,
"step": 10
},
{
"epoch": 0.03303303303303303,
"grad_norm": 0.30859375,
"learning_rate": 9.834834834834835e-06,
"loss": 1.7661,
"step": 11
},
{
"epoch": 0.036036036036036036,
"grad_norm": 0.302734375,
"learning_rate": 9.81981981981982e-06,
"loss": 1.7423,
"step": 12
},
{
"epoch": 0.03903903903903904,
"grad_norm": 0.2890625,
"learning_rate": 9.804804804804806e-06,
"loss": 1.7121,
"step": 13
},
{
"epoch": 0.042042042042042045,
"grad_norm": 0.271484375,
"learning_rate": 9.78978978978979e-06,
"loss": 1.6143,
"step": 14
},
{
"epoch": 0.04504504504504504,
"grad_norm": 0.2890625,
"learning_rate": 9.774774774774776e-06,
"loss": 1.6264,
"step": 15
},
{
"epoch": 0.04804804804804805,
"grad_norm": 0.28125,
"learning_rate": 9.75975975975976e-06,
"loss": 1.6299,
"step": 16
},
{
"epoch": 0.05105105105105105,
"grad_norm": 0.275390625,
"learning_rate": 9.744744744744746e-06,
"loss": 1.5991,
"step": 17
},
{
"epoch": 0.05405405405405406,
"grad_norm": 0.271484375,
"learning_rate": 9.729729729729732e-06,
"loss": 1.5668,
"step": 18
},
{
"epoch": 0.057057057057057055,
"grad_norm": 0.294921875,
"learning_rate": 9.714714714714716e-06,
"loss": 1.6369,
"step": 19
},
{
"epoch": 0.06006006006006006,
"grad_norm": 0.265625,
"learning_rate": 9.699699699699701e-06,
"loss": 1.5686,
"step": 20
},
{
"epoch": 0.06306306306306306,
"grad_norm": 0.2578125,
"learning_rate": 9.684684684684685e-06,
"loss": 1.5885,
"step": 21
},
{
"epoch": 0.06606606606606606,
"grad_norm": 0.2431640625,
"learning_rate": 9.669669669669671e-06,
"loss": 1.5285,
"step": 22
},
{
"epoch": 0.06906906906906907,
"grad_norm": 0.2490234375,
"learning_rate": 9.654654654654655e-06,
"loss": 1.5526,
"step": 23
},
{
"epoch": 0.07207207207207207,
"grad_norm": 0.2353515625,
"learning_rate": 9.63963963963964e-06,
"loss": 1.5849,
"step": 24
},
{
"epoch": 0.07507507507507508,
"grad_norm": 0.23828125,
"learning_rate": 9.624624624624626e-06,
"loss": 1.4842,
"step": 25
},
{
"epoch": 0.07807807807807808,
"grad_norm": 0.232421875,
"learning_rate": 9.60960960960961e-06,
"loss": 1.5246,
"step": 26
},
{
"epoch": 0.08108108108108109,
"grad_norm": 0.2470703125,
"learning_rate": 9.594594594594594e-06,
"loss": 1.4512,
"step": 27
},
{
"epoch": 0.08408408408408409,
"grad_norm": 0.2236328125,
"learning_rate": 9.57957957957958e-06,
"loss": 1.5347,
"step": 28
},
{
"epoch": 0.08708708708708708,
"grad_norm": 0.2333984375,
"learning_rate": 9.564564564564566e-06,
"loss": 1.5637,
"step": 29
},
{
"epoch": 0.09009009009009009,
"grad_norm": 0.2080078125,
"learning_rate": 9.54954954954955e-06,
"loss": 1.5056,
"step": 30
},
{
"epoch": 0.09309309309309309,
"grad_norm": 0.220703125,
"learning_rate": 9.534534534534535e-06,
"loss": 1.475,
"step": 31
},
{
"epoch": 0.0960960960960961,
"grad_norm": 0.216796875,
"learning_rate": 9.51951951951952e-06,
"loss": 1.5068,
"step": 32
},
{
"epoch": 0.0990990990990991,
"grad_norm": 0.220703125,
"learning_rate": 9.504504504504505e-06,
"loss": 1.4519,
"step": 33
},
{
"epoch": 0.1021021021021021,
"grad_norm": 0.78125,
"learning_rate": 9.489489489489491e-06,
"loss": 1.485,
"step": 34
},
{
"epoch": 0.10510510510510511,
"grad_norm": 0.2119140625,
"learning_rate": 9.474474474474475e-06,
"loss": 1.4007,
"step": 35
},
{
"epoch": 0.10810810810810811,
"grad_norm": 0.197265625,
"learning_rate": 9.45945945945946e-06,
"loss": 1.3841,
"step": 36
},
{
"epoch": 0.1111111111111111,
"grad_norm": 0.1923828125,
"learning_rate": 9.444444444444445e-06,
"loss": 1.3714,
"step": 37
},
{
"epoch": 0.11411411411411411,
"grad_norm": 0.1767578125,
"learning_rate": 9.42942942942943e-06,
"loss": 1.3823,
"step": 38
},
{
"epoch": 0.11711711711711711,
"grad_norm": 0.1904296875,
"learning_rate": 9.414414414414416e-06,
"loss": 1.4019,
"step": 39
},
{
"epoch": 0.12012012012012012,
"grad_norm": 0.1865234375,
"learning_rate": 9.3993993993994e-06,
"loss": 1.367,
"step": 40
},
{
"epoch": 0.12312312312312312,
"grad_norm": 0.185546875,
"learning_rate": 9.384384384384386e-06,
"loss": 1.3934,
"step": 41
},
{
"epoch": 0.12612612612612611,
"grad_norm": 0.1875,
"learning_rate": 9.36936936936937e-06,
"loss": 1.3594,
"step": 42
},
{
"epoch": 0.12912912912912913,
"grad_norm": 0.193359375,
"learning_rate": 9.354354354354355e-06,
"loss": 1.374,
"step": 43
},
{
"epoch": 0.13213213213213212,
"grad_norm": 0.1845703125,
"learning_rate": 9.339339339339341e-06,
"loss": 1.3473,
"step": 44
},
{
"epoch": 0.13513513513513514,
"grad_norm": 0.1767578125,
"learning_rate": 9.324324324324325e-06,
"loss": 1.3387,
"step": 45
},
{
"epoch": 0.13813813813813813,
"grad_norm": 0.2158203125,
"learning_rate": 9.30930930930931e-06,
"loss": 1.2735,
"step": 46
},
{
"epoch": 0.14114114114114115,
"grad_norm": 0.1787109375,
"learning_rate": 9.294294294294295e-06,
"loss": 1.3242,
"step": 47
},
{
"epoch": 0.14414414414414414,
"grad_norm": 0.1953125,
"learning_rate": 9.27927927927928e-06,
"loss": 1.3704,
"step": 48
},
{
"epoch": 0.14714714714714713,
"grad_norm": 0.1806640625,
"learning_rate": 9.264264264264266e-06,
"loss": 1.3486,
"step": 49
},
{
"epoch": 0.15015015015015015,
"grad_norm": 0.16796875,
"learning_rate": 9.24924924924925e-06,
"loss": 1.3569,
"step": 50
},
{
"epoch": 0.15315315315315314,
"grad_norm": 0.1650390625,
"learning_rate": 9.234234234234236e-06,
"loss": 1.3056,
"step": 51
},
{
"epoch": 0.15615615615615616,
"grad_norm": 0.1796875,
"learning_rate": 9.21921921921922e-06,
"loss": 1.2833,
"step": 52
},
{
"epoch": 0.15915915915915915,
"grad_norm": 0.1650390625,
"learning_rate": 9.204204204204204e-06,
"loss": 1.2864,
"step": 53
},
{
"epoch": 0.16216216216216217,
"grad_norm": 0.177734375,
"learning_rate": 9.189189189189191e-06,
"loss": 1.3085,
"step": 54
},
{
"epoch": 0.16516516516516516,
"grad_norm": 0.169921875,
"learning_rate": 9.174174174174175e-06,
"loss": 1.3305,
"step": 55
},
{
"epoch": 0.16816816816816818,
"grad_norm": 0.224609375,
"learning_rate": 9.15915915915916e-06,
"loss": 1.256,
"step": 56
},
{
"epoch": 0.17117117117117117,
"grad_norm": 0.1552734375,
"learning_rate": 9.144144144144145e-06,
"loss": 1.3023,
"step": 57
},
{
"epoch": 0.17417417417417416,
"grad_norm": 0.734375,
"learning_rate": 9.129129129129129e-06,
"loss": 1.2863,
"step": 58
},
{
"epoch": 0.17717717717717718,
"grad_norm": 0.16796875,
"learning_rate": 9.114114114114115e-06,
"loss": 1.3406,
"step": 59
},
{
"epoch": 0.18018018018018017,
"grad_norm": 0.1806640625,
"learning_rate": 9.0990990990991e-06,
"loss": 1.2647,
"step": 60
},
{
"epoch": 0.1831831831831832,
"grad_norm": 0.169921875,
"learning_rate": 9.084084084084084e-06,
"loss": 1.2668,
"step": 61
},
{
"epoch": 0.18618618618618618,
"grad_norm": 0.16796875,
"learning_rate": 9.06906906906907e-06,
"loss": 1.31,
"step": 62
},
{
"epoch": 0.1891891891891892,
"grad_norm": 0.1494140625,
"learning_rate": 9.054054054054054e-06,
"loss": 1.2698,
"step": 63
},
{
"epoch": 0.1921921921921922,
"grad_norm": 0.154296875,
"learning_rate": 9.03903903903904e-06,
"loss": 1.257,
"step": 64
},
{
"epoch": 0.19519519519519518,
"grad_norm": 0.158203125,
"learning_rate": 9.024024024024025e-06,
"loss": 1.3423,
"step": 65
},
{
"epoch": 0.1981981981981982,
"grad_norm": 0.1796875,
"learning_rate": 9.00900900900901e-06,
"loss": 1.2955,
"step": 66
},
{
"epoch": 0.2012012012012012,
"grad_norm": 0.1650390625,
"learning_rate": 8.993993993993995e-06,
"loss": 1.2188,
"step": 67
},
{
"epoch": 0.2042042042042042,
"grad_norm": 0.23828125,
"learning_rate": 8.97897897897898e-06,
"loss": 1.3124,
"step": 68
},
{
"epoch": 0.2072072072072072,
"grad_norm": 0.23828125,
"learning_rate": 8.963963963963965e-06,
"loss": 1.2602,
"step": 69
},
{
"epoch": 0.21021021021021022,
"grad_norm": 0.15625,
"learning_rate": 8.94894894894895e-06,
"loss": 1.2859,
"step": 70
},
{
"epoch": 0.2132132132132132,
"grad_norm": 0.1923828125,
"learning_rate": 8.933933933933935e-06,
"loss": 1.2146,
"step": 71
},
{
"epoch": 0.21621621621621623,
"grad_norm": 0.158203125,
"learning_rate": 8.91891891891892e-06,
"loss": 1.2468,
"step": 72
},
{
"epoch": 0.21921921921921922,
"grad_norm": 0.1640625,
"learning_rate": 8.903903903903904e-06,
"loss": 1.295,
"step": 73
},
{
"epoch": 0.2222222222222222,
"grad_norm": 0.33984375,
"learning_rate": 8.888888888888888e-06,
"loss": 1.2008,
"step": 74
},
{
"epoch": 0.22522522522522523,
"grad_norm": 0.1611328125,
"learning_rate": 8.873873873873876e-06,
"loss": 1.1868,
"step": 75
},
{
"epoch": 0.22822822822822822,
"grad_norm": 0.1796875,
"learning_rate": 8.85885885885886e-06,
"loss": 1.1832,
"step": 76
},
{
"epoch": 0.23123123123123124,
"grad_norm": 0.15625,
"learning_rate": 8.843843843843844e-06,
"loss": 1.2389,
"step": 77
},
{
"epoch": 0.23423423423423423,
"grad_norm": 0.1455078125,
"learning_rate": 8.82882882882883e-06,
"loss": 1.1528,
"step": 78
},
{
"epoch": 0.23723723723723725,
"grad_norm": 0.166015625,
"learning_rate": 8.813813813813813e-06,
"loss": 1.1683,
"step": 79
},
{
"epoch": 0.24024024024024024,
"grad_norm": 0.169921875,
"learning_rate": 8.798798798798799e-06,
"loss": 1.1644,
"step": 80
},
{
"epoch": 0.24324324324324326,
"grad_norm": 0.2119140625,
"learning_rate": 8.783783783783785e-06,
"loss": 1.2171,
"step": 81
},
{
"epoch": 0.24624624624624625,
"grad_norm": 0.1669921875,
"learning_rate": 8.768768768768769e-06,
"loss": 1.2204,
"step": 82
},
{
"epoch": 0.24924924924924924,
"grad_norm": 0.15234375,
"learning_rate": 8.753753753753755e-06,
"loss": 1.1925,
"step": 83
},
{
"epoch": 0.25225225225225223,
"grad_norm": 0.1611328125,
"learning_rate": 8.738738738738739e-06,
"loss": 1.2293,
"step": 84
},
{
"epoch": 0.2552552552552553,
"grad_norm": 0.1513671875,
"learning_rate": 8.723723723723724e-06,
"loss": 1.1508,
"step": 85
},
{
"epoch": 0.25825825825825827,
"grad_norm": 0.1591796875,
"learning_rate": 8.70870870870871e-06,
"loss": 1.1758,
"step": 86
},
{
"epoch": 0.26126126126126126,
"grad_norm": 0.1708984375,
"learning_rate": 8.693693693693694e-06,
"loss": 1.1769,
"step": 87
},
{
"epoch": 0.26426426426426425,
"grad_norm": 0.169921875,
"learning_rate": 8.67867867867868e-06,
"loss": 1.1837,
"step": 88
},
{
"epoch": 0.2672672672672673,
"grad_norm": 0.1572265625,
"learning_rate": 8.663663663663664e-06,
"loss": 1.1788,
"step": 89
},
{
"epoch": 0.2702702702702703,
"grad_norm": 0.166015625,
"learning_rate": 8.64864864864865e-06,
"loss": 1.2157,
"step": 90
},
{
"epoch": 0.2732732732732733,
"grad_norm": 0.232421875,
"learning_rate": 8.633633633633635e-06,
"loss": 1.1862,
"step": 91
},
{
"epoch": 0.27627627627627627,
"grad_norm": 0.1591796875,
"learning_rate": 8.618618618618619e-06,
"loss": 1.1819,
"step": 92
},
{
"epoch": 0.27927927927927926,
"grad_norm": 0.2255859375,
"learning_rate": 8.603603603603605e-06,
"loss": 1.1669,
"step": 93
},
{
"epoch": 0.2822822822822823,
"grad_norm": 0.158203125,
"learning_rate": 8.588588588588589e-06,
"loss": 1.1939,
"step": 94
},
{
"epoch": 0.2852852852852853,
"grad_norm": 0.1591796875,
"learning_rate": 8.573573573573574e-06,
"loss": 1.1632,
"step": 95
},
{
"epoch": 0.2882882882882883,
"grad_norm": 0.275390625,
"learning_rate": 8.55855855855856e-06,
"loss": 1.1402,
"step": 96
},
{
"epoch": 0.2912912912912913,
"grad_norm": 0.1591796875,
"learning_rate": 8.543543543543544e-06,
"loss": 1.1705,
"step": 97
},
{
"epoch": 0.29429429429429427,
"grad_norm": 0.162109375,
"learning_rate": 8.52852852852853e-06,
"loss": 1.2272,
"step": 98
},
{
"epoch": 0.2972972972972973,
"grad_norm": 0.2470703125,
"learning_rate": 8.513513513513514e-06,
"loss": 1.2087,
"step": 99
},
{
"epoch": 0.3003003003003003,
"grad_norm": 0.16015625,
"learning_rate": 8.4984984984985e-06,
"loss": 1.1722,
"step": 100
},
{
"epoch": 0.3033033033033033,
"grad_norm": 0.1630859375,
"learning_rate": 8.483483483483485e-06,
"loss": 1.2111,
"step": 101
},
{
"epoch": 0.3063063063063063,
"grad_norm": 0.25390625,
"learning_rate": 8.46846846846847e-06,
"loss": 1.1933,
"step": 102
},
{
"epoch": 0.30930930930930933,
"grad_norm": 0.1669921875,
"learning_rate": 8.453453453453453e-06,
"loss": 1.2445,
"step": 103
},
{
"epoch": 0.3123123123123123,
"grad_norm": 0.1806640625,
"learning_rate": 8.438438438438439e-06,
"loss": 1.1687,
"step": 104
},
{
"epoch": 0.3153153153153153,
"grad_norm": 0.1787109375,
"learning_rate": 8.423423423423423e-06,
"loss": 1.214,
"step": 105
},
{
"epoch": 0.3183183183183183,
"grad_norm": 0.1689453125,
"learning_rate": 8.408408408408409e-06,
"loss": 1.2096,
"step": 106
},
{
"epoch": 0.3213213213213213,
"grad_norm": 0.171875,
"learning_rate": 8.393393393393394e-06,
"loss": 1.1346,
"step": 107
},
{
"epoch": 0.32432432432432434,
"grad_norm": 0.1640625,
"learning_rate": 8.378378378378378e-06,
"loss": 1.2269,
"step": 108
},
{
"epoch": 0.32732732732732733,
"grad_norm": 0.166015625,
"learning_rate": 8.363363363363364e-06,
"loss": 1.1531,
"step": 109
},
{
"epoch": 0.3303303303303303,
"grad_norm": 0.154296875,
"learning_rate": 8.348348348348348e-06,
"loss": 1.2189,
"step": 110
},
{
"epoch": 0.3333333333333333,
"grad_norm": 0.1826171875,
"learning_rate": 8.333333333333334e-06,
"loss": 1.2086,
"step": 111
},
{
"epoch": 0.33633633633633636,
"grad_norm": 0.19140625,
"learning_rate": 8.31831831831832e-06,
"loss": 1.1763,
"step": 112
},
{
"epoch": 0.33933933933933935,
"grad_norm": 0.19921875,
"learning_rate": 8.303303303303303e-06,
"loss": 1.1049,
"step": 113
},
{
"epoch": 0.34234234234234234,
"grad_norm": 0.1669921875,
"learning_rate": 8.288288288288289e-06,
"loss": 1.1289,
"step": 114
},
{
"epoch": 0.34534534534534533,
"grad_norm": 0.1748046875,
"learning_rate": 8.273273273273273e-06,
"loss": 1.2346,
"step": 115
},
{
"epoch": 0.3483483483483483,
"grad_norm": 0.171875,
"learning_rate": 8.258258258258259e-06,
"loss": 1.1443,
"step": 116
},
{
"epoch": 0.35135135135135137,
"grad_norm": 0.1728515625,
"learning_rate": 8.243243243243245e-06,
"loss": 1.1922,
"step": 117
},
{
"epoch": 0.35435435435435436,
"grad_norm": 0.1669921875,
"learning_rate": 8.228228228228229e-06,
"loss": 1.1718,
"step": 118
},
{
"epoch": 0.35735735735735735,
"grad_norm": 0.1845703125,
"learning_rate": 8.213213213213214e-06,
"loss": 1.1457,
"step": 119
},
{
"epoch": 0.36036036036036034,
"grad_norm": 0.169921875,
"learning_rate": 8.198198198198198e-06,
"loss": 1.181,
"step": 120
},
{
"epoch": 0.3633633633633634,
"grad_norm": 0.189453125,
"learning_rate": 8.183183183183184e-06,
"loss": 1.1286,
"step": 121
},
{
"epoch": 0.3663663663663664,
"grad_norm": 0.158203125,
"learning_rate": 8.16816816816817e-06,
"loss": 1.1398,
"step": 122
},
{
"epoch": 0.36936936936936937,
"grad_norm": 0.171875,
"learning_rate": 8.153153153153154e-06,
"loss": 1.108,
"step": 123
},
{
"epoch": 0.37237237237237236,
"grad_norm": 0.1591796875,
"learning_rate": 8.13813813813814e-06,
"loss": 1.1187,
"step": 124
},
{
"epoch": 0.37537537537537535,
"grad_norm": 0.171875,
"learning_rate": 8.123123123123123e-06,
"loss": 1.1621,
"step": 125
},
{
"epoch": 0.3783783783783784,
"grad_norm": 0.181640625,
"learning_rate": 8.108108108108109e-06,
"loss": 1.0978,
"step": 126
},
{
"epoch": 0.3813813813813814,
"grad_norm": 0.1630859375,
"learning_rate": 8.093093093093095e-06,
"loss": 1.1396,
"step": 127
},
{
"epoch": 0.3843843843843844,
"grad_norm": 0.1630859375,
"learning_rate": 8.078078078078079e-06,
"loss": 1.152,
"step": 128
},
{
"epoch": 0.38738738738738737,
"grad_norm": 0.1806640625,
"learning_rate": 8.063063063063063e-06,
"loss": 1.1388,
"step": 129
},
{
"epoch": 0.39039039039039036,
"grad_norm": 0.162109375,
"learning_rate": 8.048048048048048e-06,
"loss": 1.1545,
"step": 130
},
{
"epoch": 0.3933933933933934,
"grad_norm": 0.1669921875,
"learning_rate": 8.033033033033034e-06,
"loss": 1.1585,
"step": 131
},
{
"epoch": 0.3963963963963964,
"grad_norm": 0.1845703125,
"learning_rate": 8.018018018018018e-06,
"loss": 1.1477,
"step": 132
},
{
"epoch": 0.3993993993993994,
"grad_norm": 0.158203125,
"learning_rate": 8.003003003003004e-06,
"loss": 1.1065,
"step": 133
},
{
"epoch": 0.4024024024024024,
"grad_norm": 0.1708984375,
"learning_rate": 7.987987987987988e-06,
"loss": 1.0734,
"step": 134
},
{
"epoch": 0.40540540540540543,
"grad_norm": 0.2265625,
"learning_rate": 7.972972972972974e-06,
"loss": 1.1644,
"step": 135
},
{
"epoch": 0.4084084084084084,
"grad_norm": 0.1650390625,
"learning_rate": 7.95795795795796e-06,
"loss": 1.162,
"step": 136
},
{
"epoch": 0.4114114114114114,
"grad_norm": 0.1669921875,
"learning_rate": 7.942942942942943e-06,
"loss": 1.1401,
"step": 137
},
{
"epoch": 0.4144144144144144,
"grad_norm": 0.166015625,
"learning_rate": 7.927927927927929e-06,
"loss": 1.1074,
"step": 138
},
{
"epoch": 0.4174174174174174,
"grad_norm": 0.1689453125,
"learning_rate": 7.912912912912913e-06,
"loss": 1.1504,
"step": 139
},
{
"epoch": 0.42042042042042044,
"grad_norm": 0.166015625,
"learning_rate": 7.897897897897899e-06,
"loss": 1.0743,
"step": 140
},
{
"epoch": 0.42342342342342343,
"grad_norm": 0.1572265625,
"learning_rate": 7.882882882882884e-06,
"loss": 1.0917,
"step": 141
},
{
"epoch": 0.4264264264264264,
"grad_norm": 0.1650390625,
"learning_rate": 7.867867867867868e-06,
"loss": 1.063,
"step": 142
},
{
"epoch": 0.4294294294294294,
"grad_norm": 0.212890625,
"learning_rate": 7.852852852852854e-06,
"loss": 1.094,
"step": 143
},
{
"epoch": 0.43243243243243246,
"grad_norm": 0.16796875,
"learning_rate": 7.837837837837838e-06,
"loss": 1.0964,
"step": 144
},
{
"epoch": 0.43543543543543545,
"grad_norm": 0.154296875,
"learning_rate": 7.822822822822824e-06,
"loss": 1.0823,
"step": 145
},
{
"epoch": 0.43843843843843844,
"grad_norm": 0.1796875,
"learning_rate": 7.807807807807808e-06,
"loss": 1.1712,
"step": 146
},
{
"epoch": 0.44144144144144143,
"grad_norm": 0.16796875,
"learning_rate": 7.792792792792793e-06,
"loss": 1.1003,
"step": 147
},
{
"epoch": 0.4444444444444444,
"grad_norm": 0.1708984375,
"learning_rate": 7.77777777777778e-06,
"loss": 1.1223,
"step": 148
},
{
"epoch": 0.44744744744744747,
"grad_norm": 0.1904296875,
"learning_rate": 7.762762762762763e-06,
"loss": 1.1415,
"step": 149
},
{
"epoch": 0.45045045045045046,
"grad_norm": 0.166015625,
"learning_rate": 7.747747747747749e-06,
"loss": 1.0606,
"step": 150
},
{
"epoch": 0.45345345345345345,
"grad_norm": 0.1875,
"learning_rate": 7.732732732732733e-06,
"loss": 1.1077,
"step": 151
},
{
"epoch": 0.45645645645645644,
"grad_norm": 0.185546875,
"learning_rate": 7.717717717717719e-06,
"loss": 1.1512,
"step": 152
},
{
"epoch": 0.4594594594594595,
"grad_norm": 0.283203125,
"learning_rate": 7.702702702702704e-06,
"loss": 1.1091,
"step": 153
},
{
"epoch": 0.4624624624624625,
"grad_norm": 0.24609375,
"learning_rate": 7.687687687687688e-06,
"loss": 1.0208,
"step": 154
},
{
"epoch": 0.46546546546546547,
"grad_norm": 0.1787109375,
"learning_rate": 7.672672672672672e-06,
"loss": 1.0738,
"step": 155
},
{
"epoch": 0.46846846846846846,
"grad_norm": 0.1748046875,
"learning_rate": 7.657657657657658e-06,
"loss": 1.1016,
"step": 156
},
{
"epoch": 0.47147147147147145,
"grad_norm": 0.1787109375,
"learning_rate": 7.642642642642644e-06,
"loss": 1.1251,
"step": 157
},
{
"epoch": 0.4744744744744745,
"grad_norm": 0.169921875,
"learning_rate": 7.6276276276276285e-06,
"loss": 1.1134,
"step": 158
},
{
"epoch": 0.4774774774774775,
"grad_norm": 0.18359375,
"learning_rate": 7.612612612612613e-06,
"loss": 1.1119,
"step": 159
},
{
"epoch": 0.4804804804804805,
"grad_norm": 0.1650390625,
"learning_rate": 7.597597597597598e-06,
"loss": 1.0753,
"step": 160
},
{
"epoch": 0.48348348348348347,
"grad_norm": 0.2041015625,
"learning_rate": 7.582582582582583e-06,
"loss": 1.146,
"step": 161
},
{
"epoch": 0.4864864864864865,
"grad_norm": 0.1845703125,
"learning_rate": 7.567567567567569e-06,
"loss": 1.1122,
"step": 162
},
{
"epoch": 0.4894894894894895,
"grad_norm": 0.1708984375,
"learning_rate": 7.552552552552554e-06,
"loss": 1.0973,
"step": 163
},
{
"epoch": 0.4924924924924925,
"grad_norm": 0.177734375,
"learning_rate": 7.5375375375375385e-06,
"loss": 1.1017,
"step": 164
},
{
"epoch": 0.4954954954954955,
"grad_norm": 0.173828125,
"learning_rate": 7.5225225225225225e-06,
"loss": 1.0342,
"step": 165
},
{
"epoch": 0.4984984984984985,
"grad_norm": 0.1806640625,
"learning_rate": 7.507507507507507e-06,
"loss": 1.0665,
"step": 166
},
{
"epoch": 0.5015015015015015,
"grad_norm": 0.1982421875,
"learning_rate": 7.492492492492494e-06,
"loss": 1.071,
"step": 167
},
{
"epoch": 0.5045045045045045,
"grad_norm": 0.2060546875,
"learning_rate": 7.477477477477479e-06,
"loss": 1.0892,
"step": 168
},
{
"epoch": 0.5075075075075075,
"grad_norm": 0.236328125,
"learning_rate": 7.462462462462463e-06,
"loss": 1.0704,
"step": 169
},
{
"epoch": 0.5105105105105106,
"grad_norm": 0.181640625,
"learning_rate": 7.447447447447448e-06,
"loss": 1.0669,
"step": 170
},
{
"epoch": 0.5135135135135135,
"grad_norm": 0.1748046875,
"learning_rate": 7.4324324324324324e-06,
"loss": 1.0444,
"step": 171
},
{
"epoch": 0.5165165165165165,
"grad_norm": 0.1650390625,
"learning_rate": 7.417417417417418e-06,
"loss": 1.1139,
"step": 172
},
{
"epoch": 0.5195195195195195,
"grad_norm": 0.166015625,
"learning_rate": 7.402402402402403e-06,
"loss": 1.0268,
"step": 173
},
{
"epoch": 0.5225225225225225,
"grad_norm": 0.2119140625,
"learning_rate": 7.387387387387388e-06,
"loss": 1.1087,
"step": 174
},
{
"epoch": 0.5255255255255256,
"grad_norm": 0.1826171875,
"learning_rate": 7.372372372372373e-06,
"loss": 1.0657,
"step": 175
},
{
"epoch": 0.5285285285285285,
"grad_norm": 0.1689453125,
"learning_rate": 7.3573573573573575e-06,
"loss": 1.0798,
"step": 176
},
{
"epoch": 0.5315315315315315,
"grad_norm": 0.2021484375,
"learning_rate": 7.342342342342343e-06,
"loss": 1.1279,
"step": 177
},
{
"epoch": 0.5345345345345346,
"grad_norm": 0.2021484375,
"learning_rate": 7.327327327327328e-06,
"loss": 1.109,
"step": 178
},
{
"epoch": 0.5375375375375375,
"grad_norm": 0.1884765625,
"learning_rate": 7.312312312312313e-06,
"loss": 1.1461,
"step": 179
},
{
"epoch": 0.5405405405405406,
"grad_norm": 0.1796875,
"learning_rate": 7.297297297297298e-06,
"loss": 1.0578,
"step": 180
},
{
"epoch": 0.5435435435435435,
"grad_norm": 0.2099609375,
"learning_rate": 7.282282282282283e-06,
"loss": 1.0451,
"step": 181
},
{
"epoch": 0.5465465465465466,
"grad_norm": 0.1787109375,
"learning_rate": 7.267267267267268e-06,
"loss": 1.0463,
"step": 182
},
{
"epoch": 0.5495495495495496,
"grad_norm": 0.2490234375,
"learning_rate": 7.252252252252253e-06,
"loss": 1.1026,
"step": 183
},
{
"epoch": 0.5525525525525525,
"grad_norm": 0.3203125,
"learning_rate": 7.237237237237238e-06,
"loss": 1.2223,
"step": 184
},
{
"epoch": 0.5555555555555556,
"grad_norm": 0.19140625,
"learning_rate": 7.222222222222223e-06,
"loss": 1.0613,
"step": 185
},
{
"epoch": 0.5585585585585585,
"grad_norm": 0.1748046875,
"learning_rate": 7.207207207207208e-06,
"loss": 1.0544,
"step": 186
},
{
"epoch": 0.5615615615615616,
"grad_norm": 0.1796875,
"learning_rate": 7.1921921921921935e-06,
"loss": 1.0668,
"step": 187
},
{
"epoch": 0.5645645645645646,
"grad_norm": 0.197265625,
"learning_rate": 7.177177177177178e-06,
"loss": 1.0629,
"step": 188
},
{
"epoch": 0.5675675675675675,
"grad_norm": 0.294921875,
"learning_rate": 7.162162162162163e-06,
"loss": 1.1224,
"step": 189
},
{
"epoch": 0.5705705705705706,
"grad_norm": 0.203125,
"learning_rate": 7.147147147147148e-06,
"loss": 1.0369,
"step": 190
},
{
"epoch": 0.5735735735735735,
"grad_norm": 0.1796875,
"learning_rate": 7.132132132132132e-06,
"loss": 1.0807,
"step": 191
},
{
"epoch": 0.5765765765765766,
"grad_norm": 0.2275390625,
"learning_rate": 7.117117117117117e-06,
"loss": 1.137,
"step": 192
},
{
"epoch": 0.5795795795795796,
"grad_norm": 0.1689453125,
"learning_rate": 7.102102102102103e-06,
"loss": 1.0603,
"step": 193
},
{
"epoch": 0.5825825825825826,
"grad_norm": 0.185546875,
"learning_rate": 7.087087087087087e-06,
"loss": 1.034,
"step": 194
},
{
"epoch": 0.5855855855855856,
"grad_norm": 0.21875,
"learning_rate": 7.072072072072072e-06,
"loss": 1.0648,
"step": 195
},
{
"epoch": 0.5885885885885885,
"grad_norm": 0.1884765625,
"learning_rate": 7.057057057057057e-06,
"loss": 1.0913,
"step": 196
},
{
"epoch": 0.5915915915915916,
"grad_norm": 0.1875,
"learning_rate": 7.042042042042042e-06,
"loss": 1.051,
"step": 197
},
{
"epoch": 0.5945945945945946,
"grad_norm": 0.1787109375,
"learning_rate": 7.027027027027028e-06,
"loss": 1.0765,
"step": 198
},
{
"epoch": 0.5975975975975976,
"grad_norm": 0.1845703125,
"learning_rate": 7.0120120120120125e-06,
"loss": 1.0985,
"step": 199
},
{
"epoch": 0.6006006006006006,
"grad_norm": 0.2333984375,
"learning_rate": 6.996996996996997e-06,
"loss": 1.0873,
"step": 200
},
{
"epoch": 0.6036036036036037,
"grad_norm": 0.2490234375,
"learning_rate": 6.981981981981982e-06,
"loss": 1.0184,
"step": 201
},
{
"epoch": 0.6066066066066066,
"grad_norm": 0.1923828125,
"learning_rate": 6.966966966966967e-06,
"loss": 1.0217,
"step": 202
},
{
"epoch": 0.6096096096096096,
"grad_norm": 0.1787109375,
"learning_rate": 6.951951951951953e-06,
"loss": 1.0505,
"step": 203
},
{
"epoch": 0.6126126126126126,
"grad_norm": 0.181640625,
"learning_rate": 6.936936936936938e-06,
"loss": 1.0737,
"step": 204
},
{
"epoch": 0.6156156156156156,
"grad_norm": 0.171875,
"learning_rate": 6.9219219219219225e-06,
"loss": 1.0287,
"step": 205
},
{
"epoch": 0.6186186186186187,
"grad_norm": 0.1787109375,
"learning_rate": 6.906906906906907e-06,
"loss": 1.0718,
"step": 206
},
{
"epoch": 0.6216216216216216,
"grad_norm": 0.28125,
"learning_rate": 6.891891891891892e-06,
"loss": 1.0806,
"step": 207
},
{
"epoch": 0.6246246246246246,
"grad_norm": 0.185546875,
"learning_rate": 6.876876876876878e-06,
"loss": 1.0694,
"step": 208
},
{
"epoch": 0.6276276276276276,
"grad_norm": 0.1767578125,
"learning_rate": 6.861861861861863e-06,
"loss": 1.0709,
"step": 209
},
{
"epoch": 0.6306306306306306,
"grad_norm": 0.296875,
"learning_rate": 6.846846846846848e-06,
"loss": 1.1308,
"step": 210
},
{
"epoch": 0.6336336336336337,
"grad_norm": 0.1884765625,
"learning_rate": 6.8318318318318324e-06,
"loss": 1.0627,
"step": 211
},
{
"epoch": 0.6366366366366366,
"grad_norm": 0.1787109375,
"learning_rate": 6.816816816816817e-06,
"loss": 1.058,
"step": 212
},
{
"epoch": 0.6396396396396397,
"grad_norm": 0.173828125,
"learning_rate": 6.801801801801803e-06,
"loss": 1.111,
"step": 213
},
{
"epoch": 0.6426426426426426,
"grad_norm": 0.2119140625,
"learning_rate": 6.786786786786788e-06,
"loss": 1.0756,
"step": 214
},
{
"epoch": 0.6456456456456456,
"grad_norm": 0.220703125,
"learning_rate": 6.771771771771773e-06,
"loss": 1.0691,
"step": 215
},
{
"epoch": 0.6486486486486487,
"grad_norm": 0.275390625,
"learning_rate": 6.7567567567567575e-06,
"loss": 1.0131,
"step": 216
},
{
"epoch": 0.6516516516516516,
"grad_norm": 0.2890625,
"learning_rate": 6.7417417417417415e-06,
"loss": 1.1311,
"step": 217
},
{
"epoch": 0.6546546546546547,
"grad_norm": 0.1875,
"learning_rate": 6.726726726726728e-06,
"loss": 1.0339,
"step": 218
},
{
"epoch": 0.6576576576576577,
"grad_norm": 0.23046875,
"learning_rate": 6.711711711711713e-06,
"loss": 1.0225,
"step": 219
},
{
"epoch": 0.6606606606606606,
"grad_norm": 0.19140625,
"learning_rate": 6.696696696696697e-06,
"loss": 1.0839,
"step": 220
},
{
"epoch": 0.6636636636636637,
"grad_norm": 0.205078125,
"learning_rate": 6.681681681681682e-06,
"loss": 1.0691,
"step": 221
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.201171875,
"learning_rate": 6.666666666666667e-06,
"loss": 1.1376,
"step": 222
},
{
"epoch": 0.6696696696696697,
"grad_norm": 0.212890625,
"learning_rate": 6.651651651651652e-06,
"loss": 0.9878,
"step": 223
},
{
"epoch": 0.6726726726726727,
"grad_norm": 0.216796875,
"learning_rate": 6.636636636636637e-06,
"loss": 1.1246,
"step": 224
},
{
"epoch": 0.6756756756756757,
"grad_norm": 0.1748046875,
"learning_rate": 6.621621621621622e-06,
"loss": 1.0395,
"step": 225
},
{
"epoch": 0.6786786786786787,
"grad_norm": 0.1748046875,
"learning_rate": 6.606606606606607e-06,
"loss": 1.0488,
"step": 226
},
{
"epoch": 0.6816816816816816,
"grad_norm": 0.1796875,
"learning_rate": 6.591591591591592e-06,
"loss": 1.0414,
"step": 227
},
{
"epoch": 0.6846846846846847,
"grad_norm": 0.1875,
"learning_rate": 6.5765765765765775e-06,
"loss": 1.0614,
"step": 228
},
{
"epoch": 0.6876876876876877,
"grad_norm": 0.2294921875,
"learning_rate": 6.561561561561562e-06,
"loss": 1.0281,
"step": 229
},
{
"epoch": 0.6906906906906907,
"grad_norm": 0.2158203125,
"learning_rate": 6.546546546546547e-06,
"loss": 1.0452,
"step": 230
},
{
"epoch": 0.6936936936936937,
"grad_norm": 0.1806640625,
"learning_rate": 6.531531531531532e-06,
"loss": 1.0629,
"step": 231
},
{
"epoch": 0.6966966966966966,
"grad_norm": 0.1884765625,
"learning_rate": 6.516516516516517e-06,
"loss": 1.0539,
"step": 232
},
{
"epoch": 0.6996996996996997,
"grad_norm": 0.2734375,
"learning_rate": 6.501501501501502e-06,
"loss": 1.0472,
"step": 233
},
{
"epoch": 0.7027027027027027,
"grad_norm": 0.1923828125,
"learning_rate": 6.486486486486487e-06,
"loss": 1.0533,
"step": 234
},
{
"epoch": 0.7057057057057057,
"grad_norm": 0.265625,
"learning_rate": 6.471471471471472e-06,
"loss": 1.0836,
"step": 235
},
{
"epoch": 0.7087087087087087,
"grad_norm": 0.291015625,
"learning_rate": 6.456456456456457e-06,
"loss": 1.0926,
"step": 236
},
{
"epoch": 0.7117117117117117,
"grad_norm": 0.22265625,
"learning_rate": 6.441441441441442e-06,
"loss": 1.0518,
"step": 237
},
{
"epoch": 0.7147147147147147,
"grad_norm": 0.23046875,
"learning_rate": 6.426426426426427e-06,
"loss": 1.0474,
"step": 238
},
{
"epoch": 0.7177177177177178,
"grad_norm": 0.1826171875,
"learning_rate": 6.4114114114114125e-06,
"loss": 1.0081,
"step": 239
},
{
"epoch": 0.7207207207207207,
"grad_norm": 0.1796875,
"learning_rate": 6.396396396396397e-06,
"loss": 1.0576,
"step": 240
},
{
"epoch": 0.7237237237237237,
"grad_norm": 0.1943359375,
"learning_rate": 6.381381381381382e-06,
"loss": 1.0618,
"step": 241
},
{
"epoch": 0.7267267267267268,
"grad_norm": 0.23828125,
"learning_rate": 6.366366366366366e-06,
"loss": 1.0926,
"step": 242
},
{
"epoch": 0.7297297297297297,
"grad_norm": 0.1953125,
"learning_rate": 6.351351351351351e-06,
"loss": 1.0171,
"step": 243
},
{
"epoch": 0.7327327327327328,
"grad_norm": 0.189453125,
"learning_rate": 6.336336336336338e-06,
"loss": 1.0365,
"step": 244
},
{
"epoch": 0.7357357357357357,
"grad_norm": 0.240234375,
"learning_rate": 6.321321321321322e-06,
"loss": 1.0427,
"step": 245
},
{
"epoch": 0.7387387387387387,
"grad_norm": 0.2001953125,
"learning_rate": 6.3063063063063065e-06,
"loss": 1.0773,
"step": 246
},
{
"epoch": 0.7417417417417418,
"grad_norm": 0.1923828125,
"learning_rate": 6.291291291291291e-06,
"loss": 1.0738,
"step": 247
},
{
"epoch": 0.7447447447447447,
"grad_norm": 0.1806640625,
"learning_rate": 6.276276276276276e-06,
"loss": 1.0316,
"step": 248
},
{
"epoch": 0.7477477477477478,
"grad_norm": 0.2099609375,
"learning_rate": 6.261261261261262e-06,
"loss": 1.0866,
"step": 249
},
{
"epoch": 0.7507507507507507,
"grad_norm": 0.1865234375,
"learning_rate": 6.246246246246247e-06,
"loss": 1.0717,
"step": 250
},
{
"epoch": 0.7537537537537538,
"grad_norm": 0.21484375,
"learning_rate": 6.2312312312312316e-06,
"loss": 1.1358,
"step": 251
},
{
"epoch": 0.7567567567567568,
"grad_norm": 0.205078125,
"learning_rate": 6.2162162162162164e-06,
"loss": 1.0638,
"step": 252
},
{
"epoch": 0.7597597597597597,
"grad_norm": 0.2216796875,
"learning_rate": 6.201201201201201e-06,
"loss": 1.0374,
"step": 253
},
{
"epoch": 0.7627627627627628,
"grad_norm": 0.1796875,
"learning_rate": 6.186186186186187e-06,
"loss": 1.042,
"step": 254
},
{
"epoch": 0.7657657657657657,
"grad_norm": 0.1884765625,
"learning_rate": 6.171171171171172e-06,
"loss": 1.1211,
"step": 255
},
{
"epoch": 0.7687687687687688,
"grad_norm": 0.205078125,
"learning_rate": 6.156156156156157e-06,
"loss": 1.0377,
"step": 256
},
{
"epoch": 0.7717717717717718,
"grad_norm": 0.1904296875,
"learning_rate": 6.1411411411411415e-06,
"loss": 1.0754,
"step": 257
},
{
"epoch": 0.7747747747747747,
"grad_norm": 0.234375,
"learning_rate": 6.126126126126126e-06,
"loss": 1.0352,
"step": 258
},
{
"epoch": 0.7777777777777778,
"grad_norm": 0.1904296875,
"learning_rate": 6.111111111111112e-06,
"loss": 1.0454,
"step": 259
},
{
"epoch": 0.7807807807807807,
"grad_norm": 0.193359375,
"learning_rate": 6.096096096096097e-06,
"loss": 0.9994,
"step": 260
},
{
"epoch": 0.7837837837837838,
"grad_norm": 0.197265625,
"learning_rate": 6.081081081081082e-06,
"loss": 1.0977,
"step": 261
},
{
"epoch": 0.7867867867867868,
"grad_norm": 0.197265625,
"learning_rate": 6.066066066066067e-06,
"loss": 1.0262,
"step": 262
},
{
"epoch": 0.7897897897897898,
"grad_norm": 0.18359375,
"learning_rate": 6.0510510510510515e-06,
"loss": 1.0164,
"step": 263
},
{
"epoch": 0.7927927927927928,
"grad_norm": 0.84375,
"learning_rate": 6.036036036036037e-06,
"loss": 0.9989,
"step": 264
},
{
"epoch": 0.7957957957957958,
"grad_norm": 0.18359375,
"learning_rate": 6.021021021021022e-06,
"loss": 1.0707,
"step": 265
},
{
"epoch": 0.7987987987987988,
"grad_norm": 0.2001953125,
"learning_rate": 6.006006006006007e-06,
"loss": 1.0657,
"step": 266
},
{
"epoch": 0.8018018018018018,
"grad_norm": 0.1943359375,
"learning_rate": 5.990990990990992e-06,
"loss": 0.9818,
"step": 267
},
{
"epoch": 0.8048048048048048,
"grad_norm": 0.2333984375,
"learning_rate": 5.975975975975976e-06,
"loss": 0.9907,
"step": 268
},
{
"epoch": 0.8078078078078078,
"grad_norm": 0.181640625,
"learning_rate": 5.960960960960962e-06,
"loss": 1.0336,
"step": 269
},
{
"epoch": 0.8108108108108109,
"grad_norm": 0.1875,
"learning_rate": 5.945945945945947e-06,
"loss": 1.0133,
"step": 270
},
{
"epoch": 0.8138138138138138,
"grad_norm": 0.185546875,
"learning_rate": 5.930930930930931e-06,
"loss": 1.0485,
"step": 271
},
{
"epoch": 0.8168168168168168,
"grad_norm": 0.1748046875,
"learning_rate": 5.915915915915916e-06,
"loss": 1.0265,
"step": 272
},
{
"epoch": 0.8198198198198198,
"grad_norm": 0.1953125,
"learning_rate": 5.900900900900901e-06,
"loss": 1.034,
"step": 273
},
{
"epoch": 0.8228228228228228,
"grad_norm": 0.185546875,
"learning_rate": 5.885885885885886e-06,
"loss": 1.0087,
"step": 274
},
{
"epoch": 0.8258258258258259,
"grad_norm": 0.1884765625,
"learning_rate": 5.870870870870871e-06,
"loss": 1.0682,
"step": 275
},
{
"epoch": 0.8288288288288288,
"grad_norm": 0.189453125,
"learning_rate": 5.855855855855856e-06,
"loss": 1.0595,
"step": 276
},
{
"epoch": 0.8318318318318318,
"grad_norm": 0.259765625,
"learning_rate": 5.840840840840841e-06,
"loss": 1.0575,
"step": 277
},
{
"epoch": 0.8348348348348348,
"grad_norm": 0.2080078125,
"learning_rate": 5.825825825825826e-06,
"loss": 1.0547,
"step": 278
},
{
"epoch": 0.8378378378378378,
"grad_norm": 0.1826171875,
"learning_rate": 5.810810810810811e-06,
"loss": 1.0158,
"step": 279
},
{
"epoch": 0.8408408408408409,
"grad_norm": 0.197265625,
"learning_rate": 5.7957957957957965e-06,
"loss": 1.0198,
"step": 280
},
{
"epoch": 0.8438438438438438,
"grad_norm": 0.1962890625,
"learning_rate": 5.780780780780781e-06,
"loss": 1.0825,
"step": 281
},
{
"epoch": 0.8468468468468469,
"grad_norm": 0.2001953125,
"learning_rate": 5.765765765765766e-06,
"loss": 1.0267,
"step": 282
},
{
"epoch": 0.8498498498498499,
"grad_norm": 0.2119140625,
"learning_rate": 5.750750750750751e-06,
"loss": 1.0595,
"step": 283
},
{
"epoch": 0.8528528528528528,
"grad_norm": 0.1787109375,
"learning_rate": 5.735735735735736e-06,
"loss": 1.0012,
"step": 284
},
{
"epoch": 0.8558558558558559,
"grad_norm": 0.18359375,
"learning_rate": 5.720720720720722e-06,
"loss": 0.9924,
"step": 285
},
{
"epoch": 0.8588588588588588,
"grad_norm": 0.2060546875,
"learning_rate": 5.7057057057057065e-06,
"loss": 0.9936,
"step": 286
},
{
"epoch": 0.8618618618618619,
"grad_norm": 0.208984375,
"learning_rate": 5.690690690690691e-06,
"loss": 1.0019,
"step": 287
},
{
"epoch": 0.8648648648648649,
"grad_norm": 0.203125,
"learning_rate": 5.675675675675676e-06,
"loss": 1.0806,
"step": 288
},
{
"epoch": 0.8678678678678678,
"grad_norm": 0.189453125,
"learning_rate": 5.660660660660661e-06,
"loss": 1.0216,
"step": 289
},
{
"epoch": 0.8708708708708709,
"grad_norm": 0.2275390625,
"learning_rate": 5.645645645645647e-06,
"loss": 1.0059,
"step": 290
},
{
"epoch": 0.8738738738738738,
"grad_norm": 0.30859375,
"learning_rate": 5.6306306306306316e-06,
"loss": 1.0178,
"step": 291
},
{
"epoch": 0.8768768768768769,
"grad_norm": 0.203125,
"learning_rate": 5.615615615615616e-06,
"loss": 1.0429,
"step": 292
},
{
"epoch": 0.8798798798798799,
"grad_norm": 0.1826171875,
"learning_rate": 5.600600600600601e-06,
"loss": 1.0538,
"step": 293
},
{
"epoch": 0.8828828828828829,
"grad_norm": 0.1953125,
"learning_rate": 5.585585585585585e-06,
"loss": 1.0416,
"step": 294
},
{
"epoch": 0.8858858858858859,
"grad_norm": 0.1943359375,
"learning_rate": 5.570570570570572e-06,
"loss": 1.0591,
"step": 295
},
{
"epoch": 0.8888888888888888,
"grad_norm": 0.189453125,
"learning_rate": 5.555555555555557e-06,
"loss": 1.0384,
"step": 296
},
{
"epoch": 0.8918918918918919,
"grad_norm": 0.302734375,
"learning_rate": 5.540540540540541e-06,
"loss": 1.0146,
"step": 297
},
{
"epoch": 0.8948948948948949,
"grad_norm": 0.19140625,
"learning_rate": 5.5255255255255255e-06,
"loss": 1.0308,
"step": 298
},
{
"epoch": 0.8978978978978979,
"grad_norm": 0.1953125,
"learning_rate": 5.51051051051051e-06,
"loss": 1.0114,
"step": 299
},
{
"epoch": 0.9009009009009009,
"grad_norm": 0.2236328125,
"learning_rate": 5.495495495495496e-06,
"loss": 1.007,
"step": 300
},
{
"epoch": 0.9039039039039038,
"grad_norm": 0.2158203125,
"learning_rate": 5.480480480480481e-06,
"loss": 1.0519,
"step": 301
},
{
"epoch": 0.9069069069069069,
"grad_norm": 0.18359375,
"learning_rate": 5.465465465465466e-06,
"loss": 1.0157,
"step": 302
},
{
"epoch": 0.9099099099099099,
"grad_norm": 0.1875,
"learning_rate": 5.450450450450451e-06,
"loss": 1.0499,
"step": 303
},
{
"epoch": 0.9129129129129129,
"grad_norm": 0.19140625,
"learning_rate": 5.4354354354354355e-06,
"loss": 1.0034,
"step": 304
},
{
"epoch": 0.9159159159159159,
"grad_norm": 0.1904296875,
"learning_rate": 5.420420420420421e-06,
"loss": 1.0292,
"step": 305
},
{
"epoch": 0.918918918918919,
"grad_norm": 0.1884765625,
"learning_rate": 5.405405405405406e-06,
"loss": 1.0642,
"step": 306
},
{
"epoch": 0.9219219219219219,
"grad_norm": 0.20703125,
"learning_rate": 5.390390390390391e-06,
"loss": 1.0699,
"step": 307
},
{
"epoch": 0.924924924924925,
"grad_norm": 0.1826171875,
"learning_rate": 5.375375375375376e-06,
"loss": 1.0071,
"step": 308
},
{
"epoch": 0.9279279279279279,
"grad_norm": 0.3046875,
"learning_rate": 5.360360360360361e-06,
"loss": 1.0306,
"step": 309
},
{
"epoch": 0.9309309309309309,
"grad_norm": 0.1845703125,
"learning_rate": 5.345345345345346e-06,
"loss": 1.0747,
"step": 310
},
{
"epoch": 0.933933933933934,
"grad_norm": 0.234375,
"learning_rate": 5.330330330330331e-06,
"loss": 1.0506,
"step": 311
},
{
"epoch": 0.9369369369369369,
"grad_norm": 0.232421875,
"learning_rate": 5.315315315315316e-06,
"loss": 1.0198,
"step": 312
},
{
"epoch": 0.93993993993994,
"grad_norm": 0.2578125,
"learning_rate": 5.300300300300301e-06,
"loss": 0.9944,
"step": 313
},
{
"epoch": 0.9429429429429429,
"grad_norm": 0.1943359375,
"learning_rate": 5.285285285285286e-06,
"loss": 1.0638,
"step": 314
},
{
"epoch": 0.9459459459459459,
"grad_norm": 0.1875,
"learning_rate": 5.2702702702702705e-06,
"loss": 1.0278,
"step": 315
},
{
"epoch": 0.948948948948949,
"grad_norm": 0.171875,
"learning_rate": 5.255255255255256e-06,
"loss": 1.0118,
"step": 316
},
{
"epoch": 0.9519519519519519,
"grad_norm": 0.19140625,
"learning_rate": 5.240240240240241e-06,
"loss": 1.0728,
"step": 317
},
{
"epoch": 0.954954954954955,
"grad_norm": 0.1953125,
"learning_rate": 5.225225225225226e-06,
"loss": 0.9994,
"step": 318
},
{
"epoch": 0.9579579579579579,
"grad_norm": 0.1943359375,
"learning_rate": 5.21021021021021e-06,
"loss": 1.0243,
"step": 319
},
{
"epoch": 0.960960960960961,
"grad_norm": 0.185546875,
"learning_rate": 5.195195195195195e-06,
"loss": 1.0333,
"step": 320
},
{
"epoch": 0.963963963963964,
"grad_norm": 0.1943359375,
"learning_rate": 5.180180180180181e-06,
"loss": 1.0193,
"step": 321
},
{
"epoch": 0.9669669669669669,
"grad_norm": 0.19140625,
"learning_rate": 5.165165165165165e-06,
"loss": 1.0078,
"step": 322
},
{
"epoch": 0.96996996996997,
"grad_norm": 0.1796875,
"learning_rate": 5.15015015015015e-06,
"loss": 1.0054,
"step": 323
},
{
"epoch": 0.972972972972973,
"grad_norm": 0.2265625,
"learning_rate": 5.135135135135135e-06,
"loss": 1.0266,
"step": 324
},
{
"epoch": 0.975975975975976,
"grad_norm": 0.275390625,
"learning_rate": 5.12012012012012e-06,
"loss": 1.0165,
"step": 325
},
{
"epoch": 0.978978978978979,
"grad_norm": 0.2109375,
"learning_rate": 5.105105105105106e-06,
"loss": 1.0165,
"step": 326
},
{
"epoch": 0.9819819819819819,
"grad_norm": 0.19140625,
"learning_rate": 5.0900900900900905e-06,
"loss": 1.0413,
"step": 327
},
{
"epoch": 0.984984984984985,
"grad_norm": 0.1865234375,
"learning_rate": 5.075075075075075e-06,
"loss": 0.9882,
"step": 328
},
{
"epoch": 0.987987987987988,
"grad_norm": 0.1875,
"learning_rate": 5.06006006006006e-06,
"loss": 1.017,
"step": 329
},
{
"epoch": 0.990990990990991,
"grad_norm": 0.275390625,
"learning_rate": 5.045045045045045e-06,
"loss": 1.04,
"step": 330
},
{
"epoch": 0.993993993993994,
"grad_norm": 0.193359375,
"learning_rate": 5.030030030030031e-06,
"loss": 1.0391,
"step": 331
},
{
"epoch": 0.996996996996997,
"grad_norm": 0.1904296875,
"learning_rate": 5.0150150150150156e-06,
"loss": 1.0346,
"step": 332
},
{
"epoch": 1.0,
"grad_norm": 0.197265625,
"learning_rate": 5e-06,
"loss": 0.9981,
"step": 333
},
{
"epoch": 1.003003003003003,
"grad_norm": 0.1904296875,
"learning_rate": 4.984984984984985e-06,
"loss": 1.0296,
"step": 334
},
{
"epoch": 1.006006006006006,
"grad_norm": 0.2216796875,
"learning_rate": 4.96996996996997e-06,
"loss": 1.0689,
"step": 335
},
{
"epoch": 1.009009009009009,
"grad_norm": 0.1884765625,
"learning_rate": 4.954954954954955e-06,
"loss": 0.9999,
"step": 336
},
{
"epoch": 1.012012012012012,
"grad_norm": 0.236328125,
"learning_rate": 4.939939939939941e-06,
"loss": 1.0488,
"step": 337
},
{
"epoch": 1.015015015015015,
"grad_norm": 0.193359375,
"learning_rate": 4.9249249249249255e-06,
"loss": 1.0273,
"step": 338
},
{
"epoch": 1.018018018018018,
"grad_norm": 0.2158203125,
"learning_rate": 4.90990990990991e-06,
"loss": 0.966,
"step": 339
},
{
"epoch": 1.021021021021021,
"grad_norm": 0.189453125,
"learning_rate": 4.894894894894895e-06,
"loss": 1.0157,
"step": 340
},
{
"epoch": 1.024024024024024,
"grad_norm": 0.193359375,
"learning_rate": 4.87987987987988e-06,
"loss": 1.0229,
"step": 341
},
{
"epoch": 1.027027027027027,
"grad_norm": 0.189453125,
"learning_rate": 4.864864864864866e-06,
"loss": 0.9985,
"step": 342
},
{
"epoch": 1.03003003003003,
"grad_norm": 0.2265625,
"learning_rate": 4.849849849849851e-06,
"loss": 0.9961,
"step": 343
},
{
"epoch": 1.033033033033033,
"grad_norm": 0.193359375,
"learning_rate": 4.8348348348348355e-06,
"loss": 1.0309,
"step": 344
},
{
"epoch": 1.0360360360360361,
"grad_norm": 0.205078125,
"learning_rate": 4.81981981981982e-06,
"loss": 0.9868,
"step": 345
},
{
"epoch": 1.039039039039039,
"grad_norm": 0.24609375,
"learning_rate": 4.804804804804805e-06,
"loss": 1.0407,
"step": 346
},
{
"epoch": 1.042042042042042,
"grad_norm": 0.1982421875,
"learning_rate": 4.78978978978979e-06,
"loss": 0.9476,
"step": 347
},
{
"epoch": 1.045045045045045,
"grad_norm": 0.224609375,
"learning_rate": 4.774774774774775e-06,
"loss": 1.0,
"step": 348
},
{
"epoch": 1.048048048048048,
"grad_norm": 0.201171875,
"learning_rate": 4.75975975975976e-06,
"loss": 1.0248,
"step": 349
},
{
"epoch": 1.0510510510510511,
"grad_norm": 0.19140625,
"learning_rate": 4.7447447447447454e-06,
"loss": 0.9981,
"step": 350
},
{
"epoch": 1.054054054054054,
"grad_norm": 0.1884765625,
"learning_rate": 4.72972972972973e-06,
"loss": 1.0065,
"step": 351
},
{
"epoch": 1.057057057057057,
"grad_norm": 0.25,
"learning_rate": 4.714714714714715e-06,
"loss": 0.9776,
"step": 352
},
{
"epoch": 1.06006006006006,
"grad_norm": 0.32421875,
"learning_rate": 4.6996996996997e-06,
"loss": 0.9722,
"step": 353
},
{
"epoch": 1.063063063063063,
"grad_norm": 0.236328125,
"learning_rate": 4.684684684684685e-06,
"loss": 1.0631,
"step": 354
},
{
"epoch": 1.0660660660660661,
"grad_norm": 0.1962890625,
"learning_rate": 4.6696696696696705e-06,
"loss": 1.0184,
"step": 355
},
{
"epoch": 1.069069069069069,
"grad_norm": 0.185546875,
"learning_rate": 4.654654654654655e-06,
"loss": 0.9913,
"step": 356
},
{
"epoch": 1.072072072072072,
"grad_norm": 0.26953125,
"learning_rate": 4.63963963963964e-06,
"loss": 1.0086,
"step": 357
},
{
"epoch": 1.075075075075075,
"grad_norm": 0.2216796875,
"learning_rate": 4.624624624624625e-06,
"loss": 1.0372,
"step": 358
},
{
"epoch": 1.078078078078078,
"grad_norm": 0.2080078125,
"learning_rate": 4.60960960960961e-06,
"loss": 1.0109,
"step": 359
},
{
"epoch": 1.0810810810810811,
"grad_norm": 0.201171875,
"learning_rate": 4.594594594594596e-06,
"loss": 1.0296,
"step": 360
},
{
"epoch": 1.0840840840840842,
"grad_norm": 0.31640625,
"learning_rate": 4.57957957957958e-06,
"loss": 1.0216,
"step": 361
},
{
"epoch": 1.087087087087087,
"grad_norm": 0.1982421875,
"learning_rate": 4.5645645645645645e-06,
"loss": 0.9909,
"step": 362
},
{
"epoch": 1.09009009009009,
"grad_norm": 0.2158203125,
"learning_rate": 4.54954954954955e-06,
"loss": 0.9906,
"step": 363
},
{
"epoch": 1.093093093093093,
"grad_norm": 0.228515625,
"learning_rate": 4.534534534534535e-06,
"loss": 1.0128,
"step": 364
},
{
"epoch": 1.0960960960960962,
"grad_norm": 0.2060546875,
"learning_rate": 4.51951951951952e-06,
"loss": 0.9887,
"step": 365
},
{
"epoch": 1.0990990990990992,
"grad_norm": 0.33203125,
"learning_rate": 4.504504504504505e-06,
"loss": 1.0146,
"step": 366
},
{
"epoch": 1.102102102102102,
"grad_norm": 0.197265625,
"learning_rate": 4.48948948948949e-06,
"loss": 1.0376,
"step": 367
},
{
"epoch": 1.105105105105105,
"grad_norm": 0.310546875,
"learning_rate": 4.474474474474475e-06,
"loss": 0.9775,
"step": 368
},
{
"epoch": 1.1081081081081081,
"grad_norm": 0.19921875,
"learning_rate": 4.45945945945946e-06,
"loss": 1.0109,
"step": 369
},
{
"epoch": 1.1111111111111112,
"grad_norm": 0.1982421875,
"learning_rate": 4.444444444444444e-06,
"loss": 0.9723,
"step": 370
},
{
"epoch": 1.1141141141141142,
"grad_norm": 0.1796875,
"learning_rate": 4.42942942942943e-06,
"loss": 0.9766,
"step": 371
},
{
"epoch": 1.117117117117117,
"grad_norm": 0.2197265625,
"learning_rate": 4.414414414414415e-06,
"loss": 0.9923,
"step": 372
},
{
"epoch": 1.12012012012012,
"grad_norm": 0.29296875,
"learning_rate": 4.3993993993993996e-06,
"loss": 1.0186,
"step": 373
},
{
"epoch": 1.1231231231231231,
"grad_norm": 0.203125,
"learning_rate": 4.384384384384384e-06,
"loss": 0.987,
"step": 374
},
{
"epoch": 1.1261261261261262,
"grad_norm": 0.2041015625,
"learning_rate": 4.369369369369369e-06,
"loss": 1.0088,
"step": 375
},
{
"epoch": 1.1291291291291292,
"grad_norm": 0.1865234375,
"learning_rate": 4.354354354354355e-06,
"loss": 0.999,
"step": 376
},
{
"epoch": 1.132132132132132,
"grad_norm": 0.197265625,
"learning_rate": 4.33933933933934e-06,
"loss": 1.0609,
"step": 377
},
{
"epoch": 1.135135135135135,
"grad_norm": 0.2021484375,
"learning_rate": 4.324324324324325e-06,
"loss": 0.9939,
"step": 378
},
{
"epoch": 1.1381381381381381,
"grad_norm": 0.2060546875,
"learning_rate": 4.3093093093093095e-06,
"loss": 0.9913,
"step": 379
},
{
"epoch": 1.1411411411411412,
"grad_norm": 0.19140625,
"learning_rate": 4.294294294294294e-06,
"loss": 1.0285,
"step": 380
},
{
"epoch": 1.1441441441441442,
"grad_norm": 0.208984375,
"learning_rate": 4.27927927927928e-06,
"loss": 1.0208,
"step": 381
},
{
"epoch": 1.147147147147147,
"grad_norm": 0.19140625,
"learning_rate": 4.264264264264265e-06,
"loss": 0.9686,
"step": 382
},
{
"epoch": 1.15015015015015,
"grad_norm": 0.2119140625,
"learning_rate": 4.24924924924925e-06,
"loss": 0.9906,
"step": 383
},
{
"epoch": 1.1531531531531531,
"grad_norm": 0.30078125,
"learning_rate": 4.234234234234235e-06,
"loss": 0.9693,
"step": 384
},
{
"epoch": 1.1561561561561562,
"grad_norm": 0.1953125,
"learning_rate": 4.2192192192192195e-06,
"loss": 0.9903,
"step": 385
},
{
"epoch": 1.1591591591591592,
"grad_norm": 0.2109375,
"learning_rate": 4.204204204204204e-06,
"loss": 1.0406,
"step": 386
},
{
"epoch": 1.1621621621621623,
"grad_norm": 0.25,
"learning_rate": 4.189189189189189e-06,
"loss": 0.9862,
"step": 387
},
{
"epoch": 1.165165165165165,
"grad_norm": 0.189453125,
"learning_rate": 4.174174174174174e-06,
"loss": 1.0201,
"step": 388
},
{
"epoch": 1.1681681681681682,
"grad_norm": 0.2255859375,
"learning_rate": 4.15915915915916e-06,
"loss": 0.9724,
"step": 389
},
{
"epoch": 1.1711711711711712,
"grad_norm": 0.2021484375,
"learning_rate": 4.1441441441441446e-06,
"loss": 1.0003,
"step": 390
},
{
"epoch": 1.1741741741741742,
"grad_norm": 0.2021484375,
"learning_rate": 4.129129129129129e-06,
"loss": 0.9948,
"step": 391
},
{
"epoch": 1.1771771771771773,
"grad_norm": 0.2353515625,
"learning_rate": 4.114114114114114e-06,
"loss": 1.0185,
"step": 392
},
{
"epoch": 1.1801801801801801,
"grad_norm": 0.2275390625,
"learning_rate": 4.099099099099099e-06,
"loss": 1.0226,
"step": 393
},
{
"epoch": 1.1831831831831832,
"grad_norm": 0.2265625,
"learning_rate": 4.084084084084085e-06,
"loss": 1.0025,
"step": 394
},
{
"epoch": 1.1861861861861862,
"grad_norm": 0.2060546875,
"learning_rate": 4.06906906906907e-06,
"loss": 0.9371,
"step": 395
},
{
"epoch": 1.1891891891891893,
"grad_norm": 0.19921875,
"learning_rate": 4.0540540540540545e-06,
"loss": 1.0129,
"step": 396
},
{
"epoch": 1.1921921921921923,
"grad_norm": 0.2119140625,
"learning_rate": 4.039039039039039e-06,
"loss": 1.0687,
"step": 397
},
{
"epoch": 1.1951951951951951,
"grad_norm": 0.193359375,
"learning_rate": 4.024024024024024e-06,
"loss": 1.0445,
"step": 398
},
{
"epoch": 1.1981981981981982,
"grad_norm": 0.2109375,
"learning_rate": 4.009009009009009e-06,
"loss": 1.0211,
"step": 399
},
{
"epoch": 1.2012012012012012,
"grad_norm": 0.26953125,
"learning_rate": 3.993993993993994e-06,
"loss": 1.0167,
"step": 400
},
{
"epoch": 1.2042042042042043,
"grad_norm": 0.193359375,
"learning_rate": 3.97897897897898e-06,
"loss": 1.0212,
"step": 401
},
{
"epoch": 1.2072072072072073,
"grad_norm": 0.53515625,
"learning_rate": 3.9639639639639645e-06,
"loss": 1.0609,
"step": 402
},
{
"epoch": 1.2102102102102101,
"grad_norm": 0.287109375,
"learning_rate": 3.948948948948949e-06,
"loss": 0.9795,
"step": 403
},
{
"epoch": 1.2132132132132132,
"grad_norm": 0.1943359375,
"learning_rate": 3.933933933933934e-06,
"loss": 1.028,
"step": 404
},
{
"epoch": 1.2162162162162162,
"grad_norm": 0.2158203125,
"learning_rate": 3.918918918918919e-06,
"loss": 1.0134,
"step": 405
},
{
"epoch": 1.2192192192192193,
"grad_norm": 0.1923828125,
"learning_rate": 3.903903903903904e-06,
"loss": 0.9599,
"step": 406
},
{
"epoch": 1.2222222222222223,
"grad_norm": 0.29296875,
"learning_rate": 3.88888888888889e-06,
"loss": 0.998,
"step": 407
},
{
"epoch": 1.2252252252252251,
"grad_norm": 0.21484375,
"learning_rate": 3.8738738738738744e-06,
"loss": 0.9977,
"step": 408
},
{
"epoch": 1.2282282282282282,
"grad_norm": 0.24609375,
"learning_rate": 3.858858858858859e-06,
"loss": 1.0013,
"step": 409
},
{
"epoch": 1.2312312312312312,
"grad_norm": 0.2021484375,
"learning_rate": 3.843843843843844e-06,
"loss": 0.9703,
"step": 410
},
{
"epoch": 1.2342342342342343,
"grad_norm": 0.2734375,
"learning_rate": 3.828828828828829e-06,
"loss": 1.0743,
"step": 411
},
{
"epoch": 1.2372372372372373,
"grad_norm": 0.20703125,
"learning_rate": 3.8138138138138143e-06,
"loss": 0.9566,
"step": 412
},
{
"epoch": 1.2402402402402402,
"grad_norm": 0.19921875,
"learning_rate": 3.798798798798799e-06,
"loss": 0.9841,
"step": 413
},
{
"epoch": 1.2432432432432432,
"grad_norm": 0.1982421875,
"learning_rate": 3.7837837837837844e-06,
"loss": 0.9808,
"step": 414
},
{
"epoch": 1.2462462462462462,
"grad_norm": 0.2275390625,
"learning_rate": 3.7687687687687692e-06,
"loss": 1.002,
"step": 415
},
{
"epoch": 1.2492492492492493,
"grad_norm": 0.337890625,
"learning_rate": 3.7537537537537537e-06,
"loss": 1.0202,
"step": 416
},
{
"epoch": 1.2522522522522523,
"grad_norm": 0.287109375,
"learning_rate": 3.7387387387387394e-06,
"loss": 1.0209,
"step": 417
},
{
"epoch": 1.2552552552552552,
"grad_norm": 0.2431640625,
"learning_rate": 3.723723723723724e-06,
"loss": 0.9587,
"step": 418
},
{
"epoch": 1.2582582582582582,
"grad_norm": 0.20703125,
"learning_rate": 3.708708708708709e-06,
"loss": 1.0201,
"step": 419
},
{
"epoch": 1.2612612612612613,
"grad_norm": 0.203125,
"learning_rate": 3.693693693693694e-06,
"loss": 0.9911,
"step": 420
},
{
"epoch": 1.2642642642642643,
"grad_norm": 0.21875,
"learning_rate": 3.6786786786786788e-06,
"loss": 1.0118,
"step": 421
},
{
"epoch": 1.2672672672672673,
"grad_norm": 0.240234375,
"learning_rate": 3.663663663663664e-06,
"loss": 0.9989,
"step": 422
},
{
"epoch": 1.2702702702702702,
"grad_norm": 0.1982421875,
"learning_rate": 3.648648648648649e-06,
"loss": 0.964,
"step": 423
},
{
"epoch": 1.2732732732732732,
"grad_norm": 0.1923828125,
"learning_rate": 3.633633633633634e-06,
"loss": 0.9894,
"step": 424
},
{
"epoch": 1.2762762762762763,
"grad_norm": 0.2060546875,
"learning_rate": 3.618618618618619e-06,
"loss": 1.0428,
"step": 425
},
{
"epoch": 1.2792792792792793,
"grad_norm": 0.2177734375,
"learning_rate": 3.603603603603604e-06,
"loss": 0.9881,
"step": 426
},
{
"epoch": 1.2822822822822824,
"grad_norm": 0.2080078125,
"learning_rate": 3.588588588588589e-06,
"loss": 1.0189,
"step": 427
},
{
"epoch": 1.2852852852852852,
"grad_norm": 0.19140625,
"learning_rate": 3.573573573573574e-06,
"loss": 0.9937,
"step": 428
},
{
"epoch": 1.2882882882882882,
"grad_norm": 0.19921875,
"learning_rate": 3.5585585585585584e-06,
"loss": 1.0097,
"step": 429
},
{
"epoch": 1.2912912912912913,
"grad_norm": 0.2236328125,
"learning_rate": 3.5435435435435437e-06,
"loss": 0.9759,
"step": 430
},
{
"epoch": 1.2942942942942943,
"grad_norm": 0.2158203125,
"learning_rate": 3.5285285285285286e-06,
"loss": 1.0062,
"step": 431
},
{
"epoch": 1.2972972972972974,
"grad_norm": 0.21484375,
"learning_rate": 3.513513513513514e-06,
"loss": 1.0051,
"step": 432
},
{
"epoch": 1.3003003003003002,
"grad_norm": 0.2265625,
"learning_rate": 3.4984984984984987e-06,
"loss": 0.9856,
"step": 433
},
{
"epoch": 1.3033033033033032,
"grad_norm": 0.1953125,
"learning_rate": 3.4834834834834835e-06,
"loss": 0.9853,
"step": 434
},
{
"epoch": 1.3063063063063063,
"grad_norm": 0.208984375,
"learning_rate": 3.468468468468469e-06,
"loss": 1.0093,
"step": 435
},
{
"epoch": 1.3093093093093093,
"grad_norm": 0.2490234375,
"learning_rate": 3.4534534534534537e-06,
"loss": 1.0461,
"step": 436
},
{
"epoch": 1.3123123123123124,
"grad_norm": 0.1953125,
"learning_rate": 3.438438438438439e-06,
"loss": 1.0062,
"step": 437
},
{
"epoch": 1.3153153153153152,
"grad_norm": 0.1962890625,
"learning_rate": 3.423423423423424e-06,
"loss": 0.9996,
"step": 438
},
{
"epoch": 1.3183183183183182,
"grad_norm": 0.2109375,
"learning_rate": 3.4084084084084086e-06,
"loss": 1.0118,
"step": 439
},
{
"epoch": 1.3213213213213213,
"grad_norm": 0.23828125,
"learning_rate": 3.393393393393394e-06,
"loss": 1.0836,
"step": 440
},
{
"epoch": 1.3243243243243243,
"grad_norm": 0.2080078125,
"learning_rate": 3.3783783783783788e-06,
"loss": 1.012,
"step": 441
},
{
"epoch": 1.3273273273273274,
"grad_norm": 0.1962890625,
"learning_rate": 3.363363363363364e-06,
"loss": 1.0009,
"step": 442
},
{
"epoch": 1.3303303303303302,
"grad_norm": 0.197265625,
"learning_rate": 3.3483483483483485e-06,
"loss": 0.9475,
"step": 443
},
{
"epoch": 1.3333333333333333,
"grad_norm": 0.1953125,
"learning_rate": 3.3333333333333333e-06,
"loss": 0.9706,
"step": 444
},
{
"epoch": 1.3363363363363363,
"grad_norm": 0.2001953125,
"learning_rate": 3.3183183183183186e-06,
"loss": 0.9638,
"step": 445
},
{
"epoch": 1.3393393393393394,
"grad_norm": 0.2109375,
"learning_rate": 3.3033033033033035e-06,
"loss": 0.9995,
"step": 446
},
{
"epoch": 1.3423423423423424,
"grad_norm": 0.19921875,
"learning_rate": 3.2882882882882887e-06,
"loss": 0.9348,
"step": 447
},
{
"epoch": 1.3453453453453452,
"grad_norm": 0.205078125,
"learning_rate": 3.2732732732732736e-06,
"loss": 1.0285,
"step": 448
},
{
"epoch": 1.3483483483483483,
"grad_norm": 0.208984375,
"learning_rate": 3.2582582582582584e-06,
"loss": 1.0523,
"step": 449
},
{
"epoch": 1.3513513513513513,
"grad_norm": 0.21875,
"learning_rate": 3.2432432432432437e-06,
"loss": 0.9908,
"step": 450
},
{
"epoch": 1.3543543543543544,
"grad_norm": 0.197265625,
"learning_rate": 3.2282282282282286e-06,
"loss": 0.964,
"step": 451
},
{
"epoch": 1.3573573573573574,
"grad_norm": 0.220703125,
"learning_rate": 3.2132132132132134e-06,
"loss": 1.055,
"step": 452
},
{
"epoch": 1.3603603603603602,
"grad_norm": 0.2001953125,
"learning_rate": 3.1981981981981987e-06,
"loss": 0.973,
"step": 453
},
{
"epoch": 1.3633633633633635,
"grad_norm": 0.1953125,
"learning_rate": 3.183183183183183e-06,
"loss": 1.013,
"step": 454
},
{
"epoch": 1.3663663663663663,
"grad_norm": 0.201171875,
"learning_rate": 3.168168168168169e-06,
"loss": 0.9992,
"step": 455
},
{
"epoch": 1.3693693693693694,
"grad_norm": 0.1953125,
"learning_rate": 3.1531531531531532e-06,
"loss": 0.9663,
"step": 456
},
{
"epoch": 1.3723723723723724,
"grad_norm": 0.208984375,
"learning_rate": 3.138138138138138e-06,
"loss": 0.985,
"step": 457
},
{
"epoch": 1.3753753753753752,
"grad_norm": 0.1923828125,
"learning_rate": 3.1231231231231234e-06,
"loss": 0.9693,
"step": 458
},
{
"epoch": 1.3783783783783785,
"grad_norm": 0.2255859375,
"learning_rate": 3.1081081081081082e-06,
"loss": 1.0308,
"step": 459
},
{
"epoch": 1.3813813813813813,
"grad_norm": 0.208984375,
"learning_rate": 3.0930930930930935e-06,
"loss": 0.9799,
"step": 460
},
{
"epoch": 1.3843843843843844,
"grad_norm": 0.19921875,
"learning_rate": 3.0780780780780783e-06,
"loss": 1.0527,
"step": 461
},
{
"epoch": 1.3873873873873874,
"grad_norm": 0.203125,
"learning_rate": 3.063063063063063e-06,
"loss": 0.9906,
"step": 462
},
{
"epoch": 1.3903903903903903,
"grad_norm": 0.1923828125,
"learning_rate": 3.0480480480480485e-06,
"loss": 0.981,
"step": 463
},
{
"epoch": 1.3933933933933935,
"grad_norm": 0.353515625,
"learning_rate": 3.0330330330330333e-06,
"loss": 0.9998,
"step": 464
},
{
"epoch": 1.3963963963963963,
"grad_norm": 0.20703125,
"learning_rate": 3.0180180180180186e-06,
"loss": 0.9736,
"step": 465
},
{
"epoch": 1.3993993993993994,
"grad_norm": 0.197265625,
"learning_rate": 3.0030030030030034e-06,
"loss": 0.9697,
"step": 466
},
{
"epoch": 1.4024024024024024,
"grad_norm": 0.1962890625,
"learning_rate": 2.987987987987988e-06,
"loss": 1.018,
"step": 467
},
{
"epoch": 1.4054054054054055,
"grad_norm": 0.203125,
"learning_rate": 2.9729729729729736e-06,
"loss": 1.0017,
"step": 468
},
{
"epoch": 1.4084084084084085,
"grad_norm": 0.23828125,
"learning_rate": 2.957957957957958e-06,
"loss": 0.9449,
"step": 469
},
{
"epoch": 1.4114114114114114,
"grad_norm": 0.2451171875,
"learning_rate": 2.942942942942943e-06,
"loss": 0.9696,
"step": 470
},
{
"epoch": 1.4144144144144144,
"grad_norm": 0.20703125,
"learning_rate": 2.927927927927928e-06,
"loss": 0.9748,
"step": 471
},
{
"epoch": 1.4174174174174174,
"grad_norm": 0.2451171875,
"learning_rate": 2.912912912912913e-06,
"loss": 0.9515,
"step": 472
},
{
"epoch": 1.4204204204204205,
"grad_norm": 0.205078125,
"learning_rate": 2.8978978978978983e-06,
"loss": 0.9795,
"step": 473
},
{
"epoch": 1.4234234234234235,
"grad_norm": 0.2255859375,
"learning_rate": 2.882882882882883e-06,
"loss": 0.9753,
"step": 474
},
{
"epoch": 1.4264264264264264,
"grad_norm": 0.244140625,
"learning_rate": 2.867867867867868e-06,
"loss": 1.0243,
"step": 475
},
{
"epoch": 1.4294294294294294,
"grad_norm": 0.201171875,
"learning_rate": 2.8528528528528532e-06,
"loss": 0.9963,
"step": 476
},
{
"epoch": 1.4324324324324325,
"grad_norm": 0.2001953125,
"learning_rate": 2.837837837837838e-06,
"loss": 1.0299,
"step": 477
},
{
"epoch": 1.4354354354354355,
"grad_norm": 0.2158203125,
"learning_rate": 2.8228228228228234e-06,
"loss": 1.0606,
"step": 478
},
{
"epoch": 1.4384384384384385,
"grad_norm": 0.193359375,
"learning_rate": 2.807807807807808e-06,
"loss": 0.9905,
"step": 479
},
{
"epoch": 1.4414414414414414,
"grad_norm": 0.197265625,
"learning_rate": 2.7927927927927926e-06,
"loss": 0.9506,
"step": 480
},
{
"epoch": 1.4444444444444444,
"grad_norm": 0.19140625,
"learning_rate": 2.7777777777777783e-06,
"loss": 0.9851,
"step": 481
},
{
"epoch": 1.4474474474474475,
"grad_norm": 0.263671875,
"learning_rate": 2.7627627627627628e-06,
"loss": 1.014,
"step": 482
},
{
"epoch": 1.4504504504504505,
"grad_norm": 0.201171875,
"learning_rate": 2.747747747747748e-06,
"loss": 1.0065,
"step": 483
},
{
"epoch": 1.4534534534534536,
"grad_norm": 0.2216796875,
"learning_rate": 2.732732732732733e-06,
"loss": 1.0507,
"step": 484
},
{
"epoch": 1.4564564564564564,
"grad_norm": 0.2021484375,
"learning_rate": 2.7177177177177177e-06,
"loss": 0.9682,
"step": 485
},
{
"epoch": 1.4594594594594594,
"grad_norm": 0.19921875,
"learning_rate": 2.702702702702703e-06,
"loss": 1.0245,
"step": 486
},
{
"epoch": 1.4624624624624625,
"grad_norm": 0.201171875,
"learning_rate": 2.687687687687688e-06,
"loss": 1.0337,
"step": 487
},
{
"epoch": 1.4654654654654655,
"grad_norm": 0.2080078125,
"learning_rate": 2.672672672672673e-06,
"loss": 0.9696,
"step": 488
},
{
"epoch": 1.4684684684684686,
"grad_norm": 0.21875,
"learning_rate": 2.657657657657658e-06,
"loss": 0.9765,
"step": 489
},
{
"epoch": 1.4714714714714714,
"grad_norm": 0.201171875,
"learning_rate": 2.642642642642643e-06,
"loss": 0.9387,
"step": 490
},
{
"epoch": 1.4744744744744744,
"grad_norm": 0.259765625,
"learning_rate": 2.627627627627628e-06,
"loss": 1.014,
"step": 491
},
{
"epoch": 1.4774774774774775,
"grad_norm": 0.267578125,
"learning_rate": 2.612612612612613e-06,
"loss": 0.9767,
"step": 492
},
{
"epoch": 1.4804804804804805,
"grad_norm": 0.271484375,
"learning_rate": 2.5975975975975974e-06,
"loss": 0.9864,
"step": 493
},
{
"epoch": 1.4834834834834836,
"grad_norm": 0.197265625,
"learning_rate": 2.5825825825825827e-06,
"loss": 1.0044,
"step": 494
},
{
"epoch": 1.4864864864864864,
"grad_norm": 0.265625,
"learning_rate": 2.5675675675675675e-06,
"loss": 1.0053,
"step": 495
},
{
"epoch": 1.4894894894894894,
"grad_norm": 0.220703125,
"learning_rate": 2.552552552552553e-06,
"loss": 1.043,
"step": 496
},
{
"epoch": 1.4924924924924925,
"grad_norm": 0.19921875,
"learning_rate": 2.5375375375375377e-06,
"loss": 0.9681,
"step": 497
},
{
"epoch": 1.4954954954954955,
"grad_norm": 0.216796875,
"learning_rate": 2.5225225225225225e-06,
"loss": 1.0167,
"step": 498
},
{
"epoch": 1.4984984984984986,
"grad_norm": 0.197265625,
"learning_rate": 2.5075075075075078e-06,
"loss": 1.0176,
"step": 499
},
{
"epoch": 1.5015015015015014,
"grad_norm": 0.4453125,
"learning_rate": 2.4924924924924926e-06,
"loss": 0.9974,
"step": 500
},
{
"epoch": 1.5045045045045045,
"grad_norm": 0.283203125,
"learning_rate": 2.4774774774774775e-06,
"loss": 0.9738,
"step": 501
},
{
"epoch": 1.5075075075075075,
"grad_norm": 0.21484375,
"learning_rate": 2.4624624624624628e-06,
"loss": 0.9876,
"step": 502
},
{
"epoch": 1.5105105105105106,
"grad_norm": 0.2001953125,
"learning_rate": 2.4474474474474476e-06,
"loss": 0.9731,
"step": 503
},
{
"epoch": 1.5135135135135136,
"grad_norm": 0.2353515625,
"learning_rate": 2.432432432432433e-06,
"loss": 1.0173,
"step": 504
},
{
"epoch": 1.5165165165165164,
"grad_norm": 0.27734375,
"learning_rate": 2.4174174174174177e-06,
"loss": 0.9874,
"step": 505
},
{
"epoch": 1.5195195195195195,
"grad_norm": 0.2001953125,
"learning_rate": 2.4024024024024026e-06,
"loss": 0.9523,
"step": 506
},
{
"epoch": 1.5225225225225225,
"grad_norm": 0.220703125,
"learning_rate": 2.3873873873873874e-06,
"loss": 1.0534,
"step": 507
},
{
"epoch": 1.5255255255255256,
"grad_norm": 0.2177734375,
"learning_rate": 2.3723723723723727e-06,
"loss": 0.9529,
"step": 508
},
{
"epoch": 1.5285285285285286,
"grad_norm": 0.2080078125,
"learning_rate": 2.3573573573573576e-06,
"loss": 1.0049,
"step": 509
},
{
"epoch": 1.5315315315315314,
"grad_norm": 0.2158203125,
"learning_rate": 2.3423423423423424e-06,
"loss": 1.0219,
"step": 510
},
{
"epoch": 1.5345345345345347,
"grad_norm": 0.205078125,
"learning_rate": 2.3273273273273277e-06,
"loss": 0.9923,
"step": 511
},
{
"epoch": 1.5375375375375375,
"grad_norm": 0.205078125,
"learning_rate": 2.3123123123123125e-06,
"loss": 0.9805,
"step": 512
},
{
"epoch": 1.5405405405405406,
"grad_norm": 0.1962890625,
"learning_rate": 2.297297297297298e-06,
"loss": 0.9979,
"step": 513
},
{
"epoch": 1.5435435435435436,
"grad_norm": 0.2001953125,
"learning_rate": 2.2822822822822822e-06,
"loss": 0.999,
"step": 514
},
{
"epoch": 1.5465465465465464,
"grad_norm": 0.2275390625,
"learning_rate": 2.2672672672672675e-06,
"loss": 0.9612,
"step": 515
},
{
"epoch": 1.5495495495495497,
"grad_norm": 0.193359375,
"learning_rate": 2.2522522522522524e-06,
"loss": 0.9834,
"step": 516
},
{
"epoch": 1.5525525525525525,
"grad_norm": 0.3828125,
"learning_rate": 2.2372372372372376e-06,
"loss": 0.9795,
"step": 517
},
{
"epoch": 1.5555555555555556,
"grad_norm": 0.2138671875,
"learning_rate": 2.222222222222222e-06,
"loss": 0.9436,
"step": 518
},
{
"epoch": 1.5585585585585586,
"grad_norm": 0.2001953125,
"learning_rate": 2.2072072072072073e-06,
"loss": 0.962,
"step": 519
},
{
"epoch": 1.5615615615615615,
"grad_norm": 0.2021484375,
"learning_rate": 2.192192192192192e-06,
"loss": 1.0042,
"step": 520
},
{
"epoch": 1.5645645645645647,
"grad_norm": 0.2041015625,
"learning_rate": 2.1771771771771775e-06,
"loss": 1.0371,
"step": 521
},
{
"epoch": 1.5675675675675675,
"grad_norm": 0.2041015625,
"learning_rate": 2.1621621621621623e-06,
"loss": 1.0429,
"step": 522
},
{
"epoch": 1.5705705705705706,
"grad_norm": 0.2060546875,
"learning_rate": 2.147147147147147e-06,
"loss": 0.931,
"step": 523
},
{
"epoch": 1.5735735735735736,
"grad_norm": 0.1982421875,
"learning_rate": 2.1321321321321325e-06,
"loss": 1.0305,
"step": 524
},
{
"epoch": 1.5765765765765765,
"grad_norm": 0.294921875,
"learning_rate": 2.1171171171171173e-06,
"loss": 0.9281,
"step": 525
},
{
"epoch": 1.5795795795795797,
"grad_norm": 0.201171875,
"learning_rate": 2.102102102102102e-06,
"loss": 0.9863,
"step": 526
},
{
"epoch": 1.5825825825825826,
"grad_norm": 0.2041015625,
"learning_rate": 2.087087087087087e-06,
"loss": 0.9703,
"step": 527
},
{
"epoch": 1.5855855855855856,
"grad_norm": 0.2333984375,
"learning_rate": 2.0720720720720723e-06,
"loss": 1.0013,
"step": 528
},
{
"epoch": 1.5885885885885886,
"grad_norm": 0.19140625,
"learning_rate": 2.057057057057057e-06,
"loss": 0.9472,
"step": 529
},
{
"epoch": 1.5915915915915915,
"grad_norm": 0.1923828125,
"learning_rate": 2.0420420420420424e-06,
"loss": 0.9629,
"step": 530
},
{
"epoch": 1.5945945945945947,
"grad_norm": 0.291015625,
"learning_rate": 2.0270270270270273e-06,
"loss": 0.9893,
"step": 531
},
{
"epoch": 1.5975975975975976,
"grad_norm": 0.2041015625,
"learning_rate": 2.012012012012012e-06,
"loss": 1.0118,
"step": 532
},
{
"epoch": 1.6006006006006006,
"grad_norm": 0.21484375,
"learning_rate": 1.996996996996997e-06,
"loss": 0.9447,
"step": 533
},
{
"epoch": 1.6036036036036037,
"grad_norm": 0.20703125,
"learning_rate": 1.9819819819819822e-06,
"loss": 0.9844,
"step": 534
},
{
"epoch": 1.6066066066066065,
"grad_norm": 0.2099609375,
"learning_rate": 1.966966966966967e-06,
"loss": 0.9383,
"step": 535
},
{
"epoch": 1.6096096096096097,
"grad_norm": 0.259765625,
"learning_rate": 1.951951951951952e-06,
"loss": 1.0075,
"step": 536
},
{
"epoch": 1.6126126126126126,
"grad_norm": 0.1943359375,
"learning_rate": 1.9369369369369372e-06,
"loss": 0.943,
"step": 537
},
{
"epoch": 1.6156156156156156,
"grad_norm": 0.2080078125,
"learning_rate": 1.921921921921922e-06,
"loss": 0.9848,
"step": 538
},
{
"epoch": 1.6186186186186187,
"grad_norm": 0.2138671875,
"learning_rate": 1.9069069069069071e-06,
"loss": 0.9484,
"step": 539
},
{
"epoch": 1.6216216216216215,
"grad_norm": 0.24609375,
"learning_rate": 1.8918918918918922e-06,
"loss": 1.0233,
"step": 540
},
{
"epoch": 1.6246246246246248,
"grad_norm": 0.29296875,
"learning_rate": 1.8768768768768768e-06,
"loss": 0.9918,
"step": 541
},
{
"epoch": 1.6276276276276276,
"grad_norm": 0.2138671875,
"learning_rate": 1.861861861861862e-06,
"loss": 1.0472,
"step": 542
},
{
"epoch": 1.6306306306306306,
"grad_norm": 0.1953125,
"learning_rate": 1.846846846846847e-06,
"loss": 0.976,
"step": 543
},
{
"epoch": 1.6336336336336337,
"grad_norm": 0.201171875,
"learning_rate": 1.831831831831832e-06,
"loss": 0.9576,
"step": 544
},
{
"epoch": 1.6366366366366365,
"grad_norm": 0.21484375,
"learning_rate": 1.816816816816817e-06,
"loss": 0.9836,
"step": 545
},
{
"epoch": 1.6396396396396398,
"grad_norm": 0.21484375,
"learning_rate": 1.801801801801802e-06,
"loss": 1.0699,
"step": 546
},
{
"epoch": 1.6426426426426426,
"grad_norm": 0.201171875,
"learning_rate": 1.786786786786787e-06,
"loss": 1.0028,
"step": 547
},
{
"epoch": 1.6456456456456456,
"grad_norm": 0.357421875,
"learning_rate": 1.7717717717717719e-06,
"loss": 1.0296,
"step": 548
},
{
"epoch": 1.6486486486486487,
"grad_norm": 0.212890625,
"learning_rate": 1.756756756756757e-06,
"loss": 1.0657,
"step": 549
},
{
"epoch": 1.6516516516516515,
"grad_norm": 0.201171875,
"learning_rate": 1.7417417417417418e-06,
"loss": 1.0059,
"step": 550
},
{
"epoch": 1.6546546546546548,
"grad_norm": 0.19921875,
"learning_rate": 1.7267267267267268e-06,
"loss": 0.9707,
"step": 551
},
{
"epoch": 1.6576576576576576,
"grad_norm": 0.201171875,
"learning_rate": 1.711711711711712e-06,
"loss": 0.9538,
"step": 552
},
{
"epoch": 1.6606606606606606,
"grad_norm": 0.2177734375,
"learning_rate": 1.696696696696697e-06,
"loss": 0.9963,
"step": 553
},
{
"epoch": 1.6636636636636637,
"grad_norm": 0.2099609375,
"learning_rate": 1.681681681681682e-06,
"loss": 1.0286,
"step": 554
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.1982421875,
"learning_rate": 1.6666666666666667e-06,
"loss": 0.9819,
"step": 555
},
{
"epoch": 1.6696696696696698,
"grad_norm": 0.2021484375,
"learning_rate": 1.6516516516516517e-06,
"loss": 0.9611,
"step": 556
},
{
"epoch": 1.6726726726726726,
"grad_norm": 0.212890625,
"learning_rate": 1.6366366366366368e-06,
"loss": 0.9716,
"step": 557
},
{
"epoch": 1.6756756756756757,
"grad_norm": 0.1982421875,
"learning_rate": 1.6216216216216219e-06,
"loss": 0.9671,
"step": 558
},
{
"epoch": 1.6786786786786787,
"grad_norm": 0.2177734375,
"learning_rate": 1.6066066066066067e-06,
"loss": 0.9823,
"step": 559
},
{
"epoch": 1.6816816816816815,
"grad_norm": 0.2080078125,
"learning_rate": 1.5915915915915916e-06,
"loss": 1.0137,
"step": 560
},
{
"epoch": 1.6846846846846848,
"grad_norm": 0.203125,
"learning_rate": 1.5765765765765766e-06,
"loss": 1.0035,
"step": 561
},
{
"epoch": 1.6876876876876876,
"grad_norm": 0.28515625,
"learning_rate": 1.5615615615615617e-06,
"loss": 1.0099,
"step": 562
},
{
"epoch": 1.6906906906906907,
"grad_norm": 0.2197265625,
"learning_rate": 1.5465465465465467e-06,
"loss": 0.9928,
"step": 563
},
{
"epoch": 1.6936936936936937,
"grad_norm": 0.2001953125,
"learning_rate": 1.5315315315315316e-06,
"loss": 0.9631,
"step": 564
},
{
"epoch": 1.6966966966966965,
"grad_norm": 0.232421875,
"learning_rate": 1.5165165165165167e-06,
"loss": 0.9909,
"step": 565
},
{
"epoch": 1.6996996996996998,
"grad_norm": 0.208984375,
"learning_rate": 1.5015015015015017e-06,
"loss": 0.9794,
"step": 566
},
{
"epoch": 1.7027027027027026,
"grad_norm": 0.3359375,
"learning_rate": 1.4864864864864868e-06,
"loss": 0.9484,
"step": 567
},
{
"epoch": 1.7057057057057057,
"grad_norm": 0.19921875,
"learning_rate": 1.4714714714714714e-06,
"loss": 0.9477,
"step": 568
},
{
"epoch": 1.7087087087087087,
"grad_norm": 0.240234375,
"learning_rate": 1.4564564564564565e-06,
"loss": 1.0411,
"step": 569
},
{
"epoch": 1.7117117117117115,
"grad_norm": 0.201171875,
"learning_rate": 1.4414414414414416e-06,
"loss": 0.9995,
"step": 570
},
{
"epoch": 1.7147147147147148,
"grad_norm": 0.2294921875,
"learning_rate": 1.4264264264264266e-06,
"loss": 0.9882,
"step": 571
},
{
"epoch": 1.7177177177177176,
"grad_norm": 0.2119140625,
"learning_rate": 1.4114114114114117e-06,
"loss": 0.9916,
"step": 572
},
{
"epoch": 1.7207207207207207,
"grad_norm": 0.28125,
"learning_rate": 1.3963963963963963e-06,
"loss": 0.9888,
"step": 573
},
{
"epoch": 1.7237237237237237,
"grad_norm": 0.2158203125,
"learning_rate": 1.3813813813813814e-06,
"loss": 0.9913,
"step": 574
},
{
"epoch": 1.7267267267267268,
"grad_norm": 0.294921875,
"learning_rate": 1.3663663663663664e-06,
"loss": 1.0083,
"step": 575
},
{
"epoch": 1.7297297297297298,
"grad_norm": 0.2109375,
"learning_rate": 1.3513513513513515e-06,
"loss": 0.9847,
"step": 576
},
{
"epoch": 1.7327327327327327,
"grad_norm": 0.1962890625,
"learning_rate": 1.3363363363363366e-06,
"loss": 0.981,
"step": 577
},
{
"epoch": 1.7357357357357357,
"grad_norm": 0.2099609375,
"learning_rate": 1.3213213213213214e-06,
"loss": 0.9571,
"step": 578
},
{
"epoch": 1.7387387387387387,
"grad_norm": 0.197265625,
"learning_rate": 1.3063063063063065e-06,
"loss": 0.9027,
"step": 579
},
{
"epoch": 1.7417417417417418,
"grad_norm": 0.255859375,
"learning_rate": 1.2912912912912913e-06,
"loss": 0.9199,
"step": 580
},
{
"epoch": 1.7447447447447448,
"grad_norm": 0.228515625,
"learning_rate": 1.2762762762762764e-06,
"loss": 0.9709,
"step": 581
},
{
"epoch": 1.7477477477477477,
"grad_norm": 0.19921875,
"learning_rate": 1.2612612612612613e-06,
"loss": 0.9939,
"step": 582
},
{
"epoch": 1.7507507507507507,
"grad_norm": 0.25,
"learning_rate": 1.2462462462462463e-06,
"loss": 1.0307,
"step": 583
},
{
"epoch": 1.7537537537537538,
"grad_norm": 0.1982421875,
"learning_rate": 1.2312312312312314e-06,
"loss": 1.0096,
"step": 584
},
{
"epoch": 1.7567567567567568,
"grad_norm": 0.23046875,
"learning_rate": 1.2162162162162164e-06,
"loss": 1.0162,
"step": 585
},
{
"epoch": 1.7597597597597598,
"grad_norm": 0.26953125,
"learning_rate": 1.2012012012012013e-06,
"loss": 0.9755,
"step": 586
},
{
"epoch": 1.7627627627627627,
"grad_norm": 0.212890625,
"learning_rate": 1.1861861861861864e-06,
"loss": 0.9147,
"step": 587
},
{
"epoch": 1.7657657657657657,
"grad_norm": 0.1943359375,
"learning_rate": 1.1711711711711712e-06,
"loss": 0.9598,
"step": 588
},
{
"epoch": 1.7687687687687688,
"grad_norm": 0.1943359375,
"learning_rate": 1.1561561561561563e-06,
"loss": 0.9519,
"step": 589
},
{
"epoch": 1.7717717717717718,
"grad_norm": 0.208984375,
"learning_rate": 1.1411411411411411e-06,
"loss": 0.9299,
"step": 590
},
{
"epoch": 1.7747747747747749,
"grad_norm": 0.197265625,
"learning_rate": 1.1261261261261262e-06,
"loss": 0.9965,
"step": 591
},
{
"epoch": 1.7777777777777777,
"grad_norm": 0.1953125,
"learning_rate": 1.111111111111111e-06,
"loss": 0.987,
"step": 592
},
{
"epoch": 1.7807807807807807,
"grad_norm": 0.2119140625,
"learning_rate": 1.096096096096096e-06,
"loss": 1.0014,
"step": 593
},
{
"epoch": 1.7837837837837838,
"grad_norm": 0.2041015625,
"learning_rate": 1.0810810810810812e-06,
"loss": 0.9659,
"step": 594
},
{
"epoch": 1.7867867867867868,
"grad_norm": 0.19921875,
"learning_rate": 1.0660660660660662e-06,
"loss": 0.9801,
"step": 595
},
{
"epoch": 1.7897897897897899,
"grad_norm": 0.267578125,
"learning_rate": 1.051051051051051e-06,
"loss": 0.9974,
"step": 596
},
{
"epoch": 1.7927927927927927,
"grad_norm": 0.2119140625,
"learning_rate": 1.0360360360360361e-06,
"loss": 1.033,
"step": 597
},
{
"epoch": 1.795795795795796,
"grad_norm": 0.193359375,
"learning_rate": 1.0210210210210212e-06,
"loss": 1.0094,
"step": 598
},
{
"epoch": 1.7987987987987988,
"grad_norm": 0.259765625,
"learning_rate": 1.006006006006006e-06,
"loss": 0.9708,
"step": 599
},
{
"epoch": 1.8018018018018018,
"grad_norm": 0.31640625,
"learning_rate": 9.909909909909911e-07,
"loss": 0.998,
"step": 600
},
{
"epoch": 1.8048048048048049,
"grad_norm": 0.20703125,
"learning_rate": 9.75975975975976e-07,
"loss": 0.9866,
"step": 601
},
{
"epoch": 1.8078078078078077,
"grad_norm": 0.2041015625,
"learning_rate": 9.60960960960961e-07,
"loss": 0.9943,
"step": 602
},
{
"epoch": 1.810810810810811,
"grad_norm": 0.19921875,
"learning_rate": 9.459459459459461e-07,
"loss": 0.9521,
"step": 603
},
{
"epoch": 1.8138138138138138,
"grad_norm": 0.333984375,
"learning_rate": 9.30930930930931e-07,
"loss": 0.9498,
"step": 604
},
{
"epoch": 1.8168168168168168,
"grad_norm": 0.326171875,
"learning_rate": 9.15915915915916e-07,
"loss": 0.8899,
"step": 605
},
{
"epoch": 1.8198198198198199,
"grad_norm": 0.26953125,
"learning_rate": 9.00900900900901e-07,
"loss": 0.9516,
"step": 606
},
{
"epoch": 1.8228228228228227,
"grad_norm": 0.1962890625,
"learning_rate": 8.858858858858859e-07,
"loss": 0.9876,
"step": 607
},
{
"epoch": 1.825825825825826,
"grad_norm": 0.19140625,
"learning_rate": 8.708708708708709e-07,
"loss": 0.9698,
"step": 608
},
{
"epoch": 1.8288288288288288,
"grad_norm": 0.2490234375,
"learning_rate": 8.55855855855856e-07,
"loss": 0.9624,
"step": 609
},
{
"epoch": 1.8318318318318318,
"grad_norm": 0.2314453125,
"learning_rate": 8.40840840840841e-07,
"loss": 1.0414,
"step": 610
},
{
"epoch": 1.834834834834835,
"grad_norm": 0.2138671875,
"learning_rate": 8.258258258258259e-07,
"loss": 0.9519,
"step": 611
},
{
"epoch": 1.8378378378378377,
"grad_norm": 0.1982421875,
"learning_rate": 8.108108108108109e-07,
"loss": 1.0047,
"step": 612
},
{
"epoch": 1.840840840840841,
"grad_norm": 0.2353515625,
"learning_rate": 7.957957957957958e-07,
"loss": 0.9718,
"step": 613
},
{
"epoch": 1.8438438438438438,
"grad_norm": 0.19921875,
"learning_rate": 7.807807807807808e-07,
"loss": 0.963,
"step": 614
},
{
"epoch": 1.8468468468468469,
"grad_norm": 0.2001953125,
"learning_rate": 7.657657657657658e-07,
"loss": 0.9523,
"step": 615
},
{
"epoch": 1.84984984984985,
"grad_norm": 0.19921875,
"learning_rate": 7.507507507507509e-07,
"loss": 0.9765,
"step": 616
},
{
"epoch": 1.8528528528528527,
"grad_norm": 0.2177734375,
"learning_rate": 7.357357357357357e-07,
"loss": 1.0135,
"step": 617
},
{
"epoch": 1.855855855855856,
"grad_norm": 0.2158203125,
"learning_rate": 7.207207207207208e-07,
"loss": 0.9795,
"step": 618
},
{
"epoch": 1.8588588588588588,
"grad_norm": 0.28515625,
"learning_rate": 7.057057057057058e-07,
"loss": 0.9955,
"step": 619
},
{
"epoch": 1.8618618618618619,
"grad_norm": 0.333984375,
"learning_rate": 6.906906906906907e-07,
"loss": 0.97,
"step": 620
},
{
"epoch": 1.864864864864865,
"grad_norm": 0.2099609375,
"learning_rate": 6.756756756756758e-07,
"loss": 0.9781,
"step": 621
},
{
"epoch": 1.8678678678678677,
"grad_norm": 0.232421875,
"learning_rate": 6.606606606606607e-07,
"loss": 0.9932,
"step": 622
},
{
"epoch": 1.870870870870871,
"grad_norm": 0.2080078125,
"learning_rate": 6.456456456456457e-07,
"loss": 1.0623,
"step": 623
},
{
"epoch": 1.8738738738738738,
"grad_norm": 0.197265625,
"learning_rate": 6.306306306306306e-07,
"loss": 1.0123,
"step": 624
},
{
"epoch": 1.8768768768768769,
"grad_norm": 0.212890625,
"learning_rate": 6.156156156156157e-07,
"loss": 0.9996,
"step": 625
},
{
"epoch": 1.87987987987988,
"grad_norm": 0.19921875,
"learning_rate": 6.006006006006006e-07,
"loss": 1.0018,
"step": 626
},
{
"epoch": 1.8828828828828827,
"grad_norm": 0.2109375,
"learning_rate": 5.855855855855856e-07,
"loss": 1.0228,
"step": 627
},
{
"epoch": 1.885885885885886,
"grad_norm": 0.205078125,
"learning_rate": 5.705705705705706e-07,
"loss": 0.9766,
"step": 628
},
{
"epoch": 1.8888888888888888,
"grad_norm": 0.2080078125,
"learning_rate": 5.555555555555555e-07,
"loss": 0.9793,
"step": 629
},
{
"epoch": 1.8918918918918919,
"grad_norm": 0.203125,
"learning_rate": 5.405405405405406e-07,
"loss": 0.9526,
"step": 630
},
{
"epoch": 1.894894894894895,
"grad_norm": 0.19921875,
"learning_rate": 5.255255255255255e-07,
"loss": 0.9866,
"step": 631
},
{
"epoch": 1.8978978978978978,
"grad_norm": 0.2451171875,
"learning_rate": 5.105105105105106e-07,
"loss": 0.9467,
"step": 632
},
{
"epoch": 1.900900900900901,
"grad_norm": 0.19140625,
"learning_rate": 4.954954954954956e-07,
"loss": 0.9579,
"step": 633
},
{
"epoch": 1.9039039039039038,
"grad_norm": 0.205078125,
"learning_rate": 4.804804804804805e-07,
"loss": 1.0449,
"step": 634
},
{
"epoch": 1.906906906906907,
"grad_norm": 0.232421875,
"learning_rate": 4.654654654654655e-07,
"loss": 0.9884,
"step": 635
},
{
"epoch": 1.90990990990991,
"grad_norm": 0.1943359375,
"learning_rate": 4.504504504504505e-07,
"loss": 0.9813,
"step": 636
},
{
"epoch": 1.9129129129129128,
"grad_norm": 0.228515625,
"learning_rate": 4.3543543543543544e-07,
"loss": 0.9638,
"step": 637
},
{
"epoch": 1.915915915915916,
"grad_norm": 0.2294921875,
"learning_rate": 4.204204204204205e-07,
"loss": 1.1317,
"step": 638
},
{
"epoch": 1.9189189189189189,
"grad_norm": 0.34375,
"learning_rate": 4.0540540540540546e-07,
"loss": 0.938,
"step": 639
},
{
"epoch": 1.921921921921922,
"grad_norm": 0.2451171875,
"learning_rate": 3.903903903903904e-07,
"loss": 0.9528,
"step": 640
},
{
"epoch": 1.924924924924925,
"grad_norm": 0.2294921875,
"learning_rate": 3.7537537537537543e-07,
"loss": 0.9967,
"step": 641
},
{
"epoch": 1.9279279279279278,
"grad_norm": 0.1982421875,
"learning_rate": 3.603603603603604e-07,
"loss": 0.9644,
"step": 642
},
{
"epoch": 1.930930930930931,
"grad_norm": 0.20703125,
"learning_rate": 3.4534534534534535e-07,
"loss": 1.002,
"step": 643
},
{
"epoch": 1.9339339339339339,
"grad_norm": 0.201171875,
"learning_rate": 3.3033033033033036e-07,
"loss": 1.0571,
"step": 644
},
{
"epoch": 1.936936936936937,
"grad_norm": 0.201171875,
"learning_rate": 3.153153153153153e-07,
"loss": 0.961,
"step": 645
},
{
"epoch": 1.93993993993994,
"grad_norm": 0.22265625,
"learning_rate": 3.003003003003003e-07,
"loss": 0.9988,
"step": 646
},
{
"epoch": 1.9429429429429428,
"grad_norm": 0.201171875,
"learning_rate": 2.852852852852853e-07,
"loss": 0.9738,
"step": 647
},
{
"epoch": 1.945945945945946,
"grad_norm": 0.23828125,
"learning_rate": 2.702702702702703e-07,
"loss": 0.9963,
"step": 648
},
{
"epoch": 1.9489489489489489,
"grad_norm": 0.2001953125,
"learning_rate": 2.552552552552553e-07,
"loss": 1.0002,
"step": 649
},
{
"epoch": 1.951951951951952,
"grad_norm": 0.1982421875,
"learning_rate": 2.4024024024024026e-07,
"loss": 0.9523,
"step": 650
},
{
"epoch": 1.954954954954955,
"grad_norm": 0.1953125,
"learning_rate": 2.2522522522522524e-07,
"loss": 0.9612,
"step": 651
},
{
"epoch": 1.9579579579579578,
"grad_norm": 0.201171875,
"learning_rate": 2.1021021021021025e-07,
"loss": 0.9434,
"step": 652
},
{
"epoch": 1.960960960960961,
"grad_norm": 0.2158203125,
"learning_rate": 1.951951951951952e-07,
"loss": 0.9514,
"step": 653
},
{
"epoch": 1.9639639639639639,
"grad_norm": 0.37890625,
"learning_rate": 1.801801801801802e-07,
"loss": 1.0606,
"step": 654
},
{
"epoch": 1.966966966966967,
"grad_norm": 0.291015625,
"learning_rate": 1.6516516516516518e-07,
"loss": 0.9818,
"step": 655
},
{
"epoch": 1.96996996996997,
"grad_norm": 0.19140625,
"learning_rate": 1.5015015015015016e-07,
"loss": 0.9588,
"step": 656
},
{
"epoch": 1.972972972972973,
"grad_norm": 0.1982421875,
"learning_rate": 1.3513513513513515e-07,
"loss": 0.9622,
"step": 657
},
{
"epoch": 1.975975975975976,
"grad_norm": 0.2099609375,
"learning_rate": 1.2012012012012013e-07,
"loss": 1.011,
"step": 658
},
{
"epoch": 1.978978978978979,
"grad_norm": 0.23046875,
"learning_rate": 1.0510510510510513e-07,
"loss": 1.0144,
"step": 659
},
{
"epoch": 1.981981981981982,
"grad_norm": 0.2158203125,
"learning_rate": 9.00900900900901e-08,
"loss": 1.0183,
"step": 660
},
{
"epoch": 1.984984984984985,
"grad_norm": 0.205078125,
"learning_rate": 7.507507507507508e-08,
"loss": 1.0452,
"step": 661
},
{
"epoch": 1.987987987987988,
"grad_norm": 0.2255859375,
"learning_rate": 6.006006006006006e-08,
"loss": 1.0146,
"step": 662
},
{
"epoch": 1.990990990990991,
"grad_norm": 0.2021484375,
"learning_rate": 4.504504504504505e-08,
"loss": 0.9768,
"step": 663
},
{
"epoch": 1.993993993993994,
"grad_norm": 0.2177734375,
"learning_rate": 3.003003003003003e-08,
"loss": 0.961,
"step": 664
},
{
"epoch": 1.996996996996997,
"grad_norm": 0.205078125,
"learning_rate": 1.5015015015015016e-08,
"loss": 0.9584,
"step": 665
},
{
"epoch": 2.0,
"grad_norm": 0.21875,
"learning_rate": 0.0,
"loss": 1.007,
"step": 666
}
],
"logging_steps": 1.0,
"max_steps": 666,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 0,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.104695600296493e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}