{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 71.42857142857143,
"eval_steps": 50,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.07142857142857142,
"grad_norm": 2.3660926818847656,
"learning_rate": 0.0,
"loss": 0.8846,
"step": 1
},
{
"epoch": 0.14285714285714285,
"grad_norm": 2.3493313789367676,
"learning_rate": 1.2500000000000002e-07,
"loss": 0.8855,
"step": 2
},
{
"epoch": 0.21428571428571427,
"grad_norm": 2.3634419441223145,
"learning_rate": 2.5000000000000004e-07,
"loss": 0.8848,
"step": 3
},
{
"epoch": 0.2857142857142857,
"grad_norm": 2.377121925354004,
"learning_rate": 3.75e-07,
"loss": 0.8856,
"step": 4
},
{
"epoch": 0.35714285714285715,
"grad_norm": 2.3198955059051514,
"learning_rate": 5.000000000000001e-07,
"loss": 0.8803,
"step": 5
},
{
"epoch": 0.42857142857142855,
"grad_norm": 2.3753902912139893,
"learning_rate": 6.25e-07,
"loss": 0.8858,
"step": 6
},
{
"epoch": 0.5,
"grad_norm": 2.3688530921936035,
"learning_rate": 7.5e-07,
"loss": 0.8856,
"step": 7
},
{
"epoch": 0.5714285714285714,
"grad_norm": 2.330031156539917,
"learning_rate": 8.750000000000001e-07,
"loss": 0.888,
"step": 8
},
{
"epoch": 0.6428571428571429,
"grad_norm": 2.640928030014038,
"learning_rate": 1.0000000000000002e-06,
"loss": 0.8882,
"step": 9
},
{
"epoch": 0.7142857142857143,
"grad_norm": 2.6690409183502197,
"learning_rate": 1.125e-06,
"loss": 0.889,
"step": 10
},
{
"epoch": 0.7857142857142857,
"grad_norm": 2.6395134925842285,
"learning_rate": 1.25e-06,
"loss": 0.8845,
"step": 11
},
{
"epoch": 0.8571428571428571,
"grad_norm": 2.6309006214141846,
"learning_rate": 1.3750000000000002e-06,
"loss": 0.8859,
"step": 12
},
{
"epoch": 0.9285714285714286,
"grad_norm": 2.669376850128174,
"learning_rate": 1.5e-06,
"loss": 0.8872,
"step": 13
},
{
"epoch": 1.0,
"grad_norm": 2.630403757095337,
"learning_rate": 1.6250000000000001e-06,
"loss": 0.8794,
"step": 14
},
{
"epoch": 1.0714285714285714,
"grad_norm": 2.4702253341674805,
"learning_rate": 1.7500000000000002e-06,
"loss": 0.8828,
"step": 15
},
{
"epoch": 1.1428571428571428,
"grad_norm": 2.5032989978790283,
"learning_rate": 1.875e-06,
"loss": 0.8828,
"step": 16
},
{
"epoch": 1.2142857142857142,
"grad_norm": 2.4898006916046143,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.8856,
"step": 17
},
{
"epoch": 1.2857142857142856,
"grad_norm": 2.4425768852233887,
"learning_rate": 2.1250000000000004e-06,
"loss": 0.8789,
"step": 18
},
{
"epoch": 1.3571428571428572,
"grad_norm": 2.485208034515381,
"learning_rate": 2.25e-06,
"loss": 0.8762,
"step": 19
},
{
"epoch": 1.4285714285714286,
"grad_norm": 2.4805994033813477,
"learning_rate": 2.375e-06,
"loss": 0.8802,
"step": 20
},
{
"epoch": 1.5,
"grad_norm": 2.6056840419769287,
"learning_rate": 2.5e-06,
"loss": 0.8728,
"step": 21
},
{
"epoch": 1.5714285714285714,
"grad_norm": 2.584972381591797,
"learning_rate": 2.625e-06,
"loss": 0.8669,
"step": 22
},
{
"epoch": 1.6428571428571428,
"grad_norm": 2.7537307739257812,
"learning_rate": 2.7500000000000004e-06,
"loss": 0.8705,
"step": 23
},
{
"epoch": 1.7142857142857144,
"grad_norm": 2.7155187129974365,
"learning_rate": 2.8750000000000004e-06,
"loss": 0.8615,
"step": 24
},
{
"epoch": 1.7857142857142856,
"grad_norm": 2.7130961418151855,
"learning_rate": 3e-06,
"loss": 0.8621,
"step": 25
},
{
"epoch": 1.8571428571428572,
"grad_norm": 2.691835880279541,
"learning_rate": 3.125e-06,
"loss": 0.8562,
"step": 26
},
{
"epoch": 1.9285714285714286,
"grad_norm": 2.6454622745513916,
"learning_rate": 3.2500000000000002e-06,
"loss": 0.855,
"step": 27
},
{
"epoch": 2.0,
"grad_norm": 2.6290812492370605,
"learning_rate": 3.3750000000000003e-06,
"loss": 0.8451,
"step": 28
},
{
"epoch": 2.0714285714285716,
"grad_norm": 2.3237743377685547,
"learning_rate": 3.5000000000000004e-06,
"loss": 0.8469,
"step": 29
},
{
"epoch": 2.142857142857143,
"grad_norm": 2.2709200382232666,
"learning_rate": 3.625e-06,
"loss": 0.8412,
"step": 30
},
{
"epoch": 2.2142857142857144,
"grad_norm": 2.2170989513397217,
"learning_rate": 3.75e-06,
"loss": 0.8345,
"step": 31
},
{
"epoch": 2.2857142857142856,
"grad_norm": 2.1836729049682617,
"learning_rate": 3.875e-06,
"loss": 0.8304,
"step": 32
},
{
"epoch": 2.357142857142857,
"grad_norm": 2.086469888687134,
"learning_rate": 4.000000000000001e-06,
"loss": 0.821,
"step": 33
},
{
"epoch": 2.4285714285714284,
"grad_norm": 2.0059635639190674,
"learning_rate": 4.125e-06,
"loss": 0.8192,
"step": 34
},
{
"epoch": 2.5,
"grad_norm": 1.858028769493103,
"learning_rate": 4.250000000000001e-06,
"loss": 0.8124,
"step": 35
},
{
"epoch": 2.571428571428571,
"grad_norm": 1.6682943105697632,
"learning_rate": 4.375e-06,
"loss": 0.8062,
"step": 36
},
{
"epoch": 2.642857142857143,
"grad_norm": 1.1254407167434692,
"learning_rate": 4.5e-06,
"loss": 0.8051,
"step": 37
},
{
"epoch": 2.7142857142857144,
"grad_norm": 0.9991090893745422,
"learning_rate": 4.625e-06,
"loss": 0.8088,
"step": 38
},
{
"epoch": 2.7857142857142856,
"grad_norm": 0.77524334192276,
"learning_rate": 4.75e-06,
"loss": 0.8025,
"step": 39
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.7410176992416382,
"learning_rate": 4.875000000000001e-06,
"loss": 0.8033,
"step": 40
},
{
"epoch": 2.928571428571429,
"grad_norm": 0.7375764846801758,
"learning_rate": 5e-06,
"loss": 0.8016,
"step": 41
},
{
"epoch": 3.0,
"grad_norm": 0.7172738909721375,
"learning_rate": 5.125e-06,
"loss": 0.8037,
"step": 42
},
{
"epoch": 3.0714285714285716,
"grad_norm": 0.7604594230651855,
"learning_rate": 5.25e-06,
"loss": 0.7751,
"step": 43
},
{
"epoch": 3.142857142857143,
"grad_norm": 0.5869937539100647,
"learning_rate": 5.375e-06,
"loss": 0.7746,
"step": 44
},
{
"epoch": 3.2142857142857144,
"grad_norm": 0.6492642164230347,
"learning_rate": 5.500000000000001e-06,
"loss": 0.7732,
"step": 45
},
{
"epoch": 3.2857142857142856,
"grad_norm": 0.7614601254463196,
"learning_rate": 5.625e-06,
"loss": 0.7739,
"step": 46
},
{
"epoch": 3.357142857142857,
"grad_norm": 0.5540997385978699,
"learning_rate": 5.750000000000001e-06,
"loss": 0.7686,
"step": 47
},
{
"epoch": 3.4285714285714284,
"grad_norm": 0.653535008430481,
"learning_rate": 5.875e-06,
"loss": 0.7681,
"step": 48
},
{
"epoch": 3.5,
"grad_norm": 0.6040160655975342,
"learning_rate": 6e-06,
"loss": 0.7477,
"step": 49
},
{
"epoch": 3.571428571428571,
"grad_norm": 0.6676759123802185,
"learning_rate": 6.125e-06,
"loss": 0.7482,
"step": 50
},
{
"epoch": 3.642857142857143,
"grad_norm": 0.9932788610458374,
"learning_rate": 6.25e-06,
"loss": 0.7208,
"step": 51
},
{
"epoch": 3.7142857142857144,
"grad_norm": 0.9420803189277649,
"learning_rate": 6.375000000000001e-06,
"loss": 0.7247,
"step": 52
},
{
"epoch": 3.7857142857142856,
"grad_norm": 0.68571537733078,
"learning_rate": 6.5000000000000004e-06,
"loss": 0.7215,
"step": 53
},
{
"epoch": 3.857142857142857,
"grad_norm": 0.5234955549240112,
"learning_rate": 6.625000000000001e-06,
"loss": 0.7165,
"step": 54
},
{
"epoch": 3.928571428571429,
"grad_norm": 0.5814024806022644,
"learning_rate": 6.750000000000001e-06,
"loss": 0.7174,
"step": 55
},
{
"epoch": 4.0,
"grad_norm": 0.6290055513381958,
"learning_rate": 6.875000000000001e-06,
"loss": 0.7191,
"step": 56
},
{
"epoch": 4.071428571428571,
"grad_norm": 1.7454973459243774,
"learning_rate": 7.000000000000001e-06,
"loss": 0.8047,
"step": 57
},
{
"epoch": 4.142857142857143,
"grad_norm": 1.5601106882095337,
"learning_rate": 7.1249999999999995e-06,
"loss": 0.8023,
"step": 58
},
{
"epoch": 4.214285714285714,
"grad_norm": 1.1704920530319214,
"learning_rate": 7.25e-06,
"loss": 0.8011,
"step": 59
},
{
"epoch": 4.285714285714286,
"grad_norm": 0.7773192524909973,
"learning_rate": 7.375e-06,
"loss": 0.7922,
"step": 60
},
{
"epoch": 4.357142857142857,
"grad_norm": 0.672889769077301,
"learning_rate": 7.5e-06,
"loss": 0.7916,
"step": 61
},
{
"epoch": 4.428571428571429,
"grad_norm": 1.138426423072815,
"learning_rate": 7.625e-06,
"loss": 0.7903,
"step": 62
},
{
"epoch": 4.5,
"grad_norm": 1.2380563020706177,
"learning_rate": 7.75e-06,
"loss": 0.7765,
"step": 63
},
{
"epoch": 4.571428571428571,
"grad_norm": 1.3037537336349487,
"learning_rate": 7.875e-06,
"loss": 0.7729,
"step": 64
},
{
"epoch": 4.642857142857143,
"grad_norm": 0.9317808151245117,
"learning_rate": 8.000000000000001e-06,
"loss": 0.7885,
"step": 65
},
{
"epoch": 4.714285714285714,
"grad_norm": 0.8518655300140381,
"learning_rate": 8.125000000000001e-06,
"loss": 0.7827,
"step": 66
},
{
"epoch": 4.785714285714286,
"grad_norm": 0.7318400740623474,
"learning_rate": 8.25e-06,
"loss": 0.7849,
"step": 67
},
{
"epoch": 4.857142857142857,
"grad_norm": 0.6134525537490845,
"learning_rate": 8.375e-06,
"loss": 0.7809,
"step": 68
},
{
"epoch": 4.928571428571429,
"grad_norm": 0.7042633295059204,
"learning_rate": 8.500000000000002e-06,
"loss": 0.7784,
"step": 69
},
{
"epoch": 5.0,
"grad_norm": 0.7181460857391357,
"learning_rate": 8.625e-06,
"loss": 0.783,
"step": 70
},
{
"epoch": 5.071428571428571,
"grad_norm": 0.9051682353019714,
"learning_rate": 8.75e-06,
"loss": 0.7986,
"step": 71
},
{
"epoch": 5.142857142857143,
"grad_norm": 0.9225665330886841,
"learning_rate": 8.875e-06,
"loss": 0.7987,
"step": 72
},
{
"epoch": 5.214285714285714,
"grad_norm": 0.826231837272644,
"learning_rate": 9e-06,
"loss": 0.8017,
"step": 73
},
{
"epoch": 5.285714285714286,
"grad_norm": 0.6356866955757141,
"learning_rate": 9.125e-06,
"loss": 0.7975,
"step": 74
},
{
"epoch": 5.357142857142857,
"grad_norm": 0.610805332660675,
"learning_rate": 9.25e-06,
"loss": 0.7975,
"step": 75
},
{
"epoch": 5.428571428571429,
"grad_norm": 0.5991847515106201,
"learning_rate": 9.375000000000001e-06,
"loss": 0.8004,
"step": 76
},
{
"epoch": 5.5,
"grad_norm": 0.561540961265564,
"learning_rate": 9.5e-06,
"loss": 0.7856,
"step": 77
},
{
"epoch": 5.571428571428571,
"grad_norm": 0.676536500453949,
"learning_rate": 9.625e-06,
"loss": 0.7836,
"step": 78
},
{
"epoch": 5.642857142857143,
"grad_norm": 2.1673362255096436,
"learning_rate": 9.750000000000002e-06,
"loss": 0.7161,
"step": 79
},
{
"epoch": 5.714285714285714,
"grad_norm": 1.967721939086914,
"learning_rate": 9.875000000000001e-06,
"loss": 0.7125,
"step": 80
},
{
"epoch": 5.785714285714286,
"grad_norm": 1.675350308418274,
"learning_rate": 1e-05,
"loss": 0.7037,
"step": 81
},
{
"epoch": 5.857142857142857,
"grad_norm": 1.2397103309631348,
"learning_rate": 1.0125e-05,
"loss": 0.6994,
"step": 82
},
{
"epoch": 5.928571428571429,
"grad_norm": 0.916667103767395,
"learning_rate": 1.025e-05,
"loss": 0.6936,
"step": 83
},
{
"epoch": 6.0,
"grad_norm": 0.5249179601669312,
"learning_rate": 1.0375e-05,
"loss": 0.6876,
"step": 84
},
{
"epoch": 6.071428571428571,
"grad_norm": 0.5052206516265869,
"learning_rate": 1.05e-05,
"loss": 0.6884,
"step": 85
},
{
"epoch": 6.142857142857143,
"grad_norm": 0.6524572968482971,
"learning_rate": 1.0625e-05,
"loss": 0.6923,
"step": 86
},
{
"epoch": 6.214285714285714,
"grad_norm": 0.8058570623397827,
"learning_rate": 1.075e-05,
"loss": 0.6922,
"step": 87
},
{
"epoch": 6.285714285714286,
"grad_norm": 0.8595154285430908,
"learning_rate": 1.0875e-05,
"loss": 0.6924,
"step": 88
},
{
"epoch": 6.357142857142857,
"grad_norm": 0.8969433307647705,
"learning_rate": 1.1000000000000001e-05,
"loss": 0.689,
"step": 89
},
{
"epoch": 6.428571428571429,
"grad_norm": 0.9493811130523682,
"learning_rate": 1.1125000000000001e-05,
"loss": 0.6907,
"step": 90
},
{
"epoch": 6.5,
"grad_norm": 1.3903621435165405,
"learning_rate": 1.125e-05,
"loss": 0.7704,
"step": 91
},
{
"epoch": 6.571428571428571,
"grad_norm": 1.2406781911849976,
"learning_rate": 1.1375e-05,
"loss": 0.7695,
"step": 92
},
{
"epoch": 6.642857142857143,
"grad_norm": 1.0621867179870605,
"learning_rate": 1.1500000000000002e-05,
"loss": 0.7746,
"step": 93
},
{
"epoch": 6.714285714285714,
"grad_norm": 0.8159370422363281,
"learning_rate": 1.1625000000000001e-05,
"loss": 0.7805,
"step": 94
},
{
"epoch": 6.785714285714286,
"grad_norm": 0.6595308184623718,
"learning_rate": 1.175e-05,
"loss": 0.7718,
"step": 95
},
{
"epoch": 6.857142857142857,
"grad_norm": 0.5958001613616943,
"learning_rate": 1.1875e-05,
"loss": 0.7728,
"step": 96
},
{
"epoch": 6.928571428571429,
"grad_norm": 0.7216199636459351,
"learning_rate": 1.2e-05,
"loss": 0.7711,
"step": 97
},
{
"epoch": 7.0,
"grad_norm": 0.8589462041854858,
"learning_rate": 1.2125e-05,
"loss": 0.7789,
"step": 98
},
{
"epoch": 7.071428571428571,
"grad_norm": 1.2068138122558594,
"learning_rate": 1.225e-05,
"loss": 0.7538,
"step": 99
},
{
"epoch": 7.142857142857143,
"grad_norm": 1.1054496765136719,
"learning_rate": 1.2375000000000001e-05,
"loss": 0.7567,
"step": 100
},
{
"epoch": 7.214285714285714,
"grad_norm": 1.0165095329284668,
"learning_rate": 1.25e-05,
"loss": 0.7537,
"step": 101
},
{
"epoch": 7.285714285714286,
"grad_norm": 0.7962751984596252,
"learning_rate": 1.2625e-05,
"loss": 0.7488,
"step": 102
},
{
"epoch": 7.357142857142857,
"grad_norm": 0.7194035649299622,
"learning_rate": 1.2750000000000002e-05,
"loss": 0.7496,
"step": 103
},
{
"epoch": 7.428571428571429,
"grad_norm": 0.6004267930984497,
"learning_rate": 1.2875000000000001e-05,
"loss": 0.7483,
"step": 104
},
{
"epoch": 7.5,
"grad_norm": 0.7990235090255737,
"learning_rate": 1.3000000000000001e-05,
"loss": 0.712,
"step": 105
},
{
"epoch": 7.571428571428571,
"grad_norm": 0.6726771593093872,
"learning_rate": 1.3125e-05,
"loss": 0.704,
"step": 106
},
{
"epoch": 7.642857142857143,
"grad_norm": 1.0569907426834106,
"learning_rate": 1.3250000000000002e-05,
"loss": 0.7688,
"step": 107
},
{
"epoch": 7.714285714285714,
"grad_norm": 1.0951803922653198,
"learning_rate": 1.3375000000000002e-05,
"loss": 0.7709,
"step": 108
},
{
"epoch": 7.785714285714286,
"grad_norm": 1.11770498752594,
"learning_rate": 1.3500000000000001e-05,
"loss": 0.7711,
"step": 109
},
{
"epoch": 7.857142857142857,
"grad_norm": 0.9682809710502625,
"learning_rate": 1.3625e-05,
"loss": 0.7677,
"step": 110
},
{
"epoch": 7.928571428571429,
"grad_norm": 0.8097149729728699,
"learning_rate": 1.3750000000000002e-05,
"loss": 0.7684,
"step": 111
},
{
"epoch": 8.0,
"grad_norm": 0.6262074708938599,
"learning_rate": 1.3875000000000002e-05,
"loss": 0.7701,
"step": 112
},
{
"epoch": 8.071428571428571,
"grad_norm": 0.6234034299850464,
"learning_rate": 1.4000000000000001e-05,
"loss": 0.7845,
"step": 113
},
{
"epoch": 8.142857142857142,
"grad_norm": 0.7203190922737122,
"learning_rate": 1.4125e-05,
"loss": 0.7809,
"step": 114
},
{
"epoch": 8.214285714285714,
"grad_norm": 0.5600826740264893,
"learning_rate": 1.4249999999999999e-05,
"loss": 0.7836,
"step": 115
},
{
"epoch": 8.285714285714286,
"grad_norm": 0.6843335032463074,
"learning_rate": 1.4374999999999999e-05,
"loss": 0.7854,
"step": 116
},
{
"epoch": 8.357142857142858,
"grad_norm": 0.6722122430801392,
"learning_rate": 1.45e-05,
"loss": 0.781,
"step": 117
},
{
"epoch": 8.428571428571429,
"grad_norm": 0.6493569612503052,
"learning_rate": 1.4625e-05,
"loss": 0.7769,
"step": 118
},
{
"epoch": 8.5,
"grad_norm": 0.7080219388008118,
"learning_rate": 1.475e-05,
"loss": 0.7745,
"step": 119
},
{
"epoch": 8.571428571428571,
"grad_norm": 0.7532919049263,
"learning_rate": 1.4875e-05,
"loss": 0.7822,
"step": 120
},
{
"epoch": 8.642857142857142,
"grad_norm": 0.8535563945770264,
"learning_rate": 1.5e-05,
"loss": 0.7609,
"step": 121
},
{
"epoch": 8.714285714285714,
"grad_norm": 0.7034247517585754,
"learning_rate": 1.5125e-05,
"loss": 0.7635,
"step": 122
},
{
"epoch": 8.785714285714286,
"grad_norm": 0.6291443705558777,
"learning_rate": 1.525e-05,
"loss": 0.763,
"step": 123
},
{
"epoch": 8.857142857142858,
"grad_norm": 0.6242823004722595,
"learning_rate": 1.5375e-05,
"loss": 0.7551,
"step": 124
},
{
"epoch": 8.928571428571429,
"grad_norm": 0.5362811088562012,
"learning_rate": 1.55e-05,
"loss": 0.7605,
"step": 125
},
{
"epoch": 9.0,
"grad_norm": 0.7345648407936096,
"learning_rate": 1.5625e-05,
"loss": 0.7597,
"step": 126
},
{
"epoch": 9.071428571428571,
"grad_norm": 0.6531787514686584,
"learning_rate": 1.575e-05,
"loss": 0.7675,
"step": 127
},
{
"epoch": 9.142857142857142,
"grad_norm": 0.6141201257705688,
"learning_rate": 1.5875e-05,
"loss": 0.7608,
"step": 128
},
{
"epoch": 9.214285714285714,
"grad_norm": 0.6407638192176819,
"learning_rate": 1.6000000000000003e-05,
"loss": 0.7648,
"step": 129
},
{
"epoch": 9.285714285714286,
"grad_norm": 0.5688261389732361,
"learning_rate": 1.6125000000000002e-05,
"loss": 0.7553,
"step": 130
},
{
"epoch": 9.357142857142858,
"grad_norm": 0.5389134287834167,
"learning_rate": 1.6250000000000002e-05,
"loss": 0.7599,
"step": 131
},
{
"epoch": 9.428571428571429,
"grad_norm": 0.5594817399978638,
"learning_rate": 1.6375e-05,
"loss": 0.7513,
"step": 132
},
{
"epoch": 9.5,
"grad_norm": 0.5750069618225098,
"learning_rate": 1.65e-05,
"loss": 0.7649,
"step": 133
},
{
"epoch": 9.571428571428571,
"grad_norm": 0.661956250667572,
"learning_rate": 1.6625e-05,
"loss": 0.7663,
"step": 134
},
{
"epoch": 9.642857142857142,
"grad_norm": 0.5706619620323181,
"learning_rate": 1.675e-05,
"loss": 0.7643,
"step": 135
},
{
"epoch": 9.714285714285714,
"grad_norm": 0.5953116416931152,
"learning_rate": 1.6875000000000004e-05,
"loss": 0.7642,
"step": 136
},
{
"epoch": 9.785714285714286,
"grad_norm": 0.7578563690185547,
"learning_rate": 1.7000000000000003e-05,
"loss": 0.7617,
"step": 137
},
{
"epoch": 9.857142857142858,
"grad_norm": 0.5435178875923157,
"learning_rate": 1.7125000000000003e-05,
"loss": 0.7629,
"step": 138
},
{
"epoch": 9.928571428571429,
"grad_norm": 0.5672174692153931,
"learning_rate": 1.725e-05,
"loss": 0.7625,
"step": 139
},
{
"epoch": 10.0,
"grad_norm": 0.5299157500267029,
"learning_rate": 1.7375e-05,
"loss": 0.7581,
"step": 140
},
{
"epoch": 10.071428571428571,
"grad_norm": 0.9660478234291077,
"learning_rate": 1.75e-05,
"loss": 0.7539,
"step": 141
},
{
"epoch": 10.142857142857142,
"grad_norm": 0.6911635994911194,
"learning_rate": 1.7625e-05,
"loss": 0.759,
"step": 142
},
{
"epoch": 10.214285714285714,
"grad_norm": 0.6531702876091003,
"learning_rate": 1.775e-05,
"loss": 0.7464,
"step": 143
},
{
"epoch": 10.285714285714286,
"grad_norm": 0.5785026550292969,
"learning_rate": 1.7875e-05,
"loss": 0.7577,
"step": 144
},
{
"epoch": 10.357142857142858,
"grad_norm": 0.5896267890930176,
"learning_rate": 1.8e-05,
"loss": 0.7516,
"step": 145
},
{
"epoch": 10.428571428571429,
"grad_norm": 0.678227961063385,
"learning_rate": 1.8125e-05,
"loss": 0.75,
"step": 146
},
{
"epoch": 10.5,
"grad_norm": 0.6887372136116028,
"learning_rate": 1.825e-05,
"loss": 0.7848,
"step": 147
},
{
"epoch": 10.571428571428571,
"grad_norm": 0.6198720335960388,
"learning_rate": 1.8375e-05,
"loss": 0.7833,
"step": 148
},
{
"epoch": 10.642857142857142,
"grad_norm": 0.5759323239326477,
"learning_rate": 1.85e-05,
"loss": 0.7677,
"step": 149
},
{
"epoch": 10.714285714285714,
"grad_norm": 0.6369615197181702,
"learning_rate": 1.8625000000000002e-05,
"loss": 0.7633,
"step": 150
},
{
"epoch": 10.785714285714286,
"grad_norm": 0.6877391338348389,
"learning_rate": 1.8750000000000002e-05,
"loss": 0.7685,
"step": 151
},
{
"epoch": 10.857142857142858,
"grad_norm": 0.8872264623641968,
"learning_rate": 1.8875e-05,
"loss": 0.7759,
"step": 152
},
{
"epoch": 10.928571428571429,
"grad_norm": 0.6468808054924011,
"learning_rate": 1.9e-05,
"loss": 0.764,
"step": 153
},
{
"epoch": 11.0,
"grad_norm": 0.6648755073547363,
"learning_rate": 1.9125e-05,
"loss": 0.7714,
"step": 154
},
{
"epoch": 11.071428571428571,
"grad_norm": 0.9788458347320557,
"learning_rate": 1.925e-05,
"loss": 0.7274,
"step": 155
},
{
"epoch": 11.142857142857142,
"grad_norm": 0.8279010653495789,
"learning_rate": 1.9375e-05,
"loss": 0.7115,
"step": 156
},
{
"epoch": 11.214285714285714,
"grad_norm": 0.5429275631904602,
"learning_rate": 1.9500000000000003e-05,
"loss": 0.7157,
"step": 157
},
{
"epoch": 11.285714285714286,
"grad_norm": 0.5696080327033997,
"learning_rate": 1.9625000000000003e-05,
"loss": 0.7229,
"step": 158
},
{
"epoch": 11.357142857142858,
"grad_norm": 0.7434468865394592,
"learning_rate": 1.9750000000000002e-05,
"loss": 0.7235,
"step": 159
},
{
"epoch": 11.428571428571429,
"grad_norm": 0.6734916567802429,
"learning_rate": 1.9875000000000002e-05,
"loss": 0.7156,
"step": 160
},
{
"epoch": 11.5,
"grad_norm": 0.6256027221679688,
"learning_rate": 2e-05,
"loss": 0.6624,
"step": 161
},
{
"epoch": 11.571428571428571,
"grad_norm": 0.5517652630805969,
"learning_rate": 2.0125e-05,
"loss": 0.6686,
"step": 162
},
{
"epoch": 11.642857142857142,
"grad_norm": 1.057498812675476,
"learning_rate": 2.025e-05,
"loss": 0.7649,
"step": 163
},
{
"epoch": 11.714285714285714,
"grad_norm": 0.8094059824943542,
"learning_rate": 2.0375e-05,
"loss": 0.77,
"step": 164
},
{
"epoch": 11.785714285714286,
"grad_norm": 0.6059132814407349,
"learning_rate": 2.05e-05,
"loss": 0.7598,
"step": 165
},
{
"epoch": 11.857142857142858,
"grad_norm": 0.7302872538566589,
"learning_rate": 2.0625e-05,
"loss": 0.7645,
"step": 166
},
{
"epoch": 11.928571428571429,
"grad_norm": 0.7135368585586548,
"learning_rate": 2.075e-05,
"loss": 0.7567,
"step": 167
},
{
"epoch": 12.0,
"grad_norm": 0.6311301589012146,
"learning_rate": 2.0875e-05,
"loss": 0.7627,
"step": 168
},
{
"epoch": 12.071428571428571,
"grad_norm": 0.8360495567321777,
"learning_rate": 2.1e-05,
"loss": 0.7207,
"step": 169
},
{
"epoch": 12.142857142857142,
"grad_norm": 0.6071323156356812,
"learning_rate": 2.1125000000000002e-05,
"loss": 0.72,
"step": 170
},
{
"epoch": 12.214285714285714,
"grad_norm": 0.7711718082427979,
"learning_rate": 2.125e-05,
"loss": 0.721,
"step": 171
},
{
"epoch": 12.285714285714286,
"grad_norm": 0.5850419402122498,
"learning_rate": 2.1375e-05,
"loss": 0.7224,
"step": 172
},
{
"epoch": 12.357142857142858,
"grad_norm": 0.7409706115722656,
"learning_rate": 2.15e-05,
"loss": 0.7208,
"step": 173
},
{
"epoch": 12.428571428571429,
"grad_norm": 0.6865264177322388,
"learning_rate": 2.1625e-05,
"loss": 0.7247,
"step": 174
},
{
"epoch": 12.5,
"grad_norm": 0.7756849527359009,
"learning_rate": 2.175e-05,
"loss": 0.6742,
"step": 175
},
{
"epoch": 12.571428571428571,
"grad_norm": 0.7514200806617737,
"learning_rate": 2.1875e-05,
"loss": 0.671,
"step": 176
},
{
"epoch": 12.642857142857142,
"grad_norm": 1.246495246887207,
"learning_rate": 2.2000000000000003e-05,
"loss": 0.7796,
"step": 177
},
{
"epoch": 12.714285714285714,
"grad_norm": 1.0580798387527466,
"learning_rate": 2.2125000000000002e-05,
"loss": 0.7736,
"step": 178
},
{
"epoch": 12.785714285714286,
"grad_norm": 0.7741261720657349,
"learning_rate": 2.2250000000000002e-05,
"loss": 0.7688,
"step": 179
},
{
"epoch": 12.857142857142858,
"grad_norm": 0.6478248834609985,
"learning_rate": 2.2375000000000002e-05,
"loss": 0.7714,
"step": 180
},
{
"epoch": 12.928571428571429,
"grad_norm": 0.6844660043716431,
"learning_rate": 2.25e-05,
"loss": 0.7723,
"step": 181
},
{
"epoch": 13.0,
"grad_norm": 0.7990371584892273,
"learning_rate": 2.2625e-05,
"loss": 0.7792,
"step": 182
},
{
"epoch": 13.071428571428571,
"grad_norm": 1.4357235431671143,
"learning_rate": 2.275e-05,
"loss": 0.7387,
"step": 183
},
{
"epoch": 13.142857142857142,
"grad_norm": 1.2491614818572998,
"learning_rate": 2.2875e-05,
"loss": 0.7322,
"step": 184
},
{
"epoch": 13.214285714285714,
"grad_norm": 0.9462071657180786,
"learning_rate": 2.3000000000000003e-05,
"loss": 0.716,
"step": 185
},
{
"epoch": 13.285714285714286,
"grad_norm": 0.577681839466095,
"learning_rate": 2.3125000000000003e-05,
"loss": 0.7191,
"step": 186
},
{
"epoch": 13.357142857142858,
"grad_norm": 0.6737596988677979,
"learning_rate": 2.3250000000000003e-05,
"loss": 0.7253,
"step": 187
},
{
"epoch": 13.428571428571429,
"grad_norm": 0.8452826738357544,
"learning_rate": 2.3375000000000002e-05,
"loss": 0.7176,
"step": 188
},
{
"epoch": 13.5,
"grad_norm": 1.248802900314331,
"learning_rate": 2.35e-05,
"loss": 0.7485,
"step": 189
},
{
"epoch": 13.571428571428571,
"grad_norm": 1.0237886905670166,
"learning_rate": 2.3624999999999998e-05,
"loss": 0.748,
"step": 190
},
{
"epoch": 13.642857142857142,
"grad_norm": 0.6560722589492798,
"learning_rate": 2.375e-05,
"loss": 0.7386,
"step": 191
},
{
"epoch": 13.714285714285714,
"grad_norm": 0.5957949161529541,
"learning_rate": 2.3875e-05,
"loss": 0.7377,
"step": 192
},
{
"epoch": 13.785714285714286,
"grad_norm": 0.6464928388595581,
"learning_rate": 2.4e-05,
"loss": 0.7405,
"step": 193
},
{
"epoch": 13.857142857142858,
"grad_norm": 0.8083170056343079,
"learning_rate": 2.4125e-05,
"loss": 0.7392,
"step": 194
},
{
"epoch": 13.928571428571429,
"grad_norm": 0.7957527041435242,
"learning_rate": 2.425e-05,
"loss": 0.7458,
"step": 195
},
{
"epoch": 14.0,
"grad_norm": 0.7611489295959473,
"learning_rate": 2.4375e-05,
"loss": 0.7317,
"step": 196
},
{
"epoch": 14.071428571428571,
"grad_norm": 0.750963568687439,
"learning_rate": 2.45e-05,
"loss": 0.7238,
"step": 197
},
{
"epoch": 14.142857142857142,
"grad_norm": 0.6039023995399475,
"learning_rate": 2.4625000000000002e-05,
"loss": 0.7155,
"step": 198
},
{
"epoch": 14.214285714285714,
"grad_norm": 0.7066697478294373,
"learning_rate": 2.4750000000000002e-05,
"loss": 0.7342,
"step": 199
},
{
"epoch": 14.285714285714286,
"grad_norm": 0.7328922748565674,
"learning_rate": 2.4875e-05,
"loss": 0.7237,
"step": 200
},
{
"epoch": 14.357142857142858,
"grad_norm": 0.7922976016998291,
"learning_rate": 2.5e-05,
"loss": 0.722,
"step": 201
},
{
"epoch": 14.428571428571429,
"grad_norm": 0.6235881447792053,
"learning_rate": 2.4999980961416097e-05,
"loss": 0.7193,
"step": 202
},
{
"epoch": 14.5,
"grad_norm": 0.6755703687667847,
"learning_rate": 2.499992384572238e-05,
"loss": 0.712,
"step": 203
},
{
"epoch": 14.571428571428571,
"grad_norm": 0.5535378456115723,
"learning_rate": 2.4999828653092835e-05,
"loss": 0.7164,
"step": 204
},
{
"epoch": 14.642857142857142,
"grad_norm": 0.6555245518684387,
"learning_rate": 2.4999695383817435e-05,
"loss": 0.7258,
"step": 205
},
{
"epoch": 14.714285714285714,
"grad_norm": 0.6272715330123901,
"learning_rate": 2.499952403830214e-05,
"loss": 0.7247,
"step": 206
},
{
"epoch": 14.785714285714286,
"grad_norm": 0.5653781890869141,
"learning_rate": 2.4999314617068904e-05,
"loss": 0.7268,
"step": 207
},
{
"epoch": 14.857142857142858,
"grad_norm": 0.7756087779998779,
"learning_rate": 2.4999067120755652e-05,
"loss": 0.7302,
"step": 208
},
{
"epoch": 14.928571428571429,
"grad_norm": 0.7235432863235474,
"learning_rate": 2.4998781550116305e-05,
"loss": 0.7151,
"step": 209
},
{
"epoch": 15.0,
"grad_norm": 0.659776508808136,
"learning_rate": 2.499845790602076e-05,
"loss": 0.7298,
"step": 210
},
{
"epoch": 15.071428571428571,
"grad_norm": 0.6604132652282715,
"learning_rate": 2.4998096189454893e-05,
"loss": 0.7363,
"step": 211
},
{
"epoch": 15.142857142857142,
"grad_norm": 0.5568569898605347,
"learning_rate": 2.4997696401520555e-05,
"loss": 0.7426,
"step": 212
},
{
"epoch": 15.214285714285714,
"grad_norm": 0.6004931926727295,
"learning_rate": 2.499725854343557e-05,
"loss": 0.7295,
"step": 213
},
{
"epoch": 15.285714285714286,
"grad_norm": 0.6031244993209839,
"learning_rate": 2.4996782616533732e-05,
"loss": 0.7335,
"step": 214
},
{
"epoch": 15.357142857142858,
"grad_norm": 0.612106204032898,
"learning_rate": 2.499626862226479e-05,
"loss": 0.7365,
"step": 215
},
{
"epoch": 15.428571428571429,
"grad_norm": 0.6817440390586853,
"learning_rate": 2.4995716562194465e-05,
"loss": 0.7241,
"step": 216
},
{
"epoch": 15.5,
"grad_norm": 0.6546328067779541,
"learning_rate": 2.499512643800443e-05,
"loss": 0.7176,
"step": 217
},
{
"epoch": 15.571428571428571,
"grad_norm": 0.5789527893066406,
"learning_rate": 2.4994498251492302e-05,
"loss": 0.7207,
"step": 218
},
{
"epoch": 15.642857142857142,
"grad_norm": 0.8149313926696777,
"learning_rate": 2.4993832004571646e-05,
"loss": 0.6817,
"step": 219
},
{
"epoch": 15.714285714285714,
"grad_norm": 0.7203310132026672,
"learning_rate": 2.4993127699271966e-05,
"loss": 0.6925,
"step": 220
},
{
"epoch": 15.785714285714286,
"grad_norm": 0.6413833498954773,
"learning_rate": 2.49923853377387e-05,
"loss": 0.6839,
"step": 221
},
{
"epoch": 15.857142857142858,
"grad_norm": 0.6889573335647583,
"learning_rate": 2.4991604922233204e-05,
"loss": 0.6826,
"step": 222
},
{
"epoch": 15.928571428571429,
"grad_norm": 0.6012479066848755,
"learning_rate": 2.4990786455132764e-05,
"loss": 0.682,
"step": 223
},
{
"epoch": 16.0,
"grad_norm": 0.7324628233909607,
"learning_rate": 2.4989929938930576e-05,
"loss": 0.6801,
"step": 224
},
{
"epoch": 16.071428571428573,
"grad_norm": 0.9579339623451233,
"learning_rate": 2.498903537623573e-05,
"loss": 0.7441,
"step": 225
},
{
"epoch": 16.142857142857142,
"grad_norm": 0.7767286896705627,
"learning_rate": 2.4988102769773227e-05,
"loss": 0.7361,
"step": 226
},
{
"epoch": 16.214285714285715,
"grad_norm": 0.8367361426353455,
"learning_rate": 2.4987132122383936e-05,
"loss": 0.7314,
"step": 227
},
{
"epoch": 16.285714285714285,
"grad_norm": 0.7636885046958923,
"learning_rate": 2.4986123437024627e-05,
"loss": 0.7394,
"step": 228
},
{
"epoch": 16.357142857142858,
"grad_norm": 0.739570140838623,
"learning_rate": 2.4985076716767927e-05,
"loss": 0.7267,
"step": 229
},
{
"epoch": 16.428571428571427,
"grad_norm": 0.6783609986305237,
"learning_rate": 2.4983991964802327e-05,
"loss": 0.7198,
"step": 230
},
{
"epoch": 16.5,
"grad_norm": 0.7383100986480713,
"learning_rate": 2.4982869184432174e-05,
"loss": 0.7264,
"step": 231
},
{
"epoch": 16.571428571428573,
"grad_norm": 1.1309088468551636,
"learning_rate": 2.498170837907765e-05,
"loss": 0.7285,
"step": 232
},
{
"epoch": 16.642857142857142,
"grad_norm": 0.8620618581771851,
"learning_rate": 2.4980509552274765e-05,
"loss": 0.6829,
"step": 233
},
{
"epoch": 16.714285714285715,
"grad_norm": 1.0242780447006226,
"learning_rate": 2.4979272707675356e-05,
"loss": 0.6864,
"step": 234
},
{
"epoch": 16.785714285714285,
"grad_norm": 0.827475368976593,
"learning_rate": 2.497799784904707e-05,
"loss": 0.6806,
"step": 235
},
{
"epoch": 16.857142857142858,
"grad_norm": 0.72999107837677,
"learning_rate": 2.4976684980273338e-05,
"loss": 0.6813,
"step": 236
},
{
"epoch": 16.928571428571427,
"grad_norm": 0.7277812957763672,
"learning_rate": 2.4975334105353396e-05,
"loss": 0.6721,
"step": 237
},
{
"epoch": 17.0,
"grad_norm": 0.7639715671539307,
"learning_rate": 2.497394522840224e-05,
"loss": 0.6754,
"step": 238
},
{
"epoch": 17.071428571428573,
"grad_norm": 1.5324257612228394,
"learning_rate": 2.4972518353650626e-05,
"loss": 0.7489,
"step": 239
},
{
"epoch": 17.142857142857142,
"grad_norm": 1.061765193939209,
"learning_rate": 2.497105348544507e-05,
"loss": 0.7334,
"step": 240
},
{
"epoch": 17.214285714285715,
"grad_norm": 0.7973777651786804,
"learning_rate": 2.4969550628247805e-05,
"loss": 0.7255,
"step": 241
},
{
"epoch": 17.285714285714285,
"grad_norm": 0.8032246828079224,
"learning_rate": 2.49680097866368e-05,
"loss": 0.7249,
"step": 242
},
{
"epoch": 17.357142857142858,
"grad_norm": 0.8305944204330444,
"learning_rate": 2.4966430965305727e-05,
"loss": 0.7303,
"step": 243
},
{
"epoch": 17.428571428571427,
"grad_norm": 0.90361088514328,
"learning_rate": 2.4964814169063948e-05,
"loss": 0.726,
"step": 244
},
{
"epoch": 17.5,
"grad_norm": 0.9854000210762024,
"learning_rate": 2.4963159402836506e-05,
"loss": 0.7047,
"step": 245
},
{
"epoch": 17.571428571428573,
"grad_norm": 0.9168617725372314,
"learning_rate": 2.49614666716641e-05,
"loss": 0.7009,
"step": 246
},
{
"epoch": 17.642857142857142,
"grad_norm": 1.0073875188827515,
"learning_rate": 2.495973598070309e-05,
"loss": 0.7611,
"step": 247
},
{
"epoch": 17.714285714285715,
"grad_norm": 1.3313695192337036,
"learning_rate": 2.4957967335225456e-05,
"loss": 0.7705,
"step": 248
},
{
"epoch": 17.785714285714285,
"grad_norm": 0.9598591923713684,
"learning_rate": 2.4956160740618806e-05,
"loss": 0.7617,
"step": 249
},
{
"epoch": 17.857142857142858,
"grad_norm": 0.8475302457809448,
"learning_rate": 2.495431620238633e-05,
"loss": 0.7469,
"step": 250
},
{
"epoch": 17.928571428571427,
"grad_norm": 0.7061119675636292,
"learning_rate": 2.495243372614682e-05,
"loss": 0.7636,
"step": 251
},
{
"epoch": 18.0,
"grad_norm": 0.8885191679000854,
"learning_rate": 2.495051331763462e-05,
"loss": 0.7456,
"step": 252
},
{
"epoch": 18.071428571428573,
"grad_norm": 1.5245048999786377,
"learning_rate": 2.494855498269963e-05,
"loss": 0.7002,
"step": 253
},
{
"epoch": 18.142857142857142,
"grad_norm": 1.2819387912750244,
"learning_rate": 2.4946558727307277e-05,
"loss": 0.6848,
"step": 254
},
{
"epoch": 18.214285714285715,
"grad_norm": 1.09604811668396,
"learning_rate": 2.4944524557538503e-05,
"loss": 0.6706,
"step": 255
},
{
"epoch": 18.285714285714285,
"grad_norm": 0.8738585710525513,
"learning_rate": 2.4942452479589735e-05,
"loss": 0.6699,
"step": 256
},
{
"epoch": 18.357142857142858,
"grad_norm": 0.745101273059845,
"learning_rate": 2.494034249977289e-05,
"loss": 0.6669,
"step": 257
},
{
"epoch": 18.428571428571427,
"grad_norm": 0.7654664516448975,
"learning_rate": 2.4938194624515333e-05,
"loss": 0.6564,
"step": 258
},
{
"epoch": 18.5,
"grad_norm": 1.1224812269210815,
"learning_rate": 2.4936008860359854e-05,
"loss": 0.6996,
"step": 259
},
{
"epoch": 18.571428571428573,
"grad_norm": 1.1067287921905518,
"learning_rate": 2.4933785213964677e-05,
"loss": 0.6967,
"step": 260
},
{
"epoch": 18.642857142857142,
"grad_norm": 0.9101780652999878,
"learning_rate": 2.4931523692103418e-05,
"loss": 0.6702,
"step": 261
},
{
"epoch": 18.714285714285715,
"grad_norm": 1.0092182159423828,
"learning_rate": 2.492922430166506e-05,
"loss": 0.6802,
"step": 262
},
{
"epoch": 18.785714285714285,
"grad_norm": 0.8685599565505981,
"learning_rate": 2.4926887049653943e-05,
"loss": 0.6728,
"step": 263
},
{
"epoch": 18.857142857142858,
"grad_norm": 0.9305260181427002,
"learning_rate": 2.492451194318975e-05,
"loss": 0.6767,
"step": 264
},
{
"epoch": 18.928571428571427,
"grad_norm": 0.7498168349266052,
"learning_rate": 2.4922098989507454e-05,
"loss": 0.6679,
"step": 265
},
{
"epoch": 19.0,
"grad_norm": 0.7739676833152771,
"learning_rate": 2.4919648195957344e-05,
"loss": 0.6659,
"step": 266
},
{
"epoch": 19.071428571428573,
"grad_norm": 0.8912931084632874,
"learning_rate": 2.4917159570004954e-05,
"loss": 0.6866,
"step": 267
},
{
"epoch": 19.142857142857142,
"grad_norm": 0.7929975986480713,
"learning_rate": 2.491463311923108e-05,
"loss": 0.6826,
"step": 268
},
{
"epoch": 19.214285714285715,
"grad_norm": 0.8658651113510132,
"learning_rate": 2.491206885133171e-05,
"loss": 0.6868,
"step": 269
},
{
"epoch": 19.285714285714285,
"grad_norm": 0.8181875944137573,
"learning_rate": 2.490946677411807e-05,
"loss": 0.6822,
"step": 270
},
{
"epoch": 19.357142857142858,
"grad_norm": 0.7899583578109741,
"learning_rate": 2.4906826895516528e-05,
"loss": 0.6851,
"step": 271
},
{
"epoch": 19.428571428571427,
"grad_norm": 0.7281643152236938,
"learning_rate": 2.490414922356861e-05,
"loss": 0.6715,
"step": 272
},
{
"epoch": 19.5,
"grad_norm": 0.9926586151123047,
"learning_rate": 2.4901433766430975e-05,
"loss": 0.6844,
"step": 273
},
{
"epoch": 19.571428571428573,
"grad_norm": 0.7438750267028809,
"learning_rate": 2.4898680532375374e-05,
"loss": 0.6785,
"step": 274
},
{
"epoch": 19.642857142857142,
"grad_norm": 0.8924391865730286,
"learning_rate": 2.489588952978863e-05,
"loss": 0.7222,
"step": 275
},
{
"epoch": 19.714285714285715,
"grad_norm": 0.9729384779930115,
"learning_rate": 2.4893060767172632e-05,
"loss": 0.709,
"step": 276
},
{
"epoch": 19.785714285714285,
"grad_norm": 0.9596732258796692,
"learning_rate": 2.489019425314427e-05,
"loss": 0.6996,
"step": 277
},
{
"epoch": 19.857142857142858,
"grad_norm": 0.8963414430618286,
"learning_rate": 2.4887289996435452e-05,
"loss": 0.7058,
"step": 278
},
{
"epoch": 19.928571428571427,
"grad_norm": 0.767913281917572,
"learning_rate": 2.4884348005893045e-05,
"loss": 0.6979,
"step": 279
},
{
"epoch": 20.0,
"grad_norm": 0.7956211566925049,
"learning_rate": 2.488136829047886e-05,
"loss": 0.7011,
"step": 280
},
{
"epoch": 20.071428571428573,
"grad_norm": 0.7484908699989319,
"learning_rate": 2.487835085926963e-05,
"loss": 0.673,
"step": 281
},
{
"epoch": 20.142857142857142,
"grad_norm": 0.7686476707458496,
"learning_rate": 2.487529572145697e-05,
"loss": 0.6639,
"step": 282
},
{
"epoch": 20.214285714285715,
"grad_norm": 0.8410912156105042,
"learning_rate": 2.4872202886347362e-05,
"loss": 0.6762,
"step": 283
},
{
"epoch": 20.285714285714285,
"grad_norm": 0.804176390171051,
"learning_rate": 2.486907236336212e-05,
"loss": 0.671,
"step": 284
},
{
"epoch": 20.357142857142858,
"grad_norm": 0.7640565633773804,
"learning_rate": 2.4865904162037358e-05,
"loss": 0.665,
"step": 285
},
{
"epoch": 20.428571428571427,
"grad_norm": 0.9300803542137146,
"learning_rate": 2.4862698292023963e-05,
"loss": 0.6644,
"step": 286
},
{
"epoch": 20.5,
"grad_norm": 0.9111459851264954,
"learning_rate": 2.4859454763087577e-05,
"loss": 0.6623,
"step": 287
},
{
"epoch": 20.571428571428573,
"grad_norm": 1.0425329208374023,
"learning_rate": 2.4856173585108544e-05,
"loss": 0.6593,
"step": 288
},
{
"epoch": 20.642857142857142,
"grad_norm": 0.8418837785720825,
"learning_rate": 2.4852854768081912e-05,
"loss": 0.6588,
"step": 289
},
{
"epoch": 20.714285714285715,
"grad_norm": 0.8000286221504211,
"learning_rate": 2.4849498322117364e-05,
"loss": 0.6518,
"step": 290
},
{
"epoch": 20.785714285714285,
"grad_norm": 0.9251210689544678,
"learning_rate": 2.4846104257439222e-05,
"loss": 0.6608,
"step": 291
},
{
"epoch": 20.857142857142858,
"grad_norm": 1.1003504991531372,
"learning_rate": 2.4842672584386396e-05,
"loss": 0.6597,
"step": 292
},
{
"epoch": 20.928571428571427,
"grad_norm": 1.5497894287109375,
"learning_rate": 2.483920331341235e-05,
"loss": 0.6633,
"step": 293
},
{
"epoch": 21.0,
"grad_norm": 0.8097646832466125,
"learning_rate": 2.4835696455085093e-05,
"loss": 0.6455,
"step": 294
},
{
"epoch": 21.071428571428573,
"grad_norm": 1.112264633178711,
"learning_rate": 2.483215202008712e-05,
"loss": 0.6954,
"step": 295
},
{
"epoch": 21.142857142857142,
"grad_norm": 1.1363542079925537,
"learning_rate": 2.4828570019215396e-05,
"loss": 0.6809,
"step": 296
},
{
"epoch": 21.214285714285715,
"grad_norm": 0.9695779085159302,
"learning_rate": 2.4824950463381314e-05,
"loss": 0.681,
"step": 297
},
{
"epoch": 21.285714285714285,
"grad_norm": 0.8442912101745605,
"learning_rate": 2.482129336361067e-05,
"loss": 0.6814,
"step": 298
},
{
"epoch": 21.357142857142858,
"grad_norm": 1.031419038772583,
"learning_rate": 2.481759873104363e-05,
"loss": 0.6675,
"step": 299
},
{
"epoch": 21.428571428571427,
"grad_norm": 1.1369857788085938,
"learning_rate": 2.4813866576934676e-05,
"loss": 0.6711,
"step": 300
},
{
"epoch": 21.5,
"grad_norm": 1.0303454399108887,
"learning_rate": 2.4810096912652604e-05,
"loss": 0.6708,
"step": 301
},
{
"epoch": 21.571428571428573,
"grad_norm": 1.1198464632034302,
"learning_rate": 2.480628974968046e-05,
"loss": 0.6699,
"step": 302
},
{
"epoch": 21.642857142857142,
"grad_norm": 1.3571269512176514,
"learning_rate": 2.4802445099615525e-05,
"loss": 0.6511,
"step": 303
},
{
"epoch": 21.714285714285715,
"grad_norm": 1.0004920959472656,
"learning_rate": 2.479856297416927e-05,
"loss": 0.6333,
"step": 304
},
{
"epoch": 21.785714285714285,
"grad_norm": 1.2059205770492554,
"learning_rate": 2.4794643385167327e-05,
"loss": 0.6371,
"step": 305
},
{
"epoch": 21.857142857142858,
"grad_norm": 1.1341015100479126,
"learning_rate": 2.4790686344549436e-05,
"loss": 0.6175,
"step": 306
},
{
"epoch": 21.928571428571427,
"grad_norm": 0.7347421646118164,
"learning_rate": 2.478669186436943e-05,
"loss": 0.6144,
"step": 307
},
{
"epoch": 22.0,
"grad_norm": 1.150259256362915,
"learning_rate": 2.478265995679519e-05,
"loss": 0.6328,
"step": 308
},
{
"epoch": 22.071428571428573,
"grad_norm": 1.1301559209823608,
"learning_rate": 2.4778590634108613e-05,
"loss": 0.6658,
"step": 309
},
{
"epoch": 22.142857142857142,
"grad_norm": 0.7597207427024841,
"learning_rate": 2.4774483908705546e-05,
"loss": 0.6484,
"step": 310
},
{
"epoch": 22.214285714285715,
"grad_norm": 0.898051917552948,
"learning_rate": 2.4770339793095802e-05,
"loss": 0.6587,
"step": 311
},
{
"epoch": 22.285714285714285,
"grad_norm": 1.7075557708740234,
"learning_rate": 2.4766158299903062e-05,
"loss": 0.6608,
"step": 312
},
{
"epoch": 22.357142857142858,
"grad_norm": 1.6019538640975952,
"learning_rate": 2.4761939441864895e-05,
"loss": 0.6453,
"step": 313
},
{
"epoch": 22.428571428571427,
"grad_norm": 0.953170657157898,
"learning_rate": 2.4757683231832662e-05,
"loss": 0.6336,
"step": 314
},
{
"epoch": 22.5,
"grad_norm": 1.7574703693389893,
"learning_rate": 2.4753389682771523e-05,
"loss": 0.5956,
"step": 315
},
{
"epoch": 22.571428571428573,
"grad_norm": 1.262270450592041,
"learning_rate": 2.474905880776037e-05,
"loss": 0.6061,
"step": 316
},
{
"epoch": 22.642857142857142,
"grad_norm": 0.8571851253509521,
"learning_rate": 2.47446906199918e-05,
"loss": 0.6321,
"step": 317
},
{
"epoch": 22.714285714285715,
"grad_norm": 0.9796807765960693,
"learning_rate": 2.4740285132772072e-05,
"loss": 0.6258,
"step": 318
},
{
"epoch": 22.785714285714285,
"grad_norm": 1.6591973304748535,
"learning_rate": 2.4735842359521064e-05,
"loss": 0.6387,
"step": 319
},
{
"epoch": 22.857142857142858,
"grad_norm": 1.05576491355896,
"learning_rate": 2.4731362313772233e-05,
"loss": 0.6181,
"step": 320
},
{
"epoch": 22.928571428571427,
"grad_norm": 0.7001498341560364,
"learning_rate": 2.4726845009172572e-05,
"loss": 0.6257,
"step": 321
},
{
"epoch": 23.0,
"grad_norm": 1.1179414987564087,
"learning_rate": 2.4722290459482578e-05,
"loss": 0.6171,
"step": 322
},
{
"epoch": 23.071428571428573,
"grad_norm": 0.7260156273841858,
"learning_rate": 2.47176986785762e-05,
"loss": 0.5838,
"step": 323
},
{
"epoch": 23.142857142857142,
"grad_norm": 0.781168520450592,
"learning_rate": 2.47130696804408e-05,
"loss": 0.5951,
"step": 324
},
{
"epoch": 23.214285714285715,
"grad_norm": 0.7263346314430237,
"learning_rate": 2.47084034791771e-05,
"loss": 0.5889,
"step": 325
},
{
"epoch": 23.285714285714285,
"grad_norm": 0.7460491061210632,
"learning_rate": 2.4703700088999167e-05,
"loss": 0.5864,
"step": 326
},
{
"epoch": 23.357142857142858,
"grad_norm": 0.775688886642456,
"learning_rate": 2.4698959524234346e-05,
"loss": 0.5819,
"step": 327
},
{
"epoch": 23.428571428571427,
"grad_norm": 0.8014679551124573,
"learning_rate": 2.4694181799323206e-05,
"loss": 0.5867,
"step": 328
},
{
"epoch": 23.5,
"grad_norm": 0.9386325478553772,
"learning_rate": 2.468936692881954e-05,
"loss": 0.6276,
"step": 329
},
{
"epoch": 23.571428571428573,
"grad_norm": 0.7792544960975647,
"learning_rate": 2.4684514927390274e-05,
"loss": 0.63,
"step": 330
},
{
"epoch": 23.642857142857142,
"grad_norm": 0.7428668737411499,
"learning_rate": 2.4679625809815443e-05,
"loss": 0.5905,
"step": 331
},
{
"epoch": 23.714285714285715,
"grad_norm": 0.8062078952789307,
"learning_rate": 2.467469959098815e-05,
"loss": 0.5958,
"step": 332
},
{
"epoch": 23.785714285714285,
"grad_norm": 0.7396784424781799,
"learning_rate": 2.4669736285914505e-05,
"loss": 0.5961,
"step": 333
},
{
"epoch": 23.857142857142858,
"grad_norm": 0.757745087146759,
"learning_rate": 2.4664735909713606e-05,
"loss": 0.6001,
"step": 334
},
{
"epoch": 23.928571428571427,
"grad_norm": 0.8699296116828918,
"learning_rate": 2.465969847761746e-05,
"loss": 0.5897,
"step": 335
},
{
"epoch": 24.0,
"grad_norm": 1.1751132011413574,
"learning_rate": 2.4654624004970957e-05,
"loss": 0.5962,
"step": 336
},
{
"epoch": 24.071428571428573,
"grad_norm": 0.8975971937179565,
"learning_rate": 2.464951250723183e-05,
"loss": 0.6029,
"step": 337
},
{
"epoch": 24.142857142857142,
"grad_norm": 1.503801941871643,
"learning_rate": 2.4644363999970576e-05,
"loss": 0.5957,
"step": 338
},
{
"epoch": 24.214285714285715,
"grad_norm": 0.7873043417930603,
"learning_rate": 2.4639178498870452e-05,
"loss": 0.5901,
"step": 339
},
{
"epoch": 24.285714285714285,
"grad_norm": 0.7711262106895447,
"learning_rate": 2.4633956019727385e-05,
"loss": 0.6011,
"step": 340
},
{
"epoch": 24.357142857142858,
"grad_norm": 0.8626933097839355,
"learning_rate": 2.4628696578449956e-05,
"loss": 0.5967,
"step": 341
},
{
"epoch": 24.428571428571427,
"grad_norm": 0.7888921499252319,
"learning_rate": 2.4623400191059335e-05,
"loss": 0.5835,
"step": 342
},
{
"epoch": 24.5,
"grad_norm": 0.8386034369468689,
"learning_rate": 2.4618066873689238e-05,
"loss": 0.5934,
"step": 343
},
{
"epoch": 24.571428571428573,
"grad_norm": 0.8325148224830627,
"learning_rate": 2.461269664258587e-05,
"loss": 0.5739,
"step": 344
},
{
"epoch": 24.642857142857142,
"grad_norm": 1.0222346782684326,
"learning_rate": 2.4607289514107888e-05,
"loss": 0.6099,
"step": 345
},
{
"epoch": 24.714285714285715,
"grad_norm": 1.1683640480041504,
"learning_rate": 2.460184550472635e-05,
"loss": 0.6103,
"step": 346
},
{
"epoch": 24.785714285714285,
"grad_norm": 1.232172966003418,
"learning_rate": 2.4596364631024643e-05,
"loss": 0.611,
"step": 347
},
{
"epoch": 24.857142857142858,
"grad_norm": 0.7627729773521423,
"learning_rate": 2.459084690969846e-05,
"loss": 0.6029,
"step": 348
},
{
"epoch": 24.928571428571427,
"grad_norm": 0.7204194664955139,
"learning_rate": 2.4585292357555746e-05,
"loss": 0.6058,
"step": 349
},
{
"epoch": 25.0,
"grad_norm": 0.9375554919242859,
"learning_rate": 2.457970099151662e-05,
"loss": 0.598,
"step": 350
},
{
"epoch": 25.071428571428573,
"grad_norm": 0.9481148719787598,
"learning_rate": 2.4574072828613354e-05,
"loss": 0.6118,
"step": 351
},
{
"epoch": 25.142857142857142,
"grad_norm": 1.4235519170761108,
"learning_rate": 2.4568407885990313e-05,
"loss": 0.6339,
"step": 352
},
{
"epoch": 25.214285714285715,
"grad_norm": 1.3172290325164795,
"learning_rate": 2.4562706180903894e-05,
"loss": 0.6218,
"step": 353
},
{
"epoch": 25.285714285714285,
"grad_norm": 0.8847378492355347,
"learning_rate": 2.4556967730722478e-05,
"loss": 0.6093,
"step": 354
},
{
"epoch": 25.357142857142858,
"grad_norm": 1.7306571006774902,
"learning_rate": 2.455119255292638e-05,
"loss": 0.6117,
"step": 355
},
{
"epoch": 25.428571428571427,
"grad_norm": 0.9493682980537415,
"learning_rate": 2.4545380665107786e-05,
"loss": 0.6068,
"step": 356
},
{
"epoch": 25.5,
"grad_norm": 0.972621738910675,
"learning_rate": 2.453953208497073e-05,
"loss": 0.6081,
"step": 357
},
{
"epoch": 25.571428571428573,
"grad_norm": 1.5284761190414429,
"learning_rate": 2.4533646830330986e-05,
"loss": 0.5911,
"step": 358
},
{
"epoch": 25.642857142857142,
"grad_norm": 1.1001543998718262,
"learning_rate": 2.452772491911607e-05,
"loss": 0.5839,
"step": 359
},
{
"epoch": 25.714285714285715,
"grad_norm": 1.3298473358154297,
"learning_rate": 2.4521766369365142e-05,
"loss": 0.5827,
"step": 360
},
{
"epoch": 25.785714285714285,
"grad_norm": 0.9805197715759277,
"learning_rate": 2.4515771199228987e-05,
"loss": 0.5743,
"step": 361
},
{
"epoch": 25.857142857142858,
"grad_norm": 1.7736384868621826,
"learning_rate": 2.450973942696993e-05,
"loss": 0.5842,
"step": 362
},
{
"epoch": 25.928571428571427,
"grad_norm": 1.1173772811889648,
"learning_rate": 2.450367107096179e-05,
"loss": 0.5645,
"step": 363
},
{
"epoch": 26.0,
"grad_norm": 0.7397013902664185,
"learning_rate": 2.449756614968984e-05,
"loss": 0.564,
"step": 364
},
{
"epoch": 26.071428571428573,
"grad_norm": 1.4986436367034912,
"learning_rate": 2.449142468175072e-05,
"loss": 0.6343,
"step": 365
},
{
"epoch": 26.142857142857142,
"grad_norm": 1.8107956647872925,
"learning_rate": 2.4485246685852413e-05,
"loss": 0.6313,
"step": 366
},
{
"epoch": 26.214285714285715,
"grad_norm": 1.253281593322754,
"learning_rate": 2.4479032180814166e-05,
"loss": 0.6194,
"step": 367
},
{
"epoch": 26.285714285714285,
"grad_norm": 1.7954612970352173,
"learning_rate": 2.447278118556644e-05,
"loss": 0.6289,
"step": 368
},
{
"epoch": 26.357142857142858,
"grad_norm": 1.6291406154632568,
"learning_rate": 2.446649371915084e-05,
"loss": 0.6245,
"step": 369
},
{
"epoch": 26.428571428571427,
"grad_norm": 0.9229593276977539,
"learning_rate": 2.4460169800720095e-05,
"loss": 0.5978,
"step": 370
},
{
"epoch": 26.5,
"grad_norm": 1.3691909313201904,
"learning_rate": 2.4453809449537947e-05,
"loss": 0.581,
"step": 371
},
{
"epoch": 26.571428571428573,
"grad_norm": 1.8004933595657349,
"learning_rate": 2.4447412684979127e-05,
"loss": 0.5866,
"step": 372
},
{
"epoch": 26.642857142857142,
"grad_norm": 1.102176308631897,
"learning_rate": 2.4440979526529295e-05,
"loss": 0.5967,
"step": 373
},
{
"epoch": 26.714285714285715,
"grad_norm": 0.8056683540344238,
"learning_rate": 2.4434509993784972e-05,
"loss": 0.5816,
"step": 374
},
{
"epoch": 26.785714285714285,
"grad_norm": 1.2256656885147095,
"learning_rate": 2.4428004106453462e-05,
"loss": 0.5943,
"step": 375
},
{
"epoch": 26.857142857142858,
"grad_norm": 1.1078814268112183,
"learning_rate": 2.4421461884352836e-05,
"loss": 0.5914,
"step": 376
},
{
"epoch": 26.928571428571427,
"grad_norm": 0.9050626754760742,
"learning_rate": 2.4414883347411836e-05,
"loss": 0.592,
"step": 377
},
{
"epoch": 27.0,
"grad_norm": 1.0470325946807861,
"learning_rate": 2.440826851566983e-05,
"loss": 0.5894,
"step": 378
},
{
"epoch": 27.071428571428573,
"grad_norm": 1.515498399734497,
"learning_rate": 2.4401617409276735e-05,
"loss": 0.5724,
"step": 379
},
{
"epoch": 27.142857142857142,
"grad_norm": 0.9086301922798157,
"learning_rate": 2.439493004849298e-05,
"loss": 0.5605,
"step": 380
},
{
"epoch": 27.214285714285715,
"grad_norm": 1.295502781867981,
"learning_rate": 2.438820645368942e-05,
"loss": 0.5664,
"step": 381
},
{
"epoch": 27.285714285714285,
"grad_norm": 1.0472607612609863,
"learning_rate": 2.4381446645347297e-05,
"loss": 0.5733,
"step": 382
},
{
"epoch": 27.357142857142858,
"grad_norm": 0.9298902750015259,
"learning_rate": 2.4374650644058156e-05,
"loss": 0.5615,
"step": 383
},
{
"epoch": 27.428571428571427,
"grad_norm": 0.8370387554168701,
"learning_rate": 2.43678184705238e-05,
"loss": 0.5681,
"step": 384
},
{
"epoch": 27.5,
"grad_norm": 1.045771598815918,
"learning_rate": 2.4360950145556208e-05,
"loss": 0.5866,
"step": 385
},
{
"epoch": 27.571428571428573,
"grad_norm": 1.17662513256073,
"learning_rate": 2.4354045690077492e-05,
"loss": 0.5955,
"step": 386
},
{
"epoch": 27.642857142857142,
"grad_norm": 1.1505502462387085,
"learning_rate": 2.4347105125119824e-05,
"loss": 0.6055,
"step": 387
},
{
"epoch": 27.714285714285715,
"grad_norm": 1.4744963645935059,
"learning_rate": 2.4340128471825362e-05,
"loss": 0.6101,
"step": 388
},
{
"epoch": 27.785714285714285,
"grad_norm": 1.2976175546646118,
"learning_rate": 2.4333115751446208e-05,
"loss": 0.6085,
"step": 389
},
{
"epoch": 27.857142857142858,
"grad_norm": 1.0312047004699707,
"learning_rate": 2.4326066985344318e-05,
"loss": 0.5929,
"step": 390
},
{
"epoch": 27.928571428571427,
"grad_norm": 1.004387378692627,
"learning_rate": 2.4318982194991463e-05,
"loss": 0.6002,
"step": 391
},
{
"epoch": 28.0,
"grad_norm": 1.5520254373550415,
"learning_rate": 2.4311861401969138e-05,
"loss": 0.5991,
"step": 392
},
{
"epoch": 28.071428571428573,
"grad_norm": 1.0190459489822388,
"learning_rate": 2.4304704627968515e-05,
"loss": 0.5965,
"step": 393
},
{
"epoch": 28.142857142857142,
"grad_norm": 1.498430609703064,
"learning_rate": 2.429751189479037e-05,
"loss": 0.5863,
"step": 394
},
{
"epoch": 28.214285714285715,
"grad_norm": 1.0456204414367676,
"learning_rate": 2.429028322434501e-05,
"loss": 0.5886,
"step": 395
},
{
"epoch": 28.285714285714285,
"grad_norm": 1.0039376020431519,
"learning_rate": 2.4283018638652234e-05,
"loss": 0.5989,
"step": 396
},
{
"epoch": 28.357142857142858,
"grad_norm": 1.1230565309524536,
"learning_rate": 2.427571815984121e-05,
"loss": 0.5818,
"step": 397
},
{
"epoch": 28.428571428571427,
"grad_norm": 1.0421648025512695,
"learning_rate": 2.4268381810150474e-05,
"loss": 0.5937,
"step": 398
},
{
"epoch": 28.5,
"grad_norm": 1.4874267578125,
"learning_rate": 2.426100961192782e-05,
"loss": 0.5756,
"step": 399
},
{
"epoch": 28.571428571428573,
"grad_norm": 0.9157134294509888,
"learning_rate": 2.4253601587630236e-05,
"loss": 0.5645,
"step": 400
},
{
"epoch": 28.642857142857142,
"grad_norm": 0.9978843927383423,
"learning_rate": 2.4246157759823855e-05,
"loss": 0.5999,
"step": 401
},
{
"epoch": 28.714285714285715,
"grad_norm": 1.1077312231063843,
"learning_rate": 2.4238678151183863e-05,
"loss": 0.5871,
"step": 402
},
{
"epoch": 28.785714285714285,
"grad_norm": 1.4482558965682983,
"learning_rate": 2.423116278449445e-05,
"loss": 0.574,
"step": 403
},
{
"epoch": 28.857142857142858,
"grad_norm": 0.9626414775848389,
"learning_rate": 2.4223611682648724e-05,
"loss": 0.5903,
"step": 404
},
{
"epoch": 28.928571428571427,
"grad_norm": 1.0190569162368774,
"learning_rate": 2.4216024868648644e-05,
"loss": 0.5661,
"step": 405
},
{
"epoch": 29.0,
"grad_norm": 0.9681732058525085,
"learning_rate": 2.4208402365604972e-05,
"loss": 0.562,
"step": 406
},
{
"epoch": 29.071428571428573,
"grad_norm": 0.9364088177680969,
"learning_rate": 2.420074419673717e-05,
"loss": 0.573,
"step": 407
},
{
"epoch": 29.142857142857142,
"grad_norm": 1.1561006307601929,
"learning_rate": 2.4193050385373344e-05,
"loss": 0.5681,
"step": 408
},
{
"epoch": 29.214285714285715,
"grad_norm": 1.0007522106170654,
"learning_rate": 2.418532095495018e-05,
"loss": 0.5782,
"step": 409
},
{
"epoch": 29.285714285714285,
"grad_norm": 0.9259665012359619,
"learning_rate": 2.417755592901287e-05,
"loss": 0.5703,
"step": 410
},
{
"epoch": 29.357142857142858,
"grad_norm": 1.2152159214019775,
"learning_rate": 2.4169755331215023e-05,
"loss": 0.5573,
"step": 411
},
{
"epoch": 29.428571428571427,
"grad_norm": 0.921935498714447,
"learning_rate": 2.4161919185318617e-05,
"loss": 0.558,
"step": 412
},
{
"epoch": 29.5,
"grad_norm": 0.8668715357780457,
"learning_rate": 2.4154047515193904e-05,
"loss": 0.5596,
"step": 413
},
{
"epoch": 29.571428571428573,
"grad_norm": 1.3148274421691895,
"learning_rate": 2.4146140344819363e-05,
"loss": 0.5607,
"step": 414
},
{
"epoch": 29.642857142857142,
"grad_norm": 1.2812392711639404,
"learning_rate": 2.4138197698281606e-05,
"loss": 0.5311,
"step": 415
},
{
"epoch": 29.714285714285715,
"grad_norm": 0.9346650242805481,
"learning_rate": 2.413021959977531e-05,
"loss": 0.5329,
"step": 416
},
{
"epoch": 29.785714285714285,
"grad_norm": 1.4665607213974,
"learning_rate": 2.4122206073603142e-05,
"loss": 0.5365,
"step": 417
},
{
"epoch": 29.857142857142858,
"grad_norm": 1.2906500101089478,
"learning_rate": 2.4114157144175703e-05,
"loss": 0.5323,
"step": 418
},
{
"epoch": 29.928571428571427,
"grad_norm": 0.7557607889175415,
"learning_rate": 2.4106072836011422e-05,
"loss": 0.5239,
"step": 419
},
{
"epoch": 30.0,
"grad_norm": 1.106185793876648,
"learning_rate": 2.40979531737365e-05,
"loss": 0.5296,
"step": 420
},
{
"epoch": 30.071428571428573,
"grad_norm": 1.5687097311019897,
"learning_rate": 2.4089798182084845e-05,
"loss": 0.5776,
"step": 421
},
{
"epoch": 30.142857142857142,
"grad_norm": 1.0635465383529663,
"learning_rate": 2.4081607885897966e-05,
"loss": 0.5614,
"step": 422
},
{
"epoch": 30.214285714285715,
"grad_norm": 0.9863991141319275,
"learning_rate": 2.407338231012494e-05,
"loss": 0.5778,
"step": 423
},
{
"epoch": 30.285714285714285,
"grad_norm": 1.0064969062805176,
"learning_rate": 2.406512147982228e-05,
"loss": 0.5666,
"step": 424
},
{
"epoch": 30.357142857142858,
"grad_norm": 1.1447433233261108,
"learning_rate": 2.4056825420153917e-05,
"loss": 0.5646,
"step": 425
},
{
"epoch": 30.428571428571427,
"grad_norm": 0.7946691513061523,
"learning_rate": 2.4048494156391087e-05,
"loss": 0.5625,
"step": 426
},
{
"epoch": 30.5,
"grad_norm": 1.1659761667251587,
"learning_rate": 2.4040127713912264e-05,
"loss": 0.5643,
"step": 427
},
{
"epoch": 30.571428571428573,
"grad_norm": 1.0258344411849976,
"learning_rate": 2.403172611820308e-05,
"loss": 0.5689,
"step": 428
},
{
"epoch": 30.642857142857142,
"grad_norm": 1.0674619674682617,
"learning_rate": 2.4023289394856257e-05,
"loss": 0.5699,
"step": 429
},
{
"epoch": 30.714285714285715,
"grad_norm": 1.2931327819824219,
"learning_rate": 2.401481756957152e-05,
"loss": 0.5754,
"step": 430
},
{
"epoch": 30.785714285714285,
"grad_norm": 0.8293156027793884,
"learning_rate": 2.4006310668155508e-05,
"loss": 0.5673,
"step": 431
},
{
"epoch": 30.857142857142858,
"grad_norm": 1.1221078634262085,
"learning_rate": 2.3997768716521723e-05,
"loss": 0.5805,
"step": 432
},
{
"epoch": 30.928571428571427,
"grad_norm": 1.3781203031539917,
"learning_rate": 2.398919174069043e-05,
"loss": 0.5752,
"step": 433
},
{
"epoch": 31.0,
"grad_norm": 1.2068958282470703,
"learning_rate": 2.398057976678859e-05,
"loss": 0.5691,
"step": 434
},
{
"epoch": 31.071428571428573,
"grad_norm": 1.0165269374847412,
"learning_rate": 2.3971932821049765e-05,
"loss": 0.5714,
"step": 435
},
{
"epoch": 31.142857142857142,
"grad_norm": 1.2817696332931519,
"learning_rate": 2.396325092981405e-05,
"loss": 0.5785,
"step": 436
},
{
"epoch": 31.214285714285715,
"grad_norm": 1.44329035282135,
"learning_rate": 2.3954534119527996e-05,
"loss": 0.5822,
"step": 437
},
{
"epoch": 31.285714285714285,
"grad_norm": 1.4649674892425537,
"learning_rate": 2.3945782416744517e-05,
"loss": 0.5736,
"step": 438
},
{
"epoch": 31.357142857142858,
"grad_norm": 0.9614746570587158,
"learning_rate": 2.3936995848122812e-05,
"loss": 0.5645,
"step": 439
},
{
"epoch": 31.428571428571427,
"grad_norm": 1.0065515041351318,
"learning_rate": 2.3928174440428297e-05,
"loss": 0.5736,
"step": 440
},
{
"epoch": 31.5,
"grad_norm": 1.4679027795791626,
"learning_rate": 2.391931822053251e-05,
"loss": 0.5442,
"step": 441
},
{
"epoch": 31.571428571428573,
"grad_norm": 1.3159974813461304,
"learning_rate": 2.3910427215413036e-05,
"loss": 0.5393,
"step": 442
},
{
"epoch": 31.642857142857142,
"grad_norm": 1.4550544023513794,
"learning_rate": 2.390150145215341e-05,
"loss": 0.5596,
"step": 443
},
{
"epoch": 31.714285714285715,
"grad_norm": 1.5841645002365112,
"learning_rate": 2.3892540957943067e-05,
"loss": 0.5597,
"step": 444
},
{
"epoch": 31.785714285714285,
"grad_norm": 0.9494394659996033,
"learning_rate": 2.3883545760077215e-05,
"loss": 0.5471,
"step": 445
},
{
"epoch": 31.857142857142858,
"grad_norm": 1.0761160850524902,
"learning_rate": 2.3874515885956792e-05,
"loss": 0.5445,
"step": 446
},
{
"epoch": 31.928571428571427,
"grad_norm": 1.1207195520401,
"learning_rate": 2.386545136308836e-05,
"loss": 0.5434,
"step": 447
},
{
"epoch": 32.0,
"grad_norm": 1.2892533540725708,
"learning_rate": 2.3856352219084024e-05,
"loss": 0.5491,
"step": 448
},
{
"epoch": 32.07142857142857,
"grad_norm": 0.8984603881835938,
"learning_rate": 2.384721848166136e-05,
"loss": 0.5535,
"step": 449
},
{
"epoch": 32.142857142857146,
"grad_norm": 1.3474533557891846,
"learning_rate": 2.3838050178643312e-05,
"loss": 0.5527,
"step": 450
},
{
"epoch": 32.214285714285715,
"grad_norm": 0.8426799178123474,
"learning_rate": 2.3828847337958126e-05,
"loss": 0.5559,
"step": 451
},
{
"epoch": 32.285714285714285,
"grad_norm": 1.0497685670852661,
"learning_rate": 2.3819609987639247e-05,
"loss": 0.5426,
"step": 452
},
{
"epoch": 32.357142857142854,
"grad_norm": 0.9918515086174011,
"learning_rate": 2.3810338155825245e-05,
"loss": 0.5346,
"step": 453
},
{
"epoch": 32.42857142857143,
"grad_norm": 1.1618802547454834,
"learning_rate": 2.3801031870759732e-05,
"loss": 0.5373,
"step": 454
},
{
"epoch": 32.5,
"grad_norm": 0.9634193778038025,
"learning_rate": 2.379169116079126e-05,
"loss": 0.5679,
"step": 455
},
{
"epoch": 32.57142857142857,
"grad_norm": 1.1792367696762085,
"learning_rate": 2.378231605437326e-05,
"loss": 0.5664,
"step": 456
},
{
"epoch": 32.642857142857146,
"grad_norm": 0.948975682258606,
"learning_rate": 2.3772906580063924e-05,
"loss": 0.5548,
"step": 457
},
{
"epoch": 32.714285714285715,
"grad_norm": 0.7861557602882385,
"learning_rate": 2.3763462766526145e-05,
"loss": 0.5618,
"step": 458
},
{
"epoch": 32.785714285714285,
"grad_norm": 1.0439471006393433,
"learning_rate": 2.3753984642527423e-05,
"loss": 0.5478,
"step": 459
},
{
"epoch": 32.857142857142854,
"grad_norm": 1.1698310375213623,
"learning_rate": 2.3744472236939753e-05,
"loss": 0.5562,
"step": 460
},
{
"epoch": 32.92857142857143,
"grad_norm": 0.9327784180641174,
"learning_rate": 2.3734925578739588e-05,
"loss": 0.5434,
"step": 461
},
{
"epoch": 33.0,
"grad_norm": 1.0151194334030151,
"learning_rate": 2.3725344697007696e-05,
"loss": 0.5491,
"step": 462
},
{
"epoch": 33.07142857142857,
"grad_norm": 1.4244743585586548,
"learning_rate": 2.3715729620929106e-05,
"loss": 0.5546,
"step": 463
},
{
"epoch": 33.142857142857146,
"grad_norm": 0.8315675258636475,
"learning_rate": 2.3706080379793007e-05,
"loss": 0.5405,
"step": 464
},
{
"epoch": 33.214285714285715,
"grad_norm": 1.5355232954025269,
"learning_rate": 2.3696397002992663e-05,
"loss": 0.5423,
"step": 465
},
{
"epoch": 33.285714285714285,
"grad_norm": 1.0639145374298096,
"learning_rate": 2.3686679520025314e-05,
"loss": 0.5454,
"step": 466
},
{
"epoch": 33.357142857142854,
"grad_norm": 0.7924723625183105,
"learning_rate": 2.36769279604921e-05,
"loss": 0.5211,
"step": 467
},
{
"epoch": 33.42857142857143,
"grad_norm": 0.8822352886199951,
"learning_rate": 2.366714235409797e-05,
"loss": 0.5463,
"step": 468
},
{
"epoch": 33.5,
"grad_norm": 0.9778538346290588,
"learning_rate": 2.365732273065157e-05,
"loss": 0.5121,
"step": 469
},
{
"epoch": 33.57142857142857,
"grad_norm": 0.9171921610832214,
"learning_rate": 2.3647469120065177e-05,
"loss": 0.5204,
"step": 470
},
{
"epoch": 33.642857142857146,
"grad_norm": 1.127368450164795,
"learning_rate": 2.36375815523546e-05,
"loss": 0.5429,
"step": 471
},
{
"epoch": 33.714285714285715,
"grad_norm": 0.893721878528595,
"learning_rate": 2.3627660057639082e-05,
"loss": 0.5512,
"step": 472
},
{
"epoch": 33.785714285714285,
"grad_norm": 1.115839958190918,
"learning_rate": 2.361770466614122e-05,
"loss": 0.553,
"step": 473
},
{
"epoch": 33.857142857142854,
"grad_norm": 1.000789999961853,
"learning_rate": 2.3607715408186863e-05,
"loss": 0.5328,
"step": 474
},
{
"epoch": 33.92857142857143,
"grad_norm": 1.0224825143814087,
"learning_rate": 2.3597692314205016e-05,
"loss": 0.5171,
"step": 475
},
{
"epoch": 34.0,
"grad_norm": 1.0545403957366943,
"learning_rate": 2.358763541472777e-05,
"loss": 0.5292,
"step": 476
},
{
"epoch": 34.07142857142857,
"grad_norm": 1.2135273218154907,
"learning_rate": 2.3577544740390184e-05,
"loss": 0.54,
"step": 477
},
{
"epoch": 34.142857142857146,
"grad_norm": 0.9006894826889038,
"learning_rate": 2.35674203219302e-05,
"loss": 0.5528,
"step": 478
},
{
"epoch": 34.214285714285715,
"grad_norm": 1.2805321216583252,
"learning_rate": 2.355726219018855e-05,
"loss": 0.536,
"step": 479
},
{
"epoch": 34.285714285714285,
"grad_norm": 1.0423541069030762,
"learning_rate": 2.354707037610867e-05,
"loss": 0.5441,
"step": 480
},
{
"epoch": 34.357142857142854,
"grad_norm": 0.8751712441444397,
"learning_rate": 2.353684491073659e-05,
"loss": 0.5408,
"step": 481
},
{
"epoch": 34.42857142857143,
"grad_norm": 1.3459856510162354,
"learning_rate": 2.3526585825220848e-05,
"loss": 0.5467,
"step": 482
},
{
"epoch": 34.5,
"grad_norm": 1.3484444618225098,
"learning_rate": 2.35162931508124e-05,
"loss": 0.5865,
"step": 483
},
{
"epoch": 34.57142857142857,
"grad_norm": 1.1730215549468994,
"learning_rate": 2.3505966918864525e-05,
"loss": 0.568,
"step": 484
},
{
"epoch": 34.642857142857146,
"grad_norm": 1.0420297384262085,
"learning_rate": 2.3495607160832707e-05,
"loss": 0.541,
"step": 485
},
{
"epoch": 34.714285714285715,
"grad_norm": 1.0816285610198975,
"learning_rate": 2.3485213908274567e-05,
"loss": 0.5432,
"step": 486
},
{
"epoch": 34.785714285714285,
"grad_norm": 1.0314637422561646,
"learning_rate": 2.3474787192849756e-05,
"loss": 0.5605,
"step": 487
},
{
"epoch": 34.857142857142854,
"grad_norm": 0.8850322365760803,
"learning_rate": 2.346432704631986e-05,
"loss": 0.547,
"step": 488
},
{
"epoch": 34.92857142857143,
"grad_norm": 0.967745840549469,
"learning_rate": 2.3453833500548295e-05,
"loss": 0.5442,
"step": 489
},
{
"epoch": 35.0,
"grad_norm": 0.939887285232544,
"learning_rate": 2.3443306587500225e-05,
"loss": 0.5385,
"step": 490
},
{
"epoch": 35.07142857142857,
"grad_norm": 0.9256707429885864,
"learning_rate": 2.3432746339242448e-05,
"loss": 0.5479,
"step": 491
},
{
"epoch": 35.142857142857146,
"grad_norm": 0.7846079468727112,
"learning_rate": 2.342215278794332e-05,
"loss": 0.5534,
"step": 492
},
{
"epoch": 35.214285714285715,
"grad_norm": 0.9177327752113342,
"learning_rate": 2.341152596587262e-05,
"loss": 0.5488,
"step": 493
},
{
"epoch": 35.285714285714285,
"grad_norm": 0.842380702495575,
"learning_rate": 2.340086590540151e-05,
"loss": 0.5409,
"step": 494
},
{
"epoch": 35.357142857142854,
"grad_norm": 1.0821106433868408,
"learning_rate": 2.339017263900237e-05,
"loss": 0.5478,
"step": 495
},
{
"epoch": 35.42857142857143,
"grad_norm": 1.1775977611541748,
"learning_rate": 2.3379446199248747e-05,
"loss": 0.5403,
"step": 496
},
{
"epoch": 35.5,
"grad_norm": 1.223752498626709,
"learning_rate": 2.3368686618815238e-05,
"loss": 0.579,
"step": 497
},
{
"epoch": 35.57142857142857,
"grad_norm": 1.1240078210830688,
"learning_rate": 2.335789393047739e-05,
"loss": 0.5988,
"step": 498
},
{
"epoch": 35.642857142857146,
"grad_norm": 1.8912179470062256,
"learning_rate": 2.334706816711161e-05,
"loss": 0.5889,
"step": 499
},
{
"epoch": 35.714285714285715,
"grad_norm": 1.0839145183563232,
"learning_rate": 2.3336209361695035e-05,
"loss": 0.5583,
"step": 500
},
{
"epoch": 35.785714285714285,
"grad_norm": 1.2622607946395874,
"learning_rate": 2.3325317547305485e-05,
"loss": 0.5541,
"step": 501
},
{
"epoch": 35.857142857142854,
"grad_norm": 1.1510413885116577,
"learning_rate": 2.3314392757121308e-05,
"loss": 0.5553,
"step": 502
},
{
"epoch": 35.92857142857143,
"grad_norm": 1.113884687423706,
"learning_rate": 2.3303435024421312e-05,
"loss": 0.548,
"step": 503
},
{
"epoch": 36.0,
"grad_norm": 0.9615234136581421,
"learning_rate": 2.3292444382584648e-05,
"loss": 0.5471,
"step": 504
},
{
"epoch": 36.07142857142857,
"grad_norm": 1.3409103155136108,
"learning_rate": 2.328142086509072e-05,
"loss": 0.5253,
"step": 505
},
{
"epoch": 36.142857142857146,
"grad_norm": 1.717495322227478,
"learning_rate": 2.3270364505519073e-05,
"loss": 0.5394,
"step": 506
},
{
"epoch": 36.214285714285715,
"grad_norm": 1.1437082290649414,
"learning_rate": 2.32592753375493e-05,
"loss": 0.5274,
"step": 507
},
{
"epoch": 36.285714285714285,
"grad_norm": 1.7708051204681396,
"learning_rate": 2.324815339496092e-05,
"loss": 0.5204,
"step": 508
},
{
"epoch": 36.357142857142854,
"grad_norm": 1.691807508468628,
"learning_rate": 2.3236998711633307e-05,
"loss": 0.5181,
"step": 509
},
{
"epoch": 36.42857142857143,
"grad_norm": 1.3929164409637451,
"learning_rate": 2.322581132154556e-05,
"loss": 0.513,
"step": 510
},
{
"epoch": 36.5,
"grad_norm": 1.71475088596344,
"learning_rate": 2.3214591258776404e-05,
"loss": 0.525,
"step": 511
},
{
"epoch": 36.57142857142857,
"grad_norm": 1.6751059293746948,
"learning_rate": 2.3203338557504105e-05,
"loss": 0.5245,
"step": 512
},
{
"epoch": 36.642857142857146,
"grad_norm": 1.5937445163726807,
"learning_rate": 2.3192053252006335e-05,
"loss": 0.5573,
"step": 513
},
{
"epoch": 36.714285714285715,
"grad_norm": 1.2509270906448364,
"learning_rate": 2.3180735376660094e-05,
"loss": 0.5492,
"step": 514
},
{
"epoch": 36.785714285714285,
"grad_norm": 1.5236318111419678,
"learning_rate": 2.3169384965941592e-05,
"loss": 0.557,
"step": 515
},
{
"epoch": 36.857142857142854,
"grad_norm": 1.6809778213500977,
"learning_rate": 2.3158002054426153e-05,
"loss": 0.5523,
"step": 516
},
{
"epoch": 36.92857142857143,
"grad_norm": 1.3588533401489258,
"learning_rate": 2.3146586676788095e-05,
"loss": 0.5389,
"step": 517
},
{
"epoch": 37.0,
"grad_norm": 1.1601979732513428,
"learning_rate": 2.3135138867800642e-05,
"loss": 0.5238,
"step": 518
},
{
"epoch": 37.07142857142857,
"grad_norm": 1.3859103918075562,
"learning_rate": 2.3123658662335802e-05,
"loss": 0.5267,
"step": 519
},
{
"epoch": 37.142857142857146,
"grad_norm": 1.9306292533874512,
"learning_rate": 2.311214609536427e-05,
"loss": 0.5289,
"step": 520
},
{
"epoch": 37.214285714285715,
"grad_norm": 1.127606749534607,
"learning_rate": 2.3100601201955324e-05,
"loss": 0.5234,
"step": 521
},
{
"epoch": 37.285714285714285,
"grad_norm": 1.0079513788223267,
"learning_rate": 2.308902401727672e-05,
"loss": 0.513,
"step": 522
},
{
"epoch": 37.357142857142854,
"grad_norm": 1.6860461235046387,
"learning_rate": 2.3077414576594553e-05,
"loss": 0.5092,
"step": 523
},
{
"epoch": 37.42857142857143,
"grad_norm": 1.6068872213363647,
"learning_rate": 2.3065772915273203e-05,
"loss": 0.5223,
"step": 524
},
{
"epoch": 37.5,
"grad_norm": 0.8457321524620056,
"learning_rate": 2.305409906877519e-05,
"loss": 0.5075,
"step": 525
},
{
"epoch": 37.57142857142857,
"grad_norm": 1.1763814687728882,
"learning_rate": 2.3042393072661074e-05,
"loss": 0.518,
"step": 526
},
{
"epoch": 37.642857142857146,
"grad_norm": 1.9821209907531738,
"learning_rate": 2.3030654962589346e-05,
"loss": 0.5888,
"step": 527
},
{
"epoch": 37.714285714285715,
"grad_norm": 1.848170280456543,
"learning_rate": 2.3018884774316328e-05,
"loss": 0.5672,
"step": 528
},
{
"epoch": 37.785714285714285,
"grad_norm": 1.2518821954727173,
"learning_rate": 2.3007082543696055e-05,
"loss": 0.5609,
"step": 529
},
{
"epoch": 37.857142857142854,
"grad_norm": 1.4334330558776855,
"learning_rate": 2.299524830668017e-05,
"loss": 0.5573,
"step": 530
},
{
"epoch": 37.92857142857143,
"grad_norm": 1.6201841831207275,
"learning_rate": 2.2983382099317803e-05,
"loss": 0.5561,
"step": 531
},
{
"epoch": 38.0,
"grad_norm": 1.2917560338974,
"learning_rate": 2.2971483957755487e-05,
"loss": 0.5541,
"step": 532
},
{
"epoch": 38.07142857142857,
"grad_norm": 1.4161721467971802,
"learning_rate": 2.295955391823702e-05,
"loss": 0.5389,
"step": 533
},
{
"epoch": 38.142857142857146,
"grad_norm": 1.714737892150879,
"learning_rate": 2.2947592017103383e-05,
"loss": 0.548,
"step": 534
},
{
"epoch": 38.214285714285715,
"grad_norm": 1.0994311571121216,
"learning_rate": 2.2935598290792583e-05,
"loss": 0.5414,
"step": 535
},
{
"epoch": 38.285714285714285,
"grad_norm": 1.3677231073379517,
"learning_rate": 2.2923572775839603e-05,
"loss": 0.5282,
"step": 536
},
{
"epoch": 38.357142857142854,
"grad_norm": 1.073483943939209,
"learning_rate": 2.2911515508876243e-05,
"loss": 0.5375,
"step": 537
},
{
"epoch": 38.42857142857143,
"grad_norm": 1.5279548168182373,
"learning_rate": 2.2899426526631033e-05,
"loss": 0.5185,
"step": 538
},
{
"epoch": 38.5,
"grad_norm": 0.997622549533844,
"learning_rate": 2.2887305865929104e-05,
"loss": 0.5293,
"step": 539
},
{
"epoch": 38.57142857142857,
"grad_norm": 1.0080162286758423,
"learning_rate": 2.2875153563692094e-05,
"loss": 0.5204,
"step": 540
},
{
"epoch": 38.642857142857146,
"grad_norm": 1.0973855257034302,
"learning_rate": 2.286296965693802e-05,
"loss": 0.5139,
"step": 541
},
{
"epoch": 38.714285714285715,
"grad_norm": 1.270318627357483,
"learning_rate": 2.285075418278118e-05,
"loss": 0.5142,
"step": 542
},
{
"epoch": 38.785714285714285,
"grad_norm": 1.0092289447784424,
"learning_rate": 2.283850717843202e-05,
"loss": 0.5157,
"step": 543
},
{
"epoch": 38.857142857142854,
"grad_norm": 0.9526764750480652,
"learning_rate": 2.2826228681197047e-05,
"loss": 0.502,
"step": 544
},
{
"epoch": 38.92857142857143,
"grad_norm": 1.3919442892074585,
"learning_rate": 2.281391872847869e-05,
"loss": 0.5167,
"step": 545
},
{
"epoch": 39.0,
"grad_norm": 1.0284240245819092,
"learning_rate": 2.2801577357775193e-05,
"loss": 0.4974,
"step": 546
},
{
"epoch": 39.07142857142857,
"grad_norm": 1.4247255325317383,
"learning_rate": 2.2789204606680524e-05,
"loss": 0.553,
"step": 547
},
{
"epoch": 39.142857142857146,
"grad_norm": 1.605211615562439,
"learning_rate": 2.2776800512884218e-05,
"loss": 0.5516,
"step": 548
},
{
"epoch": 39.214285714285715,
"grad_norm": 1.5498794317245483,
"learning_rate": 2.2764365114171303e-05,
"loss": 0.5338,
"step": 549
},
{
"epoch": 39.285714285714285,
"grad_norm": 1.094758152961731,
"learning_rate": 2.2751898448422155e-05,
"loss": 0.5433,
"step": 550
},
{
"epoch": 39.357142857142854,
"grad_norm": 1.4792970418930054,
"learning_rate": 2.27394005536124e-05,
"loss": 0.5347,
"step": 551
},
{
"epoch": 39.42857142857143,
"grad_norm": 1.1492030620574951,
"learning_rate": 2.2726871467812795e-05,
"loss": 0.5354,
"step": 552
},
{
"epoch": 39.5,
"grad_norm": 1.2064441442489624,
"learning_rate": 2.2714311229189105e-05,
"loss": 0.5494,
"step": 553
},
{
"epoch": 39.57142857142857,
"grad_norm": 1.3887778520584106,
"learning_rate": 2.2701719876002004e-05,
"loss": 0.5456,
"step": 554
},
{
"epoch": 39.642857142857146,
"grad_norm": 1.0589540004730225,
"learning_rate": 2.268909744660693e-05,
"loss": 0.5412,
"step": 555
},
{
"epoch": 39.714285714285715,
"grad_norm": 1.4876841306686401,
"learning_rate": 2.267644397945399e-05,
"loss": 0.5406,
"step": 556
},
{
"epoch": 39.785714285714285,
"grad_norm": 1.1347283124923706,
"learning_rate": 2.2663759513087846e-05,
"loss": 0.5391,
"step": 557
},
{
"epoch": 39.857142857142854,
"grad_norm": 1.2739876508712769,
"learning_rate": 2.2651044086147578e-05,
"loss": 0.53,
"step": 558
},
{
"epoch": 39.92857142857143,
"grad_norm": 1.210117220878601,
"learning_rate": 2.2638297737366583e-05,
"loss": 0.5287,
"step": 559
},
{
"epoch": 40.0,
"grad_norm": 0.9031302332878113,
"learning_rate": 2.2625520505572452e-05,
"loss": 0.5275,
"step": 560
},
{
"epoch": 40.07142857142857,
"grad_norm": 1.3935803174972534,
"learning_rate": 2.2612712429686845e-05,
"loss": 0.5246,
"step": 561
},
{
"epoch": 40.142857142857146,
"grad_norm": 1.246086597442627,
"learning_rate": 2.259987354872538e-05,
"loss": 0.529,
"step": 562
},
{
"epoch": 40.214285714285715,
"grad_norm": 1.4561989307403564,
"learning_rate": 2.2587003901797528e-05,
"loss": 0.523,
"step": 563
},
{
"epoch": 40.285714285714285,
"grad_norm": 0.9257284998893738,
"learning_rate": 2.2574103528106448e-05,
"loss": 0.533,
"step": 564
},
{
"epoch": 40.357142857142854,
"grad_norm": 1.0180156230926514,
"learning_rate": 2.2561172466948925e-05,
"loss": 0.5219,
"step": 565
},
{
"epoch": 40.42857142857143,
"grad_norm": 1.1004180908203125,
"learning_rate": 2.2548210757715216e-05,
"loss": 0.5112,
"step": 566
},
{
"epoch": 40.5,
"grad_norm": 1.298823595046997,
"learning_rate": 2.2535218439888933e-05,
"loss": 0.5358,
"step": 567
},
{
"epoch": 40.57142857142857,
"grad_norm": 0.983917772769928,
"learning_rate": 2.2522195553046926e-05,
"loss": 0.5369,
"step": 568
},
{
"epoch": 40.642857142857146,
"grad_norm": 1.5962039232254028,
"learning_rate": 2.2509142136859168e-05,
"loss": 0.5122,
"step": 569
},
{
"epoch": 40.714285714285715,
"grad_norm": 1.2760443687438965,
"learning_rate": 2.2496058231088635e-05,
"loss": 0.51,
"step": 570
},
{
"epoch": 40.785714285714285,
"grad_norm": 0.838486909866333,
"learning_rate": 2.248294387559116e-05,
"loss": 0.511,
"step": 571
},
{
"epoch": 40.857142857142854,
"grad_norm": 0.9061436057090759,
"learning_rate": 2.246979911031536e-05,
"loss": 0.5033,
"step": 572
},
{
"epoch": 40.92857142857143,
"grad_norm": 0.9709542989730835,
"learning_rate": 2.2456623975302454e-05,
"loss": 0.4921,
"step": 573
},
{
"epoch": 41.0,
"grad_norm": 1.4476726055145264,
"learning_rate": 2.2443418510686198e-05,
"loss": 0.4981,
"step": 574
},
{
"epoch": 41.07142857142857,
"grad_norm": 1.2485681772232056,
"learning_rate": 2.2430182756692728e-05,
"loss": 0.5102,
"step": 575
},
{
"epoch": 41.142857142857146,
"grad_norm": 0.935390830039978,
"learning_rate": 2.2416916753640442e-05,
"loss": 0.5013,
"step": 576
},
{
"epoch": 41.214285714285715,
"grad_norm": 1.5691496133804321,
"learning_rate": 2.2403620541939884e-05,
"loss": 0.491,
"step": 577
},
{
"epoch": 41.285714285714285,
"grad_norm": 1.8644148111343384,
"learning_rate": 2.2390294162093627e-05,
"loss": 0.4939,
"step": 578
},
{
"epoch": 41.357142857142854,
"grad_norm": 1.1802425384521484,
"learning_rate": 2.2376937654696133e-05,
"loss": 0.4899,
"step": 579
},
{
"epoch": 41.42857142857143,
"grad_norm": 1.2618484497070312,
"learning_rate": 2.2363551060433636e-05,
"loss": 0.4894,
"step": 580
},
{
"epoch": 41.5,
"grad_norm": 1.7197902202606201,
"learning_rate": 2.2350134420084023e-05,
"loss": 0.5338,
"step": 581
},
{
"epoch": 41.57142857142857,
"grad_norm": 1.7317899465560913,
"learning_rate": 2.233668777451672e-05,
"loss": 0.5395,
"step": 582
},
{
"epoch": 41.642857142857146,
"grad_norm": 0.8841091394424438,
"learning_rate": 2.2323211164692526e-05,
"loss": 0.5036,
"step": 583
},
{
"epoch": 41.714285714285715,
"grad_norm": 0.9463407397270203,
"learning_rate": 2.2309704631663536e-05,
"loss": 0.5171,
"step": 584
},
{
"epoch": 41.785714285714285,
"grad_norm": 1.4781519174575806,
"learning_rate": 2.2296168216573e-05,
"loss": 0.5074,
"step": 585
},
{
"epoch": 41.857142857142854,
"grad_norm": 1.249396562576294,
"learning_rate": 2.2282601960655175e-05,
"loss": 0.5043,
"step": 586
},
{
"epoch": 41.92857142857143,
"grad_norm": 1.3514676094055176,
"learning_rate": 2.2269005905235234e-05,
"loss": 0.5083,
"step": 587
},
{
"epoch": 42.0,
"grad_norm": 0.8868072032928467,
"learning_rate": 2.2255380091729124e-05,
"loss": 0.5041,
"step": 588
},
{
"epoch": 42.07142857142857,
"grad_norm": 1.2411155700683594,
"learning_rate": 2.224172456164343e-05,
"loss": 0.5147,
"step": 589
},
{
"epoch": 42.142857142857146,
"grad_norm": 1.137658715248108,
"learning_rate": 2.2228039356575265e-05,
"loss": 0.5132,
"step": 590
},
{
"epoch": 42.214285714285715,
"grad_norm": 1.076025366783142,
"learning_rate": 2.221432451821214e-05,
"loss": 0.511,
"step": 591
},
{
"epoch": 42.285714285714285,
"grad_norm": 1.2936358451843262,
"learning_rate": 2.2200580088331825e-05,
"loss": 0.5231,
"step": 592
},
{
"epoch": 42.357142857142854,
"grad_norm": 1.1697425842285156,
"learning_rate": 2.2186806108802248e-05,
"loss": 0.495,
"step": 593
},
{
"epoch": 42.42857142857143,
"grad_norm": 0.9060876369476318,
"learning_rate": 2.217300262158133e-05,
"loss": 0.5091,
"step": 594
},
{
"epoch": 42.5,
"grad_norm": 1.0277864933013916,
"learning_rate": 2.215916966871689e-05,
"loss": 0.5451,
"step": 595
},
{
"epoch": 42.57142857142857,
"grad_norm": 1.2367963790893555,
"learning_rate": 2.2145307292346502e-05,
"loss": 0.5316,
"step": 596
},
{
"epoch": 42.642857142857146,
"grad_norm": 0.9642969965934753,
"learning_rate": 2.213141553469737e-05,
"loss": 0.4955,
"step": 597
},
{
"epoch": 42.714285714285715,
"grad_norm": 1.2247182130813599,
"learning_rate": 2.211749443808619e-05,
"loss": 0.5331,
"step": 598
},
{
"epoch": 42.785714285714285,
"grad_norm": 1.3418000936508179,
"learning_rate": 2.2103544044919045e-05,
"loss": 0.502,
"step": 599
},
{
"epoch": 42.857142857142854,
"grad_norm": 0.9258021116256714,
"learning_rate": 2.208956439769125e-05,
"loss": 0.5091,
"step": 600
},
{
"epoch": 42.92857142857143,
"grad_norm": 0.9550617337226868,
"learning_rate": 2.2075555538987227e-05,
"loss": 0.504,
"step": 601
},
{
"epoch": 43.0,
"grad_norm": 1.3864645957946777,
"learning_rate": 2.20615175114804e-05,
"loss": 0.5071,
"step": 602
},
{
"epoch": 43.07142857142857,
"grad_norm": 1.5451107025146484,
"learning_rate": 2.2047450357933032e-05,
"loss": 0.5342,
"step": 603
},
{
"epoch": 43.142857142857146,
"grad_norm": 1.1475048065185547,
"learning_rate": 2.2033354121196102e-05,
"loss": 0.5311,
"step": 604
},
{
"epoch": 43.214285714285715,
"grad_norm": 1.2265154123306274,
"learning_rate": 2.201922884420921e-05,
"loss": 0.5337,
"step": 605
},
{
"epoch": 43.285714285714285,
"grad_norm": 2.201294183731079,
"learning_rate": 2.200507457000039e-05,
"loss": 0.553,
"step": 606
},
{
"epoch": 43.357142857142854,
"grad_norm": 1.2212409973144531,
"learning_rate": 2.1990891341686008e-05,
"loss": 0.5375,
"step": 607
},
{
"epoch": 43.42857142857143,
"grad_norm": 0.8546030521392822,
"learning_rate": 2.1976679202470654e-05,
"loss": 0.5137,
"step": 608
},
{
"epoch": 43.5,
"grad_norm": 1.0307977199554443,
"learning_rate": 2.1962438195646958e-05,
"loss": 0.4959,
"step": 609
},
{
"epoch": 43.57142857142857,
"grad_norm": 0.9181543588638306,
"learning_rate": 2.1948168364595497e-05,
"loss": 0.5007,
"step": 610
},
{
"epoch": 43.642857142857146,
"grad_norm": 1.2193859815597534,
"learning_rate": 2.1933869752784654e-05,
"loss": 0.4844,
"step": 611
},
{
"epoch": 43.714285714285715,
"grad_norm": 1.193314552307129,
"learning_rate": 2.1919542403770476e-05,
"loss": 0.4721,
"step": 612
},
{
"epoch": 43.785714285714285,
"grad_norm": 1.0561559200286865,
"learning_rate": 2.1905186361196556e-05,
"loss": 0.4683,
"step": 613
},
{
"epoch": 43.857142857142854,
"grad_norm": 0.921262264251709,
"learning_rate": 2.189080166879389e-05,
"loss": 0.4678,
"step": 614
},
{
"epoch": 43.92857142857143,
"grad_norm": 1.2379264831542969,
"learning_rate": 2.1876388370380745e-05,
"loss": 0.4652,
"step": 615
},
{
"epoch": 44.0,
"grad_norm": 1.4002087116241455,
"learning_rate": 2.186194650986253e-05,
"loss": 0.4521,
"step": 616
},
{
"epoch": 44.07142857142857,
"grad_norm": 1.3483561277389526,
"learning_rate": 2.184747613123165e-05,
"loss": 0.5277,
"step": 617
},
{
"epoch": 44.142857142857146,
"grad_norm": 1.591948390007019,
"learning_rate": 2.1832977278567394e-05,
"loss": 0.5384,
"step": 618
},
{
"epoch": 44.214285714285715,
"grad_norm": 1.5949158668518066,
"learning_rate": 2.181844999603578e-05,
"loss": 0.5439,
"step": 619
},
{
"epoch": 44.285714285714285,
"grad_norm": 1.2110360860824585,
"learning_rate": 2.1803894327889425e-05,
"loss": 0.5387,
"step": 620
},
{
"epoch": 44.357142857142854,
"grad_norm": 1.531814455986023,
"learning_rate": 2.178931031846743e-05,
"loss": 0.5284,
"step": 621
},
{
"epoch": 44.42857142857143,
"grad_norm": 1.276275634765625,
"learning_rate": 2.1774698012195206e-05,
"loss": 0.5178,
"step": 622
},
{
"epoch": 44.5,
"grad_norm": 1.3554123640060425,
"learning_rate": 2.1760057453584376e-05,
"loss": 0.5296,
"step": 623
},
{
"epoch": 44.57142857142857,
"grad_norm": 1.1485780477523804,
"learning_rate": 2.1745388687232624e-05,
"loss": 0.518,
"step": 624
},
{
"epoch": 44.642857142857146,
"grad_norm": 1.1781114339828491,
"learning_rate": 2.1730691757823553e-05,
"loss": 0.4851,
"step": 625
},
{
"epoch": 44.714285714285715,
"grad_norm": 1.0893974304199219,
"learning_rate": 2.171596671012655e-05,
"loss": 0.4879,
"step": 626
},
{
"epoch": 44.785714285714285,
"grad_norm": 1.5385526418685913,
"learning_rate": 2.1701213588996683e-05,
"loss": 0.4914,
"step": 627
},
{
"epoch": 44.857142857142854,
"grad_norm": 1.2437193393707275,
"learning_rate": 2.16864324393745e-05,
"loss": 0.4886,
"step": 628
},
{
"epoch": 44.92857142857143,
"grad_norm": 1.032419204711914,
"learning_rate": 2.1671623306285956e-05,
"loss": 0.4817,
"step": 629
},
{
"epoch": 45.0,
"grad_norm": 0.8966816663742065,
"learning_rate": 2.1656786234842237e-05,
"loss": 0.4716,
"step": 630
},
{
"epoch": 45.07142857142857,
"grad_norm": 0.8522362112998962,
"learning_rate": 2.1641921270239632e-05,
"loss": 0.5231,
"step": 631
},
{
"epoch": 45.142857142857146,
"grad_norm": 1.3971338272094727,
"learning_rate": 2.1627028457759408e-05,
"loss": 0.5232,
"step": 632
},
{
"epoch": 45.214285714285715,
"grad_norm": 0.8815805912017822,
"learning_rate": 2.1612107842767647e-05,
"loss": 0.5332,
"step": 633
},
{
"epoch": 45.285714285714285,
"grad_norm": 1.0322710275650024,
"learning_rate": 2.1597159470715133e-05,
"loss": 0.514,
"step": 634
},
{
"epoch": 45.357142857142854,
"grad_norm": 1.049417495727539,
"learning_rate": 2.15821833871372e-05,
"loss": 0.5072,
"step": 635
},
{
"epoch": 45.42857142857143,
"grad_norm": 1.2748231887817383,
"learning_rate": 2.1567179637653594e-05,
"loss": 0.5082,
"step": 636
},
{
"epoch": 45.5,
"grad_norm": 1.0079835653305054,
"learning_rate": 2.1552148267968347e-05,
"loss": 0.5117,
"step": 637
},
{
"epoch": 45.57142857142857,
"grad_norm": 1.1158453226089478,
"learning_rate": 2.1537089323869604e-05,
"loss": 0.4981,
"step": 638
},
{
"epoch": 45.642857142857146,
"grad_norm": 0.9508253931999207,
"learning_rate": 2.152200285122953e-05,
"loss": 0.4802,
"step": 639
},
{
"epoch": 45.714285714285715,
"grad_norm": 0.9986389875411987,
"learning_rate": 2.1506888896004133e-05,
"loss": 0.4886,
"step": 640
},
{
"epoch": 45.785714285714285,
"grad_norm": 1.0040160417556763,
"learning_rate": 2.1491747504233138e-05,
"loss": 0.4882,
"step": 641
},
{
"epoch": 45.857142857142854,
"grad_norm": 1.2012308835983276,
"learning_rate": 2.147657872203986e-05,
"loss": 0.4743,
"step": 642
},
{
"epoch": 45.92857142857143,
"grad_norm": 1.10345458984375,
"learning_rate": 2.1461382595631036e-05,
"loss": 0.474,
"step": 643
},
{
"epoch": 46.0,
"grad_norm": 1.7070404291152954,
"learning_rate": 2.14461591712967e-05,
"loss": 0.4722,
"step": 644
},
{
"epoch": 46.07142857142857,
"grad_norm": 0.9975429177284241,
"learning_rate": 2.1430908495410042e-05,
"loss": 0.4875,
"step": 645
},
{
"epoch": 46.142857142857146,
"grad_norm": 1.1342263221740723,
"learning_rate": 2.1415630614427272e-05,
"loss": 0.4815,
"step": 646
},
{
"epoch": 46.214285714285715,
"grad_norm": 0.9953164458274841,
"learning_rate": 2.140032557488746e-05,
"loss": 0.4853,
"step": 647
},
{
"epoch": 46.285714285714285,
"grad_norm": 1.2954375743865967,
"learning_rate": 2.1384993423412407e-05,
"loss": 0.472,
"step": 648
},
{
"epoch": 46.357142857142854,
"grad_norm": 1.391609787940979,
"learning_rate": 2.136963420670651e-05,
"loss": 0.4983,
"step": 649
},
{
"epoch": 46.42857142857143,
"grad_norm": 1.08943510055542,
"learning_rate": 2.135424797155661e-05,
"loss": 0.4816,
"step": 650
},
{
"epoch": 46.5,
"grad_norm": 1.3413355350494385,
"learning_rate": 2.1338834764831845e-05,
"loss": 0.4805,
"step": 651
},
{
"epoch": 46.57142857142857,
"grad_norm": 1.7216105461120605,
"learning_rate": 2.1323394633483514e-05,
"loss": 0.4862,
"step": 652
},
{
"epoch": 46.642857142857146,
"grad_norm": 1.4927905797958374,
"learning_rate": 2.1307927624544934e-05,
"loss": 0.488,
"step": 653
},
{
"epoch": 46.714285714285715,
"grad_norm": 0.9677718877792358,
"learning_rate": 2.1292433785131298e-05,
"loss": 0.4754,
"step": 654
},
{
"epoch": 46.785714285714285,
"grad_norm": 1.8179880380630493,
"learning_rate": 2.1276913162439532e-05,
"loss": 0.4819,
"step": 655
},
{
"epoch": 46.857142857142854,
"grad_norm": 1.3467894792556763,
"learning_rate": 2.1261365803748138e-05,
"loss": 0.4773,
"step": 656
},
{
"epoch": 46.92857142857143,
"grad_norm": 1.2468312978744507,
"learning_rate": 2.124579175641707e-05,
"loss": 0.4767,
"step": 657
},
{
"epoch": 47.0,
"grad_norm": 1.0712882280349731,
"learning_rate": 2.1230191067887574e-05,
"loss": 0.4892,
"step": 658
},
{
"epoch": 47.07142857142857,
"grad_norm": 1.5465167760849,
"learning_rate": 2.121456378568206e-05,
"loss": 0.5172,
"step": 659
},
{
"epoch": 47.142857142857146,
"grad_norm": 1.4434707164764404,
"learning_rate": 2.1198909957403928e-05,
"loss": 0.5215,
"step": 660
},
{
"epoch": 47.214285714285715,
"grad_norm": 1.7783178091049194,
"learning_rate": 2.1183229630737467e-05,
"loss": 0.5311,
"step": 661
},
{
"epoch": 47.285714285714285,
"grad_norm": 1.5930043458938599,
"learning_rate": 2.1167522853447664e-05,
"loss": 0.5148,
"step": 662
},
{
"epoch": 47.357142857142854,
"grad_norm": 1.139757513999939,
"learning_rate": 2.1151789673380086e-05,
"loss": 0.5086,
"step": 663
},
{
"epoch": 47.42857142857143,
"grad_norm": 1.0376228094100952,
"learning_rate": 2.113603013846073e-05,
"loss": 0.5086,
"step": 664
},
{
"epoch": 47.5,
"grad_norm": 1.1366406679153442,
"learning_rate": 2.1120244296695874e-05,
"loss": 0.4864,
"step": 665
},
{
"epoch": 47.57142857142857,
"grad_norm": 1.6081647872924805,
"learning_rate": 2.1104432196171924e-05,
"loss": 0.497,
"step": 666
},
{
"epoch": 47.642857142857146,
"grad_norm": 1.612080454826355,
"learning_rate": 2.1088593885055288e-05,
"loss": 0.5288,
"step": 667
},
{
"epoch": 47.714285714285715,
"grad_norm": 1.436149001121521,
"learning_rate": 2.1072729411592206e-05,
"loss": 0.5378,
"step": 668
},
{
"epoch": 47.785714285714285,
"grad_norm": 1.150154948234558,
"learning_rate": 2.105683882410861e-05,
"loss": 0.5208,
"step": 669
},
{
"epoch": 47.857142857142854,
"grad_norm": 1.2914098501205444,
"learning_rate": 2.1040922171009993e-05,
"loss": 0.5121,
"step": 670
},
{
"epoch": 47.92857142857143,
"grad_norm": 1.283091425895691,
"learning_rate": 2.1024979500781232e-05,
"loss": 0.5282,
"step": 671
},
{
"epoch": 48.0,
"grad_norm": 1.0183498859405518,
"learning_rate": 2.1009010861986476e-05,
"loss": 0.5218,
"step": 672
},
{
"epoch": 48.07142857142857,
"grad_norm": 1.2651445865631104,
"learning_rate": 2.099301630326896e-05,
"loss": 0.506,
"step": 673
},
{
"epoch": 48.142857142857146,
"grad_norm": 1.3340294361114502,
"learning_rate": 2.0976995873350887e-05,
"loss": 0.5042,
"step": 674
},
{
"epoch": 48.214285714285715,
"grad_norm": 1.3751298189163208,
"learning_rate": 2.096094962103326e-05,
"loss": 0.5179,
"step": 675
},
{
"epoch": 48.285714285714285,
"grad_norm": 1.1513422727584839,
"learning_rate": 2.0944877595195755e-05,
"loss": 0.5042,
"step": 676
},
{
"epoch": 48.357142857142854,
"grad_norm": 1.2138347625732422,
"learning_rate": 2.092877984479654e-05,
"loss": 0.4913,
"step": 677
},
{
"epoch": 48.42857142857143,
"grad_norm": 1.053213119506836,
"learning_rate": 2.091265641887217e-05,
"loss": 0.4938,
"step": 678
},
{
"epoch": 48.5,
"grad_norm": 0.9006969332695007,
"learning_rate": 2.089650736653738e-05,
"loss": 0.489,
"step": 679
},
{
"epoch": 48.57142857142857,
"grad_norm": 1.1186134815216064,
"learning_rate": 2.088033273698499e-05,
"loss": 0.4847,
"step": 680
},
{
"epoch": 48.642857142857146,
"grad_norm": 0.9486108422279358,
"learning_rate": 2.086413257948573e-05,
"loss": 0.4911,
"step": 681
},
{
"epoch": 48.714285714285715,
"grad_norm": 1.0337382555007935,
"learning_rate": 2.0847906943388085e-05,
"loss": 0.4987,
"step": 682
},
{
"epoch": 48.785714285714285,
"grad_norm": 1.0863022804260254,
"learning_rate": 2.0831655878118155e-05,
"loss": 0.5047,
"step": 683
},
{
"epoch": 48.857142857142854,
"grad_norm": 1.1968601942062378,
"learning_rate": 2.081537943317951e-05,
"loss": 0.4954,
"step": 684
},
{
"epoch": 48.92857142857143,
"grad_norm": 0.9392350316047668,
"learning_rate": 2.0799077658153022e-05,
"loss": 0.49,
"step": 685
},
{
"epoch": 49.0,
"grad_norm": 0.9233869314193726,
"learning_rate": 2.0782750602696722e-05,
"loss": 0.4859,
"step": 686
},
{
"epoch": 49.07142857142857,
"grad_norm": 1.6311028003692627,
"learning_rate": 2.0766398316545648e-05,
"loss": 0.4887,
"step": 687
},
{
"epoch": 49.142857142857146,
"grad_norm": 1.3586399555206299,
"learning_rate": 2.0750020849511712e-05,
"loss": 0.4741,
"step": 688
},
{
"epoch": 49.214285714285715,
"grad_norm": 1.0023800134658813,
"learning_rate": 2.0733618251483506e-05,
"loss": 0.4679,
"step": 689
},
{
"epoch": 49.285714285714285,
"grad_norm": 1.2750869989395142,
"learning_rate": 2.07171905724262e-05,
"loss": 0.4741,
"step": 690
},
{
"epoch": 49.357142857142854,
"grad_norm": 1.5000548362731934,
"learning_rate": 2.070073786238134e-05,
"loss": 0.4978,
"step": 691
},
{
"epoch": 49.42857142857143,
"grad_norm": 0.9990112781524658,
"learning_rate": 2.0684260171466745e-05,
"loss": 0.4802,
"step": 692
},
{
"epoch": 49.5,
"grad_norm": 1.5221929550170898,
"learning_rate": 2.066775754987632e-05,
"loss": 0.5306,
"step": 693
},
{
"epoch": 49.57142857142857,
"grad_norm": 0.9824712872505188,
"learning_rate": 2.0651230047879905e-05,
"loss": 0.515,
"step": 694
},
{
"epoch": 49.642857142857146,
"grad_norm": 1.8085649013519287,
"learning_rate": 2.0634677715823137e-05,
"loss": 0.5009,
"step": 695
},
{
"epoch": 49.714285714285715,
"grad_norm": 1.3218837976455688,
"learning_rate": 2.0618100604127295e-05,
"loss": 0.4922,
"step": 696
},
{
"epoch": 49.785714285714285,
"grad_norm": 0.9237383008003235,
"learning_rate": 2.0601498763289138e-05,
"loss": 0.4839,
"step": 697
},
{
"epoch": 49.857142857142854,
"grad_norm": 0.9790183305740356,
"learning_rate": 2.058487224388075e-05,
"loss": 0.491,
"step": 698
},
{
"epoch": 49.92857142857143,
"grad_norm": 0.901969850063324,
"learning_rate": 2.0568221096549384e-05,
"loss": 0.4829,
"step": 699
},
{
"epoch": 50.0,
"grad_norm": 1.3763041496276855,
"learning_rate": 2.0551545372017332e-05,
"loss": 0.4899,
"step": 700
},
{
"epoch": 50.07142857142857,
"grad_norm": 1.2385435104370117,
"learning_rate": 2.0534845121081742e-05,
"loss": 0.5242,
"step": 701
},
{
"epoch": 50.142857142857146,
"grad_norm": 1.226261854171753,
"learning_rate": 2.0518120394614477e-05,
"loss": 0.5066,
"step": 702
},
{
"epoch": 50.214285714285715,
"grad_norm": 1.8473483324050903,
"learning_rate": 2.0501371243561946e-05,
"loss": 0.5146,
"step": 703
},
{
"epoch": 50.285714285714285,
"grad_norm": 0.9717339873313904,
"learning_rate": 2.0484597718944973e-05,
"loss": 0.4933,
"step": 704
},
{
"epoch": 50.357142857142854,
"grad_norm": 0.8980492353439331,
"learning_rate": 2.0467799871858624e-05,
"loss": 0.5008,
"step": 705
},
{
"epoch": 50.42857142857143,
"grad_norm": 1.2667917013168335,
"learning_rate": 2.045097775347205e-05,
"loss": 0.5037,
"step": 706
},
{
"epoch": 50.5,
"grad_norm": 1.1079235076904297,
"learning_rate": 2.0434131415028346e-05,
"loss": 0.5308,
"step": 707
},
{
"epoch": 50.57142857142857,
"grad_norm": 1.2020525932312012,
"learning_rate": 2.0417260907844376e-05,
"loss": 0.5263,
"step": 708
},
{
"epoch": 50.642857142857146,
"grad_norm": 0.9911800026893616,
"learning_rate": 2.0400366283310636e-05,
"loss": 0.5242,
"step": 709
},
{
"epoch": 50.714285714285715,
"grad_norm": 1.3526697158813477,
"learning_rate": 2.038344759289108e-05,
"loss": 0.505,
"step": 710
},
{
"epoch": 50.785714285714285,
"grad_norm": 0.9310383796691895,
"learning_rate": 2.036650488812297e-05,
"loss": 0.5118,
"step": 711
},
{
"epoch": 50.857142857142854,
"grad_norm": 1.1431801319122314,
"learning_rate": 2.0349538220616727e-05,
"loss": 0.5034,
"step": 712
},
{
"epoch": 50.92857142857143,
"grad_norm": 1.1919463872909546,
"learning_rate": 2.033254764205576e-05,
"loss": 0.5078,
"step": 713
},
{
"epoch": 51.0,
"grad_norm": 1.073749303817749,
"learning_rate": 2.031553320419632e-05,
"loss": 0.5062,
"step": 714
},
{
"epoch": 51.07142857142857,
"grad_norm": 1.0797291994094849,
"learning_rate": 2.029849495886733e-05,
"loss": 0.4973,
"step": 715
},
{
"epoch": 51.142857142857146,
"grad_norm": 0.9815460443496704,
"learning_rate": 2.0281432957970248e-05,
"loss": 0.4946,
"step": 716
},
{
"epoch": 51.214285714285715,
"grad_norm": 1.0774970054626465,
"learning_rate": 2.026434725347888e-05,
"loss": 0.4957,
"step": 717
},
{
"epoch": 51.285714285714285,
"grad_norm": 1.0873366594314575,
"learning_rate": 2.0247237897439254e-05,
"loss": 0.4934,
"step": 718
},
{
"epoch": 51.357142857142854,
"grad_norm": 1.7948505878448486,
"learning_rate": 2.0230104941969426e-05,
"loss": 0.4875,
"step": 719
},
{
"epoch": 51.42857142857143,
"grad_norm": 0.9562950730323792,
"learning_rate": 2.021294843925936e-05,
"loss": 0.4953,
"step": 720
},
{
"epoch": 51.5,
"grad_norm": 1.1674566268920898,
"learning_rate": 2.0195768441570727e-05,
"loss": 0.4786,
"step": 721
},
{
"epoch": 51.57142857142857,
"grad_norm": 1.289368987083435,
"learning_rate": 2.017856500123679e-05,
"loss": 0.4852,
"step": 722
},
{
"epoch": 51.642857142857146,
"grad_norm": 1.5186100006103516,
"learning_rate": 2.0161338170662208e-05,
"loss": 0.5016,
"step": 723
},
{
"epoch": 51.714285714285715,
"grad_norm": 1.0470561981201172,
"learning_rate": 2.0144088002322893e-05,
"loss": 0.4961,
"step": 724
},
{
"epoch": 51.785714285714285,
"grad_norm": 1.3715919256210327,
"learning_rate": 2.012681454876585e-05,
"loss": 0.4965,
"step": 725
},
{
"epoch": 51.857142857142854,
"grad_norm": 1.760349154472351,
"learning_rate": 2.010951786260901e-05,
"loss": 0.5053,
"step": 726
},
{
"epoch": 51.92857142857143,
"grad_norm": 0.989055335521698,
"learning_rate": 2.0092197996541085e-05,
"loss": 0.4947,
"step": 727
},
{
"epoch": 52.0,
"grad_norm": 1.1888622045516968,
"learning_rate": 2.007485500332139e-05,
"loss": 0.4999,
"step": 728
},
{
"epoch": 52.07142857142857,
"grad_norm": 1.1220037937164307,
"learning_rate": 2.005748893577969e-05,
"loss": 0.4733,
"step": 729
},
{
"epoch": 52.142857142857146,
"grad_norm": 1.0497477054595947,
"learning_rate": 2.004009984681603e-05,
"loss": 0.4897,
"step": 730
},
{
"epoch": 52.214285714285715,
"grad_norm": 1.002177119255066,
"learning_rate": 2.0022687789400607e-05,
"loss": 0.4602,
"step": 731
},
{
"epoch": 52.285714285714285,
"grad_norm": 1.1776399612426758,
"learning_rate": 2.0005252816573553e-05,
"loss": 0.4707,
"step": 732
},
{
"epoch": 52.357142857142854,
"grad_norm": 1.0840985774993896,
"learning_rate": 1.9987794981444823e-05,
"loss": 0.4703,
"step": 733
},
{
"epoch": 52.42857142857143,
"grad_norm": 0.9027703404426575,
"learning_rate": 1.9970314337194017e-05,
"loss": 0.4657,
"step": 734
},
{
"epoch": 52.5,
"grad_norm": 1.3420389890670776,
"learning_rate": 1.99528109370702e-05,
"loss": 0.4993,
"step": 735
},
{
"epoch": 52.57142857142857,
"grad_norm": 1.270432710647583,
"learning_rate": 1.9935284834391768e-05,
"loss": 0.5083,
"step": 736
},
{
"epoch": 52.642857142857146,
"grad_norm": 1.1852244138717651,
"learning_rate": 1.991773608254627e-05,
"loss": 0.4631,
"step": 737
},
{
"epoch": 52.714285714285715,
"grad_norm": 0.8186385035514832,
"learning_rate": 1.9900164734990246e-05,
"loss": 0.4521,
"step": 738
},
{
"epoch": 52.785714285714285,
"grad_norm": 0.8292627334594727,
"learning_rate": 1.988257084524907e-05,
"loss": 0.4547,
"step": 739
},
{
"epoch": 52.857142857142854,
"grad_norm": 0.9689545035362244,
"learning_rate": 1.9864954466916776e-05,
"loss": 0.4491,
"step": 740
},
{
"epoch": 52.92857142857143,
"grad_norm": 0.8372877836227417,
"learning_rate": 1.9847315653655915e-05,
"loss": 0.4482,
"step": 741
},
{
"epoch": 53.0,
"grad_norm": 0.8107361197471619,
"learning_rate": 1.9829654459197366e-05,
"loss": 0.4358,
"step": 742
},
{
"epoch": 53.07142857142857,
"grad_norm": 1.1326669454574585,
"learning_rate": 1.9811970937340196e-05,
"loss": 0.4778,
"step": 743
},
{
"epoch": 53.142857142857146,
"grad_norm": 1.2389705181121826,
"learning_rate": 1.979426514195147e-05,
"loss": 0.4932,
"step": 744
},
{
"epoch": 53.214285714285715,
"grad_norm": 1.0887947082519531,
"learning_rate": 1.977653712696612e-05,
"loss": 0.4844,
"step": 745
},
{
"epoch": 53.285714285714285,
"grad_norm": 1.0338947772979736,
"learning_rate": 1.9758786946386747e-05,
"loss": 0.4858,
"step": 746
},
{
"epoch": 53.357142857142854,
"grad_norm": 1.2080212831497192,
"learning_rate": 1.9741014654283486e-05,
"loss": 0.4965,
"step": 747
},
{
"epoch": 53.42857142857143,
"grad_norm": 0.9042844176292419,
"learning_rate": 1.9723220304793818e-05,
"loss": 0.4728,
"step": 748
},
{
"epoch": 53.5,
"grad_norm": 0.9950966835021973,
"learning_rate": 1.970540395212242e-05,
"loss": 0.4872,
"step": 749
},
{
"epoch": 53.57142857142857,
"grad_norm": 0.9533802270889282,
"learning_rate": 1.9687565650540984e-05,
"loss": 0.4812,
"step": 750
},
{
"epoch": 53.642857142857146,
"grad_norm": 1.2186425924301147,
"learning_rate": 1.9669705454388076e-05,
"loss": 0.5068,
"step": 751
},
{
"epoch": 53.714285714285715,
"grad_norm": 1.2322208881378174,
"learning_rate": 1.9651823418068954e-05,
"loss": 0.504,
"step": 752
},
{
"epoch": 53.785714285714285,
"grad_norm": 1.1904113292694092,
"learning_rate": 1.96339195960554e-05,
"loss": 0.5047,
"step": 753
},
{
"epoch": 53.857142857142854,
"grad_norm": 1.0589790344238281,
"learning_rate": 1.9615994042885556e-05,
"loss": 0.4982,
"step": 754
},
{
"epoch": 53.92857142857143,
"grad_norm": 1.0914963483810425,
"learning_rate": 1.9598046813163766e-05,
"loss": 0.5033,
"step": 755
},
{
"epoch": 54.0,
"grad_norm": 1.1800282001495361,
"learning_rate": 1.958007796156041e-05,
"loss": 0.4797,
"step": 756
},
{
"epoch": 54.07142857142857,
"grad_norm": 1.3749839067459106,
"learning_rate": 1.9562087542811725e-05,
"loss": 0.484,
"step": 757
},
{
"epoch": 54.142857142857146,
"grad_norm": 1.4643574953079224,
"learning_rate": 1.9544075611719642e-05,
"loss": 0.4876,
"step": 758
},
{
"epoch": 54.214285714285715,
"grad_norm": 1.3803106546401978,
"learning_rate": 1.9526042223151634e-05,
"loss": 0.4781,
"step": 759
},
{
"epoch": 54.285714285714285,
"grad_norm": 1.45980703830719,
"learning_rate": 1.9507987432040527e-05,
"loss": 0.4741,
"step": 760
},
{
"epoch": 54.357142857142854,
"grad_norm": 1.256339192390442,
"learning_rate": 1.9489911293384337e-05,
"loss": 0.4642,
"step": 761
},
{
"epoch": 54.42857142857143,
"grad_norm": 1.1292353868484497,
"learning_rate": 1.947181386224613e-05,
"loss": 0.4555,
"step": 762
},
{
"epoch": 54.5,
"grad_norm": 1.1383864879608154,
"learning_rate": 1.9453695193753812e-05,
"loss": 0.4832,
"step": 763
},
{
"epoch": 54.57142857142857,
"grad_norm": 1.0356770753860474,
"learning_rate": 1.9435555343099993e-05,
"loss": 0.4795,
"step": 764
},
{
"epoch": 54.642857142857146,
"grad_norm": 1.1825125217437744,
"learning_rate": 1.9417394365541803e-05,
"loss": 0.4459,
"step": 765
},
{
"epoch": 54.714285714285715,
"grad_norm": 1.20157790184021,
"learning_rate": 1.9399212316400726e-05,
"loss": 0.4402,
"step": 766
},
{
"epoch": 54.785714285714285,
"grad_norm": 0.9616509079933167,
"learning_rate": 1.9381009251062447e-05,
"loss": 0.4374,
"step": 767
},
{
"epoch": 54.857142857142854,
"grad_norm": 1.1224186420440674,
"learning_rate": 1.936278522497665e-05,
"loss": 0.4331,
"step": 768
},
{
"epoch": 54.92857142857143,
"grad_norm": 1.2672364711761475,
"learning_rate": 1.934454029365688e-05,
"loss": 0.4457,
"step": 769
},
{
"epoch": 55.0,
"grad_norm": 1.050889492034912,
"learning_rate": 1.9326274512680363e-05,
"loss": 0.4349,
"step": 770
},
{
"epoch": 55.07142857142857,
"grad_norm": 2.0807056427001953,
"learning_rate": 1.930798793768784e-05,
"loss": 0.5191,
"step": 771
},
{
"epoch": 55.142857142857146,
"grad_norm": 1.3528231382369995,
"learning_rate": 1.9289680624383383e-05,
"loss": 0.5089,
"step": 772
},
{
"epoch": 55.214285714285715,
"grad_norm": 1.518905520439148,
"learning_rate": 1.927135262853425e-05,
"loss": 0.5047,
"step": 773
},
{
"epoch": 55.285714285714285,
"grad_norm": 1.4318668842315674,
"learning_rate": 1.925300400597069e-05,
"loss": 0.4982,
"step": 774
},
{
"epoch": 55.357142857142854,
"grad_norm": 2.070688486099243,
"learning_rate": 1.9234634812585788e-05,
"loss": 0.5147,
"step": 775
},
{
"epoch": 55.42857142857143,
"grad_norm": 1.3405417203903198,
"learning_rate": 1.92162451043353e-05,
"loss": 0.4914,
"step": 776
},
{
"epoch": 55.5,
"grad_norm": 1.6927118301391602,
"learning_rate": 1.9197834937237457e-05,
"loss": 0.5037,
"step": 777
},
{
"epoch": 55.57142857142857,
"grad_norm": 1.8138580322265625,
"learning_rate": 1.9179404367372832e-05,
"loss": 0.5024,
"step": 778
},
{
"epoch": 55.642857142857146,
"grad_norm": 2.0208585262298584,
"learning_rate": 1.9160953450884125e-05,
"loss": 0.5384,
"step": 779
},
{
"epoch": 55.714285714285715,
"grad_norm": 2.25797438621521,
"learning_rate": 1.9142482243976035e-05,
"loss": 0.5283,
"step": 780
},
{
"epoch": 55.785714285714285,
"grad_norm": 1.1891279220581055,
"learning_rate": 1.912399080291506e-05,
"loss": 0.5278,
"step": 781
},
{
"epoch": 55.857142857142854,
"grad_norm": 1.4322525262832642,
"learning_rate": 1.910547918402934e-05,
"loss": 0.5276,
"step": 782
},
{
"epoch": 55.92857142857143,
"grad_norm": 2.4593029022216797,
"learning_rate": 1.9086947443708472e-05,
"loss": 0.5233,
"step": 783
},
{
"epoch": 56.0,
"grad_norm": 2.622018575668335,
"learning_rate": 1.9068395638403347e-05,
"loss": 0.5255,
"step": 784
},
{
"epoch": 56.07142857142857,
"grad_norm": 1.38777756690979,
"learning_rate": 1.9049823824625993e-05,
"loss": 0.5101,
"step": 785
},
{
"epoch": 56.142857142857146,
"grad_norm": 1.46625554561615,
"learning_rate": 1.9031232058949362e-05,
"loss": 0.5136,
"step": 786
},
{
"epoch": 56.214285714285715,
"grad_norm": 2.046380043029785,
"learning_rate": 1.901262039800721e-05,
"loss": 0.5203,
"step": 787
},
{
"epoch": 56.285714285714285,
"grad_norm": 2.132017135620117,
"learning_rate": 1.899398889849387e-05,
"loss": 0.5217,
"step": 788
},
{
"epoch": 56.357142857142854,
"grad_norm": 1.447534203529358,
"learning_rate": 1.897533761716413e-05,
"loss": 0.5065,
"step": 789
},
{
"epoch": 56.42857142857143,
"grad_norm": 1.4169517755508423,
"learning_rate": 1.8956666610833024e-05,
"loss": 0.4964,
"step": 790
},
{
"epoch": 56.5,
"grad_norm": 1.4682508707046509,
"learning_rate": 1.893797593637568e-05,
"loss": 0.503,
"step": 791
},
{
"epoch": 56.57142857142857,
"grad_norm": 1.9355621337890625,
"learning_rate": 1.8919265650727127e-05,
"loss": 0.513,
"step": 792
},
{
"epoch": 56.642857142857146,
"grad_norm": 1.3220174312591553,
"learning_rate": 1.8900535810882146e-05,
"loss": 0.5042,
"step": 793
},
{
"epoch": 56.714285714285715,
"grad_norm": 1.2512553930282593,
"learning_rate": 1.8881786473895074e-05,
"loss": 0.4999,
"step": 794
},
{
"epoch": 56.785714285714285,
"grad_norm": 1.3863564729690552,
"learning_rate": 1.8863017696879642e-05,
"loss": 0.5136,
"step": 795
},
{
"epoch": 56.857142857142854,
"grad_norm": 2.035372257232666,
"learning_rate": 1.8844229537008803e-05,
"loss": 0.5169,
"step": 796
},
{
"epoch": 56.92857142857143,
"grad_norm": 1.2508081197738647,
"learning_rate": 1.8825422051514546e-05,
"loss": 0.5034,
"step": 797
},
{
"epoch": 57.0,
"grad_norm": 1.1252161264419556,
"learning_rate": 1.880659529768774e-05,
"loss": 0.499,
"step": 798
},
{
"epoch": 57.07142857142857,
"grad_norm": 1.2981981039047241,
"learning_rate": 1.878774933287794e-05,
"loss": 0.4656,
"step": 799
},
{
"epoch": 57.142857142857146,
"grad_norm": 1.5123335123062134,
"learning_rate": 1.8768884214493217e-05,
"loss": 0.4644,
"step": 800
},
{
"epoch": 57.214285714285715,
"grad_norm": 1.5413196086883545,
"learning_rate": 1.8750000000000002e-05,
"loss": 0.4718,
"step": 801
},
{
"epoch": 57.285714285714285,
"grad_norm": 1.064577341079712,
"learning_rate": 1.873109674692288e-05,
"loss": 0.4565,
"step": 802
},
{
"epoch": 57.357142857142854,
"grad_norm": 1.0118346214294434,
"learning_rate": 1.8712174512844445e-05,
"loss": 0.4452,
"step": 803
},
{
"epoch": 57.42857142857143,
"grad_norm": 1.1867804527282715,
"learning_rate": 1.8693233355405097e-05,
"loss": 0.4662,
"step": 804
},
{
"epoch": 57.5,
"grad_norm": 1.3060356378555298,
"learning_rate": 1.8674273332302888e-05,
"loss": 0.4885,
"step": 805
},
{
"epoch": 57.57142857142857,
"grad_norm": 1.154142141342163,
"learning_rate": 1.865529450129334e-05,
"loss": 0.4803,
"step": 806
},
{
"epoch": 57.642857142857146,
"grad_norm": 1.1288933753967285,
"learning_rate": 1.863629692018926e-05,
"loss": 0.46,
"step": 807
},
{
"epoch": 57.714285714285715,
"grad_norm": 1.0589231252670288,
"learning_rate": 1.861728064686058e-05,
"loss": 0.4544,
"step": 808
},
{
"epoch": 57.785714285714285,
"grad_norm": 1.0576995611190796,
"learning_rate": 1.859824573923416e-05,
"loss": 0.4536,
"step": 809
},
{
"epoch": 57.857142857142854,
"grad_norm": 0.9447432160377502,
"learning_rate": 1.857919225529363e-05,
"loss": 0.4448,
"step": 810
},
{
"epoch": 57.92857142857143,
"grad_norm": 1.0149959325790405,
"learning_rate": 1.8560120253079215e-05,
"loss": 0.4486,
"step": 811
},
{
"epoch": 58.0,
"grad_norm": 0.9544550180435181,
"learning_rate": 1.854102979068753e-05,
"loss": 0.4462,
"step": 812
},
{
"epoch": 58.07142857142857,
"grad_norm": 1.2468597888946533,
"learning_rate": 1.852192092627144e-05,
"loss": 0.4761,
"step": 813
},
{
"epoch": 58.142857142857146,
"grad_norm": 1.5163100957870483,
"learning_rate": 1.850279371803986e-05,
"loss": 0.4787,
"step": 814
},
{
"epoch": 58.214285714285715,
"grad_norm": 1.0021116733551025,
"learning_rate": 1.848364822425759e-05,
"loss": 0.4615,
"step": 815
},
{
"epoch": 58.285714285714285,
"grad_norm": 1.043244481086731,
"learning_rate": 1.8464484503245105e-05,
"loss": 0.4545,
"step": 816
},
{
"epoch": 58.357142857142854,
"grad_norm": 1.0752507448196411,
"learning_rate": 1.8445302613378444e-05,
"loss": 0.4732,
"step": 817
},
{
"epoch": 58.42857142857143,
"grad_norm": 1.4201626777648926,
"learning_rate": 1.842610261308895e-05,
"loss": 0.4549,
"step": 818
},
{
"epoch": 58.5,
"grad_norm": 1.5272448062896729,
"learning_rate": 1.840688456086318e-05,
"loss": 0.4869,
"step": 819
},
{
"epoch": 58.57142857142857,
"grad_norm": 1.0299702882766724,
"learning_rate": 1.8387648515242625e-05,
"loss": 0.4748,
"step": 820
},
{
"epoch": 58.642857142857146,
"grad_norm": 0.9576692581176758,
"learning_rate": 1.8368394534823635e-05,
"loss": 0.4726,
"step": 821
},
{
"epoch": 58.714285714285715,
"grad_norm": 1.2337346076965332,
"learning_rate": 1.8349122678257167e-05,
"loss": 0.4643,
"step": 822
},
{
"epoch": 58.785714285714285,
"grad_norm": 1.2723453044891357,
"learning_rate": 1.832983300424864e-05,
"loss": 0.4724,
"step": 823
},
{
"epoch": 58.857142857142854,
"grad_norm": 1.1447594165802002,
"learning_rate": 1.831052557155775e-05,
"loss": 0.4783,
"step": 824
},
{
"epoch": 58.92857142857143,
"grad_norm": 1.0443583726882935,
"learning_rate": 1.8291200438998273e-05,
"loss": 0.4703,
"step": 825
},
{
"epoch": 59.0,
"grad_norm": 1.0168306827545166,
"learning_rate": 1.827185766543793e-05,
"loss": 0.4603,
"step": 826
},
{
"epoch": 59.07142857142857,
"grad_norm": 1.530557632446289,
"learning_rate": 1.8252497309798147e-05,
"loss": 0.4793,
"step": 827
},
{
"epoch": 59.142857142857146,
"grad_norm": 1.9315145015716553,
"learning_rate": 1.823311943105394e-05,
"loss": 0.4631,
"step": 828
},
{
"epoch": 59.214285714285715,
"grad_norm": 0.9021238088607788,
"learning_rate": 1.821372408823368e-05,
"loss": 0.4389,
"step": 829
},
{
"epoch": 59.285714285714285,
"grad_norm": 1.1420314311981201,
"learning_rate": 1.8194311340418946e-05,
"loss": 0.468,
"step": 830
},
{
"epoch": 59.357142857142854,
"grad_norm": 1.4039769172668457,
"learning_rate": 1.8174881246744335e-05,
"loss": 0.4488,
"step": 831
},
{
"epoch": 59.42857142857143,
"grad_norm": 1.3697187900543213,
"learning_rate": 1.8155433866397285e-05,
"loss": 0.4487,
"step": 832
},
{
"epoch": 59.5,
"grad_norm": 1.7672585248947144,
"learning_rate": 1.8135969258617884e-05,
"loss": 0.4566,
"step": 833
},
{
"epoch": 59.57142857142857,
"grad_norm": 0.8074151277542114,
"learning_rate": 1.8116487482698706e-05,
"loss": 0.4433,
"step": 834
},
{
"epoch": 59.642857142857146,
"grad_norm": 1.2005566358566284,
"learning_rate": 1.809698859798462e-05,
"loss": 0.4609,
"step": 835
},
{
"epoch": 59.714285714285715,
"grad_norm": 1.7096061706542969,
"learning_rate": 1.807747266387261e-05,
"loss": 0.4588,
"step": 836
},
{
"epoch": 59.785714285714285,
"grad_norm": 1.5230960845947266,
"learning_rate": 1.8057939739811595e-05,
"loss": 0.4521,
"step": 837
},
{
"epoch": 59.857142857142854,
"grad_norm": 1.007333517074585,
"learning_rate": 1.8038389885302248e-05,
"loss": 0.4624,
"step": 838
},
{
"epoch": 59.92857142857143,
"grad_norm": 1.0005038976669312,
"learning_rate": 1.8018823159896816e-05,
"loss": 0.4571,
"step": 839
},
{
"epoch": 60.0,
"grad_norm": 1.280025839805603,
"learning_rate": 1.799923962319894e-05,
"loss": 0.4513,
"step": 840
},
{
"epoch": 60.07142857142857,
"grad_norm": 1.4071838855743408,
"learning_rate": 1.7979639334863467e-05,
"loss": 0.5027,
"step": 841
},
{
"epoch": 60.142857142857146,
"grad_norm": 1.7667009830474854,
"learning_rate": 1.796002235459628e-05,
"loss": 0.5014,
"step": 842
},
{
"epoch": 60.214285714285715,
"grad_norm": 1.211600661277771,
"learning_rate": 1.7940388742154096e-05,
"loss": 0.4824,
"step": 843
},
{
"epoch": 60.285714285714285,
"grad_norm": 1.164533019065857,
"learning_rate": 1.7920738557344308e-05,
"loss": 0.4948,
"step": 844
},
{
"epoch": 60.357142857142854,
"grad_norm": 1.3832149505615234,
"learning_rate": 1.790107186002478e-05,
"loss": 0.4807,
"step": 845
},
{
"epoch": 60.42857142857143,
"grad_norm": 1.3253182172775269,
"learning_rate": 1.788138871010369e-05,
"loss": 0.4993,
"step": 846
},
{
"epoch": 60.5,
"grad_norm": 1.081281304359436,
"learning_rate": 1.7861689167539324e-05,
"loss": 0.5017,
"step": 847
},
{
"epoch": 60.57142857142857,
"grad_norm": 1.1732275485992432,
"learning_rate": 1.784197329233991e-05,
"loss": 0.4831,
"step": 848
},
{
"epoch": 60.642857142857146,
"grad_norm": 1.404449462890625,
"learning_rate": 1.782224114456341e-05,
"loss": 0.4625,
"step": 849
},
{
"epoch": 60.714285714285715,
"grad_norm": 1.4505149126052856,
"learning_rate": 1.7802492784317377e-05,
"loss": 0.4613,
"step": 850
},
{
"epoch": 60.785714285714285,
"grad_norm": 0.950019121170044,
"learning_rate": 1.7782728271758746e-05,
"loss": 0.4545,
"step": 851
},
{
"epoch": 60.857142857142854,
"grad_norm": 1.111656904220581,
"learning_rate": 1.776294766709364e-05,
"loss": 0.4534,
"step": 852
},
{
"epoch": 60.92857142857143,
"grad_norm": 1.3437578678131104,
"learning_rate": 1.7743151030577214e-05,
"loss": 0.4496,
"step": 853
},
{
"epoch": 61.0,
"grad_norm": 1.6430153846740723,
"learning_rate": 1.7723338422513462e-05,
"loss": 0.4606,
"step": 854
},
{
"epoch": 61.07142857142857,
"grad_norm": 1.0425664186477661,
"learning_rate": 1.7703509903255015e-05,
"loss": 0.4747,
"step": 855
},
{
"epoch": 61.142857142857146,
"grad_norm": 0.8745775818824768,
"learning_rate": 1.768366553320299e-05,
"loss": 0.4624,
"step": 856
},
{
"epoch": 61.214285714285715,
"grad_norm": 0.9095162749290466,
"learning_rate": 1.7663805372806775e-05,
"loss": 0.4771,
"step": 857
},
{
"epoch": 61.285714285714285,
"grad_norm": 1.3033194541931152,
"learning_rate": 1.764392948256386e-05,
"loss": 0.4813,
"step": 858
},
{
"epoch": 61.357142857142854,
"grad_norm": 1.0416628122329712,
"learning_rate": 1.762403792301966e-05,
"loss": 0.4631,
"step": 859
},
{
"epoch": 61.42857142857143,
"grad_norm": 1.0443403720855713,
"learning_rate": 1.760413075476731e-05,
"loss": 0.4704,
"step": 860
},
{
"epoch": 61.5,
"grad_norm": 1.1741394996643066,
"learning_rate": 1.7584208038447505e-05,
"loss": 0.4949,
"step": 861
},
{
"epoch": 61.57142857142857,
"grad_norm": 1.1339579820632935,
"learning_rate": 1.7564269834748285e-05,
"loss": 0.4983,
"step": 862
},
{
"epoch": 61.642857142857146,
"grad_norm": 1.135117530822754,
"learning_rate": 1.7544316204404877e-05,
"loss": 0.4624,
"step": 863
},
{
"epoch": 61.714285714285715,
"grad_norm": 1.2317984104156494,
"learning_rate": 1.7524347208199505e-05,
"loss": 0.4668,
"step": 864
},
{
"epoch": 61.785714285714285,
"grad_norm": 0.9803894758224487,
"learning_rate": 1.7504362906961193e-05,
"loss": 0.4586,
"step": 865
},
{
"epoch": 61.857142857142854,
"grad_norm": 1.0460914373397827,
"learning_rate": 1.748436336156558e-05,
"loss": 0.4559,
"step": 866
},
{
"epoch": 61.92857142857143,
"grad_norm": 1.1371439695358276,
"learning_rate": 1.7464348632934758e-05,
"loss": 0.4468,
"step": 867
},
{
"epoch": 62.0,
"grad_norm": 0.9589784145355225,
"learning_rate": 1.7444318782037064e-05,
"loss": 0.4567,
"step": 868
},
{
"epoch": 62.07142857142857,
"grad_norm": 1.1414674520492554,
"learning_rate": 1.742427386988689e-05,
"loss": 0.4797,
"step": 869
},
{
"epoch": 62.142857142857146,
"grad_norm": 1.1334103345870972,
"learning_rate": 1.740421395754452e-05,
"loss": 0.4909,
"step": 870
},
{
"epoch": 62.214285714285715,
"grad_norm": 1.0260701179504395,
"learning_rate": 1.738413910611592e-05,
"loss": 0.4685,
"step": 871
},
{
"epoch": 62.285714285714285,
"grad_norm": 0.9620410799980164,
"learning_rate": 1.7364049376752577e-05,
"loss": 0.4771,
"step": 872
},
{
"epoch": 62.357142857142854,
"grad_norm": 1.180635690689087,
"learning_rate": 1.7343944830651288e-05,
"loss": 0.4832,
"step": 873
},
{
"epoch": 62.42857142857143,
"grad_norm": 0.9632714986801147,
"learning_rate": 1.7323825529053984e-05,
"loss": 0.4715,
"step": 874
},
{
"epoch": 62.5,
"grad_norm": 1.1919854879379272,
"learning_rate": 1.7303691533247546e-05,
"loss": 0.5109,
"step": 875
},
{
"epoch": 62.57142857142857,
"grad_norm": 1.4476512670516968,
"learning_rate": 1.7283542904563625e-05,
"loss": 0.4865,
"step": 876
},
{
"epoch": 62.642857142857146,
"grad_norm": 1.503262996673584,
"learning_rate": 1.726337970437843e-05,
"loss": 0.4798,
"step": 877
},
{
"epoch": 62.714285714285715,
"grad_norm": 0.9983755350112915,
"learning_rate": 1.7243201994112565e-05,
"loss": 0.4626,
"step": 878
},
{
"epoch": 62.785714285714285,
"grad_norm": 1.2122546434402466,
"learning_rate": 1.722300983523084e-05,
"loss": 0.4883,
"step": 879
},
{
"epoch": 62.857142857142854,
"grad_norm": 1.2778229713439941,
"learning_rate": 1.720280328924207e-05,
"loss": 0.4746,
"step": 880
},
{
"epoch": 62.92857142857143,
"grad_norm": 1.441675066947937,
"learning_rate": 1.7182582417698903e-05,
"loss": 0.4674,
"step": 881
},
{
"epoch": 63.0,
"grad_norm": 1.1600130796432495,
"learning_rate": 1.7162347282197614e-05,
"loss": 0.4649,
"step": 882
},
{
"epoch": 63.07142857142857,
"grad_norm": 1.3658174276351929,
"learning_rate": 1.714209794437794e-05,
"loss": 0.4961,
"step": 883
},
{
"epoch": 63.142857142857146,
"grad_norm": 1.085675597190857,
"learning_rate": 1.712183446592287e-05,
"loss": 0.4956,
"step": 884
},
{
"epoch": 63.214285714285715,
"grad_norm": 1.0059946775436401,
"learning_rate": 1.7101556908558478e-05,
"loss": 0.4847,
"step": 885
},
{
"epoch": 63.285714285714285,
"grad_norm": 1.1383388042449951,
"learning_rate": 1.7081265334053717e-05,
"loss": 0.4735,
"step": 886
},
{
"epoch": 63.357142857142854,
"grad_norm": 1.0623657703399658,
"learning_rate": 1.7060959804220246e-05,
"loss": 0.4893,
"step": 887
},
{
"epoch": 63.42857142857143,
"grad_norm": 1.3424171209335327,
"learning_rate": 1.704064038091223e-05,
"loss": 0.4881,
"step": 888
},
{
"epoch": 63.5,
"grad_norm": 1.3154292106628418,
"learning_rate": 1.7020307126026152e-05,
"loss": 0.4811,
"step": 889
},
{
"epoch": 63.57142857142857,
"grad_norm": 1.2570388317108154,
"learning_rate": 1.6999960101500643e-05,
"loss": 0.477,
"step": 890
},
{
"epoch": 63.642857142857146,
"grad_norm": 1.4699902534484863,
"learning_rate": 1.697959936931625e-05,
"loss": 0.4847,
"step": 891
},
{
"epoch": 63.714285714285715,
"grad_norm": 1.0701394081115723,
"learning_rate": 1.6959224991495315e-05,
"loss": 0.5011,
"step": 892
},
{
"epoch": 63.785714285714285,
"grad_norm": 1.0866906642913818,
"learning_rate": 1.6938837030101714e-05,
"loss": 0.4822,
"step": 893
},
{
"epoch": 63.857142857142854,
"grad_norm": 0.9790834188461304,
"learning_rate": 1.6918435547240716e-05,
"loss": 0.4909,
"step": 894
},
{
"epoch": 63.92857142857143,
"grad_norm": 1.3998795747756958,
"learning_rate": 1.6898020605058773e-05,
"loss": 0.5037,
"step": 895
},
{
"epoch": 64.0,
"grad_norm": 1.5495517253875732,
"learning_rate": 1.6877592265743344e-05,
"loss": 0.4845,
"step": 896
},
{
"epoch": 64.07142857142857,
"grad_norm": 1.5856549739837646,
"learning_rate": 1.6857150591522692e-05,
"loss": 0.4949,
"step": 897
},
{
"epoch": 64.14285714285714,
"grad_norm": 1.0695977210998535,
"learning_rate": 1.6836695644665698e-05,
"loss": 0.476,
"step": 898
},
{
"epoch": 64.21428571428571,
"grad_norm": 1.0463613271713257,
"learning_rate": 1.6816227487481685e-05,
"loss": 0.4988,
"step": 899
},
{
"epoch": 64.28571428571429,
"grad_norm": 1.0080933570861816,
"learning_rate": 1.6795746182320205e-05,
"loss": 0.4693,
"step": 900
},
{
"epoch": 64.35714285714286,
"grad_norm": 1.0522123575210571,
"learning_rate": 1.677525179157086e-05,
"loss": 0.4724,
"step": 901
},
{
"epoch": 64.42857142857143,
"grad_norm": 1.22300124168396,
"learning_rate": 1.675474437766313e-05,
"loss": 0.4656,
"step": 902
},
{
"epoch": 64.5,
"grad_norm": 2.2347230911254883,
"learning_rate": 1.6734224003066146e-05,
"loss": 0.4796,
"step": 903
},
{
"epoch": 64.57142857142857,
"grad_norm": 1.5236304998397827,
"learning_rate": 1.671369073028853e-05,
"loss": 0.4549,
"step": 904
},
{
"epoch": 64.64285714285714,
"grad_norm": 1.7851380109786987,
"learning_rate": 1.669314462187819e-05,
"loss": 0.4712,
"step": 905
},
{
"epoch": 64.71428571428571,
"grad_norm": 1.1473972797393799,
"learning_rate": 1.6672585740422137e-05,
"loss": 0.4773,
"step": 906
},
{
"epoch": 64.78571428571429,
"grad_norm": 1.01731276512146,
"learning_rate": 1.665201414854629e-05,
"loss": 0.4669,
"step": 907
},
{
"epoch": 64.85714285714286,
"grad_norm": 1.6853854656219482,
"learning_rate": 1.6631429908915288e-05,
"loss": 0.4796,
"step": 908
},
{
"epoch": 64.92857142857143,
"grad_norm": 1.1149555444717407,
"learning_rate": 1.661083308423229e-05,
"loss": 0.4547,
"step": 909
},
{
"epoch": 65.0,
"grad_norm": 1.5415064096450806,
"learning_rate": 1.65902237372388e-05,
"loss": 0.4549,
"step": 910
},
{
"epoch": 65.07142857142857,
"grad_norm": 1.0730501413345337,
"learning_rate": 1.656960193071446e-05,
"loss": 0.474,
"step": 911
},
{
"epoch": 65.14285714285714,
"grad_norm": 1.0335873365402222,
"learning_rate": 1.654896772747687e-05,
"loss": 0.4571,
"step": 912
},
{
"epoch": 65.21428571428571,
"grad_norm": 1.1057350635528564,
"learning_rate": 1.652832119038139e-05,
"loss": 0.4698,
"step": 913
},
{
"epoch": 65.28571428571429,
"grad_norm": 0.9643803834915161,
"learning_rate": 1.6507662382320956e-05,
"loss": 0.4478,
"step": 914
},
{
"epoch": 65.35714285714286,
"grad_norm": 1.1186069250106812,
"learning_rate": 1.6486991366225878e-05,
"loss": 0.4534,
"step": 915
},
{
"epoch": 65.42857142857143,
"grad_norm": 1.0552515983581543,
"learning_rate": 1.6466308205063655e-05,
"loss": 0.4544,
"step": 916
},
{
"epoch": 65.5,
"grad_norm": 1.2044850587844849,
"learning_rate": 1.6445612961838783e-05,
"loss": 0.4767,
"step": 917
},
{
"epoch": 65.57142857142857,
"grad_norm": 1.4131428003311157,
"learning_rate": 1.6424905699592564e-05,
"loss": 0.4731,
"step": 918
},
{
"epoch": 65.64285714285714,
"grad_norm": 1.7141954898834229,
"learning_rate": 1.6404186481402908e-05,
"loss": 0.5097,
"step": 919
},
{
"epoch": 65.71428571428571,
"grad_norm": 1.2740991115570068,
"learning_rate": 1.638345537038415e-05,
"loss": 0.4901,
"step": 920
},
{
"epoch": 65.78571428571429,
"grad_norm": 1.2890080213546753,
"learning_rate": 1.6362712429686846e-05,
"loss": 0.4943,
"step": 921
},
{
"epoch": 65.85714285714286,
"grad_norm": 1.3176370859146118,
"learning_rate": 1.6341957722497592e-05,
"loss": 0.4854,
"step": 922
},
{
"epoch": 65.92857142857143,
"grad_norm": 1.7787507772445679,
"learning_rate": 1.632119131203882e-05,
"loss": 0.4744,
"step": 923
},
{
"epoch": 66.0,
"grad_norm": 1.4425883293151855,
"learning_rate": 1.630041326156863e-05,
"loss": 0.4727,
"step": 924
},
{
"epoch": 66.07142857142857,
"grad_norm": 1.1869782209396362,
"learning_rate": 1.6279623634380557e-05,
"loss": 0.4823,
"step": 925
},
{
"epoch": 66.14285714285714,
"grad_norm": 1.7626454830169678,
"learning_rate": 1.6258822493803414e-05,
"loss": 0.4759,
"step": 926
},
{
"epoch": 66.21428571428571,
"grad_norm": 1.1252734661102295,
"learning_rate": 1.6238009903201085e-05,
"loss": 0.4907,
"step": 927
},
{
"epoch": 66.28571428571429,
"grad_norm": 1.1061081886291504,
"learning_rate": 1.6217185925972325e-05,
"loss": 0.4691,
"step": 928
},
{
"epoch": 66.35714285714286,
"grad_norm": 1.4413056373596191,
"learning_rate": 1.6196350625550585e-05,
"loss": 0.4734,
"step": 929
},
{
"epoch": 66.42857142857143,
"grad_norm": 1.218800663948059,
"learning_rate": 1.61755040654038e-05,
"loss": 0.4617,
"step": 930
},
{
"epoch": 66.5,
"grad_norm": 1.0052813291549683,
"learning_rate": 1.615464630903421e-05,
"loss": 0.4667,
"step": 931
},
{
"epoch": 66.57142857142857,
"grad_norm": 1.0919910669326782,
"learning_rate": 1.6133777419978158e-05,
"loss": 0.4702,
"step": 932
},
{
"epoch": 66.64285714285714,
"grad_norm": 1.2588508129119873,
"learning_rate": 1.6112897461805895e-05,
"loss": 0.4569,
"step": 933
},
{
"epoch": 66.71428571428571,
"grad_norm": 1.2223925590515137,
"learning_rate": 1.6092006498121405e-05,
"loss": 0.4507,
"step": 934
},
{
"epoch": 66.78571428571429,
"grad_norm": 1.0514506101608276,
"learning_rate": 1.607110459256217e-05,
"loss": 0.4414,
"step": 935
},
{
"epoch": 66.85714285714286,
"grad_norm": 1.1408120393753052,
"learning_rate": 1.6050191808799035e-05,
"loss": 0.4443,
"step": 936
},
{
"epoch": 66.92857142857143,
"grad_norm": 1.3204582929611206,
"learning_rate": 1.602926821053596e-05,
"loss": 0.4466,
"step": 937
},
{
"epoch": 67.0,
"grad_norm": 1.1937634944915771,
"learning_rate": 1.600833386150985e-05,
"loss": 0.4329,
"step": 938
},
{
"epoch": 67.07142857142857,
"grad_norm": 1.4955601692199707,
"learning_rate": 1.5987388825490368e-05,
"loss": 0.4449,
"step": 939
},
{
"epoch": 67.14285714285714,
"grad_norm": 1.0294854640960693,
"learning_rate": 1.5966433166279727e-05,
"loss": 0.4618,
"step": 940
},
{
"epoch": 67.21428571428571,
"grad_norm": 1.460986614227295,
"learning_rate": 1.5945466947712488e-05,
"loss": 0.4628,
"step": 941
},
{
"epoch": 67.28571428571429,
"grad_norm": 1.2559874057769775,
"learning_rate": 1.592449023365541e-05,
"loss": 0.4433,
"step": 942
},
{
"epoch": 67.35714285714286,
"grad_norm": 1.1981381177902222,
"learning_rate": 1.590350308800718e-05,
"loss": 0.4525,
"step": 943
},
{
"epoch": 67.42857142857143,
"grad_norm": 0.8590506911277771,
"learning_rate": 1.5882505574698295e-05,
"loss": 0.4228,
"step": 944
},
{
"epoch": 67.5,
"grad_norm": 1.2536005973815918,
"learning_rate": 1.586149775769082e-05,
"loss": 0.4743,
"step": 945
},
{
"epoch": 67.57142857142857,
"grad_norm": 1.1335843801498413,
"learning_rate": 1.584047970097821e-05,
"loss": 0.4666,
"step": 946
},
{
"epoch": 67.64285714285714,
"grad_norm": 1.0234830379486084,
"learning_rate": 1.581945146858511e-05,
"loss": 0.4706,
"step": 947
},
{
"epoch": 67.71428571428571,
"grad_norm": 1.2412245273590088,
"learning_rate": 1.5798413124567162e-05,
"loss": 0.4666,
"step": 948
},
{
"epoch": 67.78571428571429,
"grad_norm": 1.1734881401062012,
"learning_rate": 1.577736473301081e-05,
"loss": 0.4663,
"step": 949
},
{
"epoch": 67.85714285714286,
"grad_norm": 1.1366450786590576,
"learning_rate": 1.575630635803311e-05,
"loss": 0.4588,
"step": 950
},
{
"epoch": 67.92857142857143,
"grad_norm": 1.072285771369934,
"learning_rate": 1.573523806378151e-05,
"loss": 0.4689,
"step": 951
},
{
"epoch": 68.0,
"grad_norm": 1.3719762563705444,
"learning_rate": 1.5714159914433704e-05,
"loss": 0.4568,
"step": 952
},
{
"epoch": 68.07142857142857,
"grad_norm": 0.9671856164932251,
"learning_rate": 1.569307197419738e-05,
"loss": 0.4306,
"step": 953
},
{
"epoch": 68.14285714285714,
"grad_norm": 1.438032865524292,
"learning_rate": 1.5671974307310073e-05,
"loss": 0.4331,
"step": 954
},
{
"epoch": 68.21428571428571,
"grad_norm": 1.164744257926941,
"learning_rate": 1.5650866978038925e-05,
"loss": 0.4472,
"step": 955
},
{
"epoch": 68.28571428571429,
"grad_norm": 1.0590718984603882,
"learning_rate": 1.562975005068052e-05,
"loss": 0.4374,
"step": 956
},
{
"epoch": 68.35714285714286,
"grad_norm": 1.3910428285598755,
"learning_rate": 1.560862358956069e-05,
"loss": 0.4387,
"step": 957
},
{
"epoch": 68.42857142857143,
"grad_norm": 1.3515472412109375,
"learning_rate": 1.558748765903429e-05,
"loss": 0.4335,
"step": 958
},
{
"epoch": 68.5,
"grad_norm": 1.5539228916168213,
"learning_rate": 1.5566342323485032e-05,
"loss": 0.4751,
"step": 959
},
{
"epoch": 68.57142857142857,
"grad_norm": 1.2260050773620605,
"learning_rate": 1.554518764732528e-05,
"loss": 0.4685,
"step": 960
},
{
"epoch": 68.64285714285714,
"grad_norm": 1.1879860162734985,
"learning_rate": 1.5524023694995844e-05,
"loss": 0.4156,
"step": 961
},
{
"epoch": 68.71428571428571,
"grad_norm": 1.4440523386001587,
"learning_rate": 1.5502850530965798e-05,
"loss": 0.4143,
"step": 962
},
{
"epoch": 68.78571428571429,
"grad_norm": 1.9903374910354614,
"learning_rate": 1.5481668219732263e-05,
"loss": 0.4207,
"step": 963
},
{
"epoch": 68.85714285714286,
"grad_norm": 1.6261475086212158,
"learning_rate": 1.5460476825820237e-05,
"loss": 0.4173,
"step": 964
},
{
"epoch": 68.92857142857143,
"grad_norm": 0.9548959732055664,
"learning_rate": 1.5439276413782375e-05,
"loss": 0.4068,
"step": 965
},
{
"epoch": 69.0,
"grad_norm": 1.1789215803146362,
"learning_rate": 1.541806704819882e-05,
"loss": 0.4015,
"step": 966
},
{
"epoch": 69.07142857142857,
"grad_norm": 1.8189136981964111,
"learning_rate": 1.5396848793676966e-05,
"loss": 0.4882,
"step": 967
},
{
"epoch": 69.14285714285714,
"grad_norm": 1.3893080949783325,
"learning_rate": 1.5375621714851308e-05,
"loss": 0.4985,
"step": 968
},
{
"epoch": 69.21428571428571,
"grad_norm": 1.2738388776779175,
"learning_rate": 1.5354385876383197e-05,
"loss": 0.4848,
"step": 969
},
{
"epoch": 69.28571428571429,
"grad_norm": 1.2682971954345703,
"learning_rate": 1.533314134296069e-05,
"loss": 0.491,
"step": 970
},
{
"epoch": 69.35714285714286,
"grad_norm": 1.3060152530670166,
"learning_rate": 1.5311888179298313e-05,
"loss": 0.4985,
"step": 971
},
{
"epoch": 69.42857142857143,
"grad_norm": 1.422782063484192,
"learning_rate": 1.5290626450136893e-05,
"loss": 0.4872,
"step": 972
},
{
"epoch": 69.5,
"grad_norm": 1.2269575595855713,
"learning_rate": 1.5269356220243345e-05,
"loss": 0.4855,
"step": 973
},
{
"epoch": 69.57142857142857,
"grad_norm": 1.1599860191345215,
"learning_rate": 1.524807755441047e-05,
"loss": 0.4692,
"step": 974
},
{
"epoch": 69.64285714285714,
"grad_norm": 1.1501458883285522,
"learning_rate": 1.5226790517456785e-05,
"loss": 0.4775,
"step": 975
},
{
"epoch": 69.71428571428571,
"grad_norm": 1.0019612312316895,
"learning_rate": 1.5205495174226287e-05,
"loss": 0.4817,
"step": 976
},
{
"epoch": 69.78571428571429,
"grad_norm": 0.926329493522644,
"learning_rate": 1.5184191589588292e-05,
"loss": 0.4833,
"step": 977
},
{
"epoch": 69.85714285714286,
"grad_norm": 0.9601024985313416,
"learning_rate": 1.5162879828437209e-05,
"loss": 0.4718,
"step": 978
},
{
"epoch": 69.92857142857143,
"grad_norm": 0.9741531610488892,
"learning_rate": 1.514155995569236e-05,
"loss": 0.4637,
"step": 979
},
{
"epoch": 70.0,
"grad_norm": 0.9561474919319153,
"learning_rate": 1.5120232036297775e-05,
"loss": 0.4689,
"step": 980
},
{
"epoch": 70.07142857142857,
"grad_norm": 1.6145578622817993,
"learning_rate": 1.5098896135221991e-05,
"loss": 0.4802,
"step": 981
},
{
"epoch": 70.14285714285714,
"grad_norm": 1.30496084690094,
"learning_rate": 1.5077552317457872e-05,
"loss": 0.4809,
"step": 982
},
{
"epoch": 70.21428571428571,
"grad_norm": 0.9824367761611938,
"learning_rate": 1.5056200648022378e-05,
"loss": 0.4683,
"step": 983
},
{
"epoch": 70.28571428571429,
"grad_norm": 1.4853835105895996,
"learning_rate": 1.5034841191956407e-05,
"loss": 0.4782,
"step": 984
},
{
"epoch": 70.35714285714286,
"grad_norm": 1.475623607635498,
"learning_rate": 1.501347401432456e-05,
"loss": 0.4645,
"step": 985
},
{
"epoch": 70.42857142857143,
"grad_norm": 1.3770936727523804,
"learning_rate": 1.4992099180214966e-05,
"loss": 0.4609,
"step": 986
},
{
"epoch": 70.5,
"grad_norm": 1.4309428930282593,
"learning_rate": 1.4970716754739078e-05,
"loss": 0.4733,
"step": 987
},
{
"epoch": 70.57142857142857,
"grad_norm": 1.2341463565826416,
"learning_rate": 1.4949326803031472e-05,
"loss": 0.484,
"step": 988
},
{
"epoch": 70.64285714285714,
"grad_norm": 1.704871416091919,
"learning_rate": 1.492792939024965e-05,
"loss": 0.5028,
"step": 989
},
{
"epoch": 70.71428571428571,
"grad_norm": 1.9965828657150269,
"learning_rate": 1.4906524581573844e-05,
"loss": 0.5179,
"step": 990
},
{
"epoch": 70.78571428571429,
"grad_norm": 1.4135334491729736,
"learning_rate": 1.4885112442206812e-05,
"loss": 0.5087,
"step": 991
},
{
"epoch": 70.85714285714286,
"grad_norm": 1.0720967054367065,
"learning_rate": 1.486369303737364e-05,
"loss": 0.4871,
"step": 992
},
{
"epoch": 70.92857142857143,
"grad_norm": 1.3676438331604004,
"learning_rate": 1.4842266432321561e-05,
"loss": 0.4887,
"step": 993
},
{
"epoch": 71.0,
"grad_norm": 1.7217768430709839,
"learning_rate": 1.482083269231972e-05,
"loss": 0.495,
"step": 994
},
{
"epoch": 71.07142857142857,
"grad_norm": 1.1710909605026245,
"learning_rate": 1.4799391882659004e-05,
"loss": 0.4983,
"step": 995
},
{
"epoch": 71.14285714285714,
"grad_norm": 1.051698923110962,
"learning_rate": 1.4777944068651848e-05,
"loss": 0.486,
"step": 996
},
{
"epoch": 71.21428571428571,
"grad_norm": 1.3500066995620728,
"learning_rate": 1.4756489315632003e-05,
"loss": 0.4805,
"step": 997
},
{
"epoch": 71.28571428571429,
"grad_norm": 1.3487000465393066,
"learning_rate": 1.4735027688954369e-05,
"loss": 0.4835,
"step": 998
},
{
"epoch": 71.35714285714286,
"grad_norm": 1.2461570501327515,
"learning_rate": 1.4713559253994793e-05,
"loss": 0.4854,
"step": 999
},
{
"epoch": 71.42857142857143,
"grad_norm": 1.0939944982528687,
"learning_rate": 1.469208407614984e-05,
"loss": 0.4725,
"step": 1000
}
],
"logging_steps": 1,
"max_steps": 2000,
"num_input_tokens_seen": 0,
"num_train_epochs": 143,
"save_steps": 200,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": false,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 64,
"trial_name": null,
"trial_params": null
}