{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 518,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0019305019305019305,
"grad_norm": 9.087823867797852,
"learning_rate": 9.980694980694981e-06,
"loss": 763.5779,
"step": 1
},
{
"epoch": 0.003861003861003861,
"grad_norm": 8.02014446258545,
"learning_rate": 9.961389961389962e-06,
"loss": 742.0983,
"step": 2
},
{
"epoch": 0.005791505791505791,
"grad_norm": 6.846795558929443,
"learning_rate": 9.942084942084944e-06,
"loss": 754.556,
"step": 3
},
{
"epoch": 0.007722007722007722,
"grad_norm": 5.642313003540039,
"learning_rate": 9.922779922779924e-06,
"loss": 760.8958,
"step": 4
},
{
"epoch": 0.009652509652509652,
"grad_norm": 4.954174995422363,
"learning_rate": 9.903474903474905e-06,
"loss": 773.4886,
"step": 5
},
{
"epoch": 0.011583011583011582,
"grad_norm": 4.209826946258545,
"learning_rate": 9.884169884169885e-06,
"loss": 752.0172,
"step": 6
},
{
"epoch": 0.013513513513513514,
"grad_norm": 3.8238916397094727,
"learning_rate": 9.864864864864865e-06,
"loss": 765.1586,
"step": 7
},
{
"epoch": 0.015444015444015444,
"grad_norm": 3.472975730895996,
"learning_rate": 9.845559845559847e-06,
"loss": 758.0544,
"step": 8
},
{
"epoch": 0.017374517374517374,
"grad_norm": 3.3365626335144043,
"learning_rate": 9.826254826254828e-06,
"loss": 760.6899,
"step": 9
},
{
"epoch": 0.019305019305019305,
"grad_norm": 3.205853223800659,
"learning_rate": 9.806949806949808e-06,
"loss": 763.1088,
"step": 10
},
{
"epoch": 0.021235521235521235,
"grad_norm": 2.9912545680999756,
"learning_rate": 9.787644787644789e-06,
"loss": 751.3445,
"step": 11
},
{
"epoch": 0.023166023166023165,
"grad_norm": 2.9894907474517822,
"learning_rate": 9.768339768339769e-06,
"loss": 762.5296,
"step": 12
},
{
"epoch": 0.025096525096525095,
"grad_norm": 2.973778009414673,
"learning_rate": 9.749034749034751e-06,
"loss": 766.5692,
"step": 13
},
{
"epoch": 0.02702702702702703,
"grad_norm": 2.866206645965576,
"learning_rate": 9.729729729729732e-06,
"loss": 759.659,
"step": 14
},
{
"epoch": 0.02895752895752896,
"grad_norm": 2.7454729080200195,
"learning_rate": 9.71042471042471e-06,
"loss": 749.6011,
"step": 15
},
{
"epoch": 0.03088803088803089,
"grad_norm": 2.7870497703552246,
"learning_rate": 9.69111969111969e-06,
"loss": 757.2128,
"step": 16
},
{
"epoch": 0.032818532818532815,
"grad_norm": 2.727757453918457,
"learning_rate": 9.671814671814673e-06,
"loss": 752.8752,
"step": 17
},
{
"epoch": 0.03474903474903475,
"grad_norm": 2.761842966079712,
"learning_rate": 9.652509652509653e-06,
"loss": 763.5405,
"step": 18
},
{
"epoch": 0.03667953667953668,
"grad_norm": 2.64555025100708,
"learning_rate": 9.633204633204634e-06,
"loss": 752.1604,
"step": 19
},
{
"epoch": 0.03861003861003861,
"grad_norm": 2.604011297225952,
"learning_rate": 9.613899613899614e-06,
"loss": 745.1989,
"step": 20
},
{
"epoch": 0.04054054054054054,
"grad_norm": 2.5989909172058105,
"learning_rate": 9.594594594594594e-06,
"loss": 744.575,
"step": 21
},
{
"epoch": 0.04247104247104247,
"grad_norm": 2.646167278289795,
"learning_rate": 9.575289575289576e-06,
"loss": 753.7839,
"step": 22
},
{
"epoch": 0.0444015444015444,
"grad_norm": 2.6674346923828125,
"learning_rate": 9.555984555984557e-06,
"loss": 764.2125,
"step": 23
},
{
"epoch": 0.04633204633204633,
"grad_norm": 2.6234052181243896,
"learning_rate": 9.536679536679537e-06,
"loss": 753.5481,
"step": 24
},
{
"epoch": 0.04826254826254826,
"grad_norm": 2.550206422805786,
"learning_rate": 9.517374517374518e-06,
"loss": 744.3314,
"step": 25
},
{
"epoch": 0.05019305019305019,
"grad_norm": 2.634002208709717,
"learning_rate": 9.498069498069498e-06,
"loss": 756.7723,
"step": 26
},
{
"epoch": 0.052123552123552123,
"grad_norm": 2.632336378097534,
"learning_rate": 9.47876447876448e-06,
"loss": 756.9872,
"step": 27
},
{
"epoch": 0.05405405405405406,
"grad_norm": 2.628897190093994,
"learning_rate": 9.45945945945946e-06,
"loss": 756.0316,
"step": 28
},
{
"epoch": 0.055984555984555984,
"grad_norm": 2.6032018661499023,
"learning_rate": 9.440154440154441e-06,
"loss": 754.0662,
"step": 29
},
{
"epoch": 0.05791505791505792,
"grad_norm": 2.5998055934906006,
"learning_rate": 9.420849420849421e-06,
"loss": 749.3468,
"step": 30
},
{
"epoch": 0.059845559845559844,
"grad_norm": 2.623134136199951,
"learning_rate": 9.401544401544402e-06,
"loss": 753.1194,
"step": 31
},
{
"epoch": 0.06177606177606178,
"grad_norm": 2.636357069015503,
"learning_rate": 9.382239382239384e-06,
"loss": 755.1291,
"step": 32
},
{
"epoch": 0.0637065637065637,
"grad_norm": 2.709622621536255,
"learning_rate": 9.362934362934364e-06,
"loss": 774.9333,
"step": 33
},
{
"epoch": 0.06563706563706563,
"grad_norm": 2.6019175052642822,
"learning_rate": 9.343629343629345e-06,
"loss": 758.8455,
"step": 34
},
{
"epoch": 0.06756756756756757,
"grad_norm": 2.620014190673828,
"learning_rate": 9.324324324324325e-06,
"loss": 751.6978,
"step": 35
},
{
"epoch": 0.0694980694980695,
"grad_norm": 2.6363117694854736,
"learning_rate": 9.305019305019305e-06,
"loss": 755.4987,
"step": 36
},
{
"epoch": 0.07142857142857142,
"grad_norm": 2.697289228439331,
"learning_rate": 9.285714285714288e-06,
"loss": 769.0797,
"step": 37
},
{
"epoch": 0.07335907335907337,
"grad_norm": 2.684602975845337,
"learning_rate": 9.266409266409268e-06,
"loss": 771.785,
"step": 38
},
{
"epoch": 0.07528957528957529,
"grad_norm": 2.6869068145751953,
"learning_rate": 9.247104247104248e-06,
"loss": 771.0751,
"step": 39
},
{
"epoch": 0.07722007722007722,
"grad_norm": 2.65386962890625,
"learning_rate": 9.227799227799229e-06,
"loss": 766.6141,
"step": 40
},
{
"epoch": 0.07915057915057915,
"grad_norm": 2.5492303371429443,
"learning_rate": 9.20849420849421e-06,
"loss": 741.012,
"step": 41
},
{
"epoch": 0.08108108108108109,
"grad_norm": 2.659393548965454,
"learning_rate": 9.189189189189191e-06,
"loss": 761.9696,
"step": 42
},
{
"epoch": 0.08301158301158301,
"grad_norm": 2.6672518253326416,
"learning_rate": 9.169884169884172e-06,
"loss": 762.8466,
"step": 43
},
{
"epoch": 0.08494208494208494,
"grad_norm": 2.60377836227417,
"learning_rate": 9.15057915057915e-06,
"loss": 758.5039,
"step": 44
},
{
"epoch": 0.08687258687258688,
"grad_norm": 2.622608184814453,
"learning_rate": 9.13127413127413e-06,
"loss": 750.5551,
"step": 45
},
{
"epoch": 0.0888030888030888,
"grad_norm": 2.6671438217163086,
"learning_rate": 9.111969111969113e-06,
"loss": 766.5024,
"step": 46
},
{
"epoch": 0.09073359073359073,
"grad_norm": 2.5831432342529297,
"learning_rate": 9.092664092664093e-06,
"loss": 747.624,
"step": 47
},
{
"epoch": 0.09266409266409266,
"grad_norm": 2.61625075340271,
"learning_rate": 9.073359073359074e-06,
"loss": 759.9783,
"step": 48
},
{
"epoch": 0.0945945945945946,
"grad_norm": 2.6503899097442627,
"learning_rate": 9.054054054054054e-06,
"loss": 766.5831,
"step": 49
},
{
"epoch": 0.09652509652509653,
"grad_norm": 2.713566303253174,
"learning_rate": 9.034749034749034e-06,
"loss": 764.6612,
"step": 50
},
{
"epoch": 0.09845559845559845,
"grad_norm": 2.6607675552368164,
"learning_rate": 9.015444015444017e-06,
"loss": 759.2146,
"step": 51
},
{
"epoch": 0.10038610038610038,
"grad_norm": 2.703293800354004,
"learning_rate": 8.996138996138997e-06,
"loss": 771.3636,
"step": 52
},
{
"epoch": 0.10231660231660232,
"grad_norm": 2.6380341053009033,
"learning_rate": 8.976833976833977e-06,
"loss": 762.6519,
"step": 53
},
{
"epoch": 0.10424710424710425,
"grad_norm": 2.6804656982421875,
"learning_rate": 8.957528957528958e-06,
"loss": 764.4567,
"step": 54
},
{
"epoch": 0.10617760617760617,
"grad_norm": 2.6722824573516846,
"learning_rate": 8.938223938223938e-06,
"loss": 762.7928,
"step": 55
},
{
"epoch": 0.10810810810810811,
"grad_norm": 2.5956473350524902,
"learning_rate": 8.91891891891892e-06,
"loss": 757.1573,
"step": 56
},
{
"epoch": 0.11003861003861004,
"grad_norm": 2.67158579826355,
"learning_rate": 8.8996138996139e-06,
"loss": 762.9989,
"step": 57
},
{
"epoch": 0.11196911196911197,
"grad_norm": 2.6719319820404053,
"learning_rate": 8.880308880308881e-06,
"loss": 765.219,
"step": 58
},
{
"epoch": 0.1138996138996139,
"grad_norm": 2.690995931625366,
"learning_rate": 8.861003861003861e-06,
"loss": 755.7704,
"step": 59
},
{
"epoch": 0.11583011583011583,
"grad_norm": 2.672006845474243,
"learning_rate": 8.841698841698842e-06,
"loss": 755.6411,
"step": 60
},
{
"epoch": 0.11776061776061776,
"grad_norm": 2.6625969409942627,
"learning_rate": 8.822393822393824e-06,
"loss": 755.6705,
"step": 61
},
{
"epoch": 0.11969111969111969,
"grad_norm": 2.6644089221954346,
"learning_rate": 8.803088803088804e-06,
"loss": 755.7343,
"step": 62
},
{
"epoch": 0.12162162162162163,
"grad_norm": 2.6913063526153564,
"learning_rate": 8.783783783783785e-06,
"loss": 760.8112,
"step": 63
},
{
"epoch": 0.12355212355212356,
"grad_norm": 2.640939474105835,
"learning_rate": 8.764478764478765e-06,
"loss": 752.9957,
"step": 64
},
{
"epoch": 0.12548262548262548,
"grad_norm": 2.706070899963379,
"learning_rate": 8.745173745173746e-06,
"loss": 751.4651,
"step": 65
},
{
"epoch": 0.1274131274131274,
"grad_norm": 2.6751809120178223,
"learning_rate": 8.725868725868728e-06,
"loss": 760.4512,
"step": 66
},
{
"epoch": 0.12934362934362933,
"grad_norm": 2.731750965118408,
"learning_rate": 8.706563706563708e-06,
"loss": 766.3732,
"step": 67
},
{
"epoch": 0.13127413127413126,
"grad_norm": 2.688737392425537,
"learning_rate": 8.687258687258689e-06,
"loss": 749.8923,
"step": 68
},
{
"epoch": 0.13320463320463322,
"grad_norm": 2.67354679107666,
"learning_rate": 8.667953667953669e-06,
"loss": 751.1083,
"step": 69
},
{
"epoch": 0.13513513513513514,
"grad_norm": 2.6414129734039307,
"learning_rate": 8.64864864864865e-06,
"loss": 733.0116,
"step": 70
},
{
"epoch": 0.13706563706563707,
"grad_norm": 3.0136189460754395,
"learning_rate": 8.629343629343631e-06,
"loss": 764.5856,
"step": 71
},
{
"epoch": 0.138996138996139,
"grad_norm": 2.7174079418182373,
"learning_rate": 8.61003861003861e-06,
"loss": 753.4861,
"step": 72
},
{
"epoch": 0.14092664092664092,
"grad_norm": 2.714317560195923,
"learning_rate": 8.59073359073359e-06,
"loss": 754.7225,
"step": 73
},
{
"epoch": 0.14285714285714285,
"grad_norm": 2.7697298526763916,
"learning_rate": 8.571428571428571e-06,
"loss": 765.4945,
"step": 74
},
{
"epoch": 0.14478764478764478,
"grad_norm": 2.7908132076263428,
"learning_rate": 8.552123552123553e-06,
"loss": 779.0895,
"step": 75
},
{
"epoch": 0.14671814671814673,
"grad_norm": 2.7896623611450195,
"learning_rate": 8.532818532818533e-06,
"loss": 767.8585,
"step": 76
},
{
"epoch": 0.14864864864864866,
"grad_norm": 2.718611240386963,
"learning_rate": 8.513513513513514e-06,
"loss": 760.3116,
"step": 77
},
{
"epoch": 0.15057915057915058,
"grad_norm": 2.78110671043396,
"learning_rate": 8.494208494208494e-06,
"loss": 761.649,
"step": 78
},
{
"epoch": 0.1525096525096525,
"grad_norm": 2.822859048843384,
"learning_rate": 8.474903474903475e-06,
"loss": 766.2858,
"step": 79
},
{
"epoch": 0.15444015444015444,
"grad_norm": 2.719345808029175,
"learning_rate": 8.455598455598457e-06,
"loss": 755.3741,
"step": 80
},
{
"epoch": 0.15637065637065636,
"grad_norm": 2.710378408432007,
"learning_rate": 8.436293436293437e-06,
"loss": 758.8147,
"step": 81
},
{
"epoch": 0.1583011583011583,
"grad_norm": 2.7957656383514404,
"learning_rate": 8.416988416988418e-06,
"loss": 761.3373,
"step": 82
},
{
"epoch": 0.16023166023166024,
"grad_norm": 2.8298914432525635,
"learning_rate": 8.397683397683398e-06,
"loss": 769.8754,
"step": 83
},
{
"epoch": 0.16216216216216217,
"grad_norm": 2.762465715408325,
"learning_rate": 8.378378378378378e-06,
"loss": 769.7825,
"step": 84
},
{
"epoch": 0.1640926640926641,
"grad_norm": 2.773038625717163,
"learning_rate": 8.35907335907336e-06,
"loss": 749.3358,
"step": 85
},
{
"epoch": 0.16602316602316602,
"grad_norm": 2.7703516483306885,
"learning_rate": 8.33976833976834e-06,
"loss": 757.9563,
"step": 86
},
{
"epoch": 0.16795366795366795,
"grad_norm": 2.930047035217285,
"learning_rate": 8.320463320463321e-06,
"loss": 764.3655,
"step": 87
},
{
"epoch": 0.16988416988416988,
"grad_norm": 2.7261102199554443,
"learning_rate": 8.301158301158302e-06,
"loss": 748.2144,
"step": 88
},
{
"epoch": 0.1718146718146718,
"grad_norm": 2.793009042739868,
"learning_rate": 8.281853281853282e-06,
"loss": 754.8672,
"step": 89
},
{
"epoch": 0.17374517374517376,
"grad_norm": 2.8894758224487305,
"learning_rate": 8.262548262548264e-06,
"loss": 764.7795,
"step": 90
},
{
"epoch": 0.17567567567567569,
"grad_norm": 2.8510689735412598,
"learning_rate": 8.243243243243245e-06,
"loss": 753.4908,
"step": 91
},
{
"epoch": 0.1776061776061776,
"grad_norm": 2.757417917251587,
"learning_rate": 8.223938223938225e-06,
"loss": 760.0729,
"step": 92
},
{
"epoch": 0.17953667953667954,
"grad_norm": 2.8439671993255615,
"learning_rate": 8.204633204633205e-06,
"loss": 771.863,
"step": 93
},
{
"epoch": 0.18146718146718147,
"grad_norm": 2.8313705921173096,
"learning_rate": 8.185328185328186e-06,
"loss": 765.174,
"step": 94
},
{
"epoch": 0.1833976833976834,
"grad_norm": 2.7856626510620117,
"learning_rate": 8.166023166023168e-06,
"loss": 758.1021,
"step": 95
},
{
"epoch": 0.18532818532818532,
"grad_norm": 2.8734123706817627,
"learning_rate": 8.146718146718148e-06,
"loss": 768.5622,
"step": 96
},
{
"epoch": 0.18725868725868725,
"grad_norm": 2.833714723587036,
"learning_rate": 8.127413127413129e-06,
"loss": 753.1698,
"step": 97
},
{
"epoch": 0.1891891891891892,
"grad_norm": 2.8014986515045166,
"learning_rate": 8.108108108108109e-06,
"loss": 752.7419,
"step": 98
},
{
"epoch": 0.19111969111969113,
"grad_norm": 2.7595949172973633,
"learning_rate": 8.08880308880309e-06,
"loss": 755.9596,
"step": 99
},
{
"epoch": 0.19305019305019305,
"grad_norm": 2.7833123207092285,
"learning_rate": 8.06949806949807e-06,
"loss": 765.9607,
"step": 100
},
{
"epoch": 0.19498069498069498,
"grad_norm": 2.777548313140869,
"learning_rate": 8.05019305019305e-06,
"loss": 766.981,
"step": 101
},
{
"epoch": 0.1969111969111969,
"grad_norm": 2.7974212169647217,
"learning_rate": 8.03088803088803e-06,
"loss": 753.3157,
"step": 102
},
{
"epoch": 0.19884169884169883,
"grad_norm": 3.194911003112793,
"learning_rate": 8.011583011583011e-06,
"loss": 773.3227,
"step": 103
},
{
"epoch": 0.20077220077220076,
"grad_norm": 2.7325661182403564,
"learning_rate": 7.992277992277993e-06,
"loss": 746.1143,
"step": 104
},
{
"epoch": 0.20270270270270271,
"grad_norm": 2.740203619003296,
"learning_rate": 7.972972972972974e-06,
"loss": 750.4532,
"step": 105
},
{
"epoch": 0.20463320463320464,
"grad_norm": 2.764615297317505,
"learning_rate": 7.953667953667954e-06,
"loss": 757.8738,
"step": 106
},
{
"epoch": 0.20656370656370657,
"grad_norm": 2.783639430999756,
"learning_rate": 7.934362934362934e-06,
"loss": 759.7949,
"step": 107
},
{
"epoch": 0.2084942084942085,
"grad_norm": 2.933793783187866,
"learning_rate": 7.915057915057915e-06,
"loss": 771.2281,
"step": 108
},
{
"epoch": 0.21042471042471042,
"grad_norm": 2.795241117477417,
"learning_rate": 7.895752895752897e-06,
"loss": 747.9264,
"step": 109
},
{
"epoch": 0.21235521235521235,
"grad_norm": 2.755584239959717,
"learning_rate": 7.876447876447877e-06,
"loss": 746.8754,
"step": 110
},
{
"epoch": 0.21428571428571427,
"grad_norm": 2.802136182785034,
"learning_rate": 7.857142857142858e-06,
"loss": 745.1733,
"step": 111
},
{
"epoch": 0.21621621621621623,
"grad_norm": 3.5105533599853516,
"learning_rate": 7.837837837837838e-06,
"loss": 745.6346,
"step": 112
},
{
"epoch": 0.21814671814671815,
"grad_norm": 2.8462111949920654,
"learning_rate": 7.818532818532818e-06,
"loss": 757.0023,
"step": 113
},
{
"epoch": 0.22007722007722008,
"grad_norm": 3.079543113708496,
"learning_rate": 7.7992277992278e-06,
"loss": 757.2842,
"step": 114
},
{
"epoch": 0.222007722007722,
"grad_norm": 2.8306119441986084,
"learning_rate": 7.779922779922781e-06,
"loss": 752.3796,
"step": 115
},
{
"epoch": 0.22393822393822393,
"grad_norm": 3.1957056522369385,
"learning_rate": 7.760617760617761e-06,
"loss": 753.2894,
"step": 116
},
{
"epoch": 0.22586872586872586,
"grad_norm": 2.90783953666687,
"learning_rate": 7.741312741312742e-06,
"loss": 753.8042,
"step": 117
},
{
"epoch": 0.2277992277992278,
"grad_norm": 2.8529882431030273,
"learning_rate": 7.722007722007722e-06,
"loss": 758.0192,
"step": 118
},
{
"epoch": 0.22972972972972974,
"grad_norm": 2.8292617797851562,
"learning_rate": 7.702702702702704e-06,
"loss": 756.563,
"step": 119
},
{
"epoch": 0.23166023166023167,
"grad_norm": 2.8797755241394043,
"learning_rate": 7.683397683397685e-06,
"loss": 761.1346,
"step": 120
},
{
"epoch": 0.2335907335907336,
"grad_norm": 2.7390382289886475,
"learning_rate": 7.664092664092665e-06,
"loss": 750.3627,
"step": 121
},
{
"epoch": 0.23552123552123552,
"grad_norm": 2.7686545848846436,
"learning_rate": 7.644787644787645e-06,
"loss": 750.1412,
"step": 122
},
{
"epoch": 0.23745173745173745,
"grad_norm": 2.798008680343628,
"learning_rate": 7.625482625482627e-06,
"loss": 742.588,
"step": 123
},
{
"epoch": 0.23938223938223938,
"grad_norm": 4.088344097137451,
"learning_rate": 7.606177606177607e-06,
"loss": 758.9719,
"step": 124
},
{
"epoch": 0.2413127413127413,
"grad_norm": 2.9229252338409424,
"learning_rate": 7.5868725868725875e-06,
"loss": 756.5762,
"step": 125
},
{
"epoch": 0.24324324324324326,
"grad_norm": 3.1966676712036133,
"learning_rate": 7.567567567567569e-06,
"loss": 768.2112,
"step": 126
},
{
"epoch": 0.24517374517374518,
"grad_norm": 3.255308151245117,
"learning_rate": 7.548262548262549e-06,
"loss": 768.8184,
"step": 127
},
{
"epoch": 0.2471042471042471,
"grad_norm": 2.803837537765503,
"learning_rate": 7.5289575289575304e-06,
"loss": 758.1424,
"step": 128
},
{
"epoch": 0.24903474903474904,
"grad_norm": 2.8075811862945557,
"learning_rate": 7.509652509652511e-06,
"loss": 748.3246,
"step": 129
},
{
"epoch": 0.25096525096525096,
"grad_norm": 3.1468892097473145,
"learning_rate": 7.49034749034749e-06,
"loss": 759.9876,
"step": 130
},
{
"epoch": 0.2528957528957529,
"grad_norm": 3.324525833129883,
"learning_rate": 7.471042471042471e-06,
"loss": 756.2051,
"step": 131
},
{
"epoch": 0.2548262548262548,
"grad_norm": 2.8439035415649414,
"learning_rate": 7.451737451737452e-06,
"loss": 755.392,
"step": 132
},
{
"epoch": 0.25675675675675674,
"grad_norm": 2.882779598236084,
"learning_rate": 7.4324324324324324e-06,
"loss": 771.8354,
"step": 133
},
{
"epoch": 0.25868725868725867,
"grad_norm": 3.0841407775878906,
"learning_rate": 7.413127413127414e-06,
"loss": 754.2871,
"step": 134
},
{
"epoch": 0.2606177606177606,
"grad_norm": 3.2702057361602783,
"learning_rate": 7.393822393822394e-06,
"loss": 742.1606,
"step": 135
},
{
"epoch": 0.2625482625482625,
"grad_norm": 2.7956244945526123,
"learning_rate": 7.3745173745173745e-06,
"loss": 750.0869,
"step": 136
},
{
"epoch": 0.2644787644787645,
"grad_norm": 3.004718542098999,
"learning_rate": 7.355212355212356e-06,
"loss": 781.2869,
"step": 137
},
{
"epoch": 0.26640926640926643,
"grad_norm": 3.8438968658447266,
"learning_rate": 7.335907335907336e-06,
"loss": 755.309,
"step": 138
},
{
"epoch": 0.26833976833976836,
"grad_norm": 3.0307114124298096,
"learning_rate": 7.316602316602317e-06,
"loss": 757.5778,
"step": 139
},
{
"epoch": 0.2702702702702703,
"grad_norm": 2.9648211002349854,
"learning_rate": 7.297297297297298e-06,
"loss": 753.4247,
"step": 140
},
{
"epoch": 0.2722007722007722,
"grad_norm": 2.9691784381866455,
"learning_rate": 7.277992277992278e-06,
"loss": 757.2734,
"step": 141
},
{
"epoch": 0.27413127413127414,
"grad_norm": 2.8389272689819336,
"learning_rate": 7.2586872586872595e-06,
"loss": 758.1973,
"step": 142
},
{
"epoch": 0.27606177606177607,
"grad_norm": 2.8457741737365723,
"learning_rate": 7.23938223938224e-06,
"loss": 759.3056,
"step": 143
},
{
"epoch": 0.277992277992278,
"grad_norm": 2.9947755336761475,
"learning_rate": 7.22007722007722e-06,
"loss": 756.6718,
"step": 144
},
{
"epoch": 0.2799227799227799,
"grad_norm": 3.285741090774536,
"learning_rate": 7.2007722007722015e-06,
"loss": 741.2257,
"step": 145
},
{
"epoch": 0.28185328185328185,
"grad_norm": 3.118593692779541,
"learning_rate": 7.181467181467182e-06,
"loss": 757.1205,
"step": 146
},
{
"epoch": 0.28378378378378377,
"grad_norm": 3.1158804893493652,
"learning_rate": 7.162162162162163e-06,
"loss": 747.1119,
"step": 147
},
{
"epoch": 0.2857142857142857,
"grad_norm": 2.9939019680023193,
"learning_rate": 7.1428571428571436e-06,
"loss": 738.4834,
"step": 148
},
{
"epoch": 0.2876447876447876,
"grad_norm": 3.792679786682129,
"learning_rate": 7.123552123552124e-06,
"loss": 758.1683,
"step": 149
},
{
"epoch": 0.28957528957528955,
"grad_norm": 3.3988595008850098,
"learning_rate": 7.104247104247105e-06,
"loss": 757.7793,
"step": 150
},
{
"epoch": 0.2915057915057915,
"grad_norm": 3.4164488315582275,
"learning_rate": 7.084942084942086e-06,
"loss": 753.686,
"step": 151
},
{
"epoch": 0.29343629343629346,
"grad_norm": 4.295523166656494,
"learning_rate": 7.065637065637067e-06,
"loss": 749.4221,
"step": 152
},
{
"epoch": 0.2953667953667954,
"grad_norm": 3.1985435485839844,
"learning_rate": 7.046332046332047e-06,
"loss": 766.7484,
"step": 153
},
{
"epoch": 0.2972972972972973,
"grad_norm": 2.811443567276001,
"learning_rate": 7.027027027027028e-06,
"loss": 744.4705,
"step": 154
},
{
"epoch": 0.29922779922779924,
"grad_norm": 2.8985869884490967,
"learning_rate": 7.007722007722009e-06,
"loss": 771.2823,
"step": 155
},
{
"epoch": 0.30115830115830117,
"grad_norm": 2.913670301437378,
"learning_rate": 6.988416988416989e-06,
"loss": 767.1945,
"step": 156
},
{
"epoch": 0.3030888030888031,
"grad_norm": 3.091149091720581,
"learning_rate": 6.9691119691119706e-06,
"loss": 770.168,
"step": 157
},
{
"epoch": 0.305019305019305,
"grad_norm": 2.89846134185791,
"learning_rate": 6.949806949806951e-06,
"loss": 746.1279,
"step": 158
},
{
"epoch": 0.30694980694980695,
"grad_norm": 2.981823444366455,
"learning_rate": 6.9305019305019305e-06,
"loss": 749.2059,
"step": 159
},
{
"epoch": 0.3088803088803089,
"grad_norm": 3.6476705074310303,
"learning_rate": 6.911196911196911e-06,
"loss": 757.4434,
"step": 160
},
{
"epoch": 0.3108108108108108,
"grad_norm": 2.946174383163452,
"learning_rate": 6.891891891891892e-06,
"loss": 759.8816,
"step": 161
},
{
"epoch": 0.3127413127413127,
"grad_norm": 3.030103921890259,
"learning_rate": 6.872586872586873e-06,
"loss": 757.3468,
"step": 162
},
{
"epoch": 0.31467181467181465,
"grad_norm": 3.3172245025634766,
"learning_rate": 6.853281853281854e-06,
"loss": 753.3054,
"step": 163
},
{
"epoch": 0.3166023166023166,
"grad_norm": 3.128265857696533,
"learning_rate": 6.833976833976834e-06,
"loss": 764.0436,
"step": 164
},
{
"epoch": 0.3185328185328185,
"grad_norm": 2.8332438468933105,
"learning_rate": 6.814671814671815e-06,
"loss": 747.4676,
"step": 165
},
{
"epoch": 0.3204633204633205,
"grad_norm": 2.9192118644714355,
"learning_rate": 6.795366795366796e-06,
"loss": 759.479,
"step": 166
},
{
"epoch": 0.3223938223938224,
"grad_norm": 2.926225423812866,
"learning_rate": 6.776061776061776e-06,
"loss": 749.5502,
"step": 167
},
{
"epoch": 0.32432432432432434,
"grad_norm": 3.739107608795166,
"learning_rate": 6.7567567567567575e-06,
"loss": 761.9246,
"step": 168
},
{
"epoch": 0.32625482625482627,
"grad_norm": 3.1689376831054688,
"learning_rate": 6.737451737451738e-06,
"loss": 759.9313,
"step": 169
},
{
"epoch": 0.3281853281853282,
"grad_norm": 4.102996349334717,
"learning_rate": 6.718146718146718e-06,
"loss": 755.9019,
"step": 170
},
{
"epoch": 0.3301158301158301,
"grad_norm": 4.32927131652832,
"learning_rate": 6.6988416988417e-06,
"loss": 748.2408,
"step": 171
},
{
"epoch": 0.33204633204633205,
"grad_norm": 2.9341273307800293,
"learning_rate": 6.67953667953668e-06,
"loss": 767.1997,
"step": 172
},
{
"epoch": 0.333976833976834,
"grad_norm": 2.9747681617736816,
"learning_rate": 6.66023166023166e-06,
"loss": 749.2424,
"step": 173
},
{
"epoch": 0.3359073359073359,
"grad_norm": 3.03954815864563,
"learning_rate": 6.640926640926642e-06,
"loss": 753.1185,
"step": 174
},
{
"epoch": 0.33783783783783783,
"grad_norm": 2.896865129470825,
"learning_rate": 6.621621621621622e-06,
"loss": 760.0892,
"step": 175
},
{
"epoch": 0.33976833976833976,
"grad_norm": 4.517345905303955,
"learning_rate": 6.602316602316603e-06,
"loss": 761.2517,
"step": 176
},
{
"epoch": 0.3416988416988417,
"grad_norm": 3.4095852375030518,
"learning_rate": 6.583011583011584e-06,
"loss": 760.7,
"step": 177
},
{
"epoch": 0.3436293436293436,
"grad_norm": 3.5481455326080322,
"learning_rate": 6.563706563706564e-06,
"loss": 761.6207,
"step": 178
},
{
"epoch": 0.34555984555984554,
"grad_norm": 4.07427978515625,
"learning_rate": 6.544401544401545e-06,
"loss": 762.6121,
"step": 179
},
{
"epoch": 0.3474903474903475,
"grad_norm": 3.08604097366333,
"learning_rate": 6.525096525096526e-06,
"loss": 743.1675,
"step": 180
},
{
"epoch": 0.34942084942084944,
"grad_norm": 4.791824817657471,
"learning_rate": 6.505791505791507e-06,
"loss": 773.7855,
"step": 181
},
{
"epoch": 0.35135135135135137,
"grad_norm": 5.513062953948975,
"learning_rate": 6.486486486486487e-06,
"loss": 755.7749,
"step": 182
},
{
"epoch": 0.3532818532818533,
"grad_norm": 4.703281879425049,
"learning_rate": 6.467181467181468e-06,
"loss": 762.9774,
"step": 183
},
{
"epoch": 0.3552123552123552,
"grad_norm": 4.766940593719482,
"learning_rate": 6.447876447876449e-06,
"loss": 757.7225,
"step": 184
},
{
"epoch": 0.35714285714285715,
"grad_norm": 2.995068073272705,
"learning_rate": 6.4285714285714295e-06,
"loss": 757.5352,
"step": 185
},
{
"epoch": 0.3590733590733591,
"grad_norm": 4.172158241271973,
"learning_rate": 6.409266409266411e-06,
"loss": 758.1395,
"step": 186
},
{
"epoch": 0.361003861003861,
"grad_norm": 3.057084798812866,
"learning_rate": 6.389961389961391e-06,
"loss": 763.8903,
"step": 187
},
{
"epoch": 0.36293436293436293,
"grad_norm": 3.4208884239196777,
"learning_rate": 6.370656370656371e-06,
"loss": 755.9663,
"step": 188
},
{
"epoch": 0.36486486486486486,
"grad_norm": 3.5881717205047607,
"learning_rate": 6.351351351351351e-06,
"loss": 764.6107,
"step": 189
},
{
"epoch": 0.3667953667953668,
"grad_norm": 4.713850498199463,
"learning_rate": 6.332046332046332e-06,
"loss": 748.8102,
"step": 190
},
{
"epoch": 0.3687258687258687,
"grad_norm": 3.244720935821533,
"learning_rate": 6.312741312741313e-06,
"loss": 764.7598,
"step": 191
},
{
"epoch": 0.37065637065637064,
"grad_norm": 4.447683334350586,
"learning_rate": 6.293436293436294e-06,
"loss": 752.6754,
"step": 192
},
{
"epoch": 0.37258687258687256,
"grad_norm": 3.9772603511810303,
"learning_rate": 6.274131274131274e-06,
"loss": 756.0471,
"step": 193
},
{
"epoch": 0.3745173745173745,
"grad_norm": 4.803740978240967,
"learning_rate": 6.254826254826255e-06,
"loss": 769.636,
"step": 194
},
{
"epoch": 0.3764478764478765,
"grad_norm": 4.907277584075928,
"learning_rate": 6.235521235521236e-06,
"loss": 765.9659,
"step": 195
},
{
"epoch": 0.3783783783783784,
"grad_norm": 3.6414902210235596,
"learning_rate": 6.2162162162162164e-06,
"loss": 760.1011,
"step": 196
},
{
"epoch": 0.3803088803088803,
"grad_norm": 3.6037609577178955,
"learning_rate": 6.196911196911197e-06,
"loss": 750.7067,
"step": 197
},
{
"epoch": 0.38223938223938225,
"grad_norm": 2.8772242069244385,
"learning_rate": 6.177606177606178e-06,
"loss": 756.6414,
"step": 198
},
{
"epoch": 0.3841698841698842,
"grad_norm": 5.2583184242248535,
"learning_rate": 6.1583011583011585e-06,
"loss": 751.6912,
"step": 199
},
{
"epoch": 0.3861003861003861,
"grad_norm": 2.8050296306610107,
"learning_rate": 6.13899613899614e-06,
"loss": 743.5164,
"step": 200
},
{
"epoch": 0.38803088803088803,
"grad_norm": 3.5677103996276855,
"learning_rate": 6.11969111969112e-06,
"loss": 764.8164,
"step": 201
},
{
"epoch": 0.38996138996138996,
"grad_norm": 3.1550533771514893,
"learning_rate": 6.1003861003861005e-06,
"loss": 762.9319,
"step": 202
},
{
"epoch": 0.3918918918918919,
"grad_norm": 2.991600751876831,
"learning_rate": 6.081081081081082e-06,
"loss": 770.1368,
"step": 203
},
{
"epoch": 0.3938223938223938,
"grad_norm": 2.9870667457580566,
"learning_rate": 6.061776061776062e-06,
"loss": 751.1348,
"step": 204
},
{
"epoch": 0.39575289575289574,
"grad_norm": 2.980593204498291,
"learning_rate": 6.0424710424710434e-06,
"loss": 769.3608,
"step": 205
},
{
"epoch": 0.39768339768339767,
"grad_norm": 3.3218741416931152,
"learning_rate": 6.023166023166024e-06,
"loss": 763.5135,
"step": 206
},
{
"epoch": 0.3996138996138996,
"grad_norm": 3.1396753787994385,
"learning_rate": 6.003861003861004e-06,
"loss": 757.1105,
"step": 207
},
{
"epoch": 0.4015444015444015,
"grad_norm": 3.2232918739318848,
"learning_rate": 5.9845559845559855e-06,
"loss": 743.5112,
"step": 208
},
{
"epoch": 0.4034749034749035,
"grad_norm": 3.3327605724334717,
"learning_rate": 5.965250965250966e-06,
"loss": 766.4379,
"step": 209
},
{
"epoch": 0.40540540540540543,
"grad_norm": 3.4697072505950928,
"learning_rate": 5.945945945945947e-06,
"loss": 767.7292,
"step": 210
},
{
"epoch": 0.40733590733590735,
"grad_norm": 4.161364555358887,
"learning_rate": 5.9266409266409275e-06,
"loss": 746.888,
"step": 211
},
{
"epoch": 0.4092664092664093,
"grad_norm": 3.0299108028411865,
"learning_rate": 5.907335907335908e-06,
"loss": 773.0205,
"step": 212
},
{
"epoch": 0.4111969111969112,
"grad_norm": 3.702420949935913,
"learning_rate": 5.888030888030889e-06,
"loss": 762.8275,
"step": 213
},
{
"epoch": 0.41312741312741313,
"grad_norm": 4.044224262237549,
"learning_rate": 5.86872586872587e-06,
"loss": 762.0264,
"step": 214
},
{
"epoch": 0.41505791505791506,
"grad_norm": 3.09975266456604,
"learning_rate": 5.84942084942085e-06,
"loss": 738.4187,
"step": 215
},
{
"epoch": 0.416988416988417,
"grad_norm": 3.779367208480835,
"learning_rate": 5.83011583011583e-06,
"loss": 764.1588,
"step": 216
},
{
"epoch": 0.4189189189189189,
"grad_norm": 3.9344935417175293,
"learning_rate": 5.810810810810811e-06,
"loss": 751.987,
"step": 217
},
{
"epoch": 0.42084942084942084,
"grad_norm": 3.073119878768921,
"learning_rate": 5.791505791505791e-06,
"loss": 763.5579,
"step": 218
},
{
"epoch": 0.42277992277992277,
"grad_norm": 5.221838474273682,
"learning_rate": 5.7722007722007725e-06,
"loss": 736.679,
"step": 219
},
{
"epoch": 0.4247104247104247,
"grad_norm": 3.210583209991455,
"learning_rate": 5.752895752895753e-06,
"loss": 743.9155,
"step": 220
},
{
"epoch": 0.4266409266409266,
"grad_norm": 3.032778739929199,
"learning_rate": 5.733590733590734e-06,
"loss": 756.9208,
"step": 221
},
{
"epoch": 0.42857142857142855,
"grad_norm": 3.052091598510742,
"learning_rate": 5.7142857142857145e-06,
"loss": 753.4189,
"step": 222
},
{
"epoch": 0.4305019305019305,
"grad_norm": 2.934615135192871,
"learning_rate": 5.694980694980695e-06,
"loss": 744.2302,
"step": 223
},
{
"epoch": 0.43243243243243246,
"grad_norm": 3.453640937805176,
"learning_rate": 5.675675675675676e-06,
"loss": 753.5289,
"step": 224
},
{
"epoch": 0.4343629343629344,
"grad_norm": 4.493204116821289,
"learning_rate": 5.6563706563706566e-06,
"loss": 773.2119,
"step": 225
},
{
"epoch": 0.4362934362934363,
"grad_norm": 3.1990344524383545,
"learning_rate": 5.637065637065637e-06,
"loss": 771.2296,
"step": 226
},
{
"epoch": 0.43822393822393824,
"grad_norm": 4.990328788757324,
"learning_rate": 5.617760617760618e-06,
"loss": 766.2954,
"step": 227
},
{
"epoch": 0.44015444015444016,
"grad_norm": 6.218008518218994,
"learning_rate": 5.598455598455599e-06,
"loss": 754.2188,
"step": 228
},
{
"epoch": 0.4420849420849421,
"grad_norm": 4.668118476867676,
"learning_rate": 5.57915057915058e-06,
"loss": 764.908,
"step": 229
},
{
"epoch": 0.444015444015444,
"grad_norm": 3.144949436187744,
"learning_rate": 5.55984555984556e-06,
"loss": 769.907,
"step": 230
},
{
"epoch": 0.44594594594594594,
"grad_norm": 3.761617422103882,
"learning_rate": 5.540540540540541e-06,
"loss": 762.5698,
"step": 231
},
{
"epoch": 0.44787644787644787,
"grad_norm": 3.3352720737457275,
"learning_rate": 5.521235521235522e-06,
"loss": 760.8379,
"step": 232
},
{
"epoch": 0.4498069498069498,
"grad_norm": 3.646754741668701,
"learning_rate": 5.501930501930502e-06,
"loss": 762.5335,
"step": 233
},
{
"epoch": 0.4517374517374517,
"grad_norm": 5.615658283233643,
"learning_rate": 5.4826254826254836e-06,
"loss": 763.7914,
"step": 234
},
{
"epoch": 0.45366795366795365,
"grad_norm": 3.1293540000915527,
"learning_rate": 5.463320463320464e-06,
"loss": 765.2655,
"step": 235
},
{
"epoch": 0.4555984555984556,
"grad_norm": 3.9679007530212402,
"learning_rate": 5.444015444015444e-06,
"loss": 776.4496,
"step": 236
},
{
"epoch": 0.4575289575289575,
"grad_norm": 4.110316753387451,
"learning_rate": 5.424710424710426e-06,
"loss": 769.2201,
"step": 237
},
{
"epoch": 0.4594594594594595,
"grad_norm": 3.5175106525421143,
"learning_rate": 5.405405405405406e-06,
"loss": 759.53,
"step": 238
},
{
"epoch": 0.4613899613899614,
"grad_norm": 3.206360101699829,
"learning_rate": 5.386100386100387e-06,
"loss": 753.7003,
"step": 239
},
{
"epoch": 0.46332046332046334,
"grad_norm": 2.994739294052124,
"learning_rate": 5.366795366795368e-06,
"loss": 750.8613,
"step": 240
},
{
"epoch": 0.46525096525096526,
"grad_norm": 2.966420888900757,
"learning_rate": 5.347490347490348e-06,
"loss": 755.6821,
"step": 241
},
{
"epoch": 0.4671814671814672,
"grad_norm": 5.035081386566162,
"learning_rate": 5.328185328185329e-06,
"loss": 753.4547,
"step": 242
},
{
"epoch": 0.4691119691119691,
"grad_norm": 3.399747371673584,
"learning_rate": 5.30888030888031e-06,
"loss": 752.4637,
"step": 243
},
{
"epoch": 0.47104247104247104,
"grad_norm": 2.9818832874298096,
"learning_rate": 5.28957528957529e-06,
"loss": 761.8242,
"step": 244
},
{
"epoch": 0.47297297297297297,
"grad_norm": 3.317462921142578,
"learning_rate": 5.2702702702702705e-06,
"loss": 741.8611,
"step": 245
},
{
"epoch": 0.4749034749034749,
"grad_norm": 3.9936470985412598,
"learning_rate": 5.250965250965251e-06,
"loss": 755.0592,
"step": 246
},
{
"epoch": 0.4768339768339768,
"grad_norm": 3.263794422149658,
"learning_rate": 5.231660231660231e-06,
"loss": 748.0864,
"step": 247
},
{
"epoch": 0.47876447876447875,
"grad_norm": 3.1874217987060547,
"learning_rate": 5.212355212355213e-06,
"loss": 751.8619,
"step": 248
},
{
"epoch": 0.4806949806949807,
"grad_norm": 4.754833698272705,
"learning_rate": 5.193050193050193e-06,
"loss": 763.7399,
"step": 249
},
{
"epoch": 0.4826254826254826,
"grad_norm": 3.21230149269104,
"learning_rate": 5.173745173745173e-06,
"loss": 757.1412,
"step": 250
},
{
"epoch": 0.48455598455598453,
"grad_norm": 3.2287521362304688,
"learning_rate": 5.154440154440155e-06,
"loss": 749.6309,
"step": 251
},
{
"epoch": 0.4864864864864865,
"grad_norm": 3.137535572052002,
"learning_rate": 5.135135135135135e-06,
"loss": 759.4156,
"step": 252
},
{
"epoch": 0.48841698841698844,
"grad_norm": 3.6146438121795654,
"learning_rate": 5.115830115830116e-06,
"loss": 741.8262,
"step": 253
},
{
"epoch": 0.49034749034749037,
"grad_norm": 2.9531517028808594,
"learning_rate": 5.096525096525097e-06,
"loss": 763.7745,
"step": 254
},
{
"epoch": 0.4922779922779923,
"grad_norm": 3.0513949394226074,
"learning_rate": 5.077220077220077e-06,
"loss": 755.271,
"step": 255
},
{
"epoch": 0.4942084942084942,
"grad_norm": 3.2384207248687744,
"learning_rate": 5.057915057915058e-06,
"loss": 765.3377,
"step": 256
},
{
"epoch": 0.49613899613899615,
"grad_norm": 3.8821310997009277,
"learning_rate": 5.038610038610039e-06,
"loss": 763.2088,
"step": 257
},
{
"epoch": 0.4980694980694981,
"grad_norm": 2.926225423812866,
"learning_rate": 5.01930501930502e-06,
"loss": 758.5596,
"step": 258
},
{
"epoch": 0.5,
"grad_norm": 2.9751245975494385,
"learning_rate": 5e-06,
"loss": 757.9719,
"step": 259
},
{
"epoch": 0.5019305019305019,
"grad_norm": 4.772404193878174,
"learning_rate": 4.980694980694981e-06,
"loss": 757.3872,
"step": 260
},
{
"epoch": 0.5038610038610039,
"grad_norm": 3.096656322479248,
"learning_rate": 4.961389961389962e-06,
"loss": 760.4723,
"step": 261
},
{
"epoch": 0.5057915057915058,
"grad_norm": 3.504852056503296,
"learning_rate": 4.9420849420849425e-06,
"loss": 747.486,
"step": 262
},
{
"epoch": 0.5077220077220077,
"grad_norm": 3.33747935295105,
"learning_rate": 4.922779922779924e-06,
"loss": 758.9821,
"step": 263
},
{
"epoch": 0.5096525096525096,
"grad_norm": 3.4812850952148438,
"learning_rate": 4.903474903474904e-06,
"loss": 765.7698,
"step": 264
},
{
"epoch": 0.5115830115830116,
"grad_norm": 3.174971580505371,
"learning_rate": 4.8841698841698845e-06,
"loss": 756.7213,
"step": 265
},
{
"epoch": 0.5135135135135135,
"grad_norm": 3.601083755493164,
"learning_rate": 4.864864864864866e-06,
"loss": 762.7404,
"step": 266
},
{
"epoch": 0.5154440154440154,
"grad_norm": 3.3433072566986084,
"learning_rate": 4.845559845559845e-06,
"loss": 747.8812,
"step": 267
},
{
"epoch": 0.5173745173745173,
"grad_norm": 3.7494962215423584,
"learning_rate": 4.8262548262548266e-06,
"loss": 756.0776,
"step": 268
},
{
"epoch": 0.5193050193050193,
"grad_norm": 4.609169960021973,
"learning_rate": 4.806949806949807e-06,
"loss": 750.7801,
"step": 269
},
{
"epoch": 0.5212355212355212,
"grad_norm": 5.737427711486816,
"learning_rate": 4.787644787644788e-06,
"loss": 751.1343,
"step": 270
},
{
"epoch": 0.5231660231660231,
"grad_norm": 3.3391857147216797,
"learning_rate": 4.768339768339769e-06,
"loss": 760.2618,
"step": 271
},
{
"epoch": 0.525096525096525,
"grad_norm": 3.1758346557617188,
"learning_rate": 4.749034749034749e-06,
"loss": 756.4235,
"step": 272
},
{
"epoch": 0.527027027027027,
"grad_norm": 4.515329360961914,
"learning_rate": 4.72972972972973e-06,
"loss": 770.3212,
"step": 273
},
{
"epoch": 0.528957528957529,
"grad_norm": 3.2932803630828857,
"learning_rate": 4.710424710424711e-06,
"loss": 751.3934,
"step": 274
},
{
"epoch": 0.5308880308880309,
"grad_norm": 3.047992467880249,
"learning_rate": 4.691119691119692e-06,
"loss": 749.6434,
"step": 275
},
{
"epoch": 0.5328185328185329,
"grad_norm": 3.2720017433166504,
"learning_rate": 4.671814671814672e-06,
"loss": 771.3862,
"step": 276
},
{
"epoch": 0.5347490347490348,
"grad_norm": 3.6635942459106445,
"learning_rate": 4.652509652509653e-06,
"loss": 758.3706,
"step": 277
},
{
"epoch": 0.5366795366795367,
"grad_norm": 3.4579808712005615,
"learning_rate": 4.633204633204634e-06,
"loss": 747.9475,
"step": 278
},
{
"epoch": 0.5386100386100386,
"grad_norm": 3.525111675262451,
"learning_rate": 4.613899613899614e-06,
"loss": 756.5895,
"step": 279
},
{
"epoch": 0.5405405405405406,
"grad_norm": 3.336205005645752,
"learning_rate": 4.594594594594596e-06,
"loss": 772.9059,
"step": 280
},
{
"epoch": 0.5424710424710425,
"grad_norm": 3.758382558822632,
"learning_rate": 4.575289575289575e-06,
"loss": 769.8763,
"step": 281
},
{
"epoch": 0.5444015444015444,
"grad_norm": 3.054797649383545,
"learning_rate": 4.5559845559845564e-06,
"loss": 768.0063,
"step": 282
},
{
"epoch": 0.5463320463320464,
"grad_norm": 4.434202671051025,
"learning_rate": 4.536679536679537e-06,
"loss": 760.1692,
"step": 283
},
{
"epoch": 0.5482625482625483,
"grad_norm": 4.3827667236328125,
"learning_rate": 4.517374517374517e-06,
"loss": 744.6565,
"step": 284
},
{
"epoch": 0.5501930501930502,
"grad_norm": 6.237070560455322,
"learning_rate": 4.4980694980694985e-06,
"loss": 767.5054,
"step": 285
},
{
"epoch": 0.5521235521235521,
"grad_norm": 3.162343740463257,
"learning_rate": 4.478764478764479e-06,
"loss": 762.6232,
"step": 286
},
{
"epoch": 0.5540540540540541,
"grad_norm": 3.9073193073272705,
"learning_rate": 4.45945945945946e-06,
"loss": 778.0848,
"step": 287
},
{
"epoch": 0.555984555984556,
"grad_norm": 6.03090763092041,
"learning_rate": 4.4401544401544405e-06,
"loss": 759.6763,
"step": 288
},
{
"epoch": 0.5579150579150579,
"grad_norm": 4.3667311668396,
"learning_rate": 4.420849420849421e-06,
"loss": 762.0499,
"step": 289
},
{
"epoch": 0.5598455598455598,
"grad_norm": 3.5320377349853516,
"learning_rate": 4.401544401544402e-06,
"loss": 769.951,
"step": 290
},
{
"epoch": 0.5617760617760618,
"grad_norm": 3.7424259185791016,
"learning_rate": 4.382239382239383e-06,
"loss": 767.4916,
"step": 291
},
{
"epoch": 0.5637065637065637,
"grad_norm": 3.3271844387054443,
"learning_rate": 4.362934362934364e-06,
"loss": 761.4987,
"step": 292
},
{
"epoch": 0.5656370656370656,
"grad_norm": 2.957545757293701,
"learning_rate": 4.343629343629344e-06,
"loss": 762.5405,
"step": 293
},
{
"epoch": 0.5675675675675675,
"grad_norm": 3.087904453277588,
"learning_rate": 4.324324324324325e-06,
"loss": 752.0869,
"step": 294
},
{
"epoch": 0.5694980694980695,
"grad_norm": 3.4047775268554688,
"learning_rate": 4.305019305019305e-06,
"loss": 764.2693,
"step": 295
},
{
"epoch": 0.5714285714285714,
"grad_norm": 3.244701385498047,
"learning_rate": 4.2857142857142855e-06,
"loss": 754.4578,
"step": 296
},
{
"epoch": 0.5733590733590733,
"grad_norm": 3.3166253566741943,
"learning_rate": 4.266409266409267e-06,
"loss": 773.7292,
"step": 297
},
{
"epoch": 0.5752895752895753,
"grad_norm": 3.0471668243408203,
"learning_rate": 4.247104247104247e-06,
"loss": 778.6743,
"step": 298
},
{
"epoch": 0.5772200772200772,
"grad_norm": 3.6165595054626465,
"learning_rate": 4.227799227799228e-06,
"loss": 755.6964,
"step": 299
},
{
"epoch": 0.5791505791505791,
"grad_norm": 3.426254987716675,
"learning_rate": 4.208494208494209e-06,
"loss": 768.4984,
"step": 300
},
{
"epoch": 0.581081081081081,
"grad_norm": 4.2183518409729,
"learning_rate": 4.189189189189189e-06,
"loss": 756.4444,
"step": 301
},
{
"epoch": 0.583011583011583,
"grad_norm": 4.252375602722168,
"learning_rate": 4.16988416988417e-06,
"loss": 760.7186,
"step": 302
},
{
"epoch": 0.584942084942085,
"grad_norm": 3.898979425430298,
"learning_rate": 4.150579150579151e-06,
"loss": 755.8693,
"step": 303
},
{
"epoch": 0.5868725868725869,
"grad_norm": 4.586599349975586,
"learning_rate": 4.131274131274132e-06,
"loss": 767.7788,
"step": 304
},
{
"epoch": 0.5888030888030888,
"grad_norm": 3.4545576572418213,
"learning_rate": 4.1119691119691125e-06,
"loss": 755.6498,
"step": 305
},
{
"epoch": 0.5907335907335908,
"grad_norm": 3.056624174118042,
"learning_rate": 4.092664092664093e-06,
"loss": 779.4406,
"step": 306
},
{
"epoch": 0.5926640926640927,
"grad_norm": 3.1570804119110107,
"learning_rate": 4.073359073359074e-06,
"loss": 756.1407,
"step": 307
},
{
"epoch": 0.5945945945945946,
"grad_norm": 5.404672622680664,
"learning_rate": 4.0540540540540545e-06,
"loss": 770.681,
"step": 308
},
{
"epoch": 0.5965250965250966,
"grad_norm": 3.0374257564544678,
"learning_rate": 4.034749034749035e-06,
"loss": 759.7466,
"step": 309
},
{
"epoch": 0.5984555984555985,
"grad_norm": 3.2685699462890625,
"learning_rate": 4.015444015444015e-06,
"loss": 762.7603,
"step": 310
},
{
"epoch": 0.6003861003861004,
"grad_norm": 7.29334831237793,
"learning_rate": 3.996138996138997e-06,
"loss": 742.7489,
"step": 311
},
{
"epoch": 0.6023166023166023,
"grad_norm": 3.852389335632324,
"learning_rate": 3.976833976833977e-06,
"loss": 767.3596,
"step": 312
},
{
"epoch": 0.6042471042471043,
"grad_norm": 3.200193166732788,
"learning_rate": 3.957528957528957e-06,
"loss": 766.8765,
"step": 313
},
{
"epoch": 0.6061776061776062,
"grad_norm": 3.5071451663970947,
"learning_rate": 3.938223938223939e-06,
"loss": 776.7089,
"step": 314
},
{
"epoch": 0.6081081081081081,
"grad_norm": 4.16587495803833,
"learning_rate": 3.918918918918919e-06,
"loss": 741.2016,
"step": 315
},
{
"epoch": 0.61003861003861,
"grad_norm": 5.703463554382324,
"learning_rate": 3.8996138996139e-06,
"loss": 756.0547,
"step": 316
},
{
"epoch": 0.611969111969112,
"grad_norm": 5.430912017822266,
"learning_rate": 3.880308880308881e-06,
"loss": 756.722,
"step": 317
},
{
"epoch": 0.6138996138996139,
"grad_norm": 8.650154113769531,
"learning_rate": 3.861003861003861e-06,
"loss": 745.7667,
"step": 318
},
{
"epoch": 0.6158301158301158,
"grad_norm": 13.80824089050293,
"learning_rate": 3.841698841698842e-06,
"loss": 755.2433,
"step": 319
},
{
"epoch": 0.6177606177606177,
"grad_norm": 7.08932638168335,
"learning_rate": 3.822393822393823e-06,
"loss": 753.7036,
"step": 320
},
{
"epoch": 0.6196911196911197,
"grad_norm": 3.6433534622192383,
"learning_rate": 3.8030888030888036e-06,
"loss": 773.3873,
"step": 321
},
{
"epoch": 0.6216216216216216,
"grad_norm": 3.2302098274230957,
"learning_rate": 3.7837837837837844e-06,
"loss": 758.14,
"step": 322
},
{
"epoch": 0.6235521235521235,
"grad_norm": 3.731865644454956,
"learning_rate": 3.7644787644787652e-06,
"loss": 766.5978,
"step": 323
},
{
"epoch": 0.6254826254826255,
"grad_norm": 3.244114875793457,
"learning_rate": 3.745173745173745e-06,
"loss": 748.5593,
"step": 324
},
{
"epoch": 0.6274131274131274,
"grad_norm": 6.71386194229126,
"learning_rate": 3.725868725868726e-06,
"loss": 750.8797,
"step": 325
},
{
"epoch": 0.6293436293436293,
"grad_norm": 6.857434272766113,
"learning_rate": 3.706563706563707e-06,
"loss": 751.0732,
"step": 326
},
{
"epoch": 0.6312741312741312,
"grad_norm": 3.1057608127593994,
"learning_rate": 3.6872586872586872e-06,
"loss": 751.9279,
"step": 327
},
{
"epoch": 0.6332046332046332,
"grad_norm": 4.462062835693359,
"learning_rate": 3.667953667953668e-06,
"loss": 761.2999,
"step": 328
},
{
"epoch": 0.6351351351351351,
"grad_norm": 5.394342422485352,
"learning_rate": 3.648648648648649e-06,
"loss": 765.5782,
"step": 329
},
{
"epoch": 0.637065637065637,
"grad_norm": 4.586513996124268,
"learning_rate": 3.6293436293436297e-06,
"loss": 759.7944,
"step": 330
},
{
"epoch": 0.638996138996139,
"grad_norm": 6.204144477844238,
"learning_rate": 3.61003861003861e-06,
"loss": 749.0181,
"step": 331
},
{
"epoch": 0.640926640926641,
"grad_norm": 3.065887451171875,
"learning_rate": 3.590733590733591e-06,
"loss": 764.4531,
"step": 332
},
{
"epoch": 0.6428571428571429,
"grad_norm": 3.5210766792297363,
"learning_rate": 3.5714285714285718e-06,
"loss": 757.1459,
"step": 333
},
{
"epoch": 0.6447876447876448,
"grad_norm": 4.145437240600586,
"learning_rate": 3.5521235521235526e-06,
"loss": 772.7172,
"step": 334
},
{
"epoch": 0.6467181467181468,
"grad_norm": 3.4393627643585205,
"learning_rate": 3.5328185328185334e-06,
"loss": 748.3174,
"step": 335
},
{
"epoch": 0.6486486486486487,
"grad_norm": 6.470264911651611,
"learning_rate": 3.513513513513514e-06,
"loss": 761.2096,
"step": 336
},
{
"epoch": 0.6505791505791506,
"grad_norm": 3.4703409671783447,
"learning_rate": 3.4942084942084947e-06,
"loss": 752.6991,
"step": 337
},
{
"epoch": 0.6525096525096525,
"grad_norm": 9.870447158813477,
"learning_rate": 3.4749034749034755e-06,
"loss": 771.034,
"step": 338
},
{
"epoch": 0.6544401544401545,
"grad_norm": 9.164502143859863,
"learning_rate": 3.4555984555984555e-06,
"loss": 759.8434,
"step": 339
},
{
"epoch": 0.6563706563706564,
"grad_norm": 8.678630828857422,
"learning_rate": 3.4362934362934363e-06,
"loss": 760.7397,
"step": 340
},
{
"epoch": 0.6583011583011583,
"grad_norm": 4.10188102722168,
"learning_rate": 3.416988416988417e-06,
"loss": 752.9614,
"step": 341
},
{
"epoch": 0.6602316602316602,
"grad_norm": 5.000187873840332,
"learning_rate": 3.397683397683398e-06,
"loss": 741.5472,
"step": 342
},
{
"epoch": 0.6621621621621622,
"grad_norm": 5.491066932678223,
"learning_rate": 3.3783783783783788e-06,
"loss": 752.3487,
"step": 343
},
{
"epoch": 0.6640926640926641,
"grad_norm": 4.743298530578613,
"learning_rate": 3.359073359073359e-06,
"loss": 740.1313,
"step": 344
},
{
"epoch": 0.666023166023166,
"grad_norm": 3.8254621028900146,
"learning_rate": 3.33976833976834e-06,
"loss": 751.6027,
"step": 345
},
{
"epoch": 0.667953667953668,
"grad_norm": 3.4094951152801514,
"learning_rate": 3.320463320463321e-06,
"loss": 747.6785,
"step": 346
},
{
"epoch": 0.6698841698841699,
"grad_norm": 4.3971757888793945,
"learning_rate": 3.3011583011583016e-06,
"loss": 765.18,
"step": 347
},
{
"epoch": 0.6718146718146718,
"grad_norm": 5.712260723114014,
"learning_rate": 3.281853281853282e-06,
"loss": 754.3199,
"step": 348
},
{
"epoch": 0.6737451737451737,
"grad_norm": 2.944014310836792,
"learning_rate": 3.262548262548263e-06,
"loss": 759.125,
"step": 349
},
{
"epoch": 0.6756756756756757,
"grad_norm": 5.154186725616455,
"learning_rate": 3.2432432432432437e-06,
"loss": 773.4568,
"step": 350
},
{
"epoch": 0.6776061776061776,
"grad_norm": 4.158810138702393,
"learning_rate": 3.2239382239382245e-06,
"loss": 773.0182,
"step": 351
},
{
"epoch": 0.6795366795366795,
"grad_norm": 2.9692142009735107,
"learning_rate": 3.2046332046332054e-06,
"loss": 759.3475,
"step": 352
},
{
"epoch": 0.6814671814671814,
"grad_norm": 5.837861061096191,
"learning_rate": 3.1853281853281853e-06,
"loss": 758.8844,
"step": 353
},
{
"epoch": 0.6833976833976834,
"grad_norm": 4.23380708694458,
"learning_rate": 3.166023166023166e-06,
"loss": 762.8468,
"step": 354
},
{
"epoch": 0.6853281853281853,
"grad_norm": 5.026414394378662,
"learning_rate": 3.146718146718147e-06,
"loss": 738.6548,
"step": 355
},
{
"epoch": 0.6872586872586872,
"grad_norm": 4.041588306427002,
"learning_rate": 3.1274131274131274e-06,
"loss": 766.1196,
"step": 356
},
{
"epoch": 0.6891891891891891,
"grad_norm": 4.39390230178833,
"learning_rate": 3.1081081081081082e-06,
"loss": 749.96,
"step": 357
},
{
"epoch": 0.6911196911196911,
"grad_norm": 4.870336055755615,
"learning_rate": 3.088803088803089e-06,
"loss": 750.3604,
"step": 358
},
{
"epoch": 0.693050193050193,
"grad_norm": 6.5269036293029785,
"learning_rate": 3.06949806949807e-06,
"loss": 765.9295,
"step": 359
},
{
"epoch": 0.694980694980695,
"grad_norm": 3.1647534370422363,
"learning_rate": 3.0501930501930503e-06,
"loss": 747.9913,
"step": 360
},
{
"epoch": 0.696911196911197,
"grad_norm": 3.435527801513672,
"learning_rate": 3.030888030888031e-06,
"loss": 770.7856,
"step": 361
},
{
"epoch": 0.6988416988416989,
"grad_norm": 5.05522346496582,
"learning_rate": 3.011583011583012e-06,
"loss": 773.0812,
"step": 362
},
{
"epoch": 0.7007722007722008,
"grad_norm": 4.161330223083496,
"learning_rate": 2.9922779922779927e-06,
"loss": 771.5272,
"step": 363
},
{
"epoch": 0.7027027027027027,
"grad_norm": 4.4345526695251465,
"learning_rate": 2.9729729729729736e-06,
"loss": 754.7959,
"step": 364
},
{
"epoch": 0.7046332046332047,
"grad_norm": 2.9777281284332275,
"learning_rate": 2.953667953667954e-06,
"loss": 749.3711,
"step": 365
},
{
"epoch": 0.7065637065637066,
"grad_norm": 2.855987310409546,
"learning_rate": 2.934362934362935e-06,
"loss": 749.5988,
"step": 366
},
{
"epoch": 0.7084942084942085,
"grad_norm": 3.307720184326172,
"learning_rate": 2.915057915057915e-06,
"loss": 761.0093,
"step": 367
},
{
"epoch": 0.7104247104247104,
"grad_norm": 3.787365198135376,
"learning_rate": 2.8957528957528956e-06,
"loss": 751.6588,
"step": 368
},
{
"epoch": 0.7123552123552124,
"grad_norm": 3.029486656188965,
"learning_rate": 2.8764478764478764e-06,
"loss": 755.2015,
"step": 369
},
{
"epoch": 0.7142857142857143,
"grad_norm": 3.0125091075897217,
"learning_rate": 2.8571428571428573e-06,
"loss": 766.543,
"step": 370
},
{
"epoch": 0.7162162162162162,
"grad_norm": 4.560346603393555,
"learning_rate": 2.837837837837838e-06,
"loss": 768.441,
"step": 371
},
{
"epoch": 0.7181467181467182,
"grad_norm": 3.190322160720825,
"learning_rate": 2.8185328185328185e-06,
"loss": 753.3053,
"step": 372
},
{
"epoch": 0.7200772200772201,
"grad_norm": 3.4040372371673584,
"learning_rate": 2.7992277992277993e-06,
"loss": 769.1228,
"step": 373
},
{
"epoch": 0.722007722007722,
"grad_norm": 5.434439182281494,
"learning_rate": 2.77992277992278e-06,
"loss": 754.5753,
"step": 374
},
{
"epoch": 0.7239382239382239,
"grad_norm": 3.3807380199432373,
"learning_rate": 2.760617760617761e-06,
"loss": 768.5261,
"step": 375
},
{
"epoch": 0.7258687258687259,
"grad_norm": 3.2653305530548096,
"learning_rate": 2.7413127413127418e-06,
"loss": 768.1964,
"step": 376
},
{
"epoch": 0.7277992277992278,
"grad_norm": 3.041628360748291,
"learning_rate": 2.722007722007722e-06,
"loss": 771.9323,
"step": 377
},
{
"epoch": 0.7297297297297297,
"grad_norm": 3.092245101928711,
"learning_rate": 2.702702702702703e-06,
"loss": 753.942,
"step": 378
},
{
"epoch": 0.7316602316602316,
"grad_norm": 4.467896938323975,
"learning_rate": 2.683397683397684e-06,
"loss": 749.8477,
"step": 379
},
{
"epoch": 0.7335907335907336,
"grad_norm": 2.989520788192749,
"learning_rate": 2.6640926640926647e-06,
"loss": 759.7245,
"step": 380
},
{
"epoch": 0.7355212355212355,
"grad_norm": 2.9288792610168457,
"learning_rate": 2.644787644787645e-06,
"loss": 761.0784,
"step": 381
},
{
"epoch": 0.7374517374517374,
"grad_norm": 3.411576986312866,
"learning_rate": 2.6254826254826255e-06,
"loss": 769.0735,
"step": 382
},
{
"epoch": 0.7393822393822393,
"grad_norm": 7.500954627990723,
"learning_rate": 2.6061776061776063e-06,
"loss": 752.552,
"step": 383
},
{
"epoch": 0.7413127413127413,
"grad_norm": 4.436896324157715,
"learning_rate": 2.5868725868725867e-06,
"loss": 772.7829,
"step": 384
},
{
"epoch": 0.7432432432432432,
"grad_norm": 3.8016252517700195,
"learning_rate": 2.5675675675675675e-06,
"loss": 756.5428,
"step": 385
},
{
"epoch": 0.7451737451737451,
"grad_norm": 3.1768035888671875,
"learning_rate": 2.5482625482625484e-06,
"loss": 761.3637,
"step": 386
},
{
"epoch": 0.747104247104247,
"grad_norm": 3.24792218208313,
"learning_rate": 2.528957528957529e-06,
"loss": 764.2406,
"step": 387
},
{
"epoch": 0.749034749034749,
"grad_norm": 3.6789443492889404,
"learning_rate": 2.50965250965251e-06,
"loss": 764.6515,
"step": 388
},
{
"epoch": 0.750965250965251,
"grad_norm": 2.998337507247925,
"learning_rate": 2.4903474903474904e-06,
"loss": 745.9545,
"step": 389
},
{
"epoch": 0.752895752895753,
"grad_norm": 5.5688796043396,
"learning_rate": 2.4710424710424712e-06,
"loss": 757.1937,
"step": 390
},
{
"epoch": 0.7548262548262549,
"grad_norm": 3.491425037384033,
"learning_rate": 2.451737451737452e-06,
"loss": 747.8929,
"step": 391
},
{
"epoch": 0.7567567567567568,
"grad_norm": 5.370115280151367,
"learning_rate": 2.432432432432433e-06,
"loss": 760.0563,
"step": 392
},
{
"epoch": 0.7586872586872587,
"grad_norm": 3.907552480697632,
"learning_rate": 2.4131274131274133e-06,
"loss": 752.7516,
"step": 393
},
{
"epoch": 0.7606177606177607,
"grad_norm": 3.052731513977051,
"learning_rate": 2.393822393822394e-06,
"loss": 752.1655,
"step": 394
},
{
"epoch": 0.7625482625482626,
"grad_norm": 5.308294296264648,
"learning_rate": 2.3745173745173745e-06,
"loss": 767.8257,
"step": 395
},
{
"epoch": 0.7644787644787645,
"grad_norm": 3.72119140625,
"learning_rate": 2.3552123552123553e-06,
"loss": 764.8139,
"step": 396
},
{
"epoch": 0.7664092664092664,
"grad_norm": 3.117110252380371,
"learning_rate": 2.335907335907336e-06,
"loss": 766.5566,
"step": 397
},
{
"epoch": 0.7683397683397684,
"grad_norm": 3.8100202083587646,
"learning_rate": 2.316602316602317e-06,
"loss": 767.1085,
"step": 398
},
{
"epoch": 0.7702702702702703,
"grad_norm": 3.1219418048858643,
"learning_rate": 2.297297297297298e-06,
"loss": 750.5438,
"step": 399
},
{
"epoch": 0.7722007722007722,
"grad_norm": 3.212282180786133,
"learning_rate": 2.2779922779922782e-06,
"loss": 748.5366,
"step": 400
},
{
"epoch": 0.7741312741312741,
"grad_norm": 3.0505459308624268,
"learning_rate": 2.2586872586872586e-06,
"loss": 743.7197,
"step": 401
},
{
"epoch": 0.7760617760617761,
"grad_norm": 3.131042718887329,
"learning_rate": 2.2393822393822394e-06,
"loss": 756.7755,
"step": 402
},
{
"epoch": 0.777992277992278,
"grad_norm": 4.418597221374512,
"learning_rate": 2.2200772200772203e-06,
"loss": 754.0249,
"step": 403
},
{
"epoch": 0.7799227799227799,
"grad_norm": 4.271742820739746,
"learning_rate": 2.200772200772201e-06,
"loss": 768.3597,
"step": 404
},
{
"epoch": 0.7818532818532818,
"grad_norm": 3.788788080215454,
"learning_rate": 2.181467181467182e-06,
"loss": 758.9531,
"step": 405
},
{
"epoch": 0.7837837837837838,
"grad_norm": 3.5197062492370605,
"learning_rate": 2.1621621621621623e-06,
"loss": 754.1716,
"step": 406
},
{
"epoch": 0.7857142857142857,
"grad_norm": 3.414795398712158,
"learning_rate": 2.1428571428571427e-06,
"loss": 751.0524,
"step": 407
},
{
"epoch": 0.7876447876447876,
"grad_norm": 3.1258320808410645,
"learning_rate": 2.1235521235521236e-06,
"loss": 761.6645,
"step": 408
},
{
"epoch": 0.7895752895752896,
"grad_norm": 6.547064781188965,
"learning_rate": 2.1042471042471044e-06,
"loss": 765.9225,
"step": 409
},
{
"epoch": 0.7915057915057915,
"grad_norm": 4.772284030914307,
"learning_rate": 2.084942084942085e-06,
"loss": 764.9897,
"step": 410
},
{
"epoch": 0.7934362934362934,
"grad_norm": 4.287495136260986,
"learning_rate": 2.065637065637066e-06,
"loss": 756.7947,
"step": 411
},
{
"epoch": 0.7953667953667953,
"grad_norm": 5.286579132080078,
"learning_rate": 2.0463320463320464e-06,
"loss": 756.2988,
"step": 412
},
{
"epoch": 0.7972972972972973,
"grad_norm": 3.132582902908325,
"learning_rate": 2.0270270270270273e-06,
"loss": 760.6235,
"step": 413
},
{
"epoch": 0.7992277992277992,
"grad_norm": 4.088491439819336,
"learning_rate": 2.0077220077220077e-06,
"loss": 751.6579,
"step": 414
},
{
"epoch": 0.8011583011583011,
"grad_norm": 3.853825807571411,
"learning_rate": 1.9884169884169885e-06,
"loss": 751.7198,
"step": 415
},
{
"epoch": 0.803088803088803,
"grad_norm": 3.8805510997772217,
"learning_rate": 1.9691119691119693e-06,
"loss": 754.175,
"step": 416
},
{
"epoch": 0.805019305019305,
"grad_norm": 3.465831756591797,
"learning_rate": 1.94980694980695e-06,
"loss": 763.5966,
"step": 417
},
{
"epoch": 0.806949806949807,
"grad_norm": 3.4799764156341553,
"learning_rate": 1.9305019305019305e-06,
"loss": 757.6487,
"step": 418
},
{
"epoch": 0.8088803088803089,
"grad_norm": 4.306357383728027,
"learning_rate": 1.9111969111969114e-06,
"loss": 770.5115,
"step": 419
},
{
"epoch": 0.8108108108108109,
"grad_norm": 5.999144554138184,
"learning_rate": 1.8918918918918922e-06,
"loss": 750.0549,
"step": 420
},
{
"epoch": 0.8127413127413128,
"grad_norm": 3.353970527648926,
"learning_rate": 1.8725868725868726e-06,
"loss": 770.1407,
"step": 421
},
{
"epoch": 0.8146718146718147,
"grad_norm": 3.1123435497283936,
"learning_rate": 1.8532818532818534e-06,
"loss": 755.7714,
"step": 422
},
{
"epoch": 0.8166023166023166,
"grad_norm": 3.5919506549835205,
"learning_rate": 1.833976833976834e-06,
"loss": 768.7866,
"step": 423
},
{
"epoch": 0.8185328185328186,
"grad_norm": 5.815296649932861,
"learning_rate": 1.8146718146718149e-06,
"loss": 769.049,
"step": 424
},
{
"epoch": 0.8204633204633205,
"grad_norm": 6.508057117462158,
"learning_rate": 1.7953667953667955e-06,
"loss": 744.9335,
"step": 425
},
{
"epoch": 0.8223938223938224,
"grad_norm": 3.8051586151123047,
"learning_rate": 1.7760617760617763e-06,
"loss": 755.9141,
"step": 426
},
{
"epoch": 0.8243243243243243,
"grad_norm": 3.628269910812378,
"learning_rate": 1.756756756756757e-06,
"loss": 758.3513,
"step": 427
},
{
"epoch": 0.8262548262548263,
"grad_norm": 5.430158615112305,
"learning_rate": 1.7374517374517377e-06,
"loss": 754.9781,
"step": 428
},
{
"epoch": 0.8281853281853282,
"grad_norm": 3.8903748989105225,
"learning_rate": 1.7181467181467181e-06,
"loss": 762.0396,
"step": 429
},
{
"epoch": 0.8301158301158301,
"grad_norm": 3.2768187522888184,
"learning_rate": 1.698841698841699e-06,
"loss": 748.2692,
"step": 430
},
{
"epoch": 0.832046332046332,
"grad_norm": 4.247284889221191,
"learning_rate": 1.6795366795366796e-06,
"loss": 759.6968,
"step": 431
},
{
"epoch": 0.833976833976834,
"grad_norm": 3.205296039581299,
"learning_rate": 1.6602316602316604e-06,
"loss": 745.4493,
"step": 432
},
{
"epoch": 0.8359073359073359,
"grad_norm": 3.3943800926208496,
"learning_rate": 1.640926640926641e-06,
"loss": 759.4459,
"step": 433
},
{
"epoch": 0.8378378378378378,
"grad_norm": 3.8933515548706055,
"learning_rate": 1.6216216216216219e-06,
"loss": 768.5825,
"step": 434
},
{
"epoch": 0.8397683397683398,
"grad_norm": 3.123276710510254,
"learning_rate": 1.6023166023166027e-06,
"loss": 749.004,
"step": 435
},
{
"epoch": 0.8416988416988417,
"grad_norm": 3.028196334838867,
"learning_rate": 1.583011583011583e-06,
"loss": 754.6147,
"step": 436
},
{
"epoch": 0.8436293436293436,
"grad_norm": 5.627590179443359,
"learning_rate": 1.5637065637065637e-06,
"loss": 764.9931,
"step": 437
},
{
"epoch": 0.8455598455598455,
"grad_norm": 4.444796562194824,
"learning_rate": 1.5444015444015445e-06,
"loss": 756.2324,
"step": 438
},
{
"epoch": 0.8474903474903475,
"grad_norm": 4.994812965393066,
"learning_rate": 1.5250965250965251e-06,
"loss": 764.1788,
"step": 439
},
{
"epoch": 0.8494208494208494,
"grad_norm": 3.8100221157073975,
"learning_rate": 1.505791505791506e-06,
"loss": 748.0216,
"step": 440
},
{
"epoch": 0.8513513513513513,
"grad_norm": 3.284438371658325,
"learning_rate": 1.4864864864864868e-06,
"loss": 744.6481,
"step": 441
},
{
"epoch": 0.8532818532818532,
"grad_norm": 3.754772901535034,
"learning_rate": 1.4671814671814674e-06,
"loss": 759.6836,
"step": 442
},
{
"epoch": 0.8552123552123552,
"grad_norm": 3.4503631591796875,
"learning_rate": 1.4478764478764478e-06,
"loss": 757.1558,
"step": 443
},
{
"epoch": 0.8571428571428571,
"grad_norm": 3.9186127185821533,
"learning_rate": 1.4285714285714286e-06,
"loss": 749.7752,
"step": 444
},
{
"epoch": 0.859073359073359,
"grad_norm": 5.067336082458496,
"learning_rate": 1.4092664092664092e-06,
"loss": 768.12,
"step": 445
},
{
"epoch": 0.861003861003861,
"grad_norm": 3.5622620582580566,
"learning_rate": 1.38996138996139e-06,
"loss": 749.7698,
"step": 446
},
{
"epoch": 0.862934362934363,
"grad_norm": 3.2070393562316895,
"learning_rate": 1.3706563706563709e-06,
"loss": 765.8137,
"step": 447
},
{
"epoch": 0.8648648648648649,
"grad_norm": 3.920966386795044,
"learning_rate": 1.3513513513513515e-06,
"loss": 763.2885,
"step": 448
},
{
"epoch": 0.8667953667953668,
"grad_norm": 4.828410625457764,
"learning_rate": 1.3320463320463323e-06,
"loss": 744.1356,
"step": 449
},
{
"epoch": 0.8687258687258688,
"grad_norm": 2.9684271812438965,
"learning_rate": 1.3127413127413127e-06,
"loss": 748.5911,
"step": 450
},
{
"epoch": 0.8706563706563707,
"grad_norm": 3.4309277534484863,
"learning_rate": 1.2934362934362933e-06,
"loss": 754.4804,
"step": 451
},
{
"epoch": 0.8725868725868726,
"grad_norm": 3.2176759243011475,
"learning_rate": 1.2741312741312742e-06,
"loss": 763.7363,
"step": 452
},
{
"epoch": 0.8745173745173745,
"grad_norm": 4.412746429443359,
"learning_rate": 1.254826254826255e-06,
"loss": 762.2845,
"step": 453
},
{
"epoch": 0.8764478764478765,
"grad_norm": 3.3320093154907227,
"learning_rate": 1.2355212355212356e-06,
"loss": 737.4437,
"step": 454
},
{
"epoch": 0.8783783783783784,
"grad_norm": 3.1412718296051025,
"learning_rate": 1.2162162162162164e-06,
"loss": 752.1414,
"step": 455
},
{
"epoch": 0.8803088803088803,
"grad_norm": 2.9298524856567383,
"learning_rate": 1.196911196911197e-06,
"loss": 749.2881,
"step": 456
},
{
"epoch": 0.8822393822393823,
"grad_norm": 3.4683337211608887,
"learning_rate": 1.1776061776061777e-06,
"loss": 750.4893,
"step": 457
},
{
"epoch": 0.8841698841698842,
"grad_norm": 8.293436050415039,
"learning_rate": 1.1583011583011585e-06,
"loss": 759.465,
"step": 458
},
{
"epoch": 0.8861003861003861,
"grad_norm": 3.0078611373901367,
"learning_rate": 1.1389961389961391e-06,
"loss": 765.3209,
"step": 459
},
{
"epoch": 0.888030888030888,
"grad_norm": 3.5911474227905273,
"learning_rate": 1.1196911196911197e-06,
"loss": 748.1154,
"step": 460
},
{
"epoch": 0.88996138996139,
"grad_norm": 3.626723527908325,
"learning_rate": 1.1003861003861005e-06,
"loss": 763.6529,
"step": 461
},
{
"epoch": 0.8918918918918919,
"grad_norm": 3.322850465774536,
"learning_rate": 1.0810810810810812e-06,
"loss": 757.5947,
"step": 462
},
{
"epoch": 0.8938223938223938,
"grad_norm": 3.866856575012207,
"learning_rate": 1.0617760617760618e-06,
"loss": 761.439,
"step": 463
},
{
"epoch": 0.8957528957528957,
"grad_norm": 3.293731927871704,
"learning_rate": 1.0424710424710426e-06,
"loss": 763.1173,
"step": 464
},
{
"epoch": 0.8976833976833977,
"grad_norm": 3.4967000484466553,
"learning_rate": 1.0231660231660232e-06,
"loss": 771.8659,
"step": 465
},
{
"epoch": 0.8996138996138996,
"grad_norm": 2.9513790607452393,
"learning_rate": 1.0038610038610038e-06,
"loss": 759.2766,
"step": 466
},
{
"epoch": 0.9015444015444015,
"grad_norm": 3.051725387573242,
"learning_rate": 9.845559845559847e-07,
"loss": 761.7484,
"step": 467
},
{
"epoch": 0.9034749034749034,
"grad_norm": 3.222289562225342,
"learning_rate": 9.652509652509653e-07,
"loss": 753.144,
"step": 468
},
{
"epoch": 0.9054054054054054,
"grad_norm": 4.781485557556152,
"learning_rate": 9.459459459459461e-07,
"loss": 766.5778,
"step": 469
},
{
"epoch": 0.9073359073359073,
"grad_norm": 9.611944198608398,
"learning_rate": 9.266409266409267e-07,
"loss": 757.17,
"step": 470
},
{
"epoch": 0.9092664092664092,
"grad_norm": 5.166945457458496,
"learning_rate": 9.073359073359074e-07,
"loss": 758.192,
"step": 471
},
{
"epoch": 0.9111969111969112,
"grad_norm": 3.2668616771698,
"learning_rate": 8.880308880308882e-07,
"loss": 772.9589,
"step": 472
},
{
"epoch": 0.9131274131274131,
"grad_norm": 3.808668851852417,
"learning_rate": 8.687258687258689e-07,
"loss": 743.6304,
"step": 473
},
{
"epoch": 0.915057915057915,
"grad_norm": 3.6966071128845215,
"learning_rate": 8.494208494208495e-07,
"loss": 739.0352,
"step": 474
},
{
"epoch": 0.916988416988417,
"grad_norm": 5.23868465423584,
"learning_rate": 8.301158301158302e-07,
"loss": 758.2206,
"step": 475
},
{
"epoch": 0.918918918918919,
"grad_norm": 5.4658732414245605,
"learning_rate": 8.108108108108109e-07,
"loss": 745.1991,
"step": 476
},
{
"epoch": 0.9208494208494209,
"grad_norm": 4.291003227233887,
"learning_rate": 7.915057915057915e-07,
"loss": 765.403,
"step": 477
},
{
"epoch": 0.9227799227799228,
"grad_norm": 3.528947353363037,
"learning_rate": 7.722007722007723e-07,
"loss": 764.4042,
"step": 478
},
{
"epoch": 0.9247104247104247,
"grad_norm": 3.179927349090576,
"learning_rate": 7.52895752895753e-07,
"loss": 757.1093,
"step": 479
},
{
"epoch": 0.9266409266409267,
"grad_norm": 4.567697048187256,
"learning_rate": 7.335907335907337e-07,
"loss": 759.746,
"step": 480
},
{
"epoch": 0.9285714285714286,
"grad_norm": 3.929335355758667,
"learning_rate": 7.142857142857143e-07,
"loss": 758.0447,
"step": 481
},
{
"epoch": 0.9305019305019305,
"grad_norm": 5.094730377197266,
"learning_rate": 6.94980694980695e-07,
"loss": 757.1946,
"step": 482
},
{
"epoch": 0.9324324324324325,
"grad_norm": 3.7125465869903564,
"learning_rate": 6.756756756756758e-07,
"loss": 770.7656,
"step": 483
},
{
"epoch": 0.9343629343629344,
"grad_norm": 3.3825149536132812,
"learning_rate": 6.563706563706564e-07,
"loss": 747.5059,
"step": 484
},
{
"epoch": 0.9362934362934363,
"grad_norm": 3.3807835578918457,
"learning_rate": 6.370656370656371e-07,
"loss": 767.5899,
"step": 485
},
{
"epoch": 0.9382239382239382,
"grad_norm": 2.997955322265625,
"learning_rate": 6.177606177606178e-07,
"loss": 758.338,
"step": 486
},
{
"epoch": 0.9401544401544402,
"grad_norm": 3.478146553039551,
"learning_rate": 5.984555984555985e-07,
"loss": 758.3116,
"step": 487
},
{
"epoch": 0.9420849420849421,
"grad_norm": 3.0303502082824707,
"learning_rate": 5.791505791505792e-07,
"loss": 752.7023,
"step": 488
},
{
"epoch": 0.944015444015444,
"grad_norm": 4.182223796844482,
"learning_rate": 5.598455598455599e-07,
"loss": 767.7703,
"step": 489
},
{
"epoch": 0.9459459459459459,
"grad_norm": 3.0645620822906494,
"learning_rate": 5.405405405405406e-07,
"loss": 756.7206,
"step": 490
},
{
"epoch": 0.9478764478764479,
"grad_norm": 3.5093557834625244,
"learning_rate": 5.212355212355213e-07,
"loss": 759.0178,
"step": 491
},
{
"epoch": 0.9498069498069498,
"grad_norm": 5.231660842895508,
"learning_rate": 5.019305019305019e-07,
"loss": 758.1909,
"step": 492
},
{
"epoch": 0.9517374517374517,
"grad_norm": 4.316704273223877,
"learning_rate": 4.826254826254826e-07,
"loss": 756.6906,
"step": 493
},
{
"epoch": 0.9536679536679536,
"grad_norm": 3.842200994491577,
"learning_rate": 4.6332046332046336e-07,
"loss": 757.8163,
"step": 494
},
{
"epoch": 0.9555984555984556,
"grad_norm": 3.351325511932373,
"learning_rate": 4.440154440154441e-07,
"loss": 762.0304,
"step": 495
},
{
"epoch": 0.9575289575289575,
"grad_norm": 3.0159342288970947,
"learning_rate": 4.2471042471042474e-07,
"loss": 761.4176,
"step": 496
},
{
"epoch": 0.9594594594594594,
"grad_norm": 3.074551582336426,
"learning_rate": 4.0540540540540546e-07,
"loss": 751.5521,
"step": 497
},
{
"epoch": 0.9613899613899614,
"grad_norm": 3.896904945373535,
"learning_rate": 3.8610038610038613e-07,
"loss": 763.3478,
"step": 498
},
{
"epoch": 0.9633204633204633,
"grad_norm": 3.919020175933838,
"learning_rate": 3.6679536679536685e-07,
"loss": 768.0314,
"step": 499
},
{
"epoch": 0.9652509652509652,
"grad_norm": 2.949843645095825,
"learning_rate": 3.474903474903475e-07,
"loss": 755.5469,
"step": 500
},
{
"epoch": 0.9671814671814671,
"grad_norm": 3.6389143466949463,
"learning_rate": 3.281853281853282e-07,
"loss": 744.184,
"step": 501
},
{
"epoch": 0.9691119691119691,
"grad_norm": 4.229357719421387,
"learning_rate": 3.088803088803089e-07,
"loss": 761.7263,
"step": 502
},
{
"epoch": 0.971042471042471,
"grad_norm": 3.1734883785247803,
"learning_rate": 2.895752895752896e-07,
"loss": 758.8358,
"step": 503
},
{
"epoch": 0.972972972972973,
"grad_norm": 3.089228868484497,
"learning_rate": 2.702702702702703e-07,
"loss": 760.397,
"step": 504
},
{
"epoch": 0.974903474903475,
"grad_norm": 3.926948070526123,
"learning_rate": 2.5096525096525096e-07,
"loss": 755.6573,
"step": 505
},
{
"epoch": 0.9768339768339769,
"grad_norm": 2.9857828617095947,
"learning_rate": 2.3166023166023168e-07,
"loss": 767.6666,
"step": 506
},
{
"epoch": 0.9787644787644788,
"grad_norm": 4.68814754486084,
"learning_rate": 2.1235521235521237e-07,
"loss": 767.6501,
"step": 507
},
{
"epoch": 0.9806949806949807,
"grad_norm": 3.457016706466675,
"learning_rate": 1.9305019305019306e-07,
"loss": 760.5441,
"step": 508
},
{
"epoch": 0.9826254826254827,
"grad_norm": 4.365758419036865,
"learning_rate": 1.7374517374517376e-07,
"loss": 760.8904,
"step": 509
},
{
"epoch": 0.9845559845559846,
"grad_norm": 3.6141843795776367,
"learning_rate": 1.5444015444015445e-07,
"loss": 754.2842,
"step": 510
},
{
"epoch": 0.9864864864864865,
"grad_norm": 4.281587600708008,
"learning_rate": 1.3513513513513515e-07,
"loss": 754.181,
"step": 511
},
{
"epoch": 0.9884169884169884,
"grad_norm": 3.3735556602478027,
"learning_rate": 1.1583011583011584e-07,
"loss": 755.9177,
"step": 512
},
{
"epoch": 0.9903474903474904,
"grad_norm": 3.126232624053955,
"learning_rate": 9.652509652509653e-08,
"loss": 750.3485,
"step": 513
},
{
"epoch": 0.9922779922779923,
"grad_norm": 5.9563727378845215,
"learning_rate": 7.722007722007723e-08,
"loss": 766.379,
"step": 514
},
{
"epoch": 0.9942084942084942,
"grad_norm": 3.0199837684631348,
"learning_rate": 5.791505791505792e-08,
"loss": 753.6818,
"step": 515
},
{
"epoch": 0.9961389961389961,
"grad_norm": 3.0909945964813232,
"learning_rate": 3.861003861003861e-08,
"loss": 749.71,
"step": 516
},
{
"epoch": 0.9980694980694981,
"grad_norm": 3.416806221008301,
"learning_rate": 1.9305019305019306e-08,
"loss": 772.7269,
"step": 517
},
{
"epoch": 1.0,
"grad_norm": 5.65108585357666,
"learning_rate": 0.0,
"loss": 744.4813,
"step": 518
},
{
"epoch": 1.0,
"step": 518,
"total_flos": 1.1089078720895386e+18,
"train_loss": 758.2892525573495,
"train_runtime": 2215.2409,
"train_samples_per_second": 239.051,
"train_steps_per_second": 0.234
}
],
"logging_steps": 1,
"max_steps": 518,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"total_flos": 1.1089078720895386e+18,
"train_batch_size": 1024,
"trial_name": null,
"trial_params": null
}