{
"best_global_step": 4500,
"best_metric": 0.6938078999519348,
"best_model_checkpoint": "task2file/sft_devstral_24B_v2/checkpoints/checkpoint-4500",
"epoch": 1.8987341772151898,
"eval_steps": 100,
"global_step": 4500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0008438818565400844,
"grad_norm": 1.597854733467102,
"learning_rate": 8.787346221441124e-08,
"loss": 1.3927901983261108,
"step": 2
},
{
"epoch": 0.0016877637130801688,
"grad_norm": 1.6547431945800781,
"learning_rate": 2.6362038664323375e-07,
"loss": 1.407160758972168,
"step": 4
},
{
"epoch": 0.002531645569620253,
"grad_norm": 1.8221601247787476,
"learning_rate": 4.393673110720563e-07,
"loss": 1.376656174659729,
"step": 6
},
{
"epoch": 0.0033755274261603376,
"grad_norm": 1.4831048250198364,
"learning_rate": 6.151142355008788e-07,
"loss": 1.247712254524231,
"step": 8
},
{
"epoch": 0.004219409282700422,
"grad_norm": 1.668201208114624,
"learning_rate": 7.908611599297013e-07,
"loss": 1.2685163021087646,
"step": 10
},
{
"epoch": 0.005063291139240506,
"grad_norm": 1.67417311668396,
"learning_rate": 9.666080843585237e-07,
"loss": 1.2942761182785034,
"step": 12
},
{
"epoch": 0.00590717299578059,
"grad_norm": 1.7154079675674438,
"learning_rate": 1.1423550087873463e-06,
"loss": 1.3638604879379272,
"step": 14
},
{
"epoch": 0.006751054852320675,
"grad_norm": 1.729427456855774,
"learning_rate": 1.3181019332161688e-06,
"loss": 1.3476728200912476,
"step": 16
},
{
"epoch": 0.007594936708860759,
"grad_norm": 1.3813447952270508,
"learning_rate": 1.4938488576449913e-06,
"loss": 1.3476393222808838,
"step": 18
},
{
"epoch": 0.008438818565400843,
"grad_norm": 1.557220458984375,
"learning_rate": 1.6695957820738139e-06,
"loss": 1.2449309825897217,
"step": 20
},
{
"epoch": 0.009282700421940928,
"grad_norm": 1.1883500814437866,
"learning_rate": 1.8453427065026362e-06,
"loss": 1.3125361204147339,
"step": 22
},
{
"epoch": 0.010126582278481013,
"grad_norm": 1.7290029525756836,
"learning_rate": 2.0210896309314587e-06,
"loss": 1.3724769353866577,
"step": 24
},
{
"epoch": 0.010970464135021098,
"grad_norm": 1.5627557039260864,
"learning_rate": 2.1968365553602812e-06,
"loss": 1.3401387929916382,
"step": 26
},
{
"epoch": 0.01181434599156118,
"grad_norm": 1.796866774559021,
"learning_rate": 2.3725834797891038e-06,
"loss": 1.365437388420105,
"step": 28
},
{
"epoch": 0.012658227848101266,
"grad_norm": 1.7030404806137085,
"learning_rate": 2.5483304042179263e-06,
"loss": 1.2706533670425415,
"step": 30
},
{
"epoch": 0.01350210970464135,
"grad_norm": 1.3186293840408325,
"learning_rate": 2.724077328646749e-06,
"loss": 1.3084994554519653,
"step": 32
},
{
"epoch": 0.014345991561181435,
"grad_norm": 1.5762513875961304,
"learning_rate": 2.8998242530755714e-06,
"loss": 1.3259696960449219,
"step": 34
},
{
"epoch": 0.015189873417721518,
"grad_norm": 1.422295331954956,
"learning_rate": 3.075571177504394e-06,
"loss": 1.3205676078796387,
"step": 36
},
{
"epoch": 0.016033755274261603,
"grad_norm": 1.495523452758789,
"learning_rate": 3.2513181019332165e-06,
"loss": 1.3740568161010742,
"step": 38
},
{
"epoch": 0.016877637130801686,
"grad_norm": 1.5112254619598389,
"learning_rate": 3.427065026362039e-06,
"loss": 1.321828842163086,
"step": 40
},
{
"epoch": 0.017721518987341773,
"grad_norm": 1.4667807817459106,
"learning_rate": 3.602811950790861e-06,
"loss": 1.3673173189163208,
"step": 42
},
{
"epoch": 0.018565400843881856,
"grad_norm": 1.6609723567962646,
"learning_rate": 3.7785588752196836e-06,
"loss": 1.3968093395233154,
"step": 44
},
{
"epoch": 0.019409282700421943,
"grad_norm": 1.59381103515625,
"learning_rate": 3.954305799648506e-06,
"loss": 1.4295302629470825,
"step": 46
},
{
"epoch": 0.020253164556962026,
"grad_norm": 1.1470608711242676,
"learning_rate": 4.130052724077329e-06,
"loss": 1.2536572217941284,
"step": 48
},
{
"epoch": 0.02109704641350211,
"grad_norm": 1.2014588117599487,
"learning_rate": 4.305799648506151e-06,
"loss": 1.242217779159546,
"step": 50
},
{
"epoch": 0.021940928270042195,
"grad_norm": 1.2327464818954468,
"learning_rate": 4.481546572934974e-06,
"loss": 1.2166963815689087,
"step": 52
},
{
"epoch": 0.02278481012658228,
"grad_norm": 1.9708983898162842,
"learning_rate": 4.657293497363796e-06,
"loss": 1.25709867477417,
"step": 54
},
{
"epoch": 0.02362869198312236,
"grad_norm": 1.180569052696228,
"learning_rate": 4.833040421792619e-06,
"loss": 1.2886158227920532,
"step": 56
},
{
"epoch": 0.024472573839662448,
"grad_norm": 1.5029548406600952,
"learning_rate": 5.008787346221441e-06,
"loss": 1.29886794090271,
"step": 58
},
{
"epoch": 0.02531645569620253,
"grad_norm": 1.5380216836929321,
"learning_rate": 5.184534270650264e-06,
"loss": 1.2387628555297852,
"step": 60
},
{
"epoch": 0.026160337552742614,
"grad_norm": 1.572144865989685,
"learning_rate": 5.3602811950790864e-06,
"loss": 1.2177000045776367,
"step": 62
},
{
"epoch": 0.0270042194092827,
"grad_norm": 1.4882780313491821,
"learning_rate": 5.536028119507909e-06,
"loss": 1.181516170501709,
"step": 64
},
{
"epoch": 0.027848101265822784,
"grad_norm": 1.2982488870620728,
"learning_rate": 5.7117750439367315e-06,
"loss": 1.2101733684539795,
"step": 66
},
{
"epoch": 0.02869198312236287,
"grad_norm": 1.5236955881118774,
"learning_rate": 5.887521968365554e-06,
"loss": 1.2277681827545166,
"step": 68
},
{
"epoch": 0.029535864978902954,
"grad_norm": 1.4521006345748901,
"learning_rate": 6.0632688927943766e-06,
"loss": 1.1688424348831177,
"step": 70
},
{
"epoch": 0.030379746835443037,
"grad_norm": 1.2352311611175537,
"learning_rate": 6.239015817223199e-06,
"loss": 1.273059368133545,
"step": 72
},
{
"epoch": 0.031223628691983123,
"grad_norm": 1.3438209295272827,
"learning_rate": 6.414762741652021e-06,
"loss": 1.1609034538269043,
"step": 74
},
{
"epoch": 0.032067510548523206,
"grad_norm": 1.9009398221969604,
"learning_rate": 6.590509666080843e-06,
"loss": 1.2508260011672974,
"step": 76
},
{
"epoch": 0.03291139240506329,
"grad_norm": 1.6718412637710571,
"learning_rate": 6.766256590509666e-06,
"loss": 1.2524956464767456,
"step": 78
},
{
"epoch": 0.03375527426160337,
"grad_norm": 1.249891757965088,
"learning_rate": 6.942003514938488e-06,
"loss": 1.1472493410110474,
"step": 80
},
{
"epoch": 0.03459915611814346,
"grad_norm": 1.4398653507232666,
"learning_rate": 7.117750439367312e-06,
"loss": 1.0845389366149902,
"step": 82
},
{
"epoch": 0.035443037974683546,
"grad_norm": 1.3701167106628418,
"learning_rate": 7.293497363796134e-06,
"loss": 1.1088868379592896,
"step": 84
},
{
"epoch": 0.036286919831223625,
"grad_norm": 1.277998924255371,
"learning_rate": 7.469244288224957e-06,
"loss": 1.1513772010803223,
"step": 86
},
{
"epoch": 0.03713080168776371,
"grad_norm": 1.4970002174377441,
"learning_rate": 7.644991212653779e-06,
"loss": 1.1385771036148071,
"step": 88
},
{
"epoch": 0.0379746835443038,
"grad_norm": 1.3384218215942383,
"learning_rate": 7.820738137082601e-06,
"loss": 1.1632680892944336,
"step": 90
},
{
"epoch": 0.038818565400843885,
"grad_norm": 1.4317446947097778,
"learning_rate": 7.996485061511425e-06,
"loss": 1.2256064414978027,
"step": 92
},
{
"epoch": 0.039662447257383965,
"grad_norm": 1.8743640184402466,
"learning_rate": 8.172231985940246e-06,
"loss": 1.1935789585113525,
"step": 94
},
{
"epoch": 0.04050632911392405,
"grad_norm": 1.4789546728134155,
"learning_rate": 8.347978910369069e-06,
"loss": 1.1429362297058105,
"step": 96
},
{
"epoch": 0.04135021097046414,
"grad_norm": 1.658605694770813,
"learning_rate": 8.523725834797891e-06,
"loss": 1.1831508874893188,
"step": 98
},
{
"epoch": 0.04219409282700422,
"grad_norm": 1.5077892541885376,
"learning_rate": 8.699472759226714e-06,
"loss": 1.0539867877960205,
"step": 100
},
{
"epoch": 0.04219409282700422,
"eval_loss": 1.138856053352356,
"eval_runtime": 859.7128,
"eval_samples_per_second": 2.451,
"eval_steps_per_second": 2.451,
"step": 100
},
{
"epoch": 0.043037974683544304,
"grad_norm": 1.4335681200027466,
"learning_rate": 8.875219683655536e-06,
"loss": 1.0719901323318481,
"step": 102
},
{
"epoch": 0.04388185654008439,
"grad_norm": 1.7387681007385254,
"learning_rate": 9.050966608084359e-06,
"loss": 1.0654313564300537,
"step": 104
},
{
"epoch": 0.04472573839662447,
"grad_norm": 1.6071950197219849,
"learning_rate": 9.226713532513181e-06,
"loss": 1.0752698183059692,
"step": 106
},
{
"epoch": 0.04556962025316456,
"grad_norm": 1.40005362033844,
"learning_rate": 9.402460456942004e-06,
"loss": 1.1029763221740723,
"step": 108
},
{
"epoch": 0.046413502109704644,
"grad_norm": 2.2338669300079346,
"learning_rate": 9.578207381370826e-06,
"loss": 1.1157960891723633,
"step": 110
},
{
"epoch": 0.04725738396624472,
"grad_norm": 1.4972727298736572,
"learning_rate": 9.753954305799649e-06,
"loss": 1.1095420122146606,
"step": 112
},
{
"epoch": 0.04810126582278481,
"grad_norm": 1.317979097366333,
"learning_rate": 9.929701230228471e-06,
"loss": 1.109113097190857,
"step": 114
},
{
"epoch": 0.048945147679324896,
"grad_norm": 1.496346116065979,
"learning_rate": 1.0105448154657294e-05,
"loss": 1.1055104732513428,
"step": 116
},
{
"epoch": 0.049789029535864976,
"grad_norm": 1.385406732559204,
"learning_rate": 1.0281195079086117e-05,
"loss": 1.118395209312439,
"step": 118
},
{
"epoch": 0.05063291139240506,
"grad_norm": 1.524222731590271,
"learning_rate": 1.0456942003514939e-05,
"loss": 1.1008446216583252,
"step": 120
},
{
"epoch": 0.05147679324894515,
"grad_norm": 1.6308200359344482,
"learning_rate": 1.0632688927943762e-05,
"loss": 1.0891425609588623,
"step": 122
},
{
"epoch": 0.05232067510548523,
"grad_norm": 1.3681106567382812,
"learning_rate": 1.0808435852372584e-05,
"loss": 0.9080473184585571,
"step": 124
},
{
"epoch": 0.053164556962025315,
"grad_norm": 1.9429908990859985,
"learning_rate": 1.0984182776801407e-05,
"loss": 1.0337369441986084,
"step": 126
},
{
"epoch": 0.0540084388185654,
"grad_norm": 1.5830830335617065,
"learning_rate": 1.115992970123023e-05,
"loss": 1.0703333616256714,
"step": 128
},
{
"epoch": 0.05485232067510549,
"grad_norm": 1.4792555570602417,
"learning_rate": 1.1335676625659052e-05,
"loss": 1.004652738571167,
"step": 130
},
{
"epoch": 0.05569620253164557,
"grad_norm": 1.7196226119995117,
"learning_rate": 1.1511423550087874e-05,
"loss": 0.9798293709754944,
"step": 132
},
{
"epoch": 0.056540084388185655,
"grad_norm": 1.8733659982681274,
"learning_rate": 1.1687170474516697e-05,
"loss": 1.0213249921798706,
"step": 134
},
{
"epoch": 0.05738396624472574,
"grad_norm": 1.3431142568588257,
"learning_rate": 1.186291739894552e-05,
"loss": 1.0358591079711914,
"step": 136
},
{
"epoch": 0.05822784810126582,
"grad_norm": 1.527864933013916,
"learning_rate": 1.2038664323374342e-05,
"loss": 0.9372249841690063,
"step": 138
},
{
"epoch": 0.05907172995780591,
"grad_norm": 1.5495563745498657,
"learning_rate": 1.2214411247803164e-05,
"loss": 1.0277758836746216,
"step": 140
},
{
"epoch": 0.059915611814345994,
"grad_norm": 1.6792418956756592,
"learning_rate": 1.2390158172231985e-05,
"loss": 1.0349801778793335,
"step": 142
},
{
"epoch": 0.060759493670886074,
"grad_norm": 1.6468945741653442,
"learning_rate": 1.256590509666081e-05,
"loss": 0.9578297734260559,
"step": 144
},
{
"epoch": 0.06160337552742616,
"grad_norm": 1.7243824005126953,
"learning_rate": 1.2741652021089632e-05,
"loss": 1.0628854036331177,
"step": 146
},
{
"epoch": 0.06244725738396625,
"grad_norm": 1.7286981344223022,
"learning_rate": 1.2917398945518455e-05,
"loss": 0.9336449503898621,
"step": 148
},
{
"epoch": 0.06329113924050633,
"grad_norm": 1.6411832571029663,
"learning_rate": 1.3093145869947277e-05,
"loss": 0.953730583190918,
"step": 150
},
{
"epoch": 0.06413502109704641,
"grad_norm": 1.8297001123428345,
"learning_rate": 1.3268892794376098e-05,
"loss": 1.051239013671875,
"step": 152
},
{
"epoch": 0.06497890295358649,
"grad_norm": 1.9660519361495972,
"learning_rate": 1.3444639718804922e-05,
"loss": 0.9955035448074341,
"step": 154
},
{
"epoch": 0.06582278481012659,
"grad_norm": 1.8423733711242676,
"learning_rate": 1.3620386643233743e-05,
"loss": 0.913300096988678,
"step": 156
},
{
"epoch": 0.06666666666666667,
"grad_norm": 1.9146347045898438,
"learning_rate": 1.3796133567662567e-05,
"loss": 1.0429846048355103,
"step": 158
},
{
"epoch": 0.06751054852320675,
"grad_norm": 1.6221821308135986,
"learning_rate": 1.3971880492091388e-05,
"loss": 1.0360238552093506,
"step": 160
},
{
"epoch": 0.06835443037974684,
"grad_norm": 2.173283338546753,
"learning_rate": 1.4147627416520212e-05,
"loss": 1.0227266550064087,
"step": 162
},
{
"epoch": 0.06919831223628692,
"grad_norm": 1.7091665267944336,
"learning_rate": 1.4323374340949033e-05,
"loss": 1.0075194835662842,
"step": 164
},
{
"epoch": 0.070042194092827,
"grad_norm": 1.7219135761260986,
"learning_rate": 1.4499121265377857e-05,
"loss": 1.0044782161712646,
"step": 166
},
{
"epoch": 0.07088607594936709,
"grad_norm": 1.6558159589767456,
"learning_rate": 1.4674868189806678e-05,
"loss": 0.9393973350524902,
"step": 168
},
{
"epoch": 0.07172995780590717,
"grad_norm": 1.9362739324569702,
"learning_rate": 1.4850615114235502e-05,
"loss": 0.9955337643623352,
"step": 170
},
{
"epoch": 0.07257383966244725,
"grad_norm": 1.7792853116989136,
"learning_rate": 1.5026362038664323e-05,
"loss": 0.9659126400947571,
"step": 172
},
{
"epoch": 0.07341772151898734,
"grad_norm": 1.7184511423110962,
"learning_rate": 1.5202108963093147e-05,
"loss": 0.9077855348587036,
"step": 174
},
{
"epoch": 0.07426160337552742,
"grad_norm": 1.5701428651809692,
"learning_rate": 1.537785588752197e-05,
"loss": 0.9305018782615662,
"step": 176
},
{
"epoch": 0.0751054852320675,
"grad_norm": 1.970229148864746,
"learning_rate": 1.555360281195079e-05,
"loss": 1.0211774110794067,
"step": 178
},
{
"epoch": 0.0759493670886076,
"grad_norm": 1.8410269021987915,
"learning_rate": 1.5729349736379615e-05,
"loss": 0.9479315876960754,
"step": 180
},
{
"epoch": 0.07679324894514768,
"grad_norm": 1.8991246223449707,
"learning_rate": 1.5905096660808434e-05,
"loss": 1.0629050731658936,
"step": 182
},
{
"epoch": 0.07763713080168777,
"grad_norm": 1.8052008152008057,
"learning_rate": 1.608084358523726e-05,
"loss": 0.946983814239502,
"step": 184
},
{
"epoch": 0.07848101265822785,
"grad_norm": 1.547108769416809,
"learning_rate": 1.625659050966608e-05,
"loss": 0.9413356184959412,
"step": 186
},
{
"epoch": 0.07932489451476793,
"grad_norm": 1.9713538885116577,
"learning_rate": 1.6432337434094905e-05,
"loss": 0.9337888956069946,
"step": 188
},
{
"epoch": 0.08016877637130802,
"grad_norm": 1.708789348602295,
"learning_rate": 1.6608084358523728e-05,
"loss": 0.9816337823867798,
"step": 190
},
{
"epoch": 0.0810126582278481,
"grad_norm": 1.815292477607727,
"learning_rate": 1.678383128295255e-05,
"loss": 1.017122507095337,
"step": 192
},
{
"epoch": 0.08185654008438818,
"grad_norm": 1.7950682640075684,
"learning_rate": 1.6959578207381373e-05,
"loss": 0.991599440574646,
"step": 194
},
{
"epoch": 0.08270042194092828,
"grad_norm": 1.692512035369873,
"learning_rate": 1.7135325131810195e-05,
"loss": 0.9570834040641785,
"step": 196
},
{
"epoch": 0.08354430379746836,
"grad_norm": 2.056089162826538,
"learning_rate": 1.7311072056239018e-05,
"loss": 1.035754919052124,
"step": 198
},
{
"epoch": 0.08438818565400844,
"grad_norm": 1.7022203207015991,
"learning_rate": 1.7486818980667837e-05,
"loss": 1.0124205350875854,
"step": 200
},
{
"epoch": 0.08438818565400844,
"eval_loss": 0.995743453502655,
"eval_runtime": 846.8257,
"eval_samples_per_second": 2.488,
"eval_steps_per_second": 2.488,
"step": 200
},
{
"epoch": 0.08523206751054853,
"grad_norm": 1.6088604927062988,
"learning_rate": 1.7662565905096663e-05,
"loss": 0.8946985006332397,
"step": 202
},
{
"epoch": 0.08607594936708861,
"grad_norm": 2.02270770072937,
"learning_rate": 1.7838312829525482e-05,
"loss": 0.976133406162262,
"step": 204
},
{
"epoch": 0.08691983122362869,
"grad_norm": 1.7832789421081543,
"learning_rate": 1.8014059753954308e-05,
"loss": 0.9079383611679077,
"step": 206
},
{
"epoch": 0.08776371308016878,
"grad_norm": 1.9793545007705688,
"learning_rate": 1.8189806678383127e-05,
"loss": 0.8650367856025696,
"step": 208
},
{
"epoch": 0.08860759493670886,
"grad_norm": 1.8124271631240845,
"learning_rate": 1.8365553602811953e-05,
"loss": 0.9327266812324524,
"step": 210
},
{
"epoch": 0.08945147679324894,
"grad_norm": 1.8581212759017944,
"learning_rate": 1.8541300527240772e-05,
"loss": 0.9811079502105713,
"step": 212
},
{
"epoch": 0.09029535864978903,
"grad_norm": 2.001699447631836,
"learning_rate": 1.8717047451669598e-05,
"loss": 0.9546971321105957,
"step": 214
},
{
"epoch": 0.09113924050632911,
"grad_norm": 1.6994978189468384,
"learning_rate": 1.8892794376098417e-05,
"loss": 0.9611319899559021,
"step": 216
},
{
"epoch": 0.0919831223628692,
"grad_norm": 2.1379497051239014,
"learning_rate": 1.9068541300527243e-05,
"loss": 0.9781531095504761,
"step": 218
},
{
"epoch": 0.09282700421940929,
"grad_norm": 1.8961224555969238,
"learning_rate": 1.9244288224956066e-05,
"loss": 0.9374833106994629,
"step": 220
},
{
"epoch": 0.09367088607594937,
"grad_norm": 1.851464033126831,
"learning_rate": 1.9420035149384885e-05,
"loss": 0.9681299328804016,
"step": 222
},
{
"epoch": 0.09451476793248945,
"grad_norm": 2.0642266273498535,
"learning_rate": 1.959578207381371e-05,
"loss": 1.0086225271224976,
"step": 224
},
{
"epoch": 0.09535864978902954,
"grad_norm": 1.8658756017684937,
"learning_rate": 1.977152899824253e-05,
"loss": 0.9190312623977661,
"step": 226
},
{
"epoch": 0.09620253164556962,
"grad_norm": 2.4398674964904785,
"learning_rate": 1.9947275922671356e-05,
"loss": 0.9740874171257019,
"step": 228
},
{
"epoch": 0.0970464135021097,
"grad_norm": 1.849183440208435,
"learning_rate": 2.0123022847100175e-05,
"loss": 0.884376049041748,
"step": 230
},
{
"epoch": 0.09789029535864979,
"grad_norm": 2.027320384979248,
"learning_rate": 2.0298769771529e-05,
"loss": 0.9116487503051758,
"step": 232
},
{
"epoch": 0.09873417721518987,
"grad_norm": 1.6800135374069214,
"learning_rate": 2.047451669595782e-05,
"loss": 0.9035115242004395,
"step": 234
},
{
"epoch": 0.09957805907172995,
"grad_norm": 2.2362256050109863,
"learning_rate": 2.0650263620386646e-05,
"loss": 0.9043796062469482,
"step": 236
},
{
"epoch": 0.10042194092827005,
"grad_norm": 1.938215970993042,
"learning_rate": 2.0826010544815465e-05,
"loss": 1.0888828039169312,
"step": 238
},
{
"epoch": 0.10126582278481013,
"grad_norm": 1.890328049659729,
"learning_rate": 2.100175746924429e-05,
"loss": 0.9960280656814575,
"step": 240
},
{
"epoch": 0.1021097046413502,
"grad_norm": 2.021235227584839,
"learning_rate": 2.117750439367311e-05,
"loss": 0.9848901629447937,
"step": 242
},
{
"epoch": 0.1029535864978903,
"grad_norm": 2.023920774459839,
"learning_rate": 2.1353251318101936e-05,
"loss": 0.891694188117981,
"step": 244
},
{
"epoch": 0.10379746835443038,
"grad_norm": 1.8061069250106812,
"learning_rate": 2.1528998242530755e-05,
"loss": 0.9059976935386658,
"step": 246
},
{
"epoch": 0.10464135021097046,
"grad_norm": 2.176302194595337,
"learning_rate": 2.1704745166959578e-05,
"loss": 1.0056109428405762,
"step": 248
},
{
"epoch": 0.10548523206751055,
"grad_norm": 1.9820969104766846,
"learning_rate": 2.18804920913884e-05,
"loss": 0.9645357728004456,
"step": 250
},
{
"epoch": 0.10632911392405063,
"grad_norm": 1.8764572143554688,
"learning_rate": 2.2056239015817223e-05,
"loss": 1.0178182125091553,
"step": 252
},
{
"epoch": 0.10717299578059072,
"grad_norm": 2.56221342086792,
"learning_rate": 2.223198594024605e-05,
"loss": 0.9546761512756348,
"step": 254
},
{
"epoch": 0.1080168776371308,
"grad_norm": 2.6779074668884277,
"learning_rate": 2.2407732864674868e-05,
"loss": 0.9300968647003174,
"step": 256
},
{
"epoch": 0.10886075949367088,
"grad_norm": 2.140897512435913,
"learning_rate": 2.2583479789103694e-05,
"loss": 0.926638662815094,
"step": 258
},
{
"epoch": 0.10970464135021098,
"grad_norm": 2.0880508422851562,
"learning_rate": 2.2759226713532513e-05,
"loss": 1.0681840181350708,
"step": 260
},
{
"epoch": 0.11054852320675106,
"grad_norm": 2.7273616790771484,
"learning_rate": 2.293497363796134e-05,
"loss": 1.0840941667556763,
"step": 262
},
{
"epoch": 0.11139240506329114,
"grad_norm": 1.6723874807357788,
"learning_rate": 2.3110720562390158e-05,
"loss": 0.8637182116508484,
"step": 264
},
{
"epoch": 0.11223628691983123,
"grad_norm": 1.806243896484375,
"learning_rate": 2.3286467486818984e-05,
"loss": 0.9554686546325684,
"step": 266
},
{
"epoch": 0.11308016877637131,
"grad_norm": 1.9086743593215942,
"learning_rate": 2.3462214411247803e-05,
"loss": 0.9556593894958496,
"step": 268
},
{
"epoch": 0.11392405063291139,
"grad_norm": 2.1822304725646973,
"learning_rate": 2.3637961335676626e-05,
"loss": 0.9177709817886353,
"step": 270
},
{
"epoch": 0.11476793248945148,
"grad_norm": 2.1009039878845215,
"learning_rate": 2.3813708260105448e-05,
"loss": 0.9288759827613831,
"step": 272
},
{
"epoch": 0.11561181434599156,
"grad_norm": 1.9814810752868652,
"learning_rate": 2.398945518453427e-05,
"loss": 0.9881691932678223,
"step": 274
},
{
"epoch": 0.11645569620253164,
"grad_norm": 1.9946284294128418,
"learning_rate": 2.4165202108963093e-05,
"loss": 0.9390727281570435,
"step": 276
},
{
"epoch": 0.11729957805907174,
"grad_norm": 2.4489169120788574,
"learning_rate": 2.4340949033391916e-05,
"loss": 0.9625692963600159,
"step": 278
},
{
"epoch": 0.11814345991561181,
"grad_norm": 2.0919103622436523,
"learning_rate": 2.451669595782074e-05,
"loss": 0.9304702877998352,
"step": 280
},
{
"epoch": 0.1189873417721519,
"grad_norm": 1.912914752960205,
"learning_rate": 2.469244288224956e-05,
"loss": 0.9313994646072388,
"step": 282
},
{
"epoch": 0.11983122362869199,
"grad_norm": 2.1553256511688232,
"learning_rate": 2.4868189806678387e-05,
"loss": 1.004011869430542,
"step": 284
},
{
"epoch": 0.12067510548523207,
"grad_norm": 2.0129058361053467,
"learning_rate": 2.504393673110721e-05,
"loss": 0.9092531204223633,
"step": 286
},
{
"epoch": 0.12151898734177215,
"grad_norm": 2.1632325649261475,
"learning_rate": 2.5219683655536032e-05,
"loss": 0.993347704410553,
"step": 288
},
{
"epoch": 0.12236286919831224,
"grad_norm": 2.3072738647460938,
"learning_rate": 2.539543057996485e-05,
"loss": 0.978348433971405,
"step": 290
},
{
"epoch": 0.12320675105485232,
"grad_norm": 2.056560516357422,
"learning_rate": 2.5571177504393674e-05,
"loss": 1.0018101930618286,
"step": 292
},
{
"epoch": 0.1240506329113924,
"grad_norm": 1.8906747102737427,
"learning_rate": 2.5746924428822493e-05,
"loss": 0.9607775211334229,
"step": 294
},
{
"epoch": 0.1248945147679325,
"grad_norm": 2.1375651359558105,
"learning_rate": 2.5922671353251322e-05,
"loss": 0.9259153008460999,
"step": 296
},
{
"epoch": 0.1257383966244726,
"grad_norm": 1.9994823932647705,
"learning_rate": 2.609841827768014e-05,
"loss": 0.8524524569511414,
"step": 298
},
{
"epoch": 0.12658227848101267,
"grad_norm": 2.2421181201934814,
"learning_rate": 2.6274165202108964e-05,
"loss": 1.0047069787979126,
"step": 300
},
{
"epoch": 0.12658227848101267,
"eval_loss": 0.9517185688018799,
"eval_runtime": 860.0287,
"eval_samples_per_second": 2.45,
"eval_steps_per_second": 2.45,
"step": 300
},
{
"epoch": 0.12742616033755275,
"grad_norm": 2.1206254959106445,
"learning_rate": 2.6449912126537786e-05,
"loss": 0.8475471138954163,
"step": 302
},
{
"epoch": 0.12827004219409283,
"grad_norm": 1.885161280632019,
"learning_rate": 2.6625659050966612e-05,
"loss": 0.8643121123313904,
"step": 304
},
{
"epoch": 0.1291139240506329,
"grad_norm": 3.1441781520843506,
"learning_rate": 2.680140597539543e-05,
"loss": 0.8804612159729004,
"step": 306
},
{
"epoch": 0.12995780590717299,
"grad_norm": 1.953133225440979,
"learning_rate": 2.6977152899824254e-05,
"loss": 0.8348029255867004,
"step": 308
},
{
"epoch": 0.1308016877637131,
"grad_norm": 2.3762667179107666,
"learning_rate": 2.7152899824253076e-05,
"loss": 0.8889057040214539,
"step": 310
},
{
"epoch": 0.13164556962025317,
"grad_norm": 2.4651103019714355,
"learning_rate": 2.7328646748681902e-05,
"loss": 1.025565505027771,
"step": 312
},
{
"epoch": 0.13248945147679325,
"grad_norm": 1.8522284030914307,
"learning_rate": 2.7504393673110725e-05,
"loss": 0.868915855884552,
"step": 314
},
{
"epoch": 0.13333333333333333,
"grad_norm": 1.8048083782196045,
"learning_rate": 2.7680140597539544e-05,
"loss": 0.8821638226509094,
"step": 316
},
{
"epoch": 0.1341772151898734,
"grad_norm": 1.9933605194091797,
"learning_rate": 2.7855887521968367e-05,
"loss": 0.8735360503196716,
"step": 318
},
{
"epoch": 0.1350210970464135,
"grad_norm": 2.044337034225464,
"learning_rate": 2.8031634446397186e-05,
"loss": 0.8288834691047668,
"step": 320
},
{
"epoch": 0.1358649789029536,
"grad_norm": 2.416067361831665,
"learning_rate": 2.8207381370826015e-05,
"loss": 0.9104969501495361,
"step": 322
},
{
"epoch": 0.13670886075949368,
"grad_norm": 2.0731265544891357,
"learning_rate": 2.8383128295254834e-05,
"loss": 0.8689924478530884,
"step": 324
},
{
"epoch": 0.13755274261603376,
"grad_norm": 2.049126386642456,
"learning_rate": 2.8558875219683657e-05,
"loss": 0.9312222003936768,
"step": 326
},
{
"epoch": 0.13839662447257384,
"grad_norm": 2.131026268005371,
"learning_rate": 2.8734622144112476e-05,
"loss": 0.8933501839637756,
"step": 328
},
{
"epoch": 0.13924050632911392,
"grad_norm": 1.766754150390625,
"learning_rate": 2.8910369068541305e-05,
"loss": 0.8998261094093323,
"step": 330
},
{
"epoch": 0.140084388185654,
"grad_norm": 2.197706460952759,
"learning_rate": 2.9086115992970124e-05,
"loss": 0.8826426267623901,
"step": 332
},
{
"epoch": 0.1409282700421941,
"grad_norm": 1.953715443611145,
"learning_rate": 2.9261862917398947e-05,
"loss": 0.8590307831764221,
"step": 334
},
{
"epoch": 0.14177215189873418,
"grad_norm": 2.200929880142212,
"learning_rate": 2.943760984182777e-05,
"loss": 0.9317060708999634,
"step": 336
},
{
"epoch": 0.14261603375527426,
"grad_norm": 2.1195082664489746,
"learning_rate": 2.961335676625659e-05,
"loss": 0.9965578317642212,
"step": 338
},
{
"epoch": 0.14345991561181434,
"grad_norm": 2.3449771404266357,
"learning_rate": 2.9789103690685414e-05,
"loss": 0.8353848457336426,
"step": 340
},
{
"epoch": 0.14430379746835442,
"grad_norm": 2.000497579574585,
"learning_rate": 2.9964850615114237e-05,
"loss": 0.9154735803604126,
"step": 342
},
{
"epoch": 0.1451476793248945,
"grad_norm": 2.141890525817871,
"learning_rate": 3.014059753954306e-05,
"loss": 0.9530655741691589,
"step": 344
},
{
"epoch": 0.1459915611814346,
"grad_norm": 1.7717392444610596,
"learning_rate": 3.031634446397188e-05,
"loss": 0.896998405456543,
"step": 346
},
{
"epoch": 0.1468354430379747,
"grad_norm": 1.8796685934066772,
"learning_rate": 3.0492091388400708e-05,
"loss": 0.9084208011627197,
"step": 348
},
{
"epoch": 0.14767932489451477,
"grad_norm": 2.0298709869384766,
"learning_rate": 3.066783831282953e-05,
"loss": 0.9183387756347656,
"step": 350
},
{
"epoch": 0.14852320675105485,
"grad_norm": 1.9245645999908447,
"learning_rate": 3.084358523725835e-05,
"loss": 0.8624772429466248,
"step": 352
},
{
"epoch": 0.14936708860759493,
"grad_norm": 2.325681209564209,
"learning_rate": 3.101933216168717e-05,
"loss": 0.9142400026321411,
"step": 354
},
{
"epoch": 0.150210970464135,
"grad_norm": 2.1200530529022217,
"learning_rate": 3.1195079086115995e-05,
"loss": 0.9064018130302429,
"step": 356
},
{
"epoch": 0.15105485232067511,
"grad_norm": 1.979314923286438,
"learning_rate": 3.137082601054482e-05,
"loss": 0.9199238419532776,
"step": 358
},
{
"epoch": 0.1518987341772152,
"grad_norm": 2.1122689247131348,
"learning_rate": 3.154657293497364e-05,
"loss": 0.8030132055282593,
"step": 360
},
{
"epoch": 0.15274261603375527,
"grad_norm": 2.105767250061035,
"learning_rate": 3.172231985940246e-05,
"loss": 0.9185854196548462,
"step": 362
},
{
"epoch": 0.15358649789029535,
"grad_norm": 2.179471015930176,
"learning_rate": 3.1898066783831285e-05,
"loss": 0.9365083575248718,
"step": 364
},
{
"epoch": 0.15443037974683543,
"grad_norm": 2.1444311141967773,
"learning_rate": 3.207381370826011e-05,
"loss": 0.8965140581130981,
"step": 366
},
{
"epoch": 0.15527426160337554,
"grad_norm": 2.4171674251556396,
"learning_rate": 3.224956063268893e-05,
"loss": 0.8787504434585571,
"step": 368
},
{
"epoch": 0.15611814345991562,
"grad_norm": 2.418628215789795,
"learning_rate": 3.242530755711775e-05,
"loss": 0.8925284147262573,
"step": 370
},
{
"epoch": 0.1569620253164557,
"grad_norm": 2.2228314876556396,
"learning_rate": 3.2601054481546575e-05,
"loss": 0.876179039478302,
"step": 372
},
{
"epoch": 0.15780590717299578,
"grad_norm": 2.324237108230591,
"learning_rate": 3.27768014059754e-05,
"loss": 0.8365707993507385,
"step": 374
},
{
"epoch": 0.15864978902953586,
"grad_norm": 2.6344552040100098,
"learning_rate": 3.295254833040422e-05,
"loss": 0.7864399552345276,
"step": 376
},
{
"epoch": 0.15949367088607594,
"grad_norm": 2.047536611557007,
"learning_rate": 3.312829525483304e-05,
"loss": 0.9271875023841858,
"step": 378
},
{
"epoch": 0.16033755274261605,
"grad_norm": 2.120025157928467,
"learning_rate": 3.3304042179261865e-05,
"loss": 0.8799133896827698,
"step": 380
},
{
"epoch": 0.16118143459915613,
"grad_norm": 2.363692045211792,
"learning_rate": 3.347978910369069e-05,
"loss": 0.8973530530929565,
"step": 382
},
{
"epoch": 0.1620253164556962,
"grad_norm": 2.1796772480010986,
"learning_rate": 3.365553602811951e-05,
"loss": 1.0277652740478516,
"step": 384
},
{
"epoch": 0.16286919831223629,
"grad_norm": 1.9192595481872559,
"learning_rate": 3.383128295254833e-05,
"loss": 0.8909643888473511,
"step": 386
},
{
"epoch": 0.16371308016877636,
"grad_norm": 1.7874376773834229,
"learning_rate": 3.4007029876977155e-05,
"loss": 0.837049663066864,
"step": 388
},
{
"epoch": 0.16455696202531644,
"grad_norm": 2.3402366638183594,
"learning_rate": 3.4182776801405974e-05,
"loss": 0.8625202775001526,
"step": 390
},
{
"epoch": 0.16540084388185655,
"grad_norm": 2.1137185096740723,
"learning_rate": 3.43585237258348e-05,
"loss": 0.9288321137428284,
"step": 392
},
{
"epoch": 0.16624472573839663,
"grad_norm": 2.3776895999908447,
"learning_rate": 3.453427065026362e-05,
"loss": 0.9328726530075073,
"step": 394
},
{
"epoch": 0.1670886075949367,
"grad_norm": 2.34941029548645,
"learning_rate": 3.4710017574692445e-05,
"loss": 0.9273309707641602,
"step": 396
},
{
"epoch": 0.1679324894514768,
"grad_norm": 2.1272573471069336,
"learning_rate": 3.4885764499121264e-05,
"loss": 0.8703887462615967,
"step": 398
},
{
"epoch": 0.16877637130801687,
"grad_norm": 2.047290802001953,
"learning_rate": 3.506151142355009e-05,
"loss": 0.8808165788650513,
"step": 400
},
{
"epoch": 0.16877637130801687,
"eval_loss": 0.9282881617546082,
"eval_runtime": 869.6867,
"eval_samples_per_second": 2.423,
"eval_steps_per_second": 2.423,
"step": 400
},
{
"epoch": 0.16962025316455695,
"grad_norm": 1.9874159097671509,
"learning_rate": 3.5237258347978916e-05,
"loss": 0.9643645286560059,
"step": 402
},
{
"epoch": 0.17046413502109706,
"grad_norm": 1.9299919605255127,
"learning_rate": 3.5413005272407735e-05,
"loss": 0.9173495769500732,
"step": 404
},
{
"epoch": 0.17130801687763714,
"grad_norm": 2.3379697799682617,
"learning_rate": 3.5588752196836555e-05,
"loss": 0.8998411893844604,
"step": 406
},
{
"epoch": 0.17215189873417722,
"grad_norm": 2.241370916366577,
"learning_rate": 3.5764499121265374e-05,
"loss": 0.9310802221298218,
"step": 408
},
{
"epoch": 0.1729957805907173,
"grad_norm": 2.4490108489990234,
"learning_rate": 3.5940246045694206e-05,
"loss": 0.9605053067207336,
"step": 410
},
{
"epoch": 0.17383966244725738,
"grad_norm": 1.8247230052947998,
"learning_rate": 3.6115992970123026e-05,
"loss": 0.8485683798789978,
"step": 412
},
{
"epoch": 0.17468354430379746,
"grad_norm": 2.4608843326568604,
"learning_rate": 3.6291739894551845e-05,
"loss": 0.9325968623161316,
"step": 414
},
{
"epoch": 0.17552742616033756,
"grad_norm": 1.8923161029815674,
"learning_rate": 3.646748681898067e-05,
"loss": 0.9125096201896667,
"step": 416
},
{
"epoch": 0.17637130801687764,
"grad_norm": 1.8502769470214844,
"learning_rate": 3.6643233743409497e-05,
"loss": 0.8852217197418213,
"step": 418
},
{
"epoch": 0.17721518987341772,
"grad_norm": 1.9155100584030151,
"learning_rate": 3.6818980667838316e-05,
"loss": 0.9192792773246765,
"step": 420
},
{
"epoch": 0.1780590717299578,
"grad_norm": 2.181476593017578,
"learning_rate": 3.6994727592267135e-05,
"loss": 0.8787404298782349,
"step": 422
},
{
"epoch": 0.17890295358649788,
"grad_norm": 2.2469847202301025,
"learning_rate": 3.717047451669596e-05,
"loss": 0.9109582901000977,
"step": 424
},
{
"epoch": 0.17974683544303796,
"grad_norm": 2.08145809173584,
"learning_rate": 3.734622144112479e-05,
"loss": 0.8560389280319214,
"step": 426
},
{
"epoch": 0.18059071729957807,
"grad_norm": 4.121932506561279,
"learning_rate": 3.7521968365553606e-05,
"loss": 0.9456104040145874,
"step": 428
},
{
"epoch": 0.18143459915611815,
"grad_norm": 2.177459478378296,
"learning_rate": 3.7697715289982425e-05,
"loss": 0.8421300649642944,
"step": 430
},
{
"epoch": 0.18227848101265823,
"grad_norm": 2.324970245361328,
"learning_rate": 3.787346221441125e-05,
"loss": 0.9199858903884888,
"step": 432
},
{
"epoch": 0.1831223628691983,
"grad_norm": 2.133718490600586,
"learning_rate": 3.804920913884007e-05,
"loss": 0.8953126668930054,
"step": 434
},
{
"epoch": 0.1839662447257384,
"grad_norm": 1.8527995347976685,
"learning_rate": 3.8224956063268896e-05,
"loss": 0.8732239007949829,
"step": 436
},
{
"epoch": 0.1848101265822785,
"grad_norm": 1.95817232131958,
"learning_rate": 3.8400702987697715e-05,
"loss": 0.8818746209144592,
"step": 438
},
{
"epoch": 0.18565400843881857,
"grad_norm": 2.2107293605804443,
"learning_rate": 3.857644991212654e-05,
"loss": 0.9153507947921753,
"step": 440
},
{
"epoch": 0.18649789029535865,
"grad_norm": 2.004754066467285,
"learning_rate": 3.875219683655536e-05,
"loss": 0.8960154056549072,
"step": 442
},
{
"epoch": 0.18734177215189873,
"grad_norm": 2.1851706504821777,
"learning_rate": 3.8927943760984186e-05,
"loss": 0.909011721611023,
"step": 444
},
{
"epoch": 0.1881856540084388,
"grad_norm": 2.4492485523223877,
"learning_rate": 3.9103690685413005e-05,
"loss": 0.8880158066749573,
"step": 446
},
{
"epoch": 0.1890295358649789,
"grad_norm": 2.745453119277954,
"learning_rate": 3.927943760984183e-05,
"loss": 0.8500842452049255,
"step": 448
},
{
"epoch": 0.189873417721519,
"grad_norm": 2.1924264430999756,
"learning_rate": 3.945518453427065e-05,
"loss": 0.9004045724868774,
"step": 450
},
{
"epoch": 0.19071729957805908,
"grad_norm": 2.4051687717437744,
"learning_rate": 3.9630931458699476e-05,
"loss": 0.9020664095878601,
"step": 452
},
{
"epoch": 0.19156118143459916,
"grad_norm": 1.8077667951583862,
"learning_rate": 3.9806678383128295e-05,
"loss": 0.8639500737190247,
"step": 454
},
{
"epoch": 0.19240506329113924,
"grad_norm": 2.089043378829956,
"learning_rate": 3.998242530755712e-05,
"loss": 0.8642048239707947,
"step": 456
},
{
"epoch": 0.19324894514767932,
"grad_norm": 2.029578447341919,
"learning_rate": 4.015817223198594e-05,
"loss": 0.9371927380561829,
"step": 458
},
{
"epoch": 0.1940928270042194,
"grad_norm": 2.26582407951355,
"learning_rate": 4.033391915641476e-05,
"loss": 0.9120588302612305,
"step": 460
},
{
"epoch": 0.1949367088607595,
"grad_norm": 1.8671411275863647,
"learning_rate": 4.050966608084359e-05,
"loss": 0.8758644461631775,
"step": 462
},
{
"epoch": 0.19578059071729959,
"grad_norm": 1.9403492212295532,
"learning_rate": 4.068541300527241e-05,
"loss": 0.914577305316925,
"step": 464
},
{
"epoch": 0.19662447257383966,
"grad_norm": 1.9939641952514648,
"learning_rate": 4.086115992970123e-05,
"loss": 0.8592531681060791,
"step": 466
},
{
"epoch": 0.19746835443037974,
"grad_norm": 2.1511380672454834,
"learning_rate": 4.103690685413005e-05,
"loss": 0.9251965880393982,
"step": 468
},
{
"epoch": 0.19831223628691982,
"grad_norm": 2.2260982990264893,
"learning_rate": 4.121265377855888e-05,
"loss": 0.8465172052383423,
"step": 470
},
{
"epoch": 0.1991561181434599,
"grad_norm": 2.0510010719299316,
"learning_rate": 4.13884007029877e-05,
"loss": 0.8943672180175781,
"step": 472
},
{
"epoch": 0.2,
"grad_norm": 2.2040133476257324,
"learning_rate": 4.156414762741652e-05,
"loss": 0.9594319462776184,
"step": 474
},
{
"epoch": 0.2008438818565401,
"grad_norm": 2.355181932449341,
"learning_rate": 4.173989455184534e-05,
"loss": 0.9031813144683838,
"step": 476
},
{
"epoch": 0.20168776371308017,
"grad_norm": 2.8434665203094482,
"learning_rate": 4.1915641476274166e-05,
"loss": 0.9225798845291138,
"step": 478
},
{
"epoch": 0.20253164556962025,
"grad_norm": 2.1715340614318848,
"learning_rate": 4.209138840070299e-05,
"loss": 0.894163966178894,
"step": 480
},
{
"epoch": 0.20337552742616033,
"grad_norm": 2.078916072845459,
"learning_rate": 4.226713532513181e-05,
"loss": 0.8424109816551208,
"step": 482
},
{
"epoch": 0.2042194092827004,
"grad_norm": 1.9760961532592773,
"learning_rate": 4.244288224956064e-05,
"loss": 0.9102715849876404,
"step": 484
},
{
"epoch": 0.20506329113924052,
"grad_norm": 1.9684507846832275,
"learning_rate": 4.2618629173989456e-05,
"loss": 0.8693854808807373,
"step": 486
},
{
"epoch": 0.2059071729957806,
"grad_norm": 2.1633450984954834,
"learning_rate": 4.279437609841828e-05,
"loss": 0.8617543578147888,
"step": 488
},
{
"epoch": 0.20675105485232068,
"grad_norm": 2.2695257663726807,
"learning_rate": 4.29701230228471e-05,
"loss": 0.9167086482048035,
"step": 490
},
{
"epoch": 0.20759493670886076,
"grad_norm": 2.4180049896240234,
"learning_rate": 4.314586994727593e-05,
"loss": 0.8333520889282227,
"step": 492
},
{
"epoch": 0.20843881856540084,
"grad_norm": 2.2942769527435303,
"learning_rate": 4.3321616871704746e-05,
"loss": 0.918351411819458,
"step": 494
},
{
"epoch": 0.20928270042194091,
"grad_norm": 1.826458215713501,
"learning_rate": 4.349736379613357e-05,
"loss": 0.8565171957015991,
"step": 496
},
{
"epoch": 0.21012658227848102,
"grad_norm": 1.9694055318832397,
"learning_rate": 4.367311072056239e-05,
"loss": 0.8684167861938477,
"step": 498
},
{
"epoch": 0.2109704641350211,
"grad_norm": 1.892659306526184,
"learning_rate": 4.384885764499122e-05,
"loss": 0.7752788662910461,
"step": 500
},
{
"epoch": 0.2109704641350211,
"eval_loss": 0.9080732464790344,
"eval_runtime": 857.0753,
"eval_samples_per_second": 2.458,
"eval_steps_per_second": 2.458,
"step": 500
},
{
"epoch": 0.21181434599156118,
"grad_norm": 1.9322253465652466,
"learning_rate": 4.4024604569420036e-05,
"loss": 0.948570728302002,
"step": 502
},
{
"epoch": 0.21265822784810126,
"grad_norm": 2.0456058979034424,
"learning_rate": 4.4200351493848855e-05,
"loss": 0.8741024732589722,
"step": 504
},
{
"epoch": 0.21350210970464134,
"grad_norm": 2.2406177520751953,
"learning_rate": 4.437609841827768e-05,
"loss": 0.9053841829299927,
"step": 506
},
{
"epoch": 0.21434599156118145,
"grad_norm": 2.013934850692749,
"learning_rate": 4.455184534270651e-05,
"loss": 0.8886576294898987,
"step": 508
},
{
"epoch": 0.21518987341772153,
"grad_norm": 1.9771125316619873,
"learning_rate": 4.4727592267135326e-05,
"loss": 0.8834167718887329,
"step": 510
},
{
"epoch": 0.2160337552742616,
"grad_norm": 1.785905361175537,
"learning_rate": 4.4903339191564146e-05,
"loss": 0.7938863039016724,
"step": 512
},
{
"epoch": 0.2168776371308017,
"grad_norm": 1.7946031093597412,
"learning_rate": 4.507908611599297e-05,
"loss": 0.8071596026420593,
"step": 514
},
{
"epoch": 0.21772151898734177,
"grad_norm": 2.2217721939086914,
"learning_rate": 4.52548330404218e-05,
"loss": 0.797417163848877,
"step": 516
},
{
"epoch": 0.21856540084388185,
"grad_norm": 1.9022471904754639,
"learning_rate": 4.5430579964850617e-05,
"loss": 0.8109536170959473,
"step": 518
},
{
"epoch": 0.21940928270042195,
"grad_norm": 1.8988343477249146,
"learning_rate": 4.5606326889279436e-05,
"loss": 0.8647034168243408,
"step": 520
},
{
"epoch": 0.22025316455696203,
"grad_norm": 2.6014881134033203,
"learning_rate": 4.578207381370827e-05,
"loss": 0.8763713240623474,
"step": 522
},
{
"epoch": 0.2210970464135021,
"grad_norm": 1.9512032270431519,
"learning_rate": 4.595782073813709e-05,
"loss": 0.9525764584541321,
"step": 524
},
{
"epoch": 0.2219409282700422,
"grad_norm": 1.9246160984039307,
"learning_rate": 4.613356766256591e-05,
"loss": 0.8839208483695984,
"step": 526
},
{
"epoch": 0.22278481012658227,
"grad_norm": 1.9713703393936157,
"learning_rate": 4.6309314586994726e-05,
"loss": 0.8888868093490601,
"step": 528
},
{
"epoch": 0.22362869198312235,
"grad_norm": 2.1175239086151123,
"learning_rate": 4.648506151142355e-05,
"loss": 0.8123540878295898,
"step": 530
},
{
"epoch": 0.22447257383966246,
"grad_norm": 1.7656135559082031,
"learning_rate": 4.666080843585238e-05,
"loss": 0.7447702884674072,
"step": 532
},
{
"epoch": 0.22531645569620254,
"grad_norm": 2.15748929977417,
"learning_rate": 4.68365553602812e-05,
"loss": 0.8778411746025085,
"step": 534
},
{
"epoch": 0.22616033755274262,
"grad_norm": 2.1733345985412598,
"learning_rate": 4.7012302284710016e-05,
"loss": 0.8985894918441772,
"step": 536
},
{
"epoch": 0.2270042194092827,
"grad_norm": 1.7182204723358154,
"learning_rate": 4.718804920913884e-05,
"loss": 0.8031114339828491,
"step": 538
},
{
"epoch": 0.22784810126582278,
"grad_norm": 1.8586329221725464,
"learning_rate": 4.736379613356767e-05,
"loss": 0.9399706721305847,
"step": 540
},
{
"epoch": 0.22869198312236286,
"grad_norm": 2.105637311935425,
"learning_rate": 4.753954305799649e-05,
"loss": 0.8672119975090027,
"step": 542
},
{
"epoch": 0.22953586497890296,
"grad_norm": 1.760584831237793,
"learning_rate": 4.771528998242531e-05,
"loss": 0.8663905262947083,
"step": 544
},
{
"epoch": 0.23037974683544304,
"grad_norm": 1.579990267753601,
"learning_rate": 4.789103690685413e-05,
"loss": 0.8575801849365234,
"step": 546
},
{
"epoch": 0.23122362869198312,
"grad_norm": 1.9242485761642456,
"learning_rate": 4.806678383128295e-05,
"loss": 0.828412652015686,
"step": 548
},
{
"epoch": 0.2320675105485232,
"grad_norm": 1.812137246131897,
"learning_rate": 4.824253075571178e-05,
"loss": 0.8183464407920837,
"step": 550
},
{
"epoch": 0.23291139240506328,
"grad_norm": 1.804733395576477,
"learning_rate": 4.84182776801406e-05,
"loss": 0.7822491526603699,
"step": 552
},
{
"epoch": 0.23375527426160336,
"grad_norm": 2.052257537841797,
"learning_rate": 4.859402460456942e-05,
"loss": 0.9050943851470947,
"step": 554
},
{
"epoch": 0.23459915611814347,
"grad_norm": 1.9803621768951416,
"learning_rate": 4.876977152899824e-05,
"loss": 0.8846852779388428,
"step": 556
},
{
"epoch": 0.23544303797468355,
"grad_norm": 1.820125937461853,
"learning_rate": 4.894551845342707e-05,
"loss": 0.8649531602859497,
"step": 558
},
{
"epoch": 0.23628691983122363,
"grad_norm": 2.0963921546936035,
"learning_rate": 4.912126537785589e-05,
"loss": 0.9307748079299927,
"step": 560
},
{
"epoch": 0.2371308016877637,
"grad_norm": 2.079697847366333,
"learning_rate": 4.929701230228471e-05,
"loss": 0.9092473387718201,
"step": 562
},
{
"epoch": 0.2379746835443038,
"grad_norm": 2.0291287899017334,
"learning_rate": 4.947275922671353e-05,
"loss": 0.8976567983627319,
"step": 564
},
{
"epoch": 0.23881856540084387,
"grad_norm": 1.9636707305908203,
"learning_rate": 4.964850615114236e-05,
"loss": 0.8931006193161011,
"step": 566
},
{
"epoch": 0.23966244725738398,
"grad_norm": 1.922049880027771,
"learning_rate": 4.982425307557118e-05,
"loss": 0.829562246799469,
"step": 568
},
{
"epoch": 0.24050632911392406,
"grad_norm": 2.150334596633911,
"learning_rate": 5e-05,
"loss": 0.8568030595779419,
"step": 570
},
{
"epoch": 0.24135021097046414,
"grad_norm": 2.024437427520752,
"learning_rate": 5.017574692442882e-05,
"loss": 0.8623508810997009,
"step": 572
},
{
"epoch": 0.24219409282700421,
"grad_norm": 1.8312673568725586,
"learning_rate": 5.035149384885765e-05,
"loss": 0.7853795886039734,
"step": 574
},
{
"epoch": 0.2430379746835443,
"grad_norm": 1.9271961450576782,
"learning_rate": 5.0527240773286467e-05,
"loss": 0.9727587103843689,
"step": 576
},
{
"epoch": 0.2438818565400844,
"grad_norm": 1.931249976158142,
"learning_rate": 5.0702987697715286e-05,
"loss": 0.8859632015228271,
"step": 578
},
{
"epoch": 0.24472573839662448,
"grad_norm": 1.8195210695266724,
"learning_rate": 5.087873462214412e-05,
"loss": 0.8959492444992065,
"step": 580
},
{
"epoch": 0.24556962025316456,
"grad_norm": 2.0018749237060547,
"learning_rate": 5.105448154657294e-05,
"loss": 0.8146185874938965,
"step": 582
},
{
"epoch": 0.24641350210970464,
"grad_norm": 2.09798526763916,
"learning_rate": 5.1230228471001764e-05,
"loss": 0.8545317053794861,
"step": 584
},
{
"epoch": 0.24725738396624472,
"grad_norm": 1.8063944578170776,
"learning_rate": 5.140597539543058e-05,
"loss": 0.8650105595588684,
"step": 586
},
{
"epoch": 0.2481012658227848,
"grad_norm": 1.8535740375518799,
"learning_rate": 5.15817223198594e-05,
"loss": 0.8395693302154541,
"step": 588
},
{
"epoch": 0.2489451476793249,
"grad_norm": 2.1443960666656494,
"learning_rate": 5.175746924428823e-05,
"loss": 0.8267397284507751,
"step": 590
},
{
"epoch": 0.249789029535865,
"grad_norm": 1.9637391567230225,
"learning_rate": 5.193321616871705e-05,
"loss": 0.8500015139579773,
"step": 592
},
{
"epoch": 0.25063291139240507,
"grad_norm": 1.9457582235336304,
"learning_rate": 5.2108963093145866e-05,
"loss": 0.887481153011322,
"step": 594
},
{
"epoch": 0.2514767932489452,
"grad_norm": 1.7458715438842773,
"learning_rate": 5.228471001757469e-05,
"loss": 0.8444154858589172,
"step": 596
},
{
"epoch": 0.2523206751054852,
"grad_norm": 1.8341439962387085,
"learning_rate": 5.2460456942003525e-05,
"loss": 0.8301781415939331,
"step": 598
},
{
"epoch": 0.25316455696202533,
"grad_norm": 2.127747058868408,
"learning_rate": 5.2636203866432344e-05,
"loss": 0.8921551704406738,
"step": 600
},
{
"epoch": 0.25316455696202533,
"eval_loss": 0.8903881311416626,
"eval_runtime": 845.9969,
"eval_samples_per_second": 2.491,
"eval_steps_per_second": 2.491,
"step": 600
},
{
"epoch": 0.2540084388185654,
"grad_norm": 2.421459674835205,
"learning_rate": 5.281195079086116e-05,
"loss": 0.8678019642829895,
"step": 602
},
{
"epoch": 0.2548523206751055,
"grad_norm": 1.7736057043075562,
"learning_rate": 5.298769771528999e-05,
"loss": 0.8564275503158569,
"step": 604
},
{
"epoch": 0.25569620253164554,
"grad_norm": 2.28430438041687,
"learning_rate": 5.316344463971881e-05,
"loss": 0.8529049158096313,
"step": 606
},
{
"epoch": 0.25654008438818565,
"grad_norm": 1.8892366886138916,
"learning_rate": 5.333919156414763e-05,
"loss": 0.8672881126403809,
"step": 608
},
{
"epoch": 0.25738396624472576,
"grad_norm": 1.9059702157974243,
"learning_rate": 5.3514938488576446e-05,
"loss": 0.9094445109367371,
"step": 610
},
{
"epoch": 0.2582278481012658,
"grad_norm": 2.0657339096069336,
"learning_rate": 5.369068541300527e-05,
"loss": 0.8361946940422058,
"step": 612
},
{
"epoch": 0.2590717299578059,
"grad_norm": 1.8987553119659424,
"learning_rate": 5.3866432337434105e-05,
"loss": 0.8319925665855408,
"step": 614
},
{
"epoch": 0.25991561181434597,
"grad_norm": 2.1176226139068604,
"learning_rate": 5.4042179261862924e-05,
"loss": 0.9818069934844971,
"step": 616
},
{
"epoch": 0.2607594936708861,
"grad_norm": 2.142096519470215,
"learning_rate": 5.421792618629174e-05,
"loss": 0.8675919771194458,
"step": 618
},
{
"epoch": 0.2616033755274262,
"grad_norm": 1.9527089595794678,
"learning_rate": 5.439367311072057e-05,
"loss": 0.8845479488372803,
"step": 620
},
{
"epoch": 0.26244725738396624,
"grad_norm": 1.7071453332901,
"learning_rate": 5.456942003514939e-05,
"loss": 0.809393048286438,
"step": 622
},
{
"epoch": 0.26329113924050634,
"grad_norm": 1.9133527278900146,
"learning_rate": 5.474516695957821e-05,
"loss": 0.8262377977371216,
"step": 624
},
{
"epoch": 0.2641350210970464,
"grad_norm": 2.0217554569244385,
"learning_rate": 5.492091388400703e-05,
"loss": 0.9006736278533936,
"step": 626
},
{
"epoch": 0.2649789029535865,
"grad_norm": 1.773273229598999,
"learning_rate": 5.509666080843585e-05,
"loss": 0.8243603110313416,
"step": 628
},
{
"epoch": 0.26582278481012656,
"grad_norm": 1.6580880880355835,
"learning_rate": 5.527240773286467e-05,
"loss": 0.8112778663635254,
"step": 630
},
{
"epoch": 0.26666666666666666,
"grad_norm": 1.8342082500457764,
"learning_rate": 5.5448154657293504e-05,
"loss": 0.8390820622444153,
"step": 632
},
{
"epoch": 0.26751054852320677,
"grad_norm": 1.863695502281189,
"learning_rate": 5.5623901581722323e-05,
"loss": 0.8264521360397339,
"step": 634
},
{
"epoch": 0.2683544303797468,
"grad_norm": 1.9462928771972656,
"learning_rate": 5.579964850615115e-05,
"loss": 0.9512701630592346,
"step": 636
},
{
"epoch": 0.26919831223628693,
"grad_norm": 1.7776058912277222,
"learning_rate": 5.597539543057997e-05,
"loss": 0.9422703981399536,
"step": 638
},
{
"epoch": 0.270042194092827,
"grad_norm": 2.9457077980041504,
"learning_rate": 5.615114235500879e-05,
"loss": 0.7991042137145996,
"step": 640
},
{
"epoch": 0.2708860759493671,
"grad_norm": 1.445265531539917,
"learning_rate": 5.6326889279437614e-05,
"loss": 0.8188099265098572,
"step": 642
},
{
"epoch": 0.2717299578059072,
"grad_norm": 2.063850164413452,
"learning_rate": 5.650263620386643e-05,
"loss": 0.9799772500991821,
"step": 644
},
{
"epoch": 0.27257383966244725,
"grad_norm": 2.0488009452819824,
"learning_rate": 5.667838312829525e-05,
"loss": 0.8462742567062378,
"step": 646
},
{
"epoch": 0.27341772151898736,
"grad_norm": 1.8747851848602295,
"learning_rate": 5.685413005272408e-05,
"loss": 0.8226412534713745,
"step": 648
},
{
"epoch": 0.2742616033755274,
"grad_norm": 1.849074125289917,
"learning_rate": 5.702987697715291e-05,
"loss": 0.9146338105201721,
"step": 650
},
{
"epoch": 0.2751054852320675,
"grad_norm": 1.7738500833511353,
"learning_rate": 5.720562390158173e-05,
"loss": 0.7574424147605896,
"step": 652
},
{
"epoch": 0.2759493670886076,
"grad_norm": 1.911102294921875,
"learning_rate": 5.738137082601055e-05,
"loss": 0.8930003046989441,
"step": 654
},
{
"epoch": 0.2767932489451477,
"grad_norm": 1.5716617107391357,
"learning_rate": 5.755711775043937e-05,
"loss": 0.7578965425491333,
"step": 656
},
{
"epoch": 0.2776371308016878,
"grad_norm": 1.789036512374878,
"learning_rate": 5.7732864674868194e-05,
"loss": 0.8149038553237915,
"step": 658
},
{
"epoch": 0.27848101265822783,
"grad_norm": 1.68622624874115,
"learning_rate": 5.790861159929701e-05,
"loss": 0.8265765905380249,
"step": 660
},
{
"epoch": 0.27932489451476794,
"grad_norm": 2.078423261642456,
"learning_rate": 5.808435852372583e-05,
"loss": 0.9651970267295837,
"step": 662
},
{
"epoch": 0.280168776371308,
"grad_norm": 1.7878645658493042,
"learning_rate": 5.826010544815466e-05,
"loss": 0.8295148015022278,
"step": 664
},
{
"epoch": 0.2810126582278481,
"grad_norm": 1.970838189125061,
"learning_rate": 5.843585237258348e-05,
"loss": 0.7778491377830505,
"step": 666
},
{
"epoch": 0.2818565400843882,
"grad_norm": 1.943596363067627,
"learning_rate": 5.861159929701231e-05,
"loss": 0.9818071722984314,
"step": 668
},
{
"epoch": 0.28270042194092826,
"grad_norm": 1.8793812990188599,
"learning_rate": 5.878734622144113e-05,
"loss": 0.9297797083854675,
"step": 670
},
{
"epoch": 0.28354430379746837,
"grad_norm": 1.8813483715057373,
"learning_rate": 5.8963093145869955e-05,
"loss": 0.8748109936714172,
"step": 672
},
{
"epoch": 0.2843881856540084,
"grad_norm": 1.7658562660217285,
"learning_rate": 5.9138840070298774e-05,
"loss": 0.8505244851112366,
"step": 674
},
{
"epoch": 0.2852320675105485,
"grad_norm": 1.6767617464065552,
"learning_rate": 5.931458699472759e-05,
"loss": 0.8476597666740417,
"step": 676
},
{
"epoch": 0.28607594936708863,
"grad_norm": 2.703104257583618,
"learning_rate": 5.949033391915641e-05,
"loss": 0.8775192499160767,
"step": 678
},
{
"epoch": 0.2869198312236287,
"grad_norm": 1.9959728717803955,
"learning_rate": 5.966608084358524e-05,
"loss": 0.855262279510498,
"step": 680
},
{
"epoch": 0.2877637130801688,
"grad_norm": 1.9093716144561768,
"learning_rate": 5.984182776801406e-05,
"loss": 0.7574936151504517,
"step": 682
},
{
"epoch": 0.28860759493670884,
"grad_norm": 1.9829599857330322,
"learning_rate": 6.001757469244289e-05,
"loss": 0.8630690574645996,
"step": 684
},
{
"epoch": 0.28945147679324895,
"grad_norm": 1.8777490854263306,
"learning_rate": 6.019332161687171e-05,
"loss": 0.8513249158859253,
"step": 686
},
{
"epoch": 0.290295358649789,
"grad_norm": 1.9453173875808716,
"learning_rate": 6.0369068541300535e-05,
"loss": 0.9097008109092712,
"step": 688
},
{
"epoch": 0.2911392405063291,
"grad_norm": 1.8527908325195312,
"learning_rate": 6.0544815465729354e-05,
"loss": 0.8291722536087036,
"step": 690
},
{
"epoch": 0.2919831223628692,
"grad_norm": 1.9255812168121338,
"learning_rate": 6.0720562390158174e-05,
"loss": 0.880009651184082,
"step": 692
},
{
"epoch": 0.29282700421940927,
"grad_norm": 1.6637977361679077,
"learning_rate": 6.0896309314587e-05,
"loss": 0.8791794180870056,
"step": 694
},
{
"epoch": 0.2936708860759494,
"grad_norm": 1.825940728187561,
"learning_rate": 6.107205623901582e-05,
"loss": 0.8662407398223877,
"step": 696
},
{
"epoch": 0.29451476793248943,
"grad_norm": 1.9348198175430298,
"learning_rate": 6.124780316344464e-05,
"loss": 0.8984515070915222,
"step": 698
},
{
"epoch": 0.29535864978902954,
"grad_norm": 1.659345030784607,
"learning_rate": 6.142355008787346e-05,
"loss": 0.827385663986206,
"step": 700
},
{
"epoch": 0.29535864978902954,
"eval_loss": 0.8730722069740295,
"eval_runtime": 858.184,
"eval_samples_per_second": 2.455,
"eval_steps_per_second": 2.455,
"step": 700
},
{
"epoch": 0.29620253164556964,
"grad_norm": 1.6531789302825928,
"learning_rate": 6.159929701230229e-05,
"loss": 0.9337764382362366,
"step": 702
},
{
"epoch": 0.2970464135021097,
"grad_norm": 1.8269121646881104,
"learning_rate": 6.177504393673111e-05,
"loss": 0.8250943422317505,
"step": 704
},
{
"epoch": 0.2978902953586498,
"grad_norm": 1.692808747291565,
"learning_rate": 6.195079086115994e-05,
"loss": 0.8657428026199341,
"step": 706
},
{
"epoch": 0.29873417721518986,
"grad_norm": 1.6736913919448853,
"learning_rate": 6.212653778558876e-05,
"loss": 0.8889590501785278,
"step": 708
},
{
"epoch": 0.29957805907172996,
"grad_norm": 1.6841140985488892,
"learning_rate": 6.230228471001758e-05,
"loss": 0.7822914123535156,
"step": 710
},
{
"epoch": 0.30042194092827,
"grad_norm": 1.6644599437713623,
"learning_rate": 6.24780316344464e-05,
"loss": 0.8747053742408752,
"step": 712
},
{
"epoch": 0.3012658227848101,
"grad_norm": 1.8187819719314575,
"learning_rate": 6.265377855887522e-05,
"loss": 0.8976446390151978,
"step": 714
},
{
"epoch": 0.30210970464135023,
"grad_norm": 1.7845178842544556,
"learning_rate": 6.282952548330404e-05,
"loss": 0.9401160478591919,
"step": 716
},
{
"epoch": 0.3029535864978903,
"grad_norm": 1.559773564338684,
"learning_rate": 6.300527240773286e-05,
"loss": 0.8754280209541321,
"step": 718
},
{
"epoch": 0.3037974683544304,
"grad_norm": 1.5919631719589233,
"learning_rate": 6.318101933216169e-05,
"loss": 0.8278581500053406,
"step": 720
},
{
"epoch": 0.30464135021097044,
"grad_norm": 1.8551076650619507,
"learning_rate": 6.335676625659052e-05,
"loss": 0.8868640065193176,
"step": 722
},
{
"epoch": 0.30548523206751055,
"grad_norm": 1.6907769441604614,
"learning_rate": 6.353251318101934e-05,
"loss": 0.8631605505943298,
"step": 724
},
{
"epoch": 0.30632911392405066,
"grad_norm": 1.820867657661438,
"learning_rate": 6.370826010544816e-05,
"loss": 0.9142873883247375,
"step": 726
},
{
"epoch": 0.3071729957805907,
"grad_norm": 1.685154676437378,
"learning_rate": 6.388400702987698e-05,
"loss": 0.8258634805679321,
"step": 728
},
{
"epoch": 0.3080168776371308,
"grad_norm": 1.9294627904891968,
"learning_rate": 6.40597539543058e-05,
"loss": 0.9545516967773438,
"step": 730
},
{
"epoch": 0.30886075949367087,
"grad_norm": 1.6075409650802612,
"learning_rate": 6.423550087873462e-05,
"loss": 0.8370757699012756,
"step": 732
},
{
"epoch": 0.309704641350211,
"grad_norm": 1.635750651359558,
"learning_rate": 6.441124780316345e-05,
"loss": 0.8356084823608398,
"step": 734
},
{
"epoch": 0.3105485232067511,
"grad_norm": 1.6376131772994995,
"learning_rate": 6.458699472759227e-05,
"loss": 0.7579531669616699,
"step": 736
},
{
"epoch": 0.31139240506329113,
"grad_norm": 1.7135766744613647,
"learning_rate": 6.47627416520211e-05,
"loss": 0.8436318039894104,
"step": 738
},
{
"epoch": 0.31223628691983124,
"grad_norm": 1.7095093727111816,
"learning_rate": 6.493848857644992e-05,
"loss": 0.7998805046081543,
"step": 740
},
{
"epoch": 0.3130801687763713,
"grad_norm": 1.782615303993225,
"learning_rate": 6.511423550087874e-05,
"loss": 0.915776789188385,
"step": 742
},
{
"epoch": 0.3139240506329114,
"grad_norm": 1.8461172580718994,
"learning_rate": 6.528998242530756e-05,
"loss": 0.8300962448120117,
"step": 744
},
{
"epoch": 0.31476793248945145,
"grad_norm": 1.5659871101379395,
"learning_rate": 6.546572934973638e-05,
"loss": 0.8239848017692566,
"step": 746
},
{
"epoch": 0.31561181434599156,
"grad_norm": 1.9997349977493286,
"learning_rate": 6.56414762741652e-05,
"loss": 0.8236988186836243,
"step": 748
},
{
"epoch": 0.31645569620253167,
"grad_norm": 1.9811526536941528,
"learning_rate": 6.581722319859403e-05,
"loss": 0.8516603112220764,
"step": 750
},
{
"epoch": 0.3172995780590717,
"grad_norm": 1.9877923727035522,
"learning_rate": 6.599297012302285e-05,
"loss": 0.9037567973136902,
"step": 752
},
{
"epoch": 0.3181434599156118,
"grad_norm": 1.6729352474212646,
"learning_rate": 6.616871704745168e-05,
"loss": 0.8350864052772522,
"step": 754
},
{
"epoch": 0.3189873417721519,
"grad_norm": 1.9055802822113037,
"learning_rate": 6.63444639718805e-05,
"loss": 0.8246616125106812,
"step": 756
},
{
"epoch": 0.319831223628692,
"grad_norm": 1.597999930381775,
"learning_rate": 6.652021089630932e-05,
"loss": 0.8014416098594666,
"step": 758
},
{
"epoch": 0.3206751054852321,
"grad_norm": 1.7432531118392944,
"learning_rate": 6.669595782073814e-05,
"loss": 0.9199523329734802,
"step": 760
},
{
"epoch": 0.32151898734177214,
"grad_norm": 1.820164442062378,
"learning_rate": 6.687170474516696e-05,
"loss": 0.7764829397201538,
"step": 762
},
{
"epoch": 0.32236286919831225,
"grad_norm": 1.6408652067184448,
"learning_rate": 6.704745166959578e-05,
"loss": 0.8072620630264282,
"step": 764
},
{
"epoch": 0.3232067510548523,
"grad_norm": 1.8894155025482178,
"learning_rate": 6.722319859402461e-05,
"loss": 0.9006885886192322,
"step": 766
},
{
"epoch": 0.3240506329113924,
"grad_norm": 1.6903613805770874,
"learning_rate": 6.739894551845343e-05,
"loss": 0.7772189378738403,
"step": 768
},
{
"epoch": 0.32489451476793246,
"grad_norm": 1.7540696859359741,
"learning_rate": 6.757469244288225e-05,
"loss": 0.8825590014457703,
"step": 770
},
{
"epoch": 0.32573839662447257,
"grad_norm": 1.603008508682251,
"learning_rate": 6.775043936731108e-05,
"loss": 0.8376453518867493,
"step": 772
},
{
"epoch": 0.3265822784810127,
"grad_norm": 1.5381462574005127,
"learning_rate": 6.79261862917399e-05,
"loss": 0.92608243227005,
"step": 774
},
{
"epoch": 0.32742616033755273,
"grad_norm": 1.4815537929534912,
"learning_rate": 6.810193321616872e-05,
"loss": 0.6842183470726013,
"step": 776
},
{
"epoch": 0.32827004219409284,
"grad_norm": 1.8543411493301392,
"learning_rate": 6.827768014059754e-05,
"loss": 0.8868235349655151,
"step": 778
},
{
"epoch": 0.3291139240506329,
"grad_norm": 1.8895748853683472,
"learning_rate": 6.845342706502637e-05,
"loss": 0.8148112297058105,
"step": 780
},
{
"epoch": 0.329957805907173,
"grad_norm": 1.8150591850280762,
"learning_rate": 6.862917398945519e-05,
"loss": 0.8760337829589844,
"step": 782
},
{
"epoch": 0.3308016877637131,
"grad_norm": 1.6661378145217896,
"learning_rate": 6.880492091388401e-05,
"loss": 0.8266322612762451,
"step": 784
},
{
"epoch": 0.33164556962025316,
"grad_norm": 2.2849128246307373,
"learning_rate": 6.898066783831283e-05,
"loss": 0.8599053025245667,
"step": 786
},
{
"epoch": 0.33248945147679326,
"grad_norm": 1.7233171463012695,
"learning_rate": 6.915641476274165e-05,
"loss": 0.8312317132949829,
"step": 788
},
{
"epoch": 0.3333333333333333,
"grad_norm": 1.7637618780136108,
"learning_rate": 6.933216168717048e-05,
"loss": 0.8379700779914856,
"step": 790
},
{
"epoch": 0.3341772151898734,
"grad_norm": 1.7780474424362183,
"learning_rate": 6.95079086115993e-05,
"loss": 0.8994934558868408,
"step": 792
},
{
"epoch": 0.33502109704641353,
"grad_norm": 1.5798883438110352,
"learning_rate": 6.968365553602812e-05,
"loss": 0.8021857738494873,
"step": 794
},
{
"epoch": 0.3358649789029536,
"grad_norm": 1.7316070795059204,
"learning_rate": 6.985940246045695e-05,
"loss": 0.8814419507980347,
"step": 796
},
{
"epoch": 0.3367088607594937,
"grad_norm": 1.711315631866455,
"learning_rate": 7.003514938488577e-05,
"loss": 0.8545029163360596,
"step": 798
},
{
"epoch": 0.33755274261603374,
"grad_norm": 1.5023137331008911,
"learning_rate": 7.021089630931459e-05,
"loss": 0.8006189465522766,
"step": 800
},
{
"epoch": 0.33755274261603374,
"eval_loss": 0.8635594248771667,
"eval_runtime": 865.9348,
"eval_samples_per_second": 2.433,
"eval_steps_per_second": 2.433,
"step": 800
},
{
"epoch": 0.33839662447257385,
"grad_norm": 1.8377124071121216,
"learning_rate": 7.038664323374341e-05,
"loss": 0.7625874280929565,
"step": 802
},
{
"epoch": 0.3392405063291139,
"grad_norm": 1.5361332893371582,
"learning_rate": 7.056239015817223e-05,
"loss": 0.8490484356880188,
"step": 804
},
{
"epoch": 0.340084388185654,
"grad_norm": 1.8727388381958008,
"learning_rate": 7.073813708260105e-05,
"loss": 0.8915753364562988,
"step": 806
},
{
"epoch": 0.3409282700421941,
"grad_norm": 1.567700743675232,
"learning_rate": 7.091388400702988e-05,
"loss": 0.8902620077133179,
"step": 808
},
{
"epoch": 0.34177215189873417,
"grad_norm": 1.5302914381027222,
"learning_rate": 7.10896309314587e-05,
"loss": 0.7897103428840637,
"step": 810
},
{
"epoch": 0.3426160337552743,
"grad_norm": 1.8819153308868408,
"learning_rate": 7.126537785588753e-05,
"loss": 0.8648831248283386,
"step": 812
},
{
"epoch": 0.3434599156118143,
"grad_norm": 1.5671379566192627,
"learning_rate": 7.144112478031635e-05,
"loss": 0.8449499607086182,
"step": 814
},
{
"epoch": 0.34430379746835443,
"grad_norm": 1.6570971012115479,
"learning_rate": 7.161687170474517e-05,
"loss": 0.848559558391571,
"step": 816
},
{
"epoch": 0.34514767932489454,
"grad_norm": 1.9108437299728394,
"learning_rate": 7.179261862917399e-05,
"loss": 0.8847543597221375,
"step": 818
},
{
"epoch": 0.3459915611814346,
"grad_norm": 1.4909496307373047,
"learning_rate": 7.196836555360281e-05,
"loss": 0.7642563581466675,
"step": 820
},
{
"epoch": 0.3468354430379747,
"grad_norm": 1.768518328666687,
"learning_rate": 7.214411247803163e-05,
"loss": 0.8714305758476257,
"step": 822
},
{
"epoch": 0.34767932489451475,
"grad_norm": 1.715343952178955,
"learning_rate": 7.231985940246046e-05,
"loss": 0.7712987661361694,
"step": 824
},
{
"epoch": 0.34852320675105486,
"grad_norm": 1.6687803268432617,
"learning_rate": 7.24956063268893e-05,
"loss": 0.8122798204421997,
"step": 826
},
{
"epoch": 0.3493670886075949,
"grad_norm": 1.5160514116287231,
"learning_rate": 7.267135325131811e-05,
"loss": 0.793245792388916,
"step": 828
},
{
"epoch": 0.350210970464135,
"grad_norm": 1.6449401378631592,
"learning_rate": 7.284710017574693e-05,
"loss": 0.8747497200965881,
"step": 830
},
{
"epoch": 0.3510548523206751,
"grad_norm": 1.3907722234725952,
"learning_rate": 7.302284710017575e-05,
"loss": 0.6743978261947632,
"step": 832
},
{
"epoch": 0.3518987341772152,
"grad_norm": 1.633555293083191,
"learning_rate": 7.319859402460457e-05,
"loss": 0.8524789214134216,
"step": 834
},
{
"epoch": 0.3527426160337553,
"grad_norm": 1.5414257049560547,
"learning_rate": 7.337434094903339e-05,
"loss": 0.8045110702514648,
"step": 836
},
{
"epoch": 0.35358649789029534,
"grad_norm": 1.8520616292953491,
"learning_rate": 7.355008787346221e-05,
"loss": 0.8319593071937561,
"step": 838
},
{
"epoch": 0.35443037974683544,
"grad_norm": 1.6629763841629028,
"learning_rate": 7.372583479789104e-05,
"loss": 0.8188939094543457,
"step": 840
},
{
"epoch": 0.35527426160337555,
"grad_norm": 1.804087519645691,
"learning_rate": 7.390158172231987e-05,
"loss": 0.8875360488891602,
"step": 842
},
{
"epoch": 0.3561181434599156,
"grad_norm": 1.6031663417816162,
"learning_rate": 7.407732864674869e-05,
"loss": 0.8159612417221069,
"step": 844
},
{
"epoch": 0.3569620253164557,
"grad_norm": 1.7413033246994019,
"learning_rate": 7.425307557117751e-05,
"loss": 0.8422684669494629,
"step": 846
},
{
"epoch": 0.35780590717299576,
"grad_norm": 1.7699719667434692,
"learning_rate": 7.442882249560633e-05,
"loss": 0.9343502521514893,
"step": 848
},
{
"epoch": 0.35864978902953587,
"grad_norm": 1.4613301753997803,
"learning_rate": 7.460456942003515e-05,
"loss": 0.8168979287147522,
"step": 850
},
{
"epoch": 0.3594936708860759,
"grad_norm": 1.542431354522705,
"learning_rate": 7.478031634446397e-05,
"loss": 0.9014382362365723,
"step": 852
},
{
"epoch": 0.36033755274261603,
"grad_norm": 1.6070159673690796,
"learning_rate": 7.49560632688928e-05,
"loss": 0.8162738084793091,
"step": 854
},
{
"epoch": 0.36118143459915614,
"grad_norm": 1.7979451417922974,
"learning_rate": 7.513181019332162e-05,
"loss": 0.8354527950286865,
"step": 856
},
{
"epoch": 0.3620253164556962,
"grad_norm": 2.327045202255249,
"learning_rate": 7.530755711775044e-05,
"loss": 0.8214042782783508,
"step": 858
},
{
"epoch": 0.3628691983122363,
"grad_norm": 1.5085111856460571,
"learning_rate": 7.548330404217927e-05,
"loss": 0.7472147941589355,
"step": 860
},
{
"epoch": 0.36371308016877635,
"grad_norm": 1.6006290912628174,
"learning_rate": 7.565905096660809e-05,
"loss": 0.7586950063705444,
"step": 862
},
{
"epoch": 0.36455696202531646,
"grad_norm": 1.5170620679855347,
"learning_rate": 7.583479789103691e-05,
"loss": 0.8169914484024048,
"step": 864
},
{
"epoch": 0.36540084388185656,
"grad_norm": 1.5848352909088135,
"learning_rate": 7.601054481546573e-05,
"loss": 0.8263922929763794,
"step": 866
},
{
"epoch": 0.3662447257383966,
"grad_norm": 1.8502342700958252,
"learning_rate": 7.618629173989455e-05,
"loss": 0.8726240992546082,
"step": 868
},
{
"epoch": 0.3670886075949367,
"grad_norm": 1.506847620010376,
"learning_rate": 7.636203866432338e-05,
"loss": 0.7220374941825867,
"step": 870
},
{
"epoch": 0.3679324894514768,
"grad_norm": 1.5350452661514282,
"learning_rate": 7.65377855887522e-05,
"loss": 0.8028547167778015,
"step": 872
},
{
"epoch": 0.3687763713080169,
"grad_norm": 1.5011043548583984,
"learning_rate": 7.671353251318102e-05,
"loss": 0.7659649848937988,
"step": 874
},
{
"epoch": 0.369620253164557,
"grad_norm": 1.7019832134246826,
"learning_rate": 7.688927943760984e-05,
"loss": 0.8773653507232666,
"step": 876
},
{
"epoch": 0.37046413502109704,
"grad_norm": 1.4918498992919922,
"learning_rate": 7.706502636203867e-05,
"loss": 0.7977569103240967,
"step": 878
},
{
"epoch": 0.37130801687763715,
"grad_norm": 1.6422638893127441,
"learning_rate": 7.724077328646749e-05,
"loss": 0.7491976022720337,
"step": 880
},
{
"epoch": 0.3721518987341772,
"grad_norm": 1.7590434551239014,
"learning_rate": 7.741652021089631e-05,
"loss": 0.8754181265830994,
"step": 882
},
{
"epoch": 0.3729957805907173,
"grad_norm": 3.868894100189209,
"learning_rate": 7.759226713532513e-05,
"loss": 0.8482301235198975,
"step": 884
},
{
"epoch": 0.37383966244725736,
"grad_norm": 2.111875534057617,
"learning_rate": 7.776801405975396e-05,
"loss": 0.8109031915664673,
"step": 886
},
{
"epoch": 0.37468354430379747,
"grad_norm": 2.0838418006896973,
"learning_rate": 7.794376098418278e-05,
"loss": 0.8660775423049927,
"step": 888
},
{
"epoch": 0.3755274261603376,
"grad_norm": 1.553022027015686,
"learning_rate": 7.81195079086116e-05,
"loss": 0.8418024778366089,
"step": 890
},
{
"epoch": 0.3763713080168776,
"grad_norm": 1.334747314453125,
"learning_rate": 7.829525483304042e-05,
"loss": 0.7764869928359985,
"step": 892
},
{
"epoch": 0.37721518987341773,
"grad_norm": 1.4692286252975464,
"learning_rate": 7.847100175746925e-05,
"loss": 0.7460401654243469,
"step": 894
},
{
"epoch": 0.3780590717299578,
"grad_norm": 1.5374023914337158,
"learning_rate": 7.864674868189807e-05,
"loss": 0.7662873268127441,
"step": 896
},
{
"epoch": 0.3789029535864979,
"grad_norm": 1.5662524700164795,
"learning_rate": 7.882249560632689e-05,
"loss": 0.8165306448936462,
"step": 898
},
{
"epoch": 0.379746835443038,
"grad_norm": 4.498590469360352,
"learning_rate": 7.899824253075572e-05,
"loss": 0.7913232445716858,
"step": 900
},
{
"epoch": 0.379746835443038,
"eval_loss": 0.8491304516792297,
"eval_runtime": 852.6211,
"eval_samples_per_second": 2.471,
"eval_steps_per_second": 2.471,
"step": 900
},
{
"epoch": 0.38059071729957805,
"grad_norm": 1.6320613622665405,
"learning_rate": 7.917398945518454e-05,
"loss": 0.8097161054611206,
"step": 902
},
{
"epoch": 0.38143459915611816,
"grad_norm": 1.2562934160232544,
"learning_rate": 7.934973637961336e-05,
"loss": 0.786399781703949,
"step": 904
},
{
"epoch": 0.3822784810126582,
"grad_norm": 1.6957594156265259,
"learning_rate": 7.952548330404218e-05,
"loss": 0.8385500311851501,
"step": 906
},
{
"epoch": 0.3831223628691983,
"grad_norm": 1.6662386655807495,
"learning_rate": 7.9701230228471e-05,
"loss": 0.8157848715782166,
"step": 908
},
{
"epoch": 0.38396624472573837,
"grad_norm": 1.6717777252197266,
"learning_rate": 7.987697715289982e-05,
"loss": 0.7937968373298645,
"step": 910
},
{
"epoch": 0.3848101265822785,
"grad_norm": 1.399484395980835,
"learning_rate": 8.005272407732865e-05,
"loss": 0.7800109386444092,
"step": 912
},
{
"epoch": 0.3856540084388186,
"grad_norm": 1.5671080350875854,
"learning_rate": 8.022847100175747e-05,
"loss": 0.8135939240455627,
"step": 914
},
{
"epoch": 0.38649789029535864,
"grad_norm": 1.4427763223648071,
"learning_rate": 8.04042179261863e-05,
"loss": 0.7482035160064697,
"step": 916
},
{
"epoch": 0.38734177215189874,
"grad_norm": 1.3314121961593628,
"learning_rate": 8.057996485061512e-05,
"loss": 0.7201873064041138,
"step": 918
},
{
"epoch": 0.3881856540084388,
"grad_norm": 1.5695286989212036,
"learning_rate": 8.075571177504394e-05,
"loss": 0.7933040857315063,
"step": 920
},
{
"epoch": 0.3890295358649789,
"grad_norm": 1.5091747045516968,
"learning_rate": 8.093145869947276e-05,
"loss": 0.8058338165283203,
"step": 922
},
{
"epoch": 0.389873417721519,
"grad_norm": 1.6287630796432495,
"learning_rate": 8.110720562390158e-05,
"loss": 0.7617828249931335,
"step": 924
},
{
"epoch": 0.39071729957805906,
"grad_norm": 1.6129482984542847,
"learning_rate": 8.12829525483304e-05,
"loss": 0.8710150122642517,
"step": 926
},
{
"epoch": 0.39156118143459917,
"grad_norm": 1.6457173824310303,
"learning_rate": 8.145869947275922e-05,
"loss": 0.9122233390808105,
"step": 928
},
{
"epoch": 0.3924050632911392,
"grad_norm": 1.6768827438354492,
"learning_rate": 8.163444639718805e-05,
"loss": 0.8339303731918335,
"step": 930
},
{
"epoch": 0.39324894514767933,
"grad_norm": 1.5419740676879883,
"learning_rate": 8.181019332161688e-05,
"loss": 0.8220396041870117,
"step": 932
},
{
"epoch": 0.39409282700421944,
"grad_norm": 1.4563747644424438,
"learning_rate": 8.19859402460457e-05,
"loss": 0.8531478047370911,
"step": 934
},
{
"epoch": 0.3949367088607595,
"grad_norm": 1.6208328008651733,
"learning_rate": 8.216168717047452e-05,
"loss": 0.8330869078636169,
"step": 936
},
{
"epoch": 0.3957805907172996,
"grad_norm": 1.6492482423782349,
"learning_rate": 8.233743409490334e-05,
"loss": 0.8011296987533569,
"step": 938
},
{
"epoch": 0.39662447257383965,
"grad_norm": 2.1611905097961426,
"learning_rate": 8.251318101933216e-05,
"loss": 0.8111353516578674,
"step": 940
},
{
"epoch": 0.39746835443037976,
"grad_norm": 1.7108231782913208,
"learning_rate": 8.268892794376098e-05,
"loss": 0.8282017111778259,
"step": 942
},
{
"epoch": 0.3983122362869198,
"grad_norm": 1.543465495109558,
"learning_rate": 8.286467486818981e-05,
"loss": 0.7770059704780579,
"step": 944
},
{
"epoch": 0.3991561181434599,
"grad_norm": 1.419969081878662,
"learning_rate": 8.304042179261863e-05,
"loss": 0.8646430373191833,
"step": 946
},
{
"epoch": 0.4,
"grad_norm": 1.5002100467681885,
"learning_rate": 8.321616871704746e-05,
"loss": 0.7949403524398804,
"step": 948
},
{
"epoch": 0.4008438818565401,
"grad_norm": 1.38933265209198,
"learning_rate": 8.339191564147628e-05,
"loss": 0.8124079704284668,
"step": 950
},
{
"epoch": 0.4016877637130802,
"grad_norm": 1.5948443412780762,
"learning_rate": 8.35676625659051e-05,
"loss": 0.8634148836135864,
"step": 952
},
{
"epoch": 0.40253164556962023,
"grad_norm": 1.4437624216079712,
"learning_rate": 8.374340949033392e-05,
"loss": 0.7410681247711182,
"step": 954
},
{
"epoch": 0.40337552742616034,
"grad_norm": 1.3457095623016357,
"learning_rate": 8.391915641476274e-05,
"loss": 0.7680280208587646,
"step": 956
},
{
"epoch": 0.40421940928270045,
"grad_norm": 1.610288143157959,
"learning_rate": 8.409490333919156e-05,
"loss": 0.7921904921531677,
"step": 958
},
{
"epoch": 0.4050632911392405,
"grad_norm": 1.5321530103683472,
"learning_rate": 8.427065026362039e-05,
"loss": 0.8320037126541138,
"step": 960
},
{
"epoch": 0.4059071729957806,
"grad_norm": 1.699881672859192,
"learning_rate": 8.444639718804921e-05,
"loss": 0.8303092122077942,
"step": 962
},
{
"epoch": 0.40675105485232066,
"grad_norm": 1.591515064239502,
"learning_rate": 8.462214411247804e-05,
"loss": 0.9029796719551086,
"step": 964
},
{
"epoch": 0.40759493670886077,
"grad_norm": 1.5930429697036743,
"learning_rate": 8.479789103690686e-05,
"loss": 0.8165359497070312,
"step": 966
},
{
"epoch": 0.4084388185654008,
"grad_norm": 1.509774923324585,
"learning_rate": 8.497363796133568e-05,
"loss": 0.8276026248931885,
"step": 968
},
{
"epoch": 0.4092827004219409,
"grad_norm": 1.3617016077041626,
"learning_rate": 8.51493848857645e-05,
"loss": 0.8159419894218445,
"step": 970
},
{
"epoch": 0.41012658227848103,
"grad_norm": 1.3580708503723145,
"learning_rate": 8.532513181019332e-05,
"loss": 0.7882336378097534,
"step": 972
},
{
"epoch": 0.4109704641350211,
"grad_norm": 1.3337358236312866,
"learning_rate": 8.550087873462214e-05,
"loss": 0.7462319731712341,
"step": 974
},
{
"epoch": 0.4118143459915612,
"grad_norm": 1.450363278388977,
"learning_rate": 8.567662565905097e-05,
"loss": 0.7500866651535034,
"step": 976
},
{
"epoch": 0.41265822784810124,
"grad_norm": 1.5305321216583252,
"learning_rate": 8.585237258347979e-05,
"loss": 0.8432503342628479,
"step": 978
},
{
"epoch": 0.41350210970464135,
"grad_norm": 1.2097326517105103,
"learning_rate": 8.602811950790861e-05,
"loss": 0.8330482840538025,
"step": 980
},
{
"epoch": 0.41434599156118146,
"grad_norm": 1.3916101455688477,
"learning_rate": 8.620386643233744e-05,
"loss": 0.8137149810791016,
"step": 982
},
{
"epoch": 0.4151898734177215,
"grad_norm": 1.6411453485488892,
"learning_rate": 8.637961335676626e-05,
"loss": 0.8273854851722717,
"step": 984
},
{
"epoch": 0.4160337552742616,
"grad_norm": 1.6734566688537598,
"learning_rate": 8.655536028119508e-05,
"loss": 0.794026255607605,
"step": 986
},
{
"epoch": 0.41687763713080167,
"grad_norm": 1.352325677871704,
"learning_rate": 8.67311072056239e-05,
"loss": 0.7721655368804932,
"step": 988
},
{
"epoch": 0.4177215189873418,
"grad_norm": 1.5368729829788208,
"learning_rate": 8.690685413005273e-05,
"loss": 0.8123438954353333,
"step": 990
},
{
"epoch": 0.41856540084388183,
"grad_norm": 1.4903568029403687,
"learning_rate": 8.708260105448155e-05,
"loss": 0.8370974659919739,
"step": 992
},
{
"epoch": 0.41940928270042194,
"grad_norm": 1.3405622243881226,
"learning_rate": 8.725834797891037e-05,
"loss": 0.780426561832428,
"step": 994
},
{
"epoch": 0.42025316455696204,
"grad_norm": 1.4761021137237549,
"learning_rate": 8.743409490333919e-05,
"loss": 0.8304934501647949,
"step": 996
},
{
"epoch": 0.4210970464135021,
"grad_norm": 1.520033359527588,
"learning_rate": 8.760984182776801e-05,
"loss": 0.7960568070411682,
"step": 998
},
{
"epoch": 0.4219409282700422,
"grad_norm": 1.6916255950927734,
"learning_rate": 8.778558875219684e-05,
"loss": 0.7884663939476013,
"step": 1000
},
{
"epoch": 0.4219409282700422,
"eval_loss": 0.8388314247131348,
"eval_runtime": 847.4828,
"eval_samples_per_second": 2.486,
"eval_steps_per_second": 2.486,
"step": 1000
},
{
"epoch": 0.42278481012658226,
"grad_norm": 1.6796396970748901,
"learning_rate": 8.796133567662566e-05,
"loss": 0.7930826544761658,
"step": 1002
},
{
"epoch": 0.42362869198312236,
"grad_norm": 1.4480048418045044,
"learning_rate": 8.813708260105448e-05,
"loss": 0.7138194441795349,
"step": 1004
},
{
"epoch": 0.42447257383966247,
"grad_norm": 1.2499021291732788,
"learning_rate": 8.831282952548331e-05,
"loss": 0.7367453575134277,
"step": 1006
},
{
"epoch": 0.4253164556962025,
"grad_norm": 1.6906769275665283,
"learning_rate": 8.848857644991213e-05,
"loss": 0.9051005244255066,
"step": 1008
},
{
"epoch": 0.42616033755274263,
"grad_norm": 1.4196792840957642,
"learning_rate": 8.866432337434095e-05,
"loss": 0.7469457387924194,
"step": 1010
},
{
"epoch": 0.4270042194092827,
"grad_norm": 1.5132776498794556,
"learning_rate": 8.884007029876977e-05,
"loss": 0.7443049550056458,
"step": 1012
},
{
"epoch": 0.4278481012658228,
"grad_norm": 1.335705280303955,
"learning_rate": 8.901581722319859e-05,
"loss": 0.784084677696228,
"step": 1014
},
{
"epoch": 0.4286919831223629,
"grad_norm": 1.6510252952575684,
"learning_rate": 8.919156414762741e-05,
"loss": 0.8603647947311401,
"step": 1016
},
{
"epoch": 0.42953586497890295,
"grad_norm": 1.35535728931427,
"learning_rate": 8.936731107205624e-05,
"loss": 0.7921645641326904,
"step": 1018
},
{
"epoch": 0.43037974683544306,
"grad_norm": 1.4952049255371094,
"learning_rate": 8.954305799648506e-05,
"loss": 0.799993634223938,
"step": 1020
},
{
"epoch": 0.4312236286919831,
"grad_norm": 1.5026042461395264,
"learning_rate": 8.97188049209139e-05,
"loss": 0.7697094082832336,
"step": 1022
},
{
"epoch": 0.4320675105485232,
"grad_norm": 1.5424275398254395,
"learning_rate": 8.989455184534271e-05,
"loss": 0.7988215684890747,
"step": 1024
},
{
"epoch": 0.43291139240506327,
"grad_norm": 1.438716173171997,
"learning_rate": 9.007029876977153e-05,
"loss": 0.7841635942459106,
"step": 1026
},
{
"epoch": 0.4337552742616034,
"grad_norm": 1.5040369033813477,
"learning_rate": 9.024604569420035e-05,
"loss": 0.7485025525093079,
"step": 1028
},
{
"epoch": 0.4345991561181435,
"grad_norm": 1.4354394674301147,
"learning_rate": 9.042179261862917e-05,
"loss": 0.7735623121261597,
"step": 1030
},
{
"epoch": 0.43544303797468353,
"grad_norm": 1.4841680526733398,
"learning_rate": 9.059753954305799e-05,
"loss": 0.8918828964233398,
"step": 1032
},
{
"epoch": 0.43628691983122364,
"grad_norm": 1.428813099861145,
"learning_rate": 9.077328646748682e-05,
"loss": 0.835110068321228,
"step": 1034
},
{
"epoch": 0.4371308016877637,
"grad_norm": 1.559020757675171,
"learning_rate": 9.094903339191566e-05,
"loss": 0.746295690536499,
"step": 1036
},
{
"epoch": 0.4379746835443038,
"grad_norm": 1.6996115446090698,
"learning_rate": 9.112478031634448e-05,
"loss": 0.8089123368263245,
"step": 1038
},
{
"epoch": 0.4388185654008439,
"grad_norm": 1.6615465879440308,
"learning_rate": 9.13005272407733e-05,
"loss": 0.8807073831558228,
"step": 1040
},
{
"epoch": 0.43966244725738396,
"grad_norm": 1.239142894744873,
"learning_rate": 9.147627416520211e-05,
"loss": 0.7638427019119263,
"step": 1042
},
{
"epoch": 0.44050632911392407,
"grad_norm": 1.1915178298950195,
"learning_rate": 9.165202108963093e-05,
"loss": 0.7817409634590149,
"step": 1044
},
{
"epoch": 0.4413502109704641,
"grad_norm": 1.6276934146881104,
"learning_rate": 9.182776801405975e-05,
"loss": 0.8586427569389343,
"step": 1046
},
{
"epoch": 0.4421940928270042,
"grad_norm": 1.480345606803894,
"learning_rate": 9.200351493848857e-05,
"loss": 0.7481811046600342,
"step": 1048
},
{
"epoch": 0.4430379746835443,
"grad_norm": 1.308419108390808,
"learning_rate": 9.21792618629174e-05,
"loss": 0.8074686527252197,
"step": 1050
},
{
"epoch": 0.4438818565400844,
"grad_norm": 1.6167182922363281,
"learning_rate": 9.235500878734624e-05,
"loss": 0.8455166816711426,
"step": 1052
},
{
"epoch": 0.4447257383966245,
"grad_norm": 1.6058826446533203,
"learning_rate": 9.253075571177506e-05,
"loss": 0.7255295515060425,
"step": 1054
},
{
"epoch": 0.44556962025316454,
"grad_norm": 1.6745728254318237,
"learning_rate": 9.270650263620387e-05,
"loss": 0.8329368233680725,
"step": 1056
},
{
"epoch": 0.44641350210970465,
"grad_norm": 1.5657380819320679,
"learning_rate": 9.28822495606327e-05,
"loss": 0.8583613634109497,
"step": 1058
},
{
"epoch": 0.4472573839662447,
"grad_norm": 1.5052601099014282,
"learning_rate": 9.305799648506151e-05,
"loss": 0.8546127080917358,
"step": 1060
},
{
"epoch": 0.4481012658227848,
"grad_norm": 1.510636806488037,
"learning_rate": 9.323374340949033e-05,
"loss": 0.8416863679885864,
"step": 1062
},
{
"epoch": 0.4489451476793249,
"grad_norm": 1.4446617364883423,
"learning_rate": 9.340949033391916e-05,
"loss": 0.830390453338623,
"step": 1064
},
{
"epoch": 0.44978902953586497,
"grad_norm": 1.6032582521438599,
"learning_rate": 9.358523725834798e-05,
"loss": 0.8000447154045105,
"step": 1066
},
{
"epoch": 0.4506329113924051,
"grad_norm": 1.5295692682266235,
"learning_rate": 9.37609841827768e-05,
"loss": 0.8310818672180176,
"step": 1068
},
{
"epoch": 0.45147679324894513,
"grad_norm": 1.3161942958831787,
"learning_rate": 9.393673110720564e-05,
"loss": 0.8377846479415894,
"step": 1070
},
{
"epoch": 0.45232067510548524,
"grad_norm": 1.4101601839065552,
"learning_rate": 9.411247803163445e-05,
"loss": 0.7852389216423035,
"step": 1072
},
{
"epoch": 0.4531645569620253,
"grad_norm": 1.4352775812149048,
"learning_rate": 9.428822495606327e-05,
"loss": 0.8763723969459534,
"step": 1074
},
{
"epoch": 0.4540084388185654,
"grad_norm": 1.4584673643112183,
"learning_rate": 9.44639718804921e-05,
"loss": 0.8177199363708496,
"step": 1076
},
{
"epoch": 0.4548523206751055,
"grad_norm": 1.6470575332641602,
"learning_rate": 9.463971880492091e-05,
"loss": 0.8333053588867188,
"step": 1078
},
{
"epoch": 0.45569620253164556,
"grad_norm": 1.4429512023925781,
"learning_rate": 9.481546572934975e-05,
"loss": 0.8546649217605591,
"step": 1080
},
{
"epoch": 0.45654008438818566,
"grad_norm": 1.4885371923446655,
"learning_rate": 9.499121265377856e-05,
"loss": 0.838036298751831,
"step": 1082
},
{
"epoch": 0.4573839662447257,
"grad_norm": 1.4601678848266602,
"learning_rate": 9.516695957820738e-05,
"loss": 0.7295010089874268,
"step": 1084
},
{
"epoch": 0.4582278481012658,
"grad_norm": 1.2399365901947021,
"learning_rate": 9.53427065026362e-05,
"loss": 0.6990782618522644,
"step": 1086
},
{
"epoch": 0.45907172995780593,
"grad_norm": 1.2936921119689941,
"learning_rate": 9.551845342706504e-05,
"loss": 0.7790928483009338,
"step": 1088
},
{
"epoch": 0.459915611814346,
"grad_norm": 1.3408331871032715,
"learning_rate": 9.569420035149385e-05,
"loss": 0.8061056733131409,
"step": 1090
},
{
"epoch": 0.4607594936708861,
"grad_norm": 1.5525178909301758,
"learning_rate": 9.586994727592267e-05,
"loss": 0.856796383857727,
"step": 1092
},
{
"epoch": 0.46160337552742614,
"grad_norm": 1.2944618463516235,
"learning_rate": 9.604569420035149e-05,
"loss": 0.7626663446426392,
"step": 1094
},
{
"epoch": 0.46244725738396625,
"grad_norm": 1.412204623222351,
"learning_rate": 9.622144112478033e-05,
"loss": 0.7524681091308594,
"step": 1096
},
{
"epoch": 0.46329113924050636,
"grad_norm": 1.4851596355438232,
"learning_rate": 9.639718804920914e-05,
"loss": 0.8430375456809998,
"step": 1098
},
{
"epoch": 0.4641350210970464,
"grad_norm": 1.831943154335022,
"learning_rate": 9.657293497363796e-05,
"loss": 0.8374918103218079,
"step": 1100
},
{
"epoch": 0.4641350210970464,
"eval_loss": 0.8283821940422058,
"eval_runtime": 861.0464,
"eval_samples_per_second": 2.447,
"eval_steps_per_second": 2.447,
"step": 1100
},
{
"epoch": 0.4649789029535865,
"grad_norm": 1.4989945888519287,
"learning_rate": 9.674868189806678e-05,
"loss": 0.8063139915466309,
"step": 1102
},
{
"epoch": 0.46582278481012657,
"grad_norm": 1.3772722482681274,
"learning_rate": 9.692442882249562e-05,
"loss": 0.8109207153320312,
"step": 1104
},
{
"epoch": 0.4666666666666667,
"grad_norm": 1.4963124990463257,
"learning_rate": 9.710017574692443e-05,
"loss": 0.8667853474617004,
"step": 1106
},
{
"epoch": 0.4675105485232067,
"grad_norm": 1.4250836372375488,
"learning_rate": 9.727592267135325e-05,
"loss": 0.8020523190498352,
"step": 1108
},
{
"epoch": 0.46835443037974683,
"grad_norm": 1.475599765777588,
"learning_rate": 9.745166959578209e-05,
"loss": 0.8271048069000244,
"step": 1110
},
{
"epoch": 0.46919831223628694,
"grad_norm": 1.3727436065673828,
"learning_rate": 9.76274165202109e-05,
"loss": 0.7615619897842407,
"step": 1112
},
{
"epoch": 0.470042194092827,
"grad_norm": 1.2233914136886597,
"learning_rate": 9.780316344463972e-05,
"loss": 0.7843242883682251,
"step": 1114
},
{
"epoch": 0.4708860759493671,
"grad_norm": 1.5734832286834717,
"learning_rate": 9.797891036906854e-05,
"loss": 0.834839940071106,
"step": 1116
},
{
"epoch": 0.47172995780590715,
"grad_norm": 1.3778531551361084,
"learning_rate": 9.815465729349736e-05,
"loss": 0.7584373950958252,
"step": 1118
},
{
"epoch": 0.47257383966244726,
"grad_norm": 1.5535035133361816,
"learning_rate": 9.833040421792618e-05,
"loss": 0.8204697370529175,
"step": 1120
},
{
"epoch": 0.47341772151898737,
"grad_norm": 1.4743636846542358,
"learning_rate": 9.850615114235501e-05,
"loss": 0.9012852311134338,
"step": 1122
},
{
"epoch": 0.4742616033755274,
"grad_norm": 1.4134864807128906,
"learning_rate": 9.868189806678383e-05,
"loss": 0.8392805457115173,
"step": 1124
},
{
"epoch": 0.4751054852320675,
"grad_norm": 1.3308019638061523,
"learning_rate": 9.885764499121267e-05,
"loss": 0.7135441303253174,
"step": 1126
},
{
"epoch": 0.4759493670886076,
"grad_norm": 1.5354844331741333,
"learning_rate": 9.903339191564149e-05,
"loss": 0.8464727401733398,
"step": 1128
},
{
"epoch": 0.4767932489451477,
"grad_norm": 1.2730523347854614,
"learning_rate": 9.92091388400703e-05,
"loss": 0.7691597938537598,
"step": 1130
},
{
"epoch": 0.47763713080168774,
"grad_norm": 1.5459758043289185,
"learning_rate": 9.938488576449912e-05,
"loss": 0.8068788647651672,
"step": 1132
},
{
"epoch": 0.47848101265822784,
"grad_norm": 1.345678687095642,
"learning_rate": 9.956063268892794e-05,
"loss": 0.8091006278991699,
"step": 1134
},
{
"epoch": 0.47932489451476795,
"grad_norm": 1.317076563835144,
"learning_rate": 9.973637961335676e-05,
"loss": 0.735533595085144,
"step": 1136
},
{
"epoch": 0.480168776371308,
"grad_norm": 1.5011168718338013,
"learning_rate": 9.99121265377856e-05,
"loss": 0.7935182452201843,
"step": 1138
},
{
"epoch": 0.4810126582278481,
"grad_norm": 1.673899531364441,
"learning_rate": 9.999999855824502e-05,
"loss": 0.8203520774841309,
"step": 1140
},
{
"epoch": 0.48185654008438816,
"grad_norm": 1.344337821006775,
"learning_rate": 9.999998702420562e-05,
"loss": 0.7233241200447083,
"step": 1142
},
{
"epoch": 0.48270042194092827,
"grad_norm": 1.5819076299667358,
"learning_rate": 9.999996395612948e-05,
"loss": 0.8795552849769592,
"step": 1144
},
{
"epoch": 0.4835443037974684,
"grad_norm": 1.7427241802215576,
"learning_rate": 9.999992935402192e-05,
"loss": 0.8482733964920044,
"step": 1146
},
{
"epoch": 0.48438818565400843,
"grad_norm": 1.2877503633499146,
"learning_rate": 9.999988321789093e-05,
"loss": 0.7905706167221069,
"step": 1148
},
{
"epoch": 0.48523206751054854,
"grad_norm": 1.4887222051620483,
"learning_rate": 9.999982554774715e-05,
"loss": 0.8609708547592163,
"step": 1150
},
{
"epoch": 0.4860759493670886,
"grad_norm": 1.3625136613845825,
"learning_rate": 9.999975634360388e-05,
"loss": 0.7890065908432007,
"step": 1152
},
{
"epoch": 0.4869198312236287,
"grad_norm": 1.3631492853164673,
"learning_rate": 9.999967560547708e-05,
"loss": 0.7908958196640015,
"step": 1154
},
{
"epoch": 0.4877637130801688,
"grad_norm": 1.5244156122207642,
"learning_rate": 9.99995833333854e-05,
"loss": 0.8509655594825745,
"step": 1156
},
{
"epoch": 0.48860759493670886,
"grad_norm": 1.2513200044631958,
"learning_rate": 9.999947952735007e-05,
"loss": 0.7329106330871582,
"step": 1158
},
{
"epoch": 0.48945147679324896,
"grad_norm": 1.1539413928985596,
"learning_rate": 9.99993641873951e-05,
"loss": 0.7237489223480225,
"step": 1160
},
{
"epoch": 0.490295358649789,
"grad_norm": 1.3859314918518066,
"learning_rate": 9.999923731354706e-05,
"loss": 0.8650591373443604,
"step": 1162
},
{
"epoch": 0.4911392405063291,
"grad_norm": 1.2910805940628052,
"learning_rate": 9.999909890583521e-05,
"loss": 0.7516807913780212,
"step": 1164
},
{
"epoch": 0.4919831223628692,
"grad_norm": 1.6100077629089355,
"learning_rate": 9.999894896429152e-05,
"loss": 0.7082475423812866,
"step": 1166
},
{
"epoch": 0.4928270042194093,
"grad_norm": 1.2313556671142578,
"learning_rate": 9.999878748895053e-05,
"loss": 0.8403750658035278,
"step": 1168
},
{
"epoch": 0.4936708860759494,
"grad_norm": 1.3402830362319946,
"learning_rate": 9.999861447984952e-05,
"loss": 0.8083041906356812,
"step": 1170
},
{
"epoch": 0.49451476793248944,
"grad_norm": 1.516775131225586,
"learning_rate": 9.999842993702839e-05,
"loss": 0.8339354991912842,
"step": 1172
},
{
"epoch": 0.49535864978902955,
"grad_norm": 1.2698423862457275,
"learning_rate": 9.999823386052971e-05,
"loss": 0.7708724141120911,
"step": 1174
},
{
"epoch": 0.4962025316455696,
"grad_norm": 1.339390516281128,
"learning_rate": 9.999802625039872e-05,
"loss": 0.7589715719223022,
"step": 1176
},
{
"epoch": 0.4970464135021097,
"grad_norm": 1.4618452787399292,
"learning_rate": 9.99978071066833e-05,
"loss": 0.8523206114768982,
"step": 1178
},
{
"epoch": 0.4978902953586498,
"grad_norm": 1.4812564849853516,
"learning_rate": 9.9997576429434e-05,
"loss": 0.8143196105957031,
"step": 1180
},
{
"epoch": 0.49873417721518987,
"grad_norm": 1.5720716714859009,
"learning_rate": 9.999733421870405e-05,
"loss": 0.800125002861023,
"step": 1182
},
{
"epoch": 0.49957805907173,
"grad_norm": 1.4421230554580688,
"learning_rate": 9.99970804745493e-05,
"loss": 0.7618259191513062,
"step": 1184
},
{
"epoch": 0.5004219409282701,
"grad_norm": 1.5794934034347534,
"learning_rate": 9.99968151970283e-05,
"loss": 0.7162163853645325,
"step": 1186
},
{
"epoch": 0.5012658227848101,
"grad_norm": 1.8590432405471802,
"learning_rate": 9.999653838620225e-05,
"loss": 0.8089820146560669,
"step": 1188
},
{
"epoch": 0.5021097046413502,
"grad_norm": 1.5194507837295532,
"learning_rate": 9.999625004213498e-05,
"loss": 0.8011203408241272,
"step": 1190
},
{
"epoch": 0.5029535864978903,
"grad_norm": 1.6986470222473145,
"learning_rate": 9.999595016489303e-05,
"loss": 0.761158287525177,
"step": 1192
},
{
"epoch": 0.5037974683544304,
"grad_norm": 1.4413946866989136,
"learning_rate": 9.999563875454559e-05,
"loss": 0.7898027300834656,
"step": 1194
},
{
"epoch": 0.5046413502109705,
"grad_norm": 1.4509994983673096,
"learning_rate": 9.999531581116443e-05,
"loss": 0.8018442392349243,
"step": 1196
},
{
"epoch": 0.5054852320675105,
"grad_norm": 1.400659441947937,
"learning_rate": 9.999498133482412e-05,
"loss": 0.7804076075553894,
"step": 1198
},
{
"epoch": 0.5063291139240507,
"grad_norm": 1.486840009689331,
"learning_rate": 9.999463532560178e-05,
"loss": 0.82496178150177,
"step": 1200
},
{
"epoch": 0.5063291139240507,
"eval_loss": 0.8186545968055725,
"eval_runtime": 862.1638,
"eval_samples_per_second": 2.444,
"eval_steps_per_second": 2.444,
"step": 1200
},
{
"epoch": 0.5071729957805907,
"grad_norm": 1.2770357131958008,
"learning_rate": 9.999427778357723e-05,
"loss": 0.8037722706794739,
"step": 1202
},
{
"epoch": 0.5080168776371308,
"grad_norm": 1.4540977478027344,
"learning_rate": 9.999390870883297e-05,
"loss": 0.7329373359680176,
"step": 1204
},
{
"epoch": 0.5088607594936709,
"grad_norm": 1.4469913244247437,
"learning_rate": 9.999352810145412e-05,
"loss": 0.8224589824676514,
"step": 1206
},
{
"epoch": 0.509704641350211,
"grad_norm": 1.46500563621521,
"learning_rate": 9.999313596152847e-05,
"loss": 0.8106292486190796,
"step": 1208
},
{
"epoch": 0.510548523206751,
"grad_norm": 1.3526637554168701,
"learning_rate": 9.999273228914649e-05,
"loss": 0.747698187828064,
"step": 1210
},
{
"epoch": 0.5113924050632911,
"grad_norm": 1.28840172290802,
"learning_rate": 9.999231708440131e-05,
"loss": 0.7612425684928894,
"step": 1212
},
{
"epoch": 0.5122362869198313,
"grad_norm": 1.0283230543136597,
"learning_rate": 9.99918903473887e-05,
"loss": 0.6839463710784912,
"step": 1214
},
{
"epoch": 0.5130801687763713,
"grad_norm": 1.5231431722640991,
"learning_rate": 9.999145207820708e-05,
"loss": 0.8539203405380249,
"step": 1216
},
{
"epoch": 0.5139240506329114,
"grad_norm": 1.3289231061935425,
"learning_rate": 9.999100227695758e-05,
"loss": 0.7960102558135986,
"step": 1218
},
{
"epoch": 0.5147679324894515,
"grad_norm": 1.3770930767059326,
"learning_rate": 9.999054094374396e-05,
"loss": 0.7639255523681641,
"step": 1220
},
{
"epoch": 0.5156118143459916,
"grad_norm": 1.3028030395507812,
"learning_rate": 9.999006807867262e-05,
"loss": 0.7743061780929565,
"step": 1222
},
{
"epoch": 0.5164556962025316,
"grad_norm": 1.1827034950256348,
"learning_rate": 9.998958368185265e-05,
"loss": 0.7922407984733582,
"step": 1224
},
{
"epoch": 0.5172995780590718,
"grad_norm": 1.2973705530166626,
"learning_rate": 9.99890877533958e-05,
"loss": 0.7671286463737488,
"step": 1226
},
{
"epoch": 0.5181434599156118,
"grad_norm": 1.5820153951644897,
"learning_rate": 9.998858029341646e-05,
"loss": 0.7546951174736023,
"step": 1228
},
{
"epoch": 0.5189873417721519,
"grad_norm": 1.6140317916870117,
"learning_rate": 9.99880613020317e-05,
"loss": 0.8734183311462402,
"step": 1230
},
{
"epoch": 0.5198312236286919,
"grad_norm": 1.1190184354782104,
"learning_rate": 9.998753077936122e-05,
"loss": 0.8410643339157104,
"step": 1232
},
{
"epoch": 0.5206751054852321,
"grad_norm": 1.3876196146011353,
"learning_rate": 9.998698872552744e-05,
"loss": 0.7769841551780701,
"step": 1234
},
{
"epoch": 0.5215189873417722,
"grad_norm": 1.699522852897644,
"learning_rate": 9.998643514065535e-05,
"loss": 0.8846109509468079,
"step": 1236
},
{
"epoch": 0.5223628691983122,
"grad_norm": 1.3805134296417236,
"learning_rate": 9.998587002487271e-05,
"loss": 0.7664945125579834,
"step": 1238
},
{
"epoch": 0.5232067510548524,
"grad_norm": 1.3679476976394653,
"learning_rate": 9.998529337830984e-05,
"loss": 0.7243514060974121,
"step": 1240
},
{
"epoch": 0.5240506329113924,
"grad_norm": 1.399200677871704,
"learning_rate": 9.998470520109977e-05,
"loss": 0.8061941862106323,
"step": 1242
},
{
"epoch": 0.5248945147679325,
"grad_norm": 1.3441044092178345,
"learning_rate": 9.99841054933782e-05,
"loss": 0.7741840481758118,
"step": 1244
},
{
"epoch": 0.5257383966244725,
"grad_norm": 1.3375325202941895,
"learning_rate": 9.998349425528344e-05,
"loss": 0.7619491815567017,
"step": 1246
},
{
"epoch": 0.5265822784810127,
"grad_norm": 1.5517847537994385,
"learning_rate": 9.998287148695651e-05,
"loss": 0.8315094113349915,
"step": 1248
},
{
"epoch": 0.5274261603375527,
"grad_norm": 1.244997501373291,
"learning_rate": 9.998223718854107e-05,
"loss": 0.7536082863807678,
"step": 1250
},
{
"epoch": 0.5282700421940928,
"grad_norm": 1.3190033435821533,
"learning_rate": 9.998159136018344e-05,
"loss": 0.826419472694397,
"step": 1252
},
{
"epoch": 0.529113924050633,
"grad_norm": 1.2750061750411987,
"learning_rate": 9.998093400203259e-05,
"loss": 0.7866435647010803,
"step": 1254
},
{
"epoch": 0.529957805907173,
"grad_norm": 1.422908067703247,
"learning_rate": 9.998026511424017e-05,
"loss": 0.7796626687049866,
"step": 1256
},
{
"epoch": 0.5308016877637131,
"grad_norm": 1.435552954673767,
"learning_rate": 9.997958469696048e-05,
"loss": 0.815027117729187,
"step": 1258
},
{
"epoch": 0.5316455696202531,
"grad_norm": 1.1950994729995728,
"learning_rate": 9.997889275035049e-05,
"loss": 0.6925795674324036,
"step": 1260
},
{
"epoch": 0.5324894514767933,
"grad_norm": 1.3049622774124146,
"learning_rate": 9.997818927456978e-05,
"loss": 0.822464108467102,
"step": 1262
},
{
"epoch": 0.5333333333333333,
"grad_norm": 1.2197340726852417,
"learning_rate": 9.997747426978066e-05,
"loss": 0.7955381274223328,
"step": 1264
},
{
"epoch": 0.5341772151898734,
"grad_norm": 1.2463661432266235,
"learning_rate": 9.997674773614807e-05,
"loss": 0.8642181754112244,
"step": 1266
},
{
"epoch": 0.5350210970464135,
"grad_norm": 1.421393871307373,
"learning_rate": 9.99760096738396e-05,
"loss": 0.8776891827583313,
"step": 1268
},
{
"epoch": 0.5358649789029536,
"grad_norm": 1.4347561597824097,
"learning_rate": 9.997526008302549e-05,
"loss": 0.7446491122245789,
"step": 1270
},
{
"epoch": 0.5367088607594936,
"grad_norm": 1.2056710720062256,
"learning_rate": 9.99744989638787e-05,
"loss": 0.8581281304359436,
"step": 1272
},
{
"epoch": 0.5375527426160338,
"grad_norm": 1.1672608852386475,
"learning_rate": 9.997372631657475e-05,
"loss": 0.7386330366134644,
"step": 1274
},
{
"epoch": 0.5383966244725739,
"grad_norm": 1.4313966035842896,
"learning_rate": 9.997294214129191e-05,
"loss": 0.7806804776191711,
"step": 1276
},
{
"epoch": 0.5392405063291139,
"grad_norm": 1.1666971445083618,
"learning_rate": 9.997214643821107e-05,
"loss": 0.6830351948738098,
"step": 1278
},
{
"epoch": 0.540084388185654,
"grad_norm": 1.491783857345581,
"learning_rate": 9.997133920751578e-05,
"loss": 0.8570694327354431,
"step": 1280
},
{
"epoch": 0.5409282700421941,
"grad_norm": 1.1879212856292725,
"learning_rate": 9.997052044939226e-05,
"loss": 0.7016772031784058,
"step": 1282
},
{
"epoch": 0.5417721518987342,
"grad_norm": 1.2692012786865234,
"learning_rate": 9.996969016402935e-05,
"loss": 0.7711107134819031,
"step": 1284
},
{
"epoch": 0.5426160337552742,
"grad_norm": 1.3318448066711426,
"learning_rate": 9.996884835161863e-05,
"loss": 0.7807164788246155,
"step": 1286
},
{
"epoch": 0.5434599156118144,
"grad_norm": 1.1786744594573975,
"learning_rate": 9.996799501235425e-05,
"loss": 0.7331319451332092,
"step": 1288
},
{
"epoch": 0.5443037974683544,
"grad_norm": 1.4092369079589844,
"learning_rate": 9.996713014643309e-05,
"loss": 0.7191547155380249,
"step": 1290
},
{
"epoch": 0.5451476793248945,
"grad_norm": 1.377099633216858,
"learning_rate": 9.996625375405463e-05,
"loss": 0.7233871221542358,
"step": 1292
},
{
"epoch": 0.5459915611814345,
"grad_norm": 1.404945969581604,
"learning_rate": 9.996536583542105e-05,
"loss": 0.7925472855567932,
"step": 1294
},
{
"epoch": 0.5468354430379747,
"grad_norm": 1.2555286884307861,
"learning_rate": 9.996446639073718e-05,
"loss": 0.7749786376953125,
"step": 1296
},
{
"epoch": 0.5476793248945148,
"grad_norm": 1.2577459812164307,
"learning_rate": 9.996355542021048e-05,
"loss": 0.7647517919540405,
"step": 1298
},
{
"epoch": 0.5485232067510548,
"grad_norm": 1.3587758541107178,
"learning_rate": 9.996263292405113e-05,
"loss": 0.8621891140937805,
"step": 1300
},
{
"epoch": 0.5485232067510548,
"eval_loss": 0.808323085308075,
"eval_runtime": 853.577,
"eval_samples_per_second": 2.468,
"eval_steps_per_second": 2.468,
"step": 1300
},
{
"epoch": 0.549367088607595,
"grad_norm": 1.327125906944275,
"learning_rate": 9.996169890247191e-05,
"loss": 0.749254584312439,
"step": 1302
},
{
"epoch": 0.550210970464135,
"grad_norm": 1.4620670080184937,
"learning_rate": 9.99607533556883e-05,
"loss": 0.7362856268882751,
"step": 1304
},
{
"epoch": 0.5510548523206751,
"grad_norm": 1.4119454622268677,
"learning_rate": 9.99597962839184e-05,
"loss": 0.7918445467948914,
"step": 1306
},
{
"epoch": 0.5518987341772152,
"grad_norm": 1.497522234916687,
"learning_rate": 9.995882768738298e-05,
"loss": 0.7348005175590515,
"step": 1308
},
{
"epoch": 0.5527426160337553,
"grad_norm": 1.535741925239563,
"learning_rate": 9.99578475663055e-05,
"loss": 0.8310725688934326,
"step": 1310
},
{
"epoch": 0.5535864978902953,
"grad_norm": 1.4606215953826904,
"learning_rate": 9.995685592091204e-05,
"loss": 0.8232766389846802,
"step": 1312
},
{
"epoch": 0.5544303797468354,
"grad_norm": 1.2442357540130615,
"learning_rate": 9.995585275143136e-05,
"loss": 0.8273071050643921,
"step": 1314
},
{
"epoch": 0.5552742616033756,
"grad_norm": 1.5128520727157593,
"learning_rate": 9.995483805809487e-05,
"loss": 0.7518656253814697,
"step": 1316
},
{
"epoch": 0.5561181434599156,
"grad_norm": 1.340149998664856,
"learning_rate": 9.995381184113664e-05,
"loss": 0.8261662721633911,
"step": 1318
},
{
"epoch": 0.5569620253164557,
"grad_norm": 1.1409451961517334,
"learning_rate": 9.99527741007934e-05,
"loss": 0.5775256156921387,
"step": 1320
},
{
"epoch": 0.5578059071729958,
"grad_norm": 1.3489247560501099,
"learning_rate": 9.995172483730455e-05,
"loss": 0.7698423862457275,
"step": 1322
},
{
"epoch": 0.5586497890295359,
"grad_norm": 1.4950530529022217,
"learning_rate": 9.995066405091211e-05,
"loss": 0.8053334355354309,
"step": 1324
},
{
"epoch": 0.5594936708860759,
"grad_norm": 1.3814653158187866,
"learning_rate": 9.994959174186078e-05,
"loss": 0.7826266288757324,
"step": 1326
},
{
"epoch": 0.560337552742616,
"grad_norm": 1.3383625745773315,
"learning_rate": 9.994850791039796e-05,
"loss": 0.7862131595611572,
"step": 1328
},
{
"epoch": 0.5611814345991561,
"grad_norm": 1.3529670238494873,
"learning_rate": 9.994741255677363e-05,
"loss": 0.8428501486778259,
"step": 1330
},
{
"epoch": 0.5620253164556962,
"grad_norm": 1.254215121269226,
"learning_rate": 9.994630568124049e-05,
"loss": 0.7340869307518005,
"step": 1332
},
{
"epoch": 0.5628691983122363,
"grad_norm": 1.2869828939437866,
"learning_rate": 9.994518728405386e-05,
"loss": 0.7052226662635803,
"step": 1334
},
{
"epoch": 0.5637130801687764,
"grad_norm": 1.4321808815002441,
"learning_rate": 9.994405736547174e-05,
"loss": 0.8297074437141418,
"step": 1336
},
{
"epoch": 0.5645569620253165,
"grad_norm": 1.4638891220092773,
"learning_rate": 9.994291592575478e-05,
"loss": 0.7183220982551575,
"step": 1338
},
{
"epoch": 0.5654008438818565,
"grad_norm": 1.4947413206100464,
"learning_rate": 9.994176296516628e-05,
"loss": 0.8146093487739563,
"step": 1340
},
{
"epoch": 0.5662447257383966,
"grad_norm": 1.343862533569336,
"learning_rate": 9.994059848397221e-05,
"loss": 0.7583593130111694,
"step": 1342
},
{
"epoch": 0.5670886075949367,
"grad_norm": 1.203550100326538,
"learning_rate": 9.993942248244121e-05,
"loss": 0.7682924270629883,
"step": 1344
},
{
"epoch": 0.5679324894514768,
"grad_norm": 1.287660002708435,
"learning_rate": 9.993823496084455e-05,
"loss": 0.8139828443527222,
"step": 1346
},
{
"epoch": 0.5687763713080168,
"grad_norm": 1.3326014280319214,
"learning_rate": 9.993703591945616e-05,
"loss": 0.7529099583625793,
"step": 1348
},
{
"epoch": 0.569620253164557,
"grad_norm": 1.2441487312316895,
"learning_rate": 9.993582535855263e-05,
"loss": 0.6997471451759338,
"step": 1350
},
{
"epoch": 0.570464135021097,
"grad_norm": 1.2647649049758911,
"learning_rate": 9.993460327841325e-05,
"loss": 0.7421218752861023,
"step": 1352
},
{
"epoch": 0.5713080168776371,
"grad_norm": 1.146399974822998,
"learning_rate": 9.99333696793199e-05,
"loss": 0.7342398166656494,
"step": 1354
},
{
"epoch": 0.5721518987341773,
"grad_norm": 1.3346691131591797,
"learning_rate": 9.993212456155715e-05,
"loss": 0.7175891399383545,
"step": 1356
},
{
"epoch": 0.5729957805907173,
"grad_norm": 1.3950672149658203,
"learning_rate": 9.993086792541222e-05,
"loss": 0.8108891248703003,
"step": 1358
},
{
"epoch": 0.5738396624472574,
"grad_norm": 1.339931845664978,
"learning_rate": 9.992959977117502e-05,
"loss": 0.6979889273643494,
"step": 1360
},
{
"epoch": 0.5746835443037974,
"grad_norm": 1.3276840448379517,
"learning_rate": 9.992832009913806e-05,
"loss": 0.7635799050331116,
"step": 1362
},
{
"epoch": 0.5755274261603376,
"grad_norm": 1.5015610456466675,
"learning_rate": 9.992702890959653e-05,
"loss": 0.7575043439865112,
"step": 1364
},
{
"epoch": 0.5763713080168776,
"grad_norm": 1.4755414724349976,
"learning_rate": 9.99257262028483e-05,
"loss": 0.8134847283363342,
"step": 1366
},
{
"epoch": 0.5772151898734177,
"grad_norm": 1.3788783550262451,
"learning_rate": 9.992441197919388e-05,
"loss": 0.7663828134536743,
"step": 1368
},
{
"epoch": 0.5780590717299579,
"grad_norm": 1.2814711332321167,
"learning_rate": 9.992308623893644e-05,
"loss": 0.6711251735687256,
"step": 1370
},
{
"epoch": 0.5789029535864979,
"grad_norm": 1.5343635082244873,
"learning_rate": 9.99217489823818e-05,
"loss": 0.8097200393676758,
"step": 1372
},
{
"epoch": 0.579746835443038,
"grad_norm": 1.3029557466506958,
"learning_rate": 9.992040020983843e-05,
"loss": 0.8274240493774414,
"step": 1374
},
{
"epoch": 0.580590717299578,
"grad_norm": 1.4034144878387451,
"learning_rate": 9.991903992161746e-05,
"loss": 0.7758964896202087,
"step": 1376
},
{
"epoch": 0.5814345991561182,
"grad_norm": 1.2340021133422852,
"learning_rate": 9.991766811803271e-05,
"loss": 0.6571930050849915,
"step": 1378
},
{
"epoch": 0.5822784810126582,
"grad_norm": 1.3082842826843262,
"learning_rate": 9.991628479940061e-05,
"loss": 0.7381542921066284,
"step": 1380
},
{
"epoch": 0.5831223628691983,
"grad_norm": 1.8134801387786865,
"learning_rate": 9.991488996604025e-05,
"loss": 0.8081237077713013,
"step": 1382
},
{
"epoch": 0.5839662447257384,
"grad_norm": 1.4598309993743896,
"learning_rate": 9.991348361827343e-05,
"loss": 0.7761610746383667,
"step": 1384
},
{
"epoch": 0.5848101265822785,
"grad_norm": 1.2974225282669067,
"learning_rate": 9.991206575642453e-05,
"loss": 0.6872953176498413,
"step": 1386
},
{
"epoch": 0.5856540084388185,
"grad_norm": 1.24009370803833,
"learning_rate": 9.991063638082065e-05,
"loss": 0.7601345777511597,
"step": 1388
},
{
"epoch": 0.5864978902953587,
"grad_norm": 1.176713228225708,
"learning_rate": 9.99091954917915e-05,
"loss": 0.7138593792915344,
"step": 1390
},
{
"epoch": 0.5873417721518988,
"grad_norm": 1.1056525707244873,
"learning_rate": 9.990774308966949e-05,
"loss": 0.7730305194854736,
"step": 1392
},
{
"epoch": 0.5881856540084388,
"grad_norm": 1.382847547531128,
"learning_rate": 9.990627917478962e-05,
"loss": 0.7076689600944519,
"step": 1394
},
{
"epoch": 0.5890295358649789,
"grad_norm": 1.2507930994033813,
"learning_rate": 9.990480374748964e-05,
"loss": 0.7970513105392456,
"step": 1396
},
{
"epoch": 0.589873417721519,
"grad_norm": 1.2266724109649658,
"learning_rate": 9.990331680810987e-05,
"loss": 0.7906717658042908,
"step": 1398
},
{
"epoch": 0.5907172995780591,
"grad_norm": 1.299920916557312,
"learning_rate": 9.99018183569933e-05,
"loss": 0.853204607963562,
"step": 1400
},
{
"epoch": 0.5907172995780591,
"eval_loss": 0.8009664416313171,
"eval_runtime": 851.9417,
"eval_samples_per_second": 2.473,
"eval_steps_per_second": 2.473,
"step": 1400
},
{
"epoch": 0.5915611814345991,
"grad_norm": 1.2114863395690918,
"learning_rate": 9.990030839448564e-05,
"loss": 0.8140703439712524,
"step": 1402
},
{
"epoch": 0.5924050632911393,
"grad_norm": 1.3301794528961182,
"learning_rate": 9.989878692093518e-05,
"loss": 0.7471320629119873,
"step": 1404
},
{
"epoch": 0.5932489451476793,
"grad_norm": 1.2611899375915527,
"learning_rate": 9.98972539366929e-05,
"loss": 0.7307024002075195,
"step": 1406
},
{
"epoch": 0.5940928270042194,
"grad_norm": 1.1717802286148071,
"learning_rate": 9.989570944211244e-05,
"loss": 0.6843112111091614,
"step": 1408
},
{
"epoch": 0.5949367088607594,
"grad_norm": 1.3323513269424438,
"learning_rate": 9.989415343755006e-05,
"loss": 0.7025372385978699,
"step": 1410
},
{
"epoch": 0.5957805907172996,
"grad_norm": 1.4225109815597534,
"learning_rate": 9.989258592336473e-05,
"loss": 0.7792683839797974,
"step": 1412
},
{
"epoch": 0.5966244725738397,
"grad_norm": 1.2878522872924805,
"learning_rate": 9.989100689991804e-05,
"loss": 0.8328315019607544,
"step": 1414
},
{
"epoch": 0.5974683544303797,
"grad_norm": 1.2067214250564575,
"learning_rate": 9.988941636757421e-05,
"loss": 0.7700617909431458,
"step": 1416
},
{
"epoch": 0.5983122362869199,
"grad_norm": 1.1213195323944092,
"learning_rate": 9.988781432670019e-05,
"loss": 0.6872363090515137,
"step": 1418
},
{
"epoch": 0.5991561181434599,
"grad_norm": 1.3211694955825806,
"learning_rate": 9.98862007776655e-05,
"loss": 0.7184111475944519,
"step": 1420
},
{
"epoch": 0.6,
"grad_norm": 1.1916998624801636,
"learning_rate": 9.98845757208424e-05,
"loss": 0.8120859265327454,
"step": 1422
},
{
"epoch": 0.60084388185654,
"grad_norm": 1.2772804498672485,
"learning_rate": 9.988293915660572e-05,
"loss": 0.7586462497711182,
"step": 1424
},
{
"epoch": 0.6016877637130802,
"grad_norm": 1.4139106273651123,
"learning_rate": 9.988129108533299e-05,
"loss": 0.8175994157791138,
"step": 1426
},
{
"epoch": 0.6025316455696202,
"grad_norm": 1.4481157064437866,
"learning_rate": 9.987963150740439e-05,
"loss": 0.7662636041641235,
"step": 1428
},
{
"epoch": 0.6033755274261603,
"grad_norm": 1.6000999212265015,
"learning_rate": 9.987796042320277e-05,
"loss": 0.7477837800979614,
"step": 1430
},
{
"epoch": 0.6042194092827005,
"grad_norm": 1.26194429397583,
"learning_rate": 9.98762778331136e-05,
"loss": 0.7392798662185669,
"step": 1432
},
{
"epoch": 0.6050632911392405,
"grad_norm": 1.2370645999908447,
"learning_rate": 9.987458373752503e-05,
"loss": 0.7795998454093933,
"step": 1434
},
{
"epoch": 0.6059071729957806,
"grad_norm": 1.4908311367034912,
"learning_rate": 9.987287813682784e-05,
"loss": 0.7833777070045471,
"step": 1436
},
{
"epoch": 0.6067510548523207,
"grad_norm": 1.2918652296066284,
"learning_rate": 9.987116103141549e-05,
"loss": 0.7269768118858337,
"step": 1438
},
{
"epoch": 0.6075949367088608,
"grad_norm": 1.2170461416244507,
"learning_rate": 9.98694324216841e-05,
"loss": 0.7599279284477234,
"step": 1440
},
{
"epoch": 0.6084388185654008,
"grad_norm": 1.4373505115509033,
"learning_rate": 9.98676923080324e-05,
"loss": 0.8256514668464661,
"step": 1442
},
{
"epoch": 0.6092827004219409,
"grad_norm": 1.3523614406585693,
"learning_rate": 9.986594069086181e-05,
"loss": 0.8462428450584412,
"step": 1444
},
{
"epoch": 0.610126582278481,
"grad_norm": 1.5131851434707642,
"learning_rate": 9.98641775705764e-05,
"loss": 0.8402239084243774,
"step": 1446
},
{
"epoch": 0.6109704641350211,
"grad_norm": 1.3518229722976685,
"learning_rate": 9.98624029475829e-05,
"loss": 0.7585759162902832,
"step": 1448
},
{
"epoch": 0.6118143459915611,
"grad_norm": 1.3403998613357544,
"learning_rate": 9.986061682229064e-05,
"loss": 0.773881733417511,
"step": 1450
},
{
"epoch": 0.6126582278481013,
"grad_norm": 1.1835366487503052,
"learning_rate": 9.985881919511168e-05,
"loss": 0.6770316958427429,
"step": 1452
},
{
"epoch": 0.6135021097046414,
"grad_norm": 1.1825730800628662,
"learning_rate": 9.985701006646069e-05,
"loss": 0.7081645727157593,
"step": 1454
},
{
"epoch": 0.6143459915611814,
"grad_norm": 1.378994345664978,
"learning_rate": 9.9855189436755e-05,
"loss": 0.7750917673110962,
"step": 1456
},
{
"epoch": 0.6151898734177215,
"grad_norm": 1.4208749532699585,
"learning_rate": 9.985335730641458e-05,
"loss": 0.7517801523208618,
"step": 1458
},
{
"epoch": 0.6160337552742616,
"grad_norm": 1.1413639783859253,
"learning_rate": 9.98515136758621e-05,
"loss": 0.712832510471344,
"step": 1460
},
{
"epoch": 0.6168776371308017,
"grad_norm": 1.3949562311172485,
"learning_rate": 9.984965854552283e-05,
"loss": 0.7884142994880676,
"step": 1462
},
{
"epoch": 0.6177215189873417,
"grad_norm": 1.4057096242904663,
"learning_rate": 9.984779191582471e-05,
"loss": 0.796623706817627,
"step": 1464
},
{
"epoch": 0.6185654008438819,
"grad_norm": 1.1681689023971558,
"learning_rate": 9.984591378719834e-05,
"loss": 0.7862933874130249,
"step": 1466
},
{
"epoch": 0.619409282700422,
"grad_norm": 1.2585291862487793,
"learning_rate": 9.984402416007696e-05,
"loss": 0.7889828681945801,
"step": 1468
},
{
"epoch": 0.620253164556962,
"grad_norm": 1.2598098516464233,
"learning_rate": 9.984212303489649e-05,
"loss": 0.7375997304916382,
"step": 1470
},
{
"epoch": 0.6210970464135022,
"grad_norm": 1.4628467559814453,
"learning_rate": 9.984021041209547e-05,
"loss": 0.7839564085006714,
"step": 1472
},
{
"epoch": 0.6219409282700422,
"grad_norm": 1.3606770038604736,
"learning_rate": 9.983828629211511e-05,
"loss": 0.7566051483154297,
"step": 1474
},
{
"epoch": 0.6227848101265823,
"grad_norm": 1.182644248008728,
"learning_rate": 9.983635067539927e-05,
"loss": 0.6638457179069519,
"step": 1476
},
{
"epoch": 0.6236286919831223,
"grad_norm": 1.5617793798446655,
"learning_rate": 9.983440356239445e-05,
"loss": 0.8227225542068481,
"step": 1478
},
{
"epoch": 0.6244725738396625,
"grad_norm": 1.2290058135986328,
"learning_rate": 9.98324449535498e-05,
"loss": 0.7086431980133057,
"step": 1480
},
{
"epoch": 0.6253164556962025,
"grad_norm": 1.3822678327560425,
"learning_rate": 9.983047484931716e-05,
"loss": 0.8076596856117249,
"step": 1482
},
{
"epoch": 0.6261603375527426,
"grad_norm": 1.163699746131897,
"learning_rate": 9.982849325015098e-05,
"loss": 0.7514539361000061,
"step": 1484
},
{
"epoch": 0.6270042194092827,
"grad_norm": 1.2635631561279297,
"learning_rate": 9.982650015650839e-05,
"loss": 0.7298142910003662,
"step": 1486
},
{
"epoch": 0.6278481012658228,
"grad_norm": 1.3135387897491455,
"learning_rate": 9.982449556884914e-05,
"loss": 0.8092831373214722,
"step": 1488
},
{
"epoch": 0.6286919831223629,
"grad_norm": 1.3577877283096313,
"learning_rate": 9.982247948763567e-05,
"loss": 0.7934147715568542,
"step": 1490
},
{
"epoch": 0.6295358649789029,
"grad_norm": 1.1482092142105103,
"learning_rate": 9.982045191333304e-05,
"loss": 0.789363443851471,
"step": 1492
},
{
"epoch": 0.6303797468354431,
"grad_norm": 1.189771056175232,
"learning_rate": 9.981841284640895e-05,
"loss": 0.7458413243293762,
"step": 1494
},
{
"epoch": 0.6312236286919831,
"grad_norm": 1.2815836668014526,
"learning_rate": 9.981636228733383e-05,
"loss": 0.7299918532371521,
"step": 1496
},
{
"epoch": 0.6320675105485232,
"grad_norm": 1.36761474609375,
"learning_rate": 9.981430023658068e-05,
"loss": 0.7545169591903687,
"step": 1498
},
{
"epoch": 0.6329113924050633,
"grad_norm": 1.2594345808029175,
"learning_rate": 9.981222669462513e-05,
"loss": 0.7358481884002686,
"step": 1500
},
{
"epoch": 0.6329113924050633,
"eval_loss": 0.7896141409873962,
"eval_runtime": 865.9069,
"eval_samples_per_second": 2.433,
"eval_steps_per_second": 2.433,
"step": 1500
},
{
"epoch": 0.6337552742616034,
"grad_norm": 3.6419246196746826,
"learning_rate": 9.981014166194556e-05,
"loss": 0.8253764510154724,
"step": 1502
},
{
"epoch": 0.6345991561181434,
"grad_norm": 1.7333487272262573,
"learning_rate": 9.980804513902294e-05,
"loss": 0.8254884481430054,
"step": 1504
},
{
"epoch": 0.6354430379746835,
"grad_norm": 1.1998231410980225,
"learning_rate": 9.980593712634088e-05,
"loss": 0.7833738327026367,
"step": 1506
},
{
"epoch": 0.6362869198312237,
"grad_norm": 1.347011685371399,
"learning_rate": 9.980381762438566e-05,
"loss": 0.753408670425415,
"step": 1508
},
{
"epoch": 0.6371308016877637,
"grad_norm": 1.1759053468704224,
"learning_rate": 9.980168663364622e-05,
"loss": 0.7867791652679443,
"step": 1510
},
{
"epoch": 0.6379746835443038,
"grad_norm": 1.3113552331924438,
"learning_rate": 9.979954415461412e-05,
"loss": 0.6753612160682678,
"step": 1512
},
{
"epoch": 0.6388185654008439,
"grad_norm": 1.3258320093154907,
"learning_rate": 9.979739018778362e-05,
"loss": 0.750367283821106,
"step": 1514
},
{
"epoch": 0.639662447257384,
"grad_norm": 1.175145149230957,
"learning_rate": 9.979522473365157e-05,
"loss": 0.7505861520767212,
"step": 1516
},
{
"epoch": 0.640506329113924,
"grad_norm": 1.2276148796081543,
"learning_rate": 9.979304779271752e-05,
"loss": 0.7429317831993103,
"step": 1518
},
{
"epoch": 0.6413502109704642,
"grad_norm": 1.3262875080108643,
"learning_rate": 9.979085936548362e-05,
"loss": 0.786217212677002,
"step": 1520
},
{
"epoch": 0.6421940928270042,
"grad_norm": 1.3067121505737305,
"learning_rate": 9.978865945245473e-05,
"loss": 0.6942036151885986,
"step": 1522
},
{
"epoch": 0.6430379746835443,
"grad_norm": 1.5352400541305542,
"learning_rate": 9.978644805413832e-05,
"loss": 0.8281817436218262,
"step": 1524
},
{
"epoch": 0.6438818565400843,
"grad_norm": 1.2848507165908813,
"learning_rate": 9.97842251710445e-05,
"loss": 0.8110972046852112,
"step": 1526
},
{
"epoch": 0.6447257383966245,
"grad_norm": 1.352196216583252,
"learning_rate": 9.978199080368607e-05,
"loss": 0.7354730367660522,
"step": 1528
},
{
"epoch": 0.6455696202531646,
"grad_norm": 1.2427687644958496,
"learning_rate": 9.977974495257842e-05,
"loss": 0.7915583848953247,
"step": 1530
},
{
"epoch": 0.6464135021097046,
"grad_norm": 1.3163504600524902,
"learning_rate": 9.977748761823967e-05,
"loss": 0.7400109171867371,
"step": 1532
},
{
"epoch": 0.6472573839662448,
"grad_norm": 1.2496893405914307,
"learning_rate": 9.977521880119049e-05,
"loss": 0.7104899287223816,
"step": 1534
},
{
"epoch": 0.6481012658227848,
"grad_norm": 1.0907179117202759,
"learning_rate": 9.97729385019543e-05,
"loss": 0.8074463605880737,
"step": 1536
},
{
"epoch": 0.6489451476793249,
"grad_norm": 1.2323429584503174,
"learning_rate": 9.977064672105712e-05,
"loss": 0.7770540714263916,
"step": 1538
},
{
"epoch": 0.6497890295358649,
"grad_norm": 1.224428415298462,
"learning_rate": 9.976834345902759e-05,
"loss": 0.806465208530426,
"step": 1540
},
{
"epoch": 0.6506329113924051,
"grad_norm": 1.3529564142227173,
"learning_rate": 9.976602871639705e-05,
"loss": 0.7306749224662781,
"step": 1542
},
{
"epoch": 0.6514767932489451,
"grad_norm": 1.1770031452178955,
"learning_rate": 9.976370249369946e-05,
"loss": 0.783933699131012,
"step": 1544
},
{
"epoch": 0.6523206751054852,
"grad_norm": 1.205283522605896,
"learning_rate": 9.976136479147144e-05,
"loss": 0.6937689185142517,
"step": 1546
},
{
"epoch": 0.6531645569620254,
"grad_norm": 1.2329360246658325,
"learning_rate": 9.975901561025223e-05,
"loss": 0.8041763305664062,
"step": 1548
},
{
"epoch": 0.6540084388185654,
"grad_norm": 1.499973177909851,
"learning_rate": 9.975665495058377e-05,
"loss": 0.750390887260437,
"step": 1550
},
{
"epoch": 0.6548523206751055,
"grad_norm": 1.31832754611969,
"learning_rate": 9.975428281301061e-05,
"loss": 0.7658298015594482,
"step": 1552
},
{
"epoch": 0.6556962025316456,
"grad_norm": 1.3998414278030396,
"learning_rate": 9.975189919807994e-05,
"loss": 0.8651264905929565,
"step": 1554
},
{
"epoch": 0.6565400843881857,
"grad_norm": 1.2002551555633545,
"learning_rate": 9.974950410634164e-05,
"loss": 0.6776561141014099,
"step": 1556
},
{
"epoch": 0.6573839662447257,
"grad_norm": 1.1986602544784546,
"learning_rate": 9.97470975383482e-05,
"loss": 0.8159130811691284,
"step": 1558
},
{
"epoch": 0.6582278481012658,
"grad_norm": 1.3583602905273438,
"learning_rate": 9.974467949465477e-05,
"loss": 0.7528039216995239,
"step": 1560
},
{
"epoch": 0.6590717299578059,
"grad_norm": 1.4176239967346191,
"learning_rate": 9.974224997581913e-05,
"loss": 0.6970920562744141,
"step": 1562
},
{
"epoch": 0.659915611814346,
"grad_norm": 1.3899401426315308,
"learning_rate": 9.973980898240177e-05,
"loss": 0.7718377113342285,
"step": 1564
},
{
"epoch": 0.660759493670886,
"grad_norm": 1.222413182258606,
"learning_rate": 9.973735651496571e-05,
"loss": 0.7346280217170715,
"step": 1566
},
{
"epoch": 0.6616033755274262,
"grad_norm": 1.3750087022781372,
"learning_rate": 9.973489257407676e-05,
"loss": 0.7923588156700134,
"step": 1568
},
{
"epoch": 0.6624472573839663,
"grad_norm": 1.24547278881073,
"learning_rate": 9.973241716030325e-05,
"loss": 0.8258910179138184,
"step": 1570
},
{
"epoch": 0.6632911392405063,
"grad_norm": 1.2464141845703125,
"learning_rate": 9.972993027421624e-05,
"loss": 0.7869232296943665,
"step": 1572
},
{
"epoch": 0.6641350210970464,
"grad_norm": 1.3088903427124023,
"learning_rate": 9.972743191638939e-05,
"loss": 0.8144775629043579,
"step": 1574
},
{
"epoch": 0.6649789029535865,
"grad_norm": 1.2252418994903564,
"learning_rate": 9.972492208739903e-05,
"loss": 0.7432073950767517,
"step": 1576
},
{
"epoch": 0.6658227848101266,
"grad_norm": 1.2303717136383057,
"learning_rate": 9.972240078782413e-05,
"loss": 0.7386854887008667,
"step": 1578
},
{
"epoch": 0.6666666666666666,
"grad_norm": 1.0226294994354248,
"learning_rate": 9.971986801824631e-05,
"loss": 0.7127882838249207,
"step": 1580
},
{
"epoch": 0.6675105485232068,
"grad_norm": 1.362332820892334,
"learning_rate": 9.971732377924982e-05,
"loss": 0.7557716369628906,
"step": 1582
},
{
"epoch": 0.6683544303797468,
"grad_norm": 1.4436695575714111,
"learning_rate": 9.971476807142158e-05,
"loss": 0.7832611203193665,
"step": 1584
},
{
"epoch": 0.6691983122362869,
"grad_norm": 1.276695966720581,
"learning_rate": 9.971220089535113e-05,
"loss": 0.8190197944641113,
"step": 1586
},
{
"epoch": 0.6700421940928271,
"grad_norm": 1.2413527965545654,
"learning_rate": 9.970962225163069e-05,
"loss": 0.747222363948822,
"step": 1588
},
{
"epoch": 0.6708860759493671,
"grad_norm": 1.3395767211914062,
"learning_rate": 9.970703214085507e-05,
"loss": 0.7846449017524719,
"step": 1590
},
{
"epoch": 0.6717299578059072,
"grad_norm": 1.291327953338623,
"learning_rate": 9.970443056362178e-05,
"loss": 0.8160232901573181,
"step": 1592
},
{
"epoch": 0.6725738396624472,
"grad_norm": 1.3139684200286865,
"learning_rate": 9.970181752053097e-05,
"loss": 0.7413806915283203,
"step": 1594
},
{
"epoch": 0.6734177215189874,
"grad_norm": 1.3170921802520752,
"learning_rate": 9.969919301218537e-05,
"loss": 0.7637304067611694,
"step": 1596
},
{
"epoch": 0.6742616033755274,
"grad_norm": 1.3349758386611938,
"learning_rate": 9.969655703919044e-05,
"loss": 0.7823366522789001,
"step": 1598
},
{
"epoch": 0.6751054852320675,
"grad_norm": 1.2151578664779663,
"learning_rate": 9.969390960215425e-05,
"loss": 0.6587790846824646,
"step": 1600
},
{
"epoch": 0.6751054852320675,
"eval_loss": 0.7836604714393616,
"eval_runtime": 861.5352,
"eval_samples_per_second": 2.446,
"eval_steps_per_second": 2.446,
"step": 1600
},
{
"epoch": 0.6759493670886076,
"grad_norm": 1.2541478872299194,
"learning_rate": 9.96912507016875e-05,
"loss": 0.7314544320106506,
"step": 1602
},
{
"epoch": 0.6767932489451477,
"grad_norm": 1.091790795326233,
"learning_rate": 9.968858033840357e-05,
"loss": 0.702468752861023,
"step": 1604
},
{
"epoch": 0.6776371308016877,
"grad_norm": 1.36745285987854,
"learning_rate": 9.968589851291841e-05,
"loss": 0.7691897749900818,
"step": 1606
},
{
"epoch": 0.6784810126582278,
"grad_norm": 1.1325993537902832,
"learning_rate": 9.968320522585072e-05,
"loss": 0.7422228455543518,
"step": 1608
},
{
"epoch": 0.679324894514768,
"grad_norm": 1.1015450954437256,
"learning_rate": 9.968050047782176e-05,
"loss": 0.677532434463501,
"step": 1610
},
{
"epoch": 0.680168776371308,
"grad_norm": 1.2216695547103882,
"learning_rate": 9.967778426945548e-05,
"loss": 0.7973438501358032,
"step": 1612
},
{
"epoch": 0.6810126582278481,
"grad_norm": 1.159395456314087,
"learning_rate": 9.967505660137843e-05,
"loss": 0.6742876172065735,
"step": 1614
},
{
"epoch": 0.6818565400843882,
"grad_norm": 1.404433250427246,
"learning_rate": 9.967231747421988e-05,
"loss": 0.7592008709907532,
"step": 1616
},
{
"epoch": 0.6827004219409283,
"grad_norm": 1.2489168643951416,
"learning_rate": 9.966956688861164e-05,
"loss": 0.7565826177597046,
"step": 1618
},
{
"epoch": 0.6835443037974683,
"grad_norm": 1.2960615158081055,
"learning_rate": 9.966680484518825e-05,
"loss": 0.7694597840309143,
"step": 1620
},
{
"epoch": 0.6843881856540084,
"grad_norm": 1.3598436117172241,
"learning_rate": 9.966403134458685e-05,
"loss": 0.8392959833145142,
"step": 1622
},
{
"epoch": 0.6852320675105485,
"grad_norm": 1.258065938949585,
"learning_rate": 9.966124638744722e-05,
"loss": 0.8014217019081116,
"step": 1624
},
{
"epoch": 0.6860759493670886,
"grad_norm": 1.3132309913635254,
"learning_rate": 9.965844997441184e-05,
"loss": 0.7029755711555481,
"step": 1626
},
{
"epoch": 0.6869198312236287,
"grad_norm": 1.1204946041107178,
"learning_rate": 9.965564210612575e-05,
"loss": 0.7213528752326965,
"step": 1628
},
{
"epoch": 0.6877637130801688,
"grad_norm": 1.037251591682434,
"learning_rate": 9.965282278323667e-05,
"loss": 0.6895437240600586,
"step": 1630
},
{
"epoch": 0.6886075949367089,
"grad_norm": 1.093807578086853,
"learning_rate": 9.964999200639498e-05,
"loss": 0.8035063743591309,
"step": 1632
},
{
"epoch": 0.6894514767932489,
"grad_norm": 1.367386817932129,
"learning_rate": 9.964714977625367e-05,
"loss": 0.6191847920417786,
"step": 1634
},
{
"epoch": 0.6902953586497891,
"grad_norm": 1.3160961866378784,
"learning_rate": 9.964429609346841e-05,
"loss": 0.7469727993011475,
"step": 1636
},
{
"epoch": 0.6911392405063291,
"grad_norm": 1.3736863136291504,
"learning_rate": 9.964143095869748e-05,
"loss": 0.7987836599349976,
"step": 1638
},
{
"epoch": 0.6919831223628692,
"grad_norm": 1.323209524154663,
"learning_rate": 9.963855437260182e-05,
"loss": 0.7901709675788879,
"step": 1640
},
{
"epoch": 0.6928270042194092,
"grad_norm": 1.3943440914154053,
"learning_rate": 9.963566633584496e-05,
"loss": 0.7889530658721924,
"step": 1642
},
{
"epoch": 0.6936708860759494,
"grad_norm": 1.3699116706848145,
"learning_rate": 9.963276684909317e-05,
"loss": 0.756829559803009,
"step": 1644
},
{
"epoch": 0.6945147679324895,
"grad_norm": 1.4216378927230835,
"learning_rate": 9.962985591301529e-05,
"loss": 0.7840303182601929,
"step": 1646
},
{
"epoch": 0.6953586497890295,
"grad_norm": 1.2231985330581665,
"learning_rate": 9.962693352828279e-05,
"loss": 0.700393557548523,
"step": 1648
},
{
"epoch": 0.6962025316455697,
"grad_norm": 1.3568313121795654,
"learning_rate": 9.962399969556983e-05,
"loss": 0.7010306715965271,
"step": 1650
},
{
"epoch": 0.6970464135021097,
"grad_norm": 1.1662907600402832,
"learning_rate": 9.96210544155532e-05,
"loss": 0.6935506463050842,
"step": 1652
},
{
"epoch": 0.6978902953586498,
"grad_norm": 1.3066680431365967,
"learning_rate": 9.96180976889123e-05,
"loss": 0.7913851141929626,
"step": 1654
},
{
"epoch": 0.6987341772151898,
"grad_norm": 1.2268375158309937,
"learning_rate": 9.961512951632918e-05,
"loss": 0.764849066734314,
"step": 1656
},
{
"epoch": 0.69957805907173,
"grad_norm": 1.4509469270706177,
"learning_rate": 9.96121498984886e-05,
"loss": 0.7544103860855103,
"step": 1658
},
{
"epoch": 0.70042194092827,
"grad_norm": 1.200772762298584,
"learning_rate": 9.960915883607782e-05,
"loss": 0.7766591310501099,
"step": 1660
},
{
"epoch": 0.7012658227848101,
"grad_norm": 1.3825311660766602,
"learning_rate": 9.960615632978687e-05,
"loss": 0.7433559894561768,
"step": 1662
},
{
"epoch": 0.7021097046413503,
"grad_norm": 1.3197243213653564,
"learning_rate": 9.960314238030836e-05,
"loss": 0.7770103812217712,
"step": 1664
},
{
"epoch": 0.7029535864978903,
"grad_norm": 1.515163779258728,
"learning_rate": 9.960011698833755e-05,
"loss": 0.8597216606140137,
"step": 1666
},
{
"epoch": 0.7037974683544304,
"grad_norm": 1.2329891920089722,
"learning_rate": 9.959708015457234e-05,
"loss": 0.7630532383918762,
"step": 1668
},
{
"epoch": 0.7046413502109705,
"grad_norm": 1.0592037439346313,
"learning_rate": 9.959403187971327e-05,
"loss": 0.7299806475639343,
"step": 1670
},
{
"epoch": 0.7054852320675106,
"grad_norm": 2.2717394828796387,
"learning_rate": 9.959097216446351e-05,
"loss": 0.6999854445457458,
"step": 1672
},
{
"epoch": 0.7063291139240506,
"grad_norm": 1.1552131175994873,
"learning_rate": 9.958790100952889e-05,
"loss": 0.8403060436248779,
"step": 1674
},
{
"epoch": 0.7071729957805907,
"grad_norm": 1.290488839149475,
"learning_rate": 9.958481841561787e-05,
"loss": 0.7729134559631348,
"step": 1676
},
{
"epoch": 0.7080168776371308,
"grad_norm": 1.1913278102874756,
"learning_rate": 9.958172438344152e-05,
"loss": 0.7100697755813599,
"step": 1678
},
{
"epoch": 0.7088607594936709,
"grad_norm": 1.2355852127075195,
"learning_rate": 9.957861891371359e-05,
"loss": 0.7014795541763306,
"step": 1680
},
{
"epoch": 0.7097046413502109,
"grad_norm": 1.258705496788025,
"learning_rate": 9.957550200715044e-05,
"loss": 0.8131424784660339,
"step": 1682
},
{
"epoch": 0.7105485232067511,
"grad_norm": 1.1102997064590454,
"learning_rate": 9.957237366447112e-05,
"loss": 0.6842480301856995,
"step": 1684
},
{
"epoch": 0.7113924050632912,
"grad_norm": 1.4466290473937988,
"learning_rate": 9.956923388639724e-05,
"loss": 0.6730120182037354,
"step": 1686
},
{
"epoch": 0.7122362869198312,
"grad_norm": 1.261152982711792,
"learning_rate": 9.956608267365311e-05,
"loss": 0.7109374403953552,
"step": 1688
},
{
"epoch": 0.7130801687763713,
"grad_norm": 1.4070630073547363,
"learning_rate": 9.956292002696562e-05,
"loss": 0.7545008063316345,
"step": 1690
},
{
"epoch": 0.7139240506329114,
"grad_norm": 1.2532793283462524,
"learning_rate": 9.955974594706436e-05,
"loss": 0.7892587184906006,
"step": 1692
},
{
"epoch": 0.7147679324894515,
"grad_norm": 1.1180293560028076,
"learning_rate": 9.955656043468153e-05,
"loss": 0.7348554134368896,
"step": 1694
},
{
"epoch": 0.7156118143459915,
"grad_norm": 1.333054542541504,
"learning_rate": 9.955336349055195e-05,
"loss": 0.8207674026489258,
"step": 1696
},
{
"epoch": 0.7164556962025317,
"grad_norm": 1.1373547315597534,
"learning_rate": 9.95501551154131e-05,
"loss": 0.7226691842079163,
"step": 1698
},
{
"epoch": 0.7172995780590717,
"grad_norm": 1.2342052459716797,
"learning_rate": 9.95469353100051e-05,
"loss": 0.726982831954956,
"step": 1700
},
{
"epoch": 0.7172995780590717,
"eval_loss": 0.7783148884773254,
"eval_runtime": 846.1986,
"eval_samples_per_second": 2.49,
"eval_steps_per_second": 2.49,
"step": 1700
},
{
"epoch": 0.7181434599156118,
"grad_norm": 1.3781483173370361,
"learning_rate": 9.95437040750707e-05,
"loss": 0.7623077034950256,
"step": 1702
},
{
"epoch": 0.7189873417721518,
"grad_norm": 1.301440715789795,
"learning_rate": 9.954046141135526e-05,
"loss": 0.7421616315841675,
"step": 1704
},
{
"epoch": 0.719831223628692,
"grad_norm": 1.1375854015350342,
"learning_rate": 9.953720731960683e-05,
"loss": 0.685523509979248,
"step": 1706
},
{
"epoch": 0.7206751054852321,
"grad_norm": 1.2014397382736206,
"learning_rate": 9.953394180057604e-05,
"loss": 0.756073534488678,
"step": 1708
},
{
"epoch": 0.7215189873417721,
"grad_norm": 1.232802152633667,
"learning_rate": 9.95306648550162e-05,
"loss": 0.7364522814750671,
"step": 1710
},
{
"epoch": 0.7223628691983123,
"grad_norm": 1.4462472200393677,
"learning_rate": 9.952737648368323e-05,
"loss": 0.7073688507080078,
"step": 1712
},
{
"epoch": 0.7232067510548523,
"grad_norm": 1.123523473739624,
"learning_rate": 9.95240766873357e-05,
"loss": 0.7147064805030823,
"step": 1714
},
{
"epoch": 0.7240506329113924,
"grad_norm": 1.4111510515213013,
"learning_rate": 9.95207654667348e-05,
"loss": 0.7108398079872131,
"step": 1716
},
{
"epoch": 0.7248945147679325,
"grad_norm": 1.2785903215408325,
"learning_rate": 9.951744282264437e-05,
"loss": 0.7080079317092896,
"step": 1718
},
{
"epoch": 0.7257383966244726,
"grad_norm": 1.1361653804779053,
"learning_rate": 9.951410875583089e-05,
"loss": 0.7396624684333801,
"step": 1720
},
{
"epoch": 0.7265822784810126,
"grad_norm": 1.0762585401535034,
"learning_rate": 9.951076326706346e-05,
"loss": 0.7724334597587585,
"step": 1722
},
{
"epoch": 0.7274261603375527,
"grad_norm": 1.3104428052902222,
"learning_rate": 9.950740635711379e-05,
"loss": 0.7311923503875732,
"step": 1724
},
{
"epoch": 0.7282700421940929,
"grad_norm": 1.1291942596435547,
"learning_rate": 9.95040380267563e-05,
"loss": 0.6878296732902527,
"step": 1726
},
{
"epoch": 0.7291139240506329,
"grad_norm": 1.5171746015548706,
"learning_rate": 9.9500658276768e-05,
"loss": 0.7410538196563721,
"step": 1728
},
{
"epoch": 0.729957805907173,
"grad_norm": 1.0966423749923706,
"learning_rate": 9.949726710792848e-05,
"loss": 0.6953532695770264,
"step": 1730
},
{
"epoch": 0.7308016877637131,
"grad_norm": 1.2436997890472412,
"learning_rate": 9.949386452102007e-05,
"loss": 0.6679023504257202,
"step": 1732
},
{
"epoch": 0.7316455696202532,
"grad_norm": 1.1364835500717163,
"learning_rate": 9.949045051682766e-05,
"loss": 0.8046789765357971,
"step": 1734
},
{
"epoch": 0.7324894514767932,
"grad_norm": 1.296648383140564,
"learning_rate": 9.948702509613878e-05,
"loss": 0.7322937846183777,
"step": 1736
},
{
"epoch": 0.7333333333333333,
"grad_norm": 1.2355525493621826,
"learning_rate": 9.948358825974365e-05,
"loss": 0.7442626357078552,
"step": 1738
},
{
"epoch": 0.7341772151898734,
"grad_norm": 1.1634451150894165,
"learning_rate": 9.948014000843504e-05,
"loss": 0.7231078743934631,
"step": 1740
},
{
"epoch": 0.7350210970464135,
"grad_norm": 1.1500129699707031,
"learning_rate": 9.947668034300843e-05,
"loss": 0.6436833143234253,
"step": 1742
},
{
"epoch": 0.7358649789029535,
"grad_norm": 1.3881278038024902,
"learning_rate": 9.947320926426189e-05,
"loss": 0.8170580863952637,
"step": 1744
},
{
"epoch": 0.7367088607594937,
"grad_norm": 1.3479492664337158,
"learning_rate": 9.94697267729961e-05,
"loss": 0.7830947041511536,
"step": 1746
},
{
"epoch": 0.7375527426160338,
"grad_norm": 1.0187158584594727,
"learning_rate": 9.946623287001444e-05,
"loss": 0.7358533143997192,
"step": 1748
},
{
"epoch": 0.7383966244725738,
"grad_norm": 1.2575689554214478,
"learning_rate": 9.946272755612287e-05,
"loss": 0.7279790639877319,
"step": 1750
},
{
"epoch": 0.739240506329114,
"grad_norm": 1.2045027017593384,
"learning_rate": 9.945921083213002e-05,
"loss": 0.6953092217445374,
"step": 1752
},
{
"epoch": 0.740084388185654,
"grad_norm": 1.3994466066360474,
"learning_rate": 9.945568269884708e-05,
"loss": 0.8094141483306885,
"step": 1754
},
{
"epoch": 0.7409282700421941,
"grad_norm": 1.2892286777496338,
"learning_rate": 9.945214315708797e-05,
"loss": 0.6979201436042786,
"step": 1756
},
{
"epoch": 0.7417721518987341,
"grad_norm": 1.2006971836090088,
"learning_rate": 9.944859220766919e-05,
"loss": 0.6810774803161621,
"step": 1758
},
{
"epoch": 0.7426160337552743,
"grad_norm": 1.055793285369873,
"learning_rate": 9.944502985140986e-05,
"loss": 0.6796762347221375,
"step": 1760
},
{
"epoch": 0.7434599156118143,
"grad_norm": 1.174714207649231,
"learning_rate": 9.944145608913175e-05,
"loss": 0.7954121828079224,
"step": 1762
},
{
"epoch": 0.7443037974683544,
"grad_norm": 1.1638222932815552,
"learning_rate": 9.943787092165926e-05,
"loss": 0.6939491629600525,
"step": 1764
},
{
"epoch": 0.7451476793248946,
"grad_norm": 1.1861820220947266,
"learning_rate": 9.943427434981942e-05,
"loss": 0.8112956285476685,
"step": 1766
},
{
"epoch": 0.7459915611814346,
"grad_norm": 0.9667421579360962,
"learning_rate": 9.943066637444189e-05,
"loss": 0.6812481880187988,
"step": 1768
},
{
"epoch": 0.7468354430379747,
"grad_norm": 1.2826191186904907,
"learning_rate": 9.942704699635898e-05,
"loss": 0.7598370313644409,
"step": 1770
},
{
"epoch": 0.7476793248945147,
"grad_norm": 1.2257909774780273,
"learning_rate": 9.942341621640558e-05,
"loss": 0.7118877172470093,
"step": 1772
},
{
"epoch": 0.7485232067510549,
"grad_norm": 1.5224615335464478,
"learning_rate": 9.941977403541925e-05,
"loss": 0.8037024736404419,
"step": 1774
},
{
"epoch": 0.7493670886075949,
"grad_norm": 1.188689947128296,
"learning_rate": 9.941612045424018e-05,
"loss": 0.6795828938484192,
"step": 1776
},
{
"epoch": 0.750210970464135,
"grad_norm": 1.0685369968414307,
"learning_rate": 9.941245547371116e-05,
"loss": 0.6934568881988525,
"step": 1778
},
{
"epoch": 0.7510548523206751,
"grad_norm": 1.1643654108047485,
"learning_rate": 9.940877909467767e-05,
"loss": 0.6883851289749146,
"step": 1780
},
{
"epoch": 0.7518987341772152,
"grad_norm": 1.15621018409729,
"learning_rate": 9.940509131798775e-05,
"loss": 0.8284637928009033,
"step": 1782
},
{
"epoch": 0.7527426160337553,
"grad_norm": 1.1946302652359009,
"learning_rate": 9.94013921444921e-05,
"loss": 0.7108310461044312,
"step": 1784
},
{
"epoch": 0.7535864978902953,
"grad_norm": 1.1536555290222168,
"learning_rate": 9.939768157504404e-05,
"loss": 0.7166154384613037,
"step": 1786
},
{
"epoch": 0.7544303797468355,
"grad_norm": 1.3184611797332764,
"learning_rate": 9.939395961049956e-05,
"loss": 0.7774572372436523,
"step": 1788
},
{
"epoch": 0.7552742616033755,
"grad_norm": 1.0782374143600464,
"learning_rate": 9.939022625171723e-05,
"loss": 0.7386471033096313,
"step": 1790
},
{
"epoch": 0.7561181434599156,
"grad_norm": 1.1616696119308472,
"learning_rate": 9.938648149955824e-05,
"loss": 0.6495215892791748,
"step": 1792
},
{
"epoch": 0.7569620253164557,
"grad_norm": 1.1715892553329468,
"learning_rate": 9.938272535488647e-05,
"loss": 0.7733646631240845,
"step": 1794
},
{
"epoch": 0.7578059071729958,
"grad_norm": 1.203466773033142,
"learning_rate": 9.937895781856838e-05,
"loss": 0.7354782223701477,
"step": 1796
},
{
"epoch": 0.7586497890295358,
"grad_norm": 1.246559977531433,
"learning_rate": 9.937517889147305e-05,
"loss": 0.823226273059845,
"step": 1798
},
{
"epoch": 0.759493670886076,
"grad_norm": 0.9968833923339844,
"learning_rate": 9.937138857447221e-05,
"loss": 0.6221681833267212,
"step": 1800
},
{
"epoch": 0.759493670886076,
"eval_loss": 0.7719914317131042,
"eval_runtime": 853.1943,
"eval_samples_per_second": 2.47,
"eval_steps_per_second": 2.47,
"step": 1800
},
{
"epoch": 0.760337552742616,
"grad_norm": 1.5454338788986206,
"learning_rate": 9.936758686844024e-05,
"loss": 0.7799059152603149,
"step": 1802
},
{
"epoch": 0.7611814345991561,
"grad_norm": 1.1954455375671387,
"learning_rate": 9.936377377425409e-05,
"loss": 0.653838038444519,
"step": 1804
},
{
"epoch": 0.7620253164556962,
"grad_norm": 1.2538350820541382,
"learning_rate": 9.935994929279339e-05,
"loss": 0.7046942710876465,
"step": 1806
},
{
"epoch": 0.7628691983122363,
"grad_norm": 1.2358729839324951,
"learning_rate": 9.935611342494035e-05,
"loss": 0.7821131348609924,
"step": 1808
},
{
"epoch": 0.7637130801687764,
"grad_norm": 1.2401310205459595,
"learning_rate": 9.935226617157986e-05,
"loss": 0.7594596147537231,
"step": 1810
},
{
"epoch": 0.7645569620253164,
"grad_norm": 1.3197205066680908,
"learning_rate": 9.934840753359938e-05,
"loss": 0.7512493133544922,
"step": 1812
},
{
"epoch": 0.7654008438818566,
"grad_norm": 1.2482305765151978,
"learning_rate": 9.934453751188903e-05,
"loss": 0.6953311562538147,
"step": 1814
},
{
"epoch": 0.7662447257383966,
"grad_norm": 1.5995157957077026,
"learning_rate": 9.934065610734157e-05,
"loss": 0.7699819803237915,
"step": 1816
},
{
"epoch": 0.7670886075949367,
"grad_norm": 1.2414922714233398,
"learning_rate": 9.933676332085235e-05,
"loss": 0.6532001495361328,
"step": 1818
},
{
"epoch": 0.7679324894514767,
"grad_norm": 1.2274713516235352,
"learning_rate": 9.933285915331937e-05,
"loss": 0.7716373801231384,
"step": 1820
},
{
"epoch": 0.7687763713080169,
"grad_norm": 1.2894618511199951,
"learning_rate": 9.932894360564322e-05,
"loss": 0.7002654671669006,
"step": 1822
},
{
"epoch": 0.769620253164557,
"grad_norm": 1.10796320438385,
"learning_rate": 9.932501667872718e-05,
"loss": 0.7970587015151978,
"step": 1824
},
{
"epoch": 0.770464135021097,
"grad_norm": 1.2393653392791748,
"learning_rate": 9.932107837347708e-05,
"loss": 0.8071644306182861,
"step": 1826
},
{
"epoch": 0.7713080168776372,
"grad_norm": 1.1999030113220215,
"learning_rate": 9.931712869080144e-05,
"loss": 0.7376157641410828,
"step": 1828
},
{
"epoch": 0.7721518987341772,
"grad_norm": 1.1166026592254639,
"learning_rate": 9.931316763161135e-05,
"loss": 0.7487053275108337,
"step": 1830
},
{
"epoch": 0.7729957805907173,
"grad_norm": 1.1788052320480347,
"learning_rate": 9.930919519682059e-05,
"loss": 0.733161985874176,
"step": 1832
},
{
"epoch": 0.7738396624472574,
"grad_norm": 1.309968113899231,
"learning_rate": 9.930521138734548e-05,
"loss": 0.7907692790031433,
"step": 1834
},
{
"epoch": 0.7746835443037975,
"grad_norm": 1.1685889959335327,
"learning_rate": 9.930121620410502e-05,
"loss": 0.7192210555076599,
"step": 1836
},
{
"epoch": 0.7755274261603375,
"grad_norm": 1.2243701219558716,
"learning_rate": 9.929720964802085e-05,
"loss": 0.7394438982009888,
"step": 1838
},
{
"epoch": 0.7763713080168776,
"grad_norm": 1.2940958738327026,
"learning_rate": 9.929319172001717e-05,
"loss": 0.7885041832923889,
"step": 1840
},
{
"epoch": 0.7772151898734178,
"grad_norm": 1.0952763557434082,
"learning_rate": 9.928916242102086e-05,
"loss": 0.6822885274887085,
"step": 1842
},
{
"epoch": 0.7780590717299578,
"grad_norm": 1.0333503484725952,
"learning_rate": 9.928512175196139e-05,
"loss": 0.7070927619934082,
"step": 1844
},
{
"epoch": 0.7789029535864979,
"grad_norm": 1.201359510421753,
"learning_rate": 9.928106971377088e-05,
"loss": 0.7041296362876892,
"step": 1846
},
{
"epoch": 0.779746835443038,
"grad_norm": 1.5381278991699219,
"learning_rate": 9.927700630738404e-05,
"loss": 0.6630192995071411,
"step": 1848
},
{
"epoch": 0.7805907172995781,
"grad_norm": 1.2858322858810425,
"learning_rate": 9.927293153373823e-05,
"loss": 0.7628101110458374,
"step": 1850
},
{
"epoch": 0.7814345991561181,
"grad_norm": 1.3730580806732178,
"learning_rate": 9.926884539377343e-05,
"loss": 0.7557390928268433,
"step": 1852
},
{
"epoch": 0.7822784810126582,
"grad_norm": 1.4954931735992432,
"learning_rate": 9.92647478884322e-05,
"loss": 0.8217329978942871,
"step": 1854
},
{
"epoch": 0.7831223628691983,
"grad_norm": 1.1092652082443237,
"learning_rate": 9.92606390186598e-05,
"loss": 0.672879695892334,
"step": 1856
},
{
"epoch": 0.7839662447257384,
"grad_norm": 1.2077893018722534,
"learning_rate": 9.925651878540404e-05,
"loss": 0.7380653619766235,
"step": 1858
},
{
"epoch": 0.7848101265822784,
"grad_norm": 1.0789313316345215,
"learning_rate": 9.925238718961538e-05,
"loss": 0.6648160219192505,
"step": 1860
},
{
"epoch": 0.7856540084388186,
"grad_norm": 1.3950812816619873,
"learning_rate": 9.924824423224692e-05,
"loss": 0.8316769003868103,
"step": 1862
},
{
"epoch": 0.7864978902953587,
"grad_norm": 1.3934763669967651,
"learning_rate": 9.924408991425433e-05,
"loss": 0.7901778817176819,
"step": 1864
},
{
"epoch": 0.7873417721518987,
"grad_norm": 1.2191659212112427,
"learning_rate": 9.923992423659596e-05,
"loss": 0.7643826007843018,
"step": 1866
},
{
"epoch": 0.7881856540084389,
"grad_norm": 0.986673891544342,
"learning_rate": 9.923574720023274e-05,
"loss": 0.6314064860343933,
"step": 1868
},
{
"epoch": 0.7890295358649789,
"grad_norm": 1.003552794456482,
"learning_rate": 9.923155880612823e-05,
"loss": 0.8244763016700745,
"step": 1870
},
{
"epoch": 0.789873417721519,
"grad_norm": 1.0831382274627686,
"learning_rate": 9.92273590552486e-05,
"loss": 0.7398403882980347,
"step": 1872
},
{
"epoch": 0.790717299578059,
"grad_norm": 1.1782667636871338,
"learning_rate": 9.922314794856267e-05,
"loss": 0.735211968421936,
"step": 1874
},
{
"epoch": 0.7915611814345992,
"grad_norm": 2.230534076690674,
"learning_rate": 9.921892548704186e-05,
"loss": 0.7550510764122009,
"step": 1876
},
{
"epoch": 0.7924050632911392,
"grad_norm": 1.0191401243209839,
"learning_rate": 9.92146916716602e-05,
"loss": 0.7676286697387695,
"step": 1878
},
{
"epoch": 0.7932489451476793,
"grad_norm": 1.1347072124481201,
"learning_rate": 9.921044650339438e-05,
"loss": 0.7409467697143555,
"step": 1880
},
{
"epoch": 0.7940928270042195,
"grad_norm": 1.107528567314148,
"learning_rate": 9.920618998322364e-05,
"loss": 0.7760165333747864,
"step": 1882
},
{
"epoch": 0.7949367088607595,
"grad_norm": 1.1110666990280151,
"learning_rate": 9.92019221121299e-05,
"loss": 0.7360131740570068,
"step": 1884
},
{
"epoch": 0.7957805907172996,
"grad_norm": 1.267580509185791,
"learning_rate": 9.919764289109765e-05,
"loss": 0.7784845232963562,
"step": 1886
},
{
"epoch": 0.7966244725738396,
"grad_norm": 1.5894557237625122,
"learning_rate": 9.919335232111407e-05,
"loss": 0.7880831360816956,
"step": 1888
},
{
"epoch": 0.7974683544303798,
"grad_norm": 1.1906384229660034,
"learning_rate": 9.918905040316886e-05,
"loss": 0.7315587997436523,
"step": 1890
},
{
"epoch": 0.7983122362869198,
"grad_norm": 1.3626811504364014,
"learning_rate": 9.918473713825445e-05,
"loss": 0.7808622121810913,
"step": 1892
},
{
"epoch": 0.7991561181434599,
"grad_norm": 1.1801300048828125,
"learning_rate": 9.918041252736577e-05,
"loss": 0.7055642604827881,
"step": 1894
},
{
"epoch": 0.8,
"grad_norm": 1.2669063806533813,
"learning_rate": 9.917607657150046e-05,
"loss": 0.7188893556594849,
"step": 1896
},
{
"epoch": 0.8008438818565401,
"grad_norm": 1.1746855974197388,
"learning_rate": 9.91717292716587e-05,
"loss": 0.7787454128265381,
"step": 1898
},
{
"epoch": 0.8016877637130801,
"grad_norm": 1.120012640953064,
"learning_rate": 9.916737062884338e-05,
"loss": 0.720715343952179,
"step": 1900
},
{
"epoch": 0.8016877637130801,
"eval_loss": 0.7648926973342896,
"eval_runtime": 865.9394,
"eval_samples_per_second": 2.433,
"eval_steps_per_second": 2.433,
"step": 1900
},
{
"epoch": 0.8025316455696202,
"grad_norm": 1.1745549440383911,
"learning_rate": 9.916300064405993e-05,
"loss": 0.7544789910316467,
"step": 1902
},
{
"epoch": 0.8033755274261604,
"grad_norm": 1.1439874172210693,
"learning_rate": 9.915861931831643e-05,
"loss": 0.7479203343391418,
"step": 1904
},
{
"epoch": 0.8042194092827004,
"grad_norm": 1.3508219718933105,
"learning_rate": 9.915422665262356e-05,
"loss": 0.6995842456817627,
"step": 1906
},
{
"epoch": 0.8050632911392405,
"grad_norm": 1.1519006490707397,
"learning_rate": 9.914982264799462e-05,
"loss": 0.7152725458145142,
"step": 1908
},
{
"epoch": 0.8059071729957806,
"grad_norm": 1.0818005800247192,
"learning_rate": 9.914540730544554e-05,
"loss": 0.7105516195297241,
"step": 1910
},
{
"epoch": 0.8067510548523207,
"grad_norm": 1.1611127853393555,
"learning_rate": 9.914098062599485e-05,
"loss": 0.6911059617996216,
"step": 1912
},
{
"epoch": 0.8075949367088607,
"grad_norm": 1.1964445114135742,
"learning_rate": 9.91365426106637e-05,
"loss": 0.6897286772727966,
"step": 1914
},
{
"epoch": 0.8084388185654009,
"grad_norm": 1.3873497247695923,
"learning_rate": 9.913209326047585e-05,
"loss": 0.7263250350952148,
"step": 1916
},
{
"epoch": 0.809282700421941,
"grad_norm": 1.1729894876480103,
"learning_rate": 9.91276325764577e-05,
"loss": 0.7045295238494873,
"step": 1918
},
{
"epoch": 0.810126582278481,
"grad_norm": 0.9089694619178772,
"learning_rate": 9.912316055963822e-05,
"loss": 0.587131142616272,
"step": 1920
},
{
"epoch": 0.810970464135021,
"grad_norm": 1.2051384449005127,
"learning_rate": 9.911867721104902e-05,
"loss": 0.7237880229949951,
"step": 1922
},
{
"epoch": 0.8118143459915612,
"grad_norm": 1.2152670621871948,
"learning_rate": 9.911418253172433e-05,
"loss": 0.6967294216156006,
"step": 1924
},
{
"epoch": 0.8126582278481013,
"grad_norm": 1.1193642616271973,
"learning_rate": 9.9109676522701e-05,
"loss": 0.7636315822601318,
"step": 1926
},
{
"epoch": 0.8135021097046413,
"grad_norm": 1.2457597255706787,
"learning_rate": 9.910515918501843e-05,
"loss": 0.7451969981193542,
"step": 1928
},
{
"epoch": 0.8143459915611815,
"grad_norm": 1.057009220123291,
"learning_rate": 9.910063051971876e-05,
"loss": 0.6320056319236755,
"step": 1930
},
{
"epoch": 0.8151898734177215,
"grad_norm": 1.2820258140563965,
"learning_rate": 9.909609052784661e-05,
"loss": 0.691004753112793,
"step": 1932
},
{
"epoch": 0.8160337552742616,
"grad_norm": 1.331312656402588,
"learning_rate": 9.909153921044927e-05,
"loss": 0.7741923332214355,
"step": 1934
},
{
"epoch": 0.8168776371308016,
"grad_norm": 1.2055360078811646,
"learning_rate": 9.908697656857668e-05,
"loss": 0.668049156665802,
"step": 1936
},
{
"epoch": 0.8177215189873418,
"grad_norm": 1.2124541997909546,
"learning_rate": 9.90824026032813e-05,
"loss": 0.6584748029708862,
"step": 1938
},
{
"epoch": 0.8185654008438819,
"grad_norm": 1.244288682937622,
"learning_rate": 9.90778173156183e-05,
"loss": 0.7081992626190186,
"step": 1940
},
{
"epoch": 0.8194092827004219,
"grad_norm": 1.250558853149414,
"learning_rate": 9.907322070664542e-05,
"loss": 0.7977840900421143,
"step": 1942
},
{
"epoch": 0.8202531645569621,
"grad_norm": 1.3892892599105835,
"learning_rate": 9.906861277742297e-05,
"loss": 0.7830103635787964,
"step": 1944
},
{
"epoch": 0.8210970464135021,
"grad_norm": 1.3152644634246826,
"learning_rate": 9.906399352901393e-05,
"loss": 0.8451479077339172,
"step": 1946
},
{
"epoch": 0.8219409282700422,
"grad_norm": 1.1102250814437866,
"learning_rate": 9.905936296248388e-05,
"loss": 0.7035528421401978,
"step": 1948
},
{
"epoch": 0.8227848101265823,
"grad_norm": 1.0271214246749878,
"learning_rate": 9.905472107890101e-05,
"loss": 0.764616847038269,
"step": 1950
},
{
"epoch": 0.8236286919831224,
"grad_norm": 1.1772255897521973,
"learning_rate": 9.905006787933609e-05,
"loss": 0.7699717283248901,
"step": 1952
},
{
"epoch": 0.8244725738396624,
"grad_norm": 1.2486404180526733,
"learning_rate": 9.904540336486252e-05,
"loss": 0.7755605578422546,
"step": 1954
},
{
"epoch": 0.8253164556962025,
"grad_norm": 1.070148229598999,
"learning_rate": 9.904072753655635e-05,
"loss": 0.688934326171875,
"step": 1956
},
{
"epoch": 0.8261603375527427,
"grad_norm": 1.118401288986206,
"learning_rate": 9.903604039549617e-05,
"loss": 0.7447791695594788,
"step": 1958
},
{
"epoch": 0.8270042194092827,
"grad_norm": 1.2209899425506592,
"learning_rate": 9.903134194276323e-05,
"loss": 0.7990683317184448,
"step": 1960
},
{
"epoch": 0.8278481012658228,
"grad_norm": 1.296093225479126,
"learning_rate": 9.902663217944137e-05,
"loss": 0.7290873527526855,
"step": 1962
},
{
"epoch": 0.8286919831223629,
"grad_norm": 1.2594937086105347,
"learning_rate": 9.902191110661704e-05,
"loss": 0.7971217036247253,
"step": 1964
},
{
"epoch": 0.829535864978903,
"grad_norm": 1.6016536951065063,
"learning_rate": 9.90171787253793e-05,
"loss": 0.6728768348693848,
"step": 1966
},
{
"epoch": 0.830379746835443,
"grad_norm": 3.3128950595855713,
"learning_rate": 9.901243503681983e-05,
"loss": 0.7684211730957031,
"step": 1968
},
{
"epoch": 0.8312236286919831,
"grad_norm": 1.2970373630523682,
"learning_rate": 9.90076800420329e-05,
"loss": 0.756637454032898,
"step": 1970
},
{
"epoch": 0.8320675105485232,
"grad_norm": 1.1388959884643555,
"learning_rate": 9.900291374211538e-05,
"loss": 0.6692084074020386,
"step": 1972
},
{
"epoch": 0.8329113924050633,
"grad_norm": 1.050641655921936,
"learning_rate": 9.899813613816677e-05,
"loss": 0.7298309803009033,
"step": 1974
},
{
"epoch": 0.8337552742616033,
"grad_norm": 1.2598577737808228,
"learning_rate": 9.899334723128922e-05,
"loss": 0.6886547803878784,
"step": 1976
},
{
"epoch": 0.8345991561181435,
"grad_norm": 1.2800767421722412,
"learning_rate": 9.898854702258735e-05,
"loss": 0.745341420173645,
"step": 1978
},
{
"epoch": 0.8354430379746836,
"grad_norm": 1.1923155784606934,
"learning_rate": 9.898373551316856e-05,
"loss": 0.7133575081825256,
"step": 1980
},
{
"epoch": 0.8362869198312236,
"grad_norm": 1.156121015548706,
"learning_rate": 9.897891270414272e-05,
"loss": 0.8117790818214417,
"step": 1982
},
{
"epoch": 0.8371308016877637,
"grad_norm": 1.0400618314743042,
"learning_rate": 9.897407859662238e-05,
"loss": 0.6094260215759277,
"step": 1984
},
{
"epoch": 0.8379746835443038,
"grad_norm": 1.451953411102295,
"learning_rate": 9.896923319172268e-05,
"loss": 0.7680332064628601,
"step": 1986
},
{
"epoch": 0.8388185654008439,
"grad_norm": 1.2560248374938965,
"learning_rate": 9.896437649056134e-05,
"loss": 0.6918784379959106,
"step": 1988
},
{
"epoch": 0.8396624472573839,
"grad_norm": 1.2744325399398804,
"learning_rate": 9.895950849425874e-05,
"loss": 0.7654696106910706,
"step": 1990
},
{
"epoch": 0.8405063291139241,
"grad_norm": 1.304439902305603,
"learning_rate": 9.895462920393781e-05,
"loss": 0.7585932612419128,
"step": 1992
},
{
"epoch": 0.8413502109704641,
"grad_norm": 1.578957200050354,
"learning_rate": 9.89497386207241e-05,
"loss": 0.7474164962768555,
"step": 1994
},
{
"epoch": 0.8421940928270042,
"grad_norm": 1.0358996391296387,
"learning_rate": 9.89448367457458e-05,
"loss": 0.663844883441925,
"step": 1996
},
{
"epoch": 0.8430379746835444,
"grad_norm": 1.2285103797912598,
"learning_rate": 9.893992358013366e-05,
"loss": 0.7578557729721069,
"step": 1998
},
{
"epoch": 0.8438818565400844,
"grad_norm": 1.2051875591278076,
"learning_rate": 9.893499912502108e-05,
"loss": 0.7795036435127258,
"step": 2000
},
{
"epoch": 0.8438818565400844,
"eval_loss": 0.7587011456489563,
"eval_runtime": 856.2276,
"eval_samples_per_second": 2.461,
"eval_steps_per_second": 2.461,
"step": 2000
},
{
"epoch": 0.8447257383966245,
"grad_norm": 1.145434021949768,
"learning_rate": 9.893006338154401e-05,
"loss": 0.731850802898407,
"step": 2002
},
{
"epoch": 0.8455696202531645,
"grad_norm": 1.0618077516555786,
"learning_rate": 9.892511635084101e-05,
"loss": 0.6711665391921997,
"step": 2004
},
{
"epoch": 0.8464135021097047,
"grad_norm": 1.1657867431640625,
"learning_rate": 9.892015803405331e-05,
"loss": 0.6894803643226624,
"step": 2006
},
{
"epoch": 0.8472573839662447,
"grad_norm": 1.080140233039856,
"learning_rate": 9.891518843232467e-05,
"loss": 0.628146231174469,
"step": 2008
},
{
"epoch": 0.8481012658227848,
"grad_norm": 1.0664509534835815,
"learning_rate": 9.891020754680151e-05,
"loss": 0.740858793258667,
"step": 2010
},
{
"epoch": 0.8489451476793249,
"grad_norm": 1.5567615032196045,
"learning_rate": 9.89052153786328e-05,
"loss": 0.7763919234275818,
"step": 2012
},
{
"epoch": 0.849789029535865,
"grad_norm": 1.4347095489501953,
"learning_rate": 9.890021192897016e-05,
"loss": 0.8131396770477295,
"step": 2014
},
{
"epoch": 0.850632911392405,
"grad_norm": 1.1787892580032349,
"learning_rate": 9.889519719896776e-05,
"loss": 0.6829051375389099,
"step": 2016
},
{
"epoch": 0.8514767932489451,
"grad_norm": 1.239745855331421,
"learning_rate": 9.889017118978241e-05,
"loss": 0.7664558291435242,
"step": 2018
},
{
"epoch": 0.8523206751054853,
"grad_norm": 1.1224207878112793,
"learning_rate": 9.888513390257352e-05,
"loss": 0.7307376861572266,
"step": 2020
},
{
"epoch": 0.8531645569620253,
"grad_norm": 1.100536823272705,
"learning_rate": 9.88800853385031e-05,
"loss": 0.6786578893661499,
"step": 2022
},
{
"epoch": 0.8540084388185654,
"grad_norm": 1.25773024559021,
"learning_rate": 9.887502549873576e-05,
"loss": 0.7971984148025513,
"step": 2024
},
{
"epoch": 0.8548523206751055,
"grad_norm": 0.9980104565620422,
"learning_rate": 9.886995438443868e-05,
"loss": 0.6990941166877747,
"step": 2026
},
{
"epoch": 0.8556962025316456,
"grad_norm": 1.0464621782302856,
"learning_rate": 9.886487199678171e-05,
"loss": 0.763938307762146,
"step": 2028
},
{
"epoch": 0.8565400843881856,
"grad_norm": 1.2303017377853394,
"learning_rate": 9.885977833693724e-05,
"loss": 0.7165632247924805,
"step": 2030
},
{
"epoch": 0.8573839662447258,
"grad_norm": 1.2203325033187866,
"learning_rate": 9.885467340608027e-05,
"loss": 0.7586364150047302,
"step": 2032
},
{
"epoch": 0.8582278481012658,
"grad_norm": 1.113882064819336,
"learning_rate": 9.884955720538843e-05,
"loss": 0.703253984451294,
"step": 2034
},
{
"epoch": 0.8590717299578059,
"grad_norm": 1.1731632947921753,
"learning_rate": 9.88444297360419e-05,
"loss": 0.8530917763710022,
"step": 2036
},
{
"epoch": 0.859915611814346,
"grad_norm": 1.4592338800430298,
"learning_rate": 9.883929099922349e-05,
"loss": 0.8166638612747192,
"step": 2038
},
{
"epoch": 0.8607594936708861,
"grad_norm": 1.1279125213623047,
"learning_rate": 9.883414099611864e-05,
"loss": 0.6762415170669556,
"step": 2040
},
{
"epoch": 0.8616033755274262,
"grad_norm": 1.1587293148040771,
"learning_rate": 9.882897972791534e-05,
"loss": 0.6826539039611816,
"step": 2042
},
{
"epoch": 0.8624472573839662,
"grad_norm": 1.1909502744674683,
"learning_rate": 9.88238071958042e-05,
"loss": 0.7372410893440247,
"step": 2044
},
{
"epoch": 0.8632911392405064,
"grad_norm": 1.0340155363082886,
"learning_rate": 9.881862340097841e-05,
"loss": 0.699260950088501,
"step": 2046
},
{
"epoch": 0.8641350210970464,
"grad_norm": 1.1745870113372803,
"learning_rate": 9.881342834463379e-05,
"loss": 0.7689789533615112,
"step": 2048
},
{
"epoch": 0.8649789029535865,
"grad_norm": 1.0003606081008911,
"learning_rate": 9.880822202796872e-05,
"loss": 0.6877372860908508,
"step": 2050
},
{
"epoch": 0.8658227848101265,
"grad_norm": 1.2546781301498413,
"learning_rate": 9.88030044521842e-05,
"loss": 0.7632413506507874,
"step": 2052
},
{
"epoch": 0.8666666666666667,
"grad_norm": 1.1178704500198364,
"learning_rate": 9.879777561848385e-05,
"loss": 0.6776729822158813,
"step": 2054
},
{
"epoch": 0.8675105485232067,
"grad_norm": 1.523606777191162,
"learning_rate": 9.879253552807384e-05,
"loss": 0.7592973709106445,
"step": 2056
},
{
"epoch": 0.8683544303797468,
"grad_norm": 1.3490995168685913,
"learning_rate": 9.878728418216296e-05,
"loss": 0.8028839230537415,
"step": 2058
},
{
"epoch": 0.869198312236287,
"grad_norm": 1.1851624250411987,
"learning_rate": 9.87820215819626e-05,
"loss": 0.7499933838844299,
"step": 2060
},
{
"epoch": 0.870042194092827,
"grad_norm": 1.1877925395965576,
"learning_rate": 9.877674772868672e-05,
"loss": 0.7324717044830322,
"step": 2062
},
{
"epoch": 0.8708860759493671,
"grad_norm": 1.2982885837554932,
"learning_rate": 9.877146262355194e-05,
"loss": 0.7456585168838501,
"step": 2064
},
{
"epoch": 0.8717299578059071,
"grad_norm": 1.043912649154663,
"learning_rate": 9.876616626777739e-05,
"loss": 0.7552799582481384,
"step": 2066
},
{
"epoch": 0.8725738396624473,
"grad_norm": 1.172580599784851,
"learning_rate": 9.876085866258487e-05,
"loss": 0.6964990496635437,
"step": 2068
},
{
"epoch": 0.8734177215189873,
"grad_norm": 1.26815927028656,
"learning_rate": 9.875553980919871e-05,
"loss": 0.7368612289428711,
"step": 2070
},
{
"epoch": 0.8742616033755274,
"grad_norm": 1.1268136501312256,
"learning_rate": 9.875020970884587e-05,
"loss": 0.7400802969932556,
"step": 2072
},
{
"epoch": 0.8751054852320675,
"grad_norm": 1.0556721687316895,
"learning_rate": 9.874486836275594e-05,
"loss": 0.6931334137916565,
"step": 2074
},
{
"epoch": 0.8759493670886076,
"grad_norm": 1.1967823505401611,
"learning_rate": 9.873951577216106e-05,
"loss": 0.7124089002609253,
"step": 2076
},
{
"epoch": 0.8767932489451477,
"grad_norm": 1.1753164529800415,
"learning_rate": 9.873415193829591e-05,
"loss": 0.7462030053138733,
"step": 2078
},
{
"epoch": 0.8776371308016878,
"grad_norm": 1.326923131942749,
"learning_rate": 9.872877686239789e-05,
"loss": 0.778078019618988,
"step": 2080
},
{
"epoch": 0.8784810126582279,
"grad_norm": 1.1472662687301636,
"learning_rate": 9.87233905457069e-05,
"loss": 0.6592919826507568,
"step": 2082
},
{
"epoch": 0.8793248945147679,
"grad_norm": 1.1162762641906738,
"learning_rate": 9.871799298946544e-05,
"loss": 0.661717414855957,
"step": 2084
},
{
"epoch": 0.880168776371308,
"grad_norm": 1.1694408655166626,
"learning_rate": 9.871258419491866e-05,
"loss": 0.6203670501708984,
"step": 2086
},
{
"epoch": 0.8810126582278481,
"grad_norm": 1.229691505432129,
"learning_rate": 9.870716416331425e-05,
"loss": 0.758888304233551,
"step": 2088
},
{
"epoch": 0.8818565400843882,
"grad_norm": 1.540377140045166,
"learning_rate": 9.870173289590251e-05,
"loss": 0.760649561882019,
"step": 2090
},
{
"epoch": 0.8827004219409282,
"grad_norm": 1.173628568649292,
"learning_rate": 9.869629039393632e-05,
"loss": 0.6981227397918701,
"step": 2092
},
{
"epoch": 0.8835443037974684,
"grad_norm": 1.1404013633728027,
"learning_rate": 9.869083665867116e-05,
"loss": 0.7808336615562439,
"step": 2094
},
{
"epoch": 0.8843881856540085,
"grad_norm": 1.1038721799850464,
"learning_rate": 9.868537169136511e-05,
"loss": 0.7540555596351624,
"step": 2096
},
{
"epoch": 0.8852320675105485,
"grad_norm": 1.1510080099105835,
"learning_rate": 9.867989549327885e-05,
"loss": 0.6650454998016357,
"step": 2098
},
{
"epoch": 0.8860759493670886,
"grad_norm": 1.166912317276001,
"learning_rate": 9.867440806567561e-05,
"loss": 0.673769474029541,
"step": 2100
},
{
"epoch": 0.8860759493670886,
"eval_loss": 0.7559094429016113,
"eval_runtime": 847.8311,
"eval_samples_per_second": 2.485,
"eval_steps_per_second": 2.485,
"step": 2100
},
{
"epoch": 0.8869198312236287,
"grad_norm": 1.227583885192871,
"learning_rate": 9.866890940982121e-05,
"loss": 0.8314241766929626,
"step": 2102
},
{
"epoch": 0.8877637130801688,
"grad_norm": 1.1813976764678955,
"learning_rate": 9.866339952698413e-05,
"loss": 0.6770843863487244,
"step": 2104
},
{
"epoch": 0.8886075949367088,
"grad_norm": 1.2471063137054443,
"learning_rate": 9.865787841843539e-05,
"loss": 0.7142292857170105,
"step": 2106
},
{
"epoch": 0.889451476793249,
"grad_norm": 1.1602860689163208,
"learning_rate": 9.865234608544858e-05,
"loss": 0.6981731653213501,
"step": 2108
},
{
"epoch": 0.890295358649789,
"grad_norm": 1.145677089691162,
"learning_rate": 9.864680252929992e-05,
"loss": 0.7019379138946533,
"step": 2110
},
{
"epoch": 0.8911392405063291,
"grad_norm": 1.2222462892532349,
"learning_rate": 9.86412477512682e-05,
"loss": 0.7690986394882202,
"step": 2112
},
{
"epoch": 0.8919831223628693,
"grad_norm": 1.1288166046142578,
"learning_rate": 9.863568175263478e-05,
"loss": 0.7241792678833008,
"step": 2114
},
{
"epoch": 0.8928270042194093,
"grad_norm": 1.1773978471755981,
"learning_rate": 9.863010453468364e-05,
"loss": 0.7392162084579468,
"step": 2116
},
{
"epoch": 0.8936708860759494,
"grad_norm": 1.102638840675354,
"learning_rate": 9.862451609870136e-05,
"loss": 0.7603078484535217,
"step": 2118
},
{
"epoch": 0.8945147679324894,
"grad_norm": 1.1325360536575317,
"learning_rate": 9.861891644597707e-05,
"loss": 0.6804911494255066,
"step": 2120
},
{
"epoch": 0.8953586497890296,
"grad_norm": 1.1381969451904297,
"learning_rate": 9.86133055778025e-05,
"loss": 0.787288248538971,
"step": 2122
},
{
"epoch": 0.8962025316455696,
"grad_norm": 1.2454546689987183,
"learning_rate": 9.860768349547196e-05,
"loss": 0.7282505035400391,
"step": 2124
},
{
"epoch": 0.8970464135021097,
"grad_norm": 1.2568305730819702,
"learning_rate": 9.860205020028237e-05,
"loss": 0.7554803490638733,
"step": 2126
},
{
"epoch": 0.8978902953586498,
"grad_norm": 1.1523523330688477,
"learning_rate": 9.859640569353321e-05,
"loss": 0.7126525044441223,
"step": 2128
},
{
"epoch": 0.8987341772151899,
"grad_norm": 1.314878225326538,
"learning_rate": 9.859074997652658e-05,
"loss": 0.7300811409950256,
"step": 2130
},
{
"epoch": 0.8995780590717299,
"grad_norm": 1.1272218227386475,
"learning_rate": 9.858508305056713e-05,
"loss": 0.7217329144477844,
"step": 2132
},
{
"epoch": 0.90042194092827,
"grad_norm": 1.10934317111969,
"learning_rate": 9.857940491696211e-05,
"loss": 0.714308500289917,
"step": 2134
},
{
"epoch": 0.9012658227848102,
"grad_norm": 1.1991039514541626,
"learning_rate": 9.857371557702136e-05,
"loss": 0.6613366007804871,
"step": 2136
},
{
"epoch": 0.9021097046413502,
"grad_norm": 1.3176918029785156,
"learning_rate": 9.85680150320573e-05,
"loss": 0.6972863078117371,
"step": 2138
},
{
"epoch": 0.9029535864978903,
"grad_norm": 1.1966592073440552,
"learning_rate": 9.856230328338496e-05,
"loss": 0.7299100160598755,
"step": 2140
},
{
"epoch": 0.9037974683544304,
"grad_norm": 1.2889270782470703,
"learning_rate": 9.85565803323219e-05,
"loss": 0.7145020961761475,
"step": 2142
},
{
"epoch": 0.9046413502109705,
"grad_norm": 1.2112789154052734,
"learning_rate": 9.855084618018828e-05,
"loss": 0.6717942953109741,
"step": 2144
},
{
"epoch": 0.9054852320675105,
"grad_norm": 1.2550239562988281,
"learning_rate": 9.85451008283069e-05,
"loss": 0.7460196018218994,
"step": 2146
},
{
"epoch": 0.9063291139240506,
"grad_norm": 1.2926387786865234,
"learning_rate": 9.853934427800309e-05,
"loss": 0.8300626873970032,
"step": 2148
},
{
"epoch": 0.9071729957805907,
"grad_norm": 1.0690672397613525,
"learning_rate": 9.853357653060478e-05,
"loss": 0.715215802192688,
"step": 2150
},
{
"epoch": 0.9080168776371308,
"grad_norm": 1.1021424531936646,
"learning_rate": 9.852779758744245e-05,
"loss": 0.7021427154541016,
"step": 2152
},
{
"epoch": 0.9088607594936708,
"grad_norm": 1.0713517665863037,
"learning_rate": 9.852200744984921e-05,
"loss": 0.7576406598091125,
"step": 2154
},
{
"epoch": 0.909704641350211,
"grad_norm": 1.277526617050171,
"learning_rate": 9.851620611916075e-05,
"loss": 0.7008846998214722,
"step": 2156
},
{
"epoch": 0.9105485232067511,
"grad_norm": 1.2434618473052979,
"learning_rate": 9.85103935967153e-05,
"loss": 0.7536613345146179,
"step": 2158
},
{
"epoch": 0.9113924050632911,
"grad_norm": 1.1654841899871826,
"learning_rate": 9.850456988385371e-05,
"loss": 0.7435567378997803,
"step": 2160
},
{
"epoch": 0.9122362869198313,
"grad_norm": 1.0718246698379517,
"learning_rate": 9.849873498191939e-05,
"loss": 0.7725666165351868,
"step": 2162
},
{
"epoch": 0.9130801687763713,
"grad_norm": 1.3425630331039429,
"learning_rate": 9.849288889225835e-05,
"loss": 0.7833593487739563,
"step": 2164
},
{
"epoch": 0.9139240506329114,
"grad_norm": 1.1989985704421997,
"learning_rate": 9.848703161621917e-05,
"loss": 0.7290158867835999,
"step": 2166
},
{
"epoch": 0.9147679324894514,
"grad_norm": 1.0549380779266357,
"learning_rate": 9.8481163155153e-05,
"loss": 0.6787996888160706,
"step": 2168
},
{
"epoch": 0.9156118143459916,
"grad_norm": 1.0757017135620117,
"learning_rate": 9.847528351041359e-05,
"loss": 0.7645748853683472,
"step": 2170
},
{
"epoch": 0.9164556962025316,
"grad_norm": 1.0636975765228271,
"learning_rate": 9.846939268335726e-05,
"loss": 0.6640698313713074,
"step": 2172
},
{
"epoch": 0.9172995780590717,
"grad_norm": 1.2038439512252808,
"learning_rate": 9.846349067534291e-05,
"loss": 0.7216284275054932,
"step": 2174
},
{
"epoch": 0.9181434599156119,
"grad_norm": 1.17854642868042,
"learning_rate": 9.845757748773203e-05,
"loss": 0.7244991660118103,
"step": 2176
},
{
"epoch": 0.9189873417721519,
"grad_norm": 1.0391159057617188,
"learning_rate": 9.845165312188864e-05,
"loss": 0.6043152809143066,
"step": 2178
},
{
"epoch": 0.919831223628692,
"grad_norm": 1.2382071018218994,
"learning_rate": 9.844571757917944e-05,
"loss": 0.7791659832000732,
"step": 2180
},
{
"epoch": 0.920675105485232,
"grad_norm": 1.0855708122253418,
"learning_rate": 9.84397708609736e-05,
"loss": 0.7190433144569397,
"step": 2182
},
{
"epoch": 0.9215189873417722,
"grad_norm": 1.103308916091919,
"learning_rate": 9.843381296864291e-05,
"loss": 0.6648658514022827,
"step": 2184
},
{
"epoch": 0.9223628691983122,
"grad_norm": 1.073517918586731,
"learning_rate": 9.842784390356178e-05,
"loss": 0.6891760230064392,
"step": 2186
},
{
"epoch": 0.9232067510548523,
"grad_norm": 1.0806199312210083,
"learning_rate": 9.842186366710712e-05,
"loss": 0.6880859136581421,
"step": 2188
},
{
"epoch": 0.9240506329113924,
"grad_norm": 1.0631483793258667,
"learning_rate": 9.841587226065848e-05,
"loss": 0.6238307952880859,
"step": 2190
},
{
"epoch": 0.9248945147679325,
"grad_norm": 1.2630863189697266,
"learning_rate": 9.840986968559795e-05,
"loss": 0.6905744075775146,
"step": 2192
},
{
"epoch": 0.9257383966244725,
"grad_norm": 1.1307560205459595,
"learning_rate": 9.840385594331022e-05,
"loss": 0.7531564235687256,
"step": 2194
},
{
"epoch": 0.9265822784810127,
"grad_norm": 1.0294862985610962,
"learning_rate": 9.839783103518254e-05,
"loss": 0.6750671863555908,
"step": 2196
},
{
"epoch": 0.9274261603375528,
"grad_norm": 1.2446976900100708,
"learning_rate": 9.839179496260472e-05,
"loss": 0.7200804352760315,
"step": 2198
},
{
"epoch": 0.9282700421940928,
"grad_norm": 1.2673420906066895,
"learning_rate": 9.83857477269692e-05,
"loss": 0.7002623677253723,
"step": 2200
},
{
"epoch": 0.9282700421940928,
"eval_loss": 0.7497645616531372,
"eval_runtime": 856.8766,
"eval_samples_per_second": 2.459,
"eval_steps_per_second": 2.459,
"step": 2200
},
{
"epoch": 0.9291139240506329,
"grad_norm": 1.5114624500274658,
"learning_rate": 9.837968932967094e-05,
"loss": 0.7718265056610107,
"step": 2202
},
{
"epoch": 0.929957805907173,
"grad_norm": 1.2059369087219238,
"learning_rate": 9.837361977210751e-05,
"loss": 0.7204271554946899,
"step": 2204
},
{
"epoch": 0.9308016877637131,
"grad_norm": 1.2077301740646362,
"learning_rate": 9.836753905567902e-05,
"loss": 0.7371073961257935,
"step": 2206
},
{
"epoch": 0.9316455696202531,
"grad_norm": 1.120097279548645,
"learning_rate": 9.836144718178818e-05,
"loss": 0.6601167321205139,
"step": 2208
},
{
"epoch": 0.9324894514767933,
"grad_norm": 1.1755714416503906,
"learning_rate": 9.835534415184029e-05,
"loss": 0.6897423267364502,
"step": 2210
},
{
"epoch": 0.9333333333333333,
"grad_norm": 1.3587000370025635,
"learning_rate": 9.834922996724317e-05,
"loss": 0.758438229560852,
"step": 2212
},
{
"epoch": 0.9341772151898734,
"grad_norm": 1.1898177862167358,
"learning_rate": 9.834310462940727e-05,
"loss": 0.7489214539527893,
"step": 2214
},
{
"epoch": 0.9350210970464135,
"grad_norm": 1.0814623832702637,
"learning_rate": 9.833696813974558e-05,
"loss": 0.6844488382339478,
"step": 2216
},
{
"epoch": 0.9358649789029536,
"grad_norm": 1.1060179471969604,
"learning_rate": 9.833082049967366e-05,
"loss": 0.6617586016654968,
"step": 2218
},
{
"epoch": 0.9367088607594937,
"grad_norm": 1.1780575513839722,
"learning_rate": 9.832466171060968e-05,
"loss": 0.7383584976196289,
"step": 2220
},
{
"epoch": 0.9375527426160337,
"grad_norm": 1.3734618425369263,
"learning_rate": 9.831849177397432e-05,
"loss": 0.7764308452606201,
"step": 2222
},
{
"epoch": 0.9383966244725739,
"grad_norm": 1.1367733478546143,
"learning_rate": 9.831231069119089e-05,
"loss": 0.6834397912025452,
"step": 2224
},
{
"epoch": 0.9392405063291139,
"grad_norm": 1.1695492267608643,
"learning_rate": 9.830611846368524e-05,
"loss": 0.7054480910301208,
"step": 2226
},
{
"epoch": 0.940084388185654,
"grad_norm": 1.0345736742019653,
"learning_rate": 9.829991509288579e-05,
"loss": 0.694448709487915,
"step": 2228
},
{
"epoch": 0.9409282700421941,
"grad_norm": 1.298105239868164,
"learning_rate": 9.829370058022356e-05,
"loss": 0.6839741468429565,
"step": 2230
},
{
"epoch": 0.9417721518987342,
"grad_norm": 1.2905502319335938,
"learning_rate": 9.828747492713209e-05,
"loss": 0.7886884212493896,
"step": 2232
},
{
"epoch": 0.9426160337552743,
"grad_norm": 1.12301504611969,
"learning_rate": 9.828123813504753e-05,
"loss": 0.7206413149833679,
"step": 2234
},
{
"epoch": 0.9434599156118143,
"grad_norm": 1.2644896507263184,
"learning_rate": 9.82749902054086e-05,
"loss": 0.7700693607330322,
"step": 2236
},
{
"epoch": 0.9443037974683545,
"grad_norm": 1.1626365184783936,
"learning_rate": 9.826873113965655e-05,
"loss": 0.7199711203575134,
"step": 2238
},
{
"epoch": 0.9451476793248945,
"grad_norm": 1.0728627443313599,
"learning_rate": 9.826246093923528e-05,
"loss": 0.7183539271354675,
"step": 2240
},
{
"epoch": 0.9459915611814346,
"grad_norm": 1.1444766521453857,
"learning_rate": 9.825617960559114e-05,
"loss": 0.7417964935302734,
"step": 2242
},
{
"epoch": 0.9468354430379747,
"grad_norm": 1.4059823751449585,
"learning_rate": 9.824988714017316e-05,
"loss": 0.7949740290641785,
"step": 2244
},
{
"epoch": 0.9476793248945148,
"grad_norm": 1.1349766254425049,
"learning_rate": 9.824358354443286e-05,
"loss": 0.6433083415031433,
"step": 2246
},
{
"epoch": 0.9485232067510548,
"grad_norm": 1.0879144668579102,
"learning_rate": 9.823726881982438e-05,
"loss": 0.6519861817359924,
"step": 2248
},
{
"epoch": 0.9493670886075949,
"grad_norm": 1.2289162874221802,
"learning_rate": 9.82309429678044e-05,
"loss": 0.7280195355415344,
"step": 2250
},
{
"epoch": 0.950210970464135,
"grad_norm": 1.1755765676498413,
"learning_rate": 9.822460598983217e-05,
"loss": 0.7524687647819519,
"step": 2252
},
{
"epoch": 0.9510548523206751,
"grad_norm": 1.179807186126709,
"learning_rate": 9.821825788736949e-05,
"loss": 0.7543174624443054,
"step": 2254
},
{
"epoch": 0.9518987341772152,
"grad_norm": 1.1234289407730103,
"learning_rate": 9.821189866188079e-05,
"loss": 0.716377854347229,
"step": 2256
},
{
"epoch": 0.9527426160337553,
"grad_norm": 1.0324063301086426,
"learning_rate": 9.820552831483297e-05,
"loss": 0.6403332948684692,
"step": 2258
},
{
"epoch": 0.9535864978902954,
"grad_norm": 1.1459579467773438,
"learning_rate": 9.819914684769558e-05,
"loss": 0.7406947612762451,
"step": 2260
},
{
"epoch": 0.9544303797468354,
"grad_norm": 1.2886124849319458,
"learning_rate": 9.819275426194072e-05,
"loss": 0.749687671661377,
"step": 2262
},
{
"epoch": 0.9552742616033755,
"grad_norm": 1.3349844217300415,
"learning_rate": 9.818635055904299e-05,
"loss": 0.778410017490387,
"step": 2264
},
{
"epoch": 0.9561181434599156,
"grad_norm": 1.0994901657104492,
"learning_rate": 9.81799357404796e-05,
"loss": 0.6701914668083191,
"step": 2266
},
{
"epoch": 0.9569620253164557,
"grad_norm": 1.1787796020507812,
"learning_rate": 9.817350980773038e-05,
"loss": 0.7205135226249695,
"step": 2268
},
{
"epoch": 0.9578059071729957,
"grad_norm": 1.100813627243042,
"learning_rate": 9.816707276227763e-05,
"loss": 0.6897916197776794,
"step": 2270
},
{
"epoch": 0.9586497890295359,
"grad_norm": 1.1280698776245117,
"learning_rate": 9.816062460560627e-05,
"loss": 0.6763570308685303,
"step": 2272
},
{
"epoch": 0.959493670886076,
"grad_norm": 1.2322514057159424,
"learning_rate": 9.815416533920374e-05,
"loss": 0.6948683857917786,
"step": 2274
},
{
"epoch": 0.960337552742616,
"grad_norm": 1.3963630199432373,
"learning_rate": 9.814769496456008e-05,
"loss": 0.7876828908920288,
"step": 2276
},
{
"epoch": 0.9611814345991562,
"grad_norm": 1.2093676328659058,
"learning_rate": 9.814121348316792e-05,
"loss": 0.8191362619400024,
"step": 2278
},
{
"epoch": 0.9620253164556962,
"grad_norm": 1.2223572731018066,
"learning_rate": 9.813472089652233e-05,
"loss": 0.7162626385688782,
"step": 2280
},
{
"epoch": 0.9628691983122363,
"grad_norm": 1.1498078107833862,
"learning_rate": 9.812821720612111e-05,
"loss": 0.7183970212936401,
"step": 2282
},
{
"epoch": 0.9637130801687763,
"grad_norm": 1.1563853025436401,
"learning_rate": 9.812170241346449e-05,
"loss": 0.734487771987915,
"step": 2284
},
{
"epoch": 0.9645569620253165,
"grad_norm": 1.1823415756225586,
"learning_rate": 9.81151765200553e-05,
"loss": 0.7312371730804443,
"step": 2286
},
{
"epoch": 0.9654008438818565,
"grad_norm": 1.1336151361465454,
"learning_rate": 9.810863952739899e-05,
"loss": 0.7668377757072449,
"step": 2288
},
{
"epoch": 0.9662447257383966,
"grad_norm": 1.0857036113739014,
"learning_rate": 9.810209143700347e-05,
"loss": 0.7100399732589722,
"step": 2290
},
{
"epoch": 0.9670886075949368,
"grad_norm": 1.1368129253387451,
"learning_rate": 9.809553225037926e-05,
"loss": 0.7169836163520813,
"step": 2292
},
{
"epoch": 0.9679324894514768,
"grad_norm": 1.141107439994812,
"learning_rate": 9.808896196903947e-05,
"loss": 0.7709535956382751,
"step": 2294
},
{
"epoch": 0.9687763713080169,
"grad_norm": 1.276405930519104,
"learning_rate": 9.808238059449971e-05,
"loss": 0.7300511002540588,
"step": 2296
},
{
"epoch": 0.9696202531645569,
"grad_norm": 0.9817046523094177,
"learning_rate": 9.80757881282782e-05,
"loss": 0.6259129047393799,
"step": 2298
},
{
"epoch": 0.9704641350210971,
"grad_norm": 1.3965257406234741,
"learning_rate": 9.806918457189566e-05,
"loss": 0.7361716032028198,
"step": 2300
},
{
"epoch": 0.9704641350210971,
"eval_loss": 0.7464568614959717,
"eval_runtime": 864.2128,
"eval_samples_per_second": 2.438,
"eval_steps_per_second": 2.438,
"step": 2300
},
{
"epoch": 0.9713080168776371,
"grad_norm": 1.2168612480163574,
"learning_rate": 9.806256992687544e-05,
"loss": 0.805477499961853,
"step": 2302
},
{
"epoch": 0.9721518987341772,
"grad_norm": 1.0418168306350708,
"learning_rate": 9.80559441947434e-05,
"loss": 0.6673368811607361,
"step": 2304
},
{
"epoch": 0.9729957805907173,
"grad_norm": 1.223128318786621,
"learning_rate": 9.804930737702796e-05,
"loss": 0.7585647106170654,
"step": 2306
},
{
"epoch": 0.9738396624472574,
"grad_norm": 1.264511227607727,
"learning_rate": 9.804265947526011e-05,
"loss": 0.7642034888267517,
"step": 2308
},
{
"epoch": 0.9746835443037974,
"grad_norm": 1.076887607574463,
"learning_rate": 9.803600049097339e-05,
"loss": 0.7094541192054749,
"step": 2310
},
{
"epoch": 0.9755274261603376,
"grad_norm": 1.0214987993240356,
"learning_rate": 9.802933042570392e-05,
"loss": 0.7370059490203857,
"step": 2312
},
{
"epoch": 0.9763713080168777,
"grad_norm": 1.3075295686721802,
"learning_rate": 9.802264928099035e-05,
"loss": 0.726834237575531,
"step": 2314
},
{
"epoch": 0.9772151898734177,
"grad_norm": 1.057386040687561,
"learning_rate": 9.801595705837385e-05,
"loss": 0.6742353439331055,
"step": 2316
},
{
"epoch": 0.9780590717299578,
"grad_norm": 1.3998085260391235,
"learning_rate": 9.800925375939825e-05,
"loss": 0.6862425208091736,
"step": 2318
},
{
"epoch": 0.9789029535864979,
"grad_norm": 1.080574631690979,
"learning_rate": 9.800253938560983e-05,
"loss": 0.6212031245231628,
"step": 2320
},
{
"epoch": 0.979746835443038,
"grad_norm": 1.3643771409988403,
"learning_rate": 9.799581393855748e-05,
"loss": 0.7522522211074829,
"step": 2322
},
{
"epoch": 0.980590717299578,
"grad_norm": 1.2455768585205078,
"learning_rate": 9.798907741979264e-05,
"loss": 0.7265716791152954,
"step": 2324
},
{
"epoch": 0.9814345991561182,
"grad_norm": 1.078774333000183,
"learning_rate": 9.798232983086927e-05,
"loss": 0.7160419225692749,
"step": 2326
},
{
"epoch": 0.9822784810126582,
"grad_norm": 1.3013948202133179,
"learning_rate": 9.797557117334394e-05,
"loss": 0.7991124391555786,
"step": 2328
},
{
"epoch": 0.9831223628691983,
"grad_norm": 1.2216732501983643,
"learning_rate": 9.796880144877572e-05,
"loss": 0.7193916440010071,
"step": 2330
},
{
"epoch": 0.9839662447257383,
"grad_norm": 1.1469542980194092,
"learning_rate": 9.796202065872627e-05,
"loss": 0.7184370756149292,
"step": 2332
},
{
"epoch": 0.9848101265822785,
"grad_norm": 1.0431830883026123,
"learning_rate": 9.795522880475979e-05,
"loss": 0.6474619507789612,
"step": 2334
},
{
"epoch": 0.9856540084388186,
"grad_norm": 1.1819576025009155,
"learning_rate": 9.794842588844299e-05,
"loss": 0.6392545700073242,
"step": 2336
},
{
"epoch": 0.9864978902953586,
"grad_norm": 1.1984983682632446,
"learning_rate": 9.794161191134525e-05,
"loss": 0.7358114719390869,
"step": 2338
},
{
"epoch": 0.9873417721518988,
"grad_norm": 1.3378512859344482,
"learning_rate": 9.793478687503834e-05,
"loss": 0.6762020587921143,
"step": 2340
},
{
"epoch": 0.9881856540084388,
"grad_norm": 1.272674560546875,
"learning_rate": 9.792795078109673e-05,
"loss": 0.7478934526443481,
"step": 2342
},
{
"epoch": 0.9890295358649789,
"grad_norm": 1.153746247291565,
"learning_rate": 9.792110363109733e-05,
"loss": 0.7316533923149109,
"step": 2344
},
{
"epoch": 0.9898734177215189,
"grad_norm": 1.1361702680587769,
"learning_rate": 9.791424542661967e-05,
"loss": 0.7078539133071899,
"step": 2346
},
{
"epoch": 0.9907172995780591,
"grad_norm": 1.3043115139007568,
"learning_rate": 9.790737616924581e-05,
"loss": 0.7945935130119324,
"step": 2348
},
{
"epoch": 0.9915611814345991,
"grad_norm": 1.1913264989852905,
"learning_rate": 9.790049586056034e-05,
"loss": 0.8247197866439819,
"step": 2350
},
{
"epoch": 0.9924050632911392,
"grad_norm": 1.1560171842575073,
"learning_rate": 9.789360450215041e-05,
"loss": 0.7099657654762268,
"step": 2352
},
{
"epoch": 0.9932489451476794,
"grad_norm": 1.2311041355133057,
"learning_rate": 9.788670209560575e-05,
"loss": 0.7480318546295166,
"step": 2354
},
{
"epoch": 0.9940928270042194,
"grad_norm": 1.1584707498550415,
"learning_rate": 9.787978864251859e-05,
"loss": 0.6870889067649841,
"step": 2356
},
{
"epoch": 0.9949367088607595,
"grad_norm": 1.057478666305542,
"learning_rate": 9.787286414448375e-05,
"loss": 0.6114922165870667,
"step": 2358
},
{
"epoch": 0.9957805907172996,
"grad_norm": 1.1431775093078613,
"learning_rate": 9.786592860309856e-05,
"loss": 0.6955118179321289,
"step": 2360
},
{
"epoch": 0.9966244725738397,
"grad_norm": 1.232142448425293,
"learning_rate": 9.785898201996292e-05,
"loss": 0.735048770904541,
"step": 2362
},
{
"epoch": 0.9974683544303797,
"grad_norm": 1.1236306428909302,
"learning_rate": 9.785202439667928e-05,
"loss": 0.7150241136550903,
"step": 2364
},
{
"epoch": 0.9983122362869198,
"grad_norm": 1.0517534017562866,
"learning_rate": 9.784505573485263e-05,
"loss": 0.6870222687721252,
"step": 2366
},
{
"epoch": 0.99915611814346,
"grad_norm": 1.1747480630874634,
"learning_rate": 9.78380760360905e-05,
"loss": 0.7521567940711975,
"step": 2368
},
{
"epoch": 1.0,
"grad_norm": 1.2790346145629883,
"learning_rate": 9.783108530200298e-05,
"loss": 0.7336234450340271,
"step": 2370
},
{
"epoch": 1.0008438818565402,
"grad_norm": 1.1216399669647217,
"learning_rate": 9.78240835342027e-05,
"loss": 0.6378109455108643,
"step": 2372
},
{
"epoch": 1.00168776371308,
"grad_norm": 1.267336368560791,
"learning_rate": 9.781707073430482e-05,
"loss": 0.6174905300140381,
"step": 2374
},
{
"epoch": 1.0025316455696203,
"grad_norm": 1.1342934370040894,
"learning_rate": 9.781004690392706e-05,
"loss": 0.6579123139381409,
"step": 2376
},
{
"epoch": 1.0033755274261604,
"grad_norm": 1.1317468881607056,
"learning_rate": 9.78030120446897e-05,
"loss": 0.6679617166519165,
"step": 2378
},
{
"epoch": 1.0042194092827004,
"grad_norm": 1.2992616891860962,
"learning_rate": 9.779596615821552e-05,
"loss": 0.7368149161338806,
"step": 2380
},
{
"epoch": 1.0050632911392405,
"grad_norm": 1.1714510917663574,
"learning_rate": 9.77889092461299e-05,
"loss": 0.6887164115905762,
"step": 2382
},
{
"epoch": 1.0059071729957807,
"grad_norm": 1.1670639514923096,
"learning_rate": 9.778184131006071e-05,
"loss": 0.681344211101532,
"step": 2384
},
{
"epoch": 1.0067510548523206,
"grad_norm": 1.2487291097640991,
"learning_rate": 9.77747623516384e-05,
"loss": 0.7342769503593445,
"step": 2386
},
{
"epoch": 1.0075949367088608,
"grad_norm": 1.2408956289291382,
"learning_rate": 9.776767237249595e-05,
"loss": 0.577454149723053,
"step": 2388
},
{
"epoch": 1.0084388185654007,
"grad_norm": 1.067991852760315,
"learning_rate": 9.776057137426889e-05,
"loss": 0.6588307023048401,
"step": 2390
},
{
"epoch": 1.009282700421941,
"grad_norm": 1.2821543216705322,
"learning_rate": 9.775345935859525e-05,
"loss": 0.7045041918754578,
"step": 2392
},
{
"epoch": 1.010126582278481,
"grad_norm": 1.3160134553909302,
"learning_rate": 9.774633632711569e-05,
"loss": 0.7141479253768921,
"step": 2394
},
{
"epoch": 1.010970464135021,
"grad_norm": 1.66774320602417,
"learning_rate": 9.773920228147329e-05,
"loss": 0.723293662071228,
"step": 2396
},
{
"epoch": 1.0118143459915612,
"grad_norm": 1.027588963508606,
"learning_rate": 9.77320572233138e-05,
"loss": 0.5812023878097534,
"step": 2398
},
{
"epoch": 1.0126582278481013,
"grad_norm": 1.406507968902588,
"learning_rate": 9.77249011542854e-05,
"loss": 0.7071458101272583,
"step": 2400
},
{
"epoch": 1.0126582278481013,
"eval_loss": 0.7421699166297913,
"eval_runtime": 854.2185,
"eval_samples_per_second": 2.467,
"eval_steps_per_second": 2.467,
"step": 2400
},
{
"epoch": 1.0135021097046413,
"grad_norm": 1.1236240863800049,
"learning_rate": 9.771773407603889e-05,
"loss": 0.7049722671508789,
"step": 2402
},
{
"epoch": 1.0143459915611814,
"grad_norm": 1.1924289464950562,
"learning_rate": 9.771055599022756e-05,
"loss": 0.635308027267456,
"step": 2404
},
{
"epoch": 1.0151898734177216,
"grad_norm": 1.1744966506958008,
"learning_rate": 9.770336689850727e-05,
"loss": 0.7286487817764282,
"step": 2406
},
{
"epoch": 1.0160337552742615,
"grad_norm": 1.2131173610687256,
"learning_rate": 9.769616680253639e-05,
"loss": 0.6828222274780273,
"step": 2408
},
{
"epoch": 1.0168776371308017,
"grad_norm": 1.0517828464508057,
"learning_rate": 9.768895570397585e-05,
"loss": 0.6652156114578247,
"step": 2410
},
{
"epoch": 1.0177215189873419,
"grad_norm": 1.1603758335113525,
"learning_rate": 9.768173360448912e-05,
"loss": 0.7278267741203308,
"step": 2412
},
{
"epoch": 1.0185654008438818,
"grad_norm": 1.3167752027511597,
"learning_rate": 9.767450050574218e-05,
"loss": 0.6082334518432617,
"step": 2414
},
{
"epoch": 1.019409282700422,
"grad_norm": 1.1754449605941772,
"learning_rate": 9.766725640940358e-05,
"loss": 0.67228102684021,
"step": 2416
},
{
"epoch": 1.0202531645569621,
"grad_norm": 1.060952067375183,
"learning_rate": 9.766000131714442e-05,
"loss": 0.5984366536140442,
"step": 2418
},
{
"epoch": 1.021097046413502,
"grad_norm": 1.0826152563095093,
"learning_rate": 9.765273523063825e-05,
"loss": 0.690661609172821,
"step": 2420
},
{
"epoch": 1.0219409282700422,
"grad_norm": 1.423723816871643,
"learning_rate": 9.764545815156125e-05,
"loss": 0.7960668802261353,
"step": 2422
},
{
"epoch": 1.0227848101265822,
"grad_norm": 1.0882549285888672,
"learning_rate": 9.763817008159212e-05,
"loss": 0.6971074342727661,
"step": 2424
},
{
"epoch": 1.0236286919831223,
"grad_norm": 1.1053040027618408,
"learning_rate": 9.763087102241206e-05,
"loss": 0.6854458451271057,
"step": 2426
},
{
"epoch": 1.0244725738396625,
"grad_norm": 1.1975224018096924,
"learning_rate": 9.762356097570482e-05,
"loss": 0.6724489331245422,
"step": 2428
},
{
"epoch": 1.0253164556962024,
"grad_norm": 1.1692171096801758,
"learning_rate": 9.76162399431567e-05,
"loss": 0.7064506411552429,
"step": 2430
},
{
"epoch": 1.0261603375527426,
"grad_norm": 1.1927787065505981,
"learning_rate": 9.760890792645649e-05,
"loss": 0.6605257391929626,
"step": 2432
},
{
"epoch": 1.0270042194092828,
"grad_norm": 1.4147427082061768,
"learning_rate": 9.760156492729558e-05,
"loss": 0.6872501373291016,
"step": 2434
},
{
"epoch": 1.0278481012658227,
"grad_norm": 1.2503126859664917,
"learning_rate": 9.759421094736785e-05,
"loss": 0.7117500305175781,
"step": 2436
},
{
"epoch": 1.0286919831223629,
"grad_norm": 1.229978084564209,
"learning_rate": 9.758684598836971e-05,
"loss": 0.6740369200706482,
"step": 2438
},
{
"epoch": 1.029535864978903,
"grad_norm": 1.4765945672988892,
"learning_rate": 9.757947005200014e-05,
"loss": 0.7215790748596191,
"step": 2440
},
{
"epoch": 1.030379746835443,
"grad_norm": 1.282632827758789,
"learning_rate": 9.757208313996061e-05,
"loss": 0.6961746215820312,
"step": 2442
},
{
"epoch": 1.0312236286919831,
"grad_norm": 1.259828805923462,
"learning_rate": 9.756468525395512e-05,
"loss": 0.6348349452018738,
"step": 2444
},
{
"epoch": 1.0320675105485233,
"grad_norm": 1.0984172821044922,
"learning_rate": 9.755727639569024e-05,
"loss": 0.6756057739257812,
"step": 2446
},
{
"epoch": 1.0329113924050632,
"grad_norm": 1.235835075378418,
"learning_rate": 9.754985656687506e-05,
"loss": 0.6968509554862976,
"step": 2448
},
{
"epoch": 1.0337552742616034,
"grad_norm": 1.273032546043396,
"learning_rate": 9.754242576922119e-05,
"loss": 0.6793950796127319,
"step": 2450
},
{
"epoch": 1.0345991561181433,
"grad_norm": 1.251996397972107,
"learning_rate": 9.753498400444274e-05,
"loss": 0.645270586013794,
"step": 2452
},
{
"epoch": 1.0354430379746835,
"grad_norm": 1.4310805797576904,
"learning_rate": 9.752753127425642e-05,
"loss": 0.7291322350502014,
"step": 2454
},
{
"epoch": 1.0362869198312237,
"grad_norm": 1.6582196950912476,
"learning_rate": 9.752006758038142e-05,
"loss": 0.7553019523620605,
"step": 2456
},
{
"epoch": 1.0371308016877636,
"grad_norm": 1.081773042678833,
"learning_rate": 9.751259292453947e-05,
"loss": 0.5637331008911133,
"step": 2458
},
{
"epoch": 1.0379746835443038,
"grad_norm": 1.1483876705169678,
"learning_rate": 9.750510730845483e-05,
"loss": 0.6012396216392517,
"step": 2460
},
{
"epoch": 1.038818565400844,
"grad_norm": 1.0879185199737549,
"learning_rate": 9.749761073385428e-05,
"loss": 0.6795822381973267,
"step": 2462
},
{
"epoch": 1.0396624472573839,
"grad_norm": 1.2378218173980713,
"learning_rate": 9.749010320246714e-05,
"loss": 0.6895145773887634,
"step": 2464
},
{
"epoch": 1.040506329113924,
"grad_norm": 1.253233790397644,
"learning_rate": 9.748258471602527e-05,
"loss": 0.7124115228652954,
"step": 2466
},
{
"epoch": 1.0413502109704642,
"grad_norm": 1.3994864225387573,
"learning_rate": 9.747505527626302e-05,
"loss": 0.7304861545562744,
"step": 2468
},
{
"epoch": 1.0421940928270041,
"grad_norm": 1.2360669374465942,
"learning_rate": 9.74675148849173e-05,
"loss": 0.6845837831497192,
"step": 2470
},
{
"epoch": 1.0430379746835443,
"grad_norm": 1.126849889755249,
"learning_rate": 9.74599635437275e-05,
"loss": 0.6780203580856323,
"step": 2472
},
{
"epoch": 1.0438818565400845,
"grad_norm": 1.169788122177124,
"learning_rate": 9.745240125443562e-05,
"loss": 0.7550003528594971,
"step": 2474
},
{
"epoch": 1.0447257383966244,
"grad_norm": 1.1311867237091064,
"learning_rate": 9.744482801878612e-05,
"loss": 0.6910399198532104,
"step": 2476
},
{
"epoch": 1.0455696202531646,
"grad_norm": 1.1267731189727783,
"learning_rate": 9.743724383852597e-05,
"loss": 0.7164814472198486,
"step": 2478
},
{
"epoch": 1.0464135021097047,
"grad_norm": 1.2239704132080078,
"learning_rate": 9.742964871540472e-05,
"loss": 0.6428439617156982,
"step": 2480
},
{
"epoch": 1.0472573839662447,
"grad_norm": 1.1854743957519531,
"learning_rate": 9.742204265117443e-05,
"loss": 0.6994290351867676,
"step": 2482
},
{
"epoch": 1.0481012658227848,
"grad_norm": 1.0695894956588745,
"learning_rate": 9.741442564758964e-05,
"loss": 0.6725777983665466,
"step": 2484
},
{
"epoch": 1.048945147679325,
"grad_norm": 1.1799863576889038,
"learning_rate": 9.740679770640748e-05,
"loss": 0.6538674235343933,
"step": 2486
},
{
"epoch": 1.049789029535865,
"grad_norm": 1.295546293258667,
"learning_rate": 9.739915882938754e-05,
"loss": 0.780756950378418,
"step": 2488
},
{
"epoch": 1.0506329113924051,
"grad_norm": 1.2371755838394165,
"learning_rate": 9.739150901829198e-05,
"loss": 0.6657930612564087,
"step": 2490
},
{
"epoch": 1.051476793248945,
"grad_norm": 1.103037714958191,
"learning_rate": 9.738384827488547e-05,
"loss": 0.6675208210945129,
"step": 2492
},
{
"epoch": 1.0523206751054852,
"grad_norm": 1.1835435628890991,
"learning_rate": 9.737617660093517e-05,
"loss": 0.6693358421325684,
"step": 2494
},
{
"epoch": 1.0531645569620254,
"grad_norm": 1.003771424293518,
"learning_rate": 9.736849399821082e-05,
"loss": 0.624502956867218,
"step": 2496
},
{
"epoch": 1.0540084388185653,
"grad_norm": 1.1391769647598267,
"learning_rate": 9.736080046848463e-05,
"loss": 0.6350868344306946,
"step": 2498
},
{
"epoch": 1.0548523206751055,
"grad_norm": 1.376518726348877,
"learning_rate": 9.735309601353134e-05,
"loss": 0.6721012592315674,
"step": 2500
},
{
"epoch": 1.0548523206751055,
"eval_loss": 0.741338849067688,
"eval_runtime": 847.7478,
"eval_samples_per_second": 2.485,
"eval_steps_per_second": 2.485,
"step": 2500
},
{
"epoch": 1.0556962025316456,
"grad_norm": 1.194190502166748,
"learning_rate": 9.734538063512824e-05,
"loss": 0.6888233423233032,
"step": 2502
},
{
"epoch": 1.0565400843881856,
"grad_norm": 1.378830909729004,
"learning_rate": 9.733765433505513e-05,
"loss": 0.7095553278923035,
"step": 2504
},
{
"epoch": 1.0573839662447257,
"grad_norm": 1.1289541721343994,
"learning_rate": 9.732991711509428e-05,
"loss": 0.6734166145324707,
"step": 2506
},
{
"epoch": 1.058227848101266,
"grad_norm": 1.1858116388320923,
"learning_rate": 9.732216897703054e-05,
"loss": 0.7006195187568665,
"step": 2508
},
{
"epoch": 1.0590717299578059,
"grad_norm": 1.1365686655044556,
"learning_rate": 9.731440992265127e-05,
"loss": 0.6481205821037292,
"step": 2510
},
{
"epoch": 1.059915611814346,
"grad_norm": 1.2886228561401367,
"learning_rate": 9.730663995374632e-05,
"loss": 0.679282546043396,
"step": 2512
},
{
"epoch": 1.0607594936708862,
"grad_norm": 1.355322003364563,
"learning_rate": 9.729885907210808e-05,
"loss": 0.7656359672546387,
"step": 2514
},
{
"epoch": 1.0616033755274261,
"grad_norm": 1.1552364826202393,
"learning_rate": 9.729106727953142e-05,
"loss": 0.5996183156967163,
"step": 2516
},
{
"epoch": 1.0624472573839663,
"grad_norm": 1.1419235467910767,
"learning_rate": 9.728326457781381e-05,
"loss": 0.7599716782569885,
"step": 2518
},
{
"epoch": 1.0632911392405062,
"grad_norm": 1.2240079641342163,
"learning_rate": 9.727545096875512e-05,
"loss": 0.7150241732597351,
"step": 2520
},
{
"epoch": 1.0641350210970464,
"grad_norm": 1.2463440895080566,
"learning_rate": 9.726762645415785e-05,
"loss": 0.734352171421051,
"step": 2522
},
{
"epoch": 1.0649789029535865,
"grad_norm": 1.1680364608764648,
"learning_rate": 9.725979103582697e-05,
"loss": 0.6950796842575073,
"step": 2524
},
{
"epoch": 1.0658227848101265,
"grad_norm": 1.1680421829223633,
"learning_rate": 9.725194471556991e-05,
"loss": 0.7096341252326965,
"step": 2526
},
{
"epoch": 1.0666666666666667,
"grad_norm": 1.043717861175537,
"learning_rate": 9.724408749519671e-05,
"loss": 0.6486304402351379,
"step": 2528
},
{
"epoch": 1.0675105485232068,
"grad_norm": 1.1240284442901611,
"learning_rate": 9.723621937651985e-05,
"loss": 0.6519505381584167,
"step": 2530
},
{
"epoch": 1.0683544303797468,
"grad_norm": 1.185223937034607,
"learning_rate": 9.722834036135439e-05,
"loss": 0.6724293231964111,
"step": 2532
},
{
"epoch": 1.069198312236287,
"grad_norm": 1.3234196901321411,
"learning_rate": 9.722045045151784e-05,
"loss": 0.6886576414108276,
"step": 2534
},
{
"epoch": 1.070042194092827,
"grad_norm": 1.333084225654602,
"learning_rate": 9.721254964883024e-05,
"loss": 0.688493549823761,
"step": 2536
},
{
"epoch": 1.070886075949367,
"grad_norm": 1.2435462474822998,
"learning_rate": 9.720463795511419e-05,
"loss": 0.6527412533760071,
"step": 2538
},
{
"epoch": 1.0717299578059072,
"grad_norm": 1.1521880626678467,
"learning_rate": 9.719671537219472e-05,
"loss": 0.6508163809776306,
"step": 2540
},
{
"epoch": 1.0725738396624473,
"grad_norm": 1.015013575553894,
"learning_rate": 9.718878190189947e-05,
"loss": 0.6954023838043213,
"step": 2542
},
{
"epoch": 1.0734177215189873,
"grad_norm": 1.1507678031921387,
"learning_rate": 9.718083754605851e-05,
"loss": 0.7201322913169861,
"step": 2544
},
{
"epoch": 1.0742616033755275,
"grad_norm": 1.0569016933441162,
"learning_rate": 9.717288230650444e-05,
"loss": 0.6688649654388428,
"step": 2546
},
{
"epoch": 1.0751054852320676,
"grad_norm": 1.2178492546081543,
"learning_rate": 9.716491618507241e-05,
"loss": 0.7077898979187012,
"step": 2548
},
{
"epoch": 1.0759493670886076,
"grad_norm": 1.3587230443954468,
"learning_rate": 9.715693918360002e-05,
"loss": 0.7312119603157043,
"step": 2550
},
{
"epoch": 1.0767932489451477,
"grad_norm": 1.1930122375488281,
"learning_rate": 9.714895130392744e-05,
"loss": 0.6910589337348938,
"step": 2552
},
{
"epoch": 1.0776371308016879,
"grad_norm": 1.2440707683563232,
"learning_rate": 9.71409525478973e-05,
"loss": 0.7942836284637451,
"step": 2554
},
{
"epoch": 1.0784810126582278,
"grad_norm": 1.3755065202713013,
"learning_rate": 9.713294291735477e-05,
"loss": 0.6652286052703857,
"step": 2556
},
{
"epoch": 1.079324894514768,
"grad_norm": 1.165448784828186,
"learning_rate": 9.71249224141475e-05,
"loss": 0.6025735139846802,
"step": 2558
},
{
"epoch": 1.080168776371308,
"grad_norm": 1.2981204986572266,
"learning_rate": 9.711689104012569e-05,
"loss": 0.7343734502792358,
"step": 2560
},
{
"epoch": 1.081012658227848,
"grad_norm": 1.2040622234344482,
"learning_rate": 9.710884879714202e-05,
"loss": 0.6903306841850281,
"step": 2562
},
{
"epoch": 1.0818565400843883,
"grad_norm": 1.1835904121398926,
"learning_rate": 9.710079568705168e-05,
"loss": 0.69134920835495,
"step": 2564
},
{
"epoch": 1.0827004219409282,
"grad_norm": 1.3345229625701904,
"learning_rate": 9.709273171171235e-05,
"loss": 0.6471185088157654,
"step": 2566
},
{
"epoch": 1.0835443037974684,
"grad_norm": 1.0884469747543335,
"learning_rate": 9.708465687298425e-05,
"loss": 0.6302382349967957,
"step": 2568
},
{
"epoch": 1.0843881856540085,
"grad_norm": 1.1994211673736572,
"learning_rate": 9.707657117273007e-05,
"loss": 0.7329678535461426,
"step": 2570
},
{
"epoch": 1.0852320675105485,
"grad_norm": 1.2609503269195557,
"learning_rate": 9.706847461281507e-05,
"loss": 0.719862163066864,
"step": 2572
},
{
"epoch": 1.0860759493670886,
"grad_norm": 1.2686879634857178,
"learning_rate": 9.706036719510694e-05,
"loss": 0.7142901420593262,
"step": 2574
},
{
"epoch": 1.0869198312236288,
"grad_norm": 1.2763310670852661,
"learning_rate": 9.705224892147591e-05,
"loss": 0.7009075284004211,
"step": 2576
},
{
"epoch": 1.0877637130801687,
"grad_norm": 1.1704022884368896,
"learning_rate": 9.70441197937947e-05,
"loss": 0.6873779296875,
"step": 2578
},
{
"epoch": 1.0886075949367089,
"grad_norm": 1.0482875108718872,
"learning_rate": 9.703597981393856e-05,
"loss": 0.6437726020812988,
"step": 2580
},
{
"epoch": 1.0894514767932488,
"grad_norm": 1.28431236743927,
"learning_rate": 9.702782898378521e-05,
"loss": 0.6933431625366211,
"step": 2582
},
{
"epoch": 1.090295358649789,
"grad_norm": 1.0962283611297607,
"learning_rate": 9.701966730521491e-05,
"loss": 0.6488757133483887,
"step": 2584
},
{
"epoch": 1.0911392405063292,
"grad_norm": 1.2177873849868774,
"learning_rate": 9.70114947801104e-05,
"loss": 0.6385396122932434,
"step": 2586
},
{
"epoch": 1.091983122362869,
"grad_norm": 1.197059988975525,
"learning_rate": 9.70033114103569e-05,
"loss": 0.6826614737510681,
"step": 2588
},
{
"epoch": 1.0928270042194093,
"grad_norm": 1.1624075174331665,
"learning_rate": 9.699511719784217e-05,
"loss": 0.605629563331604,
"step": 2590
},
{
"epoch": 1.0936708860759494,
"grad_norm": 1.2975167036056519,
"learning_rate": 9.698691214445648e-05,
"loss": 0.734926700592041,
"step": 2592
},
{
"epoch": 1.0945147679324894,
"grad_norm": 1.215414047241211,
"learning_rate": 9.697869625209255e-05,
"loss": 0.7281333804130554,
"step": 2594
},
{
"epoch": 1.0953586497890295,
"grad_norm": 1.1862860918045044,
"learning_rate": 9.697046952264563e-05,
"loss": 0.7388250827789307,
"step": 2596
},
{
"epoch": 1.0962025316455697,
"grad_norm": 1.1127797365188599,
"learning_rate": 9.696223195801348e-05,
"loss": 0.6495320796966553,
"step": 2598
},
{
"epoch": 1.0970464135021096,
"grad_norm": 1.0863338708877563,
"learning_rate": 9.695398356009636e-05,
"loss": 0.7157143950462341,
"step": 2600
},
{
"epoch": 1.0970464135021096,
"eval_loss": 0.7377332448959351,
"eval_runtime": 859.6612,
"eval_samples_per_second": 2.451,
"eval_steps_per_second": 2.451,
"step": 2600
},
{
"epoch": 1.0978902953586498,
"grad_norm": 1.1228652000427246,
"learning_rate": 9.694572433079699e-05,
"loss": 0.6597335934638977,
"step": 2602
},
{
"epoch": 1.09873417721519,
"grad_norm": 1.3077653646469116,
"learning_rate": 9.69374542720206e-05,
"loss": 0.6715680360794067,
"step": 2604
},
{
"epoch": 1.09957805907173,
"grad_norm": 1.241603970527649,
"learning_rate": 9.692917338567499e-05,
"loss": 0.6910243034362793,
"step": 2606
},
{
"epoch": 1.10042194092827,
"grad_norm": 1.1372551918029785,
"learning_rate": 9.692088167367037e-05,
"loss": 0.6519553065299988,
"step": 2608
},
{
"epoch": 1.1012658227848102,
"grad_norm": 1.2894765138626099,
"learning_rate": 9.691257913791949e-05,
"loss": 0.6542758941650391,
"step": 2610
},
{
"epoch": 1.1021097046413502,
"grad_norm": 1.0800915956497192,
"learning_rate": 9.690426578033755e-05,
"loss": 0.6886795163154602,
"step": 2612
},
{
"epoch": 1.1029535864978903,
"grad_norm": 1.3394384384155273,
"learning_rate": 9.689594160284233e-05,
"loss": 0.7512150406837463,
"step": 2614
},
{
"epoch": 1.1037974683544305,
"grad_norm": 1.2175323963165283,
"learning_rate": 9.688760660735402e-05,
"loss": 0.67207932472229,
"step": 2616
},
{
"epoch": 1.1046413502109704,
"grad_norm": 1.2181185483932495,
"learning_rate": 9.687926079579537e-05,
"loss": 0.6591740846633911,
"step": 2618
},
{
"epoch": 1.1054852320675106,
"grad_norm": 1.1740983724594116,
"learning_rate": 9.68709041700916e-05,
"loss": 0.6431041359901428,
"step": 2620
},
{
"epoch": 1.1063291139240505,
"grad_norm": 1.1792434453964233,
"learning_rate": 9.686253673217038e-05,
"loss": 0.6573615074157715,
"step": 2622
},
{
"epoch": 1.1071729957805907,
"grad_norm": 1.058391809463501,
"learning_rate": 9.685415848396196e-05,
"loss": 0.5576209425926208,
"step": 2624
},
{
"epoch": 1.1080168776371309,
"grad_norm": 1.3203206062316895,
"learning_rate": 9.684576942739903e-05,
"loss": 0.668684184551239,
"step": 2626
},
{
"epoch": 1.1088607594936708,
"grad_norm": 1.2391762733459473,
"learning_rate": 9.68373695644168e-05,
"loss": 0.6800089478492737,
"step": 2628
},
{
"epoch": 1.109704641350211,
"grad_norm": 1.2323405742645264,
"learning_rate": 9.682895889695292e-05,
"loss": 0.6433757543563843,
"step": 2630
},
{
"epoch": 1.1105485232067511,
"grad_norm": 1.2656551599502563,
"learning_rate": 9.682053742694759e-05,
"loss": 0.6628785729408264,
"step": 2632
},
{
"epoch": 1.111392405063291,
"grad_norm": 1.2984392642974854,
"learning_rate": 9.681210515634349e-05,
"loss": 0.6838971972465515,
"step": 2634
},
{
"epoch": 1.1122362869198312,
"grad_norm": 1.3200393915176392,
"learning_rate": 9.680366208708576e-05,
"loss": 0.7548647522926331,
"step": 2636
},
{
"epoch": 1.1130801687763714,
"grad_norm": 1.225388526916504,
"learning_rate": 9.679520822112208e-05,
"loss": 0.6553335189819336,
"step": 2638
},
{
"epoch": 1.1139240506329113,
"grad_norm": 1.2350653409957886,
"learning_rate": 9.678674356040259e-05,
"loss": 0.631401538848877,
"step": 2640
},
{
"epoch": 1.1147679324894515,
"grad_norm": 1.2325507402420044,
"learning_rate": 9.677826810687989e-05,
"loss": 0.6459156274795532,
"step": 2642
},
{
"epoch": 1.1156118143459917,
"grad_norm": 1.0008996725082397,
"learning_rate": 9.676978186250915e-05,
"loss": 0.6425284743309021,
"step": 2644
},
{
"epoch": 1.1164556962025316,
"grad_norm": 1.3767247200012207,
"learning_rate": 9.676128482924796e-05,
"loss": 0.6451422572135925,
"step": 2646
},
{
"epoch": 1.1172995780590718,
"grad_norm": 1.2070895433425903,
"learning_rate": 9.675277700905643e-05,
"loss": 0.6713272929191589,
"step": 2648
},
{
"epoch": 1.1181434599156117,
"grad_norm": 1.1582069396972656,
"learning_rate": 9.674425840389716e-05,
"loss": 0.6285044550895691,
"step": 2650
},
{
"epoch": 1.1189873417721519,
"grad_norm": 1.1641311645507812,
"learning_rate": 9.67357290157352e-05,
"loss": 0.624229907989502,
"step": 2652
},
{
"epoch": 1.119831223628692,
"grad_norm": 1.3071147203445435,
"learning_rate": 9.672718884653814e-05,
"loss": 0.7214919328689575,
"step": 2654
},
{
"epoch": 1.120675105485232,
"grad_norm": 1.2157800197601318,
"learning_rate": 9.671863789827602e-05,
"loss": 0.8062215447425842,
"step": 2656
},
{
"epoch": 1.1215189873417721,
"grad_norm": 1.2843927145004272,
"learning_rate": 9.671007617292138e-05,
"loss": 0.6362426280975342,
"step": 2658
},
{
"epoch": 1.1223628691983123,
"grad_norm": 1.1182712316513062,
"learning_rate": 9.670150367244927e-05,
"loss": 0.6181318163871765,
"step": 2660
},
{
"epoch": 1.1232067510548522,
"grad_norm": 1.566605806350708,
"learning_rate": 9.669292039883717e-05,
"loss": 0.6973897218704224,
"step": 2662
},
{
"epoch": 1.1240506329113924,
"grad_norm": 1.0726850032806396,
"learning_rate": 9.66843263540651e-05,
"loss": 0.6117324829101562,
"step": 2664
},
{
"epoch": 1.1248945147679326,
"grad_norm": 1.2953020334243774,
"learning_rate": 9.66757215401155e-05,
"loss": 0.642676830291748,
"step": 2666
},
{
"epoch": 1.1257383966244725,
"grad_norm": 1.1184383630752563,
"learning_rate": 9.66671059589734e-05,
"loss": 0.6757452487945557,
"step": 2668
},
{
"epoch": 1.1265822784810127,
"grad_norm": 1.2732970714569092,
"learning_rate": 9.66584796126262e-05,
"loss": 0.6861951947212219,
"step": 2670
},
{
"epoch": 1.1274261603375528,
"grad_norm": 1.2713000774383545,
"learning_rate": 9.664984250306383e-05,
"loss": 0.6727077960968018,
"step": 2672
},
{
"epoch": 1.1282700421940928,
"grad_norm": 1.269827961921692,
"learning_rate": 9.664119463227874e-05,
"loss": 0.7355974912643433,
"step": 2674
},
{
"epoch": 1.129113924050633,
"grad_norm": 1.3067172765731812,
"learning_rate": 9.663253600226581e-05,
"loss": 0.7121313214302063,
"step": 2676
},
{
"epoch": 1.129957805907173,
"grad_norm": 1.2958797216415405,
"learning_rate": 9.662386661502242e-05,
"loss": 0.6671369075775146,
"step": 2678
},
{
"epoch": 1.130801687763713,
"grad_norm": 1.2943401336669922,
"learning_rate": 9.661518647254842e-05,
"loss": 0.6153768301010132,
"step": 2680
},
{
"epoch": 1.1316455696202532,
"grad_norm": 1.1744167804718018,
"learning_rate": 9.660649557684616e-05,
"loss": 0.6070778965950012,
"step": 2682
},
{
"epoch": 1.1324894514767934,
"grad_norm": 1.159209132194519,
"learning_rate": 9.659779392992047e-05,
"loss": 0.676887035369873,
"step": 2684
},
{
"epoch": 1.1333333333333333,
"grad_norm": 1.1937510967254639,
"learning_rate": 9.658908153377866e-05,
"loss": 0.6086745262145996,
"step": 2686
},
{
"epoch": 1.1341772151898735,
"grad_norm": 1.1461687088012695,
"learning_rate": 9.658035839043049e-05,
"loss": 0.6493708491325378,
"step": 2688
},
{
"epoch": 1.1350210970464134,
"grad_norm": 2.066361665725708,
"learning_rate": 9.657162450188824e-05,
"loss": 0.6813004016876221,
"step": 2690
},
{
"epoch": 1.1358649789029536,
"grad_norm": 1.086910367012024,
"learning_rate": 9.656287987016664e-05,
"loss": 0.721062183380127,
"step": 2692
},
{
"epoch": 1.1367088607594937,
"grad_norm": 1.1869292259216309,
"learning_rate": 9.65541244972829e-05,
"loss": 0.5975021123886108,
"step": 2694
},
{
"epoch": 1.1375527426160337,
"grad_norm": 1.2456518411636353,
"learning_rate": 9.654535838525674e-05,
"loss": 0.6818324327468872,
"step": 2696
},
{
"epoch": 1.1383966244725738,
"grad_norm": 1.5271464586257935,
"learning_rate": 9.653658153611031e-05,
"loss": 0.6844469308853149,
"step": 2698
},
{
"epoch": 1.139240506329114,
"grad_norm": 1.1403794288635254,
"learning_rate": 9.652779395186827e-05,
"loss": 0.6388684511184692,
"step": 2700
},
{
"epoch": 1.139240506329114,
"eval_loss": 0.7335711717605591,
"eval_runtime": 861.9651,
"eval_samples_per_second": 2.444,
"eval_steps_per_second": 2.444,
"step": 2700
},
{
"epoch": 1.140084388185654,
"grad_norm": 1.1091634035110474,
"learning_rate": 9.651899563455775e-05,
"loss": 0.6154619455337524,
"step": 2702
},
{
"epoch": 1.140928270042194,
"grad_norm": 1.3280601501464844,
"learning_rate": 9.651018658620837e-05,
"loss": 0.629319429397583,
"step": 2704
},
{
"epoch": 1.1417721518987343,
"grad_norm": 1.226806402206421,
"learning_rate": 9.650136680885216e-05,
"loss": 0.6088175773620605,
"step": 2706
},
{
"epoch": 1.1426160337552742,
"grad_norm": 1.0593408346176147,
"learning_rate": 9.649253630452372e-05,
"loss": 0.6199659705162048,
"step": 2708
},
{
"epoch": 1.1434599156118144,
"grad_norm": 1.1112475395202637,
"learning_rate": 9.648369507526008e-05,
"loss": 0.7233364582061768,
"step": 2710
},
{
"epoch": 1.1443037974683543,
"grad_norm": 1.1737885475158691,
"learning_rate": 9.647484312310068e-05,
"loss": 0.6687955856323242,
"step": 2712
},
{
"epoch": 1.1451476793248945,
"grad_norm": 1.194532036781311,
"learning_rate": 9.646598045008756e-05,
"loss": 0.6508969068527222,
"step": 2714
},
{
"epoch": 1.1459915611814346,
"grad_norm": 1.069395899772644,
"learning_rate": 9.645710705826517e-05,
"loss": 0.6408317685127258,
"step": 2716
},
{
"epoch": 1.1468354430379746,
"grad_norm": 1.2429133653640747,
"learning_rate": 9.644822294968037e-05,
"loss": 0.650763750076294,
"step": 2718
},
{
"epoch": 1.1476793248945147,
"grad_norm": 1.2950133085250854,
"learning_rate": 9.64393281263826e-05,
"loss": 0.6952191591262817,
"step": 2720
},
{
"epoch": 1.148523206751055,
"grad_norm": 1.1972628831863403,
"learning_rate": 9.643042259042372e-05,
"loss": 0.6772956252098083,
"step": 2722
},
{
"epoch": 1.1493670886075948,
"grad_norm": 1.1670407056808472,
"learning_rate": 9.642150634385805e-05,
"loss": 0.6734447479248047,
"step": 2724
},
{
"epoch": 1.150210970464135,
"grad_norm": 1.120302677154541,
"learning_rate": 9.641257938874243e-05,
"loss": 0.6387717127799988,
"step": 2726
},
{
"epoch": 1.1510548523206752,
"grad_norm": 1.1241344213485718,
"learning_rate": 9.640364172713609e-05,
"loss": 0.6592874526977539,
"step": 2728
},
{
"epoch": 1.1518987341772151,
"grad_norm": 1.2627261877059937,
"learning_rate": 9.639469336110083e-05,
"loss": 0.7257466912269592,
"step": 2730
},
{
"epoch": 1.1527426160337553,
"grad_norm": 1.0528618097305298,
"learning_rate": 9.638573429270083e-05,
"loss": 0.572188138961792,
"step": 2732
},
{
"epoch": 1.1535864978902954,
"grad_norm": 1.212536334991455,
"learning_rate": 9.637676452400277e-05,
"loss": 0.678981602191925,
"step": 2734
},
{
"epoch": 1.1544303797468354,
"grad_norm": 1.152167797088623,
"learning_rate": 9.636778405707582e-05,
"loss": 0.6375001072883606,
"step": 2736
},
{
"epoch": 1.1552742616033755,
"grad_norm": 1.2400429248809814,
"learning_rate": 9.635879289399161e-05,
"loss": 0.7602289319038391,
"step": 2738
},
{
"epoch": 1.1561181434599157,
"grad_norm": 1.3488622903823853,
"learning_rate": 9.634979103682421e-05,
"loss": 0.6209543943405151,
"step": 2740
},
{
"epoch": 1.1569620253164556,
"grad_norm": 1.1999555826187134,
"learning_rate": 9.634077848765019e-05,
"loss": 0.6215830445289612,
"step": 2742
},
{
"epoch": 1.1578059071729958,
"grad_norm": 1.2008578777313232,
"learning_rate": 9.633175524854855e-05,
"loss": 0.6634654998779297,
"step": 2744
},
{
"epoch": 1.158649789029536,
"grad_norm": 1.3920676708221436,
"learning_rate": 9.63227213216008e-05,
"loss": 0.7515161633491516,
"step": 2746
},
{
"epoch": 1.159493670886076,
"grad_norm": 1.0551656484603882,
"learning_rate": 9.631367670889089e-05,
"loss": 0.724361777305603,
"step": 2748
},
{
"epoch": 1.160337552742616,
"grad_norm": 1.2820028066635132,
"learning_rate": 9.630462141250523e-05,
"loss": 0.6673553586006165,
"step": 2750
},
{
"epoch": 1.1611814345991562,
"grad_norm": 1.1452983617782593,
"learning_rate": 9.62955554345327e-05,
"loss": 0.7029784917831421,
"step": 2752
},
{
"epoch": 1.1620253164556962,
"grad_norm": 1.1808624267578125,
"learning_rate": 9.628647877706466e-05,
"loss": 0.7355457544326782,
"step": 2754
},
{
"epoch": 1.1628691983122363,
"grad_norm": 1.0574703216552734,
"learning_rate": 9.627739144219492e-05,
"loss": 0.6144933700561523,
"step": 2756
},
{
"epoch": 1.1637130801687763,
"grad_norm": 1.215733528137207,
"learning_rate": 9.626829343201974e-05,
"loss": 0.6843759417533875,
"step": 2758
},
{
"epoch": 1.1645569620253164,
"grad_norm": 1.1667706966400146,
"learning_rate": 9.625918474863787e-05,
"loss": 0.6197049617767334,
"step": 2760
},
{
"epoch": 1.1654008438818566,
"grad_norm": 1.3765631914138794,
"learning_rate": 9.62500653941505e-05,
"loss": 0.715958297252655,
"step": 2762
},
{
"epoch": 1.1662447257383965,
"grad_norm": 1.173715591430664,
"learning_rate": 9.62409353706613e-05,
"loss": 0.7433139085769653,
"step": 2764
},
{
"epoch": 1.1670886075949367,
"grad_norm": 1.1837430000305176,
"learning_rate": 9.623179468027637e-05,
"loss": 0.7174371480941772,
"step": 2766
},
{
"epoch": 1.1679324894514769,
"grad_norm": 1.1577154397964478,
"learning_rate": 9.622264332510432e-05,
"loss": 0.7184823751449585,
"step": 2768
},
{
"epoch": 1.1687763713080168,
"grad_norm": 1.165246605873108,
"learning_rate": 9.621348130725617e-05,
"loss": 0.693343460559845,
"step": 2770
},
{
"epoch": 1.169620253164557,
"grad_norm": 1.2853080034255981,
"learning_rate": 9.620430862884542e-05,
"loss": 0.6999852061271667,
"step": 2772
},
{
"epoch": 1.1704641350210971,
"grad_norm": 1.1782865524291992,
"learning_rate": 9.619512529198806e-05,
"loss": 0.6034331321716309,
"step": 2774
},
{
"epoch": 1.171308016877637,
"grad_norm": 1.4055447578430176,
"learning_rate": 9.61859312988025e-05,
"loss": 0.7588269710540771,
"step": 2776
},
{
"epoch": 1.1721518987341772,
"grad_norm": 1.1148805618286133,
"learning_rate": 9.617672665140957e-05,
"loss": 0.6913981437683105,
"step": 2778
},
{
"epoch": 1.1729957805907172,
"grad_norm": 1.1311042308807373,
"learning_rate": 9.616751135193266e-05,
"loss": 0.5976925492286682,
"step": 2780
},
{
"epoch": 1.1738396624472573,
"grad_norm": 1.2378602027893066,
"learning_rate": 9.615828540249754e-05,
"loss": 0.6897050142288208,
"step": 2782
},
{
"epoch": 1.1746835443037975,
"grad_norm": 1.3445732593536377,
"learning_rate": 9.614904880523248e-05,
"loss": 0.6772098541259766,
"step": 2784
},
{
"epoch": 1.1755274261603375,
"grad_norm": 1.3380862474441528,
"learning_rate": 9.613980156226815e-05,
"loss": 0.6354818344116211,
"step": 2786
},
{
"epoch": 1.1763713080168776,
"grad_norm": 1.0955157279968262,
"learning_rate": 9.613054367573773e-05,
"loss": 0.6541208028793335,
"step": 2788
},
{
"epoch": 1.1772151898734178,
"grad_norm": 1.0176626443862915,
"learning_rate": 9.612127514777686e-05,
"loss": 0.6472887992858887,
"step": 2790
},
{
"epoch": 1.1780590717299577,
"grad_norm": 1.2644864320755005,
"learning_rate": 9.611199598052357e-05,
"loss": 0.7511212229728699,
"step": 2792
},
{
"epoch": 1.1789029535864979,
"grad_norm": 1.248197317123413,
"learning_rate": 9.61027061761184e-05,
"loss": 0.696236789226532,
"step": 2794
},
{
"epoch": 1.179746835443038,
"grad_norm": 1.189935564994812,
"learning_rate": 9.609340573670436e-05,
"loss": 0.5962010622024536,
"step": 2796
},
{
"epoch": 1.180590717299578,
"grad_norm": 1.1760492324829102,
"learning_rate": 9.608409466442685e-05,
"loss": 0.5981685519218445,
"step": 2798
},
{
"epoch": 1.1814345991561181,
"grad_norm": 1.1820716857910156,
"learning_rate": 9.607477296143374e-05,
"loss": 0.6186091303825378,
"step": 2800
},
{
"epoch": 1.1814345991561181,
"eval_loss": 0.7298192977905273,
"eval_runtime": 849.544,
"eval_samples_per_second": 2.48,
"eval_steps_per_second": 2.48,
"step": 2800
},
{
"epoch": 1.1822784810126583,
"grad_norm": 1.0353888273239136,
"learning_rate": 9.606544062987541e-05,
"loss": 0.5859389901161194,
"step": 2802
},
{
"epoch": 1.1831223628691983,
"grad_norm": 1.3141933679580688,
"learning_rate": 9.605609767190464e-05,
"loss": 0.6573460698127747,
"step": 2804
},
{
"epoch": 1.1839662447257384,
"grad_norm": 1.1209372282028198,
"learning_rate": 9.604674408967664e-05,
"loss": 0.6991921067237854,
"step": 2806
},
{
"epoch": 1.1848101265822786,
"grad_norm": 1.2830493450164795,
"learning_rate": 9.603737988534913e-05,
"loss": 0.6438087821006775,
"step": 2808
},
{
"epoch": 1.1856540084388185,
"grad_norm": 1.1427195072174072,
"learning_rate": 9.602800506108225e-05,
"loss": 0.6452094316482544,
"step": 2810
},
{
"epoch": 1.1864978902953587,
"grad_norm": 1.316420078277588,
"learning_rate": 9.601861961903857e-05,
"loss": 0.6745601296424866,
"step": 2812
},
{
"epoch": 1.1873417721518988,
"grad_norm": 1.1643308401107788,
"learning_rate": 9.600922356138317e-05,
"loss": 0.6761514544487,
"step": 2814
},
{
"epoch": 1.1881856540084388,
"grad_norm": 1.036056399345398,
"learning_rate": 9.59998168902835e-05,
"loss": 0.6453908681869507,
"step": 2816
},
{
"epoch": 1.189029535864979,
"grad_norm": 1.2211129665374756,
"learning_rate": 9.599039960790954e-05,
"loss": 0.6576406359672546,
"step": 2818
},
{
"epoch": 1.189873417721519,
"grad_norm": 1.084114670753479,
"learning_rate": 9.598097171643364e-05,
"loss": 0.6214181780815125,
"step": 2820
},
{
"epoch": 1.190717299578059,
"grad_norm": 1.1297314167022705,
"learning_rate": 9.597153321803064e-05,
"loss": 0.6381646990776062,
"step": 2822
},
{
"epoch": 1.1915611814345992,
"grad_norm": 1.2568120956420898,
"learning_rate": 9.596208411487784e-05,
"loss": 0.7129076719284058,
"step": 2824
},
{
"epoch": 1.1924050632911392,
"grad_norm": 1.07041335105896,
"learning_rate": 9.595262440915493e-05,
"loss": 0.7123546004295349,
"step": 2826
},
{
"epoch": 1.1932489451476793,
"grad_norm": 1.3950074911117554,
"learning_rate": 9.594315410304413e-05,
"loss": 0.7263038158416748,
"step": 2828
},
{
"epoch": 1.1940928270042195,
"grad_norm": 1.2470672130584717,
"learning_rate": 9.593367319873002e-05,
"loss": 0.6863036751747131,
"step": 2830
},
{
"epoch": 1.1949367088607594,
"grad_norm": 1.2065461874008179,
"learning_rate": 9.592418169839968e-05,
"loss": 0.745354175567627,
"step": 2832
},
{
"epoch": 1.1957805907172996,
"grad_norm": 1.1710152626037598,
"learning_rate": 9.591467960424261e-05,
"loss": 0.6401656866073608,
"step": 2834
},
{
"epoch": 1.1966244725738397,
"grad_norm": 1.3324087858200073,
"learning_rate": 9.590516691845077e-05,
"loss": 0.7402615547180176,
"step": 2836
},
{
"epoch": 1.1974683544303797,
"grad_norm": 1.0100195407867432,
"learning_rate": 9.589564364321855e-05,
"loss": 0.5723769068717957,
"step": 2838
},
{
"epoch": 1.1983122362869199,
"grad_norm": 1.2706246376037598,
"learning_rate": 9.588610978074277e-05,
"loss": 0.6618966460227966,
"step": 2840
},
{
"epoch": 1.1991561181434598,
"grad_norm": 1.1921758651733398,
"learning_rate": 9.587656533322273e-05,
"loss": 0.7090804576873779,
"step": 2842
},
{
"epoch": 1.2,
"grad_norm": 1.36713445186615,
"learning_rate": 9.586701030286014e-05,
"loss": 0.6930652856826782,
"step": 2844
},
{
"epoch": 1.2008438818565401,
"grad_norm": 1.3084295988082886,
"learning_rate": 9.585744469185917e-05,
"loss": 0.7386236190795898,
"step": 2846
},
{
"epoch": 1.20168776371308,
"grad_norm": 1.198922038078308,
"learning_rate": 9.584786850242642e-05,
"loss": 0.6179903149604797,
"step": 2848
},
{
"epoch": 1.2025316455696202,
"grad_norm": 1.2106369733810425,
"learning_rate": 9.583828173677092e-05,
"loss": 0.7027528882026672,
"step": 2850
},
{
"epoch": 1.2033755274261604,
"grad_norm": 1.2959522008895874,
"learning_rate": 9.582868439710418e-05,
"loss": 0.6612945199012756,
"step": 2852
},
{
"epoch": 1.2042194092827003,
"grad_norm": 1.1441705226898193,
"learning_rate": 9.58190764856401e-05,
"loss": 0.7085917592048645,
"step": 2854
},
{
"epoch": 1.2050632911392405,
"grad_norm": 1.1586185693740845,
"learning_rate": 9.580945800459504e-05,
"loss": 0.7480600476264954,
"step": 2856
},
{
"epoch": 1.2059071729957807,
"grad_norm": 1.2068266868591309,
"learning_rate": 9.579982895618783e-05,
"loss": 0.7185836434364319,
"step": 2858
},
{
"epoch": 1.2067510548523206,
"grad_norm": 1.2188525199890137,
"learning_rate": 9.579018934263966e-05,
"loss": 0.6737306118011475,
"step": 2860
},
{
"epoch": 1.2075949367088608,
"grad_norm": 1.1513181924819946,
"learning_rate": 9.578053916617423e-05,
"loss": 0.7239293456077576,
"step": 2862
},
{
"epoch": 1.208438818565401,
"grad_norm": 1.2063703536987305,
"learning_rate": 9.577087842901764e-05,
"loss": 0.6416276097297668,
"step": 2864
},
{
"epoch": 1.2092827004219409,
"grad_norm": 1.102460503578186,
"learning_rate": 9.576120713339844e-05,
"loss": 0.697213351726532,
"step": 2866
},
{
"epoch": 1.210126582278481,
"grad_norm": 1.2484638690948486,
"learning_rate": 9.575152528154763e-05,
"loss": 0.6664742231369019,
"step": 2868
},
{
"epoch": 1.2109704641350212,
"grad_norm": 1.4476624727249146,
"learning_rate": 9.57418328756986e-05,
"loss": 0.6914868354797363,
"step": 2870
},
{
"epoch": 1.2118143459915611,
"grad_norm": 1.0130122900009155,
"learning_rate": 9.573212991808722e-05,
"loss": 0.662024736404419,
"step": 2872
},
{
"epoch": 1.2126582278481013,
"grad_norm": 1.014470100402832,
"learning_rate": 9.572241641095177e-05,
"loss": 0.6330409646034241,
"step": 2874
},
{
"epoch": 1.2135021097046415,
"grad_norm": 1.1803333759307861,
"learning_rate": 9.571269235653298e-05,
"loss": 0.6607463955879211,
"step": 2876
},
{
"epoch": 1.2143459915611814,
"grad_norm": 1.261366844177246,
"learning_rate": 9.570295775707398e-05,
"loss": 0.6925629377365112,
"step": 2878
},
{
"epoch": 1.2151898734177216,
"grad_norm": 1.226670503616333,
"learning_rate": 9.569321261482037e-05,
"loss": 0.7070510983467102,
"step": 2880
},
{
"epoch": 1.2160337552742617,
"grad_norm": 1.164565920829773,
"learning_rate": 9.568345693202016e-05,
"loss": 0.7243561744689941,
"step": 2882
},
{
"epoch": 1.2168776371308017,
"grad_norm": 1.060331106185913,
"learning_rate": 9.567369071092382e-05,
"loss": 0.6316909790039062,
"step": 2884
},
{
"epoch": 1.2177215189873418,
"grad_norm": 1.1998693943023682,
"learning_rate": 9.566391395378419e-05,
"loss": 0.6139125227928162,
"step": 2886
},
{
"epoch": 1.2185654008438818,
"grad_norm": 1.1875834465026855,
"learning_rate": 9.565412666285661e-05,
"loss": 0.688897430896759,
"step": 2888
},
{
"epoch": 1.219409282700422,
"grad_norm": 1.199174404144287,
"learning_rate": 9.564432884039882e-05,
"loss": 0.684590756893158,
"step": 2890
},
{
"epoch": 1.220253164556962,
"grad_norm": 1.2428219318389893,
"learning_rate": 9.563452048867099e-05,
"loss": 0.67433100938797,
"step": 2892
},
{
"epoch": 1.221097046413502,
"grad_norm": 1.0826431512832642,
"learning_rate": 9.562470160993568e-05,
"loss": 0.6959785223007202,
"step": 2894
},
{
"epoch": 1.2219409282700422,
"grad_norm": 1.3140246868133545,
"learning_rate": 9.561487220645797e-05,
"loss": 0.6443175673484802,
"step": 2896
},
{
"epoch": 1.2227848101265824,
"grad_norm": 1.2758334875106812,
"learning_rate": 9.560503228050529e-05,
"loss": 0.6715332865715027,
"step": 2898
},
{
"epoch": 1.2236286919831223,
"grad_norm": 1.3326421976089478,
"learning_rate": 9.559518183434753e-05,
"loss": 0.6896081566810608,
"step": 2900
},
{
"epoch": 1.2236286919831223,
"eval_loss": 0.7281573414802551,
"eval_runtime": 854.563,
"eval_samples_per_second": 2.466,
"eval_steps_per_second": 2.466,
"step": 2900
},
{
"epoch": 1.2244725738396625,
"grad_norm": 1.3225606679916382,
"learning_rate": 9.558532087025697e-05,
"loss": 0.6797633171081543,
"step": 2902
},
{
"epoch": 1.2253164556962026,
"grad_norm": 1.3058340549468994,
"learning_rate": 9.55754493905084e-05,
"loss": 0.6510948538780212,
"step": 2904
},
{
"epoch": 1.2261603375527426,
"grad_norm": 1.140268087387085,
"learning_rate": 9.556556739737892e-05,
"loss": 0.6481176614761353,
"step": 2906
},
{
"epoch": 1.2270042194092827,
"grad_norm": 1.465113639831543,
"learning_rate": 9.555567489314816e-05,
"loss": 0.7533771991729736,
"step": 2908
},
{
"epoch": 1.2278481012658227,
"grad_norm": 1.1468979120254517,
"learning_rate": 9.554577188009812e-05,
"loss": 0.6924305558204651,
"step": 2910
},
{
"epoch": 1.2286919831223628,
"grad_norm": 1.2193517684936523,
"learning_rate": 9.553585836051321e-05,
"loss": 0.7082820534706116,
"step": 2912
},
{
"epoch": 1.229535864978903,
"grad_norm": 1.2015037536621094,
"learning_rate": 9.552593433668034e-05,
"loss": 0.6735695004463196,
"step": 2914
},
{
"epoch": 1.230379746835443,
"grad_norm": 1.1915435791015625,
"learning_rate": 9.551599981088874e-05,
"loss": 0.7312048673629761,
"step": 2916
},
{
"epoch": 1.231223628691983,
"grad_norm": 1.2849410772323608,
"learning_rate": 9.550605478543013e-05,
"loss": 0.6590308547019958,
"step": 2918
},
{
"epoch": 1.2320675105485233,
"grad_norm": 1.192238688468933,
"learning_rate": 9.549609926259866e-05,
"loss": 0.6237715482711792,
"step": 2920
},
{
"epoch": 1.2329113924050632,
"grad_norm": 1.141845703125,
"learning_rate": 9.548613324469085e-05,
"loss": 0.6546295881271362,
"step": 2922
},
{
"epoch": 1.2337552742616034,
"grad_norm": 1.1662311553955078,
"learning_rate": 9.547615673400566e-05,
"loss": 0.5800934433937073,
"step": 2924
},
{
"epoch": 1.2345991561181435,
"grad_norm": 1.120578646659851,
"learning_rate": 9.546616973284453e-05,
"loss": 0.6487136483192444,
"step": 2926
},
{
"epoch": 1.2354430379746835,
"grad_norm": 1.0884860754013062,
"learning_rate": 9.54561722435112e-05,
"loss": 0.7515342235565186,
"step": 2928
},
{
"epoch": 1.2362869198312236,
"grad_norm": 1.4208670854568481,
"learning_rate": 9.544616426831196e-05,
"loss": 0.7162003517150879,
"step": 2930
},
{
"epoch": 1.2371308016877638,
"grad_norm": 1.083389401435852,
"learning_rate": 9.543614580955543e-05,
"loss": 0.708450198173523,
"step": 2932
},
{
"epoch": 1.2379746835443037,
"grad_norm": 1.141364336013794,
"learning_rate": 9.542611686955268e-05,
"loss": 0.6255859732627869,
"step": 2934
},
{
"epoch": 1.238818565400844,
"grad_norm": 1.122036099433899,
"learning_rate": 9.54160774506172e-05,
"loss": 0.6485402584075928,
"step": 2936
},
{
"epoch": 1.239662447257384,
"grad_norm": 1.3514165878295898,
"learning_rate": 9.540602755506487e-05,
"loss": 0.6735473871231079,
"step": 2938
},
{
"epoch": 1.240506329113924,
"grad_norm": 1.1762629747390747,
"learning_rate": 9.539596718521403e-05,
"loss": 0.6154970526695251,
"step": 2940
},
{
"epoch": 1.2413502109704642,
"grad_norm": 1.1609408855438232,
"learning_rate": 9.53858963433854e-05,
"loss": 0.6410251259803772,
"step": 2942
},
{
"epoch": 1.2421940928270043,
"grad_norm": 1.1750361919403076,
"learning_rate": 9.537581503190214e-05,
"loss": 0.6841039657592773,
"step": 2944
},
{
"epoch": 1.2430379746835443,
"grad_norm": 1.3125680685043335,
"learning_rate": 9.536572325308982e-05,
"loss": 0.7293462753295898,
"step": 2946
},
{
"epoch": 1.2438818565400844,
"grad_norm": 1.1737277507781982,
"learning_rate": 9.53556210092764e-05,
"loss": 0.7713663578033447,
"step": 2948
},
{
"epoch": 1.2447257383966246,
"grad_norm": 1.1702152490615845,
"learning_rate": 9.53455083027923e-05,
"loss": 0.6612298488616943,
"step": 2950
},
{
"epoch": 1.2455696202531645,
"grad_norm": 1.2594486474990845,
"learning_rate": 9.533538513597028e-05,
"loss": 0.6725803017616272,
"step": 2952
},
{
"epoch": 1.2464135021097047,
"grad_norm": 1.180816411972046,
"learning_rate": 9.532525151114562e-05,
"loss": 0.6421069502830505,
"step": 2954
},
{
"epoch": 1.2472573839662446,
"grad_norm": 1.25814688205719,
"learning_rate": 9.531510743065593e-05,
"loss": 0.7042996287345886,
"step": 2956
},
{
"epoch": 1.2481012658227848,
"grad_norm": 1.2101783752441406,
"learning_rate": 9.530495289684122e-05,
"loss": 0.7359137535095215,
"step": 2958
},
{
"epoch": 1.248945147679325,
"grad_norm": 1.1438405513763428,
"learning_rate": 9.5294787912044e-05,
"loss": 0.6186386346817017,
"step": 2960
},
{
"epoch": 1.249789029535865,
"grad_norm": 1.163364291191101,
"learning_rate": 9.52846124786091e-05,
"loss": 0.6243056058883667,
"step": 2962
},
{
"epoch": 1.250632911392405,
"grad_norm": 1.0695953369140625,
"learning_rate": 9.52744265988838e-05,
"loss": 0.6568763852119446,
"step": 2964
},
{
"epoch": 1.2514767932489452,
"grad_norm": 1.2228879928588867,
"learning_rate": 9.52642302752178e-05,
"loss": 0.6486776471138,
"step": 2966
},
{
"epoch": 1.2523206751054852,
"grad_norm": 1.2262967824935913,
"learning_rate": 9.52540235099632e-05,
"loss": 0.6293455958366394,
"step": 2968
},
{
"epoch": 1.2531645569620253,
"grad_norm": 1.0862956047058105,
"learning_rate": 9.524380630547449e-05,
"loss": 0.6549884080886841,
"step": 2970
},
{
"epoch": 1.2540084388185653,
"grad_norm": 1.1721880435943604,
"learning_rate": 9.52335786641086e-05,
"loss": 0.6126490831375122,
"step": 2972
},
{
"epoch": 1.2548523206751054,
"grad_norm": 1.2452391386032104,
"learning_rate": 9.522334058822483e-05,
"loss": 0.7078590393066406,
"step": 2974
},
{
"epoch": 1.2556962025316456,
"grad_norm": 1.2290222644805908,
"learning_rate": 9.521309208018492e-05,
"loss": 0.6166214942932129,
"step": 2976
},
{
"epoch": 1.2565400843881855,
"grad_norm": 1.1823618412017822,
"learning_rate": 9.520283314235299e-05,
"loss": 0.666228175163269,
"step": 2978
},
{
"epoch": 1.2573839662447257,
"grad_norm": 1.1702475547790527,
"learning_rate": 9.51925637770956e-05,
"loss": 0.7436795830726624,
"step": 2980
},
{
"epoch": 1.2582278481012659,
"grad_norm": 1.0879321098327637,
"learning_rate": 9.518228398678168e-05,
"loss": 0.7120893001556396,
"step": 2982
},
{
"epoch": 1.2590717299578058,
"grad_norm": 1.1608418226242065,
"learning_rate": 9.517199377378261e-05,
"loss": 0.6931713223457336,
"step": 2984
},
{
"epoch": 1.259915611814346,
"grad_norm": 1.1289087533950806,
"learning_rate": 9.51616931404721e-05,
"loss": 0.6803538799285889,
"step": 2986
},
{
"epoch": 1.2607594936708861,
"grad_norm": 1.1622236967086792,
"learning_rate": 9.515138208922633e-05,
"loss": 0.6499706506729126,
"step": 2988
},
{
"epoch": 1.261603375527426,
"grad_norm": 1.2492594718933105,
"learning_rate": 9.514106062242386e-05,
"loss": 0.6132655739784241,
"step": 2990
},
{
"epoch": 1.2624472573839662,
"grad_norm": 1.1538822650909424,
"learning_rate": 9.513072874244567e-05,
"loss": 0.6309265494346619,
"step": 2992
},
{
"epoch": 1.2632911392405064,
"grad_norm": 1.0828478336334229,
"learning_rate": 9.512038645167509e-05,
"loss": 0.6297751665115356,
"step": 2994
},
{
"epoch": 1.2641350210970463,
"grad_norm": 1.2440937757492065,
"learning_rate": 9.511003375249792e-05,
"loss": 0.6335258483886719,
"step": 2996
},
{
"epoch": 1.2649789029535865,
"grad_norm": 1.1259970664978027,
"learning_rate": 9.50996706473023e-05,
"loss": 0.6513770818710327,
"step": 2998
},
{
"epoch": 1.2658227848101267,
"grad_norm": 1.1530309915542603,
"learning_rate": 9.508929713847884e-05,
"loss": 0.6490892767906189,
"step": 3000
},
{
"epoch": 1.2658227848101267,
"eval_loss": 0.72515869140625,
"eval_runtime": 868.0515,
"eval_samples_per_second": 2.427,
"eval_steps_per_second": 2.427,
"step": 3000
},
{
"epoch": 1.2666666666666666,
"grad_norm": 1.2257169485092163,
"learning_rate": 9.507891322842048e-05,
"loss": 0.6936060786247253,
"step": 3002
},
{
"epoch": 1.2675105485232068,
"grad_norm": 1.0380109548568726,
"learning_rate": 9.506851891952259e-05,
"loss": 0.5941951870918274,
"step": 3004
},
{
"epoch": 1.268354430379747,
"grad_norm": 1.2830222845077515,
"learning_rate": 9.505811421418296e-05,
"loss": 0.648429811000824,
"step": 3006
},
{
"epoch": 1.2691983122362869,
"grad_norm": 1.2212986946105957,
"learning_rate": 9.504769911480171e-05,
"loss": 0.6868565678596497,
"step": 3008
},
{
"epoch": 1.270042194092827,
"grad_norm": 1.104656457901001,
"learning_rate": 9.503727362378145e-05,
"loss": 0.6777986288070679,
"step": 3010
},
{
"epoch": 1.2708860759493672,
"grad_norm": 1.1449005603790283,
"learning_rate": 9.502683774352713e-05,
"loss": 0.6581128239631653,
"step": 3012
},
{
"epoch": 1.2717299578059071,
"grad_norm": 1.2753362655639648,
"learning_rate": 9.501639147644608e-05,
"loss": 0.689930260181427,
"step": 3014
},
{
"epoch": 1.2725738396624473,
"grad_norm": 1.3367106914520264,
"learning_rate": 9.500593482494809e-05,
"loss": 0.7549214363098145,
"step": 3016
},
{
"epoch": 1.2734177215189875,
"grad_norm": 1.2309048175811768,
"learning_rate": 9.499546779144528e-05,
"loss": 0.6713513135910034,
"step": 3018
},
{
"epoch": 1.2742616033755274,
"grad_norm": 1.3833240270614624,
"learning_rate": 9.49849903783522e-05,
"loss": 0.7045458555221558,
"step": 3020
},
{
"epoch": 1.2751054852320676,
"grad_norm": 1.1402570009231567,
"learning_rate": 9.49745025880858e-05,
"loss": 0.708249568939209,
"step": 3022
},
{
"epoch": 1.2759493670886077,
"grad_norm": 1.0476267337799072,
"learning_rate": 9.496400442306541e-05,
"loss": 0.616210401058197,
"step": 3024
},
{
"epoch": 1.2767932489451477,
"grad_norm": 1.1045979261398315,
"learning_rate": 9.495349588571274e-05,
"loss": 0.6691827178001404,
"step": 3026
},
{
"epoch": 1.2776371308016878,
"grad_norm": 1.1760368347167969,
"learning_rate": 9.494297697845194e-05,
"loss": 0.6198306083679199,
"step": 3028
},
{
"epoch": 1.2784810126582278,
"grad_norm": 1.0015549659729004,
"learning_rate": 9.493244770370946e-05,
"loss": 0.5756480097770691,
"step": 3030
},
{
"epoch": 1.279324894514768,
"grad_norm": 1.2190428972244263,
"learning_rate": 9.492190806391427e-05,
"loss": 0.6794419884681702,
"step": 3032
},
{
"epoch": 1.2801687763713079,
"grad_norm": 1.0210410356521606,
"learning_rate": 9.491135806149762e-05,
"loss": 0.5847988724708557,
"step": 3034
},
{
"epoch": 1.281012658227848,
"grad_norm": 1.0678503513336182,
"learning_rate": 9.490079769889319e-05,
"loss": 0.6760231256484985,
"step": 3036
},
{
"epoch": 1.2818565400843882,
"grad_norm": 1.1811012029647827,
"learning_rate": 9.489022697853709e-05,
"loss": 0.7188448309898376,
"step": 3038
},
{
"epoch": 1.2827004219409281,
"grad_norm": 1.1134302616119385,
"learning_rate": 9.487964590286776e-05,
"loss": 0.674904465675354,
"step": 3040
},
{
"epoch": 1.2835443037974683,
"grad_norm": 1.1868232488632202,
"learning_rate": 9.486905447432603e-05,
"loss": 0.6016344428062439,
"step": 3042
},
{
"epoch": 1.2843881856540085,
"grad_norm": 1.1586613655090332,
"learning_rate": 9.485845269535517e-05,
"loss": 0.6965603828430176,
"step": 3044
},
{
"epoch": 1.2852320675105484,
"grad_norm": 1.149837613105774,
"learning_rate": 9.48478405684008e-05,
"loss": 0.656144380569458,
"step": 3046
},
{
"epoch": 1.2860759493670886,
"grad_norm": 1.228752613067627,
"learning_rate": 9.48372180959109e-05,
"loss": 0.6388653516769409,
"step": 3048
},
{
"epoch": 1.2869198312236287,
"grad_norm": 1.2403100728988647,
"learning_rate": 9.482658528033595e-05,
"loss": 0.6255465745925903,
"step": 3050
},
{
"epoch": 1.2877637130801687,
"grad_norm": 1.2483839988708496,
"learning_rate": 9.481594212412865e-05,
"loss": 0.6828253269195557,
"step": 3052
},
{
"epoch": 1.2886075949367088,
"grad_norm": 1.4161021709442139,
"learning_rate": 9.480528862974422e-05,
"loss": 0.7072080373764038,
"step": 3054
},
{
"epoch": 1.289451476793249,
"grad_norm": 1.1500437259674072,
"learning_rate": 9.479462479964021e-05,
"loss": 0.6082415580749512,
"step": 3056
},
{
"epoch": 1.290295358649789,
"grad_norm": 1.196595549583435,
"learning_rate": 9.478395063627654e-05,
"loss": 0.6653015613555908,
"step": 3058
},
{
"epoch": 1.2911392405063291,
"grad_norm": 1.2832285165786743,
"learning_rate": 9.477326614211557e-05,
"loss": 0.7095832824707031,
"step": 3060
},
{
"epoch": 1.2919831223628693,
"grad_norm": 1.2234288454055786,
"learning_rate": 9.476257131962198e-05,
"loss": 0.7183426022529602,
"step": 3062
},
{
"epoch": 1.2928270042194092,
"grad_norm": 1.2350459098815918,
"learning_rate": 9.475186617126286e-05,
"loss": 0.713284432888031,
"step": 3064
},
{
"epoch": 1.2936708860759494,
"grad_norm": 1.2079555988311768,
"learning_rate": 9.47411506995077e-05,
"loss": 0.6580002307891846,
"step": 3066
},
{
"epoch": 1.2945147679324895,
"grad_norm": 1.129796028137207,
"learning_rate": 9.473042490682835e-05,
"loss": 0.5967763662338257,
"step": 3068
},
{
"epoch": 1.2953586497890295,
"grad_norm": 1.1706618070602417,
"learning_rate": 9.471968879569901e-05,
"loss": 0.6724388003349304,
"step": 3070
},
{
"epoch": 1.2962025316455696,
"grad_norm": 1.0336005687713623,
"learning_rate": 9.470894236859635e-05,
"loss": 0.6527577638626099,
"step": 3072
},
{
"epoch": 1.2970464135021098,
"grad_norm": 1.1124558448791504,
"learning_rate": 9.469818562799932e-05,
"loss": 0.677132785320282,
"step": 3074
},
{
"epoch": 1.2978902953586497,
"grad_norm": 1.158069372177124,
"learning_rate": 9.468741857638933e-05,
"loss": 0.649718165397644,
"step": 3076
},
{
"epoch": 1.29873417721519,
"grad_norm": 1.092926263809204,
"learning_rate": 9.46766412162501e-05,
"loss": 0.6872133612632751,
"step": 3078
},
{
"epoch": 1.29957805907173,
"grad_norm": 1.1324822902679443,
"learning_rate": 9.466585355006777e-05,
"loss": 0.6495246291160583,
"step": 3080
},
{
"epoch": 1.30042194092827,
"grad_norm": 1.5882837772369385,
"learning_rate": 9.465505558033086e-05,
"loss": 0.6730570197105408,
"step": 3082
},
{
"epoch": 1.3012658227848102,
"grad_norm": 0.9866069555282593,
"learning_rate": 9.464424730953023e-05,
"loss": 0.5677527785301208,
"step": 3084
},
{
"epoch": 1.3021097046413503,
"grad_norm": 1.1560224294662476,
"learning_rate": 9.463342874015917e-05,
"loss": 0.6247856020927429,
"step": 3086
},
{
"epoch": 1.3029535864978903,
"grad_norm": 1.135939359664917,
"learning_rate": 9.462259987471329e-05,
"loss": 0.6889358758926392,
"step": 3088
},
{
"epoch": 1.3037974683544304,
"grad_norm": 1.3935760259628296,
"learning_rate": 9.461176071569063e-05,
"loss": 0.7097522020339966,
"step": 3090
},
{
"epoch": 1.3046413502109704,
"grad_norm": 1.153518795967102,
"learning_rate": 9.460091126559155e-05,
"loss": 0.7044580578804016,
"step": 3092
},
{
"epoch": 1.3054852320675105,
"grad_norm": 1.2112717628479004,
"learning_rate": 9.45900515269188e-05,
"loss": 0.6119300723075867,
"step": 3094
},
{
"epoch": 1.3063291139240507,
"grad_norm": 1.295591115951538,
"learning_rate": 9.457918150217754e-05,
"loss": 0.7150222063064575,
"step": 3096
},
{
"epoch": 1.3071729957805907,
"grad_norm": 1.1175775527954102,
"learning_rate": 9.456830119387527e-05,
"loss": 0.6043334007263184,
"step": 3098
},
{
"epoch": 1.3080168776371308,
"grad_norm": 1.4022588729858398,
"learning_rate": 9.455741060452186e-05,
"loss": 0.6354425549507141,
"step": 3100
},
{
"epoch": 1.3080168776371308,
"eval_loss": 0.7225774526596069,
"eval_runtime": 862.4006,
"eval_samples_per_second": 2.443,
"eval_steps_per_second": 2.443,
"step": 3100
},
{
"epoch": 1.3088607594936708,
"grad_norm": 1.1657692193984985,
"learning_rate": 9.454650973662957e-05,
"loss": 0.7281571626663208,
"step": 3102
},
{
"epoch": 1.309704641350211,
"grad_norm": 1.6169127225875854,
"learning_rate": 9.453559859271301e-05,
"loss": 0.8038214445114136,
"step": 3104
},
{
"epoch": 1.310548523206751,
"grad_norm": 1.1256520748138428,
"learning_rate": 9.452467717528918e-05,
"loss": 0.6488606333732605,
"step": 3106
},
{
"epoch": 1.311392405063291,
"grad_norm": 1.1224530935287476,
"learning_rate": 9.451374548687745e-05,
"loss": 0.6897066235542297,
"step": 3108
},
{
"epoch": 1.3122362869198312,
"grad_norm": 1.1123055219650269,
"learning_rate": 9.450280352999952e-05,
"loss": 0.6332913041114807,
"step": 3110
},
{
"epoch": 1.3130801687763713,
"grad_norm": 1.1688940525054932,
"learning_rate": 9.449185130717952e-05,
"loss": 0.7426630854606628,
"step": 3112
},
{
"epoch": 1.3139240506329113,
"grad_norm": 1.1898044347763062,
"learning_rate": 9.44808888209439e-05,
"loss": 0.7156099677085876,
"step": 3114
},
{
"epoch": 1.3147679324894515,
"grad_norm": 1.3030686378479004,
"learning_rate": 9.44699160738215e-05,
"loss": 0.7150979042053223,
"step": 3116
},
{
"epoch": 1.3156118143459916,
"grad_norm": 1.1539074182510376,
"learning_rate": 9.445893306834352e-05,
"loss": 0.6687285900115967,
"step": 3118
},
{
"epoch": 1.3164556962025316,
"grad_norm": 1.311808466911316,
"learning_rate": 9.444793980704355e-05,
"loss": 0.7340983152389526,
"step": 3120
},
{
"epoch": 1.3172995780590717,
"grad_norm": 1.3325430154800415,
"learning_rate": 9.44369362924575e-05,
"loss": 0.6620677709579468,
"step": 3122
},
{
"epoch": 1.3181434599156119,
"grad_norm": 1.201518177986145,
"learning_rate": 9.442592252712365e-05,
"loss": 0.6169955134391785,
"step": 3124
},
{
"epoch": 1.3189873417721518,
"grad_norm": 1.2124013900756836,
"learning_rate": 9.441489851358272e-05,
"loss": 0.6696792840957642,
"step": 3126
},
{
"epoch": 1.319831223628692,
"grad_norm": 1.2186850309371948,
"learning_rate": 9.440386425437768e-05,
"loss": 0.7303428649902344,
"step": 3128
},
{
"epoch": 1.3206751054852321,
"grad_norm": 1.3780523538589478,
"learning_rate": 9.439281975205396e-05,
"loss": 0.7093026638031006,
"step": 3130
},
{
"epoch": 1.321518987341772,
"grad_norm": 1.233353614807129,
"learning_rate": 9.438176500915932e-05,
"loss": 0.6821767687797546,
"step": 3132
},
{
"epoch": 1.3223628691983123,
"grad_norm": 1.2425329685211182,
"learning_rate": 9.437070002824385e-05,
"loss": 0.700680136680603,
"step": 3134
},
{
"epoch": 1.3232067510548524,
"grad_norm": 1.1600432395935059,
"learning_rate": 9.435962481186003e-05,
"loss": 0.6173145771026611,
"step": 3136
},
{
"epoch": 1.3240506329113924,
"grad_norm": 1.279336929321289,
"learning_rate": 9.434853936256272e-05,
"loss": 0.6597106456756592,
"step": 3138
},
{
"epoch": 1.3248945147679325,
"grad_norm": 1.1787258386611938,
"learning_rate": 9.433744368290909e-05,
"loss": 0.6655287742614746,
"step": 3140
},
{
"epoch": 1.3257383966244727,
"grad_norm": 1.3658509254455566,
"learning_rate": 9.432633777545874e-05,
"loss": 0.6312944889068604,
"step": 3142
},
{
"epoch": 1.3265822784810126,
"grad_norm": 1.1220000982284546,
"learning_rate": 9.431522164277356e-05,
"loss": 0.6696156859397888,
"step": 3144
},
{
"epoch": 1.3274261603375528,
"grad_norm": 1.224761724472046,
"learning_rate": 9.430409528741783e-05,
"loss": 0.6586571335792542,
"step": 3146
},
{
"epoch": 1.328270042194093,
"grad_norm": 1.227510929107666,
"learning_rate": 9.429295871195821e-05,
"loss": 0.64905846118927,
"step": 3148
},
{
"epoch": 1.3291139240506329,
"grad_norm": 1.1359103918075562,
"learning_rate": 9.428181191896366e-05,
"loss": 0.6407933831214905,
"step": 3150
},
{
"epoch": 1.329957805907173,
"grad_norm": 1.2729473114013672,
"learning_rate": 9.427065491100556e-05,
"loss": 0.7004884481430054,
"step": 3152
},
{
"epoch": 1.3308016877637132,
"grad_norm": 1.1182841062545776,
"learning_rate": 9.42594876906576e-05,
"loss": 0.6835907101631165,
"step": 3154
},
{
"epoch": 1.3316455696202532,
"grad_norm": 1.2309781312942505,
"learning_rate": 9.424831026049585e-05,
"loss": 0.7476315498352051,
"step": 3156
},
{
"epoch": 1.3324894514767933,
"grad_norm": 1.0857728719711304,
"learning_rate": 9.423712262309873e-05,
"loss": 0.6811426281929016,
"step": 3158
},
{
"epoch": 1.3333333333333333,
"grad_norm": 1.299680233001709,
"learning_rate": 9.4225924781047e-05,
"loss": 0.6403942108154297,
"step": 3160
},
{
"epoch": 1.3341772151898734,
"grad_norm": 1.226472020149231,
"learning_rate": 9.421471673692382e-05,
"loss": 0.6758930683135986,
"step": 3162
},
{
"epoch": 1.3350210970464136,
"grad_norm": 1.1403205394744873,
"learning_rate": 9.420349849331463e-05,
"loss": 0.7119444608688354,
"step": 3164
},
{
"epoch": 1.3358649789029535,
"grad_norm": 1.2888442277908325,
"learning_rate": 9.419227005280729e-05,
"loss": 0.7411463260650635,
"step": 3166
},
{
"epoch": 1.3367088607594937,
"grad_norm": 1.1929190158843994,
"learning_rate": 9.418103141799197e-05,
"loss": 0.5992606282234192,
"step": 3168
},
{
"epoch": 1.3375527426160336,
"grad_norm": 1.2574355602264404,
"learning_rate": 9.416978259146122e-05,
"loss": 0.6728890538215637,
"step": 3170
},
{
"epoch": 1.3383966244725738,
"grad_norm": 0.9653727412223816,
"learning_rate": 9.415852357580992e-05,
"loss": 0.6294883489608765,
"step": 3172
},
{
"epoch": 1.339240506329114,
"grad_norm": 1.2107670307159424,
"learning_rate": 9.414725437363532e-05,
"loss": 0.6816665530204773,
"step": 3174
},
{
"epoch": 1.340084388185654,
"grad_norm": 1.024849534034729,
"learning_rate": 9.4135974987537e-05,
"loss": 0.6186381578445435,
"step": 3176
},
{
"epoch": 1.340928270042194,
"grad_norm": 1.1556614637374878,
"learning_rate": 9.41246854201169e-05,
"loss": 0.6071005463600159,
"step": 3178
},
{
"epoch": 1.3417721518987342,
"grad_norm": 1.2382808923721313,
"learning_rate": 9.41133856739793e-05,
"loss": 0.7871434092521667,
"step": 3180
},
{
"epoch": 1.3426160337552742,
"grad_norm": 1.0499578714370728,
"learning_rate": 9.410207575173082e-05,
"loss": 0.6578201651573181,
"step": 3182
},
{
"epoch": 1.3434599156118143,
"grad_norm": 1.2048250436782837,
"learning_rate": 9.409075565598049e-05,
"loss": 0.6271620392799377,
"step": 3184
},
{
"epoch": 1.3443037974683545,
"grad_norm": 1.0287591218948364,
"learning_rate": 9.407942538933958e-05,
"loss": 0.5773864388465881,
"step": 3186
},
{
"epoch": 1.3451476793248944,
"grad_norm": 1.1125097274780273,
"learning_rate": 9.406808495442181e-05,
"loss": 0.6745175719261169,
"step": 3188
},
{
"epoch": 1.3459915611814346,
"grad_norm": 1.036125898361206,
"learning_rate": 9.405673435384319e-05,
"loss": 0.6001214385032654,
"step": 3190
},
{
"epoch": 1.3468354430379748,
"grad_norm": 1.2771985530853271,
"learning_rate": 9.404537359022207e-05,
"loss": 0.6703945994377136,
"step": 3192
},
{
"epoch": 1.3476793248945147,
"grad_norm": 1.0891097784042358,
"learning_rate": 9.403400266617918e-05,
"loss": 0.6159096360206604,
"step": 3194
},
{
"epoch": 1.3485232067510549,
"grad_norm": 1.1926233768463135,
"learning_rate": 9.402262158433755e-05,
"loss": 0.6439315676689148,
"step": 3196
},
{
"epoch": 1.349367088607595,
"grad_norm": 1.272557020187378,
"learning_rate": 9.40112303473226e-05,
"loss": 0.7125352025032043,
"step": 3198
},
{
"epoch": 1.350210970464135,
"grad_norm": 1.052037239074707,
"learning_rate": 9.399982895776207e-05,
"loss": 0.594719648361206,
"step": 3200
},
{
"epoch": 1.350210970464135,
"eval_loss": 0.7200453281402588,
"eval_runtime": 846.2953,
"eval_samples_per_second": 2.49,
"eval_steps_per_second": 2.49,
"step": 3200
},
{
"epoch": 1.3510548523206751,
"grad_norm": 1.204728126525879,
"learning_rate": 9.398841741828601e-05,
"loss": 0.6390520334243774,
"step": 3202
},
{
"epoch": 1.3518987341772153,
"grad_norm": 1.0873899459838867,
"learning_rate": 9.397699573152689e-05,
"loss": 0.6010531187057495,
"step": 3204
},
{
"epoch": 1.3527426160337552,
"grad_norm": 1.3124359846115112,
"learning_rate": 9.396556390011944e-05,
"loss": 0.724280834197998,
"step": 3206
},
{
"epoch": 1.3535864978902954,
"grad_norm": 1.2179948091506958,
"learning_rate": 9.395412192670075e-05,
"loss": 0.6430405378341675,
"step": 3208
},
{
"epoch": 1.3544303797468356,
"grad_norm": 1.2617219686508179,
"learning_rate": 9.394266981391031e-05,
"loss": 0.7188641428947449,
"step": 3210
},
{
"epoch": 1.3552742616033755,
"grad_norm": 1.2151501178741455,
"learning_rate": 9.393120756438988e-05,
"loss": 0.6724364757537842,
"step": 3212
},
{
"epoch": 1.3561181434599157,
"grad_norm": 1.221528172492981,
"learning_rate": 9.391973518078357e-05,
"loss": 0.6340664625167847,
"step": 3214
},
{
"epoch": 1.3569620253164558,
"grad_norm": 1.3180092573165894,
"learning_rate": 9.390825266573786e-05,
"loss": 0.6914255023002625,
"step": 3216
},
{
"epoch": 1.3578059071729958,
"grad_norm": 1.103994369506836,
"learning_rate": 9.38967600219015e-05,
"loss": 0.6137136220932007,
"step": 3218
},
{
"epoch": 1.358649789029536,
"grad_norm": 1.33389413356781,
"learning_rate": 9.38852572519257e-05,
"loss": 0.7173700332641602,
"step": 3220
},
{
"epoch": 1.3594936708860759,
"grad_norm": 1.1074159145355225,
"learning_rate": 9.387374435846386e-05,
"loss": 0.5942243933677673,
"step": 3222
},
{
"epoch": 1.360337552742616,
"grad_norm": 1.1157063245773315,
"learning_rate": 9.386222134417182e-05,
"loss": 0.6362866163253784,
"step": 3224
},
{
"epoch": 1.3611814345991562,
"grad_norm": 1.1717792749404907,
"learning_rate": 9.38506882117077e-05,
"loss": 0.6784523129463196,
"step": 3226
},
{
"epoch": 1.3620253164556961,
"grad_norm": 1.0946043729782104,
"learning_rate": 9.383914496373197e-05,
"loss": 0.6647377014160156,
"step": 3228
},
{
"epoch": 1.3628691983122363,
"grad_norm": 1.1519699096679688,
"learning_rate": 9.382759160290746e-05,
"loss": 0.6302075982093811,
"step": 3230
},
{
"epoch": 1.3637130801687762,
"grad_norm": 0.9928684830665588,
"learning_rate": 9.381602813189929e-05,
"loss": 0.5979090332984924,
"step": 3232
},
{
"epoch": 1.3645569620253164,
"grad_norm": 1.2488124370574951,
"learning_rate": 9.380445455337492e-05,
"loss": 0.6949353218078613,
"step": 3234
},
{
"epoch": 1.3654008438818566,
"grad_norm": 1.3884797096252441,
"learning_rate": 9.379287087000416e-05,
"loss": 0.7225558161735535,
"step": 3236
},
{
"epoch": 1.3662447257383965,
"grad_norm": 1.2981176376342773,
"learning_rate": 9.378127708445917e-05,
"loss": 0.6993390917778015,
"step": 3238
},
{
"epoch": 1.3670886075949367,
"grad_norm": 0.9884640574455261,
"learning_rate": 9.376967319941438e-05,
"loss": 0.6983805894851685,
"step": 3240
},
{
"epoch": 1.3679324894514768,
"grad_norm": 1.2051894664764404,
"learning_rate": 9.375805921754659e-05,
"loss": 0.7062534689903259,
"step": 3242
},
{
"epoch": 1.3687763713080168,
"grad_norm": 1.1943434476852417,
"learning_rate": 9.374643514153494e-05,
"loss": 0.6405107378959656,
"step": 3244
},
{
"epoch": 1.369620253164557,
"grad_norm": 1.249214768409729,
"learning_rate": 9.373480097406086e-05,
"loss": 0.6844781637191772,
"step": 3246
},
{
"epoch": 1.370464135021097,
"grad_norm": 1.1847131252288818,
"learning_rate": 9.372315671780813e-05,
"loss": 0.6048306226730347,
"step": 3248
},
{
"epoch": 1.371308016877637,
"grad_norm": 1.125545859336853,
"learning_rate": 9.37115023754629e-05,
"loss": 0.6772685050964355,
"step": 3250
},
{
"epoch": 1.3721518987341772,
"grad_norm": 1.466615915298462,
"learning_rate": 9.369983794971354e-05,
"loss": 0.7536272406578064,
"step": 3252
},
{
"epoch": 1.3729957805907174,
"grad_norm": 1.066699504852295,
"learning_rate": 9.368816344325084e-05,
"loss": 0.6640655398368835,
"step": 3254
},
{
"epoch": 1.3738396624472573,
"grad_norm": 1.4793988466262817,
"learning_rate": 9.367647885876787e-05,
"loss": 0.7029458284378052,
"step": 3256
},
{
"epoch": 1.3746835443037975,
"grad_norm": 1.258540153503418,
"learning_rate": 9.366478419896006e-05,
"loss": 0.7231863737106323,
"step": 3258
},
{
"epoch": 1.3755274261603376,
"grad_norm": 1.176106333732605,
"learning_rate": 9.365307946652512e-05,
"loss": 0.6679144501686096,
"step": 3260
},
{
"epoch": 1.3763713080168776,
"grad_norm": 1.3301753997802734,
"learning_rate": 9.364136466416316e-05,
"loss": 0.6282188296318054,
"step": 3262
},
{
"epoch": 1.3772151898734177,
"grad_norm": 1.3616732358932495,
"learning_rate": 9.362963979457648e-05,
"loss": 0.6870840191841125,
"step": 3264
},
{
"epoch": 1.378059071729958,
"grad_norm": 1.1982418298721313,
"learning_rate": 9.361790486046985e-05,
"loss": 0.6823731660842896,
"step": 3266
},
{
"epoch": 1.3789029535864978,
"grad_norm": 1.1869033575057983,
"learning_rate": 9.360615986455024e-05,
"loss": 0.6582897305488586,
"step": 3268
},
{
"epoch": 1.379746835443038,
"grad_norm": 1.1192975044250488,
"learning_rate": 9.359440480952703e-05,
"loss": 0.716654360294342,
"step": 3270
},
{
"epoch": 1.3805907172995782,
"grad_norm": 1.2210016250610352,
"learning_rate": 9.358263969811189e-05,
"loss": 0.6880061626434326,
"step": 3272
},
{
"epoch": 1.381434599156118,
"grad_norm": 1.0358284711837769,
"learning_rate": 9.357086453301878e-05,
"loss": 0.666864812374115,
"step": 3274
},
{
"epoch": 1.3822784810126583,
"grad_norm": 1.2790803909301758,
"learning_rate": 9.355907931696401e-05,
"loss": 0.6872087121009827,
"step": 3276
},
{
"epoch": 1.3831223628691984,
"grad_norm": 1.182991623878479,
"learning_rate": 9.354728405266623e-05,
"loss": 0.5929665565490723,
"step": 3278
},
{
"epoch": 1.3839662447257384,
"grad_norm": 1.1071184873580933,
"learning_rate": 9.353547874284634e-05,
"loss": 0.5928181409835815,
"step": 3280
},
{
"epoch": 1.3848101265822785,
"grad_norm": 1.3139623403549194,
"learning_rate": 9.352366339022763e-05,
"loss": 0.6783652901649475,
"step": 3282
},
{
"epoch": 1.3856540084388187,
"grad_norm": 1.2534632682800293,
"learning_rate": 9.351183799753567e-05,
"loss": 0.7652941346168518,
"step": 3284
},
{
"epoch": 1.3864978902953586,
"grad_norm": 1.4487930536270142,
"learning_rate": 9.350000256749833e-05,
"loss": 0.7430433630943298,
"step": 3286
},
{
"epoch": 1.3873417721518988,
"grad_norm": 1.0786021947860718,
"learning_rate": 9.348815710284584e-05,
"loss": 0.5854598879814148,
"step": 3288
},
{
"epoch": 1.3881856540084387,
"grad_norm": 1.0544480085372925,
"learning_rate": 9.347630160631071e-05,
"loss": 0.6365222334861755,
"step": 3290
},
{
"epoch": 1.389029535864979,
"grad_norm": 0.9989988207817078,
"learning_rate": 9.346443608062778e-05,
"loss": 0.6485803127288818,
"step": 3292
},
{
"epoch": 1.389873417721519,
"grad_norm": 1.100951910018921,
"learning_rate": 9.345256052853419e-05,
"loss": 0.6417753100395203,
"step": 3294
},
{
"epoch": 1.390717299578059,
"grad_norm": 1.1398471593856812,
"learning_rate": 9.344067495276942e-05,
"loss": 0.6333693861961365,
"step": 3296
},
{
"epoch": 1.3915611814345992,
"grad_norm": 1.1745941638946533,
"learning_rate": 9.342877935607521e-05,
"loss": 0.677288293838501,
"step": 3298
},
{
"epoch": 1.3924050632911391,
"grad_norm": 1.2651115655899048,
"learning_rate": 9.34168737411957e-05,
"loss": 0.7408396005630493,
"step": 3300
},
{
"epoch": 1.3924050632911391,
"eval_loss": 0.7173135876655579,
"eval_runtime": 853.5344,
"eval_samples_per_second": 2.469,
"eval_steps_per_second": 2.469,
"step": 3300
},
{
"epoch": 1.3932489451476793,
"grad_norm": 1.0747730731964111,
"learning_rate": 9.340495811087723e-05,
"loss": 0.6810371279716492,
"step": 3302
},
{
"epoch": 1.3940928270042194,
"grad_norm": 1.2857651710510254,
"learning_rate": 9.339303246786854e-05,
"loss": 0.6693953275680542,
"step": 3304
},
{
"epoch": 1.3949367088607594,
"grad_norm": 1.4544212818145752,
"learning_rate": 9.338109681492063e-05,
"loss": 0.7019274234771729,
"step": 3306
},
{
"epoch": 1.3957805907172995,
"grad_norm": 1.687755823135376,
"learning_rate": 9.336915115478685e-05,
"loss": 0.6074224710464478,
"step": 3308
},
{
"epoch": 1.3966244725738397,
"grad_norm": 1.1645431518554688,
"learning_rate": 9.33571954902228e-05,
"loss": 0.6981383562088013,
"step": 3310
},
{
"epoch": 1.3974683544303796,
"grad_norm": 1.6173527240753174,
"learning_rate": 9.334522982398646e-05,
"loss": 0.7282926440238953,
"step": 3312
},
{
"epoch": 1.3983122362869198,
"grad_norm": 1.3132909536361694,
"learning_rate": 9.333325415883804e-05,
"loss": 0.6574883460998535,
"step": 3314
},
{
"epoch": 1.39915611814346,
"grad_norm": 1.1629762649536133,
"learning_rate": 9.332126849754014e-05,
"loss": 0.6559937596321106,
"step": 3316
},
{
"epoch": 1.4,
"grad_norm": 1.1666897535324097,
"learning_rate": 9.33092728428576e-05,
"loss": 0.683718740940094,
"step": 3318
},
{
"epoch": 1.40084388185654,
"grad_norm": 1.2269554138183594,
"learning_rate": 9.329726719755756e-05,
"loss": 0.6909779906272888,
"step": 3320
},
{
"epoch": 1.4016877637130802,
"grad_norm": 1.1010066270828247,
"learning_rate": 9.328525156440952e-05,
"loss": 0.6051948666572571,
"step": 3322
},
{
"epoch": 1.4025316455696202,
"grad_norm": 1.127143144607544,
"learning_rate": 9.327322594618528e-05,
"loss": 0.6266679763793945,
"step": 3324
},
{
"epoch": 1.4033755274261603,
"grad_norm": 1.2160708904266357,
"learning_rate": 9.326119034565887e-05,
"loss": 0.6587526202201843,
"step": 3326
},
{
"epoch": 1.4042194092827005,
"grad_norm": 1.0853947401046753,
"learning_rate": 9.32491447656067e-05,
"loss": 0.5916946530342102,
"step": 3328
},
{
"epoch": 1.4050632911392404,
"grad_norm": 1.2205027341842651,
"learning_rate": 9.323708920880744e-05,
"loss": 0.6032452583312988,
"step": 3330
},
{
"epoch": 1.4059071729957806,
"grad_norm": 1.1964668035507202,
"learning_rate": 9.32250236780421e-05,
"loss": 0.6649114489555359,
"step": 3332
},
{
"epoch": 1.4067510548523208,
"grad_norm": 1.2507994174957275,
"learning_rate": 9.321294817609394e-05,
"loss": 0.7142994403839111,
"step": 3334
},
{
"epoch": 1.4075949367088607,
"grad_norm": 1.1310259103775024,
"learning_rate": 9.320086270574854e-05,
"loss": 0.709568977355957,
"step": 3336
},
{
"epoch": 1.4084388185654009,
"grad_norm": 1.2454090118408203,
"learning_rate": 9.318876726979385e-05,
"loss": 0.7800853848457336,
"step": 3338
},
{
"epoch": 1.409282700421941,
"grad_norm": 1.1168389320373535,
"learning_rate": 9.317666187101996e-05,
"loss": 0.6187908053398132,
"step": 3340
},
{
"epoch": 1.410126582278481,
"grad_norm": 1.6696287393569946,
"learning_rate": 9.316454651221942e-05,
"loss": 0.6222613453865051,
"step": 3342
},
{
"epoch": 1.4109704641350211,
"grad_norm": 0.9500295519828796,
"learning_rate": 9.315242119618698e-05,
"loss": 0.6116594672203064,
"step": 3344
},
{
"epoch": 1.4118143459915613,
"grad_norm": 1.186358094215393,
"learning_rate": 9.314028592571973e-05,
"loss": 0.633224368095398,
"step": 3346
},
{
"epoch": 1.4126582278481012,
"grad_norm": 1.1855978965759277,
"learning_rate": 9.312814070361705e-05,
"loss": 0.6675921082496643,
"step": 3348
},
{
"epoch": 1.4135021097046414,
"grad_norm": 1.2465872764587402,
"learning_rate": 9.311598553268059e-05,
"loss": 0.7268879413604736,
"step": 3350
},
{
"epoch": 1.4143459915611816,
"grad_norm": 1.151274561882019,
"learning_rate": 9.310382041571435e-05,
"loss": 0.6147416830062866,
"step": 3352
},
{
"epoch": 1.4151898734177215,
"grad_norm": 1.1226807832717896,
"learning_rate": 9.309164535552453e-05,
"loss": 0.6678543090820312,
"step": 3354
},
{
"epoch": 1.4160337552742617,
"grad_norm": 1.375842571258545,
"learning_rate": 9.307946035491975e-05,
"loss": 0.6334129571914673,
"step": 3356
},
{
"epoch": 1.4168776371308016,
"grad_norm": 1.058353066444397,
"learning_rate": 9.306726541671081e-05,
"loss": 0.6582583785057068,
"step": 3358
},
{
"epoch": 1.4177215189873418,
"grad_norm": 1.0511330366134644,
"learning_rate": 9.305506054371084e-05,
"loss": 0.5877419114112854,
"step": 3360
},
{
"epoch": 1.4185654008438817,
"grad_norm": 1.2246462106704712,
"learning_rate": 9.304284573873532e-05,
"loss": 0.711665689945221,
"step": 3362
},
{
"epoch": 1.4194092827004219,
"grad_norm": 1.0242294073104858,
"learning_rate": 9.303062100460193e-05,
"loss": 0.6743642687797546,
"step": 3364
},
{
"epoch": 1.420253164556962,
"grad_norm": 1.1432100534439087,
"learning_rate": 9.301838634413069e-05,
"loss": 0.6825576424598694,
"step": 3366
},
{
"epoch": 1.421097046413502,
"grad_norm": 1.0128604173660278,
"learning_rate": 9.30061417601439e-05,
"loss": 0.624455988407135,
"step": 3368
},
{
"epoch": 1.4219409282700421,
"grad_norm": 1.2738330364227295,
"learning_rate": 9.299388725546617e-05,
"loss": 0.7029586434364319,
"step": 3370
},
{
"epoch": 1.4227848101265823,
"grad_norm": 1.0857324600219727,
"learning_rate": 9.298162283292435e-05,
"loss": 0.5994319915771484,
"step": 3372
},
{
"epoch": 1.4236286919831223,
"grad_norm": 1.0811917781829834,
"learning_rate": 9.296934849534763e-05,
"loss": 0.6537772417068481,
"step": 3374
},
{
"epoch": 1.4244725738396624,
"grad_norm": 1.006913185119629,
"learning_rate": 9.295706424556745e-05,
"loss": 0.5775008201599121,
"step": 3376
},
{
"epoch": 1.4253164556962026,
"grad_norm": 1.2306486368179321,
"learning_rate": 9.294477008641755e-05,
"loss": 0.7445536255836487,
"step": 3378
},
{
"epoch": 1.4261603375527425,
"grad_norm": 1.223608374595642,
"learning_rate": 9.293246602073398e-05,
"loss": 0.6081538796424866,
"step": 3380
},
{
"epoch": 1.4270042194092827,
"grad_norm": 1.0933321714401245,
"learning_rate": 9.2920152051355e-05,
"loss": 0.6134634613990784,
"step": 3382
},
{
"epoch": 1.4278481012658228,
"grad_norm": 1.1738401651382446,
"learning_rate": 9.290782818112127e-05,
"loss": 0.5961087346076965,
"step": 3384
},
{
"epoch": 1.4286919831223628,
"grad_norm": 1.1493438482284546,
"learning_rate": 9.289549441287561e-05,
"loss": 0.6284122467041016,
"step": 3386
},
{
"epoch": 1.429535864978903,
"grad_norm": 1.1907998323440552,
"learning_rate": 9.288315074946324e-05,
"loss": 0.6654639840126038,
"step": 3388
},
{
"epoch": 1.4303797468354431,
"grad_norm": 1.3423025608062744,
"learning_rate": 9.287079719373157e-05,
"loss": 0.652850329875946,
"step": 3390
},
{
"epoch": 1.431223628691983,
"grad_norm": 1.3932039737701416,
"learning_rate": 9.285843374853034e-05,
"loss": 0.703445315361023,
"step": 3392
},
{
"epoch": 1.4320675105485232,
"grad_norm": 5.349400043487549,
"learning_rate": 9.284606041671155e-05,
"loss": 0.693265438079834,
"step": 3394
},
{
"epoch": 1.4329113924050634,
"grad_norm": 1.0921961069107056,
"learning_rate": 9.28336772011295e-05,
"loss": 0.6578536033630371,
"step": 3396
},
{
"epoch": 1.4337552742616033,
"grad_norm": 1.184157133102417,
"learning_rate": 9.282128410464074e-05,
"loss": 0.7092277407646179,
"step": 3398
},
{
"epoch": 1.4345991561181435,
"grad_norm": 1.0923491716384888,
"learning_rate": 9.280888113010415e-05,
"loss": 0.6866328120231628,
"step": 3400
},
{
"epoch": 1.4345991561181435,
"eval_loss": 0.715917706489563,
"eval_runtime": 868.51,
"eval_samples_per_second": 2.426,
"eval_steps_per_second": 2.426,
"step": 3400
},
{
"epoch": 1.4354430379746836,
"grad_norm": 1.2515597343444824,
"learning_rate": 9.279646828038083e-05,
"loss": 0.6617444157600403,
"step": 3402
},
{
"epoch": 1.4362869198312236,
"grad_norm": 1.2122540473937988,
"learning_rate": 9.278404555833422e-05,
"loss": 0.6373176574707031,
"step": 3404
},
{
"epoch": 1.4371308016877637,
"grad_norm": 1.191904902458191,
"learning_rate": 9.277161296682997e-05,
"loss": 0.6506488919258118,
"step": 3406
},
{
"epoch": 1.437974683544304,
"grad_norm": 1.2492214441299438,
"learning_rate": 9.275917050873606e-05,
"loss": 0.7172291874885559,
"step": 3408
},
{
"epoch": 1.4388185654008439,
"grad_norm": 1.0518640279769897,
"learning_rate": 9.274671818692272e-05,
"loss": 0.6180248260498047,
"step": 3410
},
{
"epoch": 1.439662447257384,
"grad_norm": 1.150563359260559,
"learning_rate": 9.273425600426245e-05,
"loss": 0.6828892827033997,
"step": 3412
},
{
"epoch": 1.4405063291139242,
"grad_norm": 1.76945960521698,
"learning_rate": 9.272178396363005e-05,
"loss": 0.6585919857025146,
"step": 3414
},
{
"epoch": 1.4413502109704641,
"grad_norm": 1.2367758750915527,
"learning_rate": 9.270930206790257e-05,
"loss": 0.7548692226409912,
"step": 3416
},
{
"epoch": 1.4421940928270043,
"grad_norm": 1.2292778491973877,
"learning_rate": 9.269681031995936e-05,
"loss": 0.7017102837562561,
"step": 3418
},
{
"epoch": 1.4430379746835442,
"grad_norm": 1.2193396091461182,
"learning_rate": 9.268430872268202e-05,
"loss": 0.6657648682594299,
"step": 3420
},
{
"epoch": 1.4438818565400844,
"grad_norm": 1.0505954027175903,
"learning_rate": 9.267179727895443e-05,
"loss": 0.6950910091400146,
"step": 3422
},
{
"epoch": 1.4447257383966245,
"grad_norm": 1.1560698747634888,
"learning_rate": 9.265927599166272e-05,
"loss": 0.689308226108551,
"step": 3424
},
{
"epoch": 1.4455696202531645,
"grad_norm": 1.189336895942688,
"learning_rate": 9.264674486369533e-05,
"loss": 0.6481659412384033,
"step": 3426
},
{
"epoch": 1.4464135021097047,
"grad_norm": 1.3527976274490356,
"learning_rate": 9.263420389794294e-05,
"loss": 0.6626612544059753,
"step": 3428
},
{
"epoch": 1.4472573839662446,
"grad_norm": 1.096303105354309,
"learning_rate": 9.262165309729854e-05,
"loss": 0.690841794013977,
"step": 3430
},
{
"epoch": 1.4481012658227848,
"grad_norm": 1.2131421566009521,
"learning_rate": 9.260909246465732e-05,
"loss": 0.6497649550437927,
"step": 3432
},
{
"epoch": 1.448945147679325,
"grad_norm": 1.1831032037734985,
"learning_rate": 9.259652200291678e-05,
"loss": 0.6236130595207214,
"step": 3434
},
{
"epoch": 1.4497890295358649,
"grad_norm": 0.9745979309082031,
"learning_rate": 9.25839417149767e-05,
"loss": 0.5223423838615417,
"step": 3436
},
{
"epoch": 1.450632911392405,
"grad_norm": 1.372460126876831,
"learning_rate": 9.257135160373912e-05,
"loss": 0.6642022728919983,
"step": 3438
},
{
"epoch": 1.4514767932489452,
"grad_norm": 1.421044111251831,
"learning_rate": 9.255875167210832e-05,
"loss": 0.5426992774009705,
"step": 3440
},
{
"epoch": 1.4523206751054851,
"grad_norm": 1.1694250106811523,
"learning_rate": 9.254614192299086e-05,
"loss": 0.6260567307472229,
"step": 3442
},
{
"epoch": 1.4531645569620253,
"grad_norm": 1.0892298221588135,
"learning_rate": 9.253352235929558e-05,
"loss": 0.5776100158691406,
"step": 3444
},
{
"epoch": 1.4540084388185655,
"grad_norm": 1.1841259002685547,
"learning_rate": 9.252089298393356e-05,
"loss": 0.6495202779769897,
"step": 3446
},
{
"epoch": 1.4548523206751054,
"grad_norm": 1.1133549213409424,
"learning_rate": 9.250825379981815e-05,
"loss": 0.6570594906806946,
"step": 3448
},
{
"epoch": 1.4556962025316456,
"grad_norm": 1.197100281715393,
"learning_rate": 9.249560480986498e-05,
"loss": 0.6496587991714478,
"step": 3450
},
{
"epoch": 1.4565400843881857,
"grad_norm": 1.1661107540130615,
"learning_rate": 9.248294601699193e-05,
"loss": 0.6644704341888428,
"step": 3452
},
{
"epoch": 1.4573839662447257,
"grad_norm": 1.2257879972457886,
"learning_rate": 9.247027742411912e-05,
"loss": 0.6451231241226196,
"step": 3454
},
{
"epoch": 1.4582278481012658,
"grad_norm": 1.3634982109069824,
"learning_rate": 9.245759903416897e-05,
"loss": 0.6108601093292236,
"step": 3456
},
{
"epoch": 1.459071729957806,
"grad_norm": 1.1802605390548706,
"learning_rate": 9.244491085006615e-05,
"loss": 0.6080004572868347,
"step": 3458
},
{
"epoch": 1.459915611814346,
"grad_norm": 1.280831217765808,
"learning_rate": 9.243221287473756e-05,
"loss": 0.6406423449516296,
"step": 3460
},
{
"epoch": 1.460759493670886,
"grad_norm": 1.3127192258834839,
"learning_rate": 9.241950511111237e-05,
"loss": 0.7320113778114319,
"step": 3462
},
{
"epoch": 1.4616033755274263,
"grad_norm": 1.1711835861206055,
"learning_rate": 9.240678756212204e-05,
"loss": 0.572110652923584,
"step": 3464
},
{
"epoch": 1.4624472573839662,
"grad_norm": 1.347143292427063,
"learning_rate": 9.239406023070028e-05,
"loss": 0.7446795105934143,
"step": 3466
},
{
"epoch": 1.4632911392405064,
"grad_norm": 1.4953652620315552,
"learning_rate": 9.238132311978299e-05,
"loss": 0.6709978580474854,
"step": 3468
},
{
"epoch": 1.4641350210970465,
"grad_norm": 1.2199387550354004,
"learning_rate": 9.236857623230842e-05,
"loss": 0.6691445112228394,
"step": 3470
},
{
"epoch": 1.4649789029535865,
"grad_norm": 1.0959199666976929,
"learning_rate": 9.235581957121702e-05,
"loss": 0.6964292526245117,
"step": 3472
},
{
"epoch": 1.4658227848101266,
"grad_norm": 1.455505609512329,
"learning_rate": 9.234305313945149e-05,
"loss": 0.6880454421043396,
"step": 3474
},
{
"epoch": 1.4666666666666668,
"grad_norm": 1.2820862531661987,
"learning_rate": 9.233027693995681e-05,
"loss": 0.6737138032913208,
"step": 3476
},
{
"epoch": 1.4675105485232067,
"grad_norm": 1.3459213972091675,
"learning_rate": 9.231749097568023e-05,
"loss": 0.6874006390571594,
"step": 3478
},
{
"epoch": 1.4683544303797469,
"grad_norm": 1.2815442085266113,
"learning_rate": 9.230469524957119e-05,
"loss": 0.7179469466209412,
"step": 3480
},
{
"epoch": 1.469198312236287,
"grad_norm": 1.6181597709655762,
"learning_rate": 9.229188976458145e-05,
"loss": 0.7525522112846375,
"step": 3482
},
{
"epoch": 1.470042194092827,
"grad_norm": 1.0633227825164795,
"learning_rate": 9.227907452366495e-05,
"loss": 0.5918128490447998,
"step": 3484
},
{
"epoch": 1.4708860759493672,
"grad_norm": 1.2055985927581787,
"learning_rate": 9.226624952977796e-05,
"loss": 0.6686186194419861,
"step": 3486
},
{
"epoch": 1.471729957805907,
"grad_norm": 1.2495088577270508,
"learning_rate": 9.225341478587893e-05,
"loss": 0.764410674571991,
"step": 3488
},
{
"epoch": 1.4725738396624473,
"grad_norm": 1.174229383468628,
"learning_rate": 9.22405702949286e-05,
"loss": 0.7066780924797058,
"step": 3490
},
{
"epoch": 1.4734177215189874,
"grad_norm": 1.0970302820205688,
"learning_rate": 9.222771605988995e-05,
"loss": 0.6740228533744812,
"step": 3492
},
{
"epoch": 1.4742616033755274,
"grad_norm": 1.2470436096191406,
"learning_rate": 9.221485208372822e-05,
"loss": 0.698371410369873,
"step": 3494
},
{
"epoch": 1.4751054852320675,
"grad_norm": 1.0750112533569336,
"learning_rate": 9.220197836941084e-05,
"loss": 0.6354188919067383,
"step": 3496
},
{
"epoch": 1.4759493670886075,
"grad_norm": 1.2656232118606567,
"learning_rate": 9.218909491990757e-05,
"loss": 0.7268608212471008,
"step": 3498
},
{
"epoch": 1.4767932489451476,
"grad_norm": 1.2389028072357178,
"learning_rate": 9.217620173819037e-05,
"loss": 0.6652966141700745,
"step": 3500
},
{
"epoch": 1.4767932489451476,
"eval_loss": 0.7155047059059143,
"eval_runtime": 855.8428,
"eval_samples_per_second": 2.462,
"eval_steps_per_second": 2.462,
"step": 3500
},
{
"epoch": 1.4776371308016878,
"grad_norm": 1.218304991722107,
"learning_rate": 9.216329882723343e-05,
"loss": 0.6845020651817322,
"step": 3502
},
{
"epoch": 1.4784810126582277,
"grad_norm": 1.123903512954712,
"learning_rate": 9.21503861900132e-05,
"loss": 0.6972519755363464,
"step": 3504
},
{
"epoch": 1.479324894514768,
"grad_norm": 1.1827739477157593,
"learning_rate": 9.213746382950839e-05,
"loss": 0.6699702739715576,
"step": 3506
},
{
"epoch": 1.480168776371308,
"grad_norm": 0.9934872984886169,
"learning_rate": 9.212453174869995e-05,
"loss": 0.5623225569725037,
"step": 3508
},
{
"epoch": 1.481012658227848,
"grad_norm": 1.221093773841858,
"learning_rate": 9.211158995057105e-05,
"loss": 0.6527173519134521,
"step": 3510
},
{
"epoch": 1.4818565400843882,
"grad_norm": 1.4569166898727417,
"learning_rate": 9.209863843810711e-05,
"loss": 0.7015712261199951,
"step": 3512
},
{
"epoch": 1.4827004219409283,
"grad_norm": 1.0764813423156738,
"learning_rate": 9.208567721429581e-05,
"loss": 0.6442505717277527,
"step": 3514
},
{
"epoch": 1.4835443037974683,
"grad_norm": 2.1307506561279297,
"learning_rate": 9.207270628212704e-05,
"loss": 0.666451096534729,
"step": 3516
},
{
"epoch": 1.4843881856540084,
"grad_norm": 1.180590271949768,
"learning_rate": 9.205972564459296e-05,
"loss": 0.6354807019233704,
"step": 3518
},
{
"epoch": 1.4852320675105486,
"grad_norm": 1.2999447584152222,
"learning_rate": 9.204673530468795e-05,
"loss": 0.6080324053764343,
"step": 3520
},
{
"epoch": 1.4860759493670885,
"grad_norm": 1.1680655479431152,
"learning_rate": 9.203373526540862e-05,
"loss": 0.6411244869232178,
"step": 3522
},
{
"epoch": 1.4869198312236287,
"grad_norm": 1.0565013885498047,
"learning_rate": 9.202072552975383e-05,
"loss": 0.6498287916183472,
"step": 3524
},
{
"epoch": 1.4877637130801689,
"grad_norm": 1.246267318725586,
"learning_rate": 9.20077061007247e-05,
"loss": 0.633613109588623,
"step": 3526
},
{
"epoch": 1.4886075949367088,
"grad_norm": 1.0626300573349,
"learning_rate": 9.199467698132453e-05,
"loss": 0.6102107167243958,
"step": 3528
},
{
"epoch": 1.489451476793249,
"grad_norm": 1.256600260734558,
"learning_rate": 9.198163817455892e-05,
"loss": 0.669352114200592,
"step": 3530
},
{
"epoch": 1.4902953586497891,
"grad_norm": 1.143188238143921,
"learning_rate": 9.196858968343565e-05,
"loss": 0.6305804252624512,
"step": 3532
},
{
"epoch": 1.491139240506329,
"grad_norm": 1.1471205949783325,
"learning_rate": 9.195553151096475e-05,
"loss": 0.6256994605064392,
"step": 3534
},
{
"epoch": 1.4919831223628692,
"grad_norm": 1.1771589517593384,
"learning_rate": 9.194246366015851e-05,
"loss": 0.6395107507705688,
"step": 3536
},
{
"epoch": 1.4928270042194094,
"grad_norm": 1.1997097730636597,
"learning_rate": 9.192938613403144e-05,
"loss": 0.6875160932540894,
"step": 3538
},
{
"epoch": 1.4936708860759493,
"grad_norm": 1.3962169885635376,
"learning_rate": 9.191629893560024e-05,
"loss": 0.7216510772705078,
"step": 3540
},
{
"epoch": 1.4945147679324895,
"grad_norm": 1.1835654973983765,
"learning_rate": 9.19032020678839e-05,
"loss": 0.6870693564414978,
"step": 3542
},
{
"epoch": 1.4953586497890297,
"grad_norm": 1.112331509590149,
"learning_rate": 9.18900955339036e-05,
"loss": 0.6266092658042908,
"step": 3544
},
{
"epoch": 1.4962025316455696,
"grad_norm": 1.0298354625701904,
"learning_rate": 9.187697933668278e-05,
"loss": 0.5906343460083008,
"step": 3546
},
{
"epoch": 1.4970464135021098,
"grad_norm": 1.2650012969970703,
"learning_rate": 9.186385347924709e-05,
"loss": 0.6203610897064209,
"step": 3548
},
{
"epoch": 1.49789029535865,
"grad_norm": 1.1208417415618896,
"learning_rate": 9.185071796462441e-05,
"loss": 0.6841281652450562,
"step": 3550
},
{
"epoch": 1.4987341772151899,
"grad_norm": 1.1319488286972046,
"learning_rate": 9.183757279584486e-05,
"loss": 0.7089514136314392,
"step": 3552
},
{
"epoch": 1.49957805907173,
"grad_norm": 1.1104235649108887,
"learning_rate": 9.182441797594076e-05,
"loss": 0.6663861870765686,
"step": 3554
},
{
"epoch": 1.5004219409282702,
"grad_norm": 1.161412000656128,
"learning_rate": 9.18112535079467e-05,
"loss": 0.6713237762451172,
"step": 3556
},
{
"epoch": 1.5012658227848101,
"grad_norm": 1.2925246953964233,
"learning_rate": 9.179807939489945e-05,
"loss": 0.6665274500846863,
"step": 3558
},
{
"epoch": 1.50210970464135,
"grad_norm": 1.0968270301818848,
"learning_rate": 9.178489563983802e-05,
"loss": 0.6881593465805054,
"step": 3560
},
{
"epoch": 1.5029535864978905,
"grad_norm": 1.111439824104309,
"learning_rate": 9.177170224580368e-05,
"loss": 0.631568431854248,
"step": 3562
},
{
"epoch": 1.5037974683544304,
"grad_norm": 1.6731075048446655,
"learning_rate": 9.175849921583986e-05,
"loss": 0.6896167397499084,
"step": 3564
},
{
"epoch": 1.5046413502109703,
"grad_norm": 1.226739525794983,
"learning_rate": 9.174528655299226e-05,
"loss": 0.6285277605056763,
"step": 3566
},
{
"epoch": 1.5054852320675105,
"grad_norm": 1.2030941247940063,
"learning_rate": 9.17320642603088e-05,
"loss": 0.6256678700447083,
"step": 3568
},
{
"epoch": 1.5063291139240507,
"grad_norm": 1.1980781555175781,
"learning_rate": 9.171883234083958e-05,
"loss": 0.6895992159843445,
"step": 3570
},
{
"epoch": 1.5071729957805906,
"grad_norm": 1.2083429098129272,
"learning_rate": 9.170559079763696e-05,
"loss": 0.6642275452613831,
"step": 3572
},
{
"epoch": 1.5080168776371308,
"grad_norm": 1.134020209312439,
"learning_rate": 9.169233963375552e-05,
"loss": 0.7441924214363098,
"step": 3574
},
{
"epoch": 1.508860759493671,
"grad_norm": 1.8178621530532837,
"learning_rate": 9.167907885225204e-05,
"loss": 0.6435995101928711,
"step": 3576
},
{
"epoch": 1.5097046413502109,
"grad_norm": 1.3850326538085938,
"learning_rate": 9.166580845618553e-05,
"loss": 0.6933603882789612,
"step": 3578
},
{
"epoch": 1.510548523206751,
"grad_norm": 1.2500641345977783,
"learning_rate": 9.165252844861723e-05,
"loss": 0.6686714887619019,
"step": 3580
},
{
"epoch": 1.5113924050632912,
"grad_norm": 1.0226643085479736,
"learning_rate": 9.163923883261056e-05,
"loss": 0.607890248298645,
"step": 3582
},
{
"epoch": 1.5122362869198311,
"grad_norm": 1.233402132987976,
"learning_rate": 9.162593961123118e-05,
"loss": 0.6604583859443665,
"step": 3584
},
{
"epoch": 1.5130801687763713,
"grad_norm": 1.2609056234359741,
"learning_rate": 9.161263078754698e-05,
"loss": 0.6756428480148315,
"step": 3586
},
{
"epoch": 1.5139240506329115,
"grad_norm": 1.22673761844635,
"learning_rate": 9.159931236462805e-05,
"loss": 0.6990940570831299,
"step": 3588
},
{
"epoch": 1.5147679324894514,
"grad_norm": 1.1386182308197021,
"learning_rate": 9.158598434554668e-05,
"loss": 0.6436648964881897,
"step": 3590
},
{
"epoch": 1.5156118143459916,
"grad_norm": 1.1136831045150757,
"learning_rate": 9.157264673337739e-05,
"loss": 0.6420145034790039,
"step": 3592
},
{
"epoch": 1.5164556962025317,
"grad_norm": 1.1957908868789673,
"learning_rate": 9.155929953119693e-05,
"loss": 0.6518592834472656,
"step": 3594
},
{
"epoch": 1.5172995780590717,
"grad_norm": 1.1049647331237793,
"learning_rate": 9.154594274208422e-05,
"loss": 0.6891129612922668,
"step": 3596
},
{
"epoch": 1.5181434599156118,
"grad_norm": 1.243675947189331,
"learning_rate": 9.153257636912043e-05,
"loss": 0.6945107579231262,
"step": 3598
},
{
"epoch": 1.518987341772152,
"grad_norm": 1.2633713483810425,
"learning_rate": 9.15192004153889e-05,
"loss": 0.7011660933494568,
"step": 3600
},
{
"epoch": 1.518987341772152,
"eval_loss": 0.7118256688117981,
"eval_runtime": 851.3079,
"eval_samples_per_second": 2.475,
"eval_steps_per_second": 2.475,
"step": 3600
},
{
"epoch": 1.519831223628692,
"grad_norm": 1.2995525598526,
"learning_rate": 9.150581488397525e-05,
"loss": 0.6843758821487427,
"step": 3602
},
{
"epoch": 1.520675105485232,
"grad_norm": 1.3140910863876343,
"learning_rate": 9.149241977796723e-05,
"loss": 0.6699353456497192,
"step": 3604
},
{
"epoch": 1.5215189873417723,
"grad_norm": 1.2674909830093384,
"learning_rate": 9.147901510045485e-05,
"loss": 0.7269271612167358,
"step": 3606
},
{
"epoch": 1.5223628691983122,
"grad_norm": 1.0232038497924805,
"learning_rate": 9.146560085453031e-05,
"loss": 0.5556837916374207,
"step": 3608
},
{
"epoch": 1.5232067510548524,
"grad_norm": 1.2598992586135864,
"learning_rate": 9.1452177043288e-05,
"loss": 0.7273092269897461,
"step": 3610
},
{
"epoch": 1.5240506329113925,
"grad_norm": 1.2002917528152466,
"learning_rate": 9.143874366982455e-05,
"loss": 0.6897470355033875,
"step": 3612
},
{
"epoch": 1.5248945147679325,
"grad_norm": 1.0959099531173706,
"learning_rate": 9.142530073723878e-05,
"loss": 0.6060715913772583,
"step": 3614
},
{
"epoch": 1.5257383966244724,
"grad_norm": 1.9890750646591187,
"learning_rate": 9.141184824863173e-05,
"loss": 0.6585046052932739,
"step": 3616
},
{
"epoch": 1.5265822784810128,
"grad_norm": 1.1460137367248535,
"learning_rate": 9.139838620710663e-05,
"loss": 0.6022046804428101,
"step": 3618
},
{
"epoch": 1.5274261603375527,
"grad_norm": 1.193206548690796,
"learning_rate": 9.138491461576888e-05,
"loss": 0.6332581639289856,
"step": 3620
},
{
"epoch": 1.5282700421940927,
"grad_norm": 1.2813689708709717,
"learning_rate": 9.137143347772614e-05,
"loss": 0.6690208315849304,
"step": 3622
},
{
"epoch": 1.529113924050633,
"grad_norm": 1.0950052738189697,
"learning_rate": 9.135794279608827e-05,
"loss": 0.6034293174743652,
"step": 3624
},
{
"epoch": 1.529957805907173,
"grad_norm": 1.208884358406067,
"learning_rate": 9.134444257396729e-05,
"loss": 0.7077960968017578,
"step": 3626
},
{
"epoch": 1.530801687763713,
"grad_norm": 1.093759298324585,
"learning_rate": 9.133093281447742e-05,
"loss": 0.6741147637367249,
"step": 3628
},
{
"epoch": 1.5316455696202531,
"grad_norm": 1.1280012130737305,
"learning_rate": 9.131741352073514e-05,
"loss": 0.6816818118095398,
"step": 3630
},
{
"epoch": 1.5324894514767933,
"grad_norm": 1.2868385314941406,
"learning_rate": 9.130388469585907e-05,
"loss": 0.7149180769920349,
"step": 3632
},
{
"epoch": 1.5333333333333332,
"grad_norm": 0.9654553532600403,
"learning_rate": 9.129034634297007e-05,
"loss": 0.613467812538147,
"step": 3634
},
{
"epoch": 1.5341772151898734,
"grad_norm": 1.8958736658096313,
"learning_rate": 9.127679846519115e-05,
"loss": 0.7034116387367249,
"step": 3636
},
{
"epoch": 1.5350210970464135,
"grad_norm": 1.305284857749939,
"learning_rate": 9.126324106564757e-05,
"loss": 0.7076106667518616,
"step": 3638
},
{
"epoch": 1.5358649789029535,
"grad_norm": 1.1843762397766113,
"learning_rate": 9.124967414746675e-05,
"loss": 0.6671180725097656,
"step": 3640
},
{
"epoch": 1.5367088607594936,
"grad_norm": 1.0460047721862793,
"learning_rate": 9.123609771377832e-05,
"loss": 0.667533814907074,
"step": 3642
},
{
"epoch": 1.5375527426160338,
"grad_norm": 1.0441135168075562,
"learning_rate": 9.122251176771409e-05,
"loss": 0.6454499959945679,
"step": 3644
},
{
"epoch": 1.5383966244725737,
"grad_norm": 1.5647634267807007,
"learning_rate": 9.120891631240811e-05,
"loss": 0.677007794380188,
"step": 3646
},
{
"epoch": 1.539240506329114,
"grad_norm": 1.0650273561477661,
"learning_rate": 9.119531135099655e-05,
"loss": 0.7017449736595154,
"step": 3648
},
{
"epoch": 1.540084388185654,
"grad_norm": 1.2904767990112305,
"learning_rate": 9.118169688661784e-05,
"loss": 0.683830738067627,
"step": 3650
},
{
"epoch": 1.540928270042194,
"grad_norm": 1.1278672218322754,
"learning_rate": 9.116807292241257e-05,
"loss": 0.5923286080360413,
"step": 3652
},
{
"epoch": 1.5417721518987342,
"grad_norm": 1.1107184886932373,
"learning_rate": 9.115443946152352e-05,
"loss": 0.6595140099525452,
"step": 3654
},
{
"epoch": 1.5426160337552743,
"grad_norm": 1.0917898416519165,
"learning_rate": 9.114079650709566e-05,
"loss": 0.655241072177887,
"step": 3656
},
{
"epoch": 1.5434599156118143,
"grad_norm": 1.1922433376312256,
"learning_rate": 9.11271440622762e-05,
"loss": 0.5987096428871155,
"step": 3658
},
{
"epoch": 1.5443037974683544,
"grad_norm": 0.9974617958068848,
"learning_rate": 9.111348213021445e-05,
"loss": 0.5710145235061646,
"step": 3660
},
{
"epoch": 1.5451476793248946,
"grad_norm": 1.133683443069458,
"learning_rate": 9.109981071406197e-05,
"loss": 0.6067734360694885,
"step": 3662
},
{
"epoch": 1.5459915611814345,
"grad_norm": 1.1958736181259155,
"learning_rate": 9.108612981697248e-05,
"loss": 0.622981071472168,
"step": 3664
},
{
"epoch": 1.5468354430379747,
"grad_norm": 1.234328031539917,
"learning_rate": 9.107243944210194e-05,
"loss": 0.6520710587501526,
"step": 3666
},
{
"epoch": 1.5476793248945149,
"grad_norm": 1.0374714136123657,
"learning_rate": 9.105873959260842e-05,
"loss": 0.5993341207504272,
"step": 3668
},
{
"epoch": 1.5485232067510548,
"grad_norm": 0.9987428784370422,
"learning_rate": 9.104503027165223e-05,
"loss": 0.6564813852310181,
"step": 3670
},
{
"epoch": 1.549367088607595,
"grad_norm": 1.0823339223861694,
"learning_rate": 9.103131148239584e-05,
"loss": 0.61710524559021,
"step": 3672
},
{
"epoch": 1.5502109704641351,
"grad_norm": 1.3481065034866333,
"learning_rate": 9.101758322800391e-05,
"loss": 0.687752366065979,
"step": 3674
},
{
"epoch": 1.551054852320675,
"grad_norm": 1.2243965864181519,
"learning_rate": 9.10038455116433e-05,
"loss": 0.5981095433235168,
"step": 3676
},
{
"epoch": 1.5518987341772152,
"grad_norm": 1.1384631395339966,
"learning_rate": 9.0990098336483e-05,
"loss": 0.7181004285812378,
"step": 3678
},
{
"epoch": 1.5527426160337554,
"grad_norm": 1.042925477027893,
"learning_rate": 9.097634170569426e-05,
"loss": 0.6137188076972961,
"step": 3680
},
{
"epoch": 1.5535864978902953,
"grad_norm": 1.372023105621338,
"learning_rate": 9.096257562245045e-05,
"loss": 0.6761168241500854,
"step": 3682
},
{
"epoch": 1.5544303797468353,
"grad_norm": 1.0574673414230347,
"learning_rate": 9.094880008992714e-05,
"loss": 0.614276647567749,
"step": 3684
},
{
"epoch": 1.5552742616033757,
"grad_norm": 1.2894645929336548,
"learning_rate": 9.093501511130208e-05,
"loss": 0.668122410774231,
"step": 3686
},
{
"epoch": 1.5561181434599156,
"grad_norm": 1.2241230010986328,
"learning_rate": 9.092122068975523e-05,
"loss": 0.6305631399154663,
"step": 3688
},
{
"epoch": 1.5569620253164556,
"grad_norm": 1.1316208839416504,
"learning_rate": 9.090741682846866e-05,
"loss": 0.633276641368866,
"step": 3690
},
{
"epoch": 1.557805907172996,
"grad_norm": 1.2857953310012817,
"learning_rate": 9.089360353062666e-05,
"loss": 0.6657599806785583,
"step": 3692
},
{
"epoch": 1.5586497890295359,
"grad_norm": 1.2325671911239624,
"learning_rate": 9.087978079941573e-05,
"loss": 0.6379332542419434,
"step": 3694
},
{
"epoch": 1.5594936708860758,
"grad_norm": 1.3286080360412598,
"learning_rate": 9.086594863802445e-05,
"loss": 0.6841909885406494,
"step": 3696
},
{
"epoch": 1.560337552742616,
"grad_norm": 1.261890172958374,
"learning_rate": 9.085210704964368e-05,
"loss": 0.6735964417457581,
"step": 3698
},
{
"epoch": 1.5611814345991561,
"grad_norm": 1.0922305583953857,
"learning_rate": 9.083825603746639e-05,
"loss": 0.6602351665496826,
"step": 3700
},
{
"epoch": 1.5611814345991561,
"eval_loss": 0.7099412679672241,
"eval_runtime": 857.2273,
"eval_samples_per_second": 2.458,
"eval_steps_per_second": 2.458,
"step": 3700
},
{
"epoch": 1.562025316455696,
"grad_norm": 1.1113468408584595,
"learning_rate": 9.082439560468774e-05,
"loss": 0.6590834259986877,
"step": 3702
},
{
"epoch": 1.5628691983122363,
"grad_norm": 1.1476659774780273,
"learning_rate": 9.081052575450508e-05,
"loss": 0.6397460103034973,
"step": 3704
},
{
"epoch": 1.5637130801687764,
"grad_norm": 1.2270452976226807,
"learning_rate": 9.07966464901179e-05,
"loss": 0.6337460279464722,
"step": 3706
},
{
"epoch": 1.5645569620253164,
"grad_norm": 1.233667016029358,
"learning_rate": 9.07827578147279e-05,
"loss": 0.680374801158905,
"step": 3708
},
{
"epoch": 1.5654008438818565,
"grad_norm": 1.0761466026306152,
"learning_rate": 9.076885973153891e-05,
"loss": 0.6234241724014282,
"step": 3710
},
{
"epoch": 1.5662447257383967,
"grad_norm": 0.9219012260437012,
"learning_rate": 9.075495224375697e-05,
"loss": 0.6096800565719604,
"step": 3712
},
{
"epoch": 1.5670886075949366,
"grad_norm": 1.151168942451477,
"learning_rate": 9.074103535459026e-05,
"loss": 0.649919867515564,
"step": 3714
},
{
"epoch": 1.5679324894514768,
"grad_norm": 1.1380470991134644,
"learning_rate": 9.072710906724914e-05,
"loss": 0.6704574227333069,
"step": 3716
},
{
"epoch": 1.568776371308017,
"grad_norm": 1.2184447050094604,
"learning_rate": 9.071317338494614e-05,
"loss": 0.6619362831115723,
"step": 3718
},
{
"epoch": 1.5696202531645569,
"grad_norm": 1.131170630455017,
"learning_rate": 9.069922831089594e-05,
"loss": 0.6179121732711792,
"step": 3720
},
{
"epoch": 1.570464135021097,
"grad_norm": 1.2668405771255493,
"learning_rate": 9.06852738483154e-05,
"loss": 0.594958484172821,
"step": 3722
},
{
"epoch": 1.5713080168776372,
"grad_norm": 1.1624782085418701,
"learning_rate": 9.067131000042359e-05,
"loss": 0.6323778629302979,
"step": 3724
},
{
"epoch": 1.5721518987341772,
"grad_norm": 1.2936128377914429,
"learning_rate": 9.065733677044166e-05,
"loss": 0.628058910369873,
"step": 3726
},
{
"epoch": 1.5729957805907173,
"grad_norm": 1.1847784519195557,
"learning_rate": 9.064335416159296e-05,
"loss": 0.6472614407539368,
"step": 3728
},
{
"epoch": 1.5738396624472575,
"grad_norm": 1.8903449773788452,
"learning_rate": 9.062936217710305e-05,
"loss": 0.6395491361618042,
"step": 3730
},
{
"epoch": 1.5746835443037974,
"grad_norm": 1.1150785684585571,
"learning_rate": 9.061536082019956e-05,
"loss": 0.6911961436271667,
"step": 3732
},
{
"epoch": 1.5755274261603376,
"grad_norm": 1.1206107139587402,
"learning_rate": 9.060135009411239e-05,
"loss": 0.7051874399185181,
"step": 3734
},
{
"epoch": 1.5763713080168777,
"grad_norm": 1.27924382686615,
"learning_rate": 9.05873300020735e-05,
"loss": 0.7012752890586853,
"step": 3736
},
{
"epoch": 1.5772151898734177,
"grad_norm": 1.3970832824707031,
"learning_rate": 9.057330054731707e-05,
"loss": 0.7185142040252686,
"step": 3738
},
{
"epoch": 1.5780590717299579,
"grad_norm": 0.9732457995414734,
"learning_rate": 9.055926173307945e-05,
"loss": 0.6298858523368835,
"step": 3740
},
{
"epoch": 1.578902953586498,
"grad_norm": 1.230928897857666,
"learning_rate": 9.054521356259909e-05,
"loss": 0.7142943739891052,
"step": 3742
},
{
"epoch": 1.579746835443038,
"grad_norm": 1.1297426223754883,
"learning_rate": 9.053115603911664e-05,
"loss": 0.6535376310348511,
"step": 3744
},
{
"epoch": 1.580590717299578,
"grad_norm": 1.2132076025009155,
"learning_rate": 9.051708916587491e-05,
"loss": 0.6236510872840881,
"step": 3746
},
{
"epoch": 1.5814345991561183,
"grad_norm": 1.201319932937622,
"learning_rate": 9.050301294611885e-05,
"loss": 0.6752219200134277,
"step": 3748
},
{
"epoch": 1.5822784810126582,
"grad_norm": 1.2969163656234741,
"learning_rate": 9.048892738309559e-05,
"loss": 0.7248554825782776,
"step": 3750
},
{
"epoch": 1.5831223628691982,
"grad_norm": 1.0721957683563232,
"learning_rate": 9.047483248005439e-05,
"loss": 0.6488997340202332,
"step": 3752
},
{
"epoch": 1.5839662447257385,
"grad_norm": 0.9988508820533752,
"learning_rate": 9.046072824024667e-05,
"loss": 0.6191130876541138,
"step": 3754
},
{
"epoch": 1.5848101265822785,
"grad_norm": 1.260183572769165,
"learning_rate": 9.0446614666926e-05,
"loss": 0.6681985259056091,
"step": 3756
},
{
"epoch": 1.5856540084388184,
"grad_norm": 1.1288834810256958,
"learning_rate": 9.043249176334812e-05,
"loss": 0.662024736404419,
"step": 3758
},
{
"epoch": 1.5864978902953588,
"grad_norm": 1.4384263753890991,
"learning_rate": 9.04183595327709e-05,
"loss": 0.609916627407074,
"step": 3760
},
{
"epoch": 1.5873417721518988,
"grad_norm": 1.1109941005706787,
"learning_rate": 9.04042179784544e-05,
"loss": 0.6532528400421143,
"step": 3762
},
{
"epoch": 1.5881856540084387,
"grad_norm": 1.0959233045578003,
"learning_rate": 9.039006710366078e-05,
"loss": 0.7136290669441223,
"step": 3764
},
{
"epoch": 1.5890295358649789,
"grad_norm": 1.2313964366912842,
"learning_rate": 9.037590691165439e-05,
"loss": 0.6907190084457397,
"step": 3766
},
{
"epoch": 1.589873417721519,
"grad_norm": 1.3127682209014893,
"learning_rate": 9.036173740570172e-05,
"loss": 0.7114790678024292,
"step": 3768
},
{
"epoch": 1.590717299578059,
"grad_norm": 1.0038903951644897,
"learning_rate": 9.034755858907138e-05,
"loss": 0.6257581114768982,
"step": 3770
},
{
"epoch": 1.5915611814345991,
"grad_norm": 1.1058061122894287,
"learning_rate": 9.033337046503416e-05,
"loss": 0.578145444393158,
"step": 3772
},
{
"epoch": 1.5924050632911393,
"grad_norm": 1.0893515348434448,
"learning_rate": 9.0319173036863e-05,
"loss": 0.6312620043754578,
"step": 3774
},
{
"epoch": 1.5932489451476792,
"grad_norm": 1.1091047525405884,
"learning_rate": 9.030496630783297e-05,
"loss": 0.6799508333206177,
"step": 3776
},
{
"epoch": 1.5940928270042194,
"grad_norm": 1.1103609800338745,
"learning_rate": 9.029075028122127e-05,
"loss": 0.678726315498352,
"step": 3778
},
{
"epoch": 1.5949367088607596,
"grad_norm": 1.1918376684188843,
"learning_rate": 9.027652496030728e-05,
"loss": 0.7357890009880066,
"step": 3780
},
{
"epoch": 1.5957805907172995,
"grad_norm": 1.0541924238204956,
"learning_rate": 9.026229034837253e-05,
"loss": 0.6079391241073608,
"step": 3782
},
{
"epoch": 1.5966244725738397,
"grad_norm": 1.195845603942871,
"learning_rate": 9.024804644870062e-05,
"loss": 0.7173702120780945,
"step": 3784
},
{
"epoch": 1.5974683544303798,
"grad_norm": 1.1362866163253784,
"learning_rate": 9.023379326457737e-05,
"loss": 0.6431670188903809,
"step": 3786
},
{
"epoch": 1.5983122362869198,
"grad_norm": 1.2327499389648438,
"learning_rate": 9.021953079929074e-05,
"loss": 0.6346777677536011,
"step": 3788
},
{
"epoch": 1.59915611814346,
"grad_norm": 1.1623177528381348,
"learning_rate": 9.020525905613078e-05,
"loss": 0.6852784156799316,
"step": 3790
},
{
"epoch": 1.6,
"grad_norm": 1.0258424282073975,
"learning_rate": 9.019097803838971e-05,
"loss": 0.6357095241546631,
"step": 3792
},
{
"epoch": 1.60084388185654,
"grad_norm": 1.0825177431106567,
"learning_rate": 9.017668774936188e-05,
"loss": 0.6663659811019897,
"step": 3794
},
{
"epoch": 1.6016877637130802,
"grad_norm": 1.1190401315689087,
"learning_rate": 9.016238819234381e-05,
"loss": 0.6009758710861206,
"step": 3796
},
{
"epoch": 1.6025316455696204,
"grad_norm": 1.09871244430542,
"learning_rate": 9.01480793706341e-05,
"loss": 0.6907890439033508,
"step": 3798
},
{
"epoch": 1.6033755274261603,
"grad_norm": 1.2046958208084106,
"learning_rate": 9.013376128753354e-05,
"loss": 0.6709389090538025,
"step": 3800
},
{
"epoch": 1.6033755274261603,
"eval_loss": 0.7080941200256348,
"eval_runtime": 865.6774,
"eval_samples_per_second": 2.434,
"eval_steps_per_second": 2.434,
"step": 3800
},
{
"epoch": 1.6042194092827005,
"grad_norm": 1.0671489238739014,
"learning_rate": 9.011943394634505e-05,
"loss": 0.653937041759491,
"step": 3802
},
{
"epoch": 1.6050632911392406,
"grad_norm": 1.4205375909805298,
"learning_rate": 9.010509735037364e-05,
"loss": 0.6647229194641113,
"step": 3804
},
{
"epoch": 1.6059071729957806,
"grad_norm": 1.3793799877166748,
"learning_rate": 9.009075150292652e-05,
"loss": 0.6981267929077148,
"step": 3806
},
{
"epoch": 1.6067510548523207,
"grad_norm": 1.0534380674362183,
"learning_rate": 9.007639640731298e-05,
"loss": 0.6151314973831177,
"step": 3808
},
{
"epoch": 1.6075949367088609,
"grad_norm": 1.1359853744506836,
"learning_rate": 9.006203206684447e-05,
"loss": 0.6671237349510193,
"step": 3810
},
{
"epoch": 1.6084388185654008,
"grad_norm": 1.2385475635528564,
"learning_rate": 9.004765848483456e-05,
"loss": 0.7145646810531616,
"step": 3812
},
{
"epoch": 1.6092827004219408,
"grad_norm": 1.1323930025100708,
"learning_rate": 9.003327566459899e-05,
"loss": 0.6524789929389954,
"step": 3814
},
{
"epoch": 1.6101265822784812,
"grad_norm": 1.1863508224487305,
"learning_rate": 9.001888360945555e-05,
"loss": 0.7574670314788818,
"step": 3816
},
{
"epoch": 1.610970464135021,
"grad_norm": 1.0288994312286377,
"learning_rate": 9.000448232272425e-05,
"loss": 0.5858811736106873,
"step": 3818
},
{
"epoch": 1.611814345991561,
"grad_norm": 1.2674148082733154,
"learning_rate": 8.999007180772719e-05,
"loss": 0.6834250688552856,
"step": 3820
},
{
"epoch": 1.6126582278481014,
"grad_norm": 1.2014318704605103,
"learning_rate": 8.997565206778856e-05,
"loss": 0.6435309052467346,
"step": 3822
},
{
"epoch": 1.6135021097046414,
"grad_norm": 1.205741286277771,
"learning_rate": 8.996122310623476e-05,
"loss": 0.6212471127510071,
"step": 3824
},
{
"epoch": 1.6143459915611813,
"grad_norm": 1.0866186618804932,
"learning_rate": 8.994678492639426e-05,
"loss": 0.6832143664360046,
"step": 3826
},
{
"epoch": 1.6151898734177215,
"grad_norm": 1.0786924362182617,
"learning_rate": 8.993233753159768e-05,
"loss": 0.6129988431930542,
"step": 3828
},
{
"epoch": 1.6160337552742616,
"grad_norm": 1.176597237586975,
"learning_rate": 8.991788092517775e-05,
"loss": 0.6376019716262817,
"step": 3830
},
{
"epoch": 1.6168776371308016,
"grad_norm": 1.149990200996399,
"learning_rate": 8.99034151104693e-05,
"loss": 0.7300569415092468,
"step": 3832
},
{
"epoch": 1.6177215189873417,
"grad_norm": 1.0655301809310913,
"learning_rate": 8.988894009080936e-05,
"loss": 0.6163336634635925,
"step": 3834
},
{
"epoch": 1.618565400843882,
"grad_norm": 1.1596909761428833,
"learning_rate": 8.987445586953703e-05,
"loss": 0.6459008455276489,
"step": 3836
},
{
"epoch": 1.6194092827004218,
"grad_norm": 1.201897382736206,
"learning_rate": 8.985996244999352e-05,
"loss": 0.6166399121284485,
"step": 3838
},
{
"epoch": 1.620253164556962,
"grad_norm": 1.1000950336456299,
"learning_rate": 8.984545983552219e-05,
"loss": 0.6438087224960327,
"step": 3840
},
{
"epoch": 1.6210970464135022,
"grad_norm": 0.9962409734725952,
"learning_rate": 8.983094802946854e-05,
"loss": 0.6238043308258057,
"step": 3842
},
{
"epoch": 1.621940928270042,
"grad_norm": 1.2501682043075562,
"learning_rate": 8.981642703518015e-05,
"loss": 0.6445946097373962,
"step": 3844
},
{
"epoch": 1.6227848101265823,
"grad_norm": 1.2027913331985474,
"learning_rate": 8.980189685600673e-05,
"loss": 0.7147613167762756,
"step": 3846
},
{
"epoch": 1.6236286919831224,
"grad_norm": 1.1382197141647339,
"learning_rate": 8.97873574953001e-05,
"loss": 0.6531714200973511,
"step": 3848
},
{
"epoch": 1.6244725738396624,
"grad_norm": 1.2600723505020142,
"learning_rate": 8.977280895641425e-05,
"loss": 0.6811055541038513,
"step": 3850
},
{
"epoch": 1.6253164556962025,
"grad_norm": 0.9908071160316467,
"learning_rate": 8.97582512427052e-05,
"loss": 0.6142261624336243,
"step": 3852
},
{
"epoch": 1.6261603375527427,
"grad_norm": 1.171557068824768,
"learning_rate": 8.974368435753117e-05,
"loss": 0.6408987045288086,
"step": 3854
},
{
"epoch": 1.6270042194092826,
"grad_norm": 1.1839419603347778,
"learning_rate": 8.972910830425247e-05,
"loss": 0.7352069616317749,
"step": 3856
},
{
"epoch": 1.6278481012658228,
"grad_norm": 1.233730673789978,
"learning_rate": 8.971452308623148e-05,
"loss": 0.7663040161132812,
"step": 3858
},
{
"epoch": 1.628691983122363,
"grad_norm": 1.3636224269866943,
"learning_rate": 8.969992870683273e-05,
"loss": 0.6496971249580383,
"step": 3860
},
{
"epoch": 1.629535864978903,
"grad_norm": 1.2819573879241943,
"learning_rate": 8.96853251694229e-05,
"loss": 0.6079609394073486,
"step": 3862
},
{
"epoch": 1.630379746835443,
"grad_norm": 1.087265968322754,
"learning_rate": 8.967071247737071e-05,
"loss": 0.6299422979354858,
"step": 3864
},
{
"epoch": 1.6312236286919832,
"grad_norm": 1.24200439453125,
"learning_rate": 8.965609063404706e-05,
"loss": 0.6691840291023254,
"step": 3866
},
{
"epoch": 1.6320675105485232,
"grad_norm": 1.0771806240081787,
"learning_rate": 8.96414596428249e-05,
"loss": 0.6623613238334656,
"step": 3868
},
{
"epoch": 1.6329113924050633,
"grad_norm": 1.1830974817276,
"learning_rate": 8.962681950707932e-05,
"loss": 0.6663276553153992,
"step": 3870
},
{
"epoch": 1.6337552742616035,
"grad_norm": 1.1107177734375,
"learning_rate": 8.961217023018754e-05,
"loss": 0.6426810622215271,
"step": 3872
},
{
"epoch": 1.6345991561181434,
"grad_norm": 1.2528507709503174,
"learning_rate": 8.959751181552886e-05,
"loss": 0.7113696336746216,
"step": 3874
},
{
"epoch": 1.6354430379746834,
"grad_norm": 1.0656070709228516,
"learning_rate": 8.958284426648467e-05,
"loss": 0.6211581230163574,
"step": 3876
},
{
"epoch": 1.6362869198312238,
"grad_norm": 1.0627381801605225,
"learning_rate": 8.956816758643852e-05,
"loss": 0.5950066447257996,
"step": 3878
},
{
"epoch": 1.6371308016877637,
"grad_norm": 0.9812912344932556,
"learning_rate": 8.955348177877603e-05,
"loss": 0.6519815325737,
"step": 3880
},
{
"epoch": 1.6379746835443036,
"grad_norm": 1.1843842267990112,
"learning_rate": 8.953878684688493e-05,
"loss": 0.6830767393112183,
"step": 3882
},
{
"epoch": 1.638818565400844,
"grad_norm": 1.0393236875534058,
"learning_rate": 8.952408279415507e-05,
"loss": 0.5920302271842957,
"step": 3884
},
{
"epoch": 1.639662447257384,
"grad_norm": 0.9931944608688354,
"learning_rate": 8.950936962397838e-05,
"loss": 0.6269177198410034,
"step": 3886
},
{
"epoch": 1.640506329113924,
"grad_norm": 1.1461358070373535,
"learning_rate": 8.949464733974891e-05,
"loss": 0.7021532654762268,
"step": 3888
},
{
"epoch": 1.6413502109704643,
"grad_norm": 1.2654093503952026,
"learning_rate": 8.947991594486279e-05,
"loss": 0.7331246733665466,
"step": 3890
},
{
"epoch": 1.6421940928270042,
"grad_norm": 1.1487081050872803,
"learning_rate": 8.946517544271831e-05,
"loss": 0.6438513994216919,
"step": 3892
},
{
"epoch": 1.6430379746835442,
"grad_norm": 1.0876784324645996,
"learning_rate": 8.945042583671579e-05,
"loss": 0.6779276728630066,
"step": 3894
},
{
"epoch": 1.6438818565400843,
"grad_norm": 1.2382020950317383,
"learning_rate": 8.943566713025768e-05,
"loss": 0.7255419492721558,
"step": 3896
},
{
"epoch": 1.6447257383966245,
"grad_norm": 1.3502718210220337,
"learning_rate": 8.942089932674855e-05,
"loss": 0.7068934440612793,
"step": 3898
},
{
"epoch": 1.6455696202531644,
"grad_norm": 1.050878643989563,
"learning_rate": 8.940612242959503e-05,
"loss": 0.608700156211853,
"step": 3900
},
{
"epoch": 1.6455696202531644,
"eval_loss": 0.7049403786659241,
"eval_runtime": 854.9866,
"eval_samples_per_second": 2.464,
"eval_steps_per_second": 2.464,
"step": 3900
},
{
"epoch": 1.6464135021097046,
"grad_norm": 1.0536954402923584,
"learning_rate": 8.939133644220588e-05,
"loss": 0.6257222890853882,
"step": 3902
},
{
"epoch": 1.6472573839662448,
"grad_norm": 1.1903947591781616,
"learning_rate": 8.937654136799195e-05,
"loss": 0.6823404431343079,
"step": 3904
},
{
"epoch": 1.6481012658227847,
"grad_norm": 1.225679874420166,
"learning_rate": 8.936173721036616e-05,
"loss": 0.6596478819847107,
"step": 3906
},
{
"epoch": 1.6489451476793249,
"grad_norm": 1.0071430206298828,
"learning_rate": 8.934692397274354e-05,
"loss": 0.5638422966003418,
"step": 3908
},
{
"epoch": 1.649789029535865,
"grad_norm": 1.0146223306655884,
"learning_rate": 8.933210165854125e-05,
"loss": 0.5743419528007507,
"step": 3910
},
{
"epoch": 1.650632911392405,
"grad_norm": 1.122976541519165,
"learning_rate": 8.931727027117848e-05,
"loss": 0.6775169372558594,
"step": 3912
},
{
"epoch": 1.6514767932489451,
"grad_norm": 0.9223271012306213,
"learning_rate": 8.930242981407656e-05,
"loss": 0.5984215140342712,
"step": 3914
},
{
"epoch": 1.6523206751054853,
"grad_norm": 1.1599735021591187,
"learning_rate": 8.928758029065891e-05,
"loss": 0.6342158913612366,
"step": 3916
},
{
"epoch": 1.6531645569620252,
"grad_norm": 1.2680121660232544,
"learning_rate": 8.927272170435101e-05,
"loss": 0.678507924079895,
"step": 3918
},
{
"epoch": 1.6540084388185654,
"grad_norm": 1.3628549575805664,
"learning_rate": 8.925785405858047e-05,
"loss": 0.6739710569381714,
"step": 3920
},
{
"epoch": 1.6548523206751056,
"grad_norm": 1.163482427597046,
"learning_rate": 8.924297735677694e-05,
"loss": 0.7050020098686218,
"step": 3922
},
{
"epoch": 1.6556962025316455,
"grad_norm": 1.2057000398635864,
"learning_rate": 8.922809160237222e-05,
"loss": 0.6847540140151978,
"step": 3924
},
{
"epoch": 1.6565400843881857,
"grad_norm": 1.2784082889556885,
"learning_rate": 8.921319679880016e-05,
"loss": 0.7079069018363953,
"step": 3926
},
{
"epoch": 1.6573839662447258,
"grad_norm": 1.1701157093048096,
"learning_rate": 8.919829294949671e-05,
"loss": 0.665060818195343,
"step": 3928
},
{
"epoch": 1.6582278481012658,
"grad_norm": 1.3886606693267822,
"learning_rate": 8.918338005789988e-05,
"loss": 0.7547550201416016,
"step": 3930
},
{
"epoch": 1.659071729957806,
"grad_norm": 0.9504727721214294,
"learning_rate": 8.91684581274498e-05,
"loss": 0.5718522667884827,
"step": 3932
},
{
"epoch": 1.659915611814346,
"grad_norm": 1.1185030937194824,
"learning_rate": 8.915352716158869e-05,
"loss": 0.5984254479408264,
"step": 3934
},
{
"epoch": 1.660759493670886,
"grad_norm": 1.1489602327346802,
"learning_rate": 8.913858716376081e-05,
"loss": 0.6749780774116516,
"step": 3936
},
{
"epoch": 1.6616033755274262,
"grad_norm": 1.389431118965149,
"learning_rate": 8.912363813741255e-05,
"loss": 0.6537864804267883,
"step": 3938
},
{
"epoch": 1.6624472573839664,
"grad_norm": 1.0958757400512695,
"learning_rate": 8.910868008599235e-05,
"loss": 0.6033569574356079,
"step": 3940
},
{
"epoch": 1.6632911392405063,
"grad_norm": 1.2735344171524048,
"learning_rate": 8.909371301295075e-05,
"loss": 0.7404987215995789,
"step": 3942
},
{
"epoch": 1.6641350210970463,
"grad_norm": 1.123336911201477,
"learning_rate": 8.907873692174038e-05,
"loss": 0.6265006065368652,
"step": 3944
},
{
"epoch": 1.6649789029535866,
"grad_norm": 1.259470820426941,
"learning_rate": 8.90637518158159e-05,
"loss": 0.650705099105835,
"step": 3946
},
{
"epoch": 1.6658227848101266,
"grad_norm": 1.4020485877990723,
"learning_rate": 8.904875769863412e-05,
"loss": 0.7813970446586609,
"step": 3948
},
{
"epoch": 1.6666666666666665,
"grad_norm": 1.1709671020507812,
"learning_rate": 8.903375457365389e-05,
"loss": 0.6499447822570801,
"step": 3950
},
{
"epoch": 1.667510548523207,
"grad_norm": 1.085585355758667,
"learning_rate": 8.901874244433612e-05,
"loss": 0.6141875386238098,
"step": 3952
},
{
"epoch": 1.6683544303797468,
"grad_norm": 1.2340166568756104,
"learning_rate": 8.900372131414386e-05,
"loss": 0.7080221176147461,
"step": 3954
},
{
"epoch": 1.6691983122362868,
"grad_norm": 1.148576259613037,
"learning_rate": 8.898869118654216e-05,
"loss": 0.6340513229370117,
"step": 3956
},
{
"epoch": 1.6700421940928272,
"grad_norm": 1.2231999635696411,
"learning_rate": 8.89736520649982e-05,
"loss": 0.6999116539955139,
"step": 3958
},
{
"epoch": 1.6708860759493671,
"grad_norm": 1.1600396633148193,
"learning_rate": 8.895860395298121e-05,
"loss": 0.7177759408950806,
"step": 3960
},
{
"epoch": 1.671729957805907,
"grad_norm": 1.3019158840179443,
"learning_rate": 8.894354685396251e-05,
"loss": 0.6485702395439148,
"step": 3962
},
{
"epoch": 1.6725738396624472,
"grad_norm": 1.0153226852416992,
"learning_rate": 8.892848077141546e-05,
"loss": 0.6189450025558472,
"step": 3964
},
{
"epoch": 1.6734177215189874,
"grad_norm": 1.1953094005584717,
"learning_rate": 8.891340570881555e-05,
"loss": 0.6756728291511536,
"step": 3966
},
{
"epoch": 1.6742616033755273,
"grad_norm": 1.3376187086105347,
"learning_rate": 8.889832166964027e-05,
"loss": 0.6851167678833008,
"step": 3968
},
{
"epoch": 1.6751054852320675,
"grad_norm": 1.0045926570892334,
"learning_rate": 8.888322865736924e-05,
"loss": 0.5991915464401245,
"step": 3970
},
{
"epoch": 1.6759493670886076,
"grad_norm": 1.2115750312805176,
"learning_rate": 8.886812667548414e-05,
"loss": 0.713362455368042,
"step": 3972
},
{
"epoch": 1.6767932489451476,
"grad_norm": 1.1887929439544678,
"learning_rate": 8.88530157274687e-05,
"loss": 0.7058883309364319,
"step": 3974
},
{
"epoch": 1.6776371308016877,
"grad_norm": 1.1465295553207397,
"learning_rate": 8.883789581680868e-05,
"loss": 0.6501380801200867,
"step": 3976
},
{
"epoch": 1.678481012658228,
"grad_norm": 1.184693694114685,
"learning_rate": 8.882276694699204e-05,
"loss": 0.6109840273857117,
"step": 3978
},
{
"epoch": 1.6793248945147679,
"grad_norm": 1.2034777402877808,
"learning_rate": 8.880762912150862e-05,
"loss": 0.6815584897994995,
"step": 3980
},
{
"epoch": 1.680168776371308,
"grad_norm": 1.1312000751495361,
"learning_rate": 8.879248234385052e-05,
"loss": 0.6859248876571655,
"step": 3982
},
{
"epoch": 1.6810126582278482,
"grad_norm": 1.2273681163787842,
"learning_rate": 8.877732661751173e-05,
"loss": 0.6426702737808228,
"step": 3984
},
{
"epoch": 1.6818565400843881,
"grad_norm": 1.2550326585769653,
"learning_rate": 8.876216194598844e-05,
"loss": 0.6462456583976746,
"step": 3986
},
{
"epoch": 1.6827004219409283,
"grad_norm": 1.3111321926116943,
"learning_rate": 8.874698833277884e-05,
"loss": 0.6293925046920776,
"step": 3988
},
{
"epoch": 1.6835443037974684,
"grad_norm": 1.037883996963501,
"learning_rate": 8.873180578138316e-05,
"loss": 0.59798264503479,
"step": 3990
},
{
"epoch": 1.6843881856540084,
"grad_norm": 1.2411901950836182,
"learning_rate": 8.871661429530376e-05,
"loss": 0.6741529703140259,
"step": 3992
},
{
"epoch": 1.6852320675105485,
"grad_norm": 1.206354022026062,
"learning_rate": 8.8701413878045e-05,
"loss": 0.5972680449485779,
"step": 3994
},
{
"epoch": 1.6860759493670887,
"grad_norm": 1.1922144889831543,
"learning_rate": 8.868620453311334e-05,
"loss": 0.5879245400428772,
"step": 3996
},
{
"epoch": 1.6869198312236287,
"grad_norm": 1.3499996662139893,
"learning_rate": 8.867098626401729e-05,
"loss": 0.7381167411804199,
"step": 3998
},
{
"epoch": 1.6877637130801688,
"grad_norm": 1.3601514101028442,
"learning_rate": 8.865575907426737e-05,
"loss": 0.6590276956558228,
"step": 4000
},
{
"epoch": 1.6877637130801688,
"eval_loss": 0.7027890682220459,
"eval_runtime": 848.7529,
"eval_samples_per_second": 2.482,
"eval_steps_per_second": 2.482,
"step": 4000
},
{
"epoch": 1.688607594936709,
"grad_norm": 1.1060529947280884,
"learning_rate": 8.864052296737624e-05,
"loss": 0.5958077907562256,
"step": 4002
},
{
"epoch": 1.689451476793249,
"grad_norm": 1.2067371606826782,
"learning_rate": 8.862527794685858e-05,
"loss": 0.6802279353141785,
"step": 4004
},
{
"epoch": 1.690295358649789,
"grad_norm": 1.0094636678695679,
"learning_rate": 8.86100240162311e-05,
"loss": 0.5701603889465332,
"step": 4006
},
{
"epoch": 1.6911392405063292,
"grad_norm": 1.0976500511169434,
"learning_rate": 8.85947611790126e-05,
"loss": 0.6580625176429749,
"step": 4008
},
{
"epoch": 1.6919831223628692,
"grad_norm": 0.9448981285095215,
"learning_rate": 8.857948943872392e-05,
"loss": 0.5947542190551758,
"step": 4010
},
{
"epoch": 1.6928270042194091,
"grad_norm": 1.219609260559082,
"learning_rate": 8.856420879888796e-05,
"loss": 0.6361464262008667,
"step": 4012
},
{
"epoch": 1.6936708860759495,
"grad_norm": 1.2395503520965576,
"learning_rate": 8.854891926302966e-05,
"loss": 0.608664333820343,
"step": 4014
},
{
"epoch": 1.6945147679324895,
"grad_norm": 1.1300057172775269,
"learning_rate": 8.853362083467604e-05,
"loss": 0.6932460069656372,
"step": 4016
},
{
"epoch": 1.6953586497890294,
"grad_norm": 1.2300254106521606,
"learning_rate": 8.851831351735616e-05,
"loss": 0.646004855632782,
"step": 4018
},
{
"epoch": 1.6962025316455698,
"grad_norm": 1.2328956127166748,
"learning_rate": 8.85029973146011e-05,
"loss": 0.6760826110839844,
"step": 4020
},
{
"epoch": 1.6970464135021097,
"grad_norm": 1.1252286434173584,
"learning_rate": 8.848767222994401e-05,
"loss": 0.5943224430084229,
"step": 4022
},
{
"epoch": 1.6978902953586497,
"grad_norm": 1.1587592363357544,
"learning_rate": 8.847233826692012e-05,
"loss": 0.7535276412963867,
"step": 4024
},
{
"epoch": 1.6987341772151898,
"grad_norm": 1.0294606685638428,
"learning_rate": 8.845699542906667e-05,
"loss": 0.5903090834617615,
"step": 4026
},
{
"epoch": 1.69957805907173,
"grad_norm": 1.1940597295761108,
"learning_rate": 8.844164371992295e-05,
"loss": 0.6031379699707031,
"step": 4028
},
{
"epoch": 1.70042194092827,
"grad_norm": 1.0416409969329834,
"learning_rate": 8.842628314303031e-05,
"loss": 0.6185168623924255,
"step": 4030
},
{
"epoch": 1.70126582278481,
"grad_norm": 1.8715689182281494,
"learning_rate": 8.841091370193214e-05,
"loss": 0.6325570344924927,
"step": 4032
},
{
"epoch": 1.7021097046413503,
"grad_norm": 1.230658769607544,
"learning_rate": 8.839553540017387e-05,
"loss": 0.7413952350616455,
"step": 4034
},
{
"epoch": 1.7029535864978902,
"grad_norm": 1.298003077507019,
"learning_rate": 8.838014824130299e-05,
"loss": 0.6973189115524292,
"step": 4036
},
{
"epoch": 1.7037974683544304,
"grad_norm": 1.0246652364730835,
"learning_rate": 8.836475222886902e-05,
"loss": 0.6582493185997009,
"step": 4038
},
{
"epoch": 1.7046413502109705,
"grad_norm": 1.3652594089508057,
"learning_rate": 8.834934736642351e-05,
"loss": 0.6934399008750916,
"step": 4040
},
{
"epoch": 1.7054852320675105,
"grad_norm": 1.029778242111206,
"learning_rate": 8.833393365752007e-05,
"loss": 0.6437561511993408,
"step": 4042
},
{
"epoch": 1.7063291139240506,
"grad_norm": 1.1993004083633423,
"learning_rate": 8.831851110571437e-05,
"loss": 0.605059027671814,
"step": 4044
},
{
"epoch": 1.7071729957805908,
"grad_norm": 1.286389946937561,
"learning_rate": 8.830307971456406e-05,
"loss": 0.7035017609596252,
"step": 4046
},
{
"epoch": 1.7080168776371307,
"grad_norm": 1.1211459636688232,
"learning_rate": 8.82876394876289e-05,
"loss": 0.6429924964904785,
"step": 4048
},
{
"epoch": 1.7088607594936709,
"grad_norm": 1.1284868717193604,
"learning_rate": 8.827219042847064e-05,
"loss": 0.6454769968986511,
"step": 4050
},
{
"epoch": 1.709704641350211,
"grad_norm": 1.1934884786605835,
"learning_rate": 8.825673254065306e-05,
"loss": 0.707233190536499,
"step": 4052
},
{
"epoch": 1.710548523206751,
"grad_norm": 1.1560680866241455,
"learning_rate": 8.824126582774203e-05,
"loss": 0.6790444254875183,
"step": 4054
},
{
"epoch": 1.7113924050632912,
"grad_norm": 1.1924364566802979,
"learning_rate": 8.822579029330541e-05,
"loss": 0.6115295886993408,
"step": 4056
},
{
"epoch": 1.7122362869198313,
"grad_norm": 1.107370138168335,
"learning_rate": 8.82103059409131e-05,
"loss": 0.7039182186126709,
"step": 4058
},
{
"epoch": 1.7130801687763713,
"grad_norm": 1.2554657459259033,
"learning_rate": 8.819481277413707e-05,
"loss": 0.6580052971839905,
"step": 4060
},
{
"epoch": 1.7139240506329114,
"grad_norm": 1.2873135805130005,
"learning_rate": 8.817931079655127e-05,
"loss": 0.6042479276657104,
"step": 4062
},
{
"epoch": 1.7147679324894516,
"grad_norm": 1.027056097984314,
"learning_rate": 8.816380001173172e-05,
"loss": 0.5992372632026672,
"step": 4064
},
{
"epoch": 1.7156118143459915,
"grad_norm": 1.0694721937179565,
"learning_rate": 8.814828042325644e-05,
"loss": 0.7078655362129211,
"step": 4066
},
{
"epoch": 1.7164556962025317,
"grad_norm": 1.194984793663025,
"learning_rate": 8.813275203470555e-05,
"loss": 0.6618752479553223,
"step": 4068
},
{
"epoch": 1.7172995780590719,
"grad_norm": 1.1713165044784546,
"learning_rate": 8.811721484966109e-05,
"loss": 0.6328625679016113,
"step": 4070
},
{
"epoch": 1.7181434599156118,
"grad_norm": 0.9993656277656555,
"learning_rate": 8.810166887170724e-05,
"loss": 0.5916416645050049,
"step": 4072
},
{
"epoch": 1.7189873417721517,
"grad_norm": 1.172642707824707,
"learning_rate": 8.808611410443011e-05,
"loss": 0.6490002274513245,
"step": 4074
},
{
"epoch": 1.7198312236286921,
"grad_norm": 1.1404821872711182,
"learning_rate": 8.807055055141793e-05,
"loss": 0.6571791172027588,
"step": 4076
},
{
"epoch": 1.720675105485232,
"grad_norm": 1.2104214429855347,
"learning_rate": 8.80549782162609e-05,
"loss": 0.6233854293823242,
"step": 4078
},
{
"epoch": 1.721518987341772,
"grad_norm": 1.1691396236419678,
"learning_rate": 8.803939710255126e-05,
"loss": 0.6331531405448914,
"step": 4080
},
{
"epoch": 1.7223628691983124,
"grad_norm": 1.263174057006836,
"learning_rate": 8.802380721388325e-05,
"loss": 0.6321156620979309,
"step": 4082
},
{
"epoch": 1.7232067510548523,
"grad_norm": 1.0685606002807617,
"learning_rate": 8.80082085538532e-05,
"loss": 0.644904613494873,
"step": 4084
},
{
"epoch": 1.7240506329113923,
"grad_norm": 1.2289735078811646,
"learning_rate": 8.799260112605938e-05,
"loss": 0.6743831634521484,
"step": 4086
},
{
"epoch": 1.7248945147679327,
"grad_norm": 1.0661355257034302,
"learning_rate": 8.797698493410216e-05,
"loss": 0.6866999268531799,
"step": 4088
},
{
"epoch": 1.7257383966244726,
"grad_norm": 1.1001228094100952,
"learning_rate": 8.796135998158386e-05,
"loss": 0.691387414932251,
"step": 4090
},
{
"epoch": 1.7265822784810125,
"grad_norm": 1.1078115701675415,
"learning_rate": 8.794572627210887e-05,
"loss": 0.5882864594459534,
"step": 4092
},
{
"epoch": 1.7274261603375527,
"grad_norm": 1.0483999252319336,
"learning_rate": 8.79300838092836e-05,
"loss": 0.6192089319229126,
"step": 4094
},
{
"epoch": 1.7282700421940929,
"grad_norm": 1.1194913387298584,
"learning_rate": 8.791443259671645e-05,
"loss": 0.603322446346283,
"step": 4096
},
{
"epoch": 1.7291139240506328,
"grad_norm": 1.1800397634506226,
"learning_rate": 8.789877263801787e-05,
"loss": 0.6141818165779114,
"step": 4098
},
{
"epoch": 1.729957805907173,
"grad_norm": 1.261768102645874,
"learning_rate": 8.78831039368003e-05,
"loss": 0.6707983016967773,
"step": 4100
},
{
"epoch": 1.729957805907173,
"eval_loss": 0.7022181153297424,
"eval_runtime": 844.6405,
"eval_samples_per_second": 2.495,
"eval_steps_per_second": 2.495,
"step": 4100
},
{
"epoch": 1.7308016877637131,
"grad_norm": 1.2505232095718384,
"learning_rate": 8.786742649667822e-05,
"loss": 0.6440353989601135,
"step": 4102
},
{
"epoch": 1.731645569620253,
"grad_norm": 1.2631809711456299,
"learning_rate": 8.78517403212681e-05,
"loss": 0.6712808012962341,
"step": 4104
},
{
"epoch": 1.7324894514767932,
"grad_norm": 1.2781071662902832,
"learning_rate": 8.783604541418845e-05,
"loss": 0.6854958534240723,
"step": 4106
},
{
"epoch": 1.7333333333333334,
"grad_norm": 1.1065936088562012,
"learning_rate": 8.782034177905976e-05,
"loss": 0.6281477808952332,
"step": 4108
},
{
"epoch": 1.7341772151898733,
"grad_norm": 1.010961890220642,
"learning_rate": 8.780462941950457e-05,
"loss": 0.6835165619850159,
"step": 4110
},
{
"epoch": 1.7350210970464135,
"grad_norm": 1.1467366218566895,
"learning_rate": 8.778890833914744e-05,
"loss": 0.6674962639808655,
"step": 4112
},
{
"epoch": 1.7358649789029537,
"grad_norm": 1.0221859216690063,
"learning_rate": 8.77731785416149e-05,
"loss": 0.5967551469802856,
"step": 4114
},
{
"epoch": 1.7367088607594936,
"grad_norm": 1.347937822341919,
"learning_rate": 8.775744003053552e-05,
"loss": 0.7356855869293213,
"step": 4116
},
{
"epoch": 1.7375527426160338,
"grad_norm": 1.2952557802200317,
"learning_rate": 8.774169280953988e-05,
"loss": 0.6932644844055176,
"step": 4118
},
{
"epoch": 1.738396624472574,
"grad_norm": 1.0157089233398438,
"learning_rate": 8.772593688226052e-05,
"loss": 0.5917407870292664,
"step": 4120
},
{
"epoch": 1.7392405063291139,
"grad_norm": 1.1537878513336182,
"learning_rate": 8.77101722523321e-05,
"loss": 0.6335760354995728,
"step": 4122
},
{
"epoch": 1.740084388185654,
"grad_norm": 1.0989667177200317,
"learning_rate": 8.769439892339115e-05,
"loss": 0.6892110109329224,
"step": 4124
},
{
"epoch": 1.7409282700421942,
"grad_norm": 1.1293572187423706,
"learning_rate": 8.767861689907633e-05,
"loss": 0.5966230630874634,
"step": 4126
},
{
"epoch": 1.7417721518987341,
"grad_norm": 1.1167775392532349,
"learning_rate": 8.76628261830282e-05,
"loss": 0.5981804728507996,
"step": 4128
},
{
"epoch": 1.7426160337552743,
"grad_norm": 1.0572419166564941,
"learning_rate": 8.76470267788894e-05,
"loss": 0.5539529919624329,
"step": 4130
},
{
"epoch": 1.7434599156118145,
"grad_norm": 0.937256932258606,
"learning_rate": 8.763121869030456e-05,
"loss": 0.6238219141960144,
"step": 4132
},
{
"epoch": 1.7443037974683544,
"grad_norm": 1.082932472229004,
"learning_rate": 8.761540192092029e-05,
"loss": 0.6033329963684082,
"step": 4134
},
{
"epoch": 1.7451476793248946,
"grad_norm": 1.0495184659957886,
"learning_rate": 8.75995764743852e-05,
"loss": 0.5567626357078552,
"step": 4136
},
{
"epoch": 1.7459915611814347,
"grad_norm": 1.3143779039382935,
"learning_rate": 8.758374235434994e-05,
"loss": 0.6759346127510071,
"step": 4138
},
{
"epoch": 1.7468354430379747,
"grad_norm": 1.2385786771774292,
"learning_rate": 8.756789956446713e-05,
"loss": 0.6439400315284729,
"step": 4140
},
{
"epoch": 1.7476793248945146,
"grad_norm": 1.0453747510910034,
"learning_rate": 8.75520481083914e-05,
"loss": 0.627493679523468,
"step": 4142
},
{
"epoch": 1.748523206751055,
"grad_norm": 1.09946608543396,
"learning_rate": 8.753618798977935e-05,
"loss": 0.677209198474884,
"step": 4144
},
{
"epoch": 1.749367088607595,
"grad_norm": 1.2207063436508179,
"learning_rate": 8.752031921228965e-05,
"loss": 0.6874014735221863,
"step": 4146
},
{
"epoch": 1.7502109704641349,
"grad_norm": 1.2520697116851807,
"learning_rate": 8.750444177958288e-05,
"loss": 0.6332831382751465,
"step": 4148
},
{
"epoch": 1.7510548523206753,
"grad_norm": 1.2463186979293823,
"learning_rate": 8.748855569532168e-05,
"loss": 0.682744562625885,
"step": 4150
},
{
"epoch": 1.7518987341772152,
"grad_norm": 1.1895235776901245,
"learning_rate": 8.747266096317069e-05,
"loss": 0.7006803750991821,
"step": 4152
},
{
"epoch": 1.7527426160337551,
"grad_norm": 1.1627185344696045,
"learning_rate": 8.745675758679646e-05,
"loss": 0.6751191020011902,
"step": 4154
},
{
"epoch": 1.7535864978902953,
"grad_norm": 1.324127197265625,
"learning_rate": 8.744084556986764e-05,
"loss": 0.661848247051239,
"step": 4156
},
{
"epoch": 1.7544303797468355,
"grad_norm": 1.226809024810791,
"learning_rate": 8.74249249160548e-05,
"loss": 0.7057217955589294,
"step": 4158
},
{
"epoch": 1.7552742616033754,
"grad_norm": 1.2341214418411255,
"learning_rate": 8.740899562903056e-05,
"loss": 0.6856105923652649,
"step": 4160
},
{
"epoch": 1.7561181434599156,
"grad_norm": 1.3907564878463745,
"learning_rate": 8.739305771246946e-05,
"loss": 0.6616930365562439,
"step": 4162
},
{
"epoch": 1.7569620253164557,
"grad_norm": 1.2756825685501099,
"learning_rate": 8.737711117004812e-05,
"loss": 0.5791551470756531,
"step": 4164
},
{
"epoch": 1.7578059071729957,
"grad_norm": 1.2861095666885376,
"learning_rate": 8.736115600544506e-05,
"loss": 0.7074756622314453,
"step": 4166
},
{
"epoch": 1.7586497890295358,
"grad_norm": 1.2198424339294434,
"learning_rate": 8.734519222234083e-05,
"loss": 0.6494167447090149,
"step": 4168
},
{
"epoch": 1.759493670886076,
"grad_norm": 1.19169020652771,
"learning_rate": 8.732921982441799e-05,
"loss": 0.6546841859817505,
"step": 4170
},
{
"epoch": 1.760337552742616,
"grad_norm": 1.11533784866333,
"learning_rate": 8.731323881536108e-05,
"loss": 0.6701815724372864,
"step": 4172
},
{
"epoch": 1.761181434599156,
"grad_norm": 1.2148140668869019,
"learning_rate": 8.729724919885657e-05,
"loss": 0.6678179502487183,
"step": 4174
},
{
"epoch": 1.7620253164556963,
"grad_norm": 1.1968709230422974,
"learning_rate": 8.728125097859298e-05,
"loss": 0.6505144834518433,
"step": 4176
},
{
"epoch": 1.7628691983122362,
"grad_norm": 1.0954766273498535,
"learning_rate": 8.726524415826079e-05,
"loss": 0.6531696915626526,
"step": 4178
},
{
"epoch": 1.7637130801687764,
"grad_norm": 1.5149537324905396,
"learning_rate": 8.724922874155246e-05,
"loss": 0.710014283657074,
"step": 4180
},
{
"epoch": 1.7645569620253165,
"grad_norm": 1.145113229751587,
"learning_rate": 8.723320473216245e-05,
"loss": 0.714016318321228,
"step": 4182
},
{
"epoch": 1.7654008438818565,
"grad_norm": 0.9454524517059326,
"learning_rate": 8.721717213378719e-05,
"loss": 0.6775414347648621,
"step": 4184
},
{
"epoch": 1.7662447257383966,
"grad_norm": 1.1414754390716553,
"learning_rate": 8.720113095012507e-05,
"loss": 0.6279728412628174,
"step": 4186
},
{
"epoch": 1.7670886075949368,
"grad_norm": 1.212802767753601,
"learning_rate": 8.718508118487652e-05,
"loss": 0.5894309282302856,
"step": 4188
},
{
"epoch": 1.7679324894514767,
"grad_norm": 1.5213478803634644,
"learning_rate": 8.716902284174388e-05,
"loss": 0.6124046444892883,
"step": 4190
},
{
"epoch": 1.768776371308017,
"grad_norm": 0.9973840713500977,
"learning_rate": 8.715295592443154e-05,
"loss": 0.5990801453590393,
"step": 4192
},
{
"epoch": 1.769620253164557,
"grad_norm": 1.1084294319152832,
"learning_rate": 8.713688043664579e-05,
"loss": 0.6485559344291687,
"step": 4194
},
{
"epoch": 1.770464135021097,
"grad_norm": 1.1401913166046143,
"learning_rate": 8.712079638209493e-05,
"loss": 0.7083099484443665,
"step": 4196
},
{
"epoch": 1.7713080168776372,
"grad_norm": 1.278105616569519,
"learning_rate": 8.71047037644893e-05,
"loss": 0.7237915992736816,
"step": 4198
},
{
"epoch": 1.7721518987341773,
"grad_norm": 1.2407530546188354,
"learning_rate": 8.708860258754108e-05,
"loss": 0.6259870529174805,
"step": 4200
},
{
"epoch": 1.7721518987341773,
"eval_loss": 0.6993561387062073,
"eval_runtime": 542.0281,
"eval_samples_per_second": 3.887,
"eval_steps_per_second": 3.887,
"step": 4200
},
{
"epoch": 1.7729957805907173,
"grad_norm": 1.102859616279602,
"learning_rate": 8.707249285496457e-05,
"loss": 0.6604248285293579,
"step": 4202
},
{
"epoch": 1.7738396624472574,
"grad_norm": 1.2478244304656982,
"learning_rate": 8.705637457047594e-05,
"loss": 0.6799775958061218,
"step": 4204
},
{
"epoch": 1.7746835443037976,
"grad_norm": 1.1178022623062134,
"learning_rate": 8.704024773779338e-05,
"loss": 0.6136477589607239,
"step": 4206
},
{
"epoch": 1.7755274261603375,
"grad_norm": 1.904076337814331,
"learning_rate": 8.702411236063703e-05,
"loss": 0.6568390130996704,
"step": 4208
},
{
"epoch": 1.7763713080168775,
"grad_norm": 1.0902835130691528,
"learning_rate": 8.700796844272903e-05,
"loss": 0.6404406428337097,
"step": 4210
},
{
"epoch": 1.7772151898734179,
"grad_norm": 1.1858288049697876,
"learning_rate": 8.699181598779347e-05,
"loss": 0.6924911737442017,
"step": 4212
},
{
"epoch": 1.7780590717299578,
"grad_norm": 1.0015727281570435,
"learning_rate": 8.69756549995564e-05,
"loss": 0.572692334651947,
"step": 4214
},
{
"epoch": 1.7789029535864977,
"grad_norm": 1.440079689025879,
"learning_rate": 8.695948548174583e-05,
"loss": 0.7196018695831299,
"step": 4216
},
{
"epoch": 1.7797468354430381,
"grad_norm": 1.1320992708206177,
"learning_rate": 8.69433074380918e-05,
"loss": 0.5870906710624695,
"step": 4218
},
{
"epoch": 1.780590717299578,
"grad_norm": 1.3156964778900146,
"learning_rate": 8.692712087232626e-05,
"loss": 0.6501539349555969,
"step": 4220
},
{
"epoch": 1.781434599156118,
"grad_norm": 1.1869803667068481,
"learning_rate": 8.691092578818311e-05,
"loss": 0.7017278075218201,
"step": 4222
},
{
"epoch": 1.7822784810126582,
"grad_norm": 0.9708380699157715,
"learning_rate": 8.689472218939829e-05,
"loss": 0.5954802632331848,
"step": 4224
},
{
"epoch": 1.7831223628691983,
"grad_norm": 1.0753228664398193,
"learning_rate": 8.687851007970962e-05,
"loss": 0.6494144797325134,
"step": 4226
},
{
"epoch": 1.7839662447257383,
"grad_norm": 1.1038413047790527,
"learning_rate": 8.686228946285695e-05,
"loss": 0.7247282862663269,
"step": 4228
},
{
"epoch": 1.7848101265822784,
"grad_norm": 0.9666786789894104,
"learning_rate": 8.684606034258206e-05,
"loss": 0.5673812627792358,
"step": 4230
},
{
"epoch": 1.7856540084388186,
"grad_norm": 1.1972676515579224,
"learning_rate": 8.682982272262869e-05,
"loss": 0.5950504541397095,
"step": 4232
},
{
"epoch": 1.7864978902953585,
"grad_norm": 1.23736572265625,
"learning_rate": 8.681357660674255e-05,
"loss": 0.6477514505386353,
"step": 4234
},
{
"epoch": 1.7873417721518987,
"grad_norm": 1.0238158702850342,
"learning_rate": 8.679732199867127e-05,
"loss": 0.6180200576782227,
"step": 4236
},
{
"epoch": 1.7881856540084389,
"grad_norm": 1.0333375930786133,
"learning_rate": 8.678105890216455e-05,
"loss": 0.5771099328994751,
"step": 4238
},
{
"epoch": 1.7890295358649788,
"grad_norm": 1.30390202999115,
"learning_rate": 8.676478732097393e-05,
"loss": 0.6592516899108887,
"step": 4240
},
{
"epoch": 1.789873417721519,
"grad_norm": 1.115160346031189,
"learning_rate": 8.674850725885294e-05,
"loss": 0.6662757396697998,
"step": 4242
},
{
"epoch": 1.7907172995780591,
"grad_norm": 1.2130142450332642,
"learning_rate": 8.67322187195571e-05,
"loss": 0.6673333048820496,
"step": 4244
},
{
"epoch": 1.791561181434599,
"grad_norm": 1.1505554914474487,
"learning_rate": 8.671592170684386e-05,
"loss": 0.6698325872421265,
"step": 4246
},
{
"epoch": 1.7924050632911392,
"grad_norm": 1.0758062601089478,
"learning_rate": 8.669961622447262e-05,
"loss": 0.6216199398040771,
"step": 4248
},
{
"epoch": 1.7932489451476794,
"grad_norm": 0.9300920367240906,
"learning_rate": 8.668330227620475e-05,
"loss": 0.6460495591163635,
"step": 4250
},
{
"epoch": 1.7940928270042193,
"grad_norm": 1.3860046863555908,
"learning_rate": 8.666697986580357e-05,
"loss": 0.6949506998062134,
"step": 4252
},
{
"epoch": 1.7949367088607595,
"grad_norm": 1.2287555932998657,
"learning_rate": 8.665064899703433e-05,
"loss": 0.6320405602455139,
"step": 4254
},
{
"epoch": 1.7957805907172997,
"grad_norm": 1.1585466861724854,
"learning_rate": 8.663430967366426e-05,
"loss": 0.6635019779205322,
"step": 4256
},
{
"epoch": 1.7966244725738396,
"grad_norm": 1.1007941961288452,
"learning_rate": 8.661796189946252e-05,
"loss": 0.645052969455719,
"step": 4258
},
{
"epoch": 1.7974683544303798,
"grad_norm": 1.2059847116470337,
"learning_rate": 8.660160567820023e-05,
"loss": 0.70420902967453,
"step": 4260
},
{
"epoch": 1.79831223628692,
"grad_norm": 1.0648717880249023,
"learning_rate": 8.658524101365044e-05,
"loss": 0.6263765096664429,
"step": 4262
},
{
"epoch": 1.7991561181434599,
"grad_norm": 1.017052412033081,
"learning_rate": 8.656886790958821e-05,
"loss": 0.6199937462806702,
"step": 4264
},
{
"epoch": 1.8,
"grad_norm": 1.1153450012207031,
"learning_rate": 8.655248636979045e-05,
"loss": 0.5891271233558655,
"step": 4266
},
{
"epoch": 1.8008438818565402,
"grad_norm": 1.0661747455596924,
"learning_rate": 8.65360963980361e-05,
"loss": 0.5442121028900146,
"step": 4268
},
{
"epoch": 1.8016877637130801,
"grad_norm": 1.3049758672714233,
"learning_rate": 8.6519697998106e-05,
"loss": 0.6988245248794556,
"step": 4270
},
{
"epoch": 1.80253164556962,
"grad_norm": 1.2679938077926636,
"learning_rate": 8.650329117378294e-05,
"loss": 0.7260398864746094,
"step": 4272
},
{
"epoch": 1.8033755274261605,
"grad_norm": 1.0899536609649658,
"learning_rate": 8.648687592885168e-05,
"loss": 0.5757678151130676,
"step": 4274
},
{
"epoch": 1.8042194092827004,
"grad_norm": 1.4088575839996338,
"learning_rate": 8.647045226709887e-05,
"loss": 0.7042108178138733,
"step": 4276
},
{
"epoch": 1.8050632911392404,
"grad_norm": 1.2143783569335938,
"learning_rate": 8.645402019231316e-05,
"loss": 0.641275942325592,
"step": 4278
},
{
"epoch": 1.8059071729957807,
"grad_norm": 1.4072896242141724,
"learning_rate": 8.64375797082851e-05,
"loss": 0.7657124996185303,
"step": 4280
},
{
"epoch": 1.8067510548523207,
"grad_norm": 1.2563380002975464,
"learning_rate": 8.642113081880718e-05,
"loss": 0.713768720626831,
"step": 4282
},
{
"epoch": 1.8075949367088606,
"grad_norm": 1.1195416450500488,
"learning_rate": 8.64046735276739e-05,
"loss": 0.6276429295539856,
"step": 4284
},
{
"epoch": 1.808438818565401,
"grad_norm": 1.2472422122955322,
"learning_rate": 8.638820783868158e-05,
"loss": 0.5641238689422607,
"step": 4286
},
{
"epoch": 1.809282700421941,
"grad_norm": 1.1974313259124756,
"learning_rate": 8.637173375562855e-05,
"loss": 0.6312015056610107,
"step": 4288
},
{
"epoch": 1.810126582278481,
"grad_norm": 1.1673604249954224,
"learning_rate": 8.63552512823151e-05,
"loss": 0.6674410104751587,
"step": 4290
},
{
"epoch": 1.810970464135021,
"grad_norm": 1.199095368385315,
"learning_rate": 8.633876042254337e-05,
"loss": 0.6772016286849976,
"step": 4292
},
{
"epoch": 1.8118143459915612,
"grad_norm": 1.2302746772766113,
"learning_rate": 8.632226118011752e-05,
"loss": 0.6621671915054321,
"step": 4294
},
{
"epoch": 1.8126582278481012,
"grad_norm": 1.304010033607483,
"learning_rate": 8.63057535588436e-05,
"loss": 0.6965363621711731,
"step": 4296
},
{
"epoch": 1.8135021097046413,
"grad_norm": 1.223366618156433,
"learning_rate": 8.62892375625296e-05,
"loss": 0.6300807595252991,
"step": 4298
},
{
"epoch": 1.8143459915611815,
"grad_norm": 1.028496265411377,
"learning_rate": 8.627271319498544e-05,
"loss": 0.5610660910606384,
"step": 4300
},
{
"epoch": 1.8143459915611815,
"eval_loss": 0.6981000900268555,
"eval_runtime": 514.4659,
"eval_samples_per_second": 4.096,
"eval_steps_per_second": 4.096,
"step": 4300
},
{
"epoch": 1.8151898734177214,
"grad_norm": 1.2050007581710815,
"learning_rate": 8.625618046002298e-05,
"loss": 0.6666551232337952,
"step": 4302
},
{
"epoch": 1.8160337552742616,
"grad_norm": 1.1233220100402832,
"learning_rate": 8.6239639361456e-05,
"loss": 0.6631835103034973,
"step": 4304
},
{
"epoch": 1.8168776371308017,
"grad_norm": 1.1262956857681274,
"learning_rate": 8.622308990310021e-05,
"loss": 0.6395270228385925,
"step": 4306
},
{
"epoch": 1.8177215189873417,
"grad_norm": 1.0448222160339355,
"learning_rate": 8.620653208877328e-05,
"loss": 0.6165015697479248,
"step": 4308
},
{
"epoch": 1.8185654008438819,
"grad_norm": 1.1555759906768799,
"learning_rate": 8.618996592229473e-05,
"loss": 0.5915844440460205,
"step": 4310
},
{
"epoch": 1.819409282700422,
"grad_norm": 1.5407506227493286,
"learning_rate": 8.617339140748608e-05,
"loss": 0.6491456627845764,
"step": 4312
},
{
"epoch": 1.820253164556962,
"grad_norm": 1.3690788745880127,
"learning_rate": 8.615680854817077e-05,
"loss": 0.6053901314735413,
"step": 4314
},
{
"epoch": 1.8210970464135021,
"grad_norm": 1.052583932876587,
"learning_rate": 8.614021734817413e-05,
"loss": 0.5821644067764282,
"step": 4316
},
{
"epoch": 1.8219409282700423,
"grad_norm": 1.090567708015442,
"learning_rate": 8.612361781132344e-05,
"loss": 0.645878255367279,
"step": 4318
},
{
"epoch": 1.8227848101265822,
"grad_norm": 1.122719645500183,
"learning_rate": 8.610700994144787e-05,
"loss": 0.6883123517036438,
"step": 4320
},
{
"epoch": 1.8236286919831224,
"grad_norm": 1.3273001909255981,
"learning_rate": 8.609039374237856e-05,
"loss": 0.6918330788612366,
"step": 4322
},
{
"epoch": 1.8244725738396625,
"grad_norm": 1.0628443956375122,
"learning_rate": 8.607376921794855e-05,
"loss": 0.6292204856872559,
"step": 4324
},
{
"epoch": 1.8253164556962025,
"grad_norm": 1.287466287612915,
"learning_rate": 8.605713637199279e-05,
"loss": 0.6136105060577393,
"step": 4326
},
{
"epoch": 1.8261603375527427,
"grad_norm": 1.1399345397949219,
"learning_rate": 8.604049520834816e-05,
"loss": 0.6099681854248047,
"step": 4328
},
{
"epoch": 1.8270042194092828,
"grad_norm": 1.1131435632705688,
"learning_rate": 8.602384573085345e-05,
"loss": 0.6267056465148926,
"step": 4330
},
{
"epoch": 1.8278481012658228,
"grad_norm": 1.1312925815582275,
"learning_rate": 8.600718794334939e-05,
"loss": 0.609437882900238,
"step": 4332
},
{
"epoch": 1.828691983122363,
"grad_norm": 1.3711494207382202,
"learning_rate": 8.599052184967859e-05,
"loss": 0.727881669998169,
"step": 4334
},
{
"epoch": 1.829535864978903,
"grad_norm": 1.1403605937957764,
"learning_rate": 8.597384745368562e-05,
"loss": 0.6771696209907532,
"step": 4336
},
{
"epoch": 1.830379746835443,
"grad_norm": 1.2769951820373535,
"learning_rate": 8.595716475921693e-05,
"loss": 0.6812924742698669,
"step": 4338
},
{
"epoch": 1.831223628691983,
"grad_norm": 1.055721402168274,
"learning_rate": 8.59404737701209e-05,
"loss": 0.6403515338897705,
"step": 4340
},
{
"epoch": 1.8320675105485233,
"grad_norm": 1.1047639846801758,
"learning_rate": 8.592377449024784e-05,
"loss": 0.663240373134613,
"step": 4342
},
{
"epoch": 1.8329113924050633,
"grad_norm": 1.0808883905410767,
"learning_rate": 8.590706692344991e-05,
"loss": 0.6398993134498596,
"step": 4344
},
{
"epoch": 1.8337552742616032,
"grad_norm": 1.2433407306671143,
"learning_rate": 8.589035107358125e-05,
"loss": 0.6838348507881165,
"step": 4346
},
{
"epoch": 1.8345991561181436,
"grad_norm": 1.031216025352478,
"learning_rate": 8.58736269444979e-05,
"loss": 0.640884280204773,
"step": 4348
},
{
"epoch": 1.8354430379746836,
"grad_norm": 1.1417057514190674,
"learning_rate": 8.585689454005776e-05,
"loss": 0.6346741914749146,
"step": 4350
},
{
"epoch": 1.8362869198312235,
"grad_norm": 1.210988998413086,
"learning_rate": 8.584015386412072e-05,
"loss": 0.6209521889686584,
"step": 4352
},
{
"epoch": 1.8371308016877637,
"grad_norm": 1.2120760679244995,
"learning_rate": 8.582340492054847e-05,
"loss": 0.6699252128601074,
"step": 4354
},
{
"epoch": 1.8379746835443038,
"grad_norm": 1.1768114566802979,
"learning_rate": 8.580664771320475e-05,
"loss": 0.6472980380058289,
"step": 4356
},
{
"epoch": 1.8388185654008438,
"grad_norm": 1.060070276260376,
"learning_rate": 8.578988224595506e-05,
"loss": 0.6440452933311462,
"step": 4358
},
{
"epoch": 1.839662447257384,
"grad_norm": 1.1366443634033203,
"learning_rate": 8.57731085226669e-05,
"loss": 0.5894474387168884,
"step": 4360
},
{
"epoch": 1.840506329113924,
"grad_norm": 1.1571751832962036,
"learning_rate": 8.575632654720963e-05,
"loss": 0.5868900418281555,
"step": 4362
},
{
"epoch": 1.841350210970464,
"grad_norm": 1.1983840465545654,
"learning_rate": 8.573953632345453e-05,
"loss": 0.5841533541679382,
"step": 4364
},
{
"epoch": 1.8421940928270042,
"grad_norm": 1.101806640625,
"learning_rate": 8.572273785527481e-05,
"loss": 0.5503215193748474,
"step": 4366
},
{
"epoch": 1.8430379746835444,
"grad_norm": 1.0327471494674683,
"learning_rate": 8.570593114654552e-05,
"loss": 0.6131128072738647,
"step": 4368
},
{
"epoch": 1.8438818565400843,
"grad_norm": 1.1421098709106445,
"learning_rate": 8.568911620114368e-05,
"loss": 0.6614060401916504,
"step": 4370
},
{
"epoch": 1.8447257383966245,
"grad_norm": 1.1707026958465576,
"learning_rate": 8.567229302294814e-05,
"loss": 0.6392307877540588,
"step": 4372
},
{
"epoch": 1.8455696202531646,
"grad_norm": 1.1704418659210205,
"learning_rate": 8.565546161583969e-05,
"loss": 0.6560825109481812,
"step": 4374
},
{
"epoch": 1.8464135021097046,
"grad_norm": 1.3618037700653076,
"learning_rate": 8.563862198370103e-05,
"loss": 0.6996290683746338,
"step": 4376
},
{
"epoch": 1.8472573839662447,
"grad_norm": 1.116645097732544,
"learning_rate": 8.562177413041674e-05,
"loss": 0.6776535511016846,
"step": 4378
},
{
"epoch": 1.8481012658227849,
"grad_norm": 1.1669151782989502,
"learning_rate": 8.560491805987327e-05,
"loss": 0.6390423774719238,
"step": 4380
},
{
"epoch": 1.8489451476793248,
"grad_norm": 1.2188117504119873,
"learning_rate": 8.558805377595904e-05,
"loss": 0.6554020047187805,
"step": 4382
},
{
"epoch": 1.849789029535865,
"grad_norm": 1.216829776763916,
"learning_rate": 8.557118128256425e-05,
"loss": 0.6291787624359131,
"step": 4384
},
{
"epoch": 1.8506329113924052,
"grad_norm": 1.0431596040725708,
"learning_rate": 8.555430058358111e-05,
"loss": 0.6484442949295044,
"step": 4386
},
{
"epoch": 1.851476793248945,
"grad_norm": 1.3015289306640625,
"learning_rate": 8.553741168290367e-05,
"loss": 0.7034047842025757,
"step": 4388
},
{
"epoch": 1.8523206751054853,
"grad_norm": 1.2062040567398071,
"learning_rate": 8.552051458442785e-05,
"loss": 0.644135594367981,
"step": 4390
},
{
"epoch": 1.8531645569620254,
"grad_norm": 1.238461971282959,
"learning_rate": 8.55036092920515e-05,
"loss": 0.6767282485961914,
"step": 4392
},
{
"epoch": 1.8540084388185654,
"grad_norm": 1.2978830337524414,
"learning_rate": 8.548669580967435e-05,
"loss": 0.7292267680168152,
"step": 4394
},
{
"epoch": 1.8548523206751055,
"grad_norm": 1.1448328495025635,
"learning_rate": 8.546977414119801e-05,
"loss": 0.6788421273231506,
"step": 4396
},
{
"epoch": 1.8556962025316457,
"grad_norm": 1.0685368776321411,
"learning_rate": 8.5452844290526e-05,
"loss": 0.6745942234992981,
"step": 4398
},
{
"epoch": 1.8565400843881856,
"grad_norm": 1.125707983970642,
"learning_rate": 8.543590626156368e-05,
"loss": 0.6351125836372375,
"step": 4400
},
{
"epoch": 1.8565400843881856,
"eval_loss": 0.6961485147476196,
"eval_runtime": 513.5724,
"eval_samples_per_second": 4.103,
"eval_steps_per_second": 4.103,
"step": 4400
},
{
"epoch": 1.8573839662447258,
"grad_norm": 1.072179913520813,
"learning_rate": 8.541896005821835e-05,
"loss": 0.5840762257575989,
"step": 4402
},
{
"epoch": 1.858227848101266,
"grad_norm": 1.2572803497314453,
"learning_rate": 8.540200568439915e-05,
"loss": 0.6431074738502502,
"step": 4404
},
{
"epoch": 1.859071729957806,
"grad_norm": 1.3294413089752197,
"learning_rate": 8.538504314401718e-05,
"loss": 0.708808183670044,
"step": 4406
},
{
"epoch": 1.8599156118143458,
"grad_norm": 1.1775587797164917,
"learning_rate": 8.536807244098533e-05,
"loss": 0.6580085754394531,
"step": 4408
},
{
"epoch": 1.8607594936708862,
"grad_norm": 1.1880089044570923,
"learning_rate": 8.53510935792184e-05,
"loss": 0.6500136256217957,
"step": 4410
},
{
"epoch": 1.8616033755274262,
"grad_norm": 1.2166204452514648,
"learning_rate": 8.533410656263313e-05,
"loss": 0.6922352313995361,
"step": 4412
},
{
"epoch": 1.862447257383966,
"grad_norm": 1.0405415296554565,
"learning_rate": 8.531711139514808e-05,
"loss": 0.6761626601219177,
"step": 4414
},
{
"epoch": 1.8632911392405065,
"grad_norm": 1.0674270391464233,
"learning_rate": 8.530010808068371e-05,
"loss": 0.672576904296875,
"step": 4416
},
{
"epoch": 1.8641350210970464,
"grad_norm": 1.0584741830825806,
"learning_rate": 8.528309662316236e-05,
"loss": 0.5521218180656433,
"step": 4418
},
{
"epoch": 1.8649789029535864,
"grad_norm": 1.3619039058685303,
"learning_rate": 8.526607702650824e-05,
"loss": 0.6546680927276611,
"step": 4420
},
{
"epoch": 1.8658227848101265,
"grad_norm": 0.9904745221138,
"learning_rate": 8.524904929464745e-05,
"loss": 0.6043933629989624,
"step": 4422
},
{
"epoch": 1.8666666666666667,
"grad_norm": 1.3046703338623047,
"learning_rate": 8.523201343150795e-05,
"loss": 0.7106801271438599,
"step": 4424
},
{
"epoch": 1.8675105485232066,
"grad_norm": 1.1166832447052002,
"learning_rate": 8.52149694410196e-05,
"loss": 0.6456703543663025,
"step": 4426
},
{
"epoch": 1.8683544303797468,
"grad_norm": 1.1260632276535034,
"learning_rate": 8.519791732711412e-05,
"loss": 0.5963318347930908,
"step": 4428
},
{
"epoch": 1.869198312236287,
"grad_norm": 1.0990599393844604,
"learning_rate": 8.51808570937251e-05,
"loss": 0.6295356750488281,
"step": 4430
},
{
"epoch": 1.870042194092827,
"grad_norm": 1.3689274787902832,
"learning_rate": 8.516378874478801e-05,
"loss": 0.6984617114067078,
"step": 4432
},
{
"epoch": 1.870886075949367,
"grad_norm": 1.0986580848693848,
"learning_rate": 8.514671228424018e-05,
"loss": 0.5598900318145752,
"step": 4434
},
{
"epoch": 1.8717299578059072,
"grad_norm": 0.9570761322975159,
"learning_rate": 8.512962771602085e-05,
"loss": 0.6286435723304749,
"step": 4436
},
{
"epoch": 1.8725738396624472,
"grad_norm": 1.1480669975280762,
"learning_rate": 8.511253504407107e-05,
"loss": 0.5956313014030457,
"step": 4438
},
{
"epoch": 1.8734177215189873,
"grad_norm": 1.1132479906082153,
"learning_rate": 8.50954342723338e-05,
"loss": 0.6523844599723816,
"step": 4440
},
{
"epoch": 1.8742616033755275,
"grad_norm": 1.1569167375564575,
"learning_rate": 8.507832540475387e-05,
"loss": 0.6231355667114258,
"step": 4442
},
{
"epoch": 1.8751054852320674,
"grad_norm": 1.1327043771743774,
"learning_rate": 8.506120844527796e-05,
"loss": 0.660773754119873,
"step": 4444
},
{
"epoch": 1.8759493670886076,
"grad_norm": 0.8939630389213562,
"learning_rate": 8.504408339785463e-05,
"loss": 0.6319235563278198,
"step": 4446
},
{
"epoch": 1.8767932489451478,
"grad_norm": 1.1910638809204102,
"learning_rate": 8.50269502664343e-05,
"loss": 0.6753001809120178,
"step": 4448
},
{
"epoch": 1.8776371308016877,
"grad_norm": 1.1502408981323242,
"learning_rate": 8.500980905496923e-05,
"loss": 0.6300671696662903,
"step": 4450
},
{
"epoch": 1.8784810126582279,
"grad_norm": 1.0639009475708008,
"learning_rate": 8.49926597674136e-05,
"loss": 0.6196691989898682,
"step": 4452
},
{
"epoch": 1.879324894514768,
"grad_norm": 1.1072754859924316,
"learning_rate": 8.497550240772341e-05,
"loss": 0.7029181122779846,
"step": 4454
},
{
"epoch": 1.880168776371308,
"grad_norm": 1.0440188646316528,
"learning_rate": 8.495833697985652e-05,
"loss": 0.65432208776474,
"step": 4456
},
{
"epoch": 1.8810126582278481,
"grad_norm": 1.0646617412567139,
"learning_rate": 8.494116348777269e-05,
"loss": 0.6446614861488342,
"step": 4458
},
{
"epoch": 1.8818565400843883,
"grad_norm": 1.2163805961608887,
"learning_rate": 8.492398193543349e-05,
"loss": 0.6430497765541077,
"step": 4460
},
{
"epoch": 1.8827004219409282,
"grad_norm": 1.2715297937393188,
"learning_rate": 8.490679232680241e-05,
"loss": 0.6609845161437988,
"step": 4462
},
{
"epoch": 1.8835443037974684,
"grad_norm": 1.0435588359832764,
"learning_rate": 8.488959466584469e-05,
"loss": 0.5791062712669373,
"step": 4464
},
{
"epoch": 1.8843881856540086,
"grad_norm": 1.229202151298523,
"learning_rate": 8.487238895652759e-05,
"loss": 0.6312171220779419,
"step": 4466
},
{
"epoch": 1.8852320675105485,
"grad_norm": 1.0713022947311401,
"learning_rate": 8.485517520282008e-05,
"loss": 0.6698815226554871,
"step": 4468
},
{
"epoch": 1.8860759493670884,
"grad_norm": 1.0172312259674072,
"learning_rate": 8.483795340869305e-05,
"loss": 0.6283810138702393,
"step": 4470
},
{
"epoch": 1.8869198312236288,
"grad_norm": 1.2880207300186157,
"learning_rate": 8.482072357811926e-05,
"loss": 0.6659437417984009,
"step": 4472
},
{
"epoch": 1.8877637130801688,
"grad_norm": 1.0840508937835693,
"learning_rate": 8.480348571507329e-05,
"loss": 0.6190289258956909,
"step": 4474
},
{
"epoch": 1.8886075949367087,
"grad_norm": 1.1101994514465332,
"learning_rate": 8.478623982353156e-05,
"loss": 0.5760066509246826,
"step": 4476
},
{
"epoch": 1.889451476793249,
"grad_norm": 1.2388770580291748,
"learning_rate": 8.476898590747237e-05,
"loss": 0.6151811480522156,
"step": 4478
},
{
"epoch": 1.890295358649789,
"grad_norm": 0.9986408948898315,
"learning_rate": 8.475172397087591e-05,
"loss": 0.5991593599319458,
"step": 4480
},
{
"epoch": 1.891139240506329,
"grad_norm": 1.1380778551101685,
"learning_rate": 8.473445401772415e-05,
"loss": 0.7262179255485535,
"step": 4482
},
{
"epoch": 1.8919831223628694,
"grad_norm": 1.3933676481246948,
"learning_rate": 8.471717605200092e-05,
"loss": 0.5806916356086731,
"step": 4484
},
{
"epoch": 1.8928270042194093,
"grad_norm": 1.0242944955825806,
"learning_rate": 8.469989007769194e-05,
"loss": 0.617904782295227,
"step": 4486
},
{
"epoch": 1.8936708860759492,
"grad_norm": 1.0909028053283691,
"learning_rate": 8.468259609878475e-05,
"loss": 0.6488202810287476,
"step": 4488
},
{
"epoch": 1.8945147679324894,
"grad_norm": 1.042611002922058,
"learning_rate": 8.466529411926874e-05,
"loss": 0.6015118956565857,
"step": 4490
},
{
"epoch": 1.8953586497890296,
"grad_norm": 1.3965784311294556,
"learning_rate": 8.46479841431351e-05,
"loss": 0.7035272717475891,
"step": 4492
},
{
"epoch": 1.8962025316455695,
"grad_norm": 1.1486462354660034,
"learning_rate": 8.463066617437698e-05,
"loss": 0.6611229777336121,
"step": 4494
},
{
"epoch": 1.8970464135021097,
"grad_norm": 1.0845859050750732,
"learning_rate": 8.461334021698925e-05,
"loss": 0.6378056406974792,
"step": 4496
},
{
"epoch": 1.8978902953586498,
"grad_norm": 0.936612069606781,
"learning_rate": 8.459600627496869e-05,
"loss": 0.642429769039154,
"step": 4498
},
{
"epoch": 1.8987341772151898,
"grad_norm": 1.1905454397201538,
"learning_rate": 8.457866435231391e-05,
"loss": 0.6341768503189087,
"step": 4500
},
{
"epoch": 1.8987341772151898,
"eval_loss": 0.6938078999519348,
"eval_runtime": 513.615,
"eval_samples_per_second": 4.102,
"eval_steps_per_second": 4.102,
"step": 4500
}
],
"logging_steps": 2,
"max_steps": 14220,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 500,
"stateful_callbacks": {
"EarlyStoppingCallback": {
"args": {
"early_stopping_patience": 5,
"early_stopping_threshold": 0.001
},
"attributes": {
"early_stopping_patience_counter": 0
}
},
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 4.673687407282723e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}