{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.994161801501251,
"eval_steps": 500,
"global_step": 447,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.006672226855713094,
"grad_norm": 0.3732840765814082,
"learning_rate": 2.2222222222222224e-07,
"loss": 1.6647,
"step": 1
},
{
"epoch": 0.013344453711426188,
"grad_norm": 0.383618969512302,
"learning_rate": 4.444444444444445e-07,
"loss": 1.7073,
"step": 2
},
{
"epoch": 0.020016680567139282,
"grad_norm": 0.40769314152645725,
"learning_rate": 6.666666666666667e-07,
"loss": 1.7505,
"step": 3
},
{
"epoch": 0.026688907422852376,
"grad_norm": 0.4016260533661679,
"learning_rate": 8.88888888888889e-07,
"loss": 1.6952,
"step": 4
},
{
"epoch": 0.03336113427856547,
"grad_norm": 0.40174340733938796,
"learning_rate": 1.111111111111111e-06,
"loss": 1.7844,
"step": 5
},
{
"epoch": 0.040033361134278564,
"grad_norm": 0.40847133525152324,
"learning_rate": 1.3333333333333334e-06,
"loss": 1.7765,
"step": 6
},
{
"epoch": 0.04670558798999166,
"grad_norm": 0.40604889051688275,
"learning_rate": 1.5555555555555558e-06,
"loss": 1.6995,
"step": 7
},
{
"epoch": 0.05337781484570475,
"grad_norm": 0.3894313779958969,
"learning_rate": 1.777777777777778e-06,
"loss": 1.7128,
"step": 8
},
{
"epoch": 0.060050041701417846,
"grad_norm": 0.38627201738251066,
"learning_rate": 2.0000000000000003e-06,
"loss": 1.7789,
"step": 9
},
{
"epoch": 0.06672226855713094,
"grad_norm": 0.379007955799278,
"learning_rate": 2.222222222222222e-06,
"loss": 1.6907,
"step": 10
},
{
"epoch": 0.07339449541284404,
"grad_norm": 0.40293473844915795,
"learning_rate": 2.4444444444444447e-06,
"loss": 1.712,
"step": 11
},
{
"epoch": 0.08006672226855713,
"grad_norm": 0.3796921388422985,
"learning_rate": 2.666666666666667e-06,
"loss": 1.717,
"step": 12
},
{
"epoch": 0.08673894912427023,
"grad_norm": 0.363416530641245,
"learning_rate": 2.888888888888889e-06,
"loss": 1.6075,
"step": 13
},
{
"epoch": 0.09341117597998332,
"grad_norm": 0.42180934217352195,
"learning_rate": 3.1111111111111116e-06,
"loss": 1.7974,
"step": 14
},
{
"epoch": 0.10008340283569642,
"grad_norm": 0.4120160113974808,
"learning_rate": 3.3333333333333333e-06,
"loss": 1.7485,
"step": 15
},
{
"epoch": 0.1067556296914095,
"grad_norm": 0.39278536183126506,
"learning_rate": 3.555555555555556e-06,
"loss": 1.5707,
"step": 16
},
{
"epoch": 0.1134278565471226,
"grad_norm": 0.4167599840383631,
"learning_rate": 3.777777777777778e-06,
"loss": 1.613,
"step": 17
},
{
"epoch": 0.12010008340283569,
"grad_norm": 0.4238875949015934,
"learning_rate": 4.000000000000001e-06,
"loss": 1.6816,
"step": 18
},
{
"epoch": 0.1267723102585488,
"grad_norm": 0.4720616555526343,
"learning_rate": 4.222222222222223e-06,
"loss": 1.7593,
"step": 19
},
{
"epoch": 0.13344453711426188,
"grad_norm": 0.4525263926368364,
"learning_rate": 4.444444444444444e-06,
"loss": 1.775,
"step": 20
},
{
"epoch": 0.14011676396997497,
"grad_norm": 0.48266597054805727,
"learning_rate": 4.666666666666667e-06,
"loss": 1.791,
"step": 21
},
{
"epoch": 0.14678899082568808,
"grad_norm": 0.45939880890244206,
"learning_rate": 4.888888888888889e-06,
"loss": 1.7338,
"step": 22
},
{
"epoch": 0.15346121768140117,
"grad_norm": 0.47764567898261373,
"learning_rate": 5.1111111111111115e-06,
"loss": 1.7483,
"step": 23
},
{
"epoch": 0.16013344453711426,
"grad_norm": 0.5017631959610884,
"learning_rate": 5.333333333333334e-06,
"loss": 1.7481,
"step": 24
},
{
"epoch": 0.16680567139282734,
"grad_norm": 0.4893567689357815,
"learning_rate": 5.555555555555557e-06,
"loss": 1.7136,
"step": 25
},
{
"epoch": 0.17347789824854046,
"grad_norm": 0.5105291717073872,
"learning_rate": 5.777777777777778e-06,
"loss": 1.7602,
"step": 26
},
{
"epoch": 0.18015012510425354,
"grad_norm": 0.46974781533253324,
"learning_rate": 6e-06,
"loss": 1.6153,
"step": 27
},
{
"epoch": 0.18682235195996663,
"grad_norm": 0.5383405971129123,
"learning_rate": 6.222222222222223e-06,
"loss": 1.6602,
"step": 28
},
{
"epoch": 0.19349457881567975,
"grad_norm": 0.5952748064288939,
"learning_rate": 6.444444444444445e-06,
"loss": 1.7177,
"step": 29
},
{
"epoch": 0.20016680567139283,
"grad_norm": 0.5399245254089347,
"learning_rate": 6.666666666666667e-06,
"loss": 1.7092,
"step": 30
},
{
"epoch": 0.20683903252710592,
"grad_norm": 0.5598787056956456,
"learning_rate": 6.88888888888889e-06,
"loss": 1.67,
"step": 31
},
{
"epoch": 0.213511259382819,
"grad_norm": 0.5992687712634374,
"learning_rate": 7.111111111111112e-06,
"loss": 1.7355,
"step": 32
},
{
"epoch": 0.22018348623853212,
"grad_norm": 0.6046798477314653,
"learning_rate": 7.333333333333333e-06,
"loss": 1.7322,
"step": 33
},
{
"epoch": 0.2268557130942452,
"grad_norm": 0.6320084975028277,
"learning_rate": 7.555555555555556e-06,
"loss": 1.8031,
"step": 34
},
{
"epoch": 0.2335279399499583,
"grad_norm": 0.5904036648454649,
"learning_rate": 7.77777777777778e-06,
"loss": 1.6945,
"step": 35
},
{
"epoch": 0.24020016680567138,
"grad_norm": 0.6104299430520795,
"learning_rate": 8.000000000000001e-06,
"loss": 1.6777,
"step": 36
},
{
"epoch": 0.2468723936613845,
"grad_norm": 0.6047786223470227,
"learning_rate": 8.222222222222222e-06,
"loss": 1.6366,
"step": 37
},
{
"epoch": 0.2535446205170976,
"grad_norm": 0.6387094238927912,
"learning_rate": 8.444444444444446e-06,
"loss": 1.6636,
"step": 38
},
{
"epoch": 0.2602168473728107,
"grad_norm": 0.6120207071048721,
"learning_rate": 8.666666666666668e-06,
"loss": 1.6292,
"step": 39
},
{
"epoch": 0.26688907422852376,
"grad_norm": 0.6088068565354802,
"learning_rate": 8.888888888888888e-06,
"loss": 1.6047,
"step": 40
},
{
"epoch": 0.2735613010842369,
"grad_norm": 0.6091668716984593,
"learning_rate": 9.111111111111112e-06,
"loss": 1.6258,
"step": 41
},
{
"epoch": 0.28023352793994993,
"grad_norm": 0.5682506824419054,
"learning_rate": 9.333333333333334e-06,
"loss": 1.5699,
"step": 42
},
{
"epoch": 0.28690575479566305,
"grad_norm": 0.5635810130587849,
"learning_rate": 9.555555555555556e-06,
"loss": 1.6314,
"step": 43
},
{
"epoch": 0.29357798165137616,
"grad_norm": 0.5841358241959886,
"learning_rate": 9.777777777777779e-06,
"loss": 1.5456,
"step": 44
},
{
"epoch": 0.3002502085070892,
"grad_norm": 0.5617293142392159,
"learning_rate": 1e-05,
"loss": 1.5272,
"step": 45
},
{
"epoch": 0.30692243536280234,
"grad_norm": 0.47772319074173303,
"learning_rate": 9.999847318844664e-06,
"loss": 1.5373,
"step": 46
},
{
"epoch": 0.31359466221851545,
"grad_norm": 0.510823048338545,
"learning_rate": 9.999389284703265e-06,
"loss": 1.6097,
"step": 47
},
{
"epoch": 0.3202668890742285,
"grad_norm": 0.5010538752769951,
"learning_rate": 9.99862592554908e-06,
"loss": 1.5344,
"step": 48
},
{
"epoch": 0.3269391159299416,
"grad_norm": 0.47226467172138603,
"learning_rate": 9.99755728800233e-06,
"loss": 1.5023,
"step": 49
},
{
"epoch": 0.3336113427856547,
"grad_norm": 0.4849966025454849,
"learning_rate": 9.996183437327342e-06,
"loss": 1.4975,
"step": 50
},
{
"epoch": 0.3402835696413678,
"grad_norm": 0.47126668835079394,
"learning_rate": 9.994504457428557e-06,
"loss": 1.5127,
"step": 51
},
{
"epoch": 0.3469557964970809,
"grad_norm": 0.4301425546280333,
"learning_rate": 9.992520450845415e-06,
"loss": 1.4456,
"step": 52
},
{
"epoch": 0.353628023352794,
"grad_norm": 0.4378182305752446,
"learning_rate": 9.99023153874608e-06,
"loss": 1.4695,
"step": 53
},
{
"epoch": 0.3603002502085071,
"grad_norm": 0.4297960171566437,
"learning_rate": 9.987637860920053e-06,
"loss": 1.5113,
"step": 54
},
{
"epoch": 0.3669724770642202,
"grad_norm": 0.4154036526185639,
"learning_rate": 9.984739575769619e-06,
"loss": 1.3419,
"step": 55
},
{
"epoch": 0.37364470391993326,
"grad_norm": 0.42590048753833515,
"learning_rate": 9.981536860300191e-06,
"loss": 1.4191,
"step": 56
},
{
"epoch": 0.3803169307756464,
"grad_norm": 0.4067719643823426,
"learning_rate": 9.978029910109491e-06,
"loss": 1.3983,
"step": 57
},
{
"epoch": 0.3869891576313595,
"grad_norm": 0.3969278459704296,
"learning_rate": 9.9742189393756e-06,
"loss": 1.3794,
"step": 58
},
{
"epoch": 0.39366138448707255,
"grad_norm": 0.3854670128621615,
"learning_rate": 9.970104180843881e-06,
"loss": 1.4305,
"step": 59
},
{
"epoch": 0.40033361134278567,
"grad_norm": 0.37743755476343405,
"learning_rate": 9.965685885812773e-06,
"loss": 1.4283,
"step": 60
},
{
"epoch": 0.4070058381984987,
"grad_norm": 0.3610253450226918,
"learning_rate": 9.960964324118428e-06,
"loss": 1.4135,
"step": 61
},
{
"epoch": 0.41367806505421184,
"grad_norm": 0.38314151063998836,
"learning_rate": 9.955939784118246e-06,
"loss": 1.4194,
"step": 62
},
{
"epoch": 0.42035029190992496,
"grad_norm": 0.391502563403777,
"learning_rate": 9.950612572673255e-06,
"loss": 1.3669,
"step": 63
},
{
"epoch": 0.427022518765638,
"grad_norm": 0.3606335039104152,
"learning_rate": 9.944983015129376e-06,
"loss": 1.3249,
"step": 64
},
{
"epoch": 0.43369474562135113,
"grad_norm": 0.3836886872726812,
"learning_rate": 9.939051455297548e-06,
"loss": 1.3323,
"step": 65
},
{
"epoch": 0.44036697247706424,
"grad_norm": 0.39513066746369385,
"learning_rate": 9.932818255432733e-06,
"loss": 1.3228,
"step": 66
},
{
"epoch": 0.4470391993327773,
"grad_norm": 0.3776395809498074,
"learning_rate": 9.926283796211796e-06,
"loss": 1.4202,
"step": 67
},
{
"epoch": 0.4537114261884904,
"grad_norm": 0.3536441979654702,
"learning_rate": 9.919448476710248e-06,
"loss": 1.3169,
"step": 68
},
{
"epoch": 0.4603836530442035,
"grad_norm": 0.35401704664244255,
"learning_rate": 9.91231271437788e-06,
"loss": 1.2869,
"step": 69
},
{
"epoch": 0.4670558798999166,
"grad_norm": 0.364086698791685,
"learning_rate": 9.904876945013272e-06,
"loss": 1.3558,
"step": 70
},
{
"epoch": 0.4737281067556297,
"grad_norm": 0.36643453456415837,
"learning_rate": 9.89714162273716e-06,
"loss": 1.2972,
"step": 71
},
{
"epoch": 0.48040033361134277,
"grad_norm": 0.35341632594055883,
"learning_rate": 9.889107219964726e-06,
"loss": 1.288,
"step": 72
},
{
"epoch": 0.4870725604670559,
"grad_norm": 0.35727226233776593,
"learning_rate": 9.880774227376727e-06,
"loss": 1.2919,
"step": 73
},
{
"epoch": 0.493744787322769,
"grad_norm": 0.36655136111497844,
"learning_rate": 9.872143153889538e-06,
"loss": 1.2864,
"step": 74
},
{
"epoch": 0.5004170141784821,
"grad_norm": 0.3580519341051248,
"learning_rate": 9.863214526624065e-06,
"loss": 1.3545,
"step": 75
},
{
"epoch": 0.5070892410341952,
"grad_norm": 0.3251286992451364,
"learning_rate": 9.853988890873563e-06,
"loss": 1.2463,
"step": 76
},
{
"epoch": 0.5137614678899083,
"grad_norm": 0.35154208731758474,
"learning_rate": 9.844466810070319e-06,
"loss": 1.2842,
"step": 77
},
{
"epoch": 0.5204336947456214,
"grad_norm": 0.35360327701201805,
"learning_rate": 9.834648865751254e-06,
"loss": 1.2074,
"step": 78
},
{
"epoch": 0.5271059216013344,
"grad_norm": 0.33827984275163614,
"learning_rate": 9.8245356575224e-06,
"loss": 1.2719,
"step": 79
},
{
"epoch": 0.5337781484570475,
"grad_norm": 0.34688277661237193,
"learning_rate": 9.814127803022281e-06,
"loss": 1.2809,
"step": 80
},
{
"epoch": 0.5404503753127606,
"grad_norm": 0.30446458893187955,
"learning_rate": 9.803425937884202e-06,
"loss": 1.2302,
"step": 81
},
{
"epoch": 0.5471226021684737,
"grad_norm": 0.31831189291520345,
"learning_rate": 9.792430715697412e-06,
"loss": 1.2329,
"step": 82
},
{
"epoch": 0.5537948290241869,
"grad_norm": 0.29801313071352337,
"learning_rate": 9.781142807967205e-06,
"loss": 1.2464,
"step": 83
},
{
"epoch": 0.5604670558798999,
"grad_norm": 0.3118984625981495,
"learning_rate": 9.769562904073896e-06,
"loss": 1.2388,
"step": 84
},
{
"epoch": 0.567139282735613,
"grad_norm": 0.28346870486129144,
"learning_rate": 9.757691711230728e-06,
"loss": 1.222,
"step": 85
},
{
"epoch": 0.5738115095913261,
"grad_norm": 0.3235682827850219,
"learning_rate": 9.745529954440675e-06,
"loss": 1.2167,
"step": 86
},
{
"epoch": 0.5804837364470392,
"grad_norm": 0.3025602896103518,
"learning_rate": 9.733078376452172e-06,
"loss": 1.2439,
"step": 87
},
{
"epoch": 0.5871559633027523,
"grad_norm": 0.2943206117574144,
"learning_rate": 9.72033773771374e-06,
"loss": 1.1714,
"step": 88
},
{
"epoch": 0.5938281901584654,
"grad_norm": 0.3043570350280811,
"learning_rate": 9.707308816327557e-06,
"loss": 1.2466,
"step": 89
},
{
"epoch": 0.6005004170141784,
"grad_norm": 0.3034637832523981,
"learning_rate": 9.693992408001934e-06,
"loss": 1.1737,
"step": 90
},
{
"epoch": 0.6071726438698916,
"grad_norm": 0.29378370873758436,
"learning_rate": 9.680389326002708e-06,
"loss": 1.2823,
"step": 91
},
{
"epoch": 0.6138448707256047,
"grad_norm": 0.25233208089640863,
"learning_rate": 9.666500401103595e-06,
"loss": 1.191,
"step": 92
},
{
"epoch": 0.6205170975813178,
"grad_norm": 0.264841896316846,
"learning_rate": 9.652326481535434e-06,
"loss": 1.1842,
"step": 93
},
{
"epoch": 0.6271893244370309,
"grad_norm": 0.276234903477221,
"learning_rate": 9.63786843293439e-06,
"loss": 1.1062,
"step": 94
},
{
"epoch": 0.6338615512927439,
"grad_norm": 0.25250932151404143,
"learning_rate": 9.623127138289087e-06,
"loss": 1.1036,
"step": 95
},
{
"epoch": 0.640533778148457,
"grad_norm": 0.27277526043956535,
"learning_rate": 9.608103497886687e-06,
"loss": 1.1332,
"step": 96
},
{
"epoch": 0.6472060050041701,
"grad_norm": 0.27812831319172243,
"learning_rate": 9.592798429257899e-06,
"loss": 1.1704,
"step": 97
},
{
"epoch": 0.6538782318598833,
"grad_norm": 0.24189134780796812,
"learning_rate": 9.577212867120947e-06,
"loss": 1.1644,
"step": 98
},
{
"epoch": 0.6605504587155964,
"grad_norm": 0.25645020937055246,
"learning_rate": 9.561347763324484e-06,
"loss": 1.147,
"step": 99
},
{
"epoch": 0.6672226855713094,
"grad_norm": 0.2595686790235878,
"learning_rate": 9.545204086789461e-06,
"loss": 1.2334,
"step": 100
},
{
"epoch": 0.6738949124270225,
"grad_norm": 0.24949960135623445,
"learning_rate": 9.528782823449954e-06,
"loss": 1.0824,
"step": 101
},
{
"epoch": 0.6805671392827356,
"grad_norm": 0.2457329952973373,
"learning_rate": 9.512084976192944e-06,
"loss": 1.1335,
"step": 102
},
{
"epoch": 0.6872393661384487,
"grad_norm": 0.26799579166131554,
"learning_rate": 9.495111564797073e-06,
"loss": 1.1403,
"step": 103
},
{
"epoch": 0.6939115929941618,
"grad_norm": 0.24843110380217398,
"learning_rate": 9.477863625870371e-06,
"loss": 1.1659,
"step": 104
},
{
"epoch": 0.700583819849875,
"grad_norm": 0.26596297978049915,
"learning_rate": 9.460342212786933e-06,
"loss": 1.1191,
"step": 105
},
{
"epoch": 0.707256046705588,
"grad_norm": 0.24609569923205743,
"learning_rate": 9.442548395622596e-06,
"loss": 1.1388,
"step": 106
},
{
"epoch": 0.7139282735613011,
"grad_norm": 0.30159400597802477,
"learning_rate": 9.424483261089584e-06,
"loss": 1.2705,
"step": 107
},
{
"epoch": 0.7206005004170142,
"grad_norm": 0.24911328963823176,
"learning_rate": 9.406147912470142e-06,
"loss": 1.1393,
"step": 108
},
{
"epoch": 0.7272727272727273,
"grad_norm": 0.2461854661834755,
"learning_rate": 9.387543469549156e-06,
"loss": 1.0897,
"step": 109
},
{
"epoch": 0.7339449541284404,
"grad_norm": 0.24427724214296034,
"learning_rate": 9.368671068545761e-06,
"loss": 1.0527,
"step": 110
},
{
"epoch": 0.7406171809841534,
"grad_norm": 0.24775041629357836,
"learning_rate": 9.349531862043952e-06,
"loss": 1.0695,
"step": 111
},
{
"epoch": 0.7472894078398665,
"grad_norm": 0.24100289437186362,
"learning_rate": 9.330127018922195e-06,
"loss": 1.0972,
"step": 112
},
{
"epoch": 0.7539616346955796,
"grad_norm": 0.22817385479165772,
"learning_rate": 9.310457724282034e-06,
"loss": 1.1354,
"step": 113
},
{
"epoch": 0.7606338615512928,
"grad_norm": 0.24351101122361835,
"learning_rate": 9.290525179375722e-06,
"loss": 1.1867,
"step": 114
},
{
"epoch": 0.7673060884070059,
"grad_norm": 0.23252679169251178,
"learning_rate": 9.270330601532855e-06,
"loss": 0.9749,
"step": 115
},
{
"epoch": 0.773978315262719,
"grad_norm": 0.24891852642577655,
"learning_rate": 9.249875224086023e-06,
"loss": 1.0428,
"step": 116
},
{
"epoch": 0.780650542118432,
"grad_norm": 0.2519892667083049,
"learning_rate": 9.229160296295488e-06,
"loss": 1.1525,
"step": 117
},
{
"epoch": 0.7873227689741451,
"grad_norm": 0.22842656407732698,
"learning_rate": 9.208187083272895e-06,
"loss": 1.0893,
"step": 118
},
{
"epoch": 0.7939949958298582,
"grad_norm": 0.23530353826931574,
"learning_rate": 9.186956865904004e-06,
"loss": 1.1461,
"step": 119
},
{
"epoch": 0.8006672226855713,
"grad_norm": 0.25930781535544006,
"learning_rate": 9.165470940770458e-06,
"loss": 1.0863,
"step": 120
},
{
"epoch": 0.8073394495412844,
"grad_norm": 0.24794978516182076,
"learning_rate": 9.143730620070609e-06,
"loss": 1.1216,
"step": 121
},
{
"epoch": 0.8140116763969975,
"grad_norm": 0.23948920252711808,
"learning_rate": 9.121737231539369e-06,
"loss": 1.1121,
"step": 122
},
{
"epoch": 0.8206839032527106,
"grad_norm": 0.2271047573553413,
"learning_rate": 9.099492118367123e-06,
"loss": 1.1543,
"step": 123
},
{
"epoch": 0.8273561301084237,
"grad_norm": 0.25315676717796304,
"learning_rate": 9.076996639117708e-06,
"loss": 1.1309,
"step": 124
},
{
"epoch": 0.8340283569641368,
"grad_norm": 0.24992513383867146,
"learning_rate": 9.054252167645426e-06,
"loss": 1.064,
"step": 125
},
{
"epoch": 0.8407005838198499,
"grad_norm": 0.24372502494677808,
"learning_rate": 9.03126009301115e-06,
"loss": 1.1069,
"step": 126
},
{
"epoch": 0.8473728106755629,
"grad_norm": 0.23106765610762486,
"learning_rate": 9.008021819397488e-06,
"loss": 1.1344,
"step": 127
},
{
"epoch": 0.854045037531276,
"grad_norm": 0.2638490777582001,
"learning_rate": 8.984538766023024e-06,
"loss": 1.1099,
"step": 128
},
{
"epoch": 0.8607172643869891,
"grad_norm": 0.2436361177633829,
"learning_rate": 8.960812367055646e-06,
"loss": 1.0879,
"step": 129
},
{
"epoch": 0.8673894912427023,
"grad_norm": 0.2435286627690007,
"learning_rate": 8.93684407152496e-06,
"loss": 1.1281,
"step": 130
},
{
"epoch": 0.8740617180984154,
"grad_norm": 0.2292808626156267,
"learning_rate": 8.912635343233784e-06,
"loss": 1.0638,
"step": 131
},
{
"epoch": 0.8807339449541285,
"grad_norm": 0.22525031331854853,
"learning_rate": 8.888187660668762e-06,
"loss": 1.1003,
"step": 132
},
{
"epoch": 0.8874061718098415,
"grad_norm": 0.22555143759216476,
"learning_rate": 8.863502516910058e-06,
"loss": 0.9963,
"step": 133
},
{
"epoch": 0.8940783986655546,
"grad_norm": 0.23206135282498955,
"learning_rate": 8.838581419540183e-06,
"loss": 1.0354,
"step": 134
},
{
"epoch": 0.9007506255212677,
"grad_norm": 0.2332933329851816,
"learning_rate": 8.81342589055191e-06,
"loss": 1.0629,
"step": 135
},
{
"epoch": 0.9074228523769808,
"grad_norm": 0.21598906842259502,
"learning_rate": 8.788037466255334e-06,
"loss": 1.0368,
"step": 136
},
{
"epoch": 0.914095079232694,
"grad_norm": 0.2510622429683598,
"learning_rate": 8.762417697184034e-06,
"loss": 1.018,
"step": 137
},
{
"epoch": 0.920767306088407,
"grad_norm": 0.25761970215631363,
"learning_rate": 8.736568148000386e-06,
"loss": 1.0496,
"step": 138
},
{
"epoch": 0.9274395329441201,
"grad_norm": 0.267375239616011,
"learning_rate": 8.710490397400007e-06,
"loss": 1.1176,
"step": 139
},
{
"epoch": 0.9341117597998332,
"grad_norm": 0.23104284046945558,
"learning_rate": 8.684186038015327e-06,
"loss": 1.0221,
"step": 140
},
{
"epoch": 0.9407839866555463,
"grad_norm": 0.2560542899746131,
"learning_rate": 8.657656676318346e-06,
"loss": 1.0761,
"step": 141
},
{
"epoch": 0.9474562135112594,
"grad_norm": 0.25675258631662545,
"learning_rate": 8.630903932522496e-06,
"loss": 1.0371,
"step": 142
},
{
"epoch": 0.9541284403669725,
"grad_norm": 0.2609140179674428,
"learning_rate": 8.603929440483714e-06,
"loss": 1.0895,
"step": 143
},
{
"epoch": 0.9608006672226855,
"grad_norm": 0.23996253406165996,
"learning_rate": 8.576734847600639e-06,
"loss": 1.0905,
"step": 144
},
{
"epoch": 0.9674728940783986,
"grad_norm": 0.2545575065577051,
"learning_rate": 8.549321814714018e-06,
"loss": 1.1054,
"step": 145
},
{
"epoch": 0.9741451209341118,
"grad_norm": 0.2514055738350408,
"learning_rate": 8.521692016005262e-06,
"loss": 1.1183,
"step": 146
},
{
"epoch": 0.9808173477898249,
"grad_norm": 0.2792023557673277,
"learning_rate": 8.49384713889421e-06,
"loss": 1.106,
"step": 147
},
{
"epoch": 0.987489574645538,
"grad_norm": 0.22340105561162074,
"learning_rate": 8.46578888393606e-06,
"loss": 1.0232,
"step": 148
},
{
"epoch": 0.994161801501251,
"grad_norm": 0.25568989349658056,
"learning_rate": 8.43751896471753e-06,
"loss": 1.0899,
"step": 149
},
{
"epoch": 1.0066722268557131,
"grad_norm": 0.7802055347834813,
"learning_rate": 8.40903910775219e-06,
"loss": 1.9774,
"step": 150
},
{
"epoch": 1.0133444537114262,
"grad_norm": 0.2595541681581563,
"learning_rate": 8.380351052375023e-06,
"loss": 1.045,
"step": 151
},
{
"epoch": 1.0200166805671393,
"grad_norm": 0.25723642565217814,
"learning_rate": 8.35145655063621e-06,
"loss": 1.0245,
"step": 152
},
{
"epoch": 1.0266889074228525,
"grad_norm": 0.27362291141864464,
"learning_rate": 8.32235736719411e-06,
"loss": 1.0868,
"step": 153
},
{
"epoch": 1.0333611342785656,
"grad_norm": 0.2458791941756008,
"learning_rate": 8.293055279207503e-06,
"loss": 1.0889,
"step": 154
},
{
"epoch": 1.0400333611342785,
"grad_norm": 0.31147103624378575,
"learning_rate": 8.263552076227048e-06,
"loss": 0.9933,
"step": 155
},
{
"epoch": 1.0467055879899916,
"grad_norm": 0.2672690864798242,
"learning_rate": 8.233849560085994e-06,
"loss": 1.0815,
"step": 156
},
{
"epoch": 1.0533778148457047,
"grad_norm": 0.2422074236035313,
"learning_rate": 8.203949544790131e-06,
"loss": 1.1015,
"step": 157
},
{
"epoch": 1.0600500417014178,
"grad_norm": 0.27579765876295836,
"learning_rate": 8.173853856407011e-06,
"loss": 1.0386,
"step": 158
},
{
"epoch": 1.066722268557131,
"grad_norm": 0.25869695071730053,
"learning_rate": 8.143564332954426e-06,
"loss": 1.0408,
"step": 159
},
{
"epoch": 1.073394495412844,
"grad_norm": 0.2902505945569461,
"learning_rate": 8.113082824288145e-06,
"loss": 1.0253,
"step": 160
},
{
"epoch": 1.0800667222685572,
"grad_norm": 0.23657511088258748,
"learning_rate": 8.082411191988956e-06,
"loss": 1.0282,
"step": 161
},
{
"epoch": 1.0867389491242703,
"grad_norm": 0.2841268408399986,
"learning_rate": 8.051551309248961e-06,
"loss": 1.1055,
"step": 162
},
{
"epoch": 1.0934111759799834,
"grad_norm": 0.26372063674010376,
"learning_rate": 8.02050506075718e-06,
"loss": 1.0589,
"step": 163
},
{
"epoch": 1.1000834028356965,
"grad_norm": 0.27690358093295814,
"learning_rate": 7.989274342584446e-06,
"loss": 0.949,
"step": 164
},
{
"epoch": 1.1067556296914094,
"grad_norm": 0.2712773483518729,
"learning_rate": 7.957861062067614e-06,
"loss": 1.0729,
"step": 165
},
{
"epoch": 1.1134278565471225,
"grad_norm": 0.30516214609809805,
"learning_rate": 7.926267137693066e-06,
"loss": 1.037,
"step": 166
},
{
"epoch": 1.1201000834028356,
"grad_norm": 0.2576349226719825,
"learning_rate": 7.894494498979558e-06,
"loss": 1.0497,
"step": 167
},
{
"epoch": 1.1267723102585487,
"grad_norm": 0.24801992242605783,
"learning_rate": 7.86254508636036e-06,
"loss": 1.0313,
"step": 168
},
{
"epoch": 1.1334445371142619,
"grad_norm": 0.3166344694109662,
"learning_rate": 7.830420851064767e-06,
"loss": 1.0967,
"step": 169
},
{
"epoch": 1.140116763969975,
"grad_norm": 0.24751199793145093,
"learning_rate": 7.798123754998922e-06,
"loss": 1.0031,
"step": 170
},
{
"epoch": 1.146788990825688,
"grad_norm": 0.2586884189404244,
"learning_rate": 7.765655770625997e-06,
"loss": 1.0219,
"step": 171
},
{
"epoch": 1.1534612176814012,
"grad_norm": 0.235632902703973,
"learning_rate": 7.733018880845747e-06,
"loss": 1.0207,
"step": 172
},
{
"epoch": 1.1601334445371143,
"grad_norm": 0.25234095064849044,
"learning_rate": 7.70021507887338e-06,
"loss": 1.0061,
"step": 173
},
{
"epoch": 1.1668056713928274,
"grad_norm": 0.2326635498094972,
"learning_rate": 7.667246368117852e-06,
"loss": 1.0587,
"step": 174
},
{
"epoch": 1.1734778982485405,
"grad_norm": 0.2649027893834019,
"learning_rate": 7.634114762059504e-06,
"loss": 0.9703,
"step": 175
},
{
"epoch": 1.1801501251042534,
"grad_norm": 0.2728476550775381,
"learning_rate": 7.600822284127091e-06,
"loss": 1.0392,
"step": 176
},
{
"epoch": 1.1868223519599665,
"grad_norm": 0.24549914761915478,
"learning_rate": 7.56737096757421e-06,
"loss": 0.9954,
"step": 177
},
{
"epoch": 1.1934945788156797,
"grad_norm": 0.2815251829331306,
"learning_rate": 7.533762855355126e-06,
"loss": 1.0291,
"step": 178
},
{
"epoch": 1.2001668056713928,
"grad_norm": 0.26632117503517927,
"learning_rate": 7.500000000000001e-06,
"loss": 1.1419,
"step": 179
},
{
"epoch": 1.206839032527106,
"grad_norm": 0.2966134642339934,
"learning_rate": 7.466084463489537e-06,
"loss": 1.0036,
"step": 180
},
{
"epoch": 1.213511259382819,
"grad_norm": 0.26223635549866403,
"learning_rate": 7.432018317129056e-06,
"loss": 1.0285,
"step": 181
},
{
"epoch": 1.2201834862385321,
"grad_norm": 0.30911626688190436,
"learning_rate": 7.39780364142199e-06,
"loss": 1.0041,
"step": 182
},
{
"epoch": 1.2268557130942452,
"grad_norm": 0.283762152043488,
"learning_rate": 7.363442525942827e-06,
"loss": 1.0517,
"step": 183
},
{
"epoch": 1.2335279399499584,
"grad_norm": 0.3425818937559889,
"learning_rate": 7.32893706920949e-06,
"loss": 1.0177,
"step": 184
},
{
"epoch": 1.2402001668056715,
"grad_norm": 0.35518044492578266,
"learning_rate": 7.294289378555179e-06,
"loss": 1.039,
"step": 185
},
{
"epoch": 1.2468723936613846,
"grad_norm": 0.2953658322519591,
"learning_rate": 7.25950156999967e-06,
"loss": 1.0239,
"step": 186
},
{
"epoch": 1.2535446205170975,
"grad_norm": 0.30127312537141965,
"learning_rate": 7.2245757681200835e-06,
"loss": 0.9763,
"step": 187
},
{
"epoch": 1.2602168473728108,
"grad_norm": 0.282352211179054,
"learning_rate": 7.189514105921132e-06,
"loss": 0.991,
"step": 188
},
{
"epoch": 1.2668890742285237,
"grad_norm": 0.27942599700546883,
"learning_rate": 7.1543187247048525e-06,
"loss": 0.9997,
"step": 189
},
{
"epoch": 1.2735613010842368,
"grad_norm": 0.34032643799945067,
"learning_rate": 7.118991773939832e-06,
"loss": 1.0323,
"step": 190
},
{
"epoch": 1.28023352793995,
"grad_norm": 0.2833978681025291,
"learning_rate": 7.083535411129934e-06,
"loss": 0.965,
"step": 191
},
{
"epoch": 1.286905754795663,
"grad_norm": 0.28066028962170614,
"learning_rate": 7.047951801682533e-06,
"loss": 0.9827,
"step": 192
},
{
"epoch": 1.2935779816513762,
"grad_norm": 0.3451242223474228,
"learning_rate": 7.01224311877627e-06,
"loss": 1.013,
"step": 193
},
{
"epoch": 1.3002502085070893,
"grad_norm": 0.28576160899229214,
"learning_rate": 6.976411543228328e-06,
"loss": 1.0147,
"step": 194
},
{
"epoch": 1.3069224353628024,
"grad_norm": 0.26405586969472095,
"learning_rate": 6.9404592633612486e-06,
"loss": 1.0222,
"step": 195
},
{
"epoch": 1.3135946622185155,
"grad_norm": 0.23979743466387593,
"learning_rate": 6.904388474869284e-06,
"loss": 0.9763,
"step": 196
},
{
"epoch": 1.3202668890742286,
"grad_norm": 0.26278250196725494,
"learning_rate": 6.8682013806842985e-06,
"loss": 1.0177,
"step": 197
},
{
"epoch": 1.3269391159299415,
"grad_norm": 0.3616613201338565,
"learning_rate": 6.831900190841232e-06,
"loss": 1.0189,
"step": 198
},
{
"epoch": 1.3336113427856546,
"grad_norm": 0.317805954218629,
"learning_rate": 6.795487122343124e-06,
"loss": 1.0761,
"step": 199
},
{
"epoch": 1.3402835696413677,
"grad_norm": 0.3267310563691638,
"learning_rate": 6.758964399025721e-06,
"loss": 1.0103,
"step": 200
},
{
"epoch": 1.3469557964970809,
"grad_norm": 0.3367394677015302,
"learning_rate": 6.722334251421665e-06,
"loss": 0.9504,
"step": 201
},
{
"epoch": 1.353628023352794,
"grad_norm": 0.323985054612315,
"learning_rate": 6.685598916624254e-06,
"loss": 1.0425,
"step": 202
},
{
"epoch": 1.360300250208507,
"grad_norm": 0.31744623672148714,
"learning_rate": 6.648760638150833e-06,
"loss": 1.0284,
"step": 203
},
{
"epoch": 1.3669724770642202,
"grad_norm": 0.32506007928496355,
"learning_rate": 6.611821665805769e-06,
"loss": 1.0494,
"step": 204
},
{
"epoch": 1.3736447039199333,
"grad_norm": 0.31683692701232863,
"learning_rate": 6.574784255543052e-06,
"loss": 0.9341,
"step": 205
},
{
"epoch": 1.3803169307756464,
"grad_norm": 0.339014481901513,
"learning_rate": 6.537650669328518e-06,
"loss": 0.9351,
"step": 206
},
{
"epoch": 1.3869891576313595,
"grad_norm": 0.2908501088905977,
"learning_rate": 6.500423175001705e-06,
"loss": 0.9636,
"step": 207
},
{
"epoch": 1.3936613844870727,
"grad_norm": 0.29622441402682287,
"learning_rate": 6.4631040461373494e-06,
"loss": 0.991,
"step": 208
},
{
"epoch": 1.4003336113427856,
"grad_norm": 0.259706989545213,
"learning_rate": 6.4256955619065375e-06,
"loss": 1.0265,
"step": 209
},
{
"epoch": 1.4070058381984987,
"grad_norm": 0.27351346614577177,
"learning_rate": 6.388200006937503e-06,
"loss": 1.1127,
"step": 210
},
{
"epoch": 1.4136780650542118,
"grad_norm": 0.2569362561775431,
"learning_rate": 6.350619671176111e-06,
"loss": 0.9848,
"step": 211
},
{
"epoch": 1.420350291909925,
"grad_norm": 0.3031398363213745,
"learning_rate": 6.312956849745993e-06,
"loss": 1.0439,
"step": 212
},
{
"epoch": 1.427022518765638,
"grad_norm": 0.29741800552671255,
"learning_rate": 6.275213842808383e-06,
"loss": 1.0295,
"step": 213
},
{
"epoch": 1.4336947456213511,
"grad_norm": 0.2858593332853522,
"learning_rate": 6.237392955421644e-06,
"loss": 0.9632,
"step": 214
},
{
"epoch": 1.4403669724770642,
"grad_norm": 0.2717253920058818,
"learning_rate": 6.19949649740049e-06,
"loss": 0.9708,
"step": 215
},
{
"epoch": 1.4470391993327774,
"grad_norm": 0.2755282096882548,
"learning_rate": 6.161526783174917e-06,
"loss": 1.0894,
"step": 216
},
{
"epoch": 1.4537114261884905,
"grad_norm": 0.277037632676125,
"learning_rate": 6.123486131648859e-06,
"loss": 1.0682,
"step": 217
},
{
"epoch": 1.4603836530442034,
"grad_norm": 0.3020180443959382,
"learning_rate": 6.085376866058569e-06,
"loss": 1.0672,
"step": 218
},
{
"epoch": 1.4670558798999167,
"grad_norm": 0.2819163434265071,
"learning_rate": 6.047201313830724e-06,
"loss": 0.9994,
"step": 219
},
{
"epoch": 1.4737281067556296,
"grad_norm": 0.2800183935957482,
"learning_rate": 6.0089618064402896e-06,
"loss": 1.022,
"step": 220
},
{
"epoch": 1.4804003336113427,
"grad_norm": 0.2719182825453672,
"learning_rate": 5.970660679268139e-06,
"loss": 1.0183,
"step": 221
},
{
"epoch": 1.4870725604670558,
"grad_norm": 0.2922309116254843,
"learning_rate": 5.932300271458406e-06,
"loss": 0.9777,
"step": 222
},
{
"epoch": 1.493744787322769,
"grad_norm": 0.2744738064356238,
"learning_rate": 5.893882925775648e-06,
"loss": 0.9841,
"step": 223
},
{
"epoch": 1.500417014178482,
"grad_norm": 0.2618241413789887,
"learning_rate": 5.85541098846175e-06,
"loss": 0.9789,
"step": 224
},
{
"epoch": 1.5070892410341952,
"grad_norm": 0.29331697175604643,
"learning_rate": 5.816886809092651e-06,
"loss": 0.9889,
"step": 225
},
{
"epoch": 1.5137614678899083,
"grad_norm": 0.293938259486948,
"learning_rate": 5.778312740434835e-06,
"loss": 1.0398,
"step": 226
},
{
"epoch": 1.5204336947456214,
"grad_norm": 0.3149805373750932,
"learning_rate": 5.73969113830165e-06,
"loss": 1.0356,
"step": 227
},
{
"epoch": 1.5271059216013345,
"grad_norm": 0.2882296792753476,
"learning_rate": 5.701024361409431e-06,
"loss": 1.066,
"step": 228
},
{
"epoch": 1.5337781484570474,
"grad_norm": 0.3264587076590878,
"learning_rate": 5.66231477123344e-06,
"loss": 0.9717,
"step": 229
},
{
"epoch": 1.5404503753127607,
"grad_norm": 0.28816187489358347,
"learning_rate": 5.623564731863664e-06,
"loss": 1.0055,
"step": 230
},
{
"epoch": 1.5471226021684736,
"grad_norm": 0.26876813399349714,
"learning_rate": 5.584776609860414e-06,
"loss": 1.0118,
"step": 231
},
{
"epoch": 1.553794829024187,
"grad_norm": 0.25656136721296807,
"learning_rate": 5.545952774109798e-06,
"loss": 1.0441,
"step": 232
},
{
"epoch": 1.5604670558798999,
"grad_norm": 0.2927283912839115,
"learning_rate": 5.507095595679059e-06,
"loss": 0.9875,
"step": 233
},
{
"epoch": 1.567139282735613,
"grad_norm": 0.26847113941733736,
"learning_rate": 5.468207447671755e-06,
"loss": 0.942,
"step": 234
},
{
"epoch": 1.573811509591326,
"grad_norm": 0.35285592222755574,
"learning_rate": 5.42929070508283e-06,
"loss": 1.0382,
"step": 235
},
{
"epoch": 1.5804837364470392,
"grad_norm": 0.28225631872241425,
"learning_rate": 5.390347744653576e-06,
"loss": 1.0041,
"step": 236
},
{
"epoch": 1.5871559633027523,
"grad_norm": 0.2894670959272741,
"learning_rate": 5.351380944726465e-06,
"loss": 1.0017,
"step": 237
},
{
"epoch": 1.5938281901584654,
"grad_norm": 0.28265496307817667,
"learning_rate": 5.312392685099915e-06,
"loss": 1.0073,
"step": 238
},
{
"epoch": 1.6005004170141786,
"grad_norm": 0.3412461315872378,
"learning_rate": 5.2733853468829295e-06,
"loss": 0.9839,
"step": 239
},
{
"epoch": 1.6071726438698914,
"grad_norm": 0.2954038595818766,
"learning_rate": 5.234361312349701e-06,
"loss": 1.0132,
"step": 240
},
{
"epoch": 1.6138448707256048,
"grad_norm": 0.2840364660523181,
"learning_rate": 5.195322964794098e-06,
"loss": 0.9473,
"step": 241
},
{
"epoch": 1.6205170975813177,
"grad_norm": 0.2986907397693315,
"learning_rate": 5.156272688384123e-06,
"loss": 1.0077,
"step": 242
},
{
"epoch": 1.627189324437031,
"grad_norm": 0.292176824846797,
"learning_rate": 5.117212868016303e-06,
"loss": 0.9315,
"step": 243
},
{
"epoch": 1.633861551292744,
"grad_norm": 0.3135774476957044,
"learning_rate": 5.07814588917004e-06,
"loss": 1.0088,
"step": 244
},
{
"epoch": 1.640533778148457,
"grad_norm": 0.31002794668270833,
"learning_rate": 5.03907413776192e-06,
"loss": 1.007,
"step": 245
},
{
"epoch": 1.6472060050041701,
"grad_norm": 0.3045456413966571,
"learning_rate": 5e-06,
"loss": 0.9872,
"step": 246
},
{
"epoch": 1.6538782318598833,
"grad_norm": 0.344623415610902,
"learning_rate": 4.96092586223808e-06,
"loss": 0.9343,
"step": 247
},
{
"epoch": 1.6605504587155964,
"grad_norm": 0.2888473591105003,
"learning_rate": 4.921854110829962e-06,
"loss": 1.0071,
"step": 248
},
{
"epoch": 1.6672226855713093,
"grad_norm": 0.3424869438582918,
"learning_rate": 4.882787131983698e-06,
"loss": 1.0513,
"step": 249
},
{
"epoch": 1.6738949124270226,
"grad_norm": 0.3044802192543269,
"learning_rate": 4.84372731161588e-06,
"loss": 1.0034,
"step": 250
},
{
"epoch": 1.6805671392827355,
"grad_norm": 0.3676369886348423,
"learning_rate": 4.804677035205903e-06,
"loss": 1.0679,
"step": 251
},
{
"epoch": 1.6872393661384488,
"grad_norm": 0.2821236895464674,
"learning_rate": 4.765638687650299e-06,
"loss": 0.9711,
"step": 252
},
{
"epoch": 1.6939115929941617,
"grad_norm": 0.32097634201487724,
"learning_rate": 4.726614653117071e-06,
"loss": 1.0368,
"step": 253
},
{
"epoch": 1.700583819849875,
"grad_norm": 0.30833620328617434,
"learning_rate": 4.687607314900087e-06,
"loss": 0.9419,
"step": 254
},
{
"epoch": 1.707256046705588,
"grad_norm": 0.3161695903169762,
"learning_rate": 4.6486190552735375e-06,
"loss": 0.947,
"step": 255
},
{
"epoch": 1.713928273561301,
"grad_norm": 0.3222992290291398,
"learning_rate": 4.6096522553464265e-06,
"loss": 0.9391,
"step": 256
},
{
"epoch": 1.7206005004170142,
"grad_norm": 0.27889539086250337,
"learning_rate": 4.57070929491717e-06,
"loss": 1.0359,
"step": 257
},
{
"epoch": 1.7272727272727273,
"grad_norm": 0.2916579428323436,
"learning_rate": 4.531792552328247e-06,
"loss": 1.0094,
"step": 258
},
{
"epoch": 1.7339449541284404,
"grad_norm": 0.24997244028612386,
"learning_rate": 4.492904404320942e-06,
"loss": 0.7959,
"step": 259
},
{
"epoch": 1.7406171809841533,
"grad_norm": 0.29868404836632323,
"learning_rate": 4.454047225890204e-06,
"loss": 1.0109,
"step": 260
},
{
"epoch": 1.7472894078398666,
"grad_norm": 0.2969807612243251,
"learning_rate": 4.415223390139588e-06,
"loss": 0.9875,
"step": 261
},
{
"epoch": 1.7539616346955795,
"grad_norm": 0.3604232218198932,
"learning_rate": 4.3764352681363365e-06,
"loss": 0.989,
"step": 262
},
{
"epoch": 1.7606338615512929,
"grad_norm": 0.36824762919465986,
"learning_rate": 4.337685228766561e-06,
"loss": 1.0049,
"step": 263
},
{
"epoch": 1.7673060884070058,
"grad_norm": 0.3500725820945927,
"learning_rate": 4.2989756385905715e-06,
"loss": 0.9718,
"step": 264
},
{
"epoch": 1.773978315262719,
"grad_norm": 0.2834505359886034,
"learning_rate": 4.260308861698351e-06,
"loss": 1.0465,
"step": 265
},
{
"epoch": 1.780650542118432,
"grad_norm": 0.3576818688058849,
"learning_rate": 4.221687259565166e-06,
"loss": 1.0372,
"step": 266
},
{
"epoch": 1.787322768974145,
"grad_norm": 0.31757170580776606,
"learning_rate": 4.183113190907349e-06,
"loss": 0.9565,
"step": 267
},
{
"epoch": 1.7939949958298582,
"grad_norm": 0.3231504978728471,
"learning_rate": 4.144589011538251e-06,
"loss": 0.9622,
"step": 268
},
{
"epoch": 1.8006672226855713,
"grad_norm": 0.30845575748951204,
"learning_rate": 4.106117074224354e-06,
"loss": 1.0258,
"step": 269
},
{
"epoch": 1.8073394495412844,
"grad_norm": 0.30421622601970244,
"learning_rate": 4.067699728541595e-06,
"loss": 1.0356,
"step": 270
},
{
"epoch": 1.8140116763969973,
"grad_norm": 0.2920513251222264,
"learning_rate": 4.029339320731862e-06,
"loss": 0.9939,
"step": 271
},
{
"epoch": 1.8206839032527107,
"grad_norm": 0.27807604400588937,
"learning_rate": 3.99103819355971e-06,
"loss": 0.9878,
"step": 272
},
{
"epoch": 1.8273561301084236,
"grad_norm": 0.3266996176440113,
"learning_rate": 3.952798686169279e-06,
"loss": 1.0041,
"step": 273
},
{
"epoch": 1.834028356964137,
"grad_norm": 0.3053490567275808,
"learning_rate": 3.914623133941432e-06,
"loss": 0.9872,
"step": 274
},
{
"epoch": 1.8407005838198498,
"grad_norm": 0.31477790189099164,
"learning_rate": 3.876513868351142e-06,
"loss": 0.9359,
"step": 275
},
{
"epoch": 1.847372810675563,
"grad_norm": 0.27149053344889523,
"learning_rate": 3.838473216825085e-06,
"loss": 1.0067,
"step": 276
},
{
"epoch": 1.854045037531276,
"grad_norm": 0.3204843702460214,
"learning_rate": 3.800503502599511e-06,
"loss": 0.9585,
"step": 277
},
{
"epoch": 1.8607172643869891,
"grad_norm": 0.3498778905670181,
"learning_rate": 3.762607044578357e-06,
"loss": 0.9697,
"step": 278
},
{
"epoch": 1.8673894912427023,
"grad_norm": 0.3180400555344581,
"learning_rate": 3.7247861571916183e-06,
"loss": 1.0226,
"step": 279
},
{
"epoch": 1.8740617180984154,
"grad_norm": 0.3480360514374549,
"learning_rate": 3.6870431502540096e-06,
"loss": 1.0358,
"step": 280
},
{
"epoch": 1.8807339449541285,
"grad_norm": 0.3187430568425493,
"learning_rate": 3.6493803288238894e-06,
"loss": 1.0073,
"step": 281
},
{
"epoch": 1.8874061718098414,
"grad_norm": 0.3464506380317717,
"learning_rate": 3.611799993062497e-06,
"loss": 0.943,
"step": 282
},
{
"epoch": 1.8940783986655547,
"grad_norm": 0.2880834965055193,
"learning_rate": 3.5743044380934655e-06,
"loss": 0.9721,
"step": 283
},
{
"epoch": 1.9007506255212676,
"grad_norm": 0.3210835495717625,
"learning_rate": 3.536895953862652e-06,
"loss": 1.0209,
"step": 284
},
{
"epoch": 1.907422852376981,
"grad_norm": 0.3047811453432249,
"learning_rate": 3.4995768249982975e-06,
"loss": 1.0284,
"step": 285
},
{
"epoch": 1.9140950792326938,
"grad_norm": 0.35570769296041677,
"learning_rate": 3.462349330671484e-06,
"loss": 0.9161,
"step": 286
},
{
"epoch": 1.920767306088407,
"grad_norm": 0.35749623557347504,
"learning_rate": 3.4252157444569478e-06,
"loss": 0.9152,
"step": 287
},
{
"epoch": 1.92743953294412,
"grad_norm": 0.3377788624601496,
"learning_rate": 3.388178334194232e-06,
"loss": 0.925,
"step": 288
},
{
"epoch": 1.9341117597998332,
"grad_norm": 0.3798661249068725,
"learning_rate": 3.351239361849168e-06,
"loss": 0.9846,
"step": 289
},
{
"epoch": 1.9407839866555463,
"grad_norm": 0.29999286355310334,
"learning_rate": 3.314401083375748e-06,
"loss": 0.9753,
"step": 290
},
{
"epoch": 1.9474562135112594,
"grad_norm": 0.35781167804812974,
"learning_rate": 3.2776657485783357e-06,
"loss": 0.9487,
"step": 291
},
{
"epoch": 1.9541284403669725,
"grad_norm": 0.3084397268778666,
"learning_rate": 3.2410356009742784e-06,
"loss": 1.0787,
"step": 292
},
{
"epoch": 1.9608006672226854,
"grad_norm": 0.317448351646333,
"learning_rate": 3.2045128776568783e-06,
"loss": 0.9257,
"step": 293
},
{
"epoch": 1.9674728940783988,
"grad_norm": 0.3686880962008835,
"learning_rate": 3.168099809158769e-06,
"loss": 0.9529,
"step": 294
},
{
"epoch": 1.9741451209341117,
"grad_norm": 0.30324127420271724,
"learning_rate": 3.1317986193157023e-06,
"loss": 0.9185,
"step": 295
},
{
"epoch": 1.980817347789825,
"grad_norm": 0.3439386421881959,
"learning_rate": 3.095611525130716e-06,
"loss": 1.1113,
"step": 296
},
{
"epoch": 1.9874895746455379,
"grad_norm": 0.31828525154875525,
"learning_rate": 3.059540736638751e-06,
"loss": 0.9624,
"step": 297
},
{
"epoch": 1.994161801501251,
"grad_norm": 0.3528601662749728,
"learning_rate": 3.023588456771674e-06,
"loss": 1.025,
"step": 298
},
{
"epoch": 2.006672226855713,
"grad_norm": 1.0381811083452561,
"learning_rate": 2.9877568812237325e-06,
"loss": 1.8823,
"step": 299
},
{
"epoch": 2.0133444537114262,
"grad_norm": 0.28745904791303756,
"learning_rate": 2.9520481983174675e-06,
"loss": 1.0051,
"step": 300
},
{
"epoch": 2.020016680567139,
"grad_norm": 0.3037470770313497,
"learning_rate": 2.916464588870067e-06,
"loss": 1.0253,
"step": 301
},
{
"epoch": 2.0266889074228525,
"grad_norm": 0.3264286058702715,
"learning_rate": 2.881008226060168e-06,
"loss": 0.9722,
"step": 302
},
{
"epoch": 2.0333611342785654,
"grad_norm": 0.3168792873407282,
"learning_rate": 2.8456812752951483e-06,
"loss": 0.9199,
"step": 303
},
{
"epoch": 2.0400333611342787,
"grad_norm": 0.3658230155927171,
"learning_rate": 2.8104858940788705e-06,
"loss": 0.9747,
"step": 304
},
{
"epoch": 2.0467055879899916,
"grad_norm": 0.3196066484806665,
"learning_rate": 2.7754242318799174e-06,
"loss": 0.9597,
"step": 305
},
{
"epoch": 2.053377814845705,
"grad_norm": 0.36214258330419746,
"learning_rate": 2.740498430000332e-06,
"loss": 0.9637,
"step": 306
},
{
"epoch": 2.060050041701418,
"grad_norm": 0.32144023431445257,
"learning_rate": 2.7057106214448216e-06,
"loss": 1.0071,
"step": 307
},
{
"epoch": 2.066722268557131,
"grad_norm": 0.31788167947698615,
"learning_rate": 2.671062930790511e-06,
"loss": 0.9324,
"step": 308
},
{
"epoch": 2.073394495412844,
"grad_norm": 0.32411973935094873,
"learning_rate": 2.636557474057173e-06,
"loss": 0.9213,
"step": 309
},
{
"epoch": 2.080066722268557,
"grad_norm": 0.3061895381140455,
"learning_rate": 2.6021963585780106e-06,
"loss": 0.9829,
"step": 310
},
{
"epoch": 2.0867389491242703,
"grad_norm": 0.2829970996541261,
"learning_rate": 2.567981682870946e-06,
"loss": 1.0112,
"step": 311
},
{
"epoch": 2.093411175979983,
"grad_norm": 0.3251773046313095,
"learning_rate": 2.533915536510464e-06,
"loss": 0.962,
"step": 312
},
{
"epoch": 2.1000834028356965,
"grad_norm": 0.3117638913905796,
"learning_rate": 2.5000000000000015e-06,
"loss": 1.0286,
"step": 313
},
{
"epoch": 2.1067556296914094,
"grad_norm": 0.3055554951200164,
"learning_rate": 2.466237144644874e-06,
"loss": 0.9782,
"step": 314
},
{
"epoch": 2.1134278565471227,
"grad_norm": 0.3093110501459989,
"learning_rate": 2.4326290324257896e-06,
"loss": 1.0203,
"step": 315
},
{
"epoch": 2.1201000834028356,
"grad_norm": 0.348167844578145,
"learning_rate": 2.3991777158729102e-06,
"loss": 0.9729,
"step": 316
},
{
"epoch": 2.126772310258549,
"grad_norm": 0.35896036414914057,
"learning_rate": 2.3658852379404973e-06,
"loss": 0.9687,
"step": 317
},
{
"epoch": 2.133444537114262,
"grad_norm": 0.33836897224758,
"learning_rate": 2.3327536318821496e-06,
"loss": 0.979,
"step": 318
},
{
"epoch": 2.140116763969975,
"grad_norm": 0.37686877855977563,
"learning_rate": 2.299784921126622e-06,
"loss": 0.9513,
"step": 319
},
{
"epoch": 2.146788990825688,
"grad_norm": 0.355801020401147,
"learning_rate": 2.2669811191542547e-06,
"loss": 0.9859,
"step": 320
},
{
"epoch": 2.153461217681401,
"grad_norm": 0.3010215082303293,
"learning_rate": 2.234344229374003e-06,
"loss": 0.9951,
"step": 321
},
{
"epoch": 2.1601334445371143,
"grad_norm": 0.32911989661603686,
"learning_rate": 2.20187624500108e-06,
"loss": 0.9708,
"step": 322
},
{
"epoch": 2.166805671392827,
"grad_norm": 0.3623670709775593,
"learning_rate": 2.1695791489352346e-06,
"loss": 0.9734,
"step": 323
},
{
"epoch": 2.1734778982485405,
"grad_norm": 0.31909897986159624,
"learning_rate": 2.1374549136396417e-06,
"loss": 0.9517,
"step": 324
},
{
"epoch": 2.1801501251042534,
"grad_norm": 0.31749439512516914,
"learning_rate": 2.1055055010204427e-06,
"loss": 0.9848,
"step": 325
},
{
"epoch": 2.1868223519599668,
"grad_norm": 0.29264160902990144,
"learning_rate": 2.073732862306935e-06,
"loss": 1.0053,
"step": 326
},
{
"epoch": 2.1934945788156797,
"grad_norm": 0.3668210964835384,
"learning_rate": 2.042138937932388e-06,
"loss": 1.0157,
"step": 327
},
{
"epoch": 2.200166805671393,
"grad_norm": 0.3133785285507638,
"learning_rate": 2.0107256574155564e-06,
"loss": 0.9808,
"step": 328
},
{
"epoch": 2.206839032527106,
"grad_norm": 0.3124366924714227,
"learning_rate": 1.979494939242822e-06,
"loss": 0.979,
"step": 329
},
{
"epoch": 2.213511259382819,
"grad_norm": 0.29545310894796867,
"learning_rate": 1.9484486907510405e-06,
"loss": 0.997,
"step": 330
},
{
"epoch": 2.220183486238532,
"grad_norm": 0.32367677669293343,
"learning_rate": 1.917588808011045e-06,
"loss": 0.8975,
"step": 331
},
{
"epoch": 2.226855713094245,
"grad_norm": 0.3299013625375276,
"learning_rate": 1.8869171757118554e-06,
"loss": 1.0025,
"step": 332
},
{
"epoch": 2.2335279399499584,
"grad_norm": 0.395626306344832,
"learning_rate": 1.856435667045577e-06,
"loss": 1.0349,
"step": 333
},
{
"epoch": 2.2402001668056712,
"grad_norm": 0.3255623940901644,
"learning_rate": 1.8261461435929895e-06,
"loss": 0.9994,
"step": 334
},
{
"epoch": 2.2468723936613846,
"grad_norm": 0.28569255358940876,
"learning_rate": 1.796050455209869e-06,
"loss": 0.9491,
"step": 335
},
{
"epoch": 2.2535446205170975,
"grad_norm": 0.29893680579074805,
"learning_rate": 1.7661504399140066e-06,
"loss": 0.979,
"step": 336
},
{
"epoch": 2.260216847372811,
"grad_norm": 0.3459559430460262,
"learning_rate": 1.7364479237729526e-06,
"loss": 0.9042,
"step": 337
},
{
"epoch": 2.2668890742285237,
"grad_norm": 0.3113640341471141,
"learning_rate": 1.7069447207924994e-06,
"loss": 0.9476,
"step": 338
},
{
"epoch": 2.273561301084237,
"grad_norm": 0.29758501306847707,
"learning_rate": 1.677642632805892e-06,
"loss": 0.9388,
"step": 339
},
{
"epoch": 2.28023352793995,
"grad_norm": 0.31704153540027663,
"learning_rate": 1.6485434493637915e-06,
"loss": 0.9918,
"step": 340
},
{
"epoch": 2.2869057547956633,
"grad_norm": 0.3475571370836834,
"learning_rate": 1.6196489476249777e-06,
"loss": 1.0017,
"step": 341
},
{
"epoch": 2.293577981651376,
"grad_norm": 0.3114674457835494,
"learning_rate": 1.5909608922478108e-06,
"loss": 1.0053,
"step": 342
},
{
"epoch": 2.300250208507089,
"grad_norm": 0.30298813780086953,
"learning_rate": 1.5624810352824709e-06,
"loss": 0.9641,
"step": 343
},
{
"epoch": 2.3069224353628024,
"grad_norm": 0.35762529414587724,
"learning_rate": 1.5342111160639412e-06,
"loss": 0.9616,
"step": 344
},
{
"epoch": 2.3135946622185153,
"grad_norm": 0.37005906694872853,
"learning_rate": 1.5061528611057917e-06,
"loss": 1.0249,
"step": 345
},
{
"epoch": 2.3202668890742286,
"grad_norm": 0.30759447735274864,
"learning_rate": 1.4783079839947396e-06,
"loss": 0.9851,
"step": 346
},
{
"epoch": 2.3269391159299415,
"grad_norm": 0.3014932344261013,
"learning_rate": 1.4506781852859836e-06,
"loss": 1.0052,
"step": 347
},
{
"epoch": 2.333611342785655,
"grad_norm": 0.31167411900209185,
"learning_rate": 1.4232651523993635e-06,
"loss": 0.956,
"step": 348
},
{
"epoch": 2.3402835696413677,
"grad_norm": 0.32652379262644254,
"learning_rate": 1.3960705595162876e-06,
"loss": 0.9825,
"step": 349
},
{
"epoch": 2.346955796497081,
"grad_norm": 0.3209030562572718,
"learning_rate": 1.369096067477505e-06,
"loss": 1.0177,
"step": 350
},
{
"epoch": 2.353628023352794,
"grad_norm": 0.3562229549612574,
"learning_rate": 1.3423433236816563e-06,
"loss": 0.9955,
"step": 351
},
{
"epoch": 2.360300250208507,
"grad_norm": 0.31080353838712754,
"learning_rate": 1.3158139619846734e-06,
"loss": 0.993,
"step": 352
},
{
"epoch": 2.36697247706422,
"grad_norm": 0.32276338892191286,
"learning_rate": 1.289509602599996e-06,
"loss": 0.9847,
"step": 353
},
{
"epoch": 2.373644703919933,
"grad_norm": 0.3162201171120057,
"learning_rate": 1.2634318519996148e-06,
"loss": 0.9645,
"step": 354
},
{
"epoch": 2.3803169307756464,
"grad_norm": 0.31907787951020794,
"learning_rate": 1.2375823028159667e-06,
"loss": 0.9949,
"step": 355
},
{
"epoch": 2.3869891576313593,
"grad_norm": 0.330999449587722,
"learning_rate": 1.2119625337446673e-06,
"loss": 0.9617,
"step": 356
},
{
"epoch": 2.3936613844870727,
"grad_norm": 0.37560901242187644,
"learning_rate": 1.186574109448091e-06,
"loss": 0.965,
"step": 357
},
{
"epoch": 2.4003336113427856,
"grad_norm": 0.3032851792093919,
"learning_rate": 1.16141858045982e-06,
"loss": 0.969,
"step": 358
},
{
"epoch": 2.407005838198499,
"grad_norm": 0.3120802157192034,
"learning_rate": 1.1364974830899438e-06,
"loss": 0.9821,
"step": 359
},
{
"epoch": 2.413678065054212,
"grad_norm": 0.34540220887991835,
"learning_rate": 1.1118123393312397e-06,
"loss": 0.9535,
"step": 360
},
{
"epoch": 2.420350291909925,
"grad_norm": 0.29306217474195717,
"learning_rate": 1.0873646567662165e-06,
"loss": 0.9548,
"step": 361
},
{
"epoch": 2.427022518765638,
"grad_norm": 0.3275471084256797,
"learning_rate": 1.0631559284750398e-06,
"loss": 0.9208,
"step": 362
},
{
"epoch": 2.4336947456213514,
"grad_norm": 0.3237098002556768,
"learning_rate": 1.0391876329443534e-06,
"loss": 0.9896,
"step": 363
},
{
"epoch": 2.4403669724770642,
"grad_norm": 0.31683152915795526,
"learning_rate": 1.0154612339769777e-06,
"loss": 0.9515,
"step": 364
},
{
"epoch": 2.447039199332777,
"grad_norm": 0.31260220662008054,
"learning_rate": 9.919781806025136e-07,
"loss": 0.9284,
"step": 365
},
{
"epoch": 2.4537114261884905,
"grad_norm": 0.3196141247915384,
"learning_rate": 9.687399069888515e-07,
"loss": 1.0529,
"step": 366
},
{
"epoch": 2.4603836530442034,
"grad_norm": 0.3390686830414771,
"learning_rate": 9.457478323545749e-07,
"loss": 0.906,
"step": 367
},
{
"epoch": 2.4670558798999167,
"grad_norm": 0.2976435803727035,
"learning_rate": 9.23003360882293e-07,
"loss": 0.9914,
"step": 368
},
{
"epoch": 2.4737281067556296,
"grad_norm": 0.3256745610569351,
"learning_rate": 9.005078816328772e-07,
"loss": 0.9923,
"step": 369
},
{
"epoch": 2.480400333611343,
"grad_norm": 0.3105480823035307,
"learning_rate": 8.782627684606332e-07,
"loss": 0.9996,
"step": 370
},
{
"epoch": 2.487072560467056,
"grad_norm": 0.33299836342381517,
"learning_rate": 8.562693799293931e-07,
"loss": 0.96,
"step": 371
},
{
"epoch": 2.493744787322769,
"grad_norm": 0.3114815813330825,
"learning_rate": 8.345290592295429e-07,
"loss": 0.9143,
"step": 372
},
{
"epoch": 2.500417014178482,
"grad_norm": 0.28937151523319904,
"learning_rate": 8.130431340959982e-07,
"loss": 0.9746,
"step": 373
},
{
"epoch": 2.507089241034195,
"grad_norm": 0.32619060975827346,
"learning_rate": 7.918129167271055e-07,
"loss": 0.9561,
"step": 374
},
{
"epoch": 2.5137614678899083,
"grad_norm": 0.31360364241751837,
"learning_rate": 7.708397037045129e-07,
"loss": 0.8092,
"step": 375
},
{
"epoch": 2.5204336947456216,
"grad_norm": 0.30558210448525414,
"learning_rate": 7.50124775913979e-07,
"loss": 0.9529,
"step": 376
},
{
"epoch": 2.5271059216013345,
"grad_norm": 0.32199398622868297,
"learning_rate": 7.296693984671465e-07,
"loss": 0.9798,
"step": 377
},
{
"epoch": 2.5337781484570474,
"grad_norm": 0.32592296410779964,
"learning_rate": 7.094748206242797e-07,
"loss": 1.0314,
"step": 378
},
{
"epoch": 2.5404503753127607,
"grad_norm": 0.3464187438731346,
"learning_rate": 6.895422757179682e-07,
"loss": 0.9811,
"step": 379
},
{
"epoch": 2.5471226021684736,
"grad_norm": 0.36090219930856193,
"learning_rate": 6.698729810778065e-07,
"loss": 0.9489,
"step": 380
},
{
"epoch": 2.553794829024187,
"grad_norm": 0.31856176752128185,
"learning_rate": 6.50468137956049e-07,
"loss": 0.9542,
"step": 381
},
{
"epoch": 2.5604670558799,
"grad_norm": 0.31996625435359055,
"learning_rate": 6.313289314542392e-07,
"loss": 0.9717,
"step": 382
},
{
"epoch": 2.5671392827356128,
"grad_norm": 0.3486660778765644,
"learning_rate": 6.12456530450844e-07,
"loss": 1.047,
"step": 383
},
{
"epoch": 2.573811509591326,
"grad_norm": 0.40268147548442224,
"learning_rate": 5.938520875298587e-07,
"loss": 1.0224,
"step": 384
},
{
"epoch": 2.5804837364470394,
"grad_norm": 0.3676526483372923,
"learning_rate": 5.755167389104166e-07,
"loss": 0.9871,
"step": 385
},
{
"epoch": 2.5871559633027523,
"grad_norm": 0.29607309034948626,
"learning_rate": 5.574516043774059e-07,
"loss": 0.9608,
"step": 386
},
{
"epoch": 2.593828190158465,
"grad_norm": 0.3351255483218255,
"learning_rate": 5.396577872130676e-07,
"loss": 0.9818,
"step": 387
},
{
"epoch": 2.6005004170141786,
"grad_norm": 0.3153076169997922,
"learning_rate": 5.221363741296298e-07,
"loss": 0.9962,
"step": 388
},
{
"epoch": 2.6071726438698914,
"grad_norm": 0.3816251930803852,
"learning_rate": 5.048884352029271e-07,
"loss": 0.9757,
"step": 389
},
{
"epoch": 2.613844870725605,
"grad_norm": 0.31190657046064496,
"learning_rate": 4.879150238070585e-07,
"loss": 0.9948,
"step": 390
},
{
"epoch": 2.6205170975813177,
"grad_norm": 0.38637780955296236,
"learning_rate": 4.712171765500484e-07,
"loss": 0.9479,
"step": 391
},
{
"epoch": 2.627189324437031,
"grad_norm": 0.3428247774477413,
"learning_rate": 4.5479591321053895e-07,
"loss": 0.9882,
"step": 392
},
{
"epoch": 2.633861551292744,
"grad_norm": 0.3418954616257825,
"learning_rate": 4.386522366755169e-07,
"loss": 0.9817,
"step": 393
},
{
"epoch": 2.6405337781484572,
"grad_norm": 0.30360358878330007,
"learning_rate": 4.2278713287905335e-07,
"loss": 0.9702,
"step": 394
},
{
"epoch": 2.64720600500417,
"grad_norm": 0.3262516329197568,
"learning_rate": 4.072015707421006e-07,
"loss": 0.9566,
"step": 395
},
{
"epoch": 2.653878231859883,
"grad_norm": 0.29298754809339406,
"learning_rate": 3.918965021133131e-07,
"loss": 0.9991,
"step": 396
},
{
"epoch": 2.6605504587155964,
"grad_norm": 0.3523770371797201,
"learning_rate": 3.7687286171091355e-07,
"loss": 0.9052,
"step": 397
},
{
"epoch": 2.6672226855713093,
"grad_norm": 0.3286065151191008,
"learning_rate": 3.621315670656117e-07,
"loss": 1.0313,
"step": 398
},
{
"epoch": 2.6738949124270226,
"grad_norm": 0.3327745593564552,
"learning_rate": 3.4767351846456744e-07,
"loss": 0.9391,
"step": 399
},
{
"epoch": 2.6805671392827355,
"grad_norm": 0.33904862886378573,
"learning_rate": 3.3349959889640516e-07,
"loss": 0.9358,
"step": 400
},
{
"epoch": 2.687239366138449,
"grad_norm": 0.327976050954257,
"learning_rate": 3.196106739972926e-07,
"loss": 1.0326,
"step": 401
},
{
"epoch": 2.6939115929941617,
"grad_norm": 0.3193292187888942,
"learning_rate": 3.0600759199806815e-07,
"loss": 0.9541,
"step": 402
},
{
"epoch": 2.700583819849875,
"grad_norm": 0.3774921796009617,
"learning_rate": 2.9269118367244385e-07,
"loss": 0.9841,
"step": 403
},
{
"epoch": 2.707256046705588,
"grad_norm": 0.31482273028522967,
"learning_rate": 2.7966226228626156e-07,
"loss": 0.9702,
"step": 404
},
{
"epoch": 2.713928273561301,
"grad_norm": 0.2902681756785998,
"learning_rate": 2.669216235478295e-07,
"loss": 0.8949,
"step": 405
},
{
"epoch": 2.720600500417014,
"grad_norm": 0.3358268040426097,
"learning_rate": 2.544700455593252e-07,
"loss": 0.9286,
"step": 406
},
{
"epoch": 2.7272727272727275,
"grad_norm": 0.3223051627272287,
"learning_rate": 2.4230828876927293e-07,
"loss": 0.9739,
"step": 407
},
{
"epoch": 2.7339449541284404,
"grad_norm": 0.31524135263431996,
"learning_rate": 2.3043709592610486e-07,
"loss": 0.9046,
"step": 408
},
{
"epoch": 2.7406171809841533,
"grad_norm": 0.30097063718894235,
"learning_rate": 2.1885719203279587e-07,
"loss": 0.9438,
"step": 409
},
{
"epoch": 2.7472894078398666,
"grad_norm": 0.35005277308808397,
"learning_rate": 2.0756928430258838e-07,
"loss": 0.986,
"step": 410
},
{
"epoch": 2.7539616346955795,
"grad_norm": 0.28128342646548465,
"learning_rate": 1.9657406211579966e-07,
"loss": 0.9488,
"step": 411
},
{
"epoch": 2.760633861551293,
"grad_norm": 0.30197051093171356,
"learning_rate": 1.8587219697771942e-07,
"loss": 0.9738,
"step": 412
},
{
"epoch": 2.7673060884070058,
"grad_norm": 0.31320873231724333,
"learning_rate": 1.7546434247760147e-07,
"loss": 0.9077,
"step": 413
},
{
"epoch": 2.773978315262719,
"grad_norm": 0.3281071984763327,
"learning_rate": 1.6535113424874683e-07,
"loss": 0.9133,
"step": 414
},
{
"epoch": 2.780650542118432,
"grad_norm": 0.34179101949267504,
"learning_rate": 1.555331899296808e-07,
"loss": 1.0089,
"step": 415
},
{
"epoch": 2.7873227689741453,
"grad_norm": 0.31629478659692434,
"learning_rate": 1.460111091264377e-07,
"loss": 0.9674,
"step": 416
},
{
"epoch": 2.793994995829858,
"grad_norm": 0.3248237469597133,
"learning_rate": 1.3678547337593494e-07,
"loss": 1.0218,
"step": 417
},
{
"epoch": 2.800667222685571,
"grad_norm": 0.31720738943716026,
"learning_rate": 1.2785684611046345e-07,
"loss": 0.9407,
"step": 418
},
{
"epoch": 2.8073394495412844,
"grad_norm": 0.2767770769664715,
"learning_rate": 1.1922577262327374e-07,
"loss": 0.9387,
"step": 419
},
{
"epoch": 2.8140116763969973,
"grad_norm": 0.34714214699914475,
"learning_rate": 1.1089278003527438e-07,
"loss": 1.0192,
"step": 420
},
{
"epoch": 2.8206839032527107,
"grad_norm": 0.29122863365999424,
"learning_rate": 1.0285837726283999e-07,
"loss": 0.9915,
"step": 421
},
{
"epoch": 2.8273561301084236,
"grad_norm": 0.35153123474849546,
"learning_rate": 9.512305498672936e-08,
"loss": 1.0139,
"step": 422
},
{
"epoch": 2.834028356964137,
"grad_norm": 0.3686890788414913,
"learning_rate": 8.768728562211948e-08,
"loss": 0.9661,
"step": 423
},
{
"epoch": 2.84070058381985,
"grad_norm": 0.3017566191381606,
"learning_rate": 8.055152328975357e-08,
"loss": 0.9603,
"step": 424
},
{
"epoch": 2.847372810675563,
"grad_norm": 0.37821440247787674,
"learning_rate": 7.371620378820555e-08,
"loss": 1.0185,
"step": 425
},
{
"epoch": 2.854045037531276,
"grad_norm": 0.31177981740624416,
"learning_rate": 6.718174456726789e-08,
"loss": 0.9297,
"step": 426
},
{
"epoch": 2.860717264386989,
"grad_norm": 0.31802350259904644,
"learning_rate": 6.094854470245326e-08,
"loss": 0.9828,
"step": 427
},
{
"epoch": 2.8673894912427023,
"grad_norm": 0.3344812561095061,
"learning_rate": 5.501698487062446e-08,
"loss": 0.9445,
"step": 428
},
{
"epoch": 2.8740617180984156,
"grad_norm": 0.31576512427963244,
"learning_rate": 4.9387427326745287e-08,
"loss": 0.9475,
"step": 429
},
{
"epoch": 2.8807339449541285,
"grad_norm": 0.3157169459636376,
"learning_rate": 4.4060215881755466e-08,
"loss": 0.9258,
"step": 430
},
{
"epoch": 2.8874061718098414,
"grad_norm": 0.3121956356964619,
"learning_rate": 3.903567588157353e-08,
"loss": 0.9958,
"step": 431
},
{
"epoch": 2.8940783986655547,
"grad_norm": 0.34863887849526165,
"learning_rate": 3.431411418722941e-08,
"loss": 0.9645,
"step": 432
},
{
"epoch": 2.9007506255212676,
"grad_norm": 0.35248694493395094,
"learning_rate": 2.989581915611994e-08,
"loss": 0.9588,
"step": 433
},
{
"epoch": 2.907422852376981,
"grad_norm": 0.3201864823159724,
"learning_rate": 2.5781060624401888e-08,
"loss": 0.9367,
"step": 434
},
{
"epoch": 2.914095079232694,
"grad_norm": 0.30304305615722416,
"learning_rate": 2.1970089890509527e-08,
"loss": 0.9279,
"step": 435
},
{
"epoch": 2.9207673060884067,
"grad_norm": 0.32097833547711685,
"learning_rate": 1.8463139699808618e-08,
"loss": 0.9111,
"step": 436
},
{
"epoch": 2.92743953294412,
"grad_norm": 0.3304128602909116,
"learning_rate": 1.5260424230382763e-08,
"loss": 0.9618,
"step": 437
},
{
"epoch": 2.9341117597998334,
"grad_norm": 0.3149926167057802,
"learning_rate": 1.2362139079949431e-08,
"loss": 0.9075,
"step": 438
},
{
"epoch": 2.9407839866555463,
"grad_norm": 0.34302106973635493,
"learning_rate": 9.768461253920614e-09,
"loss": 0.9448,
"step": 439
},
{
"epoch": 2.947456213511259,
"grad_norm": 0.3856805298465123,
"learning_rate": 7.479549154585376e-09,
"loss": 0.9606,
"step": 440
},
{
"epoch": 2.9541284403669725,
"grad_norm": 0.31010665321437775,
"learning_rate": 5.495542571443135e-09,
"loss": 0.9413,
"step": 441
},
{
"epoch": 2.9608006672226854,
"grad_norm": 0.3193071342884102,
"learning_rate": 3.816562672658841e-09,
"loss": 0.9947,
"step": 442
},
{
"epoch": 2.9674728940783988,
"grad_norm": 0.36599714891183205,
"learning_rate": 2.442711997670544e-09,
"loss": 1.0042,
"step": 443
},
{
"epoch": 2.9741451209341117,
"grad_norm": 0.33943389099897936,
"learning_rate": 1.3740744509205263e-09,
"loss": 1.0029,
"step": 444
},
{
"epoch": 2.980817347789825,
"grad_norm": 0.3000657499209369,
"learning_rate": 6.107152967349539e-10,
"loss": 0.9445,
"step": 445
},
{
"epoch": 2.987489574645538,
"grad_norm": 0.3424593362346738,
"learning_rate": 1.526811553370644e-10,
"loss": 0.9586,
"step": 446
},
{
"epoch": 2.994161801501251,
"grad_norm": 0.325670824774226,
"learning_rate": 0.0,
"loss": 0.9889,
"step": 447
},
{
"epoch": 2.994161801501251,
"step": 447,
"total_flos": 1153541636358144.0,
"train_loss": 1.1157796034876932,
"train_runtime": 7535.4115,
"train_samples_per_second": 2.864,
"train_steps_per_second": 0.059
}
],
"logging_steps": 1,
"max_steps": 447,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 30,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1153541636358144.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}