{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.993948562783661,
"eval_steps": 500,
"global_step": 440,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0045385779122541605,
"grad_norm": 12.459190368652344,
"learning_rate": 5.0000000000000004e-08,
"loss": 4.6369,
"step": 1
},
{
"epoch": 0.009077155824508321,
"grad_norm": 12.756917953491211,
"learning_rate": 1.0000000000000001e-07,
"loss": 4.6738,
"step": 2
},
{
"epoch": 0.01361573373676248,
"grad_norm": 15.50593090057373,
"learning_rate": 1.5000000000000002e-07,
"loss": 4.6681,
"step": 3
},
{
"epoch": 0.018154311649016642,
"grad_norm": 12.976861000061035,
"learning_rate": 2.0000000000000002e-07,
"loss": 4.246,
"step": 4
},
{
"epoch": 0.0226928895612708,
"grad_norm": 14.594775199890137,
"learning_rate": 2.5000000000000004e-07,
"loss": 4.4452,
"step": 5
},
{
"epoch": 0.02723146747352496,
"grad_norm": 16.087888717651367,
"learning_rate": 3.0000000000000004e-07,
"loss": 4.5528,
"step": 6
},
{
"epoch": 0.03177004538577912,
"grad_norm": 13.62125301361084,
"learning_rate": 3.5000000000000004e-07,
"loss": 4.5628,
"step": 7
},
{
"epoch": 0.036308623298033284,
"grad_norm": 17.010616302490234,
"learning_rate": 4.0000000000000003e-07,
"loss": 4.5768,
"step": 8
},
{
"epoch": 0.04084720121028744,
"grad_norm": 14.567586898803711,
"learning_rate": 4.5000000000000003e-07,
"loss": 4.3937,
"step": 9
},
{
"epoch": 0.0453857791225416,
"grad_norm": 15.238479614257812,
"learning_rate": 5.000000000000001e-07,
"loss": 4.495,
"step": 10
},
{
"epoch": 0.049924357034795766,
"grad_norm": 13.89387321472168,
"learning_rate": 5.5e-07,
"loss": 4.4705,
"step": 11
},
{
"epoch": 0.05446293494704992,
"grad_norm": 13.949710845947266,
"learning_rate": 6.000000000000001e-07,
"loss": 4.328,
"step": 12
},
{
"epoch": 0.059001512859304085,
"grad_norm": 13.15122127532959,
"learning_rate": 6.5e-07,
"loss": 4.4296,
"step": 13
},
{
"epoch": 0.06354009077155824,
"grad_norm": 14.076905250549316,
"learning_rate": 7.000000000000001e-07,
"loss": 4.5285,
"step": 14
},
{
"epoch": 0.0680786686838124,
"grad_norm": 14.299891471862793,
"learning_rate": 7.5e-07,
"loss": 4.5849,
"step": 15
},
{
"epoch": 0.07261724659606657,
"grad_norm": 13.573644638061523,
"learning_rate": 8.000000000000001e-07,
"loss": 4.3638,
"step": 16
},
{
"epoch": 0.07715582450832073,
"grad_norm": 14.140484809875488,
"learning_rate": 8.500000000000001e-07,
"loss": 4.4449,
"step": 17
},
{
"epoch": 0.08169440242057488,
"grad_norm": 11.941351890563965,
"learning_rate": 9.000000000000001e-07,
"loss": 4.3646,
"step": 18
},
{
"epoch": 0.08623298033282904,
"grad_norm": 10.630327224731445,
"learning_rate": 9.500000000000001e-07,
"loss": 4.2902,
"step": 19
},
{
"epoch": 0.0907715582450832,
"grad_norm": 11.662637710571289,
"learning_rate": 1.0000000000000002e-06,
"loss": 4.1961,
"step": 20
},
{
"epoch": 0.09531013615733737,
"grad_norm": 12.008113861083984,
"learning_rate": 1.0500000000000001e-06,
"loss": 4.1941,
"step": 21
},
{
"epoch": 0.09984871406959153,
"grad_norm": 12.686023712158203,
"learning_rate": 1.1e-06,
"loss": 4.1346,
"step": 22
},
{
"epoch": 0.1043872919818457,
"grad_norm": 12.529243469238281,
"learning_rate": 1.1500000000000002e-06,
"loss": 4.1137,
"step": 23
},
{
"epoch": 0.10892586989409984,
"grad_norm": 11.19096565246582,
"learning_rate": 1.2000000000000002e-06,
"loss": 4.0772,
"step": 24
},
{
"epoch": 0.11346444780635401,
"grad_norm": 11.182024955749512,
"learning_rate": 1.25e-06,
"loss": 3.8888,
"step": 25
},
{
"epoch": 0.11800302571860817,
"grad_norm": 9.899381637573242,
"learning_rate": 1.3e-06,
"loss": 4.0117,
"step": 26
},
{
"epoch": 0.12254160363086233,
"grad_norm": 9.448798179626465,
"learning_rate": 1.3500000000000002e-06,
"loss": 3.8384,
"step": 27
},
{
"epoch": 0.12708018154311648,
"grad_norm": 10.632583618164062,
"learning_rate": 1.4000000000000001e-06,
"loss": 3.7857,
"step": 28
},
{
"epoch": 0.13161875945537066,
"grad_norm": 10.56238079071045,
"learning_rate": 1.45e-06,
"loss": 3.4783,
"step": 29
},
{
"epoch": 0.1361573373676248,
"grad_norm": 12.410117149353027,
"learning_rate": 1.5e-06,
"loss": 3.4828,
"step": 30
},
{
"epoch": 0.14069591527987896,
"grad_norm": 10.183599472045898,
"learning_rate": 1.5500000000000002e-06,
"loss": 3.6337,
"step": 31
},
{
"epoch": 0.14523449319213314,
"grad_norm": 9.421585083007812,
"learning_rate": 1.6000000000000001e-06,
"loss": 3.4453,
"step": 32
},
{
"epoch": 0.14977307110438728,
"grad_norm": 9.230025291442871,
"learning_rate": 1.6500000000000003e-06,
"loss": 3.3481,
"step": 33
},
{
"epoch": 0.15431164901664146,
"grad_norm": 8.295567512512207,
"learning_rate": 1.7000000000000002e-06,
"loss": 3.3145,
"step": 34
},
{
"epoch": 0.1588502269288956,
"grad_norm": 9.138203620910645,
"learning_rate": 1.75e-06,
"loss": 3.1872,
"step": 35
},
{
"epoch": 0.16338880484114976,
"grad_norm": 11.864872932434082,
"learning_rate": 1.8000000000000001e-06,
"loss": 2.9837,
"step": 36
},
{
"epoch": 0.16792738275340394,
"grad_norm": 12.373150825500488,
"learning_rate": 1.85e-06,
"loss": 3.0049,
"step": 37
},
{
"epoch": 0.17246596066565809,
"grad_norm": 21.665483474731445,
"learning_rate": 1.9000000000000002e-06,
"loss": 2.7664,
"step": 38
},
{
"epoch": 0.17700453857791226,
"grad_norm": 22.663740158081055,
"learning_rate": 1.9500000000000004e-06,
"loss": 2.7272,
"step": 39
},
{
"epoch": 0.1815431164901664,
"grad_norm": 26.674400329589844,
"learning_rate": 2.0000000000000003e-06,
"loss": 2.8077,
"step": 40
},
{
"epoch": 0.18608169440242056,
"grad_norm": 28.503612518310547,
"learning_rate": 2.05e-06,
"loss": 2.7306,
"step": 41
},
{
"epoch": 0.19062027231467474,
"grad_norm": 29.03409767150879,
"learning_rate": 2.1000000000000002e-06,
"loss": 2.7405,
"step": 42
},
{
"epoch": 0.1951588502269289,
"grad_norm": 17.844894409179688,
"learning_rate": 2.15e-06,
"loss": 2.6464,
"step": 43
},
{
"epoch": 0.19969742813918306,
"grad_norm": 19.220829010009766,
"learning_rate": 2.2e-06,
"loss": 2.4554,
"step": 44
},
{
"epoch": 0.2042360060514372,
"grad_norm": 25.161415100097656,
"learning_rate": 2.25e-06,
"loss": 2.2773,
"step": 45
},
{
"epoch": 0.2087745839636914,
"grad_norm": 17.46828269958496,
"learning_rate": 2.3000000000000004e-06,
"loss": 2.3392,
"step": 46
},
{
"epoch": 0.21331316187594554,
"grad_norm": 13.194672584533691,
"learning_rate": 2.35e-06,
"loss": 2.0409,
"step": 47
},
{
"epoch": 0.2178517397881997,
"grad_norm": 7.290500164031982,
"learning_rate": 2.4000000000000003e-06,
"loss": 2.1879,
"step": 48
},
{
"epoch": 0.22239031770045387,
"grad_norm": 7.56943941116333,
"learning_rate": 2.4500000000000003e-06,
"loss": 2.1656,
"step": 49
},
{
"epoch": 0.22692889561270801,
"grad_norm": 8.335527420043945,
"learning_rate": 2.5e-06,
"loss": 2.1294,
"step": 50
},
{
"epoch": 0.2314674735249622,
"grad_norm": 10.013853073120117,
"learning_rate": 2.55e-06,
"loss": 2.1816,
"step": 51
},
{
"epoch": 0.23600605143721634,
"grad_norm": 8.674482345581055,
"learning_rate": 2.6e-06,
"loss": 2.0078,
"step": 52
},
{
"epoch": 0.2405446293494705,
"grad_norm": 6.09174919128418,
"learning_rate": 2.6500000000000005e-06,
"loss": 2.1328,
"step": 53
},
{
"epoch": 0.24508320726172467,
"grad_norm": 7.3330488204956055,
"learning_rate": 2.7000000000000004e-06,
"loss": 1.8961,
"step": 54
},
{
"epoch": 0.24962178517397882,
"grad_norm": 8.764911651611328,
"learning_rate": 2.7500000000000004e-06,
"loss": 1.7026,
"step": 55
},
{
"epoch": 0.25416036308623297,
"grad_norm": 12.413115501403809,
"learning_rate": 2.8000000000000003e-06,
"loss": 1.8172,
"step": 56
},
{
"epoch": 0.2586989409984871,
"grad_norm": 9.009276390075684,
"learning_rate": 2.85e-06,
"loss": 1.6527,
"step": 57
},
{
"epoch": 0.2632375189107413,
"grad_norm": 9.965579986572266,
"learning_rate": 2.9e-06,
"loss": 1.6348,
"step": 58
},
{
"epoch": 0.26777609682299547,
"grad_norm": 7.091963768005371,
"learning_rate": 2.95e-06,
"loss": 1.7016,
"step": 59
},
{
"epoch": 0.2723146747352496,
"grad_norm": 5.276648998260498,
"learning_rate": 3e-06,
"loss": 1.7088,
"step": 60
},
{
"epoch": 0.27685325264750377,
"grad_norm": 9.634596824645996,
"learning_rate": 3.05e-06,
"loss": 1.4961,
"step": 61
},
{
"epoch": 0.2813918305597579,
"grad_norm": 6.663750648498535,
"learning_rate": 3.1000000000000004e-06,
"loss": 1.522,
"step": 62
},
{
"epoch": 0.2859304084720121,
"grad_norm": 36.79426956176758,
"learning_rate": 3.1500000000000003e-06,
"loss": 1.4855,
"step": 63
},
{
"epoch": 0.29046898638426627,
"grad_norm": 31.88567352294922,
"learning_rate": 3.2000000000000003e-06,
"loss": 1.5058,
"step": 64
},
{
"epoch": 0.2950075642965204,
"grad_norm": 12.319960594177246,
"learning_rate": 3.2500000000000002e-06,
"loss": 1.4547,
"step": 65
},
{
"epoch": 0.29954614220877457,
"grad_norm": 4.246046543121338,
"learning_rate": 3.3000000000000006e-06,
"loss": 1.3153,
"step": 66
},
{
"epoch": 0.3040847201210287,
"grad_norm": 5.3949503898620605,
"learning_rate": 3.3500000000000005e-06,
"loss": 1.4237,
"step": 67
},
{
"epoch": 0.3086232980332829,
"grad_norm": 9.975737571716309,
"learning_rate": 3.4000000000000005e-06,
"loss": 1.327,
"step": 68
},
{
"epoch": 0.31316187594553707,
"grad_norm": 6.356144428253174,
"learning_rate": 3.45e-06,
"loss": 1.2685,
"step": 69
},
{
"epoch": 0.3177004538577912,
"grad_norm": 3.9029836654663086,
"learning_rate": 3.5e-06,
"loss": 1.3856,
"step": 70
},
{
"epoch": 0.32223903177004537,
"grad_norm": 4.215930938720703,
"learning_rate": 3.5500000000000003e-06,
"loss": 1.1844,
"step": 71
},
{
"epoch": 0.3267776096822995,
"grad_norm": 27.889633178710938,
"learning_rate": 3.6000000000000003e-06,
"loss": 1.2535,
"step": 72
},
{
"epoch": 0.3313161875945537,
"grad_norm": 23.77273178100586,
"learning_rate": 3.65e-06,
"loss": 1.3189,
"step": 73
},
{
"epoch": 0.3358547655068079,
"grad_norm": 9.203927040100098,
"learning_rate": 3.7e-06,
"loss": 1.1993,
"step": 74
},
{
"epoch": 0.340393343419062,
"grad_norm": 3.1091806888580322,
"learning_rate": 3.7500000000000005e-06,
"loss": 1.1804,
"step": 75
},
{
"epoch": 0.34493192133131617,
"grad_norm": 9.0440034866333,
"learning_rate": 3.8000000000000005e-06,
"loss": 1.2318,
"step": 76
},
{
"epoch": 0.3494704992435703,
"grad_norm": 7.17051362991333,
"learning_rate": 3.85e-06,
"loss": 1.2018,
"step": 77
},
{
"epoch": 0.3540090771558245,
"grad_norm": 8.164457321166992,
"learning_rate": 3.900000000000001e-06,
"loss": 1.0911,
"step": 78
},
{
"epoch": 0.3585476550680787,
"grad_norm": 3.3671042919158936,
"learning_rate": 3.95e-06,
"loss": 1.1747,
"step": 79
},
{
"epoch": 0.3630862329803328,
"grad_norm": 4.223191261291504,
"learning_rate": 4.000000000000001e-06,
"loss": 1.1141,
"step": 80
},
{
"epoch": 0.367624810892587,
"grad_norm": 10.657241821289062,
"learning_rate": 4.05e-06,
"loss": 1.0506,
"step": 81
},
{
"epoch": 0.3721633888048411,
"grad_norm": 2.664783239364624,
"learning_rate": 4.1e-06,
"loss": 1.1271,
"step": 82
},
{
"epoch": 0.3767019667170953,
"grad_norm": 5.334985733032227,
"learning_rate": 4.15e-06,
"loss": 1.0812,
"step": 83
},
{
"epoch": 0.3812405446293495,
"grad_norm": 7.471070289611816,
"learning_rate": 4.2000000000000004e-06,
"loss": 1.1027,
"step": 84
},
{
"epoch": 0.3857791225416036,
"grad_norm": 3.270421028137207,
"learning_rate": 4.25e-06,
"loss": 1.054,
"step": 85
},
{
"epoch": 0.3903177004538578,
"grad_norm": 1.9382085800170898,
"learning_rate": 4.3e-06,
"loss": 0.975,
"step": 86
},
{
"epoch": 0.394856278366112,
"grad_norm": 24.2485408782959,
"learning_rate": 4.350000000000001e-06,
"loss": 1.0288,
"step": 87
},
{
"epoch": 0.39939485627836613,
"grad_norm": 42.359432220458984,
"learning_rate": 4.4e-06,
"loss": 1.111,
"step": 88
},
{
"epoch": 0.4039334341906203,
"grad_norm": 25.72220230102539,
"learning_rate": 4.450000000000001e-06,
"loss": 1.0472,
"step": 89
},
{
"epoch": 0.4084720121028744,
"grad_norm": 3.1196579933166504,
"learning_rate": 4.5e-06,
"loss": 1.0055,
"step": 90
},
{
"epoch": 0.4130105900151286,
"grad_norm": 4.584676742553711,
"learning_rate": 4.5500000000000005e-06,
"loss": 0.9848,
"step": 91
},
{
"epoch": 0.4175491679273828,
"grad_norm": 6.140016078948975,
"learning_rate": 4.600000000000001e-06,
"loss": 0.948,
"step": 92
},
{
"epoch": 0.42208774583963693,
"grad_norm": 5.266363620758057,
"learning_rate": 4.65e-06,
"loss": 0.9261,
"step": 93
},
{
"epoch": 0.4266263237518911,
"grad_norm": 3.6768720149993896,
"learning_rate": 4.7e-06,
"loss": 0.9772,
"step": 94
},
{
"epoch": 0.43116490166414523,
"grad_norm": 8.903961181640625,
"learning_rate": 4.75e-06,
"loss": 0.9514,
"step": 95
},
{
"epoch": 0.4357034795763994,
"grad_norm": 15.684305191040039,
"learning_rate": 4.800000000000001e-06,
"loss": 0.9629,
"step": 96
},
{
"epoch": 0.4402420574886536,
"grad_norm": 21.329519271850586,
"learning_rate": 4.85e-06,
"loss": 1.0118,
"step": 97
},
{
"epoch": 0.44478063540090773,
"grad_norm": 7.824005603790283,
"learning_rate": 4.9000000000000005e-06,
"loss": 0.9094,
"step": 98
},
{
"epoch": 0.4493192133131619,
"grad_norm": 2.283174514770508,
"learning_rate": 4.95e-06,
"loss": 0.8877,
"step": 99
},
{
"epoch": 0.45385779122541603,
"grad_norm": 5.028634548187256,
"learning_rate": 5e-06,
"loss": 0.8946,
"step": 100
},
{
"epoch": 0.4583963691376702,
"grad_norm": 6.072951793670654,
"learning_rate": 4.9999917112344245e-06,
"loss": 0.9248,
"step": 101
},
{
"epoch": 0.4629349470499244,
"grad_norm": 7.111969470977783,
"learning_rate": 4.999966844992657e-06,
"loss": 0.9172,
"step": 102
},
{
"epoch": 0.46747352496217853,
"grad_norm": 3.792372465133667,
"learning_rate": 4.999925401439588e-06,
"loss": 0.8718,
"step": 103
},
{
"epoch": 0.4720121028744327,
"grad_norm": 4.017702579498291,
"learning_rate": 4.999867380850031e-06,
"loss": 0.9121,
"step": 104
},
{
"epoch": 0.47655068078668683,
"grad_norm": 6.903019905090332,
"learning_rate": 4.99979278360872e-06,
"loss": 0.9359,
"step": 105
},
{
"epoch": 0.481089258698941,
"grad_norm": 3.740607500076294,
"learning_rate": 4.999701610210309e-06,
"loss": 0.8474,
"step": 106
},
{
"epoch": 0.4856278366111952,
"grad_norm": 3.379190683364868,
"learning_rate": 4.999593861259373e-06,
"loss": 0.8641,
"step": 107
},
{
"epoch": 0.49016641452344933,
"grad_norm": 3.0796492099761963,
"learning_rate": 4.999469537470394e-06,
"loss": 0.8255,
"step": 108
},
{
"epoch": 0.4947049924357035,
"grad_norm": 3.3708953857421875,
"learning_rate": 4.999328639667765e-06,
"loss": 0.8823,
"step": 109
},
{
"epoch": 0.49924357034795763,
"grad_norm": 3.2106549739837646,
"learning_rate": 4.999171168785783e-06,
"loss": 0.8584,
"step": 110
},
{
"epoch": 0.5037821482602118,
"grad_norm": 6.520877838134766,
"learning_rate": 4.998997125868638e-06,
"loss": 0.8718,
"step": 111
},
{
"epoch": 0.5083207261724659,
"grad_norm": 1.0179104804992676,
"learning_rate": 4.99880651207041e-06,
"loss": 0.871,
"step": 112
},
{
"epoch": 0.5128593040847201,
"grad_norm": 1.1883283853530884,
"learning_rate": 4.998599328655063e-06,
"loss": 0.8689,
"step": 113
},
{
"epoch": 0.5173978819969742,
"grad_norm": 1.2722241878509521,
"learning_rate": 4.998375576996431e-06,
"loss": 0.8594,
"step": 114
},
{
"epoch": 0.5219364599092284,
"grad_norm": 1.8931000232696533,
"learning_rate": 4.9981352585782154e-06,
"loss": 0.8327,
"step": 115
},
{
"epoch": 0.5264750378214826,
"grad_norm": 3.093480348587036,
"learning_rate": 4.997878374993971e-06,
"loss": 0.8372,
"step": 116
},
{
"epoch": 0.5310136157337367,
"grad_norm": 5.2650957107543945,
"learning_rate": 4.9976049279470955e-06,
"loss": 0.8386,
"step": 117
},
{
"epoch": 0.5355521936459909,
"grad_norm": 4.467101573944092,
"learning_rate": 4.997314919250818e-06,
"loss": 0.7788,
"step": 118
},
{
"epoch": 0.540090771558245,
"grad_norm": 3.614868640899658,
"learning_rate": 4.997008350828192e-06,
"loss": 0.874,
"step": 119
},
{
"epoch": 0.5446293494704992,
"grad_norm": 12.209319114685059,
"learning_rate": 4.996685224712077e-06,
"loss": 0.8223,
"step": 120
},
{
"epoch": 0.5491679273827534,
"grad_norm": 4.1903252601623535,
"learning_rate": 4.9963455430451245e-06,
"loss": 0.8455,
"step": 121
},
{
"epoch": 0.5537065052950075,
"grad_norm": 2.649549722671509,
"learning_rate": 4.9959893080797675e-06,
"loss": 0.7875,
"step": 122
},
{
"epoch": 0.5582450832072617,
"grad_norm": 4.935393810272217,
"learning_rate": 4.995616522178207e-06,
"loss": 0.8691,
"step": 123
},
{
"epoch": 0.5627836611195158,
"grad_norm": 5.565356731414795,
"learning_rate": 4.995227187812389e-06,
"loss": 0.8267,
"step": 124
},
{
"epoch": 0.56732223903177,
"grad_norm": 5.23801851272583,
"learning_rate": 4.994821307563995e-06,
"loss": 0.826,
"step": 125
},
{
"epoch": 0.5718608169440242,
"grad_norm": 4.2979736328125,
"learning_rate": 4.994398884124422e-06,
"loss": 0.8068,
"step": 126
},
{
"epoch": 0.5763993948562783,
"grad_norm": 6.285053730010986,
"learning_rate": 4.993959920294764e-06,
"loss": 0.819,
"step": 127
},
{
"epoch": 0.5809379727685325,
"grad_norm": 10.453060150146484,
"learning_rate": 4.9935044189857975e-06,
"loss": 0.773,
"step": 128
},
{
"epoch": 0.5854765506807866,
"grad_norm": 31.77552604675293,
"learning_rate": 4.993032383217957e-06,
"loss": 0.8738,
"step": 129
},
{
"epoch": 0.5900151285930408,
"grad_norm": 1.1456351280212402,
"learning_rate": 4.992543816121317e-06,
"loss": 0.7518,
"step": 130
},
{
"epoch": 0.594553706505295,
"grad_norm": 1.3343666791915894,
"learning_rate": 4.992038720935572e-06,
"loss": 0.8108,
"step": 131
},
{
"epoch": 0.5990922844175491,
"grad_norm": 0.8912076354026794,
"learning_rate": 4.991517101010015e-06,
"loss": 0.8159,
"step": 132
},
{
"epoch": 0.6036308623298033,
"grad_norm": 2.249366044998169,
"learning_rate": 4.990978959803513e-06,
"loss": 0.8124,
"step": 133
},
{
"epoch": 0.6081694402420574,
"grad_norm": 1.2822734117507935,
"learning_rate": 4.990424300884488e-06,
"loss": 0.8213,
"step": 134
},
{
"epoch": 0.6127080181543116,
"grad_norm": 6.053490161895752,
"learning_rate": 4.98985312793089e-06,
"loss": 0.7829,
"step": 135
},
{
"epoch": 0.6172465960665658,
"grad_norm": 2.2606236934661865,
"learning_rate": 4.989265444730176e-06,
"loss": 0.7729,
"step": 136
},
{
"epoch": 0.6217851739788199,
"grad_norm": 3.8894989490509033,
"learning_rate": 4.988661255179276e-06,
"loss": 0.774,
"step": 137
},
{
"epoch": 0.6263237518910741,
"grad_norm": 5.648194313049316,
"learning_rate": 4.988040563284582e-06,
"loss": 0.7251,
"step": 138
},
{
"epoch": 0.6308623298033282,
"grad_norm": 1.7583339214324951,
"learning_rate": 4.98740337316191e-06,
"loss": 0.7875,
"step": 139
},
{
"epoch": 0.6354009077155824,
"grad_norm": 1.3959944248199463,
"learning_rate": 4.9867496890364734e-06,
"loss": 0.7726,
"step": 140
},
{
"epoch": 0.6399394856278366,
"grad_norm": 1.7879443168640137,
"learning_rate": 4.986079515242861e-06,
"loss": 0.786,
"step": 141
},
{
"epoch": 0.6444780635400907,
"grad_norm": 2.0919816493988037,
"learning_rate": 4.985392856225003e-06,
"loss": 0.7802,
"step": 142
},
{
"epoch": 0.649016641452345,
"grad_norm": 1.272477626800537,
"learning_rate": 4.984689716536145e-06,
"loss": 0.7842,
"step": 143
},
{
"epoch": 0.653555219364599,
"grad_norm": 1.1265331506729126,
"learning_rate": 4.983970100838814e-06,
"loss": 0.736,
"step": 144
},
{
"epoch": 0.6580937972768532,
"grad_norm": 0.8514362573623657,
"learning_rate": 4.983234013904791e-06,
"loss": 0.749,
"step": 145
},
{
"epoch": 0.6626323751891074,
"grad_norm": 3.148453950881958,
"learning_rate": 4.9824814606150774e-06,
"loss": 0.7884,
"step": 146
},
{
"epoch": 0.6671709531013615,
"grad_norm": 1.531554937362671,
"learning_rate": 4.981712445959864e-06,
"loss": 0.767,
"step": 147
},
{
"epoch": 0.6717095310136157,
"grad_norm": 1.7979401350021362,
"learning_rate": 4.980926975038496e-06,
"loss": 0.7575,
"step": 148
},
{
"epoch": 0.6762481089258698,
"grad_norm": 1.1131621599197388,
"learning_rate": 4.9801250530594415e-06,
"loss": 0.76,
"step": 149
},
{
"epoch": 0.680786686838124,
"grad_norm": 1.2112400531768799,
"learning_rate": 4.9793066853402535e-06,
"loss": 0.769,
"step": 150
},
{
"epoch": 0.6853252647503782,
"grad_norm": 1.276172161102295,
"learning_rate": 4.978471877307541e-06,
"loss": 0.7641,
"step": 151
},
{
"epoch": 0.6898638426626323,
"grad_norm": 7.614717960357666,
"learning_rate": 4.977620634496926e-06,
"loss": 0.7614,
"step": 152
},
{
"epoch": 0.6944024205748865,
"grad_norm": 0.9541272521018982,
"learning_rate": 4.976752962553008e-06,
"loss": 0.7406,
"step": 153
},
{
"epoch": 0.6989409984871406,
"grad_norm": 0.9793027639389038,
"learning_rate": 4.975868867229332e-06,
"loss": 0.7538,
"step": 154
},
{
"epoch": 0.7034795763993948,
"grad_norm": 8.540267944335938,
"learning_rate": 4.974968354388346e-06,
"loss": 0.7616,
"step": 155
},
{
"epoch": 0.708018154311649,
"grad_norm": 2.279240608215332,
"learning_rate": 4.97405143000136e-06,
"loss": 0.7157,
"step": 156
},
{
"epoch": 0.7125567322239031,
"grad_norm": 2.947227716445923,
"learning_rate": 4.973118100148513e-06,
"loss": 0.7348,
"step": 157
},
{
"epoch": 0.7170953101361573,
"grad_norm": 2.211785316467285,
"learning_rate": 4.9721683710187255e-06,
"loss": 0.7144,
"step": 158
},
{
"epoch": 0.7216338880484114,
"grad_norm": 1.3755372762680054,
"learning_rate": 4.971202248909662e-06,
"loss": 0.6857,
"step": 159
},
{
"epoch": 0.7261724659606656,
"grad_norm": 2.564708709716797,
"learning_rate": 4.970219740227693e-06,
"loss": 0.7124,
"step": 160
},
{
"epoch": 0.7307110438729199,
"grad_norm": 11.76566219329834,
"learning_rate": 4.9692208514878445e-06,
"loss": 0.7815,
"step": 161
},
{
"epoch": 0.735249621785174,
"grad_norm": 3.7665200233459473,
"learning_rate": 4.9682055893137605e-06,
"loss": 0.7021,
"step": 162
},
{
"epoch": 0.7397881996974282,
"grad_norm": 2.9076344966888428,
"learning_rate": 4.967173960437657e-06,
"loss": 0.7083,
"step": 163
},
{
"epoch": 0.7443267776096822,
"grad_norm": 2.336026191711426,
"learning_rate": 4.966125971700277e-06,
"loss": 0.7455,
"step": 164
},
{
"epoch": 0.7488653555219364,
"grad_norm": 6.48813009262085,
"learning_rate": 4.965061630050848e-06,
"loss": 0.7628,
"step": 165
},
{
"epoch": 0.7534039334341907,
"grad_norm": 1.9123872518539429,
"learning_rate": 4.9639809425470324e-06,
"loss": 0.7212,
"step": 166
},
{
"epoch": 0.7579425113464447,
"grad_norm": 6.638424873352051,
"learning_rate": 4.962883916354882e-06,
"loss": 0.7523,
"step": 167
},
{
"epoch": 0.762481089258699,
"grad_norm": 1.3606716394424438,
"learning_rate": 4.961770558748793e-06,
"loss": 0.7651,
"step": 168
},
{
"epoch": 0.7670196671709532,
"grad_norm": 1.2726523876190186,
"learning_rate": 4.960640877111451e-06,
"loss": 0.7729,
"step": 169
},
{
"epoch": 0.7715582450832073,
"grad_norm": 0.9216910600662231,
"learning_rate": 4.959494878933792e-06,
"loss": 0.7162,
"step": 170
},
{
"epoch": 0.7760968229954615,
"grad_norm": 2.3702430725097656,
"learning_rate": 4.958332571814941e-06,
"loss": 0.7038,
"step": 171
},
{
"epoch": 0.7806354009077155,
"grad_norm": 4.6101555824279785,
"learning_rate": 4.957153963462172e-06,
"loss": 0.7141,
"step": 172
},
{
"epoch": 0.7851739788199698,
"grad_norm": 1.7314170598983765,
"learning_rate": 4.955959061690853e-06,
"loss": 0.7374,
"step": 173
},
{
"epoch": 0.789712556732224,
"grad_norm": 2.91019868850708,
"learning_rate": 4.9547478744243914e-06,
"loss": 0.7048,
"step": 174
},
{
"epoch": 0.794251134644478,
"grad_norm": 2.872775077819824,
"learning_rate": 4.953520409694186e-06,
"loss": 0.6912,
"step": 175
},
{
"epoch": 0.7987897125567323,
"grad_norm": 2.2696948051452637,
"learning_rate": 4.952276675639569e-06,
"loss": 0.7432,
"step": 176
},
{
"epoch": 0.8033282904689864,
"grad_norm": 1.8855810165405273,
"learning_rate": 4.951016680507757e-06,
"loss": 0.7056,
"step": 177
},
{
"epoch": 0.8078668683812406,
"grad_norm": 5.202772617340088,
"learning_rate": 4.9497404326537954e-06,
"loss": 0.7114,
"step": 178
},
{
"epoch": 0.8124054462934948,
"grad_norm": 0.7916449904441833,
"learning_rate": 4.948447940540497e-06,
"loss": 0.7202,
"step": 179
},
{
"epoch": 0.8169440242057489,
"grad_norm": 1.5009609460830688,
"learning_rate": 4.947139212738395e-06,
"loss": 0.7245,
"step": 180
},
{
"epoch": 0.8214826021180031,
"grad_norm": 1.858067512512207,
"learning_rate": 4.945814257925679e-06,
"loss": 0.6962,
"step": 181
},
{
"epoch": 0.8260211800302572,
"grad_norm": 0.7835391163825989,
"learning_rate": 4.94447308488814e-06,
"loss": 0.6875,
"step": 182
},
{
"epoch": 0.8305597579425114,
"grad_norm": 21.755929946899414,
"learning_rate": 4.943115702519115e-06,
"loss": 0.7304,
"step": 183
},
{
"epoch": 0.8350983358547656,
"grad_norm": 6.944667816162109,
"learning_rate": 4.941742119819421e-06,
"loss": 0.7381,
"step": 184
},
{
"epoch": 0.8396369137670197,
"grad_norm": 0.9813210368156433,
"learning_rate": 4.940352345897304e-06,
"loss": 0.682,
"step": 185
},
{
"epoch": 0.8441754916792739,
"grad_norm": 1.8010449409484863,
"learning_rate": 4.938946389968372e-06,
"loss": 0.7639,
"step": 186
},
{
"epoch": 0.848714069591528,
"grad_norm": 2.293980121612549,
"learning_rate": 4.937524261355535e-06,
"loss": 0.7027,
"step": 187
},
{
"epoch": 0.8532526475037822,
"grad_norm": 1.9937771558761597,
"learning_rate": 4.9360859694889475e-06,
"loss": 0.688,
"step": 188
},
{
"epoch": 0.8577912254160364,
"grad_norm": 1.8442484140396118,
"learning_rate": 4.934631523905938e-06,
"loss": 0.6261,
"step": 189
},
{
"epoch": 0.8623298033282905,
"grad_norm": 9.626107215881348,
"learning_rate": 4.933160934250957e-06,
"loss": 0.7605,
"step": 190
},
{
"epoch": 0.8668683812405447,
"grad_norm": 14.051777839660645,
"learning_rate": 4.931674210275499e-06,
"loss": 0.7183,
"step": 191
},
{
"epoch": 0.8714069591527988,
"grad_norm": 2.1191225051879883,
"learning_rate": 4.930171361838052e-06,
"loss": 0.697,
"step": 192
},
{
"epoch": 0.875945537065053,
"grad_norm": 1.4051584005355835,
"learning_rate": 4.928652398904022e-06,
"loss": 0.6985,
"step": 193
},
{
"epoch": 0.8804841149773072,
"grad_norm": 0.7633137106895447,
"learning_rate": 4.92711733154567e-06,
"loss": 0.7018,
"step": 194
},
{
"epoch": 0.8850226928895613,
"grad_norm": 1.5257729291915894,
"learning_rate": 4.925566169942048e-06,
"loss": 0.7108,
"step": 195
},
{
"epoch": 0.8895612708018155,
"grad_norm": 1.7885994911193848,
"learning_rate": 4.9239989243789275e-06,
"loss": 0.7251,
"step": 196
},
{
"epoch": 0.8940998487140696,
"grad_norm": 1.8133364915847778,
"learning_rate": 4.922415605248734e-06,
"loss": 0.691,
"step": 197
},
{
"epoch": 0.8986384266263238,
"grad_norm": 1.3306565284729004,
"learning_rate": 4.920816223050475e-06,
"loss": 0.6496,
"step": 198
},
{
"epoch": 0.903177004538578,
"grad_norm": 2.5053746700286865,
"learning_rate": 4.919200788389675e-06,
"loss": 0.7174,
"step": 199
},
{
"epoch": 0.9077155824508321,
"grad_norm": 2.8250479698181152,
"learning_rate": 4.917569311978301e-06,
"loss": 0.6992,
"step": 200
},
{
"epoch": 0.9122541603630863,
"grad_norm": 0.6993988752365112,
"learning_rate": 4.915921804634693e-06,
"loss": 0.6983,
"step": 201
},
{
"epoch": 0.9167927382753404,
"grad_norm": 2.591536045074463,
"learning_rate": 4.914258277283494e-06,
"loss": 0.6686,
"step": 202
},
{
"epoch": 0.9213313161875946,
"grad_norm": 1.7433080673217773,
"learning_rate": 4.912578740955573e-06,
"loss": 0.686,
"step": 203
},
{
"epoch": 0.9258698940998488,
"grad_norm": 1.5048431158065796,
"learning_rate": 4.910883206787958e-06,
"loss": 0.7043,
"step": 204
},
{
"epoch": 0.9304084720121029,
"grad_norm": 0.7160290479660034,
"learning_rate": 4.9091716860237545e-06,
"loss": 0.6703,
"step": 205
},
{
"epoch": 0.9349470499243571,
"grad_norm": 16.788084030151367,
"learning_rate": 4.907444190012081e-06,
"loss": 0.7465,
"step": 206
},
{
"epoch": 0.9394856278366112,
"grad_norm": 6.705326557159424,
"learning_rate": 4.905700730207983e-06,
"loss": 0.6692,
"step": 207
},
{
"epoch": 0.9440242057488654,
"grad_norm": 1.1539785861968994,
"learning_rate": 4.903941318172365e-06,
"loss": 0.6769,
"step": 208
},
{
"epoch": 0.9485627836611196,
"grad_norm": 0.806441068649292,
"learning_rate": 4.902165965571911e-06,
"loss": 0.6788,
"step": 209
},
{
"epoch": 0.9531013615733737,
"grad_norm": 2.599201202392578,
"learning_rate": 4.900374684179005e-06,
"loss": 0.6845,
"step": 210
},
{
"epoch": 0.9576399394856279,
"grad_norm": 1.9495007991790771,
"learning_rate": 4.898567485871656e-06,
"loss": 0.68,
"step": 211
},
{
"epoch": 0.962178517397882,
"grad_norm": 1.3471159934997559,
"learning_rate": 4.896744382633419e-06,
"loss": 0.6799,
"step": 212
},
{
"epoch": 0.9667170953101362,
"grad_norm": 2.436737537384033,
"learning_rate": 4.894905386553316e-06,
"loss": 0.6688,
"step": 213
},
{
"epoch": 0.9712556732223904,
"grad_norm": 2.486992597579956,
"learning_rate": 4.893050509825749e-06,
"loss": 0.6866,
"step": 214
},
{
"epoch": 0.9757942511346445,
"grad_norm": 2.137023687362671,
"learning_rate": 4.891179764750434e-06,
"loss": 0.6891,
"step": 215
},
{
"epoch": 0.9803328290468987,
"grad_norm": 1.3739172220230103,
"learning_rate": 4.8892931637323e-06,
"loss": 0.6219,
"step": 216
},
{
"epoch": 0.9848714069591528,
"grad_norm": 1.6083399057388306,
"learning_rate": 4.887390719281423e-06,
"loss": 0.6805,
"step": 217
},
{
"epoch": 0.989409984871407,
"grad_norm": 0.7767590284347534,
"learning_rate": 4.885472444012937e-06,
"loss": 0.6857,
"step": 218
},
{
"epoch": 0.9939485627836612,
"grad_norm": 0.8085631728172302,
"learning_rate": 4.883538350646949e-06,
"loss": 0.6594,
"step": 219
},
{
"epoch": 0.9984871406959153,
"grad_norm": 0.7762560844421387,
"learning_rate": 4.881588452008457e-06,
"loss": 0.6972,
"step": 220
},
{
"epoch": 1.0,
"grad_norm": 0.7762560844421387,
"learning_rate": 4.8796227610272615e-06,
"loss": 0.2152,
"step": 221
},
{
"epoch": 1.0045385779122542,
"grad_norm": 0.8768157958984375,
"learning_rate": 4.8776412907378845e-06,
"loss": 0.6847,
"step": 222
},
{
"epoch": 1.0090771558245084,
"grad_norm": 2.3720204830169678,
"learning_rate": 4.8756440542794805e-06,
"loss": 0.6646,
"step": 223
},
{
"epoch": 1.0136157337367624,
"grad_norm": 2.1314537525177,
"learning_rate": 4.873631064895749e-06,
"loss": 0.676,
"step": 224
},
{
"epoch": 1.0181543116490166,
"grad_norm": 1.7491836547851562,
"learning_rate": 4.871602335934847e-06,
"loss": 0.6474,
"step": 225
},
{
"epoch": 1.0226928895612708,
"grad_norm": 1.7630263566970825,
"learning_rate": 4.8695578808493034e-06,
"loss": 0.6541,
"step": 226
},
{
"epoch": 1.027231467473525,
"grad_norm": 1.195518970489502,
"learning_rate": 4.867497713195925e-06,
"loss": 0.6529,
"step": 227
},
{
"epoch": 1.0317700453857792,
"grad_norm": 5.6356377601623535,
"learning_rate": 4.8654218466357066e-06,
"loss": 0.6666,
"step": 228
},
{
"epoch": 1.0363086232980332,
"grad_norm": 21.327880859375,
"learning_rate": 4.863330294933748e-06,
"loss": 0.6721,
"step": 229
},
{
"epoch": 1.0408472012102874,
"grad_norm": 14.34103012084961,
"learning_rate": 4.8612230719591535e-06,
"loss": 0.6374,
"step": 230
},
{
"epoch": 1.0453857791225416,
"grad_norm": 7.112085819244385,
"learning_rate": 4.859100191684946e-06,
"loss": 0.6729,
"step": 231
},
{
"epoch": 1.0499243570347958,
"grad_norm": 2.3837637901306152,
"learning_rate": 4.856961668187968e-06,
"loss": 0.6741,
"step": 232
},
{
"epoch": 1.05446293494705,
"grad_norm": 1.5125519037246704,
"learning_rate": 4.854807515648799e-06,
"loss": 0.6584,
"step": 233
},
{
"epoch": 1.059001512859304,
"grad_norm": 1.8248745203018188,
"learning_rate": 4.852637748351651e-06,
"loss": 0.6481,
"step": 234
},
{
"epoch": 1.0635400907715582,
"grad_norm": 2.260824203491211,
"learning_rate": 4.850452380684275e-06,
"loss": 0.6695,
"step": 235
},
{
"epoch": 1.0680786686838124,
"grad_norm": 1.764312982559204,
"learning_rate": 4.848251427137875e-06,
"loss": 0.6638,
"step": 236
},
{
"epoch": 1.0726172465960666,
"grad_norm": 0.9826205968856812,
"learning_rate": 4.846034902306997e-06,
"loss": 0.6515,
"step": 237
},
{
"epoch": 1.0771558245083208,
"grad_norm": 1.1290264129638672,
"learning_rate": 4.8438028208894496e-06,
"loss": 0.6483,
"step": 238
},
{
"epoch": 1.0816944024205748,
"grad_norm": 4.97009801864624,
"learning_rate": 4.841555197686189e-06,
"loss": 0.6605,
"step": 239
},
{
"epoch": 1.086232980332829,
"grad_norm": 6.06207799911499,
"learning_rate": 4.839292047601234e-06,
"loss": 0.6147,
"step": 240
},
{
"epoch": 1.0907715582450832,
"grad_norm": 1.2093101739883423,
"learning_rate": 4.837013385641562e-06,
"loss": 0.6739,
"step": 241
},
{
"epoch": 1.0953101361573374,
"grad_norm": 0.9535529017448425,
"learning_rate": 4.834719226917007e-06,
"loss": 0.6392,
"step": 242
},
{
"epoch": 1.0998487140695916,
"grad_norm": 1.4470645189285278,
"learning_rate": 4.832409586640164e-06,
"loss": 0.6357,
"step": 243
},
{
"epoch": 1.1043872919818456,
"grad_norm": 1.4899321794509888,
"learning_rate": 4.830084480126288e-06,
"loss": 0.6704,
"step": 244
},
{
"epoch": 1.1089258698940998,
"grad_norm": 0.7240656614303589,
"learning_rate": 4.827743922793189e-06,
"loss": 0.621,
"step": 245
},
{
"epoch": 1.113464447806354,
"grad_norm": 0.8096688985824585,
"learning_rate": 4.8253879301611315e-06,
"loss": 0.6561,
"step": 246
},
{
"epoch": 1.1180030257186082,
"grad_norm": 1.7580249309539795,
"learning_rate": 4.823016517852731e-06,
"loss": 0.5893,
"step": 247
},
{
"epoch": 1.1225416036308624,
"grad_norm": 2.5382940769195557,
"learning_rate": 4.820629701592853e-06,
"loss": 0.6548,
"step": 248
},
{
"epoch": 1.1270801815431164,
"grad_norm": 1.0767178535461426,
"learning_rate": 4.8182274972085065e-06,
"loss": 0.6801,
"step": 249
},
{
"epoch": 1.1316187594553706,
"grad_norm": 0.7919514179229736,
"learning_rate": 4.815809920628738e-06,
"loss": 0.6314,
"step": 250
},
{
"epoch": 1.1361573373676248,
"grad_norm": 1.305253267288208,
"learning_rate": 4.813376987884527e-06,
"loss": 0.6347,
"step": 251
},
{
"epoch": 1.140695915279879,
"grad_norm": 1.4656856060028076,
"learning_rate": 4.810928715108683e-06,
"loss": 0.6253,
"step": 252
},
{
"epoch": 1.1452344931921332,
"grad_norm": 1.2813221216201782,
"learning_rate": 4.808465118535732e-06,
"loss": 0.6751,
"step": 253
},
{
"epoch": 1.1497730711043872,
"grad_norm": 3.507342576980591,
"learning_rate": 4.805986214501813e-06,
"loss": 0.6606,
"step": 254
},
{
"epoch": 1.1543116490166414,
"grad_norm": 4.23391056060791,
"learning_rate": 4.803492019444571e-06,
"loss": 0.6278,
"step": 255
},
{
"epoch": 1.1588502269288956,
"grad_norm": 2.3074967861175537,
"learning_rate": 4.8009825499030426e-06,
"loss": 0.6175,
"step": 256
},
{
"epoch": 1.1633888048411498,
"grad_norm": 1.3244863748550415,
"learning_rate": 4.798457822517554e-06,
"loss": 0.6392,
"step": 257
},
{
"epoch": 1.167927382753404,
"grad_norm": 1.0530226230621338,
"learning_rate": 4.795917854029601e-06,
"loss": 0.6305,
"step": 258
},
{
"epoch": 1.172465960665658,
"grad_norm": 2.187415599822998,
"learning_rate": 4.79336266128175e-06,
"loss": 0.6432,
"step": 259
},
{
"epoch": 1.1770045385779122,
"grad_norm": 1.4672960042953491,
"learning_rate": 4.790792261217513e-06,
"loss": 0.649,
"step": 260
},
{
"epoch": 1.1815431164901664,
"grad_norm": 1.1620965003967285,
"learning_rate": 4.788206670881245e-06,
"loss": 0.6507,
"step": 261
},
{
"epoch": 1.1860816944024206,
"grad_norm": 0.7209274768829346,
"learning_rate": 4.785605907418029e-06,
"loss": 0.6502,
"step": 262
},
{
"epoch": 1.1906202723146748,
"grad_norm": 3.6349446773529053,
"learning_rate": 4.78298998807356e-06,
"loss": 0.6251,
"step": 263
},
{
"epoch": 1.1951588502269288,
"grad_norm": 13.263801574707031,
"learning_rate": 4.7803589301940306e-06,
"loss": 0.6663,
"step": 264
},
{
"epoch": 1.199697428139183,
"grad_norm": 12.560731887817383,
"learning_rate": 4.777712751226019e-06,
"loss": 0.6709,
"step": 265
},
{
"epoch": 1.2042360060514372,
"grad_norm": 3.721285820007324,
"learning_rate": 4.775051468716371e-06,
"loss": 0.6555,
"step": 266
},
{
"epoch": 1.2087745839636914,
"grad_norm": 0.974590539932251,
"learning_rate": 4.772375100312084e-06,
"loss": 0.6308,
"step": 267
},
{
"epoch": 1.2133131618759456,
"grad_norm": 1.3410248756408691,
"learning_rate": 4.769683663760191e-06,
"loss": 0.6506,
"step": 268
},
{
"epoch": 1.2178517397881996,
"grad_norm": 1.4115511178970337,
"learning_rate": 4.7669771769076395e-06,
"loss": 0.6296,
"step": 269
},
{
"epoch": 1.2223903177004538,
"grad_norm": 1.355098843574524,
"learning_rate": 4.764255657701179e-06,
"loss": 0.6756,
"step": 270
},
{
"epoch": 1.226928895612708,
"grad_norm": 0.9310200810432434,
"learning_rate": 4.761519124187237e-06,
"loss": 0.6724,
"step": 271
},
{
"epoch": 1.2314674735249622,
"grad_norm": 1.0098122358322144,
"learning_rate": 4.758767594511801e-06,
"loss": 0.6595,
"step": 272
},
{
"epoch": 1.2360060514372164,
"grad_norm": 2.7444238662719727,
"learning_rate": 4.7560010869202985e-06,
"loss": 0.582,
"step": 273
},
{
"epoch": 1.2405446293494704,
"grad_norm": 2.2694830894470215,
"learning_rate": 4.753219619757477e-06,
"loss": 0.6411,
"step": 274
},
{
"epoch": 1.2450832072617246,
"grad_norm": 1.1762354373931885,
"learning_rate": 4.750423211467278e-06,
"loss": 0.6358,
"step": 275
},
{
"epoch": 1.2496217851739788,
"grad_norm": 0.86478191614151,
"learning_rate": 4.7476118805927214e-06,
"loss": 0.6234,
"step": 276
},
{
"epoch": 1.254160363086233,
"grad_norm": 1.143272876739502,
"learning_rate": 4.7447856457757765e-06,
"loss": 0.6627,
"step": 277
},
{
"epoch": 1.258698940998487,
"grad_norm": 1.7226762771606445,
"learning_rate": 4.7419445257572414e-06,
"loss": 0.6248,
"step": 278
},
{
"epoch": 1.2632375189107412,
"grad_norm": 1.428463101387024,
"learning_rate": 4.739088539376618e-06,
"loss": 0.6577,
"step": 279
},
{
"epoch": 1.2677760968229954,
"grad_norm": 0.9464501142501831,
"learning_rate": 4.736217705571989e-06,
"loss": 0.6464,
"step": 280
},
{
"epoch": 1.2723146747352496,
"grad_norm": 0.8889546394348145,
"learning_rate": 4.733332043379889e-06,
"loss": 0.6249,
"step": 281
},
{
"epoch": 1.2768532526475038,
"grad_norm": 0.7456269860267639,
"learning_rate": 4.730431571935178e-06,
"loss": 0.6242,
"step": 282
},
{
"epoch": 1.281391830559758,
"grad_norm": 9.802299499511719,
"learning_rate": 4.72751631047092e-06,
"loss": 0.6576,
"step": 283
},
{
"epoch": 1.2859304084720122,
"grad_norm": 15.863835334777832,
"learning_rate": 4.72458627831825e-06,
"loss": 0.6916,
"step": 284
},
{
"epoch": 1.2904689863842662,
"grad_norm": 15.025418281555176,
"learning_rate": 4.721641494906247e-06,
"loss": 0.7036,
"step": 285
},
{
"epoch": 1.2950075642965204,
"grad_norm": 3.8970537185668945,
"learning_rate": 4.718681979761806e-06,
"loss": 0.6166,
"step": 286
},
{
"epoch": 1.2995461422087746,
"grad_norm": 0.6507979035377502,
"learning_rate": 4.715707752509512e-06,
"loss": 0.613,
"step": 287
},
{
"epoch": 1.3040847201210286,
"grad_norm": 1.1878042221069336,
"learning_rate": 4.712718832871499e-06,
"loss": 0.6474,
"step": 288
},
{
"epoch": 1.3086232980332828,
"grad_norm": 1.0940614938735962,
"learning_rate": 4.709715240667332e-06,
"loss": 0.6577,
"step": 289
},
{
"epoch": 1.313161875945537,
"grad_norm": 0.9987061619758606,
"learning_rate": 4.706696995813869e-06,
"loss": 0.6571,
"step": 290
},
{
"epoch": 1.3177004538577912,
"grad_norm": 1.5589380264282227,
"learning_rate": 4.7036641183251285e-06,
"loss": 0.6495,
"step": 291
},
{
"epoch": 1.3222390317700454,
"grad_norm": 1.525474190711975,
"learning_rate": 4.700616628312159e-06,
"loss": 0.5986,
"step": 292
},
{
"epoch": 1.3267776096822996,
"grad_norm": 0.8548336625099182,
"learning_rate": 4.697554545982904e-06,
"loss": 0.6034,
"step": 293
},
{
"epoch": 1.3313161875945538,
"grad_norm": 4.231250286102295,
"learning_rate": 4.6944778916420705e-06,
"loss": 0.6405,
"step": 294
},
{
"epoch": 1.3358547655068078,
"grad_norm": 8.273162841796875,
"learning_rate": 4.691386685690993e-06,
"loss": 0.6635,
"step": 295
},
{
"epoch": 1.340393343419062,
"grad_norm": 4.974193096160889,
"learning_rate": 4.6882809486274934e-06,
"loss": 0.6289,
"step": 296
},
{
"epoch": 1.3449319213313162,
"grad_norm": 3.757338523864746,
"learning_rate": 4.685160701045757e-06,
"loss": 0.6227,
"step": 297
},
{
"epoch": 1.3494704992435702,
"grad_norm": 1.2015799283981323,
"learning_rate": 4.68202596363618e-06,
"loss": 0.6237,
"step": 298
},
{
"epoch": 1.3540090771558244,
"grad_norm": 0.7638722658157349,
"learning_rate": 4.678876757185248e-06,
"loss": 0.6063,
"step": 299
},
{
"epoch": 1.3585476550680786,
"grad_norm": 1.2864232063293457,
"learning_rate": 4.675713102575389e-06,
"loss": 0.5997,
"step": 300
},
{
"epoch": 1.3630862329803328,
"grad_norm": 1.1902930736541748,
"learning_rate": 4.672535020784833e-06,
"loss": 0.6352,
"step": 301
},
{
"epoch": 1.367624810892587,
"grad_norm": 1.4321516752243042,
"learning_rate": 4.669342532887482e-06,
"loss": 0.6531,
"step": 302
},
{
"epoch": 1.3721633888048412,
"grad_norm": 1.139543890953064,
"learning_rate": 4.666135660052764e-06,
"loss": 0.6235,
"step": 303
},
{
"epoch": 1.3767019667170954,
"grad_norm": 0.7234447598457336,
"learning_rate": 4.66291442354549e-06,
"loss": 0.6012,
"step": 304
},
{
"epoch": 1.3812405446293494,
"grad_norm": 3.093146324157715,
"learning_rate": 4.659678844725722e-06,
"loss": 0.6058,
"step": 305
},
{
"epoch": 1.3857791225416036,
"grad_norm": 2.840275764465332,
"learning_rate": 4.656428945048622e-06,
"loss": 0.6139,
"step": 306
},
{
"epoch": 1.3903177004538578,
"grad_norm": 1.0061054229736328,
"learning_rate": 4.653164746064315e-06,
"loss": 0.6288,
"step": 307
},
{
"epoch": 1.394856278366112,
"grad_norm": 0.9403374195098877,
"learning_rate": 4.649886269417746e-06,
"loss": 0.6435,
"step": 308
},
{
"epoch": 1.399394856278366,
"grad_norm": 1.0838265419006348,
"learning_rate": 4.646593536848535e-06,
"loss": 0.6485,
"step": 309
},
{
"epoch": 1.4039334341906202,
"grad_norm": 1.2738953828811646,
"learning_rate": 4.643286570190832e-06,
"loss": 0.5993,
"step": 310
},
{
"epoch": 1.4084720121028744,
"grad_norm": 1.3124756813049316,
"learning_rate": 4.639965391373173e-06,
"loss": 0.6154,
"step": 311
},
{
"epoch": 1.4130105900151286,
"grad_norm": 0.7026720643043518,
"learning_rate": 4.636630022418337e-06,
"loss": 0.6493,
"step": 312
},
{
"epoch": 1.4175491679273828,
"grad_norm": 1.101508617401123,
"learning_rate": 4.6332804854431986e-06,
"loss": 0.6437,
"step": 313
},
{
"epoch": 1.422087745839637,
"grad_norm": 0.6824156641960144,
"learning_rate": 4.6299168026585775e-06,
"loss": 0.6017,
"step": 314
},
{
"epoch": 1.426626323751891,
"grad_norm": 0.8083431124687195,
"learning_rate": 4.626538996369096e-06,
"loss": 0.6338,
"step": 315
},
{
"epoch": 1.4311649016641452,
"grad_norm": 0.9624136090278625,
"learning_rate": 4.623147088973031e-06,
"loss": 0.5804,
"step": 316
},
{
"epoch": 1.4357034795763994,
"grad_norm": 0.8000622987747192,
"learning_rate": 4.619741102962161e-06,
"loss": 0.6242,
"step": 317
},
{
"epoch": 1.4402420574886536,
"grad_norm": 1.2038214206695557,
"learning_rate": 4.6163210609216234e-06,
"loss": 0.6259,
"step": 318
},
{
"epoch": 1.4447806354009076,
"grad_norm": 0.8374214768409729,
"learning_rate": 4.612886985529759e-06,
"loss": 0.6078,
"step": 319
},
{
"epoch": 1.4493192133131618,
"grad_norm": 1.0167770385742188,
"learning_rate": 4.609438899557964e-06,
"loss": 0.5972,
"step": 320
},
{
"epoch": 1.453857791225416,
"grad_norm": 0.8266498446464539,
"learning_rate": 4.60597682587054e-06,
"loss": 0.6211,
"step": 321
},
{
"epoch": 1.4583963691376702,
"grad_norm": 0.7585692405700684,
"learning_rate": 4.6025007874245405e-06,
"loss": 0.6233,
"step": 322
},
{
"epoch": 1.4629349470499244,
"grad_norm": 1.6358634233474731,
"learning_rate": 4.59901080726962e-06,
"loss": 0.6075,
"step": 323
},
{
"epoch": 1.4674735249621786,
"grad_norm": 1.1722335815429688,
"learning_rate": 4.595506908547881e-06,
"loss": 0.6066,
"step": 324
},
{
"epoch": 1.4720121028744326,
"grad_norm": 0.9726622104644775,
"learning_rate": 4.591989114493718e-06,
"loss": 0.6506,
"step": 325
},
{
"epoch": 1.4765506807866868,
"grad_norm": 0.8073020577430725,
"learning_rate": 4.588457448433667e-06,
"loss": 0.6077,
"step": 326
},
{
"epoch": 1.481089258698941,
"grad_norm": 0.71394282579422,
"learning_rate": 4.584911933786252e-06,
"loss": 0.5882,
"step": 327
},
{
"epoch": 1.4856278366111952,
"grad_norm": 4.143211364746094,
"learning_rate": 4.581352594061824e-06,
"loss": 0.6047,
"step": 328
},
{
"epoch": 1.4901664145234492,
"grad_norm": 3.5801639556884766,
"learning_rate": 4.5777794528624075e-06,
"loss": 0.6094,
"step": 329
},
{
"epoch": 1.4947049924357034,
"grad_norm": 0.9617034792900085,
"learning_rate": 4.574192533881547e-06,
"loss": 0.6291,
"step": 330
},
{
"epoch": 1.4992435703479576,
"grad_norm": 0.8535535931587219,
"learning_rate": 4.570591860904149e-06,
"loss": 0.6587,
"step": 331
},
{
"epoch": 1.5037821482602118,
"grad_norm": 1.426477074623108,
"learning_rate": 4.566977457806317e-06,
"loss": 0.6347,
"step": 332
},
{
"epoch": 1.508320726172466,
"grad_norm": 1.6053332090377808,
"learning_rate": 4.563349348555207e-06,
"loss": 0.603,
"step": 333
},
{
"epoch": 1.5128593040847202,
"grad_norm": 1.3673542737960815,
"learning_rate": 4.5597075572088545e-06,
"loss": 0.6443,
"step": 334
},
{
"epoch": 1.5173978819969742,
"grad_norm": 1.0444583892822266,
"learning_rate": 4.556052107916023e-06,
"loss": 0.6033,
"step": 335
},
{
"epoch": 1.5219364599092284,
"grad_norm": 2.568854331970215,
"learning_rate": 4.552383024916044e-06,
"loss": 0.6364,
"step": 336
},
{
"epoch": 1.5264750378214826,
"grad_norm": 0.8063260316848755,
"learning_rate": 4.54870033253865e-06,
"loss": 0.6406,
"step": 337
},
{
"epoch": 1.5310136157337366,
"grad_norm": 0.8449574112892151,
"learning_rate": 4.545004055203823e-06,
"loss": 0.5977,
"step": 338
},
{
"epoch": 1.5355521936459908,
"grad_norm": 0.7573151588439941,
"learning_rate": 4.541294217421622e-06,
"loss": 0.6098,
"step": 339
},
{
"epoch": 1.540090771558245,
"grad_norm": 0.7103497982025146,
"learning_rate": 4.537570843792028e-06,
"loss": 0.6344,
"step": 340
},
{
"epoch": 1.5446293494704992,
"grad_norm": 0.7327162623405457,
"learning_rate": 4.5338339590047795e-06,
"loss": 0.6318,
"step": 341
},
{
"epoch": 1.5491679273827534,
"grad_norm": 0.7688593864440918,
"learning_rate": 4.530083587839204e-06,
"loss": 0.6089,
"step": 342
},
{
"epoch": 1.5537065052950076,
"grad_norm": 0.9933049082756042,
"learning_rate": 4.52631975516406e-06,
"loss": 0.6003,
"step": 343
},
{
"epoch": 1.5582450832072618,
"grad_norm": 0.7319652438163757,
"learning_rate": 4.522542485937369e-06,
"loss": 0.6054,
"step": 344
},
{
"epoch": 1.5627836611195158,
"grad_norm": 0.8350914120674133,
"learning_rate": 4.518751805206251e-06,
"loss": 0.606,
"step": 345
},
{
"epoch": 1.56732223903177,
"grad_norm": 1.7987092733383179,
"learning_rate": 4.514947738106755e-06,
"loss": 0.6637,
"step": 346
},
{
"epoch": 1.5718608169440242,
"grad_norm": 0.6338518261909485,
"learning_rate": 4.5111303098637005e-06,
"loss": 0.5778,
"step": 347
},
{
"epoch": 1.5763993948562782,
"grad_norm": 0.854932427406311,
"learning_rate": 4.5072995457905e-06,
"loss": 0.598,
"step": 348
},
{
"epoch": 1.5809379727685324,
"grad_norm": 1.217940330505371,
"learning_rate": 4.503455471288998e-06,
"loss": 0.6087,
"step": 349
},
{
"epoch": 1.5854765506807866,
"grad_norm": 1.329987645149231,
"learning_rate": 4.499598111849299e-06,
"loss": 0.6321,
"step": 350
},
{
"epoch": 1.5900151285930408,
"grad_norm": 1.9689991474151611,
"learning_rate": 4.495727493049604e-06,
"loss": 0.6361,
"step": 351
},
{
"epoch": 1.594553706505295,
"grad_norm": 1.3316866159439087,
"learning_rate": 4.491843640556033e-06,
"loss": 0.6097,
"step": 352
},
{
"epoch": 1.5990922844175492,
"grad_norm": 1.2030465602874756,
"learning_rate": 4.4879465801224605e-06,
"loss": 0.6302,
"step": 353
},
{
"epoch": 1.6036308623298035,
"grad_norm": 0.9129522442817688,
"learning_rate": 4.484036337590343e-06,
"loss": 0.6398,
"step": 354
},
{
"epoch": 1.6081694402420574,
"grad_norm": 1.6810179948806763,
"learning_rate": 4.4801129388885475e-06,
"loss": 0.6234,
"step": 355
},
{
"epoch": 1.6127080181543116,
"grad_norm": 3.6033570766448975,
"learning_rate": 4.476176410033179e-06,
"loss": 0.6145,
"step": 356
},
{
"epoch": 1.6172465960665658,
"grad_norm": 1.2315465211868286,
"learning_rate": 4.472226777127412e-06,
"loss": 0.6274,
"step": 357
},
{
"epoch": 1.6217851739788198,
"grad_norm": 0.7585744261741638,
"learning_rate": 4.468264066361308e-06,
"loss": 0.5897,
"step": 358
},
{
"epoch": 1.626323751891074,
"grad_norm": 0.945957362651825,
"learning_rate": 4.464288304011652e-06,
"loss": 0.6078,
"step": 359
},
{
"epoch": 1.6308623298033282,
"grad_norm": 1.0154330730438232,
"learning_rate": 4.460299516441777e-06,
"loss": 0.5899,
"step": 360
},
{
"epoch": 1.6354009077155824,
"grad_norm": 0.8923754096031189,
"learning_rate": 4.456297730101379e-06,
"loss": 0.6204,
"step": 361
},
{
"epoch": 1.6399394856278366,
"grad_norm": 0.9550593495368958,
"learning_rate": 4.452282971526355e-06,
"loss": 0.5699,
"step": 362
},
{
"epoch": 1.6444780635400909,
"grad_norm": 0.7987310886383057,
"learning_rate": 4.448255267338619e-06,
"loss": 0.6325,
"step": 363
},
{
"epoch": 1.649016641452345,
"grad_norm": 0.830464780330658,
"learning_rate": 4.444214644245928e-06,
"loss": 0.6367,
"step": 364
},
{
"epoch": 1.653555219364599,
"grad_norm": 1.604446530342102,
"learning_rate": 4.440161129041704e-06,
"loss": 0.5668,
"step": 365
},
{
"epoch": 1.6580937972768532,
"grad_norm": 0.6995673179626465,
"learning_rate": 4.436094748604856e-06,
"loss": 0.596,
"step": 366
},
{
"epoch": 1.6626323751891074,
"grad_norm": 0.6944538950920105,
"learning_rate": 4.432015529899604e-06,
"loss": 0.6082,
"step": 367
},
{
"epoch": 1.6671709531013614,
"grad_norm": 0.9797276854515076,
"learning_rate": 4.427923499975298e-06,
"loss": 0.6104,
"step": 368
},
{
"epoch": 1.6717095310136156,
"grad_norm": 0.7624075412750244,
"learning_rate": 4.423818685966239e-06,
"loss": 0.5721,
"step": 369
},
{
"epoch": 1.6762481089258698,
"grad_norm": 0.8912142515182495,
"learning_rate": 4.4197011150915e-06,
"loss": 0.6162,
"step": 370
},
{
"epoch": 1.680786686838124,
"grad_norm": 1.4948642253875732,
"learning_rate": 4.415570814654746e-06,
"loss": 0.6355,
"step": 371
},
{
"epoch": 1.6853252647503782,
"grad_norm": 3.306320905685425,
"learning_rate": 4.4114278120440494e-06,
"loss": 0.6077,
"step": 372
},
{
"epoch": 1.6898638426626325,
"grad_norm": 0.6849818229675293,
"learning_rate": 4.407272134731711e-06,
"loss": 0.621,
"step": 373
},
{
"epoch": 1.6944024205748867,
"grad_norm": 0.9957187175750732,
"learning_rate": 4.403103810274082e-06,
"loss": 0.6468,
"step": 374
},
{
"epoch": 1.6989409984871406,
"grad_norm": 0.926688551902771,
"learning_rate": 4.398922866311371e-06,
"loss": 0.6021,
"step": 375
},
{
"epoch": 1.7034795763993948,
"grad_norm": 0.8220088481903076,
"learning_rate": 4.394729330567471e-06,
"loss": 0.5753,
"step": 376
},
{
"epoch": 1.708018154311649,
"grad_norm": 0.8064286112785339,
"learning_rate": 4.390523230849769e-06,
"loss": 0.6275,
"step": 377
},
{
"epoch": 1.712556732223903,
"grad_norm": 0.7482770681381226,
"learning_rate": 4.386304595048966e-06,
"loss": 0.6103,
"step": 378
},
{
"epoch": 1.7170953101361572,
"grad_norm": 1.6559797525405884,
"learning_rate": 4.382073451138887e-06,
"loss": 0.6366,
"step": 379
},
{
"epoch": 1.7216338880484114,
"grad_norm": 0.6992952227592468,
"learning_rate": 4.3778298271762995e-06,
"loss": 0.6188,
"step": 380
},
{
"epoch": 1.7261724659606656,
"grad_norm": 0.6812805533409119,
"learning_rate": 4.373573751300729e-06,
"loss": 0.6103,
"step": 381
},
{
"epoch": 1.7307110438729199,
"grad_norm": 0.767241358757019,
"learning_rate": 4.369305251734267e-06,
"loss": 0.6089,
"step": 382
},
{
"epoch": 1.735249621785174,
"grad_norm": 1.5500905513763428,
"learning_rate": 4.365024356781386e-06,
"loss": 0.6087,
"step": 383
},
{
"epoch": 1.7397881996974283,
"grad_norm": 0.8380416631698608,
"learning_rate": 4.360731094828755e-06,
"loss": 0.6074,
"step": 384
},
{
"epoch": 1.7443267776096822,
"grad_norm": 0.915096640586853,
"learning_rate": 4.356425494345047e-06,
"loss": 0.5962,
"step": 385
},
{
"epoch": 1.7488653555219364,
"grad_norm": 0.9544028639793396,
"learning_rate": 4.352107583880753e-06,
"loss": 0.5766,
"step": 386
},
{
"epoch": 1.7534039334341907,
"grad_norm": 0.7770220041275024,
"learning_rate": 4.347777392067991e-06,
"loss": 0.5879,
"step": 387
},
{
"epoch": 1.7579425113464446,
"grad_norm": 3.470493793487549,
"learning_rate": 4.343434947620316e-06,
"loss": 0.6107,
"step": 388
},
{
"epoch": 1.7624810892586988,
"grad_norm": 1.7937536239624023,
"learning_rate": 4.339080279332531e-06,
"loss": 0.5892,
"step": 389
},
{
"epoch": 1.767019667170953,
"grad_norm": 1.262220859527588,
"learning_rate": 4.334713416080498e-06,
"loss": 0.6321,
"step": 390
},
{
"epoch": 1.7715582450832073,
"grad_norm": 0.6722662448883057,
"learning_rate": 4.33033438682094e-06,
"loss": 0.6366,
"step": 391
},
{
"epoch": 1.7760968229954615,
"grad_norm": 1.0483866930007935,
"learning_rate": 4.3259432205912544e-06,
"loss": 0.5867,
"step": 392
},
{
"epoch": 1.7806354009077157,
"grad_norm": 1.2742741107940674,
"learning_rate": 4.32153994650932e-06,
"loss": 0.6045,
"step": 393
},
{
"epoch": 1.7851739788199699,
"grad_norm": 1.2766985893249512,
"learning_rate": 4.317124593773301e-06,
"loss": 0.5952,
"step": 394
},
{
"epoch": 1.789712556732224,
"grad_norm": 0.8167585134506226,
"learning_rate": 4.312697191661457e-06,
"loss": 0.5621,
"step": 395
},
{
"epoch": 1.794251134644478,
"grad_norm": 0.7818560004234314,
"learning_rate": 4.308257769531947e-06,
"loss": 0.5897,
"step": 396
},
{
"epoch": 1.7987897125567323,
"grad_norm": 1.0983150005340576,
"learning_rate": 4.303806356822635e-06,
"loss": 0.6189,
"step": 397
},
{
"epoch": 1.8033282904689862,
"grad_norm": 3.0957119464874268,
"learning_rate": 4.299342983050892e-06,
"loss": 0.5743,
"step": 398
},
{
"epoch": 1.8078668683812404,
"grad_norm": 1.364321231842041,
"learning_rate": 4.294867677813407e-06,
"loss": 0.5722,
"step": 399
},
{
"epoch": 1.8124054462934946,
"grad_norm": 1.0932508707046509,
"learning_rate": 4.290380470785984e-06,
"loss": 0.6074,
"step": 400
},
{
"epoch": 1.8169440242057489,
"grad_norm": 0.8298100829124451,
"learning_rate": 4.285881391723348e-06,
"loss": 0.6143,
"step": 401
},
{
"epoch": 1.821482602118003,
"grad_norm": 0.7821558117866516,
"learning_rate": 4.2813704704589504e-06,
"loss": 0.6148,
"step": 402
},
{
"epoch": 1.8260211800302573,
"grad_norm": 1.2309906482696533,
"learning_rate": 4.276847736904765e-06,
"loss": 0.6039,
"step": 403
},
{
"epoch": 1.8305597579425115,
"grad_norm": 0.7675696015357971,
"learning_rate": 4.272313221051094e-06,
"loss": 0.5869,
"step": 404
},
{
"epoch": 1.8350983358547657,
"grad_norm": 0.6467660069465637,
"learning_rate": 4.267766952966369e-06,
"loss": 0.5999,
"step": 405
},
{
"epoch": 1.8396369137670197,
"grad_norm": 2.9403133392333984,
"learning_rate": 4.263208962796951e-06,
"loss": 0.5859,
"step": 406
},
{
"epoch": 1.8441754916792739,
"grad_norm": 1.017529845237732,
"learning_rate": 4.2586392807669286e-06,
"loss": 0.5771,
"step": 407
},
{
"epoch": 1.8487140695915278,
"grad_norm": 0.7939811944961548,
"learning_rate": 4.25405793717792e-06,
"loss": 0.5968,
"step": 408
},
{
"epoch": 1.853252647503782,
"grad_norm": 0.9015148878097534,
"learning_rate": 4.2494649624088724e-06,
"loss": 0.5791,
"step": 409
},
{
"epoch": 1.8577912254160363,
"grad_norm": 1.0004379749298096,
"learning_rate": 4.2448603869158585e-06,
"loss": 0.5969,
"step": 410
},
{
"epoch": 1.8623298033282905,
"grad_norm": 0.8573418855667114,
"learning_rate": 4.2402442412318765e-06,
"loss": 0.6308,
"step": 411
},
{
"epoch": 1.8668683812405447,
"grad_norm": 0.7616469860076904,
"learning_rate": 4.235616555966646e-06,
"loss": 0.5955,
"step": 412
},
{
"epoch": 1.8714069591527989,
"grad_norm": 0.838377833366394,
"learning_rate": 4.2309773618064035e-06,
"loss": 0.6135,
"step": 413
},
{
"epoch": 1.875945537065053,
"grad_norm": 0.6853629350662231,
"learning_rate": 4.226326689513705e-06,
"loss": 0.5962,
"step": 414
},
{
"epoch": 1.8804841149773073,
"grad_norm": 3.4511594772338867,
"learning_rate": 4.221664569927217e-06,
"loss": 0.632,
"step": 415
},
{
"epoch": 1.8850226928895613,
"grad_norm": 2.316239833831787,
"learning_rate": 4.216991033961511e-06,
"loss": 0.5712,
"step": 416
},
{
"epoch": 1.8895612708018155,
"grad_norm": 0.7679340243339539,
"learning_rate": 4.212306112606863e-06,
"loss": 0.5849,
"step": 417
},
{
"epoch": 1.8940998487140694,
"grad_norm": 0.8144194483757019,
"learning_rate": 4.207609836929045e-06,
"loss": 0.5586,
"step": 418
},
{
"epoch": 1.8986384266263236,
"grad_norm": 0.7225912809371948,
"learning_rate": 4.2029022380691195e-06,
"loss": 0.606,
"step": 419
},
{
"epoch": 1.9031770045385779,
"grad_norm": 0.800234854221344,
"learning_rate": 4.198183347243233e-06,
"loss": 0.6024,
"step": 420
},
{
"epoch": 1.907715582450832,
"grad_norm": 0.7729604840278625,
"learning_rate": 4.1934531957424095e-06,
"loss": 0.598,
"step": 421
},
{
"epoch": 1.9122541603630863,
"grad_norm": 0.6805166602134705,
"learning_rate": 4.188711814932343e-06,
"loss": 0.6148,
"step": 422
},
{
"epoch": 1.9167927382753405,
"grad_norm": 1.0123629570007324,
"learning_rate": 4.1839592362531875e-06,
"loss": 0.6029,
"step": 423
},
{
"epoch": 1.9213313161875947,
"grad_norm": 0.8777531385421753,
"learning_rate": 4.179195491219353e-06,
"loss": 0.5721,
"step": 424
},
{
"epoch": 1.9258698940998489,
"grad_norm": 0.8781999945640564,
"learning_rate": 4.1744206114192895e-06,
"loss": 0.5761,
"step": 425
},
{
"epoch": 1.9304084720121029,
"grad_norm": 0.909726083278656,
"learning_rate": 4.169634628515288e-06,
"loss": 0.6101,
"step": 426
},
{
"epoch": 1.934947049924357,
"grad_norm": 0.8270307779312134,
"learning_rate": 4.164837574243259e-06,
"loss": 0.5635,
"step": 427
},
{
"epoch": 1.939485627836611,
"grad_norm": 0.8078930974006653,
"learning_rate": 4.16002948041253e-06,
"loss": 0.6117,
"step": 428
},
{
"epoch": 1.9440242057488653,
"grad_norm": 1.2975406646728516,
"learning_rate": 4.155210378905629e-06,
"loss": 0.6157,
"step": 429
},
{
"epoch": 1.9485627836611195,
"grad_norm": 1.0585848093032837,
"learning_rate": 4.15038030167808e-06,
"loss": 0.6252,
"step": 430
},
{
"epoch": 1.9531013615733737,
"grad_norm": 1.106614112854004,
"learning_rate": 4.145539280758184e-06,
"loss": 0.5781,
"step": 431
},
{
"epoch": 1.9576399394856279,
"grad_norm": 0.7705745697021484,
"learning_rate": 4.140687348246814e-06,
"loss": 0.6134,
"step": 432
},
{
"epoch": 1.962178517397882,
"grad_norm": 1.1777352094650269,
"learning_rate": 4.1358245363171905e-06,
"loss": 0.6231,
"step": 433
},
{
"epoch": 1.9667170953101363,
"grad_norm": 0.9292231798171997,
"learning_rate": 4.130950877214683e-06,
"loss": 0.6086,
"step": 434
},
{
"epoch": 1.9712556732223905,
"grad_norm": 1.0536510944366455,
"learning_rate": 4.126066403256585e-06,
"loss": 0.6077,
"step": 435
},
{
"epoch": 1.9757942511346445,
"grad_norm": 0.7694706916809082,
"learning_rate": 4.121171146831905e-06,
"loss": 0.6318,
"step": 436
},
{
"epoch": 1.9803328290468987,
"grad_norm": 1.4091219902038574,
"learning_rate": 4.116265140401148e-06,
"loss": 0.5873,
"step": 437
},
{
"epoch": 1.9848714069591527,
"grad_norm": 1.4843878746032715,
"learning_rate": 4.111348416496104e-06,
"loss": 0.5748,
"step": 438
},
{
"epoch": 1.9894099848714069,
"grad_norm": 2.431475877761841,
"learning_rate": 4.106421007719631e-06,
"loss": 0.6155,
"step": 439
},
{
"epoch": 1.993948562783661,
"grad_norm": 0.689834475517273,
"learning_rate": 4.101482946745438e-06,
"loss": 0.5792,
"step": 440
}
],
"logging_steps": 1,
"max_steps": 1320,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 220,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.3334113794167669e+19,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}