{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.1946587537091988,
"eval_steps": 126,
"global_step": 252,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0,
"eval_loss": 3.176351308822632,
"eval_runtime": 160.501,
"eval_samples_per_second": 11.19,
"eval_steps_per_second": 0.704,
"step": 0
},
{
"epoch": 0.004747774480712166,
"grad_norm": 14.394155502319336,
"learning_rate": 0.0,
"loss": 3.5173,
"step": 1
},
{
"epoch": 0.009495548961424332,
"grad_norm": 15.094048500061035,
"learning_rate": 3.174603174603175e-07,
"loss": 3.7637,
"step": 2
},
{
"epoch": 0.014243323442136498,
"grad_norm": 15.361183166503906,
"learning_rate": 6.34920634920635e-07,
"loss": 3.7578,
"step": 3
},
{
"epoch": 0.018991097922848664,
"grad_norm": 15.74487018585205,
"learning_rate": 9.523809523809525e-07,
"loss": 3.7525,
"step": 4
},
{
"epoch": 0.02373887240356083,
"grad_norm": 13.999384880065918,
"learning_rate": 1.26984126984127e-06,
"loss": 3.6421,
"step": 5
},
{
"epoch": 0.028486646884272996,
"grad_norm": 14.453660011291504,
"learning_rate": 1.5873015873015873e-06,
"loss": 3.656,
"step": 6
},
{
"epoch": 0.03323442136498516,
"grad_norm": 14.581559181213379,
"learning_rate": 1.904761904761905e-06,
"loss": 3.7368,
"step": 7
},
{
"epoch": 0.03798219584569733,
"grad_norm": 13.600449562072754,
"learning_rate": 2.222222222222222e-06,
"loss": 3.5658,
"step": 8
},
{
"epoch": 0.042729970326409496,
"grad_norm": 11.520191192626953,
"learning_rate": 2.53968253968254e-06,
"loss": 3.3966,
"step": 9
},
{
"epoch": 0.04747774480712166,
"grad_norm": 11.848709106445312,
"learning_rate": 2.8571428571428573e-06,
"loss": 3.4279,
"step": 10
},
{
"epoch": 0.05222551928783383,
"grad_norm": 11.579791069030762,
"learning_rate": 3.1746031746031746e-06,
"loss": 3.4126,
"step": 11
},
{
"epoch": 0.05697329376854599,
"grad_norm": 10.861617088317871,
"learning_rate": 3.492063492063492e-06,
"loss": 3.1806,
"step": 12
},
{
"epoch": 0.06172106824925816,
"grad_norm": 11.614352226257324,
"learning_rate": 3.80952380952381e-06,
"loss": 3.0254,
"step": 13
},
{
"epoch": 0.06646884272997032,
"grad_norm": 11.77472972869873,
"learning_rate": 4.126984126984127e-06,
"loss": 2.9602,
"step": 14
},
{
"epoch": 0.0712166172106825,
"grad_norm": 11.360806465148926,
"learning_rate": 4.444444444444444e-06,
"loss": 2.9155,
"step": 15
},
{
"epoch": 0.07596439169139466,
"grad_norm": 12.338720321655273,
"learning_rate": 4.761904761904762e-06,
"loss": 2.4742,
"step": 16
},
{
"epoch": 0.08071216617210683,
"grad_norm": 13.61253833770752,
"learning_rate": 5.07936507936508e-06,
"loss": 2.4264,
"step": 17
},
{
"epoch": 0.08545994065281899,
"grad_norm": 13.436763763427734,
"learning_rate": 5.396825396825397e-06,
"loss": 2.323,
"step": 18
},
{
"epoch": 0.09020771513353115,
"grad_norm": 11.5920991897583,
"learning_rate": 5.7142857142857145e-06,
"loss": 2.0951,
"step": 19
},
{
"epoch": 0.09495548961424333,
"grad_norm": 10.541767120361328,
"learning_rate": 6.031746031746032e-06,
"loss": 1.9129,
"step": 20
},
{
"epoch": 0.09970326409495549,
"grad_norm": 10.552443504333496,
"learning_rate": 6.349206349206349e-06,
"loss": 1.7977,
"step": 21
},
{
"epoch": 0.10445103857566766,
"grad_norm": 10.990846633911133,
"learning_rate": 6.666666666666667e-06,
"loss": 1.584,
"step": 22
},
{
"epoch": 0.10919881305637982,
"grad_norm": 9.763855934143066,
"learning_rate": 6.984126984126984e-06,
"loss": 1.401,
"step": 23
},
{
"epoch": 0.11394658753709198,
"grad_norm": 7.580389499664307,
"learning_rate": 7.301587301587301e-06,
"loss": 1.2337,
"step": 24
},
{
"epoch": 0.11869436201780416,
"grad_norm": 7.879193305969238,
"learning_rate": 7.61904761904762e-06,
"loss": 1.185,
"step": 25
},
{
"epoch": 0.12344213649851632,
"grad_norm": 7.337639331817627,
"learning_rate": 7.936507936507936e-06,
"loss": 1.0778,
"step": 26
},
{
"epoch": 0.1281899109792285,
"grad_norm": 5.66098690032959,
"learning_rate": 8.253968253968254e-06,
"loss": 0.9721,
"step": 27
},
{
"epoch": 0.13293768545994064,
"grad_norm": 4.907323360443115,
"learning_rate": 8.571428571428571e-06,
"loss": 0.9584,
"step": 28
},
{
"epoch": 0.13768545994065282,
"grad_norm": 3.510972023010254,
"learning_rate": 8.888888888888888e-06,
"loss": 0.8819,
"step": 29
},
{
"epoch": 0.142433234421365,
"grad_norm": 3.220461368560791,
"learning_rate": 9.206349206349207e-06,
"loss": 0.8042,
"step": 30
},
{
"epoch": 0.14718100890207717,
"grad_norm": 2.63944935798645,
"learning_rate": 9.523809523809525e-06,
"loss": 0.7757,
"step": 31
},
{
"epoch": 0.1519287833827893,
"grad_norm": 11.566150665283203,
"learning_rate": 9.841269841269842e-06,
"loss": 0.6714,
"step": 32
},
{
"epoch": 0.1566765578635015,
"grad_norm": 2.1101737022399902,
"learning_rate": 1.015873015873016e-05,
"loss": 0.6488,
"step": 33
},
{
"epoch": 0.16142433234421366,
"grad_norm": 3.368835926055908,
"learning_rate": 1.0476190476190477e-05,
"loss": 0.6551,
"step": 34
},
{
"epoch": 0.1661721068249258,
"grad_norm": 2.465441942214966,
"learning_rate": 1.0793650793650794e-05,
"loss": 0.6039,
"step": 35
},
{
"epoch": 0.17091988130563798,
"grad_norm": 1.3464806079864502,
"learning_rate": 1.1111111111111113e-05,
"loss": 0.5671,
"step": 36
},
{
"epoch": 0.17566765578635016,
"grad_norm": 2.223472833633423,
"learning_rate": 1.1428571428571429e-05,
"loss": 0.5821,
"step": 37
},
{
"epoch": 0.1804154302670623,
"grad_norm": 1.5176966190338135,
"learning_rate": 1.1746031746031748e-05,
"loss": 0.5397,
"step": 38
},
{
"epoch": 0.18516320474777448,
"grad_norm": 2.944708824157715,
"learning_rate": 1.2063492063492064e-05,
"loss": 0.5455,
"step": 39
},
{
"epoch": 0.18991097922848665,
"grad_norm": 1.5701731443405151,
"learning_rate": 1.2380952380952383e-05,
"loss": 0.5256,
"step": 40
},
{
"epoch": 0.1946587537091988,
"grad_norm": 1.0416580438613892,
"learning_rate": 1.2698412698412699e-05,
"loss": 0.4746,
"step": 41
},
{
"epoch": 0.19940652818991098,
"grad_norm": 1.0439484119415283,
"learning_rate": 1.3015873015873018e-05,
"loss": 0.4844,
"step": 42
},
{
"epoch": 0.20415430267062315,
"grad_norm": 2.974339485168457,
"learning_rate": 1.3333333333333333e-05,
"loss": 0.509,
"step": 43
},
{
"epoch": 0.20890207715133532,
"grad_norm": 1.0552765130996704,
"learning_rate": 1.3650793650793652e-05,
"loss": 0.5047,
"step": 44
},
{
"epoch": 0.21364985163204747,
"grad_norm": 3.1413824558258057,
"learning_rate": 1.3968253968253968e-05,
"loss": 0.4924,
"step": 45
},
{
"epoch": 0.21839762611275965,
"grad_norm": 1.9047034978866577,
"learning_rate": 1.4285714285714287e-05,
"loss": 0.456,
"step": 46
},
{
"epoch": 0.22314540059347182,
"grad_norm": 1.2171484231948853,
"learning_rate": 1.4603174603174603e-05,
"loss": 0.4459,
"step": 47
},
{
"epoch": 0.22789317507418397,
"grad_norm": 4.339561462402344,
"learning_rate": 1.4920634920634922e-05,
"loss": 0.4565,
"step": 48
},
{
"epoch": 0.23264094955489614,
"grad_norm": 2.3773863315582275,
"learning_rate": 1.523809523809524e-05,
"loss": 0.4568,
"step": 49
},
{
"epoch": 0.23738872403560832,
"grad_norm": 1.8882341384887695,
"learning_rate": 1.555555555555556e-05,
"loss": 0.4452,
"step": 50
},
{
"epoch": 0.24213649851632046,
"grad_norm": 1.1871311664581299,
"learning_rate": 1.5873015873015872e-05,
"loss": 0.4238,
"step": 51
},
{
"epoch": 0.24688427299703264,
"grad_norm": 2.8959860801696777,
"learning_rate": 1.6190476190476193e-05,
"loss": 0.426,
"step": 52
},
{
"epoch": 0.2516320474777448,
"grad_norm": 0.9582217335700989,
"learning_rate": 1.6507936507936507e-05,
"loss": 0.4246,
"step": 53
},
{
"epoch": 0.256379821958457,
"grad_norm": 0.8898813128471375,
"learning_rate": 1.6825396825396828e-05,
"loss": 0.4072,
"step": 54
},
{
"epoch": 0.26112759643916916,
"grad_norm": 0.9023370742797852,
"learning_rate": 1.7142857142857142e-05,
"loss": 0.4083,
"step": 55
},
{
"epoch": 0.2658753709198813,
"grad_norm": 1.1952933073043823,
"learning_rate": 1.7460317460317463e-05,
"loss": 0.401,
"step": 56
},
{
"epoch": 0.27062314540059346,
"grad_norm": 1.1986902952194214,
"learning_rate": 1.7777777777777777e-05,
"loss": 0.4006,
"step": 57
},
{
"epoch": 0.27537091988130563,
"grad_norm": 0.6974486112594604,
"learning_rate": 1.8095238095238097e-05,
"loss": 0.4173,
"step": 58
},
{
"epoch": 0.2801186943620178,
"grad_norm": 1.2794380187988281,
"learning_rate": 1.8412698412698415e-05,
"loss": 0.3893,
"step": 59
},
{
"epoch": 0.28486646884273,
"grad_norm": 0.7234415411949158,
"learning_rate": 1.8730158730158732e-05,
"loss": 0.3904,
"step": 60
},
{
"epoch": 0.28961424332344216,
"grad_norm": 0.6361059546470642,
"learning_rate": 1.904761904761905e-05,
"loss": 0.4136,
"step": 61
},
{
"epoch": 0.29436201780415433,
"grad_norm": 1.7151949405670166,
"learning_rate": 1.9365079365079367e-05,
"loss": 0.3943,
"step": 62
},
{
"epoch": 0.29910979228486645,
"grad_norm": 0.6753223538398743,
"learning_rate": 1.9682539682539684e-05,
"loss": 0.3938,
"step": 63
},
{
"epoch": 0.3038575667655786,
"grad_norm": 2.0604114532470703,
"learning_rate": 2e-05,
"loss": 0.3909,
"step": 64
},
{
"epoch": 0.3086053412462908,
"grad_norm": 0.7779602408409119,
"learning_rate": 1.9999846502070808e-05,
"loss": 0.3661,
"step": 65
},
{
"epoch": 0.313353115727003,
"grad_norm": 0.7002611756324768,
"learning_rate": 1.9999386012995554e-05,
"loss": 0.3522,
"step": 66
},
{
"epoch": 0.31810089020771515,
"grad_norm": 0.6202103495597839,
"learning_rate": 1.999861854691106e-05,
"loss": 0.3831,
"step": 67
},
{
"epoch": 0.3228486646884273,
"grad_norm": 0.44983768463134766,
"learning_rate": 1.9997544127378217e-05,
"loss": 0.37,
"step": 68
},
{
"epoch": 0.32759643916913944,
"grad_norm": 0.6840509176254272,
"learning_rate": 1.999616278738126e-05,
"loss": 0.3751,
"step": 69
},
{
"epoch": 0.3323442136498516,
"grad_norm": 0.416205495595932,
"learning_rate": 1.999447456932676e-05,
"loss": 0.3535,
"step": 70
},
{
"epoch": 0.3370919881305638,
"grad_norm": 0.5574557185173035,
"learning_rate": 1.9992479525042305e-05,
"loss": 0.3792,
"step": 71
},
{
"epoch": 0.34183976261127597,
"grad_norm": 0.46201279759407043,
"learning_rate": 1.9990177715774927e-05,
"loss": 0.3519,
"step": 72
},
{
"epoch": 0.34658753709198814,
"grad_norm": 0.4977426826953888,
"learning_rate": 1.9987569212189224e-05,
"loss": 0.3488,
"step": 73
},
{
"epoch": 0.3513353115727003,
"grad_norm": 0.45472055673599243,
"learning_rate": 1.9984654094365175e-05,
"loss": 0.3602,
"step": 74
},
{
"epoch": 0.3560830860534125,
"grad_norm": 0.4730567932128906,
"learning_rate": 1.9981432451795687e-05,
"loss": 0.3583,
"step": 75
},
{
"epoch": 0.3608308605341246,
"grad_norm": 0.49239611625671387,
"learning_rate": 1.997790438338385e-05,
"loss": 0.3474,
"step": 76
},
{
"epoch": 0.3655786350148368,
"grad_norm": 0.3924044370651245,
"learning_rate": 1.997406999743991e-05,
"loss": 0.3538,
"step": 77
},
{
"epoch": 0.37032640949554896,
"grad_norm": 0.4983470141887665,
"learning_rate": 1.996992941167792e-05,
"loss": 0.3699,
"step": 78
},
{
"epoch": 0.37507418397626113,
"grad_norm": 0.4259602129459381,
"learning_rate": 1.9965482753212154e-05,
"loss": 0.3529,
"step": 79
},
{
"epoch": 0.3798219584569733,
"grad_norm": 0.3953029215335846,
"learning_rate": 1.9960730158553186e-05,
"loss": 0.3514,
"step": 80
},
{
"epoch": 0.3845697329376855,
"grad_norm": 0.7401227951049805,
"learning_rate": 1.99556717736037e-05,
"loss": 0.3795,
"step": 81
},
{
"epoch": 0.3893175074183976,
"grad_norm": 2.0949933528900146,
"learning_rate": 1.9950307753654016e-05,
"loss": 0.3747,
"step": 82
},
{
"epoch": 0.3940652818991098,
"grad_norm": 0.48020580410957336,
"learning_rate": 1.9944638263377332e-05,
"loss": 0.3678,
"step": 83
},
{
"epoch": 0.39881305637982195,
"grad_norm": 0.4131617248058319,
"learning_rate": 1.9938663476824646e-05,
"loss": 0.3787,
"step": 84
},
{
"epoch": 0.4035608308605341,
"grad_norm": 0.4024046063423157,
"learning_rate": 1.9932383577419432e-05,
"loss": 0.3402,
"step": 85
},
{
"epoch": 0.4083086053412463,
"grad_norm": 0.35955172777175903,
"learning_rate": 1.9925798757952003e-05,
"loss": 0.3605,
"step": 86
},
{
"epoch": 0.4130563798219585,
"grad_norm": 0.4338579773902893,
"learning_rate": 1.9918909220573588e-05,
"loss": 0.357,
"step": 87
},
{
"epoch": 0.41780415430267065,
"grad_norm": 0.3916882872581482,
"learning_rate": 1.991171517679013e-05,
"loss": 0.354,
"step": 88
},
{
"epoch": 0.42255192878338277,
"grad_norm": 0.39583539962768555,
"learning_rate": 1.9904216847455795e-05,
"loss": 0.3284,
"step": 89
},
{
"epoch": 0.42729970326409494,
"grad_norm": 0.43609434366226196,
"learning_rate": 1.9896414462766188e-05,
"loss": 0.3424,
"step": 90
},
{
"epoch": 0.4320474777448071,
"grad_norm": 0.4122212529182434,
"learning_rate": 1.9888308262251286e-05,
"loss": 0.3386,
"step": 91
},
{
"epoch": 0.4367952522255193,
"grad_norm": 0.3249572515487671,
"learning_rate": 1.9879898494768093e-05,
"loss": 0.3157,
"step": 92
},
{
"epoch": 0.44154302670623147,
"grad_norm": 0.745229959487915,
"learning_rate": 1.9871185418492978e-05,
"loss": 0.3738,
"step": 93
},
{
"epoch": 0.44629080118694364,
"grad_norm": 0.39437487721443176,
"learning_rate": 1.9862169300913784e-05,
"loss": 0.3386,
"step": 94
},
{
"epoch": 0.45103857566765576,
"grad_norm": 0.3622061312198639,
"learning_rate": 1.985285041882158e-05,
"loss": 0.3234,
"step": 95
},
{
"epoch": 0.45578635014836794,
"grad_norm": 0.4053417146205902,
"learning_rate": 1.9843229058302192e-05,
"loss": 0.3608,
"step": 96
},
{
"epoch": 0.4605341246290801,
"grad_norm": 0.4641744792461395,
"learning_rate": 1.9833305514727396e-05,
"loss": 0.3431,
"step": 97
},
{
"epoch": 0.4652818991097923,
"grad_norm": 0.39851275086402893,
"learning_rate": 1.9823080092745878e-05,
"loss": 0.3531,
"step": 98
},
{
"epoch": 0.47002967359050446,
"grad_norm": 0.3545701205730438,
"learning_rate": 1.9812553106273848e-05,
"loss": 0.3842,
"step": 99
},
{
"epoch": 0.47477744807121663,
"grad_norm": 0.37058180570602417,
"learning_rate": 1.9801724878485438e-05,
"loss": 0.3407,
"step": 100
},
{
"epoch": 0.4795252225519288,
"grad_norm": 0.33017316460609436,
"learning_rate": 1.9790595741802757e-05,
"loss": 0.3369,
"step": 101
},
{
"epoch": 0.48427299703264093,
"grad_norm": 0.3639221787452698,
"learning_rate": 1.9779166037885692e-05,
"loss": 0.3427,
"step": 102
},
{
"epoch": 0.4890207715133531,
"grad_norm": 0.43298575282096863,
"learning_rate": 1.9767436117621416e-05,
"loss": 0.3693,
"step": 103
},
{
"epoch": 0.4937685459940653,
"grad_norm": 0.33556169271469116,
"learning_rate": 1.9755406341113622e-05,
"loss": 0.3266,
"step": 104
},
{
"epoch": 0.49851632047477745,
"grad_norm": 0.33984947204589844,
"learning_rate": 1.974307707767147e-05,
"loss": 0.3309,
"step": 105
},
{
"epoch": 0.5032640949554896,
"grad_norm": 0.35068896412849426,
"learning_rate": 1.973044870579824e-05,
"loss": 0.3516,
"step": 106
},
{
"epoch": 0.5080118694362018,
"grad_norm": 0.3809346556663513,
"learning_rate": 1.971752161317972e-05,
"loss": 0.3361,
"step": 107
},
{
"epoch": 0.512759643916914,
"grad_norm": 0.359453946352005,
"learning_rate": 1.9704296196672298e-05,
"loss": 0.3737,
"step": 108
},
{
"epoch": 0.5175074183976262,
"grad_norm": 0.3776707947254181,
"learning_rate": 1.969077286229078e-05,
"loss": 0.336,
"step": 109
},
{
"epoch": 0.5222551928783383,
"grad_norm": 0.37052029371261597,
"learning_rate": 1.9676952025195937e-05,
"loss": 0.3278,
"step": 110
},
{
"epoch": 0.5270029673590505,
"grad_norm": 0.37363845109939575,
"learning_rate": 1.966283410968174e-05,
"loss": 0.3481,
"step": 111
},
{
"epoch": 0.5317507418397626,
"grad_norm": 0.3355433940887451,
"learning_rate": 1.964841954916235e-05,
"loss": 0.3379,
"step": 112
},
{
"epoch": 0.5364985163204747,
"grad_norm": 0.3838210105895996,
"learning_rate": 1.9633708786158803e-05,
"loss": 0.3388,
"step": 113
},
{
"epoch": 0.5412462908011869,
"grad_norm": 0.3923501670360565,
"learning_rate": 1.9618702272285434e-05,
"loss": 0.3749,
"step": 114
},
{
"epoch": 0.5459940652818991,
"grad_norm": 0.4121781885623932,
"learning_rate": 1.9603400468236e-05,
"loss": 0.3149,
"step": 115
},
{
"epoch": 0.5507418397626113,
"grad_norm": 0.31165602803230286,
"learning_rate": 1.9587803843769547e-05,
"loss": 0.3162,
"step": 116
},
{
"epoch": 0.5554896142433234,
"grad_norm": 0.3426300883293152,
"learning_rate": 1.9571912877695995e-05,
"loss": 0.3408,
"step": 117
},
{
"epoch": 0.5602373887240356,
"grad_norm": 0.33053115010261536,
"learning_rate": 1.955572805786141e-05,
"loss": 0.3397,
"step": 118
},
{
"epoch": 0.5649851632047478,
"grad_norm": 0.30746808648109436,
"learning_rate": 1.9539249881133062e-05,
"loss": 0.3287,
"step": 119
},
{
"epoch": 0.56973293768546,
"grad_norm": 0.3441820442676544,
"learning_rate": 1.9522478853384154e-05,
"loss": 0.3225,
"step": 120
},
{
"epoch": 0.5744807121661721,
"grad_norm": 0.35982808470726013,
"learning_rate": 1.9505415489478293e-05,
"loss": 0.3429,
"step": 121
},
{
"epoch": 0.5792284866468843,
"grad_norm": 0.3274133801460266,
"learning_rate": 1.948806031325368e-05,
"loss": 0.3436,
"step": 122
},
{
"epoch": 0.5839762611275965,
"grad_norm": 0.38329532742500305,
"learning_rate": 1.9470413857507036e-05,
"loss": 0.3211,
"step": 123
},
{
"epoch": 0.5887240356083087,
"grad_norm": 0.38898247480392456,
"learning_rate": 1.945247666397725e-05,
"loss": 0.3409,
"step": 124
},
{
"epoch": 0.5934718100890207,
"grad_norm": 0.3356146216392517,
"learning_rate": 1.943424928332873e-05,
"loss": 0.3223,
"step": 125
},
{
"epoch": 0.5982195845697329,
"grad_norm": 0.33519861102104187,
"learning_rate": 1.9415732275134515e-05,
"loss": 0.3369,
"step": 126
},
{
"epoch": 0.5982195845697329,
"eval_loss": 0.15260468423366547,
"eval_runtime": 142.1665,
"eval_samples_per_second": 12.633,
"eval_steps_per_second": 0.795,
"step": 126
},
{
"epoch": 0.6029673590504451,
"grad_norm": 0.31465238332748413,
"learning_rate": 1.9396926207859085e-05,
"loss": 0.3189,
"step": 127
},
{
"epoch": 0.6077151335311572,
"grad_norm": 0.33517467975616455,
"learning_rate": 1.937783165884092e-05,
"loss": 0.3525,
"step": 128
},
{
"epoch": 0.6124629080118694,
"grad_norm": 0.35138949751853943,
"learning_rate": 1.9358449214274763e-05,
"loss": 0.3466,
"step": 129
},
{
"epoch": 0.6172106824925816,
"grad_norm": 0.3277546167373657,
"learning_rate": 1.9338779469193638e-05,
"loss": 0.3304,
"step": 130
},
{
"epoch": 0.6219584569732938,
"grad_norm": 5.854161262512207,
"learning_rate": 1.931882302745057e-05,
"loss": 0.4189,
"step": 131
},
{
"epoch": 0.626706231454006,
"grad_norm": 0.3688313961029053,
"learning_rate": 1.9298580501700058e-05,
"loss": 0.3393,
"step": 132
},
{
"epoch": 0.6314540059347181,
"grad_norm": 0.3257688581943512,
"learning_rate": 1.9278052513379256e-05,
"loss": 0.331,
"step": 133
},
{
"epoch": 0.6362017804154303,
"grad_norm": 0.33901330828666687,
"learning_rate": 1.9257239692688907e-05,
"loss": 0.342,
"step": 134
},
{
"epoch": 0.6409495548961425,
"grad_norm": 0.3109529912471771,
"learning_rate": 1.9236142678573983e-05,
"loss": 0.3151,
"step": 135
},
{
"epoch": 0.6456973293768546,
"grad_norm": 0.34090420603752136,
"learning_rate": 1.921476211870408e-05,
"loss": 0.3293,
"step": 136
},
{
"epoch": 0.6504451038575668,
"grad_norm": 0.3669479787349701,
"learning_rate": 1.9193098669453532e-05,
"loss": 0.3574,
"step": 137
},
{
"epoch": 0.6551928783382789,
"grad_norm": 0.3427859842777252,
"learning_rate": 1.9171152995881257e-05,
"loss": 0.3352,
"step": 138
},
{
"epoch": 0.6599406528189911,
"grad_norm": 0.3781029284000397,
"learning_rate": 1.9148925771710347e-05,
"loss": 0.3384,
"step": 139
},
{
"epoch": 0.6646884272997032,
"grad_norm": 0.4394756257534027,
"learning_rate": 1.912641767930738e-05,
"loss": 0.3234,
"step": 140
},
{
"epoch": 0.6694362017804154,
"grad_norm": 0.3640480637550354,
"learning_rate": 1.9103629409661468e-05,
"loss": 0.3266,
"step": 141
},
{
"epoch": 0.6741839762611276,
"grad_norm": 0.32769304513931274,
"learning_rate": 1.908056166236305e-05,
"loss": 0.3255,
"step": 142
},
{
"epoch": 0.6789317507418398,
"grad_norm": 0.3409084379673004,
"learning_rate": 1.9057215145582418e-05,
"loss": 0.3285,
"step": 143
},
{
"epoch": 0.6836795252225519,
"grad_norm": 0.3296876549720764,
"learning_rate": 1.9033590576047967e-05,
"loss": 0.3197,
"step": 144
},
{
"epoch": 0.6884272997032641,
"grad_norm": 0.3788384199142456,
"learning_rate": 1.900968867902419e-05,
"loss": 0.33,
"step": 145
},
{
"epoch": 0.6931750741839763,
"grad_norm": 0.31414154171943665,
"learning_rate": 1.898551018828944e-05,
"loss": 0.313,
"step": 146
},
{
"epoch": 0.6979228486646885,
"grad_norm": 0.4816839396953583,
"learning_rate": 1.8961055846113358e-05,
"loss": 0.3407,
"step": 147
},
{
"epoch": 0.7026706231454006,
"grad_norm": 0.3418005406856537,
"learning_rate": 1.8936326403234125e-05,
"loss": 0.3311,
"step": 148
},
{
"epoch": 0.7074183976261128,
"grad_norm": 0.31916749477386475,
"learning_rate": 1.8911322618835393e-05,
"loss": 0.3042,
"step": 149
},
{
"epoch": 0.712166172106825,
"grad_norm": 1.1523184776306152,
"learning_rate": 1.888604526052299e-05,
"loss": 0.3218,
"step": 150
},
{
"epoch": 0.716913946587537,
"grad_norm": 0.2973329424858093,
"learning_rate": 1.8860495104301346e-05,
"loss": 0.3061,
"step": 151
},
{
"epoch": 0.7216617210682492,
"grad_norm": 0.3566143214702606,
"learning_rate": 1.8834672934549677e-05,
"loss": 0.3287,
"step": 152
},
{
"epoch": 0.7264094955489614,
"grad_norm": 0.3308376967906952,
"learning_rate": 1.8808579543997892e-05,
"loss": 0.3034,
"step": 153
},
{
"epoch": 0.7311572700296736,
"grad_norm": 0.33520743250846863,
"learning_rate": 1.8782215733702286e-05,
"loss": 0.3296,
"step": 154
},
{
"epoch": 0.7359050445103857,
"grad_norm": 0.3926364481449127,
"learning_rate": 1.8755582313020912e-05,
"loss": 0.3135,
"step": 155
},
{
"epoch": 0.7406528189910979,
"grad_norm": 0.37003567814826965,
"learning_rate": 1.8728680099588748e-05,
"loss": 0.3455,
"step": 156
},
{
"epoch": 0.7454005934718101,
"grad_norm": 0.31512436270713806,
"learning_rate": 1.870150991929261e-05,
"loss": 0.3146,
"step": 157
},
{
"epoch": 0.7501483679525223,
"grad_norm": 0.35992729663848877,
"learning_rate": 1.867407260624578e-05,
"loss": 0.3138,
"step": 158
},
{
"epoch": 0.7548961424332344,
"grad_norm": 0.33734387159347534,
"learning_rate": 1.864636900276241e-05,
"loss": 0.3453,
"step": 159
},
{
"epoch": 0.7596439169139466,
"grad_norm": 0.403726726770401,
"learning_rate": 1.8618399959331642e-05,
"loss": 0.3176,
"step": 160
},
{
"epoch": 0.7643916913946588,
"grad_norm": 0.33617129921913147,
"learning_rate": 1.8590166334591533e-05,
"loss": 0.3235,
"step": 161
},
{
"epoch": 0.769139465875371,
"grad_norm": 0.3692370355129242,
"learning_rate": 1.8561668995302668e-05,
"loss": 0.3167,
"step": 162
},
{
"epoch": 0.7738872403560831,
"grad_norm": 0.31387221813201904,
"learning_rate": 1.8532908816321557e-05,
"loss": 0.2978,
"step": 163
},
{
"epoch": 0.7786350148367952,
"grad_norm": 0.39496850967407227,
"learning_rate": 1.850388668057379e-05,
"loss": 0.3167,
"step": 164
},
{
"epoch": 0.7833827893175074,
"grad_norm": 0.29015660285949707,
"learning_rate": 1.8474603479026912e-05,
"loss": 0.3111,
"step": 165
},
{
"epoch": 0.7881305637982196,
"grad_norm": 0.4949202835559845,
"learning_rate": 1.844506011066308e-05,
"loss": 0.3231,
"step": 166
},
{
"epoch": 0.7928783382789317,
"grad_norm": 0.3161013126373291,
"learning_rate": 1.841525748245147e-05,
"loss": 0.3235,
"step": 167
},
{
"epoch": 0.7976261127596439,
"grad_norm": 0.3269205093383789,
"learning_rate": 1.8385196509320424e-05,
"loss": 0.3302,
"step": 168
},
{
"epoch": 0.8023738872403561,
"grad_norm": 0.2890471816062927,
"learning_rate": 1.8354878114129368e-05,
"loss": 0.297,
"step": 169
},
{
"epoch": 0.8071216617210683,
"grad_norm": 0.3379496932029724,
"learning_rate": 1.8324303227640472e-05,
"loss": 0.3032,
"step": 170
},
{
"epoch": 0.8118694362017804,
"grad_norm": 0.3012542128562927,
"learning_rate": 1.8293472788490096e-05,
"loss": 0.3119,
"step": 171
},
{
"epoch": 0.8166172106824926,
"grad_norm": 0.31910914182662964,
"learning_rate": 1.826238774315995e-05,
"loss": 0.3249,
"step": 172
},
{
"epoch": 0.8213649851632048,
"grad_norm": 0.2994840741157532,
"learning_rate": 1.8231049045948054e-05,
"loss": 0.3106,
"step": 173
},
{
"epoch": 0.826112759643917,
"grad_norm": 0.31799837946891785,
"learning_rate": 1.8199457658939425e-05,
"loss": 0.3109,
"step": 174
},
{
"epoch": 0.8308605341246291,
"grad_norm": 0.3020702302455902,
"learning_rate": 1.816761455197657e-05,
"loss": 0.3016,
"step": 175
},
{
"epoch": 0.8356083086053413,
"grad_norm": 0.33007511496543884,
"learning_rate": 1.8135520702629677e-05,
"loss": 0.3326,
"step": 176
},
{
"epoch": 0.8403560830860534,
"grad_norm": 0.34143343567848206,
"learning_rate": 1.8103177096166632e-05,
"loss": 0.3245,
"step": 177
},
{
"epoch": 0.8451038575667655,
"grad_norm": 0.325300008058548,
"learning_rate": 1.8070584725522763e-05,
"loss": 0.32,
"step": 178
},
{
"epoch": 0.8498516320474777,
"grad_norm": 0.38607487082481384,
"learning_rate": 1.803774459127034e-05,
"loss": 0.289,
"step": 179
},
{
"epoch": 0.8545994065281899,
"grad_norm": 0.31634339690208435,
"learning_rate": 1.8004657701587893e-05,
"loss": 0.3185,
"step": 180
},
{
"epoch": 0.8593471810089021,
"grad_norm": 0.32501840591430664,
"learning_rate": 1.7971325072229227e-05,
"loss": 0.3291,
"step": 181
},
{
"epoch": 0.8640949554896142,
"grad_norm": 0.3348029553890228,
"learning_rate": 1.7937747726492256e-05,
"loss": 0.324,
"step": 182
},
{
"epoch": 0.8688427299703264,
"grad_norm": 0.34350311756134033,
"learning_rate": 1.7903926695187595e-05,
"loss": 0.3351,
"step": 183
},
{
"epoch": 0.8735905044510386,
"grad_norm": 0.34770363569259644,
"learning_rate": 1.7869863016606893e-05,
"loss": 0.3368,
"step": 184
},
{
"epoch": 0.8783382789317508,
"grad_norm": 0.40538617968559265,
"learning_rate": 1.783555773649097e-05,
"loss": 0.3027,
"step": 185
},
{
"epoch": 0.8830860534124629,
"grad_norm": 0.37002649903297424,
"learning_rate": 1.7801011907997728e-05,
"loss": 0.3296,
"step": 186
},
{
"epoch": 0.8878338278931751,
"grad_norm": 0.3094744384288788,
"learning_rate": 1.7766226591669787e-05,
"loss": 0.3211,
"step": 187
},
{
"epoch": 0.8925816023738873,
"grad_norm": 0.3551942706108093,
"learning_rate": 1.773120285540195e-05,
"loss": 0.315,
"step": 188
},
{
"epoch": 0.8973293768545995,
"grad_norm": 0.3153332471847534,
"learning_rate": 1.7695941774408424e-05,
"loss": 0.312,
"step": 189
},
{
"epoch": 0.9020771513353115,
"grad_norm": 0.30479609966278076,
"learning_rate": 1.766044443118978e-05,
"loss": 0.3176,
"step": 190
},
{
"epoch": 0.9068249258160237,
"grad_norm": 0.3118721842765808,
"learning_rate": 1.7624711915499767e-05,
"loss": 0.3366,
"step": 191
},
{
"epoch": 0.9115727002967359,
"grad_norm": 0.3304102122783661,
"learning_rate": 1.75887453243118e-05,
"loss": 0.3321,
"step": 192
},
{
"epoch": 0.916320474777448,
"grad_norm": 0.29333484172821045,
"learning_rate": 1.755254576178535e-05,
"loss": 0.306,
"step": 193
},
{
"epoch": 0.9210682492581602,
"grad_norm": 0.34330320358276367,
"learning_rate": 1.7516114339231984e-05,
"loss": 0.3173,
"step": 194
},
{
"epoch": 0.9258160237388724,
"grad_norm": 0.28864824771881104,
"learning_rate": 1.747945217508129e-05,
"loss": 0.2955,
"step": 195
},
{
"epoch": 0.9305637982195846,
"grad_norm": 0.32548072934150696,
"learning_rate": 1.7442560394846518e-05,
"loss": 0.3284,
"step": 196
},
{
"epoch": 0.9353115727002967,
"grad_norm": 0.3424006402492523,
"learning_rate": 1.740544013109005e-05,
"loss": 0.315,
"step": 197
},
{
"epoch": 0.9400593471810089,
"grad_norm": 0.31073182821273804,
"learning_rate": 1.73680925233886e-05,
"loss": 0.3186,
"step": 198
},
{
"epoch": 0.9448071216617211,
"grad_norm": 0.37291258573532104,
"learning_rate": 1.7330518718298263e-05,
"loss": 0.2982,
"step": 199
},
{
"epoch": 0.9495548961424333,
"grad_norm": 0.2813059091567993,
"learning_rate": 1.7292719869319295e-05,
"loss": 0.3189,
"step": 200
},
{
"epoch": 0.9543026706231454,
"grad_norm": 0.297116219997406,
"learning_rate": 1.72546971368607e-05,
"loss": 0.2888,
"step": 201
},
{
"epoch": 0.9590504451038576,
"grad_norm": 0.3253256380558014,
"learning_rate": 1.7216451688204623e-05,
"loss": 0.327,
"step": 202
},
{
"epoch": 0.9637982195845697,
"grad_norm": 0.3983827233314514,
"learning_rate": 1.717798469747049e-05,
"loss": 0.309,
"step": 203
},
{
"epoch": 0.9685459940652819,
"grad_norm": 0.30336683988571167,
"learning_rate": 1.7139297345578992e-05,
"loss": 0.3222,
"step": 204
},
{
"epoch": 0.973293768545994,
"grad_norm": 0.3853195309638977,
"learning_rate": 1.7100390820215805e-05,
"loss": 0.3182,
"step": 205
},
{
"epoch": 0.9780415430267062,
"grad_norm": 0.3312664031982422,
"learning_rate": 1.7061266315795146e-05,
"loss": 0.2972,
"step": 206
},
{
"epoch": 0.9827893175074184,
"grad_norm": 0.3694183826446533,
"learning_rate": 1.7021925033423096e-05,
"loss": 0.3177,
"step": 207
},
{
"epoch": 0.9875370919881306,
"grad_norm": 0.3218733072280884,
"learning_rate": 1.698236818086073e-05,
"loss": 0.304,
"step": 208
},
{
"epoch": 0.9922848664688427,
"grad_norm": 0.33860743045806885,
"learning_rate": 1.694259697248704e-05,
"loss": 0.3031,
"step": 209
},
{
"epoch": 0.9970326409495549,
"grad_norm": 0.42556554079055786,
"learning_rate": 1.690261262926165e-05,
"loss": 0.3196,
"step": 210
},
{
"epoch": 1.0,
"grad_norm": 0.38205158710479736,
"learning_rate": 1.686241637868734e-05,
"loss": 0.2881,
"step": 211
},
{
"epoch": 1.004747774480712,
"grad_norm": 0.36370348930358887,
"learning_rate": 1.682200945477235e-05,
"loss": 0.3082,
"step": 212
},
{
"epoch": 1.0094955489614243,
"grad_norm": 0.3192894160747528,
"learning_rate": 1.6781393097992512e-05,
"loss": 0.3233,
"step": 213
},
{
"epoch": 1.0142433234421364,
"grad_norm": 0.34595000743865967,
"learning_rate": 1.6740568555253153e-05,
"loss": 0.2932,
"step": 214
},
{
"epoch": 1.0189910979228487,
"grad_norm": 0.32576438784599304,
"learning_rate": 1.669953707985084e-05,
"loss": 0.2944,
"step": 215
},
{
"epoch": 1.0237388724035608,
"grad_norm": 0.3424221873283386,
"learning_rate": 1.6658299931434857e-05,
"loss": 0.305,
"step": 216
},
{
"epoch": 1.028486646884273,
"grad_norm": 0.32766616344451904,
"learning_rate": 1.6616858375968596e-05,
"loss": 0.3266,
"step": 217
},
{
"epoch": 1.0332344213649851,
"grad_norm": 0.34646332263946533,
"learning_rate": 1.657521368569064e-05,
"loss": 0.316,
"step": 218
},
{
"epoch": 1.0379821958456974,
"grad_norm": 0.30172574520111084,
"learning_rate": 1.6533367139075732e-05,
"loss": 0.2973,
"step": 219
},
{
"epoch": 1.0427299703264095,
"grad_norm": 0.33457738161087036,
"learning_rate": 1.649132002079552e-05,
"loss": 0.3015,
"step": 220
},
{
"epoch": 1.0474777448071217,
"grad_norm": 0.32709312438964844,
"learning_rate": 1.6449073621679128e-05,
"loss": 0.2967,
"step": 221
},
{
"epoch": 1.0522255192878338,
"grad_norm": 0.32702985405921936,
"learning_rate": 1.6406629238673507e-05,
"loss": 0.3375,
"step": 222
},
{
"epoch": 1.056973293768546,
"grad_norm": 0.3030819296836853,
"learning_rate": 1.6363988174803638e-05,
"loss": 0.3171,
"step": 223
},
{
"epoch": 1.0617210682492582,
"grad_norm": 0.3053749203681946,
"learning_rate": 1.632115173913252e-05,
"loss": 0.3188,
"step": 224
},
{
"epoch": 1.0664688427299702,
"grad_norm": 0.33132317662239075,
"learning_rate": 1.627812124672099e-05,
"loss": 0.325,
"step": 225
},
{
"epoch": 1.0712166172106825,
"grad_norm": 0.32221683859825134,
"learning_rate": 1.6234898018587336e-05,
"loss": 0.3249,
"step": 226
},
{
"epoch": 1.0759643916913946,
"grad_norm": 0.32629886269569397,
"learning_rate": 1.619148338166677e-05,
"loss": 0.308,
"step": 227
},
{
"epoch": 1.0807121661721069,
"grad_norm": 0.31524035334587097,
"learning_rate": 1.614787866877066e-05,
"loss": 0.304,
"step": 228
},
{
"epoch": 1.085459940652819,
"grad_norm": 0.33473050594329834,
"learning_rate": 1.6104085218545633e-05,
"loss": 0.3222,
"step": 229
},
{
"epoch": 1.0902077151335312,
"grad_norm": 0.28698474168777466,
"learning_rate": 1.6060104375432476e-05,
"loss": 0.3057,
"step": 230
},
{
"epoch": 1.0949554896142433,
"grad_norm": 0.2982139587402344,
"learning_rate": 1.601593748962485e-05,
"loss": 0.3074,
"step": 231
},
{
"epoch": 1.0997032640949556,
"grad_norm": 0.29852068424224854,
"learning_rate": 1.5971585917027864e-05,
"loss": 0.3152,
"step": 232
},
{
"epoch": 1.1044510385756676,
"grad_norm": 0.28795862197875977,
"learning_rate": 1.5927051019216428e-05,
"loss": 0.3084,
"step": 233
},
{
"epoch": 1.10919881305638,
"grad_norm": 0.27636635303497314,
"learning_rate": 1.588233416339345e-05,
"loss": 0.3079,
"step": 234
},
{
"epoch": 1.113946587537092,
"grad_norm": 0.3473345637321472,
"learning_rate": 1.5837436722347902e-05,
"loss": 0.2836,
"step": 235
},
{
"epoch": 1.1186943620178043,
"grad_norm": 0.3350068926811218,
"learning_rate": 1.5792360074412612e-05,
"loss": 0.3066,
"step": 236
},
{
"epoch": 1.1234421364985163,
"grad_norm": 0.33414971828460693,
"learning_rate": 1.5747105603422013e-05,
"loss": 0.3082,
"step": 237
},
{
"epoch": 1.1281899109792284,
"grad_norm": 0.32059019804000854,
"learning_rate": 1.570167469866962e-05,
"loss": 0.3103,
"step": 238
},
{
"epoch": 1.1329376854599407,
"grad_norm": 0.32460328936576843,
"learning_rate": 1.5656068754865388e-05,
"loss": 0.3151,
"step": 239
},
{
"epoch": 1.1376854599406527,
"grad_norm": 0.3252885341644287,
"learning_rate": 1.561028917209291e-05,
"loss": 0.3248,
"step": 240
},
{
"epoch": 1.142433234421365,
"grad_norm": 0.3324680030345917,
"learning_rate": 1.5564337355766412e-05,
"loss": 0.3119,
"step": 241
},
{
"epoch": 1.147181008902077,
"grad_norm": 0.3315082788467407,
"learning_rate": 1.551821471658763e-05,
"loss": 0.3094,
"step": 242
},
{
"epoch": 1.1519287833827894,
"grad_norm": 0.3459841310977936,
"learning_rate": 1.5471922670502472e-05,
"loss": 0.2982,
"step": 243
},
{
"epoch": 1.1566765578635014,
"grad_norm": 0.30599355697631836,
"learning_rate": 1.5425462638657597e-05,
"loss": 0.2802,
"step": 244
},
{
"epoch": 1.1614243323442137,
"grad_norm": 0.3451800048351288,
"learning_rate": 1.5378836047356725e-05,
"loss": 0.3065,
"step": 245
},
{
"epoch": 1.1661721068249258,
"grad_norm": 0.28921040892601013,
"learning_rate": 1.5332044328016916e-05,
"loss": 0.2935,
"step": 246
},
{
"epoch": 1.170919881305638,
"grad_norm": 0.3126339912414551,
"learning_rate": 1.5285088917124555e-05,
"loss": 0.2794,
"step": 247
},
{
"epoch": 1.1756676557863501,
"grad_norm": 0.33224689960479736,
"learning_rate": 1.5237971256191325e-05,
"loss": 0.3068,
"step": 248
},
{
"epoch": 1.1804154302670624,
"grad_norm": 0.33340543508529663,
"learning_rate": 1.5190692791709891e-05,
"loss": 0.2895,
"step": 249
},
{
"epoch": 1.1851632047477745,
"grad_norm": 0.3247314989566803,
"learning_rate": 1.5143254975109538e-05,
"loss": 0.3005,
"step": 250
},
{
"epoch": 1.1899109792284865,
"grad_norm": 0.3434915542602539,
"learning_rate": 1.5095659262711588e-05,
"loss": 0.2991,
"step": 251
},
{
"epoch": 1.1946587537091988,
"grad_norm": 0.28737980127334595,
"learning_rate": 1.5047907115684695e-05,
"loss": 0.2603,
"step": 252
},
{
"epoch": 1.1946587537091988,
"eval_loss": 0.15092316269874573,
"eval_runtime": 136.3285,
"eval_samples_per_second": 13.174,
"eval_steps_per_second": 0.829,
"step": 252
}
],
"logging_steps": 1,
"max_steps": 630,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 126,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 5.752167478826369e+18,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}