{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.0,
"eval_steps": 500,
"global_step": 210,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.023809523809523808,
"grad_norm": 26.742956161499023,
"learning_rate": 5.0000000000000004e-08,
"loss": 4.1563,
"step": 1
},
{
"epoch": 0.047619047619047616,
"grad_norm": 26.336389541625977,
"learning_rate": 1.0000000000000001e-07,
"loss": 4.0633,
"step": 2
},
{
"epoch": 0.07142857142857142,
"grad_norm": 28.836008071899414,
"learning_rate": 1.5000000000000002e-07,
"loss": 4.3575,
"step": 3
},
{
"epoch": 0.09523809523809523,
"grad_norm": 27.459413528442383,
"learning_rate": 2.0000000000000002e-07,
"loss": 4.1943,
"step": 4
},
{
"epoch": 0.11904761904761904,
"grad_norm": 27.914960861206055,
"learning_rate": 2.5000000000000004e-07,
"loss": 4.3336,
"step": 5
},
{
"epoch": 0.14285714285714285,
"grad_norm": 27.47397232055664,
"learning_rate": 3.0000000000000004e-07,
"loss": 4.2354,
"step": 6
},
{
"epoch": 0.16666666666666666,
"grad_norm": 26.212989807128906,
"learning_rate": 3.5000000000000004e-07,
"loss": 4.1001,
"step": 7
},
{
"epoch": 0.19047619047619047,
"grad_norm": 26.80431365966797,
"learning_rate": 4.0000000000000003e-07,
"loss": 4.1827,
"step": 8
},
{
"epoch": 0.21428571428571427,
"grad_norm": 27.641605377197266,
"learning_rate": 4.5000000000000003e-07,
"loss": 4.2596,
"step": 9
},
{
"epoch": 0.23809523809523808,
"grad_norm": 27.783071517944336,
"learning_rate": 5.000000000000001e-07,
"loss": 4.2694,
"step": 10
},
{
"epoch": 0.2619047619047619,
"grad_norm": 26.553335189819336,
"learning_rate": 5.5e-07,
"loss": 4.0739,
"step": 11
},
{
"epoch": 0.2857142857142857,
"grad_norm": 26.77140998840332,
"learning_rate": 6.000000000000001e-07,
"loss": 4.1049,
"step": 12
},
{
"epoch": 0.30952380952380953,
"grad_norm": 25.07087516784668,
"learning_rate": 6.5e-07,
"loss": 3.8982,
"step": 13
},
{
"epoch": 0.3333333333333333,
"grad_norm": 25.834062576293945,
"learning_rate": 7.000000000000001e-07,
"loss": 4.0768,
"step": 14
},
{
"epoch": 0.35714285714285715,
"grad_norm": 25.00474739074707,
"learning_rate": 7.5e-07,
"loss": 4.0104,
"step": 15
},
{
"epoch": 0.38095238095238093,
"grad_norm": 23.692975997924805,
"learning_rate": 8.000000000000001e-07,
"loss": 3.8469,
"step": 16
},
{
"epoch": 0.40476190476190477,
"grad_norm": 22.197919845581055,
"learning_rate": 8.500000000000001e-07,
"loss": 3.7738,
"step": 17
},
{
"epoch": 0.42857142857142855,
"grad_norm": 20.92680549621582,
"learning_rate": 9.000000000000001e-07,
"loss": 3.6514,
"step": 18
},
{
"epoch": 0.4523809523809524,
"grad_norm": 20.251178741455078,
"learning_rate": 9.500000000000001e-07,
"loss": 3.6739,
"step": 19
},
{
"epoch": 0.47619047619047616,
"grad_norm": 17.55536460876465,
"learning_rate": 1.0000000000000002e-06,
"loss": 3.2675,
"step": 20
},
{
"epoch": 0.5,
"grad_norm": 17.066797256469727,
"learning_rate": 1.0500000000000001e-06,
"loss": 3.4232,
"step": 21
},
{
"epoch": 0.5238095238095238,
"grad_norm": 16.1475887298584,
"learning_rate": 1.1e-06,
"loss": 3.1988,
"step": 22
},
{
"epoch": 0.5476190476190477,
"grad_norm": 15.61026382446289,
"learning_rate": 1.1500000000000002e-06,
"loss": 3.1338,
"step": 23
},
{
"epoch": 0.5714285714285714,
"grad_norm": 15.409480094909668,
"learning_rate": 1.2000000000000002e-06,
"loss": 2.9836,
"step": 24
},
{
"epoch": 0.5952380952380952,
"grad_norm": 15.391901969909668,
"learning_rate": 1.25e-06,
"loss": 2.9064,
"step": 25
},
{
"epoch": 0.6190476190476191,
"grad_norm": 16.92401885986328,
"learning_rate": 1.3e-06,
"loss": 2.906,
"step": 26
},
{
"epoch": 0.6428571428571429,
"grad_norm": 17.880958557128906,
"learning_rate": 1.3500000000000002e-06,
"loss": 2.702,
"step": 27
},
{
"epoch": 0.6666666666666666,
"grad_norm": 18.114517211914062,
"learning_rate": 1.4000000000000001e-06,
"loss": 2.4876,
"step": 28
},
{
"epoch": 0.6904761904761905,
"grad_norm": 17.608840942382812,
"learning_rate": 1.45e-06,
"loss": 2.2996,
"step": 29
},
{
"epoch": 0.7142857142857143,
"grad_norm": 17.055673599243164,
"learning_rate": 1.5e-06,
"loss": 2.2709,
"step": 30
},
{
"epoch": 0.7380952380952381,
"grad_norm": 14.92151927947998,
"learning_rate": 1.5500000000000002e-06,
"loss": 2.0406,
"step": 31
},
{
"epoch": 0.7619047619047619,
"grad_norm": 13.657073020935059,
"learning_rate": 1.6000000000000001e-06,
"loss": 1.8564,
"step": 32
},
{
"epoch": 0.7857142857142857,
"grad_norm": 13.274576187133789,
"learning_rate": 1.6500000000000003e-06,
"loss": 1.7382,
"step": 33
},
{
"epoch": 0.8095238095238095,
"grad_norm": 13.728348731994629,
"learning_rate": 1.7000000000000002e-06,
"loss": 1.6629,
"step": 34
},
{
"epoch": 0.8333333333333334,
"grad_norm": 13.521151542663574,
"learning_rate": 1.75e-06,
"loss": 1.5301,
"step": 35
},
{
"epoch": 0.8571428571428571,
"grad_norm": 13.709525108337402,
"learning_rate": 1.8000000000000001e-06,
"loss": 1.4122,
"step": 36
},
{
"epoch": 0.8809523809523809,
"grad_norm": 12.587928771972656,
"learning_rate": 1.85e-06,
"loss": 1.2014,
"step": 37
},
{
"epoch": 0.9047619047619048,
"grad_norm": 13.494888305664062,
"learning_rate": 1.9000000000000002e-06,
"loss": 1.1793,
"step": 38
},
{
"epoch": 0.9285714285714286,
"grad_norm": 13.417922019958496,
"learning_rate": 1.9500000000000004e-06,
"loss": 1.0282,
"step": 39
},
{
"epoch": 0.9523809523809523,
"grad_norm": 12.754359245300293,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.8525,
"step": 40
},
{
"epoch": 0.9761904761904762,
"grad_norm": 12.766407012939453,
"learning_rate": 2.05e-06,
"loss": 0.7043,
"step": 41
},
{
"epoch": 1.0,
"grad_norm": 11.440421104431152,
"learning_rate": 2.1000000000000002e-06,
"loss": 0.6245,
"step": 42
},
{
"epoch": 1.0238095238095237,
"grad_norm": 9.672205924987793,
"learning_rate": 2.15e-06,
"loss": 0.4768,
"step": 43
},
{
"epoch": 1.0476190476190477,
"grad_norm": 7.8501057624816895,
"learning_rate": 2.2e-06,
"loss": 0.3679,
"step": 44
},
{
"epoch": 1.0714285714285714,
"grad_norm": 6.751816749572754,
"learning_rate": 2.25e-06,
"loss": 0.2708,
"step": 45
},
{
"epoch": 1.0952380952380953,
"grad_norm": 5.267884731292725,
"learning_rate": 2.3000000000000004e-06,
"loss": 0.2143,
"step": 46
},
{
"epoch": 1.119047619047619,
"grad_norm": 3.251101016998291,
"learning_rate": 2.35e-06,
"loss": 0.1759,
"step": 47
},
{
"epoch": 1.1428571428571428,
"grad_norm": 2.585360050201416,
"learning_rate": 2.4000000000000003e-06,
"loss": 0.1855,
"step": 48
},
{
"epoch": 1.1666666666666667,
"grad_norm": 2.0107483863830566,
"learning_rate": 2.4500000000000003e-06,
"loss": 0.1251,
"step": 49
},
{
"epoch": 1.1904761904761905,
"grad_norm": 1.941689133644104,
"learning_rate": 2.5e-06,
"loss": 0.1441,
"step": 50
},
{
"epoch": 1.2142857142857142,
"grad_norm": 1.4036344289779663,
"learning_rate": 2.55e-06,
"loss": 0.1097,
"step": 51
},
{
"epoch": 1.2380952380952381,
"grad_norm": 1.2856179475784302,
"learning_rate": 2.6e-06,
"loss": 0.1303,
"step": 52
},
{
"epoch": 1.2619047619047619,
"grad_norm": 1.3184994459152222,
"learning_rate": 2.6500000000000005e-06,
"loss": 0.1021,
"step": 53
},
{
"epoch": 1.2857142857142856,
"grad_norm": 1.144294261932373,
"learning_rate": 2.7000000000000004e-06,
"loss": 0.115,
"step": 54
},
{
"epoch": 1.3095238095238095,
"grad_norm": 0.9276831150054932,
"learning_rate": 2.7500000000000004e-06,
"loss": 0.1028,
"step": 55
},
{
"epoch": 1.3333333333333333,
"grad_norm": 0.9152742028236389,
"learning_rate": 2.8000000000000003e-06,
"loss": 0.0979,
"step": 56
},
{
"epoch": 1.3571428571428572,
"grad_norm": 0.8525308966636658,
"learning_rate": 2.85e-06,
"loss": 0.0908,
"step": 57
},
{
"epoch": 1.380952380952381,
"grad_norm": 0.9806348085403442,
"learning_rate": 2.9e-06,
"loss": 0.0817,
"step": 58
},
{
"epoch": 1.4047619047619047,
"grad_norm": 0.606792151927948,
"learning_rate": 2.95e-06,
"loss": 0.0904,
"step": 59
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.7274054884910583,
"learning_rate": 3e-06,
"loss": 0.0811,
"step": 60
},
{
"epoch": 1.4523809523809523,
"grad_norm": 1.0523946285247803,
"learning_rate": 3.05e-06,
"loss": 0.0881,
"step": 61
},
{
"epoch": 1.4761904761904763,
"grad_norm": 0.5840473175048828,
"learning_rate": 3.1000000000000004e-06,
"loss": 0.0848,
"step": 62
},
{
"epoch": 1.5,
"grad_norm": 0.7410831451416016,
"learning_rate": 3.1500000000000003e-06,
"loss": 0.0788,
"step": 63
},
{
"epoch": 1.5238095238095237,
"grad_norm": 0.828996479511261,
"learning_rate": 3.2000000000000003e-06,
"loss": 0.0866,
"step": 64
},
{
"epoch": 1.5476190476190477,
"grad_norm": 0.7505109310150146,
"learning_rate": 3.2500000000000002e-06,
"loss": 0.076,
"step": 65
},
{
"epoch": 1.5714285714285714,
"grad_norm": 0.9672189354896545,
"learning_rate": 3.3000000000000006e-06,
"loss": 0.0765,
"step": 66
},
{
"epoch": 1.5952380952380953,
"grad_norm": 0.5925746560096741,
"learning_rate": 3.3500000000000005e-06,
"loss": 0.0706,
"step": 67
},
{
"epoch": 1.619047619047619,
"grad_norm": 0.6671133637428284,
"learning_rate": 3.4000000000000005e-06,
"loss": 0.0713,
"step": 68
},
{
"epoch": 1.6428571428571428,
"grad_norm": 0.5542609095573425,
"learning_rate": 3.45e-06,
"loss": 0.0715,
"step": 69
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.541200578212738,
"learning_rate": 3.5e-06,
"loss": 0.0701,
"step": 70
},
{
"epoch": 1.6904761904761905,
"grad_norm": 0.4222320020198822,
"learning_rate": 3.5500000000000003e-06,
"loss": 0.0669,
"step": 71
},
{
"epoch": 1.7142857142857144,
"grad_norm": 0.717410147190094,
"learning_rate": 3.6000000000000003e-06,
"loss": 0.0782,
"step": 72
},
{
"epoch": 1.7380952380952381,
"grad_norm": 0.6776471734046936,
"learning_rate": 3.65e-06,
"loss": 0.0695,
"step": 73
},
{
"epoch": 1.7619047619047619,
"grad_norm": 0.5480474829673767,
"learning_rate": 3.7e-06,
"loss": 0.0662,
"step": 74
},
{
"epoch": 1.7857142857142856,
"grad_norm": 0.4779343605041504,
"learning_rate": 3.7500000000000005e-06,
"loss": 0.0743,
"step": 75
},
{
"epoch": 1.8095238095238095,
"grad_norm": 0.43138471245765686,
"learning_rate": 3.8000000000000005e-06,
"loss": 0.0666,
"step": 76
},
{
"epoch": 1.8333333333333335,
"grad_norm": 0.6058762669563293,
"learning_rate": 3.85e-06,
"loss": 0.0696,
"step": 77
},
{
"epoch": 1.8571428571428572,
"grad_norm": 1.3352755308151245,
"learning_rate": 3.900000000000001e-06,
"loss": 0.0891,
"step": 78
},
{
"epoch": 1.880952380952381,
"grad_norm": 0.5319089293479919,
"learning_rate": 3.95e-06,
"loss": 0.0617,
"step": 79
},
{
"epoch": 1.9047619047619047,
"grad_norm": 0.5629184246063232,
"learning_rate": 4.000000000000001e-06,
"loss": 0.0622,
"step": 80
},
{
"epoch": 1.9285714285714286,
"grad_norm": 0.37953704595565796,
"learning_rate": 4.05e-06,
"loss": 0.0676,
"step": 81
},
{
"epoch": 1.9523809523809523,
"grad_norm": 0.37576770782470703,
"learning_rate": 4.1e-06,
"loss": 0.0719,
"step": 82
},
{
"epoch": 1.9761904761904763,
"grad_norm": 0.4720636010169983,
"learning_rate": 4.15e-06,
"loss": 0.0662,
"step": 83
},
{
"epoch": 2.0,
"grad_norm": 0.4793304204940796,
"learning_rate": 4.2000000000000004e-06,
"loss": 0.0664,
"step": 84
},
{
"epoch": 2.0238095238095237,
"grad_norm": 0.44540268182754517,
"learning_rate": 4.25e-06,
"loss": 0.0627,
"step": 85
},
{
"epoch": 2.0476190476190474,
"grad_norm": 0.40029338002204895,
"learning_rate": 4.3e-06,
"loss": 0.0615,
"step": 86
},
{
"epoch": 2.0714285714285716,
"grad_norm": 0.44975095987319946,
"learning_rate": 4.350000000000001e-06,
"loss": 0.0556,
"step": 87
},
{
"epoch": 2.0952380952380953,
"grad_norm": 0.49270153045654297,
"learning_rate": 4.4e-06,
"loss": 0.0509,
"step": 88
},
{
"epoch": 2.119047619047619,
"grad_norm": 0.5457497239112854,
"learning_rate": 4.450000000000001e-06,
"loss": 0.0654,
"step": 89
},
{
"epoch": 2.142857142857143,
"grad_norm": 0.5662055611610413,
"learning_rate": 4.5e-06,
"loss": 0.0584,
"step": 90
},
{
"epoch": 2.1666666666666665,
"grad_norm": 0.4589175581932068,
"learning_rate": 4.5500000000000005e-06,
"loss": 0.05,
"step": 91
},
{
"epoch": 2.1904761904761907,
"grad_norm": 0.4754093289375305,
"learning_rate": 4.600000000000001e-06,
"loss": 0.0409,
"step": 92
},
{
"epoch": 2.2142857142857144,
"grad_norm": 0.5139522552490234,
"learning_rate": 4.65e-06,
"loss": 0.0548,
"step": 93
},
{
"epoch": 2.238095238095238,
"grad_norm": 0.6404117345809937,
"learning_rate": 4.7e-06,
"loss": 0.0575,
"step": 94
},
{
"epoch": 2.261904761904762,
"grad_norm": 0.4840497672557831,
"learning_rate": 4.75e-06,
"loss": 0.0545,
"step": 95
},
{
"epoch": 2.2857142857142856,
"grad_norm": 0.468397319316864,
"learning_rate": 4.800000000000001e-06,
"loss": 0.0532,
"step": 96
},
{
"epoch": 2.3095238095238093,
"grad_norm": 0.41116851568222046,
"learning_rate": 4.85e-06,
"loss": 0.0508,
"step": 97
},
{
"epoch": 2.3333333333333335,
"grad_norm": 0.4745863080024719,
"learning_rate": 4.9000000000000005e-06,
"loss": 0.0571,
"step": 98
},
{
"epoch": 2.357142857142857,
"grad_norm": 0.5628818869590759,
"learning_rate": 4.95e-06,
"loss": 0.0527,
"step": 99
},
{
"epoch": 2.380952380952381,
"grad_norm": 0.5258535146713257,
"learning_rate": 5e-06,
"loss": 0.0602,
"step": 100
},
{
"epoch": 2.4047619047619047,
"grad_norm": 0.4047287702560425,
"learning_rate": 4.999466041969828e-06,
"loss": 0.0511,
"step": 101
},
{
"epoch": 2.4285714285714284,
"grad_norm": 0.493963360786438,
"learning_rate": 4.997864395968252e-06,
"loss": 0.0496,
"step": 102
},
{
"epoch": 2.4523809523809526,
"grad_norm": 0.4637805223464966,
"learning_rate": 4.9951957461646705e-06,
"loss": 0.047,
"step": 103
},
{
"epoch": 2.4761904761904763,
"grad_norm": 0.4990857243537903,
"learning_rate": 4.991461232516675e-06,
"loss": 0.0566,
"step": 104
},
{
"epoch": 2.5,
"grad_norm": 0.41547098755836487,
"learning_rate": 4.986662450283107e-06,
"loss": 0.0496,
"step": 105
},
{
"epoch": 2.5238095238095237,
"grad_norm": 0.5748231410980225,
"learning_rate": 4.9808014493426124e-06,
"loss": 0.0362,
"step": 106
},
{
"epoch": 2.5476190476190474,
"grad_norm": 0.481645792722702,
"learning_rate": 4.973880733318007e-06,
"loss": 0.0418,
"step": 107
},
{
"epoch": 2.571428571428571,
"grad_norm": 0.49727940559387207,
"learning_rate": 4.965903258506806e-06,
"loss": 0.0436,
"step": 108
},
{
"epoch": 2.5952380952380953,
"grad_norm": 0.6753897070884705,
"learning_rate": 4.956872432618399e-06,
"loss": 0.0486,
"step": 109
},
{
"epoch": 2.619047619047619,
"grad_norm": 0.6681185960769653,
"learning_rate": 4.9467921133183864e-06,
"loss": 0.0426,
"step": 110
},
{
"epoch": 2.642857142857143,
"grad_norm": 0.6316296458244324,
"learning_rate": 4.935666606580719e-06,
"loss": 0.0377,
"step": 111
},
{
"epoch": 2.6666666666666665,
"grad_norm": 0.43960002064704895,
"learning_rate": 4.923500664848327e-06,
"loss": 0.0355,
"step": 112
},
{
"epoch": 2.6904761904761907,
"grad_norm": 0.437860906124115,
"learning_rate": 4.910299485003034e-06,
"loss": 0.0369,
"step": 113
},
{
"epoch": 2.7142857142857144,
"grad_norm": 0.5523399710655212,
"learning_rate": 4.896068706145632e-06,
"loss": 0.0488,
"step": 114
},
{
"epoch": 2.738095238095238,
"grad_norm": 0.6876885890960693,
"learning_rate": 4.880814407187037e-06,
"loss": 0.0492,
"step": 115
},
{
"epoch": 2.761904761904762,
"grad_norm": 0.48951950669288635,
"learning_rate": 4.864543104251587e-06,
"loss": 0.0464,
"step": 116
},
{
"epoch": 2.7857142857142856,
"grad_norm": 0.6003612875938416,
"learning_rate": 4.8472617478935744e-06,
"loss": 0.0376,
"step": 117
},
{
"epoch": 2.8095238095238093,
"grad_norm": 0.5665768384933472,
"learning_rate": 4.828977720128198e-06,
"loss": 0.0364,
"step": 118
},
{
"epoch": 2.8333333333333335,
"grad_norm": 0.6751538515090942,
"learning_rate": 4.809698831278217e-06,
"loss": 0.0415,
"step": 119
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.6295377612113953,
"learning_rate": 4.789433316637644e-06,
"loss": 0.0364,
"step": 120
},
{
"epoch": 2.880952380952381,
"grad_norm": 0.4297006130218506,
"learning_rate": 4.7681898329539004e-06,
"loss": 0.0282,
"step": 121
},
{
"epoch": 2.9047619047619047,
"grad_norm": 0.48039913177490234,
"learning_rate": 4.745977454729947e-06,
"loss": 0.0319,
"step": 122
},
{
"epoch": 2.928571428571429,
"grad_norm": 0.5199178457260132,
"learning_rate": 4.722805670347963e-06,
"loss": 0.0299,
"step": 123
},
{
"epoch": 2.9523809523809526,
"grad_norm": 0.5075429677963257,
"learning_rate": 4.698684378016223e-06,
"loss": 0.0265,
"step": 124
},
{
"epoch": 2.9761904761904763,
"grad_norm": 0.5364718437194824,
"learning_rate": 4.673623881540917e-06,
"loss": 0.0351,
"step": 125
},
{
"epoch": 3.0,
"grad_norm": 0.5693520903587341,
"learning_rate": 4.647634885924713e-06,
"loss": 0.0286,
"step": 126
},
{
"epoch": 3.0238095238095237,
"grad_norm": 0.502586305141449,
"learning_rate": 4.620728492793934e-06,
"loss": 0.0222,
"step": 127
},
{
"epoch": 3.0476190476190474,
"grad_norm": 0.42680972814559937,
"learning_rate": 4.592916195656322e-06,
"loss": 0.0183,
"step": 128
},
{
"epoch": 3.0714285714285716,
"grad_norm": 0.6365283131599426,
"learning_rate": 4.56420987499139e-06,
"loss": 0.025,
"step": 129
},
{
"epoch": 3.0952380952380953,
"grad_norm": 0.4085887670516968,
"learning_rate": 4.534621793175488e-06,
"loss": 0.0166,
"step": 130
},
{
"epoch": 3.119047619047619,
"grad_norm": 0.4094793498516083,
"learning_rate": 4.504164589243721e-06,
"loss": 0.0158,
"step": 131
},
{
"epoch": 3.142857142857143,
"grad_norm": 0.6991594433784485,
"learning_rate": 4.472851273490985e-06,
"loss": 0.0253,
"step": 132
},
{
"epoch": 3.1666666666666665,
"grad_norm": 0.4174569547176361,
"learning_rate": 4.440695221914394e-06,
"loss": 0.0165,
"step": 133
},
{
"epoch": 3.1904761904761907,
"grad_norm": 0.4691311717033386,
"learning_rate": 4.407710170499517e-06,
"loss": 0.0187,
"step": 134
},
{
"epoch": 3.2142857142857144,
"grad_norm": 0.6574212312698364,
"learning_rate": 4.373910209352816e-06,
"loss": 0.018,
"step": 135
},
{
"epoch": 3.238095238095238,
"grad_norm": 0.40424057841300964,
"learning_rate": 4.33930977668283e-06,
"loss": 0.0115,
"step": 136
},
{
"epoch": 3.261904761904762,
"grad_norm": 0.5574136972427368,
"learning_rate": 4.303923652632656e-06,
"loss": 0.0143,
"step": 137
},
{
"epoch": 3.2857142857142856,
"grad_norm": 0.7671014666557312,
"learning_rate": 4.267766952966369e-06,
"loss": 0.0123,
"step": 138
},
{
"epoch": 3.3095238095238093,
"grad_norm": 0.5041195750236511,
"learning_rate": 4.2308551226120745e-06,
"loss": 0.0105,
"step": 139
},
{
"epoch": 3.3333333333333335,
"grad_norm": 0.7552913427352905,
"learning_rate": 4.1932039290643534e-06,
"loss": 0.0132,
"step": 140
},
{
"epoch": 3.357142857142857,
"grad_norm": 0.4549916982650757,
"learning_rate": 4.154829455648916e-06,
"loss": 0.0073,
"step": 141
},
{
"epoch": 3.380952380952381,
"grad_norm": 0.5453060269355774,
"learning_rate": 4.115748094652352e-06,
"loss": 0.0065,
"step": 142
},
{
"epoch": 3.4047619047619047,
"grad_norm": 0.6225312948226929,
"learning_rate": 4.075976540319888e-06,
"loss": 0.0087,
"step": 143
},
{
"epoch": 3.4285714285714284,
"grad_norm": 0.629115641117096,
"learning_rate": 4.0355317817241705e-06,
"loss": 0.0118,
"step": 144
},
{
"epoch": 3.4523809523809526,
"grad_norm": 0.537651538848877,
"learning_rate": 3.994431095508102e-06,
"loss": 0.0066,
"step": 145
},
{
"epoch": 3.4761904761904763,
"grad_norm": 0.9304003715515137,
"learning_rate": 3.9526920385048465e-06,
"loss": 0.0082,
"step": 146
},
{
"epoch": 3.5,
"grad_norm": 0.557716965675354,
"learning_rate": 3.9103324402381285e-06,
"loss": 0.0063,
"step": 147
},
{
"epoch": 3.5238095238095237,
"grad_norm": 0.3688383102416992,
"learning_rate": 3.8673703953060685e-06,
"loss": 0.0049,
"step": 148
},
{
"epoch": 3.5476190476190474,
"grad_norm": 0.44352808594703674,
"learning_rate": 3.8238242556517725e-06,
"loss": 0.0043,
"step": 149
},
{
"epoch": 3.571428571428571,
"grad_norm": 0.5826281905174255,
"learning_rate": 3.779712622724003e-06,
"loss": 0.0063,
"step": 150
},
{
"epoch": 3.5952380952380953,
"grad_norm": 0.600183367729187,
"learning_rate": 3.7350543395312604e-06,
"loss": 0.0057,
"step": 151
},
{
"epoch": 3.619047619047619,
"grad_norm": 0.6133769750595093,
"learning_rate": 3.6898684825926845e-06,
"loss": 0.007,
"step": 152
},
{
"epoch": 3.642857142857143,
"grad_norm": 0.5637931227684021,
"learning_rate": 3.6441743537892045e-06,
"loss": 0.0059,
"step": 153
},
{
"epoch": 3.6666666666666665,
"grad_norm": 0.27519500255584717,
"learning_rate": 3.5979914721184263e-06,
"loss": 0.002,
"step": 154
},
{
"epoch": 3.6904761904761907,
"grad_norm": 0.3693721294403076,
"learning_rate": 3.551339565356769e-06,
"loss": 0.0042,
"step": 155
},
{
"epoch": 3.7142857142857144,
"grad_norm": 0.733023464679718,
"learning_rate": 3.5042385616324243e-06,
"loss": 0.0031,
"step": 156
},
{
"epoch": 3.738095238095238,
"grad_norm": 0.4825158417224884,
"learning_rate": 3.4567085809127247e-06,
"loss": 0.0034,
"step": 157
},
{
"epoch": 3.761904761904762,
"grad_norm": 0.745896577835083,
"learning_rate": 3.4087699264095746e-06,
"loss": 0.006,
"step": 158
},
{
"epoch": 3.7857142857142856,
"grad_norm": 0.606246829032898,
"learning_rate": 3.360443075906597e-06,
"loss": 0.0028,
"step": 159
},
{
"epoch": 3.8095238095238093,
"grad_norm": 0.35037049651145935,
"learning_rate": 3.3117486730117092e-06,
"loss": 0.0022,
"step": 160
},
{
"epoch": 3.8333333333333335,
"grad_norm": 0.3873816132545471,
"learning_rate": 3.2627075183388725e-06,
"loss": 0.0024,
"step": 161
},
{
"epoch": 3.857142857142857,
"grad_norm": 0.2003440409898758,
"learning_rate": 3.2133405606227636e-06,
"loss": 0.0014,
"step": 162
},
{
"epoch": 3.880952380952381,
"grad_norm": 1.2881035804748535,
"learning_rate": 3.163668887770181e-06,
"loss": 0.0052,
"step": 163
},
{
"epoch": 3.9047619047619047,
"grad_norm": 0.16532732546329498,
"learning_rate": 3.1137137178519983e-06,
"loss": 0.0006,
"step": 164
},
{
"epoch": 3.928571428571429,
"grad_norm": 0.42558538913726807,
"learning_rate": 3.063496390039516e-06,
"loss": 0.0018,
"step": 165
},
{
"epoch": 3.9523809523809526,
"grad_norm": 0.5848471522331238,
"learning_rate": 3.013038355489086e-06,
"loss": 0.002,
"step": 166
},
{
"epoch": 3.9761904761904763,
"grad_norm": 0.18496190011501312,
"learning_rate": 2.9623611681788967e-06,
"loss": 0.0012,
"step": 167
},
{
"epoch": 4.0,
"grad_norm": 0.9864285588264465,
"learning_rate": 2.911486475701835e-06,
"loss": 0.0021,
"step": 168
},
{
"epoch": 4.023809523809524,
"grad_norm": 0.28038740158081055,
"learning_rate": 2.860436010018367e-06,
"loss": 0.0016,
"step": 169
},
{
"epoch": 4.0476190476190474,
"grad_norm": 0.174965500831604,
"learning_rate": 2.80923157817337e-06,
"loss": 0.0006,
"step": 170
},
{
"epoch": 4.071428571428571,
"grad_norm": 0.111990787088871,
"learning_rate": 2.7578950529808927e-06,
"loss": 0.0003,
"step": 171
},
{
"epoch": 4.095238095238095,
"grad_norm": 0.0902976393699646,
"learning_rate": 2.7064483636808314e-06,
"loss": 0.0004,
"step": 172
},
{
"epoch": 4.119047619047619,
"grad_norm": 0.23398929834365845,
"learning_rate": 2.654913486571487e-06,
"loss": 0.0007,
"step": 173
},
{
"epoch": 4.142857142857143,
"grad_norm": 0.3977113664150238,
"learning_rate": 2.603312435622033e-06,
"loss": 0.0012,
"step": 174
},
{
"epoch": 4.166666666666667,
"grad_norm": 0.18411217629909515,
"learning_rate": 2.5516672530688864e-06,
"loss": 0.0006,
"step": 175
},
{
"epoch": 4.190476190476191,
"grad_norm": 0.3521343171596527,
"learning_rate": 2.5e-06,
"loss": 0.0017,
"step": 176
},
{
"epoch": 4.214285714285714,
"grad_norm": 0.1081409603357315,
"learning_rate": 2.448332746931115e-06,
"loss": 0.0004,
"step": 177
},
{
"epoch": 4.238095238095238,
"grad_norm": 0.10767076164484024,
"learning_rate": 2.396687564377967e-06,
"loss": 0.0003,
"step": 178
},
{
"epoch": 4.261904761904762,
"grad_norm": 0.0657818615436554,
"learning_rate": 2.345086513428514e-06,
"loss": 0.0003,
"step": 179
},
{
"epoch": 4.285714285714286,
"grad_norm": 0.03795298561453819,
"learning_rate": 2.2935516363191695e-06,
"loss": 0.0002,
"step": 180
},
{
"epoch": 4.309523809523809,
"grad_norm": 0.09580468386411667,
"learning_rate": 2.2421049470191077e-06,
"loss": 0.0002,
"step": 181
},
{
"epoch": 4.333333333333333,
"grad_norm": 0.13761642575263977,
"learning_rate": 2.190768421826631e-06,
"loss": 0.0004,
"step": 182
},
{
"epoch": 4.357142857142857,
"grad_norm": 0.10877614468336105,
"learning_rate": 2.139563989981633e-06,
"loss": 0.0003,
"step": 183
},
{
"epoch": 4.380952380952381,
"grad_norm": 0.14662612974643707,
"learning_rate": 2.088513524298165e-06,
"loss": 0.0003,
"step": 184
},
{
"epoch": 4.404761904761905,
"grad_norm": 0.29953065514564514,
"learning_rate": 2.037638831821104e-06,
"loss": 0.0004,
"step": 185
},
{
"epoch": 4.428571428571429,
"grad_norm": 0.20808857679367065,
"learning_rate": 1.9869616445109146e-06,
"loss": 0.0003,
"step": 186
},
{
"epoch": 4.4523809523809526,
"grad_norm": 0.03407036140561104,
"learning_rate": 1.9365036099604853e-06,
"loss": 0.0002,
"step": 187
},
{
"epoch": 4.476190476190476,
"grad_norm": 0.3132052421569824,
"learning_rate": 1.8862862821480023e-06,
"loss": 0.0006,
"step": 188
},
{
"epoch": 4.5,
"grad_norm": 0.2567755877971649,
"learning_rate": 1.83633111222982e-06,
"loss": 0.0007,
"step": 189
},
{
"epoch": 4.523809523809524,
"grad_norm": 0.11175425350666046,
"learning_rate": 1.7866594393772375e-06,
"loss": 0.0003,
"step": 190
},
{
"epoch": 4.5476190476190474,
"grad_norm": 0.594926118850708,
"learning_rate": 1.7372924816611283e-06,
"loss": 0.001,
"step": 191
},
{
"epoch": 4.571428571428571,
"grad_norm": 0.03871627524495125,
"learning_rate": 1.6882513269882916e-06,
"loss": 0.0002,
"step": 192
},
{
"epoch": 4.595238095238095,
"grad_norm": 0.06972040235996246,
"learning_rate": 1.6395569240934042e-06,
"loss": 0.0002,
"step": 193
},
{
"epoch": 4.619047619047619,
"grad_norm": 0.057279668748378754,
"learning_rate": 1.5912300735904252e-06,
"loss": 0.0002,
"step": 194
},
{
"epoch": 4.642857142857143,
"grad_norm": 0.09757455438375473,
"learning_rate": 1.5432914190872757e-06,
"loss": 0.0003,
"step": 195
},
{
"epoch": 4.666666666666667,
"grad_norm": 0.1020384356379509,
"learning_rate": 1.495761438367577e-06,
"loss": 0.0003,
"step": 196
},
{
"epoch": 4.690476190476191,
"grad_norm": 0.08006434142589569,
"learning_rate": 1.4486604346432311e-06,
"loss": 0.0003,
"step": 197
},
{
"epoch": 4.714285714285714,
"grad_norm": 0.0277456846088171,
"learning_rate": 1.4020085278815745e-06,
"loss": 0.0001,
"step": 198
},
{
"epoch": 4.738095238095238,
"grad_norm": 0.018923059105873108,
"learning_rate": 1.3558256462107965e-06,
"loss": 0.0002,
"step": 199
},
{
"epoch": 4.761904761904762,
"grad_norm": 0.01575353741645813,
"learning_rate": 1.3101315174073162e-06,
"loss": 0.0001,
"step": 200
},
{
"epoch": 4.785714285714286,
"grad_norm": 0.014921327121555805,
"learning_rate": 1.2649456604687404e-06,
"loss": 0.0002,
"step": 201
},
{
"epoch": 4.809523809523809,
"grad_norm": 0.03840240091085434,
"learning_rate": 1.2202873772759983e-06,
"loss": 0.0002,
"step": 202
},
{
"epoch": 4.833333333333333,
"grad_norm": 0.04131830111145973,
"learning_rate": 1.1761757443482285e-06,
"loss": 0.0002,
"step": 203
},
{
"epoch": 4.857142857142857,
"grad_norm": 0.02138919197022915,
"learning_rate": 1.1326296046939334e-06,
"loss": 0.0002,
"step": 204
},
{
"epoch": 4.880952380952381,
"grad_norm": 0.016792714595794678,
"learning_rate": 1.0896675597618725e-06,
"loss": 0.0001,
"step": 205
},
{
"epoch": 4.904761904761905,
"grad_norm": 0.017704278230667114,
"learning_rate": 1.0473079614951546e-06,
"loss": 0.0002,
"step": 206
},
{
"epoch": 4.928571428571429,
"grad_norm": 0.020070459693670273,
"learning_rate": 1.0055689044918979e-06,
"loss": 0.0001,
"step": 207
},
{
"epoch": 4.9523809523809526,
"grad_norm": 0.035451311618089676,
"learning_rate": 9.644682182758305e-07,
"loss": 0.0002,
"step": 208
},
{
"epoch": 4.976190476190476,
"grad_norm": 0.02921333909034729,
"learning_rate": 9.240234596801125e-07,
"loss": 0.0002,
"step": 209
},
{
"epoch": 5.0,
"grad_norm": 0.006285260431468487,
"learning_rate": 8.842519053476476e-07,
"loss": 0.0001,
"step": 210
}
],
"logging_steps": 1,
"max_steps": 252,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 42,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 4.704300308417741e+17,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}