{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 375,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.008,
"grad_norm": 63.19746780395508,
"learning_rate": 0.0,
"loss": 1.9169,
"step": 1
},
{
"epoch": 0.016,
"grad_norm": 54.364017486572266,
"learning_rate": 8.333333333333333e-07,
"loss": 1.605,
"step": 2
},
{
"epoch": 0.024,
"grad_norm": 64.55960083007812,
"learning_rate": 1.6666666666666667e-06,
"loss": 2.0971,
"step": 3
},
{
"epoch": 0.032,
"grad_norm": 60.74430465698242,
"learning_rate": 2.5e-06,
"loss": 1.7494,
"step": 4
},
{
"epoch": 0.04,
"grad_norm": 19.428855895996094,
"learning_rate": 3.3333333333333333e-06,
"loss": 1.2111,
"step": 5
},
{
"epoch": 0.048,
"grad_norm": 12.349410057067871,
"learning_rate": 4.166666666666667e-06,
"loss": 1.4206,
"step": 6
},
{
"epoch": 0.056,
"grad_norm": 10.965296745300293,
"learning_rate": 5e-06,
"loss": 1.21,
"step": 7
},
{
"epoch": 0.064,
"grad_norm": 7.55981969833374,
"learning_rate": 5.833333333333334e-06,
"loss": 1.2954,
"step": 8
},
{
"epoch": 0.072,
"grad_norm": 7.904179096221924,
"learning_rate": 6.666666666666667e-06,
"loss": 1.2917,
"step": 9
},
{
"epoch": 0.08,
"grad_norm": 6.269378662109375,
"learning_rate": 7.500000000000001e-06,
"loss": 1.1015,
"step": 10
},
{
"epoch": 0.088,
"grad_norm": 4.897161960601807,
"learning_rate": 8.333333333333334e-06,
"loss": 0.989,
"step": 11
},
{
"epoch": 0.096,
"grad_norm": 6.417965412139893,
"learning_rate": 9.166666666666666e-06,
"loss": 1.0415,
"step": 12
},
{
"epoch": 0.104,
"grad_norm": 5.108837604522705,
"learning_rate": 1e-05,
"loss": 1.0252,
"step": 13
},
{
"epoch": 0.112,
"grad_norm": 4.97706413269043,
"learning_rate": 9.999812749151968e-06,
"loss": 1.005,
"step": 14
},
{
"epoch": 0.12,
"grad_norm": 5.717547416687012,
"learning_rate": 9.99925101063302e-06,
"loss": 1.0801,
"step": 15
},
{
"epoch": 0.128,
"grad_norm": 5.650694847106934,
"learning_rate": 9.998314826517564e-06,
"loss": 1.1121,
"step": 16
},
{
"epoch": 0.136,
"grad_norm": 5.324012279510498,
"learning_rate": 9.997004266926105e-06,
"loss": 0.9955,
"step": 17
},
{
"epoch": 0.144,
"grad_norm": 4.853263854980469,
"learning_rate": 9.995319430020004e-06,
"loss": 1.0795,
"step": 18
},
{
"epoch": 0.152,
"grad_norm": 5.379968643188477,
"learning_rate": 9.993260441994116e-06,
"loss": 1.006,
"step": 19
},
{
"epoch": 0.16,
"grad_norm": 6.044646739959717,
"learning_rate": 9.990827457067342e-06,
"loss": 1.1865,
"step": 20
},
{
"epoch": 0.168,
"grad_norm": 4.568022727966309,
"learning_rate": 9.988020657471078e-06,
"loss": 0.9736,
"step": 21
},
{
"epoch": 0.176,
"grad_norm": 4.817960739135742,
"learning_rate": 9.984840253435569e-06,
"loss": 0.9791,
"step": 22
},
{
"epoch": 0.184,
"grad_norm": 4.852054119110107,
"learning_rate": 9.98128648317415e-06,
"loss": 0.9301,
"step": 23
},
{
"epoch": 0.192,
"grad_norm": 4.597924709320068,
"learning_rate": 9.977359612865424e-06,
"loss": 0.9372,
"step": 24
},
{
"epoch": 0.2,
"grad_norm": 4.0566086769104,
"learning_rate": 9.973059936633308e-06,
"loss": 0.9346,
"step": 25
},
{
"epoch": 0.208,
"grad_norm": 4.211592197418213,
"learning_rate": 9.968387776525009e-06,
"loss": 0.9242,
"step": 26
},
{
"epoch": 0.216,
"grad_norm": 5.917508602142334,
"learning_rate": 9.963343482486907e-06,
"loss": 1.0374,
"step": 27
},
{
"epoch": 0.224,
"grad_norm": 4.746499061584473,
"learning_rate": 9.957927432338332e-06,
"loss": 0.9748,
"step": 28
},
{
"epoch": 0.232,
"grad_norm": 4.545176029205322,
"learning_rate": 9.952140031743282e-06,
"loss": 0.9685,
"step": 29
},
{
"epoch": 0.24,
"grad_norm": 4.560300350189209,
"learning_rate": 9.945981714180021e-06,
"loss": 1.0719,
"step": 30
},
{
"epoch": 0.248,
"grad_norm": 2.5730977058410645,
"learning_rate": 9.939452940908627e-06,
"loss": 0.4685,
"step": 31
},
{
"epoch": 0.256,
"grad_norm": 4.694375991821289,
"learning_rate": 9.932554200936428e-06,
"loss": 1.089,
"step": 32
},
{
"epoch": 0.264,
"grad_norm": 5.270295143127441,
"learning_rate": 9.925286010981394e-06,
"loss": 0.9073,
"step": 33
},
{
"epoch": 0.272,
"grad_norm": 4.562716960906982,
"learning_rate": 9.917648915433413e-06,
"loss": 0.9438,
"step": 34
},
{
"epoch": 0.28,
"grad_norm": 4.3254804611206055,
"learning_rate": 9.909643486313533e-06,
"loss": 0.9134,
"step": 35
},
{
"epoch": 0.288,
"grad_norm": 3.8351776599884033,
"learning_rate": 9.901270323231114e-06,
"loss": 0.8006,
"step": 36
},
{
"epoch": 0.296,
"grad_norm": 4.756275177001953,
"learning_rate": 9.892530053338909e-06,
"loss": 1.088,
"step": 37
},
{
"epoch": 0.304,
"grad_norm": 4.424313068389893,
"learning_rate": 9.883423331286096e-06,
"loss": 1.014,
"step": 38
},
{
"epoch": 0.312,
"grad_norm": 4.789685249328613,
"learning_rate": 9.873950839169248e-06,
"loss": 0.9863,
"step": 39
},
{
"epoch": 0.32,
"grad_norm": 4.4344635009765625,
"learning_rate": 9.864113286481237e-06,
"loss": 1.028,
"step": 40
},
{
"epoch": 0.328,
"grad_norm": 4.32478141784668,
"learning_rate": 9.853911410058097e-06,
"loss": 0.9658,
"step": 41
},
{
"epoch": 0.336,
"grad_norm": 4.358556270599365,
"learning_rate": 9.843345974023833e-06,
"loss": 0.938,
"step": 42
},
{
"epoch": 0.344,
"grad_norm": 3.7750961780548096,
"learning_rate": 9.832417769733185e-06,
"loss": 0.8794,
"step": 43
},
{
"epoch": 0.352,
"grad_norm": 4.18930721282959,
"learning_rate": 9.821127615712365e-06,
"loss": 0.939,
"step": 44
},
{
"epoch": 0.36,
"grad_norm": 5.1670241355896,
"learning_rate": 9.809476357597738e-06,
"loss": 1.0395,
"step": 45
},
{
"epoch": 0.368,
"grad_norm": 3.9613327980041504,
"learning_rate": 9.797464868072489e-06,
"loss": 0.9811,
"step": 46
},
{
"epoch": 0.376,
"grad_norm": 4.249776840209961,
"learning_rate": 9.785094046801256e-06,
"loss": 0.9531,
"step": 47
},
{
"epoch": 0.384,
"grad_norm": 5.6319580078125,
"learning_rate": 9.77236482036275e-06,
"loss": 0.9624,
"step": 48
},
{
"epoch": 0.392,
"grad_norm": 4.717684268951416,
"learning_rate": 9.759278142180348e-06,
"loss": 0.9947,
"step": 49
},
{
"epoch": 0.4,
"grad_norm": 4.638187885284424,
"learning_rate": 9.745834992450688e-06,
"loss": 1.0151,
"step": 50
},
{
"epoch": 0.408,
"grad_norm": 4.343395233154297,
"learning_rate": 9.732036378070243e-06,
"loss": 0.8039,
"step": 51
},
{
"epoch": 0.416,
"grad_norm": 3.4665777683258057,
"learning_rate": 9.717883332559911e-06,
"loss": 0.6634,
"step": 52
},
{
"epoch": 0.424,
"grad_norm": 3.683824062347412,
"learning_rate": 9.703376915987601e-06,
"loss": 0.8551,
"step": 53
},
{
"epoch": 0.432,
"grad_norm": 4.645051956176758,
"learning_rate": 9.688518214888836e-06,
"loss": 1.0924,
"step": 54
},
{
"epoch": 0.44,
"grad_norm": 4.223948001861572,
"learning_rate": 9.673308342185366e-06,
"loss": 1.0766,
"step": 55
},
{
"epoch": 0.448,
"grad_norm": 4.355952262878418,
"learning_rate": 9.657748437101819e-06,
"loss": 0.9852,
"step": 56
},
{
"epoch": 0.456,
"grad_norm": 3.6771440505981445,
"learning_rate": 9.641839665080363e-06,
"loss": 0.8836,
"step": 57
},
{
"epoch": 0.464,
"grad_norm": 4.1932830810546875,
"learning_rate": 9.625583217693419e-06,
"loss": 0.9244,
"step": 58
},
{
"epoch": 0.472,
"grad_norm": 3.90061616897583,
"learning_rate": 9.60898031255441e-06,
"loss": 0.8808,
"step": 59
},
{
"epoch": 0.48,
"grad_norm": 4.961562633514404,
"learning_rate": 9.592032193226564e-06,
"loss": 0.9509,
"step": 60
},
{
"epoch": 0.488,
"grad_norm": 4.50408411026001,
"learning_rate": 9.574740129129767e-06,
"loss": 0.9867,
"step": 61
},
{
"epoch": 0.496,
"grad_norm": 4.07206916809082,
"learning_rate": 9.557105415445485e-06,
"loss": 0.8523,
"step": 62
},
{
"epoch": 0.504,
"grad_norm": 4.185204982757568,
"learning_rate": 9.539129373019755e-06,
"loss": 0.9761,
"step": 63
},
{
"epoch": 0.512,
"grad_norm": 4.2349772453308105,
"learning_rate": 9.520813348264252e-06,
"loss": 0.939,
"step": 64
},
{
"epoch": 0.52,
"grad_norm": 4.373566150665283,
"learning_rate": 9.502158713055444e-06,
"loss": 0.9199,
"step": 65
},
{
"epoch": 0.528,
"grad_norm": 4.522472858428955,
"learning_rate": 9.483166864631837e-06,
"loss": 0.9225,
"step": 66
},
{
"epoch": 0.536,
"grad_norm": 4.246692657470703,
"learning_rate": 9.46383922548932e-06,
"loss": 1.0082,
"step": 67
},
{
"epoch": 0.544,
"grad_norm": 4.236417770385742,
"learning_rate": 9.444177243274619e-06,
"loss": 0.9418,
"step": 68
},
{
"epoch": 0.552,
"grad_norm": 4.69387674331665,
"learning_rate": 9.424182390676872e-06,
"loss": 0.9943,
"step": 69
},
{
"epoch": 0.56,
"grad_norm": 4.219841957092285,
"learning_rate": 9.403856165317322e-06,
"loss": 0.8946,
"step": 70
},
{
"epoch": 0.568,
"grad_norm": 3.8949694633483887,
"learning_rate": 9.383200089637143e-06,
"loss": 0.9754,
"step": 71
},
{
"epoch": 0.576,
"grad_norm": 4.357705116271973,
"learning_rate": 9.362215710783411e-06,
"loss": 0.9958,
"step": 72
},
{
"epoch": 0.584,
"grad_norm": 3.827737808227539,
"learning_rate": 9.34090460049322e-06,
"loss": 0.9192,
"step": 73
},
{
"epoch": 0.592,
"grad_norm": 4.311795711517334,
"learning_rate": 9.319268354975958e-06,
"loss": 0.9435,
"step": 74
},
{
"epoch": 0.6,
"grad_norm": 5.0150837898254395,
"learning_rate": 9.297308594793757e-06,
"loss": 0.9718,
"step": 75
},
{
"epoch": 0.608,
"grad_norm": 4.2315826416015625,
"learning_rate": 9.275026964740101e-06,
"loss": 0.9144,
"step": 76
},
{
"epoch": 0.616,
"grad_norm": 4.205296516418457,
"learning_rate": 9.252425133716639e-06,
"loss": 0.776,
"step": 77
},
{
"epoch": 0.624,
"grad_norm": 4.385368347167969,
"learning_rate": 9.229504794608182e-06,
"loss": 0.8877,
"step": 78
},
{
"epoch": 0.632,
"grad_norm": 4.200435638427734,
"learning_rate": 9.206267664155906e-06,
"loss": 0.9757,
"step": 79
},
{
"epoch": 0.64,
"grad_norm": 4.507739543914795,
"learning_rate": 9.182715482828764e-06,
"loss": 0.924,
"step": 80
},
{
"epoch": 0.648,
"grad_norm": 4.2180609703063965,
"learning_rate": 9.158850014693123e-06,
"loss": 0.9122,
"step": 81
},
{
"epoch": 0.656,
"grad_norm": 4.155662536621094,
"learning_rate": 9.134673047280644e-06,
"loss": 0.8199,
"step": 82
},
{
"epoch": 0.664,
"grad_norm": 4.729318141937256,
"learning_rate": 9.110186391454389e-06,
"loss": 0.965,
"step": 83
},
{
"epoch": 0.672,
"grad_norm": 4.260819911956787,
"learning_rate": 9.085391881273182e-06,
"loss": 0.9748,
"step": 84
},
{
"epoch": 0.68,
"grad_norm": 6.071133613586426,
"learning_rate": 9.060291373854252e-06,
"loss": 0.9425,
"step": 85
},
{
"epoch": 0.688,
"grad_norm": 5.414233684539795,
"learning_rate": 9.034886749234112e-06,
"loss": 0.9162,
"step": 86
},
{
"epoch": 0.696,
"grad_norm": 4.333293914794922,
"learning_rate": 9.009179910227767e-06,
"loss": 1.111,
"step": 87
},
{
"epoch": 0.704,
"grad_norm": 4.5121684074401855,
"learning_rate": 8.98317278228618e-06,
"loss": 1.0085,
"step": 88
},
{
"epoch": 0.712,
"grad_norm": 4.061527252197266,
"learning_rate": 8.956867313352055e-06,
"loss": 0.9694,
"step": 89
},
{
"epoch": 0.72,
"grad_norm": 4.977546215057373,
"learning_rate": 8.930265473713939e-06,
"loss": 0.9319,
"step": 90
},
{
"epoch": 0.728,
"grad_norm": 4.120169639587402,
"learning_rate": 8.90336925585864e-06,
"loss": 0.9306,
"step": 91
},
{
"epoch": 0.736,
"grad_norm": 4.532111167907715,
"learning_rate": 8.876180674322006e-06,
"loss": 1.0475,
"step": 92
},
{
"epoch": 0.744,
"grad_norm": 4.304579257965088,
"learning_rate": 8.84870176553801e-06,
"loss": 0.8554,
"step": 93
},
{
"epoch": 0.752,
"grad_norm": 4.010624885559082,
"learning_rate": 8.820934587686247e-06,
"loss": 0.9153,
"step": 94
},
{
"epoch": 0.76,
"grad_norm": 4.0051703453063965,
"learning_rate": 8.792881220537752e-06,
"loss": 0.9669,
"step": 95
},
{
"epoch": 0.768,
"grad_norm": 4.608728885650635,
"learning_rate": 8.764543765299245e-06,
"loss": 0.9256,
"step": 96
},
{
"epoch": 0.776,
"grad_norm": 5.0266194343566895,
"learning_rate": 8.735924344455732e-06,
"loss": 1.0329,
"step": 97
},
{
"epoch": 0.784,
"grad_norm": 3.2545340061187744,
"learning_rate": 8.707025101611546e-06,
"loss": 0.6418,
"step": 98
},
{
"epoch": 0.792,
"grad_norm": 3.816377878189087,
"learning_rate": 8.677848201329775e-06,
"loss": 0.8577,
"step": 99
},
{
"epoch": 0.8,
"grad_norm": 4.356125831604004,
"learning_rate": 8.64839582897015e-06,
"loss": 0.954,
"step": 100
},
{
"epoch": 0.808,
"grad_norm": 4.043392181396484,
"learning_rate": 8.61867019052535e-06,
"loss": 0.9001,
"step": 101
},
{
"epoch": 0.816,
"grad_norm": 4.296792030334473,
"learning_rate": 8.588673512455781e-06,
"loss": 0.9039,
"step": 102
},
{
"epoch": 0.824,
"grad_norm": 3.7618377208709717,
"learning_rate": 8.558408041522801e-06,
"loss": 0.8102,
"step": 103
},
{
"epoch": 0.832,
"grad_norm": 3.152364492416382,
"learning_rate": 8.527876044620453e-06,
"loss": 0.7018,
"step": 104
},
{
"epoch": 0.84,
"grad_norm": 3.8274290561676025,
"learning_rate": 8.497079808605659e-06,
"loss": 0.882,
"step": 105
},
{
"epoch": 0.848,
"grad_norm": 4.074039936065674,
"learning_rate": 8.466021640126946e-06,
"loss": 0.9745,
"step": 106
},
{
"epoch": 0.856,
"grad_norm": 3.847959518432617,
"learning_rate": 8.434703865451666e-06,
"loss": 0.8646,
"step": 107
},
{
"epoch": 0.864,
"grad_norm": 3.3844175338745117,
"learning_rate": 8.403128830291767e-06,
"loss": 0.718,
"step": 108
},
{
"epoch": 0.872,
"grad_norm": 4.180184841156006,
"learning_rate": 8.371298899628091e-06,
"loss": 0.9106,
"step": 109
},
{
"epoch": 0.88,
"grad_norm": 4.034883975982666,
"learning_rate": 8.339216457533244e-06,
"loss": 0.9639,
"step": 110
},
{
"epoch": 0.888,
"grad_norm": 2.6540067195892334,
"learning_rate": 8.306883906993022e-06,
"loss": 0.6191,
"step": 111
},
{
"epoch": 0.896,
"grad_norm": 3.755506992340088,
"learning_rate": 8.274303669726427e-06,
"loss": 0.9765,
"step": 112
},
{
"epoch": 0.904,
"grad_norm": 3.90189266204834,
"learning_rate": 8.24147818600428e-06,
"loss": 0.9972,
"step": 113
},
{
"epoch": 0.912,
"grad_norm": 4.449428558349609,
"learning_rate": 8.20840991446645e-06,
"loss": 0.8947,
"step": 114
},
{
"epoch": 0.92,
"grad_norm": 3.762423515319824,
"learning_rate": 8.175101331937692e-06,
"loss": 0.8339,
"step": 115
},
{
"epoch": 0.928,
"grad_norm": 4.51316499710083,
"learning_rate": 8.141554933242135e-06,
"loss": 1.0236,
"step": 116
},
{
"epoch": 0.936,
"grad_norm": 4.525044918060303,
"learning_rate": 8.10777323101642e-06,
"loss": 1.0495,
"step": 117
},
{
"epoch": 0.944,
"grad_norm": 3.1055142879486084,
"learning_rate": 8.073758755521506e-06,
"loss": 0.7492,
"step": 118
},
{
"epoch": 0.952,
"grad_norm": 3.1218674182891846,
"learning_rate": 8.03951405445314e-06,
"loss": 0.6767,
"step": 119
},
{
"epoch": 0.96,
"grad_norm": 3.8074257373809814,
"learning_rate": 8.005041692751055e-06,
"loss": 0.8988,
"step": 120
},
{
"epoch": 0.968,
"grad_norm": 4.078440189361572,
"learning_rate": 7.970344252406832e-06,
"loss": 0.9037,
"step": 121
},
{
"epoch": 0.976,
"grad_norm": 3.414644241333008,
"learning_rate": 7.935424332270523e-06,
"loss": 0.8125,
"step": 122
},
{
"epoch": 0.984,
"grad_norm": 3.545790195465088,
"learning_rate": 7.900284547855992e-06,
"loss": 0.7914,
"step": 123
},
{
"epoch": 0.992,
"grad_norm": 3.7655632495880127,
"learning_rate": 7.864927531145012e-06,
"loss": 0.9433,
"step": 124
},
{
"epoch": 1.0,
"grad_norm": 3.767230272293091,
"learning_rate": 7.829355930390126e-06,
"loss": 0.8899,
"step": 125
},
{
"epoch": 1.008,
"grad_norm": 3.349618434906006,
"learning_rate": 7.7935724099163e-06,
"loss": 0.5341,
"step": 126
},
{
"epoch": 1.016,
"grad_norm": 3.1290650367736816,
"learning_rate": 7.757579649921354e-06,
"loss": 0.4674,
"step": 127
},
{
"epoch": 1.024,
"grad_norm": 3.2256603240966797,
"learning_rate": 7.721380346275221e-06,
"loss": 0.4881,
"step": 128
},
{
"epoch": 1.032,
"grad_norm": 3.359593629837036,
"learning_rate": 7.684977210318024e-06,
"loss": 0.4716,
"step": 129
},
{
"epoch": 1.04,
"grad_norm": 3.384568929672241,
"learning_rate": 7.648372968656995e-06,
"loss": 0.4201,
"step": 130
},
{
"epoch": 1.048,
"grad_norm": 3.3462984561920166,
"learning_rate": 7.611570362962247e-06,
"loss": 0.4432,
"step": 131
},
{
"epoch": 1.056,
"grad_norm": 3.522052049636841,
"learning_rate": 7.574572149761437e-06,
"loss": 0.3776,
"step": 132
},
{
"epoch": 1.064,
"grad_norm": 3.079897403717041,
"learning_rate": 7.5373811002332785e-06,
"loss": 0.3753,
"step": 133
},
{
"epoch": 1.072,
"grad_norm": 3.354200601577759,
"learning_rate": 7.500000000000001e-06,
"loss": 0.3753,
"step": 134
},
{
"epoch": 1.08,
"grad_norm": 4.20035982131958,
"learning_rate": 7.462431648918689e-06,
"loss": 0.4666,
"step": 135
},
{
"epoch": 1.088,
"grad_norm": 3.8432064056396484,
"learning_rate": 7.424678860871584e-06,
"loss": 0.4115,
"step": 136
},
{
"epoch": 1.096,
"grad_norm": 3.5697290897369385,
"learning_rate": 7.3867444635553165e-06,
"loss": 0.4684,
"step": 137
},
{
"epoch": 1.104,
"grad_norm": 3.6465344429016113,
"learning_rate": 7.3486312982691134e-06,
"loss": 0.3734,
"step": 138
},
{
"epoch": 1.112,
"grad_norm": 4.413166046142578,
"learning_rate": 7.310342219701981e-06,
"loss": 0.4894,
"step": 139
},
{
"epoch": 1.12,
"grad_norm": 3.4555704593658447,
"learning_rate": 7.271880095718895e-06,
"loss": 0.4029,
"step": 140
},
{
"epoch": 1.1280000000000001,
"grad_norm": 4.0380778312683105,
"learning_rate": 7.233247807145989e-06,
"loss": 0.4223,
"step": 141
},
{
"epoch": 1.1360000000000001,
"grad_norm": 3.5603420734405518,
"learning_rate": 7.19444824755478e-06,
"loss": 0.4051,
"step": 142
},
{
"epoch": 1.144,
"grad_norm": 4.031632900238037,
"learning_rate": 7.155484323045442e-06,
"loss": 0.3679,
"step": 143
},
{
"epoch": 1.152,
"grad_norm": 3.6648731231689453,
"learning_rate": 7.11635895202914e-06,
"loss": 0.4015,
"step": 144
},
{
"epoch": 1.16,
"grad_norm": 1.7989451885223389,
"learning_rate": 7.0770750650094335e-06,
"loss": 0.2012,
"step": 145
},
{
"epoch": 1.168,
"grad_norm": 3.7954277992248535,
"learning_rate": 7.037635604362786e-06,
"loss": 0.4156,
"step": 146
},
{
"epoch": 1.176,
"grad_norm": 3.169431209564209,
"learning_rate": 6.9980435241181785e-06,
"loss": 0.3139,
"step": 147
},
{
"epoch": 1.184,
"grad_norm": 2.9651694297790527,
"learning_rate": 6.958301789735853e-06,
"loss": 0.3734,
"step": 148
},
{
"epoch": 1.192,
"grad_norm": 3.9157493114471436,
"learning_rate": 6.918413377885193e-06,
"loss": 0.4212,
"step": 149
},
{
"epoch": 1.2,
"grad_norm": 3.39764666557312,
"learning_rate": 6.878381276221777e-06,
"loss": 0.4176,
"step": 150
},
{
"epoch": 1.208,
"grad_norm": 3.67905855178833,
"learning_rate": 6.838208483163601e-06,
"loss": 0.3854,
"step": 151
},
{
"epoch": 1.216,
"grad_norm": 3.049020767211914,
"learning_rate": 6.797898007666493e-06,
"loss": 0.3203,
"step": 152
},
{
"epoch": 1.224,
"grad_norm": 3.756164073944092,
"learning_rate": 6.757452868998737e-06,
"loss": 0.4249,
"step": 153
},
{
"epoch": 1.232,
"grad_norm": 3.4420013427734375,
"learning_rate": 6.716876096514944e-06,
"loss": 0.3951,
"step": 154
},
{
"epoch": 1.24,
"grad_norm": 3.7635958194732666,
"learning_rate": 6.676170729429132e-06,
"loss": 0.4374,
"step": 155
},
{
"epoch": 1.248,
"grad_norm": 4.019578456878662,
"learning_rate": 6.635339816587109e-06,
"loss": 0.4227,
"step": 156
},
{
"epoch": 1.256,
"grad_norm": 3.9594626426696777,
"learning_rate": 6.594386416238095e-06,
"loss": 0.4282,
"step": 157
},
{
"epoch": 1.264,
"grad_norm": 3.038630723953247,
"learning_rate": 6.553313595805666e-06,
"loss": 0.2928,
"step": 158
},
{
"epoch": 1.272,
"grad_norm": 3.7673959732055664,
"learning_rate": 6.512124431658006e-06,
"loss": 0.3923,
"step": 159
},
{
"epoch": 1.28,
"grad_norm": 3.17903470993042,
"learning_rate": 6.470822008877482e-06,
"loss": 0.3951,
"step": 160
},
{
"epoch": 1.288,
"grad_norm": 2.7710318565368652,
"learning_rate": 6.4294094210295725e-06,
"loss": 0.3492,
"step": 161
},
{
"epoch": 1.296,
"grad_norm": 3.473247766494751,
"learning_rate": 6.3878897699311525e-06,
"loss": 0.3576,
"step": 162
},
{
"epoch": 1.304,
"grad_norm": 3.619020938873291,
"learning_rate": 6.346266165418173e-06,
"loss": 0.3943,
"step": 163
},
{
"epoch": 1.312,
"grad_norm": 3.4859511852264404,
"learning_rate": 6.304541725112734e-06,
"loss": 0.4049,
"step": 164
},
{
"epoch": 1.32,
"grad_norm": 2.91342830657959,
"learning_rate": 6.262719574189564e-06,
"loss": 0.3378,
"step": 165
},
{
"epoch": 1.328,
"grad_norm": 3.9546265602111816,
"learning_rate": 6.2208028451419575e-06,
"loss": 0.4169,
"step": 166
},
{
"epoch": 1.336,
"grad_norm": 3.054875373840332,
"learning_rate": 6.178794677547138e-06,
"loss": 0.3729,
"step": 167
},
{
"epoch": 1.3439999999999999,
"grad_norm": 3.465587615966797,
"learning_rate": 6.136698217831106e-06,
"loss": 0.3959,
"step": 168
},
{
"epoch": 1.3519999999999999,
"grad_norm": 3.6577236652374268,
"learning_rate": 6.094516619032975e-06,
"loss": 0.411,
"step": 169
},
{
"epoch": 1.3599999999999999,
"grad_norm": 3.6556050777435303,
"learning_rate": 6.052253040568804e-06,
"loss": 0.3896,
"step": 170
},
{
"epoch": 1.3679999999999999,
"grad_norm": 3.785115957260132,
"learning_rate": 6.009910647994956e-06,
"loss": 0.4316,
"step": 171
},
{
"epoch": 1.376,
"grad_norm": 3.3568155765533447,
"learning_rate": 5.967492612770999e-06,
"loss": 0.3907,
"step": 172
},
{
"epoch": 1.384,
"grad_norm": 3.668944835662842,
"learning_rate": 5.925002112022158e-06,
"loss": 0.4016,
"step": 173
},
{
"epoch": 1.392,
"grad_norm": 3.598724603652954,
"learning_rate": 5.882442328301356e-06,
"loss": 0.3852,
"step": 174
},
{
"epoch": 1.4,
"grad_norm": 3.049689531326294,
"learning_rate": 5.839816449350824e-06,
"loss": 0.3445,
"step": 175
},
{
"epoch": 1.408,
"grad_norm": 3.61523699760437,
"learning_rate": 5.7971276678633625e-06,
"loss": 0.4098,
"step": 176
},
{
"epoch": 1.416,
"grad_norm": 2.924088478088379,
"learning_rate": 5.754379181243179e-06,
"loss": 0.3861,
"step": 177
},
{
"epoch": 1.424,
"grad_norm": 2.239245653152466,
"learning_rate": 5.711574191366427e-06,
"loss": 0.2368,
"step": 178
},
{
"epoch": 1.432,
"grad_norm": 3.4147002696990967,
"learning_rate": 5.668715904341365e-06,
"loss": 0.3753,
"step": 179
},
{
"epoch": 1.44,
"grad_norm": 3.592535972595215,
"learning_rate": 5.62580753026823e-06,
"loss": 0.4537,
"step": 180
},
{
"epoch": 1.448,
"grad_norm": 3.212759256362915,
"learning_rate": 5.5828522829987965e-06,
"loss": 0.3578,
"step": 181
},
{
"epoch": 1.456,
"grad_norm": 3.454314947128296,
"learning_rate": 5.539853379895656e-06,
"loss": 0.4014,
"step": 182
},
{
"epoch": 1.464,
"grad_norm": 3.1235597133636475,
"learning_rate": 5.496814041591234e-06,
"loss": 0.3665,
"step": 183
},
{
"epoch": 1.472,
"grad_norm": 3.6323885917663574,
"learning_rate": 5.453737491746572e-06,
"loss": 0.4384,
"step": 184
},
{
"epoch": 1.48,
"grad_norm": 3.9696545600891113,
"learning_rate": 5.410626956809864e-06,
"loss": 0.3937,
"step": 185
},
{
"epoch": 1.488,
"grad_norm": 3.2491090297698975,
"learning_rate": 5.367485665774802e-06,
"loss": 0.3604,
"step": 186
},
{
"epoch": 1.496,
"grad_norm": 3.255113363265991,
"learning_rate": 5.324316849938715e-06,
"loss": 0.3593,
"step": 187
},
{
"epoch": 1.504,
"grad_norm": 3.255185127258301,
"learning_rate": 5.281123742660558e-06,
"loss": 0.3686,
"step": 188
},
{
"epoch": 1.512,
"grad_norm": 3.1836984157562256,
"learning_rate": 5.237909579118713e-06,
"loss": 0.3952,
"step": 189
},
{
"epoch": 1.52,
"grad_norm": 2.8069233894348145,
"learning_rate": 5.194677596068689e-06,
"loss": 0.3691,
"step": 190
},
{
"epoch": 1.528,
"grad_norm": 3.9060983657836914,
"learning_rate": 5.1514310316006835e-06,
"loss": 0.4391,
"step": 191
},
{
"epoch": 1.536,
"grad_norm": 3.008826971054077,
"learning_rate": 5.1081731248970435e-06,
"loss": 0.3786,
"step": 192
},
{
"epoch": 1.544,
"grad_norm": 3.1414108276367188,
"learning_rate": 5.064907115989655e-06,
"loss": 0.383,
"step": 193
},
{
"epoch": 1.552,
"grad_norm": 2.948110580444336,
"learning_rate": 5.021636245517261e-06,
"loss": 0.3169,
"step": 194
},
{
"epoch": 1.56,
"grad_norm": 2.9156086444854736,
"learning_rate": 4.978363754482741e-06,
"loss": 0.3347,
"step": 195
},
{
"epoch": 1.568,
"grad_norm": 3.908154249191284,
"learning_rate": 4.935092884010347e-06,
"loss": 0.3732,
"step": 196
},
{
"epoch": 1.576,
"grad_norm": 3.8506288528442383,
"learning_rate": 4.891826875102958e-06,
"loss": 0.3979,
"step": 197
},
{
"epoch": 1.584,
"grad_norm": 3.697066068649292,
"learning_rate": 4.848568968399317e-06,
"loss": 0.4163,
"step": 198
},
{
"epoch": 1.592,
"grad_norm": 3.3716206550598145,
"learning_rate": 4.805322403931312e-06,
"loss": 0.3483,
"step": 199
},
{
"epoch": 1.6,
"grad_norm": 3.373406171798706,
"learning_rate": 4.762090420881289e-06,
"loss": 0.3273,
"step": 200
},
{
"epoch": 1.608,
"grad_norm": 2.6628761291503906,
"learning_rate": 4.718876257339444e-06,
"loss": 0.2997,
"step": 201
},
{
"epoch": 1.616,
"grad_norm": 3.2516515254974365,
"learning_rate": 4.6756831500612846e-06,
"loss": 0.3358,
"step": 202
},
{
"epoch": 1.624,
"grad_norm": 3.141937732696533,
"learning_rate": 4.632514334225201e-06,
"loss": 0.3863,
"step": 203
},
{
"epoch": 1.6320000000000001,
"grad_norm": 3.4185385704040527,
"learning_rate": 4.589373043190137e-06,
"loss": 0.3659,
"step": 204
},
{
"epoch": 1.6400000000000001,
"grad_norm": 3.6510894298553467,
"learning_rate": 4.546262508253429e-06,
"loss": 0.4285,
"step": 205
},
{
"epoch": 1.6480000000000001,
"grad_norm": 3.842224597930908,
"learning_rate": 4.503185958408767e-06,
"loss": 0.4413,
"step": 206
},
{
"epoch": 1.6560000000000001,
"grad_norm": 3.594395160675049,
"learning_rate": 4.460146620104347e-06,
"loss": 0.3886,
"step": 207
},
{
"epoch": 1.6640000000000001,
"grad_norm": 3.3284106254577637,
"learning_rate": 4.417147717001205e-06,
"loss": 0.3472,
"step": 208
},
{
"epoch": 1.6720000000000002,
"grad_norm": 2.773998737335205,
"learning_rate": 4.374192469731771e-06,
"loss": 0.3241,
"step": 209
},
{
"epoch": 1.6800000000000002,
"grad_norm": 3.6991684436798096,
"learning_rate": 4.331284095658637e-06,
"loss": 0.3699,
"step": 210
},
{
"epoch": 1.688,
"grad_norm": 3.5816473960876465,
"learning_rate": 4.2884258086335755e-06,
"loss": 0.3456,
"step": 211
},
{
"epoch": 1.696,
"grad_norm": 3.5288710594177246,
"learning_rate": 4.245620818756822e-06,
"loss": 0.3499,
"step": 212
},
{
"epoch": 1.704,
"grad_norm": 3.8552918434143066,
"learning_rate": 4.202872332136639e-06,
"loss": 0.3945,
"step": 213
},
{
"epoch": 1.712,
"grad_norm": 3.64853835105896,
"learning_rate": 4.160183550649176e-06,
"loss": 0.3783,
"step": 214
},
{
"epoch": 1.72,
"grad_norm": 3.5275673866271973,
"learning_rate": 4.117557671698648e-06,
"loss": 0.3804,
"step": 215
},
{
"epoch": 1.728,
"grad_norm": 3.250561237335205,
"learning_rate": 4.074997887977843e-06,
"loss": 0.3737,
"step": 216
},
{
"epoch": 1.736,
"grad_norm": 3.6299099922180176,
"learning_rate": 4.032507387229002e-06,
"loss": 0.4378,
"step": 217
},
{
"epoch": 1.744,
"grad_norm": 2.887838125228882,
"learning_rate": 3.9900893520050446e-06,
"loss": 0.3499,
"step": 218
},
{
"epoch": 1.752,
"grad_norm": 3.857203483581543,
"learning_rate": 3.9477469594311975e-06,
"loss": 0.4015,
"step": 219
},
{
"epoch": 1.76,
"grad_norm": 3.387779712677002,
"learning_rate": 3.905483380967027e-06,
"loss": 0.3364,
"step": 220
},
{
"epoch": 1.768,
"grad_norm": 2.478400707244873,
"learning_rate": 3.863301782168896e-06,
"loss": 0.2731,
"step": 221
},
{
"epoch": 1.776,
"grad_norm": 3.101461172103882,
"learning_rate": 3.821205322452863e-06,
"loss": 0.348,
"step": 222
},
{
"epoch": 1.784,
"grad_norm": 3.1707546710968018,
"learning_rate": 3.779197154858044e-06,
"loss": 0.3515,
"step": 223
},
{
"epoch": 1.792,
"grad_norm": 3.789344072341919,
"learning_rate": 3.7372804258104367e-06,
"loss": 0.4217,
"step": 224
},
{
"epoch": 1.8,
"grad_norm": 3.623223066329956,
"learning_rate": 3.695458274887268e-06,
"loss": 0.346,
"step": 225
},
{
"epoch": 1.808,
"grad_norm": 3.591975688934326,
"learning_rate": 3.6537338345818273e-06,
"loss": 0.3849,
"step": 226
},
{
"epoch": 1.8159999999999998,
"grad_norm": 3.0750720500946045,
"learning_rate": 3.6121102300688504e-06,
"loss": 0.3282,
"step": 227
},
{
"epoch": 1.8239999999999998,
"grad_norm": 3.2269608974456787,
"learning_rate": 3.5705905789704296e-06,
"loss": 0.3346,
"step": 228
},
{
"epoch": 1.8319999999999999,
"grad_norm": 2.9104883670806885,
"learning_rate": 3.529177991122519e-06,
"loss": 0.2984,
"step": 229
},
{
"epoch": 1.8399999999999999,
"grad_norm": 3.574671745300293,
"learning_rate": 3.487875568341995e-06,
"loss": 0.4261,
"step": 230
},
{
"epoch": 1.8479999999999999,
"grad_norm": 3.389310836791992,
"learning_rate": 3.446686404194337e-06,
"loss": 0.3784,
"step": 231
},
{
"epoch": 1.8559999999999999,
"grad_norm": 3.1779441833496094,
"learning_rate": 3.4056135837619077e-06,
"loss": 0.386,
"step": 232
},
{
"epoch": 1.8639999999999999,
"grad_norm": 3.3408713340759277,
"learning_rate": 3.3646601834128924e-06,
"loss": 0.3833,
"step": 233
},
{
"epoch": 1.8719999999999999,
"grad_norm": 3.0924367904663086,
"learning_rate": 3.3238292705708675e-06,
"loss": 0.4116,
"step": 234
},
{
"epoch": 1.88,
"grad_norm": 3.2094311714172363,
"learning_rate": 3.2831239034850593e-06,
"loss": 0.3484,
"step": 235
},
{
"epoch": 1.888,
"grad_norm": 3.543527841567993,
"learning_rate": 3.2425471310012645e-06,
"loss": 0.3819,
"step": 236
},
{
"epoch": 1.896,
"grad_norm": 3.1445436477661133,
"learning_rate": 3.2021019923335093e-06,
"loss": 0.3715,
"step": 237
},
{
"epoch": 1.904,
"grad_norm": 3.702820301055908,
"learning_rate": 3.1617915168363994e-06,
"loss": 0.3562,
"step": 238
},
{
"epoch": 1.912,
"grad_norm": 3.1192967891693115,
"learning_rate": 3.121618723778225e-06,
"loss": 0.323,
"step": 239
},
{
"epoch": 1.92,
"grad_norm": 3.54319167137146,
"learning_rate": 3.081586622114809e-06,
"loss": 0.3706,
"step": 240
},
{
"epoch": 1.928,
"grad_norm": 3.581674337387085,
"learning_rate": 3.041698210264149e-06,
"loss": 0.3759,
"step": 241
},
{
"epoch": 1.936,
"grad_norm": 3.044254779815674,
"learning_rate": 3.001956475881822e-06,
"loss": 0.3056,
"step": 242
},
{
"epoch": 1.944,
"grad_norm": 2.597248077392578,
"learning_rate": 2.962364395637216e-06,
"loss": 0.302,
"step": 243
},
{
"epoch": 1.952,
"grad_norm": 3.0271735191345215,
"learning_rate": 2.9229249349905686e-06,
"loss": 0.3556,
"step": 244
},
{
"epoch": 1.96,
"grad_norm": 3.4029977321624756,
"learning_rate": 2.8836410479708625e-06,
"loss": 0.3427,
"step": 245
},
{
"epoch": 1.968,
"grad_norm": 2.715904951095581,
"learning_rate": 2.84451567695456e-06,
"loss": 0.3521,
"step": 246
},
{
"epoch": 1.976,
"grad_norm": 3.090629816055298,
"learning_rate": 2.805551752445222e-06,
"loss": 0.3613,
"step": 247
},
{
"epoch": 1.984,
"grad_norm": 4.626333236694336,
"learning_rate": 2.766752192854012e-06,
"loss": 0.3681,
"step": 248
},
{
"epoch": 1.992,
"grad_norm": 3.3586225509643555,
"learning_rate": 2.728119904281105e-06,
"loss": 0.3449,
"step": 249
},
{
"epoch": 2.0,
"grad_norm": 3.76904296875,
"learning_rate": 2.689657780298019e-06,
"loss": 0.442,
"step": 250
},
{
"epoch": 2.008,
"grad_norm": 2.4120800495147705,
"learning_rate": 2.651368701730889e-06,
"loss": 0.1552,
"step": 251
},
{
"epoch": 2.016,
"grad_norm": 2.1722428798675537,
"learning_rate": 2.6132555364446856e-06,
"loss": 0.1471,
"step": 252
},
{
"epoch": 2.024,
"grad_norm": 1.9220709800720215,
"learning_rate": 2.5753211391284172e-06,
"loss": 0.1288,
"step": 253
},
{
"epoch": 2.032,
"grad_norm": 1.6846203804016113,
"learning_rate": 2.537568351081311e-06,
"loss": 0.1133,
"step": 254
},
{
"epoch": 2.04,
"grad_norm": 2.1855199337005615,
"learning_rate": 2.5000000000000015e-06,
"loss": 0.1387,
"step": 255
},
{
"epoch": 2.048,
"grad_norm": 1.647302508354187,
"learning_rate": 2.4626188997667224e-06,
"loss": 0.114,
"step": 256
},
{
"epoch": 2.056,
"grad_norm": 2.058126449584961,
"learning_rate": 2.425427850238565e-06,
"loss": 0.1312,
"step": 257
},
{
"epoch": 2.064,
"grad_norm": 1.7993108034133911,
"learning_rate": 2.388429637037753e-06,
"loss": 0.1141,
"step": 258
},
{
"epoch": 2.072,
"grad_norm": 1.7768778800964355,
"learning_rate": 2.3516270313430085e-06,
"loss": 0.0932,
"step": 259
},
{
"epoch": 2.08,
"grad_norm": 2.0583088397979736,
"learning_rate": 2.3150227896819782e-06,
"loss": 0.1132,
"step": 260
},
{
"epoch": 2.088,
"grad_norm": 1.990849256515503,
"learning_rate": 2.278619653724781e-06,
"loss": 0.1163,
"step": 261
},
{
"epoch": 2.096,
"grad_norm": 2.4085659980773926,
"learning_rate": 2.2424203500786473e-06,
"loss": 0.1231,
"step": 262
},
{
"epoch": 2.104,
"grad_norm": 2.169581174850464,
"learning_rate": 2.206427590083703e-06,
"loss": 0.0897,
"step": 263
},
{
"epoch": 2.112,
"grad_norm": 2.2898690700531006,
"learning_rate": 2.170644069609876e-06,
"loss": 0.1067,
"step": 264
},
{
"epoch": 2.12,
"grad_norm": 2.2840161323547363,
"learning_rate": 2.1350724688549906e-06,
"loss": 0.1318,
"step": 265
},
{
"epoch": 2.128,
"grad_norm": 1.61070716381073,
"learning_rate": 2.09971545214401e-06,
"loss": 0.072,
"step": 266
},
{
"epoch": 2.136,
"grad_norm": 2.397213935852051,
"learning_rate": 2.0645756677294788e-06,
"loss": 0.1338,
"step": 267
},
{
"epoch": 2.144,
"grad_norm": 2.593963146209717,
"learning_rate": 2.029655747593169e-06,
"loss": 0.1292,
"step": 268
},
{
"epoch": 2.152,
"grad_norm": 2.615065097808838,
"learning_rate": 1.9949583072489455e-06,
"loss": 0.1222,
"step": 269
},
{
"epoch": 2.16,
"grad_norm": 2.576918601989746,
"learning_rate": 1.9604859455468587e-06,
"loss": 0.1225,
"step": 270
},
{
"epoch": 2.168,
"grad_norm": 2.4940643310546875,
"learning_rate": 1.926241244478496e-06,
"loss": 0.099,
"step": 271
},
{
"epoch": 2.176,
"grad_norm": 2.0329341888427734,
"learning_rate": 1.8922267689835806e-06,
"loss": 0.0982,
"step": 272
},
{
"epoch": 2.184,
"grad_norm": 2.4752538204193115,
"learning_rate": 1.8584450667578656e-06,
"loss": 0.1165,
"step": 273
},
{
"epoch": 2.192,
"grad_norm": 2.5044078826904297,
"learning_rate": 1.8248986680623077e-06,
"loss": 0.1143,
"step": 274
},
{
"epoch": 2.2,
"grad_norm": 2.878774642944336,
"learning_rate": 1.7915900855335506e-06,
"loss": 0.1267,
"step": 275
},
{
"epoch": 2.208,
"grad_norm": 2.43483829498291,
"learning_rate": 1.7585218139957205e-06,
"loss": 0.1225,
"step": 276
},
{
"epoch": 2.216,
"grad_norm": 2.408351182937622,
"learning_rate": 1.7256963302735752e-06,
"loss": 0.1141,
"step": 277
},
{
"epoch": 2.224,
"grad_norm": 1.8009055852890015,
"learning_rate": 1.6931160930069789e-06,
"loss": 0.0855,
"step": 278
},
{
"epoch": 2.232,
"grad_norm": 2.0538299083709717,
"learning_rate": 1.6607835424667578e-06,
"loss": 0.1083,
"step": 279
},
{
"epoch": 2.24,
"grad_norm": 1.9968758821487427,
"learning_rate": 1.6287011003719105e-06,
"loss": 0.1242,
"step": 280
},
{
"epoch": 2.248,
"grad_norm": 2.342061996459961,
"learning_rate": 1.596871169708235e-06,
"loss": 0.1095,
"step": 281
},
{
"epoch": 2.2560000000000002,
"grad_norm": 1.7806224822998047,
"learning_rate": 1.5652961345483353e-06,
"loss": 0.0796,
"step": 282
},
{
"epoch": 2.2640000000000002,
"grad_norm": 2.1462020874023438,
"learning_rate": 1.5339783598730568e-06,
"loss": 0.1188,
"step": 283
},
{
"epoch": 2.2720000000000002,
"grad_norm": 1.9209872484207153,
"learning_rate": 1.5029201913943425e-06,
"loss": 0.0935,
"step": 284
},
{
"epoch": 2.2800000000000002,
"grad_norm": 2.2289228439331055,
"learning_rate": 1.4721239553795485e-06,
"loss": 0.101,
"step": 285
},
{
"epoch": 2.288,
"grad_norm": 2.047801971435547,
"learning_rate": 1.4415919584771999e-06,
"loss": 0.1052,
"step": 286
},
{
"epoch": 2.296,
"grad_norm": 2.281033754348755,
"learning_rate": 1.4113264875442201e-06,
"loss": 0.1318,
"step": 287
},
{
"epoch": 2.304,
"grad_norm": 2.387455463409424,
"learning_rate": 1.3813298094746491e-06,
"loss": 0.1213,
"step": 288
},
{
"epoch": 2.312,
"grad_norm": 1.8978601694107056,
"learning_rate": 1.35160417102985e-06,
"loss": 0.0952,
"step": 289
},
{
"epoch": 2.32,
"grad_norm": 2.128934383392334,
"learning_rate": 1.3221517986702249e-06,
"loss": 0.104,
"step": 290
},
{
"epoch": 2.328,
"grad_norm": 2.6699206829071045,
"learning_rate": 1.292974898388456e-06,
"loss": 0.1118,
"step": 291
},
{
"epoch": 2.336,
"grad_norm": 1.8229273557662964,
"learning_rate": 1.2640756555442684e-06,
"loss": 0.0806,
"step": 292
},
{
"epoch": 2.344,
"grad_norm": 2.197240114212036,
"learning_rate": 1.235456234700756e-06,
"loss": 0.1052,
"step": 293
},
{
"epoch": 2.352,
"grad_norm": 1.7712138891220093,
"learning_rate": 1.207118779462248e-06,
"loss": 0.08,
"step": 294
},
{
"epoch": 2.36,
"grad_norm": 2.1612143516540527,
"learning_rate": 1.1790654123137552e-06,
"loss": 0.1032,
"step": 295
},
{
"epoch": 2.368,
"grad_norm": 1.9840272665023804,
"learning_rate": 1.1512982344619904e-06,
"loss": 0.1081,
"step": 296
},
{
"epoch": 2.376,
"grad_norm": 2.7584738731384277,
"learning_rate": 1.1238193256779955e-06,
"loss": 0.144,
"step": 297
},
{
"epoch": 2.384,
"grad_norm": 1.7986438274383545,
"learning_rate": 1.0966307441413598e-06,
"loss": 0.0914,
"step": 298
},
{
"epoch": 2.392,
"grad_norm": 2.0376522541046143,
"learning_rate": 1.0697345262860638e-06,
"loss": 0.0957,
"step": 299
},
{
"epoch": 2.4,
"grad_norm": 2.506294012069702,
"learning_rate": 1.0431326866479457e-06,
"loss": 0.1412,
"step": 300
},
{
"epoch": 2.408,
"grad_norm": 2.2676737308502197,
"learning_rate": 1.01682721771382e-06,
"loss": 0.1173,
"step": 301
},
{
"epoch": 2.416,
"grad_norm": 2.264312982559204,
"learning_rate": 9.908200897722332e-07,
"loss": 0.0989,
"step": 302
},
{
"epoch": 2.424,
"grad_norm": 1.778229832649231,
"learning_rate": 9.6511325076589e-07,
"loss": 0.0897,
"step": 303
},
{
"epoch": 2.432,
"grad_norm": 2.3181490898132324,
"learning_rate": 9.397086261457511e-07,
"loss": 0.1087,
"step": 304
},
{
"epoch": 2.44,
"grad_norm": 2.592851161956787,
"learning_rate": 9.146081187268185e-07,
"loss": 0.1214,
"step": 305
},
{
"epoch": 2.448,
"grad_norm": 2.143307685852051,
"learning_rate": 8.898136085456127e-07,
"loss": 0.1101,
"step": 306
},
{
"epoch": 2.456,
"grad_norm": 2.1241097450256348,
"learning_rate": 8.65326952719357e-07,
"loss": 0.092,
"step": 307
},
{
"epoch": 2.464,
"grad_norm": 2.362586259841919,
"learning_rate": 8.411499853068783e-07,
"loss": 0.1037,
"step": 308
},
{
"epoch": 2.472,
"grad_norm": 2.715766668319702,
"learning_rate": 8.172845171712379e-07,
"loss": 0.1426,
"step": 309
},
{
"epoch": 2.48,
"grad_norm": 2.5036025047302246,
"learning_rate": 7.937323358440935e-07,
"loss": 0.1008,
"step": 310
},
{
"epoch": 2.488,
"grad_norm": 2.048517942428589,
"learning_rate": 7.70495205391818e-07,
"loss": 0.094,
"step": 311
},
{
"epoch": 2.496,
"grad_norm": 2.401021957397461,
"learning_rate": 7.475748662833615e-07,
"loss": 0.1199,
"step": 312
},
{
"epoch": 2.504,
"grad_norm": 2.090749979019165,
"learning_rate": 7.249730352599e-07,
"loss": 0.0962,
"step": 313
},
{
"epoch": 2.512,
"grad_norm": 2.547435760498047,
"learning_rate": 7.026914052062433e-07,
"loss": 0.1004,
"step": 314
},
{
"epoch": 2.52,
"grad_norm": 2.3148531913757324,
"learning_rate": 6.807316450240425e-07,
"loss": 0.1038,
"step": 315
},
{
"epoch": 2.528,
"grad_norm": 2.1704905033111572,
"learning_rate": 6.590953995067812e-07,
"loss": 0.1011,
"step": 316
},
{
"epoch": 2.536,
"grad_norm": 2.492382287979126,
"learning_rate": 6.377842892165892e-07,
"loss": 0.1237,
"step": 317
},
{
"epoch": 2.544,
"grad_norm": 2.05409574508667,
"learning_rate": 6.167999103628569e-07,
"loss": 0.0918,
"step": 318
},
{
"epoch": 2.552,
"grad_norm": 1.8564343452453613,
"learning_rate": 5.961438346826792e-07,
"loss": 0.0923,
"step": 319
},
{
"epoch": 2.56,
"grad_norm": 1.69510817527771,
"learning_rate": 5.758176093231294e-07,
"loss": 0.0786,
"step": 320
},
{
"epoch": 2.568,
"grad_norm": 2.1585428714752197,
"learning_rate": 5.558227567253832e-07,
"loss": 0.1026,
"step": 321
},
{
"epoch": 2.576,
"grad_norm": 2.2504143714904785,
"learning_rate": 5.361607745106817e-07,
"loss": 0.1039,
"step": 322
},
{
"epoch": 2.584,
"grad_norm": 1.841838002204895,
"learning_rate": 5.168331353681643e-07,
"loss": 0.0741,
"step": 323
},
{
"epoch": 2.592,
"grad_norm": 1.9490315914154053,
"learning_rate": 4.97841286944557e-07,
"loss": 0.0916,
"step": 324
},
{
"epoch": 2.6,
"grad_norm": 1.7840054035186768,
"learning_rate": 4.791866517357491e-07,
"loss": 0.0901,
"step": 325
},
{
"epoch": 2.608,
"grad_norm": 2.383761167526245,
"learning_rate": 4.608706269802471e-07,
"loss": 0.1122,
"step": 326
},
{
"epoch": 2.616,
"grad_norm": 1.987309455871582,
"learning_rate": 4.428945845545168e-07,
"loss": 0.0962,
"step": 327
},
{
"epoch": 2.624,
"grad_norm": 1.6439216136932373,
"learning_rate": 4.2525987087023433e-07,
"loss": 0.0912,
"step": 328
},
{
"epoch": 2.632,
"grad_norm": 2.315098285675049,
"learning_rate": 4.0796780677343606e-07,
"loss": 0.1073,
"step": 329
},
{
"epoch": 2.64,
"grad_norm": 1.6819984912872314,
"learning_rate": 3.910196874455896e-07,
"loss": 0.0793,
"step": 330
},
{
"epoch": 2.648,
"grad_norm": 2.155921220779419,
"learning_rate": 3.744167823065814e-07,
"loss": 0.1005,
"step": 331
},
{
"epoch": 2.656,
"grad_norm": 1.8949271440505981,
"learning_rate": 3.581603349196372e-07,
"loss": 0.0875,
"step": 332
},
{
"epoch": 2.664,
"grad_norm": 1.8800361156463623,
"learning_rate": 3.4225156289818096e-07,
"loss": 0.1002,
"step": 333
},
{
"epoch": 2.672,
"grad_norm": 1.8953866958618164,
"learning_rate": 3.26691657814634e-07,
"loss": 0.0949,
"step": 334
},
{
"epoch": 2.68,
"grad_norm": 1.751220464706421,
"learning_rate": 3.1148178511116624e-07,
"loss": 0.0916,
"step": 335
},
{
"epoch": 2.6879999999999997,
"grad_norm": 2.4002716541290283,
"learning_rate": 2.966230840124007e-07,
"loss": 0.1033,
"step": 336
},
{
"epoch": 2.6959999999999997,
"grad_norm": 2.0156638622283936,
"learning_rate": 2.821166674400905e-07,
"loss": 0.0859,
"step": 337
},
{
"epoch": 2.7039999999999997,
"grad_norm": 1.1491763591766357,
"learning_rate": 2.6796362192975766e-07,
"loss": 0.0636,
"step": 338
},
{
"epoch": 2.7119999999999997,
"grad_norm": 2.7888073921203613,
"learning_rate": 2.5416500754931294e-07,
"loss": 0.1411,
"step": 339
},
{
"epoch": 2.7199999999999998,
"grad_norm": 1.975098967552185,
"learning_rate": 2.407218578196524e-07,
"loss": 0.08,
"step": 340
},
{
"epoch": 2.7279999999999998,
"grad_norm": 2.278658628463745,
"learning_rate": 2.2763517963725169e-07,
"loss": 0.1183,
"step": 341
},
{
"epoch": 2.7359999999999998,
"grad_norm": 2.2175419330596924,
"learning_rate": 2.1490595319874574e-07,
"loss": 0.1112,
"step": 342
},
{
"epoch": 2.7439999999999998,
"grad_norm": 2.107234001159668,
"learning_rate": 2.0253513192751374e-07,
"loss": 0.1044,
"step": 343
},
{
"epoch": 2.752,
"grad_norm": 2.2658917903900146,
"learning_rate": 1.905236424022633e-07,
"loss": 0.1068,
"step": 344
},
{
"epoch": 2.76,
"grad_norm": 1.6075682640075684,
"learning_rate": 1.7887238428763553e-07,
"loss": 0.0964,
"step": 345
},
{
"epoch": 2.768,
"grad_norm": 2.3293445110321045,
"learning_rate": 1.6758223026681507e-07,
"loss": 0.1187,
"step": 346
},
{
"epoch": 2.776,
"grad_norm": 2.2633495330810547,
"learning_rate": 1.5665402597616842e-07,
"loss": 0.1069,
"step": 347
},
{
"epoch": 2.784,
"grad_norm": 2.2504401206970215,
"learning_rate": 1.4608858994190344e-07,
"loss": 0.1117,
"step": 348
},
{
"epoch": 2.792,
"grad_norm": 2.3323943614959717,
"learning_rate": 1.358867135187636e-07,
"loss": 0.1135,
"step": 349
},
{
"epoch": 2.8,
"grad_norm": 2.3143439292907715,
"learning_rate": 1.2604916083075236e-07,
"loss": 0.107,
"step": 350
},
{
"epoch": 2.808,
"grad_norm": 2.3315892219543457,
"learning_rate": 1.1657666871390471e-07,
"loss": 0.1173,
"step": 351
},
{
"epoch": 2.816,
"grad_norm": 2.2593605518341064,
"learning_rate": 1.0746994666109234e-07,
"loss": 0.1224,
"step": 352
},
{
"epoch": 2.824,
"grad_norm": 2.1315793991088867,
"learning_rate": 9.872967676888611e-08,
"loss": 0.1105,
"step": 353
},
{
"epoch": 2.832,
"grad_norm": 1.957345962524414,
"learning_rate": 9.035651368646647e-08,
"loss": 0.0775,
"step": 354
},
{
"epoch": 2.84,
"grad_norm": 2.2879207134246826,
"learning_rate": 8.235108456658814e-08,
"loss": 0.1117,
"step": 355
},
{
"epoch": 2.848,
"grad_norm": 2.33064341545105,
"learning_rate": 7.471398901860772e-08,
"loss": 0.105,
"step": 356
},
{
"epoch": 2.856,
"grad_norm": 1.8177984952926636,
"learning_rate": 6.744579906357185e-08,
"loss": 0.0923,
"step": 357
},
{
"epoch": 2.864,
"grad_norm": 1.6573913097381592,
"learning_rate": 6.054705909137426e-08,
"loss": 0.0889,
"step": 358
},
{
"epoch": 2.872,
"grad_norm": 2.089919328689575,
"learning_rate": 5.401828581997948e-08,
"loss": 0.089,
"step": 359
},
{
"epoch": 2.88,
"grad_norm": 2.2780609130859375,
"learning_rate": 4.7859968256719344e-08,
"loss": 0.1334,
"step": 360
},
{
"epoch": 2.888,
"grad_norm": 2.2533175945281982,
"learning_rate": 4.207256766166845e-08,
"loss": 0.0862,
"step": 361
},
{
"epoch": 2.896,
"grad_norm": 1.9914261102676392,
"learning_rate": 3.665651751309451e-08,
"loss": 0.0963,
"step": 362
},
{
"epoch": 2.904,
"grad_norm": 2.1606249809265137,
"learning_rate": 3.16122234749916e-08,
"loss": 0.1011,
"step": 363
},
{
"epoch": 2.912,
"grad_norm": 2.2203593254089355,
"learning_rate": 2.6940063366693303e-08,
"loss": 0.1113,
"step": 364
},
{
"epoch": 2.92,
"grad_norm": 2.3014585971832275,
"learning_rate": 2.264038713457706e-08,
"loss": 0.1201,
"step": 365
},
{
"epoch": 2.928,
"grad_norm": 2.5968284606933594,
"learning_rate": 1.8713516825851207e-08,
"loss": 0.1163,
"step": 366
},
{
"epoch": 2.936,
"grad_norm": 1.6080836057662964,
"learning_rate": 1.51597465644332e-08,
"loss": 0.0786,
"step": 367
},
{
"epoch": 2.944,
"grad_norm": 1.9814245700836182,
"learning_rate": 1.1979342528922189e-08,
"loss": 0.0897,
"step": 368
},
{
"epoch": 2.952,
"grad_norm": 2.3356709480285645,
"learning_rate": 9.1725429326589e-09,
"loss": 0.1121,
"step": 369
},
{
"epoch": 2.96,
"grad_norm": 2.4194469451904297,
"learning_rate": 6.739558005884883e-09,
"loss": 0.1275,
"step": 370
},
{
"epoch": 2.968,
"grad_norm": 2.294349193572998,
"learning_rate": 4.6805699799967744e-09,
"loss": 0.0957,
"step": 371
},
{
"epoch": 2.976,
"grad_norm": 1.9899872541427612,
"learning_rate": 2.995733073895557e-09,
"loss": 0.0954,
"step": 372
},
{
"epoch": 2.984,
"grad_norm": 2.400726079940796,
"learning_rate": 1.6851734824380184e-09,
"loss": 0.131,
"step": 373
},
{
"epoch": 2.992,
"grad_norm": 1.9476231336593628,
"learning_rate": 7.48989366980979e-10,
"loss": 0.0798,
"step": 374
},
{
"epoch": 3.0,
"grad_norm": 1.9087032079696655,
"learning_rate": 1.872508480332824e-10,
"loss": 0.0816,
"step": 375
},
{
"epoch": 3.0,
"step": 375,
"total_flos": 32886087155712.0,
"train_loss": 0.4883111825188001,
"train_runtime": 5573.6343,
"train_samples_per_second": 4.306,
"train_steps_per_second": 0.067
}
],
"logging_steps": 1,
"max_steps": 375,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 32886087155712.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}