{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 8.0,
"eval_steps": 500,
"global_step": 336,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.023809523809523808,
"grad_norm": 0.0,
"learning_rate": 0.0,
"loss": 6.0844,
"step": 1
},
{
"epoch": 0.047619047619047616,
"grad_norm": 0.0,
"learning_rate": 0.0,
"loss": 6.0252,
"step": 2
},
{
"epoch": 0.07142857142857142,
"grad_norm": 0.0,
"learning_rate": 0.0,
"loss": 6.0438,
"step": 3
},
{
"epoch": 0.09523809523809523,
"grad_norm": 0.0,
"learning_rate": 0.0,
"loss": 6.0429,
"step": 4
},
{
"epoch": 0.11904761904761904,
"grad_norm": 93.3423219258017,
"learning_rate": 0.0,
"loss": 6.0812,
"step": 5
},
{
"epoch": 0.14285714285714285,
"grad_norm": 93.3423219258017,
"learning_rate": 0.0,
"loss": 6.651,
"step": 6
},
{
"epoch": 0.16666666666666666,
"grad_norm": 93.3423219258017,
"learning_rate": 0.0,
"loss": 6.7137,
"step": 7
},
{
"epoch": 0.19047619047619047,
"grad_norm": 155.98556373507773,
"learning_rate": 4.515449934959717e-06,
"loss": 6.6528,
"step": 8
},
{
"epoch": 0.21428571428571427,
"grad_norm": 153.27439778262055,
"learning_rate": 7.156818820794936e-06,
"loss": 6.8056,
"step": 9
},
{
"epoch": 0.23809523809523808,
"grad_norm": 128.63341961024358,
"learning_rate": 9.030899869919434e-06,
"loss": 5.5597,
"step": 10
},
{
"epoch": 0.2619047619047619,
"grad_norm": 41.370296890930696,
"learning_rate": 1.048455006504028e-05,
"loss": 4.8139,
"step": 11
},
{
"epoch": 0.2857142857142857,
"grad_norm": 21.03013047842198,
"learning_rate": 1.1672268755754653e-05,
"loss": 4.2806,
"step": 12
},
{
"epoch": 0.30952380952380953,
"grad_norm": 24.83019480831252,
"learning_rate": 1.2676470600213851e-05,
"loss": 4.1729,
"step": 13
},
{
"epoch": 0.3333333333333333,
"grad_norm": 32.27380722524846,
"learning_rate": 1.354634980487915e-05,
"loss": 3.9768,
"step": 14
},
{
"epoch": 0.35714285714285715,
"grad_norm": 18.563619560619266,
"learning_rate": 1.4313637641589872e-05,
"loss": 3.8396,
"step": 15
},
{
"epoch": 0.38095238095238093,
"grad_norm": 43.97596777999888,
"learning_rate": 1.4999999999999999e-05,
"loss": 3.9596,
"step": 16
},
{
"epoch": 0.40476190476190477,
"grad_norm": 15.46861535216019,
"learning_rate": 1.5620890277373374e-05,
"loss": 3.8266,
"step": 17
},
{
"epoch": 0.42857142857142855,
"grad_norm": 15.172334611022396,
"learning_rate": 1.618771869071437e-05,
"loss": 3.574,
"step": 18
},
{
"epoch": 0.4523809523809524,
"grad_norm": 9.316822252242597,
"learning_rate": 1.6709150284602548e-05,
"loss": 3.4986,
"step": 19
},
{
"epoch": 0.47619047619047616,
"grad_norm": 12.838250015175294,
"learning_rate": 1.7191920535173565e-05,
"loss": 3.3713,
"step": 20
},
{
"epoch": 0.5,
"grad_norm": 7.974071924624334,
"learning_rate": 1.7641368885835218e-05,
"loss": 3.3618,
"step": 21
},
{
"epoch": 0.5238095238095238,
"grad_norm": 11.124851182949525,
"learning_rate": 1.806179973983887e-05,
"loss": 3.3299,
"step": 22
},
{
"epoch": 0.5476190476190477,
"grad_norm": 13.444595237986201,
"learning_rate": 1.8456733820674108e-05,
"loss": 3.2033,
"step": 23
},
{
"epoch": 0.5714285714285714,
"grad_norm": 11.735830000922256,
"learning_rate": 1.8829087576549588e-05,
"loss": 3.1576,
"step": 24
},
{
"epoch": 0.5952380952380952,
"grad_norm": 7.925276347736291,
"learning_rate": 1.918130401429243e-05,
"loss": 3.1197,
"step": 25
},
{
"epoch": 0.6190476190476191,
"grad_norm": 8.910617761228385,
"learning_rate": 1.9515449934959718e-05,
"loss": 3.0446,
"step": 26
},
{
"epoch": 0.6428571428571429,
"grad_norm": 7.775966581751595,
"learning_rate": 1.9833289421008787e-05,
"loss": 3.0266,
"step": 27
},
{
"epoch": 0.6666666666666666,
"grad_norm": 26.14272163588271,
"learning_rate": 2.0136340212333093e-05,
"loss": 2.955,
"step": 28
},
{
"epoch": 0.6904761904761905,
"grad_norm": 13.112722121867469,
"learning_rate": 2.042591754026389e-05,
"loss": 2.9005,
"step": 29
},
{
"epoch": 0.7142857142857143,
"grad_norm": 12.748020990905621,
"learning_rate": 2.0703168625674088e-05,
"loss": 2.7926,
"step": 30
},
{
"epoch": 0.7380952380952381,
"grad_norm": 58.496978420897946,
"learning_rate": 2.096910013008056e-05,
"loss": 2.8962,
"step": 31
},
{
"epoch": 0.7619047619047619,
"grad_norm": 21.63134954650599,
"learning_rate": 2.1224600219562266e-05,
"loss": 2.7644,
"step": 32
},
{
"epoch": 0.7857142857142857,
"grad_norm": 30.895967229002963,
"learning_rate": 2.1470456462384807e-05,
"loss": 2.8059,
"step": 33
},
{
"epoch": 0.8095238095238095,
"grad_norm": 9.035992601395845,
"learning_rate": 2.1707370470133284e-05,
"loss": 2.6909,
"step": 34
},
{
"epoch": 0.8333333333333334,
"grad_norm": 10.810607744577958,
"learning_rate": 2.193596996848434e-05,
"loss": 2.6255,
"step": 35
},
{
"epoch": 0.8571428571428571,
"grad_norm": 9.050441533061212,
"learning_rate": 2.2156818820794933e-05,
"loss": 2.5836,
"step": 36
},
{
"epoch": 0.8809523809523809,
"grad_norm": 15.503923473314948,
"learning_rate": 2.2370425407514087e-05,
"loss": 2.4422,
"step": 37
},
{
"epoch": 0.9047619047619048,
"grad_norm": 17.82162509551028,
"learning_rate": 2.2577249674798588e-05,
"loss": 2.4802,
"step": 38
},
{
"epoch": 0.9285714285714286,
"grad_norm": 8.302121215376507,
"learning_rate": 2.277770909816831e-05,
"loss": 2.3523,
"step": 39
},
{
"epoch": 0.9523809523809523,
"grad_norm": 9.600730121186789,
"learning_rate": 2.2972183755633823e-05,
"loss": 2.3562,
"step": 40
},
{
"epoch": 0.9761904761904762,
"grad_norm": 15.411788966104558,
"learning_rate": 2.316102066525413e-05,
"loss": 2.2544,
"step": 41
},
{
"epoch": 1.0,
"grad_norm": 20.352600141493745,
"learning_rate": 2.3344537511509307e-05,
"loss": 2.2646,
"step": 42
},
{
"epoch": 1.0238095238095237,
"grad_norm": 7.825870151096693,
"learning_rate": 2.3523025861004922e-05,
"loss": 2.0573,
"step": 43
},
{
"epoch": 1.0476190476190477,
"grad_norm": 8.440773110077108,
"learning_rate": 2.369675394925215e-05,
"loss": 2.03,
"step": 44
},
{
"epoch": 1.0714285714285714,
"grad_norm": 13.130562902210485,
"learning_rate": 2.3865969105397485e-05,
"loss": 1.9244,
"step": 45
},
{
"epoch": 1.0952380952380953,
"grad_norm": 9.706537257928328,
"learning_rate": 2.4030899869919433e-05,
"loss": 1.9136,
"step": 46
},
{
"epoch": 1.119047619047619,
"grad_norm": 8.26887203250488,
"learning_rate": 2.419175785079603e-05,
"loss": 1.8925,
"step": 47
},
{
"epoch": 1.1428571428571428,
"grad_norm": 6.737854379594813,
"learning_rate": 2.4348739355968506e-05,
"loss": 1.7605,
"step": 48
},
{
"epoch": 1.1666666666666667,
"grad_norm": 7.488911124151458,
"learning_rate": 2.4502026833693792e-05,
"loss": 1.8126,
"step": 49
},
{
"epoch": 1.1904761904761905,
"grad_norm": 20.960641944897286,
"learning_rate": 2.4651790147292808e-05,
"loss": 1.7724,
"step": 50
},
{
"epoch": 1.2142857142857142,
"grad_norm": 7.7652997998432305,
"learning_rate": 2.479818770663015e-05,
"loss": 1.7901,
"step": 51
},
{
"epoch": 1.2380952380952381,
"grad_norm": 11.205373796375733,
"learning_rate": 2.4941367475223608e-05,
"loss": 1.7508,
"step": 52
},
{
"epoch": 1.2619047619047619,
"grad_norm": 8.507384290331483,
"learning_rate": 2.5081467869035756e-05,
"loss": 1.7626,
"step": 53
},
{
"epoch": 1.2857142857142856,
"grad_norm": 22.937802013934743,
"learning_rate": 2.5218618560633803e-05,
"loss": 1.7217,
"step": 54
},
{
"epoch": 1.3095238095238095,
"grad_norm": 10.301692964250464,
"learning_rate": 2.5352941200427702e-05,
"loss": 1.7415,
"step": 55
},
{
"epoch": 1.3333333333333333,
"grad_norm": 17.951896818031077,
"learning_rate": 2.548455006504028e-05,
"loss": 1.752,
"step": 56
},
{
"epoch": 1.3571428571428572,
"grad_norm": 10.56352057407687,
"learning_rate": 2.5613552641469042e-05,
"loss": 1.7535,
"step": 57
},
{
"epoch": 1.380952380952381,
"grad_norm": 8.163599904657278,
"learning_rate": 2.5740050154521985e-05,
"loss": 1.5751,
"step": 58
},
{
"epoch": 1.4047619047619047,
"grad_norm": 9.664658524213282,
"learning_rate": 2.5864138044011835e-05,
"loss": 1.5757,
"step": 59
},
{
"epoch": 1.4285714285714286,
"grad_norm": 10.240043456462203,
"learning_rate": 2.5985906397344525e-05,
"loss": 1.659,
"step": 60
},
{
"epoch": 1.4523809523809523,
"grad_norm": 7.15057707905289,
"learning_rate": 2.6105440342413657e-05,
"loss": 1.5929,
"step": 61
},
{
"epoch": 1.4761904761904763,
"grad_norm": 7.041967742924048,
"learning_rate": 2.6222820405093003e-05,
"loss": 1.5825,
"step": 62
},
{
"epoch": 1.5,
"grad_norm": 6.0217336577697615,
"learning_rate": 2.6338122835087368e-05,
"loss": 1.531,
"step": 63
},
{
"epoch": 1.5238095238095237,
"grad_norm": 8.731514723416229,
"learning_rate": 2.6451419903444055e-05,
"loss": 1.5912,
"step": 64
},
{
"epoch": 1.5476190476190477,
"grad_norm": 5.176654739087756,
"learning_rate": 2.656278017463216e-05,
"loss": 1.4837,
"step": 65
},
{
"epoch": 1.5714285714285714,
"grad_norm": 11.303467912141844,
"learning_rate": 2.667226875575465e-05,
"loss": 1.6031,
"step": 66
},
{
"epoch": 1.5952380952380953,
"grad_norm": 7.596904837119394,
"learning_rate": 2.67799475251615e-05,
"loss": 1.4663,
"step": 67
},
{
"epoch": 1.619047619047619,
"grad_norm": 8.162880727116912,
"learning_rate": 2.6885875342473806e-05,
"loss": 1.4976,
"step": 68
},
{
"epoch": 1.6428571428571428,
"grad_norm": 8.525918824917467,
"learning_rate": 2.699010824180372e-05,
"loss": 1.5139,
"step": 69
},
{
"epoch": 1.6666666666666665,
"grad_norm": 15.718252003480599,
"learning_rate": 2.70926996097583e-05,
"loss": 1.4613,
"step": 70
},
{
"epoch": 1.6904761904761905,
"grad_norm": 8.02078170429474,
"learning_rate": 2.719370034964283e-05,
"loss": 1.5557,
"step": 71
},
{
"epoch": 1.7142857142857144,
"grad_norm": 12.86839702876173,
"learning_rate": 2.7293159033128027e-05,
"loss": 1.511,
"step": 72
},
{
"epoch": 1.7380952380952381,
"grad_norm": 13.248242531588152,
"learning_rate": 2.739112204051239e-05,
"loss": 1.5252,
"step": 73
},
{
"epoch": 1.7619047619047619,
"grad_norm": 10.039010061876667,
"learning_rate": 2.7487633690593542e-05,
"loss": 1.5813,
"step": 74
},
{
"epoch": 1.7857142857142856,
"grad_norm": 9.119551612807086,
"learning_rate": 2.758273636105883e-05,
"loss": 1.4967,
"step": 75
},
{
"epoch": 1.8095238095238095,
"grad_norm": 4.214044051365114,
"learning_rate": 2.767647060021385e-05,
"loss": 1.4807,
"step": 76
},
{
"epoch": 1.8333333333333335,
"grad_norm": 4.989644007091576,
"learning_rate": 2.7768875230786127e-05,
"loss": 1.3716,
"step": 77
},
{
"epoch": 1.8571428571428572,
"grad_norm": 4.790095306595125,
"learning_rate": 2.7859987446469022e-05,
"loss": 1.4332,
"step": 78
},
{
"epoch": 1.880952380952381,
"grad_norm": 4.020659500607359,
"learning_rate": 2.7949842901806834e-05,
"loss": 1.5005,
"step": 79
},
{
"epoch": 1.9047619047619047,
"grad_norm": 4.712132949093397,
"learning_rate": 2.803847579596464e-05,
"loss": 1.4549,
"step": 80
},
{
"epoch": 1.9285714285714286,
"grad_norm": 4.139477840795092,
"learning_rate": 2.8125918950875494e-05,
"loss": 1.3747,
"step": 81
},
{
"epoch": 1.9523809523809523,
"grad_norm": 4.710882234011957,
"learning_rate": 2.821220388421187e-05,
"loss": 1.3988,
"step": 82
},
{
"epoch": 1.9761904761904763,
"grad_norm": 7.06911408610742,
"learning_rate": 2.8297360877587227e-05,
"loss": 1.3773,
"step": 83
},
{
"epoch": 2.0,
"grad_norm": 4.318190206857227,
"learning_rate": 2.8381419040357204e-05,
"loss": 1.4186,
"step": 84
},
{
"epoch": 2.0238095238095237,
"grad_norm": 8.828397655496394,
"learning_rate": 2.846440636935662e-05,
"loss": 1.0226,
"step": 85
},
{
"epoch": 2.0476190476190474,
"grad_norm": 4.797325684688875,
"learning_rate": 2.854634980487915e-05,
"loss": 0.9971,
"step": 86
},
{
"epoch": 2.0714285714285716,
"grad_norm": 5.001207396638769,
"learning_rate": 2.8627275283179744e-05,
"loss": 0.9646,
"step": 87
},
{
"epoch": 2.0952380952380953,
"grad_norm": 5.020135960352548,
"learning_rate": 2.870720778575575e-05,
"loss": 0.8586,
"step": 88
},
{
"epoch": 2.119047619047619,
"grad_norm": 3.6417220267853416,
"learning_rate": 2.8786171385641107e-05,
"loss": 0.8771,
"step": 89
},
{
"epoch": 2.142857142857143,
"grad_norm": 4.600814266283937,
"learning_rate": 2.8864189290928218e-05,
"loss": 0.872,
"step": 90
},
{
"epoch": 2.1666666666666665,
"grad_norm": 4.344323209635797,
"learning_rate": 2.894128388571439e-05,
"loss": 0.9124,
"step": 91
},
{
"epoch": 2.1904761904761907,
"grad_norm": 7.885657229128257,
"learning_rate": 2.9017476768653508e-05,
"loss": 0.8747,
"step": 92
},
{
"epoch": 2.2142857142857144,
"grad_norm": 5.691946115331204,
"learning_rate": 2.9092788789279274e-05,
"loss": 0.9159,
"step": 93
},
{
"epoch": 2.238095238095238,
"grad_norm": 5.4268612318908795,
"learning_rate": 2.9167240082252527e-05,
"loss": 0.9019,
"step": 94
},
{
"epoch": 2.261904761904762,
"grad_norm": 4.0468401226175645,
"learning_rate": 2.9240850099673687e-05,
"loss": 0.8495,
"step": 95
},
{
"epoch": 2.2857142857142856,
"grad_norm": 4.992044031890445,
"learning_rate": 2.9313637641589867e-05,
"loss": 0.8451,
"step": 96
},
{
"epoch": 2.3095238095238093,
"grad_norm": 7.6099863296365955,
"learning_rate": 2.93856208848164e-05,
"loss": 0.9557,
"step": 97
},
{
"epoch": 2.3333333333333335,
"grad_norm": 6.979618919484822,
"learning_rate": 2.9456817410183327e-05,
"loss": 0.8591,
"step": 98
},
{
"epoch": 2.357142857142857,
"grad_norm": 4.677732947872956,
"learning_rate": 2.9527244228309025e-05,
"loss": 0.9462,
"step": 99
},
{
"epoch": 2.380952380952381,
"grad_norm": 4.501165451022709,
"learning_rate": 2.9596917803995475e-05,
"loss": 0.9646,
"step": 100
},
{
"epoch": 2.4047619047619047,
"grad_norm": 4.061976472426795,
"learning_rate": 2.966585407933271e-05,
"loss": 0.9045,
"step": 101
},
{
"epoch": 2.4285714285714284,
"grad_norm": 3.775395424450519,
"learning_rate": 2.9734068495593522e-05,
"loss": 0.8882,
"step": 102
},
{
"epoch": 2.4523809523809526,
"grad_norm": 3.664921576479252,
"learning_rate": 2.980157601399367e-05,
"loss": 0.7533,
"step": 103
},
{
"epoch": 2.4761904761904763,
"grad_norm": 3.674594213179295,
"learning_rate": 2.986839113538742e-05,
"loss": 0.9367,
"step": 104
},
{
"epoch": 2.5,
"grad_norm": 3.9611599650914084,
"learning_rate": 2.9934527918963243e-05,
"loss": 0.8714,
"step": 105
},
{
"epoch": 2.5238095238095237,
"grad_norm": 4.990853717987593,
"learning_rate": 2.9999999999999997e-05,
"loss": 0.8654,
"step": 106
},
{
"epoch": 2.5476190476190474,
"grad_norm": 6.208263046924998,
"learning_rate": 3e-05,
"loss": 0.9548,
"step": 107
},
{
"epoch": 2.571428571428571,
"grad_norm": 3.44590660733813,
"learning_rate": 2.9872881355932204e-05,
"loss": 0.9073,
"step": 108
},
{
"epoch": 2.5952380952380953,
"grad_norm": 2.7385065745159336,
"learning_rate": 2.9745762711864407e-05,
"loss": 0.8989,
"step": 109
},
{
"epoch": 2.619047619047619,
"grad_norm": 10.640186183132265,
"learning_rate": 2.961864406779661e-05,
"loss": 0.9787,
"step": 110
},
{
"epoch": 2.642857142857143,
"grad_norm": 4.130706308112674,
"learning_rate": 2.9491525423728817e-05,
"loss": 1.0558,
"step": 111
},
{
"epoch": 2.6666666666666665,
"grad_norm": 2.9151215048015318,
"learning_rate": 2.9364406779661017e-05,
"loss": 0.9567,
"step": 112
},
{
"epoch": 2.6904761904761907,
"grad_norm": 6.435366082437753,
"learning_rate": 2.923728813559322e-05,
"loss": 1.0037,
"step": 113
},
{
"epoch": 2.7142857142857144,
"grad_norm": 3.426679191220126,
"learning_rate": 2.9110169491525423e-05,
"loss": 0.9372,
"step": 114
},
{
"epoch": 2.738095238095238,
"grad_norm": 4.477068434747137,
"learning_rate": 2.8983050847457626e-05,
"loss": 0.8885,
"step": 115
},
{
"epoch": 2.761904761904762,
"grad_norm": 5.242235163601309,
"learning_rate": 2.885593220338983e-05,
"loss": 0.9532,
"step": 116
},
{
"epoch": 2.7857142857142856,
"grad_norm": 3.145624233816483,
"learning_rate": 2.8728813559322036e-05,
"loss": 0.887,
"step": 117
},
{
"epoch": 2.8095238095238093,
"grad_norm": 2.7837231387832406,
"learning_rate": 2.860169491525424e-05,
"loss": 0.9098,
"step": 118
},
{
"epoch": 2.8333333333333335,
"grad_norm": 3.584454797124237,
"learning_rate": 2.8474576271186442e-05,
"loss": 0.8836,
"step": 119
},
{
"epoch": 2.857142857142857,
"grad_norm": 3.3913078499460156,
"learning_rate": 2.8347457627118645e-05,
"loss": 0.9176,
"step": 120
},
{
"epoch": 2.880952380952381,
"grad_norm": 3.5996469960391604,
"learning_rate": 2.822033898305085e-05,
"loss": 0.9451,
"step": 121
},
{
"epoch": 2.9047619047619047,
"grad_norm": 3.4578397161116845,
"learning_rate": 2.809322033898305e-05,
"loss": 0.9872,
"step": 122
},
{
"epoch": 2.928571428571429,
"grad_norm": 3.1817282986021116,
"learning_rate": 2.7966101694915255e-05,
"loss": 0.9363,
"step": 123
},
{
"epoch": 2.9523809523809526,
"grad_norm": 4.818055154499765,
"learning_rate": 2.7838983050847458e-05,
"loss": 0.9404,
"step": 124
},
{
"epoch": 2.9761904761904763,
"grad_norm": 2.906003880077484,
"learning_rate": 2.771186440677966e-05,
"loss": 0.8959,
"step": 125
},
{
"epoch": 3.0,
"grad_norm": 3.3767730859052176,
"learning_rate": 2.7584745762711864e-05,
"loss": 0.9118,
"step": 126
},
{
"epoch": 3.0238095238095237,
"grad_norm": 4.095726875378205,
"learning_rate": 2.7457627118644068e-05,
"loss": 0.3995,
"step": 127
},
{
"epoch": 3.0476190476190474,
"grad_norm": 2.8808779130759476,
"learning_rate": 2.733050847457627e-05,
"loss": 0.3645,
"step": 128
},
{
"epoch": 3.0714285714285716,
"grad_norm": 3.9860021522389837,
"learning_rate": 2.7203389830508477e-05,
"loss": 0.3695,
"step": 129
},
{
"epoch": 3.0952380952380953,
"grad_norm": 4.895632778956821,
"learning_rate": 2.707627118644068e-05,
"loss": 0.3574,
"step": 130
},
{
"epoch": 3.119047619047619,
"grad_norm": 3.5156913242441696,
"learning_rate": 2.6949152542372884e-05,
"loss": 0.4022,
"step": 131
},
{
"epoch": 3.142857142857143,
"grad_norm": 3.233502698834368,
"learning_rate": 2.6822033898305083e-05,
"loss": 0.3601,
"step": 132
},
{
"epoch": 3.1666666666666665,
"grad_norm": 3.746216836603699,
"learning_rate": 2.6694915254237287e-05,
"loss": 0.3411,
"step": 133
},
{
"epoch": 3.1904761904761907,
"grad_norm": 3.0441572539605803,
"learning_rate": 2.656779661016949e-05,
"loss": 0.3786,
"step": 134
},
{
"epoch": 3.2142857142857144,
"grad_norm": 3.081479950346458,
"learning_rate": 2.6440677966101696e-05,
"loss": 0.3422,
"step": 135
},
{
"epoch": 3.238095238095238,
"grad_norm": 4.702766886412663,
"learning_rate": 2.63135593220339e-05,
"loss": 0.3261,
"step": 136
},
{
"epoch": 3.261904761904762,
"grad_norm": 2.9964530958110305,
"learning_rate": 2.6186440677966103e-05,
"loss": 0.3794,
"step": 137
},
{
"epoch": 3.2857142857142856,
"grad_norm": 5.550148956774296,
"learning_rate": 2.6059322033898306e-05,
"loss": 0.3815,
"step": 138
},
{
"epoch": 3.3095238095238093,
"grad_norm": 2.986882459587685,
"learning_rate": 2.593220338983051e-05,
"loss": 0.3155,
"step": 139
},
{
"epoch": 3.3333333333333335,
"grad_norm": 3.193858368410506,
"learning_rate": 2.5805084745762712e-05,
"loss": 0.3264,
"step": 140
},
{
"epoch": 3.357142857142857,
"grad_norm": 3.088750633817057,
"learning_rate": 2.567796610169492e-05,
"loss": 0.291,
"step": 141
},
{
"epoch": 3.380952380952381,
"grad_norm": 3.2854070579051253,
"learning_rate": 2.555084745762712e-05,
"loss": 0.3297,
"step": 142
},
{
"epoch": 3.4047619047619047,
"grad_norm": 4.2601580677346895,
"learning_rate": 2.5423728813559322e-05,
"loss": 0.3141,
"step": 143
},
{
"epoch": 3.4285714285714284,
"grad_norm": 4.332088046995794,
"learning_rate": 2.5296610169491525e-05,
"loss": 0.3325,
"step": 144
},
{
"epoch": 3.4523809523809526,
"grad_norm": 4.242209531048916,
"learning_rate": 2.5169491525423728e-05,
"loss": 0.3305,
"step": 145
},
{
"epoch": 3.4761904761904763,
"grad_norm": 3.2248827831642077,
"learning_rate": 2.504237288135593e-05,
"loss": 0.3584,
"step": 146
},
{
"epoch": 3.5,
"grad_norm": 3.7136292663486024,
"learning_rate": 2.4915254237288138e-05,
"loss": 0.3851,
"step": 147
},
{
"epoch": 3.5238095238095237,
"grad_norm": 2.310251225108216,
"learning_rate": 2.478813559322034e-05,
"loss": 0.3495,
"step": 148
},
{
"epoch": 3.5476190476190474,
"grad_norm": 2.959900203775221,
"learning_rate": 2.4661016949152544e-05,
"loss": 0.3467,
"step": 149
},
{
"epoch": 3.571428571428571,
"grad_norm": 2.769025360950123,
"learning_rate": 2.4533898305084747e-05,
"loss": 0.3339,
"step": 150
},
{
"epoch": 3.5952380952380953,
"grad_norm": 2.526586310015553,
"learning_rate": 2.440677966101695e-05,
"loss": 0.3162,
"step": 151
},
{
"epoch": 3.619047619047619,
"grad_norm": 2.6259041092064024,
"learning_rate": 2.427966101694915e-05,
"loss": 0.3545,
"step": 152
},
{
"epoch": 3.642857142857143,
"grad_norm": 3.4110128718535617,
"learning_rate": 2.4152542372881357e-05,
"loss": 0.3352,
"step": 153
},
{
"epoch": 3.6666666666666665,
"grad_norm": 2.758008220524809,
"learning_rate": 2.402542372881356e-05,
"loss": 0.3132,
"step": 154
},
{
"epoch": 3.6904761904761907,
"grad_norm": 2.982383183176492,
"learning_rate": 2.3898305084745763e-05,
"loss": 0.3262,
"step": 155
},
{
"epoch": 3.7142857142857144,
"grad_norm": 2.762691946826555,
"learning_rate": 2.3771186440677966e-05,
"loss": 0.3659,
"step": 156
},
{
"epoch": 3.738095238095238,
"grad_norm": 4.026553708275229,
"learning_rate": 2.364406779661017e-05,
"loss": 0.3273,
"step": 157
},
{
"epoch": 3.761904761904762,
"grad_norm": 3.3587989490663213,
"learning_rate": 2.3516949152542373e-05,
"loss": 0.357,
"step": 158
},
{
"epoch": 3.7857142857142856,
"grad_norm": 3.2118411683637,
"learning_rate": 2.338983050847458e-05,
"loss": 0.3639,
"step": 159
},
{
"epoch": 3.8095238095238093,
"grad_norm": 3.443920312675099,
"learning_rate": 2.3262711864406782e-05,
"loss": 0.3878,
"step": 160
},
{
"epoch": 3.8333333333333335,
"grad_norm": 2.5647254212407695,
"learning_rate": 2.3135593220338986e-05,
"loss": 0.3918,
"step": 161
},
{
"epoch": 3.857142857142857,
"grad_norm": 3.3664168960781855,
"learning_rate": 2.3008474576271185e-05,
"loss": 0.3296,
"step": 162
},
{
"epoch": 3.880952380952381,
"grad_norm": 2.9872591951116823,
"learning_rate": 2.288135593220339e-05,
"loss": 0.3431,
"step": 163
},
{
"epoch": 3.9047619047619047,
"grad_norm": 2.7362377688229067,
"learning_rate": 2.275423728813559e-05,
"loss": 0.3312,
"step": 164
},
{
"epoch": 3.928571428571429,
"grad_norm": 3.105404566455441,
"learning_rate": 2.2627118644067798e-05,
"loss": 0.3432,
"step": 165
},
{
"epoch": 3.9523809523809526,
"grad_norm": 2.676166322942077,
"learning_rate": 2.25e-05,
"loss": 0.3815,
"step": 166
},
{
"epoch": 3.9761904761904763,
"grad_norm": 2.5293095061822632,
"learning_rate": 2.2372881355932205e-05,
"loss": 0.3407,
"step": 167
},
{
"epoch": 4.0,
"grad_norm": 2.5720338941509175,
"learning_rate": 2.2245762711864408e-05,
"loss": 0.3547,
"step": 168
},
{
"epoch": 4.023809523809524,
"grad_norm": 1.7347029642231182,
"learning_rate": 2.211864406779661e-05,
"loss": 0.1338,
"step": 169
},
{
"epoch": 4.0476190476190474,
"grad_norm": 1.8221230732233151,
"learning_rate": 2.1991525423728814e-05,
"loss": 0.1139,
"step": 170
},
{
"epoch": 4.071428571428571,
"grad_norm": 1.6738494907293027,
"learning_rate": 2.1864406779661017e-05,
"loss": 0.1065,
"step": 171
},
{
"epoch": 4.095238095238095,
"grad_norm": 2.5108940229942505,
"learning_rate": 2.173728813559322e-05,
"loss": 0.1141,
"step": 172
},
{
"epoch": 4.119047619047619,
"grad_norm": 2.323467024081342,
"learning_rate": 2.1610169491525424e-05,
"loss": 0.1283,
"step": 173
},
{
"epoch": 4.142857142857143,
"grad_norm": 2.821038234674706,
"learning_rate": 2.1483050847457627e-05,
"loss": 0.1143,
"step": 174
},
{
"epoch": 4.166666666666667,
"grad_norm": 2.338894608561856,
"learning_rate": 2.135593220338983e-05,
"loss": 0.1229,
"step": 175
},
{
"epoch": 4.190476190476191,
"grad_norm": 1.942202000092003,
"learning_rate": 2.1228813559322033e-05,
"loss": 0.1137,
"step": 176
},
{
"epoch": 4.214285714285714,
"grad_norm": 2.3412837976886913,
"learning_rate": 2.110169491525424e-05,
"loss": 0.1241,
"step": 177
},
{
"epoch": 4.238095238095238,
"grad_norm": 2.2036006765683434,
"learning_rate": 2.0974576271186443e-05,
"loss": 0.1309,
"step": 178
},
{
"epoch": 4.261904761904762,
"grad_norm": 2.010523528057446,
"learning_rate": 2.0847457627118646e-05,
"loss": 0.1191,
"step": 179
},
{
"epoch": 4.285714285714286,
"grad_norm": 2.3507423364696316,
"learning_rate": 2.072033898305085e-05,
"loss": 0.117,
"step": 180
},
{
"epoch": 4.309523809523809,
"grad_norm": 2.2804992694648654,
"learning_rate": 2.059322033898305e-05,
"loss": 0.1203,
"step": 181
},
{
"epoch": 4.333333333333333,
"grad_norm": 2.3869499931131366,
"learning_rate": 2.0466101694915252e-05,
"loss": 0.1275,
"step": 182
},
{
"epoch": 4.357142857142857,
"grad_norm": 1.8100471341861415,
"learning_rate": 2.033898305084746e-05,
"loss": 0.1169,
"step": 183
},
{
"epoch": 4.380952380952381,
"grad_norm": 1.8180032907966064,
"learning_rate": 2.0211864406779662e-05,
"loss": 0.1202,
"step": 184
},
{
"epoch": 4.404761904761905,
"grad_norm": 1.758298543914329,
"learning_rate": 2.0084745762711865e-05,
"loss": 0.114,
"step": 185
},
{
"epoch": 4.428571428571429,
"grad_norm": 1.8184074288972953,
"learning_rate": 1.9957627118644068e-05,
"loss": 0.1089,
"step": 186
},
{
"epoch": 4.4523809523809526,
"grad_norm": 2.401631369065544,
"learning_rate": 1.983050847457627e-05,
"loss": 0.1198,
"step": 187
},
{
"epoch": 4.476190476190476,
"grad_norm": 1.9255007526016845,
"learning_rate": 1.9703389830508475e-05,
"loss": 0.1203,
"step": 188
},
{
"epoch": 4.5,
"grad_norm": 2.464389571771537,
"learning_rate": 1.957627118644068e-05,
"loss": 0.116,
"step": 189
},
{
"epoch": 4.523809523809524,
"grad_norm": 1.818353081353233,
"learning_rate": 1.9449152542372884e-05,
"loss": 0.1247,
"step": 190
},
{
"epoch": 4.5476190476190474,
"grad_norm": 2.2738583099925376,
"learning_rate": 1.9322033898305084e-05,
"loss": 0.1187,
"step": 191
},
{
"epoch": 4.571428571428571,
"grad_norm": 1.9905028157486173,
"learning_rate": 1.9194915254237287e-05,
"loss": 0.1227,
"step": 192
},
{
"epoch": 4.595238095238095,
"grad_norm": 1.9747858317662208,
"learning_rate": 1.906779661016949e-05,
"loss": 0.1491,
"step": 193
},
{
"epoch": 4.619047619047619,
"grad_norm": 2.8317175540164836,
"learning_rate": 1.8940677966101694e-05,
"loss": 0.1259,
"step": 194
},
{
"epoch": 4.642857142857143,
"grad_norm": 1.9273372654676268,
"learning_rate": 1.88135593220339e-05,
"loss": 0.1365,
"step": 195
},
{
"epoch": 4.666666666666667,
"grad_norm": 2.534976711881009,
"learning_rate": 1.8686440677966103e-05,
"loss": 0.131,
"step": 196
},
{
"epoch": 4.690476190476191,
"grad_norm": 1.942221886598183,
"learning_rate": 1.8559322033898307e-05,
"loss": 0.1226,
"step": 197
},
{
"epoch": 4.714285714285714,
"grad_norm": 2.2644640875066173,
"learning_rate": 1.843220338983051e-05,
"loss": 0.1243,
"step": 198
},
{
"epoch": 4.738095238095238,
"grad_norm": 2.8057010275094925,
"learning_rate": 1.8305084745762713e-05,
"loss": 0.1214,
"step": 199
},
{
"epoch": 4.761904761904762,
"grad_norm": 1.5251903915374738,
"learning_rate": 1.8177966101694916e-05,
"loss": 0.1146,
"step": 200
},
{
"epoch": 4.785714285714286,
"grad_norm": 2.4773804670612587,
"learning_rate": 1.805084745762712e-05,
"loss": 0.1397,
"step": 201
},
{
"epoch": 4.809523809523809,
"grad_norm": 2.5083854234763723,
"learning_rate": 1.7923728813559322e-05,
"loss": 0.1175,
"step": 202
},
{
"epoch": 4.833333333333333,
"grad_norm": 1.703911905747183,
"learning_rate": 1.7796610169491526e-05,
"loss": 0.1076,
"step": 203
},
{
"epoch": 4.857142857142857,
"grad_norm": 1.6878019698488502,
"learning_rate": 1.766949152542373e-05,
"loss": 0.1133,
"step": 204
},
{
"epoch": 4.880952380952381,
"grad_norm": 3.3963465078581763,
"learning_rate": 1.7542372881355932e-05,
"loss": 0.1277,
"step": 205
},
{
"epoch": 4.904761904761905,
"grad_norm": 1.9038847961594523,
"learning_rate": 1.7415254237288135e-05,
"loss": 0.125,
"step": 206
},
{
"epoch": 4.928571428571429,
"grad_norm": 2.016132616101433,
"learning_rate": 1.728813559322034e-05,
"loss": 0.1444,
"step": 207
},
{
"epoch": 4.9523809523809526,
"grad_norm": 1.8498359710558154,
"learning_rate": 1.7161016949152545e-05,
"loss": 0.1112,
"step": 208
},
{
"epoch": 4.976190476190476,
"grad_norm": 1.75481223175137,
"learning_rate": 1.7033898305084748e-05,
"loss": 0.1267,
"step": 209
},
{
"epoch": 5.0,
"grad_norm": 1.684924031591121,
"learning_rate": 1.690677966101695e-05,
"loss": 0.1176,
"step": 210
},
{
"epoch": 5.023809523809524,
"grad_norm": 0.9847874155928602,
"learning_rate": 1.677966101694915e-05,
"loss": 0.0409,
"step": 211
},
{
"epoch": 5.0476190476190474,
"grad_norm": 0.947187377214817,
"learning_rate": 1.6652542372881354e-05,
"loss": 0.0455,
"step": 212
},
{
"epoch": 5.071428571428571,
"grad_norm": 1.1544070118677283,
"learning_rate": 1.652542372881356e-05,
"loss": 0.0477,
"step": 213
},
{
"epoch": 5.095238095238095,
"grad_norm": 1.0731830389716441,
"learning_rate": 1.6398305084745764e-05,
"loss": 0.0384,
"step": 214
},
{
"epoch": 5.119047619047619,
"grad_norm": 1.3731392494264483,
"learning_rate": 1.6271186440677967e-05,
"loss": 0.0426,
"step": 215
},
{
"epoch": 5.142857142857143,
"grad_norm": 1.1486500653620535,
"learning_rate": 1.614406779661017e-05,
"loss": 0.0423,
"step": 216
},
{
"epoch": 5.166666666666667,
"grad_norm": 1.2739508126703059,
"learning_rate": 1.6016949152542373e-05,
"loss": 0.0445,
"step": 217
},
{
"epoch": 5.190476190476191,
"grad_norm": 1.4855159189558325,
"learning_rate": 1.5889830508474576e-05,
"loss": 0.0559,
"step": 218
},
{
"epoch": 5.214285714285714,
"grad_norm": 1.3546715846551274,
"learning_rate": 1.5762711864406783e-05,
"loss": 0.0534,
"step": 219
},
{
"epoch": 5.238095238095238,
"grad_norm": 1.5296075537016234,
"learning_rate": 1.5635593220338986e-05,
"loss": 0.0551,
"step": 220
},
{
"epoch": 5.261904761904762,
"grad_norm": 1.3680859910362466,
"learning_rate": 1.5508474576271186e-05,
"loss": 0.0564,
"step": 221
},
{
"epoch": 5.285714285714286,
"grad_norm": 1.50262793175621,
"learning_rate": 1.538135593220339e-05,
"loss": 0.0623,
"step": 222
},
{
"epoch": 5.309523809523809,
"grad_norm": 1.633219617985254,
"learning_rate": 1.5254237288135592e-05,
"loss": 0.0795,
"step": 223
},
{
"epoch": 5.333333333333333,
"grad_norm": 1.1269587313348959,
"learning_rate": 1.5127118644067797e-05,
"loss": 0.0444,
"step": 224
},
{
"epoch": 5.357142857142857,
"grad_norm": 1.5820728437819247,
"learning_rate": 1.5e-05,
"loss": 0.0538,
"step": 225
},
{
"epoch": 5.380952380952381,
"grad_norm": 1.4009194539405676,
"learning_rate": 1.4872881355932204e-05,
"loss": 0.0678,
"step": 226
},
{
"epoch": 5.404761904761905,
"grad_norm": 1.3170165959053346,
"learning_rate": 1.4745762711864408e-05,
"loss": 0.055,
"step": 227
},
{
"epoch": 5.428571428571429,
"grad_norm": 1.4104093287552704,
"learning_rate": 1.461864406779661e-05,
"loss": 0.0641,
"step": 228
},
{
"epoch": 5.4523809523809526,
"grad_norm": 5.240983941394039,
"learning_rate": 1.4491525423728813e-05,
"loss": 0.084,
"step": 229
},
{
"epoch": 5.476190476190476,
"grad_norm": 1.5245126696016669,
"learning_rate": 1.4364406779661018e-05,
"loss": 0.0602,
"step": 230
},
{
"epoch": 5.5,
"grad_norm": 1.5831501336103715,
"learning_rate": 1.4237288135593221e-05,
"loss": 0.0607,
"step": 231
},
{
"epoch": 5.523809523809524,
"grad_norm": 1.4783183158934612,
"learning_rate": 1.4110169491525424e-05,
"loss": 0.0607,
"step": 232
},
{
"epoch": 5.5476190476190474,
"grad_norm": 1.3152701798707096,
"learning_rate": 1.3983050847457627e-05,
"loss": 0.0574,
"step": 233
},
{
"epoch": 5.571428571428571,
"grad_norm": 1.6495440055294004,
"learning_rate": 1.385593220338983e-05,
"loss": 0.061,
"step": 234
},
{
"epoch": 5.595238095238095,
"grad_norm": 1.4014723256509312,
"learning_rate": 1.3728813559322034e-05,
"loss": 0.0553,
"step": 235
},
{
"epoch": 5.619047619047619,
"grad_norm": 1.2860362741536593,
"learning_rate": 1.3601694915254239e-05,
"loss": 0.0586,
"step": 236
},
{
"epoch": 5.642857142857143,
"grad_norm": 1.5936888140732792,
"learning_rate": 1.3474576271186442e-05,
"loss": 0.0549,
"step": 237
},
{
"epoch": 5.666666666666667,
"grad_norm": 1.3860932564758723,
"learning_rate": 1.3347457627118643e-05,
"loss": 0.0544,
"step": 238
},
{
"epoch": 5.690476190476191,
"grad_norm": 1.2865840320207638,
"learning_rate": 1.3220338983050848e-05,
"loss": 0.067,
"step": 239
},
{
"epoch": 5.714285714285714,
"grad_norm": 1.525884140616022,
"learning_rate": 1.3093220338983051e-05,
"loss": 0.0544,
"step": 240
},
{
"epoch": 5.738095238095238,
"grad_norm": 2.822419370271642,
"learning_rate": 1.2966101694915255e-05,
"loss": 0.0567,
"step": 241
},
{
"epoch": 5.761904761904762,
"grad_norm": 1.3121526349299137,
"learning_rate": 1.283898305084746e-05,
"loss": 0.0526,
"step": 242
},
{
"epoch": 5.785714285714286,
"grad_norm": 1.8688208805790765,
"learning_rate": 1.2711864406779661e-05,
"loss": 0.0575,
"step": 243
},
{
"epoch": 5.809523809523809,
"grad_norm": 1.6726910034731386,
"learning_rate": 1.2584745762711864e-05,
"loss": 0.059,
"step": 244
},
{
"epoch": 5.833333333333333,
"grad_norm": 1.2544500770016287,
"learning_rate": 1.2457627118644069e-05,
"loss": 0.0572,
"step": 245
},
{
"epoch": 5.857142857142857,
"grad_norm": 1.6707279869728848,
"learning_rate": 1.2330508474576272e-05,
"loss": 0.0619,
"step": 246
},
{
"epoch": 5.880952380952381,
"grad_norm": 1.6163231202805466,
"learning_rate": 1.2203389830508475e-05,
"loss": 0.0552,
"step": 247
},
{
"epoch": 5.904761904761905,
"grad_norm": 1.2781355150904805,
"learning_rate": 1.2076271186440678e-05,
"loss": 0.0548,
"step": 248
},
{
"epoch": 5.928571428571429,
"grad_norm": 1.1342395607037203,
"learning_rate": 1.1949152542372882e-05,
"loss": 0.0428,
"step": 249
},
{
"epoch": 5.9523809523809526,
"grad_norm": 1.3899772078202892,
"learning_rate": 1.1822033898305085e-05,
"loss": 0.0526,
"step": 250
},
{
"epoch": 5.976190476190476,
"grad_norm": 1.7638455766915386,
"learning_rate": 1.169491525423729e-05,
"loss": 0.054,
"step": 251
},
{
"epoch": 6.0,
"grad_norm": 1.202261924809845,
"learning_rate": 1.1567796610169493e-05,
"loss": 0.0433,
"step": 252
},
{
"epoch": 6.023809523809524,
"grad_norm": 0.7792668157530976,
"learning_rate": 1.1440677966101694e-05,
"loss": 0.025,
"step": 253
},
{
"epoch": 6.0476190476190474,
"grad_norm": 1.6204842398258763,
"learning_rate": 1.1313559322033899e-05,
"loss": 0.0238,
"step": 254
},
{
"epoch": 6.071428571428571,
"grad_norm": 0.7988078667419986,
"learning_rate": 1.1186440677966102e-05,
"loss": 0.021,
"step": 255
},
{
"epoch": 6.095238095238095,
"grad_norm": 0.6878327734739803,
"learning_rate": 1.1059322033898305e-05,
"loss": 0.018,
"step": 256
},
{
"epoch": 6.119047619047619,
"grad_norm": 0.7401351258265407,
"learning_rate": 1.0932203389830509e-05,
"loss": 0.0178,
"step": 257
},
{
"epoch": 6.142857142857143,
"grad_norm": 0.8125527071363077,
"learning_rate": 1.0805084745762712e-05,
"loss": 0.0188,
"step": 258
},
{
"epoch": 6.166666666666667,
"grad_norm": 0.8934818425737778,
"learning_rate": 1.0677966101694915e-05,
"loss": 0.0234,
"step": 259
},
{
"epoch": 6.190476190476191,
"grad_norm": 1.1136388906935009,
"learning_rate": 1.055084745762712e-05,
"loss": 0.0222,
"step": 260
},
{
"epoch": 6.214285714285714,
"grad_norm": 0.8422767351254755,
"learning_rate": 1.0423728813559323e-05,
"loss": 0.0221,
"step": 261
},
{
"epoch": 6.238095238095238,
"grad_norm": 0.8497766089003508,
"learning_rate": 1.0296610169491524e-05,
"loss": 0.0215,
"step": 262
},
{
"epoch": 6.261904761904762,
"grad_norm": 0.8694157539978863,
"learning_rate": 1.016949152542373e-05,
"loss": 0.0245,
"step": 263
},
{
"epoch": 6.285714285714286,
"grad_norm": 0.8427092526638704,
"learning_rate": 1.0042372881355933e-05,
"loss": 0.0221,
"step": 264
},
{
"epoch": 6.309523809523809,
"grad_norm": 0.9005053730496708,
"learning_rate": 9.915254237288136e-06,
"loss": 0.0228,
"step": 265
},
{
"epoch": 6.333333333333333,
"grad_norm": 1.028410904852562,
"learning_rate": 9.78813559322034e-06,
"loss": 0.027,
"step": 266
},
{
"epoch": 6.357142857142857,
"grad_norm": 3.6639056792383426,
"learning_rate": 9.661016949152542e-06,
"loss": 0.0406,
"step": 267
},
{
"epoch": 6.380952380952381,
"grad_norm": 1.0809192658800129,
"learning_rate": 9.533898305084745e-06,
"loss": 0.019,
"step": 268
},
{
"epoch": 6.404761904761905,
"grad_norm": 0.9792628984982289,
"learning_rate": 9.40677966101695e-06,
"loss": 0.031,
"step": 269
},
{
"epoch": 6.428571428571429,
"grad_norm": 0.6758933663388803,
"learning_rate": 9.279661016949153e-06,
"loss": 0.0183,
"step": 270
},
{
"epoch": 6.4523809523809526,
"grad_norm": 0.7343806002788874,
"learning_rate": 9.152542372881356e-06,
"loss": 0.02,
"step": 271
},
{
"epoch": 6.476190476190476,
"grad_norm": 0.7771938313593274,
"learning_rate": 9.02542372881356e-06,
"loss": 0.0195,
"step": 272
},
{
"epoch": 6.5,
"grad_norm": 0.7589725385480159,
"learning_rate": 8.898305084745763e-06,
"loss": 0.0226,
"step": 273
},
{
"epoch": 6.523809523809524,
"grad_norm": 0.7445448530941859,
"learning_rate": 8.771186440677966e-06,
"loss": 0.0176,
"step": 274
},
{
"epoch": 6.5476190476190474,
"grad_norm": 0.8116343728820773,
"learning_rate": 8.64406779661017e-06,
"loss": 0.0217,
"step": 275
},
{
"epoch": 6.571428571428571,
"grad_norm": 0.7608128956754773,
"learning_rate": 8.516949152542374e-06,
"loss": 0.0193,
"step": 276
},
{
"epoch": 6.595238095238095,
"grad_norm": 0.8653201024144109,
"learning_rate": 8.389830508474575e-06,
"loss": 0.0226,
"step": 277
},
{
"epoch": 6.619047619047619,
"grad_norm": 0.7538291615026249,
"learning_rate": 8.26271186440678e-06,
"loss": 0.0188,
"step": 278
},
{
"epoch": 6.642857142857143,
"grad_norm": 6.260384134810009,
"learning_rate": 8.135593220338983e-06,
"loss": 0.0536,
"step": 279
},
{
"epoch": 6.666666666666667,
"grad_norm": 0.9234681022724129,
"learning_rate": 8.008474576271187e-06,
"loss": 0.0227,
"step": 280
},
{
"epoch": 6.690476190476191,
"grad_norm": 1.0008785441719268,
"learning_rate": 7.881355932203392e-06,
"loss": 0.0262,
"step": 281
},
{
"epoch": 6.714285714285714,
"grad_norm": 0.7366747457685526,
"learning_rate": 7.754237288135593e-06,
"loss": 0.0157,
"step": 282
},
{
"epoch": 6.738095238095238,
"grad_norm": 0.7310739957484974,
"learning_rate": 7.627118644067796e-06,
"loss": 0.0202,
"step": 283
},
{
"epoch": 6.761904761904762,
"grad_norm": 0.7924483522499,
"learning_rate": 7.5e-06,
"loss": 0.0185,
"step": 284
},
{
"epoch": 6.785714285714286,
"grad_norm": 0.8427793429666177,
"learning_rate": 7.372881355932204e-06,
"loss": 0.0198,
"step": 285
},
{
"epoch": 6.809523809523809,
"grad_norm": 0.8521521040780511,
"learning_rate": 7.2457627118644065e-06,
"loss": 0.0232,
"step": 286
},
{
"epoch": 6.833333333333333,
"grad_norm": 0.6987645042974961,
"learning_rate": 7.1186440677966106e-06,
"loss": 0.0162,
"step": 287
},
{
"epoch": 6.857142857142857,
"grad_norm": 0.7844335921695264,
"learning_rate": 6.991525423728814e-06,
"loss": 0.0189,
"step": 288
},
{
"epoch": 6.880952380952381,
"grad_norm": 0.7516639054184951,
"learning_rate": 6.864406779661017e-06,
"loss": 0.0205,
"step": 289
},
{
"epoch": 6.904761904761905,
"grad_norm": 1.003888022409457,
"learning_rate": 6.737288135593221e-06,
"loss": 0.0306,
"step": 290
},
{
"epoch": 6.928571428571429,
"grad_norm": 0.8565419402468065,
"learning_rate": 6.610169491525424e-06,
"loss": 0.017,
"step": 291
},
{
"epoch": 6.9523809523809526,
"grad_norm": 0.8529951660515682,
"learning_rate": 6.483050847457627e-06,
"loss": 0.0199,
"step": 292
},
{
"epoch": 6.976190476190476,
"grad_norm": 0.6362048924912456,
"learning_rate": 6.3559322033898304e-06,
"loss": 0.0157,
"step": 293
},
{
"epoch": 7.0,
"grad_norm": 0.7325131779028904,
"learning_rate": 6.2288135593220344e-06,
"loss": 0.019,
"step": 294
},
{
"epoch": 7.023809523809524,
"grad_norm": 0.4276066768013448,
"learning_rate": 6.101694915254238e-06,
"loss": 0.0074,
"step": 295
},
{
"epoch": 7.0476190476190474,
"grad_norm": 0.3424457276809797,
"learning_rate": 5.974576271186441e-06,
"loss": 0.0056,
"step": 296
},
{
"epoch": 7.071428571428571,
"grad_norm": 0.3182719291929001,
"learning_rate": 5.847457627118645e-06,
"loss": 0.0059,
"step": 297
},
{
"epoch": 7.095238095238095,
"grad_norm": 0.39177003884566686,
"learning_rate": 5.720338983050847e-06,
"loss": 0.0061,
"step": 298
},
{
"epoch": 7.119047619047619,
"grad_norm": 0.3862024000460221,
"learning_rate": 5.593220338983051e-06,
"loss": 0.007,
"step": 299
},
{
"epoch": 7.142857142857143,
"grad_norm": 0.40135341351489007,
"learning_rate": 5.466101694915254e-06,
"loss": 0.0057,
"step": 300
},
{
"epoch": 7.166666666666667,
"grad_norm": 0.43499371765247974,
"learning_rate": 5.3389830508474575e-06,
"loss": 0.008,
"step": 301
},
{
"epoch": 7.190476190476191,
"grad_norm": 0.3115886153675231,
"learning_rate": 5.2118644067796615e-06,
"loss": 0.0041,
"step": 302
},
{
"epoch": 7.214285714285714,
"grad_norm": 0.35144776485376555,
"learning_rate": 5.084745762711865e-06,
"loss": 0.0048,
"step": 303
},
{
"epoch": 7.238095238095238,
"grad_norm": 0.44394337846273446,
"learning_rate": 4.957627118644068e-06,
"loss": 0.0071,
"step": 304
},
{
"epoch": 7.261904761904762,
"grad_norm": 0.3812479433410703,
"learning_rate": 4.830508474576271e-06,
"loss": 0.0047,
"step": 305
},
{
"epoch": 7.285714285714286,
"grad_norm": 0.4813556703754386,
"learning_rate": 4.703389830508475e-06,
"loss": 0.0083,
"step": 306
},
{
"epoch": 7.309523809523809,
"grad_norm": 0.38041567265748727,
"learning_rate": 4.576271186440678e-06,
"loss": 0.0053,
"step": 307
},
{
"epoch": 7.333333333333333,
"grad_norm": 0.5956664027167392,
"learning_rate": 4.449152542372881e-06,
"loss": 0.0074,
"step": 308
},
{
"epoch": 7.357142857142857,
"grad_norm": 0.4137001251285983,
"learning_rate": 4.322033898305085e-06,
"loss": 0.0057,
"step": 309
},
{
"epoch": 7.380952380952381,
"grad_norm": 0.43147697626111425,
"learning_rate": 4.194915254237288e-06,
"loss": 0.0056,
"step": 310
},
{
"epoch": 7.404761904761905,
"grad_norm": 0.56710398876272,
"learning_rate": 4.067796610169492e-06,
"loss": 0.0089,
"step": 311
},
{
"epoch": 7.428571428571429,
"grad_norm": 0.5970059016621321,
"learning_rate": 3.940677966101696e-06,
"loss": 0.0059,
"step": 312
},
{
"epoch": 7.4523809523809526,
"grad_norm": 0.44525206306317977,
"learning_rate": 3.813559322033898e-06,
"loss": 0.0094,
"step": 313
},
{
"epoch": 7.476190476190476,
"grad_norm": 0.42977446629691524,
"learning_rate": 3.686440677966102e-06,
"loss": 0.0052,
"step": 314
},
{
"epoch": 7.5,
"grad_norm": 0.504670266249874,
"learning_rate": 3.5593220338983053e-06,
"loss": 0.0059,
"step": 315
},
{
"epoch": 7.523809523809524,
"grad_norm": 0.42225599745539055,
"learning_rate": 3.4322033898305084e-06,
"loss": 0.0043,
"step": 316
},
{
"epoch": 7.5476190476190474,
"grad_norm": 0.453951066212684,
"learning_rate": 3.305084745762712e-06,
"loss": 0.0076,
"step": 317
},
{
"epoch": 7.571428571428571,
"grad_norm": 0.3933552504079856,
"learning_rate": 3.1779661016949152e-06,
"loss": 0.0055,
"step": 318
},
{
"epoch": 7.595238095238095,
"grad_norm": 0.2823733043982809,
"learning_rate": 3.050847457627119e-06,
"loss": 0.0032,
"step": 319
},
{
"epoch": 7.619047619047619,
"grad_norm": 0.3976694071153818,
"learning_rate": 2.9237288135593224e-06,
"loss": 0.0048,
"step": 320
},
{
"epoch": 7.642857142857143,
"grad_norm": 1.0108677293146413,
"learning_rate": 2.7966101694915256e-06,
"loss": 0.0123,
"step": 321
},
{
"epoch": 7.666666666666667,
"grad_norm": 0.5048805399092803,
"learning_rate": 2.6694915254237287e-06,
"loss": 0.0064,
"step": 322
},
{
"epoch": 7.690476190476191,
"grad_norm": 0.400074308168976,
"learning_rate": 2.5423728813559323e-06,
"loss": 0.0045,
"step": 323
},
{
"epoch": 7.714285714285714,
"grad_norm": 0.333169523112313,
"learning_rate": 2.4152542372881355e-06,
"loss": 0.0036,
"step": 324
},
{
"epoch": 7.738095238095238,
"grad_norm": 0.44225031403206416,
"learning_rate": 2.288135593220339e-06,
"loss": 0.0052,
"step": 325
},
{
"epoch": 7.761904761904762,
"grad_norm": 0.4480064287363033,
"learning_rate": 2.1610169491525427e-06,
"loss": 0.0075,
"step": 326
},
{
"epoch": 7.785714285714286,
"grad_norm": 0.40347019394025346,
"learning_rate": 2.033898305084746e-06,
"loss": 0.0061,
"step": 327
},
{
"epoch": 7.809523809523809,
"grad_norm": 0.4048599523778917,
"learning_rate": 1.906779661016949e-06,
"loss": 0.0054,
"step": 328
},
{
"epoch": 7.833333333333333,
"grad_norm": 0.38056809461932223,
"learning_rate": 1.7796610169491526e-06,
"loss": 0.0039,
"step": 329
},
{
"epoch": 7.857142857142857,
"grad_norm": 0.46407162628447673,
"learning_rate": 1.652542372881356e-06,
"loss": 0.0085,
"step": 330
},
{
"epoch": 7.880952380952381,
"grad_norm": 0.2991630447566035,
"learning_rate": 1.5254237288135594e-06,
"loss": 0.0049,
"step": 331
},
{
"epoch": 7.904761904761905,
"grad_norm": 0.34275195062570585,
"learning_rate": 1.3983050847457628e-06,
"loss": 0.0036,
"step": 332
},
{
"epoch": 7.928571428571429,
"grad_norm": 0.2849960007512429,
"learning_rate": 1.2711864406779662e-06,
"loss": 0.0031,
"step": 333
},
{
"epoch": 7.9523809523809526,
"grad_norm": 0.30486145922255475,
"learning_rate": 1.1440677966101696e-06,
"loss": 0.0035,
"step": 334
},
{
"epoch": 7.976190476190476,
"grad_norm": 0.4522757793742206,
"learning_rate": 1.016949152542373e-06,
"loss": 0.0042,
"step": 335
},
{
"epoch": 8.0,
"grad_norm": 0.713845770602019,
"learning_rate": 8.898305084745763e-07,
"loss": 0.0082,
"step": 336
}
],
"logging_steps": 1,
"max_steps": 336,
"num_input_tokens_seen": 0,
"num_train_epochs": 8,
"save_steps": 500,
"total_flos": 19394148433920.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}