{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9972122660294703,
"eval_steps": 500,
"global_step": 313,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 10.092560768127441,
"learning_rate": 1.0000000000000002e-06,
"loss": 1.9497,
"step": 1
},
{
"epoch": 0.01,
"grad_norm": 8.963287353515625,
"learning_rate": 2.0000000000000003e-06,
"loss": 1.951,
"step": 2
},
{
"epoch": 0.01,
"grad_norm": 9.27600383758545,
"learning_rate": 3e-06,
"loss": 1.9345,
"step": 3
},
{
"epoch": 0.01,
"grad_norm": 5.632405757904053,
"learning_rate": 4.000000000000001e-06,
"loss": 1.9157,
"step": 4
},
{
"epoch": 0.02,
"grad_norm": 5.668580055236816,
"learning_rate": 5e-06,
"loss": 1.9026,
"step": 5
},
{
"epoch": 0.02,
"grad_norm": 3.8046553134918213,
"learning_rate": 6e-06,
"loss": 1.8923,
"step": 6
},
{
"epoch": 0.02,
"grad_norm": 4.357985973358154,
"learning_rate": 7e-06,
"loss": 1.8241,
"step": 7
},
{
"epoch": 0.03,
"grad_norm": 4.685062885284424,
"learning_rate": 8.000000000000001e-06,
"loss": 1.8467,
"step": 8
},
{
"epoch": 0.03,
"grad_norm": 4.768229961395264,
"learning_rate": 9e-06,
"loss": 1.8199,
"step": 9
},
{
"epoch": 0.03,
"grad_norm": 4.796407699584961,
"learning_rate": 1e-05,
"loss": 1.8374,
"step": 10
},
{
"epoch": 0.04,
"grad_norm": 5.7536139488220215,
"learning_rate": 9.999731248679734e-06,
"loss": 1.779,
"step": 11
},
{
"epoch": 0.04,
"grad_norm": 4.202663898468018,
"learning_rate": 9.99892502360984e-06,
"loss": 1.7579,
"step": 12
},
{
"epoch": 0.04,
"grad_norm": 2.9114131927490234,
"learning_rate": 9.99758141145994e-06,
"loss": 1.7433,
"step": 13
},
{
"epoch": 0.04,
"grad_norm": 2.3823723793029785,
"learning_rate": 9.995700556669052e-06,
"loss": 1.7212,
"step": 14
},
{
"epoch": 0.05,
"grad_norm": 2.3254876136779785,
"learning_rate": 9.993282661430058e-06,
"loss": 1.7218,
"step": 15
},
{
"epoch": 0.05,
"grad_norm": 2.053166151046753,
"learning_rate": 9.990327985667972e-06,
"loss": 1.7256,
"step": 16
},
{
"epoch": 0.05,
"grad_norm": 2.3782012462615967,
"learning_rate": 9.986836847012001e-06,
"loss": 1.713,
"step": 17
},
{
"epoch": 0.06,
"grad_norm": 2.1311683654785156,
"learning_rate": 9.98280962076139e-06,
"loss": 1.6785,
"step": 18
},
{
"epoch": 0.06,
"grad_norm": 2.325747489929199,
"learning_rate": 9.978246739845095e-06,
"loss": 1.7167,
"step": 19
},
{
"epoch": 0.06,
"grad_norm": 2.1330366134643555,
"learning_rate": 9.973148694775217e-06,
"loss": 1.676,
"step": 20
},
{
"epoch": 0.07,
"grad_norm": 2.2632806301116943,
"learning_rate": 9.967516033594295e-06,
"loss": 1.7033,
"step": 21
},
{
"epoch": 0.07,
"grad_norm": 2.6582744121551514,
"learning_rate": 9.961349361816384e-06,
"loss": 1.6957,
"step": 22
},
{
"epoch": 0.07,
"grad_norm": 1.9663337469100952,
"learning_rate": 9.954649342361952e-06,
"loss": 1.6729,
"step": 23
},
{
"epoch": 0.08,
"grad_norm": 2.2557435035705566,
"learning_rate": 9.947416695486633e-06,
"loss": 1.6399,
"step": 24
},
{
"epoch": 0.08,
"grad_norm": 2.090054512023926,
"learning_rate": 9.939652198703785e-06,
"loss": 1.6792,
"step": 25
},
{
"epoch": 0.08,
"grad_norm": 2.1648776531219482,
"learning_rate": 9.93135668670091e-06,
"loss": 1.6625,
"step": 26
},
{
"epoch": 0.09,
"grad_norm": 2.1209850311279297,
"learning_rate": 9.92253105124993e-06,
"loss": 1.7057,
"step": 27
},
{
"epoch": 0.09,
"grad_norm": 2.2777392864227295,
"learning_rate": 9.91317624111132e-06,
"loss": 1.6052,
"step": 28
},
{
"epoch": 0.09,
"grad_norm": 2.1739561557769775,
"learning_rate": 9.903293261932106e-06,
"loss": 1.6139,
"step": 29
},
{
"epoch": 0.1,
"grad_norm": 2.4518871307373047,
"learning_rate": 9.89288317613777e-06,
"loss": 1.6759,
"step": 30
},
{
"epoch": 0.1,
"grad_norm": 1.795519471168518,
"learning_rate": 9.881947102818036e-06,
"loss": 1.7036,
"step": 31
},
{
"epoch": 0.1,
"grad_norm": 2.150223731994629,
"learning_rate": 9.870486217606557e-06,
"loss": 1.6469,
"step": 32
},
{
"epoch": 0.11,
"grad_norm": 1.91805899143219,
"learning_rate": 9.858501752554548e-06,
"loss": 1.6353,
"step": 33
},
{
"epoch": 0.11,
"grad_norm": 1.8129810094833374,
"learning_rate": 9.845994995998332e-06,
"loss": 1.6551,
"step": 34
},
{
"epoch": 0.11,
"grad_norm": 2.1308679580688477,
"learning_rate": 9.83296729242084e-06,
"loss": 1.617,
"step": 35
},
{
"epoch": 0.11,
"grad_norm": 1.8321012258529663,
"learning_rate": 9.819420042307091e-06,
"loss": 1.6213,
"step": 36
},
{
"epoch": 0.12,
"grad_norm": 1.9112164974212646,
"learning_rate": 9.805354701993624e-06,
"loss": 1.6245,
"step": 37
},
{
"epoch": 0.12,
"grad_norm": 2.1160471439361572,
"learning_rate": 9.79077278351195e-06,
"loss": 1.6405,
"step": 38
},
{
"epoch": 0.12,
"grad_norm": 2.6318371295928955,
"learning_rate": 9.77567585442601e-06,
"loss": 1.6234,
"step": 39
},
{
"epoch": 0.13,
"grad_norm": 3.238373279571533,
"learning_rate": 9.76006553766365e-06,
"loss": 1.6452,
"step": 40
},
{
"epoch": 0.13,
"grad_norm": 2.056736469268799,
"learning_rate": 9.743943511342168e-06,
"loss": 1.679,
"step": 41
},
{
"epoch": 0.13,
"grad_norm": 2.000826358795166,
"learning_rate": 9.727311508587907e-06,
"loss": 1.6904,
"step": 42
},
{
"epoch": 0.14,
"grad_norm": 2.089482069015503,
"learning_rate": 9.710171317349946e-06,
"loss": 1.62,
"step": 43
},
{
"epoch": 0.14,
"grad_norm": 2.5748705863952637,
"learning_rate": 9.692524780207897e-06,
"loss": 1.6669,
"step": 44
},
{
"epoch": 0.14,
"grad_norm": 1.9039987325668335,
"learning_rate": 9.674373794173818e-06,
"loss": 1.6489,
"step": 45
},
{
"epoch": 0.15,
"grad_norm": 1.8047181367874146,
"learning_rate": 9.655720310488298e-06,
"loss": 1.6227,
"step": 46
},
{
"epoch": 0.15,
"grad_norm": 2.0552868843078613,
"learning_rate": 9.636566334410682e-06,
"loss": 1.5898,
"step": 47
},
{
"epoch": 0.15,
"grad_norm": 2.252218008041382,
"learning_rate": 9.616913925003514e-06,
"loss": 1.6667,
"step": 48
},
{
"epoch": 0.16,
"grad_norm": 2.015887498855591,
"learning_rate": 9.596765194911182e-06,
"loss": 1.6668,
"step": 49
},
{
"epoch": 0.16,
"grad_norm": 2.582007884979248,
"learning_rate": 9.576122310132814e-06,
"loss": 1.6542,
"step": 50
},
{
"epoch": 0.16,
"grad_norm": 1.9070011377334595,
"learning_rate": 9.554987489789426e-06,
"loss": 1.691,
"step": 51
},
{
"epoch": 0.17,
"grad_norm": 1.9671483039855957,
"learning_rate": 9.533363005885362e-06,
"loss": 1.6116,
"step": 52
},
{
"epoch": 0.17,
"grad_norm": 2.4808287620544434,
"learning_rate": 9.511251183064068e-06,
"loss": 1.6653,
"step": 53
},
{
"epoch": 0.17,
"grad_norm": 2.237518548965454,
"learning_rate": 9.48865439835817e-06,
"loss": 1.6571,
"step": 54
},
{
"epoch": 0.18,
"grad_norm": 2.142627477645874,
"learning_rate": 9.465575080933959e-06,
"loss": 1.6247,
"step": 55
},
{
"epoch": 0.18,
"grad_norm": 2.0365779399871826,
"learning_rate": 9.442015711830246e-06,
"loss": 1.6323,
"step": 56
},
{
"epoch": 0.18,
"grad_norm": 2.427443742752075,
"learning_rate": 9.417978823691652e-06,
"loss": 1.6646,
"step": 57
},
{
"epoch": 0.18,
"grad_norm": 2.014340877532959,
"learning_rate": 9.393467000496345e-06,
"loss": 1.5754,
"step": 58
},
{
"epoch": 0.19,
"grad_norm": 2.6110949516296387,
"learning_rate": 9.368482877278264e-06,
"loss": 1.657,
"step": 59
},
{
"epoch": 0.19,
"grad_norm": 2.048231363296509,
"learning_rate": 9.34302913984385e-06,
"loss": 1.61,
"step": 60
},
{
"epoch": 0.19,
"grad_norm": 2.672330379486084,
"learning_rate": 9.317108524483319e-06,
"loss": 1.5894,
"step": 61
},
{
"epoch": 0.2,
"grad_norm": 2.6745121479034424,
"learning_rate": 9.29072381767651e-06,
"loss": 1.5623,
"step": 62
},
{
"epoch": 0.2,
"grad_norm": 2.1490442752838135,
"learning_rate": 9.26387785579334e-06,
"loss": 1.6668,
"step": 63
},
{
"epoch": 0.2,
"grad_norm": 3.805147171020508,
"learning_rate": 9.236573524788888e-06,
"loss": 1.6274,
"step": 64
},
{
"epoch": 0.21,
"grad_norm": 2.243023157119751,
"learning_rate": 9.208813759893158e-06,
"loss": 1.6496,
"step": 65
},
{
"epoch": 0.21,
"grad_norm": 2.5260114669799805,
"learning_rate": 9.180601545295535e-06,
"loss": 1.623,
"step": 66
},
{
"epoch": 0.21,
"grad_norm": 2.0329606533050537,
"learning_rate": 9.151939913823988e-06,
"loss": 1.5873,
"step": 67
},
{
"epoch": 0.22,
"grad_norm": 3.3269598484039307,
"learning_rate": 9.122831946619038e-06,
"loss": 1.6327,
"step": 68
},
{
"epoch": 0.22,
"grad_norm": 2.1953978538513184,
"learning_rate": 9.093280772802527e-06,
"loss": 1.6321,
"step": 69
},
{
"epoch": 0.22,
"grad_norm": 3.2001328468322754,
"learning_rate": 9.063289569141251e-06,
"loss": 1.6338,
"step": 70
},
{
"epoch": 0.23,
"grad_norm": 2.076502799987793,
"learning_rate": 9.032861559705442e-06,
"loss": 1.6648,
"step": 71
},
{
"epoch": 0.23,
"grad_norm": 2.0800352096557617,
"learning_rate": 9.002000015522182e-06,
"loss": 1.5694,
"step": 72
},
{
"epoch": 0.23,
"grad_norm": 2.0901432037353516,
"learning_rate": 8.970708254223768e-06,
"loss": 1.6571,
"step": 73
},
{
"epoch": 0.24,
"grad_norm": 3.1389923095703125,
"learning_rate": 8.938989639691068e-06,
"loss": 1.6151,
"step": 74
},
{
"epoch": 0.24,
"grad_norm": 2.0514352321624756,
"learning_rate": 8.90684758169189e-06,
"loss": 1.618,
"step": 75
},
{
"epoch": 0.24,
"grad_norm": 1.9462100267410278,
"learning_rate": 8.87428553551445e-06,
"loss": 1.6367,
"step": 76
},
{
"epoch": 0.25,
"grad_norm": 2.2343838214874268,
"learning_rate": 8.841307001595904e-06,
"loss": 1.6321,
"step": 77
},
{
"epoch": 0.25,
"grad_norm": 2.6064207553863525,
"learning_rate": 8.807915525146065e-06,
"loss": 1.5913,
"step": 78
},
{
"epoch": 0.25,
"grad_norm": 2.589186191558838,
"learning_rate": 8.774114695766286e-06,
"loss": 1.6,
"step": 79
},
{
"epoch": 0.25,
"grad_norm": 2.0082924365997314,
"learning_rate": 8.739908147063576e-06,
"loss": 1.6266,
"step": 80
},
{
"epoch": 0.26,
"grad_norm": 1.9739817380905151,
"learning_rate": 8.705299556259986e-06,
"loss": 1.5926,
"step": 81
},
{
"epoch": 0.26,
"grad_norm": 2.9566433429718018,
"learning_rate": 8.670292643797302e-06,
"loss": 1.624,
"step": 82
},
{
"epoch": 0.26,
"grad_norm": 2.10595703125,
"learning_rate": 8.634891172937102e-06,
"loss": 1.6437,
"step": 83
},
{
"epoch": 0.27,
"grad_norm": 1.7080891132354736,
"learning_rate": 8.599098949356201e-06,
"loss": 1.6131,
"step": 84
},
{
"epoch": 0.27,
"grad_norm": 2.358856678009033,
"learning_rate": 8.562919820737537e-06,
"loss": 1.5748,
"step": 85
},
{
"epoch": 0.27,
"grad_norm": 2.030383825302124,
"learning_rate": 8.526357676356538e-06,
"loss": 1.6389,
"step": 86
},
{
"epoch": 0.28,
"grad_norm": 2.3567280769348145,
"learning_rate": 8.489416446663037e-06,
"loss": 1.6288,
"step": 87
},
{
"epoch": 0.28,
"grad_norm": 2.1183040142059326,
"learning_rate": 8.452100102858734e-06,
"loss": 1.5929,
"step": 88
},
{
"epoch": 0.28,
"grad_norm": 2.467944860458374,
"learning_rate": 8.414412656470297e-06,
"loss": 1.5579,
"step": 89
},
{
"epoch": 0.29,
"grad_norm": 2.200274705886841,
"learning_rate": 8.376358158918114e-06,
"loss": 1.587,
"step": 90
},
{
"epoch": 0.29,
"grad_norm": 2.3279902935028076,
"learning_rate": 8.33794070108077e-06,
"loss": 1.6496,
"step": 91
},
{
"epoch": 0.29,
"grad_norm": 2.7403910160064697,
"learning_rate": 8.299164412855268e-06,
"loss": 1.5665,
"step": 92
},
{
"epoch": 0.3,
"grad_norm": 2.5097343921661377,
"learning_rate": 8.260033462713073e-06,
"loss": 1.6245,
"step": 93
},
{
"epoch": 0.3,
"grad_norm": 2.2657604217529297,
"learning_rate": 8.22055205725199e-06,
"loss": 1.5785,
"step": 94
},
{
"epoch": 0.3,
"grad_norm": 2.4038476943969727,
"learning_rate": 8.180724440743957e-06,
"loss": 1.6912,
"step": 95
},
{
"epoch": 0.31,
"grad_norm": 2.811049461364746,
"learning_rate": 8.14055489467878e-06,
"loss": 1.6059,
"step": 96
},
{
"epoch": 0.31,
"grad_norm": 2.474153518676758,
"learning_rate": 8.100047737303877e-06,
"loss": 1.6423,
"step": 97
},
{
"epoch": 0.31,
"grad_norm": 2.351736307144165,
"learning_rate": 8.059207323160057e-06,
"loss": 1.6199,
"step": 98
},
{
"epoch": 0.32,
"grad_norm": 2.3526997566223145,
"learning_rate": 8.018038042613407e-06,
"loss": 1.5856,
"step": 99
},
{
"epoch": 0.32,
"grad_norm": 2.075406789779663,
"learning_rate": 7.97654432138333e-06,
"loss": 1.5901,
"step": 100
},
{
"epoch": 0.32,
"grad_norm": 2.6973438262939453,
"learning_rate": 7.93473062006677e-06,
"loss": 1.5696,
"step": 101
},
{
"epoch": 0.32,
"grad_norm": 2.6449167728424072,
"learning_rate": 7.892601433658705e-06,
"loss": 1.5939,
"step": 102
},
{
"epoch": 0.33,
"grad_norm": 2.360849380493164,
"learning_rate": 7.850161291068915e-06,
"loss": 1.5449,
"step": 103
},
{
"epoch": 0.33,
"grad_norm": 2.467226028442383,
"learning_rate": 7.807414754635145e-06,
"loss": 1.5926,
"step": 104
},
{
"epoch": 0.33,
"grad_norm": 2.449989080429077,
"learning_rate": 7.764366419632636e-06,
"loss": 1.5591,
"step": 105
},
{
"epoch": 0.34,
"grad_norm": 5.518803119659424,
"learning_rate": 7.721020913780137e-06,
"loss": 1.5406,
"step": 106
},
{
"epoch": 0.34,
"grad_norm": 2.6841485500335693,
"learning_rate": 7.677382896742417e-06,
"loss": 1.5797,
"step": 107
},
{
"epoch": 0.34,
"grad_norm": 2.475085496902466,
"learning_rate": 7.63345705962935e-06,
"loss": 1.5653,
"step": 108
},
{
"epoch": 0.35,
"grad_norm": 2.3905832767486572,
"learning_rate": 7.589248124491627e-06,
"loss": 1.5641,
"step": 109
},
{
"epoch": 0.35,
"grad_norm": 2.7013638019561768,
"learning_rate": 7.544760843813122e-06,
"loss": 1.5837,
"step": 110
},
{
"epoch": 0.35,
"grad_norm": 2.500148296356201,
"learning_rate": 7.500000000000001e-06,
"loss": 1.6255,
"step": 111
},
{
"epoch": 0.36,
"grad_norm": 2.106887102127075,
"learning_rate": 7.454970404866612e-06,
"loss": 1.6069,
"step": 112
},
{
"epoch": 0.36,
"grad_norm": 2.1840200424194336,
"learning_rate": 7.409676899118213e-06,
"loss": 1.5959,
"step": 113
},
{
"epoch": 0.36,
"grad_norm": 5.758022308349609,
"learning_rate": 7.3641243518305915e-06,
"loss": 1.616,
"step": 114
},
{
"epoch": 0.37,
"grad_norm": 2.24625301361084,
"learning_rate": 7.318317659926637e-06,
"loss": 1.639,
"step": 115
},
{
"epoch": 0.37,
"grad_norm": 3.8651440143585205,
"learning_rate": 7.272261747649922e-06,
"loss": 1.6287,
"step": 116
},
{
"epoch": 0.37,
"grad_norm": 1.9262839555740356,
"learning_rate": 7.225961566035335e-06,
"loss": 1.5851,
"step": 117
},
{
"epoch": 0.38,
"grad_norm": 2.4312078952789307,
"learning_rate": 7.179422092376856e-06,
"loss": 1.5934,
"step": 118
},
{
"epoch": 0.38,
"grad_norm": 2.391693592071533,
"learning_rate": 7.132648329692478e-06,
"loss": 1.5719,
"step": 119
},
{
"epoch": 0.38,
"grad_norm": 2.10438871383667,
"learning_rate": 7.085645306186391e-06,
"loss": 1.5876,
"step": 120
},
{
"epoch": 0.39,
"grad_norm": 1.984710931777954,
"learning_rate": 7.038418074708444e-06,
"loss": 1.6506,
"step": 121
},
{
"epoch": 0.39,
"grad_norm": 2.4563817977905273,
"learning_rate": 6.990971712210966e-06,
"loss": 1.5578,
"step": 122
},
{
"epoch": 0.39,
"grad_norm": 1.856941819190979,
"learning_rate": 6.943311319202976e-06,
"loss": 1.5806,
"step": 123
},
{
"epoch": 0.4,
"grad_norm": 2.228983163833618,
"learning_rate": 6.895442019201898e-06,
"loss": 1.5639,
"step": 124
},
{
"epoch": 0.4,
"grad_norm": 2.9688351154327393,
"learning_rate": 6.8473689581827585e-06,
"loss": 1.5939,
"step": 125
},
{
"epoch": 0.4,
"grad_norm": 2.028876304626465,
"learning_rate": 6.7990973040250055e-06,
"loss": 1.6096,
"step": 126
},
{
"epoch": 0.4,
"grad_norm": 2.385794162750244,
"learning_rate": 6.750632245956954e-06,
"loss": 1.5617,
"step": 127
},
{
"epoch": 0.41,
"grad_norm": 2.0037996768951416,
"learning_rate": 6.701978993997942e-06,
"loss": 1.6028,
"step": 128
},
{
"epoch": 0.41,
"grad_norm": 2.0149664878845215,
"learning_rate": 6.653142778398247e-06,
"loss": 1.583,
"step": 129
},
{
"epoch": 0.41,
"grad_norm": 2.466585874557495,
"learning_rate": 6.6041288490768385e-06,
"loss": 1.6368,
"step": 130
},
{
"epoch": 0.42,
"grad_norm": 2.181319236755371,
"learning_rate": 6.554942475057003e-06,
"loss": 1.5819,
"step": 131
},
{
"epoch": 0.42,
"grad_norm": 1.9432387351989746,
"learning_rate": 6.505588943899923e-06,
"loss": 1.5551,
"step": 132
},
{
"epoch": 0.42,
"grad_norm": 2.277068614959717,
"learning_rate": 6.456073561136261e-06,
"loss": 1.5788,
"step": 133
},
{
"epoch": 0.43,
"grad_norm": 1.9018107652664185,
"learning_rate": 6.406401649695814e-06,
"loss": 1.5639,
"step": 134
},
{
"epoch": 0.43,
"grad_norm": 2.755958318710327,
"learning_rate": 6.356578549335295e-06,
"loss": 1.6015,
"step": 135
},
{
"epoch": 0.43,
"grad_norm": 2.2761142253875732,
"learning_rate": 6.306609616064304e-06,
"loss": 1.6054,
"step": 136
},
{
"epoch": 0.44,
"grad_norm": 3.212486505508423,
"learning_rate": 6.256500221569556e-06,
"loss": 1.5953,
"step": 137
},
{
"epoch": 0.44,
"grad_norm": 2.2175703048706055,
"learning_rate": 6.2062557526374226e-06,
"loss": 1.5745,
"step": 138
},
{
"epoch": 0.44,
"grad_norm": 1.9958215951919556,
"learning_rate": 6.15588161057485e-06,
"loss": 1.5981,
"step": 139
},
{
"epoch": 0.45,
"grad_norm": 2.4180896282196045,
"learning_rate": 6.10538321062871e-06,
"loss": 1.5836,
"step": 140
},
{
"epoch": 0.45,
"grad_norm": 2.0187716484069824,
"learning_rate": 6.0547659814036664e-06,
"loss": 1.6062,
"step": 141
},
{
"epoch": 0.45,
"grad_norm": 2.3086986541748047,
"learning_rate": 6.004035364278593e-06,
"loss": 1.5764,
"step": 142
},
{
"epoch": 0.46,
"grad_norm": 1.9949414730072021,
"learning_rate": 5.953196812821622e-06,
"loss": 1.6037,
"step": 143
},
{
"epoch": 0.46,
"grad_norm": 1.8632932901382446,
"learning_rate": 5.902255792203882e-06,
"loss": 1.6051,
"step": 144
},
{
"epoch": 0.46,
"grad_norm": 2.034745931625366,
"learning_rate": 5.851217778611994e-06,
"loss": 1.6386,
"step": 145
},
{
"epoch": 0.47,
"grad_norm": 2.1993603706359863,
"learning_rate": 5.800088258659371e-06,
"loss": 1.6081,
"step": 146
},
{
"epoch": 0.47,
"grad_norm": 2.0585718154907227,
"learning_rate": 5.748872728796409e-06,
"loss": 1.5955,
"step": 147
},
{
"epoch": 0.47,
"grad_norm": 2.2753782272338867,
"learning_rate": 5.697576694719616e-06,
"loss": 1.5655,
"step": 148
},
{
"epoch": 0.47,
"grad_norm": 3.237483024597168,
"learning_rate": 5.646205670779745e-06,
"loss": 1.6004,
"step": 149
},
{
"epoch": 0.48,
"grad_norm": 3.019814968109131,
"learning_rate": 5.594765179389003e-06,
"loss": 1.5721,
"step": 150
},
{
"epoch": 0.48,
"grad_norm": 2.3698084354400635,
"learning_rate": 5.543260750427373e-06,
"loss": 1.5946,
"step": 151
},
{
"epoch": 0.48,
"grad_norm": 2.441636085510254,
"learning_rate": 5.4916979206481745e-06,
"loss": 1.5522,
"step": 152
},
{
"epoch": 0.49,
"grad_norm": 2.149003267288208,
"learning_rate": 5.440082233082837e-06,
"loss": 1.5935,
"step": 153
},
{
"epoch": 0.49,
"grad_norm": 2.03369402885437,
"learning_rate": 5.388419236445033e-06,
"loss": 1.5912,
"step": 154
},
{
"epoch": 0.49,
"grad_norm": 1.9619389772415161,
"learning_rate": 5.336714484534183e-06,
"loss": 1.5324,
"step": 155
},
{
"epoch": 0.5,
"grad_norm": 3.591277837753296,
"learning_rate": 5.284973535638424e-06,
"loss": 1.5662,
"step": 156
},
{
"epoch": 0.5,
"grad_norm": 2.212984323501587,
"learning_rate": 5.233201951937088e-06,
"loss": 1.6589,
"step": 157
},
{
"epoch": 0.5,
"grad_norm": 1.835900902748108,
"learning_rate": 5.181405298902763e-06,
"loss": 1.597,
"step": 158
},
{
"epoch": 0.51,
"grad_norm": 2.046597480773926,
"learning_rate": 5.1295891447030056e-06,
"loss": 1.5742,
"step": 159
},
{
"epoch": 0.51,
"grad_norm": 2.5161941051483154,
"learning_rate": 5.077759059601756e-06,
"loss": 1.5848,
"step": 160
},
{
"epoch": 0.51,
"grad_norm": 2.1808998584747314,
"learning_rate": 5.025920615360532e-06,
"loss": 1.5684,
"step": 161
},
{
"epoch": 0.52,
"grad_norm": 2.5023722648620605,
"learning_rate": 4.974079384639469e-06,
"loss": 1.5815,
"step": 162
},
{
"epoch": 0.52,
"grad_norm": 2.056931734085083,
"learning_rate": 4.922240940398246e-06,
"loss": 1.5334,
"step": 163
},
{
"epoch": 0.52,
"grad_norm": 2.1214613914489746,
"learning_rate": 4.870410855296994e-06,
"loss": 1.5726,
"step": 164
},
{
"epoch": 0.53,
"grad_norm": 3.080734968185425,
"learning_rate": 4.818594701097239e-06,
"loss": 1.5869,
"step": 165
},
{
"epoch": 0.53,
"grad_norm": 2.13873028755188,
"learning_rate": 4.766798048062913e-06,
"loss": 1.6214,
"step": 166
},
{
"epoch": 0.53,
"grad_norm": 2.4381258487701416,
"learning_rate": 4.715026464361576e-06,
"loss": 1.5921,
"step": 167
},
{
"epoch": 0.54,
"grad_norm": 2.3654768466949463,
"learning_rate": 4.663285515465818e-06,
"loss": 1.5598,
"step": 168
},
{
"epoch": 0.54,
"grad_norm": 2.3201935291290283,
"learning_rate": 4.611580763554969e-06,
"loss": 1.5339,
"step": 169
},
{
"epoch": 0.54,
"grad_norm": 2.3482742309570312,
"learning_rate": 4.559917766917166e-06,
"loss": 1.5501,
"step": 170
},
{
"epoch": 0.54,
"grad_norm": 2.296869993209839,
"learning_rate": 4.508302079351827e-06,
"loss": 1.542,
"step": 171
},
{
"epoch": 0.55,
"grad_norm": 2.0012331008911133,
"learning_rate": 4.456739249572628e-06,
"loss": 1.5223,
"step": 172
},
{
"epoch": 0.55,
"grad_norm": 2.4259610176086426,
"learning_rate": 4.405234820611001e-06,
"loss": 1.5929,
"step": 173
},
{
"epoch": 0.55,
"grad_norm": 2.1162846088409424,
"learning_rate": 4.3537943292202555e-06,
"loss": 1.5213,
"step": 174
},
{
"epoch": 0.56,
"grad_norm": 4.398637771606445,
"learning_rate": 4.3024233052803855e-06,
"loss": 1.5698,
"step": 175
},
{
"epoch": 0.56,
"grad_norm": 2.08495831489563,
"learning_rate": 4.251127271203593e-06,
"loss": 1.6086,
"step": 176
},
{
"epoch": 0.56,
"grad_norm": 5.053515911102295,
"learning_rate": 4.199911741340631e-06,
"loss": 1.6036,
"step": 177
},
{
"epoch": 0.57,
"grad_norm": 3.257359266281128,
"learning_rate": 4.148782221388007e-06,
"loss": 1.6246,
"step": 178
},
{
"epoch": 0.57,
"grad_norm": 2.1245572566986084,
"learning_rate": 4.097744207796119e-06,
"loss": 1.5544,
"step": 179
},
{
"epoch": 0.57,
"grad_norm": 2.556488275527954,
"learning_rate": 4.04680318717838e-06,
"loss": 1.5516,
"step": 180
},
{
"epoch": 0.58,
"grad_norm": 2.438620090484619,
"learning_rate": 3.995964635721409e-06,
"loss": 1.5524,
"step": 181
},
{
"epoch": 0.58,
"grad_norm": 1.9453068971633911,
"learning_rate": 3.945234018596335e-06,
"loss": 1.5657,
"step": 182
},
{
"epoch": 0.58,
"grad_norm": 2.2514045238494873,
"learning_rate": 3.8946167893712916e-06,
"loss": 1.5916,
"step": 183
},
{
"epoch": 0.59,
"grad_norm": 2.2144711017608643,
"learning_rate": 3.844118389425154e-06,
"loss": 1.6051,
"step": 184
},
{
"epoch": 0.59,
"grad_norm": 2.4618723392486572,
"learning_rate": 3.7937442473625787e-06,
"loss": 1.5841,
"step": 185
},
{
"epoch": 0.59,
"grad_norm": 2.2561230659484863,
"learning_rate": 3.743499778430445e-06,
"loss": 1.5469,
"step": 186
},
{
"epoch": 0.6,
"grad_norm": 2.2580950260162354,
"learning_rate": 3.6933903839356983e-06,
"loss": 1.5618,
"step": 187
},
{
"epoch": 0.6,
"grad_norm": 1.8674182891845703,
"learning_rate": 3.6434214506647064e-06,
"loss": 1.5266,
"step": 188
},
{
"epoch": 0.6,
"grad_norm": 2.33337664604187,
"learning_rate": 3.5935983503041864e-06,
"loss": 1.5223,
"step": 189
},
{
"epoch": 0.61,
"grad_norm": 1.912142038345337,
"learning_rate": 3.5439264388637407e-06,
"loss": 1.5331,
"step": 190
},
{
"epoch": 0.61,
"grad_norm": 2.8374595642089844,
"learning_rate": 3.4944110561000785e-06,
"loss": 1.6068,
"step": 191
},
{
"epoch": 0.61,
"grad_norm": 2.329653024673462,
"learning_rate": 3.4450575249429975e-06,
"loss": 1.5324,
"step": 192
},
{
"epoch": 0.61,
"grad_norm": 2.3587117195129395,
"learning_rate": 3.3958711509231627e-06,
"loss": 1.5542,
"step": 193
},
{
"epoch": 0.62,
"grad_norm": 2.657672643661499,
"learning_rate": 3.3468572216017536e-06,
"loss": 1.5755,
"step": 194
},
{
"epoch": 0.62,
"grad_norm": 2.17305064201355,
"learning_rate": 3.29802100600206e-06,
"loss": 1.5489,
"step": 195
},
{
"epoch": 0.62,
"grad_norm": 2.5755293369293213,
"learning_rate": 3.249367754043047e-06,
"loss": 1.5835,
"step": 196
},
{
"epoch": 0.63,
"grad_norm": 3.943556070327759,
"learning_rate": 3.200902695974995e-06,
"loss": 1.6003,
"step": 197
},
{
"epoch": 0.63,
"grad_norm": 2.425208568572998,
"learning_rate": 3.152631041817244e-06,
"loss": 1.5649,
"step": 198
},
{
"epoch": 0.63,
"grad_norm": 2.0712738037109375,
"learning_rate": 3.104557980798104e-06,
"loss": 1.5363,
"step": 199
},
{
"epoch": 0.64,
"grad_norm": 2.3942067623138428,
"learning_rate": 3.056688680797024e-06,
"loss": 1.5522,
"step": 200
},
{
"epoch": 0.64,
"grad_norm": 2.139435052871704,
"learning_rate": 3.0090282877890376e-06,
"loss": 1.5759,
"step": 201
},
{
"epoch": 0.64,
"grad_norm": 2.4167027473449707,
"learning_rate": 2.961581925291557e-06,
"loss": 1.5254,
"step": 202
},
{
"epoch": 0.65,
"grad_norm": 1.9939026832580566,
"learning_rate": 2.9143546938136093e-06,
"loss": 1.5688,
"step": 203
},
{
"epoch": 0.65,
"grad_norm": 1.8560909032821655,
"learning_rate": 2.8673516703075247e-06,
"loss": 1.5346,
"step": 204
},
{
"epoch": 0.65,
"grad_norm": 4.581928730010986,
"learning_rate": 2.820577907623145e-06,
"loss": 1.5447,
"step": 205
},
{
"epoch": 0.66,
"grad_norm": 2.128434419631958,
"learning_rate": 2.7740384339646655e-06,
"loss": 1.5557,
"step": 206
},
{
"epoch": 0.66,
"grad_norm": 2.027505397796631,
"learning_rate": 2.7277382523500804e-06,
"loss": 1.527,
"step": 207
},
{
"epoch": 0.66,
"grad_norm": 2.5379273891448975,
"learning_rate": 2.6816823400733628e-06,
"loss": 1.5794,
"step": 208
},
{
"epoch": 0.67,
"grad_norm": 6.262667655944824,
"learning_rate": 2.6358756481694115e-06,
"loss": 1.5452,
"step": 209
},
{
"epoch": 0.67,
"grad_norm": 2.3182244300842285,
"learning_rate": 2.5903231008817888e-06,
"loss": 1.5354,
"step": 210
},
{
"epoch": 0.67,
"grad_norm": 2.3638322353363037,
"learning_rate": 2.5450295951333896e-06,
"loss": 1.5699,
"step": 211
},
{
"epoch": 0.68,
"grad_norm": 2.2807657718658447,
"learning_rate": 2.5000000000000015e-06,
"loss": 1.5958,
"step": 212
},
{
"epoch": 0.68,
"grad_norm": 2.4435348510742188,
"learning_rate": 2.4552391561868783e-06,
"loss": 1.5601,
"step": 213
},
{
"epoch": 0.68,
"grad_norm": 2.2643187046051025,
"learning_rate": 2.410751875508373e-06,
"loss": 1.5474,
"step": 214
},
{
"epoch": 0.68,
"grad_norm": 2.2094953060150146,
"learning_rate": 2.3665429403706506e-06,
"loss": 1.5888,
"step": 215
},
{
"epoch": 0.69,
"grad_norm": 2.147650718688965,
"learning_rate": 2.3226171032575856e-06,
"loss": 1.5937,
"step": 216
},
{
"epoch": 0.69,
"grad_norm": 1.9609376192092896,
"learning_rate": 2.278979086219863e-06,
"loss": 1.5873,
"step": 217
},
{
"epoch": 0.69,
"grad_norm": 2.404101610183716,
"learning_rate": 2.2356335803673655e-06,
"loss": 1.5707,
"step": 218
},
{
"epoch": 0.7,
"grad_norm": 2.1139962673187256,
"learning_rate": 2.192585245364856e-06,
"loss": 1.5839,
"step": 219
},
{
"epoch": 0.7,
"grad_norm": 2.373765707015991,
"learning_rate": 2.149838708931087e-06,
"loss": 1.5289,
"step": 220
},
{
"epoch": 0.7,
"grad_norm": 2.1613967418670654,
"learning_rate": 2.1073985663412984e-06,
"loss": 1.5599,
"step": 221
},
{
"epoch": 0.71,
"grad_norm": 2.6613214015960693,
"learning_rate": 2.0652693799332286e-06,
"loss": 1.5657,
"step": 222
},
{
"epoch": 0.71,
"grad_norm": 1.8200939893722534,
"learning_rate": 2.0234556786166715e-06,
"loss": 1.4937,
"step": 223
},
{
"epoch": 0.71,
"grad_norm": 1.8578108549118042,
"learning_rate": 1.9819619573865932e-06,
"loss": 1.5268,
"step": 224
},
{
"epoch": 0.72,
"grad_norm": 2.355733633041382,
"learning_rate": 1.9407926768399456e-06,
"loss": 1.5658,
"step": 225
},
{
"epoch": 0.72,
"grad_norm": 2.7907326221466064,
"learning_rate": 1.8999522626961254e-06,
"loss": 1.5777,
"step": 226
},
{
"epoch": 0.72,
"grad_norm": 2.2081241607666016,
"learning_rate": 1.859445105321221e-06,
"loss": 1.6039,
"step": 227
},
{
"epoch": 0.73,
"grad_norm": 2.7307095527648926,
"learning_rate": 1.8192755592560446e-06,
"loss": 1.554,
"step": 228
},
{
"epoch": 0.73,
"grad_norm": 2.100717306137085,
"learning_rate": 1.7794479427480115e-06,
"loss": 1.5687,
"step": 229
},
{
"epoch": 0.73,
"grad_norm": 2.900230884552002,
"learning_rate": 1.739966537286929e-06,
"loss": 1.5894,
"step": 230
},
{
"epoch": 0.74,
"grad_norm": 2.7056214809417725,
"learning_rate": 1.7008355871447345e-06,
"loss": 1.5262,
"step": 231
},
{
"epoch": 0.74,
"grad_norm": 2.2790305614471436,
"learning_rate": 1.6620592989192318e-06,
"loss": 1.5334,
"step": 232
},
{
"epoch": 0.74,
"grad_norm": 2.336102247238159,
"learning_rate": 1.6236418410818872e-06,
"loss": 1.6231,
"step": 233
},
{
"epoch": 0.75,
"grad_norm": 2.4769365787506104,
"learning_rate": 1.5855873435297042e-06,
"loss": 1.5542,
"step": 234
},
{
"epoch": 0.75,
"grad_norm": 2.2314493656158447,
"learning_rate": 1.5478998971412669e-06,
"loss": 1.582,
"step": 235
},
{
"epoch": 0.75,
"grad_norm": 2.37534236907959,
"learning_rate": 1.510583553336964e-06,
"loss": 1.5618,
"step": 236
},
{
"epoch": 0.76,
"grad_norm": 2.2861831188201904,
"learning_rate": 1.473642323643465e-06,
"loss": 1.5557,
"step": 237
},
{
"epoch": 0.76,
"grad_norm": 2.0637292861938477,
"learning_rate": 1.4370801792624656e-06,
"loss": 1.5812,
"step": 238
},
{
"epoch": 0.76,
"grad_norm": 2.4701573848724365,
"learning_rate": 1.4009010506437997e-06,
"loss": 1.5535,
"step": 239
},
{
"epoch": 0.76,
"grad_norm": 2.5278735160827637,
"learning_rate": 1.3651088270628992e-06,
"loss": 1.5792,
"step": 240
},
{
"epoch": 0.77,
"grad_norm": 2.0601749420166016,
"learning_rate": 1.3297073562026992e-06,
"loss": 1.5739,
"step": 241
},
{
"epoch": 0.77,
"grad_norm": 2.125802755355835,
"learning_rate": 1.2947004437400161e-06,
"loss": 1.579,
"step": 242
},
{
"epoch": 0.77,
"grad_norm": 2.429272413253784,
"learning_rate": 1.2600918529364253e-06,
"loss": 1.5575,
"step": 243
},
{
"epoch": 0.78,
"grad_norm": 2.331495761871338,
"learning_rate": 1.225885304233716e-06,
"loss": 1.5796,
"step": 244
},
{
"epoch": 0.78,
"grad_norm": 2.5032925605773926,
"learning_rate": 1.1920844748539373e-06,
"loss": 1.5205,
"step": 245
},
{
"epoch": 0.78,
"grad_norm": 2.1888771057128906,
"learning_rate": 1.1586929984040974e-06,
"loss": 1.5877,
"step": 246
},
{
"epoch": 0.79,
"grad_norm": 2.270721673965454,
"learning_rate": 1.125714464485551e-06,
"loss": 1.5382,
"step": 247
},
{
"epoch": 0.79,
"grad_norm": 3.260922431945801,
"learning_rate": 1.0931524183081105e-06,
"loss": 1.585,
"step": 248
},
{
"epoch": 0.79,
"grad_norm": 2.52512526512146,
"learning_rate": 1.0610103603089345e-06,
"loss": 1.5353,
"step": 249
},
{
"epoch": 0.8,
"grad_norm": 2.214529275894165,
"learning_rate": 1.0292917457762325e-06,
"loss": 1.6081,
"step": 250
},
{
"epoch": 0.8,
"grad_norm": 3.466500997543335,
"learning_rate": 9.979999844778203e-07,
"loss": 1.5661,
"step": 251
},
{
"epoch": 0.8,
"grad_norm": 2.2533483505249023,
"learning_rate": 9.671384402945588e-07,
"loss": 1.5718,
"step": 252
},
{
"epoch": 0.81,
"grad_norm": 1.9847800731658936,
"learning_rate": 9.367104308587493e-07,
"loss": 1.561,
"step": 253
},
{
"epoch": 0.81,
"grad_norm": 2.6065824031829834,
"learning_rate": 9.06719227197474e-07,
"loss": 1.5482,
"step": 254
},
{
"epoch": 0.81,
"grad_norm": 2.5435142517089844,
"learning_rate": 8.771680533809634e-07,
"loss": 1.5918,
"step": 255
},
{
"epoch": 0.82,
"grad_norm": 2.8875584602355957,
"learning_rate": 8.480600861760124e-07,
"loss": 1.6082,
"step": 256
},
{
"epoch": 0.82,
"grad_norm": 2.1082193851470947,
"learning_rate": 8.193984547044659e-07,
"loss": 1.594,
"step": 257
},
{
"epoch": 0.82,
"grad_norm": 2.4410855770111084,
"learning_rate": 7.911862401068431e-07,
"loss": 1.5938,
"step": 258
},
{
"epoch": 0.83,
"grad_norm": 2.8396708965301514,
"learning_rate": 7.634264752111131e-07,
"loss": 1.6215,
"step": 259
},
{
"epoch": 0.83,
"grad_norm": 2.493769407272339,
"learning_rate": 7.361221442066607e-07,
"loss": 1.5734,
"step": 260
},
{
"epoch": 0.83,
"grad_norm": 1.8893070220947266,
"learning_rate": 7.092761823234911e-07,
"loss": 1.5264,
"step": 261
},
{
"epoch": 0.83,
"grad_norm": 2.5776591300964355,
"learning_rate": 6.828914755166826e-07,
"loss": 1.5733,
"step": 262
},
{
"epoch": 0.84,
"grad_norm": 2.7408130168914795,
"learning_rate": 6.569708601561515e-07,
"loss": 1.5617,
"step": 263
},
{
"epoch": 0.84,
"grad_norm": 2.9031057357788086,
"learning_rate": 6.315171227217365e-07,
"loss": 1.5979,
"step": 264
},
{
"epoch": 0.84,
"grad_norm": 2.713923931121826,
"learning_rate": 6.065329995036573e-07,
"loss": 1.5364,
"step": 265
},
{
"epoch": 0.85,
"grad_norm": 2.8765623569488525,
"learning_rate": 5.820211763083494e-07,
"loss": 1.5547,
"step": 266
},
{
"epoch": 0.85,
"grad_norm": 2.5959739685058594,
"learning_rate": 5.579842881697556e-07,
"loss": 1.5372,
"step": 267
},
{
"epoch": 0.85,
"grad_norm": 2.1984755992889404,
"learning_rate": 5.344249190660427e-07,
"loss": 1.6001,
"step": 268
},
{
"epoch": 0.86,
"grad_norm": 2.3963303565979004,
"learning_rate": 5.113456016418305e-07,
"loss": 1.6344,
"step": 269
},
{
"epoch": 0.86,
"grad_norm": 4.728706359863281,
"learning_rate": 4.88748816935934e-07,
"loss": 1.5381,
"step": 270
},
{
"epoch": 0.86,
"grad_norm": 2.736025333404541,
"learning_rate": 4.666369941146376e-07,
"loss": 1.5458,
"step": 271
},
{
"epoch": 0.87,
"grad_norm": 3.1046762466430664,
"learning_rate": 4.4501251021057566e-07,
"loss": 1.5871,
"step": 272
},
{
"epoch": 0.87,
"grad_norm": 3.1005470752716064,
"learning_rate": 4.2387768986718644e-07,
"loss": 1.545,
"step": 273
},
{
"epoch": 0.87,
"grad_norm": 5.466526985168457,
"learning_rate": 4.03234805088818e-07,
"loss": 1.5337,
"step": 274
},
{
"epoch": 0.88,
"grad_norm": 2.6507673263549805,
"learning_rate": 3.8308607499648765e-07,
"loss": 1.5436,
"step": 275
},
{
"epoch": 0.88,
"grad_norm": 2.5536324977874756,
"learning_rate": 3.634336655893189e-07,
"loss": 1.5853,
"step": 276
},
{
"epoch": 0.88,
"grad_norm": 3.4438483715057373,
"learning_rate": 3.4427968951170287e-07,
"loss": 1.5655,
"step": 277
},
{
"epoch": 0.89,
"grad_norm": 2.799037456512451,
"learning_rate": 3.256262058261816e-07,
"loss": 1.6025,
"step": 278
},
{
"epoch": 0.89,
"grad_norm": 2.272508144378662,
"learning_rate": 3.0747521979210436e-07,
"loss": 1.5339,
"step": 279
},
{
"epoch": 0.89,
"grad_norm": 2.6367502212524414,
"learning_rate": 2.8982868265005457e-07,
"loss": 1.487,
"step": 280
},
{
"epoch": 0.9,
"grad_norm": 2.486656665802002,
"learning_rate": 2.726884914120936e-07,
"loss": 1.5738,
"step": 281
},
{
"epoch": 0.9,
"grad_norm": 2.579956293106079,
"learning_rate": 2.5605648865783315e-07,
"loss": 1.5955,
"step": 282
},
{
"epoch": 0.9,
"grad_norm": 2.506580114364624,
"learning_rate": 2.399344623363503e-07,
"loss": 1.5778,
"step": 283
},
{
"epoch": 0.9,
"grad_norm": 2.4960708618164062,
"learning_rate": 2.2432414557399197e-07,
"loss": 1.5922,
"step": 284
},
{
"epoch": 0.91,
"grad_norm": 3.1219966411590576,
"learning_rate": 2.0922721648805045e-07,
"loss": 1.5548,
"step": 285
},
{
"epoch": 0.91,
"grad_norm": 2.500851631164551,
"learning_rate": 1.9464529800637731e-07,
"loss": 1.5491,
"step": 286
},
{
"epoch": 0.91,
"grad_norm": 2.394024133682251,
"learning_rate": 1.805799576929107e-07,
"loss": 1.5433,
"step": 287
},
{
"epoch": 0.92,
"grad_norm": 2.6824209690093994,
"learning_rate": 1.6703270757916e-07,
"loss": 1.5465,
"step": 288
},
{
"epoch": 0.92,
"grad_norm": 2.4885001182556152,
"learning_rate": 1.540050040016694e-07,
"loss": 1.5682,
"step": 289
},
{
"epoch": 0.92,
"grad_norm": 4.021599769592285,
"learning_rate": 1.414982474454524e-07,
"loss": 1.5516,
"step": 290
},
{
"epoch": 0.93,
"grad_norm": 2.3498406410217285,
"learning_rate": 1.2951378239344337e-07,
"loss": 1.5837,
"step": 291
},
{
"epoch": 0.93,
"grad_norm": 2.3095269203186035,
"learning_rate": 1.1805289718196499e-07,
"loss": 1.5519,
"step": 292
},
{
"epoch": 0.93,
"grad_norm": 2.113964080810547,
"learning_rate": 1.0711682386222943e-07,
"loss": 1.5897,
"step": 293
},
{
"epoch": 0.94,
"grad_norm": 2.6248152256011963,
"learning_rate": 9.670673806789543e-08,
"loss": 1.5596,
"step": 294
},
{
"epoch": 0.94,
"grad_norm": 2.7190983295440674,
"learning_rate": 8.682375888868167e-08,
"loss": 1.5829,
"step": 295
},
{
"epoch": 0.94,
"grad_norm": 2.4610188007354736,
"learning_rate": 7.746894875007016e-08,
"loss": 1.6058,
"step": 296
},
{
"epoch": 0.95,
"grad_norm": 3.32197904586792,
"learning_rate": 6.864331329909102e-08,
"loss": 1.6013,
"step": 297
},
{
"epoch": 0.95,
"grad_norm": 3.1023738384246826,
"learning_rate": 6.034780129621664e-08,
"loss": 1.5767,
"step": 298
},
{
"epoch": 0.95,
"grad_norm": 2.545488119125366,
"learning_rate": 5.258330451336724e-08,
"loss": 1.5267,
"step": 299
},
{
"epoch": 0.96,
"grad_norm": 2.106576442718506,
"learning_rate": 4.535065763804802e-08,
"loss": 1.5447,
"step": 300
},
{
"epoch": 0.96,
"grad_norm": 2.0030553340911865,
"learning_rate": 3.8650638183617695e-08,
"loss": 1.5714,
"step": 301
},
{
"epoch": 0.96,
"grad_norm": 2.5803191661834717,
"learning_rate": 3.248396640570528e-08,
"loss": 1.5273,
"step": 302
},
{
"epoch": 0.97,
"grad_norm": 2.743903160095215,
"learning_rate": 2.685130522478485e-08,
"loss": 1.5455,
"step": 303
},
{
"epoch": 0.97,
"grad_norm": 2.4540319442749023,
"learning_rate": 2.1753260154906973e-08,
"loss": 1.5372,
"step": 304
},
{
"epoch": 0.97,
"grad_norm": 2.002195358276367,
"learning_rate": 1.7190379238609666e-08,
"loss": 1.5892,
"step": 305
},
{
"epoch": 0.97,
"grad_norm": 2.3258426189422607,
"learning_rate": 1.3163152988000527e-08,
"loss": 1.5832,
"step": 306
},
{
"epoch": 0.98,
"grad_norm": 3.36910343170166,
"learning_rate": 9.672014332028357e-09,
"loss": 1.5319,
"step": 307
},
{
"epoch": 0.98,
"grad_norm": 2.2311582565307617,
"learning_rate": 6.717338569942611e-09,
"loss": 1.5475,
"step": 308
},
{
"epoch": 0.98,
"grad_norm": 3.657569646835327,
"learning_rate": 4.299443330947895e-09,
"loss": 1.5227,
"step": 309
},
{
"epoch": 0.99,
"grad_norm": 2.1514527797698975,
"learning_rate": 2.4185885400596076e-09,
"loss": 1.5543,
"step": 310
},
{
"epoch": 0.99,
"grad_norm": 2.7922263145446777,
"learning_rate": 1.0749763901607425e-09,
"loss": 1.575,
"step": 311
},
{
"epoch": 0.99,
"grad_norm": 2.211779832839966,
"learning_rate": 2.6875132026760173e-10,
"loss": 1.5689,
"step": 312
},
{
"epoch": 1.0,
"grad_norm": 2.225865125656128,
"learning_rate": 0.0,
"loss": 1.5786,
"step": 313
},
{
"epoch": 1.0,
"step": 313,
"total_flos": 399880339259392.0,
"train_loss": 1.6020259019284964,
"train_runtime": 68378.6303,
"train_samples_per_second": 0.588,
"train_steps_per_second": 0.005
}
],
"logging_steps": 1.0,
"max_steps": 313,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 50,
"total_flos": 399880339259392.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}