{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.8531816565943833,
"eval_steps": 500,
"global_step": 300,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0028439388553146107,
"grad_norm": 1512.0,
"learning_rate": 2.0000000000000003e-06,
"loss": 10.5863,
"step": 1
},
{
"epoch": 0.005687877710629221,
"grad_norm": 2544.0,
"learning_rate": 4.000000000000001e-06,
"loss": 10.6061,
"step": 2
},
{
"epoch": 0.008531816565943833,
"grad_norm": 1344.0,
"learning_rate": 6e-06,
"loss": 9.0523,
"step": 3
},
{
"epoch": 0.011375755421258443,
"grad_norm": 604.0,
"learning_rate": 8.000000000000001e-06,
"loss": 6.3641,
"step": 4
},
{
"epoch": 0.014219694276573054,
"grad_norm": 358.0,
"learning_rate": 1e-05,
"loss": 5.1062,
"step": 5
},
{
"epoch": 0.017063633131887666,
"grad_norm": 402.0,
"learning_rate": 9.999793896876868e-06,
"loss": 4.2008,
"step": 6
},
{
"epoch": 0.019907571987202274,
"grad_norm": 155.0,
"learning_rate": 9.999175604498867e-06,
"loss": 3.4295,
"step": 7
},
{
"epoch": 0.022751510842516885,
"grad_norm": 81.0,
"learning_rate": 9.998145173838796e-06,
"loss": 2.7242,
"step": 8
},
{
"epoch": 0.025595449697831497,
"grad_norm": 40.0,
"learning_rate": 9.996702689846645e-06,
"loss": 2.4419,
"step": 9
},
{
"epoch": 0.028439388553146108,
"grad_norm": 28.5,
"learning_rate": 9.994848271442595e-06,
"loss": 2.1389,
"step": 10
},
{
"epoch": 0.031283327408460716,
"grad_norm": 42.75,
"learning_rate": 9.992582071507217e-06,
"loss": 1.9961,
"step": 11
},
{
"epoch": 0.03412726626377533,
"grad_norm": 17.375,
"learning_rate": 9.989904276868865e-06,
"loss": 1.9048,
"step": 12
},
{
"epoch": 0.03697120511908994,
"grad_norm": 32.5,
"learning_rate": 9.986815108288273e-06,
"loss": 1.7912,
"step": 13
},
{
"epoch": 0.03981514397440455,
"grad_norm": 15.25,
"learning_rate": 9.98331482044036e-06,
"loss": 1.8195,
"step": 14
},
{
"epoch": 0.04265908282971916,
"grad_norm": 11.8125,
"learning_rate": 9.979403701893226e-06,
"loss": 1.7316,
"step": 15
},
{
"epoch": 0.04550302168503377,
"grad_norm": 14.3125,
"learning_rate": 9.975082075084375e-06,
"loss": 1.7444,
"step": 16
},
{
"epoch": 0.048346960540348385,
"grad_norm": 9.0,
"learning_rate": 9.970350296294114e-06,
"loss": 1.7287,
"step": 17
},
{
"epoch": 0.05119089939566299,
"grad_norm": 9.5,
"learning_rate": 9.9652087556162e-06,
"loss": 1.6426,
"step": 18
},
{
"epoch": 0.0540348382509776,
"grad_norm": 26.25,
"learning_rate": 9.959657876925671e-06,
"loss": 1.6523,
"step": 19
},
{
"epoch": 0.056878777106292217,
"grad_norm": 8.4375,
"learning_rate": 9.9536981178439e-06,
"loss": 1.6343,
"step": 20
},
{
"epoch": 0.059722715961606825,
"grad_norm": 13.4375,
"learning_rate": 9.94732996970087e-06,
"loss": 1.6042,
"step": 21
},
{
"epoch": 0.06256665481692143,
"grad_norm": 8.75,
"learning_rate": 9.940553957494669e-06,
"loss": 1.6597,
"step": 22
},
{
"epoch": 0.06541059367223605,
"grad_norm": 7.90625,
"learning_rate": 9.93337063984821e-06,
"loss": 1.6111,
"step": 23
},
{
"epoch": 0.06825453252755066,
"grad_norm": 8.25,
"learning_rate": 9.925780608963173e-06,
"loss": 1.5849,
"step": 24
},
{
"epoch": 0.07109847138286526,
"grad_norm": 8.0625,
"learning_rate": 9.917784490571188e-06,
"loss": 1.5479,
"step": 25
},
{
"epoch": 0.07394241023817988,
"grad_norm": 6.78125,
"learning_rate": 9.909382943882238e-06,
"loss": 1.5466,
"step": 26
},
{
"epoch": 0.0767863490934945,
"grad_norm": 7.8125,
"learning_rate": 9.900576661530334e-06,
"loss": 1.5513,
"step": 27
},
{
"epoch": 0.0796302879488091,
"grad_norm": 5.65625,
"learning_rate": 9.89136636951639e-06,
"loss": 1.5127,
"step": 28
},
{
"epoch": 0.08247422680412371,
"grad_norm": 5.625,
"learning_rate": 9.881752827148391e-06,
"loss": 1.5828,
"step": 29
},
{
"epoch": 0.08531816565943832,
"grad_norm": 6.75,
"learning_rate": 9.871736826978776e-06,
"loss": 1.5587,
"step": 30
},
{
"epoch": 0.08816210451475294,
"grad_norm": 9.75,
"learning_rate": 9.861319194739109e-06,
"loss": 1.4726,
"step": 31
},
{
"epoch": 0.09100604337006754,
"grad_norm": 6.0,
"learning_rate": 9.85050078927201e-06,
"loss": 1.509,
"step": 32
},
{
"epoch": 0.09384998222538216,
"grad_norm": 5.21875,
"learning_rate": 9.83928250246034e-06,
"loss": 1.5211,
"step": 33
},
{
"epoch": 0.09669392108069677,
"grad_norm": 6.25,
"learning_rate": 9.82766525915368e-06,
"loss": 1.5568,
"step": 34
},
{
"epoch": 0.09953785993601137,
"grad_norm": 7.625,
"learning_rate": 9.815650017092078e-06,
"loss": 1.5097,
"step": 35
},
{
"epoch": 0.10238179879132599,
"grad_norm": 5.9375,
"learning_rate": 9.803237766827098e-06,
"loss": 1.5636,
"step": 36
},
{
"epoch": 0.1052257376466406,
"grad_norm": 4.875,
"learning_rate": 9.790429531640163e-06,
"loss": 1.5031,
"step": 37
},
{
"epoch": 0.1080696765019552,
"grad_norm": 4.3125,
"learning_rate": 9.77722636745818e-06,
"loss": 1.4816,
"step": 38
},
{
"epoch": 0.11091361535726982,
"grad_norm": 5.125,
"learning_rate": 9.763629362766495e-06,
"loss": 1.479,
"step": 39
},
{
"epoch": 0.11375755421258443,
"grad_norm": 4.96875,
"learning_rate": 9.749639638519167e-06,
"loss": 1.4666,
"step": 40
},
{
"epoch": 0.11660149306789903,
"grad_norm": 5.90625,
"learning_rate": 9.735258348046538e-06,
"loss": 1.5102,
"step": 41
},
{
"epoch": 0.11944543192321365,
"grad_norm": 7.34375,
"learning_rate": 9.720486676960157e-06,
"loss": 1.4814,
"step": 42
},
{
"epoch": 0.12228937077852826,
"grad_norm": 5.59375,
"learning_rate": 9.705325843055045e-06,
"loss": 1.4913,
"step": 43
},
{
"epoch": 0.12513330963384287,
"grad_norm": 5.8125,
"learning_rate": 9.689777096209287e-06,
"loss": 1.4853,
"step": 44
},
{
"epoch": 0.12797724848915748,
"grad_norm": 5.625,
"learning_rate": 9.673841718281e-06,
"loss": 1.4569,
"step": 45
},
{
"epoch": 0.1308211873444721,
"grad_norm": 4.65625,
"learning_rate": 9.657521023002644e-06,
"loss": 1.4717,
"step": 46
},
{
"epoch": 0.1336651261997867,
"grad_norm": 4.5625,
"learning_rate": 9.64081635587273e-06,
"loss": 1.455,
"step": 47
},
{
"epoch": 0.13650906505510133,
"grad_norm": 4.0,
"learning_rate": 9.623729094044882e-06,
"loss": 1.4136,
"step": 48
},
{
"epoch": 0.1393530039104159,
"grad_norm": 4.71875,
"learning_rate": 9.606260646214314e-06,
"loss": 1.4875,
"step": 49
},
{
"epoch": 0.14219694276573053,
"grad_norm": 4.6875,
"learning_rate": 9.588412452501686e-06,
"loss": 1.4892,
"step": 50
},
{
"epoch": 0.14504088162104514,
"grad_norm": 4.3125,
"learning_rate": 9.570185984334383e-06,
"loss": 1.4873,
"step": 51
},
{
"epoch": 0.14788482047635976,
"grad_norm": 13.0,
"learning_rate": 9.551582744325213e-06,
"loss": 1.4418,
"step": 52
},
{
"epoch": 0.15072875933167437,
"grad_norm": 8.6875,
"learning_rate": 9.532604266148521e-06,
"loss": 1.5109,
"step": 53
},
{
"epoch": 0.153572698186989,
"grad_norm": 11.875,
"learning_rate": 9.513252114413756e-06,
"loss": 1.4517,
"step": 54
},
{
"epoch": 0.1564166370423036,
"grad_norm": 6.0625,
"learning_rate": 9.493527884536487e-06,
"loss": 1.4724,
"step": 55
},
{
"epoch": 0.1592605758976182,
"grad_norm": 4.90625,
"learning_rate": 9.473433202606859e-06,
"loss": 1.4598,
"step": 56
},
{
"epoch": 0.1621045147529328,
"grad_norm": 3.984375,
"learning_rate": 9.452969725255558e-06,
"loss": 1.449,
"step": 57
},
{
"epoch": 0.16494845360824742,
"grad_norm": 6.71875,
"learning_rate": 9.432139139517222e-06,
"loss": 1.4507,
"step": 58
},
{
"epoch": 0.16779239246356203,
"grad_norm": 6.78125,
"learning_rate": 9.410943162691359e-06,
"loss": 1.4529,
"step": 59
},
{
"epoch": 0.17063633131887665,
"grad_norm": 5.3125,
"learning_rate": 9.389383542200779e-06,
"loss": 1.4399,
"step": 60
},
{
"epoch": 0.17348027017419126,
"grad_norm": 5.875,
"learning_rate": 9.367462055447528e-06,
"loss": 1.4295,
"step": 61
},
{
"epoch": 0.17632420902950588,
"grad_norm": 4.25,
"learning_rate": 9.34518050966636e-06,
"loss": 1.4768,
"step": 62
},
{
"epoch": 0.17916814788482047,
"grad_norm": 8.3125,
"learning_rate": 9.322540741775745e-06,
"loss": 1.4094,
"step": 63
},
{
"epoch": 0.18201208674013508,
"grad_norm": 4.90625,
"learning_rate": 9.299544618226428e-06,
"loss": 1.4451,
"step": 64
},
{
"epoch": 0.1848560255954497,
"grad_norm": 3.5,
"learning_rate": 9.276194034847565e-06,
"loss": 1.4308,
"step": 65
},
{
"epoch": 0.1876999644507643,
"grad_norm": 4.0,
"learning_rate": 9.252490916690422e-06,
"loss": 1.4588,
"step": 66
},
{
"epoch": 0.19054390330607893,
"grad_norm": 4.0625,
"learning_rate": 9.228437217869668e-06,
"loss": 1.447,
"step": 67
},
{
"epoch": 0.19338784216139354,
"grad_norm": 4.375,
"learning_rate": 9.204034921402282e-06,
"loss": 1.5121,
"step": 68
},
{
"epoch": 0.19623178101670813,
"grad_norm": 5.28125,
"learning_rate": 9.179286039044072e-06,
"loss": 1.3924,
"step": 69
},
{
"epoch": 0.19907571987202274,
"grad_norm": 4.75,
"learning_rate": 9.15419261112382e-06,
"loss": 1.4434,
"step": 70
},
{
"epoch": 0.20191965872733736,
"grad_norm": 4.125,
"learning_rate": 9.128756706375065e-06,
"loss": 1.4386,
"step": 71
},
{
"epoch": 0.20476359758265197,
"grad_norm": 4.1875,
"learning_rate": 9.102980421765575e-06,
"loss": 1.4598,
"step": 72
},
{
"epoch": 0.2076075364379666,
"grad_norm": 4.625,
"learning_rate": 9.076865882324453e-06,
"loss": 1.4183,
"step": 73
},
{
"epoch": 0.2104514752932812,
"grad_norm": 3.953125,
"learning_rate": 9.050415240966953e-06,
"loss": 1.3908,
"step": 74
},
{
"epoch": 0.21329541414859582,
"grad_norm": 3.625,
"learning_rate": 9.023630678316994e-06,
"loss": 1.3925,
"step": 75
},
{
"epoch": 0.2161393530039104,
"grad_norm": 4.1875,
"learning_rate": 8.996514402527383e-06,
"loss": 1.4293,
"step": 76
},
{
"epoch": 0.21898329185922502,
"grad_norm": 4.53125,
"learning_rate": 8.969068649097766e-06,
"loss": 1.423,
"step": 77
},
{
"epoch": 0.22182723071453964,
"grad_norm": 4.59375,
"learning_rate": 8.941295680690347e-06,
"loss": 1.409,
"step": 78
},
{
"epoch": 0.22467116956985425,
"grad_norm": 4.71875,
"learning_rate": 8.913197786943335e-06,
"loss": 1.4607,
"step": 79
},
{
"epoch": 0.22751510842516887,
"grad_norm": 4.5625,
"learning_rate": 8.884777284282193e-06,
"loss": 1.4299,
"step": 80
},
{
"epoch": 0.23035904728048348,
"grad_norm": 4.71875,
"learning_rate": 8.856036515728666e-06,
"loss": 1.4253,
"step": 81
},
{
"epoch": 0.23320298613579807,
"grad_norm": 7.40625,
"learning_rate": 8.826977850707612e-06,
"loss": 1.3903,
"step": 82
},
{
"epoch": 0.23604692499111268,
"grad_norm": 5.875,
"learning_rate": 8.797603684851685e-06,
"loss": 1.3905,
"step": 83
},
{
"epoch": 0.2388908638464273,
"grad_norm": 7.78125,
"learning_rate": 8.767916439803808e-06,
"loss": 1.3913,
"step": 84
},
{
"epoch": 0.2417348027017419,
"grad_norm": 3.859375,
"learning_rate": 8.737918563017553e-06,
"loss": 1.373,
"step": 85
},
{
"epoch": 0.24457874155705653,
"grad_norm": 3.640625,
"learning_rate": 8.707612527555356e-06,
"loss": 1.4412,
"step": 86
},
{
"epoch": 0.24742268041237114,
"grad_norm": 4.0625,
"learning_rate": 8.677000831884639e-06,
"loss": 1.4524,
"step": 87
},
{
"epoch": 0.25026661926768573,
"grad_norm": 4.40625,
"learning_rate": 8.646085999671838e-06,
"loss": 1.3731,
"step": 88
},
{
"epoch": 0.2531105581230004,
"grad_norm": 5.46875,
"learning_rate": 8.614870579574338e-06,
"loss": 1.432,
"step": 89
},
{
"epoch": 0.25595449697831496,
"grad_norm": 5.0,
"learning_rate": 8.58335714503037e-06,
"loss": 1.4008,
"step": 90
},
{
"epoch": 0.2587984358336296,
"grad_norm": 4.21875,
"learning_rate": 8.551548294046843e-06,
"loss": 1.4155,
"step": 91
},
{
"epoch": 0.2616423746889442,
"grad_norm": 3.6875,
"learning_rate": 8.519446648985173e-06,
"loss": 1.4792,
"step": 92
},
{
"epoch": 0.2644863135442588,
"grad_norm": 4.3125,
"learning_rate": 8.487054856345081e-06,
"loss": 1.3851,
"step": 93
},
{
"epoch": 0.2673302523995734,
"grad_norm": 8.875,
"learning_rate": 8.454375586546418e-06,
"loss": 1.3887,
"step": 94
},
{
"epoch": 0.270174191254888,
"grad_norm": 4.53125,
"learning_rate": 8.42141153370901e-06,
"loss": 1.3538,
"step": 95
},
{
"epoch": 0.27301813011020265,
"grad_norm": 5.0625,
"learning_rate": 8.388165415430551e-06,
"loss": 1.3666,
"step": 96
},
{
"epoch": 0.27586206896551724,
"grad_norm": 3.578125,
"learning_rate": 8.35463997256257e-06,
"loss": 1.3942,
"step": 97
},
{
"epoch": 0.2787060078208318,
"grad_norm": 3.765625,
"learning_rate": 8.320837968984456e-06,
"loss": 1.4039,
"step": 98
},
{
"epoch": 0.28154994667614647,
"grad_norm": 3.5625,
"learning_rate": 8.28676219137561e-06,
"loss": 1.3956,
"step": 99
},
{
"epoch": 0.28439388553146105,
"grad_norm": 3.53125,
"learning_rate": 8.25241544898571e-06,
"loss": 1.3483,
"step": 100
},
{
"epoch": 0.2872378243867757,
"grad_norm": 5.125,
"learning_rate": 8.217800573403105e-06,
"loss": 1.4364,
"step": 101
},
{
"epoch": 0.2900817632420903,
"grad_norm": 4.46875,
"learning_rate": 8.18292041832138e-06,
"loss": 1.4297,
"step": 102
},
{
"epoch": 0.2929257020974049,
"grad_norm": 17.0,
"learning_rate": 8.147777859304095e-06,
"loss": 1.4185,
"step": 103
},
{
"epoch": 0.2957696409527195,
"grad_norm": 3.65625,
"learning_rate": 8.112375793547718e-06,
"loss": 1.3912,
"step": 104
},
{
"epoch": 0.2986135798080341,
"grad_norm": 3.78125,
"learning_rate": 8.076717139642775e-06,
"loss": 1.3984,
"step": 105
},
{
"epoch": 0.30145751866334874,
"grad_norm": 4.59375,
"learning_rate": 8.040804837333243e-06,
"loss": 1.3932,
"step": 106
},
{
"epoch": 0.30430145751866333,
"grad_norm": 4.28125,
"learning_rate": 8.004641847274182e-06,
"loss": 1.4467,
"step": 107
},
{
"epoch": 0.307145396373978,
"grad_norm": 4.5625,
"learning_rate": 7.968231150787674e-06,
"loss": 1.4019,
"step": 108
},
{
"epoch": 0.30998933522929256,
"grad_norm": 4.8125,
"learning_rate": 7.931575749617027e-06,
"loss": 1.3639,
"step": 109
},
{
"epoch": 0.3128332740846072,
"grad_norm": 4.25,
"learning_rate": 7.894678665679298e-06,
"loss": 1.4065,
"step": 110
},
{
"epoch": 0.3156772129399218,
"grad_norm": 3.46875,
"learning_rate": 7.857542940816183e-06,
"loss": 1.4385,
"step": 111
},
{
"epoch": 0.3185211517952364,
"grad_norm": 3.671875,
"learning_rate": 7.820171636543233e-06,
"loss": 1.4302,
"step": 112
},
{
"epoch": 0.321365090650551,
"grad_norm": 4.6875,
"learning_rate": 7.782567833797458e-06,
"loss": 1.4194,
"step": 113
},
{
"epoch": 0.3242090295058656,
"grad_norm": 7.28125,
"learning_rate": 7.744734632683332e-06,
"loss": 1.424,
"step": 114
},
{
"epoch": 0.32705296836118025,
"grad_norm": 3.59375,
"learning_rate": 7.70667515221722e-06,
"loss": 1.3662,
"step": 115
},
{
"epoch": 0.32989690721649484,
"grad_norm": 4.78125,
"learning_rate": 7.668392530070238e-06,
"loss": 1.3813,
"step": 116
},
{
"epoch": 0.3327408460718095,
"grad_norm": 3.921875,
"learning_rate": 7.629889922309576e-06,
"loss": 1.3609,
"step": 117
},
{
"epoch": 0.33558478492712407,
"grad_norm": 4.75,
"learning_rate": 7.5911705031383235e-06,
"loss": 1.3524,
"step": 118
},
{
"epoch": 0.33842872378243866,
"grad_norm": 3.96875,
"learning_rate": 7.55223746463376e-06,
"loss": 1.4071,
"step": 119
},
{
"epoch": 0.3412726626377533,
"grad_norm": 3.359375,
"learning_rate": 7.513094016484225e-06,
"loss": 1.3828,
"step": 120
},
{
"epoch": 0.3441166014930679,
"grad_norm": 3.78125,
"learning_rate": 7.473743385724478e-06,
"loss": 1.4123,
"step": 121
},
{
"epoch": 0.34696054034838253,
"grad_norm": 3.265625,
"learning_rate": 7.434188816469681e-06,
"loss": 1.3437,
"step": 122
},
{
"epoch": 0.3498044792036971,
"grad_norm": 9.6875,
"learning_rate": 7.394433569647935e-06,
"loss": 1.3295,
"step": 123
},
{
"epoch": 0.35264841805901176,
"grad_norm": 4.0,
"learning_rate": 7.354480922731454e-06,
"loss": 1.4302,
"step": 124
},
{
"epoch": 0.35549235691432635,
"grad_norm": 3.546875,
"learning_rate": 7.3143341694663604e-06,
"loss": 1.4021,
"step": 125
},
{
"epoch": 0.35833629576964093,
"grad_norm": 4.65625,
"learning_rate": 7.273996619601146e-06,
"loss": 1.3688,
"step": 126
},
{
"epoch": 0.3611802346249556,
"grad_norm": 4.84375,
"learning_rate": 7.233471598613815e-06,
"loss": 1.4139,
"step": 127
},
{
"epoch": 0.36402417348027016,
"grad_norm": 3.578125,
"learning_rate": 7.192762447437722e-06,
"loss": 1.3491,
"step": 128
},
{
"epoch": 0.3668681123355848,
"grad_norm": 4.25,
"learning_rate": 7.151872522186147e-06,
"loss": 1.3805,
"step": 129
},
{
"epoch": 0.3697120511908994,
"grad_norm": 3.53125,
"learning_rate": 7.110805193875607e-06,
"loss": 1.3782,
"step": 130
},
{
"epoch": 0.372555990046214,
"grad_norm": 5.46875,
"learning_rate": 7.0695638481479565e-06,
"loss": 1.3411,
"step": 131
},
{
"epoch": 0.3753999289015286,
"grad_norm": 3.5625,
"learning_rate": 7.028151884991254e-06,
"loss": 1.3842,
"step": 132
},
{
"epoch": 0.3782438677568432,
"grad_norm": 4.75,
"learning_rate": 6.986572718459479e-06,
"loss": 1.3537,
"step": 133
},
{
"epoch": 0.38108780661215785,
"grad_norm": 3.9375,
"learning_rate": 6.94482977639106e-06,
"loss": 1.3981,
"step": 134
},
{
"epoch": 0.38393174546747244,
"grad_norm": 4.96875,
"learning_rate": 6.902926500126292e-06,
"loss": 1.4284,
"step": 135
},
{
"epoch": 0.3867756843227871,
"grad_norm": 4.4375,
"learning_rate": 6.8608663442236156e-06,
"loss": 1.4013,
"step": 136
},
{
"epoch": 0.38961962317810167,
"grad_norm": 3.71875,
"learning_rate": 6.818652776174828e-06,
"loss": 1.436,
"step": 137
},
{
"epoch": 0.39246356203341626,
"grad_norm": 4.1875,
"learning_rate": 6.776289276119214e-06,
"loss": 1.3652,
"step": 138
},
{
"epoch": 0.3953075008887309,
"grad_norm": 3.671875,
"learning_rate": 6.733779336556643e-06,
"loss": 1.4186,
"step": 139
},
{
"epoch": 0.3981514397440455,
"grad_norm": 11.3125,
"learning_rate": 6.691126462059636e-06,
"loss": 1.3899,
"step": 140
},
{
"epoch": 0.40099537859936013,
"grad_norm": 3.234375,
"learning_rate": 6.648334168984452e-06,
"loss": 1.3971,
"step": 141
},
{
"epoch": 0.4038393174546747,
"grad_norm": 5.0,
"learning_rate": 6.60540598518119e-06,
"loss": 1.4499,
"step": 142
},
{
"epoch": 0.40668325630998936,
"grad_norm": 3.90625,
"learning_rate": 6.562345449702952e-06,
"loss": 1.3721,
"step": 143
},
{
"epoch": 0.40952719516530395,
"grad_norm": 5.875,
"learning_rate": 6.519156112514074e-06,
"loss": 1.4166,
"step": 144
},
{
"epoch": 0.41237113402061853,
"grad_norm": 6.0,
"learning_rate": 6.4758415341974705e-06,
"loss": 1.3782,
"step": 145
},
{
"epoch": 0.4152150728759332,
"grad_norm": 5.21875,
"learning_rate": 6.432405285661087e-06,
"loss": 1.3798,
"step": 146
},
{
"epoch": 0.41805901173124776,
"grad_norm": 13.5625,
"learning_rate": 6.388850947843517e-06,
"loss": 1.3277,
"step": 147
},
{
"epoch": 0.4209029505865624,
"grad_norm": 5.46875,
"learning_rate": 6.345182111418781e-06,
"loss": 1.3801,
"step": 148
},
{
"epoch": 0.423746889441877,
"grad_norm": 3.703125,
"learning_rate": 6.301402376500306e-06,
"loss": 1.3789,
"step": 149
},
{
"epoch": 0.42659082829719164,
"grad_norm": 3.734375,
"learning_rate": 6.257515352344131e-06,
"loss": 1.3935,
"step": 150
},
{
"epoch": 0.4294347671525062,
"grad_norm": 3.46875,
"learning_rate": 6.213524657051354e-06,
"loss": 1.3667,
"step": 151
},
{
"epoch": 0.4322787060078208,
"grad_norm": 4.625,
"learning_rate": 6.16943391726985e-06,
"loss": 1.3096,
"step": 152
},
{
"epoch": 0.43512264486313545,
"grad_norm": 5.8125,
"learning_rate": 6.125246767895287e-06,
"loss": 1.3607,
"step": 153
},
{
"epoch": 0.43796658371845004,
"grad_norm": 3.421875,
"learning_rate": 6.0809668517714615e-06,
"loss": 1.3842,
"step": 154
},
{
"epoch": 0.4408105225737647,
"grad_norm": 12.6875,
"learning_rate": 6.036597819389972e-06,
"loss": 1.3758,
"step": 155
},
{
"epoch": 0.44365446142907927,
"grad_norm": 3.984375,
"learning_rate": 5.992143328589282e-06,
"loss": 1.3857,
"step": 156
},
{
"epoch": 0.44649840028439386,
"grad_norm": 3.734375,
"learning_rate": 5.947607044253142e-06,
"loss": 1.4109,
"step": 157
},
{
"epoch": 0.4493423391397085,
"grad_norm": 4.125,
"learning_rate": 5.902992638008475e-06,
"loss": 1.3819,
"step": 158
},
{
"epoch": 0.4521862779950231,
"grad_norm": 3.125,
"learning_rate": 5.858303787922663e-06,
"loss": 1.3695,
"step": 159
},
{
"epoch": 0.45503021685033773,
"grad_norm": 3.484375,
"learning_rate": 5.8135441782003354e-06,
"loss": 1.3792,
"step": 160
},
{
"epoch": 0.4578741557056523,
"grad_norm": 7.46875,
"learning_rate": 5.768717498879635e-06,
"loss": 1.3709,
"step": 161
},
{
"epoch": 0.46071809456096696,
"grad_norm": 3.609375,
"learning_rate": 5.723827445528003e-06,
"loss": 1.3485,
"step": 162
},
{
"epoch": 0.46356203341628155,
"grad_norm": 3.96875,
"learning_rate": 5.67887771893752e-06,
"loss": 1.4539,
"step": 163
},
{
"epoch": 0.46640597227159614,
"grad_norm": 3.359375,
"learning_rate": 5.633872024819796e-06,
"loss": 1.3531,
"step": 164
},
{
"epoch": 0.4692499111269108,
"grad_norm": 4.15625,
"learning_rate": 5.588814073500481e-06,
"loss": 1.362,
"step": 165
},
{
"epoch": 0.47209384998222537,
"grad_norm": 3.953125,
"learning_rate": 5.543707579613367e-06,
"loss": 1.3842,
"step": 166
},
{
"epoch": 0.47493778883754,
"grad_norm": 3.859375,
"learning_rate": 5.498556261794161e-06,
"loss": 1.4086,
"step": 167
},
{
"epoch": 0.4777817276928546,
"grad_norm": 3.921875,
"learning_rate": 5.45336384237391e-06,
"loss": 1.3866,
"step": 168
},
{
"epoch": 0.48062566654816924,
"grad_norm": 4.90625,
"learning_rate": 5.4081340470721286e-06,
"loss": 1.3711,
"step": 169
},
{
"epoch": 0.4834696054034838,
"grad_norm": 6.25,
"learning_rate": 5.362870604689643e-06,
"loss": 1.3825,
"step": 170
},
{
"epoch": 0.4863135442587984,
"grad_norm": 4.15625,
"learning_rate": 5.31757724680119e-06,
"loss": 1.3896,
"step": 171
},
{
"epoch": 0.48915748311411306,
"grad_norm": 3.25,
"learning_rate": 5.272257707447776e-06,
"loss": 1.3977,
"step": 172
},
{
"epoch": 0.49200142196942764,
"grad_norm": 3.765625,
"learning_rate": 5.22691572282884e-06,
"loss": 1.3932,
"step": 173
},
{
"epoch": 0.4948453608247423,
"grad_norm": 3.5625,
"learning_rate": 5.18155503099424e-06,
"loss": 1.3748,
"step": 174
},
{
"epoch": 0.4976892996800569,
"grad_norm": 4.65625,
"learning_rate": 5.136179371536076e-06,
"loss": 1.3921,
"step": 175
},
{
"epoch": 0.5005332385353715,
"grad_norm": 5.71875,
"learning_rate": 5.090792485280401e-06,
"loss": 1.3433,
"step": 176
},
{
"epoch": 0.5033771773906861,
"grad_norm": 3.1875,
"learning_rate": 5.045398113978816e-06,
"loss": 1.3821,
"step": 177
},
{
"epoch": 0.5062211162460007,
"grad_norm": 5.3125,
"learning_rate": 5e-06,
"loss": 1.3923,
"step": 178
},
{
"epoch": 0.5090650551013153,
"grad_norm": 7.34375,
"learning_rate": 4.9546018860211845e-06,
"loss": 1.3391,
"step": 179
},
{
"epoch": 0.5119089939566299,
"grad_norm": 3.453125,
"learning_rate": 4.9092075147196005e-06,
"loss": 1.384,
"step": 180
},
{
"epoch": 0.5147529328119446,
"grad_norm": 4.03125,
"learning_rate": 4.863820628463925e-06,
"loss": 1.3679,
"step": 181
},
{
"epoch": 0.5175968716672592,
"grad_norm": 3.734375,
"learning_rate": 4.818444969005762e-06,
"loss": 1.3672,
"step": 182
},
{
"epoch": 0.5204408105225737,
"grad_norm": 5.125,
"learning_rate": 4.773084277171161e-06,
"loss": 1.3627,
"step": 183
},
{
"epoch": 0.5232847493778884,
"grad_norm": 3.484375,
"learning_rate": 4.727742292552225e-06,
"loss": 1.3618,
"step": 184
},
{
"epoch": 0.526128688233203,
"grad_norm": 3.703125,
"learning_rate": 4.682422753198812e-06,
"loss": 1.3619,
"step": 185
},
{
"epoch": 0.5289726270885176,
"grad_norm": 3.390625,
"learning_rate": 4.637129395310359e-06,
"loss": 1.3711,
"step": 186
},
{
"epoch": 0.5318165659438322,
"grad_norm": 3.40625,
"learning_rate": 4.591865952927873e-06,
"loss": 1.3999,
"step": 187
},
{
"epoch": 0.5346605047991468,
"grad_norm": 3.109375,
"learning_rate": 4.546636157626091e-06,
"loss": 1.3572,
"step": 188
},
{
"epoch": 0.5375044436544615,
"grad_norm": 4.21875,
"learning_rate": 4.501443738205841e-06,
"loss": 1.3415,
"step": 189
},
{
"epoch": 0.540348382509776,
"grad_norm": 6.53125,
"learning_rate": 4.456292420386635e-06,
"loss": 1.3792,
"step": 190
},
{
"epoch": 0.5431923213650907,
"grad_norm": 5.46875,
"learning_rate": 4.41118592649952e-06,
"loss": 1.4282,
"step": 191
},
{
"epoch": 0.5460362602204053,
"grad_norm": 3.78125,
"learning_rate": 4.366127975180204e-06,
"loss": 1.363,
"step": 192
},
{
"epoch": 0.5488801990757198,
"grad_norm": 3.40625,
"learning_rate": 4.321122281062481e-06,
"loss": 1.3839,
"step": 193
},
{
"epoch": 0.5517241379310345,
"grad_norm": 4.1875,
"learning_rate": 4.276172554471998e-06,
"loss": 1.3801,
"step": 194
},
{
"epoch": 0.5545680767863491,
"grad_norm": 4.25,
"learning_rate": 4.231282501120366e-06,
"loss": 1.3819,
"step": 195
},
{
"epoch": 0.5574120156416636,
"grad_norm": 4.75,
"learning_rate": 4.1864558217996645e-06,
"loss": 1.3207,
"step": 196
},
{
"epoch": 0.5602559544969783,
"grad_norm": 5.8125,
"learning_rate": 4.14169621207734e-06,
"loss": 1.4097,
"step": 197
},
{
"epoch": 0.5630998933522929,
"grad_norm": 14.3125,
"learning_rate": 4.0970073619915264e-06,
"loss": 1.4066,
"step": 198
},
{
"epoch": 0.5659438322076076,
"grad_norm": 3.15625,
"learning_rate": 4.05239295574686e-06,
"loss": 1.3636,
"step": 199
},
{
"epoch": 0.5687877710629221,
"grad_norm": 3.609375,
"learning_rate": 4.00785667141072e-06,
"loss": 1.3239,
"step": 200
},
{
"epoch": 0.5716317099182368,
"grad_norm": 4.5625,
"learning_rate": 3.963402180610028e-06,
"loss": 1.3242,
"step": 201
},
{
"epoch": 0.5744756487735514,
"grad_norm": 5.375,
"learning_rate": 3.919033148228542e-06,
"loss": 1.3622,
"step": 202
},
{
"epoch": 0.5773195876288659,
"grad_norm": 23.125,
"learning_rate": 3.874753232104714e-06,
"loss": 1.369,
"step": 203
},
{
"epoch": 0.5801635264841806,
"grad_norm": 4.0625,
"learning_rate": 3.830566082730151e-06,
"loss": 1.3779,
"step": 204
},
{
"epoch": 0.5830074653394952,
"grad_norm": 6.875,
"learning_rate": 3.7864753429486475e-06,
"loss": 1.3449,
"step": 205
},
{
"epoch": 0.5858514041948099,
"grad_norm": 3.359375,
"learning_rate": 3.7424846476558716e-06,
"loss": 1.364,
"step": 206
},
{
"epoch": 0.5886953430501244,
"grad_norm": 5.15625,
"learning_rate": 3.6985976234996957e-06,
"loss": 1.3829,
"step": 207
},
{
"epoch": 0.591539281905439,
"grad_norm": 4.875,
"learning_rate": 3.6548178885812203e-06,
"loss": 1.3564,
"step": 208
},
{
"epoch": 0.5943832207607537,
"grad_norm": 4.5625,
"learning_rate": 3.611149052156483e-06,
"loss": 1.399,
"step": 209
},
{
"epoch": 0.5972271596160682,
"grad_norm": 3.375,
"learning_rate": 3.5675947143389144e-06,
"loss": 1.3727,
"step": 210
},
{
"epoch": 0.6000710984713828,
"grad_norm": 4.5,
"learning_rate": 3.524158465802531e-06,
"loss": 1.3863,
"step": 211
},
{
"epoch": 0.6029150373266975,
"grad_norm": 3.984375,
"learning_rate": 3.4808438874859274e-06,
"loss": 1.3578,
"step": 212
},
{
"epoch": 0.6057589761820121,
"grad_norm": 9.4375,
"learning_rate": 3.437654550297049e-06,
"loss": 1.3384,
"step": 213
},
{
"epoch": 0.6086029150373267,
"grad_norm": 4.59375,
"learning_rate": 3.3945940148188117e-06,
"loss": 1.3619,
"step": 214
},
{
"epoch": 0.6114468538926413,
"grad_norm": 3.578125,
"learning_rate": 3.3516658310155493e-06,
"loss": 1.3741,
"step": 215
},
{
"epoch": 0.614290792747956,
"grad_norm": 5.09375,
"learning_rate": 3.3088735379403648e-06,
"loss": 1.3866,
"step": 216
},
{
"epoch": 0.6171347316032705,
"grad_norm": 4.28125,
"learning_rate": 3.266220663443358e-06,
"loss": 1.3863,
"step": 217
},
{
"epoch": 0.6199786704585851,
"grad_norm": 6.53125,
"learning_rate": 3.223710723880786e-06,
"loss": 1.3828,
"step": 218
},
{
"epoch": 0.6228226093138998,
"grad_norm": 4.625,
"learning_rate": 3.1813472238251742e-06,
"loss": 1.3583,
"step": 219
},
{
"epoch": 0.6256665481692144,
"grad_norm": 4.03125,
"learning_rate": 3.139133655776386e-06,
"loss": 1.3711,
"step": 220
},
{
"epoch": 0.6285104870245289,
"grad_norm": 3.8125,
"learning_rate": 3.0970734998737095e-06,
"loss": 1.4109,
"step": 221
},
{
"epoch": 0.6313544258798436,
"grad_norm": 3.5625,
"learning_rate": 3.055170223608941e-06,
"loss": 1.4092,
"step": 222
},
{
"epoch": 0.6341983647351582,
"grad_norm": 3.890625,
"learning_rate": 3.013427281540523e-06,
"loss": 1.3846,
"step": 223
},
{
"epoch": 0.6370423035904728,
"grad_norm": 3.8125,
"learning_rate": 2.9718481150087475e-06,
"loss": 1.3378,
"step": 224
},
{
"epoch": 0.6398862424457874,
"grad_norm": 5.75,
"learning_rate": 2.9304361518520447e-06,
"loss": 1.4083,
"step": 225
},
{
"epoch": 0.642730181301102,
"grad_norm": 10.0625,
"learning_rate": 2.8891948061243925e-06,
"loss": 1.399,
"step": 226
},
{
"epoch": 0.6455741201564167,
"grad_norm": 3.984375,
"learning_rate": 2.8481274778138567e-06,
"loss": 1.3218,
"step": 227
},
{
"epoch": 0.6484180590117312,
"grad_norm": 3.34375,
"learning_rate": 2.80723755256228e-06,
"loss": 1.3727,
"step": 228
},
{
"epoch": 0.6512619978670459,
"grad_norm": 5.03125,
"learning_rate": 2.766528401386187e-06,
"loss": 1.3563,
"step": 229
},
{
"epoch": 0.6541059367223605,
"grad_norm": 3.703125,
"learning_rate": 2.726003380398854e-06,
"loss": 1.3015,
"step": 230
},
{
"epoch": 0.656949875577675,
"grad_norm": 4.875,
"learning_rate": 2.685665830533642e-06,
"loss": 1.3992,
"step": 231
},
{
"epoch": 0.6597938144329897,
"grad_norm": 5.28125,
"learning_rate": 2.6455190772685463e-06,
"loss": 1.3843,
"step": 232
},
{
"epoch": 0.6626377532883043,
"grad_norm": 5.375,
"learning_rate": 2.6055664303520655e-06,
"loss": 1.3088,
"step": 233
},
{
"epoch": 0.665481692143619,
"grad_norm": 5.0625,
"learning_rate": 2.5658111835303206e-06,
"loss": 1.329,
"step": 234
},
{
"epoch": 0.6683256309989335,
"grad_norm": 3.390625,
"learning_rate": 2.526256614275524e-06,
"loss": 1.375,
"step": 235
},
{
"epoch": 0.6711695698542481,
"grad_norm": 3.828125,
"learning_rate": 2.486905983515778e-06,
"loss": 1.363,
"step": 236
},
{
"epoch": 0.6740135087095628,
"grad_norm": 9.375,
"learning_rate": 2.44776253536624e-06,
"loss": 1.3671,
"step": 237
},
{
"epoch": 0.6768574475648773,
"grad_norm": 4.09375,
"learning_rate": 2.408829496861679e-06,
"loss": 1.3624,
"step": 238
},
{
"epoch": 0.679701386420192,
"grad_norm": 3.34375,
"learning_rate": 2.370110077690425e-06,
"loss": 1.3291,
"step": 239
},
{
"epoch": 0.6825453252755066,
"grad_norm": 4.25,
"learning_rate": 2.331607469929765e-06,
"loss": 1.3745,
"step": 240
},
{
"epoch": 0.6853892641308212,
"grad_norm": 4.875,
"learning_rate": 2.2933248477827814e-06,
"loss": 1.394,
"step": 241
},
{
"epoch": 0.6882332029861358,
"grad_norm": 3.890625,
"learning_rate": 2.2552653673166676e-06,
"loss": 1.409,
"step": 242
},
{
"epoch": 0.6910771418414504,
"grad_norm": 3.578125,
"learning_rate": 2.2174321662025427e-06,
"loss": 1.3998,
"step": 243
},
{
"epoch": 0.6939210806967651,
"grad_norm": 4.5,
"learning_rate": 2.179828363456768e-06,
"loss": 1.3898,
"step": 244
},
{
"epoch": 0.6967650195520796,
"grad_norm": 4.375,
"learning_rate": 2.1424570591838184e-06,
"loss": 1.3422,
"step": 245
},
{
"epoch": 0.6996089584073942,
"grad_norm": 3.25,
"learning_rate": 2.1053213343207045e-06,
"loss": 1.3986,
"step": 246
},
{
"epoch": 0.7024528972627089,
"grad_norm": 3.828125,
"learning_rate": 2.068424250382974e-06,
"loss": 1.3767,
"step": 247
},
{
"epoch": 0.7052968361180235,
"grad_norm": 3.34375,
"learning_rate": 2.031768849212326e-06,
"loss": 1.4013,
"step": 248
},
{
"epoch": 0.708140774973338,
"grad_norm": 3.0625,
"learning_rate": 1.995358152725818e-06,
"loss": 1.3931,
"step": 249
},
{
"epoch": 0.7109847138286527,
"grad_norm": 3.578125,
"learning_rate": 1.959195162666759e-06,
"loss": 1.3763,
"step": 250
},
{
"epoch": 0.7138286526839673,
"grad_norm": 19.375,
"learning_rate": 1.9232828603572255e-06,
"loss": 1.3099,
"step": 251
},
{
"epoch": 0.7166725915392819,
"grad_norm": 3.53125,
"learning_rate": 1.8876242064522833e-06,
"loss": 1.3458,
"step": 252
},
{
"epoch": 0.7195165303945965,
"grad_norm": 3.796875,
"learning_rate": 1.8522221406959063e-06,
"loss": 1.3616,
"step": 253
},
{
"epoch": 0.7223604692499112,
"grad_norm": 3.234375,
"learning_rate": 1.8170795816786202e-06,
"loss": 1.3627,
"step": 254
},
{
"epoch": 0.7252044081052257,
"grad_norm": 3.640625,
"learning_rate": 1.7821994265968962e-06,
"loss": 1.3499,
"step": 255
},
{
"epoch": 0.7280483469605403,
"grad_norm": 3.265625,
"learning_rate": 1.747584551014291e-06,
"loss": 1.3368,
"step": 256
},
{
"epoch": 0.730892285815855,
"grad_norm": 6.0625,
"learning_rate": 1.7132378086243907e-06,
"loss": 1.3931,
"step": 257
},
{
"epoch": 0.7337362246711696,
"grad_norm": 3.40625,
"learning_rate": 1.679162031015546e-06,
"loss": 1.4312,
"step": 258
},
{
"epoch": 0.7365801635264841,
"grad_norm": 3.09375,
"learning_rate": 1.64536002743743e-06,
"loss": 1.3419,
"step": 259
},
{
"epoch": 0.7394241023817988,
"grad_norm": 3.15625,
"learning_rate": 1.6118345845694489e-06,
"loss": 1.3392,
"step": 260
},
{
"epoch": 0.7422680412371134,
"grad_norm": 4.59375,
"learning_rate": 1.5785884662909917e-06,
"loss": 1.3735,
"step": 261
},
{
"epoch": 0.745111980092428,
"grad_norm": 3.921875,
"learning_rate": 1.5456244134535836e-06,
"loss": 1.3798,
"step": 262
},
{
"epoch": 0.7479559189477426,
"grad_norm": 3.90625,
"learning_rate": 1.5129451436549203e-06,
"loss": 1.4131,
"step": 263
},
{
"epoch": 0.7507998578030572,
"grad_norm": 3.6875,
"learning_rate": 1.4805533510148268e-06,
"loss": 1.338,
"step": 264
},
{
"epoch": 0.7536437966583719,
"grad_norm": 5.5,
"learning_rate": 1.4484517059531588e-06,
"loss": 1.3417,
"step": 265
},
{
"epoch": 0.7564877355136864,
"grad_norm": 4.65625,
"learning_rate": 1.416642854969632e-06,
"loss": 1.3793,
"step": 266
},
{
"epoch": 0.7593316743690011,
"grad_norm": 4.03125,
"learning_rate": 1.3851294204256638e-06,
"loss": 1.3768,
"step": 267
},
{
"epoch": 0.7621756132243157,
"grad_norm": 4.4375,
"learning_rate": 1.3539140003281647e-06,
"loss": 1.3619,
"step": 268
},
{
"epoch": 0.7650195520796302,
"grad_norm": 4.3125,
"learning_rate": 1.3229991681153632e-06,
"loss": 1.3664,
"step": 269
},
{
"epoch": 0.7678634909349449,
"grad_norm": 3.96875,
"learning_rate": 1.2923874724446472e-06,
"loss": 1.3429,
"step": 270
},
{
"epoch": 0.7707074297902595,
"grad_norm": 4.5,
"learning_rate": 1.262081436982448e-06,
"loss": 1.38,
"step": 271
},
{
"epoch": 0.7735513686455742,
"grad_norm": 4.78125,
"learning_rate": 1.2320835601961928e-06,
"loss": 1.3598,
"step": 272
},
{
"epoch": 0.7763953075008887,
"grad_norm": 5.21875,
"learning_rate": 1.2023963151483165e-06,
"loss": 1.3817,
"step": 273
},
{
"epoch": 0.7792392463562033,
"grad_norm": 3.5,
"learning_rate": 1.1730221492923882e-06,
"loss": 1.3474,
"step": 274
},
{
"epoch": 0.782083185211518,
"grad_norm": 4.0,
"learning_rate": 1.1439634842713371e-06,
"loss": 1.3306,
"step": 275
},
{
"epoch": 0.7849271240668325,
"grad_norm": 3.34375,
"learning_rate": 1.115222715717807e-06,
"loss": 1.3564,
"step": 276
},
{
"epoch": 0.7877710629221472,
"grad_norm": 6.84375,
"learning_rate": 1.0868022130566652e-06,
"loss": 1.3335,
"step": 277
},
{
"epoch": 0.7906150017774618,
"grad_norm": 3.609375,
"learning_rate": 1.0587043193096535e-06,
"loss": 1.3674,
"step": 278
},
{
"epoch": 0.7934589406327764,
"grad_norm": 3.40625,
"learning_rate": 1.030931350902235e-06,
"loss": 1.381,
"step": 279
},
{
"epoch": 0.796302879488091,
"grad_norm": 5.3125,
"learning_rate": 1.0034855974726194e-06,
"loss": 1.363,
"step": 280
},
{
"epoch": 0.7991468183434056,
"grad_norm": 4.25,
"learning_rate": 9.763693216830055e-07,
"loss": 1.3654,
"step": 281
},
{
"epoch": 0.8019907571987203,
"grad_norm": 5.09375,
"learning_rate": 9.495847590330486e-07,
"loss": 1.3075,
"step": 282
},
{
"epoch": 0.8048346960540348,
"grad_norm": 3.375,
"learning_rate": 9.231341176755487e-07,
"loss": 1.3967,
"step": 283
},
{
"epoch": 0.8076786349093494,
"grad_norm": 3.375,
"learning_rate": 8.970195782344266e-07,
"loss": 1.3306,
"step": 284
},
{
"epoch": 0.8105225737646641,
"grad_norm": 7.40625,
"learning_rate": 8.712432936249365e-07,
"loss": 1.3871,
"step": 285
},
{
"epoch": 0.8133665126199787,
"grad_norm": 3.703125,
"learning_rate": 8.458073888761826e-07,
"loss": 1.3071,
"step": 286
},
{
"epoch": 0.8162104514752933,
"grad_norm": 4.46875,
"learning_rate": 8.207139609559284e-07,
"loss": 1.3573,
"step": 287
},
{
"epoch": 0.8190543903306079,
"grad_norm": 4.9375,
"learning_rate": 7.959650785977179e-07,
"loss": 1.3381,
"step": 288
},
{
"epoch": 0.8218983291859225,
"grad_norm": 3.359375,
"learning_rate": 7.71562782130334e-07,
"loss": 1.3247,
"step": 289
},
{
"epoch": 0.8247422680412371,
"grad_norm": 3.328125,
"learning_rate": 7.475090833095799e-07,
"loss": 1.3795,
"step": 290
},
{
"epoch": 0.8275862068965517,
"grad_norm": 3.75,
"learning_rate": 7.238059651524354e-07,
"loss": 1.3904,
"step": 291
},
{
"epoch": 0.8304301457518664,
"grad_norm": 3.515625,
"learning_rate": 7.004553817735732e-07,
"loss": 1.357,
"step": 292
},
{
"epoch": 0.833274084607181,
"grad_norm": 3.84375,
"learning_rate": 6.774592582242567e-07,
"loss": 1.3426,
"step": 293
},
{
"epoch": 0.8361180234624955,
"grad_norm": 3.640625,
"learning_rate": 6.548194903336408e-07,
"loss": 1.4078,
"step": 294
},
{
"epoch": 0.8389619623178102,
"grad_norm": 3.828125,
"learning_rate": 6.325379445524732e-07,
"loss": 1.3953,
"step": 295
},
{
"epoch": 0.8418059011731248,
"grad_norm": 8.4375,
"learning_rate": 6.106164577992224e-07,
"loss": 1.395,
"step": 296
},
{
"epoch": 0.8446498400284393,
"grad_norm": 4.09375,
"learning_rate": 5.890568373086425e-07,
"loss": 1.3245,
"step": 297
},
{
"epoch": 0.847493778883754,
"grad_norm": 3.796875,
"learning_rate": 5.678608604827784e-07,
"loss": 1.4177,
"step": 298
},
{
"epoch": 0.8503377177390686,
"grad_norm": 3.203125,
"learning_rate": 5.470302747444428e-07,
"loss": 1.3888,
"step": 299
},
{
"epoch": 0.8531816565943833,
"grad_norm": 3.703125,
"learning_rate": 5.265667973931416e-07,
"loss": 1.3779,
"step": 300
}
],
"logging_steps": 1,
"max_steps": 351,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 150,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 5.360553841201152e+17,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}