{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9996533795493934,
"eval_steps": 500,
"global_step": 1442,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 25.242776883326066,
"learning_rate": 9.090909090909091e-07,
"loss": 2.2486,
"step": 1
},
{
"epoch": 0.0,
"grad_norm": 26.24817385710333,
"learning_rate": 1.8181818181818183e-06,
"loss": 2.3213,
"step": 2
},
{
"epoch": 0.0,
"grad_norm": 25.61308035508189,
"learning_rate": 2.7272727272727272e-06,
"loss": 2.2611,
"step": 3
},
{
"epoch": 0.0,
"grad_norm": 23.555151274318433,
"learning_rate": 3.6363636363636366e-06,
"loss": 2.1199,
"step": 4
},
{
"epoch": 0.0,
"grad_norm": 17.147877363166824,
"learning_rate": 4.5454545454545455e-06,
"loss": 2.0128,
"step": 5
},
{
"epoch": 0.0,
"grad_norm": 17.911473433619907,
"learning_rate": 5.4545454545454545e-06,
"loss": 1.7526,
"step": 6
},
{
"epoch": 0.0,
"grad_norm": 10.230218696373884,
"learning_rate": 6.363636363636364e-06,
"loss": 1.5805,
"step": 7
},
{
"epoch": 0.01,
"grad_norm": 7.405478017967084,
"learning_rate": 7.272727272727273e-06,
"loss": 1.4655,
"step": 8
},
{
"epoch": 0.01,
"grad_norm": 4.4596216736498056,
"learning_rate": 8.181818181818183e-06,
"loss": 1.419,
"step": 9
},
{
"epoch": 0.01,
"grad_norm": 2.4442792637876205,
"learning_rate": 9.090909090909091e-06,
"loss": 1.2906,
"step": 10
},
{
"epoch": 0.01,
"grad_norm": 2.0806093829467627,
"learning_rate": 1e-05,
"loss": 1.2923,
"step": 11
},
{
"epoch": 0.01,
"grad_norm": 1.485986701211375,
"learning_rate": 1.0909090909090909e-05,
"loss": 1.2534,
"step": 12
},
{
"epoch": 0.01,
"grad_norm": 1.1683379372844354,
"learning_rate": 1.181818181818182e-05,
"loss": 1.1713,
"step": 13
},
{
"epoch": 0.01,
"grad_norm": 3.307146468488238,
"learning_rate": 1.2727272727272728e-05,
"loss": 1.1584,
"step": 14
},
{
"epoch": 0.01,
"grad_norm": 1.276308429072452,
"learning_rate": 1.3636363636363637e-05,
"loss": 1.1544,
"step": 15
},
{
"epoch": 0.01,
"grad_norm": 1.512795911598732,
"learning_rate": 1.4545454545454546e-05,
"loss": 1.1578,
"step": 16
},
{
"epoch": 0.01,
"grad_norm": 1.6295981881297716,
"learning_rate": 1.5454545454545454e-05,
"loss": 1.1269,
"step": 17
},
{
"epoch": 0.01,
"grad_norm": 1.2250424038416317,
"learning_rate": 1.6363636363636366e-05,
"loss": 1.0553,
"step": 18
},
{
"epoch": 0.01,
"grad_norm": 1.0939945875598105,
"learning_rate": 1.7272727272727274e-05,
"loss": 1.114,
"step": 19
},
{
"epoch": 0.01,
"grad_norm": 0.9751021617511467,
"learning_rate": 1.8181818181818182e-05,
"loss": 1.0814,
"step": 20
},
{
"epoch": 0.01,
"grad_norm": 2.659255146424915,
"learning_rate": 1.9090909090909094e-05,
"loss": 1.0763,
"step": 21
},
{
"epoch": 0.02,
"grad_norm": 0.9405131551648747,
"learning_rate": 2e-05,
"loss": 1.0214,
"step": 22
},
{
"epoch": 0.02,
"grad_norm": 0.9578364914941194,
"learning_rate": 2.090909090909091e-05,
"loss": 1.0975,
"step": 23
},
{
"epoch": 0.02,
"grad_norm": 0.9801838289794812,
"learning_rate": 2.1818181818181818e-05,
"loss": 1.045,
"step": 24
},
{
"epoch": 0.02,
"grad_norm": 1.1426746305221096,
"learning_rate": 2.2727272727272733e-05,
"loss": 1.04,
"step": 25
},
{
"epoch": 0.02,
"grad_norm": 1.0030086746477813,
"learning_rate": 2.363636363636364e-05,
"loss": 0.9969,
"step": 26
},
{
"epoch": 0.02,
"grad_norm": 0.8864565877172841,
"learning_rate": 2.454545454545455e-05,
"loss": 1.0203,
"step": 27
},
{
"epoch": 0.02,
"grad_norm": 1.0346513222899663,
"learning_rate": 2.5454545454545457e-05,
"loss": 1.0288,
"step": 28
},
{
"epoch": 0.02,
"grad_norm": 1.010844951900328,
"learning_rate": 2.6363636363636365e-05,
"loss": 1.0052,
"step": 29
},
{
"epoch": 0.02,
"grad_norm": 0.8722392406753867,
"learning_rate": 2.7272727272727273e-05,
"loss": 1.0212,
"step": 30
},
{
"epoch": 0.02,
"grad_norm": 1.1122299162753009,
"learning_rate": 2.8181818181818185e-05,
"loss": 1.0616,
"step": 31
},
{
"epoch": 0.02,
"grad_norm": 0.7897265899024061,
"learning_rate": 2.9090909090909093e-05,
"loss": 0.994,
"step": 32
},
{
"epoch": 0.02,
"grad_norm": 0.8118095551454805,
"learning_rate": 3.0000000000000004e-05,
"loss": 0.9967,
"step": 33
},
{
"epoch": 0.02,
"grad_norm": 0.8761261512756774,
"learning_rate": 3.090909090909091e-05,
"loss": 0.9719,
"step": 34
},
{
"epoch": 0.02,
"grad_norm": 0.8606540392409628,
"learning_rate": 3.181818181818182e-05,
"loss": 0.9997,
"step": 35
},
{
"epoch": 0.02,
"grad_norm": 0.7648262778445616,
"learning_rate": 3.272727272727273e-05,
"loss": 0.9535,
"step": 36
},
{
"epoch": 0.03,
"grad_norm": 0.7737890978856444,
"learning_rate": 3.363636363636364e-05,
"loss": 1.0277,
"step": 37
},
{
"epoch": 0.03,
"grad_norm": 0.8263780695616693,
"learning_rate": 3.454545454545455e-05,
"loss": 1.01,
"step": 38
},
{
"epoch": 0.03,
"grad_norm": 0.8999550511174016,
"learning_rate": 3.545454545454546e-05,
"loss": 0.9694,
"step": 39
},
{
"epoch": 0.03,
"grad_norm": 0.9385283710457742,
"learning_rate": 3.6363636363636364e-05,
"loss": 0.9423,
"step": 40
},
{
"epoch": 0.03,
"grad_norm": 0.7862785611653874,
"learning_rate": 3.7272727272727276e-05,
"loss": 0.9863,
"step": 41
},
{
"epoch": 0.03,
"grad_norm": 0.8160954637491998,
"learning_rate": 3.818181818181819e-05,
"loss": 1.0084,
"step": 42
},
{
"epoch": 0.03,
"grad_norm": 0.8667601802428322,
"learning_rate": 3.909090909090909e-05,
"loss": 0.9686,
"step": 43
},
{
"epoch": 0.03,
"grad_norm": 0.7378882404737006,
"learning_rate": 4e-05,
"loss": 1.0064,
"step": 44
},
{
"epoch": 0.03,
"grad_norm": 0.7836195281789288,
"learning_rate": 3.999994950071609e-05,
"loss": 0.9913,
"step": 45
},
{
"epoch": 0.03,
"grad_norm": 0.7966183302919377,
"learning_rate": 3.999979800311937e-05,
"loss": 0.9659,
"step": 46
},
{
"epoch": 0.03,
"grad_norm": 0.8192576824785897,
"learning_rate": 3.999954550797489e-05,
"loss": 0.9977,
"step": 47
},
{
"epoch": 0.03,
"grad_norm": 0.7546567166231344,
"learning_rate": 3.999919201655774e-05,
"loss": 0.9298,
"step": 48
},
{
"epoch": 0.03,
"grad_norm": 0.7353449537157848,
"learning_rate": 3.999873753065302e-05,
"loss": 1.0083,
"step": 49
},
{
"epoch": 0.03,
"grad_norm": 0.7527466426146829,
"learning_rate": 3.999818205255586e-05,
"loss": 0.9998,
"step": 50
},
{
"epoch": 0.04,
"grad_norm": 0.7748566771226466,
"learning_rate": 3.9997525585071365e-05,
"loss": 0.97,
"step": 51
},
{
"epoch": 0.04,
"grad_norm": 0.7266635550466299,
"learning_rate": 3.999676813151467e-05,
"loss": 1.0009,
"step": 52
},
{
"epoch": 0.04,
"grad_norm": 0.7469204590909783,
"learning_rate": 3.9995909695710856e-05,
"loss": 0.956,
"step": 53
},
{
"epoch": 0.04,
"grad_norm": 0.6881688131232699,
"learning_rate": 3.999495028199495e-05,
"loss": 1.0061,
"step": 54
},
{
"epoch": 0.04,
"grad_norm": 0.7816026885490107,
"learning_rate": 3.999388989521194e-05,
"loss": 0.953,
"step": 55
},
{
"epoch": 0.04,
"grad_norm": 0.7304477108062899,
"learning_rate": 3.999272854071669e-05,
"loss": 0.9502,
"step": 56
},
{
"epoch": 0.04,
"grad_norm": 0.8144049243864225,
"learning_rate": 3.9991466224373964e-05,
"loss": 0.948,
"step": 57
},
{
"epoch": 0.04,
"grad_norm": 0.7352923472979539,
"learning_rate": 3.999010295255838e-05,
"loss": 0.948,
"step": 58
},
{
"epoch": 0.04,
"grad_norm": 0.7214847580480483,
"learning_rate": 3.998863873215434e-05,
"loss": 0.9551,
"step": 59
},
{
"epoch": 0.04,
"grad_norm": 0.7221389972200254,
"learning_rate": 3.998707357055606e-05,
"loss": 0.9565,
"step": 60
},
{
"epoch": 0.04,
"grad_norm": 0.6784519549827787,
"learning_rate": 3.99854074756675e-05,
"loss": 0.9221,
"step": 61
},
{
"epoch": 0.04,
"grad_norm": 0.7417742174785682,
"learning_rate": 3.998364045590232e-05,
"loss": 0.9911,
"step": 62
},
{
"epoch": 0.04,
"grad_norm": 0.7096145214387224,
"learning_rate": 3.998177252018383e-05,
"loss": 0.9792,
"step": 63
},
{
"epoch": 0.04,
"grad_norm": 0.7008691056448438,
"learning_rate": 3.997980367794499e-05,
"loss": 0.9161,
"step": 64
},
{
"epoch": 0.05,
"grad_norm": 0.7124126949355876,
"learning_rate": 3.9977733939128304e-05,
"loss": 0.9286,
"step": 65
},
{
"epoch": 0.05,
"grad_norm": 0.7252049055812851,
"learning_rate": 3.99755633141858e-05,
"loss": 0.9139,
"step": 66
},
{
"epoch": 0.05,
"grad_norm": 0.665207713687371,
"learning_rate": 3.9973291814078996e-05,
"loss": 0.9542,
"step": 67
},
{
"epoch": 0.05,
"grad_norm": 0.6561240417317747,
"learning_rate": 3.997091945027878e-05,
"loss": 0.9575,
"step": 68
},
{
"epoch": 0.05,
"grad_norm": 0.7184162269491192,
"learning_rate": 3.996844623476543e-05,
"loss": 0.9595,
"step": 69
},
{
"epoch": 0.05,
"grad_norm": 0.7446753841118763,
"learning_rate": 3.996587218002852e-05,
"loss": 0.9207,
"step": 70
},
{
"epoch": 0.05,
"grad_norm": 0.7144147431983755,
"learning_rate": 3.996319729906682e-05,
"loss": 0.9037,
"step": 71
},
{
"epoch": 0.05,
"grad_norm": 0.6828973080234167,
"learning_rate": 3.99604216053883e-05,
"loss": 0.9226,
"step": 72
},
{
"epoch": 0.05,
"grad_norm": 0.6947476568417512,
"learning_rate": 3.995754511301002e-05,
"loss": 0.9273,
"step": 73
},
{
"epoch": 0.05,
"grad_norm": 0.6967933420470825,
"learning_rate": 3.995456783645805e-05,
"loss": 0.9151,
"step": 74
},
{
"epoch": 0.05,
"grad_norm": 0.6852930588579474,
"learning_rate": 3.995148979076741e-05,
"loss": 0.9474,
"step": 75
},
{
"epoch": 0.05,
"grad_norm": 0.6621382715578686,
"learning_rate": 3.994831099148205e-05,
"loss": 0.931,
"step": 76
},
{
"epoch": 0.05,
"grad_norm": 0.700362518754512,
"learning_rate": 3.994503145465464e-05,
"loss": 0.9482,
"step": 77
},
{
"epoch": 0.05,
"grad_norm": 0.6286338691983503,
"learning_rate": 3.994165119684663e-05,
"loss": 0.9067,
"step": 78
},
{
"epoch": 0.05,
"grad_norm": 0.669677454644009,
"learning_rate": 3.9938170235128074e-05,
"loss": 0.9881,
"step": 79
},
{
"epoch": 0.06,
"grad_norm": 0.6945486619794093,
"learning_rate": 3.993458858707756e-05,
"loss": 0.9309,
"step": 80
},
{
"epoch": 0.06,
"grad_norm": 0.6569936029765302,
"learning_rate": 3.993090627078219e-05,
"loss": 0.9317,
"step": 81
},
{
"epoch": 0.06,
"grad_norm": 0.7058253642392298,
"learning_rate": 3.992712330483737e-05,
"loss": 0.8983,
"step": 82
},
{
"epoch": 0.06,
"grad_norm": 0.6746306642388232,
"learning_rate": 3.992323970834682e-05,
"loss": 0.9198,
"step": 83
},
{
"epoch": 0.06,
"grad_norm": 0.7207765302646257,
"learning_rate": 3.9919255500922416e-05,
"loss": 0.9076,
"step": 84
},
{
"epoch": 0.06,
"grad_norm": 0.6926110406915819,
"learning_rate": 3.9915170702684125e-05,
"loss": 0.9534,
"step": 85
},
{
"epoch": 0.06,
"grad_norm": 0.6069390690034724,
"learning_rate": 3.991098533425988e-05,
"loss": 0.9448,
"step": 86
},
{
"epoch": 0.06,
"grad_norm": 0.6307159850656698,
"learning_rate": 3.990669941678551e-05,
"loss": 0.9282,
"step": 87
},
{
"epoch": 0.06,
"grad_norm": 0.6869236902325659,
"learning_rate": 3.990231297190456e-05,
"loss": 0.9399,
"step": 88
},
{
"epoch": 0.06,
"grad_norm": 0.6242859875899749,
"learning_rate": 3.989782602176829e-05,
"loss": 0.8966,
"step": 89
},
{
"epoch": 0.06,
"grad_norm": 0.7618882321375348,
"learning_rate": 3.989323858903548e-05,
"loss": 0.9548,
"step": 90
},
{
"epoch": 0.06,
"grad_norm": 0.6603883679145531,
"learning_rate": 3.988855069687231e-05,
"loss": 0.9146,
"step": 91
},
{
"epoch": 0.06,
"grad_norm": 0.6923873343270807,
"learning_rate": 3.988376236895231e-05,
"loss": 0.9864,
"step": 92
},
{
"epoch": 0.06,
"grad_norm": 0.6480460582873827,
"learning_rate": 3.9878873629456205e-05,
"loss": 0.9075,
"step": 93
},
{
"epoch": 0.07,
"grad_norm": 0.6666429958410881,
"learning_rate": 3.987388450307177e-05,
"loss": 0.9193,
"step": 94
},
{
"epoch": 0.07,
"grad_norm": 0.6936416146767364,
"learning_rate": 3.986879501499373e-05,
"loss": 0.9345,
"step": 95
},
{
"epoch": 0.07,
"grad_norm": 0.6509933337021381,
"learning_rate": 3.9863605190923655e-05,
"loss": 0.9414,
"step": 96
},
{
"epoch": 0.07,
"grad_norm": 0.682389931912625,
"learning_rate": 3.9858315057069765e-05,
"loss": 0.8886,
"step": 97
},
{
"epoch": 0.07,
"grad_norm": 0.6353224222353495,
"learning_rate": 3.985292464014686e-05,
"loss": 0.8951,
"step": 98
},
{
"epoch": 0.07,
"grad_norm": 0.6278998163165551,
"learning_rate": 3.9847433967376174e-05,
"loss": 0.9104,
"step": 99
},
{
"epoch": 0.07,
"grad_norm": 0.6270998048962071,
"learning_rate": 3.9841843066485194e-05,
"loss": 0.9027,
"step": 100
},
{
"epoch": 0.07,
"grad_norm": 0.6184222112069471,
"learning_rate": 3.9836151965707585e-05,
"loss": 0.8889,
"step": 101
},
{
"epoch": 0.07,
"grad_norm": 0.6118332308425215,
"learning_rate": 3.9830360693782984e-05,
"loss": 0.9467,
"step": 102
},
{
"epoch": 0.07,
"grad_norm": 0.6388133987028665,
"learning_rate": 3.9824469279956905e-05,
"loss": 0.9576,
"step": 103
},
{
"epoch": 0.07,
"grad_norm": 0.6154632940078371,
"learning_rate": 3.9818477753980566e-05,
"loss": 0.9224,
"step": 104
},
{
"epoch": 0.07,
"grad_norm": 0.5830878901542325,
"learning_rate": 3.981238614611075e-05,
"loss": 0.875,
"step": 105
},
{
"epoch": 0.07,
"grad_norm": 0.6267216316963703,
"learning_rate": 3.9806194487109636e-05,
"loss": 0.9536,
"step": 106
},
{
"epoch": 0.07,
"grad_norm": 0.6269214324575413,
"learning_rate": 3.979990280824465e-05,
"loss": 0.9579,
"step": 107
},
{
"epoch": 0.07,
"grad_norm": 0.6510734409199925,
"learning_rate": 3.979351114128833e-05,
"loss": 0.9654,
"step": 108
},
{
"epoch": 0.08,
"grad_norm": 0.5809004604690332,
"learning_rate": 3.978701951851814e-05,
"loss": 0.9695,
"step": 109
},
{
"epoch": 0.08,
"grad_norm": 0.6209617422673123,
"learning_rate": 3.9780427972716296e-05,
"loss": 0.9189,
"step": 110
},
{
"epoch": 0.08,
"grad_norm": 0.6563185007227816,
"learning_rate": 3.977373653716965e-05,
"loss": 0.8648,
"step": 111
},
{
"epoch": 0.08,
"grad_norm": 0.6494662937748996,
"learning_rate": 3.976694524566945e-05,
"loss": 0.8905,
"step": 112
},
{
"epoch": 0.08,
"grad_norm": 0.6493369538292527,
"learning_rate": 3.976005413251125e-05,
"loss": 0.9245,
"step": 113
},
{
"epoch": 0.08,
"grad_norm": 0.6337066690323254,
"learning_rate": 3.975306323249467e-05,
"loss": 0.9165,
"step": 114
},
{
"epoch": 0.08,
"grad_norm": 0.6321039160680165,
"learning_rate": 3.974597258092327e-05,
"loss": 0.953,
"step": 115
},
{
"epoch": 0.08,
"grad_norm": 0.6096857074648213,
"learning_rate": 3.9738782213604305e-05,
"loss": 0.9209,
"step": 116
},
{
"epoch": 0.08,
"grad_norm": 0.6363445928361933,
"learning_rate": 3.9731492166848633e-05,
"loss": 0.9243,
"step": 117
},
{
"epoch": 0.08,
"grad_norm": 0.646121219004708,
"learning_rate": 3.972410247747047e-05,
"loss": 0.9286,
"step": 118
},
{
"epoch": 0.08,
"grad_norm": 0.6354990932735374,
"learning_rate": 3.971661318278721e-05,
"loss": 0.9231,
"step": 119
},
{
"epoch": 0.08,
"grad_norm": 0.6707480534953454,
"learning_rate": 3.970902432061925e-05,
"loss": 0.9072,
"step": 120
},
{
"epoch": 0.08,
"grad_norm": 0.6395364641156271,
"learning_rate": 3.970133592928982e-05,
"loss": 0.924,
"step": 121
},
{
"epoch": 0.08,
"grad_norm": 0.6595766214739742,
"learning_rate": 3.969354804762473e-05,
"loss": 0.9103,
"step": 122
},
{
"epoch": 0.09,
"grad_norm": 0.6325532612571391,
"learning_rate": 3.968566071495222e-05,
"loss": 0.8847,
"step": 123
},
{
"epoch": 0.09,
"grad_norm": 0.5977677589837317,
"learning_rate": 3.9677673971102777e-05,
"loss": 0.9118,
"step": 124
},
{
"epoch": 0.09,
"grad_norm": 0.6938009191633233,
"learning_rate": 3.966958785640887e-05,
"loss": 0.9125,
"step": 125
},
{
"epoch": 0.09,
"grad_norm": 0.6382156817661678,
"learning_rate": 3.9661402411704794e-05,
"loss": 0.8822,
"step": 126
},
{
"epoch": 0.09,
"grad_norm": 0.6790574574084021,
"learning_rate": 3.9653117678326476e-05,
"loss": 0.9139,
"step": 127
},
{
"epoch": 0.09,
"grad_norm": 0.696316303131707,
"learning_rate": 3.9644733698111206e-05,
"loss": 0.9154,
"step": 128
},
{
"epoch": 0.09,
"grad_norm": 0.6233309718845259,
"learning_rate": 3.96362505133975e-05,
"loss": 0.8844,
"step": 129
},
{
"epoch": 0.09,
"grad_norm": 0.6745434810184168,
"learning_rate": 3.9627668167024834e-05,
"loss": 0.9352,
"step": 130
},
{
"epoch": 0.09,
"grad_norm": 0.637975158810312,
"learning_rate": 3.9618986702333424e-05,
"loss": 0.8965,
"step": 131
},
{
"epoch": 0.09,
"grad_norm": 0.629976377253493,
"learning_rate": 3.9610206163164064e-05,
"loss": 0.8793,
"step": 132
},
{
"epoch": 0.09,
"grad_norm": 0.5800708640312998,
"learning_rate": 3.9601326593857836e-05,
"loss": 0.8772,
"step": 133
},
{
"epoch": 0.09,
"grad_norm": 0.6142288594149091,
"learning_rate": 3.959234803925594e-05,
"loss": 0.9101,
"step": 134
},
{
"epoch": 0.09,
"grad_norm": 0.6067445146264143,
"learning_rate": 3.9583270544699426e-05,
"loss": 0.8971,
"step": 135
},
{
"epoch": 0.09,
"grad_norm": 0.5855484534760861,
"learning_rate": 3.957409415602899e-05,
"loss": 0.8987,
"step": 136
},
{
"epoch": 0.09,
"grad_norm": 0.6083769768470026,
"learning_rate": 3.956481891958475e-05,
"loss": 0.9137,
"step": 137
},
{
"epoch": 0.1,
"grad_norm": 0.6031018501934673,
"learning_rate": 3.9555444882205975e-05,
"loss": 0.8859,
"step": 138
},
{
"epoch": 0.1,
"grad_norm": 0.6921208375775008,
"learning_rate": 3.954597209123088e-05,
"loss": 0.9318,
"step": 139
},
{
"epoch": 0.1,
"grad_norm": 0.6212490077403071,
"learning_rate": 3.9536400594496386e-05,
"loss": 0.9095,
"step": 140
},
{
"epoch": 0.1,
"grad_norm": 0.6077918694786609,
"learning_rate": 3.952673044033787e-05,
"loss": 0.8848,
"step": 141
},
{
"epoch": 0.1,
"grad_norm": 0.6294534849606952,
"learning_rate": 3.951696167758891e-05,
"loss": 0.9377,
"step": 142
},
{
"epoch": 0.1,
"grad_norm": 0.6070625469665414,
"learning_rate": 3.950709435558106e-05,
"loss": 0.8741,
"step": 143
},
{
"epoch": 0.1,
"grad_norm": 0.6152538086151278,
"learning_rate": 3.9497128524143596e-05,
"loss": 0.876,
"step": 144
},
{
"epoch": 0.1,
"grad_norm": 0.6004112219499066,
"learning_rate": 3.948706423360325e-05,
"loss": 0.8909,
"step": 145
},
{
"epoch": 0.1,
"grad_norm": 0.7021351650479439,
"learning_rate": 3.947690153478396e-05,
"loss": 0.8851,
"step": 146
},
{
"epoch": 0.1,
"grad_norm": 0.5734794381744992,
"learning_rate": 3.946664047900664e-05,
"loss": 0.8969,
"step": 147
},
{
"epoch": 0.1,
"grad_norm": 0.5940392971794302,
"learning_rate": 3.9456281118088887e-05,
"loss": 0.9199,
"step": 148
},
{
"epoch": 0.1,
"grad_norm": 0.5950994939302906,
"learning_rate": 3.9445823504344725e-05,
"loss": 0.8622,
"step": 149
},
{
"epoch": 0.1,
"grad_norm": 0.6031247489682733,
"learning_rate": 3.943526769058435e-05,
"loss": 0.9011,
"step": 150
},
{
"epoch": 0.1,
"grad_norm": 0.628935829682089,
"learning_rate": 3.942461373011387e-05,
"loss": 0.8985,
"step": 151
},
{
"epoch": 0.11,
"grad_norm": 0.6333815572551225,
"learning_rate": 3.9413861676735034e-05,
"loss": 0.8773,
"step": 152
},
{
"epoch": 0.11,
"grad_norm": 0.6186587677959166,
"learning_rate": 3.9403011584744925e-05,
"loss": 0.8887,
"step": 153
},
{
"epoch": 0.11,
"grad_norm": 0.6211505125444288,
"learning_rate": 3.939206350893574e-05,
"loss": 0.8946,
"step": 154
},
{
"epoch": 0.11,
"grad_norm": 0.5830584451663235,
"learning_rate": 3.938101750459447e-05,
"loss": 0.9162,
"step": 155
},
{
"epoch": 0.11,
"grad_norm": 0.5946194210750824,
"learning_rate": 3.936987362750266e-05,
"loss": 0.9268,
"step": 156
},
{
"epoch": 0.11,
"grad_norm": 0.5629828355225477,
"learning_rate": 3.935863193393607e-05,
"loss": 0.9227,
"step": 157
},
{
"epoch": 0.11,
"grad_norm": 0.5781046970363167,
"learning_rate": 3.9347292480664465e-05,
"loss": 0.9013,
"step": 158
},
{
"epoch": 0.11,
"grad_norm": 0.5854354704485962,
"learning_rate": 3.9335855324951275e-05,
"loss": 0.8766,
"step": 159
},
{
"epoch": 0.11,
"grad_norm": 0.5942058005609583,
"learning_rate": 3.93243205245533e-05,
"loss": 0.9244,
"step": 160
},
{
"epoch": 0.11,
"grad_norm": 0.6186432256372385,
"learning_rate": 3.931268813772047e-05,
"loss": 0.8843,
"step": 161
},
{
"epoch": 0.11,
"grad_norm": 0.606162269201839,
"learning_rate": 3.93009582231955e-05,
"loss": 0.8807,
"step": 162
},
{
"epoch": 0.11,
"grad_norm": 0.6726053851975508,
"learning_rate": 3.928913084021361e-05,
"loss": 0.8745,
"step": 163
},
{
"epoch": 0.11,
"grad_norm": 0.6124902430587509,
"learning_rate": 3.927720604850226e-05,
"loss": 0.891,
"step": 164
},
{
"epoch": 0.11,
"grad_norm": 0.636469206103097,
"learning_rate": 3.926518390828077e-05,
"loss": 0.8472,
"step": 165
},
{
"epoch": 0.12,
"grad_norm": 0.6597428832783658,
"learning_rate": 3.925306448026011e-05,
"loss": 0.8792,
"step": 166
},
{
"epoch": 0.12,
"grad_norm": 0.5960409389126693,
"learning_rate": 3.92408478256425e-05,
"loss": 0.9063,
"step": 167
},
{
"epoch": 0.12,
"grad_norm": 0.6264580235880576,
"learning_rate": 3.922853400612119e-05,
"loss": 0.9385,
"step": 168
},
{
"epoch": 0.12,
"grad_norm": 0.5952217499023743,
"learning_rate": 3.9216123083880075e-05,
"loss": 0.8831,
"step": 169
},
{
"epoch": 0.12,
"grad_norm": 0.5953005522000298,
"learning_rate": 3.920361512159343e-05,
"loss": 0.881,
"step": 170
},
{
"epoch": 0.12,
"grad_norm": 0.6206981518975937,
"learning_rate": 3.9191010182425566e-05,
"loss": 0.8735,
"step": 171
},
{
"epoch": 0.12,
"grad_norm": 0.651815725592759,
"learning_rate": 3.9178308330030524e-05,
"loss": 0.9037,
"step": 172
},
{
"epoch": 0.12,
"grad_norm": 0.5603069787203854,
"learning_rate": 3.916550962855174e-05,
"loss": 0.8715,
"step": 173
},
{
"epoch": 0.12,
"grad_norm": 0.6400098050058027,
"learning_rate": 3.915261414262177e-05,
"loss": 0.9086,
"step": 174
},
{
"epoch": 0.12,
"grad_norm": 0.6183034880528574,
"learning_rate": 3.913962193736185e-05,
"loss": 0.8891,
"step": 175
},
{
"epoch": 0.12,
"grad_norm": 0.58959944731978,
"learning_rate": 3.912653307838173e-05,
"loss": 0.9007,
"step": 176
},
{
"epoch": 0.12,
"grad_norm": 0.5908221046896484,
"learning_rate": 3.9113347631779174e-05,
"loss": 0.9078,
"step": 177
},
{
"epoch": 0.12,
"grad_norm": 0.6086939225535511,
"learning_rate": 3.910006566413976e-05,
"loss": 0.8665,
"step": 178
},
{
"epoch": 0.12,
"grad_norm": 0.62154046635455,
"learning_rate": 3.908668724253649e-05,
"loss": 0.8502,
"step": 179
},
{
"epoch": 0.12,
"grad_norm": 0.575377574548445,
"learning_rate": 3.90732124345294e-05,
"loss": 0.9339,
"step": 180
},
{
"epoch": 0.13,
"grad_norm": 0.5912325908540916,
"learning_rate": 3.905964130816533e-05,
"loss": 0.8984,
"step": 181
},
{
"epoch": 0.13,
"grad_norm": 0.5672059615230085,
"learning_rate": 3.9045973931977495e-05,
"loss": 0.8387,
"step": 182
},
{
"epoch": 0.13,
"grad_norm": 0.5856109500417666,
"learning_rate": 3.903221037498515e-05,
"loss": 0.8957,
"step": 183
},
{
"epoch": 0.13,
"grad_norm": 0.6457220547090584,
"learning_rate": 3.901835070669329e-05,
"loss": 0.8445,
"step": 184
},
{
"epoch": 0.13,
"grad_norm": 0.6045788909840637,
"learning_rate": 3.900439499709224e-05,
"loss": 0.8468,
"step": 185
},
{
"epoch": 0.13,
"grad_norm": 0.6049131354732448,
"learning_rate": 3.899034331665733e-05,
"loss": 0.8766,
"step": 186
},
{
"epoch": 0.13,
"grad_norm": 0.5954754176729954,
"learning_rate": 3.897619573634855e-05,
"loss": 0.8909,
"step": 187
},
{
"epoch": 0.13,
"grad_norm": 0.5927906571700062,
"learning_rate": 3.896195232761016e-05,
"loss": 0.8495,
"step": 188
},
{
"epoch": 0.13,
"grad_norm": 0.5953154773986121,
"learning_rate": 3.8947613162370356e-05,
"loss": 0.8942,
"step": 189
},
{
"epoch": 0.13,
"grad_norm": 0.5680050624831924,
"learning_rate": 3.8933178313040895e-05,
"loss": 0.9039,
"step": 190
},
{
"epoch": 0.13,
"grad_norm": 0.5571078368051514,
"learning_rate": 3.891864785251673e-05,
"loss": 0.8952,
"step": 191
},
{
"epoch": 0.13,
"grad_norm": 0.57414491259907,
"learning_rate": 3.8904021854175654e-05,
"loss": 0.8759,
"step": 192
},
{
"epoch": 0.13,
"grad_norm": 0.6259883153342662,
"learning_rate": 3.888930039187791e-05,
"loss": 0.9051,
"step": 193
},
{
"epoch": 0.13,
"grad_norm": 0.5625157305120121,
"learning_rate": 3.887448353996582e-05,
"loss": 0.894,
"step": 194
},
{
"epoch": 0.14,
"grad_norm": 0.6343265428079294,
"learning_rate": 3.885957137326343e-05,
"loss": 0.8801,
"step": 195
},
{
"epoch": 0.14,
"grad_norm": 0.5971902398720431,
"learning_rate": 3.884456396707611e-05,
"loss": 0.8552,
"step": 196
},
{
"epoch": 0.14,
"grad_norm": 0.5809359778100228,
"learning_rate": 3.88294613971902e-05,
"loss": 0.8939,
"step": 197
},
{
"epoch": 0.14,
"grad_norm": 0.5718379857288207,
"learning_rate": 3.8814263739872584e-05,
"loss": 0.9053,
"step": 198
},
{
"epoch": 0.14,
"grad_norm": 0.5921554986338307,
"learning_rate": 3.8798971071870345e-05,
"loss": 0.8698,
"step": 199
},
{
"epoch": 0.14,
"grad_norm": 0.5408597582825745,
"learning_rate": 3.8783583470410365e-05,
"loss": 0.8625,
"step": 200
},
{
"epoch": 0.14,
"grad_norm": 0.5714670211520112,
"learning_rate": 3.876810101319893e-05,
"loss": 0.8839,
"step": 201
},
{
"epoch": 0.14,
"grad_norm": 0.5790625468577971,
"learning_rate": 3.875252377842133e-05,
"loss": 0.8904,
"step": 202
},
{
"epoch": 0.14,
"grad_norm": 0.5669500586836992,
"learning_rate": 3.87368518447415e-05,
"loss": 0.9013,
"step": 203
},
{
"epoch": 0.14,
"grad_norm": 0.5798283792530063,
"learning_rate": 3.8721085291301566e-05,
"loss": 0.8874,
"step": 204
},
{
"epoch": 0.14,
"grad_norm": 0.5839571977617171,
"learning_rate": 3.870522419772151e-05,
"loss": 0.851,
"step": 205
},
{
"epoch": 0.14,
"grad_norm": 0.5813966499485558,
"learning_rate": 3.8689268644098715e-05,
"loss": 0.857,
"step": 206
},
{
"epoch": 0.14,
"grad_norm": 0.5759019357685262,
"learning_rate": 3.867321871100758e-05,
"loss": 0.8639,
"step": 207
},
{
"epoch": 0.14,
"grad_norm": 0.6021365611111714,
"learning_rate": 3.8657074479499106e-05,
"loss": 0.8755,
"step": 208
},
{
"epoch": 0.14,
"grad_norm": 0.6059555381609605,
"learning_rate": 3.864083603110053e-05,
"loss": 0.9112,
"step": 209
},
{
"epoch": 0.15,
"grad_norm": 0.5814979902760298,
"learning_rate": 3.8624503447814844e-05,
"loss": 0.8621,
"step": 210
},
{
"epoch": 0.15,
"grad_norm": 0.6046454169131595,
"learning_rate": 3.860807681212042e-05,
"loss": 0.869,
"step": 211
},
{
"epoch": 0.15,
"grad_norm": 0.5793037804275853,
"learning_rate": 3.8591556206970594e-05,
"loss": 0.8476,
"step": 212
},
{
"epoch": 0.15,
"grad_norm": 0.5734924059398303,
"learning_rate": 3.857494171579324e-05,
"loss": 0.8395,
"step": 213
},
{
"epoch": 0.15,
"grad_norm": 0.5782463173020247,
"learning_rate": 3.8558233422490354e-05,
"loss": 0.8833,
"step": 214
},
{
"epoch": 0.15,
"grad_norm": 0.5514466198843485,
"learning_rate": 3.8541431411437616e-05,
"loss": 0.8568,
"step": 215
},
{
"epoch": 0.15,
"grad_norm": 0.589574805698321,
"learning_rate": 3.852453576748397e-05,
"loss": 0.8562,
"step": 216
},
{
"epoch": 0.15,
"grad_norm": 0.600355332970933,
"learning_rate": 3.850754657595122e-05,
"loss": 0.8551,
"step": 217
},
{
"epoch": 0.15,
"grad_norm": 0.5809963853998383,
"learning_rate": 3.8490463922633564e-05,
"loss": 0.8541,
"step": 218
},
{
"epoch": 0.15,
"grad_norm": 0.5477484431760421,
"learning_rate": 3.8473287893797176e-05,
"loss": 0.878,
"step": 219
},
{
"epoch": 0.15,
"grad_norm": 0.5758556521227449,
"learning_rate": 3.845601857617977e-05,
"loss": 0.8843,
"step": 220
},
{
"epoch": 0.15,
"grad_norm": 0.5445022224047396,
"learning_rate": 3.843865605699017e-05,
"loss": 0.9364,
"step": 221
},
{
"epoch": 0.15,
"grad_norm": 0.575606375284161,
"learning_rate": 3.842120042390784e-05,
"loss": 0.8567,
"step": 222
},
{
"epoch": 0.15,
"grad_norm": 0.5344343100612958,
"learning_rate": 3.8403651765082495e-05,
"loss": 0.8586,
"step": 223
},
{
"epoch": 0.16,
"grad_norm": 0.5705693770578576,
"learning_rate": 3.8386010169133596e-05,
"loss": 0.8622,
"step": 224
},
{
"epoch": 0.16,
"grad_norm": 0.584092382473973,
"learning_rate": 3.836827572514994e-05,
"loss": 0.8521,
"step": 225
},
{
"epoch": 0.16,
"grad_norm": 0.5814772517018288,
"learning_rate": 3.835044852268921e-05,
"loss": 0.8737,
"step": 226
},
{
"epoch": 0.16,
"grad_norm": 0.5850028692335194,
"learning_rate": 3.833252865177748e-05,
"loss": 0.8981,
"step": 227
},
{
"epoch": 0.16,
"grad_norm": 0.5710715676691404,
"learning_rate": 3.8314516202908834e-05,
"loss": 0.8631,
"step": 228
},
{
"epoch": 0.16,
"grad_norm": 0.5709568236148692,
"learning_rate": 3.8296411267044834e-05,
"loss": 0.9049,
"step": 229
},
{
"epoch": 0.16,
"grad_norm": 0.5457489092388222,
"learning_rate": 3.8278213935614126e-05,
"loss": 0.8709,
"step": 230
},
{
"epoch": 0.16,
"grad_norm": 0.5604660461247537,
"learning_rate": 3.8259924300511916e-05,
"loss": 0.9032,
"step": 231
},
{
"epoch": 0.16,
"grad_norm": 0.5896779960816604,
"learning_rate": 3.824154245409956e-05,
"loss": 0.8721,
"step": 232
},
{
"epoch": 0.16,
"grad_norm": 0.5735511552158461,
"learning_rate": 3.8223068489204064e-05,
"loss": 0.8594,
"step": 233
},
{
"epoch": 0.16,
"grad_norm": 0.5342615098283692,
"learning_rate": 3.820450249911763e-05,
"loss": 0.8836,
"step": 234
},
{
"epoch": 0.16,
"grad_norm": 0.5721745091701528,
"learning_rate": 3.818584457759717e-05,
"loss": 0.8831,
"step": 235
},
{
"epoch": 0.16,
"grad_norm": 0.5490502406081301,
"learning_rate": 3.816709481886386e-05,
"loss": 0.8584,
"step": 236
},
{
"epoch": 0.16,
"grad_norm": 0.5624387083686403,
"learning_rate": 3.814825331760264e-05,
"loss": 0.9117,
"step": 237
},
{
"epoch": 0.16,
"grad_norm": 0.586647118682912,
"learning_rate": 3.812932016896174e-05,
"loss": 0.8649,
"step": 238
},
{
"epoch": 0.17,
"grad_norm": 0.546619316031144,
"learning_rate": 3.81102954685522e-05,
"loss": 0.857,
"step": 239
},
{
"epoch": 0.17,
"grad_norm": 0.554159248657885,
"learning_rate": 3.8091179312447396e-05,
"loss": 0.8771,
"step": 240
},
{
"epoch": 0.17,
"grad_norm": 0.5910567844482829,
"learning_rate": 3.807197179718255e-05,
"loss": 0.8763,
"step": 241
},
{
"epoch": 0.17,
"grad_norm": 0.5640131079432247,
"learning_rate": 3.805267301975424e-05,
"loss": 0.8435,
"step": 242
},
{
"epoch": 0.17,
"grad_norm": 0.5652973013911139,
"learning_rate": 3.803328307761991e-05,
"loss": 0.8705,
"step": 243
},
{
"epoch": 0.17,
"grad_norm": 0.6040284726594483,
"learning_rate": 3.801380206869738e-05,
"loss": 0.9096,
"step": 244
},
{
"epoch": 0.17,
"grad_norm": 0.5564073830996712,
"learning_rate": 3.799423009136434e-05,
"loss": 0.893,
"step": 245
},
{
"epoch": 0.17,
"grad_norm": 0.5632822504443242,
"learning_rate": 3.7974567244457886e-05,
"loss": 0.8629,
"step": 246
},
{
"epoch": 0.17,
"grad_norm": 0.5281660478958515,
"learning_rate": 3.795481362727398e-05,
"loss": 0.848,
"step": 247
},
{
"epoch": 0.17,
"grad_norm": 0.5723886057136035,
"learning_rate": 3.793496933956699e-05,
"loss": 0.8609,
"step": 248
},
{
"epoch": 0.17,
"grad_norm": 0.5133688114660303,
"learning_rate": 3.7915034481549125e-05,
"loss": 0.8459,
"step": 249
},
{
"epoch": 0.17,
"grad_norm": 0.5422259628859921,
"learning_rate": 3.7895009153889996e-05,
"loss": 0.8817,
"step": 250
},
{
"epoch": 0.17,
"grad_norm": 0.5782809492804619,
"learning_rate": 3.7874893457716086e-05,
"loss": 0.9176,
"step": 251
},
{
"epoch": 0.17,
"grad_norm": 0.6052275512020318,
"learning_rate": 3.7854687494610205e-05,
"loss": 0.8439,
"step": 252
},
{
"epoch": 0.18,
"grad_norm": 0.5813455418636427,
"learning_rate": 3.783439136661103e-05,
"loss": 0.8729,
"step": 253
},
{
"epoch": 0.18,
"grad_norm": 0.6522045090684955,
"learning_rate": 3.7814005176212555e-05,
"loss": 0.8759,
"step": 254
},
{
"epoch": 0.18,
"grad_norm": 0.610556349577213,
"learning_rate": 3.779352902636357e-05,
"loss": 0.8489,
"step": 255
},
{
"epoch": 0.18,
"grad_norm": 0.5353925265369789,
"learning_rate": 3.777296302046719e-05,
"loss": 0.8697,
"step": 256
},
{
"epoch": 0.18,
"grad_norm": 0.6095361381054648,
"learning_rate": 3.775230726238023e-05,
"loss": 0.8579,
"step": 257
},
{
"epoch": 0.18,
"grad_norm": 0.5602956477077344,
"learning_rate": 3.773156185641283e-05,
"loss": 0.8424,
"step": 258
},
{
"epoch": 0.18,
"grad_norm": 0.5951283284024731,
"learning_rate": 3.771072690732779e-05,
"loss": 0.8157,
"step": 259
},
{
"epoch": 0.18,
"grad_norm": 0.5813588666582048,
"learning_rate": 3.7689802520340103e-05,
"loss": 0.867,
"step": 260
},
{
"epoch": 0.18,
"grad_norm": 0.5563409207206621,
"learning_rate": 3.766878880111644e-05,
"loss": 0.8424,
"step": 261
},
{
"epoch": 0.18,
"grad_norm": 0.599593168751392,
"learning_rate": 3.7647685855774565e-05,
"loss": 0.8456,
"step": 262
},
{
"epoch": 0.18,
"grad_norm": 0.5877627516119813,
"learning_rate": 3.7626493790882846e-05,
"loss": 0.8617,
"step": 263
},
{
"epoch": 0.18,
"grad_norm": 0.5928100373050182,
"learning_rate": 3.760521271345969e-05,
"loss": 0.7984,
"step": 264
},
{
"epoch": 0.18,
"grad_norm": 0.5377821832137306,
"learning_rate": 3.758384273097302e-05,
"loss": 0.8614,
"step": 265
},
{
"epoch": 0.18,
"grad_norm": 0.5996443150256331,
"learning_rate": 3.756238395133972e-05,
"loss": 0.8577,
"step": 266
},
{
"epoch": 0.19,
"grad_norm": 0.5629770154387104,
"learning_rate": 3.754083648292508e-05,
"loss": 0.8642,
"step": 267
},
{
"epoch": 0.19,
"grad_norm": 0.5945642738839505,
"learning_rate": 3.751920043454228e-05,
"loss": 0.8234,
"step": 268
},
{
"epoch": 0.19,
"grad_norm": 0.5686895725025655,
"learning_rate": 3.7497475915451806e-05,
"loss": 0.8567,
"step": 269
},
{
"epoch": 0.19,
"grad_norm": 0.589000627682597,
"learning_rate": 3.747566303536094e-05,
"loss": 0.8743,
"step": 270
},
{
"epoch": 0.19,
"grad_norm": 0.6041246541412626,
"learning_rate": 3.745376190442315e-05,
"loss": 0.9001,
"step": 271
},
{
"epoch": 0.19,
"grad_norm": 0.5784853602505518,
"learning_rate": 3.743177263323758e-05,
"loss": 0.8712,
"step": 272
},
{
"epoch": 0.19,
"grad_norm": 0.6060750614142157,
"learning_rate": 3.740969533284848e-05,
"loss": 0.8399,
"step": 273
},
{
"epoch": 0.19,
"grad_norm": 0.6011502613216172,
"learning_rate": 3.7387530114744636e-05,
"loss": 0.9374,
"step": 274
},
{
"epoch": 0.19,
"grad_norm": 0.5840634538197027,
"learning_rate": 3.7365277090858815e-05,
"loss": 0.812,
"step": 275
},
{
"epoch": 0.19,
"grad_norm": 0.5486563276898795,
"learning_rate": 3.734293637356719e-05,
"loss": 0.8299,
"step": 276
},
{
"epoch": 0.19,
"grad_norm": 0.675140329889203,
"learning_rate": 3.732050807568878e-05,
"loss": 0.8745,
"step": 277
},
{
"epoch": 0.19,
"grad_norm": 0.7443305843364,
"learning_rate": 3.729799231048488e-05,
"loss": 0.8449,
"step": 278
},
{
"epoch": 0.19,
"grad_norm": 0.5760709609736591,
"learning_rate": 3.7275389191658506e-05,
"loss": 0.8944,
"step": 279
},
{
"epoch": 0.19,
"grad_norm": 0.575849308645603,
"learning_rate": 3.725269883335378e-05,
"loss": 0.8827,
"step": 280
},
{
"epoch": 0.19,
"grad_norm": 0.5921225301203554,
"learning_rate": 3.722992135015539e-05,
"loss": 0.8607,
"step": 281
},
{
"epoch": 0.2,
"grad_norm": 0.579827928942737,
"learning_rate": 3.7207056857088e-05,
"loss": 0.851,
"step": 282
},
{
"epoch": 0.2,
"grad_norm": 0.5337234551523099,
"learning_rate": 3.718410546961565e-05,
"loss": 0.7846,
"step": 283
},
{
"epoch": 0.2,
"grad_norm": 0.5480153930319217,
"learning_rate": 3.71610673036412e-05,
"loss": 0.8395,
"step": 284
},
{
"epoch": 0.2,
"grad_norm": 0.5437408251204342,
"learning_rate": 3.713794247550577e-05,
"loss": 0.8724,
"step": 285
},
{
"epoch": 0.2,
"grad_norm": 0.5399621848534303,
"learning_rate": 3.711473110198805e-05,
"loss": 0.8235,
"step": 286
},
{
"epoch": 0.2,
"grad_norm": 0.5775047193341142,
"learning_rate": 3.709143330030383e-05,
"loss": 0.8779,
"step": 287
},
{
"epoch": 0.2,
"grad_norm": 0.5502391563566387,
"learning_rate": 3.7068049188105335e-05,
"loss": 0.853,
"step": 288
},
{
"epoch": 0.2,
"grad_norm": 0.5417100317270407,
"learning_rate": 3.704457888348067e-05,
"loss": 0.8642,
"step": 289
},
{
"epoch": 0.2,
"grad_norm": 0.5738297209690543,
"learning_rate": 3.702102250495318e-05,
"loss": 0.8338,
"step": 290
},
{
"epoch": 0.2,
"grad_norm": 0.569231525698671,
"learning_rate": 3.699738017148089e-05,
"loss": 0.8843,
"step": 291
},
{
"epoch": 0.2,
"grad_norm": 0.5692424093827875,
"learning_rate": 3.69736520024559e-05,
"loss": 0.8485,
"step": 292
},
{
"epoch": 0.2,
"grad_norm": 0.6229992275912963,
"learning_rate": 3.694983811770375e-05,
"loss": 0.8949,
"step": 293
},
{
"epoch": 0.2,
"grad_norm": 0.5543944766836694,
"learning_rate": 3.692593863748287e-05,
"loss": 0.8603,
"step": 294
},
{
"epoch": 0.2,
"grad_norm": 0.5835955096858937,
"learning_rate": 3.690195368248391e-05,
"loss": 0.8713,
"step": 295
},
{
"epoch": 0.21,
"grad_norm": 0.5676284482068652,
"learning_rate": 3.687788337382918e-05,
"loss": 0.8617,
"step": 296
},
{
"epoch": 0.21,
"grad_norm": 0.5500650843057224,
"learning_rate": 3.685372783307202e-05,
"loss": 0.8146,
"step": 297
},
{
"epoch": 0.21,
"grad_norm": 0.6020180858367112,
"learning_rate": 3.682948718219617e-05,
"loss": 0.8378,
"step": 298
},
{
"epoch": 0.21,
"grad_norm": 0.5376467635123586,
"learning_rate": 3.6805161543615186e-05,
"loss": 0.8241,
"step": 299
},
{
"epoch": 0.21,
"grad_norm": 0.5710040265140773,
"learning_rate": 3.678075104017181e-05,
"loss": 0.8512,
"step": 300
},
{
"epoch": 0.21,
"grad_norm": 0.5758167887918031,
"learning_rate": 3.675625579513733e-05,
"loss": 0.8202,
"step": 301
},
{
"epoch": 0.21,
"grad_norm": 0.5588154410251217,
"learning_rate": 3.673167593221097e-05,
"loss": 0.8579,
"step": 302
},
{
"epoch": 0.21,
"grad_norm": 0.5816859363343176,
"learning_rate": 3.670701157551928e-05,
"loss": 0.8661,
"step": 303
},
{
"epoch": 0.21,
"grad_norm": 0.5890521424816866,
"learning_rate": 3.668226284961552e-05,
"loss": 0.8976,
"step": 304
},
{
"epoch": 0.21,
"grad_norm": 0.5784433079622883,
"learning_rate": 3.665742987947895e-05,
"loss": 0.8535,
"step": 305
},
{
"epoch": 0.21,
"grad_norm": 0.5812611795009238,
"learning_rate": 3.663251279051431e-05,
"loss": 0.8164,
"step": 306
},
{
"epoch": 0.21,
"grad_norm": 0.5496149522737247,
"learning_rate": 3.660751170855111e-05,
"loss": 0.8722,
"step": 307
},
{
"epoch": 0.21,
"grad_norm": 0.5495870335117299,
"learning_rate": 3.658242675984302e-05,
"loss": 0.8265,
"step": 308
},
{
"epoch": 0.21,
"grad_norm": 0.5626725188608667,
"learning_rate": 3.6557258071067245e-05,
"loss": 0.8396,
"step": 309
},
{
"epoch": 0.21,
"grad_norm": 0.5818878617338686,
"learning_rate": 3.6532005769323857e-05,
"loss": 0.8204,
"step": 310
},
{
"epoch": 0.22,
"grad_norm": 0.5581192722680199,
"learning_rate": 3.6506669982135166e-05,
"loss": 0.8707,
"step": 311
},
{
"epoch": 0.22,
"grad_norm": 0.576935397692145,
"learning_rate": 3.6481250837445086e-05,
"loss": 0.8885,
"step": 312
},
{
"epoch": 0.22,
"grad_norm": 0.5315672877099014,
"learning_rate": 3.645574846361847e-05,
"loss": 0.8617,
"step": 313
},
{
"epoch": 0.22,
"grad_norm": 0.5320851004094815,
"learning_rate": 3.6430162989440495e-05,
"loss": 0.8386,
"step": 314
},
{
"epoch": 0.22,
"grad_norm": 0.5413691860510966,
"learning_rate": 3.640449454411597e-05,
"loss": 0.8645,
"step": 315
},
{
"epoch": 0.22,
"grad_norm": 0.5770415696703134,
"learning_rate": 3.6378743257268696e-05,
"loss": 0.8104,
"step": 316
},
{
"epoch": 0.22,
"grad_norm": 0.5294844381982502,
"learning_rate": 3.635290925894083e-05,
"loss": 0.8615,
"step": 317
},
{
"epoch": 0.22,
"grad_norm": 0.5475529629896969,
"learning_rate": 3.632699267959222e-05,
"loss": 0.8811,
"step": 318
},
{
"epoch": 0.22,
"grad_norm": 1.3785816334906371,
"learning_rate": 3.630099365009973e-05,
"loss": 0.8578,
"step": 319
},
{
"epoch": 0.22,
"grad_norm": 0.5394990743787105,
"learning_rate": 3.627491230175661e-05,
"loss": 0.8292,
"step": 320
},
{
"epoch": 0.22,
"grad_norm": 0.5938174376523856,
"learning_rate": 3.6248748766271787e-05,
"loss": 0.8709,
"step": 321
},
{
"epoch": 0.22,
"grad_norm": 0.5515964818002895,
"learning_rate": 3.622250317576923e-05,
"loss": 0.8607,
"step": 322
},
{
"epoch": 0.22,
"grad_norm": 0.5323592493348941,
"learning_rate": 3.6196175662787326e-05,
"loss": 0.8378,
"step": 323
},
{
"epoch": 0.22,
"grad_norm": 0.5248310904751864,
"learning_rate": 3.6169766360278106e-05,
"loss": 0.8425,
"step": 324
},
{
"epoch": 0.23,
"grad_norm": 0.5515567012847231,
"learning_rate": 3.614327540160667e-05,
"loss": 0.8824,
"step": 325
},
{
"epoch": 0.23,
"grad_norm": 0.5521808108577,
"learning_rate": 3.6116702920550445e-05,
"loss": 0.8392,
"step": 326
},
{
"epoch": 0.23,
"grad_norm": 0.5477475736709885,
"learning_rate": 3.609004905129857e-05,
"loss": 0.8555,
"step": 327
},
{
"epoch": 0.23,
"grad_norm": 0.5482567445624992,
"learning_rate": 3.606331392845117e-05,
"loss": 0.8486,
"step": 328
},
{
"epoch": 0.23,
"grad_norm": 0.5788070521309714,
"learning_rate": 3.6036497687018704e-05,
"loss": 0.8747,
"step": 329
},
{
"epoch": 0.23,
"grad_norm": 0.548762381214841,
"learning_rate": 3.6009600462421275e-05,
"loss": 0.8658,
"step": 330
},
{
"epoch": 0.23,
"grad_norm": 0.5433903409541285,
"learning_rate": 3.598262239048794e-05,
"loss": 0.8442,
"step": 331
},
{
"epoch": 0.23,
"grad_norm": 0.5422166394900134,
"learning_rate": 3.5955563607456025e-05,
"loss": 0.8399,
"step": 332
},
{
"epoch": 0.23,
"grad_norm": 0.5533496469554264,
"learning_rate": 3.592842424997045e-05,
"loss": 0.8688,
"step": 333
},
{
"epoch": 0.23,
"grad_norm": 0.570209671233707,
"learning_rate": 3.5901204455083025e-05,
"loss": 0.8677,
"step": 334
},
{
"epoch": 0.23,
"grad_norm": 0.5315602596051133,
"learning_rate": 3.5873904360251766e-05,
"loss": 0.8083,
"step": 335
},
{
"epoch": 0.23,
"grad_norm": 0.5674097470893089,
"learning_rate": 3.58465241033402e-05,
"loss": 0.8267,
"step": 336
},
{
"epoch": 0.23,
"grad_norm": 0.5599451864877204,
"learning_rate": 3.5819063822616656e-05,
"loss": 0.8533,
"step": 337
},
{
"epoch": 0.23,
"grad_norm": 0.5384280492479666,
"learning_rate": 3.579152365675359e-05,
"loss": 0.7963,
"step": 338
},
{
"epoch": 0.24,
"grad_norm": 0.5588133446291048,
"learning_rate": 3.576390374482687e-05,
"loss": 0.8361,
"step": 339
},
{
"epoch": 0.24,
"grad_norm": 0.5447503608252781,
"learning_rate": 3.573620422631507e-05,
"loss": 0.8318,
"step": 340
},
{
"epoch": 0.24,
"grad_norm": 0.548591514962669,
"learning_rate": 3.570842524109878e-05,
"loss": 0.7841,
"step": 341
},
{
"epoch": 0.24,
"grad_norm": 0.5282255740981779,
"learning_rate": 3.5680566929459884e-05,
"loss": 0.8654,
"step": 342
},
{
"epoch": 0.24,
"grad_norm": 0.5386887265337448,
"learning_rate": 3.565262943208085e-05,
"loss": 0.8252,
"step": 343
},
{
"epoch": 0.24,
"grad_norm": 0.5791779165928393,
"learning_rate": 3.562461289004406e-05,
"loss": 0.8563,
"step": 344
},
{
"epoch": 0.24,
"grad_norm": 0.5451264432109418,
"learning_rate": 3.559651744483102e-05,
"loss": 0.8756,
"step": 345
},
{
"epoch": 0.24,
"grad_norm": 0.5534496847824513,
"learning_rate": 3.556834323832174e-05,
"loss": 0.8272,
"step": 346
},
{
"epoch": 0.24,
"grad_norm": 0.5722621407101754,
"learning_rate": 3.5540090412793926e-05,
"loss": 0.8307,
"step": 347
},
{
"epoch": 0.24,
"grad_norm": 0.5725890577206151,
"learning_rate": 3.551175911092234e-05,
"loss": 0.8428,
"step": 348
},
{
"epoch": 0.24,
"grad_norm": 0.551561545305621,
"learning_rate": 3.548334947577802e-05,
"loss": 0.8621,
"step": 349
},
{
"epoch": 0.24,
"grad_norm": 0.5543592482366498,
"learning_rate": 3.545486165082759e-05,
"loss": 0.8759,
"step": 350
},
{
"epoch": 0.24,
"grad_norm": 0.528375924391524,
"learning_rate": 3.5426295779932525e-05,
"loss": 0.8276,
"step": 351
},
{
"epoch": 0.24,
"grad_norm": 0.5376783376931934,
"learning_rate": 3.539765200734842e-05,
"loss": 0.8237,
"step": 352
},
{
"epoch": 0.24,
"grad_norm": 0.5391466176744784,
"learning_rate": 3.53689304777243e-05,
"loss": 0.8656,
"step": 353
},
{
"epoch": 0.25,
"grad_norm": 0.537273035426167,
"learning_rate": 3.53401313361018e-05,
"loss": 0.8288,
"step": 354
},
{
"epoch": 0.25,
"grad_norm": 0.5656744495444984,
"learning_rate": 3.531125472791456e-05,
"loss": 0.7939,
"step": 355
},
{
"epoch": 0.25,
"grad_norm": 0.5645016228375516,
"learning_rate": 3.528230079898734e-05,
"loss": 0.8172,
"step": 356
},
{
"epoch": 0.25,
"grad_norm": 0.5548696736761473,
"learning_rate": 3.525326969553544e-05,
"loss": 0.868,
"step": 357
},
{
"epoch": 0.25,
"grad_norm": 0.563645096781112,
"learning_rate": 3.522416156416385e-05,
"loss": 0.838,
"step": 358
},
{
"epoch": 0.25,
"grad_norm": 0.5697975329304105,
"learning_rate": 3.5194976551866535e-05,
"loss": 0.8725,
"step": 359
},
{
"epoch": 0.25,
"grad_norm": 0.5850516326721115,
"learning_rate": 3.516571480602573e-05,
"loss": 0.811,
"step": 360
},
{
"epoch": 0.25,
"grad_norm": 0.5698074147880662,
"learning_rate": 3.513637647441115e-05,
"loss": 0.8524,
"step": 361
},
{
"epoch": 0.25,
"grad_norm": 0.5643168822697231,
"learning_rate": 3.510696170517927e-05,
"loss": 0.8173,
"step": 362
},
{
"epoch": 0.25,
"grad_norm": 0.5804531907202891,
"learning_rate": 3.507747064687257e-05,
"loss": 0.8886,
"step": 363
},
{
"epoch": 0.25,
"grad_norm": 0.5462924586446797,
"learning_rate": 3.5047903448418777e-05,
"loss": 0.8531,
"step": 364
},
{
"epoch": 0.25,
"grad_norm": 0.5501503602135707,
"learning_rate": 3.5018260259130134e-05,
"loss": 0.8487,
"step": 365
},
{
"epoch": 0.25,
"grad_norm": 0.5371009633397992,
"learning_rate": 3.498854122870263e-05,
"loss": 0.8492,
"step": 366
},
{
"epoch": 0.25,
"grad_norm": 0.5398426156901552,
"learning_rate": 3.495874650721521e-05,
"loss": 0.8772,
"step": 367
},
{
"epoch": 0.26,
"grad_norm": 0.5366881177110137,
"learning_rate": 3.492887624512912e-05,
"loss": 0.8122,
"step": 368
},
{
"epoch": 0.26,
"grad_norm": 0.5438392569141078,
"learning_rate": 3.4898930593287024e-05,
"loss": 0.845,
"step": 369
},
{
"epoch": 0.26,
"grad_norm": 0.587756668057189,
"learning_rate": 3.486890970291232e-05,
"loss": 0.8086,
"step": 370
},
{
"epoch": 0.26,
"grad_norm": 0.5582452811434767,
"learning_rate": 3.483881372560837e-05,
"loss": 0.8307,
"step": 371
},
{
"epoch": 0.26,
"grad_norm": 0.5340831359044327,
"learning_rate": 3.480864281335769e-05,
"loss": 0.8425,
"step": 372
},
{
"epoch": 0.26,
"grad_norm": 0.5654121117366306,
"learning_rate": 3.4778397118521225e-05,
"loss": 0.8175,
"step": 373
},
{
"epoch": 0.26,
"grad_norm": 0.541775348450145,
"learning_rate": 3.474807679383758e-05,
"loss": 0.8481,
"step": 374
},
{
"epoch": 0.26,
"grad_norm": 0.5196740998309944,
"learning_rate": 3.471768199242221e-05,
"loss": 0.8367,
"step": 375
},
{
"epoch": 0.26,
"grad_norm": 0.5334096416986893,
"learning_rate": 3.4687212867766696e-05,
"loss": 0.8345,
"step": 376
},
{
"epoch": 0.26,
"grad_norm": 0.5588051177170905,
"learning_rate": 3.4656669573737934e-05,
"loss": 0.8466,
"step": 377
},
{
"epoch": 0.26,
"grad_norm": 0.5328056650214266,
"learning_rate": 3.462605226457737e-05,
"loss": 0.8804,
"step": 378
},
{
"epoch": 0.26,
"grad_norm": 0.5460264679790893,
"learning_rate": 3.459536109490022e-05,
"loss": 0.8119,
"step": 379
},
{
"epoch": 0.26,
"grad_norm": 0.5361215975394485,
"learning_rate": 3.456459621969469e-05,
"loss": 0.859,
"step": 380
},
{
"epoch": 0.26,
"grad_norm": 0.5452141217380486,
"learning_rate": 3.4533757794321206e-05,
"loss": 0.826,
"step": 381
},
{
"epoch": 0.26,
"grad_norm": 0.5691208968203204,
"learning_rate": 3.450284597451161e-05,
"loss": 0.841,
"step": 382
},
{
"epoch": 0.27,
"grad_norm": 0.5690091776127425,
"learning_rate": 3.447186091636836e-05,
"loss": 0.8459,
"step": 383
},
{
"epoch": 0.27,
"grad_norm": 0.5075661516482275,
"learning_rate": 3.4440802776363805e-05,
"loss": 0.869,
"step": 384
},
{
"epoch": 0.27,
"grad_norm": 0.562219662654622,
"learning_rate": 3.440967171133932e-05,
"loss": 0.8327,
"step": 385
},
{
"epoch": 0.27,
"grad_norm": 0.5437940614861732,
"learning_rate": 3.437846787850454e-05,
"loss": 0.8447,
"step": 386
},
{
"epoch": 0.27,
"grad_norm": 0.5118074091704278,
"learning_rate": 3.4347191435436596e-05,
"loss": 0.8445,
"step": 387
},
{
"epoch": 0.27,
"grad_norm": 0.5230355566327006,
"learning_rate": 3.431584254007929e-05,
"loss": 0.8419,
"step": 388
},
{
"epoch": 0.27,
"grad_norm": 0.5239579003880459,
"learning_rate": 3.42844213507423e-05,
"loss": 0.8367,
"step": 389
},
{
"epoch": 0.27,
"grad_norm": 0.5326618577835538,
"learning_rate": 3.4252928026100374e-05,
"loss": 0.8405,
"step": 390
},
{
"epoch": 0.27,
"grad_norm": 0.556400075642037,
"learning_rate": 3.422136272519254e-05,
"loss": 0.8439,
"step": 391
},
{
"epoch": 0.27,
"grad_norm": 0.5404609138196239,
"learning_rate": 3.418972560742133e-05,
"loss": 0.8049,
"step": 392
},
{
"epoch": 0.27,
"grad_norm": 0.5497609575459946,
"learning_rate": 3.415801683255191e-05,
"loss": 0.8578,
"step": 393
},
{
"epoch": 0.27,
"grad_norm": 0.5249695649344484,
"learning_rate": 3.412623656071131e-05,
"loss": 0.8742,
"step": 394
},
{
"epoch": 0.27,
"grad_norm": 0.49586671025222956,
"learning_rate": 3.409438495238765e-05,
"loss": 0.8311,
"step": 395
},
{
"epoch": 0.27,
"grad_norm": 0.5445006042617271,
"learning_rate": 3.4062462168429267e-05,
"loss": 0.8431,
"step": 396
},
{
"epoch": 0.28,
"grad_norm": 0.5272689439904857,
"learning_rate": 3.403046837004392e-05,
"loss": 0.8395,
"step": 397
},
{
"epoch": 0.28,
"grad_norm": 0.5264175060654747,
"learning_rate": 3.3998403718798005e-05,
"loss": 0.8349,
"step": 398
},
{
"epoch": 0.28,
"grad_norm": 0.4830957422147818,
"learning_rate": 3.396626837661572e-05,
"loss": 0.833,
"step": 399
},
{
"epoch": 0.28,
"grad_norm": 0.5252208224976386,
"learning_rate": 3.393406250577824e-05,
"loss": 0.8319,
"step": 400
},
{
"epoch": 0.28,
"grad_norm": 0.5475531322065742,
"learning_rate": 3.390178626892291e-05,
"loss": 0.8338,
"step": 401
},
{
"epoch": 0.28,
"grad_norm": 0.5369392808493976,
"learning_rate": 3.38694398290424e-05,
"loss": 0.808,
"step": 402
},
{
"epoch": 0.28,
"grad_norm": 0.5205878015974401,
"learning_rate": 3.3837023349483935e-05,
"loss": 0.8496,
"step": 403
},
{
"epoch": 0.28,
"grad_norm": 0.5208533805845469,
"learning_rate": 3.38045369939484e-05,
"loss": 0.8065,
"step": 404
},
{
"epoch": 0.28,
"grad_norm": 0.5471813688799694,
"learning_rate": 3.377198092648957e-05,
"loss": 0.8748,
"step": 405
},
{
"epoch": 0.28,
"grad_norm": 0.5381694662008653,
"learning_rate": 3.373935531151326e-05,
"loss": 0.8612,
"step": 406
},
{
"epoch": 0.28,
"grad_norm": 0.5296110494890822,
"learning_rate": 3.370666031377648e-05,
"loss": 0.8508,
"step": 407
},
{
"epoch": 0.28,
"grad_norm": 0.5088712826428127,
"learning_rate": 3.3673896098386636e-05,
"loss": 0.8453,
"step": 408
},
{
"epoch": 0.28,
"grad_norm": 0.5434740308198905,
"learning_rate": 3.364106283080067e-05,
"loss": 0.8344,
"step": 409
},
{
"epoch": 0.28,
"grad_norm": 0.5333048134984156,
"learning_rate": 3.3608160676824216e-05,
"loss": 0.8589,
"step": 410
},
{
"epoch": 0.28,
"grad_norm": 0.5260719983318904,
"learning_rate": 3.3575189802610806e-05,
"loss": 0.8219,
"step": 411
},
{
"epoch": 0.29,
"grad_norm": 0.5414185201485092,
"learning_rate": 3.3542150374661e-05,
"loss": 0.8201,
"step": 412
},
{
"epoch": 0.29,
"grad_norm": 0.5434304504170734,
"learning_rate": 3.350904255982154e-05,
"loss": 0.832,
"step": 413
},
{
"epoch": 0.29,
"grad_norm": 0.5952290656631171,
"learning_rate": 3.347586652528452e-05,
"loss": 0.7939,
"step": 414
},
{
"epoch": 0.29,
"grad_norm": 0.5008049102829213,
"learning_rate": 3.344262243858652e-05,
"loss": 0.8186,
"step": 415
},
{
"epoch": 0.29,
"grad_norm": 0.5345443068848005,
"learning_rate": 3.3409310467607824e-05,
"loss": 0.8018,
"step": 416
},
{
"epoch": 0.29,
"grad_norm": 0.5455440758964238,
"learning_rate": 3.337593078057149e-05,
"loss": 0.8496,
"step": 417
},
{
"epoch": 0.29,
"grad_norm": 0.5580175724663092,
"learning_rate": 3.334248354604255e-05,
"loss": 0.843,
"step": 418
},
{
"epoch": 0.29,
"grad_norm": 0.5057216865859819,
"learning_rate": 3.330896893292714e-05,
"loss": 0.8494,
"step": 419
},
{
"epoch": 0.29,
"grad_norm": 0.5417030563006309,
"learning_rate": 3.327538711047165e-05,
"loss": 0.8082,
"step": 420
},
{
"epoch": 0.29,
"grad_norm": 0.5055763990507414,
"learning_rate": 3.324173824826189e-05,
"loss": 0.8199,
"step": 421
},
{
"epoch": 0.29,
"grad_norm": 0.5227060886384448,
"learning_rate": 3.3208022516222195e-05,
"loss": 0.8072,
"step": 422
},
{
"epoch": 0.29,
"grad_norm": 0.5226803221908503,
"learning_rate": 3.317424008461461e-05,
"loss": 0.8474,
"step": 423
},
{
"epoch": 0.29,
"grad_norm": 0.5678991556193892,
"learning_rate": 3.314039112403799e-05,
"loss": 0.8538,
"step": 424
},
{
"epoch": 0.29,
"grad_norm": 0.5043828200363204,
"learning_rate": 3.310647580542715e-05,
"loss": 0.8094,
"step": 425
},
{
"epoch": 0.3,
"grad_norm": 0.5515593265965845,
"learning_rate": 3.307249430005203e-05,
"loss": 0.8489,
"step": 426
},
{
"epoch": 0.3,
"grad_norm": 0.5444229675570493,
"learning_rate": 3.303844677951681e-05,
"loss": 0.8168,
"step": 427
},
{
"epoch": 0.3,
"grad_norm": 0.5107869221594872,
"learning_rate": 3.300433341575901e-05,
"loss": 0.8582,
"step": 428
},
{
"epoch": 0.3,
"grad_norm": 0.49823655694798175,
"learning_rate": 3.297015438104868e-05,
"loss": 0.827,
"step": 429
},
{
"epoch": 0.3,
"grad_norm": 0.5316611672099181,
"learning_rate": 3.2935909847987515e-05,
"loss": 0.8285,
"step": 430
},
{
"epoch": 0.3,
"grad_norm": 0.5093065689124268,
"learning_rate": 3.2901599989507935e-05,
"loss": 0.8335,
"step": 431
},
{
"epoch": 0.3,
"grad_norm": 0.5289608610594911,
"learning_rate": 3.286722497887227e-05,
"loss": 0.8201,
"step": 432
},
{
"epoch": 0.3,
"grad_norm": 0.5508101020379665,
"learning_rate": 3.283278498967187e-05,
"loss": 0.7845,
"step": 433
},
{
"epoch": 0.3,
"grad_norm": 0.5313367599028019,
"learning_rate": 3.279828019582621e-05,
"loss": 0.8653,
"step": 434
},
{
"epoch": 0.3,
"grad_norm": 0.5597187998244668,
"learning_rate": 3.276371077158203e-05,
"loss": 0.8654,
"step": 435
},
{
"epoch": 0.3,
"grad_norm": 0.5467250375065841,
"learning_rate": 3.272907689151245e-05,
"loss": 0.8605,
"step": 436
},
{
"epoch": 0.3,
"grad_norm": 0.5593802984164884,
"learning_rate": 3.2694378730516074e-05,
"loss": 0.8095,
"step": 437
},
{
"epoch": 0.3,
"grad_norm": 0.55568620290336,
"learning_rate": 3.2659616463816145e-05,
"loss": 0.843,
"step": 438
},
{
"epoch": 0.3,
"grad_norm": 0.5165735295161088,
"learning_rate": 3.2624790266959603e-05,
"loss": 0.816,
"step": 439
},
{
"epoch": 0.31,
"grad_norm": 0.5503905979647672,
"learning_rate": 3.2589900315816266e-05,
"loss": 0.8336,
"step": 440
},
{
"epoch": 0.31,
"grad_norm": 0.5182867036161137,
"learning_rate": 3.255494678657787e-05,
"loss": 0.8127,
"step": 441
},
{
"epoch": 0.31,
"grad_norm": 0.5135111467705474,
"learning_rate": 3.251992985575725e-05,
"loss": 0.8151,
"step": 442
},
{
"epoch": 0.31,
"grad_norm": 0.5264414987718659,
"learning_rate": 3.24848497001874e-05,
"loss": 0.8617,
"step": 443
},
{
"epoch": 0.31,
"grad_norm": 0.5394560686148242,
"learning_rate": 3.244970649702058e-05,
"loss": 0.8373,
"step": 444
},
{
"epoch": 0.31,
"grad_norm": 0.5409269737932116,
"learning_rate": 3.241450042372746e-05,
"loss": 0.8702,
"step": 445
},
{
"epoch": 0.31,
"grad_norm": 0.5456122419037231,
"learning_rate": 3.237923165809619e-05,
"loss": 0.8375,
"step": 446
},
{
"epoch": 0.31,
"grad_norm": 0.5163626938163164,
"learning_rate": 3.2343900378231504e-05,
"loss": 0.7962,
"step": 447
},
{
"epoch": 0.31,
"grad_norm": 0.5395372970573875,
"learning_rate": 3.230850676255384e-05,
"loss": 0.8384,
"step": 448
},
{
"epoch": 0.31,
"grad_norm": 0.5234211346103035,
"learning_rate": 3.227305098979842e-05,
"loss": 0.8268,
"step": 449
},
{
"epoch": 0.31,
"grad_norm": 0.5347894180906457,
"learning_rate": 3.223753323901435e-05,
"loss": 0.8262,
"step": 450
},
{
"epoch": 0.31,
"grad_norm": 0.5151391339225782,
"learning_rate": 3.2201953689563755e-05,
"loss": 0.8434,
"step": 451
},
{
"epoch": 0.31,
"grad_norm": 0.4999906317186312,
"learning_rate": 3.2166312521120775e-05,
"loss": 0.8128,
"step": 452
},
{
"epoch": 0.31,
"grad_norm": 0.5634944415811667,
"learning_rate": 3.213060991367079e-05,
"loss": 0.8383,
"step": 453
},
{
"epoch": 0.31,
"grad_norm": 0.5230459481987463,
"learning_rate": 3.209484604750939e-05,
"loss": 0.8558,
"step": 454
},
{
"epoch": 0.32,
"grad_norm": 0.567270203502747,
"learning_rate": 3.2059021103241556e-05,
"loss": 0.8296,
"step": 455
},
{
"epoch": 0.32,
"grad_norm": 0.625792417410671,
"learning_rate": 3.202313526178067e-05,
"loss": 0.8219,
"step": 456
},
{
"epoch": 0.32,
"grad_norm": 0.4922441752650774,
"learning_rate": 3.198718870434768e-05,
"loss": 0.8216,
"step": 457
},
{
"epoch": 0.32,
"grad_norm": 0.5504573210911217,
"learning_rate": 3.195118161247011e-05,
"loss": 0.8337,
"step": 458
},
{
"epoch": 0.32,
"grad_norm": 0.5280374651527217,
"learning_rate": 3.1915114167981216e-05,
"loss": 0.8107,
"step": 459
},
{
"epoch": 0.32,
"grad_norm": 0.5714654386527501,
"learning_rate": 3.1878986553019e-05,
"loss": 0.776,
"step": 460
},
{
"epoch": 0.32,
"grad_norm": 0.5287130813729668,
"learning_rate": 3.184279895002533e-05,
"loss": 0.8504,
"step": 461
},
{
"epoch": 0.32,
"grad_norm": 0.5106143971618994,
"learning_rate": 3.1806551541745e-05,
"loss": 0.8381,
"step": 462
},
{
"epoch": 0.32,
"grad_norm": 0.5624115388509457,
"learning_rate": 3.177024451122485e-05,
"loss": 0.8562,
"step": 463
},
{
"epoch": 0.32,
"grad_norm": 0.5645114930489444,
"learning_rate": 3.1733878041812756e-05,
"loss": 0.8675,
"step": 464
},
{
"epoch": 0.32,
"grad_norm": 0.5420054779002693,
"learning_rate": 3.169745231715681e-05,
"loss": 0.8874,
"step": 465
},
{
"epoch": 0.32,
"grad_norm": 0.5168530183061805,
"learning_rate": 3.16609675212043e-05,
"loss": 0.8265,
"step": 466
},
{
"epoch": 0.32,
"grad_norm": 0.5515072751965501,
"learning_rate": 3.1624423838200824e-05,
"loss": 0.8211,
"step": 467
},
{
"epoch": 0.32,
"grad_norm": 0.5203422982938014,
"learning_rate": 3.1587821452689386e-05,
"loss": 0.8053,
"step": 468
},
{
"epoch": 0.33,
"grad_norm": 0.49866336382794657,
"learning_rate": 3.155116054950939e-05,
"loss": 0.8013,
"step": 469
},
{
"epoch": 0.33,
"grad_norm": 0.5397719275362368,
"learning_rate": 3.151444131379579e-05,
"loss": 0.8124,
"step": 470
},
{
"epoch": 0.33,
"grad_norm": 0.5371648501586703,
"learning_rate": 3.14776639309781e-05,
"loss": 0.8403,
"step": 471
},
{
"epoch": 0.33,
"grad_norm": 0.5212694497882318,
"learning_rate": 3.144082858677944e-05,
"loss": 0.8323,
"step": 472
},
{
"epoch": 0.33,
"grad_norm": 0.5621834860657204,
"learning_rate": 3.140393546721569e-05,
"loss": 0.8503,
"step": 473
},
{
"epoch": 0.33,
"grad_norm": 0.5147544021776126,
"learning_rate": 3.136698475859444e-05,
"loss": 0.8652,
"step": 474
},
{
"epoch": 0.33,
"grad_norm": 0.5512988248226239,
"learning_rate": 3.132997664751415e-05,
"loss": 0.8253,
"step": 475
},
{
"epoch": 0.33,
"grad_norm": 0.5470920395690413,
"learning_rate": 3.1292911320863104e-05,
"loss": 0.8345,
"step": 476
},
{
"epoch": 0.33,
"grad_norm": 0.5135254587724415,
"learning_rate": 3.125578896581856e-05,
"loss": 0.8264,
"step": 477
},
{
"epoch": 0.33,
"grad_norm": 0.5190959950123696,
"learning_rate": 3.121860976984575e-05,
"loss": 0.8199,
"step": 478
},
{
"epoch": 0.33,
"grad_norm": 0.5008160906149964,
"learning_rate": 3.118137392069696e-05,
"loss": 0.7958,
"step": 479
},
{
"epoch": 0.33,
"grad_norm": 0.5371588705869763,
"learning_rate": 3.114408160641055e-05,
"loss": 0.8536,
"step": 480
},
{
"epoch": 0.33,
"grad_norm": 0.48776902758978885,
"learning_rate": 3.110673301531004e-05,
"loss": 0.8278,
"step": 481
},
{
"epoch": 0.33,
"grad_norm": 0.5490355144904475,
"learning_rate": 3.106932833600314e-05,
"loss": 0.8193,
"step": 482
},
{
"epoch": 0.33,
"grad_norm": 0.5032650545751033,
"learning_rate": 3.1031867757380805e-05,
"loss": 0.8102,
"step": 483
},
{
"epoch": 0.34,
"grad_norm": 0.5242753433957865,
"learning_rate": 3.099435146861627e-05,
"loss": 0.8136,
"step": 484
},
{
"epoch": 0.34,
"grad_norm": 0.5603553995646774,
"learning_rate": 3.095677965916411e-05,
"loss": 0.791,
"step": 485
},
{
"epoch": 0.34,
"grad_norm": 0.5317469797874985,
"learning_rate": 3.091915251875928e-05,
"loss": 0.8273,
"step": 486
},
{
"epoch": 0.34,
"grad_norm": 0.5220115636618828,
"learning_rate": 3.088147023741613e-05,
"loss": 0.7798,
"step": 487
},
{
"epoch": 0.34,
"grad_norm": 0.5267291167837851,
"learning_rate": 3.084373300542748e-05,
"loss": 0.8057,
"step": 488
},
{
"epoch": 0.34,
"grad_norm": 0.5106161464123965,
"learning_rate": 3.080594101336367e-05,
"loss": 0.7887,
"step": 489
},
{
"epoch": 0.34,
"grad_norm": 0.5159824792721203,
"learning_rate": 3.076809445207154e-05,
"loss": 0.8347,
"step": 490
},
{
"epoch": 0.34,
"grad_norm": 0.521210220783643,
"learning_rate": 3.0730193512673515e-05,
"loss": 0.8372,
"step": 491
},
{
"epoch": 0.34,
"grad_norm": 0.5059920957541553,
"learning_rate": 3.069223838656663e-05,
"loss": 0.8448,
"step": 492
},
{
"epoch": 0.34,
"grad_norm": 0.5208115856602449,
"learning_rate": 3.065422926542154e-05,
"loss": 0.821,
"step": 493
},
{
"epoch": 0.34,
"grad_norm": 0.5308265214871177,
"learning_rate": 3.06161663411816e-05,
"loss": 0.7916,
"step": 494
},
{
"epoch": 0.34,
"grad_norm": 0.5078892810692175,
"learning_rate": 3.057804980606185e-05,
"loss": 0.8205,
"step": 495
},
{
"epoch": 0.34,
"grad_norm": 0.5266654116543316,
"learning_rate": 3.053987985254806e-05,
"loss": 0.7994,
"step": 496
},
{
"epoch": 0.34,
"grad_norm": 0.5186149407494495,
"learning_rate": 3.0501656673395756e-05,
"loss": 0.8314,
"step": 497
},
{
"epoch": 0.35,
"grad_norm": 0.5055401177759212,
"learning_rate": 3.0463380461629266e-05,
"loss": 0.8243,
"step": 498
},
{
"epoch": 0.35,
"grad_norm": 0.5182292185573004,
"learning_rate": 3.0425051410540712e-05,
"loss": 0.7866,
"step": 499
},
{
"epoch": 0.35,
"grad_norm": 0.5220553352637599,
"learning_rate": 3.0386669713689057e-05,
"loss": 0.8267,
"step": 500
},
{
"epoch": 0.35,
"grad_norm": 0.5032655265953053,
"learning_rate": 3.0348235564899125e-05,
"loss": 0.807,
"step": 501
},
{
"epoch": 0.35,
"grad_norm": 0.5040743390244277,
"learning_rate": 3.0309749158260618e-05,
"loss": 0.8535,
"step": 502
},
{
"epoch": 0.35,
"grad_norm": 0.5388617893332508,
"learning_rate": 3.0271210688127123e-05,
"loss": 0.852,
"step": 503
},
{
"epoch": 0.35,
"grad_norm": 0.49625579381659835,
"learning_rate": 3.0232620349115163e-05,
"loss": 0.787,
"step": 504
},
{
"epoch": 0.35,
"grad_norm": 0.5145653944344013,
"learning_rate": 3.0193978336103188e-05,
"loss": 0.7988,
"step": 505
},
{
"epoch": 0.35,
"grad_norm": 0.5457265636145077,
"learning_rate": 3.015528484423059e-05,
"loss": 0.8027,
"step": 506
},
{
"epoch": 0.35,
"grad_norm": 0.545077014351,
"learning_rate": 3.011654006889674e-05,
"loss": 0.8171,
"step": 507
},
{
"epoch": 0.35,
"grad_norm": 0.4975551772256365,
"learning_rate": 3.0077744205759965e-05,
"loss": 0.8088,
"step": 508
},
{
"epoch": 0.35,
"grad_norm": 0.5430697403327615,
"learning_rate": 3.0038897450736612e-05,
"loss": 0.8157,
"step": 509
},
{
"epoch": 0.35,
"grad_norm": 0.5356810735902855,
"learning_rate": 3.0000000000000004e-05,
"loss": 0.821,
"step": 510
},
{
"epoch": 0.35,
"grad_norm": 0.49005392869617714,
"learning_rate": 2.9961052049979476e-05,
"loss": 0.7813,
"step": 511
},
{
"epoch": 0.35,
"grad_norm": 0.4971358386815748,
"learning_rate": 2.9922053797359406e-05,
"loss": 0.8428,
"step": 512
},
{
"epoch": 0.36,
"grad_norm": 0.5070601820286003,
"learning_rate": 2.9883005439078158e-05,
"loss": 0.8308,
"step": 513
},
{
"epoch": 0.36,
"grad_norm": 0.5103413315670275,
"learning_rate": 2.9843907172327155e-05,
"loss": 0.7898,
"step": 514
},
{
"epoch": 0.36,
"grad_norm": 0.49241190236289795,
"learning_rate": 2.980475919454984e-05,
"loss": 0.8127,
"step": 515
},
{
"epoch": 0.36,
"grad_norm": 0.5081068313804096,
"learning_rate": 2.9765561703440688e-05,
"loss": 0.836,
"step": 516
},
{
"epoch": 0.36,
"grad_norm": 0.497483541132421,
"learning_rate": 2.9726314896944247e-05,
"loss": 0.7771,
"step": 517
},
{
"epoch": 0.36,
"grad_norm": 0.5187189836633339,
"learning_rate": 2.9687018973254055e-05,
"loss": 0.8044,
"step": 518
},
{
"epoch": 0.36,
"grad_norm": 0.531229605790404,
"learning_rate": 2.964767413081173e-05,
"loss": 0.8238,
"step": 519
},
{
"epoch": 0.36,
"grad_norm": 0.5111531079054741,
"learning_rate": 2.96082805683059e-05,
"loss": 0.817,
"step": 520
},
{
"epoch": 0.36,
"grad_norm": 0.4832153184382499,
"learning_rate": 2.956883848467123e-05,
"loss": 0.7997,
"step": 521
},
{
"epoch": 0.36,
"grad_norm": 0.5249882558412495,
"learning_rate": 2.9529348079087436e-05,
"loss": 0.8046,
"step": 522
},
{
"epoch": 0.36,
"grad_norm": 0.5128381662850762,
"learning_rate": 2.9489809550978217e-05,
"loss": 0.8143,
"step": 523
},
{
"epoch": 0.36,
"grad_norm": 0.5005712121140015,
"learning_rate": 2.945022310001032e-05,
"loss": 0.8156,
"step": 524
},
{
"epoch": 0.36,
"grad_norm": 0.5113505143134516,
"learning_rate": 2.9410588926092495e-05,
"loss": 0.8119,
"step": 525
},
{
"epoch": 0.36,
"grad_norm": 0.49791880184921655,
"learning_rate": 2.937090722937446e-05,
"loss": 0.7933,
"step": 526
},
{
"epoch": 0.37,
"grad_norm": 0.5122790734379506,
"learning_rate": 2.9331178210245962e-05,
"loss": 0.8062,
"step": 527
},
{
"epoch": 0.37,
"grad_norm": 0.5122028120096329,
"learning_rate": 2.9291402069335695e-05,
"loss": 0.7438,
"step": 528
},
{
"epoch": 0.37,
"grad_norm": 0.4958383111586649,
"learning_rate": 2.925157900751032e-05,
"loss": 0.811,
"step": 529
},
{
"epoch": 0.37,
"grad_norm": 0.5714480673874728,
"learning_rate": 2.921170922587346e-05,
"loss": 0.7548,
"step": 530
},
{
"epoch": 0.37,
"grad_norm": 0.5287204723694635,
"learning_rate": 2.9171792925764638e-05,
"loss": 0.8258,
"step": 531
},
{
"epoch": 0.37,
"grad_norm": 0.5615251876277217,
"learning_rate": 2.9131830308758325e-05,
"loss": 0.8357,
"step": 532
},
{
"epoch": 0.37,
"grad_norm": 0.5841799692316811,
"learning_rate": 2.909182157666287e-05,
"loss": 0.805,
"step": 533
},
{
"epoch": 0.37,
"grad_norm": 0.5353663507295551,
"learning_rate": 2.9051766931519502e-05,
"loss": 0.8174,
"step": 534
},
{
"epoch": 0.37,
"grad_norm": 0.5417082748882768,
"learning_rate": 2.9011666575601323e-05,
"loss": 0.8099,
"step": 535
},
{
"epoch": 0.37,
"grad_norm": 0.48730982083744984,
"learning_rate": 2.897152071141225e-05,
"loss": 0.7875,
"step": 536
},
{
"epoch": 0.37,
"grad_norm": 0.5076624102569184,
"learning_rate": 2.8931329541686018e-05,
"loss": 0.8107,
"step": 537
},
{
"epoch": 0.37,
"grad_norm": 0.5851402156065318,
"learning_rate": 2.8891093269385164e-05,
"loss": 0.821,
"step": 538
},
{
"epoch": 0.37,
"grad_norm": 0.4875741304026489,
"learning_rate": 2.885081209769998e-05,
"loss": 0.8266,
"step": 539
},
{
"epoch": 0.37,
"grad_norm": 0.5160358060329432,
"learning_rate": 2.8810486230047497e-05,
"loss": 0.8204,
"step": 540
},
{
"epoch": 0.38,
"grad_norm": 0.4863611933674257,
"learning_rate": 2.877011587007046e-05,
"loss": 0.8264,
"step": 541
},
{
"epoch": 0.38,
"grad_norm": 0.5245944966817704,
"learning_rate": 2.8729701221636294e-05,
"loss": 0.7702,
"step": 542
},
{
"epoch": 0.38,
"grad_norm": 0.5285616480219155,
"learning_rate": 2.868924248883608e-05,
"loss": 0.8096,
"step": 543
},
{
"epoch": 0.38,
"grad_norm": 0.5254933946233273,
"learning_rate": 2.864873987598353e-05,
"loss": 0.8415,
"step": 544
},
{
"epoch": 0.38,
"grad_norm": 0.49067047173710926,
"learning_rate": 2.8608193587613917e-05,
"loss": 0.7966,
"step": 545
},
{
"epoch": 0.38,
"grad_norm": 0.5276907270557115,
"learning_rate": 2.8567603828483125e-05,
"loss": 0.8132,
"step": 546
},
{
"epoch": 0.38,
"grad_norm": 0.5250144097555784,
"learning_rate": 2.8526970803566504e-05,
"loss": 0.834,
"step": 547
},
{
"epoch": 0.38,
"grad_norm": 0.5217010881290475,
"learning_rate": 2.8486294718057936e-05,
"loss": 0.8241,
"step": 548
},
{
"epoch": 0.38,
"grad_norm": 0.5013040684136557,
"learning_rate": 2.844557577736873e-05,
"loss": 0.8316,
"step": 549
},
{
"epoch": 0.38,
"grad_norm": 0.5191425659765501,
"learning_rate": 2.840481418712662e-05,
"loss": 0.7737,
"step": 550
},
{
"epoch": 0.38,
"grad_norm": 0.5524478603674629,
"learning_rate": 2.8364010153174733e-05,
"loss": 0.7725,
"step": 551
},
{
"epoch": 0.38,
"grad_norm": 0.5173945071325455,
"learning_rate": 2.8323163881570505e-05,
"loss": 0.8114,
"step": 552
},
{
"epoch": 0.38,
"grad_norm": 0.5030658573643272,
"learning_rate": 2.8282275578584683e-05,
"loss": 0.8152,
"step": 553
},
{
"epoch": 0.38,
"grad_norm": 0.5415259930870986,
"learning_rate": 2.8241345450700275e-05,
"loss": 0.8163,
"step": 554
},
{
"epoch": 0.38,
"grad_norm": 0.5217444256526667,
"learning_rate": 2.8200373704611487e-05,
"loss": 0.8121,
"step": 555
},
{
"epoch": 0.39,
"grad_norm": 0.5324238033646188,
"learning_rate": 2.8159360547222716e-05,
"loss": 0.807,
"step": 556
},
{
"epoch": 0.39,
"grad_norm": 0.5006347868642604,
"learning_rate": 2.8118306185647458e-05,
"loss": 0.83,
"step": 557
},
{
"epoch": 0.39,
"grad_norm": 0.5330552791347976,
"learning_rate": 2.8077210827207304e-05,
"loss": 0.8542,
"step": 558
},
{
"epoch": 0.39,
"grad_norm": 0.5095544502005418,
"learning_rate": 2.8036074679430876e-05,
"loss": 0.8066,
"step": 559
},
{
"epoch": 0.39,
"grad_norm": 0.5397606650138748,
"learning_rate": 2.7994897950052764e-05,
"loss": 0.8287,
"step": 560
},
{
"epoch": 0.39,
"grad_norm": 0.4877803047723917,
"learning_rate": 2.7953680847012515e-05,
"loss": 0.8392,
"step": 561
},
{
"epoch": 0.39,
"grad_norm": 0.5017361364561864,
"learning_rate": 2.791242357845354e-05,
"loss": 0.8104,
"step": 562
},
{
"epoch": 0.39,
"grad_norm": 0.5210684320207916,
"learning_rate": 2.7871126352722086e-05,
"loss": 0.8298,
"step": 563
},
{
"epoch": 0.39,
"grad_norm": 0.5107946059489953,
"learning_rate": 2.7829789378366198e-05,
"loss": 0.8141,
"step": 564
},
{
"epoch": 0.39,
"grad_norm": 0.4933068507143492,
"learning_rate": 2.778841286413462e-05,
"loss": 0.8099,
"step": 565
},
{
"epoch": 0.39,
"grad_norm": 0.512575667987355,
"learning_rate": 2.7746997018975804e-05,
"loss": 0.8251,
"step": 566
},
{
"epoch": 0.39,
"grad_norm": 0.5441025564685552,
"learning_rate": 2.7705542052036797e-05,
"loss": 0.8137,
"step": 567
},
{
"epoch": 0.39,
"grad_norm": 0.5282801355089561,
"learning_rate": 2.7664048172662207e-05,
"loss": 0.8334,
"step": 568
},
{
"epoch": 0.39,
"grad_norm": 0.5170402144247147,
"learning_rate": 2.7622515590393158e-05,
"loss": 0.8073,
"step": 569
},
{
"epoch": 0.4,
"grad_norm": 0.536007073283325,
"learning_rate": 2.7580944514966216e-05,
"loss": 0.831,
"step": 570
},
{
"epoch": 0.4,
"grad_norm": 0.49951590928256506,
"learning_rate": 2.7539335156312336e-05,
"loss": 0.7974,
"step": 571
},
{
"epoch": 0.4,
"grad_norm": 0.49475267203778617,
"learning_rate": 2.74976877245558e-05,
"loss": 0.8115,
"step": 572
},
{
"epoch": 0.4,
"grad_norm": 0.5163381993115421,
"learning_rate": 2.745600243001315e-05,
"loss": 0.7978,
"step": 573
},
{
"epoch": 0.4,
"grad_norm": 0.501445567706877,
"learning_rate": 2.7414279483192142e-05,
"loss": 0.8203,
"step": 574
},
{
"epoch": 0.4,
"grad_norm": 0.5075669572165789,
"learning_rate": 2.737251909479068e-05,
"loss": 0.7813,
"step": 575
},
{
"epoch": 0.4,
"grad_norm": 0.5332244093680119,
"learning_rate": 2.733072147569572e-05,
"loss": 0.8472,
"step": 576
},
{
"epoch": 0.4,
"grad_norm": 0.4954497679746038,
"learning_rate": 2.7288886836982248e-05,
"loss": 0.8144,
"step": 577
},
{
"epoch": 0.4,
"grad_norm": 0.5277046929985244,
"learning_rate": 2.7247015389912203e-05,
"loss": 0.8053,
"step": 578
},
{
"epoch": 0.4,
"grad_norm": 0.5391388767291839,
"learning_rate": 2.7205107345933383e-05,
"loss": 0.8066,
"step": 579
},
{
"epoch": 0.4,
"grad_norm": 0.49876199303378155,
"learning_rate": 2.7163162916678416e-05,
"loss": 0.7877,
"step": 580
},
{
"epoch": 0.4,
"grad_norm": 0.49493896592323794,
"learning_rate": 2.7121182313963666e-05,
"loss": 0.7838,
"step": 581
},
{
"epoch": 0.4,
"grad_norm": 0.5181933415581349,
"learning_rate": 2.707916574978817e-05,
"loss": 0.7696,
"step": 582
},
{
"epoch": 0.4,
"grad_norm": 0.5040107766138328,
"learning_rate": 2.7037113436332565e-05,
"loss": 0.8109,
"step": 583
},
{
"epoch": 0.4,
"grad_norm": 0.4979548263999821,
"learning_rate": 2.6995025585958026e-05,
"loss": 0.8382,
"step": 584
},
{
"epoch": 0.41,
"grad_norm": 0.5147700667811426,
"learning_rate": 2.6952902411205178e-05,
"loss": 0.8056,
"step": 585
},
{
"epoch": 0.41,
"grad_norm": 0.5149570219066634,
"learning_rate": 2.6910744124793046e-05,
"loss": 0.8183,
"step": 586
},
{
"epoch": 0.41,
"grad_norm": 0.4860375910181333,
"learning_rate": 2.686855093961795e-05,
"loss": 0.7966,
"step": 587
},
{
"epoch": 0.41,
"grad_norm": 0.5090464542301267,
"learning_rate": 2.6826323068752462e-05,
"loss": 0.8544,
"step": 588
},
{
"epoch": 0.41,
"grad_norm": 0.4762189317791556,
"learning_rate": 2.6784060725444302e-05,
"loss": 0.8245,
"step": 589
},
{
"epoch": 0.41,
"grad_norm": 0.49640730778796094,
"learning_rate": 2.674176412311527e-05,
"loss": 0.8452,
"step": 590
},
{
"epoch": 0.41,
"grad_norm": 0.5210944671492388,
"learning_rate": 2.6699433475360186e-05,
"loss": 0.797,
"step": 591
},
{
"epoch": 0.41,
"grad_norm": 0.5600554413084173,
"learning_rate": 2.6657068995945786e-05,
"loss": 0.8167,
"step": 592
},
{
"epoch": 0.41,
"grad_norm": 0.4958281442810463,
"learning_rate": 2.6614670898809675e-05,
"loss": 0.8209,
"step": 593
},
{
"epoch": 0.41,
"grad_norm": 0.5043995987397324,
"learning_rate": 2.657223939805918e-05,
"loss": 0.8101,
"step": 594
},
{
"epoch": 0.41,
"grad_norm": 0.517652160037172,
"learning_rate": 2.652977470797035e-05,
"loss": 0.8328,
"step": 595
},
{
"epoch": 0.41,
"grad_norm": 0.48966531177614386,
"learning_rate": 2.648727704298685e-05,
"loss": 0.8106,
"step": 596
},
{
"epoch": 0.41,
"grad_norm": 0.5145904461553998,
"learning_rate": 2.6444746617718814e-05,
"loss": 0.8431,
"step": 597
},
{
"epoch": 0.41,
"grad_norm": 0.5074617747025607,
"learning_rate": 2.640218364694187e-05,
"loss": 0.8232,
"step": 598
},
{
"epoch": 0.42,
"grad_norm": 0.47539926444847314,
"learning_rate": 2.6359588345595956e-05,
"loss": 0.7935,
"step": 599
},
{
"epoch": 0.42,
"grad_norm": 0.4984806464754738,
"learning_rate": 2.631696092878429e-05,
"loss": 0.8357,
"step": 600
},
{
"epoch": 0.42,
"grad_norm": 0.49877469920091155,
"learning_rate": 2.6274301611772293e-05,
"loss": 0.8399,
"step": 601
},
{
"epoch": 0.42,
"grad_norm": 0.47007289319109724,
"learning_rate": 2.6231610609986442e-05,
"loss": 0.7622,
"step": 602
},
{
"epoch": 0.42,
"grad_norm": 0.4970365167564179,
"learning_rate": 2.6188888139013258e-05,
"loss": 0.8066,
"step": 603
},
{
"epoch": 0.42,
"grad_norm": 0.5150034019395904,
"learning_rate": 2.6146134414598145e-05,
"loss": 0.8058,
"step": 604
},
{
"epoch": 0.42,
"grad_norm": 0.5202339414160825,
"learning_rate": 2.6103349652644356e-05,
"loss": 0.7761,
"step": 605
},
{
"epoch": 0.42,
"grad_norm": 0.5229279925591594,
"learning_rate": 2.6060534069211877e-05,
"loss": 0.846,
"step": 606
},
{
"epoch": 0.42,
"grad_norm": 0.5436938324269776,
"learning_rate": 2.601768788051633e-05,
"loss": 0.8353,
"step": 607
},
{
"epoch": 0.42,
"grad_norm": 0.4970290067822917,
"learning_rate": 2.5974811302927907e-05,
"loss": 0.8161,
"step": 608
},
{
"epoch": 0.42,
"grad_norm": 0.518265837234093,
"learning_rate": 2.5931904552970256e-05,
"loss": 0.8225,
"step": 609
},
{
"epoch": 0.42,
"grad_norm": 0.4961814190073218,
"learning_rate": 2.5888967847319385e-05,
"loss": 0.8198,
"step": 610
},
{
"epoch": 0.42,
"grad_norm": 0.4843576113090071,
"learning_rate": 2.5846001402802594e-05,
"loss": 0.8331,
"step": 611
},
{
"epoch": 0.42,
"grad_norm": 0.49538644333804566,
"learning_rate": 2.5803005436397328e-05,
"loss": 0.8099,
"step": 612
},
{
"epoch": 0.42,
"grad_norm": 0.4889764305451256,
"learning_rate": 2.5759980165230164e-05,
"loss": 0.8001,
"step": 613
},
{
"epoch": 0.43,
"grad_norm": 0.5415405024711503,
"learning_rate": 2.5716925806575628e-05,
"loss": 0.8176,
"step": 614
},
{
"epoch": 0.43,
"grad_norm": 0.48631832215904225,
"learning_rate": 2.567384257785515e-05,
"loss": 0.8243,
"step": 615
},
{
"epoch": 0.43,
"grad_norm": 0.503534813084849,
"learning_rate": 2.5630730696635953e-05,
"loss": 0.7716,
"step": 616
},
{
"epoch": 0.43,
"grad_norm": 0.5099740812174416,
"learning_rate": 2.5587590380629947e-05,
"loss": 0.8105,
"step": 617
},
{
"epoch": 0.43,
"grad_norm": 0.495164335322031,
"learning_rate": 2.5544421847692637e-05,
"loss": 0.763,
"step": 618
},
{
"epoch": 0.43,
"grad_norm": 0.5063091622991113,
"learning_rate": 2.5501225315822025e-05,
"loss": 0.7908,
"step": 619
},
{
"epoch": 0.43,
"grad_norm": 0.5015097072421199,
"learning_rate": 2.54580010031575e-05,
"loss": 0.8131,
"step": 620
},
{
"epoch": 0.43,
"grad_norm": 0.4894193487318987,
"learning_rate": 2.5414749127978753e-05,
"loss": 0.7838,
"step": 621
},
{
"epoch": 0.43,
"grad_norm": 0.4733368368547407,
"learning_rate": 2.5371469908704655e-05,
"loss": 0.8053,
"step": 622
},
{
"epoch": 0.43,
"grad_norm": 0.47909573540440914,
"learning_rate": 2.5328163563892162e-05,
"loss": 0.7765,
"step": 623
},
{
"epoch": 0.43,
"grad_norm": 0.5026258353826587,
"learning_rate": 2.528483031223521e-05,
"loss": 0.8654,
"step": 624
},
{
"epoch": 0.43,
"grad_norm": 0.5402690411769058,
"learning_rate": 2.5241470372563624e-05,
"loss": 0.8313,
"step": 625
},
{
"epoch": 0.43,
"grad_norm": 0.4731470377140125,
"learning_rate": 2.5198083963841988e-05,
"loss": 0.8158,
"step": 626
},
{
"epoch": 0.43,
"grad_norm": 0.5149018360688673,
"learning_rate": 2.515467130516857e-05,
"loss": 0.8185,
"step": 627
},
{
"epoch": 0.44,
"grad_norm": 0.49161680680797987,
"learning_rate": 2.5111232615774174e-05,
"loss": 0.798,
"step": 628
},
{
"epoch": 0.44,
"grad_norm": 0.529381167285509,
"learning_rate": 2.5067768115021077e-05,
"loss": 0.7749,
"step": 629
},
{
"epoch": 0.44,
"grad_norm": 0.5387501064980478,
"learning_rate": 2.5024278022401897e-05,
"loss": 0.8087,
"step": 630
},
{
"epoch": 0.44,
"grad_norm": 0.507739059257939,
"learning_rate": 2.498076255753848e-05,
"loss": 0.8093,
"step": 631
},
{
"epoch": 0.44,
"grad_norm": 0.4983461111612683,
"learning_rate": 2.493722194018082e-05,
"loss": 0.8102,
"step": 632
},
{
"epoch": 0.44,
"grad_norm": 0.49516660618585095,
"learning_rate": 2.4893656390205912e-05,
"loss": 0.7827,
"step": 633
},
{
"epoch": 0.44,
"grad_norm": 0.5508672601482296,
"learning_rate": 2.4850066127616655e-05,
"loss": 0.7792,
"step": 634
},
{
"epoch": 0.44,
"grad_norm": 0.528582785005273,
"learning_rate": 2.4806451372540767e-05,
"loss": 0.797,
"step": 635
},
{
"epoch": 0.44,
"grad_norm": 0.4824231537977557,
"learning_rate": 2.4762812345229622e-05,
"loss": 0.7787,
"step": 636
},
{
"epoch": 0.44,
"grad_norm": 0.5007159578513001,
"learning_rate": 2.4719149266057202e-05,
"loss": 0.7989,
"step": 637
},
{
"epoch": 0.44,
"grad_norm": 0.5160759237737353,
"learning_rate": 2.467546235551892e-05,
"loss": 0.7954,
"step": 638
},
{
"epoch": 0.44,
"grad_norm": 0.49444173669378655,
"learning_rate": 2.463175183423054e-05,
"loss": 0.8042,
"step": 639
},
{
"epoch": 0.44,
"grad_norm": 0.5097452074329715,
"learning_rate": 2.4588017922927078e-05,
"loss": 0.778,
"step": 640
},
{
"epoch": 0.44,
"grad_norm": 0.47750544419780466,
"learning_rate": 2.4544260842461638e-05,
"loss": 0.792,
"step": 641
},
{
"epoch": 0.45,
"grad_norm": 0.5114159209423187,
"learning_rate": 2.4500480813804362e-05,
"loss": 0.8068,
"step": 642
},
{
"epoch": 0.45,
"grad_norm": 0.49371255986226426,
"learning_rate": 2.4456678058041248e-05,
"loss": 0.7801,
"step": 643
},
{
"epoch": 0.45,
"grad_norm": 0.508970212582854,
"learning_rate": 2.441285279637307e-05,
"loss": 0.7925,
"step": 644
},
{
"epoch": 0.45,
"grad_norm": 0.4745662222807768,
"learning_rate": 2.436900525011428e-05,
"loss": 0.7387,
"step": 645
},
{
"epoch": 0.45,
"grad_norm": 0.4707549636622784,
"learning_rate": 2.4325135640691823e-05,
"loss": 0.8097,
"step": 646
},
{
"epoch": 0.45,
"grad_norm": 0.49834091452846524,
"learning_rate": 2.4281244189644108e-05,
"loss": 0.8174,
"step": 647
},
{
"epoch": 0.45,
"grad_norm": 0.5054955184088168,
"learning_rate": 2.4237331118619813e-05,
"loss": 0.8012,
"step": 648
},
{
"epoch": 0.45,
"grad_norm": 0.5402145593922886,
"learning_rate": 2.4193396649376783e-05,
"loss": 0.8086,
"step": 649
},
{
"epoch": 0.45,
"grad_norm": 0.5170172289669904,
"learning_rate": 2.414944100378097e-05,
"loss": 0.7829,
"step": 650
},
{
"epoch": 0.45,
"grad_norm": 0.5171885563669723,
"learning_rate": 2.4105464403805217e-05,
"loss": 0.7691,
"step": 651
},
{
"epoch": 0.45,
"grad_norm": 0.5125453083523096,
"learning_rate": 2.4061467071528215e-05,
"loss": 0.8434,
"step": 652
},
{
"epoch": 0.45,
"grad_norm": 0.5328503963093509,
"learning_rate": 2.401744922913334e-05,
"loss": 0.8275,
"step": 653
},
{
"epoch": 0.45,
"grad_norm": 0.5527800064640043,
"learning_rate": 2.397341109890754e-05,
"loss": 0.804,
"step": 654
},
{
"epoch": 0.45,
"grad_norm": 0.4817569418045798,
"learning_rate": 2.3929352903240224e-05,
"loss": 0.8322,
"step": 655
},
{
"epoch": 0.45,
"grad_norm": 0.4704449983571816,
"learning_rate": 2.388527486462212e-05,
"loss": 0.7912,
"step": 656
},
{
"epoch": 0.46,
"grad_norm": 0.49783339260622966,
"learning_rate": 2.3841177205644165e-05,
"loss": 0.8277,
"step": 657
},
{
"epoch": 0.46,
"grad_norm": 0.5140472864141616,
"learning_rate": 2.3797060148996385e-05,
"loss": 0.7397,
"step": 658
},
{
"epoch": 0.46,
"grad_norm": 0.5211408953085369,
"learning_rate": 2.3752923917466763e-05,
"loss": 0.8095,
"step": 659
},
{
"epoch": 0.46,
"grad_norm": 0.49618871107233814,
"learning_rate": 2.370876873394009e-05,
"loss": 0.7662,
"step": 660
},
{
"epoch": 0.46,
"grad_norm": 0.5069982712996871,
"learning_rate": 2.3664594821396896e-05,
"loss": 0.8103,
"step": 661
},
{
"epoch": 0.46,
"grad_norm": 0.49533660707436866,
"learning_rate": 2.362040240291227e-05,
"loss": 0.8016,
"step": 662
},
{
"epoch": 0.46,
"grad_norm": 0.46740525896117124,
"learning_rate": 2.3576191701654763e-05,
"loss": 0.7999,
"step": 663
},
{
"epoch": 0.46,
"grad_norm": 0.5236988782016285,
"learning_rate": 2.353196294088525e-05,
"loss": 0.8407,
"step": 664
},
{
"epoch": 0.46,
"grad_norm": 0.49350062735485684,
"learning_rate": 2.34877163439558e-05,
"loss": 0.8158,
"step": 665
},
{
"epoch": 0.46,
"grad_norm": 0.48063257025922457,
"learning_rate": 2.3443452134308565e-05,
"loss": 0.7801,
"step": 666
},
{
"epoch": 0.46,
"grad_norm": 0.511184173334168,
"learning_rate": 2.3399170535474635e-05,
"loss": 0.806,
"step": 667
},
{
"epoch": 0.46,
"grad_norm": 0.4951305542150465,
"learning_rate": 2.3354871771072906e-05,
"loss": 0.7952,
"step": 668
},
{
"epoch": 0.46,
"grad_norm": 0.5119633501003498,
"learning_rate": 2.3310556064808977e-05,
"loss": 0.8166,
"step": 669
},
{
"epoch": 0.46,
"grad_norm": 0.5379374581190969,
"learning_rate": 2.3266223640473982e-05,
"loss": 0.8185,
"step": 670
},
{
"epoch": 0.47,
"grad_norm": 0.5067641524317017,
"learning_rate": 2.3221874721943495e-05,
"loss": 0.7718,
"step": 671
},
{
"epoch": 0.47,
"grad_norm": 0.5118269250662727,
"learning_rate": 2.3177509533176375e-05,
"loss": 0.7945,
"step": 672
},
{
"epoch": 0.47,
"grad_norm": 0.4889414574881745,
"learning_rate": 2.3133128298213647e-05,
"loss": 0.7675,
"step": 673
},
{
"epoch": 0.47,
"grad_norm": 0.498229922705657,
"learning_rate": 2.3088731241177378e-05,
"loss": 0.7703,
"step": 674
},
{
"epoch": 0.47,
"grad_norm": 0.4813001166475708,
"learning_rate": 2.3044318586269516e-05,
"loss": 0.7679,
"step": 675
},
{
"epoch": 0.47,
"grad_norm": 0.5088868247786725,
"learning_rate": 2.299989055777079e-05,
"loss": 0.8193,
"step": 676
},
{
"epoch": 0.47,
"grad_norm": 0.5092271961470778,
"learning_rate": 2.2955447380039576e-05,
"loss": 0.776,
"step": 677
},
{
"epoch": 0.47,
"grad_norm": 0.4705489726749848,
"learning_rate": 2.291098927751072e-05,
"loss": 0.7754,
"step": 678
},
{
"epoch": 0.47,
"grad_norm": 0.5008468159601925,
"learning_rate": 2.286651647469447e-05,
"loss": 0.8226,
"step": 679
},
{
"epoch": 0.47,
"grad_norm": 0.5248137663127183,
"learning_rate": 2.282202919617529e-05,
"loss": 0.7827,
"step": 680
},
{
"epoch": 0.47,
"grad_norm": 0.5125082433269635,
"learning_rate": 2.2777527666610742e-05,
"loss": 0.8413,
"step": 681
},
{
"epoch": 0.47,
"grad_norm": 0.5222609406044753,
"learning_rate": 2.2733012110730388e-05,
"loss": 0.7621,
"step": 682
},
{
"epoch": 0.47,
"grad_norm": 0.4922886901624958,
"learning_rate": 2.2688482753334568e-05,
"loss": 0.8208,
"step": 683
},
{
"epoch": 0.47,
"grad_norm": 0.5054213727479471,
"learning_rate": 2.264393981929337e-05,
"loss": 0.8176,
"step": 684
},
{
"epoch": 0.47,
"grad_norm": 0.4845245766921796,
"learning_rate": 2.259938353354542e-05,
"loss": 0.8168,
"step": 685
},
{
"epoch": 0.48,
"grad_norm": 0.5028459066674311,
"learning_rate": 2.2554814121096748e-05,
"loss": 0.8137,
"step": 686
},
{
"epoch": 0.48,
"grad_norm": 0.4888043154458139,
"learning_rate": 2.2510231807019722e-05,
"loss": 0.796,
"step": 687
},
{
"epoch": 0.48,
"grad_norm": 0.5064635794679937,
"learning_rate": 2.2465636816451818e-05,
"loss": 0.7748,
"step": 688
},
{
"epoch": 0.48,
"grad_norm": 0.505467460662524,
"learning_rate": 2.242102937459456e-05,
"loss": 0.8257,
"step": 689
},
{
"epoch": 0.48,
"grad_norm": 0.5160028144518809,
"learning_rate": 2.2376409706712327e-05,
"loss": 0.8354,
"step": 690
},
{
"epoch": 0.48,
"grad_norm": 0.5406566002222103,
"learning_rate": 2.2331778038131236e-05,
"loss": 0.7933,
"step": 691
},
{
"epoch": 0.48,
"grad_norm": 0.48539671687012415,
"learning_rate": 2.228713459423804e-05,
"loss": 0.7804,
"step": 692
},
{
"epoch": 0.48,
"grad_norm": 0.5089208024410964,
"learning_rate": 2.2242479600478917e-05,
"loss": 0.7477,
"step": 693
},
{
"epoch": 0.48,
"grad_norm": 0.5085554188807803,
"learning_rate": 2.219781328235839e-05,
"loss": 0.7932,
"step": 694
},
{
"epoch": 0.48,
"grad_norm": 0.4964039867842853,
"learning_rate": 2.215313586543818e-05,
"loss": 0.8257,
"step": 695
},
{
"epoch": 0.48,
"grad_norm": 0.4802372387133603,
"learning_rate": 2.2108447575336015e-05,
"loss": 0.805,
"step": 696
},
{
"epoch": 0.48,
"grad_norm": 0.5270842199114926,
"learning_rate": 2.206374863772459e-05,
"loss": 0.7378,
"step": 697
},
{
"epoch": 0.48,
"grad_norm": 0.48292385960552725,
"learning_rate": 2.2019039278330324e-05,
"loss": 0.8178,
"step": 698
},
{
"epoch": 0.48,
"grad_norm": 0.49317497404202026,
"learning_rate": 2.1974319722932286e-05,
"loss": 0.7704,
"step": 699
},
{
"epoch": 0.49,
"grad_norm": 0.5384276365414706,
"learning_rate": 2.1929590197361025e-05,
"loss": 0.7791,
"step": 700
},
{
"epoch": 0.49,
"grad_norm": 0.5014645497959508,
"learning_rate": 2.188485092749744e-05,
"loss": 0.8037,
"step": 701
},
{
"epoch": 0.49,
"grad_norm": 0.517842179606978,
"learning_rate": 2.1840102139271644e-05,
"loss": 0.7724,
"step": 702
},
{
"epoch": 0.49,
"grad_norm": 0.4729181459825356,
"learning_rate": 2.179534405866181e-05,
"loss": 0.7728,
"step": 703
},
{
"epoch": 0.49,
"grad_norm": 0.495022624030006,
"learning_rate": 2.1750576911693043e-05,
"loss": 0.8737,
"step": 704
},
{
"epoch": 0.49,
"grad_norm": 0.4997216859369346,
"learning_rate": 2.1705800924436232e-05,
"loss": 0.802,
"step": 705
},
{
"epoch": 0.49,
"grad_norm": 0.5212297889042845,
"learning_rate": 2.16610163230069e-05,
"loss": 0.8026,
"step": 706
},
{
"epoch": 0.49,
"grad_norm": 0.4700992141032843,
"learning_rate": 2.161622333356408e-05,
"loss": 0.8023,
"step": 707
},
{
"epoch": 0.49,
"grad_norm": 0.464148426279936,
"learning_rate": 2.157142218230916e-05,
"loss": 0.7755,
"step": 708
},
{
"epoch": 0.49,
"grad_norm": 0.49362161970831064,
"learning_rate": 2.152661309548475e-05,
"loss": 0.8174,
"step": 709
},
{
"epoch": 0.49,
"grad_norm": 0.5075115201325031,
"learning_rate": 2.148179629937352e-05,
"loss": 0.7589,
"step": 710
},
{
"epoch": 0.49,
"grad_norm": 0.4703628917207608,
"learning_rate": 2.1436972020297096e-05,
"loss": 0.8283,
"step": 711
},
{
"epoch": 0.49,
"grad_norm": 0.47359092016178495,
"learning_rate": 2.1392140484614865e-05,
"loss": 0.7987,
"step": 712
},
{
"epoch": 0.49,
"grad_norm": 0.48736276912670584,
"learning_rate": 2.134730191872288e-05,
"loss": 0.7837,
"step": 713
},
{
"epoch": 0.49,
"grad_norm": 0.4853771262340383,
"learning_rate": 2.130245654905268e-05,
"loss": 0.8583,
"step": 714
},
{
"epoch": 0.5,
"grad_norm": 0.4718518291731039,
"learning_rate": 2.125760460207017e-05,
"loss": 0.781,
"step": 715
},
{
"epoch": 0.5,
"grad_norm": 0.4800918432909751,
"learning_rate": 2.1212746304274482e-05,
"loss": 0.7753,
"step": 716
},
{
"epoch": 0.5,
"grad_norm": 0.4863383953627211,
"learning_rate": 2.11678818821968e-05,
"loss": 0.8306,
"step": 717
},
{
"epoch": 0.5,
"grad_norm": 0.5228676518304668,
"learning_rate": 2.112301156239924e-05,
"loss": 0.8122,
"step": 718
},
{
"epoch": 0.5,
"grad_norm": 0.5184501465797192,
"learning_rate": 2.1078135571473712e-05,
"loss": 0.7775,
"step": 719
},
{
"epoch": 0.5,
"grad_norm": 0.4899372010925384,
"learning_rate": 2.1033254136040747e-05,
"loss": 0.8292,
"step": 720
},
{
"epoch": 0.5,
"grad_norm": 0.4795805981367559,
"learning_rate": 2.098836748274839e-05,
"loss": 0.7672,
"step": 721
},
{
"epoch": 0.5,
"grad_norm": 0.5119121470808137,
"learning_rate": 2.094347583827102e-05,
"loss": 0.8044,
"step": 722
},
{
"epoch": 0.5,
"grad_norm": 0.5067453844581565,
"learning_rate": 2.089857942930822e-05,
"loss": 0.8106,
"step": 723
},
{
"epoch": 0.5,
"grad_norm": 0.4876912107425344,
"learning_rate": 2.0853678482583655e-05,
"loss": 0.7849,
"step": 724
},
{
"epoch": 0.5,
"grad_norm": 0.4988569958161731,
"learning_rate": 2.0808773224843882e-05,
"loss": 0.7889,
"step": 725
},
{
"epoch": 0.5,
"grad_norm": 0.4920752018715185,
"learning_rate": 2.0763863882857242e-05,
"loss": 0.7822,
"step": 726
},
{
"epoch": 0.5,
"grad_norm": 0.49789583703969204,
"learning_rate": 2.0718950683412693e-05,
"loss": 0.7689,
"step": 727
},
{
"epoch": 0.5,
"grad_norm": 0.5314360000062643,
"learning_rate": 2.0674033853318666e-05,
"loss": 0.829,
"step": 728
},
{
"epoch": 0.51,
"grad_norm": 0.501809926185261,
"learning_rate": 2.0629113619401958e-05,
"loss": 0.78,
"step": 729
},
{
"epoch": 0.51,
"grad_norm": 0.4867697548423698,
"learning_rate": 2.058419020850652e-05,
"loss": 0.8021,
"step": 730
},
{
"epoch": 0.51,
"grad_norm": 0.4895110637717211,
"learning_rate": 2.0539263847492355e-05,
"loss": 0.7746,
"step": 731
},
{
"epoch": 0.51,
"grad_norm": 0.4975959969413289,
"learning_rate": 2.0494334763234383e-05,
"loss": 0.8076,
"step": 732
},
{
"epoch": 0.51,
"grad_norm": 0.5020137375065207,
"learning_rate": 2.044940318262125e-05,
"loss": 0.7934,
"step": 733
},
{
"epoch": 0.51,
"grad_norm": 0.48253093956140286,
"learning_rate": 2.040446933255423e-05,
"loss": 0.7668,
"step": 734
},
{
"epoch": 0.51,
"grad_norm": 0.5107298368953785,
"learning_rate": 2.0359533439946047e-05,
"loss": 0.7736,
"step": 735
},
{
"epoch": 0.51,
"grad_norm": 0.48087733872553207,
"learning_rate": 2.031459573171973e-05,
"loss": 0.7722,
"step": 736
},
{
"epoch": 0.51,
"grad_norm": 0.47972168294440365,
"learning_rate": 2.0269656434807504e-05,
"loss": 0.8294,
"step": 737
},
{
"epoch": 0.51,
"grad_norm": 0.4896899019958654,
"learning_rate": 2.0224715776149585e-05,
"loss": 0.7743,
"step": 738
},
{
"epoch": 0.51,
"grad_norm": 0.5737437578369733,
"learning_rate": 2.0179773982693093e-05,
"loss": 0.8033,
"step": 739
},
{
"epoch": 0.51,
"grad_norm": 0.49199801310908875,
"learning_rate": 2.013483128139086e-05,
"loss": 0.7876,
"step": 740
},
{
"epoch": 0.51,
"grad_norm": 0.5012234855053803,
"learning_rate": 2.0089887899200307e-05,
"loss": 0.8195,
"step": 741
},
{
"epoch": 0.51,
"grad_norm": 0.504461254772644,
"learning_rate": 2.0044944063082307e-05,
"loss": 0.8276,
"step": 742
},
{
"epoch": 0.52,
"grad_norm": 0.5051369125431119,
"learning_rate": 2e-05,
"loss": 0.8305,
"step": 743
},
{
"epoch": 0.52,
"grad_norm": 0.5355060738322722,
"learning_rate": 1.99550559369177e-05,
"loss": 0.743,
"step": 744
},
{
"epoch": 0.52,
"grad_norm": 0.49000455120957126,
"learning_rate": 1.99101121007997e-05,
"loss": 0.8466,
"step": 745
},
{
"epoch": 0.52,
"grad_norm": 0.5265394066304375,
"learning_rate": 1.9865168718609142e-05,
"loss": 0.7984,
"step": 746
},
{
"epoch": 0.52,
"grad_norm": 0.5135579966602404,
"learning_rate": 1.9820226017306914e-05,
"loss": 0.8038,
"step": 747
},
{
"epoch": 0.52,
"grad_norm": 0.5179805904565409,
"learning_rate": 1.9775284223850418e-05,
"loss": 0.789,
"step": 748
},
{
"epoch": 0.52,
"grad_norm": 0.5072410725982022,
"learning_rate": 1.9730343565192506e-05,
"loss": 0.776,
"step": 749
},
{
"epoch": 0.52,
"grad_norm": 0.5082948840095385,
"learning_rate": 1.968540426828027e-05,
"loss": 0.7722,
"step": 750
},
{
"epoch": 0.52,
"grad_norm": 0.481082297051927,
"learning_rate": 1.9640466560053956e-05,
"loss": 0.7514,
"step": 751
},
{
"epoch": 0.52,
"grad_norm": 0.49999029858967015,
"learning_rate": 1.9595530667445775e-05,
"loss": 0.8226,
"step": 752
},
{
"epoch": 0.52,
"grad_norm": 0.4836119031152632,
"learning_rate": 1.9550596817378752e-05,
"loss": 0.7926,
"step": 753
},
{
"epoch": 0.52,
"grad_norm": 0.5006641169023739,
"learning_rate": 1.9505665236765624e-05,
"loss": 0.798,
"step": 754
},
{
"epoch": 0.52,
"grad_norm": 0.48622399020082735,
"learning_rate": 1.946073615250765e-05,
"loss": 0.8296,
"step": 755
},
{
"epoch": 0.52,
"grad_norm": 0.4942338546575008,
"learning_rate": 1.9415809791493484e-05,
"loss": 0.8151,
"step": 756
},
{
"epoch": 0.52,
"grad_norm": 0.5137706074875535,
"learning_rate": 1.9370886380598046e-05,
"loss": 0.8206,
"step": 757
},
{
"epoch": 0.53,
"grad_norm": 0.4973813254734366,
"learning_rate": 1.9325966146681337e-05,
"loss": 0.7688,
"step": 758
},
{
"epoch": 0.53,
"grad_norm": 0.5027606392694177,
"learning_rate": 1.9281049316587317e-05,
"loss": 0.786,
"step": 759
},
{
"epoch": 0.53,
"grad_norm": 0.5054270225670094,
"learning_rate": 1.9236136117142765e-05,
"loss": 0.7462,
"step": 760
},
{
"epoch": 0.53,
"grad_norm": 0.4956522652154512,
"learning_rate": 1.919122677515612e-05,
"loss": 0.8364,
"step": 761
},
{
"epoch": 0.53,
"grad_norm": 0.5022872245950014,
"learning_rate": 1.9146321517416348e-05,
"loss": 0.8274,
"step": 762
},
{
"epoch": 0.53,
"grad_norm": 0.48434378146491763,
"learning_rate": 1.9101420570691783e-05,
"loss": 0.7928,
"step": 763
},
{
"epoch": 0.53,
"grad_norm": 0.5143329284724251,
"learning_rate": 1.905652416172899e-05,
"loss": 0.752,
"step": 764
},
{
"epoch": 0.53,
"grad_norm": 0.5048067400573952,
"learning_rate": 1.901163251725162e-05,
"loss": 0.8408,
"step": 765
},
{
"epoch": 0.53,
"grad_norm": 0.47089381715712186,
"learning_rate": 1.8966745863959256e-05,
"loss": 0.7735,
"step": 766
},
{
"epoch": 0.53,
"grad_norm": 0.4995844068386091,
"learning_rate": 1.8921864428526295e-05,
"loss": 0.7202,
"step": 767
},
{
"epoch": 0.53,
"grad_norm": 0.4674169864075355,
"learning_rate": 1.8876988437600768e-05,
"loss": 0.7685,
"step": 768
},
{
"epoch": 0.53,
"grad_norm": 0.549481998446188,
"learning_rate": 1.883211811780321e-05,
"loss": 0.7871,
"step": 769
},
{
"epoch": 0.53,
"grad_norm": 0.47254254967708503,
"learning_rate": 1.8787253695725524e-05,
"loss": 0.7451,
"step": 770
},
{
"epoch": 0.53,
"grad_norm": 0.47884411114859504,
"learning_rate": 1.8742395397929833e-05,
"loss": 0.7542,
"step": 771
},
{
"epoch": 0.54,
"grad_norm": 0.5070288747827673,
"learning_rate": 1.8697543450947327e-05,
"loss": 0.7657,
"step": 772
},
{
"epoch": 0.54,
"grad_norm": 0.5009570449814584,
"learning_rate": 1.8652698081277127e-05,
"loss": 0.7617,
"step": 773
},
{
"epoch": 0.54,
"grad_norm": 0.48199888481899683,
"learning_rate": 1.860785951538514e-05,
"loss": 0.8031,
"step": 774
},
{
"epoch": 0.54,
"grad_norm": 0.48077416818754865,
"learning_rate": 1.8563027979702903e-05,
"loss": 0.7987,
"step": 775
},
{
"epoch": 0.54,
"grad_norm": 0.48742257198004213,
"learning_rate": 1.851820370062648e-05,
"loss": 0.7487,
"step": 776
},
{
"epoch": 0.54,
"grad_norm": 0.49184950237348357,
"learning_rate": 1.8473386904515256e-05,
"loss": 0.7936,
"step": 777
},
{
"epoch": 0.54,
"grad_norm": 0.511526316226081,
"learning_rate": 1.8428577817690844e-05,
"loss": 0.8526,
"step": 778
},
{
"epoch": 0.54,
"grad_norm": 0.4934928222593597,
"learning_rate": 1.8383776666435927e-05,
"loss": 0.7917,
"step": 779
},
{
"epoch": 0.54,
"grad_norm": 0.5039716026018762,
"learning_rate": 1.83389836769931e-05,
"loss": 0.7721,
"step": 780
},
{
"epoch": 0.54,
"grad_norm": 0.4879192126974817,
"learning_rate": 1.8294199075563774e-05,
"loss": 0.7814,
"step": 781
},
{
"epoch": 0.54,
"grad_norm": 0.47708194751026883,
"learning_rate": 1.824942308830696e-05,
"loss": 0.7976,
"step": 782
},
{
"epoch": 0.54,
"grad_norm": 0.488190847678098,
"learning_rate": 1.8204655941338193e-05,
"loss": 0.7676,
"step": 783
},
{
"epoch": 0.54,
"grad_norm": 0.5285511417719072,
"learning_rate": 1.8159897860728366e-05,
"loss": 0.7838,
"step": 784
},
{
"epoch": 0.54,
"grad_norm": 0.47712648144132735,
"learning_rate": 1.8115149072502564e-05,
"loss": 0.7774,
"step": 785
},
{
"epoch": 0.54,
"grad_norm": 0.49337718497369626,
"learning_rate": 1.8070409802638985e-05,
"loss": 0.7971,
"step": 786
},
{
"epoch": 0.55,
"grad_norm": 0.4813068995715721,
"learning_rate": 1.802568027706772e-05,
"loss": 0.8067,
"step": 787
},
{
"epoch": 0.55,
"grad_norm": 0.4961339097894079,
"learning_rate": 1.798096072166968e-05,
"loss": 0.7874,
"step": 788
},
{
"epoch": 0.55,
"grad_norm": 0.4779021897797907,
"learning_rate": 1.7936251362275415e-05,
"loss": 0.7877,
"step": 789
},
{
"epoch": 0.55,
"grad_norm": 0.49824194599287913,
"learning_rate": 1.789155242466398e-05,
"loss": 0.8062,
"step": 790
},
{
"epoch": 0.55,
"grad_norm": 0.4984034488199998,
"learning_rate": 1.7846864134561828e-05,
"loss": 0.8012,
"step": 791
},
{
"epoch": 0.55,
"grad_norm": 0.5056558887834203,
"learning_rate": 1.7802186717641613e-05,
"loss": 0.7994,
"step": 792
},
{
"epoch": 0.55,
"grad_norm": 0.506908445653823,
"learning_rate": 1.775752039952109e-05,
"loss": 0.778,
"step": 793
},
{
"epoch": 0.55,
"grad_norm": 0.5098465904321556,
"learning_rate": 1.7712865405761967e-05,
"loss": 0.7894,
"step": 794
},
{
"epoch": 0.55,
"grad_norm": 0.47517979023384277,
"learning_rate": 1.7668221961868764e-05,
"loss": 0.7655,
"step": 795
},
{
"epoch": 0.55,
"grad_norm": 0.47937069932156207,
"learning_rate": 1.762359029328768e-05,
"loss": 0.7904,
"step": 796
},
{
"epoch": 0.55,
"grad_norm": 0.5107641946721422,
"learning_rate": 1.757897062540545e-05,
"loss": 0.7583,
"step": 797
},
{
"epoch": 0.55,
"grad_norm": 0.512392785060477,
"learning_rate": 1.7534363183548185e-05,
"loss": 0.7579,
"step": 798
},
{
"epoch": 0.55,
"grad_norm": 0.5063429330590133,
"learning_rate": 1.7489768192980284e-05,
"loss": 0.7542,
"step": 799
},
{
"epoch": 0.55,
"grad_norm": 0.5151633771455881,
"learning_rate": 1.7445185878903252e-05,
"loss": 0.8442,
"step": 800
},
{
"epoch": 0.56,
"grad_norm": 0.5006824554519426,
"learning_rate": 1.740061646645459e-05,
"loss": 0.7913,
"step": 801
},
{
"epoch": 0.56,
"grad_norm": 0.49667516390468125,
"learning_rate": 1.7356060180706634e-05,
"loss": 0.8056,
"step": 802
},
{
"epoch": 0.56,
"grad_norm": 0.4786595852569132,
"learning_rate": 1.7311517246665435e-05,
"loss": 0.7842,
"step": 803
},
{
"epoch": 0.56,
"grad_norm": 0.4744405796126131,
"learning_rate": 1.7266987889269625e-05,
"loss": 0.7922,
"step": 804
},
{
"epoch": 0.56,
"grad_norm": 0.5088588855037889,
"learning_rate": 1.7222472333389254e-05,
"loss": 0.7759,
"step": 805
},
{
"epoch": 0.56,
"grad_norm": 0.49310461297831926,
"learning_rate": 1.7177970803824714e-05,
"loss": 0.7928,
"step": 806
},
{
"epoch": 0.56,
"grad_norm": 0.4970796961743678,
"learning_rate": 1.7133483525305536e-05,
"loss": 0.8173,
"step": 807
},
{
"epoch": 0.56,
"grad_norm": 0.4980137451486139,
"learning_rate": 1.7089010722489284e-05,
"loss": 0.8146,
"step": 808
},
{
"epoch": 0.56,
"grad_norm": 0.500205620409387,
"learning_rate": 1.7044552619960434e-05,
"loss": 0.7696,
"step": 809
},
{
"epoch": 0.56,
"grad_norm": 0.49353136564639616,
"learning_rate": 1.7000109442229208e-05,
"loss": 0.7841,
"step": 810
},
{
"epoch": 0.56,
"grad_norm": 0.4903699842854447,
"learning_rate": 1.695568141373049e-05,
"loss": 0.8141,
"step": 811
},
{
"epoch": 0.56,
"grad_norm": 0.4964091310140744,
"learning_rate": 1.691126875882263e-05,
"loss": 0.8028,
"step": 812
},
{
"epoch": 0.56,
"grad_norm": 0.5037536521027631,
"learning_rate": 1.686687170178636e-05,
"loss": 0.7806,
"step": 813
},
{
"epoch": 0.56,
"grad_norm": 0.5063083789553599,
"learning_rate": 1.6822490466823635e-05,
"loss": 0.7396,
"step": 814
},
{
"epoch": 0.56,
"grad_norm": 0.5052319830115505,
"learning_rate": 1.677812527805651e-05,
"loss": 0.8174,
"step": 815
},
{
"epoch": 0.57,
"grad_norm": 0.5022623545230387,
"learning_rate": 1.6733776359526024e-05,
"loss": 0.7939,
"step": 816
},
{
"epoch": 0.57,
"grad_norm": 0.4729238187986902,
"learning_rate": 1.668944393519103e-05,
"loss": 0.7347,
"step": 817
},
{
"epoch": 0.57,
"grad_norm": 0.5179451006448108,
"learning_rate": 1.6645128228927104e-05,
"loss": 0.7488,
"step": 818
},
{
"epoch": 0.57,
"grad_norm": 0.5474415839461859,
"learning_rate": 1.6600829464525375e-05,
"loss": 0.7929,
"step": 819
},
{
"epoch": 0.57,
"grad_norm": 0.48746900983440705,
"learning_rate": 1.655654786569144e-05,
"loss": 0.7776,
"step": 820
},
{
"epoch": 0.57,
"grad_norm": 0.48551174182160284,
"learning_rate": 1.6512283656044207e-05,
"loss": 0.8178,
"step": 821
},
{
"epoch": 0.57,
"grad_norm": 0.5266683036426705,
"learning_rate": 1.6468037059114758e-05,
"loss": 0.8272,
"step": 822
},
{
"epoch": 0.57,
"grad_norm": 0.5306870097314071,
"learning_rate": 1.6423808298345243e-05,
"loss": 0.7578,
"step": 823
},
{
"epoch": 0.57,
"grad_norm": 0.5323585200874378,
"learning_rate": 1.637959759708774e-05,
"loss": 0.7568,
"step": 824
},
{
"epoch": 0.57,
"grad_norm": 0.4992715763020537,
"learning_rate": 1.6335405178603104e-05,
"loss": 0.8113,
"step": 825
},
{
"epoch": 0.57,
"grad_norm": 0.48440579193821554,
"learning_rate": 1.6291231266059912e-05,
"loss": 0.8065,
"step": 826
},
{
"epoch": 0.57,
"grad_norm": 0.5116159667668598,
"learning_rate": 1.6247076082533244e-05,
"loss": 0.8223,
"step": 827
},
{
"epoch": 0.57,
"grad_norm": 0.4863155372830099,
"learning_rate": 1.6202939851003618e-05,
"loss": 0.7762,
"step": 828
},
{
"epoch": 0.57,
"grad_norm": 0.5524771528864563,
"learning_rate": 1.6158822794355845e-05,
"loss": 0.7394,
"step": 829
},
{
"epoch": 0.58,
"grad_norm": 0.48381309748478446,
"learning_rate": 1.6114725135377883e-05,
"loss": 0.7585,
"step": 830
},
{
"epoch": 0.58,
"grad_norm": 0.5020065704701435,
"learning_rate": 1.6070647096759782e-05,
"loss": 0.8317,
"step": 831
},
{
"epoch": 0.58,
"grad_norm": 0.4972697147590374,
"learning_rate": 1.6026588901092464e-05,
"loss": 0.7726,
"step": 832
},
{
"epoch": 0.58,
"grad_norm": 0.48619781653403904,
"learning_rate": 1.5982550770866665e-05,
"loss": 0.8035,
"step": 833
},
{
"epoch": 0.58,
"grad_norm": 0.5167527614205077,
"learning_rate": 1.593853292847179e-05,
"loss": 0.7419,
"step": 834
},
{
"epoch": 0.58,
"grad_norm": 0.494468492049781,
"learning_rate": 1.5894535596194783e-05,
"loss": 0.7549,
"step": 835
},
{
"epoch": 0.58,
"grad_norm": 0.5032873351901974,
"learning_rate": 1.585055899621904e-05,
"loss": 0.7698,
"step": 836
},
{
"epoch": 0.58,
"grad_norm": 0.47796027977198097,
"learning_rate": 1.580660335062322e-05,
"loss": 0.7985,
"step": 837
},
{
"epoch": 0.58,
"grad_norm": 0.4888767757243754,
"learning_rate": 1.57626688813802e-05,
"loss": 0.8162,
"step": 838
},
{
"epoch": 0.58,
"grad_norm": 0.47225601511066523,
"learning_rate": 1.5718755810355895e-05,
"loss": 0.7886,
"step": 839
},
{
"epoch": 0.58,
"grad_norm": 0.5159507607586354,
"learning_rate": 1.5674864359308174e-05,
"loss": 0.796,
"step": 840
},
{
"epoch": 0.58,
"grad_norm": 0.5007745781221807,
"learning_rate": 1.5630994749885726e-05,
"loss": 0.7475,
"step": 841
},
{
"epoch": 0.58,
"grad_norm": 0.4661734150185831,
"learning_rate": 1.5587147203626934e-05,
"loss": 0.8035,
"step": 842
},
{
"epoch": 0.58,
"grad_norm": 0.4641040655965332,
"learning_rate": 1.5543321941958762e-05,
"loss": 0.7579,
"step": 843
},
{
"epoch": 0.59,
"grad_norm": 0.49468669919875013,
"learning_rate": 1.549951918619564e-05,
"loss": 0.7401,
"step": 844
},
{
"epoch": 0.59,
"grad_norm": 0.48608318908334924,
"learning_rate": 1.5455739157538362e-05,
"loss": 0.7376,
"step": 845
},
{
"epoch": 0.59,
"grad_norm": 0.4866497498727417,
"learning_rate": 1.5411982077072925e-05,
"loss": 0.798,
"step": 846
},
{
"epoch": 0.59,
"grad_norm": 0.4653871403356585,
"learning_rate": 1.5368248165769465e-05,
"loss": 0.7819,
"step": 847
},
{
"epoch": 0.59,
"grad_norm": 0.48174940893949514,
"learning_rate": 1.532453764448109e-05,
"loss": 0.7822,
"step": 848
},
{
"epoch": 0.59,
"grad_norm": 0.48009917885009057,
"learning_rate": 1.52808507339428e-05,
"loss": 0.7687,
"step": 849
},
{
"epoch": 0.59,
"grad_norm": 0.5732077943049784,
"learning_rate": 1.5237187654770376e-05,
"loss": 0.7477,
"step": 850
},
{
"epoch": 0.59,
"grad_norm": 0.4772661221206767,
"learning_rate": 1.5193548627459238e-05,
"loss": 0.7677,
"step": 851
},
{
"epoch": 0.59,
"grad_norm": 0.4818376991024784,
"learning_rate": 1.5149933872383351e-05,
"loss": 0.8024,
"step": 852
},
{
"epoch": 0.59,
"grad_norm": 0.47977811889665084,
"learning_rate": 1.5106343609794098e-05,
"loss": 0.7911,
"step": 853
},
{
"epoch": 0.59,
"grad_norm": 0.5105499288964156,
"learning_rate": 1.5062778059819184e-05,
"loss": 0.7392,
"step": 854
},
{
"epoch": 0.59,
"grad_norm": 0.5044061370829088,
"learning_rate": 1.501923744246152e-05,
"loss": 0.7635,
"step": 855
},
{
"epoch": 0.59,
"grad_norm": 0.47648146157245835,
"learning_rate": 1.497572197759811e-05,
"loss": 0.7678,
"step": 856
},
{
"epoch": 0.59,
"grad_norm": 0.46293961763129765,
"learning_rate": 1.493223188497893e-05,
"loss": 0.808,
"step": 857
},
{
"epoch": 0.59,
"grad_norm": 0.49624534176747825,
"learning_rate": 1.488876738422584e-05,
"loss": 0.7665,
"step": 858
},
{
"epoch": 0.6,
"grad_norm": 0.5015883837367227,
"learning_rate": 1.4845328694831435e-05,
"loss": 0.7644,
"step": 859
},
{
"epoch": 0.6,
"grad_norm": 0.4812989929894248,
"learning_rate": 1.4801916036158017e-05,
"loss": 0.8021,
"step": 860
},
{
"epoch": 0.6,
"grad_norm": 0.5070168151568155,
"learning_rate": 1.4758529627436385e-05,
"loss": 0.7489,
"step": 861
},
{
"epoch": 0.6,
"grad_norm": 0.5050957617395391,
"learning_rate": 1.4715169687764796e-05,
"loss": 0.8054,
"step": 862
},
{
"epoch": 0.6,
"grad_norm": 0.5056847647443705,
"learning_rate": 1.4671836436107851e-05,
"loss": 0.7994,
"step": 863
},
{
"epoch": 0.6,
"grad_norm": 0.46289183060831274,
"learning_rate": 1.4628530091295348e-05,
"loss": 0.8141,
"step": 864
},
{
"epoch": 0.6,
"grad_norm": 0.4707960629031264,
"learning_rate": 1.458525087202125e-05,
"loss": 0.759,
"step": 865
},
{
"epoch": 0.6,
"grad_norm": 0.45940124342707994,
"learning_rate": 1.4541998996842503e-05,
"loss": 0.7806,
"step": 866
},
{
"epoch": 0.6,
"grad_norm": 0.4705220666916006,
"learning_rate": 1.4498774684177983e-05,
"loss": 0.7672,
"step": 867
},
{
"epoch": 0.6,
"grad_norm": 0.49050120476564424,
"learning_rate": 1.4455578152307377e-05,
"loss": 0.7761,
"step": 868
},
{
"epoch": 0.6,
"grad_norm": 0.5168463574458926,
"learning_rate": 1.4412409619370058e-05,
"loss": 0.7465,
"step": 869
},
{
"epoch": 0.6,
"grad_norm": 0.47400498720092105,
"learning_rate": 1.4369269303364056e-05,
"loss": 0.7768,
"step": 870
},
{
"epoch": 0.6,
"grad_norm": 0.4781395500565627,
"learning_rate": 1.4326157422144855e-05,
"loss": 0.8195,
"step": 871
},
{
"epoch": 0.6,
"grad_norm": 0.5058769340824008,
"learning_rate": 1.4283074193424379e-05,
"loss": 0.7907,
"step": 872
},
{
"epoch": 0.61,
"grad_norm": 0.5201492501797536,
"learning_rate": 1.4240019834769843e-05,
"loss": 0.7948,
"step": 873
},
{
"epoch": 0.61,
"grad_norm": 0.48359599494239164,
"learning_rate": 1.4196994563602674e-05,
"loss": 0.7567,
"step": 874
},
{
"epoch": 0.61,
"grad_norm": 0.48509106322537393,
"learning_rate": 1.4153998597197417e-05,
"loss": 0.8009,
"step": 875
},
{
"epoch": 0.61,
"grad_norm": 0.47477351642440896,
"learning_rate": 1.4111032152680621e-05,
"loss": 0.8167,
"step": 876
},
{
"epoch": 0.61,
"grad_norm": 0.4755850911633707,
"learning_rate": 1.4068095447029752e-05,
"loss": 0.7788,
"step": 877
},
{
"epoch": 0.61,
"grad_norm": 0.4851577533681598,
"learning_rate": 1.4025188697072098e-05,
"loss": 0.7585,
"step": 878
},
{
"epoch": 0.61,
"grad_norm": 0.4886058375280974,
"learning_rate": 1.3982312119483671e-05,
"loss": 0.7853,
"step": 879
},
{
"epoch": 0.61,
"grad_norm": 0.47510378969565265,
"learning_rate": 1.3939465930788128e-05,
"loss": 0.7796,
"step": 880
},
{
"epoch": 0.61,
"grad_norm": 0.4589655323624497,
"learning_rate": 1.3896650347355652e-05,
"loss": 0.761,
"step": 881
},
{
"epoch": 0.61,
"grad_norm": 0.4894458944192119,
"learning_rate": 1.3853865585401862e-05,
"loss": 0.8132,
"step": 882
},
{
"epoch": 0.61,
"grad_norm": 0.4767974050535542,
"learning_rate": 1.381111186098675e-05,
"loss": 0.8093,
"step": 883
},
{
"epoch": 0.61,
"grad_norm": 0.4880817405500377,
"learning_rate": 1.3768389390013558e-05,
"loss": 0.8005,
"step": 884
},
{
"epoch": 0.61,
"grad_norm": 0.49312237993918806,
"learning_rate": 1.3725698388227713e-05,
"loss": 0.786,
"step": 885
},
{
"epoch": 0.61,
"grad_norm": 0.5316214401133954,
"learning_rate": 1.3683039071215717e-05,
"loss": 0.8176,
"step": 886
},
{
"epoch": 0.61,
"grad_norm": 0.4965547142767931,
"learning_rate": 1.3640411654404058e-05,
"loss": 0.7861,
"step": 887
},
{
"epoch": 0.62,
"grad_norm": 0.5084016326683347,
"learning_rate": 1.3597816353058141e-05,
"loss": 0.7252,
"step": 888
},
{
"epoch": 0.62,
"grad_norm": 0.5231898927815883,
"learning_rate": 1.3555253382281185e-05,
"loss": 0.7612,
"step": 889
},
{
"epoch": 0.62,
"grad_norm": 0.47808759891123936,
"learning_rate": 1.3512722957013157e-05,
"loss": 0.7926,
"step": 890
},
{
"epoch": 0.62,
"grad_norm": 0.5319988221305536,
"learning_rate": 1.347022529202965e-05,
"loss": 0.7641,
"step": 891
},
{
"epoch": 0.62,
"grad_norm": 0.48673493653336203,
"learning_rate": 1.342776060194083e-05,
"loss": 0.731,
"step": 892
},
{
"epoch": 0.62,
"grad_norm": 0.4731126916195463,
"learning_rate": 1.3385329101190338e-05,
"loss": 0.7729,
"step": 893
},
{
"epoch": 0.62,
"grad_norm": 0.4823134154316384,
"learning_rate": 1.3342931004054212e-05,
"loss": 0.7477,
"step": 894
},
{
"epoch": 0.62,
"grad_norm": 0.5058280070533671,
"learning_rate": 1.3300566524639817e-05,
"loss": 0.7828,
"step": 895
},
{
"epoch": 0.62,
"grad_norm": 0.468233497744191,
"learning_rate": 1.3258235876884735e-05,
"loss": 0.7329,
"step": 896
},
{
"epoch": 0.62,
"grad_norm": 0.4924064440315043,
"learning_rate": 1.3215939274555711e-05,
"loss": 0.8312,
"step": 897
},
{
"epoch": 0.62,
"grad_norm": 0.4974643797277836,
"learning_rate": 1.3173676931247546e-05,
"loss": 0.7764,
"step": 898
},
{
"epoch": 0.62,
"grad_norm": 0.5021274054337511,
"learning_rate": 1.3131449060382053e-05,
"loss": 0.7559,
"step": 899
},
{
"epoch": 0.62,
"grad_norm": 0.48190114004891227,
"learning_rate": 1.308925587520696e-05,
"loss": 0.8314,
"step": 900
},
{
"epoch": 0.62,
"grad_norm": 0.4852738774578702,
"learning_rate": 1.3047097588794825e-05,
"loss": 0.7469,
"step": 901
},
{
"epoch": 0.63,
"grad_norm": 0.49321550955532084,
"learning_rate": 1.3004974414041987e-05,
"loss": 0.7483,
"step": 902
},
{
"epoch": 0.63,
"grad_norm": 0.4833771201169626,
"learning_rate": 1.2962886563667439e-05,
"loss": 0.7457,
"step": 903
},
{
"epoch": 0.63,
"grad_norm": 0.4760950035912892,
"learning_rate": 1.2920834250211838e-05,
"loss": 0.7206,
"step": 904
},
{
"epoch": 0.63,
"grad_norm": 0.48990098475786004,
"learning_rate": 1.287881768603634e-05,
"loss": 0.7795,
"step": 905
},
{
"epoch": 0.63,
"grad_norm": 0.4937113877123733,
"learning_rate": 1.283683708332159e-05,
"loss": 0.7283,
"step": 906
},
{
"epoch": 0.63,
"grad_norm": 0.48526745474425287,
"learning_rate": 1.2794892654066626e-05,
"loss": 0.7828,
"step": 907
},
{
"epoch": 0.63,
"grad_norm": 0.48573084446575754,
"learning_rate": 1.27529846100878e-05,
"loss": 0.7541,
"step": 908
},
{
"epoch": 0.63,
"grad_norm": 0.4929165811403132,
"learning_rate": 1.2711113163017757e-05,
"loss": 0.7499,
"step": 909
},
{
"epoch": 0.63,
"grad_norm": 0.49252727775940786,
"learning_rate": 1.266927852430429e-05,
"loss": 0.7869,
"step": 910
},
{
"epoch": 0.63,
"grad_norm": 0.4801495706828983,
"learning_rate": 1.2627480905209328e-05,
"loss": 0.8004,
"step": 911
},
{
"epoch": 0.63,
"grad_norm": 0.4912971713290677,
"learning_rate": 1.258572051680786e-05,
"loss": 0.7776,
"step": 912
},
{
"epoch": 0.63,
"grad_norm": 0.48114117135576295,
"learning_rate": 1.254399756998685e-05,
"loss": 0.8102,
"step": 913
},
{
"epoch": 0.63,
"grad_norm": 0.5279590939303451,
"learning_rate": 1.2502312275444205e-05,
"loss": 0.7746,
"step": 914
},
{
"epoch": 0.63,
"grad_norm": 0.48058510243816727,
"learning_rate": 1.246066484368767e-05,
"loss": 0.7697,
"step": 915
},
{
"epoch": 0.64,
"grad_norm": 0.4891882831808458,
"learning_rate": 1.2419055485033788e-05,
"loss": 0.7751,
"step": 916
},
{
"epoch": 0.64,
"grad_norm": 0.49510275761576966,
"learning_rate": 1.2377484409606848e-05,
"loss": 0.7611,
"step": 917
},
{
"epoch": 0.64,
"grad_norm": 0.484390687764419,
"learning_rate": 1.2335951827337796e-05,
"loss": 0.7551,
"step": 918
},
{
"epoch": 0.64,
"grad_norm": 0.4758268626613479,
"learning_rate": 1.2294457947963206e-05,
"loss": 0.7864,
"step": 919
},
{
"epoch": 0.64,
"grad_norm": 0.49387906808691107,
"learning_rate": 1.22530029810242e-05,
"loss": 0.7991,
"step": 920
},
{
"epoch": 0.64,
"grad_norm": 0.5568703555813375,
"learning_rate": 1.2211587135865384e-05,
"loss": 0.8054,
"step": 921
},
{
"epoch": 0.64,
"grad_norm": 0.5124567262642039,
"learning_rate": 1.217021062163381e-05,
"loss": 0.7655,
"step": 922
},
{
"epoch": 0.64,
"grad_norm": 0.5089029532006102,
"learning_rate": 1.2128873647277919e-05,
"loss": 0.762,
"step": 923
},
{
"epoch": 0.64,
"grad_norm": 0.4668696806483487,
"learning_rate": 1.2087576421546467e-05,
"loss": 0.7813,
"step": 924
},
{
"epoch": 0.64,
"grad_norm": 0.4689991390772118,
"learning_rate": 1.2046319152987493e-05,
"loss": 0.8165,
"step": 925
},
{
"epoch": 0.64,
"grad_norm": 0.5028133263314055,
"learning_rate": 1.200510204994724e-05,
"loss": 0.7806,
"step": 926
},
{
"epoch": 0.64,
"grad_norm": 0.5102721635771061,
"learning_rate": 1.1963925320569132e-05,
"loss": 0.7779,
"step": 927
},
{
"epoch": 0.64,
"grad_norm": 0.4844941377450217,
"learning_rate": 1.1922789172792695e-05,
"loss": 0.7849,
"step": 928
},
{
"epoch": 0.64,
"grad_norm": 0.47835626940344983,
"learning_rate": 1.1881693814352543e-05,
"loss": 0.7248,
"step": 929
},
{
"epoch": 0.64,
"grad_norm": 0.4967186420977549,
"learning_rate": 1.1840639452777288e-05,
"loss": 0.7104,
"step": 930
},
{
"epoch": 0.65,
"grad_norm": 0.47869836965662654,
"learning_rate": 1.179962629538852e-05,
"loss": 0.768,
"step": 931
},
{
"epoch": 0.65,
"grad_norm": 0.49664205233469666,
"learning_rate": 1.1758654549299735e-05,
"loss": 0.8081,
"step": 932
},
{
"epoch": 0.65,
"grad_norm": 0.48099225205462176,
"learning_rate": 1.1717724421415322e-05,
"loss": 0.7487,
"step": 933
},
{
"epoch": 0.65,
"grad_norm": 0.4729521775142028,
"learning_rate": 1.1676836118429502e-05,
"loss": 0.7306,
"step": 934
},
{
"epoch": 0.65,
"grad_norm": 0.4649798940750168,
"learning_rate": 1.1635989846825275e-05,
"loss": 0.7717,
"step": 935
},
{
"epoch": 0.65,
"grad_norm": 0.4700670527402769,
"learning_rate": 1.1595185812873382e-05,
"loss": 0.7559,
"step": 936
},
{
"epoch": 0.65,
"grad_norm": 0.4954868929076864,
"learning_rate": 1.155442422263128e-05,
"loss": 0.7548,
"step": 937
},
{
"epoch": 0.65,
"grad_norm": 0.47420553168858387,
"learning_rate": 1.1513705281942072e-05,
"loss": 0.7817,
"step": 938
},
{
"epoch": 0.65,
"grad_norm": 0.4818021248601088,
"learning_rate": 1.1473029196433496e-05,
"loss": 0.7662,
"step": 939
},
{
"epoch": 0.65,
"grad_norm": 0.4863889468359599,
"learning_rate": 1.1432396171516882e-05,
"loss": 0.8149,
"step": 940
},
{
"epoch": 0.65,
"grad_norm": 0.47797327677318235,
"learning_rate": 1.1391806412386086e-05,
"loss": 0.7679,
"step": 941
},
{
"epoch": 0.65,
"grad_norm": 0.4849047069778823,
"learning_rate": 1.1351260124016479e-05,
"loss": 0.78,
"step": 942
},
{
"epoch": 0.65,
"grad_norm": 0.4960165370813466,
"learning_rate": 1.1310757511163919e-05,
"loss": 0.7812,
"step": 943
},
{
"epoch": 0.65,
"grad_norm": 0.4789511229743143,
"learning_rate": 1.127029877836371e-05,
"loss": 0.7681,
"step": 944
},
{
"epoch": 0.66,
"grad_norm": 0.4705245179516804,
"learning_rate": 1.1229884129929549e-05,
"loss": 0.792,
"step": 945
},
{
"epoch": 0.66,
"grad_norm": 0.48864519472999407,
"learning_rate": 1.118951376995251e-05,
"loss": 0.7464,
"step": 946
},
{
"epoch": 0.66,
"grad_norm": 0.48859827820124624,
"learning_rate": 1.1149187902300032e-05,
"loss": 0.8027,
"step": 947
},
{
"epoch": 0.66,
"grad_norm": 0.4888823947308124,
"learning_rate": 1.1108906730614841e-05,
"loss": 0.7869,
"step": 948
},
{
"epoch": 0.66,
"grad_norm": 0.49872738148191,
"learning_rate": 1.1068670458313984e-05,
"loss": 0.7784,
"step": 949
},
{
"epoch": 0.66,
"grad_norm": 0.4859674466345397,
"learning_rate": 1.102847928858776e-05,
"loss": 0.7668,
"step": 950
},
{
"epoch": 0.66,
"grad_norm": 0.49042437410834233,
"learning_rate": 1.0988333424398687e-05,
"loss": 0.8085,
"step": 951
},
{
"epoch": 0.66,
"grad_norm": 0.47905184830976416,
"learning_rate": 1.0948233068480501e-05,
"loss": 0.7791,
"step": 952
},
{
"epoch": 0.66,
"grad_norm": 0.47152624271179366,
"learning_rate": 1.0908178423337135e-05,
"loss": 0.762,
"step": 953
},
{
"epoch": 0.66,
"grad_norm": 0.47303514061947616,
"learning_rate": 1.0868169691241683e-05,
"loss": 0.7758,
"step": 954
},
{
"epoch": 0.66,
"grad_norm": 0.4859350143456943,
"learning_rate": 1.0828207074235367e-05,
"loss": 0.7913,
"step": 955
},
{
"epoch": 0.66,
"grad_norm": 0.4846233084079503,
"learning_rate": 1.0788290774126549e-05,
"loss": 0.8016,
"step": 956
},
{
"epoch": 0.66,
"grad_norm": 0.48358355933838904,
"learning_rate": 1.0748420992489687e-05,
"loss": 0.7907,
"step": 957
},
{
"epoch": 0.66,
"grad_norm": 0.4759715221475911,
"learning_rate": 1.0708597930664313e-05,
"loss": 0.741,
"step": 958
},
{
"epoch": 0.66,
"grad_norm": 0.4802209611270416,
"learning_rate": 1.0668821789754041e-05,
"loss": 0.7766,
"step": 959
},
{
"epoch": 0.67,
"grad_norm": 0.49702180806713997,
"learning_rate": 1.0629092770625547e-05,
"loss": 0.803,
"step": 960
},
{
"epoch": 0.67,
"grad_norm": 0.48562794091638134,
"learning_rate": 1.058941107390752e-05,
"loss": 0.7591,
"step": 961
},
{
"epoch": 0.67,
"grad_norm": 0.47694622065405146,
"learning_rate": 1.0549776899989686e-05,
"loss": 0.7923,
"step": 962
},
{
"epoch": 0.67,
"grad_norm": 0.47932521321019556,
"learning_rate": 1.0510190449021787e-05,
"loss": 0.7495,
"step": 963
},
{
"epoch": 0.67,
"grad_norm": 0.5017444243492222,
"learning_rate": 1.0470651920912576e-05,
"loss": 0.8229,
"step": 964
},
{
"epoch": 0.67,
"grad_norm": 0.4732247422203251,
"learning_rate": 1.043116151532877e-05,
"loss": 0.7832,
"step": 965
},
{
"epoch": 0.67,
"grad_norm": 0.5397120440027713,
"learning_rate": 1.039171943169411e-05,
"loss": 0.7516,
"step": 966
},
{
"epoch": 0.67,
"grad_norm": 0.4835945036046448,
"learning_rate": 1.0352325869188276e-05,
"loss": 0.7617,
"step": 967
},
{
"epoch": 0.67,
"grad_norm": 0.4606668610404498,
"learning_rate": 1.0312981026745952e-05,
"loss": 0.7121,
"step": 968
},
{
"epoch": 0.67,
"grad_norm": 0.47622727593455694,
"learning_rate": 1.027368510305576e-05,
"loss": 0.7871,
"step": 969
},
{
"epoch": 0.67,
"grad_norm": 0.49002948664940926,
"learning_rate": 1.0234438296559317e-05,
"loss": 0.779,
"step": 970
},
{
"epoch": 0.67,
"grad_norm": 0.47917416945117547,
"learning_rate": 1.019524080545017e-05,
"loss": 0.8079,
"step": 971
},
{
"epoch": 0.67,
"grad_norm": 0.5089922392471253,
"learning_rate": 1.0156092827672848e-05,
"loss": 0.7422,
"step": 972
},
{
"epoch": 0.67,
"grad_norm": 0.5019076067747493,
"learning_rate": 1.0116994560921847e-05,
"loss": 0.7947,
"step": 973
},
{
"epoch": 0.68,
"grad_norm": 0.4852604975152797,
"learning_rate": 1.0077946202640603e-05,
"loss": 0.7827,
"step": 974
},
{
"epoch": 0.68,
"grad_norm": 0.48288079133015493,
"learning_rate": 1.003894795002052e-05,
"loss": 0.7244,
"step": 975
},
{
"epoch": 0.68,
"grad_norm": 0.48831445246949257,
"learning_rate": 1.0000000000000006e-05,
"loss": 0.7974,
"step": 976
},
{
"epoch": 0.68,
"grad_norm": 0.4817541508587628,
"learning_rate": 9.961102549263393e-06,
"loss": 0.784,
"step": 977
},
{
"epoch": 0.68,
"grad_norm": 0.5735509556303754,
"learning_rate": 9.922255794240035e-06,
"loss": 0.7831,
"step": 978
},
{
"epoch": 0.68,
"grad_norm": 0.4805608076773916,
"learning_rate": 9.883459931103267e-06,
"loss": 0.7404,
"step": 979
},
{
"epoch": 0.68,
"grad_norm": 0.4761209068791223,
"learning_rate": 9.844715155769418e-06,
"loss": 0.7758,
"step": 980
},
{
"epoch": 0.68,
"grad_norm": 0.4736587441970293,
"learning_rate": 9.806021663896816e-06,
"loss": 0.7652,
"step": 981
},
{
"epoch": 0.68,
"grad_norm": 0.5249857515520794,
"learning_rate": 9.767379650884836e-06,
"loss": 0.7558,
"step": 982
},
{
"epoch": 0.68,
"grad_norm": 0.498989930767968,
"learning_rate": 9.72878931187288e-06,
"loss": 0.8031,
"step": 983
},
{
"epoch": 0.68,
"grad_norm": 0.49276185097308844,
"learning_rate": 9.690250841739392e-06,
"loss": 0.7679,
"step": 984
},
{
"epoch": 0.68,
"grad_norm": 0.49380125173304984,
"learning_rate": 9.651764435100879e-06,
"loss": 0.7677,
"step": 985
},
{
"epoch": 0.68,
"grad_norm": 0.5008521146693184,
"learning_rate": 9.613330286310952e-06,
"loss": 0.8169,
"step": 986
},
{
"epoch": 0.68,
"grad_norm": 0.48635954924143693,
"learning_rate": 9.574948589459295e-06,
"loss": 0.7504,
"step": 987
},
{
"epoch": 0.68,
"grad_norm": 0.4904491905638087,
"learning_rate": 9.536619538370738e-06,
"loss": 0.7717,
"step": 988
},
{
"epoch": 0.69,
"grad_norm": 0.4806288406024631,
"learning_rate": 9.498343326604249e-06,
"loss": 0.743,
"step": 989
},
{
"epoch": 0.69,
"grad_norm": 0.4994511455828098,
"learning_rate": 9.460120147451952e-06,
"loss": 0.7825,
"step": 990
},
{
"epoch": 0.69,
"grad_norm": 0.47255503720719894,
"learning_rate": 9.421950193938157e-06,
"loss": 0.758,
"step": 991
},
{
"epoch": 0.69,
"grad_norm": 0.48920414622792796,
"learning_rate": 9.3838336588184e-06,
"loss": 0.7489,
"step": 992
},
{
"epoch": 0.69,
"grad_norm": 0.48592079865016213,
"learning_rate": 9.345770734578465e-06,
"loss": 0.7447,
"step": 993
},
{
"epoch": 0.69,
"grad_norm": 0.4796517836432118,
"learning_rate": 9.307761613433377e-06,
"loss": 0.7655,
"step": 994
},
{
"epoch": 0.69,
"grad_norm": 0.4878166679182864,
"learning_rate": 9.269806487326491e-06,
"loss": 0.7727,
"step": 995
},
{
"epoch": 0.69,
"grad_norm": 0.4705115730490068,
"learning_rate": 9.23190554792847e-06,
"loss": 0.771,
"step": 996
},
{
"epoch": 0.69,
"grad_norm": 0.4875638303944303,
"learning_rate": 9.194058986636336e-06,
"loss": 0.7795,
"step": 997
},
{
"epoch": 0.69,
"grad_norm": 0.48019206357183797,
"learning_rate": 9.156266994572518e-06,
"loss": 0.7305,
"step": 998
},
{
"epoch": 0.69,
"grad_norm": 0.49341123666392966,
"learning_rate": 9.118529762583881e-06,
"loss": 0.7212,
"step": 999
},
{
"epoch": 0.69,
"grad_norm": 0.508537649350886,
"learning_rate": 9.080847481240735e-06,
"loss": 0.7476,
"step": 1000
},
{
"epoch": 0.69,
"grad_norm": 0.4946734973821319,
"learning_rate": 9.043220340835895e-06,
"loss": 0.7851,
"step": 1001
},
{
"epoch": 0.69,
"grad_norm": 0.4843614227355129,
"learning_rate": 9.005648531383733e-06,
"loss": 0.743,
"step": 1002
},
{
"epoch": 0.7,
"grad_norm": 0.47258072461371337,
"learning_rate": 8.968132242619204e-06,
"loss": 0.7776,
"step": 1003
},
{
"epoch": 0.7,
"grad_norm": 0.5024272930505665,
"learning_rate": 8.930671663996864e-06,
"loss": 0.7384,
"step": 1004
},
{
"epoch": 0.7,
"grad_norm": 0.48391068522162595,
"learning_rate": 8.893266984689967e-06,
"loss": 0.7754,
"step": 1005
},
{
"epoch": 0.7,
"grad_norm": 0.49236617097888385,
"learning_rate": 8.855918393589462e-06,
"loss": 0.7544,
"step": 1006
},
{
"epoch": 0.7,
"grad_norm": 0.5133487076031475,
"learning_rate": 8.818626079303038e-06,
"loss": 0.7882,
"step": 1007
},
{
"epoch": 0.7,
"grad_norm": 0.4961125344942953,
"learning_rate": 8.781390230154247e-06,
"loss": 0.7937,
"step": 1008
},
{
"epoch": 0.7,
"grad_norm": 0.4945117467247688,
"learning_rate": 8.744211034181444e-06,
"loss": 0.7691,
"step": 1009
},
{
"epoch": 0.7,
"grad_norm": 0.4961446600956197,
"learning_rate": 8.707088679136898e-06,
"loss": 0.7395,
"step": 1010
},
{
"epoch": 0.7,
"grad_norm": 0.4754005207851951,
"learning_rate": 8.670023352485859e-06,
"loss": 0.7493,
"step": 1011
},
{
"epoch": 0.7,
"grad_norm": 0.4992030574943027,
"learning_rate": 8.633015241405558e-06,
"loss": 0.7433,
"step": 1012
},
{
"epoch": 0.7,
"grad_norm": 0.4846150905673964,
"learning_rate": 8.59606453278432e-06,
"loss": 0.7448,
"step": 1013
},
{
"epoch": 0.7,
"grad_norm": 0.5057678537750075,
"learning_rate": 8.559171413220565e-06,
"loss": 0.7412,
"step": 1014
},
{
"epoch": 0.7,
"grad_norm": 0.49246061712014344,
"learning_rate": 8.522336069021914e-06,
"loss": 0.751,
"step": 1015
},
{
"epoch": 0.7,
"grad_norm": 0.4719939909381366,
"learning_rate": 8.485558686204215e-06,
"loss": 0.7642,
"step": 1016
},
{
"epoch": 0.71,
"grad_norm": 0.47183113833725665,
"learning_rate": 8.448839450490605e-06,
"loss": 0.7751,
"step": 1017
},
{
"epoch": 0.71,
"grad_norm": 0.49660032068986937,
"learning_rate": 8.412178547310619e-06,
"loss": 0.7803,
"step": 1018
},
{
"epoch": 0.71,
"grad_norm": 0.49486946871488025,
"learning_rate": 8.37557616179918e-06,
"loss": 0.7241,
"step": 1019
},
{
"epoch": 0.71,
"grad_norm": 0.5518439444017471,
"learning_rate": 8.33903247879571e-06,
"loss": 0.7353,
"step": 1020
},
{
"epoch": 0.71,
"grad_norm": 0.4692857839466924,
"learning_rate": 8.302547682843199e-06,
"loss": 0.7498,
"step": 1021
},
{
"epoch": 0.71,
"grad_norm": 0.47373248320053507,
"learning_rate": 8.266121958187246e-06,
"loss": 0.7546,
"step": 1022
},
{
"epoch": 0.71,
"grad_norm": 0.5246320978803066,
"learning_rate": 8.229755488775162e-06,
"loss": 0.7795,
"step": 1023
},
{
"epoch": 0.71,
"grad_norm": 0.49616923874865776,
"learning_rate": 8.193448458255e-06,
"loss": 0.7549,
"step": 1024
},
{
"epoch": 0.71,
"grad_norm": 0.4467862135244976,
"learning_rate": 8.15720104997468e-06,
"loss": 0.7595,
"step": 1025
},
{
"epoch": 0.71,
"grad_norm": 0.47523702448943994,
"learning_rate": 8.121013446981004e-06,
"loss": 0.7392,
"step": 1026
},
{
"epoch": 0.71,
"grad_norm": 0.512670891013966,
"learning_rate": 8.08488583201878e-06,
"loss": 0.785,
"step": 1027
},
{
"epoch": 0.71,
"grad_norm": 0.4732277457535213,
"learning_rate": 8.048818387529888e-06,
"loss": 0.7204,
"step": 1028
},
{
"epoch": 0.71,
"grad_norm": 0.47773597696272846,
"learning_rate": 8.01281129565233e-06,
"loss": 0.7888,
"step": 1029
},
{
"epoch": 0.71,
"grad_norm": 0.46309912687943056,
"learning_rate": 7.976864738219334e-06,
"loss": 0.7619,
"step": 1030
},
{
"epoch": 0.71,
"grad_norm": 0.4927641984397257,
"learning_rate": 7.940978896758449e-06,
"loss": 0.7605,
"step": 1031
},
{
"epoch": 0.72,
"grad_norm": 0.4925801861465075,
"learning_rate": 7.905153952490614e-06,
"loss": 0.7701,
"step": 1032
},
{
"epoch": 0.72,
"grad_norm": 0.4893405623100433,
"learning_rate": 7.869390086329214e-06,
"loss": 0.7207,
"step": 1033
},
{
"epoch": 0.72,
"grad_norm": 0.46599381286827557,
"learning_rate": 7.833687478879228e-06,
"loss": 0.7727,
"step": 1034
},
{
"epoch": 0.72,
"grad_norm": 0.47919930553097273,
"learning_rate": 7.79804631043626e-06,
"loss": 0.7667,
"step": 1035
},
{
"epoch": 0.72,
"grad_norm": 0.47524382443322716,
"learning_rate": 7.762466760985651e-06,
"loss": 0.7575,
"step": 1036
},
{
"epoch": 0.72,
"grad_norm": 0.48892521405660583,
"learning_rate": 7.726949010201585e-06,
"loss": 0.7565,
"step": 1037
},
{
"epoch": 0.72,
"grad_norm": 0.4948642763191729,
"learning_rate": 7.691493237446168e-06,
"loss": 0.7471,
"step": 1038
},
{
"epoch": 0.72,
"grad_norm": 0.47710747813528664,
"learning_rate": 7.656099621768508e-06,
"loss": 0.7407,
"step": 1039
},
{
"epoch": 0.72,
"grad_norm": 0.4934962006786032,
"learning_rate": 7.620768341903817e-06,
"loss": 0.7554,
"step": 1040
},
{
"epoch": 0.72,
"grad_norm": 0.4944599245145105,
"learning_rate": 7.585499576272539e-06,
"loss": 0.7575,
"step": 1041
},
{
"epoch": 0.72,
"grad_norm": 0.4687014772978345,
"learning_rate": 7.550293502979424e-06,
"loss": 0.7785,
"step": 1042
},
{
"epoch": 0.72,
"grad_norm": 0.4839921549688308,
"learning_rate": 7.5151502998126035e-06,
"loss": 0.7473,
"step": 1043
},
{
"epoch": 0.72,
"grad_norm": 0.47599865421348475,
"learning_rate": 7.480070144242753e-06,
"loss": 0.7459,
"step": 1044
},
{
"epoch": 0.72,
"grad_norm": 0.4917312802216608,
"learning_rate": 7.445053213422138e-06,
"loss": 0.7506,
"step": 1045
},
{
"epoch": 0.73,
"grad_norm": 0.4842184512405059,
"learning_rate": 7.410099684183738e-06,
"loss": 0.758,
"step": 1046
},
{
"epoch": 0.73,
"grad_norm": 0.48428108865266567,
"learning_rate": 7.375209733040401e-06,
"loss": 0.7919,
"step": 1047
},
{
"epoch": 0.73,
"grad_norm": 0.4907661456392694,
"learning_rate": 7.340383536183866e-06,
"loss": 0.7847,
"step": 1048
},
{
"epoch": 0.73,
"grad_norm": 0.4990566900453814,
"learning_rate": 7.305621269483927e-06,
"loss": 0.753,
"step": 1049
},
{
"epoch": 0.73,
"grad_norm": 0.5020877598957763,
"learning_rate": 7.270923108487558e-06,
"loss": 0.6874,
"step": 1050
},
{
"epoch": 0.73,
"grad_norm": 0.49294627426632276,
"learning_rate": 7.236289228417972e-06,
"loss": 0.7417,
"step": 1051
},
{
"epoch": 0.73,
"grad_norm": 0.5028036349209548,
"learning_rate": 7.201719804173797e-06,
"loss": 0.7704,
"step": 1052
},
{
"epoch": 0.73,
"grad_norm": 0.48667449930108614,
"learning_rate": 7.167215010328134e-06,
"loss": 0.7743,
"step": 1053
},
{
"epoch": 0.73,
"grad_norm": 0.48914182038025444,
"learning_rate": 7.132775021127738e-06,
"loss": 0.772,
"step": 1054
},
{
"epoch": 0.73,
"grad_norm": 0.47300106980167894,
"learning_rate": 7.098400010492079e-06,
"loss": 0.7587,
"step": 1055
},
{
"epoch": 0.73,
"grad_norm": 0.49877104532046845,
"learning_rate": 7.064090152012488e-06,
"loss": 0.7691,
"step": 1056
},
{
"epoch": 0.73,
"grad_norm": 0.5022381051104589,
"learning_rate": 7.029845618951319e-06,
"loss": 0.7667,
"step": 1057
},
{
"epoch": 0.73,
"grad_norm": 0.5168669843457216,
"learning_rate": 6.995666584240998e-06,
"loss": 0.7435,
"step": 1058
},
{
"epoch": 0.73,
"grad_norm": 0.49429583272619027,
"learning_rate": 6.961553220483199e-06,
"loss": 0.7269,
"step": 1059
},
{
"epoch": 0.73,
"grad_norm": 0.4991684885624798,
"learning_rate": 6.927505699947974e-06,
"loss": 0.7342,
"step": 1060
},
{
"epoch": 0.74,
"grad_norm": 0.47363522091975074,
"learning_rate": 6.893524194572856e-06,
"loss": 0.8032,
"step": 1061
},
{
"epoch": 0.74,
"grad_norm": 0.48713197150772847,
"learning_rate": 6.8596088759620164e-06,
"loss": 0.7745,
"step": 1062
},
{
"epoch": 0.74,
"grad_norm": 0.4878588216902752,
"learning_rate": 6.825759915385393e-06,
"loss": 0.788,
"step": 1063
},
{
"epoch": 0.74,
"grad_norm": 0.49397209018699784,
"learning_rate": 6.791977483777808e-06,
"loss": 0.7583,
"step": 1064
},
{
"epoch": 0.74,
"grad_norm": 0.47944105365042805,
"learning_rate": 6.758261751738113e-06,
"loss": 0.7639,
"step": 1065
},
{
"epoch": 0.74,
"grad_norm": 0.4619065725544852,
"learning_rate": 6.72461288952835e-06,
"loss": 0.7404,
"step": 1066
},
{
"epoch": 0.74,
"grad_norm": 0.47314201273147294,
"learning_rate": 6.691031067072866e-06,
"loss": 0.7693,
"step": 1067
},
{
"epoch": 0.74,
"grad_norm": 0.5329976849916267,
"learning_rate": 6.657516453957458e-06,
"loss": 0.7174,
"step": 1068
},
{
"epoch": 0.74,
"grad_norm": 0.475282884295269,
"learning_rate": 6.624069219428511e-06,
"loss": 0.7998,
"step": 1069
},
{
"epoch": 0.74,
"grad_norm": 0.4847809680201937,
"learning_rate": 6.5906895323921805e-06,
"loss": 0.7705,
"step": 1070
},
{
"epoch": 0.74,
"grad_norm": 0.5246289889232041,
"learning_rate": 6.557377561413483e-06,
"loss": 0.7302,
"step": 1071
},
{
"epoch": 0.74,
"grad_norm": 0.506859525016317,
"learning_rate": 6.524133474715488e-06,
"loss": 0.7694,
"step": 1072
},
{
"epoch": 0.74,
"grad_norm": 0.49018613561075985,
"learning_rate": 6.490957440178467e-06,
"loss": 0.7698,
"step": 1073
},
{
"epoch": 0.74,
"grad_norm": 0.4811447007719257,
"learning_rate": 6.457849625339006e-06,
"loss": 0.747,
"step": 1074
},
{
"epoch": 0.75,
"grad_norm": 0.4913638349338644,
"learning_rate": 6.424810197389195e-06,
"loss": 0.752,
"step": 1075
},
{
"epoch": 0.75,
"grad_norm": 0.5072569104701655,
"learning_rate": 6.391839323175788e-06,
"loss": 0.7112,
"step": 1076
},
{
"epoch": 0.75,
"grad_norm": 0.4974735521117355,
"learning_rate": 6.35893716919934e-06,
"loss": 0.755,
"step": 1077
},
{
"epoch": 0.75,
"grad_norm": 0.47934502112391125,
"learning_rate": 6.326103901613363e-06,
"loss": 0.7603,
"step": 1078
},
{
"epoch": 0.75,
"grad_norm": 0.4705577092922598,
"learning_rate": 6.293339686223521e-06,
"loss": 0.7826,
"step": 1079
},
{
"epoch": 0.75,
"grad_norm": 0.5085037979856588,
"learning_rate": 6.260644688486746e-06,
"loss": 0.7853,
"step": 1080
},
{
"epoch": 0.75,
"grad_norm": 0.4650913736907933,
"learning_rate": 6.228019073510432e-06,
"loss": 0.7706,
"step": 1081
},
{
"epoch": 0.75,
"grad_norm": 0.48385539177717063,
"learning_rate": 6.1954630060516005e-06,
"loss": 0.7467,
"step": 1082
},
{
"epoch": 0.75,
"grad_norm": 0.4838820589616206,
"learning_rate": 6.1629766505160725e-06,
"loss": 0.7908,
"step": 1083
},
{
"epoch": 0.75,
"grad_norm": 0.5155647139304619,
"learning_rate": 6.130560170957609e-06,
"loss": 0.7868,
"step": 1084
},
{
"epoch": 0.75,
"grad_norm": 0.4659186399629742,
"learning_rate": 6.098213731077101e-06,
"loss": 0.7947,
"step": 1085
},
{
"epoch": 0.75,
"grad_norm": 0.505048902759112,
"learning_rate": 6.065937494221763e-06,
"loss": 0.7488,
"step": 1086
},
{
"epoch": 0.75,
"grad_norm": 0.5070049063706124,
"learning_rate": 6.0337316233842865e-06,
"loss": 0.8068,
"step": 1087
},
{
"epoch": 0.75,
"grad_norm": 0.47992401136648666,
"learning_rate": 6.001596281201998e-06,
"loss": 0.7718,
"step": 1088
},
{
"epoch": 0.75,
"grad_norm": 0.46857594724702556,
"learning_rate": 5.969531629956089e-06,
"loss": 0.7899,
"step": 1089
},
{
"epoch": 0.76,
"grad_norm": 0.471139234573952,
"learning_rate": 5.937537831570745e-06,
"loss": 0.7429,
"step": 1090
},
{
"epoch": 0.76,
"grad_norm": 0.5114811109980374,
"learning_rate": 5.905615047612352e-06,
"loss": 0.7751,
"step": 1091
},
{
"epoch": 0.76,
"grad_norm": 0.464080556904048,
"learning_rate": 5.873763439288689e-06,
"loss": 0.7488,
"step": 1092
},
{
"epoch": 0.76,
"grad_norm": 0.4584592551371595,
"learning_rate": 5.8419831674481e-06,
"loss": 0.7483,
"step": 1093
},
{
"epoch": 0.76,
"grad_norm": 0.49662619046110545,
"learning_rate": 5.810274392578672e-06,
"loss": 0.7767,
"step": 1094
},
{
"epoch": 0.76,
"grad_norm": 0.4976494415108561,
"learning_rate": 5.778637274807455e-06,
"loss": 0.7979,
"step": 1095
},
{
"epoch": 0.76,
"grad_norm": 0.47487664657294143,
"learning_rate": 5.747071973899634e-06,
"loss": 0.7546,
"step": 1096
},
{
"epoch": 0.76,
"grad_norm": 0.4970355012656808,
"learning_rate": 5.715578649257709e-06,
"loss": 0.7845,
"step": 1097
},
{
"epoch": 0.76,
"grad_norm": 0.4786107687902576,
"learning_rate": 5.684157459920712e-06,
"loss": 0.7269,
"step": 1098
},
{
"epoch": 0.76,
"grad_norm": 0.5176058558935316,
"learning_rate": 5.65280856456341e-06,
"loss": 0.716,
"step": 1099
},
{
"epoch": 0.76,
"grad_norm": 0.5015496603390578,
"learning_rate": 5.621532121495468e-06,
"loss": 0.7604,
"step": 1100
},
{
"epoch": 0.76,
"grad_norm": 0.482460121447496,
"learning_rate": 5.590328288660687e-06,
"loss": 0.7795,
"step": 1101
},
{
"epoch": 0.76,
"grad_norm": 0.506774507577087,
"learning_rate": 5.559197223636197e-06,
"loss": 0.767,
"step": 1102
},
{
"epoch": 0.76,
"grad_norm": 0.5116694374237193,
"learning_rate": 5.528139083631641e-06,
"loss": 0.7601,
"step": 1103
},
{
"epoch": 0.77,
"grad_norm": 0.4793111704467444,
"learning_rate": 5.497154025488396e-06,
"loss": 0.7372,
"step": 1104
},
{
"epoch": 0.77,
"grad_norm": 0.5031738737335565,
"learning_rate": 5.4662422056787935e-06,
"loss": 0.7543,
"step": 1105
},
{
"epoch": 0.77,
"grad_norm": 0.4941544670400219,
"learning_rate": 5.4354037803053124e-06,
"loss": 0.7909,
"step": 1106
},
{
"epoch": 0.77,
"grad_norm": 0.4882920358536235,
"learning_rate": 5.4046389050997905e-06,
"loss": 0.7493,
"step": 1107
},
{
"epoch": 0.77,
"grad_norm": 0.48385177103685056,
"learning_rate": 5.373947735422638e-06,
"loss": 0.763,
"step": 1108
},
{
"epoch": 0.77,
"grad_norm": 0.48743244433219834,
"learning_rate": 5.343330426262075e-06,
"loss": 0.7856,
"step": 1109
},
{
"epoch": 0.77,
"grad_norm": 0.4699135998701381,
"learning_rate": 5.312787132233306e-06,
"loss": 0.7705,
"step": 1110
},
{
"epoch": 0.77,
"grad_norm": 0.4809633875738757,
"learning_rate": 5.282318007577789e-06,
"loss": 0.7725,
"step": 1111
},
{
"epoch": 0.77,
"grad_norm": 0.5318475387916723,
"learning_rate": 5.2519232061624255e-06,
"loss": 0.7557,
"step": 1112
},
{
"epoch": 0.77,
"grad_norm": 0.4857938018189243,
"learning_rate": 5.221602881478782e-06,
"loss": 0.769,
"step": 1113
},
{
"epoch": 0.77,
"grad_norm": 0.46536504626111413,
"learning_rate": 5.191357186642316e-06,
"loss": 0.7692,
"step": 1114
},
{
"epoch": 0.77,
"grad_norm": 0.48547637155616297,
"learning_rate": 5.161186274391632e-06,
"loss": 0.7539,
"step": 1115
},
{
"epoch": 0.77,
"grad_norm": 0.4849072637691909,
"learning_rate": 5.131090297087682e-06,
"loss": 0.7453,
"step": 1116
},
{
"epoch": 0.77,
"grad_norm": 0.48077925797425247,
"learning_rate": 5.101069406712979e-06,
"loss": 0.7361,
"step": 1117
},
{
"epoch": 0.78,
"grad_norm": 0.4704858783944034,
"learning_rate": 5.071123754870888e-06,
"loss": 0.7639,
"step": 1118
},
{
"epoch": 0.78,
"grad_norm": 0.4813252065835414,
"learning_rate": 5.041253492784797e-06,
"loss": 0.7229,
"step": 1119
},
{
"epoch": 0.78,
"grad_norm": 0.4805703618175304,
"learning_rate": 5.011458771297384e-06,
"loss": 0.7696,
"step": 1120
},
{
"epoch": 0.78,
"grad_norm": 0.4687630666036753,
"learning_rate": 4.981739740869866e-06,
"loss": 0.7518,
"step": 1121
},
{
"epoch": 0.78,
"grad_norm": 0.48572644993430747,
"learning_rate": 4.952096551581227e-06,
"loss": 0.7754,
"step": 1122
},
{
"epoch": 0.78,
"grad_norm": 0.4914811047861307,
"learning_rate": 4.922529353127439e-06,
"loss": 0.7572,
"step": 1123
},
{
"epoch": 0.78,
"grad_norm": 0.4856212085453324,
"learning_rate": 4.893038294820736e-06,
"loss": 0.7989,
"step": 1124
},
{
"epoch": 0.78,
"grad_norm": 0.48173957981869625,
"learning_rate": 4.863623525588854e-06,
"loss": 0.746,
"step": 1125
},
{
"epoch": 0.78,
"grad_norm": 0.4719936450842549,
"learning_rate": 4.834285193974277e-06,
"loss": 0.78,
"step": 1126
},
{
"epoch": 0.78,
"grad_norm": 0.47334670921136673,
"learning_rate": 4.805023448133468e-06,
"loss": 0.7847,
"step": 1127
},
{
"epoch": 0.78,
"grad_norm": 0.4829370382771918,
"learning_rate": 4.775838435836158e-06,
"loss": 0.7831,
"step": 1128
},
{
"epoch": 0.78,
"grad_norm": 0.4812769599142954,
"learning_rate": 4.746730304464567e-06,
"loss": 0.7591,
"step": 1129
},
{
"epoch": 0.78,
"grad_norm": 0.4948157455251411,
"learning_rate": 4.717699201012658e-06,
"loss": 0.811,
"step": 1130
},
{
"epoch": 0.78,
"grad_norm": 0.474178676859596,
"learning_rate": 4.688745272085451e-06,
"loss": 0.7398,
"step": 1131
},
{
"epoch": 0.78,
"grad_norm": 0.49124160620120977,
"learning_rate": 4.659868663898202e-06,
"loss": 0.7242,
"step": 1132
},
{
"epoch": 0.79,
"grad_norm": 0.4769722044486364,
"learning_rate": 4.6310695222757065e-06,
"loss": 0.763,
"step": 1133
},
{
"epoch": 0.79,
"grad_norm": 0.47333094057365704,
"learning_rate": 4.602347992651581e-06,
"loss": 0.7595,
"step": 1134
},
{
"epoch": 0.79,
"grad_norm": 0.4715204087591866,
"learning_rate": 4.573704220067481e-06,
"loss": 0.7534,
"step": 1135
},
{
"epoch": 0.79,
"grad_norm": 0.49198080182529574,
"learning_rate": 4.545138349172418e-06,
"loss": 0.7863,
"step": 1136
},
{
"epoch": 0.79,
"grad_norm": 0.48215380852417716,
"learning_rate": 4.516650524221984e-06,
"loss": 0.7632,
"step": 1137
},
{
"epoch": 0.79,
"grad_norm": 0.46183129558266617,
"learning_rate": 4.488240889077666e-06,
"loss": 0.7401,
"step": 1138
},
{
"epoch": 0.79,
"grad_norm": 0.5070149963438687,
"learning_rate": 4.459909587206082e-06,
"loss": 0.7581,
"step": 1139
},
{
"epoch": 0.79,
"grad_norm": 0.4841782800627238,
"learning_rate": 4.431656761678265e-06,
"loss": 0.7561,
"step": 1140
},
{
"epoch": 0.79,
"grad_norm": 0.4899678673395973,
"learning_rate": 4.403482555168983e-06,
"loss": 0.7629,
"step": 1141
},
{
"epoch": 0.79,
"grad_norm": 0.4968102449241334,
"learning_rate": 4.375387109955953e-06,
"loss": 0.8076,
"step": 1142
},
{
"epoch": 0.79,
"grad_norm": 0.47160462687115107,
"learning_rate": 4.347370567919153e-06,
"loss": 0.752,
"step": 1143
},
{
"epoch": 0.79,
"grad_norm": 0.48205162224091935,
"learning_rate": 4.319433070540126e-06,
"loss": 0.7395,
"step": 1144
},
{
"epoch": 0.79,
"grad_norm": 0.4582782834288945,
"learning_rate": 4.291574758901224e-06,
"loss": 0.7381,
"step": 1145
},
{
"epoch": 0.79,
"grad_norm": 0.4837458444364102,
"learning_rate": 4.263795773684929e-06,
"loss": 0.7714,
"step": 1146
},
{
"epoch": 0.8,
"grad_norm": 0.45710548577120463,
"learning_rate": 4.236096255173134e-06,
"loss": 0.7875,
"step": 1147
},
{
"epoch": 0.8,
"grad_norm": 0.4982877694374727,
"learning_rate": 4.208476343246417e-06,
"loss": 0.7467,
"step": 1148
},
{
"epoch": 0.8,
"grad_norm": 0.4499674542726187,
"learning_rate": 4.1809361773833505e-06,
"loss": 0.7492,
"step": 1149
},
{
"epoch": 0.8,
"grad_norm": 0.4711163292007359,
"learning_rate": 4.153475896659806e-06,
"loss": 0.7853,
"step": 1150
},
{
"epoch": 0.8,
"grad_norm": 0.504703279761807,
"learning_rate": 4.12609563974824e-06,
"loss": 0.7417,
"step": 1151
},
{
"epoch": 0.8,
"grad_norm": 0.4894097810346055,
"learning_rate": 4.098795544916985e-06,
"loss": 0.7915,
"step": 1152
},
{
"epoch": 0.8,
"grad_norm": 0.476371568906177,
"learning_rate": 4.071575750029557e-06,
"loss": 0.7844,
"step": 1153
},
{
"epoch": 0.8,
"grad_norm": 0.47729517902398616,
"learning_rate": 4.0444363925439845e-06,
"loss": 0.7488,
"step": 1154
},
{
"epoch": 0.8,
"grad_norm": 0.5231496277929877,
"learning_rate": 4.0173776095120656e-06,
"loss": 0.7894,
"step": 1155
},
{
"epoch": 0.8,
"grad_norm": 0.48239234882327225,
"learning_rate": 3.9903995375787245e-06,
"loss": 0.7502,
"step": 1156
},
{
"epoch": 0.8,
"grad_norm": 0.4836905348429235,
"learning_rate": 3.963502312981298e-06,
"loss": 0.7766,
"step": 1157
},
{
"epoch": 0.8,
"grad_norm": 0.49624765918153074,
"learning_rate": 3.936686071548837e-06,
"loss": 0.7367,
"step": 1158
},
{
"epoch": 0.8,
"grad_norm": 0.4645203815429803,
"learning_rate": 3.9099509487014375e-06,
"loss": 0.7615,
"step": 1159
},
{
"epoch": 0.8,
"grad_norm": 0.48463192677637107,
"learning_rate": 3.883297079449559e-06,
"loss": 0.7727,
"step": 1160
},
{
"epoch": 0.8,
"grad_norm": 0.4749281790649865,
"learning_rate": 3.85672459839334e-06,
"loss": 0.7412,
"step": 1161
},
{
"epoch": 0.81,
"grad_norm": 0.4791840197906529,
"learning_rate": 3.830233639721894e-06,
"loss": 0.7535,
"step": 1162
},
{
"epoch": 0.81,
"grad_norm": 0.4925967311195896,
"learning_rate": 3.803824337212678e-06,
"loss": 0.7868,
"step": 1163
},
{
"epoch": 0.81,
"grad_norm": 0.4780286556854472,
"learning_rate": 3.7774968242307662e-06,
"loss": 0.7563,
"step": 1164
},
{
"epoch": 0.81,
"grad_norm": 0.4755609403541473,
"learning_rate": 3.7512512337282235e-06,
"loss": 0.7198,
"step": 1165
},
{
"epoch": 0.81,
"grad_norm": 0.4861625146213427,
"learning_rate": 3.7250876982433947e-06,
"loss": 0.7876,
"step": 1166
},
{
"epoch": 0.81,
"grad_norm": 0.507245552427644,
"learning_rate": 3.6990063499002716e-06,
"loss": 0.8041,
"step": 1167
},
{
"epoch": 0.81,
"grad_norm": 0.48400718244469165,
"learning_rate": 3.6730073204077863e-06,
"loss": 0.7646,
"step": 1168
},
{
"epoch": 0.81,
"grad_norm": 0.4846848248910939,
"learning_rate": 3.6470907410591695e-06,
"loss": 0.7777,
"step": 1169
},
{
"epoch": 0.81,
"grad_norm": 0.478301208419343,
"learning_rate": 3.6212567427313096e-06,
"loss": 0.7392,
"step": 1170
},
{
"epoch": 0.81,
"grad_norm": 0.47192867635984564,
"learning_rate": 3.5955054558840387e-06,
"loss": 0.7311,
"step": 1171
},
{
"epoch": 0.81,
"grad_norm": 0.4820756571318969,
"learning_rate": 3.569837010559505e-06,
"loss": 0.808,
"step": 1172
},
{
"epoch": 0.81,
"grad_norm": 0.5019257111043679,
"learning_rate": 3.5442515363815333e-06,
"loss": 0.7755,
"step": 1173
},
{
"epoch": 0.81,
"grad_norm": 0.49849811032662616,
"learning_rate": 3.5187491625549198e-06,
"loss": 0.7973,
"step": 1174
},
{
"epoch": 0.81,
"grad_norm": 0.4708783211613849,
"learning_rate": 3.4933300178648423e-06,
"loss": 0.7766,
"step": 1175
},
{
"epoch": 0.82,
"grad_norm": 0.5240281864464211,
"learning_rate": 3.4679942306761484e-06,
"loss": 0.774,
"step": 1176
},
{
"epoch": 0.82,
"grad_norm": 0.48924093829090437,
"learning_rate": 3.442741928932758e-06,
"loss": 0.7644,
"step": 1177
},
{
"epoch": 0.82,
"grad_norm": 0.4711864990822743,
"learning_rate": 3.417573240156984e-06,
"loss": 0.7579,
"step": 1178
},
{
"epoch": 0.82,
"grad_norm": 0.4907908287742941,
"learning_rate": 3.39248829144889e-06,
"loss": 0.7741,
"step": 1179
},
{
"epoch": 0.82,
"grad_norm": 0.4810963042874063,
"learning_rate": 3.367487209485694e-06,
"loss": 0.7741,
"step": 1180
},
{
"epoch": 0.82,
"grad_norm": 0.4831704749158564,
"learning_rate": 3.3425701205210557e-06,
"loss": 0.7643,
"step": 1181
},
{
"epoch": 0.82,
"grad_norm": 0.47066247046282106,
"learning_rate": 3.317737150384488e-06,
"loss": 0.7556,
"step": 1182
},
{
"epoch": 0.82,
"grad_norm": 0.4897595808001683,
"learning_rate": 3.2929884244807187e-06,
"loss": 0.789,
"step": 1183
},
{
"epoch": 0.82,
"grad_norm": 0.48617214406660014,
"learning_rate": 3.2683240677890373e-06,
"loss": 0.7918,
"step": 1184
},
{
"epoch": 0.82,
"grad_norm": 0.5019468634409586,
"learning_rate": 3.243744204862678e-06,
"loss": 0.7418,
"step": 1185
},
{
"epoch": 0.82,
"grad_norm": 0.4724528751456791,
"learning_rate": 3.219248959828196e-06,
"loss": 0.7729,
"step": 1186
},
{
"epoch": 0.82,
"grad_norm": 0.4678004180129943,
"learning_rate": 3.194838456384819e-06,
"loss": 0.7764,
"step": 1187
},
{
"epoch": 0.82,
"grad_norm": 0.5127056883178569,
"learning_rate": 3.170512817803837e-06,
"loss": 0.7228,
"step": 1188
},
{
"epoch": 0.82,
"grad_norm": 0.489865646516689,
"learning_rate": 3.146272166927986e-06,
"loss": 0.7304,
"step": 1189
},
{
"epoch": 0.82,
"grad_norm": 0.48446154013383796,
"learning_rate": 3.122116626170826e-06,
"loss": 0.7636,
"step": 1190
},
{
"epoch": 0.83,
"grad_norm": 0.4844894290482539,
"learning_rate": 3.098046317516099e-06,
"loss": 0.7941,
"step": 1191
},
{
"epoch": 0.83,
"grad_norm": 0.4811797305046475,
"learning_rate": 3.074061362517138e-06,
"loss": 0.7428,
"step": 1192
},
{
"epoch": 0.83,
"grad_norm": 0.4721697803092834,
"learning_rate": 3.0501618822962566e-06,
"loss": 0.8002,
"step": 1193
},
{
"epoch": 0.83,
"grad_norm": 0.5118556430405979,
"learning_rate": 3.026347997544108e-06,
"loss": 0.7428,
"step": 1194
},
{
"epoch": 0.83,
"grad_norm": 0.4834100822099178,
"learning_rate": 3.0026198285191132e-06,
"loss": 0.7908,
"step": 1195
},
{
"epoch": 0.83,
"grad_norm": 0.4822894959379539,
"learning_rate": 2.9789774950468265e-06,
"loss": 0.7984,
"step": 1196
},
{
"epoch": 0.83,
"grad_norm": 0.4573225907672513,
"learning_rate": 2.955421116519337e-06,
"loss": 0.742,
"step": 1197
},
{
"epoch": 0.83,
"grad_norm": 0.47207390310293706,
"learning_rate": 2.931950811894666e-06,
"loss": 0.7479,
"step": 1198
},
{
"epoch": 0.83,
"grad_norm": 0.4852570509272973,
"learning_rate": 2.908566699696174e-06,
"loss": 0.7717,
"step": 1199
},
{
"epoch": 0.83,
"grad_norm": 0.48927839174364257,
"learning_rate": 2.885268898011957e-06,
"loss": 0.7929,
"step": 1200
},
{
"epoch": 0.83,
"grad_norm": 0.48858241464752034,
"learning_rate": 2.862057524494237e-06,
"loss": 0.7367,
"step": 1201
},
{
"epoch": 0.83,
"grad_norm": 0.5004372260423828,
"learning_rate": 2.838932696358798e-06,
"loss": 0.7402,
"step": 1202
},
{
"epoch": 0.83,
"grad_norm": 0.4849797404001239,
"learning_rate": 2.8158945303843597e-06,
"loss": 0.727,
"step": 1203
},
{
"epoch": 0.83,
"grad_norm": 0.49732019194240507,
"learning_rate": 2.792943142912008e-06,
"loss": 0.7304,
"step": 1204
},
{
"epoch": 0.84,
"grad_norm": 0.4646131309906768,
"learning_rate": 2.77007864984461e-06,
"loss": 0.7496,
"step": 1205
},
{
"epoch": 0.84,
"grad_norm": 0.4740456116745344,
"learning_rate": 2.747301166646221e-06,
"loss": 0.7258,
"step": 1206
},
{
"epoch": 0.84,
"grad_norm": 0.4447676433392948,
"learning_rate": 2.724610808341499e-06,
"loss": 0.7582,
"step": 1207
},
{
"epoch": 0.84,
"grad_norm": 0.4806130266403288,
"learning_rate": 2.7020076895151226e-06,
"loss": 0.7253,
"step": 1208
},
{
"epoch": 0.84,
"grad_norm": 0.47087647352218315,
"learning_rate": 2.679491924311226e-06,
"loss": 0.6908,
"step": 1209
},
{
"epoch": 0.84,
"grad_norm": 0.4663345434717518,
"learning_rate": 2.6570636264328185e-06,
"loss": 0.8114,
"step": 1210
},
{
"epoch": 0.84,
"grad_norm": 0.5006866552171956,
"learning_rate": 2.6347229091411876e-06,
"loss": 0.7837,
"step": 1211
},
{
"epoch": 0.84,
"grad_norm": 0.4668788920070028,
"learning_rate": 2.6124698852553664e-06,
"loss": 0.7061,
"step": 1212
},
{
"epoch": 0.84,
"grad_norm": 0.48941088999265875,
"learning_rate": 2.590304667151524e-06,
"loss": 0.7822,
"step": 1213
},
{
"epoch": 0.84,
"grad_norm": 0.46085579935014787,
"learning_rate": 2.5682273667624235e-06,
"loss": 0.7324,
"step": 1214
},
{
"epoch": 0.84,
"grad_norm": 0.47992832336223706,
"learning_rate": 2.546238095576856e-06,
"loss": 0.7334,
"step": 1215
},
{
"epoch": 0.84,
"grad_norm": 0.48223650538783,
"learning_rate": 2.524336964639067e-06,
"loss": 0.7516,
"step": 1216
},
{
"epoch": 0.84,
"grad_norm": 0.5375401955259207,
"learning_rate": 2.5025240845481945e-06,
"loss": 0.7624,
"step": 1217
},
{
"epoch": 0.84,
"grad_norm": 0.5154044048413867,
"learning_rate": 2.4807995654577278e-06,
"loss": 0.7083,
"step": 1218
},
{
"epoch": 0.85,
"grad_norm": 0.4601148699393038,
"learning_rate": 2.459163517074923e-06,
"loss": 0.7553,
"step": 1219
},
{
"epoch": 0.85,
"grad_norm": 0.4861091104958433,
"learning_rate": 2.4376160486602875e-06,
"loss": 0.7574,
"step": 1220
},
{
"epoch": 0.85,
"grad_norm": 0.45475958980692743,
"learning_rate": 2.41615726902698e-06,
"loss": 0.7368,
"step": 1221
},
{
"epoch": 0.85,
"grad_norm": 0.47438728137949354,
"learning_rate": 2.3947872865403144e-06,
"loss": 0.7587,
"step": 1222
},
{
"epoch": 0.85,
"grad_norm": 0.497159813046733,
"learning_rate": 2.37350620911716e-06,
"loss": 0.7144,
"step": 1223
},
{
"epoch": 0.85,
"grad_norm": 0.47231115312244326,
"learning_rate": 2.3523141442254382e-06,
"loss": 0.7765,
"step": 1224
},
{
"epoch": 0.85,
"grad_norm": 0.45613150054797136,
"learning_rate": 2.331211198883567e-06,
"loss": 0.6735,
"step": 1225
},
{
"epoch": 0.85,
"grad_norm": 0.45977375838305684,
"learning_rate": 2.3101974796599015e-06,
"loss": 0.8008,
"step": 1226
},
{
"epoch": 0.85,
"grad_norm": 0.4852464702286184,
"learning_rate": 2.289273092672215e-06,
"loss": 0.743,
"step": 1227
},
{
"epoch": 0.85,
"grad_norm": 0.4757634723542441,
"learning_rate": 2.2684381435871706e-06,
"loss": 0.7152,
"step": 1228
},
{
"epoch": 0.85,
"grad_norm": 0.4961912205809631,
"learning_rate": 2.247692737619769e-06,
"loss": 0.7449,
"step": 1229
},
{
"epoch": 0.85,
"grad_norm": 0.4906318377358333,
"learning_rate": 2.227036979532824e-06,
"loss": 0.7819,
"step": 1230
},
{
"epoch": 0.85,
"grad_norm": 0.4751462924801871,
"learning_rate": 2.2064709736364297e-06,
"loss": 0.7231,
"step": 1231
},
{
"epoch": 0.85,
"grad_norm": 0.4939725893647483,
"learning_rate": 2.1859948237874517e-06,
"loss": 0.7702,
"step": 1232
},
{
"epoch": 0.85,
"grad_norm": 0.4862507767124879,
"learning_rate": 2.1656086333889714e-06,
"loss": 0.7602,
"step": 1233
},
{
"epoch": 0.86,
"grad_norm": 0.469770488615222,
"learning_rate": 2.145312505389796e-06,
"loss": 0.8132,
"step": 1234
},
{
"epoch": 0.86,
"grad_norm": 0.4989376806754674,
"learning_rate": 2.1251065422839212e-06,
"loss": 0.7294,
"step": 1235
},
{
"epoch": 0.86,
"grad_norm": 0.4712804629076971,
"learning_rate": 2.1049908461100086e-06,
"loss": 0.7804,
"step": 1236
},
{
"epoch": 0.86,
"grad_norm": 0.49252204725086196,
"learning_rate": 2.0849655184508833e-06,
"loss": 0.783,
"step": 1237
},
{
"epoch": 0.86,
"grad_norm": 0.5134557300357184,
"learning_rate": 2.0650306604330163e-06,
"loss": 0.7253,
"step": 1238
},
{
"epoch": 0.86,
"grad_norm": 0.4444651508279809,
"learning_rate": 2.045186372726018e-06,
"loss": 0.7513,
"step": 1239
},
{
"epoch": 0.86,
"grad_norm": 0.4616378018549794,
"learning_rate": 2.025432755542114e-06,
"loss": 0.7577,
"step": 1240
},
{
"epoch": 0.86,
"grad_norm": 0.4912390441801168,
"learning_rate": 2.005769908635662e-06,
"loss": 0.7486,
"step": 1241
},
{
"epoch": 0.86,
"grad_norm": 0.4701391448569379,
"learning_rate": 1.986197931302629e-06,
"loss": 0.7749,
"step": 1242
},
{
"epoch": 0.86,
"grad_norm": 0.5058929551905256,
"learning_rate": 1.966716922380094e-06,
"loss": 0.766,
"step": 1243
},
{
"epoch": 0.86,
"grad_norm": 0.5036107941029966,
"learning_rate": 1.947326980245763e-06,
"loss": 0.7652,
"step": 1244
},
{
"epoch": 0.86,
"grad_norm": 0.5748899561706519,
"learning_rate": 1.9280282028174533e-06,
"loss": 0.7463,
"step": 1245
},
{
"epoch": 0.86,
"grad_norm": 0.46920940462488214,
"learning_rate": 1.9088206875526128e-06,
"loss": 0.7641,
"step": 1246
},
{
"epoch": 0.86,
"grad_norm": 0.502979080527669,
"learning_rate": 1.889704531447809e-06,
"loss": 0.767,
"step": 1247
},
{
"epoch": 0.87,
"grad_norm": 0.4771376170612732,
"learning_rate": 1.8706798310382668e-06,
"loss": 0.7818,
"step": 1248
},
{
"epoch": 0.87,
"grad_norm": 0.4923361943245115,
"learning_rate": 1.8517466823973662e-06,
"loss": 0.6997,
"step": 1249
},
{
"epoch": 0.87,
"grad_norm": 0.4940537428587214,
"learning_rate": 1.832905181136142e-06,
"loss": 0.7479,
"step": 1250
},
{
"epoch": 0.87,
"grad_norm": 0.4830197145644431,
"learning_rate": 1.8141554224028347e-06,
"loss": 0.7387,
"step": 1251
},
{
"epoch": 0.87,
"grad_norm": 0.4866940341685407,
"learning_rate": 1.7954975008823795e-06,
"loss": 0.7397,
"step": 1252
},
{
"epoch": 0.87,
"grad_norm": 0.4515251694669247,
"learning_rate": 1.7769315107959385e-06,
"loss": 0.709,
"step": 1253
},
{
"epoch": 0.87,
"grad_norm": 0.4874300748045056,
"learning_rate": 1.7584575459004426e-06,
"loss": 0.7226,
"step": 1254
},
{
"epoch": 0.87,
"grad_norm": 0.48538740276371206,
"learning_rate": 1.7400756994880885e-06,
"loss": 0.813,
"step": 1255
},
{
"epoch": 0.87,
"grad_norm": 0.4871227071221069,
"learning_rate": 1.7217860643858797e-06,
"loss": 0.7862,
"step": 1256
},
{
"epoch": 0.87,
"grad_norm": 0.4573188355467668,
"learning_rate": 1.7035887329551703e-06,
"loss": 0.7424,
"step": 1257
},
{
"epoch": 0.87,
"grad_norm": 0.47231818095826156,
"learning_rate": 1.6854837970911719e-06,
"loss": 0.708,
"step": 1258
},
{
"epoch": 0.87,
"grad_norm": 0.49590889240008,
"learning_rate": 1.6674713482225246e-06,
"loss": 0.7488,
"step": 1259
},
{
"epoch": 0.87,
"grad_norm": 0.4515946670996238,
"learning_rate": 1.6495514773107979e-06,
"loss": 0.7222,
"step": 1260
},
{
"epoch": 0.87,
"grad_norm": 0.474255246423421,
"learning_rate": 1.6317242748500617e-06,
"loss": 0.7733,
"step": 1261
},
{
"epoch": 0.87,
"grad_norm": 0.4692249451620519,
"learning_rate": 1.6139898308664093e-06,
"loss": 0.7418,
"step": 1262
},
{
"epoch": 0.88,
"grad_norm": 0.454254035187692,
"learning_rate": 1.596348234917504e-06,
"loss": 0.7433,
"step": 1263
},
{
"epoch": 0.88,
"grad_norm": 0.45969353993141326,
"learning_rate": 1.5787995760921603e-06,
"loss": 0.7421,
"step": 1264
},
{
"epoch": 0.88,
"grad_norm": 0.48585551705925695,
"learning_rate": 1.5613439430098388e-06,
"loss": 0.7387,
"step": 1265
},
{
"epoch": 0.88,
"grad_norm": 0.4954690032986467,
"learning_rate": 1.5439814238202356e-06,
"loss": 0.7567,
"step": 1266
},
{
"epoch": 0.88,
"grad_norm": 0.4643005272442231,
"learning_rate": 1.52671210620283e-06,
"loss": 0.7432,
"step": 1267
},
{
"epoch": 0.88,
"grad_norm": 0.4900529641780409,
"learning_rate": 1.5095360773664402e-06,
"loss": 0.7342,
"step": 1268
},
{
"epoch": 0.88,
"grad_norm": 0.4935332114441657,
"learning_rate": 1.492453424048781e-06,
"loss": 0.7348,
"step": 1269
},
{
"epoch": 0.88,
"grad_norm": 0.45263880560852243,
"learning_rate": 1.4754642325160306e-06,
"loss": 0.7489,
"step": 1270
},
{
"epoch": 0.88,
"grad_norm": 0.4887656949191309,
"learning_rate": 1.4585685885623901e-06,
"loss": 0.7673,
"step": 1271
},
{
"epoch": 0.88,
"grad_norm": 0.4745425906286061,
"learning_rate": 1.4417665775096467e-06,
"loss": 0.7395,
"step": 1272
},
{
"epoch": 0.88,
"grad_norm": 0.47395782452971347,
"learning_rate": 1.425058284206755e-06,
"loss": 0.7122,
"step": 1273
},
{
"epoch": 0.88,
"grad_norm": 0.47098240808537273,
"learning_rate": 1.4084437930294059e-06,
"loss": 0.7586,
"step": 1274
},
{
"epoch": 0.88,
"grad_norm": 0.4832606774953095,
"learning_rate": 1.3919231878795824e-06,
"loss": 0.7627,
"step": 1275
},
{
"epoch": 0.88,
"grad_norm": 0.47363560462738996,
"learning_rate": 1.3754965521851582e-06,
"loss": 0.7741,
"step": 1276
},
{
"epoch": 0.89,
"grad_norm": 0.4912614779650221,
"learning_rate": 1.359163968899473e-06,
"loss": 0.7321,
"step": 1277
},
{
"epoch": 0.89,
"grad_norm": 0.48268614647540403,
"learning_rate": 1.3429255205008952e-06,
"loss": 0.7183,
"step": 1278
},
{
"epoch": 0.89,
"grad_norm": 0.49532646109182604,
"learning_rate": 1.3267812889924292e-06,
"loss": 0.7435,
"step": 1279
},
{
"epoch": 0.89,
"grad_norm": 0.48096780906232267,
"learning_rate": 1.3107313559012936e-06,
"loss": 0.7371,
"step": 1280
},
{
"epoch": 0.89,
"grad_norm": 0.5091750746912477,
"learning_rate": 1.2947758022784961e-06,
"loss": 0.7643,
"step": 1281
},
{
"epoch": 0.89,
"grad_norm": 0.4775376877822782,
"learning_rate": 1.2789147086984377e-06,
"loss": 0.7444,
"step": 1282
},
{
"epoch": 0.89,
"grad_norm": 0.46806947892972683,
"learning_rate": 1.2631481552585067e-06,
"loss": 0.8119,
"step": 1283
},
{
"epoch": 0.89,
"grad_norm": 0.4878717747753156,
"learning_rate": 1.247476221578674e-06,
"loss": 0.7469,
"step": 1284
},
{
"epoch": 0.89,
"grad_norm": 0.48469979818308273,
"learning_rate": 1.2318989868010767e-06,
"loss": 0.7441,
"step": 1285
},
{
"epoch": 0.89,
"grad_norm": 0.46427630749376386,
"learning_rate": 1.2164165295896392e-06,
"loss": 0.7718,
"step": 1286
},
{
"epoch": 0.89,
"grad_norm": 0.5004773994413342,
"learning_rate": 1.2010289281296572e-06,
"loss": 0.7065,
"step": 1287
},
{
"epoch": 0.89,
"grad_norm": 0.5069287281870396,
"learning_rate": 1.1857362601274191e-06,
"loss": 0.7324,
"step": 1288
},
{
"epoch": 0.89,
"grad_norm": 0.46605056886060936,
"learning_rate": 1.1705386028098009e-06,
"loss": 0.7567,
"step": 1289
},
{
"epoch": 0.89,
"grad_norm": 0.43754276679506393,
"learning_rate": 1.155436032923889e-06,
"loss": 0.7603,
"step": 1290
},
{
"epoch": 0.89,
"grad_norm": 0.4705358322616403,
"learning_rate": 1.140428626736576e-06,
"loss": 0.7314,
"step": 1291
},
{
"epoch": 0.9,
"grad_norm": 0.4786991661319982,
"learning_rate": 1.1255164600341816e-06,
"loss": 0.795,
"step": 1292
},
{
"epoch": 0.9,
"grad_norm": 0.5103469420403395,
"learning_rate": 1.1106996081220944e-06,
"loss": 0.7423,
"step": 1293
},
{
"epoch": 0.9,
"grad_norm": 0.4730582266227602,
"learning_rate": 1.095978145824348e-06,
"loss": 0.76,
"step": 1294
},
{
"epoch": 0.9,
"grad_norm": 0.4675534950989374,
"learning_rate": 1.08135214748327e-06,
"loss": 0.736,
"step": 1295
},
{
"epoch": 0.9,
"grad_norm": 0.4930411670078587,
"learning_rate": 1.0668216869591098e-06,
"loss": 0.7473,
"step": 1296
},
{
"epoch": 0.9,
"grad_norm": 0.48500722611847774,
"learning_rate": 1.0523868376296486e-06,
"loss": 0.7738,
"step": 1297
},
{
"epoch": 0.9,
"grad_norm": 0.4704258102270377,
"learning_rate": 1.0380476723898458e-06,
"loss": 0.7784,
"step": 1298
},
{
"epoch": 0.9,
"grad_norm": 0.4708327443401958,
"learning_rate": 1.0238042636514534e-06,
"loss": 0.7214,
"step": 1299
},
{
"epoch": 0.9,
"grad_norm": 0.4951058037537131,
"learning_rate": 1.0096566833426724e-06,
"loss": 0.6946,
"step": 1300
},
{
"epoch": 0.9,
"grad_norm": 0.46056704440139096,
"learning_rate": 9.956050029077646e-07,
"loss": 0.7513,
"step": 1301
},
{
"epoch": 0.9,
"grad_norm": 0.4764204851650003,
"learning_rate": 9.816492933067124e-07,
"loss": 0.7722,
"step": 1302
},
{
"epoch": 0.9,
"grad_norm": 0.4903746540833927,
"learning_rate": 9.677896250148521e-07,
"loss": 0.7593,
"step": 1303
},
{
"epoch": 0.9,
"grad_norm": 0.4691605718305241,
"learning_rate": 9.540260680225133e-07,
"loss": 0.7528,
"step": 1304
},
{
"epoch": 0.9,
"grad_norm": 0.49336749676361696,
"learning_rate": 9.403586918346708e-07,
"loss": 0.7493,
"step": 1305
},
{
"epoch": 0.91,
"grad_norm": 0.4807833585161635,
"learning_rate": 9.267875654706015e-07,
"loss": 0.7328,
"step": 1306
},
{
"epoch": 0.91,
"grad_norm": 0.48972408804140277,
"learning_rate": 9.133127574635181e-07,
"loss": 0.7721,
"step": 1307
},
{
"epoch": 0.91,
"grad_norm": 0.48378497446817137,
"learning_rate": 8.999343358602352e-07,
"loss": 0.7999,
"step": 1308
},
{
"epoch": 0.91,
"grad_norm": 0.49493921001862706,
"learning_rate": 8.866523682208283e-07,
"loss": 0.7367,
"step": 1309
},
{
"epoch": 0.91,
"grad_norm": 0.48509717810895514,
"learning_rate": 8.734669216182779e-07,
"loss": 0.7372,
"step": 1310
},
{
"epoch": 0.91,
"grad_norm": 0.4927749122664698,
"learning_rate": 8.603780626381475e-07,
"loss": 0.7297,
"step": 1311
},
{
"epoch": 0.91,
"grad_norm": 0.4855998344979973,
"learning_rate": 8.473858573782379e-07,
"loss": 0.7222,
"step": 1312
},
{
"epoch": 0.91,
"grad_norm": 0.5217610090004413,
"learning_rate": 8.344903714482555e-07,
"loss": 0.7619,
"step": 1313
},
{
"epoch": 0.91,
"grad_norm": 0.5053642521469498,
"learning_rate": 8.216916699694821e-07,
"loss": 0.765,
"step": 1314
},
{
"epoch": 0.91,
"grad_norm": 0.5057510276333106,
"learning_rate": 8.089898175744371e-07,
"loss": 0.7744,
"step": 1315
},
{
"epoch": 0.91,
"grad_norm": 0.5038908088798605,
"learning_rate": 7.963848784065753e-07,
"loss": 0.7571,
"step": 1316
},
{
"epoch": 0.91,
"grad_norm": 0.48679760944128175,
"learning_rate": 7.838769161199277e-07,
"loss": 0.6917,
"step": 1317
},
{
"epoch": 0.91,
"grad_norm": 0.4822871591157676,
"learning_rate": 7.714659938788127e-07,
"loss": 0.7514,
"step": 1318
},
{
"epoch": 0.91,
"grad_norm": 0.45982801290302383,
"learning_rate": 7.591521743575003e-07,
"loss": 0.7289,
"step": 1319
},
{
"epoch": 0.92,
"grad_norm": 0.4736369942884383,
"learning_rate": 7.469355197398953e-07,
"loss": 0.7451,
"step": 1320
},
{
"epoch": 0.92,
"grad_norm": 0.4851273240811053,
"learning_rate": 7.348160917192281e-07,
"loss": 0.7749,
"step": 1321
},
{
"epoch": 0.92,
"grad_norm": 0.4804030339834929,
"learning_rate": 7.227939514977422e-07,
"loss": 0.7535,
"step": 1322
},
{
"epoch": 0.92,
"grad_norm": 0.4681890481834141,
"learning_rate": 7.108691597863871e-07,
"loss": 0.7337,
"step": 1323
},
{
"epoch": 0.92,
"grad_norm": 0.49607602170211645,
"learning_rate": 6.990417768045055e-07,
"loss": 0.7644,
"step": 1324
},
{
"epoch": 0.92,
"grad_norm": 0.4788946640042614,
"learning_rate": 6.87311862279536e-07,
"loss": 0.755,
"step": 1325
},
{
"epoch": 0.92,
"grad_norm": 0.4754436260583643,
"learning_rate": 6.756794754467045e-07,
"loss": 0.7436,
"step": 1326
},
{
"epoch": 0.92,
"grad_norm": 0.4788720664697889,
"learning_rate": 6.641446750487323e-07,
"loss": 0.7568,
"step": 1327
},
{
"epoch": 0.92,
"grad_norm": 0.48018755007419983,
"learning_rate": 6.527075193355337e-07,
"loss": 0.749,
"step": 1328
},
{
"epoch": 0.92,
"grad_norm": 0.5109552410349083,
"learning_rate": 6.413680660639321e-07,
"loss": 0.7587,
"step": 1329
},
{
"epoch": 0.92,
"grad_norm": 0.4687918001326902,
"learning_rate": 6.301263724973505e-07,
"loss": 0.7817,
"step": 1330
},
{
"epoch": 0.92,
"grad_norm": 0.4863906027567447,
"learning_rate": 6.189824954055335e-07,
"loss": 0.702,
"step": 1331
},
{
"epoch": 0.92,
"grad_norm": 0.4663536385500733,
"learning_rate": 6.079364910642649e-07,
"loss": 0.7388,
"step": 1332
},
{
"epoch": 0.92,
"grad_norm": 0.49581299728033285,
"learning_rate": 5.969884152550798e-07,
"loss": 0.7424,
"step": 1333
},
{
"epoch": 0.92,
"grad_norm": 0.4997736987440403,
"learning_rate": 5.861383232649708e-07,
"loss": 0.7572,
"step": 1334
},
{
"epoch": 0.93,
"grad_norm": 0.4720874078473283,
"learning_rate": 5.753862698861312e-07,
"loss": 0.7362,
"step": 1335
},
{
"epoch": 0.93,
"grad_norm": 0.48969901119639553,
"learning_rate": 5.647323094156565e-07,
"loss": 0.7849,
"step": 1336
},
{
"epoch": 0.93,
"grad_norm": 0.4695926648363245,
"learning_rate": 5.541764956552831e-07,
"loss": 0.7418,
"step": 1337
},
{
"epoch": 0.93,
"grad_norm": 0.5120616276508253,
"learning_rate": 5.43718881911115e-07,
"loss": 0.7694,
"step": 1338
},
{
"epoch": 0.93,
"grad_norm": 0.5120253828670005,
"learning_rate": 5.333595209933595e-07,
"loss": 0.7408,
"step": 1339
},
{
"epoch": 0.93,
"grad_norm": 0.4804428812006281,
"learning_rate": 5.230984652160387e-07,
"loss": 0.7497,
"step": 1340
},
{
"epoch": 0.93,
"grad_norm": 0.4794614616278687,
"learning_rate": 5.12935766396756e-07,
"loss": 0.7258,
"step": 1341
},
{
"epoch": 0.93,
"grad_norm": 0.48509979043776824,
"learning_rate": 5.028714758564057e-07,
"loss": 0.7294,
"step": 1342
},
{
"epoch": 0.93,
"grad_norm": 0.4703371962570861,
"learning_rate": 4.92905644418944e-07,
"loss": 0.7428,
"step": 1343
},
{
"epoch": 0.93,
"grad_norm": 0.47548089700226776,
"learning_rate": 4.830383224110958e-07,
"loss": 0.7494,
"step": 1344
},
{
"epoch": 0.93,
"grad_norm": 0.45390692722068204,
"learning_rate": 4.732695596621373e-07,
"loss": 0.7451,
"step": 1345
},
{
"epoch": 0.93,
"grad_norm": 0.48383933520475125,
"learning_rate": 4.635994055036208e-07,
"loss": 0.7439,
"step": 1346
},
{
"epoch": 0.93,
"grad_norm": 0.46251608452770443,
"learning_rate": 4.5402790876912575e-07,
"loss": 0.7614,
"step": 1347
},
{
"epoch": 0.93,
"grad_norm": 0.4565800958722612,
"learning_rate": 4.4455511779403216e-07,
"loss": 0.7324,
"step": 1348
},
{
"epoch": 0.94,
"grad_norm": 0.49080024860361204,
"learning_rate": 4.3518108041525675e-07,
"loss": 0.7404,
"step": 1349
},
{
"epoch": 0.94,
"grad_norm": 0.4871682122411577,
"learning_rate": 4.2590584397101066e-07,
"loss": 0.7534,
"step": 1350
},
{
"epoch": 0.94,
"grad_norm": 0.4700874581604195,
"learning_rate": 4.167294553005774e-07,
"loss": 0.7347,
"step": 1351
},
{
"epoch": 0.94,
"grad_norm": 0.4844231563317733,
"learning_rate": 4.0765196074406433e-07,
"loss": 0.7557,
"step": 1352
},
{
"epoch": 0.94,
"grad_norm": 0.4867452072319235,
"learning_rate": 3.986734061421671e-07,
"loss": 0.7753,
"step": 1353
},
{
"epoch": 0.94,
"grad_norm": 0.4619728741534759,
"learning_rate": 3.897938368359411e-07,
"loss": 0.758,
"step": 1354
},
{
"epoch": 0.94,
"grad_norm": 0.48196400661661043,
"learning_rate": 3.8101329766657924e-07,
"loss": 0.7475,
"step": 1355
},
{
"epoch": 0.94,
"grad_norm": 0.48223678797781916,
"learning_rate": 3.723318329751746e-07,
"loss": 0.7628,
"step": 1356
},
{
"epoch": 0.94,
"grad_norm": 0.49482417215262636,
"learning_rate": 3.637494866025004e-07,
"loss": 0.7413,
"step": 1357
},
{
"epoch": 0.94,
"grad_norm": 0.45523043687708575,
"learning_rate": 3.5526630188879475e-07,
"loss": 0.7622,
"step": 1358
},
{
"epoch": 0.94,
"grad_norm": 0.4859155772100884,
"learning_rate": 3.4688232167353174e-07,
"loss": 0.72,
"step": 1359
},
{
"epoch": 0.94,
"grad_norm": 0.49632275899440886,
"learning_rate": 3.385975882952064e-07,
"loss": 0.7372,
"step": 1360
},
{
"epoch": 0.94,
"grad_norm": 0.46803321175091367,
"learning_rate": 3.304121435911345e-07,
"loss": 0.7602,
"step": 1361
},
{
"epoch": 0.94,
"grad_norm": 0.49113653274018587,
"learning_rate": 3.223260288972263e-07,
"loss": 0.7437,
"step": 1362
},
{
"epoch": 0.94,
"grad_norm": 0.48112373777808054,
"learning_rate": 3.143392850477778e-07,
"loss": 0.7492,
"step": 1363
},
{
"epoch": 0.95,
"grad_norm": 0.46194001289344877,
"learning_rate": 3.064519523752751e-07,
"loss": 0.7467,
"step": 1364
},
{
"epoch": 0.95,
"grad_norm": 0.4894654595731178,
"learning_rate": 2.986640707101862e-07,
"loss": 0.7518,
"step": 1365
},
{
"epoch": 0.95,
"grad_norm": 0.46856025877247653,
"learning_rate": 2.9097567938074943e-07,
"loss": 0.7361,
"step": 1366
},
{
"epoch": 0.95,
"grad_norm": 0.47858576837541933,
"learning_rate": 2.8338681721279627e-07,
"loss": 0.747,
"step": 1367
},
{
"epoch": 0.95,
"grad_norm": 0.4783811590975944,
"learning_rate": 2.758975225295357e-07,
"loss": 0.7285,
"step": 1368
},
{
"epoch": 0.95,
"grad_norm": 0.483166438650482,
"learning_rate": 2.685078331513702e-07,
"loss": 0.7207,
"step": 1369
},
{
"epoch": 0.95,
"grad_norm": 0.45623838904478914,
"learning_rate": 2.612177863956977e-07,
"loss": 0.7659,
"step": 1370
},
{
"epoch": 0.95,
"grad_norm": 0.501406903771167,
"learning_rate": 2.5402741907673665e-07,
"loss": 0.7457,
"step": 1371
},
{
"epoch": 0.95,
"grad_norm": 0.46268897689488814,
"learning_rate": 2.4693676750532804e-07,
"loss": 0.7561,
"step": 1372
},
{
"epoch": 0.95,
"grad_norm": 0.4659733522785684,
"learning_rate": 2.3994586748875116e-07,
"loss": 0.7375,
"step": 1373
},
{
"epoch": 0.95,
"grad_norm": 0.501592760504288,
"learning_rate": 2.330547543305528e-07,
"loss": 0.7462,
"step": 1374
},
{
"epoch": 0.95,
"grad_norm": 0.48735838614514193,
"learning_rate": 2.2626346283036061e-07,
"loss": 0.7375,
"step": 1375
},
{
"epoch": 0.95,
"grad_norm": 0.45575346512027154,
"learning_rate": 2.1957202728370542e-07,
"loss": 0.7497,
"step": 1376
},
{
"epoch": 0.95,
"grad_norm": 0.4929899286572087,
"learning_rate": 2.129804814818659e-07,
"loss": 0.7495,
"step": 1377
},
{
"epoch": 0.96,
"grad_norm": 0.457310543597152,
"learning_rate": 2.064888587116709e-07,
"loss": 0.7634,
"step": 1378
},
{
"epoch": 0.96,
"grad_norm": 0.4730275776323102,
"learning_rate": 2.000971917553529e-07,
"loss": 0.7486,
"step": 1379
},
{
"epoch": 0.96,
"grad_norm": 0.45949303936247915,
"learning_rate": 1.9380551289037042e-07,
"loss": 0.7472,
"step": 1380
},
{
"epoch": 0.96,
"grad_norm": 0.4584300632554652,
"learning_rate": 1.8761385388925246e-07,
"loss": 0.7582,
"step": 1381
},
{
"epoch": 0.96,
"grad_norm": 0.5043074154009135,
"learning_rate": 1.8152224601943435e-07,
"loss": 0.6926,
"step": 1382
},
{
"epoch": 0.96,
"grad_norm": 0.4705347425432189,
"learning_rate": 1.7553072004309778e-07,
"loss": 0.7276,
"step": 1383
},
{
"epoch": 0.96,
"grad_norm": 0.4768439268951333,
"learning_rate": 1.6963930621702207e-07,
"loss": 0.7493,
"step": 1384
},
{
"epoch": 0.96,
"grad_norm": 0.48802004756476197,
"learning_rate": 1.6384803429242202e-07,
"loss": 0.7515,
"step": 1385
},
{
"epoch": 0.96,
"grad_norm": 0.47484511609891183,
"learning_rate": 1.5815693351480587e-07,
"loss": 0.7465,
"step": 1386
},
{
"epoch": 0.96,
"grad_norm": 0.4700608322267168,
"learning_rate": 1.5256603262383095e-07,
"loss": 0.7674,
"step": 1387
},
{
"epoch": 0.96,
"grad_norm": 0.46595990138337035,
"learning_rate": 1.4707535985314158e-07,
"loss": 0.7198,
"step": 1388
},
{
"epoch": 0.96,
"grad_norm": 0.4681650449939773,
"learning_rate": 1.416849429302425e-07,
"loss": 0.731,
"step": 1389
},
{
"epoch": 0.96,
"grad_norm": 0.4690561195442063,
"learning_rate": 1.363948090763545e-07,
"loss": 0.7202,
"step": 1390
},
{
"epoch": 0.96,
"grad_norm": 0.5020189202731862,
"learning_rate": 1.3120498500627243e-07,
"loss": 0.7743,
"step": 1391
},
{
"epoch": 0.96,
"grad_norm": 0.4979702156171372,
"learning_rate": 1.2611549692823854e-07,
"loss": 0.7713,
"step": 1392
},
{
"epoch": 0.97,
"grad_norm": 0.4651787217000015,
"learning_rate": 1.211263705438026e-07,
"loss": 0.7506,
"step": 1393
},
{
"epoch": 0.97,
"grad_norm": 0.5207920535428358,
"learning_rate": 1.1623763104769536e-07,
"loss": 0.7671,
"step": 1394
},
{
"epoch": 0.97,
"grad_norm": 0.4624680781402704,
"learning_rate": 1.1144930312769975e-07,
"loss": 0.7437,
"step": 1395
},
{
"epoch": 0.97,
"grad_norm": 0.46485409951837053,
"learning_rate": 1.0676141096453097e-07,
"loss": 0.7274,
"step": 1396
},
{
"epoch": 0.97,
"grad_norm": 0.4795031713845527,
"learning_rate": 1.0217397823170771e-07,
"loss": 0.7334,
"step": 1397
},
{
"epoch": 0.97,
"grad_norm": 0.4834944425801974,
"learning_rate": 9.768702809543895e-08,
"loss": 0.75,
"step": 1398
},
{
"epoch": 0.97,
"grad_norm": 0.481907738242048,
"learning_rate": 9.330058321449731e-08,
"loss": 0.773,
"step": 1399
},
{
"epoch": 0.97,
"grad_norm": 0.5067283439150281,
"learning_rate": 8.901466574011919e-08,
"loss": 0.7416,
"step": 1400
},
{
"epoch": 0.97,
"grad_norm": 0.4912490475221768,
"learning_rate": 8.482929731588041e-08,
"loss": 0.7582,
"step": 1401
},
{
"epoch": 0.97,
"grad_norm": 0.473318021289074,
"learning_rate": 8.074449907758742e-08,
"loss": 0.7721,
"step": 1402
},
{
"epoch": 0.97,
"grad_norm": 0.4897559244461315,
"learning_rate": 7.676029165318622e-08,
"loss": 0.7987,
"step": 1403
},
{
"epoch": 0.97,
"grad_norm": 0.47237981569554943,
"learning_rate": 7.287669516263362e-08,
"loss": 0.7641,
"step": 1404
},
{
"epoch": 0.97,
"grad_norm": 0.4596171916292889,
"learning_rate": 6.90937292178151e-08,
"loss": 0.7714,
"step": 1405
},
{
"epoch": 0.97,
"grad_norm": 0.4637417929405846,
"learning_rate": 6.541141292243814e-08,
"loss": 0.741,
"step": 1406
},
{
"epoch": 0.98,
"grad_norm": 0.4859604777874071,
"learning_rate": 6.18297648719346e-08,
"loss": 0.7316,
"step": 1407
},
{
"epoch": 0.98,
"grad_norm": 0.5024857053396906,
"learning_rate": 5.83488031533741e-08,
"loss": 0.7123,
"step": 1408
},
{
"epoch": 0.98,
"grad_norm": 0.49923528693144187,
"learning_rate": 5.496854534536189e-08,
"loss": 0.7615,
"step": 1409
},
{
"epoch": 0.98,
"grad_norm": 0.4811537647230991,
"learning_rate": 5.168900851795666e-08,
"loss": 0.7267,
"step": 1410
},
{
"epoch": 0.98,
"grad_norm": 0.4758989640717563,
"learning_rate": 4.8510209232588424e-08,
"loss": 0.7777,
"step": 1411
},
{
"epoch": 0.98,
"grad_norm": 0.4898073465912536,
"learning_rate": 4.5432163541960785e-08,
"loss": 0.7712,
"step": 1412
},
{
"epoch": 0.98,
"grad_norm": 0.4777666723797962,
"learning_rate": 4.2454886989988784e-08,
"loss": 0.738,
"step": 1413
},
{
"epoch": 0.98,
"grad_norm": 0.49335310345678834,
"learning_rate": 3.957839461170343e-08,
"loss": 0.7475,
"step": 1414
},
{
"epoch": 0.98,
"grad_norm": 0.4730636083729561,
"learning_rate": 3.680270093318505e-08,
"loss": 0.7143,
"step": 1415
},
{
"epoch": 0.98,
"grad_norm": 0.4696529487353288,
"learning_rate": 3.412781997148784e-08,
"loss": 0.7159,
"step": 1416
},
{
"epoch": 0.98,
"grad_norm": 0.47782069260815496,
"learning_rate": 3.1553765234570985e-08,
"loss": 0.7493,
"step": 1417
},
{
"epoch": 0.98,
"grad_norm": 0.4684708777106045,
"learning_rate": 2.9080549721225426e-08,
"loss": 0.7635,
"step": 1418
},
{
"epoch": 0.98,
"grad_norm": 0.4595490895169446,
"learning_rate": 2.6708185921011653e-08,
"loss": 0.7821,
"step": 1419
},
{
"epoch": 0.98,
"grad_norm": 0.49119816650472004,
"learning_rate": 2.4436685814199778e-08,
"loss": 0.7683,
"step": 1420
},
{
"epoch": 0.99,
"grad_norm": 0.47041334171888,
"learning_rate": 2.226606087169847e-08,
"loss": 0.7558,
"step": 1421
},
{
"epoch": 0.99,
"grad_norm": 0.48524755829473964,
"learning_rate": 2.0196322055010543e-08,
"loss": 0.7392,
"step": 1422
},
{
"epoch": 0.99,
"grad_norm": 0.47785751618912004,
"learning_rate": 1.822747981616857e-08,
"loss": 0.7501,
"step": 1423
},
{
"epoch": 0.99,
"grad_norm": 0.48124918664408944,
"learning_rate": 1.6359544097686033e-08,
"loss": 0.7372,
"step": 1424
},
{
"epoch": 0.99,
"grad_norm": 0.46871471486326804,
"learning_rate": 1.4592524332504022e-08,
"loss": 0.7505,
"step": 1425
},
{
"epoch": 0.99,
"grad_norm": 0.4908215296641364,
"learning_rate": 1.29264294439424e-08,
"loss": 0.7313,
"step": 1426
},
{
"epoch": 0.99,
"grad_norm": 0.4736489108138525,
"learning_rate": 1.136126784566649e-08,
"loss": 0.7297,
"step": 1427
},
{
"epoch": 0.99,
"grad_norm": 0.48090568556134466,
"learning_rate": 9.897047441627116e-09,
"loss": 0.7685,
"step": 1428
},
{
"epoch": 0.99,
"grad_norm": 0.48586605409007705,
"learning_rate": 8.533775626033968e-09,
"loss": 0.7542,
"step": 1429
},
{
"epoch": 0.99,
"grad_norm": 0.4918405639027352,
"learning_rate": 7.271459283308968e-09,
"loss": 0.7864,
"step": 1430
},
{
"epoch": 0.99,
"grad_norm": 0.48937965577574266,
"learning_rate": 6.110104788061843e-09,
"loss": 0.7373,
"step": 1431
},
{
"epoch": 0.99,
"grad_norm": 0.5234117230773252,
"learning_rate": 5.0497180050501635e-09,
"loss": 0.7447,
"step": 1432
},
{
"epoch": 0.99,
"grad_norm": 0.4757714617999256,
"learning_rate": 4.090304289150471e-09,
"loss": 0.6992,
"step": 1433
},
{
"epoch": 0.99,
"grad_norm": 0.48647786891626527,
"learning_rate": 3.2318684853338556e-09,
"loss": 0.7652,
"step": 1434
},
{
"epoch": 0.99,
"grad_norm": 0.4664451323809064,
"learning_rate": 2.4744149286370923e-09,
"loss": 0.7318,
"step": 1435
},
{
"epoch": 1.0,
"grad_norm": 0.45572647778798603,
"learning_rate": 1.817947444149315e-09,
"loss": 0.7287,
"step": 1436
},
{
"epoch": 1.0,
"grad_norm": 0.4882451883637373,
"learning_rate": 1.2624693469831529e-09,
"loss": 0.6892,
"step": 1437
},
{
"epoch": 1.0,
"grad_norm": 0.47130270149142,
"learning_rate": 8.079834422636267e-10,
"loss": 0.7695,
"step": 1438
},
{
"epoch": 1.0,
"grad_norm": 0.4892198899964654,
"learning_rate": 4.544920251126073e-10,
"loss": 0.7665,
"step": 1439
},
{
"epoch": 1.0,
"grad_norm": 0.4640128167635843,
"learning_rate": 2.0199688063549105e-10,
"loss": 0.7164,
"step": 1440
},
{
"epoch": 1.0,
"grad_norm": 0.48402701368882556,
"learning_rate": 5.049928391231973e-11,
"loss": 0.7362,
"step": 1441
},
{
"epoch": 1.0,
"grad_norm": 0.48654747915425856,
"learning_rate": 0.0,
"loss": 0.7232,
"step": 1442
},
{
"epoch": 1.0,
"step": 1442,
"total_flos": 1.2371314558369792e+16,
"train_loss": 0.8182366644129178,
"train_runtime": 59803.8495,
"train_samples_per_second": 12.35,
"train_steps_per_second": 0.024
}
],
"logging_steps": 1.0,
"max_steps": 1442,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"total_flos": 1.2371314558369792e+16,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}