{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.8503986104306773,
"eval_steps": 500,
"global_step": 2000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0014251993052153387,
"grad_norm": 2.3808159828186035,
"learning_rate": 4.9975e-05,
"loss": 4.9303,
"step": 1
},
{
"epoch": 0.0028503986104306774,
"grad_norm": 2.415224075317383,
"learning_rate": 4.995e-05,
"loss": 4.912,
"step": 2
},
{
"epoch": 0.004275597915646016,
"grad_norm": 2.338815450668335,
"learning_rate": 4.992500000000001e-05,
"loss": 4.5512,
"step": 3
},
{
"epoch": 0.005700797220861355,
"grad_norm": 2.681542158126831,
"learning_rate": 4.99e-05,
"loss": 4.6698,
"step": 4
},
{
"epoch": 0.007125996526076693,
"grad_norm": 2.858518600463867,
"learning_rate": 4.9875000000000006e-05,
"loss": 4.5722,
"step": 5
},
{
"epoch": 0.008551195831292033,
"grad_norm": 3.3381805419921875,
"learning_rate": 4.9850000000000006e-05,
"loss": 4.5432,
"step": 6
},
{
"epoch": 0.00997639513650737,
"grad_norm": 3.531815528869629,
"learning_rate": 4.9825000000000005e-05,
"loss": 4.3542,
"step": 7
},
{
"epoch": 0.01140159444172271,
"grad_norm": 3.5773863792419434,
"learning_rate": 4.9800000000000004e-05,
"loss": 3.9884,
"step": 8
},
{
"epoch": 0.01282679374693805,
"grad_norm": 4.022645950317383,
"learning_rate": 4.9775000000000004e-05,
"loss": 3.9361,
"step": 9
},
{
"epoch": 0.014251993052153387,
"grad_norm": 4.557852745056152,
"learning_rate": 4.975e-05,
"loss": 3.8644,
"step": 10
},
{
"epoch": 0.015677192357368724,
"grad_norm": 4.871453762054443,
"learning_rate": 4.9725e-05,
"loss": 3.6454,
"step": 11
},
{
"epoch": 0.017102391662584066,
"grad_norm": 5.2355451583862305,
"learning_rate": 4.97e-05,
"loss": 3.4451,
"step": 12
},
{
"epoch": 0.018527590967799403,
"grad_norm": 5.135752201080322,
"learning_rate": 4.967500000000001e-05,
"loss": 3.0268,
"step": 13
},
{
"epoch": 0.01995279027301474,
"grad_norm": 5.464292049407959,
"learning_rate": 4.965e-05,
"loss": 2.6256,
"step": 14
},
{
"epoch": 0.021377989578230082,
"grad_norm": 6.0373735427856445,
"learning_rate": 4.962500000000001e-05,
"loss": 2.4167,
"step": 15
},
{
"epoch": 0.02280318888344542,
"grad_norm": 5.705432415008545,
"learning_rate": 4.96e-05,
"loss": 2.2417,
"step": 16
},
{
"epoch": 0.024228388188660757,
"grad_norm": 5.656646728515625,
"learning_rate": 4.9575000000000006e-05,
"loss": 1.9157,
"step": 17
},
{
"epoch": 0.0256535874938761,
"grad_norm": 5.283426284790039,
"learning_rate": 4.9550000000000005e-05,
"loss": 1.646,
"step": 18
},
{
"epoch": 0.027078786799091436,
"grad_norm": 5.5149993896484375,
"learning_rate": 4.9525000000000004e-05,
"loss": 1.4554,
"step": 19
},
{
"epoch": 0.028503986104306774,
"grad_norm": 3.482173204421997,
"learning_rate": 4.9500000000000004e-05,
"loss": 1.3281,
"step": 20
},
{
"epoch": 0.02992918540952211,
"grad_norm": 3.054492950439453,
"learning_rate": 4.9475e-05,
"loss": 1.1017,
"step": 21
},
{
"epoch": 0.03135438471473745,
"grad_norm": 2.021817445755005,
"learning_rate": 4.945e-05,
"loss": 0.9781,
"step": 22
},
{
"epoch": 0.03277958401995279,
"grad_norm": 1.3400617837905884,
"learning_rate": 4.9425e-05,
"loss": 0.957,
"step": 23
},
{
"epoch": 0.03420478332516813,
"grad_norm": 0.5696387887001038,
"learning_rate": 4.94e-05,
"loss": 0.9205,
"step": 24
},
{
"epoch": 0.035629982630383465,
"grad_norm": 0.3762161135673523,
"learning_rate": 4.937500000000001e-05,
"loss": 0.8709,
"step": 25
},
{
"epoch": 0.037055181935598806,
"grad_norm": 0.2803989052772522,
"learning_rate": 4.935e-05,
"loss": 0.8753,
"step": 26
},
{
"epoch": 0.03848038124081415,
"grad_norm": 0.3254147469997406,
"learning_rate": 4.9325000000000006e-05,
"loss": 0.9583,
"step": 27
},
{
"epoch": 0.03990558054602948,
"grad_norm": 0.3624233305454254,
"learning_rate": 4.93e-05,
"loss": 0.9307,
"step": 28
},
{
"epoch": 0.04133077985124482,
"grad_norm": 0.37928587198257446,
"learning_rate": 4.9275000000000005e-05,
"loss": 0.9018,
"step": 29
},
{
"epoch": 0.042755979156460164,
"grad_norm": 0.37186911702156067,
"learning_rate": 4.9250000000000004e-05,
"loss": 1.0122,
"step": 30
},
{
"epoch": 0.0441811784616755,
"grad_norm": 0.3072810173034668,
"learning_rate": 4.9225000000000004e-05,
"loss": 0.9318,
"step": 31
},
{
"epoch": 0.04560637776689084,
"grad_norm": 0.2723548710346222,
"learning_rate": 4.92e-05,
"loss": 0.788,
"step": 32
},
{
"epoch": 0.04703157707210618,
"grad_norm": 0.3143625259399414,
"learning_rate": 4.9175e-05,
"loss": 0.9277,
"step": 33
},
{
"epoch": 0.048456776377321514,
"grad_norm": 0.2649214267730713,
"learning_rate": 4.915e-05,
"loss": 0.9105,
"step": 34
},
{
"epoch": 0.049881975682536855,
"grad_norm": 0.25748565793037415,
"learning_rate": 4.9125e-05,
"loss": 0.8428,
"step": 35
},
{
"epoch": 0.0513071749877522,
"grad_norm": 0.21407876908779144,
"learning_rate": 4.91e-05,
"loss": 0.8605,
"step": 36
},
{
"epoch": 0.05273237429296753,
"grad_norm": 0.21605724096298218,
"learning_rate": 4.907500000000001e-05,
"loss": 0.8478,
"step": 37
},
{
"epoch": 0.05415757359818287,
"grad_norm": 0.19568459689617157,
"learning_rate": 4.905e-05,
"loss": 0.8291,
"step": 38
},
{
"epoch": 0.05558277290339821,
"grad_norm": 0.18652412295341492,
"learning_rate": 4.9025000000000006e-05,
"loss": 0.8853,
"step": 39
},
{
"epoch": 0.05700797220861355,
"grad_norm": 0.17057964205741882,
"learning_rate": 4.9e-05,
"loss": 0.7241,
"step": 40
},
{
"epoch": 0.05843317151382889,
"grad_norm": 0.1821194887161255,
"learning_rate": 4.8975000000000005e-05,
"loss": 0.8798,
"step": 41
},
{
"epoch": 0.05985837081904422,
"grad_norm": 0.18263833224773407,
"learning_rate": 4.8950000000000004e-05,
"loss": 0.9179,
"step": 42
},
{
"epoch": 0.06128357012425956,
"grad_norm": 0.17591266334056854,
"learning_rate": 4.8925e-05,
"loss": 0.907,
"step": 43
},
{
"epoch": 0.0627087694294749,
"grad_norm": 0.1583259254693985,
"learning_rate": 4.89e-05,
"loss": 0.9427,
"step": 44
},
{
"epoch": 0.06413396873469024,
"grad_norm": 0.15764452517032623,
"learning_rate": 4.8875e-05,
"loss": 0.761,
"step": 45
},
{
"epoch": 0.06555916803990558,
"grad_norm": 0.1542762666940689,
"learning_rate": 4.885e-05,
"loss": 0.8792,
"step": 46
},
{
"epoch": 0.06698436734512092,
"grad_norm": 0.14711661636829376,
"learning_rate": 4.8825e-05,
"loss": 0.8863,
"step": 47
},
{
"epoch": 0.06840956665033626,
"grad_norm": 0.14572006464004517,
"learning_rate": 4.88e-05,
"loss": 0.9373,
"step": 48
},
{
"epoch": 0.0698347659555516,
"grad_norm": 0.12578891217708588,
"learning_rate": 4.8775000000000007e-05,
"loss": 0.8109,
"step": 49
},
{
"epoch": 0.07125996526076693,
"grad_norm": 0.1245233416557312,
"learning_rate": 4.875e-05,
"loss": 0.7661,
"step": 50
},
{
"epoch": 0.07268516456598227,
"grad_norm": 0.12405986338853836,
"learning_rate": 4.8725000000000005e-05,
"loss": 0.9007,
"step": 51
},
{
"epoch": 0.07411036387119761,
"grad_norm": 0.12161869555711746,
"learning_rate": 4.87e-05,
"loss": 0.8402,
"step": 52
},
{
"epoch": 0.07553556317641295,
"grad_norm": 0.11237402260303497,
"learning_rate": 4.8675000000000004e-05,
"loss": 0.8955,
"step": 53
},
{
"epoch": 0.0769607624816283,
"grad_norm": 0.11724139750003815,
"learning_rate": 4.8650000000000003e-05,
"loss": 0.8738,
"step": 54
},
{
"epoch": 0.07838596178684362,
"grad_norm": 0.09971917420625687,
"learning_rate": 4.8625e-05,
"loss": 0.7731,
"step": 55
},
{
"epoch": 0.07981116109205896,
"grad_norm": 0.10756105929613113,
"learning_rate": 4.86e-05,
"loss": 0.8021,
"step": 56
},
{
"epoch": 0.0812363603972743,
"grad_norm": 0.10592478513717651,
"learning_rate": 4.8575e-05,
"loss": 0.889,
"step": 57
},
{
"epoch": 0.08266155970248965,
"grad_norm": 0.10013467818498611,
"learning_rate": 4.855e-05,
"loss": 0.7657,
"step": 58
},
{
"epoch": 0.08408675900770499,
"grad_norm": 0.10104944556951523,
"learning_rate": 4.8525e-05,
"loss": 0.841,
"step": 59
},
{
"epoch": 0.08551195831292033,
"grad_norm": 0.10966558754444122,
"learning_rate": 4.85e-05,
"loss": 0.8929,
"step": 60
},
{
"epoch": 0.08693715761813565,
"grad_norm": 0.12098626047372818,
"learning_rate": 4.8475000000000006e-05,
"loss": 0.9607,
"step": 61
},
{
"epoch": 0.088362356923351,
"grad_norm": 0.11154744029045105,
"learning_rate": 4.845e-05,
"loss": 0.9572,
"step": 62
},
{
"epoch": 0.08978755622856634,
"grad_norm": 0.11025462299585342,
"learning_rate": 4.8425000000000005e-05,
"loss": 0.8079,
"step": 63
},
{
"epoch": 0.09121275553378168,
"grad_norm": 0.10437805205583572,
"learning_rate": 4.8400000000000004e-05,
"loss": 0.8494,
"step": 64
},
{
"epoch": 0.09263795483899702,
"grad_norm": 0.09998121112585068,
"learning_rate": 4.8375000000000004e-05,
"loss": 0.8458,
"step": 65
},
{
"epoch": 0.09406315414421236,
"grad_norm": 0.0947476401925087,
"learning_rate": 4.835e-05,
"loss": 0.7706,
"step": 66
},
{
"epoch": 0.09548835344942769,
"grad_norm": 0.09839773178100586,
"learning_rate": 4.8325e-05,
"loss": 0.7757,
"step": 67
},
{
"epoch": 0.09691355275464303,
"grad_norm": 0.10350161045789719,
"learning_rate": 4.83e-05,
"loss": 0.8034,
"step": 68
},
{
"epoch": 0.09833875205985837,
"grad_norm": 0.10200931131839752,
"learning_rate": 4.8275e-05,
"loss": 0.9039,
"step": 69
},
{
"epoch": 0.09976395136507371,
"grad_norm": 0.10722529888153076,
"learning_rate": 4.825e-05,
"loss": 0.9933,
"step": 70
},
{
"epoch": 0.10118915067028905,
"grad_norm": 0.09730961918830872,
"learning_rate": 4.822500000000001e-05,
"loss": 0.7981,
"step": 71
},
{
"epoch": 0.1026143499755044,
"grad_norm": 0.10621146112680435,
"learning_rate": 4.82e-05,
"loss": 0.7649,
"step": 72
},
{
"epoch": 0.10403954928071972,
"grad_norm": 0.09823603183031082,
"learning_rate": 4.8175000000000005e-05,
"loss": 0.7422,
"step": 73
},
{
"epoch": 0.10546474858593506,
"grad_norm": 0.10114400088787079,
"learning_rate": 4.815e-05,
"loss": 0.7957,
"step": 74
},
{
"epoch": 0.1068899478911504,
"grad_norm": 0.09885763376951218,
"learning_rate": 4.8125000000000004e-05,
"loss": 0.79,
"step": 75
},
{
"epoch": 0.10831514719636574,
"grad_norm": 0.10542196780443192,
"learning_rate": 4.8100000000000004e-05,
"loss": 0.8467,
"step": 76
},
{
"epoch": 0.10974034650158108,
"grad_norm": 0.10014081746339798,
"learning_rate": 4.8075e-05,
"loss": 0.8893,
"step": 77
},
{
"epoch": 0.11116554580679643,
"grad_norm": 0.09690476208925247,
"learning_rate": 4.805e-05,
"loss": 0.8154,
"step": 78
},
{
"epoch": 0.11259074511201175,
"grad_norm": 0.09984306991100311,
"learning_rate": 4.8025e-05,
"loss": 0.8093,
"step": 79
},
{
"epoch": 0.1140159444172271,
"grad_norm": 0.10842040181159973,
"learning_rate": 4.8e-05,
"loss": 0.9409,
"step": 80
},
{
"epoch": 0.11544114372244244,
"grad_norm": 0.0983848050236702,
"learning_rate": 4.7975e-05,
"loss": 0.8474,
"step": 81
},
{
"epoch": 0.11686634302765778,
"grad_norm": 0.1021835133433342,
"learning_rate": 4.795e-05,
"loss": 0.8256,
"step": 82
},
{
"epoch": 0.11829154233287312,
"grad_norm": 0.09965500980615616,
"learning_rate": 4.7925000000000006e-05,
"loss": 0.7844,
"step": 83
},
{
"epoch": 0.11971674163808844,
"grad_norm": 0.09537848830223083,
"learning_rate": 4.79e-05,
"loss": 0.7836,
"step": 84
},
{
"epoch": 0.12114194094330379,
"grad_norm": 0.09506389498710632,
"learning_rate": 4.7875000000000005e-05,
"loss": 0.7477,
"step": 85
},
{
"epoch": 0.12256714024851913,
"grad_norm": 0.10709495842456818,
"learning_rate": 4.785e-05,
"loss": 0.8446,
"step": 86
},
{
"epoch": 0.12399233955373447,
"grad_norm": 0.10288501530885696,
"learning_rate": 4.7825000000000004e-05,
"loss": 0.8816,
"step": 87
},
{
"epoch": 0.1254175388589498,
"grad_norm": 0.09660671651363373,
"learning_rate": 4.78e-05,
"loss": 0.7996,
"step": 88
},
{
"epoch": 0.12684273816416514,
"grad_norm": 0.1137470006942749,
"learning_rate": 4.7775e-05,
"loss": 0.989,
"step": 89
},
{
"epoch": 0.12826793746938048,
"grad_norm": 0.11435054242610931,
"learning_rate": 4.775e-05,
"loss": 0.9744,
"step": 90
},
{
"epoch": 0.12969313677459582,
"grad_norm": 0.1271040290594101,
"learning_rate": 4.7725e-05,
"loss": 0.9311,
"step": 91
},
{
"epoch": 0.13111833607981116,
"grad_norm": 0.09658078849315643,
"learning_rate": 4.77e-05,
"loss": 0.7304,
"step": 92
},
{
"epoch": 0.1325435353850265,
"grad_norm": 0.09088589251041412,
"learning_rate": 4.7675e-05,
"loss": 0.7537,
"step": 93
},
{
"epoch": 0.13396873469024184,
"grad_norm": 0.10256076604127884,
"learning_rate": 4.765e-05,
"loss": 0.8711,
"step": 94
},
{
"epoch": 0.13539393399545718,
"grad_norm": 0.09635764360427856,
"learning_rate": 4.7625000000000006e-05,
"loss": 0.8243,
"step": 95
},
{
"epoch": 0.13681913330067252,
"grad_norm": 0.10228880494832993,
"learning_rate": 4.76e-05,
"loss": 0.8442,
"step": 96
},
{
"epoch": 0.13824433260588787,
"grad_norm": 0.09500208497047424,
"learning_rate": 4.7575000000000004e-05,
"loss": 0.7335,
"step": 97
},
{
"epoch": 0.1396695319111032,
"grad_norm": 0.09565471112728119,
"learning_rate": 4.755e-05,
"loss": 0.8124,
"step": 98
},
{
"epoch": 0.14109473121631852,
"grad_norm": 0.0910060852766037,
"learning_rate": 4.7525e-05,
"loss": 0.7428,
"step": 99
},
{
"epoch": 0.14251993052153386,
"grad_norm": 0.09788507968187332,
"learning_rate": 4.75e-05,
"loss": 0.8027,
"step": 100
},
{
"epoch": 0.1439451298267492,
"grad_norm": 0.10399317741394043,
"learning_rate": 4.7475e-05,
"loss": 0.8683,
"step": 101
},
{
"epoch": 0.14537032913196454,
"grad_norm": 0.106503926217556,
"learning_rate": 4.745e-05,
"loss": 0.9017,
"step": 102
},
{
"epoch": 0.14679552843717988,
"grad_norm": 0.10645844787359238,
"learning_rate": 4.7425e-05,
"loss": 0.8474,
"step": 103
},
{
"epoch": 0.14822072774239523,
"grad_norm": 0.0960610881447792,
"learning_rate": 4.74e-05,
"loss": 0.8204,
"step": 104
},
{
"epoch": 0.14964592704761057,
"grad_norm": 0.09153874218463898,
"learning_rate": 4.7375e-05,
"loss": 0.725,
"step": 105
},
{
"epoch": 0.1510711263528259,
"grad_norm": 0.10132791101932526,
"learning_rate": 4.735e-05,
"loss": 0.8344,
"step": 106
},
{
"epoch": 0.15249632565804125,
"grad_norm": 0.09215882420539856,
"learning_rate": 4.7325000000000005e-05,
"loss": 0.769,
"step": 107
},
{
"epoch": 0.1539215249632566,
"grad_norm": 0.09991485625505447,
"learning_rate": 4.73e-05,
"loss": 0.8754,
"step": 108
},
{
"epoch": 0.15534672426847193,
"grad_norm": 0.09824506938457489,
"learning_rate": 4.7275000000000004e-05,
"loss": 0.8447,
"step": 109
},
{
"epoch": 0.15677192357368724,
"grad_norm": 0.09528657793998718,
"learning_rate": 4.7249999999999997e-05,
"loss": 0.8477,
"step": 110
},
{
"epoch": 0.15819712287890259,
"grad_norm": 0.09362693130970001,
"learning_rate": 4.7225e-05,
"loss": 0.7892,
"step": 111
},
{
"epoch": 0.15962232218411793,
"grad_norm": 0.10120360553264618,
"learning_rate": 4.72e-05,
"loss": 0.8305,
"step": 112
},
{
"epoch": 0.16104752148933327,
"grad_norm": 0.09102863818407059,
"learning_rate": 4.7175e-05,
"loss": 0.701,
"step": 113
},
{
"epoch": 0.1624727207945486,
"grad_norm": 0.10099191218614578,
"learning_rate": 4.715e-05,
"loss": 0.8191,
"step": 114
},
{
"epoch": 0.16389792009976395,
"grad_norm": 0.09713483601808548,
"learning_rate": 4.7125e-05,
"loss": 0.8317,
"step": 115
},
{
"epoch": 0.1653231194049793,
"grad_norm": 0.09373105317354202,
"learning_rate": 4.71e-05,
"loss": 0.8181,
"step": 116
},
{
"epoch": 0.16674831871019463,
"grad_norm": 0.10019250959157944,
"learning_rate": 4.7075e-05,
"loss": 0.8807,
"step": 117
},
{
"epoch": 0.16817351801540997,
"grad_norm": 0.10471469908952713,
"learning_rate": 4.705e-05,
"loss": 0.917,
"step": 118
},
{
"epoch": 0.16959871732062531,
"grad_norm": 0.09110147505998611,
"learning_rate": 4.7025000000000005e-05,
"loss": 0.7423,
"step": 119
},
{
"epoch": 0.17102391662584066,
"grad_norm": 0.10044188052415848,
"learning_rate": 4.7e-05,
"loss": 0.7266,
"step": 120
},
{
"epoch": 0.172449115931056,
"grad_norm": 0.0988844633102417,
"learning_rate": 4.6975000000000003e-05,
"loss": 0.8469,
"step": 121
},
{
"epoch": 0.1738743152362713,
"grad_norm": 0.10097721219062805,
"learning_rate": 4.695e-05,
"loss": 0.8035,
"step": 122
},
{
"epoch": 0.17529951454148665,
"grad_norm": 0.09659305959939957,
"learning_rate": 4.6925e-05,
"loss": 0.7965,
"step": 123
},
{
"epoch": 0.176724713846702,
"grad_norm": 0.10277678817510605,
"learning_rate": 4.69e-05,
"loss": 0.9071,
"step": 124
},
{
"epoch": 0.17814991315191733,
"grad_norm": 0.10112367570400238,
"learning_rate": 4.6875e-05,
"loss": 0.8183,
"step": 125
},
{
"epoch": 0.17957511245713267,
"grad_norm": 0.1075640618801117,
"learning_rate": 4.685000000000001e-05,
"loss": 0.952,
"step": 126
},
{
"epoch": 0.18100031176234802,
"grad_norm": 0.0978296771645546,
"learning_rate": 4.6825e-05,
"loss": 0.8194,
"step": 127
},
{
"epoch": 0.18242551106756336,
"grad_norm": 0.09457103163003922,
"learning_rate": 4.6800000000000006e-05,
"loss": 0.7409,
"step": 128
},
{
"epoch": 0.1838507103727787,
"grad_norm": 0.10937376320362091,
"learning_rate": 4.6775000000000005e-05,
"loss": 0.825,
"step": 129
},
{
"epoch": 0.18527590967799404,
"grad_norm": 0.09174269437789917,
"learning_rate": 4.6750000000000005e-05,
"loss": 0.7436,
"step": 130
},
{
"epoch": 0.18670110898320938,
"grad_norm": 0.09961359947919846,
"learning_rate": 4.6725000000000004e-05,
"loss": 0.8207,
"step": 131
},
{
"epoch": 0.18812630828842472,
"grad_norm": 0.10639068484306335,
"learning_rate": 4.6700000000000003e-05,
"loss": 0.8795,
"step": 132
},
{
"epoch": 0.18955150759364006,
"grad_norm": 0.1043340265750885,
"learning_rate": 4.6675e-05,
"loss": 0.8564,
"step": 133
},
{
"epoch": 0.19097670689885538,
"grad_norm": 0.10141237080097198,
"learning_rate": 4.665e-05,
"loss": 0.8536,
"step": 134
},
{
"epoch": 0.19240190620407072,
"grad_norm": 0.10130324959754944,
"learning_rate": 4.6625e-05,
"loss": 0.7448,
"step": 135
},
{
"epoch": 0.19382710550928606,
"grad_norm": 0.09831742942333221,
"learning_rate": 4.660000000000001e-05,
"loss": 0.8022,
"step": 136
},
{
"epoch": 0.1952523048145014,
"grad_norm": 0.10223159939050674,
"learning_rate": 4.6575e-05,
"loss": 0.8987,
"step": 137
},
{
"epoch": 0.19667750411971674,
"grad_norm": 0.10412299633026123,
"learning_rate": 4.655000000000001e-05,
"loss": 0.7896,
"step": 138
},
{
"epoch": 0.19810270342493208,
"grad_norm": 0.10144494473934174,
"learning_rate": 4.6525e-05,
"loss": 0.8473,
"step": 139
},
{
"epoch": 0.19952790273014742,
"grad_norm": 0.09591324627399445,
"learning_rate": 4.6500000000000005e-05,
"loss": 0.7606,
"step": 140
},
{
"epoch": 0.20095310203536276,
"grad_norm": 0.10004063695669174,
"learning_rate": 4.6475000000000005e-05,
"loss": 0.7814,
"step": 141
},
{
"epoch": 0.2023783013405781,
"grad_norm": 0.10389166325330734,
"learning_rate": 4.6450000000000004e-05,
"loss": 0.9484,
"step": 142
},
{
"epoch": 0.20380350064579345,
"grad_norm": 0.10206881910562515,
"learning_rate": 4.6425000000000004e-05,
"loss": 0.8014,
"step": 143
},
{
"epoch": 0.2052286999510088,
"grad_norm": 0.0980120301246643,
"learning_rate": 4.64e-05,
"loss": 0.8191,
"step": 144
},
{
"epoch": 0.2066538992562241,
"grad_norm": 0.10040997713804245,
"learning_rate": 4.6375e-05,
"loss": 0.8783,
"step": 145
},
{
"epoch": 0.20807909856143944,
"grad_norm": 0.09480734169483185,
"learning_rate": 4.635e-05,
"loss": 0.7052,
"step": 146
},
{
"epoch": 0.20950429786665478,
"grad_norm": 0.09925828874111176,
"learning_rate": 4.6325e-05,
"loss": 0.7411,
"step": 147
},
{
"epoch": 0.21092949717187012,
"grad_norm": 0.09905529767274857,
"learning_rate": 4.630000000000001e-05,
"loss": 0.8313,
"step": 148
},
{
"epoch": 0.21235469647708546,
"grad_norm": 0.10014835745096207,
"learning_rate": 4.6275e-05,
"loss": 0.7224,
"step": 149
},
{
"epoch": 0.2137798957823008,
"grad_norm": 0.10312588512897491,
"learning_rate": 4.6250000000000006e-05,
"loss": 0.8382,
"step": 150
},
{
"epoch": 0.21520509508751615,
"grad_norm": 0.09137175232172012,
"learning_rate": 4.6225e-05,
"loss": 0.719,
"step": 151
},
{
"epoch": 0.2166302943927315,
"grad_norm": 0.10332849621772766,
"learning_rate": 4.6200000000000005e-05,
"loss": 0.7336,
"step": 152
},
{
"epoch": 0.21805549369794683,
"grad_norm": 0.097069151699543,
"learning_rate": 4.6175000000000004e-05,
"loss": 0.7914,
"step": 153
},
{
"epoch": 0.21948069300316217,
"grad_norm": 0.0982237234711647,
"learning_rate": 4.6150000000000004e-05,
"loss": 0.7311,
"step": 154
},
{
"epoch": 0.2209058923083775,
"grad_norm": 0.10182492434978485,
"learning_rate": 4.6125e-05,
"loss": 0.8212,
"step": 155
},
{
"epoch": 0.22233109161359285,
"grad_norm": 0.10361000150442123,
"learning_rate": 4.61e-05,
"loss": 0.7684,
"step": 156
},
{
"epoch": 0.22375629091880817,
"grad_norm": 0.09307707101106644,
"learning_rate": 4.6075e-05,
"loss": 0.6805,
"step": 157
},
{
"epoch": 0.2251814902240235,
"grad_norm": 0.11543877422809601,
"learning_rate": 4.605e-05,
"loss": 0.7775,
"step": 158
},
{
"epoch": 0.22660668952923885,
"grad_norm": 0.0867183655500412,
"learning_rate": 4.6025e-05,
"loss": 0.6517,
"step": 159
},
{
"epoch": 0.2280318888344542,
"grad_norm": 0.0990353375673294,
"learning_rate": 4.600000000000001e-05,
"loss": 0.7419,
"step": 160
},
{
"epoch": 0.22945708813966953,
"grad_norm": 0.1119513213634491,
"learning_rate": 4.5975e-05,
"loss": 0.8633,
"step": 161
},
{
"epoch": 0.23088228744488487,
"grad_norm": 0.10065409541130066,
"learning_rate": 4.5950000000000006e-05,
"loss": 0.8092,
"step": 162
},
{
"epoch": 0.2323074867501002,
"grad_norm": 0.10603216290473938,
"learning_rate": 4.5925e-05,
"loss": 0.8269,
"step": 163
},
{
"epoch": 0.23373268605531555,
"grad_norm": 0.10229446738958359,
"learning_rate": 4.5900000000000004e-05,
"loss": 0.8378,
"step": 164
},
{
"epoch": 0.2351578853605309,
"grad_norm": 0.11105506122112274,
"learning_rate": 4.5875000000000004e-05,
"loss": 0.8295,
"step": 165
},
{
"epoch": 0.23658308466574623,
"grad_norm": 0.10707306861877441,
"learning_rate": 4.585e-05,
"loss": 0.7005,
"step": 166
},
{
"epoch": 0.23800828397096158,
"grad_norm": 0.10230866074562073,
"learning_rate": 4.5825e-05,
"loss": 0.8244,
"step": 167
},
{
"epoch": 0.2394334832761769,
"grad_norm": 0.09538046270608902,
"learning_rate": 4.58e-05,
"loss": 0.6782,
"step": 168
},
{
"epoch": 0.24085868258139223,
"grad_norm": 0.09909283369779587,
"learning_rate": 4.5775e-05,
"loss": 0.766,
"step": 169
},
{
"epoch": 0.24228388188660757,
"grad_norm": 0.10667891055345535,
"learning_rate": 4.575e-05,
"loss": 0.8368,
"step": 170
},
{
"epoch": 0.2437090811918229,
"grad_norm": 0.10888192057609558,
"learning_rate": 4.5725e-05,
"loss": 0.8679,
"step": 171
},
{
"epoch": 0.24513428049703825,
"grad_norm": 0.10493852198123932,
"learning_rate": 4.5700000000000006e-05,
"loss": 0.8145,
"step": 172
},
{
"epoch": 0.2465594798022536,
"grad_norm": 0.10101497173309326,
"learning_rate": 4.5675e-05,
"loss": 0.783,
"step": 173
},
{
"epoch": 0.24798467910746894,
"grad_norm": 0.09885123372077942,
"learning_rate": 4.5650000000000005e-05,
"loss": 0.8117,
"step": 174
},
{
"epoch": 0.24940987841268428,
"grad_norm": 0.10251446068286896,
"learning_rate": 4.5625e-05,
"loss": 0.7805,
"step": 175
},
{
"epoch": 0.2508350777178996,
"grad_norm": 0.10276594758033752,
"learning_rate": 4.5600000000000004e-05,
"loss": 0.8183,
"step": 176
},
{
"epoch": 0.25226027702311493,
"grad_norm": 0.10428547859191895,
"learning_rate": 4.5575e-05,
"loss": 0.8479,
"step": 177
},
{
"epoch": 0.2536854763283303,
"grad_norm": 0.10580292344093323,
"learning_rate": 4.555e-05,
"loss": 0.7803,
"step": 178
},
{
"epoch": 0.2551106756335456,
"grad_norm": 0.10346924513578415,
"learning_rate": 4.5525e-05,
"loss": 0.826,
"step": 179
},
{
"epoch": 0.25653587493876095,
"grad_norm": 0.10467348992824554,
"learning_rate": 4.55e-05,
"loss": 0.7606,
"step": 180
},
{
"epoch": 0.2579610742439763,
"grad_norm": 0.1021052822470665,
"learning_rate": 4.5475e-05,
"loss": 0.8233,
"step": 181
},
{
"epoch": 0.25938627354919164,
"grad_norm": 0.10617226362228394,
"learning_rate": 4.545000000000001e-05,
"loss": 0.8375,
"step": 182
},
{
"epoch": 0.260811472854407,
"grad_norm": 0.09988026320934296,
"learning_rate": 4.5425e-05,
"loss": 0.7752,
"step": 183
},
{
"epoch": 0.2622366721596223,
"grad_norm": 0.10718479007482529,
"learning_rate": 4.5400000000000006e-05,
"loss": 0.7118,
"step": 184
},
{
"epoch": 0.26366187146483766,
"grad_norm": 0.10486292093992233,
"learning_rate": 4.5375e-05,
"loss": 0.8519,
"step": 185
},
{
"epoch": 0.265087070770053,
"grad_norm": 0.10830938816070557,
"learning_rate": 4.5350000000000005e-05,
"loss": 0.8276,
"step": 186
},
{
"epoch": 0.26651227007526834,
"grad_norm": 0.10683759301900864,
"learning_rate": 4.5325000000000004e-05,
"loss": 0.8389,
"step": 187
},
{
"epoch": 0.2679374693804837,
"grad_norm": 0.0971045196056366,
"learning_rate": 4.53e-05,
"loss": 0.7385,
"step": 188
},
{
"epoch": 0.269362668685699,
"grad_norm": 0.09445853531360626,
"learning_rate": 4.5275e-05,
"loss": 0.6954,
"step": 189
},
{
"epoch": 0.27078786799091437,
"grad_norm": 0.09003406018018723,
"learning_rate": 4.525e-05,
"loss": 0.666,
"step": 190
},
{
"epoch": 0.2722130672961297,
"grad_norm": 0.1080944612622261,
"learning_rate": 4.5225e-05,
"loss": 0.8569,
"step": 191
},
{
"epoch": 0.27363826660134505,
"grad_norm": 0.11204903572797775,
"learning_rate": 4.52e-05,
"loss": 0.7499,
"step": 192
},
{
"epoch": 0.2750634659065604,
"grad_norm": 0.0980222150683403,
"learning_rate": 4.5175e-05,
"loss": 0.7413,
"step": 193
},
{
"epoch": 0.27648866521177573,
"grad_norm": 0.10219401121139526,
"learning_rate": 4.5150000000000006e-05,
"loss": 0.8179,
"step": 194
},
{
"epoch": 0.27791386451699107,
"grad_norm": 0.10334503650665283,
"learning_rate": 4.5125e-05,
"loss": 0.8254,
"step": 195
},
{
"epoch": 0.2793390638222064,
"grad_norm": 0.10013623535633087,
"learning_rate": 4.5100000000000005e-05,
"loss": 0.7774,
"step": 196
},
{
"epoch": 0.2807642631274217,
"grad_norm": 0.10263955593109131,
"learning_rate": 4.5075e-05,
"loss": 0.8304,
"step": 197
},
{
"epoch": 0.28218946243263704,
"grad_norm": 0.10180577635765076,
"learning_rate": 4.5050000000000004e-05,
"loss": 0.7493,
"step": 198
},
{
"epoch": 0.2836146617378524,
"grad_norm": 0.09825511276721954,
"learning_rate": 4.5025000000000003e-05,
"loss": 0.7498,
"step": 199
},
{
"epoch": 0.2850398610430677,
"grad_norm": 0.11399830877780914,
"learning_rate": 4.5e-05,
"loss": 0.8909,
"step": 200
},
{
"epoch": 0.28646506034828306,
"grad_norm": 0.10873403400182724,
"learning_rate": 4.4975e-05,
"loss": 0.8518,
"step": 201
},
{
"epoch": 0.2878902596534984,
"grad_norm": 0.10373584181070328,
"learning_rate": 4.495e-05,
"loss": 0.7696,
"step": 202
},
{
"epoch": 0.28931545895871374,
"grad_norm": 0.11340975761413574,
"learning_rate": 4.4925e-05,
"loss": 0.8955,
"step": 203
},
{
"epoch": 0.2907406582639291,
"grad_norm": 0.10780593752861023,
"learning_rate": 4.49e-05,
"loss": 0.803,
"step": 204
},
{
"epoch": 0.2921658575691444,
"grad_norm": 0.10378744453191757,
"learning_rate": 4.4875e-05,
"loss": 0.7567,
"step": 205
},
{
"epoch": 0.29359105687435977,
"grad_norm": 0.10121216624975204,
"learning_rate": 4.4850000000000006e-05,
"loss": 0.7824,
"step": 206
},
{
"epoch": 0.2950162561795751,
"grad_norm": 0.10147752612829208,
"learning_rate": 4.4825e-05,
"loss": 0.7487,
"step": 207
},
{
"epoch": 0.29644145548479045,
"grad_norm": 0.10661248862743378,
"learning_rate": 4.4800000000000005e-05,
"loss": 0.8994,
"step": 208
},
{
"epoch": 0.2978666547900058,
"grad_norm": 0.1013302281498909,
"learning_rate": 4.4775e-05,
"loss": 0.7264,
"step": 209
},
{
"epoch": 0.29929185409522113,
"grad_norm": 0.10879641771316528,
"learning_rate": 4.4750000000000004e-05,
"loss": 0.8762,
"step": 210
},
{
"epoch": 0.3007170534004365,
"grad_norm": 0.0987788587808609,
"learning_rate": 4.4725e-05,
"loss": 0.7726,
"step": 211
},
{
"epoch": 0.3021422527056518,
"grad_norm": 0.10740034282207489,
"learning_rate": 4.47e-05,
"loss": 0.8782,
"step": 212
},
{
"epoch": 0.30356745201086716,
"grad_norm": 0.10802042484283447,
"learning_rate": 4.4675e-05,
"loss": 0.7877,
"step": 213
},
{
"epoch": 0.3049926513160825,
"grad_norm": 0.1044781282544136,
"learning_rate": 4.465e-05,
"loss": 0.8367,
"step": 214
},
{
"epoch": 0.30641785062129784,
"grad_norm": 0.10474110394716263,
"learning_rate": 4.4625e-05,
"loss": 0.773,
"step": 215
},
{
"epoch": 0.3078430499265132,
"grad_norm": 0.10125650465488434,
"learning_rate": 4.46e-05,
"loss": 0.6825,
"step": 216
},
{
"epoch": 0.3092682492317285,
"grad_norm": 0.1103627160191536,
"learning_rate": 4.4575e-05,
"loss": 0.8542,
"step": 217
},
{
"epoch": 0.31069344853694386,
"grad_norm": 0.09732158482074738,
"learning_rate": 4.4550000000000005e-05,
"loss": 0.7632,
"step": 218
},
{
"epoch": 0.3121186478421592,
"grad_norm": 0.11524324119091034,
"learning_rate": 4.4525e-05,
"loss": 0.9403,
"step": 219
},
{
"epoch": 0.3135438471473745,
"grad_norm": 0.09775565564632416,
"learning_rate": 4.4500000000000004e-05,
"loss": 0.7332,
"step": 220
},
{
"epoch": 0.31496904645258983,
"grad_norm": 0.11021077632904053,
"learning_rate": 4.4475e-05,
"loss": 0.8395,
"step": 221
},
{
"epoch": 0.31639424575780517,
"grad_norm": 0.11803734302520752,
"learning_rate": 4.445e-05,
"loss": 0.7647,
"step": 222
},
{
"epoch": 0.3178194450630205,
"grad_norm": 0.12009308487176895,
"learning_rate": 4.4425e-05,
"loss": 0.7925,
"step": 223
},
{
"epoch": 0.31924464436823585,
"grad_norm": 0.10618815571069717,
"learning_rate": 4.44e-05,
"loss": 0.8435,
"step": 224
},
{
"epoch": 0.3206698436734512,
"grad_norm": 0.10659938305616379,
"learning_rate": 4.4375e-05,
"loss": 0.775,
"step": 225
},
{
"epoch": 0.32209504297866653,
"grad_norm": 0.11108309030532837,
"learning_rate": 4.435e-05,
"loss": 0.8754,
"step": 226
},
{
"epoch": 0.3235202422838819,
"grad_norm": 0.10515527427196503,
"learning_rate": 4.4325e-05,
"loss": 0.8339,
"step": 227
},
{
"epoch": 0.3249454415890972,
"grad_norm": 0.10745377838611603,
"learning_rate": 4.43e-05,
"loss": 0.8548,
"step": 228
},
{
"epoch": 0.32637064089431256,
"grad_norm": 0.10583943873643875,
"learning_rate": 4.4275e-05,
"loss": 0.699,
"step": 229
},
{
"epoch": 0.3277958401995279,
"grad_norm": 0.10007690638303757,
"learning_rate": 4.4250000000000005e-05,
"loss": 0.6141,
"step": 230
},
{
"epoch": 0.32922103950474324,
"grad_norm": 0.10546508431434631,
"learning_rate": 4.4225e-05,
"loss": 0.7301,
"step": 231
},
{
"epoch": 0.3306462388099586,
"grad_norm": 0.11111865937709808,
"learning_rate": 4.4200000000000004e-05,
"loss": 0.7874,
"step": 232
},
{
"epoch": 0.3320714381151739,
"grad_norm": 0.10841409116983414,
"learning_rate": 4.4174999999999996e-05,
"loss": 0.8304,
"step": 233
},
{
"epoch": 0.33349663742038926,
"grad_norm": 0.09959273040294647,
"learning_rate": 4.415e-05,
"loss": 0.7127,
"step": 234
},
{
"epoch": 0.3349218367256046,
"grad_norm": 0.10455303639173508,
"learning_rate": 4.4125e-05,
"loss": 0.7744,
"step": 235
},
{
"epoch": 0.33634703603081995,
"grad_norm": 0.1017572209239006,
"learning_rate": 4.41e-05,
"loss": 0.7018,
"step": 236
},
{
"epoch": 0.3377722353360353,
"grad_norm": 0.10500877350568771,
"learning_rate": 4.4075e-05,
"loss": 0.8501,
"step": 237
},
{
"epoch": 0.33919743464125063,
"grad_norm": 0.11418474465608597,
"learning_rate": 4.405e-05,
"loss": 0.8175,
"step": 238
},
{
"epoch": 0.34062263394646597,
"grad_norm": 0.11544894427061081,
"learning_rate": 4.4025e-05,
"loss": 0.8615,
"step": 239
},
{
"epoch": 0.3420478332516813,
"grad_norm": 0.10797794908285141,
"learning_rate": 4.4000000000000006e-05,
"loss": 0.8563,
"step": 240
},
{
"epoch": 0.34347303255689665,
"grad_norm": 0.10476794093847275,
"learning_rate": 4.3975e-05,
"loss": 0.8188,
"step": 241
},
{
"epoch": 0.344898231862112,
"grad_norm": 0.11608009785413742,
"learning_rate": 4.3950000000000004e-05,
"loss": 0.9397,
"step": 242
},
{
"epoch": 0.34632343116732733,
"grad_norm": 0.10791412740945816,
"learning_rate": 4.3925e-05,
"loss": 0.8384,
"step": 243
},
{
"epoch": 0.3477486304725426,
"grad_norm": 0.09884046018123627,
"learning_rate": 4.39e-05,
"loss": 0.6745,
"step": 244
},
{
"epoch": 0.34917382977775796,
"grad_norm": 0.10947505384683609,
"learning_rate": 4.3875e-05,
"loss": 0.8591,
"step": 245
},
{
"epoch": 0.3505990290829733,
"grad_norm": 0.10952378064393997,
"learning_rate": 4.385e-05,
"loss": 0.8513,
"step": 246
},
{
"epoch": 0.35202422838818864,
"grad_norm": 0.10364098101854324,
"learning_rate": 4.3825e-05,
"loss": 0.7091,
"step": 247
},
{
"epoch": 0.353449427693404,
"grad_norm": 0.10010597109794617,
"learning_rate": 4.38e-05,
"loss": 0.7164,
"step": 248
},
{
"epoch": 0.3548746269986193,
"grad_norm": 0.10271008312702179,
"learning_rate": 4.3775e-05,
"loss": 0.7056,
"step": 249
},
{
"epoch": 0.35629982630383467,
"grad_norm": 0.10976854711771011,
"learning_rate": 4.375e-05,
"loss": 0.8428,
"step": 250
},
{
"epoch": 0.35772502560905,
"grad_norm": 0.1052214577794075,
"learning_rate": 4.3725000000000006e-05,
"loss": 0.8304,
"step": 251
},
{
"epoch": 0.35915022491426535,
"grad_norm": 0.10342039912939072,
"learning_rate": 4.3700000000000005e-05,
"loss": 0.7555,
"step": 252
},
{
"epoch": 0.3605754242194807,
"grad_norm": 0.09833579510450363,
"learning_rate": 4.3675000000000005e-05,
"loss": 0.7567,
"step": 253
},
{
"epoch": 0.36200062352469603,
"grad_norm": 0.10225775837898254,
"learning_rate": 4.3650000000000004e-05,
"loss": 0.7658,
"step": 254
},
{
"epoch": 0.36342582282991137,
"grad_norm": 0.10725218057632446,
"learning_rate": 4.3625e-05,
"loss": 0.7952,
"step": 255
},
{
"epoch": 0.3648510221351267,
"grad_norm": 0.10721350461244583,
"learning_rate": 4.36e-05,
"loss": 0.8012,
"step": 256
},
{
"epoch": 0.36627622144034205,
"grad_norm": 0.10893641412258148,
"learning_rate": 4.3575e-05,
"loss": 0.8517,
"step": 257
},
{
"epoch": 0.3677014207455574,
"grad_norm": 0.10116464644670486,
"learning_rate": 4.355e-05,
"loss": 0.7651,
"step": 258
},
{
"epoch": 0.36912662005077274,
"grad_norm": 0.10063575208187103,
"learning_rate": 4.352500000000001e-05,
"loss": 0.6854,
"step": 259
},
{
"epoch": 0.3705518193559881,
"grad_norm": 0.09998445957899094,
"learning_rate": 4.35e-05,
"loss": 0.6819,
"step": 260
},
{
"epoch": 0.3719770186612034,
"grad_norm": 0.10402004420757294,
"learning_rate": 4.3475000000000006e-05,
"loss": 0.7709,
"step": 261
},
{
"epoch": 0.37340221796641876,
"grad_norm": 0.10427525639533997,
"learning_rate": 4.345e-05,
"loss": 0.7711,
"step": 262
},
{
"epoch": 0.3748274172716341,
"grad_norm": 0.12417809665203094,
"learning_rate": 4.3425000000000005e-05,
"loss": 0.8744,
"step": 263
},
{
"epoch": 0.37625261657684944,
"grad_norm": 0.10869535058736801,
"learning_rate": 4.3400000000000005e-05,
"loss": 0.8193,
"step": 264
},
{
"epoch": 0.3776778158820648,
"grad_norm": 0.10038340836763382,
"learning_rate": 4.3375000000000004e-05,
"loss": 0.697,
"step": 265
},
{
"epoch": 0.3791030151872801,
"grad_norm": 0.1014515683054924,
"learning_rate": 4.335e-05,
"loss": 0.8344,
"step": 266
},
{
"epoch": 0.3805282144924954,
"grad_norm": 0.10957922786474228,
"learning_rate": 4.3325e-05,
"loss": 0.7692,
"step": 267
},
{
"epoch": 0.38195341379771075,
"grad_norm": 0.11139275878667831,
"learning_rate": 4.33e-05,
"loss": 0.8952,
"step": 268
},
{
"epoch": 0.3833786131029261,
"grad_norm": 0.11045045405626297,
"learning_rate": 4.3275e-05,
"loss": 0.8474,
"step": 269
},
{
"epoch": 0.38480381240814143,
"grad_norm": 0.11069018393754959,
"learning_rate": 4.325e-05,
"loss": 0.8269,
"step": 270
},
{
"epoch": 0.3862290117133568,
"grad_norm": 0.10967734456062317,
"learning_rate": 4.322500000000001e-05,
"loss": 0.7249,
"step": 271
},
{
"epoch": 0.3876542110185721,
"grad_norm": 0.10094799101352692,
"learning_rate": 4.32e-05,
"loss": 0.703,
"step": 272
},
{
"epoch": 0.38907941032378746,
"grad_norm": 0.10996127128601074,
"learning_rate": 4.3175000000000006e-05,
"loss": 0.6771,
"step": 273
},
{
"epoch": 0.3905046096290028,
"grad_norm": 0.11255199462175369,
"learning_rate": 4.315e-05,
"loss": 0.8818,
"step": 274
},
{
"epoch": 0.39192980893421814,
"grad_norm": 0.10402040928602219,
"learning_rate": 4.3125000000000005e-05,
"loss": 0.8188,
"step": 275
},
{
"epoch": 0.3933550082394335,
"grad_norm": 0.10776573419570923,
"learning_rate": 4.3100000000000004e-05,
"loss": 0.8489,
"step": 276
},
{
"epoch": 0.3947802075446488,
"grad_norm": 0.10712596029043198,
"learning_rate": 4.3075000000000003e-05,
"loss": 0.7743,
"step": 277
},
{
"epoch": 0.39620540684986416,
"grad_norm": 0.11135271191596985,
"learning_rate": 4.305e-05,
"loss": 0.8426,
"step": 278
},
{
"epoch": 0.3976306061550795,
"grad_norm": 0.10813678056001663,
"learning_rate": 4.3025e-05,
"loss": 0.7565,
"step": 279
},
{
"epoch": 0.39905580546029484,
"grad_norm": 0.10677481442689896,
"learning_rate": 4.3e-05,
"loss": 0.8087,
"step": 280
},
{
"epoch": 0.4004810047655102,
"grad_norm": 0.10443906486034393,
"learning_rate": 4.2975e-05,
"loss": 0.7402,
"step": 281
},
{
"epoch": 0.4019062040707255,
"grad_norm": 0.1226535364985466,
"learning_rate": 4.295e-05,
"loss": 0.8995,
"step": 282
},
{
"epoch": 0.40333140337594087,
"grad_norm": 0.11007159948348999,
"learning_rate": 4.2925000000000007e-05,
"loss": 0.8466,
"step": 283
},
{
"epoch": 0.4047566026811562,
"grad_norm": 0.09968432039022446,
"learning_rate": 4.29e-05,
"loss": 0.7246,
"step": 284
},
{
"epoch": 0.40618180198637155,
"grad_norm": 0.11246407777070999,
"learning_rate": 4.2875000000000005e-05,
"loss": 0.8664,
"step": 285
},
{
"epoch": 0.4076070012915869,
"grad_norm": 0.12011117488145828,
"learning_rate": 4.285e-05,
"loss": 0.8936,
"step": 286
},
{
"epoch": 0.40903220059680223,
"grad_norm": 0.10944236814975739,
"learning_rate": 4.2825000000000004e-05,
"loss": 0.7469,
"step": 287
},
{
"epoch": 0.4104573999020176,
"grad_norm": 0.09821781516075134,
"learning_rate": 4.2800000000000004e-05,
"loss": 0.6542,
"step": 288
},
{
"epoch": 0.4118825992072329,
"grad_norm": 0.11124128103256226,
"learning_rate": 4.2775e-05,
"loss": 0.7569,
"step": 289
},
{
"epoch": 0.4133077985124482,
"grad_norm": 0.10597945749759674,
"learning_rate": 4.275e-05,
"loss": 0.7624,
"step": 290
},
{
"epoch": 0.41473299781766354,
"grad_norm": 0.1080138310790062,
"learning_rate": 4.2725e-05,
"loss": 0.7675,
"step": 291
},
{
"epoch": 0.4161581971228789,
"grad_norm": 0.11542925983667374,
"learning_rate": 4.27e-05,
"loss": 0.9096,
"step": 292
},
{
"epoch": 0.4175833964280942,
"grad_norm": 0.10396554321050644,
"learning_rate": 4.2675e-05,
"loss": 0.7076,
"step": 293
},
{
"epoch": 0.41900859573330956,
"grad_norm": 0.11440159380435944,
"learning_rate": 4.265e-05,
"loss": 0.893,
"step": 294
},
{
"epoch": 0.4204337950385249,
"grad_norm": 0.10120712220668793,
"learning_rate": 4.2625000000000006e-05,
"loss": 0.7379,
"step": 295
},
{
"epoch": 0.42185899434374025,
"grad_norm": 0.11033878475427628,
"learning_rate": 4.26e-05,
"loss": 0.7135,
"step": 296
},
{
"epoch": 0.4232841936489556,
"grad_norm": 0.10791100561618805,
"learning_rate": 4.2575000000000005e-05,
"loss": 0.7594,
"step": 297
},
{
"epoch": 0.42470939295417093,
"grad_norm": 0.10651058703660965,
"learning_rate": 4.2550000000000004e-05,
"loss": 0.8037,
"step": 298
},
{
"epoch": 0.42613459225938627,
"grad_norm": 0.10513100773096085,
"learning_rate": 4.2525000000000004e-05,
"loss": 0.7794,
"step": 299
},
{
"epoch": 0.4275597915646016,
"grad_norm": 0.11503254622220993,
"learning_rate": 4.25e-05,
"loss": 0.8263,
"step": 300
},
{
"epoch": 0.42898499086981695,
"grad_norm": 0.10572726279497147,
"learning_rate": 4.2475e-05,
"loss": 0.7898,
"step": 301
},
{
"epoch": 0.4304101901750323,
"grad_norm": 0.10707788169384003,
"learning_rate": 4.245e-05,
"loss": 0.8071,
"step": 302
},
{
"epoch": 0.43183538948024763,
"grad_norm": 0.11152343451976776,
"learning_rate": 4.2425e-05,
"loss": 0.8429,
"step": 303
},
{
"epoch": 0.433260588785463,
"grad_norm": 0.11002811789512634,
"learning_rate": 4.24e-05,
"loss": 0.7854,
"step": 304
},
{
"epoch": 0.4346857880906783,
"grad_norm": 0.10544416308403015,
"learning_rate": 4.237500000000001e-05,
"loss": 0.8108,
"step": 305
},
{
"epoch": 0.43611098739589366,
"grad_norm": 0.1152181476354599,
"learning_rate": 4.235e-05,
"loss": 0.8762,
"step": 306
},
{
"epoch": 0.437536186701109,
"grad_norm": 0.11044751107692719,
"learning_rate": 4.2325000000000006e-05,
"loss": 0.796,
"step": 307
},
{
"epoch": 0.43896138600632434,
"grad_norm": 0.11706916242837906,
"learning_rate": 4.23e-05,
"loss": 0.9074,
"step": 308
},
{
"epoch": 0.4403865853115397,
"grad_norm": 0.10910330712795258,
"learning_rate": 4.2275000000000004e-05,
"loss": 0.7894,
"step": 309
},
{
"epoch": 0.441811784616755,
"grad_norm": 0.11087482422590256,
"learning_rate": 4.2250000000000004e-05,
"loss": 0.7597,
"step": 310
},
{
"epoch": 0.44323698392197036,
"grad_norm": 0.10505465418100357,
"learning_rate": 4.2225e-05,
"loss": 0.7961,
"step": 311
},
{
"epoch": 0.4446621832271857,
"grad_norm": 0.1059369295835495,
"learning_rate": 4.22e-05,
"loss": 0.7392,
"step": 312
},
{
"epoch": 0.446087382532401,
"grad_norm": 0.10603440552949905,
"learning_rate": 4.2175e-05,
"loss": 0.7637,
"step": 313
},
{
"epoch": 0.44751258183761633,
"grad_norm": 0.10093475133180618,
"learning_rate": 4.215e-05,
"loss": 0.7505,
"step": 314
},
{
"epoch": 0.44893778114283167,
"grad_norm": 0.10665563493967056,
"learning_rate": 4.2125e-05,
"loss": 0.8048,
"step": 315
},
{
"epoch": 0.450362980448047,
"grad_norm": 0.10656612366437912,
"learning_rate": 4.21e-05,
"loss": 0.7788,
"step": 316
},
{
"epoch": 0.45178817975326235,
"grad_norm": 0.10967171937227249,
"learning_rate": 4.2075000000000006e-05,
"loss": 0.7662,
"step": 317
},
{
"epoch": 0.4532133790584777,
"grad_norm": 0.102504201233387,
"learning_rate": 4.205e-05,
"loss": 0.7277,
"step": 318
},
{
"epoch": 0.45463857836369304,
"grad_norm": 0.11493342369794846,
"learning_rate": 4.2025000000000005e-05,
"loss": 0.8414,
"step": 319
},
{
"epoch": 0.4560637776689084,
"grad_norm": 0.10588521510362625,
"learning_rate": 4.2e-05,
"loss": 0.8022,
"step": 320
},
{
"epoch": 0.4574889769741237,
"grad_norm": 0.10491249710321426,
"learning_rate": 4.1975000000000004e-05,
"loss": 0.7132,
"step": 321
},
{
"epoch": 0.45891417627933906,
"grad_norm": 0.11623478680849075,
"learning_rate": 4.195e-05,
"loss": 0.8,
"step": 322
},
{
"epoch": 0.4603393755845544,
"grad_norm": 0.11456143110990524,
"learning_rate": 4.1925e-05,
"loss": 0.8189,
"step": 323
},
{
"epoch": 0.46176457488976974,
"grad_norm": 0.1125398501753807,
"learning_rate": 4.19e-05,
"loss": 0.7886,
"step": 324
},
{
"epoch": 0.4631897741949851,
"grad_norm": 0.11077994853258133,
"learning_rate": 4.1875e-05,
"loss": 0.6866,
"step": 325
},
{
"epoch": 0.4646149735002004,
"grad_norm": 0.1082717552781105,
"learning_rate": 4.185e-05,
"loss": 0.7412,
"step": 326
},
{
"epoch": 0.46604017280541576,
"grad_norm": 0.10469144582748413,
"learning_rate": 4.1825e-05,
"loss": 0.7507,
"step": 327
},
{
"epoch": 0.4674653721106311,
"grad_norm": 0.1020510271191597,
"learning_rate": 4.18e-05,
"loss": 0.7296,
"step": 328
},
{
"epoch": 0.46889057141584645,
"grad_norm": 0.10324457287788391,
"learning_rate": 4.1775000000000006e-05,
"loss": 0.7269,
"step": 329
},
{
"epoch": 0.4703157707210618,
"grad_norm": 0.11042344570159912,
"learning_rate": 4.175e-05,
"loss": 0.8476,
"step": 330
},
{
"epoch": 0.47174097002627713,
"grad_norm": 0.10724121332168579,
"learning_rate": 4.1725000000000005e-05,
"loss": 0.7598,
"step": 331
},
{
"epoch": 0.47316616933149247,
"grad_norm": 0.10429059714078903,
"learning_rate": 4.17e-05,
"loss": 0.7261,
"step": 332
},
{
"epoch": 0.4745913686367078,
"grad_norm": 0.10833486169576645,
"learning_rate": 4.1675e-05,
"loss": 0.751,
"step": 333
},
{
"epoch": 0.47601656794192315,
"grad_norm": 0.10824701189994812,
"learning_rate": 4.165e-05,
"loss": 0.7625,
"step": 334
},
{
"epoch": 0.4774417672471385,
"grad_norm": 0.11305626481771469,
"learning_rate": 4.1625e-05,
"loss": 0.8327,
"step": 335
},
{
"epoch": 0.4788669665523538,
"grad_norm": 0.1034177765250206,
"learning_rate": 4.16e-05,
"loss": 0.7484,
"step": 336
},
{
"epoch": 0.4802921658575691,
"grad_norm": 0.10624039173126221,
"learning_rate": 4.1575e-05,
"loss": 0.7728,
"step": 337
},
{
"epoch": 0.48171736516278446,
"grad_norm": 0.09717201441526413,
"learning_rate": 4.155e-05,
"loss": 0.6679,
"step": 338
},
{
"epoch": 0.4831425644679998,
"grad_norm": 0.1153498962521553,
"learning_rate": 4.1525e-05,
"loss": 0.7724,
"step": 339
},
{
"epoch": 0.48456776377321514,
"grad_norm": 0.11195168644189835,
"learning_rate": 4.15e-05,
"loss": 0.7977,
"step": 340
},
{
"epoch": 0.4859929630784305,
"grad_norm": 0.11596551537513733,
"learning_rate": 4.1475000000000005e-05,
"loss": 0.8476,
"step": 341
},
{
"epoch": 0.4874181623836458,
"grad_norm": 0.10531023889780045,
"learning_rate": 4.145e-05,
"loss": 0.7127,
"step": 342
},
{
"epoch": 0.48884336168886117,
"grad_norm": 0.10975097864866257,
"learning_rate": 4.1425000000000004e-05,
"loss": 0.7569,
"step": 343
},
{
"epoch": 0.4902685609940765,
"grad_norm": 0.10925944149494171,
"learning_rate": 4.14e-05,
"loss": 0.8002,
"step": 344
},
{
"epoch": 0.49169376029929185,
"grad_norm": 0.10551533102989197,
"learning_rate": 4.1375e-05,
"loss": 0.8065,
"step": 345
},
{
"epoch": 0.4931189596045072,
"grad_norm": 0.09912430495023727,
"learning_rate": 4.135e-05,
"loss": 0.6942,
"step": 346
},
{
"epoch": 0.49454415890972253,
"grad_norm": 0.10893817991018295,
"learning_rate": 4.1325e-05,
"loss": 0.6574,
"step": 347
},
{
"epoch": 0.49596935821493787,
"grad_norm": 0.1124715805053711,
"learning_rate": 4.13e-05,
"loss": 0.8577,
"step": 348
},
{
"epoch": 0.4973945575201532,
"grad_norm": 0.11103921383619308,
"learning_rate": 4.1275e-05,
"loss": 0.8254,
"step": 349
},
{
"epoch": 0.49881975682536855,
"grad_norm": 0.11786279082298279,
"learning_rate": 4.125e-05,
"loss": 0.8266,
"step": 350
},
{
"epoch": 0.5002449561305838,
"grad_norm": 0.10194683820009232,
"learning_rate": 4.1225e-05,
"loss": 0.6318,
"step": 351
},
{
"epoch": 0.5016701554357992,
"grad_norm": 0.10637018084526062,
"learning_rate": 4.12e-05,
"loss": 0.8682,
"step": 352
},
{
"epoch": 0.5030953547410145,
"grad_norm": 0.10803207010030746,
"learning_rate": 4.1175000000000005e-05,
"loss": 0.8038,
"step": 353
},
{
"epoch": 0.5045205540462299,
"grad_norm": 0.11039870232343674,
"learning_rate": 4.115e-05,
"loss": 0.8438,
"step": 354
},
{
"epoch": 0.5059457533514452,
"grad_norm": 0.10990013182163239,
"learning_rate": 4.1125000000000004e-05,
"loss": 0.8432,
"step": 355
},
{
"epoch": 0.5073709526566605,
"grad_norm": 0.11267924308776855,
"learning_rate": 4.11e-05,
"loss": 0.852,
"step": 356
},
{
"epoch": 0.5087961519618759,
"grad_norm": 0.1128760427236557,
"learning_rate": 4.1075e-05,
"loss": 0.8138,
"step": 357
},
{
"epoch": 0.5102213512670912,
"grad_norm": 0.10334976017475128,
"learning_rate": 4.105e-05,
"loss": 0.718,
"step": 358
},
{
"epoch": 0.5116465505723066,
"grad_norm": 0.1046612337231636,
"learning_rate": 4.1025e-05,
"loss": 0.6978,
"step": 359
},
{
"epoch": 0.5130717498775219,
"grad_norm": 0.10563401132822037,
"learning_rate": 4.1e-05,
"loss": 0.7532,
"step": 360
},
{
"epoch": 0.5144969491827373,
"grad_norm": 0.1084398552775383,
"learning_rate": 4.0975e-05,
"loss": 0.7941,
"step": 361
},
{
"epoch": 0.5159221484879526,
"grad_norm": 0.10790320485830307,
"learning_rate": 4.095e-05,
"loss": 0.7606,
"step": 362
},
{
"epoch": 0.5173473477931679,
"grad_norm": 0.11809930950403214,
"learning_rate": 4.0925000000000005e-05,
"loss": 0.8535,
"step": 363
},
{
"epoch": 0.5187725470983833,
"grad_norm": 0.11993366479873657,
"learning_rate": 4.09e-05,
"loss": 0.8257,
"step": 364
},
{
"epoch": 0.5201977464035986,
"grad_norm": 0.10867496579885483,
"learning_rate": 4.0875000000000004e-05,
"loss": 0.8039,
"step": 365
},
{
"epoch": 0.521622945708814,
"grad_norm": 0.11474283784627914,
"learning_rate": 4.085e-05,
"loss": 0.7946,
"step": 366
},
{
"epoch": 0.5230481450140293,
"grad_norm": 0.10183428227901459,
"learning_rate": 4.0825e-05,
"loss": 0.6936,
"step": 367
},
{
"epoch": 0.5244733443192446,
"grad_norm": 0.10531643778085709,
"learning_rate": 4.08e-05,
"loss": 0.7414,
"step": 368
},
{
"epoch": 0.52589854362446,
"grad_norm": 0.1065676361322403,
"learning_rate": 4.0775e-05,
"loss": 0.775,
"step": 369
},
{
"epoch": 0.5273237429296753,
"grad_norm": 0.11469631642103195,
"learning_rate": 4.075e-05,
"loss": 0.8126,
"step": 370
},
{
"epoch": 0.5287489422348907,
"grad_norm": 0.11348406225442886,
"learning_rate": 4.0725e-05,
"loss": 0.8252,
"step": 371
},
{
"epoch": 0.530174141540106,
"grad_norm": 0.1113358736038208,
"learning_rate": 4.07e-05,
"loss": 0.8081,
"step": 372
},
{
"epoch": 0.5315993408453213,
"grad_norm": 0.11194054037332535,
"learning_rate": 4.0675e-05,
"loss": 0.7808,
"step": 373
},
{
"epoch": 0.5330245401505367,
"grad_norm": 0.1085849329829216,
"learning_rate": 4.065e-05,
"loss": 0.7901,
"step": 374
},
{
"epoch": 0.534449739455752,
"grad_norm": 0.11418144404888153,
"learning_rate": 4.0625000000000005e-05,
"loss": 0.8094,
"step": 375
},
{
"epoch": 0.5358749387609674,
"grad_norm": 0.11190535873174667,
"learning_rate": 4.0600000000000004e-05,
"loss": 0.7677,
"step": 376
},
{
"epoch": 0.5373001380661827,
"grad_norm": 0.11096848547458649,
"learning_rate": 4.0575000000000004e-05,
"loss": 0.7926,
"step": 377
},
{
"epoch": 0.538725337371398,
"grad_norm": 0.10761623829603195,
"learning_rate": 4.055e-05,
"loss": 0.7468,
"step": 378
},
{
"epoch": 0.5401505366766134,
"grad_norm": 0.10699047893285751,
"learning_rate": 4.0525e-05,
"loss": 0.7345,
"step": 379
},
{
"epoch": 0.5415757359818287,
"grad_norm": 0.10887409001588821,
"learning_rate": 4.05e-05,
"loss": 0.6518,
"step": 380
},
{
"epoch": 0.5430009352870441,
"grad_norm": 0.11576902866363525,
"learning_rate": 4.0475e-05,
"loss": 0.8155,
"step": 381
},
{
"epoch": 0.5444261345922594,
"grad_norm": 0.10397214442491531,
"learning_rate": 4.045000000000001e-05,
"loss": 0.7012,
"step": 382
},
{
"epoch": 0.5458513338974748,
"grad_norm": 0.10588834434747696,
"learning_rate": 4.0425e-05,
"loss": 0.7716,
"step": 383
},
{
"epoch": 0.5472765332026901,
"grad_norm": 0.10732339322566986,
"learning_rate": 4.0400000000000006e-05,
"loss": 0.7867,
"step": 384
},
{
"epoch": 0.5487017325079054,
"grad_norm": 0.11092869937419891,
"learning_rate": 4.0375e-05,
"loss": 0.7933,
"step": 385
},
{
"epoch": 0.5501269318131208,
"grad_norm": 0.10742226988077164,
"learning_rate": 4.0350000000000005e-05,
"loss": 0.7818,
"step": 386
},
{
"epoch": 0.5515521311183361,
"grad_norm": 0.11102816462516785,
"learning_rate": 4.0325000000000004e-05,
"loss": 0.7929,
"step": 387
},
{
"epoch": 0.5529773304235515,
"grad_norm": 0.102665014564991,
"learning_rate": 4.0300000000000004e-05,
"loss": 0.7661,
"step": 388
},
{
"epoch": 0.5544025297287668,
"grad_norm": 0.10255452990531921,
"learning_rate": 4.0275e-05,
"loss": 0.7154,
"step": 389
},
{
"epoch": 0.5558277290339821,
"grad_norm": 0.10553614050149918,
"learning_rate": 4.025e-05,
"loss": 0.7112,
"step": 390
},
{
"epoch": 0.5572529283391975,
"grad_norm": 0.10714104771614075,
"learning_rate": 4.0225e-05,
"loss": 0.7942,
"step": 391
},
{
"epoch": 0.5586781276444128,
"grad_norm": 0.11751249432563782,
"learning_rate": 4.02e-05,
"loss": 0.8394,
"step": 392
},
{
"epoch": 0.5601033269496282,
"grad_norm": 0.1187812089920044,
"learning_rate": 4.0175e-05,
"loss": 0.8714,
"step": 393
},
{
"epoch": 0.5615285262548434,
"grad_norm": 0.1089523434638977,
"learning_rate": 4.015000000000001e-05,
"loss": 0.7385,
"step": 394
},
{
"epoch": 0.5629537255600587,
"grad_norm": 0.10678154975175858,
"learning_rate": 4.0125e-05,
"loss": 0.8022,
"step": 395
},
{
"epoch": 0.5643789248652741,
"grad_norm": 0.10617212951183319,
"learning_rate": 4.0100000000000006e-05,
"loss": 0.747,
"step": 396
},
{
"epoch": 0.5658041241704894,
"grad_norm": 0.10585972666740417,
"learning_rate": 4.0075e-05,
"loss": 0.7558,
"step": 397
},
{
"epoch": 0.5672293234757048,
"grad_norm": 0.10323747992515564,
"learning_rate": 4.0050000000000004e-05,
"loss": 0.7731,
"step": 398
},
{
"epoch": 0.5686545227809201,
"grad_norm": 0.11604605615139008,
"learning_rate": 4.0025000000000004e-05,
"loss": 0.7148,
"step": 399
},
{
"epoch": 0.5700797220861354,
"grad_norm": 0.10585249215364456,
"learning_rate": 4e-05,
"loss": 0.7092,
"step": 400
},
{
"epoch": 0.5715049213913508,
"grad_norm": 0.1080741137266159,
"learning_rate": 3.9975e-05,
"loss": 0.7381,
"step": 401
},
{
"epoch": 0.5729301206965661,
"grad_norm": 0.11979860812425613,
"learning_rate": 3.995e-05,
"loss": 0.8133,
"step": 402
},
{
"epoch": 0.5743553200017815,
"grad_norm": 0.1198972687125206,
"learning_rate": 3.9925e-05,
"loss": 0.8487,
"step": 403
},
{
"epoch": 0.5757805193069968,
"grad_norm": 0.10009247064590454,
"learning_rate": 3.99e-05,
"loss": 0.6694,
"step": 404
},
{
"epoch": 0.5772057186122121,
"grad_norm": 0.11554791033267975,
"learning_rate": 3.9875e-05,
"loss": 0.7056,
"step": 405
},
{
"epoch": 0.5786309179174275,
"grad_norm": 0.11562521010637283,
"learning_rate": 3.9850000000000006e-05,
"loss": 0.8568,
"step": 406
},
{
"epoch": 0.5800561172226428,
"grad_norm": 0.11275051534175873,
"learning_rate": 3.9825e-05,
"loss": 0.7759,
"step": 407
},
{
"epoch": 0.5814813165278582,
"grad_norm": 0.10360349714756012,
"learning_rate": 3.9800000000000005e-05,
"loss": 0.7161,
"step": 408
},
{
"epoch": 0.5829065158330735,
"grad_norm": 0.10370558500289917,
"learning_rate": 3.9775e-05,
"loss": 0.7134,
"step": 409
},
{
"epoch": 0.5843317151382889,
"grad_norm": 0.1140303909778595,
"learning_rate": 3.9750000000000004e-05,
"loss": 0.8294,
"step": 410
},
{
"epoch": 0.5857569144435042,
"grad_norm": 0.10513099282979965,
"learning_rate": 3.9725e-05,
"loss": 0.7483,
"step": 411
},
{
"epoch": 0.5871821137487195,
"grad_norm": 0.10989110171794891,
"learning_rate": 3.97e-05,
"loss": 0.7373,
"step": 412
},
{
"epoch": 0.5886073130539349,
"grad_norm": 0.10806335508823395,
"learning_rate": 3.9675e-05,
"loss": 0.7139,
"step": 413
},
{
"epoch": 0.5900325123591502,
"grad_norm": 0.112205371260643,
"learning_rate": 3.965e-05,
"loss": 0.7727,
"step": 414
},
{
"epoch": 0.5914577116643656,
"grad_norm": 0.10414332896471024,
"learning_rate": 3.9625e-05,
"loss": 0.7363,
"step": 415
},
{
"epoch": 0.5928829109695809,
"grad_norm": 0.11041130125522614,
"learning_rate": 3.960000000000001e-05,
"loss": 0.7176,
"step": 416
},
{
"epoch": 0.5943081102747962,
"grad_norm": 0.1183328703045845,
"learning_rate": 3.9575e-05,
"loss": 0.813,
"step": 417
},
{
"epoch": 0.5957333095800116,
"grad_norm": 0.10583087801933289,
"learning_rate": 3.9550000000000006e-05,
"loss": 0.7304,
"step": 418
},
{
"epoch": 0.5971585088852269,
"grad_norm": 0.11155475676059723,
"learning_rate": 3.9525e-05,
"loss": 0.8246,
"step": 419
},
{
"epoch": 0.5985837081904423,
"grad_norm": 0.10921408981084824,
"learning_rate": 3.9500000000000005e-05,
"loss": 0.7808,
"step": 420
},
{
"epoch": 0.6000089074956576,
"grad_norm": 0.10471367090940475,
"learning_rate": 3.9475000000000004e-05,
"loss": 0.6909,
"step": 421
},
{
"epoch": 0.601434106800873,
"grad_norm": 0.11162669956684113,
"learning_rate": 3.9450000000000003e-05,
"loss": 0.786,
"step": 422
},
{
"epoch": 0.6028593061060883,
"grad_norm": 0.10998982936143875,
"learning_rate": 3.9425e-05,
"loss": 0.7317,
"step": 423
},
{
"epoch": 0.6042845054113036,
"grad_norm": 0.10951656848192215,
"learning_rate": 3.94e-05,
"loss": 0.759,
"step": 424
},
{
"epoch": 0.605709704716519,
"grad_norm": 0.11754517257213593,
"learning_rate": 3.9375e-05,
"loss": 0.8296,
"step": 425
},
{
"epoch": 0.6071349040217343,
"grad_norm": 0.1161508858203888,
"learning_rate": 3.935e-05,
"loss": 0.8672,
"step": 426
},
{
"epoch": 0.6085601033269497,
"grad_norm": 0.10837070643901825,
"learning_rate": 3.9325e-05,
"loss": 0.7249,
"step": 427
},
{
"epoch": 0.609985302632165,
"grad_norm": 0.10840938985347748,
"learning_rate": 3.9300000000000007e-05,
"loss": 0.7189,
"step": 428
},
{
"epoch": 0.6114105019373803,
"grad_norm": 0.10374140739440918,
"learning_rate": 3.9275e-05,
"loss": 0.6698,
"step": 429
},
{
"epoch": 0.6128357012425957,
"grad_norm": 0.10821092873811722,
"learning_rate": 3.9250000000000005e-05,
"loss": 0.5453,
"step": 430
},
{
"epoch": 0.614260900547811,
"grad_norm": 0.10909675806760788,
"learning_rate": 3.9225e-05,
"loss": 0.7421,
"step": 431
},
{
"epoch": 0.6156860998530264,
"grad_norm": 0.11154672503471375,
"learning_rate": 3.9200000000000004e-05,
"loss": 0.7775,
"step": 432
},
{
"epoch": 0.6171112991582417,
"grad_norm": 0.10661336779594421,
"learning_rate": 3.9175000000000004e-05,
"loss": 0.7046,
"step": 433
},
{
"epoch": 0.618536498463457,
"grad_norm": 0.10708242654800415,
"learning_rate": 3.915e-05,
"loss": 0.7658,
"step": 434
},
{
"epoch": 0.6199616977686724,
"grad_norm": 0.12210702896118164,
"learning_rate": 3.9125e-05,
"loss": 0.8109,
"step": 435
},
{
"epoch": 0.6213868970738877,
"grad_norm": 0.10495051741600037,
"learning_rate": 3.91e-05,
"loss": 0.6841,
"step": 436
},
{
"epoch": 0.6228120963791031,
"grad_norm": 0.1136164590716362,
"learning_rate": 3.9075e-05,
"loss": 0.8065,
"step": 437
},
{
"epoch": 0.6242372956843184,
"grad_norm": 0.1157810389995575,
"learning_rate": 3.905e-05,
"loss": 0.7854,
"step": 438
},
{
"epoch": 0.6256624949895337,
"grad_norm": 0.11174576729536057,
"learning_rate": 3.9025e-05,
"loss": 0.7498,
"step": 439
},
{
"epoch": 0.627087694294749,
"grad_norm": 0.11039690673351288,
"learning_rate": 3.9000000000000006e-05,
"loss": 0.7073,
"step": 440
},
{
"epoch": 0.6285128935999643,
"grad_norm": 0.12029264122247696,
"learning_rate": 3.8975e-05,
"loss": 0.8692,
"step": 441
},
{
"epoch": 0.6299380929051797,
"grad_norm": 0.10919161885976791,
"learning_rate": 3.8950000000000005e-05,
"loss": 0.729,
"step": 442
},
{
"epoch": 0.631363292210395,
"grad_norm": 0.10995293408632278,
"learning_rate": 3.8925e-05,
"loss": 0.7307,
"step": 443
},
{
"epoch": 0.6327884915156103,
"grad_norm": 0.11090438812971115,
"learning_rate": 3.8900000000000004e-05,
"loss": 0.7266,
"step": 444
},
{
"epoch": 0.6342136908208257,
"grad_norm": 0.10368547588586807,
"learning_rate": 3.8875e-05,
"loss": 0.707,
"step": 445
},
{
"epoch": 0.635638890126041,
"grad_norm": 0.11268392950296402,
"learning_rate": 3.885e-05,
"loss": 0.7286,
"step": 446
},
{
"epoch": 0.6370640894312564,
"grad_norm": 0.11009091138839722,
"learning_rate": 3.8825e-05,
"loss": 0.7696,
"step": 447
},
{
"epoch": 0.6384892887364717,
"grad_norm": 0.12235729396343231,
"learning_rate": 3.88e-05,
"loss": 0.8193,
"step": 448
},
{
"epoch": 0.639914488041687,
"grad_norm": 0.11753611266613007,
"learning_rate": 3.8775e-05,
"loss": 0.8704,
"step": 449
},
{
"epoch": 0.6413396873469024,
"grad_norm": 0.10819843411445618,
"learning_rate": 3.875e-05,
"loss": 0.715,
"step": 450
},
{
"epoch": 0.6427648866521177,
"grad_norm": 0.11220356076955795,
"learning_rate": 3.8725e-05,
"loss": 0.7312,
"step": 451
},
{
"epoch": 0.6441900859573331,
"grad_norm": 0.12640978395938873,
"learning_rate": 3.8700000000000006e-05,
"loss": 0.9658,
"step": 452
},
{
"epoch": 0.6456152852625484,
"grad_norm": 0.10517140477895737,
"learning_rate": 3.8675e-05,
"loss": 0.6892,
"step": 453
},
{
"epoch": 0.6470404845677638,
"grad_norm": 0.11707638204097748,
"learning_rate": 3.8650000000000004e-05,
"loss": 0.8219,
"step": 454
},
{
"epoch": 0.6484656838729791,
"grad_norm": 0.11122265458106995,
"learning_rate": 3.8625e-05,
"loss": 0.8652,
"step": 455
},
{
"epoch": 0.6498908831781944,
"grad_norm": 0.11619355529546738,
"learning_rate": 3.86e-05,
"loss": 0.7841,
"step": 456
},
{
"epoch": 0.6513160824834098,
"grad_norm": 0.10745042562484741,
"learning_rate": 3.8575e-05,
"loss": 0.7221,
"step": 457
},
{
"epoch": 0.6527412817886251,
"grad_norm": 0.10636070370674133,
"learning_rate": 3.855e-05,
"loss": 0.6589,
"step": 458
},
{
"epoch": 0.6541664810938405,
"grad_norm": 0.11421851813793182,
"learning_rate": 3.8525e-05,
"loss": 0.811,
"step": 459
},
{
"epoch": 0.6555916803990558,
"grad_norm": 0.10628124326467514,
"learning_rate": 3.85e-05,
"loss": 0.7789,
"step": 460
},
{
"epoch": 0.6570168797042711,
"grad_norm": 0.1043456643819809,
"learning_rate": 3.8475e-05,
"loss": 0.6444,
"step": 461
},
{
"epoch": 0.6584420790094865,
"grad_norm": 0.10461540520191193,
"learning_rate": 3.845e-05,
"loss": 0.6143,
"step": 462
},
{
"epoch": 0.6598672783147018,
"grad_norm": 0.11799418926239014,
"learning_rate": 3.8425e-05,
"loss": 0.855,
"step": 463
},
{
"epoch": 0.6612924776199172,
"grad_norm": 0.10864429175853729,
"learning_rate": 3.8400000000000005e-05,
"loss": 0.7053,
"step": 464
},
{
"epoch": 0.6627176769251325,
"grad_norm": 0.1104680523276329,
"learning_rate": 3.8375e-05,
"loss": 0.7377,
"step": 465
},
{
"epoch": 0.6641428762303478,
"grad_norm": 0.10622696578502655,
"learning_rate": 3.8350000000000004e-05,
"loss": 0.6833,
"step": 466
},
{
"epoch": 0.6655680755355632,
"grad_norm": 0.11084061115980148,
"learning_rate": 3.8324999999999996e-05,
"loss": 0.7745,
"step": 467
},
{
"epoch": 0.6669932748407785,
"grad_norm": 0.10528327524662018,
"learning_rate": 3.83e-05,
"loss": 0.6934,
"step": 468
},
{
"epoch": 0.6684184741459939,
"grad_norm": 0.1130615696310997,
"learning_rate": 3.8275e-05,
"loss": 0.7164,
"step": 469
},
{
"epoch": 0.6698436734512092,
"grad_norm": 0.10822808742523193,
"learning_rate": 3.825e-05,
"loss": 0.7998,
"step": 470
},
{
"epoch": 0.6712688727564246,
"grad_norm": 0.11788542568683624,
"learning_rate": 3.8225e-05,
"loss": 0.7756,
"step": 471
},
{
"epoch": 0.6726940720616399,
"grad_norm": 0.1111772358417511,
"learning_rate": 3.82e-05,
"loss": 0.8329,
"step": 472
},
{
"epoch": 0.6741192713668552,
"grad_norm": 0.11801609396934509,
"learning_rate": 3.8175e-05,
"loss": 0.8246,
"step": 473
},
{
"epoch": 0.6755444706720706,
"grad_norm": 0.10770072042942047,
"learning_rate": 3.8150000000000006e-05,
"loss": 0.6941,
"step": 474
},
{
"epoch": 0.6769696699772859,
"grad_norm": 0.10677233338356018,
"learning_rate": 3.8125e-05,
"loss": 0.6876,
"step": 475
},
{
"epoch": 0.6783948692825013,
"grad_norm": 0.10247763991355896,
"learning_rate": 3.8100000000000005e-05,
"loss": 0.691,
"step": 476
},
{
"epoch": 0.6798200685877166,
"grad_norm": 0.11144045740365982,
"learning_rate": 3.8075e-05,
"loss": 0.631,
"step": 477
},
{
"epoch": 0.6812452678929319,
"grad_norm": 0.11134562641382217,
"learning_rate": 3.805e-05,
"loss": 0.7367,
"step": 478
},
{
"epoch": 0.6826704671981473,
"grad_norm": 0.11454468220472336,
"learning_rate": 3.8025e-05,
"loss": 0.837,
"step": 479
},
{
"epoch": 0.6840956665033626,
"grad_norm": 0.11754635721445084,
"learning_rate": 3.8e-05,
"loss": 0.8069,
"step": 480
},
{
"epoch": 0.685520865808578,
"grad_norm": 0.1168476864695549,
"learning_rate": 3.7975e-05,
"loss": 0.7686,
"step": 481
},
{
"epoch": 0.6869460651137933,
"grad_norm": 0.11569193750619888,
"learning_rate": 3.795e-05,
"loss": 0.879,
"step": 482
},
{
"epoch": 0.6883712644190086,
"grad_norm": 0.10266441851854324,
"learning_rate": 3.7925e-05,
"loss": 0.6913,
"step": 483
},
{
"epoch": 0.689796463724224,
"grad_norm": 0.10712210834026337,
"learning_rate": 3.79e-05,
"loss": 0.77,
"step": 484
},
{
"epoch": 0.6912216630294393,
"grad_norm": 0.10386429727077484,
"learning_rate": 3.7875e-05,
"loss": 0.6913,
"step": 485
},
{
"epoch": 0.6926468623346547,
"grad_norm": 0.1123117133975029,
"learning_rate": 3.7850000000000005e-05,
"loss": 0.7888,
"step": 486
},
{
"epoch": 0.6940720616398699,
"grad_norm": 0.11101800203323364,
"learning_rate": 3.7825e-05,
"loss": 0.8469,
"step": 487
},
{
"epoch": 0.6954972609450852,
"grad_norm": 0.1021580845117569,
"learning_rate": 3.7800000000000004e-05,
"loss": 0.6855,
"step": 488
},
{
"epoch": 0.6969224602503006,
"grad_norm": 0.11199895292520523,
"learning_rate": 3.7775e-05,
"loss": 0.7755,
"step": 489
},
{
"epoch": 0.6983476595555159,
"grad_norm": 0.11293957382440567,
"learning_rate": 3.775e-05,
"loss": 0.8001,
"step": 490
},
{
"epoch": 0.6997728588607313,
"grad_norm": 0.10203820466995239,
"learning_rate": 3.7725e-05,
"loss": 0.711,
"step": 491
},
{
"epoch": 0.7011980581659466,
"grad_norm": 0.11941137909889221,
"learning_rate": 3.77e-05,
"loss": 0.8005,
"step": 492
},
{
"epoch": 0.7026232574711619,
"grad_norm": 0.10990210622549057,
"learning_rate": 3.7675e-05,
"loss": 0.6958,
"step": 493
},
{
"epoch": 0.7040484567763773,
"grad_norm": 0.11313103884458542,
"learning_rate": 3.765e-05,
"loss": 0.8212,
"step": 494
},
{
"epoch": 0.7054736560815926,
"grad_norm": 0.10514695197343826,
"learning_rate": 3.7625e-05,
"loss": 0.6504,
"step": 495
},
{
"epoch": 0.706898855386808,
"grad_norm": 0.11459620296955109,
"learning_rate": 3.76e-05,
"loss": 0.8442,
"step": 496
},
{
"epoch": 0.7083240546920233,
"grad_norm": 0.10818853229284286,
"learning_rate": 3.7575e-05,
"loss": 0.7479,
"step": 497
},
{
"epoch": 0.7097492539972386,
"grad_norm": 0.11675546318292618,
"learning_rate": 3.7550000000000005e-05,
"loss": 0.8529,
"step": 498
},
{
"epoch": 0.711174453302454,
"grad_norm": 0.10918252915143967,
"learning_rate": 3.7525e-05,
"loss": 0.7573,
"step": 499
},
{
"epoch": 0.7125996526076693,
"grad_norm": 0.10979127138853073,
"learning_rate": 3.7500000000000003e-05,
"loss": 0.6899,
"step": 500
},
{
"epoch": 0.7140248519128847,
"grad_norm": 0.11179076135158539,
"learning_rate": 3.7475e-05,
"loss": 0.7369,
"step": 501
},
{
"epoch": 0.7154500512181,
"grad_norm": 0.1197597086429596,
"learning_rate": 3.745e-05,
"loss": 0.8371,
"step": 502
},
{
"epoch": 0.7168752505233154,
"grad_norm": 0.11278288811445236,
"learning_rate": 3.7425e-05,
"loss": 0.7285,
"step": 503
},
{
"epoch": 0.7183004498285307,
"grad_norm": 0.10949387401342392,
"learning_rate": 3.74e-05,
"loss": 0.8028,
"step": 504
},
{
"epoch": 0.719725649133746,
"grad_norm": 0.11052912473678589,
"learning_rate": 3.737500000000001e-05,
"loss": 0.756,
"step": 505
},
{
"epoch": 0.7211508484389614,
"grad_norm": 0.11377502232789993,
"learning_rate": 3.735e-05,
"loss": 0.848,
"step": 506
},
{
"epoch": 0.7225760477441767,
"grad_norm": 0.11317731440067291,
"learning_rate": 3.7325000000000006e-05,
"loss": 0.7748,
"step": 507
},
{
"epoch": 0.7240012470493921,
"grad_norm": 0.1152520477771759,
"learning_rate": 3.73e-05,
"loss": 0.8497,
"step": 508
},
{
"epoch": 0.7254264463546074,
"grad_norm": 0.10839933902025223,
"learning_rate": 3.7275000000000005e-05,
"loss": 0.769,
"step": 509
},
{
"epoch": 0.7268516456598227,
"grad_norm": 0.1020594909787178,
"learning_rate": 3.7250000000000004e-05,
"loss": 0.6656,
"step": 510
},
{
"epoch": 0.7282768449650381,
"grad_norm": 0.11031897366046906,
"learning_rate": 3.7225000000000004e-05,
"loss": 0.8189,
"step": 511
},
{
"epoch": 0.7297020442702534,
"grad_norm": 0.11382223665714264,
"learning_rate": 3.72e-05,
"loss": 0.7907,
"step": 512
},
{
"epoch": 0.7311272435754688,
"grad_norm": 0.12720376253128052,
"learning_rate": 3.7175e-05,
"loss": 0.8853,
"step": 513
},
{
"epoch": 0.7325524428806841,
"grad_norm": 0.10073132067918777,
"learning_rate": 3.715e-05,
"loss": 0.7096,
"step": 514
},
{
"epoch": 0.7339776421858994,
"grad_norm": 0.12077777087688446,
"learning_rate": 3.7125e-05,
"loss": 0.9036,
"step": 515
},
{
"epoch": 0.7354028414911148,
"grad_norm": 0.10772977769374847,
"learning_rate": 3.71e-05,
"loss": 0.7885,
"step": 516
},
{
"epoch": 0.7368280407963301,
"grad_norm": 0.1199839785695076,
"learning_rate": 3.707500000000001e-05,
"loss": 0.8906,
"step": 517
},
{
"epoch": 0.7382532401015455,
"grad_norm": 0.11046578735113144,
"learning_rate": 3.705e-05,
"loss": 0.822,
"step": 518
},
{
"epoch": 0.7396784394067608,
"grad_norm": 0.11242034286260605,
"learning_rate": 3.7025000000000005e-05,
"loss": 0.7387,
"step": 519
},
{
"epoch": 0.7411036387119762,
"grad_norm": 0.1116667166352272,
"learning_rate": 3.7e-05,
"loss": 0.7873,
"step": 520
},
{
"epoch": 0.7425288380171915,
"grad_norm": 0.10666213184595108,
"learning_rate": 3.6975000000000004e-05,
"loss": 0.7065,
"step": 521
},
{
"epoch": 0.7439540373224068,
"grad_norm": 0.12476396560668945,
"learning_rate": 3.6950000000000004e-05,
"loss": 0.861,
"step": 522
},
{
"epoch": 0.7453792366276222,
"grad_norm": 0.11960679292678833,
"learning_rate": 3.6925e-05,
"loss": 0.8927,
"step": 523
},
{
"epoch": 0.7468044359328375,
"grad_norm": 0.11509039998054504,
"learning_rate": 3.69e-05,
"loss": 0.8859,
"step": 524
},
{
"epoch": 0.7482296352380529,
"grad_norm": 0.10691982507705688,
"learning_rate": 3.6875e-05,
"loss": 0.6986,
"step": 525
},
{
"epoch": 0.7496548345432682,
"grad_norm": 0.1187766045331955,
"learning_rate": 3.685e-05,
"loss": 0.8791,
"step": 526
},
{
"epoch": 0.7510800338484835,
"grad_norm": 0.11173699051141739,
"learning_rate": 3.6825e-05,
"loss": 0.846,
"step": 527
},
{
"epoch": 0.7525052331536989,
"grad_norm": 0.107224240899086,
"learning_rate": 3.68e-05,
"loss": 0.7074,
"step": 528
},
{
"epoch": 0.7539304324589142,
"grad_norm": 0.10689052194356918,
"learning_rate": 3.6775000000000006e-05,
"loss": 0.7011,
"step": 529
},
{
"epoch": 0.7553556317641296,
"grad_norm": 0.12444566190242767,
"learning_rate": 3.675e-05,
"loss": 0.7226,
"step": 530
},
{
"epoch": 0.7567808310693449,
"grad_norm": 0.10695558786392212,
"learning_rate": 3.6725000000000005e-05,
"loss": 0.7958,
"step": 531
},
{
"epoch": 0.7582060303745602,
"grad_norm": 0.10377009958028793,
"learning_rate": 3.6700000000000004e-05,
"loss": 0.7662,
"step": 532
},
{
"epoch": 0.7596312296797755,
"grad_norm": 0.10755226016044617,
"learning_rate": 3.6675000000000004e-05,
"loss": 0.8274,
"step": 533
},
{
"epoch": 0.7610564289849908,
"grad_norm": 0.11320257186889648,
"learning_rate": 3.665e-05,
"loss": 0.8398,
"step": 534
},
{
"epoch": 0.7624816282902062,
"grad_norm": 0.10872103273868561,
"learning_rate": 3.6625e-05,
"loss": 0.7672,
"step": 535
},
{
"epoch": 0.7639068275954215,
"grad_norm": 0.10606484860181808,
"learning_rate": 3.66e-05,
"loss": 0.7491,
"step": 536
},
{
"epoch": 0.7653320269006368,
"grad_norm": 0.10515744239091873,
"learning_rate": 3.6575e-05,
"loss": 0.6722,
"step": 537
},
{
"epoch": 0.7667572262058522,
"grad_norm": 0.11239241063594818,
"learning_rate": 3.655e-05,
"loss": 0.845,
"step": 538
},
{
"epoch": 0.7681824255110675,
"grad_norm": 0.1278771311044693,
"learning_rate": 3.652500000000001e-05,
"loss": 0.915,
"step": 539
},
{
"epoch": 0.7696076248162829,
"grad_norm": 0.10387738794088364,
"learning_rate": 3.65e-05,
"loss": 0.6657,
"step": 540
},
{
"epoch": 0.7710328241214982,
"grad_norm": 0.12028856575489044,
"learning_rate": 3.6475000000000006e-05,
"loss": 0.8984,
"step": 541
},
{
"epoch": 0.7724580234267135,
"grad_norm": 0.10866948217153549,
"learning_rate": 3.645e-05,
"loss": 0.6828,
"step": 542
},
{
"epoch": 0.7738832227319289,
"grad_norm": 0.10894352942705154,
"learning_rate": 3.6425000000000004e-05,
"loss": 0.7138,
"step": 543
},
{
"epoch": 0.7753084220371442,
"grad_norm": 0.31375852227211,
"learning_rate": 3.6400000000000004e-05,
"loss": 0.7552,
"step": 544
},
{
"epoch": 0.7767336213423596,
"grad_norm": 0.10994130373001099,
"learning_rate": 3.6375e-05,
"loss": 0.7705,
"step": 545
},
{
"epoch": 0.7781588206475749,
"grad_norm": 0.1167830377817154,
"learning_rate": 3.635e-05,
"loss": 0.7476,
"step": 546
},
{
"epoch": 0.7795840199527903,
"grad_norm": 0.11294935643672943,
"learning_rate": 3.6325e-05,
"loss": 0.7731,
"step": 547
},
{
"epoch": 0.7810092192580056,
"grad_norm": 0.10201647877693176,
"learning_rate": 3.63e-05,
"loss": 0.7036,
"step": 548
},
{
"epoch": 0.7824344185632209,
"grad_norm": 0.11800608783960342,
"learning_rate": 3.6275e-05,
"loss": 0.7678,
"step": 549
},
{
"epoch": 0.7838596178684363,
"grad_norm": 0.11095620691776276,
"learning_rate": 3.625e-05,
"loss": 0.8129,
"step": 550
},
{
"epoch": 0.7852848171736516,
"grad_norm": 0.11530263721942902,
"learning_rate": 3.6225000000000006e-05,
"loss": 0.7668,
"step": 551
},
{
"epoch": 0.786710016478867,
"grad_norm": 0.10162388533353806,
"learning_rate": 3.62e-05,
"loss": 0.6534,
"step": 552
},
{
"epoch": 0.7881352157840823,
"grad_norm": 0.1128762885928154,
"learning_rate": 3.6175000000000005e-05,
"loss": 0.816,
"step": 553
},
{
"epoch": 0.7895604150892976,
"grad_norm": 0.11896790564060211,
"learning_rate": 3.615e-05,
"loss": 0.8348,
"step": 554
},
{
"epoch": 0.790985614394513,
"grad_norm": 0.11223790794610977,
"learning_rate": 3.6125000000000004e-05,
"loss": 0.8242,
"step": 555
},
{
"epoch": 0.7924108136997283,
"grad_norm": 0.10786377638578415,
"learning_rate": 3.61e-05,
"loss": 0.799,
"step": 556
},
{
"epoch": 0.7938360130049437,
"grad_norm": 0.10580893605947495,
"learning_rate": 3.6075e-05,
"loss": 0.7987,
"step": 557
},
{
"epoch": 0.795261212310159,
"grad_norm": 0.1106286495923996,
"learning_rate": 3.605e-05,
"loss": 0.7271,
"step": 558
},
{
"epoch": 0.7966864116153743,
"grad_norm": 0.11526136845350266,
"learning_rate": 3.6025e-05,
"loss": 0.7864,
"step": 559
},
{
"epoch": 0.7981116109205897,
"grad_norm": 0.1108018010854721,
"learning_rate": 3.6e-05,
"loss": 0.7877,
"step": 560
},
{
"epoch": 0.799536810225805,
"grad_norm": 0.10613119602203369,
"learning_rate": 3.5975e-05,
"loss": 0.764,
"step": 561
},
{
"epoch": 0.8009620095310204,
"grad_norm": 0.1147136315703392,
"learning_rate": 3.595e-05,
"loss": 0.7908,
"step": 562
},
{
"epoch": 0.8023872088362357,
"grad_norm": 0.11605832725763321,
"learning_rate": 3.5925000000000006e-05,
"loss": 0.8167,
"step": 563
},
{
"epoch": 0.803812408141451,
"grad_norm": 0.11185579746961594,
"learning_rate": 3.59e-05,
"loss": 0.7623,
"step": 564
},
{
"epoch": 0.8052376074466664,
"grad_norm": 0.1086210310459137,
"learning_rate": 3.5875000000000005e-05,
"loss": 0.7476,
"step": 565
},
{
"epoch": 0.8066628067518817,
"grad_norm": 0.10728700459003448,
"learning_rate": 3.585e-05,
"loss": 0.7987,
"step": 566
},
{
"epoch": 0.8080880060570971,
"grad_norm": 0.10938131809234619,
"learning_rate": 3.5825000000000003e-05,
"loss": 0.7188,
"step": 567
},
{
"epoch": 0.8095132053623124,
"grad_norm": 0.11207251995801926,
"learning_rate": 3.58e-05,
"loss": 0.8095,
"step": 568
},
{
"epoch": 0.8109384046675278,
"grad_norm": 0.10773014277219772,
"learning_rate": 3.5775e-05,
"loss": 0.8274,
"step": 569
},
{
"epoch": 0.8123636039727431,
"grad_norm": 0.10718001425266266,
"learning_rate": 3.575e-05,
"loss": 0.6876,
"step": 570
},
{
"epoch": 0.8137888032779584,
"grad_norm": 0.11211090534925461,
"learning_rate": 3.5725e-05,
"loss": 0.755,
"step": 571
},
{
"epoch": 0.8152140025831738,
"grad_norm": 0.11154986172914505,
"learning_rate": 3.57e-05,
"loss": 0.72,
"step": 572
},
{
"epoch": 0.8166392018883891,
"grad_norm": 0.11845521628856659,
"learning_rate": 3.5675e-05,
"loss": 0.7529,
"step": 573
},
{
"epoch": 0.8180644011936045,
"grad_norm": 0.11100569367408752,
"learning_rate": 3.565e-05,
"loss": 0.698,
"step": 574
},
{
"epoch": 0.8194896004988198,
"grad_norm": 0.12163333594799042,
"learning_rate": 3.5625000000000005e-05,
"loss": 0.8128,
"step": 575
},
{
"epoch": 0.8209147998040351,
"grad_norm": 0.10685774683952332,
"learning_rate": 3.56e-05,
"loss": 0.6885,
"step": 576
},
{
"epoch": 0.8223399991092505,
"grad_norm": 0.11051548272371292,
"learning_rate": 3.5575000000000004e-05,
"loss": 0.8355,
"step": 577
},
{
"epoch": 0.8237651984144658,
"grad_norm": 0.11231344938278198,
"learning_rate": 3.555e-05,
"loss": 0.8017,
"step": 578
},
{
"epoch": 0.8251903977196811,
"grad_norm": 0.10797639191150665,
"learning_rate": 3.5525e-05,
"loss": 0.758,
"step": 579
},
{
"epoch": 0.8266155970248964,
"grad_norm": 0.11136302351951599,
"learning_rate": 3.55e-05,
"loss": 0.7232,
"step": 580
},
{
"epoch": 0.8280407963301117,
"grad_norm": 0.1068771630525589,
"learning_rate": 3.5475e-05,
"loss": 0.7624,
"step": 581
},
{
"epoch": 0.8294659956353271,
"grad_norm": 0.10620054602622986,
"learning_rate": 3.545e-05,
"loss": 0.7852,
"step": 582
},
{
"epoch": 0.8308911949405424,
"grad_norm": 0.10546271502971649,
"learning_rate": 3.5425e-05,
"loss": 0.7138,
"step": 583
},
{
"epoch": 0.8323163942457578,
"grad_norm": 0.11504678428173065,
"learning_rate": 3.54e-05,
"loss": 0.8349,
"step": 584
},
{
"epoch": 0.8337415935509731,
"grad_norm": 0.11970749497413635,
"learning_rate": 3.5375e-05,
"loss": 0.8809,
"step": 585
},
{
"epoch": 0.8351667928561884,
"grad_norm": 0.11696553975343704,
"learning_rate": 3.535e-05,
"loss": 0.8067,
"step": 586
},
{
"epoch": 0.8365919921614038,
"grad_norm": 0.11569545418024063,
"learning_rate": 3.5325000000000005e-05,
"loss": 0.8548,
"step": 587
},
{
"epoch": 0.8380171914666191,
"grad_norm": 0.10500162839889526,
"learning_rate": 3.53e-05,
"loss": 0.7495,
"step": 588
},
{
"epoch": 0.8394423907718345,
"grad_norm": 0.11246982216835022,
"learning_rate": 3.5275000000000004e-05,
"loss": 0.7587,
"step": 589
},
{
"epoch": 0.8408675900770498,
"grad_norm": 0.11662692576646805,
"learning_rate": 3.525e-05,
"loss": 0.7885,
"step": 590
},
{
"epoch": 0.8422927893822652,
"grad_norm": 0.10545974224805832,
"learning_rate": 3.5225e-05,
"loss": 0.7343,
"step": 591
},
{
"epoch": 0.8437179886874805,
"grad_norm": 0.1082710549235344,
"learning_rate": 3.52e-05,
"loss": 0.7731,
"step": 592
},
{
"epoch": 0.8451431879926958,
"grad_norm": 0.11447472870349884,
"learning_rate": 3.5175e-05,
"loss": 0.8354,
"step": 593
},
{
"epoch": 0.8465683872979112,
"grad_norm": 0.11837653070688248,
"learning_rate": 3.515e-05,
"loss": 0.838,
"step": 594
},
{
"epoch": 0.8479935866031265,
"grad_norm": 0.10760863870382309,
"learning_rate": 3.5125e-05,
"loss": 0.757,
"step": 595
},
{
"epoch": 0.8494187859083419,
"grad_norm": 0.11764154583215714,
"learning_rate": 3.51e-05,
"loss": 0.7688,
"step": 596
},
{
"epoch": 0.8508439852135572,
"grad_norm": 0.11905018240213394,
"learning_rate": 3.5075000000000006e-05,
"loss": 0.8207,
"step": 597
},
{
"epoch": 0.8522691845187725,
"grad_norm": 0.11451385915279388,
"learning_rate": 3.505e-05,
"loss": 0.7987,
"step": 598
},
{
"epoch": 0.8536943838239879,
"grad_norm": 0.10468224436044693,
"learning_rate": 3.5025000000000004e-05,
"loss": 0.7852,
"step": 599
},
{
"epoch": 0.8551195831292032,
"grad_norm": 0.10961973667144775,
"learning_rate": 3.5e-05,
"loss": 0.7369,
"step": 600
},
{
"epoch": 0.8565447824344186,
"grad_norm": 0.1163216084241867,
"learning_rate": 3.4975e-05,
"loss": 0.7503,
"step": 601
},
{
"epoch": 0.8579699817396339,
"grad_norm": 0.11560174822807312,
"learning_rate": 3.495e-05,
"loss": 0.854,
"step": 602
},
{
"epoch": 0.8593951810448492,
"grad_norm": 0.11435361951589584,
"learning_rate": 3.4925e-05,
"loss": 0.7741,
"step": 603
},
{
"epoch": 0.8608203803500646,
"grad_norm": 0.11231216043233871,
"learning_rate": 3.49e-05,
"loss": 0.7751,
"step": 604
},
{
"epoch": 0.8622455796552799,
"grad_norm": 0.10653416812419891,
"learning_rate": 3.4875e-05,
"loss": 0.714,
"step": 605
},
{
"epoch": 0.8636707789604953,
"grad_norm": 0.1115972176194191,
"learning_rate": 3.485e-05,
"loss": 0.8376,
"step": 606
},
{
"epoch": 0.8650959782657106,
"grad_norm": 0.10895362496376038,
"learning_rate": 3.4825e-05,
"loss": 0.8173,
"step": 607
},
{
"epoch": 0.866521177570926,
"grad_norm": 0.1130899041891098,
"learning_rate": 3.48e-05,
"loss": 0.8056,
"step": 608
},
{
"epoch": 0.8679463768761413,
"grad_norm": 0.11149253696203232,
"learning_rate": 3.4775000000000005e-05,
"loss": 0.7673,
"step": 609
},
{
"epoch": 0.8693715761813566,
"grad_norm": 0.1033184602856636,
"learning_rate": 3.475e-05,
"loss": 0.7468,
"step": 610
},
{
"epoch": 0.870796775486572,
"grad_norm": 0.11901544779539108,
"learning_rate": 3.4725000000000004e-05,
"loss": 0.8184,
"step": 611
},
{
"epoch": 0.8722219747917873,
"grad_norm": 0.11261369287967682,
"learning_rate": 3.4699999999999996e-05,
"loss": 0.7694,
"step": 612
},
{
"epoch": 0.8736471740970027,
"grad_norm": 0.1080607995390892,
"learning_rate": 3.4675e-05,
"loss": 0.7627,
"step": 613
},
{
"epoch": 0.875072373402218,
"grad_norm": 0.11211580783128738,
"learning_rate": 3.465e-05,
"loss": 0.775,
"step": 614
},
{
"epoch": 0.8764975727074333,
"grad_norm": 0.10770720988512039,
"learning_rate": 3.4625e-05,
"loss": 0.7436,
"step": 615
},
{
"epoch": 0.8779227720126487,
"grad_norm": 0.11540824919939041,
"learning_rate": 3.46e-05,
"loss": 0.7446,
"step": 616
},
{
"epoch": 0.879347971317864,
"grad_norm": 0.11012860387563705,
"learning_rate": 3.4575e-05,
"loss": 0.765,
"step": 617
},
{
"epoch": 0.8807731706230794,
"grad_norm": 0.11205737292766571,
"learning_rate": 3.455e-05,
"loss": 0.8043,
"step": 618
},
{
"epoch": 0.8821983699282947,
"grad_norm": 0.11690792441368103,
"learning_rate": 3.4525e-05,
"loss": 0.8393,
"step": 619
},
{
"epoch": 0.88362356923351,
"grad_norm": 0.10665767639875412,
"learning_rate": 3.45e-05,
"loss": 0.7157,
"step": 620
},
{
"epoch": 0.8850487685387254,
"grad_norm": 0.11402905732393265,
"learning_rate": 3.4475000000000005e-05,
"loss": 0.7395,
"step": 621
},
{
"epoch": 0.8864739678439407,
"grad_norm": 0.1107073724269867,
"learning_rate": 3.445e-05,
"loss": 0.7858,
"step": 622
},
{
"epoch": 0.8878991671491561,
"grad_norm": 0.11748577654361725,
"learning_rate": 3.4425e-05,
"loss": 0.8875,
"step": 623
},
{
"epoch": 0.8893243664543714,
"grad_norm": 0.10924755781888962,
"learning_rate": 3.4399999999999996e-05,
"loss": 0.7681,
"step": 624
},
{
"epoch": 0.8907495657595866,
"grad_norm": 0.10647712647914886,
"learning_rate": 3.4375e-05,
"loss": 0.6527,
"step": 625
},
{
"epoch": 0.892174765064802,
"grad_norm": 0.10401409864425659,
"learning_rate": 3.435e-05,
"loss": 0.6493,
"step": 626
},
{
"epoch": 0.8935999643700173,
"grad_norm": 0.10345117002725601,
"learning_rate": 3.4325e-05,
"loss": 0.7132,
"step": 627
},
{
"epoch": 0.8950251636752327,
"grad_norm": 0.11213523894548416,
"learning_rate": 3.430000000000001e-05,
"loss": 0.7787,
"step": 628
},
{
"epoch": 0.896450362980448,
"grad_norm": 0.11001811176538467,
"learning_rate": 3.4275e-05,
"loss": 0.7112,
"step": 629
},
{
"epoch": 0.8978755622856633,
"grad_norm": 0.11377232521772385,
"learning_rate": 3.4250000000000006e-05,
"loss": 0.7399,
"step": 630
},
{
"epoch": 0.8993007615908787,
"grad_norm": 0.10903467237949371,
"learning_rate": 3.4225e-05,
"loss": 0.7131,
"step": 631
},
{
"epoch": 0.900725960896094,
"grad_norm": 0.10232018679380417,
"learning_rate": 3.4200000000000005e-05,
"loss": 0.7224,
"step": 632
},
{
"epoch": 0.9021511602013094,
"grad_norm": 0.11049452424049377,
"learning_rate": 3.4175000000000004e-05,
"loss": 0.8153,
"step": 633
},
{
"epoch": 0.9035763595065247,
"grad_norm": 0.1101723462343216,
"learning_rate": 3.415e-05,
"loss": 0.6999,
"step": 634
},
{
"epoch": 0.90500155881174,
"grad_norm": 0.10376341640949249,
"learning_rate": 3.4125e-05,
"loss": 0.7344,
"step": 635
},
{
"epoch": 0.9064267581169554,
"grad_norm": 0.10386863350868225,
"learning_rate": 3.41e-05,
"loss": 0.707,
"step": 636
},
{
"epoch": 0.9078519574221707,
"grad_norm": 0.12242279201745987,
"learning_rate": 3.4075e-05,
"loss": 0.8152,
"step": 637
},
{
"epoch": 0.9092771567273861,
"grad_norm": 0.11781612783670425,
"learning_rate": 3.405e-05,
"loss": 0.8968,
"step": 638
},
{
"epoch": 0.9107023560326014,
"grad_norm": 0.11563844978809357,
"learning_rate": 3.4025e-05,
"loss": 0.7905,
"step": 639
},
{
"epoch": 0.9121275553378168,
"grad_norm": 0.11466317623853683,
"learning_rate": 3.4000000000000007e-05,
"loss": 0.7997,
"step": 640
},
{
"epoch": 0.9135527546430321,
"grad_norm": 0.10724882036447525,
"learning_rate": 3.3975e-05,
"loss": 0.7474,
"step": 641
},
{
"epoch": 0.9149779539482474,
"grad_norm": 0.11543431878089905,
"learning_rate": 3.3950000000000005e-05,
"loss": 0.7856,
"step": 642
},
{
"epoch": 0.9164031532534628,
"grad_norm": 0.10220611095428467,
"learning_rate": 3.3925e-05,
"loss": 0.7021,
"step": 643
},
{
"epoch": 0.9178283525586781,
"grad_norm": 0.11843941360712051,
"learning_rate": 3.3900000000000004e-05,
"loss": 0.8562,
"step": 644
},
{
"epoch": 0.9192535518638935,
"grad_norm": 0.10933360457420349,
"learning_rate": 3.3875000000000003e-05,
"loss": 0.7761,
"step": 645
},
{
"epoch": 0.9206787511691088,
"grad_norm": 0.11264971643686295,
"learning_rate": 3.385e-05,
"loss": 0.7731,
"step": 646
},
{
"epoch": 0.9221039504743241,
"grad_norm": 0.10890379548072815,
"learning_rate": 3.3825e-05,
"loss": 0.6445,
"step": 647
},
{
"epoch": 0.9235291497795395,
"grad_norm": 0.11738170683383942,
"learning_rate": 3.38e-05,
"loss": 0.8711,
"step": 648
},
{
"epoch": 0.9249543490847548,
"grad_norm": 0.118171326816082,
"learning_rate": 3.3775e-05,
"loss": 0.8978,
"step": 649
},
{
"epoch": 0.9263795483899702,
"grad_norm": 0.1249963566660881,
"learning_rate": 3.375000000000001e-05,
"loss": 0.7577,
"step": 650
},
{
"epoch": 0.9278047476951855,
"grad_norm": 0.10974939912557602,
"learning_rate": 3.3725e-05,
"loss": 0.7957,
"step": 651
},
{
"epoch": 0.9292299470004008,
"grad_norm": 0.10896071046590805,
"learning_rate": 3.3700000000000006e-05,
"loss": 0.728,
"step": 652
},
{
"epoch": 0.9306551463056162,
"grad_norm": 0.10733288526535034,
"learning_rate": 3.3675e-05,
"loss": 0.7506,
"step": 653
},
{
"epoch": 0.9320803456108315,
"grad_norm": 0.11123745143413544,
"learning_rate": 3.3650000000000005e-05,
"loss": 0.7551,
"step": 654
},
{
"epoch": 0.9335055449160469,
"grad_norm": 0.11613799631595612,
"learning_rate": 3.3625000000000004e-05,
"loss": 0.8158,
"step": 655
},
{
"epoch": 0.9349307442212622,
"grad_norm": 0.102536641061306,
"learning_rate": 3.3600000000000004e-05,
"loss": 0.6836,
"step": 656
},
{
"epoch": 0.9363559435264776,
"grad_norm": 0.12000053375959396,
"learning_rate": 3.3575e-05,
"loss": 0.8521,
"step": 657
},
{
"epoch": 0.9377811428316929,
"grad_norm": 0.11152161657810211,
"learning_rate": 3.355e-05,
"loss": 0.7014,
"step": 658
},
{
"epoch": 0.9392063421369082,
"grad_norm": 0.10560782253742218,
"learning_rate": 3.3525e-05,
"loss": 0.7105,
"step": 659
},
{
"epoch": 0.9406315414421236,
"grad_norm": 0.12321106344461441,
"learning_rate": 3.35e-05,
"loss": 0.8041,
"step": 660
},
{
"epoch": 0.9420567407473389,
"grad_norm": 0.12155333906412125,
"learning_rate": 3.3475e-05,
"loss": 0.8584,
"step": 661
},
{
"epoch": 0.9434819400525543,
"grad_norm": 0.11716923862695694,
"learning_rate": 3.345000000000001e-05,
"loss": 0.7385,
"step": 662
},
{
"epoch": 0.9449071393577696,
"grad_norm": 0.11149972677230835,
"learning_rate": 3.3425e-05,
"loss": 0.7228,
"step": 663
},
{
"epoch": 0.9463323386629849,
"grad_norm": 0.11156123131513596,
"learning_rate": 3.3400000000000005e-05,
"loss": 0.7231,
"step": 664
},
{
"epoch": 0.9477575379682003,
"grad_norm": 0.1101122573018074,
"learning_rate": 3.3375e-05,
"loss": 0.7688,
"step": 665
},
{
"epoch": 0.9491827372734156,
"grad_norm": 0.12238501757383347,
"learning_rate": 3.3350000000000004e-05,
"loss": 0.86,
"step": 666
},
{
"epoch": 0.950607936578631,
"grad_norm": 0.11561581492424011,
"learning_rate": 3.3325000000000004e-05,
"loss": 0.7321,
"step": 667
},
{
"epoch": 0.9520331358838463,
"grad_norm": 0.10836911201477051,
"learning_rate": 3.33e-05,
"loss": 0.6845,
"step": 668
},
{
"epoch": 0.9534583351890616,
"grad_norm": 0.1002361848950386,
"learning_rate": 3.3275e-05,
"loss": 0.6176,
"step": 669
},
{
"epoch": 0.954883534494277,
"grad_norm": 0.11439014226198196,
"learning_rate": 3.325e-05,
"loss": 0.7235,
"step": 670
},
{
"epoch": 0.9563087337994923,
"grad_norm": 0.10837440192699432,
"learning_rate": 3.3225e-05,
"loss": 0.6995,
"step": 671
},
{
"epoch": 0.9577339331047076,
"grad_norm": 0.11277542263269424,
"learning_rate": 3.32e-05,
"loss": 0.8286,
"step": 672
},
{
"epoch": 0.9591591324099229,
"grad_norm": 0.1216956079006195,
"learning_rate": 3.3175e-05,
"loss": 0.9266,
"step": 673
},
{
"epoch": 0.9605843317151382,
"grad_norm": 0.11107106506824493,
"learning_rate": 3.3150000000000006e-05,
"loss": 0.7682,
"step": 674
},
{
"epoch": 0.9620095310203536,
"grad_norm": 0.11087433993816376,
"learning_rate": 3.3125e-05,
"loss": 0.7306,
"step": 675
},
{
"epoch": 0.9634347303255689,
"grad_norm": 0.11477134376764297,
"learning_rate": 3.3100000000000005e-05,
"loss": 0.8074,
"step": 676
},
{
"epoch": 0.9648599296307843,
"grad_norm": 0.10591534525156021,
"learning_rate": 3.3075e-05,
"loss": 0.7222,
"step": 677
},
{
"epoch": 0.9662851289359996,
"grad_norm": 0.1108870878815651,
"learning_rate": 3.3050000000000004e-05,
"loss": 0.7333,
"step": 678
},
{
"epoch": 0.967710328241215,
"grad_norm": 0.11072699725627899,
"learning_rate": 3.3025e-05,
"loss": 0.6947,
"step": 679
},
{
"epoch": 0.9691355275464303,
"grad_norm": 0.12149933725595474,
"learning_rate": 3.3e-05,
"loss": 0.9078,
"step": 680
},
{
"epoch": 0.9705607268516456,
"grad_norm": 0.10815934836864471,
"learning_rate": 3.2975e-05,
"loss": 0.7564,
"step": 681
},
{
"epoch": 0.971985926156861,
"grad_norm": 0.10558459162712097,
"learning_rate": 3.295e-05,
"loss": 0.7145,
"step": 682
},
{
"epoch": 0.9734111254620763,
"grad_norm": 0.10861023515462875,
"learning_rate": 3.2925e-05,
"loss": 0.7435,
"step": 683
},
{
"epoch": 0.9748363247672917,
"grad_norm": 0.11413640528917313,
"learning_rate": 3.29e-05,
"loss": 0.8248,
"step": 684
},
{
"epoch": 0.976261524072507,
"grad_norm": 0.11392252892255783,
"learning_rate": 3.2875e-05,
"loss": 0.8574,
"step": 685
},
{
"epoch": 0.9776867233777223,
"grad_norm": 0.11165247112512589,
"learning_rate": 3.2850000000000006e-05,
"loss": 0.7688,
"step": 686
},
{
"epoch": 0.9791119226829377,
"grad_norm": 0.11283797025680542,
"learning_rate": 3.2825e-05,
"loss": 0.7432,
"step": 687
},
{
"epoch": 0.980537121988153,
"grad_norm": 0.10223109275102615,
"learning_rate": 3.2800000000000004e-05,
"loss": 0.6856,
"step": 688
},
{
"epoch": 0.9819623212933684,
"grad_norm": 0.11328654736280441,
"learning_rate": 3.2775e-05,
"loss": 0.7608,
"step": 689
},
{
"epoch": 0.9833875205985837,
"grad_norm": 0.11116191744804382,
"learning_rate": 3.275e-05,
"loss": 0.7427,
"step": 690
},
{
"epoch": 0.984812719903799,
"grad_norm": 0.10710608214139938,
"learning_rate": 3.2725e-05,
"loss": 0.6568,
"step": 691
},
{
"epoch": 0.9862379192090144,
"grad_norm": 0.12032485008239746,
"learning_rate": 3.27e-05,
"loss": 0.8994,
"step": 692
},
{
"epoch": 0.9876631185142297,
"grad_norm": 0.11141428351402283,
"learning_rate": 3.2675e-05,
"loss": 0.8004,
"step": 693
},
{
"epoch": 0.9890883178194451,
"grad_norm": 0.12085778266191483,
"learning_rate": 3.265e-05,
"loss": 0.8975,
"step": 694
},
{
"epoch": 0.9905135171246604,
"grad_norm": 0.12465686351060867,
"learning_rate": 3.2625e-05,
"loss": 0.8386,
"step": 695
},
{
"epoch": 0.9919387164298757,
"grad_norm": 0.10885719954967499,
"learning_rate": 3.26e-05,
"loss": 0.7007,
"step": 696
},
{
"epoch": 0.9933639157350911,
"grad_norm": 0.1127619817852974,
"learning_rate": 3.2575e-05,
"loss": 0.8693,
"step": 697
},
{
"epoch": 0.9947891150403064,
"grad_norm": 0.09418598562479019,
"learning_rate": 3.2550000000000005e-05,
"loss": 0.5662,
"step": 698
},
{
"epoch": 0.9962143143455218,
"grad_norm": 0.10917676985263824,
"learning_rate": 3.2525e-05,
"loss": 0.8167,
"step": 699
},
{
"epoch": 0.9976395136507371,
"grad_norm": 0.10221868008375168,
"learning_rate": 3.2500000000000004e-05,
"loss": 0.6722,
"step": 700
},
{
"epoch": 0.9990647129559524,
"grad_norm": 0.10847768932580948,
"learning_rate": 3.2474999999999997e-05,
"loss": 0.7407,
"step": 701
},
{
"epoch": 1.0004899122611677,
"grad_norm": 0.12031456083059311,
"learning_rate": 3.245e-05,
"loss": 0.8321,
"step": 702
},
{
"epoch": 1.0019151115663831,
"grad_norm": 0.1099601462483406,
"learning_rate": 3.2425e-05,
"loss": 0.7682,
"step": 703
},
{
"epoch": 1.0033403108715984,
"grad_norm": 0.17697882652282715,
"learning_rate": 3.24e-05,
"loss": 0.7248,
"step": 704
},
{
"epoch": 1.0047655101768138,
"grad_norm": 0.10929471254348755,
"learning_rate": 3.2375e-05,
"loss": 0.7333,
"step": 705
},
{
"epoch": 1.006190709482029,
"grad_norm": 0.11158668249845505,
"learning_rate": 3.235e-05,
"loss": 0.7985,
"step": 706
},
{
"epoch": 1.0076159087872445,
"grad_norm": 0.10497406125068665,
"learning_rate": 3.2325e-05,
"loss": 0.7726,
"step": 707
},
{
"epoch": 1.0090411080924597,
"grad_norm": 0.10328636318445206,
"learning_rate": 3.2300000000000006e-05,
"loss": 0.6774,
"step": 708
},
{
"epoch": 1.0104663073976752,
"grad_norm": 0.10857168585062027,
"learning_rate": 3.2275e-05,
"loss": 0.6608,
"step": 709
},
{
"epoch": 1.0118915067028904,
"grad_norm": 0.11575551331043243,
"learning_rate": 3.2250000000000005e-05,
"loss": 0.7498,
"step": 710
},
{
"epoch": 1.0133167060081059,
"grad_norm": 0.11768371611833572,
"learning_rate": 3.2225e-05,
"loss": 0.7972,
"step": 711
},
{
"epoch": 1.014741905313321,
"grad_norm": 0.12007362395524979,
"learning_rate": 3.2200000000000003e-05,
"loss": 0.7714,
"step": 712
},
{
"epoch": 1.0161671046185365,
"grad_norm": 0.10321053117513657,
"learning_rate": 3.2175e-05,
"loss": 0.6626,
"step": 713
},
{
"epoch": 1.0175923039237518,
"grad_norm": 0.1099637821316719,
"learning_rate": 3.215e-05,
"loss": 0.7879,
"step": 714
},
{
"epoch": 1.0190175032289672,
"grad_norm": 0.13048391044139862,
"learning_rate": 3.2125e-05,
"loss": 0.8307,
"step": 715
},
{
"epoch": 1.0204427025341825,
"grad_norm": 0.10304483026266098,
"learning_rate": 3.21e-05,
"loss": 0.7024,
"step": 716
},
{
"epoch": 1.021867901839398,
"grad_norm": 0.11871415376663208,
"learning_rate": 3.2075e-05,
"loss": 0.8031,
"step": 717
},
{
"epoch": 1.0232931011446131,
"grad_norm": 0.10770536959171295,
"learning_rate": 3.205e-05,
"loss": 0.6468,
"step": 718
},
{
"epoch": 1.0247183004498286,
"grad_norm": 0.10824979096651077,
"learning_rate": 3.2025e-05,
"loss": 0.7304,
"step": 719
},
{
"epoch": 1.0261434997550438,
"grad_norm": 0.11491674929857254,
"learning_rate": 3.2000000000000005e-05,
"loss": 0.7486,
"step": 720
},
{
"epoch": 1.0275686990602593,
"grad_norm": 0.10757166892290115,
"learning_rate": 3.1975e-05,
"loss": 0.7896,
"step": 721
},
{
"epoch": 1.0289938983654745,
"grad_norm": 0.10940910875797272,
"learning_rate": 3.1950000000000004e-05,
"loss": 0.7404,
"step": 722
},
{
"epoch": 1.03041909767069,
"grad_norm": 0.11575860530138016,
"learning_rate": 3.1925e-05,
"loss": 0.8692,
"step": 723
},
{
"epoch": 1.0318442969759052,
"grad_norm": 0.1145920678973198,
"learning_rate": 3.19e-05,
"loss": 0.7723,
"step": 724
},
{
"epoch": 1.0332694962811206,
"grad_norm": 0.10641290992498398,
"learning_rate": 3.1875e-05,
"loss": 0.7916,
"step": 725
},
{
"epoch": 1.0346946955863359,
"grad_norm": 0.124814473092556,
"learning_rate": 3.185e-05,
"loss": 0.8979,
"step": 726
},
{
"epoch": 1.0361198948915513,
"grad_norm": 0.11630263179540634,
"learning_rate": 3.1825e-05,
"loss": 0.8516,
"step": 727
},
{
"epoch": 1.0375450941967665,
"grad_norm": 0.11225227266550064,
"learning_rate": 3.18e-05,
"loss": 0.7762,
"step": 728
},
{
"epoch": 1.038970293501982,
"grad_norm": 0.1091301217675209,
"learning_rate": 3.1775e-05,
"loss": 0.7651,
"step": 729
},
{
"epoch": 1.0403954928071972,
"grad_norm": 0.11405711621046066,
"learning_rate": 3.175e-05,
"loss": 0.7843,
"step": 730
},
{
"epoch": 1.0418206921124127,
"grad_norm": 0.10882287472486496,
"learning_rate": 3.1725e-05,
"loss": 0.7975,
"step": 731
},
{
"epoch": 1.043245891417628,
"grad_norm": 0.11969635635614395,
"learning_rate": 3.1700000000000005e-05,
"loss": 0.7813,
"step": 732
},
{
"epoch": 1.0446710907228434,
"grad_norm": 0.11477787047624588,
"learning_rate": 3.1675e-05,
"loss": 0.751,
"step": 733
},
{
"epoch": 1.0460962900280586,
"grad_norm": 0.10773949325084686,
"learning_rate": 3.1650000000000004e-05,
"loss": 0.7,
"step": 734
},
{
"epoch": 1.047521489333274,
"grad_norm": 0.1150415688753128,
"learning_rate": 3.1624999999999996e-05,
"loss": 0.8051,
"step": 735
},
{
"epoch": 1.0489466886384893,
"grad_norm": 0.11701670289039612,
"learning_rate": 3.16e-05,
"loss": 0.8287,
"step": 736
},
{
"epoch": 1.0503718879437047,
"grad_norm": 0.11323084682226181,
"learning_rate": 3.1575e-05,
"loss": 0.7829,
"step": 737
},
{
"epoch": 1.05179708724892,
"grad_norm": 0.10788066685199738,
"learning_rate": 3.155e-05,
"loss": 0.7178,
"step": 738
},
{
"epoch": 1.0532222865541354,
"grad_norm": 0.11535206437110901,
"learning_rate": 3.1525e-05,
"loss": 0.8406,
"step": 739
},
{
"epoch": 1.0546474858593506,
"grad_norm": 0.11271157115697861,
"learning_rate": 3.15e-05,
"loss": 0.7658,
"step": 740
},
{
"epoch": 1.0560726851645659,
"grad_norm": 0.11143078655004501,
"learning_rate": 3.1475e-05,
"loss": 0.745,
"step": 741
},
{
"epoch": 1.0574978844697813,
"grad_norm": 0.1161406859755516,
"learning_rate": 3.145e-05,
"loss": 0.8459,
"step": 742
},
{
"epoch": 1.0589230837749966,
"grad_norm": 0.1163041815161705,
"learning_rate": 3.1425e-05,
"loss": 0.8073,
"step": 743
},
{
"epoch": 1.060348283080212,
"grad_norm": 0.10760021954774857,
"learning_rate": 3.1400000000000004e-05,
"loss": 0.7199,
"step": 744
},
{
"epoch": 1.0617734823854272,
"grad_norm": 0.1155887320637703,
"learning_rate": 3.1375e-05,
"loss": 0.7956,
"step": 745
},
{
"epoch": 1.0631986816906427,
"grad_norm": 0.10809388011693954,
"learning_rate": 3.135e-05,
"loss": 0.7268,
"step": 746
},
{
"epoch": 1.064623880995858,
"grad_norm": 0.11383721977472305,
"learning_rate": 3.1324999999999996e-05,
"loss": 0.7623,
"step": 747
},
{
"epoch": 1.0660490803010734,
"grad_norm": 0.11876889318227768,
"learning_rate": 3.13e-05,
"loss": 0.7695,
"step": 748
},
{
"epoch": 1.0674742796062886,
"grad_norm": 0.1192469596862793,
"learning_rate": 3.1275e-05,
"loss": 0.7625,
"step": 749
},
{
"epoch": 1.068899478911504,
"grad_norm": 0.11618269979953766,
"learning_rate": 3.125e-05,
"loss": 0.8504,
"step": 750
},
{
"epoch": 1.0703246782167193,
"grad_norm": 0.10920976847410202,
"learning_rate": 3.122500000000001e-05,
"loss": 0.6496,
"step": 751
},
{
"epoch": 1.0717498775219347,
"grad_norm": 0.1146429181098938,
"learning_rate": 3.12e-05,
"loss": 0.794,
"step": 752
},
{
"epoch": 1.07317507682715,
"grad_norm": 0.11848996579647064,
"learning_rate": 3.1175000000000006e-05,
"loss": 0.7534,
"step": 753
},
{
"epoch": 1.0746002761323654,
"grad_norm": 0.10744237899780273,
"learning_rate": 3.115e-05,
"loss": 0.6302,
"step": 754
},
{
"epoch": 1.0760254754375806,
"grad_norm": 0.11214388906955719,
"learning_rate": 3.1125000000000004e-05,
"loss": 0.6914,
"step": 755
},
{
"epoch": 1.077450674742796,
"grad_norm": 0.12251964211463928,
"learning_rate": 3.1100000000000004e-05,
"loss": 0.8505,
"step": 756
},
{
"epoch": 1.0788758740480113,
"grad_norm": 0.11381114274263382,
"learning_rate": 3.1075e-05,
"loss": 0.8019,
"step": 757
},
{
"epoch": 1.0803010733532268,
"grad_norm": 0.11258359998464584,
"learning_rate": 3.105e-05,
"loss": 0.7696,
"step": 758
},
{
"epoch": 1.081726272658442,
"grad_norm": 0.1243850365281105,
"learning_rate": 3.1025e-05,
"loss": 0.8047,
"step": 759
},
{
"epoch": 1.0831514719636575,
"grad_norm": 0.11022070795297623,
"learning_rate": 3.1e-05,
"loss": 0.7921,
"step": 760
},
{
"epoch": 1.0845766712688727,
"grad_norm": 0.120261549949646,
"learning_rate": 3.0975e-05,
"loss": 0.8719,
"step": 761
},
{
"epoch": 1.0860018705740881,
"grad_norm": 0.11760720610618591,
"learning_rate": 3.095e-05,
"loss": 0.8081,
"step": 762
},
{
"epoch": 1.0874270698793034,
"grad_norm": 0.11106765270233154,
"learning_rate": 3.0925000000000006e-05,
"loss": 0.7001,
"step": 763
},
{
"epoch": 1.0888522691845188,
"grad_norm": 0.11390947550535202,
"learning_rate": 3.09e-05,
"loss": 0.845,
"step": 764
},
{
"epoch": 1.090277468489734,
"grad_norm": 0.11912084370851517,
"learning_rate": 3.0875000000000005e-05,
"loss": 0.8996,
"step": 765
},
{
"epoch": 1.0917026677949495,
"grad_norm": 0.1158471331000328,
"learning_rate": 3.0850000000000004e-05,
"loss": 0.8087,
"step": 766
},
{
"epoch": 1.0931278671001647,
"grad_norm": 0.11678409576416016,
"learning_rate": 3.0825000000000004e-05,
"loss": 0.8629,
"step": 767
},
{
"epoch": 1.0945530664053802,
"grad_norm": 0.108505018055439,
"learning_rate": 3.08e-05,
"loss": 0.7188,
"step": 768
},
{
"epoch": 1.0959782657105954,
"grad_norm": 0.11528582870960236,
"learning_rate": 3.0775e-05,
"loss": 0.7914,
"step": 769
},
{
"epoch": 1.0974034650158109,
"grad_norm": 0.10161586850881577,
"learning_rate": 3.075e-05,
"loss": 0.6522,
"step": 770
},
{
"epoch": 1.098828664321026,
"grad_norm": 0.11312681436538696,
"learning_rate": 3.0725e-05,
"loss": 0.7733,
"step": 771
},
{
"epoch": 1.1002538636262416,
"grad_norm": 0.11256951838731766,
"learning_rate": 3.07e-05,
"loss": 0.8176,
"step": 772
},
{
"epoch": 1.1016790629314568,
"grad_norm": 0.10952552407979965,
"learning_rate": 3.067500000000001e-05,
"loss": 0.7184,
"step": 773
},
{
"epoch": 1.1031042622366722,
"grad_norm": 0.11778173595666885,
"learning_rate": 3.065e-05,
"loss": 0.7988,
"step": 774
},
{
"epoch": 1.1045294615418875,
"grad_norm": 0.10901171714067459,
"learning_rate": 3.0625000000000006e-05,
"loss": 0.7352,
"step": 775
},
{
"epoch": 1.105954660847103,
"grad_norm": 0.11319074779748917,
"learning_rate": 3.06e-05,
"loss": 0.7592,
"step": 776
},
{
"epoch": 1.1073798601523182,
"grad_norm": 0.12140481173992157,
"learning_rate": 3.0575000000000005e-05,
"loss": 0.8724,
"step": 777
},
{
"epoch": 1.1088050594575336,
"grad_norm": 0.1316307634115219,
"learning_rate": 3.0550000000000004e-05,
"loss": 0.8713,
"step": 778
},
{
"epoch": 1.1102302587627488,
"grad_norm": 0.11967484652996063,
"learning_rate": 3.0525e-05,
"loss": 0.889,
"step": 779
},
{
"epoch": 1.1116554580679643,
"grad_norm": 0.11505062878131866,
"learning_rate": 3.05e-05,
"loss": 0.7637,
"step": 780
},
{
"epoch": 1.1130806573731795,
"grad_norm": 0.11000332981348038,
"learning_rate": 3.0475000000000002e-05,
"loss": 0.6761,
"step": 781
},
{
"epoch": 1.114505856678395,
"grad_norm": 0.11521769315004349,
"learning_rate": 3.045e-05,
"loss": 0.8158,
"step": 782
},
{
"epoch": 1.1159310559836102,
"grad_norm": 0.12122384458780289,
"learning_rate": 3.0425000000000004e-05,
"loss": 0.8666,
"step": 783
},
{
"epoch": 1.1173562552888257,
"grad_norm": 0.10664860159158707,
"learning_rate": 3.04e-05,
"loss": 0.7174,
"step": 784
},
{
"epoch": 1.1187814545940409,
"grad_norm": 0.10680918395519257,
"learning_rate": 3.0375000000000003e-05,
"loss": 0.6657,
"step": 785
},
{
"epoch": 1.120206653899256,
"grad_norm": 0.11731777340173721,
"learning_rate": 3.035e-05,
"loss": 0.8031,
"step": 786
},
{
"epoch": 1.1216318532044716,
"grad_norm": 0.111178919672966,
"learning_rate": 3.0325000000000002e-05,
"loss": 0.6885,
"step": 787
},
{
"epoch": 1.123057052509687,
"grad_norm": 0.10737734287977219,
"learning_rate": 3.03e-05,
"loss": 0.7228,
"step": 788
},
{
"epoch": 1.1244822518149022,
"grad_norm": 0.10785820335149765,
"learning_rate": 3.0275000000000004e-05,
"loss": 0.7011,
"step": 789
},
{
"epoch": 1.1259074511201175,
"grad_norm": 0.11544083803892136,
"learning_rate": 3.025e-05,
"loss": 0.711,
"step": 790
},
{
"epoch": 1.127332650425333,
"grad_norm": 0.11879689246416092,
"learning_rate": 3.0225000000000003e-05,
"loss": 0.8618,
"step": 791
},
{
"epoch": 1.1287578497305484,
"grad_norm": 0.11350908875465393,
"learning_rate": 3.02e-05,
"loss": 0.7603,
"step": 792
},
{
"epoch": 1.1301830490357636,
"grad_norm": 0.11244130879640579,
"learning_rate": 3.0175e-05,
"loss": 0.8373,
"step": 793
},
{
"epoch": 1.1316082483409788,
"grad_norm": 0.1183832585811615,
"learning_rate": 3.015e-05,
"loss": 0.7033,
"step": 794
},
{
"epoch": 1.1330334476461943,
"grad_norm": 0.11027313768863678,
"learning_rate": 3.0125000000000004e-05,
"loss": 0.6558,
"step": 795
},
{
"epoch": 1.1344586469514095,
"grad_norm": 0.10848906636238098,
"learning_rate": 3.01e-05,
"loss": 0.7403,
"step": 796
},
{
"epoch": 1.135883846256625,
"grad_norm": 0.1231088861823082,
"learning_rate": 3.0075000000000003e-05,
"loss": 0.8309,
"step": 797
},
{
"epoch": 1.1373090455618402,
"grad_norm": 0.1170823872089386,
"learning_rate": 3.0050000000000002e-05,
"loss": 0.8275,
"step": 798
},
{
"epoch": 1.1387342448670557,
"grad_norm": 0.10994869470596313,
"learning_rate": 3.0025000000000005e-05,
"loss": 0.7709,
"step": 799
},
{
"epoch": 1.1401594441722709,
"grad_norm": 0.10876326262950897,
"learning_rate": 3e-05,
"loss": 0.7561,
"step": 800
},
{
"epoch": 1.1415846434774863,
"grad_norm": 0.1107611432671547,
"learning_rate": 2.9975000000000004e-05,
"loss": 0.7594,
"step": 801
},
{
"epoch": 1.1430098427827016,
"grad_norm": 0.11292228102684021,
"learning_rate": 2.995e-05,
"loss": 0.723,
"step": 802
},
{
"epoch": 1.144435042087917,
"grad_norm": 0.11022774875164032,
"learning_rate": 2.9925000000000002e-05,
"loss": 0.718,
"step": 803
},
{
"epoch": 1.1458602413931323,
"grad_norm": 0.12020154297351837,
"learning_rate": 2.9900000000000002e-05,
"loss": 0.8193,
"step": 804
},
{
"epoch": 1.1472854406983477,
"grad_norm": 0.11510249227285385,
"learning_rate": 2.9875000000000004e-05,
"loss": 0.7877,
"step": 805
},
{
"epoch": 1.148710640003563,
"grad_norm": 0.11864664405584335,
"learning_rate": 2.985e-05,
"loss": 0.8008,
"step": 806
},
{
"epoch": 1.1501358393087784,
"grad_norm": 0.12587730586528778,
"learning_rate": 2.9825000000000003e-05,
"loss": 0.9608,
"step": 807
},
{
"epoch": 1.1515610386139936,
"grad_norm": 0.11417608708143234,
"learning_rate": 2.98e-05,
"loss": 0.7897,
"step": 808
},
{
"epoch": 1.152986237919209,
"grad_norm": 0.11821332573890686,
"learning_rate": 2.9775000000000002e-05,
"loss": 0.7669,
"step": 809
},
{
"epoch": 1.1544114372244243,
"grad_norm": 0.11195316165685654,
"learning_rate": 2.975e-05,
"loss": 0.7052,
"step": 810
},
{
"epoch": 1.1558366365296397,
"grad_norm": 0.11873237043619156,
"learning_rate": 2.9725000000000004e-05,
"loss": 0.7784,
"step": 811
},
{
"epoch": 1.157261835834855,
"grad_norm": 0.11098534613847733,
"learning_rate": 2.97e-05,
"loss": 0.7087,
"step": 812
},
{
"epoch": 1.1586870351400704,
"grad_norm": 0.11937931180000305,
"learning_rate": 2.9675000000000003e-05,
"loss": 0.6998,
"step": 813
},
{
"epoch": 1.1601122344452857,
"grad_norm": 0.111545130610466,
"learning_rate": 2.965e-05,
"loss": 0.8013,
"step": 814
},
{
"epoch": 1.1615374337505011,
"grad_norm": 0.11358655244112015,
"learning_rate": 2.9625000000000002e-05,
"loss": 0.7807,
"step": 815
},
{
"epoch": 1.1629626330557163,
"grad_norm": 0.12042693048715591,
"learning_rate": 2.96e-05,
"loss": 0.8347,
"step": 816
},
{
"epoch": 1.1643878323609318,
"grad_norm": 0.11781366914510727,
"learning_rate": 2.9575000000000004e-05,
"loss": 0.806,
"step": 817
},
{
"epoch": 1.165813031666147,
"grad_norm": 0.11442884802818298,
"learning_rate": 2.955e-05,
"loss": 0.7579,
"step": 818
},
{
"epoch": 1.1672382309713625,
"grad_norm": 0.11311296373605728,
"learning_rate": 2.9525000000000003e-05,
"loss": 0.693,
"step": 819
},
{
"epoch": 1.1686634302765777,
"grad_norm": 0.11555072665214539,
"learning_rate": 2.95e-05,
"loss": 0.6646,
"step": 820
},
{
"epoch": 1.1700886295817932,
"grad_norm": 0.12172921001911163,
"learning_rate": 2.9475e-05,
"loss": 0.8053,
"step": 821
},
{
"epoch": 1.1715138288870084,
"grad_norm": 0.12780840694904327,
"learning_rate": 2.945e-05,
"loss": 0.8148,
"step": 822
},
{
"epoch": 1.1729390281922238,
"grad_norm": 0.11470324546098709,
"learning_rate": 2.9425000000000004e-05,
"loss": 0.7265,
"step": 823
},
{
"epoch": 1.174364227497439,
"grad_norm": 0.12027261406183243,
"learning_rate": 2.94e-05,
"loss": 0.833,
"step": 824
},
{
"epoch": 1.1757894268026545,
"grad_norm": 0.11553133279085159,
"learning_rate": 2.9375000000000003e-05,
"loss": 0.7802,
"step": 825
},
{
"epoch": 1.1772146261078698,
"grad_norm": 0.1147785484790802,
"learning_rate": 2.935e-05,
"loss": 0.7044,
"step": 826
},
{
"epoch": 1.1786398254130852,
"grad_norm": 0.11770600825548172,
"learning_rate": 2.9325e-05,
"loss": 0.7862,
"step": 827
},
{
"epoch": 1.1800650247183004,
"grad_norm": 0.11175667494535446,
"learning_rate": 2.93e-05,
"loss": 0.6743,
"step": 828
},
{
"epoch": 1.181490224023516,
"grad_norm": 0.10811686515808105,
"learning_rate": 2.9275000000000003e-05,
"loss": 0.7471,
"step": 829
},
{
"epoch": 1.1829154233287311,
"grad_norm": 0.11273656040430069,
"learning_rate": 2.925e-05,
"loss": 0.7662,
"step": 830
},
{
"epoch": 1.1843406226339463,
"grad_norm": 0.10511904954910278,
"learning_rate": 2.9225000000000002e-05,
"loss": 0.7025,
"step": 831
},
{
"epoch": 1.1857658219391618,
"grad_norm": 0.11568602174520493,
"learning_rate": 2.9199999999999998e-05,
"loss": 0.7975,
"step": 832
},
{
"epoch": 1.1871910212443773,
"grad_norm": 0.10936152189970016,
"learning_rate": 2.9175e-05,
"loss": 0.6815,
"step": 833
},
{
"epoch": 1.1886162205495925,
"grad_norm": 0.10992341488599777,
"learning_rate": 2.915e-05,
"loss": 0.7014,
"step": 834
},
{
"epoch": 1.1900414198548077,
"grad_norm": 0.1112779974937439,
"learning_rate": 2.9125000000000003e-05,
"loss": 0.747,
"step": 835
},
{
"epoch": 1.1914666191600232,
"grad_norm": 0.1107979565858841,
"learning_rate": 2.91e-05,
"loss": 0.755,
"step": 836
},
{
"epoch": 1.1928918184652386,
"grad_norm": 0.11044613271951675,
"learning_rate": 2.9075000000000002e-05,
"loss": 0.8332,
"step": 837
},
{
"epoch": 1.1943170177704538,
"grad_norm": 0.12301047891378403,
"learning_rate": 2.9049999999999998e-05,
"loss": 0.8696,
"step": 838
},
{
"epoch": 1.195742217075669,
"grad_norm": 0.12159786373376846,
"learning_rate": 2.9025e-05,
"loss": 0.8202,
"step": 839
},
{
"epoch": 1.1971674163808845,
"grad_norm": 0.11098175495862961,
"learning_rate": 2.9e-05,
"loss": 0.7643,
"step": 840
},
{
"epoch": 1.1985926156860998,
"grad_norm": 0.11090519279241562,
"learning_rate": 2.8975000000000003e-05,
"loss": 0.7344,
"step": 841
},
{
"epoch": 1.2000178149913152,
"grad_norm": 0.1116253063082695,
"learning_rate": 2.895e-05,
"loss": 0.8015,
"step": 842
},
{
"epoch": 1.2014430142965304,
"grad_norm": 0.11157463490962982,
"learning_rate": 2.8925000000000002e-05,
"loss": 0.7292,
"step": 843
},
{
"epoch": 1.202868213601746,
"grad_norm": 0.1171332597732544,
"learning_rate": 2.8899999999999998e-05,
"loss": 0.8668,
"step": 844
},
{
"epoch": 1.2042934129069611,
"grad_norm": 0.11264057457447052,
"learning_rate": 2.8875e-05,
"loss": 0.6854,
"step": 845
},
{
"epoch": 1.2057186122121766,
"grad_norm": 0.12508609890937805,
"learning_rate": 2.885e-05,
"loss": 0.8113,
"step": 846
},
{
"epoch": 1.2071438115173918,
"grad_norm": 0.11260174214839935,
"learning_rate": 2.8825000000000003e-05,
"loss": 0.798,
"step": 847
},
{
"epoch": 1.2085690108226073,
"grad_norm": 0.12849608063697815,
"learning_rate": 2.88e-05,
"loss": 0.8004,
"step": 848
},
{
"epoch": 1.2099942101278225,
"grad_norm": 0.11969997733831406,
"learning_rate": 2.8775e-05,
"loss": 0.7685,
"step": 849
},
{
"epoch": 1.211419409433038,
"grad_norm": 0.10770337283611298,
"learning_rate": 2.8749999999999997e-05,
"loss": 0.6968,
"step": 850
},
{
"epoch": 1.2128446087382532,
"grad_norm": 0.11139461398124695,
"learning_rate": 2.8725e-05,
"loss": 0.6978,
"step": 851
},
{
"epoch": 1.2142698080434686,
"grad_norm": 0.12267636507749557,
"learning_rate": 2.87e-05,
"loss": 0.7502,
"step": 852
},
{
"epoch": 1.2156950073486839,
"grad_norm": 0.11457645148038864,
"learning_rate": 2.8675000000000002e-05,
"loss": 0.7271,
"step": 853
},
{
"epoch": 1.2171202066538993,
"grad_norm": 0.1200946569442749,
"learning_rate": 2.865e-05,
"loss": 0.8226,
"step": 854
},
{
"epoch": 1.2185454059591145,
"grad_norm": 0.11289604008197784,
"learning_rate": 2.8625e-05,
"loss": 0.7205,
"step": 855
},
{
"epoch": 1.21997060526433,
"grad_norm": 0.12491659075021744,
"learning_rate": 2.86e-05,
"loss": 0.873,
"step": 856
},
{
"epoch": 1.2213958045695452,
"grad_norm": 0.1107926070690155,
"learning_rate": 2.8575000000000003e-05,
"loss": 0.6857,
"step": 857
},
{
"epoch": 1.2228210038747607,
"grad_norm": 0.11701218038797379,
"learning_rate": 2.855e-05,
"loss": 0.8107,
"step": 858
},
{
"epoch": 1.224246203179976,
"grad_norm": 0.11461485177278519,
"learning_rate": 2.8525000000000002e-05,
"loss": 0.7489,
"step": 859
},
{
"epoch": 1.2256714024851914,
"grad_norm": 0.11121220886707306,
"learning_rate": 2.8499999999999998e-05,
"loss": 0.7554,
"step": 860
},
{
"epoch": 1.2270966017904066,
"grad_norm": 0.10512278228998184,
"learning_rate": 2.8475e-05,
"loss": 0.637,
"step": 861
},
{
"epoch": 1.228521801095622,
"grad_norm": 0.12895485758781433,
"learning_rate": 2.845e-05,
"loss": 0.8105,
"step": 862
},
{
"epoch": 1.2299470004008373,
"grad_norm": 0.11858545243740082,
"learning_rate": 2.8425000000000003e-05,
"loss": 0.7652,
"step": 863
},
{
"epoch": 1.2313721997060527,
"grad_norm": 0.11664186418056488,
"learning_rate": 2.84e-05,
"loss": 0.7943,
"step": 864
},
{
"epoch": 1.232797399011268,
"grad_norm": 0.10928841680288315,
"learning_rate": 2.8375000000000002e-05,
"loss": 0.7179,
"step": 865
},
{
"epoch": 1.2342225983164834,
"grad_norm": 0.1079331487417221,
"learning_rate": 2.8349999999999998e-05,
"loss": 0.6557,
"step": 866
},
{
"epoch": 1.2356477976216986,
"grad_norm": 0.1149611622095108,
"learning_rate": 2.8325e-05,
"loss": 0.7827,
"step": 867
},
{
"epoch": 1.237072996926914,
"grad_norm": 0.11604591459035873,
"learning_rate": 2.83e-05,
"loss": 0.7713,
"step": 868
},
{
"epoch": 1.2384981962321293,
"grad_norm": 0.11439640074968338,
"learning_rate": 2.8275000000000003e-05,
"loss": 0.7547,
"step": 869
},
{
"epoch": 1.2399233955373448,
"grad_norm": 0.11716994643211365,
"learning_rate": 2.825e-05,
"loss": 0.8289,
"step": 870
},
{
"epoch": 1.24134859484256,
"grad_norm": 0.12129350751638412,
"learning_rate": 2.8225e-05,
"loss": 0.8517,
"step": 871
},
{
"epoch": 1.2427737941477754,
"grad_norm": 0.11103874444961548,
"learning_rate": 2.8199999999999998e-05,
"loss": 0.6972,
"step": 872
},
{
"epoch": 1.2441989934529907,
"grad_norm": 0.11324205994606018,
"learning_rate": 2.8175e-05,
"loss": 0.6996,
"step": 873
},
{
"epoch": 1.2456241927582061,
"grad_norm": 0.1129378080368042,
"learning_rate": 2.815e-05,
"loss": 0.7104,
"step": 874
},
{
"epoch": 1.2470493920634214,
"grad_norm": 0.1273653358221054,
"learning_rate": 2.8125000000000003e-05,
"loss": 0.8208,
"step": 875
},
{
"epoch": 1.2484745913686366,
"grad_norm": 0.1083623394370079,
"learning_rate": 2.8100000000000005e-05,
"loss": 0.6718,
"step": 876
},
{
"epoch": 1.249899790673852,
"grad_norm": 0.11138517409563065,
"learning_rate": 2.8075e-05,
"loss": 0.7382,
"step": 877
},
{
"epoch": 1.2513249899790675,
"grad_norm": 0.10969806462526321,
"learning_rate": 2.8050000000000004e-05,
"loss": 0.6979,
"step": 878
},
{
"epoch": 1.2527501892842827,
"grad_norm": 0.12256379425525665,
"learning_rate": 2.8025e-05,
"loss": 0.8439,
"step": 879
},
{
"epoch": 1.254175388589498,
"grad_norm": 0.12170591950416565,
"learning_rate": 2.8000000000000003e-05,
"loss": 0.7671,
"step": 880
},
{
"epoch": 1.2556005878947134,
"grad_norm": 0.12270461767911911,
"learning_rate": 2.7975000000000002e-05,
"loss": 0.7909,
"step": 881
},
{
"epoch": 1.2570257871999289,
"grad_norm": 0.1191706582903862,
"learning_rate": 2.7950000000000005e-05,
"loss": 0.7844,
"step": 882
},
{
"epoch": 1.258450986505144,
"grad_norm": 0.13361094892024994,
"learning_rate": 2.7925e-05,
"loss": 0.8401,
"step": 883
},
{
"epoch": 1.2598761858103593,
"grad_norm": 0.11256485432386398,
"learning_rate": 2.7900000000000004e-05,
"loss": 0.701,
"step": 884
},
{
"epoch": 1.2613013851155748,
"grad_norm": 0.10536285489797592,
"learning_rate": 2.7875e-05,
"loss": 0.6978,
"step": 885
},
{
"epoch": 1.2627265844207902,
"grad_norm": 0.11960794031620026,
"learning_rate": 2.7850000000000003e-05,
"loss": 0.8081,
"step": 886
},
{
"epoch": 1.2641517837260055,
"grad_norm": 0.11483211070299149,
"learning_rate": 2.7825000000000002e-05,
"loss": 0.7701,
"step": 887
},
{
"epoch": 1.2655769830312207,
"grad_norm": 0.10743017494678497,
"learning_rate": 2.7800000000000005e-05,
"loss": 0.7144,
"step": 888
},
{
"epoch": 1.2670021823364361,
"grad_norm": 0.10820018500089645,
"learning_rate": 2.7775e-05,
"loss": 0.6862,
"step": 889
},
{
"epoch": 1.2684273816416516,
"grad_norm": 0.12604548037052155,
"learning_rate": 2.7750000000000004e-05,
"loss": 0.8798,
"step": 890
},
{
"epoch": 1.2698525809468668,
"grad_norm": 0.11172569543123245,
"learning_rate": 2.7725e-05,
"loss": 0.7618,
"step": 891
},
{
"epoch": 1.271277780252082,
"grad_norm": 0.1111871749162674,
"learning_rate": 2.7700000000000002e-05,
"loss": 0.7488,
"step": 892
},
{
"epoch": 1.2727029795572975,
"grad_norm": 0.12239037454128265,
"learning_rate": 2.7675000000000002e-05,
"loss": 0.8765,
"step": 893
},
{
"epoch": 1.2741281788625127,
"grad_norm": 0.10793200135231018,
"learning_rate": 2.7650000000000005e-05,
"loss": 0.7697,
"step": 894
},
{
"epoch": 1.2755533781677282,
"grad_norm": 0.10788409411907196,
"learning_rate": 2.7625e-05,
"loss": 0.7179,
"step": 895
},
{
"epoch": 1.2769785774729434,
"grad_norm": 0.10764143615961075,
"learning_rate": 2.7600000000000003e-05,
"loss": 0.7262,
"step": 896
},
{
"epoch": 1.2784037767781589,
"grad_norm": 0.120219387114048,
"learning_rate": 2.7575e-05,
"loss": 0.7258,
"step": 897
},
{
"epoch": 1.279828976083374,
"grad_norm": 0.11540699750185013,
"learning_rate": 2.7550000000000002e-05,
"loss": 0.7984,
"step": 898
},
{
"epoch": 1.2812541753885895,
"grad_norm": 0.1095326691865921,
"learning_rate": 2.7525e-05,
"loss": 0.6831,
"step": 899
},
{
"epoch": 1.2826793746938048,
"grad_norm": 0.11630558222532272,
"learning_rate": 2.7500000000000004e-05,
"loss": 0.7077,
"step": 900
},
{
"epoch": 1.2841045739990202,
"grad_norm": 0.12241445481777191,
"learning_rate": 2.7475e-05,
"loss": 0.8176,
"step": 901
},
{
"epoch": 1.2855297733042355,
"grad_norm": 0.1199202612042427,
"learning_rate": 2.7450000000000003e-05,
"loss": 0.7538,
"step": 902
},
{
"epoch": 1.286954972609451,
"grad_norm": 0.11809781938791275,
"learning_rate": 2.7425e-05,
"loss": 0.7675,
"step": 903
},
{
"epoch": 1.2883801719146661,
"grad_norm": 0.11524670571088791,
"learning_rate": 2.7400000000000002e-05,
"loss": 0.8294,
"step": 904
},
{
"epoch": 1.2898053712198816,
"grad_norm": 0.11991703510284424,
"learning_rate": 2.7375e-05,
"loss": 0.7522,
"step": 905
},
{
"epoch": 1.2912305705250968,
"grad_norm": 0.11874469369649887,
"learning_rate": 2.7350000000000004e-05,
"loss": 0.7491,
"step": 906
},
{
"epoch": 1.2926557698303123,
"grad_norm": 0.1085904985666275,
"learning_rate": 2.7325e-05,
"loss": 0.6949,
"step": 907
},
{
"epoch": 1.2940809691355275,
"grad_norm": 0.11535164713859558,
"learning_rate": 2.7300000000000003e-05,
"loss": 0.712,
"step": 908
},
{
"epoch": 1.295506168440743,
"grad_norm": 0.1114681139588356,
"learning_rate": 2.7275e-05,
"loss": 0.6805,
"step": 909
},
{
"epoch": 1.2969313677459582,
"grad_norm": 0.11432049423456192,
"learning_rate": 2.725e-05,
"loss": 0.7956,
"step": 910
},
{
"epoch": 1.2983565670511736,
"grad_norm": 0.11360208690166473,
"learning_rate": 2.7225e-05,
"loss": 0.7288,
"step": 911
},
{
"epoch": 1.2997817663563889,
"grad_norm": 0.1157182827591896,
"learning_rate": 2.7200000000000004e-05,
"loss": 0.7597,
"step": 912
},
{
"epoch": 1.3012069656616043,
"grad_norm": 0.1076773926615715,
"learning_rate": 2.7175e-05,
"loss": 0.589,
"step": 913
},
{
"epoch": 1.3026321649668196,
"grad_norm": 0.10875940322875977,
"learning_rate": 2.7150000000000003e-05,
"loss": 0.7503,
"step": 914
},
{
"epoch": 1.304057364272035,
"grad_norm": 0.12709981203079224,
"learning_rate": 2.7125000000000002e-05,
"loss": 0.7951,
"step": 915
},
{
"epoch": 1.3054825635772502,
"grad_norm": 0.11561498045921326,
"learning_rate": 2.7100000000000005e-05,
"loss": 0.7525,
"step": 916
},
{
"epoch": 1.3069077628824655,
"grad_norm": 0.119575634598732,
"learning_rate": 2.7075e-05,
"loss": 0.7594,
"step": 917
},
{
"epoch": 1.308332962187681,
"grad_norm": 0.12520454823970795,
"learning_rate": 2.7050000000000004e-05,
"loss": 0.8108,
"step": 918
},
{
"epoch": 1.3097581614928964,
"grad_norm": 0.1092676967382431,
"learning_rate": 2.7025e-05,
"loss": 0.7539,
"step": 919
},
{
"epoch": 1.3111833607981116,
"grad_norm": 0.11588326841592789,
"learning_rate": 2.7000000000000002e-05,
"loss": 0.7869,
"step": 920
},
{
"epoch": 1.3126085601033268,
"grad_norm": 0.11451387405395508,
"learning_rate": 2.6975000000000002e-05,
"loss": 0.7404,
"step": 921
},
{
"epoch": 1.3140337594085423,
"grad_norm": 0.11125651001930237,
"learning_rate": 2.6950000000000005e-05,
"loss": 0.6937,
"step": 922
},
{
"epoch": 1.3154589587137577,
"grad_norm": 0.11820688098669052,
"learning_rate": 2.6925e-05,
"loss": 0.7915,
"step": 923
},
{
"epoch": 1.316884158018973,
"grad_norm": 0.11905471980571747,
"learning_rate": 2.6900000000000003e-05,
"loss": 0.7854,
"step": 924
},
{
"epoch": 1.3183093573241882,
"grad_norm": 0.11712141335010529,
"learning_rate": 2.6875e-05,
"loss": 0.7925,
"step": 925
},
{
"epoch": 1.3197345566294036,
"grad_norm": 0.1105298399925232,
"learning_rate": 2.6850000000000002e-05,
"loss": 0.7454,
"step": 926
},
{
"epoch": 1.321159755934619,
"grad_norm": 0.12145576626062393,
"learning_rate": 2.6825e-05,
"loss": 0.8142,
"step": 927
},
{
"epoch": 1.3225849552398343,
"grad_norm": 0.11712533235549927,
"learning_rate": 2.6800000000000004e-05,
"loss": 0.763,
"step": 928
},
{
"epoch": 1.3240101545450496,
"grad_norm": 0.11194977909326553,
"learning_rate": 2.6775e-05,
"loss": 0.6832,
"step": 929
},
{
"epoch": 1.325435353850265,
"grad_norm": 0.11105328053236008,
"learning_rate": 2.6750000000000003e-05,
"loss": 0.7002,
"step": 930
},
{
"epoch": 1.3268605531554805,
"grad_norm": 0.12465706467628479,
"learning_rate": 2.6725e-05,
"loss": 0.9137,
"step": 931
},
{
"epoch": 1.3282857524606957,
"grad_norm": 0.12636415660381317,
"learning_rate": 2.6700000000000002e-05,
"loss": 0.8045,
"step": 932
},
{
"epoch": 1.329710951765911,
"grad_norm": 0.1165253296494484,
"learning_rate": 2.6675e-05,
"loss": 0.7864,
"step": 933
},
{
"epoch": 1.3311361510711264,
"grad_norm": 0.1266823410987854,
"learning_rate": 2.6650000000000004e-05,
"loss": 0.8342,
"step": 934
},
{
"epoch": 1.3325613503763418,
"grad_norm": 0.11023375391960144,
"learning_rate": 2.6625e-05,
"loss": 0.7103,
"step": 935
},
{
"epoch": 1.333986549681557,
"grad_norm": 0.12472933530807495,
"learning_rate": 2.6600000000000003e-05,
"loss": 0.7953,
"step": 936
},
{
"epoch": 1.3354117489867723,
"grad_norm": 0.10343880951404572,
"learning_rate": 2.6575e-05,
"loss": 0.6893,
"step": 937
},
{
"epoch": 1.3368369482919877,
"grad_norm": 0.12377790361642838,
"learning_rate": 2.655e-05,
"loss": 0.8661,
"step": 938
},
{
"epoch": 1.338262147597203,
"grad_norm": 0.1217459961771965,
"learning_rate": 2.6525e-05,
"loss": 0.7914,
"step": 939
},
{
"epoch": 1.3396873469024184,
"grad_norm": 0.1167265772819519,
"learning_rate": 2.6500000000000004e-05,
"loss": 0.7552,
"step": 940
},
{
"epoch": 1.3411125462076336,
"grad_norm": 0.12100812792778015,
"learning_rate": 2.6475e-05,
"loss": 0.8515,
"step": 941
},
{
"epoch": 1.342537745512849,
"grad_norm": 0.117970772087574,
"learning_rate": 2.6450000000000003e-05,
"loss": 0.8037,
"step": 942
},
{
"epoch": 1.3439629448180643,
"grad_norm": 0.11965232342481613,
"learning_rate": 2.6425e-05,
"loss": 0.7754,
"step": 943
},
{
"epoch": 1.3453881441232798,
"grad_norm": 0.11228921264410019,
"learning_rate": 2.64e-05,
"loss": 0.7037,
"step": 944
},
{
"epoch": 1.346813343428495,
"grad_norm": 0.11814633011817932,
"learning_rate": 2.6375e-05,
"loss": 0.824,
"step": 945
},
{
"epoch": 1.3482385427337105,
"grad_norm": 0.11586852371692657,
"learning_rate": 2.6350000000000004e-05,
"loss": 0.6572,
"step": 946
},
{
"epoch": 1.3496637420389257,
"grad_norm": 0.11452041566371918,
"learning_rate": 2.6325e-05,
"loss": 0.7533,
"step": 947
},
{
"epoch": 1.3510889413441411,
"grad_norm": 0.1118515357375145,
"learning_rate": 2.6300000000000002e-05,
"loss": 0.7236,
"step": 948
},
{
"epoch": 1.3525141406493564,
"grad_norm": 0.11664275079965591,
"learning_rate": 2.6275e-05,
"loss": 0.7828,
"step": 949
},
{
"epoch": 1.3539393399545718,
"grad_norm": 0.11567650735378265,
"learning_rate": 2.625e-05,
"loss": 0.7203,
"step": 950
},
{
"epoch": 1.355364539259787,
"grad_norm": 0.11788018047809601,
"learning_rate": 2.6225e-05,
"loss": 0.7733,
"step": 951
},
{
"epoch": 1.3567897385650025,
"grad_norm": 0.1237102746963501,
"learning_rate": 2.6200000000000003e-05,
"loss": 0.8207,
"step": 952
},
{
"epoch": 1.3582149378702177,
"grad_norm": 0.1189865916967392,
"learning_rate": 2.6175e-05,
"loss": 0.8356,
"step": 953
},
{
"epoch": 1.3596401371754332,
"grad_norm": 0.1173599511384964,
"learning_rate": 2.6150000000000002e-05,
"loss": 0.7304,
"step": 954
},
{
"epoch": 1.3610653364806484,
"grad_norm": 0.12222309410572052,
"learning_rate": 2.6124999999999998e-05,
"loss": 0.7392,
"step": 955
},
{
"epoch": 1.3624905357858639,
"grad_norm": 0.1205679327249527,
"learning_rate": 2.61e-05,
"loss": 0.8496,
"step": 956
},
{
"epoch": 1.363915735091079,
"grad_norm": 0.12086839228868484,
"learning_rate": 2.6075e-05,
"loss": 0.82,
"step": 957
},
{
"epoch": 1.3653409343962946,
"grad_norm": 0.11372940987348557,
"learning_rate": 2.6050000000000003e-05,
"loss": 0.7732,
"step": 958
},
{
"epoch": 1.3667661337015098,
"grad_norm": 0.11405348032712936,
"learning_rate": 2.6025e-05,
"loss": 0.7328,
"step": 959
},
{
"epoch": 1.3681913330067252,
"grad_norm": 0.12149921804666519,
"learning_rate": 2.6000000000000002e-05,
"loss": 0.6837,
"step": 960
},
{
"epoch": 1.3696165323119405,
"grad_norm": 0.12730655074119568,
"learning_rate": 2.5974999999999998e-05,
"loss": 0.7108,
"step": 961
},
{
"epoch": 1.3710417316171557,
"grad_norm": 0.1427479088306427,
"learning_rate": 2.595e-05,
"loss": 0.8389,
"step": 962
},
{
"epoch": 1.3724669309223712,
"grad_norm": 0.12024298310279846,
"learning_rate": 2.5925e-05,
"loss": 0.8355,
"step": 963
},
{
"epoch": 1.3738921302275866,
"grad_norm": 0.10839163511991501,
"learning_rate": 2.5900000000000003e-05,
"loss": 0.7386,
"step": 964
},
{
"epoch": 1.3753173295328018,
"grad_norm": 0.1163763478398323,
"learning_rate": 2.5875e-05,
"loss": 0.7443,
"step": 965
},
{
"epoch": 1.376742528838017,
"grad_norm": 0.11475004255771637,
"learning_rate": 2.585e-05,
"loss": 0.7724,
"step": 966
},
{
"epoch": 1.3781677281432325,
"grad_norm": 0.12580887973308563,
"learning_rate": 2.5824999999999998e-05,
"loss": 0.8061,
"step": 967
},
{
"epoch": 1.379592927448448,
"grad_norm": 0.12531021237373352,
"learning_rate": 2.58e-05,
"loss": 0.7711,
"step": 968
},
{
"epoch": 1.3810181267536632,
"grad_norm": 0.11652937531471252,
"learning_rate": 2.5775e-05,
"loss": 0.7479,
"step": 969
},
{
"epoch": 1.3824433260588784,
"grad_norm": 0.1105751171708107,
"learning_rate": 2.5750000000000002e-05,
"loss": 0.6708,
"step": 970
},
{
"epoch": 1.3838685253640939,
"grad_norm": 0.1220349669456482,
"learning_rate": 2.5725e-05,
"loss": 0.8461,
"step": 971
},
{
"epoch": 1.3852937246693093,
"grad_norm": 0.11623432487249374,
"learning_rate": 2.57e-05,
"loss": 0.6789,
"step": 972
},
{
"epoch": 1.3867189239745246,
"grad_norm": 0.11964385211467743,
"learning_rate": 2.5675e-05,
"loss": 0.7368,
"step": 973
},
{
"epoch": 1.3881441232797398,
"grad_norm": 0.10884688049554825,
"learning_rate": 2.5650000000000003e-05,
"loss": 0.727,
"step": 974
},
{
"epoch": 1.3895693225849552,
"grad_norm": 0.10422010719776154,
"learning_rate": 2.5625e-05,
"loss": 0.6787,
"step": 975
},
{
"epoch": 1.3909945218901707,
"grad_norm": 0.10569993406534195,
"learning_rate": 2.5600000000000002e-05,
"loss": 0.6858,
"step": 976
},
{
"epoch": 1.392419721195386,
"grad_norm": 0.12773045897483826,
"learning_rate": 2.5574999999999998e-05,
"loss": 0.8111,
"step": 977
},
{
"epoch": 1.3938449205006012,
"grad_norm": 0.11709249764680862,
"learning_rate": 2.555e-05,
"loss": 0.7091,
"step": 978
},
{
"epoch": 1.3952701198058166,
"grad_norm": 0.12767601013183594,
"learning_rate": 2.5525e-05,
"loss": 0.8974,
"step": 979
},
{
"epoch": 1.396695319111032,
"grad_norm": 0.11176025122404099,
"learning_rate": 2.5500000000000003e-05,
"loss": 0.6698,
"step": 980
},
{
"epoch": 1.3981205184162473,
"grad_norm": 0.11420497298240662,
"learning_rate": 2.5475e-05,
"loss": 0.7048,
"step": 981
},
{
"epoch": 1.3995457177214625,
"grad_norm": 0.11570915579795837,
"learning_rate": 2.5450000000000002e-05,
"loss": 0.794,
"step": 982
},
{
"epoch": 1.400970917026678,
"grad_norm": 0.12082798033952713,
"learning_rate": 2.5424999999999998e-05,
"loss": 0.8466,
"step": 983
},
{
"epoch": 1.4023961163318932,
"grad_norm": 0.116717629134655,
"learning_rate": 2.54e-05,
"loss": 0.7611,
"step": 984
},
{
"epoch": 1.4038213156371087,
"grad_norm": 0.12127409130334854,
"learning_rate": 2.5375e-05,
"loss": 0.8609,
"step": 985
},
{
"epoch": 1.4052465149423239,
"grad_norm": 0.12146691232919693,
"learning_rate": 2.5350000000000003e-05,
"loss": 0.803,
"step": 986
},
{
"epoch": 1.4066717142475393,
"grad_norm": 0.11171932518482208,
"learning_rate": 2.5325e-05,
"loss": 0.7607,
"step": 987
},
{
"epoch": 1.4080969135527546,
"grad_norm": 0.09921254217624664,
"learning_rate": 2.5300000000000002e-05,
"loss": 0.6177,
"step": 988
},
{
"epoch": 1.40952211285797,
"grad_norm": 0.11079659312963486,
"learning_rate": 2.5274999999999998e-05,
"loss": 0.6938,
"step": 989
},
{
"epoch": 1.4109473121631853,
"grad_norm": 0.11670161038637161,
"learning_rate": 2.525e-05,
"loss": 0.7806,
"step": 990
},
{
"epoch": 1.4123725114684007,
"grad_norm": 0.11570683866739273,
"learning_rate": 2.5225e-05,
"loss": 0.7121,
"step": 991
},
{
"epoch": 1.413797710773616,
"grad_norm": 0.11448443681001663,
"learning_rate": 2.5200000000000003e-05,
"loss": 0.7263,
"step": 992
},
{
"epoch": 1.4152229100788314,
"grad_norm": 0.10907650738954544,
"learning_rate": 2.5175e-05,
"loss": 0.7088,
"step": 993
},
{
"epoch": 1.4166481093840466,
"grad_norm": 0.12411980330944061,
"learning_rate": 2.515e-05,
"loss": 0.8867,
"step": 994
},
{
"epoch": 1.418073308689262,
"grad_norm": 0.12327571958303452,
"learning_rate": 2.5124999999999997e-05,
"loss": 0.7451,
"step": 995
},
{
"epoch": 1.4194985079944773,
"grad_norm": 0.1201588436961174,
"learning_rate": 2.51e-05,
"loss": 0.7162,
"step": 996
},
{
"epoch": 1.4209237072996928,
"grad_norm": 0.11017190665006638,
"learning_rate": 2.5075e-05,
"loss": 0.7236,
"step": 997
},
{
"epoch": 1.422348906604908,
"grad_norm": 0.11153571307659149,
"learning_rate": 2.5050000000000002e-05,
"loss": 0.7212,
"step": 998
},
{
"epoch": 1.4237741059101234,
"grad_norm": 0.11366070061922073,
"learning_rate": 2.5025e-05,
"loss": 0.7366,
"step": 999
},
{
"epoch": 1.4251993052153387,
"grad_norm": 0.10990682244300842,
"learning_rate": 2.5e-05,
"loss": 0.7304,
"step": 1000
},
{
"epoch": 1.4266245045205541,
"grad_norm": 0.11313823610544205,
"learning_rate": 2.4975e-05,
"loss": 0.6938,
"step": 1001
},
{
"epoch": 1.4280497038257693,
"grad_norm": 0.10493762791156769,
"learning_rate": 2.495e-05,
"loss": 0.6733,
"step": 1002
},
{
"epoch": 1.4294749031309848,
"grad_norm": 0.1234835833311081,
"learning_rate": 2.4925000000000003e-05,
"loss": 0.8295,
"step": 1003
},
{
"epoch": 1.4309001024362,
"grad_norm": 0.1296398788690567,
"learning_rate": 2.4900000000000002e-05,
"loss": 0.8166,
"step": 1004
},
{
"epoch": 1.4323253017414155,
"grad_norm": 0.11815960705280304,
"learning_rate": 2.4875e-05,
"loss": 0.7955,
"step": 1005
},
{
"epoch": 1.4337505010466307,
"grad_norm": 0.11081986874341965,
"learning_rate": 2.485e-05,
"loss": 0.6855,
"step": 1006
},
{
"epoch": 1.435175700351846,
"grad_norm": 0.11586599051952362,
"learning_rate": 2.4825e-05,
"loss": 0.7843,
"step": 1007
},
{
"epoch": 1.4366008996570614,
"grad_norm": 0.11511670798063278,
"learning_rate": 2.48e-05,
"loss": 0.6926,
"step": 1008
},
{
"epoch": 1.4380260989622768,
"grad_norm": 0.11871721595525742,
"learning_rate": 2.4775000000000003e-05,
"loss": 0.755,
"step": 1009
},
{
"epoch": 1.439451298267492,
"grad_norm": 0.11956765502691269,
"learning_rate": 2.4750000000000002e-05,
"loss": 0.8112,
"step": 1010
},
{
"epoch": 1.4408764975727073,
"grad_norm": 0.11924542486667633,
"learning_rate": 2.4725e-05,
"loss": 0.8178,
"step": 1011
},
{
"epoch": 1.4423016968779228,
"grad_norm": 0.11436988413333893,
"learning_rate": 2.47e-05,
"loss": 0.7395,
"step": 1012
},
{
"epoch": 1.4437268961831382,
"grad_norm": 0.1106889471411705,
"learning_rate": 2.4675e-05,
"loss": 0.6839,
"step": 1013
},
{
"epoch": 1.4451520954883534,
"grad_norm": 0.11819828301668167,
"learning_rate": 2.465e-05,
"loss": 0.7651,
"step": 1014
},
{
"epoch": 1.4465772947935687,
"grad_norm": 0.11884670704603195,
"learning_rate": 2.4625000000000002e-05,
"loss": 0.8327,
"step": 1015
},
{
"epoch": 1.4480024940987841,
"grad_norm": 0.11763067543506622,
"learning_rate": 2.46e-05,
"loss": 0.7973,
"step": 1016
},
{
"epoch": 1.4494276934039996,
"grad_norm": 0.12050741165876389,
"learning_rate": 2.4575e-05,
"loss": 0.7672,
"step": 1017
},
{
"epoch": 1.4508528927092148,
"grad_norm": 0.10879375040531158,
"learning_rate": 2.455e-05,
"loss": 0.6811,
"step": 1018
},
{
"epoch": 1.45227809201443,
"grad_norm": 0.11369942128658295,
"learning_rate": 2.4525e-05,
"loss": 0.7813,
"step": 1019
},
{
"epoch": 1.4537032913196455,
"grad_norm": 0.14750194549560547,
"learning_rate": 2.45e-05,
"loss": 0.702,
"step": 1020
},
{
"epoch": 1.455128490624861,
"grad_norm": 0.1253705769777298,
"learning_rate": 2.4475000000000002e-05,
"loss": 0.8349,
"step": 1021
},
{
"epoch": 1.4565536899300762,
"grad_norm": 0.13692884147167206,
"learning_rate": 2.445e-05,
"loss": 0.7743,
"step": 1022
},
{
"epoch": 1.4579788892352914,
"grad_norm": 0.1298292577266693,
"learning_rate": 2.4425e-05,
"loss": 0.8964,
"step": 1023
},
{
"epoch": 1.4594040885405068,
"grad_norm": 0.10757498443126678,
"learning_rate": 2.44e-05,
"loss": 0.5595,
"step": 1024
},
{
"epoch": 1.4608292878457223,
"grad_norm": 0.10584932565689087,
"learning_rate": 2.4375e-05,
"loss": 0.6221,
"step": 1025
},
{
"epoch": 1.4622544871509375,
"grad_norm": 0.11721090972423553,
"learning_rate": 2.435e-05,
"loss": 0.824,
"step": 1026
},
{
"epoch": 1.4636796864561528,
"grad_norm": 0.11982092261314392,
"learning_rate": 2.4325000000000002e-05,
"loss": 0.8375,
"step": 1027
},
{
"epoch": 1.4651048857613682,
"grad_norm": 0.11559251695871353,
"learning_rate": 2.43e-05,
"loss": 0.7538,
"step": 1028
},
{
"epoch": 1.4665300850665837,
"grad_norm": 0.11582160741090775,
"learning_rate": 2.4275e-05,
"loss": 0.761,
"step": 1029
},
{
"epoch": 1.467955284371799,
"grad_norm": 0.10671703517436981,
"learning_rate": 2.425e-05,
"loss": 0.6718,
"step": 1030
},
{
"epoch": 1.4693804836770141,
"grad_norm": 0.12085099518299103,
"learning_rate": 2.4225e-05,
"loss": 0.7571,
"step": 1031
},
{
"epoch": 1.4708056829822296,
"grad_norm": 0.10394426435232162,
"learning_rate": 2.4200000000000002e-05,
"loss": 0.6049,
"step": 1032
},
{
"epoch": 1.4722308822874448,
"grad_norm": 0.11202719807624817,
"learning_rate": 2.4175e-05,
"loss": 0.7581,
"step": 1033
},
{
"epoch": 1.4736560815926603,
"grad_norm": 0.12303619831800461,
"learning_rate": 2.415e-05,
"loss": 0.8265,
"step": 1034
},
{
"epoch": 1.4750812808978755,
"grad_norm": 0.11713062226772308,
"learning_rate": 2.4125e-05,
"loss": 0.6683,
"step": 1035
},
{
"epoch": 1.476506480203091,
"grad_norm": 0.12619657814502716,
"learning_rate": 2.41e-05,
"loss": 0.8032,
"step": 1036
},
{
"epoch": 1.4779316795083062,
"grad_norm": 0.11300335079431534,
"learning_rate": 2.4075e-05,
"loss": 0.775,
"step": 1037
},
{
"epoch": 1.4793568788135216,
"grad_norm": 0.12636785209178925,
"learning_rate": 2.4050000000000002e-05,
"loss": 0.8361,
"step": 1038
},
{
"epoch": 1.4807820781187369,
"grad_norm": 0.11806098371744156,
"learning_rate": 2.4025e-05,
"loss": 0.7612,
"step": 1039
},
{
"epoch": 1.4822072774239523,
"grad_norm": 0.10547633469104767,
"learning_rate": 2.4e-05,
"loss": 0.6112,
"step": 1040
},
{
"epoch": 1.4836324767291675,
"grad_norm": 0.10859034210443497,
"learning_rate": 2.3975e-05,
"loss": 0.6798,
"step": 1041
},
{
"epoch": 1.485057676034383,
"grad_norm": 0.12310510128736496,
"learning_rate": 2.395e-05,
"loss": 0.8202,
"step": 1042
},
{
"epoch": 1.4864828753395982,
"grad_norm": 0.11892445385456085,
"learning_rate": 2.3925e-05,
"loss": 0.853,
"step": 1043
},
{
"epoch": 1.4879080746448137,
"grad_norm": 0.1227976605296135,
"learning_rate": 2.39e-05,
"loss": 0.7982,
"step": 1044
},
{
"epoch": 1.489333273950029,
"grad_norm": 0.12141431123018265,
"learning_rate": 2.3875e-05,
"loss": 0.8444,
"step": 1045
},
{
"epoch": 1.4907584732552444,
"grad_norm": 0.11771409958600998,
"learning_rate": 2.385e-05,
"loss": 0.8214,
"step": 1046
},
{
"epoch": 1.4921836725604596,
"grad_norm": 0.10507446527481079,
"learning_rate": 2.3825e-05,
"loss": 0.665,
"step": 1047
},
{
"epoch": 1.493608871865675,
"grad_norm": 0.11452708393335342,
"learning_rate": 2.38e-05,
"loss": 0.7497,
"step": 1048
},
{
"epoch": 1.4950340711708903,
"grad_norm": 0.10935872793197632,
"learning_rate": 2.3775e-05,
"loss": 0.7101,
"step": 1049
},
{
"epoch": 1.4964592704761057,
"grad_norm": 0.12085019797086716,
"learning_rate": 2.375e-05,
"loss": 0.7761,
"step": 1050
},
{
"epoch": 1.497884469781321,
"grad_norm": 0.11829937249422073,
"learning_rate": 2.3725e-05,
"loss": 0.7342,
"step": 1051
},
{
"epoch": 1.4993096690865364,
"grad_norm": 0.1236567497253418,
"learning_rate": 2.37e-05,
"loss": 0.8387,
"step": 1052
},
{
"epoch": 1.5007348683917516,
"grad_norm": 0.11435924470424652,
"learning_rate": 2.3675e-05,
"loss": 0.7695,
"step": 1053
},
{
"epoch": 1.502160067696967,
"grad_norm": 0.11337295174598694,
"learning_rate": 2.365e-05,
"loss": 0.733,
"step": 1054
},
{
"epoch": 1.5035852670021823,
"grad_norm": 0.11937617510557175,
"learning_rate": 2.3624999999999998e-05,
"loss": 0.8075,
"step": 1055
},
{
"epoch": 1.5050104663073975,
"grad_norm": 0.11299442499876022,
"learning_rate": 2.36e-05,
"loss": 0.8135,
"step": 1056
},
{
"epoch": 1.506435665612613,
"grad_norm": 0.10889643430709839,
"learning_rate": 2.3575e-05,
"loss": 0.658,
"step": 1057
},
{
"epoch": 1.5078608649178284,
"grad_norm": 0.13474972546100616,
"learning_rate": 2.355e-05,
"loss": 0.9556,
"step": 1058
},
{
"epoch": 1.5092860642230437,
"grad_norm": 0.11456069350242615,
"learning_rate": 2.3525e-05,
"loss": 0.7983,
"step": 1059
},
{
"epoch": 1.510711263528259,
"grad_norm": 0.12779653072357178,
"learning_rate": 2.35e-05,
"loss": 0.8739,
"step": 1060
},
{
"epoch": 1.5121364628334744,
"grad_norm": 0.11765363067388535,
"learning_rate": 2.3475e-05,
"loss": 0.8203,
"step": 1061
},
{
"epoch": 1.5135616621386898,
"grad_norm": 0.16360750794410706,
"learning_rate": 2.345e-05,
"loss": 0.7682,
"step": 1062
},
{
"epoch": 1.514986861443905,
"grad_norm": 0.13148315250873566,
"learning_rate": 2.3425000000000004e-05,
"loss": 0.8373,
"step": 1063
},
{
"epoch": 1.5164120607491203,
"grad_norm": 0.1110619381070137,
"learning_rate": 2.3400000000000003e-05,
"loss": 0.7296,
"step": 1064
},
{
"epoch": 1.5178372600543357,
"grad_norm": 0.11759109050035477,
"learning_rate": 2.3375000000000002e-05,
"loss": 0.8485,
"step": 1065
},
{
"epoch": 1.5192624593595512,
"grad_norm": 0.12075982987880707,
"learning_rate": 2.3350000000000002e-05,
"loss": 0.8372,
"step": 1066
},
{
"epoch": 1.5206876586647664,
"grad_norm": 0.11918433755636215,
"learning_rate": 2.3325e-05,
"loss": 0.8262,
"step": 1067
},
{
"epoch": 1.5221128579699816,
"grad_norm": 0.11648005992174149,
"learning_rate": 2.3300000000000004e-05,
"loss": 0.7623,
"step": 1068
},
{
"epoch": 1.523538057275197,
"grad_norm": 0.10459990054368973,
"learning_rate": 2.3275000000000003e-05,
"loss": 0.6282,
"step": 1069
},
{
"epoch": 1.5249632565804125,
"grad_norm": 0.11834321916103363,
"learning_rate": 2.3250000000000003e-05,
"loss": 0.8539,
"step": 1070
},
{
"epoch": 1.5263884558856278,
"grad_norm": 0.12588469684123993,
"learning_rate": 2.3225000000000002e-05,
"loss": 0.8874,
"step": 1071
},
{
"epoch": 1.527813655190843,
"grad_norm": 0.11278087645769119,
"learning_rate": 2.32e-05,
"loss": 0.8067,
"step": 1072
},
{
"epoch": 1.5292388544960585,
"grad_norm": 0.11475430428981781,
"learning_rate": 2.3175e-05,
"loss": 0.7752,
"step": 1073
},
{
"epoch": 1.530664053801274,
"grad_norm": 0.12166863679885864,
"learning_rate": 2.3150000000000004e-05,
"loss": 0.7893,
"step": 1074
},
{
"epoch": 1.5320892531064891,
"grad_norm": 0.11727246642112732,
"learning_rate": 2.3125000000000003e-05,
"loss": 0.7429,
"step": 1075
},
{
"epoch": 1.5335144524117044,
"grad_norm": 0.11505335569381714,
"learning_rate": 2.3100000000000002e-05,
"loss": 0.7563,
"step": 1076
},
{
"epoch": 1.5349396517169198,
"grad_norm": 0.1198313981294632,
"learning_rate": 2.3075000000000002e-05,
"loss": 0.7014,
"step": 1077
},
{
"epoch": 1.5363648510221353,
"grad_norm": 0.11371297389268875,
"learning_rate": 2.305e-05,
"loss": 0.7772,
"step": 1078
},
{
"epoch": 1.5377900503273505,
"grad_norm": 0.11227618902921677,
"learning_rate": 2.3025e-05,
"loss": 0.6562,
"step": 1079
},
{
"epoch": 1.5392152496325657,
"grad_norm": 0.10970646142959595,
"learning_rate": 2.3000000000000003e-05,
"loss": 0.6746,
"step": 1080
},
{
"epoch": 1.5406404489377812,
"grad_norm": 0.11396931111812592,
"learning_rate": 2.2975000000000003e-05,
"loss": 0.7087,
"step": 1081
},
{
"epoch": 1.5420656482429966,
"grad_norm": 0.12560035288333893,
"learning_rate": 2.2950000000000002e-05,
"loss": 0.8747,
"step": 1082
},
{
"epoch": 1.5434908475482119,
"grad_norm": 0.11479337513446808,
"learning_rate": 2.2925e-05,
"loss": 0.8166,
"step": 1083
},
{
"epoch": 1.544916046853427,
"grad_norm": 0.10455924272537231,
"learning_rate": 2.29e-05,
"loss": 0.5757,
"step": 1084
},
{
"epoch": 1.5463412461586425,
"grad_norm": 0.11855601519346237,
"learning_rate": 2.2875e-05,
"loss": 0.8159,
"step": 1085
},
{
"epoch": 1.547766445463858,
"grad_norm": 0.11588083207607269,
"learning_rate": 2.2850000000000003e-05,
"loss": 0.772,
"step": 1086
},
{
"epoch": 1.5491916447690732,
"grad_norm": 0.12005151808261871,
"learning_rate": 2.2825000000000003e-05,
"loss": 0.8014,
"step": 1087
},
{
"epoch": 1.5506168440742885,
"grad_norm": 0.1143575981259346,
"learning_rate": 2.2800000000000002e-05,
"loss": 0.6844,
"step": 1088
},
{
"epoch": 1.5520420433795037,
"grad_norm": 0.12026940286159515,
"learning_rate": 2.2775e-05,
"loss": 0.7923,
"step": 1089
},
{
"epoch": 1.5534672426847191,
"grad_norm": 0.11226795613765717,
"learning_rate": 2.275e-05,
"loss": 0.7147,
"step": 1090
},
{
"epoch": 1.5548924419899346,
"grad_norm": 0.12784352898597717,
"learning_rate": 2.2725000000000003e-05,
"loss": 0.8299,
"step": 1091
},
{
"epoch": 1.5563176412951498,
"grad_norm": 0.11506415903568268,
"learning_rate": 2.2700000000000003e-05,
"loss": 0.7666,
"step": 1092
},
{
"epoch": 1.557742840600365,
"grad_norm": 0.11893978714942932,
"learning_rate": 2.2675000000000002e-05,
"loss": 0.7346,
"step": 1093
},
{
"epoch": 1.5591680399055805,
"grad_norm": 0.11422725766897202,
"learning_rate": 2.265e-05,
"loss": 0.6913,
"step": 1094
},
{
"epoch": 1.560593239210796,
"grad_norm": 0.12034717947244644,
"learning_rate": 2.2625e-05,
"loss": 0.7345,
"step": 1095
},
{
"epoch": 1.5620184385160112,
"grad_norm": 0.1104605421423912,
"learning_rate": 2.26e-05,
"loss": 0.6607,
"step": 1096
},
{
"epoch": 1.5634436378212264,
"grad_norm": 0.11132071912288666,
"learning_rate": 2.2575000000000003e-05,
"loss": 0.7116,
"step": 1097
},
{
"epoch": 1.5648688371264419,
"grad_norm": 0.12944871187210083,
"learning_rate": 2.2550000000000003e-05,
"loss": 0.8974,
"step": 1098
},
{
"epoch": 1.5662940364316573,
"grad_norm": 0.12086828052997589,
"learning_rate": 2.2525000000000002e-05,
"loss": 0.8535,
"step": 1099
},
{
"epoch": 1.5677192357368726,
"grad_norm": 0.10980792343616486,
"learning_rate": 2.25e-05,
"loss": 0.6819,
"step": 1100
},
{
"epoch": 1.5691444350420878,
"grad_norm": 0.11690583825111389,
"learning_rate": 2.2475e-05,
"loss": 0.7431,
"step": 1101
},
{
"epoch": 1.5705696343473032,
"grad_norm": 0.124410480260849,
"learning_rate": 2.245e-05,
"loss": 0.7774,
"step": 1102
},
{
"epoch": 1.5719948336525187,
"grad_norm": 0.11295003443956375,
"learning_rate": 2.2425000000000003e-05,
"loss": 0.6707,
"step": 1103
},
{
"epoch": 1.573420032957734,
"grad_norm": 0.11237070709466934,
"learning_rate": 2.2400000000000002e-05,
"loss": 0.7087,
"step": 1104
},
{
"epoch": 1.5748452322629491,
"grad_norm": 0.11122558265924454,
"learning_rate": 2.2375000000000002e-05,
"loss": 0.742,
"step": 1105
},
{
"epoch": 1.5762704315681646,
"grad_norm": 0.11660466343164444,
"learning_rate": 2.235e-05,
"loss": 0.7166,
"step": 1106
},
{
"epoch": 1.57769563087338,
"grad_norm": 0.11522463709115982,
"learning_rate": 2.2325e-05,
"loss": 0.7779,
"step": 1107
},
{
"epoch": 1.5791208301785953,
"grad_norm": 0.11560066789388657,
"learning_rate": 2.23e-05,
"loss": 0.7809,
"step": 1108
},
{
"epoch": 1.5805460294838105,
"grad_norm": 0.11911153793334961,
"learning_rate": 2.2275000000000003e-05,
"loss": 0.7946,
"step": 1109
},
{
"epoch": 1.581971228789026,
"grad_norm": 0.11437411606311798,
"learning_rate": 2.2250000000000002e-05,
"loss": 0.6906,
"step": 1110
},
{
"epoch": 1.5833964280942414,
"grad_norm": 0.12097032368183136,
"learning_rate": 2.2225e-05,
"loss": 0.8124,
"step": 1111
},
{
"epoch": 1.5848216273994566,
"grad_norm": 0.10900327563285828,
"learning_rate": 2.22e-05,
"loss": 0.7381,
"step": 1112
},
{
"epoch": 1.5862468267046719,
"grad_norm": 0.10762134939432144,
"learning_rate": 2.2175e-05,
"loss": 0.676,
"step": 1113
},
{
"epoch": 1.5876720260098873,
"grad_norm": 0.11865833401679993,
"learning_rate": 2.215e-05,
"loss": 0.7425,
"step": 1114
},
{
"epoch": 1.5890972253151028,
"grad_norm": 0.11480837315320969,
"learning_rate": 2.2125000000000002e-05,
"loss": 0.7614,
"step": 1115
},
{
"epoch": 1.590522424620318,
"grad_norm": 0.12709598243236542,
"learning_rate": 2.2100000000000002e-05,
"loss": 0.8548,
"step": 1116
},
{
"epoch": 1.5919476239255332,
"grad_norm": 0.11570257693529129,
"learning_rate": 2.2075e-05,
"loss": 0.7607,
"step": 1117
},
{
"epoch": 1.5933728232307487,
"grad_norm": 0.11411630362272263,
"learning_rate": 2.205e-05,
"loss": 0.7413,
"step": 1118
},
{
"epoch": 1.5947980225359641,
"grad_norm": 0.11640138924121857,
"learning_rate": 2.2025e-05,
"loss": 0.7484,
"step": 1119
},
{
"epoch": 1.5962232218411794,
"grad_norm": 0.12499689310789108,
"learning_rate": 2.2000000000000003e-05,
"loss": 0.8315,
"step": 1120
},
{
"epoch": 1.5976484211463946,
"grad_norm": 0.12135297805070877,
"learning_rate": 2.1975000000000002e-05,
"loss": 0.8407,
"step": 1121
},
{
"epoch": 1.59907362045161,
"grad_norm": 0.12362591922283173,
"learning_rate": 2.195e-05,
"loss": 0.7397,
"step": 1122
},
{
"epoch": 1.6004988197568255,
"grad_norm": 0.1172281950712204,
"learning_rate": 2.1925e-05,
"loss": 0.7697,
"step": 1123
},
{
"epoch": 1.6019240190620407,
"grad_norm": 0.12606358528137207,
"learning_rate": 2.19e-05,
"loss": 0.7374,
"step": 1124
},
{
"epoch": 1.603349218367256,
"grad_norm": 0.10724998265504837,
"learning_rate": 2.1875e-05,
"loss": 0.6447,
"step": 1125
},
{
"epoch": 1.6047744176724714,
"grad_norm": 0.12473460286855698,
"learning_rate": 2.1850000000000003e-05,
"loss": 0.7192,
"step": 1126
},
{
"epoch": 1.6061996169776869,
"grad_norm": 0.11639761179685593,
"learning_rate": 2.1825000000000002e-05,
"loss": 0.747,
"step": 1127
},
{
"epoch": 1.607624816282902,
"grad_norm": 0.11176454275846481,
"learning_rate": 2.18e-05,
"loss": 0.7708,
"step": 1128
},
{
"epoch": 1.6090500155881173,
"grad_norm": 0.13046066462993622,
"learning_rate": 2.1775e-05,
"loss": 0.8589,
"step": 1129
},
{
"epoch": 1.6104752148933328,
"grad_norm": 0.10981936752796173,
"learning_rate": 2.175e-05,
"loss": 0.7328,
"step": 1130
},
{
"epoch": 1.6119004141985482,
"grad_norm": 0.11868731677532196,
"learning_rate": 2.1725e-05,
"loss": 0.7213,
"step": 1131
},
{
"epoch": 1.6133256135037635,
"grad_norm": 0.1088484451174736,
"learning_rate": 2.1700000000000002e-05,
"loss": 0.678,
"step": 1132
},
{
"epoch": 1.6147508128089787,
"grad_norm": 0.10352146625518799,
"learning_rate": 2.1675e-05,
"loss": 0.5654,
"step": 1133
},
{
"epoch": 1.6161760121141941,
"grad_norm": 0.1084115207195282,
"learning_rate": 2.165e-05,
"loss": 0.6513,
"step": 1134
},
{
"epoch": 1.6176012114194094,
"grad_norm": 0.1288776993751526,
"learning_rate": 2.1625e-05,
"loss": 0.744,
"step": 1135
},
{
"epoch": 1.6190264107246248,
"grad_norm": 0.11180362105369568,
"learning_rate": 2.16e-05,
"loss": 0.7266,
"step": 1136
},
{
"epoch": 1.62045161002984,
"grad_norm": 0.11752684414386749,
"learning_rate": 2.1575e-05,
"loss": 0.7544,
"step": 1137
},
{
"epoch": 1.6218768093350553,
"grad_norm": 0.1284864991903305,
"learning_rate": 2.1550000000000002e-05,
"loss": 0.797,
"step": 1138
},
{
"epoch": 1.6233020086402707,
"grad_norm": 0.11992134153842926,
"learning_rate": 2.1525e-05,
"loss": 0.7892,
"step": 1139
},
{
"epoch": 1.6247272079454862,
"grad_norm": 0.11159648001194,
"learning_rate": 2.15e-05,
"loss": 0.7511,
"step": 1140
},
{
"epoch": 1.6261524072507014,
"grad_norm": 0.12105632573366165,
"learning_rate": 2.1475e-05,
"loss": 0.8036,
"step": 1141
},
{
"epoch": 1.6275776065559167,
"grad_norm": 0.11925482004880905,
"learning_rate": 2.145e-05,
"loss": 0.7353,
"step": 1142
},
{
"epoch": 1.629002805861132,
"grad_norm": 0.1302722543478012,
"learning_rate": 2.1425e-05,
"loss": 0.779,
"step": 1143
},
{
"epoch": 1.6304280051663476,
"grad_norm": 0.11342183500528336,
"learning_rate": 2.1400000000000002e-05,
"loss": 0.7226,
"step": 1144
},
{
"epoch": 1.6318532044715628,
"grad_norm": 0.1540747582912445,
"learning_rate": 2.1375e-05,
"loss": 0.7433,
"step": 1145
},
{
"epoch": 1.633278403776778,
"grad_norm": 0.10765055567026138,
"learning_rate": 2.135e-05,
"loss": 0.686,
"step": 1146
},
{
"epoch": 1.6347036030819935,
"grad_norm": 0.11824098974466324,
"learning_rate": 2.1325e-05,
"loss": 0.8146,
"step": 1147
},
{
"epoch": 1.636128802387209,
"grad_norm": 0.11234360933303833,
"learning_rate": 2.13e-05,
"loss": 0.6943,
"step": 1148
},
{
"epoch": 1.6375540016924242,
"grad_norm": 0.12486449629068375,
"learning_rate": 2.1275000000000002e-05,
"loss": 0.8429,
"step": 1149
},
{
"epoch": 1.6389792009976394,
"grad_norm": 0.11341369897127151,
"learning_rate": 2.125e-05,
"loss": 0.7432,
"step": 1150
},
{
"epoch": 1.6404044003028548,
"grad_norm": 0.11858033388853073,
"learning_rate": 2.1225e-05,
"loss": 0.8066,
"step": 1151
},
{
"epoch": 1.6418295996080703,
"grad_norm": 0.12702523171901703,
"learning_rate": 2.12e-05,
"loss": 0.8349,
"step": 1152
},
{
"epoch": 1.6432547989132855,
"grad_norm": 0.10853750258684158,
"learning_rate": 2.1175e-05,
"loss": 0.6419,
"step": 1153
},
{
"epoch": 1.6446799982185007,
"grad_norm": 0.11756341904401779,
"learning_rate": 2.115e-05,
"loss": 0.8139,
"step": 1154
},
{
"epoch": 1.6461051975237162,
"grad_norm": 0.10983914136886597,
"learning_rate": 2.1125000000000002e-05,
"loss": 0.6477,
"step": 1155
},
{
"epoch": 1.6475303968289317,
"grad_norm": 0.10841131210327148,
"learning_rate": 2.11e-05,
"loss": 0.6422,
"step": 1156
},
{
"epoch": 1.6489555961341469,
"grad_norm": 0.11757591366767883,
"learning_rate": 2.1075e-05,
"loss": 0.8042,
"step": 1157
},
{
"epoch": 1.6503807954393621,
"grad_norm": 0.11589635908603668,
"learning_rate": 2.105e-05,
"loss": 0.7324,
"step": 1158
},
{
"epoch": 1.6518059947445776,
"grad_norm": 0.12278266251087189,
"learning_rate": 2.1025e-05,
"loss": 0.7909,
"step": 1159
},
{
"epoch": 1.653231194049793,
"grad_norm": 0.12580262124538422,
"learning_rate": 2.1e-05,
"loss": 0.8552,
"step": 1160
},
{
"epoch": 1.6546563933550082,
"grad_norm": 0.12269178032875061,
"learning_rate": 2.0975e-05,
"loss": 0.7632,
"step": 1161
},
{
"epoch": 1.6560815926602235,
"grad_norm": 0.12437185645103455,
"learning_rate": 2.095e-05,
"loss": 0.8039,
"step": 1162
},
{
"epoch": 1.657506791965439,
"grad_norm": 0.12049966305494308,
"learning_rate": 2.0925e-05,
"loss": 0.852,
"step": 1163
},
{
"epoch": 1.6589319912706544,
"grad_norm": 0.1199018657207489,
"learning_rate": 2.09e-05,
"loss": 0.8442,
"step": 1164
},
{
"epoch": 1.6603571905758696,
"grad_norm": 0.11515132337808609,
"learning_rate": 2.0875e-05,
"loss": 0.7385,
"step": 1165
},
{
"epoch": 1.6617823898810848,
"grad_norm": 0.11873182654380798,
"learning_rate": 2.085e-05,
"loss": 0.7578,
"step": 1166
},
{
"epoch": 1.6632075891863003,
"grad_norm": 0.11742768436670303,
"learning_rate": 2.0825e-05,
"loss": 0.7802,
"step": 1167
},
{
"epoch": 1.6646327884915157,
"grad_norm": 0.1316840499639511,
"learning_rate": 2.08e-05,
"loss": 0.9133,
"step": 1168
},
{
"epoch": 1.666057987796731,
"grad_norm": 0.11492247134447098,
"learning_rate": 2.0775e-05,
"loss": 0.6864,
"step": 1169
},
{
"epoch": 1.6674831871019462,
"grad_norm": 0.11681463569402695,
"learning_rate": 2.075e-05,
"loss": 0.7433,
"step": 1170
},
{
"epoch": 1.6689083864071617,
"grad_norm": 0.12047108262777328,
"learning_rate": 2.0725e-05,
"loss": 0.7276,
"step": 1171
},
{
"epoch": 1.6703335857123771,
"grad_norm": 0.10665176808834076,
"learning_rate": 2.07e-05,
"loss": 0.657,
"step": 1172
},
{
"epoch": 1.6717587850175923,
"grad_norm": 0.12145126610994339,
"learning_rate": 2.0675e-05,
"loss": 0.8263,
"step": 1173
},
{
"epoch": 1.6731839843228076,
"grad_norm": 0.11550833284854889,
"learning_rate": 2.065e-05,
"loss": 0.8267,
"step": 1174
},
{
"epoch": 1.674609183628023,
"grad_norm": 0.11100798100233078,
"learning_rate": 2.0625e-05,
"loss": 0.5986,
"step": 1175
},
{
"epoch": 1.6760343829332385,
"grad_norm": 0.11675282567739487,
"learning_rate": 2.06e-05,
"loss": 0.8138,
"step": 1176
},
{
"epoch": 1.6774595822384537,
"grad_norm": 0.11824329197406769,
"learning_rate": 2.0575e-05,
"loss": 0.7051,
"step": 1177
},
{
"epoch": 1.678884781543669,
"grad_norm": 0.12450709193944931,
"learning_rate": 2.055e-05,
"loss": 0.7949,
"step": 1178
},
{
"epoch": 1.6803099808488844,
"grad_norm": 0.11215358972549438,
"learning_rate": 2.0525e-05,
"loss": 0.6936,
"step": 1179
},
{
"epoch": 1.6817351801540998,
"grad_norm": 0.11467184126377106,
"learning_rate": 2.05e-05,
"loss": 0.7346,
"step": 1180
},
{
"epoch": 1.683160379459315,
"grad_norm": 0.11364340782165527,
"learning_rate": 2.0475e-05,
"loss": 0.7414,
"step": 1181
},
{
"epoch": 1.6845855787645303,
"grad_norm": 0.11684124916791916,
"learning_rate": 2.045e-05,
"loss": 0.6853,
"step": 1182
},
{
"epoch": 1.6860107780697455,
"grad_norm": 0.12584123015403748,
"learning_rate": 2.0425e-05,
"loss": 0.8101,
"step": 1183
},
{
"epoch": 1.687435977374961,
"grad_norm": 0.13136857748031616,
"learning_rate": 2.04e-05,
"loss": 0.8765,
"step": 1184
},
{
"epoch": 1.6888611766801764,
"grad_norm": 0.11868689209222794,
"learning_rate": 2.0375e-05,
"loss": 0.798,
"step": 1185
},
{
"epoch": 1.6902863759853917,
"grad_norm": 0.11631432175636292,
"learning_rate": 2.035e-05,
"loss": 0.7918,
"step": 1186
},
{
"epoch": 1.691711575290607,
"grad_norm": 0.13290460407733917,
"learning_rate": 2.0325e-05,
"loss": 0.782,
"step": 1187
},
{
"epoch": 1.6931367745958223,
"grad_norm": 0.11426626145839691,
"learning_rate": 2.0300000000000002e-05,
"loss": 0.7343,
"step": 1188
},
{
"epoch": 1.6945619739010378,
"grad_norm": 0.11654408276081085,
"learning_rate": 2.0275e-05,
"loss": 0.6684,
"step": 1189
},
{
"epoch": 1.695987173206253,
"grad_norm": 0.1184675469994545,
"learning_rate": 2.025e-05,
"loss": 0.7234,
"step": 1190
},
{
"epoch": 1.6974123725114683,
"grad_norm": 0.1210794672369957,
"learning_rate": 2.0225000000000004e-05,
"loss": 0.6063,
"step": 1191
},
{
"epoch": 1.6988375718166837,
"grad_norm": 0.1125306561589241,
"learning_rate": 2.0200000000000003e-05,
"loss": 0.6978,
"step": 1192
},
{
"epoch": 1.7002627711218992,
"grad_norm": 0.11815322935581207,
"learning_rate": 2.0175000000000003e-05,
"loss": 0.7817,
"step": 1193
},
{
"epoch": 1.7016879704271144,
"grad_norm": 0.12084371596574783,
"learning_rate": 2.0150000000000002e-05,
"loss": 0.8214,
"step": 1194
},
{
"epoch": 1.7031131697323296,
"grad_norm": 0.11437653750181198,
"learning_rate": 2.0125e-05,
"loss": 0.7134,
"step": 1195
},
{
"epoch": 1.704538369037545,
"grad_norm": 0.11765848100185394,
"learning_rate": 2.01e-05,
"loss": 0.7335,
"step": 1196
},
{
"epoch": 1.7059635683427605,
"grad_norm": 0.11822240054607391,
"learning_rate": 2.0075000000000003e-05,
"loss": 0.6657,
"step": 1197
},
{
"epoch": 1.7073887676479758,
"grad_norm": 0.1302330493927002,
"learning_rate": 2.0050000000000003e-05,
"loss": 0.8493,
"step": 1198
},
{
"epoch": 1.708813966953191,
"grad_norm": 0.12303894013166428,
"learning_rate": 2.0025000000000002e-05,
"loss": 0.8987,
"step": 1199
},
{
"epoch": 1.7102391662584064,
"grad_norm": 0.12197510153055191,
"learning_rate": 2e-05,
"loss": 0.7929,
"step": 1200
},
{
"epoch": 1.711664365563622,
"grad_norm": 0.11869043856859207,
"learning_rate": 1.9975e-05,
"loss": 0.7771,
"step": 1201
},
{
"epoch": 1.7130895648688371,
"grad_norm": 0.1199924498796463,
"learning_rate": 1.995e-05,
"loss": 0.749,
"step": 1202
},
{
"epoch": 1.7145147641740524,
"grad_norm": 0.11533213406801224,
"learning_rate": 1.9925000000000003e-05,
"loss": 0.7473,
"step": 1203
},
{
"epoch": 1.7159399634792678,
"grad_norm": 0.11309585720300674,
"learning_rate": 1.9900000000000003e-05,
"loss": 0.7346,
"step": 1204
},
{
"epoch": 1.7173651627844833,
"grad_norm": 0.115189328789711,
"learning_rate": 1.9875000000000002e-05,
"loss": 0.7252,
"step": 1205
},
{
"epoch": 1.7187903620896985,
"grad_norm": 0.11548563838005066,
"learning_rate": 1.985e-05,
"loss": 0.75,
"step": 1206
},
{
"epoch": 1.7202155613949137,
"grad_norm": 0.11706007272005081,
"learning_rate": 1.9825e-05,
"loss": 0.7493,
"step": 1207
},
{
"epoch": 1.7216407607001292,
"grad_norm": 0.1308816820383072,
"learning_rate": 1.9800000000000004e-05,
"loss": 0.9765,
"step": 1208
},
{
"epoch": 1.7230659600053446,
"grad_norm": 0.13562998175621033,
"learning_rate": 1.9775000000000003e-05,
"loss": 0.857,
"step": 1209
},
{
"epoch": 1.7244911593105599,
"grad_norm": 0.115278460085392,
"learning_rate": 1.9750000000000002e-05,
"loss": 0.6956,
"step": 1210
},
{
"epoch": 1.725916358615775,
"grad_norm": 0.12365888059139252,
"learning_rate": 1.9725000000000002e-05,
"loss": 0.8094,
"step": 1211
},
{
"epoch": 1.7273415579209905,
"grad_norm": 0.11262635141611099,
"learning_rate": 1.97e-05,
"loss": 0.7143,
"step": 1212
},
{
"epoch": 1.728766757226206,
"grad_norm": 0.11334698647260666,
"learning_rate": 1.9675e-05,
"loss": 0.7492,
"step": 1213
},
{
"epoch": 1.7301919565314212,
"grad_norm": 0.12654392421245575,
"learning_rate": 1.9650000000000003e-05,
"loss": 0.6904,
"step": 1214
},
{
"epoch": 1.7316171558366364,
"grad_norm": 0.12807974219322205,
"learning_rate": 1.9625000000000003e-05,
"loss": 0.7679,
"step": 1215
},
{
"epoch": 1.733042355141852,
"grad_norm": 0.12357012927532196,
"learning_rate": 1.9600000000000002e-05,
"loss": 0.8028,
"step": 1216
},
{
"epoch": 1.7344675544470674,
"grad_norm": 0.11295270174741745,
"learning_rate": 1.9575e-05,
"loss": 0.6739,
"step": 1217
},
{
"epoch": 1.7358927537522826,
"grad_norm": 0.11770445108413696,
"learning_rate": 1.955e-05,
"loss": 0.776,
"step": 1218
},
{
"epoch": 1.7373179530574978,
"grad_norm": 0.11651358008384705,
"learning_rate": 1.9525e-05,
"loss": 0.6756,
"step": 1219
},
{
"epoch": 1.7387431523627133,
"grad_norm": 0.11906799674034119,
"learning_rate": 1.9500000000000003e-05,
"loss": 0.8508,
"step": 1220
},
{
"epoch": 1.7401683516679287,
"grad_norm": 0.11876309663057327,
"learning_rate": 1.9475000000000002e-05,
"loss": 0.709,
"step": 1221
},
{
"epoch": 1.741593550973144,
"grad_norm": 0.12379126250743866,
"learning_rate": 1.9450000000000002e-05,
"loss": 0.8298,
"step": 1222
},
{
"epoch": 1.7430187502783592,
"grad_norm": 0.12954673171043396,
"learning_rate": 1.9425e-05,
"loss": 0.8344,
"step": 1223
},
{
"epoch": 1.7444439495835746,
"grad_norm": 0.11757223308086395,
"learning_rate": 1.94e-05,
"loss": 0.8091,
"step": 1224
},
{
"epoch": 1.74586914888879,
"grad_norm": 0.1115206778049469,
"learning_rate": 1.9375e-05,
"loss": 0.7179,
"step": 1225
},
{
"epoch": 1.7472943481940053,
"grad_norm": 0.11148568987846375,
"learning_rate": 1.9350000000000003e-05,
"loss": 0.737,
"step": 1226
},
{
"epoch": 1.7487195474992205,
"grad_norm": 0.11569315195083618,
"learning_rate": 1.9325000000000002e-05,
"loss": 0.8104,
"step": 1227
},
{
"epoch": 1.7501447468044358,
"grad_norm": 0.10935363918542862,
"learning_rate": 1.93e-05,
"loss": 0.6894,
"step": 1228
},
{
"epoch": 1.7515699461096512,
"grad_norm": 0.11655670404434204,
"learning_rate": 1.9275e-05,
"loss": 0.7216,
"step": 1229
},
{
"epoch": 1.7529951454148667,
"grad_norm": 0.12681761384010315,
"learning_rate": 1.925e-05,
"loss": 0.8627,
"step": 1230
},
{
"epoch": 1.754420344720082,
"grad_norm": 0.12221668660640717,
"learning_rate": 1.9225e-05,
"loss": 0.7943,
"step": 1231
},
{
"epoch": 1.7558455440252971,
"grad_norm": 0.11506941169500351,
"learning_rate": 1.9200000000000003e-05,
"loss": 0.8125,
"step": 1232
},
{
"epoch": 1.7572707433305126,
"grad_norm": 0.12049120664596558,
"learning_rate": 1.9175000000000002e-05,
"loss": 0.8164,
"step": 1233
},
{
"epoch": 1.758695942635728,
"grad_norm": 0.11663015931844711,
"learning_rate": 1.915e-05,
"loss": 0.7075,
"step": 1234
},
{
"epoch": 1.7601211419409433,
"grad_norm": 0.11724608391523361,
"learning_rate": 1.9125e-05,
"loss": 0.7388,
"step": 1235
},
{
"epoch": 1.7615463412461585,
"grad_norm": 0.11687958240509033,
"learning_rate": 1.91e-05,
"loss": 0.6914,
"step": 1236
},
{
"epoch": 1.762971540551374,
"grad_norm": 0.11852959543466568,
"learning_rate": 1.9075000000000003e-05,
"loss": 0.704,
"step": 1237
},
{
"epoch": 1.7643967398565894,
"grad_norm": 0.11464525759220123,
"learning_rate": 1.9050000000000002e-05,
"loss": 0.7366,
"step": 1238
},
{
"epoch": 1.7658219391618046,
"grad_norm": 0.12063807994127274,
"learning_rate": 1.9025e-05,
"loss": 0.7495,
"step": 1239
},
{
"epoch": 1.7672471384670199,
"grad_norm": 0.10998585820198059,
"learning_rate": 1.9e-05,
"loss": 0.716,
"step": 1240
},
{
"epoch": 1.7686723377722353,
"grad_norm": 0.12558630108833313,
"learning_rate": 1.8975e-05,
"loss": 0.7944,
"step": 1241
},
{
"epoch": 1.7700975370774508,
"grad_norm": 0.11636888980865479,
"learning_rate": 1.895e-05,
"loss": 0.805,
"step": 1242
},
{
"epoch": 1.771522736382666,
"grad_norm": 0.11274611204862595,
"learning_rate": 1.8925000000000003e-05,
"loss": 0.7282,
"step": 1243
},
{
"epoch": 1.7729479356878812,
"grad_norm": 0.12306196242570877,
"learning_rate": 1.8900000000000002e-05,
"loss": 0.8264,
"step": 1244
},
{
"epoch": 1.7743731349930967,
"grad_norm": 0.11455065757036209,
"learning_rate": 1.8875e-05,
"loss": 0.66,
"step": 1245
},
{
"epoch": 1.7757983342983121,
"grad_norm": 0.12018316239118576,
"learning_rate": 1.885e-05,
"loss": 0.7686,
"step": 1246
},
{
"epoch": 1.7772235336035274,
"grad_norm": 0.11516337096691132,
"learning_rate": 1.8825e-05,
"loss": 0.7669,
"step": 1247
},
{
"epoch": 1.7786487329087426,
"grad_norm": 0.12667624652385712,
"learning_rate": 1.88e-05,
"loss": 0.8361,
"step": 1248
},
{
"epoch": 1.780073932213958,
"grad_norm": 0.1274881660938263,
"learning_rate": 1.8775000000000002e-05,
"loss": 0.9603,
"step": 1249
},
{
"epoch": 1.7814991315191735,
"grad_norm": 0.11917906999588013,
"learning_rate": 1.8750000000000002e-05,
"loss": 0.7389,
"step": 1250
},
{
"epoch": 1.7829243308243887,
"grad_norm": 0.11658228933811188,
"learning_rate": 1.8725e-05,
"loss": 0.7439,
"step": 1251
},
{
"epoch": 1.784349530129604,
"grad_norm": 0.10840719938278198,
"learning_rate": 1.87e-05,
"loss": 0.7,
"step": 1252
},
{
"epoch": 1.7857747294348194,
"grad_norm": 0.12013845890760422,
"learning_rate": 1.8675e-05,
"loss": 0.7422,
"step": 1253
},
{
"epoch": 1.7871999287400349,
"grad_norm": 0.11224650591611862,
"learning_rate": 1.865e-05,
"loss": 0.6432,
"step": 1254
},
{
"epoch": 1.78862512804525,
"grad_norm": 0.12337217479944229,
"learning_rate": 1.8625000000000002e-05,
"loss": 0.8003,
"step": 1255
},
{
"epoch": 1.7900503273504653,
"grad_norm": 0.11573939770460129,
"learning_rate": 1.86e-05,
"loss": 0.7589,
"step": 1256
},
{
"epoch": 1.7914755266556808,
"grad_norm": 0.1214504987001419,
"learning_rate": 1.8575e-05,
"loss": 0.7923,
"step": 1257
},
{
"epoch": 1.7929007259608962,
"grad_norm": 0.11900078505277634,
"learning_rate": 1.855e-05,
"loss": 0.7524,
"step": 1258
},
{
"epoch": 1.7943259252661115,
"grad_norm": 0.11382339894771576,
"learning_rate": 1.8525e-05,
"loss": 0.817,
"step": 1259
},
{
"epoch": 1.7957511245713267,
"grad_norm": 0.10705025494098663,
"learning_rate": 1.85e-05,
"loss": 0.7123,
"step": 1260
},
{
"epoch": 1.7971763238765421,
"grad_norm": 0.11555399000644684,
"learning_rate": 1.8475000000000002e-05,
"loss": 0.79,
"step": 1261
},
{
"epoch": 1.7986015231817576,
"grad_norm": 0.11513382196426392,
"learning_rate": 1.845e-05,
"loss": 0.7243,
"step": 1262
},
{
"epoch": 1.8000267224869728,
"grad_norm": 0.11477670818567276,
"learning_rate": 1.8425e-05,
"loss": 0.6683,
"step": 1263
},
{
"epoch": 1.801451921792188,
"grad_norm": 0.1210535317659378,
"learning_rate": 1.84e-05,
"loss": 0.773,
"step": 1264
},
{
"epoch": 1.8028771210974035,
"grad_norm": 0.11059469729661942,
"learning_rate": 1.8375e-05,
"loss": 0.6793,
"step": 1265
},
{
"epoch": 1.804302320402619,
"grad_norm": 0.11386141926050186,
"learning_rate": 1.8350000000000002e-05,
"loss": 0.7924,
"step": 1266
},
{
"epoch": 1.8057275197078342,
"grad_norm": 0.107034832239151,
"learning_rate": 1.8325e-05,
"loss": 0.6633,
"step": 1267
},
{
"epoch": 1.8071527190130494,
"grad_norm": 0.12659116089344025,
"learning_rate": 1.83e-05,
"loss": 0.8098,
"step": 1268
},
{
"epoch": 1.8085779183182649,
"grad_norm": 0.12415776401758194,
"learning_rate": 1.8275e-05,
"loss": 0.8356,
"step": 1269
},
{
"epoch": 1.8100031176234803,
"grad_norm": 0.11433088034391403,
"learning_rate": 1.825e-05,
"loss": 0.7736,
"step": 1270
},
{
"epoch": 1.8114283169286955,
"grad_norm": 0.11597501486539841,
"learning_rate": 1.8225e-05,
"loss": 0.7486,
"step": 1271
},
{
"epoch": 1.8128535162339108,
"grad_norm": 0.11873357743024826,
"learning_rate": 1.8200000000000002e-05,
"loss": 0.7487,
"step": 1272
},
{
"epoch": 1.8142787155391262,
"grad_norm": 0.1227918341755867,
"learning_rate": 1.8175e-05,
"loss": 0.7322,
"step": 1273
},
{
"epoch": 1.8157039148443415,
"grad_norm": 0.1198427826166153,
"learning_rate": 1.815e-05,
"loss": 0.7837,
"step": 1274
},
{
"epoch": 1.817129114149557,
"grad_norm": 0.1180831640958786,
"learning_rate": 1.8125e-05,
"loss": 0.8058,
"step": 1275
},
{
"epoch": 1.8185543134547721,
"grad_norm": 0.11378481239080429,
"learning_rate": 1.81e-05,
"loss": 0.7631,
"step": 1276
},
{
"epoch": 1.8199795127599874,
"grad_norm": 0.12430572509765625,
"learning_rate": 1.8075e-05,
"loss": 0.8437,
"step": 1277
},
{
"epoch": 1.8214047120652028,
"grad_norm": 0.11852643638849258,
"learning_rate": 1.805e-05,
"loss": 0.8014,
"step": 1278
},
{
"epoch": 1.8228299113704183,
"grad_norm": 0.10726958513259888,
"learning_rate": 1.8025e-05,
"loss": 0.6771,
"step": 1279
},
{
"epoch": 1.8242551106756335,
"grad_norm": 0.10945148766040802,
"learning_rate": 1.8e-05,
"loss": 0.6468,
"step": 1280
},
{
"epoch": 1.8256803099808487,
"grad_norm": 0.11965233087539673,
"learning_rate": 1.7975e-05,
"loss": 0.7017,
"step": 1281
},
{
"epoch": 1.8271055092860642,
"grad_norm": 0.11906860768795013,
"learning_rate": 1.795e-05,
"loss": 0.7309,
"step": 1282
},
{
"epoch": 1.8285307085912796,
"grad_norm": 0.12411758303642273,
"learning_rate": 1.7925e-05,
"loss": 0.8734,
"step": 1283
},
{
"epoch": 1.8299559078964949,
"grad_norm": 0.11309721320867538,
"learning_rate": 1.79e-05,
"loss": 0.7488,
"step": 1284
},
{
"epoch": 1.83138110720171,
"grad_norm": 0.11379046738147736,
"learning_rate": 1.7875e-05,
"loss": 0.7686,
"step": 1285
},
{
"epoch": 1.8328063065069256,
"grad_norm": 0.1085650771856308,
"learning_rate": 1.785e-05,
"loss": 0.6619,
"step": 1286
},
{
"epoch": 1.834231505812141,
"grad_norm": 0.11642839014530182,
"learning_rate": 1.7825e-05,
"loss": 0.7677,
"step": 1287
},
{
"epoch": 1.8356567051173562,
"grad_norm": 0.11506713926792145,
"learning_rate": 1.78e-05,
"loss": 0.7625,
"step": 1288
},
{
"epoch": 1.8370819044225715,
"grad_norm": 0.11398710310459137,
"learning_rate": 1.7775e-05,
"loss": 0.7457,
"step": 1289
},
{
"epoch": 1.838507103727787,
"grad_norm": 0.1294632852077484,
"learning_rate": 1.775e-05,
"loss": 0.8519,
"step": 1290
},
{
"epoch": 1.8399323030330024,
"grad_norm": 0.1216592788696289,
"learning_rate": 1.7725e-05,
"loss": 0.8828,
"step": 1291
},
{
"epoch": 1.8413575023382176,
"grad_norm": 0.11567086726427078,
"learning_rate": 1.77e-05,
"loss": 0.7936,
"step": 1292
},
{
"epoch": 1.8427827016434328,
"grad_norm": 0.12294867634773254,
"learning_rate": 1.7675e-05,
"loss": 0.8505,
"step": 1293
},
{
"epoch": 1.8442079009486483,
"grad_norm": 0.11555781215429306,
"learning_rate": 1.765e-05,
"loss": 0.7565,
"step": 1294
},
{
"epoch": 1.8456331002538637,
"grad_norm": 0.11338488012552261,
"learning_rate": 1.7625e-05,
"loss": 0.6803,
"step": 1295
},
{
"epoch": 1.847058299559079,
"grad_norm": 0.12446663528680801,
"learning_rate": 1.76e-05,
"loss": 0.7886,
"step": 1296
},
{
"epoch": 1.8484834988642942,
"grad_norm": 0.1139151006937027,
"learning_rate": 1.7575e-05,
"loss": 0.7146,
"step": 1297
},
{
"epoch": 1.8499086981695096,
"grad_norm": 0.10957379639148712,
"learning_rate": 1.755e-05,
"loss": 0.6639,
"step": 1298
},
{
"epoch": 1.851333897474725,
"grad_norm": 0.11710165441036224,
"learning_rate": 1.7525e-05,
"loss": 0.6906,
"step": 1299
},
{
"epoch": 1.8527590967799403,
"grad_norm": 0.11123017966747284,
"learning_rate": 1.75e-05,
"loss": 0.6675,
"step": 1300
},
{
"epoch": 1.8541842960851556,
"grad_norm": 0.1142721101641655,
"learning_rate": 1.7475e-05,
"loss": 0.6965,
"step": 1301
},
{
"epoch": 1.855609495390371,
"grad_norm": 0.11140482127666473,
"learning_rate": 1.745e-05,
"loss": 0.6667,
"step": 1302
},
{
"epoch": 1.8570346946955865,
"grad_norm": 0.1206270381808281,
"learning_rate": 1.7425e-05,
"loss": 0.7525,
"step": 1303
},
{
"epoch": 1.8584598940008017,
"grad_norm": 0.11329346150159836,
"learning_rate": 1.74e-05,
"loss": 0.7327,
"step": 1304
},
{
"epoch": 1.859885093306017,
"grad_norm": 0.11181637644767761,
"learning_rate": 1.7375e-05,
"loss": 0.6996,
"step": 1305
},
{
"epoch": 1.8613102926112324,
"grad_norm": 0.11300571262836456,
"learning_rate": 1.7349999999999998e-05,
"loss": 0.7035,
"step": 1306
},
{
"epoch": 1.8627354919164478,
"grad_norm": 0.1209411546587944,
"learning_rate": 1.7325e-05,
"loss": 0.7219,
"step": 1307
},
{
"epoch": 1.864160691221663,
"grad_norm": 0.12239369750022888,
"learning_rate": 1.73e-05,
"loss": 0.7913,
"step": 1308
},
{
"epoch": 1.8655858905268783,
"grad_norm": 0.11422554403543472,
"learning_rate": 1.7275e-05,
"loss": 0.7676,
"step": 1309
},
{
"epoch": 1.8670110898320937,
"grad_norm": 0.11339714378118515,
"learning_rate": 1.725e-05,
"loss": 0.6834,
"step": 1310
},
{
"epoch": 1.8684362891373092,
"grad_norm": 0.11262977123260498,
"learning_rate": 1.7225e-05,
"loss": 0.7037,
"step": 1311
},
{
"epoch": 1.8698614884425244,
"grad_norm": 0.11199497431516647,
"learning_rate": 1.7199999999999998e-05,
"loss": 0.6267,
"step": 1312
},
{
"epoch": 1.8712866877477397,
"grad_norm": 0.12480831891298294,
"learning_rate": 1.7175e-05,
"loss": 0.7201,
"step": 1313
},
{
"epoch": 1.872711887052955,
"grad_norm": 0.12308057397603989,
"learning_rate": 1.7150000000000004e-05,
"loss": 0.7635,
"step": 1314
},
{
"epoch": 1.8741370863581706,
"grad_norm": 0.11173748970031738,
"learning_rate": 1.7125000000000003e-05,
"loss": 0.7077,
"step": 1315
},
{
"epoch": 1.8755622856633858,
"grad_norm": 0.11492527276277542,
"learning_rate": 1.7100000000000002e-05,
"loss": 0.7184,
"step": 1316
},
{
"epoch": 1.876987484968601,
"grad_norm": 0.12163175642490387,
"learning_rate": 1.7075e-05,
"loss": 0.7321,
"step": 1317
},
{
"epoch": 1.8784126842738165,
"grad_norm": 0.11297082901000977,
"learning_rate": 1.705e-05,
"loss": 0.7317,
"step": 1318
},
{
"epoch": 1.879837883579032,
"grad_norm": 0.1268317550420761,
"learning_rate": 1.7025e-05,
"loss": 0.7702,
"step": 1319
},
{
"epoch": 1.8812630828842472,
"grad_norm": 0.1205640584230423,
"learning_rate": 1.7000000000000003e-05,
"loss": 0.7726,
"step": 1320
},
{
"epoch": 1.8826882821894624,
"grad_norm": 0.11420892924070358,
"learning_rate": 1.6975000000000003e-05,
"loss": 0.7118,
"step": 1321
},
{
"epoch": 1.8841134814946776,
"grad_norm": 0.11650469154119492,
"learning_rate": 1.6950000000000002e-05,
"loss": 0.7273,
"step": 1322
},
{
"epoch": 1.885538680799893,
"grad_norm": 0.1077483668923378,
"learning_rate": 1.6925e-05,
"loss": 0.6672,
"step": 1323
},
{
"epoch": 1.8869638801051085,
"grad_norm": 0.11557702720165253,
"learning_rate": 1.69e-05,
"loss": 0.7005,
"step": 1324
},
{
"epoch": 1.8883890794103237,
"grad_norm": 0.11656571179628372,
"learning_rate": 1.6875000000000004e-05,
"loss": 0.7638,
"step": 1325
},
{
"epoch": 1.889814278715539,
"grad_norm": 0.11160217225551605,
"learning_rate": 1.6850000000000003e-05,
"loss": 0.6867,
"step": 1326
},
{
"epoch": 1.8912394780207544,
"grad_norm": 0.13571971654891968,
"learning_rate": 1.6825000000000002e-05,
"loss": 0.808,
"step": 1327
},
{
"epoch": 1.8926646773259699,
"grad_norm": 0.11478560417890549,
"learning_rate": 1.6800000000000002e-05,
"loss": 0.7052,
"step": 1328
},
{
"epoch": 1.894089876631185,
"grad_norm": 0.11757104098796844,
"learning_rate": 1.6775e-05,
"loss": 0.8061,
"step": 1329
},
{
"epoch": 1.8955150759364003,
"grad_norm": 0.10413039475679398,
"learning_rate": 1.675e-05,
"loss": 0.6964,
"step": 1330
},
{
"epoch": 1.8969402752416158,
"grad_norm": 0.1203950047492981,
"learning_rate": 1.6725000000000003e-05,
"loss": 0.7644,
"step": 1331
},
{
"epoch": 1.8983654745468312,
"grad_norm": 0.1287614107131958,
"learning_rate": 1.6700000000000003e-05,
"loss": 0.8097,
"step": 1332
},
{
"epoch": 1.8997906738520465,
"grad_norm": 0.12178017944097519,
"learning_rate": 1.6675000000000002e-05,
"loss": 0.792,
"step": 1333
},
{
"epoch": 1.9012158731572617,
"grad_norm": 0.11223037540912628,
"learning_rate": 1.665e-05,
"loss": 0.6732,
"step": 1334
},
{
"epoch": 1.9026410724624772,
"grad_norm": 0.11780444532632828,
"learning_rate": 1.6625e-05,
"loss": 0.8324,
"step": 1335
},
{
"epoch": 1.9040662717676926,
"grad_norm": 0.12306583672761917,
"learning_rate": 1.66e-05,
"loss": 0.6778,
"step": 1336
},
{
"epoch": 1.9054914710729078,
"grad_norm": 0.11490003019571304,
"learning_rate": 1.6575000000000003e-05,
"loss": 0.7704,
"step": 1337
},
{
"epoch": 1.906916670378123,
"grad_norm": 0.11581801623106003,
"learning_rate": 1.6550000000000002e-05,
"loss": 0.7704,
"step": 1338
},
{
"epoch": 1.9083418696833385,
"grad_norm": 0.11577301472425461,
"learning_rate": 1.6525000000000002e-05,
"loss": 0.7406,
"step": 1339
},
{
"epoch": 1.909767068988554,
"grad_norm": 0.1162189468741417,
"learning_rate": 1.65e-05,
"loss": 0.6722,
"step": 1340
},
{
"epoch": 1.9111922682937692,
"grad_norm": 0.11098510026931763,
"learning_rate": 1.6475e-05,
"loss": 0.728,
"step": 1341
},
{
"epoch": 1.9126174675989844,
"grad_norm": 0.11218460649251938,
"learning_rate": 1.645e-05,
"loss": 0.7434,
"step": 1342
},
{
"epoch": 1.9140426669041999,
"grad_norm": 0.11843094974756241,
"learning_rate": 1.6425000000000003e-05,
"loss": 0.7763,
"step": 1343
},
{
"epoch": 1.9154678662094153,
"grad_norm": 0.11646418273448944,
"learning_rate": 1.6400000000000002e-05,
"loss": 0.7182,
"step": 1344
},
{
"epoch": 1.9168930655146306,
"grad_norm": 0.1216135323047638,
"learning_rate": 1.6375e-05,
"loss": 0.7977,
"step": 1345
},
{
"epoch": 1.9183182648198458,
"grad_norm": 0.11630626022815704,
"learning_rate": 1.635e-05,
"loss": 0.7543,
"step": 1346
},
{
"epoch": 1.9197434641250612,
"grad_norm": 0.10721370577812195,
"learning_rate": 1.6325e-05,
"loss": 0.6646,
"step": 1347
},
{
"epoch": 1.9211686634302767,
"grad_norm": 0.12896528840065002,
"learning_rate": 1.63e-05,
"loss": 0.8023,
"step": 1348
},
{
"epoch": 1.922593862735492,
"grad_norm": 0.12321396172046661,
"learning_rate": 1.6275000000000003e-05,
"loss": 0.7823,
"step": 1349
},
{
"epoch": 1.9240190620407072,
"grad_norm": 0.11609542369842529,
"learning_rate": 1.6250000000000002e-05,
"loss": 0.7502,
"step": 1350
},
{
"epoch": 1.9254442613459226,
"grad_norm": 0.12276794761419296,
"learning_rate": 1.6225e-05,
"loss": 0.8565,
"step": 1351
},
{
"epoch": 1.926869460651138,
"grad_norm": 0.10274726152420044,
"learning_rate": 1.62e-05,
"loss": 0.6267,
"step": 1352
},
{
"epoch": 1.9282946599563533,
"grad_norm": 0.11378515511751175,
"learning_rate": 1.6175e-05,
"loss": 0.756,
"step": 1353
},
{
"epoch": 1.9297198592615685,
"grad_norm": 0.11715764552354813,
"learning_rate": 1.6150000000000003e-05,
"loss": 0.7742,
"step": 1354
},
{
"epoch": 1.931145058566784,
"grad_norm": 0.11469716578722,
"learning_rate": 1.6125000000000002e-05,
"loss": 0.7005,
"step": 1355
},
{
"epoch": 1.9325702578719994,
"grad_norm": 0.11489725857973099,
"learning_rate": 1.6100000000000002e-05,
"loss": 0.7391,
"step": 1356
},
{
"epoch": 1.9339954571772147,
"grad_norm": 0.11428959667682648,
"learning_rate": 1.6075e-05,
"loss": 0.7226,
"step": 1357
},
{
"epoch": 1.93542065648243,
"grad_norm": 0.11531893908977509,
"learning_rate": 1.605e-05,
"loss": 0.6902,
"step": 1358
},
{
"epoch": 1.9368458557876453,
"grad_norm": 0.12249966710805893,
"learning_rate": 1.6025e-05,
"loss": 0.7978,
"step": 1359
},
{
"epoch": 1.9382710550928608,
"grad_norm": 0.11142789572477341,
"learning_rate": 1.6000000000000003e-05,
"loss": 0.6981,
"step": 1360
},
{
"epoch": 1.939696254398076,
"grad_norm": 0.11207502335309982,
"learning_rate": 1.5975000000000002e-05,
"loss": 0.6957,
"step": 1361
},
{
"epoch": 1.9411214537032913,
"grad_norm": 0.11011394113302231,
"learning_rate": 1.595e-05,
"loss": 0.6529,
"step": 1362
},
{
"epoch": 1.9425466530085067,
"grad_norm": 0.12015828490257263,
"learning_rate": 1.5925e-05,
"loss": 0.7686,
"step": 1363
},
{
"epoch": 1.9439718523137222,
"grad_norm": 0.11104772239923477,
"learning_rate": 1.59e-05,
"loss": 0.6486,
"step": 1364
},
{
"epoch": 1.9453970516189374,
"grad_norm": 0.11970749497413635,
"learning_rate": 1.5875e-05,
"loss": 0.7987,
"step": 1365
},
{
"epoch": 1.9468222509241526,
"grad_norm": 0.11878058314323425,
"learning_rate": 1.5850000000000002e-05,
"loss": 0.7871,
"step": 1366
},
{
"epoch": 1.9482474502293678,
"grad_norm": 0.11635654419660568,
"learning_rate": 1.5825000000000002e-05,
"loss": 0.7659,
"step": 1367
},
{
"epoch": 1.9496726495345833,
"grad_norm": 0.11868441104888916,
"learning_rate": 1.58e-05,
"loss": 0.7162,
"step": 1368
},
{
"epoch": 1.9510978488397988,
"grad_norm": 0.12125667929649353,
"learning_rate": 1.5775e-05,
"loss": 0.7924,
"step": 1369
},
{
"epoch": 1.952523048145014,
"grad_norm": 0.13137130439281464,
"learning_rate": 1.575e-05,
"loss": 0.886,
"step": 1370
},
{
"epoch": 1.9539482474502292,
"grad_norm": 0.11672665923833847,
"learning_rate": 1.5725e-05,
"loss": 0.7022,
"step": 1371
},
{
"epoch": 1.9553734467554447,
"grad_norm": 0.1167367473244667,
"learning_rate": 1.5700000000000002e-05,
"loss": 0.7456,
"step": 1372
},
{
"epoch": 1.9567986460606601,
"grad_norm": 0.12303975224494934,
"learning_rate": 1.5675e-05,
"loss": 0.802,
"step": 1373
},
{
"epoch": 1.9582238453658753,
"grad_norm": 0.12283257395029068,
"learning_rate": 1.565e-05,
"loss": 0.8205,
"step": 1374
},
{
"epoch": 1.9596490446710906,
"grad_norm": 0.11862889677286148,
"learning_rate": 1.5625e-05,
"loss": 0.7463,
"step": 1375
},
{
"epoch": 1.961074243976306,
"grad_norm": 0.12448704987764359,
"learning_rate": 1.56e-05,
"loss": 0.7817,
"step": 1376
},
{
"epoch": 1.9624994432815215,
"grad_norm": 0.12315742671489716,
"learning_rate": 1.5575e-05,
"loss": 0.7188,
"step": 1377
},
{
"epoch": 1.9639246425867367,
"grad_norm": 0.11181633919477463,
"learning_rate": 1.5550000000000002e-05,
"loss": 0.6057,
"step": 1378
},
{
"epoch": 1.965349841891952,
"grad_norm": 0.12252115458250046,
"learning_rate": 1.5525e-05,
"loss": 0.7927,
"step": 1379
},
{
"epoch": 1.9667750411971674,
"grad_norm": 0.10547539591789246,
"learning_rate": 1.55e-05,
"loss": 0.5575,
"step": 1380
},
{
"epoch": 1.9682002405023828,
"grad_norm": 0.11923118680715561,
"learning_rate": 1.5475e-05,
"loss": 0.7897,
"step": 1381
},
{
"epoch": 1.969625439807598,
"grad_norm": 0.11893084645271301,
"learning_rate": 1.545e-05,
"loss": 0.722,
"step": 1382
},
{
"epoch": 1.9710506391128133,
"grad_norm": 0.12651361525058746,
"learning_rate": 1.5425000000000002e-05,
"loss": 0.8377,
"step": 1383
},
{
"epoch": 1.9724758384180288,
"grad_norm": 0.10511170327663422,
"learning_rate": 1.54e-05,
"loss": 0.6157,
"step": 1384
},
{
"epoch": 1.9739010377232442,
"grad_norm": 0.12200909107923508,
"learning_rate": 1.5375e-05,
"loss": 0.7678,
"step": 1385
},
{
"epoch": 1.9753262370284594,
"grad_norm": 0.11904618889093399,
"learning_rate": 1.535e-05,
"loss": 0.7318,
"step": 1386
},
{
"epoch": 1.9767514363336747,
"grad_norm": 0.11417367309331894,
"learning_rate": 1.5325e-05,
"loss": 0.7179,
"step": 1387
},
{
"epoch": 1.9781766356388901,
"grad_norm": 0.1127004623413086,
"learning_rate": 1.53e-05,
"loss": 0.7245,
"step": 1388
},
{
"epoch": 1.9796018349441056,
"grad_norm": 0.11270885169506073,
"learning_rate": 1.5275000000000002e-05,
"loss": 0.7453,
"step": 1389
},
{
"epoch": 1.9810270342493208,
"grad_norm": 0.11461874097585678,
"learning_rate": 1.525e-05,
"loss": 0.7346,
"step": 1390
},
{
"epoch": 1.982452233554536,
"grad_norm": 0.1171596348285675,
"learning_rate": 1.5225e-05,
"loss": 0.8075,
"step": 1391
},
{
"epoch": 1.9838774328597515,
"grad_norm": 0.11885005235671997,
"learning_rate": 1.52e-05,
"loss": 0.7848,
"step": 1392
},
{
"epoch": 1.985302632164967,
"grad_norm": 0.11367981135845184,
"learning_rate": 1.5175e-05,
"loss": 0.7534,
"step": 1393
},
{
"epoch": 1.9867278314701822,
"grad_norm": 0.11118461936712265,
"learning_rate": 1.515e-05,
"loss": 0.657,
"step": 1394
},
{
"epoch": 1.9881530307753974,
"grad_norm": 0.11886285245418549,
"learning_rate": 1.5125e-05,
"loss": 0.6616,
"step": 1395
},
{
"epoch": 1.9895782300806129,
"grad_norm": 0.13241873681545258,
"learning_rate": 1.51e-05,
"loss": 0.8516,
"step": 1396
},
{
"epoch": 1.9910034293858283,
"grad_norm": 0.12339285016059875,
"learning_rate": 1.5075e-05,
"loss": 0.8219,
"step": 1397
},
{
"epoch": 1.9924286286910435,
"grad_norm": 0.11771489679813385,
"learning_rate": 1.505e-05,
"loss": 0.7237,
"step": 1398
},
{
"epoch": 1.9938538279962588,
"grad_norm": 0.11778849363327026,
"learning_rate": 1.5025000000000001e-05,
"loss": 0.7489,
"step": 1399
},
{
"epoch": 1.9952790273014742,
"grad_norm": 0.1067933589220047,
"learning_rate": 1.5e-05,
"loss": 0.5696,
"step": 1400
},
{
"epoch": 1.9967042266066897,
"grad_norm": 0.11527703702449799,
"learning_rate": 1.4975e-05,
"loss": 0.7037,
"step": 1401
},
{
"epoch": 1.998129425911905,
"grad_norm": 0.11605516821146011,
"learning_rate": 1.4950000000000001e-05,
"loss": 0.8071,
"step": 1402
},
{
"epoch": 1.9995546252171201,
"grad_norm": 0.11824622750282288,
"learning_rate": 1.4925e-05,
"loss": 0.7754,
"step": 1403
},
{
"epoch": 2.0009798245223354,
"grad_norm": 0.12479126453399658,
"learning_rate": 1.49e-05,
"loss": 0.8783,
"step": 1404
},
{
"epoch": 2.002405023827551,
"grad_norm": 0.11088936775922775,
"learning_rate": 1.4875e-05,
"loss": 0.7036,
"step": 1405
},
{
"epoch": 2.0038302231327663,
"grad_norm": 0.11528785526752472,
"learning_rate": 1.485e-05,
"loss": 0.7549,
"step": 1406
},
{
"epoch": 2.0052554224379815,
"grad_norm": 0.12080977857112885,
"learning_rate": 1.4825e-05,
"loss": 0.8292,
"step": 1407
},
{
"epoch": 2.0066806217431967,
"grad_norm": 0.14161688089370728,
"learning_rate": 1.48e-05,
"loss": 0.864,
"step": 1408
},
{
"epoch": 2.0081058210484124,
"grad_norm": 0.12025702744722366,
"learning_rate": 1.4775e-05,
"loss": 0.7734,
"step": 1409
},
{
"epoch": 2.0095310203536276,
"grad_norm": 0.11836081743240356,
"learning_rate": 1.475e-05,
"loss": 0.6775,
"step": 1410
},
{
"epoch": 2.010956219658843,
"grad_norm": 0.11134908348321915,
"learning_rate": 1.4725e-05,
"loss": 0.6916,
"step": 1411
},
{
"epoch": 2.012381418964058,
"grad_norm": 0.11166395992040634,
"learning_rate": 1.47e-05,
"loss": 0.646,
"step": 1412
},
{
"epoch": 2.0138066182692738,
"grad_norm": 0.11349400132894516,
"learning_rate": 1.4675e-05,
"loss": 0.7185,
"step": 1413
},
{
"epoch": 2.015231817574489,
"grad_norm": 0.11489682644605637,
"learning_rate": 1.465e-05,
"loss": 0.7165,
"step": 1414
},
{
"epoch": 2.0166570168797042,
"grad_norm": 0.12211542576551437,
"learning_rate": 1.4625e-05,
"loss": 0.8108,
"step": 1415
},
{
"epoch": 2.0180822161849195,
"grad_norm": 0.10779068619012833,
"learning_rate": 1.4599999999999999e-05,
"loss": 0.6726,
"step": 1416
},
{
"epoch": 2.019507415490135,
"grad_norm": 0.11913087964057922,
"learning_rate": 1.4575e-05,
"loss": 0.782,
"step": 1417
},
{
"epoch": 2.0209326147953504,
"grad_norm": 0.11915663629770279,
"learning_rate": 1.455e-05,
"loss": 0.7616,
"step": 1418
},
{
"epoch": 2.0223578141005656,
"grad_norm": 0.11302046477794647,
"learning_rate": 1.4524999999999999e-05,
"loss": 0.7216,
"step": 1419
},
{
"epoch": 2.023783013405781,
"grad_norm": 0.11448400467634201,
"learning_rate": 1.45e-05,
"loss": 0.7061,
"step": 1420
},
{
"epoch": 2.0252082127109965,
"grad_norm": 0.13068683445453644,
"learning_rate": 1.4475e-05,
"loss": 0.8133,
"step": 1421
},
{
"epoch": 2.0266334120162117,
"grad_norm": 0.11377590149641037,
"learning_rate": 1.4449999999999999e-05,
"loss": 0.7559,
"step": 1422
},
{
"epoch": 2.028058611321427,
"grad_norm": 0.1144394725561142,
"learning_rate": 1.4425e-05,
"loss": 0.6592,
"step": 1423
},
{
"epoch": 2.029483810626642,
"grad_norm": 0.12672756612300873,
"learning_rate": 1.44e-05,
"loss": 0.8173,
"step": 1424
},
{
"epoch": 2.030909009931858,
"grad_norm": 0.11324042081832886,
"learning_rate": 1.4374999999999999e-05,
"loss": 0.7565,
"step": 1425
},
{
"epoch": 2.032334209237073,
"grad_norm": 0.11558470875024796,
"learning_rate": 1.435e-05,
"loss": 0.7597,
"step": 1426
},
{
"epoch": 2.0337594085422883,
"grad_norm": 0.1105930507183075,
"learning_rate": 1.4325e-05,
"loss": 0.6478,
"step": 1427
},
{
"epoch": 2.0351846078475035,
"grad_norm": 0.1114899069070816,
"learning_rate": 1.43e-05,
"loss": 0.7128,
"step": 1428
},
{
"epoch": 2.036609807152719,
"grad_norm": 0.10746341198682785,
"learning_rate": 1.4275e-05,
"loss": 0.6796,
"step": 1429
},
{
"epoch": 2.0380350064579345,
"grad_norm": 0.12986305356025696,
"learning_rate": 1.4249999999999999e-05,
"loss": 0.7724,
"step": 1430
},
{
"epoch": 2.0394602057631497,
"grad_norm": 0.12249688059091568,
"learning_rate": 1.4225e-05,
"loss": 0.6979,
"step": 1431
},
{
"epoch": 2.040885405068365,
"grad_norm": 0.12488153576850891,
"learning_rate": 1.42e-05,
"loss": 0.7864,
"step": 1432
},
{
"epoch": 2.0423106043735806,
"grad_norm": 0.10996521264314651,
"learning_rate": 1.4174999999999999e-05,
"loss": 0.6874,
"step": 1433
},
{
"epoch": 2.043735803678796,
"grad_norm": 0.11176054179668427,
"learning_rate": 1.415e-05,
"loss": 0.722,
"step": 1434
},
{
"epoch": 2.045161002984011,
"grad_norm": 0.11584016680717468,
"learning_rate": 1.4125e-05,
"loss": 0.7452,
"step": 1435
},
{
"epoch": 2.0465862022892263,
"grad_norm": 0.11588002741336823,
"learning_rate": 1.4099999999999999e-05,
"loss": 0.7491,
"step": 1436
},
{
"epoch": 2.0480114015944415,
"grad_norm": 0.115396648645401,
"learning_rate": 1.4075e-05,
"loss": 0.7181,
"step": 1437
},
{
"epoch": 2.049436600899657,
"grad_norm": 0.1259024441242218,
"learning_rate": 1.4050000000000003e-05,
"loss": 0.8212,
"step": 1438
},
{
"epoch": 2.0508618002048724,
"grad_norm": 0.11719755828380585,
"learning_rate": 1.4025000000000002e-05,
"loss": 0.7401,
"step": 1439
},
{
"epoch": 2.0522869995100876,
"grad_norm": 0.13028720021247864,
"learning_rate": 1.4000000000000001e-05,
"loss": 0.7184,
"step": 1440
},
{
"epoch": 2.053712198815303,
"grad_norm": 0.12556292116641998,
"learning_rate": 1.3975000000000003e-05,
"loss": 0.7715,
"step": 1441
},
{
"epoch": 2.0551373981205185,
"grad_norm": 0.11687686294317245,
"learning_rate": 1.3950000000000002e-05,
"loss": 0.7384,
"step": 1442
},
{
"epoch": 2.0565625974257338,
"grad_norm": 0.1204209178686142,
"learning_rate": 1.3925000000000001e-05,
"loss": 0.7651,
"step": 1443
},
{
"epoch": 2.057987796730949,
"grad_norm": 0.12702758610248566,
"learning_rate": 1.3900000000000002e-05,
"loss": 0.8488,
"step": 1444
},
{
"epoch": 2.0594129960361642,
"grad_norm": 0.12386943399906158,
"learning_rate": 1.3875000000000002e-05,
"loss": 0.8434,
"step": 1445
},
{
"epoch": 2.06083819534138,
"grad_norm": 0.11999662220478058,
"learning_rate": 1.3850000000000001e-05,
"loss": 0.8056,
"step": 1446
},
{
"epoch": 2.062263394646595,
"grad_norm": 0.1173747107386589,
"learning_rate": 1.3825000000000002e-05,
"loss": 0.7093,
"step": 1447
},
{
"epoch": 2.0636885939518104,
"grad_norm": 0.12630094587802887,
"learning_rate": 1.3800000000000002e-05,
"loss": 0.8145,
"step": 1448
},
{
"epoch": 2.0651137932570256,
"grad_norm": 0.1318366378545761,
"learning_rate": 1.3775000000000001e-05,
"loss": 0.8764,
"step": 1449
},
{
"epoch": 2.0665389925622413,
"grad_norm": 0.1189817264676094,
"learning_rate": 1.3750000000000002e-05,
"loss": 0.7808,
"step": 1450
},
{
"epoch": 2.0679641918674565,
"grad_norm": 0.12852776050567627,
"learning_rate": 1.3725000000000002e-05,
"loss": 0.8242,
"step": 1451
},
{
"epoch": 2.0693893911726717,
"grad_norm": 0.1179296150803566,
"learning_rate": 1.3700000000000001e-05,
"loss": 0.8224,
"step": 1452
},
{
"epoch": 2.070814590477887,
"grad_norm": 0.12006964534521103,
"learning_rate": 1.3675000000000002e-05,
"loss": 0.7155,
"step": 1453
},
{
"epoch": 2.0722397897831026,
"grad_norm": 0.11850495636463165,
"learning_rate": 1.3650000000000001e-05,
"loss": 0.8329,
"step": 1454
},
{
"epoch": 2.073664989088318,
"grad_norm": 0.107646144926548,
"learning_rate": 1.3625e-05,
"loss": 0.7061,
"step": 1455
},
{
"epoch": 2.075090188393533,
"grad_norm": 0.1252947747707367,
"learning_rate": 1.3600000000000002e-05,
"loss": 0.7666,
"step": 1456
},
{
"epoch": 2.0765153876987483,
"grad_norm": 0.1107422411441803,
"learning_rate": 1.3575000000000001e-05,
"loss": 0.6322,
"step": 1457
},
{
"epoch": 2.077940587003964,
"grad_norm": 0.12277202308177948,
"learning_rate": 1.3550000000000002e-05,
"loss": 0.8494,
"step": 1458
},
{
"epoch": 2.0793657863091792,
"grad_norm": 0.1135435402393341,
"learning_rate": 1.3525000000000002e-05,
"loss": 0.6783,
"step": 1459
},
{
"epoch": 2.0807909856143945,
"grad_norm": 0.11442642658948898,
"learning_rate": 1.3500000000000001e-05,
"loss": 0.7204,
"step": 1460
},
{
"epoch": 2.0822161849196097,
"grad_norm": 0.11424251645803452,
"learning_rate": 1.3475000000000002e-05,
"loss": 0.7309,
"step": 1461
},
{
"epoch": 2.0836413842248254,
"grad_norm": 0.10719011723995209,
"learning_rate": 1.3450000000000002e-05,
"loss": 0.6181,
"step": 1462
},
{
"epoch": 2.0850665835300406,
"grad_norm": 0.1075449138879776,
"learning_rate": 1.3425000000000001e-05,
"loss": 0.6775,
"step": 1463
},
{
"epoch": 2.086491782835256,
"grad_norm": 0.10613955557346344,
"learning_rate": 1.3400000000000002e-05,
"loss": 0.6608,
"step": 1464
},
{
"epoch": 2.087916982140471,
"grad_norm": 0.11520925909280777,
"learning_rate": 1.3375000000000002e-05,
"loss": 0.7492,
"step": 1465
},
{
"epoch": 2.0893421814456867,
"grad_norm": 0.11839079111814499,
"learning_rate": 1.3350000000000001e-05,
"loss": 0.7322,
"step": 1466
},
{
"epoch": 2.090767380750902,
"grad_norm": 0.11454327404499054,
"learning_rate": 1.3325000000000002e-05,
"loss": 0.6981,
"step": 1467
},
{
"epoch": 2.092192580056117,
"grad_norm": 0.11810775101184845,
"learning_rate": 1.3300000000000001e-05,
"loss": 0.7739,
"step": 1468
},
{
"epoch": 2.0936177793613324,
"grad_norm": 0.1320187896490097,
"learning_rate": 1.3275e-05,
"loss": 0.8309,
"step": 1469
},
{
"epoch": 2.095042978666548,
"grad_norm": 0.10732674598693848,
"learning_rate": 1.3250000000000002e-05,
"loss": 0.6125,
"step": 1470
},
{
"epoch": 2.0964681779717633,
"grad_norm": 0.11840767413377762,
"learning_rate": 1.3225000000000001e-05,
"loss": 0.674,
"step": 1471
},
{
"epoch": 2.0978933772769786,
"grad_norm": 0.11793968826532364,
"learning_rate": 1.32e-05,
"loss": 0.743,
"step": 1472
},
{
"epoch": 2.099318576582194,
"grad_norm": 0.11920617520809174,
"learning_rate": 1.3175000000000002e-05,
"loss": 0.7397,
"step": 1473
},
{
"epoch": 2.1007437758874095,
"grad_norm": 0.12798868119716644,
"learning_rate": 1.3150000000000001e-05,
"loss": 0.7953,
"step": 1474
},
{
"epoch": 2.1021689751926247,
"grad_norm": 0.12053003907203674,
"learning_rate": 1.3125e-05,
"loss": 0.8101,
"step": 1475
},
{
"epoch": 2.10359417449784,
"grad_norm": 0.11525367200374603,
"learning_rate": 1.3100000000000002e-05,
"loss": 0.6454,
"step": 1476
},
{
"epoch": 2.105019373803055,
"grad_norm": 0.12415064126253128,
"learning_rate": 1.3075000000000001e-05,
"loss": 0.7973,
"step": 1477
},
{
"epoch": 2.106444573108271,
"grad_norm": 0.11695541441440582,
"learning_rate": 1.305e-05,
"loss": 0.7763,
"step": 1478
},
{
"epoch": 2.107869772413486,
"grad_norm": 0.12200458347797394,
"learning_rate": 1.3025000000000002e-05,
"loss": 0.7779,
"step": 1479
},
{
"epoch": 2.1092949717187013,
"grad_norm": 0.12681321799755096,
"learning_rate": 1.3000000000000001e-05,
"loss": 0.8388,
"step": 1480
},
{
"epoch": 2.1107201710239165,
"grad_norm": 0.1264181137084961,
"learning_rate": 1.2975e-05,
"loss": 0.825,
"step": 1481
},
{
"epoch": 2.1121453703291317,
"grad_norm": 0.12091600894927979,
"learning_rate": 1.2950000000000001e-05,
"loss": 0.8026,
"step": 1482
},
{
"epoch": 2.1135705696343474,
"grad_norm": 0.11768314987421036,
"learning_rate": 1.2925e-05,
"loss": 0.7892,
"step": 1483
},
{
"epoch": 2.1149957689395626,
"grad_norm": 0.11952891200780869,
"learning_rate": 1.29e-05,
"loss": 0.7183,
"step": 1484
},
{
"epoch": 2.116420968244778,
"grad_norm": 0.12314397841691971,
"learning_rate": 1.2875000000000001e-05,
"loss": 0.8061,
"step": 1485
},
{
"epoch": 2.117846167549993,
"grad_norm": 0.1361514925956726,
"learning_rate": 1.285e-05,
"loss": 0.8051,
"step": 1486
},
{
"epoch": 2.119271366855209,
"grad_norm": 0.11860343813896179,
"learning_rate": 1.2825000000000002e-05,
"loss": 0.8088,
"step": 1487
},
{
"epoch": 2.120696566160424,
"grad_norm": 0.11546521633863449,
"learning_rate": 1.2800000000000001e-05,
"loss": 0.7102,
"step": 1488
},
{
"epoch": 2.1221217654656392,
"grad_norm": 0.1236782968044281,
"learning_rate": 1.2775e-05,
"loss": 0.8012,
"step": 1489
},
{
"epoch": 2.1235469647708545,
"grad_norm": 0.12327463179826736,
"learning_rate": 1.2750000000000002e-05,
"loss": 0.767,
"step": 1490
},
{
"epoch": 2.12497216407607,
"grad_norm": 0.12601208686828613,
"learning_rate": 1.2725000000000001e-05,
"loss": 0.753,
"step": 1491
},
{
"epoch": 2.1263973633812854,
"grad_norm": 0.11073155701160431,
"learning_rate": 1.27e-05,
"loss": 0.6253,
"step": 1492
},
{
"epoch": 2.1278225626865006,
"grad_norm": 0.1171952560544014,
"learning_rate": 1.2675000000000001e-05,
"loss": 0.7247,
"step": 1493
},
{
"epoch": 2.129247761991716,
"grad_norm": 0.11619804054498672,
"learning_rate": 1.2650000000000001e-05,
"loss": 0.7165,
"step": 1494
},
{
"epoch": 2.1306729612969315,
"grad_norm": 0.11245686560869217,
"learning_rate": 1.2625e-05,
"loss": 0.6525,
"step": 1495
},
{
"epoch": 2.1320981606021467,
"grad_norm": 0.13841381669044495,
"learning_rate": 1.2600000000000001e-05,
"loss": 0.9236,
"step": 1496
},
{
"epoch": 2.133523359907362,
"grad_norm": 0.12667906284332275,
"learning_rate": 1.2575e-05,
"loss": 0.7527,
"step": 1497
},
{
"epoch": 2.134948559212577,
"grad_norm": 0.11976878345012665,
"learning_rate": 1.255e-05,
"loss": 0.7915,
"step": 1498
},
{
"epoch": 2.136373758517793,
"grad_norm": 0.11504536122083664,
"learning_rate": 1.2525000000000001e-05,
"loss": 0.7421,
"step": 1499
},
{
"epoch": 2.137798957823008,
"grad_norm": 0.1265455186367035,
"learning_rate": 1.25e-05,
"loss": 0.882,
"step": 1500
},
{
"epoch": 2.1392241571282233,
"grad_norm": 0.11819656193256378,
"learning_rate": 1.2475e-05,
"loss": 0.7381,
"step": 1501
},
{
"epoch": 2.1406493564334386,
"grad_norm": 0.11757169663906097,
"learning_rate": 1.2450000000000001e-05,
"loss": 0.7778,
"step": 1502
},
{
"epoch": 2.1420745557386542,
"grad_norm": 0.12320396304130554,
"learning_rate": 1.2425e-05,
"loss": 0.6773,
"step": 1503
},
{
"epoch": 2.1434997550438695,
"grad_norm": 0.12814640998840332,
"learning_rate": 1.24e-05,
"loss": 0.7398,
"step": 1504
},
{
"epoch": 2.1449249543490847,
"grad_norm": 0.12207196652889252,
"learning_rate": 1.2375000000000001e-05,
"loss": 0.7376,
"step": 1505
},
{
"epoch": 2.1463501536543,
"grad_norm": 0.12162980437278748,
"learning_rate": 1.235e-05,
"loss": 0.826,
"step": 1506
},
{
"epoch": 2.1477753529595156,
"grad_norm": 0.11753570288419724,
"learning_rate": 1.2325e-05,
"loss": 0.8029,
"step": 1507
},
{
"epoch": 2.149200552264731,
"grad_norm": 0.12760865688323975,
"learning_rate": 1.23e-05,
"loss": 0.8388,
"step": 1508
},
{
"epoch": 2.150625751569946,
"grad_norm": 0.11775285005569458,
"learning_rate": 1.2275e-05,
"loss": 0.7024,
"step": 1509
},
{
"epoch": 2.1520509508751613,
"grad_norm": 0.12445250153541565,
"learning_rate": 1.225e-05,
"loss": 0.8326,
"step": 1510
},
{
"epoch": 2.153476150180377,
"grad_norm": 0.11945131421089172,
"learning_rate": 1.2225e-05,
"loss": 0.7012,
"step": 1511
},
{
"epoch": 2.154901349485592,
"grad_norm": 0.11524348706007004,
"learning_rate": 1.22e-05,
"loss": 0.6519,
"step": 1512
},
{
"epoch": 2.1563265487908074,
"grad_norm": 0.12231986969709396,
"learning_rate": 1.2175e-05,
"loss": 0.7588,
"step": 1513
},
{
"epoch": 2.1577517480960227,
"grad_norm": 0.12248896062374115,
"learning_rate": 1.215e-05,
"loss": 0.8316,
"step": 1514
},
{
"epoch": 2.1591769474012383,
"grad_norm": 0.11815195530653,
"learning_rate": 1.2125e-05,
"loss": 0.7419,
"step": 1515
},
{
"epoch": 2.1606021467064536,
"grad_norm": 0.11775035411119461,
"learning_rate": 1.2100000000000001e-05,
"loss": 0.802,
"step": 1516
},
{
"epoch": 2.162027346011669,
"grad_norm": 0.1312135010957718,
"learning_rate": 1.2075e-05,
"loss": 0.886,
"step": 1517
},
{
"epoch": 2.163452545316884,
"grad_norm": 0.11067631840705872,
"learning_rate": 1.205e-05,
"loss": 0.6706,
"step": 1518
},
{
"epoch": 2.1648777446220997,
"grad_norm": 0.11906652897596359,
"learning_rate": 1.2025000000000001e-05,
"loss": 0.7323,
"step": 1519
},
{
"epoch": 2.166302943927315,
"grad_norm": 0.1251925379037857,
"learning_rate": 1.2e-05,
"loss": 0.7595,
"step": 1520
},
{
"epoch": 2.16772814323253,
"grad_norm": 0.12256534397602081,
"learning_rate": 1.1975e-05,
"loss": 0.8008,
"step": 1521
},
{
"epoch": 2.1691533425377454,
"grad_norm": 0.12022333592176437,
"learning_rate": 1.195e-05,
"loss": 0.7324,
"step": 1522
},
{
"epoch": 2.1705785418429606,
"grad_norm": 0.11774138361215591,
"learning_rate": 1.1925e-05,
"loss": 0.7132,
"step": 1523
},
{
"epoch": 2.1720037411481763,
"grad_norm": 0.1149405911564827,
"learning_rate": 1.19e-05,
"loss": 0.6907,
"step": 1524
},
{
"epoch": 2.1734289404533915,
"grad_norm": 0.12193207442760468,
"learning_rate": 1.1875e-05,
"loss": 0.7536,
"step": 1525
},
{
"epoch": 2.1748541397586068,
"grad_norm": 0.11501070857048035,
"learning_rate": 1.185e-05,
"loss": 0.6846,
"step": 1526
},
{
"epoch": 2.1762793390638224,
"grad_norm": 0.11576826125383377,
"learning_rate": 1.1825e-05,
"loss": 0.6953,
"step": 1527
},
{
"epoch": 2.1777045383690377,
"grad_norm": 0.12076572328805923,
"learning_rate": 1.18e-05,
"loss": 0.6932,
"step": 1528
},
{
"epoch": 2.179129737674253,
"grad_norm": 0.11565462499856949,
"learning_rate": 1.1775e-05,
"loss": 0.75,
"step": 1529
},
{
"epoch": 2.180554936979468,
"grad_norm": 0.11860915273427963,
"learning_rate": 1.175e-05,
"loss": 0.7659,
"step": 1530
},
{
"epoch": 2.1819801362846833,
"grad_norm": 0.1218111589550972,
"learning_rate": 1.1725e-05,
"loss": 0.7759,
"step": 1531
},
{
"epoch": 2.183405335589899,
"grad_norm": 0.10861881822347641,
"learning_rate": 1.1700000000000001e-05,
"loss": 0.6285,
"step": 1532
},
{
"epoch": 2.1848305348951143,
"grad_norm": 0.11303700506687164,
"learning_rate": 1.1675000000000001e-05,
"loss": 0.6816,
"step": 1533
},
{
"epoch": 2.1862557342003295,
"grad_norm": 0.11598026007413864,
"learning_rate": 1.1650000000000002e-05,
"loss": 0.6909,
"step": 1534
},
{
"epoch": 2.1876809335055447,
"grad_norm": 0.11362384259700775,
"learning_rate": 1.1625000000000001e-05,
"loss": 0.6865,
"step": 1535
},
{
"epoch": 2.1891061328107604,
"grad_norm": 0.11769592761993408,
"learning_rate": 1.16e-05,
"loss": 0.6984,
"step": 1536
},
{
"epoch": 2.1905313321159756,
"grad_norm": 0.11079058051109314,
"learning_rate": 1.1575000000000002e-05,
"loss": 0.6694,
"step": 1537
},
{
"epoch": 2.191956531421191,
"grad_norm": 0.1249648779630661,
"learning_rate": 1.1550000000000001e-05,
"loss": 0.8489,
"step": 1538
},
{
"epoch": 2.193381730726406,
"grad_norm": 0.11719927936792374,
"learning_rate": 1.1525e-05,
"loss": 0.7736,
"step": 1539
},
{
"epoch": 2.1948069300316218,
"grad_norm": 0.11512032151222229,
"learning_rate": 1.1500000000000002e-05,
"loss": 0.6812,
"step": 1540
},
{
"epoch": 2.196232129336837,
"grad_norm": 0.12779206037521362,
"learning_rate": 1.1475000000000001e-05,
"loss": 0.8148,
"step": 1541
},
{
"epoch": 2.197657328642052,
"grad_norm": 0.12432851642370224,
"learning_rate": 1.145e-05,
"loss": 0.7645,
"step": 1542
},
{
"epoch": 2.1990825279472674,
"grad_norm": 0.11282522231340408,
"learning_rate": 1.1425000000000002e-05,
"loss": 0.6896,
"step": 1543
},
{
"epoch": 2.200507727252483,
"grad_norm": 0.11266729980707169,
"learning_rate": 1.1400000000000001e-05,
"loss": 0.7111,
"step": 1544
},
{
"epoch": 2.2019329265576983,
"grad_norm": 0.13082057237625122,
"learning_rate": 1.1375e-05,
"loss": 0.8309,
"step": 1545
},
{
"epoch": 2.2033581258629136,
"grad_norm": 0.12001019716262817,
"learning_rate": 1.1350000000000001e-05,
"loss": 0.7075,
"step": 1546
},
{
"epoch": 2.204783325168129,
"grad_norm": 0.1148969978094101,
"learning_rate": 1.1325e-05,
"loss": 0.7472,
"step": 1547
},
{
"epoch": 2.2062085244733445,
"grad_norm": 0.11718905717134476,
"learning_rate": 1.13e-05,
"loss": 0.7487,
"step": 1548
},
{
"epoch": 2.2076337237785597,
"grad_norm": 0.1215912476181984,
"learning_rate": 1.1275000000000001e-05,
"loss": 0.7311,
"step": 1549
},
{
"epoch": 2.209058923083775,
"grad_norm": 0.12704242765903473,
"learning_rate": 1.125e-05,
"loss": 0.8595,
"step": 1550
},
{
"epoch": 2.21048412238899,
"grad_norm": 0.12223290652036667,
"learning_rate": 1.1225e-05,
"loss": 0.7268,
"step": 1551
},
{
"epoch": 2.211909321694206,
"grad_norm": 0.11448406428098679,
"learning_rate": 1.1200000000000001e-05,
"loss": 0.7018,
"step": 1552
},
{
"epoch": 2.213334520999421,
"grad_norm": 0.11852296441793442,
"learning_rate": 1.1175e-05,
"loss": 0.7919,
"step": 1553
},
{
"epoch": 2.2147597203046363,
"grad_norm": 0.11791814118623734,
"learning_rate": 1.115e-05,
"loss": 0.7948,
"step": 1554
},
{
"epoch": 2.2161849196098515,
"grad_norm": 0.11913073807954788,
"learning_rate": 1.1125000000000001e-05,
"loss": 0.7551,
"step": 1555
},
{
"epoch": 2.217610118915067,
"grad_norm": 0.12008018791675568,
"learning_rate": 1.11e-05,
"loss": 0.794,
"step": 1556
},
{
"epoch": 2.2190353182202824,
"grad_norm": 0.11271677911281586,
"learning_rate": 1.1075e-05,
"loss": 0.668,
"step": 1557
},
{
"epoch": 2.2204605175254977,
"grad_norm": 0.12180522829294205,
"learning_rate": 1.1050000000000001e-05,
"loss": 0.7297,
"step": 1558
},
{
"epoch": 2.221885716830713,
"grad_norm": 0.12393065541982651,
"learning_rate": 1.1025e-05,
"loss": 0.7345,
"step": 1559
},
{
"epoch": 2.2233109161359286,
"grad_norm": 0.11478493362665176,
"learning_rate": 1.1000000000000001e-05,
"loss": 0.6471,
"step": 1560
},
{
"epoch": 2.224736115441144,
"grad_norm": 0.12193029373884201,
"learning_rate": 1.0975e-05,
"loss": 0.7766,
"step": 1561
},
{
"epoch": 2.226161314746359,
"grad_norm": 0.12564438581466675,
"learning_rate": 1.095e-05,
"loss": 0.8206,
"step": 1562
},
{
"epoch": 2.2275865140515743,
"grad_norm": 0.123799629509449,
"learning_rate": 1.0925000000000001e-05,
"loss": 0.8034,
"step": 1563
},
{
"epoch": 2.22901171335679,
"grad_norm": 0.12128764390945435,
"learning_rate": 1.09e-05,
"loss": 0.7638,
"step": 1564
},
{
"epoch": 2.230436912662005,
"grad_norm": 0.14379791915416718,
"learning_rate": 1.0875e-05,
"loss": 0.891,
"step": 1565
},
{
"epoch": 2.2318621119672204,
"grad_norm": 0.12576928734779358,
"learning_rate": 1.0850000000000001e-05,
"loss": 0.7485,
"step": 1566
},
{
"epoch": 2.2332873112724356,
"grad_norm": 0.11960528790950775,
"learning_rate": 1.0825e-05,
"loss": 0.7533,
"step": 1567
},
{
"epoch": 2.2347125105776513,
"grad_norm": 0.1273089498281479,
"learning_rate": 1.08e-05,
"loss": 0.7984,
"step": 1568
},
{
"epoch": 2.2361377098828665,
"grad_norm": 0.12562482059001923,
"learning_rate": 1.0775000000000001e-05,
"loss": 0.8571,
"step": 1569
},
{
"epoch": 2.2375629091880818,
"grad_norm": 0.12799841165542603,
"learning_rate": 1.075e-05,
"loss": 0.8763,
"step": 1570
},
{
"epoch": 2.238988108493297,
"grad_norm": 0.1117575541138649,
"learning_rate": 1.0725e-05,
"loss": 0.7309,
"step": 1571
},
{
"epoch": 2.240413307798512,
"grad_norm": 0.12499681115150452,
"learning_rate": 1.0700000000000001e-05,
"loss": 0.7826,
"step": 1572
},
{
"epoch": 2.241838507103728,
"grad_norm": 0.12500298023223877,
"learning_rate": 1.0675e-05,
"loss": 0.7852,
"step": 1573
},
{
"epoch": 2.243263706408943,
"grad_norm": 0.12016839534044266,
"learning_rate": 1.065e-05,
"loss": 0.7519,
"step": 1574
},
{
"epoch": 2.2446889057141584,
"grad_norm": 0.13093706965446472,
"learning_rate": 1.0625e-05,
"loss": 0.77,
"step": 1575
},
{
"epoch": 2.246114105019374,
"grad_norm": 0.12764617800712585,
"learning_rate": 1.06e-05,
"loss": 0.8072,
"step": 1576
},
{
"epoch": 2.2475393043245893,
"grad_norm": 0.11935148388147354,
"learning_rate": 1.0575e-05,
"loss": 0.6752,
"step": 1577
},
{
"epoch": 2.2489645036298045,
"grad_norm": 0.12015220522880554,
"learning_rate": 1.055e-05,
"loss": 0.7848,
"step": 1578
},
{
"epoch": 2.2503897029350197,
"grad_norm": 0.11768006533384323,
"learning_rate": 1.0525e-05,
"loss": 0.7271,
"step": 1579
},
{
"epoch": 2.251814902240235,
"grad_norm": 0.12421943992376328,
"learning_rate": 1.05e-05,
"loss": 0.7812,
"step": 1580
},
{
"epoch": 2.2532401015454506,
"grad_norm": 0.11917510628700256,
"learning_rate": 1.0475e-05,
"loss": 0.6853,
"step": 1581
},
{
"epoch": 2.254665300850666,
"grad_norm": 0.12405484914779663,
"learning_rate": 1.045e-05,
"loss": 0.8367,
"step": 1582
},
{
"epoch": 2.256090500155881,
"grad_norm": 0.12408018857240677,
"learning_rate": 1.0425e-05,
"loss": 0.7655,
"step": 1583
},
{
"epoch": 2.2575156994610968,
"grad_norm": 0.1322789192199707,
"learning_rate": 1.04e-05,
"loss": 0.7606,
"step": 1584
},
{
"epoch": 2.258940898766312,
"grad_norm": 0.10779939591884613,
"learning_rate": 1.0375e-05,
"loss": 0.6671,
"step": 1585
},
{
"epoch": 2.260366098071527,
"grad_norm": 0.12800702452659607,
"learning_rate": 1.035e-05,
"loss": 0.8216,
"step": 1586
},
{
"epoch": 2.2617912973767424,
"grad_norm": 0.13168492913246155,
"learning_rate": 1.0325e-05,
"loss": 0.8246,
"step": 1587
},
{
"epoch": 2.2632164966819577,
"grad_norm": 0.1147138699889183,
"learning_rate": 1.03e-05,
"loss": 0.687,
"step": 1588
},
{
"epoch": 2.2646416959871734,
"grad_norm": 0.12469549477100372,
"learning_rate": 1.0275e-05,
"loss": 0.7781,
"step": 1589
},
{
"epoch": 2.2660668952923886,
"grad_norm": 0.10783041268587112,
"learning_rate": 1.025e-05,
"loss": 0.6163,
"step": 1590
},
{
"epoch": 2.267492094597604,
"grad_norm": 0.11734215915203094,
"learning_rate": 1.0225e-05,
"loss": 0.7272,
"step": 1591
},
{
"epoch": 2.268917293902819,
"grad_norm": 0.11080688238143921,
"learning_rate": 1.02e-05,
"loss": 0.5814,
"step": 1592
},
{
"epoch": 2.2703424932080347,
"grad_norm": 0.11479774862527847,
"learning_rate": 1.0175e-05,
"loss": 0.6896,
"step": 1593
},
{
"epoch": 2.27176769251325,
"grad_norm": 0.12201329320669174,
"learning_rate": 1.0150000000000001e-05,
"loss": 0.7888,
"step": 1594
},
{
"epoch": 2.273192891818465,
"grad_norm": 0.12215454876422882,
"learning_rate": 1.0125e-05,
"loss": 0.7651,
"step": 1595
},
{
"epoch": 2.2746180911236804,
"grad_norm": 0.12559713423252106,
"learning_rate": 1.0100000000000002e-05,
"loss": 0.8581,
"step": 1596
},
{
"epoch": 2.276043290428896,
"grad_norm": 0.12606406211853027,
"learning_rate": 1.0075000000000001e-05,
"loss": 0.7455,
"step": 1597
},
{
"epoch": 2.2774684897341113,
"grad_norm": 0.1169300451874733,
"learning_rate": 1.005e-05,
"loss": 0.7946,
"step": 1598
},
{
"epoch": 2.2788936890393265,
"grad_norm": 0.12295825034379959,
"learning_rate": 1.0025000000000001e-05,
"loss": 0.7395,
"step": 1599
},
{
"epoch": 2.2803188883445418,
"grad_norm": 0.11682257801294327,
"learning_rate": 1e-05,
"loss": 0.753,
"step": 1600
},
{
"epoch": 2.2817440876497574,
"grad_norm": 0.11514255404472351,
"learning_rate": 9.975e-06,
"loss": 0.6616,
"step": 1601
},
{
"epoch": 2.2831692869549727,
"grad_norm": 0.12616537511348724,
"learning_rate": 9.950000000000001e-06,
"loss": 0.7614,
"step": 1602
},
{
"epoch": 2.284594486260188,
"grad_norm": 0.11529654264450073,
"learning_rate": 9.925e-06,
"loss": 0.6218,
"step": 1603
},
{
"epoch": 2.286019685565403,
"grad_norm": 0.12469761818647385,
"learning_rate": 9.900000000000002e-06,
"loss": 0.7057,
"step": 1604
},
{
"epoch": 2.287444884870619,
"grad_norm": 0.12557555735111237,
"learning_rate": 9.875000000000001e-06,
"loss": 0.8207,
"step": 1605
},
{
"epoch": 2.288870084175834,
"grad_norm": 0.11551600694656372,
"learning_rate": 9.85e-06,
"loss": 0.739,
"step": 1606
},
{
"epoch": 2.2902952834810493,
"grad_norm": 0.11563023179769516,
"learning_rate": 9.825000000000002e-06,
"loss": 0.7218,
"step": 1607
},
{
"epoch": 2.2917204827862645,
"grad_norm": 0.11785290390253067,
"learning_rate": 9.800000000000001e-06,
"loss": 0.7686,
"step": 1608
},
{
"epoch": 2.29314568209148,
"grad_norm": 0.11486709117889404,
"learning_rate": 9.775e-06,
"loss": 0.7254,
"step": 1609
},
{
"epoch": 2.2945708813966954,
"grad_norm": 0.1194198727607727,
"learning_rate": 9.750000000000002e-06,
"loss": 0.7413,
"step": 1610
},
{
"epoch": 2.2959960807019106,
"grad_norm": 0.11484915018081665,
"learning_rate": 9.725000000000001e-06,
"loss": 0.6978,
"step": 1611
},
{
"epoch": 2.297421280007126,
"grad_norm": 0.11842513829469681,
"learning_rate": 9.7e-06,
"loss": 0.741,
"step": 1612
},
{
"epoch": 2.298846479312341,
"grad_norm": 0.1397864669561386,
"learning_rate": 9.675000000000001e-06,
"loss": 0.7881,
"step": 1613
},
{
"epoch": 2.3002716786175568,
"grad_norm": 0.1275712549686432,
"learning_rate": 9.65e-06,
"loss": 0.7742,
"step": 1614
},
{
"epoch": 2.301696877922772,
"grad_norm": 0.1229950338602066,
"learning_rate": 9.625e-06,
"loss": 0.7695,
"step": 1615
},
{
"epoch": 2.3031220772279872,
"grad_norm": 0.12439840286970139,
"learning_rate": 9.600000000000001e-06,
"loss": 0.8782,
"step": 1616
},
{
"epoch": 2.304547276533203,
"grad_norm": 0.10799761116504669,
"learning_rate": 9.575e-06,
"loss": 0.6751,
"step": 1617
},
{
"epoch": 2.305972475838418,
"grad_norm": 0.12160013616085052,
"learning_rate": 9.55e-06,
"loss": 0.758,
"step": 1618
},
{
"epoch": 2.3073976751436334,
"grad_norm": 0.11878684908151627,
"learning_rate": 9.525000000000001e-06,
"loss": 0.733,
"step": 1619
},
{
"epoch": 2.3088228744488486,
"grad_norm": 0.11758676916360855,
"learning_rate": 9.5e-06,
"loss": 0.7144,
"step": 1620
},
{
"epoch": 2.310248073754064,
"grad_norm": 0.12013750523328781,
"learning_rate": 9.475e-06,
"loss": 0.7405,
"step": 1621
},
{
"epoch": 2.3116732730592795,
"grad_norm": 0.13890595734119415,
"learning_rate": 9.450000000000001e-06,
"loss": 0.8457,
"step": 1622
},
{
"epoch": 2.3130984723644947,
"grad_norm": 0.12639594078063965,
"learning_rate": 9.425e-06,
"loss": 0.8009,
"step": 1623
},
{
"epoch": 2.31452367166971,
"grad_norm": 0.1280781775712967,
"learning_rate": 9.4e-06,
"loss": 0.7908,
"step": 1624
},
{
"epoch": 2.3159488709749256,
"grad_norm": 0.12729118764400482,
"learning_rate": 9.375000000000001e-06,
"loss": 0.8407,
"step": 1625
},
{
"epoch": 2.317374070280141,
"grad_norm": 0.11954189836978912,
"learning_rate": 9.35e-06,
"loss": 0.7875,
"step": 1626
},
{
"epoch": 2.318799269585356,
"grad_norm": 0.14966493844985962,
"learning_rate": 9.325e-06,
"loss": 0.6676,
"step": 1627
},
{
"epoch": 2.3202244688905713,
"grad_norm": 0.12440977990627289,
"learning_rate": 9.3e-06,
"loss": 0.7499,
"step": 1628
},
{
"epoch": 2.3216496681957866,
"grad_norm": 0.13797077536582947,
"learning_rate": 9.275e-06,
"loss": 0.8734,
"step": 1629
},
{
"epoch": 2.3230748675010022,
"grad_norm": 0.12184900790452957,
"learning_rate": 9.25e-06,
"loss": 0.6987,
"step": 1630
},
{
"epoch": 2.3245000668062175,
"grad_norm": 0.12345797568559647,
"learning_rate": 9.225e-06,
"loss": 0.8149,
"step": 1631
},
{
"epoch": 2.3259252661114327,
"grad_norm": 0.12976861000061035,
"learning_rate": 9.2e-06,
"loss": 0.9041,
"step": 1632
},
{
"epoch": 2.327350465416648,
"grad_norm": 0.12925173342227936,
"learning_rate": 9.175000000000001e-06,
"loss": 0.8597,
"step": 1633
},
{
"epoch": 2.3287756647218636,
"grad_norm": 0.11982955783605576,
"learning_rate": 9.15e-06,
"loss": 0.7259,
"step": 1634
},
{
"epoch": 2.330200864027079,
"grad_norm": 0.11004996299743652,
"learning_rate": 9.125e-06,
"loss": 0.5955,
"step": 1635
},
{
"epoch": 2.331626063332294,
"grad_norm": 0.13262394070625305,
"learning_rate": 9.100000000000001e-06,
"loss": 0.8701,
"step": 1636
},
{
"epoch": 2.3330512626375093,
"grad_norm": 0.12708348035812378,
"learning_rate": 9.075e-06,
"loss": 0.8422,
"step": 1637
},
{
"epoch": 2.334476461942725,
"grad_norm": 0.11875199526548386,
"learning_rate": 9.05e-06,
"loss": 0.7583,
"step": 1638
},
{
"epoch": 2.33590166124794,
"grad_norm": 0.12294340133666992,
"learning_rate": 9.025e-06,
"loss": 0.7797,
"step": 1639
},
{
"epoch": 2.3373268605531554,
"grad_norm": 0.12166517227888107,
"learning_rate": 9e-06,
"loss": 0.7584,
"step": 1640
},
{
"epoch": 2.3387520598583706,
"grad_norm": 0.12379319220781326,
"learning_rate": 8.975e-06,
"loss": 0.8686,
"step": 1641
},
{
"epoch": 2.3401772591635863,
"grad_norm": 0.12286663055419922,
"learning_rate": 8.95e-06,
"loss": 0.7587,
"step": 1642
},
{
"epoch": 2.3416024584688016,
"grad_norm": 0.11421581357717514,
"learning_rate": 8.925e-06,
"loss": 0.6171,
"step": 1643
},
{
"epoch": 2.343027657774017,
"grad_norm": 0.11967427283525467,
"learning_rate": 8.9e-06,
"loss": 0.7807,
"step": 1644
},
{
"epoch": 2.344452857079232,
"grad_norm": 0.1162426769733429,
"learning_rate": 8.875e-06,
"loss": 0.7575,
"step": 1645
},
{
"epoch": 2.3458780563844477,
"grad_norm": 0.11273334920406342,
"learning_rate": 8.85e-06,
"loss": 0.6617,
"step": 1646
},
{
"epoch": 2.347303255689663,
"grad_norm": 0.11894617229700089,
"learning_rate": 8.825e-06,
"loss": 0.7863,
"step": 1647
},
{
"epoch": 2.348728454994878,
"grad_norm": 0.12167418003082275,
"learning_rate": 8.8e-06,
"loss": 0.783,
"step": 1648
},
{
"epoch": 2.3501536543000934,
"grad_norm": 0.1292978972196579,
"learning_rate": 8.775e-06,
"loss": 0.8341,
"step": 1649
},
{
"epoch": 2.351578853605309,
"grad_norm": 0.1268078088760376,
"learning_rate": 8.75e-06,
"loss": 0.8515,
"step": 1650
},
{
"epoch": 2.3530040529105243,
"grad_norm": 0.12533758580684662,
"learning_rate": 8.725e-06,
"loss": 0.7716,
"step": 1651
},
{
"epoch": 2.3544292522157395,
"grad_norm": 0.1212184950709343,
"learning_rate": 8.7e-06,
"loss": 0.7515,
"step": 1652
},
{
"epoch": 2.3558544515209547,
"grad_norm": 0.12197005748748779,
"learning_rate": 8.674999999999999e-06,
"loss": 0.6969,
"step": 1653
},
{
"epoch": 2.3572796508261704,
"grad_norm": 0.1268652081489563,
"learning_rate": 8.65e-06,
"loss": 0.769,
"step": 1654
},
{
"epoch": 2.3587048501313856,
"grad_norm": 0.11762691289186478,
"learning_rate": 8.625e-06,
"loss": 0.7033,
"step": 1655
},
{
"epoch": 2.360130049436601,
"grad_norm": 0.12427245080471039,
"learning_rate": 8.599999999999999e-06,
"loss": 0.7782,
"step": 1656
},
{
"epoch": 2.361555248741816,
"grad_norm": 0.11639199405908585,
"learning_rate": 8.575000000000002e-06,
"loss": 0.669,
"step": 1657
},
{
"epoch": 2.362980448047032,
"grad_norm": 0.1165226623415947,
"learning_rate": 8.550000000000001e-06,
"loss": 0.7022,
"step": 1658
},
{
"epoch": 2.364405647352247,
"grad_norm": 0.11844088137149811,
"learning_rate": 8.525e-06,
"loss": 0.7464,
"step": 1659
},
{
"epoch": 2.3658308466574622,
"grad_norm": 0.12533368170261383,
"learning_rate": 8.500000000000002e-06,
"loss": 0.8612,
"step": 1660
},
{
"epoch": 2.3672560459626775,
"grad_norm": 0.12981925904750824,
"learning_rate": 8.475000000000001e-06,
"loss": 0.7748,
"step": 1661
},
{
"epoch": 2.3686812452678927,
"grad_norm": 0.12725293636322021,
"learning_rate": 8.45e-06,
"loss": 0.8478,
"step": 1662
},
{
"epoch": 2.3701064445731084,
"grad_norm": 0.11386356502771378,
"learning_rate": 8.425000000000001e-06,
"loss": 0.7445,
"step": 1663
},
{
"epoch": 2.3715316438783236,
"grad_norm": 0.12211571633815765,
"learning_rate": 8.400000000000001e-06,
"loss": 0.7509,
"step": 1664
},
{
"epoch": 2.372956843183539,
"grad_norm": 0.11927679926156998,
"learning_rate": 8.375e-06,
"loss": 0.722,
"step": 1665
},
{
"epoch": 2.3743820424887545,
"grad_norm": 0.12333447486162186,
"learning_rate": 8.350000000000001e-06,
"loss": 0.7596,
"step": 1666
},
{
"epoch": 2.3758072417939697,
"grad_norm": 0.1089521273970604,
"learning_rate": 8.325e-06,
"loss": 0.7014,
"step": 1667
},
{
"epoch": 2.377232441099185,
"grad_norm": 0.12622588872909546,
"learning_rate": 8.3e-06,
"loss": 0.8427,
"step": 1668
},
{
"epoch": 2.3786576404044,
"grad_norm": 0.12404906004667282,
"learning_rate": 8.275000000000001e-06,
"loss": 0.7783,
"step": 1669
},
{
"epoch": 2.3800828397096154,
"grad_norm": 0.12706147134304047,
"learning_rate": 8.25e-06,
"loss": 0.8827,
"step": 1670
},
{
"epoch": 2.381508039014831,
"grad_norm": 0.11896945536136627,
"learning_rate": 8.225e-06,
"loss": 0.6955,
"step": 1671
},
{
"epoch": 2.3829332383200463,
"grad_norm": 0.11771317571401596,
"learning_rate": 8.200000000000001e-06,
"loss": 0.6743,
"step": 1672
},
{
"epoch": 2.3843584376252616,
"grad_norm": 0.1278369128704071,
"learning_rate": 8.175e-06,
"loss": 0.8391,
"step": 1673
},
{
"epoch": 2.3857836369304772,
"grad_norm": 0.1194535568356514,
"learning_rate": 8.15e-06,
"loss": 0.7186,
"step": 1674
},
{
"epoch": 2.3872088362356925,
"grad_norm": 0.13312692940235138,
"learning_rate": 8.125000000000001e-06,
"loss": 0.7934,
"step": 1675
},
{
"epoch": 2.3886340355409077,
"grad_norm": 0.11666008830070496,
"learning_rate": 8.1e-06,
"loss": 0.7656,
"step": 1676
},
{
"epoch": 2.390059234846123,
"grad_norm": 0.11676149070262909,
"learning_rate": 8.075000000000001e-06,
"loss": 0.7163,
"step": 1677
},
{
"epoch": 2.391484434151338,
"grad_norm": 0.11438217014074326,
"learning_rate": 8.050000000000001e-06,
"loss": 0.7174,
"step": 1678
},
{
"epoch": 2.392909633456554,
"grad_norm": 0.11360858380794525,
"learning_rate": 8.025e-06,
"loss": 0.6754,
"step": 1679
},
{
"epoch": 2.394334832761769,
"grad_norm": 0.13313031196594238,
"learning_rate": 8.000000000000001e-06,
"loss": 0.871,
"step": 1680
},
{
"epoch": 2.3957600320669843,
"grad_norm": 0.11977960169315338,
"learning_rate": 7.975e-06,
"loss": 0.7631,
"step": 1681
},
{
"epoch": 2.3971852313721995,
"grad_norm": 0.13392534852027893,
"learning_rate": 7.95e-06,
"loss": 0.8869,
"step": 1682
},
{
"epoch": 2.398610430677415,
"grad_norm": 0.12363595515489578,
"learning_rate": 7.925000000000001e-06,
"loss": 0.7996,
"step": 1683
},
{
"epoch": 2.4000356299826304,
"grad_norm": 0.11034916341304779,
"learning_rate": 7.9e-06,
"loss": 0.6389,
"step": 1684
},
{
"epoch": 2.4014608292878457,
"grad_norm": 0.12855437397956848,
"learning_rate": 7.875e-06,
"loss": 0.9039,
"step": 1685
},
{
"epoch": 2.402886028593061,
"grad_norm": 0.12384526431560516,
"learning_rate": 7.850000000000001e-06,
"loss": 0.8441,
"step": 1686
},
{
"epoch": 2.4043112278982766,
"grad_norm": 0.1235758513212204,
"learning_rate": 7.825e-06,
"loss": 0.8064,
"step": 1687
},
{
"epoch": 2.405736427203492,
"grad_norm": 0.11274658143520355,
"learning_rate": 7.8e-06,
"loss": 0.6277,
"step": 1688
},
{
"epoch": 2.407161626508707,
"grad_norm": 0.11616003513336182,
"learning_rate": 7.775000000000001e-06,
"loss": 0.5955,
"step": 1689
},
{
"epoch": 2.4085868258139222,
"grad_norm": 0.12322861701250076,
"learning_rate": 7.75e-06,
"loss": 0.82,
"step": 1690
},
{
"epoch": 2.410012025119138,
"grad_norm": 0.12352516502141953,
"learning_rate": 7.725e-06,
"loss": 0.8106,
"step": 1691
},
{
"epoch": 2.411437224424353,
"grad_norm": 0.11373526602983475,
"learning_rate": 7.7e-06,
"loss": 0.684,
"step": 1692
},
{
"epoch": 2.4128624237295684,
"grad_norm": 0.11835110932588577,
"learning_rate": 7.675e-06,
"loss": 0.7641,
"step": 1693
},
{
"epoch": 2.4142876230347836,
"grad_norm": 0.11987770348787308,
"learning_rate": 7.65e-06,
"loss": 0.7243,
"step": 1694
},
{
"epoch": 2.4157128223399993,
"grad_norm": 0.12252168357372284,
"learning_rate": 7.625e-06,
"loss": 0.6893,
"step": 1695
},
{
"epoch": 2.4171380216452145,
"grad_norm": 0.11639352142810822,
"learning_rate": 7.6e-06,
"loss": 0.707,
"step": 1696
},
{
"epoch": 2.4185632209504297,
"grad_norm": 0.12323253601789474,
"learning_rate": 7.575e-06,
"loss": 0.805,
"step": 1697
},
{
"epoch": 2.419988420255645,
"grad_norm": 0.11841978877782822,
"learning_rate": 7.55e-06,
"loss": 0.6948,
"step": 1698
},
{
"epoch": 2.4214136195608607,
"grad_norm": 0.11974740773439407,
"learning_rate": 7.525e-06,
"loss": 0.7446,
"step": 1699
},
{
"epoch": 2.422838818866076,
"grad_norm": 0.11730451881885529,
"learning_rate": 7.5e-06,
"loss": 0.6993,
"step": 1700
},
{
"epoch": 2.424264018171291,
"grad_norm": 0.12473763525485992,
"learning_rate": 7.4750000000000004e-06,
"loss": 0.7917,
"step": 1701
},
{
"epoch": 2.4256892174765063,
"grad_norm": 0.11867184191942215,
"learning_rate": 7.45e-06,
"loss": 0.7343,
"step": 1702
},
{
"epoch": 2.4271144167817216,
"grad_norm": 0.1147843450307846,
"learning_rate": 7.425e-06,
"loss": 0.696,
"step": 1703
},
{
"epoch": 2.4285396160869372,
"grad_norm": 0.1301279067993164,
"learning_rate": 7.4e-06,
"loss": 0.8316,
"step": 1704
},
{
"epoch": 2.4299648153921525,
"grad_norm": 0.12093105167150497,
"learning_rate": 7.375e-06,
"loss": 0.6746,
"step": 1705
},
{
"epoch": 2.4313900146973677,
"grad_norm": 0.12082387506961823,
"learning_rate": 7.35e-06,
"loss": 0.7262,
"step": 1706
},
{
"epoch": 2.4328152140025834,
"grad_norm": 0.12111669778823853,
"learning_rate": 7.325e-06,
"loss": 0.7678,
"step": 1707
},
{
"epoch": 2.4342404133077986,
"grad_norm": 0.1261623352766037,
"learning_rate": 7.2999999999999996e-06,
"loss": 0.8135,
"step": 1708
},
{
"epoch": 2.435665612613014,
"grad_norm": 0.12080813944339752,
"learning_rate": 7.275e-06,
"loss": 0.7215,
"step": 1709
},
{
"epoch": 2.437090811918229,
"grad_norm": 0.12469711899757385,
"learning_rate": 7.25e-06,
"loss": 0.8366,
"step": 1710
},
{
"epoch": 2.4385160112234443,
"grad_norm": 0.1178738996386528,
"learning_rate": 7.2249999999999994e-06,
"loss": 0.6865,
"step": 1711
},
{
"epoch": 2.43994121052866,
"grad_norm": 0.13311132788658142,
"learning_rate": 7.2e-06,
"loss": 0.8884,
"step": 1712
},
{
"epoch": 2.441366409833875,
"grad_norm": 0.11603731662034988,
"learning_rate": 7.175e-06,
"loss": 0.7161,
"step": 1713
},
{
"epoch": 2.4427916091390904,
"grad_norm": 0.12046404182910919,
"learning_rate": 7.15e-06,
"loss": 0.7942,
"step": 1714
},
{
"epoch": 2.444216808444306,
"grad_norm": 0.11505760997533798,
"learning_rate": 7.1249999999999995e-06,
"loss": 0.6453,
"step": 1715
},
{
"epoch": 2.4456420077495213,
"grad_norm": 0.1391943395137787,
"learning_rate": 7.1e-06,
"loss": 0.8448,
"step": 1716
},
{
"epoch": 2.4470672070547366,
"grad_norm": 0.13511300086975098,
"learning_rate": 7.075e-06,
"loss": 0.6891,
"step": 1717
},
{
"epoch": 2.448492406359952,
"grad_norm": 0.1206061914563179,
"learning_rate": 7.049999999999999e-06,
"loss": 0.7045,
"step": 1718
},
{
"epoch": 2.449917605665167,
"grad_norm": 0.12295015156269073,
"learning_rate": 7.025000000000001e-06,
"loss": 0.8128,
"step": 1719
},
{
"epoch": 2.4513428049703827,
"grad_norm": 0.12133322656154633,
"learning_rate": 7.000000000000001e-06,
"loss": 0.7565,
"step": 1720
},
{
"epoch": 2.452768004275598,
"grad_norm": 0.1251683533191681,
"learning_rate": 6.975000000000001e-06,
"loss": 0.7614,
"step": 1721
},
{
"epoch": 2.454193203580813,
"grad_norm": 0.12777367234230042,
"learning_rate": 6.950000000000001e-06,
"loss": 0.875,
"step": 1722
},
{
"epoch": 2.455618402886029,
"grad_norm": 0.12276291102170944,
"learning_rate": 6.925000000000001e-06,
"loss": 0.8173,
"step": 1723
},
{
"epoch": 2.457043602191244,
"grad_norm": 0.11498101055622101,
"learning_rate": 6.900000000000001e-06,
"loss": 0.6659,
"step": 1724
},
{
"epoch": 2.4584688014964593,
"grad_norm": 0.1250537484884262,
"learning_rate": 6.875000000000001e-06,
"loss": 0.737,
"step": 1725
},
{
"epoch": 2.4598940008016745,
"grad_norm": 0.12164362519979477,
"learning_rate": 6.8500000000000005e-06,
"loss": 0.79,
"step": 1726
},
{
"epoch": 2.4613192001068898,
"grad_norm": 0.12467821687459946,
"learning_rate": 6.825000000000001e-06,
"loss": 0.8659,
"step": 1727
},
{
"epoch": 2.4627443994121054,
"grad_norm": 0.11534330993890762,
"learning_rate": 6.800000000000001e-06,
"loss": 0.6905,
"step": 1728
},
{
"epoch": 2.4641695987173207,
"grad_norm": 0.1208868995308876,
"learning_rate": 6.775000000000001e-06,
"loss": 0.7486,
"step": 1729
},
{
"epoch": 2.465594798022536,
"grad_norm": 0.1111760213971138,
"learning_rate": 6.750000000000001e-06,
"loss": 0.6498,
"step": 1730
},
{
"epoch": 2.467019997327751,
"grad_norm": 0.1235458105802536,
"learning_rate": 6.725000000000001e-06,
"loss": 0.7426,
"step": 1731
},
{
"epoch": 2.468445196632967,
"grad_norm": 0.12159588932991028,
"learning_rate": 6.700000000000001e-06,
"loss": 0.8089,
"step": 1732
},
{
"epoch": 2.469870395938182,
"grad_norm": 0.12019369751214981,
"learning_rate": 6.6750000000000005e-06,
"loss": 0.8079,
"step": 1733
},
{
"epoch": 2.4712955952433973,
"grad_norm": 0.1312427669763565,
"learning_rate": 6.650000000000001e-06,
"loss": 0.8103,
"step": 1734
},
{
"epoch": 2.4727207945486125,
"grad_norm": 0.1223914697766304,
"learning_rate": 6.625000000000001e-06,
"loss": 0.7441,
"step": 1735
},
{
"epoch": 2.474145993853828,
"grad_norm": 0.1405949592590332,
"learning_rate": 6.6e-06,
"loss": 0.8079,
"step": 1736
},
{
"epoch": 2.4755711931590434,
"grad_norm": 0.12272985279560089,
"learning_rate": 6.5750000000000006e-06,
"loss": 0.7404,
"step": 1737
},
{
"epoch": 2.4769963924642586,
"grad_norm": 0.12396412342786789,
"learning_rate": 6.550000000000001e-06,
"loss": 0.7804,
"step": 1738
},
{
"epoch": 2.478421591769474,
"grad_norm": 0.11829429119825363,
"learning_rate": 6.525e-06,
"loss": 0.7606,
"step": 1739
},
{
"epoch": 2.4798467910746895,
"grad_norm": 0.11471271514892578,
"learning_rate": 6.5000000000000004e-06,
"loss": 0.6735,
"step": 1740
},
{
"epoch": 2.4812719903799048,
"grad_norm": 0.12272030115127563,
"learning_rate": 6.475000000000001e-06,
"loss": 0.7678,
"step": 1741
},
{
"epoch": 2.48269718968512,
"grad_norm": 0.1255359798669815,
"learning_rate": 6.45e-06,
"loss": 0.7327,
"step": 1742
},
{
"epoch": 2.484122388990335,
"grad_norm": 0.12281396239995956,
"learning_rate": 6.425e-06,
"loss": 0.7726,
"step": 1743
},
{
"epoch": 2.485547588295551,
"grad_norm": 0.11960241198539734,
"learning_rate": 6.4000000000000006e-06,
"loss": 0.6427,
"step": 1744
},
{
"epoch": 2.486972787600766,
"grad_norm": 0.12338385730981827,
"learning_rate": 6.375000000000001e-06,
"loss": 0.8268,
"step": 1745
},
{
"epoch": 2.4883979869059814,
"grad_norm": 0.1181061640381813,
"learning_rate": 6.35e-06,
"loss": 0.7701,
"step": 1746
},
{
"epoch": 2.4898231862111966,
"grad_norm": 0.1181604266166687,
"learning_rate": 6.3250000000000004e-06,
"loss": 0.676,
"step": 1747
},
{
"epoch": 2.4912483855164123,
"grad_norm": 0.11661811918020248,
"learning_rate": 6.300000000000001e-06,
"loss": 0.7034,
"step": 1748
},
{
"epoch": 2.4926735848216275,
"grad_norm": 0.11947411298751831,
"learning_rate": 6.275e-06,
"loss": 0.7375,
"step": 1749
},
{
"epoch": 2.4940987841268427,
"grad_norm": 0.11874453723430634,
"learning_rate": 6.25e-06,
"loss": 0.7974,
"step": 1750
},
{
"epoch": 2.495523983432058,
"grad_norm": 0.117217518389225,
"learning_rate": 6.2250000000000005e-06,
"loss": 0.7087,
"step": 1751
},
{
"epoch": 2.496949182737273,
"grad_norm": 0.11590299010276794,
"learning_rate": 6.2e-06,
"loss": 0.7112,
"step": 1752
},
{
"epoch": 2.498374382042489,
"grad_norm": 0.12025054544210434,
"learning_rate": 6.175e-06,
"loss": 0.7041,
"step": 1753
},
{
"epoch": 2.499799581347704,
"grad_norm": 0.1151423379778862,
"learning_rate": 6.15e-06,
"loss": 0.6121,
"step": 1754
},
{
"epoch": 2.5012247806529193,
"grad_norm": 0.1263328343629837,
"learning_rate": 6.125e-06,
"loss": 0.7851,
"step": 1755
},
{
"epoch": 2.502649979958135,
"grad_norm": 0.11996535956859589,
"learning_rate": 6.1e-06,
"loss": 0.7056,
"step": 1756
},
{
"epoch": 2.50407517926335,
"grad_norm": 0.12052210420370102,
"learning_rate": 6.075e-06,
"loss": 0.7063,
"step": 1757
},
{
"epoch": 2.5055003785685654,
"grad_norm": 0.12435757368803024,
"learning_rate": 6.0500000000000005e-06,
"loss": 0.886,
"step": 1758
},
{
"epoch": 2.5069255778737807,
"grad_norm": 0.11676619946956635,
"learning_rate": 6.025e-06,
"loss": 0.7048,
"step": 1759
},
{
"epoch": 2.508350777178996,
"grad_norm": 0.11727142333984375,
"learning_rate": 6e-06,
"loss": 0.7086,
"step": 1760
},
{
"epoch": 2.5097759764842116,
"grad_norm": 0.11802036315202713,
"learning_rate": 5.975e-06,
"loss": 0.684,
"step": 1761
},
{
"epoch": 2.511201175789427,
"grad_norm": 0.13360996544361115,
"learning_rate": 5.95e-06,
"loss": 0.7878,
"step": 1762
},
{
"epoch": 2.512626375094642,
"grad_norm": 0.1270720660686493,
"learning_rate": 5.925e-06,
"loss": 0.8207,
"step": 1763
},
{
"epoch": 2.5140515743998577,
"grad_norm": 0.11882425099611282,
"learning_rate": 5.9e-06,
"loss": 0.7868,
"step": 1764
},
{
"epoch": 2.515476773705073,
"grad_norm": 0.12095914036035538,
"learning_rate": 5.875e-06,
"loss": 0.6861,
"step": 1765
},
{
"epoch": 2.516901973010288,
"grad_norm": 0.12867845594882965,
"learning_rate": 5.850000000000001e-06,
"loss": 0.7394,
"step": 1766
},
{
"epoch": 2.5183271723155034,
"grad_norm": 0.12166421115398407,
"learning_rate": 5.825000000000001e-06,
"loss": 0.7448,
"step": 1767
},
{
"epoch": 2.5197523716207186,
"grad_norm": 0.13703720271587372,
"learning_rate": 5.8e-06,
"loss": 0.8185,
"step": 1768
},
{
"epoch": 2.5211775709259343,
"grad_norm": 0.12400788068771362,
"learning_rate": 5.775000000000001e-06,
"loss": 0.6927,
"step": 1769
},
{
"epoch": 2.5226027702311495,
"grad_norm": 0.13564196228981018,
"learning_rate": 5.750000000000001e-06,
"loss": 0.9506,
"step": 1770
},
{
"epoch": 2.5240279695363648,
"grad_norm": 0.11773041635751724,
"learning_rate": 5.725e-06,
"loss": 0.7143,
"step": 1771
},
{
"epoch": 2.5254531688415804,
"grad_norm": 0.12555238604545593,
"learning_rate": 5.7000000000000005e-06,
"loss": 0.7211,
"step": 1772
},
{
"epoch": 2.5268783681467957,
"grad_norm": 0.12316561490297318,
"learning_rate": 5.675000000000001e-06,
"loss": 0.7576,
"step": 1773
},
{
"epoch": 2.528303567452011,
"grad_norm": 0.1221129521727562,
"learning_rate": 5.65e-06,
"loss": 0.7636,
"step": 1774
},
{
"epoch": 2.529728766757226,
"grad_norm": 0.12478466331958771,
"learning_rate": 5.625e-06,
"loss": 0.7083,
"step": 1775
},
{
"epoch": 2.5311539660624414,
"grad_norm": 0.11639775335788727,
"learning_rate": 5.600000000000001e-06,
"loss": 0.6953,
"step": 1776
},
{
"epoch": 2.532579165367657,
"grad_norm": 0.12269164621829987,
"learning_rate": 5.575e-06,
"loss": 0.6731,
"step": 1777
},
{
"epoch": 2.5340043646728723,
"grad_norm": 0.12404763698577881,
"learning_rate": 5.55e-06,
"loss": 0.8149,
"step": 1778
},
{
"epoch": 2.5354295639780875,
"grad_norm": 0.12355664372444153,
"learning_rate": 5.5250000000000005e-06,
"loss": 0.7543,
"step": 1779
},
{
"epoch": 2.536854763283303,
"grad_norm": 0.11390047520399094,
"learning_rate": 5.500000000000001e-06,
"loss": 0.7083,
"step": 1780
},
{
"epoch": 2.5382799625885184,
"grad_norm": 0.11722477525472641,
"learning_rate": 5.475e-06,
"loss": 0.7027,
"step": 1781
},
{
"epoch": 2.5397051618937336,
"grad_norm": 0.12340289354324341,
"learning_rate": 5.45e-06,
"loss": 0.7497,
"step": 1782
},
{
"epoch": 2.541130361198949,
"grad_norm": 0.11837738007307053,
"learning_rate": 5.4250000000000006e-06,
"loss": 0.6918,
"step": 1783
},
{
"epoch": 2.542555560504164,
"grad_norm": 0.12761008739471436,
"learning_rate": 5.4e-06,
"loss": 0.7178,
"step": 1784
},
{
"epoch": 2.5439807598093793,
"grad_norm": 0.13894891738891602,
"learning_rate": 5.375e-06,
"loss": 0.895,
"step": 1785
},
{
"epoch": 2.545405959114595,
"grad_norm": 0.12420764565467834,
"learning_rate": 5.3500000000000004e-06,
"loss": 0.8076,
"step": 1786
},
{
"epoch": 2.5468311584198102,
"grad_norm": 0.12363575398921967,
"learning_rate": 5.325e-06,
"loss": 0.7797,
"step": 1787
},
{
"epoch": 2.5482563577250255,
"grad_norm": 0.12346049398183823,
"learning_rate": 5.3e-06,
"loss": 0.7399,
"step": 1788
},
{
"epoch": 2.549681557030241,
"grad_norm": 0.11307359486818314,
"learning_rate": 5.275e-06,
"loss": 0.7003,
"step": 1789
},
{
"epoch": 2.5511067563354564,
"grad_norm": 0.12951073050498962,
"learning_rate": 5.25e-06,
"loss": 0.8756,
"step": 1790
},
{
"epoch": 2.5525319556406716,
"grad_norm": 0.12414433062076569,
"learning_rate": 5.225e-06,
"loss": 0.7552,
"step": 1791
},
{
"epoch": 2.553957154945887,
"grad_norm": 0.11736006289720535,
"learning_rate": 5.2e-06,
"loss": 0.6833,
"step": 1792
},
{
"epoch": 2.555382354251102,
"grad_norm": 0.1235772892832756,
"learning_rate": 5.175e-06,
"loss": 0.725,
"step": 1793
},
{
"epoch": 2.5568075535563177,
"grad_norm": 0.12362828105688095,
"learning_rate": 5.15e-06,
"loss": 0.7992,
"step": 1794
},
{
"epoch": 2.558232752861533,
"grad_norm": 0.11789260059595108,
"learning_rate": 5.125e-06,
"loss": 0.7456,
"step": 1795
},
{
"epoch": 2.559657952166748,
"grad_norm": 0.12103858590126038,
"learning_rate": 5.1e-06,
"loss": 0.7257,
"step": 1796
},
{
"epoch": 2.561083151471964,
"grad_norm": 0.13514219224452972,
"learning_rate": 5.0750000000000005e-06,
"loss": 0.9066,
"step": 1797
},
{
"epoch": 2.562508350777179,
"grad_norm": 0.12865163385868073,
"learning_rate": 5.050000000000001e-06,
"loss": 0.8367,
"step": 1798
},
{
"epoch": 2.5639335500823943,
"grad_norm": 0.12221334874629974,
"learning_rate": 5.025e-06,
"loss": 0.7395,
"step": 1799
},
{
"epoch": 2.5653587493876095,
"grad_norm": 0.11637355387210846,
"learning_rate": 5e-06,
"loss": 0.6942,
"step": 1800
},
{
"epoch": 2.566783948692825,
"grad_norm": 0.11978044360876083,
"learning_rate": 4.975000000000001e-06,
"loss": 0.744,
"step": 1801
},
{
"epoch": 2.5682091479980405,
"grad_norm": 0.12551835179328918,
"learning_rate": 4.950000000000001e-06,
"loss": 0.8768,
"step": 1802
},
{
"epoch": 2.5696343473032557,
"grad_norm": 0.11978841572999954,
"learning_rate": 4.925e-06,
"loss": 0.7239,
"step": 1803
},
{
"epoch": 2.571059546608471,
"grad_norm": 0.12948131561279297,
"learning_rate": 4.9000000000000005e-06,
"loss": 0.8536,
"step": 1804
},
{
"epoch": 2.5724847459136866,
"grad_norm": 0.127218559384346,
"learning_rate": 4.875000000000001e-06,
"loss": 0.7903,
"step": 1805
},
{
"epoch": 2.573909945218902,
"grad_norm": 0.12129092961549759,
"learning_rate": 4.85e-06,
"loss": 0.7256,
"step": 1806
},
{
"epoch": 2.575335144524117,
"grad_norm": 0.12269183248281479,
"learning_rate": 4.825e-06,
"loss": 0.7088,
"step": 1807
},
{
"epoch": 2.5767603438293323,
"grad_norm": 0.12601250410079956,
"learning_rate": 4.800000000000001e-06,
"loss": 0.7656,
"step": 1808
},
{
"epoch": 2.5781855431345475,
"grad_norm": 0.12074815481901169,
"learning_rate": 4.775e-06,
"loss": 0.761,
"step": 1809
},
{
"epoch": 2.579610742439763,
"grad_norm": 0.12048009783029556,
"learning_rate": 4.75e-06,
"loss": 0.6886,
"step": 1810
},
{
"epoch": 2.5810359417449784,
"grad_norm": 0.12202589958906174,
"learning_rate": 4.7250000000000005e-06,
"loss": 0.7895,
"step": 1811
},
{
"epoch": 2.5824611410501936,
"grad_norm": 0.12241724878549576,
"learning_rate": 4.7e-06,
"loss": 0.8167,
"step": 1812
},
{
"epoch": 2.5838863403554093,
"grad_norm": 0.12409822642803192,
"learning_rate": 4.675e-06,
"loss": 0.7267,
"step": 1813
},
{
"epoch": 2.5853115396606245,
"grad_norm": 0.11447839438915253,
"learning_rate": 4.65e-06,
"loss": 0.6374,
"step": 1814
},
{
"epoch": 2.5867367389658398,
"grad_norm": 0.1258813887834549,
"learning_rate": 4.625e-06,
"loss": 0.748,
"step": 1815
},
{
"epoch": 2.588161938271055,
"grad_norm": 0.11605683714151382,
"learning_rate": 4.6e-06,
"loss": 0.7089,
"step": 1816
},
{
"epoch": 2.5895871375762702,
"grad_norm": 0.13095863163471222,
"learning_rate": 4.575e-06,
"loss": 0.7914,
"step": 1817
},
{
"epoch": 2.591012336881486,
"grad_norm": 0.12397770583629608,
"learning_rate": 4.5500000000000005e-06,
"loss": 0.7564,
"step": 1818
},
{
"epoch": 2.592437536186701,
"grad_norm": 0.11487865447998047,
"learning_rate": 4.525e-06,
"loss": 0.6603,
"step": 1819
},
{
"epoch": 2.5938627354919164,
"grad_norm": 0.12649807333946228,
"learning_rate": 4.5e-06,
"loss": 0.8516,
"step": 1820
},
{
"epoch": 2.595287934797132,
"grad_norm": 0.1237516924738884,
"learning_rate": 4.475e-06,
"loss": 0.7985,
"step": 1821
},
{
"epoch": 2.5967131341023473,
"grad_norm": 0.13134270906448364,
"learning_rate": 4.45e-06,
"loss": 0.7892,
"step": 1822
},
{
"epoch": 2.5981383334075625,
"grad_norm": 0.1222863495349884,
"learning_rate": 4.425e-06,
"loss": 0.8678,
"step": 1823
},
{
"epoch": 2.5995635327127777,
"grad_norm": 0.1202515959739685,
"learning_rate": 4.4e-06,
"loss": 0.6988,
"step": 1824
},
{
"epoch": 2.600988732017993,
"grad_norm": 0.12608233094215393,
"learning_rate": 4.375e-06,
"loss": 0.8241,
"step": 1825
},
{
"epoch": 2.6024139313232086,
"grad_norm": 0.12674085795879364,
"learning_rate": 4.35e-06,
"loss": 0.6958,
"step": 1826
},
{
"epoch": 2.603839130628424,
"grad_norm": 0.12526774406433105,
"learning_rate": 4.325e-06,
"loss": 0.8751,
"step": 1827
},
{
"epoch": 2.605264329933639,
"grad_norm": 0.1335974782705307,
"learning_rate": 4.2999999999999995e-06,
"loss": 0.7088,
"step": 1828
},
{
"epoch": 2.6066895292388543,
"grad_norm": 0.11556920409202576,
"learning_rate": 4.2750000000000006e-06,
"loss": 0.7315,
"step": 1829
},
{
"epoch": 2.60811472854407,
"grad_norm": 0.1270662099123001,
"learning_rate": 4.250000000000001e-06,
"loss": 0.7228,
"step": 1830
},
{
"epoch": 2.6095399278492852,
"grad_norm": 0.11954999715089798,
"learning_rate": 4.225e-06,
"loss": 0.7093,
"step": 1831
},
{
"epoch": 2.6109651271545005,
"grad_norm": 0.11506129801273346,
"learning_rate": 4.2000000000000004e-06,
"loss": 0.6901,
"step": 1832
},
{
"epoch": 2.6123903264597157,
"grad_norm": 0.11959473043680191,
"learning_rate": 4.175000000000001e-06,
"loss": 0.7638,
"step": 1833
},
{
"epoch": 2.613815525764931,
"grad_norm": 0.12196359783411026,
"learning_rate": 4.15e-06,
"loss": 0.653,
"step": 1834
},
{
"epoch": 2.6152407250701466,
"grad_norm": 0.13187910616397858,
"learning_rate": 4.125e-06,
"loss": 0.7781,
"step": 1835
},
{
"epoch": 2.616665924375362,
"grad_norm": 0.12887540459632874,
"learning_rate": 4.1000000000000006e-06,
"loss": 0.8214,
"step": 1836
},
{
"epoch": 2.618091123680577,
"grad_norm": 0.11380273848772049,
"learning_rate": 4.075e-06,
"loss": 0.6827,
"step": 1837
},
{
"epoch": 2.6195163229857927,
"grad_norm": 0.1320565640926361,
"learning_rate": 4.05e-06,
"loss": 0.8468,
"step": 1838
},
{
"epoch": 2.620941522291008,
"grad_norm": 0.12046577036380768,
"learning_rate": 4.0250000000000004e-06,
"loss": 0.7815,
"step": 1839
},
{
"epoch": 2.622366721596223,
"grad_norm": 0.1277719885110855,
"learning_rate": 4.000000000000001e-06,
"loss": 0.844,
"step": 1840
},
{
"epoch": 2.6237919209014384,
"grad_norm": 0.12599779665470123,
"learning_rate": 3.975e-06,
"loss": 0.7362,
"step": 1841
},
{
"epoch": 2.6252171202066537,
"grad_norm": 0.11851386725902557,
"learning_rate": 3.95e-06,
"loss": 0.7138,
"step": 1842
},
{
"epoch": 2.6266423195118693,
"grad_norm": 0.1267745941877365,
"learning_rate": 3.9250000000000005e-06,
"loss": 0.7724,
"step": 1843
},
{
"epoch": 2.6280675188170846,
"grad_norm": 0.11635207384824753,
"learning_rate": 3.9e-06,
"loss": 0.7213,
"step": 1844
},
{
"epoch": 2.6294927181223,
"grad_norm": 0.11369098722934723,
"learning_rate": 3.875e-06,
"loss": 0.6038,
"step": 1845
},
{
"epoch": 2.6309179174275155,
"grad_norm": 0.12977294623851776,
"learning_rate": 3.85e-06,
"loss": 0.8076,
"step": 1846
},
{
"epoch": 2.6323431167327307,
"grad_norm": 0.11781536787748337,
"learning_rate": 3.825e-06,
"loss": 0.7181,
"step": 1847
},
{
"epoch": 2.633768316037946,
"grad_norm": 0.1289232075214386,
"learning_rate": 3.8e-06,
"loss": 0.7667,
"step": 1848
},
{
"epoch": 2.635193515343161,
"grad_norm": 0.12459786236286163,
"learning_rate": 3.775e-06,
"loss": 0.7916,
"step": 1849
},
{
"epoch": 2.6366187146483764,
"grad_norm": 0.11282416433095932,
"learning_rate": 3.75e-06,
"loss": 0.7283,
"step": 1850
},
{
"epoch": 2.638043913953592,
"grad_norm": 0.11688493937253952,
"learning_rate": 3.725e-06,
"loss": 0.6963,
"step": 1851
},
{
"epoch": 2.6394691132588073,
"grad_norm": 0.11632485687732697,
"learning_rate": 3.7e-06,
"loss": 0.6424,
"step": 1852
},
{
"epoch": 2.6408943125640225,
"grad_norm": 0.12673233449459076,
"learning_rate": 3.675e-06,
"loss": 0.7327,
"step": 1853
},
{
"epoch": 2.642319511869238,
"grad_norm": 0.11964259296655655,
"learning_rate": 3.6499999999999998e-06,
"loss": 0.7097,
"step": 1854
},
{
"epoch": 2.6437447111744534,
"grad_norm": 0.11717774718999863,
"learning_rate": 3.625e-06,
"loss": 0.6426,
"step": 1855
},
{
"epoch": 2.6451699104796687,
"grad_norm": 0.12356612831354141,
"learning_rate": 3.6e-06,
"loss": 0.6822,
"step": 1856
},
{
"epoch": 2.646595109784884,
"grad_norm": 0.11736509203910828,
"learning_rate": 3.575e-06,
"loss": 0.6893,
"step": 1857
},
{
"epoch": 2.648020309090099,
"grad_norm": 0.11904231458902359,
"learning_rate": 3.55e-06,
"loss": 0.7792,
"step": 1858
},
{
"epoch": 2.649445508395315,
"grad_norm": 0.1232367530465126,
"learning_rate": 3.5249999999999997e-06,
"loss": 0.8195,
"step": 1859
},
{
"epoch": 2.65087070770053,
"grad_norm": 0.11096363514661789,
"learning_rate": 3.5000000000000004e-06,
"loss": 0.6655,
"step": 1860
},
{
"epoch": 2.6522959070057452,
"grad_norm": 0.11970917135477066,
"learning_rate": 3.4750000000000006e-06,
"loss": 0.7128,
"step": 1861
},
{
"epoch": 2.653721106310961,
"grad_norm": 0.12297781556844711,
"learning_rate": 3.4500000000000004e-06,
"loss": 0.7813,
"step": 1862
},
{
"epoch": 2.655146305616176,
"grad_norm": 0.11325845122337341,
"learning_rate": 3.4250000000000002e-06,
"loss": 0.7027,
"step": 1863
},
{
"epoch": 2.6565715049213914,
"grad_norm": 0.12247852236032486,
"learning_rate": 3.4000000000000005e-06,
"loss": 0.6647,
"step": 1864
},
{
"epoch": 2.6579967042266066,
"grad_norm": 0.12802624702453613,
"learning_rate": 3.3750000000000003e-06,
"loss": 0.8041,
"step": 1865
},
{
"epoch": 2.659421903531822,
"grad_norm": 0.12722131609916687,
"learning_rate": 3.3500000000000005e-06,
"loss": 0.7893,
"step": 1866
},
{
"epoch": 2.6608471028370375,
"grad_norm": 0.12242351472377777,
"learning_rate": 3.3250000000000004e-06,
"loss": 0.7781,
"step": 1867
},
{
"epoch": 2.6622723021422527,
"grad_norm": 0.12402434647083282,
"learning_rate": 3.3e-06,
"loss": 0.7309,
"step": 1868
},
{
"epoch": 2.663697501447468,
"grad_norm": 0.12203481793403625,
"learning_rate": 3.2750000000000004e-06,
"loss": 0.7015,
"step": 1869
},
{
"epoch": 2.6651227007526836,
"grad_norm": 0.1173618957400322,
"learning_rate": 3.2500000000000002e-06,
"loss": 0.7337,
"step": 1870
},
{
"epoch": 2.666547900057899,
"grad_norm": 0.12130317091941833,
"learning_rate": 3.225e-06,
"loss": 0.7552,
"step": 1871
},
{
"epoch": 2.667973099363114,
"grad_norm": 0.11796274036169052,
"learning_rate": 3.2000000000000003e-06,
"loss": 0.6919,
"step": 1872
},
{
"epoch": 2.6693982986683293,
"grad_norm": 0.12095659226179123,
"learning_rate": 3.175e-06,
"loss": 0.7246,
"step": 1873
},
{
"epoch": 2.6708234979735446,
"grad_norm": 0.12967567145824432,
"learning_rate": 3.1500000000000003e-06,
"loss": 0.8096,
"step": 1874
},
{
"epoch": 2.67224869727876,
"grad_norm": 0.12744221091270447,
"learning_rate": 3.125e-06,
"loss": 0.8115,
"step": 1875
},
{
"epoch": 2.6736738965839755,
"grad_norm": 0.12004055827856064,
"learning_rate": 3.1e-06,
"loss": 0.6885,
"step": 1876
},
{
"epoch": 2.6750990958891907,
"grad_norm": 0.1271507889032364,
"learning_rate": 3.075e-06,
"loss": 0.8234,
"step": 1877
},
{
"epoch": 2.676524295194406,
"grad_norm": 0.12193158268928528,
"learning_rate": 3.05e-06,
"loss": 0.7624,
"step": 1878
},
{
"epoch": 2.6779494944996216,
"grad_norm": 0.13193710148334503,
"learning_rate": 3.0250000000000003e-06,
"loss": 0.8447,
"step": 1879
},
{
"epoch": 2.679374693804837,
"grad_norm": 0.12805476784706116,
"learning_rate": 3e-06,
"loss": 0.7211,
"step": 1880
},
{
"epoch": 2.680799893110052,
"grad_norm": 0.12373394519090652,
"learning_rate": 2.975e-06,
"loss": 0.7846,
"step": 1881
},
{
"epoch": 2.6822250924152673,
"grad_norm": 0.12767145037651062,
"learning_rate": 2.95e-06,
"loss": 0.8227,
"step": 1882
},
{
"epoch": 2.6836502917204825,
"grad_norm": 0.12840954959392548,
"learning_rate": 2.9250000000000004e-06,
"loss": 0.8073,
"step": 1883
},
{
"epoch": 2.685075491025698,
"grad_norm": 0.14032377302646637,
"learning_rate": 2.9e-06,
"loss": 0.9263,
"step": 1884
},
{
"epoch": 2.6865006903309134,
"grad_norm": 0.11493780463933945,
"learning_rate": 2.8750000000000004e-06,
"loss": 0.6231,
"step": 1885
},
{
"epoch": 2.6879258896361287,
"grad_norm": 0.13087178766727448,
"learning_rate": 2.8500000000000002e-06,
"loss": 0.8403,
"step": 1886
},
{
"epoch": 2.6893510889413443,
"grad_norm": 0.12325944006443024,
"learning_rate": 2.825e-06,
"loss": 0.768,
"step": 1887
},
{
"epoch": 2.6907762882465596,
"grad_norm": 0.12279864400625229,
"learning_rate": 2.8000000000000003e-06,
"loss": 0.7102,
"step": 1888
},
{
"epoch": 2.692201487551775,
"grad_norm": 0.12409889698028564,
"learning_rate": 2.775e-06,
"loss": 0.8263,
"step": 1889
},
{
"epoch": 2.69362668685699,
"grad_norm": 0.1252397894859314,
"learning_rate": 2.7500000000000004e-06,
"loss": 0.7527,
"step": 1890
},
{
"epoch": 2.6950518861622053,
"grad_norm": 0.11774540692567825,
"learning_rate": 2.725e-06,
"loss": 0.6686,
"step": 1891
},
{
"epoch": 2.696477085467421,
"grad_norm": 0.12210378795862198,
"learning_rate": 2.7e-06,
"loss": 0.7795,
"step": 1892
},
{
"epoch": 2.697902284772636,
"grad_norm": 0.12077716737985611,
"learning_rate": 2.6750000000000002e-06,
"loss": 0.8108,
"step": 1893
},
{
"epoch": 2.6993274840778514,
"grad_norm": 0.11705675721168518,
"learning_rate": 2.65e-06,
"loss": 0.7479,
"step": 1894
},
{
"epoch": 2.700752683383067,
"grad_norm": 0.11738929152488708,
"learning_rate": 2.625e-06,
"loss": 0.7724,
"step": 1895
},
{
"epoch": 2.7021778826882823,
"grad_norm": 0.12680017948150635,
"learning_rate": 2.6e-06,
"loss": 0.7767,
"step": 1896
},
{
"epoch": 2.7036030819934975,
"grad_norm": 0.11805405467748642,
"learning_rate": 2.575e-06,
"loss": 0.7104,
"step": 1897
},
{
"epoch": 2.7050282812987128,
"grad_norm": 0.12776324152946472,
"learning_rate": 2.55e-06,
"loss": 0.8612,
"step": 1898
},
{
"epoch": 2.706453480603928,
"grad_norm": 0.12870031595230103,
"learning_rate": 2.5250000000000004e-06,
"loss": 0.728,
"step": 1899
},
{
"epoch": 2.7078786799091437,
"grad_norm": 0.12859638035297394,
"learning_rate": 2.5e-06,
"loss": 0.8568,
"step": 1900
},
{
"epoch": 2.709303879214359,
"grad_norm": 0.12751026451587677,
"learning_rate": 2.4750000000000004e-06,
"loss": 0.6971,
"step": 1901
},
{
"epoch": 2.710729078519574,
"grad_norm": 0.12134160101413727,
"learning_rate": 2.4500000000000003e-06,
"loss": 0.6743,
"step": 1902
},
{
"epoch": 2.71215427782479,
"grad_norm": 0.12432581186294556,
"learning_rate": 2.425e-06,
"loss": 0.8231,
"step": 1903
},
{
"epoch": 2.713579477130005,
"grad_norm": 0.11920090019702911,
"learning_rate": 2.4000000000000003e-06,
"loss": 0.7886,
"step": 1904
},
{
"epoch": 2.7150046764352203,
"grad_norm": 0.1256590038537979,
"learning_rate": 2.375e-06,
"loss": 0.7584,
"step": 1905
},
{
"epoch": 2.7164298757404355,
"grad_norm": 0.11471147835254669,
"learning_rate": 2.35e-06,
"loss": 0.6923,
"step": 1906
},
{
"epoch": 2.7178550750456507,
"grad_norm": 0.11847636103630066,
"learning_rate": 2.325e-06,
"loss": 0.7459,
"step": 1907
},
{
"epoch": 2.7192802743508664,
"grad_norm": 0.1191699430346489,
"learning_rate": 2.3e-06,
"loss": 0.7235,
"step": 1908
},
{
"epoch": 2.7207054736560816,
"grad_norm": 0.113314688205719,
"learning_rate": 2.2750000000000002e-06,
"loss": 0.7038,
"step": 1909
},
{
"epoch": 2.722130672961297,
"grad_norm": 0.1155691146850586,
"learning_rate": 2.25e-06,
"loss": 0.7501,
"step": 1910
},
{
"epoch": 2.7235558722665125,
"grad_norm": 0.1164008155465126,
"learning_rate": 2.225e-06,
"loss": 0.6688,
"step": 1911
},
{
"epoch": 2.7249810715717278,
"grad_norm": 0.12328604608774185,
"learning_rate": 2.2e-06,
"loss": 0.7401,
"step": 1912
},
{
"epoch": 2.726406270876943,
"grad_norm": 0.11712934076786041,
"learning_rate": 2.175e-06,
"loss": 0.6694,
"step": 1913
},
{
"epoch": 2.727831470182158,
"grad_norm": 0.12323250621557236,
"learning_rate": 2.1499999999999997e-06,
"loss": 0.774,
"step": 1914
},
{
"epoch": 2.7292566694873734,
"grad_norm": 0.12422061711549759,
"learning_rate": 2.1250000000000004e-06,
"loss": 0.7699,
"step": 1915
},
{
"epoch": 2.730681868792589,
"grad_norm": 0.11473996937274933,
"learning_rate": 2.1000000000000002e-06,
"loss": 0.7167,
"step": 1916
},
{
"epoch": 2.7321070680978043,
"grad_norm": 0.13377778232097626,
"learning_rate": 2.075e-06,
"loss": 0.8338,
"step": 1917
},
{
"epoch": 2.7335322674030196,
"grad_norm": 0.11472638696432114,
"learning_rate": 2.0500000000000003e-06,
"loss": 0.6416,
"step": 1918
},
{
"epoch": 2.7349574667082353,
"grad_norm": 0.11805146187543869,
"learning_rate": 2.025e-06,
"loss": 0.7236,
"step": 1919
},
{
"epoch": 2.7363826660134505,
"grad_norm": 0.12198193371295929,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.7635,
"step": 1920
},
{
"epoch": 2.7378078653186657,
"grad_norm": 0.12916578352451324,
"learning_rate": 1.975e-06,
"loss": 0.7817,
"step": 1921
},
{
"epoch": 2.739233064623881,
"grad_norm": 0.1267390251159668,
"learning_rate": 1.95e-06,
"loss": 0.7867,
"step": 1922
},
{
"epoch": 2.740658263929096,
"grad_norm": 0.11180416494607925,
"learning_rate": 1.925e-06,
"loss": 0.6699,
"step": 1923
},
{
"epoch": 2.7420834632343114,
"grad_norm": 0.11823485046625137,
"learning_rate": 1.9e-06,
"loss": 0.7751,
"step": 1924
},
{
"epoch": 2.743508662539527,
"grad_norm": 0.12333336472511292,
"learning_rate": 1.875e-06,
"loss": 0.7191,
"step": 1925
},
{
"epoch": 2.7449338618447423,
"grad_norm": 0.12522101402282715,
"learning_rate": 1.85e-06,
"loss": 0.828,
"step": 1926
},
{
"epoch": 2.7463590611499575,
"grad_norm": 0.12964409589767456,
"learning_rate": 1.8249999999999999e-06,
"loss": 0.8416,
"step": 1927
},
{
"epoch": 2.747784260455173,
"grad_norm": 0.13239257037639618,
"learning_rate": 1.8e-06,
"loss": 0.8906,
"step": 1928
},
{
"epoch": 2.7492094597603884,
"grad_norm": 0.12686698138713837,
"learning_rate": 1.775e-06,
"loss": 0.8255,
"step": 1929
},
{
"epoch": 2.7506346590656037,
"grad_norm": 0.11465885490179062,
"learning_rate": 1.7500000000000002e-06,
"loss": 0.6797,
"step": 1930
},
{
"epoch": 2.752059858370819,
"grad_norm": 0.1219501942396164,
"learning_rate": 1.7250000000000002e-06,
"loss": 0.7506,
"step": 1931
},
{
"epoch": 2.753485057676034,
"grad_norm": 0.11884892731904984,
"learning_rate": 1.7000000000000002e-06,
"loss": 0.7068,
"step": 1932
},
{
"epoch": 2.75491025698125,
"grad_norm": 0.1211390420794487,
"learning_rate": 1.6750000000000003e-06,
"loss": 0.7196,
"step": 1933
},
{
"epoch": 2.756335456286465,
"grad_norm": 0.11794040352106094,
"learning_rate": 1.65e-06,
"loss": 0.7426,
"step": 1934
},
{
"epoch": 2.7577606555916803,
"grad_norm": 0.13087007403373718,
"learning_rate": 1.6250000000000001e-06,
"loss": 0.8746,
"step": 1935
},
{
"epoch": 2.759185854896896,
"grad_norm": 0.12401354312896729,
"learning_rate": 1.6000000000000001e-06,
"loss": 0.7959,
"step": 1936
},
{
"epoch": 2.760611054202111,
"grad_norm": 0.11777616292238235,
"learning_rate": 1.5750000000000002e-06,
"loss": 0.7174,
"step": 1937
},
{
"epoch": 2.7620362535073264,
"grad_norm": 0.13140791654586792,
"learning_rate": 1.55e-06,
"loss": 0.7578,
"step": 1938
},
{
"epoch": 2.7634614528125416,
"grad_norm": 0.12180142849683762,
"learning_rate": 1.525e-06,
"loss": 0.8048,
"step": 1939
},
{
"epoch": 2.764886652117757,
"grad_norm": 0.1280883550643921,
"learning_rate": 1.5e-06,
"loss": 0.8051,
"step": 1940
},
{
"epoch": 2.7663118514229725,
"grad_norm": 0.12221908569335938,
"learning_rate": 1.475e-06,
"loss": 0.7688,
"step": 1941
},
{
"epoch": 2.7677370507281878,
"grad_norm": 0.123964324593544,
"learning_rate": 1.45e-06,
"loss": 0.7169,
"step": 1942
},
{
"epoch": 2.769162250033403,
"grad_norm": 0.12531334161758423,
"learning_rate": 1.4250000000000001e-06,
"loss": 0.7709,
"step": 1943
},
{
"epoch": 2.7705874493386187,
"grad_norm": 0.11511649191379547,
"learning_rate": 1.4000000000000001e-06,
"loss": 0.7153,
"step": 1944
},
{
"epoch": 2.772012648643834,
"grad_norm": 0.12430664896965027,
"learning_rate": 1.3750000000000002e-06,
"loss": 0.773,
"step": 1945
},
{
"epoch": 2.773437847949049,
"grad_norm": 0.12184526771306992,
"learning_rate": 1.35e-06,
"loss": 0.7321,
"step": 1946
},
{
"epoch": 2.7748630472542644,
"grad_norm": 0.11108440160751343,
"learning_rate": 1.325e-06,
"loss": 0.6475,
"step": 1947
},
{
"epoch": 2.7762882465594796,
"grad_norm": 0.1188865527510643,
"learning_rate": 1.3e-06,
"loss": 0.7117,
"step": 1948
},
{
"epoch": 2.7777134458646953,
"grad_norm": 0.11639267951250076,
"learning_rate": 1.275e-06,
"loss": 0.6986,
"step": 1949
},
{
"epoch": 2.7791386451699105,
"grad_norm": 0.10887181758880615,
"learning_rate": 1.25e-06,
"loss": 0.6363,
"step": 1950
},
{
"epoch": 2.7805638444751257,
"grad_norm": 0.11379990726709366,
"learning_rate": 1.2250000000000001e-06,
"loss": 0.593,
"step": 1951
},
{
"epoch": 2.7819890437803414,
"grad_norm": 0.12831738591194153,
"learning_rate": 1.2000000000000002e-06,
"loss": 0.7472,
"step": 1952
},
{
"epoch": 2.7834142430855566,
"grad_norm": 0.14005379378795624,
"learning_rate": 1.175e-06,
"loss": 0.6726,
"step": 1953
},
{
"epoch": 2.784839442390772,
"grad_norm": 0.12531784176826477,
"learning_rate": 1.15e-06,
"loss": 0.6896,
"step": 1954
},
{
"epoch": 2.786264641695987,
"grad_norm": 0.1217687800526619,
"learning_rate": 1.125e-06,
"loss": 0.7135,
"step": 1955
},
{
"epoch": 2.7876898410012023,
"grad_norm": 0.12607036530971527,
"learning_rate": 1.1e-06,
"loss": 0.7731,
"step": 1956
},
{
"epoch": 2.789115040306418,
"grad_norm": 0.12070278078317642,
"learning_rate": 1.0749999999999999e-06,
"loss": 0.7502,
"step": 1957
},
{
"epoch": 2.790540239611633,
"grad_norm": 0.12250007688999176,
"learning_rate": 1.0500000000000001e-06,
"loss": 0.7396,
"step": 1958
},
{
"epoch": 2.7919654389168485,
"grad_norm": 0.1261885017156601,
"learning_rate": 1.0250000000000001e-06,
"loss": 0.7622,
"step": 1959
},
{
"epoch": 2.793390638222064,
"grad_norm": 0.12981918454170227,
"learning_rate": 1.0000000000000002e-06,
"loss": 0.8239,
"step": 1960
},
{
"epoch": 2.7948158375272794,
"grad_norm": 0.11547914147377014,
"learning_rate": 9.75e-07,
"loss": 0.7063,
"step": 1961
},
{
"epoch": 2.7962410368324946,
"grad_norm": 0.11914034187793732,
"learning_rate": 9.5e-07,
"loss": 0.704,
"step": 1962
},
{
"epoch": 2.79766623613771,
"grad_norm": 0.12382140755653381,
"learning_rate": 9.25e-07,
"loss": 0.8165,
"step": 1963
},
{
"epoch": 2.799091435442925,
"grad_norm": 0.12351801991462708,
"learning_rate": 9e-07,
"loss": 0.7417,
"step": 1964
},
{
"epoch": 2.8005166347481407,
"grad_norm": 0.1257532387971878,
"learning_rate": 8.750000000000001e-07,
"loss": 0.8165,
"step": 1965
},
{
"epoch": 2.801941834053356,
"grad_norm": 0.11033283174037933,
"learning_rate": 8.500000000000001e-07,
"loss": 0.6358,
"step": 1966
},
{
"epoch": 2.803367033358571,
"grad_norm": 0.1269092708826065,
"learning_rate": 8.25e-07,
"loss": 0.7348,
"step": 1967
},
{
"epoch": 2.8047922326637864,
"grad_norm": 0.13281433284282684,
"learning_rate": 8.000000000000001e-07,
"loss": 0.8293,
"step": 1968
},
{
"epoch": 2.806217431969002,
"grad_norm": 0.12043067812919617,
"learning_rate": 7.75e-07,
"loss": 0.7292,
"step": 1969
},
{
"epoch": 2.8076426312742173,
"grad_norm": 0.12765593826770782,
"learning_rate": 7.5e-07,
"loss": 0.8368,
"step": 1970
},
{
"epoch": 2.8090678305794325,
"grad_norm": 0.12831556797027588,
"learning_rate": 7.25e-07,
"loss": 0.8231,
"step": 1971
},
{
"epoch": 2.8104930298846478,
"grad_norm": 0.1239595040678978,
"learning_rate": 7.000000000000001e-07,
"loss": 0.8117,
"step": 1972
},
{
"epoch": 2.811918229189863,
"grad_norm": 0.12040313333272934,
"learning_rate": 6.75e-07,
"loss": 0.7088,
"step": 1973
},
{
"epoch": 2.8133434284950787,
"grad_norm": 0.12489041686058044,
"learning_rate": 6.5e-07,
"loss": 0.7469,
"step": 1974
},
{
"epoch": 2.814768627800294,
"grad_norm": 0.12376904487609863,
"learning_rate": 6.25e-07,
"loss": 0.7068,
"step": 1975
},
{
"epoch": 2.816193827105509,
"grad_norm": 0.13215838372707367,
"learning_rate": 6.000000000000001e-07,
"loss": 0.8671,
"step": 1976
},
{
"epoch": 2.817619026410725,
"grad_norm": 0.114949069917202,
"learning_rate": 5.75e-07,
"loss": 0.6687,
"step": 1977
},
{
"epoch": 2.81904422571594,
"grad_norm": 0.1178496778011322,
"learning_rate": 5.5e-07,
"loss": 0.7144,
"step": 1978
},
{
"epoch": 2.8204694250211553,
"grad_norm": 0.13831894099712372,
"learning_rate": 5.250000000000001e-07,
"loss": 0.8793,
"step": 1979
},
{
"epoch": 2.8218946243263705,
"grad_norm": 0.1233241856098175,
"learning_rate": 5.000000000000001e-07,
"loss": 0.7527,
"step": 1980
},
{
"epoch": 2.8233198236315857,
"grad_norm": 0.11879520863294601,
"learning_rate": 4.75e-07,
"loss": 0.6299,
"step": 1981
},
{
"epoch": 2.8247450229368014,
"grad_norm": 0.11689655482769012,
"learning_rate": 4.5e-07,
"loss": 0.7273,
"step": 1982
},
{
"epoch": 2.8261702222420166,
"grad_norm": 0.13215471804141998,
"learning_rate": 4.2500000000000006e-07,
"loss": 0.751,
"step": 1983
},
{
"epoch": 2.827595421547232,
"grad_norm": 0.1314304918050766,
"learning_rate": 4.0000000000000003e-07,
"loss": 0.804,
"step": 1984
},
{
"epoch": 2.8290206208524475,
"grad_norm": 0.11878157407045364,
"learning_rate": 3.75e-07,
"loss": 0.7649,
"step": 1985
},
{
"epoch": 2.8304458201576628,
"grad_norm": 0.12103203684091568,
"learning_rate": 3.5000000000000004e-07,
"loss": 0.7191,
"step": 1986
},
{
"epoch": 2.831871019462878,
"grad_norm": 0.13341623544692993,
"learning_rate": 3.25e-07,
"loss": 0.8578,
"step": 1987
},
{
"epoch": 2.8332962187680932,
"grad_norm": 0.12499632686376572,
"learning_rate": 3.0000000000000004e-07,
"loss": 0.7619,
"step": 1988
},
{
"epoch": 2.8347214180733085,
"grad_norm": 0.11657874286174774,
"learning_rate": 2.75e-07,
"loss": 0.7479,
"step": 1989
},
{
"epoch": 2.836146617378524,
"grad_norm": 0.12138865143060684,
"learning_rate": 2.5000000000000004e-07,
"loss": 0.6576,
"step": 1990
},
{
"epoch": 2.8375718166837394,
"grad_norm": 0.1212976723909378,
"learning_rate": 2.25e-07,
"loss": 0.7479,
"step": 1991
},
{
"epoch": 2.8389970159889546,
"grad_norm": 0.11877257376909256,
"learning_rate": 2.0000000000000002e-07,
"loss": 0.7133,
"step": 1992
},
{
"epoch": 2.8404222152941703,
"grad_norm": 0.12164323776960373,
"learning_rate": 1.7500000000000002e-07,
"loss": 0.7399,
"step": 1993
},
{
"epoch": 2.8418474145993855,
"grad_norm": 0.12122008949518204,
"learning_rate": 1.5000000000000002e-07,
"loss": 0.7227,
"step": 1994
},
{
"epoch": 2.8432726139046007,
"grad_norm": 0.12460826337337494,
"learning_rate": 1.2500000000000002e-07,
"loss": 0.7506,
"step": 1995
},
{
"epoch": 2.844697813209816,
"grad_norm": 0.12194038927555084,
"learning_rate": 1.0000000000000001e-07,
"loss": 0.7424,
"step": 1996
},
{
"epoch": 2.846123012515031,
"grad_norm": 0.12100560963153839,
"learning_rate": 7.500000000000001e-08,
"loss": 0.6573,
"step": 1997
},
{
"epoch": 2.847548211820247,
"grad_norm": 0.11969904601573944,
"learning_rate": 5.0000000000000004e-08,
"loss": 0.7095,
"step": 1998
},
{
"epoch": 2.848973411125462,
"grad_norm": 0.1289009302854538,
"learning_rate": 2.5000000000000002e-08,
"loss": 0.8218,
"step": 1999
},
{
"epoch": 2.8503986104306773,
"grad_norm": 0.12378975749015808,
"learning_rate": 0.0,
"loss": 0.8249,
"step": 2000
},
{
"epoch": 2.8503986104306773,
"step": 2000,
"total_flos": 1.0451807295707516e+18,
"train_loss": 0.7933661967515946,
"train_runtime": 65423.383,
"train_samples_per_second": 0.978,
"train_steps_per_second": 0.031
}
],
"logging_steps": 1.0,
"max_steps": 2000,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.0451807295707516e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}