{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.989561586638831,
"eval_steps": 500,
"global_step": 1276,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.003131524008350731,
"grad_norm": 13.917898178100586,
"learning_rate": 5.0000000000000004e-08,
"loss": 4.1051,
"step": 1
},
{
"epoch": 0.006263048016701462,
"grad_norm": 17.327869415283203,
"learning_rate": 1.0000000000000001e-07,
"loss": 4.1048,
"step": 2
},
{
"epoch": 0.009394572025052192,
"grad_norm": 14.063946723937988,
"learning_rate": 1.5000000000000002e-07,
"loss": 4.0741,
"step": 3
},
{
"epoch": 0.012526096033402923,
"grad_norm": 16.817699432373047,
"learning_rate": 2.0000000000000002e-07,
"loss": 4.2002,
"step": 4
},
{
"epoch": 0.015657620041753653,
"grad_norm": 14.47036361694336,
"learning_rate": 2.5000000000000004e-07,
"loss": 4.2652,
"step": 5
},
{
"epoch": 0.018789144050104383,
"grad_norm": 14.474193572998047,
"learning_rate": 3.0000000000000004e-07,
"loss": 4.0888,
"step": 6
},
{
"epoch": 0.021920668058455117,
"grad_norm": 14.865458488464355,
"learning_rate": 3.5000000000000004e-07,
"loss": 4.0014,
"step": 7
},
{
"epoch": 0.025052192066805846,
"grad_norm": 15.338888168334961,
"learning_rate": 4.0000000000000003e-07,
"loss": 4.13,
"step": 8
},
{
"epoch": 0.028183716075156576,
"grad_norm": 15.154336929321289,
"learning_rate": 4.5000000000000003e-07,
"loss": 4.2493,
"step": 9
},
{
"epoch": 0.031315240083507306,
"grad_norm": 15.919597625732422,
"learning_rate": 5.000000000000001e-07,
"loss": 4.0535,
"step": 10
},
{
"epoch": 0.03444676409185804,
"grad_norm": 14.981926918029785,
"learning_rate": 5.5e-07,
"loss": 3.9064,
"step": 11
},
{
"epoch": 0.037578288100208766,
"grad_norm": 13.36101245880127,
"learning_rate": 6.000000000000001e-07,
"loss": 4.1939,
"step": 12
},
{
"epoch": 0.0407098121085595,
"grad_norm": 15.58773422241211,
"learning_rate": 6.5e-07,
"loss": 4.18,
"step": 13
},
{
"epoch": 0.04384133611691023,
"grad_norm": 13.560139656066895,
"learning_rate": 7.000000000000001e-07,
"loss": 3.9414,
"step": 14
},
{
"epoch": 0.04697286012526096,
"grad_norm": 12.307971954345703,
"learning_rate": 7.5e-07,
"loss": 3.8836,
"step": 15
},
{
"epoch": 0.05010438413361169,
"grad_norm": 14.533182144165039,
"learning_rate": 8.000000000000001e-07,
"loss": 4.1551,
"step": 16
},
{
"epoch": 0.05323590814196242,
"grad_norm": 13.453729629516602,
"learning_rate": 8.500000000000001e-07,
"loss": 4.0048,
"step": 17
},
{
"epoch": 0.05636743215031315,
"grad_norm": 13.45992374420166,
"learning_rate": 9.000000000000001e-07,
"loss": 4.0745,
"step": 18
},
{
"epoch": 0.059498956158663886,
"grad_norm": 11.857145309448242,
"learning_rate": 9.500000000000001e-07,
"loss": 3.9871,
"step": 19
},
{
"epoch": 0.06263048016701461,
"grad_norm": 11.872294425964355,
"learning_rate": 1.0000000000000002e-06,
"loss": 3.8959,
"step": 20
},
{
"epoch": 0.06576200417536535,
"grad_norm": 12.969825744628906,
"learning_rate": 1.0500000000000001e-06,
"loss": 4.0308,
"step": 21
},
{
"epoch": 0.06889352818371608,
"grad_norm": 12.33769416809082,
"learning_rate": 1.1e-06,
"loss": 3.9341,
"step": 22
},
{
"epoch": 0.0720250521920668,
"grad_norm": 12.669405937194824,
"learning_rate": 1.1500000000000002e-06,
"loss": 3.8511,
"step": 23
},
{
"epoch": 0.07515657620041753,
"grad_norm": 10.677213668823242,
"learning_rate": 1.2000000000000002e-06,
"loss": 3.7764,
"step": 24
},
{
"epoch": 0.07828810020876827,
"grad_norm": 10.366402626037598,
"learning_rate": 1.25e-06,
"loss": 3.5291,
"step": 25
},
{
"epoch": 0.081419624217119,
"grad_norm": 11.211421012878418,
"learning_rate": 1.3e-06,
"loss": 3.5765,
"step": 26
},
{
"epoch": 0.08455114822546973,
"grad_norm": 11.313716888427734,
"learning_rate": 1.3500000000000002e-06,
"loss": 3.4849,
"step": 27
},
{
"epoch": 0.08768267223382047,
"grad_norm": 10.41294002532959,
"learning_rate": 1.4000000000000001e-06,
"loss": 3.2653,
"step": 28
},
{
"epoch": 0.09081419624217119,
"grad_norm": 10.40064525604248,
"learning_rate": 1.45e-06,
"loss": 3.3384,
"step": 29
},
{
"epoch": 0.09394572025052192,
"grad_norm": 10.05427074432373,
"learning_rate": 1.5e-06,
"loss": 3.2257,
"step": 30
},
{
"epoch": 0.09707724425887265,
"grad_norm": 9.583163261413574,
"learning_rate": 1.5500000000000002e-06,
"loss": 3.1371,
"step": 31
},
{
"epoch": 0.10020876826722339,
"grad_norm": 10.09977912902832,
"learning_rate": 1.6000000000000001e-06,
"loss": 3.0658,
"step": 32
},
{
"epoch": 0.10334029227557412,
"grad_norm": 9.271486282348633,
"learning_rate": 1.6500000000000003e-06,
"loss": 2.9693,
"step": 33
},
{
"epoch": 0.10647181628392484,
"grad_norm": 10.687992095947266,
"learning_rate": 1.7000000000000002e-06,
"loss": 2.95,
"step": 34
},
{
"epoch": 0.10960334029227557,
"grad_norm": 8.762290000915527,
"learning_rate": 1.75e-06,
"loss": 2.8286,
"step": 35
},
{
"epoch": 0.1127348643006263,
"grad_norm": 10.13785171508789,
"learning_rate": 1.8000000000000001e-06,
"loss": 2.3664,
"step": 36
},
{
"epoch": 0.11586638830897704,
"grad_norm": 18.301353454589844,
"learning_rate": 1.85e-06,
"loss": 2.5533,
"step": 37
},
{
"epoch": 0.11899791231732777,
"grad_norm": 11.490377426147461,
"learning_rate": 1.9000000000000002e-06,
"loss": 2.6133,
"step": 38
},
{
"epoch": 0.12212943632567849,
"grad_norm": 15.614163398742676,
"learning_rate": 1.9500000000000004e-06,
"loss": 2.3596,
"step": 39
},
{
"epoch": 0.12526096033402923,
"grad_norm": 17.757442474365234,
"learning_rate": 2.0000000000000003e-06,
"loss": 2.3491,
"step": 40
},
{
"epoch": 0.12839248434237996,
"grad_norm": 17.18431854248047,
"learning_rate": 2.05e-06,
"loss": 2.2361,
"step": 41
},
{
"epoch": 0.1315240083507307,
"grad_norm": 16.149789810180664,
"learning_rate": 2.1000000000000002e-06,
"loss": 2.1457,
"step": 42
},
{
"epoch": 0.13465553235908143,
"grad_norm": 15.256914138793945,
"learning_rate": 2.15e-06,
"loss": 2.12,
"step": 43
},
{
"epoch": 0.13778705636743216,
"grad_norm": 15.537406921386719,
"learning_rate": 2.2e-06,
"loss": 2.1877,
"step": 44
},
{
"epoch": 0.1409185803757829,
"grad_norm": 7.947713851928711,
"learning_rate": 2.25e-06,
"loss": 2.1648,
"step": 45
},
{
"epoch": 0.1440501043841336,
"grad_norm": 8.818676948547363,
"learning_rate": 2.3000000000000004e-06,
"loss": 2.134,
"step": 46
},
{
"epoch": 0.14718162839248433,
"grad_norm": 5.175768852233887,
"learning_rate": 2.35e-06,
"loss": 2.0796,
"step": 47
},
{
"epoch": 0.15031315240083507,
"grad_norm": 6.750611305236816,
"learning_rate": 2.4000000000000003e-06,
"loss": 1.9174,
"step": 48
},
{
"epoch": 0.1534446764091858,
"grad_norm": 6.2147979736328125,
"learning_rate": 2.4500000000000003e-06,
"loss": 1.8065,
"step": 49
},
{
"epoch": 0.15657620041753653,
"grad_norm": 13.291611671447754,
"learning_rate": 2.5e-06,
"loss": 1.7061,
"step": 50
},
{
"epoch": 0.15970772442588727,
"grad_norm": 7.251201629638672,
"learning_rate": 2.55e-06,
"loss": 1.7924,
"step": 51
},
{
"epoch": 0.162839248434238,
"grad_norm": 5.2126054763793945,
"learning_rate": 2.6e-06,
"loss": 1.6735,
"step": 52
},
{
"epoch": 0.16597077244258873,
"grad_norm": 5.435528755187988,
"learning_rate": 2.6500000000000005e-06,
"loss": 1.6265,
"step": 53
},
{
"epoch": 0.16910229645093947,
"grad_norm": 4.505807399749756,
"learning_rate": 2.7000000000000004e-06,
"loss": 1.4851,
"step": 54
},
{
"epoch": 0.1722338204592902,
"grad_norm": 5.128388404846191,
"learning_rate": 2.7500000000000004e-06,
"loss": 1.5832,
"step": 55
},
{
"epoch": 0.17536534446764093,
"grad_norm": 16.935827255249023,
"learning_rate": 2.8000000000000003e-06,
"loss": 1.6553,
"step": 56
},
{
"epoch": 0.17849686847599164,
"grad_norm": 3.664458990097046,
"learning_rate": 2.85e-06,
"loss": 1.5,
"step": 57
},
{
"epoch": 0.18162839248434237,
"grad_norm": 7.763802528381348,
"learning_rate": 2.9e-06,
"loss": 1.367,
"step": 58
},
{
"epoch": 0.1847599164926931,
"grad_norm": 3.2216155529022217,
"learning_rate": 2.95e-06,
"loss": 1.3863,
"step": 59
},
{
"epoch": 0.18789144050104384,
"grad_norm": 4.384445667266846,
"learning_rate": 3e-06,
"loss": 1.4247,
"step": 60
},
{
"epoch": 0.19102296450939457,
"grad_norm": 4.8080878257751465,
"learning_rate": 3.05e-06,
"loss": 1.3257,
"step": 61
},
{
"epoch": 0.1941544885177453,
"grad_norm": 4.154761791229248,
"learning_rate": 3.1000000000000004e-06,
"loss": 1.321,
"step": 62
},
{
"epoch": 0.19728601252609604,
"grad_norm": 6.4742112159729,
"learning_rate": 3.1500000000000003e-06,
"loss": 1.2823,
"step": 63
},
{
"epoch": 0.20041753653444677,
"grad_norm": 2.583422899246216,
"learning_rate": 3.2000000000000003e-06,
"loss": 1.2136,
"step": 64
},
{
"epoch": 0.2035490605427975,
"grad_norm": 4.1933488845825195,
"learning_rate": 3.2500000000000002e-06,
"loss": 1.1855,
"step": 65
},
{
"epoch": 0.20668058455114824,
"grad_norm": 4.11049747467041,
"learning_rate": 3.3000000000000006e-06,
"loss": 1.2389,
"step": 66
},
{
"epoch": 0.20981210855949894,
"grad_norm": 2.264458417892456,
"learning_rate": 3.3500000000000005e-06,
"loss": 1.0651,
"step": 67
},
{
"epoch": 0.21294363256784968,
"grad_norm": 2.5408174991607666,
"learning_rate": 3.4000000000000005e-06,
"loss": 1.1389,
"step": 68
},
{
"epoch": 0.2160751565762004,
"grad_norm": 7.82421350479126,
"learning_rate": 3.45e-06,
"loss": 1.0956,
"step": 69
},
{
"epoch": 0.21920668058455114,
"grad_norm": 3.070939064025879,
"learning_rate": 3.5e-06,
"loss": 1.0451,
"step": 70
},
{
"epoch": 0.22233820459290188,
"grad_norm": 2.6310527324676514,
"learning_rate": 3.5500000000000003e-06,
"loss": 1.0538,
"step": 71
},
{
"epoch": 0.2254697286012526,
"grad_norm": 7.630155563354492,
"learning_rate": 3.6000000000000003e-06,
"loss": 1.0052,
"step": 72
},
{
"epoch": 0.22860125260960334,
"grad_norm": 6.950636863708496,
"learning_rate": 3.65e-06,
"loss": 1.0473,
"step": 73
},
{
"epoch": 0.23173277661795408,
"grad_norm": 2.2703945636749268,
"learning_rate": 3.7e-06,
"loss": 1.0576,
"step": 74
},
{
"epoch": 0.2348643006263048,
"grad_norm": 3.3817710876464844,
"learning_rate": 3.7500000000000005e-06,
"loss": 1.0177,
"step": 75
},
{
"epoch": 0.23799582463465555,
"grad_norm": 7.266414642333984,
"learning_rate": 3.8000000000000005e-06,
"loss": 1.0645,
"step": 76
},
{
"epoch": 0.24112734864300625,
"grad_norm": 5.782608509063721,
"learning_rate": 3.85e-06,
"loss": 1.0162,
"step": 77
},
{
"epoch": 0.24425887265135698,
"grad_norm": 2.7938575744628906,
"learning_rate": 3.900000000000001e-06,
"loss": 0.9664,
"step": 78
},
{
"epoch": 0.24739039665970772,
"grad_norm": 6.681935787200928,
"learning_rate": 3.95e-06,
"loss": 0.953,
"step": 79
},
{
"epoch": 0.25052192066805845,
"grad_norm": 2.253279209136963,
"learning_rate": 4.000000000000001e-06,
"loss": 0.9568,
"step": 80
},
{
"epoch": 0.2536534446764092,
"grad_norm": 1.4875826835632324,
"learning_rate": 4.05e-06,
"loss": 0.9448,
"step": 81
},
{
"epoch": 0.2567849686847599,
"grad_norm": 2.4987940788269043,
"learning_rate": 4.1e-06,
"loss": 0.9393,
"step": 82
},
{
"epoch": 0.2599164926931106,
"grad_norm": 4.712948322296143,
"learning_rate": 4.15e-06,
"loss": 0.9532,
"step": 83
},
{
"epoch": 0.2630480167014614,
"grad_norm": 6.9030632972717285,
"learning_rate": 4.2000000000000004e-06,
"loss": 0.96,
"step": 84
},
{
"epoch": 0.2661795407098121,
"grad_norm": 3.4780967235565186,
"learning_rate": 4.25e-06,
"loss": 0.8993,
"step": 85
},
{
"epoch": 0.26931106471816285,
"grad_norm": 1.526064395904541,
"learning_rate": 4.3e-06,
"loss": 0.9021,
"step": 86
},
{
"epoch": 0.27244258872651356,
"grad_norm": 10.727686882019043,
"learning_rate": 4.350000000000001e-06,
"loss": 0.856,
"step": 87
},
{
"epoch": 0.2755741127348643,
"grad_norm": 12.483160972595215,
"learning_rate": 4.4e-06,
"loss": 0.9357,
"step": 88
},
{
"epoch": 0.278705636743215,
"grad_norm": 6.544492244720459,
"learning_rate": 4.450000000000001e-06,
"loss": 0.9168,
"step": 89
},
{
"epoch": 0.2818371607515658,
"grad_norm": 1.178139567375183,
"learning_rate": 4.5e-06,
"loss": 0.8748,
"step": 90
},
{
"epoch": 0.2849686847599165,
"grad_norm": 1.711506962776184,
"learning_rate": 4.5500000000000005e-06,
"loss": 0.8425,
"step": 91
},
{
"epoch": 0.2881002087682672,
"grad_norm": 3.281747341156006,
"learning_rate": 4.600000000000001e-06,
"loss": 0.8491,
"step": 92
},
{
"epoch": 0.29123173277661796,
"grad_norm": 2.2964377403259277,
"learning_rate": 4.65e-06,
"loss": 0.8038,
"step": 93
},
{
"epoch": 0.29436325678496866,
"grad_norm": 1.959700345993042,
"learning_rate": 4.7e-06,
"loss": 0.8439,
"step": 94
},
{
"epoch": 0.2974947807933194,
"grad_norm": 3.979384183883667,
"learning_rate": 4.75e-06,
"loss": 0.8839,
"step": 95
},
{
"epoch": 0.30062630480167013,
"grad_norm": 1.4721262454986572,
"learning_rate": 4.800000000000001e-06,
"loss": 0.845,
"step": 96
},
{
"epoch": 0.3037578288100209,
"grad_norm": 2.862248659133911,
"learning_rate": 4.85e-06,
"loss": 0.7748,
"step": 97
},
{
"epoch": 0.3068893528183716,
"grad_norm": 3.7439088821411133,
"learning_rate": 4.9000000000000005e-06,
"loss": 0.8145,
"step": 98
},
{
"epoch": 0.31002087682672236,
"grad_norm": 1.6654618978500366,
"learning_rate": 4.95e-06,
"loss": 0.8326,
"step": 99
},
{
"epoch": 0.31315240083507306,
"grad_norm": 7.8437581062316895,
"learning_rate": 5e-06,
"loss": 0.8666,
"step": 100
},
{
"epoch": 0.3162839248434238,
"grad_norm": 6.429738521575928,
"learning_rate": 4.999996250830422e-06,
"loss": 0.836,
"step": 101
},
{
"epoch": 0.31941544885177453,
"grad_norm": 2.6017794609069824,
"learning_rate": 4.9999850033329326e-06,
"loss": 0.7785,
"step": 102
},
{
"epoch": 0.32254697286012524,
"grad_norm": 1.0575449466705322,
"learning_rate": 4.999966257541265e-06,
"loss": 0.7639,
"step": 103
},
{
"epoch": 0.325678496868476,
"grad_norm": 2.6932010650634766,
"learning_rate": 4.999940013511647e-06,
"loss": 0.8214,
"step": 104
},
{
"epoch": 0.3288100208768267,
"grad_norm": 2.925288438796997,
"learning_rate": 4.999906271322792e-06,
"loss": 0.8797,
"step": 105
},
{
"epoch": 0.33194154488517746,
"grad_norm": 1.3570607900619507,
"learning_rate": 4.9998650310759035e-06,
"loss": 0.792,
"step": 106
},
{
"epoch": 0.33507306889352817,
"grad_norm": 5.126713752746582,
"learning_rate": 4.999816292894676e-06,
"loss": 0.8352,
"step": 107
},
{
"epoch": 0.33820459290187893,
"grad_norm": 1.8966432809829712,
"learning_rate": 4.99976005692529e-06,
"loss": 0.7663,
"step": 108
},
{
"epoch": 0.34133611691022964,
"grad_norm": 1.3100829124450684,
"learning_rate": 4.999696323336418e-06,
"loss": 0.771,
"step": 109
},
{
"epoch": 0.3444676409185804,
"grad_norm": 2.4025354385375977,
"learning_rate": 4.999625092319218e-06,
"loss": 0.7618,
"step": 110
},
{
"epoch": 0.3475991649269311,
"grad_norm": 1.130232810974121,
"learning_rate": 4.999546364087334e-06,
"loss": 0.7705,
"step": 111
},
{
"epoch": 0.35073068893528186,
"grad_norm": 3.430262327194214,
"learning_rate": 4.999460138876901e-06,
"loss": 0.77,
"step": 112
},
{
"epoch": 0.35386221294363257,
"grad_norm": 1.1272103786468506,
"learning_rate": 4.999366416946536e-06,
"loss": 0.7133,
"step": 113
},
{
"epoch": 0.3569937369519833,
"grad_norm": 1.1740471124649048,
"learning_rate": 4.999265198577342e-06,
"loss": 0.7684,
"step": 114
},
{
"epoch": 0.36012526096033404,
"grad_norm": 1.3138248920440674,
"learning_rate": 4.999156484072907e-06,
"loss": 0.7888,
"step": 115
},
{
"epoch": 0.36325678496868474,
"grad_norm": 1.061711311340332,
"learning_rate": 4.999040273759304e-06,
"loss": 0.7484,
"step": 116
},
{
"epoch": 0.3663883089770355,
"grad_norm": 1.4682390689849854,
"learning_rate": 4.998916567985083e-06,
"loss": 0.7296,
"step": 117
},
{
"epoch": 0.3695198329853862,
"grad_norm": 2.884068250656128,
"learning_rate": 4.998785367121284e-06,
"loss": 0.7662,
"step": 118
},
{
"epoch": 0.37265135699373697,
"grad_norm": 0.9812761545181274,
"learning_rate": 4.9986466715614205e-06,
"loss": 0.7307,
"step": 119
},
{
"epoch": 0.3757828810020877,
"grad_norm": 2.2237496376037598,
"learning_rate": 4.998500481721484e-06,
"loss": 0.6761,
"step": 120
},
{
"epoch": 0.37891440501043844,
"grad_norm": 1.4004178047180176,
"learning_rate": 4.998346798039952e-06,
"loss": 0.7505,
"step": 121
},
{
"epoch": 0.38204592901878914,
"grad_norm": 5.54975700378418,
"learning_rate": 4.99818562097777e-06,
"loss": 0.7615,
"step": 122
},
{
"epoch": 0.38517745302713985,
"grad_norm": 6.17140531539917,
"learning_rate": 4.9980169510183624e-06,
"loss": 0.7002,
"step": 123
},
{
"epoch": 0.3883089770354906,
"grad_norm": 4.974380016326904,
"learning_rate": 4.997840788667628e-06,
"loss": 0.7449,
"step": 124
},
{
"epoch": 0.3914405010438413,
"grad_norm": 1.4133399724960327,
"learning_rate": 4.997657134453937e-06,
"loss": 0.7442,
"step": 125
},
{
"epoch": 0.3945720250521921,
"grad_norm": 1.868915319442749,
"learning_rate": 4.9974659889281295e-06,
"loss": 0.7104,
"step": 126
},
{
"epoch": 0.3977035490605428,
"grad_norm": 1.2599350214004517,
"learning_rate": 4.997267352663514e-06,
"loss": 0.7385,
"step": 127
},
{
"epoch": 0.40083507306889354,
"grad_norm": 1.4353641271591187,
"learning_rate": 4.997061226255869e-06,
"loss": 0.7081,
"step": 128
},
{
"epoch": 0.40396659707724425,
"grad_norm": 3.2492141723632812,
"learning_rate": 4.996847610323437e-06,
"loss": 0.7859,
"step": 129
},
{
"epoch": 0.407098121085595,
"grad_norm": 9.599719047546387,
"learning_rate": 4.996626505506923e-06,
"loss": 0.7241,
"step": 130
},
{
"epoch": 0.4102296450939457,
"grad_norm": 10.053650856018066,
"learning_rate": 4.996397912469494e-06,
"loss": 0.6841,
"step": 131
},
{
"epoch": 0.4133611691022965,
"grad_norm": 1.323876976966858,
"learning_rate": 4.996161831896777e-06,
"loss": 0.7317,
"step": 132
},
{
"epoch": 0.4164926931106472,
"grad_norm": 1.4180598258972168,
"learning_rate": 4.9959182644968594e-06,
"loss": 0.692,
"step": 133
},
{
"epoch": 0.4196242171189979,
"grad_norm": 1.2194396257400513,
"learning_rate": 4.99566721100028e-06,
"loss": 0.7068,
"step": 134
},
{
"epoch": 0.42275574112734865,
"grad_norm": 1.0984960794448853,
"learning_rate": 4.995408672160031e-06,
"loss": 0.6946,
"step": 135
},
{
"epoch": 0.42588726513569936,
"grad_norm": 1.9341071844100952,
"learning_rate": 4.995142648751561e-06,
"loss": 0.7467,
"step": 136
},
{
"epoch": 0.4290187891440501,
"grad_norm": 1.9960932731628418,
"learning_rate": 4.9948691415727594e-06,
"loss": 0.7379,
"step": 137
},
{
"epoch": 0.4321503131524008,
"grad_norm": 0.8743917942047119,
"learning_rate": 4.994588151443968e-06,
"loss": 0.66,
"step": 138
},
{
"epoch": 0.4352818371607516,
"grad_norm": 0.8655261993408203,
"learning_rate": 4.99429967920797e-06,
"loss": 0.6646,
"step": 139
},
{
"epoch": 0.4384133611691023,
"grad_norm": 5.462070941925049,
"learning_rate": 4.994003725729992e-06,
"loss": 0.643,
"step": 140
},
{
"epoch": 0.44154488517745305,
"grad_norm": 2.1401469707489014,
"learning_rate": 4.993700291897695e-06,
"loss": 0.6639,
"step": 141
},
{
"epoch": 0.44467640918580376,
"grad_norm": 1.8219833374023438,
"learning_rate": 4.9933893786211815e-06,
"loss": 0.6673,
"step": 142
},
{
"epoch": 0.44780793319415446,
"grad_norm": 1.641079306602478,
"learning_rate": 4.993070986832984e-06,
"loss": 0.658,
"step": 143
},
{
"epoch": 0.4509394572025052,
"grad_norm": 1.1739819049835205,
"learning_rate": 4.992745117488066e-06,
"loss": 0.6826,
"step": 144
},
{
"epoch": 0.45407098121085593,
"grad_norm": 2.309185743331909,
"learning_rate": 4.9924117715638185e-06,
"loss": 0.6536,
"step": 145
},
{
"epoch": 0.4572025052192067,
"grad_norm": 1.09304940700531,
"learning_rate": 4.99207095006006e-06,
"loss": 0.721,
"step": 146
},
{
"epoch": 0.4603340292275574,
"grad_norm": 0.9056984186172485,
"learning_rate": 4.991722653999025e-06,
"loss": 0.7019,
"step": 147
},
{
"epoch": 0.46346555323590816,
"grad_norm": 1.8440625667572021,
"learning_rate": 4.991366884425374e-06,
"loss": 0.707,
"step": 148
},
{
"epoch": 0.46659707724425886,
"grad_norm": 1.2244676351547241,
"learning_rate": 4.991003642406177e-06,
"loss": 0.6407,
"step": 149
},
{
"epoch": 0.4697286012526096,
"grad_norm": 0.9258589744567871,
"learning_rate": 4.99063292903092e-06,
"loss": 0.6954,
"step": 150
},
{
"epoch": 0.47286012526096033,
"grad_norm": 4.176390647888184,
"learning_rate": 4.990254745411496e-06,
"loss": 0.6812,
"step": 151
},
{
"epoch": 0.4759916492693111,
"grad_norm": 1.4322530031204224,
"learning_rate": 4.989869092682205e-06,
"loss": 0.6808,
"step": 152
},
{
"epoch": 0.4791231732776618,
"grad_norm": 0.8017717003822327,
"learning_rate": 4.989475971999748e-06,
"loss": 0.687,
"step": 153
},
{
"epoch": 0.4822546972860125,
"grad_norm": 1.5641374588012695,
"learning_rate": 4.989075384543228e-06,
"loss": 0.6599,
"step": 154
},
{
"epoch": 0.48538622129436326,
"grad_norm": 1.1522141695022583,
"learning_rate": 4.98866733151414e-06,
"loss": 0.6546,
"step": 155
},
{
"epoch": 0.48851774530271397,
"grad_norm": 0.8593171238899231,
"learning_rate": 4.988251814136372e-06,
"loss": 0.6857,
"step": 156
},
{
"epoch": 0.49164926931106473,
"grad_norm": 2.668159246444702,
"learning_rate": 4.9878288336562e-06,
"loss": 0.661,
"step": 157
},
{
"epoch": 0.49478079331941544,
"grad_norm": 0.9953671097755432,
"learning_rate": 4.987398391342285e-06,
"loss": 0.6512,
"step": 158
},
{
"epoch": 0.4979123173277662,
"grad_norm": 1.042872667312622,
"learning_rate": 4.986960488485667e-06,
"loss": 0.6311,
"step": 159
},
{
"epoch": 0.5010438413361169,
"grad_norm": 0.9070663452148438,
"learning_rate": 4.9865151263997645e-06,
"loss": 0.675,
"step": 160
},
{
"epoch": 0.5041753653444676,
"grad_norm": 0.8460433483123779,
"learning_rate": 4.986062306420367e-06,
"loss": 0.6635,
"step": 161
},
{
"epoch": 0.5073068893528184,
"grad_norm": 1.2639834880828857,
"learning_rate": 4.985602029905635e-06,
"loss": 0.6327,
"step": 162
},
{
"epoch": 0.5104384133611691,
"grad_norm": 0.8775074481964111,
"learning_rate": 4.985134298236091e-06,
"loss": 0.644,
"step": 163
},
{
"epoch": 0.5135699373695198,
"grad_norm": 1.2031961679458618,
"learning_rate": 4.98465911281462e-06,
"loss": 0.6254,
"step": 164
},
{
"epoch": 0.5167014613778705,
"grad_norm": 0.892494797706604,
"learning_rate": 4.984176475066463e-06,
"loss": 0.7122,
"step": 165
},
{
"epoch": 0.5198329853862212,
"grad_norm": 2.7122485637664795,
"learning_rate": 4.983686386439212e-06,
"loss": 0.6679,
"step": 166
},
{
"epoch": 0.5229645093945721,
"grad_norm": 0.9344426989555359,
"learning_rate": 4.983188848402806e-06,
"loss": 0.6319,
"step": 167
},
{
"epoch": 0.5260960334029228,
"grad_norm": 1.4093577861785889,
"learning_rate": 4.982683862449531e-06,
"loss": 0.6425,
"step": 168
},
{
"epoch": 0.5292275574112735,
"grad_norm": 1.1285009384155273,
"learning_rate": 4.982171430094007e-06,
"loss": 0.6298,
"step": 169
},
{
"epoch": 0.5323590814196242,
"grad_norm": 1.952778935432434,
"learning_rate": 4.981651552873193e-06,
"loss": 0.7066,
"step": 170
},
{
"epoch": 0.535490605427975,
"grad_norm": 5.133765697479248,
"learning_rate": 4.981124232346374e-06,
"loss": 0.6634,
"step": 171
},
{
"epoch": 0.5386221294363257,
"grad_norm": 0.9770542979240417,
"learning_rate": 4.980589470095161e-06,
"loss": 0.7121,
"step": 172
},
{
"epoch": 0.5417536534446764,
"grad_norm": 0.8414323925971985,
"learning_rate": 4.980047267723487e-06,
"loss": 0.6397,
"step": 173
},
{
"epoch": 0.5448851774530271,
"grad_norm": 1.9173879623413086,
"learning_rate": 4.979497626857596e-06,
"loss": 0.6228,
"step": 174
},
{
"epoch": 0.5480167014613778,
"grad_norm": 1.0823363065719604,
"learning_rate": 4.978940549146048e-06,
"loss": 0.6475,
"step": 175
},
{
"epoch": 0.5511482254697286,
"grad_norm": 3.715353488922119,
"learning_rate": 4.978376036259706e-06,
"loss": 0.7127,
"step": 176
},
{
"epoch": 0.5542797494780793,
"grad_norm": 0.981584370136261,
"learning_rate": 4.9778040898917325e-06,
"loss": 0.6468,
"step": 177
},
{
"epoch": 0.55741127348643,
"grad_norm": 1.70566987991333,
"learning_rate": 4.977224711757587e-06,
"loss": 0.6476,
"step": 178
},
{
"epoch": 0.5605427974947808,
"grad_norm": 0.9217923283576965,
"learning_rate": 4.976637903595019e-06,
"loss": 0.6731,
"step": 179
},
{
"epoch": 0.5636743215031316,
"grad_norm": 0.8994677662849426,
"learning_rate": 4.976043667164063e-06,
"loss": 0.6562,
"step": 180
},
{
"epoch": 0.5668058455114823,
"grad_norm": 1.1613017320632935,
"learning_rate": 4.975442004247034e-06,
"loss": 0.6417,
"step": 181
},
{
"epoch": 0.569937369519833,
"grad_norm": 1.6041977405548096,
"learning_rate": 4.974832916648521e-06,
"loss": 0.6029,
"step": 182
},
{
"epoch": 0.5730688935281837,
"grad_norm": 1.7978405952453613,
"learning_rate": 4.974216406195383e-06,
"loss": 0.6269,
"step": 183
},
{
"epoch": 0.5762004175365344,
"grad_norm": 1.6021920442581177,
"learning_rate": 4.973592474736739e-06,
"loss": 0.6149,
"step": 184
},
{
"epoch": 0.5793319415448852,
"grad_norm": 0.8973568677902222,
"learning_rate": 4.972961124143971e-06,
"loss": 0.6648,
"step": 185
},
{
"epoch": 0.5824634655532359,
"grad_norm": 1.9432591199874878,
"learning_rate": 4.972322356310711e-06,
"loss": 0.6299,
"step": 186
},
{
"epoch": 0.5855949895615866,
"grad_norm": 4.457028388977051,
"learning_rate": 4.971676173152839e-06,
"loss": 0.656,
"step": 187
},
{
"epoch": 0.5887265135699373,
"grad_norm": 2.0989716053009033,
"learning_rate": 4.971022576608473e-06,
"loss": 0.6539,
"step": 188
},
{
"epoch": 0.5918580375782881,
"grad_norm": 1.0646967887878418,
"learning_rate": 4.97036156863797e-06,
"loss": 0.6727,
"step": 189
},
{
"epoch": 0.5949895615866388,
"grad_norm": 1.6522265672683716,
"learning_rate": 4.969693151223914e-06,
"loss": 0.6643,
"step": 190
},
{
"epoch": 0.5981210855949896,
"grad_norm": 1.7503505945205688,
"learning_rate": 4.969017326371115e-06,
"loss": 0.6402,
"step": 191
},
{
"epoch": 0.6012526096033403,
"grad_norm": 1.2341989278793335,
"learning_rate": 4.968334096106597e-06,
"loss": 0.6413,
"step": 192
},
{
"epoch": 0.6043841336116911,
"grad_norm": 3.089054584503174,
"learning_rate": 4.967643462479597e-06,
"loss": 0.6825,
"step": 193
},
{
"epoch": 0.6075156576200418,
"grad_norm": 2.711623430252075,
"learning_rate": 4.966945427561557e-06,
"loss": 0.65,
"step": 194
},
{
"epoch": 0.6106471816283925,
"grad_norm": 4.641184329986572,
"learning_rate": 4.966239993446118e-06,
"loss": 0.6229,
"step": 195
},
{
"epoch": 0.6137787056367432,
"grad_norm": 1.7984074354171753,
"learning_rate": 4.965527162249114e-06,
"loss": 0.6473,
"step": 196
},
{
"epoch": 0.6169102296450939,
"grad_norm": 1.1643115282058716,
"learning_rate": 4.964806936108566e-06,
"loss": 0.6404,
"step": 197
},
{
"epoch": 0.6200417536534447,
"grad_norm": 2.1877920627593994,
"learning_rate": 4.9640793171846725e-06,
"loss": 0.6185,
"step": 198
},
{
"epoch": 0.6231732776617954,
"grad_norm": 1.7970566749572754,
"learning_rate": 4.963344307659807e-06,
"loss": 0.634,
"step": 199
},
{
"epoch": 0.6263048016701461,
"grad_norm": 1.6014361381530762,
"learning_rate": 4.96260190973851e-06,
"loss": 0.6562,
"step": 200
},
{
"epoch": 0.6294363256784968,
"grad_norm": 0.8743320107460022,
"learning_rate": 4.961852125647482e-06,
"loss": 0.6133,
"step": 201
},
{
"epoch": 0.6325678496868476,
"grad_norm": 1.9526551961898804,
"learning_rate": 4.961094957635578e-06,
"loss": 0.6451,
"step": 202
},
{
"epoch": 0.6356993736951984,
"grad_norm": 3.6597347259521484,
"learning_rate": 4.960330407973798e-06,
"loss": 0.6386,
"step": 203
},
{
"epoch": 0.6388308977035491,
"grad_norm": 1.7180207967758179,
"learning_rate": 4.959558478955283e-06,
"loss": 0.6688,
"step": 204
},
{
"epoch": 0.6419624217118998,
"grad_norm": 0.9058470129966736,
"learning_rate": 4.958779172895308e-06,
"loss": 0.6161,
"step": 205
},
{
"epoch": 0.6450939457202505,
"grad_norm": 1.0031033754348755,
"learning_rate": 4.957992492131274e-06,
"loss": 0.6437,
"step": 206
},
{
"epoch": 0.6482254697286013,
"grad_norm": 1.5846725702285767,
"learning_rate": 4.9571984390226985e-06,
"loss": 0.6332,
"step": 207
},
{
"epoch": 0.651356993736952,
"grad_norm": 1.9951609373092651,
"learning_rate": 4.956397015951215e-06,
"loss": 0.636,
"step": 208
},
{
"epoch": 0.6544885177453027,
"grad_norm": 1.4122583866119385,
"learning_rate": 4.95558822532056e-06,
"loss": 0.6586,
"step": 209
},
{
"epoch": 0.6576200417536534,
"grad_norm": 1.2243481874465942,
"learning_rate": 4.954772069556568e-06,
"loss": 0.6313,
"step": 210
},
{
"epoch": 0.6607515657620042,
"grad_norm": 0.8756356835365295,
"learning_rate": 4.953948551107164e-06,
"loss": 0.6406,
"step": 211
},
{
"epoch": 0.6638830897703549,
"grad_norm": 2.9979734420776367,
"learning_rate": 4.953117672442356e-06,
"loss": 0.5803,
"step": 212
},
{
"epoch": 0.6670146137787056,
"grad_norm": 2.1859359741210938,
"learning_rate": 4.952279436054229e-06,
"loss": 0.6607,
"step": 213
},
{
"epoch": 0.6701461377870563,
"grad_norm": 0.6929755806922913,
"learning_rate": 4.9514338444569346e-06,
"loss": 0.5989,
"step": 214
},
{
"epoch": 0.673277661795407,
"grad_norm": 1.0361783504486084,
"learning_rate": 4.950580900186685e-06,
"loss": 0.6654,
"step": 215
},
{
"epoch": 0.6764091858037579,
"grad_norm": 1.210898518562317,
"learning_rate": 4.9497206058017475e-06,
"loss": 0.6213,
"step": 216
},
{
"epoch": 0.6795407098121086,
"grad_norm": 1.200990080833435,
"learning_rate": 4.948852963882434e-06,
"loss": 0.6654,
"step": 217
},
{
"epoch": 0.6826722338204593,
"grad_norm": 1.481831669807434,
"learning_rate": 4.947977977031093e-06,
"loss": 0.6474,
"step": 218
},
{
"epoch": 0.68580375782881,
"grad_norm": 0.9883334636688232,
"learning_rate": 4.947095647872103e-06,
"loss": 0.6735,
"step": 219
},
{
"epoch": 0.6889352818371608,
"grad_norm": 0.7436536550521851,
"learning_rate": 4.946205979051868e-06,
"loss": 0.6456,
"step": 220
},
{
"epoch": 0.6920668058455115,
"grad_norm": 0.9057570099830627,
"learning_rate": 4.945308973238802e-06,
"loss": 0.6228,
"step": 221
},
{
"epoch": 0.6951983298538622,
"grad_norm": 1.341081142425537,
"learning_rate": 4.944404633123324e-06,
"loss": 0.6417,
"step": 222
},
{
"epoch": 0.6983298538622129,
"grad_norm": 0.7958157062530518,
"learning_rate": 4.943492961417859e-06,
"loss": 0.6494,
"step": 223
},
{
"epoch": 0.7014613778705637,
"grad_norm": 1.216025471687317,
"learning_rate": 4.9425739608568106e-06,
"loss": 0.6566,
"step": 224
},
{
"epoch": 0.7045929018789144,
"grad_norm": 0.9774854779243469,
"learning_rate": 4.9416476341965735e-06,
"loss": 0.6171,
"step": 225
},
{
"epoch": 0.7077244258872651,
"grad_norm": 2.1562681198120117,
"learning_rate": 4.940713984215512e-06,
"loss": 0.629,
"step": 226
},
{
"epoch": 0.7108559498956158,
"grad_norm": 1.9521286487579346,
"learning_rate": 4.9397730137139556e-06,
"loss": 0.6475,
"step": 227
},
{
"epoch": 0.7139874739039666,
"grad_norm": 1.5749104022979736,
"learning_rate": 4.9388247255141895e-06,
"loss": 0.6053,
"step": 228
},
{
"epoch": 0.7171189979123174,
"grad_norm": 1.2008254528045654,
"learning_rate": 4.937869122460449e-06,
"loss": 0.6052,
"step": 229
},
{
"epoch": 0.7202505219206681,
"grad_norm": 1.0774102210998535,
"learning_rate": 4.93690620741891e-06,
"loss": 0.6099,
"step": 230
},
{
"epoch": 0.7233820459290188,
"grad_norm": 1.0929996967315674,
"learning_rate": 4.935935983277675e-06,
"loss": 0.6363,
"step": 231
},
{
"epoch": 0.7265135699373695,
"grad_norm": 0.8830653429031372,
"learning_rate": 4.934958452946774e-06,
"loss": 0.6136,
"step": 232
},
{
"epoch": 0.7296450939457203,
"grad_norm": 3.591218948364258,
"learning_rate": 4.933973619358147e-06,
"loss": 0.5962,
"step": 233
},
{
"epoch": 0.732776617954071,
"grad_norm": 2.5797672271728516,
"learning_rate": 4.932981485465643e-06,
"loss": 0.6405,
"step": 234
},
{
"epoch": 0.7359081419624217,
"grad_norm": 1.0467664003372192,
"learning_rate": 4.9319820542450025e-06,
"loss": 0.6155,
"step": 235
},
{
"epoch": 0.7390396659707724,
"grad_norm": 0.8099795579910278,
"learning_rate": 4.930975328693856e-06,
"loss": 0.5615,
"step": 236
},
{
"epoch": 0.7421711899791231,
"grad_norm": 0.8906702995300293,
"learning_rate": 4.92996131183171e-06,
"loss": 0.6501,
"step": 237
},
{
"epoch": 0.7453027139874739,
"grad_norm": 1.0871416330337524,
"learning_rate": 4.928940006699944e-06,
"loss": 0.6282,
"step": 238
},
{
"epoch": 0.7484342379958246,
"grad_norm": 1.3209614753723145,
"learning_rate": 4.927911416361792e-06,
"loss": 0.598,
"step": 239
},
{
"epoch": 0.7515657620041754,
"grad_norm": 1.2252682447433472,
"learning_rate": 4.926875543902344e-06,
"loss": 0.6433,
"step": 240
},
{
"epoch": 0.7546972860125261,
"grad_norm": 1.0569007396697998,
"learning_rate": 4.9258323924285285e-06,
"loss": 0.5927,
"step": 241
},
{
"epoch": 0.7578288100208769,
"grad_norm": 0.9309014081954956,
"learning_rate": 4.924781965069106e-06,
"loss": 0.5927,
"step": 242
},
{
"epoch": 0.7609603340292276,
"grad_norm": 1.0200378894805908,
"learning_rate": 4.923724264974662e-06,
"loss": 0.6064,
"step": 243
},
{
"epoch": 0.7640918580375783,
"grad_norm": 1.0533075332641602,
"learning_rate": 4.922659295317593e-06,
"loss": 0.6373,
"step": 244
},
{
"epoch": 0.767223382045929,
"grad_norm": 0.7889382839202881,
"learning_rate": 4.921587059292102e-06,
"loss": 0.5887,
"step": 245
},
{
"epoch": 0.7703549060542797,
"grad_norm": 0.7943588495254517,
"learning_rate": 4.920507560114183e-06,
"loss": 0.593,
"step": 246
},
{
"epoch": 0.7734864300626305,
"grad_norm": 0.8247205018997192,
"learning_rate": 4.919420801021617e-06,
"loss": 0.6151,
"step": 247
},
{
"epoch": 0.7766179540709812,
"grad_norm": 0.9979158043861389,
"learning_rate": 4.91832678527396e-06,
"loss": 0.6019,
"step": 248
},
{
"epoch": 0.7797494780793319,
"grad_norm": 0.9346868991851807,
"learning_rate": 4.917225516152532e-06,
"loss": 0.6098,
"step": 249
},
{
"epoch": 0.7828810020876826,
"grad_norm": 0.7487881183624268,
"learning_rate": 4.916116996960408e-06,
"loss": 0.5965,
"step": 250
},
{
"epoch": 0.7860125260960334,
"grad_norm": 0.821576714515686,
"learning_rate": 4.915001231022411e-06,
"loss": 0.6483,
"step": 251
},
{
"epoch": 0.7891440501043842,
"grad_norm": 1.0413196086883545,
"learning_rate": 4.913878221685096e-06,
"loss": 0.6108,
"step": 252
},
{
"epoch": 0.7922755741127349,
"grad_norm": 0.9560331702232361,
"learning_rate": 4.912747972316745e-06,
"loss": 0.5758,
"step": 253
},
{
"epoch": 0.7954070981210856,
"grad_norm": 0.8964638113975525,
"learning_rate": 4.911610486307356e-06,
"loss": 0.6432,
"step": 254
},
{
"epoch": 0.7985386221294363,
"grad_norm": 0.8418346047401428,
"learning_rate": 4.910465767068631e-06,
"loss": 0.6027,
"step": 255
},
{
"epoch": 0.8016701461377871,
"grad_norm": 1.792371153831482,
"learning_rate": 4.909313818033966e-06,
"loss": 0.6198,
"step": 256
},
{
"epoch": 0.8048016701461378,
"grad_norm": 1.036665439605713,
"learning_rate": 4.908154642658446e-06,
"loss": 0.6255,
"step": 257
},
{
"epoch": 0.8079331941544885,
"grad_norm": 0.7592151165008545,
"learning_rate": 4.906988244418823e-06,
"loss": 0.6035,
"step": 258
},
{
"epoch": 0.8110647181628392,
"grad_norm": 0.8843073844909668,
"learning_rate": 4.90581462681352e-06,
"loss": 0.6299,
"step": 259
},
{
"epoch": 0.81419624217119,
"grad_norm": 0.9489964246749878,
"learning_rate": 4.9046337933626086e-06,
"loss": 0.5869,
"step": 260
},
{
"epoch": 0.8173277661795407,
"grad_norm": 0.851691722869873,
"learning_rate": 4.903445747607806e-06,
"loss": 0.603,
"step": 261
},
{
"epoch": 0.8204592901878914,
"grad_norm": 1.3722106218338013,
"learning_rate": 4.902250493112458e-06,
"loss": 0.5939,
"step": 262
},
{
"epoch": 0.8235908141962421,
"grad_norm": 1.1002827882766724,
"learning_rate": 4.901048033461537e-06,
"loss": 0.6452,
"step": 263
},
{
"epoch": 0.826722338204593,
"grad_norm": 0.8428632020950317,
"learning_rate": 4.89983837226162e-06,
"loss": 0.5956,
"step": 264
},
{
"epoch": 0.8298538622129437,
"grad_norm": 0.7666584849357605,
"learning_rate": 4.898621513140889e-06,
"loss": 0.6067,
"step": 265
},
{
"epoch": 0.8329853862212944,
"grad_norm": 0.8413611054420471,
"learning_rate": 4.897397459749113e-06,
"loss": 0.5985,
"step": 266
},
{
"epoch": 0.8361169102296451,
"grad_norm": 2.3374335765838623,
"learning_rate": 4.896166215757638e-06,
"loss": 0.5885,
"step": 267
},
{
"epoch": 0.8392484342379958,
"grad_norm": 2.236640214920044,
"learning_rate": 4.894927784859377e-06,
"loss": 0.6408,
"step": 268
},
{
"epoch": 0.8423799582463466,
"grad_norm": 0.9715856313705444,
"learning_rate": 4.893682170768802e-06,
"loss": 0.5954,
"step": 269
},
{
"epoch": 0.8455114822546973,
"grad_norm": 1.0249912738800049,
"learning_rate": 4.892429377221928e-06,
"loss": 0.6186,
"step": 270
},
{
"epoch": 0.848643006263048,
"grad_norm": 1.255426049232483,
"learning_rate": 4.891169407976302e-06,
"loss": 0.6351,
"step": 271
},
{
"epoch": 0.8517745302713987,
"grad_norm": 0.9339559674263,
"learning_rate": 4.889902266810995e-06,
"loss": 0.5944,
"step": 272
},
{
"epoch": 0.8549060542797495,
"grad_norm": 1.2473429441452026,
"learning_rate": 4.888627957526589e-06,
"loss": 0.544,
"step": 273
},
{
"epoch": 0.8580375782881002,
"grad_norm": 1.0589442253112793,
"learning_rate": 4.887346483945166e-06,
"loss": 0.5543,
"step": 274
},
{
"epoch": 0.8611691022964509,
"grad_norm": 0.9844024777412415,
"learning_rate": 4.886057849910294e-06,
"loss": 0.5941,
"step": 275
},
{
"epoch": 0.8643006263048016,
"grad_norm": 2.88578200340271,
"learning_rate": 4.8847620592870196e-06,
"loss": 0.6124,
"step": 276
},
{
"epoch": 0.8674321503131524,
"grad_norm": 0.7496054172515869,
"learning_rate": 4.8834591159618524e-06,
"loss": 0.6006,
"step": 277
},
{
"epoch": 0.8705636743215032,
"grad_norm": 0.7403052449226379,
"learning_rate": 4.88214902384276e-06,
"loss": 0.5911,
"step": 278
},
{
"epoch": 0.8736951983298539,
"grad_norm": 0.9003771543502808,
"learning_rate": 4.880831786859146e-06,
"loss": 0.6347,
"step": 279
},
{
"epoch": 0.8768267223382046,
"grad_norm": 1.0345501899719238,
"learning_rate": 4.879507408961847e-06,
"loss": 0.6111,
"step": 280
},
{
"epoch": 0.8799582463465553,
"grad_norm": 1.4385879039764404,
"learning_rate": 4.878175894123116e-06,
"loss": 0.6454,
"step": 281
},
{
"epoch": 0.8830897703549061,
"grad_norm": 0.8469482064247131,
"learning_rate": 4.8768372463366145e-06,
"loss": 0.6163,
"step": 282
},
{
"epoch": 0.8862212943632568,
"grad_norm": 0.8859589695930481,
"learning_rate": 4.875491469617395e-06,
"loss": 0.6144,
"step": 283
},
{
"epoch": 0.8893528183716075,
"grad_norm": 1.8436834812164307,
"learning_rate": 4.874138568001895e-06,
"loss": 0.6275,
"step": 284
},
{
"epoch": 0.8924843423799582,
"grad_norm": 0.6646101474761963,
"learning_rate": 4.87277854554792e-06,
"loss": 0.615,
"step": 285
},
{
"epoch": 0.8956158663883089,
"grad_norm": 1.0070925951004028,
"learning_rate": 4.871411406334633e-06,
"loss": 0.5898,
"step": 286
},
{
"epoch": 0.8987473903966597,
"grad_norm": 0.9785194993019104,
"learning_rate": 4.870037154462545e-06,
"loss": 0.5992,
"step": 287
},
{
"epoch": 0.9018789144050104,
"grad_norm": 0.7244889736175537,
"learning_rate": 4.868655794053497e-06,
"loss": 0.6078,
"step": 288
},
{
"epoch": 0.9050104384133612,
"grad_norm": 1.4496444463729858,
"learning_rate": 4.8672673292506535e-06,
"loss": 0.5855,
"step": 289
},
{
"epoch": 0.9081419624217119,
"grad_norm": 1.8514957427978516,
"learning_rate": 4.865871764218486e-06,
"loss": 0.5707,
"step": 290
},
{
"epoch": 0.9112734864300627,
"grad_norm": 0.8439773321151733,
"learning_rate": 4.864469103142763e-06,
"loss": 0.5562,
"step": 291
},
{
"epoch": 0.9144050104384134,
"grad_norm": 0.8146086931228638,
"learning_rate": 4.8630593502305355e-06,
"loss": 0.6161,
"step": 292
},
{
"epoch": 0.9175365344467641,
"grad_norm": 0.8920315504074097,
"learning_rate": 4.861642509710126e-06,
"loss": 0.6139,
"step": 293
},
{
"epoch": 0.9206680584551148,
"grad_norm": 1.4980088472366333,
"learning_rate": 4.860218585831116e-06,
"loss": 0.6187,
"step": 294
},
{
"epoch": 0.9237995824634656,
"grad_norm": 0.9910127520561218,
"learning_rate": 4.8587875828643285e-06,
"loss": 0.5852,
"step": 295
},
{
"epoch": 0.9269311064718163,
"grad_norm": 0.819600522518158,
"learning_rate": 4.857349505101823e-06,
"loss": 0.6172,
"step": 296
},
{
"epoch": 0.930062630480167,
"grad_norm": 1.1059772968292236,
"learning_rate": 4.855904356856878e-06,
"loss": 0.5868,
"step": 297
},
{
"epoch": 0.9331941544885177,
"grad_norm": 1.2362196445465088,
"learning_rate": 4.854452142463977e-06,
"loss": 0.625,
"step": 298
},
{
"epoch": 0.9363256784968684,
"grad_norm": 0.9956470727920532,
"learning_rate": 4.852992866278799e-06,
"loss": 0.5923,
"step": 299
},
{
"epoch": 0.9394572025052192,
"grad_norm": 0.864109218120575,
"learning_rate": 4.851526532678203e-06,
"loss": 0.6315,
"step": 300
},
{
"epoch": 0.94258872651357,
"grad_norm": 0.8900614380836487,
"learning_rate": 4.850053146060217e-06,
"loss": 0.6128,
"step": 301
},
{
"epoch": 0.9457202505219207,
"grad_norm": 0.927254855632782,
"learning_rate": 4.84857271084402e-06,
"loss": 0.5955,
"step": 302
},
{
"epoch": 0.9488517745302714,
"grad_norm": 1.0046517848968506,
"learning_rate": 4.847085231469935e-06,
"loss": 0.6134,
"step": 303
},
{
"epoch": 0.9519832985386222,
"grad_norm": 0.734597384929657,
"learning_rate": 4.8455907123994125e-06,
"loss": 0.5927,
"step": 304
},
{
"epoch": 0.9551148225469729,
"grad_norm": 0.7338348031044006,
"learning_rate": 4.844089158115016e-06,
"loss": 0.5897,
"step": 305
},
{
"epoch": 0.9582463465553236,
"grad_norm": 0.9163988828659058,
"learning_rate": 4.8425805731204106e-06,
"loss": 0.6051,
"step": 306
},
{
"epoch": 0.9613778705636743,
"grad_norm": 1.050246238708496,
"learning_rate": 4.84106496194035e-06,
"loss": 0.5751,
"step": 307
},
{
"epoch": 0.964509394572025,
"grad_norm": 0.7637603878974915,
"learning_rate": 4.83954232912066e-06,
"loss": 0.5677,
"step": 308
},
{
"epoch": 0.9676409185803758,
"grad_norm": 0.7110525965690613,
"learning_rate": 4.838012679228229e-06,
"loss": 0.6051,
"step": 309
},
{
"epoch": 0.9707724425887265,
"grad_norm": 0.7662068605422974,
"learning_rate": 4.836476016850988e-06,
"loss": 0.59,
"step": 310
},
{
"epoch": 0.9739039665970772,
"grad_norm": 0.8907375335693359,
"learning_rate": 4.834932346597906e-06,
"loss": 0.5792,
"step": 311
},
{
"epoch": 0.9770354906054279,
"grad_norm": 0.8939849138259888,
"learning_rate": 4.833381673098966e-06,
"loss": 0.6062,
"step": 312
},
{
"epoch": 0.9801670146137788,
"grad_norm": 0.8878788948059082,
"learning_rate": 4.8318240010051595e-06,
"loss": 0.5694,
"step": 313
},
{
"epoch": 0.9832985386221295,
"grad_norm": 1.2523870468139648,
"learning_rate": 4.830259334988468e-06,
"loss": 0.5809,
"step": 314
},
{
"epoch": 0.9864300626304802,
"grad_norm": 1.0836797952651978,
"learning_rate": 4.82868767974185e-06,
"loss": 0.5949,
"step": 315
},
{
"epoch": 0.9895615866388309,
"grad_norm": 0.7985473871231079,
"learning_rate": 4.827109039979226e-06,
"loss": 0.6057,
"step": 316
},
{
"epoch": 0.9926931106471816,
"grad_norm": 1.042951226234436,
"learning_rate": 4.825523420435469e-06,
"loss": 0.6004,
"step": 317
},
{
"epoch": 0.9958246346555324,
"grad_norm": 0.7845115661621094,
"learning_rate": 4.823930825866381e-06,
"loss": 0.6161,
"step": 318
},
{
"epoch": 0.9989561586638831,
"grad_norm": 0.931854784488678,
"learning_rate": 4.82233126104869e-06,
"loss": 0.5912,
"step": 319
},
{
"epoch": 1.0,
"grad_norm": 0.931854784488678,
"learning_rate": 4.8207247307800275e-06,
"loss": 0.1914,
"step": 320
},
{
"epoch": 1.0031315240083507,
"grad_norm": 0.751028835773468,
"learning_rate": 4.819111239878916e-06,
"loss": 0.5802,
"step": 321
},
{
"epoch": 1.0062630480167014,
"grad_norm": 1.4943569898605347,
"learning_rate": 4.817490793184758e-06,
"loss": 0.613,
"step": 322
},
{
"epoch": 1.0093945720250521,
"grad_norm": 2.296318531036377,
"learning_rate": 4.815863395557816e-06,
"loss": 0.5453,
"step": 323
},
{
"epoch": 1.0125260960334028,
"grad_norm": 0.760101318359375,
"learning_rate": 4.814229051879202e-06,
"loss": 0.5302,
"step": 324
},
{
"epoch": 1.0156576200417538,
"grad_norm": 0.8145846128463745,
"learning_rate": 4.812587767050861e-06,
"loss": 0.5831,
"step": 325
},
{
"epoch": 1.0187891440501045,
"grad_norm": 0.9169796109199524,
"learning_rate": 4.8109395459955565e-06,
"loss": 0.5756,
"step": 326
},
{
"epoch": 1.0219206680584552,
"grad_norm": 0.8791524171829224,
"learning_rate": 4.809284393656858e-06,
"loss": 0.5988,
"step": 327
},
{
"epoch": 1.0250521920668059,
"grad_norm": 1.0184170007705688,
"learning_rate": 4.807622314999122e-06,
"loss": 0.5476,
"step": 328
},
{
"epoch": 1.0281837160751566,
"grad_norm": 0.8095184564590454,
"learning_rate": 4.8059533150074805e-06,
"loss": 0.5723,
"step": 329
},
{
"epoch": 1.0313152400835073,
"grad_norm": 0.7621930241584778,
"learning_rate": 4.804277398687826e-06,
"loss": 0.5841,
"step": 330
},
{
"epoch": 1.034446764091858,
"grad_norm": 3.729628324508667,
"learning_rate": 4.802594571066791e-06,
"loss": 0.5639,
"step": 331
},
{
"epoch": 1.0375782881002087,
"grad_norm": 1.6502974033355713,
"learning_rate": 4.800904837191743e-06,
"loss": 0.6024,
"step": 332
},
{
"epoch": 1.0407098121085594,
"grad_norm": 0.8031198978424072,
"learning_rate": 4.799208202130762e-06,
"loss": 0.5305,
"step": 333
},
{
"epoch": 1.0438413361169103,
"grad_norm": 0.939644992351532,
"learning_rate": 4.797504670972623e-06,
"loss": 0.5446,
"step": 334
},
{
"epoch": 1.046972860125261,
"grad_norm": 1.0589954853057861,
"learning_rate": 4.795794248826789e-06,
"loss": 0.5366,
"step": 335
},
{
"epoch": 1.0501043841336117,
"grad_norm": 0.9089614748954773,
"learning_rate": 4.794076940823391e-06,
"loss": 0.5795,
"step": 336
},
{
"epoch": 1.0532359081419624,
"grad_norm": 0.7732561230659485,
"learning_rate": 4.792352752113212e-06,
"loss": 0.5765,
"step": 337
},
{
"epoch": 1.0563674321503131,
"grad_norm": 1.811553955078125,
"learning_rate": 4.790621687867672e-06,
"loss": 0.561,
"step": 338
},
{
"epoch": 1.0594989561586639,
"grad_norm": 1.1930758953094482,
"learning_rate": 4.788883753278813e-06,
"loss": 0.5,
"step": 339
},
{
"epoch": 1.0626304801670146,
"grad_norm": 0.9551813006401062,
"learning_rate": 4.787138953559285e-06,
"loss": 0.5228,
"step": 340
},
{
"epoch": 1.0657620041753653,
"grad_norm": 0.9609586596488953,
"learning_rate": 4.785387293942329e-06,
"loss": 0.5827,
"step": 341
},
{
"epoch": 1.068893528183716,
"grad_norm": 0.8403449654579163,
"learning_rate": 4.783628779681759e-06,
"loss": 0.5585,
"step": 342
},
{
"epoch": 1.072025052192067,
"grad_norm": 0.9108251929283142,
"learning_rate": 4.7818634160519496e-06,
"loss": 0.6077,
"step": 343
},
{
"epoch": 1.0751565762004176,
"grad_norm": 0.9476898908615112,
"learning_rate": 4.780091208347819e-06,
"loss": 0.5493,
"step": 344
},
{
"epoch": 1.0782881002087683,
"grad_norm": 1.1943707466125488,
"learning_rate": 4.778312161884813e-06,
"loss": 0.5736,
"step": 345
},
{
"epoch": 1.081419624217119,
"grad_norm": 3.1342639923095703,
"learning_rate": 4.77652628199889e-06,
"loss": 0.5765,
"step": 346
},
{
"epoch": 1.0845511482254697,
"grad_norm": 2.7982125282287598,
"learning_rate": 4.7747335740465015e-06,
"loss": 0.6003,
"step": 347
},
{
"epoch": 1.0876826722338204,
"grad_norm": 1.5068914890289307,
"learning_rate": 4.7729340434045815e-06,
"loss": 0.5033,
"step": 348
},
{
"epoch": 1.0908141962421711,
"grad_norm": 0.8273429274559021,
"learning_rate": 4.771127695470527e-06,
"loss": 0.5309,
"step": 349
},
{
"epoch": 1.0939457202505218,
"grad_norm": 1.104974389076233,
"learning_rate": 4.76931453566218e-06,
"loss": 0.5244,
"step": 350
},
{
"epoch": 1.0970772442588728,
"grad_norm": 1.096509337425232,
"learning_rate": 4.7674945694178166e-06,
"loss": 0.5585,
"step": 351
},
{
"epoch": 1.1002087682672235,
"grad_norm": 1.0238200426101685,
"learning_rate": 4.765667802196127e-06,
"loss": 0.5589,
"step": 352
},
{
"epoch": 1.1033402922755742,
"grad_norm": 0.7515526413917542,
"learning_rate": 4.763834239476197e-06,
"loss": 0.5304,
"step": 353
},
{
"epoch": 1.1064718162839249,
"grad_norm": 1.0282566547393799,
"learning_rate": 4.761993886757499e-06,
"loss": 0.5476,
"step": 354
},
{
"epoch": 1.1096033402922756,
"grad_norm": 0.9962708950042725,
"learning_rate": 4.760146749559868e-06,
"loss": 0.5117,
"step": 355
},
{
"epoch": 1.1127348643006263,
"grad_norm": 0.7851671576499939,
"learning_rate": 4.758292833423488e-06,
"loss": 0.5542,
"step": 356
},
{
"epoch": 1.115866388308977,
"grad_norm": 0.8857759237289429,
"learning_rate": 4.756432143908876e-06,
"loss": 0.544,
"step": 357
},
{
"epoch": 1.1189979123173277,
"grad_norm": 0.9402740597724915,
"learning_rate": 4.7545646865968645e-06,
"loss": 0.5656,
"step": 358
},
{
"epoch": 1.1221294363256784,
"grad_norm": 0.8210407495498657,
"learning_rate": 4.752690467088584e-06,
"loss": 0.5733,
"step": 359
},
{
"epoch": 1.1252609603340291,
"grad_norm": 0.795684278011322,
"learning_rate": 4.750809491005449e-06,
"loss": 0.5678,
"step": 360
},
{
"epoch": 1.12839248434238,
"grad_norm": 0.8712463974952698,
"learning_rate": 4.748921763989139e-06,
"loss": 0.5777,
"step": 361
},
{
"epoch": 1.1315240083507307,
"grad_norm": 0.9810119867324829,
"learning_rate": 4.747027291701578e-06,
"loss": 0.5511,
"step": 362
},
{
"epoch": 1.1346555323590815,
"grad_norm": 0.81117844581604,
"learning_rate": 4.745126079824926e-06,
"loss": 0.5038,
"step": 363
},
{
"epoch": 1.1377870563674322,
"grad_norm": 0.7631494402885437,
"learning_rate": 4.743218134061556e-06,
"loss": 0.6272,
"step": 364
},
{
"epoch": 1.1409185803757829,
"grad_norm": 0.7601696252822876,
"learning_rate": 4.741303460134038e-06,
"loss": 0.571,
"step": 365
},
{
"epoch": 1.1440501043841336,
"grad_norm": 1.7977744340896606,
"learning_rate": 4.7393820637851205e-06,
"loss": 0.538,
"step": 366
},
{
"epoch": 1.1471816283924843,
"grad_norm": 2.022578001022339,
"learning_rate": 4.737453950777718e-06,
"loss": 0.5822,
"step": 367
},
{
"epoch": 1.150313152400835,
"grad_norm": 0.7586764693260193,
"learning_rate": 4.735519126894885e-06,
"loss": 0.5986,
"step": 368
},
{
"epoch": 1.153444676409186,
"grad_norm": 0.8970286846160889,
"learning_rate": 4.733577597939812e-06,
"loss": 0.542,
"step": 369
},
{
"epoch": 1.1565762004175366,
"grad_norm": 0.8546352982521057,
"learning_rate": 4.731629369735793e-06,
"loss": 0.5832,
"step": 370
},
{
"epoch": 1.1597077244258873,
"grad_norm": 0.9266164898872375,
"learning_rate": 4.72967444812622e-06,
"loss": 0.551,
"step": 371
},
{
"epoch": 1.162839248434238,
"grad_norm": 1.0413658618927002,
"learning_rate": 4.7277128389745595e-06,
"loss": 0.5866,
"step": 372
},
{
"epoch": 1.1659707724425887,
"grad_norm": 0.9312199950218201,
"learning_rate": 4.7257445481643334e-06,
"loss": 0.5723,
"step": 373
},
{
"epoch": 1.1691022964509394,
"grad_norm": 0.7389806509017944,
"learning_rate": 4.723769581599109e-06,
"loss": 0.5209,
"step": 374
},
{
"epoch": 1.1722338204592901,
"grad_norm": 3.053169012069702,
"learning_rate": 4.721787945202472e-06,
"loss": 0.6094,
"step": 375
},
{
"epoch": 1.1753653444676408,
"grad_norm": 1.288589596748352,
"learning_rate": 4.719799644918017e-06,
"loss": 0.5616,
"step": 376
},
{
"epoch": 1.1784968684759916,
"grad_norm": 0.7675042152404785,
"learning_rate": 4.717804686709323e-06,
"loss": 0.4963,
"step": 377
},
{
"epoch": 1.1816283924843423,
"grad_norm": 0.7246491312980652,
"learning_rate": 4.715803076559938e-06,
"loss": 0.5273,
"step": 378
},
{
"epoch": 1.1847599164926932,
"grad_norm": 0.8193361759185791,
"learning_rate": 4.713794820473366e-06,
"loss": 0.6107,
"step": 379
},
{
"epoch": 1.187891440501044,
"grad_norm": 0.9498510360717773,
"learning_rate": 4.711779924473037e-06,
"loss": 0.5421,
"step": 380
},
{
"epoch": 1.1910229645093946,
"grad_norm": 1.0479756593704224,
"learning_rate": 4.709758394602305e-06,
"loss": 0.5257,
"step": 381
},
{
"epoch": 1.1941544885177453,
"grad_norm": 0.907866895198822,
"learning_rate": 4.707730236924413e-06,
"loss": 0.5289,
"step": 382
},
{
"epoch": 1.197286012526096,
"grad_norm": 0.8861165642738342,
"learning_rate": 4.705695457522488e-06,
"loss": 0.5727,
"step": 383
},
{
"epoch": 1.2004175365344467,
"grad_norm": 0.7467761039733887,
"learning_rate": 4.703654062499516e-06,
"loss": 0.5602,
"step": 384
},
{
"epoch": 1.2035490605427974,
"grad_norm": 0.7456198334693909,
"learning_rate": 4.701606057978325e-06,
"loss": 0.5345,
"step": 385
},
{
"epoch": 1.2066805845511483,
"grad_norm": 1.9976060390472412,
"learning_rate": 4.699551450101571e-06,
"loss": 0.5504,
"step": 386
},
{
"epoch": 1.209812108559499,
"grad_norm": 1.5253807306289673,
"learning_rate": 4.697490245031709e-06,
"loss": 0.5568,
"step": 387
},
{
"epoch": 1.2129436325678498,
"grad_norm": 1.0786075592041016,
"learning_rate": 4.6954224489509885e-06,
"loss": 0.5564,
"step": 388
},
{
"epoch": 1.2160751565762005,
"grad_norm": 0.8385995030403137,
"learning_rate": 4.693348068061422e-06,
"loss": 0.5341,
"step": 389
},
{
"epoch": 1.2192066805845512,
"grad_norm": 0.8184949159622192,
"learning_rate": 4.691267108584774e-06,
"loss": 0.5614,
"step": 390
},
{
"epoch": 1.2223382045929019,
"grad_norm": 0.9964898824691772,
"learning_rate": 4.68917957676254e-06,
"loss": 0.5589,
"step": 391
},
{
"epoch": 1.2254697286012526,
"grad_norm": 1.0168914794921875,
"learning_rate": 4.687085478855931e-06,
"loss": 0.5892,
"step": 392
},
{
"epoch": 1.2286012526096033,
"grad_norm": 0.8841140866279602,
"learning_rate": 4.684984821145846e-06,
"loss": 0.5327,
"step": 393
},
{
"epoch": 1.231732776617954,
"grad_norm": 0.834431529045105,
"learning_rate": 4.682877609932866e-06,
"loss": 0.5594,
"step": 394
},
{
"epoch": 1.2348643006263047,
"grad_norm": 0.7256641983985901,
"learning_rate": 4.6807638515372234e-06,
"loss": 0.5443,
"step": 395
},
{
"epoch": 1.2379958246346556,
"grad_norm": 0.765096127986908,
"learning_rate": 4.678643552298788e-06,
"loss": 0.5439,
"step": 396
},
{
"epoch": 1.2411273486430063,
"grad_norm": 0.8760455846786499,
"learning_rate": 4.676516718577051e-06,
"loss": 0.5485,
"step": 397
},
{
"epoch": 1.244258872651357,
"grad_norm": 2.7111501693725586,
"learning_rate": 4.674383356751099e-06,
"loss": 0.5696,
"step": 398
},
{
"epoch": 1.2473903966597077,
"grad_norm": 1.0521738529205322,
"learning_rate": 4.672243473219601e-06,
"loss": 0.5503,
"step": 399
},
{
"epoch": 1.2505219206680585,
"grad_norm": 0.8909669518470764,
"learning_rate": 4.670097074400785e-06,
"loss": 0.5183,
"step": 400
},
{
"epoch": 1.2536534446764092,
"grad_norm": 0.7483847737312317,
"learning_rate": 4.667944166732424e-06,
"loss": 0.5669,
"step": 401
},
{
"epoch": 1.2567849686847599,
"grad_norm": 1.146997094154358,
"learning_rate": 4.665784756671808e-06,
"loss": 0.5464,
"step": 402
},
{
"epoch": 1.2599164926931106,
"grad_norm": 0.8998096585273743,
"learning_rate": 4.663618850695733e-06,
"loss": 0.5502,
"step": 403
},
{
"epoch": 1.2630480167014615,
"grad_norm": 0.8882688283920288,
"learning_rate": 4.6614464553004795e-06,
"loss": 0.5507,
"step": 404
},
{
"epoch": 1.2661795407098122,
"grad_norm": 0.8310684561729431,
"learning_rate": 4.659267577001789e-06,
"loss": 0.5164,
"step": 405
},
{
"epoch": 1.269311064718163,
"grad_norm": 0.9286114573478699,
"learning_rate": 4.657082222334851e-06,
"loss": 0.4813,
"step": 406
},
{
"epoch": 1.2724425887265136,
"grad_norm": 1.2394906282424927,
"learning_rate": 4.654890397854275e-06,
"loss": 0.5837,
"step": 407
},
{
"epoch": 1.2755741127348643,
"grad_norm": 4.00585412979126,
"learning_rate": 4.652692110134079e-06,
"loss": 0.5453,
"step": 408
},
{
"epoch": 1.278705636743215,
"grad_norm": 1.1667803525924683,
"learning_rate": 4.650487365767667e-06,
"loss": 0.5652,
"step": 409
},
{
"epoch": 1.2818371607515657,
"grad_norm": 0.9351289868354797,
"learning_rate": 4.648276171367807e-06,
"loss": 0.5576,
"step": 410
},
{
"epoch": 1.2849686847599164,
"grad_norm": 0.8107728958129883,
"learning_rate": 4.646058533566614e-06,
"loss": 0.5821,
"step": 411
},
{
"epoch": 1.2881002087682671,
"grad_norm": 0.7293011546134949,
"learning_rate": 4.643834459015525e-06,
"loss": 0.5363,
"step": 412
},
{
"epoch": 1.2912317327766178,
"grad_norm": 0.7550690770149231,
"learning_rate": 4.641603954385289e-06,
"loss": 0.53,
"step": 413
},
{
"epoch": 1.2943632567849686,
"grad_norm": 0.7626177072525024,
"learning_rate": 4.639367026365938e-06,
"loss": 0.5307,
"step": 414
},
{
"epoch": 1.2974947807933195,
"grad_norm": 1.0841096639633179,
"learning_rate": 4.637123681666769e-06,
"loss": 0.5162,
"step": 415
},
{
"epoch": 1.3006263048016702,
"grad_norm": 0.8814271092414856,
"learning_rate": 4.634873927016326e-06,
"loss": 0.5369,
"step": 416
},
{
"epoch": 1.303757828810021,
"grad_norm": 0.7402971982955933,
"learning_rate": 4.632617769162378e-06,
"loss": 0.5846,
"step": 417
},
{
"epoch": 1.3068893528183716,
"grad_norm": 0.8106061220169067,
"learning_rate": 4.6303552148719e-06,
"loss": 0.5289,
"step": 418
},
{
"epoch": 1.3100208768267223,
"grad_norm": 0.9241361618041992,
"learning_rate": 4.628086270931053e-06,
"loss": 0.5714,
"step": 419
},
{
"epoch": 1.313152400835073,
"grad_norm": 0.950332522392273,
"learning_rate": 4.625810944145159e-06,
"loss": 0.5817,
"step": 420
},
{
"epoch": 1.316283924843424,
"grad_norm": 0.9037718772888184,
"learning_rate": 4.623529241338689e-06,
"loss": 0.5902,
"step": 421
},
{
"epoch": 1.3194154488517746,
"grad_norm": 1.2110658884048462,
"learning_rate": 4.621241169355234e-06,
"loss": 0.561,
"step": 422
},
{
"epoch": 1.3225469728601253,
"grad_norm": 0.8582742214202881,
"learning_rate": 4.618946735057491e-06,
"loss": 0.5003,
"step": 423
},
{
"epoch": 1.325678496868476,
"grad_norm": 0.9203405976295471,
"learning_rate": 4.6166459453272386e-06,
"loss": 0.5639,
"step": 424
},
{
"epoch": 1.3288100208768268,
"grad_norm": 0.933721125125885,
"learning_rate": 4.614338807065317e-06,
"loss": 0.5766,
"step": 425
},
{
"epoch": 1.3319415448851775,
"grad_norm": 0.8435131311416626,
"learning_rate": 4.612025327191608e-06,
"loss": 0.5656,
"step": 426
},
{
"epoch": 1.3350730688935282,
"grad_norm": 0.795796811580658,
"learning_rate": 4.609705512645015e-06,
"loss": 0.4996,
"step": 427
},
{
"epoch": 1.3382045929018789,
"grad_norm": 0.8168228268623352,
"learning_rate": 4.6073793703834404e-06,
"loss": 0.5465,
"step": 428
},
{
"epoch": 1.3413361169102296,
"grad_norm": 0.8795569539070129,
"learning_rate": 4.605046907383765e-06,
"loss": 0.5407,
"step": 429
},
{
"epoch": 1.3444676409185803,
"grad_norm": 0.8504094481468201,
"learning_rate": 4.6027081306418295e-06,
"loss": 0.5589,
"step": 430
},
{
"epoch": 1.347599164926931,
"grad_norm": 1.485202431678772,
"learning_rate": 4.600363047172409e-06,
"loss": 0.5515,
"step": 431
},
{
"epoch": 1.350730688935282,
"grad_norm": 1.1156851053237915,
"learning_rate": 4.598011664009197e-06,
"loss": 0.5681,
"step": 432
},
{
"epoch": 1.3538622129436326,
"grad_norm": 0.8666876554489136,
"learning_rate": 4.595653988204779e-06,
"loss": 0.5451,
"step": 433
},
{
"epoch": 1.3569937369519833,
"grad_norm": 0.8192381858825684,
"learning_rate": 4.593290026830619e-06,
"loss": 0.5632,
"step": 434
},
{
"epoch": 1.360125260960334,
"grad_norm": 0.7994804978370667,
"learning_rate": 4.590919786977029e-06,
"loss": 0.5181,
"step": 435
},
{
"epoch": 1.3632567849686847,
"grad_norm": 0.8038607835769653,
"learning_rate": 4.5885432757531535e-06,
"loss": 0.5385,
"step": 436
},
{
"epoch": 1.3663883089770354,
"grad_norm": 0.7677503824234009,
"learning_rate": 4.586160500286948e-06,
"loss": 0.5455,
"step": 437
},
{
"epoch": 1.3695198329853862,
"grad_norm": 0.8293285369873047,
"learning_rate": 4.583771467725157e-06,
"loss": 0.5401,
"step": 438
},
{
"epoch": 1.372651356993737,
"grad_norm": 0.8607680797576904,
"learning_rate": 4.581376185233289e-06,
"loss": 0.5782,
"step": 439
},
{
"epoch": 1.3757828810020878,
"grad_norm": 0.8847081065177917,
"learning_rate": 4.578974659995601e-06,
"loss": 0.572,
"step": 440
},
{
"epoch": 1.3789144050104385,
"grad_norm": 0.7669641971588135,
"learning_rate": 4.576566899215075e-06,
"loss": 0.5655,
"step": 441
},
{
"epoch": 1.3820459290187892,
"grad_norm": 0.8514629006385803,
"learning_rate": 4.5741529101133904e-06,
"loss": 0.5218,
"step": 442
},
{
"epoch": 1.38517745302714,
"grad_norm": 0.8719842433929443,
"learning_rate": 4.5717326999309145e-06,
"loss": 0.5579,
"step": 443
},
{
"epoch": 1.3883089770354906,
"grad_norm": 1.1142809391021729,
"learning_rate": 4.569306275926667e-06,
"loss": 0.5535,
"step": 444
},
{
"epoch": 1.3914405010438413,
"grad_norm": 0.7392387986183167,
"learning_rate": 4.566873645378309e-06,
"loss": 0.5335,
"step": 445
},
{
"epoch": 1.394572025052192,
"grad_norm": 0.9066658616065979,
"learning_rate": 4.564434815582117e-06,
"loss": 0.5286,
"step": 446
},
{
"epoch": 1.3977035490605427,
"grad_norm": 0.8648932576179504,
"learning_rate": 4.561989793852959e-06,
"loss": 0.5008,
"step": 447
},
{
"epoch": 1.4008350730688934,
"grad_norm": 0.7768712043762207,
"learning_rate": 4.559538587524276e-06,
"loss": 0.5727,
"step": 448
},
{
"epoch": 1.4039665970772441,
"grad_norm": 0.7851182222366333,
"learning_rate": 4.557081203948059e-06,
"loss": 0.5731,
"step": 449
},
{
"epoch": 1.407098121085595,
"grad_norm": 0.8959861397743225,
"learning_rate": 4.5546176504948255e-06,
"loss": 0.5587,
"step": 450
},
{
"epoch": 1.4102296450939458,
"grad_norm": 1.0538026094436646,
"learning_rate": 4.552147934553601e-06,
"loss": 0.5808,
"step": 451
},
{
"epoch": 1.4133611691022965,
"grad_norm": 0.9887629151344299,
"learning_rate": 4.54967206353189e-06,
"loss": 0.5658,
"step": 452
},
{
"epoch": 1.4164926931106472,
"grad_norm": 0.9579302072525024,
"learning_rate": 4.547190044855663e-06,
"loss": 0.5092,
"step": 453
},
{
"epoch": 1.4196242171189979,
"grad_norm": 0.6993522047996521,
"learning_rate": 4.544701885969326e-06,
"loss": 0.5233,
"step": 454
},
{
"epoch": 1.4227557411273486,
"grad_norm": 0.8197568655014038,
"learning_rate": 4.542207594335703e-06,
"loss": 0.553,
"step": 455
},
{
"epoch": 1.4258872651356993,
"grad_norm": 2.921947717666626,
"learning_rate": 4.53970717743601e-06,
"loss": 0.4857,
"step": 456
},
{
"epoch": 1.4290187891440502,
"grad_norm": 1.3547242879867554,
"learning_rate": 4.53720064276984e-06,
"loss": 0.5676,
"step": 457
},
{
"epoch": 1.432150313152401,
"grad_norm": 1.4175567626953125,
"learning_rate": 4.534687997855131e-06,
"loss": 0.5164,
"step": 458
},
{
"epoch": 1.4352818371607516,
"grad_norm": 1.378146767616272,
"learning_rate": 4.532169250228145e-06,
"loss": 0.5429,
"step": 459
},
{
"epoch": 1.4384133611691023,
"grad_norm": 0.7811698317527771,
"learning_rate": 4.529644407443456e-06,
"loss": 0.524,
"step": 460
},
{
"epoch": 1.441544885177453,
"grad_norm": 1.1481678485870361,
"learning_rate": 4.527113477073914e-06,
"loss": 0.5513,
"step": 461
},
{
"epoch": 1.4446764091858038,
"grad_norm": 0.8450161218643188,
"learning_rate": 4.5245764667106266e-06,
"loss": 0.5632,
"step": 462
},
{
"epoch": 1.4478079331941545,
"grad_norm": 1.1582145690917969,
"learning_rate": 4.522033383962941e-06,
"loss": 0.5834,
"step": 463
},
{
"epoch": 1.4509394572025052,
"grad_norm": 1.0403447151184082,
"learning_rate": 4.519484236458416e-06,
"loss": 0.506,
"step": 464
},
{
"epoch": 1.4540709812108559,
"grad_norm": 0.7894920706748962,
"learning_rate": 4.516929031842799e-06,
"loss": 0.5526,
"step": 465
},
{
"epoch": 1.4572025052192066,
"grad_norm": 0.8092262744903564,
"learning_rate": 4.51436777778001e-06,
"loss": 0.5619,
"step": 466
},
{
"epoch": 1.4603340292275573,
"grad_norm": 0.9773806929588318,
"learning_rate": 4.511800481952106e-06,
"loss": 0.5179,
"step": 467
},
{
"epoch": 1.4634655532359082,
"grad_norm": 1.018676519393921,
"learning_rate": 4.509227152059271e-06,
"loss": 0.5415,
"step": 468
},
{
"epoch": 1.466597077244259,
"grad_norm": 0.7457838654518127,
"learning_rate": 4.506647795819784e-06,
"loss": 0.5473,
"step": 469
},
{
"epoch": 1.4697286012526096,
"grad_norm": 0.7826436161994934,
"learning_rate": 4.50406242097e-06,
"loss": 0.5526,
"step": 470
},
{
"epoch": 1.4728601252609603,
"grad_norm": 0.9492483139038086,
"learning_rate": 4.501471035264328e-06,
"loss": 0.5179,
"step": 471
},
{
"epoch": 1.475991649269311,
"grad_norm": 0.93398517370224,
"learning_rate": 4.4988736464752005e-06,
"loss": 0.5195,
"step": 472
},
{
"epoch": 1.4791231732776617,
"grad_norm": 0.8396487832069397,
"learning_rate": 4.496270262393061e-06,
"loss": 0.5447,
"step": 473
},
{
"epoch": 1.4822546972860124,
"grad_norm": 0.7450584173202515,
"learning_rate": 4.4936608908263315e-06,
"loss": 0.5207,
"step": 474
},
{
"epoch": 1.4853862212943634,
"grad_norm": 0.7887717485427856,
"learning_rate": 4.491045539601392e-06,
"loss": 0.523,
"step": 475
},
{
"epoch": 1.488517745302714,
"grad_norm": 1.2051388025283813,
"learning_rate": 4.48842421656256e-06,
"loss": 0.5402,
"step": 476
},
{
"epoch": 1.4916492693110648,
"grad_norm": 2.3103389739990234,
"learning_rate": 4.485796929572063e-06,
"loss": 0.5588,
"step": 477
},
{
"epoch": 1.4947807933194155,
"grad_norm": 0.7473112344741821,
"learning_rate": 4.483163686510016e-06,
"loss": 0.5731,
"step": 478
},
{
"epoch": 1.4979123173277662,
"grad_norm": 0.7545126676559448,
"learning_rate": 4.480524495274399e-06,
"loss": 0.5536,
"step": 479
},
{
"epoch": 1.501043841336117,
"grad_norm": 0.7801297903060913,
"learning_rate": 4.477879363781033e-06,
"loss": 0.5696,
"step": 480
},
{
"epoch": 1.5041753653444676,
"grad_norm": 0.7740563750267029,
"learning_rate": 4.475228299963554e-06,
"loss": 0.5526,
"step": 481
},
{
"epoch": 1.5073068893528183,
"grad_norm": 0.8600060343742371,
"learning_rate": 4.4725713117733936e-06,
"loss": 0.5051,
"step": 482
},
{
"epoch": 1.510438413361169,
"grad_norm": 0.6934283971786499,
"learning_rate": 4.46990840717975e-06,
"loss": 0.5564,
"step": 483
},
{
"epoch": 1.5135699373695197,
"grad_norm": 0.8927920460700989,
"learning_rate": 4.46723959416957e-06,
"loss": 0.5529,
"step": 484
},
{
"epoch": 1.5167014613778704,
"grad_norm": 0.9570988416671753,
"learning_rate": 4.464564880747517e-06,
"loss": 0.5661,
"step": 485
},
{
"epoch": 1.5198329853862211,
"grad_norm": 0.7229202389717102,
"learning_rate": 4.461884274935956e-06,
"loss": 0.5964,
"step": 486
},
{
"epoch": 1.522964509394572,
"grad_norm": 0.7367239594459534,
"learning_rate": 4.4591977847749225e-06,
"loss": 0.5455,
"step": 487
},
{
"epoch": 1.5260960334029228,
"grad_norm": 0.8062120676040649,
"learning_rate": 4.456505418322103e-06,
"loss": 0.5735,
"step": 488
},
{
"epoch": 1.5292275574112735,
"grad_norm": 0.8854482769966125,
"learning_rate": 4.453807183652808e-06,
"loss": 0.5421,
"step": 489
},
{
"epoch": 1.5323590814196242,
"grad_norm": 0.7518959045410156,
"learning_rate": 4.451103088859951e-06,
"loss": 0.5083,
"step": 490
},
{
"epoch": 1.535490605427975,
"grad_norm": 0.8621206879615784,
"learning_rate": 4.448393142054016e-06,
"loss": 0.4712,
"step": 491
},
{
"epoch": 1.5386221294363258,
"grad_norm": 1.0618741512298584,
"learning_rate": 4.445677351363046e-06,
"loss": 0.5808,
"step": 492
},
{
"epoch": 1.5417536534446765,
"grad_norm": 0.8261345028877258,
"learning_rate": 4.442955724932607e-06,
"loss": 0.5625,
"step": 493
},
{
"epoch": 1.5448851774530272,
"grad_norm": 0.7067139744758606,
"learning_rate": 4.440228270925772e-06,
"loss": 0.5661,
"step": 494
},
{
"epoch": 1.548016701461378,
"grad_norm": 0.9234416484832764,
"learning_rate": 4.437494997523091e-06,
"loss": 0.5428,
"step": 495
},
{
"epoch": 1.5511482254697286,
"grad_norm": 0.9273470044136047,
"learning_rate": 4.434755912922567e-06,
"loss": 0.5388,
"step": 496
},
{
"epoch": 1.5542797494780793,
"grad_norm": 1.0163263082504272,
"learning_rate": 4.4320110253396345e-06,
"loss": 0.5409,
"step": 497
},
{
"epoch": 1.55741127348643,
"grad_norm": 0.9542096853256226,
"learning_rate": 4.429260343007133e-06,
"loss": 0.5329,
"step": 498
},
{
"epoch": 1.5605427974947808,
"grad_norm": 0.8076801896095276,
"learning_rate": 4.426503874175283e-06,
"loss": 0.5616,
"step": 499
},
{
"epoch": 1.5636743215031315,
"grad_norm": 1.0063767433166504,
"learning_rate": 4.423741627111658e-06,
"loss": 0.5369,
"step": 500
},
{
"epoch": 1.5668058455114822,
"grad_norm": 1.040286898612976,
"learning_rate": 4.420973610101166e-06,
"loss": 0.5474,
"step": 501
},
{
"epoch": 1.5699373695198329,
"grad_norm": 0.7832860946655273,
"learning_rate": 4.4181998314460164e-06,
"loss": 0.5486,
"step": 502
},
{
"epoch": 1.5730688935281836,
"grad_norm": 0.8162257075309753,
"learning_rate": 4.415420299465706e-06,
"loss": 0.5054,
"step": 503
},
{
"epoch": 1.5762004175365343,
"grad_norm": 0.9108433127403259,
"learning_rate": 4.4126350224969814e-06,
"loss": 0.5399,
"step": 504
},
{
"epoch": 1.5793319415448852,
"grad_norm": 0.8002520799636841,
"learning_rate": 4.409844008893824e-06,
"loss": 0.5485,
"step": 505
},
{
"epoch": 1.582463465553236,
"grad_norm": 0.8543248772621155,
"learning_rate": 4.407047267027423e-06,
"loss": 0.4984,
"step": 506
},
{
"epoch": 1.5855949895615866,
"grad_norm": 0.7154155373573303,
"learning_rate": 4.404244805286141e-06,
"loss": 0.5392,
"step": 507
},
{
"epoch": 1.5887265135699373,
"grad_norm": 0.818553626537323,
"learning_rate": 4.401436632075504e-06,
"loss": 0.5178,
"step": 508
},
{
"epoch": 1.5918580375782883,
"grad_norm": 0.7535017728805542,
"learning_rate": 4.398622755818167e-06,
"loss": 0.5446,
"step": 509
},
{
"epoch": 1.594989561586639,
"grad_norm": 0.9328975677490234,
"learning_rate": 4.395803184953889e-06,
"loss": 0.5546,
"step": 510
},
{
"epoch": 1.5981210855949897,
"grad_norm": 0.7960026860237122,
"learning_rate": 4.392977927939508e-06,
"loss": 0.5451,
"step": 511
},
{
"epoch": 1.6012526096033404,
"grad_norm": 0.9686267971992493,
"learning_rate": 4.3901469932489195e-06,
"loss": 0.5198,
"step": 512
},
{
"epoch": 1.604384133611691,
"grad_norm": 0.903137743473053,
"learning_rate": 4.387310389373047e-06,
"loss": 0.5395,
"step": 513
},
{
"epoch": 1.6075156576200418,
"grad_norm": 1.0728516578674316,
"learning_rate": 4.384468124819816e-06,
"loss": 0.5843,
"step": 514
},
{
"epoch": 1.6106471816283925,
"grad_norm": 1.0245436429977417,
"learning_rate": 4.3816202081141345e-06,
"loss": 0.5672,
"step": 515
},
{
"epoch": 1.6137787056367432,
"grad_norm": 0.9672732353210449,
"learning_rate": 4.378766647797858e-06,
"loss": 0.5369,
"step": 516
},
{
"epoch": 1.616910229645094,
"grad_norm": 0.9149513840675354,
"learning_rate": 4.375907452429774e-06,
"loss": 0.4628,
"step": 517
},
{
"epoch": 1.6200417536534446,
"grad_norm": 0.7543843984603882,
"learning_rate": 4.373042630585567e-06,
"loss": 0.5344,
"step": 518
},
{
"epoch": 1.6231732776617953,
"grad_norm": 0.7589017152786255,
"learning_rate": 4.370172190857801e-06,
"loss": 0.5672,
"step": 519
},
{
"epoch": 1.626304801670146,
"grad_norm": 0.803040623664856,
"learning_rate": 4.367296141855887e-06,
"loss": 0.5313,
"step": 520
},
{
"epoch": 1.6294363256784967,
"grad_norm": 0.8305794596672058,
"learning_rate": 4.3644144922060625e-06,
"loss": 0.5754,
"step": 521
},
{
"epoch": 1.6325678496868476,
"grad_norm": 1.0086486339569092,
"learning_rate": 4.361527250551361e-06,
"loss": 0.5433,
"step": 522
},
{
"epoch": 1.6356993736951984,
"grad_norm": 0.7217550277709961,
"learning_rate": 4.35863442555159e-06,
"loss": 0.524,
"step": 523
},
{
"epoch": 1.638830897703549,
"grad_norm": 0.7788524627685547,
"learning_rate": 4.355736025883303e-06,
"loss": 0.536,
"step": 524
},
{
"epoch": 1.6419624217118998,
"grad_norm": 0.8460550904273987,
"learning_rate": 4.352832060239774e-06,
"loss": 0.5381,
"step": 525
},
{
"epoch": 1.6450939457202505,
"grad_norm": 0.7571215033531189,
"learning_rate": 4.3499225373309675e-06,
"loss": 0.541,
"step": 526
},
{
"epoch": 1.6482254697286014,
"grad_norm": 0.7343226671218872,
"learning_rate": 4.347007465883523e-06,
"loss": 0.5147,
"step": 527
},
{
"epoch": 1.651356993736952,
"grad_norm": 0.7271892428398132,
"learning_rate": 4.3440868546407165e-06,
"loss": 0.5311,
"step": 528
},
{
"epoch": 1.6544885177453028,
"grad_norm": 0.8166136741638184,
"learning_rate": 4.341160712362442e-06,
"loss": 0.5379,
"step": 529
},
{
"epoch": 1.6576200417536535,
"grad_norm": 1.5985233783721924,
"learning_rate": 4.338229047825182e-06,
"loss": 0.5782,
"step": 530
},
{
"epoch": 1.6607515657620042,
"grad_norm": 0.7835702896118164,
"learning_rate": 4.3352918698219835e-06,
"loss": 0.525,
"step": 531
},
{
"epoch": 1.663883089770355,
"grad_norm": 0.7278687953948975,
"learning_rate": 4.332349187162428e-06,
"loss": 0.5266,
"step": 532
},
{
"epoch": 1.6670146137787056,
"grad_norm": 0.8240190148353577,
"learning_rate": 4.329401008672608e-06,
"loss": 0.5515,
"step": 533
},
{
"epoch": 1.6701461377870563,
"grad_norm": 0.9447080492973328,
"learning_rate": 4.326447343195102e-06,
"loss": 0.5596,
"step": 534
},
{
"epoch": 1.673277661795407,
"grad_norm": 0.7827372550964355,
"learning_rate": 4.323488199588944e-06,
"loss": 0.5466,
"step": 535
},
{
"epoch": 1.6764091858037578,
"grad_norm": 0.9252517223358154,
"learning_rate": 4.320523586729599e-06,
"loss": 0.5433,
"step": 536
},
{
"epoch": 1.6795407098121085,
"grad_norm": 0.9437504410743713,
"learning_rate": 4.317553513508934e-06,
"loss": 0.5552,
"step": 537
},
{
"epoch": 1.6826722338204592,
"grad_norm": 0.8972746133804321,
"learning_rate": 4.3145779888351986e-06,
"loss": 0.5259,
"step": 538
},
{
"epoch": 1.6858037578288099,
"grad_norm": 0.8017446994781494,
"learning_rate": 4.311597021632988e-06,
"loss": 0.5263,
"step": 539
},
{
"epoch": 1.6889352818371608,
"grad_norm": 0.7875497341156006,
"learning_rate": 4.3086106208432235e-06,
"loss": 0.5316,
"step": 540
},
{
"epoch": 1.6920668058455115,
"grad_norm": 0.8204905986785889,
"learning_rate": 4.305618795423125e-06,
"loss": 0.5506,
"step": 541
},
{
"epoch": 1.6951983298538622,
"grad_norm": 0.888359785079956,
"learning_rate": 4.30262155434618e-06,
"loss": 0.4825,
"step": 542
},
{
"epoch": 1.698329853862213,
"grad_norm": 1.1026058197021484,
"learning_rate": 4.29961890660212e-06,
"loss": 0.5321,
"step": 543
},
{
"epoch": 1.7014613778705638,
"grad_norm": 0.7662535905838013,
"learning_rate": 4.2966108611968945e-06,
"loss": 0.5432,
"step": 544
},
{
"epoch": 1.7045929018789145,
"grad_norm": 1.1951749324798584,
"learning_rate": 4.293597427152641e-06,
"loss": 0.5123,
"step": 545
},
{
"epoch": 1.7077244258872653,
"grad_norm": 1.303183913230896,
"learning_rate": 4.290578613507661e-06,
"loss": 0.5346,
"step": 546
},
{
"epoch": 1.710855949895616,
"grad_norm": 0.7653357982635498,
"learning_rate": 4.287554429316387e-06,
"loss": 0.5397,
"step": 547
},
{
"epoch": 1.7139874739039667,
"grad_norm": 0.796215295791626,
"learning_rate": 4.284524883649366e-06,
"loss": 0.5421,
"step": 548
},
{
"epoch": 1.7171189979123174,
"grad_norm": 0.7599332332611084,
"learning_rate": 4.281489985593219e-06,
"loss": 0.5289,
"step": 549
},
{
"epoch": 1.720250521920668,
"grad_norm": 0.8029115796089172,
"learning_rate": 4.2784497442506265e-06,
"loss": 0.5409,
"step": 550
},
{
"epoch": 1.7233820459290188,
"grad_norm": 0.7194099426269531,
"learning_rate": 4.275404168740291e-06,
"loss": 0.5327,
"step": 551
},
{
"epoch": 1.7265135699373695,
"grad_norm": 0.7960740923881531,
"learning_rate": 4.272353268196917e-06,
"loss": 0.4896,
"step": 552
},
{
"epoch": 1.7296450939457202,
"grad_norm": 0.9572116732597351,
"learning_rate": 4.269297051771178e-06,
"loss": 0.5402,
"step": 553
},
{
"epoch": 1.732776617954071,
"grad_norm": 1.3604938983917236,
"learning_rate": 4.266235528629695e-06,
"loss": 0.5792,
"step": 554
},
{
"epoch": 1.7359081419624216,
"grad_norm": 2.067286729812622,
"learning_rate": 4.263168707955002e-06,
"loss": 0.5033,
"step": 555
},
{
"epoch": 1.7390396659707723,
"grad_norm": 0.8031097054481506,
"learning_rate": 4.260096598945523e-06,
"loss": 0.5117,
"step": 556
},
{
"epoch": 1.742171189979123,
"grad_norm": 1.0241729021072388,
"learning_rate": 4.257019210815546e-06,
"loss": 0.5359,
"step": 557
},
{
"epoch": 1.745302713987474,
"grad_norm": 0.7625218629837036,
"learning_rate": 4.25393655279519e-06,
"loss": 0.5625,
"step": 558
},
{
"epoch": 1.7484342379958246,
"grad_norm": 0.8603503704071045,
"learning_rate": 4.250848634130381e-06,
"loss": 0.5043,
"step": 559
},
{
"epoch": 1.7515657620041754,
"grad_norm": 0.9543750286102295,
"learning_rate": 4.247755464082824e-06,
"loss": 0.5364,
"step": 560
},
{
"epoch": 1.754697286012526,
"grad_norm": 0.9707463979721069,
"learning_rate": 4.244657051929973e-06,
"loss": 0.5184,
"step": 561
},
{
"epoch": 1.757828810020877,
"grad_norm": 0.7491432428359985,
"learning_rate": 4.241553406965008e-06,
"loss": 0.559,
"step": 562
},
{
"epoch": 1.7609603340292277,
"grad_norm": 0.7444972991943359,
"learning_rate": 4.238444538496801e-06,
"loss": 0.5327,
"step": 563
},
{
"epoch": 1.7640918580375784,
"grad_norm": 2.7108678817749023,
"learning_rate": 4.235330455849892e-06,
"loss": 0.55,
"step": 564
},
{
"epoch": 1.767223382045929,
"grad_norm": 1.6716049909591675,
"learning_rate": 4.232211168364459e-06,
"loss": 0.5093,
"step": 565
},
{
"epoch": 1.7703549060542798,
"grad_norm": 0.7023475170135498,
"learning_rate": 4.229086685396295e-06,
"loss": 0.569,
"step": 566
},
{
"epoch": 1.7734864300626305,
"grad_norm": 0.8596265316009521,
"learning_rate": 4.225957016316771e-06,
"loss": 0.5128,
"step": 567
},
{
"epoch": 1.7766179540709812,
"grad_norm": 0.8110849857330322,
"learning_rate": 4.222822170512816e-06,
"loss": 0.5142,
"step": 568
},
{
"epoch": 1.779749478079332,
"grad_norm": 0.7583725452423096,
"learning_rate": 4.219682157386884e-06,
"loss": 0.5584,
"step": 569
},
{
"epoch": 1.7828810020876826,
"grad_norm": 0.787811279296875,
"learning_rate": 4.21653698635693e-06,
"loss": 0.5068,
"step": 570
},
{
"epoch": 1.7860125260960333,
"grad_norm": 0.8298993110656738,
"learning_rate": 4.213386666856375e-06,
"loss": 0.5496,
"step": 571
},
{
"epoch": 1.789144050104384,
"grad_norm": 0.8999841213226318,
"learning_rate": 4.210231208334087e-06,
"loss": 0.5454,
"step": 572
},
{
"epoch": 1.7922755741127347,
"grad_norm": 4.264521598815918,
"learning_rate": 4.207070620254345e-06,
"loss": 0.5486,
"step": 573
},
{
"epoch": 1.7954070981210855,
"grad_norm": 0.8517448306083679,
"learning_rate": 4.203904912096812e-06,
"loss": 0.5566,
"step": 574
},
{
"epoch": 1.7985386221294362,
"grad_norm": 0.9230182766914368,
"learning_rate": 4.200734093356511e-06,
"loss": 0.4964,
"step": 575
},
{
"epoch": 1.801670146137787,
"grad_norm": 1.224039912223816,
"learning_rate": 4.197558173543791e-06,
"loss": 0.5356,
"step": 576
},
{
"epoch": 1.8048016701461378,
"grad_norm": 0.9998573660850525,
"learning_rate": 4.194377162184301e-06,
"loss": 0.5334,
"step": 577
},
{
"epoch": 1.8079331941544885,
"grad_norm": 0.865521252155304,
"learning_rate": 4.191191068818963e-06,
"loss": 0.5036,
"step": 578
},
{
"epoch": 1.8110647181628392,
"grad_norm": 0.8048138618469238,
"learning_rate": 4.18799990300394e-06,
"loss": 0.4979,
"step": 579
},
{
"epoch": 1.8141962421711901,
"grad_norm": 0.717815637588501,
"learning_rate": 4.184803674310609e-06,
"loss": 0.5623,
"step": 580
},
{
"epoch": 1.8173277661795408,
"grad_norm": 0.8403327465057373,
"learning_rate": 4.1816023923255335e-06,
"loss": 0.5055,
"step": 581
},
{
"epoch": 1.8204592901878915,
"grad_norm": 0.7298995852470398,
"learning_rate": 4.178396066650432e-06,
"loss": 0.5641,
"step": 582
},
{
"epoch": 1.8235908141962422,
"grad_norm": 0.9469727873802185,
"learning_rate": 4.1751847069021516e-06,
"loss": 0.5557,
"step": 583
},
{
"epoch": 1.826722338204593,
"grad_norm": 0.8641784191131592,
"learning_rate": 4.1719683227126386e-06,
"loss": 0.5153,
"step": 584
},
{
"epoch": 1.8298538622129437,
"grad_norm": 0.7316668629646301,
"learning_rate": 4.168746923728908e-06,
"loss": 0.4988,
"step": 585
},
{
"epoch": 1.8329853862212944,
"grad_norm": 0.8795468807220459,
"learning_rate": 4.165520519613017e-06,
"loss": 0.5483,
"step": 586
},
{
"epoch": 1.836116910229645,
"grad_norm": 0.7323560118675232,
"learning_rate": 4.162289120042034e-06,
"loss": 0.5194,
"step": 587
},
{
"epoch": 1.8392484342379958,
"grad_norm": 0.8217021822929382,
"learning_rate": 4.159052734708013e-06,
"loss": 0.532,
"step": 588
},
{
"epoch": 1.8423799582463465,
"grad_norm": 0.7669674754142761,
"learning_rate": 4.155811373317958e-06,
"loss": 0.541,
"step": 589
},
{
"epoch": 1.8455114822546972,
"grad_norm": 0.8312156200408936,
"learning_rate": 4.152565045593801e-06,
"loss": 0.5298,
"step": 590
},
{
"epoch": 1.848643006263048,
"grad_norm": 0.8967565298080444,
"learning_rate": 4.1493137612723665e-06,
"loss": 0.51,
"step": 591
},
{
"epoch": 1.8517745302713986,
"grad_norm": 0.8706664443016052,
"learning_rate": 4.14605753010535e-06,
"loss": 0.4941,
"step": 592
},
{
"epoch": 1.8549060542797495,
"grad_norm": 0.7585753798484802,
"learning_rate": 4.14279636185928e-06,
"loss": 0.5161,
"step": 593
},
{
"epoch": 1.8580375782881002,
"grad_norm": 0.7495241165161133,
"learning_rate": 4.1395302663154954e-06,
"loss": 0.5388,
"step": 594
},
{
"epoch": 1.861169102296451,
"grad_norm": 1.0746862888336182,
"learning_rate": 4.136259253270114e-06,
"loss": 0.4976,
"step": 595
},
{
"epoch": 1.8643006263048016,
"grad_norm": 0.872309684753418,
"learning_rate": 4.132983332534e-06,
"loss": 0.559,
"step": 596
},
{
"epoch": 1.8674321503131524,
"grad_norm": 0.8759891986846924,
"learning_rate": 4.1297025139327405e-06,
"loss": 0.5436,
"step": 597
},
{
"epoch": 1.8705636743215033,
"grad_norm": 1.1044493913650513,
"learning_rate": 4.126416807306611e-06,
"loss": 0.5476,
"step": 598
},
{
"epoch": 1.873695198329854,
"grad_norm": 0.8340442180633545,
"learning_rate": 4.123126222510549e-06,
"loss": 0.4592,
"step": 599
},
{
"epoch": 1.8768267223382047,
"grad_norm": 0.8331449031829834,
"learning_rate": 4.119830769414123e-06,
"loss": 0.5219,
"step": 600
},
{
"epoch": 1.8799582463465554,
"grad_norm": 1.0862973928451538,
"learning_rate": 4.116530457901503e-06,
"loss": 0.5159,
"step": 601
},
{
"epoch": 1.883089770354906,
"grad_norm": 0.8524414300918579,
"learning_rate": 4.113225297871431e-06,
"loss": 0.5502,
"step": 602
},
{
"epoch": 1.8862212943632568,
"grad_norm": 1.4945416450500488,
"learning_rate": 4.10991529923719e-06,
"loss": 0.5627,
"step": 603
},
{
"epoch": 1.8893528183716075,
"grad_norm": 1.5518157482147217,
"learning_rate": 4.10660047192658e-06,
"loss": 0.5517,
"step": 604
},
{
"epoch": 1.8924843423799582,
"grad_norm": 2.56638765335083,
"learning_rate": 4.103280825881878e-06,
"loss": 0.5422,
"step": 605
},
{
"epoch": 1.895615866388309,
"grad_norm": 0.867254912853241,
"learning_rate": 4.099956371059817e-06,
"loss": 0.4991,
"step": 606
},
{
"epoch": 1.8987473903966596,
"grad_norm": 0.9555892944335938,
"learning_rate": 4.096627117431554e-06,
"loss": 0.5339,
"step": 607
},
{
"epoch": 1.9018789144050103,
"grad_norm": 0.7905483245849609,
"learning_rate": 4.093293074982638e-06,
"loss": 0.5168,
"step": 608
},
{
"epoch": 1.905010438413361,
"grad_norm": 0.7500227093696594,
"learning_rate": 4.089954253712981e-06,
"loss": 0.5096,
"step": 609
},
{
"epoch": 1.9081419624217117,
"grad_norm": 0.8458324074745178,
"learning_rate": 4.086610663636828e-06,
"loss": 0.5296,
"step": 610
},
{
"epoch": 1.9112734864300627,
"grad_norm": 0.7392706871032715,
"learning_rate": 4.08326231478273e-06,
"loss": 0.5305,
"step": 611
},
{
"epoch": 1.9144050104384134,
"grad_norm": 0.8113343715667725,
"learning_rate": 4.079909217193508e-06,
"loss": 0.5044,
"step": 612
},
{
"epoch": 1.917536534446764,
"grad_norm": 0.7637801766395569,
"learning_rate": 4.076551380926226e-06,
"loss": 0.5298,
"step": 613
},
{
"epoch": 1.9206680584551148,
"grad_norm": 1.0523375272750854,
"learning_rate": 4.073188816052164e-06,
"loss": 0.5111,
"step": 614
},
{
"epoch": 1.9237995824634657,
"grad_norm": 0.8224868774414062,
"learning_rate": 4.069821532656781e-06,
"loss": 0.5178,
"step": 615
},
{
"epoch": 1.9269311064718164,
"grad_norm": 0.7270777821540833,
"learning_rate": 4.066449540839693e-06,
"loss": 0.5307,
"step": 616
},
{
"epoch": 1.9300626304801671,
"grad_norm": 0.7214602828025818,
"learning_rate": 4.063072850714631e-06,
"loss": 0.5171,
"step": 617
},
{
"epoch": 1.9331941544885178,
"grad_norm": 0.7333671450614929,
"learning_rate": 4.059691472409426e-06,
"loss": 0.56,
"step": 618
},
{
"epoch": 1.9363256784968685,
"grad_norm": 0.9166824221611023,
"learning_rate": 4.056305416065964e-06,
"loss": 0.5388,
"step": 619
},
{
"epoch": 1.9394572025052192,
"grad_norm": 0.7743303775787354,
"learning_rate": 4.052914691840167e-06,
"loss": 0.5134,
"step": 620
},
{
"epoch": 1.94258872651357,
"grad_norm": 0.704097330570221,
"learning_rate": 4.0495193099019524e-06,
"loss": 0.4926,
"step": 621
},
{
"epoch": 1.9457202505219207,
"grad_norm": 0.8508503437042236,
"learning_rate": 4.046119280435212e-06,
"loss": 0.5008,
"step": 622
},
{
"epoch": 1.9488517745302714,
"grad_norm": 0.725933313369751,
"learning_rate": 4.042714613637775e-06,
"loss": 0.5549,
"step": 623
},
{
"epoch": 1.951983298538622,
"grad_norm": 0.8919175863265991,
"learning_rate": 4.039305319721381e-06,
"loss": 0.5183,
"step": 624
},
{
"epoch": 1.9551148225469728,
"grad_norm": 0.827919065952301,
"learning_rate": 4.035891408911644e-06,
"loss": 0.5624,
"step": 625
},
{
"epoch": 1.9582463465553235,
"grad_norm": 0.7415187358856201,
"learning_rate": 4.032472891448032e-06,
"loss": 0.5454,
"step": 626
},
{
"epoch": 1.9613778705636742,
"grad_norm": 0.7675788998603821,
"learning_rate": 4.029049777583824e-06,
"loss": 0.5361,
"step": 627
},
{
"epoch": 1.964509394572025,
"grad_norm": 0.8464030623435974,
"learning_rate": 4.025622077586088e-06,
"loss": 0.5295,
"step": 628
},
{
"epoch": 1.9676409185803758,
"grad_norm": 0.7641633749008179,
"learning_rate": 4.022189801735646e-06,
"loss": 0.55,
"step": 629
},
{
"epoch": 1.9707724425887265,
"grad_norm": 0.7813227772712708,
"learning_rate": 4.018752960327048e-06,
"loss": 0.5587,
"step": 630
},
{
"epoch": 1.9739039665970772,
"grad_norm": 0.7576701641082764,
"learning_rate": 4.015311563668533e-06,
"loss": 0.5413,
"step": 631
},
{
"epoch": 1.977035490605428,
"grad_norm": 0.6949650049209595,
"learning_rate": 4.011865622082004e-06,
"loss": 0.5344,
"step": 632
},
{
"epoch": 1.9801670146137789,
"grad_norm": 0.9009145498275757,
"learning_rate": 4.008415145902997e-06,
"loss": 0.5233,
"step": 633
},
{
"epoch": 1.9832985386221296,
"grad_norm": 0.7635822892189026,
"learning_rate": 4.004960145480651e-06,
"loss": 0.4981,
"step": 634
},
{
"epoch": 1.9864300626304803,
"grad_norm": 0.8916334509849548,
"learning_rate": 4.0015006311776685e-06,
"loss": 0.5311,
"step": 635
},
{
"epoch": 1.989561586638831,
"grad_norm": 0.7197673320770264,
"learning_rate": 3.998036613370295e-06,
"loss": 0.5361,
"step": 636
},
{
"epoch": 1.9926931106471817,
"grad_norm": 0.8391228914260864,
"learning_rate": 3.994568102448284e-06,
"loss": 0.5473,
"step": 637
},
{
"epoch": 1.9958246346555324,
"grad_norm": 0.9371750950813293,
"learning_rate": 3.991095108814862e-06,
"loss": 0.5303,
"step": 638
},
{
"epoch": 1.998956158663883,
"grad_norm": 0.8929619789123535,
"learning_rate": 3.9876176428867046e-06,
"loss": 0.533,
"step": 639
},
{
"epoch": 2.0,
"grad_norm": 0.8929619789123535,
"learning_rate": 3.9841357150938984e-06,
"loss": 0.1831,
"step": 640
},
{
"epoch": 2.0031315240083507,
"grad_norm": 0.8802503347396851,
"learning_rate": 3.9806493358799135e-06,
"loss": 0.493,
"step": 641
},
{
"epoch": 2.0062630480167014,
"grad_norm": 0.802759051322937,
"learning_rate": 3.977158515701571e-06,
"loss": 0.498,
"step": 642
},
{
"epoch": 2.009394572025052,
"grad_norm": 1.0235401391983032,
"learning_rate": 3.973663265029013e-06,
"loss": 0.4887,
"step": 643
},
{
"epoch": 2.012526096033403,
"grad_norm": 0.7219089865684509,
"learning_rate": 3.97016359434567e-06,
"loss": 0.4628,
"step": 644
},
{
"epoch": 2.0156576200417535,
"grad_norm": 0.7887073755264282,
"learning_rate": 3.966659514148229e-06,
"loss": 0.525,
"step": 645
},
{
"epoch": 2.0187891440501042,
"grad_norm": 0.7960914969444275,
"learning_rate": 3.963151034946602e-06,
"loss": 0.4643,
"step": 646
},
{
"epoch": 2.021920668058455,
"grad_norm": 0.7902271151542664,
"learning_rate": 3.959638167263895e-06,
"loss": 0.4922,
"step": 647
},
{
"epoch": 2.0250521920668056,
"grad_norm": 0.9501478672027588,
"learning_rate": 3.956120921636379e-06,
"loss": 0.5285,
"step": 648
},
{
"epoch": 2.028183716075157,
"grad_norm": 0.9510527849197388,
"learning_rate": 3.952599308613454e-06,
"loss": 0.4909,
"step": 649
},
{
"epoch": 2.0313152400835075,
"grad_norm": 0.9408219456672668,
"learning_rate": 3.949073338757619e-06,
"loss": 0.4912,
"step": 650
},
{
"epoch": 2.034446764091858,
"grad_norm": 0.7148041725158691,
"learning_rate": 3.945543022644441e-06,
"loss": 0.4792,
"step": 651
},
{
"epoch": 2.037578288100209,
"grad_norm": 0.7737464904785156,
"learning_rate": 3.942008370862522e-06,
"loss": 0.4694,
"step": 652
},
{
"epoch": 2.0407098121085596,
"grad_norm": 0.8405889868736267,
"learning_rate": 3.938469394013472e-06,
"loss": 0.5048,
"step": 653
},
{
"epoch": 2.0438413361169103,
"grad_norm": 0.7896456718444824,
"learning_rate": 3.934926102711869e-06,
"loss": 0.4882,
"step": 654
},
{
"epoch": 2.046972860125261,
"grad_norm": 0.9290387034416199,
"learning_rate": 3.931378507585231e-06,
"loss": 0.503,
"step": 655
},
{
"epoch": 2.0501043841336117,
"grad_norm": 0.7386118769645691,
"learning_rate": 3.927826619273991e-06,
"loss": 0.4918,
"step": 656
},
{
"epoch": 2.0532359081419624,
"grad_norm": 0.9878676533699036,
"learning_rate": 3.92427044843145e-06,
"loss": 0.4958,
"step": 657
},
{
"epoch": 2.056367432150313,
"grad_norm": 1.0111151933670044,
"learning_rate": 3.92071000572376e-06,
"loss": 0.4886,
"step": 658
},
{
"epoch": 2.059498956158664,
"grad_norm": 0.8612061738967896,
"learning_rate": 3.917145301829884e-06,
"loss": 0.5216,
"step": 659
},
{
"epoch": 2.0626304801670146,
"grad_norm": 0.7458518743515015,
"learning_rate": 3.913576347441564e-06,
"loss": 0.4807,
"step": 660
},
{
"epoch": 2.0657620041753653,
"grad_norm": 0.7775886058807373,
"learning_rate": 3.910003153263294e-06,
"loss": 0.4837,
"step": 661
},
{
"epoch": 2.068893528183716,
"grad_norm": 0.7144196629524231,
"learning_rate": 3.906425730012282e-06,
"loss": 0.5081,
"step": 662
},
{
"epoch": 2.0720250521920667,
"grad_norm": 0.844971776008606,
"learning_rate": 3.9028440884184215e-06,
"loss": 0.474,
"step": 663
},
{
"epoch": 2.0751565762004174,
"grad_norm": 0.9709283113479614,
"learning_rate": 3.899258239224257e-06,
"loss": 0.503,
"step": 664
},
{
"epoch": 2.078288100208768,
"grad_norm": 1.1325515508651733,
"learning_rate": 3.895668193184954e-06,
"loss": 0.5058,
"step": 665
},
{
"epoch": 2.081419624217119,
"grad_norm": 0.7211254239082336,
"learning_rate": 3.892073961068266e-06,
"loss": 0.4982,
"step": 666
},
{
"epoch": 2.08455114822547,
"grad_norm": 0.8975517153739929,
"learning_rate": 3.888475553654502e-06,
"loss": 0.4699,
"step": 667
},
{
"epoch": 2.0876826722338206,
"grad_norm": 0.8270771503448486,
"learning_rate": 3.884872981736493e-06,
"loss": 0.4586,
"step": 668
},
{
"epoch": 2.0908141962421714,
"grad_norm": 0.8606625199317932,
"learning_rate": 3.881266256119561e-06,
"loss": 0.5299,
"step": 669
},
{
"epoch": 2.093945720250522,
"grad_norm": 0.9013976454734802,
"learning_rate": 3.877655387621488e-06,
"loss": 0.4887,
"step": 670
},
{
"epoch": 2.0970772442588728,
"grad_norm": 0.7603903412818909,
"learning_rate": 3.8740403870724795e-06,
"loss": 0.4992,
"step": 671
},
{
"epoch": 2.1002087682672235,
"grad_norm": 1.0432350635528564,
"learning_rate": 3.870421265315137e-06,
"loss": 0.5035,
"step": 672
},
{
"epoch": 2.103340292275574,
"grad_norm": 0.7727136611938477,
"learning_rate": 3.8667980332044195e-06,
"loss": 0.5006,
"step": 673
},
{
"epoch": 2.106471816283925,
"grad_norm": 0.9764307141304016,
"learning_rate": 3.863170701607618e-06,
"loss": 0.5061,
"step": 674
},
{
"epoch": 2.1096033402922756,
"grad_norm": 0.747818648815155,
"learning_rate": 3.859539281404317e-06,
"loss": 0.4761,
"step": 675
},
{
"epoch": 2.1127348643006263,
"grad_norm": 0.7254915237426758,
"learning_rate": 3.855903783486364e-06,
"loss": 0.5166,
"step": 676
},
{
"epoch": 2.115866388308977,
"grad_norm": 0.7678592801094055,
"learning_rate": 3.852264218757839e-06,
"loss": 0.5122,
"step": 677
},
{
"epoch": 2.1189979123173277,
"grad_norm": 0.8140144348144531,
"learning_rate": 3.8486205981350165e-06,
"loss": 0.4551,
"step": 678
},
{
"epoch": 2.1221294363256784,
"grad_norm": 0.9417359232902527,
"learning_rate": 3.844972932546338e-06,
"loss": 0.4748,
"step": 679
},
{
"epoch": 2.125260960334029,
"grad_norm": 0.8035290241241455,
"learning_rate": 3.841321232932378e-06,
"loss": 0.5079,
"step": 680
},
{
"epoch": 2.12839248434238,
"grad_norm": 0.8300641775131226,
"learning_rate": 3.837665510245809e-06,
"loss": 0.5018,
"step": 681
},
{
"epoch": 2.1315240083507305,
"grad_norm": 0.8293547034263611,
"learning_rate": 3.8340057754513715e-06,
"loss": 0.5042,
"step": 682
},
{
"epoch": 2.1346555323590812,
"grad_norm": 0.7780007719993591,
"learning_rate": 3.8303420395258365e-06,
"loss": 0.5048,
"step": 683
},
{
"epoch": 2.137787056367432,
"grad_norm": 0.7519420385360718,
"learning_rate": 3.8266743134579785e-06,
"loss": 0.5108,
"step": 684
},
{
"epoch": 2.140918580375783,
"grad_norm": 0.7872384190559387,
"learning_rate": 3.8230026082485404e-06,
"loss": 0.4924,
"step": 685
},
{
"epoch": 2.144050104384134,
"grad_norm": 0.7479491829872131,
"learning_rate": 3.819326934910197e-06,
"loss": 0.5184,
"step": 686
},
{
"epoch": 2.1471816283924845,
"grad_norm": 0.8438299298286438,
"learning_rate": 3.815647304467527e-06,
"loss": 0.4791,
"step": 687
},
{
"epoch": 2.150313152400835,
"grad_norm": 0.7923721671104431,
"learning_rate": 3.8119637279569773e-06,
"loss": 0.5305,
"step": 688
},
{
"epoch": 2.153444676409186,
"grad_norm": 0.7856534719467163,
"learning_rate": 3.80827621642683e-06,
"loss": 0.5063,
"step": 689
},
{
"epoch": 2.1565762004175366,
"grad_norm": 0.8544500470161438,
"learning_rate": 3.8045847809371706e-06,
"loss": 0.4989,
"step": 690
},
{
"epoch": 2.1597077244258873,
"grad_norm": 0.865390956401825,
"learning_rate": 3.800889432559852e-06,
"loss": 0.4931,
"step": 691
},
{
"epoch": 2.162839248434238,
"grad_norm": 0.9809399247169495,
"learning_rate": 3.797190182378466e-06,
"loss": 0.4785,
"step": 692
},
{
"epoch": 2.1659707724425887,
"grad_norm": 0.7954536080360413,
"learning_rate": 3.793487041488304e-06,
"loss": 0.4847,
"step": 693
},
{
"epoch": 2.1691022964509394,
"grad_norm": 0.754704475402832,
"learning_rate": 3.7897800209963298e-06,
"loss": 0.5125,
"step": 694
},
{
"epoch": 2.17223382045929,
"grad_norm": 0.7319822311401367,
"learning_rate": 3.7860691320211414e-06,
"loss": 0.477,
"step": 695
},
{
"epoch": 2.175365344467641,
"grad_norm": 0.8198635578155518,
"learning_rate": 3.7823543856929403e-06,
"loss": 0.4764,
"step": 696
},
{
"epoch": 2.1784968684759916,
"grad_norm": 0.708933413028717,
"learning_rate": 3.7786357931534987e-06,
"loss": 0.4948,
"step": 697
},
{
"epoch": 2.1816283924843423,
"grad_norm": 0.8493193984031677,
"learning_rate": 3.774913365556123e-06,
"loss": 0.5271,
"step": 698
},
{
"epoch": 2.184759916492693,
"grad_norm": 0.7999475002288818,
"learning_rate": 3.771187114065622e-06,
"loss": 0.4804,
"step": 699
},
{
"epoch": 2.1878914405010437,
"grad_norm": 0.8366796970367432,
"learning_rate": 3.7674570498582776e-06,
"loss": 0.457,
"step": 700
},
{
"epoch": 2.1910229645093944,
"grad_norm": 0.7935530543327332,
"learning_rate": 3.7637231841218015e-06,
"loss": 0.5001,
"step": 701
},
{
"epoch": 2.1941544885177455,
"grad_norm": 0.7700153589248657,
"learning_rate": 3.7599855280553125e-06,
"loss": 0.5091,
"step": 702
},
{
"epoch": 2.1972860125260962,
"grad_norm": 0.7991652488708496,
"learning_rate": 3.756244092869294e-06,
"loss": 0.4955,
"step": 703
},
{
"epoch": 2.200417536534447,
"grad_norm": 0.720051646232605,
"learning_rate": 3.752498889785567e-06,
"loss": 0.4902,
"step": 704
},
{
"epoch": 2.2035490605427976,
"grad_norm": 0.7312369346618652,
"learning_rate": 3.748749930037252e-06,
"loss": 0.4935,
"step": 705
},
{
"epoch": 2.2066805845511483,
"grad_norm": 0.8418563008308411,
"learning_rate": 3.744997224868739e-06,
"loss": 0.5186,
"step": 706
},
{
"epoch": 2.209812108559499,
"grad_norm": 0.8324081301689148,
"learning_rate": 3.741240785535649e-06,
"loss": 0.482,
"step": 707
},
{
"epoch": 2.2129436325678498,
"grad_norm": 0.8051855564117432,
"learning_rate": 3.737480623304805e-06,
"loss": 0.4663,
"step": 708
},
{
"epoch": 2.2160751565762005,
"grad_norm": 0.9464184641838074,
"learning_rate": 3.7337167494541948e-06,
"loss": 0.451,
"step": 709
},
{
"epoch": 2.219206680584551,
"grad_norm": 1.0227075815200806,
"learning_rate": 3.729949175272942e-06,
"loss": 0.4428,
"step": 710
},
{
"epoch": 2.222338204592902,
"grad_norm": 0.7930364012718201,
"learning_rate": 3.7261779120612633e-06,
"loss": 0.5132,
"step": 711
},
{
"epoch": 2.2254697286012526,
"grad_norm": 0.9033688306808472,
"learning_rate": 3.7224029711304444e-06,
"loss": 0.476,
"step": 712
},
{
"epoch": 2.2286012526096033,
"grad_norm": 0.8026887774467468,
"learning_rate": 3.7186243638028007e-06,
"loss": 0.4959,
"step": 713
},
{
"epoch": 2.231732776617954,
"grad_norm": 0.9391745328903198,
"learning_rate": 3.714842101411642e-06,
"loss": 0.4962,
"step": 714
},
{
"epoch": 2.2348643006263047,
"grad_norm": 0.7774361371994019,
"learning_rate": 3.711056195301245e-06,
"loss": 0.4748,
"step": 715
},
{
"epoch": 2.2379958246346554,
"grad_norm": 0.9278722405433655,
"learning_rate": 3.7072666568268115e-06,
"loss": 0.5074,
"step": 716
},
{
"epoch": 2.241127348643006,
"grad_norm": 0.771423876285553,
"learning_rate": 3.7034734973544406e-06,
"loss": 0.5072,
"step": 717
},
{
"epoch": 2.244258872651357,
"grad_norm": 0.8707448244094849,
"learning_rate": 3.6996767282610892e-06,
"loss": 0.4851,
"step": 718
},
{
"epoch": 2.2473903966597075,
"grad_norm": 0.7641019821166992,
"learning_rate": 3.695876360934543e-06,
"loss": 0.4941,
"step": 719
},
{
"epoch": 2.2505219206680582,
"grad_norm": 0.7647167444229126,
"learning_rate": 3.69207240677338e-06,
"loss": 0.5225,
"step": 720
},
{
"epoch": 2.2536534446764094,
"grad_norm": 0.9108865261077881,
"learning_rate": 3.6882648771869345e-06,
"loss": 0.454,
"step": 721
},
{
"epoch": 2.25678496868476,
"grad_norm": 0.86728835105896,
"learning_rate": 3.6844537835952666e-06,
"loss": 0.4461,
"step": 722
},
{
"epoch": 2.259916492693111,
"grad_norm": 1.1055282354354858,
"learning_rate": 3.6806391374291238e-06,
"loss": 0.4618,
"step": 723
},
{
"epoch": 2.2630480167014615,
"grad_norm": 0.7591858506202698,
"learning_rate": 3.6768209501299116e-06,
"loss": 0.4901,
"step": 724
},
{
"epoch": 2.266179540709812,
"grad_norm": 0.7966359257698059,
"learning_rate": 3.6729992331496554e-06,
"loss": 0.5171,
"step": 725
},
{
"epoch": 2.269311064718163,
"grad_norm": 0.983161211013794,
"learning_rate": 3.6691739979509672e-06,
"loss": 0.4949,
"step": 726
},
{
"epoch": 2.2724425887265136,
"grad_norm": 0.9200037121772766,
"learning_rate": 3.6653452560070106e-06,
"loss": 0.5234,
"step": 727
},
{
"epoch": 2.2755741127348643,
"grad_norm": 1.0288461446762085,
"learning_rate": 3.6615130188014685e-06,
"loss": 0.4713,
"step": 728
},
{
"epoch": 2.278705636743215,
"grad_norm": 0.7325463891029358,
"learning_rate": 3.6576772978285065e-06,
"loss": 0.527,
"step": 729
},
{
"epoch": 2.2818371607515657,
"grad_norm": 1.0045446157455444,
"learning_rate": 3.6538381045927395e-06,
"loss": 0.5139,
"step": 730
},
{
"epoch": 2.2849686847599164,
"grad_norm": 0.7391849756240845,
"learning_rate": 3.6499954506091963e-06,
"loss": 0.4829,
"step": 731
},
{
"epoch": 2.288100208768267,
"grad_norm": 0.7808229923248291,
"learning_rate": 3.646149347403286e-06,
"loss": 0.4831,
"step": 732
},
{
"epoch": 2.291231732776618,
"grad_norm": 0.7056961059570312,
"learning_rate": 3.6422998065107628e-06,
"loss": 0.5066,
"step": 733
},
{
"epoch": 2.2943632567849686,
"grad_norm": 0.7498443126678467,
"learning_rate": 3.6384468394776935e-06,
"loss": 0.4724,
"step": 734
},
{
"epoch": 2.2974947807933193,
"grad_norm": 0.8511576056480408,
"learning_rate": 3.634590457860418e-06,
"loss": 0.5286,
"step": 735
},
{
"epoch": 2.30062630480167,
"grad_norm": 0.873635470867157,
"learning_rate": 3.63073067322552e-06,
"loss": 0.4751,
"step": 736
},
{
"epoch": 2.3037578288100207,
"grad_norm": 0.7427377104759216,
"learning_rate": 3.626867497149788e-06,
"loss": 0.475,
"step": 737
},
{
"epoch": 2.306889352818372,
"grad_norm": 1.0591017007827759,
"learning_rate": 3.623000941220186e-06,
"loss": 0.4591,
"step": 738
},
{
"epoch": 2.3100208768267225,
"grad_norm": 0.8767879009246826,
"learning_rate": 3.6191310170338114e-06,
"loss": 0.4673,
"step": 739
},
{
"epoch": 2.3131524008350732,
"grad_norm": 0.9156234860420227,
"learning_rate": 3.615257736197866e-06,
"loss": 0.4622,
"step": 740
},
{
"epoch": 2.316283924843424,
"grad_norm": 0.6743756532669067,
"learning_rate": 3.611381110329619e-06,
"loss": 0.4723,
"step": 741
},
{
"epoch": 2.3194154488517746,
"grad_norm": 0.8655558228492737,
"learning_rate": 3.6075011510563732e-06,
"loss": 0.471,
"step": 742
},
{
"epoch": 2.3225469728601253,
"grad_norm": 0.7652033567428589,
"learning_rate": 3.603617870015429e-06,
"loss": 0.5155,
"step": 743
},
{
"epoch": 2.325678496868476,
"grad_norm": 0.7970699667930603,
"learning_rate": 3.599731278854049e-06,
"loss": 0.4507,
"step": 744
},
{
"epoch": 2.3288100208768268,
"grad_norm": 0.7538278698921204,
"learning_rate": 3.5958413892294253e-06,
"loss": 0.5093,
"step": 745
},
{
"epoch": 2.3319415448851775,
"grad_norm": 0.735996663570404,
"learning_rate": 3.5919482128086414e-06,
"loss": 0.5008,
"step": 746
},
{
"epoch": 2.335073068893528,
"grad_norm": 0.7643904685974121,
"learning_rate": 3.588051761268642e-06,
"loss": 0.5072,
"step": 747
},
{
"epoch": 2.338204592901879,
"grad_norm": 0.7646260857582092,
"learning_rate": 3.584152046296191e-06,
"loss": 0.4578,
"step": 748
},
{
"epoch": 2.3413361169102296,
"grad_norm": 0.7873825430870056,
"learning_rate": 3.5802490795878446e-06,
"loss": 0.5249,
"step": 749
},
{
"epoch": 2.3444676409185803,
"grad_norm": 1.095333218574524,
"learning_rate": 3.5763428728499095e-06,
"loss": 0.4913,
"step": 750
},
{
"epoch": 2.347599164926931,
"grad_norm": 1.3425395488739014,
"learning_rate": 3.5724334377984107e-06,
"loss": 0.5317,
"step": 751
},
{
"epoch": 2.3507306889352817,
"grad_norm": 0.7151113748550415,
"learning_rate": 3.568520786159055e-06,
"loss": 0.5135,
"step": 752
},
{
"epoch": 2.3538622129436324,
"grad_norm": 0.8072878122329712,
"learning_rate": 3.5646049296672004e-06,
"loss": 0.4863,
"step": 753
},
{
"epoch": 2.356993736951983,
"grad_norm": 0.8040189743041992,
"learning_rate": 3.5606858800678123e-06,
"loss": 0.4668,
"step": 754
},
{
"epoch": 2.3601252609603343,
"grad_norm": 0.7749765515327454,
"learning_rate": 3.5567636491154385e-06,
"loss": 0.4681,
"step": 755
},
{
"epoch": 2.3632567849686845,
"grad_norm": 0.773013710975647,
"learning_rate": 3.5528382485741638e-06,
"loss": 0.5012,
"step": 756
},
{
"epoch": 2.3663883089770357,
"grad_norm": 0.7017714381217957,
"learning_rate": 3.5489096902175835e-06,
"loss": 0.5019,
"step": 757
},
{
"epoch": 2.3695198329853864,
"grad_norm": 1.132458209991455,
"learning_rate": 3.5449779858287625e-06,
"loss": 0.5131,
"step": 758
},
{
"epoch": 2.372651356993737,
"grad_norm": 0.7624574899673462,
"learning_rate": 3.541043147200202e-06,
"loss": 0.4856,
"step": 759
},
{
"epoch": 2.375782881002088,
"grad_norm": 0.9078478217124939,
"learning_rate": 3.5371051861338036e-06,
"loss": 0.4337,
"step": 760
},
{
"epoch": 2.3789144050104385,
"grad_norm": 0.8608354330062866,
"learning_rate": 3.5331641144408344e-06,
"loss": 0.5053,
"step": 761
},
{
"epoch": 2.382045929018789,
"grad_norm": 0.775047779083252,
"learning_rate": 3.529219943941892e-06,
"loss": 0.4779,
"step": 762
},
{
"epoch": 2.38517745302714,
"grad_norm": 0.7775866389274597,
"learning_rate": 3.525272686466866e-06,
"loss": 0.4979,
"step": 763
},
{
"epoch": 2.3883089770354906,
"grad_norm": 0.9386464357376099,
"learning_rate": 3.521322353854908e-06,
"loss": 0.5222,
"step": 764
},
{
"epoch": 2.3914405010438413,
"grad_norm": 0.874109148979187,
"learning_rate": 3.517368957954391e-06,
"loss": 0.4681,
"step": 765
},
{
"epoch": 2.394572025052192,
"grad_norm": 0.824588418006897,
"learning_rate": 3.5134125106228766e-06,
"loss": 0.4955,
"step": 766
},
{
"epoch": 2.3977035490605427,
"grad_norm": 0.8790764808654785,
"learning_rate": 3.5094530237270774e-06,
"loss": 0.4722,
"step": 767
},
{
"epoch": 2.4008350730688934,
"grad_norm": 1.1399786472320557,
"learning_rate": 3.5054905091428253e-06,
"loss": 0.4771,
"step": 768
},
{
"epoch": 2.403966597077244,
"grad_norm": 1.2586532831192017,
"learning_rate": 3.50152497875503e-06,
"loss": 0.4849,
"step": 769
},
{
"epoch": 2.407098121085595,
"grad_norm": 0.7706464529037476,
"learning_rate": 3.4975564444576487e-06,
"loss": 0.477,
"step": 770
},
{
"epoch": 2.4102296450939455,
"grad_norm": 0.7695909142494202,
"learning_rate": 3.4935849181536484e-06,
"loss": 0.4695,
"step": 771
},
{
"epoch": 2.4133611691022967,
"grad_norm": 0.7744433283805847,
"learning_rate": 3.489610411754969e-06,
"loss": 0.499,
"step": 772
},
{
"epoch": 2.416492693110647,
"grad_norm": 0.9265744686126709,
"learning_rate": 3.48563293718249e-06,
"loss": 0.481,
"step": 773
},
{
"epoch": 2.419624217118998,
"grad_norm": 1.0680506229400635,
"learning_rate": 3.481652506365992e-06,
"loss": 0.4898,
"step": 774
},
{
"epoch": 2.422755741127349,
"grad_norm": 0.721493661403656,
"learning_rate": 3.477669131244122e-06,
"loss": 0.4813,
"step": 775
},
{
"epoch": 2.4258872651356995,
"grad_norm": 0.7993559837341309,
"learning_rate": 3.4736828237643616e-06,
"loss": 0.5179,
"step": 776
},
{
"epoch": 2.4290187891440502,
"grad_norm": 0.8148090839385986,
"learning_rate": 3.4696935958829837e-06,
"loss": 0.4753,
"step": 777
},
{
"epoch": 2.432150313152401,
"grad_norm": 0.8006406426429749,
"learning_rate": 3.465701459565022e-06,
"loss": 0.501,
"step": 778
},
{
"epoch": 2.4352818371607516,
"grad_norm": 0.9307970404624939,
"learning_rate": 3.4617064267842327e-06,
"loss": 0.487,
"step": 779
},
{
"epoch": 2.4384133611691023,
"grad_norm": 0.7192814946174622,
"learning_rate": 3.45770850952306e-06,
"loss": 0.4769,
"step": 780
},
{
"epoch": 2.441544885177453,
"grad_norm": 0.7386271953582764,
"learning_rate": 3.4537077197726023e-06,
"loss": 0.4726,
"step": 781
},
{
"epoch": 2.4446764091858038,
"grad_norm": 0.8006314039230347,
"learning_rate": 3.449704069532567e-06,
"loss": 0.494,
"step": 782
},
{
"epoch": 2.4478079331941545,
"grad_norm": 0.7466752529144287,
"learning_rate": 3.4456975708112477e-06,
"loss": 0.4778,
"step": 783
},
{
"epoch": 2.450939457202505,
"grad_norm": 0.8348856568336487,
"learning_rate": 3.4416882356254777e-06,
"loss": 0.4766,
"step": 784
},
{
"epoch": 2.454070981210856,
"grad_norm": 0.754851758480072,
"learning_rate": 3.4376760760005994e-06,
"loss": 0.4673,
"step": 785
},
{
"epoch": 2.4572025052192066,
"grad_norm": 0.7854018807411194,
"learning_rate": 3.433661103970427e-06,
"loss": 0.4954,
"step": 786
},
{
"epoch": 2.4603340292275573,
"grad_norm": 0.7238256931304932,
"learning_rate": 3.4296433315772084e-06,
"loss": 0.496,
"step": 787
},
{
"epoch": 2.463465553235908,
"grad_norm": 0.7007659673690796,
"learning_rate": 3.4256227708715915e-06,
"loss": 0.4793,
"step": 788
},
{
"epoch": 2.4665970772442587,
"grad_norm": 0.7234371900558472,
"learning_rate": 3.421599433912588e-06,
"loss": 0.4935,
"step": 789
},
{
"epoch": 2.4697286012526094,
"grad_norm": 0.7537544965744019,
"learning_rate": 3.4175733327675355e-06,
"loss": 0.5194,
"step": 790
},
{
"epoch": 2.4728601252609606,
"grad_norm": 0.7608047127723694,
"learning_rate": 3.4135444795120633e-06,
"loss": 0.4793,
"step": 791
},
{
"epoch": 2.4759916492693113,
"grad_norm": 0.7847898006439209,
"learning_rate": 3.4095128862300542e-06,
"loss": 0.4877,
"step": 792
},
{
"epoch": 2.479123173277662,
"grad_norm": 0.8002011179924011,
"learning_rate": 3.405478565013609e-06,
"loss": 0.4927,
"step": 793
},
{
"epoch": 2.4822546972860127,
"grad_norm": 0.8200219869613647,
"learning_rate": 3.401441527963013e-06,
"loss": 0.4997,
"step": 794
},
{
"epoch": 2.4853862212943634,
"grad_norm": 0.7220162749290466,
"learning_rate": 3.3974017871866938e-06,
"loss": 0.4668,
"step": 795
},
{
"epoch": 2.488517745302714,
"grad_norm": 0.8022251129150391,
"learning_rate": 3.3933593548011912e-06,
"loss": 0.5179,
"step": 796
},
{
"epoch": 2.491649269311065,
"grad_norm": 0.7914465069770813,
"learning_rate": 3.389314242931115e-06,
"loss": 0.4943,
"step": 797
},
{
"epoch": 2.4947807933194155,
"grad_norm": 1.1399403810501099,
"learning_rate": 3.385266463709116e-06,
"loss": 0.4896,
"step": 798
},
{
"epoch": 2.497912317327766,
"grad_norm": 0.8098909854888916,
"learning_rate": 3.38121602927584e-06,
"loss": 0.4904,
"step": 799
},
{
"epoch": 2.501043841336117,
"grad_norm": 0.7434052228927612,
"learning_rate": 3.377162951779902e-06,
"loss": 0.4864,
"step": 800
},
{
"epoch": 2.5041753653444676,
"grad_norm": 0.7397809624671936,
"learning_rate": 3.3731072433778407e-06,
"loss": 0.486,
"step": 801
},
{
"epoch": 2.5073068893528183,
"grad_norm": 0.99027019739151,
"learning_rate": 3.3690489162340867e-06,
"loss": 0.5011,
"step": 802
},
{
"epoch": 2.510438413361169,
"grad_norm": 0.8443610668182373,
"learning_rate": 3.3649879825209246e-06,
"loss": 0.455,
"step": 803
},
{
"epoch": 2.5135699373695197,
"grad_norm": 0.755649983882904,
"learning_rate": 3.3609244544184604e-06,
"loss": 0.4563,
"step": 804
},
{
"epoch": 2.5167014613778704,
"grad_norm": 0.728018045425415,
"learning_rate": 3.3568583441145765e-06,
"loss": 0.471,
"step": 805
},
{
"epoch": 2.519832985386221,
"grad_norm": 0.7777130603790283,
"learning_rate": 3.352789663804904e-06,
"loss": 0.4667,
"step": 806
},
{
"epoch": 2.522964509394572,
"grad_norm": 0.7545619606971741,
"learning_rate": 3.3487184256927785e-06,
"loss": 0.4915,
"step": 807
},
{
"epoch": 2.526096033402923,
"grad_norm": 0.8374579548835754,
"learning_rate": 3.3446446419892127e-06,
"loss": 0.485,
"step": 808
},
{
"epoch": 2.5292275574112733,
"grad_norm": 0.7354666590690613,
"learning_rate": 3.340568324912849e-06,
"loss": 0.5254,
"step": 809
},
{
"epoch": 2.5323590814196244,
"grad_norm": 0.7581545114517212,
"learning_rate": 3.3364894866899324e-06,
"loss": 0.4483,
"step": 810
},
{
"epoch": 2.535490605427975,
"grad_norm": 0.8077559471130371,
"learning_rate": 3.3324081395542662e-06,
"loss": 0.5022,
"step": 811
},
{
"epoch": 2.538622129436326,
"grad_norm": 0.8827865719795227,
"learning_rate": 3.3283242957471806e-06,
"loss": 0.4909,
"step": 812
},
{
"epoch": 2.5417536534446765,
"grad_norm": 0.9139482378959656,
"learning_rate": 3.3242379675174953e-06,
"loss": 0.5205,
"step": 813
},
{
"epoch": 2.5448851774530272,
"grad_norm": 0.7616812586784363,
"learning_rate": 3.3201491671214797e-06,
"loss": 0.4744,
"step": 814
},
{
"epoch": 2.548016701461378,
"grad_norm": 0.987173318862915,
"learning_rate": 3.3160579068228183e-06,
"loss": 0.4876,
"step": 815
},
{
"epoch": 2.5511482254697286,
"grad_norm": 1.259137749671936,
"learning_rate": 3.311964198892574e-06,
"loss": 0.454,
"step": 816
},
{
"epoch": 2.5542797494780793,
"grad_norm": 0.7866336703300476,
"learning_rate": 3.3078680556091513e-06,
"loss": 0.5107,
"step": 817
},
{
"epoch": 2.55741127348643,
"grad_norm": 0.9311352372169495,
"learning_rate": 3.303769489258258e-06,
"loss": 0.4843,
"step": 818
},
{
"epoch": 2.5605427974947808,
"grad_norm": 0.8556346893310547,
"learning_rate": 3.299668512132872e-06,
"loss": 0.5017,
"step": 819
},
{
"epoch": 2.5636743215031315,
"grad_norm": 2.810598373413086,
"learning_rate": 3.2955651365331988e-06,
"loss": 0.5223,
"step": 820
},
{
"epoch": 2.566805845511482,
"grad_norm": 1.0120766162872314,
"learning_rate": 3.29145937476664e-06,
"loss": 0.4959,
"step": 821
},
{
"epoch": 2.569937369519833,
"grad_norm": 0.751412034034729,
"learning_rate": 3.287351239147752e-06,
"loss": 0.4941,
"step": 822
},
{
"epoch": 2.5730688935281836,
"grad_norm": 1.9308148622512817,
"learning_rate": 3.2832407419982136e-06,
"loss": 0.4965,
"step": 823
},
{
"epoch": 2.5762004175365343,
"grad_norm": 0.9215649962425232,
"learning_rate": 3.279127895646786e-06,
"loss": 0.5071,
"step": 824
},
{
"epoch": 2.5793319415448854,
"grad_norm": 0.7599574327468872,
"learning_rate": 3.2750127124292754e-06,
"loss": 0.5191,
"step": 825
},
{
"epoch": 2.5824634655532357,
"grad_norm": 0.8234940767288208,
"learning_rate": 3.270895204688496e-06,
"loss": 0.4947,
"step": 826
},
{
"epoch": 2.585594989561587,
"grad_norm": 0.8401572704315186,
"learning_rate": 3.266775384774238e-06,
"loss": 0.4547,
"step": 827
},
{
"epoch": 2.588726513569937,
"grad_norm": 0.8927991986274719,
"learning_rate": 3.262653265043223e-06,
"loss": 0.4296,
"step": 828
},
{
"epoch": 2.5918580375782883,
"grad_norm": 0.8009241223335266,
"learning_rate": 3.2585288578590716e-06,
"loss": 0.4578,
"step": 829
},
{
"epoch": 2.594989561586639,
"grad_norm": 0.7982021570205688,
"learning_rate": 3.2544021755922663e-06,
"loss": 0.4961,
"step": 830
},
{
"epoch": 2.5981210855949897,
"grad_norm": 0.7096095681190491,
"learning_rate": 3.2502732306201112e-06,
"loss": 0.4975,
"step": 831
},
{
"epoch": 2.6012526096033404,
"grad_norm": 1.1092045307159424,
"learning_rate": 3.246142035326699e-06,
"loss": 0.4705,
"step": 832
},
{
"epoch": 2.604384133611691,
"grad_norm": 0.785799503326416,
"learning_rate": 3.24200860210287e-06,
"loss": 0.479,
"step": 833
},
{
"epoch": 2.607515657620042,
"grad_norm": 0.7315773367881775,
"learning_rate": 3.2378729433461804e-06,
"loss": 0.5036,
"step": 834
},
{
"epoch": 2.6106471816283925,
"grad_norm": 0.7840189337730408,
"learning_rate": 3.233735071460856e-06,
"loss": 0.4967,
"step": 835
},
{
"epoch": 2.613778705636743,
"grad_norm": 0.7186565399169922,
"learning_rate": 3.2295949988577655e-06,
"loss": 0.4889,
"step": 836
},
{
"epoch": 2.616910229645094,
"grad_norm": 0.766054093837738,
"learning_rate": 3.2254527379543747e-06,
"loss": 0.539,
"step": 837
},
{
"epoch": 2.6200417536534446,
"grad_norm": 0.7705381512641907,
"learning_rate": 3.2213083011747165e-06,
"loss": 0.4968,
"step": 838
},
{
"epoch": 2.6231732776617953,
"grad_norm": 1.3530604839324951,
"learning_rate": 3.217161700949346e-06,
"loss": 0.52,
"step": 839
},
{
"epoch": 2.626304801670146,
"grad_norm": 0.737389862537384,
"learning_rate": 3.2130129497153107e-06,
"loss": 0.4823,
"step": 840
},
{
"epoch": 2.6294363256784967,
"grad_norm": 0.9121193885803223,
"learning_rate": 3.2088620599161064e-06,
"loss": 0.4592,
"step": 841
},
{
"epoch": 2.632567849686848,
"grad_norm": 0.8869616389274597,
"learning_rate": 3.2047090440016464e-06,
"loss": 0.5001,
"step": 842
},
{
"epoch": 2.635699373695198,
"grad_norm": 0.8447219133377075,
"learning_rate": 3.200553914428219e-06,
"loss": 0.4969,
"step": 843
},
{
"epoch": 2.6388308977035493,
"grad_norm": 0.8877657055854797,
"learning_rate": 3.1963966836584524e-06,
"loss": 0.4718,
"step": 844
},
{
"epoch": 2.6419624217118995,
"grad_norm": 1.045272946357727,
"learning_rate": 3.192237364161277e-06,
"loss": 0.4864,
"step": 845
},
{
"epoch": 2.6450939457202507,
"grad_norm": 0.8485913276672363,
"learning_rate": 3.1880759684118876e-06,
"loss": 0.4688,
"step": 846
},
{
"epoch": 2.6482254697286014,
"grad_norm": 0.7328930497169495,
"learning_rate": 3.183912508891709e-06,
"loss": 0.4728,
"step": 847
},
{
"epoch": 2.651356993736952,
"grad_norm": 0.7377315759658813,
"learning_rate": 3.179746998088351e-06,
"loss": 0.4672,
"step": 848
},
{
"epoch": 2.654488517745303,
"grad_norm": 0.8017002940177917,
"learning_rate": 3.1755794484955817e-06,
"loss": 0.4884,
"step": 849
},
{
"epoch": 2.6576200417536535,
"grad_norm": 1.045470952987671,
"learning_rate": 3.171409872613278e-06,
"loss": 0.4789,
"step": 850
},
{
"epoch": 2.6607515657620042,
"grad_norm": 0.8823987245559692,
"learning_rate": 3.1672382829473997e-06,
"loss": 0.5117,
"step": 851
},
{
"epoch": 2.663883089770355,
"grad_norm": 0.7395204901695251,
"learning_rate": 3.163064692009944e-06,
"loss": 0.5476,
"step": 852
},
{
"epoch": 2.6670146137787056,
"grad_norm": 0.7778941988945007,
"learning_rate": 3.1588891123189103e-06,
"loss": 0.5092,
"step": 853
},
{
"epoch": 2.6701461377870563,
"grad_norm": 0.8072531819343567,
"learning_rate": 3.1547115563982643e-06,
"loss": 0.4961,
"step": 854
},
{
"epoch": 2.673277661795407,
"grad_norm": 0.9018139243125916,
"learning_rate": 3.1505320367778993e-06,
"loss": 0.4624,
"step": 855
},
{
"epoch": 2.6764091858037578,
"grad_norm": 0.8554450869560242,
"learning_rate": 3.1463505659935957e-06,
"loss": 0.4971,
"step": 856
},
{
"epoch": 2.6795407098121085,
"grad_norm": 0.7727259397506714,
"learning_rate": 3.14216715658699e-06,
"loss": 0.4544,
"step": 857
},
{
"epoch": 2.682672233820459,
"grad_norm": 0.9253409504890442,
"learning_rate": 3.137981821105529e-06,
"loss": 0.4893,
"step": 858
},
{
"epoch": 2.68580375782881,
"grad_norm": 0.8809456825256348,
"learning_rate": 3.1337945721024403e-06,
"loss": 0.5242,
"step": 859
},
{
"epoch": 2.6889352818371606,
"grad_norm": 0.981755256652832,
"learning_rate": 3.129605422136689e-06,
"loss": 0.4686,
"step": 860
},
{
"epoch": 2.6920668058455117,
"grad_norm": 1.1278467178344727,
"learning_rate": 3.1254143837729412e-06,
"loss": 0.4813,
"step": 861
},
{
"epoch": 2.695198329853862,
"grad_norm": 0.8529123663902283,
"learning_rate": 3.1212214695815285e-06,
"loss": 0.4723,
"step": 862
},
{
"epoch": 2.698329853862213,
"grad_norm": 0.7764189839363098,
"learning_rate": 3.1170266921384075e-06,
"loss": 0.4777,
"step": 863
},
{
"epoch": 2.701461377870564,
"grad_norm": 0.7364740967750549,
"learning_rate": 3.112830064025124e-06,
"loss": 0.4975,
"step": 864
},
{
"epoch": 2.7045929018789145,
"grad_norm": 0.7594549059867859,
"learning_rate": 3.108631597828774e-06,
"loss": 0.5083,
"step": 865
},
{
"epoch": 2.7077244258872653,
"grad_norm": 0.7337073683738708,
"learning_rate": 3.104431306141968e-06,
"loss": 0.4778,
"step": 866
},
{
"epoch": 2.710855949895616,
"grad_norm": 0.7709932327270508,
"learning_rate": 3.1002292015627894e-06,
"loss": 0.4754,
"step": 867
},
{
"epoch": 2.7139874739039667,
"grad_norm": 0.8001313209533691,
"learning_rate": 3.0960252966947605e-06,
"loss": 0.4489,
"step": 868
},
{
"epoch": 2.7171189979123174,
"grad_norm": 0.8280592560768127,
"learning_rate": 3.091819604146804e-06,
"loss": 0.4606,
"step": 869
},
{
"epoch": 2.720250521920668,
"grad_norm": 0.7463534474372864,
"learning_rate": 3.0876121365332024e-06,
"loss": 0.5168,
"step": 870
},
{
"epoch": 2.723382045929019,
"grad_norm": 0.9011222124099731,
"learning_rate": 3.0834029064735636e-06,
"loss": 0.5163,
"step": 871
},
{
"epoch": 2.7265135699373695,
"grad_norm": 0.7811456322669983,
"learning_rate": 3.0791919265927827e-06,
"loss": 0.5004,
"step": 872
},
{
"epoch": 2.72964509394572,
"grad_norm": 0.9251837134361267,
"learning_rate": 3.0749792095210003e-06,
"loss": 0.5081,
"step": 873
},
{
"epoch": 2.732776617954071,
"grad_norm": 0.8347085118293762,
"learning_rate": 3.0707647678935695e-06,
"loss": 0.4793,
"step": 874
},
{
"epoch": 2.7359081419624216,
"grad_norm": 0.9766442179679871,
"learning_rate": 3.0665486143510153e-06,
"loss": 0.493,
"step": 875
},
{
"epoch": 2.7390396659707723,
"grad_norm": 0.7692548036575317,
"learning_rate": 3.0623307615389975e-06,
"loss": 0.4874,
"step": 876
},
{
"epoch": 2.742171189979123,
"grad_norm": 0.7714599370956421,
"learning_rate": 3.0581112221082727e-06,
"loss": 0.4929,
"step": 877
},
{
"epoch": 2.745302713987474,
"grad_norm": 0.7797786593437195,
"learning_rate": 3.053890008714655e-06,
"loss": 0.4359,
"step": 878
},
{
"epoch": 2.7484342379958244,
"grad_norm": 5.118397235870361,
"learning_rate": 3.049667134018981e-06,
"loss": 0.4634,
"step": 879
},
{
"epoch": 2.7515657620041756,
"grad_norm": 0.7684539556503296,
"learning_rate": 3.04544261068707e-06,
"loss": 0.4688,
"step": 880
},
{
"epoch": 2.754697286012526,
"grad_norm": 0.8678610920906067,
"learning_rate": 3.0412164513896846e-06,
"loss": 0.5213,
"step": 881
},
{
"epoch": 2.757828810020877,
"grad_norm": 0.80293869972229,
"learning_rate": 3.0369886688024954e-06,
"loss": 0.4392,
"step": 882
},
{
"epoch": 2.7609603340292277,
"grad_norm": 0.7438644766807556,
"learning_rate": 3.0327592756060412e-06,
"loss": 0.528,
"step": 883
},
{
"epoch": 2.7640918580375784,
"grad_norm": 0.7701645493507385,
"learning_rate": 3.0285282844856917e-06,
"loss": 0.504,
"step": 884
},
{
"epoch": 2.767223382045929,
"grad_norm": 0.7113856673240662,
"learning_rate": 3.024295708131611e-06,
"loss": 0.4819,
"step": 885
},
{
"epoch": 2.77035490605428,
"grad_norm": 1.2697532176971436,
"learning_rate": 3.020061559238714e-06,
"loss": 0.5009,
"step": 886
},
{
"epoch": 2.7734864300626305,
"grad_norm": 1.0299439430236816,
"learning_rate": 3.015825850506636e-06,
"loss": 0.4707,
"step": 887
},
{
"epoch": 2.776617954070981,
"grad_norm": 0.9703660607337952,
"learning_rate": 3.011588594639688e-06,
"loss": 0.4102,
"step": 888
},
{
"epoch": 2.779749478079332,
"grad_norm": 0.7357314825057983,
"learning_rate": 3.0073498043468247e-06,
"loss": 0.4649,
"step": 889
},
{
"epoch": 2.7828810020876826,
"grad_norm": 0.7815471291542053,
"learning_rate": 3.0031094923415993e-06,
"loss": 0.469,
"step": 890
},
{
"epoch": 2.7860125260960333,
"grad_norm": 0.7856019139289856,
"learning_rate": 2.9988676713421318e-06,
"loss": 0.4241,
"step": 891
},
{
"epoch": 2.789144050104384,
"grad_norm": 0.7668167352676392,
"learning_rate": 2.994624354071066e-06,
"loss": 0.5309,
"step": 892
},
{
"epoch": 2.7922755741127347,
"grad_norm": 0.7485945820808411,
"learning_rate": 2.990379553255535e-06,
"loss": 0.5173,
"step": 893
},
{
"epoch": 2.7954070981210855,
"grad_norm": 0.8065824508666992,
"learning_rate": 2.986133281627123e-06,
"loss": 0.4995,
"step": 894
},
{
"epoch": 2.798538622129436,
"grad_norm": 0.7156995534896851,
"learning_rate": 2.9818855519218217e-06,
"loss": 0.4642,
"step": 895
},
{
"epoch": 2.801670146137787,
"grad_norm": 0.9115403890609741,
"learning_rate": 2.97763637688e-06,
"loss": 0.4799,
"step": 896
},
{
"epoch": 2.804801670146138,
"grad_norm": 0.7466689944267273,
"learning_rate": 2.9733857692463584e-06,
"loss": 0.4942,
"step": 897
},
{
"epoch": 2.8079331941544883,
"grad_norm": 0.7484914064407349,
"learning_rate": 2.9691337417698974e-06,
"loss": 0.4618,
"step": 898
},
{
"epoch": 2.8110647181628394,
"grad_norm": 0.816704511642456,
"learning_rate": 2.9648803072038736e-06,
"loss": 0.4748,
"step": 899
},
{
"epoch": 2.81419624217119,
"grad_norm": 0.7627584934234619,
"learning_rate": 2.9606254783057666e-06,
"loss": 0.4667,
"step": 900
},
{
"epoch": 2.817327766179541,
"grad_norm": 0.7341011166572571,
"learning_rate": 2.9563692678372342e-06,
"loss": 0.4802,
"step": 901
},
{
"epoch": 2.8204592901878915,
"grad_norm": 1.2541382312774658,
"learning_rate": 2.952111688564082e-06,
"loss": 0.5231,
"step": 902
},
{
"epoch": 2.8235908141962422,
"grad_norm": 0.7172819375991821,
"learning_rate": 2.9478527532562184e-06,
"loss": 0.4488,
"step": 903
},
{
"epoch": 2.826722338204593,
"grad_norm": 0.774529218673706,
"learning_rate": 2.943592474687621e-06,
"loss": 0.4964,
"step": 904
},
{
"epoch": 2.8298538622129437,
"grad_norm": 0.7315672636032104,
"learning_rate": 2.939330865636294e-06,
"loss": 0.4817,
"step": 905
},
{
"epoch": 2.8329853862212944,
"grad_norm": 0.7698234915733337,
"learning_rate": 2.9350679388842347e-06,
"loss": 0.5075,
"step": 906
},
{
"epoch": 2.836116910229645,
"grad_norm": 0.7717766761779785,
"learning_rate": 2.93080370721739e-06,
"loss": 0.4789,
"step": 907
},
{
"epoch": 2.8392484342379958,
"grad_norm": 0.7383570075035095,
"learning_rate": 2.926538183425622e-06,
"loss": 0.4992,
"step": 908
},
{
"epoch": 2.8423799582463465,
"grad_norm": 0.7858864068984985,
"learning_rate": 2.92227138030267e-06,
"loss": 0.4993,
"step": 909
},
{
"epoch": 2.845511482254697,
"grad_norm": 0.8220369219779968,
"learning_rate": 2.9180033106461076e-06,
"loss": 0.4929,
"step": 910
},
{
"epoch": 2.848643006263048,
"grad_norm": 0.7507152557373047,
"learning_rate": 2.9137339872573086e-06,
"loss": 0.4394,
"step": 911
},
{
"epoch": 2.8517745302713986,
"grad_norm": 0.7935269474983215,
"learning_rate": 2.9094634229414063e-06,
"loss": 0.4656,
"step": 912
},
{
"epoch": 2.8549060542797493,
"grad_norm": 0.9187721610069275,
"learning_rate": 2.9051916305072576e-06,
"loss": 0.4918,
"step": 913
},
{
"epoch": 2.8580375782881005,
"grad_norm": 0.8699706792831421,
"learning_rate": 2.9009186227674e-06,
"loss": 0.5106,
"step": 914
},
{
"epoch": 2.8611691022964507,
"grad_norm": 0.7175673246383667,
"learning_rate": 2.896644412538021e-06,
"loss": 0.5105,
"step": 915
},
{
"epoch": 2.864300626304802,
"grad_norm": 0.8563990592956543,
"learning_rate": 2.892369012638909e-06,
"loss": 0.4993,
"step": 916
},
{
"epoch": 2.867432150313152,
"grad_norm": 0.7891882658004761,
"learning_rate": 2.8880924358934246e-06,
"loss": 0.4983,
"step": 917
},
{
"epoch": 2.8705636743215033,
"grad_norm": 0.9247110486030579,
"learning_rate": 2.8838146951284575e-06,
"loss": 0.4789,
"step": 918
},
{
"epoch": 2.873695198329854,
"grad_norm": 0.7523055672645569,
"learning_rate": 2.879535803174387e-06,
"loss": 0.4982,
"step": 919
},
{
"epoch": 2.8768267223382047,
"grad_norm": 0.8096909523010254,
"learning_rate": 2.8752557728650467e-06,
"loss": 0.4958,
"step": 920
},
{
"epoch": 2.8799582463465554,
"grad_norm": 2.3476874828338623,
"learning_rate": 2.870974617037684e-06,
"loss": 0.491,
"step": 921
},
{
"epoch": 2.883089770354906,
"grad_norm": 0.8388578295707703,
"learning_rate": 2.8666923485329224e-06,
"loss": 0.5275,
"step": 922
},
{
"epoch": 2.886221294363257,
"grad_norm": 0.8162729144096375,
"learning_rate": 2.8624089801947234e-06,
"loss": 0.4776,
"step": 923
},
{
"epoch": 2.8893528183716075,
"grad_norm": 0.7306103110313416,
"learning_rate": 2.858124524870345e-06,
"loss": 0.4814,
"step": 924
},
{
"epoch": 2.892484342379958,
"grad_norm": 0.8736817836761475,
"learning_rate": 2.853838995410307e-06,
"loss": 0.5097,
"step": 925
},
{
"epoch": 2.895615866388309,
"grad_norm": 0.7771823406219482,
"learning_rate": 2.8495524046683525e-06,
"loss": 0.4806,
"step": 926
},
{
"epoch": 2.8987473903966596,
"grad_norm": 0.9421334862709045,
"learning_rate": 2.845264765501404e-06,
"loss": 0.5055,
"step": 927
},
{
"epoch": 2.9018789144050103,
"grad_norm": 0.8403921127319336,
"learning_rate": 2.8409760907695314e-06,
"loss": 0.4775,
"step": 928
},
{
"epoch": 2.905010438413361,
"grad_norm": 0.8095362186431885,
"learning_rate": 2.836686393335909e-06,
"loss": 0.4532,
"step": 929
},
{
"epoch": 2.9081419624217117,
"grad_norm": 0.7340645790100098,
"learning_rate": 2.8323956860667813e-06,
"loss": 0.4835,
"step": 930
},
{
"epoch": 2.911273486430063,
"grad_norm": 0.6970911026000977,
"learning_rate": 2.828103981831417e-06,
"loss": 0.4999,
"step": 931
},
{
"epoch": 2.914405010438413,
"grad_norm": 0.8136418461799622,
"learning_rate": 2.8238112935020794e-06,
"loss": 0.5038,
"step": 932
},
{
"epoch": 2.9175365344467643,
"grad_norm": 0.9045608043670654,
"learning_rate": 2.8195176339539816e-06,
"loss": 0.486,
"step": 933
},
{
"epoch": 2.9206680584551146,
"grad_norm": 1.14940345287323,
"learning_rate": 2.815223016065249e-06,
"loss": 0.5079,
"step": 934
},
{
"epoch": 2.9237995824634657,
"grad_norm": 0.7411190867424011,
"learning_rate": 2.8109274527168826e-06,
"loss": 0.4564,
"step": 935
},
{
"epoch": 2.9269311064718164,
"grad_norm": 0.8903455138206482,
"learning_rate": 2.806630956792719e-06,
"loss": 0.451,
"step": 936
},
{
"epoch": 2.930062630480167,
"grad_norm": 0.7865445017814636,
"learning_rate": 2.8023335411793904e-06,
"loss": 0.4658,
"step": 937
},
{
"epoch": 2.933194154488518,
"grad_norm": 0.8185790777206421,
"learning_rate": 2.798035218766292e-06,
"loss": 0.4776,
"step": 938
},
{
"epoch": 2.9363256784968685,
"grad_norm": 0.7516276836395264,
"learning_rate": 2.793736002445531e-06,
"loss": 0.4447,
"step": 939
},
{
"epoch": 2.9394572025052192,
"grad_norm": 0.738080620765686,
"learning_rate": 2.789435905111903e-06,
"loss": 0.4832,
"step": 940
},
{
"epoch": 2.94258872651357,
"grad_norm": 0.7971507906913757,
"learning_rate": 2.785134939662843e-06,
"loss": 0.4835,
"step": 941
},
{
"epoch": 2.9457202505219207,
"grad_norm": 0.7529093623161316,
"learning_rate": 2.78083311899839e-06,
"loss": 0.4759,
"step": 942
},
{
"epoch": 2.9488517745302714,
"grad_norm": 0.8222358226776123,
"learning_rate": 2.7765304560211482e-06,
"loss": 0.4365,
"step": 943
},
{
"epoch": 2.951983298538622,
"grad_norm": 0.729945182800293,
"learning_rate": 2.7722269636362462e-06,
"loss": 0.5026,
"step": 944
},
{
"epoch": 2.9551148225469728,
"grad_norm": 0.7287900447845459,
"learning_rate": 2.767922654751306e-06,
"loss": 0.4916,
"step": 945
},
{
"epoch": 2.9582463465553235,
"grad_norm": 0.869637131690979,
"learning_rate": 2.763617542276391e-06,
"loss": 0.5018,
"step": 946
},
{
"epoch": 2.961377870563674,
"grad_norm": 1.004909634590149,
"learning_rate": 2.7593116391239806e-06,
"loss": 0.5152,
"step": 947
},
{
"epoch": 2.964509394572025,
"grad_norm": 0.8263046145439148,
"learning_rate": 2.7550049582089235e-06,
"loss": 0.5249,
"step": 948
},
{
"epoch": 2.9676409185803756,
"grad_norm": 0.7963895797729492,
"learning_rate": 2.750697512448401e-06,
"loss": 0.5084,
"step": 949
},
{
"epoch": 2.9707724425887267,
"grad_norm": 0.7211249470710754,
"learning_rate": 2.7463893147618893e-06,
"loss": 0.4691,
"step": 950
},
{
"epoch": 2.973903966597077,
"grad_norm": 0.8010216951370239,
"learning_rate": 2.742080378071118e-06,
"loss": 0.5026,
"step": 951
},
{
"epoch": 2.977035490605428,
"grad_norm": 0.780078649520874,
"learning_rate": 2.7377707153000356e-06,
"loss": 0.4758,
"step": 952
},
{
"epoch": 2.980167014613779,
"grad_norm": 0.7728193998336792,
"learning_rate": 2.7334603393747684e-06,
"loss": 0.488,
"step": 953
},
{
"epoch": 2.9832985386221296,
"grad_norm": 0.836329996585846,
"learning_rate": 2.7291492632235777e-06,
"loss": 0.456,
"step": 954
},
{
"epoch": 2.9864300626304803,
"grad_norm": 0.7241990566253662,
"learning_rate": 2.724837499776831e-06,
"loss": 0.4953,
"step": 955
},
{
"epoch": 2.989561586638831,
"grad_norm": 0.7595076560974121,
"learning_rate": 2.7205250619669527e-06,
"loss": 0.446,
"step": 956
},
{
"epoch": 2.9926931106471817,
"grad_norm": 0.8177686333656311,
"learning_rate": 2.716211962728392e-06,
"loss": 0.5057,
"step": 957
},
{
"epoch": 2.9958246346555324,
"grad_norm": 0.7506977915763855,
"learning_rate": 2.71189821499758e-06,
"loss": 0.4821,
"step": 958
},
{
"epoch": 2.998956158663883,
"grad_norm": 0.8085163235664368,
"learning_rate": 2.7075838317128943e-06,
"loss": 0.5002,
"step": 959
},
{
"epoch": 3.0,
"grad_norm": 3.3674418926239014,
"learning_rate": 2.7032688258146207e-06,
"loss": 0.183,
"step": 960
},
{
"epoch": 3.0031315240083507,
"grad_norm": 1.066100835800171,
"learning_rate": 2.698953210244908e-06,
"loss": 0.4427,
"step": 961
},
{
"epoch": 3.0062630480167014,
"grad_norm": 0.7690210938453674,
"learning_rate": 2.6946369979477365e-06,
"loss": 0.4638,
"step": 962
},
{
"epoch": 3.009394572025052,
"grad_norm": 0.8114679455757141,
"learning_rate": 2.690320201868876e-06,
"loss": 0.4373,
"step": 963
},
{
"epoch": 3.012526096033403,
"grad_norm": 0.7680971622467041,
"learning_rate": 2.686002834955847e-06,
"loss": 0.4477,
"step": 964
},
{
"epoch": 3.0156576200417535,
"grad_norm": 0.7194678783416748,
"learning_rate": 2.6816849101578808e-06,
"loss": 0.455,
"step": 965
},
{
"epoch": 3.0187891440501042,
"grad_norm": 0.6890467405319214,
"learning_rate": 2.6773664404258854e-06,
"loss": 0.4246,
"step": 966
},
{
"epoch": 3.021920668058455,
"grad_norm": 0.8064301013946533,
"learning_rate": 2.6730474387123987e-06,
"loss": 0.497,
"step": 967
},
{
"epoch": 3.0250521920668056,
"grad_norm": 0.8164849281311035,
"learning_rate": 2.668727917971559e-06,
"loss": 0.457,
"step": 968
},
{
"epoch": 3.028183716075157,
"grad_norm": 0.7793440818786621,
"learning_rate": 2.6644078911590565e-06,
"loss": 0.4632,
"step": 969
},
{
"epoch": 3.0313152400835075,
"grad_norm": 0.7208535671234131,
"learning_rate": 2.6600873712321033e-06,
"loss": 0.4786,
"step": 970
},
{
"epoch": 3.034446764091858,
"grad_norm": 0.8905500769615173,
"learning_rate": 2.655766371149389e-06,
"loss": 0.4317,
"step": 971
},
{
"epoch": 3.037578288100209,
"grad_norm": 0.7537338733673096,
"learning_rate": 2.6514449038710418e-06,
"loss": 0.4783,
"step": 972
},
{
"epoch": 3.0407098121085596,
"grad_norm": 0.7901656031608582,
"learning_rate": 2.6471229823585937e-06,
"loss": 0.4762,
"step": 973
},
{
"epoch": 3.0438413361169103,
"grad_norm": 0.7427340745925903,
"learning_rate": 2.6428006195749373e-06,
"loss": 0.4782,
"step": 974
},
{
"epoch": 3.046972860125261,
"grad_norm": 0.8364680409431458,
"learning_rate": 2.6384778284842905e-06,
"loss": 0.4551,
"step": 975
},
{
"epoch": 3.0501043841336117,
"grad_norm": 1.1049500703811646,
"learning_rate": 2.634154622052155e-06,
"loss": 0.4451,
"step": 976
},
{
"epoch": 3.0532359081419624,
"grad_norm": 1.0797414779663086,
"learning_rate": 2.6298310132452757e-06,
"loss": 0.4512,
"step": 977
},
{
"epoch": 3.056367432150313,
"grad_norm": 0.8832088708877563,
"learning_rate": 2.62550701503161e-06,
"loss": 0.4569,
"step": 978
},
{
"epoch": 3.059498956158664,
"grad_norm": 0.748951256275177,
"learning_rate": 2.621182640380277e-06,
"loss": 0.4535,
"step": 979
},
{
"epoch": 3.0626304801670146,
"grad_norm": 0.776386022567749,
"learning_rate": 2.616857902261529e-06,
"loss": 0.4582,
"step": 980
},
{
"epoch": 3.0657620041753653,
"grad_norm": 0.730057418346405,
"learning_rate": 2.6125328136467074e-06,
"loss": 0.4581,
"step": 981
},
{
"epoch": 3.068893528183716,
"grad_norm": 0.7918877005577087,
"learning_rate": 2.6082073875082046e-06,
"loss": 0.4355,
"step": 982
},
{
"epoch": 3.0720250521920667,
"grad_norm": 0.803987443447113,
"learning_rate": 2.6038816368194265e-06,
"loss": 0.455,
"step": 983
},
{
"epoch": 3.0751565762004174,
"grad_norm": 1.2970365285873413,
"learning_rate": 2.599555574554749e-06,
"loss": 0.448,
"step": 984
},
{
"epoch": 3.078288100208768,
"grad_norm": 0.7814194560050964,
"learning_rate": 2.595229213689487e-06,
"loss": 0.4197,
"step": 985
},
{
"epoch": 3.081419624217119,
"grad_norm": 0.9865803122520447,
"learning_rate": 2.5909025671998483e-06,
"loss": 0.4649,
"step": 986
},
{
"epoch": 3.08455114822547,
"grad_norm": 0.7769168615341187,
"learning_rate": 2.586575648062898e-06,
"loss": 0.4287,
"step": 987
},
{
"epoch": 3.0876826722338206,
"grad_norm": 1.454988956451416,
"learning_rate": 2.582248469256519e-06,
"loss": 0.4548,
"step": 988
},
{
"epoch": 3.0908141962421714,
"grad_norm": 0.7656151056289673,
"learning_rate": 2.577921043759372e-06,
"loss": 0.4323,
"step": 989
},
{
"epoch": 3.093945720250522,
"grad_norm": 0.7989768385887146,
"learning_rate": 2.5735933845508598e-06,
"loss": 0.4616,
"step": 990
},
{
"epoch": 3.0970772442588728,
"grad_norm": 0.8711255192756653,
"learning_rate": 2.5692655046110855e-06,
"loss": 0.4923,
"step": 991
},
{
"epoch": 3.1002087682672235,
"grad_norm": 0.7333446145057678,
"learning_rate": 2.564937416920813e-06,
"loss": 0.4297,
"step": 992
},
{
"epoch": 3.103340292275574,
"grad_norm": 0.7611051201820374,
"learning_rate": 2.5606091344614297e-06,
"loss": 0.4276,
"step": 993
},
{
"epoch": 3.106471816283925,
"grad_norm": 0.9742305278778076,
"learning_rate": 2.5562806702149083e-06,
"loss": 0.4597,
"step": 994
},
{
"epoch": 3.1096033402922756,
"grad_norm": 0.8043314218521118,
"learning_rate": 2.551952037163765e-06,
"loss": 0.459,
"step": 995
},
{
"epoch": 3.1127348643006263,
"grad_norm": 0.7217118740081787,
"learning_rate": 2.5476232482910253e-06,
"loss": 0.4514,
"step": 996
},
{
"epoch": 3.115866388308977,
"grad_norm": 1.1410573720932007,
"learning_rate": 2.5432943165801765e-06,
"loss": 0.4942,
"step": 997
},
{
"epoch": 3.1189979123173277,
"grad_norm": 0.7188895344734192,
"learning_rate": 2.5389652550151416e-06,
"loss": 0.4641,
"step": 998
},
{
"epoch": 3.1221294363256784,
"grad_norm": 0.7223486304283142,
"learning_rate": 2.5346360765802276e-06,
"loss": 0.4382,
"step": 999
},
{
"epoch": 3.125260960334029,
"grad_norm": 0.8269757032394409,
"learning_rate": 2.5303067942600933e-06,
"loss": 0.4502,
"step": 1000
},
{
"epoch": 3.12839248434238,
"grad_norm": 0.7081235647201538,
"learning_rate": 2.5259774210397104e-06,
"loss": 0.4389,
"step": 1001
},
{
"epoch": 3.1315240083507305,
"grad_norm": 0.7493525147438049,
"learning_rate": 2.5216479699043224e-06,
"loss": 0.4697,
"step": 1002
},
{
"epoch": 3.1346555323590812,
"grad_norm": 0.7633835077285767,
"learning_rate": 2.5173184538394054e-06,
"loss": 0.4535,
"step": 1003
},
{
"epoch": 3.137787056367432,
"grad_norm": 0.7765631079673767,
"learning_rate": 2.5129888858306333e-06,
"loss": 0.4616,
"step": 1004
},
{
"epoch": 3.140918580375783,
"grad_norm": 0.7026706337928772,
"learning_rate": 2.508659278863832e-06,
"loss": 0.4535,
"step": 1005
},
{
"epoch": 3.144050104384134,
"grad_norm": 0.8816738128662109,
"learning_rate": 2.5043296459249466e-06,
"loss": 0.4274,
"step": 1006
},
{
"epoch": 3.1471816283924845,
"grad_norm": 0.8072274327278137,
"learning_rate": 2.5e-06,
"loss": 0.4566,
"step": 1007
},
{
"epoch": 3.150313152400835,
"grad_norm": 0.7775781750679016,
"learning_rate": 2.4956703540750542e-06,
"loss": 0.4248,
"step": 1008
},
{
"epoch": 3.153444676409186,
"grad_norm": 0.9271306991577148,
"learning_rate": 2.4913407211361686e-06,
"loss": 0.4251,
"step": 1009
},
{
"epoch": 3.1565762004175366,
"grad_norm": 0.763867974281311,
"learning_rate": 2.487011114169368e-06,
"loss": 0.4525,
"step": 1010
},
{
"epoch": 3.1597077244258873,
"grad_norm": 0.8754820227622986,
"learning_rate": 2.4826815461605955e-06,
"loss": 0.4524,
"step": 1011
},
{
"epoch": 3.162839248434238,
"grad_norm": 0.8261513113975525,
"learning_rate": 2.4783520300956784e-06,
"loss": 0.4303,
"step": 1012
},
{
"epoch": 3.1659707724425887,
"grad_norm": 0.8629854917526245,
"learning_rate": 2.4740225789602905e-06,
"loss": 0.4628,
"step": 1013
},
{
"epoch": 3.1691022964509394,
"grad_norm": 0.7281518578529358,
"learning_rate": 2.469693205739907e-06,
"loss": 0.461,
"step": 1014
},
{
"epoch": 3.17223382045929,
"grad_norm": 0.757644772529602,
"learning_rate": 2.465363923419774e-06,
"loss": 0.4455,
"step": 1015
},
{
"epoch": 3.175365344467641,
"grad_norm": 0.8021314740180969,
"learning_rate": 2.4610347449848592e-06,
"loss": 0.4755,
"step": 1016
},
{
"epoch": 3.1784968684759916,
"grad_norm": 0.7467564940452576,
"learning_rate": 2.456705683419824e-06,
"loss": 0.4419,
"step": 1017
},
{
"epoch": 3.1816283924843423,
"grad_norm": 0.8575125336647034,
"learning_rate": 2.452376751708976e-06,
"loss": 0.4577,
"step": 1018
},
{
"epoch": 3.184759916492693,
"grad_norm": 0.8101590871810913,
"learning_rate": 2.448047962836235e-06,
"loss": 0.4546,
"step": 1019
},
{
"epoch": 3.1878914405010437,
"grad_norm": 0.8146190643310547,
"learning_rate": 2.443719329785093e-06,
"loss": 0.4219,
"step": 1020
},
{
"epoch": 3.1910229645093944,
"grad_norm": 1.2800556421279907,
"learning_rate": 2.4393908655385708e-06,
"loss": 0.4502,
"step": 1021
},
{
"epoch": 3.1941544885177455,
"grad_norm": 0.7362221479415894,
"learning_rate": 2.4350625830791875e-06,
"loss": 0.4802,
"step": 1022
},
{
"epoch": 3.1972860125260962,
"grad_norm": 0.8307201862335205,
"learning_rate": 2.430734495388915e-06,
"loss": 0.4808,
"step": 1023
},
{
"epoch": 3.200417536534447,
"grad_norm": 0.8467394709587097,
"learning_rate": 2.42640661544914e-06,
"loss": 0.4598,
"step": 1024
},
{
"epoch": 3.2035490605427976,
"grad_norm": 0.9570673108100891,
"learning_rate": 2.422078956240629e-06,
"loss": 0.4341,
"step": 1025
},
{
"epoch": 3.2066805845511483,
"grad_norm": 0.7976422309875488,
"learning_rate": 2.4177515307434824e-06,
"loss": 0.4285,
"step": 1026
},
{
"epoch": 3.209812108559499,
"grad_norm": 0.8241607546806335,
"learning_rate": 2.413424351937103e-06,
"loss": 0.4563,
"step": 1027
},
{
"epoch": 3.2129436325678498,
"grad_norm": 0.7191072702407837,
"learning_rate": 2.4090974328001526e-06,
"loss": 0.456,
"step": 1028
},
{
"epoch": 3.2160751565762005,
"grad_norm": 0.8112174868583679,
"learning_rate": 2.4047707863105133e-06,
"loss": 0.4592,
"step": 1029
},
{
"epoch": 3.219206680584551,
"grad_norm": 0.7170486450195312,
"learning_rate": 2.4004444254452522e-06,
"loss": 0.4559,
"step": 1030
},
{
"epoch": 3.222338204592902,
"grad_norm": 0.7092299461364746,
"learning_rate": 2.3961183631805748e-06,
"loss": 0.4504,
"step": 1031
},
{
"epoch": 3.2254697286012526,
"grad_norm": 0.8104662299156189,
"learning_rate": 2.391792612491796e-06,
"loss": 0.4327,
"step": 1032
},
{
"epoch": 3.2286012526096033,
"grad_norm": 0.8512858152389526,
"learning_rate": 2.387467186353293e-06,
"loss": 0.4506,
"step": 1033
},
{
"epoch": 3.231732776617954,
"grad_norm": 0.7987692952156067,
"learning_rate": 2.3831420977384715e-06,
"loss": 0.4257,
"step": 1034
},
{
"epoch": 3.2348643006263047,
"grad_norm": 0.7805537581443787,
"learning_rate": 2.3788173596197244e-06,
"loss": 0.4692,
"step": 1035
},
{
"epoch": 3.2379958246346554,
"grad_norm": 0.737304151058197,
"learning_rate": 2.374492984968392e-06,
"loss": 0.4308,
"step": 1036
},
{
"epoch": 3.241127348643006,
"grad_norm": 0.8113856315612793,
"learning_rate": 2.3701689867547247e-06,
"loss": 0.4668,
"step": 1037
},
{
"epoch": 3.244258872651357,
"grad_norm": 1.507103443145752,
"learning_rate": 2.3658453779478464e-06,
"loss": 0.4527,
"step": 1038
},
{
"epoch": 3.2473903966597075,
"grad_norm": 0.7973915338516235,
"learning_rate": 2.3615221715157095e-06,
"loss": 0.4741,
"step": 1039
},
{
"epoch": 3.2505219206680582,
"grad_norm": 0.8206940293312073,
"learning_rate": 2.3571993804250635e-06,
"loss": 0.4582,
"step": 1040
},
{
"epoch": 3.2536534446764094,
"grad_norm": 0.8519637584686279,
"learning_rate": 2.3528770176414076e-06,
"loss": 0.4412,
"step": 1041
},
{
"epoch": 3.25678496868476,
"grad_norm": 0.7542241811752319,
"learning_rate": 2.348555096128959e-06,
"loss": 0.4861,
"step": 1042
},
{
"epoch": 3.259916492693111,
"grad_norm": 0.8074842691421509,
"learning_rate": 2.3442336288506125e-06,
"loss": 0.4707,
"step": 1043
},
{
"epoch": 3.2630480167014615,
"grad_norm": 0.8164265751838684,
"learning_rate": 2.3399126287678975e-06,
"loss": 0.4417,
"step": 1044
},
{
"epoch": 3.266179540709812,
"grad_norm": 0.7689628005027771,
"learning_rate": 2.3355921088409435e-06,
"loss": 0.4815,
"step": 1045
},
{
"epoch": 3.269311064718163,
"grad_norm": 0.7709240913391113,
"learning_rate": 2.3312720820284423e-06,
"loss": 0.4444,
"step": 1046
},
{
"epoch": 3.2724425887265136,
"grad_norm": 0.7340330481529236,
"learning_rate": 2.326952561287602e-06,
"loss": 0.4694,
"step": 1047
},
{
"epoch": 3.2755741127348643,
"grad_norm": 0.7785805463790894,
"learning_rate": 2.3226335595741154e-06,
"loss": 0.4273,
"step": 1048
},
{
"epoch": 3.278705636743215,
"grad_norm": 0.9968108534812927,
"learning_rate": 2.3183150898421196e-06,
"loss": 0.4782,
"step": 1049
},
{
"epoch": 3.2818371607515657,
"grad_norm": 0.7823553681373596,
"learning_rate": 2.3139971650441533e-06,
"loss": 0.444,
"step": 1050
},
{
"epoch": 3.2849686847599164,
"grad_norm": 0.7317377924919128,
"learning_rate": 2.3096797981311252e-06,
"loss": 0.4692,
"step": 1051
},
{
"epoch": 3.288100208768267,
"grad_norm": 0.8546518683433533,
"learning_rate": 2.3053630020522643e-06,
"loss": 0.4632,
"step": 1052
},
{
"epoch": 3.291231732776618,
"grad_norm": 1.2284396886825562,
"learning_rate": 2.301046789755093e-06,
"loss": 0.4409,
"step": 1053
},
{
"epoch": 3.2943632567849686,
"grad_norm": 0.8000460863113403,
"learning_rate": 2.2967311741853797e-06,
"loss": 0.4456,
"step": 1054
},
{
"epoch": 3.2974947807933193,
"grad_norm": 0.7689793109893799,
"learning_rate": 2.2924161682871053e-06,
"loss": 0.45,
"step": 1055
},
{
"epoch": 3.30062630480167,
"grad_norm": 0.8032956719398499,
"learning_rate": 2.288101785002421e-06,
"loss": 0.4817,
"step": 1056
},
{
"epoch": 3.3037578288100207,
"grad_norm": 0.6831309795379639,
"learning_rate": 2.283788037271609e-06,
"loss": 0.4502,
"step": 1057
},
{
"epoch": 3.306889352818372,
"grad_norm": 0.8581221103668213,
"learning_rate": 2.279474938033048e-06,
"loss": 0.4569,
"step": 1058
},
{
"epoch": 3.3100208768267225,
"grad_norm": 0.7937221527099609,
"learning_rate": 2.2751625002231696e-06,
"loss": 0.4451,
"step": 1059
},
{
"epoch": 3.3131524008350732,
"grad_norm": 0.8095264434814453,
"learning_rate": 2.270850736776422e-06,
"loss": 0.4462,
"step": 1060
},
{
"epoch": 3.316283924843424,
"grad_norm": 0.9141370058059692,
"learning_rate": 2.2665396606252332e-06,
"loss": 0.419,
"step": 1061
},
{
"epoch": 3.3194154488517746,
"grad_norm": 0.8648553490638733,
"learning_rate": 2.262229284699965e-06,
"loss": 0.4562,
"step": 1062
},
{
"epoch": 3.3225469728601253,
"grad_norm": 0.7716917395591736,
"learning_rate": 2.2579196219288825e-06,
"loss": 0.4734,
"step": 1063
},
{
"epoch": 3.325678496868476,
"grad_norm": 0.8074535727500916,
"learning_rate": 2.2536106852381116e-06,
"loss": 0.4272,
"step": 1064
},
{
"epoch": 3.3288100208768268,
"grad_norm": 0.8989127278327942,
"learning_rate": 2.249302487551599e-06,
"loss": 0.4728,
"step": 1065
},
{
"epoch": 3.3319415448851775,
"grad_norm": 0.7662765383720398,
"learning_rate": 2.2449950417910777e-06,
"loss": 0.4457,
"step": 1066
},
{
"epoch": 3.335073068893528,
"grad_norm": 0.7789275050163269,
"learning_rate": 2.24068836087602e-06,
"loss": 0.3919,
"step": 1067
},
{
"epoch": 3.338204592901879,
"grad_norm": 0.7359098196029663,
"learning_rate": 2.2363824577236097e-06,
"loss": 0.4876,
"step": 1068
},
{
"epoch": 3.3413361169102296,
"grad_norm": 0.9960948824882507,
"learning_rate": 2.232077345248695e-06,
"loss": 0.4894,
"step": 1069
},
{
"epoch": 3.3444676409185803,
"grad_norm": 0.7446064352989197,
"learning_rate": 2.2277730363637537e-06,
"loss": 0.4765,
"step": 1070
},
{
"epoch": 3.347599164926931,
"grad_norm": 0.7674328088760376,
"learning_rate": 2.2234695439788534e-06,
"loss": 0.4468,
"step": 1071
},
{
"epoch": 3.3507306889352817,
"grad_norm": 0.8957347869873047,
"learning_rate": 2.2191668810016105e-06,
"loss": 0.4733,
"step": 1072
},
{
"epoch": 3.3538622129436324,
"grad_norm": 0.9110277891159058,
"learning_rate": 2.2148650603371573e-06,
"loss": 0.4399,
"step": 1073
},
{
"epoch": 3.356993736951983,
"grad_norm": 0.9158220291137695,
"learning_rate": 2.2105640948880976e-06,
"loss": 0.4609,
"step": 1074
},
{
"epoch": 3.3601252609603343,
"grad_norm": 0.7630184888839722,
"learning_rate": 2.206263997554469e-06,
"loss": 0.4674,
"step": 1075
},
{
"epoch": 3.3632567849686845,
"grad_norm": 0.7975273728370667,
"learning_rate": 2.20196478123371e-06,
"loss": 0.4478,
"step": 1076
},
{
"epoch": 3.3663883089770357,
"grad_norm": 0.8825351595878601,
"learning_rate": 2.19766645882061e-06,
"loss": 0.4687,
"step": 1077
},
{
"epoch": 3.3695198329853864,
"grad_norm": 0.8907671570777893,
"learning_rate": 2.1933690432072817e-06,
"loss": 0.4223,
"step": 1078
},
{
"epoch": 3.372651356993737,
"grad_norm": 0.7449545860290527,
"learning_rate": 2.189072547283118e-06,
"loss": 0.4595,
"step": 1079
},
{
"epoch": 3.375782881002088,
"grad_norm": 0.8460972309112549,
"learning_rate": 2.184776983934751e-06,
"loss": 0.4443,
"step": 1080
},
{
"epoch": 3.3789144050104385,
"grad_norm": 0.7524845600128174,
"learning_rate": 2.1804823660460196e-06,
"loss": 0.4235,
"step": 1081
},
{
"epoch": 3.382045929018789,
"grad_norm": 0.8448389768600464,
"learning_rate": 2.176188706497921e-06,
"loss": 0.4387,
"step": 1082
},
{
"epoch": 3.38517745302714,
"grad_norm": 0.7701981663703918,
"learning_rate": 2.1718960181685838e-06,
"loss": 0.4257,
"step": 1083
},
{
"epoch": 3.3883089770354906,
"grad_norm": 0.8178983330726624,
"learning_rate": 2.167604313933219e-06,
"loss": 0.4983,
"step": 1084
},
{
"epoch": 3.3914405010438413,
"grad_norm": 0.7477235198020935,
"learning_rate": 2.163313606664091e-06,
"loss": 0.4559,
"step": 1085
},
{
"epoch": 3.394572025052192,
"grad_norm": 0.8127962350845337,
"learning_rate": 2.1590239092304694e-06,
"loss": 0.453,
"step": 1086
},
{
"epoch": 3.3977035490605427,
"grad_norm": 0.7462339997291565,
"learning_rate": 2.1547352344985966e-06,
"loss": 0.4697,
"step": 1087
},
{
"epoch": 3.4008350730688934,
"grad_norm": 0.9641384482383728,
"learning_rate": 2.1504475953316483e-06,
"loss": 0.4495,
"step": 1088
},
{
"epoch": 3.403966597077244,
"grad_norm": 0.7612512707710266,
"learning_rate": 2.146161004589693e-06,
"loss": 0.4579,
"step": 1089
},
{
"epoch": 3.407098121085595,
"grad_norm": 0.7547829747200012,
"learning_rate": 2.141875475129655e-06,
"loss": 0.4334,
"step": 1090
},
{
"epoch": 3.4102296450939455,
"grad_norm": 0.8036953806877136,
"learning_rate": 2.137591019805278e-06,
"loss": 0.4466,
"step": 1091
},
{
"epoch": 3.4133611691022967,
"grad_norm": 0.7319284081459045,
"learning_rate": 2.1333076514670784e-06,
"loss": 0.4942,
"step": 1092
},
{
"epoch": 3.416492693110647,
"grad_norm": 0.8278589248657227,
"learning_rate": 2.1290253829623165e-06,
"loss": 0.4554,
"step": 1093
},
{
"epoch": 3.419624217118998,
"grad_norm": 0.733059287071228,
"learning_rate": 2.124744227134954e-06,
"loss": 0.4187,
"step": 1094
},
{
"epoch": 3.422755741127349,
"grad_norm": 0.8222727179527283,
"learning_rate": 2.1204641968256136e-06,
"loss": 0.4587,
"step": 1095
},
{
"epoch": 3.4258872651356995,
"grad_norm": 0.8296732902526855,
"learning_rate": 2.1161853048715438e-06,
"loss": 0.4868,
"step": 1096
},
{
"epoch": 3.4290187891440502,
"grad_norm": 0.7309690713882446,
"learning_rate": 2.1119075641065758e-06,
"loss": 0.4594,
"step": 1097
},
{
"epoch": 3.432150313152401,
"grad_norm": 1.4901788234710693,
"learning_rate": 2.1076309873610916e-06,
"loss": 0.4216,
"step": 1098
},
{
"epoch": 3.4352818371607516,
"grad_norm": 0.7993581891059875,
"learning_rate": 2.1033555874619794e-06,
"loss": 0.4842,
"step": 1099
},
{
"epoch": 3.4384133611691023,
"grad_norm": 0.8846752643585205,
"learning_rate": 2.0990813772325995e-06,
"loss": 0.4395,
"step": 1100
},
{
"epoch": 3.441544885177453,
"grad_norm": 1.0796778202056885,
"learning_rate": 2.0948083694927436e-06,
"loss": 0.4573,
"step": 1101
},
{
"epoch": 3.4446764091858038,
"grad_norm": 0.74623042345047,
"learning_rate": 2.090536577058595e-06,
"loss": 0.4563,
"step": 1102
},
{
"epoch": 3.4478079331941545,
"grad_norm": 0.8245521783828735,
"learning_rate": 2.086266012742692e-06,
"loss": 0.4433,
"step": 1103
},
{
"epoch": 3.450939457202505,
"grad_norm": 0.8003777265548706,
"learning_rate": 2.081996689353893e-06,
"loss": 0.4599,
"step": 1104
},
{
"epoch": 3.454070981210856,
"grad_norm": 0.8309001922607422,
"learning_rate": 2.0777286196973302e-06,
"loss": 0.485,
"step": 1105
},
{
"epoch": 3.4572025052192066,
"grad_norm": 0.8299122452735901,
"learning_rate": 2.0734618165743782e-06,
"loss": 0.4685,
"step": 1106
},
{
"epoch": 3.4603340292275573,
"grad_norm": 0.9347029328346252,
"learning_rate": 2.069196292782611e-06,
"loss": 0.4615,
"step": 1107
},
{
"epoch": 3.463465553235908,
"grad_norm": 0.7146593332290649,
"learning_rate": 2.064932061115766e-06,
"loss": 0.4433,
"step": 1108
},
{
"epoch": 3.4665970772442587,
"grad_norm": 0.7674420475959778,
"learning_rate": 2.0606691343637063e-06,
"loss": 0.4444,
"step": 1109
},
{
"epoch": 3.4697286012526094,
"grad_norm": 0.7925504446029663,
"learning_rate": 2.05640752531238e-06,
"loss": 0.4631,
"step": 1110
},
{
"epoch": 3.4728601252609606,
"grad_norm": 0.7755677700042725,
"learning_rate": 2.0521472467437825e-06,
"loss": 0.4709,
"step": 1111
},
{
"epoch": 3.4759916492693113,
"grad_norm": 0.8535795211791992,
"learning_rate": 2.0478883114359187e-06,
"loss": 0.442,
"step": 1112
},
{
"epoch": 3.479123173277662,
"grad_norm": 0.723953127861023,
"learning_rate": 2.043630732162767e-06,
"loss": 0.4782,
"step": 1113
},
{
"epoch": 3.4822546972860127,
"grad_norm": 0.7817316651344299,
"learning_rate": 2.0393745216942343e-06,
"loss": 0.4841,
"step": 1114
},
{
"epoch": 3.4853862212943634,
"grad_norm": 0.8878781795501709,
"learning_rate": 2.0351196927961268e-06,
"loss": 0.4673,
"step": 1115
},
{
"epoch": 3.488517745302714,
"grad_norm": 0.8645241856575012,
"learning_rate": 2.030866258230104e-06,
"loss": 0.432,
"step": 1116
},
{
"epoch": 3.491649269311065,
"grad_norm": 0.7294583320617676,
"learning_rate": 2.026614230753643e-06,
"loss": 0.4683,
"step": 1117
},
{
"epoch": 3.4947807933194155,
"grad_norm": 0.7412407994270325,
"learning_rate": 2.022363623120001e-06,
"loss": 0.4523,
"step": 1118
},
{
"epoch": 3.497912317327766,
"grad_norm": 0.8559291362762451,
"learning_rate": 2.0181144480781787e-06,
"loss": 0.4309,
"step": 1119
},
{
"epoch": 3.501043841336117,
"grad_norm": 0.7442825436592102,
"learning_rate": 2.0138667183728775e-06,
"loss": 0.4096,
"step": 1120
},
{
"epoch": 3.5041753653444676,
"grad_norm": 0.7605662941932678,
"learning_rate": 2.0096204467444645e-06,
"loss": 0.4404,
"step": 1121
},
{
"epoch": 3.5073068893528183,
"grad_norm": 0.7984277009963989,
"learning_rate": 2.005375645928935e-06,
"loss": 0.4661,
"step": 1122
},
{
"epoch": 3.510438413361169,
"grad_norm": 1.1044552326202393,
"learning_rate": 2.001132328657869e-06,
"loss": 0.4185,
"step": 1123
},
{
"epoch": 3.5135699373695197,
"grad_norm": 0.8210328817367554,
"learning_rate": 1.996890507658401e-06,
"loss": 0.4746,
"step": 1124
},
{
"epoch": 3.5167014613778704,
"grad_norm": 0.7302148342132568,
"learning_rate": 1.9926501956531758e-06,
"loss": 0.4333,
"step": 1125
},
{
"epoch": 3.519832985386221,
"grad_norm": 0.7713826894760132,
"learning_rate": 1.9884114053603114e-06,
"loss": 0.4485,
"step": 1126
},
{
"epoch": 3.522964509394572,
"grad_norm": 0.7386549711227417,
"learning_rate": 1.984174149493365e-06,
"loss": 0.4678,
"step": 1127
},
{
"epoch": 3.526096033402923,
"grad_norm": 0.8006004095077515,
"learning_rate": 1.979938440761287e-06,
"loss": 0.4755,
"step": 1128
},
{
"epoch": 3.5292275574112733,
"grad_norm": 1.0635333061218262,
"learning_rate": 1.97570429186839e-06,
"loss": 0.4985,
"step": 1129
},
{
"epoch": 3.5323590814196244,
"grad_norm": 0.7470075488090515,
"learning_rate": 1.9714717155143083e-06,
"loss": 0.4307,
"step": 1130
},
{
"epoch": 3.535490605427975,
"grad_norm": 0.8314558863639832,
"learning_rate": 1.967240724393959e-06,
"loss": 0.4894,
"step": 1131
},
{
"epoch": 3.538622129436326,
"grad_norm": 0.761443018913269,
"learning_rate": 1.963011331197506e-06,
"loss": 0.4653,
"step": 1132
},
{
"epoch": 3.5417536534446765,
"grad_norm": 0.7483212351799011,
"learning_rate": 1.9587835486103163e-06,
"loss": 0.4456,
"step": 1133
},
{
"epoch": 3.5448851774530272,
"grad_norm": 0.7794159054756165,
"learning_rate": 1.9545573893129306e-06,
"loss": 0.4707,
"step": 1134
},
{
"epoch": 3.548016701461378,
"grad_norm": 0.8016185760498047,
"learning_rate": 1.950332865981019e-06,
"loss": 0.4547,
"step": 1135
},
{
"epoch": 3.5511482254697286,
"grad_norm": 0.8089869618415833,
"learning_rate": 1.9461099912853453e-06,
"loss": 0.4499,
"step": 1136
},
{
"epoch": 3.5542797494780793,
"grad_norm": 0.7774782180786133,
"learning_rate": 1.9418887778917286e-06,
"loss": 0.4531,
"step": 1137
},
{
"epoch": 3.55741127348643,
"grad_norm": 0.7793645262718201,
"learning_rate": 1.937669238461003e-06,
"loss": 0.4553,
"step": 1138
},
{
"epoch": 3.5605427974947808,
"grad_norm": 0.8139959573745728,
"learning_rate": 1.933451385648985e-06,
"loss": 0.458,
"step": 1139
},
{
"epoch": 3.5636743215031315,
"grad_norm": 0.7517053484916687,
"learning_rate": 1.929235232106431e-06,
"loss": 0.4779,
"step": 1140
},
{
"epoch": 3.566805845511482,
"grad_norm": 0.8851562142372131,
"learning_rate": 1.925020790479e-06,
"loss": 0.425,
"step": 1141
},
{
"epoch": 3.569937369519833,
"grad_norm": 0.8129401803016663,
"learning_rate": 1.920808073407218e-06,
"loss": 0.4616,
"step": 1142
},
{
"epoch": 3.5730688935281836,
"grad_norm": 0.7110117077827454,
"learning_rate": 1.916597093526437e-06,
"loss": 0.4748,
"step": 1143
},
{
"epoch": 3.5762004175365343,
"grad_norm": 0.8268555402755737,
"learning_rate": 1.912387863466798e-06,
"loss": 0.4752,
"step": 1144
},
{
"epoch": 3.5793319415448854,
"grad_norm": 1.1036733388900757,
"learning_rate": 1.9081803958531967e-06,
"loss": 0.4879,
"step": 1145
},
{
"epoch": 3.5824634655532357,
"grad_norm": 0.8561109304428101,
"learning_rate": 1.9039747033052395e-06,
"loss": 0.4409,
"step": 1146
},
{
"epoch": 3.585594989561587,
"grad_norm": 0.7597541809082031,
"learning_rate": 1.8997707984372119e-06,
"loss": 0.4518,
"step": 1147
},
{
"epoch": 3.588726513569937,
"grad_norm": 0.7225353121757507,
"learning_rate": 1.8955686938580329e-06,
"loss": 0.4735,
"step": 1148
},
{
"epoch": 3.5918580375782883,
"grad_norm": 0.9409791231155396,
"learning_rate": 1.8913684021712264e-06,
"loss": 0.4249,
"step": 1149
},
{
"epoch": 3.594989561586639,
"grad_norm": 0.8757275342941284,
"learning_rate": 1.8871699359748763e-06,
"loss": 0.4479,
"step": 1150
},
{
"epoch": 3.5981210855949897,
"grad_norm": 0.8090003728866577,
"learning_rate": 1.882973307861593e-06,
"loss": 0.4846,
"step": 1151
},
{
"epoch": 3.6012526096033404,
"grad_norm": 0.7568825483322144,
"learning_rate": 1.8787785304184726e-06,
"loss": 0.4301,
"step": 1152
},
{
"epoch": 3.604384133611691,
"grad_norm": 0.8233815431594849,
"learning_rate": 1.8745856162270592e-06,
"loss": 0.4838,
"step": 1153
},
{
"epoch": 3.607515657620042,
"grad_norm": 0.7817628979682922,
"learning_rate": 1.8703945778633121e-06,
"loss": 0.4669,
"step": 1154
},
{
"epoch": 3.6106471816283925,
"grad_norm": 0.821304202079773,
"learning_rate": 1.8662054278975605e-06,
"loss": 0.4536,
"step": 1155
},
{
"epoch": 3.613778705636743,
"grad_norm": 0.9304089546203613,
"learning_rate": 1.8620181788944712e-06,
"loss": 0.4489,
"step": 1156
},
{
"epoch": 3.616910229645094,
"grad_norm": 0.8202670216560364,
"learning_rate": 1.8578328434130114e-06,
"loss": 0.4309,
"step": 1157
},
{
"epoch": 3.6200417536534446,
"grad_norm": 0.8890257477760315,
"learning_rate": 1.8536494340064051e-06,
"loss": 0.4736,
"step": 1158
},
{
"epoch": 3.6231732776617953,
"grad_norm": 0.7940590381622314,
"learning_rate": 1.8494679632221013e-06,
"loss": 0.4468,
"step": 1159
},
{
"epoch": 3.626304801670146,
"grad_norm": 0.8388273119926453,
"learning_rate": 1.845288443601736e-06,
"loss": 0.4753,
"step": 1160
},
{
"epoch": 3.6294363256784967,
"grad_norm": 0.9392285346984863,
"learning_rate": 1.84111088768109e-06,
"loss": 0.4736,
"step": 1161
},
{
"epoch": 3.632567849686848,
"grad_norm": 0.7411681413650513,
"learning_rate": 1.8369353079900576e-06,
"loss": 0.4303,
"step": 1162
},
{
"epoch": 3.635699373695198,
"grad_norm": 0.8722569942474365,
"learning_rate": 1.8327617170526014e-06,
"loss": 0.4604,
"step": 1163
},
{
"epoch": 3.6388308977035493,
"grad_norm": 0.786891758441925,
"learning_rate": 1.8285901273867229e-06,
"loss": 0.4756,
"step": 1164
},
{
"epoch": 3.6419624217118995,
"grad_norm": 0.8159083724021912,
"learning_rate": 1.824420551504419e-06,
"loss": 0.4675,
"step": 1165
},
{
"epoch": 3.6450939457202507,
"grad_norm": 0.8271334767341614,
"learning_rate": 1.8202530019116487e-06,
"loss": 0.4311,
"step": 1166
},
{
"epoch": 3.6482254697286014,
"grad_norm": 0.7617189288139343,
"learning_rate": 1.816087491108292e-06,
"loss": 0.4522,
"step": 1167
},
{
"epoch": 3.651356993736952,
"grad_norm": 0.7248172760009766,
"learning_rate": 1.8119240315881126e-06,
"loss": 0.465,
"step": 1168
},
{
"epoch": 3.654488517745303,
"grad_norm": 0.8606911897659302,
"learning_rate": 1.8077626358387235e-06,
"loss": 0.4524,
"step": 1169
},
{
"epoch": 3.6576200417536535,
"grad_norm": 0.8571308851242065,
"learning_rate": 1.8036033163415484e-06,
"loss": 0.4625,
"step": 1170
},
{
"epoch": 3.6607515657620042,
"grad_norm": 0.7239511609077454,
"learning_rate": 1.7994460855717812e-06,
"loss": 0.5025,
"step": 1171
},
{
"epoch": 3.663883089770355,
"grad_norm": 0.7958929538726807,
"learning_rate": 1.7952909559983544e-06,
"loss": 0.4382,
"step": 1172
},
{
"epoch": 3.6670146137787056,
"grad_norm": 0.7920124530792236,
"learning_rate": 1.7911379400838947e-06,
"loss": 0.4393,
"step": 1173
},
{
"epoch": 3.6701461377870563,
"grad_norm": 0.8072578310966492,
"learning_rate": 1.7869870502846903e-06,
"loss": 0.4627,
"step": 1174
},
{
"epoch": 3.673277661795407,
"grad_norm": 0.8586218357086182,
"learning_rate": 1.7828382990506543e-06,
"loss": 0.4456,
"step": 1175
},
{
"epoch": 3.6764091858037578,
"grad_norm": 0.8741613030433655,
"learning_rate": 1.7786916988252845e-06,
"loss": 0.4613,
"step": 1176
},
{
"epoch": 3.6795407098121085,
"grad_norm": 0.7691352367401123,
"learning_rate": 1.774547262045626e-06,
"loss": 0.4641,
"step": 1177
},
{
"epoch": 3.682672233820459,
"grad_norm": 0.7866089940071106,
"learning_rate": 1.7704050011422357e-06,
"loss": 0.4308,
"step": 1178
},
{
"epoch": 3.68580375782881,
"grad_norm": 0.9934884309768677,
"learning_rate": 1.7662649285391447e-06,
"loss": 0.4434,
"step": 1179
},
{
"epoch": 3.6889352818371606,
"grad_norm": 0.794385552406311,
"learning_rate": 1.7621270566538204e-06,
"loss": 0.4481,
"step": 1180
},
{
"epoch": 3.6920668058455117,
"grad_norm": 0.7573548555374146,
"learning_rate": 1.7579913978971296e-06,
"loss": 0.4525,
"step": 1181
},
{
"epoch": 3.695198329853862,
"grad_norm": 0.7073976993560791,
"learning_rate": 1.7538579646733023e-06,
"loss": 0.4214,
"step": 1182
},
{
"epoch": 3.698329853862213,
"grad_norm": 0.8009579181671143,
"learning_rate": 1.7497267693798902e-06,
"loss": 0.4542,
"step": 1183
},
{
"epoch": 3.701461377870564,
"grad_norm": 1.4488778114318848,
"learning_rate": 1.7455978244077348e-06,
"loss": 0.443,
"step": 1184
},
{
"epoch": 3.7045929018789145,
"grad_norm": 1.0529266595840454,
"learning_rate": 1.7414711421409292e-06,
"loss": 0.4908,
"step": 1185
},
{
"epoch": 3.7077244258872653,
"grad_norm": 0.757431149482727,
"learning_rate": 1.7373467349567775e-06,
"loss": 0.4454,
"step": 1186
},
{
"epoch": 3.710855949895616,
"grad_norm": 0.7086379528045654,
"learning_rate": 1.733224615225763e-06,
"loss": 0.4292,
"step": 1187
},
{
"epoch": 3.7139874739039667,
"grad_norm": 0.7454110383987427,
"learning_rate": 1.7291047953115049e-06,
"loss": 0.4421,
"step": 1188
},
{
"epoch": 3.7171189979123174,
"grad_norm": 0.804027795791626,
"learning_rate": 1.7249872875707257e-06,
"loss": 0.4819,
"step": 1189
},
{
"epoch": 3.720250521920668,
"grad_norm": 0.8159645199775696,
"learning_rate": 1.7208721043532146e-06,
"loss": 0.4628,
"step": 1190
},
{
"epoch": 3.723382045929019,
"grad_norm": 0.8451672196388245,
"learning_rate": 1.7167592580017866e-06,
"loss": 0.4773,
"step": 1191
},
{
"epoch": 3.7265135699373695,
"grad_norm": 0.920553982257843,
"learning_rate": 1.7126487608522492e-06,
"loss": 0.4787,
"step": 1192
},
{
"epoch": 3.72964509394572,
"grad_norm": 0.9169708490371704,
"learning_rate": 1.7085406252333613e-06,
"loss": 0.4543,
"step": 1193
},
{
"epoch": 3.732776617954071,
"grad_norm": 0.7245096564292908,
"learning_rate": 1.7044348634668023e-06,
"loss": 0.4599,
"step": 1194
},
{
"epoch": 3.7359081419624216,
"grad_norm": 0.835832417011261,
"learning_rate": 1.7003314878671284e-06,
"loss": 0.4794,
"step": 1195
},
{
"epoch": 3.7390396659707723,
"grad_norm": 0.8455896973609924,
"learning_rate": 1.696230510741742e-06,
"loss": 0.4329,
"step": 1196
},
{
"epoch": 3.742171189979123,
"grad_norm": 0.743611752986908,
"learning_rate": 1.692131944390849e-06,
"loss": 0.4455,
"step": 1197
},
{
"epoch": 3.745302713987474,
"grad_norm": 1.1334915161132812,
"learning_rate": 1.6880358011074272e-06,
"loss": 0.4673,
"step": 1198
},
{
"epoch": 3.7484342379958244,
"grad_norm": 0.6935724020004272,
"learning_rate": 1.6839420931771828e-06,
"loss": 0.4686,
"step": 1199
},
{
"epoch": 3.7515657620041756,
"grad_norm": 0.962006151676178,
"learning_rate": 1.6798508328785213e-06,
"loss": 0.4474,
"step": 1200
},
{
"epoch": 3.754697286012526,
"grad_norm": 1.1499714851379395,
"learning_rate": 1.6757620324825047e-06,
"loss": 0.4659,
"step": 1201
},
{
"epoch": 3.757828810020877,
"grad_norm": 0.7689645886421204,
"learning_rate": 1.6716757042528192e-06,
"loss": 0.4552,
"step": 1202
},
{
"epoch": 3.7609603340292277,
"grad_norm": 0.7482030391693115,
"learning_rate": 1.6675918604457352e-06,
"loss": 0.4747,
"step": 1203
},
{
"epoch": 3.7640918580375784,
"grad_norm": 0.7727032899856567,
"learning_rate": 1.6635105133100686e-06,
"loss": 0.4508,
"step": 1204
},
{
"epoch": 3.767223382045929,
"grad_norm": 0.8722149133682251,
"learning_rate": 1.6594316750871514e-06,
"loss": 0.4685,
"step": 1205
},
{
"epoch": 3.77035490605428,
"grad_norm": 0.771304726600647,
"learning_rate": 1.6553553580107884e-06,
"loss": 0.4418,
"step": 1206
},
{
"epoch": 3.7734864300626305,
"grad_norm": 0.767315149307251,
"learning_rate": 1.6512815743072214e-06,
"loss": 0.4532,
"step": 1207
},
{
"epoch": 3.776617954070981,
"grad_norm": 0.8825518488883972,
"learning_rate": 1.6472103361950976e-06,
"loss": 0.468,
"step": 1208
},
{
"epoch": 3.779749478079332,
"grad_norm": 0.8887981176376343,
"learning_rate": 1.6431416558854243e-06,
"loss": 0.4264,
"step": 1209
},
{
"epoch": 3.7828810020876826,
"grad_norm": 0.8399733304977417,
"learning_rate": 1.63907554558154e-06,
"loss": 0.4405,
"step": 1210
},
{
"epoch": 3.7860125260960333,
"grad_norm": 0.8112586140632629,
"learning_rate": 1.6350120174790751e-06,
"loss": 0.445,
"step": 1211
},
{
"epoch": 3.789144050104384,
"grad_norm": 0.860775351524353,
"learning_rate": 1.6309510837659137e-06,
"loss": 0.4557,
"step": 1212
},
{
"epoch": 3.7922755741127347,
"grad_norm": 0.8522343039512634,
"learning_rate": 1.626892756622161e-06,
"loss": 0.481,
"step": 1213
},
{
"epoch": 3.7954070981210855,
"grad_norm": 0.7927511930465698,
"learning_rate": 1.6228370482200988e-06,
"loss": 0.4328,
"step": 1214
},
{
"epoch": 3.798538622129436,
"grad_norm": 0.7350064516067505,
"learning_rate": 1.6187839707241604e-06,
"loss": 0.4604,
"step": 1215
},
{
"epoch": 3.801670146137787,
"grad_norm": 0.8363698124885559,
"learning_rate": 1.6147335362908847e-06,
"loss": 0.4271,
"step": 1216
},
{
"epoch": 3.804801670146138,
"grad_norm": 1.080613613128662,
"learning_rate": 1.610685757068885e-06,
"loss": 0.447,
"step": 1217
},
{
"epoch": 3.8079331941544883,
"grad_norm": 1.1507478952407837,
"learning_rate": 1.6066406451988104e-06,
"loss": 0.4664,
"step": 1218
},
{
"epoch": 3.8110647181628394,
"grad_norm": 0.7778187990188599,
"learning_rate": 1.6025982128133073e-06,
"loss": 0.456,
"step": 1219
},
{
"epoch": 3.81419624217119,
"grad_norm": 0.8383583426475525,
"learning_rate": 1.5985584720369876e-06,
"loss": 0.4684,
"step": 1220
},
{
"epoch": 3.817327766179541,
"grad_norm": 0.7743321061134338,
"learning_rate": 1.5945214349863914e-06,
"loss": 0.4567,
"step": 1221
},
{
"epoch": 3.8204592901878915,
"grad_norm": 0.8020774126052856,
"learning_rate": 1.5904871137699462e-06,
"loss": 0.4175,
"step": 1222
},
{
"epoch": 3.8235908141962422,
"grad_norm": 0.790318489074707,
"learning_rate": 1.5864555204879375e-06,
"loss": 0.469,
"step": 1223
},
{
"epoch": 3.826722338204593,
"grad_norm": 0.8583689332008362,
"learning_rate": 1.5824266672324652e-06,
"loss": 0.4931,
"step": 1224
},
{
"epoch": 3.8298538622129437,
"grad_norm": 0.7788206934928894,
"learning_rate": 1.5784005660874125e-06,
"loss": 0.4643,
"step": 1225
},
{
"epoch": 3.8329853862212944,
"grad_norm": 0.8385717868804932,
"learning_rate": 1.574377229128409e-06,
"loss": 0.4567,
"step": 1226
},
{
"epoch": 3.836116910229645,
"grad_norm": 0.8447727560997009,
"learning_rate": 1.5703566684227922e-06,
"loss": 0.42,
"step": 1227
},
{
"epoch": 3.8392484342379958,
"grad_norm": 0.7286496758460999,
"learning_rate": 1.5663388960295742e-06,
"loss": 0.4603,
"step": 1228
},
{
"epoch": 3.8423799582463465,
"grad_norm": 0.8493947982788086,
"learning_rate": 1.562323923999401e-06,
"loss": 0.4731,
"step": 1229
},
{
"epoch": 3.845511482254697,
"grad_norm": 0.8641151785850525,
"learning_rate": 1.5583117643745233e-06,
"loss": 0.4491,
"step": 1230
},
{
"epoch": 3.848643006263048,
"grad_norm": 0.9493702054023743,
"learning_rate": 1.5543024291887532e-06,
"loss": 0.411,
"step": 1231
},
{
"epoch": 3.8517745302713986,
"grad_norm": 0.7246205806732178,
"learning_rate": 1.5502959304674337e-06,
"loss": 0.4569,
"step": 1232
},
{
"epoch": 3.8549060542797493,
"grad_norm": 0.7576872110366821,
"learning_rate": 1.5462922802273994e-06,
"loss": 0.4548,
"step": 1233
},
{
"epoch": 3.8580375782881005,
"grad_norm": 0.7710747718811035,
"learning_rate": 1.5422914904769404e-06,
"loss": 0.447,
"step": 1234
},
{
"epoch": 3.8611691022964507,
"grad_norm": 0.7661204934120178,
"learning_rate": 1.5382935732157677e-06,
"loss": 0.4601,
"step": 1235
},
{
"epoch": 3.864300626304802,
"grad_norm": 0.7133070826530457,
"learning_rate": 1.5342985404349788e-06,
"loss": 0.4245,
"step": 1236
},
{
"epoch": 3.867432150313152,
"grad_norm": 0.7716459631919861,
"learning_rate": 1.5303064041170163e-06,
"loss": 0.4543,
"step": 1237
},
{
"epoch": 3.8705636743215033,
"grad_norm": 0.737501859664917,
"learning_rate": 1.5263171762356388e-06,
"loss": 0.4405,
"step": 1238
},
{
"epoch": 3.873695198329854,
"grad_norm": 0.7885998487472534,
"learning_rate": 1.5223308687558786e-06,
"loss": 0.4412,
"step": 1239
},
{
"epoch": 3.8768267223382047,
"grad_norm": 0.7516661882400513,
"learning_rate": 1.5183474936340092e-06,
"loss": 0.4185,
"step": 1240
},
{
"epoch": 3.8799582463465554,
"grad_norm": 0.8790446519851685,
"learning_rate": 1.5143670628175111e-06,
"loss": 0.444,
"step": 1241
},
{
"epoch": 3.883089770354906,
"grad_norm": 0.7695789337158203,
"learning_rate": 1.5103895882450315e-06,
"loss": 0.4615,
"step": 1242
},
{
"epoch": 3.886221294363257,
"grad_norm": 0.7859196662902832,
"learning_rate": 1.506415081846353e-06,
"loss": 0.4379,
"step": 1243
},
{
"epoch": 3.8893528183716075,
"grad_norm": 0.7771942615509033,
"learning_rate": 1.5024435555423522e-06,
"loss": 0.5013,
"step": 1244
},
{
"epoch": 3.892484342379958,
"grad_norm": 0.7415695190429688,
"learning_rate": 1.498475021244971e-06,
"loss": 0.4861,
"step": 1245
},
{
"epoch": 3.895615866388309,
"grad_norm": 0.7909391522407532,
"learning_rate": 1.4945094908571755e-06,
"loss": 0.4599,
"step": 1246
},
{
"epoch": 3.8987473903966596,
"grad_norm": 0.7749060988426208,
"learning_rate": 1.490546976272923e-06,
"loss": 0.3986,
"step": 1247
},
{
"epoch": 3.9018789144050103,
"grad_norm": 0.810681164264679,
"learning_rate": 1.4865874893771248e-06,
"loss": 0.4495,
"step": 1248
},
{
"epoch": 3.905010438413361,
"grad_norm": 0.8018531799316406,
"learning_rate": 1.4826310420456103e-06,
"loss": 0.4426,
"step": 1249
},
{
"epoch": 3.9081419624217117,
"grad_norm": 0.756064236164093,
"learning_rate": 1.4786776461450924e-06,
"loss": 0.4474,
"step": 1250
},
{
"epoch": 3.911273486430063,
"grad_norm": 0.7581740021705627,
"learning_rate": 1.4747273135331347e-06,
"loss": 0.4494,
"step": 1251
},
{
"epoch": 3.914405010438413,
"grad_norm": 0.7666076421737671,
"learning_rate": 1.4707800560581086e-06,
"loss": 0.4593,
"step": 1252
},
{
"epoch": 3.9175365344467643,
"grad_norm": 0.7339973449707031,
"learning_rate": 1.4668358855591664e-06,
"loss": 0.4682,
"step": 1253
},
{
"epoch": 3.9206680584551146,
"grad_norm": 0.8504599928855896,
"learning_rate": 1.4628948138661974e-06,
"loss": 0.4504,
"step": 1254
},
{
"epoch": 3.9237995824634657,
"grad_norm": 0.8332642912864685,
"learning_rate": 1.4589568527997985e-06,
"loss": 0.5024,
"step": 1255
},
{
"epoch": 3.9269311064718164,
"grad_norm": 0.7813694477081299,
"learning_rate": 1.4550220141712384e-06,
"loss": 0.4547,
"step": 1256
},
{
"epoch": 3.930062630480167,
"grad_norm": 0.842258632183075,
"learning_rate": 1.451090309782417e-06,
"loss": 0.4584,
"step": 1257
},
{
"epoch": 3.933194154488518,
"grad_norm": 0.8159133791923523,
"learning_rate": 1.4471617514258373e-06,
"loss": 0.4538,
"step": 1258
},
{
"epoch": 3.9363256784968685,
"grad_norm": 0.8117021322250366,
"learning_rate": 1.4432363508845626e-06,
"loss": 0.4315,
"step": 1259
},
{
"epoch": 3.9394572025052192,
"grad_norm": 0.8087465167045593,
"learning_rate": 1.4393141199321881e-06,
"loss": 0.4367,
"step": 1260
},
{
"epoch": 3.94258872651357,
"grad_norm": 0.7954697012901306,
"learning_rate": 1.435395070332801e-06,
"loss": 0.4515,
"step": 1261
},
{
"epoch": 3.9457202505219207,
"grad_norm": 0.7305286526679993,
"learning_rate": 1.4314792138409454e-06,
"loss": 0.4879,
"step": 1262
},
{
"epoch": 3.9488517745302714,
"grad_norm": 0.8883433938026428,
"learning_rate": 1.4275665622015908e-06,
"loss": 0.4489,
"step": 1263
},
{
"epoch": 3.951983298538622,
"grad_norm": 0.8176298141479492,
"learning_rate": 1.4236571271500909e-06,
"loss": 0.4583,
"step": 1264
},
{
"epoch": 3.9551148225469728,
"grad_norm": 0.8042430281639099,
"learning_rate": 1.4197509204121563e-06,
"loss": 0.4277,
"step": 1265
},
{
"epoch": 3.9582463465553235,
"grad_norm": 0.8153829574584961,
"learning_rate": 1.4158479537038095e-06,
"loss": 0.4389,
"step": 1266
},
{
"epoch": 3.961377870563674,
"grad_norm": 0.7908188104629517,
"learning_rate": 1.4119482387313588e-06,
"loss": 0.4421,
"step": 1267
},
{
"epoch": 3.964509394572025,
"grad_norm": 0.831758975982666,
"learning_rate": 1.4080517871913596e-06,
"loss": 0.4308,
"step": 1268
},
{
"epoch": 3.9676409185803756,
"grad_norm": 0.8191989064216614,
"learning_rate": 1.4041586107705758e-06,
"loss": 0.4654,
"step": 1269
},
{
"epoch": 3.9707724425887267,
"grad_norm": 0.9455055594444275,
"learning_rate": 1.4002687211459524e-06,
"loss": 0.4668,
"step": 1270
},
{
"epoch": 3.973903966597077,
"grad_norm": 0.9271034002304077,
"learning_rate": 1.396382129984572e-06,
"loss": 0.4414,
"step": 1271
},
{
"epoch": 3.977035490605428,
"grad_norm": 0.7652955651283264,
"learning_rate": 1.392498848943627e-06,
"loss": 0.4575,
"step": 1272
},
{
"epoch": 3.980167014613779,
"grad_norm": 0.7850046157836914,
"learning_rate": 1.3886188896703816e-06,
"loss": 0.4554,
"step": 1273
},
{
"epoch": 3.9832985386221296,
"grad_norm": 0.7194349765777588,
"learning_rate": 1.3847422638021357e-06,
"loss": 0.437,
"step": 1274
},
{
"epoch": 3.9864300626304803,
"grad_norm": 0.8726270198822021,
"learning_rate": 1.3808689829661899e-06,
"loss": 0.4657,
"step": 1275
},
{
"epoch": 3.989561586638831,
"grad_norm": 0.7741451263427734,
"learning_rate": 1.3769990587798146e-06,
"loss": 0.3931,
"step": 1276
}
],
"logging_steps": 1,
"max_steps": 1914,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 319,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 3.866690661704217e+19,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}