{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9989561586638831,
"eval_steps": 500,
"global_step": 319,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.003131524008350731,
"grad_norm": 13.917898178100586,
"learning_rate": 5.0000000000000004e-08,
"loss": 4.1051,
"step": 1
},
{
"epoch": 0.006263048016701462,
"grad_norm": 17.327869415283203,
"learning_rate": 1.0000000000000001e-07,
"loss": 4.1048,
"step": 2
},
{
"epoch": 0.009394572025052192,
"grad_norm": 14.063946723937988,
"learning_rate": 1.5000000000000002e-07,
"loss": 4.0741,
"step": 3
},
{
"epoch": 0.012526096033402923,
"grad_norm": 16.817699432373047,
"learning_rate": 2.0000000000000002e-07,
"loss": 4.2002,
"step": 4
},
{
"epoch": 0.015657620041753653,
"grad_norm": 14.47036361694336,
"learning_rate": 2.5000000000000004e-07,
"loss": 4.2652,
"step": 5
},
{
"epoch": 0.018789144050104383,
"grad_norm": 14.474193572998047,
"learning_rate": 3.0000000000000004e-07,
"loss": 4.0888,
"step": 6
},
{
"epoch": 0.021920668058455117,
"grad_norm": 14.865458488464355,
"learning_rate": 3.5000000000000004e-07,
"loss": 4.0014,
"step": 7
},
{
"epoch": 0.025052192066805846,
"grad_norm": 15.338888168334961,
"learning_rate": 4.0000000000000003e-07,
"loss": 4.13,
"step": 8
},
{
"epoch": 0.028183716075156576,
"grad_norm": 15.154336929321289,
"learning_rate": 4.5000000000000003e-07,
"loss": 4.2493,
"step": 9
},
{
"epoch": 0.031315240083507306,
"grad_norm": 15.919597625732422,
"learning_rate": 5.000000000000001e-07,
"loss": 4.0535,
"step": 10
},
{
"epoch": 0.03444676409185804,
"grad_norm": 14.981926918029785,
"learning_rate": 5.5e-07,
"loss": 3.9064,
"step": 11
},
{
"epoch": 0.037578288100208766,
"grad_norm": 13.36101245880127,
"learning_rate": 6.000000000000001e-07,
"loss": 4.1939,
"step": 12
},
{
"epoch": 0.0407098121085595,
"grad_norm": 15.58773422241211,
"learning_rate": 6.5e-07,
"loss": 4.18,
"step": 13
},
{
"epoch": 0.04384133611691023,
"grad_norm": 13.560139656066895,
"learning_rate": 7.000000000000001e-07,
"loss": 3.9414,
"step": 14
},
{
"epoch": 0.04697286012526096,
"grad_norm": 12.307971954345703,
"learning_rate": 7.5e-07,
"loss": 3.8836,
"step": 15
},
{
"epoch": 0.05010438413361169,
"grad_norm": 14.533182144165039,
"learning_rate": 8.000000000000001e-07,
"loss": 4.1551,
"step": 16
},
{
"epoch": 0.05323590814196242,
"grad_norm": 13.453729629516602,
"learning_rate": 8.500000000000001e-07,
"loss": 4.0048,
"step": 17
},
{
"epoch": 0.05636743215031315,
"grad_norm": 13.45992374420166,
"learning_rate": 9.000000000000001e-07,
"loss": 4.0745,
"step": 18
},
{
"epoch": 0.059498956158663886,
"grad_norm": 11.857145309448242,
"learning_rate": 9.500000000000001e-07,
"loss": 3.9871,
"step": 19
},
{
"epoch": 0.06263048016701461,
"grad_norm": 11.872294425964355,
"learning_rate": 1.0000000000000002e-06,
"loss": 3.8959,
"step": 20
},
{
"epoch": 0.06576200417536535,
"grad_norm": 12.969825744628906,
"learning_rate": 1.0500000000000001e-06,
"loss": 4.0308,
"step": 21
},
{
"epoch": 0.06889352818371608,
"grad_norm": 12.33769416809082,
"learning_rate": 1.1e-06,
"loss": 3.9341,
"step": 22
},
{
"epoch": 0.0720250521920668,
"grad_norm": 12.669405937194824,
"learning_rate": 1.1500000000000002e-06,
"loss": 3.8511,
"step": 23
},
{
"epoch": 0.07515657620041753,
"grad_norm": 10.677213668823242,
"learning_rate": 1.2000000000000002e-06,
"loss": 3.7764,
"step": 24
},
{
"epoch": 0.07828810020876827,
"grad_norm": 10.366402626037598,
"learning_rate": 1.25e-06,
"loss": 3.5291,
"step": 25
},
{
"epoch": 0.081419624217119,
"grad_norm": 11.211421012878418,
"learning_rate": 1.3e-06,
"loss": 3.5765,
"step": 26
},
{
"epoch": 0.08455114822546973,
"grad_norm": 11.313716888427734,
"learning_rate": 1.3500000000000002e-06,
"loss": 3.4849,
"step": 27
},
{
"epoch": 0.08768267223382047,
"grad_norm": 10.41294002532959,
"learning_rate": 1.4000000000000001e-06,
"loss": 3.2653,
"step": 28
},
{
"epoch": 0.09081419624217119,
"grad_norm": 10.40064525604248,
"learning_rate": 1.45e-06,
"loss": 3.3384,
"step": 29
},
{
"epoch": 0.09394572025052192,
"grad_norm": 10.05427074432373,
"learning_rate": 1.5e-06,
"loss": 3.2257,
"step": 30
},
{
"epoch": 0.09707724425887265,
"grad_norm": 9.583163261413574,
"learning_rate": 1.5500000000000002e-06,
"loss": 3.1371,
"step": 31
},
{
"epoch": 0.10020876826722339,
"grad_norm": 10.09977912902832,
"learning_rate": 1.6000000000000001e-06,
"loss": 3.0658,
"step": 32
},
{
"epoch": 0.10334029227557412,
"grad_norm": 9.271486282348633,
"learning_rate": 1.6500000000000003e-06,
"loss": 2.9693,
"step": 33
},
{
"epoch": 0.10647181628392484,
"grad_norm": 10.687992095947266,
"learning_rate": 1.7000000000000002e-06,
"loss": 2.95,
"step": 34
},
{
"epoch": 0.10960334029227557,
"grad_norm": 8.762290000915527,
"learning_rate": 1.75e-06,
"loss": 2.8286,
"step": 35
},
{
"epoch": 0.1127348643006263,
"grad_norm": 10.13785171508789,
"learning_rate": 1.8000000000000001e-06,
"loss": 2.3664,
"step": 36
},
{
"epoch": 0.11586638830897704,
"grad_norm": 18.301353454589844,
"learning_rate": 1.85e-06,
"loss": 2.5533,
"step": 37
},
{
"epoch": 0.11899791231732777,
"grad_norm": 11.490377426147461,
"learning_rate": 1.9000000000000002e-06,
"loss": 2.6133,
"step": 38
},
{
"epoch": 0.12212943632567849,
"grad_norm": 15.614163398742676,
"learning_rate": 1.9500000000000004e-06,
"loss": 2.3596,
"step": 39
},
{
"epoch": 0.12526096033402923,
"grad_norm": 17.757442474365234,
"learning_rate": 2.0000000000000003e-06,
"loss": 2.3491,
"step": 40
},
{
"epoch": 0.12839248434237996,
"grad_norm": 17.18431854248047,
"learning_rate": 2.05e-06,
"loss": 2.2361,
"step": 41
},
{
"epoch": 0.1315240083507307,
"grad_norm": 16.149789810180664,
"learning_rate": 2.1000000000000002e-06,
"loss": 2.1457,
"step": 42
},
{
"epoch": 0.13465553235908143,
"grad_norm": 15.256914138793945,
"learning_rate": 2.15e-06,
"loss": 2.12,
"step": 43
},
{
"epoch": 0.13778705636743216,
"grad_norm": 15.537406921386719,
"learning_rate": 2.2e-06,
"loss": 2.1877,
"step": 44
},
{
"epoch": 0.1409185803757829,
"grad_norm": 7.947713851928711,
"learning_rate": 2.25e-06,
"loss": 2.1648,
"step": 45
},
{
"epoch": 0.1440501043841336,
"grad_norm": 8.818676948547363,
"learning_rate": 2.3000000000000004e-06,
"loss": 2.134,
"step": 46
},
{
"epoch": 0.14718162839248433,
"grad_norm": 5.175768852233887,
"learning_rate": 2.35e-06,
"loss": 2.0796,
"step": 47
},
{
"epoch": 0.15031315240083507,
"grad_norm": 6.750611305236816,
"learning_rate": 2.4000000000000003e-06,
"loss": 1.9174,
"step": 48
},
{
"epoch": 0.1534446764091858,
"grad_norm": 6.2147979736328125,
"learning_rate": 2.4500000000000003e-06,
"loss": 1.8065,
"step": 49
},
{
"epoch": 0.15657620041753653,
"grad_norm": 13.291611671447754,
"learning_rate": 2.5e-06,
"loss": 1.7061,
"step": 50
},
{
"epoch": 0.15970772442588727,
"grad_norm": 7.251201629638672,
"learning_rate": 2.55e-06,
"loss": 1.7924,
"step": 51
},
{
"epoch": 0.162839248434238,
"grad_norm": 5.2126054763793945,
"learning_rate": 2.6e-06,
"loss": 1.6735,
"step": 52
},
{
"epoch": 0.16597077244258873,
"grad_norm": 5.435528755187988,
"learning_rate": 2.6500000000000005e-06,
"loss": 1.6265,
"step": 53
},
{
"epoch": 0.16910229645093947,
"grad_norm": 4.505807399749756,
"learning_rate": 2.7000000000000004e-06,
"loss": 1.4851,
"step": 54
},
{
"epoch": 0.1722338204592902,
"grad_norm": 5.128388404846191,
"learning_rate": 2.7500000000000004e-06,
"loss": 1.5832,
"step": 55
},
{
"epoch": 0.17536534446764093,
"grad_norm": 16.935827255249023,
"learning_rate": 2.8000000000000003e-06,
"loss": 1.6553,
"step": 56
},
{
"epoch": 0.17849686847599164,
"grad_norm": 3.664458990097046,
"learning_rate": 2.85e-06,
"loss": 1.5,
"step": 57
},
{
"epoch": 0.18162839248434237,
"grad_norm": 7.763802528381348,
"learning_rate": 2.9e-06,
"loss": 1.367,
"step": 58
},
{
"epoch": 0.1847599164926931,
"grad_norm": 3.2216155529022217,
"learning_rate": 2.95e-06,
"loss": 1.3863,
"step": 59
},
{
"epoch": 0.18789144050104384,
"grad_norm": 4.384445667266846,
"learning_rate": 3e-06,
"loss": 1.4247,
"step": 60
},
{
"epoch": 0.19102296450939457,
"grad_norm": 4.8080878257751465,
"learning_rate": 3.05e-06,
"loss": 1.3257,
"step": 61
},
{
"epoch": 0.1941544885177453,
"grad_norm": 4.154761791229248,
"learning_rate": 3.1000000000000004e-06,
"loss": 1.321,
"step": 62
},
{
"epoch": 0.19728601252609604,
"grad_norm": 6.4742112159729,
"learning_rate": 3.1500000000000003e-06,
"loss": 1.2823,
"step": 63
},
{
"epoch": 0.20041753653444677,
"grad_norm": 2.583422899246216,
"learning_rate": 3.2000000000000003e-06,
"loss": 1.2136,
"step": 64
},
{
"epoch": 0.2035490605427975,
"grad_norm": 4.1933488845825195,
"learning_rate": 3.2500000000000002e-06,
"loss": 1.1855,
"step": 65
},
{
"epoch": 0.20668058455114824,
"grad_norm": 4.11049747467041,
"learning_rate": 3.3000000000000006e-06,
"loss": 1.2389,
"step": 66
},
{
"epoch": 0.20981210855949894,
"grad_norm": 2.264458417892456,
"learning_rate": 3.3500000000000005e-06,
"loss": 1.0651,
"step": 67
},
{
"epoch": 0.21294363256784968,
"grad_norm": 2.5408174991607666,
"learning_rate": 3.4000000000000005e-06,
"loss": 1.1389,
"step": 68
},
{
"epoch": 0.2160751565762004,
"grad_norm": 7.82421350479126,
"learning_rate": 3.45e-06,
"loss": 1.0956,
"step": 69
},
{
"epoch": 0.21920668058455114,
"grad_norm": 3.070939064025879,
"learning_rate": 3.5e-06,
"loss": 1.0451,
"step": 70
},
{
"epoch": 0.22233820459290188,
"grad_norm": 2.6310527324676514,
"learning_rate": 3.5500000000000003e-06,
"loss": 1.0538,
"step": 71
},
{
"epoch": 0.2254697286012526,
"grad_norm": 7.630155563354492,
"learning_rate": 3.6000000000000003e-06,
"loss": 1.0052,
"step": 72
},
{
"epoch": 0.22860125260960334,
"grad_norm": 6.950636863708496,
"learning_rate": 3.65e-06,
"loss": 1.0473,
"step": 73
},
{
"epoch": 0.23173277661795408,
"grad_norm": 2.2703945636749268,
"learning_rate": 3.7e-06,
"loss": 1.0576,
"step": 74
},
{
"epoch": 0.2348643006263048,
"grad_norm": 3.3817710876464844,
"learning_rate": 3.7500000000000005e-06,
"loss": 1.0177,
"step": 75
},
{
"epoch": 0.23799582463465555,
"grad_norm": 7.266414642333984,
"learning_rate": 3.8000000000000005e-06,
"loss": 1.0645,
"step": 76
},
{
"epoch": 0.24112734864300625,
"grad_norm": 5.782608509063721,
"learning_rate": 3.85e-06,
"loss": 1.0162,
"step": 77
},
{
"epoch": 0.24425887265135698,
"grad_norm": 2.7938575744628906,
"learning_rate": 3.900000000000001e-06,
"loss": 0.9664,
"step": 78
},
{
"epoch": 0.24739039665970772,
"grad_norm": 6.681935787200928,
"learning_rate": 3.95e-06,
"loss": 0.953,
"step": 79
},
{
"epoch": 0.25052192066805845,
"grad_norm": 2.253279209136963,
"learning_rate": 4.000000000000001e-06,
"loss": 0.9568,
"step": 80
},
{
"epoch": 0.2536534446764092,
"grad_norm": 1.4875826835632324,
"learning_rate": 4.05e-06,
"loss": 0.9448,
"step": 81
},
{
"epoch": 0.2567849686847599,
"grad_norm": 2.4987940788269043,
"learning_rate": 4.1e-06,
"loss": 0.9393,
"step": 82
},
{
"epoch": 0.2599164926931106,
"grad_norm": 4.712948322296143,
"learning_rate": 4.15e-06,
"loss": 0.9532,
"step": 83
},
{
"epoch": 0.2630480167014614,
"grad_norm": 6.9030632972717285,
"learning_rate": 4.2000000000000004e-06,
"loss": 0.96,
"step": 84
},
{
"epoch": 0.2661795407098121,
"grad_norm": 3.4780967235565186,
"learning_rate": 4.25e-06,
"loss": 0.8993,
"step": 85
},
{
"epoch": 0.26931106471816285,
"grad_norm": 1.526064395904541,
"learning_rate": 4.3e-06,
"loss": 0.9021,
"step": 86
},
{
"epoch": 0.27244258872651356,
"grad_norm": 10.727686882019043,
"learning_rate": 4.350000000000001e-06,
"loss": 0.856,
"step": 87
},
{
"epoch": 0.2755741127348643,
"grad_norm": 12.483160972595215,
"learning_rate": 4.4e-06,
"loss": 0.9357,
"step": 88
},
{
"epoch": 0.278705636743215,
"grad_norm": 6.544492244720459,
"learning_rate": 4.450000000000001e-06,
"loss": 0.9168,
"step": 89
},
{
"epoch": 0.2818371607515658,
"grad_norm": 1.178139567375183,
"learning_rate": 4.5e-06,
"loss": 0.8748,
"step": 90
},
{
"epoch": 0.2849686847599165,
"grad_norm": 1.711506962776184,
"learning_rate": 4.5500000000000005e-06,
"loss": 0.8425,
"step": 91
},
{
"epoch": 0.2881002087682672,
"grad_norm": 3.281747341156006,
"learning_rate": 4.600000000000001e-06,
"loss": 0.8491,
"step": 92
},
{
"epoch": 0.29123173277661796,
"grad_norm": 2.2964377403259277,
"learning_rate": 4.65e-06,
"loss": 0.8038,
"step": 93
},
{
"epoch": 0.29436325678496866,
"grad_norm": 1.959700345993042,
"learning_rate": 4.7e-06,
"loss": 0.8439,
"step": 94
},
{
"epoch": 0.2974947807933194,
"grad_norm": 3.979384183883667,
"learning_rate": 4.75e-06,
"loss": 0.8839,
"step": 95
},
{
"epoch": 0.30062630480167013,
"grad_norm": 1.4721262454986572,
"learning_rate": 4.800000000000001e-06,
"loss": 0.845,
"step": 96
},
{
"epoch": 0.3037578288100209,
"grad_norm": 2.862248659133911,
"learning_rate": 4.85e-06,
"loss": 0.7748,
"step": 97
},
{
"epoch": 0.3068893528183716,
"grad_norm": 3.7439088821411133,
"learning_rate": 4.9000000000000005e-06,
"loss": 0.8145,
"step": 98
},
{
"epoch": 0.31002087682672236,
"grad_norm": 1.6654618978500366,
"learning_rate": 4.95e-06,
"loss": 0.8326,
"step": 99
},
{
"epoch": 0.31315240083507306,
"grad_norm": 7.8437581062316895,
"learning_rate": 5e-06,
"loss": 0.8666,
"step": 100
},
{
"epoch": 0.3162839248434238,
"grad_norm": 6.429738521575928,
"learning_rate": 4.999996250830422e-06,
"loss": 0.836,
"step": 101
},
{
"epoch": 0.31941544885177453,
"grad_norm": 2.6017794609069824,
"learning_rate": 4.9999850033329326e-06,
"loss": 0.7785,
"step": 102
},
{
"epoch": 0.32254697286012524,
"grad_norm": 1.0575449466705322,
"learning_rate": 4.999966257541265e-06,
"loss": 0.7639,
"step": 103
},
{
"epoch": 0.325678496868476,
"grad_norm": 2.6932010650634766,
"learning_rate": 4.999940013511647e-06,
"loss": 0.8214,
"step": 104
},
{
"epoch": 0.3288100208768267,
"grad_norm": 2.925288438796997,
"learning_rate": 4.999906271322792e-06,
"loss": 0.8797,
"step": 105
},
{
"epoch": 0.33194154488517746,
"grad_norm": 1.3570607900619507,
"learning_rate": 4.9998650310759035e-06,
"loss": 0.792,
"step": 106
},
{
"epoch": 0.33507306889352817,
"grad_norm": 5.126713752746582,
"learning_rate": 4.999816292894676e-06,
"loss": 0.8352,
"step": 107
},
{
"epoch": 0.33820459290187893,
"grad_norm": 1.8966432809829712,
"learning_rate": 4.99976005692529e-06,
"loss": 0.7663,
"step": 108
},
{
"epoch": 0.34133611691022964,
"grad_norm": 1.3100829124450684,
"learning_rate": 4.999696323336418e-06,
"loss": 0.771,
"step": 109
},
{
"epoch": 0.3444676409185804,
"grad_norm": 2.4025354385375977,
"learning_rate": 4.999625092319218e-06,
"loss": 0.7618,
"step": 110
},
{
"epoch": 0.3475991649269311,
"grad_norm": 1.130232810974121,
"learning_rate": 4.999546364087334e-06,
"loss": 0.7705,
"step": 111
},
{
"epoch": 0.35073068893528186,
"grad_norm": 3.430262327194214,
"learning_rate": 4.999460138876901e-06,
"loss": 0.77,
"step": 112
},
{
"epoch": 0.35386221294363257,
"grad_norm": 1.1272103786468506,
"learning_rate": 4.999366416946536e-06,
"loss": 0.7133,
"step": 113
},
{
"epoch": 0.3569937369519833,
"grad_norm": 1.1740471124649048,
"learning_rate": 4.999265198577342e-06,
"loss": 0.7684,
"step": 114
},
{
"epoch": 0.36012526096033404,
"grad_norm": 1.3138248920440674,
"learning_rate": 4.999156484072907e-06,
"loss": 0.7888,
"step": 115
},
{
"epoch": 0.36325678496868474,
"grad_norm": 1.061711311340332,
"learning_rate": 4.999040273759304e-06,
"loss": 0.7484,
"step": 116
},
{
"epoch": 0.3663883089770355,
"grad_norm": 1.4682390689849854,
"learning_rate": 4.998916567985083e-06,
"loss": 0.7296,
"step": 117
},
{
"epoch": 0.3695198329853862,
"grad_norm": 2.884068250656128,
"learning_rate": 4.998785367121284e-06,
"loss": 0.7662,
"step": 118
},
{
"epoch": 0.37265135699373697,
"grad_norm": 0.9812761545181274,
"learning_rate": 4.9986466715614205e-06,
"loss": 0.7307,
"step": 119
},
{
"epoch": 0.3757828810020877,
"grad_norm": 2.2237496376037598,
"learning_rate": 4.998500481721484e-06,
"loss": 0.6761,
"step": 120
},
{
"epoch": 0.37891440501043844,
"grad_norm": 1.4004178047180176,
"learning_rate": 4.998346798039952e-06,
"loss": 0.7505,
"step": 121
},
{
"epoch": 0.38204592901878914,
"grad_norm": 5.54975700378418,
"learning_rate": 4.99818562097777e-06,
"loss": 0.7615,
"step": 122
},
{
"epoch": 0.38517745302713985,
"grad_norm": 6.17140531539917,
"learning_rate": 4.9980169510183624e-06,
"loss": 0.7002,
"step": 123
},
{
"epoch": 0.3883089770354906,
"grad_norm": 4.974380016326904,
"learning_rate": 4.997840788667628e-06,
"loss": 0.7449,
"step": 124
},
{
"epoch": 0.3914405010438413,
"grad_norm": 1.4133399724960327,
"learning_rate": 4.997657134453937e-06,
"loss": 0.7442,
"step": 125
},
{
"epoch": 0.3945720250521921,
"grad_norm": 1.868915319442749,
"learning_rate": 4.9974659889281295e-06,
"loss": 0.7104,
"step": 126
},
{
"epoch": 0.3977035490605428,
"grad_norm": 1.2599350214004517,
"learning_rate": 4.997267352663514e-06,
"loss": 0.7385,
"step": 127
},
{
"epoch": 0.40083507306889354,
"grad_norm": 1.4353641271591187,
"learning_rate": 4.997061226255869e-06,
"loss": 0.7081,
"step": 128
},
{
"epoch": 0.40396659707724425,
"grad_norm": 3.2492141723632812,
"learning_rate": 4.996847610323437e-06,
"loss": 0.7859,
"step": 129
},
{
"epoch": 0.407098121085595,
"grad_norm": 9.599719047546387,
"learning_rate": 4.996626505506923e-06,
"loss": 0.7241,
"step": 130
},
{
"epoch": 0.4102296450939457,
"grad_norm": 10.053650856018066,
"learning_rate": 4.996397912469494e-06,
"loss": 0.6841,
"step": 131
},
{
"epoch": 0.4133611691022965,
"grad_norm": 1.323876976966858,
"learning_rate": 4.996161831896777e-06,
"loss": 0.7317,
"step": 132
},
{
"epoch": 0.4164926931106472,
"grad_norm": 1.4180598258972168,
"learning_rate": 4.9959182644968594e-06,
"loss": 0.692,
"step": 133
},
{
"epoch": 0.4196242171189979,
"grad_norm": 1.2194396257400513,
"learning_rate": 4.99566721100028e-06,
"loss": 0.7068,
"step": 134
},
{
"epoch": 0.42275574112734865,
"grad_norm": 1.0984960794448853,
"learning_rate": 4.995408672160031e-06,
"loss": 0.6946,
"step": 135
},
{
"epoch": 0.42588726513569936,
"grad_norm": 1.9341071844100952,
"learning_rate": 4.995142648751561e-06,
"loss": 0.7467,
"step": 136
},
{
"epoch": 0.4290187891440501,
"grad_norm": 1.9960932731628418,
"learning_rate": 4.9948691415727594e-06,
"loss": 0.7379,
"step": 137
},
{
"epoch": 0.4321503131524008,
"grad_norm": 0.8743917942047119,
"learning_rate": 4.994588151443968e-06,
"loss": 0.66,
"step": 138
},
{
"epoch": 0.4352818371607516,
"grad_norm": 0.8655261993408203,
"learning_rate": 4.99429967920797e-06,
"loss": 0.6646,
"step": 139
},
{
"epoch": 0.4384133611691023,
"grad_norm": 5.462070941925049,
"learning_rate": 4.994003725729992e-06,
"loss": 0.643,
"step": 140
},
{
"epoch": 0.44154488517745305,
"grad_norm": 2.1401469707489014,
"learning_rate": 4.993700291897695e-06,
"loss": 0.6639,
"step": 141
},
{
"epoch": 0.44467640918580376,
"grad_norm": 1.8219833374023438,
"learning_rate": 4.9933893786211815e-06,
"loss": 0.6673,
"step": 142
},
{
"epoch": 0.44780793319415446,
"grad_norm": 1.641079306602478,
"learning_rate": 4.993070986832984e-06,
"loss": 0.658,
"step": 143
},
{
"epoch": 0.4509394572025052,
"grad_norm": 1.1739819049835205,
"learning_rate": 4.992745117488066e-06,
"loss": 0.6826,
"step": 144
},
{
"epoch": 0.45407098121085593,
"grad_norm": 2.309185743331909,
"learning_rate": 4.9924117715638185e-06,
"loss": 0.6536,
"step": 145
},
{
"epoch": 0.4572025052192067,
"grad_norm": 1.09304940700531,
"learning_rate": 4.99207095006006e-06,
"loss": 0.721,
"step": 146
},
{
"epoch": 0.4603340292275574,
"grad_norm": 0.9056984186172485,
"learning_rate": 4.991722653999025e-06,
"loss": 0.7019,
"step": 147
},
{
"epoch": 0.46346555323590816,
"grad_norm": 1.8440625667572021,
"learning_rate": 4.991366884425374e-06,
"loss": 0.707,
"step": 148
},
{
"epoch": 0.46659707724425886,
"grad_norm": 1.2244676351547241,
"learning_rate": 4.991003642406177e-06,
"loss": 0.6407,
"step": 149
},
{
"epoch": 0.4697286012526096,
"grad_norm": 0.9258589744567871,
"learning_rate": 4.99063292903092e-06,
"loss": 0.6954,
"step": 150
},
{
"epoch": 0.47286012526096033,
"grad_norm": 4.176390647888184,
"learning_rate": 4.990254745411496e-06,
"loss": 0.6812,
"step": 151
},
{
"epoch": 0.4759916492693111,
"grad_norm": 1.4322530031204224,
"learning_rate": 4.989869092682205e-06,
"loss": 0.6808,
"step": 152
},
{
"epoch": 0.4791231732776618,
"grad_norm": 0.8017717003822327,
"learning_rate": 4.989475971999748e-06,
"loss": 0.687,
"step": 153
},
{
"epoch": 0.4822546972860125,
"grad_norm": 1.5641374588012695,
"learning_rate": 4.989075384543228e-06,
"loss": 0.6599,
"step": 154
},
{
"epoch": 0.48538622129436326,
"grad_norm": 1.1522141695022583,
"learning_rate": 4.98866733151414e-06,
"loss": 0.6546,
"step": 155
},
{
"epoch": 0.48851774530271397,
"grad_norm": 0.8593171238899231,
"learning_rate": 4.988251814136372e-06,
"loss": 0.6857,
"step": 156
},
{
"epoch": 0.49164926931106473,
"grad_norm": 2.668159246444702,
"learning_rate": 4.9878288336562e-06,
"loss": 0.661,
"step": 157
},
{
"epoch": 0.49478079331941544,
"grad_norm": 0.9953671097755432,
"learning_rate": 4.987398391342285e-06,
"loss": 0.6512,
"step": 158
},
{
"epoch": 0.4979123173277662,
"grad_norm": 1.042872667312622,
"learning_rate": 4.986960488485667e-06,
"loss": 0.6311,
"step": 159
},
{
"epoch": 0.5010438413361169,
"grad_norm": 0.9070663452148438,
"learning_rate": 4.9865151263997645e-06,
"loss": 0.675,
"step": 160
},
{
"epoch": 0.5041753653444676,
"grad_norm": 0.8460433483123779,
"learning_rate": 4.986062306420367e-06,
"loss": 0.6635,
"step": 161
},
{
"epoch": 0.5073068893528184,
"grad_norm": 1.2639834880828857,
"learning_rate": 4.985602029905635e-06,
"loss": 0.6327,
"step": 162
},
{
"epoch": 0.5104384133611691,
"grad_norm": 0.8775074481964111,
"learning_rate": 4.985134298236091e-06,
"loss": 0.644,
"step": 163
},
{
"epoch": 0.5135699373695198,
"grad_norm": 1.2031961679458618,
"learning_rate": 4.98465911281462e-06,
"loss": 0.6254,
"step": 164
},
{
"epoch": 0.5167014613778705,
"grad_norm": 0.892494797706604,
"learning_rate": 4.984176475066463e-06,
"loss": 0.7122,
"step": 165
},
{
"epoch": 0.5198329853862212,
"grad_norm": 2.7122485637664795,
"learning_rate": 4.983686386439212e-06,
"loss": 0.6679,
"step": 166
},
{
"epoch": 0.5229645093945721,
"grad_norm": 0.9344426989555359,
"learning_rate": 4.983188848402806e-06,
"loss": 0.6319,
"step": 167
},
{
"epoch": 0.5260960334029228,
"grad_norm": 1.4093577861785889,
"learning_rate": 4.982683862449531e-06,
"loss": 0.6425,
"step": 168
},
{
"epoch": 0.5292275574112735,
"grad_norm": 1.1285009384155273,
"learning_rate": 4.982171430094007e-06,
"loss": 0.6298,
"step": 169
},
{
"epoch": 0.5323590814196242,
"grad_norm": 1.952778935432434,
"learning_rate": 4.981651552873193e-06,
"loss": 0.7066,
"step": 170
},
{
"epoch": 0.535490605427975,
"grad_norm": 5.133765697479248,
"learning_rate": 4.981124232346374e-06,
"loss": 0.6634,
"step": 171
},
{
"epoch": 0.5386221294363257,
"grad_norm": 0.9770542979240417,
"learning_rate": 4.980589470095161e-06,
"loss": 0.7121,
"step": 172
},
{
"epoch": 0.5417536534446764,
"grad_norm": 0.8414323925971985,
"learning_rate": 4.980047267723487e-06,
"loss": 0.6397,
"step": 173
},
{
"epoch": 0.5448851774530271,
"grad_norm": 1.9173879623413086,
"learning_rate": 4.979497626857596e-06,
"loss": 0.6228,
"step": 174
},
{
"epoch": 0.5480167014613778,
"grad_norm": 1.0823363065719604,
"learning_rate": 4.978940549146048e-06,
"loss": 0.6475,
"step": 175
},
{
"epoch": 0.5511482254697286,
"grad_norm": 3.715353488922119,
"learning_rate": 4.978376036259706e-06,
"loss": 0.7127,
"step": 176
},
{
"epoch": 0.5542797494780793,
"grad_norm": 0.981584370136261,
"learning_rate": 4.9778040898917325e-06,
"loss": 0.6468,
"step": 177
},
{
"epoch": 0.55741127348643,
"grad_norm": 1.70566987991333,
"learning_rate": 4.977224711757587e-06,
"loss": 0.6476,
"step": 178
},
{
"epoch": 0.5605427974947808,
"grad_norm": 0.9217923283576965,
"learning_rate": 4.976637903595019e-06,
"loss": 0.6731,
"step": 179
},
{
"epoch": 0.5636743215031316,
"grad_norm": 0.8994677662849426,
"learning_rate": 4.976043667164063e-06,
"loss": 0.6562,
"step": 180
},
{
"epoch": 0.5668058455114823,
"grad_norm": 1.1613017320632935,
"learning_rate": 4.975442004247034e-06,
"loss": 0.6417,
"step": 181
},
{
"epoch": 0.569937369519833,
"grad_norm": 1.6041977405548096,
"learning_rate": 4.974832916648521e-06,
"loss": 0.6029,
"step": 182
},
{
"epoch": 0.5730688935281837,
"grad_norm": 1.7978405952453613,
"learning_rate": 4.974216406195383e-06,
"loss": 0.6269,
"step": 183
},
{
"epoch": 0.5762004175365344,
"grad_norm": 1.6021920442581177,
"learning_rate": 4.973592474736739e-06,
"loss": 0.6149,
"step": 184
},
{
"epoch": 0.5793319415448852,
"grad_norm": 0.8973568677902222,
"learning_rate": 4.972961124143971e-06,
"loss": 0.6648,
"step": 185
},
{
"epoch": 0.5824634655532359,
"grad_norm": 1.9432591199874878,
"learning_rate": 4.972322356310711e-06,
"loss": 0.6299,
"step": 186
},
{
"epoch": 0.5855949895615866,
"grad_norm": 4.457028388977051,
"learning_rate": 4.971676173152839e-06,
"loss": 0.656,
"step": 187
},
{
"epoch": 0.5887265135699373,
"grad_norm": 2.0989716053009033,
"learning_rate": 4.971022576608473e-06,
"loss": 0.6539,
"step": 188
},
{
"epoch": 0.5918580375782881,
"grad_norm": 1.0646967887878418,
"learning_rate": 4.97036156863797e-06,
"loss": 0.6727,
"step": 189
},
{
"epoch": 0.5949895615866388,
"grad_norm": 1.6522265672683716,
"learning_rate": 4.969693151223914e-06,
"loss": 0.6643,
"step": 190
},
{
"epoch": 0.5981210855949896,
"grad_norm": 1.7503505945205688,
"learning_rate": 4.969017326371115e-06,
"loss": 0.6402,
"step": 191
},
{
"epoch": 0.6012526096033403,
"grad_norm": 1.2341989278793335,
"learning_rate": 4.968334096106597e-06,
"loss": 0.6413,
"step": 192
},
{
"epoch": 0.6043841336116911,
"grad_norm": 3.089054584503174,
"learning_rate": 4.967643462479597e-06,
"loss": 0.6825,
"step": 193
},
{
"epoch": 0.6075156576200418,
"grad_norm": 2.711623430252075,
"learning_rate": 4.966945427561557e-06,
"loss": 0.65,
"step": 194
},
{
"epoch": 0.6106471816283925,
"grad_norm": 4.641184329986572,
"learning_rate": 4.966239993446118e-06,
"loss": 0.6229,
"step": 195
},
{
"epoch": 0.6137787056367432,
"grad_norm": 1.7984074354171753,
"learning_rate": 4.965527162249114e-06,
"loss": 0.6473,
"step": 196
},
{
"epoch": 0.6169102296450939,
"grad_norm": 1.1643115282058716,
"learning_rate": 4.964806936108566e-06,
"loss": 0.6404,
"step": 197
},
{
"epoch": 0.6200417536534447,
"grad_norm": 2.1877920627593994,
"learning_rate": 4.9640793171846725e-06,
"loss": 0.6185,
"step": 198
},
{
"epoch": 0.6231732776617954,
"grad_norm": 1.7970566749572754,
"learning_rate": 4.963344307659807e-06,
"loss": 0.634,
"step": 199
},
{
"epoch": 0.6263048016701461,
"grad_norm": 1.6014361381530762,
"learning_rate": 4.96260190973851e-06,
"loss": 0.6562,
"step": 200
},
{
"epoch": 0.6294363256784968,
"grad_norm": 0.8743320107460022,
"learning_rate": 4.961852125647482e-06,
"loss": 0.6133,
"step": 201
},
{
"epoch": 0.6325678496868476,
"grad_norm": 1.9526551961898804,
"learning_rate": 4.961094957635578e-06,
"loss": 0.6451,
"step": 202
},
{
"epoch": 0.6356993736951984,
"grad_norm": 3.6597347259521484,
"learning_rate": 4.960330407973798e-06,
"loss": 0.6386,
"step": 203
},
{
"epoch": 0.6388308977035491,
"grad_norm": 1.7180207967758179,
"learning_rate": 4.959558478955283e-06,
"loss": 0.6688,
"step": 204
},
{
"epoch": 0.6419624217118998,
"grad_norm": 0.9058470129966736,
"learning_rate": 4.958779172895308e-06,
"loss": 0.6161,
"step": 205
},
{
"epoch": 0.6450939457202505,
"grad_norm": 1.0031033754348755,
"learning_rate": 4.957992492131274e-06,
"loss": 0.6437,
"step": 206
},
{
"epoch": 0.6482254697286013,
"grad_norm": 1.5846725702285767,
"learning_rate": 4.9571984390226985e-06,
"loss": 0.6332,
"step": 207
},
{
"epoch": 0.651356993736952,
"grad_norm": 1.9951609373092651,
"learning_rate": 4.956397015951215e-06,
"loss": 0.636,
"step": 208
},
{
"epoch": 0.6544885177453027,
"grad_norm": 1.4122583866119385,
"learning_rate": 4.95558822532056e-06,
"loss": 0.6586,
"step": 209
},
{
"epoch": 0.6576200417536534,
"grad_norm": 1.2243481874465942,
"learning_rate": 4.954772069556568e-06,
"loss": 0.6313,
"step": 210
},
{
"epoch": 0.6607515657620042,
"grad_norm": 0.8756356835365295,
"learning_rate": 4.953948551107164e-06,
"loss": 0.6406,
"step": 211
},
{
"epoch": 0.6638830897703549,
"grad_norm": 2.9979734420776367,
"learning_rate": 4.953117672442356e-06,
"loss": 0.5803,
"step": 212
},
{
"epoch": 0.6670146137787056,
"grad_norm": 2.1859359741210938,
"learning_rate": 4.952279436054229e-06,
"loss": 0.6607,
"step": 213
},
{
"epoch": 0.6701461377870563,
"grad_norm": 0.6929755806922913,
"learning_rate": 4.9514338444569346e-06,
"loss": 0.5989,
"step": 214
},
{
"epoch": 0.673277661795407,
"grad_norm": 1.0361783504486084,
"learning_rate": 4.950580900186685e-06,
"loss": 0.6654,
"step": 215
},
{
"epoch": 0.6764091858037579,
"grad_norm": 1.210898518562317,
"learning_rate": 4.9497206058017475e-06,
"loss": 0.6213,
"step": 216
},
{
"epoch": 0.6795407098121086,
"grad_norm": 1.200990080833435,
"learning_rate": 4.948852963882434e-06,
"loss": 0.6654,
"step": 217
},
{
"epoch": 0.6826722338204593,
"grad_norm": 1.481831669807434,
"learning_rate": 4.947977977031093e-06,
"loss": 0.6474,
"step": 218
},
{
"epoch": 0.68580375782881,
"grad_norm": 0.9883334636688232,
"learning_rate": 4.947095647872103e-06,
"loss": 0.6735,
"step": 219
},
{
"epoch": 0.6889352818371608,
"grad_norm": 0.7436536550521851,
"learning_rate": 4.946205979051868e-06,
"loss": 0.6456,
"step": 220
},
{
"epoch": 0.6920668058455115,
"grad_norm": 0.9057570099830627,
"learning_rate": 4.945308973238802e-06,
"loss": 0.6228,
"step": 221
},
{
"epoch": 0.6951983298538622,
"grad_norm": 1.341081142425537,
"learning_rate": 4.944404633123324e-06,
"loss": 0.6417,
"step": 222
},
{
"epoch": 0.6983298538622129,
"grad_norm": 0.7958157062530518,
"learning_rate": 4.943492961417859e-06,
"loss": 0.6494,
"step": 223
},
{
"epoch": 0.7014613778705637,
"grad_norm": 1.216025471687317,
"learning_rate": 4.9425739608568106e-06,
"loss": 0.6566,
"step": 224
},
{
"epoch": 0.7045929018789144,
"grad_norm": 0.9774854779243469,
"learning_rate": 4.9416476341965735e-06,
"loss": 0.6171,
"step": 225
},
{
"epoch": 0.7077244258872651,
"grad_norm": 2.1562681198120117,
"learning_rate": 4.940713984215512e-06,
"loss": 0.629,
"step": 226
},
{
"epoch": 0.7108559498956158,
"grad_norm": 1.9521286487579346,
"learning_rate": 4.9397730137139556e-06,
"loss": 0.6475,
"step": 227
},
{
"epoch": 0.7139874739039666,
"grad_norm": 1.5749104022979736,
"learning_rate": 4.9388247255141895e-06,
"loss": 0.6053,
"step": 228
},
{
"epoch": 0.7171189979123174,
"grad_norm": 1.2008254528045654,
"learning_rate": 4.937869122460449e-06,
"loss": 0.6052,
"step": 229
},
{
"epoch": 0.7202505219206681,
"grad_norm": 1.0774102210998535,
"learning_rate": 4.93690620741891e-06,
"loss": 0.6099,
"step": 230
},
{
"epoch": 0.7233820459290188,
"grad_norm": 1.0929996967315674,
"learning_rate": 4.935935983277675e-06,
"loss": 0.6363,
"step": 231
},
{
"epoch": 0.7265135699373695,
"grad_norm": 0.8830653429031372,
"learning_rate": 4.934958452946774e-06,
"loss": 0.6136,
"step": 232
},
{
"epoch": 0.7296450939457203,
"grad_norm": 3.591218948364258,
"learning_rate": 4.933973619358147e-06,
"loss": 0.5962,
"step": 233
},
{
"epoch": 0.732776617954071,
"grad_norm": 2.5797672271728516,
"learning_rate": 4.932981485465643e-06,
"loss": 0.6405,
"step": 234
},
{
"epoch": 0.7359081419624217,
"grad_norm": 1.0467664003372192,
"learning_rate": 4.9319820542450025e-06,
"loss": 0.6155,
"step": 235
},
{
"epoch": 0.7390396659707724,
"grad_norm": 0.8099795579910278,
"learning_rate": 4.930975328693856e-06,
"loss": 0.5615,
"step": 236
},
{
"epoch": 0.7421711899791231,
"grad_norm": 0.8906702995300293,
"learning_rate": 4.92996131183171e-06,
"loss": 0.6501,
"step": 237
},
{
"epoch": 0.7453027139874739,
"grad_norm": 1.0871416330337524,
"learning_rate": 4.928940006699944e-06,
"loss": 0.6282,
"step": 238
},
{
"epoch": 0.7484342379958246,
"grad_norm": 1.3209614753723145,
"learning_rate": 4.927911416361792e-06,
"loss": 0.598,
"step": 239
},
{
"epoch": 0.7515657620041754,
"grad_norm": 1.2252682447433472,
"learning_rate": 4.926875543902344e-06,
"loss": 0.6433,
"step": 240
},
{
"epoch": 0.7546972860125261,
"grad_norm": 1.0569007396697998,
"learning_rate": 4.9258323924285285e-06,
"loss": 0.5927,
"step": 241
},
{
"epoch": 0.7578288100208769,
"grad_norm": 0.9309014081954956,
"learning_rate": 4.924781965069106e-06,
"loss": 0.5927,
"step": 242
},
{
"epoch": 0.7609603340292276,
"grad_norm": 1.0200378894805908,
"learning_rate": 4.923724264974662e-06,
"loss": 0.6064,
"step": 243
},
{
"epoch": 0.7640918580375783,
"grad_norm": 1.0533075332641602,
"learning_rate": 4.922659295317593e-06,
"loss": 0.6373,
"step": 244
},
{
"epoch": 0.767223382045929,
"grad_norm": 0.7889382839202881,
"learning_rate": 4.921587059292102e-06,
"loss": 0.5887,
"step": 245
},
{
"epoch": 0.7703549060542797,
"grad_norm": 0.7943588495254517,
"learning_rate": 4.920507560114183e-06,
"loss": 0.593,
"step": 246
},
{
"epoch": 0.7734864300626305,
"grad_norm": 0.8247205018997192,
"learning_rate": 4.919420801021617e-06,
"loss": 0.6151,
"step": 247
},
{
"epoch": 0.7766179540709812,
"grad_norm": 0.9979158043861389,
"learning_rate": 4.91832678527396e-06,
"loss": 0.6019,
"step": 248
},
{
"epoch": 0.7797494780793319,
"grad_norm": 0.9346868991851807,
"learning_rate": 4.917225516152532e-06,
"loss": 0.6098,
"step": 249
},
{
"epoch": 0.7828810020876826,
"grad_norm": 0.7487881183624268,
"learning_rate": 4.916116996960408e-06,
"loss": 0.5965,
"step": 250
},
{
"epoch": 0.7860125260960334,
"grad_norm": 0.821576714515686,
"learning_rate": 4.915001231022411e-06,
"loss": 0.6483,
"step": 251
},
{
"epoch": 0.7891440501043842,
"grad_norm": 1.0413196086883545,
"learning_rate": 4.913878221685096e-06,
"loss": 0.6108,
"step": 252
},
{
"epoch": 0.7922755741127349,
"grad_norm": 0.9560331702232361,
"learning_rate": 4.912747972316745e-06,
"loss": 0.5758,
"step": 253
},
{
"epoch": 0.7954070981210856,
"grad_norm": 0.8964638113975525,
"learning_rate": 4.911610486307356e-06,
"loss": 0.6432,
"step": 254
},
{
"epoch": 0.7985386221294363,
"grad_norm": 0.8418346047401428,
"learning_rate": 4.910465767068631e-06,
"loss": 0.6027,
"step": 255
},
{
"epoch": 0.8016701461377871,
"grad_norm": 1.792371153831482,
"learning_rate": 4.909313818033966e-06,
"loss": 0.6198,
"step": 256
},
{
"epoch": 0.8048016701461378,
"grad_norm": 1.036665439605713,
"learning_rate": 4.908154642658446e-06,
"loss": 0.6255,
"step": 257
},
{
"epoch": 0.8079331941544885,
"grad_norm": 0.7592151165008545,
"learning_rate": 4.906988244418823e-06,
"loss": 0.6035,
"step": 258
},
{
"epoch": 0.8110647181628392,
"grad_norm": 0.8843073844909668,
"learning_rate": 4.90581462681352e-06,
"loss": 0.6299,
"step": 259
},
{
"epoch": 0.81419624217119,
"grad_norm": 0.9489964246749878,
"learning_rate": 4.9046337933626086e-06,
"loss": 0.5869,
"step": 260
},
{
"epoch": 0.8173277661795407,
"grad_norm": 0.851691722869873,
"learning_rate": 4.903445747607806e-06,
"loss": 0.603,
"step": 261
},
{
"epoch": 0.8204592901878914,
"grad_norm": 1.3722106218338013,
"learning_rate": 4.902250493112458e-06,
"loss": 0.5939,
"step": 262
},
{
"epoch": 0.8235908141962421,
"grad_norm": 1.1002827882766724,
"learning_rate": 4.901048033461537e-06,
"loss": 0.6452,
"step": 263
},
{
"epoch": 0.826722338204593,
"grad_norm": 0.8428632020950317,
"learning_rate": 4.89983837226162e-06,
"loss": 0.5956,
"step": 264
},
{
"epoch": 0.8298538622129437,
"grad_norm": 0.7666584849357605,
"learning_rate": 4.898621513140889e-06,
"loss": 0.6067,
"step": 265
},
{
"epoch": 0.8329853862212944,
"grad_norm": 0.8413611054420471,
"learning_rate": 4.897397459749113e-06,
"loss": 0.5985,
"step": 266
},
{
"epoch": 0.8361169102296451,
"grad_norm": 2.3374335765838623,
"learning_rate": 4.896166215757638e-06,
"loss": 0.5885,
"step": 267
},
{
"epoch": 0.8392484342379958,
"grad_norm": 2.236640214920044,
"learning_rate": 4.894927784859377e-06,
"loss": 0.6408,
"step": 268
},
{
"epoch": 0.8423799582463466,
"grad_norm": 0.9715856313705444,
"learning_rate": 4.893682170768802e-06,
"loss": 0.5954,
"step": 269
},
{
"epoch": 0.8455114822546973,
"grad_norm": 1.0249912738800049,
"learning_rate": 4.892429377221928e-06,
"loss": 0.6186,
"step": 270
},
{
"epoch": 0.848643006263048,
"grad_norm": 1.255426049232483,
"learning_rate": 4.891169407976302e-06,
"loss": 0.6351,
"step": 271
},
{
"epoch": 0.8517745302713987,
"grad_norm": 0.9339559674263,
"learning_rate": 4.889902266810995e-06,
"loss": 0.5944,
"step": 272
},
{
"epoch": 0.8549060542797495,
"grad_norm": 1.2473429441452026,
"learning_rate": 4.888627957526589e-06,
"loss": 0.544,
"step": 273
},
{
"epoch": 0.8580375782881002,
"grad_norm": 1.0589442253112793,
"learning_rate": 4.887346483945166e-06,
"loss": 0.5543,
"step": 274
},
{
"epoch": 0.8611691022964509,
"grad_norm": 0.9844024777412415,
"learning_rate": 4.886057849910294e-06,
"loss": 0.5941,
"step": 275
},
{
"epoch": 0.8643006263048016,
"grad_norm": 2.88578200340271,
"learning_rate": 4.8847620592870196e-06,
"loss": 0.6124,
"step": 276
},
{
"epoch": 0.8674321503131524,
"grad_norm": 0.7496054172515869,
"learning_rate": 4.8834591159618524e-06,
"loss": 0.6006,
"step": 277
},
{
"epoch": 0.8705636743215032,
"grad_norm": 0.7403052449226379,
"learning_rate": 4.88214902384276e-06,
"loss": 0.5911,
"step": 278
},
{
"epoch": 0.8736951983298539,
"grad_norm": 0.9003771543502808,
"learning_rate": 4.880831786859146e-06,
"loss": 0.6347,
"step": 279
},
{
"epoch": 0.8768267223382046,
"grad_norm": 1.0345501899719238,
"learning_rate": 4.879507408961847e-06,
"loss": 0.6111,
"step": 280
},
{
"epoch": 0.8799582463465553,
"grad_norm": 1.4385879039764404,
"learning_rate": 4.878175894123116e-06,
"loss": 0.6454,
"step": 281
},
{
"epoch": 0.8830897703549061,
"grad_norm": 0.8469482064247131,
"learning_rate": 4.8768372463366145e-06,
"loss": 0.6163,
"step": 282
},
{
"epoch": 0.8862212943632568,
"grad_norm": 0.8859589695930481,
"learning_rate": 4.875491469617395e-06,
"loss": 0.6144,
"step": 283
},
{
"epoch": 0.8893528183716075,
"grad_norm": 1.8436834812164307,
"learning_rate": 4.874138568001895e-06,
"loss": 0.6275,
"step": 284
},
{
"epoch": 0.8924843423799582,
"grad_norm": 0.6646101474761963,
"learning_rate": 4.87277854554792e-06,
"loss": 0.615,
"step": 285
},
{
"epoch": 0.8956158663883089,
"grad_norm": 1.0070925951004028,
"learning_rate": 4.871411406334633e-06,
"loss": 0.5898,
"step": 286
},
{
"epoch": 0.8987473903966597,
"grad_norm": 0.9785194993019104,
"learning_rate": 4.870037154462545e-06,
"loss": 0.5992,
"step": 287
},
{
"epoch": 0.9018789144050104,
"grad_norm": 0.7244889736175537,
"learning_rate": 4.868655794053497e-06,
"loss": 0.6078,
"step": 288
},
{
"epoch": 0.9050104384133612,
"grad_norm": 1.4496444463729858,
"learning_rate": 4.8672673292506535e-06,
"loss": 0.5855,
"step": 289
},
{
"epoch": 0.9081419624217119,
"grad_norm": 1.8514957427978516,
"learning_rate": 4.865871764218486e-06,
"loss": 0.5707,
"step": 290
},
{
"epoch": 0.9112734864300627,
"grad_norm": 0.8439773321151733,
"learning_rate": 4.864469103142763e-06,
"loss": 0.5562,
"step": 291
},
{
"epoch": 0.9144050104384134,
"grad_norm": 0.8146086931228638,
"learning_rate": 4.8630593502305355e-06,
"loss": 0.6161,
"step": 292
},
{
"epoch": 0.9175365344467641,
"grad_norm": 0.8920315504074097,
"learning_rate": 4.861642509710126e-06,
"loss": 0.6139,
"step": 293
},
{
"epoch": 0.9206680584551148,
"grad_norm": 1.4980088472366333,
"learning_rate": 4.860218585831116e-06,
"loss": 0.6187,
"step": 294
},
{
"epoch": 0.9237995824634656,
"grad_norm": 0.9910127520561218,
"learning_rate": 4.8587875828643285e-06,
"loss": 0.5852,
"step": 295
},
{
"epoch": 0.9269311064718163,
"grad_norm": 0.819600522518158,
"learning_rate": 4.857349505101823e-06,
"loss": 0.6172,
"step": 296
},
{
"epoch": 0.930062630480167,
"grad_norm": 1.1059772968292236,
"learning_rate": 4.855904356856878e-06,
"loss": 0.5868,
"step": 297
},
{
"epoch": 0.9331941544885177,
"grad_norm": 1.2362196445465088,
"learning_rate": 4.854452142463977e-06,
"loss": 0.625,
"step": 298
},
{
"epoch": 0.9363256784968684,
"grad_norm": 0.9956470727920532,
"learning_rate": 4.852992866278799e-06,
"loss": 0.5923,
"step": 299
},
{
"epoch": 0.9394572025052192,
"grad_norm": 0.864109218120575,
"learning_rate": 4.851526532678203e-06,
"loss": 0.6315,
"step": 300
},
{
"epoch": 0.94258872651357,
"grad_norm": 0.8900614380836487,
"learning_rate": 4.850053146060217e-06,
"loss": 0.6128,
"step": 301
},
{
"epoch": 0.9457202505219207,
"grad_norm": 0.927254855632782,
"learning_rate": 4.84857271084402e-06,
"loss": 0.5955,
"step": 302
},
{
"epoch": 0.9488517745302714,
"grad_norm": 1.0046517848968506,
"learning_rate": 4.847085231469935e-06,
"loss": 0.6134,
"step": 303
},
{
"epoch": 0.9519832985386222,
"grad_norm": 0.734597384929657,
"learning_rate": 4.8455907123994125e-06,
"loss": 0.5927,
"step": 304
},
{
"epoch": 0.9551148225469729,
"grad_norm": 0.7338348031044006,
"learning_rate": 4.844089158115016e-06,
"loss": 0.5897,
"step": 305
},
{
"epoch": 0.9582463465553236,
"grad_norm": 0.9163988828659058,
"learning_rate": 4.8425805731204106e-06,
"loss": 0.6051,
"step": 306
},
{
"epoch": 0.9613778705636743,
"grad_norm": 1.050246238708496,
"learning_rate": 4.84106496194035e-06,
"loss": 0.5751,
"step": 307
},
{
"epoch": 0.964509394572025,
"grad_norm": 0.7637603878974915,
"learning_rate": 4.83954232912066e-06,
"loss": 0.5677,
"step": 308
},
{
"epoch": 0.9676409185803758,
"grad_norm": 0.7110525965690613,
"learning_rate": 4.838012679228229e-06,
"loss": 0.6051,
"step": 309
},
{
"epoch": 0.9707724425887265,
"grad_norm": 0.7662068605422974,
"learning_rate": 4.836476016850988e-06,
"loss": 0.59,
"step": 310
},
{
"epoch": 0.9739039665970772,
"grad_norm": 0.8907375335693359,
"learning_rate": 4.834932346597906e-06,
"loss": 0.5792,
"step": 311
},
{
"epoch": 0.9770354906054279,
"grad_norm": 0.8939849138259888,
"learning_rate": 4.833381673098966e-06,
"loss": 0.6062,
"step": 312
},
{
"epoch": 0.9801670146137788,
"grad_norm": 0.8878788948059082,
"learning_rate": 4.8318240010051595e-06,
"loss": 0.5694,
"step": 313
},
{
"epoch": 0.9832985386221295,
"grad_norm": 1.2523870468139648,
"learning_rate": 4.830259334988468e-06,
"loss": 0.5809,
"step": 314
},
{
"epoch": 0.9864300626304802,
"grad_norm": 1.0836797952651978,
"learning_rate": 4.82868767974185e-06,
"loss": 0.5949,
"step": 315
},
{
"epoch": 0.9895615866388309,
"grad_norm": 0.7985473871231079,
"learning_rate": 4.827109039979226e-06,
"loss": 0.6057,
"step": 316
},
{
"epoch": 0.9926931106471816,
"grad_norm": 1.042951226234436,
"learning_rate": 4.825523420435469e-06,
"loss": 0.6004,
"step": 317
},
{
"epoch": 0.9958246346555324,
"grad_norm": 0.7845115661621094,
"learning_rate": 4.823930825866381e-06,
"loss": 0.6161,
"step": 318
},
{
"epoch": 0.9989561586638831,
"grad_norm": 0.931854784488678,
"learning_rate": 4.82233126104869e-06,
"loss": 0.5912,
"step": 319
}
],
"logging_steps": 1,
"max_steps": 1914,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 319,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 9.681902049591034e+18,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}