{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.7405121875964209,
"eval_steps": 500,
"global_step": 2400,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.000308546744831842,
"grad_norm": 3.8197367191314697,
"learning_rate": 1.0000000000000001e-07,
"loss": 4.3327,
"step": 1
},
{
"epoch": 0.000617093489663684,
"grad_norm": 3.611845016479492,
"learning_rate": 2.0000000000000002e-07,
"loss": 4.2669,
"step": 2
},
{
"epoch": 0.0009256402344955261,
"grad_norm": 5.70314359664917,
"learning_rate": 3.0000000000000004e-07,
"loss": 5.4971,
"step": 3
},
{
"epoch": 0.001234186979327368,
"grad_norm": 3.392024517059326,
"learning_rate": 4.0000000000000003e-07,
"loss": 4.1838,
"step": 4
},
{
"epoch": 0.0015427337241592102,
"grad_norm": 4.388611316680908,
"learning_rate": 5e-07,
"loss": 4.8521,
"step": 5
},
{
"epoch": 0.0018512804689910522,
"grad_norm": 3.260829210281372,
"learning_rate": 6.000000000000001e-07,
"loss": 4.0387,
"step": 6
},
{
"epoch": 0.0021598272138228943,
"grad_norm": 6.02992057800293,
"learning_rate": 7.000000000000001e-07,
"loss": 5.9947,
"step": 7
},
{
"epoch": 0.002468373958654736,
"grad_norm": 4.1300554275512695,
"learning_rate": 8.000000000000001e-07,
"loss": 4.6317,
"step": 8
},
{
"epoch": 0.002776920703486578,
"grad_norm": 4.799147605895996,
"learning_rate": 9e-07,
"loss": 4.9666,
"step": 9
},
{
"epoch": 0.0030854674483184203,
"grad_norm": 3.797393798828125,
"learning_rate": 1e-06,
"loss": 4.4269,
"step": 10
},
{
"epoch": 0.003394014193150262,
"grad_norm": 3.5223026275634766,
"learning_rate": 1.1e-06,
"loss": 4.1057,
"step": 11
},
{
"epoch": 0.0037025609379821045,
"grad_norm": 4.609493255615234,
"learning_rate": 1.2000000000000002e-06,
"loss": 5.0402,
"step": 12
},
{
"epoch": 0.004011107682813947,
"grad_norm": 2.8431596755981445,
"learning_rate": 1.3e-06,
"loss": 3.7792,
"step": 13
},
{
"epoch": 0.004319654427645789,
"grad_norm": 5.449502944946289,
"learning_rate": 1.4000000000000001e-06,
"loss": 5.3713,
"step": 14
},
{
"epoch": 0.0046282011724776305,
"grad_norm": 4.320559501647949,
"learning_rate": 1.5e-06,
"loss": 4.6332,
"step": 15
},
{
"epoch": 0.004936747917309472,
"grad_norm": 4.168860912322998,
"learning_rate": 1.6000000000000001e-06,
"loss": 4.5831,
"step": 16
},
{
"epoch": 0.005245294662141314,
"grad_norm": 3.160491943359375,
"learning_rate": 1.7e-06,
"loss": 4.0278,
"step": 17
},
{
"epoch": 0.005553841406973156,
"grad_norm": 4.066011428833008,
"learning_rate": 1.8e-06,
"loss": 4.7029,
"step": 18
},
{
"epoch": 0.005862388151804999,
"grad_norm": 5.592798233032227,
"learning_rate": 1.9000000000000002e-06,
"loss": 5.9086,
"step": 19
},
{
"epoch": 0.006170934896636841,
"grad_norm": 2.722200870513916,
"learning_rate": 2e-06,
"loss": 3.5714,
"step": 20
},
{
"epoch": 0.0064794816414686825,
"grad_norm": 6.213608741760254,
"learning_rate": 2.1000000000000002e-06,
"loss": 5.5262,
"step": 21
},
{
"epoch": 0.006788028386300524,
"grad_norm": 6.304937839508057,
"learning_rate": 2.2e-06,
"loss": 5.9214,
"step": 22
},
{
"epoch": 0.007096575131132366,
"grad_norm": 4.180171012878418,
"learning_rate": 2.3e-06,
"loss": 4.5205,
"step": 23
},
{
"epoch": 0.007405121875964209,
"grad_norm": 3.755070209503174,
"learning_rate": 2.4000000000000003e-06,
"loss": 4.2664,
"step": 24
},
{
"epoch": 0.007713668620796051,
"grad_norm": 2.4497368335723877,
"learning_rate": 2.4999999999999998e-06,
"loss": 3.497,
"step": 25
},
{
"epoch": 0.008022215365627893,
"grad_norm": 4.276626110076904,
"learning_rate": 2.6e-06,
"loss": 4.7956,
"step": 26
},
{
"epoch": 0.008330762110459735,
"grad_norm": 4.274627208709717,
"learning_rate": 2.7e-06,
"loss": 4.9595,
"step": 27
},
{
"epoch": 0.008639308855291577,
"grad_norm": 2.989254951477051,
"learning_rate": 2.8000000000000003e-06,
"loss": 4.0622,
"step": 28
},
{
"epoch": 0.008947855600123419,
"grad_norm": 4.647172451019287,
"learning_rate": 2.9e-06,
"loss": 5.7777,
"step": 29
},
{
"epoch": 0.009256402344955261,
"grad_norm": 3.569882869720459,
"learning_rate": 3e-06,
"loss": 4.9304,
"step": 30
},
{
"epoch": 0.009564949089787103,
"grad_norm": 2.1073899269104004,
"learning_rate": 3.1e-06,
"loss": 3.8024,
"step": 31
},
{
"epoch": 0.009873495834618945,
"grad_norm": 1.0976536273956299,
"learning_rate": 3.2000000000000003e-06,
"loss": 2.873,
"step": 32
},
{
"epoch": 0.010182042579450786,
"grad_norm": 2.685044527053833,
"learning_rate": 3.3e-06,
"loss": 4.6695,
"step": 33
},
{
"epoch": 0.010490589324282628,
"grad_norm": 1.5810956954956055,
"learning_rate": 3.4e-06,
"loss": 3.5091,
"step": 34
},
{
"epoch": 0.01079913606911447,
"grad_norm": 1.168277621269226,
"learning_rate": 3.5e-06,
"loss": 3.1201,
"step": 35
},
{
"epoch": 0.011107682813946312,
"grad_norm": 1.4001338481903076,
"learning_rate": 3.6e-06,
"loss": 3.4514,
"step": 36
},
{
"epoch": 0.011416229558778156,
"grad_norm": 1.0257220268249512,
"learning_rate": 3.7e-06,
"loss": 2.9701,
"step": 37
},
{
"epoch": 0.011724776303609998,
"grad_norm": 1.1348458528518677,
"learning_rate": 3.8000000000000005e-06,
"loss": 3.2321,
"step": 38
},
{
"epoch": 0.01203332304844184,
"grad_norm": 1.5109179019927979,
"learning_rate": 3.9e-06,
"loss": 3.8248,
"step": 39
},
{
"epoch": 0.012341869793273681,
"grad_norm": 1.1383541822433472,
"learning_rate": 4e-06,
"loss": 3.2195,
"step": 40
},
{
"epoch": 0.012650416538105523,
"grad_norm": 1.4988147020339966,
"learning_rate": 4.1e-06,
"loss": 3.7383,
"step": 41
},
{
"epoch": 0.012958963282937365,
"grad_norm": 0.7626239061355591,
"learning_rate": 4.2000000000000004e-06,
"loss": 2.8485,
"step": 42
},
{
"epoch": 0.013267510027769207,
"grad_norm": 1.1359997987747192,
"learning_rate": 4.3e-06,
"loss": 3.3959,
"step": 43
},
{
"epoch": 0.013576056772601049,
"grad_norm": 1.991447925567627,
"learning_rate": 4.4e-06,
"loss": 4.5384,
"step": 44
},
{
"epoch": 0.01388460351743289,
"grad_norm": 1.2268764972686768,
"learning_rate": 4.5e-06,
"loss": 3.5075,
"step": 45
},
{
"epoch": 0.014193150262264732,
"grad_norm": 0.9016637802124023,
"learning_rate": 4.6e-06,
"loss": 3.2113,
"step": 46
},
{
"epoch": 0.014501697007096576,
"grad_norm": 1.3222975730895996,
"learning_rate": 4.700000000000001e-06,
"loss": 3.9122,
"step": 47
},
{
"epoch": 0.014810243751928418,
"grad_norm": 0.5912073850631714,
"learning_rate": 4.800000000000001e-06,
"loss": 2.8042,
"step": 48
},
{
"epoch": 0.01511879049676026,
"grad_norm": 1.091262936592102,
"learning_rate": 4.9e-06,
"loss": 3.5044,
"step": 49
},
{
"epoch": 0.015427337241592102,
"grad_norm": 1.0276159048080444,
"learning_rate": 4.9999999999999996e-06,
"loss": 3.5583,
"step": 50
},
{
"epoch": 0.01573588398642394,
"grad_norm": 1.0583761930465698,
"learning_rate": 5.1e-06,
"loss": 3.8359,
"step": 51
},
{
"epoch": 0.016044430731255787,
"grad_norm": 1.0619677305221558,
"learning_rate": 5.2e-06,
"loss": 3.8607,
"step": 52
},
{
"epoch": 0.01635297747608763,
"grad_norm": 0.8387205004692078,
"learning_rate": 5.3e-06,
"loss": 3.232,
"step": 53
},
{
"epoch": 0.01666152422091947,
"grad_norm": 1.1671696901321411,
"learning_rate": 5.4e-06,
"loss": 4.1826,
"step": 54
},
{
"epoch": 0.016970070965751313,
"grad_norm": 0.8083593249320984,
"learning_rate": 5.5e-06,
"loss": 3.4237,
"step": 55
},
{
"epoch": 0.017278617710583154,
"grad_norm": 0.99895179271698,
"learning_rate": 5.600000000000001e-06,
"loss": 3.8469,
"step": 56
},
{
"epoch": 0.017587164455414996,
"grad_norm": 0.7100948095321655,
"learning_rate": 5.7000000000000005e-06,
"loss": 3.2944,
"step": 57
},
{
"epoch": 0.017895711200246838,
"grad_norm": 0.6428288221359253,
"learning_rate": 5.8e-06,
"loss": 3.0704,
"step": 58
},
{
"epoch": 0.01820425794507868,
"grad_norm": 0.8618065118789673,
"learning_rate": 5.899999999999999e-06,
"loss": 3.8168,
"step": 59
},
{
"epoch": 0.018512804689910522,
"grad_norm": 0.7153589725494385,
"learning_rate": 6e-06,
"loss": 3.5421,
"step": 60
},
{
"epoch": 0.018821351434742364,
"grad_norm": 0.5318284630775452,
"learning_rate": 6.1e-06,
"loss": 3.1603,
"step": 61
},
{
"epoch": 0.019129898179574206,
"grad_norm": 0.348753422498703,
"learning_rate": 6.2e-06,
"loss": 2.692,
"step": 62
},
{
"epoch": 0.019438444924406047,
"grad_norm": 0.5460414290428162,
"learning_rate": 6.3e-06,
"loss": 3.08,
"step": 63
},
{
"epoch": 0.01974699166923789,
"grad_norm": 0.9770793318748474,
"learning_rate": 6.4000000000000006e-06,
"loss": 3.4058,
"step": 64
},
{
"epoch": 0.02005553841406973,
"grad_norm": 0.55307936668396,
"learning_rate": 6.5000000000000004e-06,
"loss": 2.9789,
"step": 65
},
{
"epoch": 0.020364085158901573,
"grad_norm": 0.7234558463096619,
"learning_rate": 6.6e-06,
"loss": 3.3505,
"step": 66
},
{
"epoch": 0.020672631903733415,
"grad_norm": 0.5592244863510132,
"learning_rate": 6.7e-06,
"loss": 3.1349,
"step": 67
},
{
"epoch": 0.020981178648565257,
"grad_norm": 0.8060568571090698,
"learning_rate": 6.8e-06,
"loss": 3.7522,
"step": 68
},
{
"epoch": 0.0212897253933971,
"grad_norm": 0.6033865809440613,
"learning_rate": 6.900000000000001e-06,
"loss": 3.5573,
"step": 69
},
{
"epoch": 0.02159827213822894,
"grad_norm": 0.5775837302207947,
"learning_rate": 7e-06,
"loss": 3.3392,
"step": 70
},
{
"epoch": 0.021906818883060782,
"grad_norm": 0.591890811920166,
"learning_rate": 7.1e-06,
"loss": 3.1473,
"step": 71
},
{
"epoch": 0.022215365627892624,
"grad_norm": 0.6378189325332642,
"learning_rate": 7.2e-06,
"loss": 3.2234,
"step": 72
},
{
"epoch": 0.02252391237272447,
"grad_norm": 0.5335432887077332,
"learning_rate": 7.3e-06,
"loss": 3.0607,
"step": 73
},
{
"epoch": 0.02283245911755631,
"grad_norm": 0.4598855674266815,
"learning_rate": 7.4e-06,
"loss": 3.0668,
"step": 74
},
{
"epoch": 0.023141005862388153,
"grad_norm": 0.5637675523757935,
"learning_rate": 7.5e-06,
"loss": 3.3607,
"step": 75
},
{
"epoch": 0.023449552607219995,
"grad_norm": 0.6915042996406555,
"learning_rate": 7.600000000000001e-06,
"loss": 3.1566,
"step": 76
},
{
"epoch": 0.023758099352051837,
"grad_norm": 0.44307759404182434,
"learning_rate": 7.699999999999999e-06,
"loss": 3.1364,
"step": 77
},
{
"epoch": 0.02406664609688368,
"grad_norm": 0.43005770444869995,
"learning_rate": 7.8e-06,
"loss": 3.1683,
"step": 78
},
{
"epoch": 0.02437519284171552,
"grad_norm": 0.6191526055335999,
"learning_rate": 7.899999999999999e-06,
"loss": 3.3363,
"step": 79
},
{
"epoch": 0.024683739586547362,
"grad_norm": 0.35434824228286743,
"learning_rate": 8e-06,
"loss": 2.5193,
"step": 80
},
{
"epoch": 0.024992286331379204,
"grad_norm": 0.656644344329834,
"learning_rate": 8.1e-06,
"loss": 3.7158,
"step": 81
},
{
"epoch": 0.025300833076211046,
"grad_norm": 0.40206533670425415,
"learning_rate": 8.2e-06,
"loss": 2.7845,
"step": 82
},
{
"epoch": 0.025609379821042888,
"grad_norm": 0.5501867532730103,
"learning_rate": 8.3e-06,
"loss": 2.7171,
"step": 83
},
{
"epoch": 0.02591792656587473,
"grad_norm": 0.4792475998401642,
"learning_rate": 8.400000000000001e-06,
"loss": 3.0822,
"step": 84
},
{
"epoch": 0.02622647331070657,
"grad_norm": 0.3702377676963806,
"learning_rate": 8.5e-06,
"loss": 2.6305,
"step": 85
},
{
"epoch": 0.026535020055538414,
"grad_norm": 0.37643131613731384,
"learning_rate": 8.6e-06,
"loss": 2.6588,
"step": 86
},
{
"epoch": 0.026843566800370255,
"grad_norm": 0.604988157749176,
"learning_rate": 8.7e-06,
"loss": 3.3621,
"step": 87
},
{
"epoch": 0.027152113545202097,
"grad_norm": 1.1603400707244873,
"learning_rate": 8.8e-06,
"loss": 2.8903,
"step": 88
},
{
"epoch": 0.02746066029003394,
"grad_norm": 0.6672317981719971,
"learning_rate": 8.900000000000001e-06,
"loss": 3.202,
"step": 89
},
{
"epoch": 0.02776920703486578,
"grad_norm": 0.404778391122818,
"learning_rate": 9e-06,
"loss": 3.0092,
"step": 90
},
{
"epoch": 0.028077753779697623,
"grad_norm": 0.7324615716934204,
"learning_rate": 9.100000000000001e-06,
"loss": 3.3472,
"step": 91
},
{
"epoch": 0.028386300524529465,
"grad_norm": 0.5391788482666016,
"learning_rate": 9.2e-06,
"loss": 2.9677,
"step": 92
},
{
"epoch": 0.028694847269361307,
"grad_norm": 0.4934414327144623,
"learning_rate": 9.3e-06,
"loss": 3.1551,
"step": 93
},
{
"epoch": 0.029003394014193152,
"grad_norm": 0.6892746686935425,
"learning_rate": 9.400000000000001e-06,
"loss": 3.6399,
"step": 94
},
{
"epoch": 0.029311940759024994,
"grad_norm": 0.40343931317329407,
"learning_rate": 9.5e-06,
"loss": 2.6643,
"step": 95
},
{
"epoch": 0.029620487503856836,
"grad_norm": 0.37016206979751587,
"learning_rate": 9.600000000000001e-06,
"loss": 2.8084,
"step": 96
},
{
"epoch": 0.029929034248688677,
"grad_norm": 0.31692177057266235,
"learning_rate": 9.699999999999999e-06,
"loss": 2.5649,
"step": 97
},
{
"epoch": 0.03023758099352052,
"grad_norm": 0.48820602893829346,
"learning_rate": 9.8e-06,
"loss": 2.7648,
"step": 98
},
{
"epoch": 0.03054612773835236,
"grad_norm": 0.4864860773086548,
"learning_rate": 9.9e-06,
"loss": 2.778,
"step": 99
},
{
"epoch": 0.030854674483184203,
"grad_norm": 0.4217410087585449,
"learning_rate": 9.999999999999999e-06,
"loss": 2.6541,
"step": 100
},
{
"epoch": 0.031163221228016045,
"grad_norm": 0.41729119420051575,
"learning_rate": 1.01e-05,
"loss": 2.5602,
"step": 101
},
{
"epoch": 0.03147176797284788,
"grad_norm": 0.5560382604598999,
"learning_rate": 1.02e-05,
"loss": 3.1223,
"step": 102
},
{
"epoch": 0.031780314717679725,
"grad_norm": 0.3572539985179901,
"learning_rate": 1.03e-05,
"loss": 2.706,
"step": 103
},
{
"epoch": 0.032088861462511574,
"grad_norm": 0.49619024991989136,
"learning_rate": 1.04e-05,
"loss": 3.0865,
"step": 104
},
{
"epoch": 0.032397408207343416,
"grad_norm": 0.4121740758419037,
"learning_rate": 1.05e-05,
"loss": 2.9064,
"step": 105
},
{
"epoch": 0.03270595495217526,
"grad_norm": 0.5392472743988037,
"learning_rate": 1.06e-05,
"loss": 3.0245,
"step": 106
},
{
"epoch": 0.0330145016970071,
"grad_norm": 0.37481802701950073,
"learning_rate": 1.0700000000000001e-05,
"loss": 3.0453,
"step": 107
},
{
"epoch": 0.03332304844183894,
"grad_norm": 0.6298946142196655,
"learning_rate": 1.08e-05,
"loss": 2.9881,
"step": 108
},
{
"epoch": 0.03363159518667078,
"grad_norm": 0.495768666267395,
"learning_rate": 1.09e-05,
"loss": 3.3919,
"step": 109
},
{
"epoch": 0.033940141931502625,
"grad_norm": 0.7116994261741638,
"learning_rate": 1.1e-05,
"loss": 3.188,
"step": 110
},
{
"epoch": 0.03424868867633447,
"grad_norm": 0.6338837146759033,
"learning_rate": 1.11e-05,
"loss": 3.1988,
"step": 111
},
{
"epoch": 0.03455723542116631,
"grad_norm": 0.6400672793388367,
"learning_rate": 1.1200000000000001e-05,
"loss": 3.3323,
"step": 112
},
{
"epoch": 0.03486578216599815,
"grad_norm": 0.48958930373191833,
"learning_rate": 1.13e-05,
"loss": 2.7639,
"step": 113
},
{
"epoch": 0.03517432891082999,
"grad_norm": 0.7853876948356628,
"learning_rate": 1.1400000000000001e-05,
"loss": 3.216,
"step": 114
},
{
"epoch": 0.035482875655661834,
"grad_norm": 0.4167158603668213,
"learning_rate": 1.1500000000000002e-05,
"loss": 2.5009,
"step": 115
},
{
"epoch": 0.035791422400493676,
"grad_norm": 0.43414178490638733,
"learning_rate": 1.16e-05,
"loss": 2.9166,
"step": 116
},
{
"epoch": 0.03609996914532552,
"grad_norm": 0.5888383984565735,
"learning_rate": 1.1700000000000001e-05,
"loss": 3.4765,
"step": 117
},
{
"epoch": 0.03640851589015736,
"grad_norm": 0.4863530099391937,
"learning_rate": 1.1799999999999999e-05,
"loss": 2.6866,
"step": 118
},
{
"epoch": 0.0367170626349892,
"grad_norm": 0.5108136534690857,
"learning_rate": 1.19e-05,
"loss": 3.0799,
"step": 119
},
{
"epoch": 0.037025609379821044,
"grad_norm": 0.72972172498703,
"learning_rate": 1.2e-05,
"loss": 3.3096,
"step": 120
},
{
"epoch": 0.037334156124652886,
"grad_norm": 0.4917832016944885,
"learning_rate": 1.21e-05,
"loss": 3.3209,
"step": 121
},
{
"epoch": 0.03764270286948473,
"grad_norm": 0.5651227831840515,
"learning_rate": 1.22e-05,
"loss": 3.2901,
"step": 122
},
{
"epoch": 0.03795124961431657,
"grad_norm": 0.4422552287578583,
"learning_rate": 1.2299999999999999e-05,
"loss": 2.7225,
"step": 123
},
{
"epoch": 0.03825979635914841,
"grad_norm": 0.4900197982788086,
"learning_rate": 1.24e-05,
"loss": 2.676,
"step": 124
},
{
"epoch": 0.03856834310398025,
"grad_norm": 0.5758654475212097,
"learning_rate": 1.25e-05,
"loss": 2.7869,
"step": 125
},
{
"epoch": 0.038876889848812095,
"grad_norm": 0.4216962456703186,
"learning_rate": 1.26e-05,
"loss": 2.5913,
"step": 126
},
{
"epoch": 0.03918543659364394,
"grad_norm": 0.5779691934585571,
"learning_rate": 1.27e-05,
"loss": 2.93,
"step": 127
},
{
"epoch": 0.03949398333847578,
"grad_norm": 0.5078710317611694,
"learning_rate": 1.2800000000000001e-05,
"loss": 2.873,
"step": 128
},
{
"epoch": 0.03980253008330762,
"grad_norm": 0.7252984046936035,
"learning_rate": 1.29e-05,
"loss": 3.2651,
"step": 129
},
{
"epoch": 0.04011107682813946,
"grad_norm": 0.30763328075408936,
"learning_rate": 1.3000000000000001e-05,
"loss": 2.5202,
"step": 130
},
{
"epoch": 0.040419623572971304,
"grad_norm": 0.5326187610626221,
"learning_rate": 1.31e-05,
"loss": 2.7101,
"step": 131
},
{
"epoch": 0.040728170317803146,
"grad_norm": 0.5274989604949951,
"learning_rate": 1.32e-05,
"loss": 2.8888,
"step": 132
},
{
"epoch": 0.04103671706263499,
"grad_norm": 0.4721854031085968,
"learning_rate": 1.3300000000000001e-05,
"loss": 3.1288,
"step": 133
},
{
"epoch": 0.04134526380746683,
"grad_norm": 0.4977727234363556,
"learning_rate": 1.34e-05,
"loss": 2.7822,
"step": 134
},
{
"epoch": 0.04165381055229867,
"grad_norm": 0.5609750747680664,
"learning_rate": 1.3500000000000001e-05,
"loss": 2.9943,
"step": 135
},
{
"epoch": 0.04196235729713051,
"grad_norm": 0.5416237711906433,
"learning_rate": 1.36e-05,
"loss": 2.9631,
"step": 136
},
{
"epoch": 0.042270904041962355,
"grad_norm": 0.7931625247001648,
"learning_rate": 1.3700000000000001e-05,
"loss": 3.2442,
"step": 137
},
{
"epoch": 0.0425794507867942,
"grad_norm": 0.41734540462493896,
"learning_rate": 1.3800000000000002e-05,
"loss": 2.7296,
"step": 138
},
{
"epoch": 0.04288799753162604,
"grad_norm": 0.5195217132568359,
"learning_rate": 1.39e-05,
"loss": 3.0406,
"step": 139
},
{
"epoch": 0.04319654427645788,
"grad_norm": 0.43490850925445557,
"learning_rate": 1.4e-05,
"loss": 2.7431,
"step": 140
},
{
"epoch": 0.04350509102128972,
"grad_norm": 0.6502295732498169,
"learning_rate": 1.4099999999999999e-05,
"loss": 3.0238,
"step": 141
},
{
"epoch": 0.043813637766121565,
"grad_norm": 0.44002243876457214,
"learning_rate": 1.42e-05,
"loss": 2.9071,
"step": 142
},
{
"epoch": 0.044122184510953406,
"grad_norm": 0.578681468963623,
"learning_rate": 1.43e-05,
"loss": 3.1955,
"step": 143
},
{
"epoch": 0.04443073125578525,
"grad_norm": 0.5037577152252197,
"learning_rate": 1.44e-05,
"loss": 2.6283,
"step": 144
},
{
"epoch": 0.04473927800061709,
"grad_norm": 0.46894946694374084,
"learning_rate": 1.45e-05,
"loss": 2.5679,
"step": 145
},
{
"epoch": 0.04504782474544894,
"grad_norm": 0.7387483716011047,
"learning_rate": 1.46e-05,
"loss": 3.3251,
"step": 146
},
{
"epoch": 0.04535637149028078,
"grad_norm": 0.4696255624294281,
"learning_rate": 1.47e-05,
"loss": 2.7964,
"step": 147
},
{
"epoch": 0.04566491823511262,
"grad_norm": 0.6008668541908264,
"learning_rate": 1.48e-05,
"loss": 2.7697,
"step": 148
},
{
"epoch": 0.045973464979944464,
"grad_norm": 0.8156336545944214,
"learning_rate": 1.49e-05,
"loss": 3.2155,
"step": 149
},
{
"epoch": 0.046282011724776306,
"grad_norm": 0.37692368030548096,
"learning_rate": 1.5e-05,
"loss": 2.7007,
"step": 150
},
{
"epoch": 0.04659055846960815,
"grad_norm": 0.7642585635185242,
"learning_rate": 1.51e-05,
"loss": 3.6619,
"step": 151
},
{
"epoch": 0.04689910521443999,
"grad_norm": 0.6754474639892578,
"learning_rate": 1.5200000000000002e-05,
"loss": 3.1602,
"step": 152
},
{
"epoch": 0.04720765195927183,
"grad_norm": 0.6416701674461365,
"learning_rate": 1.53e-05,
"loss": 3.3756,
"step": 153
},
{
"epoch": 0.047516198704103674,
"grad_norm": 0.654334306716919,
"learning_rate": 1.5399999999999998e-05,
"loss": 3.168,
"step": 154
},
{
"epoch": 0.047824745448935516,
"grad_norm": 0.6520740389823914,
"learning_rate": 1.55e-05,
"loss": 3.2226,
"step": 155
},
{
"epoch": 0.04813329219376736,
"grad_norm": 0.384902685880661,
"learning_rate": 1.56e-05,
"loss": 2.5777,
"step": 156
},
{
"epoch": 0.0484418389385992,
"grad_norm": 0.5706138014793396,
"learning_rate": 1.57e-05,
"loss": 3.2847,
"step": 157
},
{
"epoch": 0.04875038568343104,
"grad_norm": 0.5485690832138062,
"learning_rate": 1.5799999999999998e-05,
"loss": 3.1548,
"step": 158
},
{
"epoch": 0.04905893242826288,
"grad_norm": 0.5101959705352783,
"learning_rate": 1.59e-05,
"loss": 2.8384,
"step": 159
},
{
"epoch": 0.049367479173094725,
"grad_norm": 0.4596351683139801,
"learning_rate": 1.6e-05,
"loss": 2.9464,
"step": 160
},
{
"epoch": 0.04967602591792657,
"grad_norm": 0.6397862434387207,
"learning_rate": 1.61e-05,
"loss": 3.2851,
"step": 161
},
{
"epoch": 0.04998457266275841,
"grad_norm": 0.3608058989048004,
"learning_rate": 1.62e-05,
"loss": 2.43,
"step": 162
},
{
"epoch": 0.05029311940759025,
"grad_norm": 0.7656980752944946,
"learning_rate": 1.63e-05,
"loss": 3.5049,
"step": 163
},
{
"epoch": 0.05060166615242209,
"grad_norm": 0.4537852108478546,
"learning_rate": 1.64e-05,
"loss": 2.6858,
"step": 164
},
{
"epoch": 0.050910212897253934,
"grad_norm": 0.46467283368110657,
"learning_rate": 1.65e-05,
"loss": 2.8103,
"step": 165
},
{
"epoch": 0.051218759642085776,
"grad_norm": 0.6642769575119019,
"learning_rate": 1.66e-05,
"loss": 3.2552,
"step": 166
},
{
"epoch": 0.05152730638691762,
"grad_norm": 0.707314133644104,
"learning_rate": 1.67e-05,
"loss": 3.5667,
"step": 167
},
{
"epoch": 0.05183585313174946,
"grad_norm": 0.4103851020336151,
"learning_rate": 1.6800000000000002e-05,
"loss": 2.8039,
"step": 168
},
{
"epoch": 0.0521443998765813,
"grad_norm": 0.978696882724762,
"learning_rate": 1.69e-05,
"loss": 2.8174,
"step": 169
},
{
"epoch": 0.05245294662141314,
"grad_norm": 0.4563823640346527,
"learning_rate": 1.7e-05,
"loss": 3.1365,
"step": 170
},
{
"epoch": 0.052761493366244985,
"grad_norm": 0.4271509051322937,
"learning_rate": 1.71e-05,
"loss": 2.6638,
"step": 171
},
{
"epoch": 0.05307004011107683,
"grad_norm": 0.570706844329834,
"learning_rate": 1.72e-05,
"loss": 2.9269,
"step": 172
},
{
"epoch": 0.05337858685590867,
"grad_norm": 0.38479119539260864,
"learning_rate": 1.73e-05,
"loss": 2.1779,
"step": 173
},
{
"epoch": 0.05368713360074051,
"grad_norm": 0.5205680727958679,
"learning_rate": 1.74e-05,
"loss": 2.8916,
"step": 174
},
{
"epoch": 0.05399568034557235,
"grad_norm": 0.5419281721115112,
"learning_rate": 1.7500000000000002e-05,
"loss": 2.8412,
"step": 175
},
{
"epoch": 0.054304227090404195,
"grad_norm": 0.5400970578193665,
"learning_rate": 1.76e-05,
"loss": 3.1398,
"step": 176
},
{
"epoch": 0.054612773835236036,
"grad_norm": 0.4808214604854584,
"learning_rate": 1.77e-05,
"loss": 2.9332,
"step": 177
},
{
"epoch": 0.05492132058006788,
"grad_norm": 0.5370422005653381,
"learning_rate": 1.7800000000000002e-05,
"loss": 2.6539,
"step": 178
},
{
"epoch": 0.05522986732489972,
"grad_norm": 0.5261011719703674,
"learning_rate": 1.79e-05,
"loss": 2.6934,
"step": 179
},
{
"epoch": 0.05553841406973156,
"grad_norm": 0.4394947588443756,
"learning_rate": 1.8e-05,
"loss": 2.6768,
"step": 180
},
{
"epoch": 0.055846960814563404,
"grad_norm": 0.3960530459880829,
"learning_rate": 1.8100000000000003e-05,
"loss": 2.7511,
"step": 181
},
{
"epoch": 0.056155507559395246,
"grad_norm": 0.4033471941947937,
"learning_rate": 1.8200000000000002e-05,
"loss": 2.6748,
"step": 182
},
{
"epoch": 0.05646405430422709,
"grad_norm": 0.46211346983909607,
"learning_rate": 1.83e-05,
"loss": 2.5696,
"step": 183
},
{
"epoch": 0.05677260104905893,
"grad_norm": 0.48176342248916626,
"learning_rate": 1.84e-05,
"loss": 2.6858,
"step": 184
},
{
"epoch": 0.05708114779389077,
"grad_norm": 0.5655994415283203,
"learning_rate": 1.8500000000000002e-05,
"loss": 3.2977,
"step": 185
},
{
"epoch": 0.05738969453872261,
"grad_norm": 0.7159984111785889,
"learning_rate": 1.86e-05,
"loss": 3.7336,
"step": 186
},
{
"epoch": 0.05769824128355446,
"grad_norm": 0.5511050224304199,
"learning_rate": 1.87e-05,
"loss": 2.7869,
"step": 187
},
{
"epoch": 0.058006788028386304,
"grad_norm": 0.5503979921340942,
"learning_rate": 1.8800000000000003e-05,
"loss": 2.7981,
"step": 188
},
{
"epoch": 0.058315334773218146,
"grad_norm": 0.4197019040584564,
"learning_rate": 1.8900000000000002e-05,
"loss": 2.5163,
"step": 189
},
{
"epoch": 0.05862388151804999,
"grad_norm": 0.5114262104034424,
"learning_rate": 1.9e-05,
"loss": 2.9493,
"step": 190
},
{
"epoch": 0.05893242826288183,
"grad_norm": 0.8843671679496765,
"learning_rate": 1.9100000000000003e-05,
"loss": 3.295,
"step": 191
},
{
"epoch": 0.05924097500771367,
"grad_norm": 0.3419288396835327,
"learning_rate": 1.9200000000000003e-05,
"loss": 2.3456,
"step": 192
},
{
"epoch": 0.05954952175254551,
"grad_norm": 0.5502027869224548,
"learning_rate": 1.9299999999999998e-05,
"loss": 2.3674,
"step": 193
},
{
"epoch": 0.059858068497377355,
"grad_norm": 0.7232564091682434,
"learning_rate": 1.9399999999999997e-05,
"loss": 3.4577,
"step": 194
},
{
"epoch": 0.0601666152422092,
"grad_norm": 0.4990410804748535,
"learning_rate": 1.95e-05,
"loss": 2.5966,
"step": 195
},
{
"epoch": 0.06047516198704104,
"grad_norm": 0.5583244562149048,
"learning_rate": 1.96e-05,
"loss": 2.58,
"step": 196
},
{
"epoch": 0.06078370873187288,
"grad_norm": 0.8695673942565918,
"learning_rate": 1.9699999999999998e-05,
"loss": 3.292,
"step": 197
},
{
"epoch": 0.06109225547670472,
"grad_norm": 0.47713199257850647,
"learning_rate": 1.98e-05,
"loss": 2.5254,
"step": 198
},
{
"epoch": 0.061400802221536564,
"grad_norm": 0.7606462836265564,
"learning_rate": 1.99e-05,
"loss": 2.9146,
"step": 199
},
{
"epoch": 0.061709348966368406,
"grad_norm": 0.6246155500411987,
"learning_rate": 1.9999999999999998e-05,
"loss": 3.1394,
"step": 200
},
{
"epoch": 0.06201789571120025,
"grad_norm": 0.741635262966156,
"learning_rate": 2.01e-05,
"loss": 2.8496,
"step": 201
},
{
"epoch": 0.06232644245603209,
"grad_norm": 0.5045515894889832,
"learning_rate": 2.02e-05,
"loss": 2.7577,
"step": 202
},
{
"epoch": 0.06263498920086392,
"grad_norm": 0.6223952174186707,
"learning_rate": 2.03e-05,
"loss": 2.7214,
"step": 203
},
{
"epoch": 0.06294353594569577,
"grad_norm": 0.5237772464752197,
"learning_rate": 2.04e-05,
"loss": 2.9416,
"step": 204
},
{
"epoch": 0.06325208269052761,
"grad_norm": 1.014267086982727,
"learning_rate": 2.05e-05,
"loss": 3.0007,
"step": 205
},
{
"epoch": 0.06356062943535945,
"grad_norm": 0.6386702060699463,
"learning_rate": 2.06e-05,
"loss": 2.8931,
"step": 206
},
{
"epoch": 0.06386917618019129,
"grad_norm": 0.6047970056533813,
"learning_rate": 2.07e-05,
"loss": 2.4792,
"step": 207
},
{
"epoch": 0.06417772292502315,
"grad_norm": 0.5924057364463806,
"learning_rate": 2.08e-05,
"loss": 2.9367,
"step": 208
},
{
"epoch": 0.06448626966985499,
"grad_norm": 0.5173370242118835,
"learning_rate": 2.09e-05,
"loss": 2.499,
"step": 209
},
{
"epoch": 0.06479481641468683,
"grad_norm": 0.9348816275596619,
"learning_rate": 2.1e-05,
"loss": 2.9291,
"step": 210
},
{
"epoch": 0.06510336315951867,
"grad_norm": 0.500065267086029,
"learning_rate": 2.11e-05,
"loss": 3.0239,
"step": 211
},
{
"epoch": 0.06541190990435052,
"grad_norm": 0.6069537401199341,
"learning_rate": 2.12e-05,
"loss": 2.9213,
"step": 212
},
{
"epoch": 0.06572045664918236,
"grad_norm": 0.8060280084609985,
"learning_rate": 2.13e-05,
"loss": 2.9865,
"step": 213
},
{
"epoch": 0.0660290033940142,
"grad_norm": 0.5638638138771057,
"learning_rate": 2.1400000000000002e-05,
"loss": 3.1433,
"step": 214
},
{
"epoch": 0.06633755013884604,
"grad_norm": 0.9096368551254272,
"learning_rate": 2.15e-05,
"loss": 3.3942,
"step": 215
},
{
"epoch": 0.06664609688367788,
"grad_norm": 0.4683953523635864,
"learning_rate": 2.16e-05,
"loss": 2.5365,
"step": 216
},
{
"epoch": 0.06695464362850972,
"grad_norm": 0.6851359009742737,
"learning_rate": 2.1700000000000002e-05,
"loss": 2.6351,
"step": 217
},
{
"epoch": 0.06726319037334157,
"grad_norm": 0.5846662521362305,
"learning_rate": 2.18e-05,
"loss": 2.8887,
"step": 218
},
{
"epoch": 0.06757173711817341,
"grad_norm": 0.42902109026908875,
"learning_rate": 2.19e-05,
"loss": 2.5048,
"step": 219
},
{
"epoch": 0.06788028386300525,
"grad_norm": 0.5018395185470581,
"learning_rate": 2.2e-05,
"loss": 2.854,
"step": 220
},
{
"epoch": 0.06818883060783709,
"grad_norm": 0.5672473311424255,
"learning_rate": 2.2100000000000002e-05,
"loss": 2.6505,
"step": 221
},
{
"epoch": 0.06849737735266893,
"grad_norm": 0.4538055956363678,
"learning_rate": 2.22e-05,
"loss": 2.6871,
"step": 222
},
{
"epoch": 0.06880592409750078,
"grad_norm": 0.4177427589893341,
"learning_rate": 2.23e-05,
"loss": 2.5534,
"step": 223
},
{
"epoch": 0.06911447084233262,
"grad_norm": 0.5405595898628235,
"learning_rate": 2.2400000000000002e-05,
"loss": 2.8893,
"step": 224
},
{
"epoch": 0.06942301758716446,
"grad_norm": 0.5323094725608826,
"learning_rate": 2.25e-05,
"loss": 2.5933,
"step": 225
},
{
"epoch": 0.0697315643319963,
"grad_norm": 0.6036468744277954,
"learning_rate": 2.26e-05,
"loss": 2.8331,
"step": 226
},
{
"epoch": 0.07004011107682814,
"grad_norm": 0.545752763748169,
"learning_rate": 2.2700000000000003e-05,
"loss": 2.6558,
"step": 227
},
{
"epoch": 0.07034865782165999,
"grad_norm": 0.3827911913394928,
"learning_rate": 2.2800000000000002e-05,
"loss": 2.3423,
"step": 228
},
{
"epoch": 0.07065720456649183,
"grad_norm": 0.4605464041233063,
"learning_rate": 2.29e-05,
"loss": 2.7205,
"step": 229
},
{
"epoch": 0.07096575131132367,
"grad_norm": 0.5113175511360168,
"learning_rate": 2.3000000000000003e-05,
"loss": 2.5991,
"step": 230
},
{
"epoch": 0.07127429805615551,
"grad_norm": 1.0119787454605103,
"learning_rate": 2.3100000000000002e-05,
"loss": 3.7089,
"step": 231
},
{
"epoch": 0.07158284480098735,
"grad_norm": 0.9721714854240417,
"learning_rate": 2.32e-05,
"loss": 2.7576,
"step": 232
},
{
"epoch": 0.0718913915458192,
"grad_norm": 0.7502716779708862,
"learning_rate": 2.33e-05,
"loss": 2.5823,
"step": 233
},
{
"epoch": 0.07219993829065104,
"grad_norm": 0.46226513385772705,
"learning_rate": 2.3400000000000003e-05,
"loss": 2.4788,
"step": 234
},
{
"epoch": 0.07250848503548288,
"grad_norm": 0.6776145696640015,
"learning_rate": 2.3500000000000002e-05,
"loss": 2.7757,
"step": 235
},
{
"epoch": 0.07281703178031472,
"grad_norm": 1.051665186882019,
"learning_rate": 2.3599999999999998e-05,
"loss": 3.2042,
"step": 236
},
{
"epoch": 0.07312557852514656,
"grad_norm": 0.4878885746002197,
"learning_rate": 2.37e-05,
"loss": 2.6037,
"step": 237
},
{
"epoch": 0.0734341252699784,
"grad_norm": 1.0094560384750366,
"learning_rate": 2.38e-05,
"loss": 3.0565,
"step": 238
},
{
"epoch": 0.07374267201481025,
"grad_norm": 0.6273016929626465,
"learning_rate": 2.3899999999999998e-05,
"loss": 2.6155,
"step": 239
},
{
"epoch": 0.07405121875964209,
"grad_norm": 0.7550265192985535,
"learning_rate": 2.4e-05,
"loss": 2.7951,
"step": 240
},
{
"epoch": 0.07435976550447393,
"grad_norm": 0.9445892572402954,
"learning_rate": 2.41e-05,
"loss": 2.8222,
"step": 241
},
{
"epoch": 0.07466831224930577,
"grad_norm": 0.7777389883995056,
"learning_rate": 2.42e-05,
"loss": 3.1079,
"step": 242
},
{
"epoch": 0.07497685899413761,
"grad_norm": 0.6355181932449341,
"learning_rate": 2.43e-05,
"loss": 2.6854,
"step": 243
},
{
"epoch": 0.07528540573896945,
"grad_norm": 1.261012315750122,
"learning_rate": 2.44e-05,
"loss": 2.6395,
"step": 244
},
{
"epoch": 0.0755939524838013,
"grad_norm": 0.659038245677948,
"learning_rate": 2.45e-05,
"loss": 2.5814,
"step": 245
},
{
"epoch": 0.07590249922863314,
"grad_norm": 0.873461127281189,
"learning_rate": 2.4599999999999998e-05,
"loss": 3.2166,
"step": 246
},
{
"epoch": 0.07621104597346498,
"grad_norm": 0.5640694499015808,
"learning_rate": 2.47e-05,
"loss": 2.6797,
"step": 247
},
{
"epoch": 0.07651959271829682,
"grad_norm": 0.8357604742050171,
"learning_rate": 2.48e-05,
"loss": 3.1053,
"step": 248
},
{
"epoch": 0.07682813946312866,
"grad_norm": 0.6787241697311401,
"learning_rate": 2.49e-05,
"loss": 3.0971,
"step": 249
},
{
"epoch": 0.0771366862079605,
"grad_norm": 1.1458957195281982,
"learning_rate": 2.5e-05,
"loss": 3.6878,
"step": 250
},
{
"epoch": 0.07744523295279235,
"grad_norm": 0.5414482355117798,
"learning_rate": 2.51e-05,
"loss": 2.7741,
"step": 251
},
{
"epoch": 0.07775377969762419,
"grad_norm": 0.6754476428031921,
"learning_rate": 2.52e-05,
"loss": 2.9239,
"step": 252
},
{
"epoch": 0.07806232644245603,
"grad_norm": 0.6492197513580322,
"learning_rate": 2.5300000000000002e-05,
"loss": 2.9217,
"step": 253
},
{
"epoch": 0.07837087318728787,
"grad_norm": 0.4921364188194275,
"learning_rate": 2.54e-05,
"loss": 2.4709,
"step": 254
},
{
"epoch": 0.07867941993211972,
"grad_norm": 0.535729169845581,
"learning_rate": 2.55e-05,
"loss": 2.4383,
"step": 255
},
{
"epoch": 0.07898796667695156,
"grad_norm": 0.6355189085006714,
"learning_rate": 2.5600000000000002e-05,
"loss": 2.8076,
"step": 256
},
{
"epoch": 0.0792965134217834,
"grad_norm": 0.5118688344955444,
"learning_rate": 2.57e-05,
"loss": 2.8182,
"step": 257
},
{
"epoch": 0.07960506016661524,
"grad_norm": 0.49077171087265015,
"learning_rate": 2.58e-05,
"loss": 2.4566,
"step": 258
},
{
"epoch": 0.07991360691144708,
"grad_norm": 0.698340654373169,
"learning_rate": 2.59e-05,
"loss": 2.8407,
"step": 259
},
{
"epoch": 0.08022215365627892,
"grad_norm": 0.6589793562889099,
"learning_rate": 2.6000000000000002e-05,
"loss": 2.7079,
"step": 260
},
{
"epoch": 0.08053070040111077,
"grad_norm": 0.4942406117916107,
"learning_rate": 2.61e-05,
"loss": 2.7984,
"step": 261
},
{
"epoch": 0.08083924714594261,
"grad_norm": 0.5735039710998535,
"learning_rate": 2.62e-05,
"loss": 2.7639,
"step": 262
},
{
"epoch": 0.08114779389077445,
"grad_norm": 0.5074241757392883,
"learning_rate": 2.6300000000000002e-05,
"loss": 2.8036,
"step": 263
},
{
"epoch": 0.08145634063560629,
"grad_norm": 0.4602759778499603,
"learning_rate": 2.64e-05,
"loss": 2.257,
"step": 264
},
{
"epoch": 0.08176488738043813,
"grad_norm": 0.8142499327659607,
"learning_rate": 2.65e-05,
"loss": 3.1813,
"step": 265
},
{
"epoch": 0.08207343412526998,
"grad_norm": 0.44730252027511597,
"learning_rate": 2.6600000000000003e-05,
"loss": 2.6948,
"step": 266
},
{
"epoch": 0.08238198087010182,
"grad_norm": 0.3990883529186249,
"learning_rate": 2.6700000000000002e-05,
"loss": 2.5447,
"step": 267
},
{
"epoch": 0.08269052761493366,
"grad_norm": 0.7322868704795837,
"learning_rate": 2.68e-05,
"loss": 2.9178,
"step": 268
},
{
"epoch": 0.0829990743597655,
"grad_norm": 0.7331518530845642,
"learning_rate": 2.69e-05,
"loss": 2.953,
"step": 269
},
{
"epoch": 0.08330762110459734,
"grad_norm": 0.44720855355262756,
"learning_rate": 2.7000000000000002e-05,
"loss": 2.6966,
"step": 270
},
{
"epoch": 0.08361616784942918,
"grad_norm": 0.4966452717781067,
"learning_rate": 2.71e-05,
"loss": 2.9062,
"step": 271
},
{
"epoch": 0.08392471459426103,
"grad_norm": 0.6715177893638611,
"learning_rate": 2.72e-05,
"loss": 3.301,
"step": 272
},
{
"epoch": 0.08423326133909287,
"grad_norm": 0.5892104506492615,
"learning_rate": 2.7300000000000003e-05,
"loss": 3.0694,
"step": 273
},
{
"epoch": 0.08454180808392471,
"grad_norm": 0.7469586730003357,
"learning_rate": 2.7400000000000002e-05,
"loss": 3.3209,
"step": 274
},
{
"epoch": 0.08485035482875655,
"grad_norm": 0.41894596815109253,
"learning_rate": 2.75e-05,
"loss": 2.5125,
"step": 275
},
{
"epoch": 0.0851589015735884,
"grad_norm": 0.7170995473861694,
"learning_rate": 2.7600000000000003e-05,
"loss": 2.9459,
"step": 276
},
{
"epoch": 0.08546744831842024,
"grad_norm": 0.4706147015094757,
"learning_rate": 2.7700000000000002e-05,
"loss": 2.3327,
"step": 277
},
{
"epoch": 0.08577599506325208,
"grad_norm": 0.6144742965698242,
"learning_rate": 2.78e-05,
"loss": 2.6478,
"step": 278
},
{
"epoch": 0.08608454180808392,
"grad_norm": 0.5859728455543518,
"learning_rate": 2.79e-05,
"loss": 2.6092,
"step": 279
},
{
"epoch": 0.08639308855291576,
"grad_norm": 0.5228575468063354,
"learning_rate": 2.8e-05,
"loss": 2.8229,
"step": 280
},
{
"epoch": 0.0867016352977476,
"grad_norm": 0.7771852612495422,
"learning_rate": 2.81e-05,
"loss": 3.0858,
"step": 281
},
{
"epoch": 0.08701018204257945,
"grad_norm": 0.6817198395729065,
"learning_rate": 2.8199999999999998e-05,
"loss": 2.5982,
"step": 282
},
{
"epoch": 0.08731872878741129,
"grad_norm": 0.8393194079399109,
"learning_rate": 2.83e-05,
"loss": 3.2083,
"step": 283
},
{
"epoch": 0.08762727553224313,
"grad_norm": 0.5846584439277649,
"learning_rate": 2.84e-05,
"loss": 2.6578,
"step": 284
},
{
"epoch": 0.08793582227707497,
"grad_norm": 0.7105833292007446,
"learning_rate": 2.8499999999999998e-05,
"loss": 2.3854,
"step": 285
},
{
"epoch": 0.08824436902190681,
"grad_norm": 0.5590451955795288,
"learning_rate": 2.86e-05,
"loss": 2.7077,
"step": 286
},
{
"epoch": 0.08855291576673865,
"grad_norm": 0.3551070988178253,
"learning_rate": 2.87e-05,
"loss": 2.3749,
"step": 287
},
{
"epoch": 0.0888614625115705,
"grad_norm": 0.6778650283813477,
"learning_rate": 2.88e-05,
"loss": 2.5384,
"step": 288
},
{
"epoch": 0.08917000925640234,
"grad_norm": 0.5005120635032654,
"learning_rate": 2.89e-05,
"loss": 2.4651,
"step": 289
},
{
"epoch": 0.08947855600123418,
"grad_norm": 0.7873884439468384,
"learning_rate": 2.9e-05,
"loss": 3.0269,
"step": 290
},
{
"epoch": 0.08978710274606604,
"grad_norm": 0.8063921928405762,
"learning_rate": 2.91e-05,
"loss": 2.8679,
"step": 291
},
{
"epoch": 0.09009564949089788,
"grad_norm": 0.75018310546875,
"learning_rate": 2.92e-05,
"loss": 2.6179,
"step": 292
},
{
"epoch": 0.09040419623572972,
"grad_norm": 0.3952403962612152,
"learning_rate": 2.93e-05,
"loss": 2.3271,
"step": 293
},
{
"epoch": 0.09071274298056156,
"grad_norm": 0.49011290073394775,
"learning_rate": 2.94e-05,
"loss": 2.8457,
"step": 294
},
{
"epoch": 0.0910212897253934,
"grad_norm": 0.720333456993103,
"learning_rate": 2.95e-05,
"loss": 2.5857,
"step": 295
},
{
"epoch": 0.09132983647022525,
"grad_norm": 0.4137563705444336,
"learning_rate": 2.96e-05,
"loss": 2.4882,
"step": 296
},
{
"epoch": 0.09163838321505709,
"grad_norm": 0.5216336846351624,
"learning_rate": 2.97e-05,
"loss": 2.7284,
"step": 297
},
{
"epoch": 0.09194692995988893,
"grad_norm": 0.7018035054206848,
"learning_rate": 2.98e-05,
"loss": 3.4228,
"step": 298
},
{
"epoch": 0.09225547670472077,
"grad_norm": 0.5406361818313599,
"learning_rate": 2.9900000000000002e-05,
"loss": 3.1286,
"step": 299
},
{
"epoch": 0.09256402344955261,
"grad_norm": 0.48260602355003357,
"learning_rate": 3e-05,
"loss": 2.9251,
"step": 300
},
{
"epoch": 0.09287257019438445,
"grad_norm": 0.599492073059082,
"learning_rate": 3e-05,
"loss": 2.9756,
"step": 301
},
{
"epoch": 0.0931811169392163,
"grad_norm": 0.4386523962020874,
"learning_rate": 3e-05,
"loss": 2.8846,
"step": 302
},
{
"epoch": 0.09348966368404814,
"grad_norm": 0.45703089237213135,
"learning_rate": 3e-05,
"loss": 2.8327,
"step": 303
},
{
"epoch": 0.09379821042887998,
"grad_norm": 0.5618659257888794,
"learning_rate": 3e-05,
"loss": 2.442,
"step": 304
},
{
"epoch": 0.09410675717371182,
"grad_norm": 0.5637118220329285,
"learning_rate": 3e-05,
"loss": 3.2367,
"step": 305
},
{
"epoch": 0.09441530391854366,
"grad_norm": 0.7520857453346252,
"learning_rate": 3e-05,
"loss": 2.925,
"step": 306
},
{
"epoch": 0.0947238506633755,
"grad_norm": 0.5939932465553284,
"learning_rate": 3e-05,
"loss": 3.1811,
"step": 307
},
{
"epoch": 0.09503239740820735,
"grad_norm": 0.3925078809261322,
"learning_rate": 3e-05,
"loss": 2.8408,
"step": 308
},
{
"epoch": 0.09534094415303919,
"grad_norm": 0.415669322013855,
"learning_rate": 3e-05,
"loss": 2.5195,
"step": 309
},
{
"epoch": 0.09564949089787103,
"grad_norm": 0.6630593538284302,
"learning_rate": 3e-05,
"loss": 3.2672,
"step": 310
},
{
"epoch": 0.09595803764270287,
"grad_norm": 0.4107421934604645,
"learning_rate": 3e-05,
"loss": 2.4262,
"step": 311
},
{
"epoch": 0.09626658438753471,
"grad_norm": 0.5593919157981873,
"learning_rate": 3e-05,
"loss": 2.5541,
"step": 312
},
{
"epoch": 0.09657513113236656,
"grad_norm": 0.42898955941200256,
"learning_rate": 3e-05,
"loss": 2.5179,
"step": 313
},
{
"epoch": 0.0968836778771984,
"grad_norm": 0.31650015711784363,
"learning_rate": 3e-05,
"loss": 2.3117,
"step": 314
},
{
"epoch": 0.09719222462203024,
"grad_norm": 0.6258980631828308,
"learning_rate": 3e-05,
"loss": 2.6524,
"step": 315
},
{
"epoch": 0.09750077136686208,
"grad_norm": 0.45299839973449707,
"learning_rate": 3e-05,
"loss": 2.689,
"step": 316
},
{
"epoch": 0.09780931811169392,
"grad_norm": 0.4562253952026367,
"learning_rate": 3e-05,
"loss": 2.5171,
"step": 317
},
{
"epoch": 0.09811786485652577,
"grad_norm": 0.5021069049835205,
"learning_rate": 3e-05,
"loss": 2.4173,
"step": 318
},
{
"epoch": 0.09842641160135761,
"grad_norm": 0.664849042892456,
"learning_rate": 3e-05,
"loss": 3.1166,
"step": 319
},
{
"epoch": 0.09873495834618945,
"grad_norm": 0.45309123396873474,
"learning_rate": 3e-05,
"loss": 2.7596,
"step": 320
},
{
"epoch": 0.09904350509102129,
"grad_norm": 0.7038612365722656,
"learning_rate": 3e-05,
"loss": 2.7522,
"step": 321
},
{
"epoch": 0.09935205183585313,
"grad_norm": 1.059171199798584,
"learning_rate": 3e-05,
"loss": 3.175,
"step": 322
},
{
"epoch": 0.09966059858068498,
"grad_norm": 0.5896137356758118,
"learning_rate": 3e-05,
"loss": 2.9216,
"step": 323
},
{
"epoch": 0.09996914532551682,
"grad_norm": 0.637228786945343,
"learning_rate": 3e-05,
"loss": 2.8087,
"step": 324
},
{
"epoch": 0.10027769207034866,
"grad_norm": 0.8929901123046875,
"learning_rate": 3e-05,
"loss": 2.948,
"step": 325
},
{
"epoch": 0.1005862388151805,
"grad_norm": 0.48769569396972656,
"learning_rate": 3e-05,
"loss": 2.7344,
"step": 326
},
{
"epoch": 0.10089478556001234,
"grad_norm": 0.507175862789154,
"learning_rate": 3e-05,
"loss": 2.5428,
"step": 327
},
{
"epoch": 0.10120333230484418,
"grad_norm": 1.1506534814834595,
"learning_rate": 3e-05,
"loss": 2.9784,
"step": 328
},
{
"epoch": 0.10151187904967603,
"grad_norm": 0.8921215534210205,
"learning_rate": 3e-05,
"loss": 3.1315,
"step": 329
},
{
"epoch": 0.10182042579450787,
"grad_norm": 0.9674636125564575,
"learning_rate": 3e-05,
"loss": 2.9945,
"step": 330
},
{
"epoch": 0.10212897253933971,
"grad_norm": 0.7912929058074951,
"learning_rate": 3e-05,
"loss": 2.7517,
"step": 331
},
{
"epoch": 0.10243751928417155,
"grad_norm": 0.6095521450042725,
"learning_rate": 3e-05,
"loss": 3.4603,
"step": 332
},
{
"epoch": 0.1027460660290034,
"grad_norm": 0.3913911283016205,
"learning_rate": 3e-05,
"loss": 2.3178,
"step": 333
},
{
"epoch": 0.10305461277383524,
"grad_norm": 0.5703157782554626,
"learning_rate": 3e-05,
"loss": 2.7245,
"step": 334
},
{
"epoch": 0.10336315951866708,
"grad_norm": 0.4117796719074249,
"learning_rate": 3e-05,
"loss": 2.2675,
"step": 335
},
{
"epoch": 0.10367170626349892,
"grad_norm": 0.4819619357585907,
"learning_rate": 3e-05,
"loss": 2.5655,
"step": 336
},
{
"epoch": 0.10398025300833076,
"grad_norm": 0.579757809638977,
"learning_rate": 3e-05,
"loss": 2.8346,
"step": 337
},
{
"epoch": 0.1042887997531626,
"grad_norm": 0.5951722860336304,
"learning_rate": 3e-05,
"loss": 3.3011,
"step": 338
},
{
"epoch": 0.10459734649799445,
"grad_norm": 0.37846288084983826,
"learning_rate": 3e-05,
"loss": 2.4172,
"step": 339
},
{
"epoch": 0.10490589324282629,
"grad_norm": 0.5596647262573242,
"learning_rate": 3e-05,
"loss": 2.8481,
"step": 340
},
{
"epoch": 0.10521443998765813,
"grad_norm": 0.4432225227355957,
"learning_rate": 3e-05,
"loss": 2.9247,
"step": 341
},
{
"epoch": 0.10552298673248997,
"grad_norm": 0.5610328316688538,
"learning_rate": 3e-05,
"loss": 3.0414,
"step": 342
},
{
"epoch": 0.10583153347732181,
"grad_norm": 0.6313160061836243,
"learning_rate": 3e-05,
"loss": 2.7949,
"step": 343
},
{
"epoch": 0.10614008022215365,
"grad_norm": 0.5100327730178833,
"learning_rate": 3e-05,
"loss": 2.9491,
"step": 344
},
{
"epoch": 0.1064486269669855,
"grad_norm": 0.4908035099506378,
"learning_rate": 3e-05,
"loss": 2.727,
"step": 345
},
{
"epoch": 0.10675717371181734,
"grad_norm": 0.5060238242149353,
"learning_rate": 3e-05,
"loss": 2.4032,
"step": 346
},
{
"epoch": 0.10706572045664918,
"grad_norm": 0.5218526124954224,
"learning_rate": 3e-05,
"loss": 2.8393,
"step": 347
},
{
"epoch": 0.10737426720148102,
"grad_norm": 0.45432546734809875,
"learning_rate": 3e-05,
"loss": 2.3492,
"step": 348
},
{
"epoch": 0.10768281394631286,
"grad_norm": 0.39769670367240906,
"learning_rate": 3e-05,
"loss": 2.4955,
"step": 349
},
{
"epoch": 0.1079913606911447,
"grad_norm": 0.4049716591835022,
"learning_rate": 3e-05,
"loss": 2.5978,
"step": 350
},
{
"epoch": 0.10829990743597655,
"grad_norm": 0.43709230422973633,
"learning_rate": 3e-05,
"loss": 2.4538,
"step": 351
},
{
"epoch": 0.10860845418080839,
"grad_norm": 0.6582475900650024,
"learning_rate": 3e-05,
"loss": 2.7746,
"step": 352
},
{
"epoch": 0.10891700092564023,
"grad_norm": 0.5041898488998413,
"learning_rate": 3e-05,
"loss": 2.8402,
"step": 353
},
{
"epoch": 0.10922554767047207,
"grad_norm": 0.4751797318458557,
"learning_rate": 3e-05,
"loss": 2.5371,
"step": 354
},
{
"epoch": 0.10953409441530391,
"grad_norm": 0.8427959084510803,
"learning_rate": 3e-05,
"loss": 2.6777,
"step": 355
},
{
"epoch": 0.10984264116013576,
"grad_norm": 0.41446399688720703,
"learning_rate": 3e-05,
"loss": 2.5396,
"step": 356
},
{
"epoch": 0.1101511879049676,
"grad_norm": 0.6712360978126526,
"learning_rate": 3e-05,
"loss": 3.3444,
"step": 357
},
{
"epoch": 0.11045973464979944,
"grad_norm": 0.6713085174560547,
"learning_rate": 3e-05,
"loss": 3.2281,
"step": 358
},
{
"epoch": 0.11076828139463128,
"grad_norm": 0.9303755164146423,
"learning_rate": 3e-05,
"loss": 3.2125,
"step": 359
},
{
"epoch": 0.11107682813946312,
"grad_norm": 0.4587060511112213,
"learning_rate": 3e-05,
"loss": 2.786,
"step": 360
},
{
"epoch": 0.11138537488429497,
"grad_norm": 0.7472328543663025,
"learning_rate": 3e-05,
"loss": 2.8521,
"step": 361
},
{
"epoch": 0.11169392162912681,
"grad_norm": 0.502673864364624,
"learning_rate": 3e-05,
"loss": 2.7606,
"step": 362
},
{
"epoch": 0.11200246837395865,
"grad_norm": 0.631190836429596,
"learning_rate": 3e-05,
"loss": 2.9696,
"step": 363
},
{
"epoch": 0.11231101511879049,
"grad_norm": 0.6717808842658997,
"learning_rate": 3e-05,
"loss": 3.1333,
"step": 364
},
{
"epoch": 0.11261956186362233,
"grad_norm": 0.8173141479492188,
"learning_rate": 3e-05,
"loss": 3.5001,
"step": 365
},
{
"epoch": 0.11292810860845418,
"grad_norm": 0.5130720138549805,
"learning_rate": 3e-05,
"loss": 3.0127,
"step": 366
},
{
"epoch": 0.11323665535328602,
"grad_norm": 0.5507920980453491,
"learning_rate": 3e-05,
"loss": 2.6635,
"step": 367
},
{
"epoch": 0.11354520209811786,
"grad_norm": 0.5814347267150879,
"learning_rate": 3e-05,
"loss": 2.5873,
"step": 368
},
{
"epoch": 0.1138537488429497,
"grad_norm": 0.4552783668041229,
"learning_rate": 3e-05,
"loss": 2.2747,
"step": 369
},
{
"epoch": 0.11416229558778154,
"grad_norm": 0.48348626494407654,
"learning_rate": 3e-05,
"loss": 2.5721,
"step": 370
},
{
"epoch": 0.11447084233261338,
"grad_norm": 0.7298465967178345,
"learning_rate": 3e-05,
"loss": 3.0346,
"step": 371
},
{
"epoch": 0.11477938907744523,
"grad_norm": 0.4409029483795166,
"learning_rate": 3e-05,
"loss": 2.7849,
"step": 372
},
{
"epoch": 0.11508793582227707,
"grad_norm": 0.47219765186309814,
"learning_rate": 3e-05,
"loss": 2.7278,
"step": 373
},
{
"epoch": 0.11539648256710892,
"grad_norm": 0.6541340947151184,
"learning_rate": 3e-05,
"loss": 2.9001,
"step": 374
},
{
"epoch": 0.11570502931194077,
"grad_norm": 0.696343183517456,
"learning_rate": 3e-05,
"loss": 2.6636,
"step": 375
},
{
"epoch": 0.11601357605677261,
"grad_norm": 0.8152115941047668,
"learning_rate": 3e-05,
"loss": 2.7692,
"step": 376
},
{
"epoch": 0.11632212280160445,
"grad_norm": 0.5533241033554077,
"learning_rate": 3e-05,
"loss": 2.6548,
"step": 377
},
{
"epoch": 0.11663066954643629,
"grad_norm": 0.7412006855010986,
"learning_rate": 3e-05,
"loss": 2.847,
"step": 378
},
{
"epoch": 0.11693921629126813,
"grad_norm": 0.3919863700866699,
"learning_rate": 3e-05,
"loss": 2.4736,
"step": 379
},
{
"epoch": 0.11724776303609998,
"grad_norm": 1.019760251045227,
"learning_rate": 3e-05,
"loss": 3.2336,
"step": 380
},
{
"epoch": 0.11755630978093182,
"grad_norm": 0.6420919895172119,
"learning_rate": 3e-05,
"loss": 2.887,
"step": 381
},
{
"epoch": 0.11786485652576366,
"grad_norm": 0.3596365749835968,
"learning_rate": 3e-05,
"loss": 2.2894,
"step": 382
},
{
"epoch": 0.1181734032705955,
"grad_norm": 0.5054413676261902,
"learning_rate": 3e-05,
"loss": 2.1737,
"step": 383
},
{
"epoch": 0.11848195001542734,
"grad_norm": 0.6912227869033813,
"learning_rate": 3e-05,
"loss": 2.8684,
"step": 384
},
{
"epoch": 0.11879049676025918,
"grad_norm": 0.5603222846984863,
"learning_rate": 3e-05,
"loss": 2.9749,
"step": 385
},
{
"epoch": 0.11909904350509103,
"grad_norm": 0.4751299321651459,
"learning_rate": 3e-05,
"loss": 2.743,
"step": 386
},
{
"epoch": 0.11940759024992287,
"grad_norm": 0.9485656023025513,
"learning_rate": 3e-05,
"loss": 3.1394,
"step": 387
},
{
"epoch": 0.11971613699475471,
"grad_norm": 0.528243362903595,
"learning_rate": 3e-05,
"loss": 3.0556,
"step": 388
},
{
"epoch": 0.12002468373958655,
"grad_norm": 0.48346731066703796,
"learning_rate": 3e-05,
"loss": 2.92,
"step": 389
},
{
"epoch": 0.1203332304844184,
"grad_norm": 0.8272796869277954,
"learning_rate": 3e-05,
"loss": 2.78,
"step": 390
},
{
"epoch": 0.12064177722925024,
"grad_norm": 0.5784688591957092,
"learning_rate": 3e-05,
"loss": 2.7566,
"step": 391
},
{
"epoch": 0.12095032397408208,
"grad_norm": 0.5071232914924622,
"learning_rate": 3e-05,
"loss": 2.7876,
"step": 392
},
{
"epoch": 0.12125887071891392,
"grad_norm": 0.4624871015548706,
"learning_rate": 3e-05,
"loss": 2.6045,
"step": 393
},
{
"epoch": 0.12156741746374576,
"grad_norm": 0.7949879169464111,
"learning_rate": 3e-05,
"loss": 3.0567,
"step": 394
},
{
"epoch": 0.1218759642085776,
"grad_norm": 0.4112931191921234,
"learning_rate": 3e-05,
"loss": 2.5882,
"step": 395
},
{
"epoch": 0.12218451095340944,
"grad_norm": 0.508385956287384,
"learning_rate": 3e-05,
"loss": 2.8354,
"step": 396
},
{
"epoch": 0.12249305769824129,
"grad_norm": 0.571725606918335,
"learning_rate": 3e-05,
"loss": 3.1637,
"step": 397
},
{
"epoch": 0.12280160444307313,
"grad_norm": 0.4010452628135681,
"learning_rate": 3e-05,
"loss": 2.2202,
"step": 398
},
{
"epoch": 0.12311015118790497,
"grad_norm": 0.6162316203117371,
"learning_rate": 3e-05,
"loss": 3.0857,
"step": 399
},
{
"epoch": 0.12341869793273681,
"grad_norm": 0.5714832544326782,
"learning_rate": 3e-05,
"loss": 2.6672,
"step": 400
},
{
"epoch": 0.12372724467756865,
"grad_norm": 0.7332398295402527,
"learning_rate": 3e-05,
"loss": 2.6827,
"step": 401
},
{
"epoch": 0.1240357914224005,
"grad_norm": 0.8438281416893005,
"learning_rate": 3e-05,
"loss": 3.2429,
"step": 402
},
{
"epoch": 0.12434433816723234,
"grad_norm": 0.5126581192016602,
"learning_rate": 3e-05,
"loss": 2.7263,
"step": 403
},
{
"epoch": 0.12465288491206418,
"grad_norm": 1.201810598373413,
"learning_rate": 3e-05,
"loss": 2.9671,
"step": 404
},
{
"epoch": 0.12496143165689602,
"grad_norm": 0.5391373634338379,
"learning_rate": 3e-05,
"loss": 2.596,
"step": 405
},
{
"epoch": 0.12526997840172785,
"grad_norm": 0.49124547839164734,
"learning_rate": 3e-05,
"loss": 2.772,
"step": 406
},
{
"epoch": 0.1255785251465597,
"grad_norm": 0.6671253442764282,
"learning_rate": 3e-05,
"loss": 2.4786,
"step": 407
},
{
"epoch": 0.12588707189139153,
"grad_norm": 0.671453058719635,
"learning_rate": 3e-05,
"loss": 2.7801,
"step": 408
},
{
"epoch": 0.1261956186362234,
"grad_norm": 0.5589339137077332,
"learning_rate": 3e-05,
"loss": 2.4786,
"step": 409
},
{
"epoch": 0.12650416538105522,
"grad_norm": 0.48147571086883545,
"learning_rate": 3e-05,
"loss": 2.5111,
"step": 410
},
{
"epoch": 0.12681271212588707,
"grad_norm": 0.7984548807144165,
"learning_rate": 3e-05,
"loss": 3.3761,
"step": 411
},
{
"epoch": 0.1271212588707189,
"grad_norm": 0.4393971264362335,
"learning_rate": 3e-05,
"loss": 2.6791,
"step": 412
},
{
"epoch": 0.12742980561555076,
"grad_norm": 0.3642028868198395,
"learning_rate": 3e-05,
"loss": 2.3538,
"step": 413
},
{
"epoch": 0.12773835236038258,
"grad_norm": 0.4370051324367523,
"learning_rate": 3e-05,
"loss": 2.6664,
"step": 414
},
{
"epoch": 0.12804689910521444,
"grad_norm": 0.4502509534358978,
"learning_rate": 3e-05,
"loss": 2.4552,
"step": 415
},
{
"epoch": 0.1283554458500463,
"grad_norm": 0.8213755488395691,
"learning_rate": 3e-05,
"loss": 3.2743,
"step": 416
},
{
"epoch": 0.12866399259487812,
"grad_norm": 0.3885883390903473,
"learning_rate": 3e-05,
"loss": 2.625,
"step": 417
},
{
"epoch": 0.12897253933970998,
"grad_norm": 0.672049343585968,
"learning_rate": 3e-05,
"loss": 2.5793,
"step": 418
},
{
"epoch": 0.1292810860845418,
"grad_norm": 0.8375455141067505,
"learning_rate": 3e-05,
"loss": 3.2976,
"step": 419
},
{
"epoch": 0.12958963282937366,
"grad_norm": 0.29910895228385925,
"learning_rate": 3e-05,
"loss": 2.1132,
"step": 420
},
{
"epoch": 0.1298981795742055,
"grad_norm": 0.5657820701599121,
"learning_rate": 3e-05,
"loss": 3.0597,
"step": 421
},
{
"epoch": 0.13020672631903735,
"grad_norm": 0.5639760494232178,
"learning_rate": 3e-05,
"loss": 2.8884,
"step": 422
},
{
"epoch": 0.13051527306386917,
"grad_norm": 0.567143976688385,
"learning_rate": 3e-05,
"loss": 3.3492,
"step": 423
},
{
"epoch": 0.13082381980870103,
"grad_norm": 0.42509710788726807,
"learning_rate": 3e-05,
"loss": 2.754,
"step": 424
},
{
"epoch": 0.13113236655353286,
"grad_norm": 0.5575029850006104,
"learning_rate": 3e-05,
"loss": 2.7359,
"step": 425
},
{
"epoch": 0.13144091329836471,
"grad_norm": 0.577087938785553,
"learning_rate": 3e-05,
"loss": 2.4267,
"step": 426
},
{
"epoch": 0.13174946004319654,
"grad_norm": 0.6447242498397827,
"learning_rate": 3e-05,
"loss": 2.7924,
"step": 427
},
{
"epoch": 0.1320580067880284,
"grad_norm": 0.5749005675315857,
"learning_rate": 3e-05,
"loss": 2.9875,
"step": 428
},
{
"epoch": 0.13236655353286023,
"grad_norm": 0.5711660385131836,
"learning_rate": 3e-05,
"loss": 2.7619,
"step": 429
},
{
"epoch": 0.13267510027769208,
"grad_norm": 0.9919552803039551,
"learning_rate": 3e-05,
"loss": 3.5351,
"step": 430
},
{
"epoch": 0.1329836470225239,
"grad_norm": 0.5573298335075378,
"learning_rate": 3e-05,
"loss": 2.6573,
"step": 431
},
{
"epoch": 0.13329219376735577,
"grad_norm": 0.6087166666984558,
"learning_rate": 3e-05,
"loss": 2.871,
"step": 432
},
{
"epoch": 0.1336007405121876,
"grad_norm": 0.9867172241210938,
"learning_rate": 3e-05,
"loss": 2.6612,
"step": 433
},
{
"epoch": 0.13390928725701945,
"grad_norm": 0.4709426760673523,
"learning_rate": 3e-05,
"loss": 2.494,
"step": 434
},
{
"epoch": 0.13421783400185128,
"grad_norm": 0.8016806244850159,
"learning_rate": 3e-05,
"loss": 3.5046,
"step": 435
},
{
"epoch": 0.13452638074668313,
"grad_norm": 0.8725690245628357,
"learning_rate": 3e-05,
"loss": 3.0465,
"step": 436
},
{
"epoch": 0.13483492749151496,
"grad_norm": 0.4316865801811218,
"learning_rate": 3e-05,
"loss": 2.5318,
"step": 437
},
{
"epoch": 0.13514347423634682,
"grad_norm": 0.5138392448425293,
"learning_rate": 3e-05,
"loss": 2.2298,
"step": 438
},
{
"epoch": 0.13545202098117864,
"grad_norm": 0.5295316576957703,
"learning_rate": 3e-05,
"loss": 2.3288,
"step": 439
},
{
"epoch": 0.1357605677260105,
"grad_norm": 0.3984488248825073,
"learning_rate": 3e-05,
"loss": 2.3586,
"step": 440
},
{
"epoch": 0.13606911447084233,
"grad_norm": 0.8097387552261353,
"learning_rate": 3e-05,
"loss": 3.2978,
"step": 441
},
{
"epoch": 0.13637766121567418,
"grad_norm": 0.6348075270652771,
"learning_rate": 3e-05,
"loss": 2.7686,
"step": 442
},
{
"epoch": 0.136686207960506,
"grad_norm": 0.6103842854499817,
"learning_rate": 3e-05,
"loss": 2.8613,
"step": 443
},
{
"epoch": 0.13699475470533787,
"grad_norm": 0.5461227297782898,
"learning_rate": 3e-05,
"loss": 2.8534,
"step": 444
},
{
"epoch": 0.1373033014501697,
"grad_norm": 0.6284303069114685,
"learning_rate": 3e-05,
"loss": 2.6065,
"step": 445
},
{
"epoch": 0.13761184819500155,
"grad_norm": 0.3893953561782837,
"learning_rate": 3e-05,
"loss": 2.599,
"step": 446
},
{
"epoch": 0.13792039493983338,
"grad_norm": 0.34796831011772156,
"learning_rate": 3e-05,
"loss": 2.2519,
"step": 447
},
{
"epoch": 0.13822894168466524,
"grad_norm": 0.4476280212402344,
"learning_rate": 3e-05,
"loss": 2.4558,
"step": 448
},
{
"epoch": 0.13853748842949706,
"grad_norm": 0.5699636936187744,
"learning_rate": 3e-05,
"loss": 2.6878,
"step": 449
},
{
"epoch": 0.13884603517432892,
"grad_norm": 0.2940067648887634,
"learning_rate": 3e-05,
"loss": 2.281,
"step": 450
},
{
"epoch": 0.13915458191916075,
"grad_norm": 0.9075848460197449,
"learning_rate": 3e-05,
"loss": 3.2938,
"step": 451
},
{
"epoch": 0.1394631286639926,
"grad_norm": 0.4113154113292694,
"learning_rate": 3e-05,
"loss": 2.5557,
"step": 452
},
{
"epoch": 0.13977167540882443,
"grad_norm": 0.5481420755386353,
"learning_rate": 3e-05,
"loss": 2.5868,
"step": 453
},
{
"epoch": 0.1400802221536563,
"grad_norm": 0.4203355312347412,
"learning_rate": 3e-05,
"loss": 2.5495,
"step": 454
},
{
"epoch": 0.14038876889848811,
"grad_norm": 0.40559303760528564,
"learning_rate": 3e-05,
"loss": 2.5116,
"step": 455
},
{
"epoch": 0.14069731564331997,
"grad_norm": 0.5221837162971497,
"learning_rate": 3e-05,
"loss": 2.8499,
"step": 456
},
{
"epoch": 0.1410058623881518,
"grad_norm": 0.48368218541145325,
"learning_rate": 3e-05,
"loss": 2.6947,
"step": 457
},
{
"epoch": 0.14131440913298365,
"grad_norm": 0.4436626732349396,
"learning_rate": 3e-05,
"loss": 2.5644,
"step": 458
},
{
"epoch": 0.14162295587781548,
"grad_norm": 0.5758005380630493,
"learning_rate": 3e-05,
"loss": 2.5727,
"step": 459
},
{
"epoch": 0.14193150262264734,
"grad_norm": 0.2992756962776184,
"learning_rate": 3e-05,
"loss": 2.1367,
"step": 460
},
{
"epoch": 0.14224004936747917,
"grad_norm": 0.4820541739463806,
"learning_rate": 3e-05,
"loss": 2.8957,
"step": 461
},
{
"epoch": 0.14254859611231102,
"grad_norm": 0.6373788118362427,
"learning_rate": 3e-05,
"loss": 2.9102,
"step": 462
},
{
"epoch": 0.14285714285714285,
"grad_norm": 0.49387943744659424,
"learning_rate": 3e-05,
"loss": 2.9304,
"step": 463
},
{
"epoch": 0.1431656896019747,
"grad_norm": 0.3972255289554596,
"learning_rate": 3e-05,
"loss": 2.5099,
"step": 464
},
{
"epoch": 0.14347423634680653,
"grad_norm": 0.49275124073028564,
"learning_rate": 3e-05,
"loss": 2.6418,
"step": 465
},
{
"epoch": 0.1437827830916384,
"grad_norm": 0.39669597148895264,
"learning_rate": 3e-05,
"loss": 2.3448,
"step": 466
},
{
"epoch": 0.14409132983647022,
"grad_norm": 0.6432366967201233,
"learning_rate": 3e-05,
"loss": 2.9027,
"step": 467
},
{
"epoch": 0.14439987658130207,
"grad_norm": 0.43117955327033997,
"learning_rate": 3e-05,
"loss": 2.7841,
"step": 468
},
{
"epoch": 0.1447084233261339,
"grad_norm": 0.4256863296031952,
"learning_rate": 3e-05,
"loss": 2.5825,
"step": 469
},
{
"epoch": 0.14501697007096576,
"grad_norm": 0.43717724084854126,
"learning_rate": 3e-05,
"loss": 2.6433,
"step": 470
},
{
"epoch": 0.14532551681579758,
"grad_norm": 0.759493887424469,
"learning_rate": 3e-05,
"loss": 2.7323,
"step": 471
},
{
"epoch": 0.14563406356062944,
"grad_norm": 0.33263617753982544,
"learning_rate": 3e-05,
"loss": 2.417,
"step": 472
},
{
"epoch": 0.14594261030546127,
"grad_norm": 0.34977760910987854,
"learning_rate": 3e-05,
"loss": 2.3743,
"step": 473
},
{
"epoch": 0.14625115705029312,
"grad_norm": 0.5000836253166199,
"learning_rate": 3e-05,
"loss": 2.5705,
"step": 474
},
{
"epoch": 0.14655970379512495,
"grad_norm": 1.2762246131896973,
"learning_rate": 3e-05,
"loss": 3.133,
"step": 475
},
{
"epoch": 0.1468682505399568,
"grad_norm": 0.6496342420578003,
"learning_rate": 3e-05,
"loss": 2.9256,
"step": 476
},
{
"epoch": 0.14717679728478864,
"grad_norm": 0.716373085975647,
"learning_rate": 3e-05,
"loss": 2.5074,
"step": 477
},
{
"epoch": 0.1474853440296205,
"grad_norm": 0.7680580019950867,
"learning_rate": 3e-05,
"loss": 2.8563,
"step": 478
},
{
"epoch": 0.14779389077445232,
"grad_norm": 0.4760282337665558,
"learning_rate": 3e-05,
"loss": 2.4567,
"step": 479
},
{
"epoch": 0.14810243751928417,
"grad_norm": 0.7085632681846619,
"learning_rate": 3e-05,
"loss": 3.3149,
"step": 480
},
{
"epoch": 0.148410984264116,
"grad_norm": 0.48510226607322693,
"learning_rate": 3e-05,
"loss": 2.9825,
"step": 481
},
{
"epoch": 0.14871953100894786,
"grad_norm": 0.5194237232208252,
"learning_rate": 3e-05,
"loss": 2.8049,
"step": 482
},
{
"epoch": 0.1490280777537797,
"grad_norm": 0.6330525279045105,
"learning_rate": 3e-05,
"loss": 3.0564,
"step": 483
},
{
"epoch": 0.14933662449861154,
"grad_norm": 0.38547223806381226,
"learning_rate": 3e-05,
"loss": 2.7878,
"step": 484
},
{
"epoch": 0.14964517124344337,
"grad_norm": 0.5610853433609009,
"learning_rate": 3e-05,
"loss": 3.0895,
"step": 485
},
{
"epoch": 0.14995371798827523,
"grad_norm": 0.6889973878860474,
"learning_rate": 3e-05,
"loss": 3.396,
"step": 486
},
{
"epoch": 0.15026226473310705,
"grad_norm": 0.374765008687973,
"learning_rate": 3e-05,
"loss": 2.2703,
"step": 487
},
{
"epoch": 0.1505708114779389,
"grad_norm": 0.5405288338661194,
"learning_rate": 3e-05,
"loss": 2.8522,
"step": 488
},
{
"epoch": 0.15087935822277074,
"grad_norm": 0.4556944668292999,
"learning_rate": 3e-05,
"loss": 2.9866,
"step": 489
},
{
"epoch": 0.1511879049676026,
"grad_norm": 0.326506108045578,
"learning_rate": 3e-05,
"loss": 2.2067,
"step": 490
},
{
"epoch": 0.15149645171243442,
"grad_norm": 0.3182726204395294,
"learning_rate": 3e-05,
"loss": 2.134,
"step": 491
},
{
"epoch": 0.15180499845726628,
"grad_norm": 0.3820968568325043,
"learning_rate": 3e-05,
"loss": 2.5072,
"step": 492
},
{
"epoch": 0.1521135452020981,
"grad_norm": 0.43182340264320374,
"learning_rate": 3e-05,
"loss": 2.5903,
"step": 493
},
{
"epoch": 0.15242209194692996,
"grad_norm": 0.614032506942749,
"learning_rate": 3e-05,
"loss": 3.0159,
"step": 494
},
{
"epoch": 0.1527306386917618,
"grad_norm": 0.34464317560195923,
"learning_rate": 3e-05,
"loss": 2.5565,
"step": 495
},
{
"epoch": 0.15303918543659364,
"grad_norm": 0.7838470935821533,
"learning_rate": 3e-05,
"loss": 3.299,
"step": 496
},
{
"epoch": 0.15334773218142547,
"grad_norm": 0.7292729616165161,
"learning_rate": 3e-05,
"loss": 3.1353,
"step": 497
},
{
"epoch": 0.15365627892625733,
"grad_norm": 0.6598738431930542,
"learning_rate": 3e-05,
"loss": 3.0993,
"step": 498
},
{
"epoch": 0.15396482567108918,
"grad_norm": 0.3278651535511017,
"learning_rate": 3e-05,
"loss": 2.4751,
"step": 499
},
{
"epoch": 0.154273372415921,
"grad_norm": 0.3951056897640228,
"learning_rate": 3e-05,
"loss": 2.3225,
"step": 500
},
{
"epoch": 0.15458191916075287,
"grad_norm": 0.5355321764945984,
"learning_rate": 3e-05,
"loss": 2.6751,
"step": 501
},
{
"epoch": 0.1548904659055847,
"grad_norm": 0.4398941993713379,
"learning_rate": 3e-05,
"loss": 2.3192,
"step": 502
},
{
"epoch": 0.15519901265041655,
"grad_norm": 0.9720609784126282,
"learning_rate": 3e-05,
"loss": 3.1218,
"step": 503
},
{
"epoch": 0.15550755939524838,
"grad_norm": 0.5093293190002441,
"learning_rate": 3e-05,
"loss": 2.6208,
"step": 504
},
{
"epoch": 0.15581610614008023,
"grad_norm": 0.5958328247070312,
"learning_rate": 3e-05,
"loss": 2.9366,
"step": 505
},
{
"epoch": 0.15612465288491206,
"grad_norm": 1.0065058469772339,
"learning_rate": 3e-05,
"loss": 3.086,
"step": 506
},
{
"epoch": 0.15643319962974392,
"grad_norm": 0.9630736708641052,
"learning_rate": 3e-05,
"loss": 2.7257,
"step": 507
},
{
"epoch": 0.15674174637457575,
"grad_norm": 0.730649471282959,
"learning_rate": 3e-05,
"loss": 2.6617,
"step": 508
},
{
"epoch": 0.1570502931194076,
"grad_norm": 0.9365665316581726,
"learning_rate": 3e-05,
"loss": 3.1122,
"step": 509
},
{
"epoch": 0.15735883986423943,
"grad_norm": 0.6446232795715332,
"learning_rate": 3e-05,
"loss": 2.7233,
"step": 510
},
{
"epoch": 0.15766738660907129,
"grad_norm": 0.37989816069602966,
"learning_rate": 3e-05,
"loss": 2.6314,
"step": 511
},
{
"epoch": 0.15797593335390311,
"grad_norm": 0.7095012068748474,
"learning_rate": 3e-05,
"loss": 3.1006,
"step": 512
},
{
"epoch": 0.15828448009873497,
"grad_norm": 0.7077836990356445,
"learning_rate": 3e-05,
"loss": 2.7112,
"step": 513
},
{
"epoch": 0.1585930268435668,
"grad_norm": 0.6317113637924194,
"learning_rate": 3e-05,
"loss": 2.8544,
"step": 514
},
{
"epoch": 0.15890157358839865,
"grad_norm": 0.53568035364151,
"learning_rate": 3e-05,
"loss": 2.8805,
"step": 515
},
{
"epoch": 0.15921012033323048,
"grad_norm": 0.42564329504966736,
"learning_rate": 3e-05,
"loss": 2.4129,
"step": 516
},
{
"epoch": 0.15951866707806234,
"grad_norm": 0.8857656121253967,
"learning_rate": 3e-05,
"loss": 3.2094,
"step": 517
},
{
"epoch": 0.15982721382289417,
"grad_norm": 0.717786431312561,
"learning_rate": 3e-05,
"loss": 3.2728,
"step": 518
},
{
"epoch": 0.16013576056772602,
"grad_norm": 0.5533412098884583,
"learning_rate": 3e-05,
"loss": 3.1927,
"step": 519
},
{
"epoch": 0.16044430731255785,
"grad_norm": 0.5395249724388123,
"learning_rate": 3e-05,
"loss": 2.2287,
"step": 520
},
{
"epoch": 0.1607528540573897,
"grad_norm": 1.1015528440475464,
"learning_rate": 3e-05,
"loss": 3.1084,
"step": 521
},
{
"epoch": 0.16106140080222153,
"grad_norm": 0.4234045743942261,
"learning_rate": 3e-05,
"loss": 2.7718,
"step": 522
},
{
"epoch": 0.1613699475470534,
"grad_norm": 0.5807662606239319,
"learning_rate": 3e-05,
"loss": 2.5903,
"step": 523
},
{
"epoch": 0.16167849429188522,
"grad_norm": 0.6421242356300354,
"learning_rate": 3e-05,
"loss": 2.7322,
"step": 524
},
{
"epoch": 0.16198704103671707,
"grad_norm": 0.5576215386390686,
"learning_rate": 3e-05,
"loss": 2.4455,
"step": 525
},
{
"epoch": 0.1622955877815489,
"grad_norm": 0.3627455234527588,
"learning_rate": 3e-05,
"loss": 2.6403,
"step": 526
},
{
"epoch": 0.16260413452638076,
"grad_norm": 0.6139925122261047,
"learning_rate": 3e-05,
"loss": 2.7791,
"step": 527
},
{
"epoch": 0.16291268127121258,
"grad_norm": 0.475038081407547,
"learning_rate": 3e-05,
"loss": 2.3931,
"step": 528
},
{
"epoch": 0.16322122801604444,
"grad_norm": 0.5573770999908447,
"learning_rate": 3e-05,
"loss": 3.0576,
"step": 529
},
{
"epoch": 0.16352977476087627,
"grad_norm": 0.3340505361557007,
"learning_rate": 3e-05,
"loss": 2.427,
"step": 530
},
{
"epoch": 0.16383832150570812,
"grad_norm": 0.3796028792858124,
"learning_rate": 3e-05,
"loss": 2.623,
"step": 531
},
{
"epoch": 0.16414686825053995,
"grad_norm": 0.499763160943985,
"learning_rate": 3e-05,
"loss": 2.6086,
"step": 532
},
{
"epoch": 0.1644554149953718,
"grad_norm": 0.5351141691207886,
"learning_rate": 3e-05,
"loss": 2.773,
"step": 533
},
{
"epoch": 0.16476396174020363,
"grad_norm": 0.5103338956832886,
"learning_rate": 3e-05,
"loss": 2.8165,
"step": 534
},
{
"epoch": 0.1650725084850355,
"grad_norm": 0.49195244908332825,
"learning_rate": 3e-05,
"loss": 2.8805,
"step": 535
},
{
"epoch": 0.16538105522986732,
"grad_norm": 0.6390337347984314,
"learning_rate": 3e-05,
"loss": 2.556,
"step": 536
},
{
"epoch": 0.16568960197469917,
"grad_norm": 0.6301625370979309,
"learning_rate": 3e-05,
"loss": 2.8134,
"step": 537
},
{
"epoch": 0.165998148719531,
"grad_norm": 0.30376285314559937,
"learning_rate": 3e-05,
"loss": 2.1669,
"step": 538
},
{
"epoch": 0.16630669546436286,
"grad_norm": 0.4260924756526947,
"learning_rate": 3e-05,
"loss": 2.578,
"step": 539
},
{
"epoch": 0.16661524220919469,
"grad_norm": 0.9274812936782837,
"learning_rate": 3e-05,
"loss": 3.1846,
"step": 540
},
{
"epoch": 0.16692378895402654,
"grad_norm": 0.31101086735725403,
"learning_rate": 3e-05,
"loss": 2.3578,
"step": 541
},
{
"epoch": 0.16723233569885837,
"grad_norm": 0.4664568603038788,
"learning_rate": 3e-05,
"loss": 2.6042,
"step": 542
},
{
"epoch": 0.16754088244369023,
"grad_norm": 0.41750529408454895,
"learning_rate": 3e-05,
"loss": 2.707,
"step": 543
},
{
"epoch": 0.16784942918852205,
"grad_norm": 0.745094895362854,
"learning_rate": 3e-05,
"loss": 2.5031,
"step": 544
},
{
"epoch": 0.1681579759333539,
"grad_norm": 0.53835129737854,
"learning_rate": 3e-05,
"loss": 2.6455,
"step": 545
},
{
"epoch": 0.16846652267818574,
"grad_norm": 0.47485771775245667,
"learning_rate": 3e-05,
"loss": 2.8771,
"step": 546
},
{
"epoch": 0.1687750694230176,
"grad_norm": 0.427839457988739,
"learning_rate": 3e-05,
"loss": 2.4684,
"step": 547
},
{
"epoch": 0.16908361616784942,
"grad_norm": 0.459409236907959,
"learning_rate": 3e-05,
"loss": 2.834,
"step": 548
},
{
"epoch": 0.16939216291268128,
"grad_norm": 0.594539225101471,
"learning_rate": 3e-05,
"loss": 2.8524,
"step": 549
},
{
"epoch": 0.1697007096575131,
"grad_norm": 0.3290991485118866,
"learning_rate": 3e-05,
"loss": 2.4509,
"step": 550
},
{
"epoch": 0.17000925640234496,
"grad_norm": 0.6383848786354065,
"learning_rate": 3e-05,
"loss": 3.06,
"step": 551
},
{
"epoch": 0.1703178031471768,
"grad_norm": 0.4995775818824768,
"learning_rate": 3e-05,
"loss": 2.6543,
"step": 552
},
{
"epoch": 0.17062634989200864,
"grad_norm": 0.2920311391353607,
"learning_rate": 3e-05,
"loss": 2.3851,
"step": 553
},
{
"epoch": 0.17093489663684047,
"grad_norm": 0.37657687067985535,
"learning_rate": 3e-05,
"loss": 2.5885,
"step": 554
},
{
"epoch": 0.17124344338167233,
"grad_norm": 0.3489648997783661,
"learning_rate": 3e-05,
"loss": 2.3189,
"step": 555
},
{
"epoch": 0.17155199012650416,
"grad_norm": 0.5701825618743896,
"learning_rate": 3e-05,
"loss": 2.7273,
"step": 556
},
{
"epoch": 0.171860536871336,
"grad_norm": 0.4302188456058502,
"learning_rate": 3e-05,
"loss": 2.7104,
"step": 557
},
{
"epoch": 0.17216908361616784,
"grad_norm": 0.39768311381340027,
"learning_rate": 3e-05,
"loss": 2.7275,
"step": 558
},
{
"epoch": 0.1724776303609997,
"grad_norm": 0.3526521623134613,
"learning_rate": 3e-05,
"loss": 2.4574,
"step": 559
},
{
"epoch": 0.17278617710583152,
"grad_norm": 0.4580765962600708,
"learning_rate": 3e-05,
"loss": 2.6142,
"step": 560
},
{
"epoch": 0.17309472385066338,
"grad_norm": 0.6257379055023193,
"learning_rate": 3e-05,
"loss": 2.6212,
"step": 561
},
{
"epoch": 0.1734032705954952,
"grad_norm": 0.5225767493247986,
"learning_rate": 3e-05,
"loss": 3.0037,
"step": 562
},
{
"epoch": 0.17371181734032706,
"grad_norm": 0.3681781589984894,
"learning_rate": 3e-05,
"loss": 2.4477,
"step": 563
},
{
"epoch": 0.1740203640851589,
"grad_norm": 0.3147395849227905,
"learning_rate": 3e-05,
"loss": 2.291,
"step": 564
},
{
"epoch": 0.17432891082999075,
"grad_norm": 0.5599997639656067,
"learning_rate": 3e-05,
"loss": 2.761,
"step": 565
},
{
"epoch": 0.17463745757482257,
"grad_norm": 0.895828127861023,
"learning_rate": 3e-05,
"loss": 3.2657,
"step": 566
},
{
"epoch": 0.17494600431965443,
"grad_norm": 0.48360684514045715,
"learning_rate": 3e-05,
"loss": 3.0389,
"step": 567
},
{
"epoch": 0.17525455106448626,
"grad_norm": 0.4734765887260437,
"learning_rate": 3e-05,
"loss": 2.7359,
"step": 568
},
{
"epoch": 0.1755630978093181,
"grad_norm": 0.5472472310066223,
"learning_rate": 3e-05,
"loss": 2.5452,
"step": 569
},
{
"epoch": 0.17587164455414994,
"grad_norm": 0.5069577693939209,
"learning_rate": 3e-05,
"loss": 3.0763,
"step": 570
},
{
"epoch": 0.1761801912989818,
"grad_norm": 0.6229672431945801,
"learning_rate": 3e-05,
"loss": 3.2135,
"step": 571
},
{
"epoch": 0.17648873804381363,
"grad_norm": 0.5861048102378845,
"learning_rate": 3e-05,
"loss": 3.4906,
"step": 572
},
{
"epoch": 0.17679728478864548,
"grad_norm": 0.4805934429168701,
"learning_rate": 3e-05,
"loss": 2.8969,
"step": 573
},
{
"epoch": 0.1771058315334773,
"grad_norm": 0.6094735860824585,
"learning_rate": 3e-05,
"loss": 3.0226,
"step": 574
},
{
"epoch": 0.17741437827830916,
"grad_norm": 0.44033339619636536,
"learning_rate": 3e-05,
"loss": 2.4886,
"step": 575
},
{
"epoch": 0.177722925023141,
"grad_norm": 1.1240124702453613,
"learning_rate": 3e-05,
"loss": 3.4272,
"step": 576
},
{
"epoch": 0.17803147176797285,
"grad_norm": 0.4928964078426361,
"learning_rate": 3e-05,
"loss": 2.3991,
"step": 577
},
{
"epoch": 0.17834001851280468,
"grad_norm": 0.45760032534599304,
"learning_rate": 3e-05,
"loss": 2.5567,
"step": 578
},
{
"epoch": 0.17864856525763653,
"grad_norm": 0.46122652292251587,
"learning_rate": 3e-05,
"loss": 2.4686,
"step": 579
},
{
"epoch": 0.17895711200246836,
"grad_norm": 0.4241897761821747,
"learning_rate": 3e-05,
"loss": 2.3896,
"step": 580
},
{
"epoch": 0.17926565874730022,
"grad_norm": 0.7329486012458801,
"learning_rate": 3e-05,
"loss": 2.8377,
"step": 581
},
{
"epoch": 0.17957420549213207,
"grad_norm": 0.5408822894096375,
"learning_rate": 3e-05,
"loss": 2.895,
"step": 582
},
{
"epoch": 0.1798827522369639,
"grad_norm": 0.4442683756351471,
"learning_rate": 3e-05,
"loss": 2.4398,
"step": 583
},
{
"epoch": 0.18019129898179576,
"grad_norm": 0.7230685949325562,
"learning_rate": 3e-05,
"loss": 3.0208,
"step": 584
},
{
"epoch": 0.18049984572662758,
"grad_norm": 0.45991250872612,
"learning_rate": 3e-05,
"loss": 2.8429,
"step": 585
},
{
"epoch": 0.18080839247145944,
"grad_norm": 0.5567431449890137,
"learning_rate": 3e-05,
"loss": 3.0907,
"step": 586
},
{
"epoch": 0.18111693921629127,
"grad_norm": 0.39667779207229614,
"learning_rate": 3e-05,
"loss": 2.5474,
"step": 587
},
{
"epoch": 0.18142548596112312,
"grad_norm": 0.4259324371814728,
"learning_rate": 3e-05,
"loss": 2.937,
"step": 588
},
{
"epoch": 0.18173403270595495,
"grad_norm": 0.5309762358665466,
"learning_rate": 3e-05,
"loss": 2.904,
"step": 589
},
{
"epoch": 0.1820425794507868,
"grad_norm": 0.4646052420139313,
"learning_rate": 3e-05,
"loss": 2.7825,
"step": 590
},
{
"epoch": 0.18235112619561863,
"grad_norm": 0.5502670407295227,
"learning_rate": 3e-05,
"loss": 3.3798,
"step": 591
},
{
"epoch": 0.1826596729404505,
"grad_norm": 0.4678283929824829,
"learning_rate": 3e-05,
"loss": 2.7709,
"step": 592
},
{
"epoch": 0.18296821968528232,
"grad_norm": 0.44272318482398987,
"learning_rate": 3e-05,
"loss": 2.7197,
"step": 593
},
{
"epoch": 0.18327676643011417,
"grad_norm": 0.34605056047439575,
"learning_rate": 3e-05,
"loss": 2.6256,
"step": 594
},
{
"epoch": 0.183585313174946,
"grad_norm": 0.32907217741012573,
"learning_rate": 3e-05,
"loss": 2.183,
"step": 595
},
{
"epoch": 0.18389385991977786,
"grad_norm": 0.5004732608795166,
"learning_rate": 3e-05,
"loss": 2.4808,
"step": 596
},
{
"epoch": 0.18420240666460969,
"grad_norm": 0.4207105338573456,
"learning_rate": 3e-05,
"loss": 2.7112,
"step": 597
},
{
"epoch": 0.18451095340944154,
"grad_norm": 0.4427211284637451,
"learning_rate": 3e-05,
"loss": 2.7198,
"step": 598
},
{
"epoch": 0.18481950015427337,
"grad_norm": 0.3510686159133911,
"learning_rate": 3e-05,
"loss": 2.6319,
"step": 599
},
{
"epoch": 0.18512804689910523,
"grad_norm": 0.3347536623477936,
"learning_rate": 3e-05,
"loss": 2.479,
"step": 600
},
{
"epoch": 0.18543659364393705,
"grad_norm": 0.4486810266971588,
"learning_rate": 3e-05,
"loss": 2.7633,
"step": 601
},
{
"epoch": 0.1857451403887689,
"grad_norm": 0.7267583012580872,
"learning_rate": 3e-05,
"loss": 3.2443,
"step": 602
},
{
"epoch": 0.18605368713360074,
"grad_norm": 0.5485150814056396,
"learning_rate": 3e-05,
"loss": 3.0257,
"step": 603
},
{
"epoch": 0.1863622338784326,
"grad_norm": 0.6103752255439758,
"learning_rate": 3e-05,
"loss": 2.8336,
"step": 604
},
{
"epoch": 0.18667078062326442,
"grad_norm": 0.4755418002605438,
"learning_rate": 3e-05,
"loss": 2.7703,
"step": 605
},
{
"epoch": 0.18697932736809628,
"grad_norm": 0.523270308971405,
"learning_rate": 3e-05,
"loss": 2.792,
"step": 606
},
{
"epoch": 0.1872878741129281,
"grad_norm": 0.7174893021583557,
"learning_rate": 3e-05,
"loss": 3.1485,
"step": 607
},
{
"epoch": 0.18759642085775996,
"grad_norm": 0.4338546097278595,
"learning_rate": 3e-05,
"loss": 2.6676,
"step": 608
},
{
"epoch": 0.1879049676025918,
"grad_norm": 0.5212303996086121,
"learning_rate": 3e-05,
"loss": 2.5303,
"step": 609
},
{
"epoch": 0.18821351434742364,
"grad_norm": 0.4013713002204895,
"learning_rate": 3e-05,
"loss": 2.5814,
"step": 610
},
{
"epoch": 0.18852206109225547,
"grad_norm": 0.38565197587013245,
"learning_rate": 3e-05,
"loss": 2.5641,
"step": 611
},
{
"epoch": 0.18883060783708733,
"grad_norm": 0.5289196372032166,
"learning_rate": 3e-05,
"loss": 3.1017,
"step": 612
},
{
"epoch": 0.18913915458191916,
"grad_norm": 0.4261009097099304,
"learning_rate": 3e-05,
"loss": 2.7536,
"step": 613
},
{
"epoch": 0.189447701326751,
"grad_norm": 0.6289636492729187,
"learning_rate": 3e-05,
"loss": 2.5995,
"step": 614
},
{
"epoch": 0.18975624807158284,
"grad_norm": 0.3841557800769806,
"learning_rate": 3e-05,
"loss": 2.9889,
"step": 615
},
{
"epoch": 0.1900647948164147,
"grad_norm": 0.37914833426475525,
"learning_rate": 3e-05,
"loss": 2.4147,
"step": 616
},
{
"epoch": 0.19037334156124652,
"grad_norm": 0.5321159958839417,
"learning_rate": 3e-05,
"loss": 2.8679,
"step": 617
},
{
"epoch": 0.19068188830607838,
"grad_norm": 0.4842563271522522,
"learning_rate": 3e-05,
"loss": 2.7819,
"step": 618
},
{
"epoch": 0.1909904350509102,
"grad_norm": 0.35459670424461365,
"learning_rate": 3e-05,
"loss": 2.768,
"step": 619
},
{
"epoch": 0.19129898179574206,
"grad_norm": 0.6710373163223267,
"learning_rate": 3e-05,
"loss": 3.077,
"step": 620
},
{
"epoch": 0.1916075285405739,
"grad_norm": 0.4304696023464203,
"learning_rate": 3e-05,
"loss": 2.4043,
"step": 621
},
{
"epoch": 0.19191607528540575,
"grad_norm": 0.500830352306366,
"learning_rate": 3e-05,
"loss": 2.6387,
"step": 622
},
{
"epoch": 0.19222462203023757,
"grad_norm": 0.5076797008514404,
"learning_rate": 3e-05,
"loss": 2.3244,
"step": 623
},
{
"epoch": 0.19253316877506943,
"grad_norm": 0.30540212988853455,
"learning_rate": 3e-05,
"loss": 2.0853,
"step": 624
},
{
"epoch": 0.19284171551990126,
"grad_norm": 0.6184133887290955,
"learning_rate": 3e-05,
"loss": 2.6062,
"step": 625
},
{
"epoch": 0.1931502622647331,
"grad_norm": 0.4118046164512634,
"learning_rate": 3e-05,
"loss": 2.1961,
"step": 626
},
{
"epoch": 0.19345880900956494,
"grad_norm": 0.6084356904029846,
"learning_rate": 3e-05,
"loss": 2.8398,
"step": 627
},
{
"epoch": 0.1937673557543968,
"grad_norm": 0.44872429966926575,
"learning_rate": 3e-05,
"loss": 2.9109,
"step": 628
},
{
"epoch": 0.19407590249922863,
"grad_norm": 0.5909293293952942,
"learning_rate": 3e-05,
"loss": 2.4278,
"step": 629
},
{
"epoch": 0.19438444924406048,
"grad_norm": 0.5620918273925781,
"learning_rate": 3e-05,
"loss": 2.8395,
"step": 630
},
{
"epoch": 0.1946929959888923,
"grad_norm": 0.5947256088256836,
"learning_rate": 3e-05,
"loss": 3.1204,
"step": 631
},
{
"epoch": 0.19500154273372416,
"grad_norm": 0.42888760566711426,
"learning_rate": 3e-05,
"loss": 2.6668,
"step": 632
},
{
"epoch": 0.195310089478556,
"grad_norm": 0.8526038527488708,
"learning_rate": 3e-05,
"loss": 2.9337,
"step": 633
},
{
"epoch": 0.19561863622338785,
"grad_norm": 0.5663727521896362,
"learning_rate": 3e-05,
"loss": 3.103,
"step": 634
},
{
"epoch": 0.19592718296821968,
"grad_norm": 0.5431751012802124,
"learning_rate": 3e-05,
"loss": 2.6397,
"step": 635
},
{
"epoch": 0.19623572971305153,
"grad_norm": 0.5891326069831848,
"learning_rate": 3e-05,
"loss": 3.2095,
"step": 636
},
{
"epoch": 0.19654427645788336,
"grad_norm": 0.5660468339920044,
"learning_rate": 3e-05,
"loss": 2.9853,
"step": 637
},
{
"epoch": 0.19685282320271522,
"grad_norm": 0.3460230529308319,
"learning_rate": 3e-05,
"loss": 2.4755,
"step": 638
},
{
"epoch": 0.19716136994754704,
"grad_norm": 0.30621814727783203,
"learning_rate": 3e-05,
"loss": 2.5505,
"step": 639
},
{
"epoch": 0.1974699166923789,
"grad_norm": 0.4897528290748596,
"learning_rate": 3e-05,
"loss": 2.8266,
"step": 640
},
{
"epoch": 0.19777846343721073,
"grad_norm": 0.7390705347061157,
"learning_rate": 3e-05,
"loss": 3.0495,
"step": 641
},
{
"epoch": 0.19808701018204258,
"grad_norm": 0.403690904378891,
"learning_rate": 3e-05,
"loss": 2.6824,
"step": 642
},
{
"epoch": 0.1983955569268744,
"grad_norm": 0.6214193105697632,
"learning_rate": 3e-05,
"loss": 2.9677,
"step": 643
},
{
"epoch": 0.19870410367170627,
"grad_norm": 0.3992424011230469,
"learning_rate": 3e-05,
"loss": 2.7133,
"step": 644
},
{
"epoch": 0.1990126504165381,
"grad_norm": 0.5738241672515869,
"learning_rate": 3e-05,
"loss": 2.7717,
"step": 645
},
{
"epoch": 0.19932119716136995,
"grad_norm": 0.5687406659126282,
"learning_rate": 3e-05,
"loss": 3.1489,
"step": 646
},
{
"epoch": 0.19962974390620178,
"grad_norm": 0.44407641887664795,
"learning_rate": 3e-05,
"loss": 3.0794,
"step": 647
},
{
"epoch": 0.19993829065103363,
"grad_norm": 0.5712667107582092,
"learning_rate": 3e-05,
"loss": 2.7482,
"step": 648
},
{
"epoch": 0.20024683739586546,
"grad_norm": 0.3764342963695526,
"learning_rate": 3e-05,
"loss": 2.6002,
"step": 649
},
{
"epoch": 0.20055538414069732,
"grad_norm": 0.3133735954761505,
"learning_rate": 3e-05,
"loss": 2.3715,
"step": 650
},
{
"epoch": 0.20086393088552915,
"grad_norm": 0.5320349931716919,
"learning_rate": 3e-05,
"loss": 3.0982,
"step": 651
},
{
"epoch": 0.201172477630361,
"grad_norm": 0.498043030500412,
"learning_rate": 3e-05,
"loss": 3.0611,
"step": 652
},
{
"epoch": 0.20148102437519283,
"grad_norm": 0.611578106880188,
"learning_rate": 3e-05,
"loss": 3.2704,
"step": 653
},
{
"epoch": 0.20178957112002469,
"grad_norm": 0.43669798970222473,
"learning_rate": 3e-05,
"loss": 2.5288,
"step": 654
},
{
"epoch": 0.2020981178648565,
"grad_norm": 0.4220907688140869,
"learning_rate": 3e-05,
"loss": 2.8483,
"step": 655
},
{
"epoch": 0.20240666460968837,
"grad_norm": 0.46355968713760376,
"learning_rate": 3e-05,
"loss": 3.1131,
"step": 656
},
{
"epoch": 0.2027152113545202,
"grad_norm": 0.3843071758747101,
"learning_rate": 3e-05,
"loss": 2.6449,
"step": 657
},
{
"epoch": 0.20302375809935205,
"grad_norm": 0.3454858660697937,
"learning_rate": 3e-05,
"loss": 2.4922,
"step": 658
},
{
"epoch": 0.20333230484418388,
"grad_norm": 0.31287965178489685,
"learning_rate": 3e-05,
"loss": 2.5445,
"step": 659
},
{
"epoch": 0.20364085158901574,
"grad_norm": 0.49061527848243713,
"learning_rate": 3e-05,
"loss": 2.4979,
"step": 660
},
{
"epoch": 0.20394939833384756,
"grad_norm": 0.5406720638275146,
"learning_rate": 3e-05,
"loss": 2.7902,
"step": 661
},
{
"epoch": 0.20425794507867942,
"grad_norm": 0.5429649353027344,
"learning_rate": 3e-05,
"loss": 2.8121,
"step": 662
},
{
"epoch": 0.20456649182351125,
"grad_norm": 0.47672000527381897,
"learning_rate": 3e-05,
"loss": 3.1438,
"step": 663
},
{
"epoch": 0.2048750385683431,
"grad_norm": 0.3863488435745239,
"learning_rate": 3e-05,
"loss": 2.4956,
"step": 664
},
{
"epoch": 0.20518358531317496,
"grad_norm": 0.5311136245727539,
"learning_rate": 3e-05,
"loss": 2.404,
"step": 665
},
{
"epoch": 0.2054921320580068,
"grad_norm": 0.30825114250183105,
"learning_rate": 3e-05,
"loss": 2.2225,
"step": 666
},
{
"epoch": 0.20580067880283864,
"grad_norm": 0.4122128188610077,
"learning_rate": 3e-05,
"loss": 2.4823,
"step": 667
},
{
"epoch": 0.20610922554767047,
"grad_norm": 0.45997029542922974,
"learning_rate": 3e-05,
"loss": 2.456,
"step": 668
},
{
"epoch": 0.20641777229250233,
"grad_norm": 0.38074791431427,
"learning_rate": 3e-05,
"loss": 2.4485,
"step": 669
},
{
"epoch": 0.20672631903733416,
"grad_norm": 0.7642366886138916,
"learning_rate": 3e-05,
"loss": 3.2876,
"step": 670
},
{
"epoch": 0.207034865782166,
"grad_norm": 0.6110647320747375,
"learning_rate": 3e-05,
"loss": 2.901,
"step": 671
},
{
"epoch": 0.20734341252699784,
"grad_norm": 0.36458998918533325,
"learning_rate": 3e-05,
"loss": 2.2886,
"step": 672
},
{
"epoch": 0.2076519592718297,
"grad_norm": 0.49400243163108826,
"learning_rate": 3e-05,
"loss": 2.7685,
"step": 673
},
{
"epoch": 0.20796050601666152,
"grad_norm": 0.4072665274143219,
"learning_rate": 3e-05,
"loss": 2.6262,
"step": 674
},
{
"epoch": 0.20826905276149338,
"grad_norm": 0.3744329512119293,
"learning_rate": 3e-05,
"loss": 2.6089,
"step": 675
},
{
"epoch": 0.2085775995063252,
"grad_norm": 0.4581242501735687,
"learning_rate": 3e-05,
"loss": 2.6608,
"step": 676
},
{
"epoch": 0.20888614625115706,
"grad_norm": 0.46983492374420166,
"learning_rate": 3e-05,
"loss": 2.4597,
"step": 677
},
{
"epoch": 0.2091946929959889,
"grad_norm": 0.5508672595024109,
"learning_rate": 3e-05,
"loss": 2.8413,
"step": 678
},
{
"epoch": 0.20950323974082075,
"grad_norm": 0.3159502148628235,
"learning_rate": 3e-05,
"loss": 2.3252,
"step": 679
},
{
"epoch": 0.20981178648565257,
"grad_norm": 0.5377674102783203,
"learning_rate": 3e-05,
"loss": 2.2233,
"step": 680
},
{
"epoch": 0.21012033323048443,
"grad_norm": 0.40905702114105225,
"learning_rate": 3e-05,
"loss": 2.1659,
"step": 681
},
{
"epoch": 0.21042887997531626,
"grad_norm": 0.3416579067707062,
"learning_rate": 3e-05,
"loss": 2.0977,
"step": 682
},
{
"epoch": 0.2107374267201481,
"grad_norm": 0.5408785939216614,
"learning_rate": 3e-05,
"loss": 3.0337,
"step": 683
},
{
"epoch": 0.21104597346497994,
"grad_norm": 0.6323843002319336,
"learning_rate": 3e-05,
"loss": 2.9733,
"step": 684
},
{
"epoch": 0.2113545202098118,
"grad_norm": 0.8620406985282898,
"learning_rate": 3e-05,
"loss": 3.3109,
"step": 685
},
{
"epoch": 0.21166306695464362,
"grad_norm": 0.38318270444869995,
"learning_rate": 3e-05,
"loss": 2.786,
"step": 686
},
{
"epoch": 0.21197161369947548,
"grad_norm": 0.44390928745269775,
"learning_rate": 3e-05,
"loss": 2.9674,
"step": 687
},
{
"epoch": 0.2122801604443073,
"grad_norm": 0.6823718547821045,
"learning_rate": 3e-05,
"loss": 3.0883,
"step": 688
},
{
"epoch": 0.21258870718913916,
"grad_norm": 0.451790988445282,
"learning_rate": 3e-05,
"loss": 2.5927,
"step": 689
},
{
"epoch": 0.212897253933971,
"grad_norm": 0.4029495120048523,
"learning_rate": 3e-05,
"loss": 2.6611,
"step": 690
},
{
"epoch": 0.21320580067880285,
"grad_norm": 0.4392080307006836,
"learning_rate": 3e-05,
"loss": 2.7574,
"step": 691
},
{
"epoch": 0.21351434742363468,
"grad_norm": 0.6579667925834656,
"learning_rate": 3e-05,
"loss": 3.1202,
"step": 692
},
{
"epoch": 0.21382289416846653,
"grad_norm": 0.5804861783981323,
"learning_rate": 3e-05,
"loss": 3.3316,
"step": 693
},
{
"epoch": 0.21413144091329836,
"grad_norm": 0.4412527084350586,
"learning_rate": 3e-05,
"loss": 3.7257,
"step": 694
},
{
"epoch": 0.21443998765813022,
"grad_norm": 0.6037363409996033,
"learning_rate": 3e-05,
"loss": 2.5479,
"step": 695
},
{
"epoch": 0.21474853440296204,
"grad_norm": 0.6885823011398315,
"learning_rate": 3e-05,
"loss": 3.063,
"step": 696
},
{
"epoch": 0.2150570811477939,
"grad_norm": 0.5372409224510193,
"learning_rate": 3e-05,
"loss": 2.7169,
"step": 697
},
{
"epoch": 0.21536562789262573,
"grad_norm": 0.5530219674110413,
"learning_rate": 3e-05,
"loss": 3.0568,
"step": 698
},
{
"epoch": 0.21567417463745758,
"grad_norm": 0.7457128167152405,
"learning_rate": 3e-05,
"loss": 2.5894,
"step": 699
},
{
"epoch": 0.2159827213822894,
"grad_norm": 0.6711127161979675,
"learning_rate": 3e-05,
"loss": 2.7789,
"step": 700
},
{
"epoch": 0.21629126812712127,
"grad_norm": 0.6075369119644165,
"learning_rate": 3e-05,
"loss": 2.9943,
"step": 701
},
{
"epoch": 0.2165998148719531,
"grad_norm": 0.5779369473457336,
"learning_rate": 3e-05,
"loss": 2.5556,
"step": 702
},
{
"epoch": 0.21690836161678495,
"grad_norm": 0.9133353233337402,
"learning_rate": 3e-05,
"loss": 2.6227,
"step": 703
},
{
"epoch": 0.21721690836161678,
"grad_norm": 0.8465149402618408,
"learning_rate": 3e-05,
"loss": 3.1094,
"step": 704
},
{
"epoch": 0.21752545510644863,
"grad_norm": 0.30936139822006226,
"learning_rate": 3e-05,
"loss": 2.7395,
"step": 705
},
{
"epoch": 0.21783400185128046,
"grad_norm": 0.7300909757614136,
"learning_rate": 3e-05,
"loss": 2.9833,
"step": 706
},
{
"epoch": 0.21814254859611232,
"grad_norm": 0.7368529438972473,
"learning_rate": 3e-05,
"loss": 2.7301,
"step": 707
},
{
"epoch": 0.21845109534094415,
"grad_norm": 0.4861367642879486,
"learning_rate": 3e-05,
"loss": 2.5439,
"step": 708
},
{
"epoch": 0.218759642085776,
"grad_norm": 0.4448826313018799,
"learning_rate": 3e-05,
"loss": 2.7462,
"step": 709
},
{
"epoch": 0.21906818883060783,
"grad_norm": 0.43111348152160645,
"learning_rate": 3e-05,
"loss": 2.2179,
"step": 710
},
{
"epoch": 0.21937673557543969,
"grad_norm": 0.6709856390953064,
"learning_rate": 3e-05,
"loss": 2.3648,
"step": 711
},
{
"epoch": 0.2196852823202715,
"grad_norm": 0.5811859965324402,
"learning_rate": 3e-05,
"loss": 2.2098,
"step": 712
},
{
"epoch": 0.21999382906510337,
"grad_norm": 0.44703492522239685,
"learning_rate": 3e-05,
"loss": 2.4668,
"step": 713
},
{
"epoch": 0.2203023758099352,
"grad_norm": 0.32512566447257996,
"learning_rate": 3e-05,
"loss": 2.5741,
"step": 714
},
{
"epoch": 0.22061092255476705,
"grad_norm": 0.4460441768169403,
"learning_rate": 3e-05,
"loss": 2.5822,
"step": 715
},
{
"epoch": 0.22091946929959888,
"grad_norm": 0.46903368830680847,
"learning_rate": 3e-05,
"loss": 2.3405,
"step": 716
},
{
"epoch": 0.22122801604443074,
"grad_norm": 0.4886974096298218,
"learning_rate": 3e-05,
"loss": 2.3086,
"step": 717
},
{
"epoch": 0.22153656278926256,
"grad_norm": 0.7623785734176636,
"learning_rate": 3e-05,
"loss": 3.0441,
"step": 718
},
{
"epoch": 0.22184510953409442,
"grad_norm": 0.39860019087791443,
"learning_rate": 3e-05,
"loss": 2.7577,
"step": 719
},
{
"epoch": 0.22215365627892625,
"grad_norm": 0.5369051098823547,
"learning_rate": 3e-05,
"loss": 2.4445,
"step": 720
},
{
"epoch": 0.2224622030237581,
"grad_norm": 0.6205699443817139,
"learning_rate": 3e-05,
"loss": 3.322,
"step": 721
},
{
"epoch": 0.22277074976858993,
"grad_norm": 0.4737318456172943,
"learning_rate": 3e-05,
"loss": 2.5368,
"step": 722
},
{
"epoch": 0.2230792965134218,
"grad_norm": 0.4158051311969757,
"learning_rate": 3e-05,
"loss": 2.3423,
"step": 723
},
{
"epoch": 0.22338784325825362,
"grad_norm": 0.44715094566345215,
"learning_rate": 3e-05,
"loss": 2.9117,
"step": 724
},
{
"epoch": 0.22369639000308547,
"grad_norm": 0.40355610847473145,
"learning_rate": 3e-05,
"loss": 2.8261,
"step": 725
},
{
"epoch": 0.2240049367479173,
"grad_norm": 0.40845543146133423,
"learning_rate": 3e-05,
"loss": 2.9491,
"step": 726
},
{
"epoch": 0.22431348349274915,
"grad_norm": 0.40344637632369995,
"learning_rate": 3e-05,
"loss": 2.6911,
"step": 727
},
{
"epoch": 0.22462203023758098,
"grad_norm": 0.44544005393981934,
"learning_rate": 3e-05,
"loss": 2.5146,
"step": 728
},
{
"epoch": 0.22493057698241284,
"grad_norm": 0.33173346519470215,
"learning_rate": 3e-05,
"loss": 2.3432,
"step": 729
},
{
"epoch": 0.22523912372724467,
"grad_norm": 0.3437408208847046,
"learning_rate": 3e-05,
"loss": 2.642,
"step": 730
},
{
"epoch": 0.22554767047207652,
"grad_norm": 0.3419845700263977,
"learning_rate": 3e-05,
"loss": 2.7049,
"step": 731
},
{
"epoch": 0.22585621721690835,
"grad_norm": 0.38495057821273804,
"learning_rate": 3e-05,
"loss": 2.7977,
"step": 732
},
{
"epoch": 0.2261647639617402,
"grad_norm": 0.46839049458503723,
"learning_rate": 3e-05,
"loss": 2.3997,
"step": 733
},
{
"epoch": 0.22647331070657203,
"grad_norm": 0.5214048624038696,
"learning_rate": 3e-05,
"loss": 3.0801,
"step": 734
},
{
"epoch": 0.2267818574514039,
"grad_norm": 0.32730308175086975,
"learning_rate": 3e-05,
"loss": 2.6502,
"step": 735
},
{
"epoch": 0.22709040419623572,
"grad_norm": 0.459028035402298,
"learning_rate": 3e-05,
"loss": 2.8593,
"step": 736
},
{
"epoch": 0.22739895094106757,
"grad_norm": 0.5912216901779175,
"learning_rate": 3e-05,
"loss": 3.0517,
"step": 737
},
{
"epoch": 0.2277074976858994,
"grad_norm": 0.5747165083885193,
"learning_rate": 3e-05,
"loss": 3.1073,
"step": 738
},
{
"epoch": 0.22801604443073126,
"grad_norm": 0.39477410912513733,
"learning_rate": 3e-05,
"loss": 2.531,
"step": 739
},
{
"epoch": 0.22832459117556309,
"grad_norm": 0.47743627429008484,
"learning_rate": 3e-05,
"loss": 2.7878,
"step": 740
},
{
"epoch": 0.22863313792039494,
"grad_norm": 0.6934757232666016,
"learning_rate": 3e-05,
"loss": 2.844,
"step": 741
},
{
"epoch": 0.22894168466522677,
"grad_norm": 0.39980217814445496,
"learning_rate": 3e-05,
"loss": 2.7462,
"step": 742
},
{
"epoch": 0.22925023141005862,
"grad_norm": 0.393174409866333,
"learning_rate": 3e-05,
"loss": 2.6384,
"step": 743
},
{
"epoch": 0.22955877815489045,
"grad_norm": 0.5676349997520447,
"learning_rate": 3e-05,
"loss": 3.0358,
"step": 744
},
{
"epoch": 0.2298673248997223,
"grad_norm": 0.8145315647125244,
"learning_rate": 3e-05,
"loss": 3.4264,
"step": 745
},
{
"epoch": 0.23017587164455414,
"grad_norm": 0.3674360513687134,
"learning_rate": 3e-05,
"loss": 2.6974,
"step": 746
},
{
"epoch": 0.230484418389386,
"grad_norm": 0.39227521419525146,
"learning_rate": 3e-05,
"loss": 2.466,
"step": 747
},
{
"epoch": 0.23079296513421785,
"grad_norm": 0.6145668625831604,
"learning_rate": 3e-05,
"loss": 2.9016,
"step": 748
},
{
"epoch": 0.23110151187904968,
"grad_norm": 0.3767462968826294,
"learning_rate": 3e-05,
"loss": 2.8179,
"step": 749
},
{
"epoch": 0.23141005862388153,
"grad_norm": 0.358978807926178,
"learning_rate": 3e-05,
"loss": 2.4825,
"step": 750
},
{
"epoch": 0.23171860536871336,
"grad_norm": 0.4695563018321991,
"learning_rate": 3e-05,
"loss": 3.0992,
"step": 751
},
{
"epoch": 0.23202715211354522,
"grad_norm": 0.4999081790447235,
"learning_rate": 3e-05,
"loss": 2.8782,
"step": 752
},
{
"epoch": 0.23233569885837704,
"grad_norm": 0.4756012558937073,
"learning_rate": 3e-05,
"loss": 3.299,
"step": 753
},
{
"epoch": 0.2326442456032089,
"grad_norm": 0.42113542556762695,
"learning_rate": 3e-05,
"loss": 2.9299,
"step": 754
},
{
"epoch": 0.23295279234804073,
"grad_norm": 0.4862482249736786,
"learning_rate": 3e-05,
"loss": 2.6376,
"step": 755
},
{
"epoch": 0.23326133909287258,
"grad_norm": 0.44577065110206604,
"learning_rate": 3e-05,
"loss": 2.8854,
"step": 756
},
{
"epoch": 0.2335698858377044,
"grad_norm": 0.36711904406547546,
"learning_rate": 3e-05,
"loss": 2.7238,
"step": 757
},
{
"epoch": 0.23387843258253627,
"grad_norm": 0.45583394169807434,
"learning_rate": 3e-05,
"loss": 2.9171,
"step": 758
},
{
"epoch": 0.2341869793273681,
"grad_norm": 0.4282878041267395,
"learning_rate": 3e-05,
"loss": 2.753,
"step": 759
},
{
"epoch": 0.23449552607219995,
"grad_norm": 0.3927743136882782,
"learning_rate": 3e-05,
"loss": 2.8325,
"step": 760
},
{
"epoch": 0.23480407281703178,
"grad_norm": 0.4064248204231262,
"learning_rate": 3e-05,
"loss": 2.3949,
"step": 761
},
{
"epoch": 0.23511261956186363,
"grad_norm": 0.5085972547531128,
"learning_rate": 3e-05,
"loss": 3.3134,
"step": 762
},
{
"epoch": 0.23542116630669546,
"grad_norm": 0.43735840916633606,
"learning_rate": 3e-05,
"loss": 2.8876,
"step": 763
},
{
"epoch": 0.23572971305152732,
"grad_norm": 0.5270010232925415,
"learning_rate": 3e-05,
"loss": 3.1755,
"step": 764
},
{
"epoch": 0.23603825979635915,
"grad_norm": 0.4824548065662384,
"learning_rate": 3e-05,
"loss": 2.8461,
"step": 765
},
{
"epoch": 0.236346806541191,
"grad_norm": 0.5891832113265991,
"learning_rate": 3e-05,
"loss": 2.864,
"step": 766
},
{
"epoch": 0.23665535328602283,
"grad_norm": 0.3647357225418091,
"learning_rate": 3e-05,
"loss": 3.0185,
"step": 767
},
{
"epoch": 0.23696390003085469,
"grad_norm": 0.3849000930786133,
"learning_rate": 3e-05,
"loss": 2.5547,
"step": 768
},
{
"epoch": 0.2372724467756865,
"grad_norm": 0.3377460241317749,
"learning_rate": 3e-05,
"loss": 2.3713,
"step": 769
},
{
"epoch": 0.23758099352051837,
"grad_norm": 0.4434642493724823,
"learning_rate": 3e-05,
"loss": 2.5315,
"step": 770
},
{
"epoch": 0.2378895402653502,
"grad_norm": 0.3554992079734802,
"learning_rate": 3e-05,
"loss": 2.2536,
"step": 771
},
{
"epoch": 0.23819808701018205,
"grad_norm": 0.49346596002578735,
"learning_rate": 3e-05,
"loss": 2.6857,
"step": 772
},
{
"epoch": 0.23850663375501388,
"grad_norm": 0.5243836641311646,
"learning_rate": 3e-05,
"loss": 2.9841,
"step": 773
},
{
"epoch": 0.23881518049984574,
"grad_norm": 0.47306209802627563,
"learning_rate": 3e-05,
"loss": 2.5651,
"step": 774
},
{
"epoch": 0.23912372724467756,
"grad_norm": 0.4085429310798645,
"learning_rate": 3e-05,
"loss": 2.7709,
"step": 775
},
{
"epoch": 0.23943227398950942,
"grad_norm": 0.5058587789535522,
"learning_rate": 3e-05,
"loss": 2.8431,
"step": 776
},
{
"epoch": 0.23974082073434125,
"grad_norm": 0.423749178647995,
"learning_rate": 3e-05,
"loss": 3.1645,
"step": 777
},
{
"epoch": 0.2400493674791731,
"grad_norm": 0.44455215334892273,
"learning_rate": 3e-05,
"loss": 2.4221,
"step": 778
},
{
"epoch": 0.24035791422400493,
"grad_norm": 0.4022437334060669,
"learning_rate": 3e-05,
"loss": 2.7517,
"step": 779
},
{
"epoch": 0.2406664609688368,
"grad_norm": 0.3913156986236572,
"learning_rate": 3e-05,
"loss": 2.8265,
"step": 780
},
{
"epoch": 0.24097500771366862,
"grad_norm": 0.5418980717658997,
"learning_rate": 3e-05,
"loss": 3.1478,
"step": 781
},
{
"epoch": 0.24128355445850047,
"grad_norm": 0.3950507938861847,
"learning_rate": 3e-05,
"loss": 2.5474,
"step": 782
},
{
"epoch": 0.2415921012033323,
"grad_norm": 0.43344080448150635,
"learning_rate": 3e-05,
"loss": 2.5813,
"step": 783
},
{
"epoch": 0.24190064794816415,
"grad_norm": 0.32457104325294495,
"learning_rate": 3e-05,
"loss": 2.6936,
"step": 784
},
{
"epoch": 0.24220919469299598,
"grad_norm": 0.29046568274497986,
"learning_rate": 3e-05,
"loss": 2.2027,
"step": 785
},
{
"epoch": 0.24251774143782784,
"grad_norm": 0.47084319591522217,
"learning_rate": 3e-05,
"loss": 2.7889,
"step": 786
},
{
"epoch": 0.24282628818265967,
"grad_norm": 0.4512122571468353,
"learning_rate": 3e-05,
"loss": 2.8724,
"step": 787
},
{
"epoch": 0.24313483492749152,
"grad_norm": 0.3429430425167084,
"learning_rate": 3e-05,
"loss": 2.5882,
"step": 788
},
{
"epoch": 0.24344338167232335,
"grad_norm": 0.2793130576610565,
"learning_rate": 3e-05,
"loss": 2.3702,
"step": 789
},
{
"epoch": 0.2437519284171552,
"grad_norm": 0.6752581000328064,
"learning_rate": 3e-05,
"loss": 2.9438,
"step": 790
},
{
"epoch": 0.24406047516198703,
"grad_norm": 0.3320951461791992,
"learning_rate": 3e-05,
"loss": 2.5836,
"step": 791
},
{
"epoch": 0.2443690219068189,
"grad_norm": 0.443316787481308,
"learning_rate": 3e-05,
"loss": 2.8967,
"step": 792
},
{
"epoch": 0.24467756865165072,
"grad_norm": 0.44384002685546875,
"learning_rate": 3e-05,
"loss": 2.1344,
"step": 793
},
{
"epoch": 0.24498611539648257,
"grad_norm": 0.5478724241256714,
"learning_rate": 3e-05,
"loss": 2.8676,
"step": 794
},
{
"epoch": 0.2452946621413144,
"grad_norm": 0.4789648950099945,
"learning_rate": 3e-05,
"loss": 2.954,
"step": 795
},
{
"epoch": 0.24560320888614626,
"grad_norm": 0.30075979232788086,
"learning_rate": 3e-05,
"loss": 2.2147,
"step": 796
},
{
"epoch": 0.24591175563097808,
"grad_norm": 0.8388468027114868,
"learning_rate": 3e-05,
"loss": 3.0801,
"step": 797
},
{
"epoch": 0.24622030237580994,
"grad_norm": 0.46896687150001526,
"learning_rate": 3e-05,
"loss": 3.0219,
"step": 798
},
{
"epoch": 0.24652884912064177,
"grad_norm": 0.37118545174598694,
"learning_rate": 3e-05,
"loss": 2.7809,
"step": 799
},
{
"epoch": 0.24683739586547362,
"grad_norm": 0.3290826380252838,
"learning_rate": 3e-05,
"loss": 2.4913,
"step": 800
},
{
"epoch": 0.24714594261030545,
"grad_norm": 0.4458785951137543,
"learning_rate": 3e-05,
"loss": 2.8324,
"step": 801
},
{
"epoch": 0.2474544893551373,
"grad_norm": 0.39164820313453674,
"learning_rate": 3e-05,
"loss": 2.8643,
"step": 802
},
{
"epoch": 0.24776303609996914,
"grad_norm": 0.37570327520370483,
"learning_rate": 3e-05,
"loss": 2.6302,
"step": 803
},
{
"epoch": 0.248071582844801,
"grad_norm": 0.36291664838790894,
"learning_rate": 3e-05,
"loss": 2.3188,
"step": 804
},
{
"epoch": 0.24838012958963282,
"grad_norm": 0.7497803568840027,
"learning_rate": 3e-05,
"loss": 3.1793,
"step": 805
},
{
"epoch": 0.24868867633446468,
"grad_norm": 0.3503245413303375,
"learning_rate": 3e-05,
"loss": 2.6478,
"step": 806
},
{
"epoch": 0.2489972230792965,
"grad_norm": 0.4229944944381714,
"learning_rate": 3e-05,
"loss": 3.166,
"step": 807
},
{
"epoch": 0.24930576982412836,
"grad_norm": 0.39964744448661804,
"learning_rate": 3e-05,
"loss": 2.5633,
"step": 808
},
{
"epoch": 0.2496143165689602,
"grad_norm": 0.4407540559768677,
"learning_rate": 3e-05,
"loss": 2.8108,
"step": 809
},
{
"epoch": 0.24992286331379204,
"grad_norm": 0.5962763428688049,
"learning_rate": 3e-05,
"loss": 2.7762,
"step": 810
},
{
"epoch": 0.25023141005862387,
"grad_norm": 0.35750457644462585,
"learning_rate": 3e-05,
"loss": 2.5792,
"step": 811
},
{
"epoch": 0.2505399568034557,
"grad_norm": 0.6067323088645935,
"learning_rate": 3e-05,
"loss": 2.9188,
"step": 812
},
{
"epoch": 0.2508485035482876,
"grad_norm": 0.5473812818527222,
"learning_rate": 3e-05,
"loss": 2.9941,
"step": 813
},
{
"epoch": 0.2511570502931194,
"grad_norm": 0.4247765839099884,
"learning_rate": 3e-05,
"loss": 2.9066,
"step": 814
},
{
"epoch": 0.25146559703795124,
"grad_norm": 0.36779630184173584,
"learning_rate": 3e-05,
"loss": 2.1791,
"step": 815
},
{
"epoch": 0.25177414378278307,
"grad_norm": 0.4601776599884033,
"learning_rate": 3e-05,
"loss": 2.4513,
"step": 816
},
{
"epoch": 0.25208269052761495,
"grad_norm": 0.4455990493297577,
"learning_rate": 3e-05,
"loss": 3.0133,
"step": 817
},
{
"epoch": 0.2523912372724468,
"grad_norm": 0.469711035490036,
"learning_rate": 3e-05,
"loss": 2.7601,
"step": 818
},
{
"epoch": 0.2526997840172786,
"grad_norm": 0.35248449444770813,
"learning_rate": 3e-05,
"loss": 2.4141,
"step": 819
},
{
"epoch": 0.25300833076211043,
"grad_norm": 0.5230732560157776,
"learning_rate": 3e-05,
"loss": 2.7922,
"step": 820
},
{
"epoch": 0.2533168775069423,
"grad_norm": 0.4009235203266144,
"learning_rate": 3e-05,
"loss": 2.8466,
"step": 821
},
{
"epoch": 0.25362542425177415,
"grad_norm": 0.4277903139591217,
"learning_rate": 3e-05,
"loss": 2.6193,
"step": 822
},
{
"epoch": 0.253933970996606,
"grad_norm": 0.35308367013931274,
"learning_rate": 3e-05,
"loss": 2.7359,
"step": 823
},
{
"epoch": 0.2542425177414378,
"grad_norm": 0.5506107807159424,
"learning_rate": 3e-05,
"loss": 3.3045,
"step": 824
},
{
"epoch": 0.2545510644862697,
"grad_norm": 0.4381810128688812,
"learning_rate": 3e-05,
"loss": 2.5703,
"step": 825
},
{
"epoch": 0.2548596112311015,
"grad_norm": 0.6535665392875671,
"learning_rate": 3e-05,
"loss": 2.9752,
"step": 826
},
{
"epoch": 0.25516815797593334,
"grad_norm": 0.31507301330566406,
"learning_rate": 3e-05,
"loss": 2.4288,
"step": 827
},
{
"epoch": 0.25547670472076517,
"grad_norm": 0.628995418548584,
"learning_rate": 3e-05,
"loss": 2.9277,
"step": 828
},
{
"epoch": 0.25578525146559705,
"grad_norm": 0.3475759029388428,
"learning_rate": 3e-05,
"loss": 2.6116,
"step": 829
},
{
"epoch": 0.2560937982104289,
"grad_norm": 0.35610201954841614,
"learning_rate": 3e-05,
"loss": 2.5474,
"step": 830
},
{
"epoch": 0.2564023449552607,
"grad_norm": 0.3411518335342407,
"learning_rate": 3e-05,
"loss": 2.6045,
"step": 831
},
{
"epoch": 0.2567108917000926,
"grad_norm": 0.36465325951576233,
"learning_rate": 3e-05,
"loss": 2.3901,
"step": 832
},
{
"epoch": 0.2570194384449244,
"grad_norm": 0.3037632703781128,
"learning_rate": 3e-05,
"loss": 2.3478,
"step": 833
},
{
"epoch": 0.25732798518975625,
"grad_norm": 0.44962364435195923,
"learning_rate": 3e-05,
"loss": 2.8154,
"step": 834
},
{
"epoch": 0.2576365319345881,
"grad_norm": 0.3245837688446045,
"learning_rate": 3e-05,
"loss": 2.4922,
"step": 835
},
{
"epoch": 0.25794507867941996,
"grad_norm": 0.37248003482818604,
"learning_rate": 3e-05,
"loss": 2.6044,
"step": 836
},
{
"epoch": 0.2582536254242518,
"grad_norm": 0.3687119483947754,
"learning_rate": 3e-05,
"loss": 2.7271,
"step": 837
},
{
"epoch": 0.2585621721690836,
"grad_norm": 0.3978298604488373,
"learning_rate": 3e-05,
"loss": 2.9363,
"step": 838
},
{
"epoch": 0.25887071891391544,
"grad_norm": 0.43831732869148254,
"learning_rate": 3e-05,
"loss": 2.633,
"step": 839
},
{
"epoch": 0.2591792656587473,
"grad_norm": 0.39611366391181946,
"learning_rate": 3e-05,
"loss": 2.9044,
"step": 840
},
{
"epoch": 0.25948781240357915,
"grad_norm": 0.37481510639190674,
"learning_rate": 3e-05,
"loss": 2.7067,
"step": 841
},
{
"epoch": 0.259796359148411,
"grad_norm": 0.33353596925735474,
"learning_rate": 3e-05,
"loss": 2.4132,
"step": 842
},
{
"epoch": 0.2601049058932428,
"grad_norm": 0.42422613501548767,
"learning_rate": 3e-05,
"loss": 2.4306,
"step": 843
},
{
"epoch": 0.2604134526380747,
"grad_norm": 0.7475231289863586,
"learning_rate": 3e-05,
"loss": 3.2851,
"step": 844
},
{
"epoch": 0.2607219993829065,
"grad_norm": 0.8938561081886292,
"learning_rate": 3e-05,
"loss": 3.2137,
"step": 845
},
{
"epoch": 0.26103054612773835,
"grad_norm": 0.5334686636924744,
"learning_rate": 3e-05,
"loss": 2.3994,
"step": 846
},
{
"epoch": 0.2613390928725702,
"grad_norm": 0.6619825959205627,
"learning_rate": 3e-05,
"loss": 2.5175,
"step": 847
},
{
"epoch": 0.26164763961740206,
"grad_norm": 0.8206683993339539,
"learning_rate": 3e-05,
"loss": 2.6581,
"step": 848
},
{
"epoch": 0.2619561863622339,
"grad_norm": 0.4362182319164276,
"learning_rate": 3e-05,
"loss": 3.1203,
"step": 849
},
{
"epoch": 0.2622647331070657,
"grad_norm": 0.5157708525657654,
"learning_rate": 3e-05,
"loss": 2.8433,
"step": 850
},
{
"epoch": 0.26257327985189755,
"grad_norm": 0.5386294722557068,
"learning_rate": 3e-05,
"loss": 2.5983,
"step": 851
},
{
"epoch": 0.26288182659672943,
"grad_norm": 0.7352175712585449,
"learning_rate": 3e-05,
"loss": 2.8269,
"step": 852
},
{
"epoch": 0.26319037334156126,
"grad_norm": 0.4389644265174866,
"learning_rate": 3e-05,
"loss": 2.7614,
"step": 853
},
{
"epoch": 0.2634989200863931,
"grad_norm": 0.5747153162956238,
"learning_rate": 3e-05,
"loss": 2.8439,
"step": 854
},
{
"epoch": 0.2638074668312249,
"grad_norm": 0.442501962184906,
"learning_rate": 3e-05,
"loss": 2.5673,
"step": 855
},
{
"epoch": 0.2641160135760568,
"grad_norm": 0.5041112899780273,
"learning_rate": 3e-05,
"loss": 3.4168,
"step": 856
},
{
"epoch": 0.2644245603208886,
"grad_norm": 0.4333510994911194,
"learning_rate": 3e-05,
"loss": 2.5954,
"step": 857
},
{
"epoch": 0.26473310706572045,
"grad_norm": 0.2841757535934448,
"learning_rate": 3e-05,
"loss": 2.2267,
"step": 858
},
{
"epoch": 0.2650416538105523,
"grad_norm": 0.3240799903869629,
"learning_rate": 3e-05,
"loss": 2.4861,
"step": 859
},
{
"epoch": 0.26535020055538416,
"grad_norm": 0.8906437754631042,
"learning_rate": 3e-05,
"loss": 3.1556,
"step": 860
},
{
"epoch": 0.265658747300216,
"grad_norm": 0.503738284111023,
"learning_rate": 3e-05,
"loss": 2.8386,
"step": 861
},
{
"epoch": 0.2659672940450478,
"grad_norm": 0.3821960985660553,
"learning_rate": 3e-05,
"loss": 2.4592,
"step": 862
},
{
"epoch": 0.26627584078987965,
"grad_norm": 0.4790137708187103,
"learning_rate": 3e-05,
"loss": 2.9267,
"step": 863
},
{
"epoch": 0.26658438753471153,
"grad_norm": 0.4529775083065033,
"learning_rate": 3e-05,
"loss": 2.7667,
"step": 864
},
{
"epoch": 0.26689293427954336,
"grad_norm": 0.38088151812553406,
"learning_rate": 3e-05,
"loss": 2.8161,
"step": 865
},
{
"epoch": 0.2672014810243752,
"grad_norm": 0.38208597898483276,
"learning_rate": 3e-05,
"loss": 2.6606,
"step": 866
},
{
"epoch": 0.267510027769207,
"grad_norm": 0.3878786861896515,
"learning_rate": 3e-05,
"loss": 2.8936,
"step": 867
},
{
"epoch": 0.2678185745140389,
"grad_norm": 0.3223757743835449,
"learning_rate": 3e-05,
"loss": 2.6747,
"step": 868
},
{
"epoch": 0.2681271212588707,
"grad_norm": 0.380728155374527,
"learning_rate": 3e-05,
"loss": 2.9688,
"step": 869
},
{
"epoch": 0.26843566800370255,
"grad_norm": 0.3616756796836853,
"learning_rate": 3e-05,
"loss": 2.9107,
"step": 870
},
{
"epoch": 0.2687442147485344,
"grad_norm": 0.4382908344268799,
"learning_rate": 3e-05,
"loss": 2.7253,
"step": 871
},
{
"epoch": 0.26905276149336627,
"grad_norm": 0.6541967988014221,
"learning_rate": 3e-05,
"loss": 2.9171,
"step": 872
},
{
"epoch": 0.2693613082381981,
"grad_norm": 0.5041164755821228,
"learning_rate": 3e-05,
"loss": 2.697,
"step": 873
},
{
"epoch": 0.2696698549830299,
"grad_norm": 0.40384742617607117,
"learning_rate": 3e-05,
"loss": 2.832,
"step": 874
},
{
"epoch": 0.26997840172786175,
"grad_norm": 0.4746786952018738,
"learning_rate": 3e-05,
"loss": 3.0941,
"step": 875
},
{
"epoch": 0.27028694847269363,
"grad_norm": 0.49661922454833984,
"learning_rate": 3e-05,
"loss": 2.9883,
"step": 876
},
{
"epoch": 0.27059549521752546,
"grad_norm": 0.323425829410553,
"learning_rate": 3e-05,
"loss": 2.4742,
"step": 877
},
{
"epoch": 0.2709040419623573,
"grad_norm": 0.383983314037323,
"learning_rate": 3e-05,
"loss": 2.5993,
"step": 878
},
{
"epoch": 0.2712125887071891,
"grad_norm": 0.44553110003471375,
"learning_rate": 3e-05,
"loss": 2.6372,
"step": 879
},
{
"epoch": 0.271521135452021,
"grad_norm": 0.5241272449493408,
"learning_rate": 3e-05,
"loss": 3.2384,
"step": 880
},
{
"epoch": 0.27182968219685283,
"grad_norm": 0.6117975115776062,
"learning_rate": 3e-05,
"loss": 2.8964,
"step": 881
},
{
"epoch": 0.27213822894168466,
"grad_norm": 0.3425690233707428,
"learning_rate": 3e-05,
"loss": 2.3628,
"step": 882
},
{
"epoch": 0.2724467756865165,
"grad_norm": 0.49667888879776,
"learning_rate": 3e-05,
"loss": 3.1708,
"step": 883
},
{
"epoch": 0.27275532243134837,
"grad_norm": 0.3282347321510315,
"learning_rate": 3e-05,
"loss": 2.5675,
"step": 884
},
{
"epoch": 0.2730638691761802,
"grad_norm": 0.371405690908432,
"learning_rate": 3e-05,
"loss": 2.3648,
"step": 885
},
{
"epoch": 0.273372415921012,
"grad_norm": 0.46397897601127625,
"learning_rate": 3e-05,
"loss": 2.4369,
"step": 886
},
{
"epoch": 0.27368096266584385,
"grad_norm": 0.45648089051246643,
"learning_rate": 3e-05,
"loss": 2.5306,
"step": 887
},
{
"epoch": 0.27398950941067574,
"grad_norm": 0.3604811728000641,
"learning_rate": 3e-05,
"loss": 2.6633,
"step": 888
},
{
"epoch": 0.27429805615550756,
"grad_norm": 0.3597293496131897,
"learning_rate": 3e-05,
"loss": 2.5673,
"step": 889
},
{
"epoch": 0.2746066029003394,
"grad_norm": 0.4115181565284729,
"learning_rate": 3e-05,
"loss": 2.6737,
"step": 890
},
{
"epoch": 0.2749151496451712,
"grad_norm": 0.5663295388221741,
"learning_rate": 3e-05,
"loss": 3.3189,
"step": 891
},
{
"epoch": 0.2752236963900031,
"grad_norm": 0.3877559006214142,
"learning_rate": 3e-05,
"loss": 2.3662,
"step": 892
},
{
"epoch": 0.27553224313483493,
"grad_norm": 0.3957102596759796,
"learning_rate": 3e-05,
"loss": 2.6481,
"step": 893
},
{
"epoch": 0.27584078987966676,
"grad_norm": 0.4517107307910919,
"learning_rate": 3e-05,
"loss": 2.6274,
"step": 894
},
{
"epoch": 0.2761493366244986,
"grad_norm": 0.3920423090457916,
"learning_rate": 3e-05,
"loss": 2.3127,
"step": 895
},
{
"epoch": 0.27645788336933047,
"grad_norm": 0.4129326641559601,
"learning_rate": 3e-05,
"loss": 2.8305,
"step": 896
},
{
"epoch": 0.2767664301141623,
"grad_norm": 0.434194415807724,
"learning_rate": 3e-05,
"loss": 2.9748,
"step": 897
},
{
"epoch": 0.2770749768589941,
"grad_norm": 0.35281941294670105,
"learning_rate": 3e-05,
"loss": 2.6046,
"step": 898
},
{
"epoch": 0.27738352360382595,
"grad_norm": 0.3993188142776489,
"learning_rate": 3e-05,
"loss": 3.0656,
"step": 899
},
{
"epoch": 0.27769207034865784,
"grad_norm": 0.38514411449432373,
"learning_rate": 3e-05,
"loss": 2.6093,
"step": 900
},
{
"epoch": 0.27800061709348967,
"grad_norm": 0.46261972188949585,
"learning_rate": 3e-05,
"loss": 2.7449,
"step": 901
},
{
"epoch": 0.2783091638383215,
"grad_norm": 0.35552743077278137,
"learning_rate": 3e-05,
"loss": 2.4989,
"step": 902
},
{
"epoch": 0.2786177105831533,
"grad_norm": 0.3849910795688629,
"learning_rate": 3e-05,
"loss": 2.8201,
"step": 903
},
{
"epoch": 0.2789262573279852,
"grad_norm": 0.3854105770587921,
"learning_rate": 3e-05,
"loss": 2.866,
"step": 904
},
{
"epoch": 0.27923480407281703,
"grad_norm": 0.41260039806365967,
"learning_rate": 3e-05,
"loss": 2.6602,
"step": 905
},
{
"epoch": 0.27954335081764886,
"grad_norm": 0.347160667181015,
"learning_rate": 3e-05,
"loss": 2.3522,
"step": 906
},
{
"epoch": 0.2798518975624807,
"grad_norm": 0.2681293785572052,
"learning_rate": 3e-05,
"loss": 2.2104,
"step": 907
},
{
"epoch": 0.2801604443073126,
"grad_norm": 0.5947923064231873,
"learning_rate": 3e-05,
"loss": 2.9183,
"step": 908
},
{
"epoch": 0.2804689910521444,
"grad_norm": 0.3329774737358093,
"learning_rate": 3e-05,
"loss": 2.6277,
"step": 909
},
{
"epoch": 0.28077753779697623,
"grad_norm": 0.5521478652954102,
"learning_rate": 3e-05,
"loss": 3.0918,
"step": 910
},
{
"epoch": 0.28108608454180806,
"grad_norm": 0.4059542417526245,
"learning_rate": 3e-05,
"loss": 2.7583,
"step": 911
},
{
"epoch": 0.28139463128663994,
"grad_norm": 0.406534343957901,
"learning_rate": 3e-05,
"loss": 2.8539,
"step": 912
},
{
"epoch": 0.28170317803147177,
"grad_norm": 0.6377788782119751,
"learning_rate": 3e-05,
"loss": 2.7076,
"step": 913
},
{
"epoch": 0.2820117247763036,
"grad_norm": 0.3285945653915405,
"learning_rate": 3e-05,
"loss": 2.233,
"step": 914
},
{
"epoch": 0.2823202715211355,
"grad_norm": 0.3589547276496887,
"learning_rate": 3e-05,
"loss": 2.6757,
"step": 915
},
{
"epoch": 0.2826288182659673,
"grad_norm": 0.5185350775718689,
"learning_rate": 3e-05,
"loss": 2.84,
"step": 916
},
{
"epoch": 0.28293736501079914,
"grad_norm": 0.5448761582374573,
"learning_rate": 3e-05,
"loss": 2.6148,
"step": 917
},
{
"epoch": 0.28324591175563096,
"grad_norm": 0.40239185094833374,
"learning_rate": 3e-05,
"loss": 2.6744,
"step": 918
},
{
"epoch": 0.28355445850046285,
"grad_norm": 0.4547919034957886,
"learning_rate": 3e-05,
"loss": 3.0762,
"step": 919
},
{
"epoch": 0.2838630052452947,
"grad_norm": 0.6027969717979431,
"learning_rate": 3e-05,
"loss": 2.5929,
"step": 920
},
{
"epoch": 0.2841715519901265,
"grad_norm": 0.4554506242275238,
"learning_rate": 3e-05,
"loss": 2.4028,
"step": 921
},
{
"epoch": 0.28448009873495833,
"grad_norm": 0.4088301360607147,
"learning_rate": 3e-05,
"loss": 2.2121,
"step": 922
},
{
"epoch": 0.2847886454797902,
"grad_norm": 0.38209256529808044,
"learning_rate": 3e-05,
"loss": 2.8373,
"step": 923
},
{
"epoch": 0.28509719222462204,
"grad_norm": 0.3328472673892975,
"learning_rate": 3e-05,
"loss": 2.4444,
"step": 924
},
{
"epoch": 0.28540573896945387,
"grad_norm": 0.33774638175964355,
"learning_rate": 3e-05,
"loss": 2.3775,
"step": 925
},
{
"epoch": 0.2857142857142857,
"grad_norm": 0.5520322322845459,
"learning_rate": 3e-05,
"loss": 2.6919,
"step": 926
},
{
"epoch": 0.2860228324591176,
"grad_norm": 0.42964938282966614,
"learning_rate": 3e-05,
"loss": 2.7063,
"step": 927
},
{
"epoch": 0.2863313792039494,
"grad_norm": 0.3997243642807007,
"learning_rate": 3e-05,
"loss": 2.4581,
"step": 928
},
{
"epoch": 0.28663992594878124,
"grad_norm": 0.34988248348236084,
"learning_rate": 3e-05,
"loss": 2.6436,
"step": 929
},
{
"epoch": 0.28694847269361307,
"grad_norm": 0.5160298347473145,
"learning_rate": 3e-05,
"loss": 2.5583,
"step": 930
},
{
"epoch": 0.28725701943844495,
"grad_norm": 0.5120193362236023,
"learning_rate": 3e-05,
"loss": 2.879,
"step": 931
},
{
"epoch": 0.2875655661832768,
"grad_norm": 0.380231112241745,
"learning_rate": 3e-05,
"loss": 2.8559,
"step": 932
},
{
"epoch": 0.2878741129281086,
"grad_norm": 0.3843991756439209,
"learning_rate": 3e-05,
"loss": 2.547,
"step": 933
},
{
"epoch": 0.28818265967294043,
"grad_norm": 0.48810240626335144,
"learning_rate": 3e-05,
"loss": 2.7941,
"step": 934
},
{
"epoch": 0.2884912064177723,
"grad_norm": 0.4541907012462616,
"learning_rate": 3e-05,
"loss": 2.6922,
"step": 935
},
{
"epoch": 0.28879975316260414,
"grad_norm": 0.45531538128852844,
"learning_rate": 3e-05,
"loss": 2.9196,
"step": 936
},
{
"epoch": 0.289108299907436,
"grad_norm": 0.33889535069465637,
"learning_rate": 3e-05,
"loss": 2.6043,
"step": 937
},
{
"epoch": 0.2894168466522678,
"grad_norm": 0.41679373383522034,
"learning_rate": 3e-05,
"loss": 2.3954,
"step": 938
},
{
"epoch": 0.2897253933970997,
"grad_norm": 0.5239324569702148,
"learning_rate": 3e-05,
"loss": 2.7328,
"step": 939
},
{
"epoch": 0.2900339401419315,
"grad_norm": 0.45569634437561035,
"learning_rate": 3e-05,
"loss": 2.8944,
"step": 940
},
{
"epoch": 0.29034248688676334,
"grad_norm": 0.29057788848876953,
"learning_rate": 3e-05,
"loss": 2.1483,
"step": 941
},
{
"epoch": 0.29065103363159517,
"grad_norm": 0.3790557384490967,
"learning_rate": 3e-05,
"loss": 2.4723,
"step": 942
},
{
"epoch": 0.29095958037642705,
"grad_norm": 0.4859466850757599,
"learning_rate": 3e-05,
"loss": 2.8131,
"step": 943
},
{
"epoch": 0.2912681271212589,
"grad_norm": 0.5878845453262329,
"learning_rate": 3e-05,
"loss": 2.8616,
"step": 944
},
{
"epoch": 0.2915766738660907,
"grad_norm": 0.3621906638145447,
"learning_rate": 3e-05,
"loss": 2.6452,
"step": 945
},
{
"epoch": 0.29188522061092254,
"grad_norm": 0.39704960584640503,
"learning_rate": 3e-05,
"loss": 2.688,
"step": 946
},
{
"epoch": 0.2921937673557544,
"grad_norm": 0.438273549079895,
"learning_rate": 3e-05,
"loss": 2.6861,
"step": 947
},
{
"epoch": 0.29250231410058625,
"grad_norm": 0.42470529675483704,
"learning_rate": 3e-05,
"loss": 2.7117,
"step": 948
},
{
"epoch": 0.2928108608454181,
"grad_norm": 0.3004021942615509,
"learning_rate": 3e-05,
"loss": 2.2417,
"step": 949
},
{
"epoch": 0.2931194075902499,
"grad_norm": 0.3381553590297699,
"learning_rate": 3e-05,
"loss": 2.7548,
"step": 950
},
{
"epoch": 0.2934279543350818,
"grad_norm": 0.32822176814079285,
"learning_rate": 3e-05,
"loss": 2.6632,
"step": 951
},
{
"epoch": 0.2937365010799136,
"grad_norm": 0.48720303177833557,
"learning_rate": 3e-05,
"loss": 2.9131,
"step": 952
},
{
"epoch": 0.29404504782474544,
"grad_norm": 0.5922092795372009,
"learning_rate": 3e-05,
"loss": 2.6821,
"step": 953
},
{
"epoch": 0.29435359456957727,
"grad_norm": 0.36750638484954834,
"learning_rate": 3e-05,
"loss": 2.9744,
"step": 954
},
{
"epoch": 0.29466214131440915,
"grad_norm": 0.3950546085834503,
"learning_rate": 3e-05,
"loss": 2.8421,
"step": 955
},
{
"epoch": 0.294970688059241,
"grad_norm": 0.33338621258735657,
"learning_rate": 3e-05,
"loss": 2.3283,
"step": 956
},
{
"epoch": 0.2952792348040728,
"grad_norm": 0.32808199524879456,
"learning_rate": 3e-05,
"loss": 2.575,
"step": 957
},
{
"epoch": 0.29558778154890464,
"grad_norm": 0.40493538975715637,
"learning_rate": 3e-05,
"loss": 2.696,
"step": 958
},
{
"epoch": 0.2958963282937365,
"grad_norm": 0.5946307182312012,
"learning_rate": 3e-05,
"loss": 3.0542,
"step": 959
},
{
"epoch": 0.29620487503856835,
"grad_norm": 0.49499088525772095,
"learning_rate": 3e-05,
"loss": 2.9535,
"step": 960
},
{
"epoch": 0.2965134217834002,
"grad_norm": 0.3947402238845825,
"learning_rate": 3e-05,
"loss": 2.2794,
"step": 961
},
{
"epoch": 0.296821968528232,
"grad_norm": 0.6068766713142395,
"learning_rate": 3e-05,
"loss": 2.7137,
"step": 962
},
{
"epoch": 0.2971305152730639,
"grad_norm": 0.3848678469657898,
"learning_rate": 3e-05,
"loss": 2.8856,
"step": 963
},
{
"epoch": 0.2974390620178957,
"grad_norm": 0.3639110028743744,
"learning_rate": 3e-05,
"loss": 2.6542,
"step": 964
},
{
"epoch": 0.29774760876272754,
"grad_norm": 0.4631502330303192,
"learning_rate": 3e-05,
"loss": 2.801,
"step": 965
},
{
"epoch": 0.2980561555075594,
"grad_norm": 0.4895254671573639,
"learning_rate": 3e-05,
"loss": 2.5745,
"step": 966
},
{
"epoch": 0.29836470225239126,
"grad_norm": 0.32088950276374817,
"learning_rate": 3e-05,
"loss": 2.5099,
"step": 967
},
{
"epoch": 0.2986732489972231,
"grad_norm": 0.4241865277290344,
"learning_rate": 3e-05,
"loss": 2.8371,
"step": 968
},
{
"epoch": 0.2989817957420549,
"grad_norm": 0.4249207675457001,
"learning_rate": 3e-05,
"loss": 2.786,
"step": 969
},
{
"epoch": 0.29929034248688674,
"grad_norm": 0.3566865622997284,
"learning_rate": 3e-05,
"loss": 2.4608,
"step": 970
},
{
"epoch": 0.2995988892317186,
"grad_norm": 0.4732634127140045,
"learning_rate": 3e-05,
"loss": 2.4006,
"step": 971
},
{
"epoch": 0.29990743597655045,
"grad_norm": 0.31523409485816956,
"learning_rate": 3e-05,
"loss": 2.5125,
"step": 972
},
{
"epoch": 0.3002159827213823,
"grad_norm": 0.31227585673332214,
"learning_rate": 3e-05,
"loss": 2.5859,
"step": 973
},
{
"epoch": 0.3005245294662141,
"grad_norm": 0.5433022379875183,
"learning_rate": 3e-05,
"loss": 2.7815,
"step": 974
},
{
"epoch": 0.300833076211046,
"grad_norm": 0.32463932037353516,
"learning_rate": 3e-05,
"loss": 2.5574,
"step": 975
},
{
"epoch": 0.3011416229558778,
"grad_norm": 0.45843788981437683,
"learning_rate": 3e-05,
"loss": 3.3021,
"step": 976
},
{
"epoch": 0.30145016970070965,
"grad_norm": 0.43129628896713257,
"learning_rate": 3e-05,
"loss": 2.7665,
"step": 977
},
{
"epoch": 0.3017587164455415,
"grad_norm": 0.4639509618282318,
"learning_rate": 3e-05,
"loss": 2.9852,
"step": 978
},
{
"epoch": 0.30206726319037336,
"grad_norm": 0.6034327745437622,
"learning_rate": 3e-05,
"loss": 2.7809,
"step": 979
},
{
"epoch": 0.3023758099352052,
"grad_norm": 0.4655909538269043,
"learning_rate": 3e-05,
"loss": 2.788,
"step": 980
},
{
"epoch": 0.302684356680037,
"grad_norm": 0.43474000692367554,
"learning_rate": 3e-05,
"loss": 2.7155,
"step": 981
},
{
"epoch": 0.30299290342486884,
"grad_norm": 0.4621677100658417,
"learning_rate": 3e-05,
"loss": 2.649,
"step": 982
},
{
"epoch": 0.3033014501697007,
"grad_norm": 0.39728179574012756,
"learning_rate": 3e-05,
"loss": 2.7499,
"step": 983
},
{
"epoch": 0.30360999691453255,
"grad_norm": 0.41515350341796875,
"learning_rate": 3e-05,
"loss": 2.4945,
"step": 984
},
{
"epoch": 0.3039185436593644,
"grad_norm": 0.4541874825954437,
"learning_rate": 3e-05,
"loss": 2.9019,
"step": 985
},
{
"epoch": 0.3042270904041962,
"grad_norm": 0.4044342339038849,
"learning_rate": 3e-05,
"loss": 2.5939,
"step": 986
},
{
"epoch": 0.3045356371490281,
"grad_norm": 0.5499434471130371,
"learning_rate": 3e-05,
"loss": 3.1831,
"step": 987
},
{
"epoch": 0.3048441838938599,
"grad_norm": 0.4253259599208832,
"learning_rate": 3e-05,
"loss": 2.6482,
"step": 988
},
{
"epoch": 0.30515273063869175,
"grad_norm": 0.5634761452674866,
"learning_rate": 3e-05,
"loss": 3.0694,
"step": 989
},
{
"epoch": 0.3054612773835236,
"grad_norm": 0.3745432496070862,
"learning_rate": 3e-05,
"loss": 2.864,
"step": 990
},
{
"epoch": 0.30576982412835546,
"grad_norm": 0.31692636013031006,
"learning_rate": 3e-05,
"loss": 2.4637,
"step": 991
},
{
"epoch": 0.3060783708731873,
"grad_norm": 0.45177918672561646,
"learning_rate": 3e-05,
"loss": 2.7045,
"step": 992
},
{
"epoch": 0.3063869176180191,
"grad_norm": 0.3548758924007416,
"learning_rate": 3e-05,
"loss": 2.4143,
"step": 993
},
{
"epoch": 0.30669546436285094,
"grad_norm": 0.4161062240600586,
"learning_rate": 3e-05,
"loss": 3.0351,
"step": 994
},
{
"epoch": 0.30700401110768283,
"grad_norm": 0.5312502384185791,
"learning_rate": 3e-05,
"loss": 2.8126,
"step": 995
},
{
"epoch": 0.30731255785251466,
"grad_norm": 0.4165649712085724,
"learning_rate": 3e-05,
"loss": 2.311,
"step": 996
},
{
"epoch": 0.3076211045973465,
"grad_norm": 0.4763493537902832,
"learning_rate": 3e-05,
"loss": 2.5855,
"step": 997
},
{
"epoch": 0.30792965134217837,
"grad_norm": 0.3590225279331207,
"learning_rate": 3e-05,
"loss": 2.4563,
"step": 998
},
{
"epoch": 0.3082381980870102,
"grad_norm": 0.43557247519493103,
"learning_rate": 3e-05,
"loss": 2.7756,
"step": 999
},
{
"epoch": 0.308546744831842,
"grad_norm": 0.5323253870010376,
"learning_rate": 3e-05,
"loss": 2.9261,
"step": 1000
},
{
"epoch": 0.30885529157667385,
"grad_norm": 0.3511796295642853,
"learning_rate": 3e-05,
"loss": 2.7418,
"step": 1001
},
{
"epoch": 0.30916383832150574,
"grad_norm": 0.37199896574020386,
"learning_rate": 3e-05,
"loss": 2.6009,
"step": 1002
},
{
"epoch": 0.30947238506633756,
"grad_norm": 0.34085142612457275,
"learning_rate": 3e-05,
"loss": 2.6959,
"step": 1003
},
{
"epoch": 0.3097809318111694,
"grad_norm": 0.4471310079097748,
"learning_rate": 3e-05,
"loss": 2.8914,
"step": 1004
},
{
"epoch": 0.3100894785560012,
"grad_norm": 0.3000938296318054,
"learning_rate": 3e-05,
"loss": 2.3222,
"step": 1005
},
{
"epoch": 0.3103980253008331,
"grad_norm": 0.4500029385089874,
"learning_rate": 3e-05,
"loss": 2.6863,
"step": 1006
},
{
"epoch": 0.31070657204566493,
"grad_norm": 0.36391642689704895,
"learning_rate": 3e-05,
"loss": 2.4052,
"step": 1007
},
{
"epoch": 0.31101511879049676,
"grad_norm": 0.33557793498039246,
"learning_rate": 3e-05,
"loss": 2.4544,
"step": 1008
},
{
"epoch": 0.3113236655353286,
"grad_norm": 0.5357667803764343,
"learning_rate": 3e-05,
"loss": 2.9365,
"step": 1009
},
{
"epoch": 0.31163221228016047,
"grad_norm": 0.40679875016212463,
"learning_rate": 3e-05,
"loss": 2.6265,
"step": 1010
},
{
"epoch": 0.3119407590249923,
"grad_norm": 0.4948636591434479,
"learning_rate": 3e-05,
"loss": 3.1441,
"step": 1011
},
{
"epoch": 0.3122493057698241,
"grad_norm": 0.3034040331840515,
"learning_rate": 3e-05,
"loss": 2.331,
"step": 1012
},
{
"epoch": 0.31255785251465595,
"grad_norm": 0.4402029812335968,
"learning_rate": 3e-05,
"loss": 2.7928,
"step": 1013
},
{
"epoch": 0.31286639925948784,
"grad_norm": 0.40119442343711853,
"learning_rate": 3e-05,
"loss": 2.7245,
"step": 1014
},
{
"epoch": 0.31317494600431967,
"grad_norm": 0.4709239900112152,
"learning_rate": 3e-05,
"loss": 2.8281,
"step": 1015
},
{
"epoch": 0.3134834927491515,
"grad_norm": 0.43850257992744446,
"learning_rate": 3e-05,
"loss": 2.4768,
"step": 1016
},
{
"epoch": 0.3137920394939833,
"grad_norm": 0.3689449429512024,
"learning_rate": 3e-05,
"loss": 2.4916,
"step": 1017
},
{
"epoch": 0.3141005862388152,
"grad_norm": 0.3573774993419647,
"learning_rate": 3e-05,
"loss": 2.5842,
"step": 1018
},
{
"epoch": 0.31440913298364703,
"grad_norm": 0.5338215231895447,
"learning_rate": 3e-05,
"loss": 3.1707,
"step": 1019
},
{
"epoch": 0.31471767972847886,
"grad_norm": 0.47685402631759644,
"learning_rate": 3e-05,
"loss": 2.8287,
"step": 1020
},
{
"epoch": 0.3150262264733107,
"grad_norm": 0.29378730058670044,
"learning_rate": 3e-05,
"loss": 2.3566,
"step": 1021
},
{
"epoch": 0.31533477321814257,
"grad_norm": 0.3345448076725006,
"learning_rate": 3e-05,
"loss": 2.8883,
"step": 1022
},
{
"epoch": 0.3156433199629744,
"grad_norm": 0.3130493760108948,
"learning_rate": 3e-05,
"loss": 2.2636,
"step": 1023
},
{
"epoch": 0.31595186670780623,
"grad_norm": 0.4794589877128601,
"learning_rate": 3e-05,
"loss": 2.515,
"step": 1024
},
{
"epoch": 0.31626041345263806,
"grad_norm": 0.4308234751224518,
"learning_rate": 3e-05,
"loss": 3.2071,
"step": 1025
},
{
"epoch": 0.31656896019746994,
"grad_norm": 0.33414486050605774,
"learning_rate": 3e-05,
"loss": 2.5691,
"step": 1026
},
{
"epoch": 0.31687750694230177,
"grad_norm": 0.3229790925979614,
"learning_rate": 3e-05,
"loss": 2.3644,
"step": 1027
},
{
"epoch": 0.3171860536871336,
"grad_norm": 0.5011124610900879,
"learning_rate": 3e-05,
"loss": 2.7893,
"step": 1028
},
{
"epoch": 0.3174946004319654,
"grad_norm": 0.37093424797058105,
"learning_rate": 3e-05,
"loss": 2.7773,
"step": 1029
},
{
"epoch": 0.3178031471767973,
"grad_norm": 0.5820983052253723,
"learning_rate": 3e-05,
"loss": 2.826,
"step": 1030
},
{
"epoch": 0.31811169392162914,
"grad_norm": 0.3745529353618622,
"learning_rate": 3e-05,
"loss": 2.678,
"step": 1031
},
{
"epoch": 0.31842024066646096,
"grad_norm": 0.4362945854663849,
"learning_rate": 3e-05,
"loss": 2.7813,
"step": 1032
},
{
"epoch": 0.3187287874112928,
"grad_norm": 0.45973077416419983,
"learning_rate": 3e-05,
"loss": 3.0007,
"step": 1033
},
{
"epoch": 0.3190373341561247,
"grad_norm": 0.3230498731136322,
"learning_rate": 3e-05,
"loss": 2.1809,
"step": 1034
},
{
"epoch": 0.3193458809009565,
"grad_norm": 0.3093605041503906,
"learning_rate": 3e-05,
"loss": 2.6337,
"step": 1035
},
{
"epoch": 0.31965442764578833,
"grad_norm": 0.33831560611724854,
"learning_rate": 3e-05,
"loss": 2.667,
"step": 1036
},
{
"epoch": 0.31996297439062016,
"grad_norm": 0.5394222140312195,
"learning_rate": 3e-05,
"loss": 3.0874,
"step": 1037
},
{
"epoch": 0.32027152113545204,
"grad_norm": 0.46193042397499084,
"learning_rate": 3e-05,
"loss": 3.1571,
"step": 1038
},
{
"epoch": 0.32058006788028387,
"grad_norm": 0.4071999788284302,
"learning_rate": 3e-05,
"loss": 2.7319,
"step": 1039
},
{
"epoch": 0.3208886146251157,
"grad_norm": 0.4034838080406189,
"learning_rate": 3e-05,
"loss": 2.5469,
"step": 1040
},
{
"epoch": 0.3211971613699475,
"grad_norm": 0.4690152406692505,
"learning_rate": 3e-05,
"loss": 2.7243,
"step": 1041
},
{
"epoch": 0.3215057081147794,
"grad_norm": 0.33456355333328247,
"learning_rate": 3e-05,
"loss": 2.2573,
"step": 1042
},
{
"epoch": 0.32181425485961124,
"grad_norm": 0.3276689350605011,
"learning_rate": 3e-05,
"loss": 2.3228,
"step": 1043
},
{
"epoch": 0.32212280160444307,
"grad_norm": 0.3934038579463959,
"learning_rate": 3e-05,
"loss": 2.7742,
"step": 1044
},
{
"epoch": 0.3224313483492749,
"grad_norm": 0.3162141740322113,
"learning_rate": 3e-05,
"loss": 2.3948,
"step": 1045
},
{
"epoch": 0.3227398950941068,
"grad_norm": 0.35283178091049194,
"learning_rate": 3e-05,
"loss": 2.5482,
"step": 1046
},
{
"epoch": 0.3230484418389386,
"grad_norm": 0.30801281332969666,
"learning_rate": 3e-05,
"loss": 2.3499,
"step": 1047
},
{
"epoch": 0.32335698858377043,
"grad_norm": 0.2911578118801117,
"learning_rate": 3e-05,
"loss": 2.5108,
"step": 1048
},
{
"epoch": 0.32366553532860226,
"grad_norm": 0.4680328965187073,
"learning_rate": 3e-05,
"loss": 2.8629,
"step": 1049
},
{
"epoch": 0.32397408207343414,
"grad_norm": 0.27995920181274414,
"learning_rate": 3e-05,
"loss": 2.2291,
"step": 1050
},
{
"epoch": 0.32428262881826597,
"grad_norm": 0.29879486560821533,
"learning_rate": 3e-05,
"loss": 2.4503,
"step": 1051
},
{
"epoch": 0.3245911755630978,
"grad_norm": 0.4487646520137787,
"learning_rate": 3e-05,
"loss": 3.1378,
"step": 1052
},
{
"epoch": 0.32489972230792963,
"grad_norm": 0.4540494680404663,
"learning_rate": 3e-05,
"loss": 3.0161,
"step": 1053
},
{
"epoch": 0.3252082690527615,
"grad_norm": 0.6287296414375305,
"learning_rate": 3e-05,
"loss": 2.7699,
"step": 1054
},
{
"epoch": 0.32551681579759334,
"grad_norm": 0.3549972176551819,
"learning_rate": 3e-05,
"loss": 2.517,
"step": 1055
},
{
"epoch": 0.32582536254242517,
"grad_norm": 0.35076603293418884,
"learning_rate": 3e-05,
"loss": 2.5529,
"step": 1056
},
{
"epoch": 0.326133909287257,
"grad_norm": 0.5306907296180725,
"learning_rate": 3e-05,
"loss": 2.6375,
"step": 1057
},
{
"epoch": 0.3264424560320889,
"grad_norm": 0.6471700072288513,
"learning_rate": 3e-05,
"loss": 3.0257,
"step": 1058
},
{
"epoch": 0.3267510027769207,
"grad_norm": 0.4791683256626129,
"learning_rate": 3e-05,
"loss": 2.673,
"step": 1059
},
{
"epoch": 0.32705954952175254,
"grad_norm": 0.3479447364807129,
"learning_rate": 3e-05,
"loss": 2.4918,
"step": 1060
},
{
"epoch": 0.32736809626658436,
"grad_norm": 0.5895714163780212,
"learning_rate": 3e-05,
"loss": 2.7425,
"step": 1061
},
{
"epoch": 0.32767664301141625,
"grad_norm": 0.600107729434967,
"learning_rate": 3e-05,
"loss": 3.1736,
"step": 1062
},
{
"epoch": 0.3279851897562481,
"grad_norm": 0.37535980343818665,
"learning_rate": 3e-05,
"loss": 2.5499,
"step": 1063
},
{
"epoch": 0.3282937365010799,
"grad_norm": 0.5155684351921082,
"learning_rate": 3e-05,
"loss": 2.6339,
"step": 1064
},
{
"epoch": 0.32860228324591173,
"grad_norm": 0.4304593801498413,
"learning_rate": 3e-05,
"loss": 2.9669,
"step": 1065
},
{
"epoch": 0.3289108299907436,
"grad_norm": 0.38700851798057556,
"learning_rate": 3e-05,
"loss": 2.6592,
"step": 1066
},
{
"epoch": 0.32921937673557544,
"grad_norm": 0.42323800921440125,
"learning_rate": 3e-05,
"loss": 2.468,
"step": 1067
},
{
"epoch": 0.32952792348040727,
"grad_norm": 0.4084995687007904,
"learning_rate": 3e-05,
"loss": 2.908,
"step": 1068
},
{
"epoch": 0.3298364702252391,
"grad_norm": 0.4313088655471802,
"learning_rate": 3e-05,
"loss": 2.6691,
"step": 1069
},
{
"epoch": 0.330145016970071,
"grad_norm": 0.7936158776283264,
"learning_rate": 3e-05,
"loss": 2.912,
"step": 1070
},
{
"epoch": 0.3304535637149028,
"grad_norm": 0.300073504447937,
"learning_rate": 3e-05,
"loss": 2.2288,
"step": 1071
},
{
"epoch": 0.33076211045973464,
"grad_norm": 0.37451618909835815,
"learning_rate": 3e-05,
"loss": 2.9299,
"step": 1072
},
{
"epoch": 0.33107065720456647,
"grad_norm": 0.5415565967559814,
"learning_rate": 3e-05,
"loss": 2.7914,
"step": 1073
},
{
"epoch": 0.33137920394939835,
"grad_norm": 0.35867804288864136,
"learning_rate": 3e-05,
"loss": 2.8536,
"step": 1074
},
{
"epoch": 0.3316877506942302,
"grad_norm": 0.4286486506462097,
"learning_rate": 3e-05,
"loss": 2.7861,
"step": 1075
},
{
"epoch": 0.331996297439062,
"grad_norm": 0.31424498558044434,
"learning_rate": 3e-05,
"loss": 2.5602,
"step": 1076
},
{
"epoch": 0.33230484418389383,
"grad_norm": 0.2903349697589874,
"learning_rate": 3e-05,
"loss": 2.2673,
"step": 1077
},
{
"epoch": 0.3326133909287257,
"grad_norm": 0.3674512505531311,
"learning_rate": 3e-05,
"loss": 2.7277,
"step": 1078
},
{
"epoch": 0.33292193767355754,
"grad_norm": 0.5027512311935425,
"learning_rate": 3e-05,
"loss": 2.6751,
"step": 1079
},
{
"epoch": 0.33323048441838937,
"grad_norm": 0.33197149634361267,
"learning_rate": 3e-05,
"loss": 2.2715,
"step": 1080
},
{
"epoch": 0.33353903116322126,
"grad_norm": 0.3024391233921051,
"learning_rate": 3e-05,
"loss": 2.5931,
"step": 1081
},
{
"epoch": 0.3338475779080531,
"grad_norm": 0.5677501559257507,
"learning_rate": 3e-05,
"loss": 2.7854,
"step": 1082
},
{
"epoch": 0.3341561246528849,
"grad_norm": 0.349448025226593,
"learning_rate": 3e-05,
"loss": 2.5758,
"step": 1083
},
{
"epoch": 0.33446467139771674,
"grad_norm": 0.4105369746685028,
"learning_rate": 3e-05,
"loss": 2.5552,
"step": 1084
},
{
"epoch": 0.3347732181425486,
"grad_norm": 0.423350989818573,
"learning_rate": 3e-05,
"loss": 2.8894,
"step": 1085
},
{
"epoch": 0.33508176488738045,
"grad_norm": 0.45168107748031616,
"learning_rate": 3e-05,
"loss": 2.7562,
"step": 1086
},
{
"epoch": 0.3353903116322123,
"grad_norm": 0.354754775762558,
"learning_rate": 3e-05,
"loss": 2.8504,
"step": 1087
},
{
"epoch": 0.3356988583770441,
"grad_norm": 0.31282752752304077,
"learning_rate": 3e-05,
"loss": 2.6208,
"step": 1088
},
{
"epoch": 0.336007405121876,
"grad_norm": 0.3565126359462738,
"learning_rate": 3e-05,
"loss": 2.7854,
"step": 1089
},
{
"epoch": 0.3363159518667078,
"grad_norm": 0.4445134401321411,
"learning_rate": 3e-05,
"loss": 2.8022,
"step": 1090
},
{
"epoch": 0.33662449861153965,
"grad_norm": 0.682829737663269,
"learning_rate": 3e-05,
"loss": 2.7264,
"step": 1091
},
{
"epoch": 0.3369330453563715,
"grad_norm": 0.4143361747264862,
"learning_rate": 3e-05,
"loss": 2.8082,
"step": 1092
},
{
"epoch": 0.33724159210120336,
"grad_norm": 0.4276883602142334,
"learning_rate": 3e-05,
"loss": 2.5857,
"step": 1093
},
{
"epoch": 0.3375501388460352,
"grad_norm": 0.5904972553253174,
"learning_rate": 3e-05,
"loss": 2.4174,
"step": 1094
},
{
"epoch": 0.337858685590867,
"grad_norm": 0.5379177927970886,
"learning_rate": 3e-05,
"loss": 2.9506,
"step": 1095
},
{
"epoch": 0.33816723233569884,
"grad_norm": 0.3578889071941376,
"learning_rate": 3e-05,
"loss": 2.6252,
"step": 1096
},
{
"epoch": 0.3384757790805307,
"grad_norm": 0.459988534450531,
"learning_rate": 3e-05,
"loss": 2.7379,
"step": 1097
},
{
"epoch": 0.33878432582536255,
"grad_norm": 0.5335902571678162,
"learning_rate": 3e-05,
"loss": 2.7689,
"step": 1098
},
{
"epoch": 0.3390928725701944,
"grad_norm": 0.427128404378891,
"learning_rate": 3e-05,
"loss": 2.5377,
"step": 1099
},
{
"epoch": 0.3394014193150262,
"grad_norm": 0.36371105909347534,
"learning_rate": 3e-05,
"loss": 2.3228,
"step": 1100
},
{
"epoch": 0.3397099660598581,
"grad_norm": 0.6602774262428284,
"learning_rate": 3e-05,
"loss": 2.7878,
"step": 1101
},
{
"epoch": 0.3400185128046899,
"grad_norm": 0.27885714173316956,
"learning_rate": 3e-05,
"loss": 2.2656,
"step": 1102
},
{
"epoch": 0.34032705954952175,
"grad_norm": 0.5975661277770996,
"learning_rate": 3e-05,
"loss": 2.9421,
"step": 1103
},
{
"epoch": 0.3406356062943536,
"grad_norm": 0.6099026203155518,
"learning_rate": 3e-05,
"loss": 3.1214,
"step": 1104
},
{
"epoch": 0.34094415303918546,
"grad_norm": 0.4762013554573059,
"learning_rate": 3e-05,
"loss": 2.3022,
"step": 1105
},
{
"epoch": 0.3412526997840173,
"grad_norm": 0.6812524795532227,
"learning_rate": 3e-05,
"loss": 2.5754,
"step": 1106
},
{
"epoch": 0.3415612465288491,
"grad_norm": 0.41604599356651306,
"learning_rate": 3e-05,
"loss": 2.7934,
"step": 1107
},
{
"epoch": 0.34186979327368094,
"grad_norm": 0.40861639380455017,
"learning_rate": 3e-05,
"loss": 2.7791,
"step": 1108
},
{
"epoch": 0.3421783400185128,
"grad_norm": 0.533877968788147,
"learning_rate": 3e-05,
"loss": 3.0471,
"step": 1109
},
{
"epoch": 0.34248688676334466,
"grad_norm": 0.34040915966033936,
"learning_rate": 3e-05,
"loss": 2.1942,
"step": 1110
},
{
"epoch": 0.3427954335081765,
"grad_norm": 0.5931209921836853,
"learning_rate": 3e-05,
"loss": 2.6718,
"step": 1111
},
{
"epoch": 0.3431039802530083,
"grad_norm": 0.4841914772987366,
"learning_rate": 3e-05,
"loss": 2.5222,
"step": 1112
},
{
"epoch": 0.3434125269978402,
"grad_norm": 0.32610583305358887,
"learning_rate": 3e-05,
"loss": 2.5451,
"step": 1113
},
{
"epoch": 0.343721073742672,
"grad_norm": 0.42859357595443726,
"learning_rate": 3e-05,
"loss": 2.9902,
"step": 1114
},
{
"epoch": 0.34402962048750385,
"grad_norm": 0.4420587122440338,
"learning_rate": 3e-05,
"loss": 2.6019,
"step": 1115
},
{
"epoch": 0.3443381672323357,
"grad_norm": 0.4523164927959442,
"learning_rate": 3e-05,
"loss": 2.6256,
"step": 1116
},
{
"epoch": 0.34464671397716756,
"grad_norm": 0.4764743447303772,
"learning_rate": 3e-05,
"loss": 2.942,
"step": 1117
},
{
"epoch": 0.3449552607219994,
"grad_norm": 0.40522849559783936,
"learning_rate": 3e-05,
"loss": 2.6806,
"step": 1118
},
{
"epoch": 0.3452638074668312,
"grad_norm": 0.331582248210907,
"learning_rate": 3e-05,
"loss": 2.3276,
"step": 1119
},
{
"epoch": 0.34557235421166305,
"grad_norm": 0.4719706177711487,
"learning_rate": 3e-05,
"loss": 3.0291,
"step": 1120
},
{
"epoch": 0.34588090095649493,
"grad_norm": 0.44904038310050964,
"learning_rate": 3e-05,
"loss": 2.8547,
"step": 1121
},
{
"epoch": 0.34618944770132676,
"grad_norm": 0.43688273429870605,
"learning_rate": 3e-05,
"loss": 2.4615,
"step": 1122
},
{
"epoch": 0.3464979944461586,
"grad_norm": 0.4403668940067291,
"learning_rate": 3e-05,
"loss": 3.0372,
"step": 1123
},
{
"epoch": 0.3468065411909904,
"grad_norm": 0.3114491105079651,
"learning_rate": 3e-05,
"loss": 2.5904,
"step": 1124
},
{
"epoch": 0.3471150879358223,
"grad_norm": 0.43030625581741333,
"learning_rate": 3e-05,
"loss": 2.5914,
"step": 1125
},
{
"epoch": 0.3474236346806541,
"grad_norm": 0.5228371620178223,
"learning_rate": 3e-05,
"loss": 3.269,
"step": 1126
},
{
"epoch": 0.34773218142548595,
"grad_norm": 0.35842400789260864,
"learning_rate": 3e-05,
"loss": 2.6338,
"step": 1127
},
{
"epoch": 0.3480407281703178,
"grad_norm": 0.33594179153442383,
"learning_rate": 3e-05,
"loss": 2.5405,
"step": 1128
},
{
"epoch": 0.34834927491514966,
"grad_norm": 0.34110668301582336,
"learning_rate": 3e-05,
"loss": 2.5228,
"step": 1129
},
{
"epoch": 0.3486578216599815,
"grad_norm": 0.42432937026023865,
"learning_rate": 3e-05,
"loss": 2.8293,
"step": 1130
},
{
"epoch": 0.3489663684048133,
"grad_norm": 0.42318588495254517,
"learning_rate": 3e-05,
"loss": 2.2771,
"step": 1131
},
{
"epoch": 0.34927491514964515,
"grad_norm": 0.34015128016471863,
"learning_rate": 3e-05,
"loss": 2.7245,
"step": 1132
},
{
"epoch": 0.34958346189447703,
"grad_norm": 0.3692342936992645,
"learning_rate": 3e-05,
"loss": 2.5157,
"step": 1133
},
{
"epoch": 0.34989200863930886,
"grad_norm": 0.5286168456077576,
"learning_rate": 3e-05,
"loss": 3.3151,
"step": 1134
},
{
"epoch": 0.3502005553841407,
"grad_norm": 0.4966541528701782,
"learning_rate": 3e-05,
"loss": 3.3489,
"step": 1135
},
{
"epoch": 0.3505091021289725,
"grad_norm": 0.5056973695755005,
"learning_rate": 3e-05,
"loss": 2.8628,
"step": 1136
},
{
"epoch": 0.3508176488738044,
"grad_norm": 0.40126872062683105,
"learning_rate": 3e-05,
"loss": 3.2355,
"step": 1137
},
{
"epoch": 0.3511261956186362,
"grad_norm": 0.4347645938396454,
"learning_rate": 3e-05,
"loss": 2.3129,
"step": 1138
},
{
"epoch": 0.35143474236346806,
"grad_norm": 0.5079309940338135,
"learning_rate": 3e-05,
"loss": 2.6779,
"step": 1139
},
{
"epoch": 0.3517432891082999,
"grad_norm": 0.34270283579826355,
"learning_rate": 3e-05,
"loss": 2.6367,
"step": 1140
},
{
"epoch": 0.35205183585313177,
"grad_norm": 0.3936125636100769,
"learning_rate": 3e-05,
"loss": 2.7157,
"step": 1141
},
{
"epoch": 0.3523603825979636,
"grad_norm": 0.5401539206504822,
"learning_rate": 3e-05,
"loss": 3.1242,
"step": 1142
},
{
"epoch": 0.3526689293427954,
"grad_norm": 0.3918668329715729,
"learning_rate": 3e-05,
"loss": 2.9379,
"step": 1143
},
{
"epoch": 0.35297747608762725,
"grad_norm": 0.3724942207336426,
"learning_rate": 3e-05,
"loss": 2.5844,
"step": 1144
},
{
"epoch": 0.35328602283245913,
"grad_norm": 0.4116598963737488,
"learning_rate": 3e-05,
"loss": 2.6892,
"step": 1145
},
{
"epoch": 0.35359456957729096,
"grad_norm": 0.5307642817497253,
"learning_rate": 3e-05,
"loss": 2.7121,
"step": 1146
},
{
"epoch": 0.3539031163221228,
"grad_norm": 0.34816232323646545,
"learning_rate": 3e-05,
"loss": 2.6197,
"step": 1147
},
{
"epoch": 0.3542116630669546,
"grad_norm": 0.3448748290538788,
"learning_rate": 3e-05,
"loss": 2.5077,
"step": 1148
},
{
"epoch": 0.3545202098117865,
"grad_norm": 0.39467447996139526,
"learning_rate": 3e-05,
"loss": 2.3641,
"step": 1149
},
{
"epoch": 0.35482875655661833,
"grad_norm": 0.39322465658187866,
"learning_rate": 3e-05,
"loss": 2.6104,
"step": 1150
},
{
"epoch": 0.35513730330145016,
"grad_norm": 0.34313109517097473,
"learning_rate": 3e-05,
"loss": 2.7748,
"step": 1151
},
{
"epoch": 0.355445850046282,
"grad_norm": 0.32297638058662415,
"learning_rate": 3e-05,
"loss": 2.6006,
"step": 1152
},
{
"epoch": 0.35575439679111387,
"grad_norm": 0.35026779770851135,
"learning_rate": 3e-05,
"loss": 2.3611,
"step": 1153
},
{
"epoch": 0.3560629435359457,
"grad_norm": 0.4179763197898865,
"learning_rate": 3e-05,
"loss": 3.1878,
"step": 1154
},
{
"epoch": 0.3563714902807775,
"grad_norm": 0.3217645287513733,
"learning_rate": 3e-05,
"loss": 2.3414,
"step": 1155
},
{
"epoch": 0.35668003702560935,
"grad_norm": 0.43228569626808167,
"learning_rate": 3e-05,
"loss": 3.0959,
"step": 1156
},
{
"epoch": 0.35698858377044124,
"grad_norm": 0.5505173802375793,
"learning_rate": 3e-05,
"loss": 3.0136,
"step": 1157
},
{
"epoch": 0.35729713051527306,
"grad_norm": 0.31909194588661194,
"learning_rate": 3e-05,
"loss": 2.4238,
"step": 1158
},
{
"epoch": 0.3576056772601049,
"grad_norm": 0.38182443380355835,
"learning_rate": 3e-05,
"loss": 2.8554,
"step": 1159
},
{
"epoch": 0.3579142240049367,
"grad_norm": 0.3917055130004883,
"learning_rate": 3e-05,
"loss": 2.4918,
"step": 1160
},
{
"epoch": 0.3582227707497686,
"grad_norm": 0.30668020248413086,
"learning_rate": 3e-05,
"loss": 2.4466,
"step": 1161
},
{
"epoch": 0.35853131749460043,
"grad_norm": 0.4843204617500305,
"learning_rate": 3e-05,
"loss": 3.0447,
"step": 1162
},
{
"epoch": 0.35883986423943226,
"grad_norm": 0.3819452226161957,
"learning_rate": 3e-05,
"loss": 2.8378,
"step": 1163
},
{
"epoch": 0.35914841098426414,
"grad_norm": 0.41383904218673706,
"learning_rate": 3e-05,
"loss": 2.558,
"step": 1164
},
{
"epoch": 0.35945695772909597,
"grad_norm": 0.31443139910697937,
"learning_rate": 3e-05,
"loss": 2.7383,
"step": 1165
},
{
"epoch": 0.3597655044739278,
"grad_norm": 0.39199626445770264,
"learning_rate": 3e-05,
"loss": 2.7038,
"step": 1166
},
{
"epoch": 0.3600740512187596,
"grad_norm": 0.3552488386631012,
"learning_rate": 3e-05,
"loss": 2.3299,
"step": 1167
},
{
"epoch": 0.3603825979635915,
"grad_norm": 0.45004716515541077,
"learning_rate": 3e-05,
"loss": 3.0991,
"step": 1168
},
{
"epoch": 0.36069114470842334,
"grad_norm": 0.3720468282699585,
"learning_rate": 3e-05,
"loss": 2.5241,
"step": 1169
},
{
"epoch": 0.36099969145325517,
"grad_norm": 0.4281458258628845,
"learning_rate": 3e-05,
"loss": 2.9697,
"step": 1170
},
{
"epoch": 0.361308238198087,
"grad_norm": 0.4345632791519165,
"learning_rate": 3e-05,
"loss": 2.473,
"step": 1171
},
{
"epoch": 0.3616167849429189,
"grad_norm": 0.28444620966911316,
"learning_rate": 3e-05,
"loss": 2.2297,
"step": 1172
},
{
"epoch": 0.3619253316877507,
"grad_norm": 0.33514413237571716,
"learning_rate": 3e-05,
"loss": 2.7778,
"step": 1173
},
{
"epoch": 0.36223387843258253,
"grad_norm": 0.29754677414894104,
"learning_rate": 3e-05,
"loss": 2.5157,
"step": 1174
},
{
"epoch": 0.36254242517741436,
"grad_norm": 0.3521910309791565,
"learning_rate": 3e-05,
"loss": 2.8631,
"step": 1175
},
{
"epoch": 0.36285097192224625,
"grad_norm": 0.35081109404563904,
"learning_rate": 3e-05,
"loss": 2.3547,
"step": 1176
},
{
"epoch": 0.3631595186670781,
"grad_norm": 0.6115928292274475,
"learning_rate": 3e-05,
"loss": 3.0622,
"step": 1177
},
{
"epoch": 0.3634680654119099,
"grad_norm": 0.3421701192855835,
"learning_rate": 3e-05,
"loss": 2.5053,
"step": 1178
},
{
"epoch": 0.36377661215674173,
"grad_norm": 0.42282208800315857,
"learning_rate": 3e-05,
"loss": 3.1358,
"step": 1179
},
{
"epoch": 0.3640851589015736,
"grad_norm": 0.3527933359146118,
"learning_rate": 3e-05,
"loss": 2.7091,
"step": 1180
},
{
"epoch": 0.36439370564640544,
"grad_norm": 0.40788233280181885,
"learning_rate": 3e-05,
"loss": 2.7765,
"step": 1181
},
{
"epoch": 0.36470225239123727,
"grad_norm": 0.39160051941871643,
"learning_rate": 3e-05,
"loss": 2.8549,
"step": 1182
},
{
"epoch": 0.3650107991360691,
"grad_norm": 0.4997316598892212,
"learning_rate": 3e-05,
"loss": 2.7178,
"step": 1183
},
{
"epoch": 0.365319345880901,
"grad_norm": 0.376682847738266,
"learning_rate": 3e-05,
"loss": 2.7221,
"step": 1184
},
{
"epoch": 0.3656278926257328,
"grad_norm": 0.5439577102661133,
"learning_rate": 3e-05,
"loss": 2.7612,
"step": 1185
},
{
"epoch": 0.36593643937056464,
"grad_norm": 0.375169962644577,
"learning_rate": 3e-05,
"loss": 2.8623,
"step": 1186
},
{
"epoch": 0.36624498611539646,
"grad_norm": 0.3876233994960785,
"learning_rate": 3e-05,
"loss": 2.7473,
"step": 1187
},
{
"epoch": 0.36655353286022835,
"grad_norm": 0.36275288462638855,
"learning_rate": 3e-05,
"loss": 2.7514,
"step": 1188
},
{
"epoch": 0.3668620796050602,
"grad_norm": 0.36678746342658997,
"learning_rate": 3e-05,
"loss": 2.8159,
"step": 1189
},
{
"epoch": 0.367170626349892,
"grad_norm": 0.4482974410057068,
"learning_rate": 3e-05,
"loss": 2.6262,
"step": 1190
},
{
"epoch": 0.36747917309472383,
"grad_norm": 0.5268934369087219,
"learning_rate": 3e-05,
"loss": 3.0075,
"step": 1191
},
{
"epoch": 0.3677877198395557,
"grad_norm": 0.5856001377105713,
"learning_rate": 3e-05,
"loss": 3.1768,
"step": 1192
},
{
"epoch": 0.36809626658438754,
"grad_norm": 0.36955198645591736,
"learning_rate": 3e-05,
"loss": 2.2928,
"step": 1193
},
{
"epoch": 0.36840481332921937,
"grad_norm": 0.4243110120296478,
"learning_rate": 3e-05,
"loss": 2.8757,
"step": 1194
},
{
"epoch": 0.3687133600740512,
"grad_norm": 0.5759037733078003,
"learning_rate": 3e-05,
"loss": 3.1536,
"step": 1195
},
{
"epoch": 0.3690219068188831,
"grad_norm": 0.39592766761779785,
"learning_rate": 3e-05,
"loss": 2.6084,
"step": 1196
},
{
"epoch": 0.3693304535637149,
"grad_norm": 0.4080641567707062,
"learning_rate": 3e-05,
"loss": 2.5055,
"step": 1197
},
{
"epoch": 0.36963900030854674,
"grad_norm": 0.46009865403175354,
"learning_rate": 3e-05,
"loss": 2.9701,
"step": 1198
},
{
"epoch": 0.36994754705337857,
"grad_norm": 0.2832425832748413,
"learning_rate": 3e-05,
"loss": 2.3101,
"step": 1199
},
{
"epoch": 0.37025609379821045,
"grad_norm": 0.3997393548488617,
"learning_rate": 3e-05,
"loss": 2.5356,
"step": 1200
},
{
"epoch": 0.3705646405430423,
"grad_norm": 0.3031362295150757,
"learning_rate": 3e-05,
"loss": 2.0162,
"step": 1201
},
{
"epoch": 0.3708731872878741,
"grad_norm": 0.29112961888313293,
"learning_rate": 3e-05,
"loss": 2.1462,
"step": 1202
},
{
"epoch": 0.37118173403270593,
"grad_norm": 0.38319069147109985,
"learning_rate": 3e-05,
"loss": 2.0506,
"step": 1203
},
{
"epoch": 0.3714902807775378,
"grad_norm": 0.4330032467842102,
"learning_rate": 3e-05,
"loss": 2.3374,
"step": 1204
},
{
"epoch": 0.37179882752236965,
"grad_norm": 0.2930798828601837,
"learning_rate": 3e-05,
"loss": 2.4065,
"step": 1205
},
{
"epoch": 0.3721073742672015,
"grad_norm": 0.37502041459083557,
"learning_rate": 3e-05,
"loss": 2.5894,
"step": 1206
},
{
"epoch": 0.3724159210120333,
"grad_norm": 0.3615609109401703,
"learning_rate": 3e-05,
"loss": 2.4882,
"step": 1207
},
{
"epoch": 0.3727244677568652,
"grad_norm": 0.4277931749820709,
"learning_rate": 3e-05,
"loss": 2.4733,
"step": 1208
},
{
"epoch": 0.373033014501697,
"grad_norm": 0.33783355355262756,
"learning_rate": 3e-05,
"loss": 2.66,
"step": 1209
},
{
"epoch": 0.37334156124652884,
"grad_norm": 0.37479284405708313,
"learning_rate": 3e-05,
"loss": 2.575,
"step": 1210
},
{
"epoch": 0.37365010799136067,
"grad_norm": 0.4059560298919678,
"learning_rate": 3e-05,
"loss": 2.8207,
"step": 1211
},
{
"epoch": 0.37395865473619255,
"grad_norm": 0.50154709815979,
"learning_rate": 3e-05,
"loss": 2.8768,
"step": 1212
},
{
"epoch": 0.3742672014810244,
"grad_norm": 0.3010912239551544,
"learning_rate": 3e-05,
"loss": 2.2838,
"step": 1213
},
{
"epoch": 0.3745757482258562,
"grad_norm": 0.45400598645210266,
"learning_rate": 3e-05,
"loss": 2.504,
"step": 1214
},
{
"epoch": 0.37488429497068804,
"grad_norm": 0.3583899140357971,
"learning_rate": 3e-05,
"loss": 2.8666,
"step": 1215
},
{
"epoch": 0.3751928417155199,
"grad_norm": 0.5383654236793518,
"learning_rate": 3e-05,
"loss": 2.9554,
"step": 1216
},
{
"epoch": 0.37550138846035175,
"grad_norm": 0.29187503457069397,
"learning_rate": 3e-05,
"loss": 2.2723,
"step": 1217
},
{
"epoch": 0.3758099352051836,
"grad_norm": 0.32889237999916077,
"learning_rate": 3e-05,
"loss": 2.3481,
"step": 1218
},
{
"epoch": 0.3761184819500154,
"grad_norm": 0.29874467849731445,
"learning_rate": 3e-05,
"loss": 2.1303,
"step": 1219
},
{
"epoch": 0.3764270286948473,
"grad_norm": 0.3520304560661316,
"learning_rate": 3e-05,
"loss": 2.5011,
"step": 1220
},
{
"epoch": 0.3767355754396791,
"grad_norm": 0.31358984112739563,
"learning_rate": 3e-05,
"loss": 2.3208,
"step": 1221
},
{
"epoch": 0.37704412218451094,
"grad_norm": 0.921454668045044,
"learning_rate": 3e-05,
"loss": 3.7345,
"step": 1222
},
{
"epoch": 0.37735266892934277,
"grad_norm": 0.600284218788147,
"learning_rate": 3e-05,
"loss": 2.6006,
"step": 1223
},
{
"epoch": 0.37766121567417466,
"grad_norm": 0.6821380853652954,
"learning_rate": 3e-05,
"loss": 2.7102,
"step": 1224
},
{
"epoch": 0.3779697624190065,
"grad_norm": 0.46705061197280884,
"learning_rate": 3e-05,
"loss": 2.6111,
"step": 1225
},
{
"epoch": 0.3782783091638383,
"grad_norm": 0.41461801528930664,
"learning_rate": 3e-05,
"loss": 2.5879,
"step": 1226
},
{
"epoch": 0.37858685590867014,
"grad_norm": 0.3981480896472931,
"learning_rate": 3e-05,
"loss": 2.5165,
"step": 1227
},
{
"epoch": 0.378895402653502,
"grad_norm": 0.6518423557281494,
"learning_rate": 3e-05,
"loss": 2.8353,
"step": 1228
},
{
"epoch": 0.37920394939833385,
"grad_norm": 0.5395017862319946,
"learning_rate": 3e-05,
"loss": 2.6184,
"step": 1229
},
{
"epoch": 0.3795124961431657,
"grad_norm": 0.4610118865966797,
"learning_rate": 3e-05,
"loss": 2.6831,
"step": 1230
},
{
"epoch": 0.3798210428879975,
"grad_norm": 0.38779354095458984,
"learning_rate": 3e-05,
"loss": 2.6815,
"step": 1231
},
{
"epoch": 0.3801295896328294,
"grad_norm": 0.5472552180290222,
"learning_rate": 3e-05,
"loss": 2.4982,
"step": 1232
},
{
"epoch": 0.3804381363776612,
"grad_norm": 0.5459097623825073,
"learning_rate": 3e-05,
"loss": 2.4581,
"step": 1233
},
{
"epoch": 0.38074668312249305,
"grad_norm": 0.47404393553733826,
"learning_rate": 3e-05,
"loss": 2.7392,
"step": 1234
},
{
"epoch": 0.3810552298673249,
"grad_norm": 0.428065687417984,
"learning_rate": 3e-05,
"loss": 2.6488,
"step": 1235
},
{
"epoch": 0.38136377661215676,
"grad_norm": 0.6436012983322144,
"learning_rate": 3e-05,
"loss": 2.8324,
"step": 1236
},
{
"epoch": 0.3816723233569886,
"grad_norm": 0.7773986458778381,
"learning_rate": 3e-05,
"loss": 2.7858,
"step": 1237
},
{
"epoch": 0.3819808701018204,
"grad_norm": 0.4160424470901489,
"learning_rate": 3e-05,
"loss": 2.7753,
"step": 1238
},
{
"epoch": 0.38228941684665224,
"grad_norm": 0.3402724266052246,
"learning_rate": 3e-05,
"loss": 2.6381,
"step": 1239
},
{
"epoch": 0.3825979635914841,
"grad_norm": 0.4877413511276245,
"learning_rate": 3e-05,
"loss": 3.0552,
"step": 1240
},
{
"epoch": 0.38290651033631595,
"grad_norm": 0.565540611743927,
"learning_rate": 3e-05,
"loss": 2.4953,
"step": 1241
},
{
"epoch": 0.3832150570811478,
"grad_norm": 0.4378884434700012,
"learning_rate": 3e-05,
"loss": 2.3667,
"step": 1242
},
{
"epoch": 0.3835236038259796,
"grad_norm": 0.31386178731918335,
"learning_rate": 3e-05,
"loss": 2.5664,
"step": 1243
},
{
"epoch": 0.3838321505708115,
"grad_norm": 0.3567987382411957,
"learning_rate": 3e-05,
"loss": 2.6788,
"step": 1244
},
{
"epoch": 0.3841406973156433,
"grad_norm": 0.31117892265319824,
"learning_rate": 3e-05,
"loss": 2.3698,
"step": 1245
},
{
"epoch": 0.38444924406047515,
"grad_norm": 0.32202041149139404,
"learning_rate": 3e-05,
"loss": 2.4283,
"step": 1246
},
{
"epoch": 0.38475779080530703,
"grad_norm": 0.3621535897254944,
"learning_rate": 3e-05,
"loss": 2.4739,
"step": 1247
},
{
"epoch": 0.38506633755013886,
"grad_norm": 0.3923341929912567,
"learning_rate": 3e-05,
"loss": 2.7244,
"step": 1248
},
{
"epoch": 0.3853748842949707,
"grad_norm": 0.3279580771923065,
"learning_rate": 3e-05,
"loss": 2.395,
"step": 1249
},
{
"epoch": 0.3856834310398025,
"grad_norm": 0.36575204133987427,
"learning_rate": 3e-05,
"loss": 2.5109,
"step": 1250
},
{
"epoch": 0.3859919777846344,
"grad_norm": 0.4692917764186859,
"learning_rate": 3e-05,
"loss": 3.1119,
"step": 1251
},
{
"epoch": 0.3863005245294662,
"grad_norm": 0.4051806330680847,
"learning_rate": 3e-05,
"loss": 2.6323,
"step": 1252
},
{
"epoch": 0.38660907127429806,
"grad_norm": 0.4611109793186188,
"learning_rate": 3e-05,
"loss": 2.4552,
"step": 1253
},
{
"epoch": 0.3869176180191299,
"grad_norm": 0.5289100408554077,
"learning_rate": 3e-05,
"loss": 3.3514,
"step": 1254
},
{
"epoch": 0.38722616476396177,
"grad_norm": 0.3087036907672882,
"learning_rate": 3e-05,
"loss": 2.6185,
"step": 1255
},
{
"epoch": 0.3875347115087936,
"grad_norm": 0.46260780096054077,
"learning_rate": 3e-05,
"loss": 2.8345,
"step": 1256
},
{
"epoch": 0.3878432582536254,
"grad_norm": 0.3516508936882019,
"learning_rate": 3e-05,
"loss": 2.7413,
"step": 1257
},
{
"epoch": 0.38815180499845725,
"grad_norm": 0.3896012008190155,
"learning_rate": 3e-05,
"loss": 2.8107,
"step": 1258
},
{
"epoch": 0.38846035174328913,
"grad_norm": 0.35049739480018616,
"learning_rate": 3e-05,
"loss": 2.3946,
"step": 1259
},
{
"epoch": 0.38876889848812096,
"grad_norm": 0.401770681142807,
"learning_rate": 3e-05,
"loss": 2.8708,
"step": 1260
},
{
"epoch": 0.3890774452329528,
"grad_norm": 0.3014017939567566,
"learning_rate": 3e-05,
"loss": 2.7118,
"step": 1261
},
{
"epoch": 0.3893859919777846,
"grad_norm": 0.32284262776374817,
"learning_rate": 3e-05,
"loss": 2.7672,
"step": 1262
},
{
"epoch": 0.3896945387226165,
"grad_norm": 0.38780876994132996,
"learning_rate": 3e-05,
"loss": 2.5998,
"step": 1263
},
{
"epoch": 0.39000308546744833,
"grad_norm": 0.2658548057079315,
"learning_rate": 3e-05,
"loss": 2.3036,
"step": 1264
},
{
"epoch": 0.39031163221228016,
"grad_norm": 0.5888837575912476,
"learning_rate": 3e-05,
"loss": 3.4526,
"step": 1265
},
{
"epoch": 0.390620178957112,
"grad_norm": 0.3705357015132904,
"learning_rate": 3e-05,
"loss": 2.9579,
"step": 1266
},
{
"epoch": 0.39092872570194387,
"grad_norm": 0.28175267577171326,
"learning_rate": 3e-05,
"loss": 2.1632,
"step": 1267
},
{
"epoch": 0.3912372724467757,
"grad_norm": 0.33350950479507446,
"learning_rate": 3e-05,
"loss": 2.9705,
"step": 1268
},
{
"epoch": 0.3915458191916075,
"grad_norm": 0.4463180601596832,
"learning_rate": 3e-05,
"loss": 2.6577,
"step": 1269
},
{
"epoch": 0.39185436593643935,
"grad_norm": 0.5363352298736572,
"learning_rate": 3e-05,
"loss": 3.4181,
"step": 1270
},
{
"epoch": 0.39216291268127124,
"grad_norm": 0.32660403847694397,
"learning_rate": 3e-05,
"loss": 2.6473,
"step": 1271
},
{
"epoch": 0.39247145942610306,
"grad_norm": 0.4326009750366211,
"learning_rate": 3e-05,
"loss": 2.6244,
"step": 1272
},
{
"epoch": 0.3927800061709349,
"grad_norm": 0.43666842579841614,
"learning_rate": 3e-05,
"loss": 2.6443,
"step": 1273
},
{
"epoch": 0.3930885529157667,
"grad_norm": 0.3615734279155731,
"learning_rate": 3e-05,
"loss": 2.4956,
"step": 1274
},
{
"epoch": 0.3933970996605986,
"grad_norm": 0.6071893572807312,
"learning_rate": 3e-05,
"loss": 3.1909,
"step": 1275
},
{
"epoch": 0.39370564640543043,
"grad_norm": 0.3208840489387512,
"learning_rate": 3e-05,
"loss": 2.4802,
"step": 1276
},
{
"epoch": 0.39401419315026226,
"grad_norm": 0.3932779133319855,
"learning_rate": 3e-05,
"loss": 2.7946,
"step": 1277
},
{
"epoch": 0.3943227398950941,
"grad_norm": 0.3697027862071991,
"learning_rate": 3e-05,
"loss": 2.7385,
"step": 1278
},
{
"epoch": 0.39463128663992597,
"grad_norm": 0.6150335669517517,
"learning_rate": 3e-05,
"loss": 2.8217,
"step": 1279
},
{
"epoch": 0.3949398333847578,
"grad_norm": 0.3720497190952301,
"learning_rate": 3e-05,
"loss": 2.7876,
"step": 1280
},
{
"epoch": 0.3952483801295896,
"grad_norm": 0.2867538034915924,
"learning_rate": 3e-05,
"loss": 2.6693,
"step": 1281
},
{
"epoch": 0.39555692687442146,
"grad_norm": 0.3619423806667328,
"learning_rate": 3e-05,
"loss": 2.4328,
"step": 1282
},
{
"epoch": 0.39586547361925334,
"grad_norm": 0.37228068709373474,
"learning_rate": 3e-05,
"loss": 3.076,
"step": 1283
},
{
"epoch": 0.39617402036408517,
"grad_norm": 0.39720866084098816,
"learning_rate": 3e-05,
"loss": 2.6513,
"step": 1284
},
{
"epoch": 0.396482567108917,
"grad_norm": 0.3063104748725891,
"learning_rate": 3e-05,
"loss": 2.3469,
"step": 1285
},
{
"epoch": 0.3967911138537488,
"grad_norm": 0.3493167459964752,
"learning_rate": 3e-05,
"loss": 2.9156,
"step": 1286
},
{
"epoch": 0.3970996605985807,
"grad_norm": 0.4875669479370117,
"learning_rate": 3e-05,
"loss": 3.035,
"step": 1287
},
{
"epoch": 0.39740820734341253,
"grad_norm": 0.5185248255729675,
"learning_rate": 3e-05,
"loss": 3.5732,
"step": 1288
},
{
"epoch": 0.39771675408824436,
"grad_norm": 0.5274576544761658,
"learning_rate": 3e-05,
"loss": 2.708,
"step": 1289
},
{
"epoch": 0.3980253008330762,
"grad_norm": 0.3633483648300171,
"learning_rate": 3e-05,
"loss": 2.4895,
"step": 1290
},
{
"epoch": 0.3983338475779081,
"grad_norm": 0.283071905374527,
"learning_rate": 3e-05,
"loss": 2.1112,
"step": 1291
},
{
"epoch": 0.3986423943227399,
"grad_norm": 0.31986409425735474,
"learning_rate": 3e-05,
"loss": 2.2668,
"step": 1292
},
{
"epoch": 0.39895094106757173,
"grad_norm": 0.7418584227561951,
"learning_rate": 3e-05,
"loss": 3.4388,
"step": 1293
},
{
"epoch": 0.39925948781240356,
"grad_norm": 0.4027244746685028,
"learning_rate": 3e-05,
"loss": 2.592,
"step": 1294
},
{
"epoch": 0.39956803455723544,
"grad_norm": 0.29873213171958923,
"learning_rate": 3e-05,
"loss": 2.4954,
"step": 1295
},
{
"epoch": 0.39987658130206727,
"grad_norm": 0.3049558401107788,
"learning_rate": 3e-05,
"loss": 2.4802,
"step": 1296
},
{
"epoch": 0.4001851280468991,
"grad_norm": 0.4675412178039551,
"learning_rate": 3e-05,
"loss": 2.6097,
"step": 1297
},
{
"epoch": 0.4004936747917309,
"grad_norm": 0.5763147473335266,
"learning_rate": 3e-05,
"loss": 2.6048,
"step": 1298
},
{
"epoch": 0.4008022215365628,
"grad_norm": 0.45807382464408875,
"learning_rate": 3e-05,
"loss": 2.7465,
"step": 1299
},
{
"epoch": 0.40111076828139464,
"grad_norm": 0.5493050813674927,
"learning_rate": 3e-05,
"loss": 2.9135,
"step": 1300
},
{
"epoch": 0.40141931502622646,
"grad_norm": 0.610711395740509,
"learning_rate": 3e-05,
"loss": 2.7627,
"step": 1301
},
{
"epoch": 0.4017278617710583,
"grad_norm": 0.31486013531684875,
"learning_rate": 3e-05,
"loss": 2.3012,
"step": 1302
},
{
"epoch": 0.4020364085158902,
"grad_norm": 0.37602242827415466,
"learning_rate": 3e-05,
"loss": 2.7181,
"step": 1303
},
{
"epoch": 0.402344955260722,
"grad_norm": 0.33764341473579407,
"learning_rate": 3e-05,
"loss": 2.4903,
"step": 1304
},
{
"epoch": 0.40265350200555383,
"grad_norm": 0.35062283277511597,
"learning_rate": 3e-05,
"loss": 2.5738,
"step": 1305
},
{
"epoch": 0.40296204875038566,
"grad_norm": 0.3441402316093445,
"learning_rate": 3e-05,
"loss": 2.3253,
"step": 1306
},
{
"epoch": 0.40327059549521754,
"grad_norm": 0.5749024748802185,
"learning_rate": 3e-05,
"loss": 2.9657,
"step": 1307
},
{
"epoch": 0.40357914224004937,
"grad_norm": 0.30308666825294495,
"learning_rate": 3e-05,
"loss": 2.2349,
"step": 1308
},
{
"epoch": 0.4038876889848812,
"grad_norm": 0.40580713748931885,
"learning_rate": 3e-05,
"loss": 2.5753,
"step": 1309
},
{
"epoch": 0.404196235729713,
"grad_norm": 0.419880211353302,
"learning_rate": 3e-05,
"loss": 2.5283,
"step": 1310
},
{
"epoch": 0.4045047824745449,
"grad_norm": 0.42699503898620605,
"learning_rate": 3e-05,
"loss": 2.6258,
"step": 1311
},
{
"epoch": 0.40481332921937674,
"grad_norm": 0.3106153607368469,
"learning_rate": 3e-05,
"loss": 2.1418,
"step": 1312
},
{
"epoch": 0.40512187596420857,
"grad_norm": 0.32699504494667053,
"learning_rate": 3e-05,
"loss": 2.6335,
"step": 1313
},
{
"epoch": 0.4054304227090404,
"grad_norm": 0.4115197956562042,
"learning_rate": 3e-05,
"loss": 2.7467,
"step": 1314
},
{
"epoch": 0.4057389694538723,
"grad_norm": 0.4634549021720886,
"learning_rate": 3e-05,
"loss": 2.6238,
"step": 1315
},
{
"epoch": 0.4060475161987041,
"grad_norm": 0.396408349275589,
"learning_rate": 3e-05,
"loss": 2.8943,
"step": 1316
},
{
"epoch": 0.40635606294353593,
"grad_norm": 0.253656804561615,
"learning_rate": 3e-05,
"loss": 2.0561,
"step": 1317
},
{
"epoch": 0.40666460968836776,
"grad_norm": 0.3059282600879669,
"learning_rate": 3e-05,
"loss": 2.6393,
"step": 1318
},
{
"epoch": 0.40697315643319965,
"grad_norm": 0.3378694951534271,
"learning_rate": 3e-05,
"loss": 2.6338,
"step": 1319
},
{
"epoch": 0.4072817031780315,
"grad_norm": 0.5034033060073853,
"learning_rate": 3e-05,
"loss": 3.1591,
"step": 1320
},
{
"epoch": 0.4075902499228633,
"grad_norm": 0.35838401317596436,
"learning_rate": 3e-05,
"loss": 2.3609,
"step": 1321
},
{
"epoch": 0.40789879666769513,
"grad_norm": 0.4967813789844513,
"learning_rate": 3e-05,
"loss": 3.0929,
"step": 1322
},
{
"epoch": 0.408207343412527,
"grad_norm": 0.3491065800189972,
"learning_rate": 3e-05,
"loss": 2.9074,
"step": 1323
},
{
"epoch": 0.40851589015735884,
"grad_norm": 0.37351828813552856,
"learning_rate": 3e-05,
"loss": 2.6009,
"step": 1324
},
{
"epoch": 0.40882443690219067,
"grad_norm": 0.4349062442779541,
"learning_rate": 3e-05,
"loss": 3.0212,
"step": 1325
},
{
"epoch": 0.4091329836470225,
"grad_norm": 0.3631637692451477,
"learning_rate": 3e-05,
"loss": 2.7591,
"step": 1326
},
{
"epoch": 0.4094415303918544,
"grad_norm": 0.392930805683136,
"learning_rate": 3e-05,
"loss": 2.7822,
"step": 1327
},
{
"epoch": 0.4097500771366862,
"grad_norm": 0.571189820766449,
"learning_rate": 3e-05,
"loss": 3.0766,
"step": 1328
},
{
"epoch": 0.41005862388151804,
"grad_norm": 0.30686402320861816,
"learning_rate": 3e-05,
"loss": 2.684,
"step": 1329
},
{
"epoch": 0.4103671706263499,
"grad_norm": 0.3239377737045288,
"learning_rate": 3e-05,
"loss": 2.7809,
"step": 1330
},
{
"epoch": 0.41067571737118175,
"grad_norm": 0.35130640864372253,
"learning_rate": 3e-05,
"loss": 2.452,
"step": 1331
},
{
"epoch": 0.4109842641160136,
"grad_norm": 0.3733953535556793,
"learning_rate": 3e-05,
"loss": 3.0438,
"step": 1332
},
{
"epoch": 0.4112928108608454,
"grad_norm": 0.2904767692089081,
"learning_rate": 3e-05,
"loss": 2.5109,
"step": 1333
},
{
"epoch": 0.4116013576056773,
"grad_norm": 0.3014267086982727,
"learning_rate": 3e-05,
"loss": 2.2298,
"step": 1334
},
{
"epoch": 0.4119099043505091,
"grad_norm": 0.40220725536346436,
"learning_rate": 3e-05,
"loss": 2.7179,
"step": 1335
},
{
"epoch": 0.41221845109534094,
"grad_norm": 0.3351367115974426,
"learning_rate": 3e-05,
"loss": 2.2936,
"step": 1336
},
{
"epoch": 0.41252699784017277,
"grad_norm": 0.3121192753314972,
"learning_rate": 3e-05,
"loss": 2.265,
"step": 1337
},
{
"epoch": 0.41283554458500465,
"grad_norm": 0.4137488305568695,
"learning_rate": 3e-05,
"loss": 2.7979,
"step": 1338
},
{
"epoch": 0.4131440913298365,
"grad_norm": 0.4146516025066376,
"learning_rate": 3e-05,
"loss": 3.174,
"step": 1339
},
{
"epoch": 0.4134526380746683,
"grad_norm": 0.39826127886772156,
"learning_rate": 3e-05,
"loss": 2.5499,
"step": 1340
},
{
"epoch": 0.41376118481950014,
"grad_norm": 0.46398836374282837,
"learning_rate": 3e-05,
"loss": 2.6571,
"step": 1341
},
{
"epoch": 0.414069731564332,
"grad_norm": 0.42536866664886475,
"learning_rate": 3e-05,
"loss": 2.6297,
"step": 1342
},
{
"epoch": 0.41437827830916385,
"grad_norm": 0.3115003705024719,
"learning_rate": 3e-05,
"loss": 2.4774,
"step": 1343
},
{
"epoch": 0.4146868250539957,
"grad_norm": 0.3130146563053131,
"learning_rate": 3e-05,
"loss": 2.5763,
"step": 1344
},
{
"epoch": 0.4149953717988275,
"grad_norm": 0.4441472291946411,
"learning_rate": 3e-05,
"loss": 2.9602,
"step": 1345
},
{
"epoch": 0.4153039185436594,
"grad_norm": 0.8076886534690857,
"learning_rate": 3e-05,
"loss": 2.6896,
"step": 1346
},
{
"epoch": 0.4156124652884912,
"grad_norm": 0.7255078554153442,
"learning_rate": 3e-05,
"loss": 3.3754,
"step": 1347
},
{
"epoch": 0.41592101203332305,
"grad_norm": 0.3382607698440552,
"learning_rate": 3e-05,
"loss": 2.5589,
"step": 1348
},
{
"epoch": 0.4162295587781549,
"grad_norm": 0.46596959233283997,
"learning_rate": 3e-05,
"loss": 2.6977,
"step": 1349
},
{
"epoch": 0.41653810552298676,
"grad_norm": 0.6461755037307739,
"learning_rate": 3e-05,
"loss": 3.2361,
"step": 1350
},
{
"epoch": 0.4168466522678186,
"grad_norm": 0.3971562087535858,
"learning_rate": 3e-05,
"loss": 2.4609,
"step": 1351
},
{
"epoch": 0.4171551990126504,
"grad_norm": 0.4004113972187042,
"learning_rate": 3e-05,
"loss": 2.763,
"step": 1352
},
{
"epoch": 0.41746374575748224,
"grad_norm": 0.37319812178611755,
"learning_rate": 3e-05,
"loss": 2.8121,
"step": 1353
},
{
"epoch": 0.4177722925023141,
"grad_norm": 0.5117070078849792,
"learning_rate": 3e-05,
"loss": 2.9268,
"step": 1354
},
{
"epoch": 0.41808083924714595,
"grad_norm": 0.5662671327590942,
"learning_rate": 3e-05,
"loss": 3.2365,
"step": 1355
},
{
"epoch": 0.4183893859919778,
"grad_norm": 0.3150057792663574,
"learning_rate": 3e-05,
"loss": 2.7096,
"step": 1356
},
{
"epoch": 0.4186979327368096,
"grad_norm": 0.3373428285121918,
"learning_rate": 3e-05,
"loss": 2.9863,
"step": 1357
},
{
"epoch": 0.4190064794816415,
"grad_norm": 0.3957524001598358,
"learning_rate": 3e-05,
"loss": 2.6506,
"step": 1358
},
{
"epoch": 0.4193150262264733,
"grad_norm": 0.3644879460334778,
"learning_rate": 3e-05,
"loss": 2.4042,
"step": 1359
},
{
"epoch": 0.41962357297130515,
"grad_norm": 0.44521406292915344,
"learning_rate": 3e-05,
"loss": 2.5451,
"step": 1360
},
{
"epoch": 0.419932119716137,
"grad_norm": 0.3452889621257782,
"learning_rate": 3e-05,
"loss": 2.4424,
"step": 1361
},
{
"epoch": 0.42024066646096886,
"grad_norm": 0.4142851233482361,
"learning_rate": 3e-05,
"loss": 2.7783,
"step": 1362
},
{
"epoch": 0.4205492132058007,
"grad_norm": 0.5085824728012085,
"learning_rate": 3e-05,
"loss": 2.8652,
"step": 1363
},
{
"epoch": 0.4208577599506325,
"grad_norm": 0.2973617911338806,
"learning_rate": 3e-05,
"loss": 2.4887,
"step": 1364
},
{
"epoch": 0.42116630669546434,
"grad_norm": 0.48054924607276917,
"learning_rate": 3e-05,
"loss": 3.1104,
"step": 1365
},
{
"epoch": 0.4214748534402962,
"grad_norm": 0.3088330030441284,
"learning_rate": 3e-05,
"loss": 2.8398,
"step": 1366
},
{
"epoch": 0.42178340018512805,
"grad_norm": 0.34353551268577576,
"learning_rate": 3e-05,
"loss": 2.6529,
"step": 1367
},
{
"epoch": 0.4220919469299599,
"grad_norm": 0.3366641700267792,
"learning_rate": 3e-05,
"loss": 2.7679,
"step": 1368
},
{
"epoch": 0.4224004936747917,
"grad_norm": 0.3791579604148865,
"learning_rate": 3e-05,
"loss": 2.8372,
"step": 1369
},
{
"epoch": 0.4227090404196236,
"grad_norm": 0.33375903964042664,
"learning_rate": 3e-05,
"loss": 2.7064,
"step": 1370
},
{
"epoch": 0.4230175871644554,
"grad_norm": 0.3554198741912842,
"learning_rate": 3e-05,
"loss": 2.953,
"step": 1371
},
{
"epoch": 0.42332613390928725,
"grad_norm": 0.3274098336696625,
"learning_rate": 3e-05,
"loss": 2.4668,
"step": 1372
},
{
"epoch": 0.4236346806541191,
"grad_norm": 0.4714450538158417,
"learning_rate": 3e-05,
"loss": 3.1614,
"step": 1373
},
{
"epoch": 0.42394322739895096,
"grad_norm": 0.4060400724411011,
"learning_rate": 3e-05,
"loss": 3.0015,
"step": 1374
},
{
"epoch": 0.4242517741437828,
"grad_norm": 0.32625189423561096,
"learning_rate": 3e-05,
"loss": 2.4997,
"step": 1375
},
{
"epoch": 0.4245603208886146,
"grad_norm": 0.2833174765110016,
"learning_rate": 3e-05,
"loss": 2.207,
"step": 1376
},
{
"epoch": 0.42486886763344645,
"grad_norm": 0.36846381425857544,
"learning_rate": 3e-05,
"loss": 2.5808,
"step": 1377
},
{
"epoch": 0.42517741437827833,
"grad_norm": 0.3334082365036011,
"learning_rate": 3e-05,
"loss": 2.643,
"step": 1378
},
{
"epoch": 0.42548596112311016,
"grad_norm": 0.3491818606853485,
"learning_rate": 3e-05,
"loss": 2.8605,
"step": 1379
},
{
"epoch": 0.425794507867942,
"grad_norm": 0.4421471953392029,
"learning_rate": 3e-05,
"loss": 3.3898,
"step": 1380
},
{
"epoch": 0.4261030546127738,
"grad_norm": 0.2874299883842468,
"learning_rate": 3e-05,
"loss": 2.4055,
"step": 1381
},
{
"epoch": 0.4264116013576057,
"grad_norm": 0.2746589779853821,
"learning_rate": 3e-05,
"loss": 2.5162,
"step": 1382
},
{
"epoch": 0.4267201481024375,
"grad_norm": 0.33837470412254333,
"learning_rate": 3e-05,
"loss": 2.3699,
"step": 1383
},
{
"epoch": 0.42702869484726935,
"grad_norm": 0.40471774339675903,
"learning_rate": 3e-05,
"loss": 2.9301,
"step": 1384
},
{
"epoch": 0.4273372415921012,
"grad_norm": 0.3374662399291992,
"learning_rate": 3e-05,
"loss": 2.4965,
"step": 1385
},
{
"epoch": 0.42764578833693306,
"grad_norm": 0.42680230736732483,
"learning_rate": 3e-05,
"loss": 3.1878,
"step": 1386
},
{
"epoch": 0.4279543350817649,
"grad_norm": 0.31313976645469666,
"learning_rate": 3e-05,
"loss": 2.0341,
"step": 1387
},
{
"epoch": 0.4282628818265967,
"grad_norm": 0.25408491492271423,
"learning_rate": 3e-05,
"loss": 2.4309,
"step": 1388
},
{
"epoch": 0.42857142857142855,
"grad_norm": 0.31811076402664185,
"learning_rate": 3e-05,
"loss": 2.6167,
"step": 1389
},
{
"epoch": 0.42887997531626043,
"grad_norm": 0.3181273341178894,
"learning_rate": 3e-05,
"loss": 2.4618,
"step": 1390
},
{
"epoch": 0.42918852206109226,
"grad_norm": 0.3027171790599823,
"learning_rate": 3e-05,
"loss": 2.6972,
"step": 1391
},
{
"epoch": 0.4294970688059241,
"grad_norm": 0.3148479759693146,
"learning_rate": 3e-05,
"loss": 2.5805,
"step": 1392
},
{
"epoch": 0.4298056155507559,
"grad_norm": 0.3551836311817169,
"learning_rate": 3e-05,
"loss": 2.5305,
"step": 1393
},
{
"epoch": 0.4301141622955878,
"grad_norm": 0.3691128194332123,
"learning_rate": 3e-05,
"loss": 2.5768,
"step": 1394
},
{
"epoch": 0.4304227090404196,
"grad_norm": 0.30554333329200745,
"learning_rate": 3e-05,
"loss": 2.2353,
"step": 1395
},
{
"epoch": 0.43073125578525145,
"grad_norm": 0.390206515789032,
"learning_rate": 3e-05,
"loss": 2.8924,
"step": 1396
},
{
"epoch": 0.4310398025300833,
"grad_norm": 0.3919602036476135,
"learning_rate": 3e-05,
"loss": 2.7581,
"step": 1397
},
{
"epoch": 0.43134834927491517,
"grad_norm": 0.48594582080841064,
"learning_rate": 3e-05,
"loss": 2.9195,
"step": 1398
},
{
"epoch": 0.431656896019747,
"grad_norm": 0.33339229226112366,
"learning_rate": 3e-05,
"loss": 2.703,
"step": 1399
},
{
"epoch": 0.4319654427645788,
"grad_norm": 0.3424081802368164,
"learning_rate": 3e-05,
"loss": 2.3623,
"step": 1400
},
{
"epoch": 0.43227398950941065,
"grad_norm": 0.37659206986427307,
"learning_rate": 3e-05,
"loss": 3.0833,
"step": 1401
},
{
"epoch": 0.43258253625424253,
"grad_norm": 0.4342521131038666,
"learning_rate": 3e-05,
"loss": 2.5336,
"step": 1402
},
{
"epoch": 0.43289108299907436,
"grad_norm": 0.3215969204902649,
"learning_rate": 3e-05,
"loss": 2.6314,
"step": 1403
},
{
"epoch": 0.4331996297439062,
"grad_norm": 0.3317095637321472,
"learning_rate": 3e-05,
"loss": 2.2092,
"step": 1404
},
{
"epoch": 0.433508176488738,
"grad_norm": 0.4227984845638275,
"learning_rate": 3e-05,
"loss": 2.9534,
"step": 1405
},
{
"epoch": 0.4338167232335699,
"grad_norm": 0.38567742705345154,
"learning_rate": 3e-05,
"loss": 2.7021,
"step": 1406
},
{
"epoch": 0.43412526997840173,
"grad_norm": 0.3301149606704712,
"learning_rate": 3e-05,
"loss": 2.6226,
"step": 1407
},
{
"epoch": 0.43443381672323356,
"grad_norm": 0.3673669099807739,
"learning_rate": 3e-05,
"loss": 2.6008,
"step": 1408
},
{
"epoch": 0.4347423634680654,
"grad_norm": 0.3407856523990631,
"learning_rate": 3e-05,
"loss": 2.3941,
"step": 1409
},
{
"epoch": 0.43505091021289727,
"grad_norm": 0.43250688910484314,
"learning_rate": 3e-05,
"loss": 3.001,
"step": 1410
},
{
"epoch": 0.4353594569577291,
"grad_norm": 0.3385581374168396,
"learning_rate": 3e-05,
"loss": 2.9602,
"step": 1411
},
{
"epoch": 0.4356680037025609,
"grad_norm": 0.3739365041255951,
"learning_rate": 3e-05,
"loss": 2.8393,
"step": 1412
},
{
"epoch": 0.4359765504473928,
"grad_norm": 0.5004026889801025,
"learning_rate": 3e-05,
"loss": 2.8947,
"step": 1413
},
{
"epoch": 0.43628509719222464,
"grad_norm": 0.3384886085987091,
"learning_rate": 3e-05,
"loss": 2.4085,
"step": 1414
},
{
"epoch": 0.43659364393705646,
"grad_norm": 0.511809229850769,
"learning_rate": 3e-05,
"loss": 2.864,
"step": 1415
},
{
"epoch": 0.4369021906818883,
"grad_norm": 0.37064921855926514,
"learning_rate": 3e-05,
"loss": 2.8768,
"step": 1416
},
{
"epoch": 0.4372107374267202,
"grad_norm": 0.3196764290332794,
"learning_rate": 3e-05,
"loss": 2.4019,
"step": 1417
},
{
"epoch": 0.437519284171552,
"grad_norm": 0.3673863708972931,
"learning_rate": 3e-05,
"loss": 2.6811,
"step": 1418
},
{
"epoch": 0.43782783091638383,
"grad_norm": 0.3803464472293854,
"learning_rate": 3e-05,
"loss": 2.3291,
"step": 1419
},
{
"epoch": 0.43813637766121566,
"grad_norm": 0.5707358717918396,
"learning_rate": 3e-05,
"loss": 3.048,
"step": 1420
},
{
"epoch": 0.43844492440604754,
"grad_norm": 0.3424266278743744,
"learning_rate": 3e-05,
"loss": 2.3497,
"step": 1421
},
{
"epoch": 0.43875347115087937,
"grad_norm": 0.5494610071182251,
"learning_rate": 3e-05,
"loss": 3.0777,
"step": 1422
},
{
"epoch": 0.4390620178957112,
"grad_norm": 0.5954529643058777,
"learning_rate": 3e-05,
"loss": 2.7765,
"step": 1423
},
{
"epoch": 0.439370564640543,
"grad_norm": 0.33620685338974,
"learning_rate": 3e-05,
"loss": 2.3952,
"step": 1424
},
{
"epoch": 0.4396791113853749,
"grad_norm": 0.3967253565788269,
"learning_rate": 3e-05,
"loss": 2.8079,
"step": 1425
},
{
"epoch": 0.43998765813020674,
"grad_norm": 0.2961389124393463,
"learning_rate": 3e-05,
"loss": 2.3434,
"step": 1426
},
{
"epoch": 0.44029620487503857,
"grad_norm": 0.41013649106025696,
"learning_rate": 3e-05,
"loss": 2.79,
"step": 1427
},
{
"epoch": 0.4406047516198704,
"grad_norm": 0.3336520791053772,
"learning_rate": 3e-05,
"loss": 2.6722,
"step": 1428
},
{
"epoch": 0.4409132983647023,
"grad_norm": 0.4669612646102905,
"learning_rate": 3e-05,
"loss": 2.6199,
"step": 1429
},
{
"epoch": 0.4412218451095341,
"grad_norm": 0.3168468475341797,
"learning_rate": 3e-05,
"loss": 2.499,
"step": 1430
},
{
"epoch": 0.44153039185436593,
"grad_norm": 0.3851400911808014,
"learning_rate": 3e-05,
"loss": 2.5927,
"step": 1431
},
{
"epoch": 0.44183893859919776,
"grad_norm": 0.31471362709999084,
"learning_rate": 3e-05,
"loss": 2.5986,
"step": 1432
},
{
"epoch": 0.44214748534402964,
"grad_norm": 0.405691534280777,
"learning_rate": 3e-05,
"loss": 2.8578,
"step": 1433
},
{
"epoch": 0.4424560320888615,
"grad_norm": 0.3536216914653778,
"learning_rate": 3e-05,
"loss": 2.6194,
"step": 1434
},
{
"epoch": 0.4427645788336933,
"grad_norm": 0.3257675766944885,
"learning_rate": 3e-05,
"loss": 2.5817,
"step": 1435
},
{
"epoch": 0.44307312557852513,
"grad_norm": 0.3345182538032532,
"learning_rate": 3e-05,
"loss": 2.423,
"step": 1436
},
{
"epoch": 0.443381672323357,
"grad_norm": 0.47484976053237915,
"learning_rate": 3e-05,
"loss": 2.8201,
"step": 1437
},
{
"epoch": 0.44369021906818884,
"grad_norm": 0.4050092399120331,
"learning_rate": 3e-05,
"loss": 2.4457,
"step": 1438
},
{
"epoch": 0.44399876581302067,
"grad_norm": 0.3550967574119568,
"learning_rate": 3e-05,
"loss": 2.5056,
"step": 1439
},
{
"epoch": 0.4443073125578525,
"grad_norm": 0.34624183177948,
"learning_rate": 3e-05,
"loss": 2.5355,
"step": 1440
},
{
"epoch": 0.4446158593026844,
"grad_norm": 0.4038742482662201,
"learning_rate": 3e-05,
"loss": 2.7499,
"step": 1441
},
{
"epoch": 0.4449244060475162,
"grad_norm": 0.3393418788909912,
"learning_rate": 3e-05,
"loss": 2.7613,
"step": 1442
},
{
"epoch": 0.44523295279234804,
"grad_norm": 0.4069559872150421,
"learning_rate": 3e-05,
"loss": 2.7251,
"step": 1443
},
{
"epoch": 0.44554149953717986,
"grad_norm": 0.42860448360443115,
"learning_rate": 3e-05,
"loss": 2.7757,
"step": 1444
},
{
"epoch": 0.44585004628201175,
"grad_norm": 0.3287833034992218,
"learning_rate": 3e-05,
"loss": 2.3999,
"step": 1445
},
{
"epoch": 0.4461585930268436,
"grad_norm": 0.359164297580719,
"learning_rate": 3e-05,
"loss": 2.7029,
"step": 1446
},
{
"epoch": 0.4464671397716754,
"grad_norm": 0.4843924641609192,
"learning_rate": 3e-05,
"loss": 3.0231,
"step": 1447
},
{
"epoch": 0.44677568651650723,
"grad_norm": 0.4894542396068573,
"learning_rate": 3e-05,
"loss": 3.0614,
"step": 1448
},
{
"epoch": 0.4470842332613391,
"grad_norm": 0.33690640330314636,
"learning_rate": 3e-05,
"loss": 2.6946,
"step": 1449
},
{
"epoch": 0.44739278000617094,
"grad_norm": 0.2866448163986206,
"learning_rate": 3e-05,
"loss": 2.3611,
"step": 1450
},
{
"epoch": 0.44770132675100277,
"grad_norm": 0.7917083501815796,
"learning_rate": 3e-05,
"loss": 3.5579,
"step": 1451
},
{
"epoch": 0.4480098734958346,
"grad_norm": 0.2805081903934479,
"learning_rate": 3e-05,
"loss": 2.1076,
"step": 1452
},
{
"epoch": 0.4483184202406665,
"grad_norm": 0.3295937180519104,
"learning_rate": 3e-05,
"loss": 2.8668,
"step": 1453
},
{
"epoch": 0.4486269669854983,
"grad_norm": 0.30099961161613464,
"learning_rate": 3e-05,
"loss": 2.3013,
"step": 1454
},
{
"epoch": 0.44893551373033014,
"grad_norm": 0.543934166431427,
"learning_rate": 3e-05,
"loss": 2.9462,
"step": 1455
},
{
"epoch": 0.44924406047516197,
"grad_norm": 0.36943310499191284,
"learning_rate": 3e-05,
"loss": 2.93,
"step": 1456
},
{
"epoch": 0.44955260721999385,
"grad_norm": 0.5213918685913086,
"learning_rate": 3e-05,
"loss": 2.5094,
"step": 1457
},
{
"epoch": 0.4498611539648257,
"grad_norm": 0.2893174886703491,
"learning_rate": 3e-05,
"loss": 2.4578,
"step": 1458
},
{
"epoch": 0.4501697007096575,
"grad_norm": 0.36389556527137756,
"learning_rate": 3e-05,
"loss": 2.8613,
"step": 1459
},
{
"epoch": 0.45047824745448933,
"grad_norm": 0.4273573160171509,
"learning_rate": 3e-05,
"loss": 2.7277,
"step": 1460
},
{
"epoch": 0.4507867941993212,
"grad_norm": 0.5575438141822815,
"learning_rate": 3e-05,
"loss": 3.4139,
"step": 1461
},
{
"epoch": 0.45109534094415304,
"grad_norm": 0.4661589562892914,
"learning_rate": 3e-05,
"loss": 2.9162,
"step": 1462
},
{
"epoch": 0.4514038876889849,
"grad_norm": 0.350808709859848,
"learning_rate": 3e-05,
"loss": 2.8438,
"step": 1463
},
{
"epoch": 0.4517124344338167,
"grad_norm": 0.26493504643440247,
"learning_rate": 3e-05,
"loss": 2.3362,
"step": 1464
},
{
"epoch": 0.4520209811786486,
"grad_norm": 0.30495163798332214,
"learning_rate": 3e-05,
"loss": 2.6332,
"step": 1465
},
{
"epoch": 0.4523295279234804,
"grad_norm": 0.32672178745269775,
"learning_rate": 3e-05,
"loss": 2.4577,
"step": 1466
},
{
"epoch": 0.45263807466831224,
"grad_norm": 0.29373109340667725,
"learning_rate": 3e-05,
"loss": 2.3206,
"step": 1467
},
{
"epoch": 0.45294662141314407,
"grad_norm": 0.47060441970825195,
"learning_rate": 3e-05,
"loss": 2.6481,
"step": 1468
},
{
"epoch": 0.45325516815797595,
"grad_norm": 0.4044017195701599,
"learning_rate": 3e-05,
"loss": 2.888,
"step": 1469
},
{
"epoch": 0.4535637149028078,
"grad_norm": 0.5061796307563782,
"learning_rate": 3e-05,
"loss": 2.5166,
"step": 1470
},
{
"epoch": 0.4538722616476396,
"grad_norm": 0.35933852195739746,
"learning_rate": 3e-05,
"loss": 2.6528,
"step": 1471
},
{
"epoch": 0.45418080839247144,
"grad_norm": 0.36869826912879944,
"learning_rate": 3e-05,
"loss": 2.8693,
"step": 1472
},
{
"epoch": 0.4544893551373033,
"grad_norm": 0.3967495560646057,
"learning_rate": 3e-05,
"loss": 2.6779,
"step": 1473
},
{
"epoch": 0.45479790188213515,
"grad_norm": 0.4149707853794098,
"learning_rate": 3e-05,
"loss": 2.8755,
"step": 1474
},
{
"epoch": 0.455106448626967,
"grad_norm": 0.39458996057510376,
"learning_rate": 3e-05,
"loss": 2.6415,
"step": 1475
},
{
"epoch": 0.4554149953717988,
"grad_norm": 0.3556062877178192,
"learning_rate": 3e-05,
"loss": 2.7043,
"step": 1476
},
{
"epoch": 0.4557235421166307,
"grad_norm": 0.5347737073898315,
"learning_rate": 3e-05,
"loss": 3.1037,
"step": 1477
},
{
"epoch": 0.4560320888614625,
"grad_norm": 0.3868623673915863,
"learning_rate": 3e-05,
"loss": 2.9386,
"step": 1478
},
{
"epoch": 0.45634063560629434,
"grad_norm": 0.3561474680900574,
"learning_rate": 3e-05,
"loss": 2.9683,
"step": 1479
},
{
"epoch": 0.45664918235112617,
"grad_norm": 0.4054335951805115,
"learning_rate": 3e-05,
"loss": 3.0608,
"step": 1480
},
{
"epoch": 0.45695772909595805,
"grad_norm": 0.3604312241077423,
"learning_rate": 3e-05,
"loss": 2.858,
"step": 1481
},
{
"epoch": 0.4572662758407899,
"grad_norm": 0.41026541590690613,
"learning_rate": 3e-05,
"loss": 2.8464,
"step": 1482
},
{
"epoch": 0.4575748225856217,
"grad_norm": 0.39170393347740173,
"learning_rate": 3e-05,
"loss": 2.5438,
"step": 1483
},
{
"epoch": 0.45788336933045354,
"grad_norm": 0.3100852072238922,
"learning_rate": 3e-05,
"loss": 2.4244,
"step": 1484
},
{
"epoch": 0.4581919160752854,
"grad_norm": 0.3728077709674835,
"learning_rate": 3e-05,
"loss": 2.9126,
"step": 1485
},
{
"epoch": 0.45850046282011725,
"grad_norm": 0.32681816816329956,
"learning_rate": 3e-05,
"loss": 2.7418,
"step": 1486
},
{
"epoch": 0.4588090095649491,
"grad_norm": 0.31139644980430603,
"learning_rate": 3e-05,
"loss": 2.587,
"step": 1487
},
{
"epoch": 0.4591175563097809,
"grad_norm": 0.3643485903739929,
"learning_rate": 3e-05,
"loss": 2.3148,
"step": 1488
},
{
"epoch": 0.4594261030546128,
"grad_norm": 0.37991657853126526,
"learning_rate": 3e-05,
"loss": 2.6316,
"step": 1489
},
{
"epoch": 0.4597346497994446,
"grad_norm": 0.33139222860336304,
"learning_rate": 3e-05,
"loss": 2.577,
"step": 1490
},
{
"epoch": 0.46004319654427644,
"grad_norm": 0.39415374398231506,
"learning_rate": 3e-05,
"loss": 2.6474,
"step": 1491
},
{
"epoch": 0.4603517432891083,
"grad_norm": 0.43900495767593384,
"learning_rate": 3e-05,
"loss": 2.9312,
"step": 1492
},
{
"epoch": 0.46066029003394016,
"grad_norm": 0.4236634075641632,
"learning_rate": 3e-05,
"loss": 2.4871,
"step": 1493
},
{
"epoch": 0.460968836778772,
"grad_norm": 0.37256962060928345,
"learning_rate": 3e-05,
"loss": 2.6436,
"step": 1494
},
{
"epoch": 0.4612773835236038,
"grad_norm": 0.3243587613105774,
"learning_rate": 3e-05,
"loss": 2.6336,
"step": 1495
},
{
"epoch": 0.4615859302684357,
"grad_norm": 0.40223172307014465,
"learning_rate": 3e-05,
"loss": 2.4542,
"step": 1496
},
{
"epoch": 0.4618944770132675,
"grad_norm": 0.24286819994449615,
"learning_rate": 3e-05,
"loss": 1.953,
"step": 1497
},
{
"epoch": 0.46220302375809935,
"grad_norm": 0.3773837685585022,
"learning_rate": 3e-05,
"loss": 2.7311,
"step": 1498
},
{
"epoch": 0.4625115705029312,
"grad_norm": 0.43442192673683167,
"learning_rate": 3e-05,
"loss": 2.3662,
"step": 1499
},
{
"epoch": 0.46282011724776306,
"grad_norm": 0.35620713233947754,
"learning_rate": 3e-05,
"loss": 2.562,
"step": 1500
},
{
"epoch": 0.4631286639925949,
"grad_norm": 0.3047196567058563,
"learning_rate": 3e-05,
"loss": 2.4955,
"step": 1501
},
{
"epoch": 0.4634372107374267,
"grad_norm": 0.31453654170036316,
"learning_rate": 3e-05,
"loss": 2.3444,
"step": 1502
},
{
"epoch": 0.46374575748225855,
"grad_norm": 0.39251789450645447,
"learning_rate": 3e-05,
"loss": 2.405,
"step": 1503
},
{
"epoch": 0.46405430422709043,
"grad_norm": 0.3584946393966675,
"learning_rate": 3e-05,
"loss": 2.5729,
"step": 1504
},
{
"epoch": 0.46436285097192226,
"grad_norm": 0.3041662573814392,
"learning_rate": 3e-05,
"loss": 2.3803,
"step": 1505
},
{
"epoch": 0.4646713977167541,
"grad_norm": 0.44750258326530457,
"learning_rate": 3e-05,
"loss": 3.1487,
"step": 1506
},
{
"epoch": 0.4649799444615859,
"grad_norm": 0.5141081809997559,
"learning_rate": 3e-05,
"loss": 2.8217,
"step": 1507
},
{
"epoch": 0.4652884912064178,
"grad_norm": 0.5313743352890015,
"learning_rate": 3e-05,
"loss": 3.2112,
"step": 1508
},
{
"epoch": 0.4655970379512496,
"grad_norm": 0.2799358367919922,
"learning_rate": 3e-05,
"loss": 2.3401,
"step": 1509
},
{
"epoch": 0.46590558469608145,
"grad_norm": 0.485971063375473,
"learning_rate": 3e-05,
"loss": 3.0464,
"step": 1510
},
{
"epoch": 0.4662141314409133,
"grad_norm": 0.2679968774318695,
"learning_rate": 3e-05,
"loss": 2.3394,
"step": 1511
},
{
"epoch": 0.46652267818574517,
"grad_norm": 0.37413671612739563,
"learning_rate": 3e-05,
"loss": 2.8974,
"step": 1512
},
{
"epoch": 0.466831224930577,
"grad_norm": 0.39536064863204956,
"learning_rate": 3e-05,
"loss": 3.2332,
"step": 1513
},
{
"epoch": 0.4671397716754088,
"grad_norm": 0.38000598549842834,
"learning_rate": 3e-05,
"loss": 2.7148,
"step": 1514
},
{
"epoch": 0.46744831842024065,
"grad_norm": 0.48554322123527527,
"learning_rate": 3e-05,
"loss": 2.6587,
"step": 1515
},
{
"epoch": 0.46775686516507253,
"grad_norm": 0.4193074405193329,
"learning_rate": 3e-05,
"loss": 2.9054,
"step": 1516
},
{
"epoch": 0.46806541190990436,
"grad_norm": 0.29958027601242065,
"learning_rate": 3e-05,
"loss": 2.3647,
"step": 1517
},
{
"epoch": 0.4683739586547362,
"grad_norm": 0.2912478744983673,
"learning_rate": 3e-05,
"loss": 2.3545,
"step": 1518
},
{
"epoch": 0.468682505399568,
"grad_norm": 0.268606036901474,
"learning_rate": 3e-05,
"loss": 2.1819,
"step": 1519
},
{
"epoch": 0.4689910521443999,
"grad_norm": 0.38793525099754333,
"learning_rate": 3e-05,
"loss": 2.8047,
"step": 1520
},
{
"epoch": 0.46929959888923173,
"grad_norm": 0.33550551533699036,
"learning_rate": 3e-05,
"loss": 3.0669,
"step": 1521
},
{
"epoch": 0.46960814563406356,
"grad_norm": 0.36648938059806824,
"learning_rate": 3e-05,
"loss": 2.513,
"step": 1522
},
{
"epoch": 0.4699166923788954,
"grad_norm": 0.4586170017719269,
"learning_rate": 3e-05,
"loss": 2.8867,
"step": 1523
},
{
"epoch": 0.47022523912372727,
"grad_norm": 0.3348916471004486,
"learning_rate": 3e-05,
"loss": 2.6242,
"step": 1524
},
{
"epoch": 0.4705337858685591,
"grad_norm": 0.29232484102249146,
"learning_rate": 3e-05,
"loss": 2.3149,
"step": 1525
},
{
"epoch": 0.4708423326133909,
"grad_norm": 0.3628198504447937,
"learning_rate": 3e-05,
"loss": 2.3215,
"step": 1526
},
{
"epoch": 0.47115087935822275,
"grad_norm": 0.46447861194610596,
"learning_rate": 3e-05,
"loss": 2.948,
"step": 1527
},
{
"epoch": 0.47145942610305464,
"grad_norm": 0.3290894627571106,
"learning_rate": 3e-05,
"loss": 2.8251,
"step": 1528
},
{
"epoch": 0.47176797284788646,
"grad_norm": 0.36014366149902344,
"learning_rate": 3e-05,
"loss": 2.499,
"step": 1529
},
{
"epoch": 0.4720765195927183,
"grad_norm": 0.29595625400543213,
"learning_rate": 3e-05,
"loss": 2.2951,
"step": 1530
},
{
"epoch": 0.4723850663375501,
"grad_norm": 0.4257338345050812,
"learning_rate": 3e-05,
"loss": 3.1167,
"step": 1531
},
{
"epoch": 0.472693613082382,
"grad_norm": 0.36370155215263367,
"learning_rate": 3e-05,
"loss": 2.7506,
"step": 1532
},
{
"epoch": 0.47300215982721383,
"grad_norm": 0.2574162185192108,
"learning_rate": 3e-05,
"loss": 2.2529,
"step": 1533
},
{
"epoch": 0.47331070657204566,
"grad_norm": 0.3576168417930603,
"learning_rate": 3e-05,
"loss": 2.6559,
"step": 1534
},
{
"epoch": 0.4736192533168775,
"grad_norm": 0.5774997472763062,
"learning_rate": 3e-05,
"loss": 2.8159,
"step": 1535
},
{
"epoch": 0.47392780006170937,
"grad_norm": 0.37460872530937195,
"learning_rate": 3e-05,
"loss": 3.3666,
"step": 1536
},
{
"epoch": 0.4742363468065412,
"grad_norm": 0.36277443170547485,
"learning_rate": 3e-05,
"loss": 2.6648,
"step": 1537
},
{
"epoch": 0.474544893551373,
"grad_norm": 0.41957375407218933,
"learning_rate": 3e-05,
"loss": 2.6658,
"step": 1538
},
{
"epoch": 0.47485344029620485,
"grad_norm": 0.4152233898639679,
"learning_rate": 3e-05,
"loss": 2.6572,
"step": 1539
},
{
"epoch": 0.47516198704103674,
"grad_norm": 0.2616024911403656,
"learning_rate": 3e-05,
"loss": 2.228,
"step": 1540
},
{
"epoch": 0.47547053378586857,
"grad_norm": 0.31821849942207336,
"learning_rate": 3e-05,
"loss": 2.5554,
"step": 1541
},
{
"epoch": 0.4757790805307004,
"grad_norm": 0.4284205734729767,
"learning_rate": 3e-05,
"loss": 2.8339,
"step": 1542
},
{
"epoch": 0.4760876272755322,
"grad_norm": 0.4077942669391632,
"learning_rate": 3e-05,
"loss": 2.7667,
"step": 1543
},
{
"epoch": 0.4763961740203641,
"grad_norm": 0.46257877349853516,
"learning_rate": 3e-05,
"loss": 2.9476,
"step": 1544
},
{
"epoch": 0.47670472076519593,
"grad_norm": 0.37353515625,
"learning_rate": 3e-05,
"loss": 2.6627,
"step": 1545
},
{
"epoch": 0.47701326751002776,
"grad_norm": 0.29298368096351624,
"learning_rate": 3e-05,
"loss": 2.3048,
"step": 1546
},
{
"epoch": 0.4773218142548596,
"grad_norm": 0.46679583191871643,
"learning_rate": 3e-05,
"loss": 2.4399,
"step": 1547
},
{
"epoch": 0.4776303609996915,
"grad_norm": 0.38306209444999695,
"learning_rate": 3e-05,
"loss": 2.4255,
"step": 1548
},
{
"epoch": 0.4779389077445233,
"grad_norm": 0.2915458679199219,
"learning_rate": 3e-05,
"loss": 2.4377,
"step": 1549
},
{
"epoch": 0.47824745448935513,
"grad_norm": 0.3321347236633301,
"learning_rate": 3e-05,
"loss": 2.1925,
"step": 1550
},
{
"epoch": 0.47855600123418696,
"grad_norm": 0.5388374924659729,
"learning_rate": 3e-05,
"loss": 2.6837,
"step": 1551
},
{
"epoch": 0.47886454797901884,
"grad_norm": 0.5425001978874207,
"learning_rate": 3e-05,
"loss": 2.747,
"step": 1552
},
{
"epoch": 0.47917309472385067,
"grad_norm": 0.3192477524280548,
"learning_rate": 3e-05,
"loss": 2.6687,
"step": 1553
},
{
"epoch": 0.4794816414686825,
"grad_norm": 0.542382001876831,
"learning_rate": 3e-05,
"loss": 3.0074,
"step": 1554
},
{
"epoch": 0.4797901882135143,
"grad_norm": 0.5276587605476379,
"learning_rate": 3e-05,
"loss": 2.5688,
"step": 1555
},
{
"epoch": 0.4800987349583462,
"grad_norm": 0.35362979769706726,
"learning_rate": 3e-05,
"loss": 2.2445,
"step": 1556
},
{
"epoch": 0.48040728170317804,
"grad_norm": 0.309165894985199,
"learning_rate": 3e-05,
"loss": 2.7821,
"step": 1557
},
{
"epoch": 0.48071582844800986,
"grad_norm": 0.335504412651062,
"learning_rate": 3e-05,
"loss": 2.4358,
"step": 1558
},
{
"epoch": 0.4810243751928417,
"grad_norm": 0.6521844863891602,
"learning_rate": 3e-05,
"loss": 3.2967,
"step": 1559
},
{
"epoch": 0.4813329219376736,
"grad_norm": 0.32530689239501953,
"learning_rate": 3e-05,
"loss": 2.4366,
"step": 1560
},
{
"epoch": 0.4816414686825054,
"grad_norm": 0.27962008118629456,
"learning_rate": 3e-05,
"loss": 2.1322,
"step": 1561
},
{
"epoch": 0.48195001542733723,
"grad_norm": 0.328016072511673,
"learning_rate": 3e-05,
"loss": 2.6175,
"step": 1562
},
{
"epoch": 0.48225856217216906,
"grad_norm": 0.45414265990257263,
"learning_rate": 3e-05,
"loss": 2.5736,
"step": 1563
},
{
"epoch": 0.48256710891700094,
"grad_norm": 0.4768297076225281,
"learning_rate": 3e-05,
"loss": 2.4566,
"step": 1564
},
{
"epoch": 0.48287565566183277,
"grad_norm": 0.31878459453582764,
"learning_rate": 3e-05,
"loss": 2.3206,
"step": 1565
},
{
"epoch": 0.4831842024066646,
"grad_norm": 0.34920403361320496,
"learning_rate": 3e-05,
"loss": 2.5046,
"step": 1566
},
{
"epoch": 0.4834927491514964,
"grad_norm": 0.3578214943408966,
"learning_rate": 3e-05,
"loss": 2.5467,
"step": 1567
},
{
"epoch": 0.4838012958963283,
"grad_norm": 0.31990334391593933,
"learning_rate": 3e-05,
"loss": 2.2028,
"step": 1568
},
{
"epoch": 0.48410984264116014,
"grad_norm": 0.5464645624160767,
"learning_rate": 3e-05,
"loss": 2.9154,
"step": 1569
},
{
"epoch": 0.48441838938599197,
"grad_norm": 0.3837225139141083,
"learning_rate": 3e-05,
"loss": 3.1693,
"step": 1570
},
{
"epoch": 0.4847269361308238,
"grad_norm": 0.4334166646003723,
"learning_rate": 3e-05,
"loss": 2.6183,
"step": 1571
},
{
"epoch": 0.4850354828756557,
"grad_norm": 0.6002141237258911,
"learning_rate": 3e-05,
"loss": 2.7773,
"step": 1572
},
{
"epoch": 0.4853440296204875,
"grad_norm": 0.4231824278831482,
"learning_rate": 3e-05,
"loss": 3.0285,
"step": 1573
},
{
"epoch": 0.48565257636531933,
"grad_norm": 0.5055903196334839,
"learning_rate": 3e-05,
"loss": 2.8379,
"step": 1574
},
{
"epoch": 0.48596112311015116,
"grad_norm": 0.4704802632331848,
"learning_rate": 3e-05,
"loss": 2.9247,
"step": 1575
},
{
"epoch": 0.48626966985498304,
"grad_norm": 0.34470391273498535,
"learning_rate": 3e-05,
"loss": 2.595,
"step": 1576
},
{
"epoch": 0.4865782165998149,
"grad_norm": 0.3747197091579437,
"learning_rate": 3e-05,
"loss": 2.7041,
"step": 1577
},
{
"epoch": 0.4868867633446467,
"grad_norm": 0.3777099549770355,
"learning_rate": 3e-05,
"loss": 2.8153,
"step": 1578
},
{
"epoch": 0.4871953100894786,
"grad_norm": 0.3668653666973114,
"learning_rate": 3e-05,
"loss": 2.8264,
"step": 1579
},
{
"epoch": 0.4875038568343104,
"grad_norm": 0.36916372179985046,
"learning_rate": 3e-05,
"loss": 2.6379,
"step": 1580
},
{
"epoch": 0.48781240357914224,
"grad_norm": 0.4489986300468445,
"learning_rate": 3e-05,
"loss": 2.743,
"step": 1581
},
{
"epoch": 0.48812095032397407,
"grad_norm": 0.4028119146823883,
"learning_rate": 3e-05,
"loss": 2.7651,
"step": 1582
},
{
"epoch": 0.48842949706880595,
"grad_norm": 0.5546548366546631,
"learning_rate": 3e-05,
"loss": 2.9192,
"step": 1583
},
{
"epoch": 0.4887380438136378,
"grad_norm": 0.30430009961128235,
"learning_rate": 3e-05,
"loss": 2.5499,
"step": 1584
},
{
"epoch": 0.4890465905584696,
"grad_norm": 0.27419066429138184,
"learning_rate": 3e-05,
"loss": 2.1188,
"step": 1585
},
{
"epoch": 0.48935513730330144,
"grad_norm": 0.4444429576396942,
"learning_rate": 3e-05,
"loss": 2.5329,
"step": 1586
},
{
"epoch": 0.4896636840481333,
"grad_norm": 0.5690097808837891,
"learning_rate": 3e-05,
"loss": 3.0373,
"step": 1587
},
{
"epoch": 0.48997223079296515,
"grad_norm": 0.48694705963134766,
"learning_rate": 3e-05,
"loss": 3.1918,
"step": 1588
},
{
"epoch": 0.490280777537797,
"grad_norm": 0.34520235657691956,
"learning_rate": 3e-05,
"loss": 2.7452,
"step": 1589
},
{
"epoch": 0.4905893242826288,
"grad_norm": 0.3020474910736084,
"learning_rate": 3e-05,
"loss": 2.078,
"step": 1590
},
{
"epoch": 0.4908978710274607,
"grad_norm": 0.6240926384925842,
"learning_rate": 3e-05,
"loss": 2.9503,
"step": 1591
},
{
"epoch": 0.4912064177722925,
"grad_norm": 0.45031264424324036,
"learning_rate": 3e-05,
"loss": 2.7756,
"step": 1592
},
{
"epoch": 0.49151496451712434,
"grad_norm": 0.394246369600296,
"learning_rate": 3e-05,
"loss": 2.353,
"step": 1593
},
{
"epoch": 0.49182351126195617,
"grad_norm": 0.3401414155960083,
"learning_rate": 3e-05,
"loss": 2.9227,
"step": 1594
},
{
"epoch": 0.49213205800678805,
"grad_norm": 0.37477779388427734,
"learning_rate": 3e-05,
"loss": 2.2483,
"step": 1595
},
{
"epoch": 0.4924406047516199,
"grad_norm": 0.451190710067749,
"learning_rate": 3e-05,
"loss": 2.6409,
"step": 1596
},
{
"epoch": 0.4927491514964517,
"grad_norm": 0.5501927733421326,
"learning_rate": 3e-05,
"loss": 3.0029,
"step": 1597
},
{
"epoch": 0.49305769824128354,
"grad_norm": 0.4859730303287506,
"learning_rate": 3e-05,
"loss": 2.7112,
"step": 1598
},
{
"epoch": 0.4933662449861154,
"grad_norm": 0.6070705056190491,
"learning_rate": 3e-05,
"loss": 2.9249,
"step": 1599
},
{
"epoch": 0.49367479173094725,
"grad_norm": 0.5782085061073303,
"learning_rate": 3e-05,
"loss": 2.7502,
"step": 1600
},
{
"epoch": 0.4939833384757791,
"grad_norm": 0.43459030985832214,
"learning_rate": 3e-05,
"loss": 2.6673,
"step": 1601
},
{
"epoch": 0.4942918852206109,
"grad_norm": 0.3972328305244446,
"learning_rate": 3e-05,
"loss": 2.1902,
"step": 1602
},
{
"epoch": 0.4946004319654428,
"grad_norm": 0.4112645089626312,
"learning_rate": 3e-05,
"loss": 2.5025,
"step": 1603
},
{
"epoch": 0.4949089787102746,
"grad_norm": 0.31184130907058716,
"learning_rate": 3e-05,
"loss": 2.4411,
"step": 1604
},
{
"epoch": 0.49521752545510644,
"grad_norm": 0.396083265542984,
"learning_rate": 3e-05,
"loss": 2.5261,
"step": 1605
},
{
"epoch": 0.4955260721999383,
"grad_norm": 0.4644416272640228,
"learning_rate": 3e-05,
"loss": 2.9669,
"step": 1606
},
{
"epoch": 0.49583461894477016,
"grad_norm": 0.3891617953777313,
"learning_rate": 3e-05,
"loss": 2.6902,
"step": 1607
},
{
"epoch": 0.496143165689602,
"grad_norm": 0.5242297649383545,
"learning_rate": 3e-05,
"loss": 3.0694,
"step": 1608
},
{
"epoch": 0.4964517124344338,
"grad_norm": 0.4301431477069855,
"learning_rate": 3e-05,
"loss": 2.5702,
"step": 1609
},
{
"epoch": 0.49676025917926564,
"grad_norm": 0.40124115347862244,
"learning_rate": 3e-05,
"loss": 2.6013,
"step": 1610
},
{
"epoch": 0.4970688059240975,
"grad_norm": 0.6594104170799255,
"learning_rate": 3e-05,
"loss": 2.7237,
"step": 1611
},
{
"epoch": 0.49737735266892935,
"grad_norm": 0.533812940120697,
"learning_rate": 3e-05,
"loss": 3.1564,
"step": 1612
},
{
"epoch": 0.4976858994137612,
"grad_norm": 0.29430219531059265,
"learning_rate": 3e-05,
"loss": 2.6577,
"step": 1613
},
{
"epoch": 0.497994446158593,
"grad_norm": 0.40662962198257446,
"learning_rate": 3e-05,
"loss": 2.8116,
"step": 1614
},
{
"epoch": 0.4983029929034249,
"grad_norm": 0.3725249767303467,
"learning_rate": 3e-05,
"loss": 2.3387,
"step": 1615
},
{
"epoch": 0.4986115396482567,
"grad_norm": 0.37109196186065674,
"learning_rate": 3e-05,
"loss": 2.4788,
"step": 1616
},
{
"epoch": 0.49892008639308855,
"grad_norm": 0.3833397626876831,
"learning_rate": 3e-05,
"loss": 2.4994,
"step": 1617
},
{
"epoch": 0.4992286331379204,
"grad_norm": 0.6564522981643677,
"learning_rate": 3e-05,
"loss": 3.3482,
"step": 1618
},
{
"epoch": 0.49953717988275226,
"grad_norm": 0.3033236265182495,
"learning_rate": 3e-05,
"loss": 2.1542,
"step": 1619
},
{
"epoch": 0.4998457266275841,
"grad_norm": 0.3454381823539734,
"learning_rate": 3e-05,
"loss": 2.4953,
"step": 1620
},
{
"epoch": 0.5001542733724159,
"grad_norm": 0.42523840069770813,
"learning_rate": 3e-05,
"loss": 2.7933,
"step": 1621
},
{
"epoch": 0.5004628201172477,
"grad_norm": 0.4610409140586853,
"learning_rate": 3e-05,
"loss": 2.6401,
"step": 1622
},
{
"epoch": 0.5007713668620796,
"grad_norm": 0.3483074903488159,
"learning_rate": 3e-05,
"loss": 2.6013,
"step": 1623
},
{
"epoch": 0.5010799136069114,
"grad_norm": 0.36740410327911377,
"learning_rate": 3e-05,
"loss": 2.7199,
"step": 1624
},
{
"epoch": 0.5013884603517433,
"grad_norm": 0.3452313542366028,
"learning_rate": 3e-05,
"loss": 2.6963,
"step": 1625
},
{
"epoch": 0.5016970070965752,
"grad_norm": 0.5462652444839478,
"learning_rate": 3e-05,
"loss": 2.8419,
"step": 1626
},
{
"epoch": 0.502005553841407,
"grad_norm": 0.31306546926498413,
"learning_rate": 3e-05,
"loss": 2.3641,
"step": 1627
},
{
"epoch": 0.5023141005862388,
"grad_norm": 0.3925100266933441,
"learning_rate": 3e-05,
"loss": 2.6604,
"step": 1628
},
{
"epoch": 0.5026226473310706,
"grad_norm": 0.3354152739048004,
"learning_rate": 3e-05,
"loss": 2.4355,
"step": 1629
},
{
"epoch": 0.5029311940759025,
"grad_norm": 0.38791340589523315,
"learning_rate": 3e-05,
"loss": 2.6835,
"step": 1630
},
{
"epoch": 0.5032397408207343,
"grad_norm": 0.405977725982666,
"learning_rate": 3e-05,
"loss": 3.1869,
"step": 1631
},
{
"epoch": 0.5035482875655661,
"grad_norm": 0.34836137294769287,
"learning_rate": 3e-05,
"loss": 2.661,
"step": 1632
},
{
"epoch": 0.5038568343103981,
"grad_norm": 0.3889111280441284,
"learning_rate": 3e-05,
"loss": 2.7348,
"step": 1633
},
{
"epoch": 0.5041653810552299,
"grad_norm": 0.30361315608024597,
"learning_rate": 3e-05,
"loss": 2.4406,
"step": 1634
},
{
"epoch": 0.5044739278000617,
"grad_norm": 0.3601548373699188,
"learning_rate": 3e-05,
"loss": 2.8035,
"step": 1635
},
{
"epoch": 0.5047824745448936,
"grad_norm": 0.4318731725215912,
"learning_rate": 3e-05,
"loss": 2.6381,
"step": 1636
},
{
"epoch": 0.5050910212897254,
"grad_norm": 0.30395030975341797,
"learning_rate": 3e-05,
"loss": 2.3321,
"step": 1637
},
{
"epoch": 0.5053995680345572,
"grad_norm": 0.4935227036476135,
"learning_rate": 3e-05,
"loss": 3.006,
"step": 1638
},
{
"epoch": 0.505708114779389,
"grad_norm": 0.32266750931739807,
"learning_rate": 3e-05,
"loss": 2.7027,
"step": 1639
},
{
"epoch": 0.5060166615242209,
"grad_norm": 0.4440366327762604,
"learning_rate": 3e-05,
"loss": 3.0615,
"step": 1640
},
{
"epoch": 0.5063252082690528,
"grad_norm": 0.3273499608039856,
"learning_rate": 3e-05,
"loss": 2.489,
"step": 1641
},
{
"epoch": 0.5066337550138846,
"grad_norm": 0.33660799264907837,
"learning_rate": 3e-05,
"loss": 2.4475,
"step": 1642
},
{
"epoch": 0.5069423017587165,
"grad_norm": 0.3202866017818451,
"learning_rate": 3e-05,
"loss": 2.4899,
"step": 1643
},
{
"epoch": 0.5072508485035483,
"grad_norm": 0.292321115732193,
"learning_rate": 3e-05,
"loss": 2.3956,
"step": 1644
},
{
"epoch": 0.5075593952483801,
"grad_norm": 0.35103198885917664,
"learning_rate": 3e-05,
"loss": 2.6357,
"step": 1645
},
{
"epoch": 0.507867941993212,
"grad_norm": 0.4939737319946289,
"learning_rate": 3e-05,
"loss": 2.8856,
"step": 1646
},
{
"epoch": 0.5081764887380438,
"grad_norm": 0.2993530333042145,
"learning_rate": 3e-05,
"loss": 2.3868,
"step": 1647
},
{
"epoch": 0.5084850354828756,
"grad_norm": 0.3006252944469452,
"learning_rate": 3e-05,
"loss": 2.5518,
"step": 1648
},
{
"epoch": 0.5087935822277075,
"grad_norm": 0.3943158984184265,
"learning_rate": 3e-05,
"loss": 2.7638,
"step": 1649
},
{
"epoch": 0.5091021289725394,
"grad_norm": 0.5232028961181641,
"learning_rate": 3e-05,
"loss": 3.0938,
"step": 1650
},
{
"epoch": 0.5094106757173712,
"grad_norm": 0.2864915132522583,
"learning_rate": 3e-05,
"loss": 2.5598,
"step": 1651
},
{
"epoch": 0.509719222462203,
"grad_norm": 0.3905394673347473,
"learning_rate": 3e-05,
"loss": 2.8282,
"step": 1652
},
{
"epoch": 0.5100277692070349,
"grad_norm": 0.3154192566871643,
"learning_rate": 3e-05,
"loss": 2.4099,
"step": 1653
},
{
"epoch": 0.5103363159518667,
"grad_norm": 0.4238516092300415,
"learning_rate": 3e-05,
"loss": 2.7145,
"step": 1654
},
{
"epoch": 0.5106448626966985,
"grad_norm": 0.37606295943260193,
"learning_rate": 3e-05,
"loss": 2.4632,
"step": 1655
},
{
"epoch": 0.5109534094415303,
"grad_norm": 0.3119703531265259,
"learning_rate": 3e-05,
"loss": 2.7921,
"step": 1656
},
{
"epoch": 0.5112619561863623,
"grad_norm": 0.4015417993068695,
"learning_rate": 3e-05,
"loss": 2.7777,
"step": 1657
},
{
"epoch": 0.5115705029311941,
"grad_norm": 0.314378947019577,
"learning_rate": 3e-05,
"loss": 2.7296,
"step": 1658
},
{
"epoch": 0.5118790496760259,
"grad_norm": 0.3702321946620941,
"learning_rate": 3e-05,
"loss": 2.8218,
"step": 1659
},
{
"epoch": 0.5121875964208578,
"grad_norm": 0.6027920842170715,
"learning_rate": 3e-05,
"loss": 3.028,
"step": 1660
},
{
"epoch": 0.5124961431656896,
"grad_norm": 0.2812676727771759,
"learning_rate": 3e-05,
"loss": 2.4171,
"step": 1661
},
{
"epoch": 0.5128046899105214,
"grad_norm": 0.442354679107666,
"learning_rate": 3e-05,
"loss": 2.8097,
"step": 1662
},
{
"epoch": 0.5131132366553532,
"grad_norm": 0.3620881736278534,
"learning_rate": 3e-05,
"loss": 2.2628,
"step": 1663
},
{
"epoch": 0.5134217834001852,
"grad_norm": 0.530297040939331,
"learning_rate": 3e-05,
"loss": 2.602,
"step": 1664
},
{
"epoch": 0.513730330145017,
"grad_norm": 0.31801244616508484,
"learning_rate": 3e-05,
"loss": 2.471,
"step": 1665
},
{
"epoch": 0.5140388768898488,
"grad_norm": 0.32291916012763977,
"learning_rate": 3e-05,
"loss": 2.7074,
"step": 1666
},
{
"epoch": 0.5143474236346807,
"grad_norm": 0.49042975902557373,
"learning_rate": 3e-05,
"loss": 2.6864,
"step": 1667
},
{
"epoch": 0.5146559703795125,
"grad_norm": 0.3325033187866211,
"learning_rate": 3e-05,
"loss": 2.5405,
"step": 1668
},
{
"epoch": 0.5149645171243443,
"grad_norm": 0.3791443109512329,
"learning_rate": 3e-05,
"loss": 2.7489,
"step": 1669
},
{
"epoch": 0.5152730638691762,
"grad_norm": 0.37896838784217834,
"learning_rate": 3e-05,
"loss": 3.0854,
"step": 1670
},
{
"epoch": 0.515581610614008,
"grad_norm": 0.4317682087421417,
"learning_rate": 3e-05,
"loss": 3.3075,
"step": 1671
},
{
"epoch": 0.5158901573588399,
"grad_norm": 0.4479047954082489,
"learning_rate": 3e-05,
"loss": 2.5176,
"step": 1672
},
{
"epoch": 0.5161987041036717,
"grad_norm": 0.37080270051956177,
"learning_rate": 3e-05,
"loss": 2.942,
"step": 1673
},
{
"epoch": 0.5165072508485036,
"grad_norm": 0.28573471307754517,
"learning_rate": 3e-05,
"loss": 2.2821,
"step": 1674
},
{
"epoch": 0.5168157975933354,
"grad_norm": 0.46885693073272705,
"learning_rate": 3e-05,
"loss": 3.0661,
"step": 1675
},
{
"epoch": 0.5171243443381672,
"grad_norm": 0.2819637060165405,
"learning_rate": 3e-05,
"loss": 2.4839,
"step": 1676
},
{
"epoch": 0.5174328910829991,
"grad_norm": 0.41865482926368713,
"learning_rate": 3e-05,
"loss": 2.9538,
"step": 1677
},
{
"epoch": 0.5177414378278309,
"grad_norm": 0.43568989634513855,
"learning_rate": 3e-05,
"loss": 2.9169,
"step": 1678
},
{
"epoch": 0.5180499845726627,
"grad_norm": 0.30851465463638306,
"learning_rate": 3e-05,
"loss": 2.8745,
"step": 1679
},
{
"epoch": 0.5183585313174947,
"grad_norm": 0.3105289340019226,
"learning_rate": 3e-05,
"loss": 2.4255,
"step": 1680
},
{
"epoch": 0.5186670780623265,
"grad_norm": 0.32589539885520935,
"learning_rate": 3e-05,
"loss": 2.727,
"step": 1681
},
{
"epoch": 0.5189756248071583,
"grad_norm": 0.29433050751686096,
"learning_rate": 3e-05,
"loss": 2.4772,
"step": 1682
},
{
"epoch": 0.5192841715519901,
"grad_norm": 0.3651641607284546,
"learning_rate": 3e-05,
"loss": 2.5789,
"step": 1683
},
{
"epoch": 0.519592718296822,
"grad_norm": 0.2723684310913086,
"learning_rate": 3e-05,
"loss": 2.5751,
"step": 1684
},
{
"epoch": 0.5199012650416538,
"grad_norm": 0.3247137665748596,
"learning_rate": 3e-05,
"loss": 2.4918,
"step": 1685
},
{
"epoch": 0.5202098117864856,
"grad_norm": 0.32286760210990906,
"learning_rate": 3e-05,
"loss": 2.3303,
"step": 1686
},
{
"epoch": 0.5205183585313174,
"grad_norm": 0.25823745131492615,
"learning_rate": 3e-05,
"loss": 2.3304,
"step": 1687
},
{
"epoch": 0.5208269052761494,
"grad_norm": 0.36339297890663147,
"learning_rate": 3e-05,
"loss": 2.6724,
"step": 1688
},
{
"epoch": 0.5211354520209812,
"grad_norm": 0.29126620292663574,
"learning_rate": 3e-05,
"loss": 2.6284,
"step": 1689
},
{
"epoch": 0.521443998765813,
"grad_norm": 0.3497116267681122,
"learning_rate": 3e-05,
"loss": 2.6851,
"step": 1690
},
{
"epoch": 0.5217525455106449,
"grad_norm": 0.324299156665802,
"learning_rate": 3e-05,
"loss": 2.5865,
"step": 1691
},
{
"epoch": 0.5220610922554767,
"grad_norm": 0.3677506148815155,
"learning_rate": 3e-05,
"loss": 2.4579,
"step": 1692
},
{
"epoch": 0.5223696390003085,
"grad_norm": 0.307575523853302,
"learning_rate": 3e-05,
"loss": 2.4014,
"step": 1693
},
{
"epoch": 0.5226781857451404,
"grad_norm": 0.32490015029907227,
"learning_rate": 3e-05,
"loss": 2.5131,
"step": 1694
},
{
"epoch": 0.5229867324899722,
"grad_norm": 0.2470085620880127,
"learning_rate": 3e-05,
"loss": 2.326,
"step": 1695
},
{
"epoch": 0.5232952792348041,
"grad_norm": 0.4868578314781189,
"learning_rate": 3e-05,
"loss": 3.2556,
"step": 1696
},
{
"epoch": 0.523603825979636,
"grad_norm": 0.362221896648407,
"learning_rate": 3e-05,
"loss": 2.7828,
"step": 1697
},
{
"epoch": 0.5239123727244678,
"grad_norm": 0.40775471925735474,
"learning_rate": 3e-05,
"loss": 3.2336,
"step": 1698
},
{
"epoch": 0.5242209194692996,
"grad_norm": 0.3011956810951233,
"learning_rate": 3e-05,
"loss": 2.2592,
"step": 1699
},
{
"epoch": 0.5245294662141314,
"grad_norm": 0.3967321515083313,
"learning_rate": 3e-05,
"loss": 2.81,
"step": 1700
},
{
"epoch": 0.5248380129589633,
"grad_norm": 0.37166154384613037,
"learning_rate": 3e-05,
"loss": 2.5431,
"step": 1701
},
{
"epoch": 0.5251465597037951,
"grad_norm": 0.4062604010105133,
"learning_rate": 3e-05,
"loss": 2.6376,
"step": 1702
},
{
"epoch": 0.5254551064486269,
"grad_norm": 0.313347727060318,
"learning_rate": 3e-05,
"loss": 2.3459,
"step": 1703
},
{
"epoch": 0.5257636531934589,
"grad_norm": 0.8226602077484131,
"learning_rate": 3e-05,
"loss": 3.1876,
"step": 1704
},
{
"epoch": 0.5260721999382907,
"grad_norm": 0.3581107556819916,
"learning_rate": 3e-05,
"loss": 2.6067,
"step": 1705
},
{
"epoch": 0.5263807466831225,
"grad_norm": 0.410627543926239,
"learning_rate": 3e-05,
"loss": 2.6455,
"step": 1706
},
{
"epoch": 0.5266892934279543,
"grad_norm": 0.4492070972919464,
"learning_rate": 3e-05,
"loss": 2.634,
"step": 1707
},
{
"epoch": 0.5269978401727862,
"grad_norm": 0.40184956789016724,
"learning_rate": 3e-05,
"loss": 2.8047,
"step": 1708
},
{
"epoch": 0.527306386917618,
"grad_norm": 0.2819567024707794,
"learning_rate": 3e-05,
"loss": 1.965,
"step": 1709
},
{
"epoch": 0.5276149336624498,
"grad_norm": 0.3046351671218872,
"learning_rate": 3e-05,
"loss": 2.6007,
"step": 1710
},
{
"epoch": 0.5279234804072817,
"grad_norm": 0.338138222694397,
"learning_rate": 3e-05,
"loss": 3.0538,
"step": 1711
},
{
"epoch": 0.5282320271521136,
"grad_norm": 0.38633912801742554,
"learning_rate": 3e-05,
"loss": 2.5772,
"step": 1712
},
{
"epoch": 0.5285405738969454,
"grad_norm": 0.3535228669643402,
"learning_rate": 3e-05,
"loss": 2.6554,
"step": 1713
},
{
"epoch": 0.5288491206417772,
"grad_norm": 0.6468623876571655,
"learning_rate": 3e-05,
"loss": 3.2737,
"step": 1714
},
{
"epoch": 0.5291576673866091,
"grad_norm": 0.31310710310935974,
"learning_rate": 3e-05,
"loss": 2.7591,
"step": 1715
},
{
"epoch": 0.5294662141314409,
"grad_norm": 0.3655259609222412,
"learning_rate": 3e-05,
"loss": 2.7045,
"step": 1716
},
{
"epoch": 0.5297747608762727,
"grad_norm": 0.5266317129135132,
"learning_rate": 3e-05,
"loss": 3.0872,
"step": 1717
},
{
"epoch": 0.5300833076211046,
"grad_norm": 0.3490053713321686,
"learning_rate": 3e-05,
"loss": 2.7577,
"step": 1718
},
{
"epoch": 0.5303918543659364,
"grad_norm": 0.3306678235530853,
"learning_rate": 3e-05,
"loss": 2.4778,
"step": 1719
},
{
"epoch": 0.5307004011107683,
"grad_norm": 0.376544713973999,
"learning_rate": 3e-05,
"loss": 2.6798,
"step": 1720
},
{
"epoch": 0.5310089478556002,
"grad_norm": 0.3733018636703491,
"learning_rate": 3e-05,
"loss": 2.7735,
"step": 1721
},
{
"epoch": 0.531317494600432,
"grad_norm": 0.3042423725128174,
"learning_rate": 3e-05,
"loss": 2.6467,
"step": 1722
},
{
"epoch": 0.5316260413452638,
"grad_norm": 0.39278408885002136,
"learning_rate": 3e-05,
"loss": 2.9289,
"step": 1723
},
{
"epoch": 0.5319345880900956,
"grad_norm": 0.38252609968185425,
"learning_rate": 3e-05,
"loss": 2.5449,
"step": 1724
},
{
"epoch": 0.5322431348349275,
"grad_norm": 0.355350524187088,
"learning_rate": 3e-05,
"loss": 2.4288,
"step": 1725
},
{
"epoch": 0.5325516815797593,
"grad_norm": 0.34510883688926697,
"learning_rate": 3e-05,
"loss": 2.811,
"step": 1726
},
{
"epoch": 0.5328602283245911,
"grad_norm": 0.29719817638397217,
"learning_rate": 3e-05,
"loss": 2.6064,
"step": 1727
},
{
"epoch": 0.5331687750694231,
"grad_norm": 0.297100305557251,
"learning_rate": 3e-05,
"loss": 2.7171,
"step": 1728
},
{
"epoch": 0.5334773218142549,
"grad_norm": 0.42260539531707764,
"learning_rate": 3e-05,
"loss": 2.88,
"step": 1729
},
{
"epoch": 0.5337858685590867,
"grad_norm": 0.3438571095466614,
"learning_rate": 3e-05,
"loss": 2.7997,
"step": 1730
},
{
"epoch": 0.5340944153039185,
"grad_norm": 0.38454294204711914,
"learning_rate": 3e-05,
"loss": 2.8103,
"step": 1731
},
{
"epoch": 0.5344029620487504,
"grad_norm": 0.3376411199569702,
"learning_rate": 3e-05,
"loss": 2.7868,
"step": 1732
},
{
"epoch": 0.5347115087935822,
"grad_norm": 0.3197532892227173,
"learning_rate": 3e-05,
"loss": 2.9247,
"step": 1733
},
{
"epoch": 0.535020055538414,
"grad_norm": 0.380571573972702,
"learning_rate": 3e-05,
"loss": 2.3086,
"step": 1734
},
{
"epoch": 0.5353286022832459,
"grad_norm": 0.42782899737358093,
"learning_rate": 3e-05,
"loss": 2.9169,
"step": 1735
},
{
"epoch": 0.5356371490280778,
"grad_norm": 0.3073023557662964,
"learning_rate": 3e-05,
"loss": 2.5997,
"step": 1736
},
{
"epoch": 0.5359456957729096,
"grad_norm": 0.33757370710372925,
"learning_rate": 3e-05,
"loss": 2.4294,
"step": 1737
},
{
"epoch": 0.5362542425177415,
"grad_norm": 0.3303975462913513,
"learning_rate": 3e-05,
"loss": 2.308,
"step": 1738
},
{
"epoch": 0.5365627892625733,
"grad_norm": 0.35160350799560547,
"learning_rate": 3e-05,
"loss": 2.6371,
"step": 1739
},
{
"epoch": 0.5368713360074051,
"grad_norm": 0.39013320207595825,
"learning_rate": 3e-05,
"loss": 2.9465,
"step": 1740
},
{
"epoch": 0.5371798827522369,
"grad_norm": 0.3293541371822357,
"learning_rate": 3e-05,
"loss": 2.2803,
"step": 1741
},
{
"epoch": 0.5374884294970688,
"grad_norm": 0.34581053256988525,
"learning_rate": 3e-05,
"loss": 2.5024,
"step": 1742
},
{
"epoch": 0.5377969762419006,
"grad_norm": 0.35894790291786194,
"learning_rate": 3e-05,
"loss": 2.6478,
"step": 1743
},
{
"epoch": 0.5381055229867325,
"grad_norm": 0.33879491686820984,
"learning_rate": 3e-05,
"loss": 2.6175,
"step": 1744
},
{
"epoch": 0.5384140697315644,
"grad_norm": 0.3612264394760132,
"learning_rate": 3e-05,
"loss": 2.565,
"step": 1745
},
{
"epoch": 0.5387226164763962,
"grad_norm": 0.49857962131500244,
"learning_rate": 3e-05,
"loss": 3.2948,
"step": 1746
},
{
"epoch": 0.539031163221228,
"grad_norm": 0.3405458629131317,
"learning_rate": 3e-05,
"loss": 2.2905,
"step": 1747
},
{
"epoch": 0.5393397099660598,
"grad_norm": 0.44803398847579956,
"learning_rate": 3e-05,
"loss": 3.7067,
"step": 1748
},
{
"epoch": 0.5396482567108917,
"grad_norm": 0.5877761244773865,
"learning_rate": 3e-05,
"loss": 3.3724,
"step": 1749
},
{
"epoch": 0.5399568034557235,
"grad_norm": 0.34262049198150635,
"learning_rate": 3e-05,
"loss": 3.0155,
"step": 1750
},
{
"epoch": 0.5402653502005554,
"grad_norm": 0.3923507332801819,
"learning_rate": 3e-05,
"loss": 3.2323,
"step": 1751
},
{
"epoch": 0.5405738969453873,
"grad_norm": 0.3871021568775177,
"learning_rate": 3e-05,
"loss": 2.7126,
"step": 1752
},
{
"epoch": 0.5408824436902191,
"grad_norm": 0.31630492210388184,
"learning_rate": 3e-05,
"loss": 2.4423,
"step": 1753
},
{
"epoch": 0.5411909904350509,
"grad_norm": 0.3699815571308136,
"learning_rate": 3e-05,
"loss": 2.8711,
"step": 1754
},
{
"epoch": 0.5414995371798828,
"grad_norm": 0.3360150456428528,
"learning_rate": 3e-05,
"loss": 2.6574,
"step": 1755
},
{
"epoch": 0.5418080839247146,
"grad_norm": 0.5751543641090393,
"learning_rate": 3e-05,
"loss": 3.2725,
"step": 1756
},
{
"epoch": 0.5421166306695464,
"grad_norm": 0.2963869571685791,
"learning_rate": 3e-05,
"loss": 2.5501,
"step": 1757
},
{
"epoch": 0.5424251774143782,
"grad_norm": 0.35280632972717285,
"learning_rate": 3e-05,
"loss": 2.6562,
"step": 1758
},
{
"epoch": 0.5427337241592102,
"grad_norm": 0.42578262090682983,
"learning_rate": 3e-05,
"loss": 2.9552,
"step": 1759
},
{
"epoch": 0.543042270904042,
"grad_norm": 0.26292774081230164,
"learning_rate": 3e-05,
"loss": 2.3602,
"step": 1760
},
{
"epoch": 0.5433508176488738,
"grad_norm": 0.4971487522125244,
"learning_rate": 3e-05,
"loss": 3.02,
"step": 1761
},
{
"epoch": 0.5436593643937057,
"grad_norm": 0.42923474311828613,
"learning_rate": 3e-05,
"loss": 2.9607,
"step": 1762
},
{
"epoch": 0.5439679111385375,
"grad_norm": 0.3830646872520447,
"learning_rate": 3e-05,
"loss": 2.438,
"step": 1763
},
{
"epoch": 0.5442764578833693,
"grad_norm": 0.2715272903442383,
"learning_rate": 3e-05,
"loss": 2.5599,
"step": 1764
},
{
"epoch": 0.5445850046282011,
"grad_norm": 0.3658992052078247,
"learning_rate": 3e-05,
"loss": 2.6659,
"step": 1765
},
{
"epoch": 0.544893551373033,
"grad_norm": 0.4321707785129547,
"learning_rate": 3e-05,
"loss": 2.4854,
"step": 1766
},
{
"epoch": 0.5452020981178649,
"grad_norm": 0.31105655431747437,
"learning_rate": 3e-05,
"loss": 2.6108,
"step": 1767
},
{
"epoch": 0.5455106448626967,
"grad_norm": 0.3162234425544739,
"learning_rate": 3e-05,
"loss": 2.4305,
"step": 1768
},
{
"epoch": 0.5458191916075286,
"grad_norm": 0.2657429277896881,
"learning_rate": 3e-05,
"loss": 2.6477,
"step": 1769
},
{
"epoch": 0.5461277383523604,
"grad_norm": 0.29710811376571655,
"learning_rate": 3e-05,
"loss": 2.446,
"step": 1770
},
{
"epoch": 0.5464362850971922,
"grad_norm": 0.3215124309062958,
"learning_rate": 3e-05,
"loss": 2.2926,
"step": 1771
},
{
"epoch": 0.546744831842024,
"grad_norm": 0.39421600103378296,
"learning_rate": 3e-05,
"loss": 2.9613,
"step": 1772
},
{
"epoch": 0.5470533785868559,
"grad_norm": 0.34313124418258667,
"learning_rate": 3e-05,
"loss": 2.802,
"step": 1773
},
{
"epoch": 0.5473619253316877,
"grad_norm": 0.398807555437088,
"learning_rate": 3e-05,
"loss": 3.447,
"step": 1774
},
{
"epoch": 0.5476704720765196,
"grad_norm": 0.3040825128555298,
"learning_rate": 3e-05,
"loss": 2.3763,
"step": 1775
},
{
"epoch": 0.5479790188213515,
"grad_norm": 0.29558873176574707,
"learning_rate": 3e-05,
"loss": 2.5285,
"step": 1776
},
{
"epoch": 0.5482875655661833,
"grad_norm": 0.35539618134498596,
"learning_rate": 3e-05,
"loss": 2.7637,
"step": 1777
},
{
"epoch": 0.5485961123110151,
"grad_norm": 0.34798794984817505,
"learning_rate": 3e-05,
"loss": 2.5633,
"step": 1778
},
{
"epoch": 0.548904659055847,
"grad_norm": 0.2828461825847626,
"learning_rate": 3e-05,
"loss": 2.6647,
"step": 1779
},
{
"epoch": 0.5492132058006788,
"grad_norm": 0.48053836822509766,
"learning_rate": 3e-05,
"loss": 2.5918,
"step": 1780
},
{
"epoch": 0.5495217525455106,
"grad_norm": 0.31791162490844727,
"learning_rate": 3e-05,
"loss": 2.4877,
"step": 1781
},
{
"epoch": 0.5498302992903424,
"grad_norm": 0.2918544411659241,
"learning_rate": 3e-05,
"loss": 2.3345,
"step": 1782
},
{
"epoch": 0.5501388460351744,
"grad_norm": 0.3297959566116333,
"learning_rate": 3e-05,
"loss": 2.6386,
"step": 1783
},
{
"epoch": 0.5504473927800062,
"grad_norm": 0.3603450357913971,
"learning_rate": 3e-05,
"loss": 2.3877,
"step": 1784
},
{
"epoch": 0.550755939524838,
"grad_norm": 0.4325063228607178,
"learning_rate": 3e-05,
"loss": 2.9395,
"step": 1785
},
{
"epoch": 0.5510644862696699,
"grad_norm": 0.5895464420318604,
"learning_rate": 3e-05,
"loss": 3.4669,
"step": 1786
},
{
"epoch": 0.5513730330145017,
"grad_norm": 0.2964521646499634,
"learning_rate": 3e-05,
"loss": 2.728,
"step": 1787
},
{
"epoch": 0.5516815797593335,
"grad_norm": 0.30058997869491577,
"learning_rate": 3e-05,
"loss": 2.6638,
"step": 1788
},
{
"epoch": 0.5519901265041653,
"grad_norm": 0.3132179081439972,
"learning_rate": 3e-05,
"loss": 2.7093,
"step": 1789
},
{
"epoch": 0.5522986732489972,
"grad_norm": 0.3626979887485504,
"learning_rate": 3e-05,
"loss": 2.4565,
"step": 1790
},
{
"epoch": 0.5526072199938291,
"grad_norm": 0.2594972252845764,
"learning_rate": 3e-05,
"loss": 2.1917,
"step": 1791
},
{
"epoch": 0.5529157667386609,
"grad_norm": 0.2946871817111969,
"learning_rate": 3e-05,
"loss": 2.4383,
"step": 1792
},
{
"epoch": 0.5532243134834928,
"grad_norm": 0.3073224127292633,
"learning_rate": 3e-05,
"loss": 2.4735,
"step": 1793
},
{
"epoch": 0.5535328602283246,
"grad_norm": 0.33085721731185913,
"learning_rate": 3e-05,
"loss": 2.9155,
"step": 1794
},
{
"epoch": 0.5538414069731564,
"grad_norm": 0.3167915344238281,
"learning_rate": 3e-05,
"loss": 2.5648,
"step": 1795
},
{
"epoch": 0.5541499537179883,
"grad_norm": 0.3947449326515198,
"learning_rate": 3e-05,
"loss": 2.7195,
"step": 1796
},
{
"epoch": 0.5544585004628201,
"grad_norm": 0.29865971207618713,
"learning_rate": 3e-05,
"loss": 2.8072,
"step": 1797
},
{
"epoch": 0.5547670472076519,
"grad_norm": 0.32342618703842163,
"learning_rate": 3e-05,
"loss": 2.7242,
"step": 1798
},
{
"epoch": 0.5550755939524838,
"grad_norm": 0.35544127225875854,
"learning_rate": 3e-05,
"loss": 2.4616,
"step": 1799
},
{
"epoch": 0.5553841406973157,
"grad_norm": 0.30906030535697937,
"learning_rate": 3e-05,
"loss": 2.4555,
"step": 1800
},
{
"epoch": 0.5556926874421475,
"grad_norm": 0.2638324499130249,
"learning_rate": 3e-05,
"loss": 2.5501,
"step": 1801
},
{
"epoch": 0.5560012341869793,
"grad_norm": 0.5023085474967957,
"learning_rate": 3e-05,
"loss": 3.0855,
"step": 1802
},
{
"epoch": 0.5563097809318112,
"grad_norm": 0.35941094160079956,
"learning_rate": 3e-05,
"loss": 2.4908,
"step": 1803
},
{
"epoch": 0.556618327676643,
"grad_norm": 0.4491455554962158,
"learning_rate": 3e-05,
"loss": 2.9572,
"step": 1804
},
{
"epoch": 0.5569268744214748,
"grad_norm": 0.44301337003707886,
"learning_rate": 3e-05,
"loss": 2.8863,
"step": 1805
},
{
"epoch": 0.5572354211663066,
"grad_norm": 0.3584831655025482,
"learning_rate": 3e-05,
"loss": 2.6321,
"step": 1806
},
{
"epoch": 0.5575439679111386,
"grad_norm": 0.3179319202899933,
"learning_rate": 3e-05,
"loss": 2.5629,
"step": 1807
},
{
"epoch": 0.5578525146559704,
"grad_norm": 0.39769089221954346,
"learning_rate": 3e-05,
"loss": 2.3598,
"step": 1808
},
{
"epoch": 0.5581610614008022,
"grad_norm": 0.45672088861465454,
"learning_rate": 3e-05,
"loss": 2.7827,
"step": 1809
},
{
"epoch": 0.5584696081456341,
"grad_norm": 0.40564146637916565,
"learning_rate": 3e-05,
"loss": 2.6394,
"step": 1810
},
{
"epoch": 0.5587781548904659,
"grad_norm": 0.3717137575149536,
"learning_rate": 3e-05,
"loss": 2.4997,
"step": 1811
},
{
"epoch": 0.5590867016352977,
"grad_norm": 0.4004184901714325,
"learning_rate": 3e-05,
"loss": 2.4249,
"step": 1812
},
{
"epoch": 0.5593952483801296,
"grad_norm": 0.3864268362522125,
"learning_rate": 3e-05,
"loss": 2.7476,
"step": 1813
},
{
"epoch": 0.5597037951249614,
"grad_norm": 0.2754631042480469,
"learning_rate": 3e-05,
"loss": 2.3015,
"step": 1814
},
{
"epoch": 0.5600123418697933,
"grad_norm": 0.317277193069458,
"learning_rate": 3e-05,
"loss": 2.56,
"step": 1815
},
{
"epoch": 0.5603208886146251,
"grad_norm": 0.4582974314689636,
"learning_rate": 3e-05,
"loss": 3.0188,
"step": 1816
},
{
"epoch": 0.560629435359457,
"grad_norm": 0.3826389014720917,
"learning_rate": 3e-05,
"loss": 2.8016,
"step": 1817
},
{
"epoch": 0.5609379821042888,
"grad_norm": 0.3235574960708618,
"learning_rate": 3e-05,
"loss": 2.5016,
"step": 1818
},
{
"epoch": 0.5612465288491206,
"grad_norm": 0.3079756498336792,
"learning_rate": 3e-05,
"loss": 2.6037,
"step": 1819
},
{
"epoch": 0.5615550755939525,
"grad_norm": 0.3370635509490967,
"learning_rate": 3e-05,
"loss": 2.6834,
"step": 1820
},
{
"epoch": 0.5618636223387843,
"grad_norm": 0.3856896162033081,
"learning_rate": 3e-05,
"loss": 2.5547,
"step": 1821
},
{
"epoch": 0.5621721690836161,
"grad_norm": 0.29247716069221497,
"learning_rate": 3e-05,
"loss": 2.3593,
"step": 1822
},
{
"epoch": 0.562480715828448,
"grad_norm": 0.2804856300354004,
"learning_rate": 3e-05,
"loss": 2.2592,
"step": 1823
},
{
"epoch": 0.5627892625732799,
"grad_norm": 0.38057059049606323,
"learning_rate": 3e-05,
"loss": 2.7212,
"step": 1824
},
{
"epoch": 0.5630978093181117,
"grad_norm": 0.31085407733917236,
"learning_rate": 3e-05,
"loss": 2.3131,
"step": 1825
},
{
"epoch": 0.5634063560629435,
"grad_norm": 0.30950838327407837,
"learning_rate": 3e-05,
"loss": 2.3015,
"step": 1826
},
{
"epoch": 0.5637149028077754,
"grad_norm": 0.2703428864479065,
"learning_rate": 3e-05,
"loss": 2.4096,
"step": 1827
},
{
"epoch": 0.5640234495526072,
"grad_norm": 0.2752431035041809,
"learning_rate": 3e-05,
"loss": 2.2907,
"step": 1828
},
{
"epoch": 0.564331996297439,
"grad_norm": 0.3768213391304016,
"learning_rate": 3e-05,
"loss": 2.706,
"step": 1829
},
{
"epoch": 0.564640543042271,
"grad_norm": 0.3653735816478729,
"learning_rate": 3e-05,
"loss": 2.6885,
"step": 1830
},
{
"epoch": 0.5649490897871028,
"grad_norm": 0.300034761428833,
"learning_rate": 3e-05,
"loss": 2.6125,
"step": 1831
},
{
"epoch": 0.5652576365319346,
"grad_norm": 0.32249727845191956,
"learning_rate": 3e-05,
"loss": 2.6107,
"step": 1832
},
{
"epoch": 0.5655661832767664,
"grad_norm": 0.2547013759613037,
"learning_rate": 3e-05,
"loss": 2.1702,
"step": 1833
},
{
"epoch": 0.5658747300215983,
"grad_norm": 0.3587866723537445,
"learning_rate": 3e-05,
"loss": 2.4859,
"step": 1834
},
{
"epoch": 0.5661832767664301,
"grad_norm": 0.34305340051651,
"learning_rate": 3e-05,
"loss": 2.51,
"step": 1835
},
{
"epoch": 0.5664918235112619,
"grad_norm": 0.3700675368309021,
"learning_rate": 3e-05,
"loss": 3.0394,
"step": 1836
},
{
"epoch": 0.5668003702560938,
"grad_norm": 0.3234868347644806,
"learning_rate": 3e-05,
"loss": 2.5686,
"step": 1837
},
{
"epoch": 0.5671089170009257,
"grad_norm": 0.39365333318710327,
"learning_rate": 3e-05,
"loss": 3.0999,
"step": 1838
},
{
"epoch": 0.5674174637457575,
"grad_norm": 0.41990581154823303,
"learning_rate": 3e-05,
"loss": 2.8201,
"step": 1839
},
{
"epoch": 0.5677260104905894,
"grad_norm": 0.3234783411026001,
"learning_rate": 3e-05,
"loss": 2.9017,
"step": 1840
},
{
"epoch": 0.5680345572354212,
"grad_norm": 0.4942884147167206,
"learning_rate": 3e-05,
"loss": 2.6666,
"step": 1841
},
{
"epoch": 0.568343103980253,
"grad_norm": 0.32762080430984497,
"learning_rate": 3e-05,
"loss": 2.8604,
"step": 1842
},
{
"epoch": 0.5686516507250848,
"grad_norm": 0.32969486713409424,
"learning_rate": 3e-05,
"loss": 2.8232,
"step": 1843
},
{
"epoch": 0.5689601974699167,
"grad_norm": 0.41623249650001526,
"learning_rate": 3e-05,
"loss": 2.7924,
"step": 1844
},
{
"epoch": 0.5692687442147485,
"grad_norm": 0.47765466570854187,
"learning_rate": 3e-05,
"loss": 3.2767,
"step": 1845
},
{
"epoch": 0.5695772909595804,
"grad_norm": 0.3534036874771118,
"learning_rate": 3e-05,
"loss": 2.6968,
"step": 1846
},
{
"epoch": 0.5698858377044123,
"grad_norm": 0.36876803636550903,
"learning_rate": 3e-05,
"loss": 2.7692,
"step": 1847
},
{
"epoch": 0.5701943844492441,
"grad_norm": 0.30010735988616943,
"learning_rate": 3e-05,
"loss": 2.7339,
"step": 1848
},
{
"epoch": 0.5705029311940759,
"grad_norm": 0.41695329546928406,
"learning_rate": 3e-05,
"loss": 2.6282,
"step": 1849
},
{
"epoch": 0.5708114779389077,
"grad_norm": 0.43772029876708984,
"learning_rate": 3e-05,
"loss": 2.8142,
"step": 1850
},
{
"epoch": 0.5711200246837396,
"grad_norm": 0.37401148676872253,
"learning_rate": 3e-05,
"loss": 2.6245,
"step": 1851
},
{
"epoch": 0.5714285714285714,
"grad_norm": 0.3227090835571289,
"learning_rate": 3e-05,
"loss": 2.7928,
"step": 1852
},
{
"epoch": 0.5717371181734032,
"grad_norm": 0.319191575050354,
"learning_rate": 3e-05,
"loss": 2.478,
"step": 1853
},
{
"epoch": 0.5720456649182352,
"grad_norm": 0.3070712983608246,
"learning_rate": 3e-05,
"loss": 2.5363,
"step": 1854
},
{
"epoch": 0.572354211663067,
"grad_norm": 0.31813663244247437,
"learning_rate": 3e-05,
"loss": 2.9719,
"step": 1855
},
{
"epoch": 0.5726627584078988,
"grad_norm": 0.3057642877101898,
"learning_rate": 3e-05,
"loss": 2.4343,
"step": 1856
},
{
"epoch": 0.5729713051527306,
"grad_norm": 0.3192763328552246,
"learning_rate": 3e-05,
"loss": 2.6212,
"step": 1857
},
{
"epoch": 0.5732798518975625,
"grad_norm": 0.42249295115470886,
"learning_rate": 3e-05,
"loss": 2.5999,
"step": 1858
},
{
"epoch": 0.5735883986423943,
"grad_norm": 0.29535210132598877,
"learning_rate": 3e-05,
"loss": 2.4362,
"step": 1859
},
{
"epoch": 0.5738969453872261,
"grad_norm": 0.4919167757034302,
"learning_rate": 3e-05,
"loss": 2.7813,
"step": 1860
},
{
"epoch": 0.574205492132058,
"grad_norm": 0.320404976606369,
"learning_rate": 3e-05,
"loss": 2.6745,
"step": 1861
},
{
"epoch": 0.5745140388768899,
"grad_norm": 0.31205329298973083,
"learning_rate": 3e-05,
"loss": 2.496,
"step": 1862
},
{
"epoch": 0.5748225856217217,
"grad_norm": 0.2652618885040283,
"learning_rate": 3e-05,
"loss": 2.2061,
"step": 1863
},
{
"epoch": 0.5751311323665536,
"grad_norm": 0.3447934687137604,
"learning_rate": 3e-05,
"loss": 2.8457,
"step": 1864
},
{
"epoch": 0.5754396791113854,
"grad_norm": 0.323257714509964,
"learning_rate": 3e-05,
"loss": 2.7075,
"step": 1865
},
{
"epoch": 0.5757482258562172,
"grad_norm": 0.2977539300918579,
"learning_rate": 3e-05,
"loss": 2.576,
"step": 1866
},
{
"epoch": 0.576056772601049,
"grad_norm": 0.3683079183101654,
"learning_rate": 3e-05,
"loss": 2.7179,
"step": 1867
},
{
"epoch": 0.5763653193458809,
"grad_norm": 0.4104117155075073,
"learning_rate": 3e-05,
"loss": 2.6387,
"step": 1868
},
{
"epoch": 0.5766738660907127,
"grad_norm": 0.4164542257785797,
"learning_rate": 3e-05,
"loss": 3.1218,
"step": 1869
},
{
"epoch": 0.5769824128355446,
"grad_norm": 0.35790860652923584,
"learning_rate": 3e-05,
"loss": 3.0051,
"step": 1870
},
{
"epoch": 0.5772909595803765,
"grad_norm": 0.38595104217529297,
"learning_rate": 3e-05,
"loss": 2.5418,
"step": 1871
},
{
"epoch": 0.5775995063252083,
"grad_norm": 0.30152931809425354,
"learning_rate": 3e-05,
"loss": 2.6598,
"step": 1872
},
{
"epoch": 0.5779080530700401,
"grad_norm": 0.45305702090263367,
"learning_rate": 3e-05,
"loss": 3.059,
"step": 1873
},
{
"epoch": 0.578216599814872,
"grad_norm": 0.3736101984977722,
"learning_rate": 3e-05,
"loss": 2.6267,
"step": 1874
},
{
"epoch": 0.5785251465597038,
"grad_norm": 0.3496648371219635,
"learning_rate": 3e-05,
"loss": 2.5717,
"step": 1875
},
{
"epoch": 0.5788336933045356,
"grad_norm": 0.4360155463218689,
"learning_rate": 3e-05,
"loss": 3.1713,
"step": 1876
},
{
"epoch": 0.5791422400493674,
"grad_norm": 0.3444565236568451,
"learning_rate": 3e-05,
"loss": 2.5872,
"step": 1877
},
{
"epoch": 0.5794507867941994,
"grad_norm": 0.3689393103122711,
"learning_rate": 3e-05,
"loss": 2.8699,
"step": 1878
},
{
"epoch": 0.5797593335390312,
"grad_norm": 0.3451610803604126,
"learning_rate": 3e-05,
"loss": 2.7843,
"step": 1879
},
{
"epoch": 0.580067880283863,
"grad_norm": 0.32051753997802734,
"learning_rate": 3e-05,
"loss": 2.6073,
"step": 1880
},
{
"epoch": 0.5803764270286949,
"grad_norm": 0.49073344469070435,
"learning_rate": 3e-05,
"loss": 2.6237,
"step": 1881
},
{
"epoch": 0.5806849737735267,
"grad_norm": 0.3311246633529663,
"learning_rate": 3e-05,
"loss": 2.5781,
"step": 1882
},
{
"epoch": 0.5809935205183585,
"grad_norm": 0.31692609190940857,
"learning_rate": 3e-05,
"loss": 2.4897,
"step": 1883
},
{
"epoch": 0.5813020672631903,
"grad_norm": 0.5018044114112854,
"learning_rate": 3e-05,
"loss": 2.7427,
"step": 1884
},
{
"epoch": 0.5816106140080222,
"grad_norm": 0.30075499415397644,
"learning_rate": 3e-05,
"loss": 2.4889,
"step": 1885
},
{
"epoch": 0.5819191607528541,
"grad_norm": 0.36535558104515076,
"learning_rate": 3e-05,
"loss": 2.696,
"step": 1886
},
{
"epoch": 0.5822277074976859,
"grad_norm": 0.3899480104446411,
"learning_rate": 3e-05,
"loss": 2.7502,
"step": 1887
},
{
"epoch": 0.5825362542425178,
"grad_norm": 0.3767227232456207,
"learning_rate": 3e-05,
"loss": 2.8132,
"step": 1888
},
{
"epoch": 0.5828448009873496,
"grad_norm": 0.6538413763046265,
"learning_rate": 3e-05,
"loss": 3.192,
"step": 1889
},
{
"epoch": 0.5831533477321814,
"grad_norm": 0.4631282389163971,
"learning_rate": 3e-05,
"loss": 2.9992,
"step": 1890
},
{
"epoch": 0.5834618944770132,
"grad_norm": 0.3080557584762573,
"learning_rate": 3e-05,
"loss": 2.6843,
"step": 1891
},
{
"epoch": 0.5837704412218451,
"grad_norm": 0.5268321633338928,
"learning_rate": 3e-05,
"loss": 2.722,
"step": 1892
},
{
"epoch": 0.5840789879666769,
"grad_norm": 0.38069623708724976,
"learning_rate": 3e-05,
"loss": 2.6325,
"step": 1893
},
{
"epoch": 0.5843875347115088,
"grad_norm": 0.3551689386367798,
"learning_rate": 3e-05,
"loss": 2.734,
"step": 1894
},
{
"epoch": 0.5846960814563407,
"grad_norm": 0.49280086159706116,
"learning_rate": 3e-05,
"loss": 2.7902,
"step": 1895
},
{
"epoch": 0.5850046282011725,
"grad_norm": 0.334772527217865,
"learning_rate": 3e-05,
"loss": 2.5278,
"step": 1896
},
{
"epoch": 0.5853131749460043,
"grad_norm": 0.4307243227958679,
"learning_rate": 3e-05,
"loss": 3.0175,
"step": 1897
},
{
"epoch": 0.5856217216908362,
"grad_norm": 0.4769163429737091,
"learning_rate": 3e-05,
"loss": 2.7155,
"step": 1898
},
{
"epoch": 0.585930268435668,
"grad_norm": 0.4567604660987854,
"learning_rate": 3e-05,
"loss": 2.6868,
"step": 1899
},
{
"epoch": 0.5862388151804998,
"grad_norm": 0.2927672564983368,
"learning_rate": 3e-05,
"loss": 2.4188,
"step": 1900
},
{
"epoch": 0.5865473619253316,
"grad_norm": 0.35626882314682007,
"learning_rate": 3e-05,
"loss": 2.4569,
"step": 1901
},
{
"epoch": 0.5868559086701636,
"grad_norm": 0.3931029736995697,
"learning_rate": 3e-05,
"loss": 2.704,
"step": 1902
},
{
"epoch": 0.5871644554149954,
"grad_norm": 0.3271540105342865,
"learning_rate": 3e-05,
"loss": 2.8947,
"step": 1903
},
{
"epoch": 0.5874730021598272,
"grad_norm": 0.4138778746128082,
"learning_rate": 3e-05,
"loss": 2.9432,
"step": 1904
},
{
"epoch": 0.5877815489046591,
"grad_norm": 0.305519163608551,
"learning_rate": 3e-05,
"loss": 2.6207,
"step": 1905
},
{
"epoch": 0.5880900956494909,
"grad_norm": 0.33586934208869934,
"learning_rate": 3e-05,
"loss": 2.5194,
"step": 1906
},
{
"epoch": 0.5883986423943227,
"grad_norm": 0.3901278078556061,
"learning_rate": 3e-05,
"loss": 2.4353,
"step": 1907
},
{
"epoch": 0.5887071891391545,
"grad_norm": 0.28809359669685364,
"learning_rate": 3e-05,
"loss": 2.6155,
"step": 1908
},
{
"epoch": 0.5890157358839864,
"grad_norm": 0.3274940252304077,
"learning_rate": 3e-05,
"loss": 2.2618,
"step": 1909
},
{
"epoch": 0.5893242826288183,
"grad_norm": 0.9131669402122498,
"learning_rate": 3e-05,
"loss": 2.8335,
"step": 1910
},
{
"epoch": 0.5896328293736501,
"grad_norm": 0.3802897036075592,
"learning_rate": 3e-05,
"loss": 2.5784,
"step": 1911
},
{
"epoch": 0.589941376118482,
"grad_norm": 0.4873533546924591,
"learning_rate": 3e-05,
"loss": 2.6349,
"step": 1912
},
{
"epoch": 0.5902499228633138,
"grad_norm": 0.33420997858047485,
"learning_rate": 3e-05,
"loss": 2.4609,
"step": 1913
},
{
"epoch": 0.5905584696081456,
"grad_norm": 0.4681174159049988,
"learning_rate": 3e-05,
"loss": 2.4985,
"step": 1914
},
{
"epoch": 0.5908670163529774,
"grad_norm": 0.3231395184993744,
"learning_rate": 3e-05,
"loss": 2.4141,
"step": 1915
},
{
"epoch": 0.5911755630978093,
"grad_norm": 0.37578973174095154,
"learning_rate": 3e-05,
"loss": 2.6424,
"step": 1916
},
{
"epoch": 0.5914841098426412,
"grad_norm": 0.3429258167743683,
"learning_rate": 3e-05,
"loss": 2.3949,
"step": 1917
},
{
"epoch": 0.591792656587473,
"grad_norm": 0.42337566614151,
"learning_rate": 3e-05,
"loss": 2.5829,
"step": 1918
},
{
"epoch": 0.5921012033323049,
"grad_norm": 0.3989293873310089,
"learning_rate": 3e-05,
"loss": 2.9044,
"step": 1919
},
{
"epoch": 0.5924097500771367,
"grad_norm": 0.351583331823349,
"learning_rate": 3e-05,
"loss": 2.5472,
"step": 1920
},
{
"epoch": 0.5927182968219685,
"grad_norm": 0.28526926040649414,
"learning_rate": 3e-05,
"loss": 2.336,
"step": 1921
},
{
"epoch": 0.5930268435668004,
"grad_norm": 0.292166143655777,
"learning_rate": 3e-05,
"loss": 2.3399,
"step": 1922
},
{
"epoch": 0.5933353903116322,
"grad_norm": 0.3061083257198334,
"learning_rate": 3e-05,
"loss": 2.5137,
"step": 1923
},
{
"epoch": 0.593643937056464,
"grad_norm": 0.3518201410770416,
"learning_rate": 3e-05,
"loss": 2.6903,
"step": 1924
},
{
"epoch": 0.593952483801296,
"grad_norm": 0.34315404295921326,
"learning_rate": 3e-05,
"loss": 2.7476,
"step": 1925
},
{
"epoch": 0.5942610305461278,
"grad_norm": 0.3130031228065491,
"learning_rate": 3e-05,
"loss": 2.303,
"step": 1926
},
{
"epoch": 0.5945695772909596,
"grad_norm": 0.2985696792602539,
"learning_rate": 3e-05,
"loss": 2.1554,
"step": 1927
},
{
"epoch": 0.5948781240357914,
"grad_norm": 0.28823089599609375,
"learning_rate": 3e-05,
"loss": 2.4544,
"step": 1928
},
{
"epoch": 0.5951866707806233,
"grad_norm": 0.2691349387168884,
"learning_rate": 3e-05,
"loss": 2.3395,
"step": 1929
},
{
"epoch": 0.5954952175254551,
"grad_norm": 0.42032185196876526,
"learning_rate": 3e-05,
"loss": 3.0267,
"step": 1930
},
{
"epoch": 0.5958037642702869,
"grad_norm": 0.3613561987876892,
"learning_rate": 3e-05,
"loss": 2.6168,
"step": 1931
},
{
"epoch": 0.5961123110151187,
"grad_norm": 0.34824803471565247,
"learning_rate": 3e-05,
"loss": 2.8337,
"step": 1932
},
{
"epoch": 0.5964208577599507,
"grad_norm": 0.3117191195487976,
"learning_rate": 3e-05,
"loss": 2.7659,
"step": 1933
},
{
"epoch": 0.5967294045047825,
"grad_norm": 0.42382311820983887,
"learning_rate": 3e-05,
"loss": 3.3237,
"step": 1934
},
{
"epoch": 0.5970379512496143,
"grad_norm": 0.3390701711177826,
"learning_rate": 3e-05,
"loss": 2.7139,
"step": 1935
},
{
"epoch": 0.5973464979944462,
"grad_norm": 0.3503396213054657,
"learning_rate": 3e-05,
"loss": 2.9288,
"step": 1936
},
{
"epoch": 0.597655044739278,
"grad_norm": 0.3063940405845642,
"learning_rate": 3e-05,
"loss": 2.1386,
"step": 1937
},
{
"epoch": 0.5979635914841098,
"grad_norm": 0.3633251190185547,
"learning_rate": 3e-05,
"loss": 2.7778,
"step": 1938
},
{
"epoch": 0.5982721382289417,
"grad_norm": 0.337150514125824,
"learning_rate": 3e-05,
"loss": 2.7948,
"step": 1939
},
{
"epoch": 0.5985806849737735,
"grad_norm": 0.31596246361732483,
"learning_rate": 3e-05,
"loss": 2.5579,
"step": 1940
},
{
"epoch": 0.5988892317186054,
"grad_norm": 0.3002220094203949,
"learning_rate": 3e-05,
"loss": 2.5221,
"step": 1941
},
{
"epoch": 0.5991977784634372,
"grad_norm": 0.3110668957233429,
"learning_rate": 3e-05,
"loss": 2.6875,
"step": 1942
},
{
"epoch": 0.5995063252082691,
"grad_norm": 0.43362733721733093,
"learning_rate": 3e-05,
"loss": 2.8798,
"step": 1943
},
{
"epoch": 0.5998148719531009,
"grad_norm": 0.3057228624820709,
"learning_rate": 3e-05,
"loss": 2.6287,
"step": 1944
},
{
"epoch": 0.6001234186979327,
"grad_norm": 0.42439699172973633,
"learning_rate": 3e-05,
"loss": 3.182,
"step": 1945
},
{
"epoch": 0.6004319654427646,
"grad_norm": 0.3645000159740448,
"learning_rate": 3e-05,
"loss": 3.1542,
"step": 1946
},
{
"epoch": 0.6007405121875964,
"grad_norm": 0.32982340455055237,
"learning_rate": 3e-05,
"loss": 2.5876,
"step": 1947
},
{
"epoch": 0.6010490589324282,
"grad_norm": 0.3214912414550781,
"learning_rate": 3e-05,
"loss": 2.392,
"step": 1948
},
{
"epoch": 0.6013576056772602,
"grad_norm": 0.39832058548927307,
"learning_rate": 3e-05,
"loss": 2.6137,
"step": 1949
},
{
"epoch": 0.601666152422092,
"grad_norm": 0.31585508584976196,
"learning_rate": 3e-05,
"loss": 2.687,
"step": 1950
},
{
"epoch": 0.6019746991669238,
"grad_norm": 0.5885326862335205,
"learning_rate": 3e-05,
"loss": 3.2389,
"step": 1951
},
{
"epoch": 0.6022832459117556,
"grad_norm": 0.3510866165161133,
"learning_rate": 3e-05,
"loss": 2.7795,
"step": 1952
},
{
"epoch": 0.6025917926565875,
"grad_norm": 0.29248693585395813,
"learning_rate": 3e-05,
"loss": 2.4253,
"step": 1953
},
{
"epoch": 0.6029003394014193,
"grad_norm": 0.3324500322341919,
"learning_rate": 3e-05,
"loss": 2.4282,
"step": 1954
},
{
"epoch": 0.6032088861462511,
"grad_norm": 0.4674556851387024,
"learning_rate": 3e-05,
"loss": 3.3254,
"step": 1955
},
{
"epoch": 0.603517432891083,
"grad_norm": 0.30010566115379333,
"learning_rate": 3e-05,
"loss": 2.3326,
"step": 1956
},
{
"epoch": 0.6038259796359149,
"grad_norm": 0.8941420912742615,
"learning_rate": 3e-05,
"loss": 2.8991,
"step": 1957
},
{
"epoch": 0.6041345263807467,
"grad_norm": 0.3649481534957886,
"learning_rate": 3e-05,
"loss": 2.9411,
"step": 1958
},
{
"epoch": 0.6044430731255785,
"grad_norm": 0.40504541993141174,
"learning_rate": 3e-05,
"loss": 3.1132,
"step": 1959
},
{
"epoch": 0.6047516198704104,
"grad_norm": 0.36313414573669434,
"learning_rate": 3e-05,
"loss": 2.8904,
"step": 1960
},
{
"epoch": 0.6050601666152422,
"grad_norm": 0.5115112662315369,
"learning_rate": 3e-05,
"loss": 3.2337,
"step": 1961
},
{
"epoch": 0.605368713360074,
"grad_norm": 0.33497315645217896,
"learning_rate": 3e-05,
"loss": 2.5356,
"step": 1962
},
{
"epoch": 0.6056772601049059,
"grad_norm": 0.38686394691467285,
"learning_rate": 3e-05,
"loss": 2.5937,
"step": 1963
},
{
"epoch": 0.6059858068497377,
"grad_norm": 0.6908053159713745,
"learning_rate": 3e-05,
"loss": 2.828,
"step": 1964
},
{
"epoch": 0.6062943535945696,
"grad_norm": 0.31312212347984314,
"learning_rate": 3e-05,
"loss": 2.4178,
"step": 1965
},
{
"epoch": 0.6066029003394015,
"grad_norm": 0.29186975955963135,
"learning_rate": 3e-05,
"loss": 2.1984,
"step": 1966
},
{
"epoch": 0.6069114470842333,
"grad_norm": 0.37484097480773926,
"learning_rate": 3e-05,
"loss": 2.4074,
"step": 1967
},
{
"epoch": 0.6072199938290651,
"grad_norm": 0.32743147015571594,
"learning_rate": 3e-05,
"loss": 2.4768,
"step": 1968
},
{
"epoch": 0.6075285405738969,
"grad_norm": 0.4841730296611786,
"learning_rate": 3e-05,
"loss": 2.5,
"step": 1969
},
{
"epoch": 0.6078370873187288,
"grad_norm": 0.2945593297481537,
"learning_rate": 3e-05,
"loss": 2.478,
"step": 1970
},
{
"epoch": 0.6081456340635606,
"grad_norm": 0.4916343688964844,
"learning_rate": 3e-05,
"loss": 2.4608,
"step": 1971
},
{
"epoch": 0.6084541808083924,
"grad_norm": 0.4401153028011322,
"learning_rate": 3e-05,
"loss": 2.4569,
"step": 1972
},
{
"epoch": 0.6087627275532244,
"grad_norm": 0.2683543264865875,
"learning_rate": 3e-05,
"loss": 2.4189,
"step": 1973
},
{
"epoch": 0.6090712742980562,
"grad_norm": 0.35089823603630066,
"learning_rate": 3e-05,
"loss": 2.4496,
"step": 1974
},
{
"epoch": 0.609379821042888,
"grad_norm": 0.3995623290538788,
"learning_rate": 3e-05,
"loss": 2.6005,
"step": 1975
},
{
"epoch": 0.6096883677877198,
"grad_norm": 0.37342095375061035,
"learning_rate": 3e-05,
"loss": 2.5541,
"step": 1976
},
{
"epoch": 0.6099969145325517,
"grad_norm": 0.3966485857963562,
"learning_rate": 3e-05,
"loss": 2.7672,
"step": 1977
},
{
"epoch": 0.6103054612773835,
"grad_norm": 0.33932629227638245,
"learning_rate": 3e-05,
"loss": 2.3939,
"step": 1978
},
{
"epoch": 0.6106140080222153,
"grad_norm": 0.3646182119846344,
"learning_rate": 3e-05,
"loss": 2.8896,
"step": 1979
},
{
"epoch": 0.6109225547670472,
"grad_norm": 0.3388557434082031,
"learning_rate": 3e-05,
"loss": 2.5835,
"step": 1980
},
{
"epoch": 0.6112311015118791,
"grad_norm": 0.3514586091041565,
"learning_rate": 3e-05,
"loss": 2.8596,
"step": 1981
},
{
"epoch": 0.6115396482567109,
"grad_norm": 0.3328869044780731,
"learning_rate": 3e-05,
"loss": 2.6065,
"step": 1982
},
{
"epoch": 0.6118481950015427,
"grad_norm": 0.3014579713344574,
"learning_rate": 3e-05,
"loss": 2.3762,
"step": 1983
},
{
"epoch": 0.6121567417463746,
"grad_norm": 0.3286787271499634,
"learning_rate": 3e-05,
"loss": 2.819,
"step": 1984
},
{
"epoch": 0.6124652884912064,
"grad_norm": 0.30594438314437866,
"learning_rate": 3e-05,
"loss": 2.2396,
"step": 1985
},
{
"epoch": 0.6127738352360382,
"grad_norm": 0.34099289774894714,
"learning_rate": 3e-05,
"loss": 2.9636,
"step": 1986
},
{
"epoch": 0.6130823819808701,
"grad_norm": 0.3839113414287567,
"learning_rate": 3e-05,
"loss": 2.8976,
"step": 1987
},
{
"epoch": 0.6133909287257019,
"grad_norm": 0.3939029574394226,
"learning_rate": 3e-05,
"loss": 2.964,
"step": 1988
},
{
"epoch": 0.6136994754705338,
"grad_norm": 0.31318363547325134,
"learning_rate": 3e-05,
"loss": 2.5215,
"step": 1989
},
{
"epoch": 0.6140080222153657,
"grad_norm": 0.44314906001091003,
"learning_rate": 3e-05,
"loss": 2.7397,
"step": 1990
},
{
"epoch": 0.6143165689601975,
"grad_norm": 0.2839794158935547,
"learning_rate": 3e-05,
"loss": 2.6783,
"step": 1991
},
{
"epoch": 0.6146251157050293,
"grad_norm": 0.4167096018791199,
"learning_rate": 3e-05,
"loss": 2.8253,
"step": 1992
},
{
"epoch": 0.6149336624498611,
"grad_norm": 0.48042523860931396,
"learning_rate": 3e-05,
"loss": 2.6572,
"step": 1993
},
{
"epoch": 0.615242209194693,
"grad_norm": 0.3192354142665863,
"learning_rate": 3e-05,
"loss": 2.5442,
"step": 1994
},
{
"epoch": 0.6155507559395248,
"grad_norm": 0.3579767048358917,
"learning_rate": 3e-05,
"loss": 2.8193,
"step": 1995
},
{
"epoch": 0.6158593026843567,
"grad_norm": 0.4392094314098358,
"learning_rate": 3e-05,
"loss": 2.7971,
"step": 1996
},
{
"epoch": 0.6161678494291886,
"grad_norm": 0.383989155292511,
"learning_rate": 3e-05,
"loss": 2.8751,
"step": 1997
},
{
"epoch": 0.6164763961740204,
"grad_norm": 0.36968907713890076,
"learning_rate": 3e-05,
"loss": 3.19,
"step": 1998
},
{
"epoch": 0.6167849429188522,
"grad_norm": 0.4232364594936371,
"learning_rate": 3e-05,
"loss": 3.0788,
"step": 1999
},
{
"epoch": 0.617093489663684,
"grad_norm": 0.41558605432510376,
"learning_rate": 3e-05,
"loss": 3.3741,
"step": 2000
},
{
"epoch": 0.6174020364085159,
"grad_norm": 0.3285547196865082,
"learning_rate": 3e-05,
"loss": 2.6336,
"step": 2001
},
{
"epoch": 0.6177105831533477,
"grad_norm": 0.311625212430954,
"learning_rate": 3e-05,
"loss": 2.6533,
"step": 2002
},
{
"epoch": 0.6180191298981795,
"grad_norm": 0.4083147644996643,
"learning_rate": 3e-05,
"loss": 3.1001,
"step": 2003
},
{
"epoch": 0.6183276766430115,
"grad_norm": 0.44464874267578125,
"learning_rate": 3e-05,
"loss": 2.9055,
"step": 2004
},
{
"epoch": 0.6186362233878433,
"grad_norm": 0.2967284619808197,
"learning_rate": 3e-05,
"loss": 2.8229,
"step": 2005
},
{
"epoch": 0.6189447701326751,
"grad_norm": 0.3772972822189331,
"learning_rate": 3e-05,
"loss": 2.5243,
"step": 2006
},
{
"epoch": 0.619253316877507,
"grad_norm": 0.3279479146003723,
"learning_rate": 3e-05,
"loss": 2.9745,
"step": 2007
},
{
"epoch": 0.6195618636223388,
"grad_norm": 0.3854341208934784,
"learning_rate": 3e-05,
"loss": 2.936,
"step": 2008
},
{
"epoch": 0.6198704103671706,
"grad_norm": 0.355197548866272,
"learning_rate": 3e-05,
"loss": 2.074,
"step": 2009
},
{
"epoch": 0.6201789571120024,
"grad_norm": 0.3269404172897339,
"learning_rate": 3e-05,
"loss": 2.3624,
"step": 2010
},
{
"epoch": 0.6204875038568343,
"grad_norm": 0.40607500076293945,
"learning_rate": 3e-05,
"loss": 2.9721,
"step": 2011
},
{
"epoch": 0.6207960506016662,
"grad_norm": 0.3014424741268158,
"learning_rate": 3e-05,
"loss": 2.641,
"step": 2012
},
{
"epoch": 0.621104597346498,
"grad_norm": 0.3271016478538513,
"learning_rate": 3e-05,
"loss": 2.8161,
"step": 2013
},
{
"epoch": 0.6214131440913299,
"grad_norm": 0.3622637689113617,
"learning_rate": 3e-05,
"loss": 3.0245,
"step": 2014
},
{
"epoch": 0.6217216908361617,
"grad_norm": 0.33080458641052246,
"learning_rate": 3e-05,
"loss": 2.3558,
"step": 2015
},
{
"epoch": 0.6220302375809935,
"grad_norm": 0.2808302938938141,
"learning_rate": 3e-05,
"loss": 2.3971,
"step": 2016
},
{
"epoch": 0.6223387843258253,
"grad_norm": 0.3635484576225281,
"learning_rate": 3e-05,
"loss": 2.6886,
"step": 2017
},
{
"epoch": 0.6226473310706572,
"grad_norm": 0.33641016483306885,
"learning_rate": 3e-05,
"loss": 2.6517,
"step": 2018
},
{
"epoch": 0.622955877815489,
"grad_norm": 0.3255454897880554,
"learning_rate": 3e-05,
"loss": 2.7133,
"step": 2019
},
{
"epoch": 0.6232644245603209,
"grad_norm": 0.46022775769233704,
"learning_rate": 3e-05,
"loss": 2.9981,
"step": 2020
},
{
"epoch": 0.6235729713051528,
"grad_norm": 0.26282113790512085,
"learning_rate": 3e-05,
"loss": 2.0888,
"step": 2021
},
{
"epoch": 0.6238815180499846,
"grad_norm": 0.3235068619251251,
"learning_rate": 3e-05,
"loss": 2.5061,
"step": 2022
},
{
"epoch": 0.6241900647948164,
"grad_norm": 0.41372352838516235,
"learning_rate": 3e-05,
"loss": 2.9735,
"step": 2023
},
{
"epoch": 0.6244986115396483,
"grad_norm": 0.3517864942550659,
"learning_rate": 3e-05,
"loss": 2.5726,
"step": 2024
},
{
"epoch": 0.6248071582844801,
"grad_norm": 0.38335707783699036,
"learning_rate": 3e-05,
"loss": 2.7474,
"step": 2025
},
{
"epoch": 0.6251157050293119,
"grad_norm": 0.3006962537765503,
"learning_rate": 3e-05,
"loss": 2.5282,
"step": 2026
},
{
"epoch": 0.6254242517741437,
"grad_norm": 0.47257086634635925,
"learning_rate": 3e-05,
"loss": 3.1522,
"step": 2027
},
{
"epoch": 0.6257327985189757,
"grad_norm": 0.3759831488132477,
"learning_rate": 3e-05,
"loss": 2.8287,
"step": 2028
},
{
"epoch": 0.6260413452638075,
"grad_norm": 0.27641305327415466,
"learning_rate": 3e-05,
"loss": 2.0199,
"step": 2029
},
{
"epoch": 0.6263498920086393,
"grad_norm": 0.3770776391029358,
"learning_rate": 3e-05,
"loss": 3.1056,
"step": 2030
},
{
"epoch": 0.6266584387534712,
"grad_norm": 0.32263755798339844,
"learning_rate": 3e-05,
"loss": 2.6827,
"step": 2031
},
{
"epoch": 0.626966985498303,
"grad_norm": 0.29439646005630493,
"learning_rate": 3e-05,
"loss": 2.7194,
"step": 2032
},
{
"epoch": 0.6272755322431348,
"grad_norm": 0.35637763142585754,
"learning_rate": 3e-05,
"loss": 2.4663,
"step": 2033
},
{
"epoch": 0.6275840789879666,
"grad_norm": 0.30794280767440796,
"learning_rate": 3e-05,
"loss": 2.5098,
"step": 2034
},
{
"epoch": 0.6278926257327985,
"grad_norm": 0.4070144593715668,
"learning_rate": 3e-05,
"loss": 2.6898,
"step": 2035
},
{
"epoch": 0.6282011724776304,
"grad_norm": 0.3939863443374634,
"learning_rate": 3e-05,
"loss": 2.9802,
"step": 2036
},
{
"epoch": 0.6285097192224622,
"grad_norm": 0.338907390832901,
"learning_rate": 3e-05,
"loss": 2.8044,
"step": 2037
},
{
"epoch": 0.6288182659672941,
"grad_norm": 0.30836790800094604,
"learning_rate": 3e-05,
"loss": 2.4929,
"step": 2038
},
{
"epoch": 0.6291268127121259,
"grad_norm": 0.3368026614189148,
"learning_rate": 3e-05,
"loss": 2.291,
"step": 2039
},
{
"epoch": 0.6294353594569577,
"grad_norm": 0.290962278842926,
"learning_rate": 3e-05,
"loss": 2.4745,
"step": 2040
},
{
"epoch": 0.6297439062017895,
"grad_norm": 0.41740748286247253,
"learning_rate": 3e-05,
"loss": 2.7949,
"step": 2041
},
{
"epoch": 0.6300524529466214,
"grad_norm": 0.338375449180603,
"learning_rate": 3e-05,
"loss": 2.8744,
"step": 2042
},
{
"epoch": 0.6303609996914532,
"grad_norm": 0.35139888525009155,
"learning_rate": 3e-05,
"loss": 2.7917,
"step": 2043
},
{
"epoch": 0.6306695464362851,
"grad_norm": 0.2601359486579895,
"learning_rate": 3e-05,
"loss": 2.1908,
"step": 2044
},
{
"epoch": 0.630978093181117,
"grad_norm": 0.46779265999794006,
"learning_rate": 3e-05,
"loss": 3.4857,
"step": 2045
},
{
"epoch": 0.6312866399259488,
"grad_norm": 0.3154003322124481,
"learning_rate": 3e-05,
"loss": 2.4742,
"step": 2046
},
{
"epoch": 0.6315951866707806,
"grad_norm": 0.3528044521808624,
"learning_rate": 3e-05,
"loss": 2.511,
"step": 2047
},
{
"epoch": 0.6319037334156125,
"grad_norm": 0.33536121249198914,
"learning_rate": 3e-05,
"loss": 2.5747,
"step": 2048
},
{
"epoch": 0.6322122801604443,
"grad_norm": 0.2813878357410431,
"learning_rate": 3e-05,
"loss": 2.7885,
"step": 2049
},
{
"epoch": 0.6325208269052761,
"grad_norm": 0.37101298570632935,
"learning_rate": 3e-05,
"loss": 2.4285,
"step": 2050
},
{
"epoch": 0.6328293736501079,
"grad_norm": 0.36067718267440796,
"learning_rate": 3e-05,
"loss": 2.4265,
"step": 2051
},
{
"epoch": 0.6331379203949399,
"grad_norm": 0.2749241292476654,
"learning_rate": 3e-05,
"loss": 2.348,
"step": 2052
},
{
"epoch": 0.6334464671397717,
"grad_norm": 0.37541571259498596,
"learning_rate": 3e-05,
"loss": 2.9153,
"step": 2053
},
{
"epoch": 0.6337550138846035,
"grad_norm": 0.3796222507953644,
"learning_rate": 3e-05,
"loss": 2.5818,
"step": 2054
},
{
"epoch": 0.6340635606294354,
"grad_norm": 0.36146241426467896,
"learning_rate": 3e-05,
"loss": 2.6657,
"step": 2055
},
{
"epoch": 0.6343721073742672,
"grad_norm": 0.37858709692955017,
"learning_rate": 3e-05,
"loss": 2.8254,
"step": 2056
},
{
"epoch": 0.634680654119099,
"grad_norm": 0.38670918345451355,
"learning_rate": 3e-05,
"loss": 2.6653,
"step": 2057
},
{
"epoch": 0.6349892008639308,
"grad_norm": 0.3363918960094452,
"learning_rate": 3e-05,
"loss": 2.6632,
"step": 2058
},
{
"epoch": 0.6352977476087627,
"grad_norm": 0.23550312221050262,
"learning_rate": 3e-05,
"loss": 2.0558,
"step": 2059
},
{
"epoch": 0.6356062943535946,
"grad_norm": 0.332854688167572,
"learning_rate": 3e-05,
"loss": 2.6517,
"step": 2060
},
{
"epoch": 0.6359148410984264,
"grad_norm": 0.3064941167831421,
"learning_rate": 3e-05,
"loss": 2.746,
"step": 2061
},
{
"epoch": 0.6362233878432583,
"grad_norm": 0.40314847230911255,
"learning_rate": 3e-05,
"loss": 2.5667,
"step": 2062
},
{
"epoch": 0.6365319345880901,
"grad_norm": 0.36766573786735535,
"learning_rate": 3e-05,
"loss": 2.8295,
"step": 2063
},
{
"epoch": 0.6368404813329219,
"grad_norm": 0.4863782227039337,
"learning_rate": 3e-05,
"loss": 3.1202,
"step": 2064
},
{
"epoch": 0.6371490280777538,
"grad_norm": 0.35342922806739807,
"learning_rate": 3e-05,
"loss": 2.8253,
"step": 2065
},
{
"epoch": 0.6374575748225856,
"grad_norm": 0.2922891676425934,
"learning_rate": 3e-05,
"loss": 2.1615,
"step": 2066
},
{
"epoch": 0.6377661215674174,
"grad_norm": 0.4159747362136841,
"learning_rate": 3e-05,
"loss": 2.7532,
"step": 2067
},
{
"epoch": 0.6380746683122493,
"grad_norm": 0.24633346498012543,
"learning_rate": 3e-05,
"loss": 2.3447,
"step": 2068
},
{
"epoch": 0.6383832150570812,
"grad_norm": 0.48890185356140137,
"learning_rate": 3e-05,
"loss": 2.9833,
"step": 2069
},
{
"epoch": 0.638691761801913,
"grad_norm": 0.49807408452033997,
"learning_rate": 3e-05,
"loss": 3.0582,
"step": 2070
},
{
"epoch": 0.6390003085467448,
"grad_norm": 0.3025662899017334,
"learning_rate": 3e-05,
"loss": 2.3173,
"step": 2071
},
{
"epoch": 0.6393088552915767,
"grad_norm": 0.2955687642097473,
"learning_rate": 3e-05,
"loss": 2.4217,
"step": 2072
},
{
"epoch": 0.6396174020364085,
"grad_norm": 0.44148918986320496,
"learning_rate": 3e-05,
"loss": 2.7124,
"step": 2073
},
{
"epoch": 0.6399259487812403,
"grad_norm": 0.39978307485580444,
"learning_rate": 3e-05,
"loss": 2.7428,
"step": 2074
},
{
"epoch": 0.6402344955260721,
"grad_norm": 0.2851976454257965,
"learning_rate": 3e-05,
"loss": 2.4972,
"step": 2075
},
{
"epoch": 0.6405430422709041,
"grad_norm": 0.33834779262542725,
"learning_rate": 3e-05,
"loss": 2.503,
"step": 2076
},
{
"epoch": 0.6408515890157359,
"grad_norm": 0.4237276017665863,
"learning_rate": 3e-05,
"loss": 2.973,
"step": 2077
},
{
"epoch": 0.6411601357605677,
"grad_norm": 0.4513942301273346,
"learning_rate": 3e-05,
"loss": 3.0904,
"step": 2078
},
{
"epoch": 0.6414686825053996,
"grad_norm": 0.35915786027908325,
"learning_rate": 3e-05,
"loss": 2.4066,
"step": 2079
},
{
"epoch": 0.6417772292502314,
"grad_norm": 0.3897479772567749,
"learning_rate": 3e-05,
"loss": 2.8671,
"step": 2080
},
{
"epoch": 0.6420857759950632,
"grad_norm": 0.3450873792171478,
"learning_rate": 3e-05,
"loss": 2.9807,
"step": 2081
},
{
"epoch": 0.642394322739895,
"grad_norm": 0.3396153151988983,
"learning_rate": 3e-05,
"loss": 2.7134,
"step": 2082
},
{
"epoch": 0.642702869484727,
"grad_norm": 0.41863974928855896,
"learning_rate": 3e-05,
"loss": 2.6853,
"step": 2083
},
{
"epoch": 0.6430114162295588,
"grad_norm": 0.5577157735824585,
"learning_rate": 3e-05,
"loss": 2.989,
"step": 2084
},
{
"epoch": 0.6433199629743906,
"grad_norm": 0.4542025327682495,
"learning_rate": 3e-05,
"loss": 3.2736,
"step": 2085
},
{
"epoch": 0.6436285097192225,
"grad_norm": 0.3049847185611725,
"learning_rate": 3e-05,
"loss": 2.6011,
"step": 2086
},
{
"epoch": 0.6439370564640543,
"grad_norm": 0.5583184957504272,
"learning_rate": 3e-05,
"loss": 2.9329,
"step": 2087
},
{
"epoch": 0.6442456032088861,
"grad_norm": 0.31898394227027893,
"learning_rate": 3e-05,
"loss": 2.7457,
"step": 2088
},
{
"epoch": 0.644554149953718,
"grad_norm": 0.3565310537815094,
"learning_rate": 3e-05,
"loss": 2.7388,
"step": 2089
},
{
"epoch": 0.6448626966985498,
"grad_norm": 0.2690911889076233,
"learning_rate": 3e-05,
"loss": 2.061,
"step": 2090
},
{
"epoch": 0.6451712434433817,
"grad_norm": 0.339018851518631,
"learning_rate": 3e-05,
"loss": 2.5711,
"step": 2091
},
{
"epoch": 0.6454797901882136,
"grad_norm": 0.314058780670166,
"learning_rate": 3e-05,
"loss": 2.5641,
"step": 2092
},
{
"epoch": 0.6457883369330454,
"grad_norm": 0.28353381156921387,
"learning_rate": 3e-05,
"loss": 2.3022,
"step": 2093
},
{
"epoch": 0.6460968836778772,
"grad_norm": 0.4895442724227905,
"learning_rate": 3e-05,
"loss": 2.861,
"step": 2094
},
{
"epoch": 0.646405430422709,
"grad_norm": 0.3713444769382477,
"learning_rate": 3e-05,
"loss": 2.5318,
"step": 2095
},
{
"epoch": 0.6467139771675409,
"grad_norm": 0.2743374705314636,
"learning_rate": 3e-05,
"loss": 2.2817,
"step": 2096
},
{
"epoch": 0.6470225239123727,
"grad_norm": 0.3508983254432678,
"learning_rate": 3e-05,
"loss": 2.4427,
"step": 2097
},
{
"epoch": 0.6473310706572045,
"grad_norm": 0.2961727976799011,
"learning_rate": 3e-05,
"loss": 2.2046,
"step": 2098
},
{
"epoch": 0.6476396174020365,
"grad_norm": 0.32885807752609253,
"learning_rate": 3e-05,
"loss": 2.6128,
"step": 2099
},
{
"epoch": 0.6479481641468683,
"grad_norm": 0.34111347794532776,
"learning_rate": 3e-05,
"loss": 2.4245,
"step": 2100
},
{
"epoch": 0.6482567108917001,
"grad_norm": 0.3031350076198578,
"learning_rate": 3e-05,
"loss": 2.7567,
"step": 2101
},
{
"epoch": 0.6485652576365319,
"grad_norm": 0.3701673746109009,
"learning_rate": 3e-05,
"loss": 3.1212,
"step": 2102
},
{
"epoch": 0.6488738043813638,
"grad_norm": 0.3395228683948517,
"learning_rate": 3e-05,
"loss": 2.7044,
"step": 2103
},
{
"epoch": 0.6491823511261956,
"grad_norm": 0.40879321098327637,
"learning_rate": 3e-05,
"loss": 2.6475,
"step": 2104
},
{
"epoch": 0.6494908978710274,
"grad_norm": 0.41927212476730347,
"learning_rate": 3e-05,
"loss": 2.9938,
"step": 2105
},
{
"epoch": 0.6497994446158593,
"grad_norm": 0.2867232859134674,
"learning_rate": 3e-05,
"loss": 2.229,
"step": 2106
},
{
"epoch": 0.6501079913606912,
"grad_norm": 0.41298848390579224,
"learning_rate": 3e-05,
"loss": 2.9525,
"step": 2107
},
{
"epoch": 0.650416538105523,
"grad_norm": 0.38462120294570923,
"learning_rate": 3e-05,
"loss": 2.833,
"step": 2108
},
{
"epoch": 0.6507250848503549,
"grad_norm": 0.2860693633556366,
"learning_rate": 3e-05,
"loss": 2.6447,
"step": 2109
},
{
"epoch": 0.6510336315951867,
"grad_norm": 0.3113842308521271,
"learning_rate": 3e-05,
"loss": 2.4201,
"step": 2110
},
{
"epoch": 0.6513421783400185,
"grad_norm": 0.27946069836616516,
"learning_rate": 3e-05,
"loss": 2.4673,
"step": 2111
},
{
"epoch": 0.6516507250848503,
"grad_norm": 0.2512831687927246,
"learning_rate": 3e-05,
"loss": 2.2427,
"step": 2112
},
{
"epoch": 0.6519592718296822,
"grad_norm": 0.5824667811393738,
"learning_rate": 3e-05,
"loss": 2.8793,
"step": 2113
},
{
"epoch": 0.652267818574514,
"grad_norm": 0.6139059066772461,
"learning_rate": 3e-05,
"loss": 3.2765,
"step": 2114
},
{
"epoch": 0.6525763653193459,
"grad_norm": 0.3947453200817108,
"learning_rate": 3e-05,
"loss": 2.6216,
"step": 2115
},
{
"epoch": 0.6528849120641778,
"grad_norm": 0.2993795871734619,
"learning_rate": 3e-05,
"loss": 2.2209,
"step": 2116
},
{
"epoch": 0.6531934588090096,
"grad_norm": 0.33383437991142273,
"learning_rate": 3e-05,
"loss": 2.427,
"step": 2117
},
{
"epoch": 0.6535020055538414,
"grad_norm": 0.46814224123954773,
"learning_rate": 3e-05,
"loss": 2.6804,
"step": 2118
},
{
"epoch": 0.6538105522986732,
"grad_norm": 0.35672876238822937,
"learning_rate": 3e-05,
"loss": 2.8386,
"step": 2119
},
{
"epoch": 0.6541190990435051,
"grad_norm": 0.44050756096839905,
"learning_rate": 3e-05,
"loss": 2.8095,
"step": 2120
},
{
"epoch": 0.6544276457883369,
"grad_norm": 0.3837873339653015,
"learning_rate": 3e-05,
"loss": 2.6712,
"step": 2121
},
{
"epoch": 0.6547361925331687,
"grad_norm": 0.44696587324142456,
"learning_rate": 3e-05,
"loss": 2.8583,
"step": 2122
},
{
"epoch": 0.6550447392780007,
"grad_norm": 0.3543272912502289,
"learning_rate": 3e-05,
"loss": 2.6896,
"step": 2123
},
{
"epoch": 0.6553532860228325,
"grad_norm": 0.3472782373428345,
"learning_rate": 3e-05,
"loss": 2.7716,
"step": 2124
},
{
"epoch": 0.6556618327676643,
"grad_norm": 0.3943750858306885,
"learning_rate": 3e-05,
"loss": 2.6629,
"step": 2125
},
{
"epoch": 0.6559703795124961,
"grad_norm": 0.37474364042282104,
"learning_rate": 3e-05,
"loss": 2.7249,
"step": 2126
},
{
"epoch": 0.656278926257328,
"grad_norm": 0.32506734132766724,
"learning_rate": 3e-05,
"loss": 2.7404,
"step": 2127
},
{
"epoch": 0.6565874730021598,
"grad_norm": 0.30232688784599304,
"learning_rate": 3e-05,
"loss": 2.6386,
"step": 2128
},
{
"epoch": 0.6568960197469916,
"grad_norm": 0.3200061321258545,
"learning_rate": 3e-05,
"loss": 3.0038,
"step": 2129
},
{
"epoch": 0.6572045664918235,
"grad_norm": 0.43252453207969666,
"learning_rate": 3e-05,
"loss": 2.8757,
"step": 2130
},
{
"epoch": 0.6575131132366554,
"grad_norm": 0.3240208923816681,
"learning_rate": 3e-05,
"loss": 2.5438,
"step": 2131
},
{
"epoch": 0.6578216599814872,
"grad_norm": 0.38468319177627563,
"learning_rate": 3e-05,
"loss": 3.0328,
"step": 2132
},
{
"epoch": 0.6581302067263191,
"grad_norm": 0.28264886140823364,
"learning_rate": 3e-05,
"loss": 2.3216,
"step": 2133
},
{
"epoch": 0.6584387534711509,
"grad_norm": 0.3538890480995178,
"learning_rate": 3e-05,
"loss": 3.0705,
"step": 2134
},
{
"epoch": 0.6587473002159827,
"grad_norm": 0.29800945520401,
"learning_rate": 3e-05,
"loss": 2.5841,
"step": 2135
},
{
"epoch": 0.6590558469608145,
"grad_norm": 0.39261680841445923,
"learning_rate": 3e-05,
"loss": 2.4659,
"step": 2136
},
{
"epoch": 0.6593643937056464,
"grad_norm": 0.25242921710014343,
"learning_rate": 3e-05,
"loss": 2.4418,
"step": 2137
},
{
"epoch": 0.6596729404504782,
"grad_norm": 0.35038331151008606,
"learning_rate": 3e-05,
"loss": 2.399,
"step": 2138
},
{
"epoch": 0.6599814871953101,
"grad_norm": 0.36697402596473694,
"learning_rate": 3e-05,
"loss": 2.377,
"step": 2139
},
{
"epoch": 0.660290033940142,
"grad_norm": 0.3055066466331482,
"learning_rate": 3e-05,
"loss": 2.4264,
"step": 2140
},
{
"epoch": 0.6605985806849738,
"grad_norm": 0.3446613848209381,
"learning_rate": 3e-05,
"loss": 2.5709,
"step": 2141
},
{
"epoch": 0.6609071274298056,
"grad_norm": 0.49223238229751587,
"learning_rate": 3e-05,
"loss": 2.8673,
"step": 2142
},
{
"epoch": 0.6612156741746374,
"grad_norm": 0.29385796189308167,
"learning_rate": 3e-05,
"loss": 2.4047,
"step": 2143
},
{
"epoch": 0.6615242209194693,
"grad_norm": 0.3069056570529938,
"learning_rate": 3e-05,
"loss": 2.6242,
"step": 2144
},
{
"epoch": 0.6618327676643011,
"grad_norm": 0.3820006251335144,
"learning_rate": 3e-05,
"loss": 2.9872,
"step": 2145
},
{
"epoch": 0.6621413144091329,
"grad_norm": 0.27881932258605957,
"learning_rate": 3e-05,
"loss": 2.3606,
"step": 2146
},
{
"epoch": 0.6624498611539649,
"grad_norm": 0.4184323847293854,
"learning_rate": 3e-05,
"loss": 3.138,
"step": 2147
},
{
"epoch": 0.6627584078987967,
"grad_norm": 0.2955660820007324,
"learning_rate": 3e-05,
"loss": 2.3884,
"step": 2148
},
{
"epoch": 0.6630669546436285,
"grad_norm": 0.31548479199409485,
"learning_rate": 3e-05,
"loss": 2.3839,
"step": 2149
},
{
"epoch": 0.6633755013884604,
"grad_norm": 0.3798123896121979,
"learning_rate": 3e-05,
"loss": 2.5527,
"step": 2150
},
{
"epoch": 0.6636840481332922,
"grad_norm": 0.31726929545402527,
"learning_rate": 3e-05,
"loss": 2.4075,
"step": 2151
},
{
"epoch": 0.663992594878124,
"grad_norm": 0.25782397389411926,
"learning_rate": 3e-05,
"loss": 1.9784,
"step": 2152
},
{
"epoch": 0.6643011416229558,
"grad_norm": 0.23787148296833038,
"learning_rate": 3e-05,
"loss": 2.0268,
"step": 2153
},
{
"epoch": 0.6646096883677877,
"grad_norm": 0.43553170561790466,
"learning_rate": 3e-05,
"loss": 2.7882,
"step": 2154
},
{
"epoch": 0.6649182351126196,
"grad_norm": 0.3706422448158264,
"learning_rate": 3e-05,
"loss": 2.8122,
"step": 2155
},
{
"epoch": 0.6652267818574514,
"grad_norm": 0.3282051682472229,
"learning_rate": 3e-05,
"loss": 2.22,
"step": 2156
},
{
"epoch": 0.6655353286022833,
"grad_norm": 0.3240806460380554,
"learning_rate": 3e-05,
"loss": 2.84,
"step": 2157
},
{
"epoch": 0.6658438753471151,
"grad_norm": 0.3654410243034363,
"learning_rate": 3e-05,
"loss": 2.6916,
"step": 2158
},
{
"epoch": 0.6661524220919469,
"grad_norm": 0.36625534296035767,
"learning_rate": 3e-05,
"loss": 2.4161,
"step": 2159
},
{
"epoch": 0.6664609688367787,
"grad_norm": 0.30736368894577026,
"learning_rate": 3e-05,
"loss": 2.566,
"step": 2160
},
{
"epoch": 0.6667695155816106,
"grad_norm": 0.3654194176197052,
"learning_rate": 3e-05,
"loss": 2.8405,
"step": 2161
},
{
"epoch": 0.6670780623264425,
"grad_norm": 0.319302499294281,
"learning_rate": 3e-05,
"loss": 2.8201,
"step": 2162
},
{
"epoch": 0.6673866090712743,
"grad_norm": 0.41080933809280396,
"learning_rate": 3e-05,
"loss": 2.881,
"step": 2163
},
{
"epoch": 0.6676951558161062,
"grad_norm": 0.2875690162181854,
"learning_rate": 3e-05,
"loss": 2.4054,
"step": 2164
},
{
"epoch": 0.668003702560938,
"grad_norm": 0.4758496582508087,
"learning_rate": 3e-05,
"loss": 2.9073,
"step": 2165
},
{
"epoch": 0.6683122493057698,
"grad_norm": 0.40784206986427307,
"learning_rate": 3e-05,
"loss": 3.0187,
"step": 2166
},
{
"epoch": 0.6686207960506017,
"grad_norm": 0.3624708652496338,
"learning_rate": 3e-05,
"loss": 2.4545,
"step": 2167
},
{
"epoch": 0.6689293427954335,
"grad_norm": 0.2465389370918274,
"learning_rate": 3e-05,
"loss": 2.3118,
"step": 2168
},
{
"epoch": 0.6692378895402653,
"grad_norm": 0.45873787999153137,
"learning_rate": 3e-05,
"loss": 2.9189,
"step": 2169
},
{
"epoch": 0.6695464362850972,
"grad_norm": 0.42442765831947327,
"learning_rate": 3e-05,
"loss": 2.6501,
"step": 2170
},
{
"epoch": 0.6698549830299291,
"grad_norm": 0.31840279698371887,
"learning_rate": 3e-05,
"loss": 2.7538,
"step": 2171
},
{
"epoch": 0.6701635297747609,
"grad_norm": 0.5180163383483887,
"learning_rate": 3e-05,
"loss": 3.147,
"step": 2172
},
{
"epoch": 0.6704720765195927,
"grad_norm": 0.32313767075538635,
"learning_rate": 3e-05,
"loss": 2.4871,
"step": 2173
},
{
"epoch": 0.6707806232644246,
"grad_norm": 0.46035996079444885,
"learning_rate": 3e-05,
"loss": 2.6938,
"step": 2174
},
{
"epoch": 0.6710891700092564,
"grad_norm": 0.36801087856292725,
"learning_rate": 3e-05,
"loss": 2.5027,
"step": 2175
},
{
"epoch": 0.6713977167540882,
"grad_norm": 0.32743167877197266,
"learning_rate": 3e-05,
"loss": 2.8545,
"step": 2176
},
{
"epoch": 0.67170626349892,
"grad_norm": 0.29395878314971924,
"learning_rate": 3e-05,
"loss": 2.5953,
"step": 2177
},
{
"epoch": 0.672014810243752,
"grad_norm": 0.289230078458786,
"learning_rate": 3e-05,
"loss": 2.4384,
"step": 2178
},
{
"epoch": 0.6723233569885838,
"grad_norm": 0.45950746536254883,
"learning_rate": 3e-05,
"loss": 2.8386,
"step": 2179
},
{
"epoch": 0.6726319037334156,
"grad_norm": 0.3325382471084595,
"learning_rate": 3e-05,
"loss": 2.7935,
"step": 2180
},
{
"epoch": 0.6729404504782475,
"grad_norm": 0.26703134179115295,
"learning_rate": 3e-05,
"loss": 2.4701,
"step": 2181
},
{
"epoch": 0.6732489972230793,
"grad_norm": 0.5108687877655029,
"learning_rate": 3e-05,
"loss": 3.3905,
"step": 2182
},
{
"epoch": 0.6735575439679111,
"grad_norm": 0.3172953128814697,
"learning_rate": 3e-05,
"loss": 2.1606,
"step": 2183
},
{
"epoch": 0.673866090712743,
"grad_norm": 0.35311633348464966,
"learning_rate": 3e-05,
"loss": 2.3739,
"step": 2184
},
{
"epoch": 0.6741746374575748,
"grad_norm": 0.46544140577316284,
"learning_rate": 3e-05,
"loss": 2.7557,
"step": 2185
},
{
"epoch": 0.6744831842024067,
"grad_norm": 0.2531960904598236,
"learning_rate": 3e-05,
"loss": 2.2056,
"step": 2186
},
{
"epoch": 0.6747917309472385,
"grad_norm": 0.33246132731437683,
"learning_rate": 3e-05,
"loss": 2.6602,
"step": 2187
},
{
"epoch": 0.6751002776920704,
"grad_norm": 0.3067862391471863,
"learning_rate": 3e-05,
"loss": 2.1033,
"step": 2188
},
{
"epoch": 0.6754088244369022,
"grad_norm": 0.749748170375824,
"learning_rate": 3e-05,
"loss": 2.6375,
"step": 2189
},
{
"epoch": 0.675717371181734,
"grad_norm": 0.48797351121902466,
"learning_rate": 3e-05,
"loss": 3.048,
"step": 2190
},
{
"epoch": 0.6760259179265659,
"grad_norm": 0.7949218153953552,
"learning_rate": 3e-05,
"loss": 2.4808,
"step": 2191
},
{
"epoch": 0.6763344646713977,
"grad_norm": 0.5216763019561768,
"learning_rate": 3e-05,
"loss": 3.0119,
"step": 2192
},
{
"epoch": 0.6766430114162295,
"grad_norm": 0.48725980520248413,
"learning_rate": 3e-05,
"loss": 2.2706,
"step": 2193
},
{
"epoch": 0.6769515581610615,
"grad_norm": 0.34293922781944275,
"learning_rate": 3e-05,
"loss": 2.3799,
"step": 2194
},
{
"epoch": 0.6772601049058933,
"grad_norm": 0.286062628030777,
"learning_rate": 3e-05,
"loss": 2.4532,
"step": 2195
},
{
"epoch": 0.6775686516507251,
"grad_norm": 0.3767567276954651,
"learning_rate": 3e-05,
"loss": 2.7406,
"step": 2196
},
{
"epoch": 0.6778771983955569,
"grad_norm": 0.31832119822502136,
"learning_rate": 3e-05,
"loss": 2.4595,
"step": 2197
},
{
"epoch": 0.6781857451403888,
"grad_norm": 0.5705392360687256,
"learning_rate": 3e-05,
"loss": 3.2181,
"step": 2198
},
{
"epoch": 0.6784942918852206,
"grad_norm": 0.3068188726902008,
"learning_rate": 3e-05,
"loss": 2.3873,
"step": 2199
},
{
"epoch": 0.6788028386300524,
"grad_norm": 0.3332638144493103,
"learning_rate": 3e-05,
"loss": 2.7779,
"step": 2200
},
{
"epoch": 0.6791113853748842,
"grad_norm": 0.30800509452819824,
"learning_rate": 3e-05,
"loss": 2.1011,
"step": 2201
},
{
"epoch": 0.6794199321197162,
"grad_norm": 0.5739889144897461,
"learning_rate": 3e-05,
"loss": 2.9388,
"step": 2202
},
{
"epoch": 0.679728478864548,
"grad_norm": 0.29751482605934143,
"learning_rate": 3e-05,
"loss": 2.6044,
"step": 2203
},
{
"epoch": 0.6800370256093798,
"grad_norm": 0.29991525411605835,
"learning_rate": 3e-05,
"loss": 2.3541,
"step": 2204
},
{
"epoch": 0.6803455723542117,
"grad_norm": 0.526593029499054,
"learning_rate": 3e-05,
"loss": 3.2972,
"step": 2205
},
{
"epoch": 0.6806541190990435,
"grad_norm": 0.32622289657592773,
"learning_rate": 3e-05,
"loss": 2.6286,
"step": 2206
},
{
"epoch": 0.6809626658438753,
"grad_norm": 0.45844268798828125,
"learning_rate": 3e-05,
"loss": 3.0493,
"step": 2207
},
{
"epoch": 0.6812712125887072,
"grad_norm": 0.3432754576206207,
"learning_rate": 3e-05,
"loss": 2.6928,
"step": 2208
},
{
"epoch": 0.681579759333539,
"grad_norm": 0.413926362991333,
"learning_rate": 3e-05,
"loss": 3.2148,
"step": 2209
},
{
"epoch": 0.6818883060783709,
"grad_norm": 0.3242056369781494,
"learning_rate": 3e-05,
"loss": 2.8108,
"step": 2210
},
{
"epoch": 0.6821968528232027,
"grad_norm": 0.3689955770969391,
"learning_rate": 3e-05,
"loss": 2.2757,
"step": 2211
},
{
"epoch": 0.6825053995680346,
"grad_norm": 0.4456981122493744,
"learning_rate": 3e-05,
"loss": 2.6915,
"step": 2212
},
{
"epoch": 0.6828139463128664,
"grad_norm": 0.3460436761379242,
"learning_rate": 3e-05,
"loss": 2.4635,
"step": 2213
},
{
"epoch": 0.6831224930576982,
"grad_norm": 0.32900822162628174,
"learning_rate": 3e-05,
"loss": 2.423,
"step": 2214
},
{
"epoch": 0.6834310398025301,
"grad_norm": 0.37845373153686523,
"learning_rate": 3e-05,
"loss": 2.34,
"step": 2215
},
{
"epoch": 0.6837395865473619,
"grad_norm": 0.432632178068161,
"learning_rate": 3e-05,
"loss": 2.6306,
"step": 2216
},
{
"epoch": 0.6840481332921937,
"grad_norm": 0.4410339891910553,
"learning_rate": 3e-05,
"loss": 2.7681,
"step": 2217
},
{
"epoch": 0.6843566800370257,
"grad_norm": 0.3067202866077423,
"learning_rate": 3e-05,
"loss": 2.4562,
"step": 2218
},
{
"epoch": 0.6846652267818575,
"grad_norm": 0.39880362153053284,
"learning_rate": 3e-05,
"loss": 2.6653,
"step": 2219
},
{
"epoch": 0.6849737735266893,
"grad_norm": 0.3374853730201721,
"learning_rate": 3e-05,
"loss": 2.5644,
"step": 2220
},
{
"epoch": 0.6852823202715211,
"grad_norm": 0.32741785049438477,
"learning_rate": 3e-05,
"loss": 2.5256,
"step": 2221
},
{
"epoch": 0.685590867016353,
"grad_norm": 0.3135084807872772,
"learning_rate": 3e-05,
"loss": 2.8652,
"step": 2222
},
{
"epoch": 0.6858994137611848,
"grad_norm": 0.2992698848247528,
"learning_rate": 3e-05,
"loss": 2.5192,
"step": 2223
},
{
"epoch": 0.6862079605060166,
"grad_norm": 0.2987714409828186,
"learning_rate": 3e-05,
"loss": 2.3355,
"step": 2224
},
{
"epoch": 0.6865165072508485,
"grad_norm": 0.28761017322540283,
"learning_rate": 3e-05,
"loss": 2.6733,
"step": 2225
},
{
"epoch": 0.6868250539956804,
"grad_norm": 0.45268577337265015,
"learning_rate": 3e-05,
"loss": 3.0758,
"step": 2226
},
{
"epoch": 0.6871336007405122,
"grad_norm": 0.44058606028556824,
"learning_rate": 3e-05,
"loss": 3.1584,
"step": 2227
},
{
"epoch": 0.687442147485344,
"grad_norm": 0.3867708146572113,
"learning_rate": 3e-05,
"loss": 3.1498,
"step": 2228
},
{
"epoch": 0.6877506942301759,
"grad_norm": 0.3580076992511749,
"learning_rate": 3e-05,
"loss": 2.8775,
"step": 2229
},
{
"epoch": 0.6880592409750077,
"grad_norm": 0.4155197739601135,
"learning_rate": 3e-05,
"loss": 2.7409,
"step": 2230
},
{
"epoch": 0.6883677877198395,
"grad_norm": 0.4667437672615051,
"learning_rate": 3e-05,
"loss": 2.9636,
"step": 2231
},
{
"epoch": 0.6886763344646714,
"grad_norm": 0.27438244223594666,
"learning_rate": 3e-05,
"loss": 2.2065,
"step": 2232
},
{
"epoch": 0.6889848812095032,
"grad_norm": 0.32291701436042786,
"learning_rate": 3e-05,
"loss": 2.4505,
"step": 2233
},
{
"epoch": 0.6892934279543351,
"grad_norm": 0.33502185344696045,
"learning_rate": 3e-05,
"loss": 2.3098,
"step": 2234
},
{
"epoch": 0.689601974699167,
"grad_norm": 0.37546586990356445,
"learning_rate": 3e-05,
"loss": 2.105,
"step": 2235
},
{
"epoch": 0.6899105214439988,
"grad_norm": 0.7218531370162964,
"learning_rate": 3e-05,
"loss": 3.3048,
"step": 2236
},
{
"epoch": 0.6902190681888306,
"grad_norm": 0.29030007123947144,
"learning_rate": 3e-05,
"loss": 2.2994,
"step": 2237
},
{
"epoch": 0.6905276149336624,
"grad_norm": 0.35308146476745605,
"learning_rate": 3e-05,
"loss": 3.0852,
"step": 2238
},
{
"epoch": 0.6908361616784943,
"grad_norm": 0.38699471950531006,
"learning_rate": 3e-05,
"loss": 2.7178,
"step": 2239
},
{
"epoch": 0.6911447084233261,
"grad_norm": 0.39251628518104553,
"learning_rate": 3e-05,
"loss": 2.2669,
"step": 2240
},
{
"epoch": 0.6914532551681579,
"grad_norm": 0.48583975434303284,
"learning_rate": 3e-05,
"loss": 3.0221,
"step": 2241
},
{
"epoch": 0.6917618019129899,
"grad_norm": 0.310846209526062,
"learning_rate": 3e-05,
"loss": 2.5904,
"step": 2242
},
{
"epoch": 0.6920703486578217,
"grad_norm": 0.30022484064102173,
"learning_rate": 3e-05,
"loss": 2.4378,
"step": 2243
},
{
"epoch": 0.6923788954026535,
"grad_norm": 0.48664936423301697,
"learning_rate": 3e-05,
"loss": 2.8075,
"step": 2244
},
{
"epoch": 0.6926874421474853,
"grad_norm": 0.3220667839050293,
"learning_rate": 3e-05,
"loss": 2.5881,
"step": 2245
},
{
"epoch": 0.6929959888923172,
"grad_norm": 0.28465014696121216,
"learning_rate": 3e-05,
"loss": 2.2752,
"step": 2246
},
{
"epoch": 0.693304535637149,
"grad_norm": 0.2912628650665283,
"learning_rate": 3e-05,
"loss": 2.4114,
"step": 2247
},
{
"epoch": 0.6936130823819808,
"grad_norm": 0.348395973443985,
"learning_rate": 3e-05,
"loss": 2.812,
"step": 2248
},
{
"epoch": 0.6939216291268128,
"grad_norm": 0.3242470622062683,
"learning_rate": 3e-05,
"loss": 2.7246,
"step": 2249
},
{
"epoch": 0.6942301758716446,
"grad_norm": 0.3317355215549469,
"learning_rate": 3e-05,
"loss": 2.4041,
"step": 2250
},
{
"epoch": 0.6945387226164764,
"grad_norm": 0.4242478609085083,
"learning_rate": 3e-05,
"loss": 2.5728,
"step": 2251
},
{
"epoch": 0.6948472693613083,
"grad_norm": 0.35139262676239014,
"learning_rate": 3e-05,
"loss": 2.47,
"step": 2252
},
{
"epoch": 0.6951558161061401,
"grad_norm": 0.3759293854236603,
"learning_rate": 3e-05,
"loss": 2.8471,
"step": 2253
},
{
"epoch": 0.6954643628509719,
"grad_norm": 0.42364293336868286,
"learning_rate": 3e-05,
"loss": 2.7891,
"step": 2254
},
{
"epoch": 0.6957729095958037,
"grad_norm": 0.37337446212768555,
"learning_rate": 3e-05,
"loss": 2.5027,
"step": 2255
},
{
"epoch": 0.6960814563406356,
"grad_norm": 0.5470480918884277,
"learning_rate": 3e-05,
"loss": 3.0105,
"step": 2256
},
{
"epoch": 0.6963900030854675,
"grad_norm": 0.46059450507164,
"learning_rate": 3e-05,
"loss": 2.7711,
"step": 2257
},
{
"epoch": 0.6966985498302993,
"grad_norm": 0.259276807308197,
"learning_rate": 3e-05,
"loss": 2.2483,
"step": 2258
},
{
"epoch": 0.6970070965751312,
"grad_norm": 0.5910064578056335,
"learning_rate": 3e-05,
"loss": 2.8551,
"step": 2259
},
{
"epoch": 0.697315643319963,
"grad_norm": 0.4100971221923828,
"learning_rate": 3e-05,
"loss": 2.7009,
"step": 2260
},
{
"epoch": 0.6976241900647948,
"grad_norm": 0.4783870279788971,
"learning_rate": 3e-05,
"loss": 3.0588,
"step": 2261
},
{
"epoch": 0.6979327368096266,
"grad_norm": 0.2990737855434418,
"learning_rate": 3e-05,
"loss": 2.6226,
"step": 2262
},
{
"epoch": 0.6982412835544585,
"grad_norm": 0.3180517554283142,
"learning_rate": 3e-05,
"loss": 2.7246,
"step": 2263
},
{
"epoch": 0.6985498302992903,
"grad_norm": 0.47897204756736755,
"learning_rate": 3e-05,
"loss": 2.9415,
"step": 2264
},
{
"epoch": 0.6988583770441222,
"grad_norm": 0.36933448910713196,
"learning_rate": 3e-05,
"loss": 2.5065,
"step": 2265
},
{
"epoch": 0.6991669237889541,
"grad_norm": 0.3363001346588135,
"learning_rate": 3e-05,
"loss": 2.4655,
"step": 2266
},
{
"epoch": 0.6994754705337859,
"grad_norm": 0.35361260175704956,
"learning_rate": 3e-05,
"loss": 2.7079,
"step": 2267
},
{
"epoch": 0.6997840172786177,
"grad_norm": 0.6832797527313232,
"learning_rate": 3e-05,
"loss": 3.281,
"step": 2268
},
{
"epoch": 0.7000925640234495,
"grad_norm": 0.4083409309387207,
"learning_rate": 3e-05,
"loss": 3.1841,
"step": 2269
},
{
"epoch": 0.7004011107682814,
"grad_norm": 0.36310717463493347,
"learning_rate": 3e-05,
"loss": 2.4988,
"step": 2270
},
{
"epoch": 0.7007096575131132,
"grad_norm": 0.3296610713005066,
"learning_rate": 3e-05,
"loss": 2.5112,
"step": 2271
},
{
"epoch": 0.701018204257945,
"grad_norm": 0.3596273958683014,
"learning_rate": 3e-05,
"loss": 3.1719,
"step": 2272
},
{
"epoch": 0.701326751002777,
"grad_norm": 0.30044686794281006,
"learning_rate": 3e-05,
"loss": 2.525,
"step": 2273
},
{
"epoch": 0.7016352977476088,
"grad_norm": 0.3514106869697571,
"learning_rate": 3e-05,
"loss": 2.8474,
"step": 2274
},
{
"epoch": 0.7019438444924406,
"grad_norm": 0.3375399708747864,
"learning_rate": 3e-05,
"loss": 2.6595,
"step": 2275
},
{
"epoch": 0.7022523912372725,
"grad_norm": 0.3137775659561157,
"learning_rate": 3e-05,
"loss": 2.7717,
"step": 2276
},
{
"epoch": 0.7025609379821043,
"grad_norm": 0.440255343914032,
"learning_rate": 3e-05,
"loss": 2.9353,
"step": 2277
},
{
"epoch": 0.7028694847269361,
"grad_norm": 0.3127903342247009,
"learning_rate": 3e-05,
"loss": 2.4178,
"step": 2278
},
{
"epoch": 0.7031780314717679,
"grad_norm": 0.3616493046283722,
"learning_rate": 3e-05,
"loss": 2.8495,
"step": 2279
},
{
"epoch": 0.7034865782165998,
"grad_norm": 0.3875301480293274,
"learning_rate": 3e-05,
"loss": 2.7257,
"step": 2280
},
{
"epoch": 0.7037951249614317,
"grad_norm": 0.44572484493255615,
"learning_rate": 3e-05,
"loss": 2.7335,
"step": 2281
},
{
"epoch": 0.7041036717062635,
"grad_norm": 0.35633429884910583,
"learning_rate": 3e-05,
"loss": 2.919,
"step": 2282
},
{
"epoch": 0.7044122184510954,
"grad_norm": 0.3577132821083069,
"learning_rate": 3e-05,
"loss": 2.7033,
"step": 2283
},
{
"epoch": 0.7047207651959272,
"grad_norm": 0.37120798230171204,
"learning_rate": 3e-05,
"loss": 2.8152,
"step": 2284
},
{
"epoch": 0.705029311940759,
"grad_norm": 0.2999494969844818,
"learning_rate": 3e-05,
"loss": 2.2868,
"step": 2285
},
{
"epoch": 0.7053378586855908,
"grad_norm": 0.2723415493965149,
"learning_rate": 3e-05,
"loss": 2.2569,
"step": 2286
},
{
"epoch": 0.7056464054304227,
"grad_norm": 0.2613217532634735,
"learning_rate": 3e-05,
"loss": 2.1881,
"step": 2287
},
{
"epoch": 0.7059549521752545,
"grad_norm": 0.30193865299224854,
"learning_rate": 3e-05,
"loss": 2.6832,
"step": 2288
},
{
"epoch": 0.7062634989200864,
"grad_norm": 0.35747671127319336,
"learning_rate": 3e-05,
"loss": 2.6943,
"step": 2289
},
{
"epoch": 0.7065720456649183,
"grad_norm": 0.33091968297958374,
"learning_rate": 3e-05,
"loss": 2.4817,
"step": 2290
},
{
"epoch": 0.7068805924097501,
"grad_norm": 0.28639718890190125,
"learning_rate": 3e-05,
"loss": 2.3967,
"step": 2291
},
{
"epoch": 0.7071891391545819,
"grad_norm": 0.33338162302970886,
"learning_rate": 3e-05,
"loss": 2.5865,
"step": 2292
},
{
"epoch": 0.7074976858994138,
"grad_norm": 0.33903318643569946,
"learning_rate": 3e-05,
"loss": 2.795,
"step": 2293
},
{
"epoch": 0.7078062326442456,
"grad_norm": 0.38681912422180176,
"learning_rate": 3e-05,
"loss": 2.6127,
"step": 2294
},
{
"epoch": 0.7081147793890774,
"grad_norm": 0.3772861659526825,
"learning_rate": 3e-05,
"loss": 2.6537,
"step": 2295
},
{
"epoch": 0.7084233261339092,
"grad_norm": 0.31994327902793884,
"learning_rate": 3e-05,
"loss": 2.4539,
"step": 2296
},
{
"epoch": 0.7087318728787412,
"grad_norm": 0.4272918403148651,
"learning_rate": 3e-05,
"loss": 2.7252,
"step": 2297
},
{
"epoch": 0.709040419623573,
"grad_norm": 0.3606460392475128,
"learning_rate": 3e-05,
"loss": 2.3777,
"step": 2298
},
{
"epoch": 0.7093489663684048,
"grad_norm": 0.3049137592315674,
"learning_rate": 3e-05,
"loss": 2.5834,
"step": 2299
},
{
"epoch": 0.7096575131132367,
"grad_norm": 0.30312472581863403,
"learning_rate": 3e-05,
"loss": 2.4526,
"step": 2300
},
{
"epoch": 0.7099660598580685,
"grad_norm": 0.39378124475479126,
"learning_rate": 3e-05,
"loss": 2.5959,
"step": 2301
},
{
"epoch": 0.7102746066029003,
"grad_norm": 0.3223033547401428,
"learning_rate": 3e-05,
"loss": 2.5562,
"step": 2302
},
{
"epoch": 0.7105831533477321,
"grad_norm": 0.30526670813560486,
"learning_rate": 3e-05,
"loss": 2.6841,
"step": 2303
},
{
"epoch": 0.710891700092564,
"grad_norm": 0.29967185854911804,
"learning_rate": 3e-05,
"loss": 2.8474,
"step": 2304
},
{
"epoch": 0.7112002468373959,
"grad_norm": 0.44183349609375,
"learning_rate": 3e-05,
"loss": 2.7405,
"step": 2305
},
{
"epoch": 0.7115087935822277,
"grad_norm": 0.3366570770740509,
"learning_rate": 3e-05,
"loss": 2.5091,
"step": 2306
},
{
"epoch": 0.7118173403270596,
"grad_norm": 0.3268311023712158,
"learning_rate": 3e-05,
"loss": 2.7072,
"step": 2307
},
{
"epoch": 0.7121258870718914,
"grad_norm": 0.32370471954345703,
"learning_rate": 3e-05,
"loss": 2.7471,
"step": 2308
},
{
"epoch": 0.7124344338167232,
"grad_norm": 0.24951313436031342,
"learning_rate": 3e-05,
"loss": 2.3114,
"step": 2309
},
{
"epoch": 0.712742980561555,
"grad_norm": 0.31242477893829346,
"learning_rate": 3e-05,
"loss": 2.6775,
"step": 2310
},
{
"epoch": 0.7130515273063869,
"grad_norm": 0.42314693331718445,
"learning_rate": 3e-05,
"loss": 2.7215,
"step": 2311
},
{
"epoch": 0.7133600740512187,
"grad_norm": 0.41124439239501953,
"learning_rate": 3e-05,
"loss": 3.0069,
"step": 2312
},
{
"epoch": 0.7136686207960506,
"grad_norm": 0.34377771615982056,
"learning_rate": 3e-05,
"loss": 2.6546,
"step": 2313
},
{
"epoch": 0.7139771675408825,
"grad_norm": 0.3865838348865509,
"learning_rate": 3e-05,
"loss": 3.2176,
"step": 2314
},
{
"epoch": 0.7142857142857143,
"grad_norm": 0.3056287169456482,
"learning_rate": 3e-05,
"loss": 2.6893,
"step": 2315
},
{
"epoch": 0.7145942610305461,
"grad_norm": 0.3255837857723236,
"learning_rate": 3e-05,
"loss": 2.6466,
"step": 2316
},
{
"epoch": 0.714902807775378,
"grad_norm": 0.3461015522480011,
"learning_rate": 3e-05,
"loss": 2.2741,
"step": 2317
},
{
"epoch": 0.7152113545202098,
"grad_norm": 0.3450034558773041,
"learning_rate": 3e-05,
"loss": 2.343,
"step": 2318
},
{
"epoch": 0.7155199012650416,
"grad_norm": 0.31079721450805664,
"learning_rate": 3e-05,
"loss": 2.9378,
"step": 2319
},
{
"epoch": 0.7158284480098734,
"grad_norm": 0.39686399698257446,
"learning_rate": 3e-05,
"loss": 2.8702,
"step": 2320
},
{
"epoch": 0.7161369947547054,
"grad_norm": 0.44539371132850647,
"learning_rate": 3e-05,
"loss": 2.6015,
"step": 2321
},
{
"epoch": 0.7164455414995372,
"grad_norm": 0.2945401668548584,
"learning_rate": 3e-05,
"loss": 2.2512,
"step": 2322
},
{
"epoch": 0.716754088244369,
"grad_norm": 0.46408987045288086,
"learning_rate": 3e-05,
"loss": 3.037,
"step": 2323
},
{
"epoch": 0.7170626349892009,
"grad_norm": 0.36283478140830994,
"learning_rate": 3e-05,
"loss": 2.5426,
"step": 2324
},
{
"epoch": 0.7173711817340327,
"grad_norm": 0.37654486298561096,
"learning_rate": 3e-05,
"loss": 2.5525,
"step": 2325
},
{
"epoch": 0.7176797284788645,
"grad_norm": 0.32116761803627014,
"learning_rate": 3e-05,
"loss": 2.357,
"step": 2326
},
{
"epoch": 0.7179882752236963,
"grad_norm": 0.3310379683971405,
"learning_rate": 3e-05,
"loss": 2.7374,
"step": 2327
},
{
"epoch": 0.7182968219685283,
"grad_norm": 0.33591607213020325,
"learning_rate": 3e-05,
"loss": 2.8397,
"step": 2328
},
{
"epoch": 0.7186053687133601,
"grad_norm": 0.2981739640235901,
"learning_rate": 3e-05,
"loss": 2.5991,
"step": 2329
},
{
"epoch": 0.7189139154581919,
"grad_norm": 0.2821868360042572,
"learning_rate": 3e-05,
"loss": 2.3263,
"step": 2330
},
{
"epoch": 0.7192224622030238,
"grad_norm": 0.31829720735549927,
"learning_rate": 3e-05,
"loss": 2.9571,
"step": 2331
},
{
"epoch": 0.7195310089478556,
"grad_norm": 0.2890639901161194,
"learning_rate": 3e-05,
"loss": 2.5357,
"step": 2332
},
{
"epoch": 0.7198395556926874,
"grad_norm": 0.3317052721977234,
"learning_rate": 3e-05,
"loss": 2.4097,
"step": 2333
},
{
"epoch": 0.7201481024375193,
"grad_norm": 0.3682270050048828,
"learning_rate": 3e-05,
"loss": 2.7066,
"step": 2334
},
{
"epoch": 0.7204566491823511,
"grad_norm": 0.35335469245910645,
"learning_rate": 3e-05,
"loss": 2.5117,
"step": 2335
},
{
"epoch": 0.720765195927183,
"grad_norm": 0.26136720180511475,
"learning_rate": 3e-05,
"loss": 2.3658,
"step": 2336
},
{
"epoch": 0.7210737426720149,
"grad_norm": 0.2938997447490692,
"learning_rate": 3e-05,
"loss": 2.8205,
"step": 2337
},
{
"epoch": 0.7213822894168467,
"grad_norm": 0.37984588742256165,
"learning_rate": 3e-05,
"loss": 2.796,
"step": 2338
},
{
"epoch": 0.7216908361616785,
"grad_norm": 0.34478721022605896,
"learning_rate": 3e-05,
"loss": 2.4592,
"step": 2339
},
{
"epoch": 0.7219993829065103,
"grad_norm": 0.2819859981536865,
"learning_rate": 3e-05,
"loss": 2.3211,
"step": 2340
},
{
"epoch": 0.7223079296513422,
"grad_norm": 0.3126926124095917,
"learning_rate": 3e-05,
"loss": 2.5203,
"step": 2341
},
{
"epoch": 0.722616476396174,
"grad_norm": 0.28043824434280396,
"learning_rate": 3e-05,
"loss": 2.7156,
"step": 2342
},
{
"epoch": 0.7229250231410058,
"grad_norm": 0.48454147577285767,
"learning_rate": 3e-05,
"loss": 3.0008,
"step": 2343
},
{
"epoch": 0.7232335698858378,
"grad_norm": 0.4411979615688324,
"learning_rate": 3e-05,
"loss": 3.1171,
"step": 2344
},
{
"epoch": 0.7235421166306696,
"grad_norm": 0.3608051836490631,
"learning_rate": 3e-05,
"loss": 2.8618,
"step": 2345
},
{
"epoch": 0.7238506633755014,
"grad_norm": 0.3951874077320099,
"learning_rate": 3e-05,
"loss": 2.7568,
"step": 2346
},
{
"epoch": 0.7241592101203332,
"grad_norm": 0.4376699924468994,
"learning_rate": 3e-05,
"loss": 2.5694,
"step": 2347
},
{
"epoch": 0.7244677568651651,
"grad_norm": 0.3012159466743469,
"learning_rate": 3e-05,
"loss": 2.6437,
"step": 2348
},
{
"epoch": 0.7247763036099969,
"grad_norm": 0.2726997435092926,
"learning_rate": 3e-05,
"loss": 2.3439,
"step": 2349
},
{
"epoch": 0.7250848503548287,
"grad_norm": 0.4913809895515442,
"learning_rate": 3e-05,
"loss": 2.9323,
"step": 2350
},
{
"epoch": 0.7253933970996606,
"grad_norm": 0.34712734818458557,
"learning_rate": 3e-05,
"loss": 2.352,
"step": 2351
},
{
"epoch": 0.7257019438444925,
"grad_norm": 0.2753252387046814,
"learning_rate": 3e-05,
"loss": 2.6161,
"step": 2352
},
{
"epoch": 0.7260104905893243,
"grad_norm": 0.290897935628891,
"learning_rate": 3e-05,
"loss": 2.5956,
"step": 2353
},
{
"epoch": 0.7263190373341561,
"grad_norm": 0.3504074215888977,
"learning_rate": 3e-05,
"loss": 2.9418,
"step": 2354
},
{
"epoch": 0.726627584078988,
"grad_norm": 0.4185696840286255,
"learning_rate": 3e-05,
"loss": 2.6918,
"step": 2355
},
{
"epoch": 0.7269361308238198,
"grad_norm": 0.36609721183776855,
"learning_rate": 3e-05,
"loss": 2.7385,
"step": 2356
},
{
"epoch": 0.7272446775686516,
"grad_norm": 0.35895973443984985,
"learning_rate": 3e-05,
"loss": 2.9608,
"step": 2357
},
{
"epoch": 0.7275532243134835,
"grad_norm": 0.39654770493507385,
"learning_rate": 3e-05,
"loss": 3.4329,
"step": 2358
},
{
"epoch": 0.7278617710583153,
"grad_norm": 0.33351320028305054,
"learning_rate": 3e-05,
"loss": 2.3718,
"step": 2359
},
{
"epoch": 0.7281703178031472,
"grad_norm": 0.4044846296310425,
"learning_rate": 3e-05,
"loss": 2.6055,
"step": 2360
},
{
"epoch": 0.728478864547979,
"grad_norm": 0.3423720598220825,
"learning_rate": 3e-05,
"loss": 2.7944,
"step": 2361
},
{
"epoch": 0.7287874112928109,
"grad_norm": 0.3391677439212799,
"learning_rate": 3e-05,
"loss": 3.0567,
"step": 2362
},
{
"epoch": 0.7290959580376427,
"grad_norm": 0.2994338572025299,
"learning_rate": 3e-05,
"loss": 2.5462,
"step": 2363
},
{
"epoch": 0.7294045047824745,
"grad_norm": 0.5571287870407104,
"learning_rate": 3e-05,
"loss": 3.2259,
"step": 2364
},
{
"epoch": 0.7297130515273064,
"grad_norm": 0.36647090315818787,
"learning_rate": 3e-05,
"loss": 3.1436,
"step": 2365
},
{
"epoch": 0.7300215982721382,
"grad_norm": 0.3304094970226288,
"learning_rate": 3e-05,
"loss": 2.7925,
"step": 2366
},
{
"epoch": 0.73033014501697,
"grad_norm": 0.3421422243118286,
"learning_rate": 3e-05,
"loss": 2.757,
"step": 2367
},
{
"epoch": 0.730638691761802,
"grad_norm": 0.5458205342292786,
"learning_rate": 3e-05,
"loss": 2.8702,
"step": 2368
},
{
"epoch": 0.7309472385066338,
"grad_norm": 0.32926011085510254,
"learning_rate": 3e-05,
"loss": 2.3621,
"step": 2369
},
{
"epoch": 0.7312557852514656,
"grad_norm": 0.3372330963611603,
"learning_rate": 3e-05,
"loss": 2.8953,
"step": 2370
},
{
"epoch": 0.7315643319962974,
"grad_norm": 0.3673820197582245,
"learning_rate": 3e-05,
"loss": 2.7747,
"step": 2371
},
{
"epoch": 0.7318728787411293,
"grad_norm": 0.5362467765808105,
"learning_rate": 3e-05,
"loss": 3.1949,
"step": 2372
},
{
"epoch": 0.7321814254859611,
"grad_norm": 0.37843358516693115,
"learning_rate": 3e-05,
"loss": 2.982,
"step": 2373
},
{
"epoch": 0.7324899722307929,
"grad_norm": 0.31489112973213196,
"learning_rate": 3e-05,
"loss": 2.5436,
"step": 2374
},
{
"epoch": 0.7327985189756248,
"grad_norm": 0.3280992805957794,
"learning_rate": 3e-05,
"loss": 2.4243,
"step": 2375
},
{
"epoch": 0.7331070657204567,
"grad_norm": 0.32287856936454773,
"learning_rate": 3e-05,
"loss": 2.4056,
"step": 2376
},
{
"epoch": 0.7334156124652885,
"grad_norm": 0.3963980972766876,
"learning_rate": 3e-05,
"loss": 2.8322,
"step": 2377
},
{
"epoch": 0.7337241592101204,
"grad_norm": 0.3684110641479492,
"learning_rate": 3e-05,
"loss": 2.9733,
"step": 2378
},
{
"epoch": 0.7340327059549522,
"grad_norm": 0.2812758684158325,
"learning_rate": 3e-05,
"loss": 2.3254,
"step": 2379
},
{
"epoch": 0.734341252699784,
"grad_norm": 0.2867478132247925,
"learning_rate": 3e-05,
"loss": 2.3585,
"step": 2380
},
{
"epoch": 0.7346497994446158,
"grad_norm": 0.34234970808029175,
"learning_rate": 3e-05,
"loss": 2.4571,
"step": 2381
},
{
"epoch": 0.7349583461894477,
"grad_norm": 0.29766973853111267,
"learning_rate": 3e-05,
"loss": 2.1928,
"step": 2382
},
{
"epoch": 0.7352668929342795,
"grad_norm": 0.41757407784461975,
"learning_rate": 3e-05,
"loss": 2.9724,
"step": 2383
},
{
"epoch": 0.7355754396791114,
"grad_norm": 0.2655417323112488,
"learning_rate": 3e-05,
"loss": 2.3478,
"step": 2384
},
{
"epoch": 0.7358839864239433,
"grad_norm": 0.4270665645599365,
"learning_rate": 3e-05,
"loss": 3.1655,
"step": 2385
},
{
"epoch": 0.7361925331687751,
"grad_norm": 0.35242992639541626,
"learning_rate": 3e-05,
"loss": 2.8984,
"step": 2386
},
{
"epoch": 0.7365010799136069,
"grad_norm": 0.32121220231056213,
"learning_rate": 3e-05,
"loss": 2.8393,
"step": 2387
},
{
"epoch": 0.7368096266584387,
"grad_norm": 0.31955909729003906,
"learning_rate": 3e-05,
"loss": 2.5745,
"step": 2388
},
{
"epoch": 0.7371181734032706,
"grad_norm": 0.362232506275177,
"learning_rate": 3e-05,
"loss": 2.6013,
"step": 2389
},
{
"epoch": 0.7374267201481024,
"grad_norm": 0.321138471364975,
"learning_rate": 3e-05,
"loss": 2.6018,
"step": 2390
},
{
"epoch": 0.7377352668929342,
"grad_norm": 0.33530327677726746,
"learning_rate": 3e-05,
"loss": 2.4887,
"step": 2391
},
{
"epoch": 0.7380438136377662,
"grad_norm": 0.2641347348690033,
"learning_rate": 3e-05,
"loss": 2.4245,
"step": 2392
},
{
"epoch": 0.738352360382598,
"grad_norm": 0.3557533025741577,
"learning_rate": 3e-05,
"loss": 2.9062,
"step": 2393
},
{
"epoch": 0.7386609071274298,
"grad_norm": 0.3297819495201111,
"learning_rate": 3e-05,
"loss": 2.3035,
"step": 2394
},
{
"epoch": 0.7389694538722617,
"grad_norm": 0.3426196277141571,
"learning_rate": 3e-05,
"loss": 2.7256,
"step": 2395
},
{
"epoch": 0.7392780006170935,
"grad_norm": 0.305744469165802,
"learning_rate": 3e-05,
"loss": 2.3195,
"step": 2396
},
{
"epoch": 0.7395865473619253,
"grad_norm": 0.31769564747810364,
"learning_rate": 3e-05,
"loss": 2.4878,
"step": 2397
},
{
"epoch": 0.7398950941067571,
"grad_norm": 0.25210025906562805,
"learning_rate": 3e-05,
"loss": 2.2466,
"step": 2398
},
{
"epoch": 0.740203640851589,
"grad_norm": 0.34738588333129883,
"learning_rate": 3e-05,
"loss": 2.6832,
"step": 2399
},
{
"epoch": 0.7405121875964209,
"grad_norm": 0.32754284143447876,
"learning_rate": 3e-05,
"loss": 2.5207,
"step": 2400
}
],
"logging_steps": 1,
"max_steps": 3241,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 300,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.8497987130949632e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}