{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.1381406271584473,
"eval_steps": 500,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0001381406271584473,
"grad_norm": 0.13134899735450745,
"learning_rate": 0.0,
"loss": 0.2546,
"step": 1
},
{
"epoch": 0.0002762812543168946,
"grad_norm": 0.1231406033039093,
"learning_rate": 8.695768706980801e-06,
"loss": 0.2051,
"step": 2
},
{
"epoch": 0.0004144218814753419,
"grad_norm": 0.08561521023511887,
"learning_rate": 1.3782467315509068e-05,
"loss": 0.1455,
"step": 3
},
{
"epoch": 0.0005525625086337892,
"grad_norm": 0.1449182778596878,
"learning_rate": 1.7391537413961602e-05,
"loss": 0.3489,
"step": 4
},
{
"epoch": 0.0006907031357922365,
"grad_norm": 0.07311359792947769,
"learning_rate": 2.019094966738107e-05,
"loss": 0.1115,
"step": 5
},
{
"epoch": 0.0008288437629506838,
"grad_norm": 0.09480362385511398,
"learning_rate": 2.247823602248987e-05,
"loss": 0.1602,
"step": 6
},
{
"epoch": 0.0009669843901091311,
"grad_norm": 0.08785154670476913,
"learning_rate": 2.441210908061704e-05,
"loss": 0.1448,
"step": 7
},
{
"epoch": 0.0011051250172675784,
"grad_norm": 0.10113295167684555,
"learning_rate": 2.60873061209424e-05,
"loss": 0.1726,
"step": 8
},
{
"epoch": 0.0012432656444260257,
"grad_norm": 0.10489944368600845,
"learning_rate": 2.7564934631018137e-05,
"loss": 0.1343,
"step": 9
},
{
"epoch": 0.001381406271584473,
"grad_norm": 0.07018060237169266,
"learning_rate": 2.8886718374361883e-05,
"loss": 0.0986,
"step": 10
},
{
"epoch": 0.0015195468987429202,
"grad_norm": 0.07492844760417938,
"learning_rate": 3.008241721328615e-05,
"loss": 0.1243,
"step": 11
},
{
"epoch": 0.0016576875259013675,
"grad_norm": 0.02947733923792839,
"learning_rate": 3.117400472947067e-05,
"loss": 0.0427,
"step": 12
},
{
"epoch": 0.0017958281530598148,
"grad_norm": 0.07098696380853653,
"learning_rate": 3.217816790308016e-05,
"loss": 0.074,
"step": 13
},
{
"epoch": 0.0019339687802182623,
"grad_norm": 0.06729952991008759,
"learning_rate": 3.3107877787597844e-05,
"loss": 0.0756,
"step": 14
},
{
"epoch": 0.0020721094073767096,
"grad_norm": 0.047634709626436234,
"learning_rate": 3.397341698289014e-05,
"loss": 0.0335,
"step": 15
},
{
"epoch": 0.002210250034535157,
"grad_norm": 0.01928865537047386,
"learning_rate": 3.4783074827923204e-05,
"loss": 0.0253,
"step": 16
},
{
"epoch": 0.002348390661693604,
"grad_norm": 0.10047394782304764,
"learning_rate": 3.5543631465891534e-05,
"loss": 0.118,
"step": 17
},
{
"epoch": 0.0024865312888520514,
"grad_norm": 0.05856269970536232,
"learning_rate": 3.626070333799893e-05,
"loss": 0.0372,
"step": 18
},
{
"epoch": 0.0026246719160104987,
"grad_norm": 0.011952841654419899,
"learning_rate": 3.69389951409255e-05,
"loss": 0.0123,
"step": 19
},
{
"epoch": 0.002762812543168946,
"grad_norm": 0.0623110830783844,
"learning_rate": 3.7582487081342674e-05,
"loss": 0.0615,
"step": 20
},
{
"epoch": 0.002900953170327393,
"grad_norm": 0.020977556705474854,
"learning_rate": 3.819457639612611e-05,
"loss": 0.0215,
"step": 21
},
{
"epoch": 0.0030390937974858405,
"grad_norm": 0.02789384499192238,
"learning_rate": 3.8778185920266955e-05,
"loss": 0.0263,
"step": 22
},
{
"epoch": 0.0031772344246442877,
"grad_norm": 0.04567106068134308,
"learning_rate": 3.9335848501569437e-05,
"loss": 0.0298,
"step": 23
},
{
"epoch": 0.003315375051802735,
"grad_norm": 0.09989040344953537,
"learning_rate": 3.9869773436451475e-05,
"loss": 0.0894,
"step": 24
},
{
"epoch": 0.0034535156789611823,
"grad_norm": 0.07369614392518997,
"learning_rate": 4.038189933476214e-05,
"loss": 0.0671,
"step": 25
},
{
"epoch": 0.0035916563061196296,
"grad_norm": 0.1597849428653717,
"learning_rate": 4.087393661006097e-05,
"loss": 0.0934,
"step": 26
},
{
"epoch": 0.0037297969332780773,
"grad_norm": 0.06050405278801918,
"learning_rate": 4.13474019465272e-05,
"loss": 0.0418,
"step": 27
},
{
"epoch": 0.0038679375604365246,
"grad_norm": 0.12362563610076904,
"learning_rate": 4.180364649457864e-05,
"loss": 0.1035,
"step": 28
},
{
"epoch": 0.004006078187594971,
"grad_norm": 0.0370742604136467,
"learning_rate": 4.22438791165378e-05,
"loss": 0.0298,
"step": 29
},
{
"epoch": 0.004144218814753419,
"grad_norm": 0.13234247267246246,
"learning_rate": 4.2669185689870944e-05,
"loss": 0.1149,
"step": 30
},
{
"epoch": 0.004282359441911866,
"grad_norm": 0.11454839259386063,
"learning_rate": 4.308054524410194e-05,
"loss": 0.0825,
"step": 31
},
{
"epoch": 0.004420500069070314,
"grad_norm": 0.055333029478788376,
"learning_rate": 4.347884353490401e-05,
"loss": 0.0309,
"step": 32
},
{
"epoch": 0.0045586406962287605,
"grad_norm": 0.010015130043029785,
"learning_rate": 4.386488452879522e-05,
"loss": 0.0112,
"step": 33
},
{
"epoch": 0.004696781323387208,
"grad_norm": 0.02108495496213436,
"learning_rate": 4.423940017287234e-05,
"loss": 0.0144,
"step": 34
},
{
"epoch": 0.004834921950545655,
"grad_norm": 0.045554034411907196,
"learning_rate": 4.460305874799812e-05,
"loss": 0.0325,
"step": 35
},
{
"epoch": 0.004973062577704103,
"grad_norm": 0.05177578702569008,
"learning_rate": 4.495647204497974e-05,
"loss": 0.0333,
"step": 36
},
{
"epoch": 0.0051112032048625505,
"grad_norm": 0.08447037637233734,
"learning_rate": 4.5300201557312036e-05,
"loss": 0.0489,
"step": 37
},
{
"epoch": 0.005249343832020997,
"grad_norm": 0.03207926079630852,
"learning_rate": 4.56347638479063e-05,
"loss": 0.0217,
"step": 38
},
{
"epoch": 0.005387484459179445,
"grad_norm": 0.04534975811839104,
"learning_rate": 4.596063521858923e-05,
"loss": 0.0332,
"step": 39
},
{
"epoch": 0.005525625086337892,
"grad_norm": 0.04216322675347328,
"learning_rate": 4.627825578832348e-05,
"loss": 0.0363,
"step": 40
},
{
"epoch": 0.0056637657134963396,
"grad_norm": 0.04155502840876579,
"learning_rate": 4.65880330677802e-05,
"loss": 0.0238,
"step": 41
},
{
"epoch": 0.005801906340654786,
"grad_norm": 0.15111327171325684,
"learning_rate": 4.6890345103106905e-05,
"loss": 0.1467,
"step": 42
},
{
"epoch": 0.005940046967813234,
"grad_norm": 0.057506952434778214,
"learning_rate": 4.7185543249731356e-05,
"loss": 0.0366,
"step": 43
},
{
"epoch": 0.006078187594971681,
"grad_norm": 0.15154612064361572,
"learning_rate": 4.747395462724775e-05,
"loss": 0.2312,
"step": 44
},
{
"epoch": 0.006216328222130129,
"grad_norm": 0.1836182028055191,
"learning_rate": 4.775588429839921e-05,
"loss": 0.0679,
"step": 45
},
{
"epoch": 0.0063544688492885755,
"grad_norm": 0.04606263339519501,
"learning_rate": 4.803161720855024e-05,
"loss": 0.0216,
"step": 46
},
{
"epoch": 0.006492609476447023,
"grad_norm": 0.07287360727787018,
"learning_rate": 4.830141991656282e-05,
"loss": 0.0355,
"step": 47
},
{
"epoch": 0.00663075010360547,
"grad_norm": 0.10376694798469543,
"learning_rate": 4.856554214343227e-05,
"loss": 0.0384,
"step": 48
},
{
"epoch": 0.006768890730763918,
"grad_norm": 0.0253737885504961,
"learning_rate": 4.882421816123408e-05,
"loss": 0.0145,
"step": 49
},
{
"epoch": 0.006907031357922365,
"grad_norm": 0.045460280030965805,
"learning_rate": 4.9077668041742954e-05,
"loss": 0.0272,
"step": 50
},
{
"epoch": 0.007045171985080812,
"grad_norm": 0.00954690482467413,
"learning_rate": 4.93260987814006e-05,
"loss": 0.0086,
"step": 51
},
{
"epoch": 0.007183312612239259,
"grad_norm": 0.12355463951826096,
"learning_rate": 4.956970531704177e-05,
"loss": 0.0398,
"step": 52
},
{
"epoch": 0.007321453239397707,
"grad_norm": 0.028379764407873154,
"learning_rate": 4.9808671444865916e-05,
"loss": 0.0166,
"step": 53
},
{
"epoch": 0.0074595938665561546,
"grad_norm": 0.029451319947838783,
"learning_rate": 5.0043170653507996e-05,
"loss": 0.0285,
"step": 54
},
{
"epoch": 0.007597734493714601,
"grad_norm": 0.1718253642320633,
"learning_rate": 5.027336688066723e-05,
"loss": 0.1021,
"step": 55
},
{
"epoch": 0.007735875120873049,
"grad_norm": 0.02496183291077614,
"learning_rate": 5.049941520155945e-05,
"loss": 0.0167,
"step": 56
},
{
"epoch": 0.007874015748031496,
"grad_norm": 0.14813463389873505,
"learning_rate": 5.072146245643457e-05,
"loss": 0.0571,
"step": 57
},
{
"epoch": 0.008012156375189943,
"grad_norm": 0.004978721961379051,
"learning_rate": 5.09396478235186e-05,
"loss": 0.0063,
"step": 58
},
{
"epoch": 0.008150297002348391,
"grad_norm": 0.027830282226204872,
"learning_rate": 5.115410334297882e-05,
"loss": 0.0209,
"step": 59
},
{
"epoch": 0.008288437629506838,
"grad_norm": 0.02240496128797531,
"learning_rate": 5.136495439685174e-05,
"loss": 0.0166,
"step": 60
},
{
"epoch": 0.008426578256665285,
"grad_norm": 0.004362566862255335,
"learning_rate": 5.157232014930198e-05,
"loss": 0.005,
"step": 61
},
{
"epoch": 0.008564718883823732,
"grad_norm": 0.31787729263305664,
"learning_rate": 5.1776313951082735e-05,
"loss": 0.1389,
"step": 62
},
{
"epoch": 0.00870285951098218,
"grad_norm": 0.009066242724657059,
"learning_rate": 5.1977043711635176e-05,
"loss": 0.0081,
"step": 63
},
{
"epoch": 0.008841000138140627,
"grad_norm": 0.006892406847327948,
"learning_rate": 5.21746122418848e-05,
"loss": 0.007,
"step": 64
},
{
"epoch": 0.008979140765299074,
"grad_norm": 0.06632635742425919,
"learning_rate": 5.236911757046123e-05,
"loss": 0.0397,
"step": 65
},
{
"epoch": 0.009117281392457521,
"grad_norm": 0.03089607134461403,
"learning_rate": 5.2560653235776016e-05,
"loss": 0.0194,
"step": 66
},
{
"epoch": 0.00925542201961597,
"grad_norm": 0.010297919623553753,
"learning_rate": 5.27493085561372e-05,
"loss": 0.0094,
"step": 67
},
{
"epoch": 0.009393562646774416,
"grad_norm": 0.00596292968839407,
"learning_rate": 5.293516887985315e-05,
"loss": 0.0056,
"step": 68
},
{
"epoch": 0.009531703273932863,
"grad_norm": 0.03583955764770508,
"learning_rate": 5.31183158170785e-05,
"loss": 0.034,
"step": 69
},
{
"epoch": 0.00966984390109131,
"grad_norm": 0.022406477481126785,
"learning_rate": 5.3298827454978915e-05,
"loss": 0.0177,
"step": 70
},
{
"epoch": 0.009807984528249759,
"grad_norm": 0.02521800622344017,
"learning_rate": 5.347677855763414e-05,
"loss": 0.0137,
"step": 71
},
{
"epoch": 0.009946125155408206,
"grad_norm": 0.023860285058617592,
"learning_rate": 5.3652240751960537e-05,
"loss": 0.0223,
"step": 72
},
{
"epoch": 0.010084265782566652,
"grad_norm": 0.044385574758052826,
"learning_rate": 5.3825282700810095e-05,
"loss": 0.023,
"step": 73
},
{
"epoch": 0.010222406409725101,
"grad_norm": 0.011455412954092026,
"learning_rate": 5.399597026429285e-05,
"loss": 0.0115,
"step": 74
},
{
"epoch": 0.010360547036883548,
"grad_norm": 0.04733498394489288,
"learning_rate": 5.416436665027121e-05,
"loss": 0.0301,
"step": 75
},
{
"epoch": 0.010498687664041995,
"grad_norm": 0.00907475408166647,
"learning_rate": 5.43305325548871e-05,
"loss": 0.0068,
"step": 76
},
{
"epoch": 0.010636828291200441,
"grad_norm": 0.14968900382518768,
"learning_rate": 5.449452629390319e-05,
"loss": 0.0619,
"step": 77
},
{
"epoch": 0.01077496891835889,
"grad_norm": 0.15661145746707916,
"learning_rate": 5.4656403925570034e-05,
"loss": 0.0722,
"step": 78
},
{
"epoch": 0.010913109545517337,
"grad_norm": 0.004802990239113569,
"learning_rate": 5.481621936566648e-05,
"loss": 0.0054,
"step": 79
},
{
"epoch": 0.011051250172675784,
"grad_norm": 0.0075927311554551125,
"learning_rate": 5.4974024495304276e-05,
"loss": 0.0071,
"step": 80
},
{
"epoch": 0.01118939079983423,
"grad_norm": 0.11258488148450851,
"learning_rate": 5.5129869262036274e-05,
"loss": 0.0985,
"step": 81
},
{
"epoch": 0.011327531426992679,
"grad_norm": 0.052467264235019684,
"learning_rate": 5.5283801774761e-05,
"loss": 0.0298,
"step": 82
},
{
"epoch": 0.011465672054151126,
"grad_norm": 0.05677814409136772,
"learning_rate": 5.543586839287528e-05,
"loss": 0.0272,
"step": 83
},
{
"epoch": 0.011603812681309573,
"grad_norm": 0.11758122593164444,
"learning_rate": 5.55861138100877e-05,
"loss": 0.0555,
"step": 84
},
{
"epoch": 0.01174195330846802,
"grad_norm": 0.021579308435320854,
"learning_rate": 5.573458113327261e-05,
"loss": 0.0141,
"step": 85
},
{
"epoch": 0.011880093935626468,
"grad_norm": 0.018838461488485336,
"learning_rate": 5.5881311956712153e-05,
"loss": 0.016,
"step": 86
},
{
"epoch": 0.012018234562784915,
"grad_norm": 0.07577263563871384,
"learning_rate": 5.602634643204686e-05,
"loss": 0.0568,
"step": 87
},
{
"epoch": 0.012156375189943362,
"grad_norm": 0.16609540581703186,
"learning_rate": 5.6169723334228564e-05,
"loss": 0.2139,
"step": 88
},
{
"epoch": 0.01229451581710181,
"grad_norm": 0.008787001483142376,
"learning_rate": 5.631148012374702e-05,
"loss": 0.0088,
"step": 89
},
{
"epoch": 0.012432656444260257,
"grad_norm": 0.005372604820877314,
"learning_rate": 5.645165300538001e-05,
"loss": 0.0064,
"step": 90
},
{
"epoch": 0.012570797071418704,
"grad_norm": 0.020076818764209747,
"learning_rate": 5.65902769836972e-05,
"loss": 0.0146,
"step": 91
},
{
"epoch": 0.012708937698577151,
"grad_norm": 0.02066107466816902,
"learning_rate": 5.672738591553104e-05,
"loss": 0.0245,
"step": 92
},
{
"epoch": 0.0128470783257356,
"grad_norm": 0.03774566575884819,
"learning_rate": 5.6863012559611006e-05,
"loss": 0.0254,
"step": 93
},
{
"epoch": 0.012985218952894046,
"grad_norm": 0.0144899757578969,
"learning_rate": 5.699718862354363e-05,
"loss": 0.0153,
"step": 94
},
{
"epoch": 0.013123359580052493,
"grad_norm": 0.06870149821043015,
"learning_rate": 5.712994480830657e-05,
"loss": 0.0499,
"step": 95
},
{
"epoch": 0.01326150020721094,
"grad_norm": 0.02667887881398201,
"learning_rate": 5.726131085041308e-05,
"loss": 0.0202,
"step": 96
},
{
"epoch": 0.013399640834369389,
"grad_norm": 0.012837301008403301,
"learning_rate": 5.7391315561891545e-05,
"loss": 0.0113,
"step": 97
},
{
"epoch": 0.013537781461527836,
"grad_norm": 0.052629996091127396,
"learning_rate": 5.751998686821489e-05,
"loss": 0.0505,
"step": 98
},
{
"epoch": 0.013675922088686282,
"grad_norm": 0.020552391186356544,
"learning_rate": 5.764735184430429e-05,
"loss": 0.0186,
"step": 99
},
{
"epoch": 0.01381406271584473,
"grad_norm": 0.019090712070465088,
"learning_rate": 5.7773436748723766e-05,
"loss": 0.0126,
"step": 100
},
{
"epoch": 0.013952203343003178,
"grad_norm": 0.05564524605870247,
"learning_rate": 5.7898267056173305e-05,
"loss": 0.0311,
"step": 101
},
{
"epoch": 0.014090343970161625,
"grad_norm": 0.003988645505160093,
"learning_rate": 5.80218674883814e-05,
"loss": 0.0043,
"step": 102
},
{
"epoch": 0.014228484597320071,
"grad_norm": 0.014693093486130238,
"learning_rate": 5.814426204349046e-05,
"loss": 0.0135,
"step": 103
},
{
"epoch": 0.014366625224478518,
"grad_norm": 0.010103048756718636,
"learning_rate": 5.826547402402257e-05,
"loss": 0.0088,
"step": 104
},
{
"epoch": 0.014504765851636967,
"grad_norm": 0.06549032032489777,
"learning_rate": 5.838552606350718e-05,
"loss": 0.0285,
"step": 105
},
{
"epoch": 0.014642906478795414,
"grad_norm": 0.014358027838170528,
"learning_rate": 5.850444015184672e-05,
"loss": 0.0131,
"step": 106
},
{
"epoch": 0.01478104710595386,
"grad_norm": 0.009209529496729374,
"learning_rate": 5.862223765949125e-05,
"loss": 0.0084,
"step": 107
},
{
"epoch": 0.014919187733112309,
"grad_norm": 0.004671560134738684,
"learning_rate": 5.873893936048881e-05,
"loss": 0.0058,
"step": 108
},
{
"epoch": 0.015057328360270756,
"grad_norm": 0.018950628116726875,
"learning_rate": 5.885456545447318e-05,
"loss": 0.0122,
"step": 109
},
{
"epoch": 0.015195468987429203,
"grad_norm": 0.009532412514090538,
"learning_rate": 5.8969135587648026e-05,
"loss": 0.0081,
"step": 110
},
{
"epoch": 0.01533360961458765,
"grad_norm": 0.11297975480556488,
"learning_rate": 5.90826688728211e-05,
"loss": 0.0985,
"step": 111
},
{
"epoch": 0.015471750241746098,
"grad_norm": 0.021297922357916832,
"learning_rate": 5.9195183908540244e-05,
"loss": 0.0172,
"step": 112
},
{
"epoch": 0.015609890868904545,
"grad_norm": 0.011318448930978775,
"learning_rate": 5.930669879737879e-05,
"loss": 0.0106,
"step": 113
},
{
"epoch": 0.015748031496062992,
"grad_norm": 0.01391973253339529,
"learning_rate": 5.9417231163415365e-05,
"loss": 0.0135,
"step": 114
},
{
"epoch": 0.01588617212322144,
"grad_norm": 0.004168018698692322,
"learning_rate": 5.9526798168950515e-05,
"loss": 0.0048,
"step": 115
},
{
"epoch": 0.016024312750379886,
"grad_norm": 0.08173353970050812,
"learning_rate": 5.96354165304994e-05,
"loss": 0.0481,
"step": 116
},
{
"epoch": 0.016162453377538334,
"grad_norm": 0.03505641967058182,
"learning_rate": 5.9743102534098305e-05,
"loss": 0.0212,
"step": 117
},
{
"epoch": 0.016300594004696783,
"grad_norm": 0.020770832896232605,
"learning_rate": 5.9849872049959624e-05,
"loss": 0.0154,
"step": 118
},
{
"epoch": 0.016438734631855228,
"grad_norm": 0.013789334334433079,
"learning_rate": 5.9955740546508573e-05,
"loss": 0.0138,
"step": 119
},
{
"epoch": 0.016576875259013676,
"grad_norm": 0.004777516704052687,
"learning_rate": 6.006072310383254e-05,
"loss": 0.0051,
"step": 120
},
{
"epoch": 0.01671501588617212,
"grad_norm": 0.09303977340459824,
"learning_rate": 6.01648344265723e-05,
"loss": 0.0718,
"step": 121
},
{
"epoch": 0.01685315651333057,
"grad_norm": 0.006040315609425306,
"learning_rate": 6.026808885628279e-05,
"loss": 0.006,
"step": 122
},
{
"epoch": 0.01699129714048902,
"grad_norm": 0.0029070712625980377,
"learning_rate": 6.0370500383289253e-05,
"loss": 0.0041,
"step": 123
},
{
"epoch": 0.017129437767647464,
"grad_norm": 0.018031740561127663,
"learning_rate": 6.0472082658063546e-05,
"loss": 0.0118,
"step": 124
},
{
"epoch": 0.017267578394805912,
"grad_norm": 0.0054465411230921745,
"learning_rate": 6.057284900214323e-05,
"loss": 0.006,
"step": 125
},
{
"epoch": 0.01740571902196436,
"grad_norm": 0.024758221581578255,
"learning_rate": 6.0672812418615974e-05,
"loss": 0.0181,
"step": 126
},
{
"epoch": 0.017543859649122806,
"grad_norm": 0.060118090361356735,
"learning_rate": 6.077198560218932e-05,
"loss": 0.0314,
"step": 127
},
{
"epoch": 0.017682000276281255,
"grad_norm": 0.0032233481761068106,
"learning_rate": 6.087038094886561e-05,
"loss": 0.0044,
"step": 128
},
{
"epoch": 0.017820140903439703,
"grad_norm": 0.023020045831799507,
"learning_rate": 6.096801056524042e-05,
"loss": 0.0205,
"step": 129
},
{
"epoch": 0.01795828153059815,
"grad_norm": 0.012697757221758366,
"learning_rate": 6.106488627744203e-05,
"loss": 0.0126,
"step": 130
},
{
"epoch": 0.018096422157756597,
"grad_norm": 0.023354003205895424,
"learning_rate": 6.116101963972834e-05,
"loss": 0.012,
"step": 131
},
{
"epoch": 0.018234562784915042,
"grad_norm": 0.020162884145975113,
"learning_rate": 6.125642194275681e-05,
"loss": 0.0168,
"step": 132
},
{
"epoch": 0.01837270341207349,
"grad_norm": 0.02640388533473015,
"learning_rate": 6.135110422154255e-05,
"loss": 0.0148,
"step": 133
},
{
"epoch": 0.01851084403923194,
"grad_norm": 0.004422938451170921,
"learning_rate": 6.1445077263118e-05,
"loss": 0.0053,
"step": 134
},
{
"epoch": 0.018648984666390384,
"grad_norm": 0.0029348707757890224,
"learning_rate": 6.153835161390827e-05,
"loss": 0.0041,
"step": 135
},
{
"epoch": 0.018787125293548833,
"grad_norm": 0.008122744038701057,
"learning_rate": 6.163093758683395e-05,
"loss": 0.0072,
"step": 136
},
{
"epoch": 0.01892526592070728,
"grad_norm": 0.012685508467257023,
"learning_rate": 6.172284526815391e-05,
"loss": 0.0108,
"step": 137
},
{
"epoch": 0.019063406547865726,
"grad_norm": 0.07143422216176987,
"learning_rate": 6.181408452405932e-05,
"loss": 0.0354,
"step": 138
},
{
"epoch": 0.019201547175024175,
"grad_norm": 0.013023455627262592,
"learning_rate": 6.190466500702942e-05,
"loss": 0.0121,
"step": 139
},
{
"epoch": 0.01933968780218262,
"grad_norm": 0.11455456912517548,
"learning_rate": 6.199459616195971e-05,
"loss": 0.0916,
"step": 140
},
{
"epoch": 0.01947782842934107,
"grad_norm": 0.004115304443985224,
"learning_rate": 6.208388723207189e-05,
"loss": 0.005,
"step": 141
},
{
"epoch": 0.019615969056499517,
"grad_norm": 0.006390172056853771,
"learning_rate": 6.217254726461495e-05,
"loss": 0.0056,
"step": 142
},
{
"epoch": 0.019754109683657962,
"grad_norm": 0.0032778135500848293,
"learning_rate": 6.22605851163663e-05,
"loss": 0.0043,
"step": 143
},
{
"epoch": 0.01989225031081641,
"grad_norm": 0.005337041802704334,
"learning_rate": 6.234800945894134e-05,
"loss": 0.0054,
"step": 144
},
{
"epoch": 0.02003039093797486,
"grad_norm": 0.004124751314520836,
"learning_rate": 6.243482878391886e-05,
"loss": 0.0051,
"step": 145
},
{
"epoch": 0.020168531565133305,
"grad_norm": 0.005773474462330341,
"learning_rate": 6.25210514077909e-05,
"loss": 0.006,
"step": 146
},
{
"epoch": 0.020306672192291753,
"grad_norm": 0.003976911772042513,
"learning_rate": 6.260668547674315e-05,
"loss": 0.005,
"step": 147
},
{
"epoch": 0.020444812819450202,
"grad_norm": 0.015533742494881153,
"learning_rate": 6.269173897127364e-05,
"loss": 0.0115,
"step": 148
},
{
"epoch": 0.020582953446608647,
"grad_norm": 0.007217871956527233,
"learning_rate": 6.277621971065575e-05,
"loss": 0.007,
"step": 149
},
{
"epoch": 0.020721094073767096,
"grad_norm": 0.02741631306707859,
"learning_rate": 6.286013535725201e-05,
"loss": 0.0262,
"step": 150
},
{
"epoch": 0.02085923470092554,
"grad_norm": 0.009277957491576672,
"learning_rate": 6.294349342068456e-05,
"loss": 0.0074,
"step": 151
},
{
"epoch": 0.02099737532808399,
"grad_norm": 0.03387785702943802,
"learning_rate": 6.30263012618679e-05,
"loss": 0.02,
"step": 152
},
{
"epoch": 0.021135515955242438,
"grad_norm": 0.02018630877137184,
"learning_rate": 6.310856609690967e-05,
"loss": 0.0135,
"step": 153
},
{
"epoch": 0.021273656582400883,
"grad_norm": 0.0030231664422899485,
"learning_rate": 6.3190295000884e-05,
"loss": 0.0039,
"step": 154
},
{
"epoch": 0.02141179720955933,
"grad_norm": 0.038589298725128174,
"learning_rate": 6.327149491148301e-05,
"loss": 0.0358,
"step": 155
},
{
"epoch": 0.02154993783671778,
"grad_norm": 0.11658922582864761,
"learning_rate": 6.335217263255083e-05,
"loss": 0.0583,
"step": 156
},
{
"epoch": 0.021688078463876225,
"grad_norm": 0.015740083530545235,
"learning_rate": 6.343233483750467e-05,
"loss": 0.0134,
"step": 157
},
{
"epoch": 0.021826219091034674,
"grad_norm": 0.04937628656625748,
"learning_rate": 6.351198807264728e-05,
"loss": 0.0245,
"step": 158
},
{
"epoch": 0.021964359718193122,
"grad_norm": 0.03167060390114784,
"learning_rate": 6.359113876037498e-05,
"loss": 0.0142,
"step": 159
},
{
"epoch": 0.022102500345351567,
"grad_norm": 0.0016946723917499185,
"learning_rate": 6.366979320228508e-05,
"loss": 0.0025,
"step": 160
},
{
"epoch": 0.022240640972510016,
"grad_norm": 0.017945002764463425,
"learning_rate": 6.374795758218648e-05,
"loss": 0.0114,
"step": 161
},
{
"epoch": 0.02237878159966846,
"grad_norm": 0.015089811757206917,
"learning_rate": 6.382563796901706e-05,
"loss": 0.0104,
"step": 162
},
{
"epoch": 0.02251692222682691,
"grad_norm": 0.037023257464170456,
"learning_rate": 6.390284031967139e-05,
"loss": 0.0212,
"step": 163
},
{
"epoch": 0.022655062853985358,
"grad_norm": 0.00312859402038157,
"learning_rate": 6.397957048174181e-05,
"loss": 0.0041,
"step": 164
},
{
"epoch": 0.022793203481143803,
"grad_norm": 0.016800010576844215,
"learning_rate": 6.405583419617628e-05,
"loss": 0.0117,
"step": 165
},
{
"epoch": 0.022931344108302252,
"grad_norm": 0.007050058338791132,
"learning_rate": 6.413163709985608e-05,
"loss": 0.0077,
"step": 166
},
{
"epoch": 0.0230694847354607,
"grad_norm": 0.010559717193245888,
"learning_rate": 6.420698472809568e-05,
"loss": 0.0101,
"step": 167
},
{
"epoch": 0.023207625362619146,
"grad_norm": 0.007447375915944576,
"learning_rate": 6.428188251706851e-05,
"loss": 0.0063,
"step": 168
},
{
"epoch": 0.023345765989777594,
"grad_norm": 0.016936203464865685,
"learning_rate": 6.435633580616032e-05,
"loss": 0.0123,
"step": 169
},
{
"epoch": 0.02348390661693604,
"grad_norm": 0.012545132078230381,
"learning_rate": 6.443034984025341e-05,
"loss": 0.0106,
"step": 170
},
{
"epoch": 0.023622047244094488,
"grad_norm": 0.004759025294333696,
"learning_rate": 6.450392977194364e-05,
"loss": 0.0055,
"step": 171
},
{
"epoch": 0.023760187871252936,
"grad_norm": 0.004177019465714693,
"learning_rate": 6.457708066369296e-05,
"loss": 0.005,
"step": 172
},
{
"epoch": 0.02389832849841138,
"grad_norm": 0.005387986544519663,
"learning_rate": 6.464980748991957e-05,
"loss": 0.0052,
"step": 173
},
{
"epoch": 0.02403646912556983,
"grad_norm": 0.014265534467995167,
"learning_rate": 6.472211513902766e-05,
"loss": 0.0105,
"step": 174
},
{
"epoch": 0.02417460975272828,
"grad_norm": 0.009414114989340305,
"learning_rate": 6.47940084153792e-05,
"loss": 0.0082,
"step": 175
},
{
"epoch": 0.024312750379886724,
"grad_norm": 0.006094431504607201,
"learning_rate": 6.486549204120935e-05,
"loss": 0.007,
"step": 176
},
{
"epoch": 0.024450891007045172,
"grad_norm": 0.01183071918785572,
"learning_rate": 6.493657065848789e-05,
"loss": 0.0051,
"step": 177
},
{
"epoch": 0.02458903163420362,
"grad_norm": 0.01014156173914671,
"learning_rate": 6.500724883072781e-05,
"loss": 0.008,
"step": 178
},
{
"epoch": 0.024727172261362066,
"grad_norm": 0.011028112843632698,
"learning_rate": 6.507753104474373e-05,
"loss": 0.0117,
"step": 179
},
{
"epoch": 0.024865312888520515,
"grad_norm": 0.09993032366037369,
"learning_rate": 6.514742171236081e-05,
"loss": 0.0368,
"step": 180
},
{
"epoch": 0.02500345351567896,
"grad_norm": 0.00730351684615016,
"learning_rate": 6.521692517207681e-05,
"loss": 0.0055,
"step": 181
},
{
"epoch": 0.02514159414283741,
"grad_norm": 0.012081836350262165,
"learning_rate": 6.5286045690678e-05,
"loss": 0.0096,
"step": 182
},
{
"epoch": 0.025279734769995857,
"grad_norm": 0.03789973631501198,
"learning_rate": 6.535478746481105e-05,
"loss": 0.0214,
"step": 183
},
{
"epoch": 0.025417875397154302,
"grad_norm": 0.15203222632408142,
"learning_rate": 6.542315462251184e-05,
"loss": 0.173,
"step": 184
},
{
"epoch": 0.02555601602431275,
"grad_norm": 0.01032046414911747,
"learning_rate": 6.54911512246931e-05,
"loss": 0.0096,
"step": 185
},
{
"epoch": 0.0256941566514712,
"grad_norm": 0.028638096526265144,
"learning_rate": 6.555878126659181e-05,
"loss": 0.0145,
"step": 186
},
{
"epoch": 0.025832297278629644,
"grad_norm": 0.003815449308604002,
"learning_rate": 6.562604867917768e-05,
"loss": 0.005,
"step": 187
},
{
"epoch": 0.025970437905788093,
"grad_norm": 0.02196453884243965,
"learning_rate": 6.569295733052443e-05,
"loss": 0.0163,
"step": 188
},
{
"epoch": 0.026108578532946538,
"grad_norm": 0.027504578232765198,
"learning_rate": 6.575951102714424e-05,
"loss": 0.0236,
"step": 189
},
{
"epoch": 0.026246719160104987,
"grad_norm": 0.01594018191099167,
"learning_rate": 6.582571351528738e-05,
"loss": 0.0151,
"step": 190
},
{
"epoch": 0.026384859787263435,
"grad_norm": 0.014933554455637932,
"learning_rate": 6.589156848220747e-05,
"loss": 0.0081,
"step": 191
},
{
"epoch": 0.02652300041442188,
"grad_norm": 0.0055963508784770966,
"learning_rate": 6.595707955739388e-05,
"loss": 0.0062,
"step": 192
},
{
"epoch": 0.02666114104158033,
"grad_norm": 0.050148047506809235,
"learning_rate": 6.602225031377195e-05,
"loss": 0.0194,
"step": 193
},
{
"epoch": 0.026799281668738777,
"grad_norm": 0.029830805957317352,
"learning_rate": 6.608708426887235e-05,
"loss": 0.0205,
"step": 194
},
{
"epoch": 0.026937422295897222,
"grad_norm": 0.011922827921807766,
"learning_rate": 6.61515848859703e-05,
"loss": 0.0096,
"step": 195
},
{
"epoch": 0.02707556292305567,
"grad_norm": 0.009799070656299591,
"learning_rate": 6.621575557519569e-05,
"loss": 0.01,
"step": 196
},
{
"epoch": 0.02721370355021412,
"grad_norm": 0.01243631262332201,
"learning_rate": 6.627959969461484e-05,
"loss": 0.0099,
"step": 197
},
{
"epoch": 0.027351844177372565,
"grad_norm": 0.07121865451335907,
"learning_rate": 6.634312055128508e-05,
"loss": 0.0304,
"step": 198
},
{
"epoch": 0.027489984804531013,
"grad_norm": 0.017733663320541382,
"learning_rate": 6.64063214022826e-05,
"loss": 0.0137,
"step": 199
},
{
"epoch": 0.02762812543168946,
"grad_norm": 0.012368605472147465,
"learning_rate": 6.646920545570455e-05,
"loss": 0.0084,
"step": 200
},
{
"epoch": 0.027766266058847907,
"grad_norm": 0.02112652361392975,
"learning_rate": 6.653177587164626e-05,
"loss": 0.0179,
"step": 201
},
{
"epoch": 0.027904406686006356,
"grad_norm": 0.02623148076236248,
"learning_rate": 6.659403576315411e-05,
"loss": 0.0197,
"step": 202
},
{
"epoch": 0.0280425473131648,
"grad_norm": 0.03918753191828728,
"learning_rate": 6.665598819715483e-05,
"loss": 0.0258,
"step": 203
},
{
"epoch": 0.02818068794032325,
"grad_norm": 0.013951225206255913,
"learning_rate": 6.67176361953622e-05,
"loss": 0.0135,
"step": 204
},
{
"epoch": 0.028318828567481698,
"grad_norm": 0.007952759973704815,
"learning_rate": 6.677898273516128e-05,
"loss": 0.0089,
"step": 205
},
{
"epoch": 0.028456969194640143,
"grad_norm": 0.018430588766932487,
"learning_rate": 6.684003075047128e-05,
"loss": 0.0149,
"step": 206
},
{
"epoch": 0.02859510982179859,
"grad_norm": 0.023949814960360527,
"learning_rate": 6.690078313258756e-05,
"loss": 0.0132,
"step": 207
},
{
"epoch": 0.028733250448957037,
"grad_norm": 0.024928990751504898,
"learning_rate": 6.696124273100337e-05,
"loss": 0.0167,
"step": 208
},
{
"epoch": 0.028871391076115485,
"grad_norm": 0.030003704130649567,
"learning_rate": 6.702141235421166e-05,
"loss": 0.0172,
"step": 209
},
{
"epoch": 0.029009531703273934,
"grad_norm": 0.0721101239323616,
"learning_rate": 6.708129477048798e-05,
"loss": 0.0368,
"step": 210
},
{
"epoch": 0.02914767233043238,
"grad_norm": 0.08490179479122162,
"learning_rate": 6.71408927086548e-05,
"loss": 0.0536,
"step": 211
},
{
"epoch": 0.029285812957590827,
"grad_norm": 0.03323817253112793,
"learning_rate": 6.720020885882752e-05,
"loss": 0.0178,
"step": 212
},
{
"epoch": 0.029423953584749276,
"grad_norm": 0.1837650090456009,
"learning_rate": 6.72592458731432e-05,
"loss": 0.1725,
"step": 213
},
{
"epoch": 0.02956209421190772,
"grad_norm": 0.019839327782392502,
"learning_rate": 6.731800636647207e-05,
"loss": 0.0181,
"step": 214
},
{
"epoch": 0.02970023483906617,
"grad_norm": 0.1045767292380333,
"learning_rate": 6.737649291711243e-05,
"loss": 0.0833,
"step": 215
},
{
"epoch": 0.029838375466224618,
"grad_norm": 0.006606792565435171,
"learning_rate": 6.743470806746961e-05,
"loss": 0.0075,
"step": 216
},
{
"epoch": 0.029976516093383063,
"grad_norm": 0.03129244223237038,
"learning_rate": 6.749265432471897e-05,
"loss": 0.0193,
"step": 217
},
{
"epoch": 0.030114656720541512,
"grad_norm": 0.045653145760297775,
"learning_rate": 6.755033416145399e-05,
"loss": 0.0199,
"step": 218
},
{
"epoch": 0.030252797347699957,
"grad_norm": 0.010944471694529057,
"learning_rate": 6.760775001631917e-05,
"loss": 0.0095,
"step": 219
},
{
"epoch": 0.030390937974858406,
"grad_norm": 0.01569080539047718,
"learning_rate": 6.766490429462884e-05,
"loss": 0.0104,
"step": 220
},
{
"epoch": 0.030529078602016854,
"grad_norm": 0.021444043144583702,
"learning_rate": 6.772179936897169e-05,
"loss": 0.0142,
"step": 221
},
{
"epoch": 0.0306672192291753,
"grad_norm": 0.28277868032455444,
"learning_rate": 6.77784375798019e-05,
"loss": 0.2374,
"step": 222
},
{
"epoch": 0.030805359856333748,
"grad_norm": 0.45913952589035034,
"learning_rate": 6.783482123601665e-05,
"loss": 0.4551,
"step": 223
},
{
"epoch": 0.030943500483492196,
"grad_norm": 0.020372901111841202,
"learning_rate": 6.789095261552104e-05,
"loss": 0.0168,
"step": 224
},
{
"epoch": 0.03108164111065064,
"grad_norm": 0.004885848145931959,
"learning_rate": 6.794683396578028e-05,
"loss": 0.005,
"step": 225
},
{
"epoch": 0.03121978173780909,
"grad_norm": 0.01693105325102806,
"learning_rate": 6.80024675043596e-05,
"loss": 0.0163,
"step": 226
},
{
"epoch": 0.03135792236496754,
"grad_norm": 0.11799801141023636,
"learning_rate": 6.805785541945228e-05,
"loss": 0.0717,
"step": 227
},
{
"epoch": 0.031496062992125984,
"grad_norm": 0.036976467818021774,
"learning_rate": 6.811299987039618e-05,
"loss": 0.0245,
"step": 228
},
{
"epoch": 0.03163420361928443,
"grad_norm": 0.02155802771449089,
"learning_rate": 6.816790298817876e-05,
"loss": 0.0177,
"step": 229
},
{
"epoch": 0.03177234424644288,
"grad_norm": 0.07842877507209778,
"learning_rate": 6.822256687593131e-05,
"loss": 0.0581,
"step": 230
},
{
"epoch": 0.031910484873601326,
"grad_norm": 0.03338398411870003,
"learning_rate": 6.827699360941226e-05,
"loss": 0.0325,
"step": 231
},
{
"epoch": 0.03204862550075977,
"grad_norm": 0.032553404569625854,
"learning_rate": 6.83311852374802e-05,
"loss": 0.0322,
"step": 232
},
{
"epoch": 0.03218676612791822,
"grad_norm": 0.04135012626647949,
"learning_rate": 6.838514378255668e-05,
"loss": 0.0309,
"step": 233
},
{
"epoch": 0.03232490675507667,
"grad_norm": 0.015881696715950966,
"learning_rate": 6.84388712410791e-05,
"loss": 0.0122,
"step": 234
},
{
"epoch": 0.03246304738223511,
"grad_norm": 0.007995386607944965,
"learning_rate": 6.849236958394389e-05,
"loss": 0.0064,
"step": 235
},
{
"epoch": 0.032601188009393565,
"grad_norm": 0.02736758068203926,
"learning_rate": 6.854564075694041e-05,
"loss": 0.0235,
"step": 236
},
{
"epoch": 0.03273932863655201,
"grad_norm": 0.002618682337924838,
"learning_rate": 6.859868668117555e-05,
"loss": 0.0042,
"step": 237
},
{
"epoch": 0.032877469263710456,
"grad_norm": 0.005231305491179228,
"learning_rate": 6.865150925348938e-05,
"loss": 0.0068,
"step": 238
},
{
"epoch": 0.03301560989086891,
"grad_norm": 0.09725295752286911,
"learning_rate": 6.870411034686229e-05,
"loss": 0.0792,
"step": 239
},
{
"epoch": 0.03315375051802735,
"grad_norm": 0.050293877720832825,
"learning_rate": 6.875649181081335e-05,
"loss": 0.0283,
"step": 240
},
{
"epoch": 0.0332918911451858,
"grad_norm": 0.028621939942240715,
"learning_rate": 6.880865547179058e-05,
"loss": 0.0281,
"step": 241
},
{
"epoch": 0.03343003177234424,
"grad_norm": 0.008942426182329655,
"learning_rate": 6.886060313355311e-05,
"loss": 0.0102,
"step": 242
},
{
"epoch": 0.033568172399502695,
"grad_norm": 0.017691925168037415,
"learning_rate": 6.891233657754533e-05,
"loss": 0.0144,
"step": 243
},
{
"epoch": 0.03370631302666114,
"grad_norm": 0.06508173048496246,
"learning_rate": 6.896385756326359e-05,
"loss": 0.0417,
"step": 244
},
{
"epoch": 0.033844453653819585,
"grad_norm": 0.04150944948196411,
"learning_rate": 6.901516782861516e-05,
"loss": 0.0291,
"step": 245
},
{
"epoch": 0.03398259428097804,
"grad_norm": 0.03793828561902046,
"learning_rate": 6.906626909027006e-05,
"loss": 0.0233,
"step": 246
},
{
"epoch": 0.03412073490813648,
"grad_norm": 0.010130356065928936,
"learning_rate": 6.911716304400567e-05,
"loss": 0.0099,
"step": 247
},
{
"epoch": 0.03425887553529493,
"grad_norm": 0.13118235766887665,
"learning_rate": 6.916785136504435e-05,
"loss": 0.0867,
"step": 248
},
{
"epoch": 0.03439701616245338,
"grad_norm": 0.01260526105761528,
"learning_rate": 6.921833570838434e-05,
"loss": 0.0125,
"step": 249
},
{
"epoch": 0.034535156789611825,
"grad_norm": 0.008999558165669441,
"learning_rate": 6.926861770912402e-05,
"loss": 0.0073,
"step": 250
},
{
"epoch": 0.03467329741677027,
"grad_norm": 0.00830650795251131,
"learning_rate": 6.931869898277965e-05,
"loss": 0.0091,
"step": 251
},
{
"epoch": 0.03481143804392872,
"grad_norm": 0.008377288468182087,
"learning_rate": 6.936858112559677e-05,
"loss": 0.0075,
"step": 252
},
{
"epoch": 0.03494957867108717,
"grad_norm": 0.053149525076150894,
"learning_rate": 6.941826571485559e-05,
"loss": 0.0127,
"step": 253
},
{
"epoch": 0.03508771929824561,
"grad_norm": 0.00812042597681284,
"learning_rate": 6.946775430917013e-05,
"loss": 0.0089,
"step": 254
},
{
"epoch": 0.035225859925404064,
"grad_norm": 0.11070810258388519,
"learning_rate": 6.951704844878168e-05,
"loss": 0.0831,
"step": 255
},
{
"epoch": 0.03536400055256251,
"grad_norm": 0.012675175443291664,
"learning_rate": 6.956614965584641e-05,
"loss": 0.0114,
"step": 256
},
{
"epoch": 0.035502141179720954,
"grad_norm": 0.04479183256626129,
"learning_rate": 6.96150594347174e-05,
"loss": 0.0212,
"step": 257
},
{
"epoch": 0.035640281806879406,
"grad_norm": 0.0070395260117948055,
"learning_rate": 6.966377927222123e-05,
"loss": 0.0085,
"step": 258
},
{
"epoch": 0.03577842243403785,
"grad_norm": 0.0065799071453511715,
"learning_rate": 6.971231063792908e-05,
"loss": 0.0077,
"step": 259
},
{
"epoch": 0.0359165630611963,
"grad_norm": 0.011478329077363014,
"learning_rate": 6.976065498442284e-05,
"loss": 0.0096,
"step": 260
},
{
"epoch": 0.03605470368835474,
"grad_norm": 0.005571560934185982,
"learning_rate": 6.980881374755593e-05,
"loss": 0.0058,
"step": 261
},
{
"epoch": 0.036192844315513194,
"grad_norm": 0.15769460797309875,
"learning_rate": 6.985678834670915e-05,
"loss": 0.057,
"step": 262
},
{
"epoch": 0.03633098494267164,
"grad_norm": 0.010631869547069073,
"learning_rate": 6.990458018504173e-05,
"loss": 0.0106,
"step": 263
},
{
"epoch": 0.036469125569830084,
"grad_norm": 0.020581720396876335,
"learning_rate": 6.995219064973763e-05,
"loss": 0.0151,
"step": 264
},
{
"epoch": 0.036607266196988536,
"grad_norm": 0.15721233189105988,
"learning_rate": 6.9999621112247e-05,
"loss": 0.1997,
"step": 265
},
{
"epoch": 0.03674540682414698,
"grad_norm": 0.08665527403354645,
"learning_rate": 7.004687292852334e-05,
"loss": 0.0567,
"step": 266
},
{
"epoch": 0.036883547451305426,
"grad_norm": 0.01629851944744587,
"learning_rate": 7.009394743925609e-05,
"loss": 0.0129,
"step": 267
},
{
"epoch": 0.03702168807846388,
"grad_norm": 0.017079714685678482,
"learning_rate": 7.01408459700988e-05,
"loss": 0.0122,
"step": 268
},
{
"epoch": 0.03715982870562232,
"grad_norm": 0.011573737487196922,
"learning_rate": 7.018756983189322e-05,
"loss": 0.0111,
"step": 269
},
{
"epoch": 0.03729796933278077,
"grad_norm": 0.057232815772295,
"learning_rate": 7.023412032088907e-05,
"loss": 0.0334,
"step": 270
},
{
"epoch": 0.03743610995993922,
"grad_norm": 0.01176871731877327,
"learning_rate": 7.028049871895989e-05,
"loss": 0.0109,
"step": 271
},
{
"epoch": 0.037574250587097666,
"grad_norm": 0.005038474686443806,
"learning_rate": 7.032670629381475e-05,
"loss": 0.0057,
"step": 272
},
{
"epoch": 0.03771239121425611,
"grad_norm": 0.02611648477613926,
"learning_rate": 7.037274429920628e-05,
"loss": 0.0212,
"step": 273
},
{
"epoch": 0.03785053184141456,
"grad_norm": 0.04503704607486725,
"learning_rate": 7.04186139751347e-05,
"loss": 0.0316,
"step": 274
},
{
"epoch": 0.03798867246857301,
"grad_norm": 0.02312229387462139,
"learning_rate": 7.046431654804831e-05,
"loss": 0.0195,
"step": 275
},
{
"epoch": 0.03812681309573145,
"grad_norm": 0.07315024733543396,
"learning_rate": 7.05098532310401e-05,
"loss": 0.0422,
"step": 276
},
{
"epoch": 0.038264953722889905,
"grad_norm": 0.01006357092410326,
"learning_rate": 7.055522522404115e-05,
"loss": 0.0083,
"step": 277
},
{
"epoch": 0.03840309435004835,
"grad_norm": 0.02410217933356762,
"learning_rate": 7.060043371401022e-05,
"loss": 0.0164,
"step": 278
},
{
"epoch": 0.038541234977206795,
"grad_norm": 0.029703989624977112,
"learning_rate": 7.064547987512007e-05,
"loss": 0.0232,
"step": 279
},
{
"epoch": 0.03867937560436524,
"grad_norm": 0.03558236360549927,
"learning_rate": 7.069036486894051e-05,
"loss": 0.0194,
"step": 280
},
{
"epoch": 0.03881751623152369,
"grad_norm": 0.004356220830231905,
"learning_rate": 7.073508984461812e-05,
"loss": 0.006,
"step": 281
},
{
"epoch": 0.03895565685868214,
"grad_norm": 0.06183575093746185,
"learning_rate": 7.077965593905269e-05,
"loss": 0.0374,
"step": 282
},
{
"epoch": 0.03909379748584058,
"grad_norm": 0.03367290645837784,
"learning_rate": 7.082406427707072e-05,
"loss": 0.0147,
"step": 283
},
{
"epoch": 0.039231938112999035,
"grad_norm": 0.05731538310647011,
"learning_rate": 7.086831597159574e-05,
"loss": 0.0267,
"step": 284
},
{
"epoch": 0.03937007874015748,
"grad_norm": 0.013960711658000946,
"learning_rate": 7.091241212381564e-05,
"loss": 0.0115,
"step": 285
},
{
"epoch": 0.039508219367315925,
"grad_norm": 0.04287987947463989,
"learning_rate": 7.095635382334712e-05,
"loss": 0.0186,
"step": 286
},
{
"epoch": 0.03964635999447438,
"grad_norm": 0.02721596322953701,
"learning_rate": 7.100014214839724e-05,
"loss": 0.0153,
"step": 287
},
{
"epoch": 0.03978450062163282,
"grad_norm": 0.09616994112730026,
"learning_rate": 7.104377816592214e-05,
"loss": 0.0479,
"step": 288
},
{
"epoch": 0.03992264124879127,
"grad_norm": 0.01681303046643734,
"learning_rate": 7.108726293178307e-05,
"loss": 0.0099,
"step": 289
},
{
"epoch": 0.04006078187594972,
"grad_norm": 0.007514542900025845,
"learning_rate": 7.113059749089967e-05,
"loss": 0.007,
"step": 290
},
{
"epoch": 0.040198922503108164,
"grad_norm": 0.00759028410539031,
"learning_rate": 7.117378287740062e-05,
"loss": 0.0093,
"step": 291
},
{
"epoch": 0.04033706313026661,
"grad_norm": 0.04445767030119896,
"learning_rate": 7.12168201147717e-05,
"loss": 0.0192,
"step": 292
},
{
"epoch": 0.04047520375742506,
"grad_norm": 0.004815852735191584,
"learning_rate": 7.125971021600142e-05,
"loss": 0.0058,
"step": 293
},
{
"epoch": 0.04061334438458351,
"grad_norm": 0.01803855411708355,
"learning_rate": 7.130245418372394e-05,
"loss": 0.0189,
"step": 294
},
{
"epoch": 0.04075148501174195,
"grad_norm": 0.022579418495297432,
"learning_rate": 7.13450530103599e-05,
"loss": 0.0124,
"step": 295
},
{
"epoch": 0.040889625638900404,
"grad_norm": 0.006007243413478136,
"learning_rate": 7.138750767825444e-05,
"loss": 0.0067,
"step": 296
},
{
"epoch": 0.04102776626605885,
"grad_norm": 0.01632005348801613,
"learning_rate": 7.142981915981336e-05,
"loss": 0.0115,
"step": 297
},
{
"epoch": 0.041165906893217294,
"grad_norm": 0.009706880897283554,
"learning_rate": 7.147198841763656e-05,
"loss": 0.0096,
"step": 298
},
{
"epoch": 0.041304047520375746,
"grad_norm": 0.004515258129686117,
"learning_rate": 7.15140164046496e-05,
"loss": 0.0058,
"step": 299
},
{
"epoch": 0.04144218814753419,
"grad_norm": 0.05710865557193756,
"learning_rate": 7.155590406423282e-05,
"loss": 0.0388,
"step": 300
},
{
"epoch": 0.041580328774692636,
"grad_norm": 0.012932428158819675,
"learning_rate": 7.15976523303484e-05,
"loss": 0.0075,
"step": 301
},
{
"epoch": 0.04171846940185108,
"grad_norm": 0.08358635753393173,
"learning_rate": 7.163926212766535e-05,
"loss": 0.0512,
"step": 302
},
{
"epoch": 0.04185661002900953,
"grad_norm": 0.00845788512378931,
"learning_rate": 7.168073437168235e-05,
"loss": 0.0085,
"step": 303
},
{
"epoch": 0.04199475065616798,
"grad_norm": 0.008571326732635498,
"learning_rate": 7.172206996884871e-05,
"loss": 0.0074,
"step": 304
},
{
"epoch": 0.042132891283326424,
"grad_norm": 0.1380782574415207,
"learning_rate": 7.176326981668306e-05,
"loss": 0.0893,
"step": 305
},
{
"epoch": 0.042271031910484876,
"grad_norm": 0.10377588123083115,
"learning_rate": 7.180433480389047e-05,
"loss": 0.1076,
"step": 306
},
{
"epoch": 0.04240917253764332,
"grad_norm": 0.005449495278298855,
"learning_rate": 7.184526581047739e-05,
"loss": 0.0056,
"step": 307
},
{
"epoch": 0.042547313164801766,
"grad_norm": 0.016402093693614006,
"learning_rate": 7.18860637078648e-05,
"loss": 0.0144,
"step": 308
},
{
"epoch": 0.04268545379196022,
"grad_norm": 0.057461198419332504,
"learning_rate": 7.192672935899954e-05,
"loss": 0.0335,
"step": 309
},
{
"epoch": 0.04282359441911866,
"grad_norm": 0.018814753741025925,
"learning_rate": 7.196726361846382e-05,
"loss": 0.019,
"step": 310
},
{
"epoch": 0.04296173504627711,
"grad_norm": 0.021789349615573883,
"learning_rate": 7.200766733258301e-05,
"loss": 0.0174,
"step": 311
},
{
"epoch": 0.04309987567343556,
"grad_norm": 0.05062391981482506,
"learning_rate": 7.204794133953164e-05,
"loss": 0.0256,
"step": 312
},
{
"epoch": 0.043238016300594005,
"grad_norm": 0.05671432614326477,
"learning_rate": 7.208808646943773e-05,
"loss": 0.0402,
"step": 313
},
{
"epoch": 0.04337615692775245,
"grad_norm": 0.02019777148962021,
"learning_rate": 7.212810354448547e-05,
"loss": 0.0152,
"step": 314
},
{
"epoch": 0.0435142975549109,
"grad_norm": 0.020292505621910095,
"learning_rate": 7.216799337901625e-05,
"loss": 0.0189,
"step": 315
},
{
"epoch": 0.04365243818206935,
"grad_norm": 0.03928740695118904,
"learning_rate": 7.220775677962808e-05,
"loss": 0.0383,
"step": 316
},
{
"epoch": 0.04379057880922779,
"grad_norm": 0.005962614435702562,
"learning_rate": 7.224739454527347e-05,
"loss": 0.0069,
"step": 317
},
{
"epoch": 0.043928719436386245,
"grad_norm": 0.011562081053853035,
"learning_rate": 7.228690746735578e-05,
"loss": 0.0113,
"step": 318
},
{
"epoch": 0.04406686006354469,
"grad_norm": 0.009331891313195229,
"learning_rate": 7.232629632982394e-05,
"loss": 0.0118,
"step": 319
},
{
"epoch": 0.044205000690703135,
"grad_norm": 0.07156947255134583,
"learning_rate": 7.236556190926588e-05,
"loss": 0.0474,
"step": 320
},
{
"epoch": 0.04434314131786158,
"grad_norm": 0.0070722345262765884,
"learning_rate": 7.240470497500033e-05,
"loss": 0.0071,
"step": 321
},
{
"epoch": 0.04448128194502003,
"grad_norm": 0.24819540977478027,
"learning_rate": 7.244372628916727e-05,
"loss": 0.0668,
"step": 322
},
{
"epoch": 0.04461942257217848,
"grad_norm": 0.012170674279332161,
"learning_rate": 7.248262660681704e-05,
"loss": 0.0139,
"step": 323
},
{
"epoch": 0.04475756319933692,
"grad_norm": 0.01726563833653927,
"learning_rate": 7.252140667599786e-05,
"loss": 0.0148,
"step": 324
},
{
"epoch": 0.044895703826495374,
"grad_norm": 0.006273672450333834,
"learning_rate": 7.256006723784231e-05,
"loss": 0.0073,
"step": 325
},
{
"epoch": 0.04503384445365382,
"grad_norm": 0.02530139684677124,
"learning_rate": 7.25986090266522e-05,
"loss": 0.0145,
"step": 326
},
{
"epoch": 0.045171985080812264,
"grad_norm": 0.043121714144945145,
"learning_rate": 7.263703276998225e-05,
"loss": 0.0374,
"step": 327
},
{
"epoch": 0.045310125707970716,
"grad_norm": 0.011094048619270325,
"learning_rate": 7.26753391887226e-05,
"loss": 0.0098,
"step": 328
},
{
"epoch": 0.04544826633512916,
"grad_norm": 0.05844907462596893,
"learning_rate": 7.271352899717988e-05,
"loss": 0.0314,
"step": 329
},
{
"epoch": 0.04558640696228761,
"grad_norm": 0.008745652623474598,
"learning_rate": 7.27516029031571e-05,
"loss": 0.0107,
"step": 330
},
{
"epoch": 0.04572454758944606,
"grad_norm": 0.12875714898109436,
"learning_rate": 7.278956160803247e-05,
"loss": 0.0946,
"step": 331
},
{
"epoch": 0.045862688216604504,
"grad_norm": 0.09134670346975327,
"learning_rate": 7.282740580683686e-05,
"loss": 0.0767,
"step": 332
},
{
"epoch": 0.04600082884376295,
"grad_norm": 0.01726178452372551,
"learning_rate": 7.286513618833017e-05,
"loss": 0.0131,
"step": 333
},
{
"epoch": 0.0461389694709214,
"grad_norm": 0.006260738708078861,
"learning_rate": 7.290275343507648e-05,
"loss": 0.0069,
"step": 334
},
{
"epoch": 0.046277110098079846,
"grad_norm": 0.02264183759689331,
"learning_rate": 7.294025822351828e-05,
"loss": 0.0157,
"step": 335
},
{
"epoch": 0.04641525072523829,
"grad_norm": 0.033730894327163696,
"learning_rate": 7.297765122404931e-05,
"loss": 0.0338,
"step": 336
},
{
"epoch": 0.04655339135239674,
"grad_norm": 0.010735332034528255,
"learning_rate": 7.301493310108658e-05,
"loss": 0.0117,
"step": 337
},
{
"epoch": 0.04669153197955519,
"grad_norm": 0.13428980112075806,
"learning_rate": 7.305210451314113e-05,
"loss": 0.0662,
"step": 338
},
{
"epoch": 0.04682967260671363,
"grad_norm": 0.007416439242660999,
"learning_rate": 7.308916611288784e-05,
"loss": 0.0093,
"step": 339
},
{
"epoch": 0.04696781323387208,
"grad_norm": 0.029140817001461983,
"learning_rate": 7.312611854723422e-05,
"loss": 0.0268,
"step": 340
},
{
"epoch": 0.04710595386103053,
"grad_norm": 0.002688678679987788,
"learning_rate": 7.316296245738808e-05,
"loss": 0.0041,
"step": 341
},
{
"epoch": 0.047244094488188976,
"grad_norm": 0.09536877274513245,
"learning_rate": 7.319969847892443e-05,
"loss": 0.0513,
"step": 342
},
{
"epoch": 0.04738223511534742,
"grad_norm": 0.0194696132093668,
"learning_rate": 7.323632724185112e-05,
"loss": 0.0156,
"step": 343
},
{
"epoch": 0.04752037574250587,
"grad_norm": 0.012019694782793522,
"learning_rate": 7.327284937067376e-05,
"loss": 0.0128,
"step": 344
},
{
"epoch": 0.04765851636966432,
"grad_norm": 0.07920549809932709,
"learning_rate": 7.330926548445958e-05,
"loss": 0.0496,
"step": 345
},
{
"epoch": 0.04779665699682276,
"grad_norm": 0.012154892086982727,
"learning_rate": 7.334557619690038e-05,
"loss": 0.0163,
"step": 346
},
{
"epoch": 0.047934797623981215,
"grad_norm": 0.02246297337114811,
"learning_rate": 7.338178211637459e-05,
"loss": 0.016,
"step": 347
},
{
"epoch": 0.04807293825113966,
"grad_norm": 0.1408204585313797,
"learning_rate": 7.341788384600846e-05,
"loss": 0.1492,
"step": 348
},
{
"epoch": 0.048211078878298105,
"grad_norm": 0.03998423367738724,
"learning_rate": 7.345388198373633e-05,
"loss": 0.0386,
"step": 349
},
{
"epoch": 0.04834921950545656,
"grad_norm": 0.0037037734873592854,
"learning_rate": 7.348977712236e-05,
"loss": 0.0052,
"step": 350
},
{
"epoch": 0.048487360132615,
"grad_norm": 0.01490323431789875,
"learning_rate": 7.352556984960736e-05,
"loss": 0.0154,
"step": 351
},
{
"epoch": 0.04862550075977345,
"grad_norm": 0.022191043943166733,
"learning_rate": 7.356126074819015e-05,
"loss": 0.0173,
"step": 352
},
{
"epoch": 0.0487636413869319,
"grad_norm": 0.016748666763305664,
"learning_rate": 7.359685039586083e-05,
"loss": 0.0147,
"step": 353
},
{
"epoch": 0.048901782014090345,
"grad_norm": 0.018724625930190086,
"learning_rate": 7.363233936546869e-05,
"loss": 0.0196,
"step": 354
},
{
"epoch": 0.04903992264124879,
"grad_norm": 0.04513927921652794,
"learning_rate": 7.366772822501523e-05,
"loss": 0.024,
"step": 355
},
{
"epoch": 0.04917806326840724,
"grad_norm": 0.017063314095139503,
"learning_rate": 7.370301753770863e-05,
"loss": 0.0164,
"step": 356
},
{
"epoch": 0.04931620389556569,
"grad_norm": 0.01662248931825161,
"learning_rate": 7.373820786201764e-05,
"loss": 0.0148,
"step": 357
},
{
"epoch": 0.04945434452272413,
"grad_norm": 0.023914910852909088,
"learning_rate": 7.377329975172453e-05,
"loss": 0.0205,
"step": 358
},
{
"epoch": 0.04959248514988258,
"grad_norm": 0.01144400890916586,
"learning_rate": 7.380829375597736e-05,
"loss": 0.0135,
"step": 359
},
{
"epoch": 0.04973062577704103,
"grad_norm": 0.04367682710289955,
"learning_rate": 7.384319041934161e-05,
"loss": 0.0376,
"step": 360
},
{
"epoch": 0.049868766404199474,
"grad_norm": 0.05894111096858978,
"learning_rate": 7.3877990281851e-05,
"loss": 0.0239,
"step": 361
},
{
"epoch": 0.05000690703135792,
"grad_norm": 0.008600580506026745,
"learning_rate": 7.391269387905761e-05,
"loss": 0.01,
"step": 362
},
{
"epoch": 0.05014504765851637,
"grad_norm": 0.012402276508510113,
"learning_rate": 7.394730174208137e-05,
"loss": 0.0165,
"step": 363
},
{
"epoch": 0.05028318828567482,
"grad_norm": 0.017745958641171455,
"learning_rate": 7.398181439765882e-05,
"loss": 0.0218,
"step": 364
},
{
"epoch": 0.05042132891283326,
"grad_norm": 0.05549019202589989,
"learning_rate": 7.401623236819118e-05,
"loss": 0.0429,
"step": 365
},
{
"epoch": 0.050559469539991714,
"grad_norm": 0.0038389600813388824,
"learning_rate": 7.405055617179185e-05,
"loss": 0.0056,
"step": 366
},
{
"epoch": 0.05069761016715016,
"grad_norm": 0.14552558958530426,
"learning_rate": 7.408478632233319e-05,
"loss": 0.0786,
"step": 367
},
{
"epoch": 0.050835750794308604,
"grad_norm": 0.011212456971406937,
"learning_rate": 7.411892332949265e-05,
"loss": 0.008,
"step": 368
},
{
"epoch": 0.050973891421467056,
"grad_norm": 0.09112522006034851,
"learning_rate": 7.415296769879832e-05,
"loss": 0.0501,
"step": 369
},
{
"epoch": 0.0511120320486255,
"grad_norm": 0.0417109876871109,
"learning_rate": 7.418691993167391e-05,
"loss": 0.0289,
"step": 370
},
{
"epoch": 0.051250172675783946,
"grad_norm": 0.03118273988366127,
"learning_rate": 7.422078052548295e-05,
"loss": 0.023,
"step": 371
},
{
"epoch": 0.0513883133029424,
"grad_norm": 0.014401586726307869,
"learning_rate": 7.42545499735726e-05,
"loss": 0.0159,
"step": 372
},
{
"epoch": 0.05152645393010084,
"grad_norm": 0.0387452095746994,
"learning_rate": 7.428822876531674e-05,
"loss": 0.0359,
"step": 373
},
{
"epoch": 0.05166459455725929,
"grad_norm": 0.0016639974201098084,
"learning_rate": 7.43218173861585e-05,
"loss": 0.0028,
"step": 374
},
{
"epoch": 0.05180273518441774,
"grad_norm": 0.007370566017925739,
"learning_rate": 7.435531631765229e-05,
"loss": 0.0091,
"step": 375
},
{
"epoch": 0.051940875811576186,
"grad_norm": 0.008531290106475353,
"learning_rate": 7.438872603750523e-05,
"loss": 0.0091,
"step": 376
},
{
"epoch": 0.05207901643873463,
"grad_norm": 0.02294515073299408,
"learning_rate": 7.442204701961796e-05,
"loss": 0.0254,
"step": 377
},
{
"epoch": 0.052217157065893076,
"grad_norm": 0.02039843425154686,
"learning_rate": 7.445527973412506e-05,
"loss": 0.0208,
"step": 378
},
{
"epoch": 0.05235529769305153,
"grad_norm": 0.050735872238874435,
"learning_rate": 7.448842464743471e-05,
"loss": 0.0319,
"step": 379
},
{
"epoch": 0.05249343832020997,
"grad_norm": 0.016932478174567223,
"learning_rate": 7.452148222226818e-05,
"loss": 0.0109,
"step": 380
},
{
"epoch": 0.05263157894736842,
"grad_norm": 0.01010705903172493,
"learning_rate": 7.455445291769839e-05,
"loss": 0.0106,
"step": 381
},
{
"epoch": 0.05276971957452687,
"grad_norm": 0.2523377239704132,
"learning_rate": 7.458733718918828e-05,
"loss": 0.1156,
"step": 382
},
{
"epoch": 0.052907860201685315,
"grad_norm": 0.006449633743613958,
"learning_rate": 7.462013548862848e-05,
"loss": 0.0086,
"step": 383
},
{
"epoch": 0.05304600082884376,
"grad_norm": 0.0557723194360733,
"learning_rate": 7.465284826437468e-05,
"loss": 0.0106,
"step": 384
},
{
"epoch": 0.05318414145600221,
"grad_norm": 0.0064123859629035,
"learning_rate": 7.468547596128427e-05,
"loss": 0.0085,
"step": 385
},
{
"epoch": 0.05332228208316066,
"grad_norm": 0.009457213804125786,
"learning_rate": 7.471801902075274e-05,
"loss": 0.0117,
"step": 386
},
{
"epoch": 0.0534604227103191,
"grad_norm": 0.0031539765186607838,
"learning_rate": 7.475047788074949e-05,
"loss": 0.0047,
"step": 387
},
{
"epoch": 0.053598563337477555,
"grad_norm": 0.023614773526787758,
"learning_rate": 7.478285297585316e-05,
"loss": 0.0219,
"step": 388
},
{
"epoch": 0.053736703964636,
"grad_norm": 0.10714581608772278,
"learning_rate": 7.481514473728654e-05,
"loss": 0.0712,
"step": 389
},
{
"epoch": 0.053874844591794445,
"grad_norm": 0.014973816461861134,
"learning_rate": 7.484735359295111e-05,
"loss": 0.0115,
"step": 390
},
{
"epoch": 0.0540129852189529,
"grad_norm": 0.035133246332407,
"learning_rate": 7.487947996746097e-05,
"loss": 0.0189,
"step": 391
},
{
"epoch": 0.05415112584611134,
"grad_norm": 0.01953275129199028,
"learning_rate": 7.491152428217649e-05,
"loss": 0.0255,
"step": 392
},
{
"epoch": 0.05428926647326979,
"grad_norm": 0.012475092895328999,
"learning_rate": 7.494348695523741e-05,
"loss": 0.0117,
"step": 393
},
{
"epoch": 0.05442740710042824,
"grad_norm": 0.018982090055942535,
"learning_rate": 7.497536840159565e-05,
"loss": 0.0186,
"step": 394
},
{
"epoch": 0.054565547727586684,
"grad_norm": 0.0650404840707779,
"learning_rate": 7.500716903304755e-05,
"loss": 0.0505,
"step": 395
},
{
"epoch": 0.05470368835474513,
"grad_norm": 0.02308955229818821,
"learning_rate": 7.503888925826588e-05,
"loss": 0.0208,
"step": 396
},
{
"epoch": 0.054841828981903575,
"grad_norm": 0.006316799204796553,
"learning_rate": 7.50705294828313e-05,
"loss": 0.009,
"step": 397
},
{
"epoch": 0.05497996960906203,
"grad_norm": 0.03284195438027382,
"learning_rate": 7.510209010926341e-05,
"loss": 0.0286,
"step": 398
},
{
"epoch": 0.05511811023622047,
"grad_norm": 0.03874586522579193,
"learning_rate": 7.51335715370516e-05,
"loss": 0.0282,
"step": 399
},
{
"epoch": 0.05525625086337892,
"grad_norm": 0.017570259049534798,
"learning_rate": 7.516497416268535e-05,
"loss": 0.0159,
"step": 400
},
{
"epoch": 0.05539439149053737,
"grad_norm": 0.1028120145201683,
"learning_rate": 7.519629837968414e-05,
"loss": 0.0638,
"step": 401
},
{
"epoch": 0.055532532117695814,
"grad_norm": 0.04638221859931946,
"learning_rate": 7.522754457862707e-05,
"loss": 0.0261,
"step": 402
},
{
"epoch": 0.05567067274485426,
"grad_norm": 0.006671587936580181,
"learning_rate": 7.52587131471821e-05,
"loss": 0.0098,
"step": 403
},
{
"epoch": 0.05580881337201271,
"grad_norm": 0.0016172940377146006,
"learning_rate": 7.528980447013491e-05,
"loss": 0.0032,
"step": 404
},
{
"epoch": 0.055946953999171156,
"grad_norm": 0.009872229769825935,
"learning_rate": 7.532081892941734e-05,
"loss": 0.0102,
"step": 405
},
{
"epoch": 0.0560850946263296,
"grad_norm": 0.014002018608152866,
"learning_rate": 7.535175690413565e-05,
"loss": 0.0126,
"step": 406
},
{
"epoch": 0.05622323525348805,
"grad_norm": 0.01186671108007431,
"learning_rate": 7.538261877059817e-05,
"loss": 0.0151,
"step": 407
},
{
"epoch": 0.0563613758806465,
"grad_norm": 0.010572116822004318,
"learning_rate": 7.541340490234301e-05,
"loss": 0.0128,
"step": 408
},
{
"epoch": 0.056499516507804944,
"grad_norm": 0.006026785355061293,
"learning_rate": 7.544411567016487e-05,
"loss": 0.009,
"step": 409
},
{
"epoch": 0.056637657134963396,
"grad_norm": 0.0200533214956522,
"learning_rate": 7.547475144214207e-05,
"loss": 0.0171,
"step": 410
},
{
"epoch": 0.05677579776212184,
"grad_norm": 0.016931114718317986,
"learning_rate": 7.550531258366296e-05,
"loss": 0.015,
"step": 411
},
{
"epoch": 0.056913938389280286,
"grad_norm": 0.011858226731419563,
"learning_rate": 7.553579945745208e-05,
"loss": 0.0113,
"step": 412
},
{
"epoch": 0.05705207901643874,
"grad_norm": 0.02081177569925785,
"learning_rate": 7.556621242359586e-05,
"loss": 0.0161,
"step": 413
},
{
"epoch": 0.05719021964359718,
"grad_norm": 0.008650501258671284,
"learning_rate": 7.559655183956836e-05,
"loss": 0.0096,
"step": 414
},
{
"epoch": 0.05732836027075563,
"grad_norm": 0.008785598911345005,
"learning_rate": 7.562681806025635e-05,
"loss": 0.0096,
"step": 415
},
{
"epoch": 0.05746650089791407,
"grad_norm": 0.004337130580097437,
"learning_rate": 7.565701143798417e-05,
"loss": 0.006,
"step": 416
},
{
"epoch": 0.057604641525072525,
"grad_norm": 0.01971902698278427,
"learning_rate": 7.568713232253847e-05,
"loss": 0.0144,
"step": 417
},
{
"epoch": 0.05774278215223097,
"grad_norm": 0.003371615894138813,
"learning_rate": 7.571718106119245e-05,
"loss": 0.004,
"step": 418
},
{
"epoch": 0.057880922779389415,
"grad_norm": 0.08240024745464325,
"learning_rate": 7.574715799872985e-05,
"loss": 0.0443,
"step": 419
},
{
"epoch": 0.05801906340654787,
"grad_norm": 0.013157228007912636,
"learning_rate": 7.577706347746878e-05,
"loss": 0.01,
"step": 420
},
{
"epoch": 0.05815720403370631,
"grad_norm": 0.08836307376623154,
"learning_rate": 7.58068978372851e-05,
"loss": 0.053,
"step": 421
},
{
"epoch": 0.05829534466086476,
"grad_norm": 0.008075368590652943,
"learning_rate": 7.58366614156356e-05,
"loss": 0.0103,
"step": 422
},
{
"epoch": 0.05843348528802321,
"grad_norm": 0.013115230947732925,
"learning_rate": 7.586635454758096e-05,
"loss": 0.0145,
"step": 423
},
{
"epoch": 0.058571625915181655,
"grad_norm": 0.041858140379190445,
"learning_rate": 7.589597756580832e-05,
"loss": 0.0266,
"step": 424
},
{
"epoch": 0.0587097665423401,
"grad_norm": 0.006484493613243103,
"learning_rate": 7.592553080065369e-05,
"loss": 0.007,
"step": 425
},
{
"epoch": 0.05884790716949855,
"grad_norm": 0.08854290097951889,
"learning_rate": 7.5955014580124e-05,
"loss": 0.0769,
"step": 426
},
{
"epoch": 0.058986047796657,
"grad_norm": 0.03287418931722641,
"learning_rate": 7.598442922991903e-05,
"loss": 0.0359,
"step": 427
},
{
"epoch": 0.05912418842381544,
"grad_norm": 0.0041122897528111935,
"learning_rate": 7.601377507345287e-05,
"loss": 0.0056,
"step": 428
},
{
"epoch": 0.059262329050973894,
"grad_norm": 0.01932491548359394,
"learning_rate": 7.604305243187539e-05,
"loss": 0.0185,
"step": 429
},
{
"epoch": 0.05940046967813234,
"grad_norm": 0.034386295825242996,
"learning_rate": 7.607226162409324e-05,
"loss": 0.0259,
"step": 430
},
{
"epoch": 0.059538610305290784,
"grad_norm": 0.014645845629274845,
"learning_rate": 7.610140296679071e-05,
"loss": 0.0159,
"step": 431
},
{
"epoch": 0.059676750932449237,
"grad_norm": 0.025981949642300606,
"learning_rate": 7.613047677445041e-05,
"loss": 0.0199,
"step": 432
},
{
"epoch": 0.05981489155960768,
"grad_norm": 0.00898673851042986,
"learning_rate": 7.615948335937346e-05,
"loss": 0.0087,
"step": 433
},
{
"epoch": 0.05995303218676613,
"grad_norm": 0.005743580870330334,
"learning_rate": 7.618842303169978e-05,
"loss": 0.0069,
"step": 434
},
{
"epoch": 0.06009117281392457,
"grad_norm": 0.00816387590020895,
"learning_rate": 7.621729609942794e-05,
"loss": 0.0101,
"step": 435
},
{
"epoch": 0.060229313441083024,
"grad_norm": 0.021145803853869438,
"learning_rate": 7.624610286843479e-05,
"loss": 0.0145,
"step": 436
},
{
"epoch": 0.06036745406824147,
"grad_norm": 0.01337206270545721,
"learning_rate": 7.627484364249493e-05,
"loss": 0.0123,
"step": 437
},
{
"epoch": 0.060505594695399914,
"grad_norm": 0.03297748416662216,
"learning_rate": 7.630351872329997e-05,
"loss": 0.0201,
"step": 438
},
{
"epoch": 0.060643735322558366,
"grad_norm": 0.05304405465722084,
"learning_rate": 7.633212841047743e-05,
"loss": 0.026,
"step": 439
},
{
"epoch": 0.06078187594971681,
"grad_norm": 0.009639502502977848,
"learning_rate": 7.636067300160962e-05,
"loss": 0.0098,
"step": 440
},
{
"epoch": 0.060920016576875256,
"grad_norm": 0.08749550580978394,
"learning_rate": 7.638915279225222e-05,
"loss": 0.0489,
"step": 441
},
{
"epoch": 0.06105815720403371,
"grad_norm": 0.0455004945397377,
"learning_rate": 7.64175680759525e-05,
"loss": 0.0276,
"step": 442
},
{
"epoch": 0.061196297831192153,
"grad_norm": 0.07244568318128586,
"learning_rate": 7.644591914426769e-05,
"loss": 0.0554,
"step": 443
},
{
"epoch": 0.0613344384583506,
"grad_norm": 0.007952029816806316,
"learning_rate": 7.64742062867827e-05,
"loss": 0.0101,
"step": 444
},
{
"epoch": 0.06147257908550905,
"grad_norm": 0.045144565403461456,
"learning_rate": 7.65024297911281e-05,
"loss": 0.0249,
"step": 445
},
{
"epoch": 0.061610719712667496,
"grad_norm": 0.00880199670791626,
"learning_rate": 7.653058994299745e-05,
"loss": 0.0077,
"step": 446
},
{
"epoch": 0.06174886033982594,
"grad_norm": 0.015727581456303596,
"learning_rate": 7.655868702616483e-05,
"loss": 0.0163,
"step": 447
},
{
"epoch": 0.06188700096698439,
"grad_norm": 0.002647354966029525,
"learning_rate": 7.658672132250185e-05,
"loss": 0.0046,
"step": 448
},
{
"epoch": 0.06202514159414284,
"grad_norm": 0.0048969099298119545,
"learning_rate": 7.661469311199475e-05,
"loss": 0.0063,
"step": 449
},
{
"epoch": 0.06216328222130128,
"grad_norm": 0.010342615656554699,
"learning_rate": 7.664260267276109e-05,
"loss": 0.011,
"step": 450
},
{
"epoch": 0.062301422848459735,
"grad_norm": 0.004208502359688282,
"learning_rate": 7.667045028106635e-05,
"loss": 0.0059,
"step": 451
},
{
"epoch": 0.06243956347561818,
"grad_norm": 0.012316681444644928,
"learning_rate": 7.66982362113404e-05,
"loss": 0.0106,
"step": 452
},
{
"epoch": 0.06257770410277663,
"grad_norm": 0.006314276251941919,
"learning_rate": 7.672596073619361e-05,
"loss": 0.0086,
"step": 453
},
{
"epoch": 0.06271584472993508,
"grad_norm": 0.004086961969733238,
"learning_rate": 7.675362412643307e-05,
"loss": 0.0052,
"step": 454
},
{
"epoch": 0.06285398535709352,
"grad_norm": 0.048082876950502396,
"learning_rate": 7.678122665107829e-05,
"loss": 0.0333,
"step": 455
},
{
"epoch": 0.06299212598425197,
"grad_norm": 0.01329890824854374,
"learning_rate": 7.680876857737698e-05,
"loss": 0.0115,
"step": 456
},
{
"epoch": 0.06313026661141041,
"grad_norm": 0.02435714565217495,
"learning_rate": 7.683625017082056e-05,
"loss": 0.0225,
"step": 457
},
{
"epoch": 0.06326840723856886,
"grad_norm": 0.09511169046163559,
"learning_rate": 7.686367169515956e-05,
"loss": 0.084,
"step": 458
},
{
"epoch": 0.06340654786572732,
"grad_norm": 0.01172893587499857,
"learning_rate": 7.689103341241874e-05,
"loss": 0.0115,
"step": 459
},
{
"epoch": 0.06354468849288576,
"grad_norm": 0.022892113775014877,
"learning_rate": 7.691833558291211e-05,
"loss": 0.0145,
"step": 460
},
{
"epoch": 0.06368282912004421,
"grad_norm": 0.005384576041251421,
"learning_rate": 7.694557846525789e-05,
"loss": 0.0055,
"step": 461
},
{
"epoch": 0.06382096974720265,
"grad_norm": 0.007982897572219372,
"learning_rate": 7.697276231639306e-05,
"loss": 0.009,
"step": 462
},
{
"epoch": 0.0639591103743611,
"grad_norm": 0.014571700245141983,
"learning_rate": 7.699988739158804e-05,
"loss": 0.0134,
"step": 463
},
{
"epoch": 0.06409725100151954,
"grad_norm": 0.048437412828207016,
"learning_rate": 7.7026953944461e-05,
"loss": 0.0285,
"step": 464
},
{
"epoch": 0.06423539162867799,
"grad_norm": 0.1020222008228302,
"learning_rate": 7.705396222699208e-05,
"loss": 0.0604,
"step": 465
},
{
"epoch": 0.06437353225583645,
"grad_norm": 0.002950117690488696,
"learning_rate": 7.708091248953748e-05,
"loss": 0.0039,
"step": 466
},
{
"epoch": 0.06451167288299489,
"grad_norm": 0.011465366929769516,
"learning_rate": 7.710780498084345e-05,
"loss": 0.0105,
"step": 467
},
{
"epoch": 0.06464981351015334,
"grad_norm": 0.009519292041659355,
"learning_rate": 7.71346399480599e-05,
"loss": 0.0091,
"step": 468
},
{
"epoch": 0.06478795413731178,
"grad_norm": 0.009986692108213902,
"learning_rate": 7.716141763675424e-05,
"loss": 0.0111,
"step": 469
},
{
"epoch": 0.06492609476447023,
"grad_norm": 0.01423732005059719,
"learning_rate": 7.718813829092471e-05,
"loss": 0.0126,
"step": 470
},
{
"epoch": 0.06506423539162867,
"grad_norm": 0.008685347624123096,
"learning_rate": 7.721480215301373e-05,
"loss": 0.0114,
"step": 471
},
{
"epoch": 0.06520237601878713,
"grad_norm": 0.003593148896470666,
"learning_rate": 7.724140946392123e-05,
"loss": 0.0048,
"step": 472
},
{
"epoch": 0.06534051664594558,
"grad_norm": 0.019208243116736412,
"learning_rate": 7.726796046301751e-05,
"loss": 0.0131,
"step": 473
},
{
"epoch": 0.06547865727310402,
"grad_norm": 0.0028865146450698376,
"learning_rate": 7.729445538815635e-05,
"loss": 0.004,
"step": 474
},
{
"epoch": 0.06561679790026247,
"grad_norm": 0.006914031691849232,
"learning_rate": 7.732089447568765e-05,
"loss": 0.0073,
"step": 475
},
{
"epoch": 0.06575493852742091,
"grad_norm": 0.005969736259430647,
"learning_rate": 7.734727796047018e-05,
"loss": 0.0061,
"step": 476
},
{
"epoch": 0.06589307915457936,
"grad_norm": 0.009544942528009415,
"learning_rate": 7.737360607588405e-05,
"loss": 0.0109,
"step": 477
},
{
"epoch": 0.06603121978173782,
"grad_norm": 0.017079075798392296,
"learning_rate": 7.73998790538431e-05,
"loss": 0.0119,
"step": 478
},
{
"epoch": 0.06616936040889626,
"grad_norm": 0.006762394681572914,
"learning_rate": 7.742609712480713e-05,
"loss": 0.006,
"step": 479
},
{
"epoch": 0.0663075010360547,
"grad_norm": 0.006828032899647951,
"learning_rate": 7.745226051779415e-05,
"loss": 0.0069,
"step": 480
},
{
"epoch": 0.06644564166321315,
"grad_norm": 0.008298908360302448,
"learning_rate": 7.74783694603922e-05,
"loss": 0.0083,
"step": 481
},
{
"epoch": 0.0665837822903716,
"grad_norm": 0.15002965927124023,
"learning_rate": 7.750442417877138e-05,
"loss": 0.0388,
"step": 482
},
{
"epoch": 0.06672192291753004,
"grad_norm": 0.00954232458025217,
"learning_rate": 7.753042489769554e-05,
"loss": 0.0088,
"step": 483
},
{
"epoch": 0.06686006354468849,
"grad_norm": 0.10171414166688919,
"learning_rate": 7.755637184053391e-05,
"loss": 0.058,
"step": 484
},
{
"epoch": 0.06699820417184695,
"grad_norm": 0.0051827989518642426,
"learning_rate": 7.758226522927262e-05,
"loss": 0.0043,
"step": 485
},
{
"epoch": 0.06713634479900539,
"grad_norm": 0.00380541174672544,
"learning_rate": 7.760810528452614e-05,
"loss": 0.0052,
"step": 486
},
{
"epoch": 0.06727448542616384,
"grad_norm": 0.0701836422085762,
"learning_rate": 7.763389222554847e-05,
"loss": 0.0342,
"step": 487
},
{
"epoch": 0.06741262605332228,
"grad_norm": 0.0025863810442388058,
"learning_rate": 7.765962627024439e-05,
"loss": 0.0041,
"step": 488
},
{
"epoch": 0.06755076668048073,
"grad_norm": 0.004156921990215778,
"learning_rate": 7.768530763518046e-05,
"loss": 0.0057,
"step": 489
},
{
"epoch": 0.06768890730763917,
"grad_norm": 0.020250679925084114,
"learning_rate": 7.771093653559595e-05,
"loss": 0.0098,
"step": 490
},
{
"epoch": 0.06782704793479763,
"grad_norm": 0.008420642465353012,
"learning_rate": 7.773651318541372e-05,
"loss": 0.0073,
"step": 491
},
{
"epoch": 0.06796518856195607,
"grad_norm": 0.021448403596878052,
"learning_rate": 7.776203779725086e-05,
"loss": 0.0201,
"step": 492
},
{
"epoch": 0.06810332918911452,
"grad_norm": 0.00401209807023406,
"learning_rate": 7.778751058242933e-05,
"loss": 0.0044,
"step": 493
},
{
"epoch": 0.06824146981627296,
"grad_norm": 0.004071755334734917,
"learning_rate": 7.781293175098647e-05,
"loss": 0.0048,
"step": 494
},
{
"epoch": 0.06837961044343141,
"grad_norm": 0.07812987267971039,
"learning_rate": 7.783830151168537e-05,
"loss": 0.0596,
"step": 495
},
{
"epoch": 0.06851775107058986,
"grad_norm": 0.006245663855224848,
"learning_rate": 7.786362007202515e-05,
"loss": 0.0084,
"step": 496
},
{
"epoch": 0.06865589169774831,
"grad_norm": 0.029952548444271088,
"learning_rate": 7.788888763825119e-05,
"loss": 0.0229,
"step": 497
},
{
"epoch": 0.06879403232490676,
"grad_norm": 0.00528394291177392,
"learning_rate": 7.791410441536515e-05,
"loss": 0.0072,
"step": 498
},
{
"epoch": 0.0689321729520652,
"grad_norm": 0.004051877185702324,
"learning_rate": 7.793927060713498e-05,
"loss": 0.0049,
"step": 499
},
{
"epoch": 0.06907031357922365,
"grad_norm": 0.036640387028455734,
"learning_rate": 7.796438641610483e-05,
"loss": 0.0186,
"step": 500
},
{
"epoch": 0.0692084542063821,
"grad_norm": 0.004171683453023434,
"learning_rate": 7.798945204360475e-05,
"loss": 0.0067,
"step": 501
},
{
"epoch": 0.06934659483354054,
"grad_norm": 0.012148253619670868,
"learning_rate": 7.801446768976046e-05,
"loss": 0.0156,
"step": 502
},
{
"epoch": 0.06948473546069898,
"grad_norm": 0.0024077165871858597,
"learning_rate": 7.803943355350285e-05,
"loss": 0.0042,
"step": 503
},
{
"epoch": 0.06962287608785744,
"grad_norm": 0.018709257245063782,
"learning_rate": 7.806434983257758e-05,
"loss": 0.0147,
"step": 504
},
{
"epoch": 0.06976101671501589,
"grad_norm": 0.004529232159256935,
"learning_rate": 7.808921672355438e-05,
"loss": 0.0051,
"step": 505
},
{
"epoch": 0.06989915734217433,
"grad_norm": 0.08686839044094086,
"learning_rate": 7.811403442183638e-05,
"loss": 0.0485,
"step": 506
},
{
"epoch": 0.07003729796933278,
"grad_norm": 0.009920633397996426,
"learning_rate": 7.81388031216694e-05,
"loss": 0.0093,
"step": 507
},
{
"epoch": 0.07017543859649122,
"grad_norm": 0.11127305775880814,
"learning_rate": 7.816352301615093e-05,
"loss": 0.0544,
"step": 508
},
{
"epoch": 0.07031357922364967,
"grad_norm": 0.010723556391894817,
"learning_rate": 7.818819429723929e-05,
"loss": 0.0121,
"step": 509
},
{
"epoch": 0.07045171985080813,
"grad_norm": 0.009187200106680393,
"learning_rate": 7.821281715576248e-05,
"loss": 0.0091,
"step": 510
},
{
"epoch": 0.07058986047796657,
"grad_norm": 0.012122333981096745,
"learning_rate": 7.823739178142714e-05,
"loss": 0.0117,
"step": 511
},
{
"epoch": 0.07072800110512502,
"grad_norm": 0.008281617425382137,
"learning_rate": 7.826191836282722e-05,
"loss": 0.0073,
"step": 512
},
{
"epoch": 0.07086614173228346,
"grad_norm": 0.018169116228818893,
"learning_rate": 7.82863970874527e-05,
"loss": 0.0199,
"step": 513
},
{
"epoch": 0.07100428235944191,
"grad_norm": 0.02657570131123066,
"learning_rate": 7.831082814169822e-05,
"loss": 0.0206,
"step": 514
},
{
"epoch": 0.07114242298660035,
"grad_norm": 0.006763003766536713,
"learning_rate": 7.833521171087153e-05,
"loss": 0.0081,
"step": 515
},
{
"epoch": 0.07128056361375881,
"grad_norm": 0.14291508495807648,
"learning_rate": 7.835954797920203e-05,
"loss": 0.1127,
"step": 516
},
{
"epoch": 0.07141870424091726,
"grad_norm": 0.015182922594249249,
"learning_rate": 7.838383712984899e-05,
"loss": 0.0183,
"step": 517
},
{
"epoch": 0.0715568448680757,
"grad_norm": 0.006437589880079031,
"learning_rate": 7.840807934490987e-05,
"loss": 0.007,
"step": 518
},
{
"epoch": 0.07169498549523415,
"grad_norm": 0.006644203793257475,
"learning_rate": 7.843227480542863e-05,
"loss": 0.007,
"step": 519
},
{
"epoch": 0.0718331261223926,
"grad_norm": 0.0024770349264144897,
"learning_rate": 7.845642369140365e-05,
"loss": 0.0037,
"step": 520
},
{
"epoch": 0.07197126674955104,
"grad_norm": 0.007362083997577429,
"learning_rate": 7.848052618179587e-05,
"loss": 0.0083,
"step": 521
},
{
"epoch": 0.07210940737670948,
"grad_norm": 0.019205942749977112,
"learning_rate": 7.850458245453673e-05,
"loss": 0.0222,
"step": 522
},
{
"epoch": 0.07224754800386794,
"grad_norm": 0.009427196346223354,
"learning_rate": 7.852859268653608e-05,
"loss": 0.0113,
"step": 523
},
{
"epoch": 0.07238568863102639,
"grad_norm": 0.006181191653013229,
"learning_rate": 7.855255705368995e-05,
"loss": 0.006,
"step": 524
},
{
"epoch": 0.07252382925818483,
"grad_norm": 0.0038815487641841173,
"learning_rate": 7.857647573088825e-05,
"loss": 0.006,
"step": 525
},
{
"epoch": 0.07266196988534328,
"grad_norm": 0.002488507889211178,
"learning_rate": 7.860034889202254e-05,
"loss": 0.0038,
"step": 526
},
{
"epoch": 0.07280011051250172,
"grad_norm": 0.007900571450591087,
"learning_rate": 7.862417670999348e-05,
"loss": 0.0086,
"step": 527
},
{
"epoch": 0.07293825113966017,
"grad_norm": 0.025396516546607018,
"learning_rate": 7.864795935671842e-05,
"loss": 0.0152,
"step": 528
},
{
"epoch": 0.07307639176681863,
"grad_norm": 0.03293461725115776,
"learning_rate": 7.867169700313887e-05,
"loss": 0.0226,
"step": 529
},
{
"epoch": 0.07321453239397707,
"grad_norm": 0.017437491565942764,
"learning_rate": 7.869538981922779e-05,
"loss": 0.0159,
"step": 530
},
{
"epoch": 0.07335267302113552,
"grad_norm": 0.005919649265706539,
"learning_rate": 7.871903797399694e-05,
"loss": 0.0073,
"step": 531
},
{
"epoch": 0.07349081364829396,
"grad_norm": 0.009071402251720428,
"learning_rate": 7.874264163550414e-05,
"loss": 0.0095,
"step": 532
},
{
"epoch": 0.07362895427545241,
"grad_norm": 0.005668788682669401,
"learning_rate": 7.876620097086035e-05,
"loss": 0.0073,
"step": 533
},
{
"epoch": 0.07376709490261085,
"grad_norm": 0.02198665961623192,
"learning_rate": 7.87897161462369e-05,
"loss": 0.0133,
"step": 534
},
{
"epoch": 0.07390523552976931,
"grad_norm": 0.00984006654471159,
"learning_rate": 7.881318732687234e-05,
"loss": 0.008,
"step": 535
},
{
"epoch": 0.07404337615692776,
"grad_norm": 0.006679135840386152,
"learning_rate": 7.88366146770796e-05,
"loss": 0.0078,
"step": 536
},
{
"epoch": 0.0741815167840862,
"grad_norm": 0.016889598220586777,
"learning_rate": 7.88599983602528e-05,
"loss": 0.0151,
"step": 537
},
{
"epoch": 0.07431965741124465,
"grad_norm": 0.09604030102491379,
"learning_rate": 7.888333853887401e-05,
"loss": 0.0759,
"step": 538
},
{
"epoch": 0.07445779803840309,
"grad_norm": 0.01921008713543415,
"learning_rate": 7.890663537452024e-05,
"loss": 0.0166,
"step": 539
},
{
"epoch": 0.07459593866556154,
"grad_norm": 0.008759930729866028,
"learning_rate": 7.892988902786988e-05,
"loss": 0.01,
"step": 540
},
{
"epoch": 0.07473407929271998,
"grad_norm": 0.0019715612288564444,
"learning_rate": 7.895309965870956e-05,
"loss": 0.0036,
"step": 541
},
{
"epoch": 0.07487221991987844,
"grad_norm": 0.012198393233120441,
"learning_rate": 7.897626742594068e-05,
"loss": 0.0103,
"step": 542
},
{
"epoch": 0.07501036054703689,
"grad_norm": 0.009106654673814774,
"learning_rate": 7.899939248758587e-05,
"loss": 0.0087,
"step": 543
},
{
"epoch": 0.07514850117419533,
"grad_norm": 0.010135483928024769,
"learning_rate": 7.902247500079554e-05,
"loss": 0.0102,
"step": 544
},
{
"epoch": 0.07528664180135378,
"grad_norm": 0.03742365166544914,
"learning_rate": 7.904551512185426e-05,
"loss": 0.016,
"step": 545
},
{
"epoch": 0.07542478242851222,
"grad_norm": 0.004954047035425901,
"learning_rate": 7.906851300618707e-05,
"loss": 0.0065,
"step": 546
},
{
"epoch": 0.07556292305567067,
"grad_norm": 0.02959989383816719,
"learning_rate": 7.909146880836586e-05,
"loss": 0.0156,
"step": 547
},
{
"epoch": 0.07570106368282913,
"grad_norm": 0.016155797988176346,
"learning_rate": 7.911438268211552e-05,
"loss": 0.0175,
"step": 548
},
{
"epoch": 0.07583920430998757,
"grad_norm": 0.007207171525806189,
"learning_rate": 7.913725478032012e-05,
"loss": 0.0074,
"step": 549
},
{
"epoch": 0.07597734493714602,
"grad_norm": 0.004485739395022392,
"learning_rate": 7.916008525502909e-05,
"loss": 0.0054,
"step": 550
},
{
"epoch": 0.07611548556430446,
"grad_norm": 0.02434372715651989,
"learning_rate": 7.91828742574633e-05,
"loss": 0.0146,
"step": 551
},
{
"epoch": 0.0762536261914629,
"grad_norm": 0.01013586763292551,
"learning_rate": 7.92056219380209e-05,
"loss": 0.0116,
"step": 552
},
{
"epoch": 0.07639176681862135,
"grad_norm": 0.0093398317694664,
"learning_rate": 7.922832844628351e-05,
"loss": 0.0085,
"step": 553
},
{
"epoch": 0.07652990744577981,
"grad_norm": 0.018571248278021812,
"learning_rate": 7.925099393102196e-05,
"loss": 0.0153,
"step": 554
},
{
"epoch": 0.07666804807293826,
"grad_norm": 0.006554738152772188,
"learning_rate": 7.927361854020218e-05,
"loss": 0.0079,
"step": 555
},
{
"epoch": 0.0768061887000967,
"grad_norm": 0.004420983139425516,
"learning_rate": 7.929620242099101e-05,
"loss": 0.0057,
"step": 556
},
{
"epoch": 0.07694432932725515,
"grad_norm": 0.0557420551776886,
"learning_rate": 7.931874571976197e-05,
"loss": 0.0338,
"step": 557
},
{
"epoch": 0.07708246995441359,
"grad_norm": 0.031253259629011154,
"learning_rate": 7.934124858210087e-05,
"loss": 0.028,
"step": 558
},
{
"epoch": 0.07722061058157204,
"grad_norm": 0.005481123924255371,
"learning_rate": 7.936371115281153e-05,
"loss": 0.0075,
"step": 559
},
{
"epoch": 0.07735875120873048,
"grad_norm": 0.013984930701553822,
"learning_rate": 7.938613357592132e-05,
"loss": 0.0168,
"step": 560
},
{
"epoch": 0.07749689183588894,
"grad_norm": 0.052795786410570145,
"learning_rate": 7.940851599468675e-05,
"loss": 0.0454,
"step": 561
},
{
"epoch": 0.07763503246304738,
"grad_norm": 0.02815697155892849,
"learning_rate": 7.943085855159892e-05,
"loss": 0.0204,
"step": 562
},
{
"epoch": 0.07777317309020583,
"grad_norm": 0.0057924659922719,
"learning_rate": 7.945316138838898e-05,
"loss": 0.0071,
"step": 563
},
{
"epoch": 0.07791131371736428,
"grad_norm": 0.015068231150507927,
"learning_rate": 7.94754246460335e-05,
"loss": 0.0121,
"step": 564
},
{
"epoch": 0.07804945434452272,
"grad_norm": 0.006565915886312723,
"learning_rate": 7.949764846475986e-05,
"loss": 0.006,
"step": 565
},
{
"epoch": 0.07818759497168117,
"grad_norm": 0.026122109964489937,
"learning_rate": 7.951983298405152e-05,
"loss": 0.0229,
"step": 566
},
{
"epoch": 0.07832573559883962,
"grad_norm": 0.009172670543193817,
"learning_rate": 7.95419783426533e-05,
"loss": 0.0109,
"step": 567
},
{
"epoch": 0.07846387622599807,
"grad_norm": 0.012615257874131203,
"learning_rate": 7.956408467857654e-05,
"loss": 0.0137,
"step": 568
},
{
"epoch": 0.07860201685315651,
"grad_norm": 0.015784192830324173,
"learning_rate": 7.95861521291043e-05,
"loss": 0.0144,
"step": 569
},
{
"epoch": 0.07874015748031496,
"grad_norm": 0.009694431908428669,
"learning_rate": 7.960818083079644e-05,
"loss": 0.0091,
"step": 570
},
{
"epoch": 0.0788782981074734,
"grad_norm": 0.010170293040573597,
"learning_rate": 7.963017091949477e-05,
"loss": 0.0113,
"step": 571
},
{
"epoch": 0.07901643873463185,
"grad_norm": 0.016057651489973068,
"learning_rate": 7.965212253032793e-05,
"loss": 0.0123,
"step": 572
},
{
"epoch": 0.07915457936179031,
"grad_norm": 0.002096309559419751,
"learning_rate": 7.967403579771654e-05,
"loss": 0.0037,
"step": 573
},
{
"epoch": 0.07929271998894875,
"grad_norm": 0.002688502660021186,
"learning_rate": 7.969591085537804e-05,
"loss": 0.0038,
"step": 574
},
{
"epoch": 0.0794308606161072,
"grad_norm": 0.019958576187491417,
"learning_rate": 7.97177478363316e-05,
"loss": 0.0188,
"step": 575
},
{
"epoch": 0.07956900124326564,
"grad_norm": 0.03227323293685913,
"learning_rate": 7.973954687290295e-05,
"loss": 0.0295,
"step": 576
},
{
"epoch": 0.07970714187042409,
"grad_norm": 0.0040475050918757915,
"learning_rate": 7.976130809672927e-05,
"loss": 0.0051,
"step": 577
},
{
"epoch": 0.07984528249758253,
"grad_norm": 0.029720835387706757,
"learning_rate": 7.978303163876388e-05,
"loss": 0.012,
"step": 578
},
{
"epoch": 0.079983423124741,
"grad_norm": 0.02928655594587326,
"learning_rate": 7.980471762928102e-05,
"loss": 0.019,
"step": 579
},
{
"epoch": 0.08012156375189944,
"grad_norm": 0.0031133065931499004,
"learning_rate": 7.982636619788048e-05,
"loss": 0.0047,
"step": 580
},
{
"epoch": 0.08025970437905788,
"grad_norm": 0.009925676509737968,
"learning_rate": 7.984797747349231e-05,
"loss": 0.0104,
"step": 581
},
{
"epoch": 0.08039784500621633,
"grad_norm": 0.013947058469057083,
"learning_rate": 7.986955158438142e-05,
"loss": 0.0148,
"step": 582
},
{
"epoch": 0.08053598563337477,
"grad_norm": 0.07294765114784241,
"learning_rate": 7.989108865815207e-05,
"loss": 0.0709,
"step": 583
},
{
"epoch": 0.08067412626053322,
"grad_norm": 0.021420951932668686,
"learning_rate": 7.99125888217525e-05,
"loss": 0.0134,
"step": 584
},
{
"epoch": 0.08081226688769166,
"grad_norm": 0.002957011340186,
"learning_rate": 7.993405220147938e-05,
"loss": 0.0042,
"step": 585
},
{
"epoch": 0.08095040751485012,
"grad_norm": 0.0029216560069471598,
"learning_rate": 7.995547892298221e-05,
"loss": 0.0036,
"step": 586
},
{
"epoch": 0.08108854814200857,
"grad_norm": 0.010200831107795238,
"learning_rate": 7.997686911126785e-05,
"loss": 0.011,
"step": 587
},
{
"epoch": 0.08122668876916701,
"grad_norm": 0.009586147964000702,
"learning_rate": 7.999822289070476e-05,
"loss": 0.0102,
"step": 588
},
{
"epoch": 0.08136482939632546,
"grad_norm": 0.024438267573714256,
"learning_rate": 8.001954038502744e-05,
"loss": 0.0247,
"step": 589
},
{
"epoch": 0.0815029700234839,
"grad_norm": 0.01673005148768425,
"learning_rate": 8.004082171734068e-05,
"loss": 0.0135,
"step": 590
},
{
"epoch": 0.08164111065064235,
"grad_norm": 0.03330973908305168,
"learning_rate": 8.006206701012391e-05,
"loss": 0.0231,
"step": 591
},
{
"epoch": 0.08177925127780081,
"grad_norm": 0.015957612544298172,
"learning_rate": 8.008327638523524e-05,
"loss": 0.0117,
"step": 592
},
{
"epoch": 0.08191739190495925,
"grad_norm": 0.0030719093047082424,
"learning_rate": 8.010444996391589e-05,
"loss": 0.005,
"step": 593
},
{
"epoch": 0.0820555325321177,
"grad_norm": 0.029894646257162094,
"learning_rate": 8.012558786679415e-05,
"loss": 0.0171,
"step": 594
},
{
"epoch": 0.08219367315927614,
"grad_norm": 0.0035989086609333754,
"learning_rate": 8.014669021388965e-05,
"loss": 0.0043,
"step": 595
},
{
"epoch": 0.08233181378643459,
"grad_norm": 0.0025325831957161427,
"learning_rate": 8.016775712461736e-05,
"loss": 0.0041,
"step": 596
},
{
"epoch": 0.08246995441359303,
"grad_norm": 0.023065388202667236,
"learning_rate": 8.018878871779166e-05,
"loss": 0.0185,
"step": 597
},
{
"epoch": 0.08260809504075149,
"grad_norm": 0.003533895593136549,
"learning_rate": 8.02097851116304e-05,
"loss": 0.0057,
"step": 598
},
{
"epoch": 0.08274623566790994,
"grad_norm": 0.0017970139160752296,
"learning_rate": 8.023074642375884e-05,
"loss": 0.0034,
"step": 599
},
{
"epoch": 0.08288437629506838,
"grad_norm": 0.009932472370564938,
"learning_rate": 8.025167277121362e-05,
"loss": 0.0103,
"step": 600
},
{
"epoch": 0.08302251692222683,
"grad_norm": 0.010555654764175415,
"learning_rate": 8.02725642704467e-05,
"loss": 0.009,
"step": 601
},
{
"epoch": 0.08316065754938527,
"grad_norm": 0.008634793572127819,
"learning_rate": 8.02934210373292e-05,
"loss": 0.0087,
"step": 602
},
{
"epoch": 0.08329879817654372,
"grad_norm": 0.010200063697993755,
"learning_rate": 8.031424318715534e-05,
"loss": 0.0088,
"step": 603
},
{
"epoch": 0.08343693880370216,
"grad_norm": 0.012205686420202255,
"learning_rate": 8.033503083464615e-05,
"loss": 0.0106,
"step": 604
},
{
"epoch": 0.08357507943086062,
"grad_norm": 0.010989578440785408,
"learning_rate": 8.035578409395338e-05,
"loss": 0.0085,
"step": 605
},
{
"epoch": 0.08371322005801907,
"grad_norm": 0.028562815859913826,
"learning_rate": 8.037650307866316e-05,
"loss": 0.0207,
"step": 606
},
{
"epoch": 0.08385136068517751,
"grad_norm": 0.011450926773250103,
"learning_rate": 8.039718790179982e-05,
"loss": 0.0141,
"step": 607
},
{
"epoch": 0.08398950131233596,
"grad_norm": 0.0031172465533018112,
"learning_rate": 8.041783867582952e-05,
"loss": 0.0047,
"step": 608
},
{
"epoch": 0.0841276419394944,
"grad_norm": 0.033424630761146545,
"learning_rate": 8.04384555126639e-05,
"loss": 0.0168,
"step": 609
},
{
"epoch": 0.08426578256665285,
"grad_norm": 0.033151671290397644,
"learning_rate": 8.045903852366387e-05,
"loss": 0.0197,
"step": 610
},
{
"epoch": 0.0844039231938113,
"grad_norm": 0.00440793065354228,
"learning_rate": 8.0479587819643e-05,
"loss": 0.0046,
"step": 611
},
{
"epoch": 0.08454206382096975,
"grad_norm": 0.019544310867786407,
"learning_rate": 8.050010351087127e-05,
"loss": 0.0152,
"step": 612
},
{
"epoch": 0.0846802044481282,
"grad_norm": 0.012192309834063053,
"learning_rate": 8.052058570707858e-05,
"loss": 0.0107,
"step": 613
},
{
"epoch": 0.08481834507528664,
"grad_norm": 0.00563014717772603,
"learning_rate": 8.054103451745819e-05,
"loss": 0.0059,
"step": 614
},
{
"epoch": 0.08495648570244509,
"grad_norm": 0.006608523894101381,
"learning_rate": 8.056145005067033e-05,
"loss": 0.0072,
"step": 615
},
{
"epoch": 0.08509462632960353,
"grad_norm": 0.006565593648701906,
"learning_rate": 8.05818324148456e-05,
"loss": 0.007,
"step": 616
},
{
"epoch": 0.08523276695676199,
"grad_norm": 0.0052340151742100716,
"learning_rate": 8.060218171758839e-05,
"loss": 0.0051,
"step": 617
},
{
"epoch": 0.08537090758392044,
"grad_norm": 0.0058824713341891766,
"learning_rate": 8.062249806598032e-05,
"loss": 0.0068,
"step": 618
},
{
"epoch": 0.08550904821107888,
"grad_norm": 0.008156881667673588,
"learning_rate": 8.064278156658369e-05,
"loss": 0.0073,
"step": 619
},
{
"epoch": 0.08564718883823733,
"grad_norm": 0.003906435798853636,
"learning_rate": 8.066303232544462e-05,
"loss": 0.0044,
"step": 620
},
{
"epoch": 0.08578532946539577,
"grad_norm": 0.004190659616142511,
"learning_rate": 8.068325044809664e-05,
"loss": 0.0045,
"step": 621
},
{
"epoch": 0.08592347009255422,
"grad_norm": 0.004072085488587618,
"learning_rate": 8.070343603956381e-05,
"loss": 0.0048,
"step": 622
},
{
"epoch": 0.08606161071971266,
"grad_norm": 0.08516935259103775,
"learning_rate": 8.072358920436406e-05,
"loss": 0.065,
"step": 623
},
{
"epoch": 0.08619975134687112,
"grad_norm": 0.12318930774927139,
"learning_rate": 8.074371004651244e-05,
"loss": 0.0462,
"step": 624
},
{
"epoch": 0.08633789197402957,
"grad_norm": 0.026846202090382576,
"learning_rate": 8.076379866952429e-05,
"loss": 0.0188,
"step": 625
},
{
"epoch": 0.08647603260118801,
"grad_norm": 0.036953698843717575,
"learning_rate": 8.078385517641853e-05,
"loss": 0.0196,
"step": 626
},
{
"epoch": 0.08661417322834646,
"grad_norm": 0.07914821058511734,
"learning_rate": 8.080387966972071e-05,
"loss": 0.0722,
"step": 627
},
{
"epoch": 0.0867523138555049,
"grad_norm": 0.05335409194231033,
"learning_rate": 8.082387225146627e-05,
"loss": 0.043,
"step": 628
},
{
"epoch": 0.08689045448266335,
"grad_norm": 0.008553317748010159,
"learning_rate": 8.084383302320358e-05,
"loss": 0.0077,
"step": 629
},
{
"epoch": 0.0870285951098218,
"grad_norm": 0.005956161767244339,
"learning_rate": 8.086376208599705e-05,
"loss": 0.0067,
"step": 630
},
{
"epoch": 0.08716673573698025,
"grad_norm": 0.03032430075109005,
"learning_rate": 8.088365954043025e-05,
"loss": 0.021,
"step": 631
},
{
"epoch": 0.0873048763641387,
"grad_norm": 0.0037184508983045816,
"learning_rate": 8.090352548660889e-05,
"loss": 0.0053,
"step": 632
},
{
"epoch": 0.08744301699129714,
"grad_norm": 0.014337614178657532,
"learning_rate": 8.092336002416385e-05,
"loss": 0.0122,
"step": 633
},
{
"epoch": 0.08758115761845559,
"grad_norm": 0.007453493308275938,
"learning_rate": 8.094316325225429e-05,
"loss": 0.0097,
"step": 634
},
{
"epoch": 0.08771929824561403,
"grad_norm": 0.019370267167687416,
"learning_rate": 8.096293526957041e-05,
"loss": 0.0165,
"step": 635
},
{
"epoch": 0.08785743887277249,
"grad_norm": 0.01445276290178299,
"learning_rate": 8.098267617433659e-05,
"loss": 0.016,
"step": 636
},
{
"epoch": 0.08799557949993093,
"grad_norm": 0.017797252163290977,
"learning_rate": 8.100238606431425e-05,
"loss": 0.0122,
"step": 637
},
{
"epoch": 0.08813372012708938,
"grad_norm": 0.014362436719238758,
"learning_rate": 8.102206503680476e-05,
"loss": 0.0126,
"step": 638
},
{
"epoch": 0.08827186075424782,
"grad_norm": 0.10174919664859772,
"learning_rate": 8.104171318865227e-05,
"loss": 0.0851,
"step": 639
},
{
"epoch": 0.08841000138140627,
"grad_norm": 0.015217412263154984,
"learning_rate": 8.106133061624669e-05,
"loss": 0.0163,
"step": 640
},
{
"epoch": 0.08854814200856471,
"grad_norm": 0.002396307187154889,
"learning_rate": 8.108091741552639e-05,
"loss": 0.0039,
"step": 641
},
{
"epoch": 0.08868628263572316,
"grad_norm": 0.002735432470217347,
"learning_rate": 8.110047368198112e-05,
"loss": 0.0045,
"step": 642
},
{
"epoch": 0.08882442326288162,
"grad_norm": 0.007928053848445415,
"learning_rate": 8.111999951065476e-05,
"loss": 0.0095,
"step": 643
},
{
"epoch": 0.08896256389004006,
"grad_norm": 0.004873833619058132,
"learning_rate": 8.113949499614808e-05,
"loss": 0.0054,
"step": 644
},
{
"epoch": 0.08910070451719851,
"grad_norm": 0.005578899290412664,
"learning_rate": 8.11589602326215e-05,
"loss": 0.0066,
"step": 645
},
{
"epoch": 0.08923884514435695,
"grad_norm": 0.004303886089473963,
"learning_rate": 8.117839531379784e-05,
"loss": 0.0062,
"step": 646
},
{
"epoch": 0.0893769857715154,
"grad_norm": 0.038032252341508865,
"learning_rate": 8.1197800332965e-05,
"loss": 0.0369,
"step": 647
},
{
"epoch": 0.08951512639867384,
"grad_norm": 0.06186262145638466,
"learning_rate": 8.121717538297867e-05,
"loss": 0.031,
"step": 648
},
{
"epoch": 0.0896532670258323,
"grad_norm": 0.029956836253404617,
"learning_rate": 8.123652055626497e-05,
"loss": 0.0184,
"step": 649
},
{
"epoch": 0.08979140765299075,
"grad_norm": 0.005330318119376898,
"learning_rate": 8.125583594482312e-05,
"loss": 0.0073,
"step": 650
},
{
"epoch": 0.0899295482801492,
"grad_norm": 0.006597398314625025,
"learning_rate": 8.127512164022804e-05,
"loss": 0.0071,
"step": 651
},
{
"epoch": 0.09006768890730764,
"grad_norm": 0.012712563388049603,
"learning_rate": 8.1294377733633e-05,
"loss": 0.0152,
"step": 652
},
{
"epoch": 0.09020582953446608,
"grad_norm": 0.0039291055873036385,
"learning_rate": 8.131360431577212e-05,
"loss": 0.0049,
"step": 653
},
{
"epoch": 0.09034397016162453,
"grad_norm": 0.009396882727742195,
"learning_rate": 8.133280147696304e-05,
"loss": 0.0122,
"step": 654
},
{
"epoch": 0.09048211078878299,
"grad_norm": 0.003607046790421009,
"learning_rate": 8.13519693071094e-05,
"loss": 0.0062,
"step": 655
},
{
"epoch": 0.09062025141594143,
"grad_norm": 0.060046661645174026,
"learning_rate": 8.137110789570339e-05,
"loss": 0.0501,
"step": 656
},
{
"epoch": 0.09075839204309988,
"grad_norm": 0.020894410088658333,
"learning_rate": 8.139021733182823e-05,
"loss": 0.0164,
"step": 657
},
{
"epoch": 0.09089653267025832,
"grad_norm": 0.01064255740493536,
"learning_rate": 8.140929770416067e-05,
"loss": 0.0113,
"step": 658
},
{
"epoch": 0.09103467329741677,
"grad_norm": 0.01144749391824007,
"learning_rate": 8.142834910097348e-05,
"loss": 0.0116,
"step": 659
},
{
"epoch": 0.09117281392457521,
"grad_norm": 0.0037209605798125267,
"learning_rate": 8.144737161013789e-05,
"loss": 0.0042,
"step": 660
},
{
"epoch": 0.09131095455173366,
"grad_norm": 0.014418653212487698,
"learning_rate": 8.146636531912602e-05,
"loss": 0.019,
"step": 661
},
{
"epoch": 0.09144909517889212,
"grad_norm": 0.004248477052897215,
"learning_rate": 8.148533031501328e-05,
"loss": 0.004,
"step": 662
},
{
"epoch": 0.09158723580605056,
"grad_norm": 0.023498015478253365,
"learning_rate": 8.150426668448077e-05,
"loss": 0.0156,
"step": 663
},
{
"epoch": 0.09172537643320901,
"grad_norm": 0.02856908179819584,
"learning_rate": 8.152317451381767e-05,
"loss": 0.021,
"step": 664
},
{
"epoch": 0.09186351706036745,
"grad_norm": 0.03920527175068855,
"learning_rate": 8.154205388892362e-05,
"loss": 0.041,
"step": 665
},
{
"epoch": 0.0920016576875259,
"grad_norm": 0.006108124274760485,
"learning_rate": 8.156090489531097e-05,
"loss": 0.0053,
"step": 666
},
{
"epoch": 0.09213979831468434,
"grad_norm": 0.040040936321020126,
"learning_rate": 8.157972761810723e-05,
"loss": 0.0089,
"step": 667
},
{
"epoch": 0.0922779389418428,
"grad_norm": 0.008494898676872253,
"learning_rate": 8.159852214205728e-05,
"loss": 0.0117,
"step": 668
},
{
"epoch": 0.09241607956900125,
"grad_norm": 0.07821296900510788,
"learning_rate": 8.161728855152573e-05,
"loss": 0.0578,
"step": 669
},
{
"epoch": 0.09255422019615969,
"grad_norm": 0.027581755071878433,
"learning_rate": 8.163602693049908e-05,
"loss": 0.0198,
"step": 670
},
{
"epoch": 0.09269236082331814,
"grad_norm": 0.007122738752514124,
"learning_rate": 8.165473736258814e-05,
"loss": 0.009,
"step": 671
},
{
"epoch": 0.09283050145047658,
"grad_norm": 0.028153732419013977,
"learning_rate": 8.167341993103011e-05,
"loss": 0.0241,
"step": 672
},
{
"epoch": 0.09296864207763503,
"grad_norm": 0.05543315038084984,
"learning_rate": 8.169207471869094e-05,
"loss": 0.0428,
"step": 673
},
{
"epoch": 0.09310678270479349,
"grad_norm": 0.013588045723736286,
"learning_rate": 8.171070180806739e-05,
"loss": 0.0097,
"step": 674
},
{
"epoch": 0.09324492333195193,
"grad_norm": 0.009861117228865623,
"learning_rate": 8.172930128128935e-05,
"loss": 0.009,
"step": 675
},
{
"epoch": 0.09338306395911038,
"grad_norm": 0.004336031153798103,
"learning_rate": 8.174787322012195e-05,
"loss": 0.0061,
"step": 676
},
{
"epoch": 0.09352120458626882,
"grad_norm": 0.008924839086830616,
"learning_rate": 8.176641770596768e-05,
"loss": 0.0101,
"step": 677
},
{
"epoch": 0.09365934521342727,
"grad_norm": 0.005986085161566734,
"learning_rate": 8.178493481986864e-05,
"loss": 0.0083,
"step": 678
},
{
"epoch": 0.09379748584058571,
"grad_norm": 0.006522186566144228,
"learning_rate": 8.180342464250859e-05,
"loss": 0.0075,
"step": 679
},
{
"epoch": 0.09393562646774416,
"grad_norm": 0.007981918752193451,
"learning_rate": 8.182188725421502e-05,
"loss": 0.0058,
"step": 680
},
{
"epoch": 0.09407376709490262,
"grad_norm": 0.018126454204320908,
"learning_rate": 8.184032273496133e-05,
"loss": 0.013,
"step": 681
},
{
"epoch": 0.09421190772206106,
"grad_norm": 0.0011805971153080463,
"learning_rate": 8.185873116436889e-05,
"loss": 0.0023,
"step": 682
},
{
"epoch": 0.0943500483492195,
"grad_norm": 0.016573583707213402,
"learning_rate": 8.187711262170905e-05,
"loss": 0.0152,
"step": 683
},
{
"epoch": 0.09448818897637795,
"grad_norm": 0.0179180596023798,
"learning_rate": 8.189546718590522e-05,
"loss": 0.0141,
"step": 684
},
{
"epoch": 0.0946263296035364,
"grad_norm": 0.012282337993383408,
"learning_rate": 8.191379493553499e-05,
"loss": 0.0115,
"step": 685
},
{
"epoch": 0.09476447023069484,
"grad_norm": 0.01766025647521019,
"learning_rate": 8.193209594883192e-05,
"loss": 0.0159,
"step": 686
},
{
"epoch": 0.0949026108578533,
"grad_norm": 0.002552238991484046,
"learning_rate": 8.195037030368783e-05,
"loss": 0.0041,
"step": 687
},
{
"epoch": 0.09504075148501175,
"grad_norm": 0.004465501289814711,
"learning_rate": 8.196861807765457e-05,
"loss": 0.0051,
"step": 688
},
{
"epoch": 0.09517889211217019,
"grad_norm": 0.012139561586081982,
"learning_rate": 8.198683934794608e-05,
"loss": 0.0082,
"step": 689
},
{
"epoch": 0.09531703273932864,
"grad_norm": 0.010561689734458923,
"learning_rate": 8.200503419144038e-05,
"loss": 0.0114,
"step": 690
},
{
"epoch": 0.09545517336648708,
"grad_norm": 0.025389693677425385,
"learning_rate": 8.202320268468144e-05,
"loss": 0.0187,
"step": 691
},
{
"epoch": 0.09559331399364553,
"grad_norm": 0.0038718711584806442,
"learning_rate": 8.204134490388117e-05,
"loss": 0.0048,
"step": 692
},
{
"epoch": 0.09573145462080399,
"grad_norm": 0.007652644068002701,
"learning_rate": 8.205946092492133e-05,
"loss": 0.0082,
"step": 693
},
{
"epoch": 0.09586959524796243,
"grad_norm": 0.006432169582694769,
"learning_rate": 8.207755082335539e-05,
"loss": 0.007,
"step": 694
},
{
"epoch": 0.09600773587512088,
"grad_norm": 0.0037059388123452663,
"learning_rate": 8.20956146744105e-05,
"loss": 0.0063,
"step": 695
},
{
"epoch": 0.09614587650227932,
"grad_norm": 0.0046118441969156265,
"learning_rate": 8.211365255298927e-05,
"loss": 0.0059,
"step": 696
},
{
"epoch": 0.09628401712943777,
"grad_norm": 0.0039950888603925705,
"learning_rate": 8.213166453367173e-05,
"loss": 0.0057,
"step": 697
},
{
"epoch": 0.09642215775659621,
"grad_norm": 0.010843550786376,
"learning_rate": 8.214965069071713e-05,
"loss": 0.0119,
"step": 698
},
{
"epoch": 0.09656029838375466,
"grad_norm": 0.0019335473189130425,
"learning_rate": 8.216761109806576e-05,
"loss": 0.0034,
"step": 699
},
{
"epoch": 0.09669843901091311,
"grad_norm": 0.031584322452545166,
"learning_rate": 8.218554582934079e-05,
"loss": 0.0217,
"step": 700
},
{
"epoch": 0.09683657963807156,
"grad_norm": 0.001754386001266539,
"learning_rate": 8.220345495785013e-05,
"loss": 0.0031,
"step": 701
},
{
"epoch": 0.09697472026523,
"grad_norm": 0.02052963897585869,
"learning_rate": 8.222133855658817e-05,
"loss": 0.0121,
"step": 702
},
{
"epoch": 0.09711286089238845,
"grad_norm": 0.02404898777604103,
"learning_rate": 8.223919669823754e-05,
"loss": 0.0218,
"step": 703
},
{
"epoch": 0.0972510015195469,
"grad_norm": 0.004977188538759947,
"learning_rate": 8.225702945517095e-05,
"loss": 0.0062,
"step": 704
},
{
"epoch": 0.09738914214670534,
"grad_norm": 0.007673116400837898,
"learning_rate": 8.227483689945297e-05,
"loss": 0.0083,
"step": 705
},
{
"epoch": 0.0975272827738638,
"grad_norm": 0.016122104600071907,
"learning_rate": 8.229261910284163e-05,
"loss": 0.0114,
"step": 706
},
{
"epoch": 0.09766542340102224,
"grad_norm": 0.0029692344833165407,
"learning_rate": 8.231037613679035e-05,
"loss": 0.0032,
"step": 707
},
{
"epoch": 0.09780356402818069,
"grad_norm": 0.016377883031964302,
"learning_rate": 8.232810807244948e-05,
"loss": 0.0107,
"step": 708
},
{
"epoch": 0.09794170465533913,
"grad_norm": 0.0028426761273294687,
"learning_rate": 8.23458149806682e-05,
"loss": 0.005,
"step": 709
},
{
"epoch": 0.09807984528249758,
"grad_norm": 0.004359726328402758,
"learning_rate": 8.236349693199602e-05,
"loss": 0.0051,
"step": 710
},
{
"epoch": 0.09821798590965602,
"grad_norm": 0.014035112224519253,
"learning_rate": 8.23811539966846e-05,
"loss": 0.013,
"step": 711
},
{
"epoch": 0.09835612653681448,
"grad_norm": 0.0015871514333412051,
"learning_rate": 8.239878624468942e-05,
"loss": 0.0029,
"step": 712
},
{
"epoch": 0.09849426716397293,
"grad_norm": 0.0035029149148613214,
"learning_rate": 8.241639374567137e-05,
"loss": 0.0044,
"step": 713
},
{
"epoch": 0.09863240779113137,
"grad_norm": 0.008812729269266129,
"learning_rate": 8.243397656899844e-05,
"loss": 0.0065,
"step": 714
},
{
"epoch": 0.09877054841828982,
"grad_norm": 0.03456171602010727,
"learning_rate": 8.245153478374738e-05,
"loss": 0.025,
"step": 715
},
{
"epoch": 0.09890868904544826,
"grad_norm": 0.0055639478377997875,
"learning_rate": 8.246906845870534e-05,
"loss": 0.0058,
"step": 716
},
{
"epoch": 0.09904682967260671,
"grad_norm": 0.013393756933510303,
"learning_rate": 8.248657766237134e-05,
"loss": 0.011,
"step": 717
},
{
"epoch": 0.09918497029976515,
"grad_norm": 0.006130352150648832,
"learning_rate": 8.250406246295815e-05,
"loss": 0.0049,
"step": 718
},
{
"epoch": 0.09932311092692361,
"grad_norm": 0.008264213800430298,
"learning_rate": 8.252152292839364e-05,
"loss": 0.0096,
"step": 719
},
{
"epoch": 0.09946125155408206,
"grad_norm": 0.05160053074359894,
"learning_rate": 8.253895912632242e-05,
"loss": 0.0319,
"step": 720
},
{
"epoch": 0.0995993921812405,
"grad_norm": 0.026709305122494698,
"learning_rate": 8.25563711241075e-05,
"loss": 0.018,
"step": 721
},
{
"epoch": 0.09973753280839895,
"grad_norm": 0.04814797267317772,
"learning_rate": 8.25737589888318e-05,
"loss": 0.0403,
"step": 722
},
{
"epoch": 0.0998756734355574,
"grad_norm": 0.009802248328924179,
"learning_rate": 8.259112278729965e-05,
"loss": 0.0069,
"step": 723
},
{
"epoch": 0.10001381406271584,
"grad_norm": 0.004918436054140329,
"learning_rate": 8.260846258603841e-05,
"loss": 0.0061,
"step": 724
},
{
"epoch": 0.1001519546898743,
"grad_norm": 0.00288878520950675,
"learning_rate": 8.262577845129994e-05,
"loss": 0.0035,
"step": 725
},
{
"epoch": 0.10029009531703274,
"grad_norm": 0.0023648052010685205,
"learning_rate": 8.264307044906217e-05,
"loss": 0.0035,
"step": 726
},
{
"epoch": 0.10042823594419119,
"grad_norm": 0.001214659190736711,
"learning_rate": 8.266033864503055e-05,
"loss": 0.0023,
"step": 727
},
{
"epoch": 0.10056637657134963,
"grad_norm": 0.00870110746473074,
"learning_rate": 8.267758310463961e-05,
"loss": 0.0082,
"step": 728
},
{
"epoch": 0.10070451719850808,
"grad_norm": 0.001736936392262578,
"learning_rate": 8.26948038930544e-05,
"loss": 0.0032,
"step": 729
},
{
"epoch": 0.10084265782566652,
"grad_norm": 0.00857970304787159,
"learning_rate": 8.271200107517198e-05,
"loss": 0.0084,
"step": 730
},
{
"epoch": 0.10098079845282498,
"grad_norm": 0.0043541984632611275,
"learning_rate": 8.27291747156229e-05,
"loss": 0.0059,
"step": 731
},
{
"epoch": 0.10111893907998343,
"grad_norm": 0.017549846321344376,
"learning_rate": 8.274632487877266e-05,
"loss": 0.0156,
"step": 732
},
{
"epoch": 0.10125707970714187,
"grad_norm": 0.02662530541419983,
"learning_rate": 8.276345162872311e-05,
"loss": 0.0148,
"step": 733
},
{
"epoch": 0.10139522033430032,
"grad_norm": 0.016454922035336494,
"learning_rate": 8.278055502931398e-05,
"loss": 0.0126,
"step": 734
},
{
"epoch": 0.10153336096145876,
"grad_norm": 0.0410042479634285,
"learning_rate": 8.279763514412423e-05,
"loss": 0.0273,
"step": 735
},
{
"epoch": 0.10167150158861721,
"grad_norm": 0.03968954086303711,
"learning_rate": 8.281469203647345e-05,
"loss": 0.0259,
"step": 736
},
{
"epoch": 0.10180964221577565,
"grad_norm": 0.014899387024343014,
"learning_rate": 8.283172576942335e-05,
"loss": 0.0072,
"step": 737
},
{
"epoch": 0.10194778284293411,
"grad_norm": 0.012564278207719326,
"learning_rate": 8.284873640577912e-05,
"loss": 0.0108,
"step": 738
},
{
"epoch": 0.10208592347009256,
"grad_norm": 0.0013456420274451375,
"learning_rate": 8.286572400809081e-05,
"loss": 0.0028,
"step": 739
},
{
"epoch": 0.102224064097251,
"grad_norm": 0.014120531268417835,
"learning_rate": 8.288268863865471e-05,
"loss": 0.0127,
"step": 740
},
{
"epoch": 0.10236220472440945,
"grad_norm": 0.0025778827257454395,
"learning_rate": 8.289963035951473e-05,
"loss": 0.0037,
"step": 741
},
{
"epoch": 0.10250034535156789,
"grad_norm": 0.06253144890069962,
"learning_rate": 8.291654923246375e-05,
"loss": 0.0239,
"step": 742
},
{
"epoch": 0.10263848597872634,
"grad_norm": 0.0070442091673612595,
"learning_rate": 8.293344531904503e-05,
"loss": 0.0078,
"step": 743
},
{
"epoch": 0.1027766266058848,
"grad_norm": 0.02459808997809887,
"learning_rate": 8.29503186805534e-05,
"loss": 0.0195,
"step": 744
},
{
"epoch": 0.10291476723304324,
"grad_norm": 0.0076606678776443005,
"learning_rate": 8.296716937803683e-05,
"loss": 0.0096,
"step": 745
},
{
"epoch": 0.10305290786020169,
"grad_norm": 0.011984504759311676,
"learning_rate": 8.298399747229752e-05,
"loss": 0.0139,
"step": 746
},
{
"epoch": 0.10319104848736013,
"grad_norm": 0.00821708794683218,
"learning_rate": 8.30008030238934e-05,
"loss": 0.0089,
"step": 747
},
{
"epoch": 0.10332918911451858,
"grad_norm": 0.03523290902376175,
"learning_rate": 8.30175860931393e-05,
"loss": 0.0264,
"step": 748
},
{
"epoch": 0.10346732974167702,
"grad_norm": 0.018691513687372208,
"learning_rate": 8.303434674010831e-05,
"loss": 0.0121,
"step": 749
},
{
"epoch": 0.10360547036883548,
"grad_norm": 0.004369445610791445,
"learning_rate": 8.305108502463309e-05,
"loss": 0.0072,
"step": 750
},
{
"epoch": 0.10374361099599393,
"grad_norm": 0.034502919763326645,
"learning_rate": 8.306780100630714e-05,
"loss": 0.0242,
"step": 751
},
{
"epoch": 0.10388175162315237,
"grad_norm": 0.0035339398309588432,
"learning_rate": 8.308449474448603e-05,
"loss": 0.0039,
"step": 752
},
{
"epoch": 0.10401989225031082,
"grad_norm": 0.0015408458421006799,
"learning_rate": 8.310116629828871e-05,
"loss": 0.0027,
"step": 753
},
{
"epoch": 0.10415803287746926,
"grad_norm": 0.09164151549339294,
"learning_rate": 8.311781572659877e-05,
"loss": 0.0502,
"step": 754
},
{
"epoch": 0.1042961735046277,
"grad_norm": 0.009650464169681072,
"learning_rate": 8.313444308806562e-05,
"loss": 0.0087,
"step": 755
},
{
"epoch": 0.10443431413178615,
"grad_norm": 0.038072582334280014,
"learning_rate": 8.315104844110585e-05,
"loss": 0.0405,
"step": 756
},
{
"epoch": 0.10457245475894461,
"grad_norm": 0.019803661853075027,
"learning_rate": 8.316763184390432e-05,
"loss": 0.0156,
"step": 757
},
{
"epoch": 0.10471059538610306,
"grad_norm": 0.02868686057627201,
"learning_rate": 8.318419335441551e-05,
"loss": 0.0259,
"step": 758
},
{
"epoch": 0.1048487360132615,
"grad_norm": 0.04778165742754936,
"learning_rate": 8.320073303036467e-05,
"loss": 0.0396,
"step": 759
},
{
"epoch": 0.10498687664041995,
"grad_norm": 0.009861230850219727,
"learning_rate": 8.321725092924898e-05,
"loss": 0.0119,
"step": 760
},
{
"epoch": 0.10512501726757839,
"grad_norm": 0.014904690906405449,
"learning_rate": 8.32337471083389e-05,
"loss": 0.0121,
"step": 761
},
{
"epoch": 0.10526315789473684,
"grad_norm": 0.022462379187345505,
"learning_rate": 8.32502216246792e-05,
"loss": 0.0252,
"step": 762
},
{
"epoch": 0.1054012985218953,
"grad_norm": 0.03091195411980152,
"learning_rate": 8.326667453509023e-05,
"loss": 0.0193,
"step": 763
},
{
"epoch": 0.10553943914905374,
"grad_norm": 0.00843064021319151,
"learning_rate": 8.328310589616908e-05,
"loss": 0.0073,
"step": 764
},
{
"epoch": 0.10567757977621219,
"grad_norm": 0.00403135921806097,
"learning_rate": 8.329951576429075e-05,
"loss": 0.0061,
"step": 765
},
{
"epoch": 0.10581572040337063,
"grad_norm": 0.024939602240920067,
"learning_rate": 8.331590419560928e-05,
"loss": 0.0218,
"step": 766
},
{
"epoch": 0.10595386103052908,
"grad_norm": 0.00784094538539648,
"learning_rate": 8.333227124605897e-05,
"loss": 0.0069,
"step": 767
},
{
"epoch": 0.10609200165768752,
"grad_norm": 0.014025907963514328,
"learning_rate": 8.334861697135548e-05,
"loss": 0.0129,
"step": 768
},
{
"epoch": 0.10623014228484598,
"grad_norm": 0.004523341543972492,
"learning_rate": 8.336494142699693e-05,
"loss": 0.0049,
"step": 769
},
{
"epoch": 0.10636828291200442,
"grad_norm": 0.09783921390771866,
"learning_rate": 8.338124466826507e-05,
"loss": 0.0225,
"step": 770
},
{
"epoch": 0.10650642353916287,
"grad_norm": 0.0024887770414352417,
"learning_rate": 8.339752675022647e-05,
"loss": 0.0039,
"step": 771
},
{
"epoch": 0.10664456416632132,
"grad_norm": 0.022068385034799576,
"learning_rate": 8.341378772773356e-05,
"loss": 0.0169,
"step": 772
},
{
"epoch": 0.10678270479347976,
"grad_norm": 0.02197118103504181,
"learning_rate": 8.343002765542567e-05,
"loss": 0.0161,
"step": 773
},
{
"epoch": 0.1069208454206382,
"grad_norm": 0.004046064335852861,
"learning_rate": 8.34462465877303e-05,
"loss": 0.0058,
"step": 774
},
{
"epoch": 0.10705898604779665,
"grad_norm": 0.013067352585494518,
"learning_rate": 8.346244457886409e-05,
"loss": 0.0119,
"step": 775
},
{
"epoch": 0.10719712667495511,
"grad_norm": 0.017200065776705742,
"learning_rate": 8.347862168283396e-05,
"loss": 0.0146,
"step": 776
},
{
"epoch": 0.10733526730211355,
"grad_norm": 0.007307767402380705,
"learning_rate": 8.349477795343814e-05,
"loss": 0.0089,
"step": 777
},
{
"epoch": 0.107473407929272,
"grad_norm": 0.06693489849567413,
"learning_rate": 8.351091344426733e-05,
"loss": 0.0603,
"step": 778
},
{
"epoch": 0.10761154855643044,
"grad_norm": 0.00934355054050684,
"learning_rate": 8.352702820870569e-05,
"loss": 0.0108,
"step": 779
},
{
"epoch": 0.10774968918358889,
"grad_norm": 0.0020181015133857727,
"learning_rate": 8.354312229993191e-05,
"loss": 0.0036,
"step": 780
},
{
"epoch": 0.10788782981074733,
"grad_norm": 0.018208837136626244,
"learning_rate": 8.35591957709203e-05,
"loss": 0.0162,
"step": 781
},
{
"epoch": 0.1080259704379058,
"grad_norm": 0.026561802253127098,
"learning_rate": 8.357524867444177e-05,
"loss": 0.0139,
"step": 782
},
{
"epoch": 0.10816411106506424,
"grad_norm": 0.03126946836709976,
"learning_rate": 8.359128106306499e-05,
"loss": 0.0312,
"step": 783
},
{
"epoch": 0.10830225169222268,
"grad_norm": 0.002232447499409318,
"learning_rate": 8.360729298915728e-05,
"loss": 0.0038,
"step": 784
},
{
"epoch": 0.10844039231938113,
"grad_norm": 0.012769825756549835,
"learning_rate": 8.362328450488575e-05,
"loss": 0.0112,
"step": 785
},
{
"epoch": 0.10857853294653957,
"grad_norm": 0.005585674196481705,
"learning_rate": 8.36392556622182e-05,
"loss": 0.0063,
"step": 786
},
{
"epoch": 0.10871667357369802,
"grad_norm": 0.0049601225182414055,
"learning_rate": 8.365520651292431e-05,
"loss": 0.0055,
"step": 787
},
{
"epoch": 0.10885481420085648,
"grad_norm": 0.008773828856647015,
"learning_rate": 8.367113710857645e-05,
"loss": 0.0102,
"step": 788
},
{
"epoch": 0.10899295482801492,
"grad_norm": 0.009176612831652164,
"learning_rate": 8.36870475005508e-05,
"loss": 0.0098,
"step": 789
},
{
"epoch": 0.10913109545517337,
"grad_norm": 0.04428212344646454,
"learning_rate": 8.370293774002835e-05,
"loss": 0.0264,
"step": 790
},
{
"epoch": 0.10926923608233181,
"grad_norm": 0.0029104934073984623,
"learning_rate": 8.371880787799582e-05,
"loss": 0.0039,
"step": 791
},
{
"epoch": 0.10940737670949026,
"grad_norm": 0.01047749724239111,
"learning_rate": 8.37346579652467e-05,
"loss": 0.0093,
"step": 792
},
{
"epoch": 0.1095455173366487,
"grad_norm": 0.01725156232714653,
"learning_rate": 8.375048805238215e-05,
"loss": 0.0174,
"step": 793
},
{
"epoch": 0.10968365796380715,
"grad_norm": 0.0077973040752112865,
"learning_rate": 8.37662981898121e-05,
"loss": 0.0073,
"step": 794
},
{
"epoch": 0.10982179859096561,
"grad_norm": 0.0034406129270792007,
"learning_rate": 8.378208842775606e-05,
"loss": 0.0047,
"step": 795
},
{
"epoch": 0.10995993921812405,
"grad_norm": 0.004409421235322952,
"learning_rate": 8.37978588162442e-05,
"loss": 0.0052,
"step": 796
},
{
"epoch": 0.1100980798452825,
"grad_norm": 0.020356660708785057,
"learning_rate": 8.381360940511824e-05,
"loss": 0.0189,
"step": 797
},
{
"epoch": 0.11023622047244094,
"grad_norm": 0.009040805511176586,
"learning_rate": 8.382934024403241e-05,
"loss": 0.0075,
"step": 798
},
{
"epoch": 0.11037436109959939,
"grad_norm": 0.004525200929492712,
"learning_rate": 8.384505138245437e-05,
"loss": 0.0055,
"step": 799
},
{
"epoch": 0.11051250172675783,
"grad_norm": 0.0028125497046858072,
"learning_rate": 8.386074286966616e-05,
"loss": 0.0045,
"step": 800
},
{
"epoch": 0.11065064235391629,
"grad_norm": 0.007167664356529713,
"learning_rate": 8.387641475476517e-05,
"loss": 0.0089,
"step": 801
},
{
"epoch": 0.11078878298107474,
"grad_norm": 0.02145918644964695,
"learning_rate": 8.389206708666495e-05,
"loss": 0.0109,
"step": 802
},
{
"epoch": 0.11092692360823318,
"grad_norm": 0.07421177625656128,
"learning_rate": 8.390769991409625e-05,
"loss": 0.0339,
"step": 803
},
{
"epoch": 0.11106506423539163,
"grad_norm": 0.016273144632577896,
"learning_rate": 8.392331328560788e-05,
"loss": 0.0138,
"step": 804
},
{
"epoch": 0.11120320486255007,
"grad_norm": 0.006689921021461487,
"learning_rate": 8.393890724956755e-05,
"loss": 0.0051,
"step": 805
},
{
"epoch": 0.11134134548970852,
"grad_norm": 0.013465960510075092,
"learning_rate": 8.39544818541629e-05,
"loss": 0.0142,
"step": 806
},
{
"epoch": 0.11147948611686698,
"grad_norm": 0.003067552577704191,
"learning_rate": 8.39700371474023e-05,
"loss": 0.0035,
"step": 807
},
{
"epoch": 0.11161762674402542,
"grad_norm": 0.002693150658160448,
"learning_rate": 8.398557317711569e-05,
"loss": 0.0042,
"step": 808
},
{
"epoch": 0.11175576737118387,
"grad_norm": 0.0046765501610934734,
"learning_rate": 8.400108999095567e-05,
"loss": 0.0049,
"step": 809
},
{
"epoch": 0.11189390799834231,
"grad_norm": 0.004729992710053921,
"learning_rate": 8.401658763639814e-05,
"loss": 0.0059,
"step": 810
},
{
"epoch": 0.11203204862550076,
"grad_norm": 0.011979191564023495,
"learning_rate": 8.403206616074328e-05,
"loss": 0.015,
"step": 811
},
{
"epoch": 0.1121701892526592,
"grad_norm": 0.002755200956016779,
"learning_rate": 8.404752561111644e-05,
"loss": 0.0038,
"step": 812
},
{
"epoch": 0.11230832987981765,
"grad_norm": 0.010400881990790367,
"learning_rate": 8.406296603446894e-05,
"loss": 0.0117,
"step": 813
},
{
"epoch": 0.1124464705069761,
"grad_norm": 0.006119609344750643,
"learning_rate": 8.407838747757898e-05,
"loss": 0.0071,
"step": 814
},
{
"epoch": 0.11258461113413455,
"grad_norm": 0.005961157381534576,
"learning_rate": 8.409378998705247e-05,
"loss": 0.0072,
"step": 815
},
{
"epoch": 0.112722751761293,
"grad_norm": 0.0021235195454210043,
"learning_rate": 8.41091736093238e-05,
"loss": 0.0033,
"step": 816
},
{
"epoch": 0.11286089238845144,
"grad_norm": 0.004867125768214464,
"learning_rate": 8.412453839065685e-05,
"loss": 0.0047,
"step": 817
},
{
"epoch": 0.11299903301560989,
"grad_norm": 0.021859489381313324,
"learning_rate": 8.413988437714566e-05,
"loss": 0.0218,
"step": 818
},
{
"epoch": 0.11313717364276833,
"grad_norm": 0.004767335951328278,
"learning_rate": 8.415521161471534e-05,
"loss": 0.005,
"step": 819
},
{
"epoch": 0.11327531426992679,
"grad_norm": 0.0023131791967898607,
"learning_rate": 8.417052014912287e-05,
"loss": 0.003,
"step": 820
},
{
"epoch": 0.11341345489708524,
"grad_norm": 0.001652119099162519,
"learning_rate": 8.418581002595795e-05,
"loss": 0.003,
"step": 821
},
{
"epoch": 0.11355159552424368,
"grad_norm": 0.06345450133085251,
"learning_rate": 8.420108129064377e-05,
"loss": 0.0511,
"step": 822
},
{
"epoch": 0.11368973615140213,
"grad_norm": 0.001477993791922927,
"learning_rate": 8.421633398843785e-05,
"loss": 0.0027,
"step": 823
},
{
"epoch": 0.11382787677856057,
"grad_norm": 0.010723377577960491,
"learning_rate": 8.423156816443287e-05,
"loss": 0.0119,
"step": 824
},
{
"epoch": 0.11396601740571902,
"grad_norm": 0.005179021507501602,
"learning_rate": 8.424678386355736e-05,
"loss": 0.0044,
"step": 825
},
{
"epoch": 0.11410415803287748,
"grad_norm": 0.004340772982686758,
"learning_rate": 8.426198113057666e-05,
"loss": 0.0051,
"step": 826
},
{
"epoch": 0.11424229866003592,
"grad_norm": 0.011915227398276329,
"learning_rate": 8.427716001009356e-05,
"loss": 0.0123,
"step": 827
},
{
"epoch": 0.11438043928719437,
"grad_norm": 0.006282525602728128,
"learning_rate": 8.429232054654917e-05,
"loss": 0.0064,
"step": 828
},
{
"epoch": 0.11451857991435281,
"grad_norm": 0.10944747924804688,
"learning_rate": 8.43074627842237e-05,
"loss": 0.0614,
"step": 829
},
{
"epoch": 0.11465672054151126,
"grad_norm": 0.002959243021905422,
"learning_rate": 8.432258676723714e-05,
"loss": 0.0042,
"step": 830
},
{
"epoch": 0.1147948611686697,
"grad_norm": 0.010976544581353664,
"learning_rate": 8.433769253955023e-05,
"loss": 0.0088,
"step": 831
},
{
"epoch": 0.11493300179582815,
"grad_norm": 0.015172791667282581,
"learning_rate": 8.435278014496498e-05,
"loss": 0.0099,
"step": 832
},
{
"epoch": 0.1150711424229866,
"grad_norm": 0.0273448433727026,
"learning_rate": 8.436784962712563e-05,
"loss": 0.0216,
"step": 833
},
{
"epoch": 0.11520928305014505,
"grad_norm": 0.05054100230336189,
"learning_rate": 8.438290102951929e-05,
"loss": 0.0301,
"step": 834
},
{
"epoch": 0.1153474236773035,
"grad_norm": 0.006080084480345249,
"learning_rate": 8.439793439547675e-05,
"loss": 0.0086,
"step": 835
},
{
"epoch": 0.11548556430446194,
"grad_norm": 0.008105085231363773,
"learning_rate": 8.441294976817325e-05,
"loss": 0.0088,
"step": 836
},
{
"epoch": 0.11562370493162039,
"grad_norm": 0.030704544857144356,
"learning_rate": 8.442794719062914e-05,
"loss": 0.029,
"step": 837
},
{
"epoch": 0.11576184555877883,
"grad_norm": 0.09055589139461517,
"learning_rate": 8.444292670571065e-05,
"loss": 0.0448,
"step": 838
},
{
"epoch": 0.11589998618593729,
"grad_norm": 0.03190995752811432,
"learning_rate": 8.445788835613072e-05,
"loss": 0.0238,
"step": 839
},
{
"epoch": 0.11603812681309573,
"grad_norm": 0.0042504677549004555,
"learning_rate": 8.447283218444959e-05,
"loss": 0.0047,
"step": 840
},
{
"epoch": 0.11617626744025418,
"grad_norm": 0.006505970843136311,
"learning_rate": 8.44877582330756e-05,
"loss": 0.0067,
"step": 841
},
{
"epoch": 0.11631440806741263,
"grad_norm": 0.0037945907097309828,
"learning_rate": 8.450266654426589e-05,
"loss": 0.0043,
"step": 842
},
{
"epoch": 0.11645254869457107,
"grad_norm": 0.14548099040985107,
"learning_rate": 8.451755716012719e-05,
"loss": 0.2458,
"step": 843
},
{
"epoch": 0.11659068932172952,
"grad_norm": 0.01771988905966282,
"learning_rate": 8.453243012261641e-05,
"loss": 0.0138,
"step": 844
},
{
"epoch": 0.11672882994888797,
"grad_norm": 0.01083680521696806,
"learning_rate": 8.454728547354141e-05,
"loss": 0.0101,
"step": 845
},
{
"epoch": 0.11686697057604642,
"grad_norm": 0.006191121879965067,
"learning_rate": 8.456212325456176e-05,
"loss": 0.007,
"step": 846
},
{
"epoch": 0.11700511120320486,
"grad_norm": 0.035239238291978836,
"learning_rate": 8.457694350718935e-05,
"loss": 0.0178,
"step": 847
},
{
"epoch": 0.11714325183036331,
"grad_norm": 0.020794779062271118,
"learning_rate": 8.459174627278912e-05,
"loss": 0.0143,
"step": 848
},
{
"epoch": 0.11728139245752175,
"grad_norm": 0.0027690723072737455,
"learning_rate": 8.460653159257979e-05,
"loss": 0.0043,
"step": 849
},
{
"epoch": 0.1174195330846802,
"grad_norm": 0.011040976271033287,
"learning_rate": 8.46212995076345e-05,
"loss": 0.0097,
"step": 850
},
{
"epoch": 0.11755767371183865,
"grad_norm": 0.05898001417517662,
"learning_rate": 8.463605005888147e-05,
"loss": 0.0432,
"step": 851
},
{
"epoch": 0.1176958143389971,
"grad_norm": 0.03841552138328552,
"learning_rate": 8.465078328710481e-05,
"loss": 0.0265,
"step": 852
},
{
"epoch": 0.11783395496615555,
"grad_norm": 0.09727146476507187,
"learning_rate": 8.466549923294504e-05,
"loss": 0.062,
"step": 853
},
{
"epoch": 0.117972095593314,
"grad_norm": 0.006218411959707737,
"learning_rate": 8.468019793689983e-05,
"loss": 0.0076,
"step": 854
},
{
"epoch": 0.11811023622047244,
"grad_norm": 0.008461462333798409,
"learning_rate": 8.469487943932471e-05,
"loss": 0.0089,
"step": 855
},
{
"epoch": 0.11824837684763088,
"grad_norm": 0.011293224059045315,
"learning_rate": 8.470954378043366e-05,
"loss": 0.0106,
"step": 856
},
{
"epoch": 0.11838651747478933,
"grad_norm": 0.001236687763594091,
"learning_rate": 8.472419100029984e-05,
"loss": 0.0026,
"step": 857
},
{
"epoch": 0.11852465810194779,
"grad_norm": 0.017730748280882835,
"learning_rate": 8.473882113885618e-05,
"loss": 0.0171,
"step": 858
},
{
"epoch": 0.11866279872910623,
"grad_norm": 0.017721619457006454,
"learning_rate": 8.475343423589608e-05,
"loss": 0.0151,
"step": 859
},
{
"epoch": 0.11880093935626468,
"grad_norm": 0.005204800516366959,
"learning_rate": 8.476803033107404e-05,
"loss": 0.0048,
"step": 860
},
{
"epoch": 0.11893907998342312,
"grad_norm": 0.013895371928811073,
"learning_rate": 8.47826094639063e-05,
"loss": 0.0091,
"step": 861
},
{
"epoch": 0.11907722061058157,
"grad_norm": 0.046134259551763535,
"learning_rate": 8.479717167377152e-05,
"loss": 0.0408,
"step": 862
},
{
"epoch": 0.11921536123774001,
"grad_norm": 0.00713316909968853,
"learning_rate": 8.481171699991137e-05,
"loss": 0.0094,
"step": 863
},
{
"epoch": 0.11935350186489847,
"grad_norm": 0.08703207969665527,
"learning_rate": 8.48262454814312e-05,
"loss": 0.0581,
"step": 864
},
{
"epoch": 0.11949164249205692,
"grad_norm": 0.0035085680428892374,
"learning_rate": 8.484075715730064e-05,
"loss": 0.0053,
"step": 865
},
{
"epoch": 0.11962978311921536,
"grad_norm": 0.01007845625281334,
"learning_rate": 8.485525206635425e-05,
"loss": 0.0089,
"step": 866
},
{
"epoch": 0.11976792374637381,
"grad_norm": 0.02679300867021084,
"learning_rate": 8.486973024729214e-05,
"loss": 0.0161,
"step": 867
},
{
"epoch": 0.11990606437353225,
"grad_norm": 0.006040680222213268,
"learning_rate": 8.488419173868058e-05,
"loss": 0.0079,
"step": 868
},
{
"epoch": 0.1200442050006907,
"grad_norm": 0.004664016887545586,
"learning_rate": 8.489863657895262e-05,
"loss": 0.006,
"step": 869
},
{
"epoch": 0.12018234562784914,
"grad_norm": 0.01343533769249916,
"learning_rate": 8.491306480640874e-05,
"loss": 0.0146,
"step": 870
},
{
"epoch": 0.1203204862550076,
"grad_norm": 0.019741838797926903,
"learning_rate": 8.492747645921737e-05,
"loss": 0.0166,
"step": 871
},
{
"epoch": 0.12045862688216605,
"grad_norm": 0.01905057206749916,
"learning_rate": 8.494187157541558e-05,
"loss": 0.0118,
"step": 872
},
{
"epoch": 0.12059676750932449,
"grad_norm": 0.02122565358877182,
"learning_rate": 8.495625019290968e-05,
"loss": 0.0171,
"step": 873
},
{
"epoch": 0.12073490813648294,
"grad_norm": 0.006704521831125021,
"learning_rate": 8.497061234947573e-05,
"loss": 0.0061,
"step": 874
},
{
"epoch": 0.12087304876364138,
"grad_norm": 0.0038077067583799362,
"learning_rate": 8.498495808276026e-05,
"loss": 0.005,
"step": 875
},
{
"epoch": 0.12101118939079983,
"grad_norm": 0.036112312227487564,
"learning_rate": 8.499928743028077e-05,
"loss": 0.0333,
"step": 876
},
{
"epoch": 0.12114933001795829,
"grad_norm": 0.005154281854629517,
"learning_rate": 8.501360042942633e-05,
"loss": 0.0075,
"step": 877
},
{
"epoch": 0.12128747064511673,
"grad_norm": 0.0023703796323388815,
"learning_rate": 8.502789711745823e-05,
"loss": 0.0042,
"step": 878
},
{
"epoch": 0.12142561127227518,
"grad_norm": 0.03203393518924713,
"learning_rate": 8.504217753151046e-05,
"loss": 0.0235,
"step": 879
},
{
"epoch": 0.12156375189943362,
"grad_norm": 0.02325097844004631,
"learning_rate": 8.505644170859043e-05,
"loss": 0.0168,
"step": 880
},
{
"epoch": 0.12170189252659207,
"grad_norm": 0.04458438232541084,
"learning_rate": 8.507068968557937e-05,
"loss": 0.0298,
"step": 881
},
{
"epoch": 0.12184003315375051,
"grad_norm": 0.022350402548909187,
"learning_rate": 8.508492149923301e-05,
"loss": 0.0173,
"step": 882
},
{
"epoch": 0.12197817378090897,
"grad_norm": 0.0038661733269691467,
"learning_rate": 8.509913718618219e-05,
"loss": 0.0058,
"step": 883
},
{
"epoch": 0.12211631440806742,
"grad_norm": 0.03452976420521736,
"learning_rate": 8.511333678293331e-05,
"loss": 0.0153,
"step": 884
},
{
"epoch": 0.12225445503522586,
"grad_norm": 0.00885214377194643,
"learning_rate": 8.512752032586895e-05,
"loss": 0.0092,
"step": 885
},
{
"epoch": 0.12239259566238431,
"grad_norm": 0.01287474762648344,
"learning_rate": 8.514168785124848e-05,
"loss": 0.0101,
"step": 886
},
{
"epoch": 0.12253073628954275,
"grad_norm": 0.0044240327551960945,
"learning_rate": 8.51558393952085e-05,
"loss": 0.004,
"step": 887
},
{
"epoch": 0.1226688769167012,
"grad_norm": 0.0011661675525829196,
"learning_rate": 8.516997499376351e-05,
"loss": 0.0023,
"step": 888
},
{
"epoch": 0.12280701754385964,
"grad_norm": 0.014828033745288849,
"learning_rate": 8.518409468280637e-05,
"loss": 0.0093,
"step": 889
},
{
"epoch": 0.1229451581710181,
"grad_norm": 0.006530273240059614,
"learning_rate": 8.51981984981089e-05,
"loss": 0.0072,
"step": 890
},
{
"epoch": 0.12308329879817655,
"grad_norm": 0.010840199887752533,
"learning_rate": 8.521228647532241e-05,
"loss": 0.0099,
"step": 891
},
{
"epoch": 0.12322143942533499,
"grad_norm": 0.012677641585469246,
"learning_rate": 8.522635864997826e-05,
"loss": 0.0098,
"step": 892
},
{
"epoch": 0.12335958005249344,
"grad_norm": 0.011761125177145004,
"learning_rate": 8.524041505748831e-05,
"loss": 0.0112,
"step": 893
},
{
"epoch": 0.12349772067965188,
"grad_norm": 0.05986681953072548,
"learning_rate": 8.525445573314562e-05,
"loss": 0.0406,
"step": 894
},
{
"epoch": 0.12363586130681033,
"grad_norm": 0.015386831946671009,
"learning_rate": 8.52684807121248e-05,
"loss": 0.0148,
"step": 895
},
{
"epoch": 0.12377400193396879,
"grad_norm": 0.011633604764938354,
"learning_rate": 8.528249002948265e-05,
"loss": 0.0101,
"step": 896
},
{
"epoch": 0.12391214256112723,
"grad_norm": 0.008506370708346367,
"learning_rate": 8.529648372015867e-05,
"loss": 0.0066,
"step": 897
},
{
"epoch": 0.12405028318828568,
"grad_norm": 0.05086902156472206,
"learning_rate": 8.531046181897555e-05,
"loss": 0.023,
"step": 898
},
{
"epoch": 0.12418842381544412,
"grad_norm": 0.001341451657935977,
"learning_rate": 8.532442436063973e-05,
"loss": 0.0021,
"step": 899
},
{
"epoch": 0.12432656444260257,
"grad_norm": 0.05210532248020172,
"learning_rate": 8.533837137974189e-05,
"loss": 0.0323,
"step": 900
},
{
"epoch": 0.12446470506976101,
"grad_norm": 0.007349935360252857,
"learning_rate": 8.535230291075745e-05,
"loss": 0.0087,
"step": 901
},
{
"epoch": 0.12460284569691947,
"grad_norm": 0.004401295445859432,
"learning_rate": 8.536621898804715e-05,
"loss": 0.0056,
"step": 902
},
{
"epoch": 0.12474098632407792,
"grad_norm": 0.016419509425759315,
"learning_rate": 8.538011964585747e-05,
"loss": 0.0146,
"step": 903
},
{
"epoch": 0.12487912695123636,
"grad_norm": 0.012662366963922977,
"learning_rate": 8.539400491832118e-05,
"loss": 0.0102,
"step": 904
},
{
"epoch": 0.12501726757839482,
"grad_norm": 0.0041525522246956825,
"learning_rate": 8.540787483945788e-05,
"loss": 0.0056,
"step": 905
},
{
"epoch": 0.12515540820555326,
"grad_norm": 0.001923445495776832,
"learning_rate": 8.542172944317442e-05,
"loss": 0.0033,
"step": 906
},
{
"epoch": 0.1252935488327117,
"grad_norm": 0.039593107998371124,
"learning_rate": 8.543556876326543e-05,
"loss": 0.0238,
"step": 907
},
{
"epoch": 0.12543168945987015,
"grad_norm": 0.004473451524972916,
"learning_rate": 8.544939283341387e-05,
"loss": 0.0058,
"step": 908
},
{
"epoch": 0.1255698300870286,
"grad_norm": 0.004258911591023207,
"learning_rate": 8.546320168719143e-05,
"loss": 0.005,
"step": 909
},
{
"epoch": 0.12570797071418705,
"grad_norm": 0.026720302179455757,
"learning_rate": 8.547699535805908e-05,
"loss": 0.022,
"step": 910
},
{
"epoch": 0.1258461113413455,
"grad_norm": 0.005456454586237669,
"learning_rate": 8.549077387936755e-05,
"loss": 0.0052,
"step": 911
},
{
"epoch": 0.12598425196850394,
"grad_norm": 0.01937148906290531,
"learning_rate": 8.550453728435777e-05,
"loss": 0.0169,
"step": 912
},
{
"epoch": 0.12612239259566238,
"grad_norm": 0.14971759915351868,
"learning_rate": 8.551828560616142e-05,
"loss": 0.2231,
"step": 913
},
{
"epoch": 0.12626053322282083,
"grad_norm": 0.003774692304432392,
"learning_rate": 8.553201887780136e-05,
"loss": 0.0051,
"step": 914
},
{
"epoch": 0.12639867384997927,
"grad_norm": 0.012158801779150963,
"learning_rate": 8.554573713219211e-05,
"loss": 0.0086,
"step": 915
},
{
"epoch": 0.12653681447713772,
"grad_norm": 0.0024195557925850153,
"learning_rate": 8.555944040214037e-05,
"loss": 0.004,
"step": 916
},
{
"epoch": 0.12667495510429616,
"grad_norm": 0.012309069745242596,
"learning_rate": 8.557312872034538e-05,
"loss": 0.0113,
"step": 917
},
{
"epoch": 0.12681309573145463,
"grad_norm": 0.017295846715569496,
"learning_rate": 8.558680211939955e-05,
"loss": 0.0169,
"step": 918
},
{
"epoch": 0.12695123635861308,
"grad_norm": 0.013580954633653164,
"learning_rate": 8.560046063178874e-05,
"loss": 0.014,
"step": 919
},
{
"epoch": 0.12708937698577152,
"grad_norm": 0.03825406730175018,
"learning_rate": 8.561410428989292e-05,
"loss": 0.0345,
"step": 920
},
{
"epoch": 0.12722751761292997,
"grad_norm": 0.010689822025597095,
"learning_rate": 8.562773312598646e-05,
"loss": 0.0128,
"step": 921
},
{
"epoch": 0.12736565824008841,
"grad_norm": 0.005025971680879593,
"learning_rate": 8.564134717223869e-05,
"loss": 0.0053,
"step": 922
},
{
"epoch": 0.12750379886724686,
"grad_norm": 0.005272683221846819,
"learning_rate": 8.56549464607143e-05,
"loss": 0.0048,
"step": 923
},
{
"epoch": 0.1276419394944053,
"grad_norm": 0.006757293362170458,
"learning_rate": 8.566853102337387e-05,
"loss": 0.0071,
"step": 924
},
{
"epoch": 0.12778008012156375,
"grad_norm": 0.013046073727309704,
"learning_rate": 8.568210089207418e-05,
"loss": 0.0075,
"step": 925
},
{
"epoch": 0.1279182207487222,
"grad_norm": 0.06407853960990906,
"learning_rate": 8.569565609856885e-05,
"loss": 0.0422,
"step": 926
},
{
"epoch": 0.12805636137588064,
"grad_norm": 0.02519175596535206,
"learning_rate": 8.570919667450859e-05,
"loss": 0.0159,
"step": 927
},
{
"epoch": 0.12819450200303908,
"grad_norm": 0.0054557062685489655,
"learning_rate": 8.57227226514418e-05,
"loss": 0.0065,
"step": 928
},
{
"epoch": 0.12833264263019753,
"grad_norm": 0.023621072992682457,
"learning_rate": 8.573623406081491e-05,
"loss": 0.0177,
"step": 929
},
{
"epoch": 0.12847078325735597,
"grad_norm": 0.0037180378567427397,
"learning_rate": 8.574973093397288e-05,
"loss": 0.0041,
"step": 930
},
{
"epoch": 0.12860892388451445,
"grad_norm": 0.020301776006817818,
"learning_rate": 8.576321330215958e-05,
"loss": 0.0146,
"step": 931
},
{
"epoch": 0.1287470645116729,
"grad_norm": 0.004708379507064819,
"learning_rate": 8.577668119651829e-05,
"loss": 0.0063,
"step": 932
},
{
"epoch": 0.12888520513883134,
"grad_norm": 0.0036832920741289854,
"learning_rate": 8.579013464809206e-05,
"loss": 0.0043,
"step": 933
},
{
"epoch": 0.12902334576598978,
"grad_norm": 0.016041239723563194,
"learning_rate": 8.580357368782425e-05,
"loss": 0.0126,
"step": 934
},
{
"epoch": 0.12916148639314823,
"grad_norm": 0.07763788849115372,
"learning_rate": 8.581699834655876e-05,
"loss": 0.0569,
"step": 935
},
{
"epoch": 0.12929962702030667,
"grad_norm": 0.04748247563838959,
"learning_rate": 8.58304086550407e-05,
"loss": 0.0277,
"step": 936
},
{
"epoch": 0.12943776764746512,
"grad_norm": 0.006311520934104919,
"learning_rate": 8.584380464391663e-05,
"loss": 0.0056,
"step": 937
},
{
"epoch": 0.12957590827462356,
"grad_norm": 0.004489540588110685,
"learning_rate": 8.585718634373504e-05,
"loss": 0.0057,
"step": 938
},
{
"epoch": 0.129714048901782,
"grad_norm": 0.01480119675397873,
"learning_rate": 8.587055378494679e-05,
"loss": 0.0148,
"step": 939
},
{
"epoch": 0.12985218952894045,
"grad_norm": 0.025000635534524918,
"learning_rate": 8.588390699790551e-05,
"loss": 0.0173,
"step": 940
},
{
"epoch": 0.1299903301560989,
"grad_norm": 0.0023975621443241835,
"learning_rate": 8.589724601286795e-05,
"loss": 0.0031,
"step": 941
},
{
"epoch": 0.13012847078325734,
"grad_norm": 0.036758214235305786,
"learning_rate": 8.591057085999454e-05,
"loss": 0.0375,
"step": 942
},
{
"epoch": 0.13026661141041582,
"grad_norm": 0.02752687968313694,
"learning_rate": 8.592388156934964e-05,
"loss": 0.0153,
"step": 943
},
{
"epoch": 0.13040475203757426,
"grad_norm": 0.0026282810140401125,
"learning_rate": 8.593717817090202e-05,
"loss": 0.004,
"step": 944
},
{
"epoch": 0.1305428926647327,
"grad_norm": 0.00695823086425662,
"learning_rate": 8.595046069452531e-05,
"loss": 0.0067,
"step": 945
},
{
"epoch": 0.13068103329189115,
"grad_norm": 0.005962789058685303,
"learning_rate": 8.596372916999831e-05,
"loss": 0.007,
"step": 946
},
{
"epoch": 0.1308191739190496,
"grad_norm": 0.19016483426094055,
"learning_rate": 8.597698362700545e-05,
"loss": 0.1038,
"step": 947
},
{
"epoch": 0.13095731454620804,
"grad_norm": 0.009827204048633575,
"learning_rate": 8.599022409513714e-05,
"loss": 0.0071,
"step": 948
},
{
"epoch": 0.1310954551733665,
"grad_norm": 0.0029148852918297052,
"learning_rate": 8.600345060389027e-05,
"loss": 0.0041,
"step": 949
},
{
"epoch": 0.13123359580052493,
"grad_norm": 0.04867846518754959,
"learning_rate": 8.601666318266846e-05,
"loss": 0.0244,
"step": 950
},
{
"epoch": 0.13137173642768338,
"grad_norm": 0.005912700667977333,
"learning_rate": 8.602986186078254e-05,
"loss": 0.0086,
"step": 951
},
{
"epoch": 0.13150987705484182,
"grad_norm": 0.006628013215959072,
"learning_rate": 8.604304666745098e-05,
"loss": 0.0081,
"step": 952
},
{
"epoch": 0.13164801768200027,
"grad_norm": 0.008250492624938488,
"learning_rate": 8.605621763180016e-05,
"loss": 0.0074,
"step": 953
},
{
"epoch": 0.1317861583091587,
"grad_norm": 0.013309494592249393,
"learning_rate": 8.606937478286485e-05,
"loss": 0.0143,
"step": 954
},
{
"epoch": 0.13192429893631716,
"grad_norm": 0.0022110934369266033,
"learning_rate": 8.608251814958855e-05,
"loss": 0.0041,
"step": 955
},
{
"epoch": 0.13206243956347563,
"grad_norm": 0.009347507730126381,
"learning_rate": 8.609564776082388e-05,
"loss": 0.0117,
"step": 956
},
{
"epoch": 0.13220058019063408,
"grad_norm": 0.007068824488669634,
"learning_rate": 8.610876364533301e-05,
"loss": 0.0107,
"step": 957
},
{
"epoch": 0.13233872081779252,
"grad_norm": 0.02662699855864048,
"learning_rate": 8.612186583178794e-05,
"loss": 0.0315,
"step": 958
},
{
"epoch": 0.13247686144495097,
"grad_norm": 0.015548110008239746,
"learning_rate": 8.613495434877095e-05,
"loss": 0.016,
"step": 959
},
{
"epoch": 0.1326150020721094,
"grad_norm": 0.0028302946593612432,
"learning_rate": 8.614802922477495e-05,
"loss": 0.0037,
"step": 960
},
{
"epoch": 0.13275314269926786,
"grad_norm": 0.0047238729894161224,
"learning_rate": 8.616109048820388e-05,
"loss": 0.006,
"step": 961
},
{
"epoch": 0.1328912833264263,
"grad_norm": 0.016504082828760147,
"learning_rate": 8.617413816737302e-05,
"loss": 0.014,
"step": 962
},
{
"epoch": 0.13302942395358475,
"grad_norm": 0.03382395952939987,
"learning_rate": 8.61871722905094e-05,
"loss": 0.0266,
"step": 963
},
{
"epoch": 0.1331675645807432,
"grad_norm": 0.008775300346314907,
"learning_rate": 8.620019288575218e-05,
"loss": 0.01,
"step": 964
},
{
"epoch": 0.13330570520790164,
"grad_norm": 0.0034813378006219864,
"learning_rate": 8.621319998115303e-05,
"loss": 0.0044,
"step": 965
},
{
"epoch": 0.13344384583506008,
"grad_norm": 0.007844419218599796,
"learning_rate": 8.622619360467634e-05,
"loss": 0.0125,
"step": 966
},
{
"epoch": 0.13358198646221853,
"grad_norm": 0.0025214802008122206,
"learning_rate": 8.623917378419984e-05,
"loss": 0.0046,
"step": 967
},
{
"epoch": 0.13372012708937697,
"grad_norm": 0.02048412896692753,
"learning_rate": 8.625214054751471e-05,
"loss": 0.0141,
"step": 968
},
{
"epoch": 0.13385826771653545,
"grad_norm": 0.008773239329457283,
"learning_rate": 8.62650939223261e-05,
"loss": 0.0126,
"step": 969
},
{
"epoch": 0.1339964083436939,
"grad_norm": 0.07702983915805817,
"learning_rate": 8.627803393625343e-05,
"loss": 0.0595,
"step": 970
},
{
"epoch": 0.13413454897085234,
"grad_norm": 0.012115233577787876,
"learning_rate": 8.629096061683071e-05,
"loss": 0.0109,
"step": 971
},
{
"epoch": 0.13427268959801078,
"grad_norm": 0.013093401677906513,
"learning_rate": 8.630387399150694e-05,
"loss": 0.0094,
"step": 972
},
{
"epoch": 0.13441083022516923,
"grad_norm": 0.006639427505433559,
"learning_rate": 8.631677408764646e-05,
"loss": 0.0076,
"step": 973
},
{
"epoch": 0.13454897085232767,
"grad_norm": 0.04810924455523491,
"learning_rate": 8.632966093252927e-05,
"loss": 0.0368,
"step": 974
},
{
"epoch": 0.13468711147948612,
"grad_norm": 0.007319958880543709,
"learning_rate": 8.634253455335138e-05,
"loss": 0.0072,
"step": 975
},
{
"epoch": 0.13482525210664456,
"grad_norm": 0.05134734511375427,
"learning_rate": 8.635539497722518e-05,
"loss": 0.0367,
"step": 976
},
{
"epoch": 0.134963392733803,
"grad_norm": 0.010004381649196148,
"learning_rate": 8.636824223117976e-05,
"loss": 0.0104,
"step": 977
},
{
"epoch": 0.13510153336096145,
"grad_norm": 0.00346842291764915,
"learning_rate": 8.638107634216126e-05,
"loss": 0.0052,
"step": 978
},
{
"epoch": 0.1352396739881199,
"grad_norm": 0.01166492234915495,
"learning_rate": 8.639389733703317e-05,
"loss": 0.0085,
"step": 979
},
{
"epoch": 0.13537781461527834,
"grad_norm": 0.02950691431760788,
"learning_rate": 8.640670524257675e-05,
"loss": 0.0222,
"step": 980
},
{
"epoch": 0.13551595524243681,
"grad_norm": 0.010350341908633709,
"learning_rate": 8.641950008549133e-05,
"loss": 0.0077,
"step": 981
},
{
"epoch": 0.13565409586959526,
"grad_norm": 0.0018771348986774683,
"learning_rate": 8.643228189239453e-05,
"loss": 0.003,
"step": 982
},
{
"epoch": 0.1357922364967537,
"grad_norm": 0.012851797975599766,
"learning_rate": 8.644505068982283e-05,
"loss": 0.011,
"step": 983
},
{
"epoch": 0.13593037712391215,
"grad_norm": 0.004215499386191368,
"learning_rate": 8.645780650423166e-05,
"loss": 0.0054,
"step": 984
},
{
"epoch": 0.1360685177510706,
"grad_norm": 0.13906902074813843,
"learning_rate": 8.647054936199591e-05,
"loss": 0.0696,
"step": 985
},
{
"epoch": 0.13620665837822904,
"grad_norm": 0.007836158387362957,
"learning_rate": 8.648327928941014e-05,
"loss": 0.0095,
"step": 986
},
{
"epoch": 0.13634479900538748,
"grad_norm": 0.0017098481766879559,
"learning_rate": 8.649599631268893e-05,
"loss": 0.0028,
"step": 987
},
{
"epoch": 0.13648293963254593,
"grad_norm": 0.004099471494555473,
"learning_rate": 8.650870045796727e-05,
"loss": 0.0046,
"step": 988
},
{
"epoch": 0.13662108025970437,
"grad_norm": 0.0024917221162468195,
"learning_rate": 8.65213917513008e-05,
"loss": 0.0032,
"step": 989
},
{
"epoch": 0.13675922088686282,
"grad_norm": 0.006787384394556284,
"learning_rate": 8.653407021866616e-05,
"loss": 0.0052,
"step": 990
},
{
"epoch": 0.13689736151402127,
"grad_norm": 0.07070998102426529,
"learning_rate": 8.654673588596133e-05,
"loss": 0.0427,
"step": 991
},
{
"epoch": 0.1370355021411797,
"grad_norm": 0.02854343131184578,
"learning_rate": 8.655938877900593e-05,
"loss": 0.0225,
"step": 992
},
{
"epoch": 0.13717364276833816,
"grad_norm": 0.014134407043457031,
"learning_rate": 8.657202892354155e-05,
"loss": 0.0097,
"step": 993
},
{
"epoch": 0.13731178339549663,
"grad_norm": 0.006957308389246464,
"learning_rate": 8.658465634523199e-05,
"loss": 0.0075,
"step": 994
},
{
"epoch": 0.13744992402265507,
"grad_norm": 0.008567973971366882,
"learning_rate": 8.659727106966367e-05,
"loss": 0.0095,
"step": 995
},
{
"epoch": 0.13758806464981352,
"grad_norm": 0.010392943397164345,
"learning_rate": 8.660987312234595e-05,
"loss": 0.0079,
"step": 996
},
{
"epoch": 0.13772620527697196,
"grad_norm": 0.09674349427223206,
"learning_rate": 8.662246252871131e-05,
"loss": 0.0385,
"step": 997
},
{
"epoch": 0.1378643459041304,
"grad_norm": 0.002456868067383766,
"learning_rate": 8.663503931411578e-05,
"loss": 0.0032,
"step": 998
},
{
"epoch": 0.13800248653128885,
"grad_norm": 0.07229039818048477,
"learning_rate": 8.664760350383924e-05,
"loss": 0.0491,
"step": 999
},
{
"epoch": 0.1381406271584473,
"grad_norm": 0.00896090641617775,
"learning_rate": 8.666015512308563e-05,
"loss": 0.0083,
"step": 1000
}
],
"logging_steps": 1.0,
"max_steps": 28956,
"num_input_tokens_seen": 0,
"num_train_epochs": 4,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}