{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 10.0,
"eval_steps": 500,
"global_step": 5170,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0025906735751295338,
"grad_norm": 1.9656798839569092,
"learning_rate": 0.0,
"loss": 10.453109741210938,
"step": 1
},
{
"epoch": 0.04922279792746114,
"grad_norm": 1.8898563385009766,
"learning_rate": 7.792207792207792e-06,
"loss": 10.416481018066406,
"step": 19
},
{
"epoch": 0.09844559585492228,
"grad_norm": 1.5997825860977173,
"learning_rate": 1.6017316017316017e-05,
"loss": 10.20559371145148,
"step": 38
},
{
"epoch": 0.14766839378238342,
"grad_norm": 1.4691131114959717,
"learning_rate": 2.4242424242424244e-05,
"loss": 9.938501458418997,
"step": 57
},
{
"epoch": 0.19689119170984457,
"grad_norm": 1.5397157669067383,
"learning_rate": 3.246753246753247e-05,
"loss": 9.666742425215872,
"step": 76
},
{
"epoch": 0.24611398963730569,
"grad_norm": 1.5672590732574463,
"learning_rate": 4.0692640692640695e-05,
"loss": 9.390644274259868,
"step": 95
},
{
"epoch": 0.29533678756476683,
"grad_norm": 1.4179465770721436,
"learning_rate": 4.8917748917748915e-05,
"loss": 9.110911319130345,
"step": 114
},
{
"epoch": 0.344559585492228,
"grad_norm": 1.3159687519073486,
"learning_rate": 5.714285714285714e-05,
"loss": 8.792612176192435,
"step": 133
},
{
"epoch": 0.39378238341968913,
"grad_norm": 1.0485610961914062,
"learning_rate": 6.536796536796536e-05,
"loss": 8.488385652240954,
"step": 152
},
{
"epoch": 0.4430051813471503,
"grad_norm": 0.8901123404502869,
"learning_rate": 7.35930735930736e-05,
"loss": 8.201339721679688,
"step": 171
},
{
"epoch": 0.49222797927461137,
"grad_norm": 0.7018402218818665,
"learning_rate": 8.181818181818183e-05,
"loss": 7.978334125719573,
"step": 190
},
{
"epoch": 0.5414507772020726,
"grad_norm": 0.5120431184768677,
"learning_rate": 9.004329004329005e-05,
"loss": 7.839283993369655,
"step": 209
},
{
"epoch": 0.5906735751295337,
"grad_norm": 0.5026654601097107,
"learning_rate": 9.826839826839827e-05,
"loss": 7.7916211579975325,
"step": 228
},
{
"epoch": 0.6398963730569949,
"grad_norm": 0.638586163520813,
"learning_rate": 9.999578456659054e-05,
"loss": 7.715636403937089,
"step": 247
},
{
"epoch": 0.689119170984456,
"grad_norm": 0.6381962895393372,
"learning_rate": 9.997834329912887e-05,
"loss": 7.697683233963816,
"step": 266
},
{
"epoch": 0.7383419689119171,
"grad_norm": 0.6784098148345947,
"learning_rate": 9.994738114801949e-05,
"loss": 7.658172607421875,
"step": 285
},
{
"epoch": 0.7875647668393783,
"grad_norm": 0.7522804141044617,
"learning_rate": 9.990290648960332e-05,
"loss": 7.618246781198602,
"step": 304
},
{
"epoch": 0.8367875647668394,
"grad_norm": 1.413805365562439,
"learning_rate": 9.984493135582543e-05,
"loss": 7.569692511307566,
"step": 323
},
{
"epoch": 0.8860103626943006,
"grad_norm": 0.769112229347229,
"learning_rate": 9.977347143098e-05,
"loss": 7.520751953125,
"step": 342
},
{
"epoch": 0.9352331606217616,
"grad_norm": 0.8767187595367432,
"learning_rate": 9.96885460474671e-05,
"loss": 7.502451043379934,
"step": 361
},
{
"epoch": 0.9844559585492227,
"grad_norm": 0.7236428260803223,
"learning_rate": 9.959017818056273e-05,
"loss": 7.4918670654296875,
"step": 380
},
{
"epoch": 1.0336787564766838,
"grad_norm": 0.5859951376914978,
"learning_rate": 9.947839444220306e-05,
"loss": 7.4534374036287,
"step": 399
},
{
"epoch": 1.0829015544041452,
"grad_norm": 0.770540714263916,
"learning_rate": 9.935322507378509e-05,
"loss": 7.40715187474301,
"step": 418
},
{
"epoch": 1.1321243523316062,
"grad_norm": 0.6562390327453613,
"learning_rate": 9.921470393798522e-05,
"loss": 7.423827321905839,
"step": 437
},
{
"epoch": 1.1813471502590673,
"grad_norm": 0.7159621715545654,
"learning_rate": 9.906286850959825e-05,
"loss": 7.380163895456414,
"step": 456
},
{
"epoch": 1.2305699481865284,
"grad_norm": 0.6420731544494629,
"learning_rate": 9.889775986539913e-05,
"loss": 7.33871781198602,
"step": 475
},
{
"epoch": 1.2797927461139897,
"grad_norm": 0.7125868797302246,
"learning_rate": 9.871942267303034e-05,
"loss": 7.3665418122944075,
"step": 494
},
{
"epoch": 1.3290155440414508,
"grad_norm": 0.7381535768508911,
"learning_rate": 9.852790517891754e-05,
"loss": 7.347101311934622,
"step": 513
},
{
"epoch": 1.378238341968912,
"grad_norm": 0.7120394110679626,
"learning_rate": 9.83232591952175e-05,
"loss": 7.310685810289885,
"step": 532
},
{
"epoch": 1.427461139896373,
"grad_norm": 0.7593790888786316,
"learning_rate": 9.810554008580081e-05,
"loss": 7.298673127826891,
"step": 551
},
{
"epoch": 1.4766839378238341,
"grad_norm": 0.7787287831306458,
"learning_rate": 9.787480675127431e-05,
"loss": 7.280764931126645,
"step": 570
},
{
"epoch": 1.5259067357512954,
"grad_norm": 0.8570913076400757,
"learning_rate": 9.763112161304621e-05,
"loss": 7.271910014905427,
"step": 589
},
{
"epoch": 1.5751295336787565,
"grad_norm": 0.6566023826599121,
"learning_rate": 9.737455059643903e-05,
"loss": 7.260608070775082,
"step": 608
},
{
"epoch": 1.6243523316062176,
"grad_norm": 0.6554204821586609,
"learning_rate": 9.710516311285445e-05,
"loss": 7.235391717208059,
"step": 627
},
{
"epoch": 1.6735751295336787,
"grad_norm": 0.8252356648445129,
"learning_rate": 9.682303204099517e-05,
"loss": 7.23214400442023,
"step": 646
},
{
"epoch": 1.7227979274611398,
"grad_norm": 0.7991335988044739,
"learning_rate": 9.652823370714861e-05,
"loss": 7.189540662263569,
"step": 665
},
{
"epoch": 1.7720207253886011,
"grad_norm": 0.6291921734809875,
"learning_rate": 9.622084786453804e-05,
"loss": 7.1787647448088,
"step": 684
},
{
"epoch": 1.8212435233160622,
"grad_norm": 0.6076451539993286,
"learning_rate": 9.590095767174654e-05,
"loss": 7.1707924290707235,
"step": 703
},
{
"epoch": 1.8704663212435233,
"grad_norm": 0.7197585105895996,
"learning_rate": 9.556864967021966e-05,
"loss": 7.1632947419819075,
"step": 722
},
{
"epoch": 1.9196891191709846,
"grad_norm": 0.8579983711242676,
"learning_rate": 9.522401376085302e-05,
"loss": 7.2033129240337175,
"step": 741
},
{
"epoch": 1.9689119170984455,
"grad_norm": 0.7442916631698608,
"learning_rate": 9.486714317967097e-05,
"loss": 7.15290671900699,
"step": 760
},
{
"epoch": 2.018134715025907,
"grad_norm": 0.6029990911483765,
"learning_rate": 9.449813447260292e-05,
"loss": 7.155892623098273,
"step": 779
},
{
"epoch": 2.0673575129533677,
"grad_norm": 0.8712852597236633,
"learning_rate": 9.411708746936439e-05,
"loss": 7.1117409153988485,
"step": 798
},
{
"epoch": 2.116580310880829,
"grad_norm": 0.7027563452720642,
"learning_rate": 9.372410525644952e-05,
"loss": 7.13547074167352,
"step": 817
},
{
"epoch": 2.1658031088082903,
"grad_norm": 0.7402092814445496,
"learning_rate": 9.33192941492427e-05,
"loss": 7.12723099557977,
"step": 836
},
{
"epoch": 2.215025906735751,
"grad_norm": 0.6641818284988403,
"learning_rate": 9.290276366325638e-05,
"loss": 7.079131276983964,
"step": 855
},
{
"epoch": 2.2642487046632125,
"grad_norm": 0.654100239276886,
"learning_rate": 9.247462648450348e-05,
"loss": 7.120608681126645,
"step": 874
},
{
"epoch": 2.313471502590674,
"grad_norm": 0.6241465210914612,
"learning_rate": 9.203499843901173e-05,
"loss": 7.0452880859375,
"step": 893
},
{
"epoch": 2.3626943005181347,
"grad_norm": 0.6422539353370667,
"learning_rate": 9.158399846148886e-05,
"loss": 7.0627602025082235,
"step": 912
},
{
"epoch": 2.411917098445596,
"grad_norm": 0.9347654581069946,
"learning_rate": 9.11217485631465e-05,
"loss": 7.087360582853618,
"step": 931
},
{
"epoch": 2.461139896373057,
"grad_norm": 0.6797104477882385,
"learning_rate": 9.064837379869189e-05,
"loss": 7.03591597707648,
"step": 950
},
{
"epoch": 2.510362694300518,
"grad_norm": 0.7567751407623291,
"learning_rate": 9.016400223249635e-05,
"loss": 7.0485181306537825,
"step": 969
},
{
"epoch": 2.5595854922279795,
"grad_norm": 0.9307654500007629,
"learning_rate": 8.966876490394927e-05,
"loss": 7.068600303248355,
"step": 988
},
{
"epoch": 2.6088082901554404,
"grad_norm": 0.6442763805389404,
"learning_rate": 8.91627957920074e-05,
"loss": 7.0317848607113485,
"step": 1007
},
{
"epoch": 2.6580310880829017,
"grad_norm": 0.755132257938385,
"learning_rate": 8.8646231778949e-05,
"loss": 7.035190783048931,
"step": 1026
},
{
"epoch": 2.7072538860103625,
"grad_norm": 0.9993160367012024,
"learning_rate": 8.811921261334224e-05,
"loss": 7.045703285618832,
"step": 1045
},
{
"epoch": 2.756476683937824,
"grad_norm": 0.6708552837371826,
"learning_rate": 8.758188087223845e-05,
"loss": 7.088768406918175,
"step": 1064
},
{
"epoch": 2.805699481865285,
"grad_norm": 0.6677550077438354,
"learning_rate": 8.703438192260007e-05,
"loss": 7.025689376027961,
"step": 1083
},
{
"epoch": 2.854922279792746,
"grad_norm": 0.8610202074050903,
"learning_rate": 8.647686388197374e-05,
"loss": 6.9846753572162825,
"step": 1102
},
{
"epoch": 2.9041450777202074,
"grad_norm": 0.6401988863945007,
"learning_rate": 8.59094775784194e-05,
"loss": 7.020596955951891,
"step": 1121
},
{
"epoch": 2.9533678756476682,
"grad_norm": 0.7033092975616455,
"learning_rate": 8.533237650970602e-05,
"loss": 7.012691297029194,
"step": 1140
},
{
"epoch": 2.241779497098646,
"grad_norm": 1.8401119709014893,
"learning_rate": 8.474571680178515e-05,
"loss": 7.131598949432373,
"step": 1159
},
{
"epoch": 2.2785299806576402,
"grad_norm": 0.9385874271392822,
"learning_rate": 9.233682395815343e-05,
"loss": 6.8397754869963,
"step": 1178
},
{
"epoch": 2.3152804642166345,
"grad_norm": 0.9496298432350159,
"learning_rate": 9.200700008023644e-05,
"loss": 6.565899096037212,
"step": 1197
},
{
"epoch": 2.3520309477756287,
"grad_norm": 0.7686163187026978,
"learning_rate": 9.167084229191691e-05,
"loss": 6.4427024439761515,
"step": 1216
},
{
"epoch": 2.388781431334623,
"grad_norm": 0.6687182188034058,
"learning_rate": 9.132840127982587e-05,
"loss": 6.356098375822368,
"step": 1235
},
{
"epoch": 2.425531914893617,
"grad_norm": 0.9565212726593018,
"learning_rate": 9.097972867799301e-05,
"loss": 6.326987818667763,
"step": 1254
},
{
"epoch": 2.4622823984526114,
"grad_norm": 0.9246956706047058,
"learning_rate": 9.062487706006115e-05,
"loss": 6.313424762926604,
"step": 1273
},
{
"epoch": 2.4990328820116052,
"grad_norm": 0.7130828499794006,
"learning_rate": 9.026389993135918e-05,
"loss": 6.297733106111226,
"step": 1292
},
{
"epoch": 2.5357833655705995,
"grad_norm": 0.6469439268112183,
"learning_rate": 8.989685172083433e-05,
"loss": 6.265300549958882,
"step": 1311
},
{
"epoch": 2.5725338491295937,
"grad_norm": 0.7045807838439941,
"learning_rate": 8.952378777284526e-05,
"loss": 6.24040422941509,
"step": 1330
},
{
"epoch": 2.609284332688588,
"grad_norm": 0.8098029494285583,
"learning_rate": 8.914476433881713e-05,
"loss": 6.2236998708624585,
"step": 1349
},
{
"epoch": 2.646034816247582,
"grad_norm": 0.737579345703125,
"learning_rate": 8.875983856875986e-05,
"loss": 6.20395901328639,
"step": 1368
},
{
"epoch": 2.6827852998065764,
"grad_norm": 0.8462916612625122,
"learning_rate": 8.836906850265096e-05,
"loss": 6.195942125822368,
"step": 1387
},
{
"epoch": 2.7195357833655707,
"grad_norm": 0.8085110187530518,
"learning_rate": 8.797251306168407e-05,
"loss": 6.188254908511513,
"step": 1406
},
{
"epoch": 2.756286266924565,
"grad_norm": 1.0851175785064697,
"learning_rate": 8.757023203938474e-05,
"loss": 6.1910757767526725,
"step": 1425
},
{
"epoch": 2.793036750483559,
"grad_norm": 1.278171420097351,
"learning_rate": 8.716228609259462e-05,
"loss": 6.186161643580387,
"step": 1444
},
{
"epoch": 2.829787234042553,
"grad_norm": 0.7596274614334106,
"learning_rate": 8.674873673232546e-05,
"loss": 6.173150313527961,
"step": 1463
},
{
"epoch": 2.866537717601547,
"grad_norm": 0.7558140754699707,
"learning_rate": 8.632964631448441e-05,
"loss": 6.155330457185444,
"step": 1482
},
{
"epoch": 2.9032882011605414,
"grad_norm": 0.6216167211532593,
"learning_rate": 8.590507803047172e-05,
"loss": 6.167594106573808,
"step": 1501
},
{
"epoch": 2.9400386847195357,
"grad_norm": 0.643828272819519,
"learning_rate": 8.547509589765275e-05,
"loss": 6.1355847810444075,
"step": 1520
},
{
"epoch": 2.97678916827853,
"grad_norm": 0.8299704194068909,
"learning_rate": 8.503976474970517e-05,
"loss": 6.138166327225535,
"step": 1539
},
{
"epoch": 3.013539651837524,
"grad_norm": 1.1598100662231445,
"learning_rate": 8.459915022684329e-05,
"loss": 6.094761497096012,
"step": 1558
},
{
"epoch": 3.0502901353965184,
"grad_norm": 0.6949910521507263,
"learning_rate": 8.415331876592055e-05,
"loss": 6.0926979466488485,
"step": 1577
},
{
"epoch": 3.0870406189555126,
"grad_norm": 0.6773774027824402,
"learning_rate": 8.370233759041219e-05,
"loss": 6.107613814504523,
"step": 1596
},
{
"epoch": 3.123791102514507,
"grad_norm": 0.6850952506065369,
"learning_rate": 8.324627470027901e-05,
"loss": 6.105125025699013,
"step": 1615
},
{
"epoch": 3.160541586073501,
"grad_norm": 0.7101475596427917,
"learning_rate": 8.278519886171423e-05,
"loss": 6.1307517603824015,
"step": 1634
},
{
"epoch": 3.1972920696324953,
"grad_norm": 0.8034424781799316,
"learning_rate": 8.231917959677473e-05,
"loss": 6.124847412109375,
"step": 1653
},
{
"epoch": 3.2340425531914896,
"grad_norm": 0.7378991842269897,
"learning_rate": 8.184828717289845e-05,
"loss": 6.102732608192845,
"step": 1672
},
{
"epoch": 3.2707930367504834,
"grad_norm": 0.8416258692741394,
"learning_rate": 8.13725925923092e-05,
"loss": 6.12664112291838,
"step": 1691
},
{
"epoch": 3.3075435203094776,
"grad_norm": 0.7685152292251587,
"learning_rate": 8.089216758131087e-05,
"loss": 6.120386224043997,
"step": 1710
},
{
"epoch": 3.344294003868472,
"grad_norm": 0.8043097853660583,
"learning_rate": 8.04070845794723e-05,
"loss": 6.096702575683594,
"step": 1729
},
{
"epoch": 3.381044487427466,
"grad_norm": 0.8346231579780579,
"learning_rate": 7.991741672870475e-05,
"loss": 6.11234564530222,
"step": 1748
},
{
"epoch": 3.4177949709864603,
"grad_norm": 0.6343578100204468,
"learning_rate": 7.942323786223333e-05,
"loss": 6.072033932334499,
"step": 1767
},
{
"epoch": 3.4545454545454546,
"grad_norm": 0.7200827598571777,
"learning_rate": 7.892462249346432e-05,
"loss": 6.075145922209087,
"step": 1786
},
{
"epoch": 3.491295938104449,
"grad_norm": 0.9759312868118286,
"learning_rate": 7.84216458047498e-05,
"loss": 6.069999694824219,
"step": 1805
},
{
"epoch": 3.528046421663443,
"grad_norm": 0.8275994658470154,
"learning_rate": 7.79143836360516e-05,
"loss": 6.080008255807977,
"step": 1824
},
{
"epoch": 3.564796905222437,
"grad_norm": 0.7960038185119629,
"learning_rate": 7.740291247350581e-05,
"loss": 6.059996353952508,
"step": 1843
},
{
"epoch": 3.601547388781431,
"grad_norm": 0.7582727074623108,
"learning_rate": 7.688730943789023e-05,
"loss": 6.085317511307566,
"step": 1862
},
{
"epoch": 3.6382978723404253,
"grad_norm": 0.804373025894165,
"learning_rate": 7.636765227299576e-05,
"loss": 6.070657027395148,
"step": 1881
},
{
"epoch": 3.6750483558994196,
"grad_norm": 0.8290799856185913,
"learning_rate": 7.584401933390404e-05,
"loss": 6.05766457005551,
"step": 1900
},
{
"epoch": 3.711798839458414,
"grad_norm": 0.9162618517875671,
"learning_rate": 7.531648957517301e-05,
"loss": 6.049548098915501,
"step": 1919
},
{
"epoch": 3.748549323017408,
"grad_norm": 0.8927680253982544,
"learning_rate": 7.478514253893181e-05,
"loss": 6.04520697342722,
"step": 1938
},
{
"epoch": 3.7852998065764023,
"grad_norm": 0.7886133790016174,
"learning_rate": 7.425005834288738e-05,
"loss": 6.05512157239412,
"step": 1957
},
{
"epoch": 3.8220502901353965,
"grad_norm": 0.9593386054039001,
"learning_rate": 7.371131766824399e-05,
"loss": 6.040921261436061,
"step": 1976
},
{
"epoch": 3.858800773694391,
"grad_norm": 0.7840445637702942,
"learning_rate": 7.316900174753806e-05,
"loss": 6.032860203793175,
"step": 1995
},
{
"epoch": 3.895551257253385,
"grad_norm": 0.7778313159942627,
"learning_rate": 7.262319235238967e-05,
"loss": 6.04224355597245,
"step": 2014
},
{
"epoch": 3.9323017408123793,
"grad_norm": 0.6466183662414551,
"learning_rate": 7.207397178117286e-05,
"loss": 6.039953934518914,
"step": 2033
},
{
"epoch": 3.9690522243713735,
"grad_norm": 0.8412506580352783,
"learning_rate": 7.152142284660659e-05,
"loss": 6.043909173262747,
"step": 2052
},
{
"epoch": 4.005802707930368,
"grad_norm": 0.7424644231796265,
"learning_rate": 7.096562886326784e-05,
"loss": 6.020910965768914,
"step": 2071
},
{
"epoch": 4.042553191489362,
"grad_norm": 0.8717330694198608,
"learning_rate": 7.040667363502946e-05,
"loss": 6.038821973298726,
"step": 2090
},
{
"epoch": 4.079303675048356,
"grad_norm": 0.6821705102920532,
"learning_rate": 6.984464144242395e-05,
"loss": 5.9910033376593335,
"step": 2109
},
{
"epoch": 4.1160541586073505,
"grad_norm": 0.8593968749046326,
"learning_rate": 6.92796170299354e-05,
"loss": 6.038429260253906,
"step": 2128
},
{
"epoch": 4.152804642166345,
"grad_norm": 0.6946661472320557,
"learning_rate": 6.871168559322163e-05,
"loss": 6.043051468698602,
"step": 2147
},
{
"epoch": 4.189555125725338,
"grad_norm": 0.872968316078186,
"learning_rate": 6.814093276626812e-05,
"loss": 6.0379281294973275,
"step": 2166
},
{
"epoch": 4.226305609284332,
"grad_norm": 0.8149793744087219,
"learning_rate": 6.756744460847593e-05,
"loss": 6.025306300113075,
"step": 2185
},
{
"epoch": 4.2630560928433265,
"grad_norm": 0.8134008646011353,
"learning_rate": 6.699130759168552e-05,
"loss": 6.029500860916941,
"step": 2204
},
{
"epoch": 4.299806576402321,
"grad_norm": 0.6807850003242493,
"learning_rate": 6.641260858713825e-05,
"loss": 6.02039899324116,
"step": 2223
},
{
"epoch": 4.336557059961315,
"grad_norm": 0.7424802780151367,
"learning_rate": 6.583143485237783e-05,
"loss": 6.042245965254934,
"step": 2242
},
{
"epoch": 4.373307543520309,
"grad_norm": 1.0088832378387451,
"learning_rate": 6.524787401809335e-05,
"loss": 5.990176953767476,
"step": 2261
},
{
"epoch": 4.4100580270793035,
"grad_norm": 0.8164299130439758,
"learning_rate": 6.466201407490622e-05,
"loss": 6.0073804353412825,
"step": 2280
},
{
"epoch": 4.446808510638298,
"grad_norm": 0.8493334650993347,
"learning_rate": 6.40739433601026e-05,
"loss": 6.001832259328742,
"step": 2299
},
{
"epoch": 4.483558994197292,
"grad_norm": 0.8219246864318848,
"learning_rate": 6.348375054431385e-05,
"loss": 6.0019788240131575,
"step": 2318
},
{
"epoch": 4.520309477756286,
"grad_norm": 0.8987888693809509,
"learning_rate": 6.289152461814648e-05,
"loss": 5.987865648771587,
"step": 2337
},
{
"epoch": 4.5570599613152805,
"grad_norm": 0.7489388585090637,
"learning_rate": 6.229735487876398e-05,
"loss": 6.025086252312911,
"step": 2356
},
{
"epoch": 4.593810444874275,
"grad_norm": 0.8458240032196045,
"learning_rate": 6.170133091642245e-05,
"loss": 5.987234015213816,
"step": 2375
},
{
"epoch": 4.630560928433269,
"grad_norm": 0.7269588112831116,
"learning_rate": 6.110354260096183e-05,
"loss": 5.985632645456414,
"step": 2394
},
{
"epoch": 4.667311411992263,
"grad_norm": 0.7618328928947449,
"learning_rate": 6.050408006825525e-05,
"loss": 5.984134071751645,
"step": 2413
},
{
"epoch": 4.704061895551257,
"grad_norm": 0.8415816426277161,
"learning_rate": 5.9903033706618116e-05,
"loss": 6.002414904142681,
"step": 2432
},
{
"epoch": 4.740812379110252,
"grad_norm": 0.8867862224578857,
"learning_rate": 5.930049414317913e-05,
"loss": 5.995708264802632,
"step": 2451
},
{
"epoch": 4.777562862669246,
"grad_norm": 1.2235946655273438,
"learning_rate": 5.869655223021529e-05,
"loss": 6.0039624665912825,
"step": 2470
},
{
"epoch": 4.81431334622824,
"grad_norm": 0.7073670625686646,
"learning_rate": 5.8091299031453106e-05,
"loss": 6.0098114013671875,
"step": 2489
},
{
"epoch": 4.851063829787234,
"grad_norm": 0.7804876565933228,
"learning_rate": 5.748482580833766e-05,
"loss": 5.9925079345703125,
"step": 2508
},
{
"epoch": 4.887814313346229,
"grad_norm": 0.8264277577400208,
"learning_rate": 5.6877224006272086e-05,
"loss": 5.97403275339227,
"step": 2527
},
{
"epoch": 4.924564796905223,
"grad_norm": 0.9556435942649841,
"learning_rate": 5.626858524082922e-05,
"loss": 6.007706893117804,
"step": 2546
},
{
"epoch": 4.961315280464216,
"grad_norm": 1.08724045753479,
"learning_rate": 5.5659001283937526e-05,
"loss": 5.989010057951274,
"step": 2565
},
{
"epoch": 4.9980657640232105,
"grad_norm": 0.8453856706619263,
"learning_rate": 5.5048564050043637e-05,
"loss": 5.995357714201274,
"step": 2584
},
{
"epoch": 5.034816247582205,
"grad_norm": 1.1737676858901978,
"learning_rate": 5.4437365582253185e-05,
"loss": 5.977565564607319,
"step": 2603
},
{
"epoch": 5.071566731141199,
"grad_norm": 0.934160590171814,
"learning_rate": 5.382549803845235e-05,
"loss": 5.981942427785773,
"step": 2622
},
{
"epoch": 5.108317214700193,
"grad_norm": 0.8727831840515137,
"learning_rate": 5.321305367741215e-05,
"loss": 5.968893352307771,
"step": 2641
},
{
"epoch": 5.145067698259187,
"grad_norm": 0.8856471180915833,
"learning_rate": 5.260012484487739e-05,
"loss": 5.98333057604338,
"step": 2660
},
{
"epoch": 5.181818181818182,
"grad_norm": 0.7901120781898499,
"learning_rate": 5.198680395964256e-05,
"loss": 5.964969434236226,
"step": 2679
},
{
"epoch": 5.218568665377176,
"grad_norm": 0.7979159355163574,
"learning_rate": 5.137318349961677e-05,
"loss": 5.9825082076223275,
"step": 2698
},
{
"epoch": 5.25531914893617,
"grad_norm": 0.9568471312522888,
"learning_rate": 5.07593559878797e-05,
"loss": 5.916827954744038,
"step": 2717
},
{
"epoch": 5.292069632495164,
"grad_norm": 0.6639050245285034,
"learning_rate": 5.0145413978730726e-05,
"loss": 5.972771895559211,
"step": 2736
},
{
"epoch": 5.328820116054159,
"grad_norm": 1.054398536682129,
"learning_rate": 4.9531450043733424e-05,
"loss": 5.95155173853824,
"step": 2755
},
{
"epoch": 5.365570599613153,
"grad_norm": 0.8115559220314026,
"learning_rate": 4.891755675775739e-05,
"loss": 5.972399259868421,
"step": 2774
},
{
"epoch": 5.402321083172147,
"grad_norm": 0.8311302661895752,
"learning_rate": 4.830382668501961e-05,
"loss": 5.989575436240749,
"step": 2793
},
{
"epoch": 5.439071566731141,
"grad_norm": 0.7544533014297485,
"learning_rate": 4.7690352365127384e-05,
"loss": 5.947163230494449,
"step": 2812
},
{
"epoch": 5.475822050290136,
"grad_norm": 0.9242804050445557,
"learning_rate": 4.7077226299125066e-05,
"loss": 5.953185633609169,
"step": 2831
},
{
"epoch": 5.51257253384913,
"grad_norm": 0.8004162311553955,
"learning_rate": 4.646454093554644e-05,
"loss": 5.965155350534539,
"step": 2850
},
{
"epoch": 5.549323017408124,
"grad_norm": 0.9713261127471924,
"learning_rate": 4.5852388656475256e-05,
"loss": 5.955127916837993,
"step": 2869
},
{
"epoch": 5.586073500967118,
"grad_norm": 0.795760452747345,
"learning_rate": 4.524086176361549e-05,
"loss": 5.981726395456414,
"step": 2888
},
{
"epoch": 5.6228239845261125,
"grad_norm": 0.9622194170951843,
"learning_rate": 4.463005246437407e-05,
"loss": 5.9348289088199015,
"step": 2907
},
{
"epoch": 5.659574468085106,
"grad_norm": 0.9427851438522339,
"learning_rate": 4.402005285795745e-05,
"loss": 5.9512381302682975,
"step": 2926
},
{
"epoch": 5.696324951644101,
"grad_norm": 0.8677796721458435,
"learning_rate": 4.341095492148483e-05,
"loss": 5.980510109349301,
"step": 2945
},
{
"epoch": 5.733075435203094,
"grad_norm": 0.8452844619750977,
"learning_rate": 4.2802850496119536e-05,
"loss": 5.963108665064762,
"step": 2964
},
{
"epoch": 5.769825918762089,
"grad_norm": 0.8970301747322083,
"learning_rate": 4.219583127322104e-05,
"loss": 5.97346335963199,
"step": 2983
},
{
"epoch": 5.806576402321083,
"grad_norm": 0.8443690538406372,
"learning_rate": 4.158998878051962e-05,
"loss": 5.9706971017937915,
"step": 3002
},
{
"epoch": 5.843326885880077,
"grad_norm": 0.9244300723075867,
"learning_rate": 4.098541436831541e-05,
"loss": 5.951765361585115,
"step": 3021
},
{
"epoch": 5.880077369439071,
"grad_norm": 0.784065842628479,
"learning_rate": 4.038219919570455e-05,
"loss": 5.960685328433388,
"step": 3040
},
{
"epoch": 5.916827852998066,
"grad_norm": 0.9272547960281372,
"learning_rate": 3.978043421683395e-05,
"loss": 5.95731634842722,
"step": 3059
},
{
"epoch": 5.95357833655706,
"grad_norm": 0.7737032771110535,
"learning_rate": 3.918021016718704e-05,
"loss": 5.947649905556126,
"step": 3078
},
{
"epoch": 5.990328820116054,
"grad_norm": 0.7086819410324097,
"learning_rate": 3.858161754990245e-05,
"loss": 5.95235162032278,
"step": 3097
},
{
"epoch": 6.027079303675048,
"grad_norm": 0.7864798307418823,
"learning_rate": 3.7984746622127765e-05,
"loss": 5.9433951126901725,
"step": 3116
},
{
"epoch": 6.0638297872340425,
"grad_norm": 0.8476478457450867,
"learning_rate": 3.738968738141033e-05,
"loss": 5.926896346242804,
"step": 3135
},
{
"epoch": 6.100580270793037,
"grad_norm": 0.7427075505256653,
"learning_rate": 3.679652955212719e-05,
"loss": 5.956519277472245,
"step": 3154
},
{
"epoch": 6.137330754352031,
"grad_norm": 0.9161301851272583,
"learning_rate": 3.620536257195635e-05,
"loss": 5.917147184673109,
"step": 3173
},
{
"epoch": 6.174081237911025,
"grad_norm": 0.8627088665962219,
"learning_rate": 3.561627557839099e-05,
"loss": 5.942029451069079,
"step": 3192
},
{
"epoch": 6.2108317214700195,
"grad_norm": 0.7476882338523865,
"learning_rate": 3.502935739529928e-05,
"loss": 5.934722097296464,
"step": 3211
},
{
"epoch": 6.247582205029014,
"grad_norm": 0.793505847454071,
"learning_rate": 3.444469651953126e-05,
"loss": 5.916718733938117,
"step": 3230
},
{
"epoch": 6.284332688588008,
"grad_norm": 0.7478469610214233,
"learning_rate": 3.3862381107575005e-05,
"loss": 5.954738416169819,
"step": 3249
},
{
"epoch": 6.321083172147002,
"grad_norm": 0.8610250353813171,
"learning_rate": 3.328249896226428e-05,
"loss": 5.922407852975946,
"step": 3268
},
{
"epoch": 6.3578336557059965,
"grad_norm": 0.8182870745658875,
"learning_rate": 3.270513751953944e-05,
"loss": 5.919796190763774,
"step": 3287
},
{
"epoch": 6.394584139264991,
"grad_norm": 0.7998473644256592,
"learning_rate": 3.213038383526355e-05,
"loss": 5.920766730057566,
"step": 3306
},
{
"epoch": 6.431334622823985,
"grad_norm": 0.8772637248039246,
"learning_rate": 3.155832457209603e-05,
"loss": 5.93222367136102,
"step": 3325
},
{
"epoch": 6.468085106382979,
"grad_norm": 0.7384529709815979,
"learning_rate": 3.0989045986425325e-05,
"loss": 5.92653415077611,
"step": 3344
},
{
"epoch": 6.5048355899419725,
"grad_norm": 0.8872863054275513,
"learning_rate": 3.0422633915363115e-05,
"loss": 5.924022072239926,
"step": 3363
},
{
"epoch": 6.541586073500967,
"grad_norm": 0.7358129024505615,
"learning_rate": 2.9859173763801457e-05,
"loss": 5.946694625051398,
"step": 3382
},
{
"epoch": 6.578336557059961,
"grad_norm": 0.9394431114196777,
"learning_rate": 2.9298750491535382e-05,
"loss": 5.954012017501028,
"step": 3401
},
{
"epoch": 6.615087040618955,
"grad_norm": 0.8594652414321899,
"learning_rate": 2.8741448600452326e-05,
"loss": 5.915107727050781,
"step": 3420
},
{
"epoch": 6.6518375241779495,
"grad_norm": 0.7383516430854797,
"learning_rate": 2.818735212179091e-05,
"loss": 5.930320739746094,
"step": 3439
},
{
"epoch": 6.688588007736944,
"grad_norm": 0.7550167441368103,
"learning_rate": 2.763654460347035e-05,
"loss": 5.959585892526727,
"step": 3458
},
{
"epoch": 6.725338491295938,
"grad_norm": 0.9746566414833069,
"learning_rate": 2.7089109097493003e-05,
"loss": 5.915300469649465,
"step": 3477
},
{
"epoch": 6.762088974854932,
"grad_norm": 0.8552682995796204,
"learning_rate": 2.654512814742159e-05,
"loss": 5.918191608629729,
"step": 3496
},
{
"epoch": 6.7988394584139265,
"grad_norm": 0.7178594470024109,
"learning_rate": 2.6004683775933116e-05,
"loss": 5.931622153834293,
"step": 3515
},
{
"epoch": 6.835589941972921,
"grad_norm": 0.8271778225898743,
"learning_rate": 2.5467857472451234e-05,
"loss": 5.90688042891653,
"step": 3534
},
{
"epoch": 6.872340425531915,
"grad_norm": 0.8247523903846741,
"learning_rate": 2.4934730180859138e-05,
"loss": 5.911947149979441,
"step": 3553
},
{
"epoch": 6.909090909090909,
"grad_norm": 0.9226091504096985,
"learning_rate": 2.4405382287294666e-05,
"loss": 5.909151579204359,
"step": 3572
},
{
"epoch": 6.945841392649903,
"grad_norm": 0.8541250228881836,
"learning_rate": 2.387989360802943e-05,
"loss": 5.93184380782278,
"step": 3591
},
{
"epoch": 6.982591876208898,
"grad_norm": 0.7963822484016418,
"learning_rate": 2.3358343377434074e-05,
"loss": 5.926949752004523,
"step": 3610
},
{
"epoch": 7.019342359767892,
"grad_norm": 0.9335833191871643,
"learning_rate": 2.2840810236030986e-05,
"loss": 5.90260114167866,
"step": 3629
},
{
"epoch": 7.056092843326886,
"grad_norm": 0.8136786222457886,
"learning_rate": 2.2327372218636767e-05,
"loss": 5.914011101973684,
"step": 3648
},
{
"epoch": 7.09284332688588,
"grad_norm": 0.9287970066070557,
"learning_rate": 2.181810674259601e-05,
"loss": 5.9164786087839225,
"step": 3667
},
{
"epoch": 7.129593810444875,
"grad_norm": 0.9184285998344421,
"learning_rate": 2.1313090596108043e-05,
"loss": 5.9290771484375,
"step": 3686
},
{
"epoch": 7.166344294003869,
"grad_norm": 0.7558146715164185,
"learning_rate": 2.081239992664874e-05,
"loss": 5.8995819091796875,
"step": 3705
},
{
"epoch": 7.203094777562863,
"grad_norm": 0.903976321220398,
"learning_rate": 2.0316110229488718e-05,
"loss": 5.905699880499589,
"step": 3724
},
{
"epoch": 7.2398452611218564,
"grad_norm": 0.6883618235588074,
"learning_rate": 1.9824296336310056e-05,
"loss": 5.935149744937294,
"step": 3743
},
{
"epoch": 7.276595744680851,
"grad_norm": 0.82213294506073,
"learning_rate": 1.9337032403923018e-05,
"loss": 5.902831228155839,
"step": 3762
},
{
"epoch": 7.313346228239845,
"grad_norm": 0.8082081079483032,
"learning_rate": 1.8854391903084457e-05,
"loss": 5.928005419279399,
"step": 3781
},
{
"epoch": 7.350096711798839,
"grad_norm": 0.8317619562149048,
"learning_rate": 1.8376447607419833e-05,
"loss": 5.936038368626645,
"step": 3800
},
{
"epoch": 7.386847195357833,
"grad_norm": 0.8557573556900024,
"learning_rate": 1.790327158245012e-05,
"loss": 5.898858321340461,
"step": 3819
},
{
"epoch": 7.423597678916828,
"grad_norm": 0.8798925876617432,
"learning_rate": 1.7434935174725686e-05,
"loss": 5.881325972707648,
"step": 3838
},
{
"epoch": 7.460348162475822,
"grad_norm": 0.7644050121307373,
"learning_rate": 1.697150900106844e-05,
"loss": 5.888987491005345,
"step": 3857
},
{
"epoch": 7.497098646034816,
"grad_norm": 0.8622159361839294,
"learning_rate": 1.6513062937924155e-05,
"loss": 5.928788837633635,
"step": 3876
},
{
"epoch": 7.53384912959381,
"grad_norm": 0.9436091780662537,
"learning_rate": 1.6059666110826277e-05,
"loss": 5.897299114026521,
"step": 3895
},
{
"epoch": 7.570599613152805,
"grad_norm": 0.8347904682159424,
"learning_rate": 1.5611386883972995e-05,
"loss": 5.9460095857319075,
"step": 3914
},
{
"epoch": 7.607350096711799,
"grad_norm": 0.7909878492355347,
"learning_rate": 1.5168292849919185e-05,
"loss": 5.919348465768914,
"step": 3933
},
{
"epoch": 7.644100580270793,
"grad_norm": 0.9378625750541687,
"learning_rate": 1.4730450819384622e-05,
"loss": 5.925768400493421,
"step": 3952
},
{
"epoch": 7.680851063829787,
"grad_norm": 0.7907970547676086,
"learning_rate": 1.4297926811180174e-05,
"loss": 5.891129744680304,
"step": 3971
},
{
"epoch": 7.717601547388782,
"grad_norm": 0.7062816619873047,
"learning_rate": 1.3870786042253225e-05,
"loss": 5.924658925909745,
"step": 3990
},
{
"epoch": 7.754352030947776,
"grad_norm": 0.7920564413070679,
"learning_rate": 1.34490929178542e-05,
"loss": 5.9171387521844165,
"step": 4009
},
{
"epoch": 7.79110251450677,
"grad_norm": 0.8155921697616577,
"learning_rate": 1.3032911021825366e-05,
"loss": 5.90830471641139,
"step": 4028
},
{
"epoch": 7.827852998065764,
"grad_norm": 0.7282528877258301,
"learning_rate": 1.2622303107013512e-05,
"loss": 5.909604925858347,
"step": 4047
},
{
"epoch": 7.8646034816247585,
"grad_norm": 0.9870123267173767,
"learning_rate": 1.2217331085807982e-05,
"loss": 5.930417111045436,
"step": 4066
},
{
"epoch": 7.901353965183753,
"grad_norm": 0.7872030138969421,
"learning_rate": 1.1818056020805302e-05,
"loss": 5.9119214509662825,
"step": 4085
},
{
"epoch": 7.938104448742747,
"grad_norm": 0.8964416980743408,
"learning_rate": 1.1424538115602073e-05,
"loss": 5.888004503752056,
"step": 4104
},
{
"epoch": 7.97485493230174,
"grad_norm": 0.7404572367668152,
"learning_rate": 1.1036836705717363e-05,
"loss": 5.905365391781456,
"step": 4123
},
{
"epoch": 8.011605415860735,
"grad_norm": 0.8973370790481567,
"learning_rate": 1.0655010249645891e-05,
"loss": 5.92196334035773,
"step": 4142
},
{
"epoch": 8.048355899419729,
"grad_norm": 0.8033064603805542,
"learning_rate": 1.0279116320043603e-05,
"loss": 5.9180245650442025,
"step": 4161
},
{
"epoch": 8.085106382978724,
"grad_norm": 0.8662456274032593,
"learning_rate": 9.909211595046663e-06,
"loss": 5.927662899619655,
"step": 4180
},
{
"epoch": 8.121856866537717,
"grad_norm": 0.881951630115509,
"learning_rate": 9.545351849725448e-06,
"loss": 5.897458126670436,
"step": 4199
},
{
"epoch": 8.158607350096712,
"grad_norm": 0.9466047286987305,
"learning_rate": 9.187591947674612e-06,
"loss": 5.879381681743421,
"step": 4218
},
{
"epoch": 8.195357833655706,
"grad_norm": 0.7660216093063354,
"learning_rate": 8.835985832740712e-06,
"loss": 5.892818651701274,
"step": 4237
},
{
"epoch": 8.232108317214701,
"grad_norm": 0.88617342710495,
"learning_rate": 8.490586520888321e-06,
"loss": 5.913487083033512,
"step": 4256
},
{
"epoch": 8.268858800773694,
"grad_norm": 0.8038977384567261,
"learning_rate": 8.15144609220625e-06,
"loss": 5.897986562628495,
"step": 4275
},
{
"epoch": 8.30560928433269,
"grad_norm": 0.8149951100349426,
"learning_rate": 7.818615683054737e-06,
"loss": 5.905342503597862,
"step": 4294
},
{
"epoch": 8.342359767891683,
"grad_norm": 0.8831230401992798,
"learning_rate": 7.492145478355023e-06,
"loss": 5.904563903808594,
"step": 4313
},
{
"epoch": 8.379110251450676,
"grad_norm": 0.7993362545967102,
"learning_rate": 7.172084704022364e-06,
"loss": 5.920640242727179,
"step": 4332
},
{
"epoch": 8.415860735009671,
"grad_norm": 0.8131271004676819,
"learning_rate": 6.8584816195436215e-06,
"loss": 5.89423972681949,
"step": 4351
},
{
"epoch": 8.452611218568665,
"grad_norm": 0.8914048075675964,
"learning_rate": 6.551383510700565e-06,
"loss": 5.895122327302632,
"step": 4370
},
{
"epoch": 8.48936170212766,
"grad_norm": 0.8953275084495544,
"learning_rate": 6.250836682440047e-06,
"loss": 5.9110107421875,
"step": 4389
},
{
"epoch": 8.526112185686653,
"grad_norm": 0.7585981488227844,
"learning_rate": 5.956886451892019e-06,
"loss": 5.89650485390111,
"step": 4408
},
{
"epoch": 8.562862669245648,
"grad_norm": 0.7543755173683167,
"learning_rate": 5.669577141536553e-06,
"loss": 5.915409288908306,
"step": 4427
},
{
"epoch": 8.599613152804642,
"grad_norm": 0.7309451103210449,
"learning_rate": 5.3889520725207366e-06,
"loss": 5.88703717683491,
"step": 4446
},
{
"epoch": 8.636363636363637,
"grad_norm": 0.9153041839599609,
"learning_rate": 5.115053558126653e-06,
"loss": 5.926216125488281,
"step": 4465
},
{
"epoch": 8.67311411992263,
"grad_norm": 0.9788889288902283,
"learning_rate": 4.847922897391266e-06,
"loss": 5.901534632632607,
"step": 4484
},
{
"epoch": 8.709864603481625,
"grad_norm": 0.7951791882514954,
"learning_rate": 4.587600368879308e-06,
"loss": 5.907471907766242,
"step": 4503
},
{
"epoch": 8.746615087040619,
"grad_norm": 0.8469536900520325,
"learning_rate": 4.334125224609903e-06,
"loss": 5.8754529451069075,
"step": 4522
},
{
"epoch": 8.783365570599614,
"grad_norm": 0.8822078108787537,
"learning_rate": 4.087535684138127e-06,
"loss": 5.920900445235403,
"step": 4541
},
{
"epoch": 8.820116054158607,
"grad_norm": 0.8397153615951538,
"learning_rate": 3.84786892879217e-06,
"loss": 5.893511320415296,
"step": 4560
},
{
"epoch": 8.856866537717602,
"grad_norm": 0.8214257955551147,
"learning_rate": 3.615161096066999e-06,
"loss": 5.91493706954153,
"step": 4579
},
{
"epoch": 8.893617021276595,
"grad_norm": 0.7739570140838623,
"learning_rate": 3.389447274175528e-06,
"loss": 5.91633405183491,
"step": 4598
},
{
"epoch": 8.93036750483559,
"grad_norm": 0.7047730684280396,
"learning_rate": 3.1707614967579122e-06,
"loss": 5.886983771073191,
"step": 4617
},
{
"epoch": 8.967117988394584,
"grad_norm": 0.9191731214523315,
"learning_rate": 2.959136737749868e-06,
"loss": 5.887207834344161,
"step": 4636
},
{
"epoch": 9.003868471953579,
"grad_norm": 0.7081539630889893,
"learning_rate": 2.7546049064108013e-06,
"loss": 5.916957252903988,
"step": 4655
},
{
"epoch": 9.040618955512572,
"grad_norm": 0.9107328653335571,
"learning_rate": 2.557196842512455e-06,
"loss": 5.887527867367393,
"step": 4674
},
{
"epoch": 9.077369439071568,
"grad_norm": 0.7918136715888977,
"learning_rate": 2.36694231168883e-06,
"loss": 5.882832175806949,
"step": 4693
},
{
"epoch": 9.114119922630561,
"grad_norm": 0.8014242649078369,
"learning_rate": 2.1838700009480293e-06,
"loss": 5.885363528603001,
"step": 4712
},
{
"epoch": 9.150870406189554,
"grad_norm": 0.8799481987953186,
"learning_rate": 2.008007514346788e-06,
"loss": 5.917357193796258,
"step": 4731
},
{
"epoch": 9.18762088974855,
"grad_norm": 0.8959386944770813,
"learning_rate": 1.8393813688282524e-06,
"loss": 5.908062181974712,
"step": 4750
},
{
"epoch": 9.224371373307543,
"grad_norm": 0.8766935467720032,
"learning_rate": 1.6780169902237241e-06,
"loss": 5.88951954088713,
"step": 4769
},
{
"epoch": 9.261121856866538,
"grad_norm": 0.7538824677467346,
"learning_rate": 1.5239387094188818e-06,
"loss": 5.902222482781661,
"step": 4788
},
{
"epoch": 9.297872340425531,
"grad_norm": 0.805749773979187,
"learning_rate": 1.3771697586850929e-06,
"loss": 5.913856104800575,
"step": 4807
},
{
"epoch": 9.334622823984526,
"grad_norm": 0.8077756762504578,
"learning_rate": 1.237732268176428e-06,
"loss": 5.920063621119449,
"step": 4826
},
{
"epoch": 9.37137330754352,
"grad_norm": 0.833474338054657,
"learning_rate": 1.1056472625928127e-06,
"loss": 5.908903021561472,
"step": 4845
},
{
"epoch": 9.408123791102515,
"grad_norm": 0.7950544357299805,
"learning_rate": 9.80934658009891e-07,
"loss": 5.9271697998046875,
"step": 4864
},
{
"epoch": 9.444874274661508,
"grad_norm": 0.8724213242530823,
"learning_rate": 8.63613258876017e-07,
"loss": 5.9004058837890625,
"step": 4883
},
{
"epoch": 9.481624758220503,
"grad_norm": 0.7703186273574829,
"learning_rate": 7.537007551768782e-07,
"loss": 5.912384836297286,
"step": 4902
},
{
"epoch": 9.518375241779497,
"grad_norm": 0.8170893788337708,
"learning_rate": 6.512137197681733e-07,
"loss": 5.891664203844573,
"step": 4921
},
{
"epoch": 9.555125725338492,
"grad_norm": 0.8812070488929749,
"learning_rate": 5.561676058767007e-07,
"loss": 5.869388781095806,
"step": 4940
},
{
"epoch": 9.591876208897485,
"grad_norm": 0.744022786617279,
"learning_rate": 4.6857674477032154e-07,
"loss": 5.892197859914679,
"step": 4959
},
{
"epoch": 9.62862669245648,
"grad_norm": 0.8482499122619629,
"learning_rate": 3.884543435970056e-07,
"loss": 5.940224095394737,
"step": 4978
},
{
"epoch": 9.665377176015474,
"grad_norm": 0.8590576648712158,
"learning_rate": 3.158124833934684e-07,
"loss": 5.907253466154399,
"step": 4997
},
{
"epoch": 9.702127659574469,
"grad_norm": 0.7035248875617981,
"learning_rate": 2.506621172635615e-07,
"loss": 5.891074732730263,
"step": 5016
},
{
"epoch": 9.738878143133462,
"grad_norm": 0.8274122476577759,
"learning_rate": 1.930130687267051e-07,
"loss": 5.884301436574836,
"step": 5035
},
{
"epoch": 9.775628626692457,
"grad_norm": 0.8533028960227966,
"learning_rate": 1.4287403023673373e-07,
"loss": 5.917849490517064,
"step": 5054
},
{
"epoch": 9.81237911025145,
"grad_norm": 0.9782279133796692,
"learning_rate": 1.0025256187117249e-07,
"loss": 5.881855211759868,
"step": 5073
},
{
"epoch": 9.849129593810446,
"grad_norm": 0.7845222353935242,
"learning_rate": 6.515509019133781e-08,
"loss": 5.9063768888774675,
"step": 5092
},
{
"epoch": 9.885880077369439,
"grad_norm": 0.7692595720291138,
"learning_rate": 3.758690727332925e-08,
"loss": 5.8881988525390625,
"step": 5111
},
{
"epoch": 9.922630560928432,
"grad_norm": 0.8751763701438904,
"learning_rate": 1.7552169910067807e-08,
"loss": 5.899998313502262,
"step": 5130
},
{
"epoch": 9.959381044487428,
"grad_norm": 0.815253496170044,
"learning_rate": 5.053898984519467e-09,
"loss": 5.934799595883018,
"step": 5149
},
{
"epoch": 9.996131528046421,
"grad_norm": 0.7818523645401001,
"learning_rate": 9.397901423180422e-11,
"loss": 5.88957415129009,
"step": 5168
}
],
"logging_steps": 19,
"max_steps": 5170,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.099568358948864e+16,
"train_batch_size": 64,
"trial_name": null,
"trial_params": null
}