{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 10.0,
"eval_steps": 500,
"global_step": 2630,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.03802281368821293,
"grad_norm": 10.531242370605469,
"learning_rate": 7.5757575757575764e-06,
"loss": 1.5247,
"step": 10
},
{
"epoch": 0.07604562737642585,
"grad_norm": 5.058220863342285,
"learning_rate": 1.5151515151515153e-05,
"loss": 1.1238,
"step": 20
},
{
"epoch": 0.11406844106463879,
"grad_norm": 2.8926784992218018,
"learning_rate": 2.272727272727273e-05,
"loss": 0.4906,
"step": 30
},
{
"epoch": 0.1520912547528517,
"grad_norm": 2.6709837913513184,
"learning_rate": 3.0303030303030306e-05,
"loss": 0.3857,
"step": 40
},
{
"epoch": 0.19011406844106463,
"grad_norm": 0.935093879699707,
"learning_rate": 3.787878787878788e-05,
"loss": 0.2874,
"step": 50
},
{
"epoch": 0.22813688212927757,
"grad_norm": 1.3582843542099,
"learning_rate": 4.545454545454546e-05,
"loss": 0.2348,
"step": 60
},
{
"epoch": 0.2661596958174905,
"grad_norm": 0.8589798212051392,
"learning_rate": 5.303030303030303e-05,
"loss": 0.1967,
"step": 70
},
{
"epoch": 0.3041825095057034,
"grad_norm": 1.0151523351669312,
"learning_rate": 6.060606060606061e-05,
"loss": 0.1742,
"step": 80
},
{
"epoch": 0.34220532319391633,
"grad_norm": 1.0646296739578247,
"learning_rate": 6.818181818181818e-05,
"loss": 0.1598,
"step": 90
},
{
"epoch": 0.38022813688212925,
"grad_norm": 0.8589377403259277,
"learning_rate": 7.575757575757576e-05,
"loss": 0.1532,
"step": 100
},
{
"epoch": 0.41825095057034223,
"grad_norm": 0.825715184211731,
"learning_rate": 8.333333333333334e-05,
"loss": 0.1468,
"step": 110
},
{
"epoch": 0.45627376425855515,
"grad_norm": 0.9612506031990051,
"learning_rate": 9.090909090909092e-05,
"loss": 0.1401,
"step": 120
},
{
"epoch": 0.49429657794676807,
"grad_norm": 0.8467527627944946,
"learning_rate": 9.848484848484849e-05,
"loss": 0.1281,
"step": 130
},
{
"epoch": 0.532319391634981,
"grad_norm": 1.0839829444885254,
"learning_rate": 9.999746935517458e-05,
"loss": 0.1331,
"step": 140
},
{
"epoch": 0.5703422053231939,
"grad_norm": 1.5719529390335083,
"learning_rate": 9.99871890496051e-05,
"loss": 0.1237,
"step": 150
},
{
"epoch": 0.6083650190114068,
"grad_norm": 0.6265313625335693,
"learning_rate": 9.996900254271908e-05,
"loss": 0.1178,
"step": 160
},
{
"epoch": 0.6463878326996197,
"grad_norm": 0.9848031997680664,
"learning_rate": 9.99429127109772e-05,
"loss": 0.103,
"step": 170
},
{
"epoch": 0.6844106463878327,
"grad_norm": 1.1421473026275635,
"learning_rate": 9.9908923680866e-05,
"loss": 0.1079,
"step": 180
},
{
"epoch": 0.7224334600760456,
"grad_norm": 0.986842691898346,
"learning_rate": 9.986704082824516e-05,
"loss": 0.1017,
"step": 190
},
{
"epoch": 0.7604562737642585,
"grad_norm": 0.8210304975509644,
"learning_rate": 9.981727077749728e-05,
"loss": 0.0995,
"step": 200
},
{
"epoch": 0.7984790874524715,
"grad_norm": 1.1234544515609741,
"learning_rate": 9.975962140048007e-05,
"loss": 0.0937,
"step": 210
},
{
"epoch": 0.8365019011406845,
"grad_norm": 0.9322770833969116,
"learning_rate": 9.969410181528138e-05,
"loss": 0.0908,
"step": 220
},
{
"epoch": 0.8745247148288974,
"grad_norm": 0.960360050201416,
"learning_rate": 9.962072238477698e-05,
"loss": 0.0973,
"step": 230
},
{
"epoch": 0.9125475285171103,
"grad_norm": 0.7615262269973755,
"learning_rate": 9.953949471499157e-05,
"loss": 0.0934,
"step": 240
},
{
"epoch": 0.9505703422053232,
"grad_norm": 0.8841362595558167,
"learning_rate": 9.945043165326309e-05,
"loss": 0.088,
"step": 250
},
{
"epoch": 0.9885931558935361,
"grad_norm": 0.574943482875824,
"learning_rate": 9.935354728621069e-05,
"loss": 0.0858,
"step": 260
},
{
"epoch": 1.026615969581749,
"grad_norm": 0.584782063961029,
"learning_rate": 9.92488569375068e-05,
"loss": 0.0841,
"step": 270
},
{
"epoch": 1.064638783269962,
"grad_norm": 0.7055894732475281,
"learning_rate": 9.913637716545343e-05,
"loss": 0.0827,
"step": 280
},
{
"epoch": 1.102661596958175,
"grad_norm": 0.46008625626564026,
"learning_rate": 9.901612576036325e-05,
"loss": 0.0813,
"step": 290
},
{
"epoch": 1.1406844106463878,
"grad_norm": 0.592440664768219,
"learning_rate": 9.888812174174573e-05,
"loss": 0.0877,
"step": 300
},
{
"epoch": 1.1787072243346008,
"grad_norm": 0.5659314393997192,
"learning_rate": 9.875238535529905e-05,
"loss": 0.0855,
"step": 310
},
{
"epoch": 1.2167300380228137,
"grad_norm": 0.6649744510650635,
"learning_rate": 9.860893806970781e-05,
"loss": 0.0857,
"step": 320
},
{
"epoch": 1.2547528517110267,
"grad_norm": 0.4853374660015106,
"learning_rate": 9.845780257324755e-05,
"loss": 0.0796,
"step": 330
},
{
"epoch": 1.2927756653992395,
"grad_norm": 0.6634009480476379,
"learning_rate": 9.829900277019624e-05,
"loss": 0.0715,
"step": 340
},
{
"epoch": 1.3307984790874525,
"grad_norm": 0.4123174250125885,
"learning_rate": 9.81325637770534e-05,
"loss": 0.0719,
"step": 350
},
{
"epoch": 1.3688212927756653,
"grad_norm": 0.7861672043800354,
"learning_rate": 9.795851191856773e-05,
"loss": 0.0739,
"step": 360
},
{
"epoch": 1.4068441064638784,
"grad_norm": 0.39750707149505615,
"learning_rate": 9.777687472357324e-05,
"loss": 0.0698,
"step": 370
},
{
"epoch": 1.4448669201520912,
"grad_norm": 0.722964882850647,
"learning_rate": 9.758768092063536e-05,
"loss": 0.0699,
"step": 380
},
{
"epoch": 1.4828897338403042,
"grad_norm": 0.6673786044120789,
"learning_rate": 9.739096043350699e-05,
"loss": 0.075,
"step": 390
},
{
"epoch": 1.5209125475285172,
"grad_norm": 0.4799659252166748,
"learning_rate": 9.71867443763957e-05,
"loss": 0.0757,
"step": 400
},
{
"epoch": 1.55893536121673,
"grad_norm": 0.485879510641098,
"learning_rate": 9.697506504904246e-05,
"loss": 0.0631,
"step": 410
},
{
"epoch": 1.5969581749049429,
"grad_norm": 0.5157936811447144,
"learning_rate": 9.675595593161305e-05,
"loss": 0.0663,
"step": 420
},
{
"epoch": 1.6349809885931559,
"grad_norm": 0.3810887634754181,
"learning_rate": 9.65294516794027e-05,
"loss": 0.0704,
"step": 430
},
{
"epoch": 1.673003802281369,
"grad_norm": 0.5194770097732544,
"learning_rate": 9.629558811735476e-05,
"loss": 0.0771,
"step": 440
},
{
"epoch": 1.7110266159695817,
"grad_norm": 0.7283788323402405,
"learning_rate": 9.605440223439453e-05,
"loss": 0.0695,
"step": 450
},
{
"epoch": 1.7490494296577945,
"grad_norm": 0.6051397919654846,
"learning_rate": 9.580593217757894e-05,
"loss": 0.0733,
"step": 460
},
{
"epoch": 1.7870722433460076,
"grad_norm": 0.48710307478904724,
"learning_rate": 9.555021724606298e-05,
"loss": 0.0688,
"step": 470
},
{
"epoch": 1.8250950570342206,
"grad_norm": 0.44367125630378723,
"learning_rate": 9.5287297884884e-05,
"loss": 0.0632,
"step": 480
},
{
"epoch": 1.8631178707224336,
"grad_norm": 0.6892393231391907,
"learning_rate": 9.501721567856474e-05,
"loss": 0.0686,
"step": 490
},
{
"epoch": 1.9011406844106464,
"grad_norm": 0.7118890881538391,
"learning_rate": 9.474001334453612e-05,
"loss": 0.0648,
"step": 500
},
{
"epoch": 1.9391634980988592,
"grad_norm": 0.4286198318004608,
"learning_rate": 9.445573472638085e-05,
"loss": 0.059,
"step": 510
},
{
"epoch": 1.9771863117870723,
"grad_norm": 0.5651097297668457,
"learning_rate": 9.416442478689898e-05,
"loss": 0.0657,
"step": 520
},
{
"epoch": 2.0152091254752853,
"grad_norm": 0.41622453927993774,
"learning_rate": 9.38661296009963e-05,
"loss": 0.0619,
"step": 530
},
{
"epoch": 2.053231939163498,
"grad_norm": 0.40556129813194275,
"learning_rate": 9.3560896348397e-05,
"loss": 0.0598,
"step": 540
},
{
"epoch": 2.091254752851711,
"grad_norm": 0.3720885217189789,
"learning_rate": 9.324877330618143e-05,
"loss": 0.0594,
"step": 550
},
{
"epoch": 2.129277566539924,
"grad_norm": 0.3962866961956024,
"learning_rate": 9.292980984115047e-05,
"loss": 0.0638,
"step": 560
},
{
"epoch": 2.167300380228137,
"grad_norm": 0.5914015173912048,
"learning_rate": 9.260405640201737e-05,
"loss": 0.0618,
"step": 570
},
{
"epoch": 2.20532319391635,
"grad_norm": 0.3687211275100708,
"learning_rate": 9.227156451142862e-05,
"loss": 0.0565,
"step": 580
},
{
"epoch": 2.2433460076045626,
"grad_norm": 0.4098389148712158,
"learning_rate": 9.193238675781482e-05,
"loss": 0.0596,
"step": 590
},
{
"epoch": 2.2813688212927756,
"grad_norm": 0.43146541714668274,
"learning_rate": 9.15865767870731e-05,
"loss": 0.0595,
"step": 600
},
{
"epoch": 2.3193916349809887,
"grad_norm": 0.4519984722137451,
"learning_rate": 9.123418929408225e-05,
"loss": 0.0535,
"step": 610
},
{
"epoch": 2.3574144486692017,
"grad_norm": 0.41412219405174255,
"learning_rate": 9.087528001405194e-05,
"loss": 0.0569,
"step": 620
},
{
"epoch": 2.3954372623574143,
"grad_norm": 0.3652435541152954,
"learning_rate": 9.05099057137073e-05,
"loss": 0.0551,
"step": 630
},
{
"epoch": 2.4334600760456273,
"grad_norm": 0.49258697032928467,
"learning_rate": 9.013812418231056e-05,
"loss": 0.0553,
"step": 640
},
{
"epoch": 2.4714828897338403,
"grad_norm": 0.4152522683143616,
"learning_rate": 8.975999422252071e-05,
"loss": 0.0542,
"step": 650
},
{
"epoch": 2.5095057034220534,
"grad_norm": 0.6340664625167847,
"learning_rate": 8.937557564109308e-05,
"loss": 0.0555,
"step": 660
},
{
"epoch": 2.5475285171102664,
"grad_norm": 0.2747473120689392,
"learning_rate": 8.898492923942007e-05,
"loss": 0.0499,
"step": 670
},
{
"epoch": 2.585551330798479,
"grad_norm": 0.42373764514923096,
"learning_rate": 8.858811680391441e-05,
"loss": 0.0613,
"step": 680
},
{
"epoch": 2.623574144486692,
"grad_norm": 0.46734005212783813,
"learning_rate": 8.818520109623687e-05,
"loss": 0.0572,
"step": 690
},
{
"epoch": 2.661596958174905,
"grad_norm": 0.551827073097229,
"learning_rate": 8.777624584336943e-05,
"loss": 0.0663,
"step": 700
},
{
"epoch": 2.6996197718631176,
"grad_norm": 0.3376104235649109,
"learning_rate": 8.736131572753614e-05,
"loss": 0.0576,
"step": 710
},
{
"epoch": 2.7376425855513307,
"grad_norm": 0.4250059723854065,
"learning_rate": 8.694047637597245e-05,
"loss": 0.055,
"step": 720
},
{
"epoch": 2.7756653992395437,
"grad_norm": 0.4221857190132141,
"learning_rate": 8.65137943505455e-05,
"loss": 0.0556,
"step": 730
},
{
"epoch": 2.8136882129277567,
"grad_norm": 0.29908323287963867,
"learning_rate": 8.608133713722632e-05,
"loss": 0.0528,
"step": 740
},
{
"epoch": 2.8517110266159698,
"grad_norm": 0.5638275742530823,
"learning_rate": 8.56431731354159e-05,
"loss": 0.0581,
"step": 750
},
{
"epoch": 2.8897338403041823,
"grad_norm": 0.4976101815700531,
"learning_rate": 8.519937164712691e-05,
"loss": 0.0581,
"step": 760
},
{
"epoch": 2.9277566539923954,
"grad_norm": 0.5850965976715088,
"learning_rate": 8.475000286602252e-05,
"loss": 0.0555,
"step": 770
},
{
"epoch": 2.9657794676806084,
"grad_norm": 0.5351647734642029,
"learning_rate": 8.429513786631426e-05,
"loss": 0.0598,
"step": 780
},
{
"epoch": 3.0038022813688214,
"grad_norm": 0.3065849840641022,
"learning_rate": 8.383484859152062e-05,
"loss": 0.0549,
"step": 790
},
{
"epoch": 3.041825095057034,
"grad_norm": 0.47382888197898865,
"learning_rate": 8.336920784308813e-05,
"loss": 0.0528,
"step": 800
},
{
"epoch": 3.079847908745247,
"grad_norm": 0.41480347514152527,
"learning_rate": 8.289828926887672e-05,
"loss": 0.0528,
"step": 810
},
{
"epoch": 3.11787072243346,
"grad_norm": 0.4022481441497803,
"learning_rate": 8.24221673515113e-05,
"loss": 0.0523,
"step": 820
},
{
"epoch": 3.155893536121673,
"grad_norm": 0.3361530900001526,
"learning_rate": 8.19409173966012e-05,
"loss": 0.056,
"step": 830
},
{
"epoch": 3.1939163498098857,
"grad_norm": 0.6117600798606873,
"learning_rate": 8.145461552082947e-05,
"loss": 0.0601,
"step": 840
},
{
"epoch": 3.2319391634980987,
"grad_norm": 0.5725025534629822,
"learning_rate": 8.096333863991402e-05,
"loss": 0.0547,
"step": 850
},
{
"epoch": 3.2699619771863118,
"grad_norm": 0.41603338718414307,
"learning_rate": 8.046716445644219e-05,
"loss": 0.0487,
"step": 860
},
{
"epoch": 3.307984790874525,
"grad_norm": 0.358055979013443,
"learning_rate": 7.996617144758094e-05,
"loss": 0.0512,
"step": 870
},
{
"epoch": 3.346007604562738,
"grad_norm": 0.25234535336494446,
"learning_rate": 7.946043885266465e-05,
"loss": 0.0463,
"step": 880
},
{
"epoch": 3.3840304182509504,
"grad_norm": 0.28671276569366455,
"learning_rate": 7.895004666066213e-05,
"loss": 0.0445,
"step": 890
},
{
"epoch": 3.4220532319391634,
"grad_norm": 0.4477708339691162,
"learning_rate": 7.843507559752537e-05,
"loss": 0.048,
"step": 900
},
{
"epoch": 3.4600760456273765,
"grad_norm": 0.4737355411052704,
"learning_rate": 7.791560711342135e-05,
"loss": 0.0475,
"step": 910
},
{
"epoch": 3.4980988593155895,
"grad_norm": 0.4270668625831604,
"learning_rate": 7.739172336984967e-05,
"loss": 0.0493,
"step": 920
},
{
"epoch": 3.5361216730038025,
"grad_norm": 0.3703446388244629,
"learning_rate": 7.68635072266475e-05,
"loss": 0.0496,
"step": 930
},
{
"epoch": 3.574144486692015,
"grad_norm": 0.41201433539390564,
"learning_rate": 7.633104222888396e-05,
"loss": 0.0514,
"step": 940
},
{
"epoch": 3.612167300380228,
"grad_norm": 0.8504674434661865,
"learning_rate": 7.579441259364642e-05,
"loss": 0.0546,
"step": 950
},
{
"epoch": 3.650190114068441,
"grad_norm": 0.46347907185554504,
"learning_rate": 7.525370319672025e-05,
"loss": 0.0545,
"step": 960
},
{
"epoch": 3.6882129277566538,
"grad_norm": 0.42974913120269775,
"learning_rate": 7.470899955916445e-05,
"loss": 0.0497,
"step": 970
},
{
"epoch": 3.726235741444867,
"grad_norm": 0.3402682840824127,
"learning_rate": 7.416038783378539e-05,
"loss": 0.0529,
"step": 980
},
{
"epoch": 3.76425855513308,
"grad_norm": 0.4092146158218384,
"learning_rate": 7.360795479151027e-05,
"loss": 0.0455,
"step": 990
},
{
"epoch": 3.802281368821293,
"grad_norm": 0.35827386379241943,
"learning_rate": 7.305178780766332e-05,
"loss": 0.0488,
"step": 1000
},
{
"epoch": 3.840304182509506,
"grad_norm": 0.3754422664642334,
"learning_rate": 7.249197484814578e-05,
"loss": 0.055,
"step": 1010
},
{
"epoch": 3.8783269961977185,
"grad_norm": 0.2807624638080597,
"learning_rate": 7.192860445552316e-05,
"loss": 0.046,
"step": 1020
},
{
"epoch": 3.9163498098859315,
"grad_norm": 0.2535790503025055,
"learning_rate": 7.136176573502069e-05,
"loss": 0.042,
"step": 1030
},
{
"epoch": 3.9543726235741445,
"grad_norm": 0.3779744803905487,
"learning_rate": 7.079154834043013e-05,
"loss": 0.0465,
"step": 1040
},
{
"epoch": 3.9923954372623576,
"grad_norm": 0.4343276917934418,
"learning_rate": 7.021804245992972e-05,
"loss": 0.0416,
"step": 1050
},
{
"epoch": 4.030418250950571,
"grad_norm": 0.47514665126800537,
"learning_rate": 6.964133880181963e-05,
"loss": 0.0429,
"step": 1060
},
{
"epoch": 4.068441064638783,
"grad_norm": 0.36156734824180603,
"learning_rate": 6.906152858017501e-05,
"loss": 0.0431,
"step": 1070
},
{
"epoch": 4.106463878326996,
"grad_norm": 0.4116564393043518,
"learning_rate": 6.847870350041934e-05,
"loss": 0.0503,
"step": 1080
},
{
"epoch": 4.144486692015209,
"grad_norm": 0.3581191301345825,
"learning_rate": 6.789295574481969e-05,
"loss": 0.0424,
"step": 1090
},
{
"epoch": 4.182509505703422,
"grad_norm": 0.3734349012374878,
"learning_rate": 6.730437795790689e-05,
"loss": 0.0481,
"step": 1100
},
{
"epoch": 4.220532319391635,
"grad_norm": 0.30373069643974304,
"learning_rate": 6.671306323182239e-05,
"loss": 0.0403,
"step": 1110
},
{
"epoch": 4.258555133079848,
"grad_norm": 0.33292144536972046,
"learning_rate": 6.611910509159443e-05,
"loss": 0.0419,
"step": 1120
},
{
"epoch": 4.2965779467680605,
"grad_norm": 0.4385669231414795,
"learning_rate": 6.552259748034555e-05,
"loss": 0.049,
"step": 1130
},
{
"epoch": 4.334600760456274,
"grad_norm": 0.3568478524684906,
"learning_rate": 6.49236347444344e-05,
"loss": 0.0434,
"step": 1140
},
{
"epoch": 4.3726235741444865,
"grad_norm": 0.47680339217185974,
"learning_rate": 6.432231161853317e-05,
"loss": 0.0453,
"step": 1150
},
{
"epoch": 4.4106463878327,
"grad_norm": 0.4149623513221741,
"learning_rate": 6.371872321064414e-05,
"loss": 0.0494,
"step": 1160
},
{
"epoch": 4.448669201520913,
"grad_norm": 0.32099780440330505,
"learning_rate": 6.311296498705691e-05,
"loss": 0.0449,
"step": 1170
},
{
"epoch": 4.486692015209125,
"grad_norm": 0.2793867588043213,
"learning_rate": 6.250513275724896e-05,
"loss": 0.0444,
"step": 1180
},
{
"epoch": 4.524714828897339,
"grad_norm": 0.40786534547805786,
"learning_rate": 6.189532265873209e-05,
"loss": 0.0391,
"step": 1190
},
{
"epoch": 4.562737642585551,
"grad_norm": 0.3745206892490387,
"learning_rate": 6.128363114184676e-05,
"loss": 0.0384,
"step": 1200
},
{
"epoch": 4.600760456273765,
"grad_norm": 0.43438687920570374,
"learning_rate": 6.067015495450715e-05,
"loss": 0.0404,
"step": 1210
},
{
"epoch": 4.638783269961977,
"grad_norm": 0.4575098156929016,
"learning_rate": 6.005499112689905e-05,
"loss": 0.0475,
"step": 1220
},
{
"epoch": 4.67680608365019,
"grad_norm": 0.39668184518814087,
"learning_rate": 5.9438236956133076e-05,
"loss": 0.0428,
"step": 1230
},
{
"epoch": 4.714828897338403,
"grad_norm": 0.3513396382331848,
"learning_rate": 5.881998999085583e-05,
"loss": 0.0461,
"step": 1240
},
{
"epoch": 4.752851711026616,
"grad_norm": 0.5033044219017029,
"learning_rate": 5.820034801582101e-05,
"loss": 0.0447,
"step": 1250
},
{
"epoch": 4.7908745247148286,
"grad_norm": 0.43220236897468567,
"learning_rate": 5.7579409036423424e-05,
"loss": 0.0418,
"step": 1260
},
{
"epoch": 4.828897338403042,
"grad_norm": 0.3379021883010864,
"learning_rate": 5.6957271263198045e-05,
"loss": 0.0404,
"step": 1270
},
{
"epoch": 4.866920152091255,
"grad_norm": 0.3024837374687195,
"learning_rate": 5.633403309628645e-05,
"loss": 0.0396,
"step": 1280
},
{
"epoch": 4.904942965779467,
"grad_norm": 0.5330707430839539,
"learning_rate": 5.5709793109873584e-05,
"loss": 0.0443,
"step": 1290
},
{
"epoch": 4.942965779467681,
"grad_norm": 0.38940224051475525,
"learning_rate": 5.508465003659663e-05,
"loss": 0.0436,
"step": 1300
},
{
"epoch": 4.980988593155893,
"grad_norm": 0.4433404803276062,
"learning_rate": 5.445870275192918e-05,
"loss": 0.0345,
"step": 1310
},
{
"epoch": 5.019011406844107,
"grad_norm": 0.4395654797554016,
"learning_rate": 5.3832050258542474e-05,
"loss": 0.044,
"step": 1320
},
{
"epoch": 5.057034220532319,
"grad_norm": 0.39197906851768494,
"learning_rate": 5.320479167064687e-05,
"loss": 0.0431,
"step": 1330
},
{
"epoch": 5.095057034220532,
"grad_norm": 0.2870258390903473,
"learning_rate": 5.2577026198315315e-05,
"loss": 0.04,
"step": 1340
},
{
"epoch": 5.133079847908745,
"grad_norm": 0.28162604570388794,
"learning_rate": 5.1948853131791955e-05,
"loss": 0.0342,
"step": 1350
},
{
"epoch": 5.171102661596958,
"grad_norm": 0.4061802327632904,
"learning_rate": 5.132037182578785e-05,
"loss": 0.0381,
"step": 1360
},
{
"epoch": 5.2091254752851714,
"grad_norm": 0.41974517703056335,
"learning_rate": 5.069168168376663e-05,
"loss": 0.0388,
"step": 1370
},
{
"epoch": 5.247148288973384,
"grad_norm": 0.2790164053440094,
"learning_rate": 5.006288214222241e-05,
"loss": 0.0364,
"step": 1380
},
{
"epoch": 5.285171102661597,
"grad_norm": 0.23911532759666443,
"learning_rate": 4.943407265495238e-05,
"loss": 0.0383,
"step": 1390
},
{
"epoch": 5.32319391634981,
"grad_norm": 0.47133246064186096,
"learning_rate": 4.8805352677326854e-05,
"loss": 0.0424,
"step": 1400
},
{
"epoch": 5.361216730038023,
"grad_norm": 0.3483254909515381,
"learning_rate": 4.817682165055882e-05,
"loss": 0.033,
"step": 1410
},
{
"epoch": 5.399239543726236,
"grad_norm": 0.6127725839614868,
"learning_rate": 4.7548578985975984e-05,
"loss": 0.0358,
"step": 1420
},
{
"epoch": 5.437262357414449,
"grad_norm": 0.4287540912628174,
"learning_rate": 4.692072404929739e-05,
"loss": 0.0365,
"step": 1430
},
{
"epoch": 5.475285171102661,
"grad_norm": 0.41925495862960815,
"learning_rate": 4.629335614491725e-05,
"loss": 0.033,
"step": 1440
},
{
"epoch": 5.513307984790875,
"grad_norm": 0.4788515865802765,
"learning_rate": 4.566657450019849e-05,
"loss": 0.0342,
"step": 1450
},
{
"epoch": 5.551330798479087,
"grad_norm": 0.42283207178115845,
"learning_rate": 4.504047824977862e-05,
"loss": 0.0368,
"step": 1460
},
{
"epoch": 5.589353612167301,
"grad_norm": 0.4737829267978668,
"learning_rate": 4.441516641989002e-05,
"loss": 0.0358,
"step": 1470
},
{
"epoch": 5.6273764258555135,
"grad_norm": 0.24999397993087769,
"learning_rate": 4.3790737912697586e-05,
"loss": 0.0388,
"step": 1480
},
{
"epoch": 5.665399239543726,
"grad_norm": 0.3402468264102936,
"learning_rate": 4.316729149065579e-05,
"loss": 0.0388,
"step": 1490
},
{
"epoch": 5.7034220532319395,
"grad_norm": 0.32290977239608765,
"learning_rate": 4.254492576088816e-05,
"loss": 0.0376,
"step": 1500
},
{
"epoch": 5.741444866920152,
"grad_norm": 0.2910301685333252,
"learning_rate": 4.192373915959093e-05,
"loss": 0.0396,
"step": 1510
},
{
"epoch": 5.779467680608365,
"grad_norm": 0.2554992437362671,
"learning_rate": 4.130382993646407e-05,
"loss": 0.0374,
"step": 1520
},
{
"epoch": 5.817490494296578,
"grad_norm": 0.23737633228302002,
"learning_rate": 4.068529613917154e-05,
"loss": 0.0374,
"step": 1530
},
{
"epoch": 5.855513307984791,
"grad_norm": 0.36381277441978455,
"learning_rate": 4.006823559783374e-05,
"loss": 0.0345,
"step": 1540
},
{
"epoch": 5.893536121673003,
"grad_norm": 0.5745224952697754,
"learning_rate": 3.94527459095542e-05,
"loss": 0.0327,
"step": 1550
},
{
"epoch": 5.931558935361217,
"grad_norm": 0.2845105528831482,
"learning_rate": 3.883892442298318e-05,
"loss": 0.0348,
"step": 1560
},
{
"epoch": 5.969581749049429,
"grad_norm": 0.21019914746284485,
"learning_rate": 3.822686822292052e-05,
"loss": 0.0301,
"step": 1570
},
{
"epoch": 6.007604562737643,
"grad_norm": 0.46327492594718933,
"learning_rate": 3.761667411496037e-05,
"loss": 0.0356,
"step": 1580
},
{
"epoch": 6.0456273764258555,
"grad_norm": 0.4197891652584076,
"learning_rate": 3.70084386101799e-05,
"loss": 0.0349,
"step": 1590
},
{
"epoch": 6.083650190114068,
"grad_norm": 0.7197537422180176,
"learning_rate": 3.640225790987469e-05,
"loss": 0.0346,
"step": 1600
},
{
"epoch": 6.1216730038022815,
"grad_norm": 0.5813512802124023,
"learning_rate": 3.5798227890343056e-05,
"loss": 0.0357,
"step": 1610
},
{
"epoch": 6.159695817490494,
"grad_norm": 0.29964444041252136,
"learning_rate": 3.519644408772191e-05,
"loss": 0.0336,
"step": 1620
},
{
"epoch": 6.197718631178708,
"grad_norm": 0.3644579350948334,
"learning_rate": 3.459700168287624e-05,
"loss": 0.03,
"step": 1630
},
{
"epoch": 6.23574144486692,
"grad_norm": 0.31609275937080383,
"learning_rate": 3.399999548634494e-05,
"loss": 0.0357,
"step": 1640
},
{
"epoch": 6.273764258555133,
"grad_norm": 0.4375005066394806,
"learning_rate": 3.340551992334505e-05,
"loss": 0.0299,
"step": 1650
},
{
"epoch": 6.311787072243346,
"grad_norm": 0.23590423166751862,
"learning_rate": 3.2813669018837134e-05,
"loss": 0.0298,
"step": 1660
},
{
"epoch": 6.349809885931559,
"grad_norm": 0.21977555751800537,
"learning_rate": 3.2224536382653844e-05,
"loss": 0.0384,
"step": 1670
},
{
"epoch": 6.387832699619771,
"grad_norm": 0.506341278553009,
"learning_rate": 3.163821519469411e-05,
"loss": 0.0319,
"step": 1680
},
{
"epoch": 6.425855513307985,
"grad_norm": 0.42202290892601013,
"learning_rate": 3.1054798190185406e-05,
"loss": 0.0328,
"step": 1690
},
{
"epoch": 6.4638783269961975,
"grad_norm": 0.2345847338438034,
"learning_rate": 3.0474377645016356e-05,
"loss": 0.0303,
"step": 1700
},
{
"epoch": 6.501901140684411,
"grad_norm": 0.2911858558654785,
"learning_rate": 2.9897045361141955e-05,
"loss": 0.0316,
"step": 1710
},
{
"epoch": 6.5399239543726235,
"grad_norm": 0.3863067626953125,
"learning_rate": 2.9322892652063694e-05,
"loss": 0.0294,
"step": 1720
},
{
"epoch": 6.577946768060836,
"grad_norm": 0.32410648465156555,
"learning_rate": 2.8752010328387056e-05,
"loss": 0.028,
"step": 1730
},
{
"epoch": 6.61596958174905,
"grad_norm": 0.3410639762878418,
"learning_rate": 2.818448868345851e-05,
"loss": 0.0416,
"step": 1740
},
{
"epoch": 6.653992395437262,
"grad_norm": 0.27167966961860657,
"learning_rate": 2.7620417479084275e-05,
"loss": 0.0316,
"step": 1750
},
{
"epoch": 6.692015209125476,
"grad_norm": 0.31542375683784485,
"learning_rate": 2.7059885931333163e-05,
"loss": 0.033,
"step": 1760
},
{
"epoch": 6.730038022813688,
"grad_norm": 0.32455578446388245,
"learning_rate": 2.650298269642578e-05,
"loss": 0.0327,
"step": 1770
},
{
"epoch": 6.768060836501901,
"grad_norm": 0.26266616582870483,
"learning_rate": 2.59497958567123e-05,
"loss": 0.0281,
"step": 1780
},
{
"epoch": 6.806083650190114,
"grad_norm": 0.3133273720741272,
"learning_rate": 2.540041290674091e-05,
"loss": 0.0299,
"step": 1790
},
{
"epoch": 6.844106463878327,
"grad_norm": 0.23578499257564545,
"learning_rate": 2.4854920739419257e-05,
"loss": 0.0298,
"step": 1800
},
{
"epoch": 6.8821292775665395,
"grad_norm": 0.2830640375614166,
"learning_rate": 2.4313405632271085e-05,
"loss": 0.0308,
"step": 1810
},
{
"epoch": 6.920152091254753,
"grad_norm": 0.20085899531841278,
"learning_rate": 2.377595323379026e-05,
"loss": 0.0317,
"step": 1820
},
{
"epoch": 6.9581749049429655,
"grad_norm": 0.18935348093509674,
"learning_rate": 2.3242648549894136e-05,
"loss": 0.0274,
"step": 1830
},
{
"epoch": 6.996197718631179,
"grad_norm": 0.33719274401664734,
"learning_rate": 2.27135759304786e-05,
"loss": 0.0322,
"step": 1840
},
{
"epoch": 7.034220532319392,
"grad_norm": 0.4374171197414398,
"learning_rate": 2.2188819056077032e-05,
"loss": 0.0318,
"step": 1850
},
{
"epoch": 7.072243346007604,
"grad_norm": 0.3346361815929413,
"learning_rate": 2.1668460924624873e-05,
"loss": 0.0314,
"step": 1860
},
{
"epoch": 7.110266159695818,
"grad_norm": 0.328633189201355,
"learning_rate": 2.1152583838332423e-05,
"loss": 0.0335,
"step": 1870
},
{
"epoch": 7.14828897338403,
"grad_norm": 0.3334602415561676,
"learning_rate": 2.0641269390667435e-05,
"loss": 0.029,
"step": 1880
},
{
"epoch": 7.186311787072244,
"grad_norm": 0.2650391161441803,
"learning_rate": 2.0134598453449998e-05,
"loss": 0.0272,
"step": 1890
},
{
"epoch": 7.224334600760456,
"grad_norm": 0.24867604672908783,
"learning_rate": 1.963265116406147e-05,
"loss": 0.0365,
"step": 1900
},
{
"epoch": 7.262357414448669,
"grad_norm": 0.3868961036205292,
"learning_rate": 1.9135506912769534e-05,
"loss": 0.0271,
"step": 1910
},
{
"epoch": 7.300380228136882,
"grad_norm": 0.20158067345619202,
"learning_rate": 1.864324433017151e-05,
"loss": 0.0277,
"step": 1920
},
{
"epoch": 7.338403041825095,
"grad_norm": 0.32405081391334534,
"learning_rate": 1.815594127475781e-05,
"loss": 0.0297,
"step": 1930
},
{
"epoch": 7.3764258555133075,
"grad_norm": 0.25651466846466064,
"learning_rate": 1.7673674820597458e-05,
"loss": 0.0327,
"step": 1940
},
{
"epoch": 7.414448669201521,
"grad_norm": 0.44135525822639465,
"learning_rate": 1.7196521245147667e-05,
"loss": 0.0283,
"step": 1950
},
{
"epoch": 7.452471482889734,
"grad_norm": 0.22677309811115265,
"learning_rate": 1.6724556017189492e-05,
"loss": 0.0303,
"step": 1960
},
{
"epoch": 7.490494296577947,
"grad_norm": 0.27086296677589417,
"learning_rate": 1.6257853784891348e-05,
"loss": 0.03,
"step": 1970
},
{
"epoch": 7.52851711026616,
"grad_norm": 0.38143807649612427,
"learning_rate": 1.5796488364002317e-05,
"loss": 0.0287,
"step": 1980
},
{
"epoch": 7.566539923954372,
"grad_norm": 0.28342220187187195,
"learning_rate": 1.5340532726177108e-05,
"loss": 0.0273,
"step": 1990
},
{
"epoch": 7.604562737642586,
"grad_norm": 0.3746427297592163,
"learning_rate": 1.4890058987434435e-05,
"loss": 0.0318,
"step": 2000
},
{
"epoch": 7.642585551330798,
"grad_norm": 0.6539424061775208,
"learning_rate": 1.4445138396750995e-05,
"loss": 0.0265,
"step": 2010
},
{
"epoch": 7.680608365019012,
"grad_norm": 0.2562049925327301,
"learning_rate": 1.4005841324792296e-05,
"loss": 0.026,
"step": 2020
},
{
"epoch": 7.718631178707224,
"grad_norm": 0.2805837094783783,
"learning_rate": 1.3572237252782527e-05,
"loss": 0.0307,
"step": 2030
},
{
"epoch": 7.756653992395437,
"grad_norm": 0.42774826288223267,
"learning_rate": 1.3144394761515077e-05,
"loss": 0.0277,
"step": 2040
},
{
"epoch": 7.79467680608365,
"grad_norm": 0.26309627294540405,
"learning_rate": 1.272238152050551e-05,
"loss": 0.0352,
"step": 2050
},
{
"epoch": 7.832699619771863,
"grad_norm": 0.32600271701812744,
"learning_rate": 1.2306264277288654e-05,
"loss": 0.0291,
"step": 2060
},
{
"epoch": 7.870722433460076,
"grad_norm": 0.4313303828239441,
"learning_rate": 1.1896108846861497e-05,
"loss": 0.0301,
"step": 2070
},
{
"epoch": 7.908745247148289,
"grad_norm": 0.30629509687423706,
"learning_rate": 1.1491980101273553e-05,
"loss": 0.0276,
"step": 2080
},
{
"epoch": 7.946768060836502,
"grad_norm": 0.24339847266674042,
"learning_rate": 1.1093941959366476e-05,
"loss": 0.0306,
"step": 2090
},
{
"epoch": 7.984790874524715,
"grad_norm": 0.307231068611145,
"learning_rate": 1.070205737666427e-05,
"loss": 0.0292,
"step": 2100
},
{
"epoch": 8.022813688212928,
"grad_norm": 0.30481287837028503,
"learning_rate": 1.0316388335416066e-05,
"loss": 0.0268,
"step": 2110
},
{
"epoch": 8.060836501901141,
"grad_norm": 0.2899107336997986,
"learning_rate": 9.936995834792623e-06,
"loss": 0.0223,
"step": 2120
},
{
"epoch": 8.098859315589353,
"grad_norm": 0.2853327989578247,
"learning_rate": 9.56393988123851e-06,
"loss": 0.0288,
"step": 2130
},
{
"epoch": 8.136882129277566,
"grad_norm": 0.28443998098373413,
"learning_rate": 9.197279478981136e-06,
"loss": 0.0274,
"step": 2140
},
{
"epoch": 8.17490494296578,
"grad_norm": 0.24939852952957153,
"learning_rate": 8.83707262069845e-06,
"loss": 0.0255,
"step": 2150
},
{
"epoch": 8.212927756653992,
"grad_norm": 0.28874605894088745,
"learning_rate": 8.483376278346467e-06,
"loss": 0.0238,
"step": 2160
},
{
"epoch": 8.250950570342205,
"grad_norm": 0.3146332800388336,
"learning_rate": 8.13624639414841e-06,
"loss": 0.0255,
"step": 2170
},
{
"epoch": 8.288973384030419,
"grad_norm": 0.33142825961112976,
"learning_rate": 7.795737871746611e-06,
"loss": 0.0259,
"step": 2180
},
{
"epoch": 8.326996197718632,
"grad_norm": 0.22175247967243195,
"learning_rate": 7.46190456751863e-06,
"loss": 0.0274,
"step": 2190
},
{
"epoch": 8.365019011406844,
"grad_norm": 0.3944449722766876,
"learning_rate": 7.134799282059168e-06,
"loss": 0.0295,
"step": 2200
},
{
"epoch": 8.403041825095057,
"grad_norm": 0.25309473276138306,
"learning_rate": 6.814473751828871e-06,
"loss": 0.0278,
"step": 2210
},
{
"epoch": 8.44106463878327,
"grad_norm": 0.27433377504348755,
"learning_rate": 6.500978640971445e-06,
"loss": 0.0305,
"step": 2220
},
{
"epoch": 8.479087452471482,
"grad_norm": 0.2707982063293457,
"learning_rate": 6.194363533300351e-06,
"loss": 0.0302,
"step": 2230
},
{
"epoch": 8.517110266159696,
"grad_norm": 0.4330613315105438,
"learning_rate": 5.894676924456454e-06,
"loss": 0.0251,
"step": 2240
},
{
"epoch": 8.55513307984791,
"grad_norm": 0.32701870799064636,
"learning_rate": 5.601966214237714e-06,
"loss": 0.0278,
"step": 2250
},
{
"epoch": 8.593155893536121,
"grad_norm": 0.181648850440979,
"learning_rate": 5.316277699102223e-06,
"loss": 0.0278,
"step": 2260
},
{
"epoch": 8.631178707224334,
"grad_norm": 0.24093398451805115,
"learning_rate": 5.037656564845711e-06,
"loss": 0.0244,
"step": 2270
},
{
"epoch": 8.669201520912548,
"grad_norm": 0.2580304741859436,
"learning_rate": 4.76614687945483e-06,
"loss": 0.0272,
"step": 2280
},
{
"epoch": 8.70722433460076,
"grad_norm": 0.41510969400405884,
"learning_rate": 4.5017915861370605e-06,
"loss": 0.0294,
"step": 2290
},
{
"epoch": 8.745247148288973,
"grad_norm": 0.4753967225551605,
"learning_rate": 4.244632496528716e-06,
"loss": 0.0259,
"step": 2300
},
{
"epoch": 8.783269961977187,
"grad_norm": 0.23562125861644745,
"learning_rate": 3.9947102840817595e-06,
"loss": 0.0256,
"step": 2310
},
{
"epoch": 8.8212927756654,
"grad_norm": 0.3789341449737549,
"learning_rate": 3.75206447763074e-06,
"loss": 0.032,
"step": 2320
},
{
"epoch": 8.859315589353612,
"grad_norm": 0.36058199405670166,
"learning_rate": 3.516733455140692e-06,
"loss": 0.0284,
"step": 2330
},
{
"epoch": 8.897338403041825,
"grad_norm": 0.42400893568992615,
"learning_rate": 3.288754437637165e-06,
"loss": 0.0295,
"step": 2340
},
{
"epoch": 8.935361216730039,
"grad_norm": 0.33166080713272095,
"learning_rate": 3.068163483319103e-06,
"loss": 0.0291,
"step": 2350
},
{
"epoch": 8.97338403041825,
"grad_norm": 0.5982539653778076,
"learning_rate": 2.8549954818557877e-06,
"loss": 0.0266,
"step": 2360
},
{
"epoch": 9.011406844106464,
"grad_norm": 0.16449838876724243,
"learning_rate": 2.649284148868475e-06,
"loss": 0.026,
"step": 2370
},
{
"epoch": 9.049429657794677,
"grad_norm": 0.5010789036750793,
"learning_rate": 2.4510620205978e-06,
"loss": 0.0277,
"step": 2380
},
{
"epoch": 9.087452471482889,
"grad_norm": 0.24876657128334045,
"learning_rate": 2.2603604487576922e-06,
"loss": 0.0233,
"step": 2390
},
{
"epoch": 9.125475285171103,
"grad_norm": 0.2562822997570038,
"learning_rate": 2.0772095955766625e-06,
"loss": 0.0265,
"step": 2400
},
{
"epoch": 9.163498098859316,
"grad_norm": 0.29126426577568054,
"learning_rate": 1.9016384290271805e-06,
"loss": 0.0302,
"step": 2410
},
{
"epoch": 9.201520912547528,
"grad_norm": 0.36330851912498474,
"learning_rate": 1.7336747182440105e-06,
"loss": 0.0244,
"step": 2420
},
{
"epoch": 9.239543726235741,
"grad_norm": 0.3469178378582001,
"learning_rate": 1.573345029132034e-06,
"loss": 0.0308,
"step": 2430
},
{
"epoch": 9.277566539923955,
"grad_norm": 0.3596096634864807,
"learning_rate": 1.4206747201645777e-06,
"loss": 0.0245,
"step": 2440
},
{
"epoch": 9.315589353612168,
"grad_norm": 0.33200764656066895,
"learning_rate": 1.2756879383725162e-06,
"loss": 0.0276,
"step": 2450
},
{
"epoch": 9.35361216730038,
"grad_norm": 0.26584237813949585,
"learning_rate": 1.1384076155251223e-06,
"loss": 0.0304,
"step": 2460
},
{
"epoch": 9.391634980988593,
"grad_norm": 0.39666983485221863,
"learning_rate": 1.0088554645030169e-06,
"loss": 0.0265,
"step": 2470
},
{
"epoch": 9.429657794676807,
"grad_norm": 0.3055102825164795,
"learning_rate": 8.870519758640172e-07,
"loss": 0.0228,
"step": 2480
},
{
"epoch": 9.467680608365018,
"grad_norm": 0.2553930878639221,
"learning_rate": 7.730164146022179e-07,
"loss": 0.026,
"step": 2490
},
{
"epoch": 9.505703422053232,
"grad_norm": 0.29695436358451843,
"learning_rate": 6.667668171009889e-07,
"loss": 0.0277,
"step": 2500
},
{
"epoch": 9.543726235741445,
"grad_norm": 0.28649795055389404,
"learning_rate": 5.683199882802081e-07,
"loss": 0.0246,
"step": 2510
},
{
"epoch": 9.581749049429657,
"grad_norm": 0.26851099729537964,
"learning_rate": 4.776914989383763e-07,
"loss": 0.0264,
"step": 2520
},
{
"epoch": 9.61977186311787,
"grad_norm": 0.5427762269973755,
"learning_rate": 3.948956832898143e-07,
"loss": 0.0283,
"step": 2530
},
{
"epoch": 9.657794676806084,
"grad_norm": 0.30882003903388977,
"learning_rate": 3.1994563669754953e-07,
"loss": 0.024,
"step": 2540
},
{
"epoch": 9.695817490494296,
"grad_norm": 0.22906525433063507,
"learning_rate": 2.528532136020445e-07,
"loss": 0.0308,
"step": 2550
},
{
"epoch": 9.73384030418251,
"grad_norm": 0.2328147441148758,
"learning_rate": 1.9362902564628583e-07,
"loss": 0.0239,
"step": 2560
},
{
"epoch": 9.771863117870723,
"grad_norm": 0.3251258134841919,
"learning_rate": 1.42282439997371e-07,
"loss": 0.0296,
"step": 2570
},
{
"epoch": 9.809885931558936,
"grad_norm": 0.39408111572265625,
"learning_rate": 9.882157786497149e-08,
"loss": 0.0289,
"step": 2580
},
{
"epoch": 9.847908745247148,
"grad_norm": 0.17637042701244354,
"learning_rate": 6.32533132168378e-08,
"loss": 0.0217,
"step": 2590
},
{
"epoch": 9.885931558935361,
"grad_norm": 0.42764726281166077,
"learning_rate": 3.558327169158604e-08,
"loss": 0.0272,
"step": 2600
},
{
"epoch": 9.923954372623575,
"grad_norm": 0.27274248003959656,
"learning_rate": 1.581582970890394e-08,
"loss": 0.0248,
"step": 2610
},
{
"epoch": 9.961977186311787,
"grad_norm": 0.22425174713134766,
"learning_rate": 3.954113777382418e-09,
"loss": 0.0274,
"step": 2620
},
{
"epoch": 10.0,
"grad_norm": 0.19350482523441315,
"learning_rate": 0.0,
"loss": 0.0251,
"step": 2630
},
{
"epoch": 10.0,
"step": 2630,
"total_flos": 2.7759792547300608e+17,
"train_loss": 0.062387093805314923,
"train_runtime": 2695.4145,
"train_samples_per_second": 47.807,
"train_steps_per_second": 0.976
}
],
"logging_steps": 10,
"max_steps": 2630,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 10000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.7759792547300608e+17,
"train_batch_size": 49,
"trial_name": null,
"trial_params": null
}