{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 1465,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0034129692832764505,
"grad_norm": 2.50749013823737,
"learning_rate": 2.702702702702703e-06,
"loss": 0.87,
"num_tokens": 1933925.0,
"step": 5
},
{
"epoch": 0.006825938566552901,
"grad_norm": 1.6188623255616226,
"learning_rate": 6.081081081081082e-06,
"loss": 0.8109,
"num_tokens": 3759146.0,
"step": 10
},
{
"epoch": 0.010238907849829351,
"grad_norm": 1.0123465050636375,
"learning_rate": 9.45945945945946e-06,
"loss": 0.7772,
"num_tokens": 5644524.0,
"step": 15
},
{
"epoch": 0.013651877133105802,
"grad_norm": 0.7086005802414309,
"learning_rate": 1.2837837837837838e-05,
"loss": 0.7221,
"num_tokens": 7668808.0,
"step": 20
},
{
"epoch": 0.017064846416382253,
"grad_norm": 0.5775312610735582,
"learning_rate": 1.6216216216216218e-05,
"loss": 0.6862,
"num_tokens": 9513010.0,
"step": 25
},
{
"epoch": 0.020477815699658702,
"grad_norm": 0.5015160747913217,
"learning_rate": 1.9594594594594595e-05,
"loss": 0.648,
"num_tokens": 11368873.0,
"step": 30
},
{
"epoch": 0.023890784982935155,
"grad_norm": 0.4755691629562603,
"learning_rate": 2.2972972972972976e-05,
"loss": 0.6475,
"num_tokens": 13219424.0,
"step": 35
},
{
"epoch": 0.027303754266211604,
"grad_norm": 0.45995404550990876,
"learning_rate": 2.635135135135135e-05,
"loss": 0.6258,
"num_tokens": 15118063.0,
"step": 40
},
{
"epoch": 0.030716723549488054,
"grad_norm": 0.4312148613152685,
"learning_rate": 2.9729729729729733e-05,
"loss": 0.6282,
"num_tokens": 17042190.0,
"step": 45
},
{
"epoch": 0.034129692832764506,
"grad_norm": 0.451819013423788,
"learning_rate": 3.310810810810811e-05,
"loss": 0.6123,
"num_tokens": 18906839.0,
"step": 50
},
{
"epoch": 0.03754266211604096,
"grad_norm": 0.5006689200573762,
"learning_rate": 3.648648648648649e-05,
"loss": 0.5998,
"num_tokens": 20791105.0,
"step": 55
},
{
"epoch": 0.040955631399317405,
"grad_norm": 0.46031777713747535,
"learning_rate": 3.986486486486487e-05,
"loss": 0.6081,
"num_tokens": 22641755.0,
"step": 60
},
{
"epoch": 0.04436860068259386,
"grad_norm": 0.46899875801322866,
"learning_rate": 4.324324324324325e-05,
"loss": 0.6093,
"num_tokens": 24604914.0,
"step": 65
},
{
"epoch": 0.04778156996587031,
"grad_norm": 0.6110895849073993,
"learning_rate": 4.662162162162162e-05,
"loss": 0.6024,
"num_tokens": 26636629.0,
"step": 70
},
{
"epoch": 0.051194539249146756,
"grad_norm": 0.6405781895008703,
"learning_rate": 5e-05,
"loss": 0.5701,
"num_tokens": 28441260.0,
"step": 75
},
{
"epoch": 0.05460750853242321,
"grad_norm": 0.581519178723128,
"learning_rate": 4.999856539149453e-05,
"loss": 0.5927,
"num_tokens": 30417967.0,
"step": 80
},
{
"epoch": 0.05802047781569966,
"grad_norm": 0.44619446982283184,
"learning_rate": 4.99942617489205e-05,
"loss": 0.5881,
"num_tokens": 32351323.0,
"step": 85
},
{
"epoch": 0.06143344709897611,
"grad_norm": 0.5141883933664836,
"learning_rate": 4.998708962108165e-05,
"loss": 0.5876,
"num_tokens": 34231333.0,
"step": 90
},
{
"epoch": 0.06484641638225255,
"grad_norm": 0.5192244585208947,
"learning_rate": 4.9977049922573155e-05,
"loss": 0.584,
"num_tokens": 36068371.0,
"step": 95
},
{
"epoch": 0.06825938566552901,
"grad_norm": 0.46267202773061866,
"learning_rate": 4.9964143933664945e-05,
"loss": 0.5962,
"num_tokens": 37961424.0,
"step": 100
},
{
"epoch": 0.07167235494880546,
"grad_norm": 0.551678204091152,
"learning_rate": 4.99483733001385e-05,
"loss": 0.5953,
"num_tokens": 39846738.0,
"step": 105
},
{
"epoch": 0.07508532423208192,
"grad_norm": 0.5571076806342042,
"learning_rate": 4.9929740033076915e-05,
"loss": 0.5899,
"num_tokens": 41826860.0,
"step": 110
},
{
"epoch": 0.07849829351535836,
"grad_norm": 0.5411739076594996,
"learning_rate": 4.9908246508608506e-05,
"loss": 0.5848,
"num_tokens": 43675049.0,
"step": 115
},
{
"epoch": 0.08191126279863481,
"grad_norm": 0.4485161126231456,
"learning_rate": 4.9883895467603764e-05,
"loss": 0.5773,
"num_tokens": 45543403.0,
"step": 120
},
{
"epoch": 0.08532423208191127,
"grad_norm": 0.4648271403988684,
"learning_rate": 4.985669001532583e-05,
"loss": 0.5904,
"num_tokens": 47466657.0,
"step": 125
},
{
"epoch": 0.08873720136518772,
"grad_norm": 0.4638975404274955,
"learning_rate": 4.982663362103456e-05,
"loss": 0.5818,
"num_tokens": 49369486.0,
"step": 130
},
{
"epoch": 0.09215017064846416,
"grad_norm": 0.533501406227831,
"learning_rate": 4.979373011754409e-05,
"loss": 0.5773,
"num_tokens": 51193029.0,
"step": 135
},
{
"epoch": 0.09556313993174062,
"grad_norm": 0.5456590197317953,
"learning_rate": 4.975798370073404e-05,
"loss": 0.5767,
"num_tokens": 53010874.0,
"step": 140
},
{
"epoch": 0.09897610921501707,
"grad_norm": 0.47396184664647756,
"learning_rate": 4.971939892901451e-05,
"loss": 0.5696,
"num_tokens": 54919625.0,
"step": 145
},
{
"epoch": 0.10238907849829351,
"grad_norm": 0.4121898187313886,
"learning_rate": 4.9677980722744774e-05,
"loss": 0.5881,
"num_tokens": 56909889.0,
"step": 150
},
{
"epoch": 0.10580204778156997,
"grad_norm": 0.4504445666388642,
"learning_rate": 4.9633734363605805e-05,
"loss": 0.5585,
"num_tokens": 58759721.0,
"step": 155
},
{
"epoch": 0.10921501706484642,
"grad_norm": 0.43511695023636615,
"learning_rate": 4.9586665493926773e-05,
"loss": 0.5655,
"num_tokens": 60650570.0,
"step": 160
},
{
"epoch": 0.11262798634812286,
"grad_norm": 0.4529892002607441,
"learning_rate": 4.9536780115965514e-05,
"loss": 0.5499,
"num_tokens": 62662896.0,
"step": 165
},
{
"epoch": 0.11604095563139932,
"grad_norm": 0.4983273663357267,
"learning_rate": 4.9484084591143154e-05,
"loss": 0.562,
"num_tokens": 64564660.0,
"step": 170
},
{
"epoch": 0.11945392491467577,
"grad_norm": 0.4483226301683336,
"learning_rate": 4.942858563923285e-05,
"loss": 0.5669,
"num_tokens": 66485190.0,
"step": 175
},
{
"epoch": 0.12286689419795221,
"grad_norm": 0.49399353340120483,
"learning_rate": 4.9370290337502924e-05,
"loss": 0.5805,
"num_tokens": 68426882.0,
"step": 180
},
{
"epoch": 0.12627986348122866,
"grad_norm": 0.40977009590857616,
"learning_rate": 4.930920611981431e-05,
"loss": 0.5699,
"num_tokens": 70332714.0,
"step": 185
},
{
"epoch": 0.1296928327645051,
"grad_norm": 0.542445950494901,
"learning_rate": 4.9245340775672634e-05,
"loss": 0.5691,
"num_tokens": 72252819.0,
"step": 190
},
{
"epoch": 0.13310580204778158,
"grad_norm": 0.4622793694221624,
"learning_rate": 4.917870244923486e-05,
"loss": 0.5439,
"num_tokens": 74221358.0,
"step": 195
},
{
"epoch": 0.13651877133105803,
"grad_norm": 0.5759125508857202,
"learning_rate": 4.910929963827078e-05,
"loss": 0.5683,
"num_tokens": 76160914.0,
"step": 200
},
{
"epoch": 0.13993174061433447,
"grad_norm": 0.39779641276955163,
"learning_rate": 4.903714119307929e-05,
"loss": 0.5564,
"num_tokens": 78170639.0,
"step": 205
},
{
"epoch": 0.14334470989761092,
"grad_norm": 0.4401620614285602,
"learning_rate": 4.8962236315359896e-05,
"loss": 0.5603,
"num_tokens": 80152955.0,
"step": 210
},
{
"epoch": 0.14675767918088736,
"grad_norm": 0.4752332395853894,
"learning_rate": 4.8884594557039224e-05,
"loss": 0.5587,
"num_tokens": 81973764.0,
"step": 215
},
{
"epoch": 0.15017064846416384,
"grad_norm": 0.4004170259025375,
"learning_rate": 4.8804225819052994e-05,
"loss": 0.5664,
"num_tokens": 83901702.0,
"step": 220
},
{
"epoch": 0.15358361774744028,
"grad_norm": 0.5178389649751411,
"learning_rate": 4.872114035008345e-05,
"loss": 0.5416,
"num_tokens": 85675897.0,
"step": 225
},
{
"epoch": 0.15699658703071673,
"grad_norm": 0.5112947588191477,
"learning_rate": 4.863534874525241e-05,
"loss": 0.582,
"num_tokens": 87691018.0,
"step": 230
},
{
"epoch": 0.16040955631399317,
"grad_norm": 0.5199172012585387,
"learning_rate": 4.854686194477017e-05,
"loss": 0.5514,
"num_tokens": 89579835.0,
"step": 235
},
{
"epoch": 0.16382252559726962,
"grad_norm": 0.5079655450026499,
"learning_rate": 4.845569123254044e-05,
"loss": 0.5503,
"num_tokens": 91542428.0,
"step": 240
},
{
"epoch": 0.16723549488054607,
"grad_norm": 0.4384736028856376,
"learning_rate": 4.836184823472136e-05,
"loss": 0.5511,
"num_tokens": 93492833.0,
"step": 245
},
{
"epoch": 0.17064846416382254,
"grad_norm": 0.4196228017966155,
"learning_rate": 4.8265344918242974e-05,
"loss": 0.565,
"num_tokens": 95438170.0,
"step": 250
},
{
"epoch": 0.17406143344709898,
"grad_norm": 0.5223124856503392,
"learning_rate": 4.8166193589281154e-05,
"loss": 0.5565,
"num_tokens": 97303144.0,
"step": 255
},
{
"epoch": 0.17747440273037543,
"grad_norm": 0.5446858466087424,
"learning_rate": 4.806440689168833e-05,
"loss": 0.5504,
"num_tokens": 99383692.0,
"step": 260
},
{
"epoch": 0.18088737201365188,
"grad_norm": 0.5859886624013145,
"learning_rate": 4.795999780538113e-05,
"loss": 0.5555,
"num_tokens": 101357212.0,
"step": 265
},
{
"epoch": 0.18430034129692832,
"grad_norm": 0.5022040294550851,
"learning_rate": 4.785297964468519e-05,
"loss": 0.5494,
"num_tokens": 103223537.0,
"step": 270
},
{
"epoch": 0.18771331058020477,
"grad_norm": 0.43644085987482656,
"learning_rate": 4.7743366056637266e-05,
"loss": 0.5607,
"num_tokens": 105044617.0,
"step": 275
},
{
"epoch": 0.19112627986348124,
"grad_norm": 0.4171727799014969,
"learning_rate": 4.7631171019244974e-05,
"loss": 0.5691,
"num_tokens": 106901687.0,
"step": 280
},
{
"epoch": 0.1945392491467577,
"grad_norm": 0.41261747021762124,
"learning_rate": 4.7516408839704316e-05,
"loss": 0.5251,
"num_tokens": 108801293.0,
"step": 285
},
{
"epoch": 0.19795221843003413,
"grad_norm": 0.40680116615519046,
"learning_rate": 4.739909415257518e-05,
"loss": 0.5489,
"num_tokens": 110840758.0,
"step": 290
},
{
"epoch": 0.20136518771331058,
"grad_norm": 0.3913282639543854,
"learning_rate": 4.727924191791518e-05,
"loss": 0.552,
"num_tokens": 112713541.0,
"step": 295
},
{
"epoch": 0.20477815699658702,
"grad_norm": 0.4331430104517169,
"learning_rate": 4.7156867419371866e-05,
"loss": 0.5525,
"num_tokens": 114521504.0,
"step": 300
},
{
"epoch": 0.20819112627986347,
"grad_norm": 0.4119990823055153,
"learning_rate": 4.703198626223383e-05,
"loss": 0.5378,
"num_tokens": 116445027.0,
"step": 305
},
{
"epoch": 0.21160409556313994,
"grad_norm": 0.3887498249238803,
"learning_rate": 4.6904614371440654e-05,
"loss": 0.5374,
"num_tokens": 118445759.0,
"step": 310
},
{
"epoch": 0.2150170648464164,
"grad_norm": 0.4309808604161364,
"learning_rate": 4.677476798955213e-05,
"loss": 0.5441,
"num_tokens": 120303683.0,
"step": 315
},
{
"epoch": 0.21843003412969283,
"grad_norm": 0.42995753849368423,
"learning_rate": 4.664246367467707e-05,
"loss": 0.5485,
"num_tokens": 122311693.0,
"step": 320
},
{
"epoch": 0.22184300341296928,
"grad_norm": 0.3731200866529834,
"learning_rate": 4.6507718298361716e-05,
"loss": 0.5508,
"num_tokens": 124240526.0,
"step": 325
},
{
"epoch": 0.22525597269624573,
"grad_norm": 0.39306487790311306,
"learning_rate": 4.637054904343833e-05,
"loss": 0.5465,
"num_tokens": 126094524.0,
"step": 330
},
{
"epoch": 0.22866894197952217,
"grad_norm": 0.42607532993380426,
"learning_rate": 4.623097340183401e-05,
"loss": 0.5296,
"num_tokens": 128075174.0,
"step": 335
},
{
"epoch": 0.23208191126279865,
"grad_norm": 0.42217543753738046,
"learning_rate": 4.60890091723401e-05,
"loss": 0.5547,
"num_tokens": 129971056.0,
"step": 340
},
{
"epoch": 0.2354948805460751,
"grad_norm": 0.4373865514759956,
"learning_rate": 4.5944674458342473e-05,
"loss": 0.5535,
"num_tokens": 131847305.0,
"step": 345
},
{
"epoch": 0.23890784982935154,
"grad_norm": 0.4780257445539072,
"learning_rate": 4.579798766551298e-05,
"loss": 0.5603,
"num_tokens": 133765982.0,
"step": 350
},
{
"epoch": 0.24232081911262798,
"grad_norm": 0.39373360953599906,
"learning_rate": 4.564896749946234e-05,
"loss": 0.5332,
"num_tokens": 135541389.0,
"step": 355
},
{
"epoch": 0.24573378839590443,
"grad_norm": 0.48408274088956066,
"learning_rate": 4.549763296335481e-05,
"loss": 0.5417,
"num_tokens": 137522281.0,
"step": 360
},
{
"epoch": 0.24914675767918087,
"grad_norm": 0.38885053329494734,
"learning_rate": 4.534400335548484e-05,
"loss": 0.5302,
"num_tokens": 139412841.0,
"step": 365
},
{
"epoch": 0.2525597269624573,
"grad_norm": 0.4284079243560024,
"learning_rate": 4.518809826681616e-05,
"loss": 0.5338,
"num_tokens": 141196084.0,
"step": 370
},
{
"epoch": 0.25597269624573377,
"grad_norm": 0.40280789037829534,
"learning_rate": 4.5029937578483566e-05,
"loss": 0.5545,
"num_tokens": 143277757.0,
"step": 375
},
{
"epoch": 0.2593856655290102,
"grad_norm": 0.4139479242813224,
"learning_rate": 4.486954145925761e-05,
"loss": 0.5327,
"num_tokens": 145136704.0,
"step": 380
},
{
"epoch": 0.2627986348122867,
"grad_norm": 0.39471160328650434,
"learning_rate": 4.470693036297268e-05,
"loss": 0.5229,
"num_tokens": 147007396.0,
"step": 385
},
{
"epoch": 0.26621160409556316,
"grad_norm": 0.4071301905270605,
"learning_rate": 4.454212502591871e-05,
"loss": 0.5396,
"num_tokens": 148940122.0,
"step": 390
},
{
"epoch": 0.2696245733788396,
"grad_norm": 0.4385891426510694,
"learning_rate": 4.437514646419687e-05,
"loss": 0.5473,
"num_tokens": 150912454.0,
"step": 395
},
{
"epoch": 0.27303754266211605,
"grad_norm": 0.4067463607989996,
"learning_rate": 4.4206015971039625e-05,
"loss": 0.5346,
"num_tokens": 152809678.0,
"step": 400
},
{
"epoch": 0.2764505119453925,
"grad_norm": 0.4680396603333508,
"learning_rate": 4.403475511409531e-05,
"loss": 0.5192,
"num_tokens": 154713852.0,
"step": 405
},
{
"epoch": 0.27986348122866894,
"grad_norm": 0.39429793945119174,
"learning_rate": 4.386138573267788e-05,
"loss": 0.5495,
"num_tokens": 156683573.0,
"step": 410
},
{
"epoch": 0.2832764505119454,
"grad_norm": 0.4016748463060468,
"learning_rate": 4.368592993498194e-05,
"loss": 0.525,
"num_tokens": 158515202.0,
"step": 415
},
{
"epoch": 0.28668941979522183,
"grad_norm": 0.48270745046669,
"learning_rate": 4.350841009526346e-05,
"loss": 0.5464,
"num_tokens": 160433326.0,
"step": 420
},
{
"epoch": 0.2901023890784983,
"grad_norm": 0.43091858228129815,
"learning_rate": 4.33288488509866e-05,
"loss": 0.5305,
"num_tokens": 162331530.0,
"step": 425
},
{
"epoch": 0.2935153583617747,
"grad_norm": 0.45890742725656564,
"learning_rate": 4.3147269099937014e-05,
"loss": 0.5472,
"num_tokens": 164374316.0,
"step": 430
},
{
"epoch": 0.29692832764505117,
"grad_norm": 0.4169343021344466,
"learning_rate": 4.2963693997301814e-05,
"loss": 0.5139,
"num_tokens": 166249544.0,
"step": 435
},
{
"epoch": 0.3003412969283277,
"grad_norm": 0.38041559154226434,
"learning_rate": 4.2778146952716884e-05,
"loss": 0.5236,
"num_tokens": 168223299.0,
"step": 440
},
{
"epoch": 0.3037542662116041,
"grad_norm": 0.376843828158071,
"learning_rate": 4.259065162728163e-05,
"loss": 0.5424,
"num_tokens": 170189422.0,
"step": 445
},
{
"epoch": 0.30716723549488056,
"grad_norm": 0.39092535624386915,
"learning_rate": 4.240123193054172e-05,
"loss": 0.5204,
"num_tokens": 172132000.0,
"step": 450
},
{
"epoch": 0.310580204778157,
"grad_norm": 0.438997522031723,
"learning_rate": 4.2209912017440066e-05,
"loss": 0.5322,
"num_tokens": 174083393.0,
"step": 455
},
{
"epoch": 0.31399317406143346,
"grad_norm": 0.3770965790416271,
"learning_rate": 4.2016716285236655e-05,
"loss": 0.5213,
"num_tokens": 176086331.0,
"step": 460
},
{
"epoch": 0.3174061433447099,
"grad_norm": 0.44000928731950867,
"learning_rate": 4.182166937039731e-05,
"loss": 0.5061,
"num_tokens": 178019887.0,
"step": 465
},
{
"epoch": 0.32081911262798635,
"grad_norm": 0.4170515747782146,
"learning_rate": 4.1624796145452075e-05,
"loss": 0.5327,
"num_tokens": 179917640.0,
"step": 470
},
{
"epoch": 0.3242320819112628,
"grad_norm": 0.3715898097749977,
"learning_rate": 4.1426121715823455e-05,
"loss": 0.5366,
"num_tokens": 181861425.0,
"step": 475
},
{
"epoch": 0.32764505119453924,
"grad_norm": 0.3953828116703113,
"learning_rate": 4.1225671416624886e-05,
"loss": 0.5265,
"num_tokens": 183746129.0,
"step": 480
},
{
"epoch": 0.3310580204778157,
"grad_norm": 0.43082731831273297,
"learning_rate": 4.1023470809430115e-05,
"loss": 0.543,
"num_tokens": 185690559.0,
"step": 485
},
{
"epoch": 0.33447098976109213,
"grad_norm": 0.3905669096501847,
"learning_rate": 4.081954567901341e-05,
"loss": 0.5364,
"num_tokens": 187699370.0,
"step": 490
},
{
"epoch": 0.3378839590443686,
"grad_norm": 0.38820279646990474,
"learning_rate": 4.0613922030061593e-05,
"loss": 0.5214,
"num_tokens": 189580008.0,
"step": 495
},
{
"epoch": 0.3412969283276451,
"grad_norm": 0.42021271993551723,
"learning_rate": 4.040662608385783e-05,
"loss": 0.5236,
"num_tokens": 191512142.0,
"step": 500
},
{
"epoch": 0.3447098976109215,
"grad_norm": 0.3994553277975334,
"learning_rate": 4.019768427493792e-05,
"loss": 0.5196,
"num_tokens": 193419599.0,
"step": 505
},
{
"epoch": 0.34812286689419797,
"grad_norm": 0.3666548627034848,
"learning_rate": 3.99871232477193e-05,
"loss": 0.5278,
"num_tokens": 195367749.0,
"step": 510
},
{
"epoch": 0.3515358361774744,
"grad_norm": 0.3822859965000655,
"learning_rate": 3.9774969853103334e-05,
"loss": 0.5079,
"num_tokens": 197256984.0,
"step": 515
},
{
"epoch": 0.35494880546075086,
"grad_norm": 0.3731101615447913,
"learning_rate": 3.9561251145051325e-05,
"loss": 0.5172,
"num_tokens": 199215371.0,
"step": 520
},
{
"epoch": 0.3583617747440273,
"grad_norm": 0.4024382355228473,
"learning_rate": 3.934599437713449e-05,
"loss": 0.5435,
"num_tokens": 201102127.0,
"step": 525
},
{
"epoch": 0.36177474402730375,
"grad_norm": 0.42416341608468616,
"learning_rate": 3.912922699905861e-05,
"loss": 0.5203,
"num_tokens": 202903048.0,
"step": 530
},
{
"epoch": 0.3651877133105802,
"grad_norm": 0.4507790300235238,
"learning_rate": 3.8910976653163655e-05,
"loss": 0.5212,
"num_tokens": 204769389.0,
"step": 535
},
{
"epoch": 0.36860068259385664,
"grad_norm": 0.40038853454858814,
"learning_rate": 3.869127117089871e-05,
"loss": 0.5207,
"num_tokens": 206761213.0,
"step": 540
},
{
"epoch": 0.3720136518771331,
"grad_norm": 0.38456031528474893,
"learning_rate": 3.847013856927298e-05,
"loss": 0.5171,
"num_tokens": 208772401.0,
"step": 545
},
{
"epoch": 0.37542662116040953,
"grad_norm": 0.4050638677352149,
"learning_rate": 3.824760704728303e-05,
"loss": 0.5167,
"num_tokens": 210585632.0,
"step": 550
},
{
"epoch": 0.378839590443686,
"grad_norm": 0.41821430327774767,
"learning_rate": 3.802370498231677e-05,
"loss": 0.5265,
"num_tokens": 212446145.0,
"step": 555
},
{
"epoch": 0.3822525597269625,
"grad_norm": 0.3819310209614732,
"learning_rate": 3.7798460926534814e-05,
"loss": 0.5186,
"num_tokens": 214261738.0,
"step": 560
},
{
"epoch": 0.3856655290102389,
"grad_norm": 0.3755106192945339,
"learning_rate": 3.757190360322943e-05,
"loss": 0.5329,
"num_tokens": 216231144.0,
"step": 565
},
{
"epoch": 0.3890784982935154,
"grad_norm": 0.38208455166534944,
"learning_rate": 3.7344061903161796e-05,
"loss": 0.5223,
"num_tokens": 218144024.0,
"step": 570
},
{
"epoch": 0.3924914675767918,
"grad_norm": 0.3539646707114066,
"learning_rate": 3.711496488087775e-05,
"loss": 0.5212,
"num_tokens": 220108684.0,
"step": 575
},
{
"epoch": 0.39590443686006827,
"grad_norm": 0.3589926928321897,
"learning_rate": 3.6884641751002784e-05,
"loss": 0.5236,
"num_tokens": 222057295.0,
"step": 580
},
{
"epoch": 0.3993174061433447,
"grad_norm": 0.3562958553389008,
"learning_rate": 3.6653121884516555e-05,
"loss": 0.5092,
"num_tokens": 223892351.0,
"step": 585
},
{
"epoch": 0.40273037542662116,
"grad_norm": 0.35407235016262395,
"learning_rate": 3.6420434805007476e-05,
"loss": 0.5179,
"num_tokens": 225828573.0,
"step": 590
},
{
"epoch": 0.4061433447098976,
"grad_norm": 0.34046241079182105,
"learning_rate": 3.618661018490784e-05,
"loss": 0.5134,
"num_tokens": 227748575.0,
"step": 595
},
{
"epoch": 0.40955631399317405,
"grad_norm": 0.3329107263813499,
"learning_rate": 3.595167784170998e-05,
"loss": 0.512,
"num_tokens": 229710487.0,
"step": 600
},
{
"epoch": 0.4129692832764505,
"grad_norm": 0.4437872930329917,
"learning_rate": 3.571566773416394e-05,
"loss": 0.5192,
"num_tokens": 231678126.0,
"step": 605
},
{
"epoch": 0.41638225255972694,
"grad_norm": 0.37300167458989936,
"learning_rate": 3.5478609958457056e-05,
"loss": 0.5368,
"num_tokens": 233617525.0,
"step": 610
},
{
"epoch": 0.4197952218430034,
"grad_norm": 0.35893741375216137,
"learning_rate": 3.524053474437611e-05,
"loss": 0.5231,
"num_tokens": 235464027.0,
"step": 615
},
{
"epoch": 0.4232081911262799,
"grad_norm": 0.35587624591010675,
"learning_rate": 3.500147245145238e-05,
"loss": 0.528,
"num_tokens": 237394471.0,
"step": 620
},
{
"epoch": 0.42662116040955633,
"grad_norm": 0.3708746214599896,
"learning_rate": 3.476145356509023e-05,
"loss": 0.5181,
"num_tokens": 239271145.0,
"step": 625
},
{
"epoch": 0.4300341296928328,
"grad_norm": 0.35856346963397057,
"learning_rate": 3.452050869267951e-05,
"loss": 0.5225,
"num_tokens": 241114261.0,
"step": 630
},
{
"epoch": 0.4334470989761092,
"grad_norm": 0.3136928173032959,
"learning_rate": 3.427866855969249e-05,
"loss": 0.5327,
"num_tokens": 242992742.0,
"step": 635
},
{
"epoch": 0.43686006825938567,
"grad_norm": 0.37325632854349405,
"learning_rate": 3.4035964005765786e-05,
"loss": 0.5081,
"num_tokens": 244967987.0,
"step": 640
},
{
"epoch": 0.4402730375426621,
"grad_norm": 0.4618580338006964,
"learning_rate": 3.379242598076761e-05,
"loss": 0.5427,
"num_tokens": 246976387.0,
"step": 645
},
{
"epoch": 0.44368600682593856,
"grad_norm": 0.3586335818960914,
"learning_rate": 3.354808554085103e-05,
"loss": 0.5107,
"num_tokens": 248751095.0,
"step": 650
},
{
"epoch": 0.447098976109215,
"grad_norm": 0.40711927274059334,
"learning_rate": 3.330297384449369e-05,
"loss": 0.5198,
"num_tokens": 250671856.0,
"step": 655
},
{
"epoch": 0.45051194539249145,
"grad_norm": 0.3839556864854868,
"learning_rate": 3.3057122148524394e-05,
"loss": 0.5082,
"num_tokens": 252607265.0,
"step": 660
},
{
"epoch": 0.4539249146757679,
"grad_norm": 0.3965949942432131,
"learning_rate": 3.2810561804137316e-05,
"loss": 0.5213,
"num_tokens": 254497259.0,
"step": 665
},
{
"epoch": 0.45733788395904434,
"grad_norm": 0.3790046349369836,
"learning_rate": 3.2563324252893964e-05,
"loss": 0.5202,
"num_tokens": 256417909.0,
"step": 670
},
{
"epoch": 0.46075085324232085,
"grad_norm": 0.35112804548112325,
"learning_rate": 3.2315441022713796e-05,
"loss": 0.5024,
"num_tokens": 258396985.0,
"step": 675
},
{
"epoch": 0.4641638225255973,
"grad_norm": 0.3498528091945533,
"learning_rate": 3.206694372385375e-05,
"loss": 0.524,
"num_tokens": 260456429.0,
"step": 680
},
{
"epoch": 0.46757679180887374,
"grad_norm": 0.33304357758211456,
"learning_rate": 3.1817864044877236e-05,
"loss": 0.5041,
"num_tokens": 262384995.0,
"step": 685
},
{
"epoch": 0.4709897610921502,
"grad_norm": 0.33178642606056385,
"learning_rate": 3.156823374861324e-05,
"loss": 0.5078,
"num_tokens": 264287905.0,
"step": 690
},
{
"epoch": 0.47440273037542663,
"grad_norm": 0.3667901940014392,
"learning_rate": 3.131808466810586e-05,
"loss": 0.5146,
"num_tokens": 266198448.0,
"step": 695
},
{
"epoch": 0.4778156996587031,
"grad_norm": 0.37162692372701855,
"learning_rate": 3.1067448702554976e-05,
"loss": 0.5143,
"num_tokens": 268098790.0,
"step": 700
},
{
"epoch": 0.4812286689419795,
"grad_norm": 0.3307745019851869,
"learning_rate": 3.08163578132484e-05,
"loss": 0.5257,
"num_tokens": 270033989.0,
"step": 705
},
{
"epoch": 0.48464163822525597,
"grad_norm": 0.34029736690615986,
"learning_rate": 3.056484401948618e-05,
"loss": 0.522,
"num_tokens": 271974065.0,
"step": 710
},
{
"epoch": 0.4880546075085324,
"grad_norm": 0.39322097601658623,
"learning_rate": 3.0312939394497442e-05,
"loss": 0.5317,
"num_tokens": 273982161.0,
"step": 715
},
{
"epoch": 0.49146757679180886,
"grad_norm": 0.41826615673656964,
"learning_rate": 3.0060676061350423e-05,
"loss": 0.5239,
"num_tokens": 275852045.0,
"step": 720
},
{
"epoch": 0.4948805460750853,
"grad_norm": 0.3374695252050787,
"learning_rate": 2.9808086188856084e-05,
"loss": 0.4941,
"num_tokens": 277687831.0,
"step": 725
},
{
"epoch": 0.49829351535836175,
"grad_norm": 0.3304039048198173,
"learning_rate": 2.9555201987465932e-05,
"loss": 0.5171,
"num_tokens": 279504484.0,
"step": 730
},
{
"epoch": 0.5017064846416383,
"grad_norm": 0.34845109052739065,
"learning_rate": 2.9302055705164516e-05,
"loss": 0.5019,
"num_tokens": 281456293.0,
"step": 735
},
{
"epoch": 0.5051194539249146,
"grad_norm": 0.33157077662528966,
"learning_rate": 2.9048679623357155e-05,
"loss": 0.5132,
"num_tokens": 283461546.0,
"step": 740
},
{
"epoch": 0.5085324232081911,
"grad_norm": 0.38033162519773295,
"learning_rate": 2.8795106052753356e-05,
"loss": 0.5087,
"num_tokens": 285433304.0,
"step": 745
},
{
"epoch": 0.5119453924914675,
"grad_norm": 0.34335625928994096,
"learning_rate": 2.854136732924656e-05,
"loss": 0.5219,
"num_tokens": 287371918.0,
"step": 750
},
{
"epoch": 0.515358361774744,
"grad_norm": 0.40565497458200667,
"learning_rate": 2.828749580979061e-05,
"loss": 0.4941,
"num_tokens": 289259239.0,
"step": 755
},
{
"epoch": 0.5187713310580204,
"grad_norm": 0.3720298836144228,
"learning_rate": 2.8033523868273625e-05,
"loss": 0.5106,
"num_tokens": 291057738.0,
"step": 760
},
{
"epoch": 0.5221843003412969,
"grad_norm": 0.31625729147636455,
"learning_rate": 2.777948389138959e-05,
"loss": 0.5157,
"num_tokens": 293034663.0,
"step": 765
},
{
"epoch": 0.5255972696245734,
"grad_norm": 0.3457494085179469,
"learning_rate": 2.7525408274508424e-05,
"loss": 0.5062,
"num_tokens": 294881963.0,
"step": 770
},
{
"epoch": 0.5290102389078498,
"grad_norm": 0.3216688679579312,
"learning_rate": 2.727132941754488e-05,
"loss": 0.5144,
"num_tokens": 296794020.0,
"step": 775
},
{
"epoch": 0.5324232081911263,
"grad_norm": 0.3909025772373753,
"learning_rate": 2.70172797208269e-05,
"loss": 0.5125,
"num_tokens": 298677895.0,
"step": 780
},
{
"epoch": 0.5358361774744027,
"grad_norm": 0.3250766580673669,
"learning_rate": 2.676329158096388e-05,
"loss": 0.5031,
"num_tokens": 300610709.0,
"step": 785
},
{
"epoch": 0.5392491467576792,
"grad_norm": 0.338726222176612,
"learning_rate": 2.650939738671543e-05,
"loss": 0.4924,
"num_tokens": 302387985.0,
"step": 790
},
{
"epoch": 0.5426621160409556,
"grad_norm": 0.3383596322502192,
"learning_rate": 2.6255629514861156e-05,
"loss": 0.523,
"num_tokens": 304293467.0,
"step": 795
},
{
"epoch": 0.5460750853242321,
"grad_norm": 0.39543644326281013,
"learning_rate": 2.6002020326071958e-05,
"loss": 0.5172,
"num_tokens": 306130593.0,
"step": 800
},
{
"epoch": 0.5494880546075085,
"grad_norm": 0.32006279775540386,
"learning_rate": 2.5748602160783308e-05,
"loss": 0.5222,
"num_tokens": 308025370.0,
"step": 805
},
{
"epoch": 0.552901023890785,
"grad_norm": 0.4085796376264559,
"learning_rate": 2.5495407335071247e-05,
"loss": 0.512,
"num_tokens": 309952008.0,
"step": 810
},
{
"epoch": 0.5563139931740614,
"grad_norm": 0.3661536040322409,
"learning_rate": 2.5242468136531354e-05,
"loss": 0.4993,
"num_tokens": 311856787.0,
"step": 815
},
{
"epoch": 0.5597269624573379,
"grad_norm": 0.3752261876585624,
"learning_rate": 2.4989816820161497e-05,
"loss": 0.5054,
"num_tokens": 313725489.0,
"step": 820
},
{
"epoch": 0.5631399317406144,
"grad_norm": 0.36687049267487,
"learning_rate": 2.4737485604248534e-05,
"loss": 0.5039,
"num_tokens": 315690477.0,
"step": 825
},
{
"epoch": 0.5665529010238908,
"grad_norm": 0.3524144778865341,
"learning_rate": 2.4485506666259895e-05,
"loss": 0.5111,
"num_tokens": 317525148.0,
"step": 830
},
{
"epoch": 0.5699658703071673,
"grad_norm": 0.33122901402654253,
"learning_rate": 2.4233912138740244e-05,
"loss": 0.5053,
"num_tokens": 319505322.0,
"step": 835
},
{
"epoch": 0.5733788395904437,
"grad_norm": 0.3303079569668191,
"learning_rate": 2.3982734105213967e-05,
"loss": 0.5087,
"num_tokens": 321410101.0,
"step": 840
},
{
"epoch": 0.5767918088737202,
"grad_norm": 0.33915287853036713,
"learning_rate": 2.373200459609379e-05,
"loss": 0.5008,
"num_tokens": 323302156.0,
"step": 845
},
{
"epoch": 0.5802047781569966,
"grad_norm": 0.3831127722312039,
"learning_rate": 2.348175558459628e-05,
"loss": 0.5038,
"num_tokens": 325102278.0,
"step": 850
},
{
"epoch": 0.5836177474402731,
"grad_norm": 0.37741976583347536,
"learning_rate": 2.323201898266458e-05,
"loss": 0.5082,
"num_tokens": 326903105.0,
"step": 855
},
{
"epoch": 0.5870307167235495,
"grad_norm": 0.4285613277811094,
"learning_rate": 2.2982826636898975e-05,
"loss": 0.5057,
"num_tokens": 328831071.0,
"step": 860
},
{
"epoch": 0.590443686006826,
"grad_norm": 0.36448317241953704,
"learning_rate": 2.2734210324495812e-05,
"loss": 0.5043,
"num_tokens": 330823718.0,
"step": 865
},
{
"epoch": 0.5938566552901023,
"grad_norm": 0.3798280882427834,
"learning_rate": 2.24862017491952e-05,
"loss": 0.4885,
"num_tokens": 332767044.0,
"step": 870
},
{
"epoch": 0.5972696245733788,
"grad_norm": 0.34626645123019356,
"learning_rate": 2.2238832537238174e-05,
"loss": 0.5074,
"num_tokens": 334743310.0,
"step": 875
},
{
"epoch": 0.6006825938566553,
"grad_norm": 0.3163069903882375,
"learning_rate": 2.199213423333365e-05,
"loss": 0.4798,
"num_tokens": 336595638.0,
"step": 880
},
{
"epoch": 0.6040955631399317,
"grad_norm": 0.3377440234044467,
"learning_rate": 2.174613829663587e-05,
"loss": 0.502,
"num_tokens": 338382273.0,
"step": 885
},
{
"epoch": 0.6075085324232082,
"grad_norm": 0.31815725634886977,
"learning_rate": 2.1500876096732664e-05,
"loss": 0.5112,
"num_tokens": 340358925.0,
"step": 890
},
{
"epoch": 0.6109215017064846,
"grad_norm": 0.367940975007064,
"learning_rate": 2.1256378909645187e-05,
"loss": 0.5098,
"num_tokens": 342188299.0,
"step": 895
},
{
"epoch": 0.6143344709897611,
"grad_norm": 0.34518431957216267,
"learning_rate": 2.101267791383959e-05,
"loss": 0.5011,
"num_tokens": 344239058.0,
"step": 900
},
{
"epoch": 0.6177474402730375,
"grad_norm": 0.35244931572919835,
"learning_rate": 2.0769804186251096e-05,
"loss": 0.5115,
"num_tokens": 346206452.0,
"step": 905
},
{
"epoch": 0.621160409556314,
"grad_norm": 0.34420042600455253,
"learning_rate": 2.052778869832108e-05,
"loss": 0.5066,
"num_tokens": 348080585.0,
"step": 910
},
{
"epoch": 0.6245733788395904,
"grad_norm": 0.3764360844364085,
"learning_rate": 2.0286662312047537e-05,
"loss": 0.5138,
"num_tokens": 349930176.0,
"step": 915
},
{
"epoch": 0.6279863481228669,
"grad_norm": 0.30919462082768023,
"learning_rate": 2.0046455776049545e-05,
"loss": 0.4909,
"num_tokens": 351817695.0,
"step": 920
},
{
"epoch": 0.6313993174061433,
"grad_norm": 0.35475689674485694,
"learning_rate": 1.980719972164617e-05,
"loss": 0.4965,
"num_tokens": 353737820.0,
"step": 925
},
{
"epoch": 0.6348122866894198,
"grad_norm": 0.33400288717046317,
"learning_rate": 1.9568924658950376e-05,
"loss": 0.497,
"num_tokens": 355639183.0,
"step": 930
},
{
"epoch": 0.6382252559726962,
"grad_norm": 0.3328103815591411,
"learning_rate": 1.9331660972978294e-05,
"loss": 0.5171,
"num_tokens": 357548627.0,
"step": 935
},
{
"epoch": 0.6416382252559727,
"grad_norm": 0.32058485303964424,
"learning_rate": 1.909543891977454e-05,
"loss": 0.5134,
"num_tokens": 359437581.0,
"step": 940
},
{
"epoch": 0.6450511945392492,
"grad_norm": 0.41282587646488744,
"learning_rate": 1.8860288622553922e-05,
"loss": 0.4891,
"num_tokens": 361329513.0,
"step": 945
},
{
"epoch": 0.6484641638225256,
"grad_norm": 0.3316340082900624,
"learning_rate": 1.862624006786014e-05,
"loss": 0.4935,
"num_tokens": 363189983.0,
"step": 950
},
{
"epoch": 0.6518771331058021,
"grad_norm": 0.36678552775077355,
"learning_rate": 1.839332310174184e-05,
"loss": 0.4985,
"num_tokens": 365024705.0,
"step": 955
},
{
"epoch": 0.6552901023890785,
"grad_norm": 0.3179539334053606,
"learning_rate": 1.8161567425946623e-05,
"loss": 0.4889,
"num_tokens": 366863209.0,
"step": 960
},
{
"epoch": 0.658703071672355,
"grad_norm": 0.33328111865289556,
"learning_rate": 1.7931002594133494e-05,
"loss": 0.5158,
"num_tokens": 368837047.0,
"step": 965
},
{
"epoch": 0.6621160409556314,
"grad_norm": 0.30756457223233913,
"learning_rate": 1.7701658008104122e-05,
"loss": 0.4994,
"num_tokens": 370725860.0,
"step": 970
},
{
"epoch": 0.6655290102389079,
"grad_norm": 0.3031309186317247,
"learning_rate": 1.7473562914053505e-05,
"loss": 0.4817,
"num_tokens": 372569301.0,
"step": 975
},
{
"epoch": 0.6689419795221843,
"grad_norm": 0.32568354226631835,
"learning_rate": 1.7246746398840484e-05,
"loss": 0.5045,
"num_tokens": 374566515.0,
"step": 980
},
{
"epoch": 0.6723549488054608,
"grad_norm": 0.3241787945238758,
"learning_rate": 1.702123738627857e-05,
"loss": 0.4854,
"num_tokens": 376431969.0,
"step": 985
},
{
"epoch": 0.6757679180887372,
"grad_norm": 0.34099547260942087,
"learning_rate": 1.679706463344754e-05,
"loss": 0.4861,
"num_tokens": 378330489.0,
"step": 990
},
{
"epoch": 0.6791808873720137,
"grad_norm": 0.3105997343666536,
"learning_rate": 1.657425672702631e-05,
"loss": 0.4786,
"num_tokens": 380245892.0,
"step": 995
},
{
"epoch": 0.6825938566552902,
"grad_norm": 0.31926125903551006,
"learning_rate": 1.6352842079647567e-05,
"loss": 0.5024,
"num_tokens": 382103468.0,
"step": 1000
},
{
"epoch": 0.6860068259385665,
"grad_norm": 0.35427861114618725,
"learning_rate": 1.6132848926274537e-05,
"loss": 0.5109,
"num_tokens": 384011945.0,
"step": 1005
},
{
"epoch": 0.689419795221843,
"grad_norm": 0.3318228585555292,
"learning_rate": 1.5914305320600474e-05,
"loss": 0.4874,
"num_tokens": 385837297.0,
"step": 1010
},
{
"epoch": 0.6928327645051194,
"grad_norm": 0.4616084679923675,
"learning_rate": 1.5697239131471175e-05,
"loss": 0.501,
"num_tokens": 387718762.0,
"step": 1015
},
{
"epoch": 0.6962457337883959,
"grad_norm": 0.33671130456556214,
"learning_rate": 1.5481678039331217e-05,
"loss": 0.4907,
"num_tokens": 389586680.0,
"step": 1020
},
{
"epoch": 0.6996587030716723,
"grad_norm": 0.3306227060452227,
"learning_rate": 1.526764953269403e-05,
"loss": 0.508,
"num_tokens": 391567792.0,
"step": 1025
},
{
"epoch": 0.7030716723549488,
"grad_norm": 0.31813025756926777,
"learning_rate": 1.5055180904636602e-05,
"loss": 0.4924,
"num_tokens": 393428760.0,
"step": 1030
},
{
"epoch": 0.7064846416382252,
"grad_norm": 0.3302134363815304,
"learning_rate": 1.4844299249319e-05,
"loss": 0.5088,
"num_tokens": 395264025.0,
"step": 1035
},
{
"epoch": 0.7098976109215017,
"grad_norm": 0.32408702255799543,
"learning_rate": 1.4635031458529366e-05,
"loss": 0.5156,
"num_tokens": 397158700.0,
"step": 1040
},
{
"epoch": 0.7133105802047781,
"grad_norm": 0.3146304024133278,
"learning_rate": 1.4427404218254598e-05,
"loss": 0.4876,
"num_tokens": 399092348.0,
"step": 1045
},
{
"epoch": 0.7167235494880546,
"grad_norm": 0.33224101912248266,
"learning_rate": 1.422144400527733e-05,
"loss": 0.5054,
"num_tokens": 400923938.0,
"step": 1050
},
{
"epoch": 0.7201365187713311,
"grad_norm": 0.33570961644767916,
"learning_rate": 1.401717708379964e-05,
"loss": 0.485,
"num_tokens": 402794748.0,
"step": 1055
},
{
"epoch": 0.7235494880546075,
"grad_norm": 0.33759516313845783,
"learning_rate": 1.3814629502093803e-05,
"loss": 0.4961,
"num_tokens": 404685655.0,
"step": 1060
},
{
"epoch": 0.726962457337884,
"grad_norm": 0.2849134557184847,
"learning_rate": 1.3613827089180576e-05,
"loss": 0.4966,
"num_tokens": 406613744.0,
"step": 1065
},
{
"epoch": 0.7303754266211604,
"grad_norm": 0.33308470866419615,
"learning_rate": 1.3414795451535448e-05,
"loss": 0.4955,
"num_tokens": 408582799.0,
"step": 1070
},
{
"epoch": 0.7337883959044369,
"grad_norm": 0.3218010396658918,
"learning_rate": 1.3217559969823334e-05,
"loss": 0.5041,
"num_tokens": 410442926.0,
"step": 1075
},
{
"epoch": 0.7372013651877133,
"grad_norm": 0.2948387124715625,
"learning_rate": 1.3022145795661988e-05,
"loss": 0.5002,
"num_tokens": 412386009.0,
"step": 1080
},
{
"epoch": 0.7406143344709898,
"grad_norm": 0.31655676114802184,
"learning_rate": 1.2828577848414636e-05,
"loss": 0.5102,
"num_tokens": 414357432.0,
"step": 1085
},
{
"epoch": 0.7440273037542662,
"grad_norm": 0.2869995128416729,
"learning_rate": 1.2636880812012253e-05,
"loss": 0.499,
"num_tokens": 416372039.0,
"step": 1090
},
{
"epoch": 0.7474402730375427,
"grad_norm": 0.32259657238383393,
"learning_rate": 1.2447079131805857e-05,
"loss": 0.4956,
"num_tokens": 418265973.0,
"step": 1095
},
{
"epoch": 0.7508532423208191,
"grad_norm": 0.30899676272076165,
"learning_rate": 1.2259197011449203e-05,
"loss": 0.4829,
"num_tokens": 420051975.0,
"step": 1100
},
{
"epoch": 0.7542662116040956,
"grad_norm": 0.29770792665255996,
"learning_rate": 1.2073258409812328e-05,
"loss": 0.4844,
"num_tokens": 421905923.0,
"step": 1105
},
{
"epoch": 0.757679180887372,
"grad_norm": 0.3251270162529146,
"learning_rate": 1.1889287037926289e-05,
"loss": 0.4948,
"num_tokens": 423685709.0,
"step": 1110
},
{
"epoch": 0.7610921501706485,
"grad_norm": 0.33226182684232963,
"learning_rate": 1.1707306355959516e-05,
"loss": 0.4859,
"num_tokens": 425515443.0,
"step": 1115
},
{
"epoch": 0.764505119453925,
"grad_norm": 0.2820363582513593,
"learning_rate": 1.152733957022617e-05,
"loss": 0.4855,
"num_tokens": 427405904.0,
"step": 1120
},
{
"epoch": 0.7679180887372014,
"grad_norm": 0.29505926000276406,
"learning_rate": 1.1349409630226804e-05,
"loss": 0.4983,
"num_tokens": 429329969.0,
"step": 1125
},
{
"epoch": 0.7713310580204779,
"grad_norm": 0.29155650643706393,
"learning_rate": 1.1173539225721916e-05,
"loss": 0.4842,
"num_tokens": 431288378.0,
"step": 1130
},
{
"epoch": 0.7747440273037542,
"grad_norm": 0.30656473872840284,
"learning_rate": 1.0999750783838442e-05,
"loss": 0.4931,
"num_tokens": 433213317.0,
"step": 1135
},
{
"epoch": 0.7781569965870307,
"grad_norm": 0.3103072512057092,
"learning_rate": 1.0828066466209891e-05,
"loss": 0.4853,
"num_tokens": 435077995.0,
"step": 1140
},
{
"epoch": 0.7815699658703071,
"grad_norm": 0.3119583099379152,
"learning_rate": 1.0658508166150224e-05,
"loss": 0.4902,
"num_tokens": 437063353.0,
"step": 1145
},
{
"epoch": 0.7849829351535836,
"grad_norm": 0.2899025649371279,
"learning_rate": 1.0491097505862085e-05,
"loss": 0.4876,
"num_tokens": 439006864.0,
"step": 1150
},
{
"epoch": 0.78839590443686,
"grad_norm": 0.29349884858317443,
"learning_rate": 1.0325855833679438e-05,
"loss": 0.5039,
"num_tokens": 441123044.0,
"step": 1155
},
{
"epoch": 0.7918088737201365,
"grad_norm": 0.31417942708495006,
"learning_rate": 1.0162804221345235e-05,
"loss": 0.5038,
"num_tokens": 443045688.0,
"step": 1160
},
{
"epoch": 0.7952218430034129,
"grad_norm": 0.31804865960369644,
"learning_rate": 1.0001963461324357e-05,
"loss": 0.4925,
"num_tokens": 444874851.0,
"step": 1165
},
{
"epoch": 0.7986348122866894,
"grad_norm": 0.34762843018816847,
"learning_rate": 9.843354064152135e-06,
"loss": 0.49,
"num_tokens": 446852018.0,
"step": 1170
},
{
"epoch": 0.8020477815699659,
"grad_norm": 0.29778977328595035,
"learning_rate": 9.686996255818815e-06,
"loss": 0.4966,
"num_tokens": 448665204.0,
"step": 1175
},
{
"epoch": 0.8054607508532423,
"grad_norm": 0.28927996228687575,
"learning_rate": 9.532909975190317e-06,
"loss": 0.5137,
"num_tokens": 450687287.0,
"step": 1180
},
{
"epoch": 0.8088737201365188,
"grad_norm": 0.3094536714350483,
"learning_rate": 9.381114871465644e-06,
"loss": 0.4907,
"num_tokens": 452550699.0,
"step": 1185
},
{
"epoch": 0.8122866894197952,
"grad_norm": 0.31642622912231355,
"learning_rate": 9.231630301671188e-06,
"loss": 0.4907,
"num_tokens": 454483896.0,
"step": 1190
},
{
"epoch": 0.8156996587030717,
"grad_norm": 0.3081353624354231,
"learning_rate": 9.084475328192308e-06,
"loss": 0.4759,
"num_tokens": 456418788.0,
"step": 1195
},
{
"epoch": 0.8191126279863481,
"grad_norm": 0.365599455782294,
"learning_rate": 8.939668716342473e-06,
"loss": 0.5015,
"num_tokens": 458325861.0,
"step": 1200
},
{
"epoch": 0.8225255972696246,
"grad_norm": 0.3194677820320597,
"learning_rate": 8.797228931970305e-06,
"loss": 0.5069,
"num_tokens": 460227692.0,
"step": 1205
},
{
"epoch": 0.825938566552901,
"grad_norm": 0.2940713904786176,
"learning_rate": 8.657174139104807e-06,
"loss": 0.4807,
"num_tokens": 462055868.0,
"step": 1210
},
{
"epoch": 0.8293515358361775,
"grad_norm": 0.2859069818865439,
"learning_rate": 8.519522197639043e-06,
"loss": 0.4809,
"num_tokens": 463988579.0,
"step": 1215
},
{
"epoch": 0.8327645051194539,
"grad_norm": 0.2817816657796972,
"learning_rate": 8.384290661052662e-06,
"loss": 0.4955,
"num_tokens": 465973876.0,
"step": 1220
},
{
"epoch": 0.8361774744027304,
"grad_norm": 0.2925807997562712,
"learning_rate": 8.251496774173451e-06,
"loss": 0.4887,
"num_tokens": 467962214.0,
"step": 1225
},
{
"epoch": 0.8395904436860068,
"grad_norm": 0.306769088717932,
"learning_rate": 8.121157470978268e-06,
"loss": 0.4826,
"num_tokens": 469791498.0,
"step": 1230
},
{
"epoch": 0.8430034129692833,
"grad_norm": 0.3026860853513822,
"learning_rate": 7.993289372433582e-06,
"loss": 0.4844,
"num_tokens": 471616902.0,
"step": 1235
},
{
"epoch": 0.8464163822525598,
"grad_norm": 0.3002876341550466,
"learning_rate": 7.867908784376006e-06,
"loss": 0.482,
"num_tokens": 473570581.0,
"step": 1240
},
{
"epoch": 0.8498293515358362,
"grad_norm": 0.30847727739844566,
"learning_rate": 7.745031695432923e-06,
"loss": 0.4959,
"num_tokens": 475518680.0,
"step": 1245
},
{
"epoch": 0.8532423208191127,
"grad_norm": 0.3159715882865048,
"learning_rate": 7.624673774983614e-06,
"loss": 0.5024,
"num_tokens": 477401353.0,
"step": 1250
},
{
"epoch": 0.856655290102389,
"grad_norm": 0.28952971357131746,
"learning_rate": 7.506850371161078e-06,
"loss": 0.4969,
"num_tokens": 479371988.0,
"step": 1255
},
{
"epoch": 0.8600682593856656,
"grad_norm": 0.2845959933463602,
"learning_rate": 7.391576508894865e-06,
"loss": 0.4995,
"num_tokens": 481348892.0,
"step": 1260
},
{
"epoch": 0.863481228668942,
"grad_norm": 0.3011207792019779,
"learning_rate": 7.27886688799504e-06,
"loss": 0.4859,
"num_tokens": 483220037.0,
"step": 1265
},
{
"epoch": 0.8668941979522184,
"grad_norm": 0.293919145898071,
"learning_rate": 7.1687358812776715e-06,
"loss": 0.507,
"num_tokens": 485171910.0,
"step": 1270
},
{
"epoch": 0.8703071672354948,
"grad_norm": 0.301426754517889,
"learning_rate": 7.061197532731992e-06,
"loss": 0.4834,
"num_tokens": 487118187.0,
"step": 1275
},
{
"epoch": 0.8737201365187713,
"grad_norm": 0.30097229400923386,
"learning_rate": 6.956265555729524e-06,
"loss": 0.4801,
"num_tokens": 489059548.0,
"step": 1280
},
{
"epoch": 0.8771331058020477,
"grad_norm": 0.2685038385521516,
"learning_rate": 6.853953331275304e-06,
"loss": 0.4834,
"num_tokens": 490973976.0,
"step": 1285
},
{
"epoch": 0.8805460750853242,
"grad_norm": 0.28920434789008614,
"learning_rate": 6.754273906301544e-06,
"loss": 0.4869,
"num_tokens": 492893029.0,
"step": 1290
},
{
"epoch": 0.8839590443686007,
"grad_norm": 0.26489876231378706,
"learning_rate": 6.6572399920038775e-06,
"loss": 0.506,
"num_tokens": 494980675.0,
"step": 1295
},
{
"epoch": 0.8873720136518771,
"grad_norm": 0.2788460104910853,
"learning_rate": 6.562863962220414e-06,
"loss": 0.4783,
"num_tokens": 496905674.0,
"step": 1300
},
{
"epoch": 0.8907849829351536,
"grad_norm": 0.2804986007877692,
"learning_rate": 6.47115785185384e-06,
"loss": 0.4922,
"num_tokens": 498889766.0,
"step": 1305
},
{
"epoch": 0.89419795221843,
"grad_norm": 0.3194121397231278,
"learning_rate": 6.382133355336667e-06,
"loss": 0.4831,
"num_tokens": 500864542.0,
"step": 1310
},
{
"epoch": 0.8976109215017065,
"grad_norm": 0.2826959872970924,
"learning_rate": 6.29580182514e-06,
"loss": 0.5,
"num_tokens": 502876203.0,
"step": 1315
},
{
"epoch": 0.9010238907849829,
"grad_norm": 0.2686904123571036,
"learning_rate": 6.212174270325845e-06,
"loss": 0.4828,
"num_tokens": 504810366.0,
"step": 1320
},
{
"epoch": 0.9044368600682594,
"grad_norm": 0.2663595760500798,
"learning_rate": 6.1312613551432135e-06,
"loss": 0.4865,
"num_tokens": 506777983.0,
"step": 1325
},
{
"epoch": 0.9078498293515358,
"grad_norm": 0.2907833181872873,
"learning_rate": 6.053073397668226e-06,
"loss": 0.4887,
"num_tokens": 508607232.0,
"step": 1330
},
{
"epoch": 0.9112627986348123,
"grad_norm": 0.29397858235192875,
"learning_rate": 5.977620368488328e-06,
"loss": 0.4882,
"num_tokens": 510543906.0,
"step": 1335
},
{
"epoch": 0.9146757679180887,
"grad_norm": 0.31013908532336765,
"learning_rate": 5.904911889430853e-06,
"loss": 0.501,
"num_tokens": 512474084.0,
"step": 1340
},
{
"epoch": 0.9180887372013652,
"grad_norm": 0.283505071193333,
"learning_rate": 5.834957232336018e-06,
"loss": 0.4763,
"num_tokens": 514322576.0,
"step": 1345
},
{
"epoch": 0.9215017064846417,
"grad_norm": 0.28906365848402543,
"learning_rate": 5.7677653178745805e-06,
"loss": 0.4939,
"num_tokens": 516216104.0,
"step": 1350
},
{
"epoch": 0.9249146757679181,
"grad_norm": 0.2727767003049317,
"learning_rate": 5.7033447144102785e-06,
"loss": 0.4993,
"num_tokens": 518137999.0,
"step": 1355
},
{
"epoch": 0.9283276450511946,
"grad_norm": 0.3011258273126128,
"learning_rate": 5.641703636907171e-06,
"loss": 0.4895,
"num_tokens": 519966345.0,
"step": 1360
},
{
"epoch": 0.931740614334471,
"grad_norm": 0.28411785811363793,
"learning_rate": 5.582849945882055e-06,
"loss": 0.4755,
"num_tokens": 521824255.0,
"step": 1365
},
{
"epoch": 0.9351535836177475,
"grad_norm": 0.3050666821162905,
"learning_rate": 5.526791146402112e-06,
"loss": 0.485,
"num_tokens": 523793837.0,
"step": 1370
},
{
"epoch": 0.9385665529010239,
"grad_norm": 0.2677439239480107,
"learning_rate": 5.473534387127838e-06,
"loss": 0.492,
"num_tokens": 525746360.0,
"step": 1375
},
{
"epoch": 0.9419795221843004,
"grad_norm": 0.28833438367340264,
"learning_rate": 5.423086459401437e-06,
"loss": 0.496,
"num_tokens": 527666864.0,
"step": 1380
},
{
"epoch": 0.9453924914675768,
"grad_norm": 0.27631357977330695,
"learning_rate": 5.37545379638079e-06,
"loss": 0.4946,
"num_tokens": 529624010.0,
"step": 1385
},
{
"epoch": 0.9488054607508533,
"grad_norm": 0.29599901993590805,
"learning_rate": 5.330642472219102e-06,
"loss": 0.4883,
"num_tokens": 531466359.0,
"step": 1390
},
{
"epoch": 0.9522184300341296,
"grad_norm": 0.29245998431871206,
"learning_rate": 5.28865820129031e-06,
"loss": 0.503,
"num_tokens": 533360398.0,
"step": 1395
},
{
"epoch": 0.9556313993174061,
"grad_norm": 0.29809224367055454,
"learning_rate": 5.24950633746038e-06,
"loss": 0.4848,
"num_tokens": 535253217.0,
"step": 1400
},
{
"epoch": 0.9590443686006825,
"grad_norm": 0.2681357355557361,
"learning_rate": 5.213191873404592e-06,
"loss": 0.4753,
"num_tokens": 537247787.0,
"step": 1405
},
{
"epoch": 0.962457337883959,
"grad_norm": 0.26573908919210415,
"learning_rate": 5.179719439970854e-06,
"loss": 0.4811,
"num_tokens": 539137436.0,
"step": 1410
},
{
"epoch": 0.9658703071672355,
"grad_norm": 0.3012993878942698,
"learning_rate": 5.14909330558919e-06,
"loss": 0.4856,
"num_tokens": 540954808.0,
"step": 1415
},
{
"epoch": 0.9692832764505119,
"grad_norm": 0.2734104791087793,
"learning_rate": 5.121317375727405e-06,
"loss": 0.492,
"num_tokens": 542824098.0,
"step": 1420
},
{
"epoch": 0.9726962457337884,
"grad_norm": 0.28445668344186853,
"learning_rate": 5.0963951923930745e-06,
"loss": 0.5046,
"num_tokens": 544761712.0,
"step": 1425
},
{
"epoch": 0.9761092150170648,
"grad_norm": 0.3111290218032905,
"learning_rate": 5.074329933681866e-06,
"loss": 0.4941,
"num_tokens": 546578047.0,
"step": 1430
},
{
"epoch": 0.9795221843003413,
"grad_norm": 0.2841539130555513,
"learning_rate": 5.055124413372245e-06,
"loss": 0.4855,
"num_tokens": 548490659.0,
"step": 1435
},
{
"epoch": 0.9829351535836177,
"grad_norm": 0.28667073975075474,
"learning_rate": 5.038781080566688e-06,
"loss": 0.4854,
"num_tokens": 550377811.0,
"step": 1440
},
{
"epoch": 0.9863481228668942,
"grad_norm": 0.30158413834264025,
"learning_rate": 5.025302019379348e-06,
"loss": 0.4891,
"num_tokens": 552306122.0,
"step": 1445
},
{
"epoch": 0.9897610921501706,
"grad_norm": 0.2699230549724662,
"learning_rate": 5.014688948670296e-06,
"loss": 0.474,
"num_tokens": 554224024.0,
"step": 1450
},
{
"epoch": 0.9931740614334471,
"grad_norm": 0.33061635195442485,
"learning_rate": 5.006943221826337e-06,
"loss": 0.4955,
"num_tokens": 556055771.0,
"step": 1455
},
{
"epoch": 0.9965870307167235,
"grad_norm": 0.25966538380747367,
"learning_rate": 5.002065826588409e-06,
"loss": 0.4832,
"num_tokens": 558001366.0,
"step": 1460
},
{
"epoch": 1.0,
"grad_norm": 0.26645961168127524,
"learning_rate": 5.000057384925645e-06,
"loss": 0.4887,
"num_tokens": 559917574.0,
"step": 1465
},
{
"epoch": 1.0,
"step": 1465,
"total_flos": 1146656934330368.0,
"train_loss": 0.5250163195483106,
"train_runtime": 26005.4598,
"train_samples_per_second": 3.604,
"train_steps_per_second": 0.056
}
],
"logging_steps": 5,
"max_steps": 1465,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1146656934330368.0,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}