{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 1548,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01937984496124031,
"grad_norm": 3.024710206889159,
"learning_rate": 5.806451612903227e-07,
"loss": 0.581,
"step": 10
},
{
"epoch": 0.03875968992248062,
"grad_norm": 1.5402360004650113,
"learning_rate": 1.2258064516129033e-06,
"loss": 0.5504,
"step": 20
},
{
"epoch": 0.05813953488372093,
"grad_norm": 0.8470059178916516,
"learning_rate": 1.870967741935484e-06,
"loss": 0.5124,
"step": 30
},
{
"epoch": 0.07751937984496124,
"grad_norm": 0.5184552103940687,
"learning_rate": 2.5161290322580645e-06,
"loss": 0.4698,
"step": 40
},
{
"epoch": 0.09689922480620156,
"grad_norm": 0.4600811642482261,
"learning_rate": 3.1612903225806453e-06,
"loss": 0.4486,
"step": 50
},
{
"epoch": 0.11627906976744186,
"grad_norm": 0.323482300837561,
"learning_rate": 3.8064516129032257e-06,
"loss": 0.4405,
"step": 60
},
{
"epoch": 0.13565891472868216,
"grad_norm": 0.3255845459193703,
"learning_rate": 4.451612903225807e-06,
"loss": 0.4235,
"step": 70
},
{
"epoch": 0.15503875968992248,
"grad_norm": 0.30110094189930176,
"learning_rate": 5.096774193548387e-06,
"loss": 0.421,
"step": 80
},
{
"epoch": 0.1744186046511628,
"grad_norm": 0.3472582262303459,
"learning_rate": 5.7419354838709685e-06,
"loss": 0.4056,
"step": 90
},
{
"epoch": 0.1937984496124031,
"grad_norm": 0.2834471087687218,
"learning_rate": 6.3870967741935485e-06,
"loss": 0.4068,
"step": 100
},
{
"epoch": 0.2131782945736434,
"grad_norm": 0.3061797429978253,
"learning_rate": 7.03225806451613e-06,
"loss": 0.3963,
"step": 110
},
{
"epoch": 0.23255813953488372,
"grad_norm": 0.32637029539406764,
"learning_rate": 7.67741935483871e-06,
"loss": 0.3951,
"step": 120
},
{
"epoch": 0.25193798449612403,
"grad_norm": 0.3229985120370211,
"learning_rate": 8.322580645161292e-06,
"loss": 0.3882,
"step": 130
},
{
"epoch": 0.2713178294573643,
"grad_norm": 0.3690058326721124,
"learning_rate": 8.967741935483871e-06,
"loss": 0.3906,
"step": 140
},
{
"epoch": 0.29069767441860467,
"grad_norm": 0.2906195001306367,
"learning_rate": 9.612903225806453e-06,
"loss": 0.3897,
"step": 150
},
{
"epoch": 0.31007751937984496,
"grad_norm": 0.4239631816664054,
"learning_rate": 9.999796551468884e-06,
"loss": 0.3826,
"step": 160
},
{
"epoch": 0.32945736434108525,
"grad_norm": 0.34692538807102946,
"learning_rate": 9.99750794563087e-06,
"loss": 0.389,
"step": 170
},
{
"epoch": 0.3488372093023256,
"grad_norm": 0.4007352767365137,
"learning_rate": 9.992677591163695e-06,
"loss": 0.3742,
"step": 180
},
{
"epoch": 0.3682170542635659,
"grad_norm": 0.36518323602764685,
"learning_rate": 9.985307944801193e-06,
"loss": 0.375,
"step": 190
},
{
"epoch": 0.3875968992248062,
"grad_norm": 0.30356394543129034,
"learning_rate": 9.975402754769224e-06,
"loss": 0.3792,
"step": 200
},
{
"epoch": 0.4069767441860465,
"grad_norm": 0.2852713945073263,
"learning_rate": 9.96296705887933e-06,
"loss": 0.3743,
"step": 210
},
{
"epoch": 0.4263565891472868,
"grad_norm": 0.30179106804473094,
"learning_rate": 9.948007181966471e-06,
"loss": 0.374,
"step": 220
},
{
"epoch": 0.44573643410852715,
"grad_norm": 0.31336324870448984,
"learning_rate": 9.930530732672206e-06,
"loss": 0.3702,
"step": 230
},
{
"epoch": 0.46511627906976744,
"grad_norm": 0.33409125982259624,
"learning_rate": 9.910546599574903e-06,
"loss": 0.3774,
"step": 240
},
{
"epoch": 0.4844961240310077,
"grad_norm": 0.3665485316181777,
"learning_rate": 9.88806494666899e-06,
"loss": 0.3708,
"step": 250
},
{
"epoch": 0.5038759689922481,
"grad_norm": 0.338344043578051,
"learning_rate": 9.863097208195508e-06,
"loss": 0.3745,
"step": 260
},
{
"epoch": 0.5232558139534884,
"grad_norm": 0.3034912732218389,
"learning_rate": 9.835656082826623e-06,
"loss": 0.3713,
"step": 270
},
{
"epoch": 0.5426356589147286,
"grad_norm": 0.30126794365887294,
"learning_rate": 9.805755527207036e-06,
"loss": 0.3673,
"step": 280
},
{
"epoch": 0.562015503875969,
"grad_norm": 0.31497458659496597,
"learning_rate": 9.773410748855585e-06,
"loss": 0.376,
"step": 290
},
{
"epoch": 0.5813953488372093,
"grad_norm": 0.3062365232795322,
"learning_rate": 9.738638198430672e-06,
"loss": 0.36,
"step": 300
},
{
"epoch": 0.6007751937984496,
"grad_norm": 0.29595518548452066,
"learning_rate": 9.701455561363378e-06,
"loss": 0.3658,
"step": 310
},
{
"epoch": 0.6201550387596899,
"grad_norm": 0.34813640927866357,
"learning_rate": 9.661881748862618e-06,
"loss": 0.3606,
"step": 320
},
{
"epoch": 0.6395348837209303,
"grad_norm": 0.3261071482329525,
"learning_rate": 9.619936888296822e-06,
"loss": 0.3662,
"step": 330
},
{
"epoch": 0.6589147286821705,
"grad_norm": 0.31972426712360247,
"learning_rate": 9.575642312957107e-06,
"loss": 0.362,
"step": 340
},
{
"epoch": 0.6782945736434108,
"grad_norm": 0.3104726872415725,
"learning_rate": 9.52902055120708e-06,
"loss": 0.3692,
"step": 350
},
{
"epoch": 0.6976744186046512,
"grad_norm": 0.3009349159975097,
"learning_rate": 9.480095315024855e-06,
"loss": 0.3577,
"step": 360
},
{
"epoch": 0.7170542635658915,
"grad_norm": 0.3396149740587963,
"learning_rate": 9.428891487943056e-06,
"loss": 0.3678,
"step": 370
},
{
"epoch": 0.7364341085271318,
"grad_norm": 0.28452945850141,
"learning_rate": 9.37543511239297e-06,
"loss": 0.3595,
"step": 380
},
{
"epoch": 0.7558139534883721,
"grad_norm": 0.29432126357863087,
"learning_rate": 9.319753376459289e-06,
"loss": 0.3613,
"step": 390
},
{
"epoch": 0.7751937984496124,
"grad_norm": 0.32109558327111826,
"learning_rate": 9.261874600052159e-06,
"loss": 0.358,
"step": 400
},
{
"epoch": 0.7945736434108527,
"grad_norm": 0.30459515622425065,
"learning_rate": 9.201828220503588e-06,
"loss": 0.3523,
"step": 410
},
{
"epoch": 0.813953488372093,
"grad_norm": 0.3029379165001461,
"learning_rate": 9.139644777595514e-06,
"loss": 0.3572,
"step": 420
},
{
"epoch": 0.8333333333333334,
"grad_norm": 0.3184431589658449,
"learning_rate": 9.07535589802718e-06,
"loss": 0.3647,
"step": 430
},
{
"epoch": 0.8527131782945736,
"grad_norm": 0.3285191809001309,
"learning_rate": 9.008994279329695e-06,
"loss": 0.3569,
"step": 440
},
{
"epoch": 0.872093023255814,
"grad_norm": 0.30071181015912896,
"learning_rate": 8.940593673235962e-06,
"loss": 0.3447,
"step": 450
},
{
"epoch": 0.8914728682170543,
"grad_norm": 0.293634789051238,
"learning_rate": 8.870188868514434e-06,
"loss": 0.3609,
"step": 460
},
{
"epoch": 0.9108527131782945,
"grad_norm": 0.31663014520581295,
"learning_rate": 8.797815673275441e-06,
"loss": 0.3569,
"step": 470
},
{
"epoch": 0.9302325581395349,
"grad_norm": 0.2922871433510901,
"learning_rate": 8.723510896759064e-06,
"loss": 0.3571,
"step": 480
},
{
"epoch": 0.9496124031007752,
"grad_norm": 0.2900366588256224,
"learning_rate": 8.647312330613845e-06,
"loss": 0.3533,
"step": 490
},
{
"epoch": 0.9689922480620154,
"grad_norm": 0.27174859845732824,
"learning_rate": 8.56925872967582e-06,
"loss": 0.3534,
"step": 500
},
{
"epoch": 0.9883720930232558,
"grad_norm": 0.3242791388749644,
"learning_rate": 8.4893897922577e-06,
"loss": 0.3564,
"step": 510
},
{
"epoch": 1.0077519379844961,
"grad_norm": 0.41930486160423347,
"learning_rate": 8.40774613995817e-06,
"loss": 0.3496,
"step": 520
},
{
"epoch": 1.0271317829457365,
"grad_norm": 0.33960409797943286,
"learning_rate": 8.324369297001616e-06,
"loss": 0.3292,
"step": 530
},
{
"epoch": 1.0465116279069768,
"grad_norm": 0.2883277794177794,
"learning_rate": 8.239301669118776e-06,
"loss": 0.3335,
"step": 540
},
{
"epoch": 1.0658914728682172,
"grad_norm": 0.278024569033151,
"learning_rate": 8.152586521979038e-06,
"loss": 0.3281,
"step": 550
},
{
"epoch": 1.0852713178294573,
"grad_norm": 0.29701005576786715,
"learning_rate": 8.064267959185393e-06,
"loss": 0.3254,
"step": 560
},
{
"epoch": 1.1046511627906976,
"grad_norm": 0.28568736252968063,
"learning_rate": 7.974390899843194e-06,
"loss": 0.3293,
"step": 570
},
{
"epoch": 1.124031007751938,
"grad_norm": 0.2963119155914095,
"learning_rate": 7.883001055714155e-06,
"loss": 0.3304,
"step": 580
},
{
"epoch": 1.1434108527131783,
"grad_norm": 0.28891746465630497,
"learning_rate": 7.7901449079672e-06,
"loss": 0.3235,
"step": 590
},
{
"epoch": 1.1627906976744187,
"grad_norm": 0.3068381315646611,
"learning_rate": 7.695869683538003e-06,
"loss": 0.334,
"step": 600
},
{
"epoch": 1.1821705426356588,
"grad_norm": 0.2954947043166262,
"learning_rate": 7.600223331109193e-06,
"loss": 0.3272,
"step": 610
},
{
"epoch": 1.2015503875968991,
"grad_norm": 0.3292488373940055,
"learning_rate": 7.5032544967235244e-06,
"loss": 0.3278,
"step": 620
},
{
"epoch": 1.2209302325581395,
"grad_norm": 0.34189866382905837,
"learning_rate": 7.405012499042328e-06,
"loss": 0.3325,
"step": 630
},
{
"epoch": 1.2403100775193798,
"grad_norm": 0.29867309861883884,
"learning_rate": 7.305547304261886e-06,
"loss": 0.3301,
"step": 640
},
{
"epoch": 1.2596899224806202,
"grad_norm": 0.30833185416883796,
"learning_rate": 7.204909500700464e-06,
"loss": 0.3349,
"step": 650
},
{
"epoch": 1.2790697674418605,
"grad_norm": 0.2899026781077889,
"learning_rate": 7.103150273068922e-06,
"loss": 0.3317,
"step": 660
},
{
"epoch": 1.2984496124031009,
"grad_norm": 0.2999993491347847,
"learning_rate": 7.000321376438022e-06,
"loss": 0.3338,
"step": 670
},
{
"epoch": 1.3178294573643412,
"grad_norm": 0.31616423531901156,
"learning_rate": 6.896475109915619e-06,
"loss": 0.3298,
"step": 680
},
{
"epoch": 1.3372093023255813,
"grad_norm": 0.28212973123736484,
"learning_rate": 6.791664290047165e-06,
"loss": 0.3313,
"step": 690
},
{
"epoch": 1.3565891472868217,
"grad_norm": 0.28915033542218266,
"learning_rate": 6.6859422239530545e-06,
"loss": 0.3216,
"step": 700
},
{
"epoch": 1.375968992248062,
"grad_norm": 0.2896008415289824,
"learning_rate": 6.5793626822164466e-06,
"loss": 0.3325,
"step": 710
},
{
"epoch": 1.3953488372093024,
"grad_norm": 0.3119505933455449,
"learning_rate": 6.4719798715353676e-06,
"loss": 0.3303,
"step": 720
},
{
"epoch": 1.4147286821705427,
"grad_norm": 0.29835932589388153,
"learning_rate": 6.363848407153017e-06,
"loss": 0.3224,
"step": 730
},
{
"epoch": 1.4341085271317828,
"grad_norm": 0.2886715830691158,
"learning_rate": 6.255023285080276e-06,
"loss": 0.3261,
"step": 740
},
{
"epoch": 1.4534883720930232,
"grad_norm": 0.28164795191624437,
"learning_rate": 6.1455598541245676e-06,
"loss": 0.329,
"step": 750
},
{
"epoch": 1.4728682170542635,
"grad_norm": 0.2625743581161518,
"learning_rate": 6.035513787739254e-06,
"loss": 0.3264,
"step": 760
},
{
"epoch": 1.4922480620155039,
"grad_norm": 0.2750007352652477,
"learning_rate": 5.924941055707965e-06,
"loss": 0.3263,
"step": 770
},
{
"epoch": 1.5116279069767442,
"grad_norm": 0.26378113717013907,
"learning_rate": 5.813897895678164e-06,
"loss": 0.3272,
"step": 780
},
{
"epoch": 1.5310077519379846,
"grad_norm": 0.2815561800883907,
"learning_rate": 5.702440784558517e-06,
"loss": 0.3312,
"step": 790
},
{
"epoch": 1.550387596899225,
"grad_norm": 0.2914139841744769,
"learning_rate": 5.5906264097945405e-06,
"loss": 0.324,
"step": 800
},
{
"epoch": 1.5697674418604652,
"grad_norm": 0.26428007476263105,
"learning_rate": 5.478511640537194e-06,
"loss": 0.3264,
"step": 810
},
{
"epoch": 1.5891472868217056,
"grad_norm": 0.31418088689469686,
"learning_rate": 5.366153498719042e-06,
"loss": 0.326,
"step": 820
},
{
"epoch": 1.6085271317829457,
"grad_norm": 0.3060727974460338,
"learning_rate": 5.253609130052718e-06,
"loss": 0.3257,
"step": 830
},
{
"epoch": 1.627906976744186,
"grad_norm": 0.30284340426746187,
"learning_rate": 5.140935774966429e-06,
"loss": 0.327,
"step": 840
},
{
"epoch": 1.6472868217054264,
"grad_norm": 0.26699893667119545,
"learning_rate": 5.028190739491291e-06,
"loss": 0.3302,
"step": 850
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.27681198241258764,
"learning_rate": 4.915431366115295e-06,
"loss": 0.3294,
"step": 860
},
{
"epoch": 1.6860465116279069,
"grad_norm": 0.28796400194127436,
"learning_rate": 4.802715004618737e-06,
"loss": 0.3234,
"step": 870
},
{
"epoch": 1.7054263565891472,
"grad_norm": 0.2890809518886021,
"learning_rate": 4.6900989829059315e-06,
"loss": 0.3254,
"step": 880
},
{
"epoch": 1.7248062015503876,
"grad_norm": 0.3407732854582675,
"learning_rate": 4.577640577848055e-06,
"loss": 0.3301,
"step": 890
},
{
"epoch": 1.744186046511628,
"grad_norm": 0.2840744301116043,
"learning_rate": 4.465396986151946e-06,
"loss": 0.3238,
"step": 900
},
{
"epoch": 1.7635658914728682,
"grad_norm": 0.28087812582824156,
"learning_rate": 4.353425295269682e-06,
"loss": 0.3267,
"step": 910
},
{
"epoch": 1.7829457364341086,
"grad_norm": 0.2705639936783464,
"learning_rate": 4.241782454363699e-06,
"loss": 0.3172,
"step": 920
},
{
"epoch": 1.802325581395349,
"grad_norm": 0.27944583278178287,
"learning_rate": 4.130525245342285e-06,
"loss": 0.3245,
"step": 930
},
{
"epoch": 1.8217054263565893,
"grad_norm": 0.2702464835545137,
"learning_rate": 4.01971025398011e-06,
"loss": 0.326,
"step": 940
},
{
"epoch": 1.8410852713178296,
"grad_norm": 0.2829858201228807,
"learning_rate": 3.909393841138517e-06,
"loss": 0.3353,
"step": 950
},
{
"epoch": 1.8604651162790697,
"grad_norm": 0.2691353343474597,
"learning_rate": 3.799632114100207e-06,
"loss": 0.3275,
"step": 960
},
{
"epoch": 1.87984496124031,
"grad_norm": 0.2763189925577441,
"learning_rate": 3.6904808980328988e-06,
"loss": 0.3272,
"step": 970
},
{
"epoch": 1.8992248062015504,
"grad_norm": 0.25705918083289864,
"learning_rate": 3.5819957075964628e-06,
"loss": 0.3198,
"step": 980
},
{
"epoch": 1.9186046511627906,
"grad_norm": 0.2631500393579799,
"learning_rate": 3.4742317187079965e-06,
"loss": 0.319,
"step": 990
},
{
"epoch": 1.937984496124031,
"grad_norm": 0.3054389644963815,
"learning_rate": 3.3672437404791568e-06,
"loss": 0.3247,
"step": 1000
},
{
"epoch": 1.9573643410852712,
"grad_norm": 0.3250008081439114,
"learning_rate": 3.2610861873400884e-06,
"loss": 0.3208,
"step": 1010
},
{
"epoch": 1.9767441860465116,
"grad_norm": 0.2655379268524276,
"learning_rate": 3.155813051364064e-06,
"loss": 0.3253,
"step": 1020
},
{
"epoch": 1.996124031007752,
"grad_norm": 0.26805061070126973,
"learning_rate": 3.0514778748069347e-06,
"loss": 0.326,
"step": 1030
},
{
"epoch": 2.0155038759689923,
"grad_norm": 0.2588360551598438,
"learning_rate": 2.948133722875375e-06,
"loss": 0.3102,
"step": 1040
},
{
"epoch": 2.0348837209302326,
"grad_norm": 0.29212071359901065,
"learning_rate": 2.845833156737742e-06,
"loss": 0.2964,
"step": 1050
},
{
"epoch": 2.054263565891473,
"grad_norm": 0.27475294625612784,
"learning_rate": 2.7446282067913045e-06,
"loss": 0.3048,
"step": 1060
},
{
"epoch": 2.0736434108527133,
"grad_norm": 0.24718514011765463,
"learning_rate": 2.6445703461993986e-06,
"loss": 0.3022,
"step": 1070
},
{
"epoch": 2.0930232558139537,
"grad_norm": 0.24748341008518124,
"learning_rate": 2.5457104647120323e-06,
"loss": 0.2953,
"step": 1080
},
{
"epoch": 2.112403100775194,
"grad_norm": 0.2532318387348756,
"learning_rate": 2.44809884278318e-06,
"loss": 0.3037,
"step": 1090
},
{
"epoch": 2.1317829457364343,
"grad_norm": 0.26267405021388773,
"learning_rate": 2.351785125997985e-06,
"loss": 0.3089,
"step": 1100
},
{
"epoch": 2.1511627906976742,
"grad_norm": 0.27029146219566424,
"learning_rate": 2.25681829982286e-06,
"loss": 0.3076,
"step": 1110
},
{
"epoch": 2.1705426356589146,
"grad_norm": 0.26162112043896607,
"learning_rate": 2.163246664691313e-06,
"loss": 0.3018,
"step": 1120
},
{
"epoch": 2.189922480620155,
"grad_norm": 0.25708975624302893,
"learning_rate": 2.0711178114382035e-06,
"loss": 0.3081,
"step": 1130
},
{
"epoch": 2.2093023255813953,
"grad_norm": 0.26120349850790636,
"learning_rate": 1.9804785970948733e-06,
"loss": 0.3078,
"step": 1140
},
{
"epoch": 2.2286821705426356,
"grad_norm": 0.2745646814773546,
"learning_rate": 1.8913751210575248e-06,
"loss": 0.3055,
"step": 1150
},
{
"epoch": 2.248062015503876,
"grad_norm": 0.26906918113057116,
"learning_rate": 1.8038527016409135e-06,
"loss": 0.3066,
"step": 1160
},
{
"epoch": 2.2674418604651163,
"grad_norm": 0.25703646748291625,
"learning_rate": 1.7179558530293073e-06,
"loss": 0.3036,
"step": 1170
},
{
"epoch": 2.2868217054263567,
"grad_norm": 0.24324389237190225,
"learning_rate": 1.6337282626364304e-06,
"loss": 0.3028,
"step": 1180
},
{
"epoch": 2.306201550387597,
"grad_norm": 0.24286459010368208,
"learning_rate": 1.5512127688859014e-06,
"loss": 0.2973,
"step": 1190
},
{
"epoch": 2.3255813953488373,
"grad_norm": 0.2988865330671064,
"learning_rate": 1.4704513394234776e-06,
"loss": 0.3065,
"step": 1200
},
{
"epoch": 2.3449612403100777,
"grad_norm": 0.2488231150967903,
"learning_rate": 1.3914850497721705e-06,
"loss": 0.303,
"step": 1210
},
{
"epoch": 2.3643410852713176,
"grad_norm": 0.244252494824691,
"learning_rate": 1.314354062441106e-06,
"loss": 0.3027,
"step": 1220
},
{
"epoch": 2.383720930232558,
"grad_norm": 0.2529879369163313,
"learning_rate": 1.239097606498741e-06,
"loss": 0.3001,
"step": 1230
},
{
"epoch": 2.4031007751937983,
"grad_norm": 0.27959948686853464,
"learning_rate": 1.1657539576208344e-06,
"loss": 0.3066,
"step": 1240
},
{
"epoch": 2.4224806201550386,
"grad_norm": 0.23986776621156514,
"learning_rate": 1.0943604186233132e-06,
"loss": 0.302,
"step": 1250
},
{
"epoch": 2.441860465116279,
"grad_norm": 0.24513583859125537,
"learning_rate": 1.0249533004899426e-06,
"loss": 0.3018,
"step": 1260
},
{
"epoch": 2.4612403100775193,
"grad_norm": 0.24422017561684717,
"learning_rate": 9.575679039044411e-07,
"loss": 0.3063,
"step": 1270
},
{
"epoch": 2.4806201550387597,
"grad_norm": 0.22744990222068934,
"learning_rate": 8.922385012964391e-07,
"loss": 0.3,
"step": 1280
},
{
"epoch": 2.5,
"grad_norm": 0.24110629183340684,
"learning_rate": 8.289983194104128e-07,
"loss": 0.304,
"step": 1290
},
{
"epoch": 2.5193798449612403,
"grad_norm": 0.25451639260797554,
"learning_rate": 7.678795224064523e-07,
"loss": 0.301,
"step": 1300
},
{
"epoch": 2.5387596899224807,
"grad_norm": 0.24180452622866522,
"learning_rate": 7.089131955014672e-07,
"loss": 0.3016,
"step": 1310
},
{
"epoch": 2.558139534883721,
"grad_norm": 0.26421567580062955,
"learning_rate": 6.521293291591474e-07,
"loss": 0.3017,
"step": 1320
},
{
"epoch": 2.5775193798449614,
"grad_norm": 0.2485827641719469,
"learning_rate": 5.975568038367124e-07,
"loss": 0.3024,
"step": 1330
},
{
"epoch": 2.5968992248062017,
"grad_norm": 0.2437325082173002,
"learning_rate": 5.452233752962221e-07,
"loss": 0.3095,
"step": 1340
},
{
"epoch": 2.616279069767442,
"grad_norm": 0.24277261297072517,
"learning_rate": 4.951556604879049e-07,
"loss": 0.3029,
"step": 1350
},
{
"epoch": 2.6356589147286824,
"grad_norm": 0.24670456985964445,
"learning_rate": 4.47379124012689e-07,
"loss": 0.3063,
"step": 1360
},
{
"epoch": 2.6550387596899228,
"grad_norm": 0.24370768633669995,
"learning_rate": 4.019180651708299e-07,
"loss": 0.3048,
"step": 1370
},
{
"epoch": 2.6744186046511627,
"grad_norm": 0.23513242406050985,
"learning_rate": 3.587956056032027e-07,
"loss": 0.2994,
"step": 1380
},
{
"epoch": 2.693798449612403,
"grad_norm": 0.24659870896825767,
"learning_rate": 3.180336775315629e-07,
"loss": 0.3002,
"step": 1390
},
{
"epoch": 2.7131782945736433,
"grad_norm": 0.23677102922562213,
"learning_rate": 2.7965301260374943e-07,
"loss": 0.3039,
"step": 1400
},
{
"epoch": 2.7325581395348837,
"grad_norm": 0.23450061355333715,
"learning_rate": 2.4367313134949997e-07,
"loss": 0.3047,
"step": 1410
},
{
"epoch": 2.751937984496124,
"grad_norm": 0.2427525542058839,
"learning_rate": 2.10112333252247e-07,
"loss": 0.3041,
"step": 1420
},
{
"epoch": 2.7713178294573644,
"grad_norm": 0.24134579468603404,
"learning_rate": 1.7898768744194163e-07,
"loss": 0.3046,
"step": 1430
},
{
"epoch": 2.7906976744186047,
"grad_norm": 0.23024540632519397,
"learning_rate": 1.5031502401363973e-07,
"loss": 0.3034,
"step": 1440
},
{
"epoch": 2.810077519379845,
"grad_norm": 0.23021061548492067,
"learning_rate": 1.2410892597626456e-07,
"loss": 0.3069,
"step": 1450
},
{
"epoch": 2.8294573643410854,
"grad_norm": 0.2454947252209154,
"learning_rate": 1.0038272183564069e-07,
"loss": 0.3047,
"step": 1460
},
{
"epoch": 2.8488372093023253,
"grad_norm": 0.24376605561441253,
"learning_rate": 7.91484788155733e-08,
"loss": 0.302,
"step": 1470
},
{
"epoch": 2.8682170542635657,
"grad_norm": 0.26944245390267824,
"learning_rate": 6.04169967204199e-08,
"loss": 0.3064,
"step": 1480
},
{
"epoch": 2.887596899224806,
"grad_norm": 0.24111568899502445,
"learning_rate": 4.4197802442275116e-08,
"loss": 0.3008,
"step": 1490
},
{
"epoch": 2.9069767441860463,
"grad_norm": 0.24352341032150696,
"learning_rate": 3.049914511556118e-08,
"loss": 0.3062,
"step": 1500
},
{
"epoch": 2.9263565891472867,
"grad_norm": 0.23436619435707207,
"learning_rate": 1.9327991921493374e-08,
"loss": 0.3024,
"step": 1510
},
{
"epoch": 2.945736434108527,
"grad_norm": 0.2289169080427513,
"learning_rate": 1.0690024544548483e-08,
"loss": 0.3073,
"step": 1520
},
{
"epoch": 2.9651162790697674,
"grad_norm": 0.28662911889013803,
"learning_rate": 4.589636282741339e-09,
"loss": 0.3082,
"step": 1530
},
{
"epoch": 2.9844961240310077,
"grad_norm": 0.233511639991065,
"learning_rate": 1.0299298131816183e-09,
"loss": 0.3057,
"step": 1540
}
],
"logging_steps": 10,
"max_steps": 1548,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 10000000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5155937639727104.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}