{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.2,
"eval_steps": 500,
"global_step": 2000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"grad_norm": 1.8000761270523071,
"learning_rate": 1.8e-06,
"loss": 1.1194,
"step": 10
},
{
"grad_norm": 0.8661630153656006,
"learning_rate": 3.8e-06,
"loss": 1.1135,
"step": 20
},
{
"grad_norm": 0.7289697527885437,
"learning_rate": 5.8e-06,
"loss": 1.0689,
"step": 30
},
{
"grad_norm": 0.4166327118873596,
"learning_rate": 7.8e-06,
"loss": 1.0905,
"step": 40
},
{
"grad_norm": 0.526554524898529,
"learning_rate": 9.800000000000001e-06,
"loss": 1.0538,
"step": 50
},
{
"grad_norm": 0.4327709972858429,
"learning_rate": 1.18e-05,
"loss": 1.0508,
"step": 60
},
{
"grad_norm": 0.538928210735321,
"learning_rate": 1.3800000000000002e-05,
"loss": 1.0425,
"step": 70
},
{
"grad_norm": 0.5299176573753357,
"learning_rate": 1.58e-05,
"loss": 1.0375,
"step": 80
},
{
"grad_norm": 0.5128000974655151,
"learning_rate": 1.78e-05,
"loss": 1.0294,
"step": 90
},
{
"grad_norm": 0.5661309361457825,
"learning_rate": 1.9800000000000004e-05,
"loss": 1.0436,
"step": 100
},
{
"grad_norm": 0.4856761395931244,
"learning_rate": 2.18e-05,
"loss": 1.0431,
"step": 110
},
{
"grad_norm": 0.4439884126186371,
"learning_rate": 2.38e-05,
"loss": 1.0226,
"step": 120
},
{
"grad_norm": 0.6393558382987976,
"learning_rate": 2.58e-05,
"loss": 1.0128,
"step": 130
},
{
"grad_norm": 1.510035753250122,
"learning_rate": 2.7800000000000005e-05,
"loss": 0.9712,
"step": 140
},
{
"grad_norm": 0.8571694493293762,
"learning_rate": 2.98e-05,
"loss": 0.9299,
"step": 150
},
{
"grad_norm": 1.6079281568527222,
"learning_rate": 3.18e-05,
"loss": 0.9007,
"step": 160
},
{
"grad_norm": 1.5867490768432617,
"learning_rate": 3.38e-05,
"loss": 0.8255,
"step": 170
},
{
"grad_norm": 2.30320405960083,
"learning_rate": 3.58e-05,
"loss": 0.751,
"step": 180
},
{
"grad_norm": 1.6800912618637085,
"learning_rate": 3.7800000000000004e-05,
"loss": 0.6726,
"step": 190
},
{
"grad_norm": 2.149949789047241,
"learning_rate": 3.9800000000000005e-05,
"loss": 0.6235,
"step": 200
},
{
"grad_norm": 1.355879545211792,
"learning_rate": 4.18e-05,
"loss": 0.5208,
"step": 210
},
{
"grad_norm": 2.6391069889068604,
"learning_rate": 4.38e-05,
"loss": 0.4078,
"step": 220
},
{
"grad_norm": 2.550145387649536,
"learning_rate": 4.58e-05,
"loss": 0.2918,
"step": 230
},
{
"grad_norm": 1.527336835861206,
"learning_rate": 4.78e-05,
"loss": 0.2215,
"step": 240
},
{
"grad_norm": 1.369310975074768,
"learning_rate": 4.9800000000000004e-05,
"loss": 0.1697,
"step": 250
},
{
"grad_norm": 1.200178861618042,
"learning_rate": 5.1800000000000005e-05,
"loss": 0.1496,
"step": 260
},
{
"grad_norm": 1.654726505279541,
"learning_rate": 5.380000000000001e-05,
"loss": 0.1612,
"step": 270
},
{
"grad_norm": 1.003008484840393,
"learning_rate": 5.580000000000001e-05,
"loss": 0.1355,
"step": 280
},
{
"grad_norm": 1.2864620685577393,
"learning_rate": 5.7799999999999995e-05,
"loss": 0.1646,
"step": 290
},
{
"grad_norm": 1.5691300630569458,
"learning_rate": 5.9800000000000003e-05,
"loss": 0.14,
"step": 300
},
{
"grad_norm": 1.1779141426086426,
"learning_rate": 6.18e-05,
"loss": 0.1405,
"step": 310
},
{
"grad_norm": 1.0536258220672607,
"learning_rate": 6.38e-05,
"loss": 0.1245,
"step": 320
},
{
"grad_norm": 0.939816951751709,
"learning_rate": 6.58e-05,
"loss": 0.1273,
"step": 330
},
{
"grad_norm": 1.1611407995224,
"learning_rate": 6.780000000000001e-05,
"loss": 0.1054,
"step": 340
},
{
"grad_norm": 1.0939171314239502,
"learning_rate": 6.98e-05,
"loss": 0.0992,
"step": 350
},
{
"grad_norm": 1.1488924026489258,
"learning_rate": 7.18e-05,
"loss": 0.1146,
"step": 360
},
{
"grad_norm": 0.8890052437782288,
"learning_rate": 7.38e-05,
"loss": 0.1211,
"step": 370
},
{
"grad_norm": 0.7940606474876404,
"learning_rate": 7.58e-05,
"loss": 0.0995,
"step": 380
},
{
"grad_norm": 0.8308754563331604,
"learning_rate": 7.780000000000001e-05,
"loss": 0.1037,
"step": 390
},
{
"grad_norm": 0.8870435953140259,
"learning_rate": 7.98e-05,
"loss": 0.097,
"step": 400
},
{
"grad_norm": 0.6374945640563965,
"learning_rate": 8.18e-05,
"loss": 0.1025,
"step": 410
},
{
"grad_norm": 1.0475709438323975,
"learning_rate": 8.38e-05,
"loss": 0.097,
"step": 420
},
{
"grad_norm": 0.8428962230682373,
"learning_rate": 8.58e-05,
"loss": 0.1012,
"step": 430
},
{
"grad_norm": 0.7878981828689575,
"learning_rate": 8.78e-05,
"loss": 0.095,
"step": 440
},
{
"grad_norm": 0.8133223652839661,
"learning_rate": 8.98e-05,
"loss": 0.0933,
"step": 450
},
{
"grad_norm": 0.5507503747940063,
"learning_rate": 9.180000000000001e-05,
"loss": 0.1006,
"step": 460
},
{
"grad_norm": 0.7642120122909546,
"learning_rate": 9.38e-05,
"loss": 0.1012,
"step": 470
},
{
"grad_norm": 0.8028876185417175,
"learning_rate": 9.58e-05,
"loss": 0.093,
"step": 480
},
{
"grad_norm": 0.6212483048439026,
"learning_rate": 9.78e-05,
"loss": 0.0958,
"step": 490
},
{
"grad_norm": 0.6925771236419678,
"learning_rate": 9.98e-05,
"loss": 0.0934,
"step": 500
},
{
"grad_norm": 0.7765862345695496,
"learning_rate": 9.9999778549206e-05,
"loss": 0.0968,
"step": 510
},
{
"grad_norm": 0.5598022937774658,
"learning_rate": 9.999901304280685e-05,
"loss": 0.0951,
"step": 520
},
{
"grad_norm": 0.6049850583076477,
"learning_rate": 9.999770075521164e-05,
"loss": 0.1006,
"step": 530
},
{
"grad_norm": 0.8623297810554504,
"learning_rate": 9.99958417007713e-05,
"loss": 0.0938,
"step": 540
},
{
"grad_norm": 0.5017800331115723,
"learning_rate": 9.999343589981615e-05,
"loss": 0.0961,
"step": 550
},
{
"grad_norm": 0.586384117603302,
"learning_rate": 9.999048337865568e-05,
"loss": 0.0807,
"step": 560
},
{
"grad_norm": 0.5691239237785339,
"learning_rate": 9.998698416957815e-05,
"loss": 0.0761,
"step": 570
},
{
"grad_norm": 0.5205549597740173,
"learning_rate": 9.998293831085037e-05,
"loss": 0.079,
"step": 580
},
{
"grad_norm": 0.7439537048339844,
"learning_rate": 9.997834584671719e-05,
"loss": 0.0848,
"step": 590
},
{
"grad_norm": 0.5913628935813904,
"learning_rate": 9.997320682740107e-05,
"loss": 0.0821,
"step": 600
},
{
"grad_norm": 0.6202144026756287,
"learning_rate": 9.996752130910149e-05,
"loss": 0.0909,
"step": 610
},
{
"grad_norm": 0.5822686553001404,
"learning_rate": 9.99612893539944e-05,
"loss": 0.0775,
"step": 620
},
{
"grad_norm": 0.6942011713981628,
"learning_rate": 9.995451103023144e-05,
"loss": 0.0762,
"step": 630
},
{
"grad_norm": 0.5743650794029236,
"learning_rate": 9.994718641193928e-05,
"loss": 0.0721,
"step": 640
},
{
"grad_norm": 0.6001242399215698,
"learning_rate": 9.993931557921874e-05,
"loss": 0.0786,
"step": 650
},
{
"grad_norm": 0.4573017954826355,
"learning_rate": 9.993089861814402e-05,
"loss": 0.0742,
"step": 660
},
{
"grad_norm": 0.6226702332496643,
"learning_rate": 9.992193562076166e-05,
"loss": 0.0874,
"step": 670
},
{
"grad_norm": 0.6129893660545349,
"learning_rate": 9.991242668508954e-05,
"loss": 0.0742,
"step": 680
},
{
"grad_norm": 0.4674377739429474,
"learning_rate": 9.990237191511587e-05,
"loss": 0.0725,
"step": 690
},
{
"grad_norm": 0.4674297869205475,
"learning_rate": 9.989177142079802e-05,
"loss": 0.0738,
"step": 700
},
{
"grad_norm": 0.5865145325660706,
"learning_rate": 9.988062531806126e-05,
"loss": 0.0887,
"step": 710
},
{
"grad_norm": 0.7082783579826355,
"learning_rate": 9.986893372879762e-05,
"loss": 0.081,
"step": 720
},
{
"grad_norm": 0.3989603817462921,
"learning_rate": 9.985669678086443e-05,
"loss": 0.0712,
"step": 730
},
{
"grad_norm": 0.6835122108459473,
"learning_rate": 9.984391460808298e-05,
"loss": 0.0634,
"step": 740
},
{
"grad_norm": 0.5364878177642822,
"learning_rate": 9.983058735023709e-05,
"loss": 0.0732,
"step": 750
},
{
"grad_norm": 0.35529661178588867,
"learning_rate": 9.98167151530715e-05,
"loss": 0.0634,
"step": 760
},
{
"grad_norm": 0.40558916330337524,
"learning_rate": 9.980229816829034e-05,
"loss": 0.0711,
"step": 770
},
{
"grad_norm": 0.5186157822608948,
"learning_rate": 9.978733655355544e-05,
"loss": 0.0716,
"step": 780
},
{
"grad_norm": 0.5230529308319092,
"learning_rate": 9.977183047248464e-05,
"loss": 0.0665,
"step": 790
},
{
"grad_norm": 0.55573970079422,
"learning_rate": 9.975578009464992e-05,
"loss": 0.0731,
"step": 800
},
{
"grad_norm": 0.4610231816768646,
"learning_rate": 9.97391855955757e-05,
"loss": 0.0774,
"step": 810
},
{
"grad_norm": 0.42752256989479065,
"learning_rate": 9.972204715673669e-05,
"loss": 0.0684,
"step": 820
},
{
"grad_norm": 0.45933523774147034,
"learning_rate": 9.970436496555617e-05,
"loss": 0.0655,
"step": 830
},
{
"grad_norm": 0.42024242877960205,
"learning_rate": 9.968613921540373e-05,
"loss": 0.0622,
"step": 840
},
{
"grad_norm": 0.5874196290969849,
"learning_rate": 9.966737010559326e-05,
"loss": 0.0727,
"step": 850
},
{
"grad_norm": 0.5695536732673645,
"learning_rate": 9.964805784138072e-05,
"loss": 0.0692,
"step": 860
},
{
"grad_norm": 0.385803759098053,
"learning_rate": 9.962820263396195e-05,
"loss": 0.0633,
"step": 870
},
{
"grad_norm": 0.43449321389198303,
"learning_rate": 9.960780470047033e-05,
"loss": 0.0564,
"step": 880
},
{
"grad_norm": 0.3207187354564667,
"learning_rate": 9.958686426397437e-05,
"loss": 0.0621,
"step": 890
},
{
"grad_norm": 0.3849020302295685,
"learning_rate": 9.956538155347534e-05,
"loss": 0.0732,
"step": 900
},
{
"grad_norm": 0.3251587748527527,
"learning_rate": 9.95433568039047e-05,
"loss": 0.0578,
"step": 910
},
{
"grad_norm": 0.35808417201042175,
"learning_rate": 9.952079025612162e-05,
"loss": 0.0601,
"step": 920
},
{
"grad_norm": 0.2598479390144348,
"learning_rate": 9.949768215691022e-05,
"loss": 0.0587,
"step": 930
},
{
"grad_norm": 0.44453999400138855,
"learning_rate": 9.9474032758977e-05,
"loss": 0.0613,
"step": 940
},
{
"grad_norm": 0.5126887559890747,
"learning_rate": 9.944984232094794e-05,
"loss": 0.0697,
"step": 950
},
{
"grad_norm": 0.34455230832099915,
"learning_rate": 9.942511110736584e-05,
"loss": 0.0602,
"step": 960
},
{
"grad_norm": 0.5941815376281738,
"learning_rate": 9.939983938868726e-05,
"loss": 0.0611,
"step": 970
},
{
"grad_norm": 0.4752832055091858,
"learning_rate": 9.93740274412797e-05,
"loss": 0.0741,
"step": 980
},
{
"grad_norm": 0.46665894985198975,
"learning_rate": 9.934767554741846e-05,
"loss": 0.0674,
"step": 990
},
{
"grad_norm": 0.5232179164886475,
"learning_rate": 9.932078399528361e-05,
"loss": 0.0624,
"step": 1000
},
{
"grad_norm": 0.28243017196655273,
"learning_rate": 9.929335307895689e-05,
"loss": 0.0571,
"step": 1010
},
{
"grad_norm": 0.6062475442886353,
"learning_rate": 9.926538309841839e-05,
"loss": 0.0665,
"step": 1020
},
{
"grad_norm": 0.40058252215385437,
"learning_rate": 9.923687435954334e-05,
"loss": 0.0642,
"step": 1030
},
{
"grad_norm": 0.5141247510910034,
"learning_rate": 9.920782717409873e-05,
"loss": 0.0672,
"step": 1040
},
{
"grad_norm": 0.4424152076244354,
"learning_rate": 9.917824185973994e-05,
"loss": 0.0585,
"step": 1050
},
{
"grad_norm": 0.5799531936645508,
"learning_rate": 9.914811874000723e-05,
"loss": 0.0671,
"step": 1060
},
{
"grad_norm": 0.2700813412666321,
"learning_rate": 9.911745814432218e-05,
"loss": 0.0597,
"step": 1070
},
{
"grad_norm": 0.5105420351028442,
"learning_rate": 9.90862604079842e-05,
"loss": 0.0659,
"step": 1080
},
{
"grad_norm": 0.28891175985336304,
"learning_rate": 9.90545258721667e-05,
"loss": 0.0537,
"step": 1090
},
{
"grad_norm": 0.3393675982952118,
"learning_rate": 9.90222548839135e-05,
"loss": 0.0588,
"step": 1100
},
{
"grad_norm": 0.5993463397026062,
"learning_rate": 9.898944779613495e-05,
"loss": 0.0597,
"step": 1110
},
{
"grad_norm": 0.36953073740005493,
"learning_rate": 9.89561049676041e-05,
"loss": 0.0663,
"step": 1120
},
{
"grad_norm": 0.4737951159477234,
"learning_rate": 9.89222267629528e-05,
"loss": 0.0723,
"step": 1130
},
{
"grad_norm": 0.4001386761665344,
"learning_rate": 9.888781355266763e-05,
"loss": 0.0677,
"step": 1140
},
{
"grad_norm": 0.3812955617904663,
"learning_rate": 9.885286571308598e-05,
"loss": 0.0611,
"step": 1150
},
{
"grad_norm": 0.4578457176685333,
"learning_rate": 9.881738362639182e-05,
"loss": 0.0661,
"step": 1160
},
{
"grad_norm": 0.4562247693538666,
"learning_rate": 9.878136768061154e-05,
"loss": 0.0608,
"step": 1170
},
{
"grad_norm": 0.3326525390148163,
"learning_rate": 9.874481826960979e-05,
"loss": 0.0542,
"step": 1180
},
{
"grad_norm": 0.4416123330593109,
"learning_rate": 9.870773579308503e-05,
"loss": 0.0549,
"step": 1190
},
{
"grad_norm": 0.3314540684223175,
"learning_rate": 9.867012065656533e-05,
"loss": 0.0617,
"step": 1200
},
{
"grad_norm": 0.45206043124198914,
"learning_rate": 9.863197327140376e-05,
"loss": 0.0616,
"step": 1210
},
{
"grad_norm": 0.23829543590545654,
"learning_rate": 9.859329405477403e-05,
"loss": 0.0538,
"step": 1220
},
{
"grad_norm": 0.40245768427848816,
"learning_rate": 9.855408342966585e-05,
"loss": 0.0599,
"step": 1230
},
{
"grad_norm": 0.36202260851860046,
"learning_rate": 9.851434182488033e-05,
"loss": 0.0563,
"step": 1240
},
{
"grad_norm": 0.3963845372200012,
"learning_rate": 9.84740696750253e-05,
"loss": 0.0631,
"step": 1250
},
{
"grad_norm": 0.5504999756813049,
"learning_rate": 9.843326742051055e-05,
"loss": 0.0547,
"step": 1260
},
{
"grad_norm": 0.34831398725509644,
"learning_rate": 9.839193550754297e-05,
"loss": 0.0624,
"step": 1270
},
{
"grad_norm": 0.3962138295173645,
"learning_rate": 9.835007438812177e-05,
"loss": 0.065,
"step": 1280
},
{
"grad_norm": 0.5264518857002258,
"learning_rate": 9.830768452003341e-05,
"loss": 0.0629,
"step": 1290
},
{
"grad_norm": 0.4762389659881592,
"learning_rate": 9.826476636684671e-05,
"loss": 0.061,
"step": 1300
},
{
"grad_norm": 0.4322468638420105,
"learning_rate": 9.822132039790773e-05,
"loss": 0.0587,
"step": 1310
},
{
"grad_norm": 0.3566872477531433,
"learning_rate": 9.817734708833461e-05,
"loss": 0.0591,
"step": 1320
},
{
"grad_norm": 0.35532960295677185,
"learning_rate": 9.813284691901243e-05,
"loss": 0.0562,
"step": 1330
},
{
"grad_norm": 0.46946966648101807,
"learning_rate": 9.808782037658792e-05,
"loss": 0.0571,
"step": 1340
},
{
"grad_norm": 0.332731693983078,
"learning_rate": 9.804226795346411e-05,
"loss": 0.054,
"step": 1350
},
{
"grad_norm": 0.28058159351348877,
"learning_rate": 9.799619014779503e-05,
"loss": 0.0594,
"step": 1360
},
{
"grad_norm": 0.4327380955219269,
"learning_rate": 9.794958746348013e-05,
"loss": 0.049,
"step": 1370
},
{
"grad_norm": 0.2480960488319397,
"learning_rate": 9.790246041015896e-05,
"loss": 0.056,
"step": 1380
},
{
"grad_norm": 0.3483447730541229,
"learning_rate": 9.785480950320538e-05,
"loss": 0.0495,
"step": 1390
},
{
"grad_norm": 0.41755494475364685,
"learning_rate": 9.78066352637221e-05,
"loss": 0.0601,
"step": 1400
},
{
"grad_norm": 0.31297025084495544,
"learning_rate": 9.775793821853488e-05,
"loss": 0.0524,
"step": 1410
},
{
"grad_norm": 0.3274155259132385,
"learning_rate": 9.77087189001868e-05,
"loss": 0.0583,
"step": 1420
},
{
"grad_norm": 0.3719927966594696,
"learning_rate": 9.765897784693243e-05,
"loss": 0.0552,
"step": 1430
},
{
"grad_norm": 0.23558516800403595,
"learning_rate": 9.760871560273197e-05,
"loss": 0.0541,
"step": 1440
},
{
"grad_norm": 0.3183964490890503,
"learning_rate": 9.755793271724526e-05,
"loss": 0.0518,
"step": 1450
},
{
"grad_norm": 0.5059468150138855,
"learning_rate": 9.750662974582584e-05,
"loss": 0.0557,
"step": 1460
},
{
"grad_norm": 0.3334318995475769,
"learning_rate": 9.745480724951473e-05,
"loss": 0.0549,
"step": 1470
},
{
"grad_norm": 0.3796740770339966,
"learning_rate": 9.740246579503447e-05,
"loss": 0.0621,
"step": 1480
},
{
"grad_norm": 0.3182413578033447,
"learning_rate": 9.734960595478284e-05,
"loss": 0.0494,
"step": 1490
},
{
"grad_norm": 0.361194372177124,
"learning_rate": 9.729622830682657e-05,
"loss": 0.0633,
"step": 1500
},
{
"grad_norm": 0.3800562620162964,
"learning_rate": 9.724233343489504e-05,
"loss": 0.0522,
"step": 1510
},
{
"grad_norm": 0.3530648946762085,
"learning_rate": 9.718792192837396e-05,
"loss": 0.0534,
"step": 1520
},
{
"grad_norm": 0.2889000177383423,
"learning_rate": 9.713299438229886e-05,
"loss": 0.0545,
"step": 1530
},
{
"grad_norm": 0.49150562286376953,
"learning_rate": 9.707755139734855e-05,
"loss": 0.0646,
"step": 1540
},
{
"grad_norm": 0.2493639588356018,
"learning_rate": 9.702159357983866e-05,
"loss": 0.0561,
"step": 1550
},
{
"grad_norm": 0.264142245054245,
"learning_rate": 9.696512154171492e-05,
"loss": 0.0541,
"step": 1560
},
{
"grad_norm": 0.366263747215271,
"learning_rate": 9.690813590054645e-05,
"loss": 0.048,
"step": 1570
},
{
"grad_norm": 0.3640550374984741,
"learning_rate": 9.685063727951914e-05,
"loss": 0.0501,
"step": 1580
},
{
"grad_norm": 0.3413008451461792,
"learning_rate": 9.679262630742865e-05,
"loss": 0.0544,
"step": 1590
},
{
"grad_norm": 0.37865203619003296,
"learning_rate": 9.673410361867373e-05,
"loss": 0.053,
"step": 1600
},
{
"grad_norm": 0.5088744759559631,
"learning_rate": 9.667506985324909e-05,
"loss": 0.0547,
"step": 1610
},
{
"grad_norm": 0.25608953833580017,
"learning_rate": 9.661552565673855e-05,
"loss": 0.0541,
"step": 1620
},
{
"grad_norm": 0.41425254940986633,
"learning_rate": 9.655547168030789e-05,
"loss": 0.0491,
"step": 1630
},
{
"grad_norm": 0.5099269151687622,
"learning_rate": 9.649490858069777e-05,
"loss": 0.0574,
"step": 1640
},
{
"grad_norm": 0.42447859048843384,
"learning_rate": 9.643383702021658e-05,
"loss": 0.063,
"step": 1650
},
{
"grad_norm": 0.2408001720905304,
"learning_rate": 9.637225766673307e-05,
"loss": 0.0502,
"step": 1660
},
{
"grad_norm": 0.3616074025630951,
"learning_rate": 9.631017119366922e-05,
"loss": 0.0623,
"step": 1670
},
{
"grad_norm": 0.3710896968841553,
"learning_rate": 9.624757827999273e-05,
"loss": 0.0566,
"step": 1680
},
{
"grad_norm": 0.2255239635705948,
"learning_rate": 9.618447961020971e-05,
"loss": 0.0556,
"step": 1690
},
{
"grad_norm": 0.4593660831451416,
"learning_rate": 9.612087587435707e-05,
"loss": 0.0522,
"step": 1700
},
{
"grad_norm": 0.3208771347999573,
"learning_rate": 9.605676776799508e-05,
"loss": 0.0489,
"step": 1710
},
{
"grad_norm": 0.2732645869255066,
"learning_rate": 9.599215599219973e-05,
"loss": 0.0522,
"step": 1720
},
{
"grad_norm": 0.3936018943786621,
"learning_rate": 9.592704125355505e-05,
"loss": 0.0546,
"step": 1730
},
{
"grad_norm": 0.31143778562545776,
"learning_rate": 9.586142426414538e-05,
"loss": 0.0493,
"step": 1740
},
{
"grad_norm": 0.3694917857646942,
"learning_rate": 9.57953057415476e-05,
"loss": 0.0502,
"step": 1750
},
{
"grad_norm": 0.33363762497901917,
"learning_rate": 9.572868640882328e-05,
"loss": 0.0518,
"step": 1760
},
{
"grad_norm": 0.36443617939949036,
"learning_rate": 9.56615669945108e-05,
"loss": 0.0519,
"step": 1770
},
{
"grad_norm": 0.2904825806617737,
"learning_rate": 9.55939482326173e-05,
"loss": 0.0501,
"step": 1780
},
{
"grad_norm": 0.37458282709121704,
"learning_rate": 9.552583086261069e-05,
"loss": 0.0535,
"step": 1790
},
{
"grad_norm": 0.43567535281181335,
"learning_rate": 9.545721562941168e-05,
"loss": 0.0529,
"step": 1800
},
{
"grad_norm": 0.3915671110153198,
"learning_rate": 9.538810328338543e-05,
"loss": 0.0515,
"step": 1810
},
{
"grad_norm": 0.27103352546691895,
"learning_rate": 9.531849458033349e-05,
"loss": 0.0464,
"step": 1820
},
{
"grad_norm": 0.25404107570648193,
"learning_rate": 9.524839028148547e-05,
"loss": 0.0498,
"step": 1830
},
{
"grad_norm": 0.4265500605106354,
"learning_rate": 9.517779115349077e-05,
"loss": 0.0487,
"step": 1840
},
{
"grad_norm": 0.32784515619277954,
"learning_rate": 9.510669796841014e-05,
"loss": 0.0427,
"step": 1850
},
{
"grad_norm": 0.38013437390327454,
"learning_rate": 9.503511150370727e-05,
"loss": 0.0511,
"step": 1860
},
{
"grad_norm": 0.3897137939929962,
"learning_rate": 9.496303254224024e-05,
"loss": 0.0519,
"step": 1870
},
{
"grad_norm": 0.3544720411300659,
"learning_rate": 9.489046187225306e-05,
"loss": 0.0407,
"step": 1880
},
{
"grad_norm": 0.33186250925064087,
"learning_rate": 9.481740028736692e-05,
"loss": 0.0438,
"step": 1890
},
{
"grad_norm": 0.40961360931396484,
"learning_rate": 9.474384858657164e-05,
"loss": 0.0516,
"step": 1900
},
{
"grad_norm": 0.37814396619796753,
"learning_rate": 9.466980757421679e-05,
"loss": 0.0502,
"step": 1910
},
{
"grad_norm": 0.3576529026031494,
"learning_rate": 9.459527806000305e-05,
"loss": 0.0473,
"step": 1920
},
{
"grad_norm": 0.4338109493255615,
"learning_rate": 9.452026085897325e-05,
"loss": 0.0547,
"step": 1930
},
{
"grad_norm": 0.4323793053627014,
"learning_rate": 9.444475679150348e-05,
"loss": 0.0512,
"step": 1940
},
{
"grad_norm": 0.4074627459049225,
"learning_rate": 9.436876668329411e-05,
"loss": 0.0469,
"step": 1950
},
{
"grad_norm": 0.2311258316040039,
"learning_rate": 9.429229136536079e-05,
"loss": 0.0431,
"step": 1960
},
{
"grad_norm": 0.4605922996997833,
"learning_rate": 9.421533167402534e-05,
"loss": 0.0561,
"step": 1970
},
{
"grad_norm": 0.3769131898880005,
"learning_rate": 9.413788845090666e-05,
"loss": 0.0582,
"step": 1980
},
{
"grad_norm": 0.4106776714324951,
"learning_rate": 9.405996254291136e-05,
"loss": 0.0512,
"step": 1990
},
{
"grad_norm": 0.33406883478164673,
"learning_rate": 9.398155480222474e-05,
"loss": 0.0486,
"step": 2000
}
],
"logging_steps": 10,
"max_steps": 10000,
"num_input_tokens_seen": 0,
"num_train_epochs": 9223372036854775807,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}