{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.545454545454545,
"eval_steps": 500,
"global_step": 2000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.022727272727272728,
"grad_norm": 864166.375,
"learning_rate": 1e-05,
"loss": 935851212.8,
"step": 10
},
{
"epoch": 0.045454545454545456,
"grad_norm": 851375.4375,
"learning_rate": 2e-05,
"loss": 779824793.6,
"step": 20
},
{
"epoch": 0.06818181818181818,
"grad_norm": 663149.625,
"learning_rate": 3e-05,
"loss": 931242291.2,
"step": 30
},
{
"epoch": 0.09090909090909091,
"grad_norm": 578363.8125,
"learning_rate": 4e-05,
"loss": 833480396.8,
"step": 40
},
{
"epoch": 0.11363636363636363,
"grad_norm": 736728.9375,
"learning_rate": 5e-05,
"loss": 923433779.2,
"step": 50
},
{
"epoch": 0.13636363636363635,
"grad_norm": 582158.875,
"learning_rate": 6e-05,
"loss": 779678822.4,
"step": 60
},
{
"epoch": 0.1590909090909091,
"grad_norm": 3016550.25,
"learning_rate": 7e-05,
"loss": 880521625.6,
"step": 70
},
{
"epoch": 0.18181818181818182,
"grad_norm": 16130207.0,
"learning_rate": 8e-05,
"loss": 796326297.6,
"step": 80
},
{
"epoch": 0.20454545454545456,
"grad_norm": 72006536.0,
"learning_rate": 9e-05,
"loss": 827050598.4,
"step": 90
},
{
"epoch": 0.22727272727272727,
"grad_norm": 115259504.0,
"learning_rate": 0.0001,
"loss": 779086131.2,
"step": 100
},
{
"epoch": 0.25,
"grad_norm": 35422613504.0,
"learning_rate": 9.999316524962345e-05,
"loss": 679432448.0,
"step": 110
},
{
"epoch": 0.2727272727272727,
"grad_norm": 11170627584.0,
"learning_rate": 9.997266286704631e-05,
"loss": 543212544.0,
"step": 120
},
{
"epoch": 0.29545454545454547,
"grad_norm": 460055968.0,
"learning_rate": 9.993849845741524e-05,
"loss": 488552294.4,
"step": 130
},
{
"epoch": 0.3181818181818182,
"grad_norm": 643671040.0,
"learning_rate": 9.989068136093873e-05,
"loss": 381352115.2,
"step": 140
},
{
"epoch": 0.3409090909090909,
"grad_norm": 514593984.0,
"learning_rate": 9.98292246503335e-05,
"loss": 344548966.4,
"step": 150
},
{
"epoch": 0.36363636363636365,
"grad_norm": 2670534912.0,
"learning_rate": 9.975414512725057e-05,
"loss": 256186470.4,
"step": 160
},
{
"epoch": 0.38636363636363635,
"grad_norm": 367520064.0,
"learning_rate": 9.966546331768191e-05,
"loss": 252642892.8,
"step": 170
},
{
"epoch": 0.4090909090909091,
"grad_norm": 6415584768.0,
"learning_rate": 9.956320346634876e-05,
"loss": 214356454.4,
"step": 180
},
{
"epoch": 0.4318181818181818,
"grad_norm": 342542784.0,
"learning_rate": 9.944739353007344e-05,
"loss": 184521523.2,
"step": 190
},
{
"epoch": 0.45454545454545453,
"grad_norm": 670325952.0,
"learning_rate": 9.931806517013612e-05,
"loss": 203785408.0,
"step": 200
},
{
"epoch": 0.4772727272727273,
"grad_norm": 408250880.0,
"learning_rate": 9.917525374361912e-05,
"loss": 165584844.8,
"step": 210
},
{
"epoch": 0.5,
"grad_norm": 237331648.0,
"learning_rate": 9.901899829374047e-05,
"loss": 161461657.6,
"step": 220
},
{
"epoch": 0.5227272727272727,
"grad_norm": 261846496.0,
"learning_rate": 9.884934153917997e-05,
"loss": 232488601.6,
"step": 230
},
{
"epoch": 0.5454545454545454,
"grad_norm": 258512032.0,
"learning_rate": 9.86663298624003e-05,
"loss": 213760076.8,
"step": 240
},
{
"epoch": 0.5681818181818182,
"grad_norm": 176516288.0,
"learning_rate": 9.847001329696653e-05,
"loss": 190539545.6,
"step": 250
},
{
"epoch": 0.5909090909090909,
"grad_norm": 208952080.0,
"learning_rate": 9.826044551386744e-05,
"loss": 160166745.6,
"step": 260
},
{
"epoch": 0.6136363636363636,
"grad_norm": 444205504.0,
"learning_rate": 9.803768380684242e-05,
"loss": 142089868.8,
"step": 270
},
{
"epoch": 0.6363636363636364,
"grad_norm": 456830976.0,
"learning_rate": 9.780178907671789e-05,
"loss": 164681638.4,
"step": 280
},
{
"epoch": 0.6590909090909091,
"grad_norm": 240448720.0,
"learning_rate": 9.755282581475769e-05,
"loss": 126908902.4,
"step": 290
},
{
"epoch": 0.6818181818181818,
"grad_norm": 289517440.0,
"learning_rate": 9.729086208503174e-05,
"loss": 144903500.8,
"step": 300
},
{
"epoch": 0.7045454545454546,
"grad_norm": 155611392.0,
"learning_rate": 9.701596950580806e-05,
"loss": 109276416.0,
"step": 310
},
{
"epoch": 0.7272727272727273,
"grad_norm": 145763984.0,
"learning_rate": 9.672822322997305e-05,
"loss": 82450060.8,
"step": 320
},
{
"epoch": 0.75,
"grad_norm": 183422624.0,
"learning_rate": 9.642770192448536e-05,
"loss": 94446822.4,
"step": 330
},
{
"epoch": 0.7727272727272727,
"grad_norm": 165524336.0,
"learning_rate": 9.611448774886924e-05,
"loss": 88033280.0,
"step": 340
},
{
"epoch": 0.7954545454545454,
"grad_norm": 370229248.0,
"learning_rate": 9.578866633275288e-05,
"loss": 151451392.0,
"step": 350
},
{
"epoch": 0.8181818181818182,
"grad_norm": 182722992.0,
"learning_rate": 9.545032675245813e-05,
"loss": 81520544.0,
"step": 360
},
{
"epoch": 0.8409090909090909,
"grad_norm": 110293848.0,
"learning_rate": 9.509956150664796e-05,
"loss": 74347558.4,
"step": 370
},
{
"epoch": 0.8636363636363636,
"grad_norm": 210000608.0,
"learning_rate": 9.473646649103818e-05,
"loss": 98096115.2,
"step": 380
},
{
"epoch": 0.8863636363636364,
"grad_norm": 252706992.0,
"learning_rate": 9.43611409721806e-05,
"loss": 45600521.6,
"step": 390
},
{
"epoch": 0.9090909090909091,
"grad_norm": 286857856.0,
"learning_rate": 9.397368756032445e-05,
"loss": 47684240.0,
"step": 400
},
{
"epoch": 0.9318181818181818,
"grad_norm": 145927072.0,
"learning_rate": 9.357421218136386e-05,
"loss": 55028972.8,
"step": 410
},
{
"epoch": 0.9545454545454546,
"grad_norm": 246543744.0,
"learning_rate": 9.316282404787871e-05,
"loss": 62455718.4,
"step": 420
},
{
"epoch": 0.9772727272727273,
"grad_norm": 148089840.0,
"learning_rate": 9.273963562927695e-05,
"loss": 80031763.2,
"step": 430
},
{
"epoch": 1.0,
"grad_norm": 258538496.0,
"learning_rate": 9.230476262104677e-05,
"loss": 63331763.2,
"step": 440
},
{
"epoch": 1.0227272727272727,
"grad_norm": 246891264.0,
"learning_rate": 9.185832391312644e-05,
"loss": 79613408.0,
"step": 450
},
{
"epoch": 1.0454545454545454,
"grad_norm": 99700408.0,
"learning_rate": 9.140044155740101e-05,
"loss": 60524844.8,
"step": 460
},
{
"epoch": 1.0681818181818181,
"grad_norm": 269340576.0,
"learning_rate": 9.093124073433463e-05,
"loss": 53263574.4,
"step": 470
},
{
"epoch": 1.0909090909090908,
"grad_norm": 152498880.0,
"learning_rate": 9.045084971874738e-05,
"loss": 37747696.0,
"step": 480
},
{
"epoch": 1.1136363636363635,
"grad_norm": 405291296.0,
"learning_rate": 8.995939984474624e-05,
"loss": 83147424.0,
"step": 490
},
{
"epoch": 1.1363636363636362,
"grad_norm": 153301696.0,
"learning_rate": 8.945702546981969e-05,
"loss": 46162393.6,
"step": 500
},
{
"epoch": 1.1590909090909092,
"grad_norm": 386725408.0,
"learning_rate": 8.894386393810563e-05,
"loss": 57197043.2,
"step": 510
},
{
"epoch": 1.1818181818181819,
"grad_norm": 112014240.0,
"learning_rate": 8.842005554284296e-05,
"loss": 44577619.2,
"step": 520
},
{
"epoch": 1.2045454545454546,
"grad_norm": 198261360.0,
"learning_rate": 8.788574348801675e-05,
"loss": 49147491.2,
"step": 530
},
{
"epoch": 1.2272727272727273,
"grad_norm": 127353744.0,
"learning_rate": 8.73410738492077e-05,
"loss": 30261772.8,
"step": 540
},
{
"epoch": 1.25,
"grad_norm": 182823824.0,
"learning_rate": 8.678619553365659e-05,
"loss": 48019424.0,
"step": 550
},
{
"epoch": 1.2727272727272727,
"grad_norm": 286290624.0,
"learning_rate": 8.622126023955446e-05,
"loss": 60011494.4,
"step": 560
},
{
"epoch": 1.2954545454545454,
"grad_norm": 78245984.0,
"learning_rate": 8.564642241456986e-05,
"loss": 47857244.8,
"step": 570
},
{
"epoch": 1.3181818181818181,
"grad_norm": 408751168.0,
"learning_rate": 8.506183921362443e-05,
"loss": 70248691.2,
"step": 580
},
{
"epoch": 1.3409090909090908,
"grad_norm": 113171200.0,
"learning_rate": 8.44676704559283e-05,
"loss": 53175008.0,
"step": 590
},
{
"epoch": 1.3636363636363638,
"grad_norm": 201525456.0,
"learning_rate": 8.386407858128706e-05,
"loss": 33759475.2,
"step": 600
},
{
"epoch": 1.3863636363636362,
"grad_norm": 697200320.0,
"learning_rate": 8.32512286056924e-05,
"loss": 32540745.6,
"step": 610
},
{
"epoch": 1.4090909090909092,
"grad_norm": 336322880.0,
"learning_rate": 8.262928807620843e-05,
"loss": 48147216.0,
"step": 620
},
{
"epoch": 1.4318181818181819,
"grad_norm": 132475312.0,
"learning_rate": 8.199842702516583e-05,
"loss": 37222713.6,
"step": 630
},
{
"epoch": 1.4545454545454546,
"grad_norm": 401426752.0,
"learning_rate": 8.135881792367686e-05,
"loss": 31534899.2,
"step": 640
},
{
"epoch": 1.4772727272727273,
"grad_norm": 208541024.0,
"learning_rate": 8.07106356344834e-05,
"loss": 25690548.8,
"step": 650
},
{
"epoch": 1.5,
"grad_norm": 116395000.0,
"learning_rate": 8.005405736415126e-05,
"loss": 35130710.4,
"step": 660
},
{
"epoch": 1.5227272727272727,
"grad_norm": 221156224.0,
"learning_rate": 7.938926261462366e-05,
"loss": 52561321.6,
"step": 670
},
{
"epoch": 1.5454545454545454,
"grad_norm": 139324544.0,
"learning_rate": 7.871643313414718e-05,
"loss": 46616777.6,
"step": 680
},
{
"epoch": 1.5681818181818183,
"grad_norm": 126530176.0,
"learning_rate": 7.803575286758364e-05,
"loss": 32868204.8,
"step": 690
},
{
"epoch": 1.5909090909090908,
"grad_norm": 112456576.0,
"learning_rate": 7.734740790612136e-05,
"loss": 42719804.8,
"step": 700
},
{
"epoch": 1.6136363636363638,
"grad_norm": 178001280.0,
"learning_rate": 7.66515864363997e-05,
"loss": 56040908.8,
"step": 710
},
{
"epoch": 1.6363636363636362,
"grad_norm": 408028640.0,
"learning_rate": 7.594847868906076e-05,
"loss": 27305916.8,
"step": 720
},
{
"epoch": 1.6590909090909092,
"grad_norm": 146312144.0,
"learning_rate": 7.52382768867422e-05,
"loss": 34246518.4,
"step": 730
},
{
"epoch": 1.6818181818181817,
"grad_norm": 121166176.0,
"learning_rate": 7.452117519152542e-05,
"loss": 34342003.2,
"step": 740
},
{
"epoch": 1.7045454545454546,
"grad_norm": 1605302144.0,
"learning_rate": 7.379736965185368e-05,
"loss": 18556844.8,
"step": 750
},
{
"epoch": 1.7272727272727273,
"grad_norm": 145793296.0,
"learning_rate": 7.30670581489344e-05,
"loss": 30094729.6,
"step": 760
},
{
"epoch": 1.75,
"grad_norm": 238633424.0,
"learning_rate": 7.233044034264034e-05,
"loss": 28264518.4,
"step": 770
},
{
"epoch": 1.7727272727272727,
"grad_norm": 146943280.0,
"learning_rate": 7.158771761692464e-05,
"loss": 21468862.4,
"step": 780
},
{
"epoch": 1.7954545454545454,
"grad_norm": 155326464.0,
"learning_rate": 7.083909302476453e-05,
"loss": 24784724.8,
"step": 790
},
{
"epoch": 1.8181818181818183,
"grad_norm": 194690608.0,
"learning_rate": 7.008477123264848e-05,
"loss": 20140659.2,
"step": 800
},
{
"epoch": 1.8409090909090908,
"grad_norm": 269470976.0,
"learning_rate": 6.932495846462261e-05,
"loss": 32951574.4,
"step": 810
},
{
"epoch": 1.8636363636363638,
"grad_norm": 170968176.0,
"learning_rate": 6.855986244591104e-05,
"loss": 24495520.0,
"step": 820
},
{
"epoch": 1.8863636363636362,
"grad_norm": 137592768.0,
"learning_rate": 6.778969234612584e-05,
"loss": 21688649.6,
"step": 830
},
{
"epoch": 1.9090909090909092,
"grad_norm": 77953224.0,
"learning_rate": 6.701465872208216e-05,
"loss": 31676636.8,
"step": 840
},
{
"epoch": 1.9318181818181817,
"grad_norm": 455976896.0,
"learning_rate": 6.623497346023418e-05,
"loss": 47546489.6,
"step": 850
},
{
"epoch": 1.9545454545454546,
"grad_norm": 461556352.0,
"learning_rate": 6.545084971874738e-05,
"loss": 26543750.4,
"step": 860
},
{
"epoch": 1.9772727272727273,
"grad_norm": 342992832.0,
"learning_rate": 6.466250186922325e-05,
"loss": 23164931.2,
"step": 870
},
{
"epoch": 2.0,
"grad_norm": 176021600.0,
"learning_rate": 6.387014543809223e-05,
"loss": 24812347.2,
"step": 880
},
{
"epoch": 2.022727272727273,
"grad_norm": 168301712.0,
"learning_rate": 6.307399704769099e-05,
"loss": 29569001.6,
"step": 890
},
{
"epoch": 2.0454545454545454,
"grad_norm": 170714448.0,
"learning_rate": 6.227427435703997e-05,
"loss": 24018566.4,
"step": 900
},
{
"epoch": 2.0681818181818183,
"grad_norm": 260947392.0,
"learning_rate": 6.147119600233758e-05,
"loss": 26362905.6,
"step": 910
},
{
"epoch": 2.090909090909091,
"grad_norm": 82909696.0,
"learning_rate": 6.066498153718735e-05,
"loss": 20936886.4,
"step": 920
},
{
"epoch": 2.1136363636363638,
"grad_norm": 152754880.0,
"learning_rate": 5.985585137257401e-05,
"loss": 22602596.8,
"step": 930
},
{
"epoch": 2.1363636363636362,
"grad_norm": 71488880.0,
"learning_rate": 5.90440267166055e-05,
"loss": 36498016.0,
"step": 940
},
{
"epoch": 2.159090909090909,
"grad_norm": 104700112.0,
"learning_rate": 5.8229729514036705e-05,
"loss": 30277987.2,
"step": 950
},
{
"epoch": 2.1818181818181817,
"grad_norm": 184239504.0,
"learning_rate": 5.74131823855921e-05,
"loss": 24437344.0,
"step": 960
},
{
"epoch": 2.2045454545454546,
"grad_norm": 322096480.0,
"learning_rate": 5.6594608567103456e-05,
"loss": 28966016.0,
"step": 970
},
{
"epoch": 2.227272727272727,
"grad_norm": 107924136.0,
"learning_rate": 5.577423184847932e-05,
"loss": 23697406.4,
"step": 980
},
{
"epoch": 2.25,
"grad_norm": 287611424.0,
"learning_rate": 5.495227651252315e-05,
"loss": 35587129.6,
"step": 990
},
{
"epoch": 2.2727272727272725,
"grad_norm": 142485760.0,
"learning_rate": 5.4128967273616625e-05,
"loss": 22553091.2,
"step": 1000
},
{
"epoch": 2.2954545454545454,
"grad_norm": 125698872.0,
"learning_rate": 5.330452921628497e-05,
"loss": 21591241.6,
"step": 1010
},
{
"epoch": 2.3181818181818183,
"grad_norm": 121039208.0,
"learning_rate": 5.247918773366112e-05,
"loss": 29732899.2,
"step": 1020
},
{
"epoch": 2.340909090909091,
"grad_norm": 192014624.0,
"learning_rate": 5.165316846586541e-05,
"loss": 19519462.4,
"step": 1030
},
{
"epoch": 2.3636363636363638,
"grad_norm": 219596560.0,
"learning_rate": 5.0826697238317935e-05,
"loss": 25972294.4,
"step": 1040
},
{
"epoch": 2.3863636363636362,
"grad_norm": 453196864.0,
"learning_rate": 5e-05,
"loss": 20440235.2,
"step": 1050
},
{
"epoch": 2.409090909090909,
"grad_norm": 271006016.0,
"learning_rate": 4.917330276168208e-05,
"loss": 19903924.8,
"step": 1060
},
{
"epoch": 2.4318181818181817,
"grad_norm": 322238688.0,
"learning_rate": 4.834683153413459e-05,
"loss": 30931248.0,
"step": 1070
},
{
"epoch": 2.4545454545454546,
"grad_norm": 115611168.0,
"learning_rate": 4.7520812266338885e-05,
"loss": 20302214.4,
"step": 1080
},
{
"epoch": 2.4772727272727275,
"grad_norm": 887404288.0,
"learning_rate": 4.669547078371504e-05,
"loss": 19957492.8,
"step": 1090
},
{
"epoch": 2.5,
"grad_norm": 10758360064.0,
"learning_rate": 4.5871032726383386e-05,
"loss": 22953964.8,
"step": 1100
},
{
"epoch": 2.5227272727272725,
"grad_norm": 427750944.0,
"learning_rate": 4.504772348747687e-05,
"loss": 18938011.2,
"step": 1110
},
{
"epoch": 2.5454545454545454,
"grad_norm": 149082768.0,
"learning_rate": 4.4225768151520694e-05,
"loss": 26505913.6,
"step": 1120
},
{
"epoch": 2.5681818181818183,
"grad_norm": 316686912.0,
"learning_rate": 4.3405391432896555e-05,
"loss": 20391016.0,
"step": 1130
},
{
"epoch": 2.590909090909091,
"grad_norm": 100088072.0,
"learning_rate": 4.2586817614407895e-05,
"loss": 26414075.2,
"step": 1140
},
{
"epoch": 2.6136363636363638,
"grad_norm": 92582088.0,
"learning_rate": 4.17702704859633e-05,
"loss": 15236398.4,
"step": 1150
},
{
"epoch": 2.6363636363636362,
"grad_norm": 473522784.0,
"learning_rate": 4.095597328339452e-05,
"loss": 24309235.2,
"step": 1160
},
{
"epoch": 2.659090909090909,
"grad_norm": 109819416.0,
"learning_rate": 4.0144148627425993e-05,
"loss": 22481440.0,
"step": 1170
},
{
"epoch": 2.6818181818181817,
"grad_norm": 131413952.0,
"learning_rate": 3.933501846281267e-05,
"loss": 15105643.2,
"step": 1180
},
{
"epoch": 2.7045454545454546,
"grad_norm": 196484560.0,
"learning_rate": 3.852880399766243e-05,
"loss": 22866424.0,
"step": 1190
},
{
"epoch": 2.7272727272727275,
"grad_norm": 298367968.0,
"learning_rate": 3.772572564296005e-05,
"loss": 20215891.2,
"step": 1200
},
{
"epoch": 2.75,
"grad_norm": 94960424.0,
"learning_rate": 3.6926002952309016e-05,
"loss": 18007924.8,
"step": 1210
},
{
"epoch": 2.7727272727272725,
"grad_norm": 85202336.0,
"learning_rate": 3.612985456190778e-05,
"loss": 20693896.0,
"step": 1220
},
{
"epoch": 2.7954545454545454,
"grad_norm": 7084129280.0,
"learning_rate": 3.533749813077677e-05,
"loss": 16236560.0,
"step": 1230
},
{
"epoch": 2.8181818181818183,
"grad_norm": 85127592.0,
"learning_rate": 3.4549150281252636e-05,
"loss": 23334752.0,
"step": 1240
},
{
"epoch": 2.840909090909091,
"grad_norm": 142297552.0,
"learning_rate": 3.3765026539765834e-05,
"loss": 18909488.0,
"step": 1250
},
{
"epoch": 2.8636363636363638,
"grad_norm": 284615232.0,
"learning_rate": 3.298534127791785e-05,
"loss": 19228300.8,
"step": 1260
},
{
"epoch": 2.8863636363636362,
"grad_norm": 102847704.0,
"learning_rate": 3.221030765387417e-05,
"loss": 26358712.0,
"step": 1270
},
{
"epoch": 2.909090909090909,
"grad_norm": 288731616.0,
"learning_rate": 3.144013755408895e-05,
"loss": 21636097.6,
"step": 1280
},
{
"epoch": 2.9318181818181817,
"grad_norm": 106496784.0,
"learning_rate": 3.0675041535377405e-05,
"loss": 20164374.4,
"step": 1290
},
{
"epoch": 2.9545454545454546,
"grad_norm": 223327328.0,
"learning_rate": 2.991522876735154e-05,
"loss": 21382241.6,
"step": 1300
},
{
"epoch": 2.9772727272727275,
"grad_norm": 73936632.0,
"learning_rate": 2.916090697523549e-05,
"loss": 16625654.4,
"step": 1310
},
{
"epoch": 3.0,
"grad_norm": 195426608.0,
"learning_rate": 2.8412282383075363e-05,
"loss": 21499422.4,
"step": 1320
},
{
"epoch": 3.022727272727273,
"grad_norm": 239848320.0,
"learning_rate": 2.766955965735968e-05,
"loss": 19992364.8,
"step": 1330
},
{
"epoch": 3.0454545454545454,
"grad_norm": 132064736.0,
"learning_rate": 2.693294185106562e-05,
"loss": 17378974.4,
"step": 1340
},
{
"epoch": 3.0681818181818183,
"grad_norm": 158127008.0,
"learning_rate": 2.6202630348146324e-05,
"loss": 14476110.4,
"step": 1350
},
{
"epoch": 3.090909090909091,
"grad_norm": 193214896.0,
"learning_rate": 2.547882480847461e-05,
"loss": 19444292.8,
"step": 1360
},
{
"epoch": 3.1136363636363638,
"grad_norm": 135614960.0,
"learning_rate": 2.476172311325783e-05,
"loss": 17136611.2,
"step": 1370
},
{
"epoch": 3.1363636363636362,
"grad_norm": 146726304.0,
"learning_rate": 2.405152131093926e-05,
"loss": 17945476.8,
"step": 1380
},
{
"epoch": 3.159090909090909,
"grad_norm": 86271432.0,
"learning_rate": 2.3348413563600325e-05,
"loss": 22355953.6,
"step": 1390
},
{
"epoch": 3.1818181818181817,
"grad_norm": 286385248.0,
"learning_rate": 2.2652592093878666e-05,
"loss": 17187601.6,
"step": 1400
},
{
"epoch": 3.2045454545454546,
"grad_norm": 1212550912.0,
"learning_rate": 2.196424713241637e-05,
"loss": 20235840.0,
"step": 1410
},
{
"epoch": 3.227272727272727,
"grad_norm": 73712976.0,
"learning_rate": 2.128356686585282e-05,
"loss": 30961001.6,
"step": 1420
},
{
"epoch": 3.25,
"grad_norm": 182859040.0,
"learning_rate": 2.061073738537635e-05,
"loss": 32744835.2,
"step": 1430
},
{
"epoch": 3.2727272727272725,
"grad_norm": 64465348.0,
"learning_rate": 1.9945942635848748e-05,
"loss": 18790020.8,
"step": 1440
},
{
"epoch": 3.2954545454545454,
"grad_norm": 79966296.0,
"learning_rate": 1.928936436551661e-05,
"loss": 21262963.2,
"step": 1450
},
{
"epoch": 3.3181818181818183,
"grad_norm": 178412640.0,
"learning_rate": 1.8641182076323148e-05,
"loss": 20795022.4,
"step": 1460
},
{
"epoch": 3.340909090909091,
"grad_norm": 117127040.0,
"learning_rate": 1.800157297483417e-05,
"loss": 18522465.6,
"step": 1470
},
{
"epoch": 3.3636363636363638,
"grad_norm": 225748960.0,
"learning_rate": 1.7370711923791567e-05,
"loss": 16351683.2,
"step": 1480
},
{
"epoch": 3.3863636363636362,
"grad_norm": 333275584.0,
"learning_rate": 1.6748771394307585e-05,
"loss": 15890913.6,
"step": 1490
},
{
"epoch": 3.409090909090909,
"grad_norm": 94839864.0,
"learning_rate": 1.6135921418712956e-05,
"loss": 19005873.6,
"step": 1500
},
{
"epoch": 3.4318181818181817,
"grad_norm": 123844928.0,
"learning_rate": 1.553232954407171e-05,
"loss": 15768131.2,
"step": 1510
},
{
"epoch": 3.4545454545454546,
"grad_norm": 376811424.0,
"learning_rate": 1.4938160786375572e-05,
"loss": 20412670.4,
"step": 1520
},
{
"epoch": 3.4772727272727275,
"grad_norm": 71331304.0,
"learning_rate": 1.435357758543015e-05,
"loss": 15396275.2,
"step": 1530
},
{
"epoch": 3.5,
"grad_norm": 90793376.0,
"learning_rate": 1.3778739760445552e-05,
"loss": 17609350.4,
"step": 1540
},
{
"epoch": 3.5227272727272725,
"grad_norm": 175962528.0,
"learning_rate": 1.3213804466343421e-05,
"loss": 16694264.0,
"step": 1550
},
{
"epoch": 3.5454545454545454,
"grad_norm": 130335672.0,
"learning_rate": 1.2658926150792322e-05,
"loss": 15245347.2,
"step": 1560
},
{
"epoch": 3.5681818181818183,
"grad_norm": 225995312.0,
"learning_rate": 1.2114256511983274e-05,
"loss": 14799392.0,
"step": 1570
},
{
"epoch": 3.590909090909091,
"grad_norm": 91921688.0,
"learning_rate": 1.157994445715706e-05,
"loss": 23821099.2,
"step": 1580
},
{
"epoch": 3.6136363636363638,
"grad_norm": 108905944.0,
"learning_rate": 1.1056136061894384e-05,
"loss": 20226878.4,
"step": 1590
},
{
"epoch": 3.6363636363636362,
"grad_norm": 192163008.0,
"learning_rate": 1.0542974530180327e-05,
"loss": 15503652.8,
"step": 1600
},
{
"epoch": 3.659090909090909,
"grad_norm": 82313128.0,
"learning_rate": 1.0040600155253765e-05,
"loss": 16259214.4,
"step": 1610
},
{
"epoch": 3.6818181818181817,
"grad_norm": 127500728.0,
"learning_rate": 9.549150281252633e-06,
"loss": 13356518.4,
"step": 1620
},
{
"epoch": 3.7045454545454546,
"grad_norm": 195588592.0,
"learning_rate": 9.068759265665384e-06,
"loss": 13309605.6,
"step": 1630
},
{
"epoch": 3.7272727272727275,
"grad_norm": 124094728.0,
"learning_rate": 8.599558442598998e-06,
"loss": 17332201.6,
"step": 1640
},
{
"epoch": 3.75,
"grad_norm": 194997536.0,
"learning_rate": 8.141676086873572e-06,
"loss": 27798745.6,
"step": 1650
},
{
"epoch": 3.7727272727272725,
"grad_norm": 408983104.0,
"learning_rate": 7.695237378953223e-06,
"loss": 18060028.8,
"step": 1660
},
{
"epoch": 3.7954545454545454,
"grad_norm": 64105764.0,
"learning_rate": 7.260364370723044e-06,
"loss": 14837972.8,
"step": 1670
},
{
"epoch": 3.8181818181818183,
"grad_norm": 172431360.0,
"learning_rate": 6.837175952121306e-06,
"loss": 16195776.0,
"step": 1680
},
{
"epoch": 3.840909090909091,
"grad_norm": 82362008.0,
"learning_rate": 6.425787818636131e-06,
"loss": 14680713.6,
"step": 1690
},
{
"epoch": 3.8636363636363638,
"grad_norm": 146533344.0,
"learning_rate": 6.026312439675552e-06,
"loss": 16691865.6,
"step": 1700
},
{
"epoch": 3.8863636363636362,
"grad_norm": 109454416.0,
"learning_rate": 5.6388590278194096e-06,
"loss": 17369052.8,
"step": 1710
},
{
"epoch": 3.909090909090909,
"grad_norm": 294526976.0,
"learning_rate": 5.263533508961827e-06,
"loss": 9919254.4,
"step": 1720
},
{
"epoch": 3.9318181818181817,
"grad_norm": 205665296.0,
"learning_rate": 4.900438493352055e-06,
"loss": 19957376.0,
"step": 1730
},
{
"epoch": 3.9545454545454546,
"grad_norm": 117976384.0,
"learning_rate": 4.549673247541875e-06,
"loss": 14926936.0,
"step": 1740
},
{
"epoch": 3.9772727272727275,
"grad_norm": 139144784.0,
"learning_rate": 4.2113336672471245e-06,
"loss": 15108585.6,
"step": 1750
},
{
"epoch": 4.0,
"grad_norm": 204030640.0,
"learning_rate": 3.885512251130763e-06,
"loss": 16684348.8,
"step": 1760
},
{
"epoch": 4.0227272727272725,
"grad_norm": 231758032.0,
"learning_rate": 3.5722980755146517e-06,
"loss": 14250145.6,
"step": 1770
},
{
"epoch": 4.045454545454546,
"grad_norm": 114193512.0,
"learning_rate": 3.271776770026963e-06,
"loss": 14976139.2,
"step": 1780
},
{
"epoch": 4.068181818181818,
"grad_norm": 45119975424.0,
"learning_rate": 2.9840304941919415e-06,
"loss": 13117681.6,
"step": 1790
},
{
"epoch": 4.090909090909091,
"grad_norm": 87482056.0,
"learning_rate": 2.7091379149682685e-06,
"loss": 16316115.2,
"step": 1800
},
{
"epoch": 4.113636363636363,
"grad_norm": 217095472.0,
"learning_rate": 2.4471741852423237e-06,
"loss": 12584548.8,
"step": 1810
},
{
"epoch": 4.136363636363637,
"grad_norm": 279856288.0,
"learning_rate": 2.1982109232821178e-06,
"loss": 17845868.8,
"step": 1820
},
{
"epoch": 4.159090909090909,
"grad_norm": 105613376.0,
"learning_rate": 1.962316193157593e-06,
"loss": 14880216.0,
"step": 1830
},
{
"epoch": 4.181818181818182,
"grad_norm": 108838528.0,
"learning_rate": 1.7395544861325718e-06,
"loss": 14690222.4,
"step": 1840
},
{
"epoch": 4.204545454545454,
"grad_norm": 98467304.0,
"learning_rate": 1.5299867030334814e-06,
"loss": 18249204.8,
"step": 1850
},
{
"epoch": 4.2272727272727275,
"grad_norm": 145458192.0,
"learning_rate": 1.333670137599713e-06,
"loss": 11624424.8,
"step": 1860
},
{
"epoch": 4.25,
"grad_norm": 75976648.0,
"learning_rate": 1.1506584608200367e-06,
"loss": 16304561.6,
"step": 1870
},
{
"epoch": 4.2727272727272725,
"grad_norm": 152758896.0,
"learning_rate": 9.810017062595322e-07,
"loss": 18160070.4,
"step": 1880
},
{
"epoch": 4.295454545454546,
"grad_norm": 210220208.0,
"learning_rate": 8.247462563808817e-07,
"loss": 23436678.4,
"step": 1890
},
{
"epoch": 4.318181818181818,
"grad_norm": 199508576.0,
"learning_rate": 6.819348298638839e-07,
"loss": 14236168.0,
"step": 1900
},
{
"epoch": 4.340909090909091,
"grad_norm": 63556520.0,
"learning_rate": 5.526064699265753e-07,
"loss": 20044918.4,
"step": 1910
},
{
"epoch": 4.363636363636363,
"grad_norm": 59328524.0,
"learning_rate": 4.367965336512403e-07,
"loss": 13414170.4,
"step": 1920
},
{
"epoch": 4.386363636363637,
"grad_norm": 360369888.0,
"learning_rate": 3.3453668231809286e-07,
"loss": 13388755.2,
"step": 1930
},
{
"epoch": 4.409090909090909,
"grad_norm": 1001972800.0,
"learning_rate": 2.458548727494292e-07,
"loss": 18148713.6,
"step": 1940
},
{
"epoch": 4.431818181818182,
"grad_norm": 104688328.0,
"learning_rate": 1.7077534966650766e-07,
"loss": 15079361.6,
"step": 1950
},
{
"epoch": 4.454545454545454,
"grad_norm": 37318868.0,
"learning_rate": 1.0931863906127327e-07,
"loss": 12394652.0,
"step": 1960
},
{
"epoch": 4.4772727272727275,
"grad_norm": 146546864.0,
"learning_rate": 6.150154258476315e-08,
"loss": 20620992.0,
"step": 1970
},
{
"epoch": 4.5,
"grad_norm": 157523056.0,
"learning_rate": 2.7337132953697554e-08,
"loss": 13256707.2,
"step": 1980
},
{
"epoch": 4.5227272727272725,
"grad_norm": 142617888.0,
"learning_rate": 6.834750376549792e-09,
"loss": 19604684.8,
"step": 1990
},
{
"epoch": 4.545454545454545,
"grad_norm": 65029756.0,
"learning_rate": 0.0,
"loss": 12012332.0,
"step": 2000
}
],
"logging_steps": 10,
"max_steps": 2000,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.763682350698701e+16,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}