{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 3930,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002544529262086514,
      "grad_norm": 2.135885238647461,
      "learning_rate": 9.160305343511451e-07,
      "loss": 0.1137,
      "step": 10
    },
    {
      "epoch": 0.005089058524173028,
      "grad_norm": 2.356735944747925,
      "learning_rate": 1.933842239185751e-06,
      "loss": 0.1656,
      "step": 20
    },
    {
      "epoch": 0.007633587786259542,
      "grad_norm": 3.483503818511963,
      "learning_rate": 2.951653944020356e-06,
      "loss": 0.1356,
      "step": 30
    },
    {
      "epoch": 0.010178117048346057,
      "grad_norm": 3.4808290004730225,
      "learning_rate": 3.969465648854962e-06,
      "loss": 0.1575,
      "step": 40
    },
    {
      "epoch": 0.01272264631043257,
      "grad_norm": 2.5104947090148926,
      "learning_rate": 4.987277353689568e-06,
      "loss": 0.0749,
      "step": 50
    },
    {
      "epoch": 0.015267175572519083,
      "grad_norm": 3.5466084480285645,
      "learning_rate": 6.005089058524174e-06,
      "loss": 0.0631,
      "step": 60
    },
    {
      "epoch": 0.017811704834605598,
      "grad_norm": 0.6639775633811951,
      "learning_rate": 7.022900763358779e-06,
      "loss": 0.0374,
      "step": 70
    },
    {
      "epoch": 0.020356234096692113,
      "grad_norm": 1.4179292917251587,
      "learning_rate": 8.040712468193384e-06,
      "loss": 0.0413,
      "step": 80
    },
    {
      "epoch": 0.022900763358778626,
      "grad_norm": 2.9656121730804443,
      "learning_rate": 9.058524173027991e-06,
      "loss": 0.0362,
      "step": 90
    },
    {
      "epoch": 0.02544529262086514,
      "grad_norm": 3.4811854362487793,
      "learning_rate": 1.0076335877862595e-05,
      "loss": 0.0411,
      "step": 100
    },
    {
      "epoch": 0.027989821882951654,
      "grad_norm": 2.1551337242126465,
      "learning_rate": 1.1094147582697202e-05,
      "loss": 0.0255,
      "step": 110
    },
    {
      "epoch": 0.030534351145038167,
      "grad_norm": 1.5262765884399414,
      "learning_rate": 1.2111959287531807e-05,
      "loss": 0.0225,
      "step": 120
    },
    {
      "epoch": 0.03307888040712468,
      "grad_norm": 1.3079928159713745,
      "learning_rate": 1.3129770992366414e-05,
      "loss": 0.0323,
      "step": 130
    },
    {
      "epoch": 0.035623409669211195,
      "grad_norm": 0.22996912896633148,
      "learning_rate": 1.4147582697201019e-05,
      "loss": 0.0188,
      "step": 140
    },
    {
      "epoch": 0.03816793893129771,
      "grad_norm": 1.0834699869155884,
      "learning_rate": 1.5165394402035624e-05,
      "loss": 0.0265,
      "step": 150
    },
    {
      "epoch": 0.04071246819338423,
      "grad_norm": 0.9807230830192566,
      "learning_rate": 1.618320610687023e-05,
      "loss": 0.0189,
      "step": 160
    },
    {
      "epoch": 0.043256997455470736,
      "grad_norm": 1.8505427837371826,
      "learning_rate": 1.7201017811704836e-05,
      "loss": 0.0142,
      "step": 170
    },
    {
      "epoch": 0.04580152671755725,
      "grad_norm": 0.346323698759079,
      "learning_rate": 1.8218829516539443e-05,
      "loss": 0.0237,
      "step": 180
    },
    {
      "epoch": 0.04834605597964377,
      "grad_norm": 1.4252818822860718,
      "learning_rate": 1.923664122137405e-05,
      "loss": 0.0169,
      "step": 190
    },
    {
      "epoch": 0.05089058524173028,
      "grad_norm": 0.624088704586029,
      "learning_rate": 2.0254452926208653e-05,
      "loss": 0.0153,
      "step": 200
    },
    {
      "epoch": 0.05343511450381679,
      "grad_norm": 0.7245016098022461,
      "learning_rate": 2.127226463104326e-05,
      "loss": 0.0211,
      "step": 210
    },
    {
      "epoch": 0.05597964376590331,
      "grad_norm": 1.2375560998916626,
      "learning_rate": 2.2290076335877867e-05,
      "loss": 0.0146,
      "step": 220
    },
    {
      "epoch": 0.058524173027989825,
      "grad_norm": 0.15632864832878113,
      "learning_rate": 2.330788804071247e-05,
      "loss": 0.0179,
      "step": 230
    },
    {
      "epoch": 0.061068702290076333,
      "grad_norm": 0.023025542497634888,
      "learning_rate": 2.4325699745547078e-05,
      "loss": 0.02,
      "step": 240
    },
    {
      "epoch": 0.06361323155216285,
      "grad_norm": 0.5999135375022888,
      "learning_rate": 2.5343511450381678e-05,
      "loss": 0.0057,
      "step": 250
    },
    {
      "epoch": 0.06615776081424936,
      "grad_norm": 0.08057913929224014,
      "learning_rate": 2.6361323155216285e-05,
      "loss": 0.0203,
      "step": 260
    },
    {
      "epoch": 0.06870229007633588,
      "grad_norm": 0.3958178460597992,
      "learning_rate": 2.737913486005089e-05,
      "loss": 0.0365,
      "step": 270
    },
    {
      "epoch": 0.07124681933842239,
      "grad_norm": 0.24179188907146454,
      "learning_rate": 2.8396946564885498e-05,
      "loss": 0.0198,
      "step": 280
    },
    {
      "epoch": 0.0737913486005089,
      "grad_norm": 1.3984962701797485,
      "learning_rate": 2.9414758269720102e-05,
      "loss": 0.0119,
      "step": 290
    },
    {
      "epoch": 0.07633587786259542,
      "grad_norm": 0.08135154098272324,
      "learning_rate": 3.043256997455471e-05,
      "loss": 0.0101,
      "step": 300
    },
    {
      "epoch": 0.07888040712468193,
      "grad_norm": 2.0051238536834717,
      "learning_rate": 3.145038167938931e-05,
      "loss": 0.0245,
      "step": 310
    },
    {
      "epoch": 0.08142493638676845,
      "grad_norm": 0.49833422899246216,
      "learning_rate": 3.246819338422392e-05,
      "loss": 0.0181,
      "step": 320
    },
    {
      "epoch": 0.08396946564885496,
      "grad_norm": 0.09235569834709167,
      "learning_rate": 3.3486005089058526e-05,
      "loss": 0.0116,
      "step": 330
    },
    {
      "epoch": 0.08651399491094147,
      "grad_norm": 0.06826785206794739,
      "learning_rate": 3.450381679389313e-05,
      "loss": 0.0161,
      "step": 340
    },
    {
      "epoch": 0.089058524173028,
      "grad_norm": 1.1774462461471558,
      "learning_rate": 3.552162849872774e-05,
      "loss": 0.0144,
      "step": 350
    },
    {
      "epoch": 0.0916030534351145,
      "grad_norm": 0.6383160948753357,
      "learning_rate": 3.653944020356235e-05,
      "loss": 0.0197,
      "step": 360
    },
    {
      "epoch": 0.09414758269720101,
      "grad_norm": 0.7711915969848633,
      "learning_rate": 3.755725190839695e-05,
      "loss": 0.0086,
      "step": 370
    },
    {
      "epoch": 0.09669211195928754,
      "grad_norm": 0.7851189970970154,
      "learning_rate": 3.8575063613231554e-05,
      "loss": 0.0235,
      "step": 380
    },
    {
      "epoch": 0.09923664122137404,
      "grad_norm": 0.08650732040405273,
      "learning_rate": 3.959287531806616e-05,
      "loss": 0.0151,
      "step": 390
    },
    {
      "epoch": 0.10178117048346055,
      "grad_norm": 1.1360907554626465,
      "learning_rate": 3.999971599165659e-05,
      "loss": 0.021,
      "step": 400
    },
    {
      "epoch": 0.10432569974554708,
      "grad_norm": 1.1978349685668945,
      "learning_rate": 3.9997980414323866e-05,
      "loss": 0.0179,
      "step": 410
    },
    {
      "epoch": 0.10687022900763359,
      "grad_norm": 0.6965557932853699,
      "learning_rate": 3.999466717882668e-05,
      "loss": 0.021,
      "step": 420
    },
    {
      "epoch": 0.10941475826972011,
      "grad_norm": 0.6535700559616089,
      "learning_rate": 3.998977654654908e-05,
      "loss": 0.0153,
      "step": 430
    },
    {
      "epoch": 0.11195928753180662,
      "grad_norm": 0.04983607307076454,
      "learning_rate": 3.998330890331731e-05,
      "loss": 0.004,
      "step": 440
    },
    {
      "epoch": 0.11450381679389313,
      "grad_norm": 0.5391613245010376,
      "learning_rate": 3.9975264759369426e-05,
      "loss": 0.0096,
      "step": 450
    },
    {
      "epoch": 0.11704834605597965,
      "grad_norm": 0.28082016110420227,
      "learning_rate": 3.996564474931495e-05,
      "loss": 0.006,
      "step": 460
    },
    {
      "epoch": 0.11959287531806616,
      "grad_norm": 0.39269477128982544,
      "learning_rate": 3.9954449632084905e-05,
      "loss": 0.0055,
      "step": 470
    },
    {
      "epoch": 0.12213740458015267,
      "grad_norm": 0.016487566754221916,
      "learning_rate": 3.9941680290871876e-05,
      "loss": 0.0091,
      "step": 480
    },
    {
      "epoch": 0.12468193384223919,
      "grad_norm": 0.04324239119887352,
      "learning_rate": 3.992733773306036e-05,
      "loss": 0.0138,
      "step": 490
    },
    {
      "epoch": 0.1272264631043257,
      "grad_norm": 0.21221888065338135,
      "learning_rate": 3.9911423090147286e-05,
      "loss": 0.0195,
      "step": 500
    },
    {
      "epoch": 0.1272264631043257,
      "eval_loss": 0.04138721898198128,
      "eval_runtime": 134.0416,
      "eval_samples_per_second": 59.549,
      "eval_steps_per_second": 0.47,
      "step": 500
    },
    {
      "epoch": 0.1297709923664122,
      "grad_norm": 1.9429672956466675,
      "learning_rate": 3.989393761765276e-05,
      "loss": 0.0203,
      "step": 510
    },
    {
      "epoch": 0.13231552162849872,
      "grad_norm": 0.18343333899974823,
      "learning_rate": 3.987488269502098e-05,
      "loss": 0.0137,
      "step": 520
    },
    {
      "epoch": 0.13486005089058525,
      "grad_norm": 0.17485463619232178,
      "learning_rate": 3.985425982551147e-05,
      "loss": 0.0064,
      "step": 530
    },
    {
      "epoch": 0.13740458015267176,
      "grad_norm": 0.8683700561523438,
      "learning_rate": 3.983207063608044e-05,
      "loss": 0.0107,
      "step": 540
    },
    {
      "epoch": 0.13994910941475827,
      "grad_norm": 0.9228803515434265,
      "learning_rate": 3.9808316877252454e-05,
      "loss": 0.0105,
      "step": 550
    },
    {
      "epoch": 0.14249363867684478,
      "grad_norm": 0.7613241076469421,
      "learning_rate": 3.9783000422982316e-05,
      "loss": 0.0123,
      "step": 560
    },
    {
      "epoch": 0.1450381679389313,
      "grad_norm": 0.23881566524505615,
      "learning_rate": 3.975612327050724e-05,
      "loss": 0.0128,
      "step": 570
    },
    {
      "epoch": 0.1475826972010178,
      "grad_norm": 0.31987839937210083,
      "learning_rate": 3.97276875401893e-05,
      "loss": 0.0043,
      "step": 580
    },
    {
      "epoch": 0.15012722646310434,
      "grad_norm": 0.04161321371793747,
      "learning_rate": 3.9697695475348125e-05,
      "loss": 0.0119,
      "step": 590
    },
    {
      "epoch": 0.15267175572519084,
      "grad_norm": 0.048683468252420425,
      "learning_rate": 3.9666149442083934e-05,
      "loss": 0.0022,
      "step": 600
    },
    {
      "epoch": 0.15521628498727735,
      "grad_norm": 0.15322823822498322,
      "learning_rate": 3.9633051929090895e-05,
      "loss": 0.0114,
      "step": 610
    },
    {
      "epoch": 0.15776081424936386,
      "grad_norm": 0.025288008153438568,
      "learning_rate": 3.959840554746074e-05,
      "loss": 0.015,
      "step": 620
    },
    {
      "epoch": 0.16030534351145037,
      "grad_norm": 0.06146557256579399,
      "learning_rate": 3.956221303047683e-05,
      "loss": 0.0128,
      "step": 630
    },
    {
      "epoch": 0.1628498727735369,
      "grad_norm": 0.41335877776145935,
      "learning_rate": 3.9524477233398466e-05,
      "loss": 0.0085,
      "step": 640
    },
    {
      "epoch": 0.16539440203562342,
      "grad_norm": 0.21233712136745453,
      "learning_rate": 3.9485201133235696e-05,
      "loss": 0.003,
      "step": 650
    },
    {
      "epoch": 0.16793893129770993,
      "grad_norm": 0.29870790243148804,
      "learning_rate": 3.9444387828514404e-05,
      "loss": 0.0105,
      "step": 660
    },
    {
      "epoch": 0.17048346055979643,
      "grad_norm": 0.2143266797065735,
      "learning_rate": 3.9402040539031895e-05,
      "loss": 0.0081,
      "step": 670
    },
    {
      "epoch": 0.17302798982188294,
      "grad_norm": 0.02664622664451599,
      "learning_rate": 3.935816260560288e-05,
      "loss": 0.0153,
      "step": 680
    },
    {
      "epoch": 0.17557251908396945,
      "grad_norm": 0.3243009150028229,
      "learning_rate": 3.931275748979588e-05,
      "loss": 0.0195,
      "step": 690
    },
    {
      "epoch": 0.178117048346056,
      "grad_norm": 0.680862545967102,
      "learning_rate": 3.926582877366022e-05,
      "loss": 0.0129,
      "step": 700
    },
    {
      "epoch": 0.1806615776081425,
      "grad_norm": 0.6640552282333374,
      "learning_rate": 3.9217380159443334e-05,
      "loss": 0.0077,
      "step": 710
    },
    {
      "epoch": 0.183206106870229,
      "grad_norm": 0.06368488073348999,
      "learning_rate": 3.9167415469298775e-05,
      "loss": 0.009,
      "step": 720
    },
    {
      "epoch": 0.18575063613231552,
      "grad_norm": 0.8134492039680481,
      "learning_rate": 3.9115938644984656e-05,
      "loss": 0.0065,
      "step": 730
    },
    {
      "epoch": 0.18829516539440203,
      "grad_norm": 0.33869367837905884,
      "learning_rate": 3.906295374755266e-05,
      "loss": 0.0069,
      "step": 740
    },
    {
      "epoch": 0.19083969465648856,
      "grad_norm": 0.024942323565483093,
      "learning_rate": 3.900846495702768e-05,
      "loss": 0.01,
      "step": 750
    },
    {
      "epoch": 0.19338422391857507,
      "grad_norm": 0.5529096126556396,
      "learning_rate": 3.895247657207807e-05,
      "loss": 0.0072,
      "step": 760
    },
    {
      "epoch": 0.19592875318066158,
      "grad_norm": 0.48590993881225586,
      "learning_rate": 3.8894993009676464e-05,
      "loss": 0.0049,
      "step": 770
    },
    {
      "epoch": 0.1984732824427481,
      "grad_norm": 0.1863989681005478,
      "learning_rate": 3.883601880475138e-05,
      "loss": 0.0077,
      "step": 780
    },
    {
      "epoch": 0.2010178117048346,
      "grad_norm": 0.9839557409286499,
      "learning_rate": 3.8775558609829434e-05,
      "loss": 0.0086,
      "step": 790
    },
    {
      "epoch": 0.2035623409669211,
      "grad_norm": 1.0942028760910034,
      "learning_rate": 3.8713617194668264e-05,
      "loss": 0.0184,
      "step": 800
    },
    {
      "epoch": 0.20610687022900764,
      "grad_norm": 0.20266412198543549,
      "learning_rate": 3.865019944588029e-05,
      "loss": 0.0151,
      "step": 810
    },
    {
      "epoch": 0.20865139949109415,
      "grad_norm": 0.14641156792640686,
      "learning_rate": 3.858531036654718e-05,
      "loss": 0.0035,
      "step": 820
    },
    {
      "epoch": 0.21119592875318066,
      "grad_norm": 0.883431613445282,
      "learning_rate": 3.851895507582514e-05,
      "loss": 0.01,
      "step": 830
    },
    {
      "epoch": 0.21374045801526717,
      "grad_norm": 0.8385990858078003,
      "learning_rate": 3.8451138808541106e-05,
      "loss": 0.019,
      "step": 840
    },
    {
      "epoch": 0.21628498727735368,
      "grad_norm": 0.015678202733397484,
      "learning_rate": 3.83818669147797e-05,
      "loss": 0.0052,
      "step": 850
    },
    {
      "epoch": 0.21882951653944022,
      "grad_norm": 0.1870308667421341,
      "learning_rate": 3.831114485946121e-05,
      "loss": 0.0177,
      "step": 860
    },
    {
      "epoch": 0.22137404580152673,
      "grad_norm": 0.02714279294013977,
      "learning_rate": 3.823897822191043e-05,
      "loss": 0.0054,
      "step": 870
    },
    {
      "epoch": 0.22391857506361323,
      "grad_norm": 0.052650295197963715,
      "learning_rate": 3.816537269541651e-05,
      "loss": 0.0108,
      "step": 880
    },
    {
      "epoch": 0.22646310432569974,
      "grad_norm": 0.780659019947052,
      "learning_rate": 3.8090334086783814e-05,
      "loss": 0.0122,
      "step": 890
    },
    {
      "epoch": 0.22900763358778625,
      "grad_norm": 0.054588913917541504,
      "learning_rate": 3.801386831587379e-05,
      "loss": 0.0094,
      "step": 900
    },
    {
      "epoch": 0.23155216284987276,
      "grad_norm": 0.03867032378911972,
      "learning_rate": 3.793598141513799e-05,
      "loss": 0.0083,
      "step": 910
    },
    {
      "epoch": 0.2340966921119593,
      "grad_norm": 0.030569806694984436,
      "learning_rate": 3.785667952914213e-05,
      "loss": 0.0096,
      "step": 920
    },
    {
      "epoch": 0.2366412213740458,
      "grad_norm": 0.16147764027118683,
      "learning_rate": 3.777596891408134e-05,
      "loss": 0.0068,
      "step": 930
    },
    {
      "epoch": 0.23918575063613232,
      "grad_norm": 0.0073355999775230885,
      "learning_rate": 3.769385593728665e-05,
      "loss": 0.002,
      "step": 940
    },
    {
      "epoch": 0.24173027989821882,
      "grad_norm": 0.019785180687904358,
      "learning_rate": 3.7610347076722594e-05,
      "loss": 0.0116,
      "step": 950
    },
    {
      "epoch": 0.24427480916030533,
      "grad_norm": 0.031664274632930756,
      "learning_rate": 3.752544892047623e-05,
      "loss": 0.0031,
      "step": 960
    },
    {
      "epoch": 0.24681933842239187,
      "grad_norm": 0.20639267563819885,
      "learning_rate": 3.7439168166237336e-05,
      "loss": 0.0045,
      "step": 970
    },
    {
      "epoch": 0.24936386768447838,
      "grad_norm": 0.028579477220773697,
      "learning_rate": 3.7351511620770084e-05,
      "loss": 0.0108,
      "step": 980
    },
    {
      "epoch": 0.25190839694656486,
      "grad_norm": 0.1208048015832901,
      "learning_rate": 3.726248619937601e-05,
      "loss": 0.0052,
      "step": 990
    },
    {
      "epoch": 0.2544529262086514,
      "grad_norm": 0.2995353937149048,
      "learning_rate": 3.717209892534846e-05,
      "loss": 0.0084,
      "step": 1000
    },
    {
      "epoch": 0.2544529262086514,
      "eval_loss": 0.03396334871649742,
      "eval_runtime": 133.983,
      "eval_samples_per_second": 59.575,
      "eval_steps_per_second": 0.47,
      "step": 1000
    },
    {
      "epoch": 0.25699745547073793,
      "grad_norm": 0.07392453402280807,
      "learning_rate": 3.7080356929418534e-05,
      "loss": 0.0043,
      "step": 1010
    },
    {
      "epoch": 0.2595419847328244,
      "grad_norm": 0.2610020339488983,
      "learning_rate": 3.698726744919252e-05,
      "loss": 0.0053,
      "step": 1020
    },
    {
      "epoch": 0.26208651399491095,
      "grad_norm": 0.45659804344177246,
      "learning_rate": 3.6892837828580946e-05,
      "loss": 0.0053,
      "step": 1030
    },
    {
      "epoch": 0.26463104325699743,
      "grad_norm": 0.840158998966217,
      "learning_rate": 3.679707551721914e-05,
      "loss": 0.0111,
      "step": 1040
    },
    {
      "epoch": 0.26717557251908397,
      "grad_norm": 0.10178951174020767,
      "learning_rate": 3.669998806987961e-05,
      "loss": 0.0028,
      "step": 1050
    },
    {
      "epoch": 0.2697201017811705,
      "grad_norm": 0.5190885066986084,
      "learning_rate": 3.660158314587599e-05,
      "loss": 0.0029,
      "step": 1060
    },
    {
      "epoch": 0.272264631043257,
      "grad_norm": 0.7919594049453735,
      "learning_rate": 3.6501868508458784e-05,
      "loss": 0.0072,
      "step": 1070
    },
    {
      "epoch": 0.2748091603053435,
      "grad_norm": 0.454806923866272,
      "learning_rate": 3.640085202420296e-05,
      "loss": 0.0079,
      "step": 1080
    },
    {
      "epoch": 0.27735368956743,
      "grad_norm": 1.1031146049499512,
      "learning_rate": 3.629854166238729e-05,
      "loss": 0.0103,
      "step": 1090
    },
    {
      "epoch": 0.27989821882951654,
      "grad_norm": 0.06990394741296768,
      "learning_rate": 3.619494549436571e-05,
      "loss": 0.0128,
      "step": 1100
    },
    {
      "epoch": 0.2824427480916031,
      "grad_norm": 0.009839543141424656,
      "learning_rate": 3.609007169293051e-05,
      "loss": 0.0064,
      "step": 1110
    },
    {
      "epoch": 0.28498727735368956,
      "grad_norm": 0.8976128101348877,
      "learning_rate": 3.59839285316676e-05,
      "loss": 0.0193,
      "step": 1120
    },
    {
      "epoch": 0.2875318066157761,
      "grad_norm": 0.03007723018527031,
      "learning_rate": 3.587652438430381e-05,
      "loss": 0.0101,
      "step": 1130
    },
    {
      "epoch": 0.2900763358778626,
      "grad_norm": 0.17372159659862518,
      "learning_rate": 3.5767867724046254e-05,
      "loss": 0.0031,
      "step": 1140
    },
    {
      "epoch": 0.2926208651399491,
      "grad_norm": 0.020978795364499092,
      "learning_rate": 3.5657967122913886e-05,
      "loss": 0.0116,
      "step": 1150
    },
    {
      "epoch": 0.2951653944020356,
      "grad_norm": 0.5843973159790039,
      "learning_rate": 3.5546831251061245e-05,
      "loss": 0.0062,
      "step": 1160
    },
    {
      "epoch": 0.29770992366412213,
      "grad_norm": 1.0085383653640747,
      "learning_rate": 3.543446887609446e-05,
      "loss": 0.0131,
      "step": 1170
    },
    {
      "epoch": 0.30025445292620867,
      "grad_norm": 0.01817316561937332,
      "learning_rate": 3.532088886237956e-05,
      "loss": 0.0058,
      "step": 1180
    },
    {
      "epoch": 0.30279898218829515,
      "grad_norm": 0.2761935293674469,
      "learning_rate": 3.520610017034316e-05,
      "loss": 0.0103,
      "step": 1190
    },
    {
      "epoch": 0.3053435114503817,
      "grad_norm": 0.09700267761945724,
      "learning_rate": 3.509011185576556e-05,
      "loss": 0.0043,
      "step": 1200
    },
    {
      "epoch": 0.30788804071246817,
      "grad_norm": 0.0048867869190871716,
      "learning_rate": 3.497293306906635e-05,
      "loss": 0.0057,
      "step": 1210
    },
    {
      "epoch": 0.3104325699745547,
      "grad_norm": 0.003819795558229089,
      "learning_rate": 3.4854573054582474e-05,
      "loss": 0.0108,
      "step": 1220
    },
    {
      "epoch": 0.31297709923664124,
      "grad_norm": 0.14946851134300232,
      "learning_rate": 3.473504114983901e-05,
      "loss": 0.0135,
      "step": 1230
    },
    {
      "epoch": 0.3155216284987277,
      "grad_norm": 0.008427307941019535,
      "learning_rate": 3.4614346784812445e-05,
      "loss": 0.0057,
      "step": 1240
    },
    {
      "epoch": 0.31806615776081426,
      "grad_norm": 0.024618452414870262,
      "learning_rate": 3.449249948118679e-05,
      "loss": 0.0073,
      "step": 1250
    },
    {
      "epoch": 0.32061068702290074,
      "grad_norm": 0.045560527592897415,
      "learning_rate": 3.436950885160241e-05,
      "loss": 0.0168,
      "step": 1260
    },
    {
      "epoch": 0.3231552162849873,
      "grad_norm": 0.01829380914568901,
      "learning_rate": 3.424538459889762e-05,
      "loss": 0.0047,
      "step": 1270
    },
    {
      "epoch": 0.3256997455470738,
      "grad_norm": 0.029155071824789047,
      "learning_rate": 3.412013651534328e-05,
      "loss": 0.0049,
      "step": 1280
    },
    {
      "epoch": 0.3282442748091603,
      "grad_norm": 0.1900954246520996,
      "learning_rate": 3.399377448187023e-05,
      "loss": 0.0044,
      "step": 1290
    },
    {
      "epoch": 0.33078880407124683,
      "grad_norm": 0.06734994053840637,
      "learning_rate": 3.386630846728981e-05,
      "loss": 0.0094,
      "step": 1300
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.7144080400466919,
      "learning_rate": 3.3737748527507385e-05,
      "loss": 0.0116,
      "step": 1310
    },
    {
      "epoch": 0.33587786259541985,
      "grad_norm": 0.006130326073616743,
      "learning_rate": 3.3608104804729044e-05,
      "loss": 0.0083,
      "step": 1320
    },
    {
      "epoch": 0.3384223918575064,
      "grad_norm": 0.3749116361141205,
      "learning_rate": 3.3477387526661455e-05,
      "loss": 0.0037,
      "step": 1330
    },
    {
      "epoch": 0.34096692111959287,
      "grad_norm": 0.5767784714698792,
      "learning_rate": 3.3345607005705005e-05,
      "loss": 0.0095,
      "step": 1340
    },
    {
      "epoch": 0.3435114503816794,
      "grad_norm": 0.065456822514534,
      "learning_rate": 3.3212773638140244e-05,
      "loss": 0.0095,
      "step": 1350
    },
    {
      "epoch": 0.3460559796437659,
      "grad_norm": 1.2213973999023438,
      "learning_rate": 3.307889790330771e-05,
      "loss": 0.0142,
      "step": 1360
    },
    {
      "epoch": 0.3486005089058524,
      "grad_norm": 0.03910631313920021,
      "learning_rate": 3.2943990362781204e-05,
      "loss": 0.0075,
      "step": 1370
    },
    {
      "epoch": 0.3511450381679389,
      "grad_norm": 0.3203994333744049,
      "learning_rate": 3.2808061659534596e-05,
      "loss": 0.0115,
      "step": 1380
    },
    {
      "epoch": 0.35368956743002544,
      "grad_norm": 0.5574242472648621,
      "learning_rate": 3.267112251710215e-05,
      "loss": 0.011,
      "step": 1390
    },
    {
      "epoch": 0.356234096692112,
      "grad_norm": 0.16698601841926575,
      "learning_rate": 3.253318373873259e-05,
      "loss": 0.0049,
      "step": 1400
    },
    {
      "epoch": 0.35877862595419846,
      "grad_norm": 0.044682443141937256,
      "learning_rate": 3.239425620653676e-05,
      "loss": 0.0113,
      "step": 1410
    },
    {
      "epoch": 0.361323155216285,
      "grad_norm": 0.48920974135398865,
      "learning_rate": 3.225435088062919e-05,
      "loss": 0.0084,
      "step": 1420
    },
    {
      "epoch": 0.3638676844783715,
      "grad_norm": 0.05923973023891449,
      "learning_rate": 3.211347879826339e-05,
      "loss": 0.0043,
      "step": 1430
    },
    {
      "epoch": 0.366412213740458,
      "grad_norm": 0.03358899801969528,
      "learning_rate": 3.1971651072961145e-05,
      "loss": 0.0166,
      "step": 1440
    },
    {
      "epoch": 0.36895674300254455,
      "grad_norm": 0.0032274764962494373,
      "learning_rate": 3.182887889363574e-05,
      "loss": 0.0063,
      "step": 1450
    },
    {
      "epoch": 0.37150127226463103,
      "grad_norm": 0.01269812323153019,
      "learning_rate": 3.168517352370927e-05,
      "loss": 0.0108,
      "step": 1460
    },
    {
      "epoch": 0.37404580152671757,
      "grad_norm": 0.12659408152103424,
      "learning_rate": 3.1540546300224045e-05,
      "loss": 0.0027,
      "step": 1470
    },
    {
      "epoch": 0.37659033078880405,
      "grad_norm": 0.2497015744447708,
      "learning_rate": 3.1395008632948204e-05,
      "loss": 0.013,
      "step": 1480
    },
    {
      "epoch": 0.3791348600508906,
      "grad_norm": 0.09000229835510254,
      "learning_rate": 3.1248572003475614e-05,
      "loss": 0.0065,
      "step": 1490
    },
    {
      "epoch": 0.3816793893129771,
      "grad_norm": 0.019155532121658325,
      "learning_rate": 3.110124796432003e-05,
      "loss": 0.013,
      "step": 1500
    },
    {
      "epoch": 0.3816793893129771,
      "eval_loss": 0.028218811377882957,
      "eval_runtime": 134.2194,
      "eval_samples_per_second": 59.47,
      "eval_steps_per_second": 0.469,
      "step": 1500
    },
    {
      "epoch": 0.3842239185750636,
      "grad_norm": 0.845061182975769,
      "learning_rate": 3.095304813800376e-05,
      "loss": 0.0135,
      "step": 1510
    },
    {
      "epoch": 0.38676844783715014,
      "grad_norm": 0.20835207402706146,
      "learning_rate": 3.0803984216140705e-05,
      "loss": 0.0037,
      "step": 1520
    },
    {
      "epoch": 0.3893129770992366,
      "grad_norm": 0.40771785378456116,
      "learning_rate": 3.065406795851403e-05,
      "loss": 0.0062,
      "step": 1530
    },
    {
      "epoch": 0.39185750636132316,
      "grad_norm": 0.020069751888513565,
      "learning_rate": 3.05033111921484e-05,
      "loss": 0.0047,
      "step": 1540
    },
    {
      "epoch": 0.3944020356234097,
      "grad_norm": 0.5557010769844055,
      "learning_rate": 3.035172581037698e-05,
      "loss": 0.0064,
      "step": 1550
    },
    {
      "epoch": 0.3969465648854962,
      "grad_norm": 0.2212294191122055,
      "learning_rate": 3.0199323771903094e-05,
      "loss": 0.0103,
      "step": 1560
    },
    {
      "epoch": 0.3994910941475827,
      "grad_norm": 0.12645219266414642,
      "learning_rate": 3.004611709985685e-05,
      "loss": 0.0027,
      "step": 1570
    },
    {
      "epoch": 0.4020356234096692,
      "grad_norm": 0.017775116488337517,
      "learning_rate": 2.9892117880846592e-05,
      "loss": 0.0041,
      "step": 1580
    },
    {
      "epoch": 0.40458015267175573,
      "grad_norm": 1.017933964729309,
      "learning_rate": 2.9737338264005385e-05,
      "loss": 0.0091,
      "step": 1590
    },
    {
      "epoch": 0.4071246819338422,
      "grad_norm": 0.030523020774126053,
      "learning_rate": 2.9581790460032587e-05,
      "loss": 0.0066,
      "step": 1600
    },
    {
      "epoch": 0.40966921119592875,
      "grad_norm": 0.001736677484586835,
      "learning_rate": 2.9425486740230468e-05,
      "loss": 0.0023,
      "step": 1610
    },
    {
      "epoch": 0.4122137404580153,
      "grad_norm": 0.25881147384643555,
      "learning_rate": 2.9268439435536192e-05,
      "loss": 0.0019,
      "step": 1620
    },
    {
      "epoch": 0.41475826972010177,
      "grad_norm": 1.060746431350708,
      "learning_rate": 2.9110660935548974e-05,
      "loss": 0.0052,
      "step": 1630
    },
    {
      "epoch": 0.4173027989821883,
      "grad_norm": 0.706464409828186,
      "learning_rate": 2.895216368755266e-05,
      "loss": 0.0076,
      "step": 1640
    },
    {
      "epoch": 0.4198473282442748,
      "grad_norm": 0.0024033733643591404,
      "learning_rate": 2.879296019553377e-05,
      "loss": 0.0042,
      "step": 1650
    },
    {
      "epoch": 0.4223918575063613,
      "grad_norm": 0.032745685428380966,
      "learning_rate": 2.863306301919502e-05,
      "loss": 0.0135,
      "step": 1660
    },
    {
      "epoch": 0.42493638676844786,
      "grad_norm": 0.025153767317533493,
      "learning_rate": 2.8472484772964492e-05,
      "loss": 0.0054,
      "step": 1670
    },
    {
      "epoch": 0.42748091603053434,
      "grad_norm": 0.058375366032123566,
      "learning_rate": 2.831123812500047e-05,
      "loss": 0.0045,
      "step": 1680
    },
    {
      "epoch": 0.4300254452926209,
      "grad_norm": 0.02816547080874443,
      "learning_rate": 2.814933579619202e-05,
      "loss": 0.0105,
      "step": 1690
    },
    {
      "epoch": 0.43256997455470736,
      "grad_norm": 0.25510331988334656,
      "learning_rate": 2.798679055915547e-05,
      "loss": 0.0065,
      "step": 1700
    },
    {
      "epoch": 0.4351145038167939,
      "grad_norm": 0.8073764443397522,
      "learning_rate": 2.782361523722673e-05,
      "loss": 0.0092,
      "step": 1710
    },
    {
      "epoch": 0.43765903307888043,
      "grad_norm": 0.009462070651352406,
      "learning_rate": 2.765982270344964e-05,
      "loss": 0.0083,
      "step": 1720
    },
    {
      "epoch": 0.4402035623409669,
      "grad_norm": 1.005836009979248,
      "learning_rate": 2.749542587956045e-05,
      "loss": 0.008,
      "step": 1730
    },
    {
      "epoch": 0.44274809160305345,
      "grad_norm": 0.02954590506851673,
      "learning_rate": 2.733043773496836e-05,
      "loss": 0.0016,
      "step": 1740
    },
    {
      "epoch": 0.44529262086513993,
      "grad_norm": 0.5857617855072021,
      "learning_rate": 2.71648712857324e-05,
      "loss": 0.0071,
      "step": 1750
    },
    {
      "epoch": 0.44783715012722647,
      "grad_norm": 0.006716390606015921,
      "learning_rate": 2.699873959353455e-05,
      "loss": 0.009,
      "step": 1760
    },
    {
      "epoch": 0.45038167938931295,
      "grad_norm": 0.20183268189430237,
      "learning_rate": 2.6832055764649303e-05,
      "loss": 0.0066,
      "step": 1770
    },
    {
      "epoch": 0.4529262086513995,
      "grad_norm": 0.9785445928573608,
      "learning_rate": 2.6664832948909683e-05,
      "loss": 0.016,
      "step": 1780
    },
    {
      "epoch": 0.455470737913486,
      "grad_norm": 0.05489499494433403,
      "learning_rate": 2.6497084338669878e-05,
      "loss": 0.0072,
      "step": 1790
    },
    {
      "epoch": 0.4580152671755725,
      "grad_norm": 0.5305062532424927,
      "learning_rate": 2.632882316776446e-05,
      "loss": 0.0072,
      "step": 1800
    },
    {
      "epoch": 0.46055979643765904,
      "grad_norm": 0.0883222445845604,
      "learning_rate": 2.6160062710464358e-05,
      "loss": 0.0092,
      "step": 1810
    },
    {
      "epoch": 0.4631043256997455,
      "grad_norm": 0.1060655266046524,
      "learning_rate": 2.5990816280429636e-05,
      "loss": 0.0047,
      "step": 1820
    },
    {
      "epoch": 0.46564885496183206,
      "grad_norm": 0.02622363343834877,
      "learning_rate": 2.582109722965918e-05,
      "loss": 0.0054,
      "step": 1830
    },
    {
      "epoch": 0.4681933842239186,
      "grad_norm": 0.008235059678554535,
      "learning_rate": 2.5650918947437334e-05,
      "loss": 0.0064,
      "step": 1840
    },
    {
      "epoch": 0.4707379134860051,
      "grad_norm": 0.14751340448856354,
      "learning_rate": 2.548029485927762e-05,
      "loss": 0.0057,
      "step": 1850
    },
    {
      "epoch": 0.4732824427480916,
      "grad_norm": 0.17184267938137054,
      "learning_rate": 2.5309238425863563e-05,
      "loss": 0.0136,
      "step": 1860
    },
    {
      "epoch": 0.4758269720101781,
      "grad_norm": 0.002700164681300521,
      "learning_rate": 2.5137763141986805e-05,
      "loss": 0.0101,
      "step": 1870
    },
    {
      "epoch": 0.47837150127226463,
      "grad_norm": 0.3989448845386505,
      "learning_rate": 2.4965882535482467e-05,
      "loss": 0.0033,
      "step": 1880
    },
    {
      "epoch": 0.48091603053435117,
      "grad_norm": 0.7475537061691284,
      "learning_rate": 2.4793610166161894e-05,
      "loss": 0.0099,
      "step": 1890
    },
    {
      "epoch": 0.48346055979643765,
      "grad_norm": 0.13649067282676697,
      "learning_rate": 2.462095962474299e-05,
      "loss": 0.0084,
      "step": 1900
    },
    {
      "epoch": 0.4860050890585242,
      "grad_norm": 0.030215373262763023,
      "learning_rate": 2.444794453177794e-05,
      "loss": 0.0063,
      "step": 1910
    },
    {
      "epoch": 0.48854961832061067,
      "grad_norm": 0.2751119136810303,
      "learning_rate": 2.4274578536578753e-05,
      "loss": 0.0025,
      "step": 1920
    },
    {
      "epoch": 0.4910941475826972,
      "grad_norm": 0.8307936787605286,
      "learning_rate": 2.4100875316140414e-05,
      "loss": 0.0081,
      "step": 1930
    },
    {
      "epoch": 0.49363867684478374,
      "grad_norm": 0.2610468566417694,
      "learning_rate": 2.392684857406189e-05,
      "loss": 0.0038,
      "step": 1940
    },
    {
      "epoch": 0.4961832061068702,
      "grad_norm": 0.0161338672041893,
      "learning_rate": 2.3752512039465067e-05,
      "loss": 0.0082,
      "step": 1950
    },
    {
      "epoch": 0.49872773536895676,
      "grad_norm": 0.19266584515571594,
      "learning_rate": 2.357787946591162e-05,
      "loss": 0.0047,
      "step": 1960
    },
    {
      "epoch": 0.5012722646310432,
      "grad_norm": 0.9321357011795044,
      "learning_rate": 2.340296463031802e-05,
      "loss": 0.0069,
      "step": 1970
    },
    {
      "epoch": 0.5038167938931297,
      "grad_norm": 0.06572539359331131,
      "learning_rate": 2.3227781331868603e-05,
      "loss": 0.0054,
      "step": 1980
    },
    {
      "epoch": 0.5063613231552163,
      "grad_norm": 0.08220947533845901,
      "learning_rate": 2.3052343390927014e-05,
      "loss": 0.0028,
      "step": 1990
    },
    {
      "epoch": 0.5089058524173028,
      "grad_norm": 0.20873849093914032,
      "learning_rate": 2.2876664647945828e-05,
      "loss": 0.0019,
      "step": 2000
    },
    {
      "epoch": 0.5089058524173028,
      "eval_loss": 0.027919922024011612,
      "eval_runtime": 133.9302,
      "eval_samples_per_second": 59.598,
      "eval_steps_per_second": 0.47,
      "step": 2000
    },
    {
      "epoch": 0.5114503816793893,
      "grad_norm": 0.0028455285355448723,
      "learning_rate": 2.270075896237473e-05,
      "loss": 0.0097,
      "step": 2010
    },
    {
      "epoch": 0.5139949109414759,
      "grad_norm": 0.1659770905971527,
      "learning_rate": 2.2524640211567096e-05,
      "loss": 0.0054,
      "step": 2020
    },
    {
      "epoch": 0.5165394402035624,
      "grad_norm": 0.30801087617874146,
      "learning_rate": 2.23483222896852e-05,
      "loss": 0.006,
      "step": 2030
    },
    {
      "epoch": 0.5190839694656488,
      "grad_norm": 0.5698021054267883,
      "learning_rate": 2.2171819106604116e-05,
      "loss": 0.0085,
      "step": 2040
    },
    {
      "epoch": 0.5216284987277354,
      "grad_norm": 0.24747492372989655,
      "learning_rate": 2.19951445868143e-05,
      "loss": 0.0097,
      "step": 2050
    },
    {
      "epoch": 0.5241730279898219,
      "grad_norm": 0.9410986304283142,
      "learning_rate": 2.1818312668323148e-05,
      "loss": 0.0063,
      "step": 2060
    },
    {
      "epoch": 0.5267175572519084,
      "grad_norm": 0.31956005096435547,
      "learning_rate": 2.164133730155535e-05,
      "loss": 0.0112,
      "step": 2070
    },
    {
      "epoch": 0.5292620865139949,
      "grad_norm": 0.6595532298088074,
      "learning_rate": 2.1464232448252375e-05,
      "loss": 0.0139,
      "step": 2080
    },
    {
      "epoch": 0.5318066157760815,
      "grad_norm": 0.012306688353419304,
      "learning_rate": 2.1287012080370983e-05,
      "loss": 0.0105,
      "step": 2090
    },
    {
      "epoch": 0.5343511450381679,
      "grad_norm": 0.9253210425376892,
      "learning_rate": 2.1109690178981e-05,
      "loss": 0.0133,
      "step": 2100
    },
    {
      "epoch": 0.5368956743002544,
      "grad_norm": 0.0925484448671341,
      "learning_rate": 2.0932280733162295e-05,
      "loss": 0.0085,
      "step": 2110
    },
    {
      "epoch": 0.539440203562341,
      "grad_norm": 0.049422722309827805,
      "learning_rate": 2.075479773890121e-05,
      "loss": 0.0071,
      "step": 2120
    },
    {
      "epoch": 0.5419847328244275,
      "grad_norm": 0.015734514221549034,
      "learning_rate": 2.0577255197986376e-05,
      "loss": 0.0047,
      "step": 2130
    },
    {
      "epoch": 0.544529262086514,
      "grad_norm": 0.005423278082162142,
      "learning_rate": 2.0399667116904127e-05,
      "loss": 0.0096,
      "step": 2140
    },
    {
      "epoch": 0.5470737913486005,
      "grad_norm": 0.13550741970539093,
      "learning_rate": 2.02220475057335e-05,
      "loss": 0.0093,
      "step": 2150
    },
    {
      "epoch": 0.549618320610687,
      "grad_norm": 0.4259481132030487,
      "learning_rate": 2.0044410377040936e-05,
      "loss": 0.0049,
      "step": 2160
    },
    {
      "epoch": 0.5521628498727735,
      "grad_norm": 0.12342940270900726,
      "learning_rate": 1.986676974477489e-05,
      "loss": 0.0046,
      "step": 2170
    },
    {
      "epoch": 0.55470737913486,
      "grad_norm": 0.5921499133110046,
      "learning_rate": 1.9689139623160202e-05,
      "loss": 0.0103,
      "step": 2180
    },
    {
      "epoch": 0.5572519083969466,
      "grad_norm": 0.055644094944000244,
      "learning_rate": 1.951153402559251e-05,
      "loss": 0.0068,
      "step": 2190
    },
    {
      "epoch": 0.5597964376590331,
      "grad_norm": 0.00220988760702312,
      "learning_rate": 1.9333966963532732e-05,
      "loss": 0.0119,
      "step": 2200
    },
    {
      "epoch": 0.5623409669211196,
      "grad_norm": 0.8570493459701538,
      "learning_rate": 1.915645244540169e-05,
      "loss": 0.0044,
      "step": 2210
    },
    {
      "epoch": 0.5648854961832062,
      "grad_norm": 0.6145547032356262,
      "learning_rate": 1.8979004475474986e-05,
      "loss": 0.0098,
      "step": 2220
    },
    {
      "epoch": 0.5674300254452926,
      "grad_norm": 0.8959047794342041,
      "learning_rate": 1.8801637052778126e-05,
      "loss": 0.0147,
      "step": 2230
    },
    {
      "epoch": 0.5699745547073791,
      "grad_norm": 0.0702587440609932,
      "learning_rate": 1.8624364169982222e-05,
      "loss": 0.0028,
      "step": 2240
    },
    {
      "epoch": 0.5725190839694656,
      "grad_norm": 0.20443186163902283,
      "learning_rate": 1.8447199812300035e-05,
      "loss": 0.011,
      "step": 2250
    },
    {
      "epoch": 0.5750636132315522,
      "grad_norm": 0.017398599535226822,
      "learning_rate": 1.827015795638268e-05,
      "loss": 0.0047,
      "step": 2260
    },
    {
      "epoch": 0.5776081424936387,
      "grad_norm": 0.3448769152164459,
      "learning_rate": 1.8093252569217024e-05,
      "loss": 0.007,
      "step": 2270
    },
    {
      "epoch": 0.5801526717557252,
      "grad_norm": 0.4621836841106415,
      "learning_rate": 1.7916497607023748e-05,
      "loss": 0.0071,
      "step": 2280
    },
    {
      "epoch": 0.5826972010178118,
      "grad_norm": 0.4924357831478119,
      "learning_rate": 1.773990701415642e-05,
      "loss": 0.0068,
      "step": 2290
    },
    {
      "epoch": 0.5852417302798982,
      "grad_norm": 0.0024092900566756725,
      "learning_rate": 1.7563494722001346e-05,
      "loss": 0.0005,
      "step": 2300
    },
    {
      "epoch": 0.5877862595419847,
      "grad_norm": 0.4683379530906677,
      "learning_rate": 1.7387274647878548e-05,
      "loss": 0.0029,
      "step": 2310
    },
    {
      "epoch": 0.5903307888040712,
      "grad_norm": 0.02217201516032219,
      "learning_rate": 1.7211260693943776e-05,
      "loss": 0.0121,
      "step": 2320
    },
    {
      "epoch": 0.5928753180661578,
      "grad_norm": 0.21860001981258392,
      "learning_rate": 1.70354667460918e-05,
      "loss": 0.0041,
      "step": 2330
    },
    {
      "epoch": 0.5954198473282443,
      "grad_norm": 0.14538930356502533,
      "learning_rate": 1.6859906672860907e-05,
      "loss": 0.0042,
      "step": 2340
    },
    {
      "epoch": 0.5979643765903307,
      "grad_norm": 0.003986242692917585,
      "learning_rate": 1.668459432433883e-05,
      "loss": 0.0103,
      "step": 2350
    },
    {
      "epoch": 0.6005089058524173,
      "grad_norm": 0.21803154051303864,
      "learning_rate": 1.6509543531070033e-05,
      "loss": 0.0028,
      "step": 2360
    },
    {
      "epoch": 0.6030534351145038,
      "grad_norm": 0.1650070995092392,
      "learning_rate": 1.6334768102964712e-05,
      "loss": 0.0063,
      "step": 2370
    },
    {
      "epoch": 0.6055979643765903,
      "grad_norm": 0.011033114977180958,
      "learning_rate": 1.6160281828209242e-05,
      "loss": 0.0092,
      "step": 2380
    },
    {
      "epoch": 0.6081424936386769,
      "grad_norm": 0.32751017808914185,
      "learning_rate": 1.598609847217844e-05,
      "loss": 0.0063,
      "step": 2390
    },
    {
      "epoch": 0.6106870229007634,
      "grad_norm": 0.3929450809955597,
      "learning_rate": 1.5812231776349592e-05,
      "loss": 0.0146,
      "step": 2400
    },
    {
      "epoch": 0.6132315521628499,
      "grad_norm": 0.1516672968864441,
      "learning_rate": 1.563869545721839e-05,
      "loss": 0.0082,
      "step": 2410
    },
    {
      "epoch": 0.6157760814249363,
      "grad_norm": 1.0102410316467285,
      "learning_rate": 1.5465503205216823e-05,
      "loss": 0.0157,
      "step": 2420
    },
    {
      "epoch": 0.6183206106870229,
      "grad_norm": 0.056212831288576126,
      "learning_rate": 1.529266868363312e-05,
      "loss": 0.0031,
      "step": 2430
    },
    {
      "epoch": 0.6208651399491094,
      "grad_norm": 0.009815212339162827,
      "learning_rate": 1.512020552753385e-05,
      "loss": 0.0075,
      "step": 2440
    },
    {
      "epoch": 0.6234096692111959,
      "grad_norm": 0.07710433006286621,
      "learning_rate": 1.494812734268821e-05,
      "loss": 0.004,
      "step": 2450
    },
    {
      "epoch": 0.6259541984732825,
      "grad_norm": 0.7043729424476624,
      "learning_rate": 1.4776447704494712e-05,
      "loss": 0.0138,
      "step": 2460
    },
    {
      "epoch": 0.628498727735369,
      "grad_norm": 0.4183601140975952,
      "learning_rate": 1.4605180156910152e-05,
      "loss": 0.0045,
      "step": 2470
    },
    {
      "epoch": 0.6310432569974554,
      "grad_norm": 0.3137657642364502,
      "learning_rate": 1.4434338211381147e-05,
      "loss": 0.0182,
      "step": 2480
    },
    {
      "epoch": 0.6335877862595419,
      "grad_norm": 0.027838747948408127,
      "learning_rate": 1.4263935345778202e-05,
      "loss": 0.0107,
      "step": 2490
    },
    {
      "epoch": 0.6361323155216285,
      "grad_norm": 0.5097953677177429,
      "learning_rate": 1.4093985003332392e-05,
      "loss": 0.0055,
      "step": 2500
    },
    {
      "epoch": 0.6361323155216285,
      "eval_loss": 0.02573392726480961,
      "eval_runtime": 134.0752,
      "eval_samples_per_second": 59.534,
      "eval_steps_per_second": 0.47,
      "step": 2500
    },
    {
      "epoch": 0.638676844783715,
      "grad_norm": 0.6618711948394775,
      "learning_rate": 1.3924500591574877e-05,
      "loss": 0.0124,
      "step": 2510
    },
    {
      "epoch": 0.6412213740458015,
      "grad_norm": 0.44481927156448364,
      "learning_rate": 1.3755495481279123e-05,
      "loss": 0.0059,
      "step": 2520
    },
    {
      "epoch": 0.6437659033078881,
      "grad_norm": 0.670520544052124,
      "learning_rate": 1.3586983005406099e-05,
      "loss": 0.0102,
      "step": 2530
    },
    {
      "epoch": 0.6463104325699746,
      "grad_norm": 0.0075628384947776794,
      "learning_rate": 1.3418976458052386e-05,
      "loss": 0.0054,
      "step": 2540
    },
    {
      "epoch": 0.648854961832061,
      "grad_norm": 0.7156027555465698,
      "learning_rate": 1.3251489093401446e-05,
      "loss": 0.0082,
      "step": 2550
    },
    {
      "epoch": 0.6513994910941476,
      "grad_norm": 0.313753604888916,
      "learning_rate": 1.3084534124677952e-05,
      "loss": 0.0234,
      "step": 2560
    },
    {
      "epoch": 0.6539440203562341,
      "grad_norm": 0.3655739426612854,
      "learning_rate": 1.2918124723105412e-05,
      "loss": 0.0095,
      "step": 2570
    },
    {
      "epoch": 0.6564885496183206,
      "grad_norm": 0.047630202025175095,
      "learning_rate": 1.2752274016867035e-05,
      "loss": 0.0119,
      "step": 2580
    },
    {
      "epoch": 0.6590330788804071,
      "grad_norm": 0.5230339169502258,
      "learning_rate": 1.2586995090070092e-05,
      "loss": 0.0129,
      "step": 2590
    },
    {
      "epoch": 0.6615776081424937,
      "grad_norm": 0.06909344345331192,
      "learning_rate": 1.242230098171366e-05,
      "loss": 0.0044,
      "step": 2600
    },
    {
      "epoch": 0.6641221374045801,
      "grad_norm": 0.022904975339770317,
      "learning_rate": 1.225820468465999e-05,
      "loss": 0.01,
      "step": 2610
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.09478318691253662,
      "learning_rate": 1.2094719144609473e-05,
      "loss": 0.0053,
      "step": 2620
    },
    {
      "epoch": 0.6692111959287532,
      "grad_norm": 0.023772576823830605,
      "learning_rate": 1.1931857259079334e-05,
      "loss": 0.0037,
      "step": 2630
    },
    {
      "epoch": 0.6717557251908397,
      "grad_norm": 0.4528583586215973,
      "learning_rate": 1.1769631876386159e-05,
      "loss": 0.0078,
      "step": 2640
    },
    {
      "epoch": 0.6743002544529262,
      "grad_norm": 0.0382055826485157,
      "learning_rate": 1.1608055794632267e-05,
      "loss": 0.0039,
      "step": 2650
    },
    {
      "epoch": 0.6768447837150128,
      "grad_norm": 0.23594285547733307,
      "learning_rate": 1.1447141760696066e-05,
      "loss": 0.0022,
      "step": 2660
    },
    {
      "epoch": 0.6793893129770993,
      "grad_norm": 0.5700483322143555,
      "learning_rate": 1.1286902469226399e-05,
      "loss": 0.0107,
      "step": 2670
    },
    {
      "epoch": 0.6819338422391857,
      "grad_norm": 0.7145746946334839,
      "learning_rate": 1.1127350561641133e-05,
      "loss": 0.022,
      "step": 2680
    },
    {
      "epoch": 0.6844783715012722,
      "grad_norm": 1.2287328243255615,
      "learning_rate": 1.0968498625129778e-05,
      "loss": 0.0142,
      "step": 2690
    },
    {
      "epoch": 0.6870229007633588,
      "grad_norm": 1.0417256355285645,
      "learning_rate": 1.0810359191660549e-05,
      "loss": 0.0068,
      "step": 2700
    },
    {
      "epoch": 0.6895674300254453,
      "grad_norm": 1.2579175233840942,
      "learning_rate": 1.0652944736991651e-05,
      "loss": 0.0073,
      "step": 2710
    },
    {
      "epoch": 0.6921119592875318,
      "grad_norm": 0.025868605822324753,
      "learning_rate": 1.0496267679687072e-05,
      "loss": 0.0094,
      "step": 2720
    },
    {
      "epoch": 0.6946564885496184,
      "grad_norm": 0.2584488093852997,
      "learning_rate": 1.0340340380136902e-05,
      "loss": 0.0093,
      "step": 2730
    },
    {
      "epoch": 0.6972010178117048,
      "grad_norm": 0.05498351901769638,
      "learning_rate": 1.0185175139582158e-05,
      "loss": 0.0103,
      "step": 2740
    },
    {
      "epoch": 0.6997455470737913,
      "grad_norm": 0.5365030169487,
      "learning_rate": 1.0030784199144392e-05,
      "loss": 0.0039,
      "step": 2750
    },
    {
      "epoch": 0.7022900763358778,
      "grad_norm": 0.30561184883117676,
      "learning_rate": 9.87717973885989e-06,
      "loss": 0.0055,
      "step": 2760
    },
    {
      "epoch": 0.7048346055979644,
      "grad_norm": 0.609359860420227,
      "learning_rate": 9.724373876718883e-06,
      "loss": 0.0065,
      "step": 2770
    },
    {
      "epoch": 0.7073791348600509,
      "grad_norm": 0.03320237994194031,
      "learning_rate": 9.57237866770946e-06,
      "loss": 0.0008,
      "step": 2780
    },
    {
      "epoch": 0.7099236641221374,
      "grad_norm": 0.003559828968718648,
      "learning_rate": 9.421206102866607e-06,
      "loss": 0.0045,
      "step": 2790
    },
    {
      "epoch": 0.712468193384224,
      "grad_norm": 0.5287792086601257,
      "learning_rate": 9.270868108326165e-06,
      "loss": 0.0053,
      "step": 2800
    },
    {
      "epoch": 0.7150127226463104,
      "grad_norm": 0.005851399153470993,
      "learning_rate": 9.121376544383997e-06,
      "loss": 0.0047,
      "step": 2810
    },
    {
      "epoch": 0.7175572519083969,
      "grad_norm": 0.6579704880714417,
      "learning_rate": 8.972743204560336e-06,
      "loss": 0.0057,
      "step": 2820
    },
    {
      "epoch": 0.7201017811704835,
      "grad_norm": 0.0362946093082428,
      "learning_rate": 8.824979814669338e-06,
      "loss": 0.0018,
      "step": 2830
    },
    {
      "epoch": 0.72264631043257,
      "grad_norm": 0.029756495729088783,
      "learning_rate": 8.678098031894084e-06,
      "loss": 0.0134,
      "step": 2840
    },
    {
      "epoch": 0.7251908396946565,
      "grad_norm": 0.0017073224298655987,
      "learning_rate": 8.532109443866842e-06,
      "loss": 0.0041,
      "step": 2850
    },
    {
      "epoch": 0.727735368956743,
      "grad_norm": 0.3829917013645172,
      "learning_rate": 8.387025567755025e-06,
      "loss": 0.0181,
      "step": 2860
    },
    {
      "epoch": 0.7302798982188295,
      "grad_norm": 0.36170193552970886,
      "learning_rate": 8.242857849352485e-06,
      "loss": 0.0012,
      "step": 2870
    },
    {
      "epoch": 0.732824427480916,
      "grad_norm": 0.7954331040382385,
      "learning_rate": 8.099617662176635e-06,
      "loss": 0.0042,
      "step": 2880
    },
    {
      "epoch": 0.7353689567430025,
      "grad_norm": 0.434501975774765,
      "learning_rate": 7.957316306571088e-06,
      "loss": 0.0086,
      "step": 2890
    },
    {
      "epoch": 0.7379134860050891,
      "grad_norm": 0.20867472887039185,
      "learning_rate": 7.815965008814259e-06,
      "loss": 0.0132,
      "step": 2900
    },
    {
      "epoch": 0.7404580152671756,
      "grad_norm": 0.03252045810222626,
      "learning_rate": 7.675574920233635e-06,
      "loss": 0.0146,
      "step": 2910
    },
    {
      "epoch": 0.7430025445292621,
      "grad_norm": 0.08730760216712952,
      "learning_rate": 7.536157116326099e-06,
      "loss": 0.0045,
      "step": 2920
    },
    {
      "epoch": 0.7455470737913485,
      "grad_norm": 0.07691848278045654,
      "learning_rate": 7.397722595884125e-06,
      "loss": 0.0015,
      "step": 2930
    },
    {
      "epoch": 0.7480916030534351,
      "grad_norm": 1.1509069204330444,
      "learning_rate": 7.260282280128095e-06,
      "loss": 0.0073,
      "step": 2940
    },
    {
      "epoch": 0.7506361323155216,
      "grad_norm": 0.2349032759666443,
      "learning_rate": 7.123847011844731e-06,
      "loss": 0.0083,
      "step": 2950
    },
    {
      "epoch": 0.7531806615776081,
      "grad_norm": 0.436190664768219,
      "learning_rate": 6.9884275545316585e-06,
      "loss": 0.006,
      "step": 2960
    },
    {
      "epoch": 0.7557251908396947,
      "grad_norm": 0.6010274291038513,
      "learning_rate": 6.854034591548317e-06,
      "loss": 0.0053,
      "step": 2970
    },
    {
      "epoch": 0.7582697201017812,
      "grad_norm": 0.18983256816864014,
      "learning_rate": 6.7206787252730645e-06,
      "loss": 0.0081,
      "step": 2980
    },
    {
      "epoch": 0.7608142493638677,
      "grad_norm": 0.07123024016618729,
      "learning_rate": 6.588370476266843e-06,
      "loss": 0.0025,
      "step": 2990
    },
    {
      "epoch": 0.7633587786259542,
      "grad_norm": 0.5430541038513184,
      "learning_rate": 6.457120282443114e-06,
      "loss": 0.0069,
      "step": 3000
    },
    {
      "epoch": 0.7633587786259542,
      "eval_loss": 0.02698148414492607,
      "eval_runtime": 134.184,
      "eval_samples_per_second": 59.485,
      "eval_steps_per_second": 0.47,
      "step": 3000
    },
    {
      "epoch": 0.7659033078880407,
      "grad_norm": 0.1465359479188919,
      "learning_rate": 6.326938498244468e-06,
      "loss": 0.006,
      "step": 3010
    },
    {
      "epoch": 0.7684478371501272,
      "grad_norm": 0.03581090644001961,
      "learning_rate": 6.197835393825708e-06,
      "loss": 0.0061,
      "step": 3020
    },
    {
      "epoch": 0.7709923664122137,
      "grad_norm": 1.2527718544006348,
      "learning_rate": 6.0698211542436424e-06,
      "loss": 0.008,
      "step": 3030
    },
    {
      "epoch": 0.7735368956743003,
      "grad_norm": 0.4238179326057434,
      "learning_rate": 5.942905878653604e-06,
      "loss": 0.0056,
      "step": 3040
    },
    {
      "epoch": 0.7760814249363868,
      "grad_norm": 0.14762815833091736,
      "learning_rate": 5.817099579512668e-06,
      "loss": 0.0026,
      "step": 3050
    },
    {
      "epoch": 0.7786259541984732,
      "grad_norm": 0.9028608798980713,
      "learning_rate": 5.692412181789815e-06,
      "loss": 0.0092,
      "step": 3060
    },
    {
      "epoch": 0.7811704834605598,
      "grad_norm": 0.013088079169392586,
      "learning_rate": 5.568853522182875e-06,
      "loss": 0.0173,
      "step": 3070
    },
    {
      "epoch": 0.7837150127226463,
      "grad_norm": 0.1991066336631775,
      "learning_rate": 5.446433348342585e-06,
      "loss": 0.0047,
      "step": 3080
    },
    {
      "epoch": 0.7862595419847328,
      "grad_norm": 0.01575920730829239,
      "learning_rate": 5.325161318103513e-06,
      "loss": 0.0066,
      "step": 3090
    },
    {
      "epoch": 0.7888040712468194,
      "grad_norm": 0.2835613191127777,
      "learning_rate": 5.205046998722199e-06,
      "loss": 0.0066,
      "step": 3100
    },
    {
      "epoch": 0.7913486005089059,
      "grad_norm": 0.09472717344760895,
      "learning_rate": 5.086099866122327e-06,
      "loss": 0.0062,
      "step": 3110
    },
    {
      "epoch": 0.7938931297709924,
      "grad_norm": 0.309705525636673,
      "learning_rate": 4.968329304147228e-06,
      "loss": 0.0069,
      "step": 3120
    },
    {
      "epoch": 0.7964376590330788,
      "grad_norm": 0.004742303863167763,
      "learning_rate": 4.851744603819528e-06,
      "loss": 0.0122,
      "step": 3130
    },
    {
      "epoch": 0.7989821882951654,
      "grad_norm": 0.03736419230699539,
      "learning_rate": 4.736354962608207e-06,
      "loss": 0.0058,
      "step": 3140
    },
    {
      "epoch": 0.8015267175572519,
      "grad_norm": 0.06985121965408325,
      "learning_rate": 4.6221694837029695e-06,
      "loss": 0.0037,
      "step": 3150
    },
    {
      "epoch": 0.8040712468193384,
      "grad_norm": 0.6169087290763855,
      "learning_rate": 4.509197175296103e-06,
      "loss": 0.0165,
      "step": 3160
    },
    {
      "epoch": 0.806615776081425,
      "grad_norm": 0.05300121381878853,
      "learning_rate": 4.397446949871833e-06,
      "loss": 0.0071,
      "step": 3170
    },
    {
      "epoch": 0.8091603053435115,
      "grad_norm": 0.35867124795913696,
      "learning_rate": 4.286927623503163e-06,
      "loss": 0.0069,
      "step": 3180
    },
    {
      "epoch": 0.811704834605598,
      "grad_norm": 0.024535993114113808,
      "learning_rate": 4.177647915156424e-06,
      "loss": 0.0125,
      "step": 3190
    },
    {
      "epoch": 0.8142493638676844,
      "grad_norm": 1.021812915802002,
      "learning_rate": 4.069616446003359e-06,
      "loss": 0.0278,
      "step": 3200
    },
    {
      "epoch": 0.816793893129771,
      "grad_norm": 0.030973881483078003,
      "learning_rate": 3.962841738741066e-06,
      "loss": 0.001,
      "step": 3210
    },
    {
      "epoch": 0.8193384223918575,
      "grad_norm": 0.07177729159593582,
      "learning_rate": 3.8573322169195714e-06,
      "loss": 0.0016,
      "step": 3220
    },
    {
      "epoch": 0.821882951653944,
      "grad_norm": 0.17373861372470856,
      "learning_rate": 3.753096204277338e-06,
      "loss": 0.0137,
      "step": 3230
    },
    {
      "epoch": 0.8244274809160306,
      "grad_norm": 0.078709177672863,
      "learning_rate": 3.650141924084558e-06,
      "loss": 0.0055,
      "step": 3240
    },
    {
      "epoch": 0.8269720101781171,
      "grad_norm": 0.6328759789466858,
      "learning_rate": 3.5484774984944338e-06,
      "loss": 0.0064,
      "step": 3250
    },
    {
      "epoch": 0.8295165394402035,
      "grad_norm": 0.5705218315124512,
      "learning_rate": 3.448110947902421e-06,
      "loss": 0.0032,
      "step": 3260
    },
    {
      "epoch": 0.8320610687022901,
      "grad_norm": 0.49125197529792786,
      "learning_rate": 3.349050190313461e-06,
      "loss": 0.0061,
      "step": 3270
    },
    {
      "epoch": 0.8346055979643766,
      "grad_norm": 0.7902815341949463,
      "learning_rate": 3.251303040717375e-06,
      "loss": 0.0178,
      "step": 3280
    },
    {
      "epoch": 0.8371501272264631,
      "grad_norm": 0.218702495098114,
      "learning_rate": 3.154877210472258e-06,
| "loss": 0.0046, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 0.8396946564885496, | |
| "grad_norm": 0.6285209059715271, | |
| "learning_rate": 3.059780306696205e-06, | |
| "loss": 0.0116, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.8422391857506362, | |
| "grad_norm": 0.9174745678901672, | |
| "learning_rate": 2.966019831667113e-06, | |
| "loss": 0.0025, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 0.8447837150127226, | |
| "grad_norm": 0.16245588660240173, | |
| "learning_rate": 2.873603182230871e-06, | |
| "loss": 0.0061, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 0.8473282442748091, | |
| "grad_norm": 0.22902363538742065, | |
| "learning_rate": 2.782537649217758e-06, | |
| "loss": 0.0049, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.8498727735368957, | |
| "grad_norm": 0.1257568746805191, | |
| "learning_rate": 2.6928304168673334e-06, | |
| "loss": 0.0125, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.8524173027989822, | |
| "grad_norm": 0.5556793808937073, | |
| "learning_rate": 2.6044885622616002e-06, | |
| "loss": 0.0055, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.8549618320610687, | |
| "grad_norm": 0.29589366912841797, | |
| "learning_rate": 2.5175190547667392e-06, | |
| "loss": 0.015, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.8575063613231552, | |
| "grad_norm": 0.09268206357955933, | |
| "learning_rate": 2.431928755483255e-06, | |
| "loss": 0.0062, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.8600508905852418, | |
| "grad_norm": 0.3186149597167969, | |
| "learning_rate": 2.3477244167047043e-06, | |
| "loss": 0.0074, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.8625954198473282, | |
| "grad_norm": 0.012010197155177593, | |
| "learning_rate": 2.2649126813850255e-06, | |
| "loss": 0.0022, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.8651399491094147, | |
| "grad_norm": 0.048557162284851074, | |
| "learning_rate": 2.183500082614436e-06, | |
| "loss": 0.0042, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.8676844783715013, | |
| "grad_norm": 0.635742723941803, | |
| "learning_rate": 2.1034930431040636e-06, | |
| "loss": 0.0126, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.8702290076335878, | |
| "grad_norm": 0.04084605723619461, | |
| "learning_rate": 2.0248978746792103e-06, | |
| "loss": 0.0035, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.8727735368956743, | |
| "grad_norm": 0.38855063915252686, | |
| "learning_rate": 1.9477207777814657e-06, | |
| "loss": 0.0035, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.8753180661577609, | |
| "grad_norm": 0.002912223804742098, | |
| "learning_rate": 1.8719678409794895e-06, | |
| "loss": 0.0021, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.8778625954198473, | |
| "grad_norm": 0.007197024300694466, | |
| "learning_rate": 1.7976450404887247e-06, | |
| "loss": 0.0084, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.8804071246819338, | |
| "grad_norm": 0.0007993974722921848, | |
| "learning_rate": 1.7247582396998997e-06, | |
| "loss": 0.0047, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.8829516539440203, | |
| "grad_norm": 0.03334103897213936, | |
| "learning_rate": 1.6533131887164678e-06, | |
| "loss": 0.0029, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.8854961832061069, | |
| "grad_norm": 0.23546229302883148, | |
| "learning_rate": 1.5833155239009945e-06, | |
| "loss": 0.0032, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.8880407124681934, | |
| "grad_norm": 0.6449694633483887, | |
| "learning_rate": 1.5147707674304645e-06, | |
| "loss": 0.0091, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.8905852417302799, | |
| "grad_norm": 0.12207461148500443, | |
| "learning_rate": 1.4476843268606766e-06, | |
| "loss": 0.017, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.8905852417302799, | |
| "eval_loss": 0.026228830218315125, | |
| "eval_runtime": 134.0402, | |
| "eval_samples_per_second": 59.549, | |
| "eval_steps_per_second": 0.47, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.8931297709923665, | |
| "grad_norm": 0.020415719598531723, | |
| "learning_rate": 1.3820614946995825e-06, | |
| "loss": 0.0012, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.8956743002544529, | |
| "grad_norm": 0.2966751158237457, | |
| "learning_rate": 1.3179074479898147e-06, | |
| "loss": 0.0023, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.8982188295165394, | |
| "grad_norm": 0.23145350813865662, | |
| "learning_rate": 1.2552272479002191e-06, | |
| "loss": 0.0048, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.9007633587786259, | |
| "grad_norm": 0.5908975005149841, | |
| "learning_rate": 1.1940258393266046e-06, | |
| "loss": 0.0074, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.9033078880407125, | |
| "grad_norm": 0.014466706663370132, | |
| "learning_rate": 1.1343080505016113e-06, | |
| "loss": 0.0049, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.905852417302799, | |
| "grad_norm": 0.12114397436380386, | |
| "learning_rate": 1.0760785926138317e-06, | |
| "loss": 0.0061, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.9083969465648855, | |
| "grad_norm": 0.013869269751012325, | |
| "learning_rate": 1.0193420594361192e-06, | |
| "loss": 0.0066, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.910941475826972, | |
| "grad_norm": 0.0026755717117339373, | |
| "learning_rate": 9.64102926963204e-07, | |
| "loss": 0.0044, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.9134860050890585, | |
| "grad_norm": 0.3063240647315979, | |
| "learning_rate": 9.10365553058552e-07, | |
| "loss": 0.0062, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.916030534351145, | |
| "grad_norm": 0.40372052788734436, | |
| "learning_rate": 8.581341771105855e-07, | |
| "loss": 0.0031, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.9185750636132316, | |
| "grad_norm": 0.002851370954886079, | |
| "learning_rate": 8.074129196982383e-07, | |
| "loss": 0.0003, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.9211195928753181, | |
| "grad_norm": 0.01199525035917759, | |
| "learning_rate": 7.582057822658617e-07, | |
| "loss": 0.0072, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.9236641221374046, | |
| "grad_norm": 0.07732322812080383, | |
| "learning_rate": 7.105166468075708e-07, | |
| "loss": 0.0106, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.926208651399491, | |
| "grad_norm": 0.014808997511863708, | |
| "learning_rate": 6.643492755609582e-07, | |
| "loss": 0.009, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.9287531806615776, | |
| "grad_norm": 0.010980433784425259, | |
| "learning_rate": 6.197073107103246e-07, | |
| "loss": 0.0053, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.9312977099236641, | |
| "grad_norm": 0.02744114026427269, | |
| "learning_rate": 5.76594274099318e-07, | |
| "loss": 0.0053, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.9338422391857506, | |
| "grad_norm": 0.015632448717951775, | |
| "learning_rate": 5.350135669531021e-07, | |
| "loss": 0.0015, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.9363867684478372, | |
| "grad_norm": 0.005407819990068674, | |
| "learning_rate": 4.94968469610031e-07, | |
| "loss": 0.0045, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.9389312977099237, | |
| "grad_norm": 0.0825563594698906, | |
| "learning_rate": 4.5646214126285003e-07, | |
| "loss": 0.0057, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.9414758269720102, | |
| "grad_norm": 0.2840502858161926, | |
| "learning_rate": 4.19497619709488e-07, | |
| "loss": 0.0094, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.9440203562340967, | |
| "grad_norm": 0.017108524218201637, | |
| "learning_rate": 3.8407782111336887e-07, | |
| "loss": 0.0021, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.9465648854961832, | |
| "grad_norm": 0.06506211310625076, | |
| "learning_rate": 3.5020553977338944e-07, | |
| "loss": 0.0067, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.9491094147582697, | |
| "grad_norm": 0.001275021000765264, | |
| "learning_rate": 3.1788344790343983e-07, | |
| "loss": 0.0093, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.9516539440203562, | |
| "grad_norm": 0.3302850127220154, | |
| "learning_rate": 2.8711409542162337e-07, | |
| "loss": 0.0045, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.9541984732824428, | |
| "grad_norm": 0.2824239432811737, | |
| "learning_rate": 2.5789990974905976e-07, | |
| "loss": 0.0019, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.9567430025445293, | |
| "grad_norm": 0.015957772731781006, | |
| "learning_rate": 2.3024319561841367e-07, | |
| "loss": 0.0047, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.9592875318066157, | |
| "grad_norm": 0.0077671995386481285, | |
| "learning_rate": 2.0414613489204482e-07, | |
| "loss": 0.0076, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.9618320610687023, | |
| "grad_norm": 0.11338448524475098, | |
| "learning_rate": 1.7961078638989882e-07, | |
| "loss": 0.0073, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.9643765903307888, | |
| "grad_norm": 0.10897558927536011, | |
| "learning_rate": 1.5663908572707942e-07, | |
| "loss": 0.0035, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.9669211195928753, | |
| "grad_norm": 0.015073304064571857, | |
| "learning_rate": 1.3523284516113955e-07, | |
| "loss": 0.0047, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.9694656488549618, | |
| "grad_norm": 0.3454066216945648, | |
| "learning_rate": 1.1539375344912229e-07, | |
| "loss": 0.0109, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.9720101781170484, | |
| "grad_norm": 0.015719274058938026, | |
| "learning_rate": 9.712337571431862e-08, | |
| "loss": 0.0028, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.9745547073791349, | |
| "grad_norm": 0.24041391909122467, | |
| "learning_rate": 8.042315332280836e-08, | |
| "loss": 0.0161, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.9770992366412213, | |
| "grad_norm": 0.025461114943027496, | |
| "learning_rate": 6.529440376974227e-08, | |
| "loss": 0.0144, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.9796437659033079, | |
| "grad_norm": 0.05264173448085785, | |
| "learning_rate": 5.173832057540518e-08, | |
| "loss": 0.0083, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.9821882951653944, | |
| "grad_norm": 0.00923751387745142, | |
| "learning_rate": 3.975597319105129e-08, | |
| "loss": 0.0062, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.9847328244274809, | |
| "grad_norm": 0.21058113873004913, | |
| "learning_rate": 2.9348306914551704e-08, | |
| "loss": 0.0017, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.9872773536895675, | |
| "grad_norm": 0.06167938560247421, | |
| "learning_rate": 2.051614281579406e-08, | |
| "loss": 0.0055, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.989821882951654, | |
| "grad_norm": 0.030828675255179405, | |
| "learning_rate": 1.3260177671934327e-08, | |
| "loss": 0.0052, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.9923664122137404, | |
| "grad_norm": 0.0021713576279580593, | |
| "learning_rate": 7.580983912405249e-09, | |
| "loss": 0.0033, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.9949109414758269, | |
| "grad_norm": 0.06376785784959793, | |
| "learning_rate": 3.4790095737680107e-09, | |
| "loss": 0.0032, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.9974554707379135, | |
| "grad_norm": 0.07311426848173141, | |
| "learning_rate": 9.545782643716195e-10, | |
| "loss": 0.0011, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 5.7562898291507736e-05, | |
| "learning_rate": 7.88913880445108e-12, | |
| "loss": 0.0041, | |
| "step": 3930 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 3930, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.273606668827065e+18, | |
| "train_batch_size": 128, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |