| { |
| "best_global_step": 2601, |
| "best_metric": 0.8139059304703476, |
| "best_model_checkpoint": "RALL_RGBCROP_Aug16F-WD01/checkpoint-2601", |
| "epoch": 11.081744656268054, |
| "eval_steps": 500, |
| "global_step": 3462, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0028885037550548816, |
| "grad_norm": 5.42613410949707, |
| "learning_rate": 1.2968299711815564e-07, |
| "loss": 0.6979, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.005777007510109763, |
| "grad_norm": 7.1514387130737305, |
| "learning_rate": 2.7377521613832854e-07, |
| "loss": 0.7302, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.008665511265164644, |
| "grad_norm": 5.011861801147461, |
| "learning_rate": 4.1786743515850145e-07, |
| "loss": 0.7362, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.011554015020219527, |
| "grad_norm": 5.10536527633667, |
| "learning_rate": 5.619596541786745e-07, |
| "loss": 0.6468, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.014442518775274409, |
| "grad_norm": 4.414210796356201, |
| "learning_rate": 7.060518731988474e-07, |
| "loss": 0.6643, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.01733102253032929, |
| "grad_norm": 3.883697748184204, |
| "learning_rate": 8.501440922190203e-07, |
| "loss": 0.7357, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.020219526285384173, |
| "grad_norm": 6.038342475891113, |
| "learning_rate": 9.94236311239193e-07, |
| "loss": 0.713, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.023108030040439053, |
| "grad_norm": 4.572866439819336, |
| "learning_rate": 1.138328530259366e-06, |
| "loss": 0.6776, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.025996533795493933, |
| "grad_norm": 4.689591407775879, |
| "learning_rate": 1.282420749279539e-06, |
| "loss": 0.7337, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.028885037550548817, |
| "grad_norm": 4.543818950653076, |
| "learning_rate": 1.426512968299712e-06, |
| "loss": 0.6663, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.0317735413056037, |
| "grad_norm": 4.83401346206665, |
| "learning_rate": 1.5706051873198849e-06, |
| "loss": 0.6594, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.03466204506065858, |
| "grad_norm": 4.072539806365967, |
| "learning_rate": 1.7146974063400579e-06, |
| "loss": 0.6605, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.03755054881571346, |
| "grad_norm": 5.373439311981201, |
| "learning_rate": 1.8587896253602309e-06, |
| "loss": 0.6367, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.040439052570768345, |
| "grad_norm": 5.04488468170166, |
| "learning_rate": 2.0028818443804035e-06, |
| "loss": 0.6649, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.043327556325823226, |
| "grad_norm": 3.912029504776001, |
| "learning_rate": 2.1469740634005763e-06, |
| "loss": 0.6293, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.046216060080878106, |
| "grad_norm": 4.424826622009277, |
| "learning_rate": 2.2910662824207495e-06, |
| "loss": 0.6533, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.049104563835932986, |
| "grad_norm": 4.445134162902832, |
| "learning_rate": 2.4351585014409223e-06, |
| "loss": 0.616, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.05199306759098787, |
| "grad_norm": 5.449381351470947, |
| "learning_rate": 2.579250720461095e-06, |
| "loss": 0.6339, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.05488157134604275, |
| "grad_norm": 3.924225330352783, |
| "learning_rate": 2.7233429394812683e-06, |
| "loss": 0.5626, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.057770075101097634, |
| "grad_norm": 5.135080814361572, |
| "learning_rate": 2.867435158501441e-06, |
| "loss": 0.5723, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.060658578856152515, |
| "grad_norm": 4.920149326324463, |
| "learning_rate": 3.0115273775216143e-06, |
| "loss": 0.5679, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.0635470826112074, |
| "grad_norm": 7.151332378387451, |
| "learning_rate": 3.1556195965417867e-06, |
| "loss": 0.58, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.06643558636626228, |
| "grad_norm": 5.037742614746094, |
| "learning_rate": 3.29971181556196e-06, |
| "loss": 0.5527, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.06932409012131716, |
| "grad_norm": 4.7811102867126465, |
| "learning_rate": 3.4438040345821327e-06, |
| "loss": 0.512, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.07221259387637204, |
| "grad_norm": 4.887838840484619, |
| "learning_rate": 3.587896253602306e-06, |
| "loss": 0.4884, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.07510109763142692, |
| "grad_norm": 4.093018054962158, |
| "learning_rate": 3.7319884726224787e-06, |
| "loss": 0.4964, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.0779896013864818, |
| "grad_norm": 4.6939826011657715, |
| "learning_rate": 3.876080691642652e-06, |
| "loss": 0.4429, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.08087810514153669, |
| "grad_norm": 4.340346813201904, |
| "learning_rate": 4.020172910662825e-06, |
| "loss": 0.4949, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.08347775852108608, |
| "eval_accuracy": 0.6973415132924335, |
| "eval_loss": 0.5839780569076538, |
| "eval_runtime": 83.801, |
| "eval_samples_per_second": 5.835, |
| "eval_steps_per_second": 0.74, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.0002888503755054, |
| "grad_norm": 7.082851886749268, |
| "learning_rate": 4.1642651296829975e-06, |
| "loss": 0.462, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.0031773541305604, |
| "grad_norm": 8.205607414245605, |
| "learning_rate": 4.30835734870317e-06, |
| "loss": 0.443, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.0060658578856152, |
| "grad_norm": 6.79085111618042, |
| "learning_rate": 4.452449567723343e-06, |
| "loss": 0.4279, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.0089543616406702, |
| "grad_norm": 5.912543773651123, |
| "learning_rate": 4.596541786743517e-06, |
| "loss": 0.4448, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.011842865395725, |
| "grad_norm": 7.633585453033447, |
| "learning_rate": 4.740634005763689e-06, |
| "loss": 0.3472, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.01473136915078, |
| "grad_norm": 2.95974063873291, |
| "learning_rate": 4.884726224783862e-06, |
| "loss": 0.3795, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.0176198729058348, |
| "grad_norm": 9.113896369934082, |
| "learning_rate": 4.996789727126807e-06, |
| "loss": 0.3192, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.0205083766608896, |
| "grad_norm": 6.554640769958496, |
| "learning_rate": 4.980738362760835e-06, |
| "loss": 0.3585, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.0233968804159446, |
| "grad_norm": 3.178997278213501, |
| "learning_rate": 4.9646869983948645e-06, |
| "loss": 0.3187, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.0262853841709993, |
| "grad_norm": 9.070796012878418, |
| "learning_rate": 4.948635634028893e-06, |
| "loss": 0.3489, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.0291738879260544, |
| "grad_norm": 8.085728645324707, |
| "learning_rate": 4.9325842696629215e-06, |
| "loss": 0.2994, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.0320623916811091, |
| "grad_norm": 3.467472791671753, |
| "learning_rate": 4.916532905296951e-06, |
| "loss": 0.2839, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.0349508954361641, |
| "grad_norm": 11.494449615478516, |
| "learning_rate": 4.90048154093098e-06, |
| "loss": 0.2662, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.037839399191219, |
| "grad_norm": 10.576976776123047, |
| "learning_rate": 4.884430176565008e-06, |
| "loss": 0.3153, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.040727902946274, |
| "grad_norm": 10.300637245178223, |
| "learning_rate": 4.868378812199037e-06, |
| "loss": 0.295, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.0436164067013287, |
| "grad_norm": 2.993881940841675, |
| "learning_rate": 4.8523274478330665e-06, |
| "loss": 0.2122, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.0465049104563835, |
| "grad_norm": 7.386906623840332, |
| "learning_rate": 4.836276083467095e-06, |
| "loss": 0.3317, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.0493934142114385, |
| "grad_norm": 2.5367188453674316, |
| "learning_rate": 4.820224719101124e-06, |
| "loss": 0.2116, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.0522819179664933, |
| "grad_norm": 9.584460258483887, |
| "learning_rate": 4.804173354735153e-06, |
| "loss": 0.2566, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.0551704217215483, |
| "grad_norm": 8.785528182983398, |
| "learning_rate": 4.788121990369181e-06, |
| "loss": 0.2509, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.058058925476603, |
| "grad_norm": 8.40345573425293, |
| "learning_rate": 4.772070626003211e-06, |
| "loss": 0.3175, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.060947429231658, |
| "grad_norm": 5.987034320831299, |
| "learning_rate": 4.75601926163724e-06, |
| "loss": 0.2683, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.0638359329867129, |
| "grad_norm": 4.399767875671387, |
| "learning_rate": 4.7399678972712685e-06, |
| "loss": 0.1927, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.0667244367417679, |
| "grad_norm": 12.556426048278809, |
| "learning_rate": 4.723916532905297e-06, |
| "loss": 0.3256, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.0696129404968227, |
| "grad_norm": 6.317582607269287, |
| "learning_rate": 4.707865168539326e-06, |
| "loss": 0.1805, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.0725014442518774, |
| "grad_norm": 7.772289752960205, |
| "learning_rate": 4.691813804173355e-06, |
| "loss": 0.192, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.0753899480069324, |
| "grad_norm": 7.010458946228027, |
| "learning_rate": 4.675762439807384e-06, |
| "loss": 0.2454, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.0782784517619872, |
| "grad_norm": 6.772615909576416, |
| "learning_rate": 4.659711075441413e-06, |
| "loss": 0.1515, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.0811669555170422, |
| "grad_norm": 9.612960815429688, |
| "learning_rate": 4.643659711075442e-06, |
| "loss": 0.2616, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.0834777585210862, |
| "eval_accuracy": 0.7955010224948875, |
| "eval_loss": 0.530440628528595, |
| "eval_runtime": 77.7344, |
| "eval_samples_per_second": 6.291, |
| "eval_steps_per_second": 0.798, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.000577700751011, |
| "grad_norm": 4.917853832244873, |
| "learning_rate": 4.6276083467094705e-06, |
| "loss": 0.1349, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.003466204506066, |
| "grad_norm": 4.1992974281311035, |
| "learning_rate": 4.6115569823435e-06, |
| "loss": 0.1807, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.006354708261121, |
| "grad_norm": 18.553295135498047, |
| "learning_rate": 4.595505617977528e-06, |
| "loss": 0.2002, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.009243212016176, |
| "grad_norm": 7.829350471496582, |
| "learning_rate": 4.579454253611557e-06, |
| "loss": 0.1349, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.0121317157712304, |
| "grad_norm": 3.751248836517334, |
| "learning_rate": 4.563402889245586e-06, |
| "loss": 0.1752, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.0150202195262854, |
| "grad_norm": 12.89821720123291, |
| "learning_rate": 4.5473515248796155e-06, |
| "loss": 0.1294, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.0179087232813404, |
| "grad_norm": 11.471810340881348, |
| "learning_rate": 4.531300160513644e-06, |
| "loss": 0.0896, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.020797227036395, |
| "grad_norm": 0.9516859650611877, |
| "learning_rate": 4.515248796147673e-06, |
| "loss": 0.141, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.02368573079145, |
| "grad_norm": 11.20914363861084, |
| "learning_rate": 4.499197431781702e-06, |
| "loss": 0.1688, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.026574234546505, |
| "grad_norm": 11.292973518371582, |
| "learning_rate": 4.48314606741573e-06, |
| "loss": 0.1823, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.02946273830156, |
| "grad_norm": 9.095600128173828, |
| "learning_rate": 4.46709470304976e-06, |
| "loss": 0.1181, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.0323512420566145, |
| "grad_norm": 0.7727231383323669, |
| "learning_rate": 4.451043338683789e-06, |
| "loss": 0.0527, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.0352397458116696, |
| "grad_norm": 3.2032511234283447, |
| "learning_rate": 4.4349919743178176e-06, |
| "loss": 0.077, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.0381282495667246, |
| "grad_norm": 10.461459159851074, |
| "learning_rate": 4.418940609951846e-06, |
| "loss": 0.0952, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.041016753321779, |
| "grad_norm": 7.884979248046875, |
| "learning_rate": 4.402889245585875e-06, |
| "loss": 0.0498, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.043905257076834, |
| "grad_norm": 1.1473121643066406, |
| "learning_rate": 4.386837881219904e-06, |
| "loss": 0.1644, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.046793760831889, |
| "grad_norm": 3.6056792736053467, |
| "learning_rate": 4.370786516853933e-06, |
| "loss": 0.1515, |
| "step": 740 |
| }, |
| { |
| "epoch": 2.049682264586944, |
| "grad_norm": 1.1584240198135376, |
| "learning_rate": 4.354735152487962e-06, |
| "loss": 0.0563, |
| "step": 750 |
| }, |
| { |
| "epoch": 2.0525707683419987, |
| "grad_norm": 6.910070896148682, |
| "learning_rate": 4.33868378812199e-06, |
| "loss": 0.0525, |
| "step": 760 |
| }, |
| { |
| "epoch": 2.0554592720970537, |
| "grad_norm": 1.379238486289978, |
| "learning_rate": 4.32263242375602e-06, |
| "loss": 0.0915, |
| "step": 770 |
| }, |
| { |
| "epoch": 2.0583477758521087, |
| "grad_norm": 23.958478927612305, |
| "learning_rate": 4.306581059390049e-06, |
| "loss": 0.0828, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.0612362796071633, |
| "grad_norm": 0.20569536089897156, |
| "learning_rate": 4.290529695024077e-06, |
| "loss": 0.0821, |
| "step": 790 |
| }, |
| { |
| "epoch": 2.0641247833622183, |
| "grad_norm": 0.5140540599822998, |
| "learning_rate": 4.274478330658106e-06, |
| "loss": 0.0866, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.0670132871172733, |
| "grad_norm": 10.027976989746094, |
| "learning_rate": 4.258426966292135e-06, |
| "loss": 0.217, |
| "step": 810 |
| }, |
| { |
| "epoch": 2.0699017908723283, |
| "grad_norm": 6.995995998382568, |
| "learning_rate": 4.242375601926164e-06, |
| "loss": 0.0857, |
| "step": 820 |
| }, |
| { |
| "epoch": 2.072790294627383, |
| "grad_norm": 0.4062481224536896, |
| "learning_rate": 4.226324237560193e-06, |
| "loss": 0.0597, |
| "step": 830 |
| }, |
| { |
| "epoch": 2.075678798382438, |
| "grad_norm": 4.28646993637085, |
| "learning_rate": 4.210272873194222e-06, |
| "loss": 0.0872, |
| "step": 840 |
| }, |
| { |
| "epoch": 2.078567302137493, |
| "grad_norm": 9.371219635009766, |
| "learning_rate": 4.194221508828251e-06, |
| "loss": 0.0373, |
| "step": 850 |
| }, |
| { |
| "epoch": 2.081455805892548, |
| "grad_norm": 13.69465446472168, |
| "learning_rate": 4.1781701444622794e-06, |
| "loss": 0.078, |
| "step": 860 |
| }, |
| { |
| "epoch": 2.083477758521086, |
| "eval_accuracy": 0.7975460122699386, |
| "eval_loss": 0.7102476358413696, |
| "eval_runtime": 69.3294, |
| "eval_samples_per_second": 7.053, |
| "eval_steps_per_second": 0.894, |
| "step": 867 |
| }, |
| { |
| "epoch": 3.0008665511265167, |
| "grad_norm": 0.9676089882850647, |
| "learning_rate": 4.162118780096309e-06, |
| "loss": 0.1413, |
| "step": 870 |
| }, |
| { |
| "epoch": 3.0037550548815712, |
| "grad_norm": 0.3187597095966339, |
| "learning_rate": 4.146067415730337e-06, |
| "loss": 0.0649, |
| "step": 880 |
| }, |
| { |
| "epoch": 3.0066435586366262, |
| "grad_norm": 10.905332565307617, |
| "learning_rate": 4.130016051364366e-06, |
| "loss": 0.0185, |
| "step": 890 |
| }, |
| { |
| "epoch": 3.0095320623916813, |
| "grad_norm": 2.329608917236328, |
| "learning_rate": 4.113964686998395e-06, |
| "loss": 0.0381, |
| "step": 900 |
| }, |
| { |
| "epoch": 3.012420566146736, |
| "grad_norm": 11.575812339782715, |
| "learning_rate": 4.0979133226324245e-06, |
| "loss": 0.0537, |
| "step": 910 |
| }, |
| { |
| "epoch": 3.015309069901791, |
| "grad_norm": 1.2669545412063599, |
| "learning_rate": 4.081861958266453e-06, |
| "loss": 0.0144, |
| "step": 920 |
| }, |
| { |
| "epoch": 3.018197573656846, |
| "grad_norm": 0.1274038404226303, |
| "learning_rate": 4.0658105939004815e-06, |
| "loss": 0.0468, |
| "step": 930 |
| }, |
| { |
| "epoch": 3.021086077411901, |
| "grad_norm": 0.11354996263980865, |
| "learning_rate": 4.049759229534511e-06, |
| "loss": 0.0452, |
| "step": 940 |
| }, |
| { |
| "epoch": 3.0239745811669554, |
| "grad_norm": 0.11091677844524384, |
| "learning_rate": 4.033707865168539e-06, |
| "loss": 0.0251, |
| "step": 950 |
| }, |
| { |
| "epoch": 3.0268630849220104, |
| "grad_norm": 0.12637047469615936, |
| "learning_rate": 4.017656500802569e-06, |
| "loss": 0.054, |
| "step": 960 |
| }, |
| { |
| "epoch": 3.0297515886770654, |
| "grad_norm": 3.8349571228027344, |
| "learning_rate": 4.001605136436598e-06, |
| "loss": 0.0724, |
| "step": 970 |
| }, |
| { |
| "epoch": 3.03264009243212, |
| "grad_norm": 2.915048360824585, |
| "learning_rate": 3.9855537720706265e-06, |
| "loss": 0.0171, |
| "step": 980 |
| }, |
| { |
| "epoch": 3.035528596187175, |
| "grad_norm": 0.14977912604808807, |
| "learning_rate": 3.969502407704655e-06, |
| "loss": 0.0054, |
| "step": 990 |
| }, |
| { |
| "epoch": 3.03841709994223, |
| "grad_norm": 0.3599390983581543, |
| "learning_rate": 3.953451043338684e-06, |
| "loss": 0.0537, |
| "step": 1000 |
| }, |
| { |
| "epoch": 3.041305603697285, |
| "grad_norm": 0.08736338466405869, |
| "learning_rate": 3.937399678972713e-06, |
| "loss": 0.0324, |
| "step": 1010 |
| }, |
| { |
| "epoch": 3.0441941074523395, |
| "grad_norm": 4.6888322830200195, |
| "learning_rate": 3.921348314606742e-06, |
| "loss": 0.0271, |
| "step": 1020 |
| }, |
| { |
| "epoch": 3.0470826112073945, |
| "grad_norm": 0.21701326966285706, |
| "learning_rate": 3.905296950240771e-06, |
| "loss": 0.0253, |
| "step": 1030 |
| }, |
| { |
| "epoch": 3.0499711149624495, |
| "grad_norm": 1.4320465326309204, |
| "learning_rate": 3.8892455858748e-06, |
| "loss": 0.0311, |
| "step": 1040 |
| }, |
| { |
| "epoch": 3.0528596187175046, |
| "grad_norm": 0.6287254095077515, |
| "learning_rate": 3.8731942215088285e-06, |
| "loss": 0.0027, |
| "step": 1050 |
| }, |
| { |
| "epoch": 3.055748122472559, |
| "grad_norm": 0.17181923985481262, |
| "learning_rate": 3.857142857142858e-06, |
| "loss": 0.063, |
| "step": 1060 |
| }, |
| { |
| "epoch": 3.058636626227614, |
| "grad_norm": 0.20554514229297638, |
| "learning_rate": 3.841091492776886e-06, |
| "loss": 0.0136, |
| "step": 1070 |
| }, |
| { |
| "epoch": 3.061525129982669, |
| "grad_norm": 0.05833299085497856, |
| "learning_rate": 3.825040128410915e-06, |
| "loss": 0.0192, |
| "step": 1080 |
| }, |
| { |
| "epoch": 3.0644136337377237, |
| "grad_norm": 0.08491459488868713, |
| "learning_rate": 3.808988764044944e-06, |
| "loss": 0.0216, |
| "step": 1090 |
| }, |
| { |
| "epoch": 3.0673021374927787, |
| "grad_norm": 30.636640548706055, |
| "learning_rate": 3.792937399678973e-06, |
| "loss": 0.0334, |
| "step": 1100 |
| }, |
| { |
| "epoch": 3.0701906412478337, |
| "grad_norm": 0.24978935718536377, |
| "learning_rate": 3.776886035313002e-06, |
| "loss": 0.0338, |
| "step": 1110 |
| }, |
| { |
| "epoch": 3.0730791450028887, |
| "grad_norm": 6.24122428894043, |
| "learning_rate": 3.7608346709470305e-06, |
| "loss": 0.0096, |
| "step": 1120 |
| }, |
| { |
| "epoch": 3.0759676487579433, |
| "grad_norm": 0.10055310279130936, |
| "learning_rate": 3.7447833065810594e-06, |
| "loss": 0.0078, |
| "step": 1130 |
| }, |
| { |
| "epoch": 3.0788561525129983, |
| "grad_norm": 0.07958950102329254, |
| "learning_rate": 3.7287319422150888e-06, |
| "loss": 0.0153, |
| "step": 1140 |
| }, |
| { |
| "epoch": 3.0817446562680533, |
| "grad_norm": 0.2819121479988098, |
| "learning_rate": 3.7126805778491177e-06, |
| "loss": 0.0089, |
| "step": 1150 |
| }, |
| { |
| "epoch": 3.083477758521086, |
| "eval_accuracy": 0.7934560327198364, |
| "eval_loss": 0.9156056046485901, |
| "eval_runtime": 77.6168, |
| "eval_samples_per_second": 6.3, |
| "eval_steps_per_second": 0.799, |
| "step": 1156 |
| }, |
| { |
| "epoch": 4.001155401502022, |
| "grad_norm": 0.28028979897499084, |
| "learning_rate": 3.6966292134831466e-06, |
| "loss": 0.0059, |
| "step": 1160 |
| }, |
| { |
| "epoch": 4.004043905257077, |
| "grad_norm": 0.09656143933534622, |
| "learning_rate": 3.680577849117175e-06, |
| "loss": 0.026, |
| "step": 1170 |
| }, |
| { |
| "epoch": 4.006932409012132, |
| "grad_norm": 3.490778923034668, |
| "learning_rate": 3.664526484751204e-06, |
| "loss": 0.0105, |
| "step": 1180 |
| }, |
| { |
| "epoch": 4.009820912767187, |
| "grad_norm": 0.14486035704612732, |
| "learning_rate": 3.648475120385233e-06, |
| "loss": 0.0015, |
| "step": 1190 |
| }, |
| { |
| "epoch": 4.012709416522242, |
| "grad_norm": 0.04033642262220383, |
| "learning_rate": 3.6324237560192623e-06, |
| "loss": 0.015, |
| "step": 1200 |
| }, |
| { |
| "epoch": 4.015597920277297, |
| "grad_norm": 0.35192564129829407, |
| "learning_rate": 3.6163723916532904e-06, |
| "loss": 0.0015, |
| "step": 1210 |
| }, |
| { |
| "epoch": 4.018486424032352, |
| "grad_norm": 1.2167606353759766, |
| "learning_rate": 3.6003210272873197e-06, |
| "loss": 0.0062, |
| "step": 1220 |
| }, |
| { |
| "epoch": 4.021374927787406, |
| "grad_norm": 0.30561357736587524, |
| "learning_rate": 3.5842696629213486e-06, |
| "loss": 0.0014, |
| "step": 1230 |
| }, |
| { |
| "epoch": 4.024263431542461, |
| "grad_norm": 0.07887325435876846, |
| "learning_rate": 3.5682182985553776e-06, |
| "loss": 0.0017, |
| "step": 1240 |
| }, |
| { |
| "epoch": 4.027151935297516, |
| "grad_norm": 0.08028972148895264, |
| "learning_rate": 3.5521669341894065e-06, |
| "loss": 0.0013, |
| "step": 1250 |
| }, |
| { |
| "epoch": 4.030040439052571, |
| "grad_norm": 0.026913689449429512, |
| "learning_rate": 3.536115569823435e-06, |
| "loss": 0.0634, |
| "step": 1260 |
| }, |
| { |
| "epoch": 4.032928942807626, |
| "grad_norm": 0.028358453884720802, |
| "learning_rate": 3.520064205457464e-06, |
| "loss": 0.0012, |
| "step": 1270 |
| }, |
| { |
| "epoch": 4.035817446562681, |
| "grad_norm": 0.3259369134902954, |
| "learning_rate": 3.5040128410914932e-06, |
| "loss": 0.0018, |
| "step": 1280 |
| }, |
| { |
| "epoch": 4.038705950317736, |
| "grad_norm": 0.033729150891304016, |
| "learning_rate": 3.487961476725522e-06, |
| "loss": 0.0252, |
| "step": 1290 |
| }, |
| { |
| "epoch": 4.04159445407279, |
| "grad_norm": 0.07054167240858078, |
| "learning_rate": 3.471910112359551e-06, |
| "loss": 0.001, |
| "step": 1300 |
| }, |
| { |
| "epoch": 4.044482957827845, |
| "grad_norm": 0.13403773307800293, |
| "learning_rate": 3.4558587479935796e-06, |
| "loss": 0.011, |
| "step": 1310 |
| }, |
| { |
| "epoch": 4.0473714615829, |
| "grad_norm": 0.036013245582580566, |
| "learning_rate": 3.4398073836276085e-06, |
| "loss": 0.0031, |
| "step": 1320 |
| }, |
| { |
| "epoch": 4.050259965337955, |
| "grad_norm": 0.3583677411079407, |
| "learning_rate": 3.4237560192616374e-06, |
| "loss": 0.0012, |
| "step": 1330 |
| }, |
| { |
| "epoch": 4.05314846909301, |
| "grad_norm": 0.03582717850804329, |
| "learning_rate": 3.4077046548956668e-06, |
| "loss": 0.0018, |
| "step": 1340 |
| }, |
| { |
| "epoch": 4.056036972848065, |
| "grad_norm": 0.021955780684947968, |
| "learning_rate": 3.391653290529695e-06, |
| "loss": 0.0013, |
| "step": 1350 |
| }, |
| { |
| "epoch": 4.05892547660312, |
| "grad_norm": 0.03652675449848175, |
| "learning_rate": 3.375601926163724e-06, |
| "loss": 0.0011, |
| "step": 1360 |
| }, |
| { |
| "epoch": 4.061813980358174, |
| "grad_norm": 0.02935628965497017, |
| "learning_rate": 3.359550561797753e-06, |
| "loss": 0.0007, |
| "step": 1370 |
| }, |
| { |
| "epoch": 4.064702484113229, |
| "grad_norm": 0.020217472687363625, |
| "learning_rate": 3.343499197431782e-06, |
| "loss": 0.0012, |
| "step": 1380 |
| }, |
| { |
| "epoch": 4.067590987868284, |
| "grad_norm": 13.410699844360352, |
| "learning_rate": 3.327447833065811e-06, |
| "loss": 0.0043, |
| "step": 1390 |
| }, |
| { |
| "epoch": 4.070479491623339, |
| "grad_norm": 0.04453463479876518, |
| "learning_rate": 3.3113964686998394e-06, |
| "loss": 0.0009, |
| "step": 1400 |
| }, |
| { |
| "epoch": 4.073367995378394, |
| "grad_norm": 0.05309749022126198, |
| "learning_rate": 3.2953451043338684e-06, |
| "loss": 0.0015, |
| "step": 1410 |
| }, |
| { |
| "epoch": 4.076256499133449, |
| "grad_norm": 0.15033993124961853, |
| "learning_rate": 3.2792937399678977e-06, |
| "loss": 0.0011, |
| "step": 1420 |
| }, |
| { |
| "epoch": 4.079145002888504, |
| "grad_norm": 0.044659897685050964, |
| "learning_rate": 3.2632423756019266e-06, |
| "loss": 0.0289, |
| "step": 1430 |
| }, |
| { |
| "epoch": 4.082033506643558, |
| "grad_norm": 0.10378444194793701, |
| "learning_rate": 3.2471910112359555e-06, |
| "loss": 0.0012, |
| "step": 1440 |
| }, |
| { |
| "epoch": 4.083477758521086, |
| "eval_accuracy": 0.7914110429447853, |
| "eval_loss": 1.0430103540420532, |
| "eval_runtime": 66.2163, |
| "eval_samples_per_second": 7.385, |
| "eval_steps_per_second": 0.936, |
| "step": 1445 |
| }, |
| { |
| "epoch": 5.001444251877527, |
| "grad_norm": 0.016393378376960754, |
| "learning_rate": 3.231139646869984e-06, |
| "loss": 0.0007, |
| "step": 1450 |
| }, |
| { |
| "epoch": 5.004332755632582, |
| "grad_norm": 0.023797810077667236, |
| "learning_rate": 3.215088282504013e-06, |
| "loss": 0.0009, |
| "step": 1460 |
| }, |
| { |
| "epoch": 5.007221259387637, |
| "grad_norm": 0.03091173619031906, |
| "learning_rate": 3.199036918138042e-06, |
| "loss": 0.0155, |
| "step": 1470 |
| }, |
| { |
| "epoch": 5.010109763142692, |
| "grad_norm": 0.07352596521377563, |
| "learning_rate": 3.1829855537720712e-06, |
| "loss": 0.0036, |
| "step": 1480 |
| }, |
| { |
| "epoch": 5.012998266897747, |
| "grad_norm": 0.01384744606912136, |
| "learning_rate": 3.1669341894060997e-06, |
| "loss": 0.0006, |
| "step": 1490 |
| }, |
| { |
| "epoch": 5.015886770652802, |
| "grad_norm": 0.048492494970560074, |
| "learning_rate": 3.1508828250401286e-06, |
| "loss": 0.0035, |
| "step": 1500 |
| }, |
| { |
| "epoch": 5.018775274407857, |
| "grad_norm": 0.05079851299524307, |
| "learning_rate": 3.1348314606741576e-06, |
| "loss": 0.0074, |
| "step": 1510 |
| }, |
| { |
| "epoch": 5.021663778162911, |
| "grad_norm": 0.08019035309553146, |
| "learning_rate": 3.1187800963081865e-06, |
| "loss": 0.001, |
| "step": 1520 |
| }, |
| { |
| "epoch": 5.024552281917966, |
| "grad_norm": 0.014834980480372906, |
| "learning_rate": 3.1027287319422154e-06, |
| "loss": 0.0008, |
| "step": 1530 |
| }, |
| { |
| "epoch": 5.027440785673021, |
| "grad_norm": 0.05213601887226105, |
| "learning_rate": 3.086677367576244e-06, |
| "loss": 0.0007, |
| "step": 1540 |
| }, |
| { |
| "epoch": 5.030329289428076, |
| "grad_norm": 0.03506625443696976, |
| "learning_rate": 3.0706260032102732e-06, |
| "loss": 0.0006, |
| "step": 1550 |
| }, |
| { |
| "epoch": 5.033217793183131, |
| "grad_norm": 0.02491496317088604, |
| "learning_rate": 3.054574638844302e-06, |
| "loss": 0.0007, |
| "step": 1560 |
| }, |
| { |
| "epoch": 5.036106296938186, |
| "grad_norm": 8.909887313842773, |
| "learning_rate": 3.038523274478331e-06, |
| "loss": 0.0098, |
| "step": 1570 |
| }, |
| { |
| "epoch": 5.038994800693241, |
| "grad_norm": 0.026988506317138672, |
| "learning_rate": 3.02247191011236e-06, |
| "loss": 0.0006, |
| "step": 1580 |
| }, |
| { |
| "epoch": 5.041883304448295, |
| "grad_norm": 0.09933661669492722, |
| "learning_rate": 3.0064205457463885e-06, |
| "loss": 0.0006, |
| "step": 1590 |
| }, |
| { |
| "epoch": 5.04477180820335, |
| "grad_norm": 0.019388403743505478, |
| "learning_rate": 2.9903691813804174e-06, |
| "loss": 0.0006, |
| "step": 1600 |
| }, |
| { |
| "epoch": 5.047660311958405, |
| "grad_norm": 0.04846403747797012, |
| "learning_rate": 2.9743178170144463e-06, |
| "loss": 0.0011, |
| "step": 1610 |
| }, |
| { |
| "epoch": 5.05054881571346, |
| "grad_norm": 0.019278401508927345, |
| "learning_rate": 2.9582664526484757e-06, |
| "loss": 0.0006, |
| "step": 1620 |
| }, |
| { |
| "epoch": 5.053437319468515, |
| "grad_norm": 0.023772340267896652, |
| "learning_rate": 2.942215088282504e-06, |
| "loss": 0.0007, |
| "step": 1630 |
| }, |
| { |
| "epoch": 5.05632582322357, |
| "grad_norm": 0.13470448553562164, |
| "learning_rate": 2.926163723916533e-06, |
| "loss": 0.0006, |
| "step": 1640 |
| }, |
| { |
| "epoch": 5.059214326978625, |
| "grad_norm": 0.016751592978835106, |
| "learning_rate": 2.910112359550562e-06, |
| "loss": 0.0006, |
| "step": 1650 |
| }, |
| { |
| "epoch": 5.06210283073368, |
| "grad_norm": 0.05011456087231636, |
| "learning_rate": 2.894060995184591e-06, |
| "loss": 0.0004, |
| "step": 1660 |
| }, |
| { |
| "epoch": 5.0649913344887345, |
| "grad_norm": 3.883164405822754, |
| "learning_rate": 2.87800963081862e-06, |
| "loss": 0.0048, |
| "step": 1670 |
| }, |
| { |
| "epoch": 5.0678798382437895, |
| "grad_norm": 0.014229238964617252, |
| "learning_rate": 2.8619582664526484e-06, |
| "loss": 0.0004, |
| "step": 1680 |
| }, |
| { |
| "epoch": 5.0707683419988445, |
| "grad_norm": 0.029299357905983925, |
| "learning_rate": 2.8459069020866777e-06, |
| "loss": 0.0004, |
| "step": 1690 |
| }, |
| { |
| "epoch": 5.0736568457538995, |
| "grad_norm": 0.0175183042883873, |
| "learning_rate": 2.8298555377207066e-06, |
| "loss": 0.0005, |
| "step": 1700 |
| }, |
| { |
| "epoch": 5.0765453495089545, |
| "grad_norm": 0.34429457783699036, |
| "learning_rate": 2.8138041733547356e-06, |
| "loss": 0.0008, |
| "step": 1710 |
| }, |
| { |
| "epoch": 5.0794338532640095, |
| "grad_norm": 0.033084671944379807, |
| "learning_rate": 2.797752808988764e-06, |
| "loss": 0.0008, |
| "step": 1720 |
| }, |
| { |
| "epoch": 5.0823223570190645, |
| "grad_norm": 0.10312939435243607, |
| "learning_rate": 2.781701444622793e-06, |
| "loss": 0.0005, |
| "step": 1730 |
| }, |
| { |
| "epoch": 5.083477758521086, |
| "eval_accuracy": 0.803680981595092, |
| "eval_loss": 1.052091121673584, |
| "eval_runtime": 70.6742, |
| "eval_samples_per_second": 6.919, |
| "eval_steps_per_second": 0.877, |
| "step": 1734 |
| }, |
| { |
| "epoch": 6.001733102253033, |
| "grad_norm": 0.024799736216664314, |
| "learning_rate": 2.765650080256822e-06, |
| "loss": 0.0004, |
| "step": 1740 |
| }, |
| { |
| "epoch": 6.0046216060080875, |
| "grad_norm": 0.007654332090169191, |
| "learning_rate": 2.7495987158908512e-06, |
| "loss": 0.0004, |
| "step": 1750 |
| }, |
| { |
| "epoch": 6.0075101097631425, |
| "grad_norm": 0.04988357052206993, |
| "learning_rate": 2.73354735152488e-06, |
| "loss": 0.0004, |
| "step": 1760 |
| }, |
| { |
| "epoch": 6.0103986135181975, |
| "grad_norm": 0.020988399162888527, |
| "learning_rate": 2.7174959871589087e-06, |
| "loss": 0.0094, |
| "step": 1770 |
| }, |
| { |
| "epoch": 6.0132871172732525, |
| "grad_norm": 0.01697859913110733, |
| "learning_rate": 2.7014446227929376e-06, |
| "loss": 0.0014, |
| "step": 1780 |
| }, |
| { |
| "epoch": 6.0161756210283075, |
| "grad_norm": 0.0390143021941185, |
| "learning_rate": 2.6853932584269665e-06, |
| "loss": 0.0003, |
| "step": 1790 |
| }, |
| { |
| "epoch": 6.0190641247833625, |
| "grad_norm": 0.11428093165159225, |
| "learning_rate": 2.6693418940609954e-06, |
| "loss": 0.0076, |
| "step": 1800 |
| }, |
| { |
| "epoch": 6.0219526285384175, |
| "grad_norm": 0.008388683199882507, |
| "learning_rate": 2.6532905296950243e-06, |
| "loss": 0.0006, |
| "step": 1810 |
| }, |
| { |
| "epoch": 6.024841132293472, |
| "grad_norm": 0.01707868091762066, |
| "learning_rate": 2.637239165329053e-06, |
| "loss": 0.0004, |
| "step": 1820 |
| }, |
| { |
| "epoch": 6.027729636048527, |
| "grad_norm": 0.02858269028365612, |
| "learning_rate": 2.621187800963082e-06, |
| "loss": 0.0006, |
| "step": 1830 |
| }, |
| { |
| "epoch": 6.030618139803582, |
| "grad_norm": 0.018801476806402206, |
| "learning_rate": 2.605136436597111e-06, |
| "loss": 0.0004, |
| "step": 1840 |
| }, |
| { |
| "epoch": 6.033506643558637, |
| "grad_norm": 0.038629643619060516, |
| "learning_rate": 2.58908507223114e-06, |
| "loss": 0.0004, |
| "step": 1850 |
| }, |
| { |
| "epoch": 6.036395147313692, |
| "grad_norm": 0.01919536292552948, |
| "learning_rate": 2.5730337078651685e-06, |
| "loss": 0.0005, |
| "step": 1860 |
| }, |
| { |
| "epoch": 6.039283651068747, |
| "grad_norm": 0.01720685325562954, |
| "learning_rate": 2.5569823434991974e-06, |
| "loss": 0.0004, |
| "step": 1870 |
| }, |
| { |
| "epoch": 6.042172154823802, |
| "grad_norm": 0.07143541425466537, |
| "learning_rate": 2.5409309791332264e-06, |
| "loss": 0.0004, |
| "step": 1880 |
| }, |
| { |
| "epoch": 6.045060658578856, |
| "grad_norm": 0.026604874059557915, |
| "learning_rate": 2.5248796147672557e-06, |
| "loss": 0.0004, |
| "step": 1890 |
| }, |
| { |
| "epoch": 6.047949162333911, |
| "grad_norm": 0.012191304937005043, |
| "learning_rate": 2.5088282504012846e-06, |
| "loss": 0.0003, |
| "step": 1900 |
| }, |
| { |
| "epoch": 6.050837666088966, |
| "grad_norm": 0.021905358880758286, |
| "learning_rate": 2.492776886035313e-06, |
| "loss": 0.0004, |
| "step": 1910 |
| }, |
| { |
| "epoch": 6.053726169844021, |
| "grad_norm": 0.006233502645045519, |
| "learning_rate": 2.476725521669342e-06, |
| "loss": 0.0011, |
| "step": 1920 |
| }, |
| { |
| "epoch": 6.056614673599076, |
| "grad_norm": 0.04028209298849106, |
| "learning_rate": 2.460674157303371e-06, |
| "loss": 0.0003, |
| "step": 1930 |
| }, |
| { |
| "epoch": 6.059503177354131, |
| "grad_norm": 0.2698580026626587, |
| "learning_rate": 2.4446227929374e-06, |
| "loss": 0.0004, |
| "step": 1940 |
| }, |
| { |
| "epoch": 6.062391681109186, |
| "grad_norm": 0.01763250306248665, |
| "learning_rate": 2.428571428571429e-06, |
| "loss": 0.0006, |
| "step": 1950 |
| }, |
| { |
| "epoch": 6.06528018486424, |
| "grad_norm": 0.04466104507446289, |
| "learning_rate": 2.4125200642054577e-06, |
| "loss": 0.0018, |
| "step": 1960 |
| }, |
| { |
| "epoch": 6.068168688619295, |
| "grad_norm": 0.024800406768918037, |
| "learning_rate": 2.3964686998394866e-06, |
| "loss": 0.0072, |
| "step": 1970 |
| }, |
| { |
| "epoch": 6.07105719237435, |
| "grad_norm": 0.2319115251302719, |
| "learning_rate": 2.3804173354735156e-06, |
| "loss": 0.0004, |
| "step": 1980 |
| }, |
| { |
| "epoch": 6.073945696129405, |
| "grad_norm": 0.04877489432692528, |
| "learning_rate": 2.364365971107544e-06, |
| "loss": 0.0003, |
| "step": 1990 |
| }, |
| { |
| "epoch": 6.07683419988446, |
| "grad_norm": 0.4406111538410187, |
| "learning_rate": 2.3483146067415734e-06, |
| "loss": 0.0007, |
| "step": 2000 |
| }, |
| { |
| "epoch": 6.079722703639515, |
| "grad_norm": 0.013905012980103493, |
| "learning_rate": 2.3322632423756023e-06, |
| "loss": 0.0003, |
| "step": 2010 |
| }, |
| { |
| "epoch": 6.08261120739457, |
| "grad_norm": 0.02120201103389263, |
| "learning_rate": 2.316211878009631e-06, |
| "loss": 0.0006, |
| "step": 2020 |
| }, |
| { |
| "epoch": 6.083477758521086, |
| "eval_accuracy": 0.8118609406952966, |
| "eval_loss": 1.0801777839660645, |
| "eval_runtime": 71.8007, |
| "eval_samples_per_second": 6.811, |
| "eval_steps_per_second": 0.864, |
| "step": 2023 |
| }, |
| { |
| "epoch": 7.002021952628539, |
| "grad_norm": 0.08827612549066544, |
| "learning_rate": 2.30016051364366e-06, |
| "loss": 0.0003, |
| "step": 2030 |
| }, |
| { |
| "epoch": 7.004910456383593, |
| "grad_norm": 0.2227260172367096, |
| "learning_rate": 2.2841091492776887e-06, |
| "loss": 0.0004, |
| "step": 2040 |
| }, |
| { |
| "epoch": 7.007798960138648, |
| "grad_norm": 0.010085574351251125, |
| "learning_rate": 2.2680577849117176e-06, |
| "loss": 0.0003, |
| "step": 2050 |
| }, |
| { |
| "epoch": 7.010687463893703, |
| "grad_norm": 0.07205743342638016, |
| "learning_rate": 2.2520064205457465e-06, |
| "loss": 0.0014, |
| "step": 2060 |
| }, |
| { |
| "epoch": 7.013575967648758, |
| "grad_norm": 0.1408742517232895, |
| "learning_rate": 2.2359550561797754e-06, |
| "loss": 0.0004, |
| "step": 2070 |
| }, |
| { |
| "epoch": 7.016464471403813, |
| "grad_norm": 0.033056750893592834, |
| "learning_rate": 2.2199036918138043e-06, |
| "loss": 0.0003, |
| "step": 2080 |
| }, |
| { |
| "epoch": 7.019352975158868, |
| "grad_norm": 0.008353917859494686, |
| "learning_rate": 2.2038523274478333e-06, |
| "loss": 0.0002, |
| "step": 2090 |
| }, |
| { |
| "epoch": 7.022241478913923, |
| "grad_norm": 0.007533412892371416, |
| "learning_rate": 2.187800963081862e-06, |
| "loss": 0.0003, |
| "step": 2100 |
| }, |
| { |
| "epoch": 7.025129982668977, |
| "grad_norm": 0.009329171851277351, |
| "learning_rate": 2.171749598715891e-06, |
| "loss": 0.0003, |
| "step": 2110 |
| }, |
| { |
| "epoch": 7.028018486424032, |
| "grad_norm": 0.006963707972317934, |
| "learning_rate": 2.15569823434992e-06, |
| "loss": 0.0003, |
| "step": 2120 |
| }, |
| { |
| "epoch": 7.030906990179087, |
| "grad_norm": 0.01631193608045578, |
| "learning_rate": 2.1396468699839485e-06, |
| "loss": 0.0003, |
| "step": 2130 |
| }, |
| { |
| "epoch": 7.033795493934142, |
| "grad_norm": 0.015248533338308334, |
| "learning_rate": 2.123595505617978e-06, |
| "loss": 0.0003, |
| "step": 2140 |
| }, |
| { |
| "epoch": 7.036683997689197, |
| "grad_norm": 0.005624765995889902, |
| "learning_rate": 2.1075441412520064e-06, |
| "loss": 0.0013, |
| "step": 2150 |
| }, |
| { |
| "epoch": 7.039572501444252, |
| "grad_norm": 0.007767071016132832, |
| "learning_rate": 2.0914927768860353e-06, |
| "loss": 0.0002, |
| "step": 2160 |
| }, |
| { |
| "epoch": 7.042461005199307, |
| "grad_norm": 0.05685372278094292, |
| "learning_rate": 2.0754414125200646e-06, |
| "loss": 0.0019, |
| "step": 2170 |
| }, |
| { |
| "epoch": 7.045349508954362, |
| "grad_norm": 0.010567046701908112, |
| "learning_rate": 2.059390048154093e-06, |
| "loss": 0.0003, |
| "step": 2180 |
| }, |
| { |
| "epoch": 7.048238012709416, |
| "grad_norm": 0.00699876993894577, |
| "learning_rate": 2.043338683788122e-06, |
| "loss": 0.0006, |
| "step": 2190 |
| }, |
| { |
| "epoch": 7.051126516464471, |
| "grad_norm": 0.023216497153043747, |
| "learning_rate": 2.027287319422151e-06, |
| "loss": 0.0006, |
| "step": 2200 |
| }, |
| { |
| "epoch": 7.054015020219526, |
| "grad_norm": 0.3829960525035858, |
| "learning_rate": 2.01123595505618e-06, |
| "loss": 0.0003, |
| "step": 2210 |
| }, |
| { |
| "epoch": 7.056903523974581, |
| "grad_norm": 0.01023771334439516, |
| "learning_rate": 1.995184590690209e-06, |
| "loss": 0.0005, |
| "step": 2220 |
| }, |
| { |
| "epoch": 7.059792027729636, |
| "grad_norm": 0.013453644700348377, |
| "learning_rate": 1.9791332263242377e-06, |
| "loss": 0.0003, |
| "step": 2230 |
| }, |
| { |
| "epoch": 7.062680531484691, |
| "grad_norm": 0.009664042852818966, |
| "learning_rate": 1.9630818619582666e-06, |
| "loss": 0.0003, |
| "step": 2240 |
| }, |
| { |
| "epoch": 7.065569035239746, |
| "grad_norm": 0.02549542300403118, |
| "learning_rate": 1.9470304975922956e-06, |
| "loss": 0.0008, |
| "step": 2250 |
| }, |
| { |
| "epoch": 7.0684575389948, |
| "grad_norm": 0.016215378418564796, |
| "learning_rate": 1.9309791332263245e-06, |
| "loss": 0.0002, |
| "step": 2260 |
| }, |
| { |
| "epoch": 7.071346042749855, |
| "grad_norm": 0.014920054003596306, |
| "learning_rate": 1.9149277688603534e-06, |
| "loss": 0.0002, |
| "step": 2270 |
| }, |
| { |
| "epoch": 7.07423454650491, |
| "grad_norm": 0.00620607752352953, |
| "learning_rate": 1.8988764044943821e-06, |
| "loss": 0.0003, |
| "step": 2280 |
| }, |
| { |
| "epoch": 7.077123050259965, |
| "grad_norm": 0.015935564413666725, |
| "learning_rate": 1.882825040128411e-06, |
| "loss": 0.0003, |
| "step": 2290 |
| }, |
| { |
| "epoch": 7.08001155401502, |
| "grad_norm": 0.0060450248420238495, |
| "learning_rate": 1.86677367576244e-06, |
| "loss": 0.0002, |
| "step": 2300 |
| }, |
| { |
| "epoch": 7.082900057770075, |
| "grad_norm": 0.012453804723918438, |
| "learning_rate": 1.8507223113964689e-06, |
| "loss": 0.0002, |
| "step": 2310 |
| }, |
| { |
| "epoch": 7.083477758521086, |
| "eval_accuracy": 0.8077709611451943, |
| "eval_loss": 1.1695784330368042, |
| "eval_runtime": 65.8882, |
| "eval_samples_per_second": 7.422, |
| "eval_steps_per_second": 0.941, |
| "step": 2312 |
| }, |
| { |
| "epoch": 8.002310803004043, |
| "grad_norm": 0.6690245866775513, |
| "learning_rate": 1.8346709470304978e-06, |
| "loss": 0.0007, |
| "step": 2320 |
| }, |
| { |
| "epoch": 8.0051993067591, |
| "grad_norm": 0.0172465480864048, |
| "learning_rate": 1.8186195826645267e-06, |
| "loss": 0.0002, |
| "step": 2330 |
| }, |
| { |
| "epoch": 8.008087810514153, |
| "grad_norm": 0.04959592595696449, |
| "learning_rate": 1.8025682182985554e-06, |
| "loss": 0.0003, |
| "step": 2340 |
| }, |
| { |
| "epoch": 8.01097631426921, |
| "grad_norm": 0.008038152940571308, |
| "learning_rate": 1.7865168539325846e-06, |
| "loss": 0.0002, |
| "step": 2350 |
| }, |
| { |
| "epoch": 8.013864818024263, |
| "grad_norm": 0.009810101240873337, |
| "learning_rate": 1.7704654895666133e-06, |
| "loss": 0.0002, |
| "step": 2360 |
| }, |
| { |
| "epoch": 8.016753321779317, |
| "grad_norm": 0.00564626744017005, |
| "learning_rate": 1.7544141252006422e-06, |
| "loss": 0.0002, |
| "step": 2370 |
| }, |
| { |
| "epoch": 8.019641825534373, |
| "grad_norm": 0.010218638926744461, |
| "learning_rate": 1.738362760834671e-06, |
| "loss": 0.0002, |
| "step": 2380 |
| }, |
| { |
| "epoch": 8.022530329289427, |
| "grad_norm": 0.008607611060142517, |
| "learning_rate": 1.7223113964687e-06, |
| "loss": 0.0002, |
| "step": 2390 |
| }, |
| { |
| "epoch": 8.025418833044483, |
| "grad_norm": 0.006128065288066864, |
| "learning_rate": 1.706260032102729e-06, |
| "loss": 0.0003, |
| "step": 2400 |
| }, |
| { |
| "epoch": 8.028307336799537, |
| "grad_norm": 0.0036342665553092957, |
| "learning_rate": 1.6902086677367576e-06, |
| "loss": 0.0007, |
| "step": 2410 |
| }, |
| { |
| "epoch": 8.031195840554593, |
| "grad_norm": 0.008177458308637142, |
| "learning_rate": 1.6741573033707868e-06, |
| "loss": 0.0003, |
| "step": 2420 |
| }, |
| { |
| "epoch": 8.034084344309647, |
| "grad_norm": 0.009102354757487774, |
| "learning_rate": 1.6581059390048155e-06, |
| "loss": 0.0006, |
| "step": 2430 |
| }, |
| { |
| "epoch": 8.036972848064703, |
| "grad_norm": 0.012548357248306274, |
| "learning_rate": 1.6420545746388444e-06, |
| "loss": 0.0002, |
| "step": 2440 |
| }, |
| { |
| "epoch": 8.039861351819757, |
| "grad_norm": 0.012381269596517086, |
| "learning_rate": 1.6260032102728735e-06, |
| "loss": 0.0002, |
| "step": 2450 |
| }, |
| { |
| "epoch": 8.042749855574812, |
| "grad_norm": 0.008270743303000927, |
| "learning_rate": 1.6099518459069023e-06, |
| "loss": 0.0002, |
| "step": 2460 |
| }, |
| { |
| "epoch": 8.045638359329867, |
| "grad_norm": 0.008984532207250595, |
| "learning_rate": 1.5939004815409312e-06, |
| "loss": 0.0003, |
| "step": 2470 |
| }, |
| { |
| "epoch": 8.048526863084922, |
| "grad_norm": 0.007146380841732025, |
| "learning_rate": 1.5778491171749599e-06, |
| "loss": 0.0002, |
| "step": 2480 |
| }, |
| { |
| "epoch": 8.051415366839977, |
| "grad_norm": 0.013866865076124668, |
| "learning_rate": 1.561797752808989e-06, |
| "loss": 0.0002, |
| "step": 2490 |
| }, |
| { |
| "epoch": 8.054303870595032, |
| "grad_norm": 0.01275590155273676, |
| "learning_rate": 1.5457463884430177e-06, |
| "loss": 0.0002, |
| "step": 2500 |
| }, |
| { |
| "epoch": 8.057192374350088, |
| "grad_norm": 0.008473788388073444, |
| "learning_rate": 1.5296950240770466e-06, |
| "loss": 0.0002, |
| "step": 2510 |
| }, |
| { |
| "epoch": 8.060080878105142, |
| "grad_norm": 0.0103828813880682, |
| "learning_rate": 1.5136436597110758e-06, |
| "loss": 0.0002, |
| "step": 2520 |
| }, |
| { |
| "epoch": 8.062969381860196, |
| "grad_norm": 0.005070853512734175, |
| "learning_rate": 1.4975922953451045e-06, |
| "loss": 0.0002, |
| "step": 2530 |
| }, |
| { |
| "epoch": 8.065857885615252, |
| "grad_norm": 0.009254480712115765, |
| "learning_rate": 1.4815409309791334e-06, |
| "loss": 0.0002, |
| "step": 2540 |
| }, |
| { |
| "epoch": 8.068746389370306, |
| "grad_norm": 0.05558139830827713, |
| "learning_rate": 1.4654895666131621e-06, |
| "loss": 0.0002, |
| "step": 2550 |
| }, |
| { |
| "epoch": 8.071634893125362, |
| "grad_norm": 0.006696054711937904, |
| "learning_rate": 1.4494382022471912e-06, |
| "loss": 0.0002, |
| "step": 2560 |
| }, |
| { |
| "epoch": 8.074523396880416, |
| "grad_norm": 0.00565418740734458, |
| "learning_rate": 1.43338683788122e-06, |
| "loss": 0.0002, |
| "step": 2570 |
| }, |
| { |
| "epoch": 8.077411900635472, |
| "grad_norm": 0.011022423394024372, |
| "learning_rate": 1.4173354735152489e-06, |
| "loss": 0.0002, |
| "step": 2580 |
| }, |
| { |
| "epoch": 8.080300404390526, |
| "grad_norm": 0.006783703807741404, |
| "learning_rate": 1.401284109149278e-06, |
| "loss": 0.0002, |
| "step": 2590 |
| }, |
| { |
| "epoch": 8.08318890814558, |
| "grad_norm": 0.03814297169446945, |
| "learning_rate": 1.3852327447833067e-06, |
| "loss": 0.0003, |
| "step": 2600 |
| }, |
| { |
| "epoch": 8.083477758521086, |
| "eval_accuracy": 0.8139059304703476, |
| "eval_loss": 1.1929619312286377, |
| "eval_runtime": 66.8221, |
| "eval_samples_per_second": 7.318, |
| "eval_steps_per_second": 0.928, |
| "step": 2601 |
| }, |
| { |
| "epoch": 9.00259965337955, |
| "grad_norm": 0.004687231034040451, |
| "learning_rate": 1.3691813804173356e-06, |
| "loss": 0.0002, |
| "step": 2610 |
| }, |
| { |
| "epoch": 9.005488157134604, |
| "grad_norm": 0.01752406731247902, |
| "learning_rate": 1.3531300160513643e-06, |
| "loss": 0.0002, |
| "step": 2620 |
| }, |
| { |
| "epoch": 9.00837666088966, |
| "grad_norm": 0.005811620503664017, |
| "learning_rate": 1.3370786516853935e-06, |
| "loss": 0.0002, |
| "step": 2630 |
| }, |
| { |
| "epoch": 9.011265164644714, |
| "grad_norm": 0.0183991938829422, |
| "learning_rate": 1.3210272873194222e-06, |
| "loss": 0.0002, |
| "step": 2640 |
| }, |
| { |
| "epoch": 9.01415366839977, |
| "grad_norm": 0.00602333527058363, |
| "learning_rate": 1.304975922953451e-06, |
| "loss": 0.0002, |
| "step": 2650 |
| }, |
| { |
| "epoch": 9.017042172154824, |
| "grad_norm": 0.06442410498857498, |
| "learning_rate": 1.2889245585874802e-06, |
| "loss": 0.0002, |
| "step": 2660 |
| }, |
| { |
| "epoch": 9.019930675909878, |
| "grad_norm": 0.006975897122174501, |
| "learning_rate": 1.272873194221509e-06, |
| "loss": 0.0002, |
| "step": 2670 |
| }, |
| { |
| "epoch": 9.022819179664934, |
| "grad_norm": 0.006208993028849363, |
| "learning_rate": 1.2568218298555379e-06, |
| "loss": 0.0002, |
| "step": 2680 |
| }, |
| { |
| "epoch": 9.025707683419988, |
| "grad_norm": 0.006232697516679764, |
| "learning_rate": 1.2407704654895668e-06, |
| "loss": 0.0004, |
| "step": 2690 |
| }, |
| { |
| "epoch": 9.028596187175044, |
| "grad_norm": 0.008961373008787632, |
| "learning_rate": 1.2247191011235957e-06, |
| "loss": 0.0002, |
| "step": 2700 |
| }, |
| { |
| "epoch": 9.031484690930098, |
| "grad_norm": 0.017206748947501183, |
| "learning_rate": 1.2086677367576246e-06, |
| "loss": 0.0002, |
| "step": 2710 |
| }, |
| { |
| "epoch": 9.034373194685154, |
| "grad_norm": 0.024422556161880493, |
| "learning_rate": 1.1926163723916533e-06, |
| "loss": 0.0002, |
| "step": 2720 |
| }, |
| { |
| "epoch": 9.037261698440208, |
| "grad_norm": 0.0045550218783319, |
| "learning_rate": 1.1765650080256823e-06, |
| "loss": 0.0002, |
| "step": 2730 |
| }, |
| { |
| "epoch": 9.040150202195262, |
| "grad_norm": 0.030436644330620766, |
| "learning_rate": 1.1605136436597112e-06, |
| "loss": 0.0002, |
| "step": 2740 |
| }, |
| { |
| "epoch": 9.043038705950318, |
| "grad_norm": 0.018208695575594902, |
| "learning_rate": 1.14446227929374e-06, |
| "loss": 0.0003, |
| "step": 2750 |
| }, |
| { |
| "epoch": 9.045927209705372, |
| "grad_norm": 0.005186667200177908, |
| "learning_rate": 1.1284109149277688e-06, |
| "loss": 0.0002, |
| "step": 2760 |
| }, |
| { |
| "epoch": 9.048815713460428, |
| "grad_norm": 0.021143196150660515, |
| "learning_rate": 1.112359550561798e-06, |
| "loss": 0.0002, |
| "step": 2770 |
| }, |
| { |
| "epoch": 9.051704217215482, |
| "grad_norm": 0.312775194644928, |
| "learning_rate": 1.0963081861958269e-06, |
| "loss": 0.0002, |
| "step": 2780 |
| }, |
| { |
| "epoch": 9.054592720970538, |
| "grad_norm": 0.009823737666010857, |
| "learning_rate": 1.0802568218298556e-06, |
| "loss": 0.0002, |
| "step": 2790 |
| }, |
| { |
| "epoch": 9.057481224725592, |
| "grad_norm": 0.01661614701151848, |
| "learning_rate": 1.0642054574638845e-06, |
| "loss": 0.0002, |
| "step": 2800 |
| }, |
| { |
| "epoch": 9.060369728480646, |
| "grad_norm": 0.010930007323622704, |
| "learning_rate": 1.0481540930979134e-06, |
| "loss": 0.0001, |
| "step": 2810 |
| }, |
| { |
| "epoch": 9.063258232235702, |
| "grad_norm": 0.005455450154840946, |
| "learning_rate": 1.0321027287319423e-06, |
| "loss": 0.0002, |
| "step": 2820 |
| }, |
| { |
| "epoch": 9.066146735990756, |
| "grad_norm": 0.010016432963311672, |
| "learning_rate": 1.016051364365971e-06, |
| "loss": 0.0002, |
| "step": 2830 |
| }, |
| { |
| "epoch": 9.069035239745812, |
| "grad_norm": 0.0051452540792524815, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 0.0002, |
| "step": 2840 |
| }, |
| { |
| "epoch": 9.071923743500866, |
| "grad_norm": 0.008383591659367085, |
| "learning_rate": 9.83948635634029e-07, |
| "loss": 0.0002, |
| "step": 2850 |
| }, |
| { |
| "epoch": 9.074812247255922, |
| "grad_norm": 0.005880800541490316, |
| "learning_rate": 9.678972712680578e-07, |
| "loss": 0.0003, |
| "step": 2860 |
| }, |
| { |
| "epoch": 9.077700751010976, |
| "grad_norm": 0.004441461060196161, |
| "learning_rate": 9.518459069020867e-07, |
| "loss": 0.0002, |
| "step": 2870 |
| }, |
| { |
| "epoch": 9.080589254766032, |
| "grad_norm": 0.018957389518618584, |
| "learning_rate": 9.357945425361156e-07, |
| "loss": 0.0002, |
| "step": 2880 |
| }, |
| { |
| "epoch": 9.083477758521086, |
| "grad_norm": 0.0041592782363295555, |
| "learning_rate": 9.197431781701445e-07, |
| "loss": 0.0002, |
| "step": 2890 |
| }, |
| { |
| "epoch": 9.083477758521086, |
| "eval_accuracy": 0.8077709611451943, |
| "eval_loss": 1.2366055250167847, |
| "eval_runtime": 69.8127, |
| "eval_samples_per_second": 7.004, |
| "eval_steps_per_second": 0.888, |
| "step": 2890 |
| }, |
| { |
| "epoch": 10.002888503755054, |
| "grad_norm": 0.0036611612886190414, |
| "learning_rate": 9.036918138041734e-07, |
| "loss": 0.0002, |
| "step": 2900 |
| }, |
| { |
| "epoch": 10.00577700751011, |
| "grad_norm": 0.010820758529007435, |
| "learning_rate": 8.876404494382024e-07, |
| "loss": 0.0002, |
| "step": 2910 |
| }, |
| { |
| "epoch": 10.008665511265164, |
| "grad_norm": 0.18064051866531372, |
| "learning_rate": 8.715890850722312e-07, |
| "loss": 0.0002, |
| "step": 2920 |
| }, |
| { |
| "epoch": 10.01155401502022, |
| "grad_norm": 0.011522581800818443, |
| "learning_rate": 8.555377207062601e-07, |
| "loss": 0.0002, |
| "step": 2930 |
| }, |
| { |
| "epoch": 10.014442518775274, |
| "grad_norm": 0.02415822632610798, |
| "learning_rate": 8.394863563402889e-07, |
| "loss": 0.0002, |
| "step": 2940 |
| }, |
| { |
| "epoch": 10.01733102253033, |
| "grad_norm": 0.005263539496809244, |
| "learning_rate": 8.234349919743179e-07, |
| "loss": 0.0002, |
| "step": 2950 |
| }, |
| { |
| "epoch": 10.020219526285384, |
| "grad_norm": 0.008322079665958881, |
| "learning_rate": 8.073836276083468e-07, |
| "loss": 0.0002, |
| "step": 2960 |
| }, |
| { |
| "epoch": 10.023108030040438, |
| "grad_norm": 0.0035860182251781225, |
| "learning_rate": 7.913322632423756e-07, |
| "loss": 0.0002, |
| "step": 2970 |
| }, |
| { |
| "epoch": 10.025996533795494, |
| "grad_norm": 0.026575535535812378, |
| "learning_rate": 7.752808988764046e-07, |
| "loss": 0.0002, |
| "step": 2980 |
| }, |
| { |
| "epoch": 10.028885037550548, |
| "grad_norm": 0.006455577909946442, |
| "learning_rate": 7.592295345104334e-07, |
| "loss": 0.0002, |
| "step": 2990 |
| }, |
| { |
| "epoch": 10.031773541305604, |
| "grad_norm": 0.023962225764989853, |
| "learning_rate": 7.431781701444624e-07, |
| "loss": 0.0002, |
| "step": 3000 |
| }, |
| { |
| "epoch": 10.034662045060658, |
| "grad_norm": 0.004179599694907665, |
| "learning_rate": 7.271268057784913e-07, |
| "loss": 0.0001, |
| "step": 3010 |
| }, |
| { |
| "epoch": 10.037550548815714, |
| "grad_norm": 0.0037284549325704575, |
| "learning_rate": 7.110754414125201e-07, |
| "loss": 0.0002, |
| "step": 3020 |
| }, |
| { |
| "epoch": 10.040439052570768, |
| "grad_norm": 0.006995455361902714, |
| "learning_rate": 6.95024077046549e-07, |
| "loss": 0.0002, |
| "step": 3030 |
| }, |
| { |
| "epoch": 10.043327556325822, |
| "grad_norm": 0.005864481907337904, |
| "learning_rate": 6.789727126805778e-07, |
| "loss": 0.0002, |
| "step": 3040 |
| }, |
| { |
| "epoch": 10.046216060080878, |
| "grad_norm": 0.007472215220332146, |
| "learning_rate": 6.629213483146069e-07, |
| "loss": 0.0002, |
| "step": 3050 |
| }, |
| { |
| "epoch": 10.049104563835932, |
| "grad_norm": 0.00438402034342289, |
| "learning_rate": 6.468699839486358e-07, |
| "loss": 0.0002, |
| "step": 3060 |
| }, |
| { |
| "epoch": 10.051993067590988, |
| "grad_norm": 0.029750341549515724, |
| "learning_rate": 6.308186195826646e-07, |
| "loss": 0.0001, |
| "step": 3070 |
| }, |
| { |
| "epoch": 10.054881571346042, |
| "grad_norm": 0.010507237166166306, |
| "learning_rate": 6.147672552166935e-07, |
| "loss": 0.0002, |
| "step": 3080 |
| }, |
| { |
| "epoch": 10.057770075101098, |
| "grad_norm": 0.006501095835119486, |
| "learning_rate": 5.987158908507223e-07, |
| "loss": 0.0001, |
| "step": 3090 |
| }, |
| { |
| "epoch": 10.060658578856152, |
| "grad_norm": 0.013263998553156853, |
| "learning_rate": 5.826645264847512e-07, |
| "loss": 0.0001, |
| "step": 3100 |
| }, |
| { |
| "epoch": 10.063547082611207, |
| "grad_norm": 0.06579581648111343, |
| "learning_rate": 5.666131621187802e-07, |
| "loss": 0.0002, |
| "step": 3110 |
| }, |
| { |
| "epoch": 10.066435586366262, |
| "grad_norm": 0.024195345118641853, |
| "learning_rate": 5.50561797752809e-07, |
| "loss": 0.0002, |
| "step": 3120 |
| }, |
| { |
| "epoch": 10.069324090121317, |
| "grad_norm": 0.00511367479339242, |
| "learning_rate": 5.345104333868379e-07, |
| "loss": 0.0003, |
| "step": 3130 |
| }, |
| { |
| "epoch": 10.072212593876372, |
| "grad_norm": 0.008129936642944813, |
| "learning_rate": 5.184590690208668e-07, |
| "loss": 0.0002, |
| "step": 3140 |
| }, |
| { |
| "epoch": 10.075101097631427, |
| "grad_norm": 0.0060712313279509544, |
| "learning_rate": 5.024077046548957e-07, |
| "loss": 0.0002, |
| "step": 3150 |
| }, |
| { |
| "epoch": 10.077989601386482, |
| "grad_norm": 0.016326796263456345, |
| "learning_rate": 4.863563402889246e-07, |
| "loss": 0.0001, |
| "step": 3160 |
| }, |
| { |
| "epoch": 10.080878105141537, |
| "grad_norm": 0.0037833349779248238, |
| "learning_rate": 4.703049759229535e-07, |
| "loss": 0.0002, |
| "step": 3170 |
| }, |
| { |
| "epoch": 10.083477758521086, |
| "eval_accuracy": 0.8098159509202454, |
| "eval_loss": 1.2292110919952393, |
| "eval_runtime": 69.0823, |
| "eval_samples_per_second": 7.079, |
| "eval_steps_per_second": 0.897, |
| "step": 3179 |
| }, |
| { |
| "epoch": 11.000288850375506, |
| "grad_norm": 0.007949397899210453, |
| "learning_rate": 4.542536115569824e-07, |
| "loss": 0.0001, |
| "step": 3180 |
| }, |
| { |
| "epoch": 11.00317735413056, |
| "grad_norm": 0.005895549897104502, |
| "learning_rate": 4.3820224719101127e-07, |
| "loss": 0.0004, |
| "step": 3190 |
| }, |
| { |
| "epoch": 11.006065857885615, |
| "grad_norm": 0.004202838055789471, |
| "learning_rate": 4.2215088282504014e-07, |
| "loss": 0.0001, |
| "step": 3200 |
| }, |
| { |
| "epoch": 11.00895436164067, |
| "grad_norm": 0.005272583104670048, |
| "learning_rate": 4.060995184590691e-07, |
| "loss": 0.0002, |
| "step": 3210 |
| }, |
| { |
| "epoch": 11.011842865395725, |
| "grad_norm": 0.009343048557639122, |
| "learning_rate": 3.90048154093098e-07, |
| "loss": 0.0002, |
| "step": 3220 |
| }, |
| { |
| "epoch": 11.01473136915078, |
| "grad_norm": 0.004706119187176228, |
| "learning_rate": 3.7399678972712684e-07, |
| "loss": 0.0002, |
| "step": 3230 |
| }, |
| { |
| "epoch": 11.017619872905835, |
| "grad_norm": 0.03129461407661438, |
| "learning_rate": 3.579454253611557e-07, |
| "loss": 0.0002, |
| "step": 3240 |
| }, |
| { |
| "epoch": 11.02050837666089, |
| "grad_norm": 0.004606070462614298, |
| "learning_rate": 3.4189406099518463e-07, |
| "loss": 0.0002, |
| "step": 3250 |
| }, |
| { |
| "epoch": 11.023396880415945, |
| "grad_norm": 0.0045063793659210205, |
| "learning_rate": 3.258426966292135e-07, |
| "loss": 0.0002, |
| "step": 3260 |
| }, |
| { |
| "epoch": 11.026285384170999, |
| "grad_norm": 0.0062253838405013084, |
| "learning_rate": 3.0979133226324237e-07, |
| "loss": 0.0003, |
| "step": 3270 |
| }, |
| { |
| "epoch": 11.029173887926055, |
| "grad_norm": 0.0034174402244389057, |
| "learning_rate": 2.937399678972713e-07, |
| "loss": 0.0002, |
| "step": 3280 |
| }, |
| { |
| "epoch": 11.032062391681109, |
| "grad_norm": 0.004293691832572222, |
| "learning_rate": 2.776886035313002e-07, |
| "loss": 0.0001, |
| "step": 3290 |
| }, |
| { |
| "epoch": 11.034950895436165, |
| "grad_norm": 0.0049605295062065125, |
| "learning_rate": 2.616372391653291e-07, |
| "loss": 0.0002, |
| "step": 3300 |
| }, |
| { |
| "epoch": 11.037839399191219, |
| "grad_norm": 0.005613707937300205, |
| "learning_rate": 2.45585874799358e-07, |
| "loss": 0.0002, |
| "step": 3310 |
| }, |
| { |
| "epoch": 11.040727902946275, |
| "grad_norm": 0.18399178981781006, |
| "learning_rate": 2.2953451043338686e-07, |
| "loss": 0.0002, |
| "step": 3320 |
| }, |
| { |
| "epoch": 11.043616406701329, |
| "grad_norm": 0.004424693062901497, |
| "learning_rate": 2.1348314606741576e-07, |
| "loss": 0.0001, |
| "step": 3330 |
| }, |
| { |
| "epoch": 11.046504910456383, |
| "grad_norm": 0.020336680114269257, |
| "learning_rate": 1.9743178170144462e-07, |
| "loss": 0.0001, |
| "step": 3340 |
| }, |
| { |
| "epoch": 11.049393414211439, |
| "grad_norm": 0.015212628059089184, |
| "learning_rate": 1.8138041733547354e-07, |
| "loss": 0.0001, |
| "step": 3350 |
| }, |
| { |
| "epoch": 11.052281917966493, |
| "grad_norm": 0.008279630914330482, |
| "learning_rate": 1.653290529695024e-07, |
| "loss": 0.0001, |
| "step": 3360 |
| }, |
| { |
| "epoch": 11.055170421721549, |
| "grad_norm": 0.0034426343627274036, |
| "learning_rate": 1.492776886035313e-07, |
| "loss": 0.0002, |
| "step": 3370 |
| }, |
| { |
| "epoch": 11.058058925476603, |
| "grad_norm": 0.010223750956356525, |
| "learning_rate": 1.332263242375602e-07, |
| "loss": 0.0002, |
| "step": 3380 |
| }, |
| { |
| "epoch": 11.060947429231659, |
| "grad_norm": 0.004876590799540281, |
| "learning_rate": 1.1717495987158909e-07, |
| "loss": 0.0001, |
| "step": 3390 |
| }, |
| { |
| "epoch": 11.063835932986713, |
| "grad_norm": 0.006909272633492947, |
| "learning_rate": 1.0112359550561797e-07, |
| "loss": 0.0002, |
| "step": 3400 |
| }, |
| { |
| "epoch": 11.066724436741767, |
| "grad_norm": 0.017502378672361374, |
| "learning_rate": 8.507223113964687e-08, |
| "loss": 0.0002, |
| "step": 3410 |
| }, |
| { |
| "epoch": 11.069612940496823, |
| "grad_norm": 0.020532017573714256, |
| "learning_rate": 6.902086677367576e-08, |
| "loss": 0.0001, |
| "step": 3420 |
| }, |
| { |
| "epoch": 11.072501444251877, |
| "grad_norm": 0.01509220339357853, |
| "learning_rate": 5.2969502407704655e-08, |
| "loss": 0.0002, |
| "step": 3430 |
| }, |
| { |
| "epoch": 11.075389948006933, |
| "grad_norm": 0.0059640249237418175, |
| "learning_rate": 3.691813804173355e-08, |
| "loss": 0.0002, |
| "step": 3440 |
| }, |
| { |
| "epoch": 11.078278451761987, |
| "grad_norm": 0.0043342639692127705, |
| "learning_rate": 2.0866773675762443e-08, |
| "loss": 0.0001, |
| "step": 3450 |
| }, |
| { |
| "epoch": 11.081166955517043, |
| "grad_norm": 0.010837825946509838, |
| "learning_rate": 4.815409309791333e-09, |
| "loss": 0.0002, |
| "step": 3460 |
| }, |
| { |
| "epoch": 11.081744656268054, |
| "eval_accuracy": 0.8098159509202454, |
| "eval_loss": 1.2295476198196411, |
| "eval_runtime": 73.4564, |
| "eval_samples_per_second": 6.657, |
| "eval_steps_per_second": 0.844, |
| "step": 3462 |
| }, |
| { |
| "epoch": 11.081744656268054, |
| "step": 3462, |
| "total_flos": 3.4483591449041043e+19, |
| "train_loss": 0.08882198683258911, |
| "train_runtime": 9253.1851, |
| "train_samples_per_second": 2.993, |
| "train_steps_per_second": 0.374 |
| }, |
| { |
| "epoch": 11.081744656268054, |
| "eval_accuracy": 0.8413654618473896, |
| "eval_loss": 0.7867043018341064, |
| "eval_runtime": 76.5589, |
| "eval_samples_per_second": 6.505, |
| "eval_steps_per_second": 0.823, |
| "step": 3462 |
| }, |
| { |
| "epoch": 11.081744656268054, |
| "eval_accuracy": 0.8413654618473896, |
| "eval_loss": 0.7867043018341064, |
| "eval_runtime": 74.8858, |
| "eval_samples_per_second": 6.65, |
| "eval_steps_per_second": 0.841, |
| "step": 3462 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 3462, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "EarlyStoppingCallback": { |
| "args": { |
| "early_stopping_patience": 4, |
| "early_stopping_threshold": 0.0 |
| }, |
| "attributes": { |
| "early_stopping_patience_counter": 3 |
| } |
| }, |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 3.4483591449041043e+19, |
| "train_batch_size": 8, |
| "trial_name": null, |
| "trial_params": null |
| } |