{
  "best_metric": 0.980876728449544,
  "best_model_checkpoint": "speechcommand-demo/checkpoint-1995",
  "epoch": 4.996869129618034,
  "global_step": 1995,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 1.5e-06,
      "loss": 2.4947,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 3e-06,
      "loss": 2.4762,
      "step": 20
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.5e-06,
      "loss": 2.4191,
      "step": 30
    },
    {
      "epoch": 0.1,
      "learning_rate": 6e-06,
      "loss": 2.3201,
      "step": 40
    },
    {
      "epoch": 0.13,
      "learning_rate": 7.5e-06,
      "loss": 2.1921,
      "step": 50
    },
    {
      "epoch": 0.15,
      "learning_rate": 9e-06,
      "loss": 1.9942,
      "step": 60
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.05e-05,
      "loss": 1.859,
      "step": 70
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.2e-05,
      "loss": 1.7746,
      "step": 80
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 1.6635,
      "step": 90
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.5e-05,
      "loss": 1.6259,
      "step": 100
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.65e-05,
      "loss": 1.5039,
      "step": 110
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8e-05,
      "loss": 1.4957,
      "step": 120
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.95e-05,
      "loss": 1.4948,
      "step": 130
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.1e-05,
      "loss": 1.4833,
      "step": 140
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.25e-05,
      "loss": 1.5046,
      "step": 150
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.4e-05,
      "loss": 1.4647,
      "step": 160
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.55e-05,
      "loss": 1.3949,
      "step": 170
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 1.4712,
      "step": 180
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.8499999999999998e-05,
      "loss": 1.4208,
      "step": 190
    },
    {
      "epoch": 0.5,
      "learning_rate": 3e-05,
      "loss": 1.3243,
      "step": 200
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.984958217270195e-05,
      "loss": 1.3227,
      "step": 210
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.9682451253481892e-05,
      "loss": 1.2442,
      "step": 220
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.951532033426184e-05,
      "loss": 1.149,
      "step": 230
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.9348189415041785e-05,
      "loss": 1.1348,
      "step": 240
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.9181058495821726e-05,
      "loss": 1.0605,
      "step": 250
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.901392757660167e-05,
      "loss": 1.0612,
      "step": 260
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.884679665738162e-05,
      "loss": 1.0456,
      "step": 270
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.867966573816156e-05,
      "loss": 0.9838,
      "step": 280
    },
    {
      "epoch": 0.73,
      "learning_rate": 2.8512534818941504e-05,
      "loss": 1.006,
      "step": 290
    },
    {
      "epoch": 0.75,
      "learning_rate": 2.834540389972145e-05,
      "loss": 0.9104,
      "step": 300
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.8178272980501393e-05,
      "loss": 0.9237,
      "step": 310
    },
    {
      "epoch": 0.8,
      "learning_rate": 2.8011142061281338e-05,
      "loss": 0.8698,
      "step": 320
    },
    {
      "epoch": 0.83,
      "learning_rate": 2.7844011142061283e-05,
      "loss": 0.8832,
      "step": 330
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.7676880222841227e-05,
      "loss": 0.8519,
      "step": 340
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.750974930362117e-05,
      "loss": 0.7448,
      "step": 350
    },
    {
      "epoch": 0.9,
      "learning_rate": 2.7342618384401116e-05,
      "loss": 0.752,
      "step": 360
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.717548746518106e-05,
      "loss": 0.7493,
      "step": 370
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.7008356545961002e-05,
      "loss": 0.7061,
      "step": 380
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.6841225626740946e-05,
      "loss": 0.6433,
      "step": 390
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9111503383348043,
      "eval_loss": 0.49785539507865906,
      "eval_runtime": 18.7306,
      "eval_samples_per_second": 362.935,
      "eval_steps_per_second": 11.372,
      "step": 399
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.6674094707520894e-05,
      "loss": 0.6448,
      "step": 400
    },
    {
      "epoch": 1.03,
      "learning_rate": 2.6506963788300836e-05,
      "loss": 0.6268,
      "step": 410
    },
    {
      "epoch": 1.05,
      "learning_rate": 2.633983286908078e-05,
      "loss": 0.5355,
      "step": 420
    },
    {
      "epoch": 1.08,
      "learning_rate": 2.6172701949860725e-05,
      "loss": 0.5586,
      "step": 430
    },
    {
      "epoch": 1.1,
      "learning_rate": 2.600557103064067e-05,
      "loss": 0.5473,
      "step": 440
    },
    {
      "epoch": 1.13,
      "learning_rate": 2.5838440111420614e-05,
      "loss": 0.5302,
      "step": 450
    },
    {
      "epoch": 1.15,
      "learning_rate": 2.567130919220056e-05,
      "loss": 0.4767,
      "step": 460
    },
    {
      "epoch": 1.18,
      "learning_rate": 2.55041782729805e-05,
      "loss": 0.4598,
      "step": 470
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.5337047353760448e-05,
      "loss": 0.4979,
      "step": 480
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.5169916434540392e-05,
      "loss": 0.4271,
      "step": 490
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.5002785515320333e-05,
      "loss": 0.4215,
      "step": 500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.4835654596100278e-05,
      "loss": 0.4576,
      "step": 510
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.4668523676880226e-05,
      "loss": 0.3883,
      "step": 520
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.4501392757660167e-05,
      "loss": 0.4202,
      "step": 530
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.433426183844011e-05,
      "loss": 0.3698,
      "step": 540
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.4167130919220056e-05,
      "loss": 0.3846,
      "step": 550
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.4e-05,
      "loss": 0.35,
      "step": 560
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.3832869080779945e-05,
      "loss": 0.3472,
      "step": 570
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.366573816155989e-05,
      "loss": 0.3518,
      "step": 580
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.349860724233983e-05,
      "loss": 0.3614,
      "step": 590
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.333147632311978e-05,
      "loss": 0.3199,
      "step": 600
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.3164345403899723e-05,
      "loss": 0.3179,
      "step": 610
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.2997214484679665e-05,
      "loss": 0.3151,
      "step": 620
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.283008356545961e-05,
      "loss": 0.3121,
      "step": 630
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.2662952646239557e-05,
      "loss": 0.288,
      "step": 640
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2495821727019498e-05,
      "loss": 0.2595,
      "step": 650
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2328690807799443e-05,
      "loss": 0.2745,
      "step": 660
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.2161559888579387e-05,
      "loss": 0.2816,
      "step": 670
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1994428969359335e-05,
      "loss": 0.2574,
      "step": 680
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1827298050139276e-05,
      "loss": 0.2606,
      "step": 690
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.166016713091922e-05,
      "loss": 0.2881,
      "step": 700
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.1493036211699166e-05,
      "loss": 0.2807,
      "step": 710
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.1325905292479107e-05,
      "loss": 0.2641,
      "step": 720
    },
    {
      "epoch": 1.83,
      "learning_rate": 2.1158774373259055e-05,
      "loss": 0.2743,
      "step": 730
    },
    {
      "epoch": 1.85,
      "learning_rate": 2.0991643454039e-05,
      "loss": 0.2688,
      "step": 740
    },
    {
      "epoch": 1.88,
      "learning_rate": 2.082451253481894e-05,
      "loss": 0.2546,
      "step": 750
    },
    {
      "epoch": 1.9,
      "learning_rate": 2.0657381615598885e-05,
      "loss": 0.2664,
      "step": 760
    },
    {
      "epoch": 1.93,
      "learning_rate": 2.0490250696378833e-05,
      "loss": 0.2628,
      "step": 770
    },
    {
      "epoch": 1.95,
      "learning_rate": 2.0323119777158774e-05,
      "loss": 0.2427,
      "step": 780
    },
    {
      "epoch": 1.98,
      "learning_rate": 2.015598885793872e-05,
      "loss": 0.2406,
      "step": 790
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9749926448955575,
      "eval_loss": 0.14553038775920868,
      "eval_runtime": 18.8357,
      "eval_samples_per_second": 360.91,
      "eval_steps_per_second": 11.308,
      "step": 798
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.9988857938718663e-05,
      "loss": 0.2327,
      "step": 800
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.9821727019498608e-05,
      "loss": 0.2697,
      "step": 810
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.9654596100278552e-05,
      "loss": 0.2287,
      "step": 820
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.9487465181058497e-05,
      "loss": 0.2169,
      "step": 830
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.9320334261838438e-05,
      "loss": 0.1917,
      "step": 840
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.9153203342618386e-05,
      "loss": 0.2033,
      "step": 850
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.898607242339833e-05,
      "loss": 0.2353,
      "step": 860
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.8818941504178272e-05,
      "loss": 0.2046,
      "step": 870
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.8651810584958216e-05,
      "loss": 0.2137,
      "step": 880
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.8484679665738164e-05,
      "loss": 0.2059,
      "step": 890
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.8317548746518105e-05,
      "loss": 0.2195,
      "step": 900
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.815041782729805e-05,
      "loss": 0.2132,
      "step": 910
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.7983286908077995e-05,
      "loss": 0.1939,
      "step": 920
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.781615598885794e-05,
      "loss": 0.1882,
      "step": 930
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.7649025069637884e-05,
      "loss": 0.1941,
      "step": 940
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.7481894150417828e-05,
      "loss": 0.1961,
      "step": 950
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.731476323119777e-05,
      "loss": 0.198,
      "step": 960
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.7147632311977717e-05,
      "loss": 0.1957,
      "step": 970
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.6980501392757662e-05,
      "loss": 0.2086,
      "step": 980
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.6813370473537606e-05,
      "loss": 0.192,
      "step": 990
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.6646239554317548e-05,
      "loss": 0.1926,
      "step": 1000
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.6479108635097496e-05,
      "loss": 0.2244,
      "step": 1010
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.631197771587744e-05,
      "loss": 0.2097,
      "step": 1020
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.614484679665738e-05,
      "loss": 0.1986,
      "step": 1030
    },
    {
      "epoch": 2.6,
      "learning_rate": 1.5977715877437326e-05,
      "loss": 0.1777,
      "step": 1040
    },
    {
      "epoch": 2.63,
      "learning_rate": 1.581058495821727e-05,
      "loss": 0.1772,
      "step": 1050
    },
    {
      "epoch": 2.65,
      "learning_rate": 1.5643454038997215e-05,
      "loss": 0.1777,
      "step": 1060
    },
    {
      "epoch": 2.68,
      "learning_rate": 1.547632311977716e-05,
      "loss": 0.1999,
      "step": 1070
    },
    {
      "epoch": 2.71,
      "learning_rate": 1.5309192200557104e-05,
      "loss": 0.1833,
      "step": 1080
    },
    {
      "epoch": 2.73,
      "learning_rate": 1.5142061281337047e-05,
      "loss": 0.1899,
      "step": 1090
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.4974930362116992e-05,
      "loss": 0.1642,
      "step": 1100
    },
    {
      "epoch": 2.78,
      "learning_rate": 1.4807799442896936e-05,
      "loss": 0.2343,
      "step": 1110
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.464066852367688e-05,
      "loss": 0.1889,
      "step": 1120
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.4473537604456825e-05,
      "loss": 0.1752,
      "step": 1130
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.4306406685236768e-05,
      "loss": 0.1787,
      "step": 1140
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.4139275766016714e-05,
      "loss": 0.2027,
      "step": 1150
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.3972144846796657e-05,
      "loss": 0.1712,
      "step": 1160
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.3805013927576602e-05,
      "loss": 0.1779,
      "step": 1170
    },
    {
      "epoch": 2.96,
      "learning_rate": 1.3637883008356546e-05,
      "loss": 0.1916,
      "step": 1180
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.3470752089136491e-05,
      "loss": 0.1563,
      "step": 1190
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9785230950279494,
      "eval_loss": 0.10318083316087723,
      "eval_runtime": 18.9658,
      "eval_samples_per_second": 358.435,
      "eval_steps_per_second": 11.231,
      "step": 1197
    },
    {
      "epoch": 3.01,
      "learning_rate": 1.3303621169916434e-05,
      "loss": 0.1839,
      "step": 1200
    },
    {
      "epoch": 3.03,
      "learning_rate": 1.313649025069638e-05,
      "loss": 0.1921,
      "step": 1210
    },
    {
      "epoch": 3.06,
      "learning_rate": 1.2969359331476323e-05,
      "loss": 0.1651,
      "step": 1220
    },
    {
      "epoch": 3.08,
      "learning_rate": 1.2802228412256267e-05,
      "loss": 0.1636,
      "step": 1230
    },
    {
      "epoch": 3.11,
      "learning_rate": 1.2635097493036212e-05,
      "loss": 0.1595,
      "step": 1240
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.2467966573816157e-05,
      "loss": 0.2028,
      "step": 1250
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.2300835654596101e-05,
      "loss": 0.1564,
      "step": 1260
    },
    {
      "epoch": 3.18,
      "learning_rate": 1.2133704735376046e-05,
      "loss": 0.1675,
      "step": 1270
    },
    {
      "epoch": 3.21,
      "learning_rate": 1.1966573816155989e-05,
      "loss": 0.1746,
      "step": 1280
    },
    {
      "epoch": 3.23,
      "learning_rate": 1.1799442896935935e-05,
      "loss": 0.1726,
      "step": 1290
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.1632311977715878e-05,
      "loss": 0.135,
      "step": 1300
    },
    {
      "epoch": 3.28,
      "learning_rate": 1.1465181058495822e-05,
      "loss": 0.1478,
      "step": 1310
    },
    {
      "epoch": 3.31,
      "learning_rate": 1.1298050139275767e-05,
      "loss": 0.1791,
      "step": 1320
    },
    {
      "epoch": 3.33,
      "learning_rate": 1.1130919220055711e-05,
      "loss": 0.1824,
      "step": 1330
    },
    {
      "epoch": 3.36,
      "learning_rate": 1.0963788300835654e-05,
      "loss": 0.1709,
      "step": 1340
    },
    {
      "epoch": 3.38,
      "learning_rate": 1.0796657381615599e-05,
      "loss": 0.1655,
      "step": 1350
    },
    {
      "epoch": 3.41,
      "learning_rate": 1.0629526462395543e-05,
      "loss": 0.1356,
      "step": 1360
    },
    {
      "epoch": 3.43,
      "learning_rate": 1.0462395543175486e-05,
      "loss": 0.1452,
      "step": 1370
    },
    {
      "epoch": 3.46,
      "learning_rate": 1.0295264623955432e-05,
      "loss": 0.149,
      "step": 1380
    },
    {
      "epoch": 3.48,
      "learning_rate": 1.0128133704735375e-05,
      "loss": 0.1466,
      "step": 1390
    },
    {
      "epoch": 3.51,
      "learning_rate": 9.961002785515322e-06,
      "loss": 0.1198,
      "step": 1400
    },
    {
      "epoch": 3.53,
      "learning_rate": 9.793871866295264e-06,
      "loss": 0.1383,
      "step": 1410
    },
    {
      "epoch": 3.56,
      "learning_rate": 9.626740947075209e-06,
      "loss": 0.1593,
      "step": 1420
    },
    {
      "epoch": 3.58,
      "learning_rate": 9.459610027855154e-06,
      "loss": 0.188,
      "step": 1430
    },
    {
      "epoch": 3.61,
      "learning_rate": 9.292479108635098e-06,
      "loss": 0.1463,
      "step": 1440
    },
    {
      "epoch": 3.63,
      "learning_rate": 9.125348189415041e-06,
      "loss": 0.1626,
      "step": 1450
    },
    {
      "epoch": 3.66,
      "learning_rate": 8.958217270194987e-06,
      "loss": 0.1637,
      "step": 1460
    },
    {
      "epoch": 3.68,
      "learning_rate": 8.79108635097493e-06,
      "loss": 0.1591,
      "step": 1470
    },
    {
      "epoch": 3.71,
      "learning_rate": 8.623955431754875e-06,
      "loss": 0.1514,
      "step": 1480
    },
    {
      "epoch": 3.73,
      "learning_rate": 8.45682451253482e-06,
      "loss": 0.1432,
      "step": 1490
    },
    {
      "epoch": 3.76,
      "learning_rate": 8.289693593314764e-06,
      "loss": 0.1575,
      "step": 1500
    },
    {
      "epoch": 3.78,
      "learning_rate": 8.122562674094707e-06,
      "loss": 0.16,
      "step": 1510
    },
    {
      "epoch": 3.81,
      "learning_rate": 7.955431754874653e-06,
      "loss": 0.1492,
      "step": 1520
    },
    {
      "epoch": 3.83,
      "learning_rate": 7.788300835654596e-06,
      "loss": 0.1553,
      "step": 1530
    },
    {
      "epoch": 3.86,
      "learning_rate": 7.621169916434541e-06,
      "loss": 0.1218,
      "step": 1540
    },
    {
      "epoch": 3.88,
      "learning_rate": 7.454038997214485e-06,
      "loss": 0.1777,
      "step": 1550
    },
    {
      "epoch": 3.91,
      "learning_rate": 7.2869080779944286e-06,
      "loss": 0.1677,
      "step": 1560
    },
    {
      "epoch": 3.93,
      "learning_rate": 7.119777158774373e-06,
      "loss": 0.1675,
      "step": 1570
    },
    {
      "epoch": 3.96,
      "learning_rate": 6.952646239554318e-06,
      "loss": 0.1451,
      "step": 1580
    },
    {
      "epoch": 3.98,
      "learning_rate": 6.785515320334261e-06,
      "loss": 0.1144,
      "step": 1590
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9805825242718447,
      "eval_loss": 0.0919189527630806,
      "eval_runtime": 18.771,
      "eval_samples_per_second": 362.154,
      "eval_steps_per_second": 11.347,
      "step": 1597
    },
    {
      "epoch": 4.01,
      "learning_rate": 6.618384401114206e-06,
      "loss": 0.135,
      "step": 1600
    },
    {
      "epoch": 4.03,
      "learning_rate": 6.4512534818941505e-06,
      "loss": 0.1666,
      "step": 1610
    },
    {
      "epoch": 4.06,
      "learning_rate": 6.284122562674095e-06,
      "loss": 0.1546,
      "step": 1620
    },
    {
      "epoch": 4.08,
      "learning_rate": 6.116991643454039e-06,
      "loss": 0.1353,
      "step": 1630
    },
    {
      "epoch": 4.11,
      "learning_rate": 5.949860724233983e-06,
      "loss": 0.1515,
      "step": 1640
    },
    {
      "epoch": 4.13,
      "learning_rate": 5.782729805013928e-06,
      "loss": 0.1246,
      "step": 1650
    },
    {
      "epoch": 4.16,
      "learning_rate": 5.615598885793872e-06,
      "loss": 0.1342,
      "step": 1660
    },
    {
      "epoch": 4.18,
      "learning_rate": 5.448467966573816e-06,
      "loss": 0.1315,
      "step": 1670
    },
    {
      "epoch": 4.21,
      "learning_rate": 5.281337047353761e-06,
      "loss": 0.1571,
      "step": 1680
    },
    {
      "epoch": 4.23,
      "learning_rate": 5.114206128133705e-06,
      "loss": 0.1401,
      "step": 1690
    },
    {
      "epoch": 4.26,
      "learning_rate": 4.947075208913649e-06,
      "loss": 0.152,
      "step": 1700
    },
    {
      "epoch": 4.28,
      "learning_rate": 4.7799442896935936e-06,
      "loss": 0.1186,
      "step": 1710
    },
    {
      "epoch": 4.31,
      "learning_rate": 4.612813370473538e-06,
      "loss": 0.159,
      "step": 1720
    },
    {
      "epoch": 4.33,
      "learning_rate": 4.445682451253482e-06,
      "loss": 0.1377,
      "step": 1730
    },
    {
      "epoch": 4.36,
      "learning_rate": 4.278551532033426e-06,
      "loss": 0.155,
      "step": 1740
    },
    {
      "epoch": 4.38,
      "learning_rate": 4.111420612813371e-06,
      "loss": 0.1214,
      "step": 1750
    },
    {
      "epoch": 4.41,
      "learning_rate": 3.944289693593315e-06,
      "loss": 0.1368,
      "step": 1760
    },
    {
      "epoch": 4.43,
      "learning_rate": 3.7771587743732592e-06,
      "loss": 0.1499,
      "step": 1770
    },
    {
      "epoch": 4.46,
      "learning_rate": 3.6100278551532034e-06,
      "loss": 0.1257,
      "step": 1780
    },
    {
      "epoch": 4.48,
      "learning_rate": 3.4428969359331475e-06,
      "loss": 0.1457,
      "step": 1790
    },
    {
      "epoch": 4.51,
      "learning_rate": 3.275766016713092e-06,
      "loss": 0.1239,
      "step": 1800
    },
    {
      "epoch": 4.53,
      "learning_rate": 3.108635097493036e-06,
      "loss": 0.1239,
      "step": 1810
    },
    {
      "epoch": 4.56,
      "learning_rate": 2.9415041782729803e-06,
      "loss": 0.151,
      "step": 1820
    },
    {
      "epoch": 4.58,
      "learning_rate": 2.774373259052925e-06,
      "loss": 0.1166,
      "step": 1830
    },
    {
      "epoch": 4.61,
      "learning_rate": 2.607242339832869e-06,
      "loss": 0.1388,
      "step": 1840
    },
    {
      "epoch": 4.63,
      "learning_rate": 2.4401114206128136e-06,
      "loss": 0.1517,
      "step": 1850
    },
    {
      "epoch": 4.66,
      "learning_rate": 2.2729805013927577e-06,
      "loss": 0.161,
      "step": 1860
    },
    {
      "epoch": 4.68,
      "learning_rate": 2.1058495821727023e-06,
      "loss": 0.128,
      "step": 1870
    },
    {
      "epoch": 4.71,
      "learning_rate": 1.9387186629526464e-06,
      "loss": 0.1562,
      "step": 1880
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.7715877437325906e-06,
      "loss": 0.1157,
      "step": 1890
    },
    {
      "epoch": 4.76,
      "learning_rate": 1.604456824512535e-06,
      "loss": 0.1409,
      "step": 1900
    },
    {
      "epoch": 4.78,
      "learning_rate": 1.437325905292479e-06,
      "loss": 0.1168,
      "step": 1910
    },
    {
      "epoch": 4.81,
      "learning_rate": 1.2701949860724234e-06,
      "loss": 0.144,
      "step": 1920
    },
    {
      "epoch": 4.83,
      "learning_rate": 1.1030640668523677e-06,
      "loss": 0.1135,
      "step": 1930
    },
    {
      "epoch": 4.86,
      "learning_rate": 9.35933147632312e-07,
      "loss": 0.1288,
      "step": 1940
    },
    {
      "epoch": 4.88,
      "learning_rate": 7.688022284122563e-07,
      "loss": 0.1104,
      "step": 1950
    },
    {
      "epoch": 4.91,
      "learning_rate": 6.016713091922006e-07,
      "loss": 0.1359,
      "step": 1960
    },
    {
      "epoch": 4.93,
      "learning_rate": 4.3454038997214486e-07,
      "loss": 0.1234,
      "step": 1970
    },
    {
      "epoch": 4.96,
      "learning_rate": 2.6740947075208915e-07,
      "loss": 0.1325,
      "step": 1980
    },
    {
      "epoch": 4.98,
      "learning_rate": 1.0027855153203343e-07,
      "loss": 0.1254,
      "step": 1990
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.980876728449544,
      "eval_loss": 0.08725827932357788,
      "eval_runtime": 20.0033,
      "eval_samples_per_second": 339.844,
      "eval_steps_per_second": 10.648,
      "step": 1995
    },
    {
      "epoch": 5.0,
      "step": 1995,
      "total_flos": 2.3180194781952e+18,
      "train_loss": 0.44099008884645047,
      "train_runtime": 2246.0139,
      "train_samples_per_second": 113.744,
      "train_steps_per_second": 0.888
    }
  ],
  "max_steps": 1995,
  "num_train_epochs": 5,
  "total_flos": 2.3180194781952e+18,
  "trial_name": null,
  "trial_params": null
}