{
  "best_metric": 0.9819064430714917,
  "best_model_checkpoint": "hubert-base-ft-keyword-spotting/checkpoint-1995",
  "epoch": 4.999373825923607,
  "global_step": 1995,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 1.5e-06,
      "loss": 2.4768,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 3e-06,
      "loss": 2.4615,
      "step": 20
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.5e-06,
      "loss": 2.4135,
      "step": 30
    },
    {
      "epoch": 0.1,
      "learning_rate": 6e-06,
      "loss": 2.3143,
      "step": 40
    },
    {
      "epoch": 0.13,
      "learning_rate": 7.5e-06,
      "loss": 2.1904,
      "step": 50
    },
    {
      "epoch": 0.15,
      "learning_rate": 9e-06,
      "loss": 2.047,
      "step": 60
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.05e-05,
      "loss": 1.9046,
      "step": 70
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.2e-05,
      "loss": 1.8282,
      "step": 80
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 1.7283,
      "step": 90
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.5e-05,
      "loss": 1.7102,
      "step": 100
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.65e-05,
      "loss": 1.5944,
      "step": 110
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8e-05,
      "loss": 1.5268,
      "step": 120
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.95e-05,
      "loss": 1.5128,
      "step": 130
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.1e-05,
      "loss": 1.5108,
      "step": 140
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.25e-05,
      "loss": 1.5206,
      "step": 150
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.4e-05,
      "loss": 1.5091,
      "step": 160
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.55e-05,
      "loss": 1.4353,
      "step": 170
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 1.5367,
      "step": 180
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.8499999999999998e-05,
      "loss": 1.5555,
      "step": 190
    },
    {
      "epoch": 0.5,
      "learning_rate": 3e-05,
      "loss": 1.5275,
      "step": 200
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.9832869080779945e-05,
      "loss": 1.5864,
      "step": 210
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.9665738161559886e-05,
      "loss": 1.55,
      "step": 220
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.9498607242339834e-05,
      "loss": 1.4591,
      "step": 230
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.933147632311978e-05,
      "loss": 1.4969,
      "step": 240
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.916434540389972e-05,
      "loss": 1.4884,
      "step": 250
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.8997214484679665e-05,
      "loss": 1.5086,
      "step": 260
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.8830083565459613e-05,
      "loss": 1.5097,
      "step": 270
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.8662952646239554e-05,
      "loss": 1.4455,
      "step": 280
    },
    {
      "epoch": 0.73,
      "learning_rate": 2.84958217270195e-05,
      "loss": 1.5155,
      "step": 290
    },
    {
      "epoch": 0.75,
      "learning_rate": 2.8328690807799443e-05,
      "loss": 1.3772,
      "step": 300
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.8161559888579388e-05,
      "loss": 1.4063,
      "step": 310
    },
    {
      "epoch": 0.8,
      "learning_rate": 2.7994428969359332e-05,
      "loss": 1.3666,
      "step": 320
    },
    {
      "epoch": 0.83,
      "learning_rate": 2.7827298050139277e-05,
      "loss": 1.3803,
      "step": 330
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.7660167130919218e-05,
      "loss": 1.3257,
      "step": 340
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.7493036211699166e-05,
      "loss": 1.1917,
      "step": 350
    },
    {
      "epoch": 0.9,
      "learning_rate": 2.732590529247911e-05,
      "loss": 1.1957,
      "step": 360
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.7158774373259055e-05,
      "loss": 1.164,
      "step": 370
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.6991643454038996e-05,
      "loss": 1.1043,
      "step": 380
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.6824512534818944e-05,
      "loss": 1.0422,
      "step": 390
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.6918211238599589,
      "eval_loss": 0.8998974561691284,
      "eval_runtime": 13.5007,
      "eval_samples_per_second": 503.53,
      "eval_steps_per_second": 15.777,
      "step": 399
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.665738161559889e-05,
      "loss": 1.0979,
      "step": 400
    },
    {
      "epoch": 1.03,
      "learning_rate": 2.649025069637883e-05,
      "loss": 0.9165,
      "step": 410
    },
    {
      "epoch": 1.05,
      "learning_rate": 2.6323119777158774e-05,
      "loss": 0.9599,
      "step": 420
    },
    {
      "epoch": 1.08,
      "learning_rate": 2.6155988857938722e-05,
      "loss": 0.88,
      "step": 430
    },
    {
      "epoch": 1.1,
      "learning_rate": 2.5988857938718663e-05,
      "loss": 0.8921,
      "step": 440
    },
    {
      "epoch": 1.13,
      "learning_rate": 2.5821727019498608e-05,
      "loss": 0.8814,
      "step": 450
    },
    {
      "epoch": 1.15,
      "learning_rate": 2.5654596100278553e-05,
      "loss": 0.8321,
      "step": 460
    },
    {
      "epoch": 1.18,
      "learning_rate": 2.5487465181058497e-05,
      "loss": 0.7497,
      "step": 470
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.532033426183844e-05,
      "loss": 0.7318,
      "step": 480
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.5153203342618386e-05,
      "loss": 0.672,
      "step": 490
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.4986072423398327e-05,
      "loss": 0.6778,
      "step": 500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.4818941504178275e-05,
      "loss": 0.6688,
      "step": 510
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.465181058495822e-05,
      "loss": 0.6432,
      "step": 520
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.448467966573816e-05,
      "loss": 0.615,
      "step": 530
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.4317548746518106e-05,
      "loss": 0.5814,
      "step": 540
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.415041782729805e-05,
      "loss": 0.527,
      "step": 550
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.3983286908077995e-05,
      "loss": 0.5114,
      "step": 560
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.381615598885794e-05,
      "loss": 0.5298,
      "step": 570
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.3649025069637884e-05,
      "loss": 0.5075,
      "step": 580
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.3481894150417825e-05,
      "loss": 0.4495,
      "step": 590
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.3314763231197773e-05,
      "loss": 0.4965,
      "step": 600
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.3147632311977718e-05,
      "loss": 0.4944,
      "step": 610
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.298050139275766e-05,
      "loss": 0.4509,
      "step": 620
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.2813370473537603e-05,
      "loss": 0.4434,
      "step": 630
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.264623955431755e-05,
      "loss": 0.5139,
      "step": 640
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2479108635097492e-05,
      "loss": 0.4106,
      "step": 650
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2311977715877437e-05,
      "loss": 0.3633,
      "step": 660
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.214484679665738e-05,
      "loss": 0.3641,
      "step": 670
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1977715877437326e-05,
      "loss": 0.417,
      "step": 680
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.181058495821727e-05,
      "loss": 0.4298,
      "step": 690
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.1643454038997215e-05,
      "loss": 0.3578,
      "step": 700
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.147632311977716e-05,
      "loss": 0.3365,
      "step": 710
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.1309192200557104e-05,
      "loss": 0.3343,
      "step": 720
    },
    {
      "epoch": 1.83,
      "learning_rate": 2.114206128133705e-05,
      "loss": 0.3471,
      "step": 730
    },
    {
      "epoch": 1.85,
      "learning_rate": 2.0974930362116993e-05,
      "loss": 0.335,
      "step": 740
    },
    {
      "epoch": 1.88,
      "learning_rate": 2.0807799442896935e-05,
      "loss": 0.344,
      "step": 750
    },
    {
      "epoch": 1.9,
      "learning_rate": 2.0640668523676883e-05,
      "loss": 0.3352,
      "step": 760
    },
    {
      "epoch": 1.93,
      "learning_rate": 2.0473537604456827e-05,
      "loss": 0.2792,
      "step": 770
    },
    {
      "epoch": 1.95,
      "learning_rate": 2.0306406685236768e-05,
      "loss": 0.2965,
      "step": 780
    },
    {
      "epoch": 1.98,
      "learning_rate": 2.0139275766016713e-05,
      "loss": 0.3296,
      "step": 790
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9777875845837011,
      "eval_loss": 0.15050138533115387,
      "eval_runtime": 13.6335,
      "eval_samples_per_second": 498.624,
      "eval_steps_per_second": 15.623,
      "step": 798
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.997214484679666e-05,
      "loss": 0.3354,
      "step": 800
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.9805013927576602e-05,
      "loss": 0.3083,
      "step": 810
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.9637883008356546e-05,
      "loss": 0.2819,
      "step": 820
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.947075208913649e-05,
      "loss": 0.2958,
      "step": 830
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.9303621169916436e-05,
      "loss": 0.3153,
      "step": 840
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.913649025069638e-05,
      "loss": 0.2539,
      "step": 850
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.8969359331476325e-05,
      "loss": 0.2776,
      "step": 860
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.8802228412256266e-05,
      "loss": 0.259,
      "step": 870
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.863509749303621e-05,
      "loss": 0.2948,
      "step": 880
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.846796657381616e-05,
      "loss": 0.2331,
      "step": 890
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.83008356545961e-05,
      "loss": 0.2668,
      "step": 900
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.8133704735376044e-05,
      "loss": 0.2456,
      "step": 910
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.796657381615599e-05,
      "loss": 0.2146,
      "step": 920
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.7799442896935933e-05,
      "loss": 0.2564,
      "step": 930
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.7632311977715878e-05,
      "loss": 0.2372,
      "step": 940
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.7465181058495822e-05,
      "loss": 0.2695,
      "step": 950
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.7298050139275764e-05,
      "loss": 0.2716,
      "step": 960
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.713091922005571e-05,
      "loss": 0.244,
      "step": 970
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.6963788300835656e-05,
      "loss": 0.2562,
      "step": 980
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.6796657381615597e-05,
      "loss": 0.2863,
      "step": 990
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.6629526462395542e-05,
      "loss": 0.2448,
      "step": 1000
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.646239554317549e-05,
      "loss": 0.2321,
      "step": 1010
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.6295264623955434e-05,
      "loss": 0.242,
      "step": 1020
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.6128133704735375e-05,
      "loss": 0.272,
      "step": 1030
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.596100278551532e-05,
      "loss": 0.2391,
      "step": 1040
    },
    {
      "epoch": 2.63,
      "learning_rate": 1.5793871866295268e-05,
      "loss": 0.2109,
      "step": 1050
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.562674094707521e-05,
      "loss": 0.2091,
      "step": 1060
    },
    {
      "epoch": 2.68,
      "learning_rate": 1.5459610027855154e-05,
      "loss": 0.2335,
      "step": 1070
    },
    {
      "epoch": 2.71,
      "learning_rate": 1.5292479108635098e-05,
      "loss": 0.1772,
      "step": 1080
    },
    {
      "epoch": 2.73,
      "learning_rate": 1.5125348189415043e-05,
      "loss": 0.2452,
      "step": 1090
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.4958217270194987e-05,
      "loss": 0.2707,
      "step": 1100
    },
    {
      "epoch": 2.78,
      "learning_rate": 1.479108635097493e-05,
      "loss": 0.2312,
      "step": 1110
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.4623955431754876e-05,
      "loss": 0.1965,
      "step": 1120
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.445682451253482e-05,
      "loss": 0.2222,
      "step": 1130
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.4289693593314764e-05,
      "loss": 0.2182,
      "step": 1140
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.4122562674094708e-05,
      "loss": 0.1983,
      "step": 1150
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.3955431754874653e-05,
      "loss": 0.1875,
      "step": 1160
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.3788300835654596e-05,
      "loss": 0.2274,
      "step": 1170
    },
    {
      "epoch": 2.96,
      "learning_rate": 1.362116991643454e-05,
      "loss": 0.205,
      "step": 1180
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.3454038997214485e-05,
      "loss": 0.2088,
      "step": 1190
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9816122388937923,
      "eval_loss": 0.09009595215320587,
      "eval_runtime": 13.6443,
      "eval_samples_per_second": 498.229,
      "eval_steps_per_second": 15.611,
      "step": 1197
    },
    {
      "epoch": 3.01,
      "learning_rate": 1.3286908077994428e-05,
      "loss": 0.2213,
      "step": 1200
    },
    {
      "epoch": 3.03,
      "learning_rate": 1.3119777158774374e-05,
      "loss": 0.231,
      "step": 1210
    },
    {
      "epoch": 3.06,
      "learning_rate": 1.2952646239554317e-05,
      "loss": 0.1848,
      "step": 1220
    },
    {
      "epoch": 3.08,
      "learning_rate": 1.2785515320334262e-05,
      "loss": 0.1874,
      "step": 1230
    },
    {
      "epoch": 3.11,
      "learning_rate": 1.2618384401114206e-05,
      "loss": 0.1669,
      "step": 1240
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.245125348189415e-05,
      "loss": 0.1892,
      "step": 1250
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.2284122562674095e-05,
      "loss": 0.2161,
      "step": 1260
    },
    {
      "epoch": 3.18,
      "learning_rate": 1.211699164345404e-05,
      "loss": 0.2098,
      "step": 1270
    },
    {
      "epoch": 3.21,
      "learning_rate": 1.1949860724233983e-05,
      "loss": 0.1857,
      "step": 1280
    },
    {
      "epoch": 3.23,
      "learning_rate": 1.1782729805013929e-05,
      "loss": 0.2393,
      "step": 1290
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.1615598885793872e-05,
      "loss": 0.2078,
      "step": 1300
    },
    {
      "epoch": 3.28,
      "learning_rate": 1.1448467966573816e-05,
      "loss": 0.2179,
      "step": 1310
    },
    {
      "epoch": 3.31,
      "learning_rate": 1.1281337047353761e-05,
      "loss": 0.1823,
      "step": 1320
    },
    {
      "epoch": 3.33,
      "learning_rate": 1.1114206128133705e-05,
      "loss": 0.2193,
      "step": 1330
    },
    {
      "epoch": 3.36,
      "learning_rate": 1.0947075208913648e-05,
      "loss": 0.1705,
      "step": 1340
    },
    {
      "epoch": 3.38,
      "learning_rate": 1.0779944289693595e-05,
      "loss": 0.2366,
      "step": 1350
    },
    {
      "epoch": 3.41,
      "learning_rate": 1.0612813370473537e-05,
      "loss": 0.1765,
      "step": 1360
    },
    {
      "epoch": 3.43,
      "learning_rate": 1.0445682451253482e-05,
      "loss": 0.1986,
      "step": 1370
    },
    {
      "epoch": 3.46,
      "learning_rate": 1.0278551532033427e-05,
      "loss": 0.257,
      "step": 1380
    },
    {
      "epoch": 3.48,
      "learning_rate": 1.0111420612813371e-05,
      "loss": 0.2109,
      "step": 1390
    },
    {
      "epoch": 3.51,
      "learning_rate": 9.944289693593314e-06,
      "loss": 0.1744,
      "step": 1400
    },
    {
      "epoch": 3.53,
      "learning_rate": 9.77715877437326e-06,
      "loss": 0.1824,
      "step": 1410
    },
    {
      "epoch": 3.56,
      "learning_rate": 9.610027855153203e-06,
      "loss": 0.1797,
      "step": 1420
    },
    {
      "epoch": 3.58,
      "learning_rate": 9.44289693593315e-06,
      "loss": 0.1616,
      "step": 1430
    },
    {
      "epoch": 3.61,
      "learning_rate": 9.275766016713092e-06,
      "loss": 0.1654,
      "step": 1440
    },
    {
      "epoch": 3.63,
      "learning_rate": 9.108635097493037e-06,
      "loss": 0.2256,
      "step": 1450
    },
    {
      "epoch": 3.66,
      "learning_rate": 8.941504178272981e-06,
      "loss": 0.1667,
      "step": 1460
    },
    {
      "epoch": 3.68,
      "learning_rate": 8.774373259052926e-06,
      "loss": 0.1817,
      "step": 1470
    },
    {
      "epoch": 3.71,
      "learning_rate": 8.607242339832869e-06,
      "loss": 0.1801,
      "step": 1480
    },
    {
      "epoch": 3.73,
      "learning_rate": 8.440111420612815e-06,
      "loss": 0.187,
      "step": 1490
    },
    {
      "epoch": 3.76,
      "learning_rate": 8.272980501392758e-06,
      "loss": 0.2072,
      "step": 1500
    },
    {
      "epoch": 3.78,
      "learning_rate": 8.1058495821727e-06,
      "loss": 0.2132,
      "step": 1510
    },
    {
      "epoch": 3.81,
      "learning_rate": 7.938718662952647e-06,
      "loss": 0.165,
      "step": 1520
    },
    {
      "epoch": 3.83,
      "learning_rate": 7.77158774373259e-06,
      "loss": 0.1905,
      "step": 1530
    },
    {
      "epoch": 3.86,
      "learning_rate": 7.604456824512535e-06,
      "loss": 0.223,
      "step": 1540
    },
    {
      "epoch": 3.88,
      "learning_rate": 7.43732590529248e-06,
      "loss": 0.1503,
      "step": 1550
    },
    {
      "epoch": 3.91,
      "learning_rate": 7.2701949860724235e-06,
      "loss": 0.1976,
      "step": 1560
    },
    {
      "epoch": 3.93,
      "learning_rate": 7.103064066852368e-06,
      "loss": 0.1549,
      "step": 1570
    },
    {
      "epoch": 3.96,
      "learning_rate": 6.935933147632313e-06,
      "loss": 0.1742,
      "step": 1580
    },
    {
      "epoch": 3.98,
      "learning_rate": 6.768802228412256e-06,
      "loss": 0.202,
      "step": 1590
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.981318034716093,
      "eval_loss": 0.08481906354427338,
      "eval_runtime": 13.3788,
      "eval_samples_per_second": 508.116,
      "eval_steps_per_second": 15.921,
      "step": 1596
    },
    {
      "epoch": 4.01,
      "learning_rate": 6.601671309192201e-06,
      "loss": 0.2132,
      "step": 1600
    },
    {
      "epoch": 4.04,
      "learning_rate": 6.4345403899721455e-06,
      "loss": 0.2056,
      "step": 1610
    },
    {
      "epoch": 4.06,
      "learning_rate": 6.267409470752089e-06,
      "loss": 0.2143,
      "step": 1620
    },
    {
      "epoch": 4.09,
      "learning_rate": 6.100278551532034e-06,
      "loss": 0.166,
      "step": 1630
    },
    {
      "epoch": 4.11,
      "learning_rate": 5.933147632311978e-06,
      "loss": 0.1567,
      "step": 1640
    },
    {
      "epoch": 4.14,
      "learning_rate": 5.766016713091923e-06,
      "loss": 0.1975,
      "step": 1650
    },
    {
      "epoch": 4.16,
      "learning_rate": 5.598885793871867e-06,
      "loss": 0.1609,
      "step": 1660
    },
    {
      "epoch": 4.19,
      "learning_rate": 5.43175487465181e-06,
      "loss": 0.1639,
      "step": 1670
    },
    {
      "epoch": 4.21,
      "learning_rate": 5.264623955431755e-06,
      "loss": 0.1653,
      "step": 1680
    },
    {
      "epoch": 4.24,
      "learning_rate": 5.0974930362116986e-06,
      "loss": 0.1602,
      "step": 1690
    },
    {
      "epoch": 4.26,
      "learning_rate": 4.930362116991643e-06,
      "loss": 0.1981,
      "step": 1700
    },
    {
      "epoch": 4.29,
      "learning_rate": 4.763231197771588e-06,
      "loss": 0.2038,
      "step": 1710
    },
    {
      "epoch": 4.31,
      "learning_rate": 4.596100278551532e-06,
      "loss": 0.1774,
      "step": 1720
    },
    {
      "epoch": 4.34,
      "learning_rate": 4.428969359331476e-06,
      "loss": 0.1846,
      "step": 1730
    },
    {
      "epoch": 4.36,
      "learning_rate": 4.2618384401114205e-06,
      "loss": 0.177,
      "step": 1740
    },
    {
      "epoch": 4.39,
      "learning_rate": 4.094707520891365e-06,
      "loss": 0.2003,
      "step": 1750
    },
    {
      "epoch": 4.41,
      "learning_rate": 3.927576601671309e-06,
      "loss": 0.1663,
      "step": 1760
    },
    {
      "epoch": 4.44,
      "learning_rate": 3.7604456824512533e-06,
      "loss": 0.1597,
      "step": 1770
    },
    {
      "epoch": 4.46,
      "learning_rate": 3.593314763231198e-06,
      "loss": 0.1792,
      "step": 1780
    },
    {
      "epoch": 4.49,
      "learning_rate": 3.426183844011142e-06,
      "loss": 0.1468,
      "step": 1790
    },
    {
      "epoch": 4.51,
      "learning_rate": 3.259052924791086e-06,
      "loss": 0.1833,
      "step": 1800
    },
    {
      "epoch": 4.54,
      "learning_rate": 3.0919220055710307e-06,
      "loss": 0.1762,
      "step": 1810
    },
    {
      "epoch": 4.56,
      "learning_rate": 2.924791086350975e-06,
      "loss": 0.1917,
      "step": 1820
    },
    {
      "epoch": 4.59,
      "learning_rate": 2.7576601671309194e-06,
      "loss": 0.1682,
      "step": 1830
    },
    {
      "epoch": 4.61,
      "learning_rate": 2.5905292479108636e-06,
      "loss": 0.1443,
      "step": 1840
    },
    {
      "epoch": 4.64,
      "learning_rate": 2.4233983286908077e-06,
      "loss": 0.1304,
      "step": 1850
    },
    {
      "epoch": 4.66,
      "learning_rate": 2.2562674094707523e-06,
      "loss": 0.1729,
      "step": 1860
    },
    {
      "epoch": 4.69,
      "learning_rate": 2.0891364902506964e-06,
      "loss": 0.1517,
      "step": 1870
    },
    {
      "epoch": 4.71,
      "learning_rate": 1.922005571030641e-06,
      "loss": 0.1886,
      "step": 1880
    },
    {
      "epoch": 4.74,
      "learning_rate": 1.7548746518105849e-06,
      "loss": 0.1681,
      "step": 1890
    },
    {
      "epoch": 4.76,
      "learning_rate": 1.5877437325905292e-06,
      "loss": 0.1713,
      "step": 1900
    },
    {
      "epoch": 4.79,
      "learning_rate": 1.4206128133704736e-06,
      "loss": 0.2033,
      "step": 1910
    },
    {
      "epoch": 4.81,
      "learning_rate": 1.253481894150418e-06,
      "loss": 0.1564,
      "step": 1920
    },
    {
      "epoch": 4.84,
      "learning_rate": 1.0863509749303623e-06,
      "loss": 0.136,
      "step": 1930
    },
    {
      "epoch": 4.86,
      "learning_rate": 9.192200557103064e-07,
      "loss": 0.1679,
      "step": 1940
    },
    {
      "epoch": 4.89,
      "learning_rate": 7.520891364902508e-07,
      "loss": 0.1564,
      "step": 1950
    },
    {
      "epoch": 4.91,
      "learning_rate": 5.84958217270195e-07,
      "loss": 0.1618,
      "step": 1960
    },
    {
      "epoch": 4.94,
      "learning_rate": 4.178272980501393e-07,
      "loss": 0.164,
      "step": 1970
    },
    {
      "epoch": 4.96,
      "learning_rate": 2.506963788300836e-07,
      "loss": 0.1707,
      "step": 1980
    },
    {
      "epoch": 4.99,
      "learning_rate": 8.356545961002785e-08,
      "loss": 0.1535,
      "step": 1990
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.9819064430714917,
      "eval_loss": 0.07737560570240021,
      "eval_runtime": 13.6188,
      "eval_samples_per_second": 499.163,
      "eval_steps_per_second": 15.64,
      "step": 1995
    },
    {
      "epoch": 5.0,
      "step": 1995,
      "total_flos": 2.31918157475328e+18,
      "train_loss": 0.546848169783303,
      "train_runtime": 857.0555,
      "train_samples_per_second": 298.079,
      "train_steps_per_second": 2.328
    }
  ],
  "max_steps": 1995,
  "num_train_epochs": 5,
  "total_flos": 2.31918157475328e+18,
  "trial_name": null,
  "trial_params": null
}