{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.9890764647467725,
  "global_step": 2510,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 9.960238568588469e-05,
      "loss": 0.1445,
      "step": 10
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.924453280318093e-05,
      "loss": 0.3715,
      "step": 20
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.884691848906562e-05,
      "loss": 0.5201,
      "step": 30
    },
    {
      "epoch": 0.08,
      "learning_rate": 9.84493041749503e-05,
      "loss": 0.355,
      "step": 40
    },
    {
      "epoch": 0.1,
      "learning_rate": 9.805168986083499e-05,
      "loss": 0.4739,
      "step": 50
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.765407554671967e-05,
      "loss": 0.3795,
      "step": 60
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.725646123260437e-05,
      "loss": 0.2449,
      "step": 70
    },
    {
      "epoch": 0.16,
      "learning_rate": 9.685884691848906e-05,
      "loss": 0.2834,
      "step": 80
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.646123260437376e-05,
      "loss": 0.4803,
      "step": 90
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.606361829025845e-05,
      "loss": 0.4239,
      "step": 100
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.566600397614315e-05,
      "loss": 0.4822,
      "step": 110
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.530815109343937e-05,
      "loss": 0.6423,
      "step": 120
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.491053677932406e-05,
      "loss": 0.4146,
      "step": 130
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.451292246520876e-05,
      "loss": 0.3988,
      "step": 140
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.411530815109344e-05,
      "loss": 0.576,
      "step": 150
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.371769383697814e-05,
      "loss": 0.3698,
      "step": 160
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.332007952286283e-05,
      "loss": 0.4959,
      "step": 170
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.292246520874752e-05,
      "loss": 0.353,
      "step": 180
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.25248508946322e-05,
      "loss": 0.3502,
      "step": 190
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.21272365805169e-05,
      "loss": 0.2586,
      "step": 200
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.172962226640159e-05,
      "loss": 0.3487,
      "step": 210
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.133200795228629e-05,
      "loss": 0.2338,
      "step": 220
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.093439363817098e-05,
      "loss": 0.3604,
      "step": 230
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.053677932405566e-05,
      "loss": 0.1628,
      "step": 240
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.013916500994036e-05,
      "loss": 0.3106,
      "step": 250
    },
    {
      "epoch": 0.52,
      "learning_rate": 8.974155069582505e-05,
      "loss": 0.3068,
      "step": 260
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.934393638170975e-05,
      "loss": 0.273,
      "step": 270
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.894632206759443e-05,
      "loss": 0.1321,
      "step": 280
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.854870775347913e-05,
      "loss": 0.4806,
      "step": 290
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.815109343936382e-05,
      "loss": 0.1793,
      "step": 300
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.775347912524852e-05,
      "loss": 0.2027,
      "step": 310
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.735586481113321e-05,
      "loss": 0.3706,
      "step": 320
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.695825049701791e-05,
      "loss": 0.4457,
      "step": 330
    },
    {
      "epoch": 0.68,
      "learning_rate": 8.65606361829026e-05,
      "loss": 0.3953,
      "step": 340
    },
    {
      "epoch": 0.7,
      "learning_rate": 8.616302186878728e-05,
      "loss": 0.132,
      "step": 350
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.576540755467197e-05,
      "loss": 0.3439,
      "step": 360
    },
    {
      "epoch": 0.73,
      "learning_rate": 8.536779324055667e-05,
      "loss": 0.6228,
      "step": 370
    },
    {
      "epoch": 0.75,
      "learning_rate": 8.497017892644135e-05,
      "loss": 0.3251,
      "step": 380
    },
    {
      "epoch": 0.77,
      "learning_rate": 8.457256461232604e-05,
      "loss": 0.3342,
      "step": 390
    },
    {
      "epoch": 0.79,
      "learning_rate": 8.417495029821074e-05,
      "loss": 0.316,
      "step": 400
    },
    {
      "epoch": 0.81,
      "learning_rate": 8.377733598409543e-05,
      "loss": 0.2536,
      "step": 410
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.337972166998013e-05,
      "loss": 0.2428,
      "step": 420
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.298210735586481e-05,
      "loss": 0.1508,
      "step": 430
    },
    {
      "epoch": 0.87,
      "learning_rate": 8.258449304174951e-05,
      "loss": 0.2234,
      "step": 440
    },
    {
      "epoch": 0.89,
      "learning_rate": 8.21868787276342e-05,
      "loss": 0.3659,
      "step": 450
    },
    {
      "epoch": 0.91,
      "learning_rate": 8.17892644135189e-05,
      "loss": 0.2216,
      "step": 460
    },
    {
      "epoch": 0.93,
      "learning_rate": 8.139165009940358e-05,
      "loss": 0.3196,
      "step": 470
    },
    {
      "epoch": 0.95,
      "learning_rate": 8.099403578528827e-05,
      "loss": 0.3921,
      "step": 480
    },
    {
      "epoch": 0.97,
      "learning_rate": 8.059642147117297e-05,
      "loss": 0.3935,
      "step": 490
    },
    {
      "epoch": 0.99,
      "learning_rate": 8.019880715705766e-05,
      "loss": 0.1749,
      "step": 500
    },
    {
      "epoch": 0.99,
      "eval_accuracy": 0.9107142686843872,
      "eval_loss": 0.2552814781665802,
      "eval_runtime": 19.16,
      "eval_samples_per_second": 26.305,
      "eval_steps_per_second": 6.576,
      "step": 500
    },
    {
      "epoch": 1.01,
      "learning_rate": 7.980119284294236e-05,
      "loss": 0.3414,
      "step": 510
    },
    {
      "epoch": 1.03,
      "learning_rate": 7.940357852882704e-05,
      "loss": 0.2059,
      "step": 520
    },
    {
      "epoch": 1.05,
      "learning_rate": 7.900596421471174e-05,
      "loss": 0.1762,
      "step": 530
    },
    {
      "epoch": 1.07,
      "learning_rate": 7.860834990059643e-05,
      "loss": 0.118,
      "step": 540
    },
    {
      "epoch": 1.09,
      "learning_rate": 7.821073558648112e-05,
      "loss": 0.1524,
      "step": 550
    },
    {
      "epoch": 1.11,
      "learning_rate": 7.78131212723658e-05,
      "loss": 0.1576,
      "step": 560
    },
    {
      "epoch": 1.13,
      "learning_rate": 7.74155069582505e-05,
      "loss": 0.2975,
      "step": 570
    },
    {
      "epoch": 1.15,
      "learning_rate": 7.701789264413519e-05,
      "loss": 0.2985,
      "step": 580
    },
    {
      "epoch": 1.17,
      "learning_rate": 7.662027833001987e-05,
      "loss": 0.1437,
      "step": 590
    },
    {
      "epoch": 1.19,
      "learning_rate": 7.622266401590457e-05,
      "loss": 0.2702,
      "step": 600
    },
    {
      "epoch": 1.21,
      "learning_rate": 7.582504970178926e-05,
      "loss": 0.2913,
      "step": 610
    },
    {
      "epoch": 1.23,
      "learning_rate": 7.542743538767396e-05,
      "loss": 0.1741,
      "step": 620
    },
    {
      "epoch": 1.25,
      "learning_rate": 7.502982107355865e-05,
      "loss": 0.1987,
      "step": 630
    },
    {
      "epoch": 1.27,
      "learning_rate": 7.463220675944335e-05,
      "loss": 0.2568,
      "step": 640
    },
    {
      "epoch": 1.29,
      "learning_rate": 7.423459244532803e-05,
      "loss": 0.4236,
      "step": 650
    },
    {
      "epoch": 1.31,
      "learning_rate": 7.383697813121273e-05,
      "loss": 0.1788,
      "step": 660
    },
    {
      "epoch": 1.33,
      "learning_rate": 7.343936381709742e-05,
      "loss": 0.2973,
      "step": 670
    },
    {
      "epoch": 1.35,
      "learning_rate": 7.304174950298212e-05,
      "loss": 0.3244,
      "step": 680
    },
    {
      "epoch": 1.37,
      "learning_rate": 7.26441351888668e-05,
      "loss": 0.2661,
      "step": 690
    },
    {
      "epoch": 1.39,
      "learning_rate": 7.22465208747515e-05,
      "loss": 0.3832,
      "step": 700
    },
    {
      "epoch": 1.41,
      "learning_rate": 7.184890656063619e-05,
      "loss": 0.1453,
      "step": 710
    },
    {
      "epoch": 1.43,
      "learning_rate": 7.145129224652088e-05,
      "loss": 0.1034,
      "step": 720
    },
    {
      "epoch": 1.45,
      "learning_rate": 7.105367793240556e-05,
      "loss": 0.2906,
      "step": 730
    },
    {
      "epoch": 1.47,
      "learning_rate": 7.065606361829026e-05,
      "loss": 0.2115,
      "step": 740
    },
    {
      "epoch": 1.49,
      "learning_rate": 7.025844930417495e-05,
      "loss": 0.3433,
      "step": 750
    },
    {
      "epoch": 1.51,
      "learning_rate": 6.986083499005964e-05,
      "loss": 0.1599,
      "step": 760
    },
    {
      "epoch": 1.53,
      "learning_rate": 6.946322067594434e-05,
      "loss": 0.1725,
      "step": 770
    },
    {
      "epoch": 1.55,
      "learning_rate": 6.910536779324056e-05,
      "loss": 0.428,
      "step": 780
    },
    {
      "epoch": 1.57,
      "learning_rate": 6.870775347912525e-05,
      "loss": 0.1033,
      "step": 790
    },
    {
      "epoch": 1.59,
      "learning_rate": 6.831013916500995e-05,
      "loss": 0.314,
      "step": 800
    },
    {
      "epoch": 1.61,
      "learning_rate": 6.791252485089463e-05,
      "loss": 0.146,
      "step": 810
    },
    {
      "epoch": 1.63,
      "learning_rate": 6.751491053677933e-05,
      "loss": 0.181,
      "step": 820
    },
    {
      "epoch": 1.65,
      "learning_rate": 6.711729622266402e-05,
      "loss": 0.2174,
      "step": 830
    },
    {
      "epoch": 1.67,
      "learning_rate": 6.671968190854872e-05,
      "loss": 0.1458,
      "step": 840
    },
    {
      "epoch": 1.69,
      "learning_rate": 6.632206759443341e-05,
      "loss": 0.1477,
      "step": 850
    },
    {
      "epoch": 1.71,
      "learning_rate": 6.59244532803181e-05,
      "loss": 0.3046,
      "step": 860
    },
    {
      "epoch": 1.73,
      "learning_rate": 6.552683896620278e-05,
      "loss": 0.2796,
      "step": 870
    },
    {
      "epoch": 1.75,
      "learning_rate": 6.512922465208748e-05,
      "loss": 0.1284,
      "step": 880
    },
    {
      "epoch": 1.77,
      "learning_rate": 6.473161033797217e-05,
      "loss": 0.3014,
      "step": 890
    },
    {
      "epoch": 1.79,
      "learning_rate": 6.433399602385685e-05,
      "loss": 0.2031,
      "step": 900
    },
    {
      "epoch": 1.81,
      "learning_rate": 6.393638170974155e-05,
      "loss": 0.134,
      "step": 910
    },
    {
      "epoch": 1.83,
      "learning_rate": 6.353876739562624e-05,
      "loss": 0.2085,
      "step": 920
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.314115308151094e-05,
      "loss": 0.2496,
      "step": 930
    },
    {
      "epoch": 1.87,
      "learning_rate": 6.274353876739562e-05,
      "loss": 0.1984,
      "step": 940
    },
    {
      "epoch": 1.89,
      "learning_rate": 6.234592445328032e-05,
      "loss": 0.2887,
      "step": 950
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.194831013916501e-05,
      "loss": 0.1923,
      "step": 960
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.155069582504971e-05,
      "loss": 0.1373,
      "step": 970
    },
    {
      "epoch": 1.95,
      "learning_rate": 6.11530815109344e-05,
      "loss": 0.2089,
      "step": 980
    },
    {
      "epoch": 1.97,
      "learning_rate": 6.075546719681909e-05,
      "loss": 0.2066,
      "step": 990
    },
    {
      "epoch": 1.99,
      "learning_rate": 6.035785288270378e-05,
      "loss": 0.1106,
      "step": 1000
    },
    {
      "epoch": 1.99,
      "eval_accuracy": 0.932539701461792,
      "eval_loss": 0.25381073355674744,
      "eval_runtime": 20.2105,
      "eval_samples_per_second": 24.938,
      "eval_steps_per_second": 6.234,
      "step": 1000
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.996023856858847e-05,
      "loss": 0.125,
      "step": 1010
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.956262425447316e-05,
      "loss": 0.1804,
      "step": 1020
    },
    {
      "epoch": 2.05,
      "learning_rate": 5.916500994035785e-05,
      "loss": 0.0419,
      "step": 1030
    },
    {
      "epoch": 2.07,
      "learning_rate": 5.876739562624255e-05,
      "loss": 0.1823,
      "step": 1040
    },
    {
      "epoch": 2.09,
      "learning_rate": 5.8369781312127236e-05,
      "loss": 0.085,
      "step": 1050
    },
    {
      "epoch": 2.11,
      "learning_rate": 5.7972166998011936e-05,
      "loss": 0.5209,
      "step": 1060
    },
    {
      "epoch": 2.13,
      "learning_rate": 5.757455268389662e-05,
      "loss": 0.1744,
      "step": 1070
    },
    {
      "epoch": 2.15,
      "learning_rate": 5.717693836978132e-05,
      "loss": 0.1288,
      "step": 1080
    },
    {
      "epoch": 2.17,
      "learning_rate": 5.677932405566601e-05,
      "loss": 0.0729,
      "step": 1090
    },
    {
      "epoch": 2.19,
      "learning_rate": 5.63817097415507e-05,
      "loss": 0.1138,
      "step": 1100
    },
    {
      "epoch": 2.21,
      "learning_rate": 5.598409542743539e-05,
      "loss": 0.2507,
      "step": 1110
    },
    {
      "epoch": 2.23,
      "learning_rate": 5.5586481113320074e-05,
      "loss": 0.2038,
      "step": 1120
    },
    {
      "epoch": 2.25,
      "learning_rate": 5.5188866799204774e-05,
      "loss": 0.3174,
      "step": 1130
    },
    {
      "epoch": 2.27,
      "learning_rate": 5.479125248508946e-05,
      "loss": 0.2045,
      "step": 1140
    },
    {
      "epoch": 2.29,
      "learning_rate": 5.439363817097416e-05,
      "loss": 0.1042,
      "step": 1150
    },
    {
      "epoch": 2.31,
      "learning_rate": 5.399602385685885e-05,
      "loss": 0.0531,
      "step": 1160
    },
    {
      "epoch": 2.33,
      "learning_rate": 5.359840954274355e-05,
      "loss": 0.1353,
      "step": 1170
    },
    {
      "epoch": 2.35,
      "learning_rate": 5.320079522862823e-05,
      "loss": 0.3447,
      "step": 1180
    },
    {
      "epoch": 2.37,
      "learning_rate": 5.2803180914512926e-05,
      "loss": 0.1704,
      "step": 1190
    },
    {
      "epoch": 2.39,
      "learning_rate": 5.240556660039761e-05,
      "loss": 0.2148,
      "step": 1200
    },
    {
      "epoch": 2.41,
      "learning_rate": 5.200795228628231e-05,
      "loss": 0.222,
      "step": 1210
    },
    {
      "epoch": 2.43,
      "learning_rate": 5.1610337972167e-05,
      "loss": 0.0645,
      "step": 1220
    },
    {
      "epoch": 2.44,
      "learning_rate": 5.12127236580517e-05,
      "loss": 0.1284,
      "step": 1230
    },
    {
      "epoch": 2.46,
      "learning_rate": 5.0815109343936385e-05,
      "loss": 0.1021,
      "step": 1240
    },
    {
      "epoch": 2.48,
      "learning_rate": 5.041749502982107e-05,
      "loss": 0.1881,
      "step": 1250
    },
    {
      "epoch": 2.5,
      "learning_rate": 5.001988071570577e-05,
      "loss": 0.1158,
      "step": 1260
    },
    {
      "epoch": 2.52,
      "learning_rate": 4.9622266401590465e-05,
      "loss": 0.2495,
      "step": 1270
    },
    {
      "epoch": 2.54,
      "learning_rate": 4.922465208747515e-05,
      "loss": 0.0767,
      "step": 1280
    },
    {
      "epoch": 2.56,
      "learning_rate": 4.882703777335984e-05,
      "loss": 0.1401,
      "step": 1290
    },
    {
      "epoch": 2.58,
      "learning_rate": 4.842942345924453e-05,
      "loss": 0.1357,
      "step": 1300
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.8031809145129224e-05,
      "loss": 0.1962,
      "step": 1310
    },
    {
      "epoch": 2.62,
      "learning_rate": 4.763419483101392e-05,
      "loss": 0.1744,
      "step": 1320
    },
    {
      "epoch": 2.64,
      "learning_rate": 4.723658051689861e-05,
      "loss": 0.1815,
      "step": 1330
    },
    {
      "epoch": 2.66,
      "learning_rate": 4.68389662027833e-05,
      "loss": 0.275,
      "step": 1340
    },
    {
      "epoch": 2.68,
      "learning_rate": 4.6441351888667996e-05,
      "loss": 0.1725,
      "step": 1350
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.604373757455269e-05,
      "loss": 0.1071,
      "step": 1360
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.5646123260437376e-05,
      "loss": 0.0908,
      "step": 1370
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.524850894632207e-05,
      "loss": 0.0793,
      "step": 1380
    },
    {
      "epoch": 2.76,
      "learning_rate": 4.485089463220676e-05,
      "loss": 0.114,
      "step": 1390
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.4453280318091455e-05,
      "loss": 0.2036,
      "step": 1400
    },
    {
      "epoch": 2.8,
      "learning_rate": 4.405566600397614e-05,
      "loss": 0.0941,
      "step": 1410
    },
    {
      "epoch": 2.82,
      "learning_rate": 4.3658051689860835e-05,
      "loss": 0.1597,
      "step": 1420
    },
    {
      "epoch": 2.84,
      "learning_rate": 4.326043737574553e-05,
      "loss": 0.1476,
      "step": 1430
    },
    {
      "epoch": 2.86,
      "learning_rate": 4.286282306163022e-05,
      "loss": 0.1634,
      "step": 1440
    },
    {
      "epoch": 2.88,
      "learning_rate": 4.2465208747514914e-05,
      "loss": 0.3117,
      "step": 1450
    },
    {
      "epoch": 2.9,
      "learning_rate": 4.206759443339961e-05,
      "loss": 0.1822,
      "step": 1460
    },
    {
      "epoch": 2.92,
      "learning_rate": 4.1669980119284293e-05,
      "loss": 0.2559,
      "step": 1470
    },
    {
      "epoch": 2.94,
      "learning_rate": 4.1272365805168987e-05,
      "loss": 0.0924,
      "step": 1480
    },
    {
      "epoch": 2.96,
      "learning_rate": 4.087475149105368e-05,
      "loss": 0.095,
      "step": 1490
    },
    {
      "epoch": 2.98,
      "learning_rate": 4.047713717693837e-05,
      "loss": 0.0947,
      "step": 1500
    },
    {
      "epoch": 2.98,
      "eval_accuracy": 0.9305555820465088,
      "eval_loss": 0.23964740335941315,
      "eval_runtime": 18.9345,
      "eval_samples_per_second": 26.618,
      "eval_steps_per_second": 6.655,
      "step": 1500
    },
    {
      "epoch": 3.0,
      "learning_rate": 4.0079522862823066e-05,
      "loss": 0.1292,
      "step": 1510
    },
    {
      "epoch": 3.02,
      "learning_rate": 3.968190854870776e-05,
      "loss": 0.0505,
      "step": 1520
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.9284294234592445e-05,
      "loss": 0.0629,
      "step": 1530
    },
    {
      "epoch": 3.06,
      "learning_rate": 3.888667992047714e-05,
      "loss": 0.2428,
      "step": 1540
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.848906560636183e-05,
      "loss": 0.0792,
      "step": 1550
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.8091451292246525e-05,
      "loss": 0.1127,
      "step": 1560
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.769383697813121e-05,
      "loss": 0.0928,
      "step": 1570
    },
    {
      "epoch": 3.14,
      "learning_rate": 3.7296222664015904e-05,
      "loss": 0.1148,
      "step": 1580
    },
    {
      "epoch": 3.16,
      "learning_rate": 3.68986083499006e-05,
      "loss": 0.1969,
      "step": 1590
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.650099403578529e-05,
      "loss": 0.0891,
      "step": 1600
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.6103379721669984e-05,
      "loss": 0.1074,
      "step": 1610
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.570576540755468e-05,
      "loss": 0.1484,
      "step": 1620
    },
    {
      "epoch": 3.24,
      "learning_rate": 3.530815109343937e-05,
      "loss": 0.2352,
      "step": 1630
    },
    {
      "epoch": 3.26,
      "learning_rate": 3.491053677932406e-05,
      "loss": 0.0469,
      "step": 1640
    },
    {
      "epoch": 3.28,
      "learning_rate": 3.451292246520875e-05,
      "loss": 0.1484,
      "step": 1650
    },
    {
      "epoch": 3.3,
      "learning_rate": 3.4115308151093436e-05,
      "loss": 0.1446,
      "step": 1660
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.371769383697813e-05,
      "loss": 0.0165,
      "step": 1670
    },
    {
      "epoch": 3.34,
      "learning_rate": 3.332007952286282e-05,
      "loss": 0.0797,
      "step": 1680
    },
    {
      "epoch": 3.36,
      "learning_rate": 3.2922465208747515e-05,
      "loss": 0.1391,
      "step": 1690
    },
    {
      "epoch": 3.38,
      "learning_rate": 3.252485089463221e-05,
      "loss": 0.0918,
      "step": 1700
    },
    {
      "epoch": 3.4,
      "learning_rate": 3.21272365805169e-05,
      "loss": 0.1694,
      "step": 1710
    },
    {
      "epoch": 3.42,
      "learning_rate": 3.1729622266401595e-05,
      "loss": 0.1379,
      "step": 1720
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.133200795228629e-05,
      "loss": 0.1016,
      "step": 1730
    },
    {
      "epoch": 3.46,
      "learning_rate": 3.0934393638170974e-05,
      "loss": 0.0449,
      "step": 1740
    },
    {
      "epoch": 3.48,
      "learning_rate": 3.053677932405567e-05,
      "loss": 0.0124,
      "step": 1750
    },
    {
      "epoch": 3.5,
      "learning_rate": 3.0139165009940357e-05,
      "loss": 0.0186,
      "step": 1760
    },
    {
      "epoch": 3.52,
      "learning_rate": 2.974155069582505e-05,
      "loss": 0.1103,
      "step": 1770
    },
    {
      "epoch": 3.54,
      "learning_rate": 2.934393638170974e-05,
      "loss": 0.063,
      "step": 1780
    },
    {
      "epoch": 3.56,
      "learning_rate": 2.8946322067594433e-05,
      "loss": 0.061,
      "step": 1790
    },
    {
      "epoch": 3.58,
      "learning_rate": 2.8548707753479126e-05,
      "loss": 0.1044,
      "step": 1800
    },
    {
      "epoch": 3.6,
      "learning_rate": 2.815109343936382e-05,
      "loss": 0.1516,
      "step": 1810
    },
    {
      "epoch": 3.62,
      "learning_rate": 2.775347912524851e-05,
      "loss": 0.1427,
      "step": 1820
    },
    {
      "epoch": 3.64,
      "learning_rate": 2.7355864811133202e-05,
      "loss": 0.0448,
      "step": 1830
    },
    {
      "epoch": 3.66,
      "learning_rate": 2.6958250497017895e-05,
      "loss": 0.0703,
      "step": 1840
    },
    {
      "epoch": 3.68,
      "learning_rate": 2.656063618290259e-05,
      "loss": 0.0905,
      "step": 1850
    },
    {
      "epoch": 3.7,
      "learning_rate": 2.616302186878728e-05,
      "loss": 0.081,
      "step": 1860
    },
    {
      "epoch": 3.72,
      "learning_rate": 2.576540755467197e-05,
      "loss": 0.1642,
      "step": 1870
    },
    {
      "epoch": 3.74,
      "learning_rate": 2.5367793240556658e-05,
      "loss": 0.1915,
      "step": 1880
    },
    {
      "epoch": 3.76,
      "learning_rate": 2.4970178926441354e-05,
      "loss": 0.0696,
      "step": 1890
    },
    {
      "epoch": 3.78,
      "learning_rate": 2.4572564612326048e-05,
      "loss": 0.0605,
      "step": 1900
    },
    {
      "epoch": 3.8,
      "learning_rate": 2.4174950298210737e-05,
      "loss": 0.1562,
      "step": 1910
    },
    {
      "epoch": 3.82,
      "learning_rate": 2.3777335984095427e-05,
      "loss": 0.1316,
      "step": 1920
    },
    {
      "epoch": 3.84,
      "learning_rate": 2.337972166998012e-05,
      "loss": 0.1364,
      "step": 1930
    },
    {
      "epoch": 3.86,
      "learning_rate": 2.2982107355864813e-05,
      "loss": 0.0766,
      "step": 1940
    },
    {
      "epoch": 3.88,
      "learning_rate": 2.2584493041749506e-05,
      "loss": 0.2037,
      "step": 1950
    },
    {
      "epoch": 3.9,
      "learning_rate": 2.2186878727634196e-05,
      "loss": 0.029,
      "step": 1960
    },
    {
      "epoch": 3.92,
      "learning_rate": 2.1789264413518886e-05,
      "loss": 0.0443,
      "step": 1970
    },
    {
      "epoch": 3.94,
      "learning_rate": 2.139165009940358e-05,
      "loss": 0.0494,
      "step": 1980
    },
    {
      "epoch": 3.96,
      "learning_rate": 2.0994035785288272e-05,
      "loss": 0.0766,
      "step": 1990
    },
    {
      "epoch": 3.98,
      "learning_rate": 2.0596421471172962e-05,
      "loss": 0.1602,
      "step": 2000
    },
    {
      "epoch": 3.98,
      "eval_accuracy": 0.9444444179534912,
      "eval_loss": 0.26106151938438416,
      "eval_runtime": 19.5055,
      "eval_samples_per_second": 25.839,
      "eval_steps_per_second": 6.46,
      "step": 2000
    },
    {
      "epoch": 4.0,
      "learning_rate": 2.0198807157057655e-05,
      "loss": 0.0202,
      "step": 2010
    },
    {
      "epoch": 4.02,
      "learning_rate": 1.9801192842942348e-05,
      "loss": 0.0856,
      "step": 2020
    },
    {
      "epoch": 4.04,
      "learning_rate": 1.9403578528827038e-05,
      "loss": 0.0999,
      "step": 2030
    },
    {
      "epoch": 4.06,
      "learning_rate": 1.900596421471173e-05,
      "loss": 0.0053,
      "step": 2040
    },
    {
      "epoch": 4.08,
      "learning_rate": 1.860834990059642e-05,
      "loss": 0.1129,
      "step": 2050
    },
    {
      "epoch": 4.1,
      "learning_rate": 1.8210735586481114e-05,
      "loss": 0.0083,
      "step": 2060
    },
    {
      "epoch": 4.12,
      "learning_rate": 1.7813121272365807e-05,
      "loss": 0.0857,
      "step": 2070
    },
    {
      "epoch": 4.14,
      "learning_rate": 1.7415506958250497e-05,
      "loss": 0.0475,
      "step": 2080
    },
    {
      "epoch": 4.15,
      "learning_rate": 1.701789264413519e-05,
      "loss": 0.0267,
      "step": 2090
    },
    {
      "epoch": 4.17,
      "learning_rate": 1.662027833001988e-05,
      "loss": 0.0469,
      "step": 2100
    },
    {
      "epoch": 4.19,
      "learning_rate": 1.6222664015904573e-05,
      "loss": 0.0346,
      "step": 2110
    },
    {
      "epoch": 4.21,
      "learning_rate": 1.5825049701789266e-05,
      "loss": 0.0822,
      "step": 2120
    },
    {
      "epoch": 4.23,
      "learning_rate": 1.542743538767396e-05,
      "loss": 0.0803,
      "step": 2130
    },
    {
      "epoch": 4.25,
      "learning_rate": 1.5029821073558647e-05,
      "loss": 0.0077,
      "step": 2140
    },
    {
      "epoch": 4.27,
      "learning_rate": 1.463220675944334e-05,
      "loss": 0.0347,
      "step": 2150
    },
    {
      "epoch": 4.29,
      "learning_rate": 1.4234592445328032e-05,
      "loss": 0.0665,
      "step": 2160
    },
    {
      "epoch": 4.31,
      "learning_rate": 1.3836978131212725e-05,
      "loss": 0.0779,
      "step": 2170
    },
    {
      "epoch": 4.33,
      "learning_rate": 1.3439363817097416e-05,
      "loss": 0.056,
      "step": 2180
    },
    {
      "epoch": 4.35,
      "learning_rate": 1.304174950298211e-05,
      "loss": 0.1503,
      "step": 2190
    },
    {
      "epoch": 4.37,
      "learning_rate": 1.26441351888668e-05,
      "loss": 0.0032,
      "step": 2200
    },
    {
      "epoch": 4.39,
      "learning_rate": 1.2246520874751492e-05,
      "loss": 0.0457,
      "step": 2210
    },
    {
      "epoch": 4.41,
      "learning_rate": 1.1848906560636184e-05,
      "loss": 0.0077,
      "step": 2220
    },
    {
      "epoch": 4.43,
      "learning_rate": 1.1451292246520875e-05,
      "loss": 0.1558,
      "step": 2230
    },
    {
      "epoch": 4.45,
      "learning_rate": 1.1053677932405568e-05,
      "loss": 0.0307,
      "step": 2240
    },
    {
      "epoch": 4.47,
      "learning_rate": 1.0656063618290258e-05,
      "loss": 0.1193,
      "step": 2250
    },
    {
      "epoch": 4.49,
      "learning_rate": 1.0258449304174951e-05,
      "loss": 0.0638,
      "step": 2260
    },
    {
      "epoch": 4.51,
      "learning_rate": 9.860834990059643e-06,
      "loss": 0.059,
      "step": 2270
    },
    {
      "epoch": 4.53,
      "learning_rate": 9.463220675944334e-06,
      "loss": 0.0875,
      "step": 2280
    },
    {
      "epoch": 4.55,
      "learning_rate": 9.065606361829027e-06,
      "loss": 0.1511,
      "step": 2290
    },
    {
      "epoch": 4.57,
      "learning_rate": 8.667992047713717e-06,
      "loss": 0.1217,
      "step": 2300
    },
    {
      "epoch": 4.59,
      "learning_rate": 8.27037773359841e-06,
      "loss": 0.2091,
      "step": 2310
    },
    {
      "epoch": 4.61,
      "learning_rate": 7.872763419483102e-06,
      "loss": 0.1723,
      "step": 2320
    },
    {
      "epoch": 4.63,
      "learning_rate": 7.475149105367793e-06,
      "loss": 0.0763,
      "step": 2330
    },
    {
      "epoch": 4.65,
      "learning_rate": 7.077534791252485e-06,
      "loss": 0.0172,
      "step": 2340
    },
    {
      "epoch": 4.67,
      "learning_rate": 6.679920477137178e-06,
      "loss": 0.0352,
      "step": 2350
    },
    {
      "epoch": 4.69,
      "learning_rate": 6.282306163021868e-06,
      "loss": 0.1989,
      "step": 2360
    },
    {
      "epoch": 4.71,
      "learning_rate": 5.8846918489065606e-06,
      "loss": 0.0896,
      "step": 2370
    },
    {
      "epoch": 4.73,
      "learning_rate": 5.487077534791253e-06,
      "loss": 0.0728,
      "step": 2380
    },
    {
      "epoch": 4.75,
      "learning_rate": 5.089463220675945e-06,
      "loss": 0.0482,
      "step": 2390
    },
    {
      "epoch": 4.77,
      "learning_rate": 4.6918489065606366e-06,
      "loss": 0.1288,
      "step": 2400
    },
    {
      "epoch": 4.79,
      "learning_rate": 4.294234592445328e-06,
      "loss": 0.0756,
      "step": 2410
    },
    {
      "epoch": 4.81,
      "learning_rate": 3.89662027833002e-06,
      "loss": 0.051,
      "step": 2420
    },
    {
      "epoch": 4.83,
      "learning_rate": 3.4990059642147117e-06,
      "loss": 0.0874,
      "step": 2430
    },
    {
      "epoch": 4.85,
      "learning_rate": 3.101391650099404e-06,
      "loss": 0.0381,
      "step": 2440
    },
    {
      "epoch": 4.87,
      "learning_rate": 2.7037773359840955e-06,
      "loss": 0.0031,
      "step": 2450
    },
    {
      "epoch": 4.89,
      "learning_rate": 2.3061630218687873e-06,
      "loss": 0.0371,
      "step": 2460
    },
    {
      "epoch": 4.91,
      "learning_rate": 1.9085487077534792e-06,
      "loss": 0.007,
      "step": 2470
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.510934393638171e-06,
      "loss": 0.115,
      "step": 2480
    },
    {
      "epoch": 4.95,
      "learning_rate": 1.113320079522863e-06,
      "loss": 0.0492,
      "step": 2490
    },
    {
      "epoch": 4.97,
      "learning_rate": 7.157057654075547e-07,
      "loss": 0.0104,
      "step": 2500
    },
    {
      "epoch": 4.97,
      "eval_accuracy": 0.9484127163887024,
      "eval_loss": 0.2534567415714264,
      "eval_runtime": 18.9048,
      "eval_samples_per_second": 26.66,
      "eval_steps_per_second": 6.665,
      "step": 2500
    },
    {
      "epoch": 4.99,
      "learning_rate": 3.1809145129224657e-07,
      "loss": 0.0589,
      "step": 2510
    }
  ],
  "max_steps": 2515,
  "num_train_epochs": 5,
  "total_flos": 7.174884566087474e+17,
  "trial_name": null,
  "trial_params": null
}