{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.3579935988247023,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 3.0000000000000004e-08,
      "loss": 1.3803,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 8e-08,
      "loss": 1.3294,
      "step": 20
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.3e-07,
      "loss": 1.2867,
      "step": 30
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.8e-07,
      "loss": 1.2626,
      "step": 40
    },
    {
      "epoch": 0.08,
      "learning_rate": 2.3000000000000002e-07,
      "loss": 1.2373,
      "step": 50
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.8e-07,
      "loss": 1.2286,
      "step": 60
    },
    {
      "epoch": 0.12,
      "learning_rate": 3.3e-07,
      "loss": 1.2056,
      "step": 70
    },
    {
      "epoch": 0.13,
      "learning_rate": 3.8e-07,
      "loss": 1.2047,
      "step": 80
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.3e-07,
      "loss": 1.1813,
      "step": 90
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.800000000000001e-07,
      "loss": 1.1672,
      "step": 100
    },
    {
      "epoch": 0.18,
      "learning_rate": 5.3e-07,
      "loss": 1.1585,
      "step": 110
    },
    {
      "epoch": 0.2,
      "learning_rate": 5.800000000000001e-07,
      "loss": 1.1429,
      "step": 120
    },
    {
      "epoch": 0.22,
      "learning_rate": 6.3e-07,
      "loss": 1.1384,
      "step": 130
    },
    {
      "epoch": 0.24,
      "learning_rate": 6.800000000000001e-07,
      "loss": 1.1265,
      "step": 140
    },
    {
      "epoch": 0.25,
      "learning_rate": 7.3e-07,
      "loss": 1.1375,
      "step": 150
    },
    {
      "epoch": 0.27,
      "learning_rate": 7.8e-07,
      "loss": 1.1396,
      "step": 160
    },
    {
      "epoch": 0.29,
      "learning_rate": 8.300000000000001e-07,
      "loss": 1.1302,
      "step": 170
    },
    {
      "epoch": 0.3,
      "learning_rate": 8.8e-07,
      "loss": 1.1213,
      "step": 180
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.300000000000001e-07,
      "loss": 1.1183,
      "step": 190
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.800000000000001e-07,
      "loss": 1.1081,
      "step": 200
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.03e-06,
      "loss": 1.0939,
      "step": 210
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.08e-06,
      "loss": 1.0826,
      "step": 220
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.1300000000000002e-06,
      "loss": 1.1104,
      "step": 230
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 1.111,
      "step": 240
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.23e-06,
      "loss": 1.0862,
      "step": 250
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.28e-06,
      "loss": 1.0783,
      "step": 260
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.3300000000000002e-06,
      "loss": 1.0762,
      "step": 270
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 1.0928,
      "step": 280
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.43e-06,
      "loss": 1.0884,
      "step": 290
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.48e-06,
      "loss": 1.068,
      "step": 300
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.5300000000000002e-06,
      "loss": 1.0785,
      "step": 310
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 1.0538,
      "step": 320
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.6300000000000003e-06,
      "loss": 1.0734,
      "step": 330
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.6800000000000002e-06,
      "loss": 1.0642,
      "step": 340
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.73e-06,
      "loss": 1.067,
      "step": 350
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 1.079,
      "step": 360
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.83e-06,
      "loss": 1.048,
      "step": 370
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8800000000000002e-06,
      "loss": 1.0571,
      "step": 380
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.93e-06,
      "loss": 1.0617,
      "step": 390
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.98e-06,
      "loss": 1.0644,
      "step": 400
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.0300000000000005e-06,
      "loss": 1.0569,
      "step": 410
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.08e-06,
      "loss": 1.0429,
      "step": 420
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.13e-06,
      "loss": 1.03,
      "step": 430
    },
    {
      "epoch": 0.74,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 1.0449,
      "step": 440
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.2300000000000002e-06,
      "loss": 1.0462,
      "step": 450
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.28e-06,
      "loss": 1.0272,
      "step": 460
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.33e-06,
      "loss": 1.0221,
      "step": 470
    },
    {
      "epoch": 0.81,
      "learning_rate": 2.38e-06,
      "loss": 1.0307,
      "step": 480
    },
    {
      "epoch": 0.82,
      "learning_rate": 2.43e-06,
      "loss": 1.048,
      "step": 490
    },
    {
      "epoch": 0.84,
      "learning_rate": 2.4800000000000004e-06,
      "loss": 1.0376,
      "step": 500
    },
    {
      "epoch": 0.84,
      "eval_code_evol_accuracy": 0.7292901919811937,
      "eval_code_evol_loss": 1.0595703125,
      "eval_code_evol_runtime": 44.6855,
      "eval_code_evol_samples_per_second": 17.254,
      "eval_code_evol_steps_per_second": 2.171,
      "step": 500
    },
    {
      "epoch": 0.86,
      "learning_rate": 2.5300000000000003e-06,
      "loss": 1.0246,
      "step": 510
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 1.0309,
      "step": 520
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.6300000000000002e-06,
      "loss": 1.0391,
      "step": 530
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.68e-06,
      "loss": 1.0193,
      "step": 540
    },
    {
      "epoch": 0.92,
      "learning_rate": 2.7300000000000005e-06,
      "loss": 1.0039,
      "step": 550
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 1.0271,
      "step": 560
    },
    {
      "epoch": 0.96,
      "learning_rate": 2.83e-06,
      "loss": 1.0269,
      "step": 570
    },
    {
      "epoch": 0.97,
      "learning_rate": 2.88e-06,
      "loss": 1.0103,
      "step": 580
    },
    {
      "epoch": 0.99,
      "learning_rate": 2.93e-06,
      "loss": 1.0012,
      "step": 590
    },
    {
      "epoch": 1.01,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 1.0011,
      "step": 600
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.0300000000000002e-06,
      "loss": 0.9842,
      "step": 610
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.08e-06,
      "loss": 0.9918,
      "step": 620
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.13e-06,
      "loss": 0.985,
      "step": 630
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.988,
      "step": 640
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.2300000000000004e-06,
      "loss": 0.9832,
      "step": 650
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.2800000000000004e-06,
      "loss": 0.97,
      "step": 660
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.3300000000000003e-06,
      "loss": 0.9699,
      "step": 670
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.983,
      "step": 680
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.4300000000000006e-06,
      "loss": 0.9789,
      "step": 690
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.48e-06,
      "loss": 0.9783,
      "step": 700
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.53e-06,
      "loss": 0.9805,
      "step": 710
    },
    {
      "epoch": 1.21,
      "learning_rate": 3.58e-06,
      "loss": 0.9764,
      "step": 720
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.6300000000000004e-06,
      "loss": 0.9754,
      "step": 730
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.6800000000000003e-06,
      "loss": 0.9765,
      "step": 740
    },
    {
      "epoch": 1.26,
      "learning_rate": 3.7300000000000003e-06,
      "loss": 0.9637,
      "step": 750
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.9689,
      "step": 760
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.830000000000001e-06,
      "loss": 0.9847,
      "step": 770
    },
    {
      "epoch": 1.31,
      "learning_rate": 3.88e-06,
      "loss": 0.9646,
      "step": 780
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.9300000000000005e-06,
      "loss": 0.9844,
      "step": 790
    },
    {
      "epoch": 1.34,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.9678,
      "step": 800
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.03e-06,
      "loss": 0.9597,
      "step": 810
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.08e-06,
      "loss": 0.981,
      "step": 820
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.13e-06,
      "loss": 0.9736,
      "step": 830
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.18e-06,
      "loss": 0.9702,
      "step": 840
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.23e-06,
      "loss": 0.9676,
      "step": 850
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.2800000000000005e-06,
      "loss": 0.975,
      "step": 860
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.33e-06,
      "loss": 0.961,
      "step": 870
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.38e-06,
      "loss": 0.9572,
      "step": 880
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.430000000000001e-06,
      "loss": 0.9637,
      "step": 890
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.48e-06,
      "loss": 0.9625,
      "step": 900
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.530000000000001e-06,
      "loss": 0.9634,
      "step": 910
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.58e-06,
      "loss": 0.9524,
      "step": 920
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.6300000000000006e-06,
      "loss": 0.957,
      "step": 930
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.680000000000001e-06,
      "loss": 0.9609,
      "step": 940
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.7300000000000005e-06,
      "loss": 0.9684,
      "step": 950
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.78e-06,
      "loss": 0.9607,
      "step": 960
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.83e-06,
      "loss": 0.9577,
      "step": 970
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.880000000000001e-06,
      "loss": 0.9614,
      "step": 980
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.93e-06,
      "loss": 0.953,
      "step": 990
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.9679,
      "step": 1000
    },
    {
      "epoch": 1.68,
      "eval_code_evol_accuracy": 0.7400189369204649,
      "eval_code_evol_loss": 1.001953125,
      "eval_code_evol_runtime": 44.1999,
      "eval_code_evol_samples_per_second": 17.443,
      "eval_code_evol_steps_per_second": 2.195,
      "step": 1000
    },
    {
      "epoch": 1.7,
      "learning_rate": 5.03e-06,
      "loss": 0.949,
      "step": 1010
    },
    {
      "epoch": 1.71,
      "learning_rate": 5.0800000000000005e-06,
      "loss": 0.9567,
      "step": 1020
    },
    {
      "epoch": 1.73,
      "learning_rate": 5.130000000000001e-06,
      "loss": 0.9527,
      "step": 1030
    },
    {
      "epoch": 1.75,
      "learning_rate": 5.18e-06,
      "loss": 0.9614,
      "step": 1040
    },
    {
      "epoch": 1.76,
      "learning_rate": 5.230000000000001e-06,
      "loss": 0.9595,
      "step": 1050
    },
    {
      "epoch": 1.78,
      "learning_rate": 5.28e-06,
      "loss": 0.9482,
      "step": 1060
    },
    {
      "epoch": 1.8,
      "learning_rate": 5.330000000000001e-06,
      "loss": 0.9403,
      "step": 1070
    },
    {
      "epoch": 1.81,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.9386,
      "step": 1080
    },
    {
      "epoch": 1.83,
      "learning_rate": 5.4300000000000005e-06,
      "loss": 0.9663,
      "step": 1090
    },
    {
      "epoch": 1.85,
      "learning_rate": 5.480000000000001e-06,
      "loss": 0.9491,
      "step": 1100
    },
    {
      "epoch": 1.86,
      "learning_rate": 5.530000000000001e-06,
      "loss": 0.9638,
      "step": 1110
    },
    {
      "epoch": 1.88,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.9728,
      "step": 1120
    },
    {
      "epoch": 1.9,
      "learning_rate": 5.63e-06,
      "loss": 0.9551,
      "step": 1130
    },
    {
      "epoch": 1.91,
      "learning_rate": 5.68e-06,
      "loss": 0.9432,
      "step": 1140
    },
    {
      "epoch": 1.93,
      "learning_rate": 5.73e-06,
      "loss": 0.9517,
      "step": 1150
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.78e-06,
      "loss": 0.9448,
      "step": 1160
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.83e-06,
      "loss": 0.954,
      "step": 1170
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.8800000000000005e-06,
      "loss": 0.9402,
      "step": 1180
    },
    {
      "epoch": 2.0,
      "learning_rate": 5.93e-06,
      "loss": 0.9257,
      "step": 1190
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.98e-06,
      "loss": 0.8889,
      "step": 1200
    },
    {
      "epoch": 2.03,
      "learning_rate": 6.030000000000001e-06,
      "loss": 0.8903,
      "step": 1210
    },
    {
      "epoch": 2.05,
      "learning_rate": 6.08e-06,
      "loss": 0.8778,
      "step": 1220
    },
    {
      "epoch": 2.07,
      "learning_rate": 6.130000000000001e-06,
      "loss": 0.8759,
      "step": 1230
    },
    {
      "epoch": 2.08,
      "learning_rate": 6.18e-06,
      "loss": 0.8957,
      "step": 1240
    },
    {
      "epoch": 2.1,
      "learning_rate": 6.2300000000000005e-06,
      "loss": 0.8805,
      "step": 1250
    },
    {
      "epoch": 2.12,
      "learning_rate": 6.280000000000001e-06,
      "loss": 0.882,
      "step": 1260
    },
    {
      "epoch": 2.13,
      "learning_rate": 6.33e-06,
      "loss": 0.8887,
      "step": 1270
    },
    {
      "epoch": 2.15,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.8692,
      "step": 1280
    },
    {
      "epoch": 2.17,
      "learning_rate": 6.43e-06,
      "loss": 0.896,
      "step": 1290
    },
    {
      "epoch": 2.18,
      "learning_rate": 6.480000000000001e-06,
      "loss": 0.8724,
      "step": 1300
    },
    {
      "epoch": 2.2,
      "learning_rate": 6.530000000000001e-06,
      "loss": 0.8705,
      "step": 1310
    },
    {
      "epoch": 2.22,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.8805,
      "step": 1320
    },
    {
      "epoch": 2.23,
      "learning_rate": 6.630000000000001e-06,
      "loss": 0.8804,
      "step": 1330
    },
    {
      "epoch": 2.25,
      "learning_rate": 6.680000000000001e-06,
      "loss": 0.8864,
      "step": 1340
    },
    {
      "epoch": 2.27,
      "learning_rate": 6.730000000000001e-06,
      "loss": 0.8908,
      "step": 1350
    },
    {
      "epoch": 2.28,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.8836,
      "step": 1360
    },
    {
      "epoch": 2.3,
      "learning_rate": 6.830000000000001e-06,
      "loss": 0.8817,
      "step": 1370
    },
    {
      "epoch": 2.32,
      "learning_rate": 6.88e-06,
      "loss": 0.8825,
      "step": 1380
    },
    {
      "epoch": 2.33,
      "learning_rate": 6.93e-06,
      "loss": 0.8928,
      "step": 1390
    },
    {
      "epoch": 2.35,
      "learning_rate": 6.98e-06,
      "loss": 0.885,
      "step": 1400
    },
    {
      "epoch": 2.37,
      "learning_rate": 7.0300000000000005e-06,
      "loss": 0.8756,
      "step": 1410
    },
    {
      "epoch": 2.38,
      "learning_rate": 7.08e-06,
      "loss": 0.8945,
      "step": 1420
    },
    {
      "epoch": 2.4,
      "learning_rate": 7.13e-06,
      "loss": 0.8712,
      "step": 1430
    },
    {
      "epoch": 2.42,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.8992,
      "step": 1440
    },
    {
      "epoch": 2.43,
      "learning_rate": 7.23e-06,
      "loss": 0.8687,
      "step": 1450
    },
    {
      "epoch": 2.45,
      "learning_rate": 7.280000000000001e-06,
      "loss": 0.8896,
      "step": 1460
    },
    {
      "epoch": 2.47,
      "learning_rate": 7.33e-06,
      "loss": 0.8853,
      "step": 1470
    },
    {
      "epoch": 2.48,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.866,
      "step": 1480
    },
    {
      "epoch": 2.5,
      "learning_rate": 7.430000000000001e-06,
      "loss": 0.8834,
      "step": 1490
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.48e-06,
      "loss": 0.8657,
      "step": 1500
    },
    {
      "epoch": 2.52,
      "eval_code_evol_accuracy": 0.7452004701580254,
      "eval_code_evol_loss": 0.98095703125,
      "eval_code_evol_runtime": 43.1875,
      "eval_code_evol_samples_per_second": 17.852,
      "eval_code_evol_steps_per_second": 2.246,
      "step": 1500
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.530000000000001e-06,
      "loss": 0.8986,
      "step": 1510
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.58e-06,
      "loss": 0.8768,
      "step": 1520
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.630000000000001e-06,
      "loss": 0.8873,
      "step": 1530
    },
    {
      "epoch": 2.59,
      "learning_rate": 7.680000000000001e-06,
      "loss": 0.8836,
      "step": 1540
    },
    {
      "epoch": 2.6,
      "learning_rate": 7.73e-06,
      "loss": 0.8689,
      "step": 1550
    },
    {
      "epoch": 2.62,
      "learning_rate": 7.78e-06,
      "loss": 0.8896,
      "step": 1560
    },
    {
      "epoch": 2.64,
      "learning_rate": 7.830000000000001e-06,
      "loss": 0.8916,
      "step": 1570
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.88e-06,
      "loss": 0.8654,
      "step": 1580
    },
    {
      "epoch": 2.67,
      "learning_rate": 7.93e-06,
      "loss": 0.8742,
      "step": 1590
    },
    {
      "epoch": 2.69,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.8822,
      "step": 1600
    },
    {
      "epoch": 2.7,
      "learning_rate": 8.030000000000001e-06,
      "loss": 0.8699,
      "step": 1610
    },
    {
      "epoch": 2.72,
      "learning_rate": 8.08e-06,
      "loss": 0.8759,
      "step": 1620
    },
    {
      "epoch": 2.74,
      "learning_rate": 8.13e-06,
      "loss": 0.879,
      "step": 1630
    },
    {
      "epoch": 2.75,
      "learning_rate": 8.18e-06,
      "loss": 0.8819,
      "step": 1640
    },
    {
      "epoch": 2.77,
      "learning_rate": 8.23e-06,
      "loss": 0.8797,
      "step": 1650
    },
    {
      "epoch": 2.79,
      "learning_rate": 8.28e-06,
      "loss": 0.8706,
      "step": 1660
    },
    {
      "epoch": 2.8,
      "learning_rate": 8.33e-06,
      "loss": 0.8732,
      "step": 1670
    },
    {
      "epoch": 2.82,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.8829,
      "step": 1680
    },
    {
      "epoch": 2.84,
      "learning_rate": 8.43e-06,
      "loss": 0.8914,
      "step": 1690
    },
    {
      "epoch": 2.85,
      "learning_rate": 8.48e-06,
      "loss": 0.8692,
      "step": 1700
    },
    {
      "epoch": 2.87,
      "learning_rate": 8.530000000000001e-06,
      "loss": 0.8975,
      "step": 1710
    },
    {
      "epoch": 2.89,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.8762,
      "step": 1720
    },
    {
      "epoch": 2.9,
      "learning_rate": 8.63e-06,
      "loss": 0.8955,
      "step": 1730
    },
    {
      "epoch": 2.92,
      "learning_rate": 8.68e-06,
      "loss": 0.8853,
      "step": 1740
    },
    {
      "epoch": 2.94,
      "learning_rate": 8.730000000000001e-06,
      "loss": 0.8702,
      "step": 1750
    },
    {
      "epoch": 2.96,
      "learning_rate": 8.78e-06,
      "loss": 0.8824,
      "step": 1760
    },
    {
      "epoch": 2.97,
      "learning_rate": 8.83e-06,
      "loss": 0.8839,
      "step": 1770
    },
    {
      "epoch": 2.99,
      "learning_rate": 8.880000000000001e-06,
      "loss": 0.885,
      "step": 1780
    },
    {
      "epoch": 3.01,
      "learning_rate": 8.930000000000001e-06,
      "loss": 0.8535,
      "step": 1790
    },
    {
      "epoch": 3.02,
      "learning_rate": 8.98e-06,
      "loss": 0.7718,
      "step": 1800
    },
    {
      "epoch": 3.04,
      "learning_rate": 9.030000000000002e-06,
      "loss": 0.7874,
      "step": 1810
    },
    {
      "epoch": 3.06,
      "learning_rate": 9.080000000000001e-06,
      "loss": 0.7935,
      "step": 1820
    },
    {
      "epoch": 3.07,
      "learning_rate": 9.13e-06,
      "loss": 0.7874,
      "step": 1830
    },
    {
      "epoch": 3.09,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.7963,
      "step": 1840
    },
    {
      "epoch": 3.11,
      "learning_rate": 9.230000000000001e-06,
      "loss": 0.7788,
      "step": 1850
    },
    {
      "epoch": 3.12,
      "learning_rate": 9.280000000000001e-06,
      "loss": 0.7856,
      "step": 1860
    },
    {
      "epoch": 3.14,
      "learning_rate": 9.33e-06,
      "loss": 0.7824,
      "step": 1870
    },
    {
      "epoch": 3.16,
      "learning_rate": 9.38e-06,
      "loss": 0.7942,
      "step": 1880
    },
    {
      "epoch": 3.17,
      "learning_rate": 9.43e-06,
      "loss": 0.7846,
      "step": 1890
    },
    {
      "epoch": 3.19,
      "learning_rate": 9.48e-06,
      "loss": 0.7971,
      "step": 1900
    },
    {
      "epoch": 3.21,
      "learning_rate": 9.53e-06,
      "loss": 0.7914,
      "step": 1910
    },
    {
      "epoch": 3.22,
      "learning_rate": 9.58e-06,
      "loss": 0.7873,
      "step": 1920
    },
    {
      "epoch": 3.24,
      "learning_rate": 9.630000000000001e-06,
      "loss": 0.7815,
      "step": 1930
    },
    {
      "epoch": 3.26,
      "learning_rate": 9.68e-06,
      "loss": 0.7958,
      "step": 1940
    },
    {
      "epoch": 3.27,
      "learning_rate": 9.73e-06,
      "loss": 0.7875,
      "step": 1950
    },
    {
      "epoch": 3.29,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.7968,
      "step": 1960
    },
    {
      "epoch": 3.31,
      "learning_rate": 9.83e-06,
      "loss": 0.7834,
      "step": 1970
    },
    {
      "epoch": 3.32,
      "learning_rate": 9.88e-06,
      "loss": 0.7935,
      "step": 1980
    },
    {
      "epoch": 3.34,
      "learning_rate": 9.930000000000001e-06,
      "loss": 0.7894,
      "step": 1990
    },
    {
      "epoch": 3.36,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.7908,
      "step": 2000
    },
    {
      "epoch": 3.36,
      "eval_code_evol_accuracy": 0.7468688781507118,
      "eval_code_evol_loss": 0.9775390625,
      "eval_code_evol_runtime": 44.2366,
      "eval_code_evol_samples_per_second": 17.429,
      "eval_code_evol_steps_per_second": 2.193,
      "step": 2000
    }
  ],
  "max_steps": 4760,
  "num_train_epochs": 8,
  "total_flos": 832756800815104.0,
  "trial_name": null,
  "trial_params": null
}