{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.8507242605603262,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1.62e-07,
      "loss": 0.6957,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.42e-07,
      "loss": 0.6958,
      "step": 20
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.22e-07,
      "loss": 0.6916,
      "step": 30
    },
    {
      "epoch": 0.02,
      "learning_rate": 7.02e-07,
      "loss": 0.6959,
      "step": 40
    },
    {
      "epoch": 0.02,
      "learning_rate": 8.820000000000001e-07,
      "loss": 0.6927,
      "step": 50
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.062e-06,
      "loss": 0.6921,
      "step": 60
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.2420000000000001e-06,
      "loss": 0.691,
      "step": 70
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.4220000000000001e-06,
      "loss": 0.6892,
      "step": 80
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.6019999999999999e-06,
      "loss": 0.6881,
      "step": 90
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.782e-06,
      "loss": 0.6857,
      "step": 100
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.962e-06,
      "loss": 0.6836,
      "step": 110
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.142e-06,
      "loss": 0.6802,
      "step": 120
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.322e-06,
      "loss": 0.6714,
      "step": 130
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.4840000000000003e-06,
      "loss": 0.6661,
      "step": 140
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.664e-06,
      "loss": 0.6599,
      "step": 150
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.826e-06,
      "loss": 0.6524,
      "step": 160
    },
    {
      "epoch": 0.08,
      "learning_rate": 3.006e-06,
      "loss": 0.6362,
      "step": 170
    },
    {
      "epoch": 0.08,
      "learning_rate": 3.168e-06,
      "loss": 0.632,
      "step": 180
    },
    {
      "epoch": 0.09,
      "learning_rate": 3.348e-06,
      "loss": 0.6301,
      "step": 190
    },
    {
      "epoch": 0.09,
      "learning_rate": 3.5280000000000004e-06,
      "loss": 0.6272,
      "step": 200
    },
    {
      "epoch": 0.1,
      "learning_rate": 3.708e-06,
      "loss": 0.6172,
      "step": 210
    },
    {
      "epoch": 0.1,
      "learning_rate": 3.87e-06,
      "loss": 0.6146,
      "step": 220
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.05e-06,
      "loss": 0.606,
      "step": 230
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.23e-06,
      "loss": 0.6194,
      "step": 240
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.392e-06,
      "loss": 0.5863,
      "step": 250
    },
    {
      "epoch": 0.12,
      "eval_webgpt_accuracy": 0.547752808988764,
      "eval_webgpt_loss": 0.7199520468711853,
      "eval_webgpt_runtime": 152.9037,
      "eval_webgpt_samples_per_second": 25.611,
      "eval_webgpt_steps_per_second": 2.564,
      "step": 250
    },
    {
      "epoch": 0.12,
      "eval_hfsummary_accuracy": 0.630263277211861,
      "eval_hfsummary_loss": 0.6598580479621887,
      "eval_hfsummary_runtime": 2471.5424,
      "eval_hfsummary_samples_per_second": 13.386,
      "eval_hfsummary_steps_per_second": 1.339,
      "step": 250
    },
    {
      "epoch": 0.12,
      "eval_gptsynthetic_accuracy": 0.9978883861236802,
      "eval_gptsynthetic_loss": 0.09524902701377869,
      "eval_gptsynthetic_runtime": 116.7014,
      "eval_gptsynthetic_samples_per_second": 28.406,
      "eval_gptsynthetic_steps_per_second": 2.845,
      "step": 250
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.5720000000000004e-06,
      "loss": 0.5818,
      "step": 260
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.752e-06,
      "loss": 0.5817,
      "step": 270
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.932e-06,
      "loss": 0.5713,
      "step": 280
    },
    {
      "epoch": 0.13,
      "learning_rate": 5.1119999999999995e-06,
      "loss": 0.5787,
      "step": 290
    },
    {
      "epoch": 0.14,
      "learning_rate": 5.2919999999999995e-06,
      "loss": 0.566,
      "step": 300
    },
    {
      "epoch": 0.14,
      "learning_rate": 5.472e-06,
      "loss": 0.5765,
      "step": 310
    },
    {
      "epoch": 0.15,
      "learning_rate": 5.652e-06,
      "loss": 0.5286,
      "step": 320
    },
    {
      "epoch": 0.15,
      "learning_rate": 5.814000000000001e-06,
      "loss": 0.5644,
      "step": 330
    },
    {
      "epoch": 0.16,
      "learning_rate": 5.9940000000000005e-06,
      "loss": 0.5278,
      "step": 340
    },
    {
      "epoch": 0.16,
      "learning_rate": 6.1740000000000005e-06,
      "loss": 0.5376,
      "step": 350
    },
    {
      "epoch": 0.17,
      "learning_rate": 6.354e-06,
      "loss": 0.5396,
      "step": 360
    },
    {
      "epoch": 0.17,
      "learning_rate": 6.534e-06,
      "loss": 0.517,
      "step": 370
    },
    {
      "epoch": 0.18,
      "learning_rate": 6.7140000000000004e-06,
      "loss": 0.5001,
      "step": 380
    },
    {
      "epoch": 0.18,
      "learning_rate": 6.894e-06,
      "loss": 0.5069,
      "step": 390
    },
    {
      "epoch": 0.19,
      "learning_rate": 7.074e-06,
      "loss": 0.5102,
      "step": 400
    },
    {
      "epoch": 0.19,
      "learning_rate": 7.254e-06,
      "loss": 0.5105,
      "step": 410
    },
    {
      "epoch": 0.19,
      "learning_rate": 7.4339999999999995e-06,
      "loss": 0.5332,
      "step": 420
    },
    {
      "epoch": 0.2,
      "learning_rate": 7.614e-06,
      "loss": 0.5076,
      "step": 430
    },
    {
      "epoch": 0.2,
      "learning_rate": 7.794e-06,
      "loss": 0.5088,
      "step": 440
    },
    {
      "epoch": 0.21,
      "learning_rate": 7.974e-06,
      "loss": 0.4968,
      "step": 450
    },
    {
      "epoch": 0.21,
      "learning_rate": 8.154e-06,
      "loss": 0.4902,
      "step": 460
    },
    {
      "epoch": 0.22,
      "learning_rate": 8.334e-06,
      "loss": 0.5097,
      "step": 470
    },
    {
      "epoch": 0.22,
      "learning_rate": 8.514e-06,
      "loss": 0.5038,
      "step": 480
    },
    {
      "epoch": 0.23,
      "learning_rate": 8.694e-06,
      "loss": 0.5082,
      "step": 490
    },
    {
      "epoch": 0.23,
      "learning_rate": 8.874e-06,
      "loss": 0.4887,
      "step": 500
    },
    {
      "epoch": 0.23,
      "eval_webgpt_accuracy": 0.5623084780388151,
      "eval_webgpt_loss": 0.7031316161155701,
      "eval_webgpt_runtime": 152.8509,
      "eval_webgpt_samples_per_second": 25.62,
      "eval_webgpt_steps_per_second": 2.565,
      "step": 500
    },
    {
      "epoch": 0.23,
      "eval_hfsummary_accuracy": 0.6744249312335641,
      "eval_hfsummary_loss": 0.6107630729675293,
      "eval_hfsummary_runtime": 2471.6908,
      "eval_hfsummary_samples_per_second": 13.385,
      "eval_hfsummary_steps_per_second": 1.339,
      "step": 500
    },
    {
      "epoch": 0.23,
      "eval_gptsynthetic_accuracy": 0.9987933634992459,
      "eval_gptsynthetic_loss": 0.023942505940794945,
      "eval_gptsynthetic_runtime": 116.8138,
      "eval_gptsynthetic_samples_per_second": 28.379,
      "eval_gptsynthetic_steps_per_second": 2.842,
      "step": 500
    },
    {
      "epoch": 0.24,
      "learning_rate": 8.99293563579278e-06,
      "loss": 0.4875,
      "step": 510
    },
    {
      "epoch": 0.24,
      "learning_rate": 8.969387755102041e-06,
      "loss": 0.483,
      "step": 520
    },
    {
      "epoch": 0.25,
      "learning_rate": 8.945839874411304e-06,
      "loss": 0.5057,
      "step": 530
    },
    {
      "epoch": 0.25,
      "learning_rate": 8.922291993720566e-06,
      "loss": 0.4923,
      "step": 540
    },
    {
      "epoch": 0.25,
      "learning_rate": 8.898744113029827e-06,
      "loss": 0.4623,
      "step": 550
    },
    {
      "epoch": 0.26,
      "learning_rate": 8.87519623233909e-06,
      "loss": 0.4735,
      "step": 560
    },
    {
      "epoch": 0.26,
      "learning_rate": 8.851648351648352e-06,
      "loss": 0.4516,
      "step": 570
    },
    {
      "epoch": 0.27,
      "learning_rate": 8.828100470957614e-06,
      "loss": 0.4542,
      "step": 580
    },
    {
      "epoch": 0.27,
      "learning_rate": 8.804552590266875e-06,
      "loss": 0.465,
      "step": 590
    },
    {
      "epoch": 0.28,
      "learning_rate": 8.781004709576138e-06,
      "loss": 0.4577,
      "step": 600
    },
    {
      "epoch": 0.28,
      "learning_rate": 8.7574568288854e-06,
      "loss": 0.4994,
      "step": 610
    },
    {
      "epoch": 0.29,
      "learning_rate": 8.733908948194663e-06,
      "loss": 0.4565,
      "step": 620
    },
    {
      "epoch": 0.29,
      "learning_rate": 8.710361067503925e-06,
      "loss": 0.4617,
      "step": 630
    },
    {
      "epoch": 0.3,
      "learning_rate": 8.686813186813188e-06,
      "loss": 0.4598,
      "step": 640
    },
    {
      "epoch": 0.3,
      "learning_rate": 8.663265306122449e-06,
      "loss": 0.4441,
      "step": 650
    },
    {
      "epoch": 0.31,
      "learning_rate": 8.639717425431711e-06,
      "loss": 0.4935,
      "step": 660
    },
    {
      "epoch": 0.31,
      "learning_rate": 8.616169544740974e-06,
      "loss": 0.4646,
      "step": 670
    },
    {
      "epoch": 0.31,
      "learning_rate": 8.592621664050236e-06,
      "loss": 0.4771,
      "step": 680
    },
    {
      "epoch": 0.32,
      "learning_rate": 8.569073783359497e-06,
      "loss": 0.4754,
      "step": 690
    },
    {
      "epoch": 0.32,
      "learning_rate": 8.54552590266876e-06,
      "loss": 0.4684,
      "step": 700
    },
    {
      "epoch": 0.33,
      "learning_rate": 8.521978021978022e-06,
      "loss": 0.4691,
      "step": 710
    },
    {
      "epoch": 0.33,
      "learning_rate": 8.498430141287285e-06,
      "loss": 0.4807,
      "step": 720
    },
    {
      "epoch": 0.34,
      "learning_rate": 8.474882260596547e-06,
      "loss": 0.4597,
      "step": 730
    },
    {
      "epoch": 0.34,
      "learning_rate": 8.45133437990581e-06,
      "loss": 0.455,
      "step": 740
    },
    {
      "epoch": 0.35,
      "learning_rate": 8.42778649921507e-06,
      "loss": 0.4561,
      "step": 750
    },
    {
      "epoch": 0.35,
      "eval_webgpt_accuracy": 0.5702247191011236,
      "eval_webgpt_loss": 0.6994287371635437,
      "eval_webgpt_runtime": 152.9386,
      "eval_webgpt_samples_per_second": 25.605,
      "eval_webgpt_steps_per_second": 2.563,
      "step": 750
    },
    {
      "epoch": 0.35,
      "eval_hfsummary_accuracy": 0.6827373575552399,
      "eval_hfsummary_loss": 0.60725337266922,
      "eval_hfsummary_runtime": 2470.4498,
      "eval_hfsummary_samples_per_second": 13.391,
      "eval_hfsummary_steps_per_second": 1.339,
      "step": 750
    },
    {
      "epoch": 0.35,
      "eval_gptsynthetic_accuracy": 0.9975867269984917,
      "eval_gptsynthetic_loss": 0.01110268384218216,
      "eval_gptsynthetic_runtime": 116.8749,
      "eval_gptsynthetic_samples_per_second": 28.364,
      "eval_gptsynthetic_steps_per_second": 2.841,
      "step": 750
    },
    {
      "epoch": 0.35,
      "learning_rate": 8.404238618524333e-06,
      "loss": 0.4578,
      "step": 760
    },
    {
      "epoch": 0.36,
      "learning_rate": 8.380690737833595e-06,
      "loss": 0.4798,
      "step": 770
    },
    {
      "epoch": 0.36,
      "learning_rate": 8.359497645211932e-06,
      "loss": 0.4402,
      "step": 780
    },
    {
      "epoch": 0.37,
      "learning_rate": 8.335949764521193e-06,
      "loss": 0.4632,
      "step": 790
    },
    {
      "epoch": 0.37,
      "learning_rate": 8.312401883830455e-06,
      "loss": 0.4505,
      "step": 800
    },
    {
      "epoch": 0.37,
      "learning_rate": 8.288854003139718e-06,
      "loss": 0.4641,
      "step": 810
    },
    {
      "epoch": 0.38,
      "learning_rate": 8.26530612244898e-06,
      "loss": 0.4373,
      "step": 820
    },
    {
      "epoch": 0.38,
      "learning_rate": 8.241758241758241e-06,
      "loss": 0.4192,
      "step": 830
    },
    {
      "epoch": 0.39,
      "learning_rate": 8.218210361067504e-06,
      "loss": 0.4671,
      "step": 840
    },
    {
      "epoch": 0.39,
      "learning_rate": 8.194662480376766e-06,
      "loss": 0.4481,
      "step": 850
    },
    {
      "epoch": 0.4,
      "learning_rate": 8.171114599686029e-06,
      "loss": 0.4481,
      "step": 860
    },
    {
      "epoch": 0.4,
      "learning_rate": 8.147566718995291e-06,
      "loss": 0.4317,
      "step": 870
    },
    {
      "epoch": 0.41,
      "learning_rate": 8.124018838304554e-06,
      "loss": 0.4517,
      "step": 880
    },
    {
      "epoch": 0.41,
      "learning_rate": 8.100470957613814e-06,
      "loss": 0.4282,
      "step": 890
    },
    {
      "epoch": 0.42,
      "learning_rate": 8.076923076923077e-06,
      "loss": 0.4543,
      "step": 900
    },
    {
      "epoch": 0.42,
      "learning_rate": 8.05337519623234e-06,
      "loss": 0.4303,
      "step": 910
    },
    {
      "epoch": 0.43,
      "learning_rate": 8.029827315541602e-06,
      "loss": 0.4423,
      "step": 920
    },
    {
      "epoch": 0.43,
      "learning_rate": 8.006279434850863e-06,
      "loss": 0.4624,
      "step": 930
    },
    {
      "epoch": 0.43,
      "learning_rate": 7.982731554160125e-06,
      "loss": 0.4318,
      "step": 940
    },
    {
      "epoch": 0.44,
      "learning_rate": 7.959183673469388e-06,
      "loss": 0.4374,
      "step": 950
    },
    {
      "epoch": 0.44,
      "learning_rate": 7.93563579277865e-06,
      "loss": 0.4271,
      "step": 960
    },
    {
      "epoch": 0.45,
      "learning_rate": 7.912087912087913e-06,
      "loss": 0.4535,
      "step": 970
    },
    {
      "epoch": 0.45,
      "learning_rate": 7.888540031397175e-06,
      "loss": 0.4393,
      "step": 980
    },
    {
      "epoch": 0.46,
      "learning_rate": 7.864992150706436e-06,
      "loss": 0.4513,
      "step": 990
    },
    {
      "epoch": 0.46,
      "learning_rate": 7.841444270015699e-06,
      "loss": 0.4259,
      "step": 1000
    },
    {
      "epoch": 0.46,
      "eval_webgpt_accuracy": 0.5720122574055159,
      "eval_webgpt_loss": 0.6880369782447815,
      "eval_webgpt_runtime": 153.0376,
      "eval_webgpt_samples_per_second": 25.588,
      "eval_webgpt_steps_per_second": 2.561,
      "step": 1000
    },
    {
      "epoch": 0.46,
      "eval_hfsummary_accuracy": 0.6844905238339933,
      "eval_hfsummary_loss": 0.6170333027839661,
      "eval_hfsummary_runtime": 2474.9308,
      "eval_hfsummary_samples_per_second": 13.367,
      "eval_hfsummary_steps_per_second": 1.337,
      "step": 1000
    },
    {
      "epoch": 0.46,
      "eval_gptsynthetic_accuracy": 0.9978883861236802,
      "eval_gptsynthetic_loss": 0.014577150344848633,
      "eval_gptsynthetic_runtime": 116.8198,
      "eval_gptsynthetic_samples_per_second": 28.377,
      "eval_gptsynthetic_steps_per_second": 2.842,
      "step": 1000
    },
    {
      "epoch": 0.47,
      "learning_rate": 7.817896389324961e-06,
      "loss": 0.449,
      "step": 1010
    },
    {
      "epoch": 0.47,
      "learning_rate": 7.794348508634224e-06,
      "loss": 0.4274,
      "step": 1020
    },
    {
      "epoch": 0.48,
      "learning_rate": 7.770800627943485e-06,
      "loss": 0.4014,
      "step": 1030
    },
    {
      "epoch": 0.48,
      "learning_rate": 7.747252747252747e-06,
      "loss": 0.4356,
      "step": 1040
    },
    {
      "epoch": 0.49,
      "learning_rate": 7.72370486656201e-06,
      "loss": 0.4378,
      "step": 1050
    },
    {
      "epoch": 0.49,
      "learning_rate": 7.700156985871272e-06,
      "loss": 0.4298,
      "step": 1060
    },
    {
      "epoch": 0.49,
      "learning_rate": 7.676609105180535e-06,
      "loss": 0.4514,
      "step": 1070
    },
    {
      "epoch": 0.5,
      "learning_rate": 7.653061224489796e-06,
      "loss": 0.4526,
      "step": 1080
    },
    {
      "epoch": 0.5,
      "learning_rate": 7.629513343799058e-06,
      "loss": 0.4192,
      "step": 1090
    },
    {
      "epoch": 0.51,
      "learning_rate": 7.60596546310832e-06,
      "loss": 0.4433,
      "step": 1100
    },
    {
      "epoch": 0.51,
      "learning_rate": 7.582417582417582e-06,
      "loss": 0.4199,
      "step": 1110
    },
    {
      "epoch": 0.52,
      "learning_rate": 7.558869701726845e-06,
      "loss": 0.4419,
      "step": 1120
    },
    {
      "epoch": 0.52,
      "learning_rate": 7.535321821036107e-06,
      "loss": 0.4163,
      "step": 1130
    },
    {
      "epoch": 0.53,
      "learning_rate": 7.511773940345369e-06,
      "loss": 0.411,
      "step": 1140
    },
    {
      "epoch": 0.53,
      "learning_rate": 7.4882260596546314e-06,
      "loss": 0.4492,
      "step": 1150
    },
    {
      "epoch": 0.54,
      "learning_rate": 7.464678178963893e-06,
      "loss": 0.454,
      "step": 1160
    },
    {
      "epoch": 0.54,
      "learning_rate": 7.441130298273156e-06,
      "loss": 0.4311,
      "step": 1170
    },
    {
      "epoch": 0.55,
      "learning_rate": 7.417582417582417e-06,
      "loss": 0.4368,
      "step": 1180
    },
    {
      "epoch": 0.55,
      "learning_rate": 7.39403453689168e-06,
      "loss": 0.4264,
      "step": 1190
    },
    {
      "epoch": 0.56,
      "learning_rate": 7.3704866562009415e-06,
      "loss": 0.4163,
      "step": 1200
    },
    {
      "epoch": 0.56,
      "learning_rate": 7.346938775510204e-06,
      "loss": 0.4214,
      "step": 1210
    },
    {
      "epoch": 0.56,
      "learning_rate": 7.3233908948194665e-06,
      "loss": 0.4263,
      "step": 1220
    },
    {
      "epoch": 0.57,
      "learning_rate": 7.299843014128729e-06,
      "loss": 0.4233,
      "step": 1230
    },
    {
      "epoch": 0.57,
      "learning_rate": 7.276295133437991e-06,
      "loss": 0.4331,
      "step": 1240
    },
    {
      "epoch": 0.58,
      "learning_rate": 7.252747252747253e-06,
      "loss": 0.4241,
      "step": 1250
    },
    {
      "epoch": 0.58,
      "eval_webgpt_accuracy": 0.5806945863125639,
      "eval_webgpt_loss": 0.6766389608383179,
      "eval_webgpt_runtime": 153.0189,
      "eval_webgpt_samples_per_second": 25.592,
      "eval_webgpt_steps_per_second": 2.562,
      "step": 1250
    },
    {
      "epoch": 0.58,
      "eval_hfsummary_accuracy": 0.6927122691412508,
      "eval_hfsummary_loss": 0.6117472648620605,
      "eval_hfsummary_runtime": 2473.4013,
      "eval_hfsummary_samples_per_second": 13.376,
      "eval_hfsummary_steps_per_second": 1.338,
      "step": 1250
    },
    {
      "epoch": 0.58,
      "eval_gptsynthetic_accuracy": 0.9987933634992459,
      "eval_gptsynthetic_loss": 0.007306403946131468,
      "eval_gptsynthetic_runtime": 116.8034,
      "eval_gptsynthetic_samples_per_second": 28.381,
      "eval_gptsynthetic_steps_per_second": 2.842,
      "step": 1250
    },
    {
      "epoch": 0.58,
      "learning_rate": 7.229199372056515e-06,
      "loss": 0.3976,
      "step": 1260
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.205651491365777e-06,
      "loss": 0.4221,
      "step": 1270
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.182103610675039e-06,
      "loss": 0.4225,
      "step": 1280
    },
    {
      "epoch": 0.6,
      "learning_rate": 7.158555729984302e-06,
      "loss": 0.3961,
      "step": 1290
    },
    {
      "epoch": 0.6,
      "learning_rate": 7.135007849293563e-06,
      "loss": 0.4348,
      "step": 1300
    },
    {
      "epoch": 0.61,
      "learning_rate": 7.111459968602826e-06,
      "loss": 0.4146,
      "step": 1310
    },
    {
      "epoch": 0.61,
      "learning_rate": 7.087912087912088e-06,
      "loss": 0.4122,
      "step": 1320
    },
    {
      "epoch": 0.62,
      "learning_rate": 7.064364207221351e-06,
      "loss": 0.4126,
      "step": 1330
    },
    {
      "epoch": 0.62,
      "learning_rate": 7.0408163265306125e-06,
      "loss": 0.4,
      "step": 1340
    },
    {
      "epoch": 0.62,
      "learning_rate": 7.017268445839875e-06,
      "loss": 0.3939,
      "step": 1350
    },
    {
      "epoch": 0.63,
      "learning_rate": 6.993720565149137e-06,
      "loss": 0.4144,
      "step": 1360
    },
    {
      "epoch": 0.63,
      "learning_rate": 6.970172684458399e-06,
      "loss": 0.3895,
      "step": 1370
    },
    {
      "epoch": 0.64,
      "learning_rate": 6.946624803767661e-06,
      "loss": 0.431,
      "step": 1380
    },
    {
      "epoch": 0.64,
      "learning_rate": 6.923076923076923e-06,
      "loss": 0.4156,
      "step": 1390
    },
    {
      "epoch": 0.65,
      "learning_rate": 6.899529042386185e-06,
      "loss": 0.4317,
      "step": 1400
    },
    {
      "epoch": 0.65,
      "learning_rate": 6.8759811616954476e-06,
      "loss": 0.4142,
      "step": 1410
    },
    {
      "epoch": 0.66,
      "learning_rate": 6.85243328100471e-06,
      "loss": 0.393,
      "step": 1420
    },
    {
      "epoch": 0.66,
      "learning_rate": 6.828885400313973e-06,
      "loss": 0.4156,
      "step": 1430
    },
    {
      "epoch": 0.67,
      "learning_rate": 6.805337519623234e-06,
      "loss": 0.3995,
      "step": 1440
    },
    {
      "epoch": 0.67,
      "learning_rate": 6.781789638932497e-06,
      "loss": 0.4084,
      "step": 1450
    },
    {
      "epoch": 0.68,
      "learning_rate": 6.7582417582417585e-06,
      "loss": 0.4162,
      "step": 1460
    },
    {
      "epoch": 0.68,
      "learning_rate": 6.734693877551021e-06,
      "loss": 0.4167,
      "step": 1470
    },
    {
      "epoch": 0.68,
      "learning_rate": 6.711145996860283e-06,
      "loss": 0.3878,
      "step": 1480
    },
    {
      "epoch": 0.69,
      "learning_rate": 6.687598116169544e-06,
      "loss": 0.403,
      "step": 1490
    },
    {
      "epoch": 0.69,
      "learning_rate": 6.664050235478807e-06,
      "loss": 0.3988,
      "step": 1500
    },
    {
      "epoch": 0.69,
      "eval_webgpt_accuracy": 0.5840143003064351,
      "eval_webgpt_loss": 0.6755279302597046,
      "eval_webgpt_runtime": 152.9789,
      "eval_webgpt_samples_per_second": 25.598,
      "eval_webgpt_steps_per_second": 2.562,
      "step": 1500
    },
    {
      "epoch": 0.69,
      "eval_hfsummary_accuracy": 0.6905661518000181,
      "eval_hfsummary_loss": 0.6140123605728149,
      "eval_hfsummary_runtime": 2473.83,
      "eval_hfsummary_samples_per_second": 13.373,
      "eval_hfsummary_steps_per_second": 1.338,
      "step": 1500
    },
    {
      "epoch": 0.69,
      "eval_gptsynthetic_accuracy": 0.9978883861236802,
      "eval_gptsynthetic_loss": 0.008705263957381248,
      "eval_gptsynthetic_runtime": 116.82,
      "eval_gptsynthetic_samples_per_second": 28.377,
      "eval_gptsynthetic_steps_per_second": 2.842,
      "step": 1500
    },
    {
      "epoch": 0.7,
      "learning_rate": 6.6405023547880685e-06,
      "loss": 0.4004,
      "step": 1510
    },
    {
      "epoch": 0.7,
      "learning_rate": 6.616954474097332e-06,
      "loss": 0.3856,
      "step": 1520
    },
    {
      "epoch": 0.71,
      "learning_rate": 6.5934065934065935e-06,
      "loss": 0.4168,
      "step": 1530
    },
    {
      "epoch": 0.71,
      "learning_rate": 6.569858712715856e-06,
      "loss": 0.401,
      "step": 1540
    },
    {
      "epoch": 0.72,
      "learning_rate": 6.546310832025118e-06,
      "loss": 0.4038,
      "step": 1550
    },
    {
      "epoch": 0.72,
      "learning_rate": 6.52276295133438e-06,
      "loss": 0.3804,
      "step": 1560
    },
    {
      "epoch": 0.73,
      "learning_rate": 6.499215070643642e-06,
      "loss": 0.3787,
      "step": 1570
    },
    {
      "epoch": 0.73,
      "learning_rate": 6.4756671899529044e-06,
      "loss": 0.4003,
      "step": 1580
    },
    {
      "epoch": 0.74,
      "learning_rate": 6.452119309262166e-06,
      "loss": 0.3954,
      "step": 1590
    },
    {
      "epoch": 0.74,
      "learning_rate": 6.428571428571429e-06,
      "loss": 0.4011,
      "step": 1600
    },
    {
      "epoch": 0.74,
      "learning_rate": 6.40502354788069e-06,
      "loss": 0.3882,
      "step": 1610
    },
    {
      "epoch": 0.75,
      "learning_rate": 6.381475667189954e-06,
      "loss": 0.3954,
      "step": 1620
    },
    {
      "epoch": 0.75,
      "learning_rate": 6.357927786499215e-06,
      "loss": 0.3788,
      "step": 1630
    },
    {
      "epoch": 0.76,
      "learning_rate": 6.334379905808478e-06,
      "loss": 0.3921,
      "step": 1640
    },
    {
      "epoch": 0.76,
      "learning_rate": 6.3108320251177395e-06,
      "loss": 0.39,
      "step": 1650
    },
    {
      "epoch": 0.77,
      "learning_rate": 6.287284144427002e-06,
      "loss": 0.4137,
      "step": 1660
    },
    {
      "epoch": 0.77,
      "learning_rate": 6.263736263736264e-06,
      "loss": 0.4032,
      "step": 1670
    },
    {
      "epoch": 0.78,
      "learning_rate": 6.240188383045526e-06,
      "loss": 0.3865,
      "step": 1680
    },
    {
      "epoch": 0.78,
      "learning_rate": 6.216640502354788e-06,
      "loss": 0.3971,
      "step": 1690
    },
    {
      "epoch": 0.79,
      "learning_rate": 6.19309262166405e-06,
      "loss": 0.3585,
      "step": 1700
    },
    {
      "epoch": 0.79,
      "learning_rate": 6.169544740973312e-06,
      "loss": 0.3933,
      "step": 1710
    },
    {
      "epoch": 0.8,
      "learning_rate": 6.1459968602825754e-06,
      "loss": 0.4003,
      "step": 1720
    },
    {
      "epoch": 0.8,
      "learning_rate": 6.122448979591837e-06,
      "loss": 0.3931,
      "step": 1730
    },
    {
      "epoch": 0.8,
      "learning_rate": 6.0989010989011e-06,
      "loss": 0.3751,
      "step": 1740
    },
    {
      "epoch": 0.81,
      "learning_rate": 6.075353218210361e-06,
      "loss": 0.3793,
      "step": 1750
    },
    {
      "epoch": 0.81,
      "eval_webgpt_accuracy": 0.593973442288049,
      "eval_webgpt_loss": 0.6751279830932617,
      "eval_webgpt_runtime": 152.9792,
      "eval_webgpt_samples_per_second": 25.598,
      "eval_webgpt_steps_per_second": 2.562,
      "step": 1750
    },
    {
      "epoch": 0.81,
      "eval_hfsummary_accuracy": 0.6901127467279268,
      "eval_hfsummary_loss": 0.655200183391571,
      "eval_hfsummary_runtime": 2476.3756,
      "eval_hfsummary_samples_per_second": 13.359,
      "eval_hfsummary_steps_per_second": 1.336,
      "step": 1750
    },
    {
      "epoch": 0.81,
      "eval_gptsynthetic_accuracy": 0.9978883861236802,
      "eval_gptsynthetic_loss": 0.007934121415019035,
      "eval_gptsynthetic_runtime": 116.9338,
      "eval_gptsynthetic_samples_per_second": 28.349,
      "eval_gptsynthetic_steps_per_second": 2.839,
      "step": 1750
    },
    {
      "epoch": 0.81,
      "learning_rate": 6.051805337519624e-06,
      "loss": 0.3883,
      "step": 1760
    },
    {
      "epoch": 0.82,
      "learning_rate": 6.0282574568288855e-06,
      "loss": 0.3918,
      "step": 1770
    },
    {
      "epoch": 0.82,
      "learning_rate": 6.004709576138148e-06,
      "loss": 0.4086,
      "step": 1780
    },
    {
      "epoch": 0.83,
      "learning_rate": 5.98116169544741e-06,
      "loss": 0.3876,
      "step": 1790
    },
    {
      "epoch": 0.83,
      "learning_rate": 5.957613814756672e-06,
      "loss": 0.3613,
      "step": 1800
    },
    {
      "epoch": 0.84,
      "learning_rate": 5.934065934065934e-06,
      "loss": 0.3715,
      "step": 1810
    },
    {
      "epoch": 0.84,
      "learning_rate": 5.910518053375197e-06,
      "loss": 0.3879,
      "step": 1820
    },
    {
      "epoch": 0.85,
      "learning_rate": 5.886970172684459e-06,
      "loss": 0.3893,
      "step": 1830
    },
    {
      "epoch": 0.85,
      "learning_rate": 5.863422291993721e-06,
      "loss": 0.34,
      "step": 1840
    },
    {
      "epoch": 0.86,
      "learning_rate": 5.839874411302983e-06,
      "loss": 0.3938,
      "step": 1850
    },
    {
      "epoch": 0.86,
      "learning_rate": 5.816326530612246e-06,
      "loss": 0.3802,
      "step": 1860
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.792778649921507e-06,
      "loss": 0.3869,
      "step": 1870
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.76923076923077e-06,
      "loss": 0.4023,
      "step": 1880
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.7456828885400314e-06,
      "loss": 0.3694,
      "step": 1890
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.722135007849293e-06,
      "loss": 0.358,
      "step": 1900
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.698587127158556e-06,
      "loss": 0.3939,
      "step": 1910
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.675039246467817e-06,
      "loss": 0.3805,
      "step": 1920
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.651491365777081e-06,
      "loss": 0.3754,
      "step": 1930
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.627943485086342e-06,
      "loss": 0.3454,
      "step": 1940
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.604395604395605e-06,
      "loss": 0.4109,
      "step": 1950
    },
    {
      "epoch": 0.91,
      "learning_rate": 5.5808477237048665e-06,
      "loss": 0.364,
      "step": 1960
    },
    {
      "epoch": 0.91,
      "learning_rate": 5.557299843014129e-06,
      "loss": 0.3869,
      "step": 1970
    },
    {
      "epoch": 0.92,
      "learning_rate": 5.533751962323391e-06,
      "loss": 0.3858,
      "step": 1980
    },
    {
      "epoch": 0.92,
      "learning_rate": 5.510204081632653e-06,
      "loss": 0.3634,
      "step": 1990
    },
    {
      "epoch": 0.93,
      "learning_rate": 5.486656200941915e-06,
      "loss": 0.3444,
      "step": 2000
    },
    {
      "epoch": 0.93,
      "eval_webgpt_accuracy": 0.5916751787538305,
      "eval_webgpt_loss": 0.6755998134613037,
      "eval_webgpt_runtime": 153.0377,
      "eval_webgpt_samples_per_second": 25.588,
      "eval_webgpt_steps_per_second": 2.561,
      "step": 2000
    },
    {
      "epoch": 0.93,
      "eval_hfsummary_accuracy": 0.6894477526221927,
      "eval_hfsummary_loss": 0.6544569730758667,
      "eval_hfsummary_runtime": 2482.2617,
      "eval_hfsummary_samples_per_second": 13.328,
      "eval_hfsummary_steps_per_second": 1.333,
      "step": 2000
    },
    {
      "epoch": 0.93,
      "eval_gptsynthetic_accuracy": 0.9975867269984917,
      "eval_gptsynthetic_loss": 0.00854982528835535,
      "eval_gptsynthetic_runtime": 117.0015,
      "eval_gptsynthetic_samples_per_second": 28.333,
      "eval_gptsynthetic_steps_per_second": 2.838,
      "step": 2000
    },
    {
      "epoch": 0.93,
      "learning_rate": 5.463108320251177e-06,
      "loss": 0.3548,
      "step": 2010
    },
    {
      "epoch": 0.93,
      "learning_rate": 5.439560439560439e-06,
      "loss": 0.3849,
      "step": 2020
    },
    {
      "epoch": 0.94,
      "learning_rate": 5.4160125588697024e-06,
      "loss": 0.3732,
      "step": 2030
    },
    {
      "epoch": 0.94,
      "learning_rate": 5.392464678178964e-06,
      "loss": 0.3589,
      "step": 2040
    },
    {
      "epoch": 0.95,
      "learning_rate": 5.368916797488227e-06,
      "loss": 0.3595,
      "step": 2050
    },
    {
      "epoch": 0.95,
      "learning_rate": 5.345368916797488e-06,
      "loss": 0.3704,
      "step": 2060
    },
    {
      "epoch": 0.96,
      "learning_rate": 5.324175824175825e-06,
      "loss": 0.3846,
      "step": 2070
    },
    {
      "epoch": 0.96,
      "learning_rate": 5.300627943485086e-06,
      "loss": 0.3862,
      "step": 2080
    },
    {
      "epoch": 0.97,
      "learning_rate": 5.277080062794349e-06,
      "loss": 0.3956,
      "step": 2090
    },
    {
      "epoch": 0.97,
      "learning_rate": 5.253532182103611e-06,
      "loss": 0.3629,
      "step": 2100
    },
    {
      "epoch": 0.98,
      "learning_rate": 5.229984301412873e-06,
      "loss": 0.3674,
      "step": 2110
    },
    {
      "epoch": 0.98,
      "learning_rate": 5.206436420722135e-06,
      "loss": 0.3827,
      "step": 2120
    },
    {
      "epoch": 0.99,
      "learning_rate": 5.182888540031397e-06,
      "loss": 0.3751,
      "step": 2130
    },
    {
      "epoch": 0.99,
      "learning_rate": 5.159340659340659e-06,
      "loss": 0.3834,
      "step": 2140
    },
    {
      "epoch": 0.99,
      "learning_rate": 5.1357927786499215e-06,
      "loss": 0.3425,
      "step": 2150
    },
    {
      "epoch": 1.0,
      "learning_rate": 5.112244897959183e-06,
      "loss": 0.3731,
      "step": 2160
    },
    {
      "epoch": 1.0,
      "learning_rate": 5.0886970172684465e-06,
      "loss": 0.3724,
      "step": 2170
    },
    {
      "epoch": 1.01,
      "learning_rate": 5.065149136577708e-06,
      "loss": 0.3842,
      "step": 2180
    },
    {
      "epoch": 1.01,
      "learning_rate": 5.041601255886971e-06,
      "loss": 0.3661,
      "step": 2190
    },
    {
      "epoch": 1.02,
      "learning_rate": 5.018053375196232e-06,
      "loss": 0.3795,
      "step": 2200
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.994505494505495e-06,
      "loss": 0.3766,
      "step": 2210
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.9709576138147566e-06,
      "loss": 0.3641,
      "step": 2220
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.947409733124019e-06,
      "loss": 0.355,
      "step": 2230
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.923861852433281e-06,
      "loss": 0.3481,
      "step": 2240
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.900313971742543e-06,
      "loss": 0.312,
      "step": 2250
    },
    {
      "epoch": 1.04,
      "eval_webgpt_accuracy": 0.5970377936670072,
      "eval_webgpt_loss": 0.6743035912513733,
      "eval_webgpt_runtime": 153.349,
      "eval_webgpt_samples_per_second": 25.537,
      "eval_webgpt_steps_per_second": 2.556,
      "step": 2250
    },
    {
      "epoch": 1.04,
      "eval_hfsummary_accuracy": 0.6958558776410846,
      "eval_hfsummary_loss": 0.6904739141464233,
      "eval_hfsummary_runtime": 2483.5335,
      "eval_hfsummary_samples_per_second": 13.321,
      "eval_hfsummary_steps_per_second": 1.332,
      "step": 2250
    },
    {
      "epoch": 1.04,
      "eval_gptsynthetic_accuracy": 0.9972850678733032,
      "eval_gptsynthetic_loss": 0.011290821246802807,
      "eval_gptsynthetic_runtime": 117.0341,
      "eval_gptsynthetic_samples_per_second": 28.325,
      "eval_gptsynthetic_steps_per_second": 2.837,
      "step": 2250
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.876766091051805e-06,
      "loss": 0.3593,
      "step": 2260
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.853218210361068e-06,
      "loss": 0.3378,
      "step": 2270
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.82967032967033e-06,
      "loss": 0.3522,
      "step": 2280
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.8061224489795925e-06,
      "loss": 0.3732,
      "step": 2290
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.782574568288854e-06,
      "loss": 0.3616,
      "step": 2300
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.759026687598117e-06,
      "loss": 0.3478,
      "step": 2310
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.735478806907378e-06,
      "loss": 0.3721,
      "step": 2320
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.711930926216641e-06,
      "loss": 0.3314,
      "step": 2330
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.6883830455259025e-06,
      "loss": 0.3413,
      "step": 2340
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.664835164835165e-06,
      "loss": 0.3266,
      "step": 2350
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.641287284144427e-06,
      "loss": 0.3579,
      "step": 2360
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.61773940345369e-06,
      "loss": 0.3549,
      "step": 2370
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.594191522762952e-06,
      "loss": 0.347,
      "step": 2380
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.570643642072214e-06,
      "loss": 0.3453,
      "step": 2390
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.547095761381476e-06,
      "loss": 0.3411,
      "step": 2400
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.5235478806907384e-06,
      "loss": 0.3411,
      "step": 2410
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.5e-06,
      "loss": 0.361,
      "step": 2420
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.476452119309263e-06,
      "loss": 0.3576,
      "step": 2430
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.452904238618524e-06,
      "loss": 0.3476,
      "step": 2440
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.429356357927787e-06,
      "loss": 0.3382,
      "step": 2450
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.4058084772370485e-06,
      "loss": 0.3485,
      "step": 2460
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.382260596546311e-06,
      "loss": 0.3237,
      "step": 2470
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.3587127158555735e-06,
      "loss": 0.3385,
      "step": 2480
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.335164835164835e-06,
      "loss": 0.3124,
      "step": 2490
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.311616954474098e-06,
      "loss": 0.3385,
      "step": 2500
    },
    {
      "epoch": 1.16,
      "eval_webgpt_accuracy": 0.589632277834525,
      "eval_webgpt_loss": 0.6730017066001892,
      "eval_webgpt_runtime": 153.2835,
      "eval_webgpt_samples_per_second": 25.547,
      "eval_webgpt_steps_per_second": 2.557,
      "step": 2500
    },
    {
      "epoch": 1.16,
      "eval_hfsummary_accuracy": 0.6939515763383006,
      "eval_hfsummary_loss": 0.6676614880561829,
      "eval_hfsummary_runtime": 2484.0871,
      "eval_hfsummary_samples_per_second": 13.318,
      "eval_hfsummary_steps_per_second": 1.332,
      "step": 2500
    },
    {
      "epoch": 1.16,
      "eval_gptsynthetic_accuracy": 0.9987933634992459,
      "eval_gptsynthetic_loss": 0.008312013931572437,
      "eval_gptsynthetic_runtime": 117.0521,
      "eval_gptsynthetic_samples_per_second": 28.321,
      "eval_gptsynthetic_steps_per_second": 2.836,
      "step": 2500
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.288069073783359e-06,
      "loss": 0.3575,
      "step": 2510
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.264521193092622e-06,
      "loss": 0.3562,
      "step": 2520
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.240973312401884e-06,
      "loss": 0.3296,
      "step": 2530
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.217425431711146e-06,
      "loss": 0.3447,
      "step": 2540
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.193877551020409e-06,
      "loss": 0.3373,
      "step": 2550
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.17032967032967e-06,
      "loss": 0.3394,
      "step": 2560
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.146781789638933e-06,
      "loss": 0.3294,
      "step": 2570
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.123233908948195e-06,
      "loss": 0.3715,
      "step": 2580
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.099686028257457e-06,
      "loss": 0.3356,
      "step": 2590
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.0761381475667195e-06,
      "loss": 0.3497,
      "step": 2600
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.052590266875981e-06,
      "loss": 0.3189,
      "step": 2610
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.029042386185244e-06,
      "loss": 0.3473,
      "step": 2620
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.005494505494505e-06,
      "loss": 0.3259,
      "step": 2630
    },
    {
      "epoch": 1.22,
      "learning_rate": 3.981946624803768e-06,
      "loss": 0.3294,
      "step": 2640
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.9583987441130295e-06,
      "loss": 0.3294,
      "step": 2650
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.934850863422292e-06,
      "loss": 0.3221,
      "step": 2660
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.911302982731554e-06,
      "loss": 0.3698,
      "step": 2670
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.887755102040816e-06,
      "loss": 0.3514,
      "step": 2680
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.864207221350079e-06,
      "loss": 0.3461,
      "step": 2690
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.84065934065934e-06,
      "loss": 0.3321,
      "step": 2700
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.817111459968603e-06,
      "loss": 0.3241,
      "step": 2710
    },
    {
      "epoch": 1.26,
      "learning_rate": 3.793563579277865e-06,
      "loss": 0.3466,
      "step": 2720
    },
    {
      "epoch": 1.26,
      "learning_rate": 3.770015698587127e-06,
      "loss": 0.3188,
      "step": 2730
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.7464678178963892e-06,
      "loss": 0.32,
      "step": 2740
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.7229199372056513e-06,
      "loss": 0.3383,
      "step": 2750
    },
    {
      "epoch": 1.27,
      "eval_webgpt_accuracy": 0.5776302349336058,
      "eval_webgpt_loss": 0.6770792603492737,
      "eval_webgpt_runtime": 153.2414,
      "eval_webgpt_samples_per_second": 25.554,
      "eval_webgpt_steps_per_second": 2.558,
      "step": 2750
    },
    {
      "epoch": 1.27,
      "eval_hfsummary_accuracy": 0.6819212284254753,
      "eval_hfsummary_loss": 0.7081930041313171,
      "eval_hfsummary_runtime": 2484.801,
      "eval_hfsummary_samples_per_second": 13.314,
      "eval_hfsummary_steps_per_second": 1.332,
      "step": 2750
    },
    {
      "epoch": 1.27,
      "eval_gptsynthetic_accuracy": 0.9984917043740573,
      "eval_gptsynthetic_loss": 0.006183410994708538,
      "eval_gptsynthetic_runtime": 117.1145,
      "eval_gptsynthetic_samples_per_second": 28.306,
      "eval_gptsynthetic_steps_per_second": 2.835,
      "step": 2750
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.699372056514914e-06,
      "loss": 0.3136,
      "step": 2760
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.675824175824176e-06,
      "loss": 0.3358,
      "step": 2770
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.652276295133438e-06,
      "loss": 0.3612,
      "step": 2780
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.6287284144427e-06,
      "loss": 0.319,
      "step": 2790
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.605180533751962e-06,
      "loss": 0.3039,
      "step": 2800
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.5816326530612247e-06,
      "loss": 0.3383,
      "step": 2810
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.558084772370487e-06,
      "loss": 0.3096,
      "step": 2820
    },
    {
      "epoch": 1.31,
      "learning_rate": 3.534536891679749e-06,
      "loss": 0.3377,
      "step": 2830
    },
    {
      "epoch": 1.31,
      "learning_rate": 3.510989010989011e-06,
      "loss": 0.314,
      "step": 2840
    },
    {
      "epoch": 1.32,
      "learning_rate": 3.487441130298273e-06,
      "loss": 0.344,
      "step": 2850
    },
    {
      "epoch": 1.32,
      "learning_rate": 3.4638932496075356e-06,
      "loss": 0.3158,
      "step": 2860
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.4403453689167977e-06,
      "loss": 0.3277,
      "step": 2870
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.41679748822606e-06,
      "loss": 0.3292,
      "step": 2880
    },
    {
      "epoch": 1.34,
      "learning_rate": 3.393249607535322e-06,
      "loss": 0.3479,
      "step": 2890
    },
    {
      "epoch": 1.34,
      "learning_rate": 3.369701726844584e-06,
      "loss": 0.338,
      "step": 2900
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.3461538461538465e-06,
      "loss": 0.3388,
      "step": 2910
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.3226059654631086e-06,
      "loss": 0.2873,
      "step": 2920
    },
    {
      "epoch": 1.36,
      "learning_rate": 3.2990580847723707e-06,
      "loss": 0.3147,
      "step": 2930
    },
    {
      "epoch": 1.36,
      "learning_rate": 3.2755102040816328e-06,
      "loss": 0.3423,
      "step": 2940
    },
    {
      "epoch": 1.36,
      "learning_rate": 3.251962323390895e-06,
      "loss": 0.3052,
      "step": 2950
    },
    {
      "epoch": 1.37,
      "learning_rate": 3.2284144427001574e-06,
      "loss": 0.3308,
      "step": 2960
    },
    {
      "epoch": 1.37,
      "learning_rate": 3.2048665620094195e-06,
      "loss": 0.3484,
      "step": 2970
    },
    {
      "epoch": 1.38,
      "learning_rate": 3.1813186813186816e-06,
      "loss": 0.3287,
      "step": 2980
    },
    {
      "epoch": 1.38,
      "learning_rate": 3.1577708006279437e-06,
      "loss": 0.2954,
      "step": 2990
    },
    {
      "epoch": 1.39,
      "learning_rate": 3.1342229199372058e-06,
      "loss": 0.3284,
      "step": 3000
    },
    {
      "epoch": 1.39,
      "eval_webgpt_accuracy": 0.5832482124616956,
      "eval_webgpt_loss": 0.6810373067855835,
      "eval_webgpt_runtime": 153.3795,
      "eval_webgpt_samples_per_second": 25.531,
      "eval_webgpt_steps_per_second": 2.556,
      "step": 3000
    },
    {
      "epoch": 1.39,
      "eval_hfsummary_accuracy": 0.6889641205452952,
      "eval_hfsummary_loss": 0.7132372856140137,
      "eval_hfsummary_runtime": 2481.1392,
      "eval_hfsummary_samples_per_second": 13.334,
      "eval_hfsummary_steps_per_second": 1.334,
      "step": 3000
    },
    {
      "epoch": 1.39,
      "eval_gptsynthetic_accuracy": 0.9987933634992459,
      "eval_gptsynthetic_loss": 0.005898704752326012,
      "eval_gptsynthetic_runtime": 116.9777,
      "eval_gptsynthetic_samples_per_second": 28.339,
      "eval_gptsynthetic_steps_per_second": 2.838,
      "step": 3000
    },
    {
      "epoch": 1.39,
      "learning_rate": 3.1106750392464683e-06,
      "loss": 0.3138,
      "step": 3010
    },
    {
      "epoch": 1.4,
      "learning_rate": 3.0871271585557304e-06,
      "loss": 0.3364,
      "step": 3020
    },
    {
      "epoch": 1.4,
      "learning_rate": 3.063579277864992e-06,
      "loss": 0.3439,
      "step": 3030
    },
    {
      "epoch": 1.41,
      "learning_rate": 3.040031397174254e-06,
      "loss": 0.3489,
      "step": 3040
    },
    {
      "epoch": 1.41,
      "learning_rate": 3.0164835164835162e-06,
      "loss": 0.3419,
      "step": 3050
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.9929356357927783e-06,
      "loss": 0.3052,
      "step": 3060
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.969387755102041e-06,
      "loss": 0.3126,
      "step": 3070
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.945839874411303e-06,
      "loss": 0.3475,
      "step": 3080
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.922291993720565e-06,
      "loss": 0.3444,
      "step": 3090
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.898744113029827e-06,
      "loss": 0.3279,
      "step": 3100
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.8751962323390892e-06,
      "loss": 0.3315,
      "step": 3110
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.8516483516483517e-06,
      "loss": 0.3238,
      "step": 3120
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.828100470957614e-06,
      "loss": 0.3291,
      "step": 3130
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.804552590266876e-06,
      "loss": 0.3014,
      "step": 3140
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.781004709576138e-06,
      "loss": 0.3397,
      "step": 3150
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.7574568288854e-06,
      "loss": 0.3143,
      "step": 3160
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.7339089481946626e-06,
      "loss": 0.3317,
      "step": 3170
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.7103610675039247e-06,
      "loss": 0.3522,
      "step": 3180
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.686813186813187e-06,
      "loss": 0.3324,
      "step": 3190
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.663265306122449e-06,
      "loss": 0.3043,
      "step": 3200
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.639717425431711e-06,
      "loss": 0.3424,
      "step": 3210
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.6161695447409735e-06,
      "loss": 0.3461,
      "step": 3220
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5926216640502356e-06,
      "loss": 0.3289,
      "step": 3230
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.5690737833594977e-06,
      "loss": 0.3057,
      "step": 3240
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.54552590266876e-06,
      "loss": 0.3118,
      "step": 3250
    },
    {
      "epoch": 1.5,
      "eval_webgpt_accuracy": 0.5847803881511746,
      "eval_webgpt_loss": 0.6773096919059753,
      "eval_webgpt_runtime": 153.2012,
      "eval_webgpt_samples_per_second": 25.561,
      "eval_webgpt_steps_per_second": 2.559,
      "step": 3250
    },
    {
      "epoch": 1.5,
      "eval_hfsummary_accuracy": 0.6834628056705861,
      "eval_hfsummary_loss": 0.7264418005943298,
      "eval_hfsummary_runtime": 2480.0981,
      "eval_hfsummary_samples_per_second": 13.339,
      "eval_hfsummary_steps_per_second": 1.334,
      "step": 3250
    },
    {
      "epoch": 1.5,
      "eval_gptsynthetic_accuracy": 0.9984917043740573,
      "eval_gptsynthetic_loss": 0.008459103293716908,
      "eval_gptsynthetic_runtime": 116.9721,
      "eval_gptsynthetic_samples_per_second": 28.34,
      "eval_gptsynthetic_steps_per_second": 2.838,
      "step": 3250
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.521978021978022e-06,
      "loss": 0.2776,
      "step": 3260
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.4984301412872844e-06,
      "loss": 0.3286,
      "step": 3270
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4748822605965465e-06,
      "loss": 0.3118,
      "step": 3280
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4513343799058086e-06,
      "loss": 0.3444,
      "step": 3290
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4277864992150707e-06,
      "loss": 0.3078,
      "step": 3300
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4042386185243328e-06,
      "loss": 0.3139,
      "step": 3310
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.3806907378335953e-06,
      "loss": 0.32,
      "step": 3320
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.3571428571428574e-06,
      "loss": 0.3211,
      "step": 3330
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.3335949764521195e-06,
      "loss": 0.306,
      "step": 3340
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.3100470957613816e-06,
      "loss": 0.3045,
      "step": 3350
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.2864992150706437e-06,
      "loss": 0.3123,
      "step": 3360
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.262951334379906e-06,
      "loss": 0.3073,
      "step": 3370
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.239403453689168e-06,
      "loss": 0.3061,
      "step": 3380
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.21585557299843e-06,
      "loss": 0.297,
      "step": 3390
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.1923076923076925e-06,
      "loss": 0.3467,
      "step": 3400
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.1687598116169546e-06,
      "loss": 0.3183,
      "step": 3410
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.1452119309262166e-06,
      "loss": 0.3063,
      "step": 3420
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.1216640502354787e-06,
      "loss": 0.286,
      "step": 3430
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.098116169544741e-06,
      "loss": 0.2963,
      "step": 3440
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.0745682888540034e-06,
      "loss": 0.3189,
      "step": 3450
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.0510204081632654e-06,
      "loss": 0.3128,
      "step": 3460
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.0274725274725275e-06,
      "loss": 0.3463,
      "step": 3470
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.0039246467817896e-06,
      "loss": 0.2914,
      "step": 3480
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.9803767660910517e-06,
      "loss": 0.3021,
      "step": 3490
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.9568288854003142e-06,
      "loss": 0.3168,
      "step": 3500
    },
    {
      "epoch": 1.62,
      "eval_webgpt_accuracy": 0.5870786516853933,
      "eval_webgpt_loss": 0.6775321364402771,
      "eval_webgpt_runtime": 153.1117,
      "eval_webgpt_samples_per_second": 25.576,
      "eval_webgpt_steps_per_second": 2.56,
      "step": 3500
    },
    {
      "epoch": 1.62,
      "eval_hfsummary_accuracy": 0.6833116706465556,
      "eval_hfsummary_loss": 0.7361535429954529,
      "eval_hfsummary_runtime": 2478.5992,
      "eval_hfsummary_samples_per_second": 13.347,
      "eval_hfsummary_steps_per_second": 1.335,
      "step": 3500
    },
    {
      "epoch": 1.62,
      "eval_gptsynthetic_accuracy": 0.9984917043740573,
      "eval_gptsynthetic_loss": 0.005919534247368574,
      "eval_gptsynthetic_runtime": 116.9876,
      "eval_gptsynthetic_samples_per_second": 28.336,
      "eval_gptsynthetic_steps_per_second": 2.838,
      "step": 3500
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.9332810047095763e-06,
      "loss": 0.3267,
      "step": 3510
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.9097331240188384e-06,
      "loss": 0.3299,
      "step": 3520
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.8861852433281007e-06,
      "loss": 0.3035,
      "step": 3530
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.8626373626373626e-06,
      "loss": 0.2943,
      "step": 3540
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.8390894819466247e-06,
      "loss": 0.287,
      "step": 3550
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.815541601255887e-06,
      "loss": 0.3217,
      "step": 3560
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.7919937205651491e-06,
      "loss": 0.3173,
      "step": 3570
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.7684458398744112e-06,
      "loss": 0.319,
      "step": 3580
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.7448979591836735e-06,
      "loss": 0.3236,
      "step": 3590
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.7213500784929356e-06,
      "loss": 0.3055,
      "step": 3600
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.697802197802198e-06,
      "loss": 0.3119,
      "step": 3610
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.67425431711146e-06,
      "loss": 0.3349,
      "step": 3620
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.650706436420722e-06,
      "loss": 0.2947,
      "step": 3630
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.6271585557299844e-06,
      "loss": 0.3285,
      "step": 3640
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.6036106750392465e-06,
      "loss": 0.3137,
      "step": 3650
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.5800627943485088e-06,
      "loss": 0.2999,
      "step": 3660
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.5565149136577709e-06,
      "loss": 0.3313,
      "step": 3670
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.532967032967033e-06,
      "loss": 0.3295,
      "step": 3680
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.5094191522762953e-06,
      "loss": 0.2978,
      "step": 3690
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.4858712715855574e-06,
      "loss": 0.3036,
      "step": 3700
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.4623233908948197e-06,
      "loss": 0.3072,
      "step": 3710
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.4387755102040818e-06,
      "loss": 0.3102,
      "step": 3720
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.4152276295133439e-06,
      "loss": 0.3215,
      "step": 3730
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.3916797488226062e-06,
      "loss": 0.3018,
      "step": 3740
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.368131868131868e-06,
      "loss": 0.312,
      "step": 3750
    },
    {
      "epoch": 1.74,
      "eval_webgpt_accuracy": 0.5873340143003064,
      "eval_webgpt_loss": 0.6725783348083496,
      "eval_webgpt_runtime": 153.1463,
      "eval_webgpt_samples_per_second": 25.57,
      "eval_webgpt_steps_per_second": 2.56,
      "step": 3750
    },
    {
      "epoch": 1.74,
      "eval_hfsummary_accuracy": 0.6811957803101291,
      "eval_hfsummary_loss": 0.7275147438049316,
      "eval_hfsummary_runtime": 2480.3805,
      "eval_hfsummary_samples_per_second": 13.338,
      "eval_hfsummary_steps_per_second": 1.334,
      "step": 3750
    },
    {
      "epoch": 1.74,
      "eval_gptsynthetic_accuracy": 0.9984917043740573,
      "eval_gptsynthetic_loss": 0.0056294966489076614,
      "eval_gptsynthetic_runtime": 116.9973,
      "eval_gptsynthetic_samples_per_second": 28.334,
      "eval_gptsynthetic_steps_per_second": 2.838,
      "step": 3750
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.3445839874411302e-06,
      "loss": 0.3263,
      "step": 3760
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.3210361067503925e-06,
      "loss": 0.3066,
      "step": 3770
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.2974882260596545e-06,
      "loss": 0.2967,
      "step": 3780
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 1.2739403453689169e-06, | |
| "loss": 0.3409, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 1.250392464678179e-06, | |
| "loss": 0.3158, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 1.226844583987441e-06, | |
| "loss": 0.3123, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 1.2032967032967033e-06, | |
| "loss": 0.3299, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 1.1797488226059654e-06, | |
| "loss": 0.2974, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 1.1562009419152277e-06, | |
| "loss": 0.3047, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 1.1326530612244898e-06, | |
| "loss": 0.2854, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 1.109105180533752e-06, | |
| "loss": 0.3379, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 1.0855572998430142e-06, | |
| "loss": 0.3147, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 1.0620094191522763e-06, | |
| "loss": 0.2994, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 1.0384615384615386e-06, | |
| "loss": 0.3146, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 1.0149136577708005e-06, | |
| "loss": 0.3412, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 9.913657770800628e-07, | |
| "loss": 0.285, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 9.67817896389325e-07, | |
| "loss": 0.3162, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 9.442700156985871e-07, | |
| "loss": 0.3123, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 9.207221350078493e-07, | |
| "loss": 0.3323, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 8.971742543171115e-07, | |
| "loss": 0.3072, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 8.736263736263737e-07, | |
| "loss": 0.3104, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 8.500784929356358e-07, | |
| "loss": 0.3, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 8.26530612244898e-07, | |
| "loss": 0.3235, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 8.029827315541602e-07, | |
| "loss": 0.3237, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 7.794348508634223e-07, | |
| "loss": 0.3086, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "eval_webgpt_accuracy": 0.5875893769152196, | |
| "eval_webgpt_loss": 0.6754750609397888, | |
| "eval_webgpt_runtime": 153.2384, | |
| "eval_webgpt_samples_per_second": 25.555, | |
| "eval_webgpt_steps_per_second": 2.558, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "eval_hfsummary_accuracy": 0.6865761871656137, | |
| "eval_hfsummary_loss": 0.7391700744628906, | |
| "eval_hfsummary_runtime": 2482.99, | |
| "eval_hfsummary_samples_per_second": 13.324, | |
| "eval_hfsummary_steps_per_second": 1.333, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "eval_gptsynthetic_accuracy": 0.9984917043740573, | |
| "eval_gptsynthetic_loss": 0.006686400156468153, | |
| "eval_gptsynthetic_runtime": 116.9831, | |
| "eval_gptsynthetic_samples_per_second": 28.337, | |
| "eval_gptsynthetic_steps_per_second": 2.838, | |
| "step": 4000 | |
| } | |
| ], | |
| "max_steps": 4322, | |
| "num_train_epochs": 2, | |
| "total_flos": 7.590150027767578e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
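
The closing keys above (`global_step`, `max_steps`, `num_train_epochs`, `total_flos`) identify this file as a Hugging Face `Trainer` state log. As a minimal illustrative sketch, the Python below shows one way to load such a file and summarize it: final training loss plus the most recent accuracy logged for each eval dataset (`eval_webgpt_accuracy`, `eval_hfsummary_accuracy`, `eval_gptsynthetic_accuracy`). The filename `trainer_state.json` is an assumption (the Trainer's default per-checkpoint name), not something stated in the log itself.

```python
import json

# Assumed path: trainer_state.json is the Trainer's default file name;
# adjust to wherever this log is actually saved.
with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry a "loss" key; eval entries carry "eval_*" keys.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"]
             if any(k.startswith("eval_") for k in e)]

print(f"trained {state['global_step']} / {state['max_steps']} steps "
      f"({state['epoch']:.2f} of {state['num_train_epochs']} epochs)")
print(f"final training loss: {train_logs[-1]['loss']} "
      f"at step {train_logs[-1]['step']}")

# Keep only the latest value seen for each per-dataset accuracy metric.
latest_acc = {}
for entry in eval_logs:
    for key, value in entry.items():
        if key.endswith("_accuracy"):
            latest_acc[key] = (entry["step"], value)

for key, (step, value) in sorted(latest_acc.items()):
    print(f"{key} = {value:.4f} (step {step})")
```

On this log the sketch would report the step-4000 figures: webgpt accuracy around 0.588, hfsummary around 0.687, and gptsynthetic around 0.998, matching the final eval block above.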