{
  "best_metric": 0.5247489213943481,
  "best_model_checkpoint": "./beans_outputs/checkpoint-720",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 900,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05555555555555555,
      "grad_norm": 3.8575210571289062,
      "learning_rate": 1.977777777777778e-05,
      "loss": 0.6949,
      "step": 10
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 3.2087113857269287,
      "learning_rate": 1.9555555555555557e-05,
      "loss": 0.6848,
      "step": 20
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 2.9537899494171143,
      "learning_rate": 1.9333333333333333e-05,
      "loss": 0.6871,
      "step": 30
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 2.586965560913086,
      "learning_rate": 1.9111111111111113e-05,
      "loss": 0.6731,
      "step": 40
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 3.3346517086029053,
      "learning_rate": 1.888888888888889e-05,
      "loss": 0.6728,
      "step": 50
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 2.4391684532165527,
      "learning_rate": 1.866666666666667e-05,
      "loss": 0.6617,
      "step": 60
    },
    {
      "epoch": 0.3888888888888889,
      "grad_norm": 1.6900883913040161,
      "learning_rate": 1.8444444444444448e-05,
      "loss": 0.6671,
      "step": 70
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 2.3360586166381836,
      "learning_rate": 1.8222222222222224e-05,
      "loss": 0.6341,
      "step": 80
    },
    {
      "epoch": 0.5,
      "grad_norm": 2.310093641281128,
      "learning_rate": 1.8e-05,
      "loss": 0.6446,
      "step": 90
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 2.071892499923706,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 0.6478,
      "step": 100
    },
    {
      "epoch": 0.6111111111111112,
      "grad_norm": 1.9108268022537231,
      "learning_rate": 1.7555555555555556e-05,
      "loss": 0.6333,
      "step": 110
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.8000233173370361,
      "learning_rate": 1.7333333333333336e-05,
      "loss": 0.6368,
      "step": 120
    },
    {
      "epoch": 0.7222222222222222,
      "grad_norm": 2.010941982269287,
      "learning_rate": 1.7111111111111112e-05,
      "loss": 0.6417,
      "step": 130
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 2.502824068069458,
      "learning_rate": 1.688888888888889e-05,
      "loss": 0.6175,
      "step": 140
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 2.920900821685791,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.5894,
      "step": 150
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 2.418879985809326,
      "learning_rate": 1.6444444444444444e-05,
      "loss": 0.6012,
      "step": 160
    },
    {
      "epoch": 0.9444444444444444,
      "grad_norm": 2.625758171081543,
      "learning_rate": 1.6222222222222223e-05,
      "loss": 0.5761,
      "step": 170
    },
    {
      "epoch": 1.0,
      "grad_norm": 3.7166590690612793,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.6138,
      "step": 180
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.6811023622047244,
      "eval_loss": 0.6001904010772705,
      "eval_runtime": 5.9005,
      "eval_samples_per_second": 43.047,
      "eval_steps_per_second": 5.423,
      "step": 180
    },
    {
      "epoch": 1.0555555555555556,
      "grad_norm": 3.4292004108428955,
      "learning_rate": 1.577777777777778e-05,
      "loss": 0.6237,
      "step": 190
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 2.9556570053100586,
      "learning_rate": 1.555555555555556e-05,
      "loss": 0.6242,
      "step": 200
    },
    {
      "epoch": 1.1666666666666667,
      "grad_norm": 2.3123323917388916,
      "learning_rate": 1.5333333333333334e-05,
      "loss": 0.6179,
      "step": 210
    },
    {
      "epoch": 1.2222222222222223,
      "grad_norm": 1.728090763092041,
      "learning_rate": 1.5111111111111112e-05,
      "loss": 0.6208,
      "step": 220
    },
    {
      "epoch": 1.2777777777777777,
      "grad_norm": 2.110353946685791,
      "learning_rate": 1.488888888888889e-05,
      "loss": 0.5744,
      "step": 230
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 2.0783276557922363,
      "learning_rate": 1.4666666666666666e-05,
      "loss": 0.5388,
      "step": 240
    },
    {
      "epoch": 1.3888888888888888,
      "grad_norm": 2.3175930976867676,
      "learning_rate": 1.4444444444444446e-05,
      "loss": 0.5846,
      "step": 250
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 3.0923080444335938,
      "learning_rate": 1.4222222222222224e-05,
      "loss": 0.5831,
      "step": 260
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.994885802268982,
      "learning_rate": 1.4e-05,
      "loss": 0.5929,
      "step": 270
    },
    {
      "epoch": 1.5555555555555556,
      "grad_norm": 3.28375244140625,
      "learning_rate": 1.377777777777778e-05,
      "loss": 0.5965,
      "step": 280
    },
    {
      "epoch": 1.6111111111111112,
      "grad_norm": 2.8813462257385254,
      "learning_rate": 1.3555555555555557e-05,
      "loss": 0.5358,
      "step": 290
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 3.112164258956909,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.5877,
      "step": 300
    },
    {
      "epoch": 1.7222222222222223,
      "grad_norm": 1.542144775390625,
      "learning_rate": 1.3111111111111113e-05,
      "loss": 0.5485,
      "step": 310
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 2.144716262817383,
      "learning_rate": 1.288888888888889e-05,
      "loss": 0.6028,
      "step": 320
    },
    {
      "epoch": 1.8333333333333335,
      "grad_norm": 2.632028102874756,
      "learning_rate": 1.2666666666666667e-05,
      "loss": 0.5295,
      "step": 330
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 2.2505481243133545,
      "learning_rate": 1.2444444444444446e-05,
      "loss": 0.6173,
      "step": 340
    },
    {
      "epoch": 1.9444444444444444,
      "grad_norm": 2.7676846981048584,
      "learning_rate": 1.2222222222222224e-05,
      "loss": 0.5421,
      "step": 350
    },
    {
      "epoch": 2.0,
      "grad_norm": 2.5781171321868896,
      "learning_rate": 1.2e-05,
      "loss": 0.5028,
      "step": 360
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.6811023622047244,
      "eval_loss": 0.5528703927993774,
      "eval_runtime": 4.4862,
      "eval_samples_per_second": 56.618,
      "eval_steps_per_second": 7.133,
      "step": 360
    },
    {
      "epoch": 2.0555555555555554,
      "grad_norm": 2.5306832790374756,
      "learning_rate": 1.177777777777778e-05,
      "loss": 0.5849,
      "step": 370
    },
    {
      "epoch": 2.111111111111111,
      "grad_norm": 2.1644484996795654,
      "learning_rate": 1.1555555555555556e-05,
      "loss": 0.5268,
      "step": 380
    },
    {
      "epoch": 2.1666666666666665,
      "grad_norm": 1.9074407815933228,
      "learning_rate": 1.1333333333333334e-05,
      "loss": 0.55,
      "step": 390
    },
    {
      "epoch": 2.2222222222222223,
      "grad_norm": 3.7348294258117676,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 0.6125,
      "step": 400
    },
    {
      "epoch": 2.2777777777777777,
      "grad_norm": 2.921757221221924,
      "learning_rate": 1.088888888888889e-05,
      "loss": 0.642,
      "step": 410
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 2.2528910636901855,
      "learning_rate": 1.0666666666666667e-05,
      "loss": 0.5009,
      "step": 420
    },
    {
      "epoch": 2.388888888888889,
      "grad_norm": 1.6619905233383179,
      "learning_rate": 1.0444444444444445e-05,
      "loss": 0.5158,
      "step": 430
    },
    {
      "epoch": 2.4444444444444446,
      "grad_norm": 3.4696500301361084,
      "learning_rate": 1.0222222222222223e-05,
      "loss": 0.6036,
      "step": 440
    },
    {
      "epoch": 2.5,
      "grad_norm": 2.936615467071533,
      "learning_rate": 1e-05,
      "loss": 0.5424,
      "step": 450
    },
    {
      "epoch": 2.5555555555555554,
      "grad_norm": 4.1920952796936035,
      "learning_rate": 9.777777777777779e-06,
      "loss": 0.5532,
      "step": 460
    },
    {
      "epoch": 2.611111111111111,
      "grad_norm": 2.4296085834503174,
      "learning_rate": 9.555555555555556e-06,
      "loss": 0.5436,
      "step": 470
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 2.5725982189178467,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.5015,
      "step": 480
    },
    {
      "epoch": 2.7222222222222223,
      "grad_norm": 4.97006368637085,
      "learning_rate": 9.111111111111112e-06,
      "loss": 0.5775,
      "step": 490
    },
    {
      "epoch": 2.7777777777777777,
      "grad_norm": 3.6014504432678223,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.5689,
      "step": 500
    },
    {
      "epoch": 2.8333333333333335,
      "grad_norm": 1.8251533508300781,
      "learning_rate": 8.666666666666668e-06,
      "loss": 0.5137,
      "step": 510
    },
    {
      "epoch": 2.888888888888889,
      "grad_norm": 3.503689765930176,
      "learning_rate": 8.444444444444446e-06,
      "loss": 0.5827,
      "step": 520
    },
    {
      "epoch": 2.9444444444444446,
      "grad_norm": 2.4786183834075928,
      "learning_rate": 8.222222222222222e-06,
      "loss": 0.5202,
      "step": 530
    },
    {
      "epoch": 3.0,
      "grad_norm": 2.0592217445373535,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.5103,
      "step": 540
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.6811023622047244,
      "eval_loss": 0.5325487852096558,
      "eval_runtime": 5.1901,
      "eval_samples_per_second": 48.94,
      "eval_steps_per_second": 6.166,
      "step": 540
    },
    {
      "epoch": 3.0555555555555554,
      "grad_norm": 1.9031552076339722,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.5273,
      "step": 550
    },
    {
      "epoch": 3.111111111111111,
      "grad_norm": 2.656759262084961,
      "learning_rate": 7.555555555555556e-06,
      "loss": 0.5824,
      "step": 560
    },
    {
      "epoch": 3.1666666666666665,
      "grad_norm": 1.8883424997329712,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.5653,
      "step": 570
    },
    {
      "epoch": 3.2222222222222223,
      "grad_norm": 2.118739128112793,
      "learning_rate": 7.111111111111112e-06,
      "loss": 0.5057,
      "step": 580
    },
    {
      "epoch": 3.2777777777777777,
      "grad_norm": 1.90389883518219,
      "learning_rate": 6.88888888888889e-06,
      "loss": 0.6118,
      "step": 590
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 1.8850902318954468,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.5682,
      "step": 600
    },
    {
      "epoch": 3.388888888888889,
      "grad_norm": 5.424034118652344,
      "learning_rate": 6.444444444444445e-06,
      "loss": 0.5501,
      "step": 610
    },
    {
      "epoch": 3.4444444444444446,
      "grad_norm": 6.140316963195801,
      "learning_rate": 6.222222222222223e-06,
      "loss": 0.6259,
      "step": 620
    },
    {
      "epoch": 3.5,
      "grad_norm": 2.766357183456421,
      "learning_rate": 6e-06,
      "loss": 0.5077,
      "step": 630
    },
    {
      "epoch": 3.5555555555555554,
      "grad_norm": 2.0831124782562256,
      "learning_rate": 5.777777777777778e-06,
      "loss": 0.5609,
      "step": 640
    },
    {
      "epoch": 3.611111111111111,
      "grad_norm": 1.9845359325408936,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.4875,
      "step": 650
    },
    {
      "epoch": 3.6666666666666665,
      "grad_norm": 1.5764096975326538,
      "learning_rate": 5.333333333333334e-06,
      "loss": 0.4979,
      "step": 660
    },
    {
      "epoch": 3.7222222222222223,
      "grad_norm": 3.984339952468872,
      "learning_rate": 5.1111111111111115e-06,
      "loss": 0.5522,
      "step": 670
    },
    {
      "epoch": 3.7777777777777777,
      "grad_norm": 2.4655332565307617,
      "learning_rate": 4.888888888888889e-06,
      "loss": 0.5476,
      "step": 680
    },
    {
      "epoch": 3.8333333333333335,
      "grad_norm": 2.9143753051757812,
      "learning_rate": 4.666666666666667e-06,
      "loss": 0.5901,
      "step": 690
    },
    {
      "epoch": 3.888888888888889,
      "grad_norm": 2.023881435394287,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.5842,
      "step": 700
    },
    {
      "epoch": 3.9444444444444446,
      "grad_norm": 2.730069398880005,
      "learning_rate": 4.222222222222223e-06,
      "loss": 0.5443,
      "step": 710
    },
    {
      "epoch": 4.0,
      "grad_norm": 2.496239185333252,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.4892,
      "step": 720
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.6811023622047244,
      "eval_loss": 0.5247489213943481,
      "eval_runtime": 4.9059,
      "eval_samples_per_second": 51.774,
      "eval_steps_per_second": 6.523,
      "step": 720
    },
    {
      "epoch": 4.055555555555555,
      "grad_norm": 1.9344916343688965,
      "learning_rate": 3.777777777777778e-06,
      "loss": 0.5017,
      "step": 730
    },
    {
      "epoch": 4.111111111111111,
      "grad_norm": 3.2695019245147705,
      "learning_rate": 3.555555555555556e-06,
      "loss": 0.5974,
      "step": 740
    },
    {
      "epoch": 4.166666666666667,
      "grad_norm": 4.992809772491455,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.5307,
      "step": 750
    },
    {
      "epoch": 4.222222222222222,
      "grad_norm": 3.5359721183776855,
      "learning_rate": 3.1111111111111116e-06,
      "loss": 0.5372,
      "step": 760
    },
    {
      "epoch": 4.277777777777778,
      "grad_norm": 2.123009204864502,
      "learning_rate": 2.888888888888889e-06,
      "loss": 0.49,
      "step": 770
    },
    {
      "epoch": 4.333333333333333,
      "grad_norm": 2.8134477138519287,
      "learning_rate": 2.666666666666667e-06,
      "loss": 0.4749,
      "step": 780
    },
    {
      "epoch": 4.388888888888889,
      "grad_norm": 2.195077896118164,
      "learning_rate": 2.4444444444444447e-06,
      "loss": 0.5876,
      "step": 790
    },
    {
      "epoch": 4.444444444444445,
      "grad_norm": 4.078568935394287,
      "learning_rate": 2.222222222222222e-06,
      "loss": 0.5609,
      "step": 800
    },
    {
      "epoch": 4.5,
      "grad_norm": 2.6274867057800293,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.5392,
      "step": 810
    },
    {
      "epoch": 4.555555555555555,
      "grad_norm": 1.7526293992996216,
      "learning_rate": 1.777777777777778e-06,
      "loss": 0.4965,
      "step": 820
    },
    {
      "epoch": 4.611111111111111,
      "grad_norm": 2.3001506328582764,
      "learning_rate": 1.5555555555555558e-06,
      "loss": 0.5345,
      "step": 830
    },
    {
      "epoch": 4.666666666666667,
      "grad_norm": 2.5624406337738037,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 0.5765,
      "step": 840
    },
    {
      "epoch": 4.722222222222222,
      "grad_norm": 2.199018716812134,
      "learning_rate": 1.111111111111111e-06,
      "loss": 0.5547,
      "step": 850
    },
    {
      "epoch": 4.777777777777778,
      "grad_norm": 3.756605625152588,
      "learning_rate": 8.88888888888889e-07,
      "loss": 0.5021,
      "step": 860
    },
    {
      "epoch": 4.833333333333333,
      "grad_norm": 1.6385400295257568,
      "learning_rate": 6.666666666666667e-07,
      "loss": 0.5858,
      "step": 870
    },
    {
      "epoch": 4.888888888888889,
      "grad_norm": 5.481604099273682,
      "learning_rate": 4.444444444444445e-07,
      "loss": 0.5062,
      "step": 880
    },
    {
      "epoch": 4.944444444444445,
      "grad_norm": 2.554666042327881,
      "learning_rate": 2.2222222222222224e-07,
      "loss": 0.5355,
      "step": 890
    },
    {
      "epoch": 5.0,
      "grad_norm": 5.525686264038086,
      "learning_rate": 0.0,
      "loss": 0.5779,
      "step": 900
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.6811023622047244,
      "eval_loss": 0.530208170413971,
      "eval_runtime": 5.531,
      "eval_samples_per_second": 45.923,
      "eval_steps_per_second": 5.786,
      "step": 900
    },
    {
      "epoch": 5.0,
      "step": 900,
      "total_flos": 7.24330215447552e+16,
      "train_loss": 0.5726550849278768,
      "train_runtime": 239.6067,
      "train_samples_per_second": 29.945,
      "train_steps_per_second": 3.756
    }
  ],
  "logging_steps": 10,
  "max_steps": 900,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.24330215447552e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}