| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.0, |
| "eval_steps": 5000, |
| "global_step": 2250, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0044444444444444444, |
| "grad_norm": 9768.7255859375, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 16.1729, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.008888888888888889, |
| "grad_norm": 16245.8876953125, |
| "learning_rate": 1.1e-06, |
| "loss": 14.043, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.013333333333333334, |
| "grad_norm": 9001.0771484375, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 8.2782, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.017777777777777778, |
| "grad_norm": 491.5092468261719, |
| "learning_rate": 3e-06, |
| "loss": 7.3674, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.022222222222222223, |
| "grad_norm": 2662.317626953125, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 6.5525, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.02666666666666667, |
| "grad_norm": 238.51461791992188, |
| "learning_rate": 5e-06, |
| "loss": 6.4309, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.03111111111111111, |
| "grad_norm": 592.76953125, |
| "learning_rate": 6e-06, |
| "loss": 5.4059, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.035555555555555556, |
| "grad_norm": 129.07666015625, |
| "learning_rate": 7.000000000000001e-06, |
| "loss": 5.1084, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 763.647216796875, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 4.6757, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.044444444444444446, |
| "grad_norm": 269.35308837890625, |
| "learning_rate": 9e-06, |
| "loss": 4.2741, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.04888888888888889, |
| "grad_norm": 121.892822265625, |
| "learning_rate": 1e-05, |
| "loss": 3.3332, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.05333333333333334, |
| "grad_norm": 121.66092681884766, |
| "learning_rate": 1.1000000000000001e-05, |
| "loss": 3.0875, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.057777777777777775, |
| "grad_norm": 88.0842514038086, |
| "learning_rate": 1.2e-05, |
| "loss": 1.9639, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.06222222222222222, |
| "grad_norm": 77.41666412353516, |
| "learning_rate": 1.3000000000000001e-05, |
| "loss": 1.645, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.06666666666666667, |
| "grad_norm": 27.421039581298828, |
| "learning_rate": 1.4000000000000001e-05, |
| "loss": 1.1254, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.07111111111111111, |
| "grad_norm": 146.31039428710938, |
| "learning_rate": 1.5e-05, |
| "loss": 1.1082, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.07555555555555556, |
| "grad_norm": 1123.4139404296875, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 0.9222, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 535.9423828125, |
| "learning_rate": 1.7000000000000003e-05, |
| "loss": 1.0467, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.08444444444444445, |
| "grad_norm": 36.32695007324219, |
| "learning_rate": 1.8e-05, |
| "loss": 0.7194, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.08888888888888889, |
| "grad_norm": 35.27937698364258, |
| "learning_rate": 1.9e-05, |
| "loss": 1.1295, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.09333333333333334, |
| "grad_norm": 186.22567749023438, |
| "learning_rate": 2e-05, |
| "loss": 0.941, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.09777777777777778, |
| "grad_norm": 11.655800819396973, |
| "learning_rate": 2.1e-05, |
| "loss": 1.0712, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.10222222222222223, |
| "grad_norm": 6.5133256912231445, |
| "learning_rate": 2.2000000000000003e-05, |
| "loss": 0.799, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.10666666666666667, |
| "grad_norm": 1871.7095947265625, |
| "learning_rate": 2.3000000000000003e-05, |
| "loss": 1.4545, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.1111111111111111, |
| "grad_norm": 12.251700401306152, |
| "learning_rate": 2.4e-05, |
| "loss": 0.9906, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.11555555555555555, |
| "grad_norm": 76.3839111328125, |
| "learning_rate": 2.5e-05, |
| "loss": 1.2341, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 19.359607696533203, |
| "learning_rate": 2.6000000000000002e-05, |
| "loss": 2.9686, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.12444444444444444, |
| "grad_norm": 34.06645584106445, |
| "learning_rate": 2.7000000000000002e-05, |
| "loss": 1.0668, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.1288888888888889, |
| "grad_norm": 7.353147983551025, |
| "learning_rate": 2.8000000000000003e-05, |
| "loss": 0.9959, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.13333333333333333, |
| "grad_norm": 27.748685836791992, |
| "learning_rate": 2.9e-05, |
| "loss": 0.7448, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.13777777777777778, |
| "grad_norm": 2796.3427734375, |
| "learning_rate": 3e-05, |
| "loss": 1.4512, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.14222222222222222, |
| "grad_norm": 80.05686950683594, |
| "learning_rate": 3.1e-05, |
| "loss": 0.8864, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.14666666666666667, |
| "grad_norm": 27.44098472595215, |
| "learning_rate": 3.2000000000000005e-05, |
| "loss": 1.144, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.1511111111111111, |
| "grad_norm": 21.465320587158203, |
| "learning_rate": 3.3e-05, |
| "loss": 0.8698, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.15555555555555556, |
| "grad_norm": 10.413846015930176, |
| "learning_rate": 3.4000000000000007e-05, |
| "loss": 0.9131, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 13.890963554382324, |
| "learning_rate": 3.5e-05, |
| "loss": 1.1077, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.16444444444444445, |
| "grad_norm": 6.141105651855469, |
| "learning_rate": 3.6e-05, |
| "loss": 1.0575, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.1688888888888889, |
| "grad_norm": 2.727337121963501, |
| "learning_rate": 3.7e-05, |
| "loss": 0.9563, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.17333333333333334, |
| "grad_norm": 1.5828684568405151, |
| "learning_rate": 3.8e-05, |
| "loss": 0.781, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.17777777777777778, |
| "grad_norm": 3.337902545928955, |
| "learning_rate": 3.9000000000000006e-05, |
| "loss": 0.8312, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.18222222222222223, |
| "grad_norm": 1.760097622871399, |
| "learning_rate": 4e-05, |
| "loss": 0.9048, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.18666666666666668, |
| "grad_norm": 0.9947965741157532, |
| "learning_rate": 4.1e-05, |
| "loss": 0.8803, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.19111111111111112, |
| "grad_norm": 18.71548843383789, |
| "learning_rate": 4.2e-05, |
| "loss": 1.143, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.19555555555555557, |
| "grad_norm": 12.882355690002441, |
| "learning_rate": 4.3e-05, |
| "loss": 0.9267, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 2.379068613052368, |
| "learning_rate": 4.4000000000000006e-05, |
| "loss": 0.7401, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.20444444444444446, |
| "grad_norm": 6.240898609161377, |
| "learning_rate": 4.5e-05, |
| "loss": 1.0539, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.2088888888888889, |
| "grad_norm": 14.994612693786621, |
| "learning_rate": 4.600000000000001e-05, |
| "loss": 1.119, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.21333333333333335, |
| "grad_norm": 39.39948654174805, |
| "learning_rate": 4.7e-05, |
| "loss": 1.027, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.21777777777777776, |
| "grad_norm": 2.395543098449707, |
| "learning_rate": 4.8e-05, |
| "loss": 1.0852, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.2222222222222222, |
| "grad_norm": 1.2860183715820312, |
| "learning_rate": 4.9e-05, |
| "loss": 0.7519, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.22666666666666666, |
| "grad_norm": 10.799077033996582, |
| "learning_rate": 5e-05, |
| "loss": 0.8347, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.2311111111111111, |
| "grad_norm": 1.4125126600265503, |
| "learning_rate": 4.971428571428572e-05, |
| "loss": 1.0284, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.23555555555555555, |
| "grad_norm": 3.897629737854004, |
| "learning_rate": 4.942857142857143e-05, |
| "loss": 0.8591, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 3.8170456886291504, |
| "learning_rate": 4.9142857142857144e-05, |
| "loss": 0.8344, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.24444444444444444, |
| "grad_norm": 0.6229439377784729, |
| "learning_rate": 4.885714285714286e-05, |
| "loss": 0.9252, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.24888888888888888, |
| "grad_norm": 3.2861270904541016, |
| "learning_rate": 4.8571428571428576e-05, |
| "loss": 0.9951, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.25333333333333335, |
| "grad_norm": 1.9368836879730225, |
| "learning_rate": 4.828571428571429e-05, |
| "loss": 0.9994, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.2577777777777778, |
| "grad_norm": 1.2594083547592163, |
| "learning_rate": 4.8e-05, |
| "loss": 0.8287, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.26222222222222225, |
| "grad_norm": 0.7579429149627686, |
| "learning_rate": 4.771428571428572e-05, |
| "loss": 1.2831, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.26666666666666666, |
| "grad_norm": 1.412414789199829, |
| "learning_rate": 4.742857142857143e-05, |
| "loss": 0.7316, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.27111111111111114, |
| "grad_norm": 1.765723466873169, |
| "learning_rate": 4.714285714285714e-05, |
| "loss": 0.6747, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.27555555555555555, |
| "grad_norm": 0.9297116994857788, |
| "learning_rate": 4.685714285714286e-05, |
| "loss": 0.7766, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 0.9512072205543518, |
| "learning_rate": 4.6571428571428575e-05, |
| "loss": 0.5614, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.28444444444444444, |
| "grad_norm": 2.3289010524749756, |
| "learning_rate": 4.628571428571429e-05, |
| "loss": 1.3121, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.28888888888888886, |
| "grad_norm": 1.013492465019226, |
| "learning_rate": 4.600000000000001e-05, |
| "loss": 1.0048, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.29333333333333333, |
| "grad_norm": 0.9352627992630005, |
| "learning_rate": 4.5714285714285716e-05, |
| "loss": 0.6874, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.29777777777777775, |
| "grad_norm": 88.00923156738281, |
| "learning_rate": 4.542857142857143e-05, |
| "loss": 1.1643, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.3022222222222222, |
| "grad_norm": 0.7422826290130615, |
| "learning_rate": 4.514285714285714e-05, |
| "loss": 0.7003, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.30666666666666664, |
| "grad_norm": 1.197748064994812, |
| "learning_rate": 4.485714285714286e-05, |
| "loss": 1.2505, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.3111111111111111, |
| "grad_norm": 2.1018965244293213, |
| "learning_rate": 4.4571428571428574e-05, |
| "loss": 0.7924, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.31555555555555553, |
| "grad_norm": 1.9967838525772095, |
| "learning_rate": 4.428571428571428e-05, |
| "loss": 1.0143, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 4.196930885314941, |
| "learning_rate": 4.4000000000000006e-05, |
| "loss": 0.8882, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.3244444444444444, |
| "grad_norm": 1.986365795135498, |
| "learning_rate": 4.371428571428572e-05, |
| "loss": 0.8591, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.3288888888888889, |
| "grad_norm": 3.0498948097229004, |
| "learning_rate": 4.342857142857143e-05, |
| "loss": 0.8434, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.3333333333333333, |
| "grad_norm": 0.7130551338195801, |
| "learning_rate": 4.314285714285715e-05, |
| "loss": 0.8395, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.3377777777777778, |
| "grad_norm": 0.6148263216018677, |
| "learning_rate": 4.2857142857142856e-05, |
| "loss": 0.9217, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.3422222222222222, |
| "grad_norm": 4.150040149688721, |
| "learning_rate": 4.257142857142857e-05, |
| "loss": 0.9144, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.3466666666666667, |
| "grad_norm": 1.1592193841934204, |
| "learning_rate": 4.228571428571429e-05, |
| "loss": 0.9254, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.3511111111111111, |
| "grad_norm": 1.1033934354782104, |
| "learning_rate": 4.2e-05, |
| "loss": 0.7114, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.35555555555555557, |
| "grad_norm": 1.2952511310577393, |
| "learning_rate": 4.1714285714285714e-05, |
| "loss": 0.8385, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 1.5696533918380737, |
| "learning_rate": 4.1428571428571437e-05, |
| "loss": 0.6439, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.36444444444444446, |
| "grad_norm": 0.9464730024337769, |
| "learning_rate": 4.1142857142857146e-05, |
| "loss": 0.5408, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.3688888888888889, |
| "grad_norm": 2.5423879623413086, |
| "learning_rate": 4.085714285714286e-05, |
| "loss": 0.8305, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.37333333333333335, |
| "grad_norm": 1.3495992422103882, |
| "learning_rate": 4.057142857142857e-05, |
| "loss": 0.7594, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.37777777777777777, |
| "grad_norm": 1.4968547821044922, |
| "learning_rate": 4.028571428571429e-05, |
| "loss": 0.7879, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.38222222222222224, |
| "grad_norm": 23.5468692779541, |
| "learning_rate": 4e-05, |
| "loss": 1.0052, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.38666666666666666, |
| "grad_norm": 1.6756787300109863, |
| "learning_rate": 3.971428571428571e-05, |
| "loss": 0.7402, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.39111111111111113, |
| "grad_norm": 2.612205743789673, |
| "learning_rate": 3.942857142857143e-05, |
| "loss": 0.8477, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.39555555555555555, |
| "grad_norm": 5.494552135467529, |
| "learning_rate": 3.9142857142857145e-05, |
| "loss": 1.0808, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 1.5293407440185547, |
| "learning_rate": 3.885714285714286e-05, |
| "loss": 0.8273, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.40444444444444444, |
| "grad_norm": 1.242197871208191, |
| "learning_rate": 3.857142857142858e-05, |
| "loss": 0.6873, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.4088888888888889, |
| "grad_norm": 2.009373903274536, |
| "learning_rate": 3.8285714285714286e-05, |
| "loss": 0.8726, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.41333333333333333, |
| "grad_norm": 1.1505591869354248, |
| "learning_rate": 3.8e-05, |
| "loss": 0.6348, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.4177777777777778, |
| "grad_norm": 0.9240453243255615, |
| "learning_rate": 3.771428571428572e-05, |
| "loss": 0.7547, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.4222222222222222, |
| "grad_norm": 4.990575313568115, |
| "learning_rate": 3.742857142857143e-05, |
| "loss": 0.8018, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.4266666666666667, |
| "grad_norm": 1.9885175228118896, |
| "learning_rate": 3.7142857142857143e-05, |
| "loss": 0.8499, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.4311111111111111, |
| "grad_norm": 1.303685188293457, |
| "learning_rate": 3.685714285714286e-05, |
| "loss": 0.9209, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.43555555555555553, |
| "grad_norm": 1.4578043222427368, |
| "learning_rate": 3.6571428571428576e-05, |
| "loss": 0.9527, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 2.819185972213745, |
| "learning_rate": 3.628571428571429e-05, |
| "loss": 0.8843, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.4444444444444444, |
| "grad_norm": 0.9243647456169128, |
| "learning_rate": 3.6e-05, |
| "loss": 0.496, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.4488888888888889, |
| "grad_norm": 1.31293785572052, |
| "learning_rate": 3.571428571428572e-05, |
| "loss": 0.6839, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.4533333333333333, |
| "grad_norm": 1.1468777656555176, |
| "learning_rate": 3.5428571428571426e-05, |
| "loss": 1.1296, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.4577777777777778, |
| "grad_norm": 1.786746621131897, |
| "learning_rate": 3.514285714285714e-05, |
| "loss": 0.9237, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.4622222222222222, |
| "grad_norm": 2.120232343673706, |
| "learning_rate": 3.485714285714286e-05, |
| "loss": 0.8313, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.4666666666666667, |
| "grad_norm": 1.00870943069458, |
| "learning_rate": 3.4571428571428574e-05, |
| "loss": 0.5811, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.4711111111111111, |
| "grad_norm": 0.977967381477356, |
| "learning_rate": 3.428571428571429e-05, |
| "loss": 0.7529, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.47555555555555556, |
| "grad_norm": 1.1854398250579834, |
| "learning_rate": 3.4000000000000007e-05, |
| "loss": 0.7092, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 1.7509541511535645, |
| "learning_rate": 3.3714285714285716e-05, |
| "loss": 0.6859, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.48444444444444446, |
| "grad_norm": 1.652112364768982, |
| "learning_rate": 3.342857142857143e-05, |
| "loss": 0.7396, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.4888888888888889, |
| "grad_norm": 2.1805965900421143, |
| "learning_rate": 3.314285714285714e-05, |
| "loss": 0.7982, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.49333333333333335, |
| "grad_norm": 2.820934534072876, |
| "learning_rate": 3.285714285714286e-05, |
| "loss": 0.7424, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.49777777777777776, |
| "grad_norm": 9.163993835449219, |
| "learning_rate": 3.257142857142857e-05, |
| "loss": 0.8498, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.5022222222222222, |
| "grad_norm": 1.3526314496994019, |
| "learning_rate": 3.228571428571428e-05, |
| "loss": 0.7335, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.5066666666666667, |
| "grad_norm": 3.196364164352417, |
| "learning_rate": 3.2000000000000005e-05, |
| "loss": 1.0057, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.5111111111111111, |
| "grad_norm": 2.5258631706237793, |
| "learning_rate": 3.1714285714285715e-05, |
| "loss": 0.7504, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.5155555555555555, |
| "grad_norm": 18.060762405395508, |
| "learning_rate": 3.142857142857143e-05, |
| "loss": 0.8299, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 3.3956117630004883, |
| "learning_rate": 3.114285714285715e-05, |
| "loss": 0.7396, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.5244444444444445, |
| "grad_norm": 1.5035347938537598, |
| "learning_rate": 3.0857142857142856e-05, |
| "loss": 0.6786, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.5288888888888889, |
| "grad_norm": 1.8701106309890747, |
| "learning_rate": 3.057142857142857e-05, |
| "loss": 1.045, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.5333333333333333, |
| "grad_norm": 2.3694677352905273, |
| "learning_rate": 3.0285714285714288e-05, |
| "loss": 0.7394, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.5377777777777778, |
| "grad_norm": 3.630218267440796, |
| "learning_rate": 3e-05, |
| "loss": 0.8106, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.5422222222222223, |
| "grad_norm": 1.8953900337219238, |
| "learning_rate": 2.9714285714285717e-05, |
| "loss": 0.8347, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.5466666666666666, |
| "grad_norm": 2.067115545272827, |
| "learning_rate": 2.9428571428571426e-05, |
| "loss": 0.9066, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.5511111111111111, |
| "grad_norm": 2.195612907409668, |
| "learning_rate": 2.9142857142857146e-05, |
| "loss": 0.85, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.5555555555555556, |
| "grad_norm": 6.791778564453125, |
| "learning_rate": 2.885714285714286e-05, |
| "loss": 0.6259, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 3.0913190841674805, |
| "learning_rate": 2.857142857142857e-05, |
| "loss": 0.8132, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.5644444444444444, |
| "grad_norm": 8.887785911560059, |
| "learning_rate": 2.8285714285714287e-05, |
| "loss": 0.626, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.5688888888888889, |
| "grad_norm": 2.769585132598877, |
| "learning_rate": 2.8000000000000003e-05, |
| "loss": 0.5566, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.5733333333333334, |
| "grad_norm": 2.938758373260498, |
| "learning_rate": 2.7714285714285716e-05, |
| "loss": 0.7082, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.5777777777777777, |
| "grad_norm": 3.9184396266937256, |
| "learning_rate": 2.742857142857143e-05, |
| "loss": 0.5574, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.5822222222222222, |
| "grad_norm": 2.5171377658843994, |
| "learning_rate": 2.714285714285714e-05, |
| "loss": 0.497, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.5866666666666667, |
| "grad_norm": 4.993486404418945, |
| "learning_rate": 2.6857142857142857e-05, |
| "loss": 0.6557, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.5911111111111111, |
| "grad_norm": 5.2871551513671875, |
| "learning_rate": 2.6571428571428576e-05, |
| "loss": 0.7234, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.5955555555555555, |
| "grad_norm": 4.414983749389648, |
| "learning_rate": 2.6285714285714286e-05, |
| "loss": 0.8204, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 2.0641419887542725, |
| "learning_rate": 2.6000000000000002e-05, |
| "loss": 0.5932, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.6044444444444445, |
| "grad_norm": 4.55308723449707, |
| "learning_rate": 2.5714285714285714e-05, |
| "loss": 0.7171, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.6088888888888889, |
| "grad_norm": 4.960785865783691, |
| "learning_rate": 2.542857142857143e-05, |
| "loss": 0.5145, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.6133333333333333, |
| "grad_norm": 4.376406669616699, |
| "learning_rate": 2.5142857142857147e-05, |
| "loss": 0.5864, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.6177777777777778, |
| "grad_norm": 14.931255340576172, |
| "learning_rate": 2.485714285714286e-05, |
| "loss": 0.5942, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.6222222222222222, |
| "grad_norm": 80.68295288085938, |
| "learning_rate": 2.4571428571428572e-05, |
| "loss": 0.7149, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.6266666666666667, |
| "grad_norm": 6.966264247894287, |
| "learning_rate": 2.4285714285714288e-05, |
| "loss": 0.5357, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.6311111111111111, |
| "grad_norm": 11.523138046264648, |
| "learning_rate": 2.4e-05, |
| "loss": 0.5548, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.6355555555555555, |
| "grad_norm": 5.8682475090026855, |
| "learning_rate": 2.3714285714285717e-05, |
| "loss": 0.6273, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 5.063971519470215, |
| "learning_rate": 2.342857142857143e-05, |
| "loss": 0.5253, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.6444444444444445, |
| "grad_norm": 6.171313285827637, |
| "learning_rate": 2.3142857142857145e-05, |
| "loss": 0.5423, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.6488888888888888, |
| "grad_norm": 7.615331649780273, |
| "learning_rate": 2.2857142857142858e-05, |
| "loss": 0.708, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.6533333333333333, |
| "grad_norm": 6.465192794799805, |
| "learning_rate": 2.257142857142857e-05, |
| "loss": 0.6355, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.6577777777777778, |
| "grad_norm": 11.557660102844238, |
| "learning_rate": 2.2285714285714287e-05, |
| "loss": 0.5724, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.6622222222222223, |
| "grad_norm": 10.57749080657959, |
| "learning_rate": 2.2000000000000003e-05, |
| "loss": 0.6745, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.6666666666666666, |
| "grad_norm": 7.165240287780762, |
| "learning_rate": 2.1714285714285715e-05, |
| "loss": 0.582, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.6711111111111111, |
| "grad_norm": 4.825045108795166, |
| "learning_rate": 2.1428571428571428e-05, |
| "loss": 0.4501, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.6755555555555556, |
| "grad_norm": 5.933408737182617, |
| "learning_rate": 2.1142857142857144e-05, |
| "loss": 0.6209, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 3.2412068843841553, |
| "learning_rate": 2.0857142857142857e-05, |
| "loss": 0.6279, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.6844444444444444, |
| "grad_norm": 8.654913902282715, |
| "learning_rate": 2.0571428571428573e-05, |
| "loss": 0.5614, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.6888888888888889, |
| "grad_norm": 3.5298168659210205, |
| "learning_rate": 2.0285714285714286e-05, |
| "loss": 0.6737, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.6933333333333334, |
| "grad_norm": 3.3998289108276367, |
| "learning_rate": 2e-05, |
| "loss": 0.669, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.6977777777777778, |
| "grad_norm": 4.436722278594971, |
| "learning_rate": 1.9714285714285714e-05, |
| "loss": 0.7394, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.7022222222222222, |
| "grad_norm": 5.631706714630127, |
| "learning_rate": 1.942857142857143e-05, |
| "loss": 0.5167, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.7066666666666667, |
| "grad_norm": 4.268092155456543, |
| "learning_rate": 1.9142857142857143e-05, |
| "loss": 0.5114, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.7111111111111111, |
| "grad_norm": 14.461816787719727, |
| "learning_rate": 1.885714285714286e-05, |
| "loss": 0.5903, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.7155555555555555, |
| "grad_norm": 6.177591323852539, |
| "learning_rate": 1.8571428571428572e-05, |
| "loss": 0.4619, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 3.8775877952575684, |
| "learning_rate": 1.8285714285714288e-05, |
| "loss": 0.4254, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.7244444444444444, |
| "grad_norm": 10.525681495666504, |
| "learning_rate": 1.8e-05, |
| "loss": 0.7416, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.7288888888888889, |
| "grad_norm": 8.446870803833008, |
| "learning_rate": 1.7714285714285713e-05, |
| "loss": 0.5597, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.7333333333333333, |
| "grad_norm": 6.213122367858887, |
| "learning_rate": 1.742857142857143e-05, |
| "loss": 0.4531, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.7377777777777778, |
| "grad_norm": 6.555688858032227, |
| "learning_rate": 1.7142857142857145e-05, |
| "loss": 0.6882, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.7422222222222222, |
| "grad_norm": 5.502016067504883, |
| "learning_rate": 1.6857142857142858e-05, |
| "loss": 0.5177, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.7466666666666667, |
| "grad_norm": 4.0184102058410645, |
| "learning_rate": 1.657142857142857e-05, |
| "loss": 0.4499, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.7511111111111111, |
| "grad_norm": 5.010197639465332, |
| "learning_rate": 1.6285714285714287e-05, |
| "loss": 0.5105, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.7555555555555555, |
| "grad_norm": 6.907636642456055, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 0.4339, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.76, |
| "grad_norm": 4.459092617034912, |
| "learning_rate": 1.5714285714285715e-05, |
| "loss": 0.5309, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.7644444444444445, |
| "grad_norm": 6.3801164627075195, |
| "learning_rate": 1.5428571428571428e-05, |
| "loss": 0.5354, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.7688888888888888, |
| "grad_norm": 3.440077066421509, |
| "learning_rate": 1.5142857142857144e-05, |
| "loss": 0.4684, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.7733333333333333, |
| "grad_norm": 8.774897575378418, |
| "learning_rate": 1.4857142857142858e-05, |
| "loss": 0.6064, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.7777777777777778, |
| "grad_norm": 4.872552871704102, |
| "learning_rate": 1.4571428571428573e-05, |
| "loss": 0.5877, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.7822222222222223, |
| "grad_norm": 4.998244762420654, |
| "learning_rate": 1.4285714285714285e-05, |
| "loss": 0.3201, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.7866666666666666, |
| "grad_norm": 9.849549293518066, |
| "learning_rate": 1.4000000000000001e-05, |
| "loss": 0.552, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.7911111111111111, |
| "grad_norm": 3.8695757389068604, |
| "learning_rate": 1.3714285714285716e-05, |
| "loss": 0.4807, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.7955555555555556, |
| "grad_norm": 10.59960651397705, |
| "learning_rate": 1.3428571428571429e-05, |
| "loss": 0.5487, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 3.5705807209014893, |
| "learning_rate": 1.3142857142857143e-05, |
| "loss": 0.4526, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.8044444444444444, |
| "grad_norm": 3.2554914951324463, |
| "learning_rate": 1.2857142857142857e-05, |
| "loss": 0.6035, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.8088888888888889, |
| "grad_norm": 10.050302505493164, |
| "learning_rate": 1.2571428571428573e-05, |
| "loss": 0.6413, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.8133333333333334, |
| "grad_norm": 3.8750650882720947, |
| "learning_rate": 1.2285714285714286e-05, |
| "loss": 0.4818, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.8177777777777778, |
| "grad_norm": 5.40247917175293, |
| "learning_rate": 1.2e-05, |
| "loss": 0.5041, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.8222222222222222, |
| "grad_norm": 4.115529537200928, |
| "learning_rate": 1.1714285714285715e-05, |
| "loss": 0.4337, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.8266666666666667, |
| "grad_norm": 4.241470813751221, |
| "learning_rate": 1.1428571428571429e-05, |
| "loss": 0.5223, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.8311111111111111, |
| "grad_norm": 25.0269775390625, |
| "learning_rate": 1.1142857142857143e-05, |
| "loss": 0.6195, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.8355555555555556, |
| "grad_norm": 4.397892475128174, |
| "learning_rate": 1.0857142857142858e-05, |
| "loss": 0.5594, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.84, |
| "grad_norm": 15.26313591003418, |
| "learning_rate": 1.0571428571428572e-05, |
| "loss": 0.6732, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.8444444444444444, |
| "grad_norm": 7.27249813079834, |
| "learning_rate": 1.0285714285714286e-05, |
| "loss": 0.6866, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.8488888888888889, |
| "grad_norm": 4.72747278213501, |
| "learning_rate": 1e-05, |
| "loss": 0.4916, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.8533333333333334, |
| "grad_norm": 8.689544677734375, |
| "learning_rate": 9.714285714285715e-06, |
| "loss": 0.4455, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.8577777777777778, |
| "grad_norm": 6.947673797607422, |
| "learning_rate": 9.42857142857143e-06, |
| "loss": 0.394, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.8622222222222222, |
| "grad_norm": 12.597881317138672, |
| "learning_rate": 9.142857142857144e-06, |
| "loss": 0.5695, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.8666666666666667, |
| "grad_norm": 5.3088274002075195, |
| "learning_rate": 8.857142857142857e-06, |
| "loss": 0.5589, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.8711111111111111, |
| "grad_norm": 4.580944061279297, |
| "learning_rate": 8.571428571428573e-06, |
| "loss": 0.5902, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.8755555555555555, |
| "grad_norm": 4.883549213409424, |
| "learning_rate": 8.285714285714285e-06, |
| "loss": 0.4635, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.88, |
| "grad_norm": 20.647493362426758, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 0.5043, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.8844444444444445, |
| "grad_norm": 5.067135810852051, |
| "learning_rate": 7.714285714285714e-06, |
| "loss": 0.4372, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.8888888888888888, |
| "grad_norm": 8.781386375427246, |
| "learning_rate": 7.428571428571429e-06, |
| "loss": 0.4274, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.8933333333333333, |
| "grad_norm": 4.173491954803467, |
| "learning_rate": 7.142857142857143e-06, |
| "loss": 0.422, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.8977777777777778, |
| "grad_norm": 5.917858600616455, |
| "learning_rate": 6.857142857142858e-06, |
| "loss": 0.505, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.9022222222222223, |
| "grad_norm": 6.02107048034668, |
| "learning_rate": 6.5714285714285714e-06, |
| "loss": 0.4933, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.9066666666666666, |
| "grad_norm": 5.2334794998168945, |
| "learning_rate": 6.285714285714287e-06, |
| "loss": 0.4, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.9111111111111111, |
| "grad_norm": 6.435775279998779, |
| "learning_rate": 6e-06, |
| "loss": 0.5269, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.9155555555555556, |
| "grad_norm": 11.653307914733887, |
| "learning_rate": 5.7142857142857145e-06, |
| "loss": 0.5007, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.92, |
| "grad_norm": 6.596048355102539, |
| "learning_rate": 5.428571428571429e-06, |
| "loss": 0.482, |
| "step": 2070 |
| }, |
| { |
| "epoch": 0.9244444444444444, |
| "grad_norm": 11.182780265808105, |
| "learning_rate": 5.142857142857143e-06, |
| "loss": 0.498, |
| "step": 2080 |
| }, |
| { |
| "epoch": 0.9288888888888889, |
| "grad_norm": 7.414155960083008, |
| "learning_rate": 4.857142857142858e-06, |
| "loss": 0.4041, |
| "step": 2090 |
| }, |
| { |
| "epoch": 0.9333333333333333, |
| "grad_norm": 3.3029086589813232, |
| "learning_rate": 4.571428571428572e-06, |
| "loss": 0.4889, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.9377777777777778, |
| "grad_norm": 4.610305309295654, |
| "learning_rate": 4.285714285714286e-06, |
| "loss": 0.3627, |
| "step": 2110 |
| }, |
| { |
| "epoch": 0.9422222222222222, |
| "grad_norm": 13.668721199035645, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.5639, |
| "step": 2120 |
| }, |
| { |
| "epoch": 0.9466666666666667, |
| "grad_norm": 11.849321365356445, |
| "learning_rate": 3.7142857142857146e-06, |
| "loss": 0.4124, |
| "step": 2130 |
| }, |
| { |
| "epoch": 0.9511111111111111, |
| "grad_norm": 4.347206115722656, |
| "learning_rate": 3.428571428571429e-06, |
| "loss": 0.3611, |
| "step": 2140 |
| }, |
| { |
| "epoch": 0.9555555555555556, |
| "grad_norm": 5.468479156494141, |
| "learning_rate": 3.1428571428571433e-06, |
| "loss": 0.4312, |
| "step": 2150 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 5.55856466293335, |
| "learning_rate": 2.8571428571428573e-06, |
| "loss": 0.3958, |
| "step": 2160 |
| }, |
| { |
| "epoch": 0.9644444444444444, |
| "grad_norm": 10.76039981842041, |
| "learning_rate": 2.5714285714285716e-06, |
| "loss": 0.5161, |
| "step": 2170 |
| }, |
| { |
| "epoch": 0.9688888888888889, |
| "grad_norm": 6.2166547775268555, |
| "learning_rate": 2.285714285714286e-06, |
| "loss": 0.4374, |
| "step": 2180 |
| }, |
| { |
| "epoch": 0.9733333333333334, |
| "grad_norm": 8.524560928344727, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.4948, |
| "step": 2190 |
| }, |
| { |
| "epoch": 0.9777777777777777, |
| "grad_norm": 7.957738399505615, |
| "learning_rate": 1.7142857142857145e-06, |
| "loss": 0.4129, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.9822222222222222, |
| "grad_norm": 9.382002830505371, |
| "learning_rate": 1.4285714285714286e-06, |
| "loss": 0.3626, |
| "step": 2210 |
| }, |
| { |
| "epoch": 0.9866666666666667, |
| "grad_norm": 5.674395561218262, |
| "learning_rate": 1.142857142857143e-06, |
| "loss": 0.5119, |
| "step": 2220 |
| }, |
| { |
| "epoch": 0.9911111111111112, |
| "grad_norm": 4.210872650146484, |
| "learning_rate": 8.571428571428572e-07, |
| "loss": 0.4205, |
| "step": 2230 |
| }, |
| { |
| "epoch": 0.9955555555555555, |
| "grad_norm": 5.570230007171631, |
| "learning_rate": 5.714285714285715e-07, |
| "loss": 0.5659, |
| "step": 2240 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 4.3954572677612305, |
| "learning_rate": 2.8571428571428575e-07, |
| "loss": 0.4376, |
| "step": 2250 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 2250, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 5000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.74343695220736e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |