{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 5000,
  "global_step": 2250,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0044444444444444444,
      "grad_norm": 0.5433621406555176,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.9321,
      "step": 10
    },
    {
      "epoch": 0.008888888888888889,
      "grad_norm": 0.561606764793396,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.6473,
      "step": 20
    },
    {
      "epoch": 0.013333333333333334,
      "grad_norm": 0.5591593384742737,
      "learning_rate": 6e-06,
      "loss": 1.6608,
      "step": 30
    },
    {
      "epoch": 0.017777777777777778,
      "grad_norm": 0.4737153649330139,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.593,
      "step": 40
    },
    {
      "epoch": 0.022222222222222223,
      "grad_norm": 0.49554353952407837,
      "learning_rate": 1e-05,
      "loss": 1.5977,
      "step": 50
    },
    {
      "epoch": 0.02666666666666667,
      "grad_norm": 0.5209193229675293,
      "learning_rate": 1.2e-05,
      "loss": 1.7224,
      "step": 60
    },
    {
      "epoch": 0.03111111111111111,
      "grad_norm": 0.6041632294654846,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 1.7147,
      "step": 70
    },
    {
      "epoch": 0.035555555555555556,
      "grad_norm": 0.5665949583053589,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.8407,
      "step": 80
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.46845224499702454,
      "learning_rate": 1.8e-05,
      "loss": 1.7081,
      "step": 90
    },
    {
      "epoch": 0.044444444444444446,
      "grad_norm": 0.5971758365631104,
      "learning_rate": 2e-05,
      "loss": 1.6679,
      "step": 100
    },
    {
      "epoch": 0.04888888888888889,
      "grad_norm": 0.5474440455436707,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 1.7097,
      "step": 110
    },
    {
      "epoch": 0.05333333333333334,
      "grad_norm": 0.5440825819969177,
      "learning_rate": 2.4e-05,
      "loss": 1.699,
      "step": 120
    },
    {
      "epoch": 0.057777777777777775,
      "grad_norm": 0.5594825148582458,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 1.5727,
      "step": 130
    },
    {
      "epoch": 0.06222222222222222,
      "grad_norm": 0.5222841501235962,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 1.7716,
      "step": 140
    },
    {
      "epoch": 0.06666666666666667,
      "grad_norm": 0.5387088060379028,
      "learning_rate": 3e-05,
      "loss": 1.7168,
      "step": 150
    },
    {
      "epoch": 0.07111111111111111,
      "grad_norm": 0.582731306552887,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.6742,
      "step": 160
    },
    {
      "epoch": 0.07555555555555556,
      "grad_norm": 0.4863213002681732,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 1.8133,
      "step": 170
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.558056652545929,
      "learning_rate": 3.6e-05,
      "loss": 1.6343,
      "step": 180
    },
    {
      "epoch": 0.08444444444444445,
      "grad_norm": 0.5111934542655945,
      "learning_rate": 3.8e-05,
      "loss": 1.7636,
      "step": 190
    },
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 0.5395738482475281,
      "learning_rate": 4e-05,
      "loss": 1.802,
      "step": 200
    },
    {
      "epoch": 0.09333333333333334,
      "grad_norm": 0.5868604779243469,
      "learning_rate": 4.2e-05,
      "loss": 1.7807,
      "step": 210
    },
    {
      "epoch": 0.09777777777777778,
      "grad_norm": 0.6610602140426636,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 1.7328,
      "step": 220
    },
    {
      "epoch": 0.10222222222222223,
      "grad_norm": 0.4879331588745117,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.6453,
      "step": 230
    },
    {
      "epoch": 0.10666666666666667,
      "grad_norm": 0.49884936213493347,
      "learning_rate": 4.8e-05,
      "loss": 1.7553,
      "step": 240
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 0.5518313050270081,
      "learning_rate": 5e-05,
      "loss": 1.7899,
      "step": 250
    },
    {
      "epoch": 0.11555555555555555,
      "grad_norm": 0.4758407473564148,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 1.8008,
      "step": 260
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.6643330454826355,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.8044,
      "step": 270
    },
    {
      "epoch": 0.12444444444444444,
      "grad_norm": 0.6399062275886536,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 1.707,
      "step": 280
    },
    {
      "epoch": 0.1288888888888889,
      "grad_norm": 0.5865938663482666,
      "learning_rate": 5.8e-05,
      "loss": 1.8816,
      "step": 290
    },
    {
      "epoch": 0.13333333333333333,
      "grad_norm": 0.696479320526123,
      "learning_rate": 6e-05,
      "loss": 1.8446,
      "step": 300
    },
    {
      "epoch": 0.13777777777777778,
      "grad_norm": 0.5724856853485107,
      "learning_rate": 6.2e-05,
      "loss": 1.8147,
      "step": 310
    },
    {
      "epoch": 0.14222222222222222,
      "grad_norm": 0.61114901304245,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.7069,
      "step": 320
    },
    {
      "epoch": 0.14666666666666667,
      "grad_norm": 0.47783616185188293,
      "learning_rate": 6.6e-05,
      "loss": 1.945,
      "step": 330
    },
    {
      "epoch": 0.1511111111111111,
      "grad_norm": 0.5438987612724304,
      "learning_rate": 6.800000000000001e-05,
      "loss": 1.8745,
      "step": 340
    },
    {
      "epoch": 0.15555555555555556,
      "grad_norm": 0.6080267429351807,
      "learning_rate": 7e-05,
      "loss": 1.7863,
      "step": 350
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.5038191676139832,
      "learning_rate": 7.2e-05,
      "loss": 1.7838,
      "step": 360
    },
    {
      "epoch": 0.16444444444444445,
      "grad_norm": 0.6252524256706238,
      "learning_rate": 7.4e-05,
      "loss": 1.7028,
      "step": 370
    },
    {
      "epoch": 0.1688888888888889,
      "grad_norm": 0.5980324149131775,
      "learning_rate": 7.6e-05,
      "loss": 1.7397,
      "step": 380
    },
    {
      "epoch": 0.17333333333333334,
      "grad_norm": 0.7241755723953247,
      "learning_rate": 7.800000000000001e-05,
      "loss": 1.7465,
      "step": 390
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 0.5854328870773315,
      "learning_rate": 8e-05,
      "loss": 1.7453,
      "step": 400
    },
    {
      "epoch": 0.18222222222222223,
      "grad_norm": 0.6349270939826965,
      "learning_rate": 8.2e-05,
      "loss": 1.6712,
      "step": 410
    },
    {
      "epoch": 0.18666666666666668,
      "grad_norm": 0.6904433965682983,
      "learning_rate": 8.4e-05,
      "loss": 1.7417,
      "step": 420
    },
    {
      "epoch": 0.19111111111111112,
      "grad_norm": 0.5410600900650024,
      "learning_rate": 8.6e-05,
      "loss": 1.8458,
      "step": 430
    },
    {
      "epoch": 0.19555555555555557,
      "grad_norm": 0.6205611228942871,
      "learning_rate": 8.800000000000001e-05,
      "loss": 1.7107,
      "step": 440
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.5386173129081726,
      "learning_rate": 9e-05,
      "loss": 1.8438,
      "step": 450
    },
    {
      "epoch": 0.20444444444444446,
      "grad_norm": 0.7793389558792114,
      "learning_rate": 9.200000000000001e-05,
      "loss": 1.8507,
      "step": 460
    },
    {
      "epoch": 0.2088888888888889,
      "grad_norm": 0.5682786703109741,
      "learning_rate": 9.4e-05,
      "loss": 1.6696,
      "step": 470
    },
    {
      "epoch": 0.21333333333333335,
      "grad_norm": 0.6214142441749573,
      "learning_rate": 9.6e-05,
      "loss": 1.7636,
      "step": 480
    },
    {
      "epoch": 0.21777777777777776,
      "grad_norm": 0.6407720446586609,
      "learning_rate": 9.8e-05,
      "loss": 1.945,
      "step": 490
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 0.5883258581161499,
      "learning_rate": 0.0001,
      "loss": 1.749,
      "step": 500
    },
    {
      "epoch": 0.22666666666666666,
      "grad_norm": 0.7052844762802124,
      "learning_rate": 9.942857142857144e-05,
      "loss": 1.7102,
      "step": 510
    },
    {
      "epoch": 0.2311111111111111,
      "grad_norm": 0.5479081869125366,
      "learning_rate": 9.885714285714286e-05,
      "loss": 1.7169,
      "step": 520
    },
    {
      "epoch": 0.23555555555555555,
      "grad_norm": 0.5972569584846497,
      "learning_rate": 9.828571428571429e-05,
      "loss": 1.7405,
      "step": 530
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.5114515423774719,
      "learning_rate": 9.771428571428572e-05,
      "loss": 1.9784,
      "step": 540
    },
    {
      "epoch": 0.24444444444444444,
      "grad_norm": 0.5898337960243225,
      "learning_rate": 9.714285714285715e-05,
      "loss": 1.7976,
      "step": 550
    },
    {
      "epoch": 0.24888888888888888,
      "grad_norm": 0.6429341435432434,
      "learning_rate": 9.657142857142858e-05,
      "loss": 1.958,
      "step": 560
    },
    {
      "epoch": 0.25333333333333335,
      "grad_norm": 0.46451956033706665,
      "learning_rate": 9.6e-05,
      "loss": 1.5966,
      "step": 570
    },
    {
      "epoch": 0.2577777777777778,
      "grad_norm": 0.506208598613739,
      "learning_rate": 9.542857142857143e-05,
      "loss": 1.6479,
      "step": 580
    },
    {
      "epoch": 0.26222222222222225,
      "grad_norm": 0.5113669633865356,
      "learning_rate": 9.485714285714287e-05,
      "loss": 1.6449,
      "step": 590
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 0.5901801586151123,
      "learning_rate": 9.428571428571429e-05,
      "loss": 1.7377,
      "step": 600
    },
    {
      "epoch": 0.27111111111111114,
      "grad_norm": 0.5738946199417114,
      "learning_rate": 9.371428571428572e-05,
      "loss": 1.8211,
      "step": 610
    },
    {
      "epoch": 0.27555555555555555,
      "grad_norm": 0.5505834817886353,
      "learning_rate": 9.314285714285715e-05,
      "loss": 1.7923,
      "step": 620
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.6749151349067688,
      "learning_rate": 9.257142857142858e-05,
      "loss": 1.851,
      "step": 630
    },
    {
      "epoch": 0.28444444444444444,
      "grad_norm": 0.618687629699707,
      "learning_rate": 9.200000000000001e-05,
      "loss": 1.8906,
      "step": 640
    },
    {
      "epoch": 0.28888888888888886,
      "grad_norm": 0.592284619808197,
      "learning_rate": 9.142857142857143e-05,
      "loss": 1.8052,
      "step": 650
    },
    {
      "epoch": 0.29333333333333333,
      "grad_norm": 0.6234279274940491,
      "learning_rate": 9.085714285714286e-05,
      "loss": 1.7278,
      "step": 660
    },
    {
      "epoch": 0.29777777777777775,
      "grad_norm": 0.5879189372062683,
      "learning_rate": 9.028571428571428e-05,
      "loss": 1.8085,
      "step": 670
    },
    {
      "epoch": 0.3022222222222222,
      "grad_norm": 0.6300005912780762,
      "learning_rate": 8.971428571428571e-05,
      "loss": 1.7155,
      "step": 680
    },
    {
      "epoch": 0.30666666666666664,
      "grad_norm": 0.563451886177063,
      "learning_rate": 8.914285714285715e-05,
      "loss": 1.7602,
      "step": 690
    },
    {
      "epoch": 0.3111111111111111,
      "grad_norm": 0.6689388751983643,
      "learning_rate": 8.857142857142857e-05,
      "loss": 1.7183,
      "step": 700
    },
    {
      "epoch": 0.31555555555555553,
      "grad_norm": 0.5935807228088379,
      "learning_rate": 8.800000000000001e-05,
      "loss": 1.8198,
      "step": 710
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.6254555583000183,
      "learning_rate": 8.742857142857144e-05,
      "loss": 1.7565,
      "step": 720
    },
    {
      "epoch": 0.3244444444444444,
      "grad_norm": 0.640834391117096,
      "learning_rate": 8.685714285714286e-05,
      "loss": 1.6407,
      "step": 730
    },
    {
      "epoch": 0.3288888888888889,
      "grad_norm": 0.7603657245635986,
      "learning_rate": 8.62857142857143e-05,
      "loss": 1.8392,
      "step": 740
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.5401626229286194,
      "learning_rate": 8.571428571428571e-05,
      "loss": 1.4939,
      "step": 750
    },
    {
      "epoch": 0.3377777777777778,
      "grad_norm": 0.5616275668144226,
      "learning_rate": 8.514285714285714e-05,
      "loss": 1.6581,
      "step": 760
    },
    {
      "epoch": 0.3422222222222222,
      "grad_norm": 0.6039714813232422,
      "learning_rate": 8.457142857142858e-05,
      "loss": 1.7711,
      "step": 770
    },
    {
      "epoch": 0.3466666666666667,
      "grad_norm": 0.6184443831443787,
      "learning_rate": 8.4e-05,
      "loss": 1.709,
      "step": 780
    },
    {
      "epoch": 0.3511111111111111,
      "grad_norm": 0.5194800496101379,
      "learning_rate": 8.342857142857143e-05,
      "loss": 1.7707,
      "step": 790
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 0.6379169225692749,
      "learning_rate": 8.285714285714287e-05,
      "loss": 1.8612,
      "step": 800
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.6198997497558594,
      "learning_rate": 8.228571428571429e-05,
      "loss": 1.8448,
      "step": 810
    },
    {
      "epoch": 0.36444444444444446,
      "grad_norm": 0.6147898435592651,
      "learning_rate": 8.171428571428572e-05,
      "loss": 1.6975,
      "step": 820
    },
    {
      "epoch": 0.3688888888888889,
      "grad_norm": 0.5424386858940125,
      "learning_rate": 8.114285714285714e-05,
      "loss": 1.6583,
      "step": 830
    },
    {
      "epoch": 0.37333333333333335,
      "grad_norm": 0.607720673084259,
      "learning_rate": 8.057142857142857e-05,
      "loss": 1.6709,
      "step": 840
    },
    {
      "epoch": 0.37777777777777777,
      "grad_norm": 0.5133110880851746,
      "learning_rate": 8e-05,
      "loss": 1.7619,
      "step": 850
    },
    {
      "epoch": 0.38222222222222224,
      "grad_norm": 0.525132417678833,
      "learning_rate": 7.942857142857143e-05,
      "loss": 1.651,
      "step": 860
    },
    {
      "epoch": 0.38666666666666666,
      "grad_norm": 0.4882131516933441,
      "learning_rate": 7.885714285714286e-05,
      "loss": 1.6483,
      "step": 870
    },
    {
      "epoch": 0.39111111111111113,
      "grad_norm": 0.6584396958351135,
      "learning_rate": 7.828571428571429e-05,
      "loss": 1.7908,
      "step": 880
    },
    {
      "epoch": 0.39555555555555555,
      "grad_norm": 0.4846736490726471,
      "learning_rate": 7.771428571428572e-05,
      "loss": 1.7241,
      "step": 890
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.5569609999656677,
      "learning_rate": 7.714285714285715e-05,
      "loss": 1.8487,
      "step": 900
    },
    {
      "epoch": 0.40444444444444444,
      "grad_norm": 0.47317251563072205,
      "learning_rate": 7.657142857142857e-05,
      "loss": 1.5844,
      "step": 910
    },
    {
      "epoch": 0.4088888888888889,
      "grad_norm": 0.49643898010253906,
      "learning_rate": 7.6e-05,
      "loss": 1.701,
      "step": 920
    },
    {
      "epoch": 0.41333333333333333,
      "grad_norm": 0.6150220036506653,
      "learning_rate": 7.542857142857144e-05,
      "loss": 1.6521,
      "step": 930
    },
    {
      "epoch": 0.4177777777777778,
      "grad_norm": 0.5863474607467651,
      "learning_rate": 7.485714285714285e-05,
      "loss": 1.7961,
      "step": 940
    },
    {
      "epoch": 0.4222222222222222,
      "grad_norm": 0.46018537878990173,
      "learning_rate": 7.428571428571429e-05,
      "loss": 1.5727,
      "step": 950
    },
    {
      "epoch": 0.4266666666666667,
      "grad_norm": 0.5820701718330383,
      "learning_rate": 7.371428571428572e-05,
      "loss": 1.7221,
      "step": 960
    },
    {
      "epoch": 0.4311111111111111,
      "grad_norm": 0.5305566787719727,
      "learning_rate": 7.314285714285715e-05,
      "loss": 1.5491,
      "step": 970
    },
    {
      "epoch": 0.43555555555555553,
      "grad_norm": 0.44739487767219543,
      "learning_rate": 7.257142857142858e-05,
      "loss": 1.7449,
      "step": 980
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.525854766368866,
      "learning_rate": 7.2e-05,
      "loss": 1.6216,
      "step": 990
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 0.6370983719825745,
      "learning_rate": 7.142857142857143e-05,
      "loss": 1.8326,
      "step": 1000
    },
    {
      "epoch": 0.4488888888888889,
      "grad_norm": 0.5000634789466858,
      "learning_rate": 7.085714285714285e-05,
      "loss": 1.6554,
      "step": 1010
    },
    {
      "epoch": 0.4533333333333333,
      "grad_norm": 0.5493870973587036,
      "learning_rate": 7.028571428571428e-05,
      "loss": 1.6535,
      "step": 1020
    },
    {
      "epoch": 0.4577777777777778,
      "grad_norm": 0.6068606376647949,
      "learning_rate": 6.971428571428572e-05,
      "loss": 1.7912,
      "step": 1030
    },
    {
      "epoch": 0.4622222222222222,
      "grad_norm": 0.36175239086151123,
      "learning_rate": 6.914285714285715e-05,
      "loss": 1.7161,
      "step": 1040
    },
    {
      "epoch": 0.4666666666666667,
      "grad_norm": 0.4946162700653076,
      "learning_rate": 6.857142857142858e-05,
      "loss": 1.7134,
      "step": 1050
    },
    {
      "epoch": 0.4711111111111111,
      "grad_norm": 0.570744514465332,
      "learning_rate": 6.800000000000001e-05,
      "loss": 1.7526,
      "step": 1060
    },
    {
      "epoch": 0.47555555555555556,
      "grad_norm": 0.4829612374305725,
      "learning_rate": 6.742857142857143e-05,
      "loss": 1.5849,
      "step": 1070
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.5220139026641846,
      "learning_rate": 6.685714285714286e-05,
      "loss": 1.5798,
      "step": 1080
    },
    {
      "epoch": 0.48444444444444446,
      "grad_norm": 0.4307192265987396,
      "learning_rate": 6.628571428571428e-05,
      "loss": 1.6771,
      "step": 1090
    },
    {
      "epoch": 0.4888888888888889,
      "grad_norm": 0.5403544306755066,
      "learning_rate": 6.571428571428571e-05,
      "loss": 1.6165,
      "step": 1100
    },
    {
      "epoch": 0.49333333333333335,
      "grad_norm": 0.47385355830192566,
      "learning_rate": 6.514285714285715e-05,
      "loss": 1.6693,
      "step": 1110
    },
    {
      "epoch": 0.49777777777777776,
      "grad_norm": 0.5639126300811768,
      "learning_rate": 6.457142857142856e-05,
      "loss": 1.6192,
      "step": 1120
    },
    {
      "epoch": 0.5022222222222222,
      "grad_norm": 0.596899688243866,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.742,
      "step": 1130
    },
    {
      "epoch": 0.5066666666666667,
      "grad_norm": 0.5001611113548279,
      "learning_rate": 6.342857142857143e-05,
      "loss": 1.7284,
      "step": 1140
    },
    {
      "epoch": 0.5111111111111111,
      "grad_norm": 0.5272433757781982,
      "learning_rate": 6.285714285714286e-05,
      "loss": 1.7103,
      "step": 1150
    },
    {
      "epoch": 0.5155555555555555,
      "grad_norm": 0.46714940667152405,
      "learning_rate": 6.22857142857143e-05,
      "loss": 1.739,
      "step": 1160
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.4891936182975769,
      "learning_rate": 6.171428571428571e-05,
      "loss": 1.5622,
      "step": 1170
    },
    {
      "epoch": 0.5244444444444445,
      "grad_norm": 0.5693128705024719,
      "learning_rate": 6.114285714285714e-05,
      "loss": 1.7711,
      "step": 1180
    },
    {
      "epoch": 0.5288888888888889,
      "grad_norm": 0.4832131862640381,
      "learning_rate": 6.0571428571428576e-05,
      "loss": 1.6518,
      "step": 1190
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 0.4521544277667999,
      "learning_rate": 6e-05,
      "loss": 1.7293,
      "step": 1200
    },
    {
      "epoch": 0.5377777777777778,
      "grad_norm": 0.530257523059845,
      "learning_rate": 5.9428571428571434e-05,
      "loss": 1.7398,
      "step": 1210
    },
    {
      "epoch": 0.5422222222222223,
      "grad_norm": 0.4766942262649536,
      "learning_rate": 5.885714285714285e-05,
      "loss": 1.5728,
      "step": 1220
    },
    {
      "epoch": 0.5466666666666666,
      "grad_norm": 0.5979341864585876,
      "learning_rate": 5.828571428571429e-05,
      "loss": 1.7144,
      "step": 1230
    },
    {
      "epoch": 0.5511111111111111,
      "grad_norm": 0.48419347405433655,
      "learning_rate": 5.771428571428572e-05,
      "loss": 1.6109,
      "step": 1240
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 0.5022163391113281,
      "learning_rate": 5.714285714285714e-05,
      "loss": 1.6527,
      "step": 1250
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.44246387481689453,
      "learning_rate": 5.6571428571428574e-05,
      "loss": 1.5973,
      "step": 1260
    },
    {
      "epoch": 0.5644444444444444,
      "grad_norm": 0.4790962338447571,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 1.7611,
      "step": 1270
    },
    {
      "epoch": 0.5688888888888889,
      "grad_norm": 0.539997398853302,
      "learning_rate": 5.542857142857143e-05,
      "loss": 1.6742,
      "step": 1280
    },
    {
      "epoch": 0.5733333333333334,
      "grad_norm": 0.4964543282985687,
      "learning_rate": 5.485714285714286e-05,
      "loss": 1.7395,
      "step": 1290
    },
    {
      "epoch": 0.5777777777777777,
      "grad_norm": 0.5202584266662598,
      "learning_rate": 5.428571428571428e-05,
      "loss": 1.8882,
      "step": 1300
    },
    {
      "epoch": 0.5822222222222222,
      "grad_norm": 0.5655657649040222,
      "learning_rate": 5.3714285714285714e-05,
      "loss": 1.6829,
      "step": 1310
    },
    {
      "epoch": 0.5866666666666667,
      "grad_norm": 0.4495587944984436,
      "learning_rate": 5.314285714285715e-05,
      "loss": 1.6082,
      "step": 1320
    },
    {
      "epoch": 0.5911111111111111,
      "grad_norm": 0.5401484370231628,
      "learning_rate": 5.257142857142857e-05,
      "loss": 1.6704,
      "step": 1330
    },
    {
      "epoch": 0.5955555555555555,
      "grad_norm": 0.6409158706665039,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 1.6984,
      "step": 1340
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.47010332345962524,
      "learning_rate": 5.142857142857143e-05,
      "loss": 1.7012,
      "step": 1350
    },
    {
      "epoch": 0.6044444444444445,
      "grad_norm": 0.5039106011390686,
      "learning_rate": 5.085714285714286e-05,
      "loss": 1.7446,
      "step": 1360
    },
    {
      "epoch": 0.6088888888888889,
      "grad_norm": 0.46006426215171814,
      "learning_rate": 5.028571428571429e-05,
      "loss": 1.4179,
      "step": 1370
    },
    {
      "epoch": 0.6133333333333333,
      "grad_norm": 0.5222061276435852,
      "learning_rate": 4.971428571428572e-05,
      "loss": 1.6808,
      "step": 1380
    },
    {
      "epoch": 0.6177777777777778,
      "grad_norm": 0.45127660036087036,
      "learning_rate": 4.9142857142857144e-05,
      "loss": 1.653,
      "step": 1390
    },
    {
      "epoch": 0.6222222222222222,
      "grad_norm": 0.4859090745449066,
      "learning_rate": 4.8571428571428576e-05,
      "loss": 1.7781,
      "step": 1400
    },
    {
      "epoch": 0.6266666666666667,
      "grad_norm": 0.6080654859542847,
      "learning_rate": 4.8e-05,
      "loss": 1.6916,
      "step": 1410
    },
    {
      "epoch": 0.6311111111111111,
      "grad_norm": 0.5803218483924866,
      "learning_rate": 4.742857142857143e-05,
      "loss": 1.7859,
      "step": 1420
    },
    {
      "epoch": 0.6355555555555555,
      "grad_norm": 0.590130090713501,
      "learning_rate": 4.685714285714286e-05,
      "loss": 1.6935,
      "step": 1430
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.4160462021827698,
      "learning_rate": 4.628571428571429e-05,
      "loss": 1.5872,
      "step": 1440
    },
    {
      "epoch": 0.6444444444444445,
      "grad_norm": 0.4726947546005249,
      "learning_rate": 4.5714285714285716e-05,
      "loss": 1.7101,
      "step": 1450
    },
    {
      "epoch": 0.6488888888888888,
      "grad_norm": 0.48443475365638733,
      "learning_rate": 4.514285714285714e-05,
      "loss": 1.7473,
      "step": 1460
    },
    {
      "epoch": 0.6533333333333333,
      "grad_norm": 0.5587878823280334,
      "learning_rate": 4.4571428571428574e-05,
      "loss": 1.5633,
      "step": 1470
    },
    {
      "epoch": 0.6577777777777778,
      "grad_norm": 0.43862953782081604,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 1.6364,
      "step": 1480
    },
    {
      "epoch": 0.6622222222222223,
      "grad_norm": 0.4286442995071411,
      "learning_rate": 4.342857142857143e-05,
      "loss": 1.6191,
      "step": 1490
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.45126351714134216,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 1.7249,
      "step": 1500
    },
    {
      "epoch": 0.6711111111111111,
      "grad_norm": 0.5152072906494141,
      "learning_rate": 4.228571428571429e-05,
      "loss": 1.6294,
      "step": 1510
    },
    {
      "epoch": 0.6755555555555556,
      "grad_norm": 0.4662505090236664,
      "learning_rate": 4.1714285714285714e-05,
      "loss": 1.6245,
      "step": 1520
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.49927669763565063,
      "learning_rate": 4.1142857142857146e-05,
      "loss": 1.5931,
      "step": 1530
    },
    {
      "epoch": 0.6844444444444444,
      "grad_norm": 0.4711079001426697,
      "learning_rate": 4.057142857142857e-05,
      "loss": 1.6273,
      "step": 1540
    },
    {
      "epoch": 0.6888888888888889,
      "grad_norm": 0.5319389700889587,
      "learning_rate": 4e-05,
      "loss": 1.7385,
      "step": 1550
    },
    {
      "epoch": 0.6933333333333334,
      "grad_norm": 0.4717606008052826,
      "learning_rate": 3.942857142857143e-05,
      "loss": 1.7488,
      "step": 1560
    },
    {
      "epoch": 0.6977777777777778,
      "grad_norm": 0.4487731158733368,
      "learning_rate": 3.885714285714286e-05,
      "loss": 1.4776,
      "step": 1570
    },
    {
      "epoch": 0.7022222222222222,
      "grad_norm": 0.594499409198761,
      "learning_rate": 3.8285714285714286e-05,
      "loss": 1.5591,
      "step": 1580
    },
    {
      "epoch": 0.7066666666666667,
      "grad_norm": 0.5072243809700012,
      "learning_rate": 3.771428571428572e-05,
      "loss": 1.6309,
      "step": 1590
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 0.6944729685783386,
      "learning_rate": 3.7142857142857143e-05,
      "loss": 1.6328,
      "step": 1600
    },
    {
      "epoch": 0.7155555555555555,
      "grad_norm": 0.41481533646583557,
      "learning_rate": 3.6571428571428576e-05,
      "loss": 1.541,
      "step": 1610
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.45629650354385376,
      "learning_rate": 3.6e-05,
      "loss": 1.7933,
      "step": 1620
    },
    {
      "epoch": 0.7244444444444444,
      "grad_norm": 0.4524475932121277,
      "learning_rate": 3.5428571428571426e-05,
      "loss": 1.7052,
      "step": 1630
    },
    {
      "epoch": 0.7288888888888889,
      "grad_norm": 0.5357774496078491,
      "learning_rate": 3.485714285714286e-05,
      "loss": 1.6476,
      "step": 1640
    },
    {
      "epoch": 0.7333333333333333,
      "grad_norm": 0.4461857080459595,
      "learning_rate": 3.428571428571429e-05,
      "loss": 1.5601,
      "step": 1650
    },
    {
      "epoch": 0.7377777777777778,
      "grad_norm": 0.5301758050918579,
      "learning_rate": 3.3714285714285716e-05,
      "loss": 1.8176,
      "step": 1660
    },
    {
      "epoch": 0.7422222222222222,
      "grad_norm": 1.122097134590149,
      "learning_rate": 3.314285714285714e-05,
      "loss": 1.6481,
      "step": 1670
    },
    {
      "epoch": 0.7466666666666667,
      "grad_norm": 0.6046774387359619,
      "learning_rate": 3.257142857142857e-05,
      "loss": 1.8148,
      "step": 1680
    },
    {
      "epoch": 0.7511111111111111,
      "grad_norm": 0.4735705256462097,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.5967,
      "step": 1690
    },
    {
      "epoch": 0.7555555555555555,
      "grad_norm": 0.4766364097595215,
      "learning_rate": 3.142857142857143e-05,
      "loss": 1.6152,
      "step": 1700
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.5077019333839417,
      "learning_rate": 3.0857142857142856e-05,
      "loss": 1.6901,
      "step": 1710
    },
    {
      "epoch": 0.7644444444444445,
      "grad_norm": 0.47451454401016235,
      "learning_rate": 3.0285714285714288e-05,
      "loss": 1.5958,
      "step": 1720
    },
    {
      "epoch": 0.7688888888888888,
      "grad_norm": 0.4522535800933838,
      "learning_rate": 2.9714285714285717e-05,
      "loss": 1.5278,
      "step": 1730
    },
    {
      "epoch": 0.7733333333333333,
      "grad_norm": 0.47677576541900635,
      "learning_rate": 2.9142857142857146e-05,
      "loss": 1.7089,
      "step": 1740
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 0.5438882112503052,
      "learning_rate": 2.857142857142857e-05,
      "loss": 1.6967,
      "step": 1750
    },
    {
      "epoch": 0.7822222222222223,
      "grad_norm": 0.5106326341629028,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 1.6133,
      "step": 1760
    },
    {
      "epoch": 0.7866666666666666,
      "grad_norm": 0.48920413851737976,
      "learning_rate": 2.742857142857143e-05,
      "loss": 1.5464,
      "step": 1770
    },
    {
      "epoch": 0.7911111111111111,
      "grad_norm": 0.4835570156574249,
      "learning_rate": 2.6857142857142857e-05,
      "loss": 1.6191,
      "step": 1780
    },
    {
      "epoch": 0.7955555555555556,
      "grad_norm": 0.4348030686378479,
      "learning_rate": 2.6285714285714286e-05,
      "loss": 1.595,
      "step": 1790
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.5459887981414795,
      "learning_rate": 2.5714285714285714e-05,
      "loss": 1.6642,
      "step": 1800
    },
    {
      "epoch": 0.8044444444444444,
      "grad_norm": 0.49609118700027466,
      "learning_rate": 2.5142857142857147e-05,
      "loss": 1.6884,
      "step": 1810
    },
    {
      "epoch": 0.8088888888888889,
      "grad_norm": 0.49508237838745117,
      "learning_rate": 2.4571428571428572e-05,
      "loss": 1.5543,
      "step": 1820
    },
    {
      "epoch": 0.8133333333333334,
      "grad_norm": 0.5876829624176025,
      "learning_rate": 2.4e-05,
      "loss": 1.5876,
      "step": 1830
    },
    {
      "epoch": 0.8177777777777778,
      "grad_norm": 0.5161216259002686,
      "learning_rate": 2.342857142857143e-05,
      "loss": 1.5489,
      "step": 1840
    },
    {
      "epoch": 0.8222222222222222,
      "grad_norm": 0.49379730224609375,
      "learning_rate": 2.2857142857142858e-05,
      "loss": 1.5353,
      "step": 1850
    },
    {
      "epoch": 0.8266666666666667,
      "grad_norm": 0.4468284249305725,
      "learning_rate": 2.2285714285714287e-05,
      "loss": 1.5983,
      "step": 1860
    },
    {
      "epoch": 0.8311111111111111,
      "grad_norm": 0.3884105384349823,
      "learning_rate": 2.1714285714285715e-05,
      "loss": 1.6029,
      "step": 1870
    },
    {
      "epoch": 0.8355555555555556,
      "grad_norm": 0.5299472808837891,
      "learning_rate": 2.1142857142857144e-05,
      "loss": 1.4086,
      "step": 1880
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.4671512544155121,
      "learning_rate": 2.0571428571428573e-05,
      "loss": 1.6286,
      "step": 1890
    },
    {
      "epoch": 0.8444444444444444,
      "grad_norm": 0.42520585656166077,
      "learning_rate": 2e-05,
      "loss": 1.5687,
      "step": 1900
    },
    {
      "epoch": 0.8488888888888889,
      "grad_norm": 0.4671627879142761,
      "learning_rate": 1.942857142857143e-05,
      "loss": 1.7934,
      "step": 1910
    },
    {
      "epoch": 0.8533333333333334,
      "grad_norm": 0.44849321246147156,
      "learning_rate": 1.885714285714286e-05,
      "loss": 1.4699,
      "step": 1920
    },
    {
      "epoch": 0.8577777777777778,
      "grad_norm": 0.4914058744907379,
      "learning_rate": 1.8285714285714288e-05,
      "loss": 1.6696,
      "step": 1930
    },
    {
      "epoch": 0.8622222222222222,
      "grad_norm": 0.4552600681781769,
      "learning_rate": 1.7714285714285713e-05,
      "loss": 1.5428,
      "step": 1940
    },
    {
      "epoch": 0.8666666666666667,
      "grad_norm": 0.568909764289856,
      "learning_rate": 1.7142857142857145e-05,
      "loss": 1.5826,
      "step": 1950
    },
    {
      "epoch": 0.8711111111111111,
      "grad_norm": 0.4071631133556366,
      "learning_rate": 1.657142857142857e-05,
      "loss": 1.4793,
      "step": 1960
    },
    {
      "epoch": 0.8755555555555555,
      "grad_norm": 0.4576158821582794,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.7766,
      "step": 1970
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.48867034912109375,
      "learning_rate": 1.5428571428571428e-05,
      "loss": 1.5677,
      "step": 1980
    },
    {
      "epoch": 0.8844444444444445,
      "grad_norm": 0.5315440893173218,
      "learning_rate": 1.4857142857142858e-05,
      "loss": 1.6422,
      "step": 1990
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.5676016211509705,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 1.669,
      "step": 2000
    },
    {
      "epoch": 0.8933333333333333,
      "grad_norm": 0.4910498261451721,
      "learning_rate": 1.3714285714285716e-05,
      "loss": 1.6654,
      "step": 2010
    },
    {
      "epoch": 0.8977777777777778,
      "grad_norm": 0.4945501387119293,
      "learning_rate": 1.3142857142857143e-05,
      "loss": 1.752,
      "step": 2020
    },
    {
      "epoch": 0.9022222222222223,
      "grad_norm": 0.4435444176197052,
      "learning_rate": 1.2571428571428573e-05,
      "loss": 1.4818,
      "step": 2030
    },
    {
      "epoch": 0.9066666666666666,
      "grad_norm": 0.49968045949935913,
      "learning_rate": 1.2e-05,
      "loss": 1.7248,
      "step": 2040
    },
    {
      "epoch": 0.9111111111111111,
      "grad_norm": 0.5078977942466736,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 1.6665,
      "step": 2050
    },
    {
      "epoch": 0.9155555555555556,
      "grad_norm": 0.4734516739845276,
      "learning_rate": 1.0857142857142858e-05,
      "loss": 1.6927,
      "step": 2060
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.5901415348052979,
      "learning_rate": 1.0285714285714286e-05,
      "loss": 1.575,
      "step": 2070
    },
    {
      "epoch": 0.9244444444444444,
      "grad_norm": 0.4999895393848419,
      "learning_rate": 9.714285714285715e-06,
      "loss": 1.6373,
      "step": 2080
    },
    {
      "epoch": 0.9288888888888889,
      "grad_norm": 0.5657374858856201,
      "learning_rate": 9.142857142857144e-06,
      "loss": 1.7343,
      "step": 2090
    },
    {
      "epoch": 0.9333333333333333,
      "grad_norm": 0.4468814432621002,
      "learning_rate": 8.571428571428573e-06,
      "loss": 1.7404,
      "step": 2100
    },
    {
      "epoch": 0.9377777777777778,
      "grad_norm": 0.4465270936489105,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.6573,
      "step": 2110
    },
    {
      "epoch": 0.9422222222222222,
      "grad_norm": 0.573610782623291,
      "learning_rate": 7.428571428571429e-06,
      "loss": 1.6556,
      "step": 2120
    },
    {
      "epoch": 0.9466666666666667,
      "grad_norm": 0.4416819214820862,
      "learning_rate": 6.857142857142858e-06,
      "loss": 1.6867,
      "step": 2130
    },
    {
      "epoch": 0.9511111111111111,
      "grad_norm": 0.5595669150352478,
      "learning_rate": 6.285714285714287e-06,
      "loss": 1.6842,
      "step": 2140
    },
    {
      "epoch": 0.9555555555555556,
      "grad_norm": 0.4565703868865967,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 1.5335,
      "step": 2150
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.5595963001251221,
      "learning_rate": 5.142857142857143e-06,
      "loss": 1.7135,
      "step": 2160
    },
    {
      "epoch": 0.9644444444444444,
      "grad_norm": 0.4642212986946106,
      "learning_rate": 4.571428571428572e-06,
      "loss": 1.6675,
      "step": 2170
    },
    {
      "epoch": 0.9688888888888889,
      "grad_norm": 0.45244458317756653,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.573,
      "step": 2180
    },
    {
      "epoch": 0.9733333333333334,
      "grad_norm": 0.4644954204559326,
      "learning_rate": 3.428571428571429e-06,
      "loss": 1.5292,
      "step": 2190
    },
    {
      "epoch": 0.9777777777777777,
      "grad_norm": 0.4594193398952484,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.6958,
      "step": 2200
    },
    {
      "epoch": 0.9822222222222222,
      "grad_norm": 0.4561087489128113,
      "learning_rate": 2.285714285714286e-06,
      "loss": 1.6534,
      "step": 2210
    },
    {
      "epoch": 0.9866666666666667,
      "grad_norm": 0.5011770129203796,
      "learning_rate": 1.7142857142857145e-06,
      "loss": 1.4897,
      "step": 2220
    },
    {
      "epoch": 0.9911111111111112,
      "grad_norm": 0.4924561083316803,
      "learning_rate": 1.142857142857143e-06,
      "loss": 1.7416,
      "step": 2230
    },
    {
      "epoch": 0.9955555555555555,
      "grad_norm": 0.5766103863716125,
      "learning_rate": 5.714285714285715e-07,
      "loss": 1.727,
      "step": 2240
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.5178750157356262,
      "learning_rate": 0.0,
      "loss": 1.5411,
      "step": 2250
    }
  ],
  "logging_steps": 10,
  "max_steps": 2250,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 5000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.8552192868352e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}