{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 5000,
  "global_step": 2250,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0044444444444444444,
      "grad_norm": 172.13922119140625,
      "learning_rate": 5.000000000000001e-07,
      "loss": 11.5329,
      "step": 10
    },
    {
      "epoch": 0.008888888888888889,
      "grad_norm": 179.96237182617188,
      "learning_rate": 1.5e-06,
      "loss": 11.3904,
      "step": 20
    },
    {
      "epoch": 0.013333333333333334,
      "grad_norm": 1364.69921875,
      "learning_rate": 2.5e-06,
      "loss": 10.7917,
      "step": 30
    },
    {
      "epoch": 0.017777777777777778,
      "grad_norm": 8219.5224609375,
      "learning_rate": 3.3e-06,
      "loss": 9.1512,
      "step": 40
    },
    {
      "epoch": 0.022222222222222223,
      "grad_norm": 12965.9892578125,
      "learning_rate": 4.2999999999999995e-06,
      "loss": 7.8213,
      "step": 50
    },
    {
      "epoch": 0.02666666666666667,
      "grad_norm": 31872.74609375,
      "learning_rate": 5.3e-06,
      "loss": 6.6272,
      "step": 60
    },
    {
      "epoch": 0.03111111111111111,
      "grad_norm": 788.8919677734375,
      "learning_rate": 6.300000000000001e-06,
      "loss": 6.3266,
      "step": 70
    },
    {
      "epoch": 0.035555555555555556,
      "grad_norm": 106.24903869628906,
      "learning_rate": 7.2999999999999996e-06,
      "loss": 5.7356,
      "step": 80
    },
    {
      "epoch": 0.04,
      "grad_norm": 409.2347412109375,
      "learning_rate": 8.3e-06,
      "loss": 5.018,
      "step": 90
    },
    {
      "epoch": 0.044444444444444446,
      "grad_norm": 817.6410522460938,
      "learning_rate": 9.3e-06,
      "loss": 5.422,
      "step": 100
    },
    {
      "epoch": 0.04888888888888889,
      "grad_norm": 589.453125,
      "learning_rate": 1.03e-05,
      "loss": 3.6104,
      "step": 110
    },
    {
      "epoch": 0.05333333333333334,
      "grad_norm": 299.5021667480469,
      "learning_rate": 1.13e-05,
      "loss": 3.3433,
      "step": 120
    },
    {
      "epoch": 0.057777777777777775,
      "grad_norm": 105.66629791259766,
      "learning_rate": 1.23e-05,
      "loss": 2.1184,
      "step": 130
    },
    {
      "epoch": 0.06222222222222222,
      "grad_norm": 94.36296844482422,
      "learning_rate": 1.3300000000000001e-05,
      "loss": 1.6441,
      "step": 140
    },
    {
      "epoch": 0.06666666666666667,
      "grad_norm": 99.46455383300781,
      "learning_rate": 1.43e-05,
      "loss": 1.0691,
      "step": 150
    },
    {
      "epoch": 0.07111111111111111,
      "grad_norm": 128.905517578125,
      "learning_rate": 1.53e-05,
      "loss": 1.347,
      "step": 160
    },
    {
      "epoch": 0.07555555555555556,
      "grad_norm": 10.669831275939941,
      "learning_rate": 1.63e-05,
      "loss": 0.8163,
      "step": 170
    },
    {
      "epoch": 0.08,
      "grad_norm": 17.684833526611328,
      "learning_rate": 1.73e-05,
      "loss": 1.0315,
      "step": 180
    },
    {
      "epoch": 0.08444444444444445,
      "grad_norm": 35.8447151184082,
      "learning_rate": 1.83e-05,
      "loss": 0.6946,
      "step": 190
    },
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 88.678466796875,
      "learning_rate": 1.93e-05,
      "loss": 1.0834,
      "step": 200
    },
    {
      "epoch": 0.09333333333333334,
      "grad_norm": 50.5704231262207,
      "learning_rate": 2.0300000000000002e-05,
      "loss": 0.9062,
      "step": 210
    },
    {
      "epoch": 0.09777777777777778,
      "grad_norm": 5.56904935836792,
      "learning_rate": 2.13e-05,
      "loss": 0.9961,
      "step": 220
    },
    {
      "epoch": 0.10222222222222223,
      "grad_norm": 2.848574638366699,
      "learning_rate": 2.23e-05,
      "loss": 0.7547,
      "step": 230
    },
    {
      "epoch": 0.10666666666666667,
      "grad_norm": 51.646854400634766,
      "learning_rate": 2.3300000000000004e-05,
      "loss": 0.9434,
      "step": 240
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 4.968932151794434,
      "learning_rate": 2.43e-05,
      "loss": 0.9282,
      "step": 250
    },
    {
      "epoch": 0.11555555555555555,
      "grad_norm": 8.864336967468262,
      "learning_rate": 2.5300000000000002e-05,
      "loss": 1.1923,
      "step": 260
    },
    {
      "epoch": 0.12,
      "grad_norm": 6.225351333618164,
      "learning_rate": 2.6300000000000002e-05,
      "loss": 0.7519,
      "step": 270
    },
    {
      "epoch": 0.12444444444444444,
      "grad_norm": 16.043298721313477,
      "learning_rate": 2.7300000000000003e-05,
      "loss": 0.8965,
      "step": 280
    },
    {
      "epoch": 0.1288888888888889,
      "grad_norm": 13.324910163879395,
      "learning_rate": 2.83e-05,
      "loss": 0.8238,
      "step": 290
    },
    {
      "epoch": 0.13333333333333333,
      "grad_norm": 3.661782741546631,
      "learning_rate": 2.93e-05,
      "loss": 0.6619,
      "step": 300
    },
    {
      "epoch": 0.13777777777777778,
      "grad_norm": 24.01588249206543,
      "learning_rate": 3.03e-05,
      "loss": 0.9874,
      "step": 310
    },
    {
      "epoch": 0.14222222222222222,
      "grad_norm": 3.4116015434265137,
      "learning_rate": 3.13e-05,
      "loss": 0.7866,
      "step": 320
    },
    {
      "epoch": 0.14666666666666667,
      "grad_norm": 5.378733158111572,
      "learning_rate": 3.2300000000000006e-05,
      "loss": 0.784,
      "step": 330
    },
    {
      "epoch": 0.1511111111111111,
      "grad_norm": 3.0666747093200684,
      "learning_rate": 3.33e-05,
      "loss": 0.8068,
      "step": 340
    },
    {
      "epoch": 0.15555555555555556,
      "grad_norm": 5.233002662658691,
      "learning_rate": 3.430000000000001e-05,
      "loss": 0.8338,
      "step": 350
    },
    {
      "epoch": 0.16,
      "grad_norm": 12.038925170898438,
      "learning_rate": 3.53e-05,
      "loss": 0.9978,
      "step": 360
    },
    {
      "epoch": 0.16444444444444445,
      "grad_norm": 4.42678165435791,
      "learning_rate": 3.63e-05,
      "loss": 0.9186,
      "step": 370
    },
    {
      "epoch": 0.1688888888888889,
      "grad_norm": 3.2533979415893555,
      "learning_rate": 3.73e-05,
      "loss": 0.853,
      "step": 380
    },
    {
      "epoch": 0.17333333333333334,
      "grad_norm": 6.089888095855713,
      "learning_rate": 3.83e-05,
      "loss": 0.6885,
      "step": 390
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 2.185434341430664,
      "learning_rate": 3.9300000000000007e-05,
      "loss": 0.7051,
      "step": 400
    },
    {
      "epoch": 0.18222222222222223,
      "grad_norm": 3.463395118713379,
      "learning_rate": 4.0300000000000004e-05,
      "loss": 0.7594,
      "step": 410
    },
    {
      "epoch": 0.18666666666666668,
      "grad_norm": 111.5975341796875,
      "learning_rate": 4.13e-05,
      "loss": 0.7679,
      "step": 420
    },
    {
      "epoch": 0.19111111111111112,
      "grad_norm": 2.420258045196533,
      "learning_rate": 4.23e-05,
      "loss": 0.9897,
      "step": 430
    },
    {
      "epoch": 0.19555555555555557,
      "grad_norm": 3.507974863052368,
      "learning_rate": 4.33e-05,
      "loss": 0.7764,
      "step": 440
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.6020970344543457,
      "learning_rate": 4.43e-05,
      "loss": 0.6163,
      "step": 450
    },
    {
      "epoch": 0.20444444444444446,
      "grad_norm": 1.9605637788772583,
      "learning_rate": 4.53e-05,
      "loss": 0.9071,
      "step": 460
    },
    {
      "epoch": 0.2088888888888889,
      "grad_norm": 6.756528377532959,
      "learning_rate": 4.630000000000001e-05,
      "loss": 0.939,
      "step": 470
    },
    {
      "epoch": 0.21333333333333335,
      "grad_norm": 6.34450101852417,
      "learning_rate": 4.73e-05,
      "loss": 0.832,
      "step": 480
    },
    {
      "epoch": 0.21777777777777776,
      "grad_norm": 3.8294358253479004,
      "learning_rate": 4.83e-05,
      "loss": 0.9205,
      "step": 490
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 2.1834263801574707,
      "learning_rate": 4.93e-05,
      "loss": 0.6335,
      "step": 500
    },
    {
      "epoch": 0.22666666666666666,
      "grad_norm": 2.837082624435425,
      "learning_rate": 4.9914285714285717e-05,
      "loss": 0.6933,
      "step": 510
    },
    {
      "epoch": 0.2311111111111111,
      "grad_norm": 1.8396414518356323,
      "learning_rate": 4.962857142857143e-05,
      "loss": 1.0322,
      "step": 520
    },
    {
      "epoch": 0.23555555555555555,
      "grad_norm": 4.330628871917725,
      "learning_rate": 4.934285714285715e-05,
      "loss": 0.7124,
      "step": 530
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.490299701690674,
      "learning_rate": 4.905714285714286e-05,
      "loss": 0.7426,
      "step": 540
    },
    {
      "epoch": 0.24444444444444444,
      "grad_norm": 1.341838002204895,
      "learning_rate": 4.8771428571428574e-05,
      "loss": 0.7588,
      "step": 550
    },
    {
      "epoch": 0.24888888888888888,
      "grad_norm": 2.234884262084961,
      "learning_rate": 4.848571428571429e-05,
      "loss": 0.8314,
      "step": 560
    },
    {
      "epoch": 0.25333333333333335,
      "grad_norm": 2.8187923431396484,
      "learning_rate": 4.82e-05,
      "loss": 0.8109,
      "step": 570
    },
    {
      "epoch": 0.2577777777777778,
      "grad_norm": 0.9691933393478394,
      "learning_rate": 4.7914285714285715e-05,
      "loss": 0.6785,
      "step": 580
    },
    {
      "epoch": 0.26222222222222225,
      "grad_norm": 1.8897961378097534,
      "learning_rate": 4.762857142857143e-05,
      "loss": 0.9523,
      "step": 590
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 7.316945552825928,
      "learning_rate": 4.734285714285715e-05,
      "loss": 0.5996,
      "step": 600
    },
    {
      "epoch": 0.27111111111111114,
      "grad_norm": 2.491607427597046,
      "learning_rate": 4.7057142857142864e-05,
      "loss": 0.5426,
      "step": 610
    },
    {
      "epoch": 0.27555555555555555,
      "grad_norm": 2.7054076194763184,
      "learning_rate": 4.677142857142857e-05,
      "loss": 0.6294,
      "step": 620
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.5034236907958984,
      "learning_rate": 4.648571428571429e-05,
      "loss": 0.4571,
      "step": 630
    },
    {
      "epoch": 0.28444444444444444,
      "grad_norm": 5.013509750366211,
      "learning_rate": 4.6200000000000005e-05,
      "loss": 0.9696,
      "step": 640
    },
    {
      "epoch": 0.28888888888888886,
      "grad_norm": 1.4784796237945557,
      "learning_rate": 4.5914285714285714e-05,
      "loss": 0.79,
      "step": 650
    },
    {
      "epoch": 0.29333333333333333,
      "grad_norm": 1.679485559463501,
      "learning_rate": 4.562857142857143e-05,
      "loss": 0.5445,
      "step": 660
    },
    {
      "epoch": 0.29777777777777775,
      "grad_norm": 11.93566608428955,
      "learning_rate": 4.534285714285714e-05,
      "loss": 0.8077,
      "step": 670
    },
    {
      "epoch": 0.3022222222222222,
      "grad_norm": 2.100651264190674,
      "learning_rate": 4.5057142857142856e-05,
      "loss": 0.5643,
      "step": 680
    },
    {
      "epoch": 0.30666666666666664,
      "grad_norm": 2.368565797805786,
      "learning_rate": 4.477142857142858e-05,
      "loss": 1.0012,
      "step": 690
    },
    {
      "epoch": 0.3111111111111111,
      "grad_norm": 2.4255175590515137,
      "learning_rate": 4.448571428571429e-05,
      "loss": 0.6344,
      "step": 700
    },
    {
      "epoch": 0.31555555555555553,
      "grad_norm": 3.4809205532073975,
      "learning_rate": 4.4200000000000004e-05,
      "loss": 0.87,
      "step": 710
    },
    {
      "epoch": 0.32,
      "grad_norm": 12.76645565032959,
      "learning_rate": 4.391428571428572e-05,
      "loss": 0.673,
      "step": 720
    },
    {
      "epoch": 0.3244444444444444,
      "grad_norm": 3.417945623397827,
      "learning_rate": 4.362857142857143e-05,
      "loss": 0.6753,
      "step": 730
    },
    {
      "epoch": 0.3288888888888889,
      "grad_norm": 2.9550516605377197,
      "learning_rate": 4.3342857142857145e-05,
      "loss": 0.6621,
      "step": 740
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 1.9887003898620605,
      "learning_rate": 4.3057142857142854e-05,
      "loss": 0.671,
      "step": 750
    },
    {
      "epoch": 0.3377777777777778,
      "grad_norm": 1.826278567314148,
      "learning_rate": 4.277142857142857e-05,
      "loss": 0.7536,
      "step": 760
    },
    {
      "epoch": 0.3422222222222222,
      "grad_norm": 4.41416597366333,
      "learning_rate": 4.2485714285714286e-05,
      "loss": 0.7106,
      "step": 770
    },
    {
      "epoch": 0.3466666666666667,
      "grad_norm": 1.8409727811813354,
      "learning_rate": 4.22e-05,
      "loss": 0.7142,
      "step": 780
    },
    {
      "epoch": 0.3511111111111111,
      "grad_norm": 2.143974781036377,
      "learning_rate": 4.191428571428572e-05,
      "loss": 0.5623,
      "step": 790
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 1.1921825408935547,
      "learning_rate": 4.162857142857143e-05,
      "loss": 0.6638,
      "step": 800
    },
    {
      "epoch": 0.36,
      "grad_norm": 2.4289398193359375,
      "learning_rate": 4.1342857142857144e-05,
      "loss": 0.503,
      "step": 810
    },
    {
      "epoch": 0.36444444444444446,
      "grad_norm": 1.718072533607483,
      "learning_rate": 4.105714285714286e-05,
      "loss": 0.4197,
      "step": 820
    },
    {
      "epoch": 0.3688888888888889,
      "grad_norm": 2.278563976287842,
      "learning_rate": 4.077142857142857e-05,
      "loss": 0.611,
      "step": 830
    },
    {
      "epoch": 0.37333333333333335,
      "grad_norm": 2.060332775115967,
      "learning_rate": 4.0485714285714285e-05,
      "loss": 0.5883,
      "step": 840
    },
    {
      "epoch": 0.37777777777777777,
      "grad_norm": 2.5066239833831787,
      "learning_rate": 4.02e-05,
      "loss": 0.6179,
      "step": 850
    },
    {
      "epoch": 0.38222222222222224,
      "grad_norm": 5.195372104644775,
      "learning_rate": 3.991428571428572e-05,
      "loss": 0.7709,
      "step": 860
    },
    {
      "epoch": 0.38666666666666666,
      "grad_norm": 3.0029399394989014,
      "learning_rate": 3.9628571428571433e-05,
      "loss": 0.584,
      "step": 870
    },
    {
      "epoch": 0.39111111111111113,
      "grad_norm": 6.307543754577637,
      "learning_rate": 3.934285714285714e-05,
      "loss": 0.6633,
      "step": 880
    },
    {
      "epoch": 0.39555555555555555,
      "grad_norm": 6.635744571685791,
      "learning_rate": 3.905714285714286e-05,
      "loss": 0.85,
      "step": 890
    },
    {
      "epoch": 0.4,
      "grad_norm": 4.6865434646606445,
      "learning_rate": 3.8771428571428575e-05,
      "loss": 0.6508,
      "step": 900
    },
    {
      "epoch": 0.40444444444444444,
      "grad_norm": 1.8700509071350098,
      "learning_rate": 3.8485714285714284e-05,
      "loss": 0.5405,
      "step": 910
    },
    {
      "epoch": 0.4088888888888889,
      "grad_norm": 3.749453067779541,
      "learning_rate": 3.82e-05,
      "loss": 0.6869,
      "step": 920
    },
    {
      "epoch": 0.41333333333333333,
      "grad_norm": 2.443854570388794,
      "learning_rate": 3.7914285714285716e-05,
      "loss": 0.5013,
      "step": 930
    },
    {
      "epoch": 0.4177777777777778,
      "grad_norm": 1.066603183746338,
      "learning_rate": 3.762857142857143e-05,
      "loss": 0.5993,
      "step": 940
    },
    {
      "epoch": 0.4222222222222222,
      "grad_norm": 5.453496932983398,
      "learning_rate": 3.734285714285715e-05,
      "loss": 0.633,
      "step": 950
    },
    {
      "epoch": 0.4266666666666667,
      "grad_norm": 3.3490309715270996,
      "learning_rate": 3.705714285714286e-05,
      "loss": 0.7452,
      "step": 960
    },
    {
      "epoch": 0.4311111111111111,
      "grad_norm": 2.070544719696045,
      "learning_rate": 3.6771428571428574e-05,
      "loss": 0.7698,
      "step": 970
    },
    {
      "epoch": 0.43555555555555553,
      "grad_norm": 3.1262307167053223,
      "learning_rate": 3.648571428571429e-05,
      "loss": 0.7202,
      "step": 980
    },
    {
      "epoch": 0.44,
      "grad_norm": 4.966836929321289,
      "learning_rate": 3.62e-05,
      "loss": 0.6881,
      "step": 990
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 0.9398611783981323,
      "learning_rate": 3.5914285714285715e-05,
      "loss": 0.3937,
      "step": 1000
    },
    {
      "epoch": 0.4488888888888889,
      "grad_norm": 2.2783327102661133,
      "learning_rate": 3.562857142857143e-05,
      "loss": 0.5421,
      "step": 1010
    },
    {
      "epoch": 0.4533333333333333,
      "grad_norm": 1.5350534915924072,
      "learning_rate": 3.534285714285715e-05,
      "loss": 1.1952,
      "step": 1020
    },
    {
      "epoch": 0.4577777777777778,
      "grad_norm": 2.5785741806030273,
      "learning_rate": 3.505714285714286e-05,
      "loss": 0.7421,
      "step": 1030
    },
    {
      "epoch": 0.4622222222222222,
      "grad_norm": 3.4625842571258545,
      "learning_rate": 3.477142857142857e-05,
      "loss": 0.6769,
      "step": 1040
    },
    {
      "epoch": 0.4666666666666667,
      "grad_norm": 1.2348237037658691,
      "learning_rate": 3.448571428571429e-05,
      "loss": 0.4613,
      "step": 1050
    },
    {
      "epoch": 0.4711111111111111,
      "grad_norm": 1.4216806888580322,
      "learning_rate": 3.4200000000000005e-05,
      "loss": 0.6116,
      "step": 1060
    },
    {
      "epoch": 0.47555555555555556,
      "grad_norm": 1.089032769203186,
      "learning_rate": 3.3914285714285714e-05,
      "loss": 0.5686,
      "step": 1070
    },
    {
      "epoch": 0.48,
      "grad_norm": 2.135321855545044,
      "learning_rate": 3.362857142857143e-05,
      "loss": 0.5668,
      "step": 1080
    },
    {
      "epoch": 0.48444444444444446,
      "grad_norm": 1.846582055091858,
      "learning_rate": 3.334285714285714e-05,
      "loss": 0.62,
      "step": 1090
    },
    {
      "epoch": 0.4888888888888889,
      "grad_norm": 2.3308606147766113,
      "learning_rate": 3.305714285714286e-05,
      "loss": 0.7323,
      "step": 1100
    },
    {
      "epoch": 0.49333333333333335,
      "grad_norm": 10.256339073181152,
      "learning_rate": 3.277142857142858e-05,
      "loss": 0.6122,
      "step": 1110
    },
    {
      "epoch": 0.49777777777777776,
      "grad_norm": 5.60221529006958,
      "learning_rate": 3.248571428571429e-05,
      "loss": 0.7023,
      "step": 1120
    },
    {
      "epoch": 0.5022222222222222,
      "grad_norm": 1.3172476291656494,
      "learning_rate": 3.2200000000000003e-05,
      "loss": 0.6019,
      "step": 1130
    },
    {
      "epoch": 0.5066666666666667,
      "grad_norm": 2.984369993209839,
      "learning_rate": 3.191428571428571e-05,
      "loss": 0.8876,
      "step": 1140
    },
    {
      "epoch": 0.5111111111111111,
      "grad_norm": 2.645580291748047,
      "learning_rate": 3.162857142857143e-05,
      "loss": 0.6198,
      "step": 1150
    },
    {
      "epoch": 0.5155555555555555,
      "grad_norm": 6.525484085083008,
      "learning_rate": 3.1342857142857145e-05,
      "loss": 0.7378,
      "step": 1160
    },
    {
      "epoch": 0.52,
      "grad_norm": 2.520108938217163,
      "learning_rate": 3.1057142857142854e-05,
      "loss": 0.6144,
      "step": 1170
    },
    {
      "epoch": 0.5244444444444445,
      "grad_norm": 1.4671144485473633,
      "learning_rate": 3.077142857142857e-05,
      "loss": 0.5624,
      "step": 1180
    },
    {
      "epoch": 0.5288888888888889,
      "grad_norm": 1.6521360874176025,
      "learning_rate": 3.048571428571429e-05,
      "loss": 0.8754,
      "step": 1190
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 1.7824636697769165,
      "learning_rate": 3.02e-05,
      "loss": 0.6161,
      "step": 1200
    },
    {
      "epoch": 0.5377777777777778,
      "grad_norm": 3.1892054080963135,
      "learning_rate": 2.9914285714285718e-05,
      "loss": 0.6746,
      "step": 1210
    },
    {
      "epoch": 0.5422222222222223,
      "grad_norm": 2.471684217453003,
      "learning_rate": 2.9628571428571428e-05,
      "loss": 0.7047,
      "step": 1220
    },
    {
      "epoch": 0.5466666666666666,
      "grad_norm": 1.1938486099243164,
      "learning_rate": 2.9342857142857144e-05,
      "loss": 0.7613,
      "step": 1230
    },
    {
      "epoch": 0.5511111111111111,
      "grad_norm": 4.988212585449219,
      "learning_rate": 2.905714285714286e-05,
      "loss": 0.7503,
      "step": 1240
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 6.780579566955566,
      "learning_rate": 2.8771428571428572e-05,
      "loss": 0.5375,
      "step": 1250
    },
    {
      "epoch": 0.56,
      "grad_norm": 2.5735678672790527,
      "learning_rate": 2.848571428571429e-05,
      "loss": 0.703,
      "step": 1260
    },
    {
      "epoch": 0.5644444444444444,
      "grad_norm": 3.509777069091797,
      "learning_rate": 2.8199999999999998e-05,
      "loss": 0.5273,
      "step": 1270
    },
    {
      "epoch": 0.5688888888888889,
      "grad_norm": 1.5865542888641357,
      "learning_rate": 2.7914285714285714e-05,
      "loss": 0.4821,
      "step": 1280
    },
    {
      "epoch": 0.5733333333333334,
      "grad_norm": 1.2859346866607666,
      "learning_rate": 2.762857142857143e-05,
      "loss": 0.6347,
      "step": 1290
    },
    {
      "epoch": 0.5777777777777777,
      "grad_norm": 2.0483293533325195,
      "learning_rate": 2.7342857142857142e-05,
      "loss": 0.4798,
      "step": 1300
    },
    {
      "epoch": 0.5822222222222222,
      "grad_norm": 1.2751628160476685,
      "learning_rate": 2.705714285714286e-05,
      "loss": 0.4283,
      "step": 1310
    },
    {
      "epoch": 0.5866666666666667,
      "grad_norm": 1.7461509704589844,
      "learning_rate": 2.6771428571428575e-05,
      "loss": 0.577,
      "step": 1320
    },
    {
      "epoch": 0.5911111111111111,
      "grad_norm": 2.4847190380096436,
      "learning_rate": 2.6485714285714287e-05,
      "loss": 0.6608,
      "step": 1330
    },
    {
      "epoch": 0.5955555555555555,
      "grad_norm": 7.046814441680908,
      "learning_rate": 2.6200000000000003e-05,
      "loss": 0.7501,
      "step": 1340
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.8862992525100708,
      "learning_rate": 2.5914285714285713e-05,
      "loss": 0.5347,
      "step": 1350
    },
    {
      "epoch": 0.6044444444444445,
      "grad_norm": 1.941525936126709,
      "learning_rate": 2.562857142857143e-05,
      "loss": 0.6533,
      "step": 1360
    },
    {
      "epoch": 0.6088888888888889,
      "grad_norm": 1.9246207475662231,
      "learning_rate": 2.5342857142857145e-05,
      "loss": 0.4619,
      "step": 1370
    },
    {
      "epoch": 0.6133333333333333,
      "grad_norm": 2.3284366130828857,
      "learning_rate": 2.5057142857142857e-05,
      "loss": 0.5442,
      "step": 1380
    },
    {
      "epoch": 0.6177777777777778,
      "grad_norm": 3.2495412826538086,
      "learning_rate": 2.4771428571428573e-05,
      "loss": 0.5538,
      "step": 1390
    },
    {
      "epoch": 0.6222222222222222,
      "grad_norm": 1.9646495580673218,
      "learning_rate": 2.4485714285714286e-05,
      "loss": 0.6764,
      "step": 1400
    },
    {
      "epoch": 0.6266666666666667,
      "grad_norm": 2.3741135597229004,
      "learning_rate": 2.4200000000000002e-05,
      "loss": 0.4923,
      "step": 1410
    },
    {
      "epoch": 0.6311111111111111,
      "grad_norm": 3.175323486328125,
      "learning_rate": 2.3914285714285715e-05,
      "loss": 0.5318,
      "step": 1420
    },
    {
      "epoch": 0.6355555555555555,
      "grad_norm": 4.018131732940674,
      "learning_rate": 2.362857142857143e-05,
      "loss": 0.6042,
      "step": 1430
    },
    {
      "epoch": 0.64,
      "grad_norm": 5.065487384796143,
      "learning_rate": 2.3342857142857143e-05,
      "loss": 0.4944,
      "step": 1440
    },
    {
      "epoch": 0.6444444444444445,
      "grad_norm": 4.733597278594971,
      "learning_rate": 2.3057142857142856e-05,
      "loss": 0.5209,
      "step": 1450
    },
    {
      "epoch": 0.6488888888888888,
      "grad_norm": 2.8453125953674316,
      "learning_rate": 2.2771428571428572e-05,
      "loss": 0.6981,
      "step": 1460
    },
    {
      "epoch": 0.6533333333333333,
      "grad_norm": 5.391908168792725,
      "learning_rate": 2.2485714285714288e-05,
      "loss": 0.6173,
      "step": 1470
    },
    {
      "epoch": 0.6577777777777778,
      "grad_norm": 3.4642038345336914,
      "learning_rate": 2.22e-05,
      "loss": 0.5644,
      "step": 1480
    },
    {
      "epoch": 0.6622222222222223,
      "grad_norm": 5.350740909576416,
      "learning_rate": 2.1914285714285714e-05,
      "loss": 0.6647,
      "step": 1490
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.7532992362976074,
      "learning_rate": 2.162857142857143e-05,
      "loss": 0.5771,
      "step": 1500
    },
    {
      "epoch": 0.6711111111111111,
      "grad_norm": 1.6923083066940308,
      "learning_rate": 2.1342857142857146e-05,
      "loss": 0.4541,
      "step": 1510
    },
    {
      "epoch": 0.6755555555555556,
      "grad_norm": 3.4142796993255615,
      "learning_rate": 2.105714285714286e-05,
      "loss": 0.6353,
      "step": 1520
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.2692922353744507,
      "learning_rate": 2.077142857142857e-05,
      "loss": 0.6471,
      "step": 1530
    },
    {
      "epoch": 0.6844444444444444,
      "grad_norm": 6.867006778717041,
      "learning_rate": 2.0485714285714287e-05,
      "loss": 0.5668,
      "step": 1540
    },
    {
      "epoch": 0.6888888888888889,
      "grad_norm": 1.1940428018569946,
      "learning_rate": 2.0200000000000003e-05,
      "loss": 0.7113,
      "step": 1550
    },
    {
      "epoch": 0.6933333333333334,
      "grad_norm": 0.9745553731918335,
      "learning_rate": 1.9914285714285716e-05,
      "loss": 0.6865,
      "step": 1560
    },
    {
      "epoch": 0.6977777777777778,
      "grad_norm": 1.4725788831710815,
      "learning_rate": 1.962857142857143e-05,
      "loss": 0.7809,
      "step": 1570
    },
    {
      "epoch": 0.7022222222222222,
      "grad_norm": 2.4500861167907715,
      "learning_rate": 1.9342857142857144e-05,
      "loss": 0.5468,
      "step": 1580
    },
    {
      "epoch": 0.7066666666666667,
      "grad_norm": 13.177860260009766,
      "learning_rate": 1.9057142857142857e-05,
      "loss": 0.5427,
      "step": 1590
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 5.718009948730469,
      "learning_rate": 1.8771428571428573e-05,
      "loss": 0.6003,
      "step": 1600
    },
    {
      "epoch": 0.7155555555555555,
      "grad_norm": 2.4186694622039795,
      "learning_rate": 1.8485714285714286e-05,
      "loss": 0.4843,
      "step": 1610
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.150259256362915,
      "learning_rate": 1.8200000000000002e-05,
      "loss": 0.4339,
      "step": 1620
    },
    {
      "epoch": 0.7244444444444444,
      "grad_norm": 2.5581448078155518,
      "learning_rate": 1.7914285714285715e-05,
      "loss": 0.7765,
      "step": 1630
    },
    {
      "epoch": 0.7288888888888889,
      "grad_norm": 2.4859399795532227,
      "learning_rate": 1.762857142857143e-05,
      "loss": 0.5869,
      "step": 1640
    },
    {
      "epoch": 0.7333333333333333,
      "grad_norm": 1.6887503862380981,
      "learning_rate": 1.7342857142857143e-05,
      "loss": 0.4651,
      "step": 1650
    },
    {
      "epoch": 0.7377777777777778,
      "grad_norm": 2.0939149856567383,
      "learning_rate": 1.7057142857142856e-05,
      "loss": 0.7603,
      "step": 1660
    },
    {
      "epoch": 0.7422222222222222,
      "grad_norm": 1.9502744674682617,
      "learning_rate": 1.6771428571428572e-05,
      "loss": 0.5475,
      "step": 1670
    },
    {
      "epoch": 0.7466666666666667,
      "grad_norm": 0.8679394721984863,
      "learning_rate": 1.6485714285714288e-05,
      "loss": 0.4752,
      "step": 1680
    },
    {
      "epoch": 0.7511111111111111,
      "grad_norm": 1.2583119869232178,
      "learning_rate": 1.62e-05,
      "loss": 0.5677,
      "step": 1690
    },
    {
      "epoch": 0.7555555555555555,
      "grad_norm": 2.252943754196167,
      "learning_rate": 1.5914285714285713e-05,
      "loss": 0.4604,
      "step": 1700
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.150745153427124,
      "learning_rate": 1.562857142857143e-05,
      "loss": 0.5713,
      "step": 1710
    },
    {
      "epoch": 0.7644444444444445,
      "grad_norm": 1.7483420372009277,
      "learning_rate": 1.5342857142857146e-05,
      "loss": 0.5989,
      "step": 1720
    },
    {
      "epoch": 0.7688888888888888,
      "grad_norm": 0.8522917628288269,
      "learning_rate": 1.5057142857142858e-05,
      "loss": 0.5211,
      "step": 1730
    },
    {
      "epoch": 0.7733333333333333,
      "grad_norm": 2.067723035812378,
      "learning_rate": 1.4771428571428573e-05,
      "loss": 0.6904,
      "step": 1740
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 1.3494257926940918,
      "learning_rate": 1.4485714285714285e-05,
      "loss": 0.6667,
      "step": 1750
    },
    {
      "epoch": 0.7822222222222223,
      "grad_norm": 1.3629013299942017,
      "learning_rate": 1.42e-05,
      "loss": 0.3367,
      "step": 1760
    },
    {
      "epoch": 0.7866666666666666,
      "grad_norm": 3.2202422618865967,
      "learning_rate": 1.3914285714285716e-05,
      "loss": 0.6033,
      "step": 1770
    },
    {
      "epoch": 0.7911111111111111,
      "grad_norm": 1.038692593574524,
      "learning_rate": 1.362857142857143e-05,
      "loss": 0.5305,
      "step": 1780
    },
    {
      "epoch": 0.7955555555555556,
      "grad_norm": 2.57888126373291,
      "learning_rate": 1.3342857142857143e-05,
      "loss": 0.5989,
      "step": 1790
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.118715524673462,
      "learning_rate": 1.3057142857142857e-05,
      "loss": 0.4963,
      "step": 1800
    },
    {
      "epoch": 0.8044444444444444,
      "grad_norm": 2.586069107055664,
      "learning_rate": 1.2771428571428573e-05,
      "loss": 0.6829,
      "step": 1810
    },
    {
      "epoch": 0.8088888888888889,
      "grad_norm": 2.27518630027771,
      "learning_rate": 1.2485714285714287e-05,
      "loss": 0.7121,
      "step": 1820
    },
    {
      "epoch": 0.8133333333333334,
      "grad_norm": 1.1266392469406128,
      "learning_rate": 1.22e-05,
      "loss": 0.5293,
      "step": 1830
    },
    {
      "epoch": 0.8177777777777778,
      "grad_norm": 1.4908055067062378,
      "learning_rate": 1.1914285714285716e-05,
      "loss": 0.5562,
      "step": 1840
    },
    {
      "epoch": 0.8222222222222222,
      "grad_norm": 1.4283602237701416,
      "learning_rate": 1.1628571428571429e-05,
      "loss": 0.4831,
      "step": 1850
    },
    {
      "epoch": 0.8266666666666667,
      "grad_norm": 1.0552829504013062,
      "learning_rate": 1.1342857142857143e-05,
      "loss": 0.5761,
      "step": 1860
    },
    {
      "epoch": 0.8311111111111111,
      "grad_norm": 6.45361852645874,
      "learning_rate": 1.1057142857142858e-05,
      "loss": 0.6874,
      "step": 1870
    },
    {
      "epoch": 0.8355555555555556,
      "grad_norm": 1.207909345626831,
      "learning_rate": 1.0771428571428572e-05,
      "loss": 0.6289,
      "step": 1880
    },
    {
      "epoch": 0.84,
      "grad_norm": 3.6151106357574463,
      "learning_rate": 1.0485714285714286e-05,
      "loss": 0.78,
      "step": 1890
    },
    {
      "epoch": 0.8444444444444444,
      "grad_norm": 2.560347318649292,
      "learning_rate": 1.02e-05,
      "loss": 0.7657,
      "step": 1900
    },
    {
      "epoch": 0.8488888888888889,
      "grad_norm": 1.1826066970825195,
      "learning_rate": 9.914285714285715e-06,
      "loss": 0.5419,
      "step": 1910
    },
    {
      "epoch": 0.8533333333333334,
      "grad_norm": 2.4970545768737793,
      "learning_rate": 9.628571428571428e-06,
      "loss": 0.5137,
      "step": 1920
    },
    {
      "epoch": 0.8577777777777778,
      "grad_norm": 1.6849817037582397,
      "learning_rate": 9.342857142857144e-06,
      "loss": 0.4476,
      "step": 1930
    },
    {
      "epoch": 0.8622222222222222,
      "grad_norm": 3.5557351112365723,
      "learning_rate": 9.057142857142856e-06,
      "loss": 0.6422,
      "step": 1940
    },
    {
      "epoch": 0.8666666666666667,
      "grad_norm": 1.7327756881713867,
      "learning_rate": 8.771428571428572e-06,
      "loss": 0.656,
      "step": 1950
    },
    {
      "epoch": 0.8711111111111111,
      "grad_norm": 1.3222990036010742,
      "learning_rate": 8.485714285714285e-06,
      "loss": 0.6862,
      "step": 1960
    },
    {
      "epoch": 0.8755555555555555,
      "grad_norm": 1.3732144832611084,
      "learning_rate": 8.200000000000001e-06,
      "loss": 0.5173,
      "step": 1970
    },
    {
      "epoch": 0.88,
      "grad_norm": 5.491960048675537,
      "learning_rate": 7.914285714285714e-06,
      "loss": 0.5735,
      "step": 1980
    },
    {
      "epoch": 0.8844444444444445,
      "grad_norm": 1.9830958843231201,
      "learning_rate": 7.628571428571429e-06,
      "loss": 0.4936,
      "step": 1990
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 3.4833927154541016,
      "learning_rate": 7.342857142857143e-06,
      "loss": 0.5099,
      "step": 2000
    },
    {
      "epoch": 0.8933333333333333,
      "grad_norm": 1.197811484336853,
      "learning_rate": 7.057142857142858e-06,
      "loss": 0.4711,
      "step": 2010
    },
    {
      "epoch": 0.8977777777777778,
      "grad_norm": 2.15141224861145,
      "learning_rate": 6.771428571428571e-06,
      "loss": 0.5555,
      "step": 2020
    },
    {
      "epoch": 0.9022222222222223,
      "grad_norm": 1.7254974842071533,
      "learning_rate": 6.485714285714286e-06,
      "loss": 0.5582,
      "step": 2030
    },
    {
      "epoch": 0.9066666666666666,
      "grad_norm": 9.579049110412598,
      "learning_rate": 6.2e-06,
      "loss": 0.4571,
      "step": 2040
    },
    {
      "epoch": 0.9111111111111111,
      "grad_norm": 2.882373094558716,
      "learning_rate": 5.914285714285714e-06,
      "loss": 0.6287,
      "step": 2050
    },
    {
      "epoch": 0.9155555555555556,
      "grad_norm": 5.913229942321777,
      "learning_rate": 5.628571428571429e-06,
      "loss": 0.5779,
      "step": 2060
    },
    {
      "epoch": 0.92,
      "grad_norm": 4.352343559265137,
      "learning_rate": 5.342857142857143e-06,
      "loss": 0.5565,
      "step": 2070
    },
    {
      "epoch": 0.9244444444444444,
      "grad_norm": 2.8510098457336426,
      "learning_rate": 5.057142857142857e-06,
      "loss": 0.5999,
      "step": 2080
    },
    {
      "epoch": 0.9288888888888889,
      "grad_norm": 2.46824049949646,
      "learning_rate": 4.771428571428572e-06,
      "loss": 0.4779,
      "step": 2090
    },
    {
      "epoch": 0.9333333333333333,
      "grad_norm": 1.5886437892913818,
      "learning_rate": 4.485714285714286e-06,
      "loss": 0.5774,
      "step": 2100
    },
    {
      "epoch": 0.9377777777777778,
      "grad_norm": 1.5184223651885986,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.4278,
      "step": 2110
    },
    {
      "epoch": 0.9422222222222222,
      "grad_norm": 4.225131511688232,
      "learning_rate": 3.914285714285715e-06,
      "loss": 0.6479,
      "step": 2120
    },
    {
      "epoch": 0.9466666666666667,
      "grad_norm": 2.665025234222412,
      "learning_rate": 3.6285714285714283e-06,
      "loss": 0.4907,
      "step": 2130
    },
    {
      "epoch": 0.9511111111111111,
      "grad_norm": 1.4222490787506104,
      "learning_rate": 3.3428571428571427e-06,
      "loss": 0.4263,
      "step": 2140
    },
    {
      "epoch": 0.9555555555555556,
      "grad_norm": 1.8098088502883911,
      "learning_rate": 3.0571428571428575e-06,
      "loss": 0.512,
      "step": 2150
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.5185695886611938,
      "learning_rate": 2.771428571428572e-06,
      "loss": 0.4707,
      "step": 2160
    },
    {
      "epoch": 0.9644444444444444,
      "grad_norm": 3.510244846343994,
      "learning_rate": 2.4857142857142858e-06,
      "loss": 0.5985,
      "step": 2170
    },
    {
      "epoch": 0.9688888888888889,
      "grad_norm": 2.4858155250549316,
      "learning_rate": 2.2e-06,
      "loss": 0.5148,
      "step": 2180
    },
    {
      "epoch": 0.9733333333333334,
      "grad_norm": 2.5503909587860107,
      "learning_rate": 1.9142857142857145e-06,
      "loss": 0.5905,
      "step": 2190
    },
    {
      "epoch": 0.9777777777777777,
      "grad_norm": 1.640555739402771,
      "learning_rate": 1.6285714285714286e-06,
      "loss": 0.4983,
      "step": 2200
    },
    {
      "epoch": 0.9822222222222222,
      "grad_norm": 3.124906539916992,
      "learning_rate": 1.342857142857143e-06,
      "loss": 0.4201,
      "step": 2210
    },
    {
      "epoch": 0.9866666666666667,
      "grad_norm": 2.2014262676239014,
      "learning_rate": 1.0571428571428573e-06,
      "loss": 0.5991,
      "step": 2220
    },
    {
      "epoch": 0.9911111111111112,
      "grad_norm": 1.412158727645874,
      "learning_rate": 7.714285714285715e-07,
      "loss": 0.5062,
      "step": 2230
    },
    {
      "epoch": 0.9955555555555555,
      "grad_norm": 2.74906325340271,
      "learning_rate": 4.857142857142857e-07,
      "loss": 0.6499,
      "step": 2240
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.1791956424713135,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.521,
      "step": 2250
    }
  ],
  "logging_steps": 10,
  "max_steps": 2250,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 5000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.20270921367552e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}