{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 32.142857142857146,
  "global_step": 2700,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.12, "learning_rate": 2.9999999999999997e-05, "loss": 1.3126, "step": 10 },
    { "epoch": 0.24, "learning_rate": 5.9999999999999995e-05, "loss": 1.3439, "step": 20 },
    { "epoch": 0.36, "learning_rate": 8.999999999999999e-05, "loss": 1.3104, "step": 30 },
    { "epoch": 0.48, "learning_rate": 0.00011999999999999999, "loss": 1.2288, "step": 40 },
    { "epoch": 0.6, "learning_rate": 0.00015, "loss": 1.1731, "step": 50 },
    { "epoch": 0.71, "learning_rate": 0.00017999999999999998, "loss": 1.1577, "step": 60 },
    { "epoch": 0.83, "learning_rate": 0.00020999999999999998, "loss": 1.1174, "step": 70 },
    { "epoch": 0.95, "learning_rate": 0.00023999999999999998, "loss": 1.0971, "step": 80 },
    { "epoch": 1.07, "learning_rate": 0.00027, "loss": 1.0506, "step": 90 },
    { "epoch": 1.19, "learning_rate": 0.0003, "loss": 1.0976, "step": 100 },
    { "epoch": 1.31, "learning_rate": 0.0002996385542168674, "loss": 1.125, "step": 110 },
    { "epoch": 1.43, "learning_rate": 0.00029927710843373495, "loss": 1.0538, "step": 120 },
    { "epoch": 1.55, "learning_rate": 0.0002989156626506024, "loss": 1.1258, "step": 130 },
    { "epoch": 1.67, "learning_rate": 0.00029855421686746987, "loss": 1.0822, "step": 140 },
    { "epoch": 1.79, "learning_rate": 0.00029819277108433735, "loss": 1.1109, "step": 150 },
    { "epoch": 1.9, "learning_rate": 0.0002978313253012048, "loss": 1.0948, "step": 160 },
    { "epoch": 2.02, "learning_rate": 0.00029746987951807227, "loss": 1.0414, "step": 170 },
    { "epoch": 2.14, "learning_rate": 0.00029710843373493976, "loss": 1.0601, "step": 180 },
    { "epoch": 2.26, "learning_rate": 0.0002967469879518072, "loss": 1.0577, "step": 190 },
    { "epoch": 2.38, "learning_rate": 0.0002963855421686747, "loss": 1.0807, "step": 200 },
    { "epoch": 2.5, "learning_rate": 0.00029602409638554216, "loss": 1.1074, "step": 210 },
    { "epoch": 2.62, "learning_rate": 0.0002956626506024096, "loss": 1.069, "step": 220 },
    { "epoch": 2.74, "learning_rate": 0.0002953012048192771, "loss": 1.0588, "step": 230 },
    { "epoch": 2.86, "learning_rate": 0.00029493975903614457, "loss": 1.1084, "step": 240 },
    { "epoch": 2.98, "learning_rate": 0.000294578313253012, "loss": 1.0767, "step": 250 },
    { "epoch": 3.1, "learning_rate": 0.0002942168674698795, "loss": 1.0462, "step": 260 },
    { "epoch": 3.21, "learning_rate": 0.000293855421686747, "loss": 1.0666, "step": 270 },
    { "epoch": 3.33, "learning_rate": 0.0002934939759036144, "loss": 1.0284, "step": 280 },
    { "epoch": 3.45, "learning_rate": 0.0002931325301204819, "loss": 1.0777, "step": 290 },
    { "epoch": 3.57, "learning_rate": 0.0002927710843373494, "loss": 1.0611, "step": 300 },
    { "epoch": 3.69, "learning_rate": 0.00029240963855421687, "loss": 1.0645, "step": 310 },
    { "epoch": 3.81, "learning_rate": 0.0002920481927710843, "loss": 1.0233, "step": 320 },
    { "epoch": 3.93, "learning_rate": 0.0002916867469879518, "loss": 1.0475, "step": 330 },
    { "epoch": 4.05, "learning_rate": 0.00029132530120481927, "loss": 1.0703, "step": 340 },
    { "epoch": 4.17, "learning_rate": 0.0002909638554216867, "loss": 1.0487, "step": 350 },
    { "epoch": 4.29, "learning_rate": 0.0002906024096385542, "loss": 1.0293, "step": 360 },
    { "epoch": 4.4, "learning_rate": 0.0002902409638554217, "loss": 1.037, "step": 370 },
    { "epoch": 4.52, "learning_rate": 0.0002898795180722891, "loss": 1.0678, "step": 380 },
    { "epoch": 4.64, "learning_rate": 0.0002895180722891566, "loss": 1.0603, "step": 390 },
    { "epoch": 4.76, "learning_rate": 0.0002891566265060241, "loss": 1.0428, "step": 400 },
    { "epoch": 4.88, "learning_rate": 0.0002887951807228915, "loss": 1.035, "step": 410 },
    { "epoch": 5.0, "learning_rate": 0.000288433734939759, "loss": 1.0401, "step": 420 },
    { "epoch": 5.12, "learning_rate": 0.0002880722891566265, "loss": 1.0475, "step": 430 },
    { "epoch": 5.24, "learning_rate": 0.0002877108433734939, "loss": 1.0064, "step": 440 },
    { "epoch": 5.36, "learning_rate": 0.0002873493975903614, "loss": 1.0432, "step": 450 },
    { "epoch": 5.48, "learning_rate": 0.0002869879518072289, "loss": 1.0251, "step": 460 },
    { "epoch": 5.6, "learning_rate": 0.0002866265060240963, "loss": 1.0731, "step": 470 },
    { "epoch": 5.71, "learning_rate": 0.0002862650602409638, "loss": 1.0108, "step": 480 },
    { "epoch": 5.83, "learning_rate": 0.0002859036144578313, "loss": 1.0032, "step": 490 },
    { "epoch": 5.95, "learning_rate": 0.00028554216867469873, "loss": 1.0084, "step": 500 },
    { "epoch": 6.07, "learning_rate": 0.00028518072289156627, "loss": 1.011, "step": 510 },
    { "epoch": 6.19, "learning_rate": 0.0002848192771084337, "loss": 1.0214, "step": 520 },
    { "epoch": 6.31, "learning_rate": 0.0002844578313253012, "loss": 1.0117, "step": 530 },
    { "epoch": 6.43, "learning_rate": 0.0002840963855421687, "loss": 1.0065, "step": 540 },
    { "epoch": 6.55, "learning_rate": 0.0002837349397590361, "loss": 1.0052, "step": 550 },
    { "epoch": 6.67, "learning_rate": 0.0002833734939759036, "loss": 1.0286, "step": 560 },
    { "epoch": 6.79, "learning_rate": 0.0002830120481927711, "loss": 0.9932, "step": 570 },
    { "epoch": 6.9, "learning_rate": 0.0002826506024096385, "loss": 1.0117, "step": 580 },
    { "epoch": 7.02, "learning_rate": 0.000282289156626506, "loss": 0.9999, "step": 590 },
    { "epoch": 7.14, "learning_rate": 0.0002819277108433735, "loss": 0.992, "step": 600 },
    { "epoch": 7.26, "learning_rate": 0.0002815662650602409, "loss": 1.0053, "step": 610 },
    { "epoch": 7.38, "learning_rate": 0.0002812048192771084, "loss": 0.9967, "step": 620 },
    { "epoch": 7.5, "learning_rate": 0.0002808433734939759, "loss": 0.9603, "step": 630 },
    { "epoch": 7.62, "learning_rate": 0.0002804819277108433, "loss": 0.9778, "step": 640 },
    { "epoch": 7.74, "learning_rate": 0.0002801204819277108, "loss": 1.0112, "step": 650 },
    { "epoch": 7.86, "learning_rate": 0.0002797590361445783, "loss": 0.9785, "step": 660 },
    { "epoch": 7.98, "learning_rate": 0.00027939759036144573, "loss": 0.9976, "step": 670 },
    { "epoch": 8.1, "learning_rate": 0.0002790361445783132, "loss": 0.9728, "step": 680 },
    { "epoch": 8.21, "learning_rate": 0.0002786746987951807, "loss": 0.9541, "step": 690 },
    { "epoch": 8.33, "learning_rate": 0.0002783132530120482, "loss": 0.9576, "step": 700 },
    { "epoch": 8.45, "learning_rate": 0.0002779518072289156, "loss": 0.9821, "step": 710 },
    { "epoch": 8.57, "learning_rate": 0.0002775903614457831, "loss": 0.9833, "step": 720 },
    { "epoch": 8.69, "learning_rate": 0.0002772289156626506, "loss": 0.9733, "step": 730 },
    { "epoch": 8.81, "learning_rate": 0.00027686746987951803, "loss": 0.9662, "step": 740 },
    { "epoch": 8.93, "learning_rate": 0.0002765060240963855, "loss": 0.9954, "step": 750 },
    { "epoch": 9.05, "learning_rate": 0.000276144578313253, "loss": 0.9656, "step": 760 },
    { "epoch": 9.17, "learning_rate": 0.00027578313253012044, "loss": 0.9462, "step": 770 },
    { "epoch": 9.29, "learning_rate": 0.0002754216867469879, "loss": 0.8893, "step": 780 },
    { "epoch": 9.4, "learning_rate": 0.0002750602409638554, "loss": 0.967, "step": 790 },
    { "epoch": 9.52, "learning_rate": 0.00027469879518072284, "loss": 0.8942, "step": 800 },
    { "epoch": 9.64, "learning_rate": 0.00027433734939759033, "loss": 0.9385, "step": 810 },
    { "epoch": 9.76, "learning_rate": 0.0002739759036144578, "loss": 0.9863, "step": 820 },
    { "epoch": 9.88, "learning_rate": 0.00027361445783132525, "loss": 0.945, "step": 830 },
    { "epoch": 10.0, "learning_rate": 0.00027325301204819273, "loss": 0.9697, "step": 840 },
    { "epoch": 10.12, "learning_rate": 0.0002728915662650602, "loss": 0.9053, "step": 850 },
    { "epoch": 10.24, "learning_rate": 0.00027253012048192765, "loss": 0.916, "step": 860 },
    { "epoch": 10.36, "learning_rate": 0.00027216867469879514, "loss": 0.9316, "step": 870 },
    { "epoch": 10.48, "learning_rate": 0.0002718072289156626, "loss": 0.9116, "step": 880 },
    { "epoch": 10.6, "learning_rate": 0.00027144578313253006, "loss": 0.8975, "step": 890 },
    { "epoch": 10.71, "learning_rate": 0.0002710843373493976, "loss": 0.9206, "step": 900 },
    { "epoch": 10.83, "learning_rate": 0.00027072289156626503, "loss": 0.9543, "step": 910 },
    { "epoch": 10.95, "learning_rate": 0.0002703614457831325, "loss": 0.8909, "step": 920 },
    { "epoch": 11.07, "learning_rate": 0.00027, "loss": 0.8982, "step": 930 },
    { "epoch": 11.19, "learning_rate": 0.00026963855421686744, "loss": 0.9051, "step": 940 },
    { "epoch": 11.31, "learning_rate": 0.0002692771084337349, "loss": 0.8927, "step": 950 },
    { "epoch": 11.43, "learning_rate": 0.0002689156626506024, "loss": 0.8364, "step": 960 },
    { "epoch": 11.55, "learning_rate": 0.00026855421686746984, "loss": 0.9083, "step": 970 },
    { "epoch": 11.67, "learning_rate": 0.00026819277108433733, "loss": 0.9085, "step": 980 },
    { "epoch": 11.79, "learning_rate": 0.0002678313253012048, "loss": 0.8402, "step": 990 },
    { "epoch": 11.9, "learning_rate": 0.00026746987951807225, "loss": 0.8964, "step": 1000 },
    { "epoch": 12.02, "learning_rate": 0.00026710843373493973, "loss": 0.8608, "step": 1010 },
    { "epoch": 12.14, "learning_rate": 0.0002667469879518072, "loss": 0.8442, "step": 1020 },
    { "epoch": 12.26, "learning_rate": 0.00026638554216867465, "loss": 0.8686, "step": 1030 },
    { "epoch": 12.38, "learning_rate": 0.00026602409638554214, "loss": 0.8222, "step": 1040 },
    { "epoch": 12.5, "learning_rate": 0.0002656626506024096, "loss": 0.8634, "step": 1050 },
    { "epoch": 12.62, "learning_rate": 0.00026530120481927706, "loss": 0.8106, "step": 1060 },
    { "epoch": 12.74, "learning_rate": 0.00026493975903614454, "loss": 0.8377, "step": 1070 },
    { "epoch": 12.86, "learning_rate": 0.00026457831325301203, "loss": 0.8477, "step": 1080 },
    { "epoch": 12.98, "learning_rate": 0.00026421686746987946, "loss": 0.8738, "step": 1090 },
    { "epoch": 13.1, "learning_rate": 0.00026385542168674695, "loss": 0.8378, "step": 1100 },
    { "epoch": 13.21, "learning_rate": 0.00026349397590361444, "loss": 0.7851, "step": 1110 },
    { "epoch": 13.33, "learning_rate": 0.0002631325301204819, "loss": 0.8583, "step": 1120 },
    { "epoch": 13.45, "learning_rate": 0.00026277108433734936, "loss": 0.7784, "step": 1130 },
    { "epoch": 13.57, "learning_rate": 0.00026240963855421684, "loss": 0.8323, "step": 1140 },
    { "epoch": 13.69, "learning_rate": 0.00026204819277108433, "loss": 0.7988, "step": 1150 },
    { "epoch": 13.81, "learning_rate": 0.0002616867469879518, "loss": 0.7671, "step": 1160 },
    { "epoch": 13.93, "learning_rate": 0.00026132530120481925, "loss": 0.8336, "step": 1170 },
    { "epoch": 14.05, "learning_rate": 0.00026096385542168673, "loss": 0.7438, "step": 1180 },
    { "epoch": 14.17, "learning_rate": 0.0002606024096385542, "loss": 0.7234, "step": 1190 },
    { "epoch": 14.29, "learning_rate": 0.00026024096385542165, "loss": 0.7257, "step": 1200 },
    { "epoch": 14.4, "learning_rate": 0.00025987951807228914, "loss": 0.7833, "step": 1210 },
    { "epoch": 14.52, "learning_rate": 0.0002595180722891566, "loss": 0.77, "step": 1220 },
    { "epoch": 14.64, "learning_rate": 0.00025915662650602406, "loss": 0.7756, "step": 1230 },
    { "epoch": 14.76, "learning_rate": 0.00025879518072289154, "loss": 0.7867, "step": 1240 },
    { "epoch": 14.88, "learning_rate": 0.00025843373493975903, "loss": 0.7665, "step": 1250 },
    { "epoch": 15.0, "learning_rate": 0.00025807228915662646, "loss": 0.7284, "step": 1260 },
    { "epoch": 15.12, "learning_rate": 0.00025771084337349395, "loss": 0.6533, "step": 1270 },
    { "epoch": 15.24, "learning_rate": 0.00025734939759036144, "loss": 0.712, "step": 1280 },
    { "epoch": 15.36, "learning_rate": 0.0002569879518072289, "loss": 0.7408, "step": 1290 },
    { "epoch": 15.48, "learning_rate": 0.00025662650602409636, "loss": 0.6888, "step": 1300 },
    { "epoch": 15.6, "learning_rate": 0.00025626506024096384, "loss": 0.7257, "step": 1310 },
    { "epoch": 15.71, "learning_rate": 0.00025590361445783133, "loss": 0.687, "step": 1320 },
    { "epoch": 15.83, "learning_rate": 0.00025554216867469876, "loss": 0.7182, "step": 1330 },
    { "epoch": 15.95, "learning_rate": 0.00025518072289156625, "loss": 0.7337, "step": 1340 },
    { "epoch": 16.07, "learning_rate": 0.00025481927710843373, "loss": 0.6581, "step": 1350 },
    { "epoch": 16.19, "learning_rate": 0.00025445783132530117, "loss": 0.6839, "step": 1360 },
    { "epoch": 16.31, "learning_rate": 0.00025409638554216865, "loss": 0.6336, "step": 1370 },
    { "epoch": 16.43, "learning_rate": 0.00025373493975903614, "loss": 0.6679, "step": 1380 },
    { "epoch": 16.55, "learning_rate": 0.00025337349397590357, "loss": 0.632, "step": 1390 },
    { "epoch": 16.67, "learning_rate": 0.00025301204819277106, "loss": 0.6286, "step": 1400 },
    { "epoch": 16.79, "learning_rate": 0.00025265060240963855, "loss": 0.7263, "step": 1410 },
    { "epoch": 16.9, "learning_rate": 0.000252289156626506, "loss": 0.6465, "step": 1420 },
    { "epoch": 17.02, "learning_rate": 0.00025192771084337346, "loss": 0.617, "step": 1430 },
    { "epoch": 17.14, "learning_rate": 0.00025156626506024095, "loss": 0.599, "step": 1440 },
    { "epoch": 17.26, "learning_rate": 0.0002512048192771084, "loss": 0.6203, "step": 1450 },
    { "epoch": 17.38, "learning_rate": 0.00025084337349397587, "loss": 0.5545, "step": 1460 },
    { "epoch": 17.5, "learning_rate": 0.00025048192771084336, "loss": 0.6239, "step": 1470 },
    { "epoch": 17.62, "learning_rate": 0.0002501204819277108, "loss": 0.5739, "step": 1480 },
    { "epoch": 17.74, "learning_rate": 0.00024975903614457833, "loss": 0.6289, "step": 1490 },
    { "epoch": 17.86, "learning_rate": 0.00024939759036144576, "loss": 0.5728, "step": 1500 },
    { "epoch": 17.98, "learning_rate": 0.00024903614457831325, "loss": 0.6469, "step": 1510 },
    { "epoch": 18.1, "learning_rate": 0.00024867469879518074, "loss": 0.5318, "step": 1520 },
    { "epoch": 18.21, "learning_rate": 0.00024831325301204817, "loss": 0.4846, "step": 1530 },
    { "epoch": 18.33, "learning_rate": 0.00024795180722891565, "loss": 0.5352, "step": 1540 },
    { "epoch": 18.45, "learning_rate": 0.00024759036144578314, "loss": 0.5341, "step": 1550 },
    { "epoch": 18.57, "learning_rate": 0.0002472289156626506, "loss": 0.57, "step": 1560 },
    { "epoch": 18.69, "learning_rate": 0.00024686746987951806, "loss": 0.5228, "step": 1570 },
    { "epoch": 18.81, "learning_rate": 0.00024650602409638555, "loss": 0.5857, "step": 1580 },
    { "epoch": 18.93, "learning_rate": 0.000246144578313253, "loss": 0.5219, "step": 1590 },
    { "epoch": 19.05, "learning_rate": 0.00024578313253012046, "loss": 0.5233, "step": 1600 },
    { "epoch": 19.17, "learning_rate": 0.00024542168674698795, "loss": 0.4589, "step": 1610 },
    { "epoch": 19.29, "learning_rate": 0.0002450602409638554, "loss": 0.5354, "step": 1620 },
    { "epoch": 19.4, "learning_rate": 0.00024469879518072287, "loss": 0.4662, "step": 1630 },
    { "epoch": 19.52, "learning_rate": 0.00024433734939759036, "loss": 0.465, "step": 1640 },
    { "epoch": 19.64, "learning_rate": 0.00024397590361445782, "loss": 0.4602, "step": 1650 },
    { "epoch": 19.76, "learning_rate": 0.00024361445783132528, "loss": 0.4851, "step": 1660 },
    { "epoch": 19.88, "learning_rate": 0.00024325301204819276, "loss": 0.495, "step": 1670 },
    { "epoch": 20.0, "learning_rate": 0.00024289156626506022, "loss": 0.4556, "step": 1680 },
    { "epoch": 20.12, "learning_rate": 0.00024253012048192768, "loss": 0.3657, "step": 1690 },
    { "epoch": 20.24, "learning_rate": 0.00024216867469879517, "loss": 0.4285, "step": 1700 },
    { "epoch": 20.36, "learning_rate": 0.00024180722891566263, "loss": 0.3793, "step": 1710 },
    { "epoch": 20.48, "learning_rate": 0.0002414457831325301, "loss": 0.4846, "step": 1720 },
    { "epoch": 20.6, "learning_rate": 0.00024108433734939757, "loss": 0.4543, "step": 1730 },
    { "epoch": 20.71, "learning_rate": 0.00024072289156626503, "loss": 0.4694, "step": 1740 },
    { "epoch": 20.83, "learning_rate": 0.0002403614457831325, "loss": 0.3779, "step": 1750 },
    { "epoch": 20.95, "learning_rate": 0.00023999999999999998, "loss": 0.431, "step": 1760 },
    { "epoch": 21.07, "learning_rate": 0.00023963855421686744, "loss": 0.367, "step": 1770 },
    { "epoch": 21.19, "learning_rate": 0.0002392771084337349, "loss": 0.4387, "step": 1780 },
    { "epoch": 21.31, "learning_rate": 0.0002389156626506024, "loss": 0.3326, "step": 1790 },
    { "epoch": 21.43, "learning_rate": 0.00023855421686746987, "loss": 0.3899, "step": 1800 },
    { "epoch": 21.55, "learning_rate": 0.00023819277108433733, "loss": 0.3869, "step": 1810 },
    { "epoch": 21.67, "learning_rate": 0.00023783132530120482, "loss": 0.3685, "step": 1820 },
    { "epoch": 21.79, "learning_rate": 0.00023746987951807228, "loss": 0.3711, "step": 1830 },
    { "epoch": 21.9, "learning_rate": 0.00023710843373493974, "loss": 0.3748, "step": 1840 },
    { "epoch": 22.02, "learning_rate": 0.00023674698795180722, "loss": 0.3345, "step": 1850 },
    { "epoch": 22.14, "learning_rate": 0.00023638554216867468, "loss": 0.3128, "step": 1860 },
    { "epoch": 22.26, "learning_rate": 0.00023602409638554214, "loss": 0.348, "step": 1870 },
    { "epoch": 22.38, "learning_rate": 0.00023566265060240963, "loss": 0.3506, "step": 1880 },
    { "epoch": 22.5, "learning_rate": 0.0002353012048192771, "loss": 0.289, "step": 1890 },
    { "epoch": 22.62, "learning_rate": 0.00023493975903614455, "loss": 0.3008, "step": 1900 },
    { "epoch": 22.74, "learning_rate": 0.00023457831325301203, "loss": 0.3372, "step": 1910 },
    { "epoch": 22.86, "learning_rate": 0.0002342168674698795, "loss": 0.3642, "step": 1920 },
    { "epoch": 22.98, "learning_rate": 0.00023385542168674695, "loss": 0.3117, "step": 1930 },
    { "epoch": 23.1, "learning_rate": 0.00023349397590361444, "loss": 0.3003, "step": 1940 },
    { "epoch": 23.21, "learning_rate": 0.0002331325301204819, "loss": 0.2342, "step": 1950 },
    { "epoch": 23.33, "learning_rate": 0.00023277108433734936, "loss": 0.3119, "step": 1960 },
    { "epoch": 23.45, "learning_rate": 0.00023240963855421687, "loss": 0.2618, "step": 1970 },
    { "epoch": 23.57, "learning_rate": 0.00023204819277108433, "loss": 0.3138, "step": 1980 },
    { "epoch": 23.69, "learning_rate": 0.00023168674698795176, "loss": 0.2934, "step": 1990 },
    { "epoch": 23.81, "learning_rate": 0.00023132530120481928, "loss": 0.2713, "step": 2000 },
    { "epoch": 23.93, "learning_rate": 0.00023096385542168674, "loss": 0.3095, "step": 2010 },
    { "epoch": 24.05, "learning_rate": 0.0002306024096385542, "loss": 0.2688, "step": 2020 },
    { "epoch": 24.17, "learning_rate": 0.00023024096385542168, "loss": 0.2309, "step": 2030 },
    { "epoch": 24.29, "learning_rate": 0.00022987951807228914, "loss": 0.2614, "step": 2040 },
    { "epoch": 24.4, "learning_rate": 0.0002295180722891566, "loss": 0.2264, "step": 2050 },
    { "epoch": 24.52, "learning_rate": 0.0002291566265060241, "loss": 0.2506, "step": 2060 },
    { "epoch": 24.64, "learning_rate": 0.00022879518072289155, "loss": 0.2241, "step": 2070 },
    { "epoch": 24.76, "learning_rate": 0.000228433734939759, "loss": 0.2882, "step": 2080 },
    { "epoch": 24.88, "learning_rate": 0.0002280722891566265, "loss": 0.2868, "step": 2090 },
    { "epoch": 25.0, "learning_rate": 0.00022771084337349395, "loss": 0.284, "step": 2100 },
    { "epoch": 25.12, "learning_rate": 0.0002273493975903614, "loss": 0.2088, "step": 2110 },
    { "epoch": 25.24, "learning_rate": 0.0002269879518072289, "loss": 0.1954, "step": 2120 },
    { "epoch": 25.36, "learning_rate": 0.00022662650602409636, "loss": 0.2121, "step": 2130 },
    { "epoch": 25.48, "learning_rate": 0.00022626506024096382, "loss": 0.2345, "step": 2140 },
    { "epoch": 25.6, "learning_rate": 0.0002259036144578313, "loss": 0.221, "step": 2150 },
    { "epoch": 25.71, "learning_rate": 0.00022554216867469876, "loss": 0.2258, "step": 2160 },
    { "epoch": 25.83, "learning_rate": 0.00022518072289156622, "loss": 0.2467, "step": 2170 },
    { "epoch": 25.95, "learning_rate": 0.00022481927710843374, "loss": 0.2216, "step": 2180 },
    { "epoch": 26.07, "learning_rate": 0.0002244578313253012, "loss": 0.2106, "step": 2190 },
    { "epoch": 26.19, "learning_rate": 0.00022409638554216866, "loss": 0.2242, "step": 2200 },
    { "epoch": 26.31, "learning_rate": 0.00022373493975903614, "loss": 0.1702, "step": 2210 },
    { "epoch": 26.43, "learning_rate": 0.0002233734939759036, "loss": 0.2117, "step": 2220 },
    { "epoch": 26.55, "learning_rate": 0.00022301204819277106, "loss": 0.1841, "step": 2230 },
    { "epoch": 26.67, "learning_rate": 0.00022265060240963855, "loss": 0.1786, "step": 2240 },
    { "epoch": 26.79, "learning_rate": 0.000222289156626506, "loss": 0.201, "step": 2250 },
    { "epoch": 26.9, "learning_rate": 0.00022192771084337347, "loss": 0.1915, "step": 2260 },
    { "epoch": 27.02, "learning_rate": 0.00022156626506024095, "loss": 0.1973, "step": 2270 },
    { "epoch": 27.14, "learning_rate": 0.0002212048192771084, "loss": 0.1649, "step": 2280 },
    { "epoch": 27.26, "learning_rate": 0.00022084337349397587, "loss": 0.1665, "step": 2290 },
    { "epoch": 27.38, "learning_rate": 0.00022048192771084336, "loss": 0.2129, "step": 2300 },
    { "epoch": 27.5, "learning_rate": 0.00022012048192771082, "loss": 0.194, "step": 2310 },
    { "epoch": 27.62, "learning_rate": 0.00021975903614457828, "loss": 0.1684, "step": 2320 },
    { "epoch": 27.74, "learning_rate": 0.00021939759036144576, "loss": 0.1312, "step": 2330 },
    { "epoch": 27.86, "learning_rate": 0.00021903614457831322, "loss": 0.1711, "step": 2340 },
    { "epoch": 27.98, "learning_rate": 0.00021867469879518068, "loss": 0.1863, "step": 2350 },
    { "epoch": 28.1, "learning_rate": 0.0002183132530120482, "loss": 0.1425, "step": 2360 },
    { "epoch": 28.21, "learning_rate": 0.00021795180722891563, "loss": 0.1293, "step": 2370 },
    { "epoch": 28.33, "learning_rate": 0.0002175903614457831, "loss": 0.1406, "step": 2380 },
    { "epoch": 28.45, "learning_rate": 0.0002172289156626506, "loss": 0.1583, "step": 2390 },
    { "epoch": 28.57, "learning_rate": 0.00021686746987951806, "loss": 0.1706, "step": 2400 },
    { "epoch": 28.69, "learning_rate": 0.00021650602409638552, "loss": 0.1629, "step": 2410 },
    { "epoch": 28.81, "learning_rate": 0.000216144578313253, "loss": 0.1788, "step": 2420 },
    { "epoch": 28.93, "learning_rate": 0.00021578313253012047, "loss": 0.1618, "step": 2430 },
    { "epoch": 29.05, "learning_rate": 0.00021542168674698793, "loss": 0.1391, "step": 2440 },
    { "epoch": 29.17, "learning_rate": 0.00021506024096385541, "loss": 0.1271, "step": 2450 },
    { "epoch": 29.29, "learning_rate": 0.00021469879518072287, "loss": 0.1312, "step": 2460 },
    { "epoch": 29.4, "learning_rate": 0.00021433734939759033, "loss": 0.1401, "step": 2470 },
    { "epoch": 29.52, "learning_rate": 0.00021397590361445782, "loss": 0.1222, "step": 2480 },
    { "epoch": 29.64, "learning_rate": 0.00021361445783132528, "loss": 0.1124, "step": 2490 },
    { "epoch": 29.76, "learning_rate": 0.00021325301204819274, "loss": 0.1796, "step": 2500 },
    { "epoch": 29.88, "learning_rate": 0.00021289156626506022, "loss": 0.1502, "step": 2510 },
    { "epoch": 30.0, "learning_rate": 0.00021253012048192768, "loss": 0.137, "step": 2520 },
    { "epoch": 30.12, "learning_rate": 0.00021216867469879514, "loss": 0.1021, "step": 2530 },
    { "epoch": 30.24, "learning_rate": 0.00021180722891566263, "loss": 0.129, "step": 2540 },
    { "epoch": 30.36, "learning_rate": 0.0002114457831325301, "loss": 0.1119, "step": 2550 },
    { "epoch": 30.48, "learning_rate": 2.3802395209580838e-05, "loss": 0.1135, "step": 2560 },
    { "epoch": 30.6, "learning_rate": 2.2679640718562872e-05, "loss": 0.1224, "step": 2570 },
    { "epoch": 30.71, "learning_rate": 2.1556886227544907e-05, "loss": 0.1004, "step": 2580 },
    { "epoch": 30.83, "learning_rate": 2.043413173652694e-05, "loss": 0.1048, "step": 2590 },
    { "epoch": 30.95, "learning_rate": 1.931137724550898e-05, "loss": 0.0978, "step": 2600 },
    { "epoch": 31.07, "learning_rate": 1.8188622754491017e-05, "loss": 0.0981, "step": 2610 },
    { "epoch": 31.19, "learning_rate": 1.7065868263473052e-05, "loss": 0.0979, "step": 2620 },
    { "epoch": 31.31, "learning_rate": 1.594311377245509e-05, "loss": 0.103, "step": 2630 },
    { "epoch": 31.43, "learning_rate": 1.4820359281437124e-05, "loss": 0.1065, "step": 2640 },
    { "epoch": 31.55, "learning_rate": 1.369760479041916e-05, "loss": 0.0912, "step": 2650 },
    { "epoch": 31.67, "learning_rate": 1.2574850299401195e-05, "loss": 0.0927, "step": 2660 },
    { "epoch": 31.79, "learning_rate": 1.1452095808383233e-05, "loss": 0.0942, "step": 2670 },
    { "epoch": 31.9, "learning_rate": 1.032934131736527e-05, "loss": 0.0896, "step": 2680 },
    { "epoch": 32.02, "learning_rate": 9.206586826347304e-06, "loss": 0.0965, "step": 2690 },
    { "epoch": 32.14, "learning_rate": 8.08383233532934e-06, "loss": 0.0943, "step": 2700 }
  ],
  "max_steps": 2772,
  "num_train_epochs": 33,
  "total_flos": 1.5684921338658816e+18,
  "trial_name": null,
  "trial_params": null
}