{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 666.6666666666666,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 6.67,
      "learning_rate": 4.998766400914329e-05,
      "loss": 1.0516,
      "step": 10
    },
    {
      "epoch": 13.33,
      "learning_rate": 4.995066821070679e-05,
      "loss": 1.0167,
      "step": 20
    },
    {
      "epoch": 20.0,
      "learning_rate": 4.9889049115077005e-05,
      "loss": 0.9676,
      "step": 30
    },
    {
      "epoch": 26.67,
      "learning_rate": 4.980286753286195e-05,
      "loss": 0.888,
      "step": 40
    },
    {
      "epoch": 33.33,
      "learning_rate": 4.9692208514878444e-05,
      "loss": 0.798,
      "step": 50
    },
    {
      "epoch": 40.0,
      "learning_rate": 4.9557181268217227e-05,
      "loss": 0.6272,
      "step": 60
    },
    {
      "epoch": 46.67,
      "learning_rate": 4.939791904846869e-05,
      "loss": 0.5097,
      "step": 70
    },
    {
      "epoch": 53.33,
      "learning_rate": 4.9214579028215776e-05,
      "loss": 0.3408,
      "step": 80
    },
    {
      "epoch": 60.0,
      "learning_rate": 4.900734214192358e-05,
      "loss": 0.1979,
      "step": 90
    },
    {
      "epoch": 66.67,
      "learning_rate": 4.877641290737884e-05,
      "loss": 0.109,
      "step": 100
    },
    {
      "epoch": 73.33,
      "learning_rate": 4.852201922385564e-05,
      "loss": 0.0503,
      "step": 110
    },
    {
      "epoch": 80.0,
      "learning_rate": 4.8244412147206284e-05,
      "loss": 0.0281,
      "step": 120
    },
    {
      "epoch": 86.67,
      "learning_rate": 4.794386564209953e-05,
      "loss": 0.0152,
      "step": 130
    },
    {
      "epoch": 93.33,
      "learning_rate": 4.762067631165049e-05,
      "loss": 0.0102,
      "step": 140
    },
    {
      "epoch": 100.0,
      "learning_rate": 4.72751631047092e-05,
      "loss": 0.0075,
      "step": 150
    },
    {
      "epoch": 106.67,
      "learning_rate": 4.690766700109659e-05,
      "loss": 0.0063,
      "step": 160
    },
    {
      "epoch": 113.33,
      "learning_rate": 4.65185506750986e-05,
      "loss": 0.0053,
      "step": 170
    },
    {
      "epoch": 120.0,
      "learning_rate": 4.610819813755038e-05,
      "loss": 0.0049,
      "step": 180
    },
    {
      "epoch": 126.67,
      "learning_rate": 4.567701435686404e-05,
      "loss": 0.0045,
      "step": 190
    },
    {
      "epoch": 133.33,
      "learning_rate": 4.522542485937369e-05,
      "loss": 0.0041,
      "step": 200
    },
    {
      "epoch": 140.0,
      "learning_rate": 4.4753875309392266e-05,
      "loss": 0.0038,
      "step": 210
    },
    {
      "epoch": 146.67,
      "learning_rate": 4.426283106939474e-05,
      "loss": 0.0036,
      "step": 220
    },
    {
      "epoch": 153.33,
      "learning_rate": 4.375277674076149e-05,
      "loss": 0.0035,
      "step": 230
    },
    {
      "epoch": 160.0,
      "learning_rate": 4.3224215685535294e-05,
      "loss": 0.0033,
      "step": 240
    },
    {
      "epoch": 166.67,
      "learning_rate": 4.267766952966369e-05,
      "loss": 0.0032,
      "step": 250
    },
    {
      "epoch": 173.33,
      "learning_rate": 4.211367764821722e-05,
      "loss": 0.0032,
      "step": 260
    },
    {
      "epoch": 180.0,
      "learning_rate": 4.1532796633091296e-05,
      "loss": 0.0031,
      "step": 270
    },
    {
      "epoch": 186.67,
      "learning_rate": 4.093559974371725e-05,
      "loss": 0.003,
      "step": 280
    },
    {
      "epoch": 193.33,
      "learning_rate": 4.0322676341324415e-05,
      "loss": 0.0029,
      "step": 290
    },
    {
      "epoch": 200.0,
      "learning_rate": 3.969463130731183e-05,
      "loss": 0.0029,
      "step": 300
    },
    {
      "epoch": 206.67,
      "learning_rate": 3.905208444630327e-05,
      "loss": 0.0029,
      "step": 310
    },
    {
      "epoch": 213.33,
      "learning_rate": 3.8395669874474915e-05,
      "loss": 0.0028,
      "step": 320
    },
    {
      "epoch": 220.0,
      "learning_rate": 3.7726035393759285e-05,
      "loss": 0.0028,
      "step": 330
    },
    {
      "epoch": 226.67,
      "learning_rate": 3.704384185254288e-05,
      "loss": 0.0027,
      "step": 340
    },
    {
      "epoch": 233.33,
      "learning_rate": 3.634976249348867e-05,
      "loss": 0.0027,
      "step": 350
    },
    {
      "epoch": 240.0,
      "learning_rate": 3.564448228912682e-05,
      "loss": 0.0027,
      "step": 360
    },
    {
      "epoch": 246.67,
      "learning_rate": 3.4928697265869515e-05,
      "loss": 0.0027,
      "step": 370
    },
    {
      "epoch": 253.33,
      "learning_rate": 3.4203113817116957e-05,
      "loss": 0.0028,
      "step": 380
    },
    {
      "epoch": 260.0,
      "learning_rate": 3.346844800613229e-05,
      "loss": 0.0027,
      "step": 390
    },
    {
      "epoch": 266.67,
      "learning_rate": 3.272542485937369e-05,
      "loss": 0.0027,
      "step": 400
    },
    {
      "epoch": 273.33,
      "learning_rate": 3.1974777650980735e-05,
      "loss": 0.0027,
      "step": 410
    },
    {
      "epoch": 280.0,
      "learning_rate": 3.121724717912138e-05,
      "loss": 0.0026,
      "step": 420
    },
    {
      "epoch": 286.67,
      "learning_rate": 3.045358103491357e-05,
      "loss": 0.0026,
      "step": 430
    },
    {
      "epoch": 293.33,
      "learning_rate": 2.9684532864643122e-05,
      "loss": 0.0026,
      "step": 440
    },
    {
      "epoch": 300.0,
      "learning_rate": 2.8910861626005776e-05,
      "loss": 0.0026,
      "step": 450
    },
    {
      "epoch": 306.67,
      "learning_rate": 2.8133330839107608e-05,
      "loss": 0.0026,
      "step": 460
    },
    {
      "epoch": 313.33,
      "learning_rate": 2.7352707832962865e-05,
      "loss": 0.0026,
      "step": 470
    },
    {
      "epoch": 320.0,
      "learning_rate": 2.656976298823284e-05,
      "loss": 0.0026,
      "step": 480
    },
    {
      "epoch": 326.67,
      "learning_rate": 2.578526897695321e-05,
      "loss": 0.0026,
      "step": 490
    },
    {
      "epoch": 333.33,
      "learning_rate": 2.5e-05,
      "loss": 0.0026,
      "step": 500
    },
    {
      "epoch": 340.0,
      "learning_rate": 2.4214731023046793e-05,
      "loss": 0.0025,
      "step": 510
    },
    {
      "epoch": 346.67,
      "learning_rate": 2.3430237011767167e-05,
      "loss": 0.0026,
      "step": 520
    },
    {
      "epoch": 353.33,
      "learning_rate": 2.2647292167037144e-05,
      "loss": 0.0025,
      "step": 530
    },
    {
      "epoch": 360.0,
      "learning_rate": 2.186666916089239e-05,
      "loss": 0.0026,
      "step": 540
    },
    {
      "epoch": 366.67,
      "learning_rate": 2.1089138373994223e-05,
      "loss": 0.0025,
      "step": 550
    },
    {
      "epoch": 373.33,
      "learning_rate": 2.031546713535688e-05,
      "loss": 0.0025,
      "step": 560
    },
    {
      "epoch": 380.0,
      "learning_rate": 1.9546418965086442e-05,
      "loss": 0.0025,
      "step": 570
    },
    {
      "epoch": 386.67,
      "learning_rate": 1.8782752820878634e-05,
      "loss": 0.0025,
      "step": 580
    },
    {
      "epoch": 393.33,
      "learning_rate": 1.802522234901927e-05,
      "loss": 0.0025,
      "step": 590
    },
    {
      "epoch": 400.0,
      "learning_rate": 1.7274575140626318e-05,
      "loss": 0.0025,
      "step": 600
    },
    {
      "epoch": 406.67,
      "learning_rate": 1.6531551993867717e-05,
      "loss": 0.0025,
      "step": 610
    },
    {
      "epoch": 413.33,
      "learning_rate": 1.5796886182883053e-05,
      "loss": 0.0025,
      "step": 620
    },
    {
      "epoch": 420.0,
      "learning_rate": 1.5071302734130489e-05,
      "loss": 0.0025,
      "step": 630
    },
    {
      "epoch": 426.67,
      "learning_rate": 1.4355517710873184e-05,
      "loss": 0.0025,
      "step": 640
    },
    {
      "epoch": 433.33,
      "learning_rate": 1.3650237506511331e-05,
      "loss": 0.0025,
      "step": 650
    },
    {
      "epoch": 440.0,
      "learning_rate": 1.2956158147457115e-05,
      "loss": 0.0025,
      "step": 660
    },
    {
      "epoch": 446.67,
      "learning_rate": 1.2273964606240718e-05,
      "loss": 0.0025,
      "step": 670
    },
    {
      "epoch": 453.33,
      "learning_rate": 1.1604330125525079e-05,
      "loss": 0.0025,
      "step": 680
    },
    {
      "epoch": 460.0,
      "learning_rate": 1.0947915553696742e-05,
      "loss": 0.0025,
      "step": 690
    },
    {
      "epoch": 466.67,
      "learning_rate": 1.0305368692688174e-05,
      "loss": 0.0025,
      "step": 700
    },
    {
      "epoch": 473.33,
      "learning_rate": 9.677323658675594e-06,
      "loss": 0.0025,
      "step": 710
    },
    {
      "epoch": 480.0,
      "learning_rate": 9.064400256282757e-06,
      "loss": 0.0025,
      "step": 720
    },
    {
      "epoch": 486.67,
      "learning_rate": 8.467203366908707e-06,
      "loss": 0.0025,
      "step": 730
    },
    {
      "epoch": 493.33,
      "learning_rate": 7.886322351782783e-06,
      "loss": 0.0025,
      "step": 740
    },
    {
      "epoch": 500.0,
      "learning_rate": 7.3223304703363135e-06,
      "loss": 0.0025,
      "step": 750
    },
    {
      "epoch": 506.67,
      "learning_rate": 6.775784314464717e-06,
      "loss": 0.0025,
      "step": 760
    },
    {
      "epoch": 513.33,
      "learning_rate": 6.247223259238511e-06,
      "loss": 0.0025,
      "step": 770
    },
    {
      "epoch": 520.0,
      "learning_rate": 5.737168930605272e-06,
      "loss": 0.0025,
      "step": 780
    },
    {
      "epoch": 526.67,
      "learning_rate": 5.24612469060774e-06,
      "loss": 0.0025,
      "step": 790
    },
    {
      "epoch": 533.33,
      "learning_rate": 4.7745751406263165e-06,
      "loss": 0.0025,
      "step": 800
    },
    {
      "epoch": 540.0,
      "learning_rate": 4.322985643135952e-06,
      "loss": 0.0025,
      "step": 810
    },
    {
      "epoch": 546.67,
      "learning_rate": 3.891801862449629e-06,
      "loss": 0.0025,
      "step": 820
    },
    {
      "epoch": 553.33,
      "learning_rate": 3.4814493249014116e-06,
      "loss": 0.0025,
      "step": 830
    },
    {
      "epoch": 560.0,
      "learning_rate": 3.092332998903416e-06,
      "loss": 0.0025,
      "step": 840
    },
    {
      "epoch": 566.67,
      "learning_rate": 2.7248368952908053e-06,
      "loss": 0.0025,
      "step": 850
    },
    {
      "epoch": 573.33,
      "learning_rate": 2.379323688349516e-06,
      "loss": 0.0025,
      "step": 860
    },
    {
      "epoch": 580.0,
      "learning_rate": 2.0561343579004715e-06,
      "loss": 0.0024,
      "step": 870
    },
    {
      "epoch": 586.67,
      "learning_rate": 1.7555878527937164e-06,
      "loss": 0.0025,
      "step": 880
    },
    {
      "epoch": 593.33,
      "learning_rate": 1.4779807761443636e-06,
      "loss": 0.0025,
      "step": 890
    },
    {
      "epoch": 600.0,
      "learning_rate": 1.2235870926211619e-06,
      "loss": 0.0025,
      "step": 900
    },
    {
      "epoch": 606.67,
      "learning_rate": 9.926578580764234e-07,
      "loss": 0.0024,
      "step": 910
    },
    {
      "epoch": 613.33,
      "learning_rate": 7.854209717842231e-07,
      "loss": 0.0025,
      "step": 920
    },
    {
      "epoch": 620.0,
      "learning_rate": 6.020809515313142e-07,
      "loss": 0.0025,
      "step": 930
    },
    {
      "epoch": 626.67,
      "learning_rate": 4.4281873178278475e-07,
      "loss": 0.0025,
      "step": 940
    },
    {
      "epoch": 633.33,
      "learning_rate": 3.077914851215585e-07,
      "loss": 0.0025,
      "step": 950
    },
    {
      "epoch": 640.0,
      "learning_rate": 1.9713246713805588e-07,
      "loss": 0.0024,
      "step": 960
    },
    {
      "epoch": 646.67,
      "learning_rate": 1.109508849230001e-07,
      "loss": 0.0025,
      "step": 970
    },
    {
      "epoch": 653.33,
      "learning_rate": 4.9331789293211026e-08,
      "loss": 0.0025,
      "step": 980
    },
    {
      "epoch": 660.0,
      "learning_rate": 1.233599085671e-08,
      "loss": 0.0025,
      "step": 990
    },
    {
      "epoch": 666.67,
      "learning_rate": 0.0,
      "loss": 0.0025,
      "step": 1000
    },
    {
      "epoch": 666.67,
      "step": 1000,
      "total_flos": 1.3543137746092032e+17,
      "train_loss": 0.06853693281114101,
      "train_runtime": 2944.4055,
      "train_samples_per_second": 3.396,
      "train_steps_per_second": 0.34
    }
  ],
  "max_steps": 1000,
  "num_train_epochs": 1000,
  "total_flos": 1.3543137746092032e+17,
  "trial_name": null,
  "trial_params": null
}