{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.149184149184149,
  "eval_steps": 500,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.014344629729245113,
      "grad_norm": 2.203942058202339,
      "learning_rate": 4.285714285714286e-06,
      "loss": 1.1704,
      "step": 10
    },
    {
      "epoch": 0.028689259458490227,
      "grad_norm": 1.1497467131489452,
      "learning_rate": 9.047619047619047e-06,
      "loss": 0.9166,
      "step": 20
    },
    {
      "epoch": 0.04303388918773534,
      "grad_norm": 0.8889709375752792,
      "learning_rate": 1.3809523809523811e-05,
      "loss": 0.7948,
      "step": 30
    },
    {
      "epoch": 0.05737851891698045,
      "grad_norm": 0.9291104053668569,
      "learning_rate": 1.8571428571428572e-05,
      "loss": 0.7447,
      "step": 40
    },
    {
      "epoch": 0.07172314864622557,
      "grad_norm": 1.1881747732049386,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 0.729,
      "step": 50
    },
    {
      "epoch": 0.08606777837547068,
      "grad_norm": 0.9032703792102055,
      "learning_rate": 2.8095238095238096e-05,
      "loss": 0.7177,
      "step": 60
    },
    {
      "epoch": 0.1004124081047158,
      "grad_norm": 0.9447848465031972,
      "learning_rate": 3.285714285714286e-05,
      "loss": 0.7048,
      "step": 70
    },
    {
      "epoch": 0.1147570378339609,
      "grad_norm": 0.9807912049624671,
      "learning_rate": 3.761904761904762e-05,
      "loss": 0.7048,
      "step": 80
    },
    {
      "epoch": 0.129101667563206,
      "grad_norm": 0.8678523409043055,
      "learning_rate": 4.2380952380952385e-05,
      "loss": 0.7065,
      "step": 90
    },
    {
      "epoch": 0.14344629729245115,
      "grad_norm": 1.095952408037045,
      "learning_rate": 4.714285714285714e-05,
      "loss": 0.7098,
      "step": 100
    },
    {
      "epoch": 0.15779092702169625,
      "grad_norm": 0.932556439833824,
      "learning_rate": 5.1904761904761913e-05,
      "loss": 0.7047,
      "step": 110
    },
    {
      "epoch": 0.17213555675094136,
      "grad_norm": 0.716991228951169,
      "learning_rate": 5.666666666666667e-05,
      "loss": 0.704,
      "step": 120
    },
    {
      "epoch": 0.1864801864801865,
      "grad_norm": 0.7487957467977157,
      "learning_rate": 6.142857142857143e-05,
      "loss": 0.7013,
      "step": 130
    },
    {
      "epoch": 0.2008248162094316,
      "grad_norm": 0.7206473376735171,
      "learning_rate": 6.619047619047619e-05,
      "loss": 0.7038,
      "step": 140
    },
    {
      "epoch": 0.2151694459386767,
      "grad_norm": 0.6694694288559293,
      "learning_rate": 7.095238095238096e-05,
      "loss": 0.7119,
      "step": 150
    },
    {
      "epoch": 0.2295140756679218,
      "grad_norm": 0.7219727300855844,
      "learning_rate": 7.571428571428571e-05,
      "loss": 0.7068,
      "step": 160
    },
    {
      "epoch": 0.24385870539716695,
      "grad_norm": 0.6818226409038356,
      "learning_rate": 8.047619047619048e-05,
      "loss": 0.7088,
      "step": 170
    },
    {
      "epoch": 0.258203335126412,
      "grad_norm": 0.6977379507391648,
      "learning_rate": 8.523809523809524e-05,
      "loss": 0.7032,
      "step": 180
    },
    {
      "epoch": 0.2725479648556572,
      "grad_norm": 0.6553925195793743,
      "learning_rate": 9e-05,
      "loss": 0.7081,
      "step": 190
    },
    {
      "epoch": 0.2868925945849023,
      "grad_norm": 0.5491527719782942,
      "learning_rate": 9.476190476190476e-05,
      "loss": 0.7079,
      "step": 200
    },
    {
      "epoch": 0.3012372243141474,
      "grad_norm": 0.6640413812930087,
      "learning_rate": 9.952380952380953e-05,
      "loss": 0.7153,
      "step": 210
    },
    {
      "epoch": 0.3155818540433925,
      "grad_norm": 0.5699857338148432,
      "learning_rate": 9.999436939807164e-05,
      "loss": 0.7235,
      "step": 220
    },
    {
      "epoch": 0.3299264837726376,
      "grad_norm": 0.6222679010251994,
      "learning_rate": 9.99749072170404e-05,
      "loss": 0.7215,
      "step": 230
    },
    {
      "epoch": 0.3442711135018827,
      "grad_norm": 0.529203333569492,
      "learning_rate": 9.994154935353517e-05,
      "loss": 0.7074,
      "step": 240
    },
    {
      "epoch": 0.3586157432311278,
      "grad_norm": 0.524281983121501,
      "learning_rate": 9.98943050828164e-05,
      "loss": 0.7086,
      "step": 250
    },
    {
      "epoch": 0.372960372960373,
      "grad_norm": 0.5037155864766709,
      "learning_rate": 9.983318754130435e-05,
      "loss": 0.7004,
      "step": 260
    },
    {
      "epoch": 0.3873050026896181,
      "grad_norm": 0.5619940203814628,
      "learning_rate": 9.975821372292653e-05,
      "loss": 0.6961,
      "step": 270
    },
    {
      "epoch": 0.4016496324188632,
      "grad_norm": 0.46139196683094363,
      "learning_rate": 9.966940447439245e-05,
      "loss": 0.6914,
      "step": 280
    },
    {
      "epoch": 0.4159942621481083,
      "grad_norm": 0.5701814263114097,
      "learning_rate": 9.956678448939718e-05,
      "loss": 0.6805,
      "step": 290
    },
    {
      "epoch": 0.4303388918773534,
      "grad_norm": 0.5822888434573161,
      "learning_rate": 9.945038230175509e-05,
      "loss": 0.6918,
      "step": 300
    },
    {
      "epoch": 0.4446835216065985,
      "grad_norm": 0.44360325798902245,
      "learning_rate": 9.932023027746602e-05,
      "loss": 0.6798,
      "step": 310
    },
    {
      "epoch": 0.4590281513358436,
      "grad_norm": 0.4584389136933647,
      "learning_rate": 9.917636460571578e-05,
      "loss": 0.6848,
      "step": 320
    },
    {
      "epoch": 0.47337278106508873,
      "grad_norm": 0.42186486444358706,
      "learning_rate": 9.901882528881363e-05,
      "loss": 0.6856,
      "step": 330
    },
    {
      "epoch": 0.4877174107943339,
      "grad_norm": 0.4547332297314441,
      "learning_rate": 9.884765613106948e-05,
      "loss": 0.6761,
      "step": 340
    },
    {
      "epoch": 0.502062040523579,
      "grad_norm": 0.5669430722485119,
      "learning_rate": 9.866290472661406e-05,
      "loss": 0.6795,
      "step": 350
    },
    {
      "epoch": 0.516406670252824,
      "grad_norm": 0.4480667658595798,
      "learning_rate": 9.846462244616508e-05,
      "loss": 0.6701,
      "step": 360
    },
    {
      "epoch": 0.5307512999820692,
      "grad_norm": 0.4330593851481768,
      "learning_rate": 9.825286442274357e-05,
      "loss": 0.6785,
      "step": 370
    },
    {
      "epoch": 0.5450959297113144,
      "grad_norm": 0.43973236782853137,
      "learning_rate": 9.802768953634388e-05,
      "loss": 0.6692,
      "step": 380
    },
    {
      "epoch": 0.5594405594405595,
      "grad_norm": 0.4800141760306811,
      "learning_rate": 9.778916039756193e-05,
      "loss": 0.6644,
      "step": 390
    },
    {
      "epoch": 0.5737851891698046,
      "grad_norm": 0.4370272358079981,
      "learning_rate": 9.753734333018616e-05,
      "loss": 0.6582,
      "step": 400
    },
    {
      "epoch": 0.5881298188990497,
      "grad_norm": 0.46636751943765076,
      "learning_rate": 9.727230835275598e-05,
      "loss": 0.6606,
      "step": 410
    },
    {
      "epoch": 0.6024744486282948,
      "grad_norm": 0.44398226842129596,
      "learning_rate": 9.699412915909284e-05,
      "loss": 0.6591,
      "step": 420
    },
    {
      "epoch": 0.6168190783575399,
      "grad_norm": 0.417529788133883,
      "learning_rate": 9.670288309780953e-05,
      "loss": 0.6502,
      "step": 430
    },
    {
      "epoch": 0.631163708086785,
      "grad_norm": 0.425244481854258,
      "learning_rate": 9.639865115080304e-05,
      "loss": 0.6601,
      "step": 440
    },
    {
      "epoch": 0.6455083378160301,
      "grad_norm": 0.4266753216646151,
      "learning_rate": 9.608151791073737e-05,
      "loss": 0.6552,
      "step": 450
    },
    {
      "epoch": 0.6598529675452752,
      "grad_norm": 0.4060939108779214,
      "learning_rate": 9.575157155752222e-05,
      "loss": 0.6483,
      "step": 460
    },
    {
      "epoch": 0.6741975972745203,
      "grad_norm": 0.44733731016902734,
      "learning_rate": 9.54089038337943e-05,
      "loss": 0.6414,
      "step": 470
    },
    {
      "epoch": 0.6885422270037654,
      "grad_norm": 0.4343263599516913,
      "learning_rate": 9.5053610019408e-05,
      "loss": 0.6452,
      "step": 480
    },
    {
      "epoch": 0.7028868567330105,
      "grad_norm": 0.40484892671799194,
      "learning_rate": 9.468578890494256e-05,
      "loss": 0.6399,
      "step": 490
    },
    {
      "epoch": 0.7172314864622557,
      "grad_norm": 0.3977086895873736,
      "learning_rate": 9.430554276423292e-05,
      "loss": 0.6427,
      "step": 500
    },
    {
      "epoch": 0.7315761161915008,
      "grad_norm": 0.4059393728648637,
      "learning_rate": 9.391297732593229e-05,
      "loss": 0.6405,
      "step": 510
    },
    {
      "epoch": 0.745920745920746,
      "grad_norm": 0.3918015124178049,
      "learning_rate": 9.350820174411386e-05,
      "loss": 0.6428,
      "step": 520
    },
    {
      "epoch": 0.7602653756499911,
      "grad_norm": 0.397284227604159,
      "learning_rate": 9.309132856792023e-05,
      "loss": 0.6379,
      "step": 530
    },
    {
      "epoch": 0.7746100053792362,
      "grad_norm": 0.423979379524061,
      "learning_rate": 9.266247371026873e-05,
      "loss": 0.6346,
      "step": 540
    },
    {
      "epoch": 0.7889546351084813,
      "grad_norm": 0.3974744664267281,
      "learning_rate": 9.222175641562143e-05,
      "loss": 0.6389,
      "step": 550
    },
    {
      "epoch": 0.8032992648377264,
      "grad_norm": 0.40660189053377105,
      "learning_rate": 9.176929922682891e-05,
      "loss": 0.6328,
      "step": 560
    },
    {
      "epoch": 0.8176438945669715,
      "grad_norm": 0.39183846193855565,
      "learning_rate": 9.130522795105676e-05,
      "loss": 0.6332,
      "step": 570
    },
    {
      "epoch": 0.8319885242962166,
      "grad_norm": 0.3794359883589157,
      "learning_rate": 9.082967162480459e-05,
      "loss": 0.6248,
      "step": 580
    },
    {
      "epoch": 0.8463331540254617,
      "grad_norm": 0.3757192118653646,
      "learning_rate": 9.034276247802688e-05,
      "loss": 0.6204,
      "step": 590
    },
    {
      "epoch": 0.8606777837547068,
      "grad_norm": 0.4344414187841982,
      "learning_rate": 8.984463589736614e-05,
      "loss": 0.6253,
      "step": 600
    },
    {
      "epoch": 0.8750224134839519,
      "grad_norm": 0.4124738293723295,
      "learning_rate": 8.933543038850816e-05,
      "loss": 0.6254,
      "step": 610
    },
    {
      "epoch": 0.889367043213197,
      "grad_norm": 0.3729179255559684,
      "learning_rate": 8.881528753767007e-05,
      "loss": 0.6225,
      "step": 620
    },
    {
      "epoch": 0.9037116729424421,
      "grad_norm": 0.3999425136973519,
      "learning_rate": 8.82843519722319e-05,
      "loss": 0.6219,
      "step": 630
    },
    {
      "epoch": 0.9180563026716873,
      "grad_norm": 0.3613041053584094,
      "learning_rate": 8.774277132052237e-05,
      "loss": 0.6151,
      "step": 640
    },
    {
      "epoch": 0.9324009324009324,
      "grad_norm": 0.3606937460298364,
      "learning_rate": 8.719069617077046e-05,
      "loss": 0.6216,
      "step": 650
    },
    {
      "epoch": 0.9467455621301775,
      "grad_norm": 0.42662552468134024,
      "learning_rate": 8.662828002923378e-05,
      "loss": 0.6179,
      "step": 660
    },
    {
      "epoch": 0.9610901918594227,
      "grad_norm": 0.3630873900729852,
      "learning_rate": 8.605567927751576e-05,
      "loss": 0.6171,
      "step": 670
    },
    {
      "epoch": 0.9754348215886678,
      "grad_norm": 0.3905293019503008,
      "learning_rate": 8.547305312908318e-05,
      "loss": 0.6085,
      "step": 680
    },
    {
      "epoch": 0.9897794513179129,
      "grad_norm": 0.36834813483218043,
      "learning_rate": 8.48805635849964e-05,
      "loss": 0.6074,
      "step": 690
    },
    {
      "epoch": 1.002868925945849,
      "grad_norm": 0.39960980145566394,
      "learning_rate": 8.427837538886437e-05,
      "loss": 0.5784,
      "step": 700
    },
    {
      "epoch": 1.017213555675094,
      "grad_norm": 0.38333026599469155,
      "learning_rate": 8.366665598103727e-05,
      "loss": 0.4367,
      "step": 710
    },
    {
      "epoch": 1.0315581854043392,
      "grad_norm": 0.3516312791289111,
      "learning_rate": 8.304557545204908e-05,
      "loss": 0.4364,
      "step": 720
    },
    {
      "epoch": 1.0459028151335843,
      "grad_norm": 0.33320216950958276,
      "learning_rate": 8.241530649532339e-05,
      "loss": 0.4252,
      "step": 730
    },
    {
      "epoch": 1.0602474448628294,
      "grad_norm": 0.3367270532117864,
      "learning_rate": 8.177602435915546e-05,
      "loss": 0.4245,
      "step": 740
    },
    {
      "epoch": 1.0745920745920745,
      "grad_norm": 0.36314384787697035,
      "learning_rate": 8.11279067979839e-05,
      "loss": 0.4172,
      "step": 750
    },
    {
      "epoch": 1.0889367043213196,
      "grad_norm": 0.3331116126500675,
      "learning_rate": 8.04711340229654e-05,
      "loss": 0.4117,
      "step": 760
    },
    {
      "epoch": 1.1032813340505647,
      "grad_norm": 0.3316800823164157,
      "learning_rate": 7.980588865186649e-05,
      "loss": 0.4217,
      "step": 770
    },
    {
      "epoch": 1.11762596377981,
      "grad_norm": 0.37471291784176136,
      "learning_rate": 7.913235565828613e-05,
      "loss": 0.4178,
      "step": 780
    },
    {
      "epoch": 1.1319705935090552,
      "grad_norm": 0.3734129628024853,
      "learning_rate": 7.845072232022311e-05,
      "loss": 0.4173,
      "step": 790
    },
    {
      "epoch": 1.1463152232383003,
      "grad_norm": 0.33461223045242555,
      "learning_rate": 7.776117816800288e-05,
      "loss": 0.4258,
      "step": 800
    },
    {
      "epoch": 1.1606598529675454,
      "grad_norm": 0.3149294832232542,
      "learning_rate": 7.706391493157805e-05,
      "loss": 0.4194,
      "step": 810
    },
    {
      "epoch": 1.1750044826967905,
      "grad_norm": 0.33310067113179365,
      "learning_rate": 7.635912648721718e-05,
      "loss": 0.4159,
      "step": 820
    },
    {
      "epoch": 1.1893491124260356,
      "grad_norm": 0.34107614209430825,
      "learning_rate": 7.564700880359696e-05,
      "loss": 0.4193,
      "step": 830
    },
    {
      "epoch": 1.2036937421552807,
      "grad_norm": 0.3033843674878663,
      "learning_rate": 7.492775988731243e-05,
      "loss": 0.4096,
      "step": 840
    },
    {
      "epoch": 1.2180383718845258,
      "grad_norm": 0.36776234094697624,
      "learning_rate": 7.420157972782063e-05,
      "loss": 0.4179,
      "step": 850
    },
    {
      "epoch": 1.232383001613771,
      "grad_norm": 0.32016874059260647,
      "learning_rate": 7.346867024183291e-05,
      "loss": 0.4178,
      "step": 860
    },
    {
      "epoch": 1.246727631343016,
      "grad_norm": 0.3461545416340315,
      "learning_rate": 7.272923521717133e-05,
      "loss": 0.4178,
      "step": 870
    },
    {
      "epoch": 1.2610722610722611,
      "grad_norm": 0.3239854507991741,
      "learning_rate": 7.198348025610481e-05,
      "loss": 0.4173,
      "step": 880
    },
    {
      "epoch": 1.2754168908015062,
      "grad_norm": 0.35967022774745094,
      "learning_rate": 7.12316127181808e-05,
      "loss": 0.4128,
      "step": 890
    },
    {
      "epoch": 1.2897615205307513,
      "grad_norm": 0.33647912683040776,
      "learning_rate": 7.047384166256815e-05,
      "loss": 0.4206,
      "step": 900
    },
    {
      "epoch": 1.3041061502599964,
      "grad_norm": 0.34026212090737123,
      "learning_rate": 6.971037778992775e-05,
      "loss": 0.4196,
      "step": 910
    },
    {
      "epoch": 1.3184507799892415,
      "grad_norm": 0.31786823344694176,
      "learning_rate": 6.894143338382639e-05,
      "loss": 0.4217,
      "step": 920
    },
    {
      "epoch": 1.3327954097184866,
      "grad_norm": 0.3189700837737566,
      "learning_rate": 6.81672222517107e-05,
      "loss": 0.4225,
      "step": 930
    },
    {
      "epoch": 1.3471400394477318,
      "grad_norm": 0.3275473534649835,
      "learning_rate": 6.73879596654573e-05,
      "loss": 0.4119,
      "step": 940
    },
    {
      "epoch": 1.3614846691769769,
      "grad_norm": 0.3420058843642656,
      "learning_rate": 6.660386230151571e-05,
      "loss": 0.4153,
      "step": 950
    },
    {
      "epoch": 1.375829298906222,
      "grad_norm": 0.36451277332483983,
      "learning_rate": 6.581514818066088e-05,
      "loss": 0.4176,
      "step": 960
    },
    {
      "epoch": 1.390173928635467,
      "grad_norm": 0.3392630329016263,
      "learning_rate": 6.502203660737169e-05,
      "loss": 0.4146,
      "step": 970
    },
    {
      "epoch": 1.4045185583647122,
      "grad_norm": 0.32756084558577897,
      "learning_rate": 6.422474810885278e-05,
      "loss": 0.4093,
      "step": 980
    },
    {
      "epoch": 1.4188631880939573,
      "grad_norm": 0.31495606064539233,
      "learning_rate": 6.342350437371614e-05,
      "loss": 0.4127,
      "step": 990
    },
    {
      "epoch": 1.4332078178232024,
      "grad_norm": 0.3106906576340927,
      "learning_rate": 6.26185281903399e-05,
      "loss": 0.4109,
      "step": 1000
    },
    {
      "epoch": 1.4475524475524475,
      "grad_norm": 0.34231901054792907,
      "learning_rate": 6.181004338492141e-05,
      "loss": 0.4115,
      "step": 1010
    },
    {
      "epoch": 1.4618970772816926,
      "grad_norm": 0.2998753745826404,
      "learning_rate": 6.09982747592415e-05,
      "loss": 0.4063,
      "step": 1020
    },
    {
      "epoch": 1.4762417070109377,
      "grad_norm": 0.3030265142945988,
      "learning_rate": 6.018344802815778e-05,
      "loss": 0.4096,
      "step": 1030
    },
    {
      "epoch": 1.4905863367401828,
      "grad_norm": 0.3209761231799724,
      "learning_rate": 5.936578975684378e-05,
      "loss": 0.4115,
      "step": 1040
    },
    {
      "epoch": 1.504930966469428,
      "grad_norm": 0.3117296901575659,
      "learning_rate": 5.854552729779184e-05,
      "loss": 0.4083,
      "step": 1050
    },
    {
      "epoch": 1.519275596198673,
      "grad_norm": 0.32123211938760937,
      "learning_rate": 5.772288872759702e-05,
      "loss": 0.4068,
      "step": 1060
    },
    {
      "epoch": 1.5336202259279181,
      "grad_norm": 0.32646003873154356,
      "learning_rate": 5.6898102783539665e-05,
      "loss": 0.4037,
      "step": 1070
    },
    {
      "epoch": 1.5479648556571632,
      "grad_norm": 0.31930171711882954,
      "learning_rate": 5.607139879998427e-05,
      "loss": 0.4052,
      "step": 1080
    },
    {
      "epoch": 1.5623094853864083,
      "grad_norm": 0.2987369966742978,
      "learning_rate": 5.524300664461235e-05,
      "loss": 0.4041,
      "step": 1090
    },
    {
      "epoch": 1.5766541151156535,
      "grad_norm": 0.29715944144054796,
      "learning_rate": 5.441315665450697e-05,
      "loss": 0.408,
      "step": 1100
    },
    {
      "epoch": 1.5909987448448986,
      "grad_norm": 0.300825472928675,
      "learning_rate": 5.3582079572106794e-05,
      "loss": 0.3996,
      "step": 1110
    },
    {
      "epoch": 1.6053433745741437,
      "grad_norm": 0.321003899975276,
      "learning_rate": 5.275000648104743e-05,
      "loss": 0.4012,
      "step": 1120
    },
    {
      "epoch": 1.6196880043033888,
      "grad_norm": 0.3068035924351862,
      "learning_rate": 5.191716874190785e-05,
      "loss": 0.4056,
      "step": 1130
    },
    {
      "epoch": 1.6340326340326339,
      "grad_norm": 0.3172811217753757,
      "learning_rate": 5.1083797927879896e-05,
      "loss": 0.3995,
      "step": 1140
    },
    {
      "epoch": 1.648377263761879,
      "grad_norm": 0.3135234713854417,
      "learning_rate": 5.025012576037855e-05,
      "loss": 0.4058,
      "step": 1150
    },
    {
      "epoch": 1.6627218934911243,
      "grad_norm": 0.2927786895264955,
      "learning_rate": 4.9416384044611124e-05,
      "loss": 0.4015,
      "step": 1160
    },
    {
      "epoch": 1.6770665232203694,
      "grad_norm": 0.29358825034715325,
      "learning_rate": 4.858280460512302e-05,
      "loss": 0.4013,
      "step": 1170
    },
    {
      "epoch": 1.6914111529496145,
      "grad_norm": 0.3100930655471993,
      "learning_rate": 4.7749619221338227e-05,
      "loss": 0.4009,
      "step": 1180
    },
    {
      "epoch": 1.7057557826788596,
      "grad_norm": 0.307169278710133,
      "learning_rate": 4.691705956311225e-05,
      "loss": 0.3992,
      "step": 1190
    },
    {
      "epoch": 1.7201004124081047,
      "grad_norm": 0.29726825931152523,
      "learning_rate": 4.608535712631566e-05,
      "loss": 0.3941,
      "step": 1200
    },
    {
      "epoch": 1.7344450421373498,
      "grad_norm": 0.2930544538896363,
      "learning_rate": 4.525474316846581e-05,
      "loss": 0.3916,
      "step": 1210
    },
    {
      "epoch": 1.748789671866595,
      "grad_norm": 0.3040077165902238,
      "learning_rate": 4.4425448644425066e-05,
      "loss": 0.3923,
      "step": 1220
    },
    {
      "epoch": 1.76313430159584,
      "grad_norm": 0.29459907021508275,
      "learning_rate": 4.359770414218296e-05,
      "loss": 0.3875,
      "step": 1230
    },
    {
      "epoch": 1.7774789313250852,
      "grad_norm": 0.30065987027605934,
      "learning_rate": 4.2771739818740565e-05,
      "loss": 0.3961,
      "step": 1240
    },
    {
      "epoch": 1.7918235610543303,
      "grad_norm": 0.2920496264668077,
      "learning_rate": 4.194778533611451e-05,
      "loss": 0.3852,
      "step": 1250
    },
    {
      "epoch": 1.8061681907835754,
      "grad_norm": 0.2888363234117279,
      "learning_rate": 4.112606979747881e-05,
      "loss": 0.3937,
      "step": 1260
    },
    {
      "epoch": 1.8205128205128205,
      "grad_norm": 0.30497579196679886,
      "learning_rate": 4.030682168346192e-05,
      "loss": 0.3817,
      "step": 1270
    },
    {
      "epoch": 1.8348574502420656,
      "grad_norm": 0.28969651963041293,
      "learning_rate": 3.949026878861704e-05,
      "loss": 0.383,
      "step": 1280
    },
    {
      "epoch": 1.8492020799713107,
      "grad_norm": 0.28259658296458734,
      "learning_rate": 3.867663815808303e-05,
      "loss": 0.3832,
      "step": 1290
    },
    {
      "epoch": 1.8635467097005558,
      "grad_norm": 0.30667496660431287,
      "learning_rate": 3.78661560244539e-05,
      "loss": 0.3833,
      "step": 1300
    },
    {
      "epoch": 1.8778913394298011,
      "grad_norm": 0.3058644823146065,
      "learning_rate": 3.705904774487396e-05,
      "loss": 0.3794,
      "step": 1310
    },
    {
      "epoch": 1.8922359691590462,
      "grad_norm": 0.2990291886718597,
      "learning_rate": 3.6255537738376706e-05,
      "loss": 0.3874,
      "step": 1320
    },
    {
      "epoch": 1.9065805988882913,
      "grad_norm": 0.2863080421019278,
      "learning_rate": 3.545584942348426e-05,
      "loss": 0.3778,
      "step": 1330
    },
    {
      "epoch": 1.9209252286175365,
      "grad_norm": 0.2900728201465024,
      "learning_rate": 3.466020515608525e-05,
      "loss": 0.3755,
      "step": 1340
    },
    {
      "epoch": 1.9352698583467816,
      "grad_norm": 0.28225638086220933,
      "learning_rate": 3.386882616760794e-05,
      "loss": 0.3794,
      "step": 1350
    },
    {
      "epoch": 1.9496144880760267,
      "grad_norm": 0.29192196316916985,
      "learning_rate": 3.30819325035062e-05,
      "loss": 0.378,
      "step": 1360
    },
    {
      "epoch": 1.9639591178052718,
      "grad_norm": 0.28745199551725825,
      "learning_rate": 3.229974296207513e-05,
      "loss": 0.3812,
      "step": 1370
    },
    {
      "epoch": 1.9783037475345169,
      "grad_norm": 0.29973956860014883,
      "learning_rate": 3.152247503361353e-05,
      "loss": 0.3724,
      "step": 1380
    },
    {
      "epoch": 1.992648377263762,
      "grad_norm": 0.31079745266426567,
      "learning_rate": 3.075034483994997e-05,
      "loss": 0.3718,
      "step": 1390
    },
    {
      "epoch": 2.005737851891698,
      "grad_norm": 0.33118419222329254,
      "learning_rate": 2.998356707434947e-05,
      "loss": 0.2803,
      "step": 1400
    },
    {
      "epoch": 2.020082481620943,
      "grad_norm": 0.2667436179131777,
      "learning_rate": 2.9222354941817375e-05,
      "loss": 0.1535,
      "step": 1410
    },
    {
      "epoch": 2.034427111350188,
      "grad_norm": 0.2667296842605953,
      "learning_rate": 2.846692009981693e-05,
      "loss": 0.1438,
      "step": 1420
    },
    {
      "epoch": 2.0487717410794333,
      "grad_norm": 0.2687925364002219,
      "learning_rate": 2.771747259941734e-05,
      "loss": 0.1383,
      "step": 1430
    },
    {
      "epoch": 2.0631163708086784,
      "grad_norm": 0.258692098758459,
      "learning_rate": 2.6974220826888374e-05,
      "loss": 0.1348,
      "step": 1440
    },
    {
      "epoch": 2.0774610005379235,
      "grad_norm": 0.27396537684578004,
      "learning_rate": 2.623737144575787e-05,
      "loss": 0.1371,
      "step": 1450
    },
    {
      "epoch": 2.0918056302671686,
      "grad_norm": 0.26487337871277594,
      "learning_rate": 2.5507129339348335e-05,
      "loss": 0.132,
      "step": 1460
    },
    {
      "epoch": 2.1061502599964137,
      "grad_norm": 0.26513450922087345,
      "learning_rate": 2.478369755380839e-05,
      "loss": 0.1335,
      "step": 1470
    },
    {
      "epoch": 2.120494889725659,
      "grad_norm": 0.25972172147117933,
      "learning_rate": 2.406727724165524e-05,
      "loss": 0.134,
      "step": 1480
    },
    {
      "epoch": 2.134839519454904,
      "grad_norm": 0.2524599507939378,
      "learning_rate": 2.3358067605843537e-05,
      "loss": 0.1307,
      "step": 1490
    },
    {
      "epoch": 2.149184149184149,
      "grad_norm": 0.8197351837947908,
      "learning_rate": 2.2656265844376367e-05,
      "loss": 0.1332,
      "step": 1500
    }
  ],
  "logging_steps": 10,
  "max_steps": 2094,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1017570026586112.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}