{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 24.93765586034913,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02493765586034913,
      "grad_norm": 11.571956634521484,
      "learning_rate": 1.8e-06,
      "loss": 1.0028,
      "step": 10
    },
    {
      "epoch": 0.04987531172069826,
      "grad_norm": 3.5949718952178955,
      "learning_rate": 3.8e-06,
      "loss": 0.8153,
      "step": 20
    },
    {
      "epoch": 0.07481296758104738,
      "grad_norm": 3.9494338035583496,
      "learning_rate": 5.8e-06,
      "loss": 0.4251,
      "step": 30
    },
    {
      "epoch": 0.09975062344139651,
      "grad_norm": 1.789270281791687,
      "learning_rate": 7.8e-06,
      "loss": 0.2728,
      "step": 40
    },
    {
      "epoch": 0.12468827930174564,
      "grad_norm": 1.306166410446167,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.2383,
      "step": 50
    },
    {
      "epoch": 0.14962593516209477,
      "grad_norm": 1.317975640296936,
      "learning_rate": 1.18e-05,
      "loss": 0.1964,
      "step": 60
    },
    {
      "epoch": 0.1745635910224439,
      "grad_norm": 1.1910805702209473,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.1565,
      "step": 70
    },
    {
      "epoch": 0.19950124688279303,
      "grad_norm": 1.379970669746399,
      "learning_rate": 1.58e-05,
      "loss": 0.1611,
      "step": 80
    },
    {
      "epoch": 0.22443890274314215,
      "grad_norm": 1.7543959617614746,
      "learning_rate": 1.78e-05,
      "loss": 0.1558,
      "step": 90
    },
    {
      "epoch": 0.24937655860349128,
      "grad_norm": 0.9556743502616882,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 0.1387,
      "step": 100
    },
    {
      "epoch": 0.2743142144638404,
      "grad_norm": 1.1828086376190186,
      "learning_rate": 2.18e-05,
      "loss": 0.1273,
      "step": 110
    },
    {
      "epoch": 0.29925187032418954,
      "grad_norm": 0.7264103293418884,
      "learning_rate": 2.38e-05,
      "loss": 0.1235,
      "step": 120
    },
    {
      "epoch": 0.32418952618453867,
      "grad_norm": 1.5272457599639893,
      "learning_rate": 2.58e-05,
      "loss": 0.1215,
      "step": 130
    },
    {
      "epoch": 0.3491271820448878,
      "grad_norm": 0.867839515209198,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.1102,
      "step": 140
    },
    {
      "epoch": 0.3740648379052369,
      "grad_norm": 1.2812258005142212,
      "learning_rate": 2.98e-05,
      "loss": 0.1125,
      "step": 150
    },
    {
      "epoch": 0.39900249376558605,
      "grad_norm": 0.7936857342720032,
      "learning_rate": 3.18e-05,
      "loss": 0.1135,
      "step": 160
    },
    {
      "epoch": 0.4239401496259352,
      "grad_norm": 1.7893768548965454,
      "learning_rate": 3.38e-05,
      "loss": 0.1064,
      "step": 170
    },
    {
      "epoch": 0.4488778054862843,
      "grad_norm": 0.9866449236869812,
      "learning_rate": 3.58e-05,
      "loss": 0.0999,
      "step": 180
    },
    {
      "epoch": 0.47381546134663344,
      "grad_norm": 1.174594521522522,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.0999,
      "step": 190
    },
    {
      "epoch": 0.49875311720698257,
      "grad_norm": 1.0781136751174927,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.1044,
      "step": 200
    },
    {
      "epoch": 0.5236907730673317,
      "grad_norm": 0.9381222724914551,
      "learning_rate": 4.18e-05,
      "loss": 0.105,
      "step": 210
    },
    {
      "epoch": 0.5486284289276808,
      "grad_norm": 0.808373212814331,
      "learning_rate": 4.38e-05,
      "loss": 0.0988,
      "step": 220
    },
    {
      "epoch": 0.57356608478803,
      "grad_norm": 1.0837031602859497,
      "learning_rate": 4.58e-05,
      "loss": 0.0923,
      "step": 230
    },
    {
      "epoch": 0.5985037406483791,
      "grad_norm": 0.8404956459999084,
      "learning_rate": 4.78e-05,
      "loss": 0.0893,
      "step": 240
    },
    {
      "epoch": 0.6234413965087282,
      "grad_norm": 1.0768768787384033,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.0829,
      "step": 250
    },
    {
      "epoch": 0.6483790523690773,
      "grad_norm": 1.100760817527771,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.0805,
      "step": 260
    },
    {
      "epoch": 0.6733167082294265,
      "grad_norm": 0.594467043876648,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.0892,
      "step": 270
    },
    {
      "epoch": 0.6982543640897756,
      "grad_norm": 0.678115725517273,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.0954,
      "step": 280
    },
    {
      "epoch": 0.7231920199501247,
      "grad_norm": 0.8147896528244019,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.0916,
      "step": 290
    },
    {
      "epoch": 0.7481296758104738,
      "grad_norm": 0.8104990124702454,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.0834,
      "step": 300
    },
    {
      "epoch": 0.773067331670823,
      "grad_norm": 1.190301775932312,
      "learning_rate": 6.18e-05,
      "loss": 0.0839,
      "step": 310
    },
    {
      "epoch": 0.7980049875311721,
      "grad_norm": 1.3575026988983154,
      "learning_rate": 6.38e-05,
      "loss": 0.0808,
      "step": 320
    },
    {
      "epoch": 0.8229426433915212,
      "grad_norm": 0.9009537696838379,
      "learning_rate": 6.58e-05,
      "loss": 0.0851,
      "step": 330
    },
    {
      "epoch": 0.8478802992518704,
      "grad_norm": 0.5478576421737671,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.0763,
      "step": 340
    },
    {
      "epoch": 0.8728179551122195,
      "grad_norm": 0.9551648497581482,
      "learning_rate": 6.98e-05,
      "loss": 0.0725,
      "step": 350
    },
    {
      "epoch": 0.8977556109725686,
      "grad_norm": 0.639614462852478,
      "learning_rate": 7.18e-05,
      "loss": 0.0763,
      "step": 360
    },
    {
      "epoch": 0.9226932668329177,
      "grad_norm": 0.7857018709182739,
      "learning_rate": 7.38e-05,
      "loss": 0.0782,
      "step": 370
    },
    {
      "epoch": 0.9476309226932669,
      "grad_norm": 0.802908182144165,
      "learning_rate": 7.58e-05,
      "loss": 0.0707,
      "step": 380
    },
    {
      "epoch": 0.972568578553616,
      "grad_norm": 0.8614866137504578,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.0727,
      "step": 390
    },
    {
      "epoch": 0.9975062344139651,
      "grad_norm": 0.7557615637779236,
      "learning_rate": 7.98e-05,
      "loss": 0.0701,
      "step": 400
    },
    {
      "epoch": 1.0224438902743143,
      "grad_norm": 0.8197771310806274,
      "learning_rate": 8.18e-05,
      "loss": 0.0721,
      "step": 410
    },
    {
      "epoch": 1.0473815461346634,
      "grad_norm": 0.8987151384353638,
      "learning_rate": 8.38e-05,
      "loss": 0.0677,
      "step": 420
    },
    {
      "epoch": 1.0723192019950125,
      "grad_norm": 0.7855445742607117,
      "learning_rate": 8.58e-05,
      "loss": 0.0763,
      "step": 430
    },
    {
      "epoch": 1.0972568578553616,
      "grad_norm": 0.6521250605583191,
      "learning_rate": 8.78e-05,
      "loss": 0.0699,
      "step": 440
    },
    {
      "epoch": 1.1221945137157108,
      "grad_norm": 0.9570059776306152,
      "learning_rate": 8.98e-05,
      "loss": 0.076,
      "step": 450
    },
    {
      "epoch": 1.14713216957606,
      "grad_norm": 0.7695994973182678,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.0761,
      "step": 460
    },
    {
      "epoch": 1.172069825436409,
      "grad_norm": 0.5223149657249451,
      "learning_rate": 9.38e-05,
      "loss": 0.0775,
      "step": 470
    },
    {
      "epoch": 1.1970074812967582,
      "grad_norm": 1.0341119766235352,
      "learning_rate": 9.58e-05,
      "loss": 0.0735,
      "step": 480
    },
    {
      "epoch": 1.2219451371571073,
      "grad_norm": 0.8444030284881592,
      "learning_rate": 9.78e-05,
      "loss": 0.0702,
      "step": 490
    },
    {
      "epoch": 1.2468827930174564,
      "grad_norm": 0.7774174213409424,
      "learning_rate": 9.98e-05,
      "loss": 0.0674,
      "step": 500
    },
    {
      "epoch": 1.2718204488778055,
      "grad_norm": 0.7007724046707153,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.0728,
      "step": 510
    },
    {
      "epoch": 1.2967581047381547,
      "grad_norm": 0.6850026249885559,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.0614,
      "step": 520
    },
    {
      "epoch": 1.3216957605985038,
      "grad_norm": 0.6630402207374573,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.0654,
      "step": 530
    },
    {
      "epoch": 1.346633416458853,
      "grad_norm": 0.8697960376739502,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.0657,
      "step": 540
    },
    {
      "epoch": 1.371571072319202,
      "grad_norm": 0.8383796811103821,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.0612,
      "step": 550
    },
    {
      "epoch": 1.3965087281795512,
      "grad_norm": 0.896980881690979,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.0587,
      "step": 560
    },
    {
      "epoch": 1.4214463840399003,
      "grad_norm": 0.5838524103164673,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.0551,
      "step": 570
    },
    {
      "epoch": 1.4463840399002494,
      "grad_norm": 0.9347367286682129,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.0583,
      "step": 580
    },
    {
      "epoch": 1.4713216957605986,
      "grad_norm": 0.8938815593719482,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.0576,
      "step": 590
    },
    {
      "epoch": 1.4962593516209477,
      "grad_norm": 0.7539616823196411,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.0586,
      "step": 600
    },
    {
      "epoch": 1.5211970074812968,
      "grad_norm": 0.6478615403175354,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.0636,
      "step": 610
    },
    {
      "epoch": 1.546134663341646,
      "grad_norm": 0.5171212553977966,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.056,
      "step": 620
    },
    {
      "epoch": 1.571072319201995,
      "grad_norm": 0.4528360068798065,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.055,
      "step": 630
    },
    {
      "epoch": 1.5960099750623442,
      "grad_norm": 0.48255279660224915,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.056,
      "step": 640
    },
    {
      "epoch": 1.6209476309226933,
      "grad_norm": 0.327452689409256,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.056,
      "step": 650
    },
    {
      "epoch": 1.6458852867830425,
      "grad_norm": 0.5360577702522278,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.0559,
      "step": 660
    },
    {
      "epoch": 1.6708229426433916,
      "grad_norm": 0.529985785484314,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.062,
      "step": 670
    },
    {
      "epoch": 1.6957605985037407,
      "grad_norm": 0.9172735214233398,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.0572,
      "step": 680
    },
    {
      "epoch": 1.7206982543640899,
      "grad_norm": 0.621444821357727,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.0607,
      "step": 690
    },
    {
      "epoch": 1.745635910224439,
      "grad_norm": 0.9494289755821228,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.0624,
      "step": 700
    },
    {
      "epoch": 1.770573566084788,
      "grad_norm": 0.6422003507614136,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.065,
      "step": 710
    },
    {
      "epoch": 1.7955112219451372,
      "grad_norm": 0.7670819163322449,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.0539,
      "step": 720
    },
    {
      "epoch": 1.8204488778054864,
      "grad_norm": 0.4186156392097473,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.0571,
      "step": 730
    },
    {
      "epoch": 1.8453865336658355,
      "grad_norm": 0.35236841440200806,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.046,
      "step": 740
    },
    {
      "epoch": 1.8703241895261846,
      "grad_norm": 0.4584829807281494,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.0596,
      "step": 750
    },
    {
      "epoch": 1.8952618453865338,
      "grad_norm": 0.763542115688324,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.0493,
      "step": 760
    },
    {
      "epoch": 1.9201995012468829,
      "grad_norm": 0.6167883276939392,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.0475,
      "step": 770
    },
    {
      "epoch": 1.945137157107232,
      "grad_norm": 0.6490591764450073,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.0557,
      "step": 780
    },
    {
      "epoch": 1.9700748129675811,
      "grad_norm": 0.47041717171669006,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.0534,
      "step": 790
    },
    {
      "epoch": 1.9950124688279303,
      "grad_norm": 0.6531452536582947,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.0529,
      "step": 800
    },
    {
      "epoch": 2.0199501246882794,
      "grad_norm": 0.5055134892463684,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.0558,
      "step": 810
    },
    {
      "epoch": 2.0448877805486285,
      "grad_norm": 0.457332581281662,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.0536,
      "step": 820
    },
    {
      "epoch": 2.0698254364089776,
      "grad_norm": 0.7614457011222839,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.0555,
      "step": 830
    },
    {
      "epoch": 2.0947630922693268,
      "grad_norm": 0.5162164568901062,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.0459,
      "step": 840
    },
    {
      "epoch": 2.119700748129676,
      "grad_norm": 0.5172446370124817,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.0574,
      "step": 850
    },
    {
      "epoch": 2.144638403990025,
      "grad_norm": 0.3654800355434418,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.0473,
      "step": 860
    },
    {
      "epoch": 2.169576059850374,
      "grad_norm": 0.566642165184021,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.0518,
      "step": 870
    },
    {
      "epoch": 2.1945137157107233,
      "grad_norm": 0.26897284388542175,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.0422,
      "step": 880
    },
    {
      "epoch": 2.2194513715710724,
      "grad_norm": 0.4064438045024872,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.0454,
      "step": 890
    },
    {
      "epoch": 2.2443890274314215,
      "grad_norm": 0.7141358256340027,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.0555,
      "step": 900
    },
    {
      "epoch": 2.2693266832917707,
      "grad_norm": 0.5871822834014893,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.0447,
      "step": 910
    },
    {
      "epoch": 2.29426433915212,
      "grad_norm": 0.4652409553527832,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.0456,
      "step": 920
    },
    {
      "epoch": 2.319201995012469,
      "grad_norm": 0.5235645771026611,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.0445,
      "step": 930
    },
    {
      "epoch": 2.344139650872818,
      "grad_norm": 0.49978169798851013,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.0476,
      "step": 940
    },
    {
      "epoch": 2.369077306733167,
      "grad_norm": 0.3671029508113861,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.0522,
      "step": 950
    },
    {
      "epoch": 2.3940149625935163,
      "grad_norm": 0.30070266127586365,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.0458,
      "step": 960
    },
    {
      "epoch": 2.4189526184538654,
      "grad_norm": 0.6850016713142395,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.0452,
      "step": 970
    },
    {
      "epoch": 2.4438902743142146,
      "grad_norm": 0.6264611482620239,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.0447,
      "step": 980
    },
    {
      "epoch": 2.4688279301745637,
      "grad_norm": 0.4290107488632202,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.0471,
      "step": 990
    },
    {
      "epoch": 2.493765586034913,
      "grad_norm": 0.4855428636074066,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.0475,
      "step": 1000
    },
    {
      "epoch": 2.518703241895262,
      "grad_norm": 0.4431631565093994,
      "learning_rate": 9.929335307895689e-05,
      "loss": 0.0427,
      "step": 1010
    },
    {
      "epoch": 2.543640897755611,
      "grad_norm": 0.48978525400161743,
      "learning_rate": 9.926538309841839e-05,
      "loss": 0.0429,
      "step": 1020
    },
    {
      "epoch": 2.56857855361596,
      "grad_norm": 0.43295225501060486,
      "learning_rate": 9.923687435954334e-05,
      "loss": 0.0435,
      "step": 1030
    },
    {
      "epoch": 2.5935162094763093,
      "grad_norm": 0.4908277690410614,
      "learning_rate": 9.920782717409873e-05,
      "loss": 0.0453,
      "step": 1040
    },
    {
      "epoch": 2.6184538653366585,
      "grad_norm": 0.6801359057426453,
      "learning_rate": 9.917824185973994e-05,
      "loss": 0.0448,
      "step": 1050
    },
    {
      "epoch": 2.6433915211970076,
      "grad_norm": 0.6601159572601318,
      "learning_rate": 9.914811874000723e-05,
      "loss": 0.05,
      "step": 1060
    },
    {
      "epoch": 2.6683291770573567,
      "grad_norm": 0.36734864115715027,
      "learning_rate": 9.911745814432218e-05,
      "loss": 0.0491,
      "step": 1070
    },
    {
      "epoch": 2.693266832917706,
      "grad_norm": 0.6162293553352356,
      "learning_rate": 9.90862604079842e-05,
      "loss": 0.0507,
      "step": 1080
    },
    {
      "epoch": 2.718204488778055,
      "grad_norm": 0.7145102024078369,
      "learning_rate": 9.90545258721667e-05,
      "loss": 0.0364,
      "step": 1090
    },
    {
      "epoch": 2.743142144638404,
      "grad_norm": 0.5999926328659058,
      "learning_rate": 9.90222548839135e-05,
      "loss": 0.0424,
      "step": 1100
    },
    {
      "epoch": 2.7680798004987532,
      "grad_norm": 0.5250579118728638,
      "learning_rate": 9.898944779613495e-05,
      "loss": 0.043,
      "step": 1110
    },
    {
      "epoch": 2.7930174563591024,
      "grad_norm": 0.5992938280105591,
      "learning_rate": 9.89561049676041e-05,
      "loss": 0.0438,
      "step": 1120
    },
    {
      "epoch": 2.8179551122194515,
      "grad_norm": 0.5858743786811829,
      "learning_rate": 9.89222267629528e-05,
      "loss": 0.0484,
      "step": 1130
    },
    {
      "epoch": 2.8428927680798006,
      "grad_norm": 0.4575258493423462,
      "learning_rate": 9.888781355266763e-05,
      "loss": 0.0459,
      "step": 1140
    },
    {
      "epoch": 2.8678304239401498,
      "grad_norm": 0.3843657970428467,
      "learning_rate": 9.885286571308598e-05,
      "loss": 0.0487,
      "step": 1150
    },
    {
      "epoch": 2.892768079800499,
      "grad_norm": 0.3950599133968353,
      "learning_rate": 9.881738362639182e-05,
      "loss": 0.0391,
      "step": 1160
    },
    {
      "epoch": 2.917705735660848,
      "grad_norm": 0.5489752888679504,
      "learning_rate": 9.878136768061154e-05,
      "loss": 0.0469,
      "step": 1170
    },
    {
      "epoch": 2.942643391521197,
      "grad_norm": 0.4432223439216614,
      "learning_rate": 9.874481826960979e-05,
      "loss": 0.0418,
      "step": 1180
    },
    {
      "epoch": 2.9675810473815463,
      "grad_norm": 0.4217780530452728,
      "learning_rate": 9.870773579308503e-05,
      "loss": 0.0409,
      "step": 1190
    },
    {
      "epoch": 2.9925187032418954,
      "grad_norm": 0.5071325302124023,
      "learning_rate": 9.867012065656533e-05,
      "loss": 0.0464,
      "step": 1200
    },
    {
      "epoch": 3.0174563591022445,
      "grad_norm": 0.7415447235107422,
      "learning_rate": 9.863197327140376e-05,
      "loss": 0.0427,
      "step": 1210
    },
    {
      "epoch": 3.0423940149625937,
      "grad_norm": 0.6217844486236572,
      "learning_rate": 9.859329405477403e-05,
      "loss": 0.0443,
      "step": 1220
    },
    {
      "epoch": 3.067331670822943,
      "grad_norm": 0.29709821939468384,
      "learning_rate": 9.855408342966585e-05,
      "loss": 0.0482,
      "step": 1230
    },
    {
      "epoch": 3.092269326683292,
      "grad_norm": 0.7090349197387695,
      "learning_rate": 9.851434182488033e-05,
      "loss": 0.042,
      "step": 1240
    },
    {
      "epoch": 3.117206982543641,
      "grad_norm": 0.6397743225097656,
      "learning_rate": 9.84740696750253e-05,
      "loss": 0.0475,
      "step": 1250
    },
    {
      "epoch": 3.14214463840399,
      "grad_norm": 0.3680868148803711,
      "learning_rate": 9.843326742051055e-05,
      "loss": 0.048,
      "step": 1260
    },
    {
      "epoch": 3.1670822942643393,
      "grad_norm": 0.4650847613811493,
      "learning_rate": 9.839193550754297e-05,
      "loss": 0.0393,
      "step": 1270
    },
    {
      "epoch": 3.1920199501246884,
      "grad_norm": 0.2799277603626251,
      "learning_rate": 9.835007438812177e-05,
      "loss": 0.0403,
      "step": 1280
    },
    {
      "epoch": 3.2169576059850375,
      "grad_norm": 0.32110607624053955,
      "learning_rate": 9.830768452003341e-05,
      "loss": 0.0465,
      "step": 1290
    },
    {
      "epoch": 3.2418952618453867,
      "grad_norm": 0.44318118691444397,
      "learning_rate": 9.826476636684671e-05,
      "loss": 0.0445,
      "step": 1300
    },
    {
      "epoch": 3.266832917705736,
      "grad_norm": 0.3606151342391968,
      "learning_rate": 9.822132039790773e-05,
      "loss": 0.0426,
      "step": 1310
    },
    {
      "epoch": 3.291770573566085,
      "grad_norm": 0.4466325342655182,
      "learning_rate": 9.817734708833461e-05,
      "loss": 0.0418,
      "step": 1320
    },
    {
      "epoch": 3.316708229426434,
      "grad_norm": 0.5970152020454407,
      "learning_rate": 9.813284691901243e-05,
      "loss": 0.0422,
      "step": 1330
    },
    {
      "epoch": 3.341645885286783,
      "grad_norm": 0.5823197960853577,
      "learning_rate": 9.808782037658792e-05,
      "loss": 0.0449,
      "step": 1340
    },
    {
      "epoch": 3.3665835411471323,
      "grad_norm": 0.43264061212539673,
      "learning_rate": 9.804226795346411e-05,
      "loss": 0.0433,
      "step": 1350
    },
    {
      "epoch": 3.3915211970074814,
      "grad_norm": 0.5282667279243469,
      "learning_rate": 9.799619014779503e-05,
      "loss": 0.0406,
      "step": 1360
    },
    {
      "epoch": 3.4164588528678306,
      "grad_norm": 0.49149250984191895,
      "learning_rate": 9.794958746348013e-05,
      "loss": 0.0341,
      "step": 1370
    },
    {
      "epoch": 3.4413965087281797,
      "grad_norm": 0.6064574718475342,
      "learning_rate": 9.790246041015896e-05,
      "loss": 0.0402,
      "step": 1380
    },
    {
      "epoch": 3.466334164588529,
      "grad_norm": 0.6650893688201904,
      "learning_rate": 9.785480950320538e-05,
      "loss": 0.0366,
      "step": 1390
    },
    {
      "epoch": 3.491271820448878,
      "grad_norm": 0.5070934891700745,
      "learning_rate": 9.78066352637221e-05,
      "loss": 0.041,
      "step": 1400
    },
    {
      "epoch": 3.516209476309227,
      "grad_norm": 0.5912305116653442,
      "learning_rate": 9.775793821853488e-05,
      "loss": 0.0355,
      "step": 1410
    },
    {
      "epoch": 3.541147132169576,
      "grad_norm": 0.3551137447357178,
      "learning_rate": 9.77087189001868e-05,
      "loss": 0.0385,
      "step": 1420
    },
    {
      "epoch": 3.5660847880299253,
      "grad_norm": 0.40974414348602295,
      "learning_rate": 9.765897784693243e-05,
      "loss": 0.0399,
      "step": 1430
    },
    {
      "epoch": 3.5910224438902745,
      "grad_norm": 0.48432958126068115,
      "learning_rate": 9.760871560273197e-05,
      "loss": 0.0419,
      "step": 1440
    },
    {
      "epoch": 3.6159600997506236,
      "grad_norm": 0.6076517701148987,
      "learning_rate": 9.755793271724526e-05,
      "loss": 0.0407,
      "step": 1450
    },
    {
      "epoch": 3.6408977556109727,
      "grad_norm": 0.38681232929229736,
      "learning_rate": 9.750662974582584e-05,
      "loss": 0.0403,
      "step": 1460
    },
    {
      "epoch": 3.665835411471322,
      "grad_norm": 0.22687983512878418,
      "learning_rate": 9.745480724951473e-05,
      "loss": 0.0378,
      "step": 1470
    },
    {
      "epoch": 3.690773067331671,
      "grad_norm": 0.40982744097709656,
      "learning_rate": 9.740246579503447e-05,
      "loss": 0.044,
      "step": 1480
    },
    {
      "epoch": 3.71571072319202,
      "grad_norm": 0.42715805768966675,
      "learning_rate": 9.734960595478284e-05,
      "loss": 0.0356,
      "step": 1490
    },
    {
      "epoch": 3.7406483790523692,
      "grad_norm": 0.3974098861217499,
      "learning_rate": 9.729622830682657e-05,
      "loss": 0.0417,
      "step": 1500
    },
    {
      "epoch": 3.765586034912718,
      "grad_norm": 0.45194122195243835,
      "learning_rate": 9.724233343489504e-05,
      "loss": 0.0476,
      "step": 1510
    },
    {
      "epoch": 3.7905236907730675,
      "grad_norm": 0.49544402956962585,
      "learning_rate": 9.718792192837396e-05,
      "loss": 0.0364,
      "step": 1520
    },
    {
      "epoch": 3.815461346633416,
      "grad_norm": 0.5881789326667786,
      "learning_rate": 9.713299438229886e-05,
      "loss": 0.0432,
      "step": 1530
    },
    {
      "epoch": 3.8403990024937658,
      "grad_norm": 0.40502801537513733,
      "learning_rate": 9.707755139734855e-05,
      "loss": 0.0455,
      "step": 1540
    },
    {
      "epoch": 3.8653366583541144,
      "grad_norm": 0.5270668268203735,
      "learning_rate": 9.702159357983866e-05,
      "loss": 0.0468,
      "step": 1550
    },
    {
      "epoch": 3.890274314214464,
      "grad_norm": 0.34116536378860474,
      "learning_rate": 9.696512154171492e-05,
      "loss": 0.0376,
      "step": 1560
    },
    {
      "epoch": 3.9152119700748127,
      "grad_norm": 0.4720582067966461,
      "learning_rate": 9.690813590054645e-05,
      "loss": 0.0396,
      "step": 1570
    },
    {
      "epoch": 3.9401496259351623,
      "grad_norm": 0.4505358636379242,
      "learning_rate": 9.685063727951914e-05,
      "loss": 0.0381,
      "step": 1580
    },
    {
      "epoch": 3.965087281795511,
      "grad_norm": 0.3258936405181885,
      "learning_rate": 9.679262630742865e-05,
      "loss": 0.0427,
      "step": 1590
    },
    {
      "epoch": 3.9900249376558605,
      "grad_norm": 0.5124104619026184,
      "learning_rate": 9.673410361867373e-05,
      "loss": 0.0404,
      "step": 1600
    },
    {
      "epoch": 4.014962593516209,
      "grad_norm": 0.4145730435848236,
      "learning_rate": 9.667506985324909e-05,
      "loss": 0.0369,
      "step": 1610
    },
    {
      "epoch": 4.039900249376559,
      "grad_norm": 0.31593286991119385,
      "learning_rate": 9.661552565673855e-05,
      "loss": 0.0364,
      "step": 1620
    },
    {
      "epoch": 4.0648379052369075,
      "grad_norm": 0.39038410782814026,
      "learning_rate": 9.655547168030789e-05,
      "loss": 0.0372,
      "step": 1630
    },
    {
      "epoch": 4.089775561097257,
      "grad_norm": 0.32141461968421936,
      "learning_rate": 9.649490858069777e-05,
      "loss": 0.0368,
      "step": 1640
    },
    {
      "epoch": 4.114713216957606,
      "grad_norm": 0.3261277377605438,
      "learning_rate": 9.643383702021658e-05,
      "loss": 0.0406,
      "step": 1650
    },
    {
      "epoch": 4.139650872817955,
      "grad_norm": 0.4306156039237976,
      "learning_rate": 9.637225766673307e-05,
      "loss": 0.0352,
      "step": 1660
    },
    {
      "epoch": 4.164588528678304,
      "grad_norm": 0.39495402574539185,
      "learning_rate": 9.631017119366922e-05,
      "loss": 0.038,
      "step": 1670
    },
    {
      "epoch": 4.1895261845386536,
      "grad_norm": 0.5219168663024902,
      "learning_rate": 9.624757827999273e-05,
      "loss": 0.0398,
      "step": 1680
    },
    {
      "epoch": 4.214463840399002,
      "grad_norm": 0.31842708587646484,
      "learning_rate": 9.618447961020971e-05,
      "loss": 0.038,
      "step": 1690
    },
    {
      "epoch": 4.239401496259352,
      "grad_norm": 0.3704530894756317,
      "learning_rate": 9.612087587435707e-05,
      "loss": 0.0367,
      "step": 1700
    },
    {
      "epoch": 4.2643391521197005,
      "grad_norm": 0.2946793735027313,
      "learning_rate": 9.605676776799508e-05,
      "loss": 0.0325,
      "step": 1710
    },
    {
      "epoch": 4.28927680798005,
      "grad_norm": 0.3459037244319916,
      "learning_rate": 9.599215599219973e-05,
      "loss": 0.0323,
      "step": 1720
    },
    {
      "epoch": 4.314214463840399,
      "grad_norm": 0.5248053669929504,
      "learning_rate": 9.592704125355505e-05,
      "loss": 0.0388,
      "step": 1730
    },
    {
      "epoch": 4.339152119700748,
      "grad_norm": 0.3273724913597107,
      "learning_rate": 9.586142426414538e-05,
      "loss": 0.0353,
      "step": 1740
    },
    {
      "epoch": 4.364089775561097,
      "grad_norm": 0.4606214165687561,
      "learning_rate": 9.57953057415476e-05,
      "loss": 0.0383,
      "step": 1750
    },
    {
      "epoch": 4.389027431421447,
      "grad_norm": 0.4063427448272705,
      "learning_rate": 9.572868640882328e-05,
      "loss": 0.0383,
      "step": 1760
    },
    {
      "epoch": 4.413965087281795,
      "grad_norm": 0.4495067894458771,
      "learning_rate": 9.56615669945108e-05,
      "loss": 0.0352,
      "step": 1770
    },
    {
      "epoch": 4.438902743142145,
      "grad_norm": 0.3874441385269165,
      "learning_rate": 9.55939482326173e-05,
      "loss": 0.0375,
      "step": 1780
    },
    {
      "epoch": 4.4638403990024935,
      "grad_norm": 0.23679353296756744,
      "learning_rate": 9.552583086261069e-05,
      "loss": 0.0387,
      "step": 1790
    },
    {
      "epoch": 4.488778054862843,
      "grad_norm": 0.4244563579559326,
      "learning_rate": 9.545721562941168e-05,
      "loss": 0.0387,
      "step": 1800
    },
    {
      "epoch": 4.513715710723192,
      "grad_norm": 0.30219611525535583,
      "learning_rate": 9.538810328338543e-05,
      "loss": 0.0359,
      "step": 1810
    },
    {
      "epoch": 4.538653366583541,
      "grad_norm": 0.44261634349823,
      "learning_rate": 9.531849458033349e-05,
      "loss": 0.0375,
      "step": 1820
    },
    {
      "epoch": 4.56359102244389,
      "grad_norm": 0.3280412554740906,
      "learning_rate": 9.524839028148547e-05,
      "loss": 0.0412,
      "step": 1830
    },
    {
      "epoch": 4.58852867830424,
      "grad_norm": 0.49666354060173035,
      "learning_rate": 9.517779115349077e-05,
      "loss": 0.0351,
      "step": 1840
    },
    {
      "epoch": 4.613466334164588,
      "grad_norm": 0.38438352942466736,
      "learning_rate": 9.510669796841014e-05,
      "loss": 0.0356,
      "step": 1850
    },
    {
      "epoch": 4.638403990024938,
      "grad_norm": 0.2703809142112732,
      "learning_rate": 9.503511150370727e-05,
      "loss": 0.0419,
      "step": 1860
    },
    {
      "epoch": 4.6633416458852865,
      "grad_norm": 0.3769274950027466,
      "learning_rate": 9.496303254224024e-05,
      "loss": 0.0384,
      "step": 1870
    },
    {
      "epoch": 4.688279301745636,
      "grad_norm": 0.43063125014305115,
      "learning_rate": 9.489046187225306e-05,
      "loss": 0.0371,
      "step": 1880
    },
    {
      "epoch": 4.713216957605985,
      "grad_norm": 0.32541826367378235,
      "learning_rate": 9.481740028736692e-05,
      "loss": 0.0343,
      "step": 1890
    },
    {
      "epoch": 4.738154613466334,
      "grad_norm": 0.38114655017852783,
      "learning_rate": 9.474384858657164e-05,
      "loss": 0.0402,
      "step": 1900
    },
    {
      "epoch": 4.763092269326683,
      "grad_norm": 0.27604904770851135,
      "learning_rate": 9.466980757421679e-05,
      "loss": 0.0367,
      "step": 1910
    },
    {
      "epoch": 4.788029925187033,
      "grad_norm": 0.36325347423553467,
      "learning_rate": 9.459527806000305e-05,
      "loss": 0.0351,
      "step": 1920
    },
    {
      "epoch": 4.812967581047381,
      "grad_norm": 0.39804601669311523,
      "learning_rate": 9.452026085897325e-05,
      "loss": 0.0361,
      "step": 1930
    },
    {
      "epoch": 4.837905236907731,
      "grad_norm": 0.4743953347206116,
      "learning_rate": 9.444475679150348e-05,
      "loss": 0.0358,
      "step": 1940
    },
    {
      "epoch": 4.86284289276808,
      "grad_norm": 0.35019317269325256,
      "learning_rate": 9.436876668329411e-05,
      "loss": 0.037,
      "step": 1950
    },
    {
      "epoch": 4.887780548628429,
      "grad_norm": 0.41721805930137634,
      "learning_rate": 9.429229136536079e-05,
      "loss": 0.0319,
      "step": 1960
    },
    {
      "epoch": 4.912718204488778,
      "grad_norm": 0.30812227725982666,
      "learning_rate": 9.421533167402534e-05,
      "loss": 0.0351,
      "step": 1970
    },
    {
      "epoch": 4.937655860349127,
      "grad_norm": 0.4882888197898865,
      "learning_rate": 9.413788845090666e-05,
      "loss": 0.0446,
      "step": 1980
    },
    {
      "epoch": 4.962593516209476,
      "grad_norm": 0.4373229444026947,
      "learning_rate": 9.405996254291136e-05,
      "loss": 0.0386,
      "step": 1990
    },
    {
      "epoch": 4.987531172069826,
      "grad_norm": 0.4061412811279297,
      "learning_rate": 9.398155480222474e-05,
      "loss": 0.0372,
      "step": 2000
    },
    {
      "epoch": 5.012468827930174,
      "grad_norm": 0.30475953221321106,
      "learning_rate": 9.390266608630128e-05,
      "loss": 0.0333,
      "step": 2010
    },
    {
      "epoch": 5.037406483790524,
      "grad_norm": 0.3979506492614746,
      "learning_rate": 9.38232972578553e-05,
      "loss": 0.0392,
      "step": 2020
    },
    {
      "epoch": 5.062344139650873,
      "grad_norm": 0.5207826495170593,
      "learning_rate": 9.374344918485164e-05,
      "loss": 0.0432,
      "step": 2030
    },
    {
      "epoch": 5.087281795511222,
      "grad_norm": 0.40674319863319397,
      "learning_rate": 9.366312274049602e-05,
      "loss": 0.043,
      "step": 2040
    },
    {
      "epoch": 5.112219451371571,
      "grad_norm": 0.4956563115119934,
      "learning_rate": 9.358231880322554e-05,
      "loss": 0.0354,
      "step": 2050
    },
    {
      "epoch": 5.13715710723192,
      "grad_norm": 0.537293016910553,
      "learning_rate": 9.350103825669916e-05,
      "loss": 0.0333,
      "step": 2060
    },
    {
      "epoch": 5.162094763092269,
      "grad_norm": 0.2519850432872772,
      "learning_rate": 9.341928198978787e-05,
      "loss": 0.0312,
      "step": 2070
    },
    {
      "epoch": 5.187032418952619,
      "grad_norm": 0.351418137550354,
      "learning_rate": 9.333705089656512e-05,
      "loss": 0.0334,
      "step": 2080
    },
    {
      "epoch": 5.211970074812967,
      "grad_norm": 0.3854059875011444,
      "learning_rate": 9.325434587629698e-05,
      "loss": 0.0325,
      "step": 2090
    },
    {
      "epoch": 5.236907730673317,
      "grad_norm": 0.38833683729171753,
      "learning_rate": 9.31711678334323e-05,
      "loss": 0.0288,
      "step": 2100
    },
    {
      "epoch": 5.261845386533666,
      "grad_norm": 0.33043190836906433,
      "learning_rate": 9.308751767759282e-05,
      "loss": 0.0316,
      "step": 2110
    },
    {
      "epoch": 5.286783042394015,
      "grad_norm": 0.3381466865539551,
      "learning_rate": 9.300339632356325e-05,
      "loss": 0.0331,
      "step": 2120
    },
    {
      "epoch": 5.311720698254364,
      "grad_norm": 0.411288321018219,
      "learning_rate": 9.291880469128124e-05,
      "loss": 0.0361,
      "step": 2130
    },
    {
      "epoch": 5.3366583541147135,
      "grad_norm": 0.4079836308956146,
      "learning_rate": 9.283374370582732e-05,
      "loss": 0.0297,
      "step": 2140
    },
    {
      "epoch": 5.361596009975062,
      "grad_norm": 0.23944659531116486,
      "learning_rate": 9.274821429741482e-05,
      "loss": 0.0402,
      "step": 2150
    },
    {
      "epoch": 5.386533665835412,
      "grad_norm": 0.34464189410209656,
      "learning_rate": 9.266221740137961e-05,
      "loss": 0.0319,
      "step": 2160
    },
    {
      "epoch": 5.41147132169576,
      "grad_norm": 0.28853949904441833,
      "learning_rate": 9.257575395817001e-05,
      "loss": 0.0301,
      "step": 2170
    },
    {
      "epoch": 5.43640897755611,
      "grad_norm": 0.428758442401886,
      "learning_rate": 9.248882491333637e-05,
      "loss": 0.0329,
      "step": 2180
    },
    {
      "epoch": 5.461346633416459,
      "grad_norm": 0.3644638657569885,
      "learning_rate": 9.240143121752076e-05,
      "loss": 0.036,
      "step": 2190
    },
    {
      "epoch": 5.486284289276808,
      "grad_norm": 0.3619435131549835,
      "learning_rate": 9.23135738264467e-05,
      "loss": 0.0359,
      "step": 2200
    },
    {
      "epoch": 5.511221945137157,
      "grad_norm": 0.2934129238128662,
      "learning_rate": 9.222525370090849e-05,
      "loss": 0.0316,
      "step": 2210
    },
    {
      "epoch": 5.5361596009975065,
      "grad_norm": 0.29304927587509155,
      "learning_rate": 9.213647180676088e-05,
      "loss": 0.0313,
      "step": 2220
    },
    {
      "epoch": 5.561097256857855,
      "grad_norm": 0.3287537693977356,
      "learning_rate": 9.204722911490846e-05,
      "loss": 0.0332,
      "step": 2230
    },
    {
      "epoch": 5.586034912718205,
      "grad_norm": 0.2560971975326538,
      "learning_rate": 9.1957526601295e-05,
      "loss": 0.0281,
      "step": 2240
    },
    {
      "epoch": 5.610972568578553,
      "grad_norm": 0.32151782512664795,
      "learning_rate": 9.186736524689281e-05,
      "loss": 0.0325,
      "step": 2250
    },
    {
      "epoch": 5.635910224438903,
      "grad_norm": 0.3118205964565277,
      "learning_rate": 9.177674603769204e-05,
      "loss": 0.032,
      "step": 2260
    },
    {
      "epoch": 5.660847880299252,
      "grad_norm": 0.4589914083480835,
      "learning_rate": 9.168566996468983e-05,
      "loss": 0.0339,
      "step": 2270
    },
    {
      "epoch": 5.685785536159601,
      "grad_norm": 0.4059724509716034,
      "learning_rate": 9.159413802387951e-05,
      "loss": 0.0345,
      "step": 2280
    },
    {
      "epoch": 5.71072319201995,
      "grad_norm": 0.42660242319107056,
      "learning_rate": 9.150215121623974e-05,
      "loss": 0.0347,
      "step": 2290
    },
    {
      "epoch": 5.7356608478802995,
      "grad_norm": 0.4508174955844879,
      "learning_rate": 9.140971054772349e-05,
      "loss": 0.0275,
      "step": 2300
    },
    {
      "epoch": 5.760598503740648,
      "grad_norm": 0.26857584714889526,
      "learning_rate": 9.131681702924713e-05,
      "loss": 0.0375,
      "step": 2310
    },
    {
      "epoch": 5.785536159600998,
      "grad_norm": 0.33610790967941284,
      "learning_rate": 9.122347167667926e-05,
      "loss": 0.0297,
      "step": 2320
    },
    {
      "epoch": 5.8104738154613464,
      "grad_norm": 0.5054598450660706,
      "learning_rate": 9.112967551082973e-05,
      "loss": 0.0359,
      "step": 2330
    },
    {
      "epoch": 5.835411471321696,
      "grad_norm": 0.5674673914909363,
      "learning_rate": 9.103542955743835e-05,
      "loss": 0.0349,
      "step": 2340
    },
    {
      "epoch": 5.860349127182045,
      "grad_norm": 0.27185484766960144,
      "learning_rate": 9.094073484716381e-05,
      "loss": 0.0332,
      "step": 2350
    },
    {
      "epoch": 5.885286783042394,
      "grad_norm": 0.3172619640827179,
      "learning_rate": 9.084559241557226e-05,
      "loss": 0.0338,
      "step": 2360
    },
    {
      "epoch": 5.910224438902743,
      "grad_norm": 0.3121204376220703,
      "learning_rate": 9.075000330312608e-05,
      "loss": 0.0282,
      "step": 2370
    },
    {
      "epoch": 5.9351620947630925,
      "grad_norm": 0.4109380543231964,
      "learning_rate": 9.065396855517253e-05,
      "loss": 0.0243,
      "step": 2380
    },
    {
      "epoch": 5.960099750623441,
      "grad_norm": 0.3287731111049652,
      "learning_rate": 9.055748922193219e-05,
      "loss": 0.0312,
      "step": 2390
    },
    {
      "epoch": 5.985037406483791,
      "grad_norm": 0.39618200063705444,
      "learning_rate": 9.046056635848761e-05,
      "loss": 0.0389,
      "step": 2400
    },
    {
      "epoch": 6.0099750623441395,
      "grad_norm": 0.3790699243545532,
      "learning_rate": 9.036320102477169e-05,
      "loss": 0.0278,
      "step": 2410
    },
    {
      "epoch": 6.034912718204489,
      "grad_norm": 0.35707733035087585,
      "learning_rate": 9.02653942855561e-05,
      "loss": 0.0321,
      "step": 2420
    },
    {
      "epoch": 6.059850374064838,
      "grad_norm": 0.3150838017463684,
      "learning_rate": 9.016714721043971e-05,
      "loss": 0.0254,
      "step": 2430
    },
    {
      "epoch": 6.084788029925187,
      "grad_norm": 0.4454396367073059,
      "learning_rate": 9.006846087383675e-05,
      "loss": 0.0323,
      "step": 2440
    },
    {
      "epoch": 6.109725685785536,
      "grad_norm": 0.4744904637336731,
      "learning_rate": 8.996933635496523e-05,
      "loss": 0.0296,
      "step": 2450
    },
    {
      "epoch": 6.134663341645886,
      "grad_norm": 0.24157115817070007,
      "learning_rate": 8.986977473783498e-05,
      "loss": 0.0313,
      "step": 2460
    },
    {
      "epoch": 6.159600997506234,
      "grad_norm": 0.3061055839061737,
      "learning_rate": 8.97697771112359e-05,
      "loss": 0.0341,
      "step": 2470
    },
    {
      "epoch": 6.184538653366584,
      "grad_norm": 0.2806146442890167,
      "learning_rate": 8.966934456872602e-05,
      "loss": 0.0342,
      "step": 2480
    },
    {
      "epoch": 6.2094763092269325,
      "grad_norm": 0.3088599145412445,
      "learning_rate": 8.95684782086195e-05,
      "loss": 0.0273,
      "step": 2490
    },
    {
      "epoch": 6.234413965087282,
      "grad_norm": 0.33661511540412903,
      "learning_rate": 8.946717913397476e-05,
      "loss": 0.0314,
      "step": 2500
    },
    {
      "epoch": 6.259351620947631,
      "grad_norm": 0.3303493559360504,
      "learning_rate": 8.93654484525822e-05,
      "loss": 0.0301,
      "step": 2510
    },
    {
      "epoch": 6.28428927680798,
      "grad_norm": 0.25623950362205505,
      "learning_rate": 8.926328727695226e-05,
      "loss": 0.0289,
      "step": 2520
    },
    {
      "epoch": 6.309226932668329,
      "grad_norm": 0.24085302650928497,
      "learning_rate": 8.916069672430319e-05,
      "loss": 0.0286,
      "step": 2530
    },
    {
      "epoch": 6.334164588528679,
      "grad_norm": 0.28363513946533203,
      "learning_rate": 8.905767791654884e-05,
      "loss": 0.0298,
      "step": 2540
    },
    {
      "epoch": 6.359102244389027,
      "grad_norm": 0.3395390212535858,
      "learning_rate": 8.895423198028638e-05,
      "loss": 0.0366,
      "step": 2550
    },
    {
      "epoch": 6.384039900249377,
      "grad_norm": 0.2700148820877075,
      "learning_rate": 8.885036004678402e-05,
      "loss": 0.0304,
      "step": 2560
    },
    {
      "epoch": 6.4089775561097255,
      "grad_norm": 0.21034905314445496,
      "learning_rate": 8.874606325196857e-05,
      "loss": 0.0325,
      "step": 2570
    },
    {
      "epoch": 6.433915211970075,
      "grad_norm": 0.39408400654792786,
      "learning_rate": 8.864134273641304e-05,
      "loss": 0.034,
      "step": 2580
    },
    {
      "epoch": 6.458852867830424,
      "grad_norm": 0.2695557177066803,
      "learning_rate": 8.853619964532427e-05,
      "loss": 0.0341,
      "step": 2590
    },
    {
      "epoch": 6.483790523690773,
      "grad_norm": 0.3011220395565033,
      "learning_rate": 8.843063512853019e-05,
      "loss": 0.0296,
      "step": 2600
    },
    {
      "epoch": 6.508728179551122,
      "grad_norm": 0.2988806664943695,
      "learning_rate": 8.832465034046749e-05,
      "loss": 0.0271,
      "step": 2610
    },
    {
      "epoch": 6.533665835411472,
      "grad_norm": 0.38008424639701843,
      "learning_rate": 8.821824644016882e-05,
      "loss": 0.0325,
      "step": 2620
    },
    {
      "epoch": 6.55860349127182,
      "grad_norm": 0.35910317301750183,
      "learning_rate": 8.811142459125019e-05,
      "loss": 0.0295,
      "step": 2630
    },
    {
      "epoch": 6.58354114713217,
      "grad_norm": 0.38358306884765625,
      "learning_rate": 8.800418596189822e-05,
      "loss": 0.0255,
      "step": 2640
    },
    {
      "epoch": 6.6084788029925186,
      "grad_norm": 0.3978181481361389,
      "learning_rate": 8.789653172485737e-05,
      "loss": 0.0282,
      "step": 2650
    },
    {
      "epoch": 6.633416458852868,
      "grad_norm": 0.44730305671691895,
      "learning_rate": 8.778846305741715e-05,
      "loss": 0.0393,
      "step": 2660
    },
    {
      "epoch": 6.658354114713217,
      "grad_norm": 0.23833191394805908,
      "learning_rate": 8.767998114139918e-05,
      "loss": 0.0338,
      "step": 2670
    },
    {
      "epoch": 6.683291770573566,
      "grad_norm": 0.5621431469917297,
      "learning_rate": 8.757108716314429e-05,
      "loss": 0.0339,
      "step": 2680
    },
    {
      "epoch": 6.708229426433915,
      "grad_norm": 0.3257071375846863,
      "learning_rate": 8.746178231349962e-05,
      "loss": 0.0285,
      "step": 2690
    },
    {
      "epoch": 6.733167082294265,
      "grad_norm": 0.4277850389480591,
      "learning_rate": 8.735206778780549e-05,
      "loss": 0.0353,
      "step": 2700
    },
    {
      "epoch": 6.758104738154613,
      "grad_norm": 0.3179050385951996,
      "learning_rate": 8.724194478588234e-05,
      "loss": 0.0326,
      "step": 2710
    },
    {
      "epoch": 6.783042394014963,
      "grad_norm": 0.41092365980148315,
      "learning_rate": 8.713141451201772e-05,
      "loss": 0.0327,
      "step": 2720
    },
    {
      "epoch": 6.807980049875312,
      "grad_norm": 0.3530631363391876,
      "learning_rate": 8.702047817495295e-05,
      "loss": 0.038,
      "step": 2730
    },
    {
      "epoch": 6.832917705735661,
      "grad_norm": 0.3354165256023407,
      "learning_rate": 8.69091369878701e-05,
      "loss": 0.0359,
      "step": 2740
    },
    {
      "epoch": 6.85785536159601,
      "grad_norm": 0.2850760519504547,
      "learning_rate": 8.679739216837849e-05,
      "loss": 0.0297,
      "step": 2750
    },
    {
      "epoch": 6.882793017456359,
      "grad_norm": 0.38652780652046204,
      "learning_rate": 8.66852449385016e-05,
      "loss": 0.0283,
      "step": 2760
    },
    {
      "epoch": 6.907730673316708,
      "grad_norm": 0.3204423487186432,
      "learning_rate": 8.657269652466356e-05,
      "loss": 0.0291,
      "step": 2770
    },
    {
      "epoch": 6.932668329177058,
      "grad_norm": 0.42647093534469604,
      "learning_rate": 8.645974815767577e-05,
      "loss": 0.0289,
      "step": 2780
    },
    {
      "epoch": 6.957605985037406,
      "grad_norm": 0.23492592573165894,
      "learning_rate": 8.634640107272351e-05,
      "loss": 0.0253,
      "step": 2790
    },
    {
      "epoch": 6.982543640897756,
      "grad_norm": 0.3873066008090973,
      "learning_rate": 8.623265650935234e-05,
      "loss": 0.0308,
      "step": 2800
    },
    {
      "epoch": 7.007481296758105,
      "grad_norm": 0.4193486273288727,
      "learning_rate": 8.611851571145456e-05,
      "loss": 0.031,
      "step": 2810
    },
    {
      "epoch": 7.032418952618454,
      "grad_norm": 0.26158273220062256,
      "learning_rate": 8.600397992725566e-05,
      "loss": 0.0244,
      "step": 2820
    },
    {
      "epoch": 7.057356608478803,
      "grad_norm": 0.3684608042240143,
      "learning_rate": 8.588905040930061e-05,
      "loss": 0.0299,
      "step": 2830
    },
    {
      "epoch": 7.082294264339152,
      "grad_norm": 0.4851345717906952,
      "learning_rate": 8.577372841444022e-05,
      "loss": 0.0336,
      "step": 2840
    },
    {
      "epoch": 7.107231920199501,
      "grad_norm": 0.41797852516174316,
      "learning_rate": 8.565801520381736e-05,
      "loss": 0.0261,
      "step": 2850
    },
    {
      "epoch": 7.132169576059851,
      "grad_norm": 0.2468748539686203,
      "learning_rate": 8.554191204285313e-05,
      "loss": 0.0292,
      "step": 2860
    },
    {
      "epoch": 7.157107231920199,
      "grad_norm": 0.379503458738327,
      "learning_rate": 8.542542020123315e-05,
      "loss": 0.0259,
      "step": 2870
    },
    {
      "epoch": 7.182044887780549,
      "grad_norm": 0.350256085395813,
      "learning_rate": 8.530854095289347e-05,
      "loss": 0.0271,
      "step": 2880
    },
    {
      "epoch": 7.206982543640898,
      "grad_norm": 0.38539424538612366,
      "learning_rate": 8.519127557600688e-05,
      "loss": 0.0252,
      "step": 2890
    },
    {
      "epoch": 7.231920199501247,
      "grad_norm": 0.5875989198684692,
      "learning_rate": 8.507362535296871e-05,
      "loss": 0.0332,
      "step": 2900
    },
    {
      "epoch": 7.256857855361596,
      "grad_norm": 0.2453652173280716,
      "learning_rate": 8.495559157038299e-05,
      "loss": 0.0335,
      "step": 2910
    },
    {
      "epoch": 7.2817955112219455,
      "grad_norm": 0.36421850323677063,
      "learning_rate": 8.483717551904823e-05,
      "loss": 0.0234,
      "step": 2920
    },
    {
      "epoch": 7.306733167082294,
      "grad_norm": 0.2878887355327606,
      "learning_rate": 8.47183784939434e-05,
      "loss": 0.0281,
      "step": 2930
    },
    {
      "epoch": 7.331670822942644,
      "grad_norm": 0.3950226902961731,
      "learning_rate": 8.459920179421374e-05,
      "loss": 0.0271,
      "step": 2940
    },
    {
      "epoch": 7.356608478802992,
      "grad_norm": 0.3496701419353485,
      "learning_rate": 8.447964672315656e-05,
      "loss": 0.029,
      "step": 2950
    },
    {
      "epoch": 7.381546134663342,
      "grad_norm": 0.30931177735328674,
      "learning_rate": 8.435971458820692e-05,
      "loss": 0.0323,
      "step": 2960
    },
    {
      "epoch": 7.406483790523691,
      "grad_norm": 0.39362427592277527,
      "learning_rate": 8.423940670092345e-05,
      "loss": 0.0272,
      "step": 2970
    },
    {
      "epoch": 7.43142144638404,
      "grad_norm": 0.32126304507255554,
      "learning_rate": 8.411872437697394e-05,
      "loss": 0.0291,
      "step": 2980
    },
    {
      "epoch": 7.456359102244389,
      "grad_norm": 0.32677528262138367,
      "learning_rate": 8.399766893612096e-05,
      "loss": 0.0255,
      "step": 2990
    },
    {
      "epoch": 7.4812967581047385,
      "grad_norm": 0.3530014753341675,
      "learning_rate": 8.38762417022074e-05,
      "loss": 0.0251,
      "step": 3000
    },
    {
      "epoch": 7.506234413965087,
      "grad_norm": 0.26059940457344055,
      "learning_rate": 8.375444400314204e-05,
      "loss": 0.0271,
      "step": 3010
    },
    {
      "epoch": 7.531172069825437,
      "grad_norm": 0.30646204948425293,
      "learning_rate": 8.3632277170885e-05,
      "loss": 0.0313,
      "step": 3020
    },
    {
      "epoch": 7.556109725685785,
      "grad_norm": 0.5425297617912292,
      "learning_rate": 8.350974254143318e-05,
      "loss": 0.0258,
      "step": 3030
    },
    {
      "epoch": 7.581047381546135,
      "grad_norm": 0.20844897627830505,
      "learning_rate": 8.338684145480566e-05,
      "loss": 0.0235,
      "step": 3040
    },
    {
      "epoch": 7.605985037406484,
      "grad_norm": 0.21480798721313477,
      "learning_rate": 8.326357525502904e-05,
      "loss": 0.0243,
      "step": 3050
    },
    {
      "epoch": 7.630922693266833,
      "grad_norm": 0.2983399033546448,
      "learning_rate": 8.313994529012273e-05,
      "loss": 0.0317,
      "step": 3060
    },
    {
      "epoch": 7.655860349127182,
      "grad_norm": 0.4282253682613373,
      "learning_rate": 8.301595291208422e-05,
      "loss": 0.0307,
      "step": 3070
    },
    {
      "epoch": 7.6807980049875315,
      "grad_norm": 0.22319430112838745,
      "learning_rate": 8.289159947687427e-05,
      "loss": 0.0233,
      "step": 3080
    },
    {
      "epoch": 7.70573566084788,
      "grad_norm": 0.30499720573425293,
      "learning_rate": 8.276688634440216e-05,
      "loss": 0.0313,
      "step": 3090
    },
    {
      "epoch": 7.73067331670823,
      "grad_norm": 0.40973371267318726,
      "learning_rate": 8.26418148785107e-05,
      "loss": 0.0292,
      "step": 3100
    },
    {
      "epoch": 7.7556109725685785,
      "grad_norm": 0.22653548419475555,
      "learning_rate": 8.251638644696141e-05,
      "loss": 0.033,
      "step": 3110
    },
    {
      "epoch": 7.780548628428928,
      "grad_norm": 0.2702227234840393,
      "learning_rate": 8.23906024214195e-05,
      "loss": 0.03,
      "step": 3120
    },
    {
      "epoch": 7.805486284289277,
      "grad_norm": 0.4388284385204315,
      "learning_rate": 8.226446417743897e-05,
      "loss": 0.03,
      "step": 3130
    },
    {
      "epoch": 7.830423940149626,
      "grad_norm": 0.3754916489124298,
      "learning_rate": 8.213797309444742e-05,
      "loss": 0.0334,
      "step": 3140
    },
    {
      "epoch": 7.855361596009975,
      "grad_norm": 0.38858163356781006,
      "learning_rate": 8.201113055573105e-05,
      "loss": 0.0278,
      "step": 3150
    },
    {
      "epoch": 7.8802992518703245,
      "grad_norm": 0.32206445932388306,
      "learning_rate": 8.188393794841958e-05,
      "loss": 0.0255,
      "step": 3160
    },
    {
      "epoch": 7.905236907730673,
      "grad_norm": 0.23205003142356873,
      "learning_rate": 8.175639666347094e-05,
      "loss": 0.0282,
      "step": 3170
    },
    {
      "epoch": 7.930174563591023,
      "grad_norm": 0.44583043456077576,
      "learning_rate": 8.162850809565623e-05,
      "loss": 0.0288,
      "step": 3180
    },
    {
      "epoch": 7.9551122194513715,
      "grad_norm": 0.27936458587646484,
      "learning_rate": 8.150027364354431e-05,
      "loss": 0.0279,
      "step": 3190
    },
    {
      "epoch": 7.980049875311721,
      "grad_norm": 0.2818054258823395,
      "learning_rate": 8.137169470948662e-05,
      "loss": 0.0329,
      "step": 3200
    },
    {
      "epoch": 8.00498753117207,
      "grad_norm": 0.25153425335884094,
      "learning_rate": 8.124277269960179e-05,
      "loss": 0.0282,
      "step": 3210
    },
    {
      "epoch": 8.029925187032418,
      "grad_norm": 0.525701105594635,
      "learning_rate": 8.111350902376023e-05,
      "loss": 0.0287,
      "step": 3220
    },
    {
      "epoch": 8.054862842892769,
      "grad_norm": 0.3765043318271637,
      "learning_rate": 8.098390509556883e-05,
      "loss": 0.0297,
      "step": 3230
    },
    {
      "epoch": 8.079800498753118,
      "grad_norm": 0.28264981508255005,
      "learning_rate": 8.085396233235536e-05,
      "loss": 0.0214,
      "step": 3240
    },
    {
      "epoch": 8.104738154613466,
      "grad_norm": 0.3153613805770874,
      "learning_rate": 8.072368215515306e-05,
      "loss": 0.023,
      "step": 3250
    },
    {
      "epoch": 8.129675810473815,
      "grad_norm": 0.2339533269405365,
      "learning_rate": 8.059306598868506e-05,
      "loss": 0.038,
      "step": 3260
    },
    {
      "epoch": 8.154613466334165,
      "grad_norm": 0.31046655774116516,
      "learning_rate": 8.046211526134888e-05,
      "loss": 0.0218,
      "step": 3270
    },
    {
      "epoch": 8.179551122194514,
      "grad_norm": 0.2567477226257324,
      "learning_rate": 8.033083140520065e-05,
      "loss": 0.0278,
      "step": 3280
    },
    {
      "epoch": 8.204488778054863,
      "grad_norm": 0.4400595426559448,
      "learning_rate": 8.019921585593962e-05,
      "loss": 0.0286,
      "step": 3290
    },
    {
      "epoch": 8.229426433915211,
      "grad_norm": 0.225955069065094,
      "learning_rate": 8.006727005289232e-05,
      "loss": 0.0239,
      "step": 3300
    },
    {
      "epoch": 8.254364089775562,
      "grad_norm": 0.27724096179008484,
      "learning_rate": 7.993499543899692e-05,
      "loss": 0.024,
      "step": 3310
    },
    {
      "epoch": 8.27930174563591,
      "grad_norm": 0.22674660384655,
      "learning_rate": 7.980239346078742e-05,
      "loss": 0.0253,
      "step": 3320
    },
    {
      "epoch": 8.30423940149626,
      "grad_norm": 0.24300932884216309,
      "learning_rate": 7.966946556837778e-05,
      "loss": 0.0229,
      "step": 3330
    },
    {
      "epoch": 8.329177057356608,
      "grad_norm": 0.2596067190170288,
      "learning_rate": 7.953621321544616e-05,
      "loss": 0.0299,
      "step": 3340
    },
    {
      "epoch": 8.354114713216958,
      "grad_norm": 0.3651449680328369,
      "learning_rate": 7.940263785921896e-05,
      "loss": 0.0303,
      "step": 3350
    },
    {
      "epoch": 8.379052369077307,
      "grad_norm": 0.29636484384536743,
      "learning_rate": 7.926874096045482e-05,
      "loss": 0.0267,
      "step": 3360
    },
    {
      "epoch": 8.403990024937656,
      "grad_norm": 0.38751697540283203,
      "learning_rate": 7.913452398342881e-05,
      "loss": 0.035,
      "step": 3370
    },
    {
      "epoch": 8.428927680798004,
      "grad_norm": 0.39694109559059143,
      "learning_rate": 7.89999883959163e-05,
      "loss": 0.0271,
      "step": 3380
    },
    {
      "epoch": 8.453865336658355,
      "grad_norm": 0.21990922093391418,
      "learning_rate": 7.886513566917687e-05,
      "loss": 0.0285,
      "step": 3390
    },
    {
      "epoch": 8.478802992518704,
      "grad_norm": 0.2724454700946808,
      "learning_rate": 7.872996727793838e-05,
      "loss": 0.0258,
      "step": 3400
    },
    {
      "epoch": 8.503740648379052,
      "grad_norm": 0.19968266785144806,
      "learning_rate": 7.859448470038069e-05,
      "loss": 0.0309,
      "step": 3410
    },
    {
      "epoch": 8.528678304239401,
      "grad_norm": 0.46485111117362976,
      "learning_rate": 7.845868941811956e-05,
      "loss": 0.0265,
      "step": 3420
    },
| { | |
| "epoch": 8.553615960099751, | |
| "grad_norm": 0.40101081132888794, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.0294, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 8.5785536159601, | |
| "grad_norm": 0.41232603788375854, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0241, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 8.603491271820449, | |
| "grad_norm": 0.30246806144714355, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0297, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 8.628428927680797, | |
| "grad_norm": 0.3900691866874695, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0335, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 8.653366583541148, | |
| "grad_norm": 0.24749229848384857, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.025, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 8.678304239401497, | |
| "grad_norm": 0.4658712148666382, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.0256, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 8.703241895261845, | |
| "grad_norm": 0.24069152772426605, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.0266, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 8.728179551122194, | |
| "grad_norm": 0.43368878960609436, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.0357, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 8.753117206982544, | |
| "grad_norm": 0.23905228078365326, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0295, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 8.778054862842893, | |
| "grad_norm": 0.21941429376602173, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.0266, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 8.802992518703242, | |
| "grad_norm": 0.34031426906585693, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.0263, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 8.82793017456359, | |
| "grad_norm": 0.4775576889514923, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.0287, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 8.85286783042394, | |
| "grad_norm": 0.35605284571647644, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0312, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 8.87780548628429, | |
| "grad_norm": 0.6605315804481506, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0303, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 8.902743142144638, | |
| "grad_norm": 0.47203585505485535, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.0259, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 8.927680798004987, | |
| "grad_norm": 0.3850856423377991, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0281, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 8.952618453865338, | |
| "grad_norm": 0.231129452586174, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0305, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 8.977556109725686, | |
| "grad_norm": 0.23916848003864288, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0277, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 9.002493765586035, | |
| "grad_norm": 0.2950526177883148, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0235, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 9.027431421446384, | |
| "grad_norm": 0.23984360694885254, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.023, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 9.052369077306734, | |
| "grad_norm": 0.2650838792324066, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.0261, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 9.077306733167083, | |
| "grad_norm": 0.22623947262763977, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0283, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 9.102244389027431, | |
| "grad_norm": 0.22665300965309143, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0241, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 9.12718204488778, | |
| "grad_norm": 0.34982213377952576, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.0226, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 9.15211970074813, | |
| "grad_norm": 0.25045695900917053, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0241, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 9.17705735660848, | |
| "grad_norm": 0.36845269799232483, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0251, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 9.201995012468828, | |
| "grad_norm": 0.2546306252479553, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0295, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 9.226932668329177, | |
| "grad_norm": 0.5125709176063538, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0289, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 9.251870324189527, | |
| "grad_norm": 0.36019453406333923, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0272, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 9.276807980049876, | |
| "grad_norm": 0.2954815626144409, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0275, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 9.301745635910224, | |
| "grad_norm": 0.31509238481521606, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0232, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 9.326683291770573, | |
| "grad_norm": 0.26963862776756287, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0225, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 9.351620947630924, | |
| "grad_norm": 0.29535359144210815, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0281, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 9.376558603491272, | |
| "grad_norm": 0.2614090144634247, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.026, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 9.401496259351621, | |
| "grad_norm": 0.216142937541008, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.0255, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 9.42643391521197, | |
| "grad_norm": 0.34700286388397217, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0292, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 9.451371571072318, | |
| "grad_norm": 0.23132944107055664, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.026, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 9.476309226932669, | |
| "grad_norm": 0.265299916267395, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0273, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 9.501246882793017, | |
| "grad_norm": 0.3425016403198242, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0264, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 9.526184538653366, | |
| "grad_norm": 0.28656673431396484, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0263, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 9.551122194513717, | |
| "grad_norm": 0.33011212944984436, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0258, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 9.576059850374065, | |
| "grad_norm": 0.2773212790489197, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0232, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 9.600997506234414, | |
| "grad_norm": 0.3047066032886505, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0297, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 9.625935162094763, | |
| "grad_norm": 0.31418055295944214, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.027, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 9.650872817955111, | |
| "grad_norm": 0.21528251469135284, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0287, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 9.675810473815462, | |
| "grad_norm": 0.3307095468044281, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0293, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 9.70074812967581, | |
| "grad_norm": 0.3405265212059021, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.019, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 9.72568578553616, | |
| "grad_norm": 0.41708970069885254, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.0265, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 9.75062344139651, | |
| "grad_norm": 0.18004103004932404, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0253, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 9.775561097256858, | |
| "grad_norm": 0.25215479731559753, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.0287, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 9.800498753117207, | |
| "grad_norm": 0.4058176875114441, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0261, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 9.825436408977556, | |
| "grad_norm": 0.30756476521492004, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0277, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 9.850374064837904, | |
| "grad_norm": 0.18380045890808105, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0286, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 9.875311720698255, | |
| "grad_norm": 0.4481236934661865, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0265, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 9.900249376558603, | |
| "grad_norm": 0.2737267017364502, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0268, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 9.925187032418952, | |
| "grad_norm": 0.32162588834762573, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.025, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 9.950124688279303, | |
| "grad_norm": 0.2212892323732376, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0237, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 9.975062344139651, | |
| "grad_norm": 0.22838039696216583, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0239, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "grad_norm": 0.23952533304691315, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0245, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 10.024937655860349, | |
| "grad_norm": 0.23830989003181458, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0241, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 10.049875311720697, | |
| "grad_norm": 0.28235211968421936, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.0265, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 10.074812967581048, | |
| "grad_norm": 0.1995336413383484, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0249, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 10.099750623441397, | |
| "grad_norm": 0.25365176796913147, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0242, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 10.124688279301745, | |
| "grad_norm": 0.33549439907073975, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0258, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 10.149625935162096, | |
| "grad_norm": 0.370531290769577, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0309, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 10.174563591022444, | |
| "grad_norm": 0.3140500783920288, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.023, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 10.199501246882793, | |
| "grad_norm": 0.2719040513038635, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0288, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 10.224438902743142, | |
| "grad_norm": 0.37795066833496094, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0283, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 10.24937655860349, | |
| "grad_norm": 0.31465786695480347, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0252, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 10.27431421446384, | |
| "grad_norm": 0.4439608156681061, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0253, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 10.29925187032419, | |
| "grad_norm": 0.3169604241847992, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0269, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 10.324189526184538, | |
| "grad_norm": 0.18323646485805511, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0236, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 10.349127182044889, | |
| "grad_norm": 0.24845083057880402, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.0284, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 10.374064837905237, | |
| "grad_norm": 0.15983980894088745, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0172, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 10.399002493765586, | |
| "grad_norm": 0.32086536288261414, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.025, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 10.423940149625935, | |
| "grad_norm": 0.2811485826969147, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.0242, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 10.448877805486283, | |
| "grad_norm": 0.19532540440559387, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0233, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 10.473815461346634, | |
| "grad_norm": 0.28798985481262207, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0268, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 10.498753117206983, | |
| "grad_norm": 0.2245911955833435, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0203, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 10.523690773067331, | |
| "grad_norm": 0.3056621253490448, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.0224, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 10.548628428927682, | |
| "grad_norm": 0.2182088941335678, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.0175, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 10.57356608478803, | |
| "grad_norm": 0.2164739966392517, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0225, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 10.598503740648379, | |
| "grad_norm": 0.30183082818984985, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0297, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 10.623441396508728, | |
| "grad_norm": 0.31106331944465637, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0302, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 10.648379052369076, | |
| "grad_norm": 0.34731388092041016, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0234, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 10.673316708229427, | |
| "grad_norm": 0.2951902151107788, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0206, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 10.698254364089776, | |
| "grad_norm": 0.2821643054485321, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0208, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 10.723192019950124, | |
| "grad_norm": 0.2782182991504669, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0228, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 10.748129675810475, | |
| "grad_norm": 0.3009501099586487, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.0225, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 10.773067331670823, | |
| "grad_norm": 0.3045292794704437, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0228, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 10.798004987531172, | |
| "grad_norm": 0.2894745469093323, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0238, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 10.82294264339152, | |
| "grad_norm": 0.26919829845428467, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0217, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 10.84788029925187, | |
| "grad_norm": 0.2804599106311798, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0225, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 10.87281795511222, | |
| "grad_norm": 0.2726246416568756, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0233, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 10.897755610972569, | |
| "grad_norm": 0.3837462067604065, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0249, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 10.922693266832917, | |
| "grad_norm": 0.33217403292655945, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0219, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 10.947630922693268, | |
| "grad_norm": 0.15496332943439484, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0262, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 10.972568578553616, | |
| "grad_norm": 0.23124326765537262, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0292, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 10.997506234413965, | |
| "grad_norm": 0.26465481519699097, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0272, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 11.022443890274314, | |
| "grad_norm": 0.34905219078063965, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0258, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 11.047381546134662, | |
| "grad_norm": 0.3847571313381195, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.0254, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 11.072319201995013, | |
| "grad_norm": 0.4140780568122864, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.023, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 11.097256857855362, | |
| "grad_norm": 0.22926808893680573, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0273, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 11.12219451371571, | |
| "grad_norm": 0.2562496066093445, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.022, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 11.147132169576059, | |
| "grad_norm": 0.29838594794273376, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0199, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 11.17206982543641, | |
| "grad_norm": 0.23450398445129395, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0224, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 11.197007481296758, | |
| "grad_norm": 0.26818662881851196, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0251, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 11.221945137157107, | |
| "grad_norm": 0.28541457653045654, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0225, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 11.246882793017456, | |
| "grad_norm": 0.2384330779314041, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0233, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 11.271820448877806, | |
| "grad_norm": 0.2639118731021881, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0271, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 11.296758104738155, | |
| "grad_norm": 0.27339300513267517, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0232, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 11.321695760598503, | |
| "grad_norm": 0.333829790353775, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0228, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 11.346633416458852, | |
| "grad_norm": 0.24662379920482635, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0214, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 11.371571072319203, | |
| "grad_norm": 0.31482985615730286, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0223, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 11.396508728179551, | |
| "grad_norm": 0.30124378204345703, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0228, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 11.4214463840399, | |
| "grad_norm": 0.23434828221797943, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0203, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 11.446384039900249, | |
| "grad_norm": 0.19696398079395294, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0238, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 11.471321695760599, | |
| "grad_norm": 0.25811946392059326, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0202, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 11.496259351620948, | |
| "grad_norm": 0.2840825617313385, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0294, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 11.521197007481296, | |
| "grad_norm": 0.21360383927822113, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0198, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 11.546134663341645, | |
| "grad_norm": 0.220230832695961, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0242, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 11.571072319201996, | |
| "grad_norm": 0.3370763063430786, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0207, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 11.596009975062344, | |
| "grad_norm": 0.41919195652008057, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0256, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 11.620947630922693, | |
| "grad_norm": 0.3001018762588501, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0305, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 11.645885286783042, | |
| "grad_norm": 0.3273577094078064, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0272, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 11.670822942643392, | |
| "grad_norm": 0.37101373076438904, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0217, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 11.69576059850374, | |
| "grad_norm": 0.24971233308315277, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0224, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 11.72069825436409, | |
| "grad_norm": 0.405575692653656, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0259, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 11.745635910224438, | |
| "grad_norm": 0.2847684621810913, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0234, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 11.770573566084789, | |
| "grad_norm": 0.3589344024658203, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.022, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 11.795511221945137, | |
| "grad_norm": 0.32202762365341187, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0249, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 11.820448877805486, | |
| "grad_norm": 0.275572270154953, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.0213, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 11.845386533665835, | |
| "grad_norm": 0.3132439851760864, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0212, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 11.870324189526185, | |
| "grad_norm": 0.22886471450328827, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0207, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 11.895261845386534, | |
| "grad_norm": 0.2274550050497055, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0199, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 11.920199501246882, | |
| "grad_norm": 0.1831224411725998, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0241, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 11.945137157107231, | |
| "grad_norm": 0.22795765101909637, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0204, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 11.970074812967582, | |
| "grad_norm": 0.22010649740695953, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0183, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 11.99501246882793, | |
| "grad_norm": 0.26522693037986755, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0218, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 12.019950124688279, | |
| "grad_norm": 0.206947460770607, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.0173, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 12.044887780548628, | |
| "grad_norm": 0.19102860987186432, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0224, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 12.069825436408978, | |
| "grad_norm": 0.3428409695625305, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0328, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 12.094763092269327, | |
| "grad_norm": 0.16704417765140533, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0216, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 12.119700748129675, | |
| "grad_norm": 0.166272833943367, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0163, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 12.144638403990024, | |
| "grad_norm": 0.14958655834197998, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.0242, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 12.169576059850375, | |
| "grad_norm": 0.18853528797626495, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0203, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 12.194513715710723, | |
| "grad_norm": 0.22540320456027985, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0249, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 12.219451371571072, | |
| "grad_norm": 0.24679411947727203, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0205, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 12.24438902743142, | |
| "grad_norm": 0.32882463932037354, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0203, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 12.269326683291771, | |
| "grad_norm": 0.1784829944372177, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0177, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 12.29426433915212, | |
| "grad_norm": 0.25231051445007324, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.019, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 12.319201995012468, | |
| "grad_norm": 0.16155695915222168, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0213, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 12.344139650872817, | |
| "grad_norm": 0.19818508625030518, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.02, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 12.369077306733168, | |
| "grad_norm": 0.29574841260910034, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0241, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 12.394014962593516, | |
| "grad_norm": 0.17044132947921753, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0237, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 12.418952618453865, | |
| "grad_norm": 0.3044898211956024, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.03, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 12.443890274314214, | |
| "grad_norm": 0.24579276144504547, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0189, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 12.468827930174564, | |
| "grad_norm": 0.2730746865272522, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0184, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 12.493765586034913, | |
| "grad_norm": 0.26499801874160767, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0185, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 12.518703241895262, | |
| "grad_norm": 0.23202534019947052, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0249, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 12.54364089775561, | |
| "grad_norm": 0.19818097352981567, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0218, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 12.56857855361596, | |
| "grad_norm": 0.2663494348526001, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0184, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 12.59351620947631, | |
| "grad_norm": 0.22444742918014526, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0228, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 12.618453865336658, | |
| "grad_norm": 0.19216719269752502, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.0201, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 12.643391521197007, | |
| "grad_norm": 0.2782403230667114, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0199, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 12.668329177057357, | |
| "grad_norm": 0.20658832788467407, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.0193, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 12.693266832917706, | |
| "grad_norm": 0.2642762064933777, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0249, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 12.718204488778055, | |
| "grad_norm": 0.25494977831840515, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0261, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 12.743142144638403, | |
| "grad_norm": 0.2977260947227478, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.0202, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 12.768079800498754, | |
| "grad_norm": 0.31455764174461365, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.026, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 12.793017456359102, | |
| "grad_norm": 0.16773463785648346, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0208, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 12.817955112219451, | |
| "grad_norm": 0.28746992349624634, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0172, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 12.8428927680798, | |
| "grad_norm": 0.19649285078048706, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0196, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 12.86783042394015, | |
| "grad_norm": 0.2085386961698532, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0227, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 12.892768079800499, | |
| "grad_norm": 0.28125497698783875, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0168, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 12.917705735660848, | |
| "grad_norm": 0.21301256120204926, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0228, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 12.942643391521196, | |
| "grad_norm": 0.22395165264606476, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0225, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 12.967581047381547, | |
| "grad_norm": 0.18140634894371033, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0289, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 12.992518703241895, | |
| "grad_norm": 0.3020365238189697, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0232, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 13.017456359102244, | |
| "grad_norm": 0.24667315185070038, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0198, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 13.042394014962593, | |
| "grad_norm": 0.292667031288147, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0202, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 13.067331670822943, | |
| "grad_norm": 0.2934412956237793, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.021, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 13.092269326683292, | |
| "grad_norm": 0.23947474360466003, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.0187, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 13.11720698254364, | |
| "grad_norm": 0.2996933162212372, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0185, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 13.14214463840399, | |
| "grad_norm": 0.35077378153800964, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.0237, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 13.16708229426434, | |
| "grad_norm": 0.291131854057312, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0207, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 13.192019950124688, | |
| "grad_norm": 0.2684290111064911, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.02, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 13.216957605985037, | |
| "grad_norm": 0.24735210835933685, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.0188, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 13.241895261845386, | |
| "grad_norm": 0.25163590908050537, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.0196, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 13.266832917705736, | |
| "grad_norm": 0.15661580860614777, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0187, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 13.291770573566085, | |
| "grad_norm": 0.2017534077167511, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.02, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 13.316708229426434, | |
| "grad_norm": 0.29376116394996643, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0174, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 13.341645885286782, | |
| "grad_norm": 0.17338944971561432, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0193, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 13.366583541147133, | |
| "grad_norm": 0.30280524492263794, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0221, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 13.391521197007481, | |
| "grad_norm": 0.2336844801902771, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0192, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 13.41645885286783, | |
| "grad_norm": 0.20401246845722198, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0203, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 13.441396508728179, | |
| "grad_norm": 0.20434141159057617, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0177, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 13.46633416458853, | |
| "grad_norm": 0.21218310296535492, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0163, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 13.491271820448878, | |
| "grad_norm": 0.21623140573501587, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0221, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 13.516209476309227, | |
| "grad_norm": 0.2130313366651535, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0185, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 13.541147132169575, | |
| "grad_norm": 0.284483402967453, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0196, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 13.566084788029926, | |
| "grad_norm": 0.33570489287376404, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0197, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 13.591022443890274, | |
| "grad_norm": 0.24931751191616058, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0225, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 13.615960099750623, | |
| "grad_norm": 0.25948721170425415, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0166, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 13.640897755610972, | |
| "grad_norm": 0.17840495705604553, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0213, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 13.665835411471322, | |
| "grad_norm": 0.29844382405281067, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.017, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 13.690773067331671, | |
| "grad_norm": 0.19903980195522308, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0175, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 13.71571072319202, | |
| "grad_norm": 0.2593510150909424, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.0176, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 13.740648379052368, | |
| "grad_norm": 0.18609662353992462, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.0184, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 13.765586034912719, | |
| "grad_norm": 0.21233777701854706, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0187, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 13.790523690773068, | |
| "grad_norm": 0.21142171323299408, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.0166, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 13.815461346633416, | |
| "grad_norm": 0.25082799792289734, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0237, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 13.840399002493765, | |
| "grad_norm": 0.27774113416671753, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0181, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 13.865336658354115, | |
| "grad_norm": 0.25792816281318665, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.018, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 13.890274314214464, | |
| "grad_norm": 0.2312798798084259, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0158, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 13.915211970074813, | |
| "grad_norm": 0.22915568947792053, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0184, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 13.940149625935161, | |
| "grad_norm": 0.272836297750473, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.0213, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 13.965087281795512, | |
| "grad_norm": 0.23298053443431854, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0216, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 13.99002493765586, | |
| "grad_norm": 0.49090591073036194, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.024, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 14.01496259351621, | |
| "grad_norm": 0.3042021691799164, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0207, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 14.039900249376558, | |
| "grad_norm": 0.2638491690158844, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0177, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 14.064837905236908, | |
| "grad_norm": 0.3192089796066284, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0208, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 14.089775561097257, | |
| "grad_norm": 0.17655348777770996, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0223, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 14.114713216957606, | |
| "grad_norm": 0.19428007304668427, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0148, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 14.139650872817954, | |
| "grad_norm": 0.21721254289150238, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.0182, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 14.164588528678305, | |
| "grad_norm": 0.3082655072212219, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0182, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 14.189526184538654, | |
| "grad_norm": 0.23802044987678528, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.0179, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 14.214463840399002, | |
| "grad_norm": 0.21190091967582703, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0248, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 14.239401496259351, | |
| "grad_norm": 0.24995775520801544, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.0181, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 14.264339152119701, | |
| "grad_norm": 0.19574052095413208, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0244, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 14.28927680798005, | |
| "grad_norm": 0.279885470867157, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0241, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 14.314214463840399, | |
| "grad_norm": 0.24879010021686554, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0212, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 14.339152119700747, | |
| "grad_norm": 0.21264341473579407, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0187, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 14.364089775561098, | |
| "grad_norm": 0.1977480798959732, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0188, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 14.389027431421447, | |
| "grad_norm": 0.18054012954235077, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0197, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 14.413965087281795, | |
| "grad_norm": 0.25218743085861206, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0186, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 14.438902743142144, | |
| "grad_norm": 0.23916180431842804, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0179, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 14.463840399002494, | |
| "grad_norm": 0.20511573553085327, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0183, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 14.488778054862843, | |
| "grad_norm": 0.19747114181518555, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0185, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 14.513715710723192, | |
| "grad_norm": 0.15366511046886444, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0175, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 14.53865336658354, | |
| "grad_norm": 0.23903138935565948, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0177, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 14.563591022443891, | |
| "grad_norm": 0.1838330626487732, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0208, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 14.58852867830424, | |
| "grad_norm": 0.2696753442287445, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0229, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 14.613466334164588, | |
| "grad_norm": 0.27833878993988037, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0251, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 14.638403990024937, | |
| "grad_norm": 0.20480979979038239, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0171, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 14.663341645885287, | |
| "grad_norm": 0.22825954854488373, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0182, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 14.688279301745636, | |
| "grad_norm": 0.20724137127399445, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0217, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 14.713216957605985, | |
| "grad_norm": 0.19955697655677795, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0161, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 14.738154613466333, | |
| "grad_norm": 0.20092672109603882, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0198, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 14.763092269326684, | |
| "grad_norm": 0.17976947128772736, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.0158, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 14.788029925187033, | |
| "grad_norm": 0.31870386004447937, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0163, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 14.812967581047381, | |
| "grad_norm": 0.27772510051727295, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0175, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 14.83790523690773, | |
| "grad_norm": 0.23716199398040771, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0157, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 14.86284289276808, | |
| "grad_norm": 0.2043595314025879, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.0192, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 14.88778054862843, | |
| "grad_norm": 0.20099425315856934, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0162, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 14.912718204488778, | |
| "grad_norm": 0.2089329957962036, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.016, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 14.937655860349127, | |
| "grad_norm": 0.19201625883579254, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0162, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 14.962593516209477, | |
| "grad_norm": 0.23528531193733215, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0171, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 14.987531172069826, | |
| "grad_norm": 0.1668626070022583, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0168, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 15.012468827930174, | |
| "grad_norm": 0.201826810836792, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0165, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 15.037406483790523, | |
| "grad_norm": 0.31043142080307007, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0179, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 15.062344139650873, | |
| "grad_norm": 0.26940152049064636, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.016, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 15.087281795511222, | |
| "grad_norm": 0.2804289162158966, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0186, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 15.11221945137157, | |
| "grad_norm": 0.2345680594444275, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0147, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 15.13715710723192, | |
| "grad_norm": 0.26679036021232605, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0167, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 15.16209476309227, | |
| "grad_norm": 0.24993357062339783, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0173, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 15.187032418952619, | |
| "grad_norm": 0.2339811772108078, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0143, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 15.211970074812967, | |
| "grad_norm": 0.13606788218021393, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0171, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 15.236907730673316, | |
| "grad_norm": 0.1822890192270279, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0161, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 15.261845386533667, | |
| "grad_norm": 0.14667703211307526, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0147, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 15.286783042394015, | |
| "grad_norm": 0.290108859539032, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0161, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 15.311720698254364, | |
| "grad_norm": 0.13865269720554352, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0164, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 15.336658354114713, | |
| "grad_norm": 0.13238705694675446, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0136, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 15.361596009975063, | |
| "grad_norm": 0.17133651673793793, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0156, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 15.386533665835412, | |
| "grad_norm": 0.22780229151248932, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0193, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 15.41147132169576, | |
| "grad_norm": 0.20020505785942078, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0178, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 15.436408977556109, | |
| "grad_norm": 0.24689267575740814, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.019, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 15.46134663341646, | |
| "grad_norm": 0.18527744710445404, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0179, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 15.486284289276808, | |
| "grad_norm": 0.1628297120332718, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.014, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 15.511221945137157, | |
| "grad_norm": 0.20444464683532715, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0153, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 15.536159600997506, | |
| "grad_norm": 0.2367943525314331, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0163, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 15.561097256857856, | |
| "grad_norm": 0.18373975157737732, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0146, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 15.586034912718205, | |
| "grad_norm": 0.18674665689468384, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0175, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 15.610972568578553, | |
| "grad_norm": 0.2700800597667694, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0199, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 15.635910224438902, | |
| "grad_norm": 0.3131679892539978, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0184, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 15.660847880299253, | |
| "grad_norm": 0.27556347846984863, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0199, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 15.685785536159601, | |
| "grad_norm": 0.275347501039505, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0213, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 15.71072319201995, | |
| "grad_norm": 0.20208273828029633, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0143, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 15.735660847880299, | |
| "grad_norm": 0.20885714888572693, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0163, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 15.760598503740649, | |
| "grad_norm": 0.1977938711643219, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0159, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 15.785536159600998, | |
| "grad_norm": 0.21437180042266846, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0183, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 15.810473815461346, | |
| "grad_norm": 0.13302595913410187, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0149, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 15.835411471321695, | |
| "grad_norm": 0.2628529369831085, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0148, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 15.860349127182046, | |
| "grad_norm": 0.2229318618774414, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0127, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 15.885286783042394, | |
| "grad_norm": 0.2556484639644623, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.019, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 15.910224438902743, | |
| "grad_norm": 0.22030752897262573, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0145, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 15.935162094763092, | |
| "grad_norm": 0.28019654750823975, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.0179, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 15.960099750623442, | |
| "grad_norm": 0.21245922148227692, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0159, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 15.98503740648379, | |
| "grad_norm": 0.18166065216064453, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0172, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 16.00997506234414, | |
| "grad_norm": 0.15449444949626923, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0144, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 16.034912718204488, | |
| "grad_norm": 0.12723799049854279, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0154, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 16.059850374064837, | |
| "grad_norm": 0.18919967114925385, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.016, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 16.084788029925186, | |
| "grad_norm": 0.19477224349975586, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0122, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 16.109725685785538, | |
| "grad_norm": 0.1922047734260559, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0122, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 16.134663341645886, | |
| "grad_norm": 0.24549920856952667, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0159, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 16.159600997506235, | |
| "grad_norm": 0.12796825170516968, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0157, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 16.184538653366584, | |
| "grad_norm": 0.14820651710033417, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0145, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 16.209476309226932, | |
| "grad_norm": 0.1364356428384781, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0153, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 16.23441396508728, | |
| "grad_norm": 0.18831998109817505, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0169, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 16.25935162094763, | |
| "grad_norm": 0.1880173534154892, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0148, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 16.28428927680798, | |
| "grad_norm": 0.15736524760723114, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0209, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 16.30922693266833, | |
| "grad_norm": 0.22002153098583221, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.02, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 16.33416458852868, | |
| "grad_norm": 0.24109351634979248, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.015, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 16.359102244389028, | |
| "grad_norm": 0.18013128638267517, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0178, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 16.384039900249377, | |
| "grad_norm": 0.20001298189163208, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0154, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 16.408977556109726, | |
| "grad_norm": 0.17360186576843262, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0214, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 16.433915211970074, | |
| "grad_norm": 0.12526439130306244, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0202, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 16.458852867830423, | |
| "grad_norm": 0.1808585226535797, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0145, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 16.48379052369077, | |
| "grad_norm": 0.17511676251888275, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0155, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 16.508728179551124, | |
| "grad_norm": 0.16646809875965118, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0171, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 16.533665835411473, | |
| "grad_norm": 0.22161412239074707, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0133, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 16.55860349127182, | |
| "grad_norm": 0.1550191193819046, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.017, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 16.58354114713217, | |
| "grad_norm": 0.19345645606517792, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0128, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 16.60847880299252, | |
| "grad_norm": 0.21538229286670685, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.015, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 16.633416458852867, | |
| "grad_norm": 0.2819233536720276, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0135, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 16.658354114713216, | |
| "grad_norm": 0.22603356838226318, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0151, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 16.683291770573565, | |
| "grad_norm": 0.21096298098564148, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.0145, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 16.708229426433917, | |
| "grad_norm": 0.279281347990036, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0159, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 16.733167082294266, | |
| "grad_norm": 0.33940017223358154, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0136, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 16.758104738154614, | |
| "grad_norm": 0.15617480874061584, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.017, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 16.783042394014963, | |
| "grad_norm": 0.2107282429933548, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0158, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 16.80798004987531, | |
| "grad_norm": 0.17856352031230927, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0165, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 16.83291770573566, | |
| "grad_norm": 0.15779192745685577, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0163, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 16.85785536159601, | |
| "grad_norm": 0.14080210030078888, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.0109, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 16.882793017456358, | |
| "grad_norm": 0.21321918070316315, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.0152, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 16.90773067331671, | |
| "grad_norm": 0.19265246391296387, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0125, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 16.93266832917706, | |
| "grad_norm": 0.24140265583992004, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0139, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 16.957605985037407, | |
| "grad_norm": 0.11414530873298645, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0141, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 16.982543640897756, | |
| "grad_norm": 0.1986575871706009, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0183, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 17.007481296758105, | |
| "grad_norm": 0.2724411189556122, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0153, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 17.032418952618453, | |
| "grad_norm": 0.17774352431297302, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0105, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 17.057356608478802, | |
| "grad_norm": 0.17269599437713623, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.012, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 17.08229426433915, | |
| "grad_norm": 0.25596779584884644, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.0145, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 17.107231920199503, | |
| "grad_norm": 0.16668182611465454, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0133, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 17.13216957605985, | |
| "grad_norm": 0.20096273720264435, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.0137, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 17.1571072319202, | |
| "grad_norm": 0.2984505593776703, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0188, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 17.18204488778055, | |
| "grad_norm": 0.20007608830928802, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0197, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 17.206982543640898, | |
| "grad_norm": 0.17159053683280945, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0147, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 17.231920199501246, | |
| "grad_norm": 0.17019398510456085, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.0137, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 17.256857855361595, | |
| "grad_norm": 0.11703617125749588, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0139, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 17.281795511221944, | |
| "grad_norm": 0.1912344992160797, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0158, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 17.306733167082296, | |
| "grad_norm": 0.2726556956768036, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.014, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 17.331670822942645, | |
| "grad_norm": 0.2574710547924042, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0134, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 17.356608478802993, | |
| "grad_norm": 0.21012945473194122, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0151, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 17.381546134663342, | |
| "grad_norm": 0.16733276844024658, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0132, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 17.40648379052369, | |
| "grad_norm": 0.16988280415534973, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0142, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 17.43142144638404, | |
| "grad_norm": 0.19951944053173065, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0122, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 17.456359102244388, | |
| "grad_norm": 0.15427358448505402, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0132, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 17.481296758104737, | |
| "grad_norm": 0.2390935868024826, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.0125, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 17.50623441396509, | |
| "grad_norm": 0.18670183420181274, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0136, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 17.531172069825438, | |
| "grad_norm": 0.2646785080432892, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0135, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 17.556109725685786, | |
| "grad_norm": 0.15656375885009766, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0133, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 17.581047381546135, | |
| "grad_norm": 0.19597883522510529, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0168, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 17.605985037406484, | |
| "grad_norm": 0.1515006572008133, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0191, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 17.630922693266832, | |
| "grad_norm": 0.18910616636276245, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0144, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 17.65586034912718, | |
| "grad_norm": 0.16210100054740906, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.0125, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 17.68079800498753, | |
| "grad_norm": 0.16548502445220947, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0124, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 17.705735660847882, | |
| "grad_norm": 0.17527201771736145, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0118, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 17.73067331670823, | |
| "grad_norm": 0.24314862489700317, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0153, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 17.75561097256858, | |
| "grad_norm": 0.28165191411972046, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0123, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 17.780548628428928, | |
| "grad_norm": 0.11589303612709045, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0143, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 17.805486284289277, | |
| "grad_norm": 0.24427318572998047, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0136, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 17.830423940149625, | |
| "grad_norm": 0.16070619225502014, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0156, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 17.855361596009974, | |
| "grad_norm": 0.18744312226772308, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0151, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 17.880299251870323, | |
| "grad_norm": 0.187273770570755, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.014, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 17.905236907730675, | |
| "grad_norm": 0.16674570739269257, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0143, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 17.930174563591024, | |
| "grad_norm": 0.2733837068080902, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0208, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 17.955112219451372, | |
| "grad_norm": 0.28830480575561523, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0142, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 17.98004987531172, | |
| "grad_norm": 0.19582916796207428, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0145, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 18.00498753117207, | |
| "grad_norm": 0.20871758460998535, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0129, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 18.02992518703242, | |
| "grad_norm": 0.1352134346961975, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0156, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 18.054862842892767, | |
| "grad_norm": 0.12410979717969894, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.0118, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 18.079800498753116, | |
| "grad_norm": 0.18063949048519135, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0134, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 18.104738154613468, | |
| "grad_norm": 0.206300288438797, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0116, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 18.129675810473817, | |
| "grad_norm": 0.17813508212566376, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.0113, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 18.154613466334165, | |
| "grad_norm": 0.11702705919742584, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.0116, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 18.179551122194514, | |
| "grad_norm": 0.127091184258461, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0122, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 18.204488778054863, | |
| "grad_norm": 0.3138023316860199, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0143, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 18.22942643391521, | |
| "grad_norm": 0.21918581426143646, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0135, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 18.25436408977556, | |
| "grad_norm": 0.22395913302898407, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0148, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 18.27930174563591, | |
| "grad_norm": 0.10242143273353577, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.0124, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 18.30423940149626, | |
| "grad_norm": 0.20338401198387146, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0146, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 18.32917705735661, | |
| "grad_norm": 0.13682910799980164, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.0145, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 18.35411471321696, | |
| "grad_norm": 0.18041102588176727, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0123, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 18.379052369077307, | |
| "grad_norm": 0.15307408571243286, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.014, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 18.403990024937656, | |
| "grad_norm": 0.1764901876449585, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0121, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 18.428927680798004, | |
| "grad_norm": 0.2417878657579422, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0133, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 18.453865336658353, | |
| "grad_norm": 0.24020728468894958, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0142, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 18.478802992518702, | |
| "grad_norm": 0.17263422906398773, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0108, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 18.503740648379054, | |
| "grad_norm": 0.2942994236946106, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.013, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 18.528678304239403, | |
| "grad_norm": 0.18044176697731018, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0149, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 18.55361596009975, | |
| "grad_norm": 0.10796672850847244, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0158, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 18.5785536159601, | |
| "grad_norm": 0.18820756673812866, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0125, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 18.60349127182045, | |
| "grad_norm": 0.18307971954345703, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0117, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 18.628428927680797, | |
| "grad_norm": 0.20657193660736084, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0144, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 18.653366583541146, | |
| "grad_norm": 0.2926298677921295, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0116, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 18.678304239401495, | |
| "grad_norm": 0.20589709281921387, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0171, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 18.703241895261847, | |
| "grad_norm": 0.17581333220005035, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0135, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 18.728179551122196, | |
| "grad_norm": 0.21577408909797668, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0124, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 18.753117206982544, | |
| "grad_norm": 0.21110914647579193, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0133, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 18.778054862842893, | |
| "grad_norm": 0.14681799709796906, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0101, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 18.802992518703242, | |
| "grad_norm": 0.22271208465099335, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0125, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 18.82793017456359, | |
| "grad_norm": 0.2072574496269226, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0109, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 18.85286783042394, | |
| "grad_norm": 0.19125764071941376, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0121, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 18.877805486284288, | |
| "grad_norm": 0.1937733143568039, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0104, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 18.902743142144637, | |
| "grad_norm": 0.24604687094688416, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0154, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 18.92768079800499, | |
| "grad_norm": 0.17904645204544067, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0155, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 18.952618453865338, | |
| "grad_norm": 0.23642393946647644, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0154, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 18.977556109725686, | |
| "grad_norm": 0.12186987698078156, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0127, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 19.002493765586035, | |
| "grad_norm": 0.12449382990598679, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.0135, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 19.027431421446384, | |
| "grad_norm": 0.19680935144424438, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0113, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 19.052369077306732, | |
| "grad_norm": 0.1381329894065857, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0126, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 19.07730673316708, | |
| "grad_norm": 0.15984071791172028, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0138, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 19.102244389027433, | |
| "grad_norm": 0.16683955490589142, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.012, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 19.127182044887782, | |
| "grad_norm": 0.20510785281658173, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0129, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 19.15211970074813, | |
| "grad_norm": 0.1646248698234558, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0098, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 19.17705735660848, | |
| "grad_norm": 0.19685028493404388, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0116, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 19.201995012468828, | |
| "grad_norm": 0.16668826341629028, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.0093, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 19.226932668329177, | |
| "grad_norm": 0.1837756633758545, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.011, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 19.251870324189525, | |
| "grad_norm": 0.20641258358955383, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.0112, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 19.276807980049874, | |
| "grad_norm": 0.15517428517341614, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.0105, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 19.301745635910226, | |
| "grad_norm": 0.1268729269504547, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0116, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 19.326683291770575, | |
| "grad_norm": 0.20498508214950562, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.012, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 19.351620947630924, | |
| "grad_norm": 0.1662495732307434, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0132, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 19.376558603491272, | |
| "grad_norm": 0.1282566338777542, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0099, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 19.40149625935162, | |
| "grad_norm": 0.13146764039993286, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0104, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 19.42643391521197, | |
| "grad_norm": 0.15411917865276337, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0105, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 19.45137157107232, | |
| "grad_norm": 0.1768351048231125, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0098, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 19.476309226932667, | |
| "grad_norm": 0.11871691793203354, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.015, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 19.50124688279302, | |
| "grad_norm": 0.1518869400024414, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.0136, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 19.526184538653368, | |
| "grad_norm": 0.19363293051719666, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.015, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 19.551122194513717, | |
| "grad_norm": 0.1647634655237198, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0154, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 19.576059850374065, | |
| "grad_norm": 0.22673118114471436, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0159, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 19.600997506234414, | |
| "grad_norm": 0.18206587433815002, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0119, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 19.625935162094763, | |
| "grad_norm": 0.1770549863576889, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0105, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 19.65087281795511, | |
| "grad_norm": 0.16882337629795074, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0118, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 19.67581047381546, | |
| "grad_norm": 0.18759211897850037, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0119, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 19.70074812967581, | |
| "grad_norm": 0.13904209434986115, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0104, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 19.72568578553616, | |
| "grad_norm": 0.20748458802700043, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.0114, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 19.75062344139651, | |
| "grad_norm": 0.1970975399017334, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0131, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 19.77556109725686, | |
| "grad_norm": 0.1166309043765068, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0096, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 19.800498753117207, | |
| "grad_norm": 0.15643249452114105, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0104, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 19.825436408977556, | |
| "grad_norm": 0.2112215906381607, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0119, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 19.850374064837904, | |
| "grad_norm": 0.2472122311592102, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.0198, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 19.875311720698253, | |
| "grad_norm": 0.2562101185321808, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0144, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 19.900249376558605, | |
| "grad_norm": 0.18427228927612305, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0164, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 19.925187032418954, | |
| "grad_norm": 0.1996258646249771, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0147, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 19.950124688279303, | |
| "grad_norm": 0.23629839718341827, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0131, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 19.97506234413965, | |
| "grad_norm": 0.11965874582529068, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0102, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "grad_norm": 0.24889467656612396, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0129, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 20.02493765586035, | |
| "grad_norm": 0.1816418617963791, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0149, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 20.049875311720697, | |
| "grad_norm": 0.29368168115615845, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.012, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 20.074812967581046, | |
| "grad_norm": 0.19872735440731049, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.0106, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 20.099750623441395, | |
| "grad_norm": 0.20744027197360992, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0097, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 20.124688279301747, | |
| "grad_norm": 0.13560041785240173, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0107, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 20.149625935162096, | |
| "grad_norm": 0.149778813123703, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0118, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 20.174563591022444, | |
| "grad_norm": 0.1729152500629425, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.0085, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 20.199501246882793, | |
| "grad_norm": 0.16466082632541656, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0102, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 20.22443890274314, | |
| "grad_norm": 0.14830881357192993, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0092, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 20.24937655860349, | |
| "grad_norm": 0.17200683057308197, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0125, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 20.27431421446384, | |
| "grad_norm": 0.11875200271606445, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0107, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 20.29925187032419, | |
| "grad_norm": 0.1842079609632492, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0097, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 20.32418952618454, | |
| "grad_norm": 0.14516538381576538, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0113, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 20.34912718204489, | |
| "grad_norm": 0.17120328545570374, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0132, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 20.374064837905237, | |
| "grad_norm": 0.15593267977237701, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.01, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 20.399002493765586, | |
| "grad_norm": 0.16508732736110687, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0096, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 20.423940149625935, | |
| "grad_norm": 0.264967679977417, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0114, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 20.448877805486283, | |
| "grad_norm": 0.15196336805820465, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0113, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 20.473815461346632, | |
| "grad_norm": 0.11913860589265823, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0103, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 20.49875311720698, | |
| "grad_norm": 0.13211913406848907, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0127, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 20.523690773067333, | |
| "grad_norm": 0.11454223841428757, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0109, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 20.54862842892768, | |
| "grad_norm": 0.14827895164489746, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0129, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 20.57356608478803, | |
| "grad_norm": 0.19276633858680725, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0103, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 20.59850374064838, | |
| "grad_norm": 0.2303502857685089, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0101, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 20.623441396508728, | |
| "grad_norm": 0.17112362384796143, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0086, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 20.648379052369076, | |
| "grad_norm": 0.1264120489358902, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0173, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 20.673316708229425, | |
| "grad_norm": 0.10251034796237946, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0115, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 20.698254364089777, | |
| "grad_norm": 0.14415833353996277, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0095, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 20.723192019950126, | |
| "grad_norm": 0.1260681450366974, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0098, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 20.748129675810475, | |
| "grad_norm": 0.19292029738426208, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0136, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 20.773067331670823, | |
| "grad_norm": 0.16914553940296173, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.011, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 20.798004987531172, | |
| "grad_norm": 0.1808631271123886, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0096, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 20.82294264339152, | |
| "grad_norm": 0.16995872557163239, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0107, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 20.84788029925187, | |
| "grad_norm": 0.13984939455986023, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0097, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 20.872817955112218, | |
| "grad_norm": 0.17095516622066498, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0102, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 20.897755610972567, | |
| "grad_norm": 0.18818429112434387, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0091, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 20.92269326683292, | |
| "grad_norm": 0.1732785999774933, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0132, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 20.947630922693268, | |
| "grad_norm": 0.2573503851890564, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.0132, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 20.972568578553616, | |
| "grad_norm": 0.15582725405693054, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0066, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 20.997506234413965, | |
| "grad_norm": 0.1384490728378296, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0099, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 21.022443890274314, | |
| "grad_norm": 0.13213975727558136, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0104, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 21.047381546134662, | |
| "grad_norm": 0.09760343283414841, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0087, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 21.07231920199501, | |
| "grad_norm": 0.162679523229599, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0089, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 21.09725685785536, | |
| "grad_norm": 0.1391264796257019, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0091, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 21.122194513715712, | |
| "grad_norm": 0.1114204153418541, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.0081, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 21.14713216957606, | |
| "grad_norm": 0.1561351865530014, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.0103, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 21.17206982543641, | |
| "grad_norm": 0.18560706079006195, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0148, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 21.197007481296758, | |
| "grad_norm": 0.16355405747890472, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0092, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 21.221945137157107, | |
| "grad_norm": 0.14421485364437103, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0079, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 21.246882793017456, | |
| "grad_norm": 0.16273567080497742, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0138, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 21.271820448877804, | |
| "grad_norm": 0.1267632097005844, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0092, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 21.296758104738153, | |
| "grad_norm": 0.13281220197677612, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0079, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 21.321695760598505, | |
| "grad_norm": 0.15917646884918213, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0082, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 21.346633416458854, | |
| "grad_norm": 0.17474614083766937, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0093, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 21.371571072319203, | |
| "grad_norm": 0.17023734748363495, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0088, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 21.39650872817955, | |
| "grad_norm": 0.1023193970322609, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0105, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 21.4214463840399, | |
| "grad_norm": 0.14239411056041718, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0085, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 21.44638403990025, | |
| "grad_norm": 0.20956820249557495, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0121, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 21.471321695760597, | |
| "grad_norm": 0.09319250285625458, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0099, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 21.496259351620946, | |
| "grad_norm": 0.17635712027549744, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0122, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 21.521197007481298, | |
| "grad_norm": 0.20555302500724792, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0098, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 21.546134663341647, | |
| "grad_norm": 0.09722301363945007, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.009, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 21.571072319201996, | |
| "grad_norm": 0.2117350697517395, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0094, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 21.596009975062344, | |
| "grad_norm": 0.07865309715270996, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0107, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 21.620947630922693, | |
| "grad_norm": 0.1302402764558792, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0106, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 21.64588528678304, | |
| "grad_norm": 0.10037621855735779, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0096, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 21.67082294264339, | |
| "grad_norm": 0.08978832513093948, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0139, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 21.69576059850374, | |
| "grad_norm": 0.10747410356998444, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0094, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 21.72069825436409, | |
| "grad_norm": 0.11507570743560791, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0113, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 21.74563591022444, | |
| "grad_norm": 0.14979727566242218, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0086, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 21.77057356608479, | |
| "grad_norm": 0.17306186258792877, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0115, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 21.795511221945137, | |
| "grad_norm": 0.0843510851264, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0095, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 21.820448877805486, | |
| "grad_norm": 0.14261963963508606, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.011, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 21.845386533665835, | |
| "grad_norm": 0.20708800852298737, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0099, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 21.870324189526183, | |
| "grad_norm": 0.12792934477329254, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0118, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 21.895261845386532, | |
| "grad_norm": 0.27362823486328125, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0113, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 21.920199501246884, | |
| "grad_norm": 0.20215623080730438, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.0108, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 21.945137157107233, | |
| "grad_norm": 0.13568472862243652, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0118, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 21.97007481296758, | |
| "grad_norm": 0.19980351626873016, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0109, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 21.99501246882793, | |
| "grad_norm": 0.15585456788539886, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.0087, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 22.01995012468828, | |
| "grad_norm": 0.07734023779630661, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0094, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 22.044887780548628, | |
| "grad_norm": 0.20725880563259125, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0126, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 22.069825436408976, | |
| "grad_norm": 0.08230875432491302, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0063, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 22.094763092269325, | |
| "grad_norm": 0.12335740029811859, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.012, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 22.119700748129677, | |
| "grad_norm": 0.10417554527521133, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0117, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 22.144638403990026, | |
| "grad_norm": 0.21877962350845337, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.009, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 22.169576059850375, | |
| "grad_norm": 0.10149165242910385, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0107, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 22.194513715710723, | |
| "grad_norm": 0.0754472017288208, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.0077, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 22.219451371571072, | |
| "grad_norm": 0.18970726430416107, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.013, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 22.24438902743142, | |
| "grad_norm": 0.13444998860359192, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0067, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 22.26932668329177, | |
| "grad_norm": 0.08629672229290009, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0077, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 22.294264339152118, | |
| "grad_norm": 0.13979920744895935, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0082, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 22.31920199501247, | |
| "grad_norm": 0.11845487356185913, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0098, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 22.34413965087282, | |
| "grad_norm": 0.17686918377876282, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0137, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 22.369077306733168, | |
| "grad_norm": 0.1734665185213089, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0103, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 22.394014962593516, | |
| "grad_norm": 0.0991511419415474, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0079, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 22.418952618453865, | |
| "grad_norm": 0.11316787451505661, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0115, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 22.443890274314214, | |
| "grad_norm": 0.19939127564430237, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.0108, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 22.468827930174562, | |
| "grad_norm": 0.11768173426389694, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.0105, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 22.49376558603491, | |
| "grad_norm": 0.14030784368515015, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.0098, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 22.518703241895263, | |
| "grad_norm": 0.16166001558303833, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.0094, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 22.543640897755612, | |
| "grad_norm": 0.11566431075334549, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0087, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 22.56857855361596, | |
| "grad_norm": 0.12991498410701752, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0092, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 22.59351620947631, | |
| "grad_norm": 0.10265893489122391, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.0095, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 22.618453865336658, | |
| "grad_norm": 0.13150304555892944, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0101, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 22.643391521197007, | |
| "grad_norm": 0.1058659702539444, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.0101, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 22.668329177057355, | |
| "grad_norm": 0.16634219884872437, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.0126, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 22.693266832917704, | |
| "grad_norm": 0.10053948312997818, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.0081, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 22.718204488778056, | |
| "grad_norm": 0.16862116754055023, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0117, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 22.743142144638405, | |
| "grad_norm": 0.2290639728307724, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.0101, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 22.768079800498754, | |
| "grad_norm": 0.11383877694606781, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0085, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 22.793017456359102, | |
| "grad_norm": 0.15358632802963257, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.0063, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 22.81795511221945, | |
| "grad_norm": 0.09199787676334381, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0097, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 22.8428927680798, | |
| "grad_norm": 0.16180719435214996, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0132, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 22.86783042394015, | |
| "grad_norm": 0.15866521000862122, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.011, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 22.892768079800497, | |
| "grad_norm": 0.21543671190738678, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.0114, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 22.91770573566085, | |
| "grad_norm": 0.15842817723751068, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.0109, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 22.942643391521198, | |
| "grad_norm": 0.18475976586341858, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.009, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 22.967581047381547, | |
| "grad_norm": 0.13380274176597595, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0103, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 22.992518703241895, | |
| "grad_norm": 0.19328123331069946, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.0117, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 23.017456359102244, | |
| "grad_norm": 0.08324471861124039, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.0111, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 23.042394014962593, | |
| "grad_norm": 0.1410548835992813, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.0097, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 23.06733167082294, | |
| "grad_norm": 0.17459465563297272, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.0089, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 23.09226932668329, | |
| "grad_norm": 0.1616915911436081, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.0124, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 23.117206982543642, | |
| "grad_norm": 0.182282492518425, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0086, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 23.14214463840399, | |
| "grad_norm": 0.11945149302482605, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.0106, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 23.16708229426434, | |
| "grad_norm": 0.15818195044994354, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.0076, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 23.19201995012469, | |
| "grad_norm": 0.09345827996730804, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.0082, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 23.216957605985037, | |
| "grad_norm": 0.06752045452594757, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.0117, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 23.241895261845386, | |
| "grad_norm": 0.18825390934944153, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0106, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 23.266832917705734, | |
| "grad_norm": 0.13447588682174683, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.0104, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 23.291770573566083, | |
| "grad_norm": 0.16687792539596558, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.0115, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 23.316708229426435, | |
| "grad_norm": 0.12523305416107178, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0095, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 23.341645885286784, | |
| "grad_norm": 0.1146356463432312, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.0071, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 23.366583541147133, | |
| "grad_norm": 0.13623486459255219, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0118, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 23.39152119700748, | |
| "grad_norm": 0.1595918834209442, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.0122, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 23.41645885286783, | |
| "grad_norm": 0.10325364023447037, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.0117, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 23.44139650872818, | |
| "grad_norm": 0.19897833466529846, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.0115, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 23.466334164588527, | |
| "grad_norm": 0.15857039391994476, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.008, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 23.491271820448876, | |
| "grad_norm": 0.1193074882030487, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0082, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 23.51620947630923, | |
| "grad_norm": 0.12145932763814926, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0111, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 23.541147132169577, | |
| "grad_norm": 0.08620520681142807, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.0092, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 23.566084788029926, | |
| "grad_norm": 0.1716388314962387, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.013, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 23.591022443890274, | |
| "grad_norm": 0.1060759574174881, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0075, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 23.615960099750623, | |
| "grad_norm": 0.21080216765403748, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.0129, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 23.640897755610972, | |
| "grad_norm": 0.12358783930540085, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0095, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 23.66583541147132, | |
| "grad_norm": 0.13231295347213745, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.0071, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 23.69077306733167, | |
| "grad_norm": 0.23074249923229218, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.0113, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 23.71571072319202, | |
| "grad_norm": 0.2231118232011795, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.0122, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 23.74064837905237, | |
| "grad_norm": 0.21575309336185455, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.0102, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 23.76558603491272, | |
| "grad_norm": 0.2051115781068802, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.0111, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 23.790523690773068, | |
| "grad_norm": 0.12659715116024017, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0093, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 23.815461346633416, | |
| "grad_norm": 0.17052437365055084, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.0125, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 23.840399002493765, | |
| "grad_norm": 0.10644330829381943, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.011, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 23.865336658354114, | |
| "grad_norm": 0.13171544671058655, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.0108, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 23.890274314214462, | |
| "grad_norm": 0.09954603016376495, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0098, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 23.915211970074814, | |
| "grad_norm": 0.09581620991230011, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0108, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 23.940149625935163, | |
| "grad_norm": 0.17687483131885529, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.0078, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 23.965087281795512, | |
| "grad_norm": 0.15976780652999878, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.0099, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 23.99002493765586, | |
| "grad_norm": 0.2240252047777176, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.0117, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 24.01496259351621, | |
| "grad_norm": 0.17584775388240814, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.01, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 24.039900249376558, | |
| "grad_norm": 0.09660171717405319, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.0123, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 24.064837905236907, | |
| "grad_norm": 0.12731677293777466, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0096, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 24.089775561097255, | |
| "grad_norm": 0.1523570567369461, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.0105, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 24.114713216957608, | |
| "grad_norm": 0.16970428824424744, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.009, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 24.139650872817956, | |
| "grad_norm": 0.14059139788150787, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0091, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 24.164588528678305, | |
| "grad_norm": 0.15968100726604462, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.0095, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 24.189526184538654, | |
| "grad_norm": 0.07966794818639755, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.0079, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 24.214463840399002, | |
| "grad_norm": 0.16648989915847778, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0082, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 24.23940149625935, | |
| "grad_norm": 0.1813577115535736, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.0119, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 24.2643391521197, | |
| "grad_norm": 0.28378596901893616, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.0101, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 24.28927680798005, | |
| "grad_norm": 0.16535334289073944, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.0099, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 24.3142144638404, | |
| "grad_norm": 0.11055565625429153, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.008, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 24.33915211970075, | |
| "grad_norm": 0.09220130741596222, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.0087, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 24.364089775561098, | |
| "grad_norm": 0.11395467817783356, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.0084, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 24.389027431421447, | |
| "grad_norm": 0.11669038981199265, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0099, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 24.413965087281795, | |
| "grad_norm": 0.10578929632902145, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0082, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 24.438902743142144, | |
| "grad_norm": 0.09901167452335358, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.0096, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 24.463840399002493, | |
| "grad_norm": 0.1508231908082962, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.0069, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 24.48877805486284, | |
| "grad_norm": 0.15237489342689514, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.0107, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 24.513715710723194, | |
| "grad_norm": 0.19540637731552124, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.0088, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 24.538653366583542, | |
| "grad_norm": 0.12219835072755814, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.0121, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 24.56359102244389, | |
| "grad_norm": 0.10399410128593445, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.01, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 24.58852867830424, | |
| "grad_norm": 0.16520583629608154, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.0134, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 24.61346633416459, | |
| "grad_norm": 0.165066197514534, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.0119, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 24.638403990024937, | |
| "grad_norm": 0.22801975905895233, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.0087, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 24.663341645885286, | |
| "grad_norm": 0.18782998621463776, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.0096, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 24.688279301745634, | |
| "grad_norm": 0.1467093527317047, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.0103, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 24.713216957605987, | |
| "grad_norm": 0.16080711781978607, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0126, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 24.738154613466335, | |
| "grad_norm": 0.15161679685115814, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.0114, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 24.763092269326684, | |
| "grad_norm": 0.13160894811153412, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0132, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 24.788029925187033, | |
| "grad_norm": 0.12861140072345734, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0068, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 24.81296758104738, | |
| "grad_norm": 0.13793323934078217, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.0117, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 24.83790523690773, | |
| "grad_norm": 0.2963007986545563, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0121, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 24.86284289276808, | |
| "grad_norm": 0.20123007893562317, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0108, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 24.887780548628427, | |
| "grad_norm": 0.13613452017307281, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.0083, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 24.91271820448878, | |
| "grad_norm": 0.10863277316093445, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.0088, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 24.93765586034913, | |
| "grad_norm": 0.19118401408195496, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.0104, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 24.93765586034913, | |
| "step": 10000, | |
| "total_flos": 0.0, | |
| "train_loss": 0.028992590371519328, | |
| "train_runtime": 9856.6279, | |
| "train_samples_per_second": 32.465, | |
| "train_steps_per_second": 1.015 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 25, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |