{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 13.175230566534914,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013175230566534914,
      "grad_norm": 12.585679054260254,
      "learning_rate": 1.8e-06,
      "loss": 1.3094,
      "step": 10
    },
    {
      "epoch": 0.026350461133069828,
      "grad_norm": 4.774308681488037,
      "learning_rate": 3.8e-06,
      "loss": 1.0382,
      "step": 20
    },
    {
      "epoch": 0.039525691699604744,
      "grad_norm": 3.6964216232299805,
      "learning_rate": 5.8e-06,
      "loss": 0.5183,
      "step": 30
    },
    {
      "epoch": 0.052700922266139656,
      "grad_norm": 1.9297399520874023,
      "learning_rate": 7.8e-06,
      "loss": 0.3297,
      "step": 40
    },
    {
      "epoch": 0.06587615283267458,
      "grad_norm": 3.0238001346588135,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.2296,
      "step": 50
    },
    {
      "epoch": 0.07905138339920949,
      "grad_norm": 1.9905507564544678,
      "learning_rate": 1.18e-05,
      "loss": 0.1924,
      "step": 60
    },
    {
      "epoch": 0.0922266139657444,
      "grad_norm": 1.7341668605804443,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.1222,
      "step": 70
    },
    {
      "epoch": 0.10540184453227931,
      "grad_norm": 1.1846983432769775,
      "learning_rate": 1.58e-05,
      "loss": 0.1143,
      "step": 80
    },
    {
      "epoch": 0.11857707509881422,
      "grad_norm": 0.9171633720397949,
      "learning_rate": 1.78e-05,
      "loss": 0.0895,
      "step": 90
    },
    {
      "epoch": 0.13175230566534915,
      "grad_norm": 0.9449801445007324,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 0.0864,
      "step": 100
    },
    {
      "epoch": 0.14492753623188406,
      "grad_norm": 0.49553966522216797,
      "learning_rate": 2.18e-05,
      "loss": 0.073,
      "step": 110
    },
    {
      "epoch": 0.15810276679841898,
      "grad_norm": 0.9112503528594971,
      "learning_rate": 2.38e-05,
      "loss": 0.0631,
      "step": 120
    },
    {
      "epoch": 0.1712779973649539,
      "grad_norm": 1.0028133392333984,
      "learning_rate": 2.58e-05,
      "loss": 0.0668,
      "step": 130
    },
    {
      "epoch": 0.1844532279314888,
      "grad_norm": 1.2871983051300049,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.0646,
      "step": 140
    },
    {
      "epoch": 0.1976284584980237,
      "grad_norm": 0.9798805117607117,
      "learning_rate": 2.98e-05,
      "loss": 0.0565,
      "step": 150
    },
    {
      "epoch": 0.21080368906455862,
      "grad_norm": 0.8933775424957275,
      "learning_rate": 3.18e-05,
      "loss": 0.0581,
      "step": 160
    },
    {
      "epoch": 0.22397891963109354,
      "grad_norm": 1.1488267183303833,
      "learning_rate": 3.38e-05,
      "loss": 0.0521,
      "step": 170
    },
    {
      "epoch": 0.23715415019762845,
      "grad_norm": 1.329391360282898,
      "learning_rate": 3.58e-05,
      "loss": 0.0518,
      "step": 180
    },
    {
      "epoch": 0.2503293807641634,
      "grad_norm": 0.9331446290016174,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.05,
      "step": 190
    },
    {
      "epoch": 0.2635046113306983,
      "grad_norm": 0.7018640041351318,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.0475,
      "step": 200
    },
    {
      "epoch": 0.2766798418972332,
      "grad_norm": 0.7023022174835205,
      "learning_rate": 4.18e-05,
      "loss": 0.0506,
      "step": 210
    },
    {
      "epoch": 0.2898550724637681,
      "grad_norm": 0.6905364394187927,
      "learning_rate": 4.38e-05,
      "loss": 0.0627,
      "step": 220
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 1.085239052772522,
      "learning_rate": 4.58e-05,
      "loss": 0.0481,
      "step": 230
    },
    {
      "epoch": 0.31620553359683795,
      "grad_norm": 0.8007176518440247,
      "learning_rate": 4.78e-05,
      "loss": 0.0519,
      "step": 240
    },
    {
      "epoch": 0.32938076416337286,
      "grad_norm": 1.0830175876617432,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.0501,
      "step": 250
    },
    {
      "epoch": 0.3425559947299078,
      "grad_norm": 1.1084550619125366,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.0553,
      "step": 260
    },
    {
      "epoch": 0.3557312252964427,
      "grad_norm": 1.091086983680725,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.0605,
      "step": 270
    },
    {
      "epoch": 0.3689064558629776,
      "grad_norm": 0.6870923638343811,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.047,
      "step": 280
    },
    {
      "epoch": 0.3820816864295125,
      "grad_norm": 0.44346657395362854,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.0561,
      "step": 290
    },
    {
      "epoch": 0.3952569169960474,
      "grad_norm": 1.086161732673645,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.05,
      "step": 300
    },
    {
      "epoch": 0.40843214756258234,
      "grad_norm": 0.6184456944465637,
      "learning_rate": 6.18e-05,
      "loss": 0.0505,
      "step": 310
    },
    {
      "epoch": 0.42160737812911725,
      "grad_norm": 0.7681217789649963,
      "learning_rate": 6.38e-05,
      "loss": 0.0458,
      "step": 320
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 0.7591136693954468,
      "learning_rate": 6.58e-05,
      "loss": 0.0504,
      "step": 330
    },
    {
      "epoch": 0.4479578392621871,
      "grad_norm": 0.7818257808685303,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.049,
      "step": 340
    },
    {
      "epoch": 0.461133069828722,
      "grad_norm": 0.929442286491394,
      "learning_rate": 6.98e-05,
      "loss": 0.0429,
      "step": 350
    },
    {
      "epoch": 0.4743083003952569,
      "grad_norm": 0.8063979148864746,
      "learning_rate": 7.18e-05,
      "loss": 0.0394,
      "step": 360
    },
    {
      "epoch": 0.4874835309617918,
      "grad_norm": 0.8431144952774048,
      "learning_rate": 7.38e-05,
      "loss": 0.048,
      "step": 370
    },
    {
      "epoch": 0.5006587615283268,
      "grad_norm": 0.6320706009864807,
      "learning_rate": 7.58e-05,
      "loss": 0.0445,
      "step": 380
    },
    {
      "epoch": 0.5138339920948617,
      "grad_norm": 0.6919974684715271,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.0515,
      "step": 390
    },
    {
      "epoch": 0.5270092226613966,
      "grad_norm": 1.066429615020752,
      "learning_rate": 7.98e-05,
      "loss": 0.048,
      "step": 400
    },
    {
      "epoch": 0.5401844532279315,
      "grad_norm": 0.7330025434494019,
      "learning_rate": 8.18e-05,
      "loss": 0.0499,
      "step": 410
    },
    {
      "epoch": 0.5533596837944664,
      "grad_norm": 0.642924427986145,
      "learning_rate": 8.38e-05,
      "loss": 0.0437,
      "step": 420
    },
    {
      "epoch": 0.5665349143610013,
      "grad_norm": 0.6752575635910034,
      "learning_rate": 8.58e-05,
      "loss": 0.0441,
      "step": 430
    },
    {
      "epoch": 0.5797101449275363,
      "grad_norm": 0.9005573391914368,
      "learning_rate": 8.78e-05,
      "loss": 0.0418,
      "step": 440
    },
    {
      "epoch": 0.5928853754940712,
      "grad_norm": 0.870056688785553,
      "learning_rate": 8.98e-05,
      "loss": 0.0451,
      "step": 450
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 1.2314468622207642,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.0521,
      "step": 460
    },
    {
      "epoch": 0.619235836627141,
      "grad_norm": 0.8210376501083374,
      "learning_rate": 9.38e-05,
      "loss": 0.0541,
      "step": 470
    },
    {
      "epoch": 0.6324110671936759,
      "grad_norm": 0.8480843901634216,
      "learning_rate": 9.58e-05,
      "loss": 0.0456,
      "step": 480
    },
    {
      "epoch": 0.6455862977602108,
      "grad_norm": 0.9127245545387268,
      "learning_rate": 9.78e-05,
      "loss": 0.0424,
      "step": 490
    },
    {
      "epoch": 0.6587615283267457,
      "grad_norm": 0.7549911141395569,
      "learning_rate": 9.98e-05,
      "loss": 0.0459,
      "step": 500
    },
    {
      "epoch": 0.6719367588932806,
      "grad_norm": 0.5965666174888611,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.0406,
      "step": 510
    },
    {
      "epoch": 0.6851119894598156,
      "grad_norm": 0.666847825050354,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.0417,
      "step": 520
    },
    {
      "epoch": 0.6982872200263505,
      "grad_norm": 0.4748551547527313,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.0475,
      "step": 530
    },
    {
      "epoch": 0.7114624505928854,
      "grad_norm": 1.098746418952942,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.0434,
      "step": 540
    },
    {
      "epoch": 0.7246376811594203,
      "grad_norm": 0.5824465751647949,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.0407,
      "step": 550
    },
    {
      "epoch": 0.7378129117259552,
      "grad_norm": 0.4799025058746338,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.0369,
      "step": 560
    },
    {
      "epoch": 0.7509881422924901,
      "grad_norm": 0.6298460960388184,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.0375,
      "step": 570
    },
    {
      "epoch": 0.764163372859025,
      "grad_norm": 0.6886503100395203,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.0389,
      "step": 580
    },
    {
      "epoch": 0.7773386034255599,
      "grad_norm": 0.5728000402450562,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.0417,
      "step": 590
    },
    {
      "epoch": 0.7905138339920948,
      "grad_norm": 0.7823150157928467,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.0409,
      "step": 600
    },
    {
      "epoch": 0.8036890645586298,
      "grad_norm": 0.8241381645202637,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.0459,
      "step": 610
    },
    {
      "epoch": 0.8168642951251647,
      "grad_norm": 0.5445210933685303,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.0352,
      "step": 620
    },
    {
      "epoch": 0.8300395256916996,
      "grad_norm": 0.5866085886955261,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.0295,
      "step": 630
    },
    {
      "epoch": 0.8432147562582345,
      "grad_norm": 0.7469897270202637,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.0321,
      "step": 640
    },
    {
      "epoch": 0.8563899868247694,
      "grad_norm": 0.869316041469574,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.0353,
      "step": 650
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 0.6988140344619751,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.0343,
      "step": 660
    },
    {
      "epoch": 0.8827404479578392,
      "grad_norm": 0.5596421360969543,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.0424,
      "step": 670
    },
    {
      "epoch": 0.8959156785243741,
      "grad_norm": 0.5170013308525085,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.0373,
      "step": 680
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.5016767382621765,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.0379,
      "step": 690
    },
    {
      "epoch": 0.922266139657444,
      "grad_norm": 0.445021390914917,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.0352,
      "step": 700
    },
    {
      "epoch": 0.9354413702239789,
      "grad_norm": 0.41842174530029297,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.0383,
      "step": 710
    },
    {
      "epoch": 0.9486166007905138,
      "grad_norm": 0.4440097212791443,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.0334,
      "step": 720
    },
    {
      "epoch": 0.9617918313570487,
      "grad_norm": 0.3641037940979004,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.034,
      "step": 730
    },
    {
      "epoch": 0.9749670619235836,
      "grad_norm": 0.5874915719032288,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.0334,
      "step": 740
    },
    {
      "epoch": 0.9881422924901185,
      "grad_norm": 0.736756443977356,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.038,
      "step": 750
    },
    {
      "epoch": 1.0013175230566536,
      "grad_norm": 0.6323824524879456,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.0344,
      "step": 760
    },
    {
      "epoch": 1.0144927536231885,
      "grad_norm": 0.4926735758781433,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.0293,
      "step": 770
    },
    {
      "epoch": 1.0276679841897234,
      "grad_norm": 0.6455330848693848,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.0282,
      "step": 780
    },
    {
      "epoch": 1.0408432147562583,
      "grad_norm": 0.7627984881401062,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.0296,
      "step": 790
    },
    {
      "epoch": 1.0540184453227932,
      "grad_norm": 0.5647096633911133,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.0349,
      "step": 800
    },
    {
      "epoch": 1.0671936758893281,
      "grad_norm": 0.3709547817707062,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.0322,
      "step": 810
    },
    {
      "epoch": 1.080368906455863,
      "grad_norm": 0.7317115068435669,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.0332,
      "step": 820
    },
    {
      "epoch": 1.093544137022398,
      "grad_norm": 0.6597622632980347,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.0338,
      "step": 830
    },
    {
      "epoch": 1.1067193675889329,
      "grad_norm": 0.4061703383922577,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.0304,
      "step": 840
    },
    {
      "epoch": 1.1198945981554678,
      "grad_norm": 0.4285595118999481,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.0374,
      "step": 850
    },
    {
      "epoch": 1.1330698287220027,
      "grad_norm": 0.5008792877197266,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.0269,
      "step": 860
    },
    {
      "epoch": 1.1462450592885376,
      "grad_norm": 0.3218404948711395,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.0333,
      "step": 870
    },
    {
      "epoch": 1.1594202898550725,
      "grad_norm": 0.550163984298706,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.0302,
      "step": 880
    },
    {
      "epoch": 1.1725955204216074,
      "grad_norm": 0.4781897962093353,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.0282,
      "step": 890
    },
    {
      "epoch": 1.1857707509881423,
      "grad_norm": 0.41555675864219666,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.0338,
      "step": 900
    },
    {
      "epoch": 1.1989459815546772,
      "grad_norm": 0.6632710099220276,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.0349,
      "step": 910
    },
    {
      "epoch": 1.2121212121212122,
      "grad_norm": 0.6058996915817261,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.0334,
      "step": 920
    },
    {
      "epoch": 1.225296442687747,
      "grad_norm": 0.46293482184410095,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.0296,
      "step": 930
    },
    {
      "epoch": 1.238471673254282,
      "grad_norm": 0.5244153738021851,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.0287,
      "step": 940
    },
    {
      "epoch": 1.251646903820817,
      "grad_norm": 0.4336259961128235,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.0327,
      "step": 950
    },
    {
      "epoch": 1.2648221343873518,
      "grad_norm": 0.5450809001922607,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.0287,
      "step": 960
    },
    {
      "epoch": 1.2779973649538867,
      "grad_norm": 0.37985944747924805,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.0294,
      "step": 970
    },
    {
      "epoch": 1.2911725955204216,
      "grad_norm": 0.6127066612243652,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.0265,
      "step": 980
    },
    {
      "epoch": 1.3043478260869565,
      "grad_norm": 0.6105954051017761,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.0341,
      "step": 990
    },
    {
      "epoch": 1.3175230566534915,
      "grad_norm": 0.350715696811676,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.026,
      "step": 1000
    },
    {
      "epoch": 1.3306982872200264,
      "grad_norm": 0.34700220823287964,
      "learning_rate": 9.929335307895689e-05,
      "loss": 0.0249,
      "step": 1010
    },
    {
      "epoch": 1.3438735177865613,
      "grad_norm": 0.2994444668292999,
      "learning_rate": 9.926538309841839e-05,
      "loss": 0.0254,
      "step": 1020
    },
    {
      "epoch": 1.3570487483530962,
      "grad_norm": 0.2695624530315399,
      "learning_rate": 9.923687435954334e-05,
      "loss": 0.0259,
      "step": 1030
    },
    {
      "epoch": 1.370223978919631,
      "grad_norm": 0.35280922055244446,
      "learning_rate": 9.920782717409873e-05,
      "loss": 0.0296,
      "step": 1040
    },
    {
      "epoch": 1.383399209486166,
      "grad_norm": 0.4313255548477173,
      "learning_rate": 9.917824185973994e-05,
      "loss": 0.0219,
      "step": 1050
    },
    {
      "epoch": 1.396574440052701,
      "grad_norm": 0.6447331309318542,
      "learning_rate": 9.914811874000723e-05,
      "loss": 0.033,
      "step": 1060
    },
    {
      "epoch": 1.4097496706192358,
      "grad_norm": 0.6714804172515869,
      "learning_rate": 9.911745814432218e-05,
      "loss": 0.0316,
      "step": 1070
    },
    {
      "epoch": 1.4229249011857708,
      "grad_norm": 0.35933995246887207,
      "learning_rate": 9.90862604079842e-05,
      "loss": 0.0288,
      "step": 1080
    },
    {
      "epoch": 1.4361001317523057,
      "grad_norm": 0.6764751672744751,
      "learning_rate": 9.90545258721667e-05,
      "loss": 0.0221,
      "step": 1090
    },
    {
      "epoch": 1.4492753623188406,
      "grad_norm": 0.4556847810745239,
      "learning_rate": 9.90222548839135e-05,
      "loss": 0.0261,
      "step": 1100
    },
    {
      "epoch": 1.4624505928853755,
      "grad_norm": 0.49368539452552795,
      "learning_rate": 9.898944779613495e-05,
      "loss": 0.0266,
      "step": 1110
    },
    {
      "epoch": 1.4756258234519104,
      "grad_norm": 0.3783435821533203,
      "learning_rate": 9.89561049676041e-05,
      "loss": 0.0259,
      "step": 1120
    },
    {
      "epoch": 1.4888010540184453,
      "grad_norm": 0.26338449120521545,
      "learning_rate": 9.89222267629528e-05,
      "loss": 0.0283,
      "step": 1130
    },
    {
      "epoch": 1.5019762845849802,
      "grad_norm": 0.4226028621196747,
      "learning_rate": 9.888781355266763e-05,
      "loss": 0.0242,
      "step": 1140
    },
    {
      "epoch": 1.5151515151515151,
      "grad_norm": 0.38758257031440735,
      "learning_rate": 9.885286571308598e-05,
      "loss": 0.0301,
      "step": 1150
    },
    {
      "epoch": 1.52832674571805,
      "grad_norm": 0.3099963366985321,
      "learning_rate": 9.881738362639182e-05,
      "loss": 0.026,
      "step": 1160
    },
    {
      "epoch": 1.541501976284585,
      "grad_norm": 0.519696056842804,
      "learning_rate": 9.878136768061154e-05,
      "loss": 0.0258,
      "step": 1170
    },
    {
      "epoch": 1.5546772068511199,
      "grad_norm": 0.4573246240615845,
      "learning_rate": 9.874481826960979e-05,
      "loss": 0.0247,
      "step": 1180
    },
    {
      "epoch": 1.5678524374176548,
      "grad_norm": 0.6357062458992004,
      "learning_rate": 9.870773579308503e-05,
      "loss": 0.0249,
      "step": 1190
    },
    {
      "epoch": 1.5810276679841897,
      "grad_norm": 0.5196475386619568,
      "learning_rate": 9.867012065656533e-05,
      "loss": 0.032,
      "step": 1200
    },
    {
      "epoch": 1.5942028985507246,
      "grad_norm": 0.7611187696456909,
      "learning_rate": 9.863197327140376e-05,
      "loss": 0.0302,
      "step": 1210
    },
    {
      "epoch": 1.6073781291172595,
      "grad_norm": 0.3952830731868744,
      "learning_rate": 9.859329405477403e-05,
      "loss": 0.0241,
      "step": 1220
    },
    {
      "epoch": 1.6205533596837944,
      "grad_norm": 0.5244176983833313,
      "learning_rate": 9.855408342966585e-05,
      "loss": 0.0285,
      "step": 1230
    },
    {
      "epoch": 1.6337285902503293,
      "grad_norm": 0.45377880334854126,
      "learning_rate": 9.851434182488033e-05,
      "loss": 0.0268,
      "step": 1240
    },
    {
      "epoch": 1.6469038208168643,
      "grad_norm": 0.35962337255477905,
      "learning_rate": 9.84740696750253e-05,
      "loss": 0.0304,
      "step": 1250
    },
    {
      "epoch": 1.6600790513833992,
      "grad_norm": 0.49337679147720337,
      "learning_rate": 9.843326742051055e-05,
      "loss": 0.0266,
      "step": 1260
    },
    {
      "epoch": 1.673254281949934,
      "grad_norm": 0.4094015061855316,
      "learning_rate": 9.839193550754297e-05,
      "loss": 0.0328,
      "step": 1270
    },
    {
      "epoch": 1.686429512516469,
      "grad_norm": 0.28054094314575195,
      "learning_rate": 9.835007438812177e-05,
      "loss": 0.0298,
      "step": 1280
    },
    {
      "epoch": 1.699604743083004,
      "grad_norm": 0.38139668107032776,
      "learning_rate": 9.830768452003341e-05,
      "loss": 0.0318,
      "step": 1290
    },
    {
      "epoch": 1.7127799736495388,
      "grad_norm": 0.3427455723285675,
      "learning_rate": 9.826476636684671e-05,
      "loss": 0.0262,
      "step": 1300
    },
    {
      "epoch": 1.7259552042160737,
      "grad_norm": 0.4167240858078003,
      "learning_rate": 9.822132039790773e-05,
      "loss": 0.026,
      "step": 1310
    },
    {
      "epoch": 1.7391304347826086,
      "grad_norm": 0.3868737816810608,
      "learning_rate": 9.817734708833461e-05,
      "loss": 0.0318,
      "step": 1320
    },
    {
      "epoch": 1.7523056653491436,
      "grad_norm": 0.2871069014072418,
      "learning_rate": 9.813284691901243e-05,
      "loss": 0.0278,
      "step": 1330
    },
    {
      "epoch": 1.7654808959156785,
      "grad_norm": 0.5268304347991943,
      "learning_rate": 9.808782037658792e-05,
      "loss": 0.027,
      "step": 1340
    },
    {
      "epoch": 1.7786561264822134,
      "grad_norm": 0.34222403168678284,
      "learning_rate": 9.804226795346411e-05,
      "loss": 0.0247,
      "step": 1350
    },
    {
      "epoch": 1.7918313570487485,
      "grad_norm": 0.21124961972236633,
      "learning_rate": 9.799619014779503e-05,
      "loss": 0.0267,
      "step": 1360
    },
    {
      "epoch": 1.8050065876152832,
      "grad_norm": 0.6499063968658447,
      "learning_rate": 9.794958746348013e-05,
      "loss": 0.0209,
      "step": 1370
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.2798737585544586,
      "learning_rate": 9.790246041015896e-05,
      "loss": 0.0237,
      "step": 1380
    },
    {
      "epoch": 1.831357048748353,
      "grad_norm": 0.5095801949501038,
      "learning_rate": 9.785480950320538e-05,
      "loss": 0.0204,
      "step": 1390
    },
    {
      "epoch": 1.8445322793148882,
      "grad_norm": 0.50572270154953,
      "learning_rate": 9.78066352637221e-05,
      "loss": 0.027,
      "step": 1400
    },
    {
      "epoch": 1.8577075098814229,
      "grad_norm": 0.3475058674812317,
      "learning_rate": 9.775793821853488e-05,
      "loss": 0.0239,
      "step": 1410
    },
    {
      "epoch": 1.870882740447958,
      "grad_norm": 0.44244056940078735,
      "learning_rate": 9.77087189001868e-05,
      "loss": 0.0289,
      "step": 1420
    },
    {
      "epoch": 1.8840579710144927,
      "grad_norm": 0.3293551206588745,
      "learning_rate": 9.765897784693243e-05,
      "loss": 0.023,
      "step": 1430
    },
    {
      "epoch": 1.8972332015810278,
      "grad_norm": 0.31059810519218445,
      "learning_rate": 9.760871560273197e-05,
      "loss": 0.0247,
      "step": 1440
    },
    {
      "epoch": 1.9104084321475625,
      "grad_norm": 0.396855890750885,
      "learning_rate": 9.755793271724526e-05,
      "loss": 0.0227,
      "step": 1450
    },
    {
      "epoch": 1.9235836627140976,
      "grad_norm": 0.3800758719444275,
      "learning_rate": 9.750662974582584e-05,
      "loss": 0.0265,
      "step": 1460
    },
    {
      "epoch": 1.9367588932806323,
      "grad_norm": 0.3450396656990051,
      "learning_rate": 9.745480724951473e-05,
      "loss": 0.0278,
      "step": 1470
    },
    {
      "epoch": 1.9499341238471675,
      "grad_norm": 0.5375139117240906,
      "learning_rate": 9.740246579503447e-05,
      "loss": 0.0291,
      "step": 1480
    },
    {
      "epoch": 1.9631093544137022,
      "grad_norm": 0.48629283905029297,
      "learning_rate": 9.734960595478284e-05,
      "loss": 0.0266,
      "step": 1490
    },
    {
      "epoch": 1.9762845849802373,
      "grad_norm": 0.5940545797348022,
      "learning_rate": 9.729622830682657e-05,
      "loss": 0.0309,
      "step": 1500
    },
    {
      "epoch": 1.989459815546772,
      "grad_norm": 0.4284568428993225,
      "learning_rate": 9.724233343489504e-05,
      "loss": 0.0303,
      "step": 1510
    },
    {
      "epoch": 2.002635046113307,
      "grad_norm": 0.649758517742157,
      "learning_rate": 9.718792192837396e-05,
      "loss": 0.0281,
      "step": 1520
    },
    {
      "epoch": 2.015810276679842,
      "grad_norm": 0.3681211471557617,
      "learning_rate": 9.713299438229886e-05,
      "loss": 0.0271,
      "step": 1530
    },
    {
      "epoch": 2.028985507246377,
      "grad_norm": 0.5985534191131592,
      "learning_rate": 9.707755139734855e-05,
      "loss": 0.0302,
      "step": 1540
    },
    {
      "epoch": 2.0421607378129116,
      "grad_norm": 0.6348494291305542,
      "learning_rate": 9.702159357983866e-05,
      "loss": 0.0358,
      "step": 1550
    },
    {
      "epoch": 2.0553359683794468,
      "grad_norm": 0.7188793420791626,
      "learning_rate": 9.696512154171492e-05,
      "loss": 0.0244,
      "step": 1560
    },
    {
      "epoch": 2.0685111989459815,
      "grad_norm": 0.7274630069732666,
      "learning_rate": 9.690813590054645e-05,
      "loss": 0.0271,
      "step": 1570
    },
    {
      "epoch": 2.0816864295125166,
      "grad_norm": 0.4827321171760559,
      "learning_rate": 9.685063727951914e-05,
      "loss": 0.0256,
      "step": 1580
    },
    {
      "epoch": 2.0948616600790513,
      "grad_norm": 0.539287805557251,
      "learning_rate": 9.679262630742865e-05,
      "loss": 0.0272,
      "step": 1590
    },
    {
      "epoch": 2.1080368906455864,
      "grad_norm": 0.34089940786361694,
      "learning_rate": 9.673410361867373e-05,
      "loss": 0.0238,
      "step": 1600
    },
    {
      "epoch": 2.121212121212121,
      "grad_norm": 0.32052725553512573,
      "learning_rate": 9.667506985324909e-05,
      "loss": 0.0208,
      "step": 1610
    },
    {
      "epoch": 2.1343873517786562,
      "grad_norm": 0.369957834482193,
      "learning_rate": 9.661552565673855e-05,
      "loss": 0.0214,
      "step": 1620
    },
    {
      "epoch": 2.147562582345191,
      "grad_norm": 0.26442891359329224,
      "learning_rate": 9.655547168030789e-05,
      "loss": 0.0195,
      "step": 1630
    },
    {
      "epoch": 2.160737812911726,
      "grad_norm": 0.47019681334495544,
      "learning_rate": 9.649490858069777e-05,
      "loss": 0.0221,
      "step": 1640
    },
    {
      "epoch": 2.1739130434782608,
      "grad_norm": 0.22871090471744537,
      "learning_rate": 9.643383702021658e-05,
      "loss": 0.0259,
      "step": 1650
    },
    {
      "epoch": 2.187088274044796,
      "grad_norm": 0.2812241315841675,
      "learning_rate": 9.637225766673307e-05,
      "loss": 0.0213,
      "step": 1660
    },
    {
      "epoch": 2.2002635046113306,
      "grad_norm": 0.472553014755249,
      "learning_rate": 9.631017119366922e-05,
      "loss": 0.0216,
      "step": 1670
    },
    {
      "epoch": 2.2134387351778657,
      "grad_norm": 0.4297308623790741,
      "learning_rate": 9.624757827999273e-05,
      "loss": 0.0239,
      "step": 1680
    },
    {
      "epoch": 2.2266139657444004,
      "grad_norm": 0.2847653329372406,
      "learning_rate": 9.618447961020971e-05,
      "loss": 0.0245,
      "step": 1690
    },
    {
      "epoch": 2.2397891963109355,
      "grad_norm": 0.3599100708961487,
      "learning_rate": 9.612087587435707e-05,
      "loss": 0.019,
      "step": 1700
    },
    {
      "epoch": 2.2529644268774702,
      "grad_norm": 0.306449830532074,
      "learning_rate": 9.605676776799508e-05,
      "loss": 0.0216,
      "step": 1710
    },
    {
      "epoch": 2.2661396574440054,
      "grad_norm": 0.40967902541160583,
      "learning_rate": 9.599215599219973e-05,
      "loss": 0.0235,
      "step": 1720
    },
    {
      "epoch": 2.27931488801054,
      "grad_norm": 0.35484758019447327,
      "learning_rate": 9.592704125355505e-05,
      "loss": 0.0218,
      "step": 1730
    },
    {
      "epoch": 2.292490118577075,
      "grad_norm": 0.3785801827907562,
      "learning_rate": 9.586142426414538e-05,
      "loss": 0.0239,
      "step": 1740
    },
    {
      "epoch": 2.30566534914361,
      "grad_norm": 0.24761465191841125,
      "learning_rate": 9.57953057415476e-05,
      "loss": 0.0244,
      "step": 1750
    },
    {
      "epoch": 2.318840579710145,
      "grad_norm": 0.22312359511852264,
      "learning_rate": 9.572868640882328e-05,
      "loss": 0.0266,
      "step": 1760
    },
    {
      "epoch": 2.3320158102766797,
      "grad_norm": 0.3827199339866638,
      "learning_rate": 9.56615669945108e-05,
      "loss": 0.0215,
      "step": 1770
    },
    {
      "epoch": 2.345191040843215,
      "grad_norm": 0.5303699374198914,
      "learning_rate": 9.55939482326173e-05,
      "loss": 0.0241,
      "step": 1780
    },
    {
      "epoch": 2.3583662714097495,
      "grad_norm": 0.2927263081073761,
      "learning_rate": 9.552583086261069e-05,
      "loss": 0.025,
      "step": 1790
    },
    {
      "epoch": 2.3715415019762847,
      "grad_norm": 0.36540526151657104,
      "learning_rate": 9.545721562941168e-05,
      "loss": 0.026,
      "step": 1800
    },
    {
      "epoch": 2.3847167325428194,
      "grad_norm": 0.3504578471183777,
      "learning_rate": 9.538810328338543e-05,
      "loss": 0.0204,
      "step": 1810
    },
    {
      "epoch": 2.3978919631093545,
      "grad_norm": 0.268880158662796,
      "learning_rate": 9.531849458033349e-05,
      "loss": 0.02,
      "step": 1820
    },
    {
      "epoch": 2.411067193675889,
      "grad_norm": 0.27113309502601624,
      "learning_rate": 9.524839028148547e-05,
      "loss": 0.0265,
      "step": 1830
    },
    {
      "epoch": 2.4242424242424243,
      "grad_norm": 0.2458828240633011,
      "learning_rate": 9.517779115349077e-05,
      "loss": 0.0217,
      "step": 1840
    },
    {
      "epoch": 2.437417654808959,
      "grad_norm": 0.41761359572410583,
      "learning_rate": 9.510669796841014e-05,
      "loss": 0.0258,
      "step": 1850
    },
    {
      "epoch": 2.450592885375494,
      "grad_norm": 0.6113036274909973,
      "learning_rate": 9.503511150370727e-05,
      "loss": 0.0241,
      "step": 1860
    },
    {
      "epoch": 2.463768115942029,
      "grad_norm": 0.4670635759830475,
      "learning_rate": 9.496303254224024e-05,
      "loss": 0.0293,
      "step": 1870
    },
    {
      "epoch": 2.476943346508564,
      "grad_norm": 0.39811426401138306,
      "learning_rate": 9.489046187225306e-05,
      "loss": 0.0241,
      "step": 1880
    },
    {
      "epoch": 2.4901185770750986,
      "grad_norm": 0.4884253442287445,
      "learning_rate": 9.481740028736692e-05,
      "loss": 0.0233,
      "step": 1890
    },
    {
      "epoch": 2.503293807641634,
      "grad_norm": 0.41597744822502136,
      "learning_rate": 9.474384858657164e-05,
      "loss": 0.0257,
      "step": 1900
    },
    {
      "epoch": 2.516469038208169,
      "grad_norm": 0.3447679877281189,
      "learning_rate": 9.466980757421679e-05,
      "loss": 0.0262,
      "step": 1910
    },
    {
      "epoch": 2.5296442687747036,
      "grad_norm": 0.3724343776702881,
      "learning_rate": 9.459527806000305e-05,
      "loss": 0.025,
      "step": 1920
    },
    {
      "epoch": 2.5428194993412383,
      "grad_norm": 0.5130154490470886,
      "learning_rate": 9.452026085897325e-05,
      "loss": 0.0268,
      "step": 1930
    },
    {
      "epoch": 2.5559947299077734,
      "grad_norm": 0.323124498128891,
      "learning_rate": 9.444475679150348e-05,
      "loss": 0.0225,
      "step": 1940
    },
    {
      "epoch": 2.5691699604743086,
      "grad_norm": 0.3052145838737488,
      "learning_rate": 9.436876668329411e-05,
      "loss": 0.0261,
      "step": 1950
    },
    {
      "epoch": 2.5823451910408433,
      "grad_norm": 0.424175500869751,
      "learning_rate": 9.429229136536079e-05,
      "loss": 0.0217,
      "step": 1960
    },
    {
      "epoch": 2.595520421607378,
      "grad_norm": 0.3193293809890747,
      "learning_rate": 9.421533167402534e-05,
      "loss": 0.0236,
      "step": 1970
    },
    {
      "epoch": 2.608695652173913,
      "grad_norm": 0.3281519412994385,
      "learning_rate": 9.413788845090666e-05,
      "loss": 0.024,
      "step": 1980
    },
    {
      "epoch": 2.621870882740448,
      "grad_norm": 0.5388944745063782,
      "learning_rate": 9.405996254291136e-05,
      "loss": 0.0254,
      "step": 1990
    },
    {
      "epoch": 2.635046113306983,
      "grad_norm": 0.43273690342903137,
      "learning_rate": 9.398155480222474e-05,
      "loss": 0.0216,
      "step": 2000
    },
    {
      "epoch": 2.6482213438735176,
      "grad_norm": 0.33787184953689575,
      "learning_rate": 9.390266608630128e-05,
      "loss": 0.0218,
      "step": 2010
    },
    {
      "epoch": 2.6613965744400527,
      "grad_norm": 0.4092657268047333,
      "learning_rate": 9.38232972578553e-05,
      "loss": 0.0205,
      "step": 2020
    },
    {
      "epoch": 2.674571805006588,
      "grad_norm": 0.3141351342201233,
      "learning_rate": 9.374344918485164e-05,
      "loss": 0.0303,
      "step": 2030
    },
    {
      "epoch": 2.6877470355731226,
      "grad_norm": 0.43514305353164673,
      "learning_rate": 9.366312274049602e-05,
      "loss": 0.0276,
      "step": 2040
    },
    {
      "epoch": 2.7009222661396572,
      "grad_norm": 0.5144362449645996,
      "learning_rate": 9.358231880322554e-05,
      "loss": 0.0245,
      "step": 2050
    },
    {
      "epoch": 2.7140974967061924,
      "grad_norm": 0.3669821619987488,
      "learning_rate": 9.350103825669916e-05,
      "loss": 0.0219,
      "step": 2060
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 0.5248842835426331,
      "learning_rate": 9.341928198978787e-05,
      "loss": 0.0194,
      "step": 2070
    },
    {
      "epoch": 2.740447957839262,
      "grad_norm": 0.24729865789413452,
      "learning_rate": 9.333705089656512e-05,
      "loss": 0.0218,
      "step": 2080
    },
    {
      "epoch": 2.753623188405797,
      "grad_norm": 0.2766522467136383,
      "learning_rate": 9.325434587629698e-05,
      "loss": 0.0165,
      "step": 2090
    },
    {
      "epoch": 2.766798418972332,
      "grad_norm": 0.3554515838623047,
      "learning_rate": 9.31711678334323e-05,
      "loss": 0.018,
      "step": 2100
    },
    {
      "epoch": 2.779973649538867,
      "grad_norm": 0.37791627645492554,
      "learning_rate": 9.308751767759282e-05,
      "loss": 0.0206,
      "step": 2110
    },
    {
      "epoch": 2.793148880105402,
      "grad_norm": 0.48156484961509705,
      "learning_rate": 9.300339632356325e-05,
      "loss": 0.0205,
      "step": 2120
    },
    {
      "epoch": 2.8063241106719365,
      "grad_norm": 0.21726833283901215,
      "learning_rate": 9.291880469128124e-05,
      "loss": 0.0235,
      "step": 2130
    },
    {
      "epoch": 2.8194993412384717,
      "grad_norm": 0.3837207555770874,
      "learning_rate": 9.283374370582732e-05,
      "loss": 0.0232,
      "step": 2140
    },
    {
      "epoch": 2.832674571805007,
      "grad_norm": 0.21415646374225616,
      "learning_rate": 9.274821429741482e-05,
      "loss": 0.0246,
      "step": 2150
    },
    {
      "epoch": 2.8458498023715415,
      "grad_norm": 0.2206660807132721,
      "learning_rate": 9.266221740137961e-05,
      "loss": 0.0225,
      "step": 2160
    },
    {
      "epoch": 2.859025032938076,
      "grad_norm": 0.30975693464279175,
      "learning_rate": 9.257575395817001e-05,
      "loss": 0.0192,
      "step": 2170
    },
    {
      "epoch": 2.8722002635046113,
      "grad_norm": 0.34590449929237366,
      "learning_rate": 9.248882491333637e-05,
      "loss": 0.023,
      "step": 2180
    },
    {
      "epoch": 2.8853754940711465,
      "grad_norm": 0.2704302966594696,
      "learning_rate": 9.240143121752076e-05,
      "loss": 0.0211,
      "step": 2190
    },
    {
      "epoch": 2.898550724637681,
      "grad_norm": 0.5347023606300354,
      "learning_rate": 9.23135738264467e-05,
      "loss": 0.0187,
      "step": 2200
    },
    {
      "epoch": 2.911725955204216,
      "grad_norm": 0.23387780785560608,
      "learning_rate": 9.222525370090849e-05,
      "loss": 0.02,
      "step": 2210
    },
    {
      "epoch": 2.924901185770751,
      "grad_norm": 0.285714328289032,
      "learning_rate": 9.213647180676088e-05,
      "loss": 0.0169,
      "step": 2220
    },
    {
      "epoch": 2.938076416337286,
      "grad_norm": 0.34807008504867554,
      "learning_rate": 9.204722911490846e-05,
      "loss": 0.0157,
      "step": 2230
    },
    {
      "epoch": 2.951251646903821,
      "grad_norm": 0.20837929844856262,
      "learning_rate": 9.1957526601295e-05,
      "loss": 0.02,
      "step": 2240
    },
    {
      "epoch": 2.9644268774703555,
      "grad_norm": 0.43575602769851685,
      "learning_rate": 9.186736524689281e-05,
      "loss": 0.0249,
      "step": 2250
    },
    {
      "epoch": 2.9776021080368906,
      "grad_norm": 0.3351643979549408,
      "learning_rate": 9.177674603769204e-05,
      "loss": 0.0228,
      "step": 2260
    },
    {
      "epoch": 2.9907773386034258,
      "grad_norm": 0.521143913269043,
      "learning_rate": 9.168566996468983e-05,
      "loss": 0.0232,
      "step": 2270
    },
    {
      "epoch": 3.0039525691699605,
      "grad_norm": 0.23659829795360565,
      "learning_rate": 9.159413802387951e-05,
      "loss": 0.0245,
      "step": 2280
    },
    {
      "epoch": 3.0171277997364956,
      "grad_norm": 0.3649868071079254,
      "learning_rate": 9.150215121623974e-05,
      "loss": 0.0238,
      "step": 2290
    },
    {
      "epoch": 3.0303030303030303,
      "grad_norm": 0.29252931475639343,
      "learning_rate": 9.140971054772349e-05,
      "loss": 0.0156,
      "step": 2300
    },
    {
      "epoch": 3.0434782608695654,
      "grad_norm": 0.27642548084259033,
      "learning_rate": 9.131681702924713e-05,
      "loss": 0.0211,
      "step": 2310
    },
    {
      "epoch": 3.0566534914361,
      "grad_norm": 0.1891625076532364,
      "learning_rate": 9.122347167667926e-05,
      "loss": 0.0217,
      "step": 2320
    },
    {
      "epoch": 3.0698287220026352,
      "grad_norm": 0.2757662236690521,
      "learning_rate": 9.112967551082973e-05,
      "loss": 0.0249,
      "step": 2330
    },
    {
      "epoch": 3.08300395256917,
      "grad_norm": 0.23346559703350067,
      "learning_rate": 9.103542955743835e-05,
      "loss": 0.0219,
      "step": 2340
    },
    {
      "epoch": 3.096179183135705,
      "grad_norm": 0.2930508852005005,
      "learning_rate": 9.094073484716381e-05,
      "loss": 0.0238,
      "step": 2350
    },
    {
      "epoch": 3.1093544137022397,
      "grad_norm": 0.25949984788894653,
      "learning_rate": 9.084559241557226e-05,
      "loss": 0.023,
      "step": 2360
    },
    {
      "epoch": 3.122529644268775,
      "grad_norm": 0.2743162512779236,
      "learning_rate": 9.075000330312608e-05,
      "loss": 0.0216,
      "step": 2370
    },
    {
      "epoch": 3.1357048748353096,
      "grad_norm": 0.3307958245277405,
      "learning_rate": 9.065396855517253e-05,
      "loss": 0.0167,
      "step": 2380
    },
    {
      "epoch": 3.1488801054018447,
      "grad_norm": 0.4071004390716553,
      "learning_rate": 9.055748922193219e-05,
      "loss": 0.0255,
      "step": 2390
    },
    {
      "epoch": 3.1620553359683794,
      "grad_norm": 0.2871227562427521,
      "learning_rate": 9.046056635848761e-05,
      "loss": 0.0247,
      "step": 2400
    },
    {
      "epoch": 3.1752305665349145,
      "grad_norm": 0.4296686351299286,
      "learning_rate": 9.036320102477169e-05,
      "loss": 0.0197,
      "step": 2410
    },
    {
      "epoch": 3.1884057971014492,
      "grad_norm": 0.2433115541934967,
      "learning_rate": 9.02653942855561e-05,
      "loss": 0.0207,
      "step": 2420
    },
    {
      "epoch": 3.2015810276679844,
      "grad_norm": 0.28873997926712036,
      "learning_rate": 9.016714721043971e-05,
      "loss": 0.0175,
      "step": 2430
    },
    {
      "epoch": 3.214756258234519,
      "grad_norm": 0.3427509069442749,
      "learning_rate": 9.006846087383675e-05,
      "loss": 0.0216,
      "step": 2440
    },
    {
      "epoch": 3.227931488801054,
      "grad_norm": 0.5131787061691284,
      "learning_rate": 8.996933635496523e-05,
      "loss": 0.0203,
      "step": 2450
    },
    {
      "epoch": 3.241106719367589,
      "grad_norm": 0.347695529460907,
      "learning_rate": 8.986977473783498e-05,
      "loss": 0.0217,
      "step": 2460
    },
    {
      "epoch": 3.254281949934124,
      "grad_norm": 0.3435944616794586,
      "learning_rate": 8.97697771112359e-05,
      "loss": 0.0194,
      "step": 2470
    },
    {
      "epoch": 3.2674571805006587,
      "grad_norm": 0.24768733978271484,
      "learning_rate": 8.966934456872602e-05,
      "loss": 0.0206,
      "step": 2480
    },
    {
      "epoch": 3.280632411067194,
      "grad_norm": 0.3580437898635864,
      "learning_rate": 8.95684782086195e-05,
      "loss": 0.0171,
      "step": 2490
    },
    {
      "epoch": 3.2938076416337285,
      "grad_norm": 0.38864248991012573,
      "learning_rate": 8.946717913397476e-05,
      "loss": 0.0223,
      "step": 2500
    },
    {
      "epoch": 3.3069828722002637,
      "grad_norm": 0.35056614875793457,
      "learning_rate": 8.93654484525822e-05,
      "loss": 0.0195,
      "step": 2510
    },
    {
      "epoch": 3.3201581027667983,
      "grad_norm": 0.1966569721698761,
      "learning_rate": 8.926328727695226e-05,
      "loss": 0.0192,
      "step": 2520
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 0.2948076128959656,
      "learning_rate": 8.916069672430319e-05,
      "loss": 0.0192,
      "step": 2530
    },
    {
      "epoch": 3.346508563899868,
      "grad_norm": 0.32685697078704834,
      "learning_rate": 8.905767791654884e-05,
      "loss": 0.0192,
      "step": 2540
    },
    {
      "epoch": 3.3596837944664033,
      "grad_norm": 0.26307734847068787,
      "learning_rate": 8.895423198028638e-05,
      "loss": 0.0237,
      "step": 2550
    },
    {
      "epoch": 3.372859025032938,
      "grad_norm": 0.27680397033691406,
      "learning_rate": 8.885036004678402e-05,
      "loss": 0.0212,
      "step": 2560
    },
    {
      "epoch": 3.386034255599473,
      "grad_norm": 0.2818705141544342,
      "learning_rate": 8.874606325196857e-05,
      "loss": 0.0168,
      "step": 2570
    },
    {
      "epoch": 3.399209486166008,
      "grad_norm": 0.2630791664123535,
      "learning_rate": 8.864134273641304e-05,
      "loss": 0.0223,
      "step": 2580
    },
    {
      "epoch": 3.412384716732543,
      "grad_norm": 0.28061074018478394,
      "learning_rate": 8.853619964532427e-05,
      "loss": 0.0219,
      "step": 2590
    },
    {
      "epoch": 3.4255599472990776,
      "grad_norm": 0.1784231811761856,
      "learning_rate": 8.843063512853019e-05,
      "loss": 0.0176,
      "step": 2600
    },
    {
      "epoch": 3.438735177865613,
      "grad_norm": 0.23167821764945984,
      "learning_rate": 8.832465034046749e-05,
      "loss": 0.0204,
      "step": 2610
    },
    {
      "epoch": 3.4519104084321475,
      "grad_norm": 0.25540560483932495,
      "learning_rate": 8.821824644016882e-05,
      "loss": 0.0185,
      "step": 2620
    },
    {
      "epoch": 3.4650856389986826,
      "grad_norm": 0.5046829581260681,
      "learning_rate": 8.811142459125019e-05,
      "loss": 0.0191,
      "step": 2630
    },
    {
      "epoch": 3.4782608695652173,
      "grad_norm": 0.45605048537254333,
      "learning_rate": 8.800418596189822e-05,
      "loss": 0.0186,
      "step": 2640
    },
    {
      "epoch": 3.4914361001317524,
      "grad_norm": 0.34779468178749084,
      "learning_rate": 8.789653172485737e-05,
      "loss": 0.0209,
      "step": 2650
    },
    {
      "epoch": 3.504611330698287,
      "grad_norm": 0.26932665705680847,
      "learning_rate": 8.778846305741715e-05,
      "loss": 0.0295,
      "step": 2660
    },
    {
      "epoch": 3.5177865612648223,
      "grad_norm": 0.2671760320663452,
      "learning_rate": 8.767998114139918e-05,
      "loss": 0.0227,
      "step": 2670
    },
    {
      "epoch": 3.530961791831357,
      "grad_norm": 0.5423169732093811,
      "learning_rate": 8.757108716314429e-05,
      "loss": 0.0211,
      "step": 2680
    },
    {
      "epoch": 3.544137022397892,
      "grad_norm": 0.603018045425415,
      "learning_rate": 8.746178231349962e-05,
      "loss": 0.0168,
      "step": 2690
    },
    {
      "epoch": 3.5573122529644268,
      "grad_norm": 0.33711379766464233,
      "learning_rate": 8.735206778780549e-05,
      "loss": 0.0231,
      "step": 2700
    },
    {
      "epoch": 3.570487483530962,
      "grad_norm": 0.4962400197982788,
      "learning_rate": 8.724194478588234e-05,
      "loss": 0.0233,
      "step": 2710
    },
    {
      "epoch": 3.5836627140974966,
      "grad_norm": 0.4521920680999756,
      "learning_rate": 8.713141451201772e-05,
      "loss": 0.0194,
      "step": 2720
    },
    {
      "epoch": 3.5968379446640317,
      "grad_norm": 0.5082101821899414,
      "learning_rate": 8.702047817495295e-05,
      "loss": 0.0207,
      "step": 2730
    },
    {
      "epoch": 3.6100131752305664,
      "grad_norm": 0.3494955003261566,
      "learning_rate": 8.69091369878701e-05,
      "loss": 0.0244,
      "step": 2740
    },
    {
      "epoch": 3.6231884057971016,
      "grad_norm": 0.359284371137619,
      "learning_rate": 8.679739216837849e-05,
      "loss": 0.0161,
      "step": 2750
    },
    {
      "epoch": 3.6363636363636362,
      "grad_norm": 0.4799230992794037,
      "learning_rate": 8.66852449385016e-05,
      "loss": 0.0178,
      "step": 2760
    },
    {
      "epoch": 3.6495388669301714,
      "grad_norm": 0.37354904413223267,
      "learning_rate": 8.657269652466356e-05,
      "loss": 0.0193,
      "step": 2770
    },
    {
      "epoch": 3.662714097496706,
      "grad_norm": 0.2906670570373535,
      "learning_rate": 8.645974815767577e-05,
      "loss": 0.0203,
      "step": 2780
    },
    {
      "epoch": 3.675889328063241,
      "grad_norm": 0.2389180213212967,
      "learning_rate": 8.634640107272351e-05,
      "loss": 0.0166,
      "step": 2790
    },
    {
      "epoch": 3.689064558629776,
      "grad_norm": 0.2554898262023926,
      "learning_rate": 8.623265650935234e-05,
      "loss": 0.0183,
      "step": 2800
    },
    {
      "epoch": 3.702239789196311,
      "grad_norm": 0.18219242990016937,
      "learning_rate": 8.611851571145456e-05,
      "loss": 0.0217,
      "step": 2810
    },
    {
      "epoch": 3.7154150197628457,
      "grad_norm": 0.292879194021225,
      "learning_rate": 8.600397992725566e-05,
      "loss": 0.0154,
      "step": 2820
    },
    {
      "epoch": 3.728590250329381,
      "grad_norm": 0.2756798565387726,
      "learning_rate": 8.588905040930061e-05,
      "loss": 0.019,
      "step": 2830
    },
    {
      "epoch": 3.7417654808959155,
      "grad_norm": 0.2696395218372345,
      "learning_rate": 8.577372841444022e-05,
      "loss": 0.0182,
      "step": 2840
    },
    {
      "epoch": 3.7549407114624507,
      "grad_norm": 0.27421027421951294,
      "learning_rate": 8.565801520381736e-05,
      "loss": 0.0177,
      "step": 2850
    },
    {
      "epoch": 3.7681159420289854,
      "grad_norm": 0.29462969303131104,
      "learning_rate": 8.554191204285313e-05,
      "loss": 0.0199,
      "step": 2860
    },
    {
      "epoch": 3.7812911725955205,
      "grad_norm": 0.39727547764778137,
      "learning_rate": 8.542542020123315e-05,
      "loss": 0.0193,
      "step": 2870
    },
    {
      "epoch": 3.794466403162055,
      "grad_norm": 0.3851800560951233,
      "learning_rate": 8.530854095289347e-05,
      "loss": 0.0196,
      "step": 2880
    },
    {
      "epoch": 3.8076416337285903,
      "grad_norm": 0.49293825030326843,
      "learning_rate": 8.519127557600688e-05,
      "loss": 0.0184,
      "step": 2890
    },
    {
      "epoch": 3.820816864295125,
      "grad_norm": 0.40799012780189514,
      "learning_rate": 8.507362535296871e-05,
      "loss": 0.0204,
      "step": 2900
    },
    {
      "epoch": 3.83399209486166,
      "grad_norm": 0.43597811460494995,
      "learning_rate": 8.495559157038299e-05,
      "loss": 0.0275,
      "step": 2910
    },
    {
      "epoch": 3.847167325428195,
      "grad_norm": 0.35586801171302795,
      "learning_rate": 8.483717551904823e-05,
      "loss": 0.0205,
      "step": 2920
    },
    {
      "epoch": 3.86034255599473,
      "grad_norm": 0.4549504816532135,
      "learning_rate": 8.47183784939434e-05,
      "loss": 0.0209,
      "step": 2930
    },
    {
      "epoch": 3.8735177865612647,
      "grad_norm": 0.4210937023162842,
      "learning_rate": 8.459920179421374e-05,
      "loss": 0.0212,
      "step": 2940
    },
    {
      "epoch": 3.8866930171278,
      "grad_norm": 0.30184030532836914,
      "learning_rate": 8.447964672315656e-05,
      "loss": 0.0186,
      "step": 2950
    },
    {
      "epoch": 3.8998682476943345,
      "grad_norm": 0.4085947275161743,
      "learning_rate": 8.435971458820692e-05,
      "loss": 0.021,
      "step": 2960
    },
    {
      "epoch": 3.9130434782608696,
      "grad_norm": 0.44483330845832825,
      "learning_rate": 8.423940670092345e-05,
      "loss": 0.0196,
      "step": 2970
    },
    {
      "epoch": 3.9262187088274043,
      "grad_norm": 0.3594276010990143,
      "learning_rate": 8.411872437697394e-05,
      "loss": 0.019,
      "step": 2980
    },
    {
      "epoch": 3.9393939393939394,
      "grad_norm": 0.33870217204093933,
      "learning_rate": 8.399766893612096e-05,
      "loss": 0.0172,
      "step": 2990
    },
    {
      "epoch": 3.9525691699604746,
      "grad_norm": 0.28566181659698486,
      "learning_rate": 8.38762417022074e-05,
      "loss": 0.0171,
      "step": 3000
    },
    {
      "epoch": 3.9657444005270093,
      "grad_norm": 0.18129827082157135,
      "learning_rate": 8.375444400314204e-05,
      "loss": 0.0182,
      "step": 3010
    },
    {
      "epoch": 3.978919631093544,
      "grad_norm": 0.471519410610199,
      "learning_rate": 8.3632277170885e-05,
      "loss": 0.0238,
      "step": 3020
    },
    {
      "epoch": 3.992094861660079,
      "grad_norm": 0.2733546793460846,
      "learning_rate": 8.350974254143318e-05,
      "loss": 0.0182,
      "step": 3030
    },
    {
      "epoch": 4.005270092226614,
      "grad_norm": 0.2888932526111603,
      "learning_rate": 8.338684145480566e-05,
      "loss": 0.0157,
      "step": 3040
    },
    {
      "epoch": 4.0184453227931485,
      "grad_norm": 0.31720566749572754,
      "learning_rate": 8.326357525502904e-05,
      "loss": 0.0175,
      "step": 3050
    },
    {
      "epoch": 4.031620553359684,
      "grad_norm": 0.2824496030807495,
      "learning_rate": 8.313994529012273e-05,
      "loss": 0.0202,
      "step": 3060
    },
    {
      "epoch": 4.044795783926219,
      "grad_norm": 0.32166388630867004,
      "learning_rate": 8.301595291208422e-05,
      "loss": 0.0225,
      "step": 3070
    },
    {
      "epoch": 4.057971014492754,
      "grad_norm": 0.5426804423332214,
      "learning_rate": 8.289159947687427e-05,
      "loss": 0.016,
      "step": 3080
    },
    {
      "epoch": 4.071146245059288,
      "grad_norm": 0.3431253433227539,
      "learning_rate": 8.276688634440216e-05,
      "loss": 0.0178,
      "step": 3090
    },
    {
      "epoch": 4.084321475625823,
      "grad_norm": 0.32741838693618774,
      "learning_rate": 8.26418148785107e-05,
      "loss": 0.0164,
      "step": 3100
    },
    {
      "epoch": 4.097496706192358,
      "grad_norm": 0.2607785165309906,
      "learning_rate": 8.251638644696141e-05,
      "loss": 0.018,
      "step": 3110
    },
    {
      "epoch": 4.1106719367588935,
      "grad_norm": 0.31585800647735596,
      "learning_rate": 8.23906024214195e-05,
      "loss": 0.0246,
      "step": 3120
    },
    {
      "epoch": 4.123847167325428,
      "grad_norm": 0.6056305170059204,
      "learning_rate": 8.226446417743897e-05,
      "loss": 0.0205,
      "step": 3130
    },
    {
      "epoch": 4.137022397891963,
      "grad_norm": 0.39446625113487244,
      "learning_rate": 8.213797309444742e-05,
      "loss": 0.0243,
      "step": 3140
    },
    {
      "epoch": 4.150197628458498,
      "grad_norm": 0.23034586012363434,
      "learning_rate": 8.201113055573105e-05,
      "loss": 0.019,
      "step": 3150
    },
    {
      "epoch": 4.163372859025033,
      "grad_norm": 0.28100505471229553,
      "learning_rate": 8.188393794841958e-05,
      "loss": 0.0142,
      "step": 3160
    },
    {
      "epoch": 4.176548089591567,
      "grad_norm": 0.19925159215927124,
      "learning_rate": 8.175639666347094e-05,
      "loss": 0.0208,
      "step": 3170
    },
    {
      "epoch": 4.189723320158103,
      "grad_norm": 0.4441159963607788,
      "learning_rate": 8.162850809565623e-05,
      "loss": 0.0176,
      "step": 3180
    },
    {
      "epoch": 4.202898550724638,
      "grad_norm": 0.2651693820953369,
      "learning_rate": 8.150027364354431e-05,
      "loss": 0.0187,
      "step": 3190
    },
    {
      "epoch": 4.216073781291173,
      "grad_norm": 0.2650001645088196,
      "learning_rate": 8.137169470948662e-05,
      "loss": 0.0219,
      "step": 3200
    },
    {
      "epoch": 4.229249011857707,
      "grad_norm": 0.17157067358493805,
      "learning_rate": 8.124277269960179e-05,
      "loss": 0.0173,
      "step": 3210
    },
    {
      "epoch": 4.242424242424242,
      "grad_norm": 0.3844333291053772,
      "learning_rate": 8.111350902376023e-05,
      "loss": 0.0172,
      "step": 3220
    },
    {
      "epoch": 4.255599472990777,
      "grad_norm": 0.29438167810440063,
      "learning_rate": 8.098390509556883e-05,
      "loss": 0.0196,
      "step": 3230
    },
    {
      "epoch": 4.2687747035573125,
      "grad_norm": 0.2370031476020813,
      "learning_rate": 8.085396233235536e-05,
      "loss": 0.0157,
      "step": 3240
    },
    {
      "epoch": 4.281949934123848,
      "grad_norm": 0.233305886387825,
      "learning_rate": 8.072368215515306e-05,
      "loss": 0.0156,
      "step": 3250
    },
    {
      "epoch": 4.295125164690382,
      "grad_norm": 0.27094778418540955,
      "learning_rate": 8.059306598868506e-05,
      "loss": 0.0196,
      "step": 3260
    },
    {
      "epoch": 4.308300395256917,
      "grad_norm": 0.25244513154029846,
      "learning_rate": 8.046211526134888e-05,
      "loss": 0.0147,
      "step": 3270
    },
    {
      "epoch": 4.321475625823452,
      "grad_norm": 0.26508960127830505,
      "learning_rate": 8.033083140520065e-05,
      "loss": 0.0155,
      "step": 3280
    },
    {
      "epoch": 4.334650856389986,
      "grad_norm": 0.24337764084339142,
      "learning_rate": 8.019921585593962e-05,
      "loss": 0.0177,
      "step": 3290
    },
    {
      "epoch": 4.3478260869565215,
      "grad_norm": 0.23213884234428406,
      "learning_rate": 8.006727005289232e-05,
      "loss": 0.0153,
      "step": 3300
    },
    {
      "epoch": 4.361001317523057,
      "grad_norm": 0.2334267944097519,
      "learning_rate": 7.993499543899692e-05,
      "loss": 0.0142,
      "step": 3310
    },
    {
      "epoch": 4.374176548089592,
      "grad_norm": 0.21493524312973022,
      "learning_rate": 7.980239346078742e-05,
      "loss": 0.017,
      "step": 3320
    },
    {
      "epoch": 4.387351778656127,
      "grad_norm": 0.23911434412002563,
      "learning_rate": 7.966946556837778e-05,
      "loss": 0.0143,
      "step": 3330
    },
    {
      "epoch": 4.400527009222661,
      "grad_norm": 0.285081684589386,
      "learning_rate": 7.953621321544616e-05,
      "loss": 0.0193,
      "step": 3340
    },
    {
      "epoch": 4.413702239789196,
      "grad_norm": 0.34651780128479004,
      "learning_rate": 7.940263785921896e-05,
      "loss": 0.0202,
      "step": 3350
    },
    {
      "epoch": 4.426877470355731,
      "grad_norm": 0.3821335732936859,
      "learning_rate": 7.926874096045482e-05,
      "loss": 0.0193,
      "step": 3360
    },
    {
      "epoch": 4.440052700922266,
      "grad_norm": 0.28446274995803833,
      "learning_rate": 7.913452398342881e-05,
      "loss": 0.0227,
      "step": 3370
    },
    {
      "epoch": 4.453227931488801,
      "grad_norm": 0.3928273916244507,
      "learning_rate": 7.89999883959163e-05,
      "loss": 0.0208,
      "step": 3380
    },
    {
      "epoch": 4.466403162055336,
      "grad_norm": 0.5543475151062012,
      "learning_rate": 7.886513566917687e-05,
      "loss": 0.0231,
      "step": 3390
    },
    {
      "epoch": 4.479578392621871,
      "grad_norm": 0.2991754710674286,
      "learning_rate": 7.872996727793838e-05,
      "loss": 0.0222,
      "step": 3400
    },
    {
      "epoch": 4.492753623188406,
      "grad_norm": 0.5026523470878601,
      "learning_rate": 7.859448470038069e-05,
      "loss": 0.0234,
      "step": 3410
    },
    {
      "epoch": 4.5059288537549405,
      "grad_norm": 0.296000599861145,
      "learning_rate": 7.845868941811956e-05,
      "loss": 0.0193,
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 4.519104084321476, | |
| "grad_norm": 0.26779842376708984, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.0169, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 4.532279314888011, | |
| "grad_norm": 0.22933119535446167, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0181, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 4.545454545454545, | |
| "grad_norm": 0.30201396346092224, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0204, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 4.55862977602108, | |
| "grad_norm": 0.23445376753807068, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0196, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 4.571805006587615, | |
| "grad_norm": 0.24794942140579224, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.0184, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 4.58498023715415, | |
| "grad_norm": 0.3686930537223816, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.0152, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 4.5981554677206855, | |
| "grad_norm": 0.6352735161781311, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.0173, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 4.61133069828722, | |
| "grad_norm": 0.3366442322731018, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.0245, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 4.624505928853755, | |
| "grad_norm": 0.3499387800693512, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0236, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 4.63768115942029, | |
| "grad_norm": 0.23462465405464172, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.017, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 4.650856389986825, | |
| "grad_norm": 0.37723302841186523, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.0185, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 4.664031620553359, | |
| "grad_norm": 0.27838173508644104, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.0195, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 4.6772068511198945, | |
| "grad_norm": 0.17701946198940277, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0197, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 4.69038208168643, | |
| "grad_norm": 0.3603031635284424, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0207, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 4.703557312252965, | |
| "grad_norm": 0.37775689363479614, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.019, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 4.716732542819499, | |
| "grad_norm": 0.3618902862071991, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0172, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 4.729907773386034, | |
| "grad_norm": 0.2325575351715088, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0218, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 4.743083003952569, | |
| "grad_norm": 0.39902588725090027, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0185, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 4.7562582345191045, | |
| "grad_norm": 0.41432973742485046, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0182, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 4.769433465085639, | |
| "grad_norm": 0.3391708731651306, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.018, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 4.782608695652174, | |
| "grad_norm": 0.2337019294500351, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.017, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 4.795783926218709, | |
| "grad_norm": 0.4126136004924774, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0186, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 4.808959156785244, | |
| "grad_norm": 0.3636666536331177, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0204, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 4.822134387351778, | |
| "grad_norm": 0.34775424003601074, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.0185, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 4.8353096179183135, | |
| "grad_norm": 0.37673404812812805, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0202, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 4.848484848484849, | |
| "grad_norm": 0.42134353518486023, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0191, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 4.861660079051384, | |
| "grad_norm": 0.28770652413368225, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0224, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 4.874835309617918, | |
| "grad_norm": 0.27064254879951477, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0186, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 4.888010540184453, | |
| "grad_norm": 0.2361055612564087, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0189, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 4.901185770750988, | |
| "grad_norm": 0.3993823826313019, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0242, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 4.914361001317523, | |
| "grad_norm": 0.483935683965683, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0173, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 4.927536231884058, | |
| "grad_norm": 0.24301816523075104, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0172, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 4.940711462450593, | |
| "grad_norm": 0.323569118976593, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0193, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 4.953886693017128, | |
| "grad_norm": 0.21056847274303436, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0164, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 4.967061923583663, | |
| "grad_norm": 0.29462572932243347, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.021, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 4.980237154150197, | |
| "grad_norm": 0.23086123168468475, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0174, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 4.993412384716732, | |
| "grad_norm": 0.254380464553833, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.0177, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 5.006587615283268, | |
| "grad_norm": 0.4429381489753723, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0169, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 5.019762845849803, | |
| "grad_norm": 0.24621398746967316, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0161, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 5.032938076416337, | |
| "grad_norm": 0.250224232673645, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0214, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 5.046113306982872, | |
| "grad_norm": 0.3510632812976837, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0209, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 5.059288537549407, | |
| "grad_norm": 0.2965291440486908, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0156, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 5.072463768115942, | |
| "grad_norm": 0.25247883796691895, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0204, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 5.085638998682477, | |
| "grad_norm": 0.36644870042800903, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.0198, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 5.098814229249012, | |
| "grad_norm": 0.20640870928764343, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0208, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 5.111989459815547, | |
| "grad_norm": 0.26875925064086914, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0185, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 5.125164690382082, | |
| "grad_norm": 0.2605835497379303, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0152, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 5.138339920948616, | |
| "grad_norm": 0.24679867923259735, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.019, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 5.151515151515151, | |
| "grad_norm": 0.30721744894981384, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0163, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 5.1646903820816865, | |
| "grad_norm": 0.20572873950004578, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.0182, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 5.177865612648222, | |
| "grad_norm": 0.28250861167907715, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0178, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 5.191040843214756, | |
| "grad_norm": 0.2272404581308365, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0178, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 5.204216073781291, | |
| "grad_norm": 0.3031019866466522, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0215, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 5.217391304347826, | |
| "grad_norm": 0.30057474970817566, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0207, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 5.230566534914361, | |
| "grad_norm": 0.5263017416000366, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0192, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 5.2437417654808955, | |
| "grad_norm": 0.3072521686553955, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0179, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 5.256916996047431, | |
| "grad_norm": 0.3801591992378235, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0173, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 5.270092226613966, | |
| "grad_norm": 0.27008554339408875, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0138, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 5.283267457180501, | |
| "grad_norm": 0.23966392874717712, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0159, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 5.296442687747035, | |
| "grad_norm": 0.2968190610408783, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0163, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 5.30961791831357, | |
| "grad_norm": 0.24101383984088898, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.0154, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 5.3227931488801055, | |
| "grad_norm": 0.418264240026474, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0181, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 5.335968379446641, | |
| "grad_norm": 0.3220149576663971, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.014, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 5.349143610013175, | |
| "grad_norm": 0.2802201211452484, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0169, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 5.36231884057971, | |
| "grad_norm": 0.2993554174900055, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0214, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 5.375494071146245, | |
| "grad_norm": 0.30128803849220276, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0174, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 5.38866930171278, | |
| "grad_norm": 0.33825552463531494, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0171, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 5.4018445322793145, | |
| "grad_norm": 0.31894010305404663, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0165, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 5.41501976284585, | |
| "grad_norm": 0.289244681596756, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0175, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 5.428194993412385, | |
| "grad_norm": 0.2629208266735077, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.017, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 5.44137022397892, | |
| "grad_norm": 0.24454976618289948, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0174, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 5.454545454545454, | |
| "grad_norm": 0.27494749426841736, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0183, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 5.467720685111989, | |
| "grad_norm": 0.2748429775238037, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.0186, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 5.480895915678524, | |
| "grad_norm": 0.26624834537506104, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.013, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 5.4940711462450595, | |
| "grad_norm": 0.41027799248695374, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0176, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 5.507246376811594, | |
| "grad_norm": 0.27950572967529297, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.0217, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 5.520421607378129, | |
| "grad_norm": 0.32828864455223083, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0212, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 5.533596837944664, | |
| "grad_norm": 0.32206991314888, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0208, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 5.546772068511199, | |
| "grad_norm": 0.2994462549686432, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0169, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 5.559947299077734, | |
| "grad_norm": 0.1446998417377472, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.0163, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 5.573122529644269, | |
| "grad_norm": 0.2796706259250641, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.014, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 5.586297760210804, | |
| "grad_norm": 0.2798866033554077, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0159, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 5.599472990777339, | |
| "grad_norm": 0.3122697174549103, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0216, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 5.612648221343873, | |
| "grad_norm": 0.23494744300842285, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0215, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 5.625823451910408, | |
| "grad_norm": 0.218988299369812, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0158, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 5.638998682476943, | |
| "grad_norm": 0.2929661273956299, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.018, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 5.6521739130434785, | |
| "grad_norm": 0.26279154419898987, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0158, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 5.665349143610014, | |
| "grad_norm": 0.35149410367012024, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0186, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 5.678524374176548, | |
| "grad_norm": 0.26635152101516724, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.016, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 5.691699604743083, | |
| "grad_norm": 0.25784632563591003, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0183, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 5.704874835309618, | |
| "grad_norm": 0.34484511613845825, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0151, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 5.718050065876152, | |
| "grad_norm": 0.29317110776901245, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0139, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 5.7312252964426875, | |
| "grad_norm": 0.35007333755493164, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0218, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 5.744400527009223, | |
| "grad_norm": 0.17053362727165222, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0161, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 5.757575757575758, | |
| "grad_norm": 0.24661651253700256, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0166, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 5.770750988142293, | |
| "grad_norm": 0.3710819184780121, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0161, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 5.783926218708827, | |
| "grad_norm": 0.2797921299934387, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.016, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 5.797101449275362, | |
| "grad_norm": 0.5894481539726257, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0219, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 5.810276679841897, | |
| "grad_norm": 0.24950364232063293, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0193, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 5.823451910408432, | |
| "grad_norm": 0.38150009512901306, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0185, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 5.836627140974967, | |
| "grad_norm": 0.22015729546546936, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.0168, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 5.849802371541502, | |
| "grad_norm": 0.2161886841058731, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.019, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 5.862977602108037, | |
| "grad_norm": 0.31231123208999634, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0197, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 5.876152832674572, | |
| "grad_norm": 0.19847756624221802, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0159, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 5.8893280632411065, | |
| "grad_norm": 0.18192614614963531, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0131, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 5.902503293807642, | |
| "grad_norm": 0.25896963477134705, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0147, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 5.915678524374177, | |
| "grad_norm": 0.2821304202079773, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0178, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 5.928853754940711, | |
| "grad_norm": 0.24604766070842743, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0174, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 5.942028985507246, | |
| "grad_norm": 0.23723308742046356, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0197, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 5.955204216073781, | |
| "grad_norm": 0.3339266777038574, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0164, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 5.968379446640316, | |
| "grad_norm": 0.3604557514190674, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0164, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 5.9815546772068515, | |
| "grad_norm": 0.398492693901062, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0152, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 5.994729907773386, | |
| "grad_norm": 0.23473089933395386, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0136, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 6.007905138339921, | |
| "grad_norm": 0.4985441565513611, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0174, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 6.021080368906456, | |
| "grad_norm": 0.2359652817249298, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0203, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 6.034255599472991, | |
| "grad_norm": 0.24522265791893005, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0184, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 6.047430830039525, | |
| "grad_norm": 0.3857898414134979, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0173, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 6.0606060606060606, | |
| "grad_norm": 0.26704806089401245, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0151, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 6.073781291172596, | |
| "grad_norm": 0.7608732581138611, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0176, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 6.086956521739131, | |
| "grad_norm": 0.4076343774795532, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0221, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 6.100131752305665, | |
| "grad_norm": 0.18973155319690704, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0186, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 6.1133069828722, | |
| "grad_norm": 0.2442351132631302, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0213, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 6.126482213438735, | |
| "grad_norm": 0.22191573679447174, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0191, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 6.1396574440052705, | |
| "grad_norm": 0.23175200819969177, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0248, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 6.152832674571805, | |
| "grad_norm": 0.42165979743003845, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0205, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 6.16600790513834, | |
| "grad_norm": 0.34937527775764465, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0158, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 6.179183135704875, | |
| "grad_norm": 0.21536408364772797, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0186, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 6.19235836627141, | |
| "grad_norm": 0.20470696687698364, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0204, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 6.205533596837944, | |
| "grad_norm": 0.35827165842056274, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0155, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 6.2187088274044795, | |
| "grad_norm": 0.42314767837524414, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.019, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 6.231884057971015, | |
| "grad_norm": 0.3023538589477539, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0199, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 6.24505928853755, | |
| "grad_norm": 0.3448214828968048, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.0133, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 6.258234519104084, | |
| "grad_norm": 0.1906011402606964, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0157, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 6.271409749670619, | |
| "grad_norm": 0.21655221283435822, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0151, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 6.284584980237154, | |
| "grad_norm": 0.26237595081329346, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0151, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 6.297760210803689, | |
| "grad_norm": 0.29299458861351013, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0164, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 6.310935441370224, | |
| "grad_norm": 0.24668575823307037, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0156, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 6.324110671936759, | |
| "grad_norm": 0.39310356974601746, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0132, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 6.337285902503294, | |
| "grad_norm": 0.2791636884212494, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0204, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 6.350461133069829, | |
| "grad_norm": 0.2549298405647278, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.0134, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 6.363636363636363, | |
| "grad_norm": 0.21396104991436005, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0156, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 6.3768115942028984, | |
| "grad_norm": 0.2730461359024048, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0258, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 6.389986824769434, | |
| "grad_norm": 0.1767221987247467, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0177, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 6.403162055335969, | |
| "grad_norm": 0.32627832889556885, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0136, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 6.416337285902503, | |
| "grad_norm": 0.1690535843372345, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.0165, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 6.429512516469038, | |
| "grad_norm": 0.2542962431907654, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0154, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 6.442687747035573, | |
| "grad_norm": 0.29230985045433044, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0153, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 6.455862977602108, | |
| "grad_norm": 0.33685338497161865, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.017, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 6.469038208168643, | |
| "grad_norm": 0.3226190507411957, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0172, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 6.482213438735178, | |
| "grad_norm": 0.2552209198474884, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0123, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 6.495388669301713, | |
| "grad_norm": 0.26738399267196655, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0159, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 6.508563899868248, | |
| "grad_norm": 0.4078490138053894, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0146, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 6.521739130434782, | |
| "grad_norm": 0.31933119893074036, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0146, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 6.534914361001317, | |
| "grad_norm": 0.17034372687339783, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0172, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 6.5480895915678525, | |
| "grad_norm": 0.32328030467033386, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0159, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 6.561264822134388, | |
| "grad_norm": 0.23999857902526855, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0198, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 6.574440052700922, | |
| "grad_norm": 0.20127585530281067, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0129, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 6.587615283267457, | |
| "grad_norm": 0.3747844696044922, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.018, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 6.600790513833992, | |
| "grad_norm": 0.25499704480171204, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0145, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 6.613965744400527, | |
| "grad_norm": 0.20843933522701263, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0171, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 6.627140974967062, | |
| "grad_norm": 0.23514339327812195, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0147, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 6.640316205533597, | |
| "grad_norm": 0.2892953157424927, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0154, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 6.653491436100132, | |
| "grad_norm": 0.2587839961051941, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.015, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 6.666666666666667, | |
| "grad_norm": 0.15298497676849365, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.011, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 6.679841897233201, | |
| "grad_norm": 0.24668270349502563, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0148, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 6.693017127799736, | |
| "grad_norm": 0.19608354568481445, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.0133, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 6.7061923583662715, | |
| "grad_norm": 0.28214743733406067, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0149, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 6.719367588932807, | |
| "grad_norm": 0.301462858915329, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0187, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 6.732542819499341, | |
| "grad_norm": 0.3126068115234375, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.0135, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 6.745718050065876, | |
| "grad_norm": 0.27192267775535583, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.0171, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 6.758893280632411, | |
| "grad_norm": 0.2133040726184845, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0146, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 6.772068511198946, | |
| "grad_norm": 0.22072051465511322, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0128, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 6.7852437417654805, | |
| "grad_norm": 0.16883297264575958, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0148, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 6.798418972332016, | |
| "grad_norm": 0.24253515899181366, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0158, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 6.811594202898551, | |
| "grad_norm": 0.23843777179718018, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.011, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 6.824769433465086, | |
| "grad_norm": 0.20740272104740143, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0123, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 6.837944664031621, | |
| "grad_norm": 0.1737157106399536, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0148, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 6.851119894598155, | |
| "grad_norm": 0.2929360270500183, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0201, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 6.86429512516469, | |
| "grad_norm": 0.33327507972717285, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.016, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 6.877470355731226, | |
| "grad_norm": 0.25603005290031433, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0164, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 6.89064558629776, | |
| "grad_norm": 0.2927559018135071, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0155, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 6.903820816864295, | |
| "grad_norm": 0.31930333375930786, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0128, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 6.91699604743083, | |
| "grad_norm": 0.1962880790233612, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.014, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 6.930171277997365, | |
| "grad_norm": 0.1927347481250763, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.015, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 6.9433465085639, | |
| "grad_norm": 0.3085514008998871, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.0176, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 6.956521739130435, | |
| "grad_norm": 0.30694425106048584, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.013, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 6.96969696969697, | |
| "grad_norm": 0.3337339758872986, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0155, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 6.982872200263505, | |
| "grad_norm": 0.273749977350235, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.0141, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 6.996047430830039, | |
| "grad_norm": 0.2660110890865326, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.0154, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 7.009222661396574, | |
| "grad_norm": 0.16499164700508118, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0135, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 7.022397891963109, | |
| "grad_norm": 0.2095925658941269, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.0167, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 7.0355731225296445, | |
| "grad_norm": 0.2133823186159134, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0155, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 7.048748353096179, | |
| "grad_norm": 0.42575761675834656, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.014, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 7.061923583662714, | |
| "grad_norm": 0.2763396203517914, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0159, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 7.075098814229249, | |
| "grad_norm": 0.17784270644187927, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0138, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 7.088274044795784, | |
| "grad_norm": 0.36119556427001953, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0167, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 7.101449275362318, | |
| "grad_norm": 0.2838824689388275, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0131, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 7.1146245059288535, | |
| "grad_norm": 0.3211531341075897, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0122, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 7.127799736495389, | |
| "grad_norm": 0.2507878541946411, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.015, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 7.140974967061924, | |
| "grad_norm": 0.29864948987960815, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0121, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 7.154150197628459, | |
| "grad_norm": 0.20546981692314148, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0119, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 7.167325428194993, | |
| "grad_norm": 0.29513996839523315, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0135, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 7.180500658761528, | |
| "grad_norm": 0.252029687166214, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0158, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 7.1936758893280635, | |
| "grad_norm": 0.31195035576820374, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0144, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 7.206851119894598, | |
| "grad_norm": 0.2600157558917999, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0141, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 7.220026350461133, | |
| "grad_norm": 0.21147438883781433, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.013, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 7.233201581027668, | |
| "grad_norm": 0.2041497379541397, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0128, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 7.246376811594203, | |
| "grad_norm": 0.25358089804649353, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.0115, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 7.259552042160738, | |
| "grad_norm": 0.23748533427715302, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.012, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 7.2727272727272725, | |
| "grad_norm": 0.2055012583732605, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0136, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 7.285902503293808, | |
| "grad_norm": 0.15180236101150513, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.0136, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 7.299077733860343, | |
| "grad_norm": 0.26564714312553406, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0172, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 7.312252964426877, | |
| "grad_norm": 0.19193261861801147, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0096, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 7.325428194993412, | |
| "grad_norm": 0.12721064686775208, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0129, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 7.338603425559947, | |
| "grad_norm": 0.2680702209472656, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.012, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 7.351778656126482, | |
| "grad_norm": 0.22795313596725464, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0114, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 7.3649538866930175, | |
| "grad_norm": 0.24292701482772827, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.014, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 7.378129117259552, | |
| "grad_norm": 0.22425279021263123, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0147, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 7.391304347826087, | |
| "grad_norm": 0.2584420442581177, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0142, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 7.404479578392622, | |
| "grad_norm": 0.27375370264053345, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0139, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 7.417654808959157, | |
| "grad_norm": 0.269481360912323, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0131, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 7.430830039525691, | |
| "grad_norm": 0.200521782040596, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0113, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 7.444005270092227, | |
| "grad_norm": 0.1899503469467163, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0157, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 7.457180500658762, | |
| "grad_norm": 0.17898981273174286, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0096, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 7.470355731225297, | |
| "grad_norm": 0.1877722591161728, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.013, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 7.483530961791831, | |
| "grad_norm": 0.19093987345695496, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0135, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 7.496706192358366, | |
| "grad_norm": 0.32967081665992737, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.0163, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 7.509881422924901, | |
| "grad_norm": 0.24727611243724823, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0151, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 7.5230566534914365, | |
| "grad_norm": 0.23724959790706635, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.0124, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 7.536231884057971, | |
| "grad_norm": 0.24676303565502167, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0162, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 7.549407114624506, | |
| "grad_norm": 0.23069219291210175, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0151, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 7.562582345191041, | |
| "grad_norm": 0.22480972111225128, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0146, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 7.575757575757576, | |
| "grad_norm": 0.22481106221675873, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0133, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 7.58893280632411, | |
| "grad_norm": 0.2546963393688202, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0132, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 7.6021080368906455, | |
| "grad_norm": 0.24717223644256592, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0145, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 7.615283267457181, | |
| "grad_norm": 0.20582571625709534, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0129, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 7.628458498023716, | |
| "grad_norm": 0.18852469325065613, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.013, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 7.64163372859025, | |
| "grad_norm": 0.2953014671802521, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0164, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 7.654808959156785, | |
| "grad_norm": 0.1740752011537552, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0126, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 7.66798418972332, | |
| "grad_norm": 0.24041345715522766, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0129, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 7.681159420289855, | |
| "grad_norm": 0.25071561336517334, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0172, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 7.69433465085639, | |
| "grad_norm": 0.20551979541778564, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0146, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 7.707509881422925, | |
| "grad_norm": 0.20781241357326508, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0132, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 7.72068511198946, | |
| "grad_norm": 0.2976298928260803, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0186, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 7.733860342555995, | |
| "grad_norm": 0.30154094099998474, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0136, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 7.747035573122529, | |
| "grad_norm": 0.259523868560791, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0144, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 7.7602108036890645, | |
| "grad_norm": 0.24883371591567993, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.015, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 7.7733860342556, | |
| "grad_norm": 0.2566511631011963, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0137, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 7.786561264822135, | |
| "grad_norm": 0.2503558099269867, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0182, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 7.799736495388669, | |
| "grad_norm": 0.2588229179382324, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.0109, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 7.812911725955204, | |
| "grad_norm": 0.22495663166046143, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0125, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 7.826086956521739, | |
| "grad_norm": 0.2702830731868744, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0131, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 7.839262187088274, | |
| "grad_norm": 0.16298788785934448, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.011, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 7.852437417654809, | |
| "grad_norm": 0.18812081217765808, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.0122, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 7.865612648221344, | |
| "grad_norm": 0.2774938642978668, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.012, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 7.878787878787879, | |
| "grad_norm": 0.15192358195781708, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0121, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 7.891963109354414, | |
| "grad_norm": 0.24365267157554626, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0108, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 7.905138339920948, | |
| "grad_norm": 0.24914588034152985, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0112, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 7.918313570487483, | |
| "grad_norm": 0.23948980867862701, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0115, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 7.9314888010540185, | |
| "grad_norm": 0.24214811623096466, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0123, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 7.944664031620554, | |
| "grad_norm": 0.19386622309684753, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0143, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 7.957839262187088, | |
| "grad_norm": 0.18021267652511597, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.0132, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 7.971014492753623, | |
| "grad_norm": 0.21891270577907562, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0108, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 7.984189723320158, | |
| "grad_norm": 0.23576456308364868, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0096, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 7.997364953886693, | |
| "grad_norm": 0.12668360769748688, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.012, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 8.010540184453228, | |
| "grad_norm": 0.16363386809825897, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.012, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 8.023715415019764, | |
| "grad_norm": 0.1997222751379013, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0114, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 8.036890645586297, | |
| "grad_norm": 0.1548883616924286, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0124, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 8.050065876152832, | |
| "grad_norm": 0.2827013432979584, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0114, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 8.063241106719367, | |
| "grad_norm": 0.24781610071659088, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0129, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 8.076416337285902, | |
| "grad_norm": 0.22967146337032318, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0135, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 8.089591567852437, | |
| "grad_norm": 0.2027362883090973, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0142, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 8.102766798418973, | |
| "grad_norm": 0.1740102767944336, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0116, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 8.115942028985508, | |
| "grad_norm": 0.1729733943939209, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0114, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 8.129117259552043, | |
| "grad_norm": 0.27243903279304504, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0116, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 8.142292490118576, | |
| "grad_norm": 0.36281585693359375, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0127, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 8.155467720685111, | |
| "grad_norm": 0.21071209013462067, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0143, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 8.168642951251647, | |
| "grad_norm": 0.16985180974006653, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0161, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 8.181818181818182, | |
| "grad_norm": 0.28265002369880676, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0114, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 8.194993412384717, | |
| "grad_norm": 0.23601707816123962, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0123, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 8.208168642951252, | |
| "grad_norm": 0.23068974912166595, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0102, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 8.221343873517787, | |
| "grad_norm": 0.16201822459697723, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.009, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 8.234519104084322, | |
| "grad_norm": 0.18543598055839539, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0119, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 8.247694334650856, | |
| "grad_norm": 0.14588962495326996, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0135, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 8.26086956521739, | |
| "grad_norm": 0.23218132555484772, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0138, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 8.274044795783926, | |
| "grad_norm": 0.1436203271150589, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0119, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 8.287220026350461, | |
| "grad_norm": 0.20360875129699707, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.013, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 8.300395256916996, | |
| "grad_norm": 0.1437600553035736, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0113, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 8.313570487483531, | |
| "grad_norm": 0.3516365885734558, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0121, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 8.326745718050066, | |
| "grad_norm": 0.1786649525165558, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0118, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 8.339920948616601, | |
| "grad_norm": 0.2372141033411026, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0132, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 8.353096179183135, | |
| "grad_norm": 0.254221111536026, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0117, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 8.36627140974967, | |
| "grad_norm": 0.2704264521598816, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0106, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 8.379446640316205, | |
| "grad_norm": 0.19264473021030426, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0091, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 8.39262187088274, | |
| "grad_norm": 0.394570529460907, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.012, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 8.405797101449275, | |
| "grad_norm": 0.282055139541626, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0111, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 8.41897233201581, | |
| "grad_norm": 0.14342975616455078, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.0111, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 8.432147562582346, | |
| "grad_norm": 0.20480139553546906, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0131, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 8.44532279314888, | |
| "grad_norm": 0.3057665228843689, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0149, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 8.458498023715414, | |
| "grad_norm": 0.20350416004657745, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0108, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 8.47167325428195, | |
| "grad_norm": 0.19562511146068573, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0138, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 8.484848484848484, | |
| "grad_norm": 0.23752659559249878, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.011, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 8.49802371541502, | |
| "grad_norm": 0.20719431340694427, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0118, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 8.511198945981555, | |
| "grad_norm": 0.14212384819984436, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0104, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 8.52437417654809, | |
| "grad_norm": 0.1687770038843155, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0104, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 8.537549407114625, | |
| "grad_norm": 0.17196719348430634, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0104, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 8.55072463768116, | |
| "grad_norm": 0.0772448256611824, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0111, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 8.563899868247695, | |
| "grad_norm": 0.200760155916214, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0108, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 8.577075098814229, | |
| "grad_norm": 0.14407402276992798, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0121, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 8.590250329380764, | |
| "grad_norm": 0.22289133071899414, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0109, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 8.603425559947299, | |
| "grad_norm": 0.15810120105743408, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0134, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 8.616600790513834, | |
| "grad_norm": 0.2241305559873581, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0112, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 8.62977602108037, | |
| "grad_norm": 0.2314157485961914, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0111, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 8.642951251646904, | |
| "grad_norm": 0.21943719685077667, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.012, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 8.65612648221344, | |
| "grad_norm": 0.14395660161972046, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0109, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 8.669301712779973, | |
| "grad_norm": 0.21410293877124786, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0164, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 8.682476943346508, | |
| "grad_norm": 0.19529272615909576, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0141, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 8.695652173913043, | |
| "grad_norm": 0.21148556470870972, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0097, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 8.708827404479578, | |
| "grad_norm": 0.25088655948638916, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0126, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 8.722002635046113, | |
| "grad_norm": 0.20759238302707672, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0119, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 8.735177865612648, | |
| "grad_norm": 0.23086342215538025, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0122, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 8.748353096179184, | |
| "grad_norm": 0.21984222531318665, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0137, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 8.761528326745719, | |
| "grad_norm": 0.16901598870754242, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0102, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 8.774703557312254, | |
| "grad_norm": 0.16642488539218903, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.011, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 8.787878787878787, | |
| "grad_norm": 0.30090758204460144, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0132, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 8.801054018445322, | |
| "grad_norm": 0.2027425616979599, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.01, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 8.814229249011857, | |
| "grad_norm": 0.17420056462287903, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.0113, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 8.827404479578393, | |
| "grad_norm": 0.1992754340171814, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0112, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 8.840579710144928, | |
| "grad_norm": 0.14116120338439941, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0088, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 8.853754940711463, | |
| "grad_norm": 0.15394073724746704, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0097, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 8.866930171277998, | |
| "grad_norm": 0.2150605171918869, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0104, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 8.880105401844531, | |
| "grad_norm": 0.22290481626987457, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.012, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 8.893280632411066, | |
| "grad_norm": 0.18290726840496063, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0117, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 8.906455862977602, | |
| "grad_norm": 0.18754500150680542, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.009, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 8.919631093544137, | |
| "grad_norm": 0.18662101030349731, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.0113, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 8.932806324110672, | |
| "grad_norm": 0.2117272913455963, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0097, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 8.945981554677207, | |
| "grad_norm": 0.2579967975616455, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0115, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 8.959156785243742, | |
| "grad_norm": 0.3161567747592926, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0102, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 8.972332015810277, | |
| "grad_norm": 0.1549680531024933, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0149, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 8.985507246376812, | |
| "grad_norm": 0.20083005726337433, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0132, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 8.998682476943346, | |
| "grad_norm": 0.1578175276517868, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0073, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 9.011857707509881, | |
| "grad_norm": 0.12106601148843765, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.0087, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 9.025032938076416, | |
| "grad_norm": 0.14856408536434174, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.0093, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 9.038208168642951, | |
| "grad_norm": 0.21818016469478607, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0099, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 9.051383399209486, | |
| "grad_norm": 0.23136316239833832, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.012, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 9.064558629776021, | |
| "grad_norm": 0.23694013059139252, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0135, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 9.077733860342557, | |
| "grad_norm": 0.17179802060127258, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0129, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 9.090909090909092, | |
| "grad_norm": 0.1833650767803192, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0099, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 9.104084321475625, | |
| "grad_norm": 0.17761869728565216, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.0114, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 9.11725955204216, | |
| "grad_norm": 0.21558180451393127, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0123, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 9.130434782608695, | |
| "grad_norm": 0.1749400645494461, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.014, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 9.14361001317523, | |
| "grad_norm": 0.17026203870773315, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0089, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 9.156785243741766, | |
| "grad_norm": 0.2876078486442566, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0097, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 9.1699604743083, | |
| "grad_norm": 0.3452471196651459, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0129, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 9.183135704874836, | |
| "grad_norm": 0.2366582602262497, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0103, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 9.196310935441371, | |
| "grad_norm": 0.2024129182100296, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.014, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 9.209486166007904, | |
| "grad_norm": 0.19319729506969452, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.012, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 9.22266139657444, | |
| "grad_norm": 0.17540772259235382, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0114, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 9.235836627140975, | |
| "grad_norm": 0.23558589816093445, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.0098, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 9.24901185770751, | |
| "grad_norm": 0.19168560206890106, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0086, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 9.262187088274045, | |
| "grad_norm": 0.17272160947322845, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0087, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 9.27536231884058, | |
| "grad_norm": 0.13440345227718353, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0096, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 9.288537549407115, | |
| "grad_norm": 0.25527286529541016, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0117, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 9.30171277997365, | |
| "grad_norm": 0.1481703817844391, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0163, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 9.314888010540184, | |
| "grad_norm": 0.20705603063106537, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0099, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 9.328063241106719, | |
| "grad_norm": 0.10488489270210266, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.01, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 9.341238471673254, | |
| "grad_norm": 0.29786285758018494, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.01, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 9.354413702239789, | |
| "grad_norm": 0.2556435763835907, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0088, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 9.367588932806324, | |
| "grad_norm": 0.2757495641708374, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0114, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 9.38076416337286, | |
| "grad_norm": 0.25607019662857056, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0112, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 9.393939393939394, | |
| "grad_norm": 0.30468475818634033, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0099, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 9.40711462450593, | |
| "grad_norm": 0.28744977712631226, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0113, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 9.420289855072463, | |
| "grad_norm": 0.23797324299812317, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0112, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 9.433465085638998, | |
| "grad_norm": 0.2671097218990326, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0101, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 9.446640316205533, | |
| "grad_norm": 0.24652323126792908, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0094, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 9.459815546772068, | |
| "grad_norm": 0.13198544085025787, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0101, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 9.472990777338604, | |
| "grad_norm": 0.15571995079517365, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0116, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 9.486166007905139, | |
| "grad_norm": 0.3202069103717804, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0122, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 9.499341238471674, | |
| "grad_norm": 0.28108784556388855, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0091, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 9.512516469038209, | |
| "grad_norm": 0.22256651520729065, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0108, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 9.525691699604742, | |
| "grad_norm": 0.18551883101463318, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0114, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 9.538866930171277, | |
| "grad_norm": 0.15290212631225586, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.0094, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 9.552042160737813, | |
| "grad_norm": 0.3061795234680176, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0116, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 9.565217391304348, | |
| "grad_norm": 0.2448814958333969, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0086, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 9.578392621870883, | |
| "grad_norm": 0.22381773591041565, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.0111, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 9.591567852437418, | |
| "grad_norm": 0.14286114275455475, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.0086, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 9.604743083003953, | |
| "grad_norm": 0.2133261263370514, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0097, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 9.617918313570488, | |
| "grad_norm": 0.267894446849823, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0122, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 9.631093544137022, | |
| "grad_norm": 0.2295752465724945, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.01, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 9.644268774703557, | |
| "grad_norm": 0.2165345996618271, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0105, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 9.657444005270092, | |
| "grad_norm": 0.21037983894348145, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.0093, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 9.670619235836627, | |
| "grad_norm": 0.21100281178951263, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0109, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 9.683794466403162, | |
| "grad_norm": 0.14599454402923584, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.01, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 9.696969696969697, | |
| "grad_norm": 0.15527044236660004, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0094, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 9.710144927536232, | |
| "grad_norm": 0.15139532089233398, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0092, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 9.723320158102768, | |
| "grad_norm": 0.1905406415462494, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0099, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 9.7364953886693, | |
| "grad_norm": 0.24150630831718445, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0098, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 9.749670619235836, | |
| "grad_norm": 0.13083931803703308, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0115, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 9.762845849802371, | |
| "grad_norm": 0.20750683546066284, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0091, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 9.776021080368906, | |
| "grad_norm": 0.29100707173347473, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.01, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 9.789196310935441, | |
| "grad_norm": 0.14742396771907806, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0087, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 9.802371541501977, | |
| "grad_norm": 0.21344038844108582, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0107, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 9.815546772068512, | |
| "grad_norm": 0.1723639816045761, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0091, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 9.828722002635047, | |
| "grad_norm": 0.11757984012365341, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0073, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 9.841897233201582, | |
| "grad_norm": 0.17279432713985443, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0109, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 9.855072463768115, | |
| "grad_norm": 0.17756018042564392, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0096, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 9.86824769433465, | |
| "grad_norm": 0.21475255489349365, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0114, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 9.881422924901186, | |
| "grad_norm": 0.1909179538488388, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0092, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 9.89459815546772, | |
| "grad_norm": 0.13779670000076294, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0092, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 9.907773386034256, | |
| "grad_norm": 0.18837608397006989, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0112, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 9.920948616600791, | |
| "grad_norm": 0.1538374423980713, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0085, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 9.934123847167326, | |
| "grad_norm": 0.22132106125354767, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0116, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 9.94729907773386, | |
| "grad_norm": 0.19287706911563873, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0105, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 9.960474308300395, | |
| "grad_norm": 0.2113484889268875, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0083, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 9.97364953886693, | |
| "grad_norm": 0.2562454640865326, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0086, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 9.986824769433465, | |
| "grad_norm": 0.13577942550182343, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0108, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "grad_norm": 0.2062060385942459, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0108, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 10.013175230566535, | |
| "grad_norm": 0.14395931363105774, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0082, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 10.02635046113307, | |
| "grad_norm": 0.13919766247272491, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0106, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 10.039525691699605, | |
| "grad_norm": 0.19914333522319794, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.009, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 10.05270092226614, | |
| "grad_norm": 0.15142998099327087, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0089, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 10.065876152832674, | |
| "grad_norm": 0.2267027497291565, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0088, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 10.079051383399209, | |
| "grad_norm": 0.20620562136173248, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0093, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 10.092226613965744, | |
| "grad_norm": 0.17721879482269287, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0106, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 10.10540184453228, | |
| "grad_norm": 0.21728409826755524, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0087, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 10.118577075098814, | |
| "grad_norm": 0.13773860037326813, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0104, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 10.13175230566535, | |
| "grad_norm": 0.20984046161174774, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0103, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 10.144927536231885, | |
| "grad_norm": 0.14403507113456726, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.0082, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 10.15810276679842, | |
| "grad_norm": 0.16830037534236908, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.0097, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 10.171277997364953, | |
| "grad_norm": 0.1339355707168579, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.008, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 10.184453227931488, | |
| "grad_norm": 0.19045881927013397, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.01, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 10.197628458498023, | |
| "grad_norm": 0.11783690750598907, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0108, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 10.210803689064559, | |
| "grad_norm": 0.15417170524597168, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.0095, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 10.223978919631094, | |
| "grad_norm": 0.15006369352340698, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0095, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 10.237154150197629, | |
| "grad_norm": 0.11807779222726822, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0082, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 10.250329380764164, | |
| "grad_norm": 0.18806616961956024, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0091, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 10.2635046113307, | |
| "grad_norm": 0.18147198855876923, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0074, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 10.276679841897232, | |
| "grad_norm": 0.20511873066425323, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0092, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 10.289855072463768, | |
| "grad_norm": 0.16353419423103333, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0103, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 10.303030303030303, | |
| "grad_norm": 0.16834335029125214, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.0087, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 10.316205533596838, | |
| "grad_norm": 0.1898210048675537, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0106, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 10.329380764163373, | |
| "grad_norm": 0.23814056813716888, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.013, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 10.342555994729908, | |
| "grad_norm": 0.20332714915275574, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0103, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 10.355731225296443, | |
| "grad_norm": 0.12532776594161987, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0076, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 10.368906455862978, | |
| "grad_norm": 0.12927134335041046, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0075, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 10.382081686429512, | |
| "grad_norm": 0.22005397081375122, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0095, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 10.395256916996047, | |
| "grad_norm": 0.20675811171531677, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0101, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 10.408432147562582, | |
| "grad_norm": 0.27455243468284607, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0093, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 10.421607378129117, | |
| "grad_norm": 0.23391202092170715, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.008, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 10.434782608695652, | |
| "grad_norm": 0.17565032839775085, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0099, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 10.447957839262187, | |
| "grad_norm": 0.14462821185588837, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0072, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 10.461133069828723, | |
| "grad_norm": 0.1348995417356491, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0098, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 10.474308300395258, | |
| "grad_norm": 0.17276756465435028, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0109, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 10.487483530961791, | |
| "grad_norm": 0.17429892718791962, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.0157, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 10.500658761528326, | |
| "grad_norm": 0.22026081383228302, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0108, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 10.513833992094861, | |
| "grad_norm": 0.14811038970947266, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0119, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 10.527009222661396, | |
| "grad_norm": 0.19848553836345673, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0112, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 10.540184453227932, | |
| "grad_norm": 0.22085192799568176, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.009, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 10.553359683794467, | |
| "grad_norm": 0.15261310338974, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0103, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 10.566534914361002, | |
| "grad_norm": 0.13214528560638428, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0107, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 10.579710144927537, | |
| "grad_norm": 0.22246448695659637, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0096, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 10.59288537549407, | |
| "grad_norm": 0.18928980827331543, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0093, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 10.606060606060606, | |
| "grad_norm": 0.15583442151546478, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.0084, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 10.61923583662714, | |
| "grad_norm": 0.1976160854101181, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0079, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 10.632411067193676, | |
| "grad_norm": 0.2328920215368271, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0084, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 10.645586297760211, | |
| "grad_norm": 0.19469021260738373, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0079, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 10.658761528326746, | |
| "grad_norm": 0.16853180527687073, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.0082, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 10.671936758893281, | |
| "grad_norm": 0.10395480692386627, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0082, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 10.685111989459816, | |
| "grad_norm": 0.18008802831172943, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0078, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 10.69828722002635, | |
| "grad_norm": 0.22644630074501038, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.009, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 10.711462450592885, | |
| "grad_norm": 0.10834449529647827, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0069, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 10.72463768115942, | |
| "grad_norm": 0.12147624790668488, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.008, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 10.737812911725955, | |
| "grad_norm": 0.1636052131652832, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0093, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 10.75098814229249, | |
| "grad_norm": 0.21793915331363678, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0076, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 10.764163372859025, | |
| "grad_norm": 0.1545238196849823, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0073, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 10.77733860342556, | |
| "grad_norm": 0.1943051815032959, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0086, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 10.790513833992096, | |
| "grad_norm": 0.241092249751091, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0097, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 10.803689064558629, | |
| "grad_norm": 0.16625875234603882, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0083, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 10.816864295125164, | |
| "grad_norm": 0.1712971031665802, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0086, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 10.8300395256917, | |
| "grad_norm": 0.19201204180717468, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0099, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 10.843214756258234, | |
| "grad_norm": 0.13061735033988953, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0074, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 10.85638998682477, | |
| "grad_norm": 0.18794460594654083, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0095, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 10.869565217391305, | |
| "grad_norm": 0.2249092310667038, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.008, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 10.88274044795784, | |
| "grad_norm": 0.2575426697731018, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0068, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 10.895915678524375, | |
| "grad_norm": 0.2082395702600479, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0083, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 10.909090909090908, | |
| "grad_norm": 0.17624586820602417, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0111, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 10.922266139657443, | |
| "grad_norm": 0.13988056778907776, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0079, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 10.935441370223979, | |
| "grad_norm": 0.2388487309217453, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0086, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 10.948616600790514, | |
| "grad_norm": 0.15359775722026825, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0051, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 10.961791831357049, | |
| "grad_norm": 0.28482502698898315, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0122, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 10.974967061923584, | |
| "grad_norm": 0.18701320886611938, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0091, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 10.988142292490119, | |
| "grad_norm": 0.12363678216934204, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0091, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 11.001317523056654, | |
| "grad_norm": 0.21663540601730347, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0098, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 11.014492753623188, | |
| "grad_norm": 0.18112725019454956, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0082, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 11.027667984189723, | |
| "grad_norm": 0.16493700444698334, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0078, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 11.040843214756258, | |
| "grad_norm": 0.13823840022087097, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0056, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 11.054018445322793, | |
| "grad_norm": 0.2114834487438202, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0098, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 11.067193675889328, | |
| "grad_norm": 0.2341718226671219, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.0111, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 11.080368906455863, | |
| "grad_norm": 0.16323405504226685, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0059, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 11.093544137022398, | |
| "grad_norm": 0.25901684165000916, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0092, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 11.106719367588934, | |
| "grad_norm": 0.11953702569007874, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0066, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 11.119894598155467, | |
| "grad_norm": 0.13243408501148224, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0069, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 11.133069828722002, | |
| "grad_norm": 0.119338758289814, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0079, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 11.146245059288537, | |
| "grad_norm": 0.21323683857917786, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0078, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 11.159420289855072, | |
| "grad_norm": 0.21504490077495575, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.0062, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 11.172595520421607, | |
| "grad_norm": 0.17403605580329895, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.0086, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 11.185770750988143, | |
| "grad_norm": 0.11534730345010757, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0138, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 11.198945981554678, | |
| "grad_norm": 0.1640927791595459, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0094, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 11.212121212121213, | |
| "grad_norm": 0.11937487870454788, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0082, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 11.225296442687746, | |
| "grad_norm": 0.1374310553073883, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0079, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 11.238471673254281, | |
| "grad_norm": 0.2548101246356964, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0084, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 11.251646903820816, | |
| "grad_norm": 0.1334235519170761, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.007, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 11.264822134387352, | |
| "grad_norm": 0.1807614266872406, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0073, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 11.277997364953887, | |
| "grad_norm": 0.22683222591876984, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0082, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 11.291172595520422, | |
| "grad_norm": 0.16206318140029907, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0076, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 11.304347826086957, | |
| "grad_norm": 0.11189868301153183, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0088, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 11.317523056653492, | |
| "grad_norm": 0.11796924471855164, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0074, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 11.330698287220027, | |
| "grad_norm": 0.23435086011886597, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0078, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 11.34387351778656, | |
| "grad_norm": 0.13652624189853668, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0078, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 11.357048748353096, | |
| "grad_norm": 0.17509783804416656, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0082, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 11.37022397891963, | |
| "grad_norm": 0.1447538137435913, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0087, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 11.383399209486166, | |
| "grad_norm": 0.11106734722852707, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.0067, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 11.396574440052701, | |
| "grad_norm": 0.16821962594985962, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0076, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 11.409749670619236, | |
| "grad_norm": 0.11497115343809128, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0089, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 11.422924901185771, | |
| "grad_norm": 0.15908224880695343, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0083, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 11.436100131752305, | |
| "grad_norm": 0.12201621383428574, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0073, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 11.44927536231884, | |
| "grad_norm": 0.11300718039274216, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.01, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 11.462450592885375, | |
| "grad_norm": 0.11321785300970078, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0066, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 11.47562582345191, | |
| "grad_norm": 0.11770084500312805, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0075, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 11.488801054018445, | |
| "grad_norm": 0.18582583963871002, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0061, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 11.50197628458498, | |
| "grad_norm": 0.1639215350151062, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0081, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 11.515151515151516, | |
| "grad_norm": 0.0973479226231575, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0056, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 11.52832674571805, | |
| "grad_norm": 0.08141589164733887, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0076, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 11.541501976284586, | |
| "grad_norm": 0.29469650983810425, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0067, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 11.55467720685112, | |
| "grad_norm": 0.0893554612994194, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0076, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 11.567852437417654, | |
| "grad_norm": 0.16742943227291107, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0083, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 11.58102766798419, | |
| "grad_norm": 0.14368514716625214, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.0099, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 11.594202898550725, | |
| "grad_norm": 0.17125031352043152, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0108, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 11.60737812911726, | |
| "grad_norm": 0.19009582698345184, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0105, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 11.620553359683795, | |
| "grad_norm": 0.15681231021881104, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.006, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 11.63372859025033, | |
| "grad_norm": 0.09388696402311325, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0084, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 11.646903820816863, | |
| "grad_norm": 0.14405575394630432, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0078, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 11.660079051383399, | |
| "grad_norm": 0.07782807946205139, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0071, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 11.673254281949934, | |
| "grad_norm": 0.18701545894145966, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0085, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 11.686429512516469, | |
| "grad_norm": 0.2714499533176422, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0072, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 11.699604743083004, | |
| "grad_norm": 0.14982731640338898, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.0069, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 11.712779973649539, | |
| "grad_norm": 0.14660252630710602, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0068, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 11.725955204216074, | |
| "grad_norm": 0.14174430072307587, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.0067, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 11.73913043478261, | |
| "grad_norm": 0.15731292963027954, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.0093, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 11.752305665349144, | |
| "grad_norm": 0.14486129581928253, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0065, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 11.765480895915678, | |
| "grad_norm": 0.11417282372713089, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0064, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 11.778656126482213, | |
| "grad_norm": 0.089751698076725, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0067, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 11.791831357048748, | |
| "grad_norm": 0.17435140907764435, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0092, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 11.805006587615283, | |
| "grad_norm": 0.07187920808792114, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0079, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 11.818181818181818, | |
| "grad_norm": 0.1424235999584198, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0083, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 11.831357048748353, | |
| "grad_norm": 0.12420742958784103, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.007, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 11.844532279314889, | |
| "grad_norm": 0.1716492623090744, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0105, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 11.857707509881424, | |
| "grad_norm": 0.2577267587184906, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.0082, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 11.870882740447957, | |
| "grad_norm": 0.08861849457025528, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.0087, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 11.884057971014492, | |
| "grad_norm": 0.19629858434200287, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.0088, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 11.897233201581027, | |
| "grad_norm": 0.1813318282365799, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.0065, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 11.910408432147563, | |
| "grad_norm": 0.10713133215904236, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0052, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 11.923583662714098, | |
| "grad_norm": 0.17797449231147766, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0072, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 11.936758893280633, | |
| "grad_norm": 0.11036095023155212, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.0068, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 11.949934123847168, | |
| "grad_norm": 0.1084558367729187, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0075, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 11.963109354413703, | |
| "grad_norm": 0.20317208766937256, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.0103, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 11.976284584980236, | |
| "grad_norm": 0.12911272048950195, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.005, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 11.989459815546772, | |
| "grad_norm": 0.1062001958489418, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.0071, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 12.002635046113307, | |
| "grad_norm": 0.16308660805225372, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0112, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 12.015810276679842, | |
| "grad_norm": 0.18296882510185242, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.0072, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 12.028985507246377, | |
| "grad_norm": 0.18580010533332825, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0068, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 12.042160737812912, | |
| "grad_norm": 0.1033332422375679, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.0051, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 12.055335968379447, | |
| "grad_norm": 0.1866554468870163, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0073, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 12.068511198945982, | |
| "grad_norm": 0.26920342445373535, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0089, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 12.081686429512516, | |
| "grad_norm": 0.18751156330108643, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.0085, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 12.09486166007905, | |
| "grad_norm": 0.3206658959388733, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.0111, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 12.108036890645586, | |
| "grad_norm": 0.1985057294368744, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.009, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 12.121212121212121, | |
| "grad_norm": 0.17330951988697052, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.0072, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 12.134387351778656, | |
| "grad_norm": 0.08973265439271927, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0076, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 12.147562582345191, | |
| "grad_norm": 0.23255111277103424, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.0078, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 12.160737812911727, | |
| "grad_norm": 0.09232556819915771, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.0104, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 12.173913043478262, | |
| "grad_norm": 0.1634305715560913, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.0063, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 12.187088274044795, | |
| "grad_norm": 0.11620501428842545, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.0067, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 12.20026350461133, | |
| "grad_norm": 0.15891708433628082, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.0085, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 12.213438735177865, | |
| "grad_norm": 0.21395912766456604, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0068, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 12.2266139657444, | |
| "grad_norm": 0.11466138064861298, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.0069, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 12.239789196310936, | |
| "grad_norm": 0.2666526734828949, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.0064, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 12.25296442687747, | |
| "grad_norm": 0.10765407234430313, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.0065, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 12.266139657444006, | |
| "grad_norm": 0.13771207630634308, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.0095, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 12.279314888010541, | |
| "grad_norm": 0.15090085566043854, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0077, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 12.292490118577074, | |
| "grad_norm": 0.15205277502536774, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.008, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 12.30566534914361, | |
| "grad_norm": 0.12167327851057053, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.0076, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 12.318840579710145, | |
| "grad_norm": 0.10922878235578537, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0079, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 12.33201581027668, | |
| "grad_norm": 0.0954914391040802, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.0077, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 12.345191040843215, | |
| "grad_norm": 0.18894347548484802, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0092, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 12.35836627140975, | |
| "grad_norm": 0.25298139452934265, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.0084, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 12.371541501976285, | |
| "grad_norm": 0.10267756134271622, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.0088, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 12.38471673254282, | |
| "grad_norm": 0.19973057508468628, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.0077, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 12.397891963109354, | |
| "grad_norm": 0.12004857510328293, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.0064, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 12.411067193675889, | |
| "grad_norm": 0.1885608583688736, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0061, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 12.424242424242424, | |
| "grad_norm": 0.19836807250976562, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0076, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 12.437417654808959, | |
| "grad_norm": 0.0960315391421318, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.007, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 12.450592885375494, | |
| "grad_norm": 0.16369284689426422, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.0091, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 12.46376811594203, | |
| "grad_norm": 0.10038845986127853, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0064, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 12.476943346508564, | |
| "grad_norm": 0.22409868240356445, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.0084, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 12.4901185770751, | |
| "grad_norm": 0.11471178382635117, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0068, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 12.503293807641633, | |
| "grad_norm": 0.14783388376235962, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.0068, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 12.516469038208168, | |
| "grad_norm": 0.12338835746049881, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.009, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 12.529644268774703, | |
| "grad_norm": 0.17950797080993652, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.0094, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 12.542819499341238, | |
| "grad_norm": 0.20151561498641968, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.0083, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 12.555994729907773, | |
| "grad_norm": 0.2703305184841156, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.0065, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 12.569169960474309, | |
| "grad_norm": 0.11729317158460617, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0077, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 12.582345191040844, | |
| "grad_norm": 0.13801559805870056, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.0089, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 12.595520421607379, | |
| "grad_norm": 0.10463900119066238, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.0084, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 12.608695652173914, | |
| "grad_norm": 0.13275998830795288, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.0106, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 12.621870882740447, | |
| "grad_norm": 0.11285616457462311, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0068, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 12.635046113306982, | |
| "grad_norm": 0.10838264226913452, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0093, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 12.648221343873518, | |
| "grad_norm": 0.1535099595785141, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.0066, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 12.661396574440053, | |
| "grad_norm": 0.1464422047138214, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.0058, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 12.674571805006588, | |
| "grad_norm": 0.17395055294036865, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.0088, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 12.687747035573123, | |
| "grad_norm": 0.19919253885746002, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.0073, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 12.700922266139658, | |
| "grad_norm": 0.10250774025917053, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.0088, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 12.714097496706191, | |
| "grad_norm": 0.15123838186264038, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0062, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 12.727272727272727, | |
| "grad_norm": 0.13388404250144958, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.0087, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 12.740447957839262, | |
| "grad_norm": 0.12210414558649063, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.0069, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 12.753623188405797, | |
| "grad_norm": 0.16462716460227966, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0068, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 12.766798418972332, | |
| "grad_norm": 0.18494081497192383, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.0098, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 12.779973649538867, | |
| "grad_norm": 0.09822367131710052, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.0066, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 12.793148880105402, | |
| "grad_norm": 0.19610926508903503, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0069, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 12.806324110671937, | |
| "grad_norm": 0.21302397549152374, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.0094, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 12.819499341238473, | |
| "grad_norm": 0.30115124583244324, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.0072, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 12.832674571805006, | |
| "grad_norm": 0.15520079433918, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.0077, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 12.845849802371541, | |
| "grad_norm": 0.08769290894269943, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.0056, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 12.859025032938076, | |
| "grad_norm": 0.09021688997745514, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.0071, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 12.872200263504611, | |
| "grad_norm": 0.13328352570533752, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.0076, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 12.885375494071146, | |
| "grad_norm": 0.13384944200515747, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0061, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 12.898550724637682, | |
| "grad_norm": 0.13545677065849304, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0064, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 12.911725955204217, | |
| "grad_norm": 0.18425719439983368, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.0076, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 12.92490118577075, | |
| "grad_norm": 0.19322925806045532, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.0076, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 12.938076416337285, | |
| "grad_norm": 0.1715928018093109, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.0085, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 12.95125164690382, | |
| "grad_norm": 0.1342555284500122, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.0057, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 12.964426877470355, | |
| "grad_norm": 0.2071145623922348, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.0107, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 12.97760210803689, | |
| "grad_norm": 0.14439895749092102, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.008, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 12.990777338603426, | |
| "grad_norm": 0.18717682361602783, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.0096, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 13.003952569169961, | |
| "grad_norm": 0.14880332350730896, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.01, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 13.017127799736496, | |
| "grad_norm": 0.17164605855941772, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.0057, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 13.030303030303031, | |
| "grad_norm": 0.22017565369606018, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.0083, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 13.043478260869565, | |
| "grad_norm": 0.16133266687393188, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.0095, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 13.0566534914361, | |
| "grad_norm": 0.14002366364002228, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0075, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 13.069828722002635, | |
| "grad_norm": 0.20343704521656036, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.0078, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 13.08300395256917, | |
| "grad_norm": 0.09156999737024307, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0083, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 13.096179183135705, | |
| "grad_norm": 0.16819879412651062, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0061, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 13.10935441370224, | |
| "grad_norm": 0.11606054753065109, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.009, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 13.122529644268775, | |
| "grad_norm": 0.21958565711975098, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0087, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 13.13570487483531, | |
| "grad_norm": 0.09595669060945511, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0066, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 13.148880105401844, | |
| "grad_norm": 0.144814595580101, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.0086, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 13.162055335968379, | |
| "grad_norm": 0.15428341925144196, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.007, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 13.175230566534914, | |
| "grad_norm": 0.1226944774389267, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.0079, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 13.175230566534914, | |
| "step": 10000, | |
| "total_flos": 0.0, | |
| "train_loss": 0.021228669699653983, | |
| "train_runtime": 6638.0204, | |
| "train_samples_per_second": 48.207, | |
| "train_steps_per_second": 1.506 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 14, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
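
The dump above appears to be the `trainer_state.json` that the Hugging Face `Trainer` writes next to each checkpoint. Below is a minimal sketch for inspecting it, assuming the file is available in the working directory under the name `trainer_state.json` (the path is an assumption, not taken from this log). It pulls the per-step loss values out of `log_history` and cross-checks the final summary fields (`train_runtime`, `train_steps_per_second`) against `max_steps`.

```python
import json

# Load the Trainer state dump (filename/path is an assumption; point it at your checkpoint dir).
with open("trainer_state.json") as f:
    state = json.load(f)

# Collect (step, loss) pairs from the per-step log entries.
# The final summary entry has no "loss" key, so the membership check skips it.
history = [(entry["step"], entry["loss"]) for entry in state["log_history"] if "loss" in entry]

print(f"logged points: {len(history)}")
print(f"first: step {history[0][0]}, loss {history[0][1]}")
print(f"last:  step {history[-1][0]}, loss {history[-1][1]}")

# Sanity check the summary block: runtime * steps/sec should land near max_steps.
summary = state["log_history"][-1]
approx_steps = summary["train_runtime"] * summary["train_steps_per_second"]
print(f"runtime x steps/sec ~ {approx_steps:.0f} (max_steps = {state['max_steps']})")
```

As a quick consistency check on the summary values in this log: 6638.02 s x 1.506 steps/s ~ 9997 steps, in line with `max_steps = 10000`, and 48.207 samples/s x 6638.02 s ~ 320,000 samples, i.e. 10,000 steps at the reported `train_batch_size` of 32.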