| { | |
| "best_global_step": null, | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 11.1731843575419, | |
| "eval_steps": 500, | |
| "global_step": 10000, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0111731843575419, | |
| "grad_norm": 13.190491676330566, | |
| "learning_rate": 1.8e-06, | |
| "loss": 1.0475, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.0223463687150838, | |
| "grad_norm": 6.592611789703369, | |
| "learning_rate": 3.8e-06, | |
| "loss": 0.7807, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.0335195530726257, | |
| "grad_norm": 4.4936323165893555, | |
| "learning_rate": 5.8e-06, | |
| "loss": 0.3357, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.0446927374301676, | |
| "grad_norm": 2.287525177001953, | |
| "learning_rate": 7.8e-06, | |
| "loss": 0.2137, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.055865921787709494, | |
| "grad_norm": 1.441115379333496, | |
| "learning_rate": 9.800000000000001e-06, | |
| "loss": 0.1718, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.0670391061452514, | |
| "grad_norm": 1.250541090965271, | |
| "learning_rate": 1.18e-05, | |
| "loss": 0.1384, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.0782122905027933, | |
| "grad_norm": 0.822912335395813, | |
| "learning_rate": 1.3800000000000002e-05, | |
| "loss": 0.1157, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.0893854748603352, | |
| "grad_norm": 1.0248991250991821, | |
| "learning_rate": 1.58e-05, | |
| "loss": 0.1128, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.1005586592178771, | |
| "grad_norm": 0.9630948305130005, | |
| "learning_rate": 1.78e-05, | |
| "loss": 0.1033, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.11173184357541899, | |
| "grad_norm": 0.9368418455123901, | |
| "learning_rate": 1.9800000000000004e-05, | |
| "loss": 0.1144, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.12290502793296089, | |
| "grad_norm": 1.318041443824768, | |
| "learning_rate": 2.18e-05, | |
| "loss": 0.1036, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.1340782122905028, | |
| "grad_norm": 0.9262441396713257, | |
| "learning_rate": 2.38e-05, | |
| "loss": 0.0985, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.1452513966480447, | |
| "grad_norm": 0.6435949206352234, | |
| "learning_rate": 2.58e-05, | |
| "loss": 0.092, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.1564245810055866, | |
| "grad_norm": 1.1955463886260986, | |
| "learning_rate": 2.7800000000000005e-05, | |
| "loss": 0.0845, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.16759776536312848, | |
| "grad_norm": 0.950725257396698, | |
| "learning_rate": 2.98e-05, | |
| "loss": 0.0795, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.1787709497206704, | |
| "grad_norm": 1.0833418369293213, | |
| "learning_rate": 3.18e-05, | |
| "loss": 0.0829, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.18994413407821228, | |
| "grad_norm": 0.9835636019706726, | |
| "learning_rate": 3.38e-05, | |
| "loss": 0.0779, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.2011173184357542, | |
| "grad_norm": 1.331928014755249, | |
| "learning_rate": 3.58e-05, | |
| "loss": 0.0812, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.2122905027932961, | |
| "grad_norm": 1.063370704650879, | |
| "learning_rate": 3.7800000000000004e-05, | |
| "loss": 0.0759, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.22346368715083798, | |
| "grad_norm": 0.9621316194534302, | |
| "learning_rate": 3.9800000000000005e-05, | |
| "loss": 0.0739, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.2346368715083799, | |
| "grad_norm": 1.1999611854553223, | |
| "learning_rate": 4.18e-05, | |
| "loss": 0.0753, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.24581005586592178, | |
| "grad_norm": 0.8318652510643005, | |
| "learning_rate": 4.38e-05, | |
| "loss": 0.0674, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.2569832402234637, | |
| "grad_norm": 0.8618952035903931, | |
| "learning_rate": 4.58e-05, | |
| "loss": 0.0694, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.2681564245810056, | |
| "grad_norm": 1.0187468528747559, | |
| "learning_rate": 4.78e-05, | |
| "loss": 0.0751, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.27932960893854747, | |
| "grad_norm": 1.002341389656067, | |
| "learning_rate": 4.9800000000000004e-05, | |
| "loss": 0.0578, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.2905027932960894, | |
| "grad_norm": 0.7851305603981018, | |
| "learning_rate": 5.1800000000000005e-05, | |
| "loss": 0.0646, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.3016759776536313, | |
| "grad_norm": 0.7979455590248108, | |
| "learning_rate": 5.380000000000001e-05, | |
| "loss": 0.0686, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.3128491620111732, | |
| "grad_norm": 0.7960092425346375, | |
| "learning_rate": 5.580000000000001e-05, | |
| "loss": 0.0617, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.3240223463687151, | |
| "grad_norm": 0.9286141991615295, | |
| "learning_rate": 5.7799999999999995e-05, | |
| "loss": 0.0691, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.33519553072625696, | |
| "grad_norm": 0.6999386548995972, | |
| "learning_rate": 5.9800000000000003e-05, | |
| "loss": 0.0568, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.3463687150837989, | |
| "grad_norm": 0.6877157092094421, | |
| "learning_rate": 6.18e-05, | |
| "loss": 0.06, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.3575418994413408, | |
| "grad_norm": 0.7681863903999329, | |
| "learning_rate": 6.38e-05, | |
| "loss": 0.0566, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.3687150837988827, | |
| "grad_norm": 0.6397208571434021, | |
| "learning_rate": 6.58e-05, | |
| "loss": 0.0558, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.37988826815642457, | |
| "grad_norm": 0.6604412794113159, | |
| "learning_rate": 6.780000000000001e-05, | |
| "loss": 0.0521, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.39106145251396646, | |
| "grad_norm": 0.8915361166000366, | |
| "learning_rate": 6.98e-05, | |
| "loss": 0.0474, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.4022346368715084, | |
| "grad_norm": 1.0175302028656006, | |
| "learning_rate": 7.18e-05, | |
| "loss": 0.0487, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.4134078212290503, | |
| "grad_norm": 0.608553409576416, | |
| "learning_rate": 7.38e-05, | |
| "loss": 0.0511, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.4245810055865922, | |
| "grad_norm": 0.6215671300888062, | |
| "learning_rate": 7.58e-05, | |
| "loss": 0.0523, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.43575418994413406, | |
| "grad_norm": 0.6975111365318298, | |
| "learning_rate": 7.780000000000001e-05, | |
| "loss": 0.0537, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.44692737430167595, | |
| "grad_norm": 0.647553026676178, | |
| "learning_rate": 7.98e-05, | |
| "loss": 0.05, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.4581005586592179, | |
| "grad_norm": 0.9006842970848083, | |
| "learning_rate": 8.18e-05, | |
| "loss": 0.055, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.4692737430167598, | |
| "grad_norm": 0.7171258330345154, | |
| "learning_rate": 8.38e-05, | |
| "loss": 0.0493, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.48044692737430167, | |
| "grad_norm": 0.8186599016189575, | |
| "learning_rate": 8.58e-05, | |
| "loss": 0.0532, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.49162011173184356, | |
| "grad_norm": 0.736146092414856, | |
| "learning_rate": 8.78e-05, | |
| "loss": 0.0522, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.5027932960893855, | |
| "grad_norm": 0.7795101404190063, | |
| "learning_rate": 8.98e-05, | |
| "loss": 0.0444, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.5139664804469274, | |
| "grad_norm": 0.9831286668777466, | |
| "learning_rate": 9.180000000000001e-05, | |
| "loss": 0.0488, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.5251396648044693, | |
| "grad_norm": 0.506114661693573, | |
| "learning_rate": 9.38e-05, | |
| "loss": 0.0514, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.5363128491620112, | |
| "grad_norm": 1.0282622575759888, | |
| "learning_rate": 9.58e-05, | |
| "loss": 0.0532, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.547486033519553, | |
| "grad_norm": 0.8823404908180237, | |
| "learning_rate": 9.78e-05, | |
| "loss": 0.0537, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.5586592178770949, | |
| "grad_norm": 0.5598077774047852, | |
| "learning_rate": 9.98e-05, | |
| "loss": 0.048, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.5698324022346368, | |
| "grad_norm": 0.6227619647979736, | |
| "learning_rate": 9.9999778549206e-05, | |
| "loss": 0.047, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.5810055865921788, | |
| "grad_norm": 0.6840865015983582, | |
| "learning_rate": 9.999901304280685e-05, | |
| "loss": 0.046, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.5921787709497207, | |
| "grad_norm": 0.592741072177887, | |
| "learning_rate": 9.999770075521164e-05, | |
| "loss": 0.0442, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.6033519553072626, | |
| "grad_norm": 0.6069875955581665, | |
| "learning_rate": 9.99958417007713e-05, | |
| "loss": 0.0434, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.6145251396648045, | |
| "grad_norm": 0.5454892516136169, | |
| "learning_rate": 9.999343589981615e-05, | |
| "loss": 0.0475, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.6256983240223464, | |
| "grad_norm": 0.8275286555290222, | |
| "learning_rate": 9.999048337865568e-05, | |
| "loss": 0.0467, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.6368715083798883, | |
| "grad_norm": 0.4879235327243805, | |
| "learning_rate": 9.998698416957815e-05, | |
| "loss": 0.0397, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.6480446927374302, | |
| "grad_norm": 0.42566731572151184, | |
| "learning_rate": 9.998293831085037e-05, | |
| "loss": 0.0424, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.659217877094972, | |
| "grad_norm": 0.7584878206253052, | |
| "learning_rate": 9.997834584671719e-05, | |
| "loss": 0.0387, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.6703910614525139, | |
| "grad_norm": 0.5318875908851624, | |
| "learning_rate": 9.997320682740107e-05, | |
| "loss": 0.0446, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.6815642458100558, | |
| "grad_norm": 0.3701190650463104, | |
| "learning_rate": 9.996752130910149e-05, | |
| "loss": 0.045, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.6927374301675978, | |
| "grad_norm": 0.3234793245792389, | |
| "learning_rate": 9.99612893539944e-05, | |
| "loss": 0.0343, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.7039106145251397, | |
| "grad_norm": 0.6376948952674866, | |
| "learning_rate": 9.995451103023144e-05, | |
| "loss": 0.0355, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.7150837988826816, | |
| "grad_norm": 0.5927942395210266, | |
| "learning_rate": 9.994718641193928e-05, | |
| "loss": 0.0367, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.7262569832402235, | |
| "grad_norm": 0.46763601899147034, | |
| "learning_rate": 9.993931557921874e-05, | |
| "loss": 0.0399, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.7374301675977654, | |
| "grad_norm": 0.6515280604362488, | |
| "learning_rate": 9.993089861814402e-05, | |
| "loss": 0.0391, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.7486033519553073, | |
| "grad_norm": 0.33606088161468506, | |
| "learning_rate": 9.992193562076166e-05, | |
| "loss": 0.0407, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.7597765363128491, | |
| "grad_norm": 0.9731888771057129, | |
| "learning_rate": 9.991242668508954e-05, | |
| "loss": 0.0435, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.770949720670391, | |
| "grad_norm": 0.4574678838253021, | |
| "learning_rate": 9.990237191511587e-05, | |
| "loss": 0.0347, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.7821229050279329, | |
| "grad_norm": 0.5672054886817932, | |
| "learning_rate": 9.989177142079802e-05, | |
| "loss": 0.0392, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.7932960893854749, | |
| "grad_norm": 0.5358249545097351, | |
| "learning_rate": 9.988062531806126e-05, | |
| "loss": 0.0452, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.8044692737430168, | |
| "grad_norm": 0.753917932510376, | |
| "learning_rate": 9.986893372879762e-05, | |
| "loss": 0.0344, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.8156424581005587, | |
| "grad_norm": 0.689767062664032, | |
| "learning_rate": 9.985669678086443e-05, | |
| "loss": 0.038, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.8268156424581006, | |
| "grad_norm": 0.42885977029800415, | |
| "learning_rate": 9.984391460808298e-05, | |
| "loss": 0.0347, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.8379888268156425, | |
| "grad_norm": 0.4192526042461395, | |
| "learning_rate": 9.983058735023709e-05, | |
| "loss": 0.0359, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.8491620111731844, | |
| "grad_norm": 0.4939801096916199, | |
| "learning_rate": 9.98167151530715e-05, | |
| "loss": 0.0328, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.8603351955307262, | |
| "grad_norm": 0.5736432075500488, | |
| "learning_rate": 9.980229816829034e-05, | |
| "loss": 0.0349, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.8715083798882681, | |
| "grad_norm": 0.3758193254470825, | |
| "learning_rate": 9.978733655355544e-05, | |
| "loss": 0.0374, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.88268156424581, | |
| "grad_norm": 0.46636465191841125, | |
| "learning_rate": 9.977183047248464e-05, | |
| "loss": 0.0363, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.8938547486033519, | |
| "grad_norm": 0.359456866979599, | |
| "learning_rate": 9.975578009464992e-05, | |
| "loss": 0.0377, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.9050279329608939, | |
| "grad_norm": 0.5090558528900146, | |
| "learning_rate": 9.97391855955757e-05, | |
| "loss": 0.0411, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.9162011173184358, | |
| "grad_norm": 0.41094401478767395, | |
| "learning_rate": 9.972204715673669e-05, | |
| "loss": 0.042, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.9273743016759777, | |
| "grad_norm": 0.35526952147483826, | |
| "learning_rate": 9.970436496555617e-05, | |
| "loss": 0.0356, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.9385474860335196, | |
| "grad_norm": 0.5944293141365051, | |
| "learning_rate": 9.968613921540373e-05, | |
| "loss": 0.0363, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.9497206703910615, | |
| "grad_norm": 0.4854707717895508, | |
| "learning_rate": 9.966737010559326e-05, | |
| "loss": 0.044, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.9608938547486033, | |
| "grad_norm": 0.8677361011505127, | |
| "learning_rate": 9.964805784138072e-05, | |
| "loss": 0.035, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.9720670391061452, | |
| "grad_norm": 0.5716046690940857, | |
| "learning_rate": 9.962820263396195e-05, | |
| "loss": 0.0355, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.9832402234636871, | |
| "grad_norm": 0.6367104053497314, | |
| "learning_rate": 9.960780470047033e-05, | |
| "loss": 0.0345, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.994413407821229, | |
| "grad_norm": 0.5620201230049133, | |
| "learning_rate": 9.958686426397437e-05, | |
| "loss": 0.0335, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.005586592178771, | |
| "grad_norm": 0.6383714079856873, | |
| "learning_rate": 9.956538155347534e-05, | |
| "loss": 0.0375, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 1.0167597765363128, | |
| "grad_norm": 0.6897162795066833, | |
| "learning_rate": 9.95433568039047e-05, | |
| "loss": 0.029, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 1.0279329608938548, | |
| "grad_norm": 0.7410043478012085, | |
| "learning_rate": 9.952079025612162e-05, | |
| "loss": 0.0382, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 1.0391061452513966, | |
| "grad_norm": 0.7957388758659363, | |
| "learning_rate": 9.949768215691022e-05, | |
| "loss": 0.0378, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 1.0502793296089385, | |
| "grad_norm": 0.5646666288375854, | |
| "learning_rate": 9.9474032758977e-05, | |
| "loss": 0.0362, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 1.0614525139664805, | |
| "grad_norm": 0.4214427173137665, | |
| "learning_rate": 9.944984232094794e-05, | |
| "loss": 0.0391, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 1.0726256983240223, | |
| "grad_norm": 0.4713134169578552, | |
| "learning_rate": 9.942511110736584e-05, | |
| "loss": 0.0308, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 1.0837988826815643, | |
| "grad_norm": 0.6919177770614624, | |
| "learning_rate": 9.939983938868726e-05, | |
| "loss": 0.0349, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 1.094972067039106, | |
| "grad_norm": 0.7058985233306885, | |
| "learning_rate": 9.93740274412797e-05, | |
| "loss": 0.0363, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 1.106145251396648, | |
| "grad_norm": 0.6045360565185547, | |
| "learning_rate": 9.934767554741846e-05, | |
| "loss": 0.0398, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 1.1173184357541899, | |
| "grad_norm": 0.3165498375892639, | |
| "learning_rate": 9.932078399528361e-05, | |
| "loss": 0.0317, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 1.1284916201117319, | |
| "grad_norm": 0.5136957764625549, | |
| "learning_rate": 9.929335307895689e-05, | |
| "loss": 0.0321, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 1.1396648044692737, | |
| "grad_norm": 0.6026665568351746, | |
| "learning_rate": 9.926538309841839e-05, | |
| "loss": 0.03, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 1.1508379888268156, | |
| "grad_norm": 0.4883114993572235, | |
| "learning_rate": 9.923687435954334e-05, | |
| "loss": 0.0274, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 1.1620111731843576, | |
| "grad_norm": 0.32800090312957764, | |
| "learning_rate": 9.920782717409873e-05, | |
| "loss": 0.0387, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 1.1731843575418994, | |
| "grad_norm": 0.6770690083503723, | |
| "learning_rate": 9.917824185973994e-05, | |
| "loss": 0.0292, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 1.1843575418994414, | |
| "grad_norm": 0.5624381899833679, | |
| "learning_rate": 9.914811874000723e-05, | |
| "loss": 0.0404, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 1.1955307262569832, | |
| "grad_norm": 0.3495476543903351, | |
| "learning_rate": 9.911745814432218e-05, | |
| "loss": 0.0349, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 1.2067039106145252, | |
| "grad_norm": 0.7264448404312134, | |
| "learning_rate": 9.90862604079842e-05, | |
| "loss": 0.0379, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 1.217877094972067, | |
| "grad_norm": 0.6616931557655334, | |
| "learning_rate": 9.90545258721667e-05, | |
| "loss": 0.0282, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 1.229050279329609, | |
| "grad_norm": 0.34278085827827454, | |
| "learning_rate": 9.90222548839135e-05, | |
| "loss": 0.0347, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 1.2402234636871508, | |
| "grad_norm": 0.6887082457542419, | |
| "learning_rate": 9.898944779613495e-05, | |
| "loss": 0.0342, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 1.2513966480446927, | |
| "grad_norm": 0.6070915460586548, | |
| "learning_rate": 9.89561049676041e-05, | |
| "loss": 0.0341, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 1.2625698324022347, | |
| "grad_norm": 0.4935801923274994, | |
| "learning_rate": 9.89222267629528e-05, | |
| "loss": 0.0395, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 1.2737430167597765, | |
| "grad_norm": 0.6002891659736633, | |
| "learning_rate": 9.888781355266763e-05, | |
| "loss": 0.036, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 1.2849162011173183, | |
| "grad_norm": 0.3555659353733063, | |
| "learning_rate": 9.885286571308598e-05, | |
| "loss": 0.0331, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 1.2960893854748603, | |
| "grad_norm": 0.4441574513912201, | |
| "learning_rate": 9.881738362639182e-05, | |
| "loss": 0.0336, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 1.3072625698324023, | |
| "grad_norm": 0.34948721528053284, | |
| "learning_rate": 9.878136768061154e-05, | |
| "loss": 0.032, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 1.318435754189944, | |
| "grad_norm": 0.5667333006858826, | |
| "learning_rate": 9.874481826960979e-05, | |
| "loss": 0.0333, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 1.329608938547486, | |
| "grad_norm": 0.6017534136772156, | |
| "learning_rate": 9.870773579308503e-05, | |
| "loss": 0.0324, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 1.3407821229050279, | |
| "grad_norm": 0.44944337010383606, | |
| "learning_rate": 9.867012065656533e-05, | |
| "loss": 0.0353, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 1.3519553072625698, | |
| "grad_norm": 0.48210635781288147, | |
| "learning_rate": 9.863197327140376e-05, | |
| "loss": 0.0311, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 1.3631284916201118, | |
| "grad_norm": 0.4882315993309021, | |
| "learning_rate": 9.859329405477403e-05, | |
| "loss": 0.0292, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 1.3743016759776536, | |
| "grad_norm": 0.6849837899208069, | |
| "learning_rate": 9.855408342966585e-05, | |
| "loss": 0.0386, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 1.3854748603351954, | |
| "grad_norm": 0.7890218496322632, | |
| "learning_rate": 9.851434182488033e-05, | |
| "loss": 0.032, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 1.3966480446927374, | |
| "grad_norm": 0.6017642021179199, | |
| "learning_rate": 9.84740696750253e-05, | |
| "loss": 0.0367, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 1.4078212290502794, | |
| "grad_norm": 0.4289197325706482, | |
| "learning_rate": 9.843326742051055e-05, | |
| "loss": 0.0304, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 1.4189944134078212, | |
| "grad_norm": 0.6042135953903198, | |
| "learning_rate": 9.839193550754297e-05, | |
| "loss": 0.0367, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 1.4301675977653632, | |
| "grad_norm": 0.2792087197303772, | |
| "learning_rate": 9.835007438812177e-05, | |
| "loss": 0.0318, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 1.441340782122905, | |
| "grad_norm": 0.38916629552841187, | |
| "learning_rate": 9.830768452003341e-05, | |
| "loss": 0.0351, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 1.452513966480447, | |
| "grad_norm": 0.4702565371990204, | |
| "learning_rate": 9.826476636684671e-05, | |
| "loss": 0.0346, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 1.463687150837989, | |
| "grad_norm": 0.5284172296524048, | |
| "learning_rate": 9.822132039790773e-05, | |
| "loss": 0.0302, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 1.4748603351955307, | |
| "grad_norm": 0.6767519116401672, | |
| "learning_rate": 9.817734708833461e-05, | |
| "loss": 0.0342, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 1.4860335195530725, | |
| "grad_norm": 0.39435723423957825, | |
| "learning_rate": 9.813284691901243e-05, | |
| "loss": 0.0302, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 1.4972067039106145, | |
| "grad_norm": 0.5077378153800964, | |
| "learning_rate": 9.808782037658792e-05, | |
| "loss": 0.029, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 1.5083798882681565, | |
| "grad_norm": 0.432860791683197, | |
| "learning_rate": 9.804226795346411e-05, | |
| "loss": 0.03, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 1.5195530726256983, | |
| "grad_norm": 0.38271060585975647, | |
| "learning_rate": 9.799619014779503e-05, | |
| "loss": 0.0322, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 1.5307262569832403, | |
| "grad_norm": 0.3990545868873596, | |
| "learning_rate": 9.794958746348013e-05, | |
| "loss": 0.0273, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 1.541899441340782, | |
| "grad_norm": 0.5833948850631714, | |
| "learning_rate": 9.790246041015896e-05, | |
| "loss": 0.0333, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 1.553072625698324, | |
| "grad_norm": 0.4858004152774811, | |
| "learning_rate": 9.785480950320538e-05, | |
| "loss": 0.0288, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 1.564245810055866, | |
| "grad_norm": 0.4144662022590637, | |
| "learning_rate": 9.78066352637221e-05, | |
| "loss": 0.0292, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 1.5754189944134078, | |
| "grad_norm": 0.5155880451202393, | |
| "learning_rate": 9.775793821853488e-05, | |
| "loss": 0.0304, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 1.5865921787709496, | |
| "grad_norm": 0.4640715420246124, | |
| "learning_rate": 9.77087189001868e-05, | |
| "loss": 0.0329, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 1.5977653631284916, | |
| "grad_norm": 0.5954652428627014, | |
| "learning_rate": 9.765897784693243e-05, | |
| "loss": 0.0299, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 1.6089385474860336, | |
| "grad_norm": 0.6071608662605286, | |
| "learning_rate": 9.760871560273197e-05, | |
| "loss": 0.0329, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 1.6201117318435754, | |
| "grad_norm": 0.45499187707901, | |
| "learning_rate": 9.755793271724526e-05, | |
| "loss": 0.0312, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 1.6312849162011172, | |
| "grad_norm": 0.45236775279045105, | |
| "learning_rate": 9.750662974582584e-05, | |
| "loss": 0.0304, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 1.6424581005586592, | |
| "grad_norm": 0.5543355941772461, | |
| "learning_rate": 9.745480724951473e-05, | |
| "loss": 0.027, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 1.6536312849162011, | |
| "grad_norm": 0.6362754106521606, | |
| "learning_rate": 9.740246579503447e-05, | |
| "loss": 0.0316, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 1.6648044692737431, | |
| "grad_norm": 0.580976128578186, | |
| "learning_rate": 9.734960595478284e-05, | |
| "loss": 0.0271, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 1.675977653631285, | |
| "grad_norm": 0.309814453125, | |
| "learning_rate": 9.729622830682657e-05, | |
| "loss": 0.0312, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 1.6871508379888267, | |
| "grad_norm": 0.363379567861557, | |
| "learning_rate": 9.724233343489504e-05, | |
| "loss": 0.0279, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 1.6983240223463687, | |
| "grad_norm": 0.5480021834373474, | |
| "learning_rate": 9.718792192837396e-05, | |
| "loss": 0.0306, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 1.7094972067039107, | |
| "grad_norm": 0.45745283365249634, | |
| "learning_rate": 9.713299438229886e-05, | |
| "loss": 0.0285, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 1.7206703910614525, | |
| "grad_norm": 0.438733845949173, | |
| "learning_rate": 9.707755139734855e-05, | |
| "loss": 0.0364, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 1.7318435754189943, | |
| "grad_norm": 0.42913299798965454, | |
| "learning_rate": 9.702159357983866e-05, | |
| "loss": 0.0318, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 1.7430167597765363, | |
| "grad_norm": 0.3040868639945984, | |
| "learning_rate": 9.696512154171492e-05, | |
| "loss": 0.03, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 1.7541899441340782, | |
| "grad_norm": 0.6359307169914246, | |
| "learning_rate": 9.690813590054645e-05, | |
| "loss": 0.0292, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 1.7653631284916202, | |
| "grad_norm": 0.6539324522018433, | |
| "learning_rate": 9.685063727951914e-05, | |
| "loss": 0.0272, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 1.776536312849162, | |
| "grad_norm": 0.24541224539279938, | |
| "learning_rate": 9.679262630742865e-05, | |
| "loss": 0.0311, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 1.7877094972067038, | |
| "grad_norm": 0.43337199091911316, | |
| "learning_rate": 9.673410361867373e-05, | |
| "loss": 0.0296, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 1.7988826815642458, | |
| "grad_norm": 0.40654614567756653, | |
| "learning_rate": 9.667506985324909e-05, | |
| "loss": 0.0288, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 1.8100558659217878, | |
| "grad_norm": 0.5727466344833374, | |
| "learning_rate": 9.661552565673855e-05, | |
| "loss": 0.0262, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 1.8212290502793296, | |
| "grad_norm": 0.4763135612010956, | |
| "learning_rate": 9.655547168030789e-05, | |
| "loss": 0.027, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 1.8324022346368714, | |
| "grad_norm": 0.4589232802391052, | |
| "learning_rate": 9.649490858069777e-05, | |
| "loss": 0.0335, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 1.8435754189944134, | |
| "grad_norm": 0.6089374423027039, | |
| "learning_rate": 9.643383702021658e-05, | |
| "loss": 0.0345, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 1.8547486033519553, | |
| "grad_norm": 0.5817599296569824, | |
| "learning_rate": 9.637225766673307e-05, | |
| "loss": 0.0258, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 1.8659217877094973, | |
| "grad_norm": 0.40041080117225647, | |
| "learning_rate": 9.631017119366922e-05, | |
| "loss": 0.0234, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 1.8770949720670391, | |
| "grad_norm": 0.6193286180496216, | |
| "learning_rate": 9.624757827999273e-05, | |
| "loss": 0.0293, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 1.888268156424581, | |
| "grad_norm": 0.4545292258262634, | |
| "learning_rate": 9.618447961020971e-05, | |
| "loss": 0.0302, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 1.899441340782123, | |
| "grad_norm": 0.5392395853996277, | |
| "learning_rate": 9.612087587435707e-05, | |
| "loss": 0.0289, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 1.910614525139665, | |
| "grad_norm": 0.35454365611076355, | |
| "learning_rate": 9.605676776799508e-05, | |
| "loss": 0.031, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 1.9217877094972067, | |
| "grad_norm": 0.5708985924720764, | |
| "learning_rate": 9.599215599219973e-05, | |
| "loss": 0.033, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 1.9329608938547485, | |
| "grad_norm": 0.5149425268173218, | |
| "learning_rate": 9.592704125355505e-05, | |
| "loss": 0.0323, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 1.9441340782122905, | |
| "grad_norm": 0.32631558179855347, | |
| "learning_rate": 9.586142426414538e-05, | |
| "loss": 0.0276, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 1.9553072625698324, | |
| "grad_norm": 0.17594584822654724, | |
| "learning_rate": 9.57953057415476e-05, | |
| "loss": 0.0267, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 1.9664804469273744, | |
| "grad_norm": 0.48492005467414856, | |
| "learning_rate": 9.572868640882328e-05, | |
| "loss": 0.0322, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 1.9776536312849162, | |
| "grad_norm": 0.5293797254562378, | |
| "learning_rate": 9.56615669945108e-05, | |
| "loss": 0.0263, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 1.988826815642458, | |
| "grad_norm": 0.42673739790916443, | |
| "learning_rate": 9.55939482326173e-05, | |
| "loss": 0.0299, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.4551115334033966, | |
| "learning_rate": 9.552583086261069e-05, | |
| "loss": 0.031, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 2.011173184357542, | |
| "grad_norm": 0.26057150959968567, | |
| "learning_rate": 9.545721562941168e-05, | |
| "loss": 0.0282, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 2.022346368715084, | |
| "grad_norm": 0.32846423983573914, | |
| "learning_rate": 9.538810328338543e-05, | |
| "loss": 0.0264, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 2.0335195530726256, | |
| "grad_norm": 0.44937869906425476, | |
| "learning_rate": 9.531849458033349e-05, | |
| "loss": 0.0253, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 2.0446927374301676, | |
| "grad_norm": 0.3289620876312256, | |
| "learning_rate": 9.524839028148547e-05, | |
| "loss": 0.0252, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 2.0558659217877095, | |
| "grad_norm": 0.5227333307266235, | |
| "learning_rate": 9.517779115349077e-05, | |
| "loss": 0.0275, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 2.0670391061452515, | |
| "grad_norm": 0.3700127601623535, | |
| "learning_rate": 9.510669796841014e-05, | |
| "loss": 0.0229, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 2.078212290502793, | |
| "grad_norm": 0.357212632894516, | |
| "learning_rate": 9.503511150370727e-05, | |
| "loss": 0.0281, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 2.089385474860335, | |
| "grad_norm": 0.3466659188270569, | |
| "learning_rate": 9.496303254224024e-05, | |
| "loss": 0.0281, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 2.100558659217877, | |
| "grad_norm": 0.4313991665840149, | |
| "learning_rate": 9.489046187225306e-05, | |
| "loss": 0.0281, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 2.111731843575419, | |
| "grad_norm": 0.4367799162864685, | |
| "learning_rate": 9.481740028736692e-05, | |
| "loss": 0.024, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 2.122905027932961, | |
| "grad_norm": 0.466824471950531, | |
| "learning_rate": 9.474384858657164e-05, | |
| "loss": 0.0314, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 2.1340782122905027, | |
| "grad_norm": 0.5016106367111206, | |
| "learning_rate": 9.466980757421679e-05, | |
| "loss": 0.0269, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 2.1452513966480447, | |
| "grad_norm": 0.3497682213783264, | |
| "learning_rate": 9.459527806000305e-05, | |
| "loss": 0.0279, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 2.1564245810055866, | |
| "grad_norm": 0.2097923755645752, | |
| "learning_rate": 9.452026085897325e-05, | |
| "loss": 0.0251, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 2.1675977653631286, | |
| "grad_norm": 0.35045918822288513, | |
| "learning_rate": 9.444475679150348e-05, | |
| "loss": 0.0265, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 2.17877094972067, | |
| "grad_norm": 0.3739369809627533, | |
| "learning_rate": 9.436876668329411e-05, | |
| "loss": 0.027, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 2.189944134078212, | |
| "grad_norm": 0.27002036571502686, | |
| "learning_rate": 9.429229136536079e-05, | |
| "loss": 0.0232, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 2.201117318435754, | |
| "grad_norm": 0.3334072232246399, | |
| "learning_rate": 9.421533167402534e-05, | |
| "loss": 0.0284, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 2.212290502793296, | |
| "grad_norm": 0.4417743384838104, | |
| "learning_rate": 9.413788845090666e-05, | |
| "loss": 0.0291, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 2.223463687150838, | |
| "grad_norm": 0.6442617774009705, | |
| "learning_rate": 9.405996254291136e-05, | |
| "loss": 0.0308, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 2.2346368715083798, | |
| "grad_norm": 0.6156677603721619, | |
| "learning_rate": 9.398155480222474e-05, | |
| "loss": 0.0262, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 2.2458100558659218, | |
| "grad_norm": 0.5929524302482605, | |
| "learning_rate": 9.390266608630128e-05, | |
| "loss": 0.0232, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 2.2569832402234637, | |
| "grad_norm": 0.5453823804855347, | |
| "learning_rate": 9.38232972578553e-05, | |
| "loss": 0.0245, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 2.2681564245810057, | |
| "grad_norm": 0.4303855001926422, | |
| "learning_rate": 9.374344918485164e-05, | |
| "loss": 0.0334, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 2.2793296089385473, | |
| "grad_norm": 0.6418226957321167, | |
| "learning_rate": 9.366312274049602e-05, | |
| "loss": 0.0307, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 2.2905027932960893, | |
| "grad_norm": 0.5516045093536377, | |
| "learning_rate": 9.358231880322554e-05, | |
| "loss": 0.0316, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 2.3016759776536313, | |
| "grad_norm": 0.6908289194107056, | |
| "learning_rate": 9.350103825669916e-05, | |
| "loss": 0.0303, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 2.3128491620111733, | |
| "grad_norm": 0.4831877648830414, | |
| "learning_rate": 9.341928198978787e-05, | |
| "loss": 0.0261, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 2.3240223463687153, | |
| "grad_norm": 0.591765284538269, | |
| "learning_rate": 9.333705089656512e-05, | |
| "loss": 0.0304, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 2.335195530726257, | |
| "grad_norm": 0.4540766179561615, | |
| "learning_rate": 9.325434587629698e-05, | |
| "loss": 0.0261, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 2.346368715083799, | |
| "grad_norm": 0.3805314898490906, | |
| "learning_rate": 9.31711678334323e-05, | |
| "loss": 0.024, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 2.357541899441341, | |
| "grad_norm": 0.3124728500843048, | |
| "learning_rate": 9.308751767759282e-05, | |
| "loss": 0.025, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 2.368715083798883, | |
| "grad_norm": 0.3138362765312195, | |
| "learning_rate": 9.300339632356325e-05, | |
| "loss": 0.0237, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 2.3798882681564244, | |
| "grad_norm": 0.3131006062030792, | |
| "learning_rate": 9.291880469128124e-05, | |
| "loss": 0.0278, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 2.3910614525139664, | |
| "grad_norm": 0.4441392123699188, | |
| "learning_rate": 9.283374370582732e-05, | |
| "loss": 0.0291, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 2.4022346368715084, | |
| "grad_norm": 0.4318331480026245, | |
| "learning_rate": 9.274821429741482e-05, | |
| "loss": 0.0343, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 2.4134078212290504, | |
| "grad_norm": 0.4163145124912262, | |
| "learning_rate": 9.266221740137961e-05, | |
| "loss": 0.0257, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 2.4245810055865924, | |
| "grad_norm": 0.3285245895385742, | |
| "learning_rate": 9.257575395817001e-05, | |
| "loss": 0.0221, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 2.435754189944134, | |
| "grad_norm": 0.39286714792251587, | |
| "learning_rate": 9.248882491333637e-05, | |
| "loss": 0.0243, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 2.446927374301676, | |
| "grad_norm": 0.5441449284553528, | |
| "learning_rate": 9.240143121752076e-05, | |
| "loss": 0.0288, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 2.458100558659218, | |
| "grad_norm": 0.3258397579193115, | |
| "learning_rate": 9.23135738264467e-05, | |
| "loss": 0.0262, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 2.46927374301676, | |
| "grad_norm": 0.29159262776374817, | |
| "learning_rate": 9.222525370090849e-05, | |
| "loss": 0.0229, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 2.4804469273743015, | |
| "grad_norm": 0.483515202999115, | |
| "learning_rate": 9.213647180676088e-05, | |
| "loss": 0.0256, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 2.4916201117318435, | |
| "grad_norm": 0.41556450724601746, | |
| "learning_rate": 9.204722911490846e-05, | |
| "loss": 0.0249, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 2.5027932960893855, | |
| "grad_norm": 0.5685746669769287, | |
| "learning_rate": 9.1957526601295e-05, | |
| "loss": 0.0267, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 2.5139664804469275, | |
| "grad_norm": 0.3909520208835602, | |
| "learning_rate": 9.186736524689281e-05, | |
| "loss": 0.0242, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 2.5251396648044695, | |
| "grad_norm": 0.5399158000946045, | |
| "learning_rate": 9.177674603769204e-05, | |
| "loss": 0.0257, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 2.536312849162011, | |
| "grad_norm": 0.4618130326271057, | |
| "learning_rate": 9.168566996468983e-05, | |
| "loss": 0.0248, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 2.547486033519553, | |
| "grad_norm": 0.48366767168045044, | |
| "learning_rate": 9.159413802387951e-05, | |
| "loss": 0.0284, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 2.558659217877095, | |
| "grad_norm": 0.41343602538108826, | |
| "learning_rate": 9.150215121623974e-05, | |
| "loss": 0.0277, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 2.5698324022346366, | |
| "grad_norm": 0.3573741316795349, | |
| "learning_rate": 9.140971054772349e-05, | |
| "loss": 0.0217, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 2.5810055865921786, | |
| "grad_norm": 0.30859851837158203, | |
| "learning_rate": 9.131681702924713e-05, | |
| "loss": 0.026, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 2.5921787709497206, | |
| "grad_norm": 0.5140319466590881, | |
| "learning_rate": 9.122347167667926e-05, | |
| "loss": 0.0261, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 2.6033519553072626, | |
| "grad_norm": 0.257357120513916, | |
| "learning_rate": 9.112967551082973e-05, | |
| "loss": 0.033, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 2.6145251396648046, | |
| "grad_norm": 0.28950628638267517, | |
| "learning_rate": 9.103542955743835e-05, | |
| "loss": 0.0237, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 2.6256983240223466, | |
| "grad_norm": 0.4110758602619171, | |
| "learning_rate": 9.094073484716381e-05, | |
| "loss": 0.0284, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 2.636871508379888, | |
| "grad_norm": 0.4541243612766266, | |
| "learning_rate": 9.084559241557226e-05, | |
| "loss": 0.0278, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 2.64804469273743, | |
| "grad_norm": 0.24878370761871338, | |
| "learning_rate": 9.075000330312608e-05, | |
| "loss": 0.0243, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 2.659217877094972, | |
| "grad_norm": 0.3473913371562958, | |
| "learning_rate": 9.065396855517253e-05, | |
| "loss": 0.0216, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 2.6703910614525137, | |
| "grad_norm": 0.3158006966114044, | |
| "learning_rate": 9.055748922193219e-05, | |
| "loss": 0.0251, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 2.6815642458100557, | |
| "grad_norm": 0.3652605414390564, | |
| "learning_rate": 9.046056635848761e-05, | |
| "loss": 0.0236, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 2.6927374301675977, | |
| "grad_norm": 0.44528162479400635, | |
| "learning_rate": 9.036320102477169e-05, | |
| "loss": 0.0236, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 2.7039106145251397, | |
| "grad_norm": 0.3181734085083008, | |
| "learning_rate": 9.02653942855561e-05, | |
| "loss": 0.0263, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 2.7150837988826817, | |
| "grad_norm": 0.4195885956287384, | |
| "learning_rate": 9.016714721043971e-05, | |
| "loss": 0.0204, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 2.7262569832402237, | |
| "grad_norm": 0.3051789700984955, | |
| "learning_rate": 9.006846087383675e-05, | |
| "loss": 0.0253, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 2.7374301675977653, | |
| "grad_norm": 0.43227654695510864, | |
| "learning_rate": 8.996933635496523e-05, | |
| "loss": 0.0255, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 2.7486033519553073, | |
| "grad_norm": 0.4627783000469208, | |
| "learning_rate": 8.986977473783498e-05, | |
| "loss": 0.025, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 2.7597765363128492, | |
| "grad_norm": 0.43730294704437256, | |
| "learning_rate": 8.97697771112359e-05, | |
| "loss": 0.0265, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 2.770949720670391, | |
| "grad_norm": 0.3086725175380707, | |
| "learning_rate": 8.966934456872602e-05, | |
| "loss": 0.0239, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 2.782122905027933, | |
| "grad_norm": 0.22066396474838257, | |
| "learning_rate": 8.95684782086195e-05, | |
| "loss": 0.0235, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 2.793296089385475, | |
| "grad_norm": 0.397085577249527, | |
| "learning_rate": 8.946717913397476e-05, | |
| "loss": 0.0257, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 2.804469273743017, | |
| "grad_norm": 0.471504807472229, | |
| "learning_rate": 8.93654484525822e-05, | |
| "loss": 0.0233, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 2.815642458100559, | |
| "grad_norm": 0.3180745542049408, | |
| "learning_rate": 8.926328727695226e-05, | |
| "loss": 0.0237, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 2.826815642458101, | |
| "grad_norm": 0.3276680111885071, | |
| "learning_rate": 8.916069672430319e-05, | |
| "loss": 0.0194, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 2.8379888268156424, | |
| "grad_norm": 0.28021499514579773, | |
| "learning_rate": 8.905767791654884e-05, | |
| "loss": 0.0224, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 2.8491620111731844, | |
| "grad_norm": 0.3426031768321991, | |
| "learning_rate": 8.895423198028638e-05, | |
| "loss": 0.0308, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 2.8603351955307263, | |
| "grad_norm": 0.2376944124698639, | |
| "learning_rate": 8.885036004678402e-05, | |
| "loss": 0.024, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 2.871508379888268, | |
| "grad_norm": 0.4365091323852539, | |
| "learning_rate": 8.874606325196857e-05, | |
| "loss": 0.0217, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 2.88268156424581, | |
| "grad_norm": 0.3246183395385742, | |
| "learning_rate": 8.864134273641304e-05, | |
| "loss": 0.0287, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 2.893854748603352, | |
| "grad_norm": 0.5038343667984009, | |
| "learning_rate": 8.853619964532427e-05, | |
| "loss": 0.0308, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 2.905027932960894, | |
| "grad_norm": 0.4625000059604645, | |
| "learning_rate": 8.843063512853019e-05, | |
| "loss": 0.0247, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 2.916201117318436, | |
| "grad_norm": 0.32260429859161377, | |
| "learning_rate": 8.832465034046749e-05, | |
| "loss": 0.0228, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 2.927374301675978, | |
| "grad_norm": 0.2971498370170593, | |
| "learning_rate": 8.821824644016882e-05, | |
| "loss": 0.024, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 2.9385474860335195, | |
| "grad_norm": 0.357435941696167, | |
| "learning_rate": 8.811142459125019e-05, | |
| "loss": 0.0223, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 2.9497206703910615, | |
| "grad_norm": 0.28375473618507385, | |
| "learning_rate": 8.800418596189822e-05, | |
| "loss": 0.0197, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 2.9608938547486034, | |
| "grad_norm": 0.5048009157180786, | |
| "learning_rate": 8.789653172485737e-05, | |
| "loss": 0.0226, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 2.972067039106145, | |
| "grad_norm": 0.5440890789031982, | |
| "learning_rate": 8.778846305741715e-05, | |
| "loss": 0.0317, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 2.983240223463687, | |
| "grad_norm": 0.4744352698326111, | |
| "learning_rate": 8.767998114139918e-05, | |
| "loss": 0.0275, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 2.994413407821229, | |
| "grad_norm": 0.30766549706459045, | |
| "learning_rate": 8.757108716314429e-05, | |
| "loss": 0.0287, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 3.005586592178771, | |
| "grad_norm": 0.34162425994873047, | |
| "learning_rate": 8.746178231349962e-05, | |
| "loss": 0.0268, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 3.016759776536313, | |
| "grad_norm": 0.7820532917976379, | |
| "learning_rate": 8.735206778780549e-05, | |
| "loss": 0.0353, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 3.0279329608938546, | |
| "grad_norm": 0.40746408700942993, | |
| "learning_rate": 8.724194478588234e-05, | |
| "loss": 0.0264, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 3.0391061452513966, | |
| "grad_norm": 0.4193464517593384, | |
| "learning_rate": 8.713141451201772e-05, | |
| "loss": 0.0296, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 3.0502793296089385, | |
| "grad_norm": 0.2858193814754486, | |
| "learning_rate": 8.702047817495295e-05, | |
| "loss": 0.0277, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 3.0614525139664805, | |
| "grad_norm": 0.35246649384498596, | |
| "learning_rate": 8.69091369878701e-05, | |
| "loss": 0.0291, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 3.0726256983240225, | |
| "grad_norm": 0.29800596833229065, | |
| "learning_rate": 8.679739216837849e-05, | |
| "loss": 0.0237, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 3.083798882681564, | |
| "grad_norm": 0.3077072501182556, | |
| "learning_rate": 8.66852449385016e-05, | |
| "loss": 0.0238, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 3.094972067039106, | |
| "grad_norm": 0.44679829478263855, | |
| "learning_rate": 8.657269652466356e-05, | |
| "loss": 0.0264, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 3.106145251396648, | |
| "grad_norm": 0.6781097650527954, | |
| "learning_rate": 8.645974815767577e-05, | |
| "loss": 0.0263, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 3.11731843575419, | |
| "grad_norm": 0.37409693002700806, | |
| "learning_rate": 8.634640107272351e-05, | |
| "loss": 0.0217, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 3.1284916201117317, | |
| "grad_norm": 0.27593672275543213, | |
| "learning_rate": 8.623265650935234e-05, | |
| "loss": 0.0222, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 3.1396648044692737, | |
| "grad_norm": 0.3805045485496521, | |
| "learning_rate": 8.611851571145456e-05, | |
| "loss": 0.0242, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 3.1508379888268156, | |
| "grad_norm": 0.3491942882537842, | |
| "learning_rate": 8.600397992725566e-05, | |
| "loss": 0.0177, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 3.1620111731843576, | |
| "grad_norm": 0.4499363899230957, | |
| "learning_rate": 8.588905040930061e-05, | |
| "loss": 0.0235, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 3.1731843575418996, | |
| "grad_norm": 0.3043382167816162, | |
| "learning_rate": 8.577372841444022e-05, | |
| "loss": 0.0219, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 3.184357541899441, | |
| "grad_norm": 0.47189202904701233, | |
| "learning_rate": 8.565801520381736e-05, | |
| "loss": 0.0244, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 3.195530726256983, | |
| "grad_norm": 0.3318402171134949, | |
| "learning_rate": 8.554191204285313e-05, | |
| "loss": 0.0253, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 3.206703910614525, | |
| "grad_norm": 0.3320620059967041, | |
| "learning_rate": 8.542542020123315e-05, | |
| "loss": 0.0217, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 3.217877094972067, | |
| "grad_norm": 0.3031757175922394, | |
| "learning_rate": 8.530854095289347e-05, | |
| "loss": 0.0237, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 3.2290502793296088, | |
| "grad_norm": 0.31146520376205444, | |
| "learning_rate": 8.519127557600688e-05, | |
| "loss": 0.0227, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 3.2402234636871508, | |
| "grad_norm": 0.4903692305088043, | |
| "learning_rate": 8.507362535296871e-05, | |
| "loss": 0.028, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 3.2513966480446927, | |
| "grad_norm": 0.43727049231529236, | |
| "learning_rate": 8.495559157038299e-05, | |
| "loss": 0.0323, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 3.2625698324022347, | |
| "grad_norm": 0.3779037892818451, | |
| "learning_rate": 8.483717551904823e-05, | |
| "loss": 0.022, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 3.2737430167597763, | |
| "grad_norm": 0.3431920111179352, | |
| "learning_rate": 8.47183784939434e-05, | |
| "loss": 0.0218, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 3.2849162011173183, | |
| "grad_norm": 0.3281972408294678, | |
| "learning_rate": 8.459920179421374e-05, | |
| "loss": 0.0228, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 3.2960893854748603, | |
| "grad_norm": 0.30141904950141907, | |
| "learning_rate": 8.447964672315656e-05, | |
| "loss": 0.0201, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 3.3072625698324023, | |
| "grad_norm": 0.29672929644584656, | |
| "learning_rate": 8.435971458820692e-05, | |
| "loss": 0.0229, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 3.3184357541899443, | |
| "grad_norm": 0.3677186369895935, | |
| "learning_rate": 8.423940670092345e-05, | |
| "loss": 0.0218, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 3.329608938547486, | |
| "grad_norm": 0.21716094017028809, | |
| "learning_rate": 8.411872437697394e-05, | |
| "loss": 0.0219, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 3.340782122905028, | |
| "grad_norm": 0.25246715545654297, | |
| "learning_rate": 8.399766893612096e-05, | |
| "loss": 0.0222, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 3.35195530726257, | |
| "grad_norm": 0.3142368793487549, | |
| "learning_rate": 8.38762417022074e-05, | |
| "loss": 0.0212, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 3.363128491620112, | |
| "grad_norm": 0.3489794433116913, | |
| "learning_rate": 8.375444400314204e-05, | |
| "loss": 0.0203, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 3.3743016759776534, | |
| "grad_norm": 0.5527549982070923, | |
| "learning_rate": 8.3632277170885e-05, | |
| "loss": 0.0264, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 3.3854748603351954, | |
| "grad_norm": 0.3032941520214081, | |
| "learning_rate": 8.350974254143318e-05, | |
| "loss": 0.0233, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 3.3966480446927374, | |
| "grad_norm": 0.2869105637073517, | |
| "learning_rate": 8.338684145480566e-05, | |
| "loss": 0.0203, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 3.4078212290502794, | |
| "grad_norm": 0.3869854807853699, | |
| "learning_rate": 8.326357525502904e-05, | |
| "loss": 0.0231, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 3.4189944134078214, | |
| "grad_norm": 0.3190396726131439, | |
| "learning_rate": 8.313994529012273e-05, | |
| "loss": 0.0256, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 3.430167597765363, | |
| "grad_norm": 0.19806832075119019, | |
| "learning_rate": 8.301595291208422e-05, | |
| "loss": 0.0212, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 3.441340782122905, | |
| "grad_norm": 0.22852574288845062, | |
| "learning_rate": 8.289159947687427e-05, | |
| "loss": 0.0215, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 3.452513966480447, | |
| "grad_norm": 0.3609927296638489, | |
| "learning_rate": 8.276688634440216e-05, | |
| "loss": 0.0213, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 3.463687150837989, | |
| "grad_norm": 0.5330267548561096, | |
| "learning_rate": 8.26418148785107e-05, | |
| "loss": 0.022, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 3.4748603351955305, | |
| "grad_norm": 0.32180726528167725, | |
| "learning_rate": 8.251638644696141e-05, | |
| "loss": 0.0218, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 3.4860335195530725, | |
| "grad_norm": 0.36455973982810974, | |
| "learning_rate": 8.23906024214195e-05, | |
| "loss": 0.0214, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 3.4972067039106145, | |
| "grad_norm": 0.28950798511505127, | |
| "learning_rate": 8.226446417743897e-05, | |
| "loss": 0.0209, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 3.5083798882681565, | |
| "grad_norm": 0.2672787308692932, | |
| "learning_rate": 8.213797309444742e-05, | |
| "loss": 0.0212, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 3.5195530726256985, | |
| "grad_norm": 0.2959877550601959, | |
| "learning_rate": 8.201113055573105e-05, | |
| "loss": 0.018, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 3.5307262569832405, | |
| "grad_norm": 0.23967130482196808, | |
| "learning_rate": 8.188393794841958e-05, | |
| "loss": 0.0183, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 3.541899441340782, | |
| "grad_norm": 0.39975592494010925, | |
| "learning_rate": 8.175639666347094e-05, | |
| "loss": 0.0242, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 3.553072625698324, | |
| "grad_norm": 0.5880351662635803, | |
| "learning_rate": 8.162850809565623e-05, | |
| "loss": 0.0241, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 3.564245810055866, | |
| "grad_norm": 0.43386974930763245, | |
| "learning_rate": 8.150027364354431e-05, | |
| "loss": 0.0206, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 3.5754189944134076, | |
| "grad_norm": 0.38397714495658875, | |
| "learning_rate": 8.137169470948662e-05, | |
| "loss": 0.0246, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 3.5865921787709496, | |
| "grad_norm": 0.6550553441047668, | |
| "learning_rate": 8.124277269960179e-05, | |
| "loss": 0.027, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 3.5977653631284916, | |
| "grad_norm": 0.41087210178375244, | |
| "learning_rate": 8.111350902376023e-05, | |
| "loss": 0.0231, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 3.6089385474860336, | |
| "grad_norm": 0.3772335946559906, | |
| "learning_rate": 8.098390509556883e-05, | |
| "loss": 0.0212, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 3.6201117318435756, | |
| "grad_norm": 0.3334108293056488, | |
| "learning_rate": 8.085396233235536e-05, | |
| "loss": 0.0165, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 3.631284916201117, | |
| "grad_norm": 0.30168646574020386, | |
| "learning_rate": 8.072368215515306e-05, | |
| "loss": 0.0162, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 3.642458100558659, | |
| "grad_norm": 0.37991204857826233, | |
| "learning_rate": 8.059306598868506e-05, | |
| "loss": 0.0261, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 3.653631284916201, | |
| "grad_norm": 0.3721810281276703, | |
| "learning_rate": 8.046211526134888e-05, | |
| "loss": 0.0215, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 3.664804469273743, | |
| "grad_norm": 0.2584337592124939, | |
| "learning_rate": 8.033083140520065e-05, | |
| "loss": 0.0183, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 3.6759776536312847, | |
| "grad_norm": 0.327175110578537, | |
| "learning_rate": 8.019921585593962e-05, | |
| "loss": 0.0242, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 3.6871508379888267, | |
| "grad_norm": 0.31137314438819885, | |
| "learning_rate": 8.006727005289232e-05, | |
| "loss": 0.0182, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 3.6983240223463687, | |
| "grad_norm": 0.30332621932029724, | |
| "learning_rate": 7.993499543899692e-05, | |
| "loss": 0.0204, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 3.7094972067039107, | |
| "grad_norm": 0.1747010052204132, | |
| "learning_rate": 7.980239346078742e-05, | |
| "loss": 0.0172, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 3.7206703910614527, | |
| "grad_norm": 0.33679547905921936, | |
| "learning_rate": 7.966946556837778e-05, | |
| "loss": 0.0217, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 3.7318435754189943, | |
| "grad_norm": 0.2536109387874603, | |
| "learning_rate": 7.953621321544616e-05, | |
| "loss": 0.0184, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 3.7430167597765363, | |
| "grad_norm": 0.18687337636947632, | |
| "learning_rate": 7.940263785921896e-05, | |
| "loss": 0.0214, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 3.7541899441340782, | |
| "grad_norm": 0.28626778721809387, | |
| "learning_rate": 7.926874096045482e-05, | |
| "loss": 0.0195, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 3.7653631284916202, | |
| "grad_norm": 0.39891865849494934, | |
| "learning_rate": 7.913452398342881e-05, | |
| "loss": 0.0245, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 3.776536312849162, | |
| "grad_norm": 0.4357566237449646, | |
| "learning_rate": 7.89999883959163e-05, | |
| "loss": 0.0284, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 3.787709497206704, | |
| "grad_norm": 0.3913089334964752, | |
| "learning_rate": 7.886513566917687e-05, | |
| "loss": 0.0211, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 3.798882681564246, | |
| "grad_norm": 0.35088130831718445, | |
| "learning_rate": 7.872996727793838e-05, | |
| "loss": 0.0235, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 3.810055865921788, | |
| "grad_norm": 0.26693660020828247, | |
| "learning_rate": 7.859448470038069e-05, | |
| "loss": 0.0229, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 3.82122905027933, | |
| "grad_norm": 0.3225536048412323, | |
| "learning_rate": 7.845868941811956e-05, | |
| "loss": 0.0209, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 3.8324022346368714, | |
| "grad_norm": 0.43941548466682434, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.0194, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 3.8435754189944134, | |
| "grad_norm": 0.3178078532218933, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0182, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 3.8547486033519553, | |
| "grad_norm": 0.28912386298179626, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0192, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 3.8659217877094973, | |
| "grad_norm": 0.2994902729988098, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0206, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 3.877094972067039, | |
| "grad_norm": 0.32059934735298157, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.0177, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 3.888268156424581, | |
| "grad_norm": 0.36329320073127747, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.0184, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 3.899441340782123, | |
| "grad_norm": 0.2955698072910309, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.0206, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 3.910614525139665, | |
| "grad_norm": 0.2274722009897232, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.0254, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 3.921787709497207, | |
| "grad_norm": 0.31275102496147156, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0241, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 3.9329608938547485, | |
| "grad_norm": 0.2409592717885971, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.0206, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 3.9441340782122905, | |
| "grad_norm": 0.30333396792411804, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.0229, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 3.9553072625698324, | |
| "grad_norm": 0.3438279330730438, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.0193, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 3.9664804469273744, | |
| "grad_norm": 0.2038949877023697, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0218, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 3.977653631284916, | |
| "grad_norm": 0.32184457778930664, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.019, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 3.988826815642458, | |
| "grad_norm": 0.35373061895370483, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.0219, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.6127492785453796, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0211, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 4.011173184357542, | |
| "grad_norm": 0.32141149044036865, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.023, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 4.022346368715084, | |
| "grad_norm": 0.39776670932769775, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0234, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 4.033519553072626, | |
| "grad_norm": 0.2874944806098938, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0174, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 4.044692737430168, | |
| "grad_norm": 0.30579710006713867, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.0182, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 4.055865921787709, | |
| "grad_norm": 0.3727220296859741, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.0198, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 4.067039106145251, | |
| "grad_norm": 0.3621104955673218, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0198, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 4.078212290502793, | |
| "grad_norm": 0.2716482877731323, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0246, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 4.089385474860335, | |
| "grad_norm": 0.28553998470306396, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.0183, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 4.100558659217877, | |
| "grad_norm": 0.3745405375957489, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0205, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 4.111731843575419, | |
| "grad_norm": 0.26210570335388184, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0187, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 4.122905027932961, | |
| "grad_norm": 0.385082483291626, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.026, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 4.134078212290503, | |
| "grad_norm": 0.2971392571926117, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0189, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 4.145251396648045, | |
| "grad_norm": 0.2730582356452942, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0182, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 4.156424581005586, | |
| "grad_norm": 0.22895021736621857, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0234, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 4.167597765363128, | |
| "grad_norm": 0.35224512219429016, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0186, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 4.17877094972067, | |
| "grad_norm": 0.36178138852119446, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0182, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 4.189944134078212, | |
| "grad_norm": 0.3729289770126343, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0254, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 4.201117318435754, | |
| "grad_norm": 0.22079184651374817, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0188, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 4.212290502793296, | |
| "grad_norm": 0.3875645101070404, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.025, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 4.223463687150838, | |
| "grad_norm": 0.42908117175102234, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0225, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 4.23463687150838, | |
| "grad_norm": 0.3162376582622528, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.0176, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 4.245810055865922, | |
| "grad_norm": 0.2734178900718689, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0204, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 4.256983240223463, | |
| "grad_norm": 0.20637495815753937, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0197, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 4.268156424581005, | |
| "grad_norm": 0.3378507196903229, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.022, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 4.279329608938547, | |
| "grad_norm": 0.33755922317504883, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0257, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 4.290502793296089, | |
| "grad_norm": 0.5192545652389526, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0176, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 4.301675977653631, | |
| "grad_norm": 0.38737359642982483, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0196, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 4.312849162011173, | |
| "grad_norm": 0.26216524839401245, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.025, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 4.324022346368715, | |
| "grad_norm": 0.19758957624435425, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0203, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 4.335195530726257, | |
| "grad_norm": 0.4048031270503998, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0263, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 4.346368715083799, | |
| "grad_norm": 0.29149582982063293, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0185, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 4.35754189944134, | |
| "grad_norm": 0.27375537157058716, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.0249, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 4.368715083798882, | |
| "grad_norm": 0.3518116772174835, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0184, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 4.379888268156424, | |
| "grad_norm": 0.21699579060077667, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.0205, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 4.391061452513966, | |
| "grad_norm": 0.28446510434150696, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0172, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 4.402234636871508, | |
| "grad_norm": 0.27037110924720764, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0192, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 4.41340782122905, | |
| "grad_norm": 0.2583399713039398, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0199, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 4.424581005586592, | |
| "grad_norm": 0.35920700430870056, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0189, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 4.435754189944134, | |
| "grad_norm": 0.26789629459381104, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0199, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 4.446927374301676, | |
| "grad_norm": 0.3364312946796417, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0162, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 4.4581005586592175, | |
| "grad_norm": 0.3453218340873718, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0226, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 4.4692737430167595, | |
| "grad_norm": 0.2362622171640396, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0204, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 4.4804469273743015, | |
| "grad_norm": 0.28312528133392334, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0205, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 4.4916201117318435, | |
| "grad_norm": 0.37684956192970276, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0192, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 4.5027932960893855, | |
| "grad_norm": 0.2909739315509796, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.0187, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 4.5139664804469275, | |
| "grad_norm": 0.3545827865600586, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0217, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 4.5251396648044695, | |
| "grad_norm": 0.30086764693260193, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0186, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 4.5363128491620115, | |
| "grad_norm": 0.32489290833473206, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0232, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 4.547486033519553, | |
| "grad_norm": 0.37412402033805847, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0189, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 4.558659217877095, | |
| "grad_norm": 0.39823004603385925, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0175, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 4.569832402234637, | |
| "grad_norm": 0.2889609932899475, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0185, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 4.581005586592179, | |
| "grad_norm": 0.4276615381240845, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0206, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 4.592178770949721, | |
| "grad_norm": 0.2940601706504822, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0227, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 4.603351955307263, | |
| "grad_norm": 0.349774032831192, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0198, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 4.614525139664805, | |
| "grad_norm": 0.2504842281341553, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0202, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 4.625698324022347, | |
| "grad_norm": 0.2703098654747009, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0228, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 4.636871508379889, | |
| "grad_norm": 0.43742501735687256, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.0202, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 4.648044692737431, | |
| "grad_norm": 0.22369062900543213, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0144, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 4.659217877094972, | |
| "grad_norm": 0.32691171765327454, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0217, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 4.670391061452514, | |
| "grad_norm": 0.31471747159957886, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.0196, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 4.681564245810056, | |
| "grad_norm": 0.36191728711128235, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0266, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 4.692737430167598, | |
| "grad_norm": 0.33021312952041626, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0163, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 4.70391061452514, | |
| "grad_norm": 0.220609650015831, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0194, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 4.715083798882682, | |
| "grad_norm": 0.24448662996292114, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.018, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 4.726256983240224, | |
| "grad_norm": 0.2669801414012909, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.0177, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 4.737430167597766, | |
| "grad_norm": 0.3137591481208801, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0186, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 4.748603351955307, | |
| "grad_norm": 0.24421556293964386, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.019, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 4.759776536312849, | |
| "grad_norm": 0.2204665094614029, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0191, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 4.770949720670391, | |
| "grad_norm": 0.29891934990882874, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0169, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 4.782122905027933, | |
| "grad_norm": 0.23533323407173157, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0133, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 4.793296089385475, | |
| "grad_norm": 0.4082735478878021, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.018, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 4.804469273743017, | |
| "grad_norm": 0.17240087687969208, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0182, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 4.815642458100559, | |
| "grad_norm": 0.2604635953903198, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.014, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 4.826815642458101, | |
| "grad_norm": 0.26895982027053833, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.018, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 4.837988826815643, | |
| "grad_norm": 0.136953666806221, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0158, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 4.849162011173185, | |
| "grad_norm": 0.2494569569826126, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0141, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 4.860335195530726, | |
| "grad_norm": 0.3353077471256256, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0175, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 4.871508379888268, | |
| "grad_norm": 0.20469434559345245, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0155, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 4.88268156424581, | |
| "grad_norm": 0.2275722175836563, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0185, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 4.893854748603352, | |
| "grad_norm": 0.22833925485610962, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0177, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 4.905027932960894, | |
| "grad_norm": 0.33448755741119385, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0161, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 4.916201117318436, | |
| "grad_norm": 0.2128269076347351, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0222, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 4.927374301675978, | |
| "grad_norm": 0.23171773552894592, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.02, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 4.93854748603352, | |
| "grad_norm": 0.3888982832431793, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0208, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 4.949720670391061, | |
| "grad_norm": 0.17369918525218964, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.0193, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 4.960893854748603, | |
| "grad_norm": 0.39287957549095154, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.0215, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 4.972067039106145, | |
| "grad_norm": 0.4172436594963074, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0168, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 4.983240223463687, | |
| "grad_norm": 0.22985272109508514, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0178, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 4.994413407821229, | |
| "grad_norm": 0.32593294978141785, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0147, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 5.005586592178771, | |
| "grad_norm": 0.3171547055244446, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0194, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 5.016759776536313, | |
| "grad_norm": 0.3192219138145447, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0183, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 5.027932960893855, | |
| "grad_norm": 0.4188298285007477, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0185, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 5.039106145251397, | |
| "grad_norm": 0.31511184573173523, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.02, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 5.050279329608939, | |
| "grad_norm": 0.3869243264198303, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0206, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 5.06145251396648, | |
| "grad_norm": 0.16901937127113342, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.017, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 5.072625698324022, | |
| "grad_norm": 0.22027498483657837, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0161, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 5.083798882681564, | |
| "grad_norm": 0.3079332411289215, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.018, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 5.094972067039106, | |
| "grad_norm": 0.24744749069213867, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0143, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 5.106145251396648, | |
| "grad_norm": 0.3199874460697174, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0184, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 5.11731843575419, | |
| "grad_norm": 0.25573620200157166, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0155, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 5.128491620111732, | |
| "grad_norm": 0.2456025332212448, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0182, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 5.139664804469274, | |
| "grad_norm": 0.28563737869262695, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0138, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 5.150837988826815, | |
| "grad_norm": 0.24037277698516846, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0199, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 5.162011173184357, | |
| "grad_norm": 0.33862486481666565, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0154, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 5.173184357541899, | |
| "grad_norm": 0.23848088085651398, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0205, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 5.184357541899441, | |
| "grad_norm": 0.29244279861450195, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0159, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 5.195530726256983, | |
| "grad_norm": 0.19082193076610565, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0197, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 5.206703910614525, | |
| "grad_norm": 0.2830309569835663, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0223, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 5.217877094972067, | |
| "grad_norm": 0.24866537749767303, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0195, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 5.229050279329609, | |
| "grad_norm": 0.3959875702857971, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0195, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 5.240223463687151, | |
| "grad_norm": 0.25344517827033997, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0185, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 5.251396648044693, | |
| "grad_norm": 0.2909165918827057, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0227, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 5.262569832402234, | |
| "grad_norm": 0.34014803171157837, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.016, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 5.273743016759776, | |
| "grad_norm": 0.28902870416641235, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0175, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 5.284916201117318, | |
| "grad_norm": 0.3000399172306061, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0189, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 5.29608938547486, | |
| "grad_norm": 0.22094817459583282, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.0149, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 5.307262569832402, | |
| "grad_norm": 0.3182206451892853, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0157, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 5.318435754189944, | |
| "grad_norm": 0.3035812973976135, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0158, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 5.329608938547486, | |
| "grad_norm": 0.27920255064964294, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0177, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 5.340782122905028, | |
| "grad_norm": 0.40788498520851135, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0198, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 5.351955307262569, | |
| "grad_norm": 0.3927519619464874, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0157, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 5.363128491620111, | |
| "grad_norm": 0.4437953531742096, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0181, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 5.374301675977653, | |
| "grad_norm": 0.21239735186100006, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0167, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 5.385474860335195, | |
| "grad_norm": 0.25831475853919983, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.0141, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 5.396648044692737, | |
| "grad_norm": 0.22870288789272308, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0166, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 5.407821229050279, | |
| "grad_norm": 0.1946844905614853, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0225, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 5.418994413407821, | |
| "grad_norm": 0.22310243546962738, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0177, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 5.430167597765363, | |
| "grad_norm": 0.3094041049480438, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0169, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 5.441340782122905, | |
| "grad_norm": 0.2887223958969116, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.0219, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 5.452513966480447, | |
| "grad_norm": 0.4509047567844391, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0197, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 5.4636871508379885, | |
| "grad_norm": 0.2928324043750763, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0133, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 5.4748603351955305, | |
| "grad_norm": 0.24095076322555542, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0186, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 5.4860335195530725, | |
| "grad_norm": 0.31689223647117615, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0185, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 5.4972067039106145, | |
| "grad_norm": 0.23025156557559967, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0136, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 5.5083798882681565, | |
| "grad_norm": 0.20641502737998962, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0143, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 5.5195530726256985, | |
| "grad_norm": 0.25677597522735596, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0135, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 5.5307262569832405, | |
| "grad_norm": 0.29458826780319214, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0177, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 5.5418994413407825, | |
| "grad_norm": 0.2180536687374115, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0143, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 5.553072625698324, | |
| "grad_norm": 0.20262886583805084, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0179, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 5.564245810055866, | |
| "grad_norm": 0.2030559480190277, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0219, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 5.575418994413408, | |
| "grad_norm": 0.2678528428077698, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0127, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 5.58659217877095, | |
| "grad_norm": 0.23892712593078613, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0163, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 5.597765363128492, | |
| "grad_norm": 0.22284139692783356, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0179, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 5.608938547486034, | |
| "grad_norm": 0.36922913789749146, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0203, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 5.620111731843576, | |
| "grad_norm": 0.33381739258766174, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0196, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 5.631284916201118, | |
| "grad_norm": 0.27770885825157166, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0153, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 5.64245810055866, | |
| "grad_norm": 0.2743906080722809, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0168, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 5.653631284916202, | |
| "grad_norm": 0.19876563549041748, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.0131, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 5.664804469273743, | |
| "grad_norm": 0.24195827543735504, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0178, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 5.675977653631285, | |
| "grad_norm": 0.29250091314315796, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.0143, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 5.687150837988827, | |
| "grad_norm": 0.1904466301202774, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0142, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 5.698324022346369, | |
| "grad_norm": 0.23230841755867004, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0179, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 5.709497206703911, | |
| "grad_norm": 0.17707963287830353, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.015, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 5.720670391061453, | |
| "grad_norm": 0.18270374834537506, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.022, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 5.731843575418995, | |
| "grad_norm": 0.31562599539756775, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0151, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 5.743016759776537, | |
| "grad_norm": 0.26111161708831787, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0171, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 5.754189944134078, | |
| "grad_norm": 0.2525655925273895, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0134, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 5.76536312849162, | |
| "grad_norm": 0.22460895776748657, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0194, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 5.776536312849162, | |
| "grad_norm": 0.30684345960617065, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0137, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 5.787709497206704, | |
| "grad_norm": 0.180323526263237, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0135, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 5.798882681564246, | |
| "grad_norm": 0.35692161321640015, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0189, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 5.810055865921788, | |
| "grad_norm": 0.13406284153461456, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0198, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 5.82122905027933, | |
| "grad_norm": 0.3202178478240967, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0181, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 5.832402234636872, | |
| "grad_norm": 0.2961965501308441, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0194, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 5.843575418994414, | |
| "grad_norm": 0.21282415091991425, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0175, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 5.854748603351956, | |
| "grad_norm": 0.2664795517921448, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0152, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 5.865921787709497, | |
| "grad_norm": 0.15621113777160645, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.0139, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 5.877094972067039, | |
| "grad_norm": 0.2446075975894928, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.015, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 5.888268156424581, | |
| "grad_norm": 0.19233182072639465, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.017, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 5.899441340782123, | |
| "grad_norm": 0.2763586938381195, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0157, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 5.910614525139665, | |
| "grad_norm": 0.31184709072113037, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0132, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 5.921787709497207, | |
| "grad_norm": 0.1821676641702652, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.0174, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 5.932960893854749, | |
| "grad_norm": 0.2709354758262634, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.0156, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 5.94413407821229, | |
| "grad_norm": 0.29008686542510986, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0133, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 5.955307262569832, | |
| "grad_norm": 0.2169903963804245, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.017, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 5.966480446927374, | |
| "grad_norm": 0.24227134883403778, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0164, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 5.977653631284916, | |
| "grad_norm": 0.16172274947166443, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0132, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 5.988826815642458, | |
| "grad_norm": 0.2996324598789215, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0199, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "grad_norm": 0.31539294123649597, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0154, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 6.011173184357542, | |
| "grad_norm": 0.29299336671829224, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0149, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 6.022346368715084, | |
| "grad_norm": 0.22472530603408813, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0147, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 6.033519553072626, | |
| "grad_norm": 0.21151946485042572, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0112, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 6.044692737430168, | |
| "grad_norm": 0.30609244108200073, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0171, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 6.055865921787709, | |
| "grad_norm": 0.22385366261005402, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0142, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 6.067039106145251, | |
| "grad_norm": 0.2809150516986847, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0134, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 6.078212290502793, | |
| "grad_norm": 0.27267298102378845, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0142, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 6.089385474860335, | |
| "grad_norm": 0.2646515667438507, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.018, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 6.100558659217877, | |
| "grad_norm": 0.2397773712873459, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0111, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 6.111731843575419, | |
| "grad_norm": 0.19921450316905975, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0127, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 6.122905027932961, | |
| "grad_norm": 0.21188603341579437, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0112, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 6.134078212290503, | |
| "grad_norm": 0.17865359783172607, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0141, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 6.145251396648045, | |
| "grad_norm": 0.29245489835739136, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.012, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 6.156424581005586, | |
| "grad_norm": 0.2389625459909439, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.011, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 6.167597765363128, | |
| "grad_norm": 0.22458067536354065, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0123, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 6.17877094972067, | |
| "grad_norm": 0.22981077432632446, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.017, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 6.189944134078212, | |
| "grad_norm": 0.32632771134376526, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0199, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 6.201117318435754, | |
| "grad_norm": 0.3364503085613251, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0117, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 6.212290502793296, | |
| "grad_norm": 0.24833473563194275, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0144, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 6.223463687150838, | |
| "grad_norm": 0.23281876742839813, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0124, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 6.23463687150838, | |
| "grad_norm": 0.1992461383342743, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0129, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 6.245810055865922, | |
| "grad_norm": 0.1674651801586151, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.0163, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 6.256983240223463, | |
| "grad_norm": 0.13928933441638947, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.017, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 6.268156424581005, | |
| "grad_norm": 0.2658988833427429, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0172, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 6.279329608938547, | |
| "grad_norm": 0.3671327233314514, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0154, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 6.290502793296089, | |
| "grad_norm": 0.21535873413085938, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0125, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 6.301675977653631, | |
| "grad_norm": 0.2724049687385559, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0131, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 6.312849162011173, | |
| "grad_norm": 0.27708297967910767, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0156, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 6.324022346368715, | |
| "grad_norm": 0.1550014615058899, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0114, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 6.335195530726257, | |
| "grad_norm": 0.31002509593963623, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.015, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 6.346368715083799, | |
| "grad_norm": 0.21792708337306976, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0139, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 6.35754189944134, | |
| "grad_norm": 0.3365803062915802, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.012, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 6.368715083798882, | |
| "grad_norm": 0.09996845573186874, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0151, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 6.379888268156424, | |
| "grad_norm": 0.1910828799009323, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.0141, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 6.391061452513966, | |
| "grad_norm": 0.20777705311775208, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0169, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 6.402234636871508, | |
| "grad_norm": 0.2364307940006256, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0171, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 6.41340782122905, | |
| "grad_norm": 0.2576703727245331, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0128, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 6.424581005586592, | |
| "grad_norm": 0.2906484603881836, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0132, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 6.435754189944134, | |
| "grad_norm": 0.20441843569278717, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0119, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 6.446927374301676, | |
| "grad_norm": 0.2331756353378296, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0129, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 6.4581005586592175, | |
| "grad_norm": 0.19953589141368866, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0129, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 6.4692737430167595, | |
| "grad_norm": 0.2749684154987335, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0149, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 6.4804469273743015, | |
| "grad_norm": 0.30020931363105774, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0156, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 6.4916201117318435, | |
| "grad_norm": 0.21630924940109253, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0107, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 6.5027932960893855, | |
| "grad_norm": 0.3143405020236969, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0126, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 6.5139664804469275, | |
| "grad_norm": 0.26806026697158813, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.015, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 6.5251396648044695, | |
| "grad_norm": 0.1940716654062271, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0114, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 6.5363128491620115, | |
| "grad_norm": 0.21158477663993835, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0148, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 6.547486033519553, | |
| "grad_norm": 0.2455819994211197, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0184, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 6.558659217877095, | |
| "grad_norm": 0.2907981872558594, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0136, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 6.569832402234637, | |
| "grad_norm": 0.20942939817905426, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0184, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 6.581005586592179, | |
| "grad_norm": 0.27541545033454895, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0166, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 6.592178770949721, | |
| "grad_norm": 0.23213356733322144, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0138, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 6.603351955307263, | |
| "grad_norm": 0.18830789625644684, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0165, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 6.614525139664805, | |
| "grad_norm": 0.27536147832870483, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.015, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 6.625698324022347, | |
| "grad_norm": 0.3334041237831116, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0142, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 6.636871508379889, | |
| "grad_norm": 0.24735760688781738, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0113, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 6.648044692737431, | |
| "grad_norm": 0.23135212063789368, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0113, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 6.659217877094972, | |
| "grad_norm": 0.25526630878448486, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.0129, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 6.670391061452514, | |
| "grad_norm": 0.285518079996109, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0122, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 6.681564245810056, | |
| "grad_norm": 0.2192530333995819, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0136, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 6.692737430167598, | |
| "grad_norm": 0.191972017288208, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0097, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 6.70391061452514, | |
| "grad_norm": 0.23592734336853027, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0123, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 6.715083798882682, | |
| "grad_norm": 0.2808452546596527, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0128, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 6.726256983240224, | |
| "grad_norm": 0.2533952593803406, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0142, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 6.737430167597766, | |
| "grad_norm": 0.22106622159481049, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0128, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 6.748603351955307, | |
| "grad_norm": 0.3237411081790924, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.0144, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 6.759776536312849, | |
| "grad_norm": 0.2794744074344635, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0128, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 6.770949720670391, | |
| "grad_norm": 0.14924152195453644, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0109, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 6.782122905027933, | |
| "grad_norm": 0.28169867396354675, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0128, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 6.793296089385475, | |
| "grad_norm": 0.1610495001077652, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0144, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 6.804469273743017, | |
| "grad_norm": 0.2523607313632965, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0115, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 6.815642458100559, | |
| "grad_norm": 0.30607539415359497, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0134, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 6.826815642458101, | |
| "grad_norm": 0.2993146777153015, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0126, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 6.837988826815643, | |
| "grad_norm": 0.47923293709754944, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0138, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 6.849162011173185, | |
| "grad_norm": 0.28882816433906555, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0119, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 6.860335195530726, | |
| "grad_norm": 0.23749245703220367, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0116, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 6.871508379888268, | |
| "grad_norm": 0.25361862778663635, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0101, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 6.88268156424581, | |
| "grad_norm": 0.2321014702320099, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0119, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 6.893854748603352, | |
| "grad_norm": 0.21300271153450012, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0138, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 6.905027932960894, | |
| "grad_norm": 0.2646145820617676, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0146, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 6.916201117318436, | |
| "grad_norm": 0.19039613008499146, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0153, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 6.927374301675978, | |
| "grad_norm": 0.29607996344566345, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0135, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 6.93854748603352, | |
| "grad_norm": 0.2967067062854767, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0114, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 6.949720670391061, | |
| "grad_norm": 0.17131175100803375, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0125, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 6.960893854748603, | |
| "grad_norm": 0.24170298874378204, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0137, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 6.972067039106145, | |
| "grad_norm": 0.2850925624370575, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0143, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 6.983240223463687, | |
| "grad_norm": 0.20192062854766846, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0113, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 6.994413407821229, | |
| "grad_norm": 0.20863600075244904, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0141, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 7.005586592178771, | |
| "grad_norm": 0.23733340203762054, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0163, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 7.016759776536313, | |
| "grad_norm": 0.21575501561164856, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0142, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 7.027932960893855, | |
| "grad_norm": 0.2600550949573517, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0162, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 7.039106145251397, | |
| "grad_norm": 0.24853667616844177, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0101, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 7.050279329608939, | |
| "grad_norm": 0.19858330488204956, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0107, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 7.06145251396648, | |
| "grad_norm": 0.29044222831726074, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0115, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 7.072625698324022, | |
| "grad_norm": 0.2202584445476532, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0131, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 7.083798882681564, | |
| "grad_norm": 0.15139614045619965, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0133, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 7.094972067039106, | |
| "grad_norm": 0.17525900900363922, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0117, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 7.106145251396648, | |
| "grad_norm": 0.16138982772827148, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0096, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 7.11731843575419, | |
| "grad_norm": 0.15477116405963898, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0132, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 7.128491620111732, | |
| "grad_norm": 0.21245935559272766, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0129, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 7.139664804469274, | |
| "grad_norm": 0.21567586064338684, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.0118, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 7.150837988826815, | |
| "grad_norm": 0.23360063135623932, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0138, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 7.162011173184357, | |
| "grad_norm": 0.229034423828125, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0117, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 7.173184357541899, | |
| "grad_norm": 0.19448056817054749, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0099, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 7.184357541899441, | |
| "grad_norm": 0.12511207163333893, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0118, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 7.195530726256983, | |
| "grad_norm": 0.1505567580461502, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.0093, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 7.206703910614525, | |
| "grad_norm": 0.17363592982292175, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0099, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 7.217877094972067, | |
| "grad_norm": 0.14407849311828613, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0091, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 7.229050279329609, | |
| "grad_norm": 0.29812091588974, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0107, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 7.240223463687151, | |
| "grad_norm": 0.19162695109844208, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0112, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 7.251396648044693, | |
| "grad_norm": 0.19228880107402802, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0104, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 7.262569832402234, | |
| "grad_norm": 0.20812398195266724, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0094, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 7.273743016759776, | |
| "grad_norm": 0.23238833248615265, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0123, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 7.284916201117318, | |
| "grad_norm": 0.2448129653930664, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0118, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 7.29608938547486, | |
| "grad_norm": 0.260628879070282, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0146, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 7.307262569832402, | |
| "grad_norm": 0.18143253028392792, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0129, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 7.318435754189944, | |
| "grad_norm": 0.2199028879404068, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0132, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 7.329608938547486, | |
| "grad_norm": 0.3112328052520752, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0154, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 7.340782122905028, | |
| "grad_norm": 0.37125319242477417, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0154, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 7.351955307262569, | |
| "grad_norm": 0.25329357385635376, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0176, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 7.363128491620111, | |
| "grad_norm": 0.20560233294963837, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0122, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 7.374301675977653, | |
| "grad_norm": 0.1722743958234787, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0095, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 7.385474860335195, | |
| "grad_norm": 0.24940669536590576, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0097, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 7.396648044692737, | |
| "grad_norm": 0.34006744623184204, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0142, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 7.407821229050279, | |
| "grad_norm": 0.25194820761680603, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0121, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 7.418994413407821, | |
| "grad_norm": 0.14867892861366272, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0098, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 7.430167597765363, | |
| "grad_norm": 0.1594792902469635, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0089, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 7.441340782122905, | |
| "grad_norm": 0.15060050785541534, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.0114, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 7.452513966480447, | |
| "grad_norm": 0.3347870707511902, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0118, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 7.4636871508379885, | |
| "grad_norm": 0.24415577948093414, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0118, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 7.4748603351955305, | |
| "grad_norm": 0.21810731291770935, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.009, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 7.4860335195530725, | |
| "grad_norm": 0.25780194997787476, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0117, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 7.4972067039106145, | |
| "grad_norm": 0.2586151957511902, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0107, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 7.5083798882681565, | |
| "grad_norm": 0.14306555688381195, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0107, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 7.5195530726256985, | |
| "grad_norm": 0.1135304793715477, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0105, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 7.5307262569832405, | |
| "grad_norm": 0.29263025522232056, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0144, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 7.5418994413407825, | |
| "grad_norm": 0.2370777130126953, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0119, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 7.553072625698324, | |
| "grad_norm": 0.19841358065605164, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.0093, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 7.564245810055866, | |
| "grad_norm": 0.11302493512630463, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.0093, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 7.575418994413408, | |
| "grad_norm": 0.20451287925243378, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0077, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 7.58659217877095, | |
| "grad_norm": 0.22324569523334503, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0117, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 7.597765363128492, | |
| "grad_norm": 0.17033512890338898, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0111, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 7.608938547486034, | |
| "grad_norm": 0.17891326546669006, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0144, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 7.620111731843576, | |
| "grad_norm": 0.1763540357351303, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0115, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 7.631284916201118, | |
| "grad_norm": 0.28346604108810425, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0077, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 7.64245810055866, | |
| "grad_norm": 0.30081337690353394, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.0111, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 7.653631284916202, | |
| "grad_norm": 0.2184545248746872, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.0114, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 7.664804469273743, | |
| "grad_norm": 0.22437766194343567, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0093, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 7.675977653631285, | |
| "grad_norm": 0.23848974704742432, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.0128, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 7.687150837988827, | |
| "grad_norm": 0.16164660453796387, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0122, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 7.698324022346369, | |
| "grad_norm": 0.18812666833400726, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0123, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 7.709497206703911, | |
| "grad_norm": 0.22258344292640686, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0112, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 7.720670391061453, | |
| "grad_norm": 0.25217947363853455, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.012, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 7.731843575418995, | |
| "grad_norm": 0.3171720504760742, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0122, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 7.743016759776537, | |
| "grad_norm": 0.14380356669425964, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0104, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 7.754189944134078, | |
| "grad_norm": 0.2037680745124817, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0085, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 7.76536312849162, | |
| "grad_norm": 0.16607998311519623, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0117, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 7.776536312849162, | |
| "grad_norm": 0.21972054243087769, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0107, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 7.787709497206704, | |
| "grad_norm": 0.21034041047096252, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0102, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 7.798882681564246, | |
| "grad_norm": 0.16491921246051788, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0128, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 7.810055865921788, | |
| "grad_norm": 0.34284743666648865, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0088, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 7.82122905027933, | |
| "grad_norm": 0.14697039127349854, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.012, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 7.832402234636872, | |
| "grad_norm": 0.16944429278373718, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.0094, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 7.843575418994414, | |
| "grad_norm": 0.13027068972587585, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0099, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 7.854748603351956, | |
| "grad_norm": 0.20734231173992157, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0087, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 7.865921787709497, | |
| "grad_norm": 0.11551401764154434, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.009, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 7.877094972067039, | |
| "grad_norm": 0.10556943714618683, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0122, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 7.888268156424581, | |
| "grad_norm": 0.14786183834075928, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.011, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 7.899441340782123, | |
| "grad_norm": 0.22342541813850403, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0121, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 7.910614525139665, | |
| "grad_norm": 0.20289261639118195, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.0105, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 7.921787709497207, | |
| "grad_norm": 0.18690979480743408, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0088, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 7.932960893854749, | |
| "grad_norm": 0.16849052906036377, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0103, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 7.94413407821229, | |
| "grad_norm": 0.19648511707782745, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.011, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 7.955307262569832, | |
| "grad_norm": 0.2015109360218048, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0094, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 7.966480446927374, | |
| "grad_norm": 0.1528281271457672, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0094, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 7.977653631284916, | |
| "grad_norm": 0.1692786067724228, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0127, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 7.988826815642458, | |
| "grad_norm": 0.24098820984363556, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0137, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "grad_norm": 0.15865695476531982, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0108, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 8.011173184357542, | |
| "grad_norm": 0.28263789415359497, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0104, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 8.022346368715084, | |
| "grad_norm": 0.24335156381130219, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0128, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 8.033519553072626, | |
| "grad_norm": 0.189461812376976, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0129, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 8.044692737430168, | |
| "grad_norm": 0.19054171442985535, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0112, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 8.05586592178771, | |
| "grad_norm": 0.23102259635925293, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0104, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 8.067039106145252, | |
| "grad_norm": 0.21439997851848602, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0089, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 8.078212290502794, | |
| "grad_norm": 0.19700543582439423, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0088, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 8.089385474860336, | |
| "grad_norm": 0.11026693880558014, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.0067, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 8.100558659217878, | |
| "grad_norm": 0.1278911978006363, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0093, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 8.111731843575418, | |
| "grad_norm": 0.24937446415424347, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0098, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 8.12290502793296, | |
| "grad_norm": 0.1850859522819519, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.0096, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 8.134078212290502, | |
| "grad_norm": 0.23052449524402618, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.009, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 8.145251396648044, | |
| "grad_norm": 0.23592029511928558, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0073, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 8.156424581005586, | |
| "grad_norm": 0.26551276445388794, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0122, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 8.167597765363128, | |
| "grad_norm": 0.16492727398872375, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0107, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 8.17877094972067, | |
| "grad_norm": 0.12274683266878128, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0112, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 8.189944134078212, | |
| "grad_norm": 0.21083441376686096, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.01, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 8.201117318435754, | |
| "grad_norm": 0.18899521231651306, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0108, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 8.212290502793296, | |
| "grad_norm": 0.2967348098754883, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.0094, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 8.223463687150838, | |
| "grad_norm": 0.10548775643110275, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.007, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 8.23463687150838, | |
| "grad_norm": 0.1476241946220398, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0107, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 8.245810055865922, | |
| "grad_norm": 0.20889157056808472, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.011, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 8.256983240223464, | |
| "grad_norm": 0.13902974128723145, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0096, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 8.268156424581006, | |
| "grad_norm": 0.16573816537857056, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0139, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 8.279329608938548, | |
| "grad_norm": 0.11391325294971466, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0086, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 8.29050279329609, | |
| "grad_norm": 0.13025890290737152, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0086, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 8.30167597765363, | |
| "grad_norm": 0.18285104632377625, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0097, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 8.312849162011172, | |
| "grad_norm": 0.18025439977645874, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0114, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 8.324022346368714, | |
| "grad_norm": 0.16312885284423828, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0095, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 8.335195530726256, | |
| "grad_norm": 0.3153015971183777, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0074, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 8.346368715083798, | |
| "grad_norm": 0.10824255645275116, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0111, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 8.35754189944134, | |
| "grad_norm": 0.20699743926525116, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0098, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 8.368715083798882, | |
| "grad_norm": 0.1689661741256714, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0086, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 8.379888268156424, | |
| "grad_norm": 0.18215268850326538, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0097, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 8.391061452513966, | |
| "grad_norm": 0.13132604956626892, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0073, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 8.402234636871508, | |
| "grad_norm": 0.1855899840593338, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.012, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 8.41340782122905, | |
| "grad_norm": 0.15027715265750885, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0084, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 8.424581005586592, | |
| "grad_norm": 0.13204026222229004, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0098, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 8.435754189944134, | |
| "grad_norm": 0.1184239313006401, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0083, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 8.446927374301676, | |
| "grad_norm": 0.11260180920362473, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0078, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 8.458100558659218, | |
| "grad_norm": 0.16024430096149445, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0078, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 8.46927374301676, | |
| "grad_norm": 0.14689168334007263, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0116, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 8.480446927374302, | |
| "grad_norm": 0.13966570794582367, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0092, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 8.491620111731844, | |
| "grad_norm": 0.1750633716583252, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0085, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 8.502793296089386, | |
| "grad_norm": 0.19457660615444183, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0101, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 8.513966480446927, | |
| "grad_norm": 0.1120801568031311, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.009, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 8.525139664804469, | |
| "grad_norm": 0.1903074085712433, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0096, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 8.53631284916201, | |
| "grad_norm": 0.1494913399219513, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0102, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 8.547486033519553, | |
| "grad_norm": 0.18682445585727692, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0099, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 8.558659217877095, | |
| "grad_norm": 0.1618165373802185, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0084, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 8.569832402234637, | |
| "grad_norm": 0.4893296957015991, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0087, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 8.581005586592179, | |
| "grad_norm": 0.1769854873418808, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0074, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 8.59217877094972, | |
| "grad_norm": 0.23743072152137756, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0089, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 8.603351955307263, | |
| "grad_norm": 0.19221241772174835, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.0075, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 8.614525139664805, | |
| "grad_norm": 0.16878198087215424, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.0081, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 8.625698324022347, | |
| "grad_norm": 0.2630889117717743, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.0079, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 8.636871508379889, | |
| "grad_norm": 0.1042788103222847, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.0107, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 8.64804469273743, | |
| "grad_norm": 0.18389323353767395, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0107, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 8.659217877094973, | |
| "grad_norm": 0.11233634501695633, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.0078, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 8.670391061452515, | |
| "grad_norm": 0.13507699966430664, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0078, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 8.681564245810057, | |
| "grad_norm": 0.15847325325012207, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.007, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 8.692737430167599, | |
| "grad_norm": 0.10327979177236557, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0071, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 8.703910614525139, | |
| "grad_norm": 0.08161108195781708, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0091, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 8.71508379888268, | |
| "grad_norm": 0.17244283854961395, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0075, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 8.726256983240223, | |
| "grad_norm": 0.1439870297908783, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0081, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 8.737430167597765, | |
| "grad_norm": 0.13675066828727722, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.0099, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 8.748603351955307, | |
| "grad_norm": 0.4113706052303314, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0147, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 8.759776536312849, | |
| "grad_norm": 0.1944802701473236, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0117, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 8.77094972067039, | |
| "grad_norm": 0.1219603568315506, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0085, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 8.782122905027933, | |
| "grad_norm": 0.11945028603076935, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0077, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 8.793296089385475, | |
| "grad_norm": 0.1697731912136078, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0091, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 8.804469273743017, | |
| "grad_norm": 0.14634138345718384, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0096, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 8.815642458100559, | |
| "grad_norm": 0.12393759191036224, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0073, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 8.8268156424581, | |
| "grad_norm": 0.12806782126426697, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.007, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 8.837988826815643, | |
| "grad_norm": 0.2547239363193512, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.0082, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 8.849162011173185, | |
| "grad_norm": 0.15882697701454163, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0101, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 8.860335195530727, | |
| "grad_norm": 0.30307409167289734, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.008, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 8.871508379888269, | |
| "grad_norm": 0.1830519288778305, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0096, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 8.88268156424581, | |
| "grad_norm": 0.44775450229644775, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0089, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 8.893854748603353, | |
| "grad_norm": 0.21021567285060883, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.0134, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 8.905027932960895, | |
| "grad_norm": 0.1550932377576828, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0101, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 8.916201117318435, | |
| "grad_norm": 0.17183873057365417, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0134, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 8.927374301675977, | |
| "grad_norm": 0.1212712898850441, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0117, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 8.938547486033519, | |
| "grad_norm": 0.2008778601884842, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0083, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 8.949720670391061, | |
| "grad_norm": 0.18488670885562897, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0087, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 8.960893854748603, | |
| "grad_norm": 0.2144506871700287, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0089, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 8.972067039106145, | |
| "grad_norm": 0.18953397870063782, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0096, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 8.983240223463687, | |
| "grad_norm": 0.22044748067855835, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0088, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 8.994413407821229, | |
| "grad_norm": 0.27205678820610046, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.0064, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 9.005586592178771, | |
| "grad_norm": 0.11423490941524506, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0065, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 9.016759776536313, | |
| "grad_norm": 0.15857642889022827, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0096, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 9.027932960893855, | |
| "grad_norm": 0.145838662981987, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0065, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 9.039106145251397, | |
| "grad_norm": 0.1857743263244629, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.0073, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 9.050279329608939, | |
| "grad_norm": 0.22517512738704681, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0077, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 9.061452513966481, | |
| "grad_norm": 0.16172067821025848, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0059, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 9.072625698324023, | |
| "grad_norm": 0.10074090957641602, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.008, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 9.083798882681565, | |
| "grad_norm": 0.2241544872522354, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0083, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 9.094972067039107, | |
| "grad_norm": 0.11998165398836136, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0046, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 9.106145251396647, | |
| "grad_norm": 0.21141058206558228, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0107, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 9.11731843575419, | |
| "grad_norm": 0.19172324240207672, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0079, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 9.128491620111731, | |
| "grad_norm": 0.25882598757743835, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0077, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 9.139664804469273, | |
| "grad_norm": 0.295304536819458, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0089, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 9.150837988826815, | |
| "grad_norm": 0.29473039507865906, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0089, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 9.162011173184357, | |
| "grad_norm": 0.17247381806373596, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0065, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 9.1731843575419, | |
| "grad_norm": 0.15161915123462677, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0071, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 9.184357541899441, | |
| "grad_norm": 0.1648513674736023, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0099, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 9.195530726256983, | |
| "grad_norm": 0.1701023131608963, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0096, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 9.206703910614525, | |
| "grad_norm": 0.17437003552913666, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0078, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 9.217877094972067, | |
| "grad_norm": 0.15922905504703522, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0062, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 9.22905027932961, | |
| "grad_norm": 0.12073934078216553, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.008, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 9.240223463687151, | |
| "grad_norm": 0.3056185245513916, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0091, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 9.251396648044693, | |
| "grad_norm": 0.23450371623039246, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0124, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 9.262569832402235, | |
| "grad_norm": 0.18711315095424652, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0081, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 9.273743016759777, | |
| "grad_norm": 0.21760600805282593, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0062, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 9.28491620111732, | |
| "grad_norm": 0.15848205983638763, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0073, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 9.296089385474861, | |
| "grad_norm": 0.23266057670116425, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0097, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 9.307262569832401, | |
| "grad_norm": 0.15681594610214233, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0078, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 9.318435754189943, | |
| "grad_norm": 0.17894881963729858, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0079, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 9.329608938547485, | |
| "grad_norm": 0.2559245228767395, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0092, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 9.340782122905027, | |
| "grad_norm": 0.11860784888267517, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0069, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 9.35195530726257, | |
| "grad_norm": 0.07629042863845825, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0058, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 9.363128491620111, | |
| "grad_norm": 0.15233556926250458, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0071, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 9.374301675977653, | |
| "grad_norm": 0.14106446504592896, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0109, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 9.385474860335195, | |
| "grad_norm": 0.10023949295282364, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.0074, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 9.396648044692737, | |
| "grad_norm": 0.21528558433055878, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0062, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 9.40782122905028, | |
| "grad_norm": 0.16906464099884033, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0081, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 9.418994413407821, | |
| "grad_norm": 0.14748410880565643, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0064, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 9.430167597765363, | |
| "grad_norm": 0.09270267188549042, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0074, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 9.441340782122905, | |
| "grad_norm": 0.08015074580907822, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0057, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 9.452513966480447, | |
| "grad_norm": 0.09904926270246506, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0064, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 9.46368715083799, | |
| "grad_norm": 0.14222927391529083, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.0062, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 9.474860335195531, | |
| "grad_norm": 0.19013752043247223, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.0072, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 9.486033519553073, | |
| "grad_norm": 0.14375606179237366, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0111, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 9.497206703910614, | |
| "grad_norm": 0.1046292707324028, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0075, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 9.508379888268156, | |
| "grad_norm": 0.10911531001329422, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0073, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 9.519553072625698, | |
| "grad_norm": 0.1152840256690979, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0084, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 9.53072625698324, | |
| "grad_norm": 0.10556305944919586, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0077, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 9.541899441340782, | |
| "grad_norm": 0.12171413004398346, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0061, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 9.553072625698324, | |
| "grad_norm": 0.09937665611505508, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0067, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 9.564245810055866, | |
| "grad_norm": 0.20698967576026917, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0073, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 9.575418994413408, | |
| "grad_norm": 0.2303953468799591, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0068, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 9.58659217877095, | |
| "grad_norm": 0.14669598639011383, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0075, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 9.597765363128492, | |
| "grad_norm": 0.20348258316516876, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0068, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 9.608938547486034, | |
| "grad_norm": 0.18772922456264496, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0074, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 9.620111731843576, | |
| "grad_norm": 0.12228570133447647, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0084, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 9.631284916201118, | |
| "grad_norm": 0.1114656925201416, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0082, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 9.64245810055866, | |
| "grad_norm": 0.11628580093383789, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0071, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 9.653631284916202, | |
| "grad_norm": 0.14695218205451965, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.007, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 9.664804469273744, | |
| "grad_norm": 0.1541166603565216, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0056, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 9.675977653631286, | |
| "grad_norm": 0.15290547907352448, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0088, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 9.687150837988828, | |
| "grad_norm": 0.13804775476455688, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.006, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 9.69832402234637, | |
| "grad_norm": 0.24188724160194397, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0077, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 9.70949720670391, | |
| "grad_norm": 0.15240295231342316, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0084, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 9.720670391061452, | |
| "grad_norm": 0.21085888147354126, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0086, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 9.731843575418994, | |
| "grad_norm": 0.12192298471927643, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0072, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 9.743016759776536, | |
| "grad_norm": 0.15916647017002106, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0069, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 9.754189944134078, | |
| "grad_norm": 0.12907736003398895, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0068, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 9.76536312849162, | |
| "grad_norm": 0.2500177025794983, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.007, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 9.776536312849162, | |
| "grad_norm": 0.2467767298221588, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0085, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 9.787709497206704, | |
| "grad_norm": 0.22411592304706573, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0071, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 9.798882681564246, | |
| "grad_norm": 0.14229343831539154, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0088, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 9.810055865921788, | |
| "grad_norm": 0.12611888349056244, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.007, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 9.82122905027933, | |
| "grad_norm": 0.2142215073108673, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.0111, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 9.832402234636872, | |
| "grad_norm": 0.06738999485969543, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0092, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 9.843575418994414, | |
| "grad_norm": 0.10970841348171234, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0102, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 9.854748603351956, | |
| "grad_norm": 0.13287323713302612, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.006, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 9.865921787709498, | |
| "grad_norm": 0.17489692568778992, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0083, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 9.87709497206704, | |
| "grad_norm": 0.0852900967001915, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0064, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 9.888268156424582, | |
| "grad_norm": 0.1468733847141266, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0069, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 9.899441340782122, | |
| "grad_norm": 0.22379270195960999, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0083, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 9.910614525139664, | |
| "grad_norm": 0.1418684870004654, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0072, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 9.921787709497206, | |
| "grad_norm": 0.17870178818702698, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.0048, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 9.932960893854748, | |
| "grad_norm": 0.1571841835975647, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.008, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 9.94413407821229, | |
| "grad_norm": 0.2082429975271225, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.0063, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 9.955307262569832, | |
| "grad_norm": 0.17222803831100464, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.0077, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 9.966480446927374, | |
| "grad_norm": 0.1474296599626541, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0056, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 9.977653631284916, | |
| "grad_norm": 0.19698962569236755, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0058, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 9.988826815642458, | |
| "grad_norm": 0.1312209516763687, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.005, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "grad_norm": 0.09036019444465637, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0067, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 10.011173184357542, | |
| "grad_norm": 0.12338908761739731, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0099, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 10.022346368715084, | |
| "grad_norm": 0.11135794222354889, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0078, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 10.033519553072626, | |
| "grad_norm": 0.15144264698028564, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0064, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 10.044692737430168, | |
| "grad_norm": 0.1627447009086609, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0103, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 10.05586592178771, | |
| "grad_norm": 0.11373578757047653, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.0063, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 10.067039106145252, | |
| "grad_norm": 0.1445375382900238, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.0083, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 10.078212290502794, | |
| "grad_norm": 0.08985786885023117, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.0056, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 10.089385474860336, | |
| "grad_norm": 0.18309640884399414, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.0066, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 10.100558659217878, | |
| "grad_norm": 0.22829298675060272, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0061, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 10.111731843575418, | |
| "grad_norm": 0.16799217462539673, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0055, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 10.12290502793296, | |
| "grad_norm": 0.20653171837329865, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.0066, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 10.134078212290502, | |
| "grad_norm": 0.10840634256601334, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0066, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 10.145251396648044, | |
| "grad_norm": 0.0775102898478508, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.0097, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 10.156424581005586, | |
| "grad_norm": 0.17576414346694946, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.0047, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 10.167597765363128, | |
| "grad_norm": 0.14035837352275848, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.0071, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 10.17877094972067, | |
| "grad_norm": 0.11545306444168091, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0079, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 10.189944134078212, | |
| "grad_norm": 0.12421239912509918, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.0068, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 10.201117318435754, | |
| "grad_norm": 0.148824080824852, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0051, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 10.212290502793296, | |
| "grad_norm": 0.17936809360980988, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.0054, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 10.223463687150838, | |
| "grad_norm": 0.13348278403282166, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0052, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 10.23463687150838, | |
| "grad_norm": 0.1465512365102768, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0112, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 10.245810055865922, | |
| "grad_norm": 0.10687586665153503, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.0066, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 10.256983240223464, | |
| "grad_norm": 0.18796394765377045, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.0098, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 10.268156424581006, | |
| "grad_norm": 0.1550200879573822, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.0059, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 10.279329608938548, | |
| "grad_norm": 0.18638046085834503, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.0062, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 10.29050279329609, | |
| "grad_norm": 0.1533498615026474, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0051, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 10.30167597765363, | |
| "grad_norm": 0.14730173349380493, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.0081, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 10.312849162011172, | |
| "grad_norm": 0.08853857219219208, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.0083, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 10.324022346368714, | |
| "grad_norm": 0.15355242788791656, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.0077, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 10.335195530726256, | |
| "grad_norm": 0.14595624804496765, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.0048, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 10.346368715083798, | |
| "grad_norm": 0.19073987007141113, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.0092, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 10.35754189944134, | |
| "grad_norm": 0.09984754770994186, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0075, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 10.368715083798882, | |
| "grad_norm": 0.11532458662986755, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.0054, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 10.379888268156424, | |
| "grad_norm": 0.20899036526679993, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.006, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 10.391061452513966, | |
| "grad_norm": 0.052340369671583176, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.0049, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 10.402234636871508, | |
| "grad_norm": 0.07553000003099442, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.0092, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 10.41340782122905, | |
| "grad_norm": 0.11951261758804321, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0074, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 10.424581005586592, | |
| "grad_norm": 0.16047358512878418, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.0082, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 10.435754189944134, | |
| "grad_norm": 0.13786686956882477, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.0086, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 10.446927374301676, | |
| "grad_norm": 0.1524105817079544, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0063, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 10.458100558659218, | |
| "grad_norm": 0.2028435468673706, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.006, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 10.46927374301676, | |
| "grad_norm": 0.1889026015996933, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0095, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 10.480446927374302, | |
| "grad_norm": 0.20813752710819244, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.0074, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 10.491620111731844, | |
| "grad_norm": 0.2145465761423111, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.0092, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 10.502793296089386, | |
| "grad_norm": 0.08039359003305435, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.0083, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 10.513966480446927, | |
| "grad_norm": 0.2164773792028427, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.0065, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 10.525139664804469, | |
| "grad_norm": 0.08775664120912552, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0069, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 10.53631284916201, | |
| "grad_norm": 0.1035253256559372, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0077, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 10.547486033519553, | |
| "grad_norm": 0.11383654177188873, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.0056, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 10.558659217877095, | |
| "grad_norm": 0.10501349717378616, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.012, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 10.569832402234637, | |
| "grad_norm": 0.21106916666030884, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0066, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 10.581005586592179, | |
| "grad_norm": 0.08835123479366302, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.006, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 10.59217877094972, | |
| "grad_norm": 0.18978264927864075, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0066, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 10.603351955307263, | |
| "grad_norm": 0.2002374827861786, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.0062, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 10.614525139664805, | |
| "grad_norm": 0.13905523717403412, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.008, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 10.625698324022347, | |
| "grad_norm": 0.13890668749809265, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.0108, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 10.636871508379889, | |
| "grad_norm": 0.08558408915996552, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.0069, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 10.64804469273743, | |
| "grad_norm": 0.13163861632347107, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.008, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 10.659217877094973, | |
| "grad_norm": 0.1803475320339203, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0084, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 10.670391061452515, | |
| "grad_norm": 0.18398401141166687, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.0072, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 10.681564245810057, | |
| "grad_norm": 0.09667162597179413, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.0087, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 10.692737430167599, | |
| "grad_norm": 0.0858139842748642, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.0074, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 10.703910614525139, | |
| "grad_norm": 0.1821316033601761, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0072, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 10.71508379888268, | |
| "grad_norm": 0.14189845323562622, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0077, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 10.726256983240223, | |
| "grad_norm": 0.05395045876502991, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.0054, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 10.737430167597765, | |
| "grad_norm": 0.1724148690700531, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.0082, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 10.748603351955307, | |
| "grad_norm": 0.37126320600509644, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.0096, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 10.759776536312849, | |
| "grad_norm": 0.10354653745889664, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.0111, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 10.77094972067039, | |
| "grad_norm": 0.18879705667495728, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.0066, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 10.782122905027933, | |
| "grad_norm": 0.23624072968959808, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0063, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 10.793296089385475, | |
| "grad_norm": 0.13471145927906036, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.0059, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 10.804469273743017, | |
| "grad_norm": 0.18446491658687592, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.0069, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 10.815642458100559, | |
| "grad_norm": 0.0826929584145546, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0058, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 10.8268156424581, | |
| "grad_norm": 0.17251650989055634, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.0081, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 10.837988826815643, | |
| "grad_norm": 0.10581784695386887, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.0063, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 10.849162011173185, | |
| "grad_norm": 0.15969093143939972, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0065, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 10.860335195530727, | |
| "grad_norm": 0.21299996972084045, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.008, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 10.871508379888269, | |
| "grad_norm": 0.19832740724086761, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.008, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 10.88268156424581, | |
| "grad_norm": 0.1188596561551094, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.007, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 10.893854748603353, | |
| "grad_norm": 0.11870895326137543, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.0058, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 10.905027932960895, | |
| "grad_norm": 0.09646818041801453, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.006, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 10.916201117318435, | |
| "grad_norm": 0.26855528354644775, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.0074, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 10.927374301675977, | |
| "grad_norm": 0.17706650495529175, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0059, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 10.938547486033519, | |
| "grad_norm": 0.2731892466545105, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0063, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 10.949720670391061, | |
| "grad_norm": 0.08223097771406174, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.0078, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 10.960893854748603, | |
| "grad_norm": 0.0516105592250824, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.0061, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 10.972067039106145, | |
| "grad_norm": 0.08920707553625107, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.0062, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 10.983240223463687, | |
| "grad_norm": 0.32022151350975037, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.009, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 10.994413407821229, | |
| "grad_norm": 0.1300746649503708, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.0069, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 11.005586592178771, | |
| "grad_norm": 0.2587031126022339, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.0141, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 11.016759776536313, | |
| "grad_norm": 0.19218710064888, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.0119, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 11.027932960893855, | |
| "grad_norm": 0.1364549696445465, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.0049, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 11.039106145251397, | |
| "grad_norm": 0.1449739784002304, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.009, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 11.050279329608939, | |
| "grad_norm": 0.14813213050365448, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.0049, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 11.061452513966481, | |
| "grad_norm": 0.07008741050958633, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.0076, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 11.072625698324023, | |
| "grad_norm": 0.13158051669597626, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0075, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 11.083798882681565, | |
| "grad_norm": 0.16695921123027802, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.0099, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 11.094972067039107, | |
| "grad_norm": 0.1173919215798378, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0058, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 11.106145251396647, | |
| "grad_norm": 0.3498670756816864, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0067, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 11.11731843575419, | |
| "grad_norm": 0.2431592047214508, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.007, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 11.128491620111731, | |
| "grad_norm": 0.07953112572431564, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0064, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 11.139664804469273, | |
| "grad_norm": 0.09018266201019287, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0063, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 11.150837988826815, | |
| "grad_norm": 0.1305776983499527, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.0069, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 11.162011173184357, | |
| "grad_norm": 0.13608427345752716, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.0074, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 11.1731843575419, | |
| "grad_norm": 0.19071027636528015, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.0077, | |
| "step": 10000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 12, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
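
The file above appears to be the `trainer_state.json` written by the Hugging Face `transformers` Trainer (note the `log_history`, `global_step`, and `TrainerControl` fields). Below is a minimal sketch of how one might summarize such a log; the file name `trainer_state.json` and the printed summary fields are illustrative assumptions, not something specified by the log itself.

```python
# Minimal sketch (assumption: the JSON above is saved as "trainer_state.json",
# the state file the Hugging Face transformers Trainer writes next to each
# checkpoint). Uses only the standard library.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the entries that carry a training loss (evaluation entries, if any, lack it).
logs = [entry for entry in state["log_history"] if "loss" in entry]

first, last = logs[0], logs[-1]
print(f"entries logged : {len(logs)} (every {state['logging_steps']} steps)")
print(f"loss           : {first['loss']:.4f} -> {last['loss']:.4f}")
print(f"learning rate  : {first['learning_rate']:.2e} -> {last['learning_rate']:.2e}")
print(f"final step     : {last['step']} of {state['max_steps']}")
print(f"final epoch    : {last['epoch']:.2f} of {state['num_train_epochs']}")
```

On this log, such a summary would show the loss falling from roughly 1.05 at step 10 to under 0.01 by step 10000, with the learning rate decayed to nearly zero at the end of the schedule.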