{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 8.810572687224669,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00881057268722467,
      "grad_norm": 9.783341407775879,
      "learning_rate": 1.8e-06,
      "loss": 0.8007,
      "step": 10
    },
    {
      "epoch": 0.01762114537444934,
      "grad_norm": 3.2678112983703613,
      "learning_rate": 3.8e-06,
      "loss": 0.5843,
      "step": 20
    },
    {
      "epoch": 0.02643171806167401,
      "grad_norm": 4.176161766052246,
      "learning_rate": 5.8e-06,
      "loss": 0.3024,
      "step": 30
    },
    {
      "epoch": 0.03524229074889868,
      "grad_norm": 1.4033844470977783,
      "learning_rate": 7.8e-06,
      "loss": 0.2095,
      "step": 40
    },
    {
      "epoch": 0.04405286343612335,
      "grad_norm": 1.941641926765442,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.1746,
      "step": 50
    },
    {
      "epoch": 0.05286343612334802,
      "grad_norm": 1.3670467138290405,
      "learning_rate": 1.18e-05,
      "loss": 0.1567,
      "step": 60
    },
    {
      "epoch": 0.06167400881057269,
      "grad_norm": 1.6228299140930176,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.1312,
      "step": 70
    },
    {
      "epoch": 0.07048458149779736,
      "grad_norm": 1.133476734161377,
      "learning_rate": 1.58e-05,
      "loss": 0.1253,
      "step": 80
    },
    {
      "epoch": 0.07929515418502203,
      "grad_norm": 1.160352349281311,
      "learning_rate": 1.78e-05,
      "loss": 0.1168,
      "step": 90
    },
    {
      "epoch": 0.0881057268722467,
      "grad_norm": 1.3603399991989136,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 0.1129,
      "step": 100
    },
    {
      "epoch": 0.09691629955947137,
      "grad_norm": 1.7121895551681519,
      "learning_rate": 2.18e-05,
      "loss": 0.1098,
      "step": 110
    },
    {
      "epoch": 0.10572687224669604,
      "grad_norm": 1.323413610458374,
      "learning_rate": 2.38e-05,
      "loss": 0.1043,
      "step": 120
    },
    {
      "epoch": 0.1145374449339207,
      "grad_norm": 1.0168323516845703,
      "learning_rate": 2.58e-05,
      "loss": 0.1,
      "step": 130
    },
    {
      "epoch": 0.12334801762114538,
      "grad_norm": 1.188545823097229,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.1005,
      "step": 140
    },
    {
      "epoch": 0.13215859030837004,
      "grad_norm": 0.6844086050987244,
      "learning_rate": 2.98e-05,
      "loss": 0.0908,
      "step": 150
    },
    {
      "epoch": 0.14096916299559473,
      "grad_norm": 1.5344898700714111,
      "learning_rate": 3.18e-05,
      "loss": 0.0957,
      "step": 160
    },
    {
      "epoch": 0.14977973568281938,
      "grad_norm": 0.9596158862113953,
      "learning_rate": 3.38e-05,
      "loss": 0.0908,
      "step": 170
    },
    {
      "epoch": 0.15859030837004406,
      "grad_norm": 0.9798301458358765,
      "learning_rate": 3.58e-05,
      "loss": 0.0884,
      "step": 180
    },
    {
      "epoch": 0.16740088105726872,
      "grad_norm": 0.9422672390937805,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.0744,
      "step": 190
    },
    {
      "epoch": 0.1762114537444934,
      "grad_norm": 0.8442719578742981,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.0863,
      "step": 200
    },
    {
      "epoch": 0.18502202643171806,
      "grad_norm": 0.5888696908950806,
      "learning_rate": 4.18e-05,
      "loss": 0.0803,
      "step": 210
    },
    {
      "epoch": 0.19383259911894274,
      "grad_norm": 0.7323806285858154,
      "learning_rate": 4.38e-05,
      "loss": 0.0796,
      "step": 220
    },
    {
      "epoch": 0.2026431718061674,
      "grad_norm": 0.9205628037452698,
      "learning_rate": 4.58e-05,
      "loss": 0.0734,
      "step": 230
    },
    {
      "epoch": 0.21145374449339208,
      "grad_norm": 0.672389030456543,
      "learning_rate": 4.78e-05,
      "loss": 0.0719,
      "step": 240
    },
    {
      "epoch": 0.22026431718061673,
      "grad_norm": 0.7175483703613281,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.0661,
      "step": 250
    },
    {
      "epoch": 0.2290748898678414,
      "grad_norm": 0.6914017200469971,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.0588,
      "step": 260
    },
    {
      "epoch": 0.23788546255506607,
      "grad_norm": 0.7663117051124573,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.066,
      "step": 270
    },
    {
      "epoch": 0.24669603524229075,
      "grad_norm": 0.7929138541221619,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.059,
      "step": 280
    },
    {
      "epoch": 0.2555066079295154,
      "grad_norm": 0.7327365279197693,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.0647,
      "step": 290
    },
    {
      "epoch": 0.2643171806167401,
      "grad_norm": 0.764899492263794,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.0573,
      "step": 300
    },
    {
      "epoch": 0.27312775330396477,
      "grad_norm": 0.6707645654678345,
      "learning_rate": 6.18e-05,
      "loss": 0.0604,
      "step": 310
    },
    {
      "epoch": 0.28193832599118945,
      "grad_norm": 0.9546026587486267,
      "learning_rate": 6.38e-05,
      "loss": 0.056,
      "step": 320
    },
    {
      "epoch": 0.2907488986784141,
      "grad_norm": 1.1003812551498413,
      "learning_rate": 6.58e-05,
      "loss": 0.0576,
      "step": 330
    },
    {
      "epoch": 0.29955947136563876,
      "grad_norm": 0.9666423797607422,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.0549,
      "step": 340
    },
    {
      "epoch": 0.30837004405286345,
      "grad_norm": 0.5841639637947083,
      "learning_rate": 6.98e-05,
      "loss": 0.0478,
      "step": 350
    },
    {
      "epoch": 0.31718061674008813,
      "grad_norm": 0.8006680607795715,
      "learning_rate": 7.18e-05,
      "loss": 0.0503,
      "step": 360
    },
    {
      "epoch": 0.32599118942731276,
      "grad_norm": 0.6400935649871826,
      "learning_rate": 7.38e-05,
      "loss": 0.0557,
      "step": 370
    },
    {
      "epoch": 0.33480176211453744,
      "grad_norm": 0.7229577302932739,
      "learning_rate": 7.58e-05,
      "loss": 0.0509,
      "step": 380
    },
    {
      "epoch": 0.3436123348017621,
      "grad_norm": 0.5400054454803467,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.0479,
      "step": 390
    },
    {
      "epoch": 0.3524229074889868,
      "grad_norm": 0.7957359552383423,
      "learning_rate": 7.98e-05,
      "loss": 0.0525,
      "step": 400
    },
    {
      "epoch": 0.36123348017621143,
      "grad_norm": 0.9650723338127136,
      "learning_rate": 8.18e-05,
      "loss": 0.0592,
      "step": 410
    },
    {
      "epoch": 0.3700440528634361,
      "grad_norm": 0.979509174823761,
      "learning_rate": 8.38e-05,
      "loss": 0.0548,
      "step": 420
    },
    {
      "epoch": 0.3788546255506608,
      "grad_norm": 0.6508933305740356,
      "learning_rate": 8.58e-05,
      "loss": 0.054,
      "step": 430
    },
    {
      "epoch": 0.3876651982378855,
      "grad_norm": 0.4911634624004364,
      "learning_rate": 8.78e-05,
      "loss": 0.0474,
      "step": 440
    },
    {
      "epoch": 0.3964757709251101,
      "grad_norm": 0.8781774044036865,
      "learning_rate": 8.98e-05,
      "loss": 0.0502,
      "step": 450
    },
    {
      "epoch": 0.4052863436123348,
      "grad_norm": 0.7866969704627991,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.0519,
      "step": 460
    },
    {
      "epoch": 0.41409691629955947,
      "grad_norm": 0.5730182528495789,
      "learning_rate": 9.38e-05,
      "loss": 0.0537,
      "step": 470
    },
    {
      "epoch": 0.42290748898678415,
      "grad_norm": 0.8985834717750549,
      "learning_rate": 9.58e-05,
      "loss": 0.0512,
      "step": 480
    },
    {
      "epoch": 0.43171806167400884,
      "grad_norm": 0.7285324335098267,
      "learning_rate": 9.78e-05,
      "loss": 0.0501,
      "step": 490
    },
    {
      "epoch": 0.44052863436123346,
      "grad_norm": 0.27207818627357483,
      "learning_rate": 9.98e-05,
      "loss": 0.0501,
      "step": 500
    },
    {
      "epoch": 0.44933920704845814,
      "grad_norm": 0.6679635047912598,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.0496,
      "step": 510
    },
    {
      "epoch": 0.4581497797356828,
      "grad_norm": 0.5407543182373047,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.0455,
      "step": 520
    },
    {
      "epoch": 0.4669603524229075,
      "grad_norm": 0.6109707355499268,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.0506,
      "step": 530
    },
    {
      "epoch": 0.47577092511013214,
      "grad_norm": 0.748649537563324,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.0452,
      "step": 540
    },
    {
      "epoch": 0.4845814977973568,
      "grad_norm": 0.5651994347572327,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.0487,
      "step": 550
    },
    {
      "epoch": 0.4933920704845815,
      "grad_norm": 1.1109408140182495,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.0448,
      "step": 560
    },
    {
      "epoch": 0.5022026431718062,
      "grad_norm": 0.601180911064148,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.0424,
      "step": 570
    },
    {
      "epoch": 0.5110132158590308,
      "grad_norm": 0.6625035405158997,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.0458,
      "step": 580
    },
    {
      "epoch": 0.5198237885462555,
      "grad_norm": 0.8186538219451904,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.0483,
      "step": 590
    },
    {
      "epoch": 0.5286343612334802,
      "grad_norm": 0.44178858399391174,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.0497,
      "step": 600
    },
    {
      "epoch": 0.5374449339207048,
      "grad_norm": 1.1327277421951294,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.0502,
      "step": 610
    },
    {
      "epoch": 0.5462555066079295,
      "grad_norm": 0.6104411482810974,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.0439,
      "step": 620
    },
    {
      "epoch": 0.5550660792951542,
      "grad_norm": 0.46316829323768616,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.039,
      "step": 630
    },
    {
      "epoch": 0.5638766519823789,
      "grad_norm": 0.7568310499191284,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.0411,
      "step": 640
    },
    {
      "epoch": 0.5726872246696035,
      "grad_norm": 0.800849437713623,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.0484,
      "step": 650
    },
    {
      "epoch": 0.5814977973568282,
      "grad_norm": 0.5356922149658203,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.0432,
      "step": 660
    },
    {
      "epoch": 0.5903083700440529,
      "grad_norm": 0.836525022983551,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.0523,
      "step": 670
    },
    {
      "epoch": 0.5991189427312775,
      "grad_norm": 0.7024198174476624,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.0494,
      "step": 680
    },
    {
      "epoch": 0.6079295154185022,
      "grad_norm": 0.42933231592178345,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.0435,
      "step": 690
    },
    {
      "epoch": 0.6167400881057269,
      "grad_norm": 0.3697493374347687,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.0448,
      "step": 700
    },
    {
      "epoch": 0.6255506607929515,
      "grad_norm": 0.6156218647956848,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.0499,
      "step": 710
    },
    {
      "epoch": 0.6343612334801763,
      "grad_norm": 0.41467761993408203,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.0435,
      "step": 720
    },
    {
      "epoch": 0.6431718061674009,
      "grad_norm": 0.5887497663497925,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.0464,
      "step": 730
    },
    {
      "epoch": 0.6519823788546255,
      "grad_norm": 0.3424453139305115,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.037,
      "step": 740
    },
    {
      "epoch": 0.6607929515418502,
      "grad_norm": 0.5789796710014343,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.0418,
      "step": 750
    },
    {
      "epoch": 0.6696035242290749,
      "grad_norm": 0.47994768619537354,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.0348,
      "step": 760
    },
    {
      "epoch": 0.6784140969162996,
      "grad_norm": 0.5811408758163452,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.043,
      "step": 770
    },
    {
      "epoch": 0.6872246696035242,
      "grad_norm": 0.31499215960502625,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.0391,
      "step": 780
    },
    {
      "epoch": 0.6960352422907489,
      "grad_norm": 0.6786633729934692,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.0386,
      "step": 790
    },
    {
      "epoch": 0.7048458149779736,
      "grad_norm": 0.6777817606925964,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.0494,
      "step": 800
    },
    {
      "epoch": 0.7136563876651982,
      "grad_norm": 0.6622751355171204,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.0454,
      "step": 810
    },
    {
      "epoch": 0.7224669603524229,
      "grad_norm": 0.6081523895263672,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.0458,
      "step": 820
    },
    {
      "epoch": 0.7312775330396476,
      "grad_norm": 0.40756699442863464,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.0474,
      "step": 830
    },
    {
      "epoch": 0.7400881057268722,
      "grad_norm": 0.49721023440361023,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.0385,
      "step": 840
    },
    {
      "epoch": 0.748898678414097,
      "grad_norm": 0.5593914985656738,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.0491,
      "step": 850
    },
    {
      "epoch": 0.7577092511013216,
      "grad_norm": 0.5159551501274109,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.0398,
      "step": 860
    },
    {
      "epoch": 0.7665198237885462,
      "grad_norm": 0.405895859003067,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.0425,
      "step": 870
    },
    {
      "epoch": 0.775330396475771,
      "grad_norm": 0.6081558465957642,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.0406,
      "step": 880
    },
    {
      "epoch": 0.7841409691629956,
      "grad_norm": 0.49246469140052795,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.0379,
      "step": 890
    },
    {
      "epoch": 0.7929515418502202,
      "grad_norm": 0.6059092879295349,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.0455,
      "step": 900
    },
    {
      "epoch": 0.801762114537445,
      "grad_norm": 0.3857138752937317,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.0343,
      "step": 910
    },
    {
      "epoch": 0.8105726872246696,
      "grad_norm": 0.5885041356086731,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.0399,
      "step": 920
    },
    {
      "epoch": 0.8193832599118943,
      "grad_norm": 0.5427616238594055,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.0381,
      "step": 930
    },
    {
      "epoch": 0.8281938325991189,
      "grad_norm": 0.44055992364883423,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.036,
      "step": 940
    },
    {
      "epoch": 0.8370044052863436,
      "grad_norm": 0.36892494559288025,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.045,
      "step": 950
    },
    {
      "epoch": 0.8458149779735683,
      "grad_norm": 0.39622175693511963,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.0295,
      "step": 960
    },
    {
      "epoch": 0.8546255506607929,
      "grad_norm": 0.4800586402416229,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.0356,
      "step": 970
    },
    {
      "epoch": 0.8634361233480177,
      "grad_norm": 0.4034201204776764,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.039,
      "step": 980
    },
    {
      "epoch": 0.8722466960352423,
      "grad_norm": 0.527763307094574,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.0419,
      "step": 990
    },
    {
      "epoch": 0.8810572687224669,
      "grad_norm": 0.5146549940109253,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.038,
      "step": 1000
    },
    {
      "epoch": 0.8898678414096917,
      "grad_norm": 0.7138350009918213,
      "learning_rate": 9.929335307895689e-05,
      "loss": 0.0368,
      "step": 1010
    },
    {
      "epoch": 0.8986784140969163,
      "grad_norm": 0.414087176322937,
      "learning_rate": 9.926538309841839e-05,
      "loss": 0.0432,
      "step": 1020
    },
    {
      "epoch": 0.9074889867841409,
      "grad_norm": 0.6407654285430908,
      "learning_rate": 9.923687435954334e-05,
      "loss": 0.0389,
      "step": 1030
    },
    {
      "epoch": 0.9162995594713657,
      "grad_norm": 0.525131106376648,
      "learning_rate": 9.920782717409873e-05,
      "loss": 0.0393,
      "step": 1040
    },
    {
      "epoch": 0.9251101321585903,
      "grad_norm": 0.5931124687194824,
      "learning_rate": 9.917824185973994e-05,
      "loss": 0.0347,
      "step": 1050
    },
    {
      "epoch": 0.933920704845815,
      "grad_norm": 0.5337432622909546,
      "learning_rate": 9.914811874000723e-05,
      "loss": 0.0435,
      "step": 1060
    },
    {
      "epoch": 0.9427312775330396,
      "grad_norm": 0.9279194474220276,
      "learning_rate": 9.911745814432218e-05,
      "loss": 0.0405,
      "step": 1070
    },
    {
      "epoch": 0.9515418502202643,
      "grad_norm": 0.4171633720397949,
      "learning_rate": 9.90862604079842e-05,
      "loss": 0.0425,
      "step": 1080
    },
    {
      "epoch": 0.960352422907489,
      "grad_norm": 0.4015987515449524,
      "learning_rate": 9.90545258721667e-05,
      "loss": 0.0293,
      "step": 1090
    },
    {
      "epoch": 0.9691629955947136,
      "grad_norm": 0.41183045506477356,
      "learning_rate": 9.90222548839135e-05,
      "loss": 0.0341,
      "step": 1100
    },
    {
      "epoch": 0.9779735682819384,
      "grad_norm": 0.5550549030303955,
      "learning_rate": 9.898944779613495e-05,
      "loss": 0.0421,
      "step": 1110
    },
    {
      "epoch": 0.986784140969163,
      "grad_norm": 0.511286735534668,
      "learning_rate": 9.89561049676041e-05,
      "loss": 0.0377,
      "step": 1120
    },
    {
      "epoch": 0.9955947136563876,
      "grad_norm": 0.4300730526447296,
      "learning_rate": 9.89222267629528e-05,
      "loss": 0.0418,
      "step": 1130
    },
    {
      "epoch": 1.0044052863436124,
      "grad_norm": 0.7544938325881958,
      "learning_rate": 9.888781355266763e-05,
      "loss": 0.0377,
      "step": 1140
    },
    {
      "epoch": 1.013215859030837,
      "grad_norm": 0.59882652759552,
      "learning_rate": 9.885286571308598e-05,
      "loss": 0.0395,
      "step": 1150
    },
    {
      "epoch": 1.0220264317180616,
      "grad_norm": 0.5788866281509399,
      "learning_rate": 9.881738362639182e-05,
      "loss": 0.0362,
      "step": 1160
    },
    {
      "epoch": 1.0308370044052864,
      "grad_norm": 0.4007956385612488,
      "learning_rate": 9.878136768061154e-05,
      "loss": 0.0354,
      "step": 1170
    },
    {
      "epoch": 1.039647577092511,
      "grad_norm": 0.536131739616394,
      "learning_rate": 9.874481826960979e-05,
      "loss": 0.0354,
      "step": 1180
    },
    {
      "epoch": 1.0484581497797356,
      "grad_norm": 0.4108588695526123,
      "learning_rate": 9.870773579308503e-05,
      "loss": 0.0355,
      "step": 1190
    },
    {
      "epoch": 1.0572687224669604,
      "grad_norm": 0.5112282037734985,
      "learning_rate": 9.867012065656533e-05,
      "loss": 0.0401,
      "step": 1200
    },
    {
      "epoch": 1.066079295154185,
      "grad_norm": 0.40078428387641907,
      "learning_rate": 9.863197327140376e-05,
      "loss": 0.0363,
      "step": 1210
    },
    {
      "epoch": 1.0748898678414096,
      "grad_norm": 0.4066162407398224,
      "learning_rate": 9.859329405477403e-05,
      "loss": 0.0334,
      "step": 1220
    },
    {
      "epoch": 1.0837004405286343,
      "grad_norm": 0.5076769590377808,
      "learning_rate": 9.855408342966585e-05,
      "loss": 0.0392,
      "step": 1230
    },
    {
      "epoch": 1.092511013215859,
      "grad_norm": 0.47220245003700256,
      "learning_rate": 9.851434182488033e-05,
      "loss": 0.0344,
      "step": 1240
    },
    {
      "epoch": 1.1013215859030836,
      "grad_norm": 0.4301076829433441,
      "learning_rate": 9.84740696750253e-05,
      "loss": 0.0393,
      "step": 1250
    },
    {
      "epoch": 1.1101321585903083,
      "grad_norm": 0.4384737014770508,
      "learning_rate": 9.843326742051055e-05,
      "loss": 0.0359,
      "step": 1260
    },
    {
      "epoch": 1.118942731277533,
      "grad_norm": 0.3671138286590576,
      "learning_rate": 9.839193550754297e-05,
      "loss": 0.039,
      "step": 1270
    },
    {
      "epoch": 1.1277533039647576,
      "grad_norm": 0.494729220867157,
      "learning_rate": 9.835007438812177e-05,
      "loss": 0.0379,
      "step": 1280
    },
    {
      "epoch": 1.1365638766519823,
      "grad_norm": 0.46694886684417725,
      "learning_rate": 9.830768452003341e-05,
      "loss": 0.0427,
      "step": 1290
    },
    {
      "epoch": 1.145374449339207,
      "grad_norm": 0.3180607855319977,
      "learning_rate": 9.826476636684671e-05,
      "loss": 0.0391,
      "step": 1300
    },
    {
      "epoch": 1.1541850220264318,
      "grad_norm": 0.36170974373817444,
      "learning_rate": 9.822132039790773e-05,
      "loss": 0.0328,
      "step": 1310
    },
    {
      "epoch": 1.1629955947136563,
      "grad_norm": 0.6523162722587585,
      "learning_rate": 9.817734708833461e-05,
      "loss": 0.0393,
      "step": 1320
    },
    {
      "epoch": 1.171806167400881,
      "grad_norm": 0.63805091381073,
      "learning_rate": 9.813284691901243e-05,
      "loss": 0.034,
      "step": 1330
    },
    {
      "epoch": 1.1806167400881058,
      "grad_norm": 0.34985223412513733,
      "learning_rate": 9.808782037658792e-05,
      "loss": 0.0343,
      "step": 1340
    },
    {
      "epoch": 1.1894273127753303,
      "grad_norm": 0.4400756061077118,
      "learning_rate": 9.804226795346411e-05,
      "loss": 0.0319,
      "step": 1350
    },
    {
      "epoch": 1.198237885462555,
      "grad_norm": 0.3603322505950928,
      "learning_rate": 9.799619014779503e-05,
      "loss": 0.0316,
      "step": 1360
    },
    {
      "epoch": 1.2070484581497798,
      "grad_norm": 0.48929181694984436,
      "learning_rate": 9.794958746348013e-05,
      "loss": 0.0261,
      "step": 1370
    },
    {
      "epoch": 1.2158590308370045,
      "grad_norm": 0.5945678353309631,
      "learning_rate": 9.790246041015896e-05,
      "loss": 0.0352,
      "step": 1380
    },
    {
      "epoch": 1.224669603524229,
      "grad_norm": 0.41877344250679016,
      "learning_rate": 9.785480950320538e-05,
      "loss": 0.0328,
      "step": 1390
    },
    {
      "epoch": 1.2334801762114538,
      "grad_norm": 0.4846113622188568,
      "learning_rate": 9.78066352637221e-05,
      "loss": 0.0388,
      "step": 1400
    },
    {
      "epoch": 1.2422907488986783,
      "grad_norm": 0.6444342136383057,
      "learning_rate": 9.775793821853488e-05,
      "loss": 0.0304,
      "step": 1410
    },
    {
      "epoch": 1.251101321585903,
      "grad_norm": 0.3840843141078949,
      "learning_rate": 9.77087189001868e-05,
      "loss": 0.0388,
      "step": 1420
    },
    {
      "epoch": 1.2599118942731278,
      "grad_norm": 0.49051016569137573,
      "learning_rate": 9.765897784693243e-05,
      "loss": 0.0361,
      "step": 1430
    },
    {
      "epoch": 1.2687224669603525,
      "grad_norm": 0.34007859230041504,
      "learning_rate": 9.760871560273197e-05,
      "loss": 0.0319,
      "step": 1440
    },
    {
      "epoch": 1.277533039647577,
      "grad_norm": 0.3940713107585907,
      "learning_rate": 9.755793271724526e-05,
      "loss": 0.0375,
      "step": 1450
    },
    {
      "epoch": 1.2863436123348018,
      "grad_norm": 0.42878979444503784,
      "learning_rate": 9.750662974582584e-05,
      "loss": 0.0372,
      "step": 1460
    },
    {
      "epoch": 1.2951541850220265,
      "grad_norm": 0.25766220688819885,
      "learning_rate": 9.745480724951473e-05,
      "loss": 0.0325,
      "step": 1470
    },
    {
      "epoch": 1.303964757709251,
      "grad_norm": 0.27197545766830444,
      "learning_rate": 9.740246579503447e-05,
      "loss": 0.0373,
      "step": 1480
    },
    {
      "epoch": 1.3127753303964758,
      "grad_norm": 0.6453627347946167,
      "learning_rate": 9.734960595478284e-05,
      "loss": 0.0312,
      "step": 1490
    },
    {
      "epoch": 1.3215859030837005,
      "grad_norm": 0.509793221950531,
      "learning_rate": 9.729622830682657e-05,
      "loss": 0.0354,
      "step": 1500
    },
    {
      "epoch": 1.3303964757709252,
      "grad_norm": 0.6267362833023071,
      "learning_rate": 9.724233343489504e-05,
      "loss": 0.0422,
      "step": 1510
    },
    {
      "epoch": 1.3392070484581498,
      "grad_norm": 0.4328128695487976,
      "learning_rate": 9.718792192837396e-05,
      "loss": 0.0395,
      "step": 1520
    },
    {
      "epoch": 1.3480176211453745,
      "grad_norm": 0.6406875848770142,
      "learning_rate": 9.713299438229886e-05,
      "loss": 0.039,
      "step": 1530
    },
    {
      "epoch": 1.356828193832599,
      "grad_norm": 0.4744325876235962,
      "learning_rate": 9.707755139734855e-05,
      "loss": 0.04,
      "step": 1540
    },
    {
      "epoch": 1.3656387665198237,
      "grad_norm": 0.44169074296951294,
      "learning_rate": 9.702159357983866e-05,
      "loss": 0.0364,
      "step": 1550
    },
    {
      "epoch": 1.3744493392070485,
      "grad_norm": 0.36594370007514954,
      "learning_rate": 9.696512154171492e-05,
      "loss": 0.0308,
      "step": 1560
    },
    {
      "epoch": 1.3832599118942732,
      "grad_norm": 0.4625873565673828,
      "learning_rate": 9.690813590054645e-05,
      "loss": 0.0303,
      "step": 1570
    },
    {
      "epoch": 1.3920704845814977,
      "grad_norm": 0.5009127855300903,
      "learning_rate": 9.685063727951914e-05,
      "loss": 0.0344,
      "step": 1580
    },
    {
      "epoch": 1.4008810572687225,
      "grad_norm": 0.42556479573249817,
      "learning_rate": 9.679262630742865e-05,
      "loss": 0.0351,
      "step": 1590
    },
    {
      "epoch": 1.4096916299559472,
      "grad_norm": 0.4033386707305908,
      "learning_rate": 9.673410361867373e-05,
      "loss": 0.0321,
      "step": 1600
    },
    {
      "epoch": 1.4185022026431717,
      "grad_norm": 0.3963862359523773,
      "learning_rate": 9.667506985324909e-05,
      "loss": 0.0331,
      "step": 1610
    },
    {
      "epoch": 1.4273127753303965,
      "grad_norm": 0.469260036945343,
      "learning_rate": 9.661552565673855e-05,
      "loss": 0.0288,
      "step": 1620
    },
    {
      "epoch": 1.4361233480176212,
      "grad_norm": 0.40203893184661865,
      "learning_rate": 9.655547168030789e-05,
      "loss": 0.0342,
      "step": 1630
    },
    {
      "epoch": 1.444933920704846,
      "grad_norm": 0.5337649583816528,
      "learning_rate": 9.649490858069777e-05,
      "loss": 0.0332,
      "step": 1640
    },
    {
      "epoch": 1.4537444933920705,
      "grad_norm": 0.5467473268508911,
      "learning_rate": 9.643383702021658e-05,
      "loss": 0.0391,
      "step": 1650
    },
    {
      "epoch": 1.4625550660792952,
      "grad_norm": 0.5851401090621948,
      "learning_rate": 9.637225766673307e-05,
      "loss": 0.0296,
      "step": 1660
    },
    {
      "epoch": 1.4713656387665197,
      "grad_norm": 0.39419272541999817,
      "learning_rate": 9.631017119366922e-05,
      "loss": 0.0328,
      "step": 1670
    },
    {
      "epoch": 1.4801762114537445,
      "grad_norm": 0.43375474214553833,
      "learning_rate": 9.624757827999273e-05,
      "loss": 0.0333,
      "step": 1680
    },
    {
      "epoch": 1.4889867841409692,
      "grad_norm": 0.4625674784183502,
      "learning_rate": 9.618447961020971e-05,
      "loss": 0.0343,
      "step": 1690
    },
    {
      "epoch": 1.497797356828194,
      "grad_norm": 0.23592312633991241,
      "learning_rate": 9.612087587435707e-05,
      "loss": 0.0336,
      "step": 1700
    },
    {
      "epoch": 1.5066079295154187,
      "grad_norm": 0.6758530139923096,
      "learning_rate": 9.605676776799508e-05,
      "loss": 0.0377,
      "step": 1710
    },
    {
      "epoch": 1.5154185022026432,
      "grad_norm": 0.35647040605545044,
      "learning_rate": 9.599215599219973e-05,
      "loss": 0.0361,
      "step": 1720
    },
    {
      "epoch": 1.5242290748898677,
      "grad_norm": 0.44328275322914124,
      "learning_rate": 9.592704125355505e-05,
      "loss": 0.0311,
      "step": 1730
    },
    {
      "epoch": 1.5330396475770924,
      "grad_norm": 0.47090455889701843,
      "learning_rate": 9.586142426414538e-05,
      "loss": 0.0312,
      "step": 1740
    },
    {
      "epoch": 1.5418502202643172,
      "grad_norm": 0.3932674825191498,
      "learning_rate": 9.57953057415476e-05,
      "loss": 0.0329,
      "step": 1750
    },
    {
      "epoch": 1.550660792951542,
      "grad_norm": 0.40956389904022217,
      "learning_rate": 9.572868640882328e-05,
      "loss": 0.0404,
      "step": 1760
    },
    {
      "epoch": 1.5594713656387666,
      "grad_norm": 0.40919333696365356,
      "learning_rate": 9.56615669945108e-05,
      "loss": 0.0326,
      "step": 1770
    },
    {
      "epoch": 1.5682819383259912,
      "grad_norm": 0.37694793939590454,
      "learning_rate": 9.55939482326173e-05,
      "loss": 0.0366,
      "step": 1780
    },
    {
      "epoch": 1.577092511013216,
      "grad_norm": 0.6122838854789734,
      "learning_rate": 9.552583086261069e-05,
      "loss": 0.0356,
      "step": 1790
    },
    {
      "epoch": 1.5859030837004404,
      "grad_norm": 0.3556225299835205,
      "learning_rate": 9.545721562941168e-05,
      "loss": 0.0341,
      "step": 1800
    },
    {
      "epoch": 1.5947136563876652,
      "grad_norm": 0.4610528349876404,
      "learning_rate": 9.538810328338543e-05,
      "loss": 0.0327,
      "step": 1810
    },
    {
      "epoch": 1.60352422907489,
      "grad_norm": 0.23460887372493744,
      "learning_rate": 9.531849458033349e-05,
      "loss": 0.0328,
      "step": 1820
    },
    {
      "epoch": 1.6123348017621146,
      "grad_norm": 0.2322225570678711,
      "learning_rate": 9.524839028148547e-05,
      "loss": 0.0333,
      "step": 1830
    },
    {
      "epoch": 1.6211453744493394,
      "grad_norm": 0.26807940006256104,
      "learning_rate": 9.517779115349077e-05,
      "loss": 0.0308,
      "step": 1840
    },
    {
      "epoch": 1.6299559471365639,
      "grad_norm": 0.26577630639076233,
      "learning_rate": 9.510669796841014e-05,
      "loss": 0.0297,
      "step": 1850
    },
    {
      "epoch": 1.6387665198237884,
      "grad_norm": 0.45758599042892456,
      "learning_rate": 9.503511150370727e-05,
      "loss": 0.035,
      "step": 1860
    },
    {
      "epoch": 1.6475770925110131,
      "grad_norm": 0.3763338029384613,
      "learning_rate": 9.496303254224024e-05,
      "loss": 0.0341,
      "step": 1870
    },
    {
      "epoch": 1.6563876651982379,
      "grad_norm": 0.44285768270492554,
      "learning_rate": 9.489046187225306e-05,
      "loss": 0.0309,
      "step": 1880
    },
    {
      "epoch": 1.6651982378854626,
      "grad_norm": 0.2282378226518631,
      "learning_rate": 9.481740028736692e-05,
      "loss": 0.028,
      "step": 1890
    },
    {
      "epoch": 1.6740088105726874,
      "grad_norm": 0.3692968785762787,
      "learning_rate": 9.474384858657164e-05,
      "loss": 0.0369,
      "step": 1900
    },
    {
      "epoch": 1.6828193832599119,
      "grad_norm": 0.3806714713573456,
      "learning_rate": 9.466980757421679e-05,
      "loss": 0.0345,
      "step": 1910
    },
    {
      "epoch": 1.6916299559471366,
      "grad_norm": 0.35997503995895386,
      "learning_rate": 9.459527806000305e-05,
      "loss": 0.0336,
      "step": 1920
    },
    {
      "epoch": 1.7004405286343611,
      "grad_norm": 0.3325161337852478,
      "learning_rate": 9.452026085897325e-05,
      "loss": 0.0337,
      "step": 1930
    },
    {
      "epoch": 1.7092511013215859,
      "grad_norm": 0.40811505913734436,
      "learning_rate": 9.444475679150348e-05,
      "loss": 0.0328,
      "step": 1940
    },
    {
      "epoch": 1.7180616740088106,
      "grad_norm": 0.4765032231807709,
      "learning_rate": 9.436876668329411e-05,
      "loss": 0.0339,
      "step": 1950
    },
    {
      "epoch": 1.7268722466960353,
      "grad_norm": 0.36173051595687866,
      "learning_rate": 9.429229136536079e-05,
      "loss": 0.0287,
      "step": 1960
    },
    {
      "epoch": 1.73568281938326,
      "grad_norm": 0.21599677205085754,
      "learning_rate": 9.421533167402534e-05,
      "loss": 0.0356,
      "step": 1970
    },
    {
      "epoch": 1.7444933920704846,
      "grad_norm": 0.44708141684532166,
      "learning_rate": 9.413788845090666e-05,
      "loss": 0.038,
      "step": 1980
    },
    {
      "epoch": 1.753303964757709,
      "grad_norm": 0.32971513271331787,
      "learning_rate": 9.405996254291136e-05,
      "loss": 0.0334,
      "step": 1990
    },
    {
      "epoch": 1.7621145374449338,
      "grad_norm": 0.25437742471694946,
      "learning_rate": 9.398155480222474e-05,
      "loss": 0.0278,
      "step": 2000
    },
    {
      "epoch": 1.7709251101321586,
      "grad_norm": 0.28217947483062744,
      "learning_rate": 9.390266608630128e-05,
      "loss": 0.0272,
      "step": 2010
    },
    {
      "epoch": 1.7797356828193833,
      "grad_norm": 0.19690771400928497,
      "learning_rate": 9.38232972578553e-05,
      "loss": 0.0318,
      "step": 2020
    },
    {
      "epoch": 1.788546255506608,
      "grad_norm": 0.4625421464443207,
      "learning_rate": 9.374344918485164e-05,
      "loss": 0.0354,
      "step": 2030
    },
    {
      "epoch": 1.7973568281938326,
      "grad_norm": 0.4511774778366089,
      "learning_rate": 9.366312274049602e-05,
      "loss": 0.0371,
      "step": 2040
    },
    {
      "epoch": 1.8061674008810573,
      "grad_norm": 0.24759308993816376,
      "learning_rate": 9.358231880322554e-05,
      "loss": 0.0317,
      "step": 2050
    },
    {
      "epoch": 1.8149779735682818,
      "grad_norm": 0.36464419960975647,
      "learning_rate": 9.350103825669916e-05,
      "loss": 0.0274,
      "step": 2060
    },
    {
      "epoch": 1.8237885462555066,
      "grad_norm": 0.5422326326370239,
      "learning_rate": 9.341928198978787e-05,
      "loss": 0.0267,
      "step": 2070
    },
    {
      "epoch": 1.8325991189427313,
      "grad_norm": 0.2565076947212219,
      "learning_rate": 9.333705089656512e-05,
      "loss": 0.0297,
      "step": 2080
    },
    {
      "epoch": 1.841409691629956,
      "grad_norm": 0.24085785448551178,
      "learning_rate": 9.325434587629698e-05,
      "loss": 0.0286,
      "step": 2090
    },
    {
      "epoch": 1.8502202643171806,
      "grad_norm": 0.3714599609375,
      "learning_rate": 9.31711678334323e-05,
      "loss": 0.0291,
      "step": 2100
    },
    {
      "epoch": 1.8590308370044053,
      "grad_norm": 0.3025754690170288,
      "learning_rate": 9.308751767759282e-05,
      "loss": 0.0302,
      "step": 2110
    },
    {
      "epoch": 1.8678414096916298,
      "grad_norm": 0.32979100942611694,
      "learning_rate": 9.300339632356325e-05,
      "loss": 0.026,
      "step": 2120
    },
    {
      "epoch": 1.8766519823788546,
      "grad_norm": 0.27037525177001953,
      "learning_rate": 9.291880469128124e-05,
      "loss": 0.0299,
      "step": 2130
    },
    {
      "epoch": 1.8854625550660793,
      "grad_norm": 0.466528981924057,
      "learning_rate": 9.283374370582732e-05,
      "loss": 0.029,
      "step": 2140
    },
    {
      "epoch": 1.894273127753304,
      "grad_norm": 0.3242933452129364,
      "learning_rate": 9.274821429741482e-05,
      "loss": 0.0398,
      "step": 2150
    },
    {
      "epoch": 1.9030837004405288,
      "grad_norm": 0.5478012561798096,
      "learning_rate": 9.266221740137961e-05,
      "loss": 0.0338,
      "step": 2160
    },
    {
      "epoch": 1.9118942731277533,
      "grad_norm": 0.42316552996635437,
      "learning_rate": 9.257575395817001e-05,
      "loss": 0.0296,
      "step": 2170
    },
    {
      "epoch": 1.920704845814978,
      "grad_norm": 0.21995742619037628,
      "learning_rate": 9.248882491333637e-05,
      "loss": 0.0285,
      "step": 2180
    },
    {
      "epoch": 1.9295154185022025,
      "grad_norm": 0.3761650621891022,
      "learning_rate": 9.240143121752076e-05,
      "loss": 0.0299,
      "step": 2190
    },
    {
      "epoch": 1.9383259911894273,
      "grad_norm": 0.39028650522232056,
      "learning_rate": 9.23135738264467e-05,
      "loss": 0.0308,
      "step": 2200
    },
    {
      "epoch": 1.947136563876652,
      "grad_norm": 0.3024023473262787,
      "learning_rate": 9.222525370090849e-05,
      "loss": 0.0272,
      "step": 2210
    },
    {
      "epoch": 1.9559471365638768,
      "grad_norm": 0.5320018529891968,
      "learning_rate": 9.213647180676088e-05,
      "loss": 0.0266,
      "step": 2220
    },
    {
      "epoch": 1.9647577092511013,
      "grad_norm": 0.27007195353507996,
      "learning_rate": 9.204722911490846e-05,
      "loss": 0.0307,
      "step": 2230
    },
    {
      "epoch": 1.973568281938326,
      "grad_norm": 0.5223168730735779,
      "learning_rate": 9.1957526601295e-05,
      "loss": 0.0316,
      "step": 2240
    },
    {
      "epoch": 1.9823788546255505,
      "grad_norm": 0.5517076253890991,
      "learning_rate": 9.186736524689281e-05,
      "loss": 0.0324,
      "step": 2250
    },
    {
      "epoch": 1.9911894273127753,
      "grad_norm": 0.4540218114852905,
      "learning_rate": 9.177674603769204e-05,
      "loss": 0.0293,
      "step": 2260
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.6469730734825134,
      "learning_rate": 9.168566996468983e-05,
      "loss": 0.028,
      "step": 2270
    },
    {
      "epoch": 2.0088105726872247,
      "grad_norm": 0.4468275010585785,
      "learning_rate": 9.159413802387951e-05,
      "loss": 0.033,
      "step": 2280
    },
    {
      "epoch": 2.0176211453744495,
      "grad_norm": 0.3147367537021637,
      "learning_rate": 9.150215121623974e-05,
      "loss": 0.0327,
      "step": 2290
    },
    {
      "epoch": 2.026431718061674,
      "grad_norm": 0.34883683919906616,
      "learning_rate": 9.140971054772349e-05,
      "loss": 0.0272,
      "step": 2300
    },
    {
      "epoch": 2.0352422907488985,
      "grad_norm": 0.22206811606884003,
      "learning_rate": 9.131681702924713e-05,
      "loss": 0.0337,
      "step": 2310
    },
    {
      "epoch": 2.0440528634361232,
      "grad_norm": 0.3218289315700531,
      "learning_rate": 9.122347167667926e-05,
      "loss": 0.0287,
      "step": 2320
    },
    {
      "epoch": 2.052863436123348,
      "grad_norm": 0.45745208859443665,
      "learning_rate": 9.112967551082973e-05,
      "loss": 0.0367,
      "step": 2330
    },
    {
      "epoch": 2.0616740088105727,
      "grad_norm": 0.34802356362342834,
      "learning_rate": 9.103542955743835e-05,
      "loss": 0.0306,
      "step": 2340
    },
    {
      "epoch": 2.0704845814977975,
      "grad_norm": 0.3098474144935608,
      "learning_rate": 9.094073484716381e-05,
      "loss": 0.0276,
      "step": 2350
    },
    {
      "epoch": 2.079295154185022,
      "grad_norm": 0.5373137593269348,
      "learning_rate": 9.084559241557226e-05,
      "loss": 0.03,
      "step": 2360
    },
    {
      "epoch": 2.0881057268722465,
      "grad_norm": 0.20613513886928558,
      "learning_rate": 9.075000330312608e-05,
      "loss": 0.0252,
      "step": 2370
    },
    {
      "epoch": 2.0969162995594712,
      "grad_norm": 0.34800076484680176,
      "learning_rate": 9.065396855517253e-05,
      "loss": 0.025,
      "step": 2380
    },
    {
      "epoch": 2.105726872246696,
      "grad_norm": 0.19473090767860413,
      "learning_rate": 9.055748922193219e-05,
      "loss": 0.031,
      "step": 2390
    },
    {
      "epoch": 2.1145374449339207,
      "grad_norm": 0.34081539511680603,
      "learning_rate": 9.046056635848761e-05,
      "loss": 0.0314,
      "step": 2400
    },
    {
      "epoch": 2.1233480176211454,
      "grad_norm": 0.44007906317710876,
      "learning_rate": 9.036320102477169e-05,
      "loss": 0.0255,
      "step": 2410
    },
    {
      "epoch": 2.13215859030837,
      "grad_norm": 0.3881816864013672,
      "learning_rate": 9.02653942855561e-05,
      "loss": 0.0302,
      "step": 2420
    },
    {
      "epoch": 2.140969162995595,
      "grad_norm": 0.44560763239860535,
      "learning_rate": 9.016714721043971e-05,
      "loss": 0.0258,
      "step": 2430
    },
    {
      "epoch": 2.149779735682819,
      "grad_norm": 0.445132315158844,
      "learning_rate": 9.006846087383675e-05,
      "loss": 0.0301,
      "step": 2440
    },
    {
      "epoch": 2.158590308370044,
      "grad_norm": 0.40997591614723206,
      "learning_rate": 8.996933635496523e-05,
      "loss": 0.0273,
      "step": 2450
    },
    {
      "epoch": 2.1674008810572687,
      "grad_norm": 0.4347369074821472,
      "learning_rate": 8.986977473783498e-05,
      "loss": 0.026,
      "step": 2460
    },
    {
      "epoch": 2.1762114537444934,
      "grad_norm": 0.35197916626930237,
      "learning_rate": 8.97697771112359e-05,
      "loss": 0.0284,
      "step": 2470
    },
    {
      "epoch": 2.185022026431718,
      "grad_norm": 0.4149504005908966,
      "learning_rate": 8.966934456872602e-05,
      "loss": 0.0262,
      "step": 2480
    },
    {
      "epoch": 2.193832599118943,
      "grad_norm": 0.31341835856437683,
      "learning_rate": 8.95684782086195e-05,
      "loss": 0.0251,
      "step": 2490
    },
    {
      "epoch": 2.202643171806167,
      "grad_norm": 0.3490375578403473,
      "learning_rate": 8.946717913397476e-05,
      "loss": 0.0288,
      "step": 2500
    },
    {
      "epoch": 2.211453744493392,
      "grad_norm": 0.3163897693157196,
      "learning_rate": 8.93654484525822e-05,
      "loss": 0.0309,
      "step": 2510
    },
    {
      "epoch": 2.2202643171806167,
      "grad_norm": 0.5116659998893738,
      "learning_rate": 8.926328727695226e-05,
      "loss": 0.0255,
      "step": 2520
    },
    {
      "epoch": 2.2290748898678414,
      "grad_norm": 0.2709035873413086,
      "learning_rate": 8.916069672430319e-05,
      "loss": 0.0244,
      "step": 2530
    },
    {
      "epoch": 2.237885462555066,
      "grad_norm": 0.32955285906791687,
      "learning_rate": 8.905767791654884e-05,
      "loss": 0.0289,
      "step": 2540
    },
    {
      "epoch": 2.246696035242291,
      "grad_norm": 0.2972700595855713,
      "learning_rate": 8.895423198028638e-05,
      "loss": 0.0289,
      "step": 2550
    },
    {
      "epoch": 2.255506607929515,
      "grad_norm": 0.22429923713207245,
      "learning_rate": 8.885036004678402e-05,
      "loss": 0.0273,
      "step": 2560
    },
    {
      "epoch": 2.26431718061674,
      "grad_norm": 0.5024057030677795,
      "learning_rate": 8.874606325196857e-05,
      "loss": 0.0287,
      "step": 2570
    },
    {
      "epoch": 2.2731277533039647,
      "grad_norm": 0.5265575647354126,
      "learning_rate": 8.864134273641304e-05,
      "loss": 0.0314,
      "step": 2580
    },
    {
      "epoch": 2.2819383259911894,
      "grad_norm": 0.31660500168800354,
      "learning_rate": 8.853619964532427e-05,
      "loss": 0.0319,
      "step": 2590
    },
    {
      "epoch": 2.290748898678414,
      "grad_norm": 0.4299285411834717,
      "learning_rate": 8.843063512853019e-05,
      "loss": 0.0256,
      "step": 2600
    },
    {
      "epoch": 2.299559471365639,
      "grad_norm": 0.2270841747522354,
      "learning_rate": 8.832465034046749e-05,
      "loss": 0.0269,
      "step": 2610
    },
    {
      "epoch": 2.3083700440528636,
      "grad_norm": 0.46884608268737793,
      "learning_rate": 8.821824644016882e-05,
      "loss": 0.0291,
      "step": 2620
    },
    {
      "epoch": 2.317180616740088,
      "grad_norm": 0.20958082377910614,
      "learning_rate": 8.811142459125019e-05,
      "loss": 0.0238,
      "step": 2630
    },
    {
      "epoch": 2.3259911894273126,
      "grad_norm": 0.2303617298603058,
      "learning_rate": 8.800418596189822e-05,
      "loss": 0.0216,
      "step": 2640
    },
    {
      "epoch": 2.3348017621145374,
      "grad_norm": 0.29592180252075195,
      "learning_rate": 8.789653172485737e-05,
      "loss": 0.0245,
      "step": 2650
    },
    {
      "epoch": 2.343612334801762,
      "grad_norm": 0.3879617154598236,
      "learning_rate": 8.778846305741715e-05,
      "loss": 0.038,
      "step": 2660
    },
    {
      "epoch": 2.352422907488987,
      "grad_norm": 0.3723200857639313,
      "learning_rate": 8.767998114139918e-05,
      "loss": 0.0313,
      "step": 2670
    },
    {
      "epoch": 2.3612334801762116,
      "grad_norm": 0.36837583780288696,
      "learning_rate": 8.757108716314429e-05,
      "loss": 0.0295,
      "step": 2680
    },
    {
      "epoch": 2.3700440528634363,
      "grad_norm": 0.3458777368068695,
      "learning_rate": 8.746178231349962e-05,
      "loss": 0.0288,
      "step": 2690
    },
    {
      "epoch": 2.3788546255506606,
      "grad_norm": 0.4853503704071045,
      "learning_rate": 8.735206778780549e-05,
      "loss": 0.0296,
      "step": 2700
    },
    {
      "epoch": 2.3876651982378854,
      "grad_norm": 0.44645997881889343,
      "learning_rate": 8.724194478588234e-05,
      "loss": 0.0293,
      "step": 2710
    },
    {
      "epoch": 2.39647577092511,
      "grad_norm": 0.318408340215683,
      "learning_rate": 8.713141451201772e-05,
      "loss": 0.0272,
      "step": 2720
    },
    {
      "epoch": 2.405286343612335,
      "grad_norm": 0.28629541397094727,
      "learning_rate": 8.702047817495295e-05,
      "loss": 0.0262,
      "step": 2730
    },
    {
      "epoch": 2.4140969162995596,
      "grad_norm": 0.41597819328308105,
      "learning_rate": 8.69091369878701e-05,
      "loss": 0.0301,
      "step": 2740
    },
    {
      "epoch": 2.4229074889867843,
      "grad_norm": 0.38253867626190186,
      "learning_rate": 8.679739216837849e-05,
      "loss": 0.0229,
      "step": 2750
    },
    {
      "epoch": 2.431718061674009,
      "grad_norm": 0.499968558549881,
      "learning_rate": 8.66852449385016e-05,
      "loss": 0.0258,
      "step": 2760
    },
    {
      "epoch": 2.4405286343612334,
      "grad_norm": 0.2622029185295105,
      "learning_rate": 8.657269652466356e-05,
      "loss": 0.0257,
      "step": 2770
    },
    {
      "epoch": 2.449339207048458,
      "grad_norm": 0.5591828227043152,
      "learning_rate": 8.645974815767577e-05,
      "loss": 0.0271,
      "step": 2780
    },
    {
      "epoch": 2.458149779735683,
      "grad_norm": 0.5326882004737854,
      "learning_rate": 8.634640107272351e-05,
      "loss": 0.0282,
      "step": 2790
    },
    {
      "epoch": 2.4669603524229076,
      "grad_norm": 0.4375574290752411,
      "learning_rate": 8.623265650935234e-05,
      "loss": 0.028,
      "step": 2800
    },
    {
      "epoch": 2.4757709251101323,
      "grad_norm": 0.4028482139110565,
      "learning_rate": 8.611851571145456e-05,
      "loss": 0.0295,
      "step": 2810
    },
    {
      "epoch": 2.4845814977973566,
      "grad_norm": 0.32747912406921387,
      "learning_rate": 8.600397992725566e-05,
      "loss": 0.0222,
      "step": 2820
    },
    {
      "epoch": 2.4933920704845813,
      "grad_norm": 0.41480571031570435,
      "learning_rate": 8.588905040930061e-05,
      "loss": 0.0282,
      "step": 2830
    },
    {
      "epoch": 2.502202643171806,
      "grad_norm": 0.3334689736366272,
      "learning_rate": 8.577372841444022e-05,
      "loss": 0.0292,
      "step": 2840
    },
    {
      "epoch": 2.511013215859031,
      "grad_norm": 0.31518039107322693,
      "learning_rate": 8.565801520381736e-05,
      "loss": 0.0243,
      "step": 2850
    },
    {
      "epoch": 2.5198237885462555,
      "grad_norm": 0.3201143741607666,
      "learning_rate": 8.554191204285313e-05,
      "loss": 0.0287,
      "step": 2860
    },
    {
      "epoch": 2.5286343612334803,
      "grad_norm": 0.3682001531124115,
      "learning_rate": 8.542542020123315e-05,
      "loss": 0.0253,
      "step": 2870
    },
    {
      "epoch": 2.537444933920705,
      "grad_norm": 0.5187533497810364,
      "learning_rate": 8.530854095289347e-05,
      "loss": 0.0273,
      "step": 2880
    },
    {
      "epoch": 2.5462555066079293,
      "grad_norm": 0.5061056613922119,
      "learning_rate": 8.519127557600688e-05,
      "loss": 0.0255,
      "step": 2890
    },
    {
      "epoch": 2.555066079295154,
      "grad_norm": 0.5131250619888306,
      "learning_rate": 8.507362535296871e-05,
      "loss": 0.0279,
      "step": 2900
    },
    {
      "epoch": 2.563876651982379,
      "grad_norm": 0.24301305413246155,
      "learning_rate": 8.495559157038299e-05,
      "loss": 0.0362,
      "step": 2910
    },
    {
      "epoch": 2.5726872246696035,
      "grad_norm": 0.5649149417877197,
      "learning_rate": 8.483717551904823e-05,
      "loss": 0.029,
      "step": 2920
    },
    {
      "epoch": 2.5814977973568283,
      "grad_norm": 0.2805394232273102,
      "learning_rate": 8.47183784939434e-05,
      "loss": 0.0253,
      "step": 2930
    },
    {
      "epoch": 2.590308370044053,
      "grad_norm": 0.4454239010810852,
      "learning_rate": 8.459920179421374e-05,
      "loss": 0.0269,
      "step": 2940
    },
    {
      "epoch": 2.5991189427312777,
      "grad_norm": 0.3054984211921692,
      "learning_rate": 8.447964672315656e-05,
      "loss": 0.0225,
      "step": 2950
    },
    {
      "epoch": 2.607929515418502,
      "grad_norm": 0.3227088153362274,
      "learning_rate": 8.435971458820692e-05,
      "loss": 0.0281,
      "step": 2960
    },
    {
      "epoch": 2.616740088105727,
      "grad_norm": 0.2978251278400421,
      "learning_rate": 8.423940670092345e-05,
      "loss": 0.027,
      "step": 2970
    },
    {
      "epoch": 2.6255506607929515,
      "grad_norm": 0.2983796000480652,
      "learning_rate": 8.411872437697394e-05,
      "loss": 0.0262,
      "step": 2980
    },
    {
      "epoch": 2.6343612334801763,
      "grad_norm": 0.3664785325527191,
      "learning_rate": 8.399766893612096e-05,
      "loss": 0.0226,
      "step": 2990
    },
    {
      "epoch": 2.643171806167401,
      "grad_norm": 0.4415528178215027,
      "learning_rate": 8.38762417022074e-05,
      "loss": 0.028,
      "step": 3000
    },
    {
      "epoch": 2.6519823788546253,
      "grad_norm": 0.42431381344795227,
      "learning_rate": 8.375444400314204e-05,
      "loss": 0.0234,
      "step": 3010
    },
    {
      "epoch": 2.6607929515418505,
      "grad_norm": 0.21880491077899933,
      "learning_rate": 8.3632277170885e-05,
      "loss": 0.0277,
      "step": 3020
    },
    {
      "epoch": 2.6696035242290748,
      "grad_norm": 0.22304697334766388,
      "learning_rate": 8.350974254143318e-05,
      "loss": 0.0239,
      "step": 3030
    },
    {
      "epoch": 2.6784140969162995,
      "grad_norm": 0.3053400218486786,
      "learning_rate": 8.338684145480566e-05,
      "loss": 0.0231,
      "step": 3040
    },
    {
      "epoch": 2.6872246696035242,
      "grad_norm": 0.3970130383968353,
      "learning_rate": 8.326357525502904e-05,
      "loss": 0.0234,
      "step": 3050
    },
    {
      "epoch": 2.696035242290749,
      "grad_norm": 0.3227209746837616,
      "learning_rate": 8.313994529012273e-05,
      "loss": 0.0294,
      "step": 3060
    },
    {
      "epoch": 2.7048458149779737,
      "grad_norm": 0.27443569898605347,
      "learning_rate": 8.301595291208422e-05,
      "loss": 0.0273,
      "step": 3070
    },
    {
      "epoch": 2.713656387665198,
      "grad_norm": 0.32237517833709717,
      "learning_rate": 8.289159947687427e-05,
      "loss": 0.0249,
      "step": 3080
    },
    {
      "epoch": 2.7224669603524227,
      "grad_norm": 0.3307492434978485,
      "learning_rate": 8.276688634440216e-05,
      "loss": 0.0261,
      "step": 3090
    },
    {
      "epoch": 2.7312775330396475,
      "grad_norm": 0.27980926632881165,
      "learning_rate": 8.26418148785107e-05,
      "loss": 0.0264,
      "step": 3100
    },
    {
      "epoch": 2.7400881057268722,
      "grad_norm": 0.21118228137493134,
      "learning_rate": 8.251638644696141e-05,
      "loss": 0.0311,
      "step": 3110
    },
    {
      "epoch": 2.748898678414097,
      "grad_norm": 0.5056304335594177,
      "learning_rate": 8.23906024214195e-05,
      "loss": 0.0297,
      "step": 3120
    },
    {
      "epoch": 2.7577092511013217,
      "grad_norm": 0.5140434503555298,
      "learning_rate": 8.226446417743897e-05,
      "loss": 0.0305,
      "step": 3130
    },
    {
      "epoch": 2.7665198237885464,
      "grad_norm": 0.39177587628364563,
      "learning_rate": 8.213797309444742e-05,
      "loss": 0.0299,
      "step": 3140
    },
    {
      "epoch": 2.7753303964757707,
      "grad_norm": 0.3983112871646881,
      "learning_rate": 8.201113055573105e-05,
      "loss": 0.0298,
      "step": 3150
    },
    {
      "epoch": 2.7841409691629955,
      "grad_norm": 0.44693031907081604,
      "learning_rate": 8.188393794841958e-05,
      "loss": 0.0224,
      "step": 3160
    },
    {
      "epoch": 2.79295154185022,
      "grad_norm": 0.3741726279258728,
      "learning_rate": 8.175639666347094e-05,
      "loss": 0.0277,
      "step": 3170
    },
    {
      "epoch": 2.801762114537445,
      "grad_norm": 0.3499443829059601,
      "learning_rate": 8.162850809565623e-05,
      "loss": 0.0263,
      "step": 3180
    },
    {
      "epoch": 2.8105726872246697,
      "grad_norm": 0.3661585748195648,
      "learning_rate": 8.150027364354431e-05,
      "loss": 0.0237,
      "step": 3190
    },
    {
      "epoch": 2.8193832599118944,
      "grad_norm": 0.34154874086380005,
      "learning_rate": 8.137169470948662e-05,
      "loss": 0.031,
      "step": 3200
    },
    {
      "epoch": 2.828193832599119,
      "grad_norm": 0.25365132093429565,
      "learning_rate": 8.124277269960179e-05,
      "loss": 0.0264,
      "step": 3210
    },
    {
      "epoch": 2.8370044052863435,
      "grad_norm": 0.34982606768608093,
      "learning_rate": 8.111350902376023e-05,
      "loss": 0.0234,
      "step": 3220
    },
    {
      "epoch": 2.845814977973568,
      "grad_norm": 0.3106326162815094,
      "learning_rate": 8.098390509556883e-05,
      "loss": 0.0261,
      "step": 3230
    },
    {
      "epoch": 2.854625550660793,
      "grad_norm": 0.3587213158607483,
      "learning_rate": 8.085396233235536e-05,
      "loss": 0.0204,
      "step": 3240
    },
    {
      "epoch": 2.8634361233480177,
      "grad_norm": 0.22833660244941711,
      "learning_rate": 8.072368215515306e-05,
      "loss": 0.0193,
      "step": 3250
    },
    {
      "epoch": 2.8722466960352424,
      "grad_norm": 0.2590237259864807,
      "learning_rate": 8.059306598868506e-05,
      "loss": 0.0357,
      "step": 3260
    },
    {
      "epoch": 2.8810572687224667,
      "grad_norm": 0.3526532053947449,
      "learning_rate": 8.046211526134888e-05,
      "loss": 0.0235,
      "step": 3270
    },
    {
      "epoch": 2.889867841409692,
      "grad_norm": 0.3201982080936432,
      "learning_rate": 8.033083140520065e-05,
      "loss": 0.0232,
      "step": 3280
    },
    {
      "epoch": 2.898678414096916,
      "grad_norm": 0.2776814103126526,
      "learning_rate": 8.019921585593962e-05,
      "loss": 0.0279,
      "step": 3290
    },
    {
      "epoch": 2.907488986784141,
      "grad_norm": 0.5462639331817627,
      "learning_rate": 8.006727005289232e-05,
      "loss": 0.0216,
      "step": 3300
    },
    {
      "epoch": 2.9162995594713657,
      "grad_norm": 0.22424347698688507,
      "learning_rate": 7.993499543899692e-05,
      "loss": 0.0263,
      "step": 3310
    },
    {
      "epoch": 2.9251101321585904,
      "grad_norm": 0.3213883638381958,
      "learning_rate": 7.980239346078742e-05,
      "loss": 0.0255,
      "step": 3320
    },
    {
      "epoch": 2.933920704845815,
      "grad_norm": 0.3318139910697937,
      "learning_rate": 7.966946556837778e-05,
      "loss": 0.0237,
      "step": 3330
    },
    {
      "epoch": 2.9427312775330394,
      "grad_norm": 0.3042970299720764,
      "learning_rate": 7.953621321544616e-05,
      "loss": 0.0234,
      "step": 3340
    },
    {
      "epoch": 2.951541850220264,
      "grad_norm": 0.35554492473602295,
      "learning_rate": 7.940263785921896e-05,
      "loss": 0.0268,
      "step": 3350
    },
    {
      "epoch": 2.960352422907489,
      "grad_norm": 0.3340638279914856,
      "learning_rate": 7.926874096045482e-05,
      "loss": 0.0253,
      "step": 3360
    },
    {
      "epoch": 2.9691629955947136,
      "grad_norm": 0.34253039956092834,
      "learning_rate": 7.913452398342881e-05,
      "loss": 0.0294,
      "step": 3370
    },
    {
      "epoch": 2.9779735682819384,
      "grad_norm": 0.25855281949043274,
      "learning_rate": 7.89999883959163e-05,
      "loss": 0.0249,
      "step": 3380
    },
    {
      "epoch": 2.986784140969163,
      "grad_norm": 0.42531442642211914,
      "learning_rate": 7.886513566917687e-05,
      "loss": 0.0254,
      "step": 3390
    },
    {
      "epoch": 2.995594713656388,
      "grad_norm": 0.4424569606781006,
      "learning_rate": 7.872996727793838e-05,
      "loss": 0.032,
      "step": 3400
    },
    {
      "epoch": 3.004405286343612,
      "grad_norm": 0.4355500638484955,
      "learning_rate": 7.859448470038069e-05,
      "loss": 0.0258,
      "step": 3410
    },
    {
      "epoch": 3.013215859030837,
      "grad_norm": 0.4221145212650299,
      "learning_rate": 7.845868941811956e-05,
| "loss": 0.0275, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 3.0220264317180616, | |
| "grad_norm": 0.31481191515922546, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.0236, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 3.0308370044052864, | |
| "grad_norm": 0.30229127407073975, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0209, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 3.039647577092511, | |
| "grad_norm": 0.2897966504096985, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0275, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 3.048458149779736, | |
| "grad_norm": 0.36123448610305786, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0267, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 3.0572687224669606, | |
| "grad_norm": 0.39756864309310913, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.0236, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 3.066079295154185, | |
| "grad_norm": 0.1493741273880005, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.021, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 3.0748898678414096, | |
| "grad_norm": 0.3451590836048126, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.0252, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 3.0837004405286343, | |
| "grad_norm": 0.2669346034526825, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.029, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 3.092511013215859, | |
| "grad_norm": 0.30952003598213196, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0256, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 3.101321585903084, | |
| "grad_norm": 0.3206064701080322, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.0216, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 3.1101321585903086, | |
| "grad_norm": 0.23020698130130768, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.0241, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 3.118942731277533, | |
| "grad_norm": 0.2750619053840637, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.023, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 3.1277533039647576, | |
| "grad_norm": 0.6629090309143066, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0271, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 3.1365638766519823, | |
| "grad_norm": 0.2981358468532562, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0261, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 3.145374449339207, | |
| "grad_norm": 0.4417458176612854, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.0277, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 3.154185022026432, | |
| "grad_norm": 0.5524442791938782, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0245, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 3.1629955947136565, | |
| "grad_norm": 0.32020482420921326, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0292, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 3.171806167400881, | |
| "grad_norm": 0.2847195267677307, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0273, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 3.1806167400881056, | |
| "grad_norm": 0.3142676055431366, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0218, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 3.1894273127753303, | |
| "grad_norm": 0.31294623017311096, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.0258, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 3.198237885462555, | |
| "grad_norm": 0.35640108585357666, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.0246, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 3.20704845814978, | |
| "grad_norm": 0.36918187141418457, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0245, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 3.2158590308370045, | |
| "grad_norm": 0.3345973789691925, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0251, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 3.2246696035242293, | |
| "grad_norm": 0.31015530228614807, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.0231, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 3.2334801762114536, | |
| "grad_norm": 0.33162835240364075, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0226, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 3.2422907488986783, | |
| "grad_norm": 0.377908855676651, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0261, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 3.251101321585903, | |
| "grad_norm": 0.3883955776691437, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0223, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 3.2599118942731278, | |
| "grad_norm": 0.325094610452652, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0219, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 3.2687224669603525, | |
| "grad_norm": 0.2795925438404083, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0251, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 3.2775330396475773, | |
| "grad_norm": 0.26922687888145447, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.028, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 3.286343612334802, | |
| "grad_norm": 0.30862170457839966, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0238, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 3.2951541850220263, | |
| "grad_norm": 0.30990320444107056, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0245, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 3.303964757709251, | |
| "grad_norm": 0.3216609060764313, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0236, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 3.3127753303964758, | |
| "grad_norm": 0.3676994740962982, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0229, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 3.3215859030837005, | |
| "grad_norm": 0.4416605532169342, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.0301, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 3.3303964757709252, | |
| "grad_norm": 0.3287545144557953, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0261, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 3.33920704845815, | |
| "grad_norm": 0.30403026938438416, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.0231, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 3.3480176211453743, | |
| "grad_norm": 0.3217824101448059, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0256, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 3.356828193832599, | |
| "grad_norm": 0.30544620752334595, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0227, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 3.3656387665198237, | |
| "grad_norm": 0.35064464807510376, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0277, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 3.3744493392070485, | |
| "grad_norm": 0.3758690357208252, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0256, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 3.383259911894273, | |
| "grad_norm": 0.2884811758995056, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0222, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 3.392070484581498, | |
| "grad_norm": 0.24524566531181335, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0196, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 3.4008810572687223, | |
| "grad_norm": 0.24484437704086304, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.0256, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 3.409691629955947, | |
| "grad_norm": 0.3164523243904114, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0233, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 3.4185022026431717, | |
| "grad_norm": 0.2974601984024048, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0244, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 3.4273127753303965, | |
| "grad_norm": 0.20895928144454956, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0186, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 3.436123348017621, | |
| "grad_norm": 0.21821817755699158, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.0259, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 3.444933920704846, | |
| "grad_norm": 0.2787341773509979, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0205, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 3.4537444933920707, | |
| "grad_norm": 0.21136854588985443, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.0207, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 3.462555066079295, | |
| "grad_norm": 0.28783079981803894, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0223, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 3.4713656387665197, | |
| "grad_norm": 0.3261241018772125, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0257, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 3.4801762114537445, | |
| "grad_norm": 0.35324740409851074, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0264, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 3.488986784140969, | |
| "grad_norm": 0.3515624701976776, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0242, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 3.497797356828194, | |
| "grad_norm": 0.5380238890647888, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0253, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 3.5066079295154187, | |
| "grad_norm": 0.4415524899959564, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0212, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 3.5154185022026434, | |
| "grad_norm": 0.30823996663093567, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0264, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 3.5242290748898677, | |
| "grad_norm": 0.35451507568359375, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0237, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 3.5330396475770924, | |
| "grad_norm": 0.24660764634609222, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.021, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 3.541850220264317, | |
| "grad_norm": 0.27687129378318787, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.024, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 3.550660792951542, | |
| "grad_norm": 0.34586599469184875, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.0224, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 3.5594713656387666, | |
| "grad_norm": 0.20837046205997467, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0246, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 3.568281938325991, | |
| "grad_norm": 0.23617582023143768, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0218, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 3.577092511013216, | |
| "grad_norm": 0.30956313014030457, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0246, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 3.5859030837004404, | |
| "grad_norm": 0.25440889596939087, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0236, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 3.594713656387665, | |
| "grad_norm": 0.269356369972229, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0227, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 3.60352422907489, | |
| "grad_norm": 0.3169539272785187, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0216, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 3.6123348017621146, | |
| "grad_norm": 0.19459448754787445, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0239, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 3.6211453744493394, | |
| "grad_norm": 0.3735167980194092, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0235, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 3.6299559471365637, | |
| "grad_norm": 0.28738102316856384, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0217, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 3.6387665198237884, | |
| "grad_norm": 0.32610881328582764, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0212, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 3.647577092511013, | |
| "grad_norm": 0.45551782846450806, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0239, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 3.656387665198238, | |
| "grad_norm": 0.33968082070350647, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.0232, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 3.6651982378854626, | |
| "grad_norm": 0.21731100976467133, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.017, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 3.6740088105726874, | |
| "grad_norm": 0.22085045278072357, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0256, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 3.682819383259912, | |
| "grad_norm": 0.33036157488822937, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.0216, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 3.6916299559471364, | |
| "grad_norm": 0.34446290135383606, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.029, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 3.700440528634361, | |
| "grad_norm": 0.21548590064048767, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0197, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 3.709251101321586, | |
| "grad_norm": 0.39162519574165344, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0182, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 3.7180616740088106, | |
| "grad_norm": 0.3762594163417816, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.0218, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 3.7268722466960353, | |
| "grad_norm": 0.3828361928462982, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.0198, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 3.73568281938326, | |
| "grad_norm": 0.306306928396225, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0228, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 3.744493392070485, | |
| "grad_norm": 0.49352073669433594, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0231, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 3.753303964757709, | |
| "grad_norm": 0.35912954807281494, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0255, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 3.762114537444934, | |
| "grad_norm": 0.3816151022911072, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0208, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 3.7709251101321586, | |
| "grad_norm": 0.2685958445072174, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0213, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 3.7797356828193833, | |
| "grad_norm": 0.285758376121521, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0222, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 3.788546255506608, | |
| "grad_norm": 0.4136664569377899, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0265, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 3.7973568281938324, | |
| "grad_norm": 0.2725560963153839, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.0217, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 3.8061674008810575, | |
| "grad_norm": 0.3193100392818451, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0211, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 3.814977973568282, | |
| "grad_norm": 0.33473455905914307, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0185, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 3.8237885462555066, | |
| "grad_norm": 0.25524085760116577, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.018, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 3.8325991189427313, | |
| "grad_norm": 0.1881541907787323, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0219, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 3.841409691629956, | |
| "grad_norm": 0.28614911437034607, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0181, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 3.850220264317181, | |
| "grad_norm": 0.27426594495773315, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0185, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 3.859030837004405, | |
| "grad_norm": 0.20244523882865906, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0204, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 3.86784140969163, | |
| "grad_norm": 0.251155287027359, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0217, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 3.8766519823788546, | |
| "grad_norm": 0.2716732919216156, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.026, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 3.8854625550660793, | |
| "grad_norm": 0.18385761976242065, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0233, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 3.894273127753304, | |
| "grad_norm": 0.3853006660938263, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0236, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 3.9030837004405288, | |
| "grad_norm": 0.24452266097068787, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.021, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 3.9118942731277535, | |
| "grad_norm": 0.38369429111480713, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.0265, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 3.920704845814978, | |
| "grad_norm": 0.3623514175415039, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0223, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 3.9295154185022025, | |
| "grad_norm": 0.22040243446826935, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0236, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 3.9383259911894273, | |
| "grad_norm": 0.3161327838897705, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.021, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 3.947136563876652, | |
| "grad_norm": 0.24856112897396088, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0222, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 3.9559471365638768, | |
| "grad_norm": 0.25217586755752563, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0197, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 3.964757709251101, | |
| "grad_norm": 0.2778951823711395, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0221, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 3.9735682819383262, | |
| "grad_norm": 0.304324209690094, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0286, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 3.9823788546255505, | |
| "grad_norm": 0.44203925132751465, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0237, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 3.9911894273127753, | |
| "grad_norm": 0.2837058901786804, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0209, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.4247282147407532, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0198, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 4.008810572687224, | |
| "grad_norm": 0.34286704659461975, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0186, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 4.0176211453744495, | |
| "grad_norm": 0.2830366790294647, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0194, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 4.026431718061674, | |
| "grad_norm": 0.29630157351493835, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0237, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 4.035242290748899, | |
| "grad_norm": 0.38271835446357727, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0193, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 4.044052863436123, | |
| "grad_norm": 0.4679741859436035, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0228, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 4.052863436123348, | |
| "grad_norm": 0.2801056206226349, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0189, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 4.061674008810573, | |
| "grad_norm": 0.2729170024394989, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0254, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 4.070484581497797, | |
| "grad_norm": 0.39568910002708435, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0228, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 4.079295154185022, | |
| "grad_norm": 0.2907898426055908, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0216, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 4.0881057268722465, | |
| "grad_norm": 0.17161500453948975, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0161, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 4.096916299559472, | |
| "grad_norm": 0.23355220258235931, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0214, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 4.105726872246696, | |
| "grad_norm": 0.2941673696041107, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0283, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 4.114537444933921, | |
| "grad_norm": 0.39904913306236267, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0239, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 4.1233480176211454, | |
| "grad_norm": 0.1756250262260437, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0183, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 4.13215859030837, | |
| "grad_norm": 0.30655938386917114, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0228, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 4.140969162995595, | |
| "grad_norm": 0.2645743787288666, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0239, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 4.149779735682819, | |
| "grad_norm": 0.23189784586429596, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0189, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 4.158590308370044, | |
| "grad_norm": 0.26266565918922424, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0227, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 4.167400881057269, | |
| "grad_norm": 0.2168239802122116, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0204, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 4.176211453744493, | |
| "grad_norm": 0.2368573248386383, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.0178, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 4.185022026431718, | |
| "grad_norm": 0.3335096538066864, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0182, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 4.1938325991189425, | |
| "grad_norm": 0.37966716289520264, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0214, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 4.202643171806168, | |
| "grad_norm": 0.3061222434043884, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0211, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 4.211453744493392, | |
| "grad_norm": 0.20935995876789093, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0224, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 4.220264317180617, | |
| "grad_norm": 0.1767205446958542, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0182, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 4.229074889867841, | |
| "grad_norm": 0.42953187227249146, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0252, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 4.237885462555066, | |
| "grad_norm": 0.30595964193344116, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0196, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 4.246696035242291, | |
| "grad_norm": 0.3149837255477905, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.0193, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 4.255506607929515, | |
| "grad_norm": 0.25431329011917114, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.019, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 4.26431718061674, | |
| "grad_norm": 0.19693230092525482, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0304, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 4.273127753303965, | |
| "grad_norm": 0.3942834436893463, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0211, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 4.28193832599119, | |
| "grad_norm": 0.3084004819393158, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0164, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 4.290748898678414, | |
| "grad_norm": 0.30466142296791077, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.0235, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 4.299559471365638, | |
| "grad_norm": 0.36026012897491455, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0223, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 4.308370044052864, | |
| "grad_norm": 0.3848453462123871, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0221, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 4.317180616740088, | |
| "grad_norm": 0.3612060844898224, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0221, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 4.325991189427313, | |
| "grad_norm": 0.310299813747406, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0225, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 4.334801762114537, | |
| "grad_norm": 0.23117437958717346, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0157, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 4.343612334801762, | |
| "grad_norm": 0.22018550336360931, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0191, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 4.352422907488987, | |
| "grad_norm": 0.3259681165218353, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0187, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 4.361233480176211, | |
| "grad_norm": 0.25801074504852295, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0223, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 4.370044052863436, | |
| "grad_norm": 0.1351986825466156, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0183, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 4.378854625550661, | |
| "grad_norm": 0.23375943303108215, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0191, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 4.387665198237886, | |
| "grad_norm": 0.2995641827583313, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0282, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 4.39647577092511, | |
| "grad_norm": 0.329458087682724, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0175, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 4.405286343612334, | |
| "grad_norm": 0.42899206280708313, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0205, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 4.41409691629956, | |
| "grad_norm": 0.39040687680244446, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0188, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 4.422907488986784, | |
| "grad_norm": 0.2957626283168793, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0197, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 4.431718061674009, | |
| "grad_norm": 0.18431790173053741, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0222, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 4.440528634361233, | |
| "grad_norm": 0.26256611943244934, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0204, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 4.4493392070484585, | |
| "grad_norm": 0.3033408224582672, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.023, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 4.458149779735683, | |
| "grad_norm": 0.294161856174469, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.0183, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 4.466960352422907, | |
| "grad_norm": 0.2869643270969391, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0176, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 4.475770925110132, | |
| "grad_norm": 0.24307315051555634, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.0186, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 4.484581497797357, | |
| "grad_norm": 0.30990666151046753, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0193, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 4.493392070484582, | |
| "grad_norm": 0.28828710317611694, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0213, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 4.502202643171806, | |
| "grad_norm": 0.2170025259256363, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.0177, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 4.51101321585903, | |
| "grad_norm": 0.24892044067382812, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.0222, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 4.5198237885462555, | |
| "grad_norm": 0.29669520258903503, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0181, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 4.52863436123348, | |
| "grad_norm": 0.2533467411994934, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.017, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 4.537444933920705, | |
| "grad_norm": 0.25442326068878174, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0193, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 4.546255506607929, | |
| "grad_norm": 0.26913464069366455, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0199, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 4.5550660792951545, | |
| "grad_norm": 0.31291624903678894, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0154, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 4.563876651982379, | |
| "grad_norm": 0.2266656756401062, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0163, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 4.572687224669604, | |
| "grad_norm": 0.26088747382164, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0215, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 4.581497797356828, | |
| "grad_norm": 0.3020000159740448, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0211, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 4.590308370044053, | |
| "grad_norm": 0.251542329788208, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0178, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 4.599118942731278, | |
| "grad_norm": 0.2034073770046234, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0198, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 4.607929515418502, | |
| "grad_norm": 0.2958037257194519, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0201, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 4.616740088105727, | |
| "grad_norm": 0.1400628685951233, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.015, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 4.6255506607929515, | |
| "grad_norm": 0.16189464926719666, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.0157, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 4.634361233480176, | |
| "grad_norm": 0.28269001841545105, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0188, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 4.643171806167401, | |
| "grad_norm": 0.26359331607818604, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.0251, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 4.651982378854625, | |
| "grad_norm": 0.25474244356155396, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0189, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 4.6607929515418505, | |
| "grad_norm": 0.35969099402427673, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0223, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 4.669603524229075, | |
| "grad_norm": 0.3071631193161011, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.019, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 4.6784140969163, | |
| "grad_norm": 0.31097346544265747, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.02, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 4.687224669603524, | |
| "grad_norm": 0.3924465477466583, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0186, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 4.6960352422907485, | |
| "grad_norm": 0.23977893590927124, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.0164, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 4.704845814977974, | |
| "grad_norm": 0.2998979389667511, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0211, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 4.713656387665198, | |
| "grad_norm": 0.3177388906478882, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0165, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 4.722466960352423, | |
| "grad_norm": 0.16594751179218292, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0195, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 4.7312775330396475, | |
| "grad_norm": 0.3338923156261444, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0226, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 4.740088105726873, | |
| "grad_norm": 0.22867751121520996, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0188, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 4.748898678414097, | |
| "grad_norm": 0.17152239382266998, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0147, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 4.757709251101321, | |
| "grad_norm": 0.18927201628684998, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0175, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 4.766519823788546, | |
| "grad_norm": 0.319146066904068, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0227, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 4.775330396475771, | |
| "grad_norm": 0.22859056293964386, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0159, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 4.784140969162996, | |
| "grad_norm": 0.19756799936294556, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0157, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 4.79295154185022, | |
| "grad_norm": 0.18255679309368134, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0184, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 4.8017621145374445, | |
| "grad_norm": 0.25452712178230286, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0253, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 4.81057268722467, | |
| "grad_norm": 0.19899670779705048, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0169, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 4.819383259911894, | |
| "grad_norm": 0.2351873368024826, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0201, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 4.828193832599119, | |
| "grad_norm": 0.2532845735549927, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0153, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 4.8370044052863435, | |
| "grad_norm": 0.24195687472820282, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0165, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 4.845814977973569, | |
| "grad_norm": 0.28470125794410706, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.0146, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 4.854625550660793, | |
| "grad_norm": 0.33941349387168884, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.0206, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 4.863436123348018, | |
| "grad_norm": 0.18514783680438995, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0153, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 4.872246696035242, | |
| "grad_norm": 0.2225433588027954, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.0148, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 4.881057268722467, | |
| "grad_norm": 0.14531005918979645, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0252, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 4.889867841409692, | |
| "grad_norm": 0.19637036323547363, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0128, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 4.898678414096916, | |
| "grad_norm": 0.21819591522216797, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0144, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 4.907488986784141, | |
| "grad_norm": 0.28812867403030396, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0151, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 4.916299559471366, | |
| "grad_norm": 0.2827639579772949, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0157, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 4.92511013215859, | |
| "grad_norm": 0.22692586481571198, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.0191, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 4.933920704845815, | |
| "grad_norm": 0.25510334968566895, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0192, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 4.942731277533039, | |
| "grad_norm": 0.25702518224716187, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0211, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 4.951541850220265, | |
| "grad_norm": 0.1856357306241989, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0176, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 4.960352422907489, | |
| "grad_norm": 0.25965064764022827, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0163, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 4.969162995594713, | |
| "grad_norm": 0.24453793466091156, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0194, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 4.977973568281938, | |
| "grad_norm": 0.3319341540336609, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0177, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 4.986784140969163, | |
| "grad_norm": 0.19699200987815857, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0154, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 4.995594713656388, | |
| "grad_norm": 0.2864338457584381, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.0201, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 5.004405286343612, | |
| "grad_norm": 0.24032606184482574, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0182, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 5.013215859030837, | |
| "grad_norm": 0.4184878468513489, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.0201, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 5.022026431718062, | |
| "grad_norm": 0.20170339941978455, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0184, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 5.030837004405286, | |
| "grad_norm": 0.19894003868103027, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.0177, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 5.039647577092511, | |
| "grad_norm": 0.25481143593788147, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0225, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 5.048458149779735, | |
| "grad_norm": 0.47233426570892334, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0217, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 5.057268722466961, | |
| "grad_norm": 0.23280541598796844, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0172, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 5.066079295154185, | |
| "grad_norm": 0.18920181691646576, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.018, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 5.07488986784141, | |
| "grad_norm": 0.17967194318771362, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0166, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 5.083700440528634, | |
| "grad_norm": 0.19055306911468506, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0182, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 5.092511013215859, | |
| "grad_norm": 0.3184426724910736, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0184, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 5.101321585903084, | |
| "grad_norm": 0.3454791307449341, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0158, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 5.110132158590308, | |
| "grad_norm": 0.33023422956466675, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0177, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 5.118942731277533, | |
| "grad_norm": 0.20698194205760956, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.013, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 5.127753303964758, | |
| "grad_norm": 0.21686479449272156, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0159, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 5.136563876651983, | |
| "grad_norm": 0.19286371767520905, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0182, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 5.145374449339207, | |
| "grad_norm": 0.1658671796321869, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0201, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 5.154185022026431, | |
| "grad_norm": 0.23955821990966797, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0208, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 5.1629955947136565, | |
| "grad_norm": 0.17203763127326965, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.018, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 5.171806167400881, | |
| "grad_norm": 0.1720932275056839, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0174, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 5.180616740088106, | |
| "grad_norm": 0.19525764882564545, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0189, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 5.18942731277533, | |
| "grad_norm": 0.2719067931175232, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0213, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 5.1982378854625555, | |
| "grad_norm": 0.30417177081108093, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.016, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 5.20704845814978, | |
| "grad_norm": 0.2089170664548874, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0178, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 5.215859030837004, | |
| "grad_norm": 0.2092333734035492, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.0174, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 5.224669603524229, | |
| "grad_norm": 0.23303531110286713, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0175, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 5.233480176211454, | |
| "grad_norm": 0.19229938089847565, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0156, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 5.242290748898679, | |
| "grad_norm": 0.3346642553806305, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0145, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 5.251101321585903, | |
| "grad_norm": 0.22922012209892273, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.0148, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 5.259911894273127, | |
| "grad_norm": 0.23253925144672394, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0145, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 5.2687224669603525, | |
| "grad_norm": 0.20831631124019623, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0158, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 5.277533039647577, | |
| "grad_norm": 0.189396470785141, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0157, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 5.286343612334802, | |
| "grad_norm": 0.2883262038230896, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0181, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 5.295154185022026, | |
| "grad_norm": 0.22241443395614624, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0133, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 5.3039647577092515, | |
| "grad_norm": 0.14543579518795013, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0167, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 5.312775330396476, | |
| "grad_norm": 0.3291434943675995, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0147, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 5.3215859030837, | |
| "grad_norm": 0.20602074265480042, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.0159, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 5.330396475770925, | |
| "grad_norm": 0.24406304955482483, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0155, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 5.3392070484581495, | |
| "grad_norm": 0.20986111462116241, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0134, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 5.348017621145375, | |
| "grad_norm": 0.17950712144374847, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0137, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 5.356828193832599, | |
| "grad_norm": 0.24280305206775665, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0191, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 5.365638766519824, | |
| "grad_norm": 0.24258403480052948, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0158, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 5.3744493392070485, | |
| "grad_norm": 0.28767848014831543, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0158, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 5.383259911894273, | |
| "grad_norm": 0.2609294056892395, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0138, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 5.392070484581498, | |
| "grad_norm": 0.14162668585777283, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0151, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 5.400881057268722, | |
| "grad_norm": 0.20880243182182312, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0159, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 5.409691629955947, | |
| "grad_norm": 0.2009952813386917, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0163, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 5.418502202643172, | |
| "grad_norm": 0.20617175102233887, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0134, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 5.427312775330396, | |
| "grad_norm": 0.24645555019378662, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0155, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 5.436123348017621, | |
| "grad_norm": 0.26324668526649475, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0162, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 5.4449339207048455, | |
| "grad_norm": 0.20854219794273376, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0188, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 5.453744493392071, | |
| "grad_norm": 0.23406226933002472, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0164, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 5.462555066079295, | |
| "grad_norm": 0.16278864443302155, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0193, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 5.47136563876652, | |
| "grad_norm": 0.3837991952896118, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0157, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 5.4801762114537445, | |
| "grad_norm": 0.24542906880378723, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0149, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 5.48898678414097, | |
| "grad_norm": 0.2564907371997833, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0202, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 5.497797356828194, | |
| "grad_norm": 0.1733681559562683, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0124, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 5.506607929515418, | |
| "grad_norm": 0.28536540269851685, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0176, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 5.515418502202643, | |
| "grad_norm": 0.2272253930568695, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0189, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 5.524229074889868, | |
| "grad_norm": 0.1128871962428093, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0183, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 5.533039647577093, | |
| "grad_norm": 0.23308046162128448, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0196, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 5.541850220264317, | |
| "grad_norm": 0.23625896871089935, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0201, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 5.5506607929515415, | |
| "grad_norm": 0.3021588921546936, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0145, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 5.559471365638767, | |
| "grad_norm": 0.2362976372241974, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0129, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 5.568281938325991, | |
| "grad_norm": 0.28255513310432434, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0143, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 5.577092511013216, | |
| "grad_norm": 0.22461670637130737, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0164, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 5.58590308370044, | |
| "grad_norm": 0.14973396062850952, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0149, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 5.594713656387665, | |
| "grad_norm": 0.207713782787323, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.013, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 5.60352422907489, | |
| "grad_norm": 0.2111411690711975, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0119, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 5.612334801762114, | |
| "grad_norm": 0.11048179864883423, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0171, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 5.621145374449339, | |
| "grad_norm": 0.2623024582862854, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0168, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 5.629955947136564, | |
| "grad_norm": 0.22975510358810425, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.0157, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 5.638766519823789, | |
| "grad_norm": 0.2973436713218689, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0179, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 5.647577092511013, | |
| "grad_norm": 0.20701462030410767, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.014, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 5.656387665198238, | |
| "grad_norm": 0.22361378371715546, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0139, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 5.665198237885463, | |
| "grad_norm": 0.22101789712905884, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0148, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 5.674008810572687, | |
| "grad_norm": 0.19633148610591888, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.0143, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 5.682819383259912, | |
| "grad_norm": 0.1673276126384735, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0124, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 5.691629955947136, | |
| "grad_norm": 0.20139001309871674, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0116, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 5.700440528634362, | |
| "grad_norm": 0.2163841277360916, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0123, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 5.709251101321586, | |
| "grad_norm": 0.29263997077941895, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0173, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 5.71806167400881, | |
| "grad_norm": 0.31919407844543457, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0112, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 5.726872246696035, | |
| "grad_norm": 0.23940733075141907, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0156, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 5.73568281938326, | |
| "grad_norm": 0.14040683209896088, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.012, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 5.744493392070485, | |
| "grad_norm": 0.37421512603759766, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0158, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 5.753303964757709, | |
| "grad_norm": 0.17839102447032928, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0183, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 5.762114537444934, | |
| "grad_norm": 0.22899256646633148, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0135, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 5.770925110132159, | |
| "grad_norm": 0.24322623014450073, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0149, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 5.779735682819383, | |
| "grad_norm": 0.1910683661699295, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0169, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 5.788546255506608, | |
| "grad_norm": 0.19330912828445435, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0154, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 5.797356828193832, | |
| "grad_norm": 0.1553020030260086, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0213, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 5.8061674008810575, | |
| "grad_norm": 0.26821714639663696, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0163, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 5.814977973568282, | |
| "grad_norm": 0.2822837829589844, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0129, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 5.823788546255507, | |
| "grad_norm": 0.2428331971168518, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0137, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 5.832599118942731, | |
| "grad_norm": 0.18726138770580292, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0168, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 5.841409691629956, | |
| "grad_norm": 0.3458747863769531, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0172, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 5.850220264317181, | |
| "grad_norm": 0.31919363141059875, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0145, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 5.859030837004405, | |
| "grad_norm": 0.26688581705093384, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0144, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 5.86784140969163, | |
| "grad_norm": 0.3105294108390808, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.0144, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 5.8766519823788546, | |
| "grad_norm": 0.17193050682544708, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0126, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 5.885462555066079, | |
| "grad_norm": 0.28694456815719604, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0178, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 5.894273127753304, | |
| "grad_norm": 0.2310262769460678, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.0112, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 5.903083700440528, | |
| "grad_norm": 0.2569144070148468, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0164, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 5.9118942731277535, | |
| "grad_norm": 0.23605793714523315, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0119, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 5.920704845814978, | |
| "grad_norm": 0.2592039108276367, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0164, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 5.929515418502203, | |
| "grad_norm": 0.21431533992290497, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.014, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 5.938325991189427, | |
| "grad_norm": 0.23535235226154327, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0174, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 5.9471365638766525, | |
| "grad_norm": 0.16974180936813354, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0117, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 5.955947136563877, | |
| "grad_norm": 0.21835190057754517, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.0123, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 5.964757709251101, | |
| "grad_norm": 0.34283384680747986, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.0124, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 5.973568281938326, | |
| "grad_norm": 0.22306247055530548, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0118, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 5.9823788546255505, | |
| "grad_norm": 0.3040897250175476, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0165, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 5.991189427312776, | |
| "grad_norm": 0.2781446576118469, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0168, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "grad_norm": 0.6030620336532593, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0177, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 6.008810572687224, | |
| "grad_norm": 0.22731463611125946, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0108, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 6.0176211453744495, | |
| "grad_norm": 0.22170773148536682, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0098, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 6.026431718061674, | |
| "grad_norm": 0.12967190146446228, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.012, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 6.035242290748899, | |
| "grad_norm": 0.27048954367637634, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.014, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 6.044052863436123, | |
| "grad_norm": 0.1873103678226471, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.011, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 6.052863436123348, | |
| "grad_norm": 0.18640875816345215, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.015, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 6.061674008810573, | |
| "grad_norm": 0.19210532307624817, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0182, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 6.070484581497797, | |
| "grad_norm": 0.21812209486961365, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0153, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 6.079295154185022, | |
| "grad_norm": 0.223178893327713, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0146, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 6.0881057268722465, | |
| "grad_norm": 0.2396155595779419, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.015, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 6.096916299559472, | |
| "grad_norm": 0.2556948661804199, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.017, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 6.105726872246696, | |
| "grad_norm": 0.1809254139661789, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0148, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 6.114537444933921, | |
| "grad_norm": 0.27187225222587585, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0107, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 6.1233480176211454, | |
| "grad_norm": 0.21413591504096985, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0131, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 6.13215859030837, | |
| "grad_norm": 0.10926367342472076, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0144, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 6.140969162995595, | |
| "grad_norm": 0.16227468848228455, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0126, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 6.149779735682819, | |
| "grad_norm": 0.21442601084709167, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0157, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 6.158590308370044, | |
| "grad_norm": 0.20361900329589844, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0127, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 6.167400881057269, | |
| "grad_norm": 0.1771620810031891, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0137, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 6.176211453744493, | |
| "grad_norm": 0.19616013765335083, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.0125, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 6.185022026431718, | |
| "grad_norm": 0.13917307555675507, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0102, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 6.1938325991189425, | |
| "grad_norm": 0.261055588722229, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0111, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 6.202643171806168, | |
| "grad_norm": 0.12454350292682648, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0112, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 6.211453744493392, | |
| "grad_norm": 0.21024969220161438, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0179, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 6.220264317180617, | |
| "grad_norm": 0.15939319133758545, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0168, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 6.229074889867841, | |
| "grad_norm": 0.3114739656448364, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0135, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 6.237885462555066, | |
| "grad_norm": 0.19519902765750885, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.0106, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 6.246696035242291, | |
| "grad_norm": 0.22251355648040771, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0114, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 6.255506607929515, | |
| "grad_norm": 0.24429509043693542, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0104, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 6.26431718061674, | |
| "grad_norm": 0.1587536334991455, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0136, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 6.273127753303965, | |
| "grad_norm": 0.16749411821365356, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0104, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 6.28193832599119, | |
| "grad_norm": 0.13962094485759735, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0132, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 6.290748898678414, | |
| "grad_norm": 0.19776326417922974, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0143, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 6.299559471365638, | |
| "grad_norm": 0.13095836341381073, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0153, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 6.308370044052864, | |
| "grad_norm": 0.31868863105773926, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0133, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 6.317180616740088, | |
| "grad_norm": 0.1965586245059967, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0128, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 6.325991189427313, | |
| "grad_norm": 0.1406683474779129, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0117, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 6.334801762114537, | |
| "grad_norm": 0.19301468133926392, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.015, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 6.343612334801762, | |
| "grad_norm": 0.2032468020915985, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0134, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 6.352422907488987, | |
| "grad_norm": 0.16054677963256836, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0125, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 6.361233480176211, | |
| "grad_norm": 0.12105733901262283, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0117, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 6.370044052863436, | |
| "grad_norm": 0.20047269761562347, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0135, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 6.378854625550661, | |
| "grad_norm": 0.15960243344306946, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.0091, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 6.387665198237886, | |
| "grad_norm": 0.20354244112968445, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0127, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 6.39647577092511, | |
| "grad_norm": 0.24114976823329926, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0126, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 6.405286343612334, | |
| "grad_norm": 0.169529527425766, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.0124, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 6.41409691629956, | |
| "grad_norm": 0.1623968482017517, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.0108, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 6.422907488986784, | |
| "grad_norm": 0.2275482565164566, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0095, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 6.431718061674009, | |
| "grad_norm": 0.28174909949302673, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0164, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 6.440528634361233, | |
| "grad_norm": 0.15501025319099426, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0129, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 6.4493392070484585, | |
| "grad_norm": 0.20075616240501404, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0139, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 6.458149779735683, | |
| "grad_norm": 0.24217092990875244, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.012, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 6.466960352422907, | |
| "grad_norm": 0.18960405886173248, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0153, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 6.475770925110132, | |
| "grad_norm": 0.24632038176059723, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.0128, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 6.484581497797357, | |
| "grad_norm": 0.13031667470932007, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0098, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 6.493392070484582, | |
| "grad_norm": 0.1333320289850235, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0112, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 6.502202643171806, | |
| "grad_norm": 0.18063907325267792, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0151, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 6.51101321585903, | |
| "grad_norm": 0.19713599979877472, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0152, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 6.5198237885462555, | |
| "grad_norm": 0.2273755520582199, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0122, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 6.52863436123348, | |
| "grad_norm": 0.19793947041034698, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0095, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 6.537444933920705, | |
| "grad_norm": 0.21115423738956451, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0133, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 6.546255506607929, | |
| "grad_norm": 0.21872656047344208, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0106, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 6.5550660792951545, | |
| "grad_norm": 0.2105487436056137, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0156, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 6.563876651982379, | |
| "grad_norm": 0.17240862548351288, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0119, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 6.572687224669604, | |
| "grad_norm": 0.10863512754440308, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0085, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 6.581497797356828, | |
| "grad_norm": 0.1834077537059784, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0168, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 6.590308370044053, | |
| "grad_norm": 0.23648475110530853, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0137, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 6.599118942731278, | |
| "grad_norm": 0.18672199547290802, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0144, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 6.607929515418502, | |
| "grad_norm": 0.26310789585113525, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0136, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 6.616740088105727, | |
| "grad_norm": 0.15993621945381165, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0108, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 6.6255506607929515, | |
| "grad_norm": 0.20511959493160248, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0136, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 6.634361233480176, | |
| "grad_norm": 0.2817050516605377, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0131, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 6.643171806167401, | |
| "grad_norm": 0.22976163029670715, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0133, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 6.651982378854625, | |
| "grad_norm": 0.1283281445503235, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0108, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 6.6607929515418505, | |
| "grad_norm": 0.12401619553565979, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0109, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 6.669603524229075, | |
| "grad_norm": 0.12286385893821716, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0107, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 6.6784140969163, | |
| "grad_norm": 0.1818159818649292, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0134, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 6.687224669603524, | |
| "grad_norm": 0.2135045826435089, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.016, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 6.6960352422907485, | |
| "grad_norm": 0.13272634148597717, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0127, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 6.704845814977974, | |
| "grad_norm": 0.17318959534168243, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0095, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 6.713656387665198, | |
| "grad_norm": 0.15461263060569763, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.0111, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 6.722466960352423, | |
| "grad_norm": 0.2410537153482437, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0116, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 6.7312775330396475, | |
| "grad_norm": 0.2382768988609314, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0117, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 6.740088105726873, | |
| "grad_norm": 0.18189078569412231, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0107, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 6.748898678414097, | |
| "grad_norm": 0.19339823722839355, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0117, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 6.757709251101321, | |
| "grad_norm": 0.14754928648471832, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.012, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 6.766519823788546, | |
| "grad_norm": 0.18535995483398438, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0107, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 6.775330396475771, | |
| "grad_norm": 0.20881755650043488, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0116, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 6.784140969162996, | |
| "grad_norm": 0.2395937144756317, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.0106, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 6.79295154185022, | |
| "grad_norm": 0.2480756640434265, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.0135, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 6.8017621145374445, | |
| "grad_norm": 0.1753438413143158, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.0116, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 6.81057268722467, | |
| "grad_norm": 0.15033631026744843, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.0108, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 6.819383259911894, | |
| "grad_norm": 0.13928894698619843, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.012, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 6.828193832599119, | |
| "grad_norm": 0.14780718088150024, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.0106, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 6.8370044052863435, | |
| "grad_norm": 0.13364066183567047, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.011, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 6.845814977973569, | |
| "grad_norm": 0.13415399193763733, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0092, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 6.854625550660793, | |
| "grad_norm": 0.12481086701154709, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0112, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 6.863436123348018, | |
| "grad_norm": 0.11756137758493423, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0105, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 6.872246696035242, | |
| "grad_norm": 0.22908249497413635, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0123, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 6.881057268722467, | |
| "grad_norm": 0.13879618048667908, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0086, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 6.889867841409692, | |
| "grad_norm": 0.34728604555130005, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.016, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 6.898678414096916, | |
| "grad_norm": 0.2109571099281311, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0144, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 6.907488986784141, | |
| "grad_norm": 0.22600877285003662, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0121, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 6.916299559471366, | |
| "grad_norm": 0.1305195391178131, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0119, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 6.92511013215859, | |
| "grad_norm": 0.15569403767585754, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0092, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 6.933920704845815, | |
| "grad_norm": 0.2546798586845398, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0115, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 6.942731277533039, | |
| "grad_norm": 0.1412612646818161, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0091, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 6.951541850220265, | |
| "grad_norm": 0.07494200021028519, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0114, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 6.960352422907489, | |
| "grad_norm": 0.15473130345344543, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0113, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 6.969162995594713, | |
| "grad_norm": 0.21891425549983978, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.0126, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 6.977973568281938, | |
| "grad_norm": 0.10276913642883301, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0122, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 6.986784140969163, | |
| "grad_norm": 0.1931139975786209, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0096, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 6.995594713656388, | |
| "grad_norm": 0.1674557626247406, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0142, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 7.004405286343612, | |
| "grad_norm": 0.27852481603622437, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0134, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 7.013215859030837, | |
| "grad_norm": 0.15665708482265472, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.0177, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 7.022026431718062, | |
| "grad_norm": 0.1355111002922058, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.013, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 7.030837004405286, | |
| "grad_norm": 0.28232458233833313, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0139, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 7.039647577092511, | |
| "grad_norm": 0.1924564242362976, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0124, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 7.048458149779735, | |
| "grad_norm": 0.1247735396027565, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0139, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 7.057268722466961, | |
| "grad_norm": 0.22031541168689728, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0126, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 7.066079295154185, | |
| "grad_norm": 0.15807406604290009, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.012, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 7.07488986784141, | |
| "grad_norm": 0.1326325386762619, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0127, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 7.083700440528634, | |
| "grad_norm": 0.15993936359882355, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0102, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 7.092511013215859, | |
| "grad_norm": 0.21270495653152466, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.0085, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 7.101321585903084, | |
| "grad_norm": 0.10668961703777313, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0079, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 7.110132158590308, | |
| "grad_norm": 0.19661258161067963, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0135, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 7.118942731277533, | |
| "grad_norm": 0.17048905789852142, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0093, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 7.127753303964758, | |
| "grad_norm": 0.2004374861717224, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.0094, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 7.136563876651983, | |
| "grad_norm": 0.23691324889659882, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0092, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 7.145374449339207, | |
| "grad_norm": 0.21149414777755737, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0096, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 7.154185022026431, | |
| "grad_norm": 0.18663689494132996, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0099, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 7.1629955947136565, | |
| "grad_norm": 0.18826255202293396, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0101, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 7.171806167400881, | |
| "grad_norm": 0.16116543114185333, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.008, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 7.180616740088106, | |
| "grad_norm": 0.09787632524967194, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0133, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 7.18942731277533, | |
| "grad_norm": 0.13738888502120972, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0112, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 7.1982378854625555, | |
| "grad_norm": 0.14665068686008453, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0097, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 7.20704845814978, | |
| "grad_norm": 0.20202532410621643, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0103, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 7.215859030837004, | |
| "grad_norm": 0.1625099778175354, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0105, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 7.224669603524229, | |
| "grad_norm": 0.25180181860923767, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0119, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 7.233480176211454, | |
| "grad_norm": 0.20228782296180725, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0086, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 7.242290748898679, | |
| "grad_norm": 0.19143271446228027, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0131, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 7.251101321585903, | |
| "grad_norm": 0.14162953197956085, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0101, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 7.259911894273127, | |
| "grad_norm": 0.21944768726825714, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0101, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 7.2687224669603525, | |
| "grad_norm": 0.14631567895412445, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.009, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 7.277533039647577, | |
| "grad_norm": 0.1052655577659607, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0095, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 7.286343612334802, | |
| "grad_norm": 0.15168747305870056, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0118, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 7.295154185022026, | |
| "grad_norm": 0.24220536649227142, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0135, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 7.3039647577092515, | |
| "grad_norm": 0.2568817436695099, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0121, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 7.312775330396476, | |
| "grad_norm": 0.09819544106721878, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0087, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 7.3215859030837, | |
| "grad_norm": 0.17261157929897308, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0099, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 7.330396475770925, | |
| "grad_norm": 0.16128742694854736, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0138, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 7.3392070484581495, | |
| "grad_norm": 0.18056555092334747, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0096, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 7.348017621145375, | |
| "grad_norm": 0.1281258761882782, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0104, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 7.356828193832599, | |
| "grad_norm": 0.14238831400871277, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0114, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 7.365638766519824, | |
| "grad_norm": 0.1334840953350067, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0106, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 7.3744493392070485, | |
| "grad_norm": 0.136586531996727, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0092, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 7.383259911894273, | |
| "grad_norm": 0.22804556787014008, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0106, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 7.392070484581498, | |
| "grad_norm": 0.16213518381118774, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.011, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 7.400881057268722, | |
| "grad_norm": 0.10505513846874237, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.011, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 7.409691629955947, | |
| "grad_norm": 0.1767432689666748, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0068, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 7.418502202643172, | |
| "grad_norm": 0.16626302897930145, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0099, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 7.427312775330396, | |
| "grad_norm": 0.16383399069309235, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0082, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 7.436123348017621, | |
| "grad_norm": 0.10148213803768158, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0087, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 7.4449339207048455, | |
| "grad_norm": 0.14975415170192719, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0094, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 7.453744493392071, | |
| "grad_norm": 0.1623256951570511, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.008, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 7.462555066079295, | |
| "grad_norm": 0.200961634516716, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.0095, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 7.47136563876652, | |
| "grad_norm": 0.262118399143219, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.0088, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 7.4801762114537445, | |
| "grad_norm": 0.2054542601108551, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0141, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 7.48898678414097, | |
| "grad_norm": 0.19710037112236023, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0099, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 7.497797356828194, | |
| "grad_norm": 0.1730693280696869, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0106, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 7.506607929515418, | |
| "grad_norm": 0.13827989995479584, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0107, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 7.515418502202643, | |
| "grad_norm": 0.13238520920276642, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0107, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 7.524229074889868, | |
| "grad_norm": 0.10015920549631119, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0081, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 7.533039647577093, | |
| "grad_norm": 0.152719184756279, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0101, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 7.541850220264317, | |
| "grad_norm": 0.1387982964515686, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0094, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 7.5506607929515415, | |
| "grad_norm": 0.2311563789844513, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0102, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 7.559471365638767, | |
| "grad_norm": 0.12781119346618652, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0106, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 7.568281938325991, | |
| "grad_norm": 0.06175090745091438, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0084, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 7.577092511013216, | |
| "grad_norm": 0.1708836853504181, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.011, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 7.58590308370044, | |
| "grad_norm": 0.2520051896572113, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0127, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 7.594713656387665, | |
| "grad_norm": 0.12650425732135773, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0111, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 7.60352422907489, | |
| "grad_norm": 0.24746622145175934, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0104, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 7.612334801762114, | |
| "grad_norm": 0.1990758627653122, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.009, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 7.621145374449339, | |
| "grad_norm": 0.36077943444252014, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0084, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 7.629955947136564, | |
| "grad_norm": 0.18182502686977386, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0099, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 7.638766519823789, | |
| "grad_norm": 0.13928177952766418, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0072, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 7.647577092511013, | |
| "grad_norm": 0.19271506369113922, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0101, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 7.656387665198238, | |
| "grad_norm": 0.14538010954856873, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0118, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 7.665198237885463, | |
| "grad_norm": 0.24548114836215973, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0132, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 7.674008810572687, | |
| "grad_norm": 0.18391475081443787, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0074, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 7.682819383259912, | |
| "grad_norm": 0.14087234437465668, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0082, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 7.691629955947136, | |
| "grad_norm": 0.11092592030763626, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0099, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 7.700440528634362, | |
| "grad_norm": 0.17367199063301086, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0097, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 7.709251101321586, | |
| "grad_norm": 0.15207166969776154, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0114, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 7.71806167400881, | |
| "grad_norm": 0.13426803052425385, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.009, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 7.726872246696035, | |
| "grad_norm": 0.0951467826962471, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0096, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 7.73568281938326, | |
| "grad_norm": 0.1682031899690628, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0106, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 7.744493392070485, | |
| "grad_norm": 0.20791693031787872, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.0126, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 7.753303964757709, | |
| "grad_norm": 0.13085979223251343, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0098, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 7.762114537444934, | |
| "grad_norm": 0.20498213171958923, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0117, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 7.770925110132159, | |
| "grad_norm": 0.18791809678077698, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.0082, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 7.779735682819383, | |
| "grad_norm": 0.10853845626115799, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0117, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 7.788546255506608, | |
| "grad_norm": 0.10245463997125626, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0105, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 7.797356828193832, | |
| "grad_norm": 0.15440000593662262, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0076, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 7.8061674008810575, | |
| "grad_norm": 0.1331048458814621, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0124, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 7.814977973568282, | |
| "grad_norm": 0.16272245347499847, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0099, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 7.823788546255507, | |
| "grad_norm": 0.13609732687473297, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.0065, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 7.832599118942731, | |
| "grad_norm": 0.13780727982521057, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0111, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 7.841409691629956, | |
| "grad_norm": 0.22798976302146912, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.0085, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 7.850220264317181, | |
| "grad_norm": 0.1594877392053604, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.0125, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 7.859030837004405, | |
| "grad_norm": 0.08663804084062576, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0082, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 7.86784140969163, | |
| "grad_norm": 0.15692470967769623, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0093, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 7.8766519823788546, | |
| "grad_norm": 0.1499648094177246, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0082, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 7.885462555066079, | |
| "grad_norm": 0.1468629539012909, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0097, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 7.894273127753304, | |
| "grad_norm": 0.14219050109386444, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0104, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 7.903083700440528, | |
| "grad_norm": 0.09568879008293152, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0105, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 7.9118942731277535, | |
| "grad_norm": 0.10689511895179749, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0079, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 7.920704845814978, | |
| "grad_norm": 0.20396101474761963, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.01, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 7.929515418502203, | |
| "grad_norm": 0.14855751395225525, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.0112, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 7.938325991189427, | |
| "grad_norm": 0.10204718261957169, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.0107, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 7.9471365638766525, | |
| "grad_norm": 0.13087639212608337, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.008, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 7.955947136563877, | |
| "grad_norm": 0.1549372673034668, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.0084, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 7.964757709251101, | |
| "grad_norm": 0.15368710458278656, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0082, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 7.973568281938326, | |
| "grad_norm": 0.08389320224523544, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0087, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 7.9823788546255505, | |
| "grad_norm": 0.16164475679397583, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.0111, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 7.991189427312776, | |
| "grad_norm": 0.2523195743560791, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0079, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "grad_norm": 0.2281274050474167, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.0106, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 8.008810572687224, | |
| "grad_norm": 0.13164950907230377, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.0069, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 8.017621145374449, | |
| "grad_norm": 0.2057257741689682, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.0076, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 8.026431718061675, | |
| "grad_norm": 0.15970078110694885, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0102, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 8.035242290748899, | |
| "grad_norm": 0.1781604290008545, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.0135, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 8.044052863436123, | |
| "grad_norm": 0.09258292615413666, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0073, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 8.052863436123348, | |
| "grad_norm": 0.14273759722709656, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.0061, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 8.061674008810572, | |
| "grad_norm": 0.15033787488937378, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0081, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 8.070484581497798, | |
| "grad_norm": 0.17615845799446106, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0129, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 8.079295154185022, | |
| "grad_norm": 0.1668272316455841, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.0101, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 8.088105726872246, | |
| "grad_norm": 0.1184440553188324, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.0123, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 8.09691629955947, | |
| "grad_norm": 0.2241150140762329, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.0096, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 8.105726872246697, | |
| "grad_norm": 0.24009497463703156, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.0081, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 8.114537444933921, | |
| "grad_norm": 0.1850406676530838, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0105, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 8.123348017621145, | |
| "grad_norm": 0.23549209535121918, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.0113, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 8.13215859030837, | |
| "grad_norm": 0.1622724086046219, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.0091, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 8.140969162995594, | |
| "grad_norm": 0.1053854301571846, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.0099, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 8.14977973568282, | |
| "grad_norm": 0.18701764941215515, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.01, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 8.158590308370044, | |
| "grad_norm": 0.12285590916872025, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.0104, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 8.167400881057269, | |
| "grad_norm": 0.13647183775901794, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0092, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 8.176211453744493, | |
| "grad_norm": 0.1655772477388382, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.0078, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 8.185022026431717, | |
| "grad_norm": 0.15348577499389648, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.0095, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 8.193832599118943, | |
| "grad_norm": 0.16637077927589417, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.006, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 8.202643171806168, | |
| "grad_norm": 0.16254575550556183, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.0122, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 8.211453744493392, | |
| "grad_norm": 0.20016156136989594, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0104, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 8.220264317180616, | |
| "grad_norm": 0.17355690896511078, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.0098, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 8.229074889867842, | |
| "grad_norm": 0.2298620343208313, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.0101, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 8.237885462555067, | |
| "grad_norm": 0.12235020846128464, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0094, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 8.246696035242291, | |
| "grad_norm": 0.10228944569826126, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.0072, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 8.255506607929515, | |
| "grad_norm": 0.1408655047416687, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0125, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 8.26431718061674, | |
| "grad_norm": 0.22849532961845398, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.0107, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 8.273127753303966, | |
| "grad_norm": 0.19355928897857666, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.0094, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 8.28193832599119, | |
| "grad_norm": 0.1682409793138504, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.0101, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 8.290748898678414, | |
| "grad_norm": 0.2707473933696747, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.0077, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 8.299559471365638, | |
| "grad_norm": 0.26445385813713074, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0103, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 8.308370044052863, | |
| "grad_norm": 0.06407660990953445, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0098, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 8.317180616740089, | |
| "grad_norm": 0.2113770693540573, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.0101, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 8.325991189427313, | |
| "grad_norm": 0.10182628035545349, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.0136, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 8.334801762114537, | |
| "grad_norm": 0.18843504786491394, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0074, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 8.343612334801762, | |
| "grad_norm": 0.08799692243337631, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.0093, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 8.352422907488986, | |
| "grad_norm": 0.21391931176185608, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0086, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 8.361233480176212, | |
| "grad_norm": 0.18434137105941772, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.0094, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 8.370044052863436, | |
| "grad_norm": 0.15331020951271057, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.0114, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 8.37885462555066, | |
| "grad_norm": 0.10256490856409073, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.0127, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 8.387665198237885, | |
| "grad_norm": 0.1614340990781784, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.0084, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 8.396475770925111, | |
| "grad_norm": 0.13606303930282593, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.0097, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 8.405286343612335, | |
| "grad_norm": 0.1629956215620041, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0116, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 8.41409691629956, | |
| "grad_norm": 0.14277587831020355, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.0114, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 8.422907488986784, | |
| "grad_norm": 0.15749751031398773, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.0098, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 8.431718061674008, | |
| "grad_norm": 0.09063310921192169, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.0095, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 8.440528634361234, | |
| "grad_norm": 0.14467498660087585, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0096, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 8.449339207048459, | |
| "grad_norm": 0.22473329305648804, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0117, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 8.458149779735683, | |
| "grad_norm": 0.449494868516922, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.009, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 8.466960352422907, | |
| "grad_norm": 0.14929036796092987, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.0084, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 8.475770925110131, | |
| "grad_norm": 0.23419791460037231, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.012, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 8.484581497797357, | |
| "grad_norm": 0.23469005525112152, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.0126, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 8.493392070484582, | |
| "grad_norm": 0.1580594927072525, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.0095, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 8.502202643171806, | |
| "grad_norm": 0.1807514727115631, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0096, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 8.51101321585903, | |
| "grad_norm": 0.062112219631671906, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.0087, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 8.519823788546255, | |
| "grad_norm": 0.19509808719158173, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.009, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 8.52863436123348, | |
| "grad_norm": 0.14320671558380127, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0086, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 8.537444933920705, | |
| "grad_norm": 0.15678615868091583, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.0117, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 8.54625550660793, | |
| "grad_norm": 0.13673076033592224, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.0083, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 8.555066079295154, | |
| "grad_norm": 0.13520823419094086, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0083, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 8.56387665198238, | |
| "grad_norm": 0.12094086408615112, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.0102, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 8.572687224669604, | |
| "grad_norm": 0.11623599380254745, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.0083, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 8.581497797356828, | |
| "grad_norm": 0.15491396188735962, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.0091, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 8.590308370044053, | |
| "grad_norm": 0.1993149220943451, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.0085, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 8.599118942731277, | |
| "grad_norm": 0.15207341313362122, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.0099, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 8.607929515418503, | |
| "grad_norm": 0.28227663040161133, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.0089, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 8.616740088105727, | |
| "grad_norm": 0.22268176078796387, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0089, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 8.625550660792952, | |
| "grad_norm": 0.13314330577850342, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0069, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 8.634361233480176, | |
| "grad_norm": 0.12658432126045227, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.009, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 8.6431718061674, | |
| "grad_norm": 0.06319954991340637, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.0071, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 8.651982378854626, | |
| "grad_norm": 0.14937259256839752, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.0092, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 8.66079295154185, | |
| "grad_norm": 0.15603239834308624, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.0138, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 8.669603524229075, | |
| "grad_norm": 0.16545039415359497, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.0081, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 8.678414096916299, | |
| "grad_norm": 0.1221143826842308, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.011, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 8.687224669603523, | |
| "grad_norm": 0.11476120352745056, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.015, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 8.69603524229075, | |
| "grad_norm": 0.13041569292545319, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.0082, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 8.704845814977974, | |
| "grad_norm": 0.11081754416227341, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.0113, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 8.713656387665198, | |
| "grad_norm": 0.11409346014261246, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.0078, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 8.722466960352422, | |
| "grad_norm": 0.15909695625305176, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.0087, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 8.731277533039648, | |
| "grad_norm": 0.1465723216533661, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0085, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 8.740088105726873, | |
| "grad_norm": 0.12249363958835602, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.0151, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 8.748898678414097, | |
| "grad_norm": 0.08279208838939667, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0059, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 8.757709251101321, | |
| "grad_norm": 0.18889650702476501, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0098, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 8.766519823788546, | |
| "grad_norm": 0.24473944306373596, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.01, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 8.775330396475772, | |
| "grad_norm": 0.12404630333185196, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0105, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 8.784140969162996, | |
| "grad_norm": 0.14729201793670654, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0087, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 8.79295154185022, | |
| "grad_norm": 0.15030668675899506, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.0084, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 8.801762114537445, | |
| "grad_norm": 0.2006319761276245, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.0099, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 8.810572687224669, | |
| "grad_norm": 0.12898662686347961, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.0112, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 8.810572687224669, | |
| "step": 10000, | |
| "total_flos": 0.0, | |
| "train_loss": 0.02517222110107541, | |
| "train_runtime": 8217.0277, | |
| "train_samples_per_second": 38.944, | |
| "train_steps_per_second": 1.217 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 9, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
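
The record above is the tail of a standard `trainer_state.json` as written by `transformers.Trainer`: the last entries of `log_history` (per-step loss, gradient norm, and learning rate, logged every `logging_steps = 10` steps), the aggregate record appended when training stops, and the trainer configuration fields. By the final step the learning rate has fully decayed (≈2.7e-12 at step 10000, with `max_steps = 10000`), and the reported `train_steps_per_second` (1.217) agrees with `global_step / train_runtime` = 10000 / 8217.03 ≈ 1.217.

As a quick illustration (not part of the original log), the sketch below shows one way to load such a file and pull the loss curve out of `log_history`. The path `trainer_state.json` is an assumption; substitute the location of the actual checkpoint file.

```python
# Minimal sketch: summarize the loss curve in a transformers trainer_state.json.
# Assumes the JSON above is saved as "trainer_state.json" (hypothetical path).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry a "loss" key; the aggregate record appended at the
# end of training does not (it has "train_loss", "train_runtime", etc. instead).
entries = [e for e in state["log_history"] if "loss" in e]
summary = state["log_history"][-1]  # aggregate record (last entry in this file)

steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

print(f"logged points : {len(entries)}")
print(f"loss          : {losses[0]:.4f} -> {losses[-1]:.4f}")
print(f"final lr      : {lrs[-1]:.3e}")   # ~2.7e-12 here: schedule fully decayed
# Throughput sanity check: matches the logged train_steps_per_second (1.217).
print(f"steps/second  : {state['global_step'] / summary['train_runtime']:.3f}")
```

The same `steps`/`losses` lists feed directly into a plot (e.g. `matplotlib.pyplot.plot(steps, losses)`) if a visual of the curve is wanted; the filter on `"loss"` is what keeps the aggregate record from corrupting the series.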