| { | |
| "best_global_step": null, | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 31.746031746031747, | |
| "eval_steps": 500, | |
| "global_step": 10000, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.031746031746031744, | |
| "grad_norm": 13.21976089477539, | |
| "learning_rate": 1.8e-06, | |
| "loss": 1.1689, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.06349206349206349, | |
| "grad_norm": 4.483216762542725, | |
| "learning_rate": 3.8e-06, | |
| "loss": 0.9066, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.09523809523809523, | |
| "grad_norm": 3.6887826919555664, | |
| "learning_rate": 5.8e-06, | |
| "loss": 0.4741, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.12698412698412698, | |
| "grad_norm": 2.358823776245117, | |
| "learning_rate": 7.8e-06, | |
| "loss": 0.312, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.15873015873015872, | |
| "grad_norm": 1.1946849822998047, | |
| "learning_rate": 9.800000000000001e-06, | |
| "loss": 0.2615, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.19047619047619047, | |
| "grad_norm": 1.6524196863174438, | |
| "learning_rate": 1.18e-05, | |
| "loss": 0.2123, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.2222222222222222, | |
| "grad_norm": 1.3347996473312378, | |
| "learning_rate": 1.3800000000000002e-05, | |
| "loss": 0.1613, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.25396825396825395, | |
| "grad_norm": 1.4735726118087769, | |
| "learning_rate": 1.58e-05, | |
| "loss": 0.1495, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.2857142857142857, | |
| "grad_norm": 1.4054492712020874, | |
| "learning_rate": 1.78e-05, | |
| "loss": 0.1327, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.31746031746031744, | |
| "grad_norm": 1.3939247131347656, | |
| "learning_rate": 1.9800000000000004e-05, | |
| "loss": 0.1208, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.3492063492063492, | |
| "grad_norm": 1.2570619583129883, | |
| "learning_rate": 2.18e-05, | |
| "loss": 0.1205, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.38095238095238093, | |
| "grad_norm": 1.1562747955322266, | |
| "learning_rate": 2.38e-05, | |
| "loss": 0.1093, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.4126984126984127, | |
| "grad_norm": 1.0317634344100952, | |
| "learning_rate": 2.58e-05, | |
| "loss": 0.1083, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.4444444444444444, | |
| "grad_norm": 0.920091450214386, | |
| "learning_rate": 2.7800000000000005e-05, | |
| "loss": 0.0947, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.47619047619047616, | |
| "grad_norm": 0.9344382882118225, | |
| "learning_rate": 2.98e-05, | |
| "loss": 0.0853, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.5079365079365079, | |
| "grad_norm": 0.9008650779724121, | |
| "learning_rate": 3.18e-05, | |
| "loss": 0.1021, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.5396825396825397, | |
| "grad_norm": 1.5005056858062744, | |
| "learning_rate": 3.38e-05, | |
| "loss": 0.0999, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.5714285714285714, | |
| "grad_norm": 1.0877991914749146, | |
| "learning_rate": 3.58e-05, | |
| "loss": 0.0996, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.6031746031746031, | |
| "grad_norm": 1.0370714664459229, | |
| "learning_rate": 3.7800000000000004e-05, | |
| "loss": 0.0818, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.6349206349206349, | |
| "grad_norm": 0.9369335174560547, | |
| "learning_rate": 3.9800000000000005e-05, | |
| "loss": 0.0912, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.6666666666666666, | |
| "grad_norm": 0.6954028606414795, | |
| "learning_rate": 4.18e-05, | |
| "loss": 0.082, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.6984126984126984, | |
| "grad_norm": 0.5063722729682922, | |
| "learning_rate": 4.38e-05, | |
| "loss": 0.0877, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.7301587301587301, | |
| "grad_norm": 1.3703116178512573, | |
| "learning_rate": 4.58e-05, | |
| "loss": 0.0891, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.7619047619047619, | |
| "grad_norm": 1.5147759914398193, | |
| "learning_rate": 4.78e-05, | |
| "loss": 0.0824, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.7936507936507936, | |
| "grad_norm": 0.6317451596260071, | |
| "learning_rate": 4.9800000000000004e-05, | |
| "loss": 0.0734, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.8253968253968254, | |
| "grad_norm": 0.8945028781890869, | |
| "learning_rate": 5.1800000000000005e-05, | |
| "loss": 0.0775, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.8571428571428571, | |
| "grad_norm": 0.9164121150970459, | |
| "learning_rate": 5.380000000000001e-05, | |
| "loss": 0.0829, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.8888888888888888, | |
| "grad_norm": 1.0421078205108643, | |
| "learning_rate": 5.580000000000001e-05, | |
| "loss": 0.0777, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.9206349206349206, | |
| "grad_norm": 1.0534974336624146, | |
| "learning_rate": 5.7799999999999995e-05, | |
| "loss": 0.0836, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.9523809523809523, | |
| "grad_norm": 0.8002151250839233, | |
| "learning_rate": 5.9800000000000003e-05, | |
| "loss": 0.0683, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.9841269841269841, | |
| "grad_norm": 1.0484886169433594, | |
| "learning_rate": 6.18e-05, | |
| "loss": 0.0804, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 1.0158730158730158, | |
| "grad_norm": 1.2353489398956299, | |
| "learning_rate": 6.38e-05, | |
| "loss": 0.0752, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 1.0476190476190477, | |
| "grad_norm": 0.5965688824653625, | |
| "learning_rate": 6.58e-05, | |
| "loss": 0.0731, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 1.0793650793650793, | |
| "grad_norm": 0.8803579211235046, | |
| "learning_rate": 6.780000000000001e-05, | |
| "loss": 0.0788, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.1111111111111112, | |
| "grad_norm": 1.1218805313110352, | |
| "learning_rate": 6.98e-05, | |
| "loss": 0.0777, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.1428571428571428, | |
| "grad_norm": 0.8718439340591431, | |
| "learning_rate": 7.18e-05, | |
| "loss": 0.0804, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.1746031746031746, | |
| "grad_norm": 0.7790467143058777, | |
| "learning_rate": 7.38e-05, | |
| "loss": 0.0781, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.2063492063492063, | |
| "grad_norm": 1.054553508758545, | |
| "learning_rate": 7.58e-05, | |
| "loss": 0.0718, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.2380952380952381, | |
| "grad_norm": 0.9072209000587463, | |
| "learning_rate": 7.780000000000001e-05, | |
| "loss": 0.0751, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.2698412698412698, | |
| "grad_norm": 0.6406309008598328, | |
| "learning_rate": 7.98e-05, | |
| "loss": 0.0676, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.3015873015873016, | |
| "grad_norm": 0.8156818747520447, | |
| "learning_rate": 8.18e-05, | |
| "loss": 0.0723, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.3333333333333333, | |
| "grad_norm": 0.359697163105011, | |
| "learning_rate": 8.38e-05, | |
| "loss": 0.07, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.3650793650793651, | |
| "grad_norm": 0.9286361336708069, | |
| "learning_rate": 8.58e-05, | |
| "loss": 0.0679, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.3968253968253967, | |
| "grad_norm": 0.6339474320411682, | |
| "learning_rate": 8.78e-05, | |
| "loss": 0.0704, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.4285714285714286, | |
| "grad_norm": 1.0383930206298828, | |
| "learning_rate": 8.98e-05, | |
| "loss": 0.0734, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.4603174603174602, | |
| "grad_norm": 0.7700372934341431, | |
| "learning_rate": 9.180000000000001e-05, | |
| "loss": 0.0709, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.492063492063492, | |
| "grad_norm": 0.7738011479377747, | |
| "learning_rate": 9.38e-05, | |
| "loss": 0.08, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.5238095238095237, | |
| "grad_norm": 0.9753276109695435, | |
| "learning_rate": 9.58e-05, | |
| "loss": 0.0777, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.5555555555555556, | |
| "grad_norm": 0.8171911239624023, | |
| "learning_rate": 9.78e-05, | |
| "loss": 0.0754, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.5873015873015874, | |
| "grad_norm": 0.7396946549415588, | |
| "learning_rate": 9.98e-05, | |
| "loss": 0.0669, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.619047619047619, | |
| "grad_norm": 0.7050591707229614, | |
| "learning_rate": 9.9999778549206e-05, | |
| "loss": 0.0752, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.6507936507936507, | |
| "grad_norm": 0.45515891909599304, | |
| "learning_rate": 9.999901304280685e-05, | |
| "loss": 0.0654, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.6825396825396826, | |
| "grad_norm": 0.9701759815216064, | |
| "learning_rate": 9.999770075521164e-05, | |
| "loss": 0.0667, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.7142857142857144, | |
| "grad_norm": 1.3382041454315186, | |
| "learning_rate": 9.99958417007713e-05, | |
| "loss": 0.0724, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.746031746031746, | |
| "grad_norm": 0.6421812176704407, | |
| "learning_rate": 9.999343589981615e-05, | |
| "loss": 0.0633, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.7777777777777777, | |
| "grad_norm": 0.6846944093704224, | |
| "learning_rate": 9.999048337865568e-05, | |
| "loss": 0.0635, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.8095238095238095, | |
| "grad_norm": 0.5819793939590454, | |
| "learning_rate": 9.998698416957815e-05, | |
| "loss": 0.06, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.8412698412698414, | |
| "grad_norm": 1.1766382455825806, | |
| "learning_rate": 9.998293831085037e-05, | |
| "loss": 0.0619, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.873015873015873, | |
| "grad_norm": 0.7545307874679565, | |
| "learning_rate": 9.997834584671719e-05, | |
| "loss": 0.0675, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.9047619047619047, | |
| "grad_norm": 0.5790759325027466, | |
| "learning_rate": 9.997320682740107e-05, | |
| "loss": 0.065, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.9365079365079365, | |
| "grad_norm": 0.6222729086875916, | |
| "learning_rate": 9.996752130910149e-05, | |
| "loss": 0.0723, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.9682539682539684, | |
| "grad_norm": 0.5961977243423462, | |
| "learning_rate": 9.99612893539944e-05, | |
| "loss": 0.0576, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.8678403496742249, | |
| "learning_rate": 9.995451103023144e-05, | |
| "loss": 0.0502, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.0317460317460316, | |
| "grad_norm": 0.6229413151741028, | |
| "learning_rate": 9.994718641193928e-05, | |
| "loss": 0.063, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.0634920634920633, | |
| "grad_norm": 0.6003958582878113, | |
| "learning_rate": 9.993931557921874e-05, | |
| "loss": 0.0619, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.0952380952380953, | |
| "grad_norm": 0.5294569134712219, | |
| "learning_rate": 9.993089861814402e-05, | |
| "loss": 0.058, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.126984126984127, | |
| "grad_norm": 0.7086028456687927, | |
| "learning_rate": 9.992193562076166e-05, | |
| "loss": 0.0608, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.1587301587301586, | |
| "grad_norm": 0.5682598948478699, | |
| "learning_rate": 9.991242668508954e-05, | |
| "loss": 0.0594, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 2.1904761904761907, | |
| "grad_norm": 0.6251933574676514, | |
| "learning_rate": 9.990237191511587e-05, | |
| "loss": 0.051, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 2.2222222222222223, | |
| "grad_norm": 0.6986904740333557, | |
| "learning_rate": 9.989177142079802e-05, | |
| "loss": 0.0561, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 2.253968253968254, | |
| "grad_norm": 0.693415641784668, | |
| "learning_rate": 9.988062531806126e-05, | |
| "loss": 0.0655, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 2.2857142857142856, | |
| "grad_norm": 0.8418262600898743, | |
| "learning_rate": 9.986893372879762e-05, | |
| "loss": 0.0534, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 2.317460317460317, | |
| "grad_norm": 0.5003765225410461, | |
| "learning_rate": 9.985669678086443e-05, | |
| "loss": 0.0579, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 2.3492063492063493, | |
| "grad_norm": 1.0073083639144897, | |
| "learning_rate": 9.984391460808298e-05, | |
| "loss": 0.0552, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 2.380952380952381, | |
| "grad_norm": 0.5178171396255493, | |
| "learning_rate": 9.983058735023709e-05, | |
| "loss": 0.0555, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 2.4126984126984126, | |
| "grad_norm": 0.6918147802352905, | |
| "learning_rate": 9.98167151530715e-05, | |
| "loss": 0.0524, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 2.4444444444444446, | |
| "grad_norm": 0.6241569519042969, | |
| "learning_rate": 9.980229816829034e-05, | |
| "loss": 0.0468, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 2.4761904761904763, | |
| "grad_norm": 0.6911062002182007, | |
| "learning_rate": 9.978733655355544e-05, | |
| "loss": 0.0554, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 2.507936507936508, | |
| "grad_norm": 0.6609308123588562, | |
| "learning_rate": 9.977183047248464e-05, | |
| "loss": 0.0532, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 2.5396825396825395, | |
| "grad_norm": 0.694909930229187, | |
| "learning_rate": 9.975578009464992e-05, | |
| "loss": 0.055, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 2.571428571428571, | |
| "grad_norm": 0.49486884474754333, | |
| "learning_rate": 9.97391855955757e-05, | |
| "loss": 0.0615, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 2.6031746031746033, | |
| "grad_norm": 0.515155553817749, | |
| "learning_rate": 9.972204715673669e-05, | |
| "loss": 0.0608, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 2.634920634920635, | |
| "grad_norm": 0.3861793279647827, | |
| "learning_rate": 9.970436496555617e-05, | |
| "loss": 0.057, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 2.6666666666666665, | |
| "grad_norm": 0.42718321084976196, | |
| "learning_rate": 9.968613921540373e-05, | |
| "loss": 0.0481, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 2.6984126984126986, | |
| "grad_norm": 0.7105534672737122, | |
| "learning_rate": 9.966737010559326e-05, | |
| "loss": 0.0615, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 2.7301587301587302, | |
| "grad_norm": 0.4670024812221527, | |
| "learning_rate": 9.964805784138072e-05, | |
| "loss": 0.0529, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 2.761904761904762, | |
| "grad_norm": 0.6207767128944397, | |
| "learning_rate": 9.962820263396195e-05, | |
| "loss": 0.0412, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 2.7936507936507935, | |
| "grad_norm": 0.37876588106155396, | |
| "learning_rate": 9.960780470047033e-05, | |
| "loss": 0.0462, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 2.825396825396825, | |
| "grad_norm": 0.6201185584068298, | |
| "learning_rate": 9.958686426397437e-05, | |
| "loss": 0.0468, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 2.857142857142857, | |
| "grad_norm": 0.5746213793754578, | |
| "learning_rate": 9.956538155347534e-05, | |
| "loss": 0.0533, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 2.888888888888889, | |
| "grad_norm": 0.5248697400093079, | |
| "learning_rate": 9.95433568039047e-05, | |
| "loss": 0.0472, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 2.9206349206349205, | |
| "grad_norm": 0.6810121536254883, | |
| "learning_rate": 9.952079025612162e-05, | |
| "loss": 0.0477, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 2.9523809523809526, | |
| "grad_norm": 0.45645028352737427, | |
| "learning_rate": 9.949768215691022e-05, | |
| "loss": 0.0467, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.984126984126984, | |
| "grad_norm": 0.4681288003921509, | |
| "learning_rate": 9.9474032758977e-05, | |
| "loss": 0.0444, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 3.015873015873016, | |
| "grad_norm": 0.48268362879753113, | |
| "learning_rate": 9.944984232094794e-05, | |
| "loss": 0.0475, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 3.0476190476190474, | |
| "grad_norm": 0.7468770742416382, | |
| "learning_rate": 9.942511110736584e-05, | |
| "loss": 0.0411, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 3.0793650793650795, | |
| "grad_norm": 0.44316691160202026, | |
| "learning_rate": 9.939983938868726e-05, | |
| "loss": 0.0408, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 3.111111111111111, | |
| "grad_norm": 0.7548139691352844, | |
| "learning_rate": 9.93740274412797e-05, | |
| "loss": 0.0489, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 3.142857142857143, | |
| "grad_norm": 0.5162081122398376, | |
| "learning_rate": 9.934767554741846e-05, | |
| "loss": 0.0483, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 3.1746031746031744, | |
| "grad_norm": 0.5505052804946899, | |
| "learning_rate": 9.932078399528361e-05, | |
| "loss": 0.0441, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 3.2063492063492065, | |
| "grad_norm": 0.5164217948913574, | |
| "learning_rate": 9.929335307895689e-05, | |
| "loss": 0.0404, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 3.238095238095238, | |
| "grad_norm": 0.6595455408096313, | |
| "learning_rate": 9.926538309841839e-05, | |
| "loss": 0.0451, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 3.2698412698412698, | |
| "grad_norm": 0.5057474970817566, | |
| "learning_rate": 9.923687435954334e-05, | |
| "loss": 0.042, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 3.3015873015873014, | |
| "grad_norm": 0.7443891763687134, | |
| "learning_rate": 9.920782717409873e-05, | |
| "loss": 0.0484, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 3.3333333333333335, | |
| "grad_norm": 0.3712266981601715, | |
| "learning_rate": 9.917824185973994e-05, | |
| "loss": 0.0417, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 3.365079365079365, | |
| "grad_norm": 0.48856109380722046, | |
| "learning_rate": 9.914811874000723e-05, | |
| "loss": 0.0471, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 3.3968253968253967, | |
| "grad_norm": 0.40823137760162354, | |
| "learning_rate": 9.911745814432218e-05, | |
| "loss": 0.0516, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 3.4285714285714284, | |
| "grad_norm": 0.4344771206378937, | |
| "learning_rate": 9.90862604079842e-05, | |
| "loss": 0.0495, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 3.4603174603174605, | |
| "grad_norm": 0.4978770911693573, | |
| "learning_rate": 9.90545258721667e-05, | |
| "loss": 0.0399, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 3.492063492063492, | |
| "grad_norm": 0.4213084578514099, | |
| "learning_rate": 9.90222548839135e-05, | |
| "loss": 0.0451, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 3.5238095238095237, | |
| "grad_norm": 0.3629322946071625, | |
| "learning_rate": 9.898944779613495e-05, | |
| "loss": 0.0471, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 3.5555555555555554, | |
| "grad_norm": 0.3836223781108856, | |
| "learning_rate": 9.89561049676041e-05, | |
| "loss": 0.0403, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 3.5873015873015874, | |
| "grad_norm": 0.5665341019630432, | |
| "learning_rate": 9.89222267629528e-05, | |
| "loss": 0.0484, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 3.619047619047619, | |
| "grad_norm": 0.44514623284339905, | |
| "learning_rate": 9.888781355266763e-05, | |
| "loss": 0.0472, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 3.6507936507936507, | |
| "grad_norm": 0.4966477155685425, | |
| "learning_rate": 9.885286571308598e-05, | |
| "loss": 0.0448, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 3.682539682539683, | |
| "grad_norm": 0.422583669424057, | |
| "learning_rate": 9.881738362639182e-05, | |
| "loss": 0.0428, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 3.7142857142857144, | |
| "grad_norm": 0.38687634468078613, | |
| "learning_rate": 9.878136768061154e-05, | |
| "loss": 0.0472, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 3.746031746031746, | |
| "grad_norm": 0.5893686413764954, | |
| "learning_rate": 9.874481826960979e-05, | |
| "loss": 0.0472, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 3.7777777777777777, | |
| "grad_norm": 0.5057671666145325, | |
| "learning_rate": 9.870773579308503e-05, | |
| "loss": 0.05, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 3.8095238095238093, | |
| "grad_norm": 0.3843430280685425, | |
| "learning_rate": 9.867012065656533e-05, | |
| "loss": 0.0477, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 3.8412698412698414, | |
| "grad_norm": 0.5509458184242249, | |
| "learning_rate": 9.863197327140376e-05, | |
| "loss": 0.0454, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 3.873015873015873, | |
| "grad_norm": 0.3474454879760742, | |
| "learning_rate": 9.859329405477403e-05, | |
| "loss": 0.0442, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 3.9047619047619047, | |
| "grad_norm": 0.386695921421051, | |
| "learning_rate": 9.855408342966585e-05, | |
| "loss": 0.0454, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 3.9365079365079367, | |
| "grad_norm": 0.5409571528434753, | |
| "learning_rate": 9.851434182488033e-05, | |
| "loss": 0.0446, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 3.9682539682539684, | |
| "grad_norm": 0.46809226274490356, | |
| "learning_rate": 9.84740696750253e-05, | |
| "loss": 0.0481, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.5593593120574951, | |
| "learning_rate": 9.843326742051055e-05, | |
| "loss": 0.0374, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 4.031746031746032, | |
| "grad_norm": 0.4967573285102844, | |
| "learning_rate": 9.839193550754297e-05, | |
| "loss": 0.0416, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 4.063492063492063, | |
| "grad_norm": 0.5254335999488831, | |
| "learning_rate": 9.835007438812177e-05, | |
| "loss": 0.0445, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 4.095238095238095, | |
| "grad_norm": 0.43561768531799316, | |
| "learning_rate": 9.830768452003341e-05, | |
| "loss": 0.0432, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 4.1269841269841265, | |
| "grad_norm": 0.31743791699409485, | |
| "learning_rate": 9.826476636684671e-05, | |
| "loss": 0.0404, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 4.158730158730159, | |
| "grad_norm": 0.6584088802337646, | |
| "learning_rate": 9.822132039790773e-05, | |
| "loss": 0.0457, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 4.190476190476191, | |
| "grad_norm": 0.6253817081451416, | |
| "learning_rate": 9.817734708833461e-05, | |
| "loss": 0.0512, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 4.222222222222222, | |
| "grad_norm": 0.4972144067287445, | |
| "learning_rate": 9.813284691901243e-05, | |
| "loss": 0.0355, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 4.253968253968254, | |
| "grad_norm": 0.4410610795021057, | |
| "learning_rate": 9.808782037658792e-05, | |
| "loss": 0.0472, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 4.285714285714286, | |
| "grad_norm": 0.4023093283176422, | |
| "learning_rate": 9.804226795346411e-05, | |
| "loss": 0.0377, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 4.317460317460317, | |
| "grad_norm": 0.45321226119995117, | |
| "learning_rate": 9.799619014779503e-05, | |
| "loss": 0.0448, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 4.349206349206349, | |
| "grad_norm": 0.5226684808731079, | |
| "learning_rate": 9.794958746348013e-05, | |
| "loss": 0.0337, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 4.380952380952381, | |
| "grad_norm": 0.3101871609687805, | |
| "learning_rate": 9.790246041015896e-05, | |
| "loss": 0.0381, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 4.412698412698413, | |
| "grad_norm": 0.27930518984794617, | |
| "learning_rate": 9.785480950320538e-05, | |
| "loss": 0.0332, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 4.444444444444445, | |
| "grad_norm": 0.34375303983688354, | |
| "learning_rate": 9.78066352637221e-05, | |
| "loss": 0.0407, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 4.476190476190476, | |
| "grad_norm": 0.3768472969532013, | |
| "learning_rate": 9.775793821853488e-05, | |
| "loss": 0.0334, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 4.507936507936508, | |
| "grad_norm": 0.4127296805381775, | |
| "learning_rate": 9.77087189001868e-05, | |
| "loss": 0.0422, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 4.5396825396825395, | |
| "grad_norm": 0.35581353306770325, | |
| "learning_rate": 9.765897784693243e-05, | |
| "loss": 0.0396, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 4.571428571428571, | |
| "grad_norm": 0.4689619243144989, | |
| "learning_rate": 9.760871560273197e-05, | |
| "loss": 0.043, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 4.603174603174603, | |
| "grad_norm": 0.31955939531326294, | |
| "learning_rate": 9.755793271724526e-05, | |
| "loss": 0.0405, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 4.634920634920634, | |
| "grad_norm": 0.3422633409500122, | |
| "learning_rate": 9.750662974582584e-05, | |
| "loss": 0.0404, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 4.666666666666667, | |
| "grad_norm": 0.4678588807582855, | |
| "learning_rate": 9.745480724951473e-05, | |
| "loss": 0.0366, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 4.698412698412699, | |
| "grad_norm": 0.359017550945282, | |
| "learning_rate": 9.740246579503447e-05, | |
| "loss": 0.0407, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 4.73015873015873, | |
| "grad_norm": 0.3341307044029236, | |
| "learning_rate": 9.734960595478284e-05, | |
| "loss": 0.0393, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 4.761904761904762, | |
| "grad_norm": 0.35834068059921265, | |
| "learning_rate": 9.729622830682657e-05, | |
| "loss": 0.0471, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 4.7936507936507935, | |
| "grad_norm": 0.36487454175949097, | |
| "learning_rate": 9.724233343489504e-05, | |
| "loss": 0.0382, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 4.825396825396825, | |
| "grad_norm": 0.39199331402778625, | |
| "learning_rate": 9.718792192837396e-05, | |
| "loss": 0.0392, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 4.857142857142857, | |
| "grad_norm": 0.6458824276924133, | |
| "learning_rate": 9.713299438229886e-05, | |
| "loss": 0.0461, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 4.888888888888889, | |
| "grad_norm": 0.5050864219665527, | |
| "learning_rate": 9.707755139734855e-05, | |
| "loss": 0.0467, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 4.920634920634921, | |
| "grad_norm": 0.412701278924942, | |
| "learning_rate": 9.702159357983866e-05, | |
| "loss": 0.0475, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 4.9523809523809526, | |
| "grad_norm": 0.4263315498828888, | |
| "learning_rate": 9.696512154171492e-05, | |
| "loss": 0.0436, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 4.984126984126984, | |
| "grad_norm": 0.5352809429168701, | |
| "learning_rate": 9.690813590054645e-05, | |
| "loss": 0.0408, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 5.015873015873016, | |
| "grad_norm": 0.26459309458732605, | |
| "learning_rate": 9.685063727951914e-05, | |
| "loss": 0.0401, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 5.0476190476190474, | |
| "grad_norm": 0.5071465373039246, | |
| "learning_rate": 9.679262630742865e-05, | |
| "loss": 0.042, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 5.079365079365079, | |
| "grad_norm": 0.4423050880432129, | |
| "learning_rate": 9.673410361867373e-05, | |
| "loss": 0.0377, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 5.111111111111111, | |
| "grad_norm": 0.4581150412559509, | |
| "learning_rate": 9.667506985324909e-05, | |
| "loss": 0.0343, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 5.142857142857143, | |
| "grad_norm": 0.6359660029411316, | |
| "learning_rate": 9.661552565673855e-05, | |
| "loss": 0.0399, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 5.174603174603175, | |
| "grad_norm": 0.4536341726779938, | |
| "learning_rate": 9.655547168030789e-05, | |
| "loss": 0.0354, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 5.2063492063492065, | |
| "grad_norm": 0.43002185225486755, | |
| "learning_rate": 9.649490858069777e-05, | |
| "loss": 0.0424, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 5.238095238095238, | |
| "grad_norm": 0.47539520263671875, | |
| "learning_rate": 9.643383702021658e-05, | |
| "loss": 0.0419, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 5.26984126984127, | |
| "grad_norm": 0.48717617988586426, | |
| "learning_rate": 9.637225766673307e-05, | |
| "loss": 0.0286, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 5.301587301587301, | |
| "grad_norm": 0.516169548034668, | |
| "learning_rate": 9.631017119366922e-05, | |
| "loss": 0.0379, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 5.333333333333333, | |
| "grad_norm": 0.518360435962677, | |
| "learning_rate": 9.624757827999273e-05, | |
| "loss": 0.0379, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 5.365079365079365, | |
| "grad_norm": 0.46924740076065063, | |
| "learning_rate": 9.618447961020971e-05, | |
| "loss": 0.0435, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 5.396825396825397, | |
| "grad_norm": 0.3263060748577118, | |
| "learning_rate": 9.612087587435707e-05, | |
| "loss": 0.0365, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 5.428571428571429, | |
| "grad_norm": 0.31327471137046814, | |
| "learning_rate": 9.605676776799508e-05, | |
| "loss": 0.0356, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 5.4603174603174605, | |
| "grad_norm": 0.5036608576774597, | |
| "learning_rate": 9.599215599219973e-05, | |
| "loss": 0.037, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 5.492063492063492, | |
| "grad_norm": 0.4337698221206665, | |
| "learning_rate": 9.592704125355505e-05, | |
| "loss": 0.0407, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 5.523809523809524, | |
| "grad_norm": 0.3066512942314148, | |
| "learning_rate": 9.586142426414538e-05, | |
| "loss": 0.0411, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 5.555555555555555, | |
| "grad_norm": 0.45750048756599426, | |
| "learning_rate": 9.57953057415476e-05, | |
| "loss": 0.0431, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 5.587301587301587, | |
| "grad_norm": 0.4001326560974121, | |
| "learning_rate": 9.572868640882328e-05, | |
| "loss": 0.0422, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 5.619047619047619, | |
| "grad_norm": 0.4081225097179413, | |
| "learning_rate": 9.56615669945108e-05, | |
| "loss": 0.0428, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 5.650793650793651, | |
| "grad_norm": 0.30307868123054504, | |
| "learning_rate": 9.55939482326173e-05, | |
| "loss": 0.0397, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 5.682539682539683, | |
| "grad_norm": 0.6120065450668335, | |
| "learning_rate": 9.552583086261069e-05, | |
| "loss": 0.0433, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 5.714285714285714, | |
| "grad_norm": 0.4432617425918579, | |
| "learning_rate": 9.545721562941168e-05, | |
| "loss": 0.0377, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 5.746031746031746, | |
| "grad_norm": 0.3804296553134918, | |
| "learning_rate": 9.538810328338543e-05, | |
| "loss": 0.0345, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 5.777777777777778, | |
| "grad_norm": 0.35566446185112, | |
| "learning_rate": 9.531849458033349e-05, | |
| "loss": 0.0374, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 5.809523809523809, | |
| "grad_norm": 0.30905136466026306, | |
| "learning_rate": 9.524839028148547e-05, | |
| "loss": 0.0368, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 5.841269841269841, | |
| "grad_norm": 0.4156033396720886, | |
| "learning_rate": 9.517779115349077e-05, | |
| "loss": 0.0314, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 5.8730158730158735, | |
| "grad_norm": 0.388803631067276, | |
| "learning_rate": 9.510669796841014e-05, | |
| "loss": 0.0329, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 5.904761904761905, | |
| "grad_norm": 0.3741490840911865, | |
| "learning_rate": 9.503511150370727e-05, | |
| "loss": 0.0428, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 5.936507936507937, | |
| "grad_norm": 0.4900755286216736, | |
| "learning_rate": 9.496303254224024e-05, | |
| "loss": 0.0356, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 5.968253968253968, | |
| "grad_norm": 0.5371416807174683, | |
| "learning_rate": 9.489046187225306e-05, | |
| "loss": 0.0342, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "grad_norm": 0.41327881813049316, | |
| "learning_rate": 9.481740028736692e-05, | |
| "loss": 0.0305, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 6.031746031746032, | |
| "grad_norm": 0.5172460675239563, | |
| "learning_rate": 9.474384858657164e-05, | |
| "loss": 0.0424, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 6.063492063492063, | |
| "grad_norm": 0.46478694677352905, | |
| "learning_rate": 9.466980757421679e-05, | |
| "loss": 0.0379, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 6.095238095238095, | |
| "grad_norm": 0.3499574363231659, | |
| "learning_rate": 9.459527806000305e-05, | |
| "loss": 0.0351, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 6.1269841269841265, | |
| "grad_norm": 0.2917483448982239, | |
| "learning_rate": 9.452026085897325e-05, | |
| "loss": 0.0317, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 6.158730158730159, | |
| "grad_norm": 0.549885094165802, | |
| "learning_rate": 9.444475679150348e-05, | |
| "loss": 0.0396, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 6.190476190476191, | |
| "grad_norm": 0.4453364610671997, | |
| "learning_rate": 9.436876668329411e-05, | |
| "loss": 0.0362, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 6.222222222222222, | |
| "grad_norm": 0.2958216667175293, | |
| "learning_rate": 9.429229136536079e-05, | |
| "loss": 0.0314, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 6.253968253968254, | |
| "grad_norm": 0.3968123197555542, | |
| "learning_rate": 9.421533167402534e-05, | |
| "loss": 0.0401, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 6.285714285714286, | |
| "grad_norm": 0.2865840196609497, | |
| "learning_rate": 9.413788845090666e-05, | |
| "loss": 0.0454, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 6.317460317460317, | |
| "grad_norm": 0.3953138291835785, | |
| "learning_rate": 9.405996254291136e-05, | |
| "loss": 0.0356, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 6.349206349206349, | |
| "grad_norm": 0.36198532581329346, | |
| "learning_rate": 9.398155480222474e-05, | |
| "loss": 0.0285, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 6.380952380952381, | |
| "grad_norm": 0.2991617023944855, | |
| "learning_rate": 9.390266608630128e-05, | |
| "loss": 0.0292, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 6.412698412698413, | |
| "grad_norm": 0.35885655879974365, | |
| "learning_rate": 9.38232972578553e-05, | |
| "loss": 0.0352, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 6.444444444444445, | |
| "grad_norm": 0.31878602504730225, | |
| "learning_rate": 9.374344918485164e-05, | |
| "loss": 0.035, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 6.476190476190476, | |
| "grad_norm": 0.3159041702747345, | |
| "learning_rate": 9.366312274049602e-05, | |
| "loss": 0.0387, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 6.507936507936508, | |
| "grad_norm": 0.4548608064651489, | |
| "learning_rate": 9.358231880322554e-05, | |
| "loss": 0.0363, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 6.5396825396825395, | |
| "grad_norm": 0.4517418146133423, | |
| "learning_rate": 9.350103825669916e-05, | |
| "loss": 0.0367, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 6.571428571428571, | |
| "grad_norm": 0.3216607868671417, | |
| "learning_rate": 9.341928198978787e-05, | |
| "loss": 0.0428, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 6.603174603174603, | |
| "grad_norm": 0.4702600836753845, | |
| "learning_rate": 9.333705089656512e-05, | |
| "loss": 0.0385, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 6.634920634920634, | |
| "grad_norm": 0.49176234006881714, | |
| "learning_rate": 9.325434587629698e-05, | |
| "loss": 0.0356, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 6.666666666666667, | |
| "grad_norm": 0.7306638956069946, | |
| "learning_rate": 9.31711678334323e-05, | |
| "loss": 0.0286, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 6.698412698412699, | |
| "grad_norm": 0.5895923972129822, | |
| "learning_rate": 9.308751767759282e-05, | |
| "loss": 0.0311, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 6.73015873015873, | |
| "grad_norm": 0.41420778632164, | |
| "learning_rate": 9.300339632356325e-05, | |
| "loss": 0.031, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 6.761904761904762, | |
| "grad_norm": 0.4679277539253235, | |
| "learning_rate": 9.291880469128124e-05, | |
| "loss": 0.0338, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 6.7936507936507935, | |
| "grad_norm": 0.4596228301525116, | |
| "learning_rate": 9.283374370582732e-05, | |
| "loss": 0.0336, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 6.825396825396825, | |
| "grad_norm": 0.4402821362018585, | |
| "learning_rate": 9.274821429741482e-05, | |
| "loss": 0.0404, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 6.857142857142857, | |
| "grad_norm": 0.4401994049549103, | |
| "learning_rate": 9.266221740137961e-05, | |
| "loss": 0.035, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 6.888888888888889, | |
| "grad_norm": 0.3071947395801544, | |
| "learning_rate": 9.257575395817001e-05, | |
| "loss": 0.0326, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 6.920634920634921, | |
| "grad_norm": 0.43370434641838074, | |
| "learning_rate": 9.248882491333637e-05, | |
| "loss": 0.0321, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 6.9523809523809526, | |
| "grad_norm": 0.33804747462272644, | |
| "learning_rate": 9.240143121752076e-05, | |
| "loss": 0.0336, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 6.984126984126984, | |
| "grad_norm": 0.39908695220947266, | |
| "learning_rate": 9.23135738264467e-05, | |
| "loss": 0.0353, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 7.015873015873016, | |
| "grad_norm": 0.4300536811351776, | |
| "learning_rate": 9.222525370090849e-05, | |
| "loss": 0.0312, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 7.0476190476190474, | |
| "grad_norm": 0.36719879508018494, | |
| "learning_rate": 9.213647180676088e-05, | |
| "loss": 0.0328, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 7.079365079365079, | |
| "grad_norm": 0.35475772619247437, | |
| "learning_rate": 9.204722911490846e-05, | |
| "loss": 0.0325, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 7.111111111111111, | |
| "grad_norm": 0.34396812319755554, | |
| "learning_rate": 9.1957526601295e-05, | |
| "loss": 0.0314, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 7.142857142857143, | |
| "grad_norm": 0.3947671949863434, | |
| "learning_rate": 9.186736524689281e-05, | |
| "loss": 0.0353, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 7.174603174603175, | |
| "grad_norm": 0.4248429238796234, | |
| "learning_rate": 9.177674603769204e-05, | |
| "loss": 0.0324, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 7.2063492063492065, | |
| "grad_norm": 0.2929597795009613, | |
| "learning_rate": 9.168566996468983e-05, | |
| "loss": 0.0285, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 7.238095238095238, | |
| "grad_norm": 0.29275619983673096, | |
| "learning_rate": 9.159413802387951e-05, | |
| "loss": 0.04, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 7.26984126984127, | |
| "grad_norm": 0.4151788055896759, | |
| "learning_rate": 9.150215121623974e-05, | |
| "loss": 0.0371, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 7.301587301587301, | |
| "grad_norm": 0.2849891185760498, | |
| "learning_rate": 9.140971054772349e-05, | |
| "loss": 0.0291, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 7.333333333333333, | |
| "grad_norm": 0.31859222054481506, | |
| "learning_rate": 9.131681702924713e-05, | |
| "loss": 0.0326, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 7.365079365079365, | |
| "grad_norm": 0.31533926725387573, | |
| "learning_rate": 9.122347167667926e-05, | |
| "loss": 0.0333, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 7.396825396825397, | |
| "grad_norm": 0.3959997296333313, | |
| "learning_rate": 9.112967551082973e-05, | |
| "loss": 0.0387, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 7.428571428571429, | |
| "grad_norm": 0.2746221721172333, | |
| "learning_rate": 9.103542955743835e-05, | |
| "loss": 0.0336, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 7.4603174603174605, | |
| "grad_norm": 0.2060820460319519, | |
| "learning_rate": 9.094073484716381e-05, | |
| "loss": 0.0339, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 7.492063492063492, | |
| "grad_norm": 0.3962860703468323, | |
| "learning_rate": 9.084559241557226e-05, | |
| "loss": 0.0374, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 7.523809523809524, | |
| "grad_norm": 0.4183862507343292, | |
| "learning_rate": 9.075000330312608e-05, | |
| "loss": 0.0379, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 7.555555555555555, | |
| "grad_norm": 0.4718814790248871, | |
| "learning_rate": 9.065396855517253e-05, | |
| "loss": 0.028, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 7.587301587301587, | |
| "grad_norm": 0.3735099136829376, | |
| "learning_rate": 9.055748922193219e-05, | |
| "loss": 0.0344, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 7.619047619047619, | |
| "grad_norm": 0.33237960934638977, | |
| "learning_rate": 9.046056635848761e-05, | |
| "loss": 0.0334, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 7.650793650793651, | |
| "grad_norm": 0.5768173336982727, | |
| "learning_rate": 9.036320102477169e-05, | |
| "loss": 0.0305, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 7.682539682539683, | |
| "grad_norm": 0.271857351064682, | |
| "learning_rate": 9.02653942855561e-05, | |
| "loss": 0.0277, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 7.714285714285714, | |
| "grad_norm": 0.29152220487594604, | |
| "learning_rate": 9.016714721043971e-05, | |
| "loss": 0.0238, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 7.746031746031746, | |
| "grad_norm": 0.23735171556472778, | |
| "learning_rate": 9.006846087383675e-05, | |
| "loss": 0.0363, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 7.777777777777778, | |
| "grad_norm": 0.27764931321144104, | |
| "learning_rate": 8.996933635496523e-05, | |
| "loss": 0.0327, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 7.809523809523809, | |
| "grad_norm": 0.5050080418586731, | |
| "learning_rate": 8.986977473783498e-05, | |
| "loss": 0.0328, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 7.841269841269841, | |
| "grad_norm": 0.4127337634563446, | |
| "learning_rate": 8.97697771112359e-05, | |
| "loss": 0.031, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 7.8730158730158735, | |
| "grad_norm": 0.2879894971847534, | |
| "learning_rate": 8.966934456872602e-05, | |
| "loss": 0.0304, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 7.904761904761905, | |
| "grad_norm": 0.3028964698314667, | |
| "learning_rate": 8.95684782086195e-05, | |
| "loss": 0.0267, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 7.936507936507937, | |
| "grad_norm": 0.3471127450466156, | |
| "learning_rate": 8.946717913397476e-05, | |
| "loss": 0.0281, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 7.968253968253968, | |
| "grad_norm": 0.2976968586444855, | |
| "learning_rate": 8.93654484525822e-05, | |
| "loss": 0.0336, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "grad_norm": 0.251755028963089, | |
| "learning_rate": 8.926328727695226e-05, | |
| "loss": 0.0284, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 8.031746031746032, | |
| "grad_norm": 0.24963544309139252, | |
| "learning_rate": 8.916069672430319e-05, | |
| "loss": 0.0272, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 8.063492063492063, | |
| "grad_norm": 0.3640507161617279, | |
| "learning_rate": 8.905767791654884e-05, | |
| "loss": 0.0291, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 8.095238095238095, | |
| "grad_norm": 0.24767106771469116, | |
| "learning_rate": 8.895423198028638e-05, | |
| "loss": 0.0349, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 8.126984126984127, | |
| "grad_norm": 0.3230677843093872, | |
| "learning_rate": 8.885036004678402e-05, | |
| "loss": 0.0327, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 8.158730158730158, | |
| "grad_norm": 0.3282492160797119, | |
| "learning_rate": 8.874606325196857e-05, | |
| "loss": 0.0297, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 8.19047619047619, | |
| "grad_norm": 0.42755523324012756, | |
| "learning_rate": 8.864134273641304e-05, | |
| "loss": 0.0302, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 8.222222222222221, | |
| "grad_norm": 0.2434818297624588, | |
| "learning_rate": 8.853619964532427e-05, | |
| "loss": 0.0307, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 8.253968253968253, | |
| "grad_norm": 0.27261874079704285, | |
| "learning_rate": 8.843063512853019e-05, | |
| "loss": 0.0276, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 8.285714285714286, | |
| "grad_norm": 0.3149561882019043, | |
| "learning_rate": 8.832465034046749e-05, | |
| "loss": 0.0277, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 8.317460317460318, | |
| "grad_norm": 0.3742763102054596, | |
| "learning_rate": 8.821824644016882e-05, | |
| "loss": 0.0332, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 8.34920634920635, | |
| "grad_norm": 0.29386818408966064, | |
| "learning_rate": 8.811142459125019e-05, | |
| "loss": 0.0331, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 8.380952380952381, | |
| "grad_norm": 0.42231670022010803, | |
| "learning_rate": 8.800418596189822e-05, | |
| "loss": 0.0278, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 8.412698412698413, | |
| "grad_norm": 0.2827942669391632, | |
| "learning_rate": 8.789653172485737e-05, | |
| "loss": 0.0307, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 8.444444444444445, | |
| "grad_norm": 0.2928946018218994, | |
| "learning_rate": 8.778846305741715e-05, | |
| "loss": 0.0404, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 8.476190476190476, | |
| "grad_norm": 0.2078196257352829, | |
| "learning_rate": 8.767998114139918e-05, | |
| "loss": 0.0283, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 8.507936507936508, | |
| "grad_norm": 0.2819688618183136, | |
| "learning_rate": 8.757108716314429e-05, | |
| "loss": 0.0328, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 8.53968253968254, | |
| "grad_norm": 0.4695485234260559, | |
| "learning_rate": 8.746178231349962e-05, | |
| "loss": 0.033, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 8.571428571428571, | |
| "grad_norm": 0.5232065320014954, | |
| "learning_rate": 8.735206778780549e-05, | |
| "loss": 0.0384, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 8.603174603174603, | |
| "grad_norm": 0.47322961688041687, | |
| "learning_rate": 8.724194478588234e-05, | |
| "loss": 0.0371, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 8.634920634920634, | |
| "grad_norm": 0.40002769231796265, | |
| "learning_rate": 8.713141451201772e-05, | |
| "loss": 0.0284, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 8.666666666666666, | |
| "grad_norm": 0.5144523978233337, | |
| "learning_rate": 8.702047817495295e-05, | |
| "loss": 0.0313, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 8.698412698412698, | |
| "grad_norm": 0.2859914302825928, | |
| "learning_rate": 8.69091369878701e-05, | |
| "loss": 0.037, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 8.73015873015873, | |
| "grad_norm": 0.3418373465538025, | |
| "learning_rate": 8.679739216837849e-05, | |
| "loss": 0.024, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 8.761904761904763, | |
| "grad_norm": 0.2862963080406189, | |
| "learning_rate": 8.66852449385016e-05, | |
| "loss": 0.0312, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 8.793650793650794, | |
| "grad_norm": 0.24060198664665222, | |
| "learning_rate": 8.657269652466356e-05, | |
| "loss": 0.0262, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 8.825396825396826, | |
| "grad_norm": 0.30361875891685486, | |
| "learning_rate": 8.645974815767577e-05, | |
| "loss": 0.0343, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 8.857142857142858, | |
| "grad_norm": 0.3292262554168701, | |
| "learning_rate": 8.634640107272351e-05, | |
| "loss": 0.0294, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 8.88888888888889, | |
| "grad_norm": 0.254688560962677, | |
| "learning_rate": 8.623265650935234e-05, | |
| "loss": 0.0278, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 8.920634920634921, | |
| "grad_norm": 0.4283793270587921, | |
| "learning_rate": 8.611851571145456e-05, | |
| "loss": 0.0313, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 8.952380952380953, | |
| "grad_norm": 0.31159675121307373, | |
| "learning_rate": 8.600397992725566e-05, | |
| "loss": 0.0265, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 8.984126984126984, | |
| "grad_norm": 0.41553831100463867, | |
| "learning_rate": 8.588905040930061e-05, | |
| "loss": 0.0311, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 9.015873015873016, | |
| "grad_norm": 0.22152122855186462, | |
| "learning_rate": 8.577372841444022e-05, | |
| "loss": 0.0293, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 9.047619047619047, | |
| "grad_norm": 0.4773101806640625, | |
| "learning_rate": 8.565801520381736e-05, | |
| "loss": 0.0279, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 9.079365079365079, | |
| "grad_norm": 0.2500156760215759, | |
| "learning_rate": 8.554191204285313e-05, | |
| "loss": 0.0323, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 9.11111111111111, | |
| "grad_norm": 0.3715501129627228, | |
| "learning_rate": 8.542542020123315e-05, | |
| "loss": 0.0289, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 9.142857142857142, | |
| "grad_norm": 0.3828801214694977, | |
| "learning_rate": 8.530854095289347e-05, | |
| "loss": 0.0274, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 9.174603174603174, | |
| "grad_norm": 0.4772924780845642, | |
| "learning_rate": 8.519127557600688e-05, | |
| "loss": 0.025, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 9.206349206349206, | |
| "grad_norm": 0.36756375432014465, | |
| "learning_rate": 8.507362535296871e-05, | |
| "loss": 0.0354, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 9.238095238095237, | |
| "grad_norm": 0.40465983748435974, | |
| "learning_rate": 8.495559157038299e-05, | |
| "loss": 0.0341, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 9.26984126984127, | |
| "grad_norm": 0.34765079617500305, | |
| "learning_rate": 8.483717551904823e-05, | |
| "loss": 0.0274, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 9.301587301587302, | |
| "grad_norm": 0.26325324177742004, | |
| "learning_rate": 8.47183784939434e-05, | |
| "loss": 0.029, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 9.333333333333334, | |
| "grad_norm": 0.3974003493785858, | |
| "learning_rate": 8.459920179421374e-05, | |
| "loss": 0.0283, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 9.365079365079366, | |
| "grad_norm": 0.2610330283641815, | |
| "learning_rate": 8.447964672315656e-05, | |
| "loss": 0.0228, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 9.396825396825397, | |
| "grad_norm": 0.25331220030784607, | |
| "learning_rate": 8.435971458820692e-05, | |
| "loss": 0.0319, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 9.428571428571429, | |
| "grad_norm": 0.3714085519313812, | |
| "learning_rate": 8.423940670092345e-05, | |
| "loss": 0.0361, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 9.46031746031746, | |
| "grad_norm": 0.3641420602798462, | |
| "learning_rate": 8.411872437697394e-05, | |
| "loss": 0.0292, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 9.492063492063492, | |
| "grad_norm": 0.34705230593681335, | |
| "learning_rate": 8.399766893612096e-05, | |
| "loss": 0.0245, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 9.523809523809524, | |
| "grad_norm": 0.3130747377872467, | |
| "learning_rate": 8.38762417022074e-05, | |
| "loss": 0.0304, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 9.555555555555555, | |
| "grad_norm": 0.28271496295928955, | |
| "learning_rate": 8.375444400314204e-05, | |
| "loss": 0.027, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 9.587301587301587, | |
| "grad_norm": 0.21947787702083588, | |
| "learning_rate": 8.3632277170885e-05, | |
| "loss": 0.0339, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 9.619047619047619, | |
| "grad_norm": 0.4595395624637604, | |
| "learning_rate": 8.350974254143318e-05, | |
| "loss": 0.0276, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 9.65079365079365, | |
| "grad_norm": 0.2926284670829773, | |
| "learning_rate": 8.338684145480566e-05, | |
| "loss": 0.0277, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 9.682539682539682, | |
| "grad_norm": 0.3345585763454437, | |
| "learning_rate": 8.326357525502904e-05, | |
| "loss": 0.035, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 9.714285714285714, | |
| "grad_norm": 0.36706966161727905, | |
| "learning_rate": 8.313994529012273e-05, | |
| "loss": 0.0342, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 9.746031746031747, | |
| "grad_norm": 0.33751460909843445, | |
| "learning_rate": 8.301595291208422e-05, | |
| "loss": 0.0353, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 9.777777777777779, | |
| "grad_norm": 0.3889334201812744, | |
| "learning_rate": 8.289159947687427e-05, | |
| "loss": 0.0302, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 9.80952380952381, | |
| "grad_norm": 0.3555496037006378, | |
| "learning_rate": 8.276688634440216e-05, | |
| "loss": 0.0332, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 9.841269841269842, | |
| "grad_norm": 0.39580729603767395, | |
| "learning_rate": 8.26418148785107e-05, | |
| "loss": 0.033, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 9.873015873015873, | |
| "grad_norm": 0.5496797561645508, | |
| "learning_rate": 8.251638644696141e-05, | |
| "loss": 0.0381, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 9.904761904761905, | |
| "grad_norm": 0.40969160199165344, | |
| "learning_rate": 8.23906024214195e-05, | |
| "loss": 0.0306, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 9.936507936507937, | |
| "grad_norm": 0.3210360109806061, | |
| "learning_rate": 8.226446417743897e-05, | |
| "loss": 0.0333, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 9.968253968253968, | |
| "grad_norm": 0.260416716337204, | |
| "learning_rate": 8.213797309444742e-05, | |
| "loss": 0.0299, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "grad_norm": 0.2923371493816376, | |
| "learning_rate": 8.201113055573105e-05, | |
| "loss": 0.0289, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 10.031746031746032, | |
| "grad_norm": 0.2551594376564026, | |
| "learning_rate": 8.188393794841958e-05, | |
| "loss": 0.0236, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 10.063492063492063, | |
| "grad_norm": 0.35977673530578613, | |
| "learning_rate": 8.175639666347094e-05, | |
| "loss": 0.029, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 10.095238095238095, | |
| "grad_norm": 0.29153960943222046, | |
| "learning_rate": 8.162850809565623e-05, | |
| "loss": 0.0271, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 10.126984126984127, | |
| "grad_norm": 0.39510470628738403, | |
| "learning_rate": 8.150027364354431e-05, | |
| "loss": 0.0277, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 10.158730158730158, | |
| "grad_norm": 0.4301944077014923, | |
| "learning_rate": 8.137169470948662e-05, | |
| "loss": 0.0358, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 10.19047619047619, | |
| "grad_norm": 0.36494484543800354, | |
| "learning_rate": 8.124277269960179e-05, | |
| "loss": 0.0288, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 10.222222222222221, | |
| "grad_norm": 0.356656014919281, | |
| "learning_rate": 8.111350902376023e-05, | |
| "loss": 0.0278, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 10.253968253968253, | |
| "grad_norm": 0.20670472085475922, | |
| "learning_rate": 8.098390509556883e-05, | |
| "loss": 0.0275, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 10.285714285714286, | |
| "grad_norm": 0.21047915518283844, | |
| "learning_rate": 8.085396233235536e-05, | |
| "loss": 0.0226, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 10.317460317460318, | |
| "grad_norm": 0.33195599913597107, | |
| "learning_rate": 8.072368215515306e-05, | |
| "loss": 0.0251, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 10.34920634920635, | |
| "grad_norm": 0.45835080742836, | |
| "learning_rate": 8.059306598868506e-05, | |
| "loss": 0.035, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 10.380952380952381, | |
| "grad_norm": 0.41393500566482544, | |
| "learning_rate": 8.046211526134888e-05, | |
| "loss": 0.0273, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 10.412698412698413, | |
| "grad_norm": 0.29867857694625854, | |
| "learning_rate": 8.033083140520065e-05, | |
| "loss": 0.0287, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 10.444444444444445, | |
| "grad_norm": 0.2081543207168579, | |
| "learning_rate": 8.019921585593962e-05, | |
| "loss": 0.0268, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 10.476190476190476, | |
| "grad_norm": 0.1601417064666748, | |
| "learning_rate": 8.006727005289232e-05, | |
| "loss": 0.0215, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 10.507936507936508, | |
| "grad_norm": 0.2676179111003876, | |
| "learning_rate": 7.993499543899692e-05, | |
| "loss": 0.0235, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 10.53968253968254, | |
| "grad_norm": 0.3044885993003845, | |
| "learning_rate": 7.980239346078742e-05, | |
| "loss": 0.0245, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 10.571428571428571, | |
| "grad_norm": 0.4034262001514435, | |
| "learning_rate": 7.966946556837778e-05, | |
| "loss": 0.0268, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 10.603174603174603, | |
| "grad_norm": 0.3203282356262207, | |
| "learning_rate": 7.953621321544616e-05, | |
| "loss": 0.0282, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 10.634920634920634, | |
| "grad_norm": 0.29068538546562195, | |
| "learning_rate": 7.940263785921896e-05, | |
| "loss": 0.0298, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 10.666666666666666, | |
| "grad_norm": 0.29592859745025635, | |
| "learning_rate": 7.926874096045482e-05, | |
| "loss": 0.028, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 10.698412698412698, | |
| "grad_norm": 0.3880532681941986, | |
| "learning_rate": 7.913452398342881e-05, | |
| "loss": 0.0362, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 10.73015873015873, | |
| "grad_norm": 0.3458194136619568, | |
| "learning_rate": 7.89999883959163e-05, | |
| "loss": 0.0304, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 10.761904761904763, | |
| "grad_norm": 0.3671174943447113, | |
| "learning_rate": 7.886513566917687e-05, | |
| "loss": 0.026, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 10.793650793650794, | |
| "grad_norm": 0.3829076588153839, | |
| "learning_rate": 7.872996727793838e-05, | |
| "loss": 0.0311, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 10.825396825396826, | |
| "grad_norm": 0.3998180627822876, | |
| "learning_rate": 7.859448470038069e-05, | |
| "loss": 0.0286, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 10.857142857142858, | |
| "grad_norm": 0.30700376629829407, | |
| "learning_rate": 7.845868941811956e-05, | |
| "loss": 0.0284, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 10.88888888888889, | |
| "grad_norm": 0.2894219160079956, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.0253, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 10.920634920634921, | |
| "grad_norm": 0.33705928921699524, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0241, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 10.952380952380953, | |
| "grad_norm": 0.4133813679218292, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0292, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 10.984126984126984, | |
| "grad_norm": 0.4380682110786438, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0288, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 11.015873015873016, | |
| "grad_norm": 0.24987244606018066, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.0259, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 11.047619047619047, | |
| "grad_norm": 0.43653711676597595, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.0247, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 11.079365079365079, | |
| "grad_norm": 0.27785545587539673, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.029, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 11.11111111111111, | |
| "grad_norm": 0.3557858467102051, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.0283, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 11.142857142857142, | |
| "grad_norm": 0.42482343316078186, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0304, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 11.174603174603174, | |
| "grad_norm": 0.3655693233013153, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.0287, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 11.206349206349206, | |
| "grad_norm": 0.24164991080760956, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.0209, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 11.238095238095237, | |
| "grad_norm": 0.4333374798297882, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.0287, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 11.26984126984127, | |
| "grad_norm": 0.5981290936470032, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0312, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 11.301587301587302, | |
| "grad_norm": 0.29767468571662903, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0303, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 11.333333333333334, | |
| "grad_norm": 0.370286226272583, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.0328, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 11.365079365079366, | |
| "grad_norm": 0.28791019320487976, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0287, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 11.396825396825397, | |
| "grad_norm": 0.2591996192932129, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0255, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 11.428571428571429, | |
| "grad_norm": 0.28889262676239014, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0295, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 11.46031746031746, | |
| "grad_norm": 0.2714182734489441, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.022, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 11.492063492063492, | |
| "grad_norm": 0.2665630877017975, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.0249, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 11.523809523809524, | |
| "grad_norm": 0.3079681694507599, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.0278, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 11.555555555555555, | |
| "grad_norm": 0.24890892207622528, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0265, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 11.587301587301587, | |
| "grad_norm": 0.2645002603530884, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0278, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 11.619047619047619, | |
| "grad_norm": 0.20993536710739136, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.0215, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 11.65079365079365, | |
| "grad_norm": 0.28892359137535095, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0271, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 11.682539682539682, | |
| "grad_norm": 0.28552883863449097, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0279, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 11.714285714285714, | |
| "grad_norm": 0.35969096422195435, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0276, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 11.746031746031747, | |
| "grad_norm": 0.33867740631103516, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0243, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 11.777777777777779, | |
| "grad_norm": 0.2469746172428131, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0311, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 11.80952380952381, | |
| "grad_norm": 0.26109179854393005, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0268, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 11.841269841269842, | |
| "grad_norm": 0.2715725004673004, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0225, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 11.873015873015873, | |
| "grad_norm": 0.3264642357826233, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0277, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 11.904761904761905, | |
| "grad_norm": 0.2848033607006073, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0284, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 11.936507936507937, | |
| "grad_norm": 0.23254472017288208, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0286, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 11.968253968253968, | |
| "grad_norm": 0.32983407378196716, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.0286, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 12.0, | |
| "grad_norm": 0.24920956790447235, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0273, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 12.031746031746032, | |
| "grad_norm": 0.2593471109867096, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.023, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 12.063492063492063, | |
| "grad_norm": 0.42870208621025085, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0281, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 12.095238095238095, | |
| "grad_norm": 0.2866934835910797, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0235, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 12.126984126984127, | |
| "grad_norm": 0.2857847809791565, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0274, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 12.158730158730158, | |
| "grad_norm": 0.2929547429084778, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0297, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 12.19047619047619, | |
| "grad_norm": 0.28413233160972595, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0258, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 12.222222222222221, | |
| "grad_norm": 0.29493626952171326, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0279, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 12.253968253968253, | |
| "grad_norm": 0.45703232288360596, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.0307, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 12.285714285714286, | |
| "grad_norm": 0.3389991223812103, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0293, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 12.317460317460318, | |
| "grad_norm": 0.2539513409137726, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0273, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 12.34920634920635, | |
| "grad_norm": 0.32561811804771423, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0263, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 12.380952380952381, | |
| "grad_norm": 0.2889886498451233, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.0284, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 12.412698412698413, | |
| "grad_norm": 0.2012103945016861, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0255, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 12.444444444444445, | |
| "grad_norm": 0.38932177424430847, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.0216, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 12.476190476190476, | |
| "grad_norm": 0.26320868730545044, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0282, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 12.507936507936508, | |
| "grad_norm": 0.3304706811904907, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0269, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 12.53968253968254, | |
| "grad_norm": 0.2636423408985138, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0286, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 12.571428571428571, | |
| "grad_norm": 0.4396299421787262, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0293, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 12.603174603174603, | |
| "grad_norm": 0.37895822525024414, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0268, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 12.634920634920634, | |
| "grad_norm": 0.2761574387550354, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.024, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 12.666666666666666, | |
| "grad_norm": 0.32840973138809204, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0268, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 12.698412698412698, | |
| "grad_norm": 0.20794102549552917, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0225, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 12.73015873015873, | |
| "grad_norm": 0.29938018321990967, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0217, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 12.761904761904763, | |
| "grad_norm": 0.25751352310180664, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0238, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 12.793650793650794, | |
| "grad_norm": 0.4213655889034271, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.0265, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 12.825396825396826, | |
| "grad_norm": 0.35374751687049866, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0297, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 12.857142857142858, | |
| "grad_norm": 0.23937378823757172, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0262, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 12.88888888888889, | |
| "grad_norm": 0.35627686977386475, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0283, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 12.920634920634921, | |
| "grad_norm": 0.21707098186016083, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0275, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 12.952380952380953, | |
| "grad_norm": 0.18707968294620514, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0216, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 12.984126984126984, | |
| "grad_norm": 0.34250861406326294, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0235, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 13.015873015873016, | |
| "grad_norm": 0.18528521060943604, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0252, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 13.047619047619047, | |
| "grad_norm": 0.5380460619926453, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0289, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 13.079365079365079, | |
| "grad_norm": 0.17781846225261688, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0233, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 13.11111111111111, | |
| "grad_norm": 0.3377573490142822, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0272, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 13.142857142857142, | |
| "grad_norm": 0.26099497079849243, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0231, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 13.174603174603174, | |
| "grad_norm": 0.22609737515449524, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.0259, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 13.206349206349206, | |
| "grad_norm": 0.20121638476848602, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0183, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 13.238095238095237, | |
| "grad_norm": 0.2596859931945801, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0306, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 13.26984126984127, | |
| "grad_norm": 0.2791779041290283, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.0253, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 13.301587301587302, | |
| "grad_norm": 0.2669035494327545, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0328, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 13.333333333333334, | |
| "grad_norm": 0.24622219800949097, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0227, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 13.365079365079366, | |
| "grad_norm": 0.3035546541213989, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0227, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 13.396825396825397, | |
| "grad_norm": 0.17289666831493378, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.0262, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 13.428571428571429, | |
| "grad_norm": 0.2909645736217499, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.0222, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 13.46031746031746, | |
| "grad_norm": 0.29525938630104065, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0258, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 13.492063492063492, | |
| "grad_norm": 0.27488526701927185, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0276, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 13.523809523809524, | |
| "grad_norm": 0.3114479184150696, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0298, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 13.555555555555555, | |
| "grad_norm": 0.279822438955307, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0258, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 13.587301587301587, | |
| "grad_norm": 0.27654293179512024, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0236, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 13.619047619047619, | |
| "grad_norm": 0.4042046368122101, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0217, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 13.65079365079365, | |
| "grad_norm": 0.30004021525382996, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0231, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 13.682539682539682, | |
| "grad_norm": 0.28635573387145996, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.023, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 13.714285714285714, | |
| "grad_norm": 0.34172120690345764, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0267, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 13.746031746031747, | |
| "grad_norm": 0.3556895852088928, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0224, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 13.777777777777779, | |
| "grad_norm": 0.4093778133392334, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0203, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 13.80952380952381, | |
| "grad_norm": 0.24909934401512146, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0246, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 13.841269841269842, | |
| "grad_norm": 0.23419755697250366, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0242, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 13.873015873015873, | |
| "grad_norm": 0.2079116702079773, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0246, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 13.904761904761905, | |
| "grad_norm": 0.18924421072006226, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0236, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 13.936507936507937, | |
| "grad_norm": 0.27033859491348267, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0253, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 13.968253968253968, | |
| "grad_norm": 0.2956063449382782, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0314, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 14.0, | |
| "grad_norm": 0.2641093134880066, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0274, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 14.031746031746032, | |
| "grad_norm": 0.35461366176605225, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0275, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 14.063492063492063, | |
| "grad_norm": 0.2529721260070801, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.0241, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 14.095238095238095, | |
| "grad_norm": 0.27950140833854675, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.0265, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 14.126984126984127, | |
| "grad_norm": 0.29913845658302307, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0208, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 14.158730158730158, | |
| "grad_norm": 0.2987229824066162, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0221, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 14.19047619047619, | |
| "grad_norm": 0.30613794922828674, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0199, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 14.222222222222221, | |
| "grad_norm": 0.30882078409194946, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0236, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 14.253968253968253, | |
| "grad_norm": 0.2980142831802368, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0244, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 14.285714285714286, | |
| "grad_norm": 0.3973177969455719, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0204, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 14.317460317460318, | |
| "grad_norm": 0.30851855874061584, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0218, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 14.34920634920635, | |
| "grad_norm": 0.2909444570541382, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0184, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 14.380952380952381, | |
| "grad_norm": 0.31829047203063965, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0225, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 14.412698412698413, | |
| "grad_norm": 0.25426673889160156, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.025, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 14.444444444444445, | |
| "grad_norm": 0.33348917961120605, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0214, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 14.476190476190476, | |
| "grad_norm": 0.3672037720680237, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0224, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 14.507936507936508, | |
| "grad_norm": 0.2308174967765808, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0242, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 14.53968253968254, | |
| "grad_norm": 0.23387981951236725, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0219, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 14.571428571428571, | |
| "grad_norm": 0.21761950850486755, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0254, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 14.603174603174603, | |
| "grad_norm": 0.23047703504562378, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0241, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 14.634920634920634, | |
| "grad_norm": 0.29550090432167053, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0233, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 14.666666666666666, | |
| "grad_norm": 0.29093849658966064, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0246, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 14.698412698412698, | |
| "grad_norm": 0.34847256541252136, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0255, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 14.73015873015873, | |
| "grad_norm": 0.2968214452266693, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0234, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 14.761904761904763, | |
| "grad_norm": 0.3150116503238678, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0235, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 14.793650793650794, | |
| "grad_norm": 0.2862436771392822, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0344, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 14.825396825396826, | |
| "grad_norm": 0.2179599553346634, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0234, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 14.857142857142858, | |
| "grad_norm": 0.19166281819343567, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0189, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 14.88888888888889, | |
| "grad_norm": 0.1889706552028656, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0247, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 14.920634920634921, | |
| "grad_norm": 0.2284940630197525, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0262, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 14.952380952380953, | |
| "grad_norm": 0.26289719343185425, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0244, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 14.984126984126984, | |
| "grad_norm": 0.29965940117836, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0219, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 15.015873015873016, | |
| "grad_norm": 0.29221540689468384, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0251, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 15.047619047619047, | |
| "grad_norm": 0.4449094235897064, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.022, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 15.079365079365079, | |
| "grad_norm": 0.30396053194999695, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0236, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 15.11111111111111, | |
| "grad_norm": 0.2569115459918976, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0238, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 15.142857142857142, | |
| "grad_norm": 0.19951505959033966, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0239, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 15.174603174603174, | |
| "grad_norm": 0.21121156215667725, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0266, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 15.206349206349206, | |
| "grad_norm": 0.3025633692741394, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.016, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 15.238095238095237, | |
| "grad_norm": 0.25481992959976196, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.027, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 15.26984126984127, | |
| "grad_norm": 0.32529211044311523, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0225, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 15.301587301587302, | |
| "grad_norm": 0.2208954095840454, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.0221, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 15.333333333333334, | |
| "grad_norm": 0.1693483144044876, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0243, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 15.365079365079366, | |
| "grad_norm": 0.2056979537010193, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0331, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 15.396825396825397, | |
| "grad_norm": 0.2631152272224426, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0237, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 15.428571428571429, | |
| "grad_norm": 0.3081046938896179, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0211, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 15.46031746031746, | |
| "grad_norm": 0.17033763229846954, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.0178, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 15.492063492063492, | |
| "grad_norm": 0.2479829639196396, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0239, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 15.523809523809524, | |
| "grad_norm": 0.26006877422332764, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0208, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 15.555555555555555, | |
| "grad_norm": 0.33415478467941284, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0216, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 15.587301587301587, | |
| "grad_norm": 0.36169448494911194, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.025, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 15.619047619047619, | |
| "grad_norm": 0.2054392546415329, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0176, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 15.65079365079365, | |
| "grad_norm": 0.29843470454216003, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0253, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 15.682539682539682, | |
| "grad_norm": 0.3116275370121002, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0194, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 15.714285714285714, | |
| "grad_norm": 0.26341378688812256, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0238, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 15.746031746031747, | |
| "grad_norm": 0.2960616648197174, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0189, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 15.777777777777779, | |
| "grad_norm": 0.2992161810398102, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.024, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 15.80952380952381, | |
| "grad_norm": 0.17235149443149567, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0294, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 15.841269841269842, | |
| "grad_norm": 0.2955491840839386, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0223, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 15.873015873015873, | |
| "grad_norm": 0.2724284529685974, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0221, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 15.904761904761905, | |
| "grad_norm": 0.3504982590675354, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0232, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 15.936507936507937, | |
| "grad_norm": 0.2651078701019287, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0254, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 15.968253968253968, | |
| "grad_norm": 0.22011253237724304, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0219, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 16.0, | |
| "grad_norm": 0.3447621166706085, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.02, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 16.03174603174603, | |
| "grad_norm": 0.2557232975959778, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0236, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 16.063492063492063, | |
| "grad_norm": 0.33828020095825195, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.0198, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 16.095238095238095, | |
| "grad_norm": 0.20920541882514954, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0184, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 16.126984126984127, | |
| "grad_norm": 0.13682730495929718, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.02, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 16.158730158730158, | |
| "grad_norm": 0.41961154341697693, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0205, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 16.19047619047619, | |
| "grad_norm": 0.3330617845058441, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0241, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 16.22222222222222, | |
| "grad_norm": 0.23497287929058075, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.0166, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 16.253968253968253, | |
| "grad_norm": 0.26194116473197937, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.0322, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 16.285714285714285, | |
| "grad_norm": 0.23613131046295166, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0228, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 16.317460317460316, | |
| "grad_norm": 0.34424832463264465, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0239, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 16.349206349206348, | |
| "grad_norm": 0.2500576078891754, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0175, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 16.38095238095238, | |
| "grad_norm": 0.1684722900390625, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0225, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 16.41269841269841, | |
| "grad_norm": 0.312694251537323, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0185, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 16.444444444444443, | |
| "grad_norm": 0.246519535779953, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0232, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 16.476190476190474, | |
| "grad_norm": 0.2781682312488556, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0243, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 16.507936507936506, | |
| "grad_norm": 0.2413027584552765, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0241, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 16.53968253968254, | |
| "grad_norm": 0.2697490155696869, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0232, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 16.571428571428573, | |
| "grad_norm": 0.2651253342628479, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0287, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 16.603174603174605, | |
| "grad_norm": 0.26745089888572693, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0265, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 16.634920634920636, | |
| "grad_norm": 0.34986671805381775, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0198, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 16.666666666666668, | |
| "grad_norm": 0.23915472626686096, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.0177, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 16.6984126984127, | |
| "grad_norm": 0.2985886335372925, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0224, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 16.73015873015873, | |
| "grad_norm": 0.3229317367076874, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.021, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 16.761904761904763, | |
| "grad_norm": 0.3187403976917267, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0186, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 16.793650793650794, | |
| "grad_norm": 0.19303451478481293, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0207, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 16.825396825396826, | |
| "grad_norm": 0.38319337368011475, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.0184, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 16.857142857142858, | |
| "grad_norm": 0.2701345682144165, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.0208, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 16.88888888888889, | |
| "grad_norm": 0.28472524881362915, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0184, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 16.92063492063492, | |
| "grad_norm": 0.2619391977787018, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.0175, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 16.952380952380953, | |
| "grad_norm": 0.2245441973209381, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0177, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 16.984126984126984, | |
| "grad_norm": 0.1610591560602188, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0196, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 17.015873015873016, | |
| "grad_norm": 0.22560739517211914, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0218, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 17.047619047619047, | |
| "grad_norm": 0.16759437322616577, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0181, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 17.07936507936508, | |
| "grad_norm": 0.19600068032741547, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.018, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 17.11111111111111, | |
| "grad_norm": 0.22322769463062286, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0145, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 17.142857142857142, | |
| "grad_norm": 0.32207199931144714, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0181, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 17.174603174603174, | |
| "grad_norm": 0.26562565565109253, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0189, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 17.206349206349206, | |
| "grad_norm": 0.1457485556602478, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0163, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 17.238095238095237, | |
| "grad_norm": 0.3072642385959625, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0206, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 17.26984126984127, | |
| "grad_norm": 0.34067097306251526, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0222, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 17.3015873015873, | |
| "grad_norm": 0.18033282458782196, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0234, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 17.333333333333332, | |
| "grad_norm": 0.3316554129123688, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0194, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 17.365079365079364, | |
| "grad_norm": 0.24937453866004944, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0214, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 17.396825396825395, | |
| "grad_norm": 0.46569177508354187, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0162, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 17.428571428571427, | |
| "grad_norm": 0.10870670527219772, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0178, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 17.46031746031746, | |
| "grad_norm": 0.4214787483215332, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.0182, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 17.49206349206349, | |
| "grad_norm": 0.25194448232650757, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.0182, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 17.523809523809526, | |
| "grad_norm": 0.26536309719085693, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0199, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 17.555555555555557, | |
| "grad_norm": 0.33633047342300415, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.023, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 17.58730158730159, | |
| "grad_norm": 0.25206291675567627, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0205, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 17.61904761904762, | |
| "grad_norm": 0.28850990533828735, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0149, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 17.650793650793652, | |
| "grad_norm": 0.18880195915699005, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0178, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 17.682539682539684, | |
| "grad_norm": 0.2065879851579666, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0155, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 17.714285714285715, | |
| "grad_norm": 0.24312105774879456, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0217, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 17.746031746031747, | |
| "grad_norm": 0.36997637152671814, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.0217, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 17.77777777777778, | |
| "grad_norm": 0.24876128137111664, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0228, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 17.80952380952381, | |
| "grad_norm": 0.22453536093235016, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0213, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 17.841269841269842, | |
| "grad_norm": 0.1748613566160202, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0185, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 17.873015873015873, | |
| "grad_norm": 0.33691367506980896, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0188, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 17.904761904761905, | |
| "grad_norm": 0.24070586264133453, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0196, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 17.936507936507937, | |
| "grad_norm": 0.20968905091285706, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0215, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 17.96825396825397, | |
| "grad_norm": 0.3033250868320465, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0189, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 18.0, | |
| "grad_norm": 0.1783866137266159, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.0189, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 18.03174603174603, | |
| "grad_norm": 0.2542335093021393, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0186, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 18.063492063492063, | |
| "grad_norm": 0.21750037372112274, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.0194, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 18.095238095238095, | |
| "grad_norm": 0.1962343007326126, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0195, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 18.126984126984127, | |
| "grad_norm": 0.738716721534729, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.0207, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 18.158730158730158, | |
| "grad_norm": 0.16495724022388458, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0209, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 18.19047619047619, | |
| "grad_norm": 0.3285732567310333, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.029, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 18.22222222222222, | |
| "grad_norm": 0.2541879117488861, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0176, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 18.253968253968253, | |
| "grad_norm": 0.26457884907722473, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0206, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 18.285714285714285, | |
| "grad_norm": 0.28531527519226074, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0171, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 18.317460317460316, | |
| "grad_norm": 0.20844565331935883, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0183, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 18.349206349206348, | |
| "grad_norm": 0.17347386479377747, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0183, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 18.38095238095238, | |
| "grad_norm": 0.23052413761615753, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0236, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 18.41269841269841, | |
| "grad_norm": 0.19891178607940674, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0165, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 18.444444444444443, | |
| "grad_norm": 0.18310381472110748, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0171, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 18.476190476190474, | |
| "grad_norm": 0.3552546501159668, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0151, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 18.507936507936506, | |
| "grad_norm": 0.22134336829185486, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0229, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 18.53968253968254, | |
| "grad_norm": 0.2530042231082916, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0194, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 18.571428571428573, | |
| "grad_norm": 0.26407015323638916, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0224, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 18.603174603174605, | |
| "grad_norm": 0.20496098697185516, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0177, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 18.634920634920636, | |
| "grad_norm": 0.210124209523201, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0177, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 18.666666666666668, | |
| "grad_norm": 0.2115515172481537, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0219, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 18.6984126984127, | |
| "grad_norm": 0.31640735268592834, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0199, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 18.73015873015873, | |
| "grad_norm": 0.21765416860580444, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0191, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 18.761904761904763, | |
| "grad_norm": 0.1729920208454132, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0182, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 18.793650793650794, | |
| "grad_norm": 0.19538700580596924, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.0176, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 18.825396825396826, | |
| "grad_norm": 0.18663159012794495, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0226, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 18.857142857142858, | |
| "grad_norm": 0.1747313290834427, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0193, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 18.88888888888889, | |
| "grad_norm": 0.24584822356700897, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0175, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 18.92063492063492, | |
| "grad_norm": 0.33076950907707214, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.0153, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 18.952380952380953, | |
| "grad_norm": 0.23563335835933685, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0195, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 18.984126984126984, | |
| "grad_norm": 0.23687244951725006, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0159, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 19.015873015873016, | |
| "grad_norm": 0.19825780391693115, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0156, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 19.047619047619047, | |
| "grad_norm": 0.18263116478919983, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0238, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 19.07936507936508, | |
| "grad_norm": 0.24976813793182373, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0163, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 19.11111111111111, | |
| "grad_norm": 0.20997342467308044, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.016, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 19.142857142857142, | |
| "grad_norm": 0.21791313588619232, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0147, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 19.174603174603174, | |
| "grad_norm": 0.22968295216560364, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.0201, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 19.206349206349206, | |
| "grad_norm": 0.2760619521141052, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0143, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 19.238095238095237, | |
| "grad_norm": 0.22712457180023193, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0166, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 19.26984126984127, | |
| "grad_norm": 0.23904535174369812, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0156, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 19.3015873015873, | |
| "grad_norm": 0.23857884109020233, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0189, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 19.333333333333332, | |
| "grad_norm": 0.18940849602222443, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0142, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 19.365079365079364, | |
| "grad_norm": 0.22528968751430511, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0146, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 19.396825396825395, | |
| "grad_norm": 0.20369158685207367, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0161, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 19.428571428571427, | |
| "grad_norm": 0.17892442643642426, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.014, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 19.46031746031746, | |
| "grad_norm": 0.31294092535972595, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0162, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 19.49206349206349, | |
| "grad_norm": 0.23950867354869843, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0154, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 19.523809523809526, | |
| "grad_norm": 0.216984361410141, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0138, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 19.555555555555557, | |
| "grad_norm": 0.29031306505203247, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0201, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 19.58730158730159, | |
| "grad_norm": 0.2694243788719177, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0203, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 19.61904761904762, | |
| "grad_norm": 0.2830352783203125, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0181, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 19.650793650793652, | |
| "grad_norm": 0.5752967000007629, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.016, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 19.682539682539684, | |
| "grad_norm": 0.2517099976539612, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0192, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 19.714285714285715, | |
| "grad_norm": 0.3875671327114105, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0145, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 19.746031746031747, | |
| "grad_norm": 0.2734561264514923, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0179, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 19.77777777777778, | |
| "grad_norm": 0.21619731187820435, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0165, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 19.80952380952381, | |
| "grad_norm": 0.2739393413066864, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0168, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 19.841269841269842, | |
| "grad_norm": 0.22468553483486176, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0171, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 19.873015873015873, | |
| "grad_norm": 0.2512083649635315, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0224, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 19.904761904761905, | |
| "grad_norm": 0.20822279155254364, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0151, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 19.936507936507937, | |
| "grad_norm": 0.22378009557724, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0159, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 19.96825396825397, | |
| "grad_norm": 0.309187114238739, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0217, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "grad_norm": 0.35167208313941956, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0149, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 20.03174603174603, | |
| "grad_norm": 0.1543435901403427, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0162, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 20.063492063492063, | |
| "grad_norm": 0.21264240145683289, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0134, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 20.095238095238095, | |
| "grad_norm": 0.21552325785160065, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0136, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 20.126984126984127, | |
| "grad_norm": 0.19033001363277435, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0161, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 20.158730158730158, | |
| "grad_norm": 0.29480844736099243, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0146, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 20.19047619047619, | |
| "grad_norm": 0.23883093893527985, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0143, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 20.22222222222222, | |
| "grad_norm": 0.3028823435306549, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0196, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 20.253968253968253, | |
| "grad_norm": 0.17677175998687744, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0134, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 20.285714285714285, | |
| "grad_norm": 0.2870626151561737, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.0164, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 20.317460317460316, | |
| "grad_norm": 0.24381645023822784, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0186, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 20.349206349206348, | |
| "grad_norm": 0.20895810425281525, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0153, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 20.38095238095238, | |
| "grad_norm": 0.14218027889728546, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0151, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 20.41269841269841, | |
| "grad_norm": 0.2756328880786896, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0148, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 20.444444444444443, | |
| "grad_norm": 0.2614329159259796, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.0152, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 20.476190476190474, | |
| "grad_norm": 0.28836408257484436, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0142, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 20.507936507936506, | |
| "grad_norm": 0.30898845195770264, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0122, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 20.53968253968254, | |
| "grad_norm": 0.2713024616241455, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0146, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 20.571428571428573, | |
| "grad_norm": 0.19818243384361267, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0164, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 20.603174603174605, | |
| "grad_norm": 0.211079403758049, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0125, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 20.634920634920636, | |
| "grad_norm": 0.2427939921617508, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0172, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 20.666666666666668, | |
| "grad_norm": 0.271630197763443, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0193, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 20.6984126984127, | |
| "grad_norm": 0.25886374711990356, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0167, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 20.73015873015873, | |
| "grad_norm": 0.19289538264274597, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0188, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 20.761904761904763, | |
| "grad_norm": 0.18344756960868835, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0174, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 20.793650793650794, | |
| "grad_norm": 0.25390347838401794, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.017, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 20.825396825396826, | |
| "grad_norm": 0.21794605255126953, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0137, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 20.857142857142858, | |
| "grad_norm": 0.11089117079973221, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0174, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 20.88888888888889, | |
| "grad_norm": 0.24245627224445343, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0235, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 20.92063492063492, | |
| "grad_norm": 0.1524720937013626, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0156, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 20.952380952380953, | |
| "grad_norm": 0.16755256056785583, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0169, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 20.984126984126984, | |
| "grad_norm": 0.32347938418388367, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0182, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 21.015873015873016, | |
| "grad_norm": 0.24090106785297394, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0166, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 21.047619047619047, | |
| "grad_norm": 0.19429044425487518, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0176, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 21.07936507936508, | |
| "grad_norm": 0.1713808923959732, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.016, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 21.11111111111111, | |
| "grad_norm": 0.16431643068790436, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0137, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 21.142857142857142, | |
| "grad_norm": 0.14166758954524994, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.0157, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 21.174603174603174, | |
| "grad_norm": 0.16151298582553864, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0116, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 21.206349206349206, | |
| "grad_norm": 0.24216847121715546, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0168, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 21.238095238095237, | |
| "grad_norm": 0.23465125262737274, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.0158, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 21.26984126984127, | |
| "grad_norm": 0.2020302265882492, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0144, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 21.3015873015873, | |
| "grad_norm": 0.311540812253952, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0125, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 21.333333333333332, | |
| "grad_norm": 0.1764349341392517, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0171, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 21.365079365079364, | |
| "grad_norm": 0.13585136830806732, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0192, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 21.396825396825395, | |
| "grad_norm": 0.21327026188373566, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0198, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 21.428571428571427, | |
| "grad_norm": 0.17888957262039185, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0148, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 21.46031746031746, | |
| "grad_norm": 0.24945113062858582, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.0116, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 21.49206349206349, | |
| "grad_norm": 0.1706424057483673, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.0141, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 21.523809523809526, | |
| "grad_norm": 0.28094932436943054, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.014, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 21.555555555555557, | |
| "grad_norm": 0.16757923364639282, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0149, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 21.58730158730159, | |
| "grad_norm": 0.2519325613975525, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0175, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 21.61904761904762, | |
| "grad_norm": 0.24635641276836395, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0142, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 21.650793650793652, | |
| "grad_norm": 0.1389232873916626, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0154, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 21.682539682539684, | |
| "grad_norm": 0.2593689262866974, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0148, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 21.714285714285715, | |
| "grad_norm": 0.14517734944820404, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.014, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 21.746031746031747, | |
| "grad_norm": 0.5272825956344604, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.0114, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 21.77777777777778, | |
| "grad_norm": 0.2883329689502716, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0122, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 21.80952380952381, | |
| "grad_norm": 0.26196718215942383, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.0152, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 21.841269841269842, | |
| "grad_norm": 0.16140787303447723, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0192, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 21.873015873015873, | |
| "grad_norm": 0.24231110513210297, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0176, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 21.904761904761905, | |
| "grad_norm": 0.23758836090564728, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0133, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 21.936507936507937, | |
| "grad_norm": 0.2159247100353241, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.017, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 21.96825396825397, | |
| "grad_norm": 0.2283969521522522, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0175, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 22.0, | |
| "grad_norm": 0.34844380617141724, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0141, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 22.03174603174603, | |
| "grad_norm": 0.26470470428466797, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0131, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 22.063492063492063, | |
| "grad_norm": 0.4066512882709503, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0147, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 22.095238095238095, | |
| "grad_norm": 0.17412889003753662, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0118, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 22.126984126984127, | |
| "grad_norm": 0.20186087489128113, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0113, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 22.158730158730158, | |
| "grad_norm": 0.1696140617132187, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0164, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 22.19047619047619, | |
| "grad_norm": 0.2014995962381363, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0124, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 22.22222222222222, | |
| "grad_norm": 0.2359251230955124, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0145, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 22.253968253968253, | |
| "grad_norm": 0.15729577839374542, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.0158, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 22.285714285714285, | |
| "grad_norm": 0.20511437952518463, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0151, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 22.317460317460316, | |
| "grad_norm": 0.19688570499420166, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0132, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 22.349206349206348, | |
| "grad_norm": 0.22759246826171875, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0147, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 22.38095238095238, | |
| "grad_norm": 0.2063080072402954, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0185, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 22.41269841269841, | |
| "grad_norm": 0.18948589265346527, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.014, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 22.444444444444443, | |
| "grad_norm": 0.18245799839496613, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0151, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 22.476190476190474, | |
| "grad_norm": 0.21448276937007904, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.0107, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 22.507936507936506, | |
| "grad_norm": 0.17265522480010986, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0126, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 22.53968253968254, | |
| "grad_norm": 0.18922169506549835, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0129, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 22.571428571428573, | |
| "grad_norm": 0.2160239964723587, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0137, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 22.603174603174605, | |
| "grad_norm": 0.18435801565647125, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0145, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 22.634920634920636, | |
| "grad_norm": 0.1607568860054016, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0124, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 22.666666666666668, | |
| "grad_norm": 0.19645245373249054, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0159, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 22.6984126984127, | |
| "grad_norm": 0.1799018830060959, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.02, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 22.73015873015873, | |
| "grad_norm": 0.15858089923858643, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0144, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 22.761904761904763, | |
| "grad_norm": 0.1570800244808197, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0129, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 22.793650793650794, | |
| "grad_norm": 0.12108251452445984, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0112, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 22.825396825396826, | |
| "grad_norm": 0.25044670701026917, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0169, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 22.857142857142858, | |
| "grad_norm": 0.26566675305366516, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0149, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 22.88888888888889, | |
| "grad_norm": 0.16304172575473785, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0132, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 22.92063492063492, | |
| "grad_norm": 0.24543078243732452, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0153, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 22.952380952380953, | |
| "grad_norm": 0.2132418304681778, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0155, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 22.984126984126984, | |
| "grad_norm": 0.27100783586502075, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.014, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 23.015873015873016, | |
| "grad_norm": 0.23012369871139526, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0131, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 23.047619047619047, | |
| "grad_norm": 0.1623094230890274, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0111, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 23.07936507936508, | |
| "grad_norm": 0.09402570128440857, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.013, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 23.11111111111111, | |
| "grad_norm": 0.12756206095218658, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.0125, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 23.142857142857142, | |
| "grad_norm": 0.19825483858585358, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0104, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 23.174603174603174, | |
| "grad_norm": 0.22992268204689026, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0156, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 23.206349206349206, | |
| "grad_norm": 0.18351058661937714, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0134, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 23.238095238095237, | |
| "grad_norm": 0.153513565659523, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0148, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 23.26984126984127, | |
| "grad_norm": 0.1773291677236557, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.0131, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 23.3015873015873, | |
| "grad_norm": 0.2714683413505554, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0155, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 23.333333333333332, | |
| "grad_norm": 0.15280304849147797, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.0122, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 23.365079365079364, | |
| "grad_norm": 0.20548415184020996, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.013, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 23.396825396825395, | |
| "grad_norm": 0.25806301832199097, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0142, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 23.428571428571427, | |
| "grad_norm": 0.14463450014591217, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0156, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 23.46031746031746, | |
| "grad_norm": 0.11153694987297058, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0151, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 23.49206349206349, | |
| "grad_norm": 0.2591457962989807, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0162, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 23.523809523809526, | |
| "grad_norm": 0.11065889149904251, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0109, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 23.555555555555557, | |
| "grad_norm": 0.17983703315258026, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0131, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 23.58730158730159, | |
| "grad_norm": 0.2734045684337616, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0118, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 23.61904761904762, | |
| "grad_norm": 0.18168853223323822, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0157, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 23.650793650793652, | |
| "grad_norm": 0.15399089455604553, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0124, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 23.682539682539684, | |
| "grad_norm": 0.12399337440729141, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0103, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 23.714285714285715, | |
| "grad_norm": 0.14077259600162506, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0144, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 23.746031746031747, | |
| "grad_norm": 0.21754680573940277, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0137, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 23.77777777777778, | |
| "grad_norm": 0.2107655107975006, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0113, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 23.80952380952381, | |
| "grad_norm": 0.2865687906742096, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0135, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 23.841269841269842, | |
| "grad_norm": 0.1446351706981659, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0081, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 23.873015873015873, | |
| "grad_norm": 0.12562544643878937, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0135, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 23.904761904761905, | |
| "grad_norm": 0.17370449006557465, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0135, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 23.936507936507937, | |
| "grad_norm": 0.11725129187107086, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0149, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 23.96825396825397, | |
| "grad_norm": 0.14277203381061554, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0118, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 24.0, | |
| "grad_norm": 0.23555177450180054, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0115, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 24.03174603174603, | |
| "grad_norm": 0.13910867273807526, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.01, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 24.063492063492063, | |
| "grad_norm": 0.2797999382019043, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0185, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 24.095238095238095, | |
| "grad_norm": 0.11822985112667084, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0141, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 24.126984126984127, | |
| "grad_norm": 0.298127144575119, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0127, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 24.158730158730158, | |
| "grad_norm": 0.18566390872001648, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0098, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 24.19047619047619, | |
| "grad_norm": 0.16567307710647583, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.0122, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 24.22222222222222, | |
| "grad_norm": 0.20039498805999756, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0132, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 24.253968253968253, | |
| "grad_norm": 0.14541280269622803, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0127, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 24.285714285714285, | |
| "grad_norm": 0.14615805447101593, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0129, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 24.317460317460316, | |
| "grad_norm": 0.15424121916294098, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0153, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 24.349206349206348, | |
| "grad_norm": 0.17081215977668762, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0125, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 24.38095238095238, | |
| "grad_norm": 0.21921736001968384, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0115, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 24.41269841269841, | |
| "grad_norm": 0.12656418979167938, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0118, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 24.444444444444443, | |
| "grad_norm": 0.16506506502628326, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.0113, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 24.476190476190474, | |
| "grad_norm": 0.17092452943325043, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.0133, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 24.507936507936506, | |
| "grad_norm": 0.15103782713413239, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.0124, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 24.53968253968254, | |
| "grad_norm": 0.20803171396255493, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.0158, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 24.571428571428573, | |
| "grad_norm": 0.18097475171089172, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0138, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 24.603174603174605, | |
| "grad_norm": 0.19787105917930603, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.0112, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 24.634920634920636, | |
| "grad_norm": 0.13726365566253662, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0106, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 24.666666666666668, | |
| "grad_norm": 0.17715111374855042, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0128, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 24.6984126984127, | |
| "grad_norm": 0.19941899180412292, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0111, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 24.73015873015873, | |
| "grad_norm": 0.29744836688041687, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0112, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 24.761904761904763, | |
| "grad_norm": 0.12947259843349457, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0125, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 24.793650793650794, | |
| "grad_norm": 0.1753162294626236, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0093, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 24.825396825396826, | |
| "grad_norm": 0.14291156828403473, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.0185, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 24.857142857142858, | |
| "grad_norm": 0.21948879957199097, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0203, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 24.88888888888889, | |
| "grad_norm": 0.1455094963312149, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0085, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 24.92063492063492, | |
| "grad_norm": 0.16455748677253723, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0161, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 24.952380952380953, | |
| "grad_norm": 0.10543867945671082, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0088, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 24.984126984126984, | |
| "grad_norm": 0.11264993250370026, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0081, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 25.015873015873016, | |
| "grad_norm": 0.28990602493286133, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0115, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 25.047619047619047, | |
| "grad_norm": 0.19487392902374268, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0122, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 25.07936507936508, | |
| "grad_norm": 0.17016515135765076, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0098, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 25.11111111111111, | |
| "grad_norm": 0.16960784792900085, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.012, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 25.142857142857142, | |
| "grad_norm": 0.10886963456869125, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0116, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 25.174603174603174, | |
| "grad_norm": 0.11710845679044724, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0104, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 25.206349206349206, | |
| "grad_norm": 0.1692613810300827, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0127, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 25.238095238095237, | |
| "grad_norm": 0.2239573895931244, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0172, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 25.26984126984127, | |
| "grad_norm": 0.18907073140144348, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.0154, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 25.3015873015873, | |
| "grad_norm": 0.13175897300243378, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0126, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 25.333333333333332, | |
| "grad_norm": 0.2117002308368683, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.015, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 25.365079365079364, | |
| "grad_norm": 0.1454012542963028, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0154, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 25.396825396825395, | |
| "grad_norm": 0.1731858104467392, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0115, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 25.428571428571427, | |
| "grad_norm": 0.08537448942661285, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0119, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 25.46031746031746, | |
| "grad_norm": 0.12765368819236755, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0121, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 25.49206349206349, | |
| "grad_norm": 0.9859875440597534, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0127, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 25.523809523809526, | |
| "grad_norm": 0.1831703633069992, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0125, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 25.555555555555557, | |
| "grad_norm": 0.2556174099445343, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.011, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 25.58730158730159, | |
| "grad_norm": 0.10624843090772629, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0126, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 25.61904761904762, | |
| "grad_norm": 0.1009644940495491, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0136, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 25.650793650793652, | |
| "grad_norm": 0.21410124003887177, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0093, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 25.682539682539684, | |
| "grad_norm": 0.13875389099121094, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.01, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 25.714285714285715, | |
| "grad_norm": 0.11505603790283203, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0116, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 25.746031746031747, | |
| "grad_norm": 0.32258519530296326, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.011, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 25.77777777777778, | |
| "grad_norm": 0.1701594740152359, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0113, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 25.80952380952381, | |
| "grad_norm": 0.16179317235946655, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0102, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 25.841269841269842, | |
| "grad_norm": 0.17742367088794708, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0106, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 25.873015873015873, | |
| "grad_norm": 0.18007849156856537, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0122, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 25.904761904761905, | |
| "grad_norm": 0.1528896540403366, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0117, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 25.936507936507937, | |
| "grad_norm": 0.14314262568950653, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0115, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 25.96825396825397, | |
| "grad_norm": 0.11188101023435593, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0127, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 26.0, | |
| "grad_norm": 0.13183920085430145, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0131, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 26.03174603174603, | |
| "grad_norm": 0.1485409140586853, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0103, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 26.063492063492063, | |
| "grad_norm": 0.12506891787052155, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0089, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 26.095238095238095, | |
| "grad_norm": 0.1466640681028366, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0166, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 26.126984126984127, | |
| "grad_norm": 0.16889923810958862, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0132, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 26.158730158730158, | |
| "grad_norm": 0.14313863217830658, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0086, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 26.19047619047619, | |
| "grad_norm": 0.15344250202178955, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0089, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 26.22222222222222, | |
| "grad_norm": 0.1292755901813507, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0118, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 26.253968253968253, | |
| "grad_norm": 0.17923623323440552, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0126, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 26.285714285714285, | |
| "grad_norm": 0.13148808479309082, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0134, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 26.317460317460316, | |
| "grad_norm": 0.1630675345659256, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0122, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 26.349206349206348, | |
| "grad_norm": 0.10065110772848129, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0103, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 26.38095238095238, | |
| "grad_norm": 0.1927996575832367, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0113, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 26.41269841269841, | |
| "grad_norm": 0.17922064661979675, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0111, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 26.444444444444443, | |
| "grad_norm": 0.17887645959854126, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0138, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 26.476190476190474, | |
| "grad_norm": 0.17544226348400116, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0105, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 26.507936507936506, | |
| "grad_norm": 0.16020667552947998, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0123, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 26.53968253968254, | |
| "grad_norm": 0.20573902130126953, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0109, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 26.571428571428573, | |
| "grad_norm": 0.11933104693889618, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0093, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 26.603174603174605, | |
| "grad_norm": 0.2626897096633911, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0121, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 26.634920634920636, | |
| "grad_norm": 0.15621748566627502, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0104, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 26.666666666666668, | |
| "grad_norm": 0.1559997946023941, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.0123, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 26.6984126984127, | |
| "grad_norm": 0.07641695439815521, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0076, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 26.73015873015873, | |
| "grad_norm": 0.1421450525522232, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0101, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 26.761904761904763, | |
| "grad_norm": 0.21398329734802246, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0121, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 26.793650793650794, | |
| "grad_norm": 0.2561212182044983, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0126, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 26.825396825396826, | |
| "grad_norm": 0.14962054789066315, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0095, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 26.857142857142858, | |
| "grad_norm": 0.1670444905757904, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0084, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 26.88888888888889, | |
| "grad_norm": 0.125416100025177, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.011, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 26.92063492063492, | |
| "grad_norm": 0.18001534044742584, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.009, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 26.952380952380953, | |
| "grad_norm": 0.1309777945280075, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0172, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 26.984126984126984, | |
| "grad_norm": 0.0824943259358406, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0073, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 27.015873015873016, | |
| "grad_norm": 0.1012212410569191, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0098, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 27.047619047619047, | |
| "grad_norm": 0.11767841130495071, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0104, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 27.07936507936508, | |
| "grad_norm": 0.14742644131183624, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0089, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 27.11111111111111, | |
| "grad_norm": 0.11481393128633499, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0091, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 27.142857142857142, | |
| "grad_norm": 0.11379402875900269, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0098, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 27.174603174603174, | |
| "grad_norm": 0.34592297673225403, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0106, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 27.206349206349206, | |
| "grad_norm": 0.12513747811317444, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0093, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 27.238095238095237, | |
| "grad_norm": 0.15107190608978271, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0113, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 27.26984126984127, | |
| "grad_norm": 0.15692481398582458, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0101, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 27.3015873015873, | |
| "grad_norm": 0.20025835931301117, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0113, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 27.333333333333332, | |
| "grad_norm": 0.2409658133983612, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0098, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 27.365079365079364, | |
| "grad_norm": 0.1612861305475235, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0127, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 27.396825396825395, | |
| "grad_norm": 0.17995613813400269, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0095, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 27.428571428571427, | |
| "grad_norm": 0.13630923628807068, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.0098, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 27.46031746031746, | |
| "grad_norm": 0.2504119575023651, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0088, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 27.49206349206349, | |
| "grad_norm": 0.1808883100748062, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0143, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 27.523809523809526, | |
| "grad_norm": 0.20890317857265472, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0128, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 27.555555555555557, | |
| "grad_norm": 0.2162272334098816, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0115, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 27.58730158730159, | |
| "grad_norm": 0.20065751671791077, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0144, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 27.61904761904762, | |
| "grad_norm": 0.11608576774597168, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.014, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 27.650793650793652, | |
| "grad_norm": 0.22321242094039917, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0081, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 27.682539682539684, | |
| "grad_norm": 0.2272641807794571, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0078, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 27.714285714285715, | |
| "grad_norm": 0.19703130424022675, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0094, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 27.746031746031747, | |
| "grad_norm": 0.105436772108078, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0077, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 27.77777777777778, | |
| "grad_norm": 0.1297864466905594, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0104, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 27.80952380952381, | |
| "grad_norm": 0.18192586302757263, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0104, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 27.841269841269842, | |
| "grad_norm": 0.20479623973369598, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0108, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 27.873015873015873, | |
| "grad_norm": 0.11318498849868774, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0086, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 27.904761904761905, | |
| "grad_norm": 0.10507036000490189, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.0156, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 27.936507936507937, | |
| "grad_norm": 0.10223344713449478, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0086, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 27.96825396825397, | |
| "grad_norm": 0.27070915699005127, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0139, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 28.0, | |
| "grad_norm": 0.20015652477741241, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.0092, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 28.03174603174603, | |
| "grad_norm": 0.1808873414993286, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.012, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 28.063492063492063, | |
| "grad_norm": 0.09823811054229736, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0095, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 28.095238095238095, | |
| "grad_norm": 0.1830408275127411, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0108, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 28.126984126984127, | |
| "grad_norm": 0.14822478592395782, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0134, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 28.158730158730158, | |
| "grad_norm": 0.09325245767831802, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0105, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 28.19047619047619, | |
| "grad_norm": 0.1295262724161148, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.0082, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 28.22222222222222, | |
| "grad_norm": 0.19573268294334412, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0111, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 28.253968253968253, | |
| "grad_norm": 0.23062677681446075, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.009, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 28.285714285714285, | |
| "grad_norm": 0.12176034599542618, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.0115, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 28.317460317460316, | |
| "grad_norm": 0.10691680759191513, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0093, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 28.349206349206348, | |
| "grad_norm": 0.21592320501804352, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0084, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 28.38095238095238, | |
| "grad_norm": 0.1572159379720688, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0099, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 28.41269841269841, | |
| "grad_norm": 0.10742610692977905, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0113, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 28.444444444444443, | |
| "grad_norm": 0.09738875180482864, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0111, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 28.476190476190474, | |
| "grad_norm": 0.08133155852556229, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0088, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 28.507936507936506, | |
| "grad_norm": 0.2007368505001068, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0134, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 28.53968253968254, | |
| "grad_norm": 0.5349880456924438, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0124, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 28.571428571428573, | |
| "grad_norm": 0.1965344250202179, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.0109, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 28.603174603174605, | |
| "grad_norm": 0.14040406048297882, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.011, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 28.634920634920636, | |
| "grad_norm": 0.09418956935405731, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.0087, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 28.666666666666668, | |
| "grad_norm": 0.09493319690227509, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.0103, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 28.6984126984127, | |
| "grad_norm": 0.11004871875047684, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0085, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 28.73015873015873, | |
| "grad_norm": 0.2629684507846832, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0102, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 28.761904761904763, | |
| "grad_norm": 0.13201604783535004, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.011, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 28.793650793650794, | |
| "grad_norm": 0.18456681072711945, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0082, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 28.825396825396826, | |
| "grad_norm": 0.06795123964548111, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.0116, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 28.857142857142858, | |
| "grad_norm": 0.14543649554252625, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.0107, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 28.88888888888889, | |
| "grad_norm": 0.1316184103488922, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.0095, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 28.92063492063492, | |
| "grad_norm": 0.1982174515724182, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0115, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 28.952380952380953, | |
| "grad_norm": 0.09919308871030807, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.0094, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 28.984126984126984, | |
| "grad_norm": 0.1608496904373169, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0085, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 29.015873015873016, | |
| "grad_norm": 0.20179031789302826, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.0073, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 29.047619047619047, | |
| "grad_norm": 0.21945777535438538, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0109, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 29.07936507936508, | |
| "grad_norm": 0.17477405071258545, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0097, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 29.11111111111111, | |
| "grad_norm": 0.36155858635902405, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.0092, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 29.142857142857142, | |
| "grad_norm": 0.14377503097057343, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.0116, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 29.174603174603174, | |
| "grad_norm": 0.15818120539188385, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.0118, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 29.206349206349206, | |
| "grad_norm": 0.16062887012958527, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.0103, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 29.238095238095237, | |
| "grad_norm": 0.1281094253063202, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0087, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 29.26984126984127, | |
| "grad_norm": 0.10502026230096817, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.0096, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 29.3015873015873, | |
| "grad_norm": 0.18048419058322906, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.0105, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 29.333333333333332, | |
| "grad_norm": 0.05719524249434471, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.0096, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 29.365079365079364, | |
| "grad_norm": 0.10882283747196198, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.0115, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 29.396825396825395, | |
| "grad_norm": 0.4864766001701355, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.01, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 29.428571428571427, | |
| "grad_norm": 0.15647682547569275, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0123, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 29.46031746031746, | |
| "grad_norm": 0.3206740617752075, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.0087, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 29.49206349206349, | |
| "grad_norm": 0.13462114334106445, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.008, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 29.523809523809526, | |
| "grad_norm": 0.2172459214925766, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.0126, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 29.555555555555557, | |
| "grad_norm": 0.19979844987392426, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.0093, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 29.58730158730159, | |
| "grad_norm": 0.2551228702068329, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0082, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 29.61904761904762, | |
| "grad_norm": 0.10434945672750473, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.0117, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 29.650793650793652, | |
| "grad_norm": 0.7930538058280945, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.012, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 29.682539682539684, | |
| "grad_norm": 0.08883222192525864, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0116, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 29.714285714285715, | |
| "grad_norm": 0.07275433838367462, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.0089, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 29.746031746031747, | |
| "grad_norm": 0.09541443735361099, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0141, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 29.77777777777778, | |
| "grad_norm": 0.15877476334571838, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.0096, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 29.80952380952381, | |
| "grad_norm": 0.1528717428445816, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.012, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 29.841269841269842, | |
| "grad_norm": 0.179233580827713, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.0102, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 29.873015873015873, | |
| "grad_norm": 0.1265888810157776, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.0088, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 29.904761904761905, | |
| "grad_norm": 0.10432500392198563, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0099, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 29.936507936507937, | |
| "grad_norm": 0.10362035781145096, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0095, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 29.96825396825397, | |
| "grad_norm": 0.11423126608133316, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.0079, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "grad_norm": 0.149443581700325, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.014, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 30.03174603174603, | |
| "grad_norm": 0.08560554683208466, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0107, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 30.063492063492063, | |
| "grad_norm": 0.13873149454593658, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.0097, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 30.095238095238095, | |
| "grad_norm": 0.42764797806739807, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0118, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 30.126984126984127, | |
| "grad_norm": 0.22315607964992523, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.0112, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 30.158730158730158, | |
| "grad_norm": 0.1172967180609703, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.0103, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 30.19047619047619, | |
| "grad_norm": 0.2620256841182709, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.0129, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 30.22222222222222, | |
| "grad_norm": 0.08000964671373367, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.0088, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 30.253968253968253, | |
| "grad_norm": 0.1810327172279358, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.0098, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 30.285714285714285, | |
| "grad_norm": 0.09979384392499924, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0106, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 30.317460317460316, | |
| "grad_norm": 0.18946896493434906, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.0111, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 30.349206349206348, | |
| "grad_norm": 0.15754221379756927, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.0085, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 30.38095238095238, | |
| "grad_norm": 0.22235900163650513, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.012, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 30.41269841269841, | |
| "grad_norm": 0.10214060544967651, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0115, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 30.444444444444443, | |
| "grad_norm": 0.14109273254871368, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0134, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 30.476190476190474, | |
| "grad_norm": 0.14715370535850525, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.0086, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 30.507936507936506, | |
| "grad_norm": 0.13609695434570312, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.012, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 30.53968253968254, | |
| "grad_norm": 0.12625136971473694, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.0109, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 30.571428571428573, | |
| "grad_norm": 0.11594970524311066, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.0158, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 30.603174603174605, | |
| "grad_norm": 0.13690538704395294, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.0098, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 30.634920634920636, | |
| "grad_norm": 0.14713014662265778, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0107, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 30.666666666666668, | |
| "grad_norm": 0.3160416781902313, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.0101, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 30.6984126984127, | |
| "grad_norm": 0.2348206341266632, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.0093, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 30.73015873015873, | |
| "grad_norm": 0.10115974396467209, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0124, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 30.761904761904763, | |
| "grad_norm": 0.14889442920684814, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.0095, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 30.793650793650794, | |
| "grad_norm": 0.14972299337387085, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.009, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 30.825396825396826, | |
| "grad_norm": 0.14338943362236023, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0094, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 30.857142857142858, | |
| "grad_norm": 0.1903071105480194, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.0104, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 30.88888888888889, | |
| "grad_norm": 0.09225494414567947, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.0105, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 30.92063492063492, | |
| "grad_norm": 0.17603430151939392, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.011, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 30.952380952380953, | |
| "grad_norm": 0.12169013917446136, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.0085, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 30.984126984126984, | |
| "grad_norm": 0.18771693110466003, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.0099, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 31.015873015873016, | |
| "grad_norm": 0.09715425968170166, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.0075, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 31.047619047619047, | |
| "grad_norm": 0.1405325084924698, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0088, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 31.07936507936508, | |
| "grad_norm": 0.1117044985294342, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0092, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 31.11111111111111, | |
| "grad_norm": 0.17140407860279083, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.0109, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 31.142857142857142, | |
| "grad_norm": 0.24908088147640228, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.0084, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 31.174603174603174, | |
| "grad_norm": 0.13977456092834473, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.0088, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 31.206349206349206, | |
| "grad_norm": 0.09884601086378098, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.0131, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 31.238095238095237, | |
| "grad_norm": 0.2549228072166443, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.0105, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 31.26984126984127, | |
| "grad_norm": 0.313724547624588, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.0105, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 31.3015873015873, | |
| "grad_norm": 0.1450057327747345, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.0131, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 31.333333333333332, | |
| "grad_norm": 0.1071973517537117, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.0083, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 31.365079365079364, | |
| "grad_norm": 0.10313557088375092, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.0112, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 31.396825396825395, | |
| "grad_norm": 0.14481806755065918, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.0093, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 31.428571428571427, | |
| "grad_norm": 0.3073570132255554, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.0133, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 31.46031746031746, | |
| "grad_norm": 0.24824009835720062, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0103, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 31.49206349206349, | |
| "grad_norm": 0.1142677590250969, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.0141, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 31.523809523809526, | |
| "grad_norm": 0.16389498114585876, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0099, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 31.555555555555557, | |
| "grad_norm": 0.1821771115064621, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0101, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 31.58730158730159, | |
| "grad_norm": 0.20085732638835907, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.0113, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 31.61904761904762, | |
| "grad_norm": 0.13289542496204376, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0119, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 31.650793650793652, | |
| "grad_norm": 0.08672798424959183, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0075, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 31.682539682539684, | |
| "grad_norm": 0.1280519664287567, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.011, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 31.714285714285715, | |
| "grad_norm": 0.1598587930202484, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.0104, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 31.746031746031747, | |
| "grad_norm": 0.2141278237104416, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.013, | |
| "step": 10000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 32, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
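
The file above follows the layout of a Hugging Face Trainer `trainer_state.json`: each `log_history` entry records `epoch`, `step`, `loss`, `learning_rate`, and `grad_norm` at the configured `logging_steps` interval. As a minimal sketch (not part of the original file, and assuming the JSON is saved under the hypothetical name `trainer_state.json`), the training-loss and learning-rate curves can be recovered from it like so:

```python
# Sketch: parse a Trainer-style state file and plot loss / learning rate vs. step.
# The file name "trainer_state.json" is an assumption; adjust to the real checkpoint path.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss; eval entries (if logging were
# enabled) would use keys such as "eval_loss" instead.
train_logs = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
lrs = [e["learning_rate"] for e in train_logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.tight_layout()
plt.show()
```

Plotted this way, the log shows the loss settling around 0.01 by the final epochs while the learning rate decays to effectively zero at `max_steps` (10000), consistent with `should_training_stop: true` in the trailing `TrainerControl` state.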