| { | |
| "best_global_step": 2000, | |
| "best_metric": 22.43816254416961, | |
| "best_model_checkpoint": "./SALAMA_NEWMEDTMUB/checkpoint-2000", | |
| "epoch": 2.0, | |
| "eval_steps": 1000, | |
| "global_step": 2736, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.007312614259597806, | |
| "grad_norm": 23.786584854125977, | |
| "learning_rate": 1.8e-07, | |
| "loss": 0.338, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.014625228519195612, | |
| "grad_norm": 25.875110626220703, | |
| "learning_rate": 3.8e-07, | |
| "loss": 0.4003, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.021937842778793418, | |
| "grad_norm": 20.586557388305664, | |
| "learning_rate": 5.800000000000001e-07, | |
| "loss": 0.5275, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.029250457038391225, | |
| "grad_norm": 22.56281089782715, | |
| "learning_rate": 7.8e-07, | |
| "loss": 0.3006, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.03656307129798903, | |
| "grad_norm": 28.27590560913086, | |
| "learning_rate": 9.800000000000001e-07, | |
| "loss": 0.2741, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.043875685557586835, | |
| "grad_norm": 15.167706489562988, | |
| "learning_rate": 1.1800000000000001e-06, | |
| "loss": 0.4286, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.051188299817184646, | |
| "grad_norm": 17.841941833496094, | |
| "learning_rate": 1.3800000000000001e-06, | |
| "loss": 0.3639, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.05850091407678245, | |
| "grad_norm": 1.7901560068130493, | |
| "learning_rate": 1.5800000000000001e-06, | |
| "loss": 0.2633, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.06581352833638025, | |
| "grad_norm": 23.4942569732666, | |
| "learning_rate": 1.7800000000000001e-06, | |
| "loss": 0.4025, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.07312614259597806, | |
| "grad_norm": 15.08558464050293, | |
| "learning_rate": 1.98e-06, | |
| "loss": 0.6375, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.08043875685557587, | |
| "grad_norm": 8.504720687866211, | |
| "learning_rate": 2.1800000000000003e-06, | |
| "loss": 0.2601, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.08775137111517367, | |
| "grad_norm": 11.795039176940918, | |
| "learning_rate": 2.38e-06, | |
| "loss": 0.4155, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.09506398537477148, | |
| "grad_norm": 4.088447570800781, | |
| "learning_rate": 2.5800000000000003e-06, | |
| "loss": 0.3254, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.10237659963436929, | |
| "grad_norm": 9.273443222045898, | |
| "learning_rate": 2.7800000000000005e-06, | |
| "loss": 0.6213, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.10968921389396709, | |
| "grad_norm": 10.197525978088379, | |
| "learning_rate": 2.9800000000000003e-06, | |
| "loss": 0.2383, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.1170018281535649, | |
| "grad_norm": 32.452735900878906, | |
| "learning_rate": 3.1800000000000005e-06, | |
| "loss": 0.4707, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.12431444241316271, | |
| "grad_norm": 12.96947193145752, | |
| "learning_rate": 3.3800000000000007e-06, | |
| "loss": 0.2176, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.1316270566727605, | |
| "grad_norm": 7.767789840698242, | |
| "learning_rate": 3.58e-06, | |
| "loss": 0.2673, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.13893967093235832, | |
| "grad_norm": 13.503886222839355, | |
| "learning_rate": 3.7800000000000002e-06, | |
| "loss": 0.4111, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.14625228519195613, | |
| "grad_norm": 12.849893569946289, | |
| "learning_rate": 3.980000000000001e-06, | |
| "loss": 0.3538, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.15356489945155394, | |
| "grad_norm": 20.808692932128906, | |
| "learning_rate": 4.18e-06, | |
| "loss": 0.3639, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.16087751371115175, | |
| "grad_norm": 6.0378007888793945, | |
| "learning_rate": 4.38e-06, | |
| "loss": 0.514, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.16819012797074953, | |
| "grad_norm": 10.26975154876709, | |
| "learning_rate": 4.58e-06, | |
| "loss": 0.3015, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.17550274223034734, | |
| "grad_norm": 17.779497146606445, | |
| "learning_rate": 4.78e-06, | |
| "loss": 0.2099, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.18281535648994515, | |
| "grad_norm": 10.017502784729004, | |
| "learning_rate": 4.980000000000001e-06, | |
| "loss": 0.1943, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.19012797074954296, | |
| "grad_norm": 23.421777725219727, | |
| "learning_rate": 5.18e-06, | |
| "loss": 0.2958, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.19744058500914077, | |
| "grad_norm": 11.93268871307373, | |
| "learning_rate": 5.380000000000001e-06, | |
| "loss": 0.3723, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.20475319926873858, | |
| "grad_norm": 5.4110612869262695, | |
| "learning_rate": 5.580000000000001e-06, | |
| "loss": 0.2661, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.21206581352833637, | |
| "grad_norm": 17.327363967895508, | |
| "learning_rate": 5.78e-06, | |
| "loss": 0.436, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.21937842778793418, | |
| "grad_norm": 6.25701379776001, | |
| "learning_rate": 5.98e-06, | |
| "loss": 0.2156, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.226691042047532, | |
| "grad_norm": 13.935582160949707, | |
| "learning_rate": 6.18e-06, | |
| "loss": 0.598, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.2340036563071298, | |
| "grad_norm": 6.627614974975586, | |
| "learning_rate": 6.380000000000001e-06, | |
| "loss": 0.3433, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.2413162705667276, | |
| "grad_norm": 6.431783676147461, | |
| "learning_rate": 6.5800000000000005e-06, | |
| "loss": 0.1368, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.24862888482632542, | |
| "grad_norm": 23.54817771911621, | |
| "learning_rate": 6.780000000000001e-06, | |
| "loss": 0.2473, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.25594149908592323, | |
| "grad_norm": 8.785611152648926, | |
| "learning_rate": 6.98e-06, | |
| "loss": 0.2112, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.263254113345521, | |
| "grad_norm": 6.616166591644287, | |
| "learning_rate": 7.180000000000001e-06, | |
| "loss": 0.1278, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.27056672760511885, | |
| "grad_norm": 3.7033286094665527, | |
| "learning_rate": 7.3800000000000005e-06, | |
| "loss": 0.3054, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.27787934186471663, | |
| "grad_norm": 13.525383949279785, | |
| "learning_rate": 7.58e-06, | |
| "loss": 0.4384, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.2851919561243144, | |
| "grad_norm": 7.937963962554932, | |
| "learning_rate": 7.78e-06, | |
| "loss": 0.0957, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.29250457038391225, | |
| "grad_norm": 16.69182777404785, | |
| "learning_rate": 7.980000000000002e-06, | |
| "loss": 0.2025, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.29981718464351004, | |
| "grad_norm": 8.825018882751465, | |
| "learning_rate": 8.18e-06, | |
| "loss": 0.3476, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.3071297989031079, | |
| "grad_norm": 18.566797256469727, | |
| "learning_rate": 8.380000000000001e-06, | |
| "loss": 0.4193, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.31444241316270566, | |
| "grad_norm": 5.76287317276001, | |
| "learning_rate": 8.580000000000001e-06, | |
| "loss": 0.2035, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.3217550274223035, | |
| "grad_norm": 8.784618377685547, | |
| "learning_rate": 8.78e-06, | |
| "loss": 0.1575, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.3290676416819013, | |
| "grad_norm": 16.30363655090332, | |
| "learning_rate": 8.98e-06, | |
| "loss": 0.471, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.33638025594149906, | |
| "grad_norm": 2.915956735610962, | |
| "learning_rate": 9.180000000000002e-06, | |
| "loss": 0.182, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.3436928702010969, | |
| "grad_norm": 3.9583890438079834, | |
| "learning_rate": 9.38e-06, | |
| "loss": 0.1509, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.3510054844606947, | |
| "grad_norm": 9.183399200439453, | |
| "learning_rate": 9.58e-06, | |
| "loss": 0.2354, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.3583180987202925, | |
| "grad_norm": 10.724503517150879, | |
| "learning_rate": 9.780000000000001e-06, | |
| "loss": 0.1361, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.3656307129798903, | |
| "grad_norm": 9.822811126708984, | |
| "learning_rate": 9.980000000000001e-06, | |
| "loss": 0.237, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.37294332723948814, | |
| "grad_norm": 15.446513175964355, | |
| "learning_rate": 9.95974955277281e-06, | |
| "loss": 0.6261, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.3802559414990859, | |
| "grad_norm": 5.505027770996094, | |
| "learning_rate": 9.915026833631485e-06, | |
| "loss": 0.3904, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.3875685557586837, | |
| "grad_norm": 7.490746021270752, | |
| "learning_rate": 9.870304114490162e-06, | |
| "loss": 0.3183, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.39488117001828155, | |
| "grad_norm": 6.62838888168335, | |
| "learning_rate": 9.825581395348838e-06, | |
| "loss": 0.3496, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.40219378427787933, | |
| "grad_norm": 17.81728744506836, | |
| "learning_rate": 9.780858676207515e-06, | |
| "loss": 0.3502, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.40950639853747717, | |
| "grad_norm": 9.13936710357666, | |
| "learning_rate": 9.73613595706619e-06, | |
| "loss": 0.5141, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.41681901279707495, | |
| "grad_norm": 3.2332348823547363, | |
| "learning_rate": 9.691413237924867e-06, | |
| "loss": 0.2679, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.42413162705667273, | |
| "grad_norm": 8.26561450958252, | |
| "learning_rate": 9.646690518783543e-06, | |
| "loss": 0.3905, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.43144424131627057, | |
| "grad_norm": 6.408196449279785, | |
| "learning_rate": 9.601967799642218e-06, | |
| "loss": 0.2694, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.43875685557586835, | |
| "grad_norm": 12.754035949707031, | |
| "learning_rate": 9.557245080500895e-06, | |
| "loss": 0.4731, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.4460694698354662, | |
| "grad_norm": 0.6065673232078552, | |
| "learning_rate": 9.512522361359572e-06, | |
| "loss": 0.2143, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.453382084095064, | |
| "grad_norm": 4.9976582527160645, | |
| "learning_rate": 9.467799642218248e-06, | |
| "loss": 0.4713, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.4606946983546618, | |
| "grad_norm": 6.202505588531494, | |
| "learning_rate": 9.423076923076923e-06, | |
| "loss": 0.1564, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.4680073126142596, | |
| "grad_norm": 49.993919372558594, | |
| "learning_rate": 9.3783542039356e-06, | |
| "loss": 0.1725, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.4753199268738574, | |
| "grad_norm": 6.155549049377441, | |
| "learning_rate": 9.333631484794277e-06, | |
| "loss": 0.19, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.4826325411334552, | |
| "grad_norm": 10.210041046142578, | |
| "learning_rate": 9.288908765652953e-06, | |
| "loss": 0.1782, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.489945155393053, | |
| "grad_norm": 3.6081888675689697, | |
| "learning_rate": 9.244186046511628e-06, | |
| "loss": 0.337, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.49725776965265084, | |
| "grad_norm": 12.385890007019043, | |
| "learning_rate": 9.199463327370305e-06, | |
| "loss": 0.2003, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.5045703839122486, | |
| "grad_norm": 7.23034143447876, | |
| "learning_rate": 9.15474060822898e-06, | |
| "loss": 0.1712, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.5118829981718465, | |
| "grad_norm": 4.523240566253662, | |
| "learning_rate": 9.110017889087658e-06, | |
| "loss": 0.3199, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.5191956124314442, | |
| "grad_norm": 10.353597640991211, | |
| "learning_rate": 9.065295169946333e-06, | |
| "loss": 0.3007, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.526508226691042, | |
| "grad_norm": 5.710986137390137, | |
| "learning_rate": 9.02057245080501e-06, | |
| "loss": 0.148, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.5338208409506399, | |
| "grad_norm": 8.321440696716309, | |
| "learning_rate": 8.975849731663686e-06, | |
| "loss": 0.3764, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.5411334552102377, | |
| "grad_norm": 6.329477787017822, | |
| "learning_rate": 8.931127012522363e-06, | |
| "loss": 0.1774, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.5484460694698354, | |
| "grad_norm": 14.216864585876465, | |
| "learning_rate": 8.886404293381038e-06, | |
| "loss": 0.2678, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.5557586837294333, | |
| "grad_norm": 6.65015172958374, | |
| "learning_rate": 8.841681574239714e-06, | |
| "loss": 0.2138, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.5630712979890311, | |
| "grad_norm": 17.47846221923828, | |
| "learning_rate": 8.79695885509839e-06, | |
| "loss": 0.2223, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.5703839122486288, | |
| "grad_norm": 11.677774429321289, | |
| "learning_rate": 8.752236135957068e-06, | |
| "loss": 0.332, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.5776965265082267, | |
| "grad_norm": 12.348886489868164, | |
| "learning_rate": 8.707513416815743e-06, | |
| "loss": 0.2077, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.5850091407678245, | |
| "grad_norm": 9.304800033569336, | |
| "learning_rate": 8.662790697674419e-06, | |
| "loss": 0.1048, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.5923217550274223, | |
| "grad_norm": 6.786717414855957, | |
| "learning_rate": 8.618067978533096e-06, | |
| "loss": 0.2946, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.5996343692870201, | |
| "grad_norm": 7.69470739364624, | |
| "learning_rate": 8.573345259391773e-06, | |
| "loss": 0.2642, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.6069469835466179, | |
| "grad_norm": 4.253036975860596, | |
| "learning_rate": 8.528622540250448e-06, | |
| "loss": 0.3286, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.6142595978062158, | |
| "grad_norm": 8.26134967803955, | |
| "learning_rate": 8.483899821109124e-06, | |
| "loss": 0.0926, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.6215722120658135, | |
| "grad_norm": 14.105083465576172, | |
| "learning_rate": 8.4391771019678e-06, | |
| "loss": 0.2227, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.6288848263254113, | |
| "grad_norm": 9.787266731262207, | |
| "learning_rate": 8.394454382826476e-06, | |
| "loss": 0.2463, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.6361974405850092, | |
| "grad_norm": 7.089901447296143, | |
| "learning_rate": 8.349731663685151e-06, | |
| "loss": 0.4785, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.643510054844607, | |
| "grad_norm": 8.751269340515137, | |
| "learning_rate": 8.305008944543829e-06, | |
| "loss": 0.4554, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.6508226691042047, | |
| "grad_norm": 12.375998497009277, | |
| "learning_rate": 8.260286225402506e-06, | |
| "loss": 0.1926, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.6581352833638026, | |
| "grad_norm": 2.7685928344726562, | |
| "learning_rate": 8.215563506261181e-06, | |
| "loss": 0.3606, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.6654478976234004, | |
| "grad_norm": 1.846977949142456, | |
| "learning_rate": 8.170840787119858e-06, | |
| "loss": 0.1449, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.6727605118829981, | |
| "grad_norm": 6.678775310516357, | |
| "learning_rate": 8.126118067978534e-06, | |
| "loss": 0.1799, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.680073126142596, | |
| "grad_norm": 5.570348262786865, | |
| "learning_rate": 8.08139534883721e-06, | |
| "loss": 0.2333, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.6873857404021938, | |
| "grad_norm": 7.157234191894531, | |
| "learning_rate": 8.036672629695886e-06, | |
| "loss": 0.2279, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.6946983546617916, | |
| "grad_norm": 5.400512218475342, | |
| "learning_rate": 7.991949910554563e-06, | |
| "loss": 0.1583, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.7020109689213894, | |
| "grad_norm": 11.138449668884277, | |
| "learning_rate": 7.947227191413239e-06, | |
| "loss": 0.3318, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.7093235831809872, | |
| "grad_norm": 6.359451770782471, | |
| "learning_rate": 7.902504472271914e-06, | |
| "loss": 0.2533, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.716636197440585, | |
| "grad_norm": 5.201321601867676, | |
| "learning_rate": 7.857781753130591e-06, | |
| "loss": 0.2389, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.7239488117001828, | |
| "grad_norm": 9.445417404174805, | |
| "learning_rate": 7.813059033989268e-06, | |
| "loss": 0.32, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.7312614259597806, | |
| "grad_norm": 6.709265232086182, | |
| "learning_rate": 7.768336314847944e-06, | |
| "loss": 0.4202, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.7312614259597806, | |
| "eval_loss": 0.23008325695991516, | |
| "eval_runtime": 17.9053, | |
| "eval_samples_per_second": 3.965, | |
| "eval_steps_per_second": 3.965, | |
| "eval_wer": 22.084805653710244, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.7385740402193784, | |
| "grad_norm": 2.7625207901000977, | |
| "learning_rate": 7.723613595706619e-06, | |
| "loss": 0.3776, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.7458866544789763, | |
| "grad_norm": 11.14816665649414, | |
| "learning_rate": 7.678890876565296e-06, | |
| "loss": 0.2826, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.753199268738574, | |
| "grad_norm": 13.433893203735352, | |
| "learning_rate": 7.634168157423973e-06, | |
| "loss": 0.2046, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.7605118829981719, | |
| "grad_norm": 4.902495384216309, | |
| "learning_rate": 7.589445438282648e-06, | |
| "loss": 0.1896, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.7678244972577697, | |
| "grad_norm": 4.326854705810547, | |
| "learning_rate": 7.544722719141324e-06, | |
| "loss": 0.3618, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.7751371115173674, | |
| "grad_norm": 2.749929189682007, | |
| "learning_rate": 7.500000000000001e-06, | |
| "loss": 0.1338, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.7824497257769653, | |
| "grad_norm": 5.580015182495117, | |
| "learning_rate": 7.455277280858677e-06, | |
| "loss": 0.1232, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.7897623400365631, | |
| "grad_norm": 10.275532722473145, | |
| "learning_rate": 7.4105545617173535e-06, | |
| "loss": 0.4429, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.7970749542961609, | |
| "grad_norm": 12.147696495056152, | |
| "learning_rate": 7.365831842576029e-06, | |
| "loss": 0.2521, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.8043875685557587, | |
| "grad_norm": 5.371063232421875, | |
| "learning_rate": 7.321109123434705e-06, | |
| "loss": 0.1444, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.8117001828153565, | |
| "grad_norm": 13.506457328796387, | |
| "learning_rate": 7.2763864042933814e-06, | |
| "loss": 0.1967, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.8190127970749543, | |
| "grad_norm": 2.674086332321167, | |
| "learning_rate": 7.2316636851520585e-06, | |
| "loss": 0.144, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.8263254113345521, | |
| "grad_norm": 13.818032264709473, | |
| "learning_rate": 7.186940966010734e-06, | |
| "loss": 0.2831, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.8336380255941499, | |
| "grad_norm": 5.350680828094482, | |
| "learning_rate": 7.14221824686941e-06, | |
| "loss": 0.0879, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.8409506398537477, | |
| "grad_norm": 13.155516624450684, | |
| "learning_rate": 7.0974955277280864e-06, | |
| "loss": 0.1717, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.8482632541133455, | |
| "grad_norm": 11.464470863342285, | |
| "learning_rate": 7.052772808586763e-06, | |
| "loss": 0.2765, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.8555758683729433, | |
| "grad_norm": 14.078434944152832, | |
| "learning_rate": 7.008050089445438e-06, | |
| "loss": 0.1453, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.8628884826325411, | |
| "grad_norm": 3.5456151962280273, | |
| "learning_rate": 6.963327370304115e-06, | |
| "loss": 0.2314, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.870201096892139, | |
| "grad_norm": 7.183000564575195, | |
| "learning_rate": 6.9186046511627914e-06, | |
| "loss": 0.1827, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.8775137111517367, | |
| "grad_norm": 8.16065502166748, | |
| "learning_rate": 6.873881932021468e-06, | |
| "loss": 0.287, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.8848263254113345, | |
| "grad_norm": 7.625172138214111, | |
| "learning_rate": 6.829159212880143e-06, | |
| "loss": 0.2019, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.8921389396709324, | |
| "grad_norm": 3.9447920322418213, | |
| "learning_rate": 6.784436493738819e-06, | |
| "loss": 0.2434, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.8994515539305301, | |
| "grad_norm": 0.8182668089866638, | |
| "learning_rate": 6.7397137745974964e-06, | |
| "loss": 0.1645, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.906764168190128, | |
| "grad_norm": 11.82470989227295, | |
| "learning_rate": 6.694991055456173e-06, | |
| "loss": 0.2626, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.9140767824497258, | |
| "grad_norm": 8.1321439743042, | |
| "learning_rate": 6.650268336314849e-06, | |
| "loss": 0.2986, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.9213893967093236, | |
| "grad_norm": 21.518051147460938, | |
| "learning_rate": 6.605545617173524e-06, | |
| "loss": 0.2382, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.9287020109689214, | |
| "grad_norm": 29.53682518005371, | |
| "learning_rate": 6.560822898032201e-06, | |
| "loss": 0.1258, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.9360146252285192, | |
| "grad_norm": 3.9154770374298096, | |
| "learning_rate": 6.516100178890878e-06, | |
| "loss": 0.3329, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.943327239488117, | |
| "grad_norm": 3.4175925254821777, | |
| "learning_rate": 6.471377459749554e-06, | |
| "loss": 0.3287, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.9506398537477148, | |
| "grad_norm": 3.512779951095581, | |
| "learning_rate": 6.426654740608229e-06, | |
| "loss": 0.3368, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.9579524680073126, | |
| "grad_norm": 2.376668930053711, | |
| "learning_rate": 6.3819320214669056e-06, | |
| "loss": 0.1606, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.9652650822669104, | |
| "grad_norm": 7.044156074523926, | |
| "learning_rate": 6.337209302325582e-06, | |
| "loss": 0.502, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.9725776965265083, | |
| "grad_norm": 5.662295818328857, | |
| "learning_rate": 6.292486583184259e-06, | |
| "loss": 0.2306, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.979890310786106, | |
| "grad_norm": 15.784625053405762, | |
| "learning_rate": 6.2477638640429335e-06, | |
| "loss": 0.278, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.9872029250457038, | |
| "grad_norm": 14.018045425415039, | |
| "learning_rate": 6.2030411449016106e-06, | |
| "loss": 0.1991, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.9945155393053017, | |
| "grad_norm": 7.3681488037109375, | |
| "learning_rate": 6.158318425760287e-06, | |
| "loss": 0.1634, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 1.0014625228519196, | |
| "grad_norm": 2.642103672027588, | |
| "learning_rate": 6.113595706618963e-06, | |
| "loss": 0.1823, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 1.0087751371115175, | |
| "grad_norm": 2.048372507095337, | |
| "learning_rate": 6.0688729874776385e-06, | |
| "loss": 0.0727, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 1.016087751371115, | |
| "grad_norm": 5.748651027679443, | |
| "learning_rate": 6.024150268336315e-06, | |
| "loss": 0.1945, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 1.023400365630713, | |
| "grad_norm": 3.144056797027588, | |
| "learning_rate": 5.979427549194992e-06, | |
| "loss": 0.2842, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 1.0307129798903107, | |
| "grad_norm": 13.262757301330566, | |
| "learning_rate": 5.934704830053668e-06, | |
| "loss": 0.1306, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 1.0380255941499086, | |
| "grad_norm": 2.9336941242218018, | |
| "learning_rate": 5.8899821109123435e-06, | |
| "loss": 0.2099, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 1.0453382084095064, | |
| "grad_norm": 1.8705564737319946, | |
| "learning_rate": 5.84525939177102e-06, | |
| "loss": 0.1409, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 1.0526508226691043, | |
| "grad_norm": 1.1322466135025024, | |
| "learning_rate": 5.800536672629696e-06, | |
| "loss": 0.0737, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 1.059963436928702, | |
| "grad_norm": 1.922173261642456, | |
| "learning_rate": 5.755813953488373e-06, | |
| "loss": 0.0951, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 1.0672760511883, | |
| "grad_norm": 2.0296812057495117, | |
| "learning_rate": 5.711091234347049e-06, | |
| "loss": 0.1544, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 1.0745886654478976, | |
| "grad_norm": 5.363153457641602, | |
| "learning_rate": 5.666368515205725e-06, | |
| "loss": 0.1187, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 1.0819012797074954, | |
| "grad_norm": 14.269627571105957, | |
| "learning_rate": 5.621645796064401e-06, | |
| "loss": 0.1259, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 1.0892138939670932, | |
| "grad_norm": 5.864461421966553, | |
| "learning_rate": 5.576923076923077e-06, | |
| "loss": 0.1691, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 1.096526508226691, | |
| "grad_norm": 11.137020111083984, | |
| "learning_rate": 5.532200357781754e-06, | |
| "loss": 0.13, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 1.103839122486289, | |
| "grad_norm": 9.656966209411621, | |
| "learning_rate": 5.48747763864043e-06, | |
| "loss": 0.2333, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 1.1111517367458867, | |
| "grad_norm": 5.965723037719727, | |
| "learning_rate": 5.442754919499106e-06, | |
| "loss": 0.0848, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 1.1184643510054844, | |
| "grad_norm": 1.1735326051712036, | |
| "learning_rate": 5.398032200357782e-06, | |
| "loss": 0.1167, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 1.1257769652650822, | |
| "grad_norm": 15.251590728759766, | |
| "learning_rate": 5.3533094812164585e-06, | |
| "loss": 0.1668, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 1.13308957952468, | |
| "grad_norm": 3.4887843132019043, | |
| "learning_rate": 5.308586762075134e-06, | |
| "loss": 0.2222, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 1.1404021937842779, | |
| "grad_norm": 4.675200462341309, | |
| "learning_rate": 5.263864042933811e-06, | |
| "loss": 0.0891, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 1.1477148080438757, | |
| "grad_norm": 12.091926574707031, | |
| "learning_rate": 5.219141323792487e-06, | |
| "loss": 0.1041, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 1.1550274223034735, | |
| "grad_norm": 12.529617309570312, | |
| "learning_rate": 5.1744186046511635e-06, | |
| "loss": 0.1527, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 1.1623400365630714, | |
| "grad_norm": 1.1709810495376587, | |
| "learning_rate": 5.129695885509839e-06, | |
| "loss": 0.2526, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 1.1696526508226692, | |
| "grad_norm": 14.441437721252441, | |
| "learning_rate": 5.084973166368515e-06, | |
| "loss": 0.1384, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 1.1769652650822668, | |
| "grad_norm": 4.227105617523193, | |
| "learning_rate": 5.040250447227192e-06, | |
| "loss": 0.1158, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 1.1842778793418647, | |
| "grad_norm": 3.5501041412353516, | |
| "learning_rate": 4.9955277280858685e-06, | |
| "loss": 0.122, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 1.1915904936014625, | |
| "grad_norm": 2.6126749515533447, | |
| "learning_rate": 4.950805008944544e-06, | |
| "loss": 0.1775, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 1.1989031078610604, | |
| "grad_norm": 9.163483619689941, | |
| "learning_rate": 4.906082289803221e-06, | |
| "loss": 0.2453, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 1.2062157221206582, | |
| "grad_norm": 14.076329231262207, | |
| "learning_rate": 4.861359570661896e-06, | |
| "loss": 0.1924, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 1.213528336380256, | |
| "grad_norm": 2.836543321609497, | |
| "learning_rate": 4.8166368515205735e-06, | |
| "loss": 0.0942, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 1.2208409506398539, | |
| "grad_norm": 2.6679649353027344, | |
| "learning_rate": 4.771914132379249e-06, | |
| "loss": 0.1114, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 1.2281535648994515, | |
| "grad_norm": 0.7027280330657959, | |
| "learning_rate": 4.727191413237925e-06, | |
| "loss": 0.1682, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 1.2354661791590493, | |
| "grad_norm": 3.3769328594207764, | |
| "learning_rate": 4.682468694096601e-06, | |
| "loss": 0.0713, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 1.2427787934186472, | |
| "grad_norm": 3.878709077835083, | |
| "learning_rate": 4.637745974955278e-06, | |
| "loss": 0.1036, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 1.250091407678245, | |
| "grad_norm": 2.7310967445373535, | |
| "learning_rate": 4.593023255813954e-06, | |
| "loss": 0.1446, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 1.2574040219378428, | |
| "grad_norm": 5.925379753112793, | |
| "learning_rate": 4.54830053667263e-06, | |
| "loss": 0.0571, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 1.2647166361974405, | |
| "grad_norm": 3.9630186557769775, | |
| "learning_rate": 4.503577817531306e-06, | |
| "loss": 0.1524, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 1.2720292504570385, | |
| "grad_norm": 6.056227684020996, | |
| "learning_rate": 4.458855098389983e-06, | |
| "loss": 0.0996, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 1.2793418647166361, | |
| "grad_norm": 6.299959659576416, | |
| "learning_rate": 4.414132379248659e-06, | |
| "loss": 0.0601, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 1.286654478976234, | |
| "grad_norm": 0.71943199634552, | |
| "learning_rate": 4.369409660107335e-06, | |
| "loss": 0.169, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 1.2939670932358318, | |
| "grad_norm": 6.55869197845459, | |
| "learning_rate": 4.3246869409660105e-06, | |
| "loss": 0.2137, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 1.3012797074954296, | |
| "grad_norm": 1.6254961490631104, | |
| "learning_rate": 4.279964221824688e-06, | |
| "loss": 0.1446, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 1.3085923217550275, | |
| "grad_norm": 2.675734519958496, | |
| "learning_rate": 4.235241502683364e-06, | |
| "loss": 0.1809, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 1.315904936014625, | |
| "grad_norm": 1.1206576824188232, | |
| "learning_rate": 4.190518783542039e-06, | |
| "loss": 0.0547, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 1.3232175502742232, | |
| "grad_norm": 5.133749961853027, | |
| "learning_rate": 4.145796064400716e-06, | |
| "loss": 0.0895, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 1.3305301645338208, | |
| "grad_norm": 18.341205596923828, | |
| "learning_rate": 4.101073345259392e-06, | |
| "loss": 0.1619, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 1.3378427787934186, | |
| "grad_norm": 5.34306526184082, | |
| "learning_rate": 4.056350626118069e-06, | |
| "loss": 0.1332, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 1.3451553930530165, | |
| "grad_norm": 4.900577068328857, | |
| "learning_rate": 4.011627906976744e-06, | |
| "loss": 0.0777, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 1.3524680073126143, | |
| "grad_norm": 6.939302444458008, | |
| "learning_rate": 3.9669051878354205e-06, | |
| "loss": 0.1676, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 1.3597806215722121, | |
| "grad_norm": 2.003343343734741, | |
| "learning_rate": 3.922182468694097e-06, | |
| "loss": 0.0713, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 1.3670932358318097, | |
| "grad_norm": 2.5260672569274902, | |
| "learning_rate": 3.877459749552773e-06, | |
| "loss": 0.3579, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 1.3744058500914078, | |
| "grad_norm": 2.5146102905273438, | |
| "learning_rate": 3.832737030411449e-06, | |
| "loss": 0.0984, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 1.3817184643510054, | |
| "grad_norm": 8.449647903442383, | |
| "learning_rate": 3.7880143112701255e-06, | |
| "loss": 0.1154, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 1.3890310786106033, | |
| "grad_norm": 3.773625373840332, | |
| "learning_rate": 3.7432915921288017e-06, | |
| "loss": 0.1564, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 1.396343692870201, | |
| "grad_norm": 19.32281494140625, | |
| "learning_rate": 3.698568872987478e-06, | |
| "loss": 0.1983, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 1.403656307129799, | |
| "grad_norm": 9.078587532043457, | |
| "learning_rate": 3.653846153846154e-06, | |
| "loss": 0.1644, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 1.4109689213893968, | |
| "grad_norm": 12.408085823059082, | |
| "learning_rate": 3.6091234347048305e-06, | |
| "loss": 0.1009, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 1.4182815356489944, | |
| "grad_norm": 2.738140821456909, | |
| "learning_rate": 3.5644007155635063e-06, | |
| "loss": 0.0611, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 1.4255941499085925, | |
| "grad_norm": 7.583118438720703, | |
| "learning_rate": 3.5196779964221826e-06, | |
| "loss": 0.2095, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 1.43290676416819, | |
| "grad_norm": 3.284457206726074, | |
| "learning_rate": 3.474955277280859e-06, | |
| "loss": 0.1686, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 1.440219378427788, | |
| "grad_norm": 2.968374252319336, | |
| "learning_rate": 3.430232558139535e-06, | |
| "loss": 0.0613, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 1.4475319926873857, | |
| "grad_norm": 5.8258748054504395, | |
| "learning_rate": 3.3855098389982117e-06, | |
| "loss": 0.1864, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 1.4548446069469836, | |
| "grad_norm": 0.748333215713501, | |
| "learning_rate": 3.3407871198568876e-06, | |
| "loss": 0.1203, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 1.4621572212065814, | |
| "grad_norm": 0.5647421479225159, | |
| "learning_rate": 3.296064400715564e-06, | |
| "loss": 0.0782, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 1.4621572212065814, | |
| "eval_loss": 0.2140577733516693, | |
| "eval_runtime": 17.6604, | |
| "eval_samples_per_second": 4.02, | |
| "eval_steps_per_second": 4.02, | |
| "eval_wer": 22.43816254416961, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 1.469469835466179, | |
| "grad_norm": 0.12333594262599945, | |
| "learning_rate": 3.25134168157424e-06, | |
| "loss": 0.0782, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 1.476782449725777, | |
| "grad_norm": 8.930018424987793, | |
| "learning_rate": 3.2066189624329163e-06, | |
| "loss": 0.1153, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 1.4840950639853747, | |
| "grad_norm": 2.1959986686706543, | |
| "learning_rate": 3.161896243291592e-06, | |
| "loss": 0.13, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 1.4914076782449726, | |
| "grad_norm": 4.376955986022949, | |
| "learning_rate": 3.117173524150269e-06, | |
| "loss": 0.1101, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 1.4987202925045704, | |
| "grad_norm": 6.39250373840332, | |
| "learning_rate": 3.0724508050089446e-06, | |
| "loss": 0.0615, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 1.5060329067641682, | |
| "grad_norm": 7.441036224365234, | |
| "learning_rate": 3.0277280858676213e-06, | |
| "loss": 0.1345, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 1.513345521023766, | |
| "grad_norm": 5.1939873695373535, | |
| "learning_rate": 2.983005366726297e-06, | |
| "loss": 0.1166, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 1.5206581352833637, | |
| "grad_norm": 2.4300341606140137, | |
| "learning_rate": 2.9382826475849734e-06, | |
| "loss": 0.0601, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 1.5279707495429617, | |
| "grad_norm": 5.050365447998047, | |
| "learning_rate": 2.893559928443649e-06, | |
| "loss": 0.1126, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 1.5352833638025594, | |
| "grad_norm": 2.16568922996521, | |
| "learning_rate": 2.848837209302326e-06, | |
| "loss": 0.1321, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 1.5425959780621572, | |
| "grad_norm": 3.987548351287842, | |
| "learning_rate": 2.8041144901610017e-06, | |
| "loss": 0.073, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 1.549908592321755, | |
| "grad_norm": 0.9321910738945007, | |
| "learning_rate": 2.7593917710196784e-06, | |
| "loss": 0.1069, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 1.5572212065813529, | |
| "grad_norm": 3.621582508087158, | |
| "learning_rate": 2.714669051878354e-06, | |
| "loss": 0.164, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 1.5645338208409507, | |
| "grad_norm": 11.509289741516113, | |
| "learning_rate": 2.6699463327370305e-06, | |
| "loss": 0.067, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 1.5718464351005483, | |
| "grad_norm": 1.7638990879058838, | |
| "learning_rate": 2.625223613595707e-06, | |
| "loss": 0.0718, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 1.5791590493601464, | |
| "grad_norm": 4.978612899780273, | |
| "learning_rate": 2.580500894454383e-06, | |
| "loss": 0.2316, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 1.586471663619744, | |
| "grad_norm": 1.147152066230774, | |
| "learning_rate": 2.5357781753130596e-06, | |
| "loss": 0.0602, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 1.5937842778793418, | |
| "grad_norm": 1.1619781255722046, | |
| "learning_rate": 2.4910554561717355e-06, | |
| "loss": 0.1155, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 1.6010968921389397, | |
| "grad_norm": 0.1027296707034111, | |
| "learning_rate": 2.4463327370304117e-06, | |
| "loss": 0.0433, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 1.6084095063985375, | |
| "grad_norm": 1.9408206939697266, | |
| "learning_rate": 2.401610017889088e-06, | |
| "loss": 0.0422, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 1.6157221206581354, | |
| "grad_norm": 0.9936367869377136, | |
| "learning_rate": 2.3568872987477638e-06, | |
| "loss": 0.0544, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 1.623034734917733, | |
| "grad_norm": 7.004252910614014, | |
| "learning_rate": 2.31216457960644e-06, | |
| "loss": 0.0555, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 1.630347349177331, | |
| "grad_norm": 1.4595509767532349, | |
| "learning_rate": 2.2674418604651163e-06, | |
| "loss": 0.1049, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 1.6376599634369287, | |
| "grad_norm": 7.460807800292969, | |
| "learning_rate": 2.222719141323793e-06, | |
| "loss": 0.1071, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 1.6449725776965265, | |
| "grad_norm": 12.255995750427246, | |
| "learning_rate": 2.1779964221824688e-06, | |
| "loss": 0.1497, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 1.6522851919561243, | |
| "grad_norm": 6.4094061851501465, | |
| "learning_rate": 2.133273703041145e-06, | |
| "loss": 0.1272, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 1.6595978062157222, | |
| "grad_norm": 4.806365966796875, | |
| "learning_rate": 2.0885509838998213e-06, | |
| "loss": 0.1782, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 1.66691042047532, | |
| "grad_norm": 4.42228364944458, | |
| "learning_rate": 2.0438282647584975e-06, | |
| "loss": 0.0519, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 1.6742230347349176, | |
| "grad_norm": 0.9680853486061096, | |
| "learning_rate": 1.9991055456171738e-06, | |
| "loss": 0.0476, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 1.6815356489945157, | |
| "grad_norm": 1.8787600994110107, | |
| "learning_rate": 1.95438282647585e-06, | |
| "loss": 0.1524, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 1.6888482632541133, | |
| "grad_norm": 4.833448886871338, | |
| "learning_rate": 1.9096601073345263e-06, | |
| "loss": 0.1424, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 1.6961608775137111, | |
| "grad_norm": 5.576896667480469, | |
| "learning_rate": 1.8649373881932023e-06, | |
| "loss": 0.1113, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 1.703473491773309, | |
| "grad_norm": 17.595869064331055, | |
| "learning_rate": 1.8202146690518786e-06, | |
| "loss": 0.1031, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 1.7107861060329068, | |
| "grad_norm": 0.32860955595970154, | |
| "learning_rate": 1.7754919499105546e-06, | |
| "loss": 0.0116, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 1.7180987202925047, | |
| "grad_norm": 4.98439359664917, | |
| "learning_rate": 1.7307692307692308e-06, | |
| "loss": 0.0929, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 1.7254113345521023, | |
| "grad_norm": 3.4554154872894287, | |
| "learning_rate": 1.686046511627907e-06, | |
| "loss": 0.0399, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 1.7327239488117003, | |
| "grad_norm": 0.08476200699806213, | |
| "learning_rate": 1.6413237924865833e-06, | |
| "loss": 0.0992, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 1.740036563071298, | |
| "grad_norm": 6.194643020629883, | |
| "learning_rate": 1.5966010733452594e-06, | |
| "loss": 0.0812, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 1.7473491773308958, | |
| "grad_norm": 0.058419618755578995, | |
| "learning_rate": 1.5518783542039356e-06, | |
| "loss": 0.0396, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 1.7546617915904936, | |
| "grad_norm": 5.278108596801758, | |
| "learning_rate": 1.5071556350626119e-06, | |
| "loss": 0.0756, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 1.7619744058500915, | |
| "grad_norm": 0.3693186938762665, | |
| "learning_rate": 1.462432915921288e-06, | |
| "loss": 0.0359, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 1.7692870201096893, | |
| "grad_norm": 0.27072983980178833, | |
| "learning_rate": 1.4177101967799644e-06, | |
| "loss": 0.0975, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 1.776599634369287, | |
| "grad_norm": 4.527318954467773, | |
| "learning_rate": 1.3729874776386406e-06, | |
| "loss": 0.1725, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 1.783912248628885, | |
| "grad_norm": 3.0763070583343506, | |
| "learning_rate": 1.3282647584973169e-06, | |
| "loss": 0.1058, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 1.7912248628884826, | |
| "grad_norm": 3.3491156101226807, | |
| "learning_rate": 1.2835420393559931e-06, | |
| "loss": 0.1198, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 1.7985374771480804, | |
| "grad_norm": 2.7176685333251953, | |
| "learning_rate": 1.2388193202146692e-06, | |
| "loss": 0.0547, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 1.8058500914076783, | |
| "grad_norm": 2.514491558074951, | |
| "learning_rate": 1.1940966010733454e-06, | |
| "loss": 0.1187, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 1.8131627056672759, | |
| "grad_norm": 0.08074276894330978, | |
| "learning_rate": 1.1493738819320217e-06, | |
| "loss": 0.0591, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 1.820475319926874, | |
| "grad_norm": 3.8862452507019043, | |
| "learning_rate": 1.1046511627906977e-06, | |
| "loss": 0.11, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 1.8277879341864716, | |
| "grad_norm": 5.0899224281311035, | |
| "learning_rate": 1.059928443649374e-06, | |
| "loss": 0.1136, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 1.8351005484460696, | |
| "grad_norm": 8.998924255371094, | |
| "learning_rate": 1.0152057245080502e-06, | |
| "loss": 0.1629, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 1.8424131627056672, | |
| "grad_norm": 4.142125129699707, | |
| "learning_rate": 9.704830053667264e-07, | |
| "loss": 0.0944, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 1.849725776965265, | |
| "grad_norm": 4.672084331512451, | |
| "learning_rate": 9.257602862254025e-07, | |
| "loss": 0.0667, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 1.857038391224863, | |
| "grad_norm": 12.830012321472168, | |
| "learning_rate": 8.810375670840787e-07, | |
| "loss": 0.0861, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 1.8643510054844605, | |
| "grad_norm": 5.533012390136719, | |
| "learning_rate": 8.363148479427551e-07, | |
| "loss": 0.2207, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 1.8716636197440586, | |
| "grad_norm": 15.992862701416016, | |
| "learning_rate": 7.915921288014312e-07, | |
| "loss": 0.2184, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 1.8789762340036562, | |
| "grad_norm": 4.1802754402160645, | |
| "learning_rate": 7.468694096601075e-07, | |
| "loss": 0.1086, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 1.8862888482632543, | |
| "grad_norm": 2.33207368850708, | |
| "learning_rate": 7.021466905187836e-07, | |
| "loss": 0.1922, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 1.8936014625228519, | |
| "grad_norm": 8.93466567993164, | |
| "learning_rate": 6.574239713774598e-07, | |
| "loss": 0.1005, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 1.9009140767824497, | |
| "grad_norm": 0.03259089216589928, | |
| "learning_rate": 6.12701252236136e-07, | |
| "loss": 0.0188, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 1.9082266910420476, | |
| "grad_norm": 0.019739491865038872, | |
| "learning_rate": 5.679785330948122e-07, | |
| "loss": 0.1194, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 1.9155393053016452, | |
| "grad_norm": 12.475129127502441, | |
| "learning_rate": 5.232558139534884e-07, | |
| "loss": 0.07, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 1.9228519195612432, | |
| "grad_norm": 6.5969743728637695, | |
| "learning_rate": 4.785330948121647e-07, | |
| "loss": 0.1474, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 1.9301645338208409, | |
| "grad_norm": 28.243986129760742, | |
| "learning_rate": 4.338103756708408e-07, | |
| "loss": 0.1784, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 1.937477148080439, | |
| "grad_norm": 5.500144958496094, | |
| "learning_rate": 3.89087656529517e-07, | |
| "loss": 0.0785, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 1.9447897623400365, | |
| "grad_norm": 2.747636079788208, | |
| "learning_rate": 3.443649373881932e-07, | |
| "loss": 0.106, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 1.9521023765996344, | |
| "grad_norm": 4.100680351257324, | |
| "learning_rate": 2.996422182468694e-07, | |
| "loss": 0.06, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 1.9594149908592322, | |
| "grad_norm": 0.7016245126724243, | |
| "learning_rate": 2.5491949910554564e-07, | |
| "loss": 0.0801, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 1.9667276051188298, | |
| "grad_norm": 3.146641254425049, | |
| "learning_rate": 2.1019677996422184e-07, | |
| "loss": 0.0939, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 1.9740402193784279, | |
| "grad_norm": 13.350373268127441, | |
| "learning_rate": 1.6547406082289806e-07, | |
| "loss": 0.1908, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 1.9813528336380255, | |
| "grad_norm": 12.70032787322998, | |
| "learning_rate": 1.2075134168157423e-07, | |
| "loss": 0.1406, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 1.9886654478976236, | |
| "grad_norm": 0.3782537579536438, | |
| "learning_rate": 7.602862254025046e-08, | |
| "loss": 0.1945, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 1.9959780621572212, | |
| "grad_norm": 14.334826469421387, | |
| "learning_rate": 3.130590339892666e-08, | |
| "loss": 0.1401, | |
| "step": 2730 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 2736, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.11654176882688e+19, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
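The state above is the standard `trainer_state.json` that the Hugging Face `Trainer` writes alongside each checkpoint. As a minimal sketch (assumptions: the JSON is saved locally as `trainer_state.json`, and `best_metric` is the logged `eval_wer`, which its value at step 2000 matches), it can be parsed like this to recover the best checkpoint and the training/eval curves:

```python
# Minimal sketch, assuming the JSON above is saved as "trainer_state.json".
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Headline fields recorded by the Trainer.
print("best checkpoint:", state["best_model_checkpoint"])
print("best metric (assumed eval_wer):", state["best_metric"])
print("total optimizer steps:", state["global_step"])

# log_history mixes training entries (with "loss") and evaluation entries (with "eval_wer").
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_wer" in e]

print("last logged training loss:", train_logs[-1]["loss"], "at step", train_logs[-1]["step"])
for e in eval_logs:
    print(f"step {e['step']}: eval_loss={e['eval_loss']:.4f}, eval_wer={e['eval_wer']:.2f}")
```

One detail worth double-checking from these logs: step 1000 reports a lower `eval_wer` (22.08) than the `best_model_checkpoint` at step 2000 (22.44). If WER is meant to be minimized, that pattern is consistent with `greater_is_better` having been left at its default (true for non-loss metrics) in the training arguments, so the "best" checkpoint selection may be inverted.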