{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 11.148285449490269,
  "eval_steps": 150,
  "global_step": 3000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03707136237256719,
      "grad_norm": 53.36411666870117,
      "learning_rate": 1.1152416356877324e-06,
      "loss": 10.8165,
      "step": 10
    },
    {
      "epoch": 0.07414272474513438,
      "grad_norm": 61.33858871459961,
      "learning_rate": 2.2304832713754648e-06,
      "loss": 10.8463,
      "step": 20
    },
    {
      "epoch": 0.11121408711770157,
      "grad_norm": 67.16305541992188,
      "learning_rate": 3.345724907063197e-06,
      "loss": 10.821,
      "step": 30
    },
    {
      "epoch": 0.14828544949026876,
      "grad_norm": 62.08958053588867,
      "learning_rate": 4.4609665427509296e-06,
      "loss": 10.4704,
      "step": 40
    },
    {
      "epoch": 0.18535681186283595,
      "grad_norm": 53.839927673339844,
      "learning_rate": 5.576208178438661e-06,
      "loss": 10.8568,
      "step": 50
    },
    {
      "epoch": 0.22242817423540315,
      "grad_norm": 57.02272033691406,
      "learning_rate": 6.691449814126394e-06,
      "loss": 10.3005,
      "step": 60
    },
    {
      "epoch": 0.2594995366079703,
      "grad_norm": 88.16449737548828,
      "learning_rate": 7.806691449814127e-06,
      "loss": 9.6009,
      "step": 70
    },
    {
      "epoch": 0.2965708989805375,
      "grad_norm": 84.35252380371094,
      "learning_rate": 8.921933085501859e-06,
      "loss": 9.7448,
      "step": 80
    },
    {
      "epoch": 0.3336422613531047,
      "grad_norm": 77.93982696533203,
      "learning_rate": 1.0037174721189593e-05,
      "loss": 9.5922,
      "step": 90
    },
    {
      "epoch": 0.3707136237256719,
      "grad_norm": 56.669166564941406,
      "learning_rate": 1.1152416356877323e-05,
      "loss": 10.0919,
      "step": 100
    },
    {
      "epoch": 0.4077849860982391,
      "grad_norm": 56.652488708496094,
      "learning_rate": 1.2267657992565056e-05,
      "loss": 10.2733,
      "step": 110
    },
    {
      "epoch": 0.4448563484708063,
      "grad_norm": 63.472042083740234,
      "learning_rate": 1.3382899628252788e-05,
      "loss": 9.4547,
      "step": 120
    },
    {
      "epoch": 0.4819277108433735,
      "grad_norm": 78.0159912109375,
      "learning_rate": 1.4498141263940521e-05,
      "loss": 9.1738,
      "step": 130
    },
    {
      "epoch": 0.5189990732159406,
      "grad_norm": 68.1931381225586,
      "learning_rate": 1.5613382899628255e-05,
      "loss": 10.0876,
      "step": 140
    },
    {
      "epoch": 0.5560704355885079,
      "grad_norm": 49.315216064453125,
      "learning_rate": 1.6728624535315986e-05,
      "loss": 10.3721,
      "step": 150
    },
    {
      "epoch": 0.5560704355885079,
      "eval_loss": 2.559400796890259,
      "eval_runtime": 34.304,
      "eval_samples_per_second": 47.166,
      "eval_steps_per_second": 6.763,
      "step": 150
    },
    {
      "epoch": 0.593141797961075,
      "grad_norm": 34.1899528503418,
      "learning_rate": 1.7843866171003718e-05,
      "loss": 10.4711,
      "step": 160
    },
    {
      "epoch": 0.6302131603336423,
      "grad_norm": 37.074134826660156,
      "learning_rate": 1.895910780669145e-05,
      "loss": 9.9383,
      "step": 170
    },
    {
      "epoch": 0.6672845227062094,
      "grad_norm": 49.323158264160156,
      "learning_rate": 2.0074349442379185e-05,
      "loss": 10.0448,
      "step": 180
    },
    {
      "epoch": 0.7043558850787767,
      "grad_norm": 53.07639694213867,
      "learning_rate": 2.1189591078066917e-05,
      "loss": 9.8467,
      "step": 190
    },
    {
      "epoch": 0.7414272474513438,
      "grad_norm": 38.345916748046875,
      "learning_rate": 2.2304832713754645e-05,
      "loss": 9.9434,
      "step": 200
    },
    {
      "epoch": 0.7784986098239111,
      "grad_norm": 47.55274963378906,
      "learning_rate": 2.3420074349442377e-05,
      "loss": 9.7912,
      "step": 210
    },
    {
      "epoch": 0.8155699721964782,
      "grad_norm": 59.17176055908203,
      "learning_rate": 2.4535315985130112e-05,
      "loss": 9.7905,
      "step": 220
    },
    {
      "epoch": 0.8526413345690455,
      "grad_norm": 52.57457733154297,
      "learning_rate": 2.5650557620817844e-05,
      "loss": 9.9412,
      "step": 230
    },
    {
      "epoch": 0.8897126969416126,
      "grad_norm": 42.251304626464844,
      "learning_rate": 2.6765799256505576e-05,
      "loss": 10.0084,
      "step": 240
    },
    {
      "epoch": 0.9267840593141798,
      "grad_norm": 46.63966369628906,
      "learning_rate": 2.788104089219331e-05,
      "loss": 10.0139,
      "step": 250
    },
    {
      "epoch": 0.963855421686747,
      "grad_norm": 27.772289276123047,
      "learning_rate": 2.8996282527881043e-05,
      "loss": 9.3794,
      "step": 260
    },
    {
      "epoch": 1.0,
      "grad_norm": 46.3179931640625,
      "learning_rate": 2.9987608426270136e-05,
      "loss": 9.5747,
      "step": 270
    },
    {
      "epoch": 1.0370713623725671,
      "grad_norm": 45.746551513671875,
      "learning_rate": 2.98636926889715e-05,
      "loss": 9.6763,
      "step": 280
    },
    {
      "epoch": 1.0741427247451343,
      "grad_norm": 49.50416564941406,
      "learning_rate": 2.9739776951672864e-05,
      "loss": 10.4789,
      "step": 290
    },
    {
      "epoch": 1.1112140871177016,
      "grad_norm": 43.72733688354492,
      "learning_rate": 2.9615861214374226e-05,
      "loss": 9.7244,
      "step": 300
    },
    {
      "epoch": 1.1112140871177016,
      "eval_loss": 2.6879734992980957,
      "eval_runtime": 34.2522,
      "eval_samples_per_second": 47.238,
      "eval_steps_per_second": 6.773,
      "step": 300
    },
    {
      "epoch": 1.1482854494902688,
      "grad_norm": 48.68376541137695,
      "learning_rate": 2.9491945477075588e-05,
      "loss": 9.9421,
      "step": 310
    },
    {
      "epoch": 1.185356811862836,
      "grad_norm": 44.014747619628906,
      "learning_rate": 2.9368029739776953e-05,
      "loss": 9.5022,
      "step": 320
    },
    {
      "epoch": 1.222428174235403,
      "grad_norm": 49.858642578125,
      "learning_rate": 2.9244114002478315e-05,
      "loss": 9.6801,
      "step": 330
    },
    {
      "epoch": 1.2594995366079704,
      "grad_norm": 49.97459411621094,
      "learning_rate": 2.9120198265179677e-05,
      "loss": 9.25,
      "step": 340
    },
    {
      "epoch": 1.2965708989805376,
      "grad_norm": 40.38698196411133,
      "learning_rate": 2.8996282527881043e-05,
      "loss": 10.2879,
      "step": 350
    },
    {
      "epoch": 1.3336422613531047,
      "grad_norm": 29.797264099121094,
      "learning_rate": 2.8872366790582405e-05,
      "loss": 9.97,
      "step": 360
    },
    {
      "epoch": 1.3707136237256718,
      "grad_norm": 43.83412551879883,
      "learning_rate": 2.874845105328377e-05,
      "loss": 9.3751,
      "step": 370
    },
    {
      "epoch": 1.407784986098239,
      "grad_norm": 42.754154205322266,
      "learning_rate": 2.8624535315985132e-05,
      "loss": 9.8947,
      "step": 380
    },
    {
      "epoch": 1.4448563484708064,
      "grad_norm": 50.08184051513672,
      "learning_rate": 2.8500619578686494e-05,
      "loss": 9.4987,
      "step": 390
    },
    {
      "epoch": 1.4819277108433735,
      "grad_norm": 41.81029510498047,
      "learning_rate": 2.8376703841387856e-05,
      "loss": 10.1515,
      "step": 400
    },
    {
      "epoch": 1.5189990732159406,
      "grad_norm": 44.03816223144531,
      "learning_rate": 2.8252788104089218e-05,
      "loss": 10.0098,
      "step": 410
    },
    {
      "epoch": 1.556070435588508,
      "grad_norm": 42.871856689453125,
      "learning_rate": 2.8128872366790583e-05,
      "loss": 9.5566,
      "step": 420
    },
    {
      "epoch": 1.5931417979610751,
      "grad_norm": 44.18041229248047,
      "learning_rate": 2.8004956629491945e-05,
      "loss": 10.1925,
      "step": 430
    },
    {
      "epoch": 1.6302131603336423,
      "grad_norm": 30.012725830078125,
      "learning_rate": 2.788104089219331e-05,
      "loss": 9.7408,
      "step": 440
    },
    {
      "epoch": 1.6672845227062094,
      "grad_norm": 38.104827880859375,
      "learning_rate": 2.7757125154894673e-05,
      "loss": 9.6873,
      "step": 450
    },
    {
      "epoch": 1.6672845227062094,
      "eval_loss": 2.5061004161834717,
      "eval_runtime": 34.2772,
      "eval_samples_per_second": 47.203,
      "eval_steps_per_second": 6.768,
      "step": 450
    },
    {
      "epoch": 1.7043558850787766,
      "grad_norm": 42.93535232543945,
      "learning_rate": 2.7633209417596035e-05,
      "loss": 9.1932,
      "step": 460
    },
    {
      "epoch": 1.7414272474513437,
      "grad_norm": 63.935279846191406,
      "learning_rate": 2.75092936802974e-05,
      "loss": 9.9204,
      "step": 470
    },
    {
      "epoch": 1.778498609823911,
      "grad_norm": 30.906002044677734,
      "learning_rate": 2.7385377942998762e-05,
      "loss": 9.5151,
      "step": 480
    },
    {
      "epoch": 1.8155699721964782,
      "grad_norm": 48.05679702758789,
      "learning_rate": 2.7261462205700128e-05,
      "loss": 9.8418,
      "step": 490
    },
    {
      "epoch": 1.8526413345690456,
      "grad_norm": 56.36283874511719,
      "learning_rate": 2.7137546468401486e-05,
      "loss": 9.6943,
      "step": 500
    },
    {
      "epoch": 1.8897126969416127,
      "grad_norm": 47.22933578491211,
      "learning_rate": 2.7013630731102848e-05,
      "loss": 9.6948,
      "step": 510
    },
    {
      "epoch": 1.9267840593141798,
      "grad_norm": 36.110008239746094,
      "learning_rate": 2.6889714993804214e-05,
      "loss": 9.2274,
      "step": 520
    },
    {
      "epoch": 1.963855421686747,
      "grad_norm": 37.13912582397461,
      "learning_rate": 2.6765799256505576e-05,
      "loss": 10.138,
      "step": 530
    },
    {
      "epoch": 2.0,
      "grad_norm": 51.37559509277344,
      "learning_rate": 2.664188351920694e-05,
      "loss": 8.7934,
      "step": 540
    },
    {
      "epoch": 2.037071362372567,
      "grad_norm": 42.32170104980469,
      "learning_rate": 2.6517967781908303e-05,
      "loss": 8.8304,
      "step": 550
    },
    {
      "epoch": 2.0741427247451343,
      "grad_norm": 42.406219482421875,
      "learning_rate": 2.6394052044609665e-05,
      "loss": 8.9593,
      "step": 560
    },
    {
      "epoch": 2.1112140871177014,
      "grad_norm": 52.097023010253906,
      "learning_rate": 2.627013630731103e-05,
      "loss": 9.0051,
      "step": 570
    },
    {
      "epoch": 2.1482854494902686,
      "grad_norm": 83.86861419677734,
      "learning_rate": 2.6146220570012392e-05,
      "loss": 9.2829,
      "step": 580
    },
    {
      "epoch": 2.185356811862836,
      "grad_norm": 38.229949951171875,
      "learning_rate": 2.6022304832713758e-05,
      "loss": 9.785,
      "step": 590
    },
    {
      "epoch": 2.2224281742354033,
      "grad_norm": 50.322513580322266,
      "learning_rate": 2.5898389095415116e-05,
      "loss": 9.6401,
      "step": 600
    },
    {
      "epoch": 2.2224281742354033,
      "eval_loss": 2.448084592819214,
      "eval_runtime": 34.2525,
      "eval_samples_per_second": 47.237,
      "eval_steps_per_second": 6.773,
      "step": 600
    },
    {
      "epoch": 2.266913809082484,
      "grad_norm": 39.366676330566406,
      "learning_rate": 2.5774473358116482e-05,
      "loss": 8.7059,
      "step": 610
    },
    {
      "epoch": 2.303985171455051,
      "grad_norm": 41.00213623046875,
      "learning_rate": 2.5650557620817844e-05,
      "loss": 9.1285,
      "step": 620
    },
    {
      "epoch": 2.341056533827618,
      "grad_norm": 63.442169189453125,
      "learning_rate": 2.5526641883519206e-05,
      "loss": 9.3277,
      "step": 630
    },
    {
      "epoch": 2.3781278962001853,
      "grad_norm": 51.54194641113281,
      "learning_rate": 2.540272614622057e-05,
      "loss": 9.1577,
      "step": 640
    },
    {
      "epoch": 2.4151992585727524,
      "grad_norm": 57.583885192871094,
      "learning_rate": 2.5278810408921933e-05,
      "loss": 9.6032,
      "step": 650
    },
    {
      "epoch": 2.4522706209453196,
      "grad_norm": 60.13545227050781,
      "learning_rate": 2.51548946716233e-05,
      "loss": 8.5451,
      "step": 660
    },
    {
      "epoch": 2.489341983317887,
      "grad_norm": 139.85794067382812,
      "learning_rate": 2.503097893432466e-05,
      "loss": 8.6601,
      "step": 670
    },
    {
      "epoch": 2.5264133456904543,
      "grad_norm": 160.73171997070312,
      "learning_rate": 2.4907063197026023e-05,
      "loss": 9.522,
      "step": 680
    },
    {
      "epoch": 2.5634847080630214,
      "grad_norm": 190.7194366455078,
      "learning_rate": 2.4783147459727385e-05,
      "loss": 10.0619,
      "step": 690
    },
    {
      "epoch": 2.6005560704355886,
      "grad_norm": 172.5655517578125,
      "learning_rate": 2.4659231722428747e-05,
      "loss": 9.6444,
      "step": 700
    },
    {
      "epoch": 2.6376274328081557,
      "grad_norm": 280.9183044433594,
      "learning_rate": 2.4535315985130112e-05,
      "loss": 10.7593,
      "step": 710
    },
    {
      "epoch": 2.674698795180723,
      "grad_norm": 118.03450775146484,
      "learning_rate": 2.4411400247831474e-05,
      "loss": 10.4432,
      "step": 720
    },
    {
      "epoch": 2.71177015755329,
      "grad_norm": 332.662353515625,
      "learning_rate": 2.428748451053284e-05,
      "loss": 9.6033,
      "step": 730
    },
    {
      "epoch": 2.748841519925857,
      "grad_norm": 129.18788146972656,
      "learning_rate": 2.41635687732342e-05,
      "loss": 9.108,
      "step": 740
    },
    {
      "epoch": 2.7859128822984243,
      "grad_norm": 350.5856628417969,
      "learning_rate": 2.4039653035935564e-05,
      "loss": 9.884,
      "step": 750
    },
    {
      "epoch": 2.7859128822984243,
      "eval_loss": 2.599517583847046,
      "eval_runtime": 34.0592,
      "eval_samples_per_second": 47.506,
      "eval_steps_per_second": 6.812,
      "step": 750
    },
    {
      "epoch": 2.822984244670992,
      "grad_norm": 285.14862060546875,
      "learning_rate": 2.391573729863693e-05,
      "loss": 11.3952,
      "step": 760
    },
    {
      "epoch": 2.860055607043559,
      "grad_norm": 185.5862579345703,
      "learning_rate": 2.379182156133829e-05,
      "loss": 10.6798,
      "step": 770
    },
    {
      "epoch": 2.897126969416126,
      "grad_norm": 77.20765686035156,
      "learning_rate": 2.3667905824039656e-05,
      "loss": 9.9795,
      "step": 780
    },
    {
      "epoch": 2.9341983317886933,
      "grad_norm": 88.75109100341797,
      "learning_rate": 2.3543990086741015e-05,
      "loss": 9.3694,
      "step": 790
    },
    {
      "epoch": 2.9712696941612604,
      "grad_norm": 53.416690826416016,
      "learning_rate": 2.3420074349442377e-05,
      "loss": 8.4631,
      "step": 800
    },
    {
      "epoch": 3.0111214087117704,
      "grad_norm": 72.12966918945312,
      "learning_rate": 2.3296158612143742e-05,
      "loss": 9.3903,
      "step": 810
    },
    {
      "epoch": 3.0481927710843375,
      "grad_norm": 159.3909912109375,
      "learning_rate": 2.3172242874845104e-05,
      "loss": 8.9608,
      "step": 820
    },
    {
      "epoch": 3.0852641334569046,
      "grad_norm": 161.03713989257812,
      "learning_rate": 2.304832713754647e-05,
      "loss": 9.623,
      "step": 830
    },
    {
      "epoch": 3.122335495829472,
      "grad_norm": 136.50045776367188,
      "learning_rate": 2.2924411400247832e-05,
      "loss": 8.5958,
      "step": 840
    },
    {
      "epoch": 3.159406858202039,
      "grad_norm": 60.785926818847656,
      "learning_rate": 2.2800495662949197e-05,
      "loss": 8.6278,
      "step": 850
    },
    {
      "epoch": 3.196478220574606,
      "grad_norm": 115.2509536743164,
      "learning_rate": 2.267657992565056e-05,
      "loss": 8.661,
      "step": 860
    },
    {
      "epoch": 3.233549582947173,
      "grad_norm": 144.38912963867188,
      "learning_rate": 2.255266418835192e-05,
      "loss": 9.5836,
      "step": 870
    },
    {
      "epoch": 3.2706209453197403,
      "grad_norm": 239.74520874023438,
      "learning_rate": 2.2428748451053287e-05,
      "loss": 8.6221,
      "step": 880
    },
    {
      "epoch": 3.3076923076923075,
      "grad_norm": 215.12789916992188,
      "learning_rate": 2.2304832713754645e-05,
      "loss": 9.1995,
      "step": 890
    },
    {
      "epoch": 3.344763670064875,
      "grad_norm": 113.77183532714844,
      "learning_rate": 2.218091697645601e-05,
      "loss": 8.6909,
      "step": 900
    },
    {
      "epoch": 3.344763670064875,
      "eval_loss": 2.4246325492858887,
      "eval_runtime": 34.1351,
      "eval_samples_per_second": 47.4,
      "eval_steps_per_second": 6.797,
      "step": 900
    },
    {
      "epoch": 3.381835032437442,
      "grad_norm": 175.02731323242188,
      "learning_rate": 2.2057001239157373e-05,
      "loss": 8.7632,
      "step": 910
    },
    {
      "epoch": 3.4189063948100094,
      "grad_norm": 142.76580810546875,
      "learning_rate": 2.1933085501858735e-05,
      "loss": 8.5604,
      "step": 920
    },
    {
      "epoch": 3.4559777571825765,
      "grad_norm": 163.0391387939453,
      "learning_rate": 2.18091697645601e-05,
      "loss": 9.3871,
      "step": 930
    },
    {
      "epoch": 3.4930491195551436,
      "grad_norm": 386.4294738769531,
      "learning_rate": 2.1685254027261462e-05,
      "loss": 9.6445,
      "step": 940
    },
    {
      "epoch": 3.5301204819277108,
      "grad_norm": 86.92581939697266,
      "learning_rate": 2.1561338289962827e-05,
      "loss": 9.635,
      "step": 950
    },
    {
      "epoch": 3.567191844300278,
      "grad_norm": 80.94845581054688,
      "learning_rate": 2.143742255266419e-05,
      "loss": 8.8689,
      "step": 960
    },
    {
      "epoch": 3.6042632066728455,
      "grad_norm": 69.99708557128906,
      "learning_rate": 2.1313506815365555e-05,
      "loss": 9.1411,
      "step": 970
    },
    {
      "epoch": 3.641334569045412,
      "grad_norm": 80.53229522705078,
      "learning_rate": 2.1189591078066917e-05,
      "loss": 8.7552,
      "step": 980
    },
    {
      "epoch": 3.67840593141798,
      "grad_norm": 53.3475341796875,
      "learning_rate": 2.1065675340768275e-05,
      "loss": 8.5697,
      "step": 990
    },
    {
      "epoch": 3.715477293790547,
      "grad_norm": 54.19875717163086,
      "learning_rate": 2.094175960346964e-05,
      "loss": 8.9008,
      "step": 1000
    },
    {
      "epoch": 3.752548656163114,
      "grad_norm": 52.96628952026367,
      "learning_rate": 2.0817843866171003e-05,
      "loss": 8.9415,
      "step": 1010
    },
    {
      "epoch": 3.789620018535681,
      "grad_norm": 64.08312225341797,
      "learning_rate": 2.0693928128872368e-05,
      "loss": 8.1303,
      "step": 1020
    },
    {
      "epoch": 3.8266913809082483,
      "grad_norm": 68.22163391113281,
      "learning_rate": 2.057001239157373e-05,
      "loss": 8.241,
      "step": 1030
    },
    {
      "epoch": 3.8637627432808155,
      "grad_norm": 66.70902252197266,
      "learning_rate": 2.0446096654275092e-05,
      "loss": 8.6969,
      "step": 1040
    },
    {
      "epoch": 3.9008341056533826,
      "grad_norm": 49.94658279418945,
      "learning_rate": 2.0322180916976458e-05,
      "loss": 9.8392,
      "step": 1050
    },
    {
      "epoch": 3.9008341056533826,
      "eval_loss": 2.533297300338745,
      "eval_runtime": 34.1106,
      "eval_samples_per_second": 47.434,
      "eval_steps_per_second": 6.801,
      "step": 1050
    },
    {
      "epoch": 3.93790546802595,
      "grad_norm": 73.9527816772461,
      "learning_rate": 2.6765799256505576e-05,
      "loss": 8.5934,
      "step": 1060
    },
    {
      "epoch": 3.974976830398517,
      "grad_norm": 76.90135955810547,
      "learning_rate": 2.6703841387856257e-05,
      "loss": 8.006,
      "step": 1070
    },
    {
      "epoch": 4.014828544949027,
      "grad_norm": 85.86236572265625,
      "learning_rate": 2.664188351920694e-05,
      "loss": 9.0081,
      "step": 1080
    },
    {
      "epoch": 4.051899907321594,
      "grad_norm": 106.57842254638672,
      "learning_rate": 2.6579925650557622e-05,
      "loss": 8.6706,
      "step": 1090
    },
    {
      "epoch": 4.088971269694161,
      "grad_norm": 68.10344696044922,
      "learning_rate": 2.6517967781908303e-05,
      "loss": 9.6146,
      "step": 1100
    },
    {
      "epoch": 4.126042632066729,
      "grad_norm": 67.02091217041016,
      "learning_rate": 2.6456009913258984e-05,
      "loss": 9.225,
      "step": 1110
    },
    {
      "epoch": 4.163113994439295,
      "grad_norm": 67.66644287109375,
      "learning_rate": 2.6394052044609665e-05,
      "loss": 8.7522,
      "step": 1120
    },
    {
      "epoch": 4.200185356811863,
      "grad_norm": 59.26814651489258,
      "learning_rate": 2.633209417596035e-05,
      "loss": 9.0221,
      "step": 1130
    },
    {
      "epoch": 4.23725671918443,
      "grad_norm": 88.07310485839844,
      "learning_rate": 2.627013630731103e-05,
      "loss": 9.6458,
      "step": 1140
    },
    {
      "epoch": 4.274328081556997,
      "grad_norm": 69.33895874023438,
      "learning_rate": 2.620817843866171e-05,
      "loss": 8.7692,
      "step": 1150
    },
    {
      "epoch": 4.311399443929565,
      "grad_norm": 43.823951721191406,
      "learning_rate": 2.6146220570012392e-05,
      "loss": 9.2874,
      "step": 1160
    },
    {
      "epoch": 4.348470806302132,
      "grad_norm": 63.590980529785156,
      "learning_rate": 2.6084262701363077e-05,
      "loss": 8.9276,
      "step": 1170
    },
    {
      "epoch": 4.385542168674699,
      "grad_norm": 43.4519157409668,
      "learning_rate": 2.6022304832713758e-05,
      "loss": 8.7444,
      "step": 1180
    },
    {
      "epoch": 4.422613531047266,
      "grad_norm": 72.1529312133789,
      "learning_rate": 2.5960346964064435e-05,
      "loss": 8.7265,
      "step": 1190
    },
    {
      "epoch": 4.459684893419833,
      "grad_norm": 101.67008972167969,
      "learning_rate": 2.5898389095415116e-05,
      "loss": 8.7642,
      "step": 1200
    },
    {
      "epoch": 4.459684893419833,
      "eval_loss": 2.6471314430236816,
      "eval_runtime": 34.2723,
      "eval_samples_per_second": 47.21,
      "eval_steps_per_second": 6.769,
      "step": 1200
    },
    {
      "epoch": 4.4967562557924,
      "grad_norm": 66.03606414794922,
      "learning_rate": 2.5836431226765797e-05,
      "loss": 8.8917,
      "step": 1210
    },
    {
      "epoch": 4.533827618164968,
      "grad_norm": 60.01662826538086,
      "learning_rate": 2.5774473358116482e-05,
      "loss": 9.2155,
      "step": 1220
    },
    {
      "epoch": 4.570898980537534,
      "grad_norm": 83.86510467529297,
      "learning_rate": 2.5712515489467163e-05,
      "loss": 8.6101,
      "step": 1230
    },
    {
      "epoch": 4.607970342910102,
      "grad_norm": 64.35486602783203,
      "learning_rate": 2.5650557620817844e-05,
      "loss": 8.9904,
      "step": 1240
    },
    {
      "epoch": 4.645041705282669,
      "grad_norm": 60.44017791748047,
      "learning_rate": 2.5588599752168525e-05,
      "loss": 9.3272,
      "step": 1250
    },
    {
      "epoch": 4.682113067655236,
      "grad_norm": 56.501102447509766,
      "learning_rate": 2.5526641883519206e-05,
      "loss": 7.9367,
      "step": 1260
    },
    {
      "epoch": 4.719184430027804,
      "grad_norm": 65.01017761230469,
      "learning_rate": 2.546468401486989e-05,
      "loss": 8.5891,
      "step": 1270
    },
    {
      "epoch": 4.7562557924003706,
      "grad_norm": 63.42623519897461,
      "learning_rate": 2.540272614622057e-05,
      "loss": 8.6286,
      "step": 1280
    },
    {
      "epoch": 4.793327154772938,
      "grad_norm": 58.96747589111328,
      "learning_rate": 2.5340768277571252e-05,
      "loss": 7.9982,
      "step": 1290
    },
    {
      "epoch": 4.830398517145505,
      "grad_norm": 96.77368927001953,
      "learning_rate": 2.5278810408921933e-05,
      "loss": 7.5587,
      "step": 1300
    },
    {
      "epoch": 4.867469879518072,
      "grad_norm": 85.92269897460938,
      "learning_rate": 2.5216852540272614e-05,
      "loss": 7.9405,
      "step": 1310
    },
    {
      "epoch": 4.904541241890639,
      "grad_norm": 86.37085723876953,
      "learning_rate": 2.51548946716233e-05,
      "loss": 9.7092,
      "step": 1320
    },
    {
      "epoch": 4.941612604263207,
      "grad_norm": 65.88274383544922,
      "learning_rate": 2.509293680297398e-05,
      "loss": 8.1475,
      "step": 1330
    },
    {
      "epoch": 4.978683966635774,
      "grad_norm": 53.252742767333984,
      "learning_rate": 2.503097893432466e-05,
      "loss": 9.3603,
      "step": 1340
    },
    {
      "epoch": 5.014828544949027,
      "grad_norm": 182.35781860351562,
      "learning_rate": 2.4969021065675342e-05,
      "loss": 7.6621,
      "step": 1350
    },
    {
      "epoch": 5.014828544949027,
      "eval_loss": 2.8308892250061035,
      "eval_runtime": 34.1063,
      "eval_samples_per_second": 47.44,
      "eval_steps_per_second": 6.802,
      "step": 1350
    },
    {
      "epoch": 5.051899907321594,
      "grad_norm": 324.6659240722656,
      "learning_rate": 2.4907063197026023e-05,
      "loss": 9.2301,
      "step": 1360
    },
    {
      "epoch": 5.088971269694161,
      "grad_norm": 4839.4453125,
      "learning_rate": 2.4845105328376707e-05,
      "loss": 9.7789,
      "step": 1370
    },
    {
      "epoch": 5.126042632066729,
      "grad_norm": 408588.40625,
      "learning_rate": 2.4783147459727385e-05,
      "loss": 9.5359,
      "step": 1380
    },
    {
      "epoch": 5.163113994439295,
      "grad_norm": 932.5144653320312,
      "learning_rate": 2.4721189591078066e-05,
      "loss": 10.8065,
      "step": 1390
    },
    {
      "epoch": 5.200185356811863,
      "grad_norm": 148.0634765625,
      "learning_rate": 2.4659231722428747e-05,
      "loss": 10.0149,
      "step": 1400
    },
    {
      "epoch": 5.23725671918443,
      "grad_norm": 1214.7279052734375,
      "learning_rate": 2.459727385377943e-05,
      "loss": 10.2582,
      "step": 1410
    },
    {
      "epoch": 5.274328081556997,
      "grad_norm": 188.14292907714844,
      "learning_rate": 2.4535315985130112e-05,
      "loss": 10.16,
      "step": 1420
    },
    {
      "epoch": 5.311399443929565,
      "grad_norm": 388.7134094238281,
      "learning_rate": 2.4473358116480793e-05,
      "loss": 10.0763,
      "step": 1430
    },
    {
      "epoch": 5.348470806302132,
      "grad_norm": 270.61279296875,
      "learning_rate": 2.4411400247831474e-05,
      "loss": 9.5737,
      "step": 1440
    },
    {
      "epoch": 5.385542168674699,
      "grad_norm": 1637.854736328125,
      "learning_rate": 2.4349442379182155e-05,
      "loss": 10.4816,
      "step": 1450
    },
    {
      "epoch": 5.422613531047266,
      "grad_norm": 1118.7012939453125,
      "learning_rate": 2.428748451053284e-05,
      "loss": 8.6687,
      "step": 1460
    },
    {
      "epoch": 5.459684893419833,
      "grad_norm": 788.9707641601562,
      "learning_rate": 2.422552664188352e-05,
      "loss": 8.4066,
      "step": 1470
    },
    {
      "epoch": 5.4967562557924,
      "grad_norm": 185.22950744628906,
      "learning_rate": 2.41635687732342e-05,
      "loss": 9.386,
      "step": 1480
    },
    {
      "epoch": 5.533827618164968,
      "grad_norm": 257.726806640625,
      "learning_rate": 2.4101610904584883e-05,
      "loss": 8.3911,
      "step": 1490
    },
    {
      "epoch": 5.570898980537534,
      "grad_norm": 867.30908203125,
      "learning_rate": 2.4039653035935564e-05,
      "loss": 8.8025,
      "step": 1500
    },
    {
      "epoch": 5.570898980537534,
      "eval_loss": 2.5407650470733643,
      "eval_runtime": 34.0934,
      "eval_samples_per_second": 47.458,
      "eval_steps_per_second": 6.805,
      "step": 1500
    },
    {
      "epoch": 5.607970342910102,
      "grad_norm": 68.35711669921875,
      "learning_rate": 2.3977695167286248e-05,
      "loss": 8.7939,
      "step": 1510
    },
    {
      "epoch": 5.645041705282669,
      "grad_norm": 1419.1650390625,
      "learning_rate": 2.391573729863693e-05,
      "loss": 9.0903,
      "step": 1520
    },
    {
      "epoch": 5.682113067655236,
      "grad_norm": 60.88732147216797,
      "learning_rate": 2.385377942998761e-05,
      "loss": 8.9878,
      "step": 1530
    },
    {
      "epoch": 5.719184430027804,
      "grad_norm": 61.50148010253906,
      "learning_rate": 2.379182156133829e-05,
      "loss": 8.8642,
      "step": 1540
    },
    {
      "epoch": 5.7562557924003706,
      "grad_norm": 73.9307861328125,
      "learning_rate": 2.3729863692688972e-05,
      "loss": 8.8625,
      "step": 1550
    },
    {
      "epoch": 5.793327154772938,
      "grad_norm": 82.28329467773438,
      "learning_rate": 2.3667905824039656e-05,
      "loss": 8.4105,
      "step": 1560
    },
    {
      "epoch": 5.830398517145505,
      "grad_norm": 83.54141235351562,
      "learning_rate": 2.3605947955390337e-05,
      "loss": 9.0163,
      "step": 1570
    },
    {
      "epoch": 5.867469879518072,
      "grad_norm": 66.59397888183594,
      "learning_rate": 2.3543990086741015e-05,
      "loss": 8.8947,
      "step": 1580
    },
    {
      "epoch": 5.904541241890639,
      "grad_norm": 55.8841667175293,
      "learning_rate": 2.3482032218091696e-05,
      "loss": 8.5647,
      "step": 1590
    },
    {
      "epoch": 5.941612604263207,
      "grad_norm": 74.56892395019531,
      "learning_rate": 2.3420074349442377e-05,
      "loss": 7.7047,
      "step": 1600
    },
    {
      "epoch": 5.978683966635774,
      "grad_norm": 106.54405212402344,
      "learning_rate": 2.335811648079306e-05,
      "loss": 8.1484,
      "step": 1610
    },
    {
      "epoch": 6.014828544949027,
      "grad_norm": 107.8216781616211,
      "learning_rate": 2.3296158612143742e-05,
      "loss": 8.4079,
      "step": 1620
    },
    {
      "epoch": 6.051899907321594,
      "grad_norm": 100.98133087158203,
      "learning_rate": 2.3234200743494423e-05,
      "loss": 8.5027,
      "step": 1630
    },
    {
      "epoch": 6.088971269694161,
      "grad_norm": 67.31327819824219,
      "learning_rate": 2.3172242874845104e-05,
      "loss": 8.1805,
      "step": 1640
    },
    {
      "epoch": 6.126042632066729,
      "grad_norm": 61.10900115966797,
      "learning_rate": 2.311028500619579e-05,
      "loss": 8.4519,
      "step": 1650
    },
    {
      "epoch": 6.126042632066729,
      "eval_loss": 2.590090036392212,
      "eval_runtime": 34.1392,
      "eval_samples_per_second": 47.394,
      "eval_steps_per_second": 6.796,
      "step": 1650
    },
    {
      "epoch": 6.163113994439295,
      "grad_norm": 60.91183090209961,
      "learning_rate": 2.304832713754647e-05,
      "loss": 9.062,
      "step": 1660
    },
    {
      "epoch": 6.200185356811863,
      "grad_norm": 76.2873306274414,
      "learning_rate": 2.298636926889715e-05,
      "loss": 8.8499,
      "step": 1670
    },
    {
      "epoch": 6.23725671918443,
      "grad_norm": 74.2361068725586,
      "learning_rate": 2.2924411400247832e-05,
      "loss": 8.6576,
      "step": 1680
    },
    {
      "epoch": 6.274328081556997,
      "grad_norm": 84.99951934814453,
      "learning_rate": 2.2862453531598513e-05,
      "loss": 8.4652,
      "step": 1690
    },
    {
      "epoch": 6.311399443929565,
      "grad_norm": 86.40947723388672,
      "learning_rate": 2.2800495662949197e-05,
      "loss": 9.0782,
      "step": 1700
    },
    {
      "epoch": 6.348470806302132,
      "grad_norm": 75.45568084716797,
      "learning_rate": 2.2738537794299878e-05,
      "loss": 8.1532,
      "step": 1710
    },
    {
      "epoch": 6.385542168674699,
      "grad_norm": 86.65908813476562,
      "learning_rate": 2.267657992565056e-05,
      "loss": 8.5185,
      "step": 1720
    },
    {
      "epoch": 6.422613531047266,
      "grad_norm": 62.620758056640625,
      "learning_rate": 2.261462205700124e-05,
      "loss": 9.5908,
      "step": 1730
    },
    {
      "epoch": 6.459684893419833,
      "grad_norm": 73.63095092773438,
      "learning_rate": 2.255266418835192e-05,
      "loss": 8.4188,
      "step": 1740
    },
    {
      "epoch": 6.4967562557924,
      "grad_norm": 55.175045013427734,
      "learning_rate": 2.2490706319702606e-05,
      "loss": 8.1885,
      "step": 1750
    },
    {
      "epoch": 6.533827618164968,
      "grad_norm": 47.647621154785156,
      "learning_rate": 2.2428748451053287e-05,
      "loss": 8.7666,
      "step": 1760
    },
    {
      "epoch": 6.570898980537534,
      "grad_norm": 191.0419158935547,
      "learning_rate": 2.2366790582403964e-05,
      "loss": 8.6105,
      "step": 1770
    },
    {
      "epoch": 6.607970342910102,
      "grad_norm": 57.81939697265625,
      "learning_rate": 2.2304832713754645e-05,
      "loss": 8.664,
      "step": 1780
    },
    {
      "epoch": 6.645041705282669,
      "grad_norm": 85.04927825927734,
      "learning_rate": 2.2242874845105326e-05,
      "loss": 8.5294,
      "step": 1790
    },
    {
      "epoch": 6.682113067655236,
      "grad_norm": 55.604122161865234,
      "learning_rate": 2.218091697645601e-05,
      "loss": 9.1857,
      "step": 1800
    },
    {
      "epoch": 6.682113067655236,
      "eval_loss": 2.497358560562134,
      "eval_runtime": 34.0441,
      "eval_samples_per_second": 47.527,
      "eval_steps_per_second": 6.815,
      "step": 1800
    },
    {
      "epoch": 6.719184430027804,
      "grad_norm": 75.21812438964844,
      "learning_rate": 2.211895910780669e-05,
      "loss": 8.7053,
      "step": 1810
    },
    {
      "epoch": 6.7562557924003706,
      "grad_norm": 85.76676940917969,
      "learning_rate": 2.2057001239157373e-05,
      "loss": 8.1428,
      "step": 1820
    },
    {
      "epoch": 6.793327154772938,
      "grad_norm": 69.85575866699219,
      "learning_rate": 2.1995043370508054e-05,
      "loss": 8.4988,
      "step": 1830
    },
    {
      "epoch": 6.830398517145505,
      "grad_norm": 66.85325622558594,
      "learning_rate": 2.1933085501858735e-05,
      "loss": 8.4147,
      "step": 1840
    },
    {
      "epoch": 6.867469879518072,
      "grad_norm": 60.645294189453125,
      "learning_rate": 2.187112763320942e-05,
      "loss": 9.069,
      "step": 1850
    },
    {
      "epoch": 6.904541241890639,
      "grad_norm": 77.59921264648438,
      "learning_rate": 2.18091697645601e-05,
      "loss": 8.4405,
      "step": 1860
    },
    {
      "epoch": 6.941612604263207,
      "grad_norm": 56.27996826171875,
      "learning_rate": 2.174721189591078e-05,
      "loss": 9.2157,
      "step": 1870
    },
    {
      "epoch": 6.978683966635774,
      "grad_norm": 64.18400573730469,
      "learning_rate": 2.1685254027261462e-05,
      "loss": 9.5492,
      "step": 1880
    },
    {
      "epoch": 7.014828544949027,
      "grad_norm": 76.78484344482422,
      "learning_rate": 2.1623296158612143e-05,
      "loss": 8.1325,
      "step": 1890
    },
    {
      "epoch": 7.051899907321594,
      "grad_norm": 77.69871520996094,
      "learning_rate": 2.1561338289962827e-05,
      "loss": 8.324,
      "step": 1900
    },
    {
      "epoch": 7.088971269694161,
      "grad_norm": 102.43340301513672,
      "learning_rate": 2.149938042131351e-05,
      "loss": 7.7097,
      "step": 1910
    },
    {
      "epoch": 7.126042632066729,
      "grad_norm": 114.0189208984375,
      "learning_rate": 2.143742255266419e-05,
      "loss": 8.0982,
      "step": 1920
    },
    {
      "epoch": 7.163113994439295,
      "grad_norm": 66.60838317871094,
      "learning_rate": 2.137546468401487e-05,
      "loss": 7.7669,
      "step": 1930
    },
    {
      "epoch": 7.200185356811863,
      "grad_norm": 252.72503662109375,
      "learning_rate": 2.1313506815365555e-05,
      "loss": 7.809,
      "step": 1940
    },
    {
      "epoch": 7.23725671918443,
      "grad_norm": 91.0379409790039,
      "learning_rate": 2.1251548946716236e-05,
      "loss": 7.9729,
      "step": 1950
    },
    {
      "epoch": 7.23725671918443,
      "eval_loss": 2.610764741897583,
      "eval_runtime": 34.0774,
      "eval_samples_per_second": 47.48,
      "eval_steps_per_second": 6.808,
      "step": 1950
    },
    {
      "epoch": 7.274328081556997,
      "grad_norm": 96.09555053710938,
      "learning_rate": 2.1189591078066917e-05,
      "loss": 8.2125,
      "step": 1960
    },
    {
      "epoch": 7.311399443929565,
      "grad_norm": 68.51132202148438,
      "learning_rate": 2.1127633209417594e-05,
      "loss": 7.7403,
      "step": 1970
    },
    {
      "epoch": 7.348470806302132,
      "grad_norm": 94.45308685302734,
      "learning_rate": 2.1065675340768275e-05,
      "loss": 7.5494,
      "step": 1980
    },
    {
      "epoch": 7.385542168674699,
      "grad_norm": 56.785587310791016,
      "learning_rate": 2.100371747211896e-05,
      "loss": 8.2821,
      "step": 1990
    },
    {
      "epoch": 7.422613531047266,
      "grad_norm": 91.59173583984375,
      "learning_rate": 2.094175960346964e-05,
      "loss": 8.1644,
      "step": 2000
    },
    {
      "epoch": 7.459684893419833,
      "grad_norm": 78.57572174072266,
      "learning_rate": 2.0879801734820322e-05,
      "loss": 8.1664,
      "step": 2010
    },
    {
      "epoch": 7.4967562557924,
      "grad_norm": 71.83062744140625,
      "learning_rate": 2.0817843866171003e-05,
      "loss": 8.5876,
      "step": 2020
    },
    {
      "epoch": 7.533827618164968,
      "grad_norm": 74.75129699707031,
      "learning_rate": 2.0755885997521684e-05,
      "loss": 8.2753,
      "step": 2030
    },
    {
      "epoch": 7.570898980537534,
      "grad_norm": 96.26950073242188,
      "learning_rate": 2.0693928128872368e-05,
      "loss": 9.2057,
      "step": 2040
    },
    {
      "epoch": 7.607970342910102,
      "grad_norm": 96.87144470214844,
      "learning_rate": 2.063197026022305e-05,
      "loss": 8.0052,
      "step": 2050
    },
    {
      "epoch": 7.645041705282669,
      "grad_norm": 83.44422912597656,
      "learning_rate": 2.057001239157373e-05,
      "loss": 8.4954,
      "step": 2060
    },
    {
      "epoch": 7.682113067655236,
      "grad_norm": 86.1534194946289,
      "learning_rate": 2.050805452292441e-05,
      "loss": 8.0325,
      "step": 2070
    },
    {
      "epoch": 7.719184430027804,
      "grad_norm": 89.11744689941406,
      "learning_rate": 2.0446096654275092e-05,
      "loss": 8.2934,
      "step": 2080
    },
    {
      "epoch": 7.7562557924003706,
      "grad_norm": 60.81666946411133,
      "learning_rate": 2.0384138785625777e-05,
      "loss": 9.4019,
      "step": 2090
    },
    {
      "epoch": 7.793327154772938,
      "grad_norm": 82.93228912353516,
      "learning_rate": 2.0322180916976458e-05,
      "loss": 8.874,
      "step": 2100
    },
    {
      "epoch": 7.793327154772938,
      "eval_loss": 2.452894926071167,
      "eval_runtime": 34.0684,
      "eval_samples_per_second": 47.493,
      "eval_steps_per_second": 6.81,
      "step": 2100
    },
    {
      "epoch": 7.830398517145505,
      "grad_norm": 62.004817962646484,
      "learning_rate": 2.026022304832714e-05,
      "loss": 8.0985,
      "step": 2110
    },
    {
      "epoch": 7.867469879518072,
      "grad_norm": 72.79752349853516,
      "learning_rate": 2.019826517967782e-05,
      "loss": 8.9063,
      "step": 2120
    },
    {
      "epoch": 7.904541241890639,
      "grad_norm": 1507.2855224609375,
      "learning_rate": 2.01363073110285e-05,
      "loss": 8.6332,
      "step": 2130
    },
    {
      "epoch": 7.941612604263207,
      "grad_norm": 134.11378479003906,
      "learning_rate": 2.0074349442379185e-05,
      "loss": 9.4106,
      "step": 2140
    },
    {
      "epoch": 7.978683966635774,
      "grad_norm": 72.19046783447266,
      "learning_rate": 2.0012391573729866e-05,
      "loss": 8.2304,
      "step": 2150
    },
    {
      "epoch": 8.014828544949028,
      "grad_norm": 57.130165100097656,
      "learning_rate": 1.9950433705080544e-05,
      "loss": 7.9675,
      "step": 2160
    },
    {
      "epoch": 8.051899907321594,
      "grad_norm": 72.58036041259766,
      "learning_rate": 1.9888475836431225e-05,
      "loss": 7.602,
      "step": 2170
    },
    {
      "epoch": 8.088971269694161,
      "grad_norm": 91.77513885498047,
      "learning_rate": 1.982651796778191e-05,
      "loss": 7.9151,
      "step": 2180
    },
    {
      "epoch": 8.126042632066728,
      "grad_norm": 111.52696228027344,
      "learning_rate": 1.976456009913259e-05,
      "loss": 7.5653,
      "step": 2190
    },
    {
      "epoch": 8.163113994439296,
      "grad_norm": 74.9671401977539,
      "learning_rate": 1.970260223048327e-05,
      "loss": 7.1567,
      "step": 2200
    },
    {
      "epoch": 8.200185356811863,
      "grad_norm": 124.09498596191406,
      "learning_rate": 1.9640644361833952e-05,
      "loss": 8.1708,
      "step": 2210
    },
    {
      "epoch": 8.23725671918443,
      "grad_norm": 95.21754455566406,
      "learning_rate": 1.9578686493184633e-05,
      "loss": 7.0013,
      "step": 2220
    },
    {
      "epoch": 8.274328081556996,
      "grad_norm": 135.3839569091797,
      "learning_rate": 1.9516728624535318e-05,
      "loss": 7.5999,
      "step": 2230
    },
    {
      "epoch": 8.311399443929565,
      "grad_norm": 89.70751190185547,
      "learning_rate": 1.9454770755886e-05,
      "loss": 7.2028,
      "step": 2240
    },
    {
      "epoch": 8.348470806302132,
      "grad_norm": 70.08137512207031,
      "learning_rate": 1.939281288723668e-05,
      "loss": 8.3759,
      "step": 2250
    },
    {
      "epoch": 8.348470806302132,
      "eval_loss": 2.746619939804077,
      "eval_runtime": 34.0831,
      "eval_samples_per_second": 47.472,
      "eval_steps_per_second": 6.807,
      "step": 2250
    },
    {
      "epoch": 8.400370713623726,
      "grad_norm": 69.07305145263672,
      "learning_rate": 1.933085501858736e-05,
      "loss": 8.9083,
      "step": 2260
    },
    {
      "epoch": 8.437442075996293,
      "grad_norm": 66.43407440185547,
      "learning_rate": 1.926889714993804e-05,
      "loss": 6.9349,
      "step": 2270
    },
    {
      "epoch": 8.47451343836886,
      "grad_norm": 167.85784912109375,
      "learning_rate": 1.9206939281288726e-05,
      "loss": 7.5041,
      "step": 2280
    },
    {
      "epoch": 8.511584800741428,
      "grad_norm": 173.38304138183594,
      "learning_rate": 1.9144981412639407e-05,
      "loss": 7.3744,
      "step": 2290
    },
    {
      "epoch": 8.548656163113995,
      "grad_norm": 80.71511840820312,
      "learning_rate": 1.9083023543990088e-05,
      "loss": 8.6541,
      "step": 2300
    },
    {
      "epoch": 8.585727525486561,
      "grad_norm": 82.77088928222656,
      "learning_rate": 1.902106567534077e-05,
      "loss": 8.6305,
      "step": 2310
    },
    {
      "epoch": 8.62279888785913,
      "grad_norm": 55.3135871887207,
      "learning_rate": 1.895910780669145e-05,
      "loss": 8.2577,
      "step": 2320
    },
    {
      "epoch": 8.659870250231696,
      "grad_norm": 64.61204528808594,
      "learning_rate": 1.8897149938042134e-05,
      "loss": 7.6382,
      "step": 2330
    },
    {
      "epoch": 8.696941612604263,
      "grad_norm": 77.9918212890625,
      "learning_rate": 1.8835192069392815e-05,
      "loss": 8.114,
      "step": 2340
    },
    {
      "epoch": 8.73401297497683,
      "grad_norm": 50.139678955078125,
      "learning_rate": 1.8773234200743496e-05,
      "loss": 7.8875,
      "step": 2350
    },
    {
      "epoch": 8.771084337349398,
      "grad_norm": 241.43588256835938,
      "learning_rate": 1.8711276332094174e-05,
      "loss": 7.0444,
      "step": 2360
    },
    {
      "epoch": 8.808155699721965,
      "grad_norm": 133.25645446777344,
      "learning_rate": 1.8649318463444855e-05,
      "loss": 7.7393,
      "step": 2370
    },
    {
      "epoch": 8.845227062094532,
      "grad_norm": 94.1192855834961,
      "learning_rate": 1.858736059479554e-05,
      "loss": 8.8284,
      "step": 2380
    },
    {
      "epoch": 8.882298424467098,
      "grad_norm": 83.68805694580078,
      "learning_rate": 1.852540272614622e-05,
      "loss": 7.997,
      "step": 2390
    },
    {
      "epoch": 8.919369786839667,
      "grad_norm": 67.01426696777344,
      "learning_rate": 1.84634448574969e-05,
      "loss": 7.786,
      "step": 2400
    },
    {
      "epoch": 8.919369786839667,
      "eval_loss": 2.6791422367095947,
      "eval_runtime": 34.2417,
      "eval_samples_per_second": 47.252,
      "eval_steps_per_second": 6.775,
      "step": 2400
    },
    {
      "epoch": 8.956441149212234,
      "grad_norm": 72.36073303222656,
      "learning_rate": 1.8401486988847582e-05,
      "loss": 7.6257,
      "step": 2410
    },
    {
      "epoch": 8.9935125115848,
      "grad_norm": 70.3509521484375,
      "learning_rate": 1.8339529120198267e-05,
      "loss": 7.099,
      "step": 2420
    },
    {
      "epoch": 9.03336422613531,
      "grad_norm": 136.34727478027344,
      "learning_rate": 1.8277571251548948e-05,
      "loss": 8.041,
      "step": 2430
    },
    {
      "epoch": 9.070435588507877,
      "grad_norm": 81.48332214355469,
      "learning_rate": 1.821561338289963e-05,
      "loss": 7.8606,
      "step": 2440
    },
    {
      "epoch": 9.107506950880445,
      "grad_norm": 93.1724853515625,
      "learning_rate": 1.815365551425031e-05,
      "loss": 7.8551,
      "step": 2450
    },
    {
      "epoch": 9.144578313253012,
      "grad_norm": 132.82778930664062,
      "learning_rate": 1.809169764560099e-05,
      "loss": 7.3977,
      "step": 2460
    },
    {
      "epoch": 9.181649675625579,
      "grad_norm": 102.73971557617188,
      "learning_rate": 1.8029739776951675e-05,
      "loss": 7.8721,
      "step": 2470
    },
    {
      "epoch": 9.218721037998147,
      "grad_norm": 106.7470932006836,
      "learning_rate": 1.7967781908302356e-05,
      "loss": 7.5839,
      "step": 2480
    },
    {
      "epoch": 9.255792400370714,
      "grad_norm": 84.64981842041016,
      "learning_rate": 1.7905824039653037e-05,
      "loss": 7.1823,
      "step": 2490
    },
    {
      "epoch": 9.29286376274328,
      "grad_norm": 85.59486389160156,
      "learning_rate": 1.7843866171003718e-05,
      "loss": 7.5513,
      "step": 2500
    },
    {
      "epoch": 9.329935125115847,
      "grad_norm": 89.53803253173828,
      "learning_rate": 1.77819083023544e-05,
      "loss": 8.0879,
      "step": 2510
    },
    {
      "epoch": 9.367006487488416,
      "grad_norm": 43.886898040771484,
      "learning_rate": 1.7719950433705084e-05,
      "loss": 7.5694,
      "step": 2520
    },
    {
      "epoch": 9.404077849860982,
      "grad_norm": 73.4898910522461,
      "learning_rate": 1.7657992565055765e-05,
      "loss": 7.3436,
      "step": 2530
    },
    {
      "epoch": 9.44114921223355,
      "grad_norm": 100.42314147949219,
      "learning_rate": 1.7596034696406446e-05,
      "loss": 6.9425,
      "step": 2540
    },
    {
      "epoch": 9.478220574606116,
      "grad_norm": 121.53675079345703,
      "learning_rate": 1.7534076827757123e-05,
      "loss": 7.9461,
      "step": 2550
    },
    {
      "epoch": 9.478220574606116,
      "eval_loss": 2.6608545780181885,
      "eval_runtime": 34.2437,
      "eval_samples_per_second": 47.25,
      "eval_steps_per_second": 6.775,
      "step": 2550
    },
    {
      "epoch": 9.515291936978684,
      "grad_norm": 83.28184509277344,
      "learning_rate": 1.7472118959107804e-05,
      "loss": 6.782,
      "step": 2560
    },
    {
      "epoch": 9.552363299351251,
      "grad_norm": 81.46753692626953,
      "learning_rate": 1.741016109045849e-05,
      "loss": 7.3027,
      "step": 2570
    },
    {
      "epoch": 9.589434661723818,
      "grad_norm": 77.35752868652344,
      "learning_rate": 1.734820322180917e-05,
      "loss": 7.3348,
      "step": 2580
    },
    {
      "epoch": 9.626506024096386,
      "grad_norm": 77.17479705810547,
      "learning_rate": 1.728624535315985e-05,
      "loss": 7.7864,
      "step": 2590
    },
    {
      "epoch": 9.663577386468953,
      "grad_norm": 103.42965698242188,
      "learning_rate": 1.722428748451053e-05,
      "loss": 6.3552,
      "step": 2600
    },
    {
      "epoch": 9.70064874884152,
      "grad_norm": 103.3230972290039,
      "learning_rate": 1.7162329615861213e-05,
      "loss": 7.151,
      "step": 2610
    },
    {
      "epoch": 9.737720111214086,
      "grad_norm": 91.23753356933594,
      "learning_rate": 1.7100371747211897e-05,
      "loss": 6.1664,
      "step": 2620
    },
    {
      "epoch": 9.774791473586655,
      "grad_norm": 68.76666259765625,
      "learning_rate": 1.7038413878562578e-05,
      "loss": 6.0398,
      "step": 2630
    },
    {
      "epoch": 9.811862835959221,
      "grad_norm": 112.56900787353516,
      "learning_rate": 1.697645600991326e-05,
      "loss": 7.0452,
      "step": 2640
    },
    {
      "epoch": 9.848934198331788,
      "grad_norm": 142.0828399658203,
      "learning_rate": 1.691449814126394e-05,
      "loss": 7.2457,
      "step": 2650
    },
    {
      "epoch": 9.886005560704357,
      "grad_norm": 149.853515625,
      "learning_rate": 1.685254027261462e-05,
      "loss": 6.7531,
      "step": 2660
    },
    {
      "epoch": 9.923076923076923,
      "grad_norm": 107.76028442382812,
      "learning_rate": 1.6790582403965305e-05,
      "loss": 6.7149,
      "step": 2670
    },
    {
      "epoch": 9.96014828544949,
      "grad_norm": 83.82595825195312,
      "learning_rate": 1.6728624535315986e-05,
      "loss": 6.4635,
      "step": 2680
    },
    {
      "epoch": 9.997219647822057,
      "grad_norm": 106.68073272705078,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 6.2237,
      "step": 2690
    },
    {
      "epoch": 10.037071362372567,
      "grad_norm": 78.0780029296875,
      "learning_rate": 1.660470879801735e-05,
      "loss": 6.1798,
      "step": 2700
    },
    {
      "epoch": 10.037071362372567,
      "eval_loss": 2.993870735168457,
      "eval_runtime": 34.3527,
      "eval_samples_per_second": 47.1,
      "eval_steps_per_second": 6.753,
      "step": 2700
    },
    {
      "epoch": 10.074142724745135,
      "grad_norm": 177.5696563720703,
      "learning_rate": 1.6542750929368033e-05,
      "loss": 7.2224,
      "step": 2710
    },
    {
      "epoch": 10.111214087117702,
      "grad_norm": 134.7563018798828,
      "learning_rate": 1.6480793060718714e-05,
      "loss": 6.5327,
      "step": 2720
    },
    {
      "epoch": 10.148285449490269,
      "grad_norm": 122.22810363769531,
      "learning_rate": 1.6418835192069395e-05,
      "loss": 7.4686,
      "step": 2730
    },
    {
      "epoch": 10.185356811862835,
      "grad_norm": 137.185302734375,
      "learning_rate": 1.6356877323420076e-05,
      "loss": 6.1404,
      "step": 2740
    },
    {
      "epoch": 10.222428174235404,
      "grad_norm": 100.84989166259766,
      "learning_rate": 1.6294919454770753e-05,
      "loss": 7.0005,
      "step": 2750
    },
    {
      "epoch": 10.25949953660797,
      "grad_norm": 122.00653839111328,
      "learning_rate": 1.6232961586121438e-05,
      "loss": 5.7726,
      "step": 2760
    },
    {
      "epoch": 10.296570898980537,
      "grad_norm": 172.2955780029297,
      "learning_rate": 1.617100371747212e-05,
      "loss": 6.5327,
      "step": 2770
    },
    {
      "epoch": 10.333642261353106,
      "grad_norm": 195.36851501464844,
      "learning_rate": 1.61090458488228e-05,
      "loss": 7.5015,
      "step": 2780
    },
    {
      "epoch": 10.370713623725672,
      "grad_norm": 135.3350372314453,
      "learning_rate": 1.604708798017348e-05,
      "loss": 6.5526,
      "step": 2790
    },
    {
      "epoch": 10.407784986098239,
      "grad_norm": 106.01554870605469,
      "learning_rate": 1.5985130111524162e-05,
      "loss": 6.2078,
      "step": 2800
    },
    {
      "epoch": 10.444856348470806,
      "grad_norm": 104.4194564819336,
      "learning_rate": 1.5923172242874846e-05,
      "loss": 6.1,
      "step": 2810
    },
    {
      "epoch": 10.481927710843374,
      "grad_norm": 118.46540069580078,
      "learning_rate": 1.5861214374225527e-05,
      "loss": 7.1027,
      "step": 2820
    },
    {
      "epoch": 10.51899907321594,
      "grad_norm": 108.23471069335938,
      "learning_rate": 1.5799256505576208e-05,
      "loss": 8.639,
      "step": 2830
    },
    {
      "epoch": 10.556070435588508,
      "grad_norm": 144.46780395507812,
      "learning_rate": 1.573729863692689e-05,
      "loss": 6.9937,
      "step": 2840
    },
    {
      "epoch": 10.593141797961074,
      "grad_norm": 108.4492416381836,
      "learning_rate": 1.567534076827757e-05,
      "loss": 7.2734,
      "step": 2850
    },
    {
      "epoch": 10.593141797961074,
      "eval_loss": 2.8532466888427734,
      "eval_runtime": 34.3677,
      "eval_samples_per_second": 47.079,
      "eval_steps_per_second": 6.751,
      "step": 2850
    },
    {
      "epoch": 10.630213160333643,
      "grad_norm": 143.1586151123047,
      "learning_rate": 1.5613382899628255e-05,
      "loss": 7.6321,
      "step": 2860
    },
    {
      "epoch": 10.66728452270621,
      "grad_norm": 98.63433837890625,
      "learning_rate": 1.5551425030978936e-05,
      "loss": 7.5788,
      "step": 2870
    },
    {
      "epoch": 10.704355885078776,
      "grad_norm": 90.4648666381836,
      "learning_rate": 1.5489467162329617e-05,
      "loss": 6.7864,
      "step": 2880
    },
    {
      "epoch": 10.741427247451345,
      "grad_norm": 141.69007873535156,
      "learning_rate": 1.5427509293680298e-05,
      "loss": 7.4237,
      "step": 2890
    },
    {
      "epoch": 10.778498609823911,
      "grad_norm": 126.74280548095703,
      "learning_rate": 1.536555142503098e-05,
      "loss": 6.9813,
      "step": 2900
    },
    {
      "epoch": 10.815569972196478,
      "grad_norm": 82.08953094482422,
      "learning_rate": 1.5303593556381663e-05,
      "loss": 6.6884,
      "step": 2910
    },
    {
      "epoch": 10.852641334569045,
      "grad_norm": 123.8572998046875,
      "learning_rate": 1.5241635687732344e-05,
      "loss": 6.7464,
      "step": 2920
    },
    {
      "epoch": 10.889712696941613,
      "grad_norm": 132.33477783203125,
      "learning_rate": 1.5179677819083025e-05,
      "loss": 7.7989,
      "step": 2930
    },
    {
      "epoch": 10.92678405931418,
      "grad_norm": 81.7636489868164,
      "learning_rate": 1.5117719950433704e-05,
      "loss": 7.3568,
      "step": 2940
    },
    {
      "epoch": 10.963855421686747,
      "grad_norm": 244.56277465820312,
      "learning_rate": 1.5055762081784385e-05,
      "loss": 8.6706,
      "step": 2950
    },
    {
      "epoch": 11.0,
      "grad_norm": 121.42695617675781,
      "learning_rate": 1.4993804213135068e-05,
      "loss": 6.5687,
      "step": 2960
    },
    {
      "epoch": 11.037071362372567,
      "grad_norm": 145.1683349609375,
      "learning_rate": 1.493184634448575e-05,
      "loss": 5.8992,
      "step": 2970
    },
    {
      "epoch": 11.074142724745135,
      "grad_norm": 109.68660736083984,
      "learning_rate": 1.4869888475836432e-05,
      "loss": 6.4543,
      "step": 2980
    },
    {
      "epoch": 11.111214087117702,
      "grad_norm": 93.12600708007812,
      "learning_rate": 1.4807930607187113e-05,
      "loss": 6.1386,
      "step": 2990
    },
    {
      "epoch": 11.148285449490269,
      "grad_norm": 167.23182678222656,
      "learning_rate": 1.4745972738537794e-05,
      "loss": 6.9047,
      "step": 3000
    },
    {
      "epoch": 11.148285449490269,
      "eval_loss": 2.9146969318389893,
      "eval_runtime": 34.386,
      "eval_samples_per_second": 47.054,
      "eval_steps_per_second": 6.747,
      "step": 3000
    }
  ],
  "logging_steps": 10,
  "max_steps": 5380,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 150,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 7,
  "trial_name": null,
  "trial_params": null
}
|
|