{
  "best_global_step": 4000,
  "best_metric": 0.20675921603003247,
  "best_model_checkpoint": "./SALAMA_C6/checkpoint-4000",
  "epoch": 3.0,
  "eval_steps": 1000,
  "global_step": 4407,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013614703880190605,
      "grad_norm": 0.12137346714735031,
      "learning_rate": 3.8e-07,
      "loss": 0.008,
      "step": 20
    },
    {
      "epoch": 0.02722940776038121,
      "grad_norm": 0.114188052713871,
      "learning_rate": 7.8e-07,
      "loss": 0.0051,
      "step": 40
    },
    {
      "epoch": 0.04084411164057182,
      "grad_norm": 1.0347418785095215,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.0065,
      "step": 60
    },
    {
      "epoch": 0.05445881552076242,
      "grad_norm": 1.6876862049102783,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 0.0086,
      "step": 80
    },
    {
      "epoch": 0.06807351940095303,
      "grad_norm": 0.44940465688705444,
      "learning_rate": 1.98e-06,
      "loss": 0.0058,
      "step": 100
    },
    {
      "epoch": 0.08168822328114364,
      "grad_norm": 0.12510177493095398,
      "learning_rate": 2.38e-06,
      "loss": 0.007,
      "step": 120
    },
    {
      "epoch": 0.09530292716133425,
      "grad_norm": 0.20447663962841034,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.0076,
      "step": 140
    },
    {
      "epoch": 0.10891763104152484,
      "grad_norm": 0.5220006108283997,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.0049,
      "step": 160
    },
    {
      "epoch": 0.12253233492171545,
      "grad_norm": 1.6316051483154297,
      "learning_rate": 3.58e-06,
      "loss": 0.0056,
      "step": 180
    },
    {
      "epoch": 0.13614703880190607,
      "grad_norm": 0.33280882239341736,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.0046,
      "step": 200
    },
    {
      "epoch": 0.14976174268209666,
      "grad_norm": 1.9523682594299316,
      "learning_rate": 4.38e-06,
      "loss": 0.0059,
      "step": 220
    },
    {
      "epoch": 0.16337644656228728,
      "grad_norm": 2.4847404956817627,
      "learning_rate": 4.78e-06,
      "loss": 0.0102,
      "step": 240
    },
    {
      "epoch": 0.17699115044247787,
      "grad_norm": 1.8841696977615356,
      "learning_rate": 5.18e-06,
      "loss": 0.0102,
      "step": 260
    },
    {
      "epoch": 0.1906058543226685,
      "grad_norm": 0.3495821952819824,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.0112,
      "step": 280
    },
    {
      "epoch": 0.2042205582028591,
      "grad_norm": 0.7233176231384277,
      "learning_rate": 5.98e-06,
      "loss": 0.0099,
      "step": 300
    },
    {
      "epoch": 0.21783526208304968,
      "grad_norm": 1.7731997966766357,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.0101,
      "step": 320
    },
    {
      "epoch": 0.2314499659632403,
      "grad_norm": 1.6149709224700928,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.0158,
      "step": 340
    },
    {
      "epoch": 0.2450646698434309,
      "grad_norm": 1.7614096403121948,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.0149,
      "step": 360
    },
    {
      "epoch": 0.2586793737236215,
      "grad_norm": 1.063245177268982,
      "learning_rate": 7.58e-06,
      "loss": 0.0081,
      "step": 380
    },
    {
      "epoch": 0.27229407760381213,
      "grad_norm": 2.128750801086426,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.0101,
      "step": 400
    },
    {
      "epoch": 0.2859087814840027,
      "grad_norm": 4.583016872406006,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.0161,
      "step": 420
    },
    {
      "epoch": 0.2995234853641933,
      "grad_norm": 2.7598137855529785,
      "learning_rate": 8.78e-06,
      "loss": 0.0155,
      "step": 440
    },
    {
      "epoch": 0.3131381892443839,
      "grad_norm": 1.4036035537719727,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.0126,
      "step": 460
    },
    {
      "epoch": 0.32675289312457456,
      "grad_norm": 2.3361454010009766,
      "learning_rate": 9.58e-06,
      "loss": 0.0182,
      "step": 480
    },
    {
      "epoch": 0.34036759700476515,
      "grad_norm": 4.159565448760986,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.0132,
      "step": 500
    },
    {
      "epoch": 0.35398230088495575,
      "grad_norm": 2.0892586708068848,
      "learning_rate": 9.95136933708728e-06,
      "loss": 0.0185,
      "step": 520
    },
    {
      "epoch": 0.36759700476514634,
      "grad_norm": 4.004888534545898,
      "learning_rate": 9.900179165600206e-06,
      "loss": 0.012,
      "step": 540
    },
    {
      "epoch": 0.381211708645337,
      "grad_norm": 2.4637577533721924,
      "learning_rate": 9.848988994113131e-06,
      "loss": 0.0162,
      "step": 560
    },
    {
      "epoch": 0.3948264125255276,
      "grad_norm": 2.228325128555298,
      "learning_rate": 9.797798822626056e-06,
      "loss": 0.031,
      "step": 580
    },
    {
      "epoch": 0.4084411164057182,
      "grad_norm": 2.1333324909210205,
      "learning_rate": 9.746608651138983e-06,
      "loss": 0.0158,
      "step": 600
    },
    {
      "epoch": 0.42205582028590877,
      "grad_norm": 3.556786060333252,
      "learning_rate": 9.695418479651908e-06,
      "loss": 0.0141,
      "step": 620
    },
    {
      "epoch": 0.43567052416609936,
      "grad_norm": 1.8913724422454834,
      "learning_rate": 9.644228308164833e-06,
      "loss": 0.0187,
      "step": 640
    },
    {
      "epoch": 0.44928522804629,
      "grad_norm": 2.2834181785583496,
      "learning_rate": 9.59303813667776e-06,
      "loss": 0.0195,
      "step": 660
    },
    {
      "epoch": 0.4628999319264806,
      "grad_norm": 1.7960888147354126,
      "learning_rate": 9.541847965190683e-06,
      "loss": 0.0184,
      "step": 680
    },
    {
      "epoch": 0.4765146358066712,
      "grad_norm": 3.7277791500091553,
      "learning_rate": 9.49065779370361e-06,
      "loss": 0.0147,
      "step": 700
    },
    {
      "epoch": 0.4901293396868618,
      "grad_norm": 3.077613353729248,
      "learning_rate": 9.439467622216535e-06,
      "loss": 0.0191,
      "step": 720
    },
    {
      "epoch": 0.5037440435670524,
      "grad_norm": 3.0859522819519043,
      "learning_rate": 9.38827745072946e-06,
      "loss": 0.0207,
      "step": 740
    },
    {
      "epoch": 0.517358747447243,
      "grad_norm": 3.4750730991363525,
      "learning_rate": 9.337087279242385e-06,
      "loss": 0.0219,
      "step": 760
    },
    {
      "epoch": 0.5309734513274337,
      "grad_norm": 3.5851595401763916,
      "learning_rate": 9.285897107755312e-06,
      "loss": 0.0177,
      "step": 780
    },
    {
      "epoch": 0.5445881552076243,
      "grad_norm": 1.2438093423843384,
      "learning_rate": 9.234706936268237e-06,
      "loss": 0.0163,
      "step": 800
    },
    {
      "epoch": 0.5582028590878149,
      "grad_norm": 2.504617691040039,
      "learning_rate": 9.183516764781162e-06,
      "loss": 0.0175,
      "step": 820
    },
    {
      "epoch": 0.5718175629680055,
      "grad_norm": 2.9373462200164795,
      "learning_rate": 9.132326593294089e-06,
      "loss": 0.0188,
      "step": 840
    },
    {
      "epoch": 0.585432266848196,
      "grad_norm": 1.7320044040679932,
      "learning_rate": 9.081136421807014e-06,
      "loss": 0.0216,
      "step": 860
    },
    {
      "epoch": 0.5990469707283866,
      "grad_norm": 3.027221202850342,
      "learning_rate": 9.02994625031994e-06,
      "loss": 0.0211,
      "step": 880
    },
    {
      "epoch": 0.6126616746085772,
      "grad_norm": 2.3604636192321777,
      "learning_rate": 8.978756078832866e-06,
      "loss": 0.0224,
      "step": 900
    },
    {
      "epoch": 0.6262763784887678,
      "grad_norm": 3.7307496070861816,
      "learning_rate": 8.92756590734579e-06,
      "loss": 0.016,
      "step": 920
    },
    {
      "epoch": 0.6398910823689585,
      "grad_norm": 3.3601698875427246,
      "learning_rate": 8.876375735858716e-06,
      "loss": 0.0216,
      "step": 940
    },
    {
      "epoch": 0.6535057862491491,
      "grad_norm": 2.8738534450531006,
      "learning_rate": 8.825185564371641e-06,
      "loss": 0.0219,
      "step": 960
    },
    {
      "epoch": 0.6671204901293397,
      "grad_norm": 3.381775379180908,
      "learning_rate": 8.773995392884566e-06,
      "loss": 0.0166,
      "step": 980
    },
    {
      "epoch": 0.6807351940095303,
      "grad_norm": 0.9475630521774292,
      "learning_rate": 8.722805221397493e-06,
      "loss": 0.0148,
      "step": 1000
    },
    {
      "epoch": 0.6807351940095303,
      "eval_loss": 0.01398420799523592,
      "eval_runtime": 4342.5429,
      "eval_samples_per_second": 2.706,
      "eval_steps_per_second": 0.338,
      "eval_wer": 1.1414633216773222,
      "step": 1000
    },
    {
      "epoch": 0.6943498978897209,
      "grad_norm": 5.041502952575684,
      "learning_rate": 8.671615049910418e-06,
      "loss": 0.0175,
      "step": 1020
    },
    {
      "epoch": 0.7079646017699115,
      "grad_norm": 2.7368288040161133,
      "learning_rate": 8.620424878423343e-06,
      "loss": 0.0218,
      "step": 1040
    },
    {
      "epoch": 0.7215793056501021,
      "grad_norm": 2.3039634227752686,
      "learning_rate": 8.56923470693627e-06,
      "loss": 0.0191,
      "step": 1060
    },
    {
      "epoch": 0.7351940095302927,
      "grad_norm": 2.662198543548584,
      "learning_rate": 8.518044535449195e-06,
      "loss": 0.017,
      "step": 1080
    },
    {
      "epoch": 0.7488087134104833,
      "grad_norm": 1.5759329795837402,
      "learning_rate": 8.46685436396212e-06,
      "loss": 0.0253,
      "step": 1100
    },
    {
      "epoch": 0.762423417290674,
      "grad_norm": 4.4511542320251465,
      "learning_rate": 8.415664192475045e-06,
      "loss": 0.0214,
      "step": 1120
    },
    {
      "epoch": 0.7760381211708646,
      "grad_norm": 2.462972402572632,
      "learning_rate": 8.36447402098797e-06,
      "loss": 0.0174,
      "step": 1140
    },
    {
      "epoch": 0.7896528250510552,
      "grad_norm": 2.2568044662475586,
      "learning_rate": 8.313283849500895e-06,
      "loss": 0.023,
      "step": 1160
    },
    {
      "epoch": 0.8032675289312458,
      "grad_norm": 2.2731740474700928,
      "learning_rate": 8.262093678013822e-06,
      "loss": 0.0194,
      "step": 1180
    },
    {
      "epoch": 0.8168822328114363,
      "grad_norm": 1.3251385688781738,
      "learning_rate": 8.210903506526747e-06,
      "loss": 0.0188,
      "step": 1200
    },
    {
      "epoch": 0.8304969366916269,
      "grad_norm": 1.3998346328735352,
      "learning_rate": 8.159713335039672e-06,
      "loss": 0.0183,
      "step": 1220
    },
    {
      "epoch": 0.8441116405718175,
      "grad_norm": 1.4427942037582397,
      "learning_rate": 8.108523163552599e-06,
      "loss": 0.0184,
      "step": 1240
    },
    {
      "epoch": 0.8577263444520081,
      "grad_norm": 3.7367708683013916,
      "learning_rate": 8.057332992065524e-06,
      "loss": 0.0189,
      "step": 1260
    },
    {
      "epoch": 0.8713410483321987,
      "grad_norm": 3.2188096046447754,
      "learning_rate": 8.00614282057845e-06,
      "loss": 0.018,
      "step": 1280
    },
    {
      "epoch": 0.8849557522123894,
      "grad_norm": 1.9088647365570068,
      "learning_rate": 7.954952649091376e-06,
      "loss": 0.0143,
      "step": 1300
    },
    {
      "epoch": 0.89857045609258,
      "grad_norm": 2.296995162963867,
      "learning_rate": 7.903762477604301e-06,
      "loss": 0.0174,
      "step": 1320
    },
    {
      "epoch": 0.9121851599727706,
      "grad_norm": 2.857095718383789,
      "learning_rate": 7.852572306117226e-06,
      "loss": 0.0203,
      "step": 1340
    },
    {
      "epoch": 0.9257998638529612,
      "grad_norm": 2.143751621246338,
      "learning_rate": 7.801382134630153e-06,
      "loss": 0.0274,
      "step": 1360
    },
    {
      "epoch": 0.9394145677331518,
      "grad_norm": 1.9943230152130127,
      "learning_rate": 7.750191963143076e-06,
      "loss": 0.0148,
      "step": 1380
    },
    {
      "epoch": 0.9530292716133424,
      "grad_norm": 1.7356202602386475,
      "learning_rate": 7.699001791656003e-06,
      "loss": 0.0143,
      "step": 1400
    },
    {
      "epoch": 0.966643975493533,
      "grad_norm": 1.9049350023269653,
      "learning_rate": 7.650371128743282e-06,
      "loss": 0.0164,
      "step": 1420
    },
    {
      "epoch": 0.9802586793737236,
      "grad_norm": 3.2257440090179443,
      "learning_rate": 7.599180957256207e-06,
      "loss": 0.0154,
      "step": 1440
    },
    {
      "epoch": 0.9938733832539143,
      "grad_norm": 2.0696637630462646,
      "learning_rate": 7.5479907857691325e-06,
      "loss": 0.0196,
      "step": 1460
    },
    {
      "epoch": 1.0074880871341048,
      "grad_norm": 1.411531925201416,
      "learning_rate": 7.4968006142820584e-06,
      "loss": 0.015,
      "step": 1480
    },
    {
      "epoch": 1.0211027910142954,
      "grad_norm": 7.875269889831543,
      "learning_rate": 7.4456104427949835e-06,
      "loss": 0.0074,
      "step": 1500
    },
    {
      "epoch": 1.034717494894486,
      "grad_norm": 1.7613502740859985,
      "learning_rate": 7.3944202713079094e-06,
      "loss": 0.0076,
      "step": 1520
    },
    {
      "epoch": 1.0483321987746765,
      "grad_norm": 2.590519905090332,
      "learning_rate": 7.343230099820835e-06,
      "loss": 0.0102,
      "step": 1540
    },
    {
      "epoch": 1.0619469026548674,
      "grad_norm": 1.4597114324569702,
      "learning_rate": 7.2920399283337604e-06,
      "loss": 0.0081,
      "step": 1560
    },
    {
      "epoch": 1.075561606535058,
      "grad_norm": 3.469754934310913,
      "learning_rate": 7.240849756846686e-06,
      "loss": 0.0088,
      "step": 1580
    },
    {
      "epoch": 1.0891763104152485,
      "grad_norm": 1.9216970205307007,
      "learning_rate": 7.1896595853596114e-06,
      "loss": 0.0102,
      "step": 1600
    },
    {
      "epoch": 1.1027910142954391,
      "grad_norm": 2.306184768676758,
      "learning_rate": 7.138469413872537e-06,
      "loss": 0.0135,
      "step": 1620
    },
    {
      "epoch": 1.1164057181756297,
      "grad_norm": 2.4771056175231934,
      "learning_rate": 7.087279242385463e-06,
      "loss": 0.009,
      "step": 1640
    },
    {
      "epoch": 1.1300204220558203,
      "grad_norm": 1.8208683729171753,
      "learning_rate": 7.0360890708983875e-06,
      "loss": 0.011,
      "step": 1660
    },
    {
      "epoch": 1.143635125936011,
      "grad_norm": 1.1904667615890503,
      "learning_rate": 6.9848988994113134e-06,
      "loss": 0.0074,
      "step": 1680
    },
    {
      "epoch": 1.1572498298162015,
      "grad_norm": 1.3536099195480347,
      "learning_rate": 6.9337087279242385e-06,
      "loss": 0.0097,
      "step": 1700
    },
    {
      "epoch": 1.170864533696392,
      "grad_norm": 2.2063775062561035,
      "learning_rate": 6.8825185564371644e-06,
      "loss": 0.0123,
      "step": 1720
    },
    {
      "epoch": 1.1844792375765827,
      "grad_norm": 1.0882831811904907,
      "learning_rate": 6.83132838495009e-06,
      "loss": 0.01,
      "step": 1740
    },
    {
      "epoch": 1.1980939414567733,
      "grad_norm": 0.9484243988990784,
      "learning_rate": 6.7801382134630154e-06,
      "loss": 0.0086,
      "step": 1760
    },
    {
      "epoch": 1.2117086453369639,
      "grad_norm": 2.0926241874694824,
      "learning_rate": 6.728948041975941e-06,
      "loss": 0.01,
      "step": 1780
    },
    {
      "epoch": 1.2253233492171545,
      "grad_norm": 2.288508653640747,
      "learning_rate": 6.6777578704888664e-06,
      "loss": 0.0079,
      "step": 1800
    },
    {
      "epoch": 1.238938053097345,
      "grad_norm": 0.998699963092804,
      "learning_rate": 6.626567699001792e-06,
      "loss": 0.0069,
      "step": 1820
    },
    {
      "epoch": 1.2525527569775359,
      "grad_norm": 0.9781405925750732,
      "learning_rate": 6.575377527514718e-06,
      "loss": 0.0071,
      "step": 1840
    },
    {
      "epoch": 1.2661674608577265,
      "grad_norm": 1.6135525703430176,
      "learning_rate": 6.524187356027643e-06,
      "loss": 0.0087,
      "step": 1860
    },
    {
      "epoch": 1.279782164737917,
      "grad_norm": 1.845056414604187,
      "learning_rate": 6.472997184540569e-06,
      "loss": 0.0068,
      "step": 1880
    },
    {
      "epoch": 1.2933968686181077,
      "grad_norm": 2.4946863651275635,
      "learning_rate": 6.4218070130534935e-06,
      "loss": 0.0072,
      "step": 1900
    },
    {
      "epoch": 1.3070115724982982,
      "grad_norm": 2.3637702465057373,
      "learning_rate": 6.3706168415664194e-06,
      "loss": 0.0058,
      "step": 1920
    },
    {
      "epoch": 1.3206262763784888,
      "grad_norm": 0.9564550518989563,
      "learning_rate": 6.319426670079345e-06,
      "loss": 0.0081,
      "step": 1940
    },
    {
      "epoch": 1.3342409802586794,
      "grad_norm": 2.1385440826416016,
      "learning_rate": 6.2682364985922704e-06,
      "loss": 0.0083,
      "step": 1960
    },
    {
      "epoch": 1.34785568413887,
      "grad_norm": 2.863182306289673,
      "learning_rate": 6.217046327105196e-06,
      "loss": 0.0077,
      "step": 1980
    },
    {
      "epoch": 1.3614703880190606,
      "grad_norm": 1.2685619592666626,
      "learning_rate": 6.1658561556181214e-06,
      "loss": 0.0069,
      "step": 2000
    },
    {
      "epoch": 1.3614703880190606,
      "eval_loss": 0.009049512445926666,
      "eval_runtime": 4354.1558,
      "eval_samples_per_second": 2.699,
      "eval_steps_per_second": 0.337,
      "eval_wer": 0.7107943555686831,
      "step": 2000
    },
    {
      "epoch": 1.3750850918992512,
      "grad_norm": 1.0608856678009033,
      "learning_rate": 6.114665984131047e-06,
      "loss": 0.0105,
      "step": 2020
    },
    {
      "epoch": 1.3886997957794418,
      "grad_norm": 2.16904878616333,
      "learning_rate": 6.063475812643973e-06,
      "loss": 0.0084,
      "step": 2040
    },
    {
      "epoch": 1.4023144996596324,
      "grad_norm": 1.6529253721237183,
      "learning_rate": 6.012285641156898e-06,
      "loss": 0.0072,
      "step": 2060
    },
    {
      "epoch": 1.415929203539823,
      "grad_norm": 1.693932056427002,
      "learning_rate": 5.961095469669824e-06,
      "loss": 0.0073,
      "step": 2080
    },
    {
      "epoch": 1.4295439074200136,
      "grad_norm": 1.8929098844528198,
      "learning_rate": 5.90990529818275e-06,
      "loss": 0.0064,
      "step": 2100
    },
    {
      "epoch": 1.4431586113002042,
      "grad_norm": 1.019668459892273,
      "learning_rate": 5.858715126695675e-06,
      "loss": 0.0063,
      "step": 2120
    },
    {
      "epoch": 1.4567733151803948,
      "grad_norm": 1.3953487873077393,
      "learning_rate": 5.8075249552085995e-06,
      "loss": 0.0069,
      "step": 2140
    },
    {
      "epoch": 1.4703880190605854,
      "grad_norm": 1.8717122077941895,
      "learning_rate": 5.7563347837215254e-06,
      "loss": 0.0064,
      "step": 2160
    },
    {
      "epoch": 1.484002722940776,
      "grad_norm": 0.7715520858764648,
      "learning_rate": 5.705144612234451e-06,
      "loss": 0.008,
      "step": 2180
    },
    {
      "epoch": 1.4976174268209665,
      "grad_norm": 1.3807408809661865,
      "learning_rate": 5.6539544407473765e-06,
      "loss": 0.0103,
      "step": 2200
    },
    {
      "epoch": 1.5112321307011571,
      "grad_norm": 1.8438202142715454,
      "learning_rate": 5.602764269260302e-06,
      "loss": 0.0074,
      "step": 2220
    },
    {
      "epoch": 1.5248468345813477,
      "grad_norm": 0.9440460801124573,
      "learning_rate": 5.551574097773228e-06,
      "loss": 0.0066,
      "step": 2240
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 2.3463940620422363,
      "learning_rate": 5.500383926286153e-06,
      "loss": 0.0073,
      "step": 2260
    },
    {
      "epoch": 1.552076242341729,
      "grad_norm": 1.7601486444473267,
      "learning_rate": 5.449193754799079e-06,
      "loss": 0.0086,
      "step": 2280
    },
    {
      "epoch": 1.5656909462219195,
      "grad_norm": 1.7756719589233398,
      "learning_rate": 5.398003583312005e-06,
      "loss": 0.0067,
      "step": 2300
    },
    {
      "epoch": 1.5793056501021103,
      "grad_norm": 3.199690580368042,
      "learning_rate": 5.34681341182493e-06,
      "loss": 0.0094,
      "step": 2320
    },
    {
      "epoch": 1.592920353982301,
      "grad_norm": 1.2650269269943237,
      "learning_rate": 5.295623240337856e-06,
      "loss": 0.0059,
      "step": 2340
    },
    {
      "epoch": 1.6065350578624915,
      "grad_norm": 1.2948106527328491,
      "learning_rate": 5.244433068850781e-06,
      "loss": 0.0082,
      "step": 2360
    },
    {
      "epoch": 1.620149761742682,
      "grad_norm": 3.1016247272491455,
      "learning_rate": 5.193242897363706e-06,
      "loss": 0.0081,
      "step": 2380
    },
    {
      "epoch": 1.6337644656228727,
      "grad_norm": 2.5490994453430176,
      "learning_rate": 5.1420527258766315e-06,
      "loss": 0.0066,
      "step": 2400
    },
    {
      "epoch": 1.6473791695030633,
      "grad_norm": 1.739235758781433,
      "learning_rate": 5.090862554389557e-06,
      "loss": 0.0073,
      "step": 2420
    },
    {
      "epoch": 1.6609938733832539,
      "grad_norm": 1.5219982862472534,
      "learning_rate": 5.039672382902483e-06,
      "loss": 0.0077,
      "step": 2440
    },
    {
      "epoch": 1.6746085772634445,
      "grad_norm": 1.3322407007217407,
      "learning_rate": 4.988482211415408e-06,
      "loss": 0.0087,
      "step": 2460
    },
    {
      "epoch": 1.6882232811436353,
      "grad_norm": 0.6433055996894836,
      "learning_rate": 4.937292039928334e-06,
      "loss": 0.0072,
      "step": 2480
    },
    {
      "epoch": 1.7018379850238259,
      "grad_norm": 2.2464089393615723,
      "learning_rate": 4.886101868441259e-06,
      "loss": 0.0066,
      "step": 2500
    },
    {
      "epoch": 1.7154526889040165,
      "grad_norm": 1.404597520828247,
      "learning_rate": 4.834911696954185e-06,
      "loss": 0.0058,
      "step": 2520
    },
    {
      "epoch": 1.729067392784207,
      "grad_norm": 1.9167308807373047,
      "learning_rate": 4.78372152546711e-06,
      "loss": 0.007,
      "step": 2540
    },
    {
      "epoch": 1.7426820966643977,
      "grad_norm": 2.3144948482513428,
      "learning_rate": 4.732531353980036e-06,
      "loss": 0.0068,
      "step": 2560
    },
    {
      "epoch": 1.7562968005445883,
      "grad_norm": 0.5864093899726868,
      "learning_rate": 4.681341182492961e-06,
      "loss": 0.0071,
      "step": 2580
    },
    {
      "epoch": 1.7699115044247788,
      "grad_norm": 0.7078450918197632,
      "learning_rate": 4.630151011005887e-06,
      "loss": 0.0095,
      "step": 2600
    },
    {
      "epoch": 1.7835262083049694,
      "grad_norm": 1.575197458267212,
      "learning_rate": 4.578960839518813e-06,
      "loss": 0.006,
      "step": 2620
    },
    {
      "epoch": 1.79714091218516,
      "grad_norm": 2.1332499980926514,
      "learning_rate": 4.527770668031738e-06,
      "loss": 0.0069,
      "step": 2640
    },
    {
      "epoch": 1.8107556160653506,
      "grad_norm": 1.4814813137054443,
      "learning_rate": 4.476580496544663e-06,
      "loss": 0.0065,
      "step": 2660
    },
    {
      "epoch": 1.8243703199455412,
      "grad_norm": 2.0825626850128174,
      "learning_rate": 4.425390325057589e-06,
      "loss": 0.0063,
      "step": 2680
    },
    {
      "epoch": 1.8379850238257318,
      "grad_norm": 1.9306626319885254,
      "learning_rate": 4.374200153570514e-06,
      "loss": 0.0063,
      "step": 2700
    },
    {
      "epoch": 1.8515997277059224,
      "grad_norm": 2.3408215045928955,
      "learning_rate": 4.32300998208344e-06,
      "loss": 0.0102,
      "step": 2720
    },
    {
      "epoch": 1.865214431586113,
      "grad_norm": 1.6280012130737305,
      "learning_rate": 4.271819810596366e-06,
      "loss": 0.0068,
      "step": 2740
    },
    {
      "epoch": 1.8788291354663036,
      "grad_norm": 1.8683923482894897,
      "learning_rate": 4.220629639109291e-06,
      "loss": 0.0054,
      "step": 2760
    },
    {
      "epoch": 1.8924438393464942,
      "grad_norm": 0.4595645070075989,
      "learning_rate": 4.169439467622216e-06,
      "loss": 0.0062,
      "step": 2780
    },
    {
      "epoch": 1.9060585432266848,
      "grad_norm": 1.10299813747406,
      "learning_rate": 4.118249296135142e-06,
      "loss": 0.007,
      "step": 2800
    },
    {
      "epoch": 1.9196732471068754,
      "grad_norm": 3.8968849182128906,
      "learning_rate": 4.067059124648068e-06,
      "loss": 0.0065,
      "step": 2820
    },
    {
      "epoch": 1.933287950987066,
      "grad_norm": 0.46829521656036377,
      "learning_rate": 4.015868953160993e-06,
      "loss": 0.0057,
      "step": 2840
    },
    {
      "epoch": 1.9469026548672566,
      "grad_norm": 0.4478941559791565,
      "learning_rate": 3.964678781673919e-06,
      "loss": 0.0058,
      "step": 2860
    },
    {
      "epoch": 1.9605173587474471,
      "grad_norm": 2.208763599395752,
      "learning_rate": 3.913488610186844e-06,
      "loss": 0.006,
      "step": 2880
    },
    {
      "epoch": 1.9741320626276377,
      "grad_norm": 0.7722469568252563,
      "learning_rate": 3.862298438699769e-06,
      "loss": 0.0061,
      "step": 2900
    },
    {
      "epoch": 1.9877467665078283,
      "grad_norm": 3.8442399501800537,
      "learning_rate": 3.8111082672126953e-06,
      "loss": 0.0063,
      "step": 2920
    },
    {
      "epoch": 2.001361470388019,
      "grad_norm": 0.2542979419231415,
      "learning_rate": 3.759918095725621e-06,
      "loss": 0.0068,
      "step": 2940
    },
    {
      "epoch": 2.0149761742682095,
      "grad_norm": 1.0869641304016113,
      "learning_rate": 3.7087279242385467e-06,
      "loss": 0.003,
      "step": 2960
    },
    {
      "epoch": 2.0285908781484,
      "grad_norm": 0.17512322962284088,
      "learning_rate": 3.6575377527514722e-06,
      "loss": 0.0023,
      "step": 2980
    },
    {
      "epoch": 2.0422055820285907,
      "grad_norm": 1.9495315551757812,
      "learning_rate": 3.6063475812643977e-06,
      "loss": 0.0027,
      "step": 3000
    },
    {
      "epoch": 2.0422055820285907,
      "eval_loss": 0.004855828359723091,
      "eval_runtime": 4349.8041,
      "eval_samples_per_second": 2.701,
      "eval_steps_per_second": 0.338,
      "eval_wer": 0.29727592350861815,
      "step": 3000
    },
    {
      "epoch": 2.0558202859087813,
      "grad_norm": 0.16018439829349518,
      "learning_rate": 3.555157409777323e-06,
      "loss": 0.0025,
      "step": 3020
    },
    {
      "epoch": 2.069434989788972,
      "grad_norm": 0.957599937915802,
      "learning_rate": 3.5039672382902483e-06,
      "loss": 0.0023,
      "step": 3040
    },
    {
      "epoch": 2.0830496936691625,
      "grad_norm": 1.7797317504882812,
      "learning_rate": 3.4527770668031742e-06,
      "loss": 0.0027,
      "step": 3060
    },
    {
      "epoch": 2.096664397549353,
      "grad_norm": 0.6724184155464172,
      "learning_rate": 3.4015868953160997e-06,
      "loss": 0.0031,
      "step": 3080
    },
    {
      "epoch": 2.110279101429544,
      "grad_norm": 0.21301336586475372,
      "learning_rate": 3.3503967238290252e-06,
      "loss": 0.002,
      "step": 3100
    },
    {
      "epoch": 2.1238938053097347,
      "grad_norm": 0.8429257869720459,
      "learning_rate": 3.2992065523419507e-06,
      "loss": 0.0028,
      "step": 3120
    },
    {
      "epoch": 2.1375085091899253,
      "grad_norm": 0.8506041169166565,
      "learning_rate": 3.248016380854876e-06,
      "loss": 0.0023,
      "step": 3140
    },
    {
      "epoch": 2.151123213070116,
      "grad_norm": 0.7095006108283997,
      "learning_rate": 3.1968262093678013e-06,
      "loss": 0.0026,
      "step": 3160
    },
    {
      "epoch": 2.1647379169503065,
      "grad_norm": 0.2413463592529297,
      "learning_rate": 3.1456360378807272e-06,
      "loss": 0.0026,
      "step": 3180
    },
    {
      "epoch": 2.178352620830497,
      "grad_norm": 0.3816224932670593,
      "learning_rate": 3.0944458663936527e-06,
      "loss": 0.0025,
      "step": 3200
    },
    {
      "epoch": 2.1919673247106877,
      "grad_norm": 0.7502841949462891,
      "learning_rate": 3.0432556949065783e-06,
      "loss": 0.002,
      "step": 3220
    },
    {
      "epoch": 2.2055820285908783,
      "grad_norm": 0.24129417538642883,
      "learning_rate": 2.9920655234195038e-06,
      "loss": 0.002,
      "step": 3240
    },
    {
      "epoch": 2.219196732471069,
      "grad_norm": 0.24797920882701874,
      "learning_rate": 2.940875351932429e-06,
      "loss": 0.0018,
      "step": 3260
    },
    {
      "epoch": 2.2328114363512594,
      "grad_norm": 0.13277117908000946,
      "learning_rate": 2.8896851804453548e-06,
      "loss": 0.0028,
      "step": 3280
    },
    {
      "epoch": 2.24642614023145,
      "grad_norm": 0.4421047866344452,
      "learning_rate": 2.8384950089582803e-06,
      "loss": 0.0018,
      "step": 3300
    },
    {
      "epoch": 2.2600408441116406,
      "grad_norm": 0.7155504822731018,
      "learning_rate": 2.7873048374712058e-06,
      "loss": 0.0029,
      "step": 3320
    },
    {
      "epoch": 2.2736555479918312,
      "grad_norm": 0.19938398897647858,
      "learning_rate": 2.7361146659841313e-06,
      "loss": 0.0017,
      "step": 3340
    },
    {
      "epoch": 2.287270251872022,
      "grad_norm": 0.12812410295009613,
      "learning_rate": 2.684924494497057e-06,
      "loss": 0.0049,
      "step": 3360
    },
    {
      "epoch": 2.3008849557522124,
      "grad_norm": 0.19860972464084625,
      "learning_rate": 2.6337343230099823e-06,
      "loss": 0.0021,
      "step": 3380
    },
    {
      "epoch": 2.314499659632403,
      "grad_norm": 0.27632179856300354,
      "learning_rate": 2.5825441515229078e-06,
      "loss": 0.0024,
      "step": 3400
    },
    {
      "epoch": 2.3281143635125936,
      "grad_norm": 0.3863467872142792,
      "learning_rate": 2.5313539800358333e-06,
      "loss": 0.0027,
      "step": 3420
    },
    {
      "epoch": 2.341729067392784,
      "grad_norm": 0.44062647223472595,
      "learning_rate": 2.4801638085487588e-06,
      "loss": 0.0025,
      "step": 3440
    },
    {
      "epoch": 2.355343771272975,
      "grad_norm": 0.354746013879776,
      "learning_rate": 2.4289736370616843e-06,
      "loss": 0.0027,
      "step": 3460
    },
    {
      "epoch": 2.3689584751531654,
      "grad_norm": 2.042172908782959,
      "learning_rate": 2.3777834655746098e-06,
      "loss": 0.0019,
      "step": 3480
    },
    {
      "epoch": 2.382573179033356,
      "grad_norm": 1.107994556427002,
      "learning_rate": 2.3265932940875357e-06,
      "loss": 0.0021,
      "step": 3500
    },
    {
      "epoch": 2.3961878829135466,
      "grad_norm": 0.3227066397666931,
      "learning_rate": 2.2754031226004608e-06,
      "loss": 0.0018,
      "step": 3520
    },
    {
      "epoch": 2.409802586793737,
      "grad_norm": 0.4572041630744934,
      "learning_rate": 2.2242129511133863e-06,
      "loss": 0.0021,
      "step": 3540
    },
    {
      "epoch": 2.4234172906739277,
      "grad_norm": 0.13889265060424805,
      "learning_rate": 2.173022779626312e-06,
      "loss": 0.0014,
      "step": 3560
    },
    {
      "epoch": 2.4370319945541183,
      "grad_norm": 1.883860468864441,
      "learning_rate": 2.1218326081392373e-06,
      "loss": 0.002,
      "step": 3580
    },
    {
      "epoch": 2.450646698434309,
      "grad_norm": 0.6357561349868774,
      "learning_rate": 2.0706424366521628e-06,
      "loss": 0.0017,
      "step": 3600
    },
    {
      "epoch": 2.4642614023144995,
      "grad_norm": 0.9930075407028198,
      "learning_rate": 2.0194522651650887e-06,
      "loss": 0.0022,
      "step": 3620
    },
    {
      "epoch": 2.47787610619469,
      "grad_norm": 2.411013603210449,
      "learning_rate": 1.9682620936780138e-06,
      "loss": 0.0023,
      "step": 3640
    },
    {
      "epoch": 2.4914908100748807,
      "grad_norm": 0.14072994887828827,
      "learning_rate": 1.9170719221909397e-06,
      "loss": 0.0015,
      "step": 3660
    },
    {
      "epoch": 2.5051055139550717,
      "grad_norm": 0.12276594340801239,
      "learning_rate": 1.8658817507038652e-06,
      "loss": 0.0018,
      "step": 3680
    },
    {
      "epoch": 2.518720217835262,
      "grad_norm": 0.13401934504508972,
      "learning_rate": 1.8146915792167905e-06,
      "loss": 0.0018,
      "step": 3700
    },
    {
      "epoch": 2.532334921715453,
      "grad_norm": 0.2047947198152542,
      "learning_rate": 1.763501407729716e-06,
      "loss": 0.0023,
      "step": 3720
    },
    {
      "epoch": 2.545949625595643,
      "grad_norm": 0.49802783131599426,
      "learning_rate": 1.7123112362426417e-06,
      "loss": 0.0022,
      "step": 3740
    },
    {
      "epoch": 2.559564329475834,
      "grad_norm": 0.19259583950042725,
      "learning_rate": 1.661121064755567e-06,
      "loss": 0.0018,
      "step": 3760
    },
    {
      "epoch": 2.5731790333560243,
      "grad_norm": 0.09171450138092041,
      "learning_rate": 1.6099308932684927e-06,
      "loss": 0.0016,
      "step": 3780
    },
    {
      "epoch": 2.5867937372362153,
      "grad_norm": 0.7817858457565308,
      "learning_rate": 1.5587407217814182e-06,
      "loss": 0.0013,
      "step": 3800
    },
    {
      "epoch": 2.6004084411164055,
      "grad_norm": 0.32868897914886475,
      "learning_rate": 1.5075505502943435e-06,
      "loss": 0.0018,
      "step": 3820
    },
    {
      "epoch": 2.6140231449965965,
      "grad_norm": 0.23523026704788208,
      "learning_rate": 1.4563603788072692e-06,
      "loss": 0.0021,
      "step": 3840
    },
    {
      "epoch": 2.627637848876787,
      "grad_norm": 0.1147078424692154,
      "learning_rate": 1.4051702073201947e-06,
      "loss": 0.0027,
      "step": 3860
    },
    {
      "epoch": 2.6412525527569777,
      "grad_norm": 2.7102601528167725,
      "learning_rate": 1.3539800358331202e-06,
      "loss": 0.002,
      "step": 3880
    },
    {
      "epoch": 2.6548672566371683,
      "grad_norm": 0.24626313149929047,
      "learning_rate": 1.3027898643460457e-06,
      "loss": 0.002,
      "step": 3900
    },
    {
      "epoch": 2.668481960517359,
      "grad_norm": 1.1574679613113403,
      "learning_rate": 1.2515996928589714e-06,
      "loss": 0.0015,
      "step": 3920
    },
    {
      "epoch": 2.6820966643975495,
      "grad_norm": 1.2218636274337769,
      "learning_rate": 1.2004095213718967e-06,
      "loss": 0.0017,
      "step": 3940
    },
    {
      "epoch": 2.69571136827774,
      "grad_norm": 0.44518494606018066,
      "learning_rate": 1.1492193498848222e-06,
      "loss": 0.0029,
      "step": 3960
    },
    {
      "epoch": 2.7093260721579306,
      "grad_norm": 0.7910877466201782,
      "learning_rate": 1.0980291783977477e-06,
      "loss": 0.002,
      "step": 3980
    },
    {
      "epoch": 2.7229407760381212,
      "grad_norm": 0.4967029392719269,
      "learning_rate": 1.0468390069106732e-06,
      "loss": 0.0016,
      "step": 4000
    },
    {
      "epoch": 2.7229407760381212,
      "eval_loss": 0.0036573188845068216,
      "eval_runtime": 4355.9825,
      "eval_samples_per_second": 2.697,
      "eval_steps_per_second": 0.337,
      "eval_wer": 0.20675921603003247,
      "step": 4000
    },
    {
      "epoch": 2.736555479918312,
      "grad_norm": 0.25801071524620056,
      "learning_rate": 9.95648835423599e-07,
      "loss": 0.0018,
      "step": 4020
    },
    {
      "epoch": 2.7501701837985024,
      "grad_norm": 0.09079001843929291,
      "learning_rate": 9.444586639365243e-07,
      "loss": 0.0016,
      "step": 4040
    },
    {
      "epoch": 2.763784887678693,
      "grad_norm": 0.1338862031698227,
      "learning_rate": 8.932684924494497e-07,
      "loss": 0.0025,
      "step": 4060
    },
    {
      "epoch": 2.7773995915588836,
      "grad_norm": 0.1738889366388321,
      "learning_rate": 8.420783209623753e-07,
      "loss": 0.0014,
      "step": 4080
    },
    {
      "epoch": 2.791014295439074,
      "grad_norm": 0.13001345098018646,
      "learning_rate": 7.908881494753008e-07,
      "loss": 0.002,
      "step": 4100
    },
    {
      "epoch": 2.804628999319265,
      "grad_norm": 0.11193196475505829,
      "learning_rate": 7.396979779882263e-07,
      "loss": 0.0012,
      "step": 4120
    },
    {
      "epoch": 2.8182437031994554,
      "grad_norm": 0.19796916842460632,
      "learning_rate": 6.885078065011519e-07,
      "loss": 0.0017,
      "step": 4140
    },
    {
      "epoch": 2.831858407079646,
      "grad_norm": 0.30927911400794983,
      "learning_rate": 6.373176350140774e-07,
      "loss": 0.0015,
      "step": 4160
    },
    {
      "epoch": 2.8454731109598366,
      "grad_norm": 0.15930448472499847,
      "learning_rate": 5.861274635270028e-07,
      "loss": 0.0013,
      "step": 4180
    },
    {
      "epoch": 2.859087814840027,
      "grad_norm": 0.3418717086315155,
      "learning_rate": 5.349372920399284e-07,
      "loss": 0.0019,
      "step": 4200
    },
    {
      "epoch": 2.8727025187202178,
      "grad_norm": 0.17476582527160645,
      "learning_rate": 4.837471205528539e-07,
      "loss": 0.002,
      "step": 4220
    },
    {
      "epoch": 2.8863172226004083,
      "grad_norm": 0.17526167631149292,
      "learning_rate": 4.3255694906577936e-07,
      "loss": 0.0025,
      "step": 4240
    },
    {
      "epoch": 2.899931926480599,
      "grad_norm": 0.15068136155605316,
      "learning_rate": 3.813667775787049e-07,
      "loss": 0.0017,
      "step": 4260
    },
    {
      "epoch": 2.9135466303607895,
      "grad_norm": 0.28123319149017334,
      "learning_rate": 3.3017660609163047e-07,
      "loss": 0.0021,
      "step": 4280
    },
    {
      "epoch": 2.92716133424098,
      "grad_norm": 0.17292127013206482,
      "learning_rate": 2.789864346045559e-07,
      "loss": 0.0012,
      "step": 4300
    },
    {
      "epoch": 2.9407760381211707,
      "grad_norm": 0.25420811772346497,
      "learning_rate": 2.2779626311748147e-07,
      "loss": 0.0013,
      "step": 4320
    },
    {
      "epoch": 2.9543907420013613,
      "grad_norm": 1.6471766233444214,
      "learning_rate": 1.7660609163040697e-07,
      "loss": 0.0023,
      "step": 4340
    },
    {
      "epoch": 2.968005445881552,
      "grad_norm": 0.29647982120513916,
      "learning_rate": 1.254159201433325e-07,
      "loss": 0.0014,
      "step": 4360
    },
    {
      "epoch": 2.981620149761743,
      "grad_norm": 0.20846757292747498,
      "learning_rate": 7.4225748656258e-08,
      "loss": 0.0028,
      "step": 4380
    },
    {
      "epoch": 2.995234853641933,
      "grad_norm": 0.37198618054389954,
      "learning_rate": 2.303557716918352e-08,
      "loss": 0.0018,
      "step": 4400
    }
  ],
  "logging_steps": 20,
  "max_steps": 4407,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.068794417651712e+19,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}