{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.967741935483871,
  "eval_steps": 500,
  "global_step": 216,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013824884792626729,
      "grad_norm": 31.00213623046875,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.2089,
      "step": 1
    },
    {
      "epoch": 0.027649769585253458,
      "grad_norm": 30.27136993408203,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.1536,
      "step": 2
    },
    {
      "epoch": 0.041474654377880185,
      "grad_norm": 30.48703384399414,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.1581,
      "step": 3
    },
    {
      "epoch": 0.055299539170506916,
      "grad_norm": 30.779329299926758,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.1741,
      "step": 4
    },
    {
      "epoch": 0.06912442396313365,
      "grad_norm": 31.22808837890625,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.1864,
      "step": 5
    },
    {
      "epoch": 0.08294930875576037,
      "grad_norm": 30.783327102661133,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.1993,
      "step": 6
    },
    {
      "epoch": 0.0967741935483871,
      "grad_norm": 30.57423210144043,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.1506,
      "step": 7
    },
    {
      "epoch": 0.11059907834101383,
      "grad_norm": 30.952186584472656,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.1599,
      "step": 8
    },
    {
      "epoch": 0.12442396313364056,
      "grad_norm": 30.37245750427246,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.1572,
      "step": 9
    },
    {
      "epoch": 0.1382488479262673,
      "grad_norm": 30.930192947387695,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.1447,
      "step": 10
    },
    {
      "epoch": 0.15207373271889402,
      "grad_norm": 29.735448837280273,
      "learning_rate": 5.5e-07,
      "loss": 2.0742,
      "step": 11
    },
    {
      "epoch": 0.16589861751152074,
      "grad_norm": 29.62826156616211,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.061,
      "step": 12
    },
    {
      "epoch": 0.17972350230414746,
      "grad_norm": 28.937463760375977,
      "learning_rate": 6.5e-07,
      "loss": 1.9974,
      "step": 13
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 29.24833106994629,
      "learning_rate": 7.000000000000001e-07,
      "loss": 1.9833,
      "step": 14
    },
    {
      "epoch": 0.2073732718894009,
      "grad_norm": 28.122018814086914,
      "learning_rate": 7.5e-07,
      "loss": 1.8934,
      "step": 15
    },
    {
      "epoch": 0.22119815668202766,
      "grad_norm": 28.059659957885742,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.875,
      "step": 16
    },
    {
      "epoch": 0.2350230414746544,
      "grad_norm": 27.361961364746094,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.8009,
      "step": 17
    },
    {
      "epoch": 0.2488479262672811,
      "grad_norm": 26.721765518188477,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.7116,
      "step": 18
    },
    {
      "epoch": 0.2626728110599078,
      "grad_norm": 25.37330436706543,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.5608,
      "step": 19
    },
    {
      "epoch": 0.2764976958525346,
      "grad_norm": 25.81206703186035,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.5043,
      "step": 20
    },
    {
      "epoch": 0.2903225806451613,
      "grad_norm": 25.539344787597656,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.3673,
      "step": 21
    },
    {
      "epoch": 0.30414746543778803,
      "grad_norm": 25.097164154052734,
      "learning_rate": 1.1e-06,
      "loss": 1.2029,
      "step": 22
    },
    {
      "epoch": 0.31797235023041476,
      "grad_norm": 24.619497299194336,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.0458,
      "step": 23
    },
    {
      "epoch": 0.3317972350230415,
      "grad_norm": 23.820302963256836,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.8723,
      "step": 24
    },
    {
      "epoch": 0.3456221198156682,
      "grad_norm": 23.12735939025879,
      "learning_rate": 1.25e-06,
      "loss": 0.7183,
      "step": 25
    },
    {
      "epoch": 0.35944700460829493,
      "grad_norm": 20.127134323120117,
      "learning_rate": 1.3e-06,
      "loss": 0.5248,
      "step": 26
    },
    {
      "epoch": 0.37327188940092165,
      "grad_norm": 15.901495933532715,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.3689,
      "step": 27
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 11.053832054138184,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.2482,
      "step": 28
    },
    {
      "epoch": 0.4009216589861751,
      "grad_norm": 7.248495578765869,
      "learning_rate": 1.45e-06,
      "loss": 0.1847,
      "step": 29
    },
    {
      "epoch": 0.4147465437788018,
      "grad_norm": 5.378540515899658,
      "learning_rate": 1.5e-06,
      "loss": 0.1423,
      "step": 30
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 3.8371808528900146,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.1152,
      "step": 31
    },
    {
      "epoch": 0.4423963133640553,
      "grad_norm": 2.2655274868011475,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.0845,
      "step": 32
    },
    {
      "epoch": 0.45622119815668205,
      "grad_norm": 1.5746861696243286,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.0711,
      "step": 33
    },
    {
      "epoch": 0.4700460829493088,
      "grad_norm": 1.3510947227478027,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.0734,
      "step": 34
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 0.9737389087677002,
      "learning_rate": 1.75e-06,
      "loss": 0.0651,
      "step": 35
    },
    {
      "epoch": 0.4976958525345622,
      "grad_norm": 0.9815284609794617,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0593,
      "step": 36
    },
    {
      "epoch": 0.511520737327189,
      "grad_norm": 0.8567912578582764,
      "learning_rate": 1.85e-06,
      "loss": 0.0543,
      "step": 37
    },
    {
      "epoch": 0.5253456221198156,
      "grad_norm": 0.6773302555084229,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0622,
      "step": 38
    },
    {
      "epoch": 0.5391705069124424,
      "grad_norm": 0.49936285614967346,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0511,
      "step": 39
    },
    {
      "epoch": 0.5529953917050692,
      "grad_norm": 0.6253588795661926,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0478,
      "step": 40
    },
    {
      "epoch": 0.5668202764976958,
      "grad_norm": 0.5103089809417725,
      "learning_rate": 2.05e-06,
      "loss": 0.0465,
      "step": 41
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 0.29294702410697937,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0456,
      "step": 42
    },
    {
      "epoch": 0.5944700460829493,
      "grad_norm": 0.4237954616546631,
      "learning_rate": 2.15e-06,
      "loss": 0.0501,
      "step": 43
    },
    {
      "epoch": 0.6082949308755761,
      "grad_norm": 0.42243412137031555,
      "learning_rate": 2.2e-06,
      "loss": 0.0388,
      "step": 44
    },
    {
      "epoch": 0.6221198156682027,
      "grad_norm": 0.37881818413734436,
      "learning_rate": 2.25e-06,
      "loss": 0.0415,
      "step": 45
    },
    {
      "epoch": 0.6359447004608295,
      "grad_norm": 0.4941152036190033,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.045,
      "step": 46
    },
    {
      "epoch": 0.6497695852534562,
      "grad_norm": 0.3046450912952423,
      "learning_rate": 2.35e-06,
      "loss": 0.0386,
      "step": 47
    },
    {
      "epoch": 0.663594470046083,
      "grad_norm": 0.39361852407455444,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0447,
      "step": 48
    },
    {
      "epoch": 0.6774193548387096,
      "grad_norm": 0.5190001130104065,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0364,
      "step": 49
    },
    {
      "epoch": 0.6912442396313364,
      "grad_norm": 0.372072696685791,
      "learning_rate": 2.5e-06,
      "loss": 0.043,
      "step": 50
    },
    {
      "epoch": 0.7050691244239631,
      "grad_norm": 0.3756551146507263,
      "learning_rate": 2.55e-06,
      "loss": 0.0424,
      "step": 51
    },
    {
      "epoch": 0.7188940092165899,
      "grad_norm": 0.4593554437160492,
      "learning_rate": 2.6e-06,
      "loss": 0.0387,
      "step": 52
    },
    {
      "epoch": 0.7327188940092166,
      "grad_norm": 0.2931855618953705,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0396,
      "step": 53
    },
    {
      "epoch": 0.7465437788018433,
      "grad_norm": 0.38429534435272217,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0373,
      "step": 54
    },
    {
      "epoch": 0.7603686635944701,
      "grad_norm": 0.3506857752799988,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.04,
      "step": 55
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 0.29847028851509094,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0369,
      "step": 56
    },
    {
      "epoch": 0.7880184331797235,
      "grad_norm": 0.3653375506401062,
      "learning_rate": 2.85e-06,
      "loss": 0.0396,
      "step": 57
    },
    {
      "epoch": 0.8018433179723502,
      "grad_norm": 0.3163083791732788,
      "learning_rate": 2.9e-06,
      "loss": 0.0337,
      "step": 58
    },
    {
      "epoch": 0.815668202764977,
      "grad_norm": 0.3734363615512848,
      "learning_rate": 2.95e-06,
      "loss": 0.0327,
      "step": 59
    },
    {
      "epoch": 0.8294930875576036,
      "grad_norm": 0.29547712206840515,
      "learning_rate": 3e-06,
      "loss": 0.0365,
      "step": 60
    },
    {
      "epoch": 0.8433179723502304,
      "grad_norm": 0.4041007161140442,
      "learning_rate": 3.05e-06,
      "loss": 0.038,
      "step": 61
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.3602149784564972,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.033,
      "step": 62
    },
    {
      "epoch": 0.8709677419354839,
      "grad_norm": 0.2948857545852661,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0386,
      "step": 63
    },
    {
      "epoch": 0.8847926267281107,
      "grad_norm": 0.39098358154296875,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0323,
      "step": 64
    },
    {
      "epoch": 0.8986175115207373,
      "grad_norm": 0.3692062795162201,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0309,
      "step": 65
    },
    {
      "epoch": 0.9124423963133641,
      "grad_norm": 0.3967229425907135,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0346,
      "step": 66
    },
    {
      "epoch": 0.9262672811059908,
      "grad_norm": 0.47776708006858826,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0355,
      "step": 67
    },
    {
      "epoch": 0.9400921658986175,
      "grad_norm": 0.21545131504535675,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0294,
      "step": 68
    },
    {
      "epoch": 0.9539170506912442,
      "grad_norm": 0.23738539218902588,
      "learning_rate": 3.45e-06,
      "loss": 0.0308,
      "step": 69
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 0.29174014925956726,
      "learning_rate": 3.5e-06,
      "loss": 0.0312,
      "step": 70
    },
    {
      "epoch": 0.9815668202764977,
      "grad_norm": 0.38475602865219116,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0324,
      "step": 71
    },
    {
      "epoch": 0.9953917050691244,
      "grad_norm": 0.4077378809452057,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0297,
      "step": 72
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.4077378809452057,
      "learning_rate": 3.65e-06,
      "loss": 0.031,
      "step": 73
    },
    {
      "epoch": 1.0138248847926268,
      "grad_norm": 0.46581539511680603,
      "learning_rate": 3.7e-06,
      "loss": 0.0313,
      "step": 74
    },
    {
      "epoch": 1.0276497695852536,
      "grad_norm": 0.24417200684547424,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.027,
      "step": 75
    },
    {
      "epoch": 1.0414746543778801,
      "grad_norm": 0.20425117015838623,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0307,
      "step": 76
    },
    {
      "epoch": 1.055299539170507,
      "grad_norm": 0.3578161597251892,
      "learning_rate": 3.85e-06,
      "loss": 0.0312,
      "step": 77
    },
    {
      "epoch": 1.0691244239631337,
      "grad_norm": 0.39486679434776306,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0294,
      "step": 78
    },
    {
      "epoch": 1.0829493087557605,
      "grad_norm": 0.3932795226573944,
      "learning_rate": 3.95e-06,
      "loss": 0.0307,
      "step": 79
    },
    {
      "epoch": 1.096774193548387,
      "grad_norm": 0.2946235239505768,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0257,
      "step": 80
    },
    {
      "epoch": 1.1105990783410138,
      "grad_norm": 0.3318672776222229,
      "learning_rate": 4.05e-06,
      "loss": 0.0296,
      "step": 81
    },
    {
      "epoch": 1.1244239631336406,
      "grad_norm": 0.23701588809490204,
      "learning_rate": 4.1e-06,
      "loss": 0.0298,
      "step": 82
    },
    {
      "epoch": 1.1382488479262673,
      "grad_norm": 0.2415941059589386,
      "learning_rate": 4.15e-06,
      "loss": 0.0256,
      "step": 83
    },
    {
      "epoch": 1.1520737327188941,
      "grad_norm": 0.24098087847232819,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0263,
      "step": 84
    },
    {
      "epoch": 1.1658986175115207,
      "grad_norm": 0.3530103862285614,
      "learning_rate": 4.25e-06,
      "loss": 0.0308,
      "step": 85
    },
    {
      "epoch": 1.1797235023041475,
      "grad_norm": 0.2382838875055313,
      "learning_rate": 4.3e-06,
      "loss": 0.0254,
      "step": 86
    },
    {
      "epoch": 1.1935483870967742,
      "grad_norm": 0.2670588791370392,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0255,
      "step": 87
    },
    {
      "epoch": 1.2073732718894008,
      "grad_norm": 0.30723804235458374,
      "learning_rate": 4.4e-06,
      "loss": 0.0263,
      "step": 88
    },
    {
      "epoch": 1.2211981566820276,
      "grad_norm": 0.505890965461731,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0265,
      "step": 89
    },
    {
      "epoch": 1.2350230414746544,
      "grad_norm": 0.24307991564273834,
      "learning_rate": 4.5e-06,
      "loss": 0.0227,
      "step": 90
    },
    {
      "epoch": 1.2488479262672811,
      "grad_norm": 0.2198561429977417,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.0261,
      "step": 91
    },
    {
      "epoch": 1.262672811059908,
      "grad_norm": 0.2435183823108673,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0225,
      "step": 92
    },
    {
      "epoch": 1.2764976958525347,
      "grad_norm": 0.18837811052799225,
      "learning_rate": 4.65e-06,
      "loss": 0.0218,
      "step": 93
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 0.3818771541118622,
      "learning_rate": 4.7e-06,
      "loss": 0.0223,
      "step": 94
    },
    {
      "epoch": 1.304147465437788,
      "grad_norm": 0.2358720600605011,
      "learning_rate": 4.75e-06,
      "loss": 0.0204,
      "step": 95
    },
    {
      "epoch": 1.3179723502304148,
      "grad_norm": 0.25374144315719604,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.022,
      "step": 96
    },
    {
      "epoch": 1.3317972350230414,
      "grad_norm": 0.36181601881980896,
      "learning_rate": 4.85e-06,
      "loss": 0.0244,
      "step": 97
    },
    {
      "epoch": 1.3456221198156681,
      "grad_norm": 0.3156590759754181,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0233,
      "step": 98
    },
    {
      "epoch": 1.359447004608295,
      "grad_norm": 0.21958638727664948,
      "learning_rate": 4.95e-06,
      "loss": 0.0218,
      "step": 99
    },
    {
      "epoch": 1.3732718894009217,
      "grad_norm": 0.34455621242523193,
      "learning_rate": 5e-06,
      "loss": 0.0267,
      "step": 100
    },
    {
      "epoch": 1.3870967741935485,
      "grad_norm": 0.283086359500885,
      "learning_rate": 4.999888074163108e-06,
      "loss": 0.0238,
      "step": 101
    },
    {
      "epoch": 1.400921658986175,
      "grad_norm": 0.28856486082077026,
      "learning_rate": 4.999552306674345e-06,
      "loss": 0.0186,
      "step": 102
    },
    {
      "epoch": 1.4147465437788018,
      "grad_norm": 0.26721692085266113,
      "learning_rate": 4.998992727598557e-06,
      "loss": 0.0193,
      "step": 103
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.3459971249103546,
      "learning_rate": 4.998209387040829e-06,
      "loss": 0.0218,
      "step": 104
    },
    {
      "epoch": 1.4423963133640554,
      "grad_norm": 0.25979122519493103,
      "learning_rate": 4.9972023551419995e-06,
      "loss": 0.0216,
      "step": 105
    },
    {
      "epoch": 1.456221198156682,
      "grad_norm": 0.19960424304008484,
      "learning_rate": 4.995971722072379e-06,
      "loss": 0.0176,
      "step": 106
    },
    {
      "epoch": 1.4700460829493087,
      "grad_norm": 0.2529441714286804,
      "learning_rate": 4.9945175980236745e-06,
      "loss": 0.0181,
      "step": 107
    },
    {
      "epoch": 1.4838709677419355,
      "grad_norm": 0.2690267264842987,
      "learning_rate": 4.992840113199131e-06,
      "loss": 0.0196,
      "step": 108
    },
    {
      "epoch": 1.4976958525345623,
      "grad_norm": 0.3516470789909363,
      "learning_rate": 4.990939417801859e-06,
      "loss": 0.0182,
      "step": 109
    },
    {
      "epoch": 1.511520737327189,
      "grad_norm": 0.30167508125305176,
      "learning_rate": 4.988815682021398e-06,
      "loss": 0.0205,
      "step": 110
    },
    {
      "epoch": 1.5253456221198156,
      "grad_norm": 0.3920849859714508,
      "learning_rate": 4.986469096018472e-06,
      "loss": 0.0177,
      "step": 111
    },
    {
      "epoch": 1.5391705069124424,
      "grad_norm": 0.3274078369140625,
      "learning_rate": 4.983899869907963e-06,
      "loss": 0.0185,
      "step": 112
    },
    {
      "epoch": 1.5529953917050692,
      "grad_norm": 0.2237282395362854,
      "learning_rate": 4.981108233740096e-06,
      "loss": 0.016,
      "step": 113
    },
    {
      "epoch": 1.5668202764976957,
      "grad_norm": 0.23966379463672638,
      "learning_rate": 4.978094437479843e-06,
      "loss": 0.0183,
      "step": 114
    },
    {
      "epoch": 1.5806451612903225,
      "grad_norm": 0.4027673602104187,
      "learning_rate": 4.97485875098454e-06,
      "loss": 0.0171,
      "step": 115
    },
    {
      "epoch": 1.5944700460829493,
      "grad_norm": 0.24082835018634796,
      "learning_rate": 4.971401463979722e-06,
      "loss": 0.016,
      "step": 116
    },
    {
      "epoch": 1.608294930875576,
      "grad_norm": 0.19387558102607727,
      "learning_rate": 4.967722886033181e-06,
      "loss": 0.0165,
      "step": 117
    },
    {
      "epoch": 1.6221198156682028,
      "grad_norm": 0.33696162700653076,
      "learning_rate": 4.963823346527249e-06,
      "loss": 0.0154,
      "step": 118
    },
    {
      "epoch": 1.6359447004608296,
      "grad_norm": 0.30290740728378296,
      "learning_rate": 4.959703194629304e-06,
      "loss": 0.0175,
      "step": 119
    },
    {
      "epoch": 1.6497695852534562,
      "grad_norm": 0.3781787157058716,
      "learning_rate": 4.955362799260507e-06,
      "loss": 0.0145,
      "step": 120
    },
    {
      "epoch": 1.663594470046083,
      "grad_norm": 0.39995357394218445,
      "learning_rate": 4.950802549062764e-06,
      "loss": 0.015,
      "step": 121
    },
    {
      "epoch": 1.6774193548387095,
      "grad_norm": 0.19926570355892181,
      "learning_rate": 4.946022852363932e-06,
      "loss": 0.0135,
      "step": 122
    },
    {
      "epoch": 1.6912442396313363,
      "grad_norm": 0.22450515627861023,
      "learning_rate": 4.9410241371412525e-06,
      "loss": 0.0135,
      "step": 123
    },
    {
      "epoch": 1.705069124423963,
      "grad_norm": 0.3588384985923767,
      "learning_rate": 4.935806850983034e-06,
      "loss": 0.0125,
      "step": 124
    },
    {
      "epoch": 1.7188940092165899,
      "grad_norm": 0.28571122884750366,
      "learning_rate": 4.9303714610485705e-06,
      "loss": 0.0166,
      "step": 125
    },
    {
      "epoch": 1.7327188940092166,
      "grad_norm": 0.3496967852115631,
      "learning_rate": 4.924718454026318e-06,
      "loss": 0.0139,
      "step": 126
    },
    {
      "epoch": 1.7465437788018434,
      "grad_norm": 0.3279854357242584,
      "learning_rate": 4.918848336090309e-06,
      "loss": 0.0133,
      "step": 127
    },
    {
      "epoch": 1.7603686635944702,
      "grad_norm": 0.19201801717281342,
      "learning_rate": 4.912761632854834e-06,
      "loss": 0.0151,
      "step": 128
    },
    {
      "epoch": 1.7741935483870968,
      "grad_norm": 0.27701929211616516,
      "learning_rate": 4.906458889327375e-06,
      "loss": 0.0148,
      "step": 129
    },
    {
      "epoch": 1.7880184331797235,
      "grad_norm": 0.2757968008518219,
      "learning_rate": 4.899940669859807e-06,
      "loss": 0.0118,
      "step": 130
    },
    {
      "epoch": 1.80184331797235,
      "grad_norm": 0.18373191356658936,
      "learning_rate": 4.893207558097867e-06,
      "loss": 0.0149,
      "step": 131
    },
    {
      "epoch": 1.8156682027649769,
      "grad_norm": 0.2116280496120453,
      "learning_rate": 4.8862601569288885e-06,
      "loss": 0.0129,
      "step": 132
    },
    {
      "epoch": 1.8294930875576036,
      "grad_norm": 0.30384117364883423,
      "learning_rate": 4.879099088427824e-06,
      "loss": 0.0136,
      "step": 133
    },
    {
      "epoch": 1.8433179723502304,
      "grad_norm": 0.3766787052154541,
      "learning_rate": 4.871724993801541e-06,
      "loss": 0.0123,
      "step": 134
    },
    {
      "epoch": 1.8571428571428572,
      "grad_norm": 0.3401263356208801,
      "learning_rate": 4.864138533331411e-06,
      "loss": 0.0122,
      "step": 135
    },
    {
      "epoch": 1.870967741935484,
      "grad_norm": 0.24321958422660828,
      "learning_rate": 4.8563403863141825e-06,
      "loss": 0.0123,
      "step": 136
    },
    {
      "epoch": 1.8847926267281108,
      "grad_norm": 0.16918110847473145,
      "learning_rate": 4.84833125100116e-06,
      "loss": 0.0104,
      "step": 137
    },
    {
      "epoch": 1.8986175115207373,
      "grad_norm": 0.23489230871200562,
      "learning_rate": 4.840111844535682e-06,
      "loss": 0.0122,
      "step": 138
    },
    {
      "epoch": 1.912442396313364,
      "grad_norm": 0.32796236872673035,
      "learning_rate": 4.8316829028889076e-06,
      "loss": 0.0109,
      "step": 139
    },
    {
      "epoch": 1.9262672811059907,
      "grad_norm": 0.24210475385189056,
      "learning_rate": 4.823045180793914e-06,
      "loss": 0.0118,
      "step": 140
    },
    {
      "epoch": 1.9400921658986174,
      "grad_norm": 0.3450548052787781,
      "learning_rate": 4.8141994516781196e-06,
      "loss": 0.0115,
      "step": 141
    },
    {
      "epoch": 1.9539170506912442,
      "grad_norm": 0.23163923621177673,
      "learning_rate": 4.805146507594034e-06,
      "loss": 0.0122,
      "step": 142
    },
    {
      "epoch": 1.967741935483871,
      "grad_norm": 0.8197745084762573,
      "learning_rate": 4.7958871591483305e-06,
      "loss": 0.0101,
      "step": 143
    },
    {
      "epoch": 1.9815668202764978,
      "grad_norm": 0.2917576730251312,
      "learning_rate": 4.786422235429269e-06,
      "loss": 0.0078,
      "step": 144
    },
    {
      "epoch": 1.9953917050691246,
      "grad_norm": 0.24417108297348022,
      "learning_rate": 4.776752583932455e-06,
      "loss": 0.0119,
      "step": 145
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.24417108297348022,
      "learning_rate": 4.766879070484957e-06,
      "loss": 0.0089,
      "step": 146
    },
    {
      "epoch": 2.013824884792627,
      "grad_norm": 0.4215025305747986,
      "learning_rate": 4.756802579167781e-06,
      "loss": 0.007,
      "step": 147
    },
    {
      "epoch": 2.0276497695852536,
      "grad_norm": 0.2002098709344864,
      "learning_rate": 4.746524012236706e-06,
      "loss": 0.0078,
      "step": 148
    },
    {
      "epoch": 2.0414746543778803,
      "grad_norm": 0.16432569921016693,
      "learning_rate": 4.736044290041496e-06,
      "loss": 0.0074,
      "step": 149
    },
    {
      "epoch": 2.055299539170507,
      "grad_norm": 0.2516174018383026,
      "learning_rate": 4.725364350943492e-06,
      "loss": 0.0067,
      "step": 150
    },
    {
      "epoch": 2.0691244239631335,
      "grad_norm": 0.24242427945137024,
      "learning_rate": 4.714485151231593e-06,
      "loss": 0.0083,
      "step": 151
    },
    {
      "epoch": 2.0829493087557602,
      "grad_norm": 0.22929197549819946,
      "learning_rate": 4.703407665036622e-06,
      "loss": 0.0061,
      "step": 152
    },
    {
      "epoch": 2.096774193548387,
      "grad_norm": 0.2929408550262451,
      "learning_rate": 4.692132884244113e-06,
      "loss": 0.0064,
      "step": 153
    },
    {
      "epoch": 2.110599078341014,
      "grad_norm": 0.22497303783893585,
      "learning_rate": 4.680661818405485e-06,
      "loss": 0.0061,
      "step": 154
    },
    {
      "epoch": 2.1244239631336406,
      "grad_norm": 0.13698536157608032,
      "learning_rate": 4.668995494647653e-06,
      "loss": 0.0059,
      "step": 155
    },
    {
      "epoch": 2.1382488479262673,
      "grad_norm": 0.32037150859832764,
      "learning_rate": 4.657134957581057e-06,
      "loss": 0.0067,
      "step": 156
    },
    {
      "epoch": 2.152073732718894,
      "grad_norm": 0.19389067590236664,
      "learning_rate": 4.645081269206128e-06,
      "loss": 0.0062,
      "step": 157
    },
    {
      "epoch": 2.165898617511521,
      "grad_norm": 0.2791127562522888,
      "learning_rate": 4.632835508818192e-06,
      "loss": 0.0058,
      "step": 158
    },
    {
      "epoch": 2.1797235023041477,
      "grad_norm": 0.2178739458322525,
      "learning_rate": 4.620398772910833e-06,
      "loss": 0.0056,
      "step": 159
    },
    {
      "epoch": 2.193548387096774,
      "grad_norm": 0.29685622453689575,
      "learning_rate": 4.607772175077712e-06,
      "loss": 0.0055,
      "step": 160
    },
    {
      "epoch": 2.207373271889401,
      "grad_norm": 0.6792906522750854,
      "learning_rate": 4.59495684591285e-06,
      "loss": 0.0057,
      "step": 161
    },
    {
      "epoch": 2.2211981566820276,
      "grad_norm": 0.17910148203372955,
      "learning_rate": 4.581953932909403e-06,
      "loss": 0.0046,
      "step": 162
    },
    {
      "epoch": 2.2350230414746544,
      "grad_norm": 0.12593543529510498,
      "learning_rate": 4.5687646003569055e-06,
      "loss": 0.0046,
      "step": 163
    },
    {
      "epoch": 2.248847926267281,
      "grad_norm": 0.15383680164813995,
      "learning_rate": 4.555390029237026e-06,
      "loss": 0.0059,
      "step": 164
    },
    {
      "epoch": 2.262672811059908,
      "grad_norm": 0.2324540764093399,
      "learning_rate": 4.541831417117815e-06,
      "loss": 0.0067,
      "step": 165
    },
    {
      "epoch": 2.2764976958525347,
      "grad_norm": 0.21278905868530273,
      "learning_rate": 4.528089978046481e-06,
      "loss": 0.0054,
      "step": 166
    },
    {
      "epoch": 2.2903225806451615,
      "grad_norm": 0.2499057948589325,
      "learning_rate": 4.514166942440679e-06,
      "loss": 0.003,
      "step": 167
    },
    {
      "epoch": 2.3041474654377883,
      "grad_norm": 0.1734611839056015,
      "learning_rate": 4.5000635569783365e-06,
      "loss": 0.0043,
      "step": 168
    },
    {
      "epoch": 2.3179723502304146,
      "grad_norm": 0.17815802991390228,
      "learning_rate": 4.4857810844860325e-06,
      "loss": 0.0048,
      "step": 169
    },
    {
      "epoch": 2.3317972350230414,
      "grad_norm": 0.22731409966945648,
      "learning_rate": 4.471320803825915e-06,
      "loss": 0.0034,
      "step": 170
    },
    {
      "epoch": 2.345622119815668,
      "grad_norm": 0.23811140656471252,
      "learning_rate": 4.4566840097811956e-06,
      "loss": 0.0029,
      "step": 171
    },
    {
      "epoch": 2.359447004608295,
      "grad_norm": 0.17744024097919464,
      "learning_rate": 4.4418720129402145e-06,
      "loss": 0.0029,
      "step": 172
    },
    {
      "epoch": 2.3732718894009217,
      "grad_norm": 0.24912229180335999,
      "learning_rate": 4.426886139579083e-06,
      "loss": 0.0049,
      "step": 173
    },
    {
      "epoch": 2.3870967741935485,
      "grad_norm": 0.17039696872234344,
      "learning_rate": 4.411727731542937e-06,
      "loss": 0.003,
      "step": 174
    },
    {
      "epoch": 2.4009216589861753,
      "grad_norm": 0.3089725375175476,
      "learning_rate": 4.39639814612578e-06,
      "loss": 0.0034,
      "step": 175
    },
    {
      "epoch": 2.4147465437788016,
      "grad_norm": 0.22647598385810852,
      "learning_rate": 4.3808987559489536e-06,
      "loss": 0.0058,
      "step": 176
    },
    {
      "epoch": 2.4285714285714284,
      "grad_norm": 0.19015835225582123,
      "learning_rate": 4.365230948838232e-06,
      "loss": 0.004,
      "step": 177
    },
    {
      "epoch": 2.442396313364055,
      "grad_norm": 0.1825973391532898,
      "learning_rate": 4.349396127699552e-06,
      "loss": 0.0032,
      "step": 178
    },
    {
      "epoch": 2.456221198156682,
      "grad_norm": 0.15705449879169464,
      "learning_rate": 4.3333957103934025e-06,
      "loss": 0.0035,
      "step": 179
    },
    {
      "epoch": 2.4700460829493087,
      "grad_norm": 0.19110225141048431,
      "learning_rate": 4.317231129607859e-06,
      "loss": 0.0019,
      "step": 180
    },
    {
      "epoch": 2.4838709677419355,
      "grad_norm": 0.1481270045042038,
      "learning_rate": 4.30090383273031e-06,
      "loss": 0.0035,
      "step": 181
    },
    {
      "epoch": 2.4976958525345623,
      "grad_norm": 0.19533571600914001,
      "learning_rate": 4.2844152817178476e-06,
      "loss": 0.0023,
      "step": 182
    },
    {
      "epoch": 2.511520737327189,
      "grad_norm": 0.1991293579339981,
      "learning_rate": 4.267766952966369e-06,
      "loss": 0.0025,
      "step": 183
    },
    {
      "epoch": 2.525345622119816,
      "grad_norm": 0.22637878358364105,
      "learning_rate": 4.2509603371783776e-06,
      "loss": 0.0029,
      "step": 184
    },
    {
      "epoch": 2.539170506912442,
      "grad_norm": 0.21984712779521942,
      "learning_rate": 4.233996939229502e-06,
      "loss": 0.0035,
      "step": 185
    },
    {
      "epoch": 2.5529953917050694,
      "grad_norm": 0.25706061720848083,
      "learning_rate": 4.216878278033753e-06,
      "loss": 0.0033,
      "step": 186
    },
    {
      "epoch": 2.5668202764976957,
      "grad_norm": 0.224118173122406,
      "learning_rate": 4.199605886407515e-06,
      "loss": 0.0017,
      "step": 187
    },
    {
      "epoch": 2.5806451612903225,
      "grad_norm": 0.0781751424074173,
      "learning_rate": 4.1821813109322975e-06,
      "loss": 0.002,
      "step": 188
    },
    {
      "epoch": 2.5944700460829493,
      "grad_norm": 0.2209765911102295,
      "learning_rate": 4.164606111816256e-06,
      "loss": 0.0018,
      "step": 189
    },
    {
      "epoch": 2.608294930875576,
      "grad_norm": 0.12815824151039124,
      "learning_rate": 4.146881862754485e-06,
      "loss": 0.003,
      "step": 190
    },
    {
      "epoch": 2.622119815668203,
      "grad_norm": 0.3006991147994995,
      "learning_rate": 4.129010150788112e-06,
      "loss": 0.0022,
      "step": 191
    },
    {
      "epoch": 2.6359447004608296,
      "grad_norm": 0.19085584580898285,
      "learning_rate": 4.110992576162193e-06,
      "loss": 0.0026,
      "step": 192
    },
    {
      "epoch": 2.6497695852534564,
      "grad_norm": 0.13027659058570862,
      "learning_rate": 4.092830752182423e-06,
      "loss": 0.0015,
      "step": 193
    },
    {
      "epoch": 2.6635944700460827,
      "grad_norm": 0.16998590528964996,
      "learning_rate": 4.074526305070679e-06,
      "loss": 0.0018,
      "step": 194
    },
    {
      "epoch": 2.6774193548387095,
      "grad_norm": 0.1743537187576294,
      "learning_rate": 4.056080873819412e-06,
      "loss": 0.0022,
      "step": 195
    },
    {
      "epoch": 2.6912442396313363,
      "grad_norm": 0.3566405177116394,
      "learning_rate": 4.037496110044885e-06,
      "loss": 0.0018,
      "step": 196
    },
    {
      "epoch": 2.705069124423963,
      "grad_norm": 0.274739146232605,
      "learning_rate": 4.018773677839289e-06,
      "loss": 0.0012,
      "step": 197
    },
    {
      "epoch": 2.71889400921659,
      "grad_norm": 0.12038746476173401,
      "learning_rate": 3.999915253621739e-06,
      "loss": 0.0013,
      "step": 198
    },
    {
      "epoch": 2.7327188940092166,
      "grad_norm": 0.12693172693252563,
      "learning_rate": 3.980922525988167e-06,
      "loss": 0.0017,
      "step": 199
    },
    {
      "epoch": 2.7465437788018434,
      "grad_norm": 0.11907753348350525,
      "learning_rate": 3.961797195560118e-06,
      "loss": 0.001,
      "step": 200
    },
    {
      "epoch": 2.76036866359447,
      "grad_norm": 0.1901165395975113,
      "learning_rate": 3.942540974832486e-06,
      "loss": 0.0028,
      "step": 201
    },
    {
      "epoch": 2.774193548387097,
      "grad_norm": 0.2039843052625656,
      "learning_rate": 3.9231555880201655e-06,
      "loss": 0.0011,
      "step": 202
    },
    {
      "epoch": 2.7880184331797233,
      "grad_norm": 0.16181506216526031,
      "learning_rate": 3.903642770903671e-06,
      "loss": 0.003,
      "step": 203
    },
    {
      "epoch": 2.80184331797235,
      "grad_norm": 0.13345211744308472,
      "learning_rate": 3.884004270673711e-06,
      "loss": 0.0023,
      "step": 204
    },
    {
      "epoch": 2.815668202764977,
      "grad_norm": 0.19453725218772888,
      "learning_rate": 3.864241845774746e-06,
      "loss": 0.001,
      "step": 205
    },
    {
      "epoch": 2.8294930875576036,
      "grad_norm": 0.18157535791397095,
      "learning_rate": 3.844357265747531e-06,
      "loss": 0.0029,
      "step": 206
    },
    {
      "epoch": 2.8433179723502304,
      "grad_norm": 0.17876467108726501,
      "learning_rate": 3.8243523110706736e-06,
      "loss": 0.0018,
      "step": 207
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.13000421226024628,
      "learning_rate": 3.8042287730012117e-06,
      "loss": 0.0011,
      "step": 208
    },
    {
      "epoch": 2.870967741935484,
      "grad_norm": 0.08808371424674988,
      "learning_rate": 3.7839884534142157e-06,
      "loss": 0.0018,
      "step": 209
    },
    {
      "epoch": 2.8847926267281108,
      "grad_norm": 0.32318148016929626,
      "learning_rate": 3.7636331646414524e-06,
      "loss": 0.0012,
      "step": 210
    },
    {
      "epoch": 2.8986175115207375,
      "grad_norm": 0.1259954422712326,
      "learning_rate": 3.7431647293091076e-06,
      "loss": 0.0012,
      "step": 211
    },
    {
      "epoch": 2.912442396313364,
      "grad_norm": 0.1344563215970993,
      "learning_rate": 3.7225849801745835e-06,
      "loss": 0.0006,
      "step": 212
    },
    {
      "epoch": 2.9262672811059907,
      "grad_norm": 0.09105626493692398,
      "learning_rate": 3.701895759962397e-06,
      "loss": 0.0007,
      "step": 213
    },
    {
      "epoch": 2.9400921658986174,
      "grad_norm": 0.11718853563070297,
      "learning_rate": 3.6810989211991777e-06,
      "loss": 0.0022,
      "step": 214
    },
    {
      "epoch": 2.953917050691244,
      "grad_norm": 0.10988112539052963,
      "learning_rate": 3.6601963260477923e-06,
      "loss": 0.0007,
      "step": 215
    },
    {
      "epoch": 2.967741935483871,
      "grad_norm": 0.12010538578033447,
      "learning_rate": 3.6391898461406045e-06,
      "loss": 0.0014,
      "step": 216
    }
  ],
  "logging_steps": 1,
  "max_steps": 432,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 72,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.3809900078187414e+19,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}