{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.940092165898617,
  "eval_steps": 500,
  "global_step": 360,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013824884792626729,
      "grad_norm": 31.00213623046875,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.2089,
      "step": 1
    },
    {
      "epoch": 0.027649769585253458,
      "grad_norm": 30.27136993408203,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.1536,
      "step": 2
    },
    {
      "epoch": 0.041474654377880185,
      "grad_norm": 30.48703384399414,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.1581,
      "step": 3
    },
    {
      "epoch": 0.055299539170506916,
      "grad_norm": 30.779329299926758,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.1741,
      "step": 4
    },
    {
      "epoch": 0.06912442396313365,
      "grad_norm": 31.22808837890625,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.1864,
      "step": 5
    },
    {
      "epoch": 0.08294930875576037,
      "grad_norm": 30.783327102661133,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.1993,
      "step": 6
    },
    {
      "epoch": 0.0967741935483871,
      "grad_norm": 30.57423210144043,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.1506,
      "step": 7
    },
    {
      "epoch": 0.11059907834101383,
      "grad_norm": 30.952186584472656,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.1599,
      "step": 8
    },
    {
      "epoch": 0.12442396313364056,
      "grad_norm": 30.37245750427246,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.1572,
      "step": 9
    },
    {
      "epoch": 0.1382488479262673,
      "grad_norm": 30.930192947387695,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.1447,
      "step": 10
    },
    {
      "epoch": 0.15207373271889402,
      "grad_norm": 29.735448837280273,
      "learning_rate": 5.5e-07,
      "loss": 2.0742,
      "step": 11
    },
    {
      "epoch": 0.16589861751152074,
      "grad_norm": 29.62826156616211,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.061,
      "step": 12
    },
    {
      "epoch": 0.17972350230414746,
      "grad_norm": 28.937463760375977,
      "learning_rate": 6.5e-07,
      "loss": 1.9974,
      "step": 13
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 29.24833106994629,
      "learning_rate": 7.000000000000001e-07,
      "loss": 1.9833,
      "step": 14
    },
    {
      "epoch": 0.2073732718894009,
      "grad_norm": 28.122018814086914,
      "learning_rate": 7.5e-07,
      "loss": 1.8934,
      "step": 15
    },
    {
      "epoch": 0.22119815668202766,
      "grad_norm": 28.059659957885742,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.875,
      "step": 16
    },
    {
      "epoch": 0.2350230414746544,
      "grad_norm": 27.361961364746094,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.8009,
      "step": 17
    },
    {
      "epoch": 0.2488479262672811,
      "grad_norm": 26.721765518188477,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.7116,
      "step": 18
    },
    {
      "epoch": 0.2626728110599078,
      "grad_norm": 25.37330436706543,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.5608,
      "step": 19
    },
    {
      "epoch": 0.2764976958525346,
      "grad_norm": 25.81206703186035,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.5043,
      "step": 20
    },
    {
      "epoch": 0.2903225806451613,
      "grad_norm": 25.539344787597656,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.3673,
      "step": 21
    },
    {
      "epoch": 0.30414746543778803,
      "grad_norm": 25.097164154052734,
      "learning_rate": 1.1e-06,
      "loss": 1.2029,
      "step": 22
    },
    {
      "epoch": 0.31797235023041476,
      "grad_norm": 24.619497299194336,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.0458,
      "step": 23
    },
    {
      "epoch": 0.3317972350230415,
      "grad_norm": 23.820302963256836,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.8723,
      "step": 24
    },
    {
      "epoch": 0.3456221198156682,
      "grad_norm": 23.12735939025879,
      "learning_rate": 1.25e-06,
      "loss": 0.7183,
      "step": 25
    },
    {
      "epoch": 0.35944700460829493,
      "grad_norm": 20.127134323120117,
      "learning_rate": 1.3e-06,
      "loss": 0.5248,
      "step": 26
    },
    {
      "epoch": 0.37327188940092165,
      "grad_norm": 15.901495933532715,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.3689,
      "step": 27
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 11.053832054138184,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.2482,
      "step": 28
    },
    {
      "epoch": 0.4009216589861751,
      "grad_norm": 7.248495578765869,
      "learning_rate": 1.45e-06,
      "loss": 0.1847,
      "step": 29
    },
    {
      "epoch": 0.4147465437788018,
      "grad_norm": 5.378540515899658,
      "learning_rate": 1.5e-06,
      "loss": 0.1423,
      "step": 30
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 3.8371808528900146,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.1152,
      "step": 31
    },
    {
      "epoch": 0.4423963133640553,
      "grad_norm": 2.2655274868011475,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.0845,
      "step": 32
    },
    {
      "epoch": 0.45622119815668205,
      "grad_norm": 1.5746861696243286,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.0711,
      "step": 33
    },
    {
      "epoch": 0.4700460829493088,
      "grad_norm": 1.3510947227478027,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.0734,
      "step": 34
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 0.9737389087677002,
      "learning_rate": 1.75e-06,
      "loss": 0.0651,
      "step": 35
    },
    {
      "epoch": 0.4976958525345622,
      "grad_norm": 0.9815284609794617,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0593,
      "step": 36
    },
    {
      "epoch": 0.511520737327189,
      "grad_norm": 0.8567912578582764,
      "learning_rate": 1.85e-06,
      "loss": 0.0543,
      "step": 37
    },
    {
      "epoch": 0.5253456221198156,
      "grad_norm": 0.6773302555084229,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0622,
      "step": 38
    },
    {
      "epoch": 0.5391705069124424,
      "grad_norm": 0.49936285614967346,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0511,
      "step": 39
    },
    {
      "epoch": 0.5529953917050692,
      "grad_norm": 0.6253588795661926,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0478,
      "step": 40
    },
    {
      "epoch": 0.5668202764976958,
      "grad_norm": 0.5103089809417725,
      "learning_rate": 2.05e-06,
      "loss": 0.0465,
      "step": 41
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 0.29294702410697937,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0456,
      "step": 42
    },
    {
      "epoch": 0.5944700460829493,
      "grad_norm": 0.4237954616546631,
      "learning_rate": 2.15e-06,
      "loss": 0.0501,
      "step": 43
    },
    {
      "epoch": 0.6082949308755761,
      "grad_norm": 0.42243412137031555,
      "learning_rate": 2.2e-06,
      "loss": 0.0388,
      "step": 44
    },
    {
      "epoch": 0.6221198156682027,
      "grad_norm": 0.37881818413734436,
      "learning_rate": 2.25e-06,
      "loss": 0.0415,
      "step": 45
    },
    {
      "epoch": 0.6359447004608295,
      "grad_norm": 0.4941152036190033,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.045,
      "step": 46
    },
    {
      "epoch": 0.6497695852534562,
      "grad_norm": 0.3046450912952423,
      "learning_rate": 2.35e-06,
      "loss": 0.0386,
      "step": 47
    },
    {
      "epoch": 0.663594470046083,
      "grad_norm": 0.39361852407455444,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0447,
      "step": 48
    },
    {
      "epoch": 0.6774193548387096,
      "grad_norm": 0.5190001130104065,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0364,
      "step": 49
    },
    {
      "epoch": 0.6912442396313364,
      "grad_norm": 0.372072696685791,
      "learning_rate": 2.5e-06,
      "loss": 0.043,
      "step": 50
    },
    {
      "epoch": 0.7050691244239631,
      "grad_norm": 0.3756551146507263,
      "learning_rate": 2.55e-06,
      "loss": 0.0424,
      "step": 51
    },
    {
      "epoch": 0.7188940092165899,
      "grad_norm": 0.4593554437160492,
      "learning_rate": 2.6e-06,
      "loss": 0.0387,
      "step": 52
    },
    {
      "epoch": 0.7327188940092166,
      "grad_norm": 0.2931855618953705,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0396,
      "step": 53
    },
    {
      "epoch": 0.7465437788018433,
      "grad_norm": 0.38429534435272217,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0373,
      "step": 54
    },
    {
      "epoch": 0.7603686635944701,
      "grad_norm": 0.3506857752799988,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.04,
      "step": 55
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 0.29847028851509094,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0369,
      "step": 56
    },
    {
      "epoch": 0.7880184331797235,
      "grad_norm": 0.3653375506401062,
      "learning_rate": 2.85e-06,
      "loss": 0.0396,
      "step": 57
    },
    {
      "epoch": 0.8018433179723502,
      "grad_norm": 0.3163083791732788,
      "learning_rate": 2.9e-06,
      "loss": 0.0337,
      "step": 58
    },
    {
      "epoch": 0.815668202764977,
      "grad_norm": 0.3734363615512848,
      "learning_rate": 2.95e-06,
      "loss": 0.0327,
      "step": 59
    },
    {
      "epoch": 0.8294930875576036,
      "grad_norm": 0.29547712206840515,
      "learning_rate": 3e-06,
      "loss": 0.0365,
      "step": 60
    },
    {
      "epoch": 0.8433179723502304,
      "grad_norm": 0.4041007161140442,
      "learning_rate": 3.05e-06,
      "loss": 0.038,
      "step": 61
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.3602149784564972,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.033,
      "step": 62
    },
    {
      "epoch": 0.8709677419354839,
      "grad_norm": 0.2948857545852661,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0386,
      "step": 63
    },
    {
      "epoch": 0.8847926267281107,
      "grad_norm": 0.39098358154296875,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0323,
      "step": 64
    },
    {
      "epoch": 0.8986175115207373,
      "grad_norm": 0.3692062795162201,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0309,
      "step": 65
    },
    {
      "epoch": 0.9124423963133641,
      "grad_norm": 0.3967229425907135,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0346,
      "step": 66
    },
    {
      "epoch": 0.9262672811059908,
      "grad_norm": 0.47776708006858826,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0355,
      "step": 67
    },
    {
      "epoch": 0.9400921658986175,
      "grad_norm": 0.21545131504535675,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0294,
      "step": 68
    },
    {
      "epoch": 0.9539170506912442,
      "grad_norm": 0.23738539218902588,
      "learning_rate": 3.45e-06,
      "loss": 0.0308,
      "step": 69
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 0.29174014925956726,
      "learning_rate": 3.5e-06,
      "loss": 0.0312,
      "step": 70
    },
    {
      "epoch": 0.9815668202764977,
      "grad_norm": 0.38475602865219116,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0324,
      "step": 71
    },
    {
      "epoch": 0.9953917050691244,
      "grad_norm": 0.4077378809452057,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0297,
      "step": 72
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.4077378809452057,
      "learning_rate": 3.65e-06,
      "loss": 0.031,
      "step": 73
    },
    {
      "epoch": 1.0138248847926268,
      "grad_norm": 0.46581539511680603,
      "learning_rate": 3.7e-06,
      "loss": 0.0313,
      "step": 74
    },
    {
      "epoch": 1.0276497695852536,
      "grad_norm": 0.24417200684547424,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.027,
      "step": 75
    },
    {
      "epoch": 1.0414746543778801,
      "grad_norm": 0.20425117015838623,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0307,
      "step": 76
    },
    {
      "epoch": 1.055299539170507,
      "grad_norm": 0.3578161597251892,
      "learning_rate": 3.85e-06,
      "loss": 0.0312,
      "step": 77
    },
    {
      "epoch": 1.0691244239631337,
      "grad_norm": 0.39486679434776306,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0294,
      "step": 78
    },
    {
      "epoch": 1.0829493087557605,
      "grad_norm": 0.3932795226573944,
      "learning_rate": 3.95e-06,
      "loss": 0.0307,
      "step": 79
    },
    {
      "epoch": 1.096774193548387,
      "grad_norm": 0.2946235239505768,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0257,
      "step": 80
    },
    {
      "epoch": 1.1105990783410138,
      "grad_norm": 0.3318672776222229,
      "learning_rate": 4.05e-06,
      "loss": 0.0296,
      "step": 81
    },
    {
      "epoch": 1.1244239631336406,
      "grad_norm": 0.23701588809490204,
      "learning_rate": 4.1e-06,
      "loss": 0.0298,
      "step": 82
    },
    {
      "epoch": 1.1382488479262673,
      "grad_norm": 0.2415941059589386,
      "learning_rate": 4.15e-06,
      "loss": 0.0256,
      "step": 83
    },
    {
      "epoch": 1.1520737327188941,
      "grad_norm": 0.24098087847232819,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0263,
      "step": 84
    },
    {
      "epoch": 1.1658986175115207,
      "grad_norm": 0.3530103862285614,
      "learning_rate": 4.25e-06,
      "loss": 0.0308,
      "step": 85
    },
    {
      "epoch": 1.1797235023041475,
      "grad_norm": 0.2382838875055313,
      "learning_rate": 4.3e-06,
      "loss": 0.0254,
      "step": 86
    },
    {
      "epoch": 1.1935483870967742,
      "grad_norm": 0.2670588791370392,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0255,
      "step": 87
    },
    {
      "epoch": 1.2073732718894008,
      "grad_norm": 0.30723804235458374,
      "learning_rate": 4.4e-06,
      "loss": 0.0263,
      "step": 88
    },
    {
      "epoch": 1.2211981566820276,
      "grad_norm": 0.505890965461731,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0265,
      "step": 89
    },
    {
      "epoch": 1.2350230414746544,
      "grad_norm": 0.24307991564273834,
      "learning_rate": 4.5e-06,
      "loss": 0.0227,
      "step": 90
    },
    {
      "epoch": 1.2488479262672811,
      "grad_norm": 0.2198561429977417,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.0261,
      "step": 91
    },
    {
      "epoch": 1.262672811059908,
      "grad_norm": 0.2435183823108673,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0225,
      "step": 92
    },
    {
      "epoch": 1.2764976958525347,
      "grad_norm": 0.18837811052799225,
      "learning_rate": 4.65e-06,
      "loss": 0.0218,
      "step": 93
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 0.3818771541118622,
      "learning_rate": 4.7e-06,
      "loss": 0.0223,
      "step": 94
    },
    {
      "epoch": 1.304147465437788,
      "grad_norm": 0.2358720600605011,
      "learning_rate": 4.75e-06,
      "loss": 0.0204,
      "step": 95
    },
    {
      "epoch": 1.3179723502304148,
      "grad_norm": 0.25374144315719604,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.022,
      "step": 96
    },
    {
      "epoch": 1.3317972350230414,
      "grad_norm": 0.36181601881980896,
      "learning_rate": 4.85e-06,
      "loss": 0.0244,
      "step": 97
    },
    {
      "epoch": 1.3456221198156681,
      "grad_norm": 0.3156590759754181,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0233,
      "step": 98
    },
    {
      "epoch": 1.359447004608295,
      "grad_norm": 0.21958638727664948,
      "learning_rate": 4.95e-06,
      "loss": 0.0218,
      "step": 99
    },
    {
      "epoch": 1.3732718894009217,
      "grad_norm": 0.34455621242523193,
      "learning_rate": 5e-06,
      "loss": 0.0267,
      "step": 100
    },
    {
      "epoch": 1.3870967741935485,
      "grad_norm": 0.283086359500885,
      "learning_rate": 4.999888074163108e-06,
      "loss": 0.0238,
      "step": 101
    },
    {
      "epoch": 1.400921658986175,
      "grad_norm": 0.28856486082077026,
      "learning_rate": 4.999552306674345e-06,
      "loss": 0.0186,
      "step": 102
    },
    {
      "epoch": 1.4147465437788018,
      "grad_norm": 0.26721692085266113,
      "learning_rate": 4.998992727598557e-06,
      "loss": 0.0193,
      "step": 103
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.3459971249103546,
      "learning_rate": 4.998209387040829e-06,
      "loss": 0.0218,
      "step": 104
    },
    {
      "epoch": 1.4423963133640554,
      "grad_norm": 0.25979122519493103,
      "learning_rate": 4.9972023551419995e-06,
      "loss": 0.0216,
      "step": 105
    },
    {
      "epoch": 1.456221198156682,
      "grad_norm": 0.19960424304008484,
      "learning_rate": 4.995971722072379e-06,
      "loss": 0.0176,
      "step": 106
    },
    {
      "epoch": 1.4700460829493087,
      "grad_norm": 0.2529441714286804,
      "learning_rate": 4.9945175980236745e-06,
      "loss": 0.0181,
      "step": 107
    },
    {
      "epoch": 1.4838709677419355,
      "grad_norm": 0.2690267264842987,
      "learning_rate": 4.992840113199131e-06,
      "loss": 0.0196,
      "step": 108
    },
    {
      "epoch": 1.4976958525345623,
      "grad_norm": 0.3516470789909363,
      "learning_rate": 4.990939417801859e-06,
      "loss": 0.0182,
      "step": 109
    },
    {
      "epoch": 1.511520737327189,
      "grad_norm": 0.30167508125305176,
      "learning_rate": 4.988815682021398e-06,
      "loss": 0.0205,
      "step": 110
    },
    {
      "epoch": 1.5253456221198156,
      "grad_norm": 0.3920849859714508,
      "learning_rate": 4.986469096018472e-06,
      "loss": 0.0177,
      "step": 111
    },
    {
      "epoch": 1.5391705069124424,
      "grad_norm": 0.3274078369140625,
      "learning_rate": 4.983899869907963e-06,
      "loss": 0.0185,
      "step": 112
    },
    {
      "epoch": 1.5529953917050692,
      "grad_norm": 0.2237282395362854,
      "learning_rate": 4.981108233740096e-06,
      "loss": 0.016,
      "step": 113
    },
    {
      "epoch": 1.5668202764976957,
      "grad_norm": 0.23966379463672638,
      "learning_rate": 4.978094437479843e-06,
      "loss": 0.0183,
      "step": 114
    },
    {
      "epoch": 1.5806451612903225,
      "grad_norm": 0.4027673602104187,
      "learning_rate": 4.97485875098454e-06,
      "loss": 0.0171,
      "step": 115
    },
    {
      "epoch": 1.5944700460829493,
      "grad_norm": 0.24082835018634796,
      "learning_rate": 4.971401463979722e-06,
      "loss": 0.016,
      "step": 116
    },
    {
      "epoch": 1.608294930875576,
      "grad_norm": 0.19387558102607727,
      "learning_rate": 4.967722886033181e-06,
      "loss": 0.0165,
      "step": 117
    },
    {
      "epoch": 1.6221198156682028,
      "grad_norm": 0.33696162700653076,
      "learning_rate": 4.963823346527249e-06,
      "loss": 0.0154,
      "step": 118
    },
    {
      "epoch": 1.6359447004608296,
      "grad_norm": 0.30290740728378296,
      "learning_rate": 4.959703194629304e-06,
      "loss": 0.0175,
      "step": 119
    },
    {
      "epoch": 1.6497695852534562,
      "grad_norm": 0.3781787157058716,
      "learning_rate": 4.955362799260507e-06,
      "loss": 0.0145,
      "step": 120
    },
    {
      "epoch": 1.663594470046083,
      "grad_norm": 0.39995357394218445,
      "learning_rate": 4.950802549062764e-06,
      "loss": 0.015,
      "step": 121
    },
    {
      "epoch": 1.6774193548387095,
      "grad_norm": 0.19926570355892181,
      "learning_rate": 4.946022852363932e-06,
      "loss": 0.0135,
      "step": 122
    },
    {
      "epoch": 1.6912442396313363,
      "grad_norm": 0.22450515627861023,
      "learning_rate": 4.9410241371412525e-06,
      "loss": 0.0135,
      "step": 123
    },
    {
      "epoch": 1.705069124423963,
      "grad_norm": 0.3588384985923767,
      "learning_rate": 4.935806850983034e-06,
      "loss": 0.0125,
      "step": 124
    },
    {
      "epoch": 1.7188940092165899,
      "grad_norm": 0.28571122884750366,
      "learning_rate": 4.9303714610485705e-06,
      "loss": 0.0166,
      "step": 125
    },
    {
      "epoch": 1.7327188940092166,
      "grad_norm": 0.3496967852115631,
      "learning_rate": 4.924718454026318e-06,
      "loss": 0.0139,
      "step": 126
    },
    {
      "epoch": 1.7465437788018434,
      "grad_norm": 0.3279854357242584,
      "learning_rate": 4.918848336090309e-06,
      "loss": 0.0133,
      "step": 127
    },
    {
      "epoch": 1.7603686635944702,
      "grad_norm": 0.19201801717281342,
      "learning_rate": 4.912761632854834e-06,
      "loss": 0.0151,
      "step": 128
    },
    {
      "epoch": 1.7741935483870968,
      "grad_norm": 0.27701929211616516,
      "learning_rate": 4.906458889327375e-06,
      "loss": 0.0148,
      "step": 129
    },
    {
      "epoch": 1.7880184331797235,
      "grad_norm": 0.2757968008518219,
      "learning_rate": 4.899940669859807e-06,
      "loss": 0.0118,
      "step": 130
    },
    {
      "epoch": 1.80184331797235,
      "grad_norm": 0.18373191356658936,
      "learning_rate": 4.893207558097867e-06,
      "loss": 0.0149,
      "step": 131
    },
    {
      "epoch": 1.8156682027649769,
      "grad_norm": 0.2116280496120453,
      "learning_rate": 4.8862601569288885e-06,
      "loss": 0.0129,
      "step": 132
    },
    {
      "epoch": 1.8294930875576036,
      "grad_norm": 0.30384117364883423,
      "learning_rate": 4.879099088427824e-06,
      "loss": 0.0136,
      "step": 133
    },
    {
      "epoch": 1.8433179723502304,
      "grad_norm": 0.3766787052154541,
      "learning_rate": 4.871724993801541e-06,
      "loss": 0.0123,
      "step": 134
    },
    {
      "epoch": 1.8571428571428572,
      "grad_norm": 0.3401263356208801,
      "learning_rate": 4.864138533331411e-06,
      "loss": 0.0122,
      "step": 135
    },
    {
      "epoch": 1.870967741935484,
      "grad_norm": 0.24321958422660828,
      "learning_rate": 4.8563403863141825e-06,
      "loss": 0.0123,
      "step": 136
    },
    {
      "epoch": 1.8847926267281108,
      "grad_norm": 0.16918110847473145,
      "learning_rate": 4.84833125100116e-06,
      "loss": 0.0104,
      "step": 137
    },
    {
      "epoch": 1.8986175115207373,
      "grad_norm": 0.23489230871200562,
      "learning_rate": 4.840111844535682e-06,
      "loss": 0.0122,
      "step": 138
    },
    {
      "epoch": 1.912442396313364,
      "grad_norm": 0.32796236872673035,
      "learning_rate": 4.8316829028889076e-06,
      "loss": 0.0109,
      "step": 139
    },
    {
      "epoch": 1.9262672811059907,
      "grad_norm": 0.24210475385189056,
      "learning_rate": 4.823045180793914e-06,
      "loss": 0.0118,
      "step": 140
    },
    {
      "epoch": 1.9400921658986174,
      "grad_norm": 0.3450548052787781,
      "learning_rate": 4.8141994516781196e-06,
      "loss": 0.0115,
      "step": 141
    },
    {
      "epoch": 1.9539170506912442,
      "grad_norm": 0.23163923621177673,
      "learning_rate": 4.805146507594034e-06,
      "loss": 0.0122,
      "step": 142
    },
    {
      "epoch": 1.967741935483871,
      "grad_norm": 0.8197745084762573,
      "learning_rate": 4.7958871591483305e-06,
      "loss": 0.0101,
      "step": 143
    },
    {
      "epoch": 1.9815668202764978,
      "grad_norm": 0.2917576730251312,
      "learning_rate": 4.786422235429269e-06,
      "loss": 0.0078,
      "step": 144
    },
    {
      "epoch": 1.9953917050691246,
      "grad_norm": 0.24417108297348022,
      "learning_rate": 4.776752583932455e-06,
      "loss": 0.0119,
      "step": 145
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.24417108297348022,
      "learning_rate": 4.766879070484957e-06,
      "loss": 0.0089,
      "step": 146
    },
    {
      "epoch": 2.013824884792627,
      "grad_norm": 0.4215025305747986,
      "learning_rate": 4.756802579167781e-06,
      "loss": 0.007,
      "step": 147
    },
    {
      "epoch": 2.0276497695852536,
      "grad_norm": 0.2002098709344864,
      "learning_rate": 4.746524012236706e-06,
      "loss": 0.0078,
      "step": 148
    },
    {
      "epoch": 2.0414746543778803,
      "grad_norm": 0.16432569921016693,
      "learning_rate": 4.736044290041496e-06,
      "loss": 0.0074,
      "step": 149
    },
    {
      "epoch": 2.055299539170507,
      "grad_norm": 0.2516174018383026,
      "learning_rate": 4.725364350943492e-06,
      "loss": 0.0067,
      "step": 150
    },
    {
      "epoch": 2.0691244239631335,
      "grad_norm": 0.24242427945137024,
      "learning_rate": 4.714485151231593e-06,
      "loss": 0.0083,
      "step": 151
    },
    {
      "epoch": 2.0829493087557602,
      "grad_norm": 0.22929197549819946,
      "learning_rate": 4.703407665036622e-06,
      "loss": 0.0061,
      "step": 152
    },
    {
      "epoch": 2.096774193548387,
      "grad_norm": 0.2929408550262451,
      "learning_rate": 4.692132884244113e-06,
      "loss": 0.0064,
      "step": 153
    },
    {
      "epoch": 2.110599078341014,
      "grad_norm": 0.22497303783893585,
      "learning_rate": 4.680661818405485e-06,
      "loss": 0.0061,
      "step": 154
    },
    {
      "epoch": 2.1244239631336406,
      "grad_norm": 0.13698536157608032,
      "learning_rate": 4.668995494647653e-06,
      "loss": 0.0059,
      "step": 155
    },
    {
      "epoch": 2.1382488479262673,
      "grad_norm": 0.32037150859832764,
      "learning_rate": 4.657134957581057e-06,
      "loss": 0.0067,
      "step": 156
    },
    {
      "epoch": 2.152073732718894,
      "grad_norm": 0.19389067590236664,
      "learning_rate": 4.645081269206128e-06,
      "loss": 0.0062,
      "step": 157
    },
    {
      "epoch": 2.165898617511521,
      "grad_norm": 0.2791127562522888,
      "learning_rate": 4.632835508818192e-06,
      "loss": 0.0058,
      "step": 158
    },
    {
      "epoch": 2.1797235023041477,
      "grad_norm": 0.2178739458322525,
      "learning_rate": 4.620398772910833e-06,
      "loss": 0.0056,
      "step": 159
    },
    {
      "epoch": 2.193548387096774,
      "grad_norm": 0.29685622453689575,
      "learning_rate": 4.607772175077712e-06,
      "loss": 0.0055,
      "step": 160
    },
    {
      "epoch": 2.207373271889401,
      "grad_norm": 0.6792906522750854,
      "learning_rate": 4.59495684591285e-06,
      "loss": 0.0057,
      "step": 161
    },
    {
      "epoch": 2.2211981566820276,
      "grad_norm": 0.17910148203372955,
      "learning_rate": 4.581953932909403e-06,
      "loss": 0.0046,
      "step": 162
    },
    {
      "epoch": 2.2350230414746544,
      "grad_norm": 0.12593543529510498,
      "learning_rate": 4.5687646003569055e-06,
      "loss": 0.0046,
      "step": 163
    },
    {
      "epoch": 2.248847926267281,
      "grad_norm": 0.15383680164813995,
      "learning_rate": 4.555390029237026e-06,
      "loss": 0.0059,
      "step": 164
    },
    {
      "epoch": 2.262672811059908,
      "grad_norm": 0.2324540764093399,
      "learning_rate": 4.541831417117815e-06,
      "loss": 0.0067,
      "step": 165
    },
    {
      "epoch": 2.2764976958525347,
      "grad_norm": 0.21278905868530273,
      "learning_rate": 4.528089978046481e-06,
      "loss": 0.0054,
      "step": 166
    },
    {
      "epoch": 2.2903225806451615,
      "grad_norm": 0.2499057948589325,
      "learning_rate": 4.514166942440679e-06,
      "loss": 0.003,
      "step": 167
    },
    {
      "epoch": 2.3041474654377883,
      "grad_norm": 0.1734611839056015,
      "learning_rate": 4.5000635569783365e-06,
      "loss": 0.0043,
      "step": 168
    },
    {
      "epoch": 2.3179723502304146,
      "grad_norm": 0.17815802991390228,
      "learning_rate": 4.4857810844860325e-06,
      "loss": 0.0048,
      "step": 169
    },
    {
      "epoch": 2.3317972350230414,
      "grad_norm": 0.22731409966945648,
      "learning_rate": 4.471320803825915e-06,
      "loss": 0.0034,
      "step": 170
    },
    {
      "epoch": 2.345622119815668,
      "grad_norm": 0.23811140656471252,
      "learning_rate": 4.4566840097811956e-06,
      "loss": 0.0029,
      "step": 171
    },
    {
      "epoch": 2.359447004608295,
      "grad_norm": 0.17744024097919464,
      "learning_rate": 4.4418720129402145e-06,
      "loss": 0.0029,
      "step": 172
    },
    {
      "epoch": 2.3732718894009217,
      "grad_norm": 0.24912229180335999,
      "learning_rate": 4.426886139579083e-06,
      "loss": 0.0049,
      "step": 173
    },
    {
      "epoch": 2.3870967741935485,
      "grad_norm": 0.17039696872234344,
      "learning_rate": 4.411727731542937e-06,
      "loss": 0.003,
      "step": 174
    },
    {
      "epoch": 2.4009216589861753,
      "grad_norm": 0.3089725375175476,
      "learning_rate": 4.39639814612578e-06,
      "loss": 0.0034,
      "step": 175
    },
    {
      "epoch": 2.4147465437788016,
      "grad_norm": 0.22647598385810852,
      "learning_rate": 4.3808987559489536e-06,
      "loss": 0.0058,
      "step": 176
    },
    {
      "epoch": 2.4285714285714284,
      "grad_norm": 0.19015835225582123,
      "learning_rate": 4.365230948838232e-06,
      "loss": 0.004,
      "step": 177
    },
    {
      "epoch": 2.442396313364055,
      "grad_norm": 0.1825973391532898,
      "learning_rate": 4.349396127699552e-06,
      "loss": 0.0032,
      "step": 178
    },
    {
      "epoch": 2.456221198156682,
      "grad_norm": 0.15705449879169464,
      "learning_rate": 4.3333957103934025e-06,
      "loss": 0.0035,
      "step": 179
    },
    {
      "epoch": 2.4700460829493087,
      "grad_norm": 0.19110225141048431,
      "learning_rate": 4.317231129607859e-06,
      "loss": 0.0019,
      "step": 180
    },
    {
      "epoch": 2.4838709677419355,
      "grad_norm": 0.1481270045042038,
      "learning_rate": 4.30090383273031e-06,
      "loss": 0.0035,
      "step": 181
    },
    {
      "epoch": 2.4976958525345623,
      "grad_norm": 0.19533571600914001,
      "learning_rate": 4.2844152817178476e-06,
      "loss": 0.0023,
      "step": 182
    },
    {
      "epoch": 2.511520737327189,
      "grad_norm": 0.1991293579339981,
      "learning_rate": 4.267766952966369e-06,
      "loss": 0.0025,
      "step": 183
    },
    {
      "epoch": 2.525345622119816,
      "grad_norm": 0.22637878358364105,
      "learning_rate": 4.2509603371783776e-06,
      "loss": 0.0029,
      "step": 184
    },
    {
      "epoch": 2.539170506912442,
      "grad_norm": 0.21984712779521942,
      "learning_rate": 4.233996939229502e-06,
      "loss": 0.0035,
      "step": 185
    },
    {
      "epoch": 2.5529953917050694,
      "grad_norm": 0.25706061720848083,
      "learning_rate": 4.216878278033753e-06,
      "loss": 0.0033,
      "step": 186
    },
    {
      "epoch": 2.5668202764976957,
      "grad_norm": 0.224118173122406,
      "learning_rate": 4.199605886407515e-06,
      "loss": 0.0017,
      "step": 187
    },
    {
      "epoch": 2.5806451612903225,
      "grad_norm": 0.0781751424074173,
      "learning_rate": 4.1821813109322975e-06,
      "loss": 0.002,
      "step": 188
    },
    {
      "epoch": 2.5944700460829493,
      "grad_norm": 0.2209765911102295,
      "learning_rate": 4.164606111816256e-06,
      "loss": 0.0018,
      "step": 189
    },
    {
      "epoch": 2.608294930875576,
      "grad_norm": 0.12815824151039124,
      "learning_rate": 4.146881862754485e-06,
      "loss": 0.003,
      "step": 190
    },
    {
      "epoch": 2.622119815668203,
      "grad_norm": 0.3006991147994995,
      "learning_rate": 4.129010150788112e-06,
      "loss": 0.0022,
      "step": 191
    },
    {
      "epoch": 2.6359447004608296,
      "grad_norm": 0.19085584580898285,
      "learning_rate": 4.110992576162193e-06,
      "loss": 0.0026,
      "step": 192
    },
    {
      "epoch": 2.6497695852534564,
      "grad_norm": 0.13027659058570862,
      "learning_rate": 4.092830752182423e-06,
      "loss": 0.0015,
      "step": 193
    },
    {
      "epoch": 2.6635944700460827,
      "grad_norm": 0.16998590528964996,
      "learning_rate": 4.074526305070679e-06,
      "loss": 0.0018,
      "step": 194
    },
    {
      "epoch": 2.6774193548387095,
      "grad_norm": 0.1743537187576294,
      "learning_rate": 4.056080873819412e-06,
      "loss": 0.0022,
      "step": 195
    },
    {
      "epoch": 2.6912442396313363,
      "grad_norm": 0.3566405177116394,
      "learning_rate": 4.037496110044885e-06,
      "loss": 0.0018,
      "step": 196
    },
    {
      "epoch": 2.705069124423963,
      "grad_norm": 0.274739146232605,
      "learning_rate": 4.018773677839289e-06,
      "loss": 0.0012,
      "step": 197
    },
    {
      "epoch": 2.71889400921659,
      "grad_norm": 0.12038746476173401,
      "learning_rate": 3.999915253621739e-06,
      "loss": 0.0013,
      "step": 198
    },
    {
      "epoch": 2.7327188940092166,
      "grad_norm": 0.12693172693252563,
      "learning_rate": 3.980922525988167e-06,
      "loss": 0.0017,
      "step": 199
    },
    {
      "epoch": 2.7465437788018434,
      "grad_norm": 0.11907753348350525,
      "learning_rate": 3.961797195560118e-06,
      "loss": 0.001,
      "step": 200
    },
    {
      "epoch": 2.76036866359447,
      "grad_norm": 0.1901165395975113,
      "learning_rate": 3.942540974832486e-06,
      "loss": 0.0028,
      "step": 201
    },
    {
      "epoch": 2.774193548387097,
      "grad_norm": 0.2039843052625656,
      "learning_rate": 3.9231555880201655e-06,
      "loss": 0.0011,
      "step": 202
    },
    {
      "epoch": 2.7880184331797233,
      "grad_norm": 0.16181506216526031,
      "learning_rate": 3.903642770903671e-06,
      "loss": 0.003,
      "step": 203
    },
    {
      "epoch": 2.80184331797235,
      "grad_norm": 0.13345211744308472,
      "learning_rate": 3.884004270673711e-06,
      "loss": 0.0023,
      "step": 204
    },
    {
      "epoch": 2.815668202764977,
      "grad_norm": 0.19453725218772888,
      "learning_rate": 3.864241845774746e-06,
      "loss": 0.001,
      "step": 205
    },
    {
      "epoch": 2.8294930875576036,
      "grad_norm": 0.18157535791397095,
      "learning_rate": 3.844357265747531e-06,
      "loss": 0.0029,
      "step": 206
    },
    {
      "epoch": 2.8433179723502304,
      "grad_norm": 0.17876467108726501,
      "learning_rate": 3.8243523110706736e-06,
      "loss": 0.0018,
      "step": 207
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.13000421226024628,
      "learning_rate": 3.8042287730012117e-06,
      "loss": 0.0011,
      "step": 208
    },
    {
      "epoch": 2.870967741935484,
      "grad_norm": 0.08808371424674988,
      "learning_rate": 3.7839884534142157e-06,
      "loss": 0.0018,
      "step": 209
    },
    {
      "epoch": 2.8847926267281108,
      "grad_norm": 0.32318148016929626,
      "learning_rate": 3.7636331646414524e-06,
      "loss": 0.0012,
      "step": 210
    },
    {
      "epoch": 2.8986175115207375,
      "grad_norm": 0.1259954422712326,
      "learning_rate": 3.7431647293091076e-06,
      "loss": 0.0012,
      "step": 211
    },
    {
      "epoch": 2.912442396313364,
      "grad_norm": 0.1344563215970993,
      "learning_rate": 3.7225849801745835e-06,
      "loss": 0.0006,
      "step": 212
    },
    {
      "epoch": 2.9262672811059907,
      "grad_norm": 0.09105626493692398,
      "learning_rate": 3.701895759962397e-06,
      "loss": 0.0007,
      "step": 213
    },
    {
      "epoch": 2.9400921658986174,
      "grad_norm": 0.11718853563070297,
      "learning_rate": 3.6810989211991777e-06,
      "loss": 0.0022,
      "step": 214
    },
    {
      "epoch": 2.953917050691244,
      "grad_norm": 0.10988112539052963,
      "learning_rate": 3.6601963260477923e-06,
      "loss": 0.0007,
      "step": 215
    },
    {
      "epoch": 2.967741935483871,
      "grad_norm": 0.12010538578033447,
      "learning_rate": 3.6391898461406045e-06,
      "loss": 0.0014,
      "step": 216
    },
    {
      "epoch": 2.9815668202764978,
      "grad_norm": 0.12934529781341553,
      "learning_rate": 3.6180813624118898e-06,
      "loss": 0.001,
      "step": 217
    },
    {
      "epoch": 2.9953917050691246,
      "grad_norm": 0.05664035677909851,
      "learning_rate": 3.5968727649294134e-06,
      "loss": 0.0002,
      "step": 218
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.07633747160434723,
      "learning_rate": 3.575565952725193e-06,
      "loss": 0.0002,
      "step": 219
    },
    {
      "epoch": 3.013824884792627,
      "grad_norm": 0.16964735090732574,
      "learning_rate": 3.55416283362546e-06,
      "loss": 0.0005,
      "step": 220
    },
    {
      "epoch": 3.0276497695852536,
      "grad_norm": 0.03826030716300011,
      "learning_rate": 3.5326653240798283e-06,
      "loss": 0.0003,
      "step": 221
    },
    {
      "epoch": 3.0414746543778803,
      "grad_norm": 0.05900357663631439,
      "learning_rate": 3.5110753489896924e-06,
      "loss": 0.0006,
      "step": 222
    },
    {
      "epoch": 3.055299539170507,
      "grad_norm": 0.06874338537454605,
      "learning_rate": 3.4893948415358803e-06,
      "loss": 0.0002,
      "step": 223
    },
    {
      "epoch": 3.0691244239631335,
      "grad_norm": 0.10445930808782578,
      "learning_rate": 3.4676257430055438e-06,
      "loss": 0.0011,
      "step": 224
    },
    {
      "epoch": 3.0829493087557602,
      "grad_norm": 0.03757224604487419,
      "learning_rate": 3.4457700026183378e-06,
      "loss": 0.0002,
      "step": 225
    },
    {
      "epoch": 3.096774193548387,
      "grad_norm": 0.2678232491016388,
      "learning_rate": 3.4238295773518924e-06,
      "loss": 0.0012,
      "step": 226
    },
    {
      "epoch": 3.110599078341014,
      "grad_norm": 0.11278262734413147,
      "learning_rate": 3.4018064317665745e-06,
      "loss": 0.0003,
      "step": 227
    },
    {
      "epoch": 3.1244239631336406,
      "grad_norm": 0.03823389112949371,
      "learning_rate": 3.3797025378295826e-06,
      "loss": 0.0002,
      "step": 228
    },
    {
      "epoch": 3.1382488479262673,
      "grad_norm": 0.015309945680201054,
      "learning_rate": 3.357519874738382e-06,
      "loss": 0.0,
      "step": 229
    },
    {
      "epoch": 3.152073732718894,
      "grad_norm": 0.12372211366891861,
      "learning_rate": 3.3352604287434752e-06,
      "loss": 0.0007,
      "step": 230
    },
    {
      "epoch": 3.165898617511521,
      "grad_norm": 0.062292926013469696,
      "learning_rate": 3.31292619297056e-06,
      "loss": 0.0003,
      "step": 231
    },
    {
      "epoch": 3.1797235023041477,
      "grad_norm": 0.02390543930232525,
      "learning_rate": 3.29051916724206e-06,
      "loss": 0.0001,
      "step": 232
    },
    {
      "epoch": 3.193548387096774,
      "grad_norm": 0.035650208592414856,
      "learning_rate": 3.2680413578980623e-06,
      "loss": 0.0001,
      "step": 233
    },
    {
      "epoch": 3.207373271889401,
      "grad_norm": 0.04304494708776474,
      "learning_rate": 3.245494777616664e-06,
      "loss": 0.0001,
      "step": 234
    },
    {
      "epoch": 3.2211981566820276,
      "grad_norm": 0.07038014382123947,
      "learning_rate": 3.2228814452337587e-06,
      "loss": 0.0005,
      "step": 235
    },
    {
      "epoch": 3.2350230414746544,
      "grad_norm": 0.18309231102466583,
      "learning_rate": 3.2002033855622683e-06,
      "loss": 0.0005,
      "step": 236
    },
    {
      "epoch": 3.248847926267281,
      "grad_norm": 0.04949740692973137,
      "learning_rate": 3.177462629210838e-06,
      "loss": 0.0002,
      "step": 237
    },
    {
      "epoch": 3.262672811059908,
      "grad_norm": 0.0319606214761734,
      "learning_rate": 3.154661212402017e-06,
      "loss": 0.0001,
      "step": 238
    },
    {
      "epoch": 3.2764976958525347,
      "grad_norm": 0.062357429414987564,
      "learning_rate": 3.131801176789934e-06,
      "loss": 0.0003,
      "step": 239
    },
    {
      "epoch": 3.2903225806451615,
      "grad_norm": 0.060603659600019455,
      "learning_rate": 3.1088845692774798e-06,
      "loss": 0.0003,
      "step": 240
    },
    {
      "epoch": 3.3041474654377883,
      "grad_norm": 0.12379086762666702,
      "learning_rate": 3.0859134418330373e-06,
      "loss": 0.0013,
      "step": 241
    },
    {
      "epoch": 3.3179723502304146,
      "grad_norm": 0.028559578582644463,
      "learning_rate": 3.0628898513067357e-06,
      "loss": 0.0002,
      "step": 242
    },
    {
      "epoch": 3.3317972350230414,
      "grad_norm": 0.04983198642730713,
      "learning_rate": 3.0398158592462847e-06,
      "loss": 0.0001,
      "step": 243
    },
    {
      "epoch": 3.345622119815668,
      "grad_norm": 0.07023701816797256,
      "learning_rate": 3.0166935317123824e-06,
      "loss": 0.0009,
      "step": 244
    },
    {
      "epoch": 3.359447004608295,
      "grad_norm": 0.046779777854681015,
      "learning_rate": 2.9935249390937184e-06,
      "loss": 0.0002,
      "step": 245
    },
    {
      "epoch": 3.3732718894009217,
      "grad_norm": 0.07187545299530029,
      "learning_rate": 2.970312155921585e-06,
      "loss": 0.0006,
      "step": 246
    },
    {
      "epoch": 3.3870967741935485,
      "grad_norm": 0.019256649538874626,
      "learning_rate": 2.9470572606841295e-06,
      "loss": 0.0001,
      "step": 247
    },
    {
      "epoch": 3.4009216589861753,
      "grad_norm": 0.0477205291390419,
      "learning_rate": 2.9237623356402423e-06,
      "loss": 0.0002,
      "step": 248
    },
    {
      "epoch": 3.4147465437788016,
      "grad_norm": 0.06807561218738556,
      "learning_rate": 2.900429466633107e-06,
      "loss": 0.0003,
      "step": 249
    },
    {
      "epoch": 3.4285714285714284,
      "grad_norm": 0.1516796499490738,
      "learning_rate": 2.8770607429034352e-06,
      "loss": 0.0006,
      "step": 250
    },
    {
      "epoch": 3.442396313364055,
      "grad_norm": 0.045213282108306885,
      "learning_rate": 2.8536582569023964e-06,
      "loss": 0.0001,
      "step": 251
    },
    {
      "epoch": 3.456221198156682,
      "grad_norm": 0.020110802724957466,
      "learning_rate": 2.8302241041042564e-06,
      "loss": 0.0001,
      "step": 252
    },
    {
      "epoch": 3.4700460829493087,
      "grad_norm": 0.03242102265357971,
      "learning_rate": 2.8067603828187446e-06,
      "loss": 0.0002,
      "step": 253
    },
    {
      "epoch": 3.4838709677419355,
      "grad_norm": 0.039639636874198914,
      "learning_rate": 2.7832691940031755e-06,
      "loss": 0.0001,
      "step": 254
    },
    {
      "epoch": 3.4976958525345623,
      "grad_norm": 0.06943561136722565,
      "learning_rate": 2.759752641074322e-06,
      "loss": 0.0003,
      "step": 255
    },
    {
      "epoch": 3.511520737327189,
      "grad_norm": 0.02593497559428215,
      "learning_rate": 2.7362128297200784e-06,
      "loss": 0.0001,
      "step": 256
    },
    {
      "epoch": 3.525345622119816,
      "grad_norm": 0.02811415307223797,
      "learning_rate": 2.712651867710914e-06,
      "loss": 0.0002,
      "step": 257
    },
    {
      "epoch": 3.539170506912442,
      "grad_norm": 0.07381757348775864,
      "learning_rate": 2.6890718647111424e-06,
      "loss": 0.0003,
      "step": 258
    },
    {
      "epoch": 3.5529953917050694,
      "grad_norm": 0.014391073025763035,
      "learning_rate": 2.665474932090017e-06,
      "loss": 0.0001,
      "step": 259
    },
    {
      "epoch": 3.5668202764976957,
      "grad_norm": 0.027200503274798393,
      "learning_rate": 2.6418631827326857e-06,
      "loss": 0.0001,
      "step": 260
    },
    {
      "epoch": 3.5806451612903225,
      "grad_norm": 0.02720312774181366,
      "learning_rate": 2.6182387308509927e-06,
      "loss": 0.0001,
      "step": 261
    },
    {
      "epoch": 3.5944700460829493,
      "grad_norm": 0.04352420195937157,
      "learning_rate": 2.5946036917941765e-06,
      "loss": 0.0004,
      "step": 262
    },
    {
      "epoch": 3.608294930875576,
      "grad_norm": 0.03459783270955086,
      "learning_rate": 2.570960181859458e-06,
      "loss": 0.0001,
      "step": 263
    },
    {
      "epoch": 3.622119815668203,
      "grad_norm": 0.03097033128142357,
      "learning_rate": 2.547310318102548e-06,
      "loss": 0.0001,
      "step": 264
    },
    {
      "epoch": 3.6359447004608296,
      "grad_norm": 0.0076720695942640305,
      "learning_rate": 2.5236562181480794e-06,
      "loss": 0.0,
      "step": 265
    },
    {
      "epoch": 3.6497695852534564,
      "grad_norm": 0.023994900286197662,
      "learning_rate": 2.5e-06,
      "loss": 0.0001,
      "step": 266
    },
    {
      "epoch": 3.6635944700460827,
      "grad_norm": 0.005682840943336487,
      "learning_rate": 2.4763437818519205e-06,
      "loss": 0.0,
      "step": 267
    },
    {
      "epoch": 3.6774193548387095,
      "grad_norm": 0.030443254858255386,
      "learning_rate": 2.4526896818974534e-06,
      "loss": 0.0002,
      "step": 268
    },
    {
      "epoch": 3.6912442396313363,
      "grad_norm": 0.008863283321261406,
      "learning_rate": 2.429039818140543e-06,
      "loss": 0.0,
      "step": 269
    },
    {
      "epoch": 3.705069124423963,
      "grad_norm": 0.009775679558515549,
      "learning_rate": 2.405396308205825e-06,
      "loss": 0.0001,
      "step": 270
    },
    {
      "epoch": 3.71889400921659,
      "grad_norm": 0.019227130338549614,
      "learning_rate": 2.381761269149009e-06,
      "loss": 0.0001,
      "step": 271
    },
    {
      "epoch": 3.7327188940092166,
      "grad_norm": 0.037880923599004745,
      "learning_rate": 2.358136817267315e-06,
      "loss": 0.0002,
      "step": 272
    },
    {
      "epoch": 3.7465437788018434,
      "grad_norm": 0.006014773156493902,
      "learning_rate": 2.334525067909983e-06,
      "loss": 0.0,
      "step": 273
    },
    {
      "epoch": 3.76036866359447,
      "grad_norm": 0.024770596995949745,
      "learning_rate": 2.3109281352888593e-06,
      "loss": 0.0001,
      "step": 274
    },
    {
      "epoch": 3.774193548387097,
      "grad_norm": 0.008392867632210255,
      "learning_rate": 2.2873481322890866e-06,
      "loss": 0.0,
      "step": 275
    },
    {
      "epoch": 3.7880184331797233,
      "grad_norm": 0.030915284529328346,
      "learning_rate": 2.263787170279922e-06,
      "loss": 0.0002,
      "step": 276
    },
    {
      "epoch": 3.80184331797235,
      "grad_norm": 0.04161324352025986,
      "learning_rate": 2.2402473589256793e-06,
      "loss": 0.0002,
      "step": 277
    },
    {
      "epoch": 3.815668202764977,
      "grad_norm": 0.04104781523346901,
      "learning_rate": 2.2167308059968258e-06,
      "loss": 0.0001,
      "step": 278
    },
    {
      "epoch": 3.8294930875576036,
      "grad_norm": 0.02981358952820301,
      "learning_rate": 2.193239617181256e-06,
      "loss": 0.0002,
      "step": 279
    },
    {
      "epoch": 3.8433179723502304,
      "grad_norm": 0.03616194799542427,
      "learning_rate": 2.169775895895745e-06,
      "loss": 0.0002,
      "step": 280
    },
    {
      "epoch": 3.857142857142857,
      "grad_norm": 0.003307241713628173,
      "learning_rate": 2.146341743097604e-06,
      "loss": 0.0,
      "step": 281
    },
    {
      "epoch": 3.870967741935484,
      "grad_norm": 0.023682212457060814,
      "learning_rate": 2.1229392570965656e-06,
      "loss": 0.0002,
      "step": 282
    },
    {
      "epoch": 3.8847926267281108,
      "grad_norm": 0.08077914267778397,
      "learning_rate": 2.0995705333668948e-06,
      "loss": 0.0006,
      "step": 283
    },
    {
      "epoch": 3.8986175115207375,
      "grad_norm": 0.012258109636604786,
      "learning_rate": 2.0762376643597586e-06,
      "loss": 0.0001,
      "step": 284
    },
    {
      "epoch": 3.912442396313364,
      "grad_norm": 0.012420260347425938,
      "learning_rate": 2.0529427393158704e-06,
      "loss": 0.0001,
      "step": 285
    },
    {
      "epoch": 3.9262672811059907,
      "grad_norm": 0.03773212060332298,
      "learning_rate": 2.0296878440784164e-06,
      "loss": 0.0001,
      "step": 286
    },
    {
      "epoch": 3.9400921658986174,
      "grad_norm": 0.03834720700979233,
      "learning_rate": 2.006475060906283e-06,
      "loss": 0.0001,
      "step": 287
    },
    {
      "epoch": 3.953917050691244,
      "grad_norm": 0.06677021831274033,
      "learning_rate": 1.9833064682876175e-06,
      "loss": 0.0001,
      "step": 288
    },
    {
      "epoch": 3.967741935483871,
      "grad_norm": 0.010336378589272499,
      "learning_rate": 1.9601841407537157e-06,
      "loss": 0.0,
      "step": 289
    },
    {
      "epoch": 3.9815668202764978,
      "grad_norm": 0.003101527225226164,
      "learning_rate": 1.937110148693265e-06,
      "loss": 0.0,
      "step": 290
    },
    {
      "epoch": 3.9953917050691246,
      "grad_norm": 0.033553846180438995,
      "learning_rate": 1.9140865581669627e-06,
      "loss": 0.0001,
      "step": 291
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.033553846180438995,
      "learning_rate": 1.8911154307225204e-06,
      "loss": 0.0001,
      "step": 292
    },
    {
      "epoch": 4.013824884792626,
      "grad_norm": 0.050998423248529434,
      "learning_rate": 1.8681988232100674e-06,
      "loss": 0.0,
      "step": 293
    },
    {
      "epoch": 4.027649769585254,
      "grad_norm": 0.016056543216109276,
      "learning_rate": 1.8453387875979834e-06,
      "loss": 0.0,
      "step": 294
    },
    {
      "epoch": 4.04147465437788,
      "grad_norm": 0.003037769114598632,
      "learning_rate": 1.822537370789163e-06,
      "loss": 0.0,
      "step": 295
    },
    {
      "epoch": 4.055299539170507,
      "grad_norm": 0.01739078015089035,
      "learning_rate": 1.7997966144377328e-06,
      "loss": 0.0001,
      "step": 296
    },
    {
      "epoch": 4.0691244239631335,
      "grad_norm": 0.012232556939125061,
      "learning_rate": 1.7771185547662417e-06,
      "loss": 0.0,
      "step": 297
    },
    {
      "epoch": 4.082949308755761,
      "grad_norm": 0.0021200303453952074,
      "learning_rate": 1.754505222383337e-06,
      "loss": 0.0,
      "step": 298
    },
    {
      "epoch": 4.096774193548387,
      "grad_norm": 0.0028847770299762487,
      "learning_rate": 1.7319586421019383e-06,
      "loss": 0.0,
      "step": 299
    },
    {
      "epoch": 4.110599078341014,
      "grad_norm": 0.02263806015253067,
      "learning_rate": 1.7094808327579401e-06,
      "loss": 0.0001,
      "step": 300
    },
    {
      "epoch": 4.124423963133641,
      "grad_norm": 0.008537916466593742,
      "learning_rate": 1.6870738070294412e-06,
      "loss": 0.0,
      "step": 301
    },
    {
      "epoch": 4.138248847926267,
      "grad_norm": 0.0025778906419873238,
      "learning_rate": 1.6647395712565256e-06,
      "loss": 0.0,
      "step": 302
    },
    {
      "epoch": 4.152073732718894,
      "grad_norm": 0.008922383189201355,
      "learning_rate": 1.6424801252616186e-06,
      "loss": 0.0,
      "step": 303
    },
    {
      "epoch": 4.1658986175115205,
      "grad_norm": 0.008659109473228455,
      "learning_rate": 1.6202974621704176e-06,
      "loss": 0.0,
      "step": 304
    },
    {
      "epoch": 4.179723502304148,
      "grad_norm": 0.006877637468278408,
      "learning_rate": 1.5981935682334266e-06,
      "loss": 0.0001,
      "step": 305
    },
    {
      "epoch": 4.193548387096774,
      "grad_norm": 0.00891530979424715,
      "learning_rate": 1.5761704226481078e-06,
      "loss": 0.0001,
      "step": 306
    },
    {
      "epoch": 4.207373271889401,
      "grad_norm": 0.005250364542007446,
      "learning_rate": 1.5542299973816626e-06,
      "loss": 0.0,
      "step": 307
    },
    {
      "epoch": 4.221198156682028,
      "grad_norm": 0.005915890447795391,
      "learning_rate": 1.5323742569944573e-06,
      "loss": 0.0,
      "step": 308
    },
    {
      "epoch": 4.235023041474655,
      "grad_norm": 0.008372033014893532,
      "learning_rate": 1.5106051584641208e-06,
      "loss": 0.0,
      "step": 309
    },
    {
      "epoch": 4.248847926267281,
      "grad_norm": 0.0033532341476529837,
      "learning_rate": 1.4889246510103078e-06,
      "loss": 0.0,
      "step": 310
    },
    {
      "epoch": 4.2626728110599075,
      "grad_norm": 0.017146175727248192,
      "learning_rate": 1.4673346759201728e-06,
      "loss": 0.0001,
      "step": 311
    },
    {
      "epoch": 4.276497695852535,
      "grad_norm": 0.010326577350497246,
      "learning_rate": 1.44583716637454e-06,
      "loss": 0.0,
      "step": 312
    },
    {
      "epoch": 4.290322580645161,
      "grad_norm": 0.0025458873715251684,
      "learning_rate": 1.4244340472748076e-06,
      "loss": 0.0,
      "step": 313
    },
    {
      "epoch": 4.304147465437788,
      "grad_norm": 0.0022526225075125694,
      "learning_rate": 1.403127235070587e-06,
      "loss": 0.0,
      "step": 314
    },
    {
      "epoch": 4.317972350230415,
      "grad_norm": 0.029883896932005882,
      "learning_rate": 1.381918637588112e-06,
      "loss": 0.0001,
      "step": 315
    },
    {
      "epoch": 4.331797235023042,
      "grad_norm": 0.005568996071815491,
      "learning_rate": 1.3608101538593965e-06,
      "loss": 0.0,
      "step": 316
    },
    {
      "epoch": 4.345622119815668,
      "grad_norm": 0.005829329136759043,
      "learning_rate": 1.3398036739522088e-06,
      "loss": 0.0,
      "step": 317
    },
    {
      "epoch": 4.359447004608295,
      "grad_norm": 0.010301432572305202,
      "learning_rate": 1.3189010788008234e-06,
      "loss": 0.0,
      "step": 318
    },
    {
      "epoch": 4.373271889400922,
      "grad_norm": 0.0017158942064270377,
      "learning_rate": 1.2981042400376032e-06,
      "loss": 0.0,
      "step": 319
    },
    {
      "epoch": 4.387096774193548,
      "grad_norm": 0.00932268425822258,
      "learning_rate": 1.277415019825417e-06,
      "loss": 0.0,
      "step": 320
    },
    {
      "epoch": 4.400921658986175,
      "grad_norm": 0.002834129147231579,
      "learning_rate": 1.2568352706908937e-06,
      "loss": 0.0,
      "step": 321
    },
    {
      "epoch": 4.414746543778802,
      "grad_norm": 0.002881410764530301,
      "learning_rate": 1.2363668353585486e-06,
      "loss": 0.0,
      "step": 322
    },
    {
      "epoch": 4.428571428571429,
      "grad_norm": 0.01270334329456091,
      "learning_rate": 1.216011546585785e-06,
      "loss": 0.0,
      "step": 323
    },
    {
      "epoch": 4.442396313364055,
      "grad_norm": 0.010466398671269417,
      "learning_rate": 1.195771226998789e-06,
      "loss": 0.0,
      "step": 324
    },
    {
      "epoch": 4.456221198156682,
      "grad_norm": 0.00680310744792223,
      "learning_rate": 1.1756476889293269e-06,
      "loss": 0.0001,
      "step": 325
    },
    {
      "epoch": 4.470046082949309,
      "grad_norm": 0.012853645719587803,
      "learning_rate": 1.1556427342524698e-06,
      "loss": 0.0,
      "step": 326
    },
    {
      "epoch": 4.483870967741936,
      "grad_norm": 0.009427334181964397,
      "learning_rate": 1.1357581542252555e-06,
      "loss": 0.0,
      "step": 327
    },
    {
      "epoch": 4.497695852534562,
      "grad_norm": 0.002769877901300788,
      "learning_rate": 1.1159957293262888e-06,
      "loss": 0.0,
      "step": 328
    },
    {
      "epoch": 4.511520737327189,
      "grad_norm": 0.003061062190681696,
      "learning_rate": 1.0963572290963298e-06,
      "loss": 0.0,
      "step": 329
    },
    {
      "epoch": 4.525345622119816,
      "grad_norm": 0.02316022664308548,
      "learning_rate": 1.0768444119798357e-06,
      "loss": 0.0001,
      "step": 330
    },
    {
      "epoch": 4.539170506912442,
      "grad_norm": 0.005801917053759098,
      "learning_rate": 1.0574590251675145e-06,
      "loss": 0.0,
      "step": 331
    },
    {
      "epoch": 4.552995391705069,
      "grad_norm": 0.006601040717214346,
      "learning_rate": 1.0382028044398823e-06,
      "loss": 0.0,
      "step": 332
    },
    {
      "epoch": 4.566820276497696,
      "grad_norm": 0.0029323461931198835,
      "learning_rate": 1.0190774740118343e-06,
      "loss": 0.0,
      "step": 333
    },
    {
      "epoch": 4.580645161290323,
      "grad_norm": 0.02404218353331089,
      "learning_rate": 1.0000847463782615e-06,
      "loss": 0.0,
      "step": 334
    },
    {
      "epoch": 4.594470046082949,
      "grad_norm": 0.0017514690989628434,
      "learning_rate": 9.812263221607114e-07,
      "loss": 0.0,
      "step": 335
    },
    {
      "epoch": 4.6082949308755765,
      "grad_norm": 0.006374394986778498,
      "learning_rate": 9.625038899551162e-07,
      "loss": 0.0,
      "step": 336
    },
    {
      "epoch": 4.622119815668203,
      "grad_norm": 0.009410557337105274,
      "learning_rate": 9.439191261805894e-07,
      "loss": 0.0001,
      "step": 337
    },
    {
      "epoch": 4.635944700460829,
      "grad_norm": 0.006760374642908573,
      "learning_rate": 9.254736949293216e-07,
      "loss": 0.0,
      "step": 338
    },
    {
      "epoch": 4.649769585253456,
      "grad_norm": 0.002450983738526702,
      "learning_rate": 9.07169247817579e-07,
      "loss": 0.0,
      "step": 339
    },
    {
      "epoch": 4.663594470046083,
      "grad_norm": 0.006942540407180786,
      "learning_rate": 8.890074238378074e-07,
      "loss": 0.0,
      "step": 340
    },
    {
      "epoch": 4.67741935483871,
      "grad_norm": 0.0011738522443920374,
      "learning_rate": 8.709898492118885e-07,
      "loss": 0.0,
      "step": 341
    },
    {
      "epoch": 4.691244239631336,
      "grad_norm": 0.002247450640425086,
      "learning_rate": 8.531181372455161e-07,
      "loss": 0.0,
      "step": 342
    },
    {
      "epoch": 4.705069124423963,
      "grad_norm": 0.007639207877218723,
      "learning_rate": 8.353938881837445e-07,
      "loss": 0.0,
      "step": 343
    },
    {
      "epoch": 4.71889400921659,
      "grad_norm": 0.03200659900903702,
      "learning_rate": 8.178186890677029e-07,
      "loss": 0.0001,
      "step": 344
    },
    {
      "epoch": 4.732718894009217,
      "grad_norm": 0.006035828962922096,
      "learning_rate": 8.003941135924859e-07,
      "loss": 0.0,
      "step": 345
    },
    {
      "epoch": 4.746543778801843,
      "grad_norm": 0.008785420097410679,
      "learning_rate": 7.83121721966248e-07,
      "loss": 0.0,
      "step": 346
    },
    {
      "epoch": 4.76036866359447,
      "grad_norm": 0.0018518904689699411,
      "learning_rate": 7.66003060770498e-07,
      "loss": 0.0,
      "step": 347
    },
    {
      "epoch": 4.774193548387097,
      "grad_norm": 0.0026455053593963385,
      "learning_rate": 7.490396628216237e-07,
      "loss": 0.0,
      "step": 348
    },
    {
      "epoch": 4.788018433179723,
      "grad_norm": 0.011131388135254383,
      "learning_rate": 7.322330470336314e-07,
      "loss": 0.0,
      "step": 349
    },
    {
      "epoch": 4.8018433179723505,
      "grad_norm": 0.00781510304659605,
      "learning_rate": 7.155847182821524e-07,
      "loss": 0.0,
      "step": 350
    },
    {
      "epoch": 4.815668202764977,
      "grad_norm": 0.007307104766368866,
      "learning_rate": 6.990961672696908e-07,
      "loss": 0.0,
      "step": 351
    },
    {
      "epoch": 4.829493087557603,
      "grad_norm": 0.007883993908762932,
      "learning_rate": 6.827688703921407e-07,
      "loss": 0.0,
      "step": 352
    },
    {
      "epoch": 4.84331797235023,
      "grad_norm": 0.029551049694418907,
      "learning_rate": 6.666042896065983e-07,
      "loss": 0.0,
      "step": 353
    },
    {
      "epoch": 4.857142857142857,
      "grad_norm": 0.00934284646064043,
      "learning_rate": 6.506038723004484e-07,
      "loss": 0.0,
      "step": 354
    },
    {
      "epoch": 4.870967741935484,
      "grad_norm": 0.003877162467688322,
      "learning_rate": 6.347690511617693e-07,
      "loss": 0.0,
      "step": 355
    },
    {
      "epoch": 4.88479262672811,
      "grad_norm": 0.005847269669175148,
      "learning_rate": 6.191012440510469e-07,
      "loss": 0.0,
      "step": 356
    },
    {
      "epoch": 4.8986175115207375,
      "grad_norm": 0.001081401132978499,
      "learning_rate": 6.036018538742208e-07,
      "loss": 0.0,
      "step": 357
    },
    {
      "epoch": 4.912442396313364,
      "grad_norm": 0.002679765224456787,
      "learning_rate": 5.882722684570638e-07,
      "loss": 0.0,
      "step": 358
    },
    {
      "epoch": 4.926267281105991,
      "grad_norm": 0.011573799885809422,
      "learning_rate": 5.731138604209169e-07,
      "loss": 0.0,
      "step": 359
    },
    {
      "epoch": 4.940092165898617,
      "grad_norm": 0.0016941127832978964,
      "learning_rate": 5.581279870597866e-07,
      "loss": 0.0,
      "step": 360
    }
  ],
  "logging_steps": 1,
  "max_steps": 432,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 72,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.298790820660537e+19,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}