{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9815668202764978,
  "eval_steps": 500,
  "global_step": 144,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013824884792626729,
      "grad_norm": 31.00213623046875,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.2089,
      "step": 1
    },
    {
      "epoch": 0.027649769585253458,
      "grad_norm": 30.27136993408203,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.1536,
      "step": 2
    },
    {
      "epoch": 0.041474654377880185,
      "grad_norm": 30.48703384399414,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.1581,
      "step": 3
    },
    {
      "epoch": 0.055299539170506916,
      "grad_norm": 30.779329299926758,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.1741,
      "step": 4
    },
    {
      "epoch": 0.06912442396313365,
      "grad_norm": 31.22808837890625,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.1864,
      "step": 5
    },
    {
      "epoch": 0.08294930875576037,
      "grad_norm": 30.783327102661133,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.1993,
      "step": 6
    },
    {
      "epoch": 0.0967741935483871,
      "grad_norm": 30.57423210144043,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.1506,
      "step": 7
    },
    {
      "epoch": 0.11059907834101383,
      "grad_norm": 30.952186584472656,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.1599,
      "step": 8
    },
    {
      "epoch": 0.12442396313364056,
      "grad_norm": 30.37245750427246,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.1572,
      "step": 9
    },
    {
      "epoch": 0.1382488479262673,
      "grad_norm": 30.930192947387695,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.1447,
      "step": 10
    },
    {
      "epoch": 0.15207373271889402,
      "grad_norm": 29.735448837280273,
      "learning_rate": 5.5e-07,
      "loss": 2.0742,
      "step": 11
    },
    {
      "epoch": 0.16589861751152074,
      "grad_norm": 29.62826156616211,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.061,
      "step": 12
    },
    {
      "epoch": 0.17972350230414746,
      "grad_norm": 28.937463760375977,
      "learning_rate": 6.5e-07,
      "loss": 1.9974,
      "step": 13
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 29.24833106994629,
      "learning_rate": 7.000000000000001e-07,
      "loss": 1.9833,
      "step": 14
    },
    {
      "epoch": 0.2073732718894009,
      "grad_norm": 28.122018814086914,
      "learning_rate": 7.5e-07,
      "loss": 1.8934,
      "step": 15
    },
    {
      "epoch": 0.22119815668202766,
      "grad_norm": 28.059659957885742,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.875,
      "step": 16
    },
    {
      "epoch": 0.2350230414746544,
      "grad_norm": 27.361961364746094,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.8009,
      "step": 17
    },
    {
      "epoch": 0.2488479262672811,
      "grad_norm": 26.721765518188477,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.7116,
      "step": 18
    },
    {
      "epoch": 0.2626728110599078,
      "grad_norm": 25.37330436706543,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.5608,
      "step": 19
    },
    {
      "epoch": 0.2764976958525346,
      "grad_norm": 25.81206703186035,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.5043,
      "step": 20
    },
    {
      "epoch": 0.2903225806451613,
      "grad_norm": 25.539344787597656,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.3673,
      "step": 21
    },
    {
      "epoch": 0.30414746543778803,
      "grad_norm": 25.097164154052734,
      "learning_rate": 1.1e-06,
      "loss": 1.2029,
      "step": 22
    },
    {
      "epoch": 0.31797235023041476,
      "grad_norm": 24.619497299194336,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.0458,
      "step": 23
    },
    {
      "epoch": 0.3317972350230415,
      "grad_norm": 23.820302963256836,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.8723,
      "step": 24
    },
    {
      "epoch": 0.3456221198156682,
      "grad_norm": 23.12735939025879,
      "learning_rate": 1.25e-06,
      "loss": 0.7183,
      "step": 25
    },
    {
      "epoch": 0.35944700460829493,
      "grad_norm": 20.127134323120117,
      "learning_rate": 1.3e-06,
      "loss": 0.5248,
      "step": 26
    },
    {
      "epoch": 0.37327188940092165,
      "grad_norm": 15.901495933532715,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.3689,
      "step": 27
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 11.053832054138184,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.2482,
      "step": 28
    },
    {
      "epoch": 0.4009216589861751,
      "grad_norm": 7.248495578765869,
      "learning_rate": 1.45e-06,
      "loss": 0.1847,
      "step": 29
    },
    {
      "epoch": 0.4147465437788018,
      "grad_norm": 5.378540515899658,
      "learning_rate": 1.5e-06,
      "loss": 0.1423,
      "step": 30
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 3.8371808528900146,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.1152,
      "step": 31
    },
    {
      "epoch": 0.4423963133640553,
      "grad_norm": 2.2655274868011475,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.0845,
      "step": 32
    },
    {
      "epoch": 0.45622119815668205,
      "grad_norm": 1.5746861696243286,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.0711,
      "step": 33
    },
    {
      "epoch": 0.4700460829493088,
      "grad_norm": 1.3510947227478027,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.0734,
      "step": 34
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 0.9737389087677002,
      "learning_rate": 1.75e-06,
      "loss": 0.0651,
      "step": 35
    },
    {
      "epoch": 0.4976958525345622,
      "grad_norm": 0.9815284609794617,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0593,
      "step": 36
    },
    {
      "epoch": 0.511520737327189,
      "grad_norm": 0.8567912578582764,
      "learning_rate": 1.85e-06,
      "loss": 0.0543,
      "step": 37
    },
    {
      "epoch": 0.5253456221198156,
      "grad_norm": 0.6773302555084229,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0622,
      "step": 38
    },
    {
      "epoch": 0.5391705069124424,
      "grad_norm": 0.49936285614967346,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0511,
      "step": 39
    },
    {
      "epoch": 0.5529953917050692,
      "grad_norm": 0.6253588795661926,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0478,
      "step": 40
    },
    {
      "epoch": 0.5668202764976958,
      "grad_norm": 0.5103089809417725,
      "learning_rate": 2.05e-06,
      "loss": 0.0465,
      "step": 41
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 0.29294702410697937,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0456,
      "step": 42
    },
    {
      "epoch": 0.5944700460829493,
      "grad_norm": 0.4237954616546631,
      "learning_rate": 2.15e-06,
      "loss": 0.0501,
      "step": 43
    },
    {
      "epoch": 0.6082949308755761,
      "grad_norm": 0.42243412137031555,
      "learning_rate": 2.2e-06,
      "loss": 0.0388,
      "step": 44
    },
    {
      "epoch": 0.6221198156682027,
      "grad_norm": 0.37881818413734436,
      "learning_rate": 2.25e-06,
      "loss": 0.0415,
      "step": 45
    },
    {
      "epoch": 0.6359447004608295,
      "grad_norm": 0.4941152036190033,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.045,
      "step": 46
    },
    {
      "epoch": 0.6497695852534562,
      "grad_norm": 0.3046450912952423,
      "learning_rate": 2.35e-06,
      "loss": 0.0386,
      "step": 47
    },
    {
      "epoch": 0.663594470046083,
      "grad_norm": 0.39361852407455444,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0447,
      "step": 48
    },
    {
      "epoch": 0.6774193548387096,
      "grad_norm": 0.5190001130104065,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0364,
      "step": 49
    },
    {
      "epoch": 0.6912442396313364,
      "grad_norm": 0.372072696685791,
      "learning_rate": 2.5e-06,
      "loss": 0.043,
      "step": 50
    },
    {
      "epoch": 0.7050691244239631,
      "grad_norm": 0.3756551146507263,
      "learning_rate": 2.55e-06,
      "loss": 0.0424,
      "step": 51
    },
    {
      "epoch": 0.7188940092165899,
      "grad_norm": 0.4593554437160492,
      "learning_rate": 2.6e-06,
      "loss": 0.0387,
      "step": 52
    },
    {
      "epoch": 0.7327188940092166,
      "grad_norm": 0.2931855618953705,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0396,
      "step": 53
    },
    {
      "epoch": 0.7465437788018433,
      "grad_norm": 0.38429534435272217,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0373,
      "step": 54
    },
    {
      "epoch": 0.7603686635944701,
      "grad_norm": 0.3506857752799988,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.04,
      "step": 55
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 0.29847028851509094,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0369,
      "step": 56
    },
    {
      "epoch": 0.7880184331797235,
      "grad_norm": 0.3653375506401062,
      "learning_rate": 2.85e-06,
      "loss": 0.0396,
      "step": 57
    },
    {
      "epoch": 0.8018433179723502,
      "grad_norm": 0.3163083791732788,
      "learning_rate": 2.9e-06,
      "loss": 0.0337,
      "step": 58
    },
    {
      "epoch": 0.815668202764977,
      "grad_norm": 0.3734363615512848,
      "learning_rate": 2.95e-06,
      "loss": 0.0327,
      "step": 59
    },
    {
      "epoch": 0.8294930875576036,
      "grad_norm": 0.29547712206840515,
      "learning_rate": 3e-06,
      "loss": 0.0365,
      "step": 60
    },
    {
      "epoch": 0.8433179723502304,
      "grad_norm": 0.4041007161140442,
      "learning_rate": 3.05e-06,
      "loss": 0.038,
      "step": 61
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.3602149784564972,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.033,
      "step": 62
    },
    {
      "epoch": 0.8709677419354839,
      "grad_norm": 0.2948857545852661,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0386,
      "step": 63
    },
    {
      "epoch": 0.8847926267281107,
      "grad_norm": 0.39098358154296875,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0323,
      "step": 64
    },
    {
      "epoch": 0.8986175115207373,
      "grad_norm": 0.3692062795162201,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0309,
      "step": 65
    },
    {
      "epoch": 0.9124423963133641,
      "grad_norm": 0.3967229425907135,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0346,
      "step": 66
    },
    {
      "epoch": 0.9262672811059908,
      "grad_norm": 0.47776708006858826,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0355,
      "step": 67
    },
    {
      "epoch": 0.9400921658986175,
      "grad_norm": 0.21545131504535675,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0294,
      "step": 68
    },
    {
      "epoch": 0.9539170506912442,
      "grad_norm": 0.23738539218902588,
      "learning_rate": 3.45e-06,
      "loss": 0.0308,
      "step": 69
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 0.29174014925956726,
      "learning_rate": 3.5e-06,
      "loss": 0.0312,
      "step": 70
    },
    {
      "epoch": 0.9815668202764977,
      "grad_norm": 0.38475602865219116,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0324,
      "step": 71
    },
    {
      "epoch": 0.9953917050691244,
      "grad_norm": 0.4077378809452057,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0297,
      "step": 72
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.4077378809452057,
      "learning_rate": 3.65e-06,
      "loss": 0.031,
      "step": 73
    },
    {
      "epoch": 1.0138248847926268,
      "grad_norm": 0.46581539511680603,
      "learning_rate": 3.7e-06,
      "loss": 0.0313,
      "step": 74
    },
    {
      "epoch": 1.0276497695852536,
      "grad_norm": 0.24417200684547424,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.027,
      "step": 75
    },
    {
      "epoch": 1.0414746543778801,
      "grad_norm": 0.20425117015838623,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0307,
      "step": 76
    },
    {
      "epoch": 1.055299539170507,
      "grad_norm": 0.3578161597251892,
      "learning_rate": 3.85e-06,
      "loss": 0.0312,
      "step": 77
    },
    {
      "epoch": 1.0691244239631337,
      "grad_norm": 0.39486679434776306,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0294,
      "step": 78
    },
    {
      "epoch": 1.0829493087557605,
      "grad_norm": 0.3932795226573944,
      "learning_rate": 3.95e-06,
      "loss": 0.0307,
      "step": 79
    },
    {
      "epoch": 1.096774193548387,
      "grad_norm": 0.2946235239505768,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0257,
      "step": 80
    },
    {
      "epoch": 1.1105990783410138,
      "grad_norm": 0.3318672776222229,
      "learning_rate": 4.05e-06,
      "loss": 0.0296,
      "step": 81
    },
    {
      "epoch": 1.1244239631336406,
      "grad_norm": 0.23701588809490204,
      "learning_rate": 4.1e-06,
      "loss": 0.0298,
      "step": 82
    },
    {
      "epoch": 1.1382488479262673,
      "grad_norm": 0.2415941059589386,
      "learning_rate": 4.15e-06,
      "loss": 0.0256,
      "step": 83
    },
    {
      "epoch": 1.1520737327188941,
      "grad_norm": 0.24098087847232819,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0263,
      "step": 84
    },
    {
      "epoch": 1.1658986175115207,
      "grad_norm": 0.3530103862285614,
      "learning_rate": 4.25e-06,
      "loss": 0.0308,
      "step": 85
    },
    {
      "epoch": 1.1797235023041475,
      "grad_norm": 0.2382838875055313,
      "learning_rate": 4.3e-06,
      "loss": 0.0254,
      "step": 86
    },
    {
      "epoch": 1.1935483870967742,
      "grad_norm": 0.2670588791370392,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0255,
      "step": 87
    },
    {
      "epoch": 1.2073732718894008,
      "grad_norm": 0.30723804235458374,
      "learning_rate": 4.4e-06,
      "loss": 0.0263,
      "step": 88
    },
    {
      "epoch": 1.2211981566820276,
      "grad_norm": 0.505890965461731,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0265,
      "step": 89
    },
    {
      "epoch": 1.2350230414746544,
      "grad_norm": 0.24307991564273834,
      "learning_rate": 4.5e-06,
      "loss": 0.0227,
      "step": 90
    },
    {
      "epoch": 1.2488479262672811,
      "grad_norm": 0.2198561429977417,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.0261,
      "step": 91
    },
    {
      "epoch": 1.262672811059908,
      "grad_norm": 0.2435183823108673,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0225,
      "step": 92
    },
    {
      "epoch": 1.2764976958525347,
      "grad_norm": 0.18837811052799225,
      "learning_rate": 4.65e-06,
      "loss": 0.0218,
      "step": 93
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 0.3818771541118622,
      "learning_rate": 4.7e-06,
      "loss": 0.0223,
      "step": 94
    },
    {
      "epoch": 1.304147465437788,
      "grad_norm": 0.2358720600605011,
      "learning_rate": 4.75e-06,
      "loss": 0.0204,
      "step": 95
    },
    {
      "epoch": 1.3179723502304148,
      "grad_norm": 0.25374144315719604,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.022,
      "step": 96
    },
    {
      "epoch": 1.3317972350230414,
      "grad_norm": 0.36181601881980896,
      "learning_rate": 4.85e-06,
      "loss": 0.0244,
      "step": 97
    },
    {
      "epoch": 1.3456221198156681,
      "grad_norm": 0.3156590759754181,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0233,
      "step": 98
    },
    {
      "epoch": 1.359447004608295,
      "grad_norm": 0.21958638727664948,
      "learning_rate": 4.95e-06,
      "loss": 0.0218,
      "step": 99
    },
    {
      "epoch": 1.3732718894009217,
      "grad_norm": 0.34455621242523193,
      "learning_rate": 5e-06,
      "loss": 0.0267,
      "step": 100
    },
    {
      "epoch": 1.3870967741935485,
      "grad_norm": 0.283086359500885,
      "learning_rate": 4.999888074163108e-06,
      "loss": 0.0238,
      "step": 101
    },
    {
      "epoch": 1.400921658986175,
      "grad_norm": 0.28856486082077026,
      "learning_rate": 4.999552306674345e-06,
      "loss": 0.0186,
      "step": 102
    },
    {
      "epoch": 1.4147465437788018,
      "grad_norm": 0.26721692085266113,
      "learning_rate": 4.998992727598557e-06,
      "loss": 0.0193,
      "step": 103
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.3459971249103546,
      "learning_rate": 4.998209387040829e-06,
      "loss": 0.0218,
      "step": 104
    },
    {
      "epoch": 1.4423963133640554,
      "grad_norm": 0.25979122519493103,
      "learning_rate": 4.9972023551419995e-06,
      "loss": 0.0216,
      "step": 105
    },
    {
      "epoch": 1.456221198156682,
      "grad_norm": 0.19960424304008484,
      "learning_rate": 4.995971722072379e-06,
      "loss": 0.0176,
      "step": 106
    },
    {
      "epoch": 1.4700460829493087,
      "grad_norm": 0.2529441714286804,
      "learning_rate": 4.9945175980236745e-06,
      "loss": 0.0181,
      "step": 107
    },
    {
      "epoch": 1.4838709677419355,
      "grad_norm": 0.2690267264842987,
      "learning_rate": 4.992840113199131e-06,
      "loss": 0.0196,
      "step": 108
    },
    {
      "epoch": 1.4976958525345623,
      "grad_norm": 0.3516470789909363,
      "learning_rate": 4.990939417801859e-06,
      "loss": 0.0182,
      "step": 109
    },
    {
      "epoch": 1.511520737327189,
      "grad_norm": 0.30167508125305176,
      "learning_rate": 4.988815682021398e-06,
      "loss": 0.0205,
      "step": 110
    },
    {
      "epoch": 1.5253456221198156,
      "grad_norm": 0.3920849859714508,
      "learning_rate": 4.986469096018472e-06,
      "loss": 0.0177,
      "step": 111
    },
    {
      "epoch": 1.5391705069124424,
      "grad_norm": 0.3274078369140625,
      "learning_rate": 4.983899869907963e-06,
      "loss": 0.0185,
      "step": 112
    },
    {
      "epoch": 1.5529953917050692,
      "grad_norm": 0.2237282395362854,
      "learning_rate": 4.981108233740096e-06,
      "loss": 0.016,
      "step": 113
    },
    {
      "epoch": 1.5668202764976957,
      "grad_norm": 0.23966379463672638,
      "learning_rate": 4.978094437479843e-06,
      "loss": 0.0183,
      "step": 114
    },
    {
      "epoch": 1.5806451612903225,
      "grad_norm": 0.4027673602104187,
      "learning_rate": 4.97485875098454e-06,
      "loss": 0.0171,
      "step": 115
    },
    {
      "epoch": 1.5944700460829493,
      "grad_norm": 0.24082835018634796,
      "learning_rate": 4.971401463979722e-06,
      "loss": 0.016,
      "step": 116
    },
    {
      "epoch": 1.608294930875576,
      "grad_norm": 0.19387558102607727,
      "learning_rate": 4.967722886033181e-06,
      "loss": 0.0165,
      "step": 117
    },
    {
      "epoch": 1.6221198156682028,
      "grad_norm": 0.33696162700653076,
      "learning_rate": 4.963823346527249e-06,
      "loss": 0.0154,
      "step": 118
    },
    {
      "epoch": 1.6359447004608296,
      "grad_norm": 0.30290740728378296,
      "learning_rate": 4.959703194629304e-06,
      "loss": 0.0175,
      "step": 119
    },
    {
      "epoch": 1.6497695852534562,
      "grad_norm": 0.3781787157058716,
      "learning_rate": 4.955362799260507e-06,
      "loss": 0.0145,
      "step": 120
    },
    {
      "epoch": 1.663594470046083,
      "grad_norm": 0.39995357394218445,
      "learning_rate": 4.950802549062764e-06,
      "loss": 0.015,
      "step": 121
    },
    {
      "epoch": 1.6774193548387095,
      "grad_norm": 0.19926570355892181,
      "learning_rate": 4.946022852363932e-06,
      "loss": 0.0135,
      "step": 122
    },
    {
      "epoch": 1.6912442396313363,
      "grad_norm": 0.22450515627861023,
      "learning_rate": 4.9410241371412525e-06,
      "loss": 0.0135,
      "step": 123
    },
    {
      "epoch": 1.705069124423963,
      "grad_norm": 0.3588384985923767,
      "learning_rate": 4.935806850983034e-06,
      "loss": 0.0125,
      "step": 124
    },
    {
      "epoch": 1.7188940092165899,
      "grad_norm": 0.28571122884750366,
      "learning_rate": 4.9303714610485705e-06,
      "loss": 0.0166,
      "step": 125
    },
    {
      "epoch": 1.7327188940092166,
      "grad_norm": 0.3496967852115631,
      "learning_rate": 4.924718454026318e-06,
      "loss": 0.0139,
      "step": 126
    },
    {
      "epoch": 1.7465437788018434,
      "grad_norm": 0.3279854357242584,
      "learning_rate": 4.918848336090309e-06,
      "loss": 0.0133,
      "step": 127
    },
    {
      "epoch": 1.7603686635944702,
      "grad_norm": 0.19201801717281342,
      "learning_rate": 4.912761632854834e-06,
      "loss": 0.0151,
      "step": 128
    },
    {
      "epoch": 1.7741935483870968,
      "grad_norm": 0.27701929211616516,
      "learning_rate": 4.906458889327375e-06,
      "loss": 0.0148,
      "step": 129
    },
    {
      "epoch": 1.7880184331797235,
      "grad_norm": 0.2757968008518219,
      "learning_rate": 4.899940669859807e-06,
      "loss": 0.0118,
      "step": 130
    },
    {
      "epoch": 1.80184331797235,
      "grad_norm": 0.18373191356658936,
      "learning_rate": 4.893207558097867e-06,
      "loss": 0.0149,
      "step": 131
    },
    {
      "epoch": 1.8156682027649769,
      "grad_norm": 0.2116280496120453,
      "learning_rate": 4.8862601569288885e-06,
      "loss": 0.0129,
      "step": 132
    },
    {
      "epoch": 1.8294930875576036,
      "grad_norm": 0.30384117364883423,
      "learning_rate": 4.879099088427824e-06,
      "loss": 0.0136,
      "step": 133
    },
    {
      "epoch": 1.8433179723502304,
      "grad_norm": 0.3766787052154541,
      "learning_rate": 4.871724993801541e-06,
      "loss": 0.0123,
      "step": 134
    },
    {
      "epoch": 1.8571428571428572,
      "grad_norm": 0.3401263356208801,
      "learning_rate": 4.864138533331411e-06,
      "loss": 0.0122,
      "step": 135
    },
    {
      "epoch": 1.870967741935484,
      "grad_norm": 0.24321958422660828,
      "learning_rate": 4.8563403863141825e-06,
      "loss": 0.0123,
      "step": 136
    },
    {
      "epoch": 1.8847926267281108,
      "grad_norm": 0.16918110847473145,
      "learning_rate": 4.84833125100116e-06,
      "loss": 0.0104,
      "step": 137
    },
    {
      "epoch": 1.8986175115207373,
      "grad_norm": 0.23489230871200562,
      "learning_rate": 4.840111844535682e-06,
      "loss": 0.0122,
      "step": 138
    },
    {
      "epoch": 1.912442396313364,
      "grad_norm": 0.32796236872673035,
      "learning_rate": 4.8316829028889076e-06,
      "loss": 0.0109,
      "step": 139
    },
    {
      "epoch": 1.9262672811059907,
      "grad_norm": 0.24210475385189056,
      "learning_rate": 4.823045180793914e-06,
      "loss": 0.0118,
      "step": 140
    },
    {
      "epoch": 1.9400921658986174,
      "grad_norm": 0.3450548052787781,
      "learning_rate": 4.8141994516781196e-06,
      "loss": 0.0115,
      "step": 141
    },
    {
      "epoch": 1.9539170506912442,
      "grad_norm": 0.23163923621177673,
      "learning_rate": 4.805146507594034e-06,
      "loss": 0.0122,
      "step": 142
    },
    {
      "epoch": 1.967741935483871,
      "grad_norm": 0.8197745084762573,
      "learning_rate": 4.7958871591483305e-06,
      "loss": 0.0101,
      "step": 143
    },
    {
      "epoch": 1.9815668202764978,
      "grad_norm": 0.2917576730251312,
      "learning_rate": 4.786422235429269e-06,
      "loss": 0.0078,
      "step": 144
    }
  ],
  "logging_steps": 1,
  "max_steps": 432,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 72,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9.220896013978436e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}