| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9878213802435725, |
| "eval_steps": 500, |
| "global_step": 276, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.010825439783491205, |
| "grad_norm": 5.807889292253254, |
| "learning_rate": 4.04061e-06, |
| "loss": 0.8567, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.02165087956698241, |
| "grad_norm": 5.811690427529652, |
| "learning_rate": 8.08122e-06, |
| "loss": 0.8604, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.03247631935047361, |
| "grad_norm": 4.3700725808471175, |
| "learning_rate": 1.212183e-05, |
| "loss": 0.8206, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.04330175913396482, |
| "grad_norm": 2.118091273430897, |
| "learning_rate": 1.616244e-05, |
| "loss": 0.7738, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.05412719891745602, |
| "grad_norm": 5.4648275516158655, |
| "learning_rate": 2.020305e-05, |
| "loss": 0.7833, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.06495263870094722, |
| "grad_norm": 8.553109003392324, |
| "learning_rate": 2.424366e-05, |
| "loss": 0.7883, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.07577807848443843, |
| "grad_norm": 6.503055944847511, |
| "learning_rate": 2.828427e-05, |
| "loss": 0.7834, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.08660351826792964, |
| "grad_norm": 3.8336486548329196, |
| "learning_rate": 3.232488e-05, |
| "loss": 0.7479, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.09742895805142084, |
| "grad_norm": 2.684849997410019, |
| "learning_rate": 3.636549e-05, |
| "loss": 0.6909, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.10825439783491204, |
| "grad_norm": 2.161503695468669, |
| "learning_rate": 4.04061e-05, |
| "loss": 0.6719, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.11907983761840325, |
| "grad_norm": 1.884181921509628, |
| "learning_rate": 4.444671e-05, |
| "loss": 0.6462, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.12990527740189445, |
| "grad_norm": 1.9443412615480906, |
| "learning_rate": 4.848732e-05, |
| "loss": 0.6351, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.14073071718538566, |
| "grad_norm": 1.541881119258122, |
| "learning_rate": 5.2527930000000004e-05, |
| "loss": 0.6228, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.15155615696887687, |
| "grad_norm": 1.476390312725872, |
| "learning_rate": 5.656854e-05, |
| "loss": 0.6311, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.16238159675236807, |
| "grad_norm": 2.344926743941767, |
| "learning_rate": 6.060915e-05, |
| "loss": 0.6178, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.17320703653585928, |
| "grad_norm": 1.4098664598368982, |
| "learning_rate": 6.464976e-05, |
| "loss": 0.604, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.18403247631935046, |
| "grad_norm": 2.1013992594295536, |
| "learning_rate": 6.869037e-05, |
| "loss": 0.6064, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.19485791610284167, |
| "grad_norm": 3.729407760648478, |
| "learning_rate": 7.273098e-05, |
| "loss": 0.5883, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.20568335588633288, |
| "grad_norm": 1.9287898977179496, |
| "learning_rate": 7.677159e-05, |
| "loss": 0.5875, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.2165087956698241, |
| "grad_norm": 2.141344286692683, |
| "learning_rate": 8.08122e-05, |
| "loss": 0.5996, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.2273342354533153, |
| "grad_norm": 1.718091978310923, |
| "learning_rate": 8.485281e-05, |
| "loss": 0.5792, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.2381596752368065, |
| "grad_norm": 1.5391109254273139, |
| "learning_rate": 8.889342e-05, |
| "loss": 0.5806, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.2489851150202977, |
| "grad_norm": 1.5474160910291503, |
| "learning_rate": 9.293403e-05, |
| "loss": 0.573, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.2598105548037889, |
| "grad_norm": 2.015119305794227, |
| "learning_rate": 9.697464e-05, |
| "loss": 0.5753, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.2706359945872801, |
| "grad_norm": 1.2610566881703422, |
| "learning_rate": 0.00010101525000000001, |
| "loss": 0.564, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.2814614343707713, |
| "grad_norm": 2.2902330489952347, |
| "learning_rate": 0.00010505586000000001, |
| "loss": 0.5529, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.2922868741542625, |
| "grad_norm": 1.3601476225980274, |
| "learning_rate": 0.00010909647, |
| "loss": 0.5578, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.30311231393775373, |
| "grad_norm": 2.5182389116157196, |
| "learning_rate": 0.00011313708, |
| "loss": 0.5725, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.31393775372124494, |
| "grad_norm": 2.1290153329265697, |
| "learning_rate": 0.00011313254125743567, |
| "loss": 0.5751, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.32476319350473615, |
| "grad_norm": 1.3037114858673693, |
| "learning_rate": 0.00011311892575806915, |
| "loss": 0.5513, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.33558863328822736, |
| "grad_norm": 1.950503866929347, |
| "learning_rate": 0.00011309623568676317, |
| "loss": 0.5561, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.34641407307171856, |
| "grad_norm": 1.3015061613988743, |
| "learning_rate": 0.00011306447468456599, |
| "loss": 0.5524, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.3572395128552097, |
| "grad_norm": 2.844778505243191, |
| "learning_rate": 0.0001130236478481272, |
| "loss": 0.56, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.3680649526387009, |
| "grad_norm": 1.6940649560718253, |
| "learning_rate": 0.00011297376172887978, |
| "loss": 0.5715, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.37889039242219213, |
| "grad_norm": 2.296456387550305, |
| "learning_rate": 0.00011291482433198894, |
| "loss": 0.5509, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.38971583220568334, |
| "grad_norm": 2.2983132666819706, |
| "learning_rate": 0.00011284684511506735, |
| "loss": 0.5394, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.40054127198917455, |
| "grad_norm": 1.4966185446845217, |
| "learning_rate": 0.00011276983498665771, |
| "loss": 0.5478, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.41136671177266576, |
| "grad_norm": 2.0647499571970616, |
| "learning_rate": 0.00011268380630448204, |
| "loss": 0.535, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.42219215155615697, |
| "grad_norm": 12.286624512326911, |
| "learning_rate": 0.00011258877287345884, |
| "loss": 0.6342, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.4330175913396482, |
| "grad_norm": 142.759088551662, |
| "learning_rate": 0.00011248474994348775, |
| "loss": 0.9216, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.4438430311231394, |
| "grad_norm": 4.27084515384108, |
| "learning_rate": 0.00011237175420700238, |
| "loss": 0.6257, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.4546684709066306, |
| "grad_norm": 1.3276672657350452, |
| "learning_rate": 0.00011224980379629178, |
| "loss": 0.5676, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.4654939106901218, |
| "grad_norm": 2.5925858366379186, |
| "learning_rate": 0.0001121189182805907, |
| "loss": 0.5571, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.476319350473613, |
| "grad_norm": 1.8224855694265998, |
| "learning_rate": 0.00011197911866293939, |
| "loss": 0.5752, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.4871447902571042, |
| "grad_norm": 1.9153712099066185, |
| "learning_rate": 0.00011183042737681327, |
| "loss": 0.5675, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.4979702300405954, |
| "grad_norm": 1.4984797297805004, |
| "learning_rate": 0.00011167286828252297, |
| "loss": 0.5509, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.5087956698240866, |
| "grad_norm": 1.3000230619923165, |
| "learning_rate": 0.00011150646666338564, |
| "loss": 0.5437, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.5196211096075778, |
| "grad_norm": 1.1273300703794096, |
| "learning_rate": 0.00011133124922166763, |
| "loss": 0.5353, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.530446549391069, |
| "grad_norm": 0.9939902978944836, |
| "learning_rate": 0.00011114724407429968, |
| "loss": 0.532, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.5412719891745602, |
| "grad_norm": 1.4227558376445275, |
| "learning_rate": 0.00011095448074836505, |
| "loss": 0.5445, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.5520974289580515, |
| "grad_norm": 1.6500800441263705, |
| "learning_rate": 0.00011075299017636123, |
| "loss": 0.5331, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.5629228687415426, |
| "grad_norm": 0.8867959751888957, |
| "learning_rate": 0.00011054280469123641, |
| "loss": 0.5243, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.5737483085250338, |
| "grad_norm": 1.4279560361603063, |
| "learning_rate": 0.00011032395802120092, |
| "loss": 0.5243, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.584573748308525, |
| "grad_norm": 0.956318053267928, |
| "learning_rate": 0.00011009648528431498, |
| "loss": 0.5345, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.5953991880920162, |
| "grad_norm": 1.163599419520797, |
| "learning_rate": 0.00010986042298285322, |
| "loss": 0.5178, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.6062246278755075, |
| "grad_norm": 1.0528942367951604, |
| "learning_rate": 0.00010961580899744742, |
| "loss": 0.5246, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.6170500676589986, |
| "grad_norm": 1.152436232083363, |
| "learning_rate": 0.00010936268258100764, |
| "loss": 0.5218, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.6278755074424899, |
| "grad_norm": 1.288579266241482, |
| "learning_rate": 0.00010910108435242351, |
| "loss": 0.5221, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.638700947225981, |
| "grad_norm": 1.148211758545437, |
| "learning_rate": 0.00010883105629004603, |
| "loss": 0.5022, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.6495263870094723, |
| "grad_norm": 0.8500064313704729, |
| "learning_rate": 0.00010855264172495148, |
| "loss": 0.5053, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.6603518267929634, |
| "grad_norm": 1.526146143997704, |
| "learning_rate": 0.00010826588533398805, |
| "loss": 0.5204, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.6711772665764547, |
| "grad_norm": 0.8393491341796541, |
| "learning_rate": 0.00010797083313260666, |
| "loss": 0.5043, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.6820027063599459, |
| "grad_norm": 1.484485217333713, |
| "learning_rate": 0.0001076675324674768, |
| "loss": 0.5159, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.6928281461434371, |
| "grad_norm": 1.0584547196728273, |
| "learning_rate": 0.00010735603200888909, |
| "loss": 0.5075, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.7036535859269283, |
| "grad_norm": 1.3549586079445122, |
| "learning_rate": 0.00010703638174294492, |
| "loss": 0.5008, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.7144790257104194, |
| "grad_norm": 0.6668671224564001, |
| "learning_rate": 0.00010670863296353553, |
| "loss": 0.5041, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.7253044654939107, |
| "grad_norm": 1.2494202811591621, |
| "learning_rate": 0.00010637283826411074, |
| "loss": 0.5023, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.7361299052774019, |
| "grad_norm": 0.9215229237806866, |
| "learning_rate": 0.00010602905152923944, |
| "loss": 0.5114, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.7469553450608931, |
| "grad_norm": 1.0110141606592546, |
| "learning_rate": 0.00010567732792596276, |
| "loss": 0.5007, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.7577807848443843, |
| "grad_norm": 0.8129099507326153, |
| "learning_rate": 0.00010531772389494152, |
| "loss": 0.4829, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.7686062246278755, |
| "grad_norm": 0.8894581968902109, |
| "learning_rate": 0.0001049502971413992, |
| "loss": 0.4843, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.7794316644113667, |
| "grad_norm": 1.0766490523242207, |
| "learning_rate": 0.00010457510662586213, |
| "loss": 0.4952, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.790257104194858, |
| "grad_norm": 0.9096596609582451, |
| "learning_rate": 0.00010419221255469808, |
| "loss": 0.4991, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.8010825439783491, |
| "grad_norm": 0.6679397460269166, |
| "learning_rate": 0.00010380167637045515, |
| "loss": 0.4934, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.8119079837618404, |
| "grad_norm": 0.5550572181240487, |
| "learning_rate": 0.00010340356074200197, |
| "loss": 0.488, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.8227334235453315, |
| "grad_norm": 0.5738282924820368, |
| "learning_rate": 0.00010299792955447147, |
| "loss": 0.4813, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.8335588633288228, |
| "grad_norm": 0.6379614597485335, |
| "learning_rate": 0.00010258484789900923, |
| "loss": 0.4793, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.8443843031123139, |
| "grad_norm": 1.1998908169361215, |
| "learning_rate": 0.00010216438206232844, |
| "loss": 0.4868, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.8552097428958051, |
| "grad_norm": 0.957795674849162, |
| "learning_rate": 0.00010173659951607282, |
| "loss": 0.4896, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.8660351826792964, |
| "grad_norm": 1.0879462318136461, |
| "learning_rate": 0.00010130156890598967, |
| "loss": 0.484, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.8768606224627875, |
| "grad_norm": 1.0365480887213832, |
| "learning_rate": 0.00010085936004091438, |
| "loss": 0.491, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.8876860622462788, |
| "grad_norm": 0.8798367719337482, |
| "learning_rate": 0.0001004100438815681, |
| "loss": 0.4779, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.8985115020297699, |
| "grad_norm": 1.0902126566790828, |
| "learning_rate": 9.995369252917098e-05, |
| "loss": 0.4851, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.9093369418132612, |
| "grad_norm": 1.3512814806916393, |
| "learning_rate": 9.949037921387205e-05, |
| "loss": 0.4854, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.9201623815967523, |
| "grad_norm": 0.7496109448453503, |
| "learning_rate": 9.902017828299812e-05, |
| "loss": 0.477, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.9309878213802436, |
| "grad_norm": 0.8284390728674584, |
| "learning_rate": 9.854316518912328e-05, |
| "loss": 0.4856, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.9418132611637348, |
| "grad_norm": 0.9111112551516629, |
| "learning_rate": 9.805941647796121e-05, |
| "loss": 0.4813, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.952638700947226, |
| "grad_norm": 0.9417845675499721, |
| "learning_rate": 9.756900977608196e-05, |
| "loss": 0.4655, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.9634641407307172, |
| "grad_norm": 1.2726325932306266, |
| "learning_rate": 9.707202377845526e-05, |
| "loss": 0.4848, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.9742895805142084, |
| "grad_norm": 0.8519456352856999, |
| "learning_rate": 9.656853823582246e-05, |
| "loss": 0.4783, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.9851150202976996, |
| "grad_norm": 1.2137981567024454, |
| "learning_rate": 9.605863394189905e-05, |
| "loss": 0.4808, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.9959404600811907, |
| "grad_norm": 0.8493809236019717, |
| "learning_rate": 9.554239272040974e-05, |
| "loss": 0.4794, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.006765899864682, |
| "grad_norm": 1.1076546554118754, |
| "learning_rate": 9.501989741195838e-05, |
| "loss": 0.7428, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.0175913396481733, |
| "grad_norm": 1.6305614136858817, |
| "learning_rate": 9.44912318607345e-05, |
| "loss": 0.4696, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.0284167794316643, |
| "grad_norm": 0.7416433203255349, |
| "learning_rate": 9.395648090105913e-05, |
| "loss": 0.4548, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.0392422192151556, |
| "grad_norm": 1.3812445546043028, |
| "learning_rate": 9.341573034377135e-05, |
| "loss": 0.4597, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.0500676589986468, |
| "grad_norm": 0.9119824983143324, |
| "learning_rate": 9.28690669624584e-05, |
| "loss": 0.4469, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.060893098782138, |
| "grad_norm": 1.1446563492957806, |
| "learning_rate": 9.231657847953129e-05, |
| "loss": 0.4579, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.0717185385656292, |
| "grad_norm": 0.7486659375486545, |
| "learning_rate": 9.175835355214798e-05, |
| "loss": 0.4525, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.0825439783491204, |
| "grad_norm": 0.751379457270728, |
| "learning_rate": 9.119448175798677e-05, |
| "loss": 0.4465, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.0933694181326117, |
| "grad_norm": 0.6717441683857258, |
| "learning_rate": 9.062505358087179e-05, |
| "loss": 0.449, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.104194857916103, |
| "grad_norm": 0.7286206911922811, |
| "learning_rate": 9.005016039625325e-05, |
| "loss": 0.4427, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.115020297699594, |
| "grad_norm": 0.6470798687068924, |
| "learning_rate": 8.94698944565445e-05, |
| "loss": 0.4495, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.1258457374830853, |
| "grad_norm": 0.6480605622628216, |
| "learning_rate": 8.888434887631836e-05, |
| "loss": 0.4389, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.1366711772665765, |
| "grad_norm": 0.4296944063679475, |
| "learning_rate": 8.829361761736521e-05, |
| "loss": 0.4423, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.1474966170500678, |
| "grad_norm": 0.6099046152220434, |
| "learning_rate": 8.769779547361498e-05, |
| "loss": 0.4474, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.1583220568335588, |
| "grad_norm": 0.4360950041119209, |
| "learning_rate": 8.709697805592578e-05, |
| "loss": 0.4404, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.16914749661705, |
| "grad_norm": 0.5017314757670758, |
| "learning_rate": 8.649126177674119e-05, |
| "loss": 0.436, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.1799729364005414, |
| "grad_norm": 0.49957087979350345, |
| "learning_rate": 8.588074383461926e-05, |
| "loss": 0.441, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.1907983761840324, |
| "grad_norm": 0.492100303371075, |
| "learning_rate": 8.526552219863502e-05, |
| "loss": 0.446, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.2016238159675237, |
| "grad_norm": 0.5107451733829671, |
| "learning_rate": 8.464569559265956e-05, |
| "loss": 0.4437, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.212449255751015, |
| "grad_norm": 0.5195883033342021, |
| "learning_rate": 8.402136347951797e-05, |
| "loss": 0.4383, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.2232746955345062, |
| "grad_norm": 0.484203520160995, |
| "learning_rate": 8.339262604502857e-05, |
| "loss": 0.4345, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.2341001353179972, |
| "grad_norm": 0.4052565892354387, |
| "learning_rate": 8.275958418192629e-05, |
| "loss": 0.4406, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.2449255751014885, |
| "grad_norm": 0.44019644743443515, |
| "learning_rate": 8.212233947367252e-05, |
| "loss": 0.4469, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.2557510148849798, |
| "grad_norm": 0.5974100344045092, |
| "learning_rate": 8.148099417815411e-05, |
| "loss": 0.4383, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.266576454668471, |
| "grad_norm": 0.645910343994611, |
| "learning_rate": 8.083565121127416e-05, |
| "loss": 0.4365, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.277401894451962, |
| "grad_norm": 0.7963176183442402, |
| "learning_rate": 8.01864141304373e-05, |
| "loss": 0.4373, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.2882273342354533, |
| "grad_norm": 1.1462155724648733, |
| "learning_rate": 7.953338711793185e-05, |
| "loss": 0.4423, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.2990527740189446, |
| "grad_norm": 0.8620914093895499, |
| "learning_rate": 7.887667496421197e-05, |
| "loss": 0.4335, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.3098782138024356, |
| "grad_norm": 0.8473383837505596, |
| "learning_rate": 7.821638305108189e-05, |
| "loss": 0.4398, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.320703653585927, |
| "grad_norm": 1.0572991577428552, |
| "learning_rate": 7.755261733478551e-05, |
| "loss": 0.4367, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.3315290933694182, |
| "grad_norm": 1.140899053275173, |
| "learning_rate": 7.688548432900373e-05, |
| "loss": 0.4394, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.3423545331529092, |
| "grad_norm": 0.5348253131421844, |
| "learning_rate": 7.621509108776232e-05, |
| "loss": 0.4347, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.3531799729364005, |
| "grad_norm": 0.4277663692986505, |
| "learning_rate": 7.554154518825313e-05, |
| "loss": 0.4353, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.3640054127198917, |
| "grad_norm": 0.8923758173820789, |
| "learning_rate": 7.486495471357116e-05, |
| "loss": 0.4353, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.374830852503383, |
| "grad_norm": 1.0914091592056432, |
| "learning_rate": 7.418542823537082e-05, |
| "loss": 0.4293, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.3856562922868743, |
| "grad_norm": 0.493002517578744, |
| "learning_rate": 7.350307479644334e-05, |
| "loss": 0.4289, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.3964817320703653, |
| "grad_norm": 0.47827880244934046, |
| "learning_rate": 7.281800389321898e-05, |
| "loss": 0.4289, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.4073071718538566, |
| "grad_norm": 0.9376108017080121, |
| "learning_rate": 7.21303254581962e-05, |
| "loss": 0.4358, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.4181326116373478, |
| "grad_norm": 0.9428376309570359, |
| "learning_rate": 7.144014984230094e-05, |
| "loss": 0.4362, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.4289580514208389, |
| "grad_norm": 0.5757201367588043, |
| "learning_rate": 7.074758779717872e-05, |
| "loss": 0.4369, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.4397834912043301, |
| "grad_norm": 0.6048455591912312, |
| "learning_rate": 7.005275045742245e-05, |
| "loss": 0.4295, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.4506089309878214, |
| "grad_norm": 0.523998052532321, |
| "learning_rate": 6.935574932273883e-05, |
| "loss": 0.4315, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.4614343707713127, |
| "grad_norm": 0.5397427917080534, |
| "learning_rate": 6.865669624005612e-05, |
| "loss": 0.4329, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.472259810554804, |
| "grad_norm": 0.5235978832989827, |
| "learning_rate": 6.795570338557618e-05, |
| "loss": 0.4313, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.483085250338295, |
| "grad_norm": 0.3129416083173963, |
| "learning_rate": 6.72528832467737e-05, |
| "loss": 0.4279, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.4939106901217862, |
| "grad_norm": 0.4970063830078409, |
| "learning_rate": 6.65483486043455e-05, |
| "loss": 0.4341, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.5047361299052775, |
| "grad_norm": 0.47909790019765314, |
| "learning_rate": 6.584221251411267e-05, |
| "loss": 0.433, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.5155615696887685, |
| "grad_norm": 0.3226892723695655, |
| "learning_rate": 6.513458828887876e-05, |
| "loss": 0.4319, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.5263870094722598, |
| "grad_norm": 0.4440079453972877, |
| "learning_rate": 6.442558948024646e-05, |
| "loss": 0.429, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.537212449255751, |
| "grad_norm": 0.5152686044377337, |
| "learning_rate": 6.371532986039628e-05, |
| "loss": 0.4293, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.548037889039242, |
| "grad_norm": 0.43779509661289856, |
| "learning_rate": 6.30039234038295e-05, |
| "loss": 0.436, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.5588633288227334, |
| "grad_norm": 0.31984409816781834, |
| "learning_rate": 6.229148426907894e-05, |
| "loss": 0.4274, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.5696887686062246, |
| "grad_norm": 0.27406976333777083, |
| "learning_rate": 6.157812678039005e-05, |
| "loss": 0.4254, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.5805142083897157, |
| "grad_norm": 0.28606524914172654, |
| "learning_rate": 6.086396540937541e-05, |
| "loss": 0.4254, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.5913396481732072, |
| "grad_norm": 0.31699705967037795, |
| "learning_rate": 6.014911475664562e-05, |
| "loss": 0.4222, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.6021650879566982, |
| "grad_norm": 0.27781981741999395, |
| "learning_rate": 5.9433689533419476e-05, |
| "loss": 0.4411, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.6129905277401895, |
| "grad_norm": 0.31382913986251637, |
| "learning_rate": 5.871780454311643e-05, |
| "loss": 0.4235, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.6238159675236807, |
| "grad_norm": 0.31850123273660286, |
| "learning_rate": 5.8001574662934144e-05, |
| "loss": 0.4303, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.6346414073071718, |
| "grad_norm": 0.24056778596700748, |
| "learning_rate": 5.7285114825414433e-05, |
| "loss": 0.4311, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.645466847090663, |
| "grad_norm": 0.2586338153936464, |
| "learning_rate": 5.656854e-05, |
| "loss": 0.4247, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.6562922868741543, |
| "grad_norm": 0.23655283610366792, |
| "learning_rate": 5.5851965174585584e-05, |
| "loss": 0.4368, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.6671177266576453, |
| "grad_norm": 0.2592063875823944, |
| "learning_rate": 5.5135505337065866e-05, |
| "loss": 0.4294, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.6779431664411368, |
| "grad_norm": 0.25226779767266616, |
| "learning_rate": 5.441927545688358e-05, |
| "loss": 0.4225, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.6887686062246279, |
| "grad_norm": 0.216123495205421, |
| "learning_rate": 5.3703390466580534e-05, |
| "loss": 0.4302, |
| "step": 156 |
| }, |
| { |
| "epoch": 1.699594046008119, |
| "grad_norm": 0.2822002037132782, |
| "learning_rate": 5.2987965243354394e-05, |
| "loss": 0.4171, |
| "step": 157 |
| }, |
| { |
| "epoch": 1.7104194857916104, |
| "grad_norm": 0.21868337726749193, |
| "learning_rate": 5.22731145906246e-05, |
| "loss": 0.4278, |
| "step": 158 |
| }, |
| { |
| "epoch": 1.7212449255751014, |
| "grad_norm": 0.2416291692003587, |
| "learning_rate": 5.1558953219609965e-05, |
| "loss": 0.4344, |
| "step": 159 |
| }, |
| { |
| "epoch": 1.7320703653585927, |
| "grad_norm": 0.21904695751514086, |
| "learning_rate": 5.084559573092107e-05, |
| "loss": 0.4288, |
| "step": 160 |
| }, |
| { |
| "epoch": 1.742895805142084, |
| "grad_norm": 0.23360874499091466, |
| "learning_rate": 5.013315659617052e-05, |
| "loss": 0.4258, |
| "step": 161 |
| }, |
| { |
| "epoch": 1.753721244925575, |
| "grad_norm": 0.220076679488733, |
| "learning_rate": 4.9421750139603754e-05, |
| "loss": 0.4311, |
| "step": 162 |
| }, |
| { |
| "epoch": 1.7645466847090663, |
| "grad_norm": 0.2620382815091359, |
| "learning_rate": 4.871149051975356e-05, |
| "loss": 0.4244, |
| "step": 163 |
| }, |
| { |
| "epoch": 1.7753721244925575, |
| "grad_norm": 0.20533381271916243, |
| "learning_rate": 4.8002491711121275e-05, |
| "loss": 0.4214, |
| "step": 164 |
| }, |
| { |
| "epoch": 1.7861975642760486, |
| "grad_norm": 0.21959248207061696, |
| "learning_rate": 4.729486748588734e-05, |
| "loss": 0.4286, |
| "step": 165 |
| }, |
| { |
| "epoch": 1.79702300405954, |
| "grad_norm": 0.24544077165398928, |
| "learning_rate": 4.6588731395654526e-05, |
| "loss": 0.4214, |
| "step": 166 |
| }, |
| { |
| "epoch": 1.8078484438430311, |
| "grad_norm": 0.21079099929633802, |
| "learning_rate": 4.588419675322632e-05, |
| "loss": 0.4271, |
| "step": 167 |
| }, |
| { |
| "epoch": 1.8186738836265224, |
| "grad_norm": 0.27737463865816947, |
| "learning_rate": 4.5181376614423824e-05, |
| "loss": 0.4242, |
| "step": 168 |
| }, |
| { |
| "epoch": 1.8294993234100136, |
| "grad_norm": 0.18815438760526704, |
| "learning_rate": 4.448038375994388e-05, |
| "loss": 0.4321, |
| "step": 169 |
| }, |
| { |
| "epoch": 1.8403247631935047, |
| "grad_norm": 0.22645725922608995, |
| "learning_rate": 4.3781330677261174e-05, |
| "loss": 0.4231, |
| "step": 170 |
| }, |
| { |
| "epoch": 1.851150202976996, |
| "grad_norm": 0.2511332563566454, |
| "learning_rate": 4.308432954257754e-05, |
| "loss": 0.4193, |
| "step": 171 |
| }, |
| { |
| "epoch": 1.8619756427604872, |
| "grad_norm": 0.2133202149757832, |
| "learning_rate": 4.238949220282127e-05, |
| "loss": 0.4206, |
| "step": 172 |
| }, |
| { |
| "epoch": 1.8728010825439783, |
| "grad_norm": 0.22435313580130933, |
| "learning_rate": 4.169693015769905e-05, |
| "loss": 0.4144, |
| "step": 173 |
| }, |
| { |
| "epoch": 1.8836265223274695, |
| "grad_norm": 0.2702204492113976, |
| "learning_rate": 4.1006754541803794e-05, |
| "loss": 0.4271, |
| "step": 174 |
| }, |
| { |
| "epoch": 1.8944519621109608, |
| "grad_norm": 0.21199239148820068, |
| "learning_rate": 4.031907610678103e-05, |
| "loss": 0.4214, |
| "step": 175 |
| }, |
| { |
| "epoch": 1.9052774018944518, |
| "grad_norm": 0.23353872965172265, |
| "learning_rate": 3.963400520355668e-05, |
| "loss": 0.4162, |
| "step": 176 |
| }, |
| { |
| "epoch": 1.9161028416779433, |
| "grad_norm": 0.26040652564822436, |
| "learning_rate": 3.89516517646292e-05, |
| "loss": 0.4184, |
| "step": 177 |
| }, |
| { |
| "epoch": 1.9269282814614344, |
| "grad_norm": 0.22200003840805208, |
| "learning_rate": 3.827212528642884e-05, |
| "loss": 0.4276, |
| "step": 178 |
| }, |
| { |
| "epoch": 1.9377537212449256, |
| "grad_norm": 0.23150185026637812, |
| "learning_rate": 3.7595534811746885e-05, |
| "loss": 0.4218, |
| "step": 179 |
| }, |
| { |
| "epoch": 1.9485791610284169, |
| "grad_norm": 0.21285367773990543, |
| "learning_rate": 3.6921988912237676e-05, |
| "loss": 0.424, |
| "step": 180 |
| }, |
| { |
| "epoch": 1.959404600811908, |
| "grad_norm": 0.17015460687966813, |
| "learning_rate": 3.6251595670996275e-05, |
| "loss": 0.4224, |
| "step": 181 |
| }, |
| { |
| "epoch": 1.9702300405953992, |
| "grad_norm": 0.18773663319664743, |
| "learning_rate": 3.5584462665214514e-05, |
| "loss": 0.4186, |
| "step": 182 |
| }, |
| { |
| "epoch": 1.9810554803788905, |
| "grad_norm": 0.19181378149826905, |
| "learning_rate": 3.492069694891813e-05, |
| "loss": 0.427, |
| "step": 183 |
| }, |
| { |
| "epoch": 1.9918809201623815, |
| "grad_norm": 0.21051925665273966, |
| "learning_rate": 3.4260405035788045e-05, |
| "loss": 0.4274, |
| "step": 184 |
| }, |
| { |
| "epoch": 2.002706359945873, |
| "grad_norm": 0.3073354258820752, |
| "learning_rate": 3.360369288206815e-05, |
| "loss": 0.6681, |
| "step": 185 |
| }, |
| { |
| "epoch": 2.013531799729364, |
| "grad_norm": 0.25064046553309194, |
| "learning_rate": 3.295066586956273e-05, |
| "loss": 0.3897, |
| "step": 186 |
| }, |
| { |
| "epoch": 2.024357239512855, |
| "grad_norm": 0.24338261440618886, |
| "learning_rate": 3.230142878872587e-05, |
| "loss": 0.3883, |
| "step": 187 |
| }, |
| { |
| "epoch": 2.0351826792963466, |
| "grad_norm": 0.22284014657928528, |
| "learning_rate": 3.1656085821845896e-05, |
| "loss": 0.3893, |
| "step": 188 |
| }, |
| { |
| "epoch": 2.0460081190798376, |
| "grad_norm": 0.2194057958448553, |
| "learning_rate": 3.101474052632748e-05, |
| "loss": 0.3827, |
| "step": 189 |
| }, |
| { |
| "epoch": 2.0568335588633286, |
| "grad_norm": 0.23698144898082665, |
| "learning_rate": 3.0377495818073712e-05, |
| "loss": 0.3878, |
| "step": 190 |
| }, |
| { |
| "epoch": 2.06765899864682, |
| "grad_norm": 0.2543712215613022, |
| "learning_rate": 2.974445395497144e-05, |
| "loss": 0.3924, |
| "step": 191 |
| }, |
| { |
| "epoch": 2.078484438430311, |
| "grad_norm": 0.25600423720764554, |
| "learning_rate": 2.9115716520482054e-05, |
| "loss": 0.3967, |
| "step": 192 |
| }, |
| { |
| "epoch": 2.089309878213802, |
| "grad_norm": 0.2511300968768311, |
| "learning_rate": 2.849138440734045e-05, |
| "loss": 0.3885, |
| "step": 193 |
| }, |
| { |
| "epoch": 2.1001353179972937, |
| "grad_norm": 0.2253675731410667, |
| "learning_rate": 2.787155780136501e-05, |
| "loss": 0.3805, |
| "step": 194 |
| }, |
| { |
| "epoch": 2.1109607577807847, |
| "grad_norm": 0.23257374074012813, |
| "learning_rate": 2.725633616538076e-05, |
| "loss": 0.3901, |
| "step": 195 |
| }, |
| { |
| "epoch": 2.121786197564276, |
| "grad_norm": 0.20754444795906105, |
| "learning_rate": 2.6645818223258827e-05, |
| "loss": 0.3889, |
| "step": 196 |
| }, |
| { |
| "epoch": 2.1326116373477673, |
| "grad_norm": 0.23974692697842115, |
| "learning_rate": 2.6040101944074254e-05, |
| "loss": 0.3836, |
| "step": 197 |
| }, |
| { |
| "epoch": 2.1434370771312583, |
| "grad_norm": 0.1865329505582591, |
| "learning_rate": 2.5439284526385022e-05, |
| "loss": 0.3831, |
| "step": 198 |
| }, |
| { |
| "epoch": 2.15426251691475, |
| "grad_norm": 0.20901891546099988, |
| "learning_rate": 2.48434623826348e-05, |
| "loss": 0.3897, |
| "step": 199 |
| }, |
| { |
| "epoch": 2.165087956698241, |
| "grad_norm": 0.17039892814308769, |
| "learning_rate": 2.4252731123681627e-05, |
| "loss": 0.3921, |
| "step": 200 |
| }, |
| { |
| "epoch": 2.175913396481732, |
| "grad_norm": 0.18978843204311083, |
| "learning_rate": 2.366718554345549e-05, |
| "loss": 0.3869, |
| "step": 201 |
| }, |
| { |
| "epoch": 2.1867388362652234, |
| "grad_norm": 0.13803495552773962, |
| "learning_rate": 2.3086919603746738e-05, |
| "loss": 0.3842, |
| "step": 202 |
| }, |
| { |
| "epoch": 2.1975642760487144, |
| "grad_norm": 0.19506478963791687, |
| "learning_rate": 2.25120264191282e-05, |
| "loss": 0.3909, |
| "step": 203 |
| }, |
| { |
| "epoch": 2.208389715832206, |
| "grad_norm": 0.13474482771600654, |
| "learning_rate": 2.194259824201323e-05, |
| "loss": 0.3886, |
| "step": 204 |
| }, |
| { |
| "epoch": 2.219215155615697, |
| "grad_norm": 0.14807125927777573, |
| "learning_rate": 2.1378726447852015e-05, |
| "loss": 0.3867, |
| "step": 205 |
| }, |
| { |
| "epoch": 2.230040595399188, |
| "grad_norm": 0.15365440578968242, |
| "learning_rate": 2.082050152046872e-05, |
| "loss": 0.3881, |
| "step": 206 |
| }, |
| { |
| "epoch": 2.2408660351826795, |
| "grad_norm": 0.14558829971823375, |
| "learning_rate": 2.0268013037541607e-05, |
| "loss": 0.3914, |
| "step": 207 |
| }, |
| { |
| "epoch": 2.2516914749661705, |
| "grad_norm": 0.1371560184262318, |
| "learning_rate": 1.9721349656228674e-05, |
| "loss": 0.3823, |
| "step": 208 |
| }, |
| { |
| "epoch": 2.2625169147496615, |
| "grad_norm": 0.11747332446934883, |
| "learning_rate": 1.918059909894089e-05, |
| "loss": 0.387, |
| "step": 209 |
| }, |
| { |
| "epoch": 2.273342354533153, |
| "grad_norm": 0.13170177080479875, |
| "learning_rate": 1.8645848139265496e-05, |
| "loss": 0.3828, |
| "step": 210 |
| }, |
| { |
| "epoch": 2.284167794316644, |
| "grad_norm": 0.12950576152608145, |
| "learning_rate": 1.8117182588041633e-05, |
| "loss": 0.3843, |
| "step": 211 |
| }, |
| { |
| "epoch": 2.2949932341001356, |
| "grad_norm": 0.1262147300774054, |
| "learning_rate": 1.759468727959026e-05, |
| "loss": 0.3848, |
| "step": 212 |
| }, |
| { |
| "epoch": 2.3058186738836266, |
| "grad_norm": 0.13499342219785854, |
| "learning_rate": 1.707844605810096e-05, |
| "loss": 0.3922, |
| "step": 213 |
| }, |
| { |
| "epoch": 2.3166441136671176, |
| "grad_norm": 0.12019629775786407, |
| "learning_rate": 1.6568541764177544e-05, |
| "loss": 0.3868, |
| "step": 214 |
| }, |
| { |
| "epoch": 2.3274695534506087, |
| "grad_norm": 0.123057853560049, |
| "learning_rate": 1.6065056221544746e-05, |
| "loss": 0.3875, |
| "step": 215 |
| }, |
| { |
| "epoch": 2.3382949932341, |
| "grad_norm": 0.1276153162222321, |
| "learning_rate": 1.556807022391805e-05, |
| "loss": 0.3932, |
| "step": 216 |
| }, |
| { |
| "epoch": 2.349120433017591, |
| "grad_norm": 0.11893041854841553, |
| "learning_rate": 1.5077663522038803e-05, |
| "loss": 0.3866, |
| "step": 217 |
| }, |
| { |
| "epoch": 2.3599458728010827, |
| "grad_norm": 0.12645634242130177, |
| "learning_rate": 1.4593914810876749e-05, |
| "loss": 0.3941, |
| "step": 218 |
| }, |
| { |
| "epoch": 2.3707713125845737, |
| "grad_norm": 0.1325594756769902, |
| "learning_rate": 1.4116901717001894e-05, |
| "loss": 0.3915, |
| "step": 219 |
| }, |
| { |
| "epoch": 2.381596752368065, |
| "grad_norm": 0.11427081831052392, |
| "learning_rate": 1.3646700786127952e-05, |
| "loss": 0.3799, |
| "step": 220 |
| }, |
| { |
| "epoch": 2.3924221921515563, |
| "grad_norm": 0.1182894319259809, |
| "learning_rate": 1.3183387470829026e-05, |
| "loss": 0.3865, |
| "step": 221 |
| }, |
| { |
| "epoch": 2.4032476319350473, |
| "grad_norm": 0.13812990571742856, |
| "learning_rate": 1.2727036118431905e-05, |
| "loss": 0.3916, |
| "step": 222 |
| }, |
| { |
| "epoch": 2.414073071718539, |
| "grad_norm": 0.1240184216152659, |
| "learning_rate": 1.2277719959085634e-05, |
| "loss": 0.392, |
| "step": 223 |
| }, |
| { |
| "epoch": 2.42489851150203, |
| "grad_norm": 0.12146813464559397, |
| "learning_rate": 1.1835511094010329e-05, |
| "loss": 0.389, |
| "step": 224 |
| }, |
| { |
| "epoch": 2.435723951285521, |
| "grad_norm": 0.12612143081575952, |
| "learning_rate": 1.1400480483927205e-05, |
| "loss": 0.3869, |
| "step": 225 |
| }, |
| { |
| "epoch": 2.4465493910690124, |
| "grad_norm": 0.14119782771540126, |
| "learning_rate": 1.0972697937671591e-05, |
| "loss": 0.3898, |
| "step": 226 |
| }, |
| { |
| "epoch": 2.4573748308525034, |
| "grad_norm": 0.11466559847267338, |
| "learning_rate": 1.0552232100990782e-05, |
| "loss": 0.3833, |
| "step": 227 |
| }, |
| { |
| "epoch": 2.4682002706359945, |
| "grad_norm": 0.11857984272947328, |
| "learning_rate": 1.0139150445528555e-05, |
| "loss": 0.3853, |
| "step": 228 |
| }, |
| { |
| "epoch": 2.479025710419486, |
| "grad_norm": 0.11379457603595537, |
| "learning_rate": 9.733519257998048e-06, |
| "loss": 0.3821, |
| "step": 229 |
| }, |
| { |
| "epoch": 2.489851150202977, |
| "grad_norm": 0.12172192596025545, |
| "learning_rate": 9.335403629544873e-06, |
| "loss": 0.3833, |
| "step": 230 |
| }, |
| { |
| "epoch": 2.500676589986468, |
| "grad_norm": 0.11773376245986623, |
| "learning_rate": 8.94486744530191e-06, |
| "loss": 0.393, |
| "step": 231 |
| }, |
| { |
| "epoch": 2.5115020297699595, |
| "grad_norm": 0.10285756232686279, |
| "learning_rate": 8.561973374137876e-06, |
| "loss": 0.3897, |
| "step": 232 |
| }, |
| { |
| "epoch": 2.5223274695534506, |
| "grad_norm": 0.11531820935775133, |
| "learning_rate": 8.186782858600797e-06, |
| "loss": 0.3849, |
| "step": 233 |
| }, |
| { |
| "epoch": 2.533152909336942, |
| "grad_norm": 0.10364864354320348, |
| "learning_rate": 7.819356105058482e-06, |
| "loss": 0.3886, |
| "step": 234 |
| }, |
| { |
| "epoch": 2.543978349120433, |
| "grad_norm": 0.09908743652461002, |
| "learning_rate": 7.459752074037232e-06, |
| "loss": 0.3803, |
| "step": 235 |
| }, |
| { |
| "epoch": 2.554803788903924, |
| "grad_norm": 0.0955884820706306, |
| "learning_rate": 7.1080284707605624e-06, |
| "loss": 0.3804, |
| "step": 236 |
| }, |
| { |
| "epoch": 2.565629228687415, |
| "grad_norm": 0.09846787056135109, |
| "learning_rate": 6.764241735889261e-06, |
| "loss": 0.3866, |
| "step": 237 |
| }, |
| { |
| "epoch": 2.5764546684709067, |
| "grad_norm": 0.09799859422334903, |
| "learning_rate": 6.428447036464477e-06, |
| "loss": 0.3858, |
| "step": 238 |
| }, |
| { |
| "epoch": 2.5872801082543977, |
| "grad_norm": 0.09679034120618978, |
| "learning_rate": 6.100698257055085e-06, |
| "loss": 0.3834, |
| "step": 239 |
| }, |
| { |
| "epoch": 2.598105548037889, |
| "grad_norm": 0.10093400603955019, |
| "learning_rate": 5.781047991110923e-06, |
| "loss": 0.3896, |
| "step": 240 |
| }, |
| { |
| "epoch": 2.60893098782138, |
| "grad_norm": 0.10104638095796786, |
| "learning_rate": 5.46954753252318e-06, |
| "loss": 0.3924, |
| "step": 241 |
| }, |
| { |
| "epoch": 2.6197564276048713, |
| "grad_norm": 0.10038405717214048, |
| "learning_rate": 5.166246867393346e-06, |
| "loss": 0.3918, |
| "step": 242 |
| }, |
| { |
| "epoch": 2.6305818673883627, |
| "grad_norm": 0.09904219658805112, |
| "learning_rate": 4.87119466601194e-06, |
| "loss": 0.3835, |
| "step": 243 |
| }, |
| { |
| "epoch": 2.641407307171854, |
| "grad_norm": 0.11316612551154176, |
| "learning_rate": 4.58443827504852e-06, |
| "loss": 0.3821, |
| "step": 244 |
| }, |
| { |
| "epoch": 2.6522327469553453, |
| "grad_norm": 0.097473479720162, |
| "learning_rate": 4.306023709953976e-06, |
| "loss": 0.3833, |
| "step": 245 |
| }, |
| { |
| "epoch": 2.6630581867388363, |
| "grad_norm": 0.0952854432600168, |
| "learning_rate": 4.035995647576508e-06, |
| "loss": 0.3834, |
| "step": 246 |
| }, |
| { |
| "epoch": 2.6738836265223274, |
| "grad_norm": 0.09299918262798558, |
| "learning_rate": 3.774397418992369e-06, |
| "loss": 0.3889, |
| "step": 247 |
| }, |
| { |
| "epoch": 2.6847090663058184, |
| "grad_norm": 0.09888399612743574, |
| "learning_rate": 3.521271002552602e-06, |
| "loss": 0.3851, |
| "step": 248 |
| }, |
| { |
| "epoch": 2.69553450608931, |
| "grad_norm": 0.10298924594419286, |
| "learning_rate": 3.2766570171467893e-06, |
| "loss": 0.3806, |
| "step": 249 |
| }, |
| { |
| "epoch": 2.706359945872801, |
| "grad_norm": 0.10085730184506674, |
| "learning_rate": 3.040594715685033e-06, |
| "loss": 0.3918, |
| "step": 250 |
| }, |
| { |
| "epoch": 2.7171853856562924, |
| "grad_norm": 0.09724136823771373, |
| "learning_rate": 2.8131219787990694e-06, |
| "loss": 0.3876, |
| "step": 251 |
| }, |
| { |
| "epoch": 2.7280108254397835, |
| "grad_norm": 0.08743227467952154, |
| "learning_rate": 2.5942753087635825e-06, |
| "loss": 0.3913, |
| "step": 252 |
| }, |
| { |
| "epoch": 2.7388362652232745, |
| "grad_norm": 0.09212384514082983, |
| "learning_rate": 2.3840898236387664e-06, |
| "loss": 0.3903, |
| "step": 253 |
| }, |
| { |
| "epoch": 2.749661705006766, |
| "grad_norm": 0.08767500659026299, |
| "learning_rate": 2.1825992516349626e-06, |
| "loss": 0.3837, |
| "step": 254 |
| }, |
| { |
| "epoch": 2.760487144790257, |
| "grad_norm": 0.08408432260309959, |
| "learning_rate": 1.9898359257003126e-06, |
| "loss": 0.3797, |
| "step": 255 |
| }, |
| { |
| "epoch": 2.7713125845737485, |
| "grad_norm": 0.08602719858789641, |
| "learning_rate": 1.8058307783323744e-06, |
| "loss": 0.379, |
| "step": 256 |
| }, |
| { |
| "epoch": 2.7821380243572396, |
| "grad_norm": 0.08198855685273775, |
| "learning_rate": 1.630613336614365e-06, |
| "loss": 0.3913, |
| "step": 257 |
| }, |
| { |
| "epoch": 2.7929634641407306, |
| "grad_norm": 0.08559891153184518, |
| "learning_rate": 1.464211717477035e-06, |
| "loss": 0.3848, |
| "step": 258 |
| }, |
| { |
| "epoch": 2.803788903924222, |
| "grad_norm": 0.08076564462446145, |
| "learning_rate": 1.30665262318675e-06, |
| "loss": 0.3833, |
| "step": 259 |
| }, |
| { |
| "epoch": 2.814614343707713, |
| "grad_norm": 0.07997674076899126, |
| "learning_rate": 1.1579613370606156e-06, |
| "loss": 0.3893, |
| "step": 260 |
| }, |
| { |
| "epoch": 2.825439783491204, |
| "grad_norm": 0.08211459263332578, |
| "learning_rate": 1.0181617194093145e-06, |
| "loss": 0.3787, |
| "step": 261 |
| }, |
| { |
| "epoch": 2.8362652232746957, |
| "grad_norm": 0.08012043175321967, |
| "learning_rate": 8.872762037082338e-07, |
| "loss": 0.3841, |
| "step": 262 |
| }, |
| { |
| "epoch": 2.8470906630581867, |
| "grad_norm": 0.08182261519883058, |
| "learning_rate": 7.653257929976282e-07, |
| "loss": 0.3882, |
| "step": 263 |
| }, |
| { |
| "epoch": 2.8579161028416777, |
| "grad_norm": 0.08367217916154837, |
| "learning_rate": 6.523300565122583e-07, |
| "loss": 0.3838, |
| "step": 264 |
| }, |
| { |
| "epoch": 2.8687415426251692, |
| "grad_norm": 0.07915591706058425, |
| "learning_rate": 5.483071265411561e-07, |
| "loss": 0.3881, |
| "step": 265 |
| }, |
| { |
| "epoch": 2.8795669824086603, |
| "grad_norm": 0.0834396132172137, |
| "learning_rate": 4.5327369551796154e-07, |
| "loss": 0.3861, |
| "step": 266 |
| }, |
| { |
| "epoch": 2.8903924221921518, |
| "grad_norm": 0.08040680444066323, |
| "learning_rate": 3.6724501334229996e-07, |
| "loss": 0.3824, |
| "step": 267 |
| }, |
| { |
| "epoch": 2.901217861975643, |
| "grad_norm": 0.07960262290429217, |
| "learning_rate": 2.9023488493264326e-07, |
| "loss": 0.3831, |
| "step": 268 |
| }, |
| { |
| "epoch": 2.912043301759134, |
| "grad_norm": 0.0813772697299331, |
| "learning_rate": 2.2225566801107246e-07, |
| "loss": 0.3852, |
| "step": 269 |
| }, |
| { |
| "epoch": 2.9228687415426253, |
| "grad_norm": 0.0799088400195742, |
| "learning_rate": 1.6331827112021966e-07, |
| "loss": 0.3873, |
| "step": 270 |
| }, |
| { |
| "epoch": 2.9336941813261164, |
| "grad_norm": 0.0792761094231235, |
| "learning_rate": 1.134321518728097e-07, |
| "loss": 0.384, |
| "step": 271 |
| }, |
| { |
| "epoch": 2.944519621109608, |
| "grad_norm": 0.08041405015290681, |
| "learning_rate": 7.260531543401518e-08, |
| "loss": 0.39, |
| "step": 272 |
| }, |
| { |
| "epoch": 2.955345060893099, |
| "grad_norm": 0.08011333717359025, |
| "learning_rate": 4.0844313236838236e-08, |
| "loss": 0.387, |
| "step": 273 |
| }, |
| { |
| "epoch": 2.96617050067659, |
| "grad_norm": 0.08677763388970985, |
| "learning_rate": 1.815424193085831e-08, |
| "loss": 0.3861, |
| "step": 274 |
| }, |
| { |
| "epoch": 2.976995940460081, |
| "grad_norm": 0.0781257700674137, |
| "learning_rate": 4.538742564345954e-09, |
| "loss": 0.3934, |
| "step": 275 |
| }, |
| { |
| "epoch": 2.9878213802435725, |
| "grad_norm": 0.07617532903691134, |
| "learning_rate": 0.0, |
| "loss": 0.381, |
| "step": 276 |
| }, |
| { |
| "epoch": 2.9878213802435725, |
| "step": 276, |
| "total_flos": 6.629121904995205e+18, |
| "train_loss": 0.46416003235440323, |
| "train_runtime": 27521.3535, |
| "train_samples_per_second": 5.155, |
| "train_steps_per_second": 0.01 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 276, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 6.629121904995205e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |