{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9976711690731253,
  "eval_steps": 500,
  "global_step": 3219,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004657661853749418,
      "grad_norm": 67.33102442199409,
      "learning_rate": 7.763975155279503e-07,
      "loss": 10.9243,
      "num_tokens": 5242880.0,
      "step": 5
    },
    {
      "epoch": 0.009315323707498836,
      "grad_norm": 76.939315174805,
      "learning_rate": 1.5527950310559006e-06,
      "loss": 10.6916,
      "num_tokens": 10485760.0,
      "step": 10
    },
    {
      "epoch": 0.013972985561248253,
      "grad_norm": 151.11828495401204,
      "learning_rate": 2.329192546583851e-06,
      "loss": 9.0337,
      "num_tokens": 15691438.0,
      "step": 15
    },
    {
      "epoch": 0.018630647414997672,
      "grad_norm": 42.59146354485402,
      "learning_rate": 3.1055900621118013e-06,
      "loss": 2.9531,
      "num_tokens": 20915252.0,
      "step": 20
    },
    {
      "epoch": 0.02328830926874709,
      "grad_norm": 4.571031193380479,
      "learning_rate": 3.881987577639752e-06,
      "loss": 1.4789,
      "num_tokens": 26134828.0,
      "step": 25
    },
    {
      "epoch": 0.027945971122496506,
      "grad_norm": 1.4567711175401257,
      "learning_rate": 4.658385093167702e-06,
      "loss": 1.1124,
      "num_tokens": 31377708.0,
      "step": 30
    },
    {
      "epoch": 0.032603632976245925,
      "grad_norm": 0.9575763876050868,
      "learning_rate": 5.4347826086956525e-06,
      "loss": 0.9113,
      "num_tokens": 36557688.0,
      "step": 35
    },
    {
      "epoch": 0.037261294829995344,
      "grad_norm": 0.7672718225201524,
      "learning_rate": 6.2111801242236025e-06,
      "loss": 0.8087,
      "num_tokens": 41800322.0,
      "step": 40
    },
    {
      "epoch": 0.04191895668374476,
      "grad_norm": 0.4649090081953615,
      "learning_rate": 6.9875776397515525e-06,
      "loss": 0.7057,
      "num_tokens": 47043202.0,
      "step": 45
    },
    {
      "epoch": 0.04657661853749418,
      "grad_norm": 0.4761637414641936,
      "learning_rate": 7.763975155279503e-06,
      "loss": 0.6795,
      "num_tokens": 52286082.0,
      "step": 50
    },
    {
      "epoch": 0.05123428039124359,
      "grad_norm": 0.36599632405025506,
      "learning_rate": 8.540372670807453e-06,
      "loss": 0.6451,
      "num_tokens": 57528962.0,
      "step": 55
    },
    {
      "epoch": 0.05589194224499301,
      "grad_norm": 0.32929608448754455,
      "learning_rate": 9.316770186335403e-06,
      "loss": 0.6268,
      "num_tokens": 62771842.0,
      "step": 60
    },
    {
      "epoch": 0.06054960409874243,
      "grad_norm": 0.3395838468399009,
      "learning_rate": 1.0093167701863353e-05,
      "loss": 0.6077,
      "num_tokens": 67980354.0,
      "step": 65
    },
    {
      "epoch": 0.06520726595249185,
      "grad_norm": 0.2904281833306969,
      "learning_rate": 1.0869565217391305e-05,
      "loss": 0.5759,
      "num_tokens": 73223234.0,
      "step": 70
    },
    {
      "epoch": 0.06986492780624126,
      "grad_norm": 0.28386534521971535,
      "learning_rate": 1.1645962732919255e-05,
      "loss": 0.5736,
      "num_tokens": 78437814.0,
      "step": 75
    },
    {
      "epoch": 0.07452258965999069,
      "grad_norm": 0.26889441664983793,
      "learning_rate": 1.2422360248447205e-05,
      "loss": 0.5806,
      "num_tokens": 83680694.0,
      "step": 80
    },
    {
      "epoch": 0.0791802515137401,
      "grad_norm": 0.29017750849113716,
      "learning_rate": 1.3198757763975155e-05,
      "loss": 0.5498,
      "num_tokens": 88923574.0,
      "step": 85
    },
    {
      "epoch": 0.08383791336748952,
      "grad_norm": 0.2916041557822553,
      "learning_rate": 1.3975155279503105e-05,
      "loss": 0.5525,
      "num_tokens": 94166454.0,
      "step": 90
    },
    {
      "epoch": 0.08849557522123894,
      "grad_norm": 0.2535438400227719,
      "learning_rate": 1.4751552795031057e-05,
      "loss": 0.5575,
      "num_tokens": 99409334.0,
      "step": 95
    },
    {
      "epoch": 0.09315323707498836,
      "grad_norm": 0.26627176578640954,
      "learning_rate": 1.5527950310559007e-05,
      "loss": 0.5437,
      "num_tokens": 104652214.0,
      "step": 100
    },
    {
      "epoch": 0.09781089892873777,
      "grad_norm": 0.26249269198814096,
      "learning_rate": 1.630434782608696e-05,
      "loss": 0.5415,
      "num_tokens": 109895094.0,
      "step": 105
    },
    {
      "epoch": 0.10246856078248719,
      "grad_norm": 0.25829159612176866,
      "learning_rate": 1.7080745341614907e-05,
      "loss": 0.5265,
      "num_tokens": 115137974.0,
      "step": 110
    },
    {
      "epoch": 0.10712622263623661,
      "grad_norm": 0.2823723655019188,
      "learning_rate": 1.785714285714286e-05,
      "loss": 0.5249,
      "num_tokens": 120380854.0,
      "step": 115
    },
    {
      "epoch": 0.11178388448998602,
      "grad_norm": 0.3076324188937903,
      "learning_rate": 1.8633540372670807e-05,
      "loss": 0.5359,
      "num_tokens": 125623734.0,
      "step": 120
    },
    {
      "epoch": 0.11644154634373545,
      "grad_norm": 0.282862899697644,
      "learning_rate": 1.940993788819876e-05,
      "loss": 0.5104,
      "num_tokens": 130866614.0,
      "step": 125
    },
    {
      "epoch": 0.12109920819748486,
      "grad_norm": 0.2774431130535118,
      "learning_rate": 2.0186335403726707e-05,
      "loss": 0.5108,
      "num_tokens": 136068456.0,
      "step": 130
    },
    {
      "epoch": 0.1257568700512343,
      "grad_norm": 0.28055723436069646,
      "learning_rate": 2.096273291925466e-05,
      "loss": 0.5188,
      "num_tokens": 141311336.0,
      "step": 135
    },
    {
      "epoch": 0.1304145319049837,
      "grad_norm": 0.27423438711852416,
      "learning_rate": 2.173913043478261e-05,
      "loss": 0.5083,
      "num_tokens": 146554216.0,
      "step": 140
    },
    {
      "epoch": 0.1350721937587331,
      "grad_norm": 0.3131755299165881,
      "learning_rate": 2.2515527950310562e-05,
      "loss": 0.5193,
      "num_tokens": 151797096.0,
      "step": 145
    },
    {
      "epoch": 0.13972985561248252,
      "grad_norm": 0.32914744269501367,
      "learning_rate": 2.329192546583851e-05,
      "loss": 0.51,
      "num_tokens": 157039976.0,
      "step": 150
    },
    {
      "epoch": 0.14438751746623196,
      "grad_norm": 0.29372223832524097,
      "learning_rate": 2.4068322981366462e-05,
      "loss": 0.4905,
      "num_tokens": 162282856.0,
      "step": 155
    },
    {
      "epoch": 0.14904517931998137,
      "grad_norm": 0.37141035661401894,
      "learning_rate": 2.484472049689441e-05,
      "loss": 0.499,
      "num_tokens": 167466294.0,
      "step": 160
    },
    {
      "epoch": 0.1537028411737308,
      "grad_norm": 0.38976448402892616,
      "learning_rate": 2.5621118012422362e-05,
      "loss": 0.4959,
      "num_tokens": 172703130.0,
      "step": 165
    },
    {
      "epoch": 0.1583605030274802,
      "grad_norm": 0.39616851892995664,
      "learning_rate": 2.639751552795031e-05,
      "loss": 0.4938,
      "num_tokens": 177946010.0,
      "step": 170
    },
    {
      "epoch": 0.1630181648812296,
      "grad_norm": 0.40986023718949466,
      "learning_rate": 2.7173913043478262e-05,
      "loss": 0.5058,
      "num_tokens": 183145930.0,
      "step": 175
    },
    {
      "epoch": 0.16767582673497905,
      "grad_norm": 0.3414802402931686,
      "learning_rate": 2.795031055900621e-05,
      "loss": 0.4969,
      "num_tokens": 188388810.0,
      "step": 180
    },
    {
      "epoch": 0.17233348858872846,
      "grad_norm": 0.3615570942497211,
      "learning_rate": 2.8726708074534165e-05,
      "loss": 0.4973,
      "num_tokens": 193628148.0,
      "step": 185
    },
    {
      "epoch": 0.17699115044247787,
      "grad_norm": 0.39771567973637517,
      "learning_rate": 2.9503105590062114e-05,
      "loss": 0.4883,
      "num_tokens": 198871028.0,
      "step": 190
    },
    {
      "epoch": 0.18164881229622729,
      "grad_norm": 0.3857506356368623,
      "learning_rate": 3.0279503105590062e-05,
      "loss": 0.4936,
      "num_tokens": 204113908.0,
      "step": 195
    },
    {
      "epoch": 0.18630647414997673,
      "grad_norm": 0.4432459136095662,
      "learning_rate": 3.1055900621118014e-05,
      "loss": 0.4642,
      "num_tokens": 209266418.0,
      "step": 200
    },
    {
      "epoch": 0.19096413600372614,
      "grad_norm": 0.7772917910003737,
      "learning_rate": 3.183229813664597e-05,
      "loss": 0.4873,
      "num_tokens": 214355738.0,
      "step": 205
    },
    {
      "epoch": 0.19562179785747555,
      "grad_norm": 0.5328482333878942,
      "learning_rate": 3.260869565217392e-05,
      "loss": 0.4746,
      "num_tokens": 219598618.0,
      "step": 210
    },
    {
      "epoch": 0.20027945971122496,
      "grad_norm": 0.45947984068235176,
      "learning_rate": 3.3385093167701865e-05,
      "loss": 0.4866,
      "num_tokens": 224779924.0,
      "step": 215
    },
    {
      "epoch": 0.20493712156497437,
      "grad_norm": 0.34742382353716433,
      "learning_rate": 3.4161490683229814e-05,
      "loss": 0.4825,
      "num_tokens": 230001220.0,
      "step": 220
    },
    {
      "epoch": 0.2095947834187238,
      "grad_norm": 0.49753988938771765,
      "learning_rate": 3.493788819875777e-05,
      "loss": 0.4825,
      "num_tokens": 235244100.0,
      "step": 225
    },
    {
      "epoch": 0.21425244527247322,
      "grad_norm": 0.3673278239204217,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.4811,
      "num_tokens": 240459436.0,
      "step": 230
    },
    {
      "epoch": 0.21891010712622264,
      "grad_norm": 0.3161790418951936,
      "learning_rate": 3.6490683229813665e-05,
      "loss": 0.4748,
      "num_tokens": 245702316.0,
      "step": 235
    },
    {
      "epoch": 0.22356776897997205,
      "grad_norm": 0.35055463031491224,
      "learning_rate": 3.7267080745341614e-05,
      "loss": 0.4739,
      "num_tokens": 250945196.0,
      "step": 240
    },
    {
      "epoch": 0.22822543083372146,
      "grad_norm": 0.2921331301697579,
      "learning_rate": 3.804347826086957e-05,
      "loss": 0.4614,
      "num_tokens": 256130440.0,
      "step": 245
    },
    {
      "epoch": 0.2328830926874709,
      "grad_norm": 0.3110695504126849,
      "learning_rate": 3.881987577639752e-05,
      "loss": 0.458,
      "num_tokens": 261373320.0,
      "step": 250
    },
    {
      "epoch": 0.2375407545412203,
      "grad_norm": 0.2967122310066169,
      "learning_rate": 3.9596273291925465e-05,
      "loss": 0.4709,
      "num_tokens": 266595056.0,
      "step": 255
    },
    {
      "epoch": 0.24219841639496972,
      "grad_norm": 0.2982886660712102,
      "learning_rate": 4.0372670807453414e-05,
      "loss": 0.475,
      "num_tokens": 271837936.0,
      "step": 260
    },
    {
      "epoch": 0.24685607824871914,
      "grad_norm": 0.3329006183483635,
      "learning_rate": 4.114906832298137e-05,
      "loss": 0.4682,
      "num_tokens": 277080816.0,
      "step": 265
    },
    {
      "epoch": 0.2515137401024686,
      "grad_norm": 0.38970904445550836,
      "learning_rate": 4.192546583850932e-05,
      "loss": 0.4565,
      "num_tokens": 282323696.0,
      "step": 270
    },
    {
      "epoch": 0.25617140195621796,
      "grad_norm": 0.4369531707461984,
      "learning_rate": 4.270186335403727e-05,
      "loss": 0.4729,
      "num_tokens": 287548638.0,
      "step": 275
    },
    {
      "epoch": 0.2608290638099674,
      "grad_norm": 0.42524177881581987,
      "learning_rate": 4.347826086956522e-05,
      "loss": 0.4614,
      "num_tokens": 292791518.0,
      "step": 280
    },
    {
      "epoch": 0.26548672566371684,
      "grad_norm": 0.4751488639539711,
      "learning_rate": 4.425465838509317e-05,
      "loss": 0.4585,
      "num_tokens": 298034398.0,
      "step": 285
    },
    {
      "epoch": 0.2701443875174662,
      "grad_norm": 0.3644768672464485,
      "learning_rate": 4.5031055900621124e-05,
      "loss": 0.457,
      "num_tokens": 303277278.0,
      "step": 290
    },
    {
      "epoch": 0.27480204937121566,
      "grad_norm": 0.3047133925659272,
      "learning_rate": 4.580745341614907e-05,
      "loss": 0.4504,
      "num_tokens": 308520158.0,
      "step": 295
    },
    {
      "epoch": 0.27945971122496505,
      "grad_norm": 0.39263245656838175,
      "learning_rate": 4.658385093167702e-05,
      "loss": 0.4631,
      "num_tokens": 313763038.0,
      "step": 300
    },
    {
      "epoch": 0.2841173730787145,
      "grad_norm": 0.40096830155685276,
      "learning_rate": 4.736024844720497e-05,
      "loss": 0.4679,
      "num_tokens": 318941236.0,
      "step": 305
    },
    {
      "epoch": 0.2887750349324639,
      "grad_norm": 0.3558814367566021,
      "learning_rate": 4.8136645962732924e-05,
      "loss": 0.4634,
      "num_tokens": 324184116.0,
      "step": 310
    },
    {
      "epoch": 0.2934326967862133,
      "grad_norm": 0.370424689118892,
      "learning_rate": 4.891304347826087e-05,
      "loss": 0.4525,
      "num_tokens": 329426996.0,
      "step": 315
    },
    {
      "epoch": 0.29809035863996275,
      "grad_norm": 0.3791809460398773,
      "learning_rate": 4.968944099378882e-05,
      "loss": 0.4603,
      "num_tokens": 334669876.0,
      "step": 320
    },
    {
      "epoch": 0.30274802049371213,
      "grad_norm": 0.38319182748199093,
      "learning_rate": 4.994822229892993e-05,
      "loss": 0.4625,
      "num_tokens": 339912756.0,
      "step": 325
    },
    {
      "epoch": 0.3074056823474616,
      "grad_norm": 0.38779313712886077,
      "learning_rate": 4.986192613047981e-05,
      "loss": 0.461,
      "num_tokens": 345155636.0,
      "step": 330
    },
    {
      "epoch": 0.312063344201211,
      "grad_norm": 0.27638531691227697,
      "learning_rate": 4.977562996202969e-05,
      "loss": 0.4442,
      "num_tokens": 350398516.0,
      "step": 335
    },
    {
      "epoch": 0.3167210060549604,
      "grad_norm": 0.27022493306394835,
      "learning_rate": 4.968933379357957e-05,
      "loss": 0.4578,
      "num_tokens": 355641396.0,
      "step": 340
    },
    {
      "epoch": 0.32137866790870984,
      "grad_norm": 0.3347376521152644,
      "learning_rate": 4.9603037625129445e-05,
      "loss": 0.4461,
      "num_tokens": 360884276.0,
      "step": 345
    },
    {
      "epoch": 0.3260363297624592,
      "grad_norm": 0.3648933857337277,
      "learning_rate": 4.951674145667933e-05,
      "loss": 0.4484,
      "num_tokens": 366127156.0,
      "step": 350
    },
    {
      "epoch": 0.33069399161620866,
      "grad_norm": 0.42762304252098077,
      "learning_rate": 4.94304452882292e-05,
      "loss": 0.4455,
      "num_tokens": 371370036.0,
      "step": 355
    },
    {
      "epoch": 0.3353516534699581,
      "grad_norm": 0.31962919233998804,
      "learning_rate": 4.934414911977908e-05,
      "loss": 0.455,
      "num_tokens": 376600048.0,
      "step": 360
    },
    {
      "epoch": 0.3400093153237075,
      "grad_norm": 0.30813546351394594,
      "learning_rate": 4.9257852951328965e-05,
      "loss": 0.4525,
      "num_tokens": 381842928.0,
      "step": 365
    },
    {
      "epoch": 0.3446669771774569,
      "grad_norm": 0.3011372075622818,
      "learning_rate": 4.917155678287884e-05,
      "loss": 0.4662,
      "num_tokens": 387073976.0,
      "step": 370
    },
    {
      "epoch": 0.3493246390312063,
      "grad_norm": 0.33370043151261536,
      "learning_rate": 4.908526061442872e-05,
      "loss": 0.4577,
      "num_tokens": 392316856.0,
      "step": 375
    },
    {
      "epoch": 0.35398230088495575,
      "grad_norm": 0.28402406718571166,
      "learning_rate": 4.89989644459786e-05,
      "loss": 0.4447,
      "num_tokens": 397559736.0,
      "step": 380
    },
    {
      "epoch": 0.3586399627387052,
      "grad_norm": 0.3301597262092118,
      "learning_rate": 4.891266827752848e-05,
      "loss": 0.4394,
      "num_tokens": 402802616.0,
      "step": 385
    },
    {
      "epoch": 0.36329762459245457,
      "grad_norm": 0.34903880210349214,
      "learning_rate": 4.882637210907836e-05,
      "loss": 0.4425,
      "num_tokens": 408014414.0,
      "step": 390
    },
    {
      "epoch": 0.367955286446204,
      "grad_norm": 0.3325741730285312,
      "learning_rate": 4.874007594062824e-05,
      "loss": 0.4384,
      "num_tokens": 413257294.0,
      "step": 395
    },
    {
      "epoch": 0.37261294829995345,
      "grad_norm": 0.2675018939136458,
      "learning_rate": 4.865377977217811e-05,
      "loss": 0.4368,
      "num_tokens": 418500174.0,
      "step": 400
    },
    {
      "epoch": 0.37727061015370283,
      "grad_norm": 0.314282832864609,
      "learning_rate": 4.8567483603728e-05,
      "loss": 0.4375,
      "num_tokens": 423692782.0,
      "step": 405
    },
    {
      "epoch": 0.3819282720074523,
      "grad_norm": 0.27207577009260525,
      "learning_rate": 4.8481187435277875e-05,
      "loss": 0.4439,
      "num_tokens": 428845062.0,
      "step": 410
    },
    {
      "epoch": 0.38658593386120166,
      "grad_norm": 0.3341128489295161,
      "learning_rate": 4.839489126682776e-05,
      "loss": 0.443,
      "num_tokens": 434087942.0,
      "step": 415
    },
    {
      "epoch": 0.3912435957149511,
      "grad_norm": 0.35282928264777474,
      "learning_rate": 4.830859509837763e-05,
      "loss": 0.4339,
      "num_tokens": 439278078.0,
      "step": 420
    },
    {
      "epoch": 0.39590125756870054,
      "grad_norm": 0.26420407141485547,
      "learning_rate": 4.822229892992751e-05,
      "loss": 0.4505,
      "num_tokens": 444520958.0,
      "step": 425
    },
    {
      "epoch": 0.4005589194224499,
      "grad_norm": 0.33964407278246755,
      "learning_rate": 4.8136002761477395e-05,
      "loss": 0.4429,
      "num_tokens": 449763838.0,
      "step": 430
    },
    {
      "epoch": 0.40521658127619936,
      "grad_norm": 0.3291386933311448,
      "learning_rate": 4.804970659302727e-05,
      "loss": 0.4468,
      "num_tokens": 454963270.0,
      "step": 435
    },
    {
      "epoch": 0.40987424312994875,
      "grad_norm": 0.33397547527041516,
      "learning_rate": 4.796341042457715e-05,
      "loss": 0.4559,
      "num_tokens": 460206150.0,
      "step": 440
    },
    {
      "epoch": 0.4145319049836982,
      "grad_norm": 0.31893402500871526,
      "learning_rate": 4.787711425612703e-05,
      "loss": 0.4315,
      "num_tokens": 465378662.0,
      "step": 445
    },
    {
      "epoch": 0.4191895668374476,
      "grad_norm": 0.3080841317238311,
      "learning_rate": 4.779081808767691e-05,
      "loss": 0.4347,
      "num_tokens": 470602566.0,
      "step": 450
    },
    {
      "epoch": 0.423847228691197,
      "grad_norm": 0.2640466141961378,
      "learning_rate": 4.770452191922679e-05,
      "loss": 0.4566,
      "num_tokens": 475800020.0,
      "step": 455
    },
    {
      "epoch": 0.42850489054494645,
      "grad_norm": 0.2736661595754163,
      "learning_rate": 4.761822575077667e-05,
      "loss": 0.435,
      "num_tokens": 481042900.0,
      "step": 460
    },
    {
      "epoch": 0.43316255239869583,
      "grad_norm": 0.3004268561099602,
      "learning_rate": 4.753192958232654e-05,
      "loss": 0.4449,
      "num_tokens": 486220500.0,
      "step": 465
    },
    {
      "epoch": 0.43782021425244527,
      "grad_norm": 0.2909191347281704,
      "learning_rate": 4.744563341387643e-05,
      "loss": 0.4363,
      "num_tokens": 491463380.0,
      "step": 470
    },
    {
      "epoch": 0.4424778761061947,
      "grad_norm": 0.26997217546091656,
      "learning_rate": 4.7359337245426306e-05,
      "loss": 0.4453,
      "num_tokens": 496706260.0,
      "step": 475
    },
    {
      "epoch": 0.4471355379599441,
      "grad_norm": 0.2315623487760298,
      "learning_rate": 4.7273041076976184e-05,
      "loss": 0.4294,
      "num_tokens": 501949140.0,
      "step": 480
    },
    {
      "epoch": 0.45179319981369354,
      "grad_norm": 0.2811561762636778,
      "learning_rate": 4.718674490852606e-05,
      "loss": 0.4328,
      "num_tokens": 507192020.0,
      "step": 485
    },
    {
      "epoch": 0.4564508616674429,
      "grad_norm": 0.2644242230899603,
      "learning_rate": 4.710044874007594e-05,
      "loss": 0.4368,
      "num_tokens": 512433054.0,
      "step": 490
    },
    {
      "epoch": 0.46110852352119236,
      "grad_norm": 0.26028285207985,
      "learning_rate": 4.7014152571625826e-05,
      "loss": 0.4276,
      "num_tokens": 517658160.0,
      "step": 495
    },
    {
      "epoch": 0.4657661853749418,
      "grad_norm": 0.25196174510797353,
      "learning_rate": 4.6927856403175704e-05,
      "loss": 0.4357,
      "num_tokens": 522901040.0,
      "step": 500
    },
    {
      "epoch": 0.4704238472286912,
      "grad_norm": 0.2316136361924091,
      "learning_rate": 4.684156023472558e-05,
      "loss": 0.4292,
      "num_tokens": 528127586.0,
      "step": 505
    },
    {
      "epoch": 0.4750815090824406,
      "grad_norm": 0.30574537535316576,
      "learning_rate": 4.675526406627546e-05,
      "loss": 0.4253,
      "num_tokens": 533355284.0,
      "step": 510
    },
    {
      "epoch": 0.47973917093619,
      "grad_norm": 0.32364388470619504,
      "learning_rate": 4.666896789782534e-05,
      "loss": 0.4381,
      "num_tokens": 538598164.0,
      "step": 515
    },
    {
      "epoch": 0.48439683278993945,
      "grad_norm": 0.2875978480356378,
      "learning_rate": 4.658267172937522e-05,
      "loss": 0.4344,
      "num_tokens": 543841044.0,
      "step": 520
    },
    {
      "epoch": 0.4890544946436889,
      "grad_norm": 0.28884356974319597,
      "learning_rate": 4.64963755609251e-05,
      "loss": 0.4419,
      "num_tokens": 549083924.0,
      "step": 525
    },
    {
      "epoch": 0.49371215649743827,
      "grad_norm": 0.31258251554949046,
      "learning_rate": 4.641007939247497e-05,
      "loss": 0.4278,
      "num_tokens": 554323932.0,
      "step": 530
    },
    {
      "epoch": 0.4983698183511877,
      "grad_norm": 0.44363435118041294,
      "learning_rate": 4.632378322402486e-05,
      "loss": 0.4366,
      "num_tokens": 559555680.0,
      "step": 535
    },
    {
      "epoch": 0.5030274802049371,
      "grad_norm": 0.2662509559017355,
      "learning_rate": 4.6237487055574736e-05,
      "loss": 0.4394,
      "num_tokens": 564798560.0,
      "step": 540
    },
    {
      "epoch": 0.5076851420586865,
      "grad_norm": 0.3323830979152279,
      "learning_rate": 4.6151190887124615e-05,
      "loss": 0.4277,
      "num_tokens": 569918486.0,
      "step": 545
    },
    {
      "epoch": 0.5123428039124359,
      "grad_norm": 0.34771077887506324,
      "learning_rate": 4.606489471867449e-05,
      "loss": 0.4428,
      "num_tokens": 575160720.0,
      "step": 550
    },
    {
      "epoch": 0.5170004657661854,
      "grad_norm": 0.2915768343471689,
      "learning_rate": 4.597859855022437e-05,
      "loss": 0.4287,
      "num_tokens": 580403600.0,
      "step": 555
    },
    {
      "epoch": 0.5216581276199348,
      "grad_norm": 0.25977978611681063,
      "learning_rate": 4.589230238177425e-05,
      "loss": 0.4336,
      "num_tokens": 585635440.0,
      "step": 560
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 0.25877369496780933,
      "learning_rate": 4.5806006213324134e-05,
      "loss": 0.4314,
      "num_tokens": 590878320.0,
      "step": 565
    },
    {
      "epoch": 0.5309734513274337,
      "grad_norm": 0.26779788656924575,
      "learning_rate": 4.5719710044874006e-05,
      "loss": 0.4308,
      "num_tokens": 596063606.0,
      "step": 570
    },
    {
      "epoch": 0.5356311131811831,
      "grad_norm": 0.27973998837918573,
      "learning_rate": 4.563341387642389e-05,
      "loss": 0.4294,
      "num_tokens": 601306486.0,
      "step": 575
    },
    {
      "epoch": 0.5402887750349324,
      "grad_norm": 0.26143548585622206,
      "learning_rate": 4.554711770797377e-05,
      "loss": 0.4326,
      "num_tokens": 606451006.0,
      "step": 580
    },
    {
      "epoch": 0.5449464368886818,
      "grad_norm": 0.24563133850949329,
      "learning_rate": 4.546082153952365e-05,
      "loss": 0.4257,
      "num_tokens": 611693886.0,
      "step": 585
    },
    {
      "epoch": 0.5496040987424313,
      "grad_norm": 0.27445647969317083,
      "learning_rate": 4.5374525371073526e-05,
      "loss": 0.4418,
      "num_tokens": 616936766.0,
      "step": 590
    },
    {
      "epoch": 0.5542617605961807,
      "grad_norm": 0.2945035451085223,
      "learning_rate": 4.5288229202623404e-05,
      "loss": 0.4327,
      "num_tokens": 622179646.0,
      "step": 595
    },
    {
      "epoch": 0.5589194224499301,
      "grad_norm": 0.2700962621973337,
      "learning_rate": 4.520193303417328e-05,
      "loss": 0.4364,
      "num_tokens": 627422526.0,
      "step": 600
    },
    {
      "epoch": 0.5635770843036796,
      "grad_norm": 0.2811186268833258,
      "learning_rate": 4.511563686572317e-05,
      "loss": 0.4118,
      "num_tokens": 632619928.0,
      "step": 605
    },
    {
      "epoch": 0.568234746157429,
      "grad_norm": 0.29041103101636173,
      "learning_rate": 4.5029340697273045e-05,
      "loss": 0.4303,
      "num_tokens": 637862808.0,
      "step": 610
    },
    {
      "epoch": 0.5728924080111784,
      "grad_norm": 0.245797994190876,
      "learning_rate": 4.4943044528822923e-05,
      "loss": 0.4229,
      "num_tokens": 643105688.0,
      "step": 615
    },
    {
      "epoch": 0.5775500698649279,
      "grad_norm": 0.2846510410757793,
      "learning_rate": 4.48567483603728e-05,
      "loss": 0.427,
      "num_tokens": 648348568.0,
      "step": 620
    },
    {
      "epoch": 0.5822077317186772,
      "grad_norm": 0.31326110744818847,
      "learning_rate": 4.477045219192268e-05,
      "loss": 0.4367,
      "num_tokens": 653591448.0,
      "step": 625
    },
    {
      "epoch": 0.5868653935724266,
      "grad_norm": 0.2719597858973542,
      "learning_rate": 4.4684156023472565e-05,
      "loss": 0.4224,
      "num_tokens": 658834328.0,
      "step": 630
    },
    {
      "epoch": 0.5915230554261761,
      "grad_norm": 0.2231384344598576,
      "learning_rate": 4.4597859855022436e-05,
      "loss": 0.4221,
      "num_tokens": 664028780.0,
      "step": 635
    },
    {
      "epoch": 0.5961807172799255,
      "grad_norm": 0.2478478343306457,
      "learning_rate": 4.4511563686572315e-05,
      "loss": 0.4284,
      "num_tokens": 669256250.0,
      "step": 640
    },
    {
      "epoch": 0.6008383791336749,
      "grad_norm": 0.28776659196487936,
      "learning_rate": 4.44252675181222e-05,
      "loss": 0.4264,
      "num_tokens": 674460550.0,
      "step": 645
    },
    {
      "epoch": 0.6054960409874243,
      "grad_norm": 0.2956533073670127,
      "learning_rate": 4.433897134967208e-05,
      "loss": 0.424,
      "num_tokens": 679701528.0,
      "step": 650
    },
    {
      "epoch": 0.6101537028411738,
      "grad_norm": 0.2502991680548602,
      "learning_rate": 4.4252675181221956e-05,
      "loss": 0.4314,
      "num_tokens": 684944408.0,
      "step": 655
    },
    {
      "epoch": 0.6148113646949231,
      "grad_norm": 0.2913354439152498,
      "learning_rate": 4.4166379012771834e-05,
      "loss": 0.4327,
      "num_tokens": 690187288.0,
      "step": 660
    },
    {
      "epoch": 0.6194690265486725,
      "grad_norm": 0.233248050974019,
      "learning_rate": 4.408008284432171e-05,
      "loss": 0.4259,
      "num_tokens": 695372640.0,
      "step": 665
    },
    {
      "epoch": 0.624126688402422,
      "grad_norm": 0.23364040668855193,
      "learning_rate": 4.39937866758716e-05,
      "loss": 0.4287,
      "num_tokens": 700615520.0,
      "step": 670
    },
    {
      "epoch": 0.6287843502561714,
      "grad_norm": 0.29652596000743614,
      "learning_rate": 4.3907490507421476e-05,
      "loss": 0.4217,
      "num_tokens": 705831004.0,
      "step": 675
    },
    {
      "epoch": 0.6334420121099208,
      "grad_norm": 0.2683127265039611,
      "learning_rate": 4.382119433897135e-05,
      "loss": 0.4251,
      "num_tokens": 711015208.0,
      "step": 680
    },
    {
      "epoch": 0.6380996739636703,
      "grad_norm": 0.3256519885723937,
      "learning_rate": 4.373489817052123e-05,
      "loss": 0.4354,
      "num_tokens": 716205240.0,
      "step": 685
    },
    {
      "epoch": 0.6427573358174197,
      "grad_norm": 0.30935819364802847,
      "learning_rate": 4.364860200207111e-05,
      "loss": 0.4248,
      "num_tokens": 721448120.0,
      "step": 690
    },
    {
      "epoch": 0.6474149976711691,
      "grad_norm": 0.28845601036265905,
      "learning_rate": 4.356230583362099e-05,
      "loss": 0.4246,
      "num_tokens": 726691000.0,
      "step": 695
    },
    {
      "epoch": 0.6520726595249184,
      "grad_norm": 0.35095951511202333,
      "learning_rate": 4.347600966517087e-05,
      "loss": 0.4212,
      "num_tokens": 731930908.0,
      "step": 700
    },
    {
      "epoch": 0.6567303213786679,
      "grad_norm": 0.2477651003613894,
      "learning_rate": 4.3389713496720745e-05,
      "loss": 0.4184,
      "num_tokens": 737133156.0,
      "step": 705
    },
    {
      "epoch": 0.6613879832324173,
      "grad_norm": 0.24393660787559812,
      "learning_rate": 4.330341732827063e-05,
      "loss": 0.432,
      "num_tokens": 742336330.0,
      "step": 710
    },
    {
      "epoch": 0.6660456450861667,
      "grad_norm": 0.2723342955491686,
      "learning_rate": 4.321712115982051e-05,
      "loss": 0.4247,
      "num_tokens": 747579210.0,
      "step": 715
    },
    {
      "epoch": 0.6707033069399162,
      "grad_norm": 0.28012328049219287,
      "learning_rate": 4.3130824991370387e-05,
      "loss": 0.4162,
      "num_tokens": 752752000.0,
      "step": 720
    },
    {
      "epoch": 0.6753609687936656,
      "grad_norm": 0.2417243115660318,
      "learning_rate": 4.3044528822920265e-05,
      "loss": 0.42,
      "num_tokens": 757994880.0,
      "step": 725
    },
    {
      "epoch": 0.680018630647415,
      "grad_norm": 0.258058497553529,
      "learning_rate": 4.295823265447014e-05,
      "loss": 0.4283,
      "num_tokens": 763237760.0,
      "step": 730
    },
    {
      "epoch": 0.6846762925011645,
      "grad_norm": 0.2624503165351534,
      "learning_rate": 4.287193648602002e-05,
      "loss": 0.4156,
      "num_tokens": 768480640.0,
      "step": 735
    },
    {
      "epoch": 0.6893339543549138,
      "grad_norm": 0.21895429003504352,
      "learning_rate": 4.27856403175699e-05,
      "loss": 0.4199,
      "num_tokens": 773723520.0,
      "step": 740
    },
    {
      "epoch": 0.6939916162086632,
      "grad_norm": 0.2945442268650668,
      "learning_rate": 4.269934414911978e-05,
      "loss": 0.4181,
      "num_tokens": 778966400.0,
      "step": 745
    },
    {
      "epoch": 0.6986492780624126,
      "grad_norm": 0.26637672444046956,
      "learning_rate": 4.261304798066966e-05,
      "loss": 0.4257,
      "num_tokens": 784209280.0,
      "step": 750
    },
    {
      "epoch": 0.7033069399161621,
      "grad_norm": 0.28800148253809854,
      "learning_rate": 4.252675181221954e-05,
      "loss": 0.4138,
      "num_tokens": 789452160.0,
      "step": 755
    },
    {
      "epoch": 0.7079646017699115,
      "grad_norm": 0.2421160589881002,
      "learning_rate": 4.244045564376942e-05,
      "loss": 0.4186,
      "num_tokens": 794695040.0,
      "step": 760
    },
    {
      "epoch": 0.7126222636236609,
      "grad_norm": 0.24526775919740107,
      "learning_rate": 4.23541594753193e-05,
      "loss": 0.4255,
      "num_tokens": 799937676.0,
      "step": 765
    },
    {
      "epoch": 0.7172799254774104,
      "grad_norm": 0.26058594933314355,
      "learning_rate": 4.2267863306869176e-05,
      "loss": 0.4251,
      "num_tokens": 805180556.0,
      "step": 770
    },
    {
      "epoch": 0.7219375873311598,
      "grad_norm": 0.336972108348959,
      "learning_rate": 4.2181567138419054e-05,
      "loss": 0.4133,
      "num_tokens": 810397834.0,
      "step": 775
    },
    {
      "epoch": 0.7265952491849091,
      "grad_norm": 0.2698826306891896,
      "learning_rate": 4.209527096996894e-05,
      "loss": 0.4302,
      "num_tokens": 815565546.0,
      "step": 780
    },
    {
      "epoch": 0.7312529110386586,
      "grad_norm": 0.2978506798603573,
      "learning_rate": 4.200897480151881e-05,
      "loss": 0.4227,
      "num_tokens": 820802646.0,
      "step": 785
    },
    {
      "epoch": 0.735910572892408,
      "grad_norm": 0.22370068906989737,
      "learning_rate": 4.1922678633068695e-05,
      "loss": 0.414,
      "num_tokens": 826045526.0,
      "step": 790
    },
    {
      "epoch": 0.7405682347461574,
      "grad_norm": 0.22568875909202213,
      "learning_rate": 4.1836382464618573e-05,
      "loss": 0.4241,
      "num_tokens": 831245076.0,
      "step": 795
    },
    {
      "epoch": 0.7452258965999069,
      "grad_norm": 0.22113115108696535,
      "learning_rate": 4.175008629616845e-05,
      "loss": 0.4171,
      "num_tokens": 836487956.0,
      "step": 800
    },
    {
      "epoch": 0.7498835584536563,
      "grad_norm": 0.21379162612177327,
      "learning_rate": 4.166379012771833e-05,
      "loss": 0.4343,
      "num_tokens": 841670226.0,
      "step": 805
    },
    {
      "epoch": 0.7545412203074057,
      "grad_norm": 0.25709512413105684,
      "learning_rate": 4.157749395926821e-05,
      "loss": 0.4185,
      "num_tokens": 846891280.0,
      "step": 810
    },
    {
      "epoch": 0.759198882161155,
      "grad_norm": 0.2473626840874685,
      "learning_rate": 4.1491197790818086e-05,
      "loss": 0.4209,
      "num_tokens": 852076096.0,
      "step": 815
    },
    {
      "epoch": 0.7638565440149045,
      "grad_norm": 0.23848950456080154,
      "learning_rate": 4.140490162236797e-05,
      "loss": 0.42,
      "num_tokens": 857318976.0,
      "step": 820
    },
    {
      "epoch": 0.7685142058686539,
      "grad_norm": 0.28531631522495154,
      "learning_rate": 4.131860545391785e-05,
      "loss": 0.4094,
      "num_tokens": 862561856.0,
      "step": 825
    },
    {
      "epoch": 0.7731718677224033,
      "grad_norm": 0.24000048136153668,
      "learning_rate": 4.123230928546773e-05,
      "loss": 0.4186,
      "num_tokens": 867804736.0,
      "step": 830
    },
    {
      "epoch": 0.7778295295761528,
      "grad_norm": 0.23468116378771062,
      "learning_rate": 4.1146013117017606e-05,
      "loss": 0.4242,
      "num_tokens": 873045328.0,
      "step": 835
    },
    {
      "epoch": 0.7824871914299022,
      "grad_norm": 0.2474430391244038,
      "learning_rate": 4.1059716948567484e-05,
      "loss": 0.4092,
      "num_tokens": 878249566.0,
      "step": 840
    },
    {
      "epoch": 0.7871448532836516,
      "grad_norm": 0.22365408143865523,
      "learning_rate": 4.097342078011737e-05,
      "loss": 0.4168,
      "num_tokens": 883492446.0,
      "step": 845
    },
    {
      "epoch": 0.7918025151374011,
      "grad_norm": 0.2284100469501981,
      "learning_rate": 4.088712461166724e-05,
      "loss": 0.417,
      "num_tokens": 888735326.0,
      "step": 850
    },
    {
      "epoch": 0.7964601769911505,
      "grad_norm": 0.2543274022593722,
      "learning_rate": 4.080082844321712e-05,
      "loss": 0.409,
      "num_tokens": 893978206.0,
      "step": 855
    },
    {
      "epoch": 0.8011178388448998,
      "grad_norm": 0.2366868461626474,
      "learning_rate": 4.0714532274767004e-05,
      "loss": 0.4114,
      "num_tokens": 899180370.0,
      "step": 860
    },
    {
      "epoch": 0.8057755006986492,
      "grad_norm": 0.26104381359599865,
      "learning_rate": 4.062823610631688e-05,
      "loss": 0.4152,
      "num_tokens": 904405480.0,
      "step": 865
    },
    {
      "epoch": 0.8104331625523987,
      "grad_norm": 0.22347183308122576,
      "learning_rate": 4.054193993786676e-05,
      "loss": 0.4084,
      "num_tokens": 909648360.0,
      "step": 870
    },
    {
      "epoch": 0.8150908244061481,
      "grad_norm": 0.23790061185920547,
      "learning_rate": 4.045564376941664e-05,
      "loss": 0.4187,
      "num_tokens": 914891240.0,
      "step": 875
    },
    {
      "epoch": 0.8197484862598975,
      "grad_norm": 0.23147353386983963,
      "learning_rate": 4.036934760096652e-05,
      "loss": 0.403,
      "num_tokens": 920134120.0,
      "step": 880
    },
    {
      "epoch": 0.824406148113647,
      "grad_norm": 0.226927817097489,
      "learning_rate": 4.02830514325164e-05,
      "loss": 0.419,
      "num_tokens": 925348344.0,
      "step": 885
    },
    {
      "epoch": 0.8290638099673964,
      "grad_norm": 0.21317663633293657,
      "learning_rate": 4.019675526406628e-05,
      "loss": 0.4179,
      "num_tokens": 930591224.0,
      "step": 890
    },
    {
      "epoch": 0.8337214718211458,
      "grad_norm": 0.22884972751523144,
      "learning_rate": 4.011045909561615e-05,
      "loss": 0.4165,
      "num_tokens": 935834104.0,
      "step": 895
    },
    {
      "epoch": 0.8383791336748952,
      "grad_norm": 0.22549685802546812,
      "learning_rate": 4.0024162927166037e-05,
      "loss": 0.4122,
      "num_tokens": 941076984.0,
      "step": 900
    },
    {
      "epoch": 0.8430367955286446,
      "grad_norm": 0.23775471062904485,
      "learning_rate": 3.9937866758715915e-05,
      "loss": 0.4163,
      "num_tokens": 946319864.0,
      "step": 905
    },
    {
      "epoch": 0.847694457382394,
      "grad_norm": 0.23841314630097593,
      "learning_rate": 3.98515705902658e-05,
      "loss": 0.4222,
      "num_tokens": 951515422.0,
      "step": 910
    },
    {
      "epoch": 0.8523521192361434,
      "grad_norm": 0.22312343382900762,
      "learning_rate": 3.976527442181567e-05,
      "loss": 0.4133,
      "num_tokens": 956704248.0,
      "step": 915
    },
    {
      "epoch": 0.8570097810898929,
      "grad_norm": 0.25890525425501226,
      "learning_rate": 3.967897825336555e-05,
      "loss": 0.4206,
      "num_tokens": 961947128.0,
      "step": 920
    },
    {
      "epoch": 0.8616674429436423,
      "grad_norm": 0.27422447905775904,
      "learning_rate": 3.9592682084915434e-05,
      "loss": 0.4147,
      "num_tokens": 967190008.0,
      "step": 925
    },
    {
      "epoch": 0.8663251047973917,
      "grad_norm": 0.25907315385547003,
      "learning_rate": 3.950638591646531e-05,
      "loss": 0.4174,
      "num_tokens": 972432888.0,
      "step": 930
    },
    {
      "epoch": 0.8709827666511412,
      "grad_norm": 0.24801783004374894,
      "learning_rate": 3.942008974801519e-05,
      "loss": 0.404,
      "num_tokens": 977675768.0,
      "step": 935
    },
    {
      "epoch": 0.8756404285048905,
      "grad_norm": 0.25870664015319944,
      "learning_rate": 3.933379357956507e-05,
      "loss": 0.4057,
      "num_tokens": 982908826.0,
      "step": 940
    },
    {
      "epoch": 0.8802980903586399,
      "grad_norm": 0.22871408465369003,
      "learning_rate": 3.924749741111495e-05,
      "loss": 0.4081,
      "num_tokens": 988129568.0,
      "step": 945
    },
    {
      "epoch": 0.8849557522123894,
      "grad_norm": 0.24811397008583894,
      "learning_rate": 3.916120124266483e-05,
      "loss": 0.398,
      "num_tokens": 993372448.0,
      "step": 950
    },
    {
      "epoch": 0.8896134140661388,
      "grad_norm": 0.22202538727671148,
      "learning_rate": 3.9074905074214704e-05,
      "loss": 0.4123,
      "num_tokens": 998615328.0,
      "step": 955
    },
    {
      "epoch": 0.8942710759198882,
      "grad_norm": 0.9525378143124741,
      "learning_rate": 3.898860890576458e-05,
      "loss": 0.4175,
      "num_tokens": 1003840426.0,
      "step": 960
    },
    {
      "epoch": 0.8989287377736377,
      "grad_norm": 0.26485813953212173,
      "learning_rate": 3.890231273731447e-05,
      "loss": 0.4125,
      "num_tokens": 1009083306.0,
      "step": 965
    },
    {
      "epoch": 0.9035863996273871,
      "grad_norm": 0.2365029447922154,
      "learning_rate": 3.8816016568864345e-05,
      "loss": 0.4153,
      "num_tokens": 1014326186.0,
      "step": 970
    },
    {
      "epoch": 0.9082440614811365,
      "grad_norm": 0.23975678130834552,
      "learning_rate": 3.8729720400414224e-05,
      "loss": 0.4102,
      "num_tokens": 1019569066.0,
      "step": 975
    },
    {
      "epoch": 0.9129017233348858,
      "grad_norm": 0.23663689458237938,
      "learning_rate": 3.86434242319641e-05,
      "loss": 0.4131,
      "num_tokens": 1024811946.0,
      "step": 980
    },
    {
      "epoch": 0.9175593851886353,
      "grad_norm": 0.24706898178116934,
      "learning_rate": 3.855712806351398e-05,
      "loss": 0.4175,
      "num_tokens": 1030054826.0,
      "step": 985
    },
    {
      "epoch": 0.9222170470423847,
      "grad_norm": 0.24223978043509703,
      "learning_rate": 3.8470831895063865e-05,
      "loss": 0.3901,
      "num_tokens": 1035279144.0,
      "step": 990
    },
    {
      "epoch": 0.9268747088961341,
      "grad_norm": 0.25002125401458763,
      "learning_rate": 3.838453572661374e-05,
      "loss": 0.4064,
      "num_tokens": 1040522024.0,
      "step": 995
    },
    {
      "epoch": 0.9315323707498836,
      "grad_norm": 0.24388480441326726,
      "learning_rate": 3.8298239558163615e-05,
      "loss": 0.4084,
      "num_tokens": 1045764904.0,
      "step": 1000
    },
    {
      "epoch": 0.936190032603633,
      "grad_norm": 0.253011557441483,
      "learning_rate": 3.82119433897135e-05,
      "loss": 0.4177,
      "num_tokens": 1051007784.0,
      "step": 1005
    },
    {
      "epoch": 0.9408476944573824,
      "grad_norm": 0.23214279433661952,
      "learning_rate": 3.812564722126338e-05,
      "loss": 0.4075,
      "num_tokens": 1056250664.0,
      "step": 1010
    },
    {
      "epoch": 0.9455053563111319,
      "grad_norm": 0.215120191665915,
      "learning_rate": 3.8039351052813256e-05,
      "loss": 0.4094,
      "num_tokens": 1061493544.0,
      "step": 1015
    },
    {
      "epoch": 0.9501630181648812,
      "grad_norm": 0.22917969065957147,
      "learning_rate": 3.7953054884363134e-05,
      "loss": 0.4178,
      "num_tokens": 1066736424.0,
      "step": 1020
    },
    {
      "epoch": 0.9548206800186306,
      "grad_norm": 0.2277099086565493,
      "learning_rate": 3.786675871591301e-05,
      "loss": 0.4102,
      "num_tokens": 1071979304.0,
      "step": 1025
    },
    {
      "epoch": 0.95947834187238,
      "grad_norm": 0.22949277379505856,
      "learning_rate": 3.77804625474629e-05,
      "loss": 0.4022,
      "num_tokens": 1077222184.0,
      "step": 1030
    },
    {
      "epoch": 0.9641360037261295,
      "grad_norm": 0.23394686604005885,
      "learning_rate": 3.7694166379012776e-05,
      "loss": 0.4054,
      "num_tokens": 1082465064.0,
      "step": 1035
    },
    {
      "epoch": 0.9687936655798789,
      "grad_norm": 0.21669708811502705,
      "learning_rate": 3.7607870210562654e-05,
      "loss": 0.4043,
      "num_tokens": 1087707944.0,
      "step": 1040
    },
    {
      "epoch": 0.9734513274336283,
      "grad_norm": 0.23035723781561768,
      "learning_rate": 3.752157404211253e-05,
      "loss": 0.4052,
      "num_tokens": 1092923046.0,
      "step": 1045
    },
    {
      "epoch": 0.9781089892873778,
      "grad_norm": 0.25030172606804196,
      "learning_rate": 3.743527787366241e-05,
      "loss": 0.4056,
      "num_tokens": 1098165926.0,
      "step": 1050
    },
    {
      "epoch": 0.9827666511411272,
      "grad_norm": 0.24316882158161224,
      "learning_rate": 3.734898170521229e-05,
      "loss": 0.4095,
      "num_tokens": 1103324696.0,
      "step": 1055
    },
    {
      "epoch": 0.9874243129948765,
      "grad_norm": 0.23356019872200276,
      "learning_rate": 3.7262685536762174e-05,
      "loss": 0.4087,
      "num_tokens": 1108567576.0,
      "step": 1060
    },
    {
      "epoch": 0.992081974848626,
      "grad_norm": 0.23003894581212256,
      "learning_rate": 3.7176389368312045e-05,
      "loss": 0.4075,
      "num_tokens": 1113810456.0,
      "step": 1065
    },
    {
      "epoch": 0.9967396367023754,
      "grad_norm": 0.24677191490487543,
      "learning_rate": 3.709009319986193e-05,
      "loss": 0.3984,
      "num_tokens": 1119053336.0,
      "step": 1070
    },
    {
      "epoch": 1.00093153237075,
      "grad_norm": 0.35346282231833437,
      "learning_rate": 3.700379703141181e-05,
      "loss": 0.4101,
      "num_tokens": 1123444248.0,
      "step": 1075
    },
    {
      "epoch": 1.0055891942244992,
      "grad_norm": 0.29051830503728265,
      "learning_rate": 3.6917500862961687e-05,
      "loss": 0.3564,
      "num_tokens": 1128625632.0,
      "step": 1080
    },
    {
      "epoch": 1.0102468560782487,
      "grad_norm": 0.22412946028005143,
      "learning_rate": 3.6831204694511565e-05,
      "loss": 0.3425,
      "num_tokens": 1133868512.0,
      "step": 1085
    },
    {
      "epoch": 1.0149045179319982,
      "grad_norm": 0.23098063417394873,
      "learning_rate": 3.674490852606144e-05,
      "loss": 0.3556,
      "num_tokens": 1139111392.0,
      "step": 1090
    },
    {
      "epoch": 1.0195621797857475,
      "grad_norm": 0.24874761426335876,
      "learning_rate": 3.665861235761132e-05,
      "loss": 0.3512,
      "num_tokens": 1144313368.0,
      "step": 1095
    },
    {
      "epoch": 1.024219841639497,
      "grad_norm": 0.2500066695441668,
      "learning_rate": 3.6572316189161206e-05,
      "loss": 0.3552,
      "num_tokens": 1149498184.0,
      "step": 1100
    },
    {
      "epoch": 1.0288775034932465,
      "grad_norm": 0.2267873489991183,
      "learning_rate": 3.6486020020711085e-05,
      "loss": 0.3541,
      "num_tokens": 1154741064.0,
      "step": 1105
    },
    {
      "epoch": 1.0335351653469957,
      "grad_norm": 0.20573124460682662,
      "learning_rate": 3.639972385226096e-05,
      "loss": 0.3543,
      "num_tokens": 1159983944.0,
      "step": 1110
    },
    {
      "epoch": 1.0381928272007452,
      "grad_norm": 0.22900996460882025,
      "learning_rate": 3.631342768381084e-05,
      "loss": 0.3459,
      "num_tokens": 1165183376.0,
      "step": 1115
    },
    {
      "epoch": 1.0428504890544947,
      "grad_norm": 0.2367962553195853,
      "learning_rate": 3.622713151536072e-05,
      "loss": 0.3527,
      "num_tokens": 1170426256.0,
      "step": 1120
    },
    {
      "epoch": 1.047508150908244,
      "grad_norm": 0.22182529597694525,
      "learning_rate": 3.6140835346910604e-05,
      "loss": 0.357,
      "num_tokens": 1175669136.0,
      "step": 1125
    },
    {
      "epoch": 1.0521658127619935,
      "grad_norm": 0.23825198016517224,
      "learning_rate": 3.6054539178460476e-05,
      "loss": 0.3454,
      "num_tokens": 1180830872.0,
      "step": 1130
    },
    {
      "epoch": 1.056823474615743,
      "grad_norm": 0.22953576181084298,
      "learning_rate": 3.5968243010010354e-05,
      "loss": 0.3532,
      "num_tokens": 1186073752.0,
      "step": 1135
    },
    {
      "epoch": 1.0614811364694923,
      "grad_norm": 0.23033830576834902,
      "learning_rate": 3.588194684156024e-05,
      "loss": 0.3536,
      "num_tokens": 1191316632.0,
      "step": 1140
    },
    {
      "epoch": 1.0661387983232418,
      "grad_norm": 0.23573345416407196,
      "learning_rate": 3.579565067311012e-05,
      "loss": 0.3525,
      "num_tokens": 1196496612.0,
      "step": 1145
    },
    {
      "epoch": 1.0707964601769913,
      "grad_norm": 0.20521570602212028,
      "learning_rate": 3.5709354504659995e-05,
      "loss": 0.3497,
      "num_tokens": 1201739492.0,
      "step": 1150
    },
    {
      "epoch": 1.0754541220307405,
      "grad_norm": 0.25778357113766126,
      "learning_rate": 3.5623058336209874e-05,
      "loss": 0.3531,
      "num_tokens": 1206939480.0,
      "step": 1155
    },
    {
      "epoch": 1.08011178388449,
      "grad_norm": 0.232455524635055,
      "learning_rate": 3.553676216775975e-05,
      "loss": 0.3613,
      "num_tokens": 1212064592.0,
      "step": 1160
    },
    {
      "epoch": 1.0847694457382393,
      "grad_norm": 0.21965684086039253,
      "learning_rate": 3.545046599930964e-05,
      "loss": 0.3585,
      "num_tokens": 1217281960.0,
      "step": 1165
    },
    {
      "epoch": 1.0894271075919888,
      "grad_norm": 0.20895707760559631,
      "learning_rate": 3.536416983085951e-05,
      "loss": 0.3562,
      "num_tokens": 1222524840.0,
      "step": 1170
    },
    {
      "epoch": 1.0940847694457383,
      "grad_norm": 0.22384203139796538,
      "learning_rate": 3.5277873662409386e-05,
      "loss": 0.3561,
      "num_tokens": 1227767720.0,
      "step": 1175
    },
    {
      "epoch": 1.0987424312994876,
      "grad_norm": 0.24149404715801545,
      "learning_rate": 3.519157749395927e-05,
      "loss": 0.3487,
      "num_tokens": 1232972020.0,
      "step": 1180
    },
    {
      "epoch": 1.103400093153237,
      "grad_norm": 0.22786611157600012,
      "learning_rate": 3.510528132550915e-05,
      "loss": 0.3499,
      "num_tokens": 1238214900.0,
      "step": 1185
    },
    {
      "epoch": 1.1080577550069866,
      "grad_norm": 0.2264342222623477,
      "learning_rate": 3.501898515705903e-05,
      "loss": 0.332,
      "num_tokens": 1243457780.0,
      "step": 1190
    },
    {
      "epoch": 1.1127154168607358,
      "grad_norm": 0.24306452699893255,
      "learning_rate": 3.4932688988608906e-05,
      "loss": 0.3425,
      "num_tokens": 1248618688.0,
      "step": 1195
    },
    {
      "epoch": 1.1173730787144853,
      "grad_norm": 0.21841658951947124,
      "learning_rate": 3.4846392820158784e-05,
      "loss": 0.3504,
      "num_tokens": 1253838334.0,
      "step": 1200
    },
    {
      "epoch": 1.1220307405682348,
      "grad_norm": 0.2276119099647855,
      "learning_rate": 3.476009665170867e-05,
      "loss": 0.3597,
      "num_tokens": 1259062652.0,
      "step": 1205
    },
    {
      "epoch": 1.126688402421984,
      "grad_norm": 0.2580980253788764,
      "learning_rate": 3.467380048325855e-05,
      "loss": 0.3535,
      "num_tokens": 1264270772.0,
      "step": 1210
    },
    {
      "epoch": 1.1313460642757336,
      "grad_norm": 0.21960897285302391,
      "learning_rate": 3.458750431480842e-05,
      "loss": 0.3481,
      "num_tokens": 1269495882.0,
      "step": 1215
    },
    {
      "epoch": 1.136003726129483,
      "grad_norm": 0.2316950316967988,
      "learning_rate": 3.4501208146358304e-05,
      "loss": 0.3592,
      "num_tokens": 1274688984.0,
      "step": 1220
    },
    {
      "epoch": 1.1406613879832324,
      "grad_norm": 0.24634350140770184,
      "learning_rate": 3.441491197790818e-05,
      "loss": 0.3479,
      "num_tokens": 1279931864.0,
      "step": 1225
    },
    {
      "epoch": 1.1453190498369819,
      "grad_norm": 0.23829808907517083,
      "learning_rate": 3.432861580945806e-05,
      "loss": 0.3517,
      "num_tokens": 1285174744.0,
      "step": 1230
    },
    {
      "epoch": 1.1499767116907313,
      "grad_norm": 0.23752769986557593,
      "learning_rate": 3.424231964100794e-05,
      "loss": 0.351,
      "num_tokens": 1290357536.0,
      "step": 1235
    },
    {
      "epoch": 1.1546343735444806,
      "grad_norm": 0.21547144493639267,
      "learning_rate": 3.415602347255782e-05,
      "loss": 0.3556,
      "num_tokens": 1295587548.0,
      "step": 1240
    },
    {
      "epoch": 1.1592920353982301,
      "grad_norm": 0.2000934232902581,
      "learning_rate": 3.40697273041077e-05,
      "loss": 0.3584,
      "num_tokens": 1300830428.0,
      "step": 1245
    },
    {
      "epoch": 1.1639496972519794,
      "grad_norm": 0.2210789397431835,
      "learning_rate": 3.398343113565758e-05,
      "loss": 0.36,
      "num_tokens": 1306073308.0,
      "step": 1250
    },
    {
      "epoch": 1.1686073591057289,
      "grad_norm": 0.21558999456730066,
      "learning_rate": 3.389713496720746e-05,
      "loss": 0.3502,
      "num_tokens": 1311249846.0,
      "step": 1255
    },
    {
      "epoch": 1.1732650209594784,
      "grad_norm": 0.21690954473595722,
      "learning_rate": 3.381083879875734e-05,
      "loss": 0.3596,
      "num_tokens": 1316492726.0,
      "step": 1260
    },
    {
      "epoch": 1.1779226828132279,
      "grad_norm": 0.21692163966918396,
      "learning_rate": 3.3724542630307215e-05,
      "loss": 0.3471,
      "num_tokens": 1321729528.0,
      "step": 1265
    },
    {
      "epoch": 1.1825803446669771,
      "grad_norm": 0.2269743906263902,
      "learning_rate": 3.363824646185709e-05,
      "loss": 0.3414,
      "num_tokens": 1326922446.0,
      "step": 1270
    },
    {
      "epoch": 1.1872380065207266,
      "grad_norm": 0.22249317255409107,
      "learning_rate": 3.355195029340698e-05,
      "loss": 0.3521,
      "num_tokens": 1332165326.0,
      "step": 1275
    },
    {
      "epoch": 1.191895668374476,
      "grad_norm": 0.21849270645891788,
      "learning_rate": 3.346565412495685e-05,
      "loss": 0.3551,
      "num_tokens": 1337408206.0,
      "step": 1280
    },
    {
      "epoch": 1.1965533302282254,
      "grad_norm": 0.22515863764156271,
      "learning_rate": 3.3379357956506735e-05,
      "loss": 0.3596,
      "num_tokens": 1342651086.0,
      "step": 1285
    },
    {
      "epoch": 1.201210992081975,
      "grad_norm": 0.2319062412397265,
      "learning_rate": 3.329306178805661e-05,
      "loss": 0.3487,
      "num_tokens": 1347876028.0,
      "step": 1290
    },
    {
      "epoch": 1.2058686539357242,
      "grad_norm": 0.20349874075689556,
      "learning_rate": 3.320676561960649e-05,
      "loss": 0.3526,
      "num_tokens": 1353118908.0,
      "step": 1295
    },
    {
      "epoch": 1.2105263157894737,
      "grad_norm": 0.19197298827729622,
      "learning_rate": 3.312046945115637e-05,
      "loss": 0.3491,
      "num_tokens": 1358361788.0,
      "step": 1300
    },
    {
      "epoch": 1.2151839776432232,
      "grad_norm": 0.2414011354406214,
      "learning_rate": 3.303417328270625e-05,
      "loss": 0.359,
      "num_tokens": 1363556240.0,
      "step": 1305
    },
    {
      "epoch": 1.2198416394969724,
      "grad_norm": 0.24172074049578962,
      "learning_rate": 3.2947877114256126e-05,
      "loss": 0.3613,
      "num_tokens": 1368799120.0,
      "step": 1310
    },
    {
      "epoch": 1.224499301350722,
      "grad_norm": 0.22785019870552942,
      "learning_rate": 3.286158094580601e-05,
      "loss": 0.351,
      "num_tokens": 1374042000.0,
      "step": 1315
    },
    {
      "epoch": 1.2291569632044714,
      "grad_norm": 0.25052556395528863,
      "learning_rate": 3.277528477735589e-05,
      "loss": 0.3612,
      "num_tokens": 1379284880.0,
      "step": 1320
    },
    {
      "epoch": 1.2338146250582207,
      "grad_norm": 0.20368125241205054,
      "learning_rate": 3.268898860890577e-05,
      "loss": 0.3519,
      "num_tokens": 1384484350.0,
      "step": 1325
    },
    {
      "epoch": 1.2384722869119702,
      "grad_norm": 0.2356051247971543,
      "learning_rate": 3.2602692440455645e-05,
      "loss": 0.3583,
      "num_tokens": 1389727230.0,
      "step": 1330
    },
    {
      "epoch": 1.2431299487657197,
      "grad_norm": 0.25338890983980733,
      "learning_rate": 3.2516396272005524e-05,
      "loss": 0.3513,
      "num_tokens": 1394919838.0,
      "step": 1335
    },
    {
      "epoch": 1.247787610619469,
      "grad_norm": 0.25633423433388874,
      "learning_rate": 3.243010010355541e-05,
      "loss": 0.3513,
      "num_tokens": 1400162718.0,
      "step": 1340
    },
    {
      "epoch": 1.2524452724732185,
      "grad_norm": 0.20877105721800487,
      "learning_rate": 3.234380393510528e-05,
      "loss": 0.342,
      "num_tokens": 1405378054.0,
      "step": 1345
    },
    {
      "epoch": 1.257102934326968,
      "grad_norm": 0.20764077977393994,
      "learning_rate": 3.225750776665516e-05,
      "loss": 0.3523,
      "num_tokens": 1410620934.0,
      "step": 1350
    },
    {
      "epoch": 1.2617605961807172,
      "grad_norm": 0.1897613698280296,
      "learning_rate": 3.217121159820504e-05,
      "loss": 0.3465,
      "num_tokens": 1415863814.0,
      "step": 1355
    },
    {
      "epoch": 1.2664182580344667,
      "grad_norm": 0.2202701493032462,
      "learning_rate": 3.208491542975492e-05,
      "loss": 0.3418,
      "num_tokens": 1421106694.0,
      "step": 1360
    },
    {
      "epoch": 1.271075919888216,
      "grad_norm": 0.19380884866876125,
      "learning_rate": 3.19986192613048e-05,
      "loss": 0.3551,
      "num_tokens": 1426349574.0,
      "step": 1365
    },
    {
      "epoch": 1.2757335817419655,
      "grad_norm": 0.2457371816745664,
      "learning_rate": 3.191232309285468e-05,
      "loss": 0.3532,
      "num_tokens": 1431592454.0,
      "step": 1370
    },
    {
      "epoch": 1.280391243595715,
      "grad_norm": 0.18986957309731253,
      "learning_rate": 3.1826026924404556e-05,
      "loss": 0.3521,
      "num_tokens": 1436835334.0,
      "step": 1375
    },
    {
      "epoch": 1.2850489054494645,
      "grad_norm": 0.2100992241228242,
      "learning_rate": 3.173973075595444e-05,
      "loss": 0.3518,
      "num_tokens": 1442078214.0,
      "step": 1380
    },
    {
      "epoch": 1.2897065673032138,
      "grad_norm": 0.20216252869907372,
      "learning_rate": 3.165343458750431e-05,
      "loss": 0.3518,
      "num_tokens": 1447321094.0,
      "step": 1385
    },
    {
      "epoch": 1.2943642291569633,
      "grad_norm": 0.2381741381411174,
      "learning_rate": 3.156713841905419e-05,
      "loss": 0.3588,
      "num_tokens": 1452563974.0,
      "step": 1390
    },
    {
      "epoch": 1.2990218910107125,
      "grad_norm": 0.2025166817939017,
      "learning_rate": 3.1480842250604076e-05,
      "loss": 0.352,
      "num_tokens": 1457790520.0,
      "step": 1395
    },
    {
      "epoch": 1.303679552864462,
      "grad_norm": 0.24307415461117446,
      "learning_rate": 3.1394546082153954e-05,
      "loss": 0.3564,
      "num_tokens": 1463015630.0,
      "step": 1400
    },
    {
      "epoch": 1.3083372147182115,
      "grad_norm": 0.2276783730925848,
      "learning_rate": 3.130824991370383e-05,
      "loss": 0.3467,
      "num_tokens": 1468217794.0,
      "step": 1405
    },
    {
      "epoch": 1.312994876571961,
      "grad_norm": 0.23192785509420596,
      "learning_rate": 3.122195374525371e-05,
      "loss": 0.346,
      "num_tokens": 1473372988.0,
      "step": 1410
    },
    {
      "epoch": 1.3176525384257103,
      "grad_norm": 0.2036047145944534,
      "learning_rate": 3.113565757680359e-05,
      "loss": 0.3651,
      "num_tokens": 1478610484.0,
      "step": 1415
    },
    {
      "epoch": 1.3223102002794598,
      "grad_norm": 0.23299332238046108,
      "learning_rate": 3.1049361408353474e-05,
      "loss": 0.3557,
      "num_tokens": 1483853364.0,
      "step": 1420
    },
    {
      "epoch": 1.326967862133209,
      "grad_norm": 0.18699230041143197,
      "learning_rate": 3.096306523990335e-05,
      "loss": 0.3467,
| "num_tokens": 1489042190.0, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 1.3316255239869585, | |
| "grad_norm": 0.21180981242027214, | |
| "learning_rate": 3.0876769071453223e-05, | |
| "loss": 0.3498, | |
| "num_tokens": 1494232222.0, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 1.336283185840708, | |
| "grad_norm": 0.1950047019027231, | |
| "learning_rate": 3.079047290300311e-05, | |
| "loss": 0.3527, | |
| "num_tokens": 1499475102.0, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 1.3409408476944573, | |
| "grad_norm": 0.18855160572542237, | |
| "learning_rate": 3.070417673455299e-05, | |
| "loss": 0.3518, | |
| "num_tokens": 1504716080.0, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 1.3455985095482068, | |
| "grad_norm": 0.20849189907803892, | |
| "learning_rate": 3.061788056610287e-05, | |
| "loss": 0.3585, | |
| "num_tokens": 1509958960.0, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 1.350256171401956, | |
| "grad_norm": 0.20063018970714375, | |
| "learning_rate": 3.053158439765274e-05, | |
| "loss": 0.3479, | |
| "num_tokens": 1515201840.0, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 1.3549138332557056, | |
| "grad_norm": 0.204032419022711, | |
| "learning_rate": 3.0445288229202625e-05, | |
| "loss": 0.3569, | |
| "num_tokens": 1520444720.0, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 1.359571495109455, | |
| "grad_norm": 0.23049008800758747, | |
| "learning_rate": 3.0358992060752506e-05, | |
| "loss": 0.3411, | |
| "num_tokens": 1525687356.0, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 1.3642291569632046, | |
| "grad_norm": 0.2113070258428004, | |
| "learning_rate": 3.027269589230238e-05, | |
| "loss": 0.3482, | |
| "num_tokens": 1530885262.0, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 1.3688868188169538, | |
| "grad_norm": 0.22870195454048076, | |
| "learning_rate": 3.018639972385226e-05, | |
| "loss": 0.3535, | |
| "num_tokens": 1536099842.0, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 1.3735444806707033, | |
| "grad_norm": 0.21752935461988035, | |
| "learning_rate": 3.010010355540214e-05, | |
| "loss": 0.3472, | |
| "num_tokens": 1541293860.0, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 1.3782021425244526, | |
| "grad_norm": 0.22068094359126383, | |
| "learning_rate": 3.001380738695202e-05, | |
| "loss": 0.3495, | |
| "num_tokens": 1546481532.0, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 1.382859804378202, | |
| "grad_norm": 0.21257181024789698, | |
| "learning_rate": 2.99275112185019e-05, | |
| "loss": 0.3566, | |
| "num_tokens": 1551724412.0, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 1.3875174662319516, | |
| "grad_norm": 0.20312677454868905, | |
| "learning_rate": 2.984121505005178e-05, | |
| "loss": 0.355, | |
| "num_tokens": 1556967292.0, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 1.392175128085701, | |
| "grad_norm": 0.19503499762486276, | |
| "learning_rate": 2.9754918881601657e-05, | |
| "loss": 0.3552, | |
| "num_tokens": 1562210172.0, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 1.3968327899394504, | |
| "grad_norm": 0.20343927439484102, | |
| "learning_rate": 2.966862271315154e-05, | |
| "loss": 0.3443, | |
| "num_tokens": 1567453052.0, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 1.4014904517931999, | |
| "grad_norm": 0.22296672319818084, | |
| "learning_rate": 2.9582326544701417e-05, | |
| "loss": 0.3492, | |
| "num_tokens": 1572651232.0, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 1.4061481136469491, | |
| "grad_norm": 0.20203361073245366, | |
| "learning_rate": 2.9496030376251292e-05, | |
| "loss": 0.3483, | |
| "num_tokens": 1577894112.0, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 1.4108057755006986, | |
| "grad_norm": 0.19550306147614668, | |
| "learning_rate": 2.9409734207801177e-05, | |
| "loss": 0.3581, | |
| "num_tokens": 1583076382.0, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 1.4154634373544481, | |
| "grad_norm": 0.2050674974251282, | |
| "learning_rate": 2.9323438039351052e-05, | |
| "loss": 0.3498, | |
| "num_tokens": 1588319262.0, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 1.4201210992081974, | |
| "grad_norm": 0.21938334456840067, | |
| "learning_rate": 2.9237141870900937e-05, | |
| "loss": 0.35, | |
| "num_tokens": 1593562142.0, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 1.424778761061947, | |
| "grad_norm": 0.19574361141182345, | |
| "learning_rate": 2.9150845702450812e-05, | |
| "loss": 0.3411, | |
| "num_tokens": 1598805022.0, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 1.4294364229156964, | |
| "grad_norm": 0.19885339753093287, | |
| "learning_rate": 2.906454953400069e-05, | |
| "loss": 0.3582, | |
| "num_tokens": 1604047902.0, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 1.4340940847694457, | |
| "grad_norm": 0.18111722431294414, | |
| "learning_rate": 2.897825336555057e-05, | |
| "loss": 0.3488, | |
| "num_tokens": 1609290782.0, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 1.4387517466231952, | |
| "grad_norm": 0.18127481884169266, | |
| "learning_rate": 2.889195719710045e-05, | |
| "loss": 0.3492, | |
| "num_tokens": 1614533662.0, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 1.4434094084769447, | |
| "grad_norm": 0.18701696942336432, | |
| "learning_rate": 2.8805661028650328e-05, | |
| "loss": 0.3438, | |
| "num_tokens": 1619776542.0, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 1.448067070330694, | |
| "grad_norm": 0.19205373912041923, | |
| "learning_rate": 2.871936486020021e-05, | |
| "loss": 0.3406, | |
| "num_tokens": 1625019422.0, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 1.4527247321844434, | |
| "grad_norm": 0.21295860515858667, | |
| "learning_rate": 2.8633068691750088e-05, | |
| "loss": 0.3487, | |
| "num_tokens": 1630262302.0, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 1.4573823940381927, | |
| "grad_norm": 0.19924933656195012, | |
| "learning_rate": 2.854677252329997e-05, | |
| "loss": 0.3567, | |
| "num_tokens": 1635461518.0, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 1.4620400558919422, | |
| "grad_norm": 0.23875994256595146, | |
| "learning_rate": 2.8460476354849848e-05, | |
| "loss": 0.3682, | |
| "num_tokens": 1640704398.0, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 1.4666977177456917, | |
| "grad_norm": 0.23000204083195677, | |
| "learning_rate": 2.8374180186399723e-05, | |
| "loss": 0.35, | |
| "num_tokens": 1645947278.0, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 1.4713553795994412, | |
| "grad_norm": 0.2264031161269673, | |
| "learning_rate": 2.8287884017949608e-05, | |
| "loss": 0.3421, | |
| "num_tokens": 1651190158.0, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 1.4760130414531905, | |
| "grad_norm": 0.20891389467403518, | |
| "learning_rate": 2.8201587849499482e-05, | |
| "loss": 0.3502, | |
| "num_tokens": 1656433038.0, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 1.48067070330694, | |
| "grad_norm": 0.20533956437048226, | |
| "learning_rate": 2.811529168104936e-05, | |
| "loss": 0.3502, | |
| "num_tokens": 1661675918.0, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 1.4853283651606892, | |
| "grad_norm": 0.21085864230142837, | |
| "learning_rate": 2.8028995512599242e-05, | |
| "loss": 0.3481, | |
| "num_tokens": 1666918798.0, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 1.4899860270144387, | |
| "grad_norm": 0.21108505521502463, | |
| "learning_rate": 2.794269934414912e-05, | |
| "loss": 0.354, | |
| "num_tokens": 1672161678.0, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 1.4946436888681882, | |
| "grad_norm": 0.19794890472679366, | |
| "learning_rate": 2.7856403175699002e-05, | |
| "loss": 0.3452, | |
| "num_tokens": 1677379408.0, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 1.4993013507219377, | |
| "grad_norm": 0.19896817062079572, | |
| "learning_rate": 2.777010700724888e-05, | |
| "loss": 0.3512, | |
| "num_tokens": 1682574966.0, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 1.503959012575687, | |
| "grad_norm": 0.21125845084111664, | |
| "learning_rate": 2.768381083879876e-05, | |
| "loss": 0.3494, | |
| "num_tokens": 1687810252.0, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 1.5086166744294365, | |
| "grad_norm": 0.19097914542166763, | |
| "learning_rate": 2.759751467034864e-05, | |
| "loss": 0.3511, | |
| "num_tokens": 1693051286.0, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 1.5132743362831858, | |
| "grad_norm": 0.19189077180611866, | |
| "learning_rate": 2.751121850189852e-05, | |
| "loss": 0.354, | |
| "num_tokens": 1698294166.0, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 1.5179319981369352, | |
| "grad_norm": 0.2093939156257332, | |
| "learning_rate": 2.7424922333448393e-05, | |
| "loss": 0.3511, | |
| "num_tokens": 1703537046.0, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 1.5225896599906847, | |
| "grad_norm": 0.20151878777033294, | |
| "learning_rate": 2.7338626164998278e-05, | |
| "loss": 0.3612, | |
| "num_tokens": 1708779926.0, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 1.5272473218444342, | |
| "grad_norm": 0.1845362258407387, | |
| "learning_rate": 2.7252329996548153e-05, | |
| "loss": 0.3544, | |
| "num_tokens": 1714022806.0, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 1.5319049836981835, | |
| "grad_norm": 0.22798956460459371, | |
| "learning_rate": 2.7166033828098038e-05, | |
| "loss": 0.3575, | |
| "num_tokens": 1719265686.0, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 1.5365626455519328, | |
| "grad_norm": 0.1981522748422671, | |
| "learning_rate": 2.7079737659647913e-05, | |
| "loss": 0.3428, | |
| "num_tokens": 1724508566.0, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 1.5412203074056823, | |
| "grad_norm": 0.22336939787082538, | |
| "learning_rate": 2.699344149119779e-05, | |
| "loss": 0.3415, | |
| "num_tokens": 1729751446.0, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 1.5458779692594318, | |
| "grad_norm": 0.2125825296306686, | |
| "learning_rate": 2.6907145322747673e-05, | |
| "loss": 0.3419, | |
| "num_tokens": 1734965670.0, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 1.5505356311131813, | |
| "grad_norm": 0.19560502057235923, | |
| "learning_rate": 2.682084915429755e-05, | |
| "loss": 0.3546, | |
| "num_tokens": 1740208550.0, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 1.5551932929669308, | |
| "grad_norm": 0.20229187401318324, | |
| "learning_rate": 2.673455298584743e-05, | |
| "loss": 0.3512, | |
| "num_tokens": 1745389856.0, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 1.55985095482068, | |
| "grad_norm": 0.23555557304781208, | |
| "learning_rate": 2.664825681739731e-05, | |
| "loss": 0.3474, | |
| "num_tokens": 1750632736.0, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 1.5645086166744293, | |
| "grad_norm": 0.21194127265717164, | |
| "learning_rate": 2.6561960648947186e-05, | |
| "loss": 0.3546, | |
| "num_tokens": 1755875616.0, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 1.5691662785281788, | |
| "grad_norm": 0.21797195008954087, | |
| "learning_rate": 2.647566448049707e-05, | |
| "loss": 0.3445, | |
| "num_tokens": 1761107456.0, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 1.5738239403819283, | |
| "grad_norm": 0.1942274108044053, | |
| "learning_rate": 2.6389368312046945e-05, | |
| "loss": 0.352, | |
| "num_tokens": 1766350336.0, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 1.5784816022356778, | |
| "grad_norm": 0.18650714834741594, | |
| "learning_rate": 2.6303072143596824e-05, | |
| "loss": 0.3483, | |
| "num_tokens": 1771593216.0, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 1.583139264089427, | |
| "grad_norm": 0.2230734606320288, | |
| "learning_rate": 2.6216775975146705e-05, | |
| "loss": 0.3516, | |
| "num_tokens": 1776820058.0, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 1.5877969259431766, | |
| "grad_norm": 0.19418870162076396, | |
| "learning_rate": 2.6130479806696584e-05, | |
| "loss": 0.3466, | |
| "num_tokens": 1782062938.0, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 1.5924545877969258, | |
| "grad_norm": 0.1909104299861916, | |
| "learning_rate": 2.6044183638246462e-05, | |
| "loss": 0.3539, | |
| "num_tokens": 1787305818.0, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 1.5971122496506753, | |
| "grad_norm": 0.21718714231788452, | |
| "learning_rate": 2.5957887469796343e-05, | |
| "loss": 0.3554, | |
| "num_tokens": 1792484830.0, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 1.6017699115044248, | |
| "grad_norm": 0.18894682956472955, | |
| "learning_rate": 2.587159130134622e-05, | |
| "loss": 0.3419, | |
| "num_tokens": 1797727710.0, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 1.6064275733581743, | |
| "grad_norm": 0.2054226737758291, | |
| "learning_rate": 2.5785295132896096e-05, | |
| "loss": 0.3487, | |
| "num_tokens": 1802970590.0, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 1.6110852352119236, | |
| "grad_norm": 0.19366178175650386, | |
| "learning_rate": 2.569899896444598e-05, | |
| "loss": 0.3606, | |
| "num_tokens": 1808213470.0, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 1.6157428970656729, | |
| "grad_norm": 0.2031217079400216, | |
| "learning_rate": 2.5612702795995856e-05, | |
| "loss": 0.3595, | |
| "num_tokens": 1813456350.0, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 1.6204005589194224, | |
| "grad_norm": 0.1823865953606195, | |
| "learning_rate": 2.552640662754574e-05, | |
| "loss": 0.3407, | |
| "num_tokens": 1818699230.0, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 1.6250582207731719, | |
| "grad_norm": 0.1974535992621535, | |
| "learning_rate": 2.5440110459095616e-05, | |
| "loss": 0.3685, | |
| "num_tokens": 1823942110.0, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 1.6297158826269214, | |
| "grad_norm": 0.18440028548384846, | |
| "learning_rate": 2.5353814290645494e-05, | |
| "loss": 0.3502, | |
| "num_tokens": 1829161686.0, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 1.6343735444806708, | |
| "grad_norm": 0.2004482861618546, | |
| "learning_rate": 2.5267518122195376e-05, | |
| "loss": 0.3568, | |
| "num_tokens": 1834404566.0, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 1.6390312063344201, | |
| "grad_norm": 0.21625186736279364, | |
| "learning_rate": 2.5181221953745254e-05, | |
| "loss": 0.3445, | |
| "num_tokens": 1839647446.0, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 1.6436888681881694, | |
| "grad_norm": 0.20181187661783057, | |
| "learning_rate": 2.5094925785295132e-05, | |
| "loss": 0.3448, | |
| "num_tokens": 1844890326.0, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 1.648346530041919, | |
| "grad_norm": 0.20608069805762663, | |
| "learning_rate": 2.5008629616845014e-05, | |
| "loss": 0.3431, | |
| "num_tokens": 1850121374.0, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 1.6530041918956684, | |
| "grad_norm": 0.1696865222401396, | |
| "learning_rate": 2.4922333448394892e-05, | |
| "loss": 0.3577, | |
| "num_tokens": 1855364254.0, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 1.6576618537494179, | |
| "grad_norm": 0.18541709280315763, | |
| "learning_rate": 2.483603727994477e-05, | |
| "loss": 0.343, | |
| "num_tokens": 1860607134.0, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 1.6623195156031674, | |
| "grad_norm": 0.2029935317922359, | |
| "learning_rate": 2.4749741111494652e-05, | |
| "loss": 0.3534, | |
| "num_tokens": 1865850014.0, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 1.6669771774569166, | |
| "grad_norm": 0.18473610285502226, | |
| "learning_rate": 2.466344494304453e-05, | |
| "loss": 0.3461, | |
| "num_tokens": 1871092894.0, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 1.671634839310666, | |
| "grad_norm": 0.21117929063018806, | |
| "learning_rate": 2.457714877459441e-05, | |
| "loss": 0.3436, | |
| "num_tokens": 1876335774.0, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 1.6762925011644154, | |
| "grad_norm": 0.19408168490648486, | |
| "learning_rate": 2.4490852606144287e-05, | |
| "loss": 0.3428, | |
| "num_tokens": 1881578654.0, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 1.680950163018165, | |
| "grad_norm": 0.21682500876513885, | |
| "learning_rate": 2.440455643769417e-05, | |
| "loss": 0.3507, | |
| "num_tokens": 1886793756.0, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 1.6856078248719144, | |
| "grad_norm": 0.2074985861029763, | |
| "learning_rate": 2.4318260269244047e-05, | |
| "loss": 0.3482, | |
| "num_tokens": 1892022954.0, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 1.6902654867256637, | |
| "grad_norm": 0.20487435765361425, | |
| "learning_rate": 2.4231964100793925e-05, | |
| "loss": 0.3485, | |
| "num_tokens": 1897265834.0, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 1.6949231485794132, | |
| "grad_norm": 0.19999800324031056, | |
| "learning_rate": 2.4145667932343803e-05, | |
| "loss": 0.3362, | |
| "num_tokens": 1902508714.0, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 1.6995808104331624, | |
| "grad_norm": 0.19676867004190018, | |
| "learning_rate": 2.4059371763893685e-05, | |
| "loss": 0.3391, | |
| "num_tokens": 1907751594.0, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 1.704238472286912, | |
| "grad_norm": 0.22458936819618602, | |
| "learning_rate": 2.3973075595443563e-05, | |
| "loss": 0.344, | |
| "num_tokens": 1912994474.0, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 1.7088961341406614, | |
| "grad_norm": 0.1977114316037129, | |
| "learning_rate": 2.388677942699344e-05, | |
| "loss": 0.3511, | |
| "num_tokens": 1918171986.0, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 1.713553795994411, | |
| "grad_norm": 0.21741317292957027, | |
| "learning_rate": 2.3800483258543323e-05, | |
| "loss": 0.3449, | |
| "num_tokens": 1923414866.0, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 1.7182114578481602, | |
| "grad_norm": 0.17822797391288583, | |
| "learning_rate": 2.37141870900932e-05, | |
| "loss": 0.3494, | |
| "num_tokens": 1928641926.0, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 1.7228691197019095, | |
| "grad_norm": 0.21714481260805338, | |
| "learning_rate": 2.3627890921643083e-05, | |
| "loss": 0.3589, | |
| "num_tokens": 1933884806.0, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 1.727526781555659, | |
| "grad_norm": 0.1924145719456602, | |
| "learning_rate": 2.3541594753192957e-05, | |
| "loss": 0.3405, | |
| "num_tokens": 1939127686.0, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 1.7321844434094085, | |
| "grad_norm": 0.19586499023678394, | |
| "learning_rate": 2.345529858474284e-05, | |
| "loss": 0.3634, | |
| "num_tokens": 1944370566.0, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 1.736842105263158, | |
| "grad_norm": 0.20737114490387157, | |
| "learning_rate": 2.3369002416292717e-05, | |
| "loss": 0.3436, | |
| "num_tokens": 1949613446.0, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 1.7414997671169075, | |
| "grad_norm": 0.16950602273666904, | |
| "learning_rate": 2.32827062478426e-05, | |
| "loss": 0.3481, | |
| "num_tokens": 1954856326.0, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 1.7461574289706567, | |
| "grad_norm": 0.2012574722837541, | |
| "learning_rate": 2.3196410079392474e-05, | |
| "loss": 0.3627, | |
| "num_tokens": 1960099206.0, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 1.750815090824406, | |
| "grad_norm": 0.1925693050719745, | |
| "learning_rate": 2.3110113910942355e-05, | |
| "loss": 0.3538, | |
| "num_tokens": 1965342086.0, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 1.7554727526781555, | |
| "grad_norm": 0.1884807747456036, | |
| "learning_rate": 2.3023817742492234e-05, | |
| "loss": 0.3539, | |
| "num_tokens": 1970582094.0, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 1.760130414531905, | |
| "grad_norm": 0.19670078760717086, | |
| "learning_rate": 2.2937521574042115e-05, | |
| "loss": 0.351, | |
| "num_tokens": 1975767338.0, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 1.7647880763856545, | |
| "grad_norm": 0.1955286685665457, | |
| "learning_rate": 2.285122540559199e-05, | |
| "loss": 0.3612, | |
| "num_tokens": 1980963902.0, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 1.7694457382394038, | |
| "grad_norm": 0.1902621049952639, | |
| "learning_rate": 2.276492923714187e-05, | |
| "loss": 0.3501, | |
| "num_tokens": 1986206782.0, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 1.7741034000931533, | |
| "grad_norm": 0.20972805854000362, | |
| "learning_rate": 2.267863306869175e-05, | |
| "loss": 0.351, | |
| "num_tokens": 1991388204.0, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 1.7787610619469025, | |
| "grad_norm": 0.20476220944741774, | |
| "learning_rate": 2.259233690024163e-05, | |
| "loss": 0.3421, | |
| "num_tokens": 1996631084.0, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 1.783418723800652, | |
| "grad_norm": 0.196454271618973, | |
| "learning_rate": 2.250604073179151e-05, | |
| "loss": 0.3386, | |
| "num_tokens": 2001873964.0, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 1.7880763856544015, | |
| "grad_norm": 0.19392463808407623, | |
| "learning_rate": 2.2419744563341388e-05, | |
| "loss": 0.3441, | |
| "num_tokens": 2007116844.0, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 1.792734047508151, | |
| "grad_norm": 0.20942913062066923, | |
| "learning_rate": 2.233344839489127e-05, | |
| "loss": 0.3534, | |
| "num_tokens": 2012298654.0, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 1.7973917093619003, | |
| "grad_norm": 0.18917674695825362, | |
| "learning_rate": 2.2247152226441148e-05, | |
| "loss": 0.3428, | |
| "num_tokens": 2017523752.0, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 1.8020493712156498, | |
| "grad_norm": 0.18401524322063684, | |
| "learning_rate": 2.2160856057991026e-05, | |
| "loss": 0.3382, | |
| "num_tokens": 2022766632.0, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 1.806707033069399, | |
| "grad_norm": 0.18320628101906591, | |
| "learning_rate": 2.2074559889540904e-05, | |
| "loss": 0.3416, | |
| "num_tokens": 2028009512.0, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 1.8113646949231486, | |
| "grad_norm": 0.19085847726485056, | |
| "learning_rate": 2.1988263721090786e-05, | |
| "loss": 0.3521, | |
| "num_tokens": 2033209432.0, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 1.816022356776898, | |
| "grad_norm": 0.19352324451418237, | |
| "learning_rate": 2.1901967552640664e-05, | |
| "loss": 0.3424, | |
| "num_tokens": 2038449340.0, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 1.8206800186306475, | |
| "grad_norm": 0.18678469536630937, | |
| "learning_rate": 2.1815671384190542e-05, | |
| "loss": 0.3471, | |
| "num_tokens": 2043692220.0, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 1.8253376804843968, | |
| "grad_norm": 0.1994428165836453, | |
| "learning_rate": 2.172937521574042e-05, | |
| "loss": 0.3475, | |
| "num_tokens": 2048935100.0, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 1.829995342338146, | |
| "grad_norm": 0.1939365053975312, | |
| "learning_rate": 2.1643079047290302e-05, | |
| "loss": 0.3494, | |
| "num_tokens": 2054123726.0, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 1.8346530041918956, | |
| "grad_norm": 0.22805560140180176, | |
| "learning_rate": 2.155678287884018e-05, | |
| "loss": 0.3446, | |
| "num_tokens": 2059366606.0, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 1.839310666045645, | |
| "grad_norm": 0.183131491338049, | |
| "learning_rate": 2.147048671039006e-05, | |
| "loss": 0.3498, | |
| "num_tokens": 2064609486.0, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 1.8439683278993946, | |
| "grad_norm": 0.1906950812720939, | |
| "learning_rate": 2.1384190541939937e-05, | |
| "loss": 0.3431, | |
| "num_tokens": 2069852366.0, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 1.848625989753144, | |
| "grad_norm": 0.2041757996249356, | |
| "learning_rate": 2.129789437348982e-05, | |
| "loss": 0.3559, | |
| "num_tokens": 2075076504.0, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 1.8532836516068933, | |
| "grad_norm": 0.1883303666609199, | |
| "learning_rate": 2.1211598205039697e-05, | |
| "loss": 0.3513, | |
| "num_tokens": 2080319384.0, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 1.8579413134606426, | |
| "grad_norm": 0.1843711588911157, | |
| "learning_rate": 2.1125302036589575e-05, | |
| "loss": 0.3386, | |
| "num_tokens": 2085562264.0, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 1.8625989753143921, | |
| "grad_norm": 0.18628330415520214, | |
| "learning_rate": 2.1039005868139457e-05, | |
| "loss": 0.3477, | |
| "num_tokens": 2090805144.0, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 1.8672566371681416, | |
| "grad_norm": 0.23500970887896458, | |
| "learning_rate": 2.0952709699689335e-05, | |
| "loss": 0.3549, | |
| "num_tokens": 2096036892.0, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 1.871914299021891, | |
| "grad_norm": 0.18197506296123228, | |
| "learning_rate": 2.0866413531239216e-05, | |
| "loss": 0.343, | |
| "num_tokens": 2101279772.0, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 1.8765719608756404, | |
| "grad_norm": 0.20671196189814875, | |
| "learning_rate": 2.078011736278909e-05, | |
| "loss": 0.3546, | |
| "num_tokens": 2106522652.0, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 1.8812296227293899, | |
| "grad_norm": 0.19593100455782345, | |
| "learning_rate": 2.0693821194338973e-05, | |
| "loss": 0.3407, | |
| "num_tokens": 2111743394.0, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 1.8858872845831391, | |
| "grad_norm": 0.20762098307454577, | |
| "learning_rate": 2.060752502588885e-05, | |
| "loss": 0.347, | |
| "num_tokens": 2116986274.0, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 1.8905449464368886, | |
| "grad_norm": 0.21548501945058646, | |
| "learning_rate": 2.0521228857438733e-05, | |
| "loss": 0.3443, | |
| "num_tokens": 2122229154.0, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 1.8952026082906381, | |
| "grad_norm": 0.18677325339567766, | |
| "learning_rate": 2.0434932688988608e-05, | |
| "loss": 0.3423, | |
| "num_tokens": 2127472034.0, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 1.8998602701443876, | |
| "grad_norm": 0.693654344230401, | |
| "learning_rate": 2.034863652053849e-05, | |
| "loss": 0.3595, | |
| "num_tokens": 2132714914.0, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 1.904517931998137, | |
| "grad_norm": 0.19553939875949755, | |
| "learning_rate": 2.0262340352088367e-05, | |
| "loss": 0.3535, | |
| "num_tokens": 2137957794.0, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 1.9091755938518864, | |
| "grad_norm": 0.21004994480591466, | |
| "learning_rate": 2.017604418363825e-05, | |
| "loss": 0.3485, | |
| "num_tokens": 2143200674.0, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 1.9138332557056357, | |
| "grad_norm": 0.19689016795272193, | |
| "learning_rate": 2.0089748015188127e-05, | |
| "loss": 0.3485, | |
| "num_tokens": 2148417246.0, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 1.9184909175593852, | |
| "grad_norm": 0.19790258562320587, | |
| "learning_rate": 2.0003451846738005e-05, | |
| "loss": 0.3431, | |
| "num_tokens": 2153571210.0, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 1.9231485794131347, | |
| "grad_norm": 0.19410937427280278, | |
| "learning_rate": 1.9917155678287887e-05, | |
| "loss": 0.3442, | |
| "num_tokens": 2158814090.0, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 1.9278062412668842, | |
| "grad_norm": 0.19821672945592456, | |
| "learning_rate": 1.9830859509837765e-05, | |
| "loss": 0.3478, | |
| "num_tokens": 2164056970.0, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 1.9324639031206334, | |
| "grad_norm": 0.1837324706991494, | |
| "learning_rate": 1.9744563341387643e-05, | |
| "loss": 0.3463, | |
| "num_tokens": 2169299850.0, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 1.9371215649743827, | |
| "grad_norm": 0.1978548591677322, | |
| "learning_rate": 1.9658267172937522e-05, | |
| "loss": 0.3407, | |
| "num_tokens": 2174542730.0, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 1.9417792268281322, | |
| "grad_norm": 0.17668221155038566, | |
| "learning_rate": 1.9571971004487403e-05, | |
| "loss": 0.3511, | |
| "num_tokens": 2179720330.0, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 1.9464368886818817, | |
| "grad_norm": 0.17019525989877277, | |
| "learning_rate": 1.948567483603728e-05, | |
| "loss": 0.3354, | |
| "num_tokens": 2184963210.0, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 1.9510945505356312, | |
| "grad_norm": 0.18275614788344408, | |
| "learning_rate": 1.939937866758716e-05, | |
| "loss": 0.3437, | |
| "num_tokens": 2190165052.0, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 1.9557522123893807, | |
| "grad_norm": 0.1744861559454576, | |
| "learning_rate": 1.9313082499137038e-05, | |
| "loss": 0.3437, | |
| "num_tokens": 2195407932.0, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 1.96040987424313, | |
| "grad_norm": 0.1897322741100087, | |
| "learning_rate": 1.922678633068692e-05, | |
| "loss": 0.3522, | |
| "num_tokens": 2200650812.0, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 1.9650675360968792, | |
| "grad_norm": 0.17724268577544813, | |
| "learning_rate": 1.9140490162236798e-05, | |
| "loss": 0.3368, | |
| "num_tokens": 2205893692.0, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 1.9697251979506287, | |
| "grad_norm": 0.20678150230764236, | |
| "learning_rate": 1.9054193993786676e-05, | |
| "loss": 0.3453, | |
| "num_tokens": 2211109840.0, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 1.9743828598043782, | |
| "grad_norm": 0.18393842549658648, | |
| "learning_rate": 1.8967897825336554e-05, | |
| "loss": 0.3483, | |
| "num_tokens": 2216352720.0, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 1.9790405216581277, | |
| "grad_norm": 0.1830915802108831, | |
| "learning_rate": 1.8881601656886436e-05, | |
| "loss": 0.3428, | |
| "num_tokens": 2221537760.0, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 1.983698183511877, | |
| "grad_norm": 0.18031856582533748, | |
| "learning_rate": 1.8795305488436314e-05, | |
| "loss": 0.3451, | |
| "num_tokens": 2226780640.0, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 1.9883558453656265, | |
| "grad_norm": 0.19861350848808676, | |
| "learning_rate": 1.8709009319986192e-05, | |
| "loss": 0.3513, | |
| "num_tokens": 2232023520.0, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 1.9930135072193758, | |
| "grad_norm": 0.17297343993693245, | |
| "learning_rate": 1.8622713151536074e-05, | |
| "loss": 0.3489, | |
| "num_tokens": 2237266400.0, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 1.9976711690731253, | |
| "grad_norm": 0.18023342857541846, | |
| "learning_rate": 1.8536416983085952e-05, | |
| "loss": 0.3507, | |
| "num_tokens": 2242509280.0, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 2.0018630647415, | |
| "grad_norm": 0.1986015937191958, | |
| "learning_rate": 1.8450120814635834e-05, | |
| "loss": 0.327, | |
| "num_tokens": 2246888496.0, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 2.0065207265952494, | |
| "grad_norm": 0.22605988230603666, | |
| "learning_rate": 1.836382464618571e-05, | |
| "loss": 0.2884, | |
| "num_tokens": 2252112814.0, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 2.0111783884489984, | |
| "grad_norm": 0.19484877291001645, | |
| "learning_rate": 1.827752847773559e-05, | |
| "loss": 0.2821, | |
| "num_tokens": 2257355694.0, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 2.015836050302748, | |
| "grad_norm": 0.21525775395131613, | |
| "learning_rate": 1.819123230928547e-05, | |
| "loss": 0.2822, | |
| "num_tokens": 2262579456.0, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 2.0204937121564974, | |
| "grad_norm": 0.19825582521158533, | |
| "learning_rate": 1.810493614083535e-05, | |
| "loss": 0.2715, | |
| "num_tokens": 2267822336.0, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 2.025151374010247, | |
| "grad_norm": 0.2124132954355725, | |
| "learning_rate": 1.8018639972385225e-05, | |
| "loss": 0.288, | |
| "num_tokens": 2273022256.0, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 2.0298090358639964, | |
| "grad_norm": 0.19677434027226495, | |
| "learning_rate": 1.7932343803935107e-05, | |
| "loss": 0.2761, | |
| "num_tokens": 2278265136.0, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 2.034466697717746, | |
| "grad_norm": 0.20191992477077836, | |
| "learning_rate": 1.7846047635484985e-05, | |
| "loss": 0.2817, | |
| "num_tokens": 2283508016.0, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 2.039124359571495, | |
| "grad_norm": 0.20452033973254435, | |
| "learning_rate": 1.7759751467034866e-05, | |
| "loss": 0.2829, | |
| "num_tokens": 2288728034.0, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 2.0437820214252445, | |
| "grad_norm": 0.19831172032553795, | |
| "learning_rate": 1.767345529858474e-05, | |
| "loss": 0.2811, | |
| "num_tokens": 2293970914.0, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 2.048439683278994, | |
| "grad_norm": 0.18586466575244776, | |
| "learning_rate": 1.7587159130134623e-05, | |
| "loss": 0.2781, | |
| "num_tokens": 2299152298.0, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 2.0530973451327434, | |
| "grad_norm": 0.1870058269577344, | |
| "learning_rate": 1.75008629616845e-05, | |
| "loss": 0.2836, | |
| "num_tokens": 2304395178.0, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 2.057755006986493, | |
| "grad_norm": 0.18807764036891927, | |
| "learning_rate": 1.7414566793234383e-05, | |
| "loss": 0.2718, | |
| "num_tokens": 2309599416.0, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 2.062412668840242, | |
| "grad_norm": 0.19359502375284748, | |
| "learning_rate": 1.732827062478426e-05, | |
| "loss": 0.2817, | |
| "num_tokens": 2314842296.0, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 2.0670703306939915, | |
| "grad_norm": 0.1808062058102467, | |
| "learning_rate": 1.724197445633414e-05, | |
| "loss": 0.2786, | |
| "num_tokens": 2320085176.0, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 2.071727992547741, | |
| "grad_norm": 0.22116076730012768, | |
| "learning_rate": 1.715567828788402e-05, | |
| "loss": 0.2782, | |
| "num_tokens": 2325328056.0, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 2.0763856544014905, | |
| "grad_norm": 0.19232180028153642, | |
| "learning_rate": 1.70693821194339e-05, | |
| "loss": 0.2807, | |
| "num_tokens": 2330570936.0, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 2.08104331625524, | |
| "grad_norm": 0.18338047196288235, | |
| "learning_rate": 1.6983085950983777e-05, | |
| "loss": 0.2751, | |
| "num_tokens": 2335813816.0, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 2.0857009781089895, | |
| "grad_norm": 0.18788287304798185, | |
| "learning_rate": 1.6896789782533655e-05, | |
| "loss": 0.2853, | |
| "num_tokens": 2341056696.0, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 2.0903586399627385, | |
| "grad_norm": 0.19253695313103228, | |
| "learning_rate": 1.6810493614083537e-05, | |
| "loss": 0.2803, | |
| "num_tokens": 2346238006.0, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 2.095016301816488, | |
| "grad_norm": 0.19785130387477917, | |
| "learning_rate": 1.6724197445633415e-05, | |
| "loss": 0.2761, | |
| "num_tokens": 2351480886.0, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 2.0996739636702375, | |
| "grad_norm": 0.19329232770120308, | |
| "learning_rate": 1.6637901277183294e-05, | |
| "loss": 0.2875, | |
| "num_tokens": 2356679066.0, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 2.104331625523987, | |
| "grad_norm": 0.1899792003832837, | |
| "learning_rate": 1.6551605108733172e-05, | |
| "loss": 0.2765, | |
| "num_tokens": 2361921946.0, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 2.1089892873777365, | |
| "grad_norm": 0.18903714226389592, | |
| "learning_rate": 1.6465308940283053e-05, | |
| "loss": 0.2866, | |
| "num_tokens": 2367121162.0, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 2.113646949231486, | |
| "grad_norm": 0.1746594335751977, | |
| "learning_rate": 1.637901277183293e-05, | |
| "loss": 0.2789, | |
| "num_tokens": 2372364042.0, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 2.118304611085235, | |
| "grad_norm": 0.5050153579238088, | |
| "learning_rate": 1.629271660338281e-05, | |
| "loss": 0.2807, | |
| "num_tokens": 2377606922.0, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 2.1229622729389845, | |
| "grad_norm": 0.19505340635281, | |
| "learning_rate": 1.620642043493269e-05, | |
| "loss": 0.2786, | |
| "num_tokens": 2382827664.0, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 2.127619934792734, | |
| "grad_norm": 0.18052210932343418, | |
| "learning_rate": 1.612012426648257e-05, | |
| "loss": 0.2839, | |
| "num_tokens": 2388027652.0, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 2.1322775966464835, | |
| "grad_norm": 0.1925968962538661, | |
| "learning_rate": 1.6033828098032448e-05, | |
| "loss": 0.283, | |
| "num_tokens": 2393270532.0, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 2.136935258500233, | |
| "grad_norm": 0.18870769866663178, | |
| "learning_rate": 1.5947531929582326e-05, | |
| "loss": 0.2855, | |
| "num_tokens": 2398467742.0, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 2.1415929203539825, | |
| "grad_norm": 0.19999916228913664, | |
| "learning_rate": 1.5861235761132208e-05, | |
| "loss": 0.2873, | |
| "num_tokens": 2403710622.0, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 2.1462505822077316, | |
| "grad_norm": 0.2048204437283971, | |
| "learning_rate": 1.5774939592682086e-05, | |
| "loss": 0.2825, | |
| "num_tokens": 2408951214.0, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 2.150908244061481, | |
| "grad_norm": 0.17257468085534114, | |
| "learning_rate": 1.5688643424231964e-05, | |
| "loss": 0.2709, | |
| "num_tokens": 2414194094.0, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 2.1555659059152306, | |
| "grad_norm": 0.19313877297374035, | |
| "learning_rate": 1.5602347255781842e-05, | |
| "loss": 0.2771, | |
| "num_tokens": 2419371694.0, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 2.16022356776898, | |
| "grad_norm": 0.17837385971565448, | |
| "learning_rate": 1.5516051087331724e-05, | |
| "loss": 0.2754, | |
| "num_tokens": 2424614574.0, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 2.1648812296227296, | |
| "grad_norm": 0.2068818526772754, | |
| "learning_rate": 1.5429754918881602e-05, | |
| "loss": 0.2813, | |
| "num_tokens": 2429841120.0, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 2.1695388914764786, | |
| "grad_norm": 0.16938117986852516, | |
| "learning_rate": 1.534345875043148e-05, | |
| "loss": 0.2819, | |
| "num_tokens": 2435040552.0, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 2.174196553330228, | |
| "grad_norm": 0.20302648116412747, | |
| "learning_rate": 1.525716258198136e-05, | |
| "loss": 0.2769, | |
| "num_tokens": 2440283432.0, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 2.1788542151839776, | |
| "grad_norm": 0.18963843054849086, | |
| "learning_rate": 1.517086641353124e-05, | |
| "loss": 0.2798, | |
| "num_tokens": 2445485274.0, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 2.183511877037727, | |
| "grad_norm": 0.17712909732258916, | |
| "learning_rate": 1.508457024508112e-05, | |
| "loss": 0.2853, | |
| "num_tokens": 2450728154.0, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 2.1881695388914766, | |
| "grad_norm": 0.19164840988929963, | |
| "learning_rate": 1.4998274076630997e-05, | |
| "loss": 0.2792, | |
| "num_tokens": 2455971034.0, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 2.192827200745226, | |
| "grad_norm": 0.17284981692660478, | |
| "learning_rate": 1.4911977908180877e-05, | |
| "loss": 0.2758, | |
| "num_tokens": 2461186370.0, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 2.197484862598975, | |
| "grad_norm": 0.22210531782560708, | |
| "learning_rate": 1.4825681739730757e-05, | |
| "loss": 0.2859, | |
| "num_tokens": 2466429250.0, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 2.2021425244527246, | |
| "grad_norm": 0.1997610143528838, | |
| "learning_rate": 1.4739385571280637e-05, | |
| "loss": 0.2842, | |
| "num_tokens": 2471561792.0, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 2.206800186306474, | |
| "grad_norm": 0.1998775222598786, | |
| "learning_rate": 1.4653089402830513e-05, | |
| "loss": 0.2832, | |
| "num_tokens": 2476744062.0, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 2.2114578481602236, | |
| "grad_norm": 0.20222989333597705, | |
| "learning_rate": 1.4566793234380393e-05, | |
| "loss": 0.2833, | |
| "num_tokens": 2481986942.0, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 2.216115510013973, | |
| "grad_norm": 0.1886906512364816, | |
| "learning_rate": 1.4480497065930273e-05, | |
| "loss": 0.2801, | |
| "num_tokens": 2487212052.0, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 2.2207731718677226, | |
| "grad_norm": 0.18134583058738324, | |
| "learning_rate": 1.4394200897480153e-05, | |
| "loss": 0.292, | |
| "num_tokens": 2492454932.0, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 2.2254308337214717, | |
| "grad_norm": 0.1660862369317987, | |
| "learning_rate": 1.4307904729030031e-05, | |
| "loss": 0.2853, | |
| "num_tokens": 2497656374.0, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 2.230088495575221, | |
| "grad_norm": 0.1971229075628501, | |
| "learning_rate": 1.4221608560579911e-05, | |
| "loss": 0.2813, | |
| "num_tokens": 2502899254.0, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 2.2347461574289706, | |
| "grad_norm": 0.18139104897138042, | |
| "learning_rate": 1.4135312392129791e-05, | |
| "loss": 0.2707, | |
| "num_tokens": 2508113316.0, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 2.23940381928272, | |
| "grad_norm": 0.1892962235265078, | |
| "learning_rate": 1.404901622367967e-05, | |
| "loss": 0.2826, | |
| "num_tokens": 2513356196.0, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 2.2440614811364696, | |
| "grad_norm": 0.1890865257037905, | |
| "learning_rate": 1.3962720055229547e-05, | |
| "loss": 0.2815, | |
| "num_tokens": 2518599076.0, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 2.248719142990219, | |
| "grad_norm": 0.17339521156851126, | |
| "learning_rate": 1.3876423886779427e-05, | |
| "loss": 0.2801, | |
| "num_tokens": 2523815648.0, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 2.253376804843968, | |
| "grad_norm": 0.18512650177462558, | |
| "learning_rate": 1.3790127718329307e-05, | |
| "loss": 0.2712, | |
| "num_tokens": 2529044846.0, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 2.2580344666977177, | |
| "grad_norm": 0.18069956781351978, | |
| "learning_rate": 1.3703831549879187e-05, | |
| "loss": 0.2838, | |
| "num_tokens": 2534287726.0, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 2.262692128551467, | |
| "grad_norm": 0.18565732444765026, | |
| "learning_rate": 1.3617535381429064e-05, | |
| "loss": 0.2746, | |
| "num_tokens": 2539530606.0, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 2.2673497904052167, | |
| "grad_norm": 0.1871993772647682, | |
| "learning_rate": 1.3531239212978944e-05, | |
| "loss": 0.2834, | |
| "num_tokens": 2544773486.0, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 2.272007452258966, | |
| "grad_norm": 0.17924941803324446, | |
| "learning_rate": 1.3444943044528824e-05, | |
| "loss": 0.2787, | |
| "num_tokens": 2549964918.0, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 2.276665114112715, | |
| "grad_norm": 0.19093223481893315, | |
| "learning_rate": 1.3358646876078703e-05, | |
| "loss": 0.2797, | |
| "num_tokens": 2555170596.0, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 2.2813227759664647, | |
| "grad_norm": 0.17956845023130855, | |
| "learning_rate": 1.327235070762858e-05, | |
| "loss": 0.2769, | |
| "num_tokens": 2560411630.0, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 2.285980437820214, | |
| "grad_norm": 0.17560371734880062, | |
| "learning_rate": 1.318605453917846e-05, | |
| "loss": 0.2779, | |
| "num_tokens": 2565604238.0, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 2.2906380996739637, | |
| "grad_norm": 0.21065635435209265, | |
| "learning_rate": 1.309975837072834e-05, | |
| "loss": 0.2739, | |
| "num_tokens": 2570828142.0, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 2.295295761527713, | |
| "grad_norm": 0.19245047970875234, | |
| "learning_rate": 1.301346220227822e-05, | |
| "loss": 0.2905, | |
| "num_tokens": 2576047788.0, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 2.2999534233814627, | |
| "grad_norm": 0.19084393903098723, | |
| "learning_rate": 1.2927166033828098e-05, | |
| "loss": 0.2784, | |
| "num_tokens": 2581290668.0, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 2.3046110852352117, | |
| "grad_norm": 0.18510600488904666, | |
| "learning_rate": 1.2840869865377978e-05, | |
| "loss": 0.2862, | |
| "num_tokens": 2586517510.0, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 2.3092687470889612, | |
| "grad_norm": 0.18544751395418727, | |
| "learning_rate": 1.2754573696927858e-05, | |
| "loss": 0.2777, | |
| "num_tokens": 2591760390.0, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 2.3139264089427107, | |
| "grad_norm": 0.18436137925070303, | |
| "learning_rate": 1.2668277528477738e-05, | |
| "loss": 0.2864, | |
| "num_tokens": 2597003270.0, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 2.3185840707964602, | |
| "grad_norm": 0.19349975500709696, | |
| "learning_rate": 1.2581981360027614e-05, | |
| "loss": 0.2882, | |
| "num_tokens": 2602243278.0, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 2.3232417326502097, | |
| "grad_norm": 0.17703097751891092, | |
| "learning_rate": 1.2495685191577494e-05, | |
| "loss": 0.2789, | |
| "num_tokens": 2607486158.0, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 2.3278993945039588, | |
| "grad_norm": 0.1659083165730546, | |
| "learning_rate": 1.2409389023127374e-05, | |
| "loss": 0.2711, | |
| "num_tokens": 2612655868.0, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 2.3325570563577083, | |
| "grad_norm": 0.18005815751092819, | |
| "learning_rate": 1.2323092854677252e-05, | |
| "loss": 0.2798, | |
| "num_tokens": 2617898748.0, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 2.3372147182114578, | |
| "grad_norm": 0.1780411722567769, | |
| "learning_rate": 1.2236796686227132e-05, | |
| "loss": 0.279, | |
| "num_tokens": 2623141628.0, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 2.3418723800652073, | |
| "grad_norm": 0.19086008988031192, | |
| "learning_rate": 1.215050051777701e-05, | |
| "loss": 0.2802, | |
| "num_tokens": 2628384508.0, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 2.3465300419189568, | |
| "grad_norm": 0.19329590664733318, | |
| "learning_rate": 1.206420434932689e-05, | |
| "loss": 0.2826, | |
| "num_tokens": 2633569752.0, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 2.3511877037727063, | |
| "grad_norm": 0.18252646989220042, | |
| "learning_rate": 1.1977908180876769e-05, | |
| "loss": 0.2832, | |
| "num_tokens": 2638812632.0, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 2.3558453656264557, | |
| "grad_norm": 0.18010842531622825, | |
| "learning_rate": 1.1891612012426649e-05, | |
| "loss": 0.2809, | |
| "num_tokens": 2644055512.0, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 2.360503027480205, | |
| "grad_norm": 0.1868027557205583, | |
| "learning_rate": 1.1805315843976528e-05, | |
| "loss": 0.2734, | |
| "num_tokens": 2649298392.0, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 2.3651606893339543, | |
| "grad_norm": 0.1781498425771034, | |
| "learning_rate": 1.1719019675526408e-05, | |
| "loss": 0.271, | |
| "num_tokens": 2654541272.0, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 2.369818351187704, | |
| "grad_norm": 0.1752070240282013, | |
| "learning_rate": 1.1632723507076287e-05, | |
| "loss": 0.2835, | |
| "num_tokens": 2659784152.0, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 2.3744760130414533, | |
| "grad_norm": 0.18754915348019258, | |
| "learning_rate": 1.1546427338626167e-05, | |
| "loss": 0.2828, | |
| "num_tokens": 2665027032.0, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 2.3791336748952028, | |
| "grad_norm": 0.17900504501732425, | |
| "learning_rate": 1.1460131170176045e-05, | |
| "loss": 0.2801, | |
| "num_tokens": 2670269912.0, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 2.383791336748952, | |
| "grad_norm": 0.229836582847559, | |
| "learning_rate": 1.1373835001725925e-05, | |
| "loss": 0.2799, | |
| "num_tokens": 2675512792.0, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 2.3884489986027013, | |
| "grad_norm": 0.1924929254146652, | |
| "learning_rate": 1.1287538833275803e-05, | |
| "loss": 0.2811, | |
| "num_tokens": 2680749892.0, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 2.393106660456451, | |
| "grad_norm": 0.17965080241880685, | |
| "learning_rate": 1.1201242664825683e-05, | |
| "loss": 0.2843, | |
| "num_tokens": 2685992772.0, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 2.3977643223102003, | |
| "grad_norm": 0.20025937418135847, | |
| "learning_rate": 1.1114946496375561e-05, | |
| "loss": 0.2823, | |
| "num_tokens": 2691235652.0, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 2.40242198416395, | |
| "grad_norm": 0.18203158861968113, | |
| "learning_rate": 1.1028650327925441e-05, | |
| "loss": 0.2759, | |
| "num_tokens": 2696478532.0, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 2.4070796460176993, | |
| "grad_norm": 0.17998581262118785, | |
| "learning_rate": 1.094235415947532e-05, | |
| "loss": 0.2782, | |
| "num_tokens": 2701721412.0, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 2.4117373078714484, | |
| "grad_norm": 0.16334776454467836, | |
| "learning_rate": 1.0856057991025199e-05, | |
| "loss": 0.2716, | |
| "num_tokens": 2706964292.0, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 2.416394969725198, | |
| "grad_norm": 0.1812811314939578, | |
| "learning_rate": 1.0769761822575077e-05, | |
| "loss": 0.2869, | |
| "num_tokens": 2712088070.0, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 2.4210526315789473, | |
| "grad_norm": 0.17853054704775273, | |
| "learning_rate": 1.0683465654124957e-05, | |
| "loss": 0.2826, | |
| "num_tokens": 2717309806.0, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 2.425710293432697, | |
| "grad_norm": 0.180788936493665, | |
| "learning_rate": 1.0597169485674835e-05, | |
| "loss": 0.2756, | |
| "num_tokens": 2722552686.0, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 2.4303679552864463, | |
| "grad_norm": 0.17731436864306924, | |
| "learning_rate": 1.0510873317224715e-05, | |
| "loss": 0.2731, | |
| "num_tokens": 2727795566.0, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 2.4350256171401954, | |
| "grad_norm": 0.17960486488455507, | |
| "learning_rate": 1.0424577148774595e-05, | |
| "loss": 0.2782, | |
| "num_tokens": 2733038446.0, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 2.439683278993945, | |
| "grad_norm": 0.1792907498644809, | |
| "learning_rate": 1.0338280980324475e-05, | |
| "loss": 0.2804, | |
| "num_tokens": 2738249702.0, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 2.4443409408476944, | |
| "grad_norm": 0.17053354625082628, | |
| "learning_rate": 1.0251984811874353e-05, | |
| "loss": 0.2743, | |
| "num_tokens": 2743492582.0, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 2.448998602701444, | |
| "grad_norm": 0.17441328465967873, | |
| "learning_rate": 1.0165688643424233e-05, | |
| "loss": 0.2761, | |
| "num_tokens": 2748735462.0, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 2.4536562645551934, | |
| "grad_norm": 0.17029393117188185, | |
| "learning_rate": 1.0079392474974112e-05, | |
| "loss": 0.2812, | |
| "num_tokens": 2753978342.0, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 2.458313926408943, | |
| "grad_norm": 0.1835972130923889, | |
| "learning_rate": 9.993096306523992e-06, | |
| "loss": 0.2818, | |
| "num_tokens": 2759221222.0, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 2.4629715882626924, | |
| "grad_norm": 0.18283495611721226, | |
| "learning_rate": 9.90680013807387e-06, | |
| "loss": 0.282, | |
| "num_tokens": 2764464102.0, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 2.4676292501164414, | |
| "grad_norm": 0.18170217653306106, | |
| "learning_rate": 9.82050396962375e-06, | |
| "loss": 0.2773, | |
| "num_tokens": 2769706982.0, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 2.472286911970191, | |
| "grad_norm": 0.18586287035909088, | |
| "learning_rate": 9.734207801173628e-06, | |
| "loss": 0.2796, | |
| "num_tokens": 2774879410.0, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 2.4769445738239404, | |
| "grad_norm": 0.18666261157849295, | |
| "learning_rate": 9.647911632723508e-06, | |
| "loss": 0.2732, | |
| "num_tokens": 2780111158.0, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 2.48160223567769, | |
| "grad_norm": 0.1803840456523778, | |
| "learning_rate": 9.561615464273386e-06, | |
| "loss": 0.2779, | |
| "num_tokens": 2785354038.0, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 2.4862598975314394, | |
| "grad_norm": 0.18478228521070383, | |
| "learning_rate": 9.475319295823266e-06, | |
| "loss": 0.2778, | |
| "num_tokens": 2790596918.0, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 2.4909175593851884, | |
| "grad_norm": 0.1670265570018267, | |
| "learning_rate": 9.389023127373144e-06, | |
| "loss": 0.2749, | |
| "num_tokens": 2795839798.0, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 2.495575221238938, | |
| "grad_norm": 0.16840048298285515, | |
| "learning_rate": 9.302726958923024e-06, | |
| "loss": 0.2749, | |
| "num_tokens": 2801082678.0, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 2.5002328830926874, | |
| "grad_norm": 0.19161204244755625, | |
| "learning_rate": 9.216430790472904e-06, | |
| "loss": 0.2807, | |
| "num_tokens": 2806325558.0, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 2.504890544946437, | |
| "grad_norm": 0.18267704276009222, | |
| "learning_rate": 9.130134622022784e-06, | |
| "loss": 0.2715, | |
| "num_tokens": 2811568438.0, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 2.5095482068001864, | |
| "grad_norm": 0.18672959028829378, | |
| "learning_rate": 9.043838453572662e-06, | |
| "loss": 0.2764, | |
| "num_tokens": 2816811318.0, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 2.514205868653936, | |
| "grad_norm": 0.1805982726001966, | |
| "learning_rate": 8.957542285122542e-06, | |
| "loss": 0.273, | |
| "num_tokens": 2822054198.0, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 2.5188635305076854, | |
| "grad_norm": 0.1952787161603713, | |
| "learning_rate": 8.87124611667242e-06, | |
| "loss": 0.2806, | |
| "num_tokens": 2827297078.0, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 2.5235211923614345, | |
| "grad_norm": 0.17806032000949407, | |
| "learning_rate": 8.7849499482223e-06, | |
| "loss": 0.2777, | |
| "num_tokens": 2832539958.0, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 2.528178854215184, | |
| "grad_norm": 0.18515081643064224, | |
| "learning_rate": 8.698653779772179e-06, | |
| "loss": 0.2798, | |
| "num_tokens": 2837782838.0, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 2.5328365160689335, | |
| "grad_norm": 0.1768553639096686, | |
| "learning_rate": 8.612357611322058e-06, | |
| "loss": 0.2877, | |
| "num_tokens": 2843025718.0, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 2.537494177922683, | |
| "grad_norm": 0.17947697526797882, | |
| "learning_rate": 8.526061442871937e-06, | |
| "loss": 0.2786, | |
| "num_tokens": 2848268598.0, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 2.542151839776432, | |
| "grad_norm": 0.17998722843224282, | |
| "learning_rate": 8.439765274421817e-06, | |
| "loss": 0.2872, | |
| "num_tokens": 2853458630.0, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 2.5468095016301815, | |
| "grad_norm": 0.17898244143789072, | |
| "learning_rate": 8.353469105971695e-06, | |
| "loss": 0.2733, | |
| "num_tokens": 2858701510.0, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 2.551467163483931, | |
| "grad_norm": 0.1817182224750741, | |
| "learning_rate": 8.267172937521575e-06, | |
| "loss": 0.2731, | |
| "num_tokens": 2863944390.0, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 2.5561248253376805, | |
| "grad_norm": 0.18586878309021465, | |
| "learning_rate": 8.180876769071453e-06, | |
| "loss": 0.2777, | |
| "num_tokens": 2869187270.0, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 2.56078248719143, | |
| "grad_norm": 0.17539350457646027, | |
| "learning_rate": 8.094580600621333e-06, | |
| "loss": 0.2764, | |
| "num_tokens": 2874430150.0, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 2.5654401490451795, | |
| "grad_norm": 0.1673083327707559, | |
| "learning_rate": 8.008284432171211e-06, | |
| "loss": 0.2737, | |
| "num_tokens": 2879673030.0, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 2.570097810898929, | |
| "grad_norm": 0.18684551753231196, | |
| "learning_rate": 7.921988263721091e-06, | |
| "loss": 0.2782, | |
| "num_tokens": 2884915910.0, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 2.574755472752678, | |
| "grad_norm": 0.19020695981801614, | |
| "learning_rate": 7.835692095270971e-06, | |
| "loss": 0.275, | |
| "num_tokens": 2890124422.0, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 2.5794131346064275, | |
| "grad_norm": 0.17805547331290397, | |
| "learning_rate": 7.749395926820851e-06, | |
| "loss": 0.2757, | |
| "num_tokens": 2895367302.0, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 2.584070796460177, | |
| "grad_norm": 0.17977332146759065, | |
| "learning_rate": 7.663099758370729e-06, | |
| "loss": 0.2721, | |
| "num_tokens": 2900606020.0, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 2.5887284583139265, | |
| "grad_norm": 0.1766150912361693, | |
| "learning_rate": 7.576803589920608e-06, | |
| "loss": 0.281, | |
| "num_tokens": 2905848900.0, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 2.5933861201676756, | |
| "grad_norm": 0.17820833865743285, | |
| "learning_rate": 7.490507421470487e-06, | |
| "loss": 0.2817, | |
| "num_tokens": 2911048450.0, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 2.598043782021425, | |
| "grad_norm": 0.1650203673857743, | |
| "learning_rate": 7.4042112530203655e-06, | |
| "loss": 0.2842, | |
| "num_tokens": 2916231888.0, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 2.6027014438751745, | |
| "grad_norm": 0.17928450647076113, | |
| "learning_rate": 7.317915084570245e-06, | |
| "loss": 0.2777, | |
| "num_tokens": 2921474768.0, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 2.607359105728924, | |
| "grad_norm": 0.1781766502241174, | |
| "learning_rate": 7.231618916120124e-06, | |
| "loss": 0.279, | |
| "num_tokens": 2926717648.0, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 2.6120167675826735, | |
| "grad_norm": 0.17182761683076658, | |
| "learning_rate": 7.1453227476700035e-06, | |
| "loss": 0.2771, | |
| "num_tokens": 2931960528.0, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 2.616674429436423, | |
| "grad_norm": 0.18611149574471159, | |
| "learning_rate": 7.059026579219883e-06, | |
| "loss": 0.2784, | |
| "num_tokens": 2937181824.0, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 2.6213320912901725, | |
| "grad_norm": 0.16212620368705455, | |
| "learning_rate": 6.9727304107697625e-06, | |
| "loss": 0.2717, | |
| "num_tokens": 2942424704.0, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 2.625989753143922, | |
| "grad_norm": 0.17641814838923287, | |
| "learning_rate": 6.886434242319641e-06, | |
| "loss": 0.2809, | |
| "num_tokens": 2947667584.0, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 2.630647414997671, | |
| "grad_norm": 0.17740627188494387, | |
| "learning_rate": 6.800138073869521e-06, | |
| "loss": 0.2829, | |
| "num_tokens": 2952910464.0, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 2.6353050768514206, | |
| "grad_norm": 0.2012596822222485, | |
| "learning_rate": 6.713841905419399e-06, | |
| "loss": 0.2755, | |
| "num_tokens": 2958153344.0, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 2.63996273870517, | |
| "grad_norm": 0.18480054290800957, | |
| "learning_rate": 6.627545736969279e-06, | |
| "loss": 0.2837, | |
| "num_tokens": 2963396224.0, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 2.6446204005589196, | |
| "grad_norm": 0.16775213311974657, | |
| "learning_rate": 6.541249568519157e-06, | |
| "loss": 0.2872, | |
| "num_tokens": 2968590242.0, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 2.6492780624126686, | |
| "grad_norm": 0.1833007391135205, | |
| "learning_rate": 6.454953400069037e-06, | |
| "loss": 0.2785, | |
| "num_tokens": 2973833122.0, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 2.653935724266418, | |
| "grad_norm": 0.1714979857927384, | |
| "learning_rate": 6.368657231618916e-06, | |
| "loss": 0.2836, | |
| "num_tokens": 2979031028.0, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 2.6585933861201676, | |
| "grad_norm": 0.18119968242684117, | |
| "learning_rate": 6.282361063168796e-06, | |
| "loss": 0.274, | |
| "num_tokens": 2984273908.0, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 2.663251047973917, | |
| "grad_norm": 0.17537137823131904, | |
| "learning_rate": 6.196064894718675e-06, | |
| "loss": 0.2808, | |
| "num_tokens": 2989434158.0, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 2.6679087098276666, | |
| "grad_norm": 0.16955168602232965, | |
| "learning_rate": 6.109768726268554e-06, | |
| "loss": 0.2796, | |
| "num_tokens": 2994677038.0, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 2.672566371681416, | |
| "grad_norm": 0.177439891932718, | |
| "learning_rate": 6.023472557818433e-06, | |
| "loss": 0.2775, | |
| "num_tokens": 2999919918.0, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 2.6772240335351656, | |
| "grad_norm": 0.16927196197129377, | |
| "learning_rate": 5.937176389368312e-06, | |
| "loss": 0.2773, | |
| "num_tokens": 3005147388.0, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 2.6818816953889146, | |
| "grad_norm": 0.17800360796517303, | |
| "learning_rate": 5.850880220918191e-06, | |
| "loss": 0.2787, | |
| "num_tokens": 3010305506.0, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 2.686539357242664, | |
| "grad_norm": 0.17121152652168495, | |
| "learning_rate": 5.76458405246807e-06, | |
| "loss": 0.2854, | |
| "num_tokens": 3015490322.0, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 2.6911970190964136, | |
| "grad_norm": 0.17111735656894042, | |
| "learning_rate": 5.67828788401795e-06, | |
| "loss": 0.2768, | |
| "num_tokens": 3020733202.0, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 2.695854680950163, | |
| "grad_norm": 0.17999311020181102, | |
| "learning_rate": 5.591991715567829e-06, | |
| "loss": 0.2743, | |
| "num_tokens": 3025976082.0, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 2.700512342803912, | |
| "grad_norm": 0.1637780954417033, | |
| "learning_rate": 5.5056955471177085e-06, | |
| "loss": 0.2777, | |
| "num_tokens": 3031218962.0, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 2.7051700046576617, | |
| "grad_norm": 0.1614855528244564, | |
| "learning_rate": 5.4193993786675876e-06, | |
| "loss": 0.2707, | |
| "num_tokens": 3036461842.0, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 2.709827666511411, | |
| "grad_norm": 0.16781255756959587, | |
| "learning_rate": 5.333103210217467e-06, | |
| "loss": 0.2767, | |
| "num_tokens": 3041676422.0, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 2.7144853283651607, | |
| "grad_norm": 0.1895011409768443, | |
| "learning_rate": 5.246807041767346e-06, | |
| "loss": 0.2834, | |
| "num_tokens": 3046901580.0, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 2.71914299021891, | |
| "grad_norm": 0.17175060439126638, | |
| "learning_rate": 5.160510873317225e-06, | |
| "loss": 0.2751, | |
| "num_tokens": 3052144460.0, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 2.7238006520726596, | |
| "grad_norm": 0.17156275104582772, | |
| "learning_rate": 5.074214704867105e-06, | |
| "loss": 0.2836, | |
| "num_tokens": 3057357558.0, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 2.728458313926409, | |
| "grad_norm": 0.16284461952961435, | |
| "learning_rate": 4.987918536416984e-06, | |
| "loss": 0.2821, | |
| "num_tokens": 3062600438.0, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 2.7331159757801586, | |
| "grad_norm": 0.16866216119106014, | |
| "learning_rate": 4.901622367966863e-06, | |
| "loss": 0.2771, | |
| "num_tokens": 3067843318.0, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 2.7377736376339077, | |
| "grad_norm": 0.17571301338118883, | |
| "learning_rate": 4.815326199516742e-06, | |
| "loss": 0.2777, | |
| "num_tokens": 3073086198.0, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 2.742431299487657, | |
| "grad_norm": 0.1748891230324752, | |
| "learning_rate": 4.729030031066621e-06, | |
| "loss": 0.286, | |
| "num_tokens": 3078319256.0, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 2.7470889613414067, | |
| "grad_norm": 0.16817756974824372, | |
| "learning_rate": 4.6427338626165e-06, | |
| "loss": 0.2742, | |
| "num_tokens": 3083562136.0, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 2.751746623195156, | |
| "grad_norm": 0.18159529130785648, | |
| "learning_rate": 4.556437694166379e-06, | |
| "loss": 0.2781, | |
| "num_tokens": 3088805016.0, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 2.7564042850489052, | |
| "grad_norm": 0.17575730830312236, | |
| "learning_rate": 4.470141525716258e-06, | |
| "loss": 0.2808, | |
| "num_tokens": 3094047896.0, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 2.7610619469026547, | |
| "grad_norm": 0.1603378644499192, | |
| "learning_rate": 4.383845357266138e-06, | |
| "loss": 0.2797, | |
| "num_tokens": 3099290776.0, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 2.765719608756404, | |
| "grad_norm": 0.16767802662920747, | |
| "learning_rate": 4.297549188816017e-06, | |
| "loss": 0.2773, | |
| "num_tokens": 3104533656.0, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 2.7703772706101537, | |
| "grad_norm": 0.18230969031536753, | |
| "learning_rate": 4.211253020365896e-06, | |
| "loss": 0.2763, | |
| "num_tokens": 3109751024.0, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 2.775034932463903, | |
| "grad_norm": 0.17676504498736173, | |
| "learning_rate": 4.124956851915775e-06, | |
| "loss": 0.2758, | |
| "num_tokens": 3114993904.0, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 2.7796925943176527, | |
| "grad_norm": 0.1755017514383313, | |
| "learning_rate": 4.0386606834656544e-06, | |
| "loss": 0.283, | |
| "num_tokens": 3120236784.0, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 2.784350256171402, | |
| "grad_norm": 0.16678856809979542, | |
| "learning_rate": 3.9523645150155335e-06, | |
| "loss": 0.279, | |
| "num_tokens": 3125461726.0, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 2.7890079180251512, | |
| "grad_norm": 0.17079025329063813, | |
| "learning_rate": 3.8660683465654126e-06, | |
| "loss": 0.2756, | |
| "num_tokens": 3130604236.0, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 2.7936655798789007, | |
| "grad_norm": 0.16866357815528293, | |
| "learning_rate": 3.779772178115292e-06, | |
| "loss": 0.2815, | |
| "num_tokens": 3135847116.0, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 2.7983232417326502, | |
| "grad_norm": 0.165358000972932, | |
| "learning_rate": 3.693476009665171e-06, | |
| "loss": 0.2749, | |
| "num_tokens": 3141089996.0, | |
| "step": 3005 | |
| }, | |
| { | |
| "epoch": 2.8029809035863997, | |
| "grad_norm": 0.1665672180893357, | |
| "learning_rate": 3.6071798412150506e-06, | |
| "loss": 0.2829, | |
| "num_tokens": 3146332876.0, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 2.807638565440149, | |
| "grad_norm": 0.16668973224451872, | |
| "learning_rate": 3.5208836727649297e-06, | |
| "loss": 0.2675, | |
| "num_tokens": 3151575756.0, | |
| "step": 3015 | |
| }, | |
| { | |
| "epoch": 2.8122962272938983, | |
| "grad_norm": 0.16763276386629677, | |
| "learning_rate": 3.434587504314809e-06, | |
| "loss": 0.2802, | |
| "num_tokens": 3156818636.0, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 2.8169538891476478, | |
| "grad_norm": 0.1606503796129178, | |
| "learning_rate": 3.348291335864688e-06, | |
| "loss": 0.2762, | |
| "num_tokens": 3162007886.0, | |
| "step": 3025 | |
| }, | |
| { | |
| "epoch": 2.8216115510013973, | |
| "grad_norm": 0.16082077134672, | |
| "learning_rate": 3.2619951674145674e-06, | |
| "loss": 0.2755, | |
| "num_tokens": 3167202338.0, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 2.8262692128551468, | |
| "grad_norm": 0.171770003288442, | |
| "learning_rate": 3.1756989989644464e-06, | |
| "loss": 0.2776, | |
| "num_tokens": 3172445218.0, | |
| "step": 3035 | |
| }, | |
| { | |
| "epoch": 2.8309268747088963, | |
| "grad_norm": 0.1781635025885573, | |
| "learning_rate": 3.089402830514325e-06, | |
| "loss": 0.2838, | |
| "num_tokens": 3177682714.0, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 2.8355845365626458, | |
| "grad_norm": 0.16224120130335926, | |
| "learning_rate": 3.0031066620642046e-06, | |
| "loss": 0.2758, | |
| "num_tokens": 3182925594.0, | |
| "step": 3045 | |
| }, | |
| { | |
| "epoch": 2.840242198416395, | |
| "grad_norm": 0.1773258605032563, | |
| "learning_rate": 2.9168104936140837e-06, | |
| "loss": 0.2758, | |
| "num_tokens": 3188162430.0, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 2.8448998602701443, | |
| "grad_norm": 0.18113357370834676, | |
| "learning_rate": 2.8305143251639627e-06, | |
| "loss": 0.2822, | |
| "num_tokens": 3193342410.0, | |
| "step": 3055 | |
| }, | |
| { | |
| "epoch": 2.849557522123894, | |
| "grad_norm": 0.16497463306822172, | |
| "learning_rate": 2.7442181567138422e-06, | |
| "loss": 0.2843, | |
| "num_tokens": 3198585290.0, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 2.8542151839776433, | |
| "grad_norm": 0.16558447816953742, | |
| "learning_rate": 2.6579219882637213e-06, | |
| "loss": 0.2812, | |
| "num_tokens": 3203814572.0, | |
| "step": 3065 | |
| }, | |
| { | |
| "epoch": 2.858872845831393, | |
| "grad_norm": 0.16072456677510413, | |
| "learning_rate": 2.5716258198136004e-06, | |
| "loss": 0.2706, | |
| "num_tokens": 3209051374.0, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 2.863530507685142, | |
| "grad_norm": 0.17362892342933103, | |
| "learning_rate": 2.4853296513634795e-06, | |
| "loss": 0.2779, | |
| "num_tokens": 3214294254.0, | |
| "step": 3075 | |
| }, | |
| { | |
| "epoch": 2.8681881695388913, | |
| "grad_norm": 0.17503150042214272, | |
| "learning_rate": 2.399033482913359e-06, | |
| "loss": 0.2711, | |
| "num_tokens": 3219537134.0, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 2.872845831392641, | |
| "grad_norm": 0.16119458676510087, | |
| "learning_rate": 2.312737314463238e-06, | |
| "loss": 0.2777, | |
| "num_tokens": 3224780014.0, | |
| "step": 3085 | |
| }, | |
| { | |
| "epoch": 2.8775034932463903, | |
| "grad_norm": 0.16699880421287658, | |
| "learning_rate": 2.226441146013117e-06, | |
| "loss": 0.284, | |
| "num_tokens": 3230001068.0, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 2.88216115510014, | |
| "grad_norm": 0.16627934804758981, | |
| "learning_rate": 2.140144977562996e-06, | |
| "loss": 0.2726, | |
| "num_tokens": 3235243948.0, | |
| "step": 3095 | |
| }, | |
| { | |
| "epoch": 2.8868188169538893, | |
| "grad_norm": 0.16396890517641147, | |
| "learning_rate": 2.0538488091128757e-06, | |
| "loss": 0.2792, | |
| "num_tokens": 3240486828.0, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 2.891476478807639, | |
| "grad_norm": 0.17144761011016538, | |
| "learning_rate": 1.9675526406627547e-06, | |
| "loss": 0.2802, | |
| "num_tokens": 3245729708.0, | |
| "step": 3105 | |
| }, | |
| { | |
| "epoch": 2.896134140661388, | |
| "grad_norm": 0.15954650975862936, | |
| "learning_rate": 1.8812564722126338e-06, | |
| "loss": 0.272, | |
| "num_tokens": 3250972588.0, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 2.9007918025151374, | |
| "grad_norm": 0.16799956358128115, | |
| "learning_rate": 1.794960303762513e-06, | |
| "loss": 0.27, | |
| "num_tokens": 3256215468.0, | |
| "step": 3115 | |
| }, | |
| { | |
| "epoch": 2.905449464368887, | |
| "grad_norm": 0.1704354543786866, | |
| "learning_rate": 1.7086641353123924e-06, | |
| "loss": 0.2786, | |
| "num_tokens": 3261418642.0, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 2.9101071262226363, | |
| "grad_norm": 0.16467883928844412, | |
| "learning_rate": 1.6223679668622715e-06, | |
| "loss": 0.2804, | |
| "num_tokens": 3266596154.0, | |
| "step": 3125 | |
| }, | |
| { | |
| "epoch": 2.9147647880763854, | |
| "grad_norm": 0.16340339874234697, | |
| "learning_rate": 1.5360717984121505e-06, | |
| "loss": 0.2681, | |
| "num_tokens": 3271821252.0, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 2.919422449930135, | |
| "grad_norm": 0.163029895019225, | |
| "learning_rate": 1.4497756299620296e-06, | |
| "loss": 0.2761, | |
| "num_tokens": 3277064132.0, | |
| "step": 3135 | |
| }, | |
| { | |
| "epoch": 2.9240801117838844, | |
| "grad_norm": 0.16100787075368342, | |
| "learning_rate": 1.363479461511909e-06, | |
| "loss": 0.2832, | |
| "num_tokens": 3282307012.0, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 2.928737773637634, | |
| "grad_norm": 0.16743880483407425, | |
| "learning_rate": 1.277183293061788e-06, | |
| "loss": 0.288, | |
| "num_tokens": 3287546350.0, | |
| "step": 3145 | |
| }, | |
| { | |
| "epoch": 2.9333954354913834, | |
| "grad_norm": 0.1661902073480324, | |
| "learning_rate": 1.1908871246116673e-06, | |
| "loss": 0.2795, | |
| "num_tokens": 3292761750.0, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 2.938053097345133, | |
| "grad_norm": 0.2439107507732261, | |
| "learning_rate": 1.1045909561615463e-06, | |
| "loss": 0.2766, | |
| "num_tokens": 3297949422.0, | |
| "step": 3155 | |
| }, | |
| { | |
| "epoch": 2.9427107591988824, | |
| "grad_norm": 0.17255496530766432, | |
| "learning_rate": 1.0182947877114256e-06, | |
| "loss": 0.2792, | |
| "num_tokens": 3303192302.0, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 2.9473684210526314, | |
| "grad_norm": 0.16881455645613058, | |
| "learning_rate": 9.319986192613048e-07, | |
| "loss": 0.2812, | |
| "num_tokens": 3308435182.0, | |
| "step": 3165 | |
| }, | |
| { | |
| "epoch": 2.952026082906381, | |
| "grad_norm": 0.15822881722027485, | |
| "learning_rate": 8.45702450811184e-07, | |
| "loss": 0.276, | |
| "num_tokens": 3313678062.0, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 2.9566837447601304, | |
| "grad_norm": 0.16766083653742891, | |
| "learning_rate": 7.594062823610632e-07, | |
| "loss": 0.2805, | |
| "num_tokens": 3318920942.0, | |
| "step": 3175 | |
| }, | |
| { | |
| "epoch": 2.96134140661388, | |
| "grad_norm": 0.15870743459977205, | |
| "learning_rate": 6.731101139109423e-07, | |
| "loss": 0.2759, | |
| "num_tokens": 3324163822.0, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 2.9659990684676294, | |
| "grad_norm": 0.16551455321319547, | |
| "learning_rate": 5.868139454608215e-07, | |
| "loss": 0.2771, | |
| "num_tokens": 3329404888.0, | |
| "step": 3185 | |
| }, | |
| { | |
| "epoch": 2.9706567303213784, | |
| "grad_norm": 0.16483886795933497, | |
| "learning_rate": 5.005177770107007e-07, | |
| "loss": 0.2695, | |
| "num_tokens": 3334635936.0, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 2.975314392175128, | |
| "grad_norm": 0.1784458378960505, | |
| "learning_rate": 4.142216085605799e-07, | |
| "loss": 0.2815, | |
| "num_tokens": 3339878816.0, | |
| "step": 3195 | |
| }, | |
| { | |
| "epoch": 2.9799720540288774, | |
| "grad_norm": 0.16429742261043642, | |
| "learning_rate": 3.279254401104591e-07, | |
| "loss": 0.2811, | |
| "num_tokens": 3345029172.0, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 2.984629715882627, | |
| "grad_norm": 0.1635147457528966, | |
| "learning_rate": 2.416292716603383e-07, | |
| "loss": 0.2813, | |
| "num_tokens": 3350272052.0, | |
| "step": 3205 | |
| }, | |
| { | |
| "epoch": 2.9892873777363764, | |
| "grad_norm": 0.16254857607926765, | |
| "learning_rate": 1.5533310321021747e-07, | |
| "loss": 0.2656, | |
| "num_tokens": 3355456072.0, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 2.993945039590126, | |
| "grad_norm": 0.18226207727928628, | |
| "learning_rate": 6.903693476009665e-08, | |
| "loss": 0.268, | |
| "num_tokens": 3360698952.0, | |
| "step": 3215 | |
| }, | |
| { | |
| "epoch": 2.9976711690731253, | |
| "num_tokens": 3364893256.0, | |
| "step": 3219, | |
| "total_flos": 2.754977641940386e+18, | |
| "train_loss": 0.41265300498611623, | |
| "train_runtime": 32686.4433, | |
| "train_samples_per_second": 1.576, | |
| "train_steps_per_second": 0.098 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 3219, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.754977641940386e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
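
The listing above is a Hugging Face `transformers` `trainer_state.json`: each `log_history` record is emitted every `logging_steps` (here 5) optimizer steps and carries the running `loss`, `grad_norm`, `learning_rate`, and cumulative `num_tokens`, while the final record summarizes the whole run (`train_loss`, `train_runtime`, throughput). Below is a minimal sketch of how one might load this file and plot the loss curve; the on-disk filename `trainer_state.json`, the output path `loss_curve.png`, and the use of `matplotlib` are illustrative assumptions, not part of the original file.

```python
# Minimal sketch: parse the trainer state shown above and plot loss vs. step.
# Assumes the file is saved as trainer_state.json and matplotlib is installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:  # hypothetical path to this file
    state = json.load(f)

# Keep only the per-interval log records; the final summary entry
# reports "train_loss" instead of "loss" and is skipped here.
records = [r for r in state["log_history"] if "loss" in r]

steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"{state['global_step']} steps, {state['epoch']:.3f} epochs")
plt.savefig("loss_curve.png")
```

Note that the run-average `train_loss` (0.4127) sits well above the final per-interval losses (~0.27-0.28) because it averages over the entire schedule, including the high-loss steps at the start of training.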