| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.0, |
| "eval_steps": 500, |
| "global_step": 1084, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0009225092250922509, |
| "grad_norm": 9.879005196470441, |
| "learning_rate": 1.8348623853211012e-07, |
| "loss": 1.1608, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.004612546125461255, |
| "grad_norm": 7.655910446918255, |
| "learning_rate": 9.174311926605506e-07, |
| "loss": 1.1303, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.00922509225092251, |
| "grad_norm": 3.826483404781423, |
| "learning_rate": 1.8348623853211011e-06, |
| "loss": 1.057, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.013837638376383764, |
| "grad_norm": 2.5297095573033848, |
| "learning_rate": 2.7522935779816517e-06, |
| "loss": 1.0238, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.01845018450184502, |
| "grad_norm": 2.076014984102216, |
| "learning_rate": 3.6697247706422022e-06, |
| "loss": 0.9931, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.023062730627306273, |
| "grad_norm": 2.2473094022560702, |
| "learning_rate": 4.587155963302753e-06, |
| "loss": 0.982, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.027675276752767528, |
| "grad_norm": 2.2077005499118263, |
| "learning_rate": 5.504587155963303e-06, |
| "loss": 0.9799, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.03228782287822878, |
| "grad_norm": 2.681967179141725, |
| "learning_rate": 6.422018348623854e-06, |
| "loss": 0.9813, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.03690036900369004, |
| "grad_norm": 2.009267803614227, |
| "learning_rate": 7.3394495412844045e-06, |
| "loss": 0.9806, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.04151291512915129, |
| "grad_norm": 2.325805328726729, |
| "learning_rate": 8.256880733944956e-06, |
| "loss": 0.9864, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.046125461254612546, |
| "grad_norm": 1.9896142939107335, |
| "learning_rate": 9.174311926605506e-06, |
| "loss": 0.9964, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.0507380073800738, |
| "grad_norm": 2.1738056440029596, |
| "learning_rate": 1.0091743119266055e-05, |
| "loss": 0.9907, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.055350553505535055, |
| "grad_norm": 2.2084206608201318, |
| "learning_rate": 1.1009174311926607e-05, |
| "loss": 0.9938, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.05996309963099631, |
| "grad_norm": 1.9122756224073667, |
| "learning_rate": 1.1926605504587156e-05, |
| "loss": 1.0037, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.06457564575645756, |
| "grad_norm": 1.9502365708956255, |
| "learning_rate": 1.2844036697247708e-05, |
| "loss": 0.9862, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.06918819188191883, |
| "grad_norm": 2.075499698627552, |
| "learning_rate": 1.3761467889908258e-05, |
| "loss": 1.0033, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.07380073800738007, |
| "grad_norm": 2.1545392779152945, |
| "learning_rate": 1.4678899082568809e-05, |
| "loss": 1.017, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.07841328413284133, |
| "grad_norm": 2.605188021304721, |
| "learning_rate": 1.559633027522936e-05, |
| "loss": 1.0047, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.08302583025830258, |
| "grad_norm": 2.0541459666048283, |
| "learning_rate": 1.6513761467889912e-05, |
| "loss": 0.9979, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.08763837638376384, |
| "grad_norm": 2.622653900846617, |
| "learning_rate": 1.743119266055046e-05, |
| "loss": 0.9893, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.09225092250922509, |
| "grad_norm": 2.098407387067215, |
| "learning_rate": 1.834862385321101e-05, |
| "loss": 1.0365, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.09686346863468635, |
| "grad_norm": 2.666408541426136, |
| "learning_rate": 1.9266055045871563e-05, |
| "loss": 1.0105, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.1014760147601476, |
| "grad_norm": 1.8024914459736627, |
| "learning_rate": 1.9999948088910656e-05, |
| "loss": 1.0134, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.10608856088560886, |
| "grad_norm": 2.278493425852852, |
| "learning_rate": 1.9998131257372878e-05, |
| "loss": 1.0248, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.11070110701107011, |
| "grad_norm": 1.7012270945345256, |
| "learning_rate": 1.999371941029485e-05, |
| "loss": 1.0151, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.11531365313653137, |
| "grad_norm": 1.9857838398529595, |
| "learning_rate": 1.9986713692771732e-05, |
| "loss": 1.0372, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.11992619926199262, |
| "grad_norm": 1.7843829233975885, |
| "learning_rate": 1.9977115923137912e-05, |
| "loss": 1.0293, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.12453874538745388, |
| "grad_norm": 1.8211048216505743, |
| "learning_rate": 1.9964928592495046e-05, |
| "loss": 1.0304, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.12915129151291513, |
| "grad_norm": 1.9992651492979876, |
| "learning_rate": 1.9950154864065497e-05, |
| "loss": 1.0296, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.13376383763837638, |
| "grad_norm": 2.0563450724498926, |
| "learning_rate": 1.993279857237133e-05, |
| "loss": 1.0318, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.13837638376383765, |
| "grad_norm": 2.1600302078293923, |
| "learning_rate": 1.9912864222239045e-05, |
| "loss": 1.0394, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.1429889298892989, |
| "grad_norm": 2.3716133887504673, |
| "learning_rate": 1.9890356987630362e-05, |
| "loss": 1.0208, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.14760147601476015, |
| "grad_norm": 1.9293133175631185, |
| "learning_rate": 1.986528271029931e-05, |
| "loss": 1.0235, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.1522140221402214, |
| "grad_norm": 1.7548574176466065, |
| "learning_rate": 1.9837647898276008e-05, |
| "loss": 1.0238, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.15682656826568267, |
| "grad_norm": 2.2129287691926702, |
| "learning_rate": 1.9807459724177497e-05, |
| "loss": 1.025, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.16143911439114392, |
| "grad_norm": 1.9948980962439093, |
| "learning_rate": 1.977472602334609e-05, |
| "loss": 1.0316, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.16605166051660517, |
| "grad_norm": 1.886711939815748, |
| "learning_rate": 1.973945529181572e-05, |
| "loss": 1.0337, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.1706642066420664, |
| "grad_norm": 1.774671605104065, |
| "learning_rate": 1.9701656684106764e-05, |
| "loss": 1.0397, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.1752767527675277, |
| "grad_norm": 1.6377775121369593, |
| "learning_rate": 1.9661340010850025e-05, |
| "loss": 1.0286, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.17988929889298894, |
| "grad_norm": 1.829002902538178, |
| "learning_rate": 1.9618515736240353e-05, |
| "loss": 1.0504, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.18450184501845018, |
| "grad_norm": 1.8662767259784019, |
| "learning_rate": 1.9573194975320672e-05, |
| "loss": 1.0368, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.18911439114391143, |
| "grad_norm": 3.33578824273974, |
| "learning_rate": 1.952538949109708e-05, |
| "loss": 1.0443, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.1937269372693727, |
| "grad_norm": 1.818065014179501, |
| "learning_rate": 1.9475111691485737e-05, |
| "loss": 1.0369, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.19833948339483395, |
| "grad_norm": 1.8877484997554483, |
| "learning_rate": 1.9422374626092414e-05, |
| "loss": 1.0316, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.2029520295202952, |
| "grad_norm": 1.866312455226494, |
| "learning_rate": 1.936719198282545e-05, |
| "loss": 1.0293, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.20756457564575645, |
| "grad_norm": 1.8191472868871426, |
| "learning_rate": 1.930957808434307e-05, |
| "loss": 1.025, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.21217712177121772, |
| "grad_norm": 2.224797932067502, |
| "learning_rate": 1.9249547884335917e-05, |
| "loss": 1.028, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.21678966789667897, |
| "grad_norm": 1.8603191584655618, |
| "learning_rate": 1.9187116963645845e-05, |
| "loss": 1.0341, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.22140221402214022, |
| "grad_norm": 1.756409158503403, |
| "learning_rate": 1.912230152622189e-05, |
| "loss": 1.03, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.22601476014760147, |
| "grad_norm": 1.7596177476062325, |
| "learning_rate": 1.9055118394914545e-05, |
| "loss": 1.0327, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.23062730627306274, |
| "grad_norm": 1.6039904446641322, |
| "learning_rate": 1.898558500710939e-05, |
| "loss": 1.0303, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.235239852398524, |
| "grad_norm": 1.6454680105378445, |
| "learning_rate": 1.891371941020121e-05, |
| "loss": 1.0521, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.23985239852398524, |
| "grad_norm": 1.6791233099458316, |
| "learning_rate": 1.88395402569098e-05, |
| "loss": 1.0154, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.2444649446494465, |
| "grad_norm": 1.6340752310845936, |
| "learning_rate": 1.8763066800438638e-05, |
| "loss": 1.0248, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.24907749077490776, |
| "grad_norm": 1.7484264554619282, |
| "learning_rate": 1.868431888947773e-05, |
| "loss": 1.0368, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.253690036900369, |
| "grad_norm": 1.5709778769682974, |
| "learning_rate": 1.860331696305188e-05, |
| "loss": 1.0268, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.25830258302583026, |
| "grad_norm": 1.6415817992690929, |
| "learning_rate": 1.852008204521572e-05, |
| "loss": 1.0199, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.2629151291512915, |
| "grad_norm": 1.5545392798101016, |
| "learning_rate": 1.8434635739596945e-05, |
| "loss": 1.0365, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.26752767527675275, |
| "grad_norm": 1.6274788922408019, |
| "learning_rate": 1.834700022378907e-05, |
| "loss": 1.0543, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.272140221402214, |
| "grad_norm": 1.5735104394639736, |
| "learning_rate": 1.825719824359524e-05, |
| "loss": 1.0392, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.2767527675276753, |
| "grad_norm": 1.5442792006248922, |
| "learning_rate": 1.816525310712456e-05, |
| "loss": 1.0095, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.28136531365313655, |
| "grad_norm": 1.5617245892166962, |
| "learning_rate": 1.8071188678742457e-05, |
| "loss": 1.0172, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.2859778597785978, |
| "grad_norm": 1.5675886717996073, |
| "learning_rate": 1.7975029372876706e-05, |
| "loss": 1.0211, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.29059040590405905, |
| "grad_norm": 1.5557645058846048, |
| "learning_rate": 1.787680014768065e-05, |
| "loss": 1.0436, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.2952029520295203, |
| "grad_norm": 1.8395883798421626, |
| "learning_rate": 1.777652649855531e-05, |
| "loss": 1.019, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.29981549815498154, |
| "grad_norm": 1.7135351165175936, |
| "learning_rate": 1.7674234451532065e-05, |
| "loss": 1.0311, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.3044280442804428, |
| "grad_norm": 1.6687475528877076, |
| "learning_rate": 1.7569950556517566e-05, |
| "loss": 1.051, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.30904059040590404, |
| "grad_norm": 1.5220155050824387, |
| "learning_rate": 1.7463701880402738e-05, |
| "loss": 1.0194, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.31365313653136534, |
| "grad_norm": 1.6022069289411618, |
| "learning_rate": 1.7355516000037555e-05, |
| "loss": 1.0278, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.3182656826568266, |
| "grad_norm": 1.6448864090084199, |
| "learning_rate": 1.7245420995073453e-05, |
| "loss": 1.04, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.32287822878228783, |
| "grad_norm": 1.6122752728238614, |
| "learning_rate": 1.7133445440675268e-05, |
| "loss": 1.0271, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.3274907749077491, |
| "grad_norm": 1.7368385968044937, |
| "learning_rate": 1.7019618400104572e-05, |
| "loss": 1.0261, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.33210332103321033, |
| "grad_norm": 1.4937346262516389, |
| "learning_rate": 1.6903969417176244e-05, |
| "loss": 1.0183, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.3367158671586716, |
| "grad_norm": 1.5392262396741352, |
| "learning_rate": 1.6786528508590436e-05, |
| "loss": 1.0333, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.3413284132841328, |
| "grad_norm": 1.5060160487077425, |
| "learning_rate": 1.666732615614169e-05, |
| "loss": 1.0202, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.3459409594095941, |
| "grad_norm": 1.4396593085630534, |
| "learning_rate": 1.6546393298807405e-05, |
| "loss": 1.0177, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.3505535055350554, |
| "grad_norm": 1.6333472702203904, |
| "learning_rate": 1.6423761324717636e-05, |
| "loss": 1.0231, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.3551660516605166, |
| "grad_norm": 1.504092861636445, |
| "learning_rate": 1.6299462063008272e-05, |
| "loss": 1.0158, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.35977859778597787, |
| "grad_norm": 1.5220081804343326, |
| "learning_rate": 1.61735277755598e-05, |
| "loss": 1.0234, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.3643911439114391, |
| "grad_norm": 1.895612280128563, |
| "learning_rate": 1.6045991148623752e-05, |
| "loss": 1.0198, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.36900369003690037, |
| "grad_norm": 1.4894155162032452, |
| "learning_rate": 1.5916885284338937e-05, |
| "loss": 1.0054, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.3736162361623616, |
| "grad_norm": 1.4521159052358987, |
| "learning_rate": 1.5786243692139826e-05, |
| "loss": 1.0269, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.37822878228782286, |
| "grad_norm": 1.61303511010769, |
| "learning_rate": 1.5654100280059155e-05, |
| "loss": 1.0036, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.3828413284132841, |
| "grad_norm": 1.4988881986858458, |
| "learning_rate": 1.5520489345927095e-05, |
| "loss": 1.0033, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.3874538745387454, |
| "grad_norm": 1.581025954581138, |
| "learning_rate": 1.538544556846925e-05, |
| "loss": 1.0252, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.39206642066420666, |
| "grad_norm": 1.456650501812658, |
| "learning_rate": 1.5249003998305787e-05, |
| "loss": 1.0138, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.3966789667896679, |
| "grad_norm": 1.539223747046445, |
| "learning_rate": 1.5111200048854055e-05, |
| "loss": 1.0196, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.40129151291512916, |
| "grad_norm": 1.5019379559489976, |
| "learning_rate": 1.4972069487137024e-05, |
| "loss": 1.0096, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.4059040590405904, |
| "grad_norm": 1.5153393705339384, |
| "learning_rate": 1.4831648424499953e-05, |
| "loss": 1.0158, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.41051660516605165, |
| "grad_norm": 1.529728627131339, |
| "learning_rate": 1.4689973307237687e-05, |
| "loss": 1.029, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.4151291512915129, |
| "grad_norm": 1.4498097509123067, |
| "learning_rate": 1.4547080907135024e-05, |
| "loss": 1.0128, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.41974169741697415, |
| "grad_norm": 1.4398642418367182, |
| "learning_rate": 1.4403008311922593e-05, |
| "loss": 1.0114, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.42435424354243545, |
| "grad_norm": 1.4334389023182947, |
| "learning_rate": 1.4257792915650728e-05, |
| "loss": 1.0129, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.4289667896678967, |
| "grad_norm": 1.3790653098835177, |
| "learning_rate": 1.4111472408983843e-05, |
| "loss": 1.0067, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.43357933579335795, |
| "grad_norm": 1.531778143299581, |
| "learning_rate": 1.3964084769417823e-05, |
| "loss": 1.0299, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.4381918819188192, |
| "grad_norm": 1.6468266668948526, |
| "learning_rate": 1.3815668251422953e-05, |
| "loss": 0.9948, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.44280442804428044, |
| "grad_norm": 1.4394058806179657, |
| "learning_rate": 1.3666261376514978e-05, |
| "loss": 0.9927, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.4474169741697417, |
| "grad_norm": 1.4631354881671865, |
| "learning_rate": 1.3515902923256832e-05, |
| "loss": 1.0085, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.45202952029520294, |
| "grad_norm": 1.5528017899308464, |
| "learning_rate": 1.3364631917193671e-05, |
| "loss": 1.0009, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.4566420664206642, |
| "grad_norm": 1.5641118900079227, |
| "learning_rate": 1.321248762072377e-05, |
| "loss": 1.0035, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.4612546125461255, |
| "grad_norm": 1.427538389522347, |
| "learning_rate": 1.3059509522907998e-05, |
| "loss": 0.9972, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.46586715867158673, |
| "grad_norm": 1.4027604883178342, |
| "learning_rate": 1.2905737329220394e-05, |
| "loss": 0.9964, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.470479704797048, |
| "grad_norm": 1.4818784249586863, |
| "learning_rate": 1.2751210951242636e-05, |
| "loss": 1.0051, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.47509225092250923, |
| "grad_norm": 1.4347005479102528, |
| "learning_rate": 1.2595970496304975e-05, |
| "loss": 0.9944, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.4797047970479705, |
| "grad_norm": 1.413444073621643, |
| "learning_rate": 1.2440056257076376e-05, |
| "loss": 1.0083, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.4843173431734317, |
| "grad_norm": 1.419019271785058, |
| "learning_rate": 1.2283508701106559e-05, |
| "loss": 0.9791, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.488929889298893, |
| "grad_norm": 1.4186181018875217, |
| "learning_rate": 1.2126368460322637e-05, |
| "loss": 1.009, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.4935424354243542, |
| "grad_norm": 1.3468021239882193, |
| "learning_rate": 1.1968676320483103e-05, |
| "loss": 0.9683, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.4981549815498155, |
| "grad_norm": 1.4514533004271843, |
| "learning_rate": 1.1810473210591882e-05, |
| "loss": 0.9858, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.5027675276752768, |
| "grad_norm": 1.4701086504136045, |
| "learning_rate": 1.1651800192275197e-05, |
| "loss": 0.98, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.507380073800738, |
| "grad_norm": 1.4064612801772798, |
| "learning_rate": 1.1492698449124042e-05, |
| "loss": 0.9912, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.5119926199261993, |
| "grad_norm": 1.4076865755772636, |
| "learning_rate": 1.1333209276004959e-05, |
| "loss": 0.996, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.5166051660516605, |
| "grad_norm": 1.4207553263975552, |
| "learning_rate": 1.1173374068341962e-05, |
| "loss": 1.0054, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.5212177121771218, |
| "grad_norm": 1.473170979274932, |
| "learning_rate": 1.1013234311372353e-05, |
| "loss": 0.9838, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.525830258302583, |
| "grad_norm": 1.5387009889923415, |
| "learning_rate": 1.0852831569379217e-05, |
| "loss": 0.9909, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.5304428044280443, |
| "grad_norm": 1.4627168813274178, |
| "learning_rate": 1.0692207474903421e-05, |
| "loss": 0.9911, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.5350553505535055, |
| "grad_norm": 1.3592006850484868, |
| "learning_rate": 1.0531403717937888e-05, |
| "loss": 0.9892, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.5396678966789668, |
| "grad_norm": 1.464660400563219, |
| "learning_rate": 1.037046203510694e-05, |
| "loss": 0.9769, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.544280442804428, |
| "grad_norm": 1.4164385056303843, |
| "learning_rate": 1.0209424198833571e-05, |
| "loss": 0.9867, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.5488929889298892, |
| "grad_norm": 1.4781644717790594, |
| "learning_rate": 1.0048332006497406e-05, |
| "loss": 0.9936, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.5535055350553506, |
| "grad_norm": 1.3752948746478606, |
| "learning_rate": 9.887227269586184e-06, |
| "loss": 0.9811, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.5581180811808119, |
| "grad_norm": 1.4619546545367863, |
| "learning_rate": 9.7261518028436e-06, |
| "loss": 0.9904, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.5627306273062731, |
| "grad_norm": 1.419965736763686, |
| "learning_rate": 9.565147413416266e-06, |
| "loss": 0.9783, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.5673431734317343, |
| "grad_norm": 1.350150125081973, |
| "learning_rate": 9.404255890002677e-06, |
| "loss": 0.997, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.5719557195571956, |
| "grad_norm": 1.2940289037264123, |
| "learning_rate": 9.243518992006944e-06, |
| "loss": 0.9738, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.5765682656826568, |
| "grad_norm": 1.3142331077158889, |
| "learning_rate": 9.082978438700138e-06, |
| "loss": 0.9682, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.5811808118081181, |
| "grad_norm": 1.4300227650124722, |
| "learning_rate": 8.922675898392072e-06, |
| "loss": 0.9669, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.5857933579335793, |
| "grad_norm": 1.356418938878727, |
| "learning_rate": 8.762652977616258e-06, |
| "loss": 0.9864, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.5904059040590406, |
| "grad_norm": 1.3879153993308309, |
| "learning_rate": 8.602951210330942e-06, |
| "loss": 0.966, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.5950184501845018, |
| "grad_norm": 1.3387503003861179, |
| "learning_rate": 8.443612047138965e-06, |
| "loss": 0.9821, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.5996309963099631, |
| "grad_norm": 1.4037652698540581, |
| "learning_rate": 8.284676844529258e-06, |
| "loss": 0.9453, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.6042435424354243, |
| "grad_norm": 1.2929012204631607, |
| "learning_rate": 8.126186854142752e-06, |
| "loss": 0.9524, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.6088560885608856, |
| "grad_norm": 1.3487500744758194, |
| "learning_rate": 7.968183212065537e-06, |
| "loss": 0.9646, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.6134686346863468, |
| "grad_norm": 1.333903786467664, |
| "learning_rate": 7.81070692815195e-06, |
| "loss": 0.9615, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.6180811808118081, |
| "grad_norm": 1.4027782099563988, |
| "learning_rate": 7.6537988753805e-06, |
| "loss": 0.9562, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.6226937269372693, |
| "grad_norm": 1.3329160407381975, |
| "learning_rate": 7.497499779245268e-06, |
| "loss": 0.9649, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.6273062730627307, |
| "grad_norm": 1.385053581233387, |
| "learning_rate": 7.3418502071856004e-06, |
| "loss": 0.973, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.6319188191881919, |
| "grad_norm": 1.3619701099182098, |
| "learning_rate": 7.186890558056836e-06, |
| "loss": 0.9529, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.6365313653136532, |
| "grad_norm": 1.7255836183681283, |
| "learning_rate": 7.0326610516447825e-06, |
| "loss": 0.9528, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.6411439114391144, |
| "grad_norm": 1.3895643302678875, |
| "learning_rate": 6.879201718226658e-06, |
| "loss": 0.9437, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.6457564575645757, |
| "grad_norm": 1.2635427072180987, |
| "learning_rate": 6.7265523881812335e-06, |
| "loss": 0.9566, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.6503690036900369, |
| "grad_norm": 1.2964967768976416, |
| "learning_rate": 6.574752681650864e-06, |
| "loss": 0.9423, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.6549815498154982, |
| "grad_norm": 1.3004483795560853, |
| "learning_rate": 6.423841998258069e-06, |
| "loss": 0.9489, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.6595940959409594, |
| "grad_norm": 1.3300508963972943, |
| "learning_rate": 6.273859506879365e-06, |
| "loss": 0.9678, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.6642066420664207, |
| "grad_norm": 1.335002114775608, |
| "learning_rate": 6.124844135478971e-06, |
| "loss": 0.9651, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.6688191881918819, |
| "grad_norm": 1.2821828510875994, |
| "learning_rate": 5.976834561005069e-06, |
| "loss": 0.96, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.6734317343173432, |
| "grad_norm": 1.3516698715915405, |
| "learning_rate": 5.829869199351188e-06, |
| "loss": 0.9489, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.6780442804428044, |
| "grad_norm": 1.3698081329045038, |
| "learning_rate": 5.68398619538536e-06, |
| "loss": 0.9432, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.6826568265682657, |
| "grad_norm": 1.3628137289234168, |
| "learning_rate": 5.53922341304961e-06, |
| "loss": 0.9378, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.6872693726937269, |
| "grad_norm": 1.4451005486429895, |
| "learning_rate": 5.39561842553239e-06, |
| "loss": 0.9759, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.6918819188191881, |
| "grad_norm": 1.34802980092958, |
| "learning_rate": 5.2532085055164205e-06, |
| "loss": 0.9497, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.6964944649446494, |
| "grad_norm": 1.2901906508870224, |
| "learning_rate": 5.112030615504601e-06, |
| "loss": 0.9525, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.7011070110701108, |
| "grad_norm": 1.3026456358679475, |
| "learning_rate": 4.972121398226371e-06, |
| "loss": 0.9386, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.705719557195572, |
| "grad_norm": 1.2828381081470803, |
| "learning_rate": 4.833517167127077e-06, |
| "loss": 0.9533, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.7103321033210332, |
| "grad_norm": 1.2727558772245062, |
| "learning_rate": 4.6962538969428416e-06, |
| "loss": 0.9515, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.7149446494464945, |
| "grad_norm": 1.2545716044322412, |
| "learning_rate": 4.560367214363295e-06, |
| "loss": 0.9458, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.7195571955719557, |
| "grad_norm": 1.3997422567402023, |
| "learning_rate": 4.425892388784681e-06, |
| "loss": 0.9486, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.724169741697417, |
| "grad_norm": 1.3926886164079586, |
| "learning_rate": 4.292864323155684e-06, |
| "loss": 0.9353, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.7287822878228782, |
| "grad_norm": 1.3247566722736441, |
| "learning_rate": 4.161317544918345e-06, |
| "loss": 0.9383, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.7333948339483395, |
| "grad_norm": 1.2986263471392532, |
| "learning_rate": 4.031286197046493e-06, |
| "loss": 0.9424, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.7380073800738007, |
| "grad_norm": 1.28228382145648, |
| "learning_rate": 3.902804029183907e-06, |
| "loss": 0.9447, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.742619926199262, |
| "grad_norm": 1.3400702965604647, |
| "learning_rate": 3.775904388884618e-06, |
| "loss": 0.942, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.7472324723247232, |
| "grad_norm": 1.2936237260722665, |
| "learning_rate": 3.650620212957524e-06, |
| "loss": 0.9408, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.7518450184501845, |
| "grad_norm": 1.4304247441278308, |
| "learning_rate": 3.5269840189176616e-06, |
| "loss": 0.9284, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.7564575645756457, |
| "grad_norm": 1.305494507360913, |
| "learning_rate": 3.405027896546277e-06, |
| "loss": 0.9262, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.761070110701107, |
| "grad_norm": 1.308925566580474, |
| "learning_rate": 3.2847834995619067e-06, |
| "loss": 0.945, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.7656826568265682, |
| "grad_norm": 1.2536170257409696, |
| "learning_rate": 3.1662820374046776e-06, |
| "loss": 0.9352, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.7702952029520295, |
| "grad_norm": 1.2991569031814136, |
| "learning_rate": 3.0495542671358745e-06, |
| "loss": 0.9295, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.7749077490774908, |
| "grad_norm": 1.2942572243043484, |
| "learning_rate": 2.934630485454948e-06, |
| "loss": 0.9356, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.7795202952029521, |
| "grad_norm": 1.2969441354368494, |
| "learning_rate": 2.8215405208360237e-06, |
| "loss": 0.9396, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.7841328413284133, |
| "grad_norm": 1.3086775669789674, |
| "learning_rate": 2.7103137257858867e-06, |
| "loss": 0.936, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.7887453874538746, |
| "grad_norm": 1.3434048733910837, |
| "learning_rate": 2.600978969225558e-06, |
| "loss": 0.9409, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.7933579335793358, |
| "grad_norm": 1.296167492638027, |
| "learning_rate": 2.493564628997369e-06, |
| "loss": 0.9473, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.7979704797047971, |
| "grad_norm": 1.311623550016214, |
| "learning_rate": 2.3880985844994674e-06, |
| "loss": 0.9291, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.8025830258302583, |
| "grad_norm": 1.301490638284301, |
| "learning_rate": 2.284608209449746e-06, |
| "loss": 0.9362, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.8071955719557196, |
| "grad_norm": 1.3798624445873309, |
| "learning_rate": 2.183120364780975e-06, |
| "loss": 0.9392, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.8118081180811808, |
| "grad_norm": 1.268353502358265, |
| "learning_rate": 2.083661391669043e-06, |
| "loss": 0.9254, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.816420664206642, |
| "grad_norm": 1.3210277461741775, |
| "learning_rate": 1.986257104696121e-06, |
| "loss": 0.9368, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.8210332103321033, |
| "grad_norm": 1.5157935642706408, |
| "learning_rate": 1.8909327851504633e-06, |
| "loss": 0.9528, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.8256457564575646, |
| "grad_norm": 1.3333545496304948, |
| "learning_rate": 1.7977131744646724e-06, |
| "loss": 0.9329, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.8302583025830258, |
| "grad_norm": 1.347331500812731, |
| "learning_rate": 1.7066224677940313e-06, |
| "loss": 0.9435, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.834870848708487, |
| "grad_norm": 1.2761329236408203, |
| "learning_rate": 1.6176843077366755e-06, |
| "loss": 0.9138, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.8394833948339483, |
| "grad_norm": 1.3043962582226256, |
| "learning_rate": 1.5309217781971419e-06, |
| "loss": 0.9136, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.8440959409594095, |
| "grad_norm": 1.3083548235399793, |
| "learning_rate": 1.446357398394934e-06, |
| "loss": 0.9232, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.8487084870848709, |
| "grad_norm": 1.3211262947707485, |
| "learning_rate": 1.3640131170196758e-06, |
| "loss": 0.9157, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.8533210332103321, |
| "grad_norm": 1.3384683921980203, |
| "learning_rate": 1.2839103065343084e-06, |
| "loss": 0.9279, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.8579335793357934, |
| "grad_norm": 1.3261806463300432, |
| "learning_rate": 1.2060697576278812e-06, |
| "loss": 0.9328, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.8625461254612546, |
| "grad_norm": 1.2728242581820297, |
| "learning_rate": 1.1305116738193211e-06, |
| "loss": 0.9345, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.8671586715867159, |
| "grad_norm": 1.3143400007494828, |
| "learning_rate": 1.0572556662136036e-06, |
| "loss": 0.93, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.8717712177121771, |
| "grad_norm": 1.295212740806946, |
| "learning_rate": 9.863207484116987e-07, |
| "loss": 0.9179, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.8763837638376384, |
| "grad_norm": 1.2855127014689818, |
| "learning_rate": 9.177253315755796e-07, |
| "loss": 0.9378, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.8809963099630996, |
| "grad_norm": 1.2681235730706315, |
| "learning_rate": 8.514872196496182e-07, |
| "loss": 0.9346, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.8856088560885609, |
| "grad_norm": 1.2143518692829294, |
| "learning_rate": 7.876236047395525e-07, |
| "loss": 0.9262, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.8902214022140221, |
| "grad_norm": 1.299174334552006, |
| "learning_rate": 7.26151062650291e-07, |
| "loss": 0.9344, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.8948339483394834, |
| "grad_norm": 1.297697552514886, |
| "learning_rate": 6.670855485836525e-07, |
| "loss": 0.928, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.8994464944649446, |
| "grad_norm": 1.2744527095598805, |
| "learning_rate": 6.104423929971948e-07, |
| "loss": 0.9328, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.9040590405904059, |
| "grad_norm": 1.2765941289109337, |
| "learning_rate": 5.562362976251901e-07, |
| "loss": 0.938, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.9086715867158671, |
| "grad_norm": 1.2089053143434019, |
| "learning_rate": 5.044813316627994e-07, |
| "loss": 0.9268, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.9132841328413284, |
| "grad_norm": 1.2947676609809995, |
| "learning_rate": 4.5519092811439627e-07, |
| "loss": 0.9245, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.9178966789667896, |
| "grad_norm": 1.3486891226132347, |
| "learning_rate": 4.083778803070504e-07, |
| "loss": 0.9439, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.922509225092251, |
| "grad_norm": 1.2409553441737478, |
| "learning_rate": 3.6405433856999684e-07, |
| "loss": 0.9573, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.9271217712177122, |
| "grad_norm": 1.3249006446671916, |
| "learning_rate": 3.2223180708102933e-07, |
| "loss": 0.9321, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.9317343173431735, |
| "grad_norm": 1.3038931924440842, |
| "learning_rate": 2.829211408805932e-07, |
| "loss": 0.9176, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.9363468634686347, |
| "grad_norm": 1.5465830904468008, |
| "learning_rate": 2.461325430543482e-07, |
| "loss": 0.9315, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.940959409594096, |
| "grad_norm": 1.2223049246668372, |
| "learning_rate": 2.1187556208496885e-07, |
| "loss": 0.9209, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.9455719557195572, |
| "grad_norm": 1.2465907104655198, |
| "learning_rate": 1.8015908937382587e-07, |
| "loss": 0.9329, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.9501845018450185, |
| "grad_norm": 1.277648704747807, |
| "learning_rate": 1.5099135693322776e-07, |
| "loss": 0.9335, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.9547970479704797, |
| "grad_norm": 1.2306241677383007, |
| "learning_rate": 1.2437993524979984e-07, |
| "loss": 0.9032, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.959409594095941, |
| "grad_norm": 1.2873660924318513, |
| "learning_rate": 1.0033173131956175e-07, |
| "loss": 0.9214, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.9640221402214022, |
| "grad_norm": 1.2494503702573898, |
| "learning_rate": 7.885298685522235e-08, |
| "loss": 0.9154, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.9686346863468634, |
| "grad_norm": 1.305292259939521, |
| "learning_rate": 5.99492766661347e-08, |
| "loss": 0.922, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.9732472324723247, |
| "grad_norm": 1.2774482413132695, |
| "learning_rate": 4.362550721136338e-08, |
| "loss": 0.9254, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.977859778597786, |
| "grad_norm": 1.2579860114165076, |
| "learning_rate": 2.988591532620322e-08, |
| "loss": 0.9152, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.9824723247232472, |
| "grad_norm": 1.2611965951575315, |
| "learning_rate": 1.8734067122514464e-08, |
| "loss": 0.9224, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.9870848708487084, |
| "grad_norm": 1.2577037970249887, |
| "learning_rate": 1.0172857063137643e-08, |
| "loss": 0.925, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.9916974169741697, |
| "grad_norm": 1.2765277598884546, |
| "learning_rate": 4.204507210633368e-09, |
| "loss": 0.9467, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.996309963099631, |
| "grad_norm": 1.2924793783251516, |
| "learning_rate": 8.30566650548148e-10, |
| "loss": 0.9403, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.0, |
| "eval_loss": 0.9425297975540161, |
| "eval_runtime": 141.5855, |
| "eval_samples_per_second": 108.415, |
| "eval_steps_per_second": 1.695, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.0, |
| "step": 1084, |
| "total_flos": 453935093514240.0, |
| "train_loss": 0.9827820636689443, |
| "train_runtime": 7081.9617, |
| "train_samples_per_second": 19.583, |
| "train_steps_per_second": 0.153 |
| } |
| ], |
| "logging_steps": 5, |
| "max_steps": 1084, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 100, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 453935093514240.0, |
| "train_batch_size": 16, |
| "trial_name": null, |
| "trial_params": null |
| } |