{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5023863350916855,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0025119316754584277,
      "grad_norm": 244.78928615182636,
      "learning_rate": 3.75e-07,
      "loss": 14.0363,
      "step": 10
    },
    {
      "epoch": 0.005023863350916855,
      "grad_norm": 318.1891497601652,
      "learning_rate": 7.916666666666667e-07,
      "loss": 13.4987,
      "step": 20
    },
    {
      "epoch": 0.007535795026375283,
      "grad_norm": 211.3992791294665,
      "learning_rate": 1.2083333333333333e-06,
      "loss": 11.4405,
      "step": 30
    },
    {
      "epoch": 0.01004772670183371,
      "grad_norm": 425.154938297453,
      "learning_rate": 1.6250000000000001e-06,
      "loss": 8.7926,
      "step": 40
    },
    {
      "epoch": 0.012559658377292138,
      "grad_norm": 136.69306564524402,
      "learning_rate": 2.041666666666667e-06,
      "loss": 7.1601,
      "step": 50
    },
    {
      "epoch": 0.015071590052750565,
      "grad_norm": 173.2049647337299,
      "learning_rate": 2.4583333333333332e-06,
      "loss": 6.2254,
      "step": 60
    },
    {
      "epoch": 0.017583521728208994,
      "grad_norm": 163.0490968387617,
      "learning_rate": 2.875e-06,
      "loss": 5.7153,
      "step": 70
    },
    {
      "epoch": 0.02009545340366742,
      "grad_norm": 70.98311735934806,
      "learning_rate": 3.2916666666666668e-06,
      "loss": 5.3196,
      "step": 80
    },
    {
      "epoch": 0.02260738507912585,
      "grad_norm": 101.87087924315038,
      "learning_rate": 3.708333333333334e-06,
      "loss": 5.1087,
      "step": 90
    },
    {
      "epoch": 0.025119316754584276,
      "grad_norm": 215.4312626681897,
      "learning_rate": 4.125e-06,
      "loss": 4.9029,
      "step": 100
    },
    {
      "epoch": 0.027631248430042703,
      "grad_norm": 78.6441119779914,
      "learning_rate": 4.541666666666667e-06,
      "loss": 4.8007,
      "step": 110
    },
    {
      "epoch": 0.03014318010550113,
      "grad_norm": 112.82426912718005,
      "learning_rate": 4.958333333333334e-06,
      "loss": 4.8032,
      "step": 120
    },
    {
      "epoch": 0.032655111780959555,
      "grad_norm": 44.43109383036881,
      "learning_rate": 4.999932966293553e-06,
      "loss": 4.5469,
      "step": 130
    },
    {
      "epoch": 0.03516704345641799,
      "grad_norm": 42.94277182614008,
      "learning_rate": 4.999701249454364e-06,
      "loss": 4.6328,
      "step": 140
    },
    {
      "epoch": 0.037678975131876416,
      "grad_norm": 102.19912456135354,
      "learning_rate": 4.999304037243215e-06,
      "loss": 4.5481,
      "step": 150
    },
    {
      "epoch": 0.04019090680733484,
      "grad_norm": 103.48520496328905,
      "learning_rate": 4.998741355957963e-06,
      "loss": 4.0883,
      "step": 160
    },
    {
      "epoch": 0.04270283848279327,
      "grad_norm": 59.75671346819062,
      "learning_rate": 4.998013242851519e-06,
      "loss": 4.09,
      "step": 170
    },
    {
      "epoch": 0.0452147701582517,
      "grad_norm": 54.56786450011987,
      "learning_rate": 4.997119746129383e-06,
      "loss": 3.8574,
      "step": 180
    },
    {
      "epoch": 0.047726701833710125,
      "grad_norm": 41.17219526900339,
      "learning_rate": 4.996060924946452e-06,
      "loss": 3.3506,
      "step": 190
    },
    {
      "epoch": 0.05023863350916855,
      "grad_norm": 64.19755013449358,
      "learning_rate": 4.994836849403107e-06,
      "loss": 3.1208,
      "step": 200
    },
    {
      "epoch": 0.05275056518462698,
      "grad_norm": 45.5637995880343,
      "learning_rate": 4.9934476005405665e-06,
      "loss": 2.9927,
      "step": 210
    },
    {
      "epoch": 0.05526249686008541,
      "grad_norm": 45.04513390690093,
      "learning_rate": 4.991893270335526e-06,
      "loss": 2.6818,
      "step": 220
    },
    {
      "epoch": 0.057774428535543834,
      "grad_norm": 46.258300578654584,
      "learning_rate": 4.990173961694062e-06,
      "loss": 2.4673,
      "step": 230
    },
    {
      "epoch": 0.06028636021100226,
      "grad_norm": 53.325400128100654,
      "learning_rate": 4.988289788444829e-06,
      "loss": 2.1375,
      "step": 240
    },
    {
      "epoch": 0.06279829188646069,
      "grad_norm": 28.819163526585655,
      "learning_rate": 4.986240875331513e-06,
      "loss": 1.9451,
      "step": 250
    },
    {
      "epoch": 0.06531022356191911,
      "grad_norm": 63.6642536784459,
      "learning_rate": 4.984027358004582e-06,
      "loss": 1.7398,
      "step": 260
    },
    {
      "epoch": 0.06782215523737754,
      "grad_norm": 64.052007917374,
      "learning_rate": 4.981649383012295e-06,
      "loss": 1.5556,
      "step": 270
    },
    {
      "epoch": 0.07033408691283598,
      "grad_norm": 41.550046723661794,
      "learning_rate": 4.979107107791009e-06,
      "loss": 1.5029,
      "step": 280
    },
    {
      "epoch": 0.0728460185882944,
      "grad_norm": 28.030603583599877,
      "learning_rate": 4.976400700654752e-06,
      "loss": 1.4048,
      "step": 290
    },
    {
      "epoch": 0.07535795026375283,
      "grad_norm": 21.7743052603298,
      "learning_rate": 4.9735303407840775e-06,
      "loss": 1.3447,
      "step": 300
    },
    {
      "epoch": 0.07786988193921125,
      "grad_norm": 27.724313133031774,
      "learning_rate": 4.970496218214205e-06,
      "loss": 1.369,
      "step": 310
    },
    {
      "epoch": 0.08038181361466969,
      "grad_norm": 39.89486925273176,
      "learning_rate": 4.9672985338224355e-06,
      "loss": 1.2511,
      "step": 320
    },
    {
      "epoch": 0.0828937452901281,
      "grad_norm": 32.71736411454423,
      "learning_rate": 4.963937499314857e-06,
      "loss": 1.1617,
      "step": 330
    },
    {
      "epoch": 0.08540567696558654,
      "grad_norm": 17.47130667747679,
      "learning_rate": 4.960413337212321e-06,
      "loss": 1.1987,
      "step": 340
    },
    {
      "epoch": 0.08791760864104496,
      "grad_norm": 17.44490963530016,
      "learning_rate": 4.956726280835718e-06,
      "loss": 1.2543,
      "step": 350
    },
    {
      "epoch": 0.0904295403165034,
      "grad_norm": 52.67566693581501,
      "learning_rate": 4.952876574290525e-06,
      "loss": 1.1756,
      "step": 360
    },
    {
      "epoch": 0.09294147199196182,
      "grad_norm": 36.08376015056633,
      "learning_rate": 4.948864472450646e-06,
      "loss": 1.1497,
      "step": 370
    },
    {
      "epoch": 0.09545340366742025,
      "grad_norm": 33.06467813616241,
      "learning_rate": 4.9446902409415386e-06,
      "loss": 1.1862,
      "step": 380
    },
    {
      "epoch": 0.09796533534287867,
      "grad_norm": 79.8613840478269,
      "learning_rate": 4.940354156122624e-06,
      "loss": 1.1443,
      "step": 390
    },
    {
      "epoch": 0.1004772670183371,
      "grad_norm": 28.636783892356842,
      "learning_rate": 4.935856505068999e-06,
      "loss": 1.0705,
      "step": 400
    },
    {
      "epoch": 0.10298919869379553,
      "grad_norm": 28.248493771801982,
      "learning_rate": 4.931197585552418e-06,
      "loss": 1.0342,
      "step": 410
    },
    {
      "epoch": 0.10550113036925396,
      "grad_norm": 35.58893396345627,
      "learning_rate": 4.92637770602159e-06,
      "loss": 1.0984,
      "step": 420
    },
    {
      "epoch": 0.10801306204471238,
      "grad_norm": 29.5108168082451,
      "learning_rate": 4.92139718558175e-06,
      "loss": 1.121,
      "step": 430
    },
    {
      "epoch": 0.11052499372017081,
      "grad_norm": 17.994080670599825,
      "learning_rate": 4.916256353973535e-06,
      "loss": 1.1537,
      "step": 440
    },
    {
      "epoch": 0.11303692539562923,
      "grad_norm": 32.844817441284924,
      "learning_rate": 4.910955551551153e-06,
      "loss": 1.1486,
      "step": 450
    },
    {
      "epoch": 0.11554885707108767,
      "grad_norm": 30.914619782560816,
      "learning_rate": 4.90549512925985e-06,
      "loss": 1.0481,
      "step": 460
    },
    {
      "epoch": 0.11806078874654609,
      "grad_norm": 25.55475057225604,
      "learning_rate": 4.899875448612672e-06,
      "loss": 1.0072,
      "step": 470
    },
    {
      "epoch": 0.12057272042200452,
      "grad_norm": 201.00243368632337,
      "learning_rate": 4.894096881666538e-06,
      "loss": 1.1146,
      "step": 480
    },
    {
      "epoch": 0.12308465209746294,
      "grad_norm": 51.017788501532685,
      "learning_rate": 4.8881598109976e-06,
      "loss": 0.9959,
      "step": 490
    },
    {
      "epoch": 0.12559658377292138,
      "grad_norm": 30.076609186117565,
      "learning_rate": 4.882064629675917e-06,
      "loss": 1.0998,
      "step": 500
    },
    {
      "epoch": 0.1281085154483798,
      "grad_norm": 25.914605989320286,
      "learning_rate": 4.875811741239431e-06,
      "loss": 0.9488,
      "step": 510
    },
    {
      "epoch": 0.13062044712383822,
      "grad_norm": 174.16033120144348,
      "learning_rate": 4.869401559667253e-06,
      "loss": 1.0104,
      "step": 520
    },
    {
      "epoch": 0.13313237879929665,
      "grad_norm": 24.53663814061994,
      "learning_rate": 4.862834509352251e-06,
      "loss": 0.9866,
      "step": 530
    },
    {
      "epoch": 0.1356443104747551,
      "grad_norm": 106.69096534898281,
      "learning_rate": 4.8561110250729546e-06,
      "loss": 1.0145,
      "step": 540
    },
    {
      "epoch": 0.13815624215021352,
      "grad_norm": 13.875443655979048,
      "learning_rate": 4.849231551964771e-06,
      "loss": 1.0714,
      "step": 550
    },
    {
      "epoch": 0.14066817382567195,
      "grad_norm": 21.267058863118724,
      "learning_rate": 4.842196545490512e-06,
      "loss": 1.0451,
      "step": 560
    },
    {
      "epoch": 0.14318010550113036,
      "grad_norm": 28.25986035068646,
      "learning_rate": 4.8350064714102415e-06,
      "loss": 0.9979,
      "step": 570
    },
    {
      "epoch": 0.1456920371765888,
      "grad_norm": 18.771189948926747,
      "learning_rate": 4.827661805750438e-06,
      "loss": 1.0446,
      "step": 580
    },
    {
      "epoch": 0.14820396885204723,
      "grad_norm": 33.442572448255376,
      "learning_rate": 4.82016303477248e-06,
      "loss": 1.0067,
      "step": 590
    },
    {
      "epoch": 0.15071590052750566,
      "grad_norm": 18.758813933167882,
      "learning_rate": 4.812510654940453e-06,
      "loss": 0.9925,
      "step": 600
    },
    {
      "epoch": 0.15322783220296407,
      "grad_norm": 18.44228949074116,
      "learning_rate": 4.804705172888277e-06,
      "loss": 0.9199,
      "step": 610
    },
    {
      "epoch": 0.1557397638784225,
      "grad_norm": 18.907319724853284,
      "learning_rate": 4.796747105386169e-06,
      "loss": 0.9933,
      "step": 620
    },
    {
      "epoch": 0.15825169555388094,
      "grad_norm": 24.999775435118767,
      "learning_rate": 4.788636979306428e-06,
      "loss": 0.946,
      "step": 630
    },
    {
      "epoch": 0.16076362722933937,
      "grad_norm": 20.69959166678649,
      "learning_rate": 4.78037533158855e-06,
      "loss": 1.0123,
      "step": 640
    },
    {
      "epoch": 0.16327555890479778,
      "grad_norm": 88.2316410192213,
      "learning_rate": 4.771962709203684e-06,
      "loss": 0.9311,
      "step": 650
    },
    {
      "epoch": 0.1657874905802562,
      "grad_norm": 18.849075967996672,
      "learning_rate": 4.763399669118414e-06,
      "loss": 0.9532,
      "step": 660
    },
    {
      "epoch": 0.16829942225571465,
      "grad_norm": 9.168139689519153,
      "learning_rate": 4.754686778257891e-06,
      "loss": 0.8347,
      "step": 670
    },
    {
      "epoch": 0.17081135393117308,
      "grad_norm": 57.688726700615845,
      "learning_rate": 4.745824613468293e-06,
      "loss": 0.892,
      "step": 680
    },
    {
      "epoch": 0.1733232856066315,
      "grad_norm": 34.38325408055489,
      "learning_rate": 4.736813761478638e-06,
      "loss": 0.941,
      "step": 690
    },
    {
      "epoch": 0.17583521728208992,
      "grad_norm": 18.535830277017187,
      "learning_rate": 4.727654818861937e-06,
      "loss": 0.9297,
      "step": 700
    },
    {
      "epoch": 0.17834714895754836,
      "grad_norm": 48.80919542270613,
      "learning_rate": 4.718348391995698e-06,
      "loss": 0.9512,
      "step": 710
    },
    {
      "epoch": 0.1808590806330068,
      "grad_norm": 10.424093008575271,
      "learning_rate": 4.7088950970217825e-06,
      "loss": 0.9061,
      "step": 720
    },
    {
      "epoch": 0.1833710123084652,
      "grad_norm": 13.175028312423365,
      "learning_rate": 4.699295559805606e-06,
      "loss": 0.9454,
      "step": 730
    },
    {
      "epoch": 0.18588294398392363,
      "grad_norm": 65.05485953479175,
      "learning_rate": 4.68955041589471e-06,
      "loss": 0.9451,
      "step": 740
    },
    {
      "epoch": 0.18839487565938207,
      "grad_norm": 25.29768837033324,
      "learning_rate": 4.679660310476682e-06,
      "loss": 0.831,
      "step": 750
    },
    {
      "epoch": 0.1909068073348405,
      "grad_norm": 19.717151893244445,
      "learning_rate": 4.669625898336439e-06,
      "loss": 0.8221,
      "step": 760
    },
    {
      "epoch": 0.1934187390102989,
      "grad_norm": 41.31445498318851,
      "learning_rate": 4.659447843812876e-06,
      "loss": 0.8756,
      "step": 770
    },
    {
      "epoch": 0.19593067068575734,
      "grad_norm": 14.81608234505718,
      "learning_rate": 4.649126820754887e-06,
      "loss": 0.9191,
      "step": 780
    },
    {
      "epoch": 0.19844260236121578,
      "grad_norm": 19.95005110698172,
      "learning_rate": 4.638663512476748e-06,
      "loss": 0.9372,
      "step": 790
    },
    {
      "epoch": 0.2009545340366742,
      "grad_norm": 20.065073396172792,
      "learning_rate": 4.628058611712879e-06,
      "loss": 1.0179,
      "step": 800
    },
    {
      "epoch": 0.20346646571213264,
      "grad_norm": 16.42772007372194,
      "learning_rate": 4.617312820571981e-06,
      "loss": 0.9572,
      "step": 810
    },
    {
      "epoch": 0.20597839738759105,
      "grad_norm": 15.309541598862848,
      "learning_rate": 4.606426850490551e-06,
      "loss": 0.9536,
      "step": 820
    },
    {
      "epoch": 0.20849032906304948,
      "grad_norm": 15.317369217882426,
      "learning_rate": 4.595401422185783e-06,
      "loss": 0.8942,
      "step": 830
    },
    {
      "epoch": 0.21100226073850792,
      "grad_norm": 12.500948604145567,
      "learning_rate": 4.584237265607847e-06,
      "loss": 0.8162,
      "step": 840
    },
    {
      "epoch": 0.21351419241396635,
      "grad_norm": 37.708490424859214,
      "learning_rate": 4.5729351198915715e-06,
      "loss": 0.9038,
      "step": 850
    },
    {
      "epoch": 0.21602612408942476,
      "grad_norm": 34.796355808216596,
      "learning_rate": 4.561495733307496e-06,
      "loss": 0.9207,
      "step": 860
    },
    {
      "epoch": 0.2185380557648832,
      "grad_norm": 26.298494542320878,
      "learning_rate": 4.549919863212341e-06,
      "loss": 0.9457,
      "step": 870
    },
    {
      "epoch": 0.22104998744034163,
      "grad_norm": 16.186659690778107,
      "learning_rate": 4.538208275998861e-06,
      "loss": 1.0715,
      "step": 880
    },
    {
      "epoch": 0.22356191911580006,
      "grad_norm": 24.662608067526172,
      "learning_rate": 4.526361747045108e-06,
      "loss": 0.9606,
      "step": 890
    },
    {
      "epoch": 0.22607385079125847,
      "grad_norm": 40.835876651445666,
      "learning_rate": 4.514381060663092e-06,
      "loss": 0.933,
      "step": 900
    },
    {
      "epoch": 0.2285857824667169,
      "grad_norm": 21.30586385592757,
      "learning_rate": 4.50226701004686e-06,
      "loss": 0.8754,
      "step": 910
    },
    {
      "epoch": 0.23109771414217534,
      "grad_norm": 16.612023821577484,
      "learning_rate": 4.49002039721998e-06,
      "loss": 0.7735,
      "step": 920
    },
    {
      "epoch": 0.23360964581763377,
      "grad_norm": 21.74905032903839,
      "learning_rate": 4.477642032982439e-06,
      "loss": 0.856,
      "step": 930
    },
    {
      "epoch": 0.23612157749309218,
      "grad_norm": 59.36191090209393,
      "learning_rate": 4.4651327368569695e-06,
      "loss": 0.9044,
      "step": 940
    },
    {
      "epoch": 0.2386335091685506,
      "grad_norm": 15.835736575328223,
      "learning_rate": 4.452493337034785e-06,
      "loss": 0.9162,
      "step": 950
    },
    {
      "epoch": 0.24114544084400905,
      "grad_norm": 15.54957354753592,
      "learning_rate": 4.439724670320755e-06,
      "loss": 0.9259,
      "step": 960
    },
    {
      "epoch": 0.24365737251946748,
      "grad_norm": 28.022932138978874,
      "learning_rate": 4.426827582077999e-06,
      "loss": 0.8854,
      "step": 970
    },
    {
      "epoch": 0.2461693041949259,
      "grad_norm": 13.702537050157368,
      "learning_rate": 4.413802926171921e-06,
      "loss": 0.8087,
      "step": 980
    },
    {
      "epoch": 0.24868123587038432,
      "grad_norm": 30.82271055352541,
      "learning_rate": 4.400651564913676e-06,
      "loss": 0.9174,
      "step": 990
    },
    {
      "epoch": 0.25119316754584275,
      "grad_norm": 19.961883729903487,
      "learning_rate": 4.387374369003083e-06,
      "loss": 0.8261,
      "step": 1000
    },
    {
      "epoch": 0.2537050992213012,
      "grad_norm": 13.910420016384712,
      "learning_rate": 4.373972217470976e-06,
      "loss": 0.8063,
      "step": 1010
    },
    {
      "epoch": 0.2562170308967596,
      "grad_norm": 15.74547796378487,
      "learning_rate": 4.36044599762101e-06,
      "loss": 0.893,
      "step": 1020
    },
    {
      "epoch": 0.25872896257221806,
      "grad_norm": 22.817146904726147,
      "learning_rate": 4.346796604970913e-06,
      "loss": 0.825,
      "step": 1030
    },
    {
      "epoch": 0.26124089424767644,
      "grad_norm": 34.97383754680491,
      "learning_rate": 4.333024943193198e-06,
      "loss": 0.8637,
      "step": 1040
    },
    {
      "epoch": 0.26375282592313487,
      "grad_norm": 14.442348394650033,
      "learning_rate": 4.31913192405534e-06,
      "loss": 0.8512,
      "step": 1050
    },
    {
      "epoch": 0.2662647575985933,
      "grad_norm": 109.10193874564091,
      "learning_rate": 4.305118467359402e-06,
      "loss": 0.8808,
      "step": 1060
    },
    {
      "epoch": 0.26877668927405174,
      "grad_norm": 27.206026550683518,
      "learning_rate": 4.290985500881143e-06,
      "loss": 0.8114,
      "step": 1070
    },
    {
      "epoch": 0.2712886209495102,
      "grad_norm": 16.12708282763735,
      "learning_rate": 4.276733960308599e-06,
      "loss": 0.8569,
      "step": 1080
    },
    {
      "epoch": 0.2738005526249686,
      "grad_norm": 84.431643774864,
      "learning_rate": 4.262364789180123e-06,
      "loss": 0.9139,
      "step": 1090
    },
    {
      "epoch": 0.27631248430042704,
      "grad_norm": 14.872893259437928,
      "learning_rate": 4.247878938821929e-06,
      "loss": 0.7897,
      "step": 1100
    },
    {
      "epoch": 0.2788244159758855,
      "grad_norm": 44.05664559088591,
      "learning_rate": 4.2332773682851e-06,
      "loss": 0.8999,
      "step": 1110
    },
    {
      "epoch": 0.2813363476513439,
      "grad_norm": 14.79051628084432,
      "learning_rate": 4.218561044282099e-06,
      "loss": 0.8443,
      "step": 1120
    },
    {
      "epoch": 0.2838482793268023,
      "grad_norm": 33.45294178219554,
      "learning_rate": 4.20373094112276e-06,
      "loss": 0.8567,
      "step": 1130
    },
    {
      "epoch": 0.2863602110022607,
      "grad_norm": 46.4659449561201,
      "learning_rate": 4.18878804064979e-06,
      "loss": 0.9058,
      "step": 1140
    },
    {
      "epoch": 0.28887214267771916,
      "grad_norm": 30.620320847801434,
      "learning_rate": 4.173733332173759e-06,
      "loss": 0.8482,
      "step": 1150
    },
    {
      "epoch": 0.2913840743531776,
      "grad_norm": 32.099385967886,
      "learning_rate": 4.158567812407609e-06,
      "loss": 0.9224,
      "step": 1160
    },
    {
      "epoch": 0.293896006028636,
      "grad_norm": 26.04095063399648,
      "learning_rate": 4.1432924854006554e-06,
      "loss": 0.7986,
      "step": 1170
    },
    {
      "epoch": 0.29640793770409446,
      "grad_norm": 21.53191952152889,
      "learning_rate": 4.127908362472121e-06,
      "loss": 0.8285,
      "step": 1180
    },
    {
      "epoch": 0.2989198693795529,
      "grad_norm": 14.804545412809087,
      "learning_rate": 4.112416462144181e-06,
      "loss": 0.8189,
      "step": 1190
    },
    {
      "epoch": 0.30143180105501133,
      "grad_norm": 28.04536852758184,
      "learning_rate": 4.096817810074521e-06,
      "loss": 0.9102,
      "step": 1200
    },
    {
      "epoch": 0.3039437327304697,
      "grad_norm": 16.38088643600367,
      "learning_rate": 4.081113438988443e-06,
      "loss": 0.8876,
      "step": 1210
    },
    {
      "epoch": 0.30645566440592814,
      "grad_norm": 26.286121352348058,
      "learning_rate": 4.065304388610491e-06,
      "loss": 0.8078,
      "step": 1220
    },
    {
      "epoch": 0.3089675960813866,
      "grad_norm": 49.451144669082176,
      "learning_rate": 4.049391705595605e-06,
      "loss": 0.874,
      "step": 1230
    },
    {
      "epoch": 0.311479527756845,
      "grad_norm": 43.93733084504646,
      "learning_rate": 4.033376443459842e-06,
      "loss": 0.8575,
      "step": 1240
    },
    {
      "epoch": 0.31399145943230344,
      "grad_norm": 33.5919976090378,
      "learning_rate": 4.017259662510613e-06,
      "loss": 0.8988,
      "step": 1250
    },
    {
      "epoch": 0.3165033911077619,
      "grad_norm": 41.047784413490206,
      "learning_rate": 4.00104242977649e-06,
      "loss": 0.8776,
      "step": 1260
    },
    {
      "epoch": 0.3190153227832203,
      "grad_norm": 21.485503202698204,
      "learning_rate": 3.9847258189365664e-06,
      "loss": 0.9317,
      "step": 1270
    },
    {
      "epoch": 0.32152725445867875,
      "grad_norm": 18.745800000012018,
      "learning_rate": 3.968310910249364e-06,
      "loss": 0.8406,
      "step": 1280
    },
    {
      "epoch": 0.3240391861341371,
      "grad_norm": 36.22488794899786,
      "learning_rate": 3.951798790481318e-06,
      "loss": 0.8217,
      "step": 1290
    },
    {
      "epoch": 0.32655111780959556,
      "grad_norm": 16.051366022833278,
      "learning_rate": 3.9351905528348285e-06,
      "loss": 0.8654,
      "step": 1300
    },
    {
      "epoch": 0.329063049485054,
      "grad_norm": 28.70539547850433,
      "learning_rate": 3.918487296875881e-06,
      "loss": 0.8681,
      "step": 1310
    },
    {
      "epoch": 0.3315749811605124,
      "grad_norm": 14.816194644055994,
      "learning_rate": 3.901690128461248e-06,
      "loss": 0.8818,
      "step": 1320
    },
    {
      "epoch": 0.33408691283597086,
      "grad_norm": 13.007885823046339,
      "learning_rate": 3.8848001596652765e-06,
      "loss": 0.8706,
      "step": 1330
    },
    {
      "epoch": 0.3365988445114293,
      "grad_norm": 12.482101372342157,
      "learning_rate": 3.867818508706261e-06,
      "loss": 0.8012,
      "step": 1340
    },
    {
      "epoch": 0.33911077618688773,
      "grad_norm": 26.169043780262097,
      "learning_rate": 3.850746299872412e-06,
      "loss": 0.9217,
      "step": 1350
    },
    {
      "epoch": 0.34162270786234616,
      "grad_norm": 46.59460344018054,
      "learning_rate": 3.833584663447418e-06,
      "loss": 0.876,
      "step": 1360
    },
    {
      "epoch": 0.3441346395378046,
      "grad_norm": 16.84714833096335,
      "learning_rate": 3.816334735635618e-06,
      "loss": 0.8291,
      "step": 1370
    },
    {
      "epoch": 0.346646571213263,
      "grad_norm": 37.545682101952806,
      "learning_rate": 3.7989976584867743e-06,
      "loss": 0.7684,
      "step": 1380
    },
    {
      "epoch": 0.3491585028887214,
      "grad_norm": 27.139012954460355,
      "learning_rate": 3.7815745798204646e-06,
      "loss": 0.761,
      "step": 1390
    },
    {
      "epoch": 0.35167043456417985,
      "grad_norm": 32.23972648827027,
      "learning_rate": 3.7640666531500868e-06,
      "loss": 0.796,
      "step": 1400
    },
    {
      "epoch": 0.3541823662396383,
      "grad_norm": 31.529769235569418,
      "learning_rate": 3.7464750376064934e-06,
      "loss": 0.8033,
      "step": 1410
    },
    {
      "epoch": 0.3566942979150967,
      "grad_norm": 20.56253529260212,
      "learning_rate": 3.7288008978612457e-06,
      "loss": 0.8213,
      "step": 1420
    },
    {
      "epoch": 0.35920622959055515,
      "grad_norm": 10.619817778124629,
      "learning_rate": 3.711045404049507e-06,
      "loss": 0.829,
      "step": 1430
    },
    {
      "epoch": 0.3617181612660136,
      "grad_norm": 19.304755989056705,
      "learning_rate": 3.693209731692573e-06,
      "loss": 0.8201,
      "step": 1440
    },
    {
      "epoch": 0.364230092941472,
      "grad_norm": 18.94828073608545,
      "learning_rate": 3.675295061620047e-06,
      "loss": 0.851,
      "step": 1450
    },
    {
      "epoch": 0.3667420246169304,
      "grad_norm": 29.832008452352895,
      "learning_rate": 3.6573025798916566e-06,
      "loss": 0.7624,
      "step": 1460
    },
    {
      "epoch": 0.36925395629238883,
      "grad_norm": 34.613004229596356,
      "learning_rate": 3.6392334777187354e-06,
      "loss": 0.7536,
      "step": 1470
    },
    {
      "epoch": 0.37176588796784726,
      "grad_norm": 15.983386653537558,
      "learning_rate": 3.621088951385353e-06,
      "loss": 0.769,
      "step": 1480
    },
    {
      "epoch": 0.3742778196433057,
      "grad_norm": 18.541258614645898,
      "learning_rate": 3.6028702021691164e-06,
      "loss": 0.8561,
      "step": 1490
    },
    {
      "epoch": 0.37678975131876413,
      "grad_norm": 13.569243914961227,
      "learning_rate": 3.5845784362616375e-06,
      "loss": 0.8026,
      "step": 1500
    },
    {
      "epoch": 0.37930168299422257,
      "grad_norm": 13.589477313526036,
      "learning_rate": 3.566214864688674e-06,
      "loss": 0.7856,
      "step": 1510
    },
    {
      "epoch": 0.381813614669681,
      "grad_norm": 9.08955551951589,
      "learning_rate": 3.5477807032299565e-06,
      "loss": 0.8099,
      "step": 1520
    },
    {
      "epoch": 0.38432554634513943,
      "grad_norm": 30.698836150904377,
      "learning_rate": 3.5292771723386916e-06,
      "loss": 0.7516,
      "step": 1530
    },
    {
      "epoch": 0.3868374780205978,
      "grad_norm": 13.203900449832538,
      "learning_rate": 3.5107054970607624e-06,
      "loss": 0.8014,
      "step": 1540
    },
    {
      "epoch": 0.38934940969605625,
      "grad_norm": 22.948546424383103,
      "learning_rate": 3.4920669069536265e-06,
      "loss": 0.8049,
      "step": 1550
    },
    {
      "epoch": 0.3918613413715147,
      "grad_norm": 31.428429983393126,
      "learning_rate": 3.4733626360049065e-06,
      "loss": 0.8858,
      "step": 1560
    },
    {
      "epoch": 0.3943732730469731,
      "grad_norm": 17.236743198133453,
      "learning_rate": 3.4545939225506935e-06,
      "loss": 0.8511,
      "step": 1570
    },
    {
      "epoch": 0.39688520472243155,
      "grad_norm": 22.980911438080426,
      "learning_rate": 3.4357620091935656e-06,
      "loss": 0.7983,
      "step": 1580
    },
    {
      "epoch": 0.39939713639789,
      "grad_norm": 11.441966137764789,
      "learning_rate": 3.416868142720316e-06,
      "loss": 0.7239,
      "step": 1590
    },
    {
      "epoch": 0.4019090680733484,
      "grad_norm": 14.91445014623267,
      "learning_rate": 3.39791357401941e-06,
      "loss": 0.7582,
      "step": 1600
    },
    {
      "epoch": 0.40442099974880685,
      "grad_norm": 41.79040186628522,
      "learning_rate": 3.3788995579981694e-06,
      "loss": 0.8557,
      "step": 1610
    },
    {
      "epoch": 0.4069329314242653,
      "grad_norm": 14.432086044310571,
      "learning_rate": 3.359827353499685e-06,
      "loss": 0.7389,
      "step": 1620
    },
    {
      "epoch": 0.40944486309972367,
      "grad_norm": 15.440731757695739,
      "learning_rate": 3.340698223219484e-06,
      "loss": 0.7378,
      "step": 1630
    },
    {
      "epoch": 0.4119567947751821,
      "grad_norm": 9.55068956773867,
      "learning_rate": 3.321513433621919e-06,
      "loss": 0.7992,
      "step": 1640
    },
    {
      "epoch": 0.41446872645064053,
      "grad_norm": 21.998448239749745,
      "learning_rate": 3.3022742548563293e-06,
      "loss": 0.7342,
      "step": 1650
    },
    {
      "epoch": 0.41698065812609897,
      "grad_norm": 10.474200018473434,
      "learning_rate": 3.282981960672948e-06,
      "loss": 0.7554,
      "step": 1660
    },
    {
      "epoch": 0.4194925898015574,
      "grad_norm": 20.24084901441791,
      "learning_rate": 3.2636378283385684e-06,
      "loss": 0.7177,
      "step": 1670
    },
    {
      "epoch": 0.42200452147701584,
      "grad_norm": 15.252907769914906,
      "learning_rate": 3.2442431385519853e-06,
      "loss": 0.8415,
      "step": 1680
    },
    {
      "epoch": 0.42451645315247427,
      "grad_norm": 31.941655660403704,
      "learning_rate": 3.2247991753592018e-06,
      "loss": 0.7944,
      "step": 1690
    },
    {
      "epoch": 0.4270283848279327,
      "grad_norm": 12.168344877357876,
      "learning_rate": 3.2053072260684206e-06,
      "loss": 0.7722,
      "step": 1700
    },
    {
      "epoch": 0.4295403165033911,
      "grad_norm": 24.94079803525497,
      "learning_rate": 3.1857685811648143e-06,
      "loss": 0.7928,
      "step": 1710
    },
    {
      "epoch": 0.4320522481788495,
      "grad_norm": 15.210129562675117,
      "learning_rate": 3.1661845342250874e-06,
      "loss": 0.7647,
      "step": 1720
    },
    {
      "epoch": 0.43456417985430795,
      "grad_norm": 29.737670350280883,
      "learning_rate": 3.1465563818318356e-06,
      "loss": 0.757,
      "step": 1730
    },
    {
      "epoch": 0.4370761115297664,
      "grad_norm": 34.11288282754126,
      "learning_rate": 3.1268854234877023e-06,
      "loss": 0.7198,
      "step": 1740
    },
    {
      "epoch": 0.4395880432052248,
      "grad_norm": 25.982392999301357,
      "learning_rate": 3.107172961529343e-06,
      "loss": 0.8304,
      "step": 1750
    },
    {
      "epoch": 0.44209997488068326,
      "grad_norm": 19.060965746755112,
      "learning_rate": 3.0874203010412057e-06,
      "loss": 0.7495,
      "step": 1760
    },
    {
      "epoch": 0.4446119065561417,
      "grad_norm": 9.450290682003281,
      "learning_rate": 3.067628749769125e-06,
      "loss": 0.6833,
      "step": 1770
    },
    {
      "epoch": 0.4471238382316001,
      "grad_norm": 81.52825084551178,
      "learning_rate": 3.047799618033739e-06,
      "loss": 0.7265,
      "step": 1780
    },
    {
      "epoch": 0.4496357699070585,
      "grad_norm": 112.89167490869183,
      "learning_rate": 3.027934218643742e-06,
      "loss": 0.7124,
      "step": 1790
    },
    {
      "epoch": 0.45214770158251694,
      "grad_norm": 12.80679045659251,
      "learning_rate": 3.008033866808967e-06,
      "loss": 0.795,
      "step": 1800
    },
    {
      "epoch": 0.45465963325797537,
      "grad_norm": 7.979181961331163,
      "learning_rate": 2.9880998800533095e-06,
      "loss": 0.7602,
      "step": 1810
    },
    {
      "epoch": 0.4571715649334338,
      "grad_norm": 104.22429484767227,
      "learning_rate": 2.968133578127501e-06,
      "loss": 0.7924,
      "step": 1820
    },
    {
      "epoch": 0.45968349660889224,
      "grad_norm": 25.072483756192565,
      "learning_rate": 2.9481362829217335e-06,
      "loss": 0.7684,
      "step": 1830
    },
    {
      "epoch": 0.4621954282843507,
      "grad_norm": 28.08445712014526,
      "learning_rate": 2.9281093183781406e-06,
      "loss": 0.7569,
      "step": 1840
    },
    {
      "epoch": 0.4647073599598091,
      "grad_norm": 17.77498223316829,
      "learning_rate": 2.9080540104031487e-06,
      "loss": 0.8381,
      "step": 1850
    },
    {
      "epoch": 0.46721929163526754,
      "grad_norm": 32.223471519237506,
      "learning_rate": 2.887971686779688e-06,
      "loss": 0.7015,
      "step": 1860
    },
    {
      "epoch": 0.4697312233107259,
      "grad_norm": 48.40756577090116,
      "learning_rate": 2.8678636770792907e-06,
      "loss": 0.7002,
      "step": 1870
    },
    {
      "epoch": 0.47224315498618435,
      "grad_norm": 24.520924633617422,
      "learning_rate": 2.847731312574061e-06,
      "loss": 0.7943,
      "step": 1880
    },
    {
      "epoch": 0.4747550866616428,
      "grad_norm": 22.88033862082146,
      "learning_rate": 2.8275759261485407e-06,
      "loss": 0.6797,
      "step": 1890
    },
    {
      "epoch": 0.4772670183371012,
      "grad_norm": 21.004816938320573,
      "learning_rate": 2.80739885221146e-06,
      "loss": 0.8262,
      "step": 1900
    },
    {
      "epoch": 0.47977895001255966,
      "grad_norm": 40.505775555323616,
      "learning_rate": 2.7872014266073972e-06,
      "loss": 0.856,
      "step": 1910
    },
    {
      "epoch": 0.4822908816880181,
      "grad_norm": 15.196929474940113,
      "learning_rate": 2.7669849865283303e-06,
      "loss": 0.6539,
      "step": 1920
    },
    {
      "epoch": 0.4848028133634765,
      "grad_norm": 21.136530968784285,
      "learning_rate": 2.746750870425114e-06,
      "loss": 0.6839,
      "step": 1930
    },
    {
      "epoch": 0.48731474503893496,
      "grad_norm": 66.94533407380263,
      "learning_rate": 2.7265004179188607e-06,
      "loss": 0.7212,
      "step": 1940
    },
    {
      "epoch": 0.4898266767143934,
      "grad_norm": 17.537525895857943,
      "learning_rate": 2.7062349697122536e-06,
      "loss": 0.7541,
      "step": 1950
    },
    {
      "epoch": 0.4923386083898518,
      "grad_norm": 54.32967278575285,
      "learning_rate": 2.68595586750078e-06,
      "loss": 0.7619,
      "step": 1960
    },
    {
      "epoch": 0.4948505400653102,
      "grad_norm": 21.831994544086985,
      "learning_rate": 2.665664453883907e-06,
      "loss": 0.7713,
      "step": 1970
    },
    {
      "epoch": 0.49736247174076864,
      "grad_norm": 27.261105082644335,
      "learning_rate": 2.6453620722761897e-06,
      "loss": 0.7319,
      "step": 1980
    },
    {
      "epoch": 0.4998744034162271,
      "grad_norm": 303.80383252156145,
      "learning_rate": 2.6250500668183325e-06,
      "loss": 0.7719,
      "step": 1990
    },
    {
      "epoch": 0.5023863350916855,
      "grad_norm": 18.975055416205706,
      "learning_rate": 2.6047297822881962e-06,
      "loss": 0.8072,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 3981,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 250,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2517310013177856.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}