{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.4711425206124853,
  "eval_steps": 500,
  "global_step": 2800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016826518593303045,
      "grad_norm": 3.75418950341011,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 0.9983,
      "step": 10
    },
    {
      "epoch": 0.003365303718660609,
      "grad_norm": 4.027030925274863,
      "learning_rate": 9.999999999999999e-06,
      "loss": 0.9697,
      "step": 20
    },
    {
      "epoch": 0.005047955577990914,
      "grad_norm": 4.048987349136423,
      "learning_rate": 1.5e-05,
      "loss": 0.9412,
      "step": 30
    },
    {
      "epoch": 0.006730607437321218,
      "grad_norm": 5.720158971431411,
      "learning_rate": 1.9999999999999998e-05,
      "loss": 0.8783,
      "step": 40
    },
    {
      "epoch": 0.008413259296651522,
      "grad_norm": 4.718965032869529,
      "learning_rate": 2.5e-05,
      "loss": 0.8454,
      "step": 50
    },
    {
      "epoch": 0.010095911155981827,
      "grad_norm": 3.5785181087788835,
      "learning_rate": 3e-05,
      "loss": 0.809,
      "step": 60
    },
    {
      "epoch": 0.011778563015312132,
      "grad_norm": 4.11981684712826,
      "learning_rate": 2.9999786123888308e-05,
      "loss": 0.7556,
      "step": 70
    },
    {
      "epoch": 0.013461214874642436,
      "grad_norm": 6.082559649594005,
      "learning_rate": 2.9999144501652298e-05,
      "loss": 0.7613,
      "step": 80
    },
    {
      "epoch": 0.01514386673397274,
      "grad_norm": 1.957553999291205,
      "learning_rate": 2.9998075151588992e-05,
      "loss": 0.7784,
      "step": 90
    },
    {
      "epoch": 0.016826518593303044,
      "grad_norm": 1.6706087540201593,
      "learning_rate": 2.999657810419285e-05,
      "loss": 0.7658,
      "step": 100
    },
    {
      "epoch": 0.01850917045263335,
      "grad_norm": 2.909734954037323,
      "learning_rate": 2.999465340215489e-05,
      "loss": 0.7331,
      "step": 110
    },
    {
      "epoch": 0.020191822311963654,
      "grad_norm": 1.977272298268717,
      "learning_rate": 2.999230110036149e-05,
      "loss": 0.7507,
      "step": 120
    },
    {
      "epoch": 0.02187447417129396,
      "grad_norm": 1.8089524113272115,
      "learning_rate": 2.99895212658928e-05,
      "loss": 0.7309,
      "step": 130
    },
    {
      "epoch": 0.023557126030624265,
      "grad_norm": 2.134962179309057,
      "learning_rate": 2.9986313978020846e-05,
      "loss": 0.721,
      "step": 140
    },
    {
      "epoch": 0.02523977788995457,
      "grad_norm": 11.10353091330302,
      "learning_rate": 2.9982679328207262e-05,
      "loss": 0.7338,
      "step": 150
    },
    {
      "epoch": 0.02692242974928487,
      "grad_norm": 1.4444344817739057,
      "learning_rate": 2.9978617420100692e-05,
      "loss": 0.7227,
      "step": 160
    },
    {
      "epoch": 0.028605081608615177,
      "grad_norm": 1.453288161439029,
      "learning_rate": 2.9974128369533805e-05,
      "loss": 0.7107,
      "step": 170
    },
    {
      "epoch": 0.03028773346794548,
      "grad_norm": 3.475164856876678,
      "learning_rate": 2.9969212304520034e-05,
      "loss": 0.7303,
      "step": 180
    },
    {
      "epoch": 0.03197038532727579,
      "grad_norm": 1.1636824531496957,
      "learning_rate": 2.9963869365249895e-05,
      "loss": 0.6688,
      "step": 190
    },
    {
      "epoch": 0.03365303718660609,
      "grad_norm": 1.8518695174363622,
      "learning_rate": 2.995809970408699e-05,
      "loss": 0.7003,
      "step": 200
    },
    {
      "epoch": 0.0353356890459364,
      "grad_norm": 4.09791760479377,
      "learning_rate": 2.9951903485563685e-05,
      "loss": 0.7442,
      "step": 210
    },
    {
      "epoch": 0.0370183409052667,
      "grad_norm": 2.4987929291159956,
      "learning_rate": 2.99452808863764e-05,
      "loss": 0.7517,
      "step": 220
    },
    {
      "epoch": 0.03870099276459701,
      "grad_norm": 3.4584802037194087,
      "learning_rate": 2.993823209538056e-05,
      "loss": 0.7537,
      "step": 230
    },
    {
      "epoch": 0.04038364462392731,
      "grad_norm": 2.511130636368107,
      "learning_rate": 2.9930757313585238e-05,
      "loss": 0.7599,
      "step": 240
    },
    {
      "epoch": 0.04206629648325761,
      "grad_norm": 1.7030446444812277,
      "learning_rate": 2.9922856754147406e-05,
      "loss": 0.7126,
      "step": 250
    },
    {
      "epoch": 0.04374894834258792,
      "grad_norm": 4.790377413030976,
      "learning_rate": 2.9914530642365852e-05,
      "loss": 0.72,
      "step": 260
    },
    {
      "epoch": 0.04543160020191822,
      "grad_norm": 2.0321244924961976,
      "learning_rate": 2.990577921567476e-05,
      "loss": 0.6733,
      "step": 270
    },
    {
      "epoch": 0.04711425206124853,
      "grad_norm": 2.310370624749643,
      "learning_rate": 2.989660272363696e-05,
      "loss": 0.7212,
      "step": 280
    },
    {
      "epoch": 0.04879690392057883,
      "grad_norm": 3.451763592410144,
      "learning_rate": 2.988700142793676e-05,
      "loss": 0.7237,
      "step": 290
    },
    {
      "epoch": 0.05047955577990914,
      "grad_norm": 5.317302731978485,
      "learning_rate": 2.9876975602372536e-05,
      "loss": 0.7558,
      "step": 300
    },
    {
      "epoch": 0.05216220763923944,
      "grad_norm": 2.3026448136142914,
      "learning_rate": 2.9866525532848906e-05,
      "loss": 0.6985,
      "step": 310
    },
    {
      "epoch": 0.05384485949856974,
      "grad_norm": 1.8320545447196381,
      "learning_rate": 2.9855651517368567e-05,
      "loss": 0.7227,
      "step": 320
    },
    {
      "epoch": 0.05552751135790005,
      "grad_norm": 1.9908218789466392,
      "learning_rate": 2.9844353866023802e-05,
      "loss": 0.7075,
      "step": 330
    },
    {
      "epoch": 0.05721016321723035,
      "grad_norm": 5.182840115712529,
      "learning_rate": 2.9832632900987642e-05,
      "loss": 0.7207,
      "step": 340
    },
    {
      "epoch": 0.05889281507656066,
      "grad_norm": 1.5483797249278837,
      "learning_rate": 2.982048895650468e-05,
      "loss": 0.7233,
      "step": 350
    },
    {
      "epoch": 0.06057546693589096,
      "grad_norm": 2.3382590504722693,
      "learning_rate": 2.9807922378881537e-05,
      "loss": 0.7002,
      "step": 360
    },
    {
      "epoch": 0.06225811879522127,
      "grad_norm": 3.1859655239636937,
      "learning_rate": 2.979493352647697e-05,
      "loss": 0.7201,
      "step": 370
    },
    {
      "epoch": 0.06394077065455157,
      "grad_norm": 0.9149159742557087,
      "learning_rate": 2.9781522769691686e-05,
      "loss": 0.7136,
      "step": 380
    },
    {
      "epoch": 0.06562342251388188,
      "grad_norm": 10.861566072795899,
      "learning_rate": 2.9767690490957758e-05,
      "loss": 0.7068,
      "step": 390
    },
    {
      "epoch": 0.06730607437321218,
      "grad_norm": 2.8618866775651006,
      "learning_rate": 2.9753437084727713e-05,
      "loss": 0.7239,
      "step": 400
    },
    {
      "epoch": 0.06898872623254249,
      "grad_norm": 2.8726068570785097,
      "learning_rate": 2.9738762957463292e-05,
      "loss": 0.7245,
      "step": 410
    },
    {
      "epoch": 0.0706713780918728,
      "grad_norm": 2.4481298042739112,
      "learning_rate": 2.9723668527623877e-05,
      "loss": 0.7752,
      "step": 420
    },
    {
      "epoch": 0.0723540299512031,
      "grad_norm": 1.8599931346602536,
      "learning_rate": 2.9708154225654526e-05,
      "loss": 0.7323,
      "step": 430
    },
    {
      "epoch": 0.0740366818105334,
      "grad_norm": 1.2855737813743626,
      "learning_rate": 2.9692220493973712e-05,
      "loss": 0.7037,
      "step": 440
    },
    {
      "epoch": 0.0757193336698637,
      "grad_norm": 4.629091463528233,
      "learning_rate": 2.9675867786960718e-05,
      "loss": 0.6867,
      "step": 450
    },
    {
      "epoch": 0.07740198552919401,
      "grad_norm": 6.294427059845777,
      "learning_rate": 2.9659096570942654e-05,
      "loss": 0.7272,
      "step": 460
    },
    {
      "epoch": 0.07908463738852431,
      "grad_norm": 2.4758348810051345,
      "learning_rate": 2.9641907324181194e-05,
      "loss": 0.6779,
      "step": 470
    },
    {
      "epoch": 0.08076728924785462,
      "grad_norm": 1.3455245255212915,
      "learning_rate": 2.96243005368589e-05,
      "loss": 0.7051,
      "step": 480
    },
    {
      "epoch": 0.08244994110718493,
      "grad_norm": 4.796150475871981,
      "learning_rate": 2.960627671106527e-05,
      "loss": 0.7547,
      "step": 490
    },
    {
      "epoch": 0.08413259296651522,
      "grad_norm": 2.684441445075641,
      "learning_rate": 2.9587836360782405e-05,
      "loss": 0.709,
      "step": 500
    },
    {
      "epoch": 0.08581524482584553,
      "grad_norm": 1.3869329152815553,
      "learning_rate": 2.9568980011870357e-05,
      "loss": 0.7073,
      "step": 510
    },
    {
      "epoch": 0.08749789668517584,
      "grad_norm": 2.5576974478207197,
      "learning_rate": 2.954970820205214e-05,
      "loss": 0.6918,
      "step": 520
    },
    {
      "epoch": 0.08918054854450615,
      "grad_norm": 1.1525450967004647,
      "learning_rate": 2.9530021480898393e-05,
      "loss": 0.6698,
      "step": 530
    },
    {
      "epoch": 0.09086320040383644,
      "grad_norm": 2.847083851829901,
      "learning_rate": 2.9509920409811696e-05,
      "loss": 0.671,
      "step": 540
    },
    {
      "epoch": 0.09254585226316675,
      "grad_norm": 2.561042091789346,
      "learning_rate": 2.9489405562010565e-05,
      "loss": 0.75,
      "step": 550
    },
    {
      "epoch": 0.09422850412249706,
      "grad_norm": 4.458337350053255,
      "learning_rate": 2.9468477522513132e-05,
      "loss": 0.7277,
      "step": 560
    },
    {
      "epoch": 0.09591115598182735,
      "grad_norm": 3.114622509219852,
      "learning_rate": 2.9447136888120408e-05,
      "loss": 0.6967,
      "step": 570
    },
    {
      "epoch": 0.09759380784115766,
      "grad_norm": 1.6295210229360877,
      "learning_rate": 2.9425384267399327e-05,
      "loss": 0.6867,
      "step": 580
    },
    {
      "epoch": 0.09927645970048797,
      "grad_norm": 1.7579117810504754,
      "learning_rate": 2.940322028066534e-05,
      "loss": 0.7236,
      "step": 590
    },
    {
      "epoch": 0.10095911155981828,
      "grad_norm": 1.788183804411441,
      "learning_rate": 2.938064555996476e-05,
      "loss": 0.6864,
      "step": 600
    },
    {
      "epoch": 0.10264176341914857,
      "grad_norm": 2.8340511721646373,
      "learning_rate": 2.9357660749056713e-05,
      "loss": 0.6847,
      "step": 610
    },
    {
      "epoch": 0.10432441527847888,
      "grad_norm": 2.5230840193297985,
      "learning_rate": 2.9334266503394803e-05,
      "loss": 0.6889,
      "step": 620
    },
    {
      "epoch": 0.10600706713780919,
      "grad_norm": 7.346086885083334,
      "learning_rate": 2.9310463490108397e-05,
      "loss": 0.7419,
      "step": 630
    },
    {
      "epoch": 0.10768971899713949,
      "grad_norm": 2.356832890545339,
      "learning_rate": 2.928625238798362e-05,
      "loss": 0.7369,
      "step": 640
    },
    {
      "epoch": 0.1093723708564698,
      "grad_norm": 2.4978380391841095,
      "learning_rate": 2.9261633887443993e-05,
      "loss": 0.6948,
      "step": 650
    },
    {
      "epoch": 0.1110550227158001,
      "grad_norm": 3.535487375505793,
      "learning_rate": 2.9236608690530738e-05,
      "loss": 0.7081,
      "step": 660
    },
    {
      "epoch": 0.11273767457513041,
      "grad_norm": 2.522638625540884,
      "learning_rate": 2.921117751088276e-05,
      "loss": 0.7191,
      "step": 670
    },
    {
      "epoch": 0.1144203264344607,
      "grad_norm": 3.055823541699581,
      "learning_rate": 2.91853410737163e-05,
      "loss": 0.74,
      "step": 680
    },
    {
      "epoch": 0.11610297829379101,
      "grad_norm": 3.270117047516123,
      "learning_rate": 2.915910011580426e-05,
      "loss": 0.6829,
      "step": 690
    },
    {
      "epoch": 0.11778563015312132,
      "grad_norm": 2.3219806056695367,
      "learning_rate": 2.9132455385455176e-05,
      "loss": 0.7062,
      "step": 700
    },
    {
      "epoch": 0.11946828201245162,
      "grad_norm": 1.541921603113568,
      "learning_rate": 2.9105407642491895e-05,
      "loss": 0.7217,
      "step": 710
    },
    {
      "epoch": 0.12115093387178193,
      "grad_norm": 1.557595298876376,
      "learning_rate": 2.907795765822989e-05,
      "loss": 0.7083,
      "step": 720
    },
    {
      "epoch": 0.12283358573111224,
      "grad_norm": 2.3829156571868753,
      "learning_rate": 2.9050106215455283e-05,
      "loss": 0.6992,
      "step": 730
    },
    {
      "epoch": 0.12451623759044254,
      "grad_norm": 7.536777098548366,
      "learning_rate": 2.9021854108402516e-05,
      "loss": 0.7248,
      "step": 740
    },
    {
      "epoch": 0.12619888944977284,
      "grad_norm": 1.3408030642895519,
      "learning_rate": 2.8993202142731693e-05,
      "loss": 0.6375,
      "step": 750
    },
    {
      "epoch": 0.12788154130910315,
      "grad_norm": 2.4880776314537254,
      "learning_rate": 2.8964151135505616e-05,
      "loss": 0.7063,
      "step": 760
    },
    {
      "epoch": 0.12956419316843346,
      "grad_norm": 1.5507053769862247,
      "learning_rate": 2.8934701915166477e-05,
      "loss": 0.73,
      "step": 770
    },
    {
      "epoch": 0.13124684502776376,
      "grad_norm": 3.5622930633942564,
      "learning_rate": 2.890485532151225e-05,
      "loss": 0.7521,
      "step": 780
    },
    {
      "epoch": 0.13292949688709407,
      "grad_norm": 4.188153799459233,
      "learning_rate": 2.887461220567271e-05,
      "loss": 0.6841,
      "step": 790
    },
    {
      "epoch": 0.13461214874642435,
      "grad_norm": 2.702901312773331,
      "learning_rate": 2.8843973430085204e-05,
      "loss": 0.694,
      "step": 800
    },
    {
      "epoch": 0.13629480060575466,
      "grad_norm": 3.8663384632605293,
      "learning_rate": 2.8812939868470016e-05,
      "loss": 0.7376,
      "step": 810
    },
    {
      "epoch": 0.13797745246508497,
      "grad_norm": 7.613582881082294,
      "learning_rate": 2.878151240580548e-05,
      "loss": 0.7082,
      "step": 820
    },
    {
      "epoch": 0.13966010432441528,
      "grad_norm": 2.8755666754814015,
      "learning_rate": 2.874969193830274e-05,
      "loss": 0.7486,
      "step": 830
    },
    {
      "epoch": 0.1413427561837456,
      "grad_norm": 2.049640563529798,
      "learning_rate": 2.871747937338016e-05,
      "loss": 0.7375,
      "step": 840
    },
    {
      "epoch": 0.1430254080430759,
      "grad_norm": 3.2253208680917993,
      "learning_rate": 2.8684875629637505e-05,
      "loss": 0.7183,
      "step": 850
    },
    {
      "epoch": 0.1447080599024062,
      "grad_norm": 2.0453993741696306,
      "learning_rate": 2.8651881636829698e-05,
      "loss": 0.6953,
      "step": 860
    },
    {
      "epoch": 0.1463907117617365,
      "grad_norm": 1.3478445170381042,
      "learning_rate": 2.861849833584032e-05,
      "loss": 0.7205,
      "step": 870
    },
    {
      "epoch": 0.1480733636210668,
      "grad_norm": 6.483405424500114,
      "learning_rate": 2.8584726678654787e-05,
      "loss": 0.7331,
      "step": 880
    },
    {
      "epoch": 0.1497560154803971,
      "grad_norm": 1.6912080503281164,
      "learning_rate": 2.85505676283332e-05,
      "loss": 0.6985,
      "step": 890
    },
    {
      "epoch": 0.1514386673397274,
      "grad_norm": 2.089097733011486,
      "learning_rate": 2.851602215898287e-05,
      "loss": 0.7291,
      "step": 900
    },
    {
      "epoch": 0.15312131919905772,
      "grad_norm": 3.3599665631038325,
      "learning_rate": 2.8481091255730552e-05,
      "loss": 0.7125,
      "step": 910
    },
    {
      "epoch": 0.15480397105838803,
      "grad_norm": 5.803874517218743,
      "learning_rate": 2.844577591469435e-05,
      "loss": 0.6614,
      "step": 920
    },
    {
      "epoch": 0.15648662291771834,
      "grad_norm": 4.180624256153927,
      "learning_rate": 2.8410077142955304e-05,
      "loss": 0.6921,
      "step": 930
    },
    {
      "epoch": 0.15816927477704862,
      "grad_norm": 2.51395384445247,
      "learning_rate": 2.8373995958528683e-05,
      "loss": 0.6788,
      "step": 940
    },
    {
      "epoch": 0.15985192663637893,
      "grad_norm": 2.0786229734439,
      "learning_rate": 2.8337533390334942e-05,
      "loss": 0.6324,
      "step": 950
    },
    {
      "epoch": 0.16153457849570924,
      "grad_norm": 2.1798201763285774,
      "learning_rate": 2.8300690478170388e-05,
      "loss": 0.7128,
      "step": 960
    },
    {
      "epoch": 0.16321723035503954,
      "grad_norm": 1.7736042633296192,
      "learning_rate": 2.826346827267753e-05,
      "loss": 0.6854,
      "step": 970
    },
    {
      "epoch": 0.16489988221436985,
      "grad_norm": 3.6499571810784377,
      "learning_rate": 2.8225867835315114e-05,
      "loss": 0.7246,
      "step": 980
    },
    {
      "epoch": 0.16658253407370016,
      "grad_norm": 8.401076529411414,
      "learning_rate": 2.8187890238327842e-05,
      "loss": 0.7166,
      "step": 990
    },
    {
      "epoch": 0.16826518593303044,
      "grad_norm": 1.6815155727131568,
      "learning_rate": 2.814953656471583e-05,
      "loss": 0.6962,
      "step": 1000
    },
    {
      "epoch": 0.16994783779236075,
      "grad_norm": 3.59100648398944,
      "learning_rate": 2.8110807908203682e-05,
      "loss": 0.7271,
      "step": 1010
    },
    {
      "epoch": 0.17163048965169106,
      "grad_norm": 2.9612400836384034,
      "learning_rate": 2.8071705373209328e-05,
      "loss": 0.7048,
      "step": 1020
    },
    {
      "epoch": 0.17331314151102137,
      "grad_norm": 1.6314524411685434,
      "learning_rate": 2.803223007481252e-05,
      "loss": 0.7237,
      "step": 1030
    },
    {
      "epoch": 0.17499579337035168,
      "grad_norm": 4.046292885407821,
      "learning_rate": 2.7992383138723034e-05,
      "loss": 0.7066,
      "step": 1040
    },
    {
      "epoch": 0.17667844522968199,
      "grad_norm": 3.4626891652569665,
      "learning_rate": 2.7952165701248573e-05,
      "loss": 0.7537,
      "step": 1050
    },
    {
      "epoch": 0.1783610970890123,
      "grad_norm": 4.129895397644279,
      "learning_rate": 2.7911578909262353e-05,
      "loss": 0.7348,
      "step": 1060
    },
    {
      "epoch": 0.18004374894834257,
      "grad_norm": 2.1894044487856847,
      "learning_rate": 2.787062392017041e-05,
      "loss": 0.7145,
      "step": 1070
    },
    {
      "epoch": 0.18172640080767288,
      "grad_norm": 2.988495224416439,
      "learning_rate": 2.7829301901878592e-05,
      "loss": 0.7091,
      "step": 1080
    },
    {
      "epoch": 0.1834090526670032,
      "grad_norm": 2.493227176786327,
      "learning_rate": 2.7787614032759243e-05,
      "loss": 0.7427,
      "step": 1090
    },
    {
      "epoch": 0.1850917045263335,
      "grad_norm": 2.9382266505350723,
      "learning_rate": 2.7745561501617605e-05,
      "loss": 0.7081,
      "step": 1100
    },
    {
      "epoch": 0.1867743563856638,
      "grad_norm": 1.9294251174769146,
      "learning_rate": 2.7703145507657923e-05,
      "loss": 0.679,
      "step": 1110
    },
    {
      "epoch": 0.18845700824499412,
      "grad_norm": 7.011830550553666,
      "learning_rate": 2.766036726044926e-05,
      "loss": 0.6962,
      "step": 1120
    },
    {
      "epoch": 0.19013966010432443,
      "grad_norm": 1.8058177496791177,
      "learning_rate": 2.7617227979890957e-05,
      "loss": 0.6953,
      "step": 1130
    },
    {
      "epoch": 0.1918223119636547,
      "grad_norm": 2.2546595962288727,
      "learning_rate": 2.7573728896177897e-05,
      "loss": 0.6853,
      "step": 1140
    },
    {
      "epoch": 0.19350496382298502,
      "grad_norm": 1.7701647300358836,
      "learning_rate": 2.7529871249765397e-05,
      "loss": 0.737,
      "step": 1150
    },
    {
      "epoch": 0.19518761568231532,
      "grad_norm": 3.2767535691041396,
      "learning_rate": 2.7485656291333845e-05,
      "loss": 0.6878,
      "step": 1160
    },
    {
      "epoch": 0.19687026754164563,
      "grad_norm": 1.231100350207441,
      "learning_rate": 2.7441085281753028e-05,
      "loss": 0.7044,
      "step": 1170
    },
    {
      "epoch": 0.19855291940097594,
      "grad_norm": 5.103379397758491,
      "learning_rate": 2.739615949204617e-05,
      "loss": 0.7028,
      "step": 1180
    },
    {
      "epoch": 0.20023557126030625,
      "grad_norm": 1.745258105735824,
      "learning_rate": 2.7350880203353703e-05,
      "loss": 0.7123,
      "step": 1190
    },
    {
      "epoch": 0.20191822311963656,
      "grad_norm": 2.528898960464809,
      "learning_rate": 2.7305248706896722e-05,
      "loss": 0.7242,
      "step": 1200
    },
    {
      "epoch": 0.20360087497896684,
      "grad_norm": 1.329326803950539,
      "learning_rate": 2.7259266303940164e-05,
      "loss": 0.7315,
      "step": 1210
    },
    {
      "epoch": 0.20528352683829715,
      "grad_norm": 3.523954433912976,
      "learning_rate": 2.7212934305755697e-05,
      "loss": 0.7022,
      "step": 1220
    },
    {
      "epoch": 0.20696617869762746,
      "grad_norm": 1.3845861665687345,
      "learning_rate": 2.7166254033584343e-05,
      "loss": 0.6788,
      "step": 1230
    },
    {
      "epoch": 0.20864883055695777,
      "grad_norm": 1.6893702845026013,
      "learning_rate": 2.7119226818598784e-05,
      "loss": 0.7083,
      "step": 1240
    },
    {
      "epoch": 0.21033148241628807,
      "grad_norm": 3.481606379952265,
      "learning_rate": 2.7071854001865402e-05,
      "loss": 0.7104,
      "step": 1250
    },
    {
      "epoch": 0.21201413427561838,
      "grad_norm": 1.3880604016054,
      "learning_rate": 2.702413693430604e-05,
      "loss": 0.7192,
      "step": 1260
    },
    {
      "epoch": 0.2136967861349487,
      "grad_norm": 2.7420634271532625,
      "learning_rate": 2.697607697665948e-05,
      "loss": 0.7329,
      "step": 1270
    },
    {
      "epoch": 0.21537943799427897,
      "grad_norm": 1.3383701328350484,
      "learning_rate": 2.6927675499442648e-05,
      "loss": 0.7523,
      "step": 1280
    },
    {
      "epoch": 0.21706208985360928,
      "grad_norm": 5.63600709352392,
      "learning_rate": 2.68789338829115e-05,
      "loss": 0.6938,
      "step": 1290
    },
    {
      "epoch": 0.2187447417129396,
      "grad_norm": 1.973997298554772,
      "learning_rate": 2.6829853517021698e-05,
      "loss": 0.7024,
      "step": 1300
    },
    {
      "epoch": 0.2204273935722699,
      "grad_norm": 5.331233664305369,
      "learning_rate": 2.6780435801388945e-05,
      "loss": 0.6978,
      "step": 1310
    },
    {
      "epoch": 0.2221100454316002,
      "grad_norm": 14.545018258920948,
      "learning_rate": 2.6730682145249093e-05,
      "loss": 0.7288,
      "step": 1320
    },
    {
      "epoch": 0.22379269729093051,
      "grad_norm": 2.772459303589031,
      "learning_rate": 2.668059396741795e-05,
      "loss": 0.69,
      "step": 1330
    },
    {
      "epoch": 0.22547534915026082,
      "grad_norm": 1.9806140492727284,
      "learning_rate": 2.6630172696250804e-05,
      "loss": 0.7194,
      "step": 1340
    },
    {
      "epoch": 0.2271580010095911,
      "grad_norm": 2.5305067313330305,
      "learning_rate": 2.6579419769601715e-05,
      "loss": 0.7209,
      "step": 1350
    },
    {
      "epoch": 0.2288406528689214,
      "grad_norm": 4.329479239778255,
      "learning_rate": 2.6528336634782493e-05,
      "loss": 0.7263,
      "step": 1360
    },
    {
      "epoch": 0.23052330472825172,
      "grad_norm": 2.4385930080514124,
      "learning_rate": 2.6476924748521443e-05,
      "loss": 0.7169,
      "step": 1370
    },
    {
      "epoch": 0.23220595658758203,
      "grad_norm": 4.486791723774815,
      "learning_rate": 2.6425185576921812e-05,
      "loss": 0.6791,
      "step": 1380
    },
    {
      "epoch": 0.23388860844691234,
      "grad_norm": 2.1648975510177353,
      "learning_rate": 2.637312059541997e-05,
      "loss": 0.722,
      "step": 1390
    },
    {
      "epoch": 0.23557126030624265,
      "grad_norm": 2.497984836932449,
      "learning_rate": 2.632073128874336e-05,
      "loss": 0.737,
      "step": 1400
    },
    {
      "epoch": 0.23725391216557296,
      "grad_norm": 1.6911389710154248,
      "learning_rate": 2.6268019150868144e-05,
      "loss": 0.7027,
      "step": 1410
    },
    {
      "epoch": 0.23893656402490324,
      "grad_norm": 5.094854691429602,
      "learning_rate": 2.62149856849766e-05,
      "loss": 0.7431,
      "step": 1420
    },
    {
      "epoch": 0.24061921588423354,
      "grad_norm": 1.6056704058079299,
      "learning_rate": 2.616163240341426e-05,
      "loss": 0.7215,
      "step": 1430
    },
    {
      "epoch": 0.24230186774356385,
      "grad_norm": 2.0440590394408793,
      "learning_rate": 2.6107960827646774e-05,
      "loss": 0.6864,
      "step": 1440
    },
    {
      "epoch": 0.24398451960289416,
      "grad_norm": 1.4019933491248435,
      "learning_rate": 2.6053972488216538e-05,
      "loss": 0.7007,
      "step": 1450
    },
    {
      "epoch": 0.24566717146222447,
      "grad_norm": 6.4772716175425185,
      "learning_rate": 2.5999668924699035e-05,
      "loss": 0.6963,
      "step": 1460
    },
    {
      "epoch": 0.24734982332155478,
      "grad_norm": 1.235157923543473,
      "learning_rate": 2.5945051685658923e-05,
      "loss": 0.7158,
      "step": 1470
    },
    {
      "epoch": 0.2490324751808851,
      "grad_norm": 1.6576585358395288,
      "learning_rate": 2.5890122328605908e-05,
      "loss": 0.6918,
      "step": 1480
    },
    {
      "epoch": 0.25071512704021537,
      "grad_norm": 2.6005430314710645,
      "learning_rate": 2.5834882419950295e-05,
      "loss": 0.6666,
      "step": 1490
    },
    {
      "epoch": 0.2523977788995457,
      "grad_norm": 3.83061566974576,
      "learning_rate": 2.577933353495833e-05,
      "loss": 0.724,
      "step": 1500
    },
    {
      "epoch": 0.254080430758876,
      "grad_norm": 2.259260300802235,
      "learning_rate": 2.5723477257707293e-05,
      "loss": 0.725,
      "step": 1510
    },
    {
      "epoch": 0.2557630826182063,
      "grad_norm": 3.1023391020410283,
      "learning_rate": 2.566731518104029e-05,
      "loss": 0.709,
      "step": 1520
    },
    {
      "epoch": 0.2574457344775366,
      "grad_norm": 2.375072076607274,
      "learning_rate": 2.5610848906520878e-05,
      "loss": 0.7031,
      "step": 1530
    },
    {
      "epoch": 0.2591283863368669,
      "grad_norm": 1.638162563319741,
      "learning_rate": 2.5554080044387344e-05,
      "loss": 0.7031,
      "step": 1540
    },
    {
      "epoch": 0.2608110381961972,
      "grad_norm": 8.846026339935685,
      "learning_rate": 2.5497010213506825e-05,
      "loss": 0.7119,
      "step": 1550
    },
    {
      "epoch": 0.26249369005552753,
      "grad_norm": 4.589496329936434,
      "learning_rate": 2.5439641041329128e-05,
      "loss": 0.7043,
      "step": 1560
    },
    {
      "epoch": 0.26417634191485784,
      "grad_norm": 0.9945782670551377,
      "learning_rate": 2.5381974163840313e-05,
      "loss": 0.7026,
      "step": 1570
    },
    {
      "epoch": 0.26585899377418815,
      "grad_norm": 2.341138070970226,
      "learning_rate": 2.532401122551605e-05,
      "loss": 0.744,
      "step": 1580
    },
    {
      "epoch": 0.2675416456335184,
      "grad_norm": 3.446122331658564,
      "learning_rate": 2.526575387927473e-05,
      "loss": 0.6861,
      "step": 1590
    },
    {
      "epoch": 0.2692242974928487,
      "grad_norm": 4.165637435951758,
      "learning_rate": 2.52072037864303e-05,
      "loss": 0.7065,
      "step": 1600
    },
    {
      "epoch": 0.270906949352179,
      "grad_norm": 2.156163863520989,
      "learning_rate": 2.5148362616644926e-05,
      "loss": 0.7383,
      "step": 1610
    },
    {
      "epoch": 0.2725896012115093,
      "grad_norm": 1.489844754983356,
      "learning_rate": 2.508923204788135e-05,
      "loss": 0.7061,
      "step": 1620
    },
    {
      "epoch": 0.27427225307083963,
      "grad_norm": 1.330475018363876,
      "learning_rate": 2.5029813766355062e-05,
      "loss": 0.6916,
      "step": 1630
    },
    {
      "epoch": 0.27595490493016994,
      "grad_norm": 1.5166804777641398,
      "learning_rate": 2.4970109466486202e-05,
      "loss": 0.6998,
      "step": 1640
    },
    {
      "epoch": 0.27763755678950025,
      "grad_norm": 1.1006885605957994,
      "learning_rate": 2.491012085085122e-05,
      "loss": 0.7095,
      "step": 1650
    },
    {
      "epoch": 0.27932020864883056,
      "grad_norm": 3.137721907226618,
      "learning_rate": 2.4849849630134384e-05,
      "loss": 0.7204,
      "step": 1660
    },
    {
      "epoch": 0.28100286050816087,
      "grad_norm": 2.0954950376873747,
      "learning_rate": 2.4789297523078924e-05,
      "loss": 0.7149,
      "step": 1670
    },
    {
      "epoch": 0.2826855123674912,
      "grad_norm": 1.59599915913629,
      "learning_rate": 2.4728466256438072e-05,
      "loss": 0.7052,
      "step": 1680
    },
    {
      "epoch": 0.2843681642268215,
      "grad_norm": 8.944835293630385,
      "learning_rate": 2.4667357564925798e-05,
      "loss": 0.7161,
      "step": 1690
    },
    {
      "epoch": 0.2860508160861518,
      "grad_norm": 6.306096521027849,
      "learning_rate": 2.460597319116735e-05,
      "loss": 0.7219,
      "step": 1700
    },
    {
      "epoch": 0.2877334679454821,
      "grad_norm": 8.531648843126508,
      "learning_rate": 2.4544314885649552e-05,
      "loss": 0.7195,
      "step": 1710
    },
    {
      "epoch": 0.2894161198048124,
      "grad_norm": 6.214651933223859,
      "learning_rate": 2.4482384406670883e-05,
      "loss": 0.6836,
      "step": 1720
    },
    {
      "epoch": 0.29109877166414266,
      "grad_norm": 3.495702421141607,
      "learning_rate": 2.4420183520291354e-05,
      "loss": 0.7241,
      "step": 1730
    },
    {
      "epoch": 0.292781423523473,
      "grad_norm": 7.473552612209333,
      "learning_rate": 2.4357714000282127e-05,
      "loss": 0.6664,
      "step": 1740
    },
    {
      "epoch": 0.2944640753828033,
      "grad_norm": 2.528030042904385,
      "learning_rate": 2.4294977628074938e-05,
      "loss": 0.7415,
      "step": 1750
    },
    {
      "epoch": 0.2961467272421336,
      "grad_norm": 15.300967237030209,
      "learning_rate": 2.42319761927113e-05,
      "loss": 0.7336,
      "step": 1760
    },
    {
      "epoch": 0.2978293791014639,
      "grad_norm": 5.5616370399668345,
      "learning_rate": 2.4168711490791484e-05,
      "loss": 0.72,
      "step": 1770
    },
    {
      "epoch": 0.2995120309607942,
      "grad_norm": 3.7113016371508993,
      "learning_rate": 2.4105185326423286e-05,
      "loss": 0.723,
      "step": 1780
    },
    {
      "epoch": 0.3011946828201245,
      "grad_norm": 2.0578017620481397,
      "learning_rate": 2.4041399511170574e-05,
      "loss": 0.7008,
      "step": 1790
    },
    {
      "epoch": 0.3028773346794548,
      "grad_norm": 1.5192292607159725,
      "learning_rate": 2.3977355864001635e-05,
      "loss": 0.7107,
      "step": 1800
    },
    {
      "epoch": 0.30455998653878513,
      "grad_norm": 1.4432235055034852,
      "learning_rate": 2.3913056211237304e-05,
      "loss": 0.7112,
      "step": 1810
    },
    {
      "epoch": 0.30624263839811544,
      "grad_norm": 3.8368406145537577,
      "learning_rate": 2.3848502386498866e-05,
      "loss": 0.6875,
      "step": 1820
    },
    {
      "epoch": 0.30792529025744575,
      "grad_norm": 1.7029662451991225,
      "learning_rate": 2.3783696230655802e-05,
      "loss": 0.6797,
      "step": 1830
    },
    {
      "epoch": 0.30960794211677606,
      "grad_norm": 2.340409810623994,
      "learning_rate": 2.371863959177326e-05,
      "loss": 0.7211,
      "step": 1840
    },
    {
      "epoch": 0.31129059397610637,
      "grad_norm": 8.08876148980577,
      "learning_rate": 2.365333432505937e-05,
      "loss": 0.7208,
      "step": 1850
    },
    {
      "epoch": 0.3129732458354367,
      "grad_norm": 3.415957276427778,
      "learning_rate": 2.3587782292812323e-05,
      "loss": 0.707,
      "step": 1860
    },
    {
      "epoch": 0.31465589769476693,
      "grad_norm": 1.116630095030083,
      "learning_rate": 2.35219853643673e-05,
      "loss": 0.6863,
      "step": 1870
    },
    {
      "epoch": 0.31633854955409724,
      "grad_norm": 1.9365729913801322,
      "learning_rate": 2.3455945416043132e-05,
      "loss": 0.705,
      "step": 1880
    },
    {
      "epoch": 0.31802120141342755,
      "grad_norm": 1.401982720531144,
      "learning_rate": 2.338966433108879e-05,
      "loss": 0.6872,
      "step": 1890
    },
    {
      "epoch": 0.31970385327275785,
      "grad_norm": 1.6958509020945827,
      "learning_rate": 2.3323143999629712e-05,
      "loss": 0.7146,
      "step": 1900
    },
    {
      "epoch": 0.32138650513208816,
      "grad_norm": 1.6193006043035303,
      "learning_rate": 2.3256386318613877e-05,
      "loss": 0.6887,
      "step": 1910
    },
    {
      "epoch": 0.32306915699141847,
      "grad_norm": 12.979287887341894,
      "learning_rate": 2.318939319175771e-05,
      "loss": 0.7063,
      "step": 1920
    },
    {
      "epoch": 0.3247518088507488,
      "grad_norm": 2.053699792676608,
      "learning_rate": 2.3122166529491822e-05,
      "loss": 0.7921,
      "step": 1930
    },
    {
      "epoch": 0.3264344607100791,
      "grad_norm": 2.1805832639041993,
      "learning_rate": 2.3054708248906483e-05,
      "loss": 0.6892,
      "step": 1940
    },
    {
      "epoch": 0.3281171125694094,
      "grad_norm": 2.627137721499779,
      "learning_rate": 2.2987020273696996e-05,
      "loss": 0.6937,
      "step": 1950
    },
    {
      "epoch": 0.3297997644287397,
      "grad_norm": 3.4422222646515284,
      "learning_rate": 2.2919104534108825e-05,
      "loss": 0.7274,
      "step": 1960
    },
    {
      "epoch": 0.33148241628807,
      "grad_norm": 3.942895129812573,
      "learning_rate": 2.2850962966882547e-05,
      "loss": 0.7089,
      "step": 1970
    },
    {
      "epoch": 0.3331650681474003,
      "grad_norm": 2.250499246098397,
      "learning_rate": 2.278259751519861e-05,
      "loss": 0.7288,
      "step": 1980
    },
    {
      "epoch": 0.33484772000673063,
      "grad_norm": 1.9217973398784702,
      "learning_rate": 2.2714010128621957e-05,
      "loss": 0.6971,
      "step": 1990
    },
    {
      "epoch": 0.3365303718660609,
      "grad_norm": 7.4365036240913005,
      "learning_rate": 2.2645202763046385e-05,
      "loss": 0.7027,
      "step": 2000
    },
    {
      "epoch": 0.3382130237253912,
      "grad_norm": 3.6010458486700267,
      "learning_rate": 2.2576177380638808e-05,
      "loss": 0.7135,
      "step": 2010
    },
    {
      "epoch": 0.3398956755847215,
      "grad_norm": 3.6134685076054756,
      "learning_rate": 2.2506935949783277e-05,
      "loss": 0.6703,
      "step": 2020
    },
    {
      "epoch": 0.3415783274440518,
      "grad_norm": 2.047032124552865,
      "learning_rate": 2.243748044502485e-05,
      "loss": 0.7021,
      "step": 2030
    },
    {
      "epoch": 0.3432609793033821,
      "grad_norm": 7.2853023401907135,
      "learning_rate": 2.236781284701332e-05,
      "loss": 0.698,
      "step": 2040
    },
    {
      "epoch": 0.3449436311627124,
      "grad_norm": 10.62863599608186,
      "learning_rate": 2.229793514244666e-05,
      "loss": 0.7083,
      "step": 2050
    },
    {
      "epoch": 0.34662628302204274,
      "grad_norm": 2.7228396995619293,
      "learning_rate": 2.222784932401445e-05,
      "loss": 0.6714,
      "step": 2060
    },
    {
      "epoch": 0.34830893488137304,
      "grad_norm": 8.496361795340652,
      "learning_rate": 2.2157557390341e-05,
      "loss": 0.7215,
      "step": 2070
    },
    {
      "epoch": 0.34999158674070335,
      "grad_norm": 2.7703412032428885,
      "learning_rate": 2.2087061345928375e-05,
      "loss": 0.7355,
      "step": 2080
    },
    {
      "epoch": 0.35167423860003366,
      "grad_norm": 1.615589476440187,
      "learning_rate": 2.2016363201099205e-05,
      "loss": 0.6552,
      "step": 2090
    },
    {
      "epoch": 0.35335689045936397,
      "grad_norm": 2.325164551192993,
      "learning_rate": 2.1945464971939424e-05,
      "loss": 0.7059,
      "step": 2100
    },
    {
      "epoch": 0.3550395423186943,
      "grad_norm": 3.5764361397956113,
      "learning_rate": 2.1874368680240692e-05,
      "loss": 0.7005,
      "step": 2110
    },
    {
      "epoch": 0.3567221941780246,
      "grad_norm": 1.347667616644207,
      "learning_rate": 2.1803076353442806e-05,
      "loss": 0.7102,
      "step": 2120
    },
    {
      "epoch": 0.3584048460373549,
      "grad_norm": 2.0318308073252425,
      "learning_rate": 2.1731590024575848e-05,
      "loss": 0.7173,
      "step": 2130
    },
    {
      "epoch": 0.36008749789668515,
      "grad_norm": 1.3373304099117234,
      "learning_rate": 2.165991173220223e-05,
      "loss": 0.7601,
      "step": 2140
    },
    {
      "epoch": 0.36177014975601546,
      "grad_norm": 1.9787611361857145,
      "learning_rate": 2.158804352035855e-05,
      "loss": 0.721,
      "step": 2150
    },
    {
      "epoch": 0.36345280161534577,
      "grad_norm": 1.0674118498737855,
      "learning_rate": 2.1515987438497295e-05,
      "loss": 0.6998,
      "step": 2160
    },
    {
      "epoch": 0.3651354534746761,
      "grad_norm": 2.0137284693641386,
      "learning_rate": 2.1443745541428416e-05,
      "loss": 0.6765,
      "step": 2170
    },
    {
      "epoch": 0.3668181053340064,
      "grad_norm": 4.298295885455176,
      "learning_rate": 2.137131988926072e-05,
      "loss": 0.7012,
      "step": 2180
    },
    {
      "epoch": 0.3685007571933367,
      "grad_norm": 9.91613007277119,
      "learning_rate": 2.129871254734312e-05,
      "loss": 0.7062,
      "step": 2190
    },
    {
      "epoch": 0.370183409052667,
      "grad_norm": 1.6702084657906056,
      "learning_rate": 2.122592558620575e-05,
      "loss": 0.7319,
      "step": 2200
    },
    {
      "epoch": 0.3718660609119973,
      "grad_norm": 2.2151204900067634,
      "learning_rate": 2.1152961081500906e-05,
      "loss": 0.6853,
      "step": 2210
    },
    {
      "epoch": 0.3735487127713276,
      "grad_norm": 2.06281971620876,
      "learning_rate": 2.1079821113943863e-05,
      "loss": 0.7137,
      "step": 2220
    },
    {
      "epoch": 0.3752313646306579,
      "grad_norm": 2.865770701923554,
      "learning_rate": 2.100650776925353e-05,
      "loss": 0.7066,
      "step": 2230
    },
    {
      "epoch": 0.37691401648998824,
      "grad_norm": 2.36832805891566,
      "learning_rate": 2.0933023138092995e-05,
      "loss": 0.7401,
      "step": 2240
    },
    {
      "epoch": 0.37859666834931854,
      "grad_norm": 1.54287595832039,
      "learning_rate": 2.0859369316009877e-05,
      "loss": 0.7535,
      "step": 2250
    },
    {
      "epoch": 0.38027932020864885,
      "grad_norm": 2.919513911195001,
      "learning_rate": 2.0785548403376592e-05,
      "loss": 0.71,
      "step": 2260
    },
    {
      "epoch": 0.38196197206797916,
      "grad_norm": 1.618528464339543,
      "learning_rate": 2.0711562505330437e-05,
      "loss": 0.7181,
      "step": 2270
    },
    {
      "epoch": 0.3836446239273094,
      "grad_norm": 1.1878290222512224,
      "learning_rate": 2.063741373171357e-05,
      "loss": 0.732,
      "step": 2280
    },
    {
      "epoch": 0.3853272757866397,
      "grad_norm": 2.6515115997158554,
      "learning_rate": 2.0563104197012847e-05,
      "loss": 0.709,
      "step": 2290
    },
    {
      "epoch": 0.38700992764597003,
      "grad_norm": 1.3199743255372487,
      "learning_rate": 2.048863602029951e-05,
      "loss": 0.7276,
      "step": 2300
    },
    {
      "epoch": 0.38869257950530034,
      "grad_norm": 1.9262341939684873,
      "learning_rate": 2.0414011325168777e-05,
      "loss": 0.6964,
      "step": 2310
    },
    {
      "epoch": 0.39037523136463065,
      "grad_norm": 1.3563137443786035,
      "learning_rate": 2.0339232239679252e-05,
      "loss": 0.685,
      "step": 2320
    },
    {
      "epoch": 0.39205788322396096,
      "grad_norm": 1.6219810171886226,
      "learning_rate": 2.026430089629229e-05,
      "loss": 0.6916,
      "step": 2330
    },
    {
      "epoch": 0.39374053508329127,
      "grad_norm": 2.441077712269494,
      "learning_rate": 2.0189219431811123e-05,
      "loss": 0.7447,
      "step": 2340
    },
    {
      "epoch": 0.3954231869426216,
      "grad_norm": 2.1115157955300616,
      "learning_rate": 2.0113989987319988e-05,
      "loss": 0.6802,
      "step": 2350
    },
    {
      "epoch": 0.3971058388019519,
      "grad_norm": 1.4354556930544433,
      "learning_rate": 2.0038614708123023e-05,
      "loss": 0.7004,
      "step": 2360
    },
    {
      "epoch": 0.3987884906612822,
      "grad_norm": 2.4000999827221756,
      "learning_rate": 1.996309574368311e-05,
      "loss": 0.6898,
      "step": 2370
    },
    {
      "epoch": 0.4004711425206125,
      "grad_norm": 1.03142424758583,
      "learning_rate": 1.9887435247560586e-05,
      "loss": 0.7086,
      "step": 2380
    },
    {
      "epoch": 0.4021537943799428,
      "grad_norm": 2.8813939519904914,
      "learning_rate": 1.981163537735181e-05,
      "loss": 0.6954,
      "step": 2390
    },
    {
      "epoch": 0.4038364462392731,
      "grad_norm": 1.1816627689525059,
      "learning_rate": 1.9735698294627644e-05,
      "loss": 0.7142,
      "step": 2400
    },
    {
      "epoch": 0.4055190980986034,
      "grad_norm": 1.3945618099849828,
      "learning_rate": 1.9659626164871828e-05,
      "loss": 0.7097,
      "step": 2410
    },
    {
      "epoch": 0.4072017499579337,
      "grad_norm": 1.9043603505045925,
      "learning_rate": 1.95834211574192e-05,
      "loss": 0.6992,
      "step": 2420
    },
    {
      "epoch": 0.408884401817264,
      "grad_norm": 3.5606637873787648,
      "learning_rate": 1.9507085445393855e-05,
      "loss": 0.7118,
      "step": 2430
    },
    {
      "epoch": 0.4105670536765943,
      "grad_norm": 1.569864161622481,
      "learning_rate": 1.9430621205647156e-05,
      "loss": 0.6971,
      "step": 2440
    },
    {
      "epoch": 0.4122497055359246,
      "grad_norm": 4.4331480365167,
      "learning_rate": 1.935403061869568e-05,
      "loss": 0.6726,
      "step": 2450
    },
    {
      "epoch": 0.4139323573952549,
      "grad_norm": 4.432472927572385,
      "learning_rate": 1.9277315868659017e-05,
      "loss": 0.718,
      "step": 2460
    },
    {
      "epoch": 0.4156150092545852,
      "grad_norm": 1.8000872219993729,
      "learning_rate": 1.920047914319749e-05,
      "loss": 0.7391,
      "step": 2470
    },
    {
      "epoch": 0.41729766111391553,
      "grad_norm": 2.2415704708896294,
      "learning_rate": 1.9123522633449772e-05,
      "loss": 0.7093,
      "step": 2480
    },
    {
      "epoch": 0.41898031297324584,
      "grad_norm": 0.9860844679413583,
      "learning_rate": 1.9046448533970423e-05,
      "loss": 0.6874,
      "step": 2490
    },
    {
      "epoch": 0.42066296483257615,
      "grad_norm": 4.705732364227723,
      "learning_rate": 1.8969259042667255e-05,
      "loss": 0.6766,
      "step": 2500
    },
    {
      "epoch": 0.42234561669190646,
      "grad_norm": 3.3731463958248264,
      "learning_rate": 1.8891956360738706e-05,
      "loss": 0.7155,
      "step": 2510
    },
    {
      "epoch": 0.42402826855123676,
      "grad_norm": 2.2599456108772915,
      "learning_rate": 1.881454269261105e-05,
      "loss": 0.6901,
      "step": 2520
    },
    {
      "epoch": 0.4257109204105671,
      "grad_norm": 1.887949729545168,
      "learning_rate": 1.873702024587553e-05,
      "loss": 0.7528,
      "step": 2530
    },
    {
      "epoch": 0.4273935722698974,
      "grad_norm": 3.5001965876274115,
      "learning_rate": 1.865939123122541e-05,
      "loss": 0.6795,
      "step": 2540
    },
    {
      "epoch": 0.4290762241292277,
      "grad_norm": 3.739512720034628,
      "learning_rate": 1.858165786239293e-05,
      "loss": 0.6902,
      "step": 2550
    },
    {
      "epoch": 0.43075887598855794,
      "grad_norm": 2.835735723710262,
      "learning_rate": 1.850382235608618e-05,
      "loss": 0.7013,
      "step": 2560
    },
    {
      "epoch": 0.43244152784788825,
      "grad_norm": 3.0809412432066594,
      "learning_rate": 1.842588693192589e-05,
      "loss": 0.6916,
      "step": 2570
    },
    {
      "epoch": 0.43412417970721856,
      "grad_norm": 1.9629938051045357,
      "learning_rate": 1.834785381238212e-05,
      "loss": 0.7165,
      "step": 2580
    },
    {
      "epoch": 0.43580683156654887,
      "grad_norm": 2.2447376170957023,
      "learning_rate": 1.8269725222710895e-05,
      "loss": 0.7273,
      "step": 2590
    },
    {
      "epoch": 0.4374894834258792,
      "grad_norm": 2.214057365632279,
      "learning_rate": 1.8191503390890745e-05,
      "loss": 0.7073,
      "step": 2600
    },
    {
      "epoch": 0.4391721352852095,
      "grad_norm": 1.8172031418314936,
      "learning_rate": 1.8113190547559167e-05,
      "loss": 0.715,
      "step": 2610
    },
    {
      "epoch": 0.4408547871445398,
      "grad_norm": 2.835706633111018,
      "learning_rate": 1.8034788925949024e-05,
      "loss": 0.706,
      "step": 2620
    },
    {
      "epoch": 0.4425374390038701,
      "grad_norm": 1.1621082466184123,
      "learning_rate": 1.795630076182484e-05,
      "loss": 0.7084,
      "step": 2630
    },
    {
      "epoch": 0.4442200908632004,
      "grad_norm": 11.176574486878446,
      "learning_rate": 1.7877728293419067e-05,
      "loss": 0.7092,
      "step": 2640
    },
    {
      "epoch": 0.4459027427225307,
      "grad_norm": 10.672692821884855,
      "learning_rate": 1.7799073761368236e-05,
      "loss": 0.7247,
      "step": 2650
    },
    {
      "epoch": 0.44758539458186103,
      "grad_norm": 3.089797606768679,
      "learning_rate": 1.7720339408649084e-05,
      "loss": 0.7212,
      "step": 2660
    },
    {
      "epoch": 0.44926804644119134,
      "grad_norm": 11.51700032068762,
      "learning_rate": 1.7641527480514575e-05,
      "loss": 0.708,
      "step": 2670
    },
    {
      "epoch": 0.45095069830052165,
      "grad_norm": 2.9535720728987007,
      "learning_rate": 1.756264022442987e-05,
      "loss": 0.6986,
      "step": 2680
    },
    {
      "epoch": 0.4526333501598519,
      "grad_norm": 4.148500258838489,
      "learning_rate": 1.7483679890008247e-05,
      "loss": 0.6571,
      "step": 2690
    },
    {
      "epoch": 0.4543160020191822,
      "grad_norm": 1.7950072159263486,
      "learning_rate": 1.740464872894695e-05,
      "loss": 0.6751,
      "step": 2700
    },
    {
      "epoch": 0.4559986538785125,
      "grad_norm": 1.3583282737793725,
      "learning_rate": 1.732554899496297e-05,
      "loss": 0.7705,
      "step": 2710
    },
    {
      "epoch": 0.4576813057378428,
      "grad_norm": 2.184156205512354,
      "learning_rate": 1.7246382943728777e-05,
      "loss": 0.6631,
      "step": 2720
    },
    {
      "epoch": 0.45936395759717313,
      "grad_norm": 1.6297260663890405,
      "learning_rate": 1.7167152832807997e-05,
      "loss": 0.6946,
      "step": 2730
    },
    {
      "epoch": 0.46104660945650344,
      "grad_norm": 1.6037313522955436,
      "learning_rate": 1.7087860921591047e-05,
      "loss": 0.6914,
      "step": 2740
    },
    {
      "epoch": 0.46272926131583375,
      "grad_norm": 1.6197737501652023,
      "learning_rate": 1.7008509471230673e-05,
      "loss": 0.7312,
      "step": 2750
    },
    {
      "epoch": 0.46441191317516406,
      "grad_norm": 2.0167791329962395,
      "learning_rate": 1.69291007445775e-05,
      "loss": 0.6459,
      "step": 2760
    },
    {
      "epoch": 0.46609456503449437,
      "grad_norm": 7.215038584013174,
      "learning_rate": 1.684963700611548e-05,
      "loss": 0.6989,
      "step": 2770
    },
    {
      "epoch": 0.4677772168938247,
      "grad_norm": 3.3650128982999603,
      "learning_rate": 1.677012052189734e-05,
      "loss": 0.7156,
      "step": 2780
    },
    {
      "epoch": 0.469459868753155,
      "grad_norm": 1.6353091631825594,
      "learning_rate": 1.6690553559479946e-05,
      "loss": 0.6916,
      "step": 2790
    },
    {
      "epoch": 0.4711425206124853,
      "grad_norm": 2.314420263644613,
      "learning_rate": 1.6610938387859623e-05,
      "loss": 0.7351,
      "step": 2800
    }
  ],
  "logging_steps": 10,
  "max_steps": 5943,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 400,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.2756662754476032e+19,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}