{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 1699,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0005885815185403178,
"grad_norm": 81.18565670358198,
"learning_rate": 5.882352941176471e-08,
"loss": 4.1356,
"step": 1
},
{
"epoch": 0.002942907592701589,
"grad_norm": 77.04540875822474,
"learning_rate": 2.9411764705882356e-07,
"loss": 4.1935,
"step": 5
},
{
"epoch": 0.005885815185403178,
"grad_norm": 35.44829095248985,
"learning_rate": 5.882352941176471e-07,
"loss": 3.9438,
"step": 10
},
{
"epoch": 0.008828722778104767,
"grad_norm": 22.5439620439223,
"learning_rate": 8.823529411764707e-07,
"loss": 3.4429,
"step": 15
},
{
"epoch": 0.011771630370806356,
"grad_norm": 13.2019843638693,
"learning_rate": 1.1764705882352942e-06,
"loss": 3.1438,
"step": 20
},
{
"epoch": 0.014714537963507945,
"grad_norm": 14.772036939700145,
"learning_rate": 1.4705882352941177e-06,
"loss": 2.9141,
"step": 25
},
{
"epoch": 0.017657445556209534,
"grad_norm": 20.5779186199068,
"learning_rate": 1.7647058823529414e-06,
"loss": 2.6074,
"step": 30
},
{
"epoch": 0.020600353148911125,
"grad_norm": 27.382526732783585,
"learning_rate": 2.058823529411765e-06,
"loss": 2.2919,
"step": 35
},
{
"epoch": 0.023543260741612712,
"grad_norm": 20.764560265022364,
"learning_rate": 2.3529411764705885e-06,
"loss": 1.7801,
"step": 40
},
{
"epoch": 0.026486168334314303,
"grad_norm": 11.788637809965795,
"learning_rate": 2.647058823529412e-06,
"loss": 1.3328,
"step": 45
},
{
"epoch": 0.02942907592701589,
"grad_norm": 22.53245322685116,
"learning_rate": 2.9411764705882355e-06,
"loss": 1.1855,
"step": 50
},
{
"epoch": 0.032371983519717484,
"grad_norm": 6.015914744919097,
"learning_rate": 3.2352941176470594e-06,
"loss": 1.1366,
"step": 55
},
{
"epoch": 0.03531489111241907,
"grad_norm": 5.217837662899846,
"learning_rate": 3.529411764705883e-06,
"loss": 1.1055,
"step": 60
},
{
"epoch": 0.03825779870512066,
"grad_norm": 3.357783317857939,
"learning_rate": 3.8235294117647055e-06,
"loss": 1.0953,
"step": 65
},
{
"epoch": 0.04120070629782225,
"grad_norm": 4.996001471413591,
"learning_rate": 4.11764705882353e-06,
"loss": 1.0496,
"step": 70
},
{
"epoch": 0.04414361389052384,
"grad_norm": 8.621809656222444,
"learning_rate": 4.411764705882353e-06,
"loss": 1.0292,
"step": 75
},
{
"epoch": 0.047086521483225424,
"grad_norm": 5.339001901811776,
"learning_rate": 4.705882352941177e-06,
"loss": 1.0423,
"step": 80
},
{
"epoch": 0.050029429075927015,
"grad_norm": 3.493074689207993,
"learning_rate": 5e-06,
"loss": 1.026,
"step": 85
},
{
"epoch": 0.052972336668628606,
"grad_norm": 9.685062179270181,
"learning_rate": 5.294117647058824e-06,
"loss": 1.005,
"step": 90
},
{
"epoch": 0.055915244261330196,
"grad_norm": 6.933487463420427,
"learning_rate": 5.588235294117647e-06,
"loss": 0.9885,
"step": 95
},
{
"epoch": 0.05885815185403178,
"grad_norm": 8.28144947566066,
"learning_rate": 5.882352941176471e-06,
"loss": 0.9873,
"step": 100
},
{
"epoch": 0.06180105944673337,
"grad_norm": 17.077053218905366,
"learning_rate": 6.176470588235295e-06,
"loss": 0.9694,
"step": 105
},
{
"epoch": 0.06474396703943497,
"grad_norm": 19.687199094462986,
"learning_rate": 6.470588235294119e-06,
"loss": 0.9622,
"step": 110
},
{
"epoch": 0.06768687463213655,
"grad_norm": 13.804537783692052,
"learning_rate": 6.764705882352942e-06,
"loss": 0.9563,
"step": 115
},
{
"epoch": 0.07062978222483814,
"grad_norm": 10.94725198202626,
"learning_rate": 7.058823529411766e-06,
"loss": 0.9414,
"step": 120
},
{
"epoch": 0.07357268981753973,
"grad_norm": 18.46306700568472,
"learning_rate": 7.352941176470589e-06,
"loss": 0.9289,
"step": 125
},
{
"epoch": 0.07651559741024132,
"grad_norm": 14.894213258364779,
"learning_rate": 7.647058823529411e-06,
"loss": 0.9238,
"step": 130
},
{
"epoch": 0.07945850500294291,
"grad_norm": 7.569487604501164,
"learning_rate": 7.941176470588236e-06,
"loss": 0.9183,
"step": 135
},
{
"epoch": 0.0824014125956445,
"grad_norm": 21.99231321792487,
"learning_rate": 8.23529411764706e-06,
"loss": 0.911,
"step": 140
},
{
"epoch": 0.08534432018834609,
"grad_norm": 18.115769647379153,
"learning_rate": 8.529411764705883e-06,
"loss": 0.9047,
"step": 145
},
{
"epoch": 0.08828722778104768,
"grad_norm": 10.705484744227235,
"learning_rate": 8.823529411764707e-06,
"loss": 0.8932,
"step": 150
},
{
"epoch": 0.09123013537374926,
"grad_norm": 16.313242301856516,
"learning_rate": 9.11764705882353e-06,
"loss": 0.8899,
"step": 155
},
{
"epoch": 0.09417304296645085,
"grad_norm": 12.978636094048102,
"learning_rate": 9.411764705882354e-06,
"loss": 0.8916,
"step": 160
},
{
"epoch": 0.09711595055915244,
"grad_norm": 11.506756600035605,
"learning_rate": 9.705882352941177e-06,
"loss": 0.8789,
"step": 165
},
{
"epoch": 0.10005885815185403,
"grad_norm": 24.989193177477567,
"learning_rate": 1e-05,
"loss": 0.8768,
"step": 170
},
{
"epoch": 0.10300176574455562,
"grad_norm": 13.397711202568765,
"learning_rate": 9.99973614764657e-06,
"loss": 0.8801,
"step": 175
},
{
"epoch": 0.10594467333725721,
"grad_norm": 12.037309022627504,
"learning_rate": 9.99894461843351e-06,
"loss": 0.8731,
"step": 180
},
{
"epoch": 0.1088875809299588,
"grad_norm": 22.40769728661026,
"learning_rate": 9.997625495899555e-06,
"loss": 0.8712,
"step": 185
},
{
"epoch": 0.11183048852266039,
"grad_norm": 14.584492756570954,
"learning_rate": 9.99577891926614e-06,
"loss": 0.8491,
"step": 190
},
{
"epoch": 0.11477339611536198,
"grad_norm": 9.426683321327127,
"learning_rate": 9.9934050834227e-06,
"loss": 0.8674,
"step": 195
},
{
"epoch": 0.11771630370806356,
"grad_norm": 25.968161707891955,
"learning_rate": 9.990504238906107e-06,
"loss": 0.8573,
"step": 200
},
{
"epoch": 0.12065921130076515,
"grad_norm": 13.072895533976467,
"learning_rate": 9.98707669187422e-06,
"loss": 0.8525,
"step": 205
},
{
"epoch": 0.12360211889346674,
"grad_norm": 11.095395155610879,
"learning_rate": 9.98312280407358e-06,
"loss": 0.8553,
"step": 210
},
{
"epoch": 0.12654502648616833,
"grad_norm": 16.418159232638153,
"learning_rate": 9.978642992801229e-06,
"loss": 0.8555,
"step": 215
},
{
"epoch": 0.12948793407886994,
"grad_norm": 4.450231202820506,
"learning_rate": 9.973637730860664e-06,
"loss": 0.8413,
"step": 220
},
{
"epoch": 0.13243084167157151,
"grad_norm": 4.111542494606179,
"learning_rate": 9.968107546511942e-06,
"loss": 0.8424,
"step": 225
},
{
"epoch": 0.1353737492642731,
"grad_norm": 13.693095667132473,
"learning_rate": 9.962053023415926e-06,
"loss": 0.8561,
"step": 230
},
{
"epoch": 0.1383166568569747,
"grad_norm": 5.646518368088086,
"learning_rate": 9.955474800572684e-06,
"loss": 0.8636,
"step": 235
},
{
"epoch": 0.14125956444967627,
"grad_norm": 5.108991758643153,
"learning_rate": 9.948373572254044e-06,
"loss": 0.8503,
"step": 240
},
{
"epoch": 0.14420247204237788,
"grad_norm": 9.81020252703176,
"learning_rate": 9.94075008793033e-06,
"loss": 0.873,
"step": 245
},
{
"epoch": 0.14714537963507945,
"grad_norm": 7.935285563014131,
"learning_rate": 9.932605152191255e-06,
"loss": 0.8433,
"step": 250
},
{
"epoch": 0.15008828722778106,
"grad_norm": 3.3393405756912484,
"learning_rate": 9.923939624661005e-06,
"loss": 0.8296,
"step": 255
},
{
"epoch": 0.15303119482048264,
"grad_norm": 3.830858966207542,
"learning_rate": 9.914754419907508e-06,
"loss": 0.8317,
"step": 260
},
{
"epoch": 0.1559741024131842,
"grad_norm": 16.404195583675744,
"learning_rate": 9.905050507345926e-06,
"loss": 0.8419,
"step": 265
},
{
"epoch": 0.15891701000588582,
"grad_norm": 9.685851257998209,
"learning_rate": 9.894828911136323e-06,
"loss": 0.8649,
"step": 270
},
{
"epoch": 0.1618599175985874,
"grad_norm": 5.518903601787907,
"learning_rate": 9.884090710075585e-06,
"loss": 0.8508,
"step": 275
},
{
"epoch": 0.164802825191289,
"grad_norm": 6.557040852323014,
"learning_rate": 9.87283703748356e-06,
"loss": 0.8537,
"step": 280
},
{
"epoch": 0.16774573278399058,
"grad_norm": 6.51479177962607,
"learning_rate": 9.861069081083452e-06,
"loss": 0.8454,
"step": 285
},
{
"epoch": 0.17068864037669218,
"grad_norm": 4.48759740362246,
"learning_rate": 9.84878808287645e-06,
"loss": 0.8494,
"step": 290
},
{
"epoch": 0.17363154796939376,
"grad_norm": 3.077914439009112,
"learning_rate": 9.835995339010673e-06,
"loss": 0.8371,
"step": 295
},
{
"epoch": 0.17657445556209536,
"grad_norm": 7.741990389634322,
"learning_rate": 9.822692199644346e-06,
"loss": 0.8489,
"step": 300
},
{
"epoch": 0.17951736315479694,
"grad_norm": 7.12110354743661,
"learning_rate": 9.808880068803323e-06,
"loss": 0.8386,
"step": 305
},
{
"epoch": 0.18246027074749852,
"grad_norm": 4.874821068849924,
"learning_rate": 9.794560404232898e-06,
"loss": 0.8399,
"step": 310
},
{
"epoch": 0.18540317834020012,
"grad_norm": 4.563419733002005,
"learning_rate": 9.779734717243946e-06,
"loss": 0.8271,
"step": 315
},
{
"epoch": 0.1883460859329017,
"grad_norm": 10.93234206126768,
"learning_rate": 9.764404572553427e-06,
"loss": 0.8123,
"step": 320
},
{
"epoch": 0.1912889935256033,
"grad_norm": 9.419206687007401,
"learning_rate": 9.748571588119247e-06,
"loss": 0.8188,
"step": 325
},
{
"epoch": 0.19423190111830488,
"grad_norm": 6.977872680215194,
"learning_rate": 9.732237434969487e-06,
"loss": 0.8255,
"step": 330
},
{
"epoch": 0.19717480871100648,
"grad_norm": 5.908304115359454,
"learning_rate": 9.715403837026046e-06,
"loss": 0.8265,
"step": 335
},
{
"epoch": 0.20011771630370806,
"grad_norm": 6.797809972888078,
"learning_rate": 9.698072570922698e-06,
"loss": 0.8118,
"step": 340
},
{
"epoch": 0.20306062389640966,
"grad_norm": 6.704121135386284,
"learning_rate": 9.680245465817583e-06,
"loss": 0.8096,
"step": 345
},
{
"epoch": 0.20600353148911124,
"grad_norm": 8.645898444887452,
"learning_rate": 9.661924403200155e-06,
"loss": 0.8038,
"step": 350
},
{
"epoch": 0.20894643908181282,
"grad_norm": 6.272395534856629,
"learning_rate": 9.64311131669261e-06,
"loss": 0.7848,
"step": 355
},
{
"epoch": 0.21188934667451442,
"grad_norm": 7.0264316123358155,
"learning_rate": 9.623808191845806e-06,
"loss": 0.7885,
"step": 360
},
{
"epoch": 0.214832254267216,
"grad_norm": 6.754946681714514,
"learning_rate": 9.604017065929715e-06,
"loss": 0.7937,
"step": 365
},
{
"epoch": 0.2177751618599176,
"grad_norm": 6.611299682141483,
"learning_rate": 9.583740027718396e-06,
"loss": 0.8036,
"step": 370
},
{
"epoch": 0.22071806945261918,
"grad_norm": 4.018544136474155,
"learning_rate": 9.562979217269547e-06,
"loss": 0.7969,
"step": 375
},
{
"epoch": 0.22366097704532079,
"grad_norm": 7.101373596601596,
"learning_rate": 9.54173682569865e-06,
"loss": 0.796,
"step": 380
},
{
"epoch": 0.22660388463802236,
"grad_norm": 6.399510185421723,
"learning_rate": 9.520015094947704e-06,
"loss": 0.8075,
"step": 385
},
{
"epoch": 0.22954679223072397,
"grad_norm": 9.296984628579494,
"learning_rate": 9.497816317548625e-06,
"loss": 0.7799,
"step": 390
},
{
"epoch": 0.23248969982342554,
"grad_norm": 4.786255899468576,
"learning_rate": 9.475142836381274e-06,
"loss": 0.7695,
"step": 395
},
{
"epoch": 0.23543260741612712,
"grad_norm": 4.559456259891477,
"learning_rate": 9.4519970444262e-06,
"loss": 0.7741,
"step": 400
},
{
"epoch": 0.23837551500882873,
"grad_norm": 7.466092853647954,
"learning_rate": 9.428381384512073e-06,
"loss": 0.7619,
"step": 405
},
{
"epoch": 0.2413184226015303,
"grad_norm": 6.623530673850783,
"learning_rate": 9.404298349057873e-06,
"loss": 0.7731,
"step": 410
},
{
"epoch": 0.2442613301942319,
"grad_norm": 6.068028651768666,
"learning_rate": 9.379750479809832e-06,
"loss": 0.7574,
"step": 415
},
{
"epoch": 0.24720423778693348,
"grad_norm": 8.419516596990583,
"learning_rate": 9.354740367573178e-06,
"loss": 0.7821,
"step": 420
},
{
"epoch": 0.25014714537963506,
"grad_norm": 4.198177608767893,
"learning_rate": 9.329270651938702e-06,
"loss": 0.7547,
"step": 425
},
{
"epoch": 0.25309005297233667,
"grad_norm": 4.566575750438902,
"learning_rate": 9.303344021004169e-06,
"loss": 0.7664,
"step": 430
},
{
"epoch": 0.25603296056503827,
"grad_norm": 8.680313490795726,
"learning_rate": 9.27696321109061e-06,
"loss": 0.7668,
"step": 435
},
{
"epoch": 0.2589758681577399,
"grad_norm": 9.12784954370401,
"learning_rate": 9.250131006453544e-06,
"loss": 0.7678,
"step": 440
},
{
"epoch": 0.2619187757504414,
"grad_norm": 10.144210993180703,
"learning_rate": 9.222850238989104e-06,
"loss": 0.765,
"step": 445
},
{
"epoch": 0.26486168334314303,
"grad_norm": 3.0429868552146964,
"learning_rate": 9.195123787935168e-06,
"loss": 0.7445,
"step": 450
},
{
"epoch": 0.26780459093584463,
"grad_norm": 4.691784082396774,
"learning_rate": 9.166954579567483e-06,
"loss": 0.7649,
"step": 455
},
{
"epoch": 0.2707474985285462,
"grad_norm": 4.7416354261595615,
"learning_rate": 9.138345586890818e-06,
"loss": 0.7496,
"step": 460
},
{
"epoch": 0.2736904061212478,
"grad_norm": 6.223772502898528,
"learning_rate": 9.109299829325191e-06,
"loss": 0.7439,
"step": 465
},
{
"epoch": 0.2766333137139494,
"grad_norm": 5.069169897647759,
"learning_rate": 9.0798203723872e-06,
"loss": 0.7552,
"step": 470
},
{
"epoch": 0.279576221306651,
"grad_norm": 4.067678548778218,
"learning_rate": 9.049910327366481e-06,
"loss": 0.7479,
"step": 475
},
{
"epoch": 0.28251912889935255,
"grad_norm": 7.216787820352084,
"learning_rate": 9.019572850997339e-06,
"loss": 0.7456,
"step": 480
},
{
"epoch": 0.28546203649205415,
"grad_norm": 6.118517874215981,
"learning_rate": 8.988811145125592e-06,
"loss": 0.7373,
"step": 485
},
{
"epoch": 0.28840494408475575,
"grad_norm": 6.974203549951666,
"learning_rate": 8.957628456370636e-06,
"loss": 0.7544,
"step": 490
},
{
"epoch": 0.2913478516774573,
"grad_norm": 6.659489098506463,
"learning_rate": 8.926028075782791e-06,
"loss": 0.7485,
"step": 495
},
{
"epoch": 0.2942907592701589,
"grad_norm": 6.963976313909436,
"learning_rate": 8.89401333849598e-06,
"loss": 0.7364,
"step": 500
},
{
"epoch": 0.2972336668628605,
"grad_norm": 5.060368338459147,
"learning_rate": 8.861587623375711e-06,
"loss": 0.7335,
"step": 505
},
{
"epoch": 0.3001765744555621,
"grad_norm": 11.693154707714637,
"learning_rate": 8.828754352662483e-06,
"loss": 0.7469,
"step": 510
},
{
"epoch": 0.30311948204826367,
"grad_norm": 8.708261644979073,
"learning_rate": 8.795516991610592e-06,
"loss": 0.7296,
"step": 515
},
{
"epoch": 0.30606238964096527,
"grad_norm": 5.926416635055216,
"learning_rate": 8.761879048122414e-06,
"loss": 0.7443,
"step": 520
},
{
"epoch": 0.3090052972336669,
"grad_norm": 12.904699986898022,
"learning_rate": 8.727844072378173e-06,
"loss": 0.7182,
"step": 525
},
{
"epoch": 0.3119482048263684,
"grad_norm": 6.368508385097698,
"learning_rate": 8.693415656461244e-06,
"loss": 0.734,
"step": 530
},
{
"epoch": 0.31489111241907003,
"grad_norm": 3.3588114427301607,
"learning_rate": 8.658597433979057e-06,
"loss": 0.7248,
"step": 535
},
{
"epoch": 0.31783402001177163,
"grad_norm": 12.331947606606729,
"learning_rate": 8.623393079679588e-06,
"loss": 0.7344,
"step": 540
},
{
"epoch": 0.32077692760447324,
"grad_norm": 12.166702155420914,
"learning_rate": 8.587806309063531e-06,
"loss": 0.726,
"step": 545
},
{
"epoch": 0.3237198351971748,
"grad_norm": 4.612832603972562,
"learning_rate": 8.551840877992155e-06,
"loss": 0.7283,
"step": 550
},
{
"epoch": 0.3266627427898764,
"grad_norm": 5.77424459107182,
"learning_rate": 8.515500582290914e-06,
"loss": 0.7155,
"step": 555
},
{
"epoch": 0.329605650382578,
"grad_norm": 3.6459134260142214,
"learning_rate": 8.478789257348827e-06,
"loss": 0.7116,
"step": 560
},
{
"epoch": 0.3325485579752796,
"grad_norm": 3.0011507986582733,
"learning_rate": 8.441710777713683e-06,
"loss": 0.7037,
"step": 565
},
{
"epoch": 0.33549146556798115,
"grad_norm": 5.360195180614321,
"learning_rate": 8.404269056683133e-06,
"loss": 0.7169,
"step": 570
},
{
"epoch": 0.33843437316068276,
"grad_norm": 5.707195645216086,
"learning_rate": 8.366468045891658e-06,
"loss": 0.7102,
"step": 575
},
{
"epoch": 0.34137728075338436,
"grad_norm": 3.5974238955778457,
"learning_rate": 8.32831173489352e-06,
"loss": 0.7115,
"step": 580
},
{
"epoch": 0.3443201883460859,
"grad_norm": 3.2599056325905367,
"learning_rate": 8.289804150741704e-06,
"loss": 0.7154,
"step": 585
},
{
"epoch": 0.3472630959387875,
"grad_norm": 4.229649245193178,
"learning_rate": 8.250949357562892e-06,
"loss": 0.7057,
"step": 590
},
{
"epoch": 0.3502060035314891,
"grad_norm": 5.194627560952757,
"learning_rate": 8.211751456128532e-06,
"loss": 0.7251,
"step": 595
},
{
"epoch": 0.3531489111241907,
"grad_norm": 3.0868739009823583,
"learning_rate": 8.172214583422038e-06,
"loss": 0.7067,
"step": 600
},
{
"epoch": 0.35609181871689227,
"grad_norm": 4.2974502097872405,
"learning_rate": 8.132342912202178e-06,
"loss": 0.7144,
"step": 605
},
{
"epoch": 0.3590347263095939,
"grad_norm": 3.271991629082987,
"learning_rate": 8.092140650562665e-06,
"loss": 0.7103,
"step": 610
},
{
"epoch": 0.3619776339022955,
"grad_norm": 4.2083604726976915,
"learning_rate": 8.051612041488038e-06,
"loss": 0.7211,
"step": 615
},
{
"epoch": 0.36492054149499703,
"grad_norm": 3.6340200304717114,
"learning_rate": 8.010761362405851e-06,
"loss": 0.7272,
"step": 620
},
{
"epoch": 0.36786344908769864,
"grad_norm": 5.783094842613372,
"learning_rate": 7.969592924735234e-06,
"loss": 0.7054,
"step": 625
},
{
"epoch": 0.37080635668040024,
"grad_norm": 5.0424325252776745,
"learning_rate": 7.928111073431847e-06,
"loss": 0.7122,
"step": 630
},
{
"epoch": 0.37374926427310184,
"grad_norm": 5.196701737520978,
"learning_rate": 7.886320186529327e-06,
"loss": 0.6953,
"step": 635
},
{
"epoch": 0.3766921718658034,
"grad_norm": 3.608090265928746,
"learning_rate": 7.844224674677225e-06,
"loss": 0.6978,
"step": 640
},
{
"epoch": 0.379635079458505,
"grad_norm": 4.731749503563869,
"learning_rate": 7.801828980675485e-06,
"loss": 0.6953,
"step": 645
},
{
"epoch": 0.3825779870512066,
"grad_norm": 3.0237351828264547,
"learning_rate": 7.75913757900556e-06,
"loss": 0.6738,
"step": 650
},
{
"epoch": 0.3855208946439082,
"grad_norm": 3.0717508997327365,
"learning_rate": 7.716154975358174e-06,
"loss": 0.7046,
"step": 655
},
{
"epoch": 0.38846380223660976,
"grad_norm": 3.4394705043914477,
"learning_rate": 7.672885706157778e-06,
"loss": 0.6982,
"step": 660
},
{
"epoch": 0.39140670982931136,
"grad_norm": 3.354889681681399,
"learning_rate": 7.629334338083774e-06,
"loss": 0.665,
"step": 665
},
{
"epoch": 0.39434961742201297,
"grad_norm": 4.968268480091777,
"learning_rate": 7.585505467588547e-06,
"loss": 0.6836,
"step": 670
},
{
"epoch": 0.3972925250147145,
"grad_norm": 3.4164646148678917,
"learning_rate": 7.541403720412353e-06,
"loss": 0.6951,
"step": 675
},
{
"epoch": 0.4002354326074161,
"grad_norm": 5.683831574907592,
"learning_rate": 7.497033751095099e-06,
"loss": 0.6928,
"step": 680
},
{
"epoch": 0.4031783402001177,
"grad_norm": 4.472151489983979,
"learning_rate": 7.4524002424851185e-06,
"loss": 0.6797,
"step": 685
},
{
"epoch": 0.40612124779281933,
"grad_norm": 6.186377434937082,
"learning_rate": 7.407507905244924e-06,
"loss": 0.672,
"step": 690
},
{
"epoch": 0.4090641553855209,
"grad_norm": 2.9651290505497045,
"learning_rate": 7.362361477354052e-06,
"loss": 0.6791,
"step": 695
},
{
"epoch": 0.4120070629782225,
"grad_norm": 5.390553555502008,
"learning_rate": 7.3169657236090004e-06,
"loss": 0.6689,
"step": 700
},
{
"epoch": 0.4149499705709241,
"grad_norm": 4.10914126631982,
"learning_rate": 7.271325435120353e-06,
"loss": 0.6845,
"step": 705
},
{
"epoch": 0.41789287816362564,
"grad_norm": 8.644264069486146,
"learning_rate": 7.225445428807121e-06,
"loss": 0.6791,
"step": 710
},
{
"epoch": 0.42083578575632724,
"grad_norm": 4.677705564927546,
"learning_rate": 7.179330546888364e-06,
"loss": 0.6713,
"step": 715
},
{
"epoch": 0.42377869334902885,
"grad_norm": 6.154537296560613,
"learning_rate": 7.132985656372126e-06,
"loss": 0.6579,
"step": 720
},
{
"epoch": 0.42672160094173045,
"grad_norm": 8.540942755538525,
"learning_rate": 7.086415648541782e-06,
"loss": 0.6576,
"step": 725
},
{
"epoch": 0.429664508534432,
"grad_norm": 5.046772189845541,
"learning_rate": 7.0396254384398e-06,
"loss": 0.6435,
"step": 730
},
{
"epoch": 0.4326074161271336,
"grad_norm": 3.4905163179341967,
"learning_rate": 6.9926199643489985e-06,
"loss": 0.6405,
"step": 735
},
{
"epoch": 0.4355503237198352,
"grad_norm": 3.005636207711306,
"learning_rate": 6.945404187271365e-06,
"loss": 0.6461,
"step": 740
},
{
"epoch": 0.4384932313125368,
"grad_norm": 3.4654929507322803,
"learning_rate": 6.897983090404457e-06,
"loss": 0.6239,
"step": 745
},
{
"epoch": 0.44143613890523836,
"grad_norm": 3.6846119188733666,
"learning_rate": 6.850361678615481e-06,
"loss": 0.6548,
"step": 750
},
{
"epoch": 0.44437904649793997,
"grad_norm": 3.2356789677548106,
"learning_rate": 6.802544977913067e-06,
"loss": 0.6741,
"step": 755
},
{
"epoch": 0.44732195409064157,
"grad_norm": 4.998250903563615,
"learning_rate": 6.754538034916819e-06,
"loss": 0.6631,
"step": 760
},
{
"epoch": 0.4502648616833431,
"grad_norm": 5.140876271152121,
"learning_rate": 6.706345916324692e-06,
"loss": 0.6458,
"step": 765
},
{
"epoch": 0.4532077692760447,
"grad_norm": 3.486582775324299,
"learning_rate": 6.657973708378251e-06,
"loss": 0.6468,
"step": 770
},
{
"epoch": 0.45615067686874633,
"grad_norm": 3.2392766779187543,
"learning_rate": 6.609426516325859e-06,
"loss": 0.6453,
"step": 775
},
{
"epoch": 0.45909358446144793,
"grad_norm": 8.066053393796732,
"learning_rate": 6.560709463883865e-06,
"loss": 0.629,
"step": 780
},
{
"epoch": 0.4620364920541495,
"grad_norm": 4.508373615955478,
"learning_rate": 6.511827692695847e-06,
"loss": 0.6364,
"step": 785
},
{
"epoch": 0.4649793996468511,
"grad_norm": 3.7473795272990755,
"learning_rate": 6.462786361789948e-06,
"loss": 0.6499,
"step": 790
},
{
"epoch": 0.4679223072395527,
"grad_norm": 2.9615724505608543,
"learning_rate": 6.413590647034401e-06,
"loss": 0.6346,
"step": 795
},
{
"epoch": 0.47086521483225424,
"grad_norm": 3.04892457656011,
"learning_rate": 6.364245740591251e-06,
"loss": 0.6322,
"step": 800
},
{
"epoch": 0.47380812242495585,
"grad_norm": 2.918336925557892,
"learning_rate": 6.314756850368377e-06,
"loss": 0.648,
"step": 805
},
{
"epoch": 0.47675103001765745,
"grad_norm": 3.7501594414028516,
"learning_rate": 6.2651291994698425e-06,
"loss": 0.6372,
"step": 810
},
{
"epoch": 0.47969393761035906,
"grad_norm": 2.6175973358406597,
"learning_rate": 6.215368025644637e-06,
"loss": 0.6233,
"step": 815
},
{
"epoch": 0.4826368452030606,
"grad_norm": 3.216759692623588,
"learning_rate": 6.1654785807338925e-06,
"loss": 0.6261,
"step": 820
},
{
"epoch": 0.4855797527957622,
"grad_norm": 4.2663028236726,
"learning_rate": 6.11546613011659e-06,
"loss": 0.6402,
"step": 825
},
{
"epoch": 0.4885226603884638,
"grad_norm": 3.0098989981042683,
"learning_rate": 6.065335952153846e-06,
"loss": 0.6283,
"step": 830
},
{
"epoch": 0.4914655679811654,
"grad_norm": 2.9805289997816997,
"learning_rate": 6.015093337631833e-06,
"loss": 0.625,
"step": 835
},
{
"epoch": 0.49440847557386697,
"grad_norm": 2.828079466519479,
"learning_rate": 5.964743589203388e-06,
"loss": 0.6271,
"step": 840
},
{
"epoch": 0.4973513831665686,
"grad_norm": 3.056832170036487,
"learning_rate": 5.9142920208283564e-06,
"loss": 0.6163,
"step": 845
},
{
"epoch": 0.5002942907592701,
"grad_norm": 4.967318743335985,
"learning_rate": 5.863743957212759e-06,
"loss": 0.6231,
"step": 850
},
{
"epoch": 0.5032371983519718,
"grad_norm": 7.691396927048123,
"learning_rate": 5.8131047332468106e-06,
"loss": 0.6279,
"step": 855
},
{
"epoch": 0.5061801059446733,
"grad_norm": 10.485146930542516,
"learning_rate": 5.762379693441883e-06,
"loss": 0.617,
"step": 860
},
{
"epoch": 0.5091230135373749,
"grad_norm": 5.793807677023011,
"learning_rate": 5.711574191366427e-06,
"loss": 0.6117,
"step": 865
},
{
"epoch": 0.5120659211300765,
"grad_norm": 3.75783456933447,
"learning_rate": 5.660693589080958e-06,
"loss": 0.6083,
"step": 870
},
{
"epoch": 0.5150088287227781,
"grad_norm": 4.787442679225073,
"learning_rate": 5.60974325657214e-06,
"loss": 0.6062,
"step": 875
},
{
"epoch": 0.5179517363154797,
"grad_norm": 4.424520409909319,
"learning_rate": 5.558728571186027e-06,
"loss": 0.6087,
"step": 880
},
{
"epoch": 0.5208946439081813,
"grad_norm": 4.60877448542516,
"learning_rate": 5.507654917060541e-06,
"loss": 0.6144,
"step": 885
},
{
"epoch": 0.5238375515008828,
"grad_norm": 5.463543352263803,
"learning_rate": 5.456527684557217e-06,
"loss": 0.6234,
"step": 890
},
{
"epoch": 0.5267804590935845,
"grad_norm": 4.713832072636091,
"learning_rate": 5.405352269692301e-06,
"loss": 0.6036,
"step": 895
},
{
"epoch": 0.5297233666862861,
"grad_norm": 3.074792807909731,
"learning_rate": 5.354134073567255e-06,
"loss": 0.5953,
"step": 900
},
{
"epoch": 0.5326662742789876,
"grad_norm": 2.624507780176144,
"learning_rate": 5.302878501798714e-06,
"loss": 0.6024,
"step": 905
},
{
"epoch": 0.5356091818716893,
"grad_norm": 2.4586044761464505,
"learning_rate": 5.2515909639479715e-06,
"loss": 0.583,
"step": 910
},
{
"epoch": 0.5385520894643908,
"grad_norm": 3.1643013838767318,
"learning_rate": 5.200276872950052e-06,
"loss": 0.5848,
"step": 915
},
{
"epoch": 0.5414949970570924,
"grad_norm": 5.524616335650943,
"learning_rate": 5.148941644542428e-06,
"loss": 0.6066,
"step": 920
},
{
"epoch": 0.544437904649794,
"grad_norm": 2.9626617937595383,
"learning_rate": 5.09759069669343e-06,
"loss": 0.5868,
"step": 925
},
{
"epoch": 0.5473808122424956,
"grad_norm": 4.179026591658555,
"learning_rate": 5.046229449030432e-06,
"loss": 0.5938,
"step": 930
},
{
"epoch": 0.5503237198351971,
"grad_norm": 2.9736230937939436,
"learning_rate": 4.994863322267865e-06,
"loss": 0.5854,
"step": 935
},
{
"epoch": 0.5532666274278988,
"grad_norm": 2.6674395025105384,
"learning_rate": 4.943497737635103e-06,
"loss": 0.5773,
"step": 940
},
{
"epoch": 0.5562095350206003,
"grad_norm": 3.3999264932445965,
"learning_rate": 4.892138116304297e-06,
"loss": 0.5899,
"step": 945
},
{
"epoch": 0.559152442613302,
"grad_norm": 3.7946695534722954,
"learning_rate": 4.840789878818238e-06,
"loss": 0.5909,
"step": 950
},
{
"epoch": 0.5620953502060035,
"grad_norm": 2.9170374244463777,
"learning_rate": 4.789458444518243e-06,
"loss": 0.585,
"step": 955
},
{
"epoch": 0.5650382577987051,
"grad_norm": 3.804085500882333,
"learning_rate": 4.73814923097221e-06,
"loss": 0.5811,
"step": 960
},
{
"epoch": 0.5679811653914068,
"grad_norm": 2.627248184612479,
"learning_rate": 4.6868676534028415e-06,
"loss": 0.5741,
"step": 965
},
{
"epoch": 0.5709240729841083,
"grad_norm": 2.758710046147963,
"learning_rate": 4.635619124116105e-06,
"loss": 0.5751,
"step": 970
},
{
"epoch": 0.5738669805768098,
"grad_norm": 2.533940367785718,
"learning_rate": 4.584409051930027e-06,
"loss": 0.5727,
"step": 975
},
{
"epoch": 0.5768098881695115,
"grad_norm": 4.7034871762477755,
"learning_rate": 4.533242841603836e-06,
"loss": 0.5958,
"step": 980
},
{
"epoch": 0.5797527957622131,
"grad_norm": 3.9130158274657205,
"learning_rate": 4.4821258932675324e-06,
"loss": 0.5752,
"step": 985
},
{
"epoch": 0.5826957033549146,
"grad_norm": 2.923581564704699,
"learning_rate": 4.431063601851965e-06,
"loss": 0.5855,
"step": 990
},
{
"epoch": 0.5856386109476163,
"grad_norm": 2.68100418754764,
"learning_rate": 4.38006135651944e-06,
"loss": 0.5592,
"step": 995
},
{
"epoch": 0.5885815185403178,
"grad_norm": 2.5427064423534054,
"learning_rate": 4.329124540094939e-06,
"loss": 0.5738,
"step": 1000
},
{
"epoch": 0.5915244261330195,
"grad_norm": 2.56361328953547,
"learning_rate": 4.278258528498023e-06,
"loss": 0.5518,
"step": 1005
},
{
"epoch": 0.594467333725721,
"grad_norm": 3.570826652815698,
"learning_rate": 4.227468690175433e-06,
"loss": 0.5565,
"step": 1010
},
{
"epoch": 0.5974102413184226,
"grad_norm": 3.144931194434484,
"learning_rate": 4.17676038553452e-06,
"loss": 0.5542,
"step": 1015
},
{
"epoch": 0.6003531489111242,
"grad_norm": 3.0428538279770487,
"learning_rate": 4.126138966377495e-06,
"loss": 0.5724,
"step": 1020
},
{
"epoch": 0.6032960565038258,
"grad_norm": 2.9781764362393863,
"learning_rate": 4.075609775336585e-06,
"loss": 0.5651,
"step": 1025
},
{
"epoch": 0.6062389640965273,
"grad_norm": 2.8792302508241896,
"learning_rate": 4.02517814531018e-06,
"loss": 0.5538,
"step": 1030
},
{
"epoch": 0.609181871689229,
"grad_norm": 2.852533794505324,
"learning_rate": 3.974849398899991e-06,
"loss": 0.5488,
"step": 1035
},
{
"epoch": 0.6121247792819305,
"grad_norm": 3.645969861195433,
"learning_rate": 3.924628847849287e-06,
"loss": 0.5534,
"step": 1040
},
{
"epoch": 0.6150676868746321,
"grad_norm": 2.4948233282423002,
"learning_rate": 3.874521792482305e-06,
"loss": 0.5661,
"step": 1045
},
{
"epoch": 0.6180105944673338,
"grad_norm": 2.6660019183479577,
"learning_rate": 3.8245335211448404e-06,
"loss": 0.5601,
"step": 1050
},
{
"epoch": 0.6209535020600353,
"grad_norm": 3.7571638678391346,
"learning_rate": 3.7746693096461027e-06,
"loss": 0.5522,
"step": 1055
},
{
"epoch": 0.6238964096527368,
"grad_norm": 2.891024598849757,
"learning_rate": 3.7249344207019163e-06,
"loss": 0.5384,
"step": 1060
},
{
"epoch": 0.6268393172454385,
"grad_norm": 3.117981111832482,
"learning_rate": 3.675334103379282e-06,
"loss": 0.5297,
"step": 1065
},
{
"epoch": 0.6297822248381401,
"grad_norm": 2.471751054844093,
"learning_rate": 3.625873592542377e-06,
"loss": 0.5488,
"step": 1070
},
{
"epoch": 0.6327251324308417,
"grad_norm": 2.9406065713758545,
"learning_rate": 3.576558108300081e-06,
"loss": 0.5406,
"step": 1075
},
{
"epoch": 0.6356680400235433,
"grad_norm": 2.5304758647440466,
"learning_rate": 3.5273928554550218e-06,
"loss": 0.5377,
"step": 1080
},
{
"epoch": 0.6386109476162448,
"grad_norm": 4.358193416602383,
"learning_rate": 3.478383022954267e-06,
"loss": 0.5262,
"step": 1085
},
{
"epoch": 0.6415538552089465,
"grad_norm": 3.751043254388853,
"learning_rate": 3.4295337833416797e-06,
"loss": 0.5347,
"step": 1090
},
{
"epoch": 0.644496762801648,
"grad_norm": 2.8478083940715058,
"learning_rate": 3.3808502922119884e-06,
"loss": 0.5517,
"step": 1095
},
{
"epoch": 0.6474396703943496,
"grad_norm": 3.592053051641115,
"learning_rate": 3.3323376876666787e-06,
"loss": 0.5185,
"step": 1100
},
{
"epoch": 0.6503825779870512,
"grad_norm": 3.2832783558616523,
"learning_rate": 3.2840010897717045e-06,
"loss": 0.5283,
"step": 1105
},
{
"epoch": 0.6533254855797528,
"grad_norm": 3.7958128452191313,
"learning_rate": 3.235845600017107e-06,
"loss": 0.5304,
"step": 1110
},
{
"epoch": 0.6562683931724543,
"grad_norm": 2.458980007684118,
"learning_rate": 3.1878763007786075e-06,
"loss": 0.5119,
"step": 1115
},
{
"epoch": 0.659211300765156,
"grad_norm": 5.356928896927627,
"learning_rate": 3.1400982547812088e-06,
"loss": 0.5356,
"step": 1120
},
{
"epoch": 0.6621542083578575,
"grad_norm": 4.006321789010636,
"learning_rate": 3.0925165045648564e-06,
"loss": 0.5144,
"step": 1125
},
{
"epoch": 0.6650971159505592,
"grad_norm": 3.1366794171510164,
"learning_rate": 3.0451360719522653e-06,
"loss": 0.5259,
"step": 1130
},
{
"epoch": 0.6680400235432608,
"grad_norm": 3.075050005395039,
"learning_rate": 2.997961957518892e-06,
"loss": 0.5126,
"step": 1135
},
{
"epoch": 0.6709829311359623,
"grad_norm": 2.614210901095361,
"learning_rate": 2.950999140065184e-06,
"loss": 0.526,
"step": 1140
},
{
"epoch": 0.673925838728664,
"grad_norm": 2.668099567229874,
"learning_rate": 2.904252576091105e-06,
"loss": 0.5207,
"step": 1145
},
{
"epoch": 0.6768687463213655,
"grad_norm": 2.822258046228147,
"learning_rate": 2.8577271992730223e-06,
"loss": 0.5179,
"step": 1150
},
{
"epoch": 0.6798116539140671,
"grad_norm": 2.7498849578418842,
"learning_rate": 2.8114279199430027e-06,
"loss": 0.5208,
"step": 1155
},
{
"epoch": 0.6827545615067687,
"grad_norm": 3.7794257172644086,
"learning_rate": 2.765359624570574e-06,
"loss": 0.5156,
"step": 1160
},
{
"epoch": 0.6856974690994703,
"grad_norm": 2.9809135197781527,
"learning_rate": 2.719527175246993e-06,
"loss": 0.5233,
"step": 1165
},
{
"epoch": 0.6886403766921718,
"grad_norm": 2.600094892048898,
"learning_rate": 2.6739354091721094e-06,
"loss": 0.5223,
"step": 1170
},
{
"epoch": 0.6915832842848735,
"grad_norm": 3.392788220792135,
"learning_rate": 2.6285891381438346e-06,
"loss": 0.5343,
"step": 1175
},
{
"epoch": 0.694526191877575,
"grad_norm": 2.620537806287387,
"learning_rate": 2.583493148050297e-06,
"loss": 0.5113,
"step": 1180
},
{
"epoch": 0.6974690994702767,
"grad_norm": 2.4542848921872817,
"learning_rate": 2.5386521983647468e-06,
"loss": 0.511,
"step": 1185
},
{
"epoch": 0.7004120070629782,
"grad_norm": 2.5882709829545556,
"learning_rate": 2.494071021643222e-06,
"loss": 0.5061,
"step": 1190
},
{
"epoch": 0.7033549146556798,
"grad_norm": 2.2977723019021306,
"learning_rate": 2.4497543230250843e-06,
"loss": 0.5041,
"step": 1195
},
{
"epoch": 0.7062978222483814,
"grad_norm": 2.706988899429715,
"learning_rate": 2.4057067797364247e-06,
"loss": 0.5034,
"step": 1200
},
{
"epoch": 0.709240729841083,
"grad_norm": 2.8986602480661716,
"learning_rate": 2.361933040596424e-06,
"loss": 0.5081,
"step": 1205
},
{
"epoch": 0.7121836374337845,
"grad_norm": 3.069283780524911,
"learning_rate": 2.31843772552672e-06,
"loss": 0.4812,
"step": 1210
},
{
"epoch": 0.7151265450264862,
"grad_norm": 2.942823198350691,
"learning_rate": 2.275225425063813e-06,
"loss": 0.4862,
"step": 1215
},
{
"epoch": 0.7180694526191878,
"grad_norm": 4.0835750894481,
"learning_rate": 2.2323006998745673e-06,
"loss": 0.5002,
"step": 1220
},
{
"epoch": 0.7210123602118893,
"grad_norm": 2.6814535983918764,
"learning_rate": 2.189668080274891e-06,
"loss": 0.4844,
"step": 1225
},
{
"epoch": 0.723955267804591,
"grad_norm": 2.476076418297489,
"learning_rate": 2.1473320657515913e-06,
"loss": 0.4882,
"step": 1230
},
{
"epoch": 0.7268981753972925,
"grad_norm": 2.9506625115965073,
"learning_rate": 2.1052971244874905e-06,
"loss": 0.496,
"step": 1235
},
{
"epoch": 0.7298410829899941,
"grad_norm": 2.4718878265701214,
"learning_rate": 2.063567692889864e-06,
"loss": 0.496,
"step": 1240
},
{
"epoch": 0.7327839905826957,
"grad_norm": 2.5394148181117724,
"learning_rate": 2.0221481751222027e-06,
"loss": 0.4918,
"step": 1245
},
{
"epoch": 0.7357268981753973,
"grad_norm": 2.4239900265691436,
"learning_rate": 1.981042942639404e-06,
"loss": 0.4825,
"step": 1250
},
{
"epoch": 0.7386698057680989,
"grad_norm": 2.582847306611647,
"learning_rate": 1.9402563337264012e-06,
"loss": 0.479,
"step": 1255
},
{
"epoch": 0.7416127133608005,
"grad_norm": 2.8447951582688344,
"learning_rate": 1.899792653040291e-06,
"loss": 0.4856,
"step": 1260
},
{
"epoch": 0.744555620953502,
"grad_norm": 2.7091883817883504,
"learning_rate": 1.8596561711560267e-06,
"loss": 0.4785,
"step": 1265
},
{
"epoch": 0.7474985285462037,
"grad_norm": 2.5437072619561443,
"learning_rate": 1.8198511241156902e-06,
"loss": 0.4846,
"step": 1270
},
{
"epoch": 0.7504414361389052,
"grad_norm": 3.156459227921938,
"learning_rate": 1.780381712981415e-06,
"loss": 0.4765,
"step": 1275
},
{
"epoch": 0.7533843437316068,
"grad_norm": 2.606788537147239,
"learning_rate": 1.7412521033920087e-06,
"loss": 0.4945,
"step": 1280
},
{
"epoch": 0.7563272513243084,
"grad_norm": 2.7549835249891057,
"learning_rate": 1.7024664251233052e-06,
"loss": 0.4795,
"step": 1285
},
{
"epoch": 0.75927015891701,
"grad_norm": 2.499144800895581,
"learning_rate": 1.6640287716522975e-06,
"loss": 0.4691,
"step": 1290
},
{
"epoch": 0.7622130665097115,
"grad_norm": 2.4799314569355677,
"learning_rate": 1.6259431997251191e-06,
"loss": 0.4972,
"step": 1295
},
{
"epoch": 0.7651559741024132,
"grad_norm": 2.5207515424299034,
"learning_rate": 1.5882137289288812e-06,
"loss": 0.4734,
"step": 1300
},
{
"epoch": 0.7680988816951148,
"grad_norm": 2.383436407772927,
"learning_rate": 1.5508443412674518e-06,
"loss": 0.4944,
"step": 1305
},
{
"epoch": 0.7710417892878164,
"grad_norm": 2.7793788321817656,
"learning_rate": 1.513838980741183e-06,
"loss": 0.4749,
"step": 1310
},
{
"epoch": 0.773984696880518,
"grad_norm": 2.301895560667882,
"learning_rate": 1.4772015529306587e-06,
"loss": 0.4619,
"step": 1315
},
{
"epoch": 0.7769276044732195,
"grad_norm": 2.446229173062197,
"learning_rate": 1.4409359245845005e-06,
"loss": 0.4671,
"step": 1320
},
{
"epoch": 0.7798705120659212,
"grad_norm": 2.998324656500335,
"learning_rate": 1.4050459232112652e-06,
"loss": 0.4708,
"step": 1325
},
{
"epoch": 0.7828134196586227,
"grad_norm": 2.287094207368545,
"learning_rate": 1.3695353366754798e-06,
"loss": 0.4666,
"step": 1330
},
{
"epoch": 0.7857563272513243,
"grad_norm": 2.652209930668726,
"learning_rate": 1.3344079127978788e-06,
"loss": 0.458,
"step": 1335
},
{
"epoch": 0.7886992348440259,
"grad_norm": 2.6373384490448872,
"learning_rate": 1.299667358959847e-06,
"loss": 0.4802,
"step": 1340
},
{
"epoch": 0.7916421424367275,
"grad_norm": 2.6241240764044034,
"learning_rate": 1.2653173417121367e-06,
"loss": 0.458,
"step": 1345
},
{
"epoch": 0.794585050029429,
"grad_norm": 2.596475069136379,
"learning_rate": 1.2313614863879074e-06,
"loss": 0.4589,
"step": 1350
},
{
"epoch": 0.7975279576221307,
"grad_norm": 2.6042310296871856,
"learning_rate": 1.1978033767200936e-06,
"loss": 0.4613,
"step": 1355
},
{
"epoch": 0.8004708652148322,
"grad_norm": 2.4907632128373614,
"learning_rate": 1.1646465544631802e-06,
"loss": 0.4672,
"step": 1360
},
{
"epoch": 0.8034137728075338,
"grad_norm": 2.8404270982027624,
"learning_rate": 1.131894519019403e-06,
"loss": 0.4647,
"step": 1365
},
{
"epoch": 0.8063566804002354,
"grad_norm": 2.511899627431363,
"learning_rate": 1.0995507270694121e-06,
"loss": 0.4568,
"step": 1370
},
{
"epoch": 0.809299587992937,
"grad_norm": 2.49013220750732,
"learning_rate": 1.0676185922074583e-06,
"loss": 0.4657,
"step": 1375
},
{
"epoch": 0.8122424955856387,
"grad_norm": 2.4156192880535747,
"learning_rate": 1.036101484581117e-06,
"loss": 0.4616,
"step": 1380
},
{
"epoch": 0.8151854031783402,
"grad_norm": 2.4647230171771364,
"learning_rate": 1.0050027305355926e-06,
"loss": 0.4651,
"step": 1385
},
{
"epoch": 0.8181283107710418,
"grad_norm": 2.90805484751002,
"learning_rate": 9.743256122626644e-07,
"loss": 0.4582,
"step": 1390
},
{
"epoch": 0.8210712183637434,
"grad_norm": 2.770355546856387,
"learning_rate": 9.440733674542751e-07,
"loss": 0.47,
"step": 1395
},
{
"epoch": 0.824014125956445,
"grad_norm": 2.894362207485248,
"learning_rate": 9.142491889608179e-07,
"loss": 0.4525,
"step": 1400
},
{
"epoch": 0.8269570335491465,
"grad_norm": 2.404589039415564,
"learning_rate": 8.848562244541675e-07,
"loss": 0.4682,
"step": 1405
},
{
"epoch": 0.8298999411418482,
"grad_norm": 2.492758443409389,
"learning_rate": 8.558975760954685e-07,
"loss": 0.4595,
"step": 1410
},
{
"epoch": 0.8328428487345497,
"grad_norm": 3.0988070143588202,
"learning_rate": 8.273763002077284e-07,
"loss": 0.4568,
"step": 1415
},
{
"epoch": 0.8357857563272513,
"grad_norm": 3.440047173962592,
"learning_rate": 7.992954069532543e-07,
"loss": 0.461,
"step": 1420
},
{
"epoch": 0.8387286639199529,
"grad_norm": 2.657598459534566,
"learning_rate": 7.716578600159535e-07,
"loss": 0.4503,
"step": 1425
},
{
"epoch": 0.8416715715126545,
"grad_norm": 2.572563487365978,
"learning_rate": 7.444665762885484e-07,
"loss": 0.4615,
"step": 1430
},
{
"epoch": 0.8446144791053561,
"grad_norm": 2.439187441026128,
"learning_rate": 7.177244255647209e-07,
"loss": 0.4446,
"step": 1435
},
{
"epoch": 0.8475573866980577,
"grad_norm": 2.233969483468557,
"learning_rate": 6.914342302362309e-07,
"loss": 0.4511,
"step": 1440
},
{
"epoch": 0.8505002942907592,
"grad_norm": 2.471799044846796,
"learning_rate": 6.65598764995043e-07,
"loss": 0.4497,
"step": 1445
},
{
"epoch": 0.8534432018834609,
"grad_norm": 2.755442497972038,
"learning_rate": 6.402207565404811e-07,
"loss": 0.4487,
"step": 1450
},
{
"epoch": 0.8563861094761624,
"grad_norm": 2.4539325525236326,
"learning_rate": 6.153028832914453e-07,
"loss": 0.4541,
"step": 1455
},
{
"epoch": 0.859329017068864,
"grad_norm": 2.2940758992178014,
"learning_rate": 5.908477751037361e-07,
"loss": 0.4425,
"step": 1460
},
{
"epoch": 0.8622719246615657,
"grad_norm": 2.5143891555656435,
"learning_rate": 5.668580129924949e-07,
"loss": 0.4475,
"step": 1465
},
{
"epoch": 0.8652148322542672,
"grad_norm": 2.643528212719313,
"learning_rate": 5.433361288597949e-07,
"loss": 0.4374,
"step": 1470
},
{
"epoch": 0.8681577398469688,
"grad_norm": 2.3103830994227774,
"learning_rate": 5.20284605227433e-07,
"loss": 0.4434,
"step": 1475
},
{
"epoch": 0.8711006474396704,
"grad_norm": 2.1890313025355668,
"learning_rate": 4.977058749749115e-07,
"loss": 0.4464,
"step": 1480
},
{
"epoch": 0.874043555032372,
"grad_norm": 2.311388455333076,
"learning_rate": 4.7560232108267714e-07,
"loss": 0.4501,
"step": 1485
},
{
"epoch": 0.8769864626250736,
"grad_norm": 2.2768695736500817,
"learning_rate": 4.5397627638061604e-07,
"loss": 0.4519,
"step": 1490
},
{
"epoch": 0.8799293702177752,
"grad_norm": 2.4901928210573305,
"learning_rate": 4.3283002330184233e-07,
"loss": 0.4426,
"step": 1495
},
{
"epoch": 0.8828722778104767,
"grad_norm": 2.2512157372682213,
"learning_rate": 4.1216579364181333e-07,
"loss": 0.4465,
"step": 1500
},
{
"epoch": 0.8858151854031784,
"grad_norm": 2.2396613255090947,
"learning_rate": 3.919857683227818e-07,
"loss": 0.447,
"step": 1505
},
{
"epoch": 0.8887580929958799,
"grad_norm": 2.4441734845394096,
"learning_rate": 3.72292077163614e-07,
"loss": 0.4455,
"step": 1510
},
{
"epoch": 0.8917010005885815,
"grad_norm": 2.3390391914462128,
"learning_rate": 3.5308679865501516e-07,
"loss": 0.4329,
"step": 1515
},
{
"epoch": 0.8946439081812831,
"grad_norm": 2.467951011914622,
"learning_rate": 3.343719597401596e-07,
"loss": 0.4373,
"step": 1520
},
{
"epoch": 0.8975868157739847,
"grad_norm": 2.506469216208831,
"learning_rate": 3.1614953560076213e-07,
"loss": 0.4413,
"step": 1525
},
{
"epoch": 0.9005297233666862,
"grad_norm": 2.3383168243956174,
"learning_rate": 2.984214494486215e-07,
"loss": 0.4548,
"step": 1530
},
{
"epoch": 0.9034726309593879,
"grad_norm": 2.630425109995007,
"learning_rate": 2.811895723226382e-07,
"loss": 0.4524,
"step": 1535
},
{
"epoch": 0.9064155385520895,
"grad_norm": 2.4286937188874775,
"learning_rate": 2.6445572289134634e-07,
"loss": 0.4376,
"step": 1540
},
{
"epoch": 0.909358446144791,
"grad_norm": 2.216575869780935,
"learning_rate": 2.482216672609677e-07,
"loss": 0.4351,
"step": 1545
},
{
"epoch": 0.9123013537374927,
"grad_norm": 2.3654422420764973,
"learning_rate": 2.3248911878901547e-07,
"loss": 0.4355,
"step": 1550
},
{
"epoch": 0.9152442613301942,
"grad_norm": 2.3549052063334495,
"learning_rate": 2.1725973790346522e-07,
"loss": 0.4346,
"step": 1555
},
{
"epoch": 0.9181871689228959,
"grad_norm": 2.3113103859994535,
"learning_rate": 2.0253513192751374e-07,
"loss": 0.4367,
"step": 1560
},
{
"epoch": 0.9211300765155974,
"grad_norm": 2.7576756319444553,
"learning_rate": 1.8831685490993457e-07,
"loss": 0.4381,
"step": 1565
},
{
"epoch": 0.924072984108299,
"grad_norm": 2.369997826369225,
"learning_rate": 1.7460640746107094e-07,
"loss": 0.4454,
"step": 1570
},
{
"epoch": 0.9270158917010006,
"grad_norm": 2.6393554236756,
"learning_rate": 1.614052365944524e-07,
"loss": 0.4481,
"step": 1575
},
{
"epoch": 0.9299587992937022,
"grad_norm": 2.4054005953759128,
"learning_rate": 1.4871473557407924e-07,
"loss": 0.4412,
"step": 1580
},
{
"epoch": 0.9329017068864037,
"grad_norm": 2.491831338754737,
"learning_rate": 1.3653624376737562e-07,
"loss": 0.4451,
"step": 1585
},
{
"epoch": 0.9358446144791054,
"grad_norm": 3.0815694783925305,
"learning_rate": 1.2487104650383165e-07,
"loss": 0.4283,
"step": 1590
},
{
"epoch": 0.9387875220718069,
"grad_norm": 2.2641401566584545,
"learning_rate": 1.1372037493934785e-07,
"loss": 0.4237,
"step": 1595
},
{
"epoch": 0.9417304296645085,
"grad_norm": 2.7762443739962057,
"learning_rate": 1.0308540592629756e-07,
"loss": 0.4404,
"step": 1600
},
{
"epoch": 0.9446733372572101,
"grad_norm": 2.6178876501865145,
"learning_rate": 9.296726188932293e-08,
"loss": 0.4306,
"step": 1605
},
{
"epoch": 0.9476162448499117,
"grad_norm": 2.6239109904141604,
"learning_rate": 8.336701070686904e-08,
"loss": 0.4461,
"step": 1610
},
{
"epoch": 0.9505591524426134,
"grad_norm": 2.2437383637665715,
"learning_rate": 7.428566559848416e-08,
"loss": 0.4345,
"step": 1615
},
{
"epoch": 0.9535020600353149,
"grad_norm": 2.1833908010974605,
"learning_rate": 6.572418501788025e-08,
"loss": 0.4359,
"step": 1620
},
{
"epoch": 0.9564449676280165,
"grad_norm": 2.3432337307633215,
"learning_rate": 5.768347255177609e-08,
"loss": 0.4169,
"step": 1625
},
{
"epoch": 0.9593878752207181,
"grad_norm": 2.809678686382748,
"learning_rate": 5.0164376824535235e-08,
"loss": 0.4353,
"step": 1630
},
{
"epoch": 0.9623307828134197,
"grad_norm": 2.36827229347058,
"learning_rate": 4.3167691408599334e-08,
"loss": 0.4284,
"step": 1635
},
{
"epoch": 0.9652736904061212,
"grad_norm": 2.29034432840639,
"learning_rate": 3.6694154740733456e-08,
"loss": 0.4443,
"step": 1640
},
{
"epoch": 0.9682165979988229,
"grad_norm": 2.313570657915266,
"learning_rate": 3.074445004409121e-08,
"loss": 0.4382,
"step": 1645
},
{
"epoch": 0.9711595055915244,
"grad_norm": 2.3743788836695776,
"learning_rate": 2.5319205256107425e-08,
"loss": 0.418,
"step": 1650
},
{
"epoch": 0.974102413184226,
"grad_norm": 2.3747057502664854,
"learning_rate": 2.0418992962224495e-08,
"loss": 0.4463,
"step": 1655
},
{
"epoch": 0.9770453207769276,
"grad_norm": 2.2646005029150618,
"learning_rate": 1.6044330335460712e-08,
"loss": 0.4517,
"step": 1660
},
{
"epoch": 0.9799882283696292,
"grad_norm": 2.3395141439258302,
"learning_rate": 1.2195679081827283e-08,
"loss": 0.447,
"step": 1665
},
{
"epoch": 0.9829311359623308,
"grad_norm": 2.7310828433799377,
"learning_rate": 8.873445391601176e-09,
"loss": 0.4445,
"step": 1670
},
{
"epoch": 0.9858740435550324,
"grad_norm": 2.3333374487486203,
"learning_rate": 6.077979896453312e-09,
"loss": 0.4354,
"step": 1675
},
{
"epoch": 0.9888169511477339,
"grad_norm": 2.3497335141047513,
"learning_rate": 3.809577632443717e-09,
"loss": 0.4321,
"step": 1680
},
{
"epoch": 0.9917598587404356,
"grad_norm": 2.3914591270933983,
"learning_rate": 2.0684780088825463e-09,
"loss": 0.4372,
"step": 1685
},
{
"epoch": 0.9947027663331371,
"grad_norm": 2.3017358226327653,
"learning_rate": 8.548647830630696e-10,
"loss": 0.4339,
"step": 1690
},
{
"epoch": 0.9976456739258387,
"grad_norm": 2.621035259676665,
"learning_rate": 1.6886604086774337e-10,
"loss": 0.4319,
"step": 1695
},
{
"epoch": 1.0,
"eval_runtime": 4.2193,
"eval_samples_per_second": 2.37,
"eval_steps_per_second": 0.711,
"step": 1699
},
{
"epoch": 1.0,
"step": 1699,
"total_flos": 116982426894336.0,
"train_loss": 0.6940072515278861,
"train_runtime": 20640.2326,
"train_samples_per_second": 1.317,
"train_steps_per_second": 0.082
}
],
"logging_steps": 5,
"max_steps": 1699,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 116982426894336.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}