| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 13.780893042575286, |
| "eval_steps": 500, |
| "global_step": 840, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.08307372793354102, |
| "grad_norm": 0.5125107169151306, |
| "learning_rate": 2.0833333333333334e-06, |
| "loss": 3.5515, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.16614745586708204, |
| "grad_norm": 0.4532736539840698, |
| "learning_rate": 4.166666666666667e-06, |
| "loss": 3.6807, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.24922118380062305, |
| "grad_norm": 0.4483909606933594, |
| "learning_rate": 6.25e-06, |
| "loss": 3.5357, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.3322949117341641, |
| "grad_norm": 0.5020395517349243, |
| "learning_rate": 8.333333333333334e-06, |
| "loss": 3.645, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.4153686396677051, |
| "grad_norm": 0.8230155110359192, |
| "learning_rate": 1.0416666666666668e-05, |
| "loss": 3.5682, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.4984423676012461, |
| "grad_norm": 1.3333394527435303, |
| "learning_rate": 1.25e-05, |
| "loss": 3.5155, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.5815160955347871, |
| "grad_norm": 0.7118450999259949, |
| "learning_rate": 1.4583333333333335e-05, |
| "loss": 3.4724, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.6645898234683282, |
| "grad_norm": 1.0282073020935059, |
| "learning_rate": 1.6666666666666667e-05, |
| "loss": 3.4205, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.7476635514018691, |
| "grad_norm": 1.0468826293945312, |
| "learning_rate": 1.8750000000000002e-05, |
| "loss": 3.3295, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.8307372793354102, |
| "grad_norm": 0.8730693459510803, |
| "learning_rate": 2.0833333333333336e-05, |
| "loss": 3.0831, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.9138110072689511, |
| "grad_norm": 0.9377574324607849, |
| "learning_rate": 2.2916666666666667e-05, |
| "loss": 2.8412, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.9968847352024922, |
| "grad_norm": 1.0129190683364868, |
| "learning_rate": 2.5e-05, |
| "loss": 2.3229, |
| "step": 60 |
| }, |
| { |
| "epoch": 1.066458982346833, |
| "grad_norm": 0.8808080554008484, |
| "learning_rate": 2.7083333333333332e-05, |
| "loss": 2.0489, |
| "step": 65 |
| }, |
| { |
| "epoch": 1.1495327102803738, |
| "grad_norm": 0.8092290759086609, |
| "learning_rate": 2.916666666666667e-05, |
| "loss": 1.9307, |
| "step": 70 |
| }, |
| { |
| "epoch": 1.2326064382139148, |
| "grad_norm": 0.8002105951309204, |
| "learning_rate": 3.125e-05, |
| "loss": 1.7402, |
| "step": 75 |
| }, |
| { |
| "epoch": 1.3156801661474558, |
| "grad_norm": 0.8581973314285278, |
| "learning_rate": 3.3333333333333335e-05, |
| "loss": 1.6696, |
| "step": 80 |
| }, |
| { |
| "epoch": 1.398753894080997, |
| "grad_norm": 0.7443532347679138, |
| "learning_rate": 3.541666666666667e-05, |
| "loss": 1.5175, |
| "step": 85 |
| }, |
| { |
| "epoch": 1.4818276220145379, |
| "grad_norm": 0.9584633708000183, |
| "learning_rate": 3.7500000000000003e-05, |
| "loss": 1.3968, |
| "step": 90 |
| }, |
| { |
| "epoch": 1.5649013499480788, |
| "grad_norm": 0.8683136105537415, |
| "learning_rate": 3.958333333333333e-05, |
| "loss": 1.299, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.64797507788162, |
| "grad_norm": 0.7825261950492859, |
| "learning_rate": 4.166666666666667e-05, |
| "loss": 1.1271, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.731048805815161, |
| "grad_norm": 0.654670000076294, |
| "learning_rate": 4.375e-05, |
| "loss": 1.14, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.814122533748702, |
| "grad_norm": 0.7511588335037231, |
| "learning_rate": 4.5833333333333334e-05, |
| "loss": 1.1614, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.897196261682243, |
| "grad_norm": 0.6596113443374634, |
| "learning_rate": 4.791666666666667e-05, |
| "loss": 1.0587, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.980269989615784, |
| "grad_norm": 0.7166474461555481, |
| "learning_rate": 5e-05, |
| "loss": 1.0169, |
| "step": 120 |
| }, |
| { |
| "epoch": 2.0498442367601246, |
| "grad_norm": 0.6620935797691345, |
| "learning_rate": 4.999735579817769e-05, |
| "loss": 0.985, |
| "step": 125 |
| }, |
| { |
| "epoch": 2.132917964693666, |
| "grad_norm": 0.7846258878707886, |
| "learning_rate": 4.998942375205502e-05, |
| "loss": 0.9594, |
| "step": 130 |
| }, |
| { |
| "epoch": 2.2159916926272065, |
| "grad_norm": 0.652454137802124, |
| "learning_rate": 4.997620553954645e-05, |
| "loss": 0.9322, |
| "step": 135 |
| }, |
| { |
| "epoch": 2.2990654205607477, |
| "grad_norm": 0.7020059823989868, |
| "learning_rate": 4.995770395678171e-05, |
| "loss": 0.9242, |
| "step": 140 |
| }, |
| { |
| "epoch": 2.382139148494289, |
| "grad_norm": 0.8148695826530457, |
| "learning_rate": 4.993392291751431e-05, |
| "loss": 0.8394, |
| "step": 145 |
| }, |
| { |
| "epoch": 2.4652128764278296, |
| "grad_norm": 0.8813133835792542, |
| "learning_rate": 4.990486745229364e-05, |
| "loss": 0.992, |
| "step": 150 |
| }, |
| { |
| "epoch": 2.5482866043613708, |
| "grad_norm": 0.7893730401992798, |
| "learning_rate": 4.987054370740083e-05, |
| "loss": 0.899, |
| "step": 155 |
| }, |
| { |
| "epoch": 2.6313603322949115, |
| "grad_norm": 0.7719221711158752, |
| "learning_rate": 4.983095894354858e-05, |
| "loss": 0.9416, |
| "step": 160 |
| }, |
| { |
| "epoch": 2.7144340602284527, |
| "grad_norm": 0.8439667820930481, |
| "learning_rate": 4.9786121534345265e-05, |
| "loss": 0.8401, |
| "step": 165 |
| }, |
| { |
| "epoch": 2.797507788161994, |
| "grad_norm": 0.833251953125, |
| "learning_rate": 4.973604096452361e-05, |
| "loss": 0.817, |
| "step": 170 |
| }, |
| { |
| "epoch": 2.8805815160955346, |
| "grad_norm": 0.7526916265487671, |
| "learning_rate": 4.9680727827934354e-05, |
| "loss": 0.8191, |
| "step": 175 |
| }, |
| { |
| "epoch": 2.9636552440290758, |
| "grad_norm": 1.0640058517456055, |
| "learning_rate": 4.962019382530521e-05, |
| "loss": 0.8607, |
| "step": 180 |
| }, |
| { |
| "epoch": 3.0332294911734166, |
| "grad_norm": 0.784114420413971, |
| "learning_rate": 4.9554451761765766e-05, |
| "loss": 0.761, |
| "step": 185 |
| }, |
| { |
| "epoch": 3.1163032191069573, |
| "grad_norm": 0.9396352767944336, |
| "learning_rate": 4.948351554413879e-05, |
| "loss": 0.6821, |
| "step": 190 |
| }, |
| { |
| "epoch": 3.1993769470404985, |
| "grad_norm": 0.9770582914352417, |
| "learning_rate": 4.940740017799833e-05, |
| "loss": 0.742, |
| "step": 195 |
| }, |
| { |
| "epoch": 3.2824506749740396, |
| "grad_norm": 0.8752853870391846, |
| "learning_rate": 4.9326121764495596e-05, |
| "loss": 0.69, |
| "step": 200 |
| }, |
| { |
| "epoch": 3.3655244029075804, |
| "grad_norm": 0.8496589064598083, |
| "learning_rate": 4.92396974969529e-05, |
| "loss": 0.769, |
| "step": 205 |
| }, |
| { |
| "epoch": 3.4485981308411215, |
| "grad_norm": 0.8192639350891113, |
| "learning_rate": 4.914814565722671e-05, |
| "loss": 0.7298, |
| "step": 210 |
| }, |
| { |
| "epoch": 3.5316718587746623, |
| "grad_norm": 1.00070059299469, |
| "learning_rate": 4.905148561184033e-05, |
| "loss": 0.7815, |
| "step": 215 |
| }, |
| { |
| "epoch": 3.6147455867082035, |
| "grad_norm": 1.1614112854003906, |
| "learning_rate": 4.894973780788722e-05, |
| "loss": 0.7975, |
| "step": 220 |
| }, |
| { |
| "epoch": 3.6978193146417446, |
| "grad_norm": 1.014397382736206, |
| "learning_rate": 4.884292376870567e-05, |
| "loss": 0.696, |
| "step": 225 |
| }, |
| { |
| "epoch": 3.7808930425752854, |
| "grad_norm": 0.9468530416488647, |
| "learning_rate": 4.873106608932585e-05, |
| "loss": 0.7194, |
| "step": 230 |
| }, |
| { |
| "epoch": 3.8639667705088265, |
| "grad_norm": 1.2201341390609741, |
| "learning_rate": 4.8614188431690125e-05, |
| "loss": 0.7795, |
| "step": 235 |
| }, |
| { |
| "epoch": 3.9470404984423677, |
| "grad_norm": 0.9476346373558044, |
| "learning_rate": 4.849231551964771e-05, |
| "loss": 0.7202, |
| "step": 240 |
| }, |
| { |
| "epoch": 4.0166147455867085, |
| "grad_norm": 0.9945451021194458, |
| "learning_rate": 4.836547313372471e-05, |
| "loss": 0.6931, |
| "step": 245 |
| }, |
| { |
| "epoch": 4.099688473520249, |
| "grad_norm": 1.1113696098327637, |
| "learning_rate": 4.823368810567056e-05, |
| "loss": 0.6557, |
| "step": 250 |
| }, |
| { |
| "epoch": 4.18276220145379, |
| "grad_norm": 1.3515146970748901, |
| "learning_rate": 4.8096988312782174e-05, |
| "loss": 0.6295, |
| "step": 255 |
| }, |
| { |
| "epoch": 4.265835929387332, |
| "grad_norm": 1.375272274017334, |
| "learning_rate": 4.7955402672006854e-05, |
| "loss": 0.6423, |
| "step": 260 |
| }, |
| { |
| "epoch": 4.348909657320872, |
| "grad_norm": 1.1683202981948853, |
| "learning_rate": 4.780896113382536e-05, |
| "loss": 0.6167, |
| "step": 265 |
| }, |
| { |
| "epoch": 4.431983385254413, |
| "grad_norm": 1.2091785669326782, |
| "learning_rate": 4.765769467591625e-05, |
| "loss": 0.6373, |
| "step": 270 |
| }, |
| { |
| "epoch": 4.515057113187955, |
| "grad_norm": 1.3007235527038574, |
| "learning_rate": 4.750163529660303e-05, |
| "loss": 0.5941, |
| "step": 275 |
| }, |
| { |
| "epoch": 4.598130841121495, |
| "grad_norm": 1.2785530090332031, |
| "learning_rate": 4.734081600808531e-05, |
| "loss": 0.6384, |
| "step": 280 |
| }, |
| { |
| "epoch": 4.681204569055036, |
| "grad_norm": 1.2808083295822144, |
| "learning_rate": 4.717527082945554e-05, |
| "loss": 0.6592, |
| "step": 285 |
| }, |
| { |
| "epoch": 4.764278296988578, |
| "grad_norm": 1.1978651285171509, |
| "learning_rate": 4.700503477950278e-05, |
| "loss": 0.6285, |
| "step": 290 |
| }, |
| { |
| "epoch": 4.8473520249221185, |
| "grad_norm": 1.3983532190322876, |
| "learning_rate": 4.68301438693049e-05, |
| "loss": 0.5941, |
| "step": 295 |
| }, |
| { |
| "epoch": 4.930425752855659, |
| "grad_norm": 1.286287784576416, |
| "learning_rate": 4.665063509461097e-05, |
| "loss": 0.5652, |
| "step": 300 |
| }, |
| { |
| "epoch": 5.0, |
| "grad_norm": 3.1714959144592285, |
| "learning_rate": 4.6466546428015336e-05, |
| "loss": 0.5657, |
| "step": 305 |
| }, |
| { |
| "epoch": 5.083073727933541, |
| "grad_norm": 1.0886554718017578, |
| "learning_rate": 4.627791681092499e-05, |
| "loss": 0.4776, |
| "step": 310 |
| }, |
| { |
| "epoch": 5.166147455867082, |
| "grad_norm": 1.3735958337783813, |
| "learning_rate": 4.608478614532215e-05, |
| "loss": 0.5114, |
| "step": 315 |
| }, |
| { |
| "epoch": 5.249221183800623, |
| "grad_norm": 1.5809016227722168, |
| "learning_rate": 4.588719528532342e-05, |
| "loss": 0.5537, |
| "step": 320 |
| }, |
| { |
| "epoch": 5.332294911734164, |
| "grad_norm": 1.401424765586853, |
| "learning_rate": 4.568518602853776e-05, |
| "loss": 0.5299, |
| "step": 325 |
| }, |
| { |
| "epoch": 5.415368639667705, |
| "grad_norm": 1.467054009437561, |
| "learning_rate": 4.54788011072248e-05, |
| "loss": 0.5236, |
| "step": 330 |
| }, |
| { |
| "epoch": 5.498442367601246, |
| "grad_norm": 1.253794550895691, |
| "learning_rate": 4.526808417925531e-05, |
| "loss": 0.5632, |
| "step": 335 |
| }, |
| { |
| "epoch": 5.581516095534787, |
| "grad_norm": 1.4579623937606812, |
| "learning_rate": 4.50530798188761e-05, |
| "loss": 0.5556, |
| "step": 340 |
| }, |
| { |
| "epoch": 5.6645898234683285, |
| "grad_norm": 1.171007513999939, |
| "learning_rate": 4.4833833507280884e-05, |
| "loss": 0.4934, |
| "step": 345 |
| }, |
| { |
| "epoch": 5.747663551401869, |
| "grad_norm": 1.4656859636306763, |
| "learning_rate": 4.4610391622989396e-05, |
| "loss": 0.5697, |
| "step": 350 |
| }, |
| { |
| "epoch": 5.83073727933541, |
| "grad_norm": 1.4671518802642822, |
| "learning_rate": 4.438280143203665e-05, |
| "loss": 0.5289, |
| "step": 355 |
| }, |
| { |
| "epoch": 5.913811007268951, |
| "grad_norm": 1.4010504484176636, |
| "learning_rate": 4.415111107797445e-05, |
| "loss": 0.5205, |
| "step": 360 |
| }, |
| { |
| "epoch": 5.996884735202492, |
| "grad_norm": 1.6909047365188599, |
| "learning_rate": 4.391536957168733e-05, |
| "loss": 0.5469, |
| "step": 365 |
| }, |
| { |
| "epoch": 6.066458982346833, |
| "grad_norm": 1.3057804107666016, |
| "learning_rate": 4.36756267810249e-05, |
| "loss": 0.5087, |
| "step": 370 |
| }, |
| { |
| "epoch": 6.149532710280374, |
| "grad_norm": 1.7389354705810547, |
| "learning_rate": 4.34319334202531e-05, |
| "loss": 0.446, |
| "step": 375 |
| }, |
| { |
| "epoch": 6.232606438213915, |
| "grad_norm": 1.6178336143493652, |
| "learning_rate": 4.318434103932622e-05, |
| "loss": 0.4888, |
| "step": 380 |
| }, |
| { |
| "epoch": 6.315680166147456, |
| "grad_norm": 1.6565852165222168, |
| "learning_rate": 4.293290201298223e-05, |
| "loss": 0.4784, |
| "step": 385 |
| }, |
| { |
| "epoch": 6.398753894080997, |
| "grad_norm": 1.4080264568328857, |
| "learning_rate": 4.267766952966369e-05, |
| "loss": 0.4155, |
| "step": 390 |
| }, |
| { |
| "epoch": 6.481827622014538, |
| "grad_norm": 1.4906692504882812, |
| "learning_rate": 4.241869758026638e-05, |
| "loss": 0.4854, |
| "step": 395 |
| }, |
| { |
| "epoch": 6.564901349948079, |
| "grad_norm": 1.6544594764709473, |
| "learning_rate": 4.215604094671835e-05, |
| "loss": 0.4753, |
| "step": 400 |
| }, |
| { |
| "epoch": 6.64797507788162, |
| "grad_norm": 1.3182097673416138, |
| "learning_rate": 4.188975519039151e-05, |
| "loss": 0.4352, |
| "step": 405 |
| }, |
| { |
| "epoch": 6.731048805815161, |
| "grad_norm": 1.531633734703064, |
| "learning_rate": 4.1619896640348445e-05, |
| "loss": 0.4786, |
| "step": 410 |
| }, |
| { |
| "epoch": 6.814122533748702, |
| "grad_norm": 1.358223557472229, |
| "learning_rate": 4.1346522381426744e-05, |
| "loss": 0.4108, |
| "step": 415 |
| }, |
| { |
| "epoch": 6.897196261682243, |
| "grad_norm": 1.6483898162841797, |
| "learning_rate": 4.1069690242163484e-05, |
| "loss": 0.4828, |
| "step": 420 |
| }, |
| { |
| "epoch": 6.980269989615784, |
| "grad_norm": 1.5941141843795776, |
| "learning_rate": 4.078945878256244e-05, |
| "loss": 0.4276, |
| "step": 425 |
| }, |
| { |
| "epoch": 7.049844236760125, |
| "grad_norm": 1.367634892463684, |
| "learning_rate": 4.05058872817065e-05, |
| "loss": 0.3997, |
| "step": 430 |
| }, |
| { |
| "epoch": 7.132917964693665, |
| "grad_norm": 1.3028111457824707, |
| "learning_rate": 4.021903572521802e-05, |
| "loss": 0.3919, |
| "step": 435 |
| }, |
| { |
| "epoch": 7.215991692627207, |
| "grad_norm": 1.8897424936294556, |
| "learning_rate": 3.9928964792569655e-05, |
| "loss": 0.3778, |
| "step": 440 |
| }, |
| { |
| "epoch": 7.299065420560748, |
| "grad_norm": 1.6549698114395142, |
| "learning_rate": 3.963573584424852e-05, |
| "loss": 0.408, |
| "step": 445 |
| }, |
| { |
| "epoch": 7.382139148494288, |
| "grad_norm": 1.3696308135986328, |
| "learning_rate": 3.933941090877615e-05, |
| "loss": 0.4262, |
| "step": 450 |
| }, |
| { |
| "epoch": 7.46521287642783, |
| "grad_norm": 1.6997928619384766, |
| "learning_rate": 3.9040052669587325e-05, |
| "loss": 0.3982, |
| "step": 455 |
| }, |
| { |
| "epoch": 7.548286604361371, |
| "grad_norm": 1.3938806056976318, |
| "learning_rate": 3.873772445177015e-05, |
| "loss": 0.4113, |
| "step": 460 |
| }, |
| { |
| "epoch": 7.6313603322949115, |
| "grad_norm": 1.8341262340545654, |
| "learning_rate": 3.84324902086706e-05, |
| "loss": 0.4265, |
| "step": 465 |
| }, |
| { |
| "epoch": 7.714434060228453, |
| "grad_norm": 1.6543951034545898, |
| "learning_rate": 3.8124414508364e-05, |
| "loss": 0.4112, |
| "step": 470 |
| }, |
| { |
| "epoch": 7.797507788161994, |
| "grad_norm": 1.5131211280822754, |
| "learning_rate": 3.781356251999663e-05, |
| "loss": 0.4098, |
| "step": 475 |
| }, |
| { |
| "epoch": 7.880581516095535, |
| "grad_norm": 1.7143309116363525, |
| "learning_rate": 3.7500000000000003e-05, |
| "loss": 0.4183, |
| "step": 480 |
| }, |
| { |
| "epoch": 7.963655244029075, |
| "grad_norm": 1.4201680421829224, |
| "learning_rate": 3.718379327818106e-05, |
| "loss": 0.388, |
| "step": 485 |
| }, |
| { |
| "epoch": 8.033229491173417, |
| "grad_norm": 1.36739182472229, |
| "learning_rate": 3.686500924369101e-05, |
| "loss": 0.3927, |
| "step": 490 |
| }, |
| { |
| "epoch": 8.116303219106957, |
| "grad_norm": 1.348694086074829, |
| "learning_rate": 3.654371533087586e-05, |
| "loss": 0.3614, |
| "step": 495 |
| }, |
| { |
| "epoch": 8.199376947040498, |
| "grad_norm": 1.7005705833435059, |
| "learning_rate": 3.621997950501156e-05, |
| "loss": 0.3509, |
| "step": 500 |
| }, |
| { |
| "epoch": 8.28245067497404, |
| "grad_norm": 1.4668842554092407, |
| "learning_rate": 3.589387024792699e-05, |
| "loss": 0.344, |
| "step": 505 |
| }, |
| { |
| "epoch": 8.36552440290758, |
| "grad_norm": 1.6029119491577148, |
| "learning_rate": 3.556545654351749e-05, |
| "loss": 0.3599, |
| "step": 510 |
| }, |
| { |
| "epoch": 8.448598130841122, |
| "grad_norm": 1.5699125528335571, |
| "learning_rate": 3.523480786315231e-05, |
| "loss": 0.3579, |
| "step": 515 |
| }, |
| { |
| "epoch": 8.531671858774663, |
| "grad_norm": 1.4507211446762085, |
| "learning_rate": 3.490199415097892e-05, |
| "loss": 0.3341, |
| "step": 520 |
| }, |
| { |
| "epoch": 8.614745586708203, |
| "grad_norm": 1.460924744606018, |
| "learning_rate": 3.456708580912725e-05, |
| "loss": 0.3762, |
| "step": 525 |
| }, |
| { |
| "epoch": 8.697819314641745, |
| "grad_norm": 1.6329962015151978, |
| "learning_rate": 3.423015368281711e-05, |
| "loss": 0.3948, |
| "step": 530 |
| }, |
| { |
| "epoch": 8.780893042575286, |
| "grad_norm": 1.5261616706848145, |
| "learning_rate": 3.389126904537192e-05, |
| "loss": 0.3594, |
| "step": 535 |
| }, |
| { |
| "epoch": 8.863966770508826, |
| "grad_norm": 1.6733344793319702, |
| "learning_rate": 3.355050358314172e-05, |
| "loss": 0.3856, |
| "step": 540 |
| }, |
| { |
| "epoch": 8.947040498442368, |
| "grad_norm": 1.4160898923873901, |
| "learning_rate": 3.3207929380339034e-05, |
| "loss": 0.3875, |
| "step": 545 |
| }, |
| { |
| "epoch": 9.016614745586708, |
| "grad_norm": 1.2456085681915283, |
| "learning_rate": 3.2863618903790346e-05, |
| "loss": 0.3318, |
| "step": 550 |
| }, |
| { |
| "epoch": 9.09968847352025, |
| "grad_norm": 1.309977650642395, |
| "learning_rate": 3.251764498760683e-05, |
| "loss": 0.3239, |
| "step": 555 |
| }, |
| { |
| "epoch": 9.18276220145379, |
| "grad_norm": 1.6747145652770996, |
| "learning_rate": 3.217008081777726e-05, |
| "loss": 0.342, |
| "step": 560 |
| }, |
| { |
| "epoch": 9.26583592938733, |
| "grad_norm": 1.6204376220703125, |
| "learning_rate": 3.182099991668653e-05, |
| "loss": 0.3368, |
| "step": 565 |
| }, |
| { |
| "epoch": 9.348909657320872, |
| "grad_norm": 1.6952364444732666, |
| "learning_rate": 3.147047612756302e-05, |
| "loss": 0.333, |
| "step": 570 |
| }, |
| { |
| "epoch": 9.431983385254414, |
| "grad_norm": 1.6254109144210815, |
| "learning_rate": 3.11185835988581e-05, |
| "loss": 0.3488, |
| "step": 575 |
| }, |
| { |
| "epoch": 9.515057113187954, |
| "grad_norm": 1.7684122323989868, |
| "learning_rate": 3.076539676856101e-05, |
| "loss": 0.3461, |
| "step": 580 |
| }, |
| { |
| "epoch": 9.598130841121495, |
| "grad_norm": 1.480283260345459, |
| "learning_rate": 3.0410990348452573e-05, |
| "loss": 0.3457, |
| "step": 585 |
| }, |
| { |
| "epoch": 9.681204569055037, |
| "grad_norm": 1.4912574291229248, |
| "learning_rate": 3.0055439308300952e-05, |
| "loss": 0.3307, |
| "step": 590 |
| }, |
| { |
| "epoch": 9.764278296988577, |
| "grad_norm": 1.5759741067886353, |
| "learning_rate": 2.9698818860002797e-05, |
| "loss": 0.3387, |
| "step": 595 |
| }, |
| { |
| "epoch": 9.847352024922118, |
| "grad_norm": 1.475316047668457, |
| "learning_rate": 2.9341204441673266e-05, |
| "loss": 0.342, |
| "step": 600 |
| }, |
| { |
| "epoch": 9.93042575285566, |
| "grad_norm": 1.5280460119247437, |
| "learning_rate": 2.898267170168807e-05, |
| "loss": 0.3325, |
| "step": 605 |
| }, |
| { |
| "epoch": 10.0, |
| "grad_norm": 4.123524188995361, |
| "learning_rate": 2.8623296482681166e-05, |
| "loss": 0.3528, |
| "step": 610 |
| }, |
| { |
| "epoch": 10.083073727933542, |
| "grad_norm": 1.3620072603225708, |
| "learning_rate": 2.8263154805501297e-05, |
| "loss": 0.3282, |
| "step": 615 |
| }, |
| { |
| "epoch": 10.166147455867081, |
| "grad_norm": 1.8294273614883423, |
| "learning_rate": 2.7902322853130757e-05, |
| "loss": 0.3066, |
| "step": 620 |
| }, |
| { |
| "epoch": 10.249221183800623, |
| "grad_norm": 1.5608752965927124, |
| "learning_rate": 2.7540876954570048e-05, |
| "loss": 0.3033, |
| "step": 625 |
| }, |
| { |
| "epoch": 10.332294911734165, |
| "grad_norm": 1.7856054306030273, |
| "learning_rate": 2.717889356869146e-05, |
| "loss": 0.2969, |
| "step": 630 |
| }, |
| { |
| "epoch": 10.415368639667705, |
| "grad_norm": 1.4942408800125122, |
| "learning_rate": 2.681644926806527e-05, |
| "loss": 0.3153, |
| "step": 635 |
| }, |
| { |
| "epoch": 10.498442367601246, |
| "grad_norm": 1.6234569549560547, |
| "learning_rate": 2.6453620722761896e-05, |
| "loss": 0.3277, |
| "step": 640 |
| }, |
| { |
| "epoch": 10.581516095534788, |
| "grad_norm": 1.6694717407226562, |
| "learning_rate": 2.6090484684133404e-05, |
| "loss": 0.3228, |
| "step": 645 |
| }, |
| { |
| "epoch": 10.664589823468328, |
| "grad_norm": 1.6755950450897217, |
| "learning_rate": 2.5727117968577784e-05, |
| "loss": 0.3316, |
| "step": 650 |
| }, |
| { |
| "epoch": 10.74766355140187, |
| "grad_norm": 1.3962074518203735, |
| "learning_rate": 2.536359744128957e-05, |
| "loss": 0.3188, |
| "step": 655 |
| }, |
| { |
| "epoch": 10.83073727933541, |
| "grad_norm": 1.7147635221481323, |
| "learning_rate": 2.5e-05, |
| "loss": 0.3463, |
| "step": 660 |
| }, |
| { |
| "epoch": 10.91381100726895, |
| "grad_norm": 1.5997766256332397, |
| "learning_rate": 2.4636402558710432e-05, |
| "loss": 0.335, |
| "step": 665 |
| }, |
| { |
| "epoch": 10.996884735202492, |
| "grad_norm": 1.483155369758606, |
| "learning_rate": 2.4272882031422215e-05, |
| "loss": 0.3084, |
| "step": 670 |
| }, |
| { |
| "epoch": 11.066458982346832, |
| "grad_norm": 1.3599936962127686, |
| "learning_rate": 2.3909515315866605e-05, |
| "loss": 0.311, |
| "step": 675 |
| }, |
| { |
| "epoch": 11.149532710280374, |
| "grad_norm": 1.5479919910430908, |
| "learning_rate": 2.3546379277238107e-05, |
| "loss": 0.2999, |
| "step": 680 |
| }, |
| { |
| "epoch": 11.232606438213915, |
| "grad_norm": 1.5448635816574097, |
| "learning_rate": 2.3183550731934735e-05, |
| "loss": 0.291, |
| "step": 685 |
| }, |
| { |
| "epoch": 11.315680166147455, |
| "grad_norm": 1.4930479526519775, |
| "learning_rate": 2.2821106431308544e-05, |
| "loss": 0.2879, |
| "step": 690 |
| }, |
| { |
| "epoch": 11.398753894080997, |
| "grad_norm": 1.578282356262207, |
| "learning_rate": 2.2459123045429954e-05, |
| "loss": 0.3033, |
| "step": 695 |
| }, |
| { |
| "epoch": 11.481827622014539, |
| "grad_norm": 1.4327830076217651, |
| "learning_rate": 2.2097677146869242e-05, |
| "loss": 0.2946, |
| "step": 700 |
| }, |
| { |
| "epoch": 11.564901349948078, |
| "grad_norm": 1.75092351436615, |
| "learning_rate": 2.173684519449872e-05, |
| "loss": 0.2965, |
| "step": 705 |
| }, |
| { |
| "epoch": 11.64797507788162, |
| "grad_norm": 1.4633839130401611, |
| "learning_rate": 2.1376703517318837e-05, |
| "loss": 0.3082, |
| "step": 710 |
| }, |
| { |
| "epoch": 11.731048805815162, |
| "grad_norm": 1.4483939409255981, |
| "learning_rate": 2.101732829831194e-05, |
| "loss": 0.3048, |
| "step": 715 |
| }, |
| { |
| "epoch": 11.814122533748701, |
| "grad_norm": 1.4273526668548584, |
| "learning_rate": 2.0658795558326743e-05, |
| "loss": 0.3093, |
| "step": 720 |
| }, |
| { |
| "epoch": 11.897196261682243, |
| "grad_norm": 1.536641240119934, |
| "learning_rate": 2.0301181139997205e-05, |
| "loss": 0.3185, |
| "step": 725 |
| }, |
| { |
| "epoch": 11.980269989615785, |
| "grad_norm": 1.6694177389144897, |
| "learning_rate": 1.9944560691699057e-05, |
| "loss": 0.3197, |
| "step": 730 |
| }, |
| { |
| "epoch": 12.049844236760125, |
| "grad_norm": 1.5738413333892822, |
| "learning_rate": 1.958900965154743e-05, |
| "loss": 0.3235, |
| "step": 735 |
| }, |
| { |
| "epoch": 12.132917964693666, |
| "grad_norm": 1.4949880838394165, |
| "learning_rate": 1.9234603231438995e-05, |
| "loss": 0.2752, |
| "step": 740 |
| }, |
| { |
| "epoch": 12.215991692627206, |
| "grad_norm": 1.7407513856887817, |
| "learning_rate": 1.8881416401141904e-05, |
| "loss": 0.282, |
| "step": 745 |
| }, |
| { |
| "epoch": 12.299065420560748, |
| "grad_norm": 1.6446201801300049, |
| "learning_rate": 1.852952387243698e-05, |
| "loss": 0.2847, |
| "step": 750 |
| }, |
| { |
| "epoch": 12.38213914849429, |
| "grad_norm": 1.5613073110580444, |
| "learning_rate": 1.8179000083313483e-05, |
| "loss": 0.2923, |
| "step": 755 |
| }, |
| { |
| "epoch": 12.46521287642783, |
| "grad_norm": 1.3661807775497437, |
| "learning_rate": 1.7829919182222752e-05, |
| "loss": 0.2881, |
| "step": 760 |
| }, |
| { |
| "epoch": 12.54828660436137, |
| "grad_norm": 1.5903205871582031, |
| "learning_rate": 1.7482355012393177e-05, |
| "loss": 0.2897, |
| "step": 765 |
| }, |
| { |
| "epoch": 12.631360332294912, |
| "grad_norm": 1.5441614389419556, |
| "learning_rate": 1.7136381096209664e-05, |
| "loss": 0.292, |
| "step": 770 |
| }, |
| { |
| "epoch": 12.714434060228452, |
| "grad_norm": 1.6512565612792969, |
| "learning_rate": 1.6792070619660975e-05, |
| "loss": 0.289, |
| "step": 775 |
| }, |
| { |
| "epoch": 12.797507788161994, |
| "grad_norm": 1.5514371395111084, |
| "learning_rate": 1.6449496416858284e-05, |
| "loss": 0.3012, |
| "step": 780 |
| }, |
| { |
| "epoch": 12.880581516095535, |
| "grad_norm": 1.7717050313949585, |
| "learning_rate": 1.6108730954628093e-05, |
| "loss": 0.3055, |
| "step": 785 |
| }, |
| { |
| "epoch": 12.963655244029075, |
| "grad_norm": 1.4902904033660889, |
| "learning_rate": 1.5769846317182893e-05, |
| "loss": 0.2913, |
| "step": 790 |
| }, |
| { |
| "epoch": 13.033229491173417, |
| "grad_norm": 1.3144031763076782, |
| "learning_rate": 1.5432914190872757e-05, |
| "loss": 0.3069, |
| "step": 795 |
| }, |
| { |
| "epoch": 13.116303219106957, |
| "grad_norm": 1.3925071954727173, |
| "learning_rate": 1.509800584902108e-05, |
| "loss": 0.2701, |
| "step": 800 |
| }, |
| { |
| "epoch": 13.199376947040498, |
| "grad_norm": 1.7400624752044678, |
| "learning_rate": 1.4765192136847685e-05, |
| "loss": 0.2742, |
| "step": 805 |
| }, |
| { |
| "epoch": 13.28245067497404, |
| "grad_norm": 1.5998958349227905, |
| "learning_rate": 1.443454345648252e-05, |
| "loss": 0.2778, |
| "step": 810 |
| }, |
| { |
| "epoch": 13.36552440290758, |
| "grad_norm": 1.7994098663330078, |
| "learning_rate": 1.4106129752073022e-05, |
| "loss": 0.2836, |
| "step": 815 |
| }, |
| { |
| "epoch": 13.448598130841122, |
| "grad_norm": 1.672831416130066, |
| "learning_rate": 1.3780020494988446e-05, |
| "loss": 0.2709, |
| "step": 820 |
| }, |
| { |
| "epoch": 13.531671858774663, |
| "grad_norm": 1.7876700162887573, |
| "learning_rate": 1.3456284669124158e-05, |
| "loss": 0.2687, |
| "step": 825 |
| }, |
| { |
| "epoch": 13.614745586708203, |
| "grad_norm": 1.6950455904006958, |
| "learning_rate": 1.313499075630899e-05, |
| "loss": 0.2904, |
| "step": 830 |
| }, |
| { |
| "epoch": 13.697819314641745, |
| "grad_norm": 1.6819844245910645, |
| "learning_rate": 1.2816206721818944e-05, |
| "loss": 0.2876, |
| "step": 835 |
| }, |
| { |
| "epoch": 13.780893042575286, |
| "grad_norm": 1.5784549713134766, |
| "learning_rate": 1.2500000000000006e-05, |
| "loss": 0.286, |
| "step": 840 |
| } |
| ], |
| "logging_steps": 5, |
| "max_steps": 1200, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 20, |
| "save_steps": 60, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 3.623805579480269e+16, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |