{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9786802030456854,
  "eval_steps": 500,
  "global_step": 183,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.016243654822335026,
      "grad_norm": 5.861148280465038,
      "learning_rate": 4.210526315789474e-06,
      "loss": 0.7971,
      "step": 1
    },
    {
      "epoch": 0.03248730964467005,
      "grad_norm": 5.903622852368125,
      "learning_rate": 8.421052631578948e-06,
      "loss": 0.8013,
      "step": 2
    },
    {
      "epoch": 0.048730964467005075,
      "grad_norm": 4.414814742973429,
      "learning_rate": 1.263157894736842e-05,
      "loss": 0.7483,
      "step": 3
    },
    {
      "epoch": 0.0649746192893401,
      "grad_norm": 2.1084544285026685,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 0.696,
      "step": 4
    },
    {
      "epoch": 0.08121827411167512,
      "grad_norm": 5.670885486524175,
      "learning_rate": 2.105263157894737e-05,
      "loss": 0.7237,
      "step": 5
    },
    {
      "epoch": 0.09746192893401015,
      "grad_norm": 10.09125086973651,
      "learning_rate": 2.526315789473684e-05,
      "loss": 0.7683,
      "step": 6
    },
    {
      "epoch": 0.11370558375634518,
      "grad_norm": 6.783333688153429,
      "learning_rate": 2.9473684210526317e-05,
      "loss": 0.7212,
      "step": 7
    },
    {
      "epoch": 0.1299492385786802,
      "grad_norm": 4.468625434130326,
      "learning_rate": 3.368421052631579e-05,
      "loss": 0.6675,
      "step": 8
    },
    {
      "epoch": 0.14619289340101524,
      "grad_norm": 2.839936985092543,
      "learning_rate": 3.789473684210526e-05,
      "loss": 0.6339,
      "step": 9
    },
    {
      "epoch": 0.16243654822335024,
      "grad_norm": 2.13780637268824,
      "learning_rate": 4.210526315789474e-05,
      "loss": 0.6086,
      "step": 10
    },
    {
      "epoch": 0.17868020304568527,
      "grad_norm": 1.5941717672571045,
      "learning_rate": 4.6315789473684214e-05,
      "loss": 0.5825,
      "step": 11
    },
    {
      "epoch": 0.1949238578680203,
      "grad_norm": 1.6168928558054896,
      "learning_rate": 5.052631578947368e-05,
      "loss": 0.5701,
      "step": 12
    },
    {
      "epoch": 0.21116751269035533,
      "grad_norm": 2.2327738877787646,
      "learning_rate": 5.4736842105263165e-05,
      "loss": 0.5649,
      "step": 13
    },
    {
      "epoch": 0.22741116751269036,
      "grad_norm": 1.922360650495667,
      "learning_rate": 5.8947368421052634e-05,
      "loss": 0.5556,
      "step": 14
    },
    {
      "epoch": 0.2436548223350254,
      "grad_norm": 1.5360747043737273,
      "learning_rate": 6.315789473684212e-05,
      "loss": 0.5472,
      "step": 15
    },
    {
      "epoch": 0.2598984771573604,
      "grad_norm": 1.6023781510033264,
      "learning_rate": 6.736842105263159e-05,
      "loss": 0.5383,
      "step": 16
    },
    {
      "epoch": 0.27614213197969545,
      "grad_norm": 1.4948570507713637,
      "learning_rate": 7.157894736842105e-05,
      "loss": 0.5259,
      "step": 17
    },
    {
      "epoch": 0.2923857868020305,
      "grad_norm": 1.6235378104579135,
      "learning_rate": 7.578947368421052e-05,
      "loss": 0.5215,
      "step": 18
    },
    {
      "epoch": 0.3086294416243655,
      "grad_norm": 1.49280941435323,
      "learning_rate": 8e-05,
      "loss": 0.5219,
      "step": 19
    },
    {
      "epoch": 0.3248730964467005,
      "grad_norm": 1.52673025515821,
      "learning_rate": 7.999266113727178e-05,
      "loss": 0.5159,
      "step": 20
    },
    {
      "epoch": 0.3411167512690355,
      "grad_norm": 2.0944356574595644,
      "learning_rate": 7.99706472420324e-05,
      "loss": 0.5082,
      "step": 21
    },
    {
      "epoch": 0.35736040609137054,
      "grad_norm": 1.1426245341381593,
      "learning_rate": 7.993396639212965e-05,
      "loss": 0.5065,
      "step": 22
    },
    {
      "epoch": 0.37360406091370557,
      "grad_norm": 2.027227172313339,
      "learning_rate": 7.988263204734962e-05,
      "loss": 0.5082,
      "step": 23
    },
    {
      "epoch": 0.3898477157360406,
      "grad_norm": 1.2244497820900966,
      "learning_rate": 7.98166630444778e-05,
      "loss": 0.5009,
      "step": 24
    },
    {
      "epoch": 0.40609137055837563,
      "grad_norm": 1.6772440184784814,
      "learning_rate": 7.973608359038701e-05,
      "loss": 0.4935,
      "step": 25
    },
    {
      "epoch": 0.42233502538071066,
      "grad_norm": 1.3035902371007322,
      "learning_rate": 7.964092325315485e-05,
      "loss": 0.4869,
      "step": 26
    },
    {
      "epoch": 0.4385786802030457,
      "grad_norm": 1.3526293786861174,
      "learning_rate": 7.953121695121395e-05,
      "loss": 0.492,
      "step": 27
    },
    {
      "epoch": 0.4548223350253807,
      "grad_norm": 1.425792402521663,
      "learning_rate": 7.94070049405388e-05,
      "loss": 0.4773,
      "step": 28
    },
    {
      "epoch": 0.47106598984771575,
      "grad_norm": 1.0589770114481525,
      "learning_rate": 7.926833279987421e-05,
      "loss": 0.4869,
      "step": 29
    },
    {
      "epoch": 0.4873096446700508,
      "grad_norm": 1.0852485944575374,
      "learning_rate": 7.911525141401037e-05,
      "loss": 0.4752,
      "step": 30
    },
    {
      "epoch": 0.5035532994923858,
      "grad_norm": 1.1576752537494561,
      "learning_rate": 7.894781695511117e-05,
      "loss": 0.4811,
      "step": 31
    },
    {
      "epoch": 0.5197969543147208,
      "grad_norm": 2.0538631486502723,
      "learning_rate": 7.876609086210207e-05,
      "loss": 0.4832,
      "step": 32
    },
    {
      "epoch": 0.5360406091370559,
      "grad_norm": 1.1528883813172177,
      "learning_rate": 7.857013981812564e-05,
      "loss": 0.4706,
      "step": 33
    },
    {
      "epoch": 0.5522842639593909,
      "grad_norm": 1.6961278477225181,
      "learning_rate": 7.836003572607253e-05,
      "loss": 0.4933,
      "step": 34
    },
    {
      "epoch": 0.5685279187817259,
      "grad_norm": 1.338921085872361,
      "learning_rate": 7.813585568219723e-05,
      "loss": 0.4735,
      "step": 35
    },
    {
      "epoch": 0.584771573604061,
      "grad_norm": 1.2911770652190075,
      "learning_rate": 7.789768194782818e-05,
      "loss": 0.4795,
      "step": 36
    },
    {
      "epoch": 0.601015228426396,
      "grad_norm": 1.1735059474903817,
      "learning_rate": 7.764560191918247e-05,
      "loss": 0.4739,
      "step": 37
    },
    {
      "epoch": 0.617258883248731,
      "grad_norm": 1.0123782894306088,
      "learning_rate": 7.737970809529645e-05,
      "loss": 0.4665,
      "step": 38
    },
    {
      "epoch": 0.6335025380710659,
      "grad_norm": 0.9595374983763921,
      "learning_rate": 7.71000980440838e-05,
      "loss": 0.4616,
      "step": 39
    },
    {
      "epoch": 0.649746192893401,
      "grad_norm": 1.0251669587395573,
      "learning_rate": 7.680687436653367e-05,
      "loss": 0.4578,
      "step": 40
    },
    {
      "epoch": 0.665989847715736,
      "grad_norm": 0.8481767848175804,
      "learning_rate": 7.650014465906201e-05,
      "loss": 0.4532,
      "step": 41
    },
    {
      "epoch": 0.682233502538071,
      "grad_norm": 1.1450293435840186,
      "learning_rate": 7.618002147402967e-05,
      "loss": 0.4504,
      "step": 42
    },
    {
      "epoch": 0.698477157360406,
      "grad_norm": 0.7376186540483177,
      "learning_rate": 7.584662227844223e-05,
      "loss": 0.4514,
      "step": 43
    },
    {
      "epoch": 0.7147208121827411,
      "grad_norm": 1.002643345794404,
      "learning_rate": 7.550006941084619e-05,
      "loss": 0.4531,
      "step": 44
    },
    {
      "epoch": 0.7309644670050761,
      "grad_norm": 1.2178757607184856,
      "learning_rate": 7.51404900364377e-05,
      "loss": 0.441,
      "step": 45
    },
    {
      "epoch": 0.7472081218274111,
      "grad_norm": 0.8379800700247181,
      "learning_rate": 7.476801610040021e-05,
      "loss": 0.4404,
      "step": 46
    },
    {
      "epoch": 0.7634517766497462,
      "grad_norm": 0.7565253428528901,
      "learning_rate": 7.438278427948805e-05,
      "loss": 0.4382,
      "step": 47
    },
    {
      "epoch": 0.7796954314720812,
      "grad_norm": 0.6727947399441317,
      "learning_rate": 7.398493593187383e-05,
      "loss": 0.4373,
      "step": 48
    },
    {
      "epoch": 0.7959390862944162,
      "grad_norm": 0.876356454587121,
      "learning_rate": 7.357461704527802e-05,
      "loss": 0.4373,
      "step": 49
    },
    {
      "epoch": 0.8121827411167513,
      "grad_norm": 1.0412780327861013,
      "learning_rate": 7.315197818339979e-05,
      "loss": 0.4406,
      "step": 50
    },
    {
      "epoch": 0.8284263959390863,
      "grad_norm": 1.2604470089396205,
      "learning_rate": 7.271717443066871e-05,
      "loss": 0.4344,
      "step": 51
    },
    {
      "epoch": 0.8446700507614213,
      "grad_norm": 1.004867279026282,
      "learning_rate": 7.227036533533753e-05,
      "loss": 0.433,
      "step": 52
    },
    {
      "epoch": 0.8609137055837564,
      "grad_norm": 0.8563093497070937,
      "learning_rate": 7.181171485093706e-05,
      "loss": 0.4304,
      "step": 53
    },
    {
      "epoch": 0.8771573604060914,
      "grad_norm": 0.5671831882105272,
      "learning_rate": 7.134139127611457e-05,
      "loss": 0.4221,
      "step": 54
    },
    {
      "epoch": 0.8934010152284264,
      "grad_norm": 0.5655405062857799,
      "learning_rate": 7.085956719287773e-05,
      "loss": 0.4288,
      "step": 55
    },
    {
      "epoch": 0.9096446700507614,
      "grad_norm": 0.6227846963328939,
      "learning_rate": 7.036641940326682e-05,
      "loss": 0.4271,
      "step": 56
    },
    {
      "epoch": 0.9258883248730965,
      "grad_norm": 0.7242656258268864,
      "learning_rate": 6.986212886447851e-05,
      "loss": 0.4313,
      "step": 57
    },
    {
      "epoch": 0.9421319796954315,
      "grad_norm": 0.7433411082090003,
      "learning_rate": 6.934688062246474e-05,
      "loss": 0.4218,
      "step": 58
    },
    {
      "epoch": 0.9583756345177665,
      "grad_norm": 0.8720528869263957,
      "learning_rate": 6.882086374403148e-05,
      "loss": 0.4348,
      "step": 59
    },
    {
      "epoch": 0.9746192893401016,
      "grad_norm": 0.9990535700111963,
      "learning_rate": 6.828427124746191e-05,
      "loss": 0.4272,
      "step": 60
    },
    {
      "epoch": 0.9908629441624366,
      "grad_norm": 1.015667186765777,
      "learning_rate": 6.773730003168967e-05,
      "loss": 0.4279,
      "step": 61
    },
    {
      "epoch": 1.0101522842639594,
      "grad_norm": 2.3437756145181603,
      "learning_rate": 6.718015080404824e-05,
      "loss": 0.691,
      "step": 62
    },
    {
      "epoch": 1.0263959390862945,
      "grad_norm": 0.7192408099739781,
      "learning_rate": 6.661302800662261e-05,
      "loss": 0.4051,
      "step": 63
    },
    {
      "epoch": 1.0426395939086295,
      "grad_norm": 0.8432945110635002,
      "learning_rate": 6.603613974123086e-05,
      "loss": 0.4116,
      "step": 64
    },
    {
      "epoch": 1.0588832487309645,
      "grad_norm": 1.2726135726974086,
      "learning_rate": 6.54496976930624e-05,
      "loss": 0.4076,
      "step": 65
    },
    {
      "epoch": 1.0751269035532995,
      "grad_norm": 1.0079271087443598,
      "learning_rate": 6.485391705300173e-05,
      "loss": 0.4069,
      "step": 66
    },
    {
      "epoch": 1.0913705583756346,
      "grad_norm": 1.1533020481630858,
      "learning_rate": 6.424901643866553e-05,
      "loss": 0.4011,
      "step": 67
    },
    {
      "epoch": 1.1076142131979696,
      "grad_norm": 1.0026635744570633,
      "learning_rate": 6.363521781418243e-05,
      "loss": 0.4033,
      "step": 68
    },
    {
      "epoch": 1.1238578680203046,
      "grad_norm": 0.9231770673079176,
      "learning_rate": 6.301274640874483e-05,
      "loss": 0.4032,
      "step": 69
    },
    {
      "epoch": 1.1401015228426397,
      "grad_norm": 0.8692546537652855,
      "learning_rate": 6.238183063396257e-05,
      "loss": 0.397,
      "step": 70
    },
    {
      "epoch": 1.1563451776649747,
      "grad_norm": 0.9442686170156996,
      "learning_rate": 6.174270200004885e-05,
      "loss": 0.3919,
      "step": 71
    },
    {
      "epoch": 1.1725888324873097,
      "grad_norm": 0.9040277727758386,
      "learning_rate": 6.109559503086918e-05,
      "loss": 0.4003,
      "step": 72
    },
    {
      "epoch": 1.1888324873096447,
      "grad_norm": 1.124693266326007,
      "learning_rate": 6.044074717788442e-05,
      "loss": 0.3989,
      "step": 73
    },
    {
      "epoch": 1.2050761421319798,
      "grad_norm": 0.7075238799261049,
      "learning_rate": 5.9778398733019614e-05,
      "loss": 0.3908,
      "step": 74
    },
    {
      "epoch": 1.2213197969543148,
      "grad_norm": 0.7559974740351498,
      "learning_rate": 5.910879274049052e-05,
      "loss": 0.3987,
      "step": 75
    },
    {
      "epoch": 1.2375634517766498,
      "grad_norm": 0.8753481648950223,
      "learning_rate": 5.84321749076202e-05,
      "loss": 0.4045,
      "step": 76
    },
    {
      "epoch": 1.2538071065989849,
      "grad_norm": 0.8249863395981523,
      "learning_rate": 5.7748793514678394e-05,
      "loss": 0.4017,
      "step": 77
    },
    {
      "epoch": 1.2700507614213197,
      "grad_norm": 0.8834530813481759,
      "learning_rate": 5.705889932377679e-05,
      "loss": 0.3972,
      "step": 78
    },
    {
      "epoch": 1.286294416243655,
      "grad_norm": 0.5321200318731625,
      "learning_rate": 5.636274548685361e-05,
      "loss": 0.4009,
      "step": 79
    },
    {
      "epoch": 1.3025380710659897,
      "grad_norm": 0.6712989019156405,
      "learning_rate": 5.566058745278117e-05,
      "loss": 0.39,
      "step": 80
    },
    {
      "epoch": 1.318781725888325,
      "grad_norm": 0.5440707965852146,
      "learning_rate": 5.4952682873630755e-05,
      "loss": 0.3979,
      "step": 81
    },
    {
      "epoch": 1.3350253807106598,
      "grad_norm": 0.6907333140636629,
      "learning_rate": 5.4239291510128936e-05,
      "loss": 0.3974,
      "step": 82
    },
    {
      "epoch": 1.351269035532995,
      "grad_norm": 0.7093211514864298,
      "learning_rate": 5.352067513634011e-05,
      "loss": 0.395,
      "step": 83
    },
    {
      "epoch": 1.3675126903553299,
      "grad_norm": 0.5197383653605123,
      "learning_rate": 5.279709744361036e-05,
      "loss": 0.3917,
      "step": 84
    },
    {
      "epoch": 1.383756345177665,
      "grad_norm": 0.4873841137171276,
      "learning_rate": 5.20688239438077e-05,
      "loss": 0.3897,
      "step": 85
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.7772345510903597,
      "learning_rate": 5.133612187189429e-05,
      "loss": 0.3958,
      "step": 86
    },
    {
      "epoch": 1.4162436548223352,
      "grad_norm": 0.6568207816645832,
      "learning_rate": 5.059926008786648e-05,
      "loss": 0.3885,
      "step": 87
    },
    {
      "epoch": 1.43248730964467,
      "grad_norm": 0.6865987314717193,
      "learning_rate": 4.985850897809838e-05,
      "loss": 0.3913,
      "step": 88
    },
    {
      "epoch": 1.448730964467005,
      "grad_norm": 0.6754318415370026,
      "learning_rate": 4.911414035612551e-05,
      "loss": 0.3897,
      "step": 89
    },
    {
      "epoch": 1.46497461928934,
      "grad_norm": 0.5510166556882232,
      "learning_rate": 4.836642736290468e-05,
      "loss": 0.3863,
      "step": 90
    },
    {
      "epoch": 1.481218274111675,
      "grad_norm": 0.4218642671019698,
      "learning_rate": 4.761564436658674e-05,
      "loss": 0.3905,
      "step": 91
    },
    {
      "epoch": 1.49746192893401,
      "grad_norm": 0.33249291138365206,
      "learning_rate": 4.686206686183914e-05,
      "loss": 0.3842,
      "step": 92
    },
    {
      "epoch": 1.5137055837563453,
      "grad_norm": 0.36240839911771244,
      "learning_rate": 4.610597136875498e-05,
      "loss": 0.3849,
      "step": 93
    },
    {
      "epoch": 1.5299492385786801,
      "grad_norm": 0.4075634735970758,
      "learning_rate": 4.534763533138594e-05,
      "loss": 0.382,
      "step": 94
    },
    {
      "epoch": 1.5461928934010152,
      "grad_norm": 0.4020419111096129,
      "learning_rate": 4.458733701593603e-05,
      "loss": 0.3902,
      "step": 95
    },
    {
      "epoch": 1.5624365482233502,
      "grad_norm": 0.4162126818044518,
      "learning_rate": 4.3825355408653694e-05,
      "loss": 0.3865,
      "step": 96
    },
    {
      "epoch": 1.5786802030456852,
      "grad_norm": 0.3250125445127836,
      "learning_rate": 4.306197011345984e-05,
      "loss": 0.3927,
      "step": 97
    },
    {
      "epoch": 1.5949238578680203,
      "grad_norm": 0.37873273335865215,
      "learning_rate": 4.229746124934894e-05,
      "loss": 0.3897,
      "step": 98
    },
    {
      "epoch": 1.6111675126903553,
      "grad_norm": 0.26283572176655795,
      "learning_rate": 4.153210934760142e-05,
      "loss": 0.3898,
      "step": 99
    },
    {
      "epoch": 1.6274111675126903,
      "grad_norm": 0.3697844911122642,
      "learning_rate": 4.0766195248844574e-05,
      "loss": 0.3915,
      "step": 100
    },
    {
      "epoch": 1.6436548223350254,
      "grad_norm": 0.4192576770954174,
      "learning_rate": 4e-05,
      "loss": 0.3938,
      "step": 101
    },
    {
      "epoch": 1.6598984771573604,
      "grad_norm": 0.24580117390367293,
      "learning_rate": 3.923380475115544e-05,
      "loss": 0.3837,
      "step": 102
    },
    {
      "epoch": 1.6761421319796954,
      "grad_norm": 0.35508418766154465,
      "learning_rate": 3.846789065239859e-05,
      "loss": 0.3866,
      "step": 103
    },
    {
      "epoch": 1.6923857868020304,
      "grad_norm": 0.3642394227261978,
      "learning_rate": 3.770253875065107e-05,
      "loss": 0.3879,
      "step": 104
    },
    {
      "epoch": 1.7086294416243655,
      "grad_norm": 0.23721046069527885,
      "learning_rate": 3.6938029886540174e-05,
      "loss": 0.386,
      "step": 105
    },
    {
      "epoch": 1.7248730964467005,
      "grad_norm": 0.3182396199511866,
      "learning_rate": 3.617464459134631e-05,
      "loss": 0.382,
      "step": 106
    },
    {
      "epoch": 1.7411167512690355,
      "grad_norm": 0.23591223152132948,
      "learning_rate": 3.541266298406399e-05,
      "loss": 0.3837,
      "step": 107
    },
    {
      "epoch": 1.7573604060913706,
      "grad_norm": 0.3103657704912103,
      "learning_rate": 3.4652364668614065e-05,
      "loss": 0.385,
      "step": 108
    },
    {
      "epoch": 1.7736040609137056,
      "grad_norm": 0.23604167874137535,
      "learning_rate": 3.3894028631245017e-05,
      "loss": 0.388,
      "step": 109
    },
    {
      "epoch": 1.7898477157360406,
      "grad_norm": 0.2123461071124295,
      "learning_rate": 3.313793313816087e-05,
      "loss": 0.3873,
      "step": 110
    },
    {
      "epoch": 1.8060913705583757,
      "grad_norm": 0.2159520420107397,
      "learning_rate": 3.238435563341326e-05,
      "loss": 0.3837,
      "step": 111
    },
    {
      "epoch": 1.8223350253807107,
      "grad_norm": 0.22146736675422216,
      "learning_rate": 3.163357263709534e-05,
      "loss": 0.3819,
      "step": 112
    },
    {
      "epoch": 1.8385786802030457,
      "grad_norm": 0.17002552231432885,
      "learning_rate": 3.088585964387451e-05,
      "loss": 0.3768,
      "step": 113
    },
    {
      "epoch": 1.8548223350253807,
      "grad_norm": 0.24119956983584898,
      "learning_rate": 3.0141491021901644e-05,
      "loss": 0.3839,
      "step": 114
    },
    {
      "epoch": 1.8710659898477158,
      "grad_norm": 0.2013830370445654,
      "learning_rate": 2.9400739912133543e-05,
      "loss": 0.3797,
      "step": 115
    },
    {
      "epoch": 1.8873096446700508,
      "grad_norm": 0.18674898588211797,
      "learning_rate": 2.866387812810572e-05,
      "loss": 0.3823,
      "step": 116
    },
    {
      "epoch": 1.9035532994923858,
      "grad_norm": 0.26548717082542594,
      "learning_rate": 2.793117605619231e-05,
      "loss": 0.378,
      "step": 117
    },
    {
      "epoch": 1.9197969543147209,
      "grad_norm": 0.2002277680016461,
      "learning_rate": 2.7202902556389647e-05,
      "loss": 0.3814,
      "step": 118
    },
    {
      "epoch": 1.936040609137056,
      "grad_norm": 0.15827356517250973,
      "learning_rate": 2.6479324863659897e-05,
      "loss": 0.3721,
      "step": 119
    },
    {
      "epoch": 1.952284263959391,
      "grad_norm": 0.1653044744580572,
      "learning_rate": 2.5760708489871077e-05,
      "loss": 0.3854,
      "step": 120
    },
    {
      "epoch": 1.9685279187817257,
      "grad_norm": 0.16729489813208265,
      "learning_rate": 2.504731712636925e-05,
      "loss": 0.3784,
      "step": 121
    },
    {
      "epoch": 1.984771573604061,
      "grad_norm": 0.16164016444282092,
      "learning_rate": 2.4339412547218845e-05,
      "loss": 0.3809,
      "step": 122
    },
    {
      "epoch": 2.0040609137055836,
      "grad_norm": 0.28163111232184695,
      "learning_rate": 2.3637254513146406e-05,
      "loss": 0.606,
      "step": 123
    },
    {
      "epoch": 2.020304568527919,
      "grad_norm": 0.1925240063892539,
      "learning_rate": 2.294110067622321e-05,
      "loss": 0.3647,
      "step": 124
    },
    {
      "epoch": 2.0365482233502537,
      "grad_norm": 0.20226990255325633,
      "learning_rate": 2.225120648532161e-05,
      "loss": 0.3608,
      "step": 125
    },
    {
      "epoch": 2.052791878172589,
      "grad_norm": 0.16740913026001164,
      "learning_rate": 2.1567825092379807e-05,
      "loss": 0.3614,
      "step": 126
    },
    {
      "epoch": 2.0690355329949237,
      "grad_norm": 0.25083361243306157,
      "learning_rate": 2.089120725950948e-05,
      "loss": 0.3682,
      "step": 127
    },
    {
      "epoch": 2.085279187817259,
      "grad_norm": 0.19080011782710088,
      "learning_rate": 2.0221601266980383e-05,
      "loss": 0.3548,
      "step": 128
    },
    {
      "epoch": 2.1015228426395938,
      "grad_norm": 0.2188934640616483,
      "learning_rate": 1.955925282211558e-05,
      "loss": 0.3552,
      "step": 129
    },
    {
      "epoch": 2.117766497461929,
      "grad_norm": 0.2172533644259959,
      "learning_rate": 1.890440496913083e-05,
      "loss": 0.3641,
      "step": 130
    },
    {
      "epoch": 2.134010152284264,
      "grad_norm": 0.1707808407927364,
      "learning_rate": 1.825729799995116e-05,
      "loss": 0.3641,
      "step": 131
    },
    {
      "epoch": 2.150253807106599,
      "grad_norm": 0.1946400955168161,
      "learning_rate": 1.761816936603744e-05,
      "loss": 0.3618,
      "step": 132
    },
    {
      "epoch": 2.166497461928934,
      "grad_norm": 0.14159881045312211,
      "learning_rate": 1.6987253591255178e-05,
      "loss": 0.3546,
      "step": 133
    },
    {
      "epoch": 2.182741116751269,
      "grad_norm": 0.1881834295592266,
      "learning_rate": 1.6364782185817592e-05,
      "loss": 0.3549,
      "step": 134
    },
    {
      "epoch": 2.198984771573604,
      "grad_norm": 0.15289061814224608,
      "learning_rate": 1.5750983561334493e-05,
      "loss": 0.3586,
      "step": 135
    },
    {
      "epoch": 2.215228426395939,
      "grad_norm": 0.15911195051937885,
      "learning_rate": 1.5146082946998286e-05,
      "loss": 0.3559,
      "step": 136
    },
    {
      "epoch": 2.231472081218274,
      "grad_norm": 0.19045813298325232,
      "learning_rate": 1.4550302306937619e-05,
      "loss": 0.3639,
      "step": 137
    },
    {
      "epoch": 2.2477157360406093,
      "grad_norm": 0.15335108480289958,
      "learning_rate": 1.396386025876916e-05,
      "loss": 0.355,
      "step": 138
    },
    {
      "epoch": 2.263959390862944,
      "grad_norm": 0.16446774618332,
      "learning_rate": 1.338697199337739e-05,
      "loss": 0.3549,
      "step": 139
    },
    {
      "epoch": 2.2802030456852793,
      "grad_norm": 0.15106141305507895,
      "learning_rate": 1.2819849195951778e-05,
      "loss": 0.3586,
      "step": 140
    },
    {
      "epoch": 2.296446700507614,
      "grad_norm": 0.13993724665364407,
      "learning_rate": 1.2262699968310336e-05,
      "loss": 0.3604,
      "step": 141
    },
    {
      "epoch": 2.3126903553299494,
      "grad_norm": 0.1363435862641145,
      "learning_rate": 1.1715728752538103e-05,
      "loss": 0.3551,
      "step": 142
    },
    {
      "epoch": 2.328934010152284,
      "grad_norm": 0.1339274080889315,
      "learning_rate": 1.1179136255968523e-05,
      "loss": 0.3563,
      "step": 143
    },
    {
      "epoch": 2.3451776649746194,
      "grad_norm": 0.12185082172688805,
      "learning_rate": 1.065311937753526e-05,
      "loss": 0.359,
      "step": 144
    },
    {
      "epoch": 2.3614213197969542,
      "grad_norm": 0.1321809457616183,
      "learning_rate": 1.0137871135521493e-05,
      "loss": 0.3548,
      "step": 145
    },
    {
      "epoch": 2.3776649746192895,
      "grad_norm": 0.13050325616691505,
      "learning_rate": 9.633580596733179e-06,
      "loss": 0.3523,
      "step": 146
    },
    {
      "epoch": 2.3939086294416243,
      "grad_norm": 0.11130286495660592,
      "learning_rate": 9.140432807122282e-06,
      "loss": 0.359,
      "step": 147
    },
    {
      "epoch": 2.4101522842639596,
      "grad_norm": 0.10084069337286101,
      "learning_rate": 8.658608723885433e-06,
      "loss": 0.3518,
      "step": 148
    },
    {
      "epoch": 2.4263959390862944,
      "grad_norm": 0.10103011757244325,
      "learning_rate": 8.18828514906294e-06,
      "loss": 0.3535,
      "step": 149
    },
    {
      "epoch": 2.4426395939086296,
      "grad_norm": 0.09880519324295578,
      "learning_rate": 7.729634664662469e-06,
      "loss": 0.3534,
      "step": 150
    },
    {
      "epoch": 2.4588832487309644,
      "grad_norm": 0.10471446375611543,
      "learning_rate": 7.282825569331296e-06,
      "loss": 0.3596,
      "step": 151
    },
    {
      "epoch": 2.4751269035532997,
      "grad_norm": 0.10882725576899555,
      "learning_rate": 6.848021816600221e-06,
      "loss": 0.361,
      "step": 152
    },
    {
      "epoch": 2.4913705583756345,
      "grad_norm": 0.09782268685408212,
      "learning_rate": 6.425382954722002e-06,
      "loss": 0.3614,
      "step": 153
    },
    {
      "epoch": 2.5076142131979697,
      "grad_norm": 0.10430688918677015,
      "learning_rate": 6.015064068126188e-06,
      "loss": 0.3575,
      "step": 154
    },
    {
      "epoch": 2.5238578680203045,
      "grad_norm": 0.1172067268082759,
      "learning_rate": 5.617215720511962e-06,
      "loss": 0.351,
      "step": 155
    },
    {
      "epoch": 2.5401015228426393,
      "grad_norm": 0.09574414741627191,
      "learning_rate": 5.231983899599798e-06,
      "loss": 0.3543,
      "step": 156
    },
    {
      "epoch": 2.5563451776649746,
      "grad_norm": 0.09746185929647122,
      "learning_rate": 4.859509963562313e-06,
      "loss": 0.3646,
      "step": 157
    },
    {
      "epoch": 2.57258883248731,
      "grad_norm": 0.10144569993947126,
      "learning_rate": 4.4999305891538294e-06,
      "loss": 0.3546,
      "step": 158
    },
    {
      "epoch": 2.5888324873096447,
      "grad_norm": 0.09770560240880942,
      "learning_rate": 4.153377721557781e-06,
      "loss": 0.3579,
      "step": 159
    },
    {
      "epoch": 2.6050761421319795,
      "grad_norm": 0.09280446790594858,
      "learning_rate": 3.819978525970336e-06,
      "loss": 0.3565,
      "step": 160
    },
    {
      "epoch": 2.6213197969543147,
      "grad_norm": 0.08692751824611722,
      "learning_rate": 3.499855340938001e-06,
      "loss": 0.3553,
      "step": 161
    },
    {
      "epoch": 2.63756345177665,
      "grad_norm": 0.10842712708563858,
      "learning_rate": 3.19312563346633e-06,
      "loss": 0.3545,
      "step": 162
    },
    {
      "epoch": 2.6538071065989848,
      "grad_norm": 0.09032134534631381,
      "learning_rate": 2.8999019559162156e-06,
      "loss": 0.3552,
      "step": 163
    },
    {
      "epoch": 2.6700507614213196,
      "grad_norm": 0.09027568441791341,
      "learning_rate": 2.6202919047035604e-06,
      "loss": 0.3468,
      "step": 164
    },
    {
      "epoch": 2.686294416243655,
      "grad_norm": 0.08414383492724592,
      "learning_rate": 2.3543980808175393e-06,
      "loss": 0.3562,
      "step": 165
    },
    {
      "epoch": 2.70253807106599,
      "grad_norm": 0.07832242786610931,
      "learning_rate": 2.10231805217183e-06,
      "loss": 0.3556,
      "step": 166
    },
    {
      "epoch": 2.718781725888325,
      "grad_norm": 0.08668215141175065,
      "learning_rate": 1.8641443178027784e-06,
      "loss": 0.354,
      "step": 167
    },
    {
      "epoch": 2.7350253807106597,
      "grad_norm": 0.09642452144914332,
      "learning_rate": 1.6399642739274791e-06,
      "loss": 0.3534,
      "step": 168
    },
    {
      "epoch": 2.751269035532995,
      "grad_norm": 0.08397615302565715,
      "learning_rate": 1.4298601818743607e-06,
      "loss": 0.3585,
      "step": 169
    },
    {
      "epoch": 2.76751269035533,
      "grad_norm": 0.0868453744624661,
      "learning_rate": 1.233909137897924e-06,
      "loss": 0.3632,
      "step": 170
    },
    {
      "epoch": 2.783756345177665,
      "grad_norm": 0.085225634007902,
      "learning_rate": 1.0521830448888414e-06,
      "loss": 0.3581,
      "step": 171
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.08094902436979953,
      "learning_rate": 8.847485859896365e-07,
      "loss": 0.3502,
      "step": 172
    },
    {
      "epoch": 2.816243654822335,
      "grad_norm": 0.08165676150513626,
      "learning_rate": 7.31667200125803e-07,
      "loss": 0.3567,
      "step": 173
    },
    {
      "epoch": 2.8324873096446703,
      "grad_norm": 0.08289385702790433,
      "learning_rate": 5.929950594612032e-07,
      "loss": 0.3554,
      "step": 174
    },
    {
      "epoch": 2.848730964467005,
      "grad_norm": 0.07955035485502655,
      "learning_rate": 4.687830487860634e-07,
      "loss": 0.3563,
      "step": 175
    },
    {
      "epoch": 2.86497461928934,
      "grad_norm": 0.07869020841570933,
      "learning_rate": 3.590767468451528e-07,
      "loss": 0.3505,
      "step": 176
    },
    {
      "epoch": 2.881218274111675,
      "grad_norm": 0.08319077707558656,
      "learning_rate": 2.639164096129987e-07,
      "loss": 0.3541,
      "step": 177
    },
    {
      "epoch": 2.89746192893401,
      "grad_norm": 0.07867342327367309,
      "learning_rate": 1.833369555222042e-07,
      "loss": 0.356,
      "step": 178
    },
    {
      "epoch": 2.9137055837563453,
      "grad_norm": 0.07940276764093634,
      "learning_rate": 1.1736795265038237e-07,
      "loss": 0.3602,
      "step": 179
    },
    {
      "epoch": 2.92994923857868,
      "grad_norm": 0.08102239091967386,
      "learning_rate": 6.603360787035495e-08,
      "loss": 0.3592,
      "step": 180
    },
    {
      "epoch": 2.9461928934010153,
      "grad_norm": 0.07325271521654589,
      "learning_rate": 2.9352757967600064e-08,
      "loss": 0.3495,
      "step": 181
    },
    {
      "epoch": 2.96243654822335,
      "grad_norm": 0.07584071622767746,
      "learning_rate": 7.338862728225593e-09,
      "loss": 0.3571,
      "step": 182
    },
    {
      "epoch": 2.9786802030456854,
      "grad_norm": 0.07924817576453998,
      "learning_rate": 0.0,
      "loss": 0.3592,
      "step": 183
    },
    {
      "epoch": 2.9786802030456854,
      "step": 183,
      "total_flos": 4.709134058959929e+18,
      "train_loss": 0.42409751356625164,
      "train_runtime": 40618.2962,
      "train_samples_per_second": 2.327,
      "train_steps_per_second": 0.005
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 183,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.709134058959929e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
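
The state above follows the `trainer_state.json` layout written by the Hugging Face Transformers `Trainer`, so its `log_history` can be inspected programmatically. Below is a minimal sketch, assuming the file is saved as `trainer_state.json` in the working directory (a hypothetical path, not taken from the source), that loads the per-step entries and summarizes how the training loss evolved.

```python
import json

# Minimal sketch: load the trainer state shown above (assumed filename).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a per-step "loss"; the final summary entry
# (train_runtime, train_loss, ...) does not and is skipped by this filter.
history = [e for e in state["log_history"] if "loss" in e]

first, last = history[0], history[-1]
print(f"steps logged: {len(history)}")
print(f"loss: {first['loss']:.4f} (step {first['step']}) -> "
      f"{last['loss']:.4f} (step {last['step']})")
print(f"final learning rate: {last['learning_rate']}")
print(f"reported average train_loss: {state['log_history'][-1]['train_loss']}")
```

Run against this file, the sketch would report 183 logged steps, a loss falling from 0.7971 at step 1 to 0.3592 at step 183, and the averaged `train_loss` of 0.4241 recorded in the closing summary entry.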