{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 536,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009328358208955223,
      "grad_norm": 1.453897786558338,
      "learning_rate": 9.259259259259259e-06,
      "loss": 0.8491,
      "step": 5
    },
    {
      "epoch": 0.018656716417910446,
      "grad_norm": 1.1644840596539492,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.8006,
      "step": 10
    },
    {
      "epoch": 0.027985074626865673,
      "grad_norm": 0.7401001416095307,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.7315,
      "step": 15
    },
    {
      "epoch": 0.03731343283582089,
      "grad_norm": 0.45715435120985837,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.6803,
      "step": 20
    },
    {
      "epoch": 0.04664179104477612,
      "grad_norm": 0.452456199763512,
      "learning_rate": 4.62962962962963e-05,
      "loss": 0.6578,
      "step": 25
    },
    {
      "epoch": 0.055970149253731345,
      "grad_norm": 0.3770950269873419,
      "learning_rate": 4.999614302517356e-05,
      "loss": 0.6581,
      "step": 30
    },
    {
      "epoch": 0.06529850746268656,
      "grad_norm": 0.3867121454463547,
      "learning_rate": 4.997257741198456e-05,
      "loss": 0.6419,
      "step": 35
    },
    {
      "epoch": 0.07462686567164178,
      "grad_norm": 0.30411968998458416,
      "learning_rate": 4.992761136351291e-05,
      "loss": 0.627,
      "step": 40
    },
    {
      "epoch": 0.08395522388059702,
      "grad_norm": 0.3178169814537855,
      "learning_rate": 4.986128770052603e-05,
      "loss": 0.6084,
      "step": 45
    },
    {
      "epoch": 0.09328358208955224,
      "grad_norm": 0.2807145763333074,
      "learning_rate": 4.9773669582457364e-05,
      "loss": 0.61,
      "step": 50
    },
    {
      "epoch": 0.10261194029850747,
      "grad_norm": 0.34303198218488506,
      "learning_rate": 4.966484044726024e-05,
      "loss": 0.6137,
      "step": 55
    },
    {
      "epoch": 0.11194029850746269,
      "grad_norm": 0.32762450828392975,
      "learning_rate": 4.953490393195063e-05,
      "loss": 0.6003,
      "step": 60
    },
    {
      "epoch": 0.12126865671641791,
      "grad_norm": 0.38462981393609424,
      "learning_rate": 4.938398377391461e-05,
      "loss": 0.5972,
      "step": 65
    },
    {
      "epoch": 0.13059701492537312,
      "grad_norm": 0.4924998750785629,
      "learning_rate": 4.921222369307427e-05,
      "loss": 0.5939,
      "step": 70
    },
    {
      "epoch": 0.13992537313432835,
      "grad_norm": 0.4428236238520031,
      "learning_rate": 4.901978725502454e-05,
      "loss": 0.5895,
      "step": 75
    },
    {
      "epoch": 0.14925373134328357,
      "grad_norm": 0.39121495707754167,
      "learning_rate": 4.880685771527114e-05,
      "loss": 0.5895,
      "step": 80
    },
    {
      "epoch": 0.15858208955223882,
      "grad_norm": 0.42141569967189374,
      "learning_rate": 4.8573637844718e-05,
      "loss": 0.5945,
      "step": 85
    },
    {
      "epoch": 0.16791044776119404,
      "grad_norm": 0.37097437462801275,
      "learning_rate": 4.83203497365703e-05,
      "loss": 0.591,
      "step": 90
    },
    {
      "epoch": 0.17723880597014927,
      "grad_norm": 0.4736806212925385,
      "learning_rate": 4.8047234594837143e-05,
      "loss": 0.5782,
      "step": 95
    },
    {
      "epoch": 0.1865671641791045,
      "grad_norm": 0.4098423065809189,
      "learning_rate": 4.775455250463507e-05,
      "loss": 0.5749,
      "step": 100
    },
    {
      "epoch": 0.1958955223880597,
      "grad_norm": 0.43057914859393537,
      "learning_rate": 4.744258218451135e-05,
      "loss": 0.5846,
      "step": 105
    },
    {
      "epoch": 0.20522388059701493,
      "grad_norm": 0.3079441344852731,
      "learning_rate": 4.71116207210228e-05,
      "loss": 0.5758,
      "step": 110
    },
    {
      "epoch": 0.21455223880597016,
      "grad_norm": 0.35622829843104137,
      "learning_rate": 4.676198328582288e-05,
      "loss": 0.5712,
      "step": 115
    },
    {
      "epoch": 0.22388059701492538,
      "grad_norm": 0.3303782414556567,
      "learning_rate": 4.6394002835526535e-05,
      "loss": 0.582,
      "step": 120
    },
    {
      "epoch": 0.2332089552238806,
      "grad_norm": 0.36692461693115874,
      "learning_rate": 4.6008029794638596e-05,
      "loss": 0.5665,
      "step": 125
    },
    {
      "epoch": 0.24253731343283583,
      "grad_norm": 0.3438548029811277,
      "learning_rate": 4.560443172184763e-05,
      "loss": 0.5691,
      "step": 130
    },
    {
      "epoch": 0.251865671641791,
      "grad_norm": 0.3148363976766754,
      "learning_rate": 4.5183592960003104e-05,
      "loss": 0.5619,
      "step": 135
    },
    {
      "epoch": 0.26119402985074625,
      "grad_norm": 0.27079202033744576,
      "learning_rate": 4.4745914270109055e-05,
      "loss": 0.5671,
      "step": 140
    },
    {
      "epoch": 0.27052238805970147,
      "grad_norm": 0.35294951179615874,
      "learning_rate": 4.429181244968301e-05,
      "loss": 0.5663,
      "step": 145
    },
    {
      "epoch": 0.2798507462686567,
      "grad_norm": 0.2830160917424769,
      "learning_rate": 4.38217199358434e-05,
      "loss": 0.5602,
      "step": 150
    },
    {
      "epoch": 0.2891791044776119,
      "grad_norm": 0.35651413739101817,
      "learning_rate": 4.3336084393503545e-05,
      "loss": 0.5569,
      "step": 155
    },
    {
      "epoch": 0.29850746268656714,
      "grad_norm": 0.3354623584514313,
      "learning_rate": 4.283536828906436e-05,
      "loss": 0.5692,
      "step": 160
    },
    {
      "epoch": 0.30783582089552236,
      "grad_norm": 0.31607235976679265,
      "learning_rate": 4.2320048450011684e-05,
      "loss": 0.5626,
      "step": 165
    },
    {
      "epoch": 0.31716417910447764,
      "grad_norm": 0.28829206615383307,
      "learning_rate": 4.179061561083777e-05,
      "loss": 0.5547,
      "step": 170
    },
    {
      "epoch": 0.32649253731343286,
      "grad_norm": 0.3376743915281689,
      "learning_rate": 4.124757394571914e-05,
      "loss": 0.561,
      "step": 175
    },
    {
      "epoch": 0.3358208955223881,
      "grad_norm": 0.270382329131434,
      "learning_rate": 4.069144058839605e-05,
      "loss": 0.5621,
      "step": 180
    },
    {
      "epoch": 0.3451492537313433,
      "grad_norm": 0.2973987443207836,
      "learning_rate": 4.012274513971061e-05,
      "loss": 0.5661,
      "step": 185
    },
    {
      "epoch": 0.35447761194029853,
      "grad_norm": 0.28887092947840104,
      "learning_rate": 3.954202916327264e-05,
      "loss": 0.5542,
      "step": 190
    },
    {
      "epoch": 0.36380597014925375,
      "grad_norm": 0.2655355295288525,
      "learning_rate": 3.894984566973346e-05,
      "loss": 0.5541,
      "step": 195
    },
    {
      "epoch": 0.373134328358209,
      "grad_norm": 0.2811432888948243,
      "learning_rate": 3.834675859015876e-05,
      "loss": 0.5582,
      "step": 200
    },
    {
      "epoch": 0.3824626865671642,
      "grad_norm": 0.2515745875368655,
      "learning_rate": 3.77333422390021e-05,
      "loss": 0.5489,
      "step": 205
    },
    {
      "epoch": 0.3917910447761194,
      "grad_norm": 0.28289824813151215,
      "learning_rate": 3.711018076719034e-05,
      "loss": 0.556,
      "step": 210
    },
    {
      "epoch": 0.40111940298507465,
      "grad_norm": 0.267361883328445,
      "learning_rate": 3.647786760584194e-05,
      "loss": 0.5604,
      "step": 215
    },
    {
      "epoch": 0.41044776119402987,
      "grad_norm": 0.26327372469127713,
      "learning_rate": 3.583700490114776e-05,
      "loss": 0.5585,
      "step": 220
    },
    {
      "epoch": 0.4197761194029851,
      "grad_norm": 0.306243298391607,
      "learning_rate": 3.518820294095267e-05,
      "loss": 0.5545,
      "step": 225
    },
    {
      "epoch": 0.4291044776119403,
      "grad_norm": 0.26926366241042204,
      "learning_rate": 3.453207957358377e-05,
      "loss": 0.5464,
      "step": 230
    },
    {
      "epoch": 0.43843283582089554,
      "grad_norm": 0.2690060278556445,
      "learning_rate": 3.386925961947906e-05,
      "loss": 0.5475,
      "step": 235
    },
    {
      "epoch": 0.44776119402985076,
      "grad_norm": 0.2826334847052104,
      "learning_rate": 3.320037427617639e-05,
      "loss": 0.555,
      "step": 240
    },
    {
      "epoch": 0.457089552238806,
      "grad_norm": 0.26144344293288974,
      "learning_rate": 3.252606051722972e-05,
      "loss": 0.5535,
      "step": 245
    },
    {
      "epoch": 0.4664179104477612,
      "grad_norm": 0.2911683020725898,
      "learning_rate": 3.1846960485624886e-05,
      "loss": 0.5427,
      "step": 250
    },
    {
      "epoch": 0.47574626865671643,
      "grad_norm": 0.25802207095689544,
      "learning_rate": 3.1163720882272516e-05,
      "loss": 0.5505,
      "step": 255
    },
    {
      "epoch": 0.48507462686567165,
      "grad_norm": 0.2937192282456116,
      "learning_rate": 3.047699235016056e-05,
      "loss": 0.5428,
      "step": 260
    },
    {
      "epoch": 0.4944029850746269,
      "grad_norm": 0.3102482607702826,
      "learning_rate": 2.9787428854752736e-05,
      "loss": 0.5367,
      "step": 265
    },
    {
      "epoch": 0.503731343283582,
      "grad_norm": 0.2548120450864961,
      "learning_rate": 2.9095687061223058e-05,
      "loss": 0.5513,
      "step": 270
    },
    {
      "epoch": 0.5130597014925373,
      "grad_norm": 0.30245109922773245,
      "learning_rate": 2.8402425709119435e-05,
      "loss": 0.5504,
      "step": 275
    },
    {
      "epoch": 0.5223880597014925,
      "grad_norm": 0.27315442361776204,
      "learning_rate": 2.7708304985051868e-05,
      "loss": 0.5474,
      "step": 280
    },
    {
      "epoch": 0.5317164179104478,
      "grad_norm": 0.2469052900266435,
      "learning_rate": 2.7013985894002623e-05,
      "loss": 0.5353,
      "step": 285
    },
    {
      "epoch": 0.5410447761194029,
      "grad_norm": 0.2381522137991754,
      "learning_rate": 2.6320129629857093e-05,
      "loss": 0.5481,
      "step": 290
    },
    {
      "epoch": 0.5503731343283582,
      "grad_norm": 0.2435217695212293,
      "learning_rate": 2.56273969457547e-05,
      "loss": 0.537,
      "step": 295
    },
    {
      "epoch": 0.5597014925373134,
      "grad_norm": 0.23199546267694518,
      "learning_rate": 2.4936447524859625e-05,
      "loss": 0.5413,
      "step": 300
    },
    {
      "epoch": 0.5690298507462687,
      "grad_norm": 0.30029854485663093,
      "learning_rate": 2.4247939352150386e-05,
      "loss": 0.5365,
      "step": 305
    },
    {
      "epoch": 0.5783582089552238,
      "grad_norm": 0.25071572100454237,
      "learning_rate": 2.3562528087826573e-05,
      "loss": 0.5426,
      "step": 310
    },
    {
      "epoch": 0.5876865671641791,
      "grad_norm": 0.2361656180291179,
      "learning_rate": 2.2880866442929544e-05,
      "loss": 0.5396,
      "step": 315
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 0.23596745328071683,
      "learning_rate": 2.2203603557771447e-05,
      "loss": 0.5357,
      "step": 320
    },
    {
      "epoch": 0.6063432835820896,
      "grad_norm": 0.24184948453857802,
      "learning_rate": 2.153138438376473e-05,
      "loss": 0.5339,
      "step": 325
    },
    {
      "epoch": 0.6156716417910447,
      "grad_norm": 0.23369544200091724,
      "learning_rate": 2.0864849069240645e-05,
      "loss": 0.5386,
      "step": 330
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.2273042144154694,
      "learning_rate": 2.0204632349841667e-05,
      "loss": 0.5355,
      "step": 335
    },
    {
      "epoch": 0.6343283582089553,
      "grad_norm": 0.2094409370066005,
      "learning_rate": 1.9551362944068462e-05,
      "loss": 0.5377,
      "step": 340
    },
    {
      "epoch": 0.6436567164179104,
      "grad_norm": 0.22818911316518153,
      "learning_rate": 1.890566295455678e-05,
      "loss": 0.531,
      "step": 345
    },
    {
      "epoch": 0.6529850746268657,
      "grad_norm": 0.21863119932090663,
      "learning_rate": 1.8268147275654707e-05,
      "loss": 0.541,
      "step": 350
    },
    {
      "epoch": 0.6623134328358209,
      "grad_norm": 0.2106369913594231,
      "learning_rate": 1.7639423007864252e-05,
      "loss": 0.5278,
      "step": 355
    },
    {
      "epoch": 0.6716417910447762,
      "grad_norm": 0.21604757242141778,
      "learning_rate": 1.702008887970491e-05,
      "loss": 0.5287,
      "step": 360
    },
    {
      "epoch": 0.6809701492537313,
      "grad_norm": 0.21597143378114195,
      "learning_rate": 1.6410734677549872e-05,
      "loss": 0.5318,
      "step": 365
    },
    {
      "epoch": 0.6902985074626866,
      "grad_norm": 0.21420392632858606,
      "learning_rate": 1.58119406839777e-05,
      "loss": 0.5308,
      "step": 370
    },
    {
      "epoch": 0.6996268656716418,
      "grad_norm": 0.23220484507328432,
      "learning_rate": 1.5224277125174388e-05,
      "loss": 0.5337,
      "step": 375
    },
    {
      "epoch": 0.7089552238805971,
      "grad_norm": 0.21960291661563633,
      "learning_rate": 1.464830362791204e-05,
      "loss": 0.5359,
      "step": 380
    },
    {
      "epoch": 0.7182835820895522,
      "grad_norm": 0.20363391645776452,
      "learning_rate": 1.4084568686621314e-05,
      "loss": 0.5383,
      "step": 385
    },
    {
      "epoch": 0.7276119402985075,
      "grad_norm": 0.20430129686406656,
      "learning_rate": 1.3533609141065008e-05,
      "loss": 0.5366,
      "step": 390
    },
    {
      "epoch": 0.7369402985074627,
      "grad_norm": 0.21391864672286387,
      "learning_rate": 1.299594966511038e-05,
      "loss": 0.5338,
      "step": 395
    },
    {
      "epoch": 0.746268656716418,
      "grad_norm": 0.20750528279220357,
      "learning_rate": 1.2472102267086904e-05,
      "loss": 0.5295,
      "step": 400
    },
    {
      "epoch": 0.7555970149253731,
      "grad_norm": 0.20652656803309716,
      "learning_rate": 1.1962565802205255e-05,
      "loss": 0.5362,
      "step": 405
    },
    {
      "epoch": 0.7649253731343284,
      "grad_norm": 0.19195629879165202,
      "learning_rate": 1.1467825497501954e-05,
      "loss": 0.5225,
      "step": 410
    },
    {
      "epoch": 0.7742537313432836,
      "grad_norm": 0.21151922246650445,
      "learning_rate": 1.0988352489762006e-05,
      "loss": 0.5384,
      "step": 415
    },
    {
      "epoch": 0.7835820895522388,
      "grad_norm": 0.204760450632927,
      "learning_rate": 1.052460337685951e-05,
      "loss": 0.5298,
      "step": 420
    },
    {
      "epoch": 0.792910447761194,
      "grad_norm": 0.19922336763600637,
      "learning_rate": 1.0077019782943584e-05,
      "loss": 0.5282,
      "step": 425
    },
    {
      "epoch": 0.8022388059701493,
      "grad_norm": 0.2104055047251735,
      "learning_rate": 9.646027937883622e-06,
      "loss": 0.529,
      "step": 430
    },
    {
      "epoch": 0.8115671641791045,
      "grad_norm": 0.18141387654521896,
      "learning_rate": 9.232038271374377e-06,
      "loss": 0.531,
      "step": 435
    },
    {
      "epoch": 0.8208955223880597,
      "grad_norm": 0.19879663171910475,
      "learning_rate": 8.835445022087426e-06,
      "loss": 0.5256,
      "step": 440
    },
    {
      "epoch": 0.8302238805970149,
      "grad_norm": 0.19565091593372,
      "learning_rate": 8.456625862241193e-06,
      "loss": 0.5358,
      "step": 445
    },
    {
      "epoch": 0.8395522388059702,
      "grad_norm": 0.187496803748416,
      "learning_rate": 8.095941537947057e-06,
      "loss": 0.5328,
      "step": 450
    },
    {
      "epoch": 0.8488805970149254,
      "grad_norm": 0.20371049579133477,
      "learning_rate": 7.753735525674059e-06,
      "loss": 0.5256,
      "step": 455
    },
    {
      "epoch": 0.8582089552238806,
      "grad_norm": 0.1964002704579327,
      "learning_rate": 7.430333705159286e-06,
      "loss": 0.5359,
      "step": 460
    },
    {
      "epoch": 0.8675373134328358,
      "grad_norm": 0.1848105783285991,
      "learning_rate": 7.126044049075548e-06,
      "loss": 0.5408,
      "step": 465
    },
    {
      "epoch": 0.8768656716417911,
      "grad_norm": 0.18246504825507467,
      "learning_rate": 6.8411563297516995e-06,
      "loss": 0.5208,
      "step": 470
    },
    {
      "epoch": 0.8861940298507462,
      "grad_norm": 0.17351842603547765,
      "learning_rate": 6.575941843225068e-06,
      "loss": 0.5246,
      "step": 475
    },
    {
      "epoch": 0.8955223880597015,
      "grad_norm": 0.20341591173191198,
      "learning_rate": 6.330653150888617e-06,
      "loss": 0.5292,
      "step": 480
    },
    {
      "epoch": 0.9048507462686567,
      "grad_norm": 0.20204818775308625,
      "learning_rate": 6.105523838979022e-06,
      "loss": 0.5373,
      "step": 485
    },
    {
      "epoch": 0.914179104477612,
      "grad_norm": 0.18421926232056562,
      "learning_rate": 5.900768296134551e-06,
      "loss": 0.5238,
      "step": 490
    },
    {
      "epoch": 0.9235074626865671,
      "grad_norm": 0.1788995157537539,
      "learning_rate": 5.7165815092346825e-06,
      "loss": 0.526,
      "step": 495
    },
    {
      "epoch": 0.9328358208955224,
      "grad_norm": 0.18233827133162475,
      "learning_rate": 5.553138877715833e-06,
      "loss": 0.5279,
      "step": 500
    },
    {
      "epoch": 0.9421641791044776,
      "grad_norm": 0.182345435097955,
      "learning_rate": 5.410596046540051e-06,
      "loss": 0.5229,
      "step": 505
    },
    {
      "epoch": 0.9514925373134329,
      "grad_norm": 0.19791728733223776,
      "learning_rate": 5.28908875797568e-06,
      "loss": 0.5235,
      "step": 510
    },
    {
      "epoch": 0.960820895522388,
      "grad_norm": 0.18416166735753317,
      "learning_rate": 5.1887327223312296e-06,
      "loss": 0.5246,
      "step": 515
    },
    {
      "epoch": 0.9701492537313433,
      "grad_norm": 0.19627109559115194,
      "learning_rate": 5.109623507765466e-06,
      "loss": 0.5289,
      "step": 520
    },
    {
      "epoch": 0.9794776119402985,
      "grad_norm": 0.19326569885491154,
      "learning_rate": 5.051836449278715e-06,
      "loss": 0.5311,
      "step": 525
    },
    {
      "epoch": 0.9888059701492538,
      "grad_norm": 0.19312098703576008,
      "learning_rate": 5.015426576972003e-06,
      "loss": 0.5298,
      "step": 530
    },
    {
      "epoch": 0.9981343283582089,
      "grad_norm": 0.17724401661078515,
      "learning_rate": 5.000428563642382e-06,
      "loss": 0.5299,
      "step": 535
    },
    {
      "epoch": 1.0,
      "step": 536,
      "total_flos": 488621249396736.0,
      "train_loss": 0.5610535256564617,
      "train_runtime": 6454.4726,
      "train_samples_per_second": 5.314,
      "train_steps_per_second": 0.083
    }
  ],
  "logging_steps": 5,
  "max_steps": 536,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 488621249396736.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}