{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 536,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009328358208955223,
      "grad_norm": 1.4529272838242249,
      "learning_rate": 9.259259259259259e-06,
      "loss": 0.8491,
      "step": 5
    },
    {
      "epoch": 0.018656716417910446,
      "grad_norm": 1.1689508861038835,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.8005,
      "step": 10
    },
    {
      "epoch": 0.027985074626865673,
      "grad_norm": 0.7491351005527522,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.7315,
      "step": 15
    },
    {
      "epoch": 0.03731343283582089,
      "grad_norm": 0.46441624130037606,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.6803,
      "step": 20
    },
    {
      "epoch": 0.04664179104477612,
      "grad_norm": 0.4634226186946367,
      "learning_rate": 4.62962962962963e-05,
      "loss": 0.6577,
      "step": 25
    },
    {
      "epoch": 0.055970149253731345,
      "grad_norm": 0.4030963463525461,
      "learning_rate": 4.999614302517356e-05,
      "loss": 0.6581,
      "step": 30
    },
    {
      "epoch": 0.06529850746268656,
      "grad_norm": 0.39406929868265494,
      "learning_rate": 4.997257741198456e-05,
      "loss": 0.6419,
      "step": 35
    },
    {
      "epoch": 0.07462686567164178,
      "grad_norm": 0.31318784447485654,
      "learning_rate": 4.992761136351291e-05,
      "loss": 0.627,
      "step": 40
    },
    {
      "epoch": 0.08395522388059702,
      "grad_norm": 0.3353753917281023,
      "learning_rate": 4.986128770052603e-05,
      "loss": 0.6084,
      "step": 45
    },
    {
      "epoch": 0.09328358208955224,
      "grad_norm": 0.2954075007188722,
      "learning_rate": 4.9773669582457364e-05,
      "loss": 0.61,
      "step": 50
    },
    {
      "epoch": 0.10261194029850747,
      "grad_norm": 0.3692314525331676,
      "learning_rate": 4.966484044726024e-05,
      "loss": 0.6137,
      "step": 55
    },
    {
      "epoch": 0.11194029850746269,
      "grad_norm": 0.3600438394484195,
      "learning_rate": 4.953490393195063e-05,
      "loss": 0.6004,
      "step": 60
    },
    {
      "epoch": 0.12126865671641791,
      "grad_norm": 0.39009463063899535,
      "learning_rate": 4.938398377391461e-05,
      "loss": 0.5973,
      "step": 65
    },
    {
      "epoch": 0.13059701492537312,
      "grad_norm": 0.5329218771045787,
      "learning_rate": 4.921222369307427e-05,
      "loss": 0.594,
      "step": 70
    },
    {
      "epoch": 0.13992537313432835,
      "grad_norm": 0.46575182507115465,
      "learning_rate": 4.901978725502454e-05,
      "loss": 0.5895,
      "step": 75
    },
    {
      "epoch": 0.14925373134328357,
      "grad_norm": 0.39800373886063,
      "learning_rate": 4.880685771527114e-05,
      "loss": 0.5895,
      "step": 80
    },
    {
      "epoch": 0.15858208955223882,
      "grad_norm": 0.42200587253551747,
      "learning_rate": 4.8573637844718e-05,
      "loss": 0.5944,
      "step": 85
    },
    {
      "epoch": 0.16791044776119404,
      "grad_norm": 0.4495424363631174,
      "learning_rate": 4.83203497365703e-05,
      "loss": 0.5911,
      "step": 90
    },
    {
      "epoch": 0.17723880597014927,
      "grad_norm": 0.46522791184194295,
      "learning_rate": 4.8047234594837143e-05,
      "loss": 0.5782,
      "step": 95
    },
    {
      "epoch": 0.1865671641791045,
      "grad_norm": 0.4425302293891964,
      "learning_rate": 4.775455250463507e-05,
      "loss": 0.5749,
      "step": 100
    },
    {
      "epoch": 0.1958955223880597,
      "grad_norm": 0.40683117962693205,
      "learning_rate": 4.744258218451135e-05,
      "loss": 0.5848,
      "step": 105
    },
    {
      "epoch": 0.20522388059701493,
      "grad_norm": 0.28500611318023544,
      "learning_rate": 4.71116207210228e-05,
      "loss": 0.576,
      "step": 110
    },
    {
      "epoch": 0.21455223880597016,
      "grad_norm": 0.35259387788620616,
      "learning_rate": 4.676198328582288e-05,
      "loss": 0.5712,
      "step": 115
    },
    {
      "epoch": 0.22388059701492538,
      "grad_norm": 0.3411126136195717,
      "learning_rate": 4.6394002835526535e-05,
      "loss": 0.5822,
      "step": 120
    },
    {
      "epoch": 0.2332089552238806,
      "grad_norm": 0.41571669987803755,
      "learning_rate": 4.6008029794638596e-05,
      "loss": 0.5668,
      "step": 125
    },
    {
      "epoch": 0.24253731343283583,
      "grad_norm": 0.3783814665428634,
      "learning_rate": 4.560443172184763e-05,
      "loss": 0.5695,
      "step": 130
    },
    {
      "epoch": 0.251865671641791,
      "grad_norm": 0.36003096332997486,
      "learning_rate": 4.5183592960003104e-05,
      "loss": 0.562,
      "step": 135
    },
    {
      "epoch": 0.26119402985074625,
      "grad_norm": 0.29625254021632597,
      "learning_rate": 4.4745914270109055e-05,
      "loss": 0.5672,
      "step": 140
    },
    {
      "epoch": 0.27052238805970147,
      "grad_norm": 0.4041052584419407,
      "learning_rate": 4.429181244968301e-05,
      "loss": 0.5665,
      "step": 145
    },
    {
      "epoch": 0.2798507462686567,
      "grad_norm": 0.28581434778217296,
      "learning_rate": 4.38217199358434e-05,
      "loss": 0.5604,
      "step": 150
    },
    {
      "epoch": 0.2891791044776119,
      "grad_norm": 0.4031760526528498,
      "learning_rate": 4.3336084393503545e-05,
      "loss": 0.5573,
      "step": 155
    },
    {
      "epoch": 0.29850746268656714,
      "grad_norm": 0.3787628774367758,
      "learning_rate": 4.283536828906436e-05,
      "loss": 0.5698,
      "step": 160
    },
    {
      "epoch": 0.30783582089552236,
      "grad_norm": 0.4316425611729101,
      "learning_rate": 4.2320048450011684e-05,
      "loss": 0.5631,
      "step": 165
    },
    {
      "epoch": 0.31716417910447764,
      "grad_norm": 0.32889164452115366,
      "learning_rate": 4.179061561083777e-05,
      "loss": 0.555,
      "step": 170
    },
    {
      "epoch": 0.32649253731343286,
      "grad_norm": 0.3304034607225048,
      "learning_rate": 4.124757394571914e-05,
      "loss": 0.5612,
      "step": 175
    },
    {
      "epoch": 0.3358208955223881,
      "grad_norm": 0.28777523778739394,
      "learning_rate": 4.069144058839605e-05,
      "loss": 0.562,
      "step": 180
    },
    {
      "epoch": 0.3451492537313433,
      "grad_norm": 0.3339855615199367,
      "learning_rate": 4.012274513971061e-05,
      "loss": 0.5662,
      "step": 185
    },
    {
      "epoch": 0.35447761194029853,
      "grad_norm": 0.3197899362876164,
      "learning_rate": 3.954202916327264e-05,
      "loss": 0.5544,
      "step": 190
    },
    {
      "epoch": 0.36380597014925375,
      "grad_norm": 0.3250020382984713,
      "learning_rate": 3.894984566973346e-05,
      "loss": 0.5544,
      "step": 195
    },
    {
      "epoch": 0.373134328358209,
      "grad_norm": 0.3329747882916182,
      "learning_rate": 3.834675859015876e-05,
      "loss": 0.5584,
      "step": 200
    },
    {
      "epoch": 0.3824626865671642,
      "grad_norm": 0.2931796751529486,
      "learning_rate": 3.77333422390021e-05,
      "loss": 0.5492,
      "step": 205
    },
    {
      "epoch": 0.3917910447761194,
      "grad_norm": 0.309605999568671,
      "learning_rate": 3.711018076719034e-05,
      "loss": 0.5563,
      "step": 210
    },
    {
      "epoch": 0.40111940298507465,
      "grad_norm": 0.29067978843774794,
      "learning_rate": 3.647786760584194e-05,
      "loss": 0.5606,
      "step": 215
    },
    {
      "epoch": 0.41044776119402987,
      "grad_norm": 0.2720420372815392,
      "learning_rate": 3.583700490114776e-05,
      "loss": 0.5587,
      "step": 220
    },
    {
      "epoch": 0.4197761194029851,
      "grad_norm": 0.3190566242172115,
      "learning_rate": 3.518820294095267e-05,
      "loss": 0.5546,
      "step": 225
    },
    {
      "epoch": 0.4291044776119403,
      "grad_norm": 0.2972056748474866,
      "learning_rate": 3.453207957358377e-05,
      "loss": 0.5465,
      "step": 230
    },
    {
      "epoch": 0.43843283582089554,
      "grad_norm": 0.31648213924949936,
      "learning_rate": 3.386925961947906e-05,
      "loss": 0.5478,
      "step": 235
    },
    {
      "epoch": 0.44776119402985076,
      "grad_norm": 0.3483684766363109,
      "learning_rate": 3.320037427617639e-05,
      "loss": 0.5552,
      "step": 240
    },
    {
      "epoch": 0.457089552238806,
      "grad_norm": 0.2870111399875252,
      "learning_rate": 3.252606051722972e-05,
      "loss": 0.5536,
      "step": 245
    },
    {
      "epoch": 0.4664179104477612,
      "grad_norm": 0.3290811706730747,
      "learning_rate": 3.1846960485624886e-05,
      "loss": 0.5429,
      "step": 250
    },
    {
      "epoch": 0.47574626865671643,
      "grad_norm": 0.29255944971499237,
      "learning_rate": 3.1163720882272516e-05,
      "loss": 0.5506,
      "step": 255
    },
    {
      "epoch": 0.48507462686567165,
      "grad_norm": 0.31439453371777304,
      "learning_rate": 3.047699235016056e-05,
      "loss": 0.5429,
      "step": 260
    },
    {
      "epoch": 0.4944029850746269,
      "grad_norm": 0.3425522743911677,
      "learning_rate": 2.9787428854752736e-05,
      "loss": 0.5368,
      "step": 265
    },
    {
      "epoch": 0.503731343283582,
      "grad_norm": 0.27793747214527936,
      "learning_rate": 2.9095687061223058e-05,
      "loss": 0.5514,
      "step": 270
    },
    {
      "epoch": 0.5130597014925373,
      "grad_norm": 0.3383812023651381,
      "learning_rate": 2.8402425709119435e-05,
      "loss": 0.5506,
      "step": 275
    },
    {
      "epoch": 0.5223880597014925,
      "grad_norm": 0.27903329251231174,
      "learning_rate": 2.7708304985051868e-05,
      "loss": 0.5476,
      "step": 280
    },
    {
      "epoch": 0.5317164179104478,
      "grad_norm": 0.26659760763870816,
      "learning_rate": 2.7013985894002623e-05,
      "loss": 0.5355,
      "step": 285
    },
    {
      "epoch": 0.5410447761194029,
      "grad_norm": 0.27151810632711876,
      "learning_rate": 2.6320129629857093e-05,
      "loss": 0.5483,
      "step": 290
    },
    {
      "epoch": 0.5503731343283582,
      "grad_norm": 0.26109145689554275,
      "learning_rate": 2.56273969457547e-05,
      "loss": 0.5372,
      "step": 295
    },
    {
      "epoch": 0.5597014925373134,
      "grad_norm": 0.25106284404522056,
      "learning_rate": 2.4936447524859625e-05,
      "loss": 0.5415,
      "step": 300
    },
    {
      "epoch": 0.5690298507462687,
      "grad_norm": 0.3274744617644554,
      "learning_rate": 2.4247939352150386e-05,
      "loss": 0.5366,
      "step": 305
    },
    {
      "epoch": 0.5783582089552238,
      "grad_norm": 0.26437036147373966,
      "learning_rate": 2.3562528087826573e-05,
      "loss": 0.5428,
      "step": 310
    },
    {
      "epoch": 0.5876865671641791,
      "grad_norm": 0.2446519338891704,
      "learning_rate": 2.2880866442929544e-05,
      "loss": 0.5398,
      "step": 315
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 0.24911911219120392,
      "learning_rate": 2.2203603557771447e-05,
      "loss": 0.5358,
      "step": 320
    },
    {
      "epoch": 0.6063432835820896,
      "grad_norm": 0.2516922089571222,
      "learning_rate": 2.153138438376473e-05,
      "loss": 0.5341,
      "step": 325
    },
    {
      "epoch": 0.6156716417910447,
      "grad_norm": 0.2412997869982118,
      "learning_rate": 2.0864849069240645e-05,
      "loss": 0.5388,
      "step": 330
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.2565617737795945,
      "learning_rate": 2.0204632349841667e-05,
      "loss": 0.5356,
      "step": 335
    },
    {
      "epoch": 0.6343283582089553,
      "grad_norm": 0.23064002794765903,
      "learning_rate": 1.9551362944068462e-05,
      "loss": 0.5379,
      "step": 340
    },
    {
      "epoch": 0.6436567164179104,
      "grad_norm": 0.24633590530445001,
      "learning_rate": 1.890566295455678e-05,
      "loss": 0.5312,
      "step": 345
    },
    {
      "epoch": 0.6529850746268657,
      "grad_norm": 0.2387974629546731,
      "learning_rate": 1.8268147275654707e-05,
      "loss": 0.5412,
      "step": 350
    },
    {
      "epoch": 0.6623134328358209,
      "grad_norm": 0.23072321595406606,
      "learning_rate": 1.7639423007864252e-05,
      "loss": 0.528,
      "step": 355
    },
    {
      "epoch": 0.6716417910447762,
      "grad_norm": 0.22136194889078148,
      "learning_rate": 1.702008887970491e-05,
      "loss": 0.5288,
      "step": 360
    },
    {
      "epoch": 0.6809701492537313,
      "grad_norm": 0.22646201154385875,
      "learning_rate": 1.6410734677549872e-05,
      "loss": 0.5321,
      "step": 365
    },
    {
      "epoch": 0.6902985074626866,
      "grad_norm": 0.22901452524086416,
      "learning_rate": 1.58119406839777e-05,
      "loss": 0.531,
      "step": 370
    },
    {
      "epoch": 0.6996268656716418,
      "grad_norm": 0.25990616664969085,
      "learning_rate": 1.5224277125174388e-05,
      "loss": 0.534,
      "step": 375
    },
    {
      "epoch": 0.7089552238805971,
      "grad_norm": 0.2294421044982824,
      "learning_rate": 1.464830362791204e-05,
      "loss": 0.5361,
      "step": 380
    },
    {
      "epoch": 0.7182835820895522,
      "grad_norm": 0.2217290229674116,
      "learning_rate": 1.4084568686621314e-05,
      "loss": 0.5384,
      "step": 385
    },
    {
      "epoch": 0.7276119402985075,
      "grad_norm": 0.2162028885642014,
      "learning_rate": 1.3533609141065008e-05,
      "loss": 0.5368,
      "step": 390
    },
    {
      "epoch": 0.7369402985074627,
      "grad_norm": 0.23748115013736273,
      "learning_rate": 1.299594966511038e-05,
      "loss": 0.534,
      "step": 395
    },
    {
      "epoch": 0.746268656716418,
      "grad_norm": 0.22565894301779696,
      "learning_rate": 1.2472102267086904e-05,
      "loss": 0.5297,
      "step": 400
    },
    {
      "epoch": 0.7555970149253731,
      "grad_norm": 0.21905287170194962,
      "learning_rate": 1.1962565802205255e-05,
      "loss": 0.5364,
      "step": 405
    },
    {
      "epoch": 0.7649253731343284,
      "grad_norm": 0.20940519149092718,
      "learning_rate": 1.1467825497501954e-05,
      "loss": 0.5227,
      "step": 410
    },
    {
      "epoch": 0.7742537313432836,
      "grad_norm": 0.23494185563834197,
      "learning_rate": 1.0988352489762006e-05,
      "loss": 0.5386,
      "step": 415
    },
    {
      "epoch": 0.7835820895522388,
      "grad_norm": 0.21057179681993918,
      "learning_rate": 1.052460337685951e-05,
      "loss": 0.5301,
      "step": 420
    },
    {
      "epoch": 0.792910447761194,
      "grad_norm": 0.2131633170898678,
      "learning_rate": 1.0077019782943584e-05,
      "loss": 0.5283,
      "step": 425
    },
    {
      "epoch": 0.8022388059701493,
      "grad_norm": 0.219677996534737,
      "learning_rate": 9.646027937883622e-06,
      "loss": 0.5292,
      "step": 430
    },
    {
      "epoch": 0.8115671641791045,
      "grad_norm": 0.20532036673430643,
      "learning_rate": 9.232038271374377e-06,
      "loss": 0.5312,
      "step": 435
    },
    {
      "epoch": 0.8208955223880597,
      "grad_norm": 0.2274426668213202,
      "learning_rate": 8.835445022087426e-06,
      "loss": 0.5258,
      "step": 440
    },
    {
      "epoch": 0.8302238805970149,
      "grad_norm": 0.21658031336344372,
      "learning_rate": 8.456625862241193e-06,
      "loss": 0.5361,
      "step": 445
    },
    {
      "epoch": 0.8395522388059702,
      "grad_norm": 0.1969825219644592,
      "learning_rate": 8.095941537947057e-06,
      "loss": 0.533,
      "step": 450
    },
    {
      "epoch": 0.8488805970149254,
      "grad_norm": 0.23076534818899264,
      "learning_rate": 7.753735525674059e-06,
      "loss": 0.5257,
      "step": 455
    },
    {
      "epoch": 0.8582089552238806,
      "grad_norm": 0.20880898976821344,
      "learning_rate": 7.430333705159286e-06,
      "loss": 0.5362,
      "step": 460
    },
    {
      "epoch": 0.8675373134328358,
      "grad_norm": 0.20216937595208878,
      "learning_rate": 7.126044049075548e-06,
      "loss": 0.541,
      "step": 465
    },
    {
      "epoch": 0.8768656716417911,
      "grad_norm": 0.1944238128936159,
      "learning_rate": 6.8411563297516995e-06,
      "loss": 0.521,
      "step": 470
    },
    {
      "epoch": 0.8861940298507462,
      "grad_norm": 0.18302437521460974,
      "learning_rate": 6.575941843225068e-06,
      "loss": 0.5248,
      "step": 475
    },
    {
      "epoch": 0.8955223880597015,
      "grad_norm": 0.2194502098879558,
      "learning_rate": 6.330653150888617e-06,
      "loss": 0.5294,
      "step": 480
    },
    {
      "epoch": 0.9048507462686567,
      "grad_norm": 0.21482900804537108,
      "learning_rate": 6.105523838979022e-06,
      "loss": 0.5375,
      "step": 485
    },
    {
      "epoch": 0.914179104477612,
      "grad_norm": 0.19312573942582062,
      "learning_rate": 5.900768296134551e-06,
      "loss": 0.524,
      "step": 490
    },
    {
      "epoch": 0.9235074626865671,
      "grad_norm": 0.18903344539396477,
      "learning_rate": 5.7165815092346825e-06,
      "loss": 0.5262,
      "step": 495
    },
    {
      "epoch": 0.9328358208955224,
      "grad_norm": 0.1911384927096407,
      "learning_rate": 5.553138877715833e-06,
      "loss": 0.5281,
      "step": 500
    },
    {
      "epoch": 0.9421641791044776,
      "grad_norm": 0.19367838422770262,
      "learning_rate": 5.410596046540051e-06,
      "loss": 0.5231,
      "step": 505
    },
    {
      "epoch": 0.9514925373134329,
      "grad_norm": 0.20829703892741963,
      "learning_rate": 5.28908875797568e-06,
      "loss": 0.5237,
      "step": 510
    },
    {
      "epoch": 0.960820895522388,
      "grad_norm": 0.19163012582670946,
      "learning_rate": 5.1887327223312296e-06,
      "loss": 0.5248,
      "step": 515
    },
    {
      "epoch": 0.9701492537313433,
      "grad_norm": 0.20150691323789874,
      "learning_rate": 5.109623507765466e-06,
      "loss": 0.5291,
      "step": 520
    },
    {
      "epoch": 0.9794776119402985,
      "grad_norm": 0.20723359673688968,
      "learning_rate": 5.051836449278715e-06,
      "loss": 0.5313,
      "step": 525
    },
    {
      "epoch": 0.9888059701492538,
      "grad_norm": 0.20425578544955647,
      "learning_rate": 5.015426576972003e-06,
      "loss": 0.53,
      "step": 530
    },
    {
      "epoch": 0.9981343283582089,
      "grad_norm": 0.18922418280122189,
      "learning_rate": 5.000428563642382e-06,
      "loss": 0.5301,
      "step": 535
    },
    {
      "epoch": 1.0,
      "step": 536,
      "total_flos": 488621249396736.0,
      "train_loss": 0.5612209483758727,
      "train_runtime": 11742.8979,
      "train_samples_per_second": 2.921,
      "train_steps_per_second": 0.046
    }
  ],
  "logging_steps": 5,
  "max_steps": 536,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 488621249396736.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}