{
  "best_metric": 1.2051972150802612,
  "best_model_checkpoint": "gator/output_mlm/checkpoint-225000",
  "epoch": 3.0,
  "global_step": 233133,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.9892765074013544e-05,
      "loss": 1.6961,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.978553014802709e-05,
      "loss": 1.6895,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9678295222040635e-05,
      "loss": 1.6703,
      "step": 1500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9571060296054184e-05,
      "loss": 1.6708,
      "step": 2000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.946382537006773e-05,
      "loss": 1.6626,
      "step": 2500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.935659044408128e-05,
      "loss": 1.659,
      "step": 3000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9249355518094824e-05,
      "loss": 1.652,
      "step": 3500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.914212059210837e-05,
      "loss": 1.6472,
      "step": 4000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9034885666121915e-05,
      "loss": 1.6317,
      "step": 4500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.8927650740135464e-05,
      "loss": 1.6339,
      "step": 5000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8820415814149006e-05,
      "loss": 1.6211,
      "step": 5500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.8713180888162555e-05,
      "loss": 1.632,
      "step": 6000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.86059459621761e-05,
      "loss": 1.624,
      "step": 6500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8498711036189646e-05,
      "loss": 1.6209,
      "step": 7000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8391476110203195e-05,
      "loss": 1.6198,
      "step": 7500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.828424118421674e-05,
      "loss": 1.6184,
      "step": 8000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8177006258230286e-05,
      "loss": 1.6141,
      "step": 8500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.806977133224383e-05,
      "loss": 1.6055,
      "step": 9000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.7962536406257376e-05,
      "loss": 1.6018,
      "step": 9500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.785530148027092e-05,
      "loss": 1.6092,
      "step": 10000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.774806655428447e-05,
      "loss": 1.5921,
      "step": 10500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.764083162829801e-05,
      "loss": 1.5968,
      "step": 11000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.753359670231156e-05,
      "loss": 1.6,
      "step": 11500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.742636177632511e-05,
      "loss": 1.5957,
      "step": 12000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.731912685033865e-05,
      "loss": 1.5944,
      "step": 12500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.72118919243522e-05,
      "loss": 1.5849,
      "step": 13000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.710465699836574e-05,
      "loss": 1.5823,
      "step": 13500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.699742207237929e-05,
      "loss": 1.5947,
      "step": 14000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.689018714639283e-05,
      "loss": 1.5874,
      "step": 14500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.678295222040638e-05,
      "loss": 1.5832,
      "step": 15000
    },
    {
      "epoch": 0.19,
      "eval_loss": 1.4992027282714844,
      "eval_runtime": 3385.2625,
      "eval_samples_per_second": 39.094,
      "eval_steps_per_second": 4.887,
      "step": 15000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.667571729441992e-05,
      "loss": 1.5678,
      "step": 15500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.656848236843347e-05,
      "loss": 1.5862,
      "step": 16000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.646124744244702e-05,
      "loss": 1.5828,
      "step": 16500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.635401251646057e-05,
      "loss": 1.5777,
      "step": 17000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.624677759047411e-05,
      "loss": 1.5722,
      "step": 17500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.613954266448765e-05,
      "loss": 1.5723,
      "step": 18000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.60323077385012e-05,
      "loss": 1.5827,
      "step": 18500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.5925072812514744e-05,
      "loss": 1.572,
      "step": 19000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.581783788652829e-05,
      "loss": 1.565,
      "step": 19500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5710602960541835e-05,
      "loss": 1.5628,
      "step": 20000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5603368034555384e-05,
      "loss": 1.5598,
      "step": 20500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.5496133108568926e-05,
      "loss": 1.5571,
      "step": 21000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.538889818258248e-05,
      "loss": 1.5546,
      "step": 21500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5281663256596023e-05,
      "loss": 1.5578,
      "step": 22000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.517442833060957e-05,
      "loss": 1.554,
      "step": 22500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.5067193404623114e-05,
      "loss": 1.5488,
      "step": 23000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.495995847863666e-05,
      "loss": 1.5512,
      "step": 23500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.4852723552650205e-05,
      "loss": 1.5636,
      "step": 24000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4745488626663754e-05,
      "loss": 1.5437,
      "step": 24500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4638253700677296e-05,
      "loss": 1.5515,
      "step": 25000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.453101877469084e-05,
      "loss": 1.5587,
      "step": 25500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4423783848704394e-05,
      "loss": 1.5413,
      "step": 26000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4316548922717936e-05,
      "loss": 1.5381,
      "step": 26500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.4209313996731485e-05,
      "loss": 1.5483,
      "step": 27000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.410207907074503e-05,
      "loss": 1.5436,
      "step": 27500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.3994844144758576e-05,
      "loss": 1.5457,
      "step": 28000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.388760921877212e-05,
      "loss": 1.5367,
      "step": 28500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.378037429278567e-05,
      "loss": 1.5404,
      "step": 29000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.367313936679921e-05,
      "loss": 1.5414,
      "step": 29500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.356590444081276e-05,
      "loss": 1.5325,
      "step": 30000
    },
    {
      "epoch": 0.39,
      "eval_loss": 1.4652618169784546,
      "eval_runtime": 3382.9946,
      "eval_samples_per_second": 39.12,
      "eval_steps_per_second": 4.89,
      "step": 30000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.3458669514826307e-05,
      "loss": 1.5386,
      "step": 30500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.335143458883985e-05,
      "loss": 1.5513,
      "step": 31000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.32441996628534e-05,
      "loss": 1.5384,
      "step": 31500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.313696473686694e-05,
      "loss": 1.5381,
      "step": 32000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.302972981088049e-05,
      "loss": 1.5429,
      "step": 32500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.292249488489403e-05,
      "loss": 1.5325,
      "step": 33000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.281525995890758e-05,
      "loss": 1.5388,
      "step": 33500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.270802503292112e-05,
      "loss": 1.5353,
      "step": 34000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.260079010693467e-05,
      "loss": 1.5311,
      "step": 34500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.249355518094821e-05,
      "loss": 1.5356,
      "step": 35000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.238632025496176e-05,
      "loss": 1.5347,
      "step": 35500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.227908532897531e-05,
      "loss": 1.5198,
      "step": 36000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.217185040298885e-05,
      "loss": 1.5196,
      "step": 36500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.20646154770024e-05,
      "loss": 1.5173,
      "step": 37000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.195738055101594e-05,
      "loss": 1.52,
      "step": 37500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.185014562502949e-05,
      "loss": 1.5155,
      "step": 38000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.1742910699043034e-05,
      "loss": 1.512,
      "step": 38500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.163567577305658e-05,
      "loss": 1.5146,
      "step": 39000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.1528440847070125e-05,
      "loss": 1.5138,
      "step": 39500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.142120592108368e-05,
      "loss": 1.5101,
      "step": 40000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.131397099509722e-05,
      "loss": 1.4977,
      "step": 40500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.120673606911077e-05,
      "loss": 1.5153,
      "step": 41000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.1099501143124314e-05,
      "loss": 1.5053,
      "step": 41500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.0992266217137856e-05,
      "loss": 1.5088,
      "step": 42000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.0885031291151405e-05,
      "loss": 1.5111,
      "step": 42500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.077779636516495e-05,
      "loss": 1.5069,
      "step": 43000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0670561439178496e-05,
      "loss": 1.5085,
      "step": 43500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.056332651319204e-05,
      "loss": 1.5064,
      "step": 44000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0456091587205586e-05,
      "loss": 1.5004,
      "step": 44500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.0348856661219135e-05,
      "loss": 1.4979,
      "step": 45000
    },
    {
      "epoch": 0.58,
      "eval_loss": 1.4358571767807007,
      "eval_runtime": 3383.7351,
      "eval_samples_per_second": 39.112,
      "eval_steps_per_second": 4.889,
      "step": 45000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0241621735232684e-05,
      "loss": 1.5037,
      "step": 45500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0134386809246226e-05,
      "loss": 1.4942,
      "step": 46000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.0027151883259775e-05,
      "loss": 1.4907,
      "step": 46500
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.991991695727332e-05,
      "loss": 1.4985,
      "step": 47000
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9812682031286866e-05,
      "loss": 1.4996,
      "step": 47500
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.970544710530041e-05,
      "loss": 1.4959,
      "step": 48000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.959821217931396e-05,
      "loss": 1.4876,
      "step": 48500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.94909772533275e-05,
      "loss": 1.5026,
      "step": 49000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.938374232734105e-05,
      "loss": 1.4985,
      "step": 49500
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.92765074013546e-05,
      "loss": 1.4923,
      "step": 50000
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.916927247536814e-05,
      "loss": 1.5005,
      "step": 50500
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.906203754938169e-05,
      "loss": 1.5018,
      "step": 51000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.895480262339523e-05,
      "loss": 1.4829,
      "step": 51500
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.884756769740878e-05,
      "loss": 1.4858,
      "step": 52000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.874033277142232e-05,
      "loss": 1.497,
      "step": 52500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.863309784543587e-05,
      "loss": 1.4849,
      "step": 53000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.852586291944941e-05,
      "loss": 1.4851,
      "step": 53500
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.841862799346296e-05,
      "loss": 1.4893,
      "step": 54000
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.831139306747651e-05,
      "loss": 1.4887,
      "step": 54500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.820415814149005e-05,
      "loss": 1.4805,
      "step": 55000
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.80969232155036e-05,
      "loss": 1.4815,
      "step": 55500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.798968828951714e-05,
      "loss": 1.477,
      "step": 56000
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.788245336353069e-05,
      "loss": 1.4791,
      "step": 56500
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.777521843754423e-05,
      "loss": 1.4915,
      "step": 57000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.766798351155778e-05,
      "loss": 1.4935,
      "step": 57500
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7560748585571324e-05,
      "loss": 1.4711,
      "step": 58000
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.745351365958487e-05,
      "loss": 1.4789,
      "step": 58500
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.734627873359842e-05,
      "loss": 1.471,
      "step": 59000
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.7239043807611964e-05,
      "loss": 1.4733,
      "step": 59500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.713180888162551e-05,
      "loss": 1.4715,
      "step": 60000
    },
    {
      "epoch": 0.77,
      "eval_loss": 1.4038593769073486,
      "eval_runtime": 3383.4345,
      "eval_samples_per_second": 39.115,
      "eval_steps_per_second": 4.889,
      "step": 60000
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.7024573955639055e-05,
      "loss": 1.4746,
      "step": 60500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.6917339029652604e-05,
      "loss": 1.4745,
      "step": 61000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.6810104103666146e-05,
      "loss": 1.4699,
      "step": 61500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.6702869177679695e-05,
      "loss": 1.4672,
      "step": 62000
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.659563425169324e-05,
      "loss": 1.4654,
      "step": 62500
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.6488399325706786e-05,
      "loss": 1.4628,
      "step": 63000
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.6381164399720335e-05,
      "loss": 1.4698,
      "step": 63500
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.6273929473733884e-05,
      "loss": 1.4667,
      "step": 64000
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.6166694547747426e-05,
      "loss": 1.4591,
      "step": 64500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.6059459621760974e-05,
      "loss": 1.4674,
      "step": 65000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.5952224695774517e-05,
      "loss": 1.467,
      "step": 65500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5844989769788065e-05,
      "loss": 1.4652,
      "step": 66000
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.573775484380161e-05,
      "loss": 1.4609,
      "step": 66500
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.563051991781515e-05,
      "loss": 1.4657,
      "step": 67000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.55232849918287e-05,
      "loss": 1.4614,
      "step": 67500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.541605006584225e-05,
      "loss": 1.4527,
      "step": 68000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.5308815139855796e-05,
      "loss": 1.452,
      "step": 68500
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.520158021386934e-05,
      "loss": 1.4666,
      "step": 69000
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.509434528788289e-05,
      "loss": 1.4544,
      "step": 69500
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.498711036189643e-05,
      "loss": 1.448,
      "step": 70000
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.487987543590998e-05,
      "loss": 1.4554,
      "step": 70500
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.477264050992352e-05,
      "loss": 1.4451,
      "step": 71000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.466540558393707e-05,
      "loss": 1.4546,
      "step": 71500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.455817065795061e-05,
      "loss": 1.4573,
      "step": 72000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.445093573196416e-05,
      "loss": 1.4486,
      "step": 72500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.434370080597771e-05,
      "loss": 1.4474,
      "step": 73000
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.423646587999125e-05,
      "loss": 1.456,
      "step": 73500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.41292309540048e-05,
      "loss": 1.4537,
      "step": 74000
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.402199602801834e-05,
      "loss": 1.4532,
      "step": 74500
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.391476110203189e-05,
      "loss": 1.4448,
      "step": 75000
    },
    {
      "epoch": 0.97,
      "eval_loss": 1.3876514434814453,
      "eval_runtime": 3381.619,
      "eval_samples_per_second": 39.136,
      "eval_steps_per_second": 4.892,
      "step": 75000
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.380752617604543e-05,
      "loss": 1.4447,
      "step": 75500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.370029125005898e-05,
      "loss": 1.4423,
      "step": 76000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3593056324072524e-05,
      "loss": 1.4477,
      "step": 76500
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.348582139808607e-05,
      "loss": 1.4408,
      "step": 77000
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.337858647209962e-05,
      "loss": 1.45,
      "step": 77500
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.3271351546113163e-05,
      "loss": 1.4316,
      "step": 78000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.316411662012671e-05,
      "loss": 1.4364,
      "step": 78500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.3056881694140254e-05,
      "loss": 1.4296,
      "step": 79000
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.29496467681538e-05,
      "loss": 1.426,
      "step": 79500
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.2842411842167345e-05,
      "loss": 1.4301,
      "step": 80000
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.2735176916180894e-05,
      "loss": 1.436,
      "step": 80500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.2627941990194436e-05,
      "loss": 1.4253,
      "step": 81000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.2520707064207985e-05,
      "loss": 1.4303,
      "step": 81500
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.2413472138221534e-05,
      "loss": 1.4286,
      "step": 82000
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.230623721223508e-05,
      "loss": 1.4331,
      "step": 82500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.2199002286248625e-05,
      "loss": 1.4307,
      "step": 83000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.2091767360262174e-05,
      "loss": 1.419,
      "step": 83500
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.1984532434275716e-05,
      "loss": 1.422,
      "step": 84000
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.187729750828926e-05,
      "loss": 1.4204,
      "step": 84500
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.177006258230281e-05,
      "loss": 1.4301,
      "step": 85000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.166282765631635e-05,
      "loss": 1.4126,
      "step": 85500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.15555927303299e-05,
      "loss": 1.4248,
      "step": 86000
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.144835780434344e-05,
      "loss": 1.423,
      "step": 86500
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.1341122878356995e-05,
      "loss": 1.4192,
      "step": 87000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.123388795237054e-05,
      "loss": 1.418,
      "step": 87500
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.1126653026384086e-05,
      "loss": 1.4194,
      "step": 88000
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.101941810039763e-05,
      "loss": 1.4,
      "step": 88500
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.091218317441118e-05,
      "loss": 1.4023,
      "step": 89000
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.080494824842472e-05,
      "loss": 1.4258,
      "step": 89500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.069771332243827e-05,
      "loss": 1.4191,
      "step": 90000
    },
    {
      "epoch": 1.16,
      "eval_loss": 1.3602887392044067,
      "eval_runtime": 3380.9917,
      "eval_samples_per_second": 39.144,
      "eval_steps_per_second": 4.893,
      "step": 90000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.059047839645181e-05,
      "loss": 1.413,
      "step": 90500
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0483243470465356e-05,
      "loss": 1.4107,
      "step": 91000
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0376008544478908e-05,
      "loss": 1.4151,
      "step": 91500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0268773618492454e-05,
      "loss": 1.4119,
      "step": 92000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0161538692505996e-05,
      "loss": 1.4049,
      "step": 92500
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.005430376651954e-05,
      "loss": 1.4124,
      "step": 93000
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.9947068840533087e-05,
      "loss": 1.4025,
      "step": 93500
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9839833914546632e-05,
      "loss": 1.4116,
      "step": 94000
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9732598988560177e-05,
      "loss": 1.4148,
      "step": 94500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9625364062573723e-05,
      "loss": 1.4042,
      "step": 95000
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.951812913658727e-05,
      "loss": 1.4009,
      "step": 95500
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9410894210600814e-05,
      "loss": 1.3971,
      "step": 96000
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9303659284614366e-05,
      "loss": 1.4069,
      "step": 96500
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.919642435862791e-05,
      "loss": 1.4172,
      "step": 97000
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9089189432641457e-05,
      "loss": 1.3947,
      "step": 97500
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.8981954506655003e-05,
      "loss": 1.3998,
      "step": 98000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8874719580668548e-05,
      "loss": 1.4033,
      "step": 98500
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8767484654682093e-05,
      "loss": 1.3905,
      "step": 99000
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.866024972869564e-05,
      "loss": 1.3934,
      "step": 99500
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.855301480270918e-05,
      "loss": 1.3938,
      "step": 100000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8445779876722727e-05,
      "loss": 1.4021,
      "step": 100500
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.833854495073628e-05,
      "loss": 1.3977,
      "step": 101000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8231310024749824e-05,
      "loss": 1.4034,
      "step": 101500
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.812407509876337e-05,
      "loss": 1.3978,
      "step": 102000
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.8016840172776915e-05,
      "loss": 1.3985,
      "step": 102500
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.790960524679046e-05,
      "loss": 1.3881,
      "step": 103000
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7802370320804006e-05,
      "loss": 1.3916,
      "step": 103500
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.769513539481755e-05,
      "loss": 1.3981,
      "step": 104000
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7587900468831097e-05,
      "loss": 1.392,
      "step": 104500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7480665542844643e-05,
      "loss": 1.3988,
      "step": 105000
    },
    {
      "epoch": 1.35,
      "eval_loss": 1.3425246477127075,
      "eval_runtime": 3384.4417,
      "eval_samples_per_second": 39.104,
      "eval_steps_per_second": 4.888,
      "step": 105000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.737343061685819e-05,
      "loss": 1.3916,
      "step": 105500
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7266195690871737e-05,
      "loss": 1.3971,
      "step": 106000
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7158960764885282e-05,
      "loss": 1.3993,
      "step": 106500
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.7051725838898828e-05,
      "loss": 1.3899,
      "step": 107000
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.6944490912912373e-05,
      "loss": 1.3771,
      "step": 107500
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.683725598692592e-05,
      "loss": 1.3988,
      "step": 108000
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6730021060939464e-05,
      "loss": 1.3913,
      "step": 108500
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.662278613495301e-05,
      "loss": 1.3929,
      "step": 109000
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6515551208966555e-05,
      "loss": 1.3879,
      "step": 109500
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.64083162829801e-05,
      "loss": 1.385,
      "step": 110000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.630108135699365e-05,
      "loss": 1.3798,
      "step": 110500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6193846431007195e-05,
      "loss": 1.3844,
      "step": 111000
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.608661150502074e-05,
      "loss": 1.3789,
      "step": 111500
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.5979376579034286e-05,
      "loss": 1.3734,
      "step": 112000
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.587214165304783e-05,
      "loss": 1.3851,
      "step": 112500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5764906727061377e-05,
      "loss": 1.3802,
      "step": 113000
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5657671801074922e-05,
      "loss": 1.3762,
      "step": 113500
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5550436875088468e-05,
      "loss": 1.3746,
      "step": 114000
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5443201949102013e-05,
      "loss": 1.38,
      "step": 114500
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5335967023115565e-05,
      "loss": 1.3842,
      "step": 115000
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.522873209712911e-05,
      "loss": 1.3718,
      "step": 115500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5121497171142656e-05,
      "loss": 1.3839,
      "step": 116000
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.5014262245156202e-05,
      "loss": 1.375,
      "step": 116500
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.4907027319169744e-05,
      "loss": 1.3647,
      "step": 117000
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.479979239318329e-05,
      "loss": 1.3787,
      "step": 117500
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4692557467196835e-05,
      "loss": 1.3692,
      "step": 118000
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4585322541210384e-05,
      "loss": 1.371,
      "step": 118500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.447808761522393e-05,
      "loss": 1.3635,
      "step": 119000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4370852689237475e-05,
      "loss": 1.368,
      "step": 119500
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.426361776325102e-05,
      "loss": 1.3699,
      "step": 120000
    },
    {
      "epoch": 1.54,
      "eval_loss": 1.323049545288086,
      "eval_runtime": 3382.1272,
      "eval_samples_per_second": 39.13,
      "eval_steps_per_second": 4.891,
      "step": 120000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4156382837264566e-05,
      "loss": 1.3738,
      "step": 120500
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.4049147911278114e-05,
      "loss": 1.3661,
      "step": 121000
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.394191298529166e-05,
      "loss": 1.3682,
      "step": 121500
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3834678059305205e-05,
      "loss": 1.3731,
      "step": 122000
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.372744313331875e-05,
      "loss": 1.3668,
      "step": 122500
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3620208207332296e-05,
      "loss": 1.3665,
      "step": 123000
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3512973281345842e-05,
      "loss": 1.3606,
      "step": 123500
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3405738355359387e-05,
      "loss": 1.3559,
      "step": 124000
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3298503429372933e-05,
      "loss": 1.3663,
      "step": 124500
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3191268503386478e-05,
      "loss": 1.3656,
      "step": 125000
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3084033577400027e-05,
      "loss": 1.3765,
      "step": 125500
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.2976798651413573e-05,
      "loss": 1.3688,
      "step": 126000
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2869563725427118e-05,
      "loss": 1.3631,
      "step": 126500
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2762328799440664e-05,
      "loss": 1.3493,
      "step": 127000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.265509387345421e-05,
      "loss": 1.36,
      "step": 127500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2547858947467758e-05,
      "loss": 1.3563,
      "step": 128000
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2440624021481303e-05,
      "loss": 1.3518,
      "step": 128500
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.233338909549485e-05,
      "loss": 1.3553,
      "step": 129000
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.222615416950839e-05,
      "loss": 1.3541,
      "step": 129500
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.211891924352194e-05,
      "loss": 1.3482,
      "step": 130000
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.2011684317535485e-05,
      "loss": 1.3502,
      "step": 130500
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.190444939154903e-05,
      "loss": 1.352,
      "step": 131000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1797214465562576e-05,
      "loss": 1.3555,
      "step": 131500
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.168997953957612e-05,
      "loss": 1.3587,
      "step": 132000
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.158274461358967e-05,
      "loss": 1.3456,
      "step": 132500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1475509687603216e-05,
      "loss": 1.3486,
      "step": 133000
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.136827476161676e-05,
      "loss": 1.3491,
      "step": 133500
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1261039835630307e-05,
      "loss": 1.3479,
      "step": 134000
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1153804909643852e-05,
      "loss": 1.3405,
      "step": 134500
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.1046569983657398e-05,
      "loss": 1.3493,
      "step": 135000
    },
    {
      "epoch": 1.74,
      "eval_loss": 1.301226258277893,
      "eval_runtime": 3380.5336,
      "eval_samples_per_second": 39.149,
      "eval_steps_per_second": 4.894,
      "step": 135000
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.0939335057670943e-05,
      "loss": 1.3478,
      "step": 135500
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.083210013168449e-05,
      "loss": 1.3441,
      "step": 136000
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.0724865205698034e-05,
      "loss": 1.3483,
      "step": 136500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.061763027971158e-05,
      "loss": 1.3367,
      "step": 137000
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.051039535372513e-05,
      "loss": 1.3468,
      "step": 137500
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0403160427738674e-05,
      "loss": 1.3434,
      "step": 138000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.029592550175222e-05,
      "loss": 1.3377,
      "step": 138500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0188690575765765e-05,
      "loss": 1.3383,
      "step": 139000
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.0081455649779314e-05,
      "loss": 1.3427,
      "step": 139500
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.997422072379286e-05,
      "loss": 1.3477,
      "step": 140000
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9866985797806405e-05,
      "loss": 1.3364,
      "step": 140500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.975975087181995e-05,
      "loss": 1.3447,
      "step": 141000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9652515945833492e-05,
      "loss": 1.3365,
      "step": 141500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.954528101984704e-05,
      "loss": 1.3231,
      "step": 142000
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9438046093860587e-05,
      "loss": 1.3362,
      "step": 142500
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9330811167874132e-05,
      "loss": 1.3415,
      "step": 143000
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9223576241887678e-05,
      "loss": 1.3323,
      "step": 143500
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9116341315901223e-05,
      "loss": 1.3316,
      "step": 144000
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.9009106389914772e-05,
      "loss": 1.3304,
      "step": 144500
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8901871463928317e-05,
      "loss": 1.329,
      "step": 145000
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8794636537941863e-05,
      "loss": 1.3268,
      "step": 145500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8687401611955408e-05,
      "loss": 1.337,
      "step": 146000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8580166685968957e-05,
      "loss": 1.3277,
      "step": 146500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.84729317599825e-05,
      "loss": 1.323,
      "step": 147000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8365696833996045e-05,
      "loss": 1.3318,
      "step": 147500
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.825846190800959e-05,
      "loss": 1.3227,
      "step": 148000
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8151226982023136e-05,
      "loss": 1.3334,
      "step": 148500
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.8043992056036685e-05,
      "loss": 1.3255,
      "step": 149000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.793675713005023e-05,
      "loss": 1.3244,
      "step": 149500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7829522204063775e-05,
      "loss": 1.3201,
      "step": 150000
    },
    {
      "epoch": 1.93,
      "eval_loss": 1.2773252725601196,
      "eval_runtime": 3341.8511,
      "eval_samples_per_second": 39.602,
      "eval_steps_per_second": 4.95,
      "step": 150000
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.772228727807732e-05,
      "loss": 1.3188,
      "step": 150500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7615052352090866e-05,
      "loss": 1.3203,
      "step": 151000
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7507817426104415e-05,
      "loss": 1.3123,
      "step": 151500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.740058250011796e-05,
      "loss": 1.319,
      "step": 152000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7293347574131506e-05,
      "loss": 1.3225,
      "step": 152500
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.718611264814505e-05,
      "loss": 1.3231,
      "step": 153000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.7078877722158597e-05,
      "loss": 1.3213,
      "step": 153500
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.6971642796172143e-05,
      "loss": 1.315,
      "step": 154000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6864407870185688e-05,
      "loss": 1.3203,
      "step": 154500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6757172944199234e-05,
      "loss": 1.3208,
      "step": 155000
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.664993801821278e-05,
      "loss": 1.3155,
      "step": 155500
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6542703092226328e-05,
      "loss": 1.3038,
      "step": 156000
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6435468166239873e-05,
      "loss": 1.2996,
      "step": 156500
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.632823324025342e-05,
      "loss": 1.3023,
      "step": 157000
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6220998314266964e-05,
      "loss": 1.3112,
      "step": 157500
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.611376338828051e-05,
      "loss": 1.3002,
      "step": 158000
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.600652846229406e-05,
      "loss": 1.2967,
      "step": 158500
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.58992935363076e-05,
      "loss": 1.2996,
      "step": 159000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5792058610321146e-05,
      "loss": 1.3006,
      "step": 159500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.568482368433469e-05,
      "loss": 1.2903,
      "step": 160000
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.557758875834824e-05,
      "loss": 1.3077,
      "step": 160500
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5470353832361786e-05,
      "loss": 1.2952,
      "step": 161000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.536311890637533e-05,
      "loss": 1.2984,
      "step": 161500
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5255883980388877e-05,
      "loss": 1.3062,
      "step": 162000
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5148649054402422e-05,
      "loss": 1.3005,
      "step": 162500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5041414128415971e-05,
      "loss": 1.2961,
      "step": 163000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.4934179202429515e-05,
      "loss": 1.299,
      "step": 163500
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.482694427644306e-05,
      "loss": 1.2931,
      "step": 164000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4719709350456606e-05,
      "loss": 1.2962,
      "step": 164500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4612474424470151e-05,
      "loss": 1.2993,
      "step": 165000
    },
    {
      "epoch": 2.12,
      "eval_loss": 1.261716604232788,
      "eval_runtime": 3339.8508,
      "eval_samples_per_second": 39.626,
      "eval_steps_per_second": 4.953,
      "step": 165000
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.45052394984837e-05,
      "loss": 1.2979,
      "step": 165500
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4398004572497246e-05,
      "loss": 1.2992,
      "step": 166000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4290769646510791e-05,
      "loss": 1.3039,
      "step": 166500
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4183534720524335e-05,
      "loss": 1.2953,
      "step": 167000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.4076299794537884e-05,
      "loss": 1.2837,
      "step": 167500
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.396906486855143e-05,
      "loss": 1.2971,
      "step": 168000
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3861829942564975e-05,
      "loss": 1.2963,
      "step": 168500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.375459501657852e-05,
      "loss": 1.288,
      "step": 169000
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3647360090592066e-05,
      "loss": 1.2953,
      "step": 169500
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3540125164605613e-05,
      "loss": 1.2892,
      "step": 170000
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3432890238619158e-05,
      "loss": 1.2931,
      "step": 170500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3325655312632704e-05,
      "loss": 1.2865,
      "step": 171000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.321842038664625e-05,
      "loss": 1.2925,
      "step": 171500
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3111185460659795e-05,
      "loss": 1.286,
      "step": 172000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3003950534673342e-05,
      "loss": 1.2949,
      "step": 172500
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2896715608686887e-05,
      "loss": 1.2841,
      "step": 173000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2789480682700433e-05,
      "loss": 1.2869,
      "step": 173500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2682245756713978e-05,
      "loss": 1.2791,
      "step": 174000
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2575010830727527e-05,
      "loss": 1.2822,
      "step": 174500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2467775904741071e-05,
      "loss": 1.2857,
      "step": 175000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2360540978754616e-05,
      "loss": 1.2913,
      "step": 175500
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2253306052768162e-05,
      "loss": 1.273,
      "step": 176000
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2146071126781709e-05,
      "loss": 1.2783,
      "step": 176500
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.2038836200795255e-05,
      "loss": 1.2707,
      "step": 177000
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.1931601274808802e-05,
      "loss": 1.2747,
      "step": 177500
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1824366348822347e-05,
      "loss": 1.2814,
      "step": 178000
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1717131422835893e-05,
      "loss": 1.2862,
      "step": 178500
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1609896496849438e-05,
      "loss": 1.2773,
      "step": 179000
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1502661570862984e-05,
      "loss": 1.2783,
      "step": 179500
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.139542664487653e-05,
      "loss": 1.2745,
      "step": 180000
    },
    {
      "epoch": 2.32,
      "eval_loss": 1.2489670515060425,
      "eval_runtime": 3344.7969,
      "eval_samples_per_second": 39.567,
      "eval_steps_per_second": 4.946,
      "step": 180000
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1288191718890076e-05,
      "loss": 1.2783,
      "step": 180500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1180956792903623e-05,
      "loss": 1.2782,
      "step": 181000
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1073721866917169e-05,
      "loss": 1.2727,
      "step": 181500
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.0966486940930713e-05,
      "loss": 1.2769,
      "step": 182000
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.085925201494426e-05,
      "loss": 1.2743,
      "step": 182500
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0752017088957805e-05,
      "loss": 1.271,
      "step": 183000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0644782162971352e-05,
      "loss": 1.2747,
      "step": 183500
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0537547236984898e-05,
      "loss": 1.2733,
      "step": 184000
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0430312310998443e-05,
      "loss": 1.2719,
      "step": 184500
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0323077385011989e-05,
      "loss": 1.2724,
      "step": 185000
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0215842459025534e-05,
      "loss": 1.2699,
      "step": 185500
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0108607533039081e-05,
      "loss": 1.2707,
      "step": 186000
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.0001372607052627e-05,
      "loss": 1.2805,
      "step": 186500
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.894137681066174e-06,
      "loss": 1.2695,
      "step": 187000
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.78690275507972e-06,
      "loss": 1.2618,
      "step": 187500
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.679667829093265e-06,
      "loss": 1.2636,
      "step": 188000
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.57243290310681e-06,
      "loss": 1.2738,
      "step": 188500
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.465197977120356e-06,
      "loss": 1.266,
      "step": 189000
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.357963051133903e-06,
      "loss": 1.2702,
      "step": 189500
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.250728125147449e-06,
      "loss": 1.2629,
      "step": 190000
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.143493199160994e-06,
      "loss": 1.2653,
      "step": 190500
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.03625827317454e-06,
      "loss": 1.2544,
      "step": 191000
    },
    {
      "epoch": 2.46,
      "learning_rate": 8.929023347188087e-06,
      "loss": 1.2661,
      "step": 191500
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.821788421201632e-06,
      "loss": 1.2697,
      "step": 192000
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.714553495215178e-06,
      "loss": 1.262,
      "step": 192500
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.607318569228725e-06,
      "loss": 1.2724,
      "step": 193000
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.50008364324227e-06,
      "loss": 1.2617,
      "step": 193500
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.392848717255816e-06,
      "loss": 1.263,
      "step": 194000
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.285613791269361e-06,
      "loss": 1.2629,
      "step": 194500
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.178378865282908e-06,
      "loss": 1.2614,
      "step": 195000
    },
    {
      "epoch": 2.51,
      "eval_loss": 1.2283380031585693,
      "eval_runtime": 3383.2987,
      "eval_samples_per_second": 39.117,
      "eval_steps_per_second": 4.89,
      "step": 195000
    },
    {
      "epoch": 2.52,
      "learning_rate": 8.071143939296454e-06,
      "loss": 1.2589,
      "step": 195500
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.96390901331e-06,
      "loss": 1.2497,
      "step": 196000
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.856674087323545e-06,
      "loss": 1.256,
      "step": 196500
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.74943916133709e-06,
      "loss": 1.2563,
      "step": 197000
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.642204235350637e-06,
      "loss": 1.2531,
      "step": 197500
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.534969309364183e-06,
      "loss": 1.2586,
      "step": 198000
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.427734383377728e-06,
      "loss": 1.2589,
      "step": 198500
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.320499457391275e-06,
      "loss": 1.2623,
      "step": 199000
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.21326453140482e-06,
      "loss": 1.2573,
      "step": 199500
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.106029605418367e-06,
      "loss": 1.2487,
      "step": 200000
    },
    {
      "epoch": 2.58,
      "learning_rate": 6.998794679431912e-06,
      "loss": 1.2522,
      "step": 200500
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.891559753445459e-06,
      "loss": 1.2486,
      "step": 201000
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.7843248274590046e-06,
      "loss": 1.253,
      "step": 201500
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.67708990147255e-06,
      "loss": 1.2453,
      "step": 202000
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.569854975486096e-06,
      "loss": 1.2463,
      "step": 202500
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.462620049499642e-06,
      "loss": 1.2485,
      "step": 203000
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.355385123513188e-06,
      "loss": 1.2521,
      "step": 203500
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.248150197526734e-06,
      "loss": 1.2441,
      "step": 204000
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.14091527154028e-06,
      "loss": 1.2513,
      "step": 204500
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.033680345553825e-06,
      "loss": 1.2594,
      "step": 205000
    },
    {
      "epoch": 2.64,
      "learning_rate": 5.926445419567372e-06,
      "loss": 1.2539,
      "step": 205500
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.819210493580918e-06,
      "loss": 1.2503,
      "step": 206000
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.7119755675944635e-06,
      "loss": 1.2463,
      "step": 206500
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.60474064160801e-06,
      "loss": 1.2378,
      "step": 207000
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.497505715621555e-06,
      "loss": 1.2447,
      "step": 207500
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.390270789635101e-06,
      "loss": 1.2434,
      "step": 208000
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.283035863648647e-06,
      "loss": 1.2469,
      "step": 208500
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.175800937662193e-06,
      "loss": 1.2505,
      "step": 209000
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.068566011675739e-06,
      "loss": 1.2485,
      "step": 209500
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.961331085689285e-06,
      "loss": 1.2424,
      "step": 210000
    },
    {
      "epoch": 2.7,
      "eval_loss": 1.2152148485183716,
      "eval_runtime": 3385.5322,
      "eval_samples_per_second": 39.091,
      "eval_steps_per_second": 4.886,
      "step": 210000
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.854096159702831e-06,
      "loss": 1.2362,
      "step": 210500
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.746861233716376e-06,
      "loss": 1.2378,
      "step": 211000
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.6396263077299224e-06,
      "loss": 1.2462,
      "step": 211500
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.532391381743469e-06,
      "loss": 1.2415,
      "step": 212000
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.425156455757014e-06,
      "loss": 1.248,
      "step": 212500
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.3179215297705605e-06,
      "loss": 1.2307,
      "step": 213000
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.210686603784107e-06,
      "loss": 1.2438,
      "step": 213500
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.1034516777976515e-06,
      "loss": 1.2451,
      "step": 214000
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.996216751811198e-06,
      "loss": 1.2388,
      "step": 214500
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.888981825824744e-06,
      "loss": 1.2413,
      "step": 215000
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.78174689983829e-06,
      "loss": 1.241,
      "step": 215500
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.674511973851836e-06,
      "loss": 1.2288,
      "step": 216000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.5672770478653818e-06,
      "loss": 1.2341,
      "step": 216500
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.460042121878928e-06,
      "loss": 1.2368,
      "step": 217000
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.352807195892473e-06,
      "loss": 1.2298,
      "step": 217500
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.2455722699060195e-06,
      "loss": 1.2289,
      "step": 218000
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.1383373439195654e-06,
      "loss": 1.2313,
      "step": 218500
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.0311024179331113e-06,
      "loss": 1.2341,
      "step": 219000
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.923867491946657e-06,
      "loss": 1.2347,
      "step": 219500
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.816632565960203e-06,
      "loss": 1.2415,
      "step": 220000
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.709397639973749e-06,
      "loss": 1.2351,
      "step": 220500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.602162713987295e-06,
      "loss": 1.2316,
      "step": 221000
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.494927788000841e-06,
      "loss": 1.229,
      "step": 221500
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.3876928620143866e-06,
      "loss": 1.2256,
      "step": 222000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.2804579360279325e-06,
      "loss": 1.2342,
      "step": 222500
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.173223010041479e-06,
      "loss": 1.2367,
      "step": 223000
    },
    {
      "epoch": 2.88,
      "learning_rate": 2.0659880840550243e-06,
      "loss": 1.2343,
      "step": 223500
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.95875315806857e-06,
      "loss": 1.2325,
      "step": 224000
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.8515182320821163e-06,
      "loss": 1.2304,
      "step": 224500
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.7442833060956624e-06,
      "loss": 1.2296,
      "step": 225000
    },
    {
      "epoch": 2.9,
      "eval_loss": 1.2051972150802612,
      "eval_runtime": 3383.7385,
      "eval_samples_per_second": 39.112,
      "eval_steps_per_second": 4.889,
      "step": 225000
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.637048380109208e-06,
      "loss": 1.2192,
      "step": 225500
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.529813454122754e-06,
      "loss": 1.2305,
      "step": 226000
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.4225785281362999e-06,
      "loss": 1.2292,
      "step": 226500
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.315343602149846e-06,
      "loss": 1.2292,
      "step": 227000
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.2081086761633916e-06,
      "loss": 1.2286,
      "step": 227500
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.1008737501769378e-06,
      "loss": 1.2283,
      "step": 228000
    },
    {
      "epoch": 2.94,
      "learning_rate": 9.936388241904836e-07,
      "loss": 1.2272,
      "step": 228500
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.864038982040294e-07,
      "loss": 1.2292,
      "step": 229000
    },
    {
      "epoch": 2.95,
      "learning_rate": 7.791689722175754e-07,
      "loss": 1.2343,
      "step": 229500
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.719340462311213e-07,
      "loss": 1.2179,
      "step": 230000
    },
    {
      "epoch": 2.97,
      "learning_rate": 5.646991202446672e-07,
      "loss": 1.2239,
      "step": 230500
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.5746419425821316e-07,
      "loss": 1.2263,
      "step": 231000
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.5022926827175906e-07,
      "loss": 1.2263,
      "step": 231500
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.4299434228530495e-07,
      "loss": 1.2282,
      "step": 232000
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.3575941629885087e-07,
      "loss": 1.2334,
      "step": 232500
    },
    {
      "epoch": 3.0,
      "learning_rate": 2.8524490312396787e-08,
      "loss": 1.224,
      "step": 233000
    },
    {
      "epoch": 3.0,
      "step": 233133,
      "total_flos": 4.975552182291128e+18,
      "train_loss": 1.3875968830903724,
      "train_runtime": 695161.7821,
      "train_samples_per_second": 10.732,
      "train_steps_per_second": 0.335
    }
  ],
  "max_steps": 233133,
  "num_train_epochs": 3,
  "total_flos": 4.975552182291128e+18,
  "trial_name": null,
  "trial_params": null
}