{
  "best_metric": 0.6636257171630859,
  "best_model_checkpoint": "./enko_mbartLarge_100p_run1/checkpoint-190759",
  "epoch": 6.0,
  "eval_steps": 500,
  "global_step": 381519,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 9.940000000000001e-06,
      "loss": 1.6525,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.994e-05,
      "loss": 1.3306,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.994e-05,
      "loss": 1.3005,
      "step": 1500
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.994e-05,
      "loss": 1.2871,
      "step": 2000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9940000000000006e-05,
      "loss": 1.306,
      "step": 2500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9973930136971904e-05,
      "loss": 1.2876,
      "step": 3000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.994770259332065e-05,
      "loss": 1.2609,
      "step": 3500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.992142248946168e-05,
      "loss": 1.244,
      "step": 4000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9895142385602714e-05,
      "loss": 1.2051,
      "step": 4500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.986886228174374e-05,
      "loss": 1.1997,
      "step": 5000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9842582177884766e-05,
      "loss": 1.1854,
      "step": 5500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.98163020740258e-05,
      "loss": 1.1666,
      "step": 6000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.9790021970166825e-05,
      "loss": 1.1596,
      "step": 6500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.976374186630786e-05,
      "loss": 1.1559,
      "step": 7000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.973746176244889e-05,
      "loss": 1.1555,
      "step": 7500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.9711181658589916e-05,
      "loss": 1.1458,
      "step": 8000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.968495411493866e-05,
      "loss": 1.1098,
      "step": 8500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.9658674011079694e-05,
      "loss": 1.1169,
      "step": 9000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.963239390722073e-05,
      "loss": 1.114,
      "step": 9500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.960616636356947e-05,
      "loss": 1.1099,
      "step": 10000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.95798862597105e-05,
      "loss": 1.0846,
      "step": 10500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.955360615585153e-05,
      "loss": 1.1021,
      "step": 11000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.952732605199256e-05,
      "loss": 1.0739,
      "step": 11500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.950104594813359e-05,
      "loss": 1.0657,
      "step": 12000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.9474765844274615e-05,
      "loss": 1.0641,
      "step": 12500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.944848574041565e-05,
      "loss": 1.0696,
      "step": 13000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.94222581967644e-05,
      "loss": 1.0457,
      "step": 13500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.9396030653113144e-05,
      "loss": 1.0573,
      "step": 14000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.936975054925417e-05,
      "loss": 1.0302,
      "step": 14500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.93434704453952e-05,
      "loss": 1.0347,
      "step": 15000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.9317190341536236e-05,
      "loss": 1.0304,
      "step": 15500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.929096279788498e-05,
      "loss": 1.0454,
      "step": 16000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.926468269402601e-05,
      "loss": 1.0406,
      "step": 16500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.923840259016704e-05,
      "loss": 1.0306,
      "step": 17000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.921212248630807e-05,
      "loss": 1.0306,
      "step": 17500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.91858423824491e-05,
      "loss": 1.0237,
      "step": 18000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.9159562278590124e-05,
      "loss": 1.0113,
      "step": 18500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.913328217473116e-05,
      "loss": 1.0101,
      "step": 19000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.910700207087219e-05,
      "loss": 0.9928,
      "step": 19500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.9080721967013216e-05,
      "loss": 0.9942,
      "step": 20000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.905444186315424e-05,
      "loss": 1.0043,
      "step": 20500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.9028161759295275e-05,
      "loss": 1.0028,
      "step": 21000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.900188165543631e-05,
      "loss": 0.9883,
      "step": 21500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.897565411178505e-05,
      "loss": 1.0023,
      "step": 22000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.89494265681338e-05,
      "loss": 0.9935,
      "step": 22500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.892314646427483e-05,
      "loss": 0.9928,
      "step": 23000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.889686636041586e-05,
      "loss": 0.9948,
      "step": 23500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.887058625655689e-05,
      "loss": 0.974,
      "step": 24000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.8844306152697915e-05,
      "loss": 0.9837,
      "step": 24500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.881802604883895e-05,
      "loss": 0.9751,
      "step": 25000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.879174594497998e-05,
      "loss": 0.9702,
      "step": 25500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.8765465841121006e-05,
      "loss": 0.9717,
      "step": 26000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.873918573726204e-05,
      "loss": 0.9724,
      "step": 26500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.8712905633403065e-05,
      "loss": 0.9692,
      "step": 27000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.868667808975182e-05,
      "loss": 0.9535,
      "step": 27500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.866039798589284e-05,
      "loss": 0.9641,
      "step": 28000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.863417044224159e-05,
      "loss": 0.9774,
      "step": 28500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.860789033838262e-05,
      "loss": 0.9671,
      "step": 29000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.858161023452365e-05,
      "loss": 0.9458,
      "step": 29500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.855533013066468e-05,
      "loss": 0.9606,
      "step": 30000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.8529050026805705e-05,
      "loss": 0.9557,
      "step": 30500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.850276992294674e-05,
      "loss": 0.9664,
      "step": 31000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.8476489819087764e-05,
      "loss": 0.9427,
      "step": 31500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.84502097152288e-05,
      "loss": 0.957,
      "step": 32000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.842392961136983e-05,
      "loss": 0.9466,
      "step": 32500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.8397649507510856e-05,
      "loss": 0.9262,
      "step": 33000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.837136940365188e-05,
      "loss": 0.9524,
      "step": 33500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.8345089299792914e-05,
      "loss": 0.9303,
      "step": 34000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.831880919593395e-05,
      "loss": 0.9355,
      "step": 34500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.829252909207497e-05,
      "loss": 0.9463,
      "step": 35000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.8266248988216006e-05,
      "loss": 0.9333,
      "step": 35500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.823996888435703e-05,
      "loss": 0.9293,
      "step": 36000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.8213688780498065e-05,
      "loss": 0.9272,
      "step": 36500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.818740867663909e-05,
      "loss": 0.928,
      "step": 37000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.8161128572780124e-05,
      "loss": 0.9346,
      "step": 37500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.813484846892115e-05,
      "loss": 0.9361,
      "step": 38000
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.81086209252699e-05,
      "loss": 0.9197,
      "step": 38500
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.8082393381618646e-05,
      "loss": 0.9289,
      "step": 39000
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.805611327775967e-05,
      "loss": 0.9232,
      "step": 39500
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.8029833173900705e-05,
      "loss": 0.9134,
      "step": 40000
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.800355307004174e-05,
      "loss": 0.9232,
      "step": 40500
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.7977272966182764e-05,
      "loss": 0.9315,
      "step": 41000
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.7950992862323796e-05,
      "loss": 0.9152,
      "step": 41500
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.792471275846482e-05,
      "loss": 0.9179,
      "step": 42000
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.7898485214813574e-05,
      "loss": 0.914,
      "step": 42500
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.787225767116232e-05,
      "loss": 0.9078,
      "step": 43000
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.7845977567303345e-05,
      "loss": 0.9129,
      "step": 43500
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.781969746344438e-05,
      "loss": 0.899,
      "step": 44000
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.779341735958541e-05,
      "loss": 0.9003,
      "step": 44500
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.7767137255726436e-05,
      "loss": 0.8976,
      "step": 45000
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.774085715186747e-05,
      "loss": 0.9007,
      "step": 45500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.7714577048008495e-05,
      "loss": 0.9053,
      "step": 46000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.768829694414952e-05,
      "loss": 0.892,
      "step": 46500
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.766201684029056e-05,
      "loss": 0.9049,
      "step": 47000
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.763573673643159e-05,
      "loss": 0.8988,
      "step": 47500
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.760956175298805e-05,
      "loss": 0.9086,
      "step": 48000
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.758328164912908e-05,
      "loss": 0.903,
      "step": 48500
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.755700154527011e-05,
      "loss": 0.895,
      "step": 49000
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.753072144141114e-05,
      "loss": 0.8982,
      "step": 49500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.750444133755217e-05,
      "loss": 0.9047,
      "step": 50000
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.7478161233693194e-05,
      "loss": 0.9088,
      "step": 50500
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.745188112983423e-05,
      "loss": 0.892,
      "step": 51000
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.742560102597526e-05,
      "loss": 0.89,
      "step": 51500
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.7399320922116286e-05,
      "loss": 0.8909,
      "step": 52000
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.737309337846503e-05,
      "loss": 0.8792,
      "step": 52500
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.734681327460606e-05,
      "loss": 0.8783,
      "step": 53000
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.7320533170747096e-05,
      "loss": 0.8869,
      "step": 53500
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.729425306688812e-05,
      "loss": 0.8873,
      "step": 54000
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.726797296302915e-05,
      "loss": 0.8927,
      "step": 54500
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.72417454193779e-05,
      "loss": 0.8817,
      "step": 55000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.721546531551893e-05,
      "loss": 0.8746,
      "step": 55500
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.718918521165996e-05,
      "loss": 0.8845,
      "step": 56000
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.7162905107800984e-05,
      "loss": 0.8768,
      "step": 56500
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.713662500394202e-05,
      "loss": 0.8834,
      "step": 57000
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.711034490008305e-05,
      "loss": 0.8769,
      "step": 57500
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.7084064796224076e-05,
      "loss": 0.865,
      "step": 58000
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.705783725257282e-05,
      "loss": 0.8786,
      "step": 58500
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.7031557148713854e-05,
      "loss": 0.885,
      "step": 59000
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.7005277044854886e-05,
      "loss": 0.8804,
      "step": 59500
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.697899694099591e-05,
      "loss": 0.8834,
      "step": 60000
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.6952716837136945e-05,
      "loss": 0.8698,
      "step": 60500
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.692643673327797e-05,
      "loss": 0.8605,
      "step": 61000
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.6900156629419004e-05,
      "loss": 0.8721,
      "step": 61500
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.687387652556003e-05,
      "loss": 0.8914,
      "step": 62000
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.684759642170106e-05,
      "loss": 0.8743,
      "step": 62500
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.682131631784209e-05,
      "loss": 0.8677,
      "step": 63000
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.679514133439856e-05,
      "loss": 0.8673,
      "step": 63500
    },
    {
      "epoch": 1.0,
      "eval_bleu": 52.6771,
      "eval_gen_len": 15.2057,
      "eval_loss": 0.7307217121124268,
      "eval_runtime": 9605.9098,
      "eval_samples_per_second": 13.439,
      "eval_steps_per_second": 1.68,
      "step": 63586
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.6768913790747304e-05,
      "loss": 0.7651,
      "step": 64000
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.674263368688833e-05,
      "loss": 0.7382,
      "step": 64500
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.671635358302936e-05,
      "loss": 0.7193,
      "step": 65000
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.6690073479170396e-05,
      "loss": 0.7344,
      "step": 65500
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.666384593551914e-05,
      "loss": 0.7191,
      "step": 66000
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.6637565831660166e-05,
      "loss": 0.7228,
      "step": 66500
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.66112857278012e-05,
      "loss": 0.7285,
      "step": 67000
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.658500562394223e-05,
      "loss": 0.7373,
      "step": 67500
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.655872552008326e-05,
      "loss": 0.7209,
      "step": 68000
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.6532445416224284e-05,
      "loss": 0.7345,
      "step": 68500
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.6506217872573036e-05,
      "loss": 0.7348,
      "step": 69000
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.647993776871407e-05,
      "loss": 0.7391,
      "step": 69500
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.6453657664855094e-05,
      "loss": 0.7367,
      "step": 70000
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.642737756099612e-05,
      "loss": 0.7282,
      "step": 70500
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.640109745713715e-05,
      "loss": 0.7379,
      "step": 71000
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.6374817353278186e-05,
      "loss": 0.7313,
      "step": 71500
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.634853724941921e-05,
      "loss": 0.7399,
      "step": 72000
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.6322257145560245e-05,
      "loss": 0.731,
      "step": 72500
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.629597704170127e-05,
      "loss": 0.7435,
      "step": 73000
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.62696969378423e-05,
      "loss": 0.7361,
      "step": 73500
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.624341683398333e-05,
      "loss": 0.7315,
      "step": 74000
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.621713673012436e-05,
      "loss": 0.7404,
      "step": 74500
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.619090918647311e-05,
      "loss": 0.7399,
      "step": 75000
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.616468164282186e-05,
      "loss": 0.7378,
      "step": 75500
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.6138401538962885e-05,
      "loss": 0.7391,
      "step": 76000
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.611212143510391e-05,
      "loss": 0.7499,
      "step": 76500
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.6085841331244944e-05,
      "loss": 0.7537,
      "step": 77000
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.605956122738597e-05,
      "loss": 0.7423,
      "step": 77500
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.6033281123527e-05,
      "loss": 0.7453,
      "step": 78000
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.6007001019668035e-05,
      "loss": 0.7439,
      "step": 78500
    },
    {
      "epoch": 1.24,
      "learning_rate": 4.598072091580906e-05,
      "loss": 0.7446,
      "step": 79000
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.595444081195009e-05,
      "loss": 0.7412,
      "step": 79500
    },
    {
      "epoch": 1.26,
      "learning_rate": 4.592821326829884e-05,
      "loss": 0.7399,
      "step": 80000
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.590193316443987e-05,
      "loss": 0.7302,
      "step": 80500
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.58756530605809e-05,
      "loss": 0.7431,
      "step": 81000
    },
    {
      "epoch": 1.28,
      "learning_rate": 4.5849372956721924e-05,
      "loss": 0.7437,
      "step": 81500
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.5823145413070675e-05,
      "loss": 0.7443,
      "step": 82000
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.579686530921171e-05,
      "loss": 0.7496,
      "step": 82500
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.577063776556045e-05,
      "loss": 0.7395,
      "step": 83000
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.574435766170148e-05,
      "loss": 0.7307,
      "step": 83500
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.571807755784251e-05,
      "loss": 0.7467,
      "step": 84000
    },
    {
      "epoch": 1.33,
      "learning_rate": 4.5691797453983544e-05,
      "loss": 0.7474,
      "step": 84500
    },
    {
      "epoch": 1.34,
      "learning_rate": 4.566551735012457e-05,
      "loss": 0.7352,
      "step": 85000
    },
    {
      "epoch": 1.34,
      "learning_rate": 4.5639237246265596e-05,
      "loss": 0.7377,
      "step": 85500
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.561295714240663e-05,
      "loss": 0.7389,
      "step": 86000
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.558667703854766e-05,
      "loss": 0.7372,
      "step": 86500
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.556039693468869e-05,
      "loss": 0.7488,
      "step": 87000
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.553416939103743e-05,
      "loss": 0.7303,
      "step": 87500
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.5507889287178466e-05,
      "loss": 0.7243,
      "step": 88000
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.54816091833195e-05,
      "loss": 0.7318,
      "step": 88500
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.5455329079460524e-05,
      "loss": 0.7421,
      "step": 89000
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.542904897560155e-05,
      "loss": 0.7441,
      "step": 89500
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.540276887174258e-05,
      "loss": 0.7388,
      "step": 90000
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.537659388829905e-05,
      "loss": 0.7381,
      "step": 90500
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.535031378444008e-05,
      "loss": 0.7402,
      "step": 91000
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.5324033680581106e-05,
      "loss": 0.7377,
      "step": 91500
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.529775357672214e-05,
      "loss": 0.7474,
      "step": 92000
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.527152603307088e-05,
      "loss": 0.7473,
      "step": 92500
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.5245245929211916e-05,
      "loss": 0.7345,
      "step": 93000
    },
    {
      "epoch": 1.47,
      "learning_rate": 4.521896582535294e-05,
      "loss": 0.7366,
      "step": 93500
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.5192685721493975e-05,
      "loss": 0.7518,
      "step": 94000
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.516640561763501e-05,
      "loss": 0.7454,
      "step": 94500
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.514017807398375e-05,
      "loss": 0.7432,
      "step": 95000
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.511389797012478e-05,
      "loss": 0.7448,
      "step": 95500
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.508761786626581e-05,
      "loss": 0.7415,
      "step": 96000
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.5061337762406844e-05,
      "loss": 0.7358,
      "step": 96500
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.503505765854787e-05,
      "loss": 0.7491,
      "step": 97000
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.5008777554688896e-05,
      "loss": 0.7429,
      "step": 97500
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.498249745082993e-05,
      "loss": 0.748,
      "step": 98000
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.4956217346970955e-05,
      "loss": 0.7467,
      "step": 98500
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.492993724311199e-05,
      "loss": 0.7459,
      "step": 99000
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.490365713925302e-05,
      "loss": 0.7402,
      "step": 99500
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.4877377035394046e-05,
      "loss": 0.7532,
      "step": 100000
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.485120205195052e-05,
      "loss": 0.7464,
      "step": 100500
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.482492194809154e-05,
      "loss": 0.7441,
      "step": 101000
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.479869440444029e-05,
      "loss": 0.7445,
      "step": 101500
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.4772414300581314e-05,
      "loss": 0.7316,
      "step": 102000
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.474613419672235e-05,
      "loss": 0.7437,
      "step": 102500
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.471985409286338e-05,
      "loss": 0.7456,
      "step": 103000
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.4693573989004405e-05,
      "loss": 0.7508,
      "step": 103500
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.466729388514543e-05,
      "loss": 0.7436,
      "step": 104000
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.4641013781286464e-05,
      "loss": 0.7413,
      "step": 104500
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.46147336774275e-05,
      "loss": 0.7305,
      "step": 105000
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.458845357356852e-05,
      "loss": 0.7496,
      "step": 105500
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.4562173469709555e-05,
      "loss": 0.7467,
      "step": 106000
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.453589336585058e-05,
      "loss": 0.7404,
      "step": 106500
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.4509613261991614e-05,
      "loss": 0.74,
      "step": 107000
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.448333315813265e-05,
      "loss": 0.7432,
      "step": 107500
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.445705305427367e-05,
      "loss": 0.7401,
      "step": 108000
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.44307729504147e-05,
      "loss": 0.7382,
      "step": 108500
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.440449284655573e-05,
      "loss": 0.7352,
      "step": 109000
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.4378212742696765e-05,
      "loss": 0.7293,
      "step": 109500
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.435193263883779e-05,
      "loss": 0.742,
      "step": 110000
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.432565253497882e-05,
      "loss": 0.7426,
      "step": 110500
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.429942499132757e-05,
      "loss": 0.7428,
      "step": 111000
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.42731448874686e-05,
      "loss": 0.7391,
      "step": 111500
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.4246917343817346e-05,
      "loss": 0.7449,
      "step": 112000
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.422063723995837e-05,
      "loss": 0.7377,
      "step": 112500
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.4194357136099405e-05,
      "loss": 0.7468,
      "step": 113000
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.416807703224044e-05,
      "loss": 0.7398,
      "step": 113500
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.4141796928381463e-05,
      "loss": 0.7371,
      "step": 114000
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.411556938473021e-05,
      "loss": 0.7371,
      "step": 114500
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.4089289280871234e-05,
      "loss": 0.7443,
      "step": 115000
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.4063009177012274e-05,
      "loss": 0.7375,
      "step": 115500
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.40367290731533e-05,
      "loss": 0.7339,
      "step": 116000
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.4010448969294326e-05,
      "loss": 0.7267,
      "step": 116500
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.398416886543536e-05,
      "loss": 0.7378,
      "step": 117000
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.3957888761576385e-05,
      "loss": 0.7313,
      "step": 117500
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.393160865771742e-05,
      "loss": 0.739,
      "step": 118000
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.390532855385845e-05,
      "loss": 0.7314,
      "step": 118500
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.3879048449999476e-05,
      "loss": 0.7328,
      "step": 119000
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.385282090634822e-05,
      "loss": 0.741,
      "step": 119500
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.382659336269697e-05,
      "loss": 0.7419,
      "step": 120000
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.3800313258838e-05,
      "loss": 0.7353,
      "step": 120500
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.377403315497903e-05,
      "loss": 0.7315,
      "step": 121000
    },
    {
      "epoch": 1.91,
      "learning_rate": 4.374775305112006e-05,
      "loss": 0.7457,
      "step": 121500
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.372147294726109e-05,
      "loss": 0.7442,
      "step": 122000
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.369519284340212e-05,
      "loss": 0.7421,
      "step": 122500
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.366891273954315e-05,
      "loss": 0.7298,
      "step": 123000
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.3642632635684175e-05,
      "loss": 0.7387,
      "step": 123500
    },
    {
      "epoch": 1.95,
      "learning_rate": 4.361635253182521e-05,
      "loss": 0.7322,
      "step": 124000
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.359012498817395e-05,
      "loss": 0.7308,
      "step": 124500
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.3563844884314985e-05,
      "loss": 0.7491,
      "step": 125000
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.353756478045601e-05,
      "loss": 0.7292,
      "step": 125500
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.3511284676597044e-05,
      "loss": 0.7307,
      "step": 126000
    },
    {
      "epoch": 1.99,
      "learning_rate": 4.348500457273808e-05,
      "loss": 0.7399,
      "step": 126500
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.345877702908682e-05,
      "loss": 0.7184,
      "step": 127000
    },
    {
      "epoch": 2.0,
      "eval_bleu": 55.1744,
      "eval_gen_len": 15.3621,
      "eval_loss": 0.6736727952957153,
      "eval_runtime": 9675.9055,
      "eval_samples_per_second": 13.342,
      "eval_steps_per_second": 1.668,
      "step": 127173
    },
    {
      "epoch": 2.01,
      "learning_rate": 4.343249692522785e-05,
      "loss": 0.6388,
      "step": 127500
    },
    {
      "epoch": 2.01,
      "learning_rate": 4.340621682136888e-05,
      "loss": 0.5765,
      "step": 128000
    },
    {
      "epoch": 2.02,
      "learning_rate": 4.3379936717509913e-05,
      "loss": 0.5839,
      "step": 128500
    },
    {
      "epoch": 2.03,
      "learning_rate": 4.335365661365094e-05,
      "loss": 0.5815,
      "step": 129000
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.3327376509791965e-05,
      "loss": 0.5716,
      "step": 129500
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.3301096405933e-05,
      "loss": 0.5826,
      "step": 130000
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.327481630207403e-05,
      "loss": 0.5783,
      "step": 130500
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.3248588758422776e-05,
      "loss": 0.5879,
      "step": 131000
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.32223086545638e-05,
      "loss": 0.5824,
      "step": 131500
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.3196028550704835e-05,
      "loss": 0.583,
      "step": 132000
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.316974844684587e-05,
      "loss": 0.5797,
      "step": 132500
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.3143468342986893e-05,
      "loss": 0.5963,
      "step": 133000
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.311724079933564e-05,
      "loss": 0.5841,
      "step": 133500
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.309096069547667e-05,
      "loss": 0.5863,
      "step": 134000
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.3064680591617704e-05,
      "loss": 0.5947,
      "step": 134500
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.303840048775873e-05,
      "loss": 0.5977,
      "step": 135000
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.3012172944107475e-05,
      "loss": 0.5848,
      "step": 135500
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.298589284024851e-05,
      "loss": 0.5972,
      "step": 136000
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.295961273638954e-05,
      "loss": 0.5979,
      "step": 136500
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.2933385192738285e-05,
      "loss": 0.5879,
      "step": 137000
    },
    {
      "epoch": 2.16,
      "learning_rate": 4.290710508887931e-05,
      "loss": 0.6024,
      "step": 137500
    },
    {
      "epoch": 2.17,
      "learning_rate": 4.288082498502034e-05,
      "loss": 0.5893,
      "step": 138000
    },
    {
      "epoch": 2.18,
      "learning_rate": 4.285459744136909e-05,
      "loss": 0.6006,
      "step": 138500
    },
    {
      "epoch": 2.19,
      "learning_rate": 4.282831733751012e-05,
      "loss": 0.5983,
      "step": 139000
    },
    {
      "epoch": 2.19,
      "learning_rate": 4.280203723365115e-05,
      "loss": 0.6009,
      "step": 139500
    },
    {
      "epoch": 2.2,
      "learning_rate": 4.2775757129792173e-05,
      "loss": 0.6056,
      "step": 140000
    },
    {
      "epoch": 2.21,
      "learning_rate": 4.274947702593321e-05,
      "loss": 0.5942,
      "step": 140500
    },
    {
      "epoch": 2.22,
      "learning_rate": 4.272330204248967e-05,
      "loss": 0.5949,
      "step": 141000
    },
    {
      "epoch": 2.23,
      "learning_rate": 4.26970219386307e-05,
      "loss": 0.5972,
      "step": 141500
    },
    {
      "epoch": 2.23,
      "learning_rate": 4.2670741834771735e-05,
      "loss": 0.6073,
      "step": 142000
    },
    {
      "epoch": 2.24,
      "learning_rate": 4.264446173091276e-05,
      "loss": 0.6031,
      "step": 142500
    },
    {
      "epoch": 2.25,
      "learning_rate": 4.2618181627053794e-05,
      "loss": 0.5975,
      "step": 143000
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.259190152319482e-05,
      "loss": 0.5965,
      "step": 143500
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.2565621419335846e-05,
      "loss": 0.5991,
      "step": 144000
    },
    {
      "epoch": 2.27,
      "learning_rate": 4.2539341315476886e-05,
      "loss": 0.597,
      "step": 144500
    },
    {
      "epoch": 2.28,
      "learning_rate": 4.251306121161791e-05,
      "loss": 0.5909,
      "step": 145000
    },
    {
      "epoch": 2.29,
      "learning_rate": 4.248678110775894e-05,
      "loss": 0.6014,
      "step": 145500
    },
    {
      "epoch": 2.3,
      "learning_rate": 4.246050100389997e-05,
      "loss": 0.6016,
      "step": 146000
    },
    {
      "epoch": 2.3,
      "learning_rate": 4.2434220900041e-05,
      "loss": 0.6034,
      "step": 146500
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.240794079618203e-05,
      "loss": 0.6033,
      "step": 147000
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.238166069232306e-05,
      "loss": 0.6034,
      "step": 147500
    },
    {
      "epoch": 2.33,
      "learning_rate": 4.235538058846409e-05,
      "loss": 0.6123,
      "step": 148000
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.2329100484605114e-05,
      "loss": 0.6087,
      "step": 148500
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.230282038074615e-05,
      "loss": 0.6092,
      "step": 149000
    },
    {
      "epoch": 2.35,
      "learning_rate": 4.227654027688718e-05,
      "loss": 0.6045,
      "step": 149500
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.2250312733235925e-05,
      "loss": 0.5975,
      "step": 150000
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.2224137749792395e-05,
      "loss": 0.6023,
      "step": 150500
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.219785764593342e-05,
      "loss": 0.6035,
      "step": 151000
    },
    {
      "epoch": 2.38,
      "learning_rate": 4.2171630102282166e-05,
      "loss": 0.5928,
      "step": 151500
    },
    {
      "epoch": 2.39,
      "learning_rate": 4.214534999842319e-05,
      "loss": 0.6116,
      "step": 152000
    },
    {
      "epoch": 2.4,
      "learning_rate": 4.2119069894564225e-05,
      "loss": 0.6186,
      "step": 152500
    },
    {
      "epoch": 2.41,
      "learning_rate": 4.209278979070526e-05,
      "loss": 0.6025,
      "step": 153000
    },
    {
      "epoch": 2.41,
      "learning_rate": 4.2066509686846283e-05,
      "loss": 0.5997,
      "step": 153500
    },
    {
      "epoch": 2.42,
      "learning_rate": 4.204022958298731e-05,
      "loss": 0.6094,
      "step": 154000
    },
    {
      "epoch": 2.43,
      "learning_rate": 4.201394947912834e-05,
      "loss": 0.6109,
      "step": 154500
    },
    {
      "epoch": 2.44,
      "learning_rate": 4.1987669375269375e-05,
      "loss": 0.5984,
      "step": 155000
    },
    {
      "epoch": 2.45,
      "learning_rate": 4.19613892714104e-05,
      "loss": 0.6177,
      "step": 155500
    },
    {
      "epoch": 2.45,
      "learning_rate": 4.1935109167551434e-05,
      "loss": 0.6136,
      "step": 156000
    },
    {
      "epoch": 2.46,
      "learning_rate": 4.190882906369246e-05,
      "loss": 0.6128,
      "step": 156500
    },
    {
      "epoch": 2.47,
      "learning_rate": 4.188260152004121e-05,
      "loss": 0.6172,
      "step": 157000
    },
    {
      "epoch": 2.48,
      "learning_rate": 4.185632141618224e-05,
      "loss": 0.6041,
      "step": 157500
    },
    {
      "epoch": 2.48,
      "learning_rate": 4.183004131232327e-05,
      "loss": 0.6154,
      "step": 158000
    },
    {
      "epoch": 2.49,
      "learning_rate": 4.1803761208464296e-05,
      "loss": 0.6094,
      "step": 158500
    },
    {
      "epoch": 2.5,
      "learning_rate": 4.177748110460533e-05,
      "loss": 0.6059,
      "step": 159000
    },
    {
      "epoch": 2.51,
      "learning_rate": 4.175120100074636e-05,
      "loss": 0.6089,
      "step": 159500
    },
    {
      "epoch": 2.52,
      "learning_rate": 4.1724973457095107e-05,
      "loss": 0.6122,
      "step": 160000
    },
    {
      "epoch": 2.52,
      "learning_rate": 4.169874591344385e-05,
      "loss": 0.6093,
      "step": 160500
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.1672465809584884e-05,
      "loss": 0.6174,
      "step": 161000
    },
    {
      "epoch": 2.54,
      "learning_rate": 4.164618570572591e-05,
      "loss": 0.6301,
      "step": 161500
    },
    {
      "epoch": 2.55,
      "learning_rate": 4.161990560186694e-05,
      "loss": 0.6204,
      "step": 162000
    },
    {
      "epoch": 2.56,
      "learning_rate": 4.159362549800797e-05,
      "loss": 0.6207,
      "step": 162500
    },
    {
      "epoch": 2.56,
      "learning_rate": 4.1567345394149e-05,
      "loss": 0.6102,
      "step": 163000
    },
    {
      "epoch": 2.57,
      "learning_rate": 4.154106529029003e-05,
      "loss": 0.6088,
      "step": 163500
    },
    {
      "epoch": 2.58,
      "learning_rate": 4.151478518643106e-05,
      "loss": 0.6231,
      "step": 164000
    },
    {
      "epoch": 2.59,
      "learning_rate": 4.1488505082572087e-05,
      "loss": 0.6195,
      "step": 164500
    },
    {
      "epoch": 2.59,
      "learning_rate": 4.146222497871311e-05,
      "loss": 0.6214,
      "step": 165000
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.1435997435061864e-05,
      "loss": 0.624,
      "step": 165500
    },
    {
      "epoch": 2.61,
      "learning_rate": 4.14097173312029e-05,
      "loss": 0.6218,
      "step": 166000
    },
    {
      "epoch": 2.62,
      "learning_rate": 4.138343722734392e-05,
      "loss": 0.6084,
      "step": 166500
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.135715712348495e-05,
      "loss": 0.6155,
      "step": 167000
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.133087701962599e-05,
      "loss": 0.6153,
      "step": 167500
    },
    {
      "epoch": 2.64,
      "learning_rate": 4.1304596915767015e-05,
      "loss": 0.6224,
      "step": 168000
    },
    {
      "epoch": 2.65,
      "learning_rate": 4.127831681190804e-05,
      "loss": 0.6132,
      "step": 168500
    },
    {
      "epoch": 2.66,
      "learning_rate": 4.125203670804907e-05,
      "loss": 0.6136,
      "step": 169000
    },
    {
      "epoch": 2.67,
      "learning_rate": 4.12257566041901e-05,
      "loss": 0.6097,
      "step": 169500
    },
    {
      "epoch": 2.67,
      "learning_rate": 4.119952906053885e-05,
      "loss": 0.6226,
      "step": 170000
    },
    {
      "epoch": 2.68,
      "learning_rate": 4.117324895667988e-05,
      "loss": 0.6152,
      "step": 170500
    },
    {
      "epoch": 2.69,
      "learning_rate": 4.114696885282091e-05,
      "loss": 0.6188,
      "step": 171000
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.112074130916966e-05,
      "loss": 0.6199,
      "step": 171500
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.109446120531069e-05,
      "loss": 0.626,
      "step": 172000
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.106823366165943e-05,
      "loss": 0.6256,
      "step": 172500
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.104195355780046e-05,
      "loss": 0.6169,
      "step": 173000
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.10156734539415e-05,
      "loss": 0.6167,
      "step": 173500
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.0989393350082524e-05,
      "loss": 0.6147,
      "step": 174000
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.096311324622355e-05,
      "loss": 0.621,
      "step": 174500
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.0936833142364576e-05,
      "loss": 0.6063,
      "step": 175000
    },
    {
      "epoch": 2.76,
      "learning_rate": 4.091055303850561e-05,
      "loss": 0.6216,
      "step": 175500
    },
    {
      "epoch": 2.77,
      "learning_rate": 4.088427293464664e-05,
      "loss": 0.6307,
      "step": 176000
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.085799283078767e-05,
      "loss": 0.6313,
      "step": 176500
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.08317127269287e-05,
      "loss": 0.6286,
      "step": 177000
    },
    {
      "epoch": 2.79,
      "learning_rate": 4.0805432623069726e-05,
      "loss": 0.6109,
      "step": 177500
    },
    {
      "epoch": 2.8,
      "learning_rate": 4.077915251921076e-05,
      "loss": 0.6175,
      "step": 178000
    },
    {
      "epoch": 2.81,
      "learning_rate": 4.075287241535179e-05,
      "loss": 0.6211,
      "step": 178500
    },
    {
      "epoch": 2.82,
      "learning_rate": 4.072659231149282e-05,
      "loss": 0.6227,
      "step": 179000
    },
    {
      "epoch": 2.82,
      "learning_rate": 4.0700312207633844e-05,
      "loss": 0.6254,
      "step": 179500
    },
    {
      "epoch": 2.83,
      "learning_rate": 4.0674032103774877e-05,
      "loss": 0.6212,
      "step": 180000
    },
    {
      "epoch": 2.84,
      "learning_rate": 4.064775199991591e-05,
      "loss": 0.6257,
      "step": 180500
    },
    {
      "epoch": 2.85,
      "learning_rate": 4.0621471896056935e-05,
      "loss": 0.6255,
      "step": 181000
    },
    {
      "epoch": 2.85,
      "learning_rate": 4.059519179219797e-05,
      "loss": 0.622,
      "step": 181500
    },
    {
      "epoch": 2.86,
      "learning_rate": 4.0568911688338994e-05,
      "loss": 0.6278,
      "step": 182000
    },
    {
      "epoch": 2.87,
      "learning_rate": 4.0542684144687746e-05,
      "loss": 0.6298,
      "step": 182500
    },
    {
      "epoch": 2.88,
      "learning_rate": 4.051640404082877e-05,
      "loss": 0.6197,
      "step": 183000
    },
    {
      "epoch": 2.89,
      "learning_rate": 4.0490176497177517e-05,
      "loss": 0.6169,
      "step": 183500
    },
    {
      "epoch": 2.89,
      "learning_rate": 4.046389639331855e-05,
      "loss": 0.6148,
      "step": 184000
    },
    {
      "epoch": 2.9,
      "learning_rate": 4.043761628945958e-05,
      "loss": 0.6153,
      "step": 184500
    },
    {
      "epoch": 2.91,
      "learning_rate": 4.041133618560061e-05,
      "loss": 0.622,
      "step": 185000
    },
    {
      "epoch": 2.92,
      "learning_rate": 4.0385056081741634e-05,
      "loss": 0.6291,
      "step": 185500
    },
    {
      "epoch": 2.93,
      "learning_rate": 4.035882853809038e-05,
      "loss": 0.6172,
      "step": 186000
    },
    {
      "epoch": 2.93,
      "learning_rate": 4.033254843423142e-05,
      "loss": 0.6156,
      "step": 186500
    },
    {
      "epoch": 2.94,
      "learning_rate": 4.0306268330372445e-05,
      "loss": 0.6274,
      "step": 187000
    },
    {
      "epoch": 2.95,
      "learning_rate": 4.028004078672119e-05,
      "loss": 0.6319,
      "step": 187500
    },
    {
      "epoch": 2.96,
      "learning_rate": 4.025381324306994e-05,
      "loss": 0.6218,
      "step": 188000
    },
    {
      "epoch": 2.96,
      "learning_rate": 4.022753313921097e-05,
      "loss": 0.6184,
      "step": 188500
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.0201253035352e-05,
      "loss": 0.6275,
      "step": 189000
    },
    {
      "epoch": 2.98,
      "learning_rate": 4.0174972931493026e-05,
      "loss": 0.6218,
      "step": 189500
    },
    {
      "epoch": 2.99,
      "learning_rate": 4.014869282763405e-05,
      "loss": 0.6179,
      "step": 190000
    },
    {
      "epoch": 3.0,
      "learning_rate": 4.01224652839828e-05,
      "loss": 0.6294,
      "step": 190500
    },
    {
      "epoch": 3.0,
      "eval_bleu": 55.7536,
      "eval_gen_len": 15.2192,
      "eval_loss": 0.6636257171630859,
      "eval_runtime": 9606.0302,
      "eval_samples_per_second": 13.439,
      "eval_steps_per_second": 1.68,
      "step": 190759
    },
    {
      "epoch": 3.0,
      "learning_rate": 4.0096185180123836e-05,
      "loss": 0.551,
      "step": 191000
    },
    {
      "epoch": 3.01,
      "learning_rate": 4.006990507626486e-05,
      "loss": 0.4652,
      "step": 191500
    },
    {
      "epoch": 3.02,
      "learning_rate": 4.004362497240589e-05,
      "loss": 0.4579,
      "step": 192000
    },
    {
      "epoch": 3.03,
      "learning_rate": 4.001734486854692e-05,
      "loss": 0.4586,
      "step": 192500
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.9991064764687954e-05,
      "loss": 0.4651,
      "step": 193000
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.99648372210367e-05,
      "loss": 0.4688,
      "step": 193500
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.9938557117177725e-05,
      "loss": 0.4685,
      "step": 194000
    },
    {
      "epoch": 3.06,
      "learning_rate": 3.991227701331876e-05,
      "loss": 0.4687,
      "step": 194500
    },
    {
      "epoch": 3.07,
      "learning_rate": 3.988599690945979e-05,
      "loss": 0.4672,
      "step": 195000
    },
    {
      "epoch": 3.07,
      "learning_rate": 3.9859716805600816e-05,
      "loss": 0.4693,
      "step": 195500
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.983348926194956e-05,
      "loss": 0.4741,
      "step": 196000
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.9807209158090594e-05,
      "loss": 0.4708,
      "step": 196500
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.9780929054231627e-05,
      "loss": 0.4696,
      "step": 197000
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.975464895037265e-05,
      "loss": 0.469,
      "step": 197500
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.9728368846513685e-05,
      "loss": 0.4665,
      "step": 198000
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.970208874265471e-05,
      "loss": 0.4742,
      "step": 198500
    },
    {
      "epoch": 3.13,
      "learning_rate": 3.9675808638795744e-05,
      "loss": 0.4854,
      "step": 199000
    },
    {
      "epoch": 3.14,
      "learning_rate": 3.964952853493677e-05,
      "loss": 0.4816,
      "step": 199500
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.96232484310778e-05,
      "loss": 0.4851,
      "step": 200000
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.959702088742655e-05,
      "loss": 0.4814,
      "step": 200500
    },
    {
      "epoch": 3.16,
      "learning_rate": 3.957074078356758e-05,
      "loss": 0.4766,
      "step": 201000
    },
    {
      "epoch": 3.17,
      "learning_rate": 3.9544513239916325e-05,
      "loss": 0.4869,
      "step": 201500
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.951823313605735e-05,
      "loss": 0.4754,
      "step": 202000
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.9491953032198384e-05,
      "loss": 0.4805,
      "step": 202500
    },
    {
      "epoch": 3.19,
      "learning_rate": 3.946567292833942e-05,
      "loss": 0.4876,
      "step": 203000
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.943939282448044e-05,
      "loss": 0.4936,
      "step": 203500
    },
    {
      "epoch": 3.21,
      "learning_rate": 3.9413112720621476e-05,
      "loss": 0.4869,
      "step": 204000
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.93868326167625e-05,
      "loss": 0.4943,
      "step": 204500
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.9360552512903535e-05,
      "loss": 0.4947,
      "step": 205000
    },
    {
      "epoch": 3.23,
      "learning_rate": 3.933427240904457e-05,
      "loss": 0.4947,
      "step": 205500
    },
    {
      "epoch": 3.24,
      "learning_rate": 3.930799230518559e-05,
      "loss": 0.4849,
      "step": 206000
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.928171220132662e-05,
      "loss": 0.4994,
      "step": 206500
    },
    {
      "epoch": 3.26,
      "learning_rate": 3.925548465767537e-05,
      "loss": 0.4892,
      "step": 207000
    },
    {
      "epoch": 3.26,
      "learning_rate": 3.9229204553816404e-05,
      "loss": 0.4856,
      "step": 207500
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.920292444995743e-05,
      "loss": 0.4945,
      "step": 208000
    },
    {
      "epoch": 3.28,
      "learning_rate": 3.9176696906306175e-05,
      "loss": 0.4906,
      "step": 208500
    },
    {
      "epoch": 3.29,
      "learning_rate": 3.9150469362654926e-05,
      "loss": 0.4948,
      "step": 209000
    },
    {
      "epoch": 3.29,
      "learning_rate": 3.912418925879595e-05,
      "loss": 0.486,
      "step": 209500
    },
    {
      "epoch": 3.3,
      "learning_rate": 3.9097909154936985e-05,
      "loss": 0.4957,
      "step": 210000
    },
    {
      "epoch": 3.31,
      "learning_rate": 3.907162905107801e-05,
      "loss": 0.4912,
      "step": 210500
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.904540150742676e-05,
      "loss": 0.4905,
      "step": 211000
    },
    {
      "epoch": 3.33,
      "learning_rate": 3.901912140356779e-05,
      "loss": 0.5008,
      "step": 211500
    },
    {
      "epoch": 3.33,
      "learning_rate": 3.899284129970882e-05,
      "loss": 0.4983,
      "step": 212000
    },
    {
      "epoch": 3.34,
      "learning_rate": 3.896656119584985e-05,
      "loss": 0.4988,
      "step": 212500
    },
    {
      "epoch": 3.35,
      "learning_rate": 3.894028109199087e-05,
      "loss": 0.4952,
      "step": 213000
    },
    {
      "epoch": 3.36,
      "learning_rate": 3.8914000988131906e-05,
      "loss": 0.4925,
      "step": 213500
    },
    {
      "epoch": 3.37,
      "learning_rate": 3.888772088427294e-05,
      "loss": 0.4992,
      "step": 214000
    },
    {
      "epoch": 3.37,
      "learning_rate": 3.8861440780413965e-05,
      "loss": 0.5013,
      "step": 214500
    },
    {
      "epoch": 3.38,
      "learning_rate": 3.883521323676271e-05,
      "loss": 0.5053,
      "step": 215000
    },
    {
      "epoch": 3.39,
      "learning_rate": 3.880893313290374e-05,
      "loss": 0.5039,
      "step": 215500
    },
    {
      "epoch": 3.4,
      "learning_rate": 3.8782653029044775e-05,
      "loss": 0.4962,
      "step": 216000
    },
    {
      "epoch": 3.4,
      "learning_rate": 3.87563729251858e-05,
      "loss": 0.4977,
      "step": 216500
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.873009282132683e-05,
      "loss": 0.495,
      "step": 217000
    },
    {
      "epoch": 3.42,
      "learning_rate": 3.870381271746786e-05,
      "loss": 0.5101,
      "step": 217500
    },
    {
      "epoch": 3.43,
      "learning_rate": 3.867753261360889e-05,
      "loss": 0.4998,
      "step": 218000
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.865125250974992e-05,
      "loss": 0.5069,
      "step": 218500
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.862497240589095e-05,
      "loss": 0.5002,
      "step": 219000
    },
    {
      "epoch": 3.45,
      "learning_rate": 3.859869230203198e-05,
      "loss": 0.5035,
      "step": 219500
    },
    {
      "epoch": 3.46,
      "learning_rate": 3.857241219817301e-05,
      "loss": 0.5015,
      "step": 220000
    },
    {
      "epoch": 3.47,
      "learning_rate": 3.8546132094314036e-05,
      "loss": 0.5082,
      "step": 220500
    },
    {
      "epoch": 3.48,
      "learning_rate": 3.851985199045507e-05,
      "loss": 0.5069,
      "step": 221000
    },
    {
      "epoch": 3.48,
      "learning_rate": 3.8493571886596095e-05,
      "loss": 0.4975,
      "step": 221500
    },
    {
      "epoch": 3.49,
      "learning_rate": 3.846729178273713e-05,
      "loss": 0.512,
      "step": 222000
    },
    {
      "epoch": 3.5,
      "learning_rate": 3.844111679929359e-05,
      "loss": 0.5093,
      "step": 222500
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.8414889255642337e-05,
      "loss": 0.5142,
      "step": 223000
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.838860915178337e-05,
      "loss": 0.5048,
      "step": 223500
    },
    {
      "epoch": 3.52,
      "learning_rate": 3.83623290479244e-05,
      "loss": 0.5055,
      "step": 224000
    },
    {
      "epoch": 3.53,
      "learning_rate": 3.833604894406543e-05,
      "loss": 0.5102,
      "step": 224500
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.8309768840206454e-05,
      "loss": 0.5213,
      "step": 225000
    },
    {
      "epoch": 3.55,
      "learning_rate": 3.8283541296555206e-05,
      "loss": 0.5063,
      "step": 225500
    },
    {
      "epoch": 3.55,
      "learning_rate": 3.825726119269624e-05,
      "loss": 0.5071,
      "step": 226000
    },
    {
      "epoch": 3.56,
      "learning_rate": 3.823103364904498e-05,
      "loss": 0.5042,
      "step": 226500
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.820475354518601e-05,
      "loss": 0.5094,
      "step": 227000
    },
    {
      "epoch": 3.58,
      "learning_rate": 3.817847344132704e-05,
      "loss": 0.5141,
      "step": 227500
    },
    {
      "epoch": 3.59,
      "learning_rate": 3.8152193337468075e-05,
      "loss": 0.5031,
      "step": 228000
    },
    {
      "epoch": 3.59,
      "learning_rate": 3.81259132336091e-05,
      "loss": 0.5116,
      "step": 228500
    },
    {
      "epoch": 3.6,
      "learning_rate": 3.809963312975013e-05,
      "loss": 0.5178,
      "step": 229000
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.807335302589116e-05,
      "loss": 0.5109,
      "step": 229500
    },
    {
      "epoch": 3.62,
      "learning_rate": 3.804707292203219e-05,
      "loss": 0.5091,
      "step": 230000
    },
    {
      "epoch": 3.62,
      "learning_rate": 3.802084537838094e-05,
      "loss": 0.507,
      "step": 230500
    },
    {
      "epoch": 3.63,
      "learning_rate": 3.799456527452196e-05,
      "loss": 0.508,
      "step": 231000
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.7968285170662996e-05,
      "loss": 0.515,
      "step": 231500
    },
    {
      "epoch": 3.65,
      "learning_rate": 3.794200506680403e-05,
      "loss": 0.5169,
      "step": 232000
    },
    {
      "epoch": 3.66,
      "learning_rate": 3.7915724962945055e-05,
      "loss": 0.5249,
      "step": 232500
    },
    {
      "epoch": 3.66,
      "learning_rate": 3.788944485908609e-05,
      "loss": 0.5171,
      "step": 233000
    },
    {
      "epoch": 3.67,
      "learning_rate": 3.7863164755227114e-05,
      "loss": 0.5122,
      "step": 233500
    },
    {
      "epoch": 3.68,
      "learning_rate": 3.783688465136814e-05,
      "loss": 0.5101,
      "step": 234000
    },
    {
      "epoch": 3.69,
      "learning_rate": 3.781060454750917e-05,
      "loss": 0.5136,
      "step": 234500
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 3.7784324443650205e-05, | |
| "loss": 0.5287, | |
| "step": 235000 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 3.775804433979123e-05, | |
| "loss": 0.5192, | |
| "step": 235500 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 3.773176423593226e-05, | |
| "loss": 0.5051, | |
| "step": 236000 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 3.770548413207329e-05, | |
| "loss": 0.5229, | |
| "step": 236500 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 3.767920402821432e-05, | |
| "loss": 0.5212, | |
| "step": 237000 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 3.765292392435535e-05, | |
| "loss": 0.5236, | |
| "step": 237500 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 3.7626696380704094e-05, | |
| "loss": 0.5146, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 3.7600416276845126e-05, | |
| "loss": 0.5245, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 3.757418873319388e-05, | |
| "loss": 0.5239, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 3.7547908629334904e-05, | |
| "loss": 0.5203, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 3.752162852547593e-05, | |
| "loss": 0.5051, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 3.749534842161696e-05, | |
| "loss": 0.519, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 3.7469120877965714e-05, | |
| "loss": 0.5311, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 3.744284077410674e-05, | |
| "loss": 0.5146, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 3.7416560670247766e-05, | |
| "loss": 0.523, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 3.739033312659652e-05, | |
| "loss": 0.5274, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 3.736405302273755e-05, | |
| "loss": 0.5089, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 3.733777291887858e-05, | |
| "loss": 0.5125, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 3.73114928150196e-05, | |
| "loss": 0.526, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 3.7285212711160636e-05, | |
| "loss": 0.5223, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 3.725893260730167e-05, | |
| "loss": 0.5305, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 3.7232652503442694e-05, | |
| "loss": 0.5268, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 3.720637239958373e-05, | |
| "loss": 0.5213, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 3.718009229572475e-05, | |
| "loss": 0.5203, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 3.7153812191865786e-05, | |
| "loss": 0.5339, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 3.712753208800681e-05, | |
| "loss": 0.5215, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 3.7101251984147845e-05, | |
| "loss": 0.5203, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 3.707497188028887e-05, | |
| "loss": 0.5162, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 3.70486917764299e-05, | |
| "loss": 0.5214, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 3.7022411672570936e-05, | |
| "loss": 0.5278, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 3.699613156871196e-05, | |
| "loss": 0.5251, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 3.696985146485299e-05, | |
| "loss": 0.5265, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 3.694357136099402e-05, | |
| "loss": 0.5222, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 3.691729125713505e-05, | |
| "loss": 0.518, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 3.68910637134838e-05, | |
| "loss": 0.5299, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 3.6864783609624825e-05, | |
| "loss": 0.5255, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 3.683855606597357e-05, | |
| "loss": 0.513, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 3.681232852232232e-05, | |
| "loss": 0.5178, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 3.6786048418463354e-05, | |
| "loss": 0.5313, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "eval_bleu": 55.9422, | |
| "eval_gen_len": 15.2674, | |
| "eval_loss": 0.6860370635986328, | |
| "eval_runtime": 9607.1426, | |
| "eval_samples_per_second": 13.437, | |
| "eval_steps_per_second": 1.68, | |
| "step": 254346 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 3.675976831460438e-05, | |
| "loss": 0.4774, | |
| "step": 254500 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 3.673354077095313e-05, | |
| "loss": 0.3734, | |
| "step": 255000 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "learning_rate": 3.6707313227301876e-05, | |
| "loss": 0.3715, | |
| "step": 255500 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "learning_rate": 3.66810331234429e-05, | |
| "loss": 0.3741, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "learning_rate": 3.6654753019583935e-05, | |
| "loss": 0.3715, | |
| "step": 256500 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "learning_rate": 3.662847291572497e-05, | |
| "loss": 0.3737, | |
| "step": 257000 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "learning_rate": 3.6602192811865994e-05, | |
| "loss": 0.3778, | |
| "step": 257500 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "learning_rate": 3.657591270800703e-05, | |
| "loss": 0.3739, | |
| "step": 258000 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "learning_rate": 3.654963260414805e-05, | |
| "loss": 0.3824, | |
| "step": 258500 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "learning_rate": 3.652335250028908e-05, | |
| "loss": 0.3834, | |
| "step": 259000 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 3.649707239643011e-05, | |
| "loss": 0.3714, | |
| "step": 259500 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "learning_rate": 3.6470792292571144e-05, | |
| "loss": 0.3817, | |
| "step": 260000 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 3.644451218871217e-05, | |
| "loss": 0.3781, | |
| "step": 260500 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 3.6418284645060915e-05, | |
| "loss": 0.3836, | |
| "step": 261000 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "learning_rate": 3.639200454120195e-05, | |
| "loss": 0.3865, | |
| "step": 261500 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "learning_rate": 3.636572443734298e-05, | |
| "loss": 0.3837, | |
| "step": 262000 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "learning_rate": 3.633944433348401e-05, | |
| "loss": 0.384, | |
| "step": 262500 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "learning_rate": 3.631316422962503e-05, | |
| "loss": 0.3911, | |
| "step": 263000 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "learning_rate": 3.6286884125766066e-05, | |
| "loss": 0.3785, | |
| "step": 263500 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "learning_rate": 3.62606040219071e-05, | |
| "loss": 0.3826, | |
| "step": 264000 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "learning_rate": 3.6234323918048124e-05, | |
| "loss": 0.3827, | |
| "step": 264500 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 3.620809637439687e-05, | |
| "loss": 0.393, | |
| "step": 265000 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "learning_rate": 3.61818162705379e-05, | |
| "loss": 0.3783, | |
| "step": 265500 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "learning_rate": 3.6155536166678935e-05, | |
| "loss": 0.3854, | |
| "step": 266000 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 3.612925606281996e-05, | |
| "loss": 0.3824, | |
| "step": 266500 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "learning_rate": 3.6103028519168706e-05, | |
| "loss": 0.3858, | |
| "step": 267000 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 3.607674841530974e-05, | |
| "loss": 0.3866, | |
| "step": 267500 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 3.605046831145077e-05, | |
| "loss": 0.3862, | |
| "step": 268000 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "learning_rate": 3.60241882075918e-05, | |
| "loss": 0.3897, | |
| "step": 268500 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 3.599790810373283e-05, | |
| "loss": 0.3894, | |
| "step": 269000 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "learning_rate": 3.5971627999873856e-05, | |
| "loss": 0.3895, | |
| "step": 269500 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 3.594534789601489e-05, | |
| "loss": 0.3927, | |
| "step": 270000 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 3.5919067792155915e-05, | |
| "loss": 0.4029, | |
| "step": 270500 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 3.589278768829695e-05, | |
| "loss": 0.397, | |
| "step": 271000 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "learning_rate": 3.5866507584437974e-05, | |
| "loss": 0.3946, | |
| "step": 271500 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 3.5840227480579e-05, | |
| "loss": 0.393, | |
| "step": 272000 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "learning_rate": 3.581399993692775e-05, | |
| "loss": 0.4006, | |
| "step": 272500 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "learning_rate": 3.5787719833068784e-05, | |
| "loss": 0.3956, | |
| "step": 273000 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "learning_rate": 3.576143972920981e-05, | |
| "loss": 0.3998, | |
| "step": 273500 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "learning_rate": 3.5735159625350836e-05, | |
| "loss": 0.3982, | |
| "step": 274000 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 3.5708879521491876e-05, | |
| "loss": 0.3974, | |
| "step": 274500 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 3.56825994176329e-05, | |
| "loss": 0.4028, | |
| "step": 275000 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "learning_rate": 3.5656371873981646e-05, | |
| "loss": 0.4118, | |
| "step": 275500 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 3.563009177012267e-05, | |
| "loss": 0.4067, | |
| "step": 276000 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "learning_rate": 3.560381166626371e-05, | |
| "loss": 0.4008, | |
| "step": 276500 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "learning_rate": 3.557753156240474e-05, | |
| "loss": 0.4045, | |
| "step": 277000 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "learning_rate": 3.5551251458545764e-05, | |
| "loss": 0.4092, | |
| "step": 277500 | |
| }, | |
| { | |
| "epoch": 4.37, | |
| "learning_rate": 3.55249713546868e-05, | |
| "loss": 0.4109, | |
| "step": 278000 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "learning_rate": 3.549869125082782e-05, | |
| "loss": 0.409, | |
| "step": 278500 | |
| }, | |
| { | |
| "epoch": 4.39, | |
| "learning_rate": 3.5472411146968856e-05, | |
| "loss": 0.4024, | |
| "step": 279000 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "learning_rate": 3.544613104310988e-05, | |
| "loss": 0.4075, | |
| "step": 279500 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "learning_rate": 3.5419850939250914e-05, | |
| "loss": 0.4087, | |
| "step": 280000 | |
| }, | |
| { | |
| "epoch": 4.41, | |
| "learning_rate": 3.539357083539194e-05, | |
| "loss": 0.4039, | |
| "step": 280500 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "learning_rate": 3.536729073153297e-05, | |
| "loss": 0.4098, | |
| "step": 281000 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "learning_rate": 3.5341010627674006e-05, | |
| "loss": 0.4019, | |
| "step": 281500 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "learning_rate": 3.531473052381503e-05, | |
| "loss": 0.406, | |
| "step": 282000 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "learning_rate": 3.528845041995606e-05, | |
| "loss": 0.4078, | |
| "step": 282500 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "learning_rate": 3.526217031609709e-05, | |
| "loss": 0.409, | |
| "step": 283000 | |
| }, | |
| { | |
| "epoch": 4.46, | |
| "learning_rate": 3.523594277244584e-05, | |
| "loss": 0.4122, | |
| "step": 283500 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "learning_rate": 3.520966266858687e-05, | |
| "loss": 0.405, | |
| "step": 284000 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "learning_rate": 3.5183382564727894e-05, | |
| "loss": 0.4101, | |
| "step": 284500 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "learning_rate": 3.515710246086893e-05, | |
| "loss": 0.4218, | |
| "step": 285000 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "learning_rate": 3.513082235700996e-05, | |
| "loss": 0.4186, | |
| "step": 285500 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "learning_rate": 3.5104542253150986e-05, | |
| "loss": 0.4088, | |
| "step": 286000 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "learning_rate": 3.507826214929202e-05, | |
| "loss": 0.416, | |
| "step": 286500 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "learning_rate": 3.5051982045433045e-05, | |
| "loss": 0.4186, | |
| "step": 287000 | |
| }, | |
| { | |
| "epoch": 4.52, | |
| "learning_rate": 3.502570194157407e-05, | |
| "loss": 0.4146, | |
| "step": 287500 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "learning_rate": 3.499947439792282e-05, | |
| "loss": 0.4118, | |
| "step": 288000 | |
| }, | |
| { | |
| "epoch": 4.54, | |
| "learning_rate": 3.497319429406385e-05, | |
| "loss": 0.4141, | |
| "step": 288500 | |
| }, | |
| { | |
| "epoch": 4.54, | |
| "learning_rate": 3.494691419020488e-05, | |
| "loss": 0.4115, | |
| "step": 289000 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "learning_rate": 3.492068664655363e-05, | |
| "loss": 0.4136, | |
| "step": 289500 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "learning_rate": 3.489440654269466e-05, | |
| "loss": 0.4198, | |
| "step": 290000 | |
| }, | |
| { | |
| "epoch": 4.57, | |
| "learning_rate": 3.4868126438835685e-05, | |
| "loss": 0.4176, | |
| "step": 290500 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "learning_rate": 3.484189889518443e-05, | |
| "loss": 0.4212, | |
| "step": 291000 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "learning_rate": 3.481561879132547e-05, | |
| "loss": 0.4169, | |
| "step": 291500 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "learning_rate": 3.4789391247674214e-05, | |
| "loss": 0.407, | |
| "step": 292000 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "learning_rate": 3.476311114381524e-05, | |
| "loss": 0.4112, | |
| "step": 292500 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "learning_rate": 3.4736831039956266e-05, | |
| "loss": 0.4177, | |
| "step": 293000 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "learning_rate": 3.4710550936097306e-05, | |
| "loss": 0.4272, | |
| "step": 293500 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "learning_rate": 3.468427083223833e-05, | |
| "loss": 0.4221, | |
| "step": 294000 | |
| }, | |
| { | |
| "epoch": 4.63, | |
| "learning_rate": 3.465799072837936e-05, | |
| "loss": 0.4131, | |
| "step": 294500 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "learning_rate": 3.463171062452039e-05, | |
| "loss": 0.4238, | |
| "step": 295000 | |
| }, | |
| { | |
| "epoch": 4.65, | |
| "learning_rate": 3.4605430520661416e-05, | |
| "loss": 0.4213, | |
| "step": 295500 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "learning_rate": 3.457915041680245e-05, | |
| "loss": 0.4252, | |
| "step": 296000 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "learning_rate": 3.455287031294348e-05, | |
| "loss": 0.4222, | |
| "step": 296500 | |
| }, | |
| { | |
| "epoch": 4.67, | |
| "learning_rate": 3.452659020908451e-05, | |
| "loss": 0.4206, | |
| "step": 297000 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "learning_rate": 3.4500310105225534e-05, | |
| "loss": 0.415, | |
| "step": 297500 | |
| }, | |
| { | |
| "epoch": 4.69, | |
| "learning_rate": 3.447403000136657e-05, | |
| "loss": 0.4165, | |
| "step": 298000 | |
| }, | |
| { | |
| "epoch": 4.69, | |
| "learning_rate": 3.44477498975076e-05, | |
| "loss": 0.4266, | |
| "step": 298500 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "learning_rate": 3.4421522353856344e-05, | |
| "loss": 0.425, | |
| "step": 299000 | |
| }, | |
| { | |
| "epoch": 4.71, | |
| "learning_rate": 3.439524224999737e-05, | |
| "loss": 0.42, | |
| "step": 299500 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "learning_rate": 3.43689621461384e-05, | |
| "loss": 0.4271, | |
| "step": 300000 | |
| }, | |
| { | |
| "epoch": 4.73, | |
| "learning_rate": 3.4342734602487155e-05, | |
| "loss": 0.4308, | |
| "step": 300500 | |
| }, | |
| { | |
| "epoch": 4.73, | |
| "learning_rate": 3.431645449862818e-05, | |
| "loss": 0.4283, | |
| "step": 301000 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "learning_rate": 3.429017439476921e-05, | |
| "loss": 0.4296, | |
| "step": 301500 | |
| }, | |
| { | |
| "epoch": 4.75, | |
| "learning_rate": 3.426389429091024e-05, | |
| "loss": 0.4219, | |
| "step": 302000 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "learning_rate": 3.423761418705127e-05, | |
| "loss": 0.4176, | |
| "step": 302500 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "learning_rate": 3.42113340831923e-05, | |
| "loss": 0.43, | |
| "step": 303000 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "learning_rate": 3.418510653954104e-05, | |
| "loss": 0.4241, | |
| "step": 303500 | |
| }, | |
| { | |
| "epoch": 4.78, | |
| "learning_rate": 3.4158826435682076e-05, | |
| "loss": 0.4314, | |
| "step": 304000 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "learning_rate": 3.413254633182311e-05, | |
| "loss": 0.4217, | |
| "step": 304500 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "learning_rate": 3.4106266227964135e-05, | |
| "loss": 0.4378, | |
| "step": 305000 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "learning_rate": 3.407998612410516e-05, | |
| "loss": 0.4243, | |
| "step": 305500 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "learning_rate": 3.4053706020246193e-05, | |
| "loss": 0.4247, | |
| "step": 306000 | |
| }, | |
| { | |
| "epoch": 4.82, | |
| "learning_rate": 3.4027425916387226e-05, | |
| "loss": 0.4255, | |
| "step": 306500 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "learning_rate": 3.400114581252825e-05, | |
| "loss": 0.4317, | |
| "step": 307000 | |
| }, | |
| { | |
| "epoch": 4.84, | |
| "learning_rate": 3.3974865708669285e-05, | |
| "loss": 0.4201, | |
| "step": 307500 | |
| }, | |
| { | |
| "epoch": 4.84, | |
| "learning_rate": 3.394858560481031e-05, | |
| "loss": 0.4273, | |
| "step": 308000 | |
| }, | |
| { | |
| "epoch": 4.85, | |
| "learning_rate": 3.392230550095134e-05, | |
| "loss": 0.4322, | |
| "step": 308500 | |
| }, | |
| { | |
| "epoch": 4.86, | |
| "learning_rate": 3.389602539709238e-05, | |
| "loss": 0.4273, | |
| "step": 309000 | |
| }, | |
| { | |
| "epoch": 4.87, | |
| "learning_rate": 3.386979785344112e-05, | |
| "loss": 0.4291, | |
| "step": 309500 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "learning_rate": 3.3843570309789866e-05, | |
| "loss": 0.4234, | |
| "step": 310000 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "learning_rate": 3.38172902059309e-05, | |
| "loss": 0.4283, | |
| "step": 310500 | |
| }, | |
| { | |
| "epoch": 4.89, | |
| "learning_rate": 3.3791010102071925e-05, | |
| "loss": 0.4297, | |
| "step": 311000 | |
| }, | |
| { | |
| "epoch": 4.9, | |
| "learning_rate": 3.376472999821296e-05, | |
| "loss": 0.4347, | |
| "step": 311500 | |
| }, | |
| { | |
| "epoch": 4.91, | |
| "learning_rate": 3.37385024545617e-05, | |
| "loss": 0.4325, | |
| "step": 312000 | |
| }, | |
| { | |
| "epoch": 4.91, | |
| "learning_rate": 3.3712222350702735e-05, | |
| "loss": 0.4215, | |
| "step": 312500 | |
| }, | |
| { | |
| "epoch": 4.92, | |
| "learning_rate": 3.368594224684376e-05, | |
| "loss": 0.4354, | |
| "step": 313000 | |
| }, | |
| { | |
| "epoch": 4.93, | |
| "learning_rate": 3.365966214298479e-05, | |
| "loss": 0.4367, | |
| "step": 313500 | |
| }, | |
| { | |
| "epoch": 4.94, | |
| "learning_rate": 3.363343459933354e-05, | |
| "loss": 0.4402, | |
| "step": 314000 | |
| }, | |
| { | |
| "epoch": 4.95, | |
| "learning_rate": 3.360715449547457e-05, | |
| "loss": 0.4269, | |
| "step": 314500 | |
| }, | |
| { | |
| "epoch": 4.95, | |
| "learning_rate": 3.35808743916156e-05, | |
| "loss": 0.433, | |
| "step": 315000 | |
| }, | |
| { | |
| "epoch": 4.96, | |
| "learning_rate": 3.3554594287756624e-05, | |
| "loss": 0.4356, | |
| "step": 315500 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "learning_rate": 3.352831418389766e-05, | |
| "loss": 0.4309, | |
| "step": 316000 | |
| }, | |
| { | |
| "epoch": 4.98, | |
| "learning_rate": 3.350203408003868e-05, | |
| "loss": 0.4304, | |
| "step": 316500 | |
| }, | |
| { | |
| "epoch": 4.99, | |
| "learning_rate": 3.3475753976179715e-05, | |
| "loss": 0.433, | |
| "step": 317000 | |
| }, | |
| { | |
| "epoch": 4.99, | |
| "learning_rate": 3.344947387232075e-05, | |
| "loss": 0.4325, | |
| "step": 317500 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "eval_bleu": 56.2388, | |
| "eval_gen_len": 15.235, | |
| "eval_loss": 0.7183424234390259, | |
| "eval_runtime": 9602.8422, | |
| "eval_samples_per_second": 13.443, | |
| "eval_steps_per_second": 1.68, | |
| "step": 317932 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "learning_rate": 3.342324632866949e-05, | |
| "loss": 0.418, | |
| "step": 318000 | |
| }, | |
| { | |
| "epoch": 5.01, | |
| "learning_rate": 3.339696622481052e-05, | |
| "loss": 0.2939, | |
| "step": 318500 | |
| }, | |
| { | |
| "epoch": 5.02, | |
| "learning_rate": 3.337068612095155e-05, | |
| "loss": 0.2951, | |
| "step": 319000 | |
| }, | |
| { | |
| "epoch": 5.02, | |
| "learning_rate": 3.3344406017092585e-05, | |
| "loss": 0.2888, | |
| "step": 319500 | |
| }, | |
| { | |
| "epoch": 5.03, | |
| "learning_rate": 3.331812591323361e-05, | |
| "loss": 0.2897, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 5.04, | |
| "learning_rate": 3.329184580937464e-05, | |
| "loss": 0.2934, | |
| "step": 320500 | |
| }, | |
| { | |
| "epoch": 5.05, | |
| "learning_rate": 3.326561826572339e-05, | |
| "loss": 0.2964, | |
| "step": 321000 | |
| }, | |
| { | |
| "epoch": 5.06, | |
| "learning_rate": 3.323933816186442e-05, | |
| "loss": 0.3002, | |
| "step": 321500 | |
| }, | |
| { | |
| "epoch": 5.06, | |
| "learning_rate": 3.321305805800545e-05, | |
| "loss": 0.2895, | |
| "step": 322000 | |
| }, | |
| { | |
| "epoch": 5.07, | |
| "learning_rate": 3.318677795414647e-05, | |
| "loss": 0.3009, | |
| "step": 322500 | |
| }, | |
| { | |
| "epoch": 5.08, | |
| "learning_rate": 3.3160497850287506e-05, | |
| "loss": 0.2953, | |
| "step": 323000 | |
| }, | |
| { | |
| "epoch": 5.09, | |
| "learning_rate": 3.313421774642854e-05, | |
| "loss": 0.3046, | |
| "step": 323500 | |
| }, | |
| { | |
| "epoch": 5.1, | |
| "learning_rate": 3.3108042762985e-05, | |
| "loss": 0.2955, | |
| "step": 324000 | |
| }, | |
| { | |
| "epoch": 5.1, | |
| "learning_rate": 3.308176265912603e-05, | |
| "loss": 0.3027, | |
| "step": 324500 | |
| }, | |
| { | |
| "epoch": 5.11, | |
| "learning_rate": 3.305548255526706e-05, | |
| "loss": 0.3, | |
| "step": 325000 | |
| }, | |
| { | |
| "epoch": 5.12, | |
| "learning_rate": 3.3029202451408094e-05, | |
| "loss": 0.3088, | |
| "step": 325500 | |
| }, | |
| { | |
| "epoch": 5.13, | |
| "learning_rate": 3.300292234754912e-05, | |
| "loss": 0.2993, | |
| "step": 326000 | |
| }, | |
| { | |
| "epoch": 5.13, | |
| "learning_rate": 3.2976642243690146e-05, | |
| "loss": 0.3144, | |
| "step": 326500 | |
| }, | |
| { | |
| "epoch": 5.14, | |
| "learning_rate": 3.29504147000389e-05, | |
| "loss": 0.304, | |
| "step": 327000 | |
| }, | |
| { | |
| "epoch": 5.15, | |
| "learning_rate": 3.2924134596179923e-05, | |
| "loss": 0.305, | |
| "step": 327500 | |
| }, | |
| { | |
| "epoch": 5.16, | |
| "learning_rate": 3.2897854492320956e-05, | |
| "loss": 0.2984, | |
| "step": 328000 | |
| }, | |
| { | |
| "epoch": 5.17, | |
| "learning_rate": 3.287157438846198e-05, | |
| "loss": 0.3084, | |
| "step": 328500 | |
| }, | |
| { | |
| "epoch": 5.17, | |
| "learning_rate": 3.2845346844810734e-05, | |
| "loss": 0.3135, | |
| "step": 329000 | |
| }, | |
| { | |
| "epoch": 5.18, | |
| "learning_rate": 3.281906674095176e-05, | |
| "loss": 0.3048, | |
| "step": 329500 | |
| }, | |
| { | |
| "epoch": 5.19, | |
| "learning_rate": 3.279278663709279e-05, | |
| "loss": 0.3075, | |
| "step": 330000 | |
| }, | |
| { | |
| "epoch": 5.2, | |
| "learning_rate": 3.276650653323382e-05, | |
| "loss": 0.3074, | |
| "step": 330500 | |
| }, | |
| { | |
| "epoch": 5.21, | |
| "learning_rate": 3.274022642937485e-05, | |
| "loss": 0.3122, | |
| "step": 331000 | |
| }, | |
| { | |
| "epoch": 5.21, | |
| "learning_rate": 3.2713946325515884e-05, | |
| "loss": 0.3115, | |
| "step": 331500 | |
| }, | |
| { | |
| "epoch": 5.22, | |
| "learning_rate": 3.268766622165691e-05, | |
| "loss": 0.3173, | |
| "step": 332000 | |
| }, | |
| { | |
| "epoch": 5.23, | |
| "learning_rate": 3.2661386117797936e-05, | |
| "loss": 0.3105, | |
| "step": 332500 | |
| }, | |
| { | |
| "epoch": 5.24, | |
| "learning_rate": 3.263510601393897e-05, | |
| "loss": 0.3154, | |
| "step": 333000 | |
| }, | |
| { | |
| "epoch": 5.24, | |
| "learning_rate": 3.260887847028772e-05, | |
| "loss": 0.3092, | |
| "step": 333500 | |
| }, | |
| { | |
| "epoch": 5.25, | |
| "learning_rate": 3.258259836642875e-05, | |
| "loss": 0.3186, | |
| "step": 334000 | |
| }, | |
| { | |
| "epoch": 5.26, | |
| "learning_rate": 3.255631826256977e-05, | |
| "loss": 0.3146, | |
| "step": 334500 | |
| }, | |
| { | |
| "epoch": 5.27, | |
| "learning_rate": 3.2530038158710805e-05, | |
| "loss": 0.3163, | |
| "step": 335000 | |
| }, | |
| { | |
| "epoch": 5.28, | |
| "learning_rate": 3.250375805485184e-05, | |
| "loss": 0.3182, | |
| "step": 335500 | |
| }, | |
| { | |
| "epoch": 5.28, | |
| "learning_rate": 3.2477477950992864e-05, | |
| "loss": 0.3217, | |
| "step": 336000 | |
| }, | |
| { | |
| "epoch": 5.29, | |
| "learning_rate": 3.245119784713389e-05, | |
| "loss": 0.3099, | |
| "step": 336500 | |
| }, | |
| { | |
| "epoch": 5.3, | |
| "learning_rate": 3.242491774327492e-05, | |
| "loss": 0.3191, | |
| "step": 337000 | |
| }, | |
| { | |
| "epoch": 5.31, | |
| "learning_rate": 3.239863763941595e-05, | |
| "loss": 0.3107, | |
| "step": 337500 | |
| }, | |
| { | |
| "epoch": 5.32, | |
| "learning_rate": 3.237235753555698e-05, | |
| "loss": 0.3247, | |
| "step": 338000 | |
| }, | |
| { | |
| "epoch": 5.32, | |
| "learning_rate": 3.2346077431698015e-05, | |
| "loss": 0.3176, | |
| "step": 338500 | |
| }, | |
| { | |
| "epoch": 5.33, | |
| "learning_rate": 3.231979732783904e-05, | |
| "loss": 0.3249, | |
| "step": 339000 | |
| }, | |
| { | |
| "epoch": 5.34, | |
| "learning_rate": 3.2293569784187785e-05, | |
| "loss": 0.3186, | |
| "step": 339500 | |
| }, | |
| { | |
| "epoch": 5.35, | |
| "learning_rate": 3.226728968032882e-05, | |
| "loss": 0.3184, | |
| "step": 340000 | |
| }, | |
| { | |
| "epoch": 5.35, | |
| "learning_rate": 3.224100957646985e-05, | |
| "loss": 0.32, | |
| "step": 340500 | |
| }, | |
| { | |
| "epoch": 5.36, | |
| "learning_rate": 3.221472947261088e-05, | |
| "loss": 0.3214, | |
| "step": 341000 | |
| }, | |
| { | |
| "epoch": 5.37, | |
| "learning_rate": 3.21884493687519e-05, | |
| "loss": 0.3318, | |
| "step": 341500 | |
| }, | |
| { | |
| "epoch": 5.38, | |
| "learning_rate": 3.2162274385308373e-05, | |
| "loss": 0.3221, | |
| "step": 342000 | |
| }, | |
| { | |
| "epoch": 5.39, | |
| "learning_rate": 3.21359942814494e-05, | |
| "loss": 0.3179, | |
| "step": 342500 | |
| }, | |
| { | |
| "epoch": 5.39, | |
| "learning_rate": 3.210971417759043e-05, | |
| "loss": 0.3255, | |
| "step": 343000 | |
| }, | |
| { | |
| "epoch": 5.4, | |
| "learning_rate": 3.208343407373146e-05, | |
| "loss": 0.3228, | |
| "step": 343500 | |
| }, | |
| { | |
| "epoch": 5.41, | |
| "learning_rate": 3.205715396987249e-05, | |
| "loss": 0.3269, | |
| "step": 344000 | |
| }, | |
| { | |
| "epoch": 5.42, | |
| "learning_rate": 3.2030926426221236e-05, | |
| "loss": 0.3273, | |
| "step": 344500 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "learning_rate": 3.200469888256998e-05, | |
| "loss": 0.3226, | |
| "step": 345000 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "learning_rate": 3.1978418778711013e-05, | |
| "loss": 0.3248, | |
| "step": 345500 | |
| }, | |
| { | |
| "epoch": 5.44, | |
| "learning_rate": 3.1952138674852046e-05, | |
| "loss": 0.3291, | |
| "step": 346000 | |
| }, | |
| { | |
| "epoch": 5.45, | |
| "learning_rate": 3.192585857099307e-05, | |
| "loss": 0.3194, | |
| "step": 346500 | |
| }, | |
| { | |
| "epoch": 5.46, | |
| "learning_rate": 3.1899578467134105e-05, | |
| "loss": 0.3327, | |
| "step": 347000 | |
| }, | |
| { | |
| "epoch": 5.46, | |
| "learning_rate": 3.187329836327513e-05, | |
| "loss": 0.3264, | |
| "step": 347500 | |
| }, | |
| { | |
| "epoch": 5.47, | |
| "learning_rate": 3.1847018259416164e-05, | |
| "loss": 0.3339, | |
| "step": 348000 | |
| }, | |
| { | |
| "epoch": 5.48, | |
| "learning_rate": 3.18207381555572e-05, | |
| "loss": 0.3303, | |
| "step": 348500 | |
| }, | |
| { | |
| "epoch": 5.49, | |
| "learning_rate": 3.1794563172113653e-05, | |
| "loss": 0.3348, | |
| "step": 349000 | |
| }, | |
| { | |
| "epoch": 5.5, | |
| "learning_rate": 3.1768283068254686e-05, | |
| "loss": 0.3312, | |
| "step": 349500 | |
| }, | |
| { | |
| "epoch": 5.5, | |
| "learning_rate": 3.174200296439572e-05, | |
| "loss": 0.3344, | |
| "step": 350000 | |
| }, | |
| { | |
| "epoch": 5.51, | |
| "learning_rate": 3.1715722860536745e-05, | |
| "loss": 0.3282, | |
| "step": 350500 | |
| }, | |
| { | |
| "epoch": 5.52, | |
| "learning_rate": 3.168944275667778e-05, | |
| "loss": 0.3316, | |
| "step": 351000 | |
| }, | |
| { | |
| "epoch": 5.53, | |
| "learning_rate": 3.1663162652818804e-05, | |
| "loss": 0.3269, | |
| "step": 351500 | |
| }, | |
| { | |
| "epoch": 5.54, | |
| "learning_rate": 3.163688254895984e-05, | |
| "loss": 0.3442, | |
| "step": 352000 | |
| }, | |
| { | |
| "epoch": 5.54, | |
| "learning_rate": 3.161060244510086e-05, | |
| "loss": 0.3327, | |
| "step": 352500 | |
| }, | |
| { | |
| "epoch": 5.55, | |
| "learning_rate": 3.1584322341241895e-05, | |
| "loss": 0.3405, | |
| "step": 353000 | |
| }, | |
| { | |
| "epoch": 5.56, | |
| "learning_rate": 3.155804223738292e-05, | |
| "loss": 0.3369, | |
| "step": 353500 | |
| }, | |
| { | |
| "epoch": 5.57, | |
| "learning_rate": 3.153181469373167e-05, | |
| "loss": 0.3305, | |
| "step": 354000 | |
| }, | |
| { | |
| "epoch": 5.58, | |
| "learning_rate": 3.15055345898727e-05, | |
| "loss": 0.3355, | |
| "step": 354500 | |
| }, | |
| { | |
| "epoch": 5.58, | |
| "learning_rate": 3.147925448601373e-05, | |
| "loss": 0.331, | |
| "step": 355000 | |
| }, | |
| { | |
| "epoch": 5.59, | |
| "learning_rate": 3.145297438215476e-05, | |
| "loss": 0.3305, | |
| "step": 355500 | |
| }, | |
| { | |
| "epoch": 5.6, | |
| "learning_rate": 3.142669427829579e-05, | |
| "loss": 0.3289, | |
| "step": 356000 | |
| }, | |
| { | |
| "epoch": 5.61, | |
| "learning_rate": 3.1400414174436823e-05, | |
| "loss": 0.341, | |
| "step": 356500 | |
| }, | |
| { | |
| "epoch": 5.61, | |
| "learning_rate": 3.137413407057785e-05, | |
| "loss": 0.3329, | |
| "step": 357000 | |
| }, | |
| { | |
| "epoch": 5.62, | |
| "learning_rate": 3.1347853966718875e-05, | |
| "loss": 0.3399, | |
| "step": 357500 | |
| }, | |
| { | |
| "epoch": 5.63, | |
| "learning_rate": 3.132157386285991e-05, | |
| "loss": 0.3305, | |
| "step": 358000 | |
| }, | |
| { | |
| "epoch": 5.64, | |
| "learning_rate": 3.129529375900094e-05, | |
| "loss": 0.3365, | |
| "step": 358500 | |
| }, | |
| { | |
| "epoch": 5.65, | |
| "learning_rate": 3.126901365514197e-05, | |
| "loss": 0.3351, | |
| "step": 359000 | |
| }, | |
| { | |
| "epoch": 5.65, | |
| "learning_rate": 3.1242733551283e-05, | |
| "loss": 0.336, | |
| "step": 359500 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "learning_rate": 3.1216506007631745e-05, | |
| "loss": 0.3426, | |
| "step": 360000 | |
| }, | |
| { | |
| "epoch": 5.67, | |
| "learning_rate": 3.119022590377277e-05, | |
| "loss": 0.3365, | |
| "step": 360500 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "learning_rate": 3.1163945799913803e-05, | |
| "loss": 0.3462, | |
| "step": 361000 | |
| }, | |
| { | |
| "epoch": 5.69, | |
| "learning_rate": 3.113777081647027e-05, | |
| "loss": 0.3325, | |
| "step": 361500 | |
| }, | |
| { | |
| "epoch": 5.69, | |
| "learning_rate": 3.11114907126113e-05, | |
| "loss": 0.3345, | |
| "step": 362000 | |
| }, | |
| { | |
| "epoch": 5.7, | |
| "learning_rate": 3.108521060875233e-05, | |
| "loss": 0.3393, | |
| "step": 362500 | |
| }, | |
| { | |
| "epoch": 5.71, | |
| "learning_rate": 3.105893050489336e-05, | |
| "loss": 0.3354, | |
| "step": 363000 | |
| }, | |
| { | |
| "epoch": 5.72, | |
| "learning_rate": 3.1032650401034385e-05, | |
| "loss": 0.344, | |
| "step": 363500 | |
| }, | |
| { | |
| "epoch": 5.72, | |
| "learning_rate": 3.100637029717541e-05, | |
| "loss": 0.337, | |
| "step": 364000 | |
| }, | |
| { | |
| "epoch": 5.73, | |
| "learning_rate": 3.0980090193316443e-05, | |
| "loss": 0.342, | |
| "step": 364500 | |
| }, | |
| { | |
| "epoch": 5.74, | |
| "learning_rate": 3.0953810089457476e-05, | |
| "loss": 0.3422, | |
| "step": 365000 | |
| }, | |
| { | |
| "epoch": 5.75, | |
| "learning_rate": 3.09275299855985e-05, | |
| "loss": 0.3457, | |
| "step": 365500 | |
| }, | |
| { | |
| "epoch": 5.76, | |
| "learning_rate": 3.0901249881739535e-05, | |
| "loss": 0.345, | |
| "step": 366000 | |
| }, | |
| { | |
| "epoch": 5.76, | |
| "learning_rate": 3.087502233808828e-05, | |
| "loss": 0.3413, | |
| "step": 366500 | |
| }, | |
| { | |
| "epoch": 5.77, | |
| "learning_rate": 3.084874223422931e-05, | |
| "loss": 0.3462, | |
| "step": 367000 | |
| }, | |
| { | |
| "epoch": 5.78, | |
| "learning_rate": 3.082246213037034e-05, | |
| "loss": 0.3434, | |
| "step": 367500 | |
| }, | |
| { | |
| "epoch": 5.79, | |
| "learning_rate": 3.079618202651137e-05, | |
| "loss": 0.3498, | |
| "step": 368000 | |
| }, | |
| { | |
| "epoch": 5.8, | |
| "learning_rate": 3.07699019226524e-05, | |
| "loss": 0.3473, | |
| "step": 368500 | |
| }, | |
| { | |
| "epoch": 5.8, | |
| "learning_rate": 3.074367437900115e-05, | |
| "loss": 0.352, | |
| "step": 369000 | |
| }, | |
| { | |
| "epoch": 5.81, | |
| "learning_rate": 3.0717394275142175e-05, | |
| "loss": 0.3465, | |
| "step": 369500 | |
| }, | |
| { | |
| "epoch": 5.82, | |
| "learning_rate": 3.069111417128321e-05, | |
| "loss": 0.3431, | |
| "step": 370000 | |
| }, | |
| { | |
| "epoch": 5.83, | |
| "learning_rate": 3.0664834067424234e-05, | |
| "loss": 0.3529, | |
| "step": 370500 | |
| }, | |
| { | |
| "epoch": 5.83, | |
| "learning_rate": 3.0638606523772985e-05, | |
| "loss": 0.3437, | |
| "step": 371000 | |
| }, | |
| { | |
| "epoch": 5.84, | |
| "learning_rate": 3.061232641991401e-05, | |
| "loss": 0.3498, | |
| "step": 371500 | |
| }, | |
| { | |
| "epoch": 5.85, | |
| "learning_rate": 3.0586046316055044e-05, | |
| "loss": 0.3525, | |
| "step": 372000 | |
| }, | |
| { | |
| "epoch": 5.86, | |
| "learning_rate": 3.055976621219607e-05, | |
| "loss": 0.3522, | |
| "step": 372500 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "learning_rate": 3.05334861083371e-05, | |
| "loss": 0.3481, | |
| "step": 373000 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "learning_rate": 3.0507206004478132e-05, | |
| "loss": 0.3493, | |
| "step": 373500 | |
| }, | |
| { | |
| "epoch": 5.88, | |
| "learning_rate": 3.0480925900619162e-05, | |
| "loss": 0.3558, | |
| "step": 374000 | |
| }, | |
| { | |
| "epoch": 5.89, | |
| "learning_rate": 3.045469835696791e-05, | |
| "loss": 0.3498, | |
| "step": 374500 | |
| }, | |
| { | |
| "epoch": 5.9, | |
| "learning_rate": 3.0428418253108936e-05, | |
| "loss": 0.3445, | |
| "step": 375000 | |
| }, | |
| { | |
| "epoch": 5.91, | |
| "learning_rate": 3.0402138149249965e-05, | |
| "loss": 0.3541, | |
| "step": 375500 | |
| }, | |
| { | |
| "epoch": 5.91, | |
| "learning_rate": 3.0375858045390998e-05, | |
| "loss": 0.3462, | |
| "step": 376000 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "learning_rate": 3.0349577941532024e-05, | |
| "loss": 0.3411, | |
| "step": 376500 | |
| }, | |
| { | |
| "epoch": 5.93, | |
| "learning_rate": 3.0323297837673054e-05, | |
| "loss": 0.3471, | |
| "step": 377000 | |
| }, | |
| { | |
| "epoch": 5.94, | |
| "learning_rate": 3.0297017733814086e-05, | |
| "loss": 0.346, | |
| "step": 377500 | |
| }, | |
| { | |
| "epoch": 5.94, | |
| "learning_rate": 3.0270737629955116e-05, | |
| "loss": 0.3439, | |
| "step": 378000 | |
| }, | |
| { | |
| "epoch": 5.95, | |
| "learning_rate": 3.0244562646511583e-05, | |
| "loss": 0.3401, | |
| "step": 378500 | |
| }, | |
| { | |
| "epoch": 5.96, | |
| "learning_rate": 3.0218335102860328e-05, | |
| "loss": 0.353, | |
| "step": 379000 | |
| }, | |
| { | |
| "epoch": 5.97, | |
| "learning_rate": 3.0192054999001357e-05, | |
| "loss": 0.358, | |
| "step": 379500 | |
| }, | |
| { | |
| "epoch": 5.98, | |
| "learning_rate": 3.0165774895142383e-05, | |
| "loss": 0.3475, | |
| "step": 380000 | |
| }, | |
| { | |
| "epoch": 5.98, | |
| "learning_rate": 3.013949479128342e-05, | |
| "loss": 0.3462, | |
| "step": 380500 | |
| }, | |
| { | |
| "epoch": 5.99, | |
| "learning_rate": 3.0113214687424445e-05, | |
| "loss": 0.3416, | |
| "step": 381000 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "learning_rate": 3.0086934583565475e-05, | |
| "loss": 0.3525, | |
| "step": 381500 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "eval_bleu": 55.8867, | |
| "eval_gen_len": 15.2207, | |
| "eval_loss": 0.7727451324462891, | |
| "eval_runtime": 9583.1627, | |
| "eval_samples_per_second": 13.471, | |
| "eval_steps_per_second": 1.684, | |
| "step": 381519 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "step": 381519, | |
| "total_flos": 1.3228818910930797e+19, | |
| "train_loss": 0.59605707192661, | |
| "train_runtime": 353473.1733, | |
| "train_samples_per_second": 43.173, | |
| "train_steps_per_second": 2.698 | |
| } | |
| ], | |
| "logging_steps": 500, | |
| "max_steps": 953790, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 15, | |
| "save_steps": 500, | |
| "total_flos": 1.3228818910930797e+19, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
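
The table above is the tail of a Hugging Face `Trainer` state file (`trainer_state.json`): training-loss entries logged every 500 steps, one eval entry per epoch, and a final summary record. As a minimal sketch of how such a log can be inspected programmatically, the snippet below, using only the standard library, splits `log_history` into training and eval entries and reports the per-epoch BLEU trajectory; the filename `trainer_state.json` is an assumption about where the file is saved, not something stated in the log itself.

```python
import json

# Path is an assumption; point it at the trainer_state.json in your checkpoint dir.
with open("trainer_state.json", encoding="utf-8") as f:
    state = json.load(f)

log = state["log_history"]

# Periodic training entries carry "loss"; per-epoch eval entries carry "eval_loss".
# The final summary record carries only "train_loss", so it lands in neither list.
train_logs = [e for e in log if "loss" in e]
eval_logs = [e for e in log if "eval_loss" in e]

print(f"{len(train_logs)} training log points, {len(eval_logs)} eval points")

for e in eval_logs:
    print(f"epoch {e['epoch']:g}: BLEU {e['eval_bleu']:.2f}, "
          f"eval_loss {e['eval_loss']:.4f}")

if eval_logs:
    best = max(eval_logs, key=lambda e: e["eval_bleu"])
    print(f"best BLEU {best['eval_bleu']:.2f} at step {best['step']}")
```

On the entries shown here, this would report BLEU 55.94 / 56.24 / 55.89 for epochs 4, 5, and 6, with `eval_loss` rising (0.686 → 0.718 → 0.773) even as the training loss keeps falling, the usual signature of a run that has started to overfit its training data.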