| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 4.356261837668037, | |
| "global_step": 471500, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 1.9981521688917635e-05, | |
| "loss": 7.1258, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.9963043377835268e-05, | |
| "loss": 5.5209, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.99445650667529e-05, | |
| "loss": 5.0523, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 1.9926086755670534e-05, | |
| "loss": 4.8767, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 1.9907608444588167e-05, | |
| "loss": 4.5768, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 1.9889130133505797e-05, | |
| "loss": 4.5006, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 1.9870651822423434e-05, | |
| "loss": 4.3798, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 1.9852173511341063e-05, | |
| "loss": 4.3774, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 1.9833695200258696e-05, | |
| "loss": 4.2296, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 1.9815216889176333e-05, | |
| "loss": 4.1736, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 1.9796738578093963e-05, | |
| "loss": 4.1228, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 1.9778260267011596e-05, | |
| "loss": 4.0352, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 1.975978195592923e-05, | |
| "loss": 4.0604, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 1.9741303644846862e-05, | |
| "loss": 4.0201, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 1.9722825333764495e-05, | |
| "loss": 4.0062, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 1.970434702268213e-05, | |
| "loss": 3.9278, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 1.968586871159976e-05, | |
| "loss": 3.9048, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 1.9667390400517395e-05, | |
| "loss": 3.8688, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 1.9648912089435028e-05, | |
| "loss": 3.8842, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 1.963043377835266e-05, | |
| "loss": 3.7582, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 1.9611955467270294e-05, | |
| "loss": 3.7539, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 1.9593477156187927e-05, | |
| "loss": 3.758, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 1.957499884510556e-05, | |
| "loss": 3.7592, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 1.955652053402319e-05, | |
| "loss": 3.7405, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 1.9538042222940827e-05, | |
| "loss": 3.6773, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 1.9519563911858456e-05, | |
| "loss": 3.6197, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 1.950108560077609e-05, | |
| "loss": 3.6312, | |
| "step": 13500 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 1.9482607289693726e-05, | |
| "loss": 3.5894, | |
| "step": 14000 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 1.9464128978611356e-05, | |
| "loss": 3.5707, | |
| "step": 14500 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 1.944565066752899e-05, | |
| "loss": 3.5298, | |
| "step": 15000 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 1.9427172356446622e-05, | |
| "loss": 3.5472, | |
| "step": 15500 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 1.9408694045364255e-05, | |
| "loss": 3.5329, | |
| "step": 16000 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 1.939021573428189e-05, | |
| "loss": 3.5442, | |
| "step": 16500 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 1.937173742319952e-05, | |
| "loss": 3.5293, | |
| "step": 17000 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 1.9353259112117155e-05, | |
| "loss": 3.4745, | |
| "step": 17500 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 1.9334780801034788e-05, | |
| "loss": 3.4619, | |
| "step": 18000 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 1.931630248995242e-05, | |
| "loss": 3.4482, | |
| "step": 18500 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 1.9297824178870054e-05, | |
| "loss": 3.4196, | |
| "step": 19000 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 1.9279345867787687e-05, | |
| "loss": 3.4783, | |
| "step": 19500 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 1.9260867556705317e-05, | |
| "loss": 3.399, | |
| "step": 20000 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 1.9242389245622953e-05, | |
| "loss": 3.3835, | |
| "step": 20500 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 1.9223910934540583e-05, | |
| "loss": 3.4451, | |
| "step": 21000 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 1.9205432623458216e-05, | |
| "loss": 3.3599, | |
| "step": 21500 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 1.918695431237585e-05, | |
| "loss": 3.334, | |
| "step": 22000 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 1.9168476001293483e-05, | |
| "loss": 3.3913, | |
| "step": 22500 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 1.9149997690211116e-05, | |
| "loss": 3.3571, | |
| "step": 23000 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 1.913151937912875e-05, | |
| "loss": 3.3312, | |
| "step": 23500 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 1.9113041068046382e-05, | |
| "loss": 3.348, | |
| "step": 24000 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 1.9094562756964015e-05, | |
| "loss": 3.2713, | |
| "step": 24500 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 1.9076084445881648e-05, | |
| "loss": 3.2846, | |
| "step": 25000 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 1.905760613479928e-05, | |
| "loss": 3.2555, | |
| "step": 25500 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 1.9039127823716914e-05, | |
| "loss": 3.2533, | |
| "step": 26000 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 1.9020649512634544e-05, | |
| "loss": 3.214, | |
| "step": 26500 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 1.900217120155218e-05, | |
| "loss": 3.2435, | |
| "step": 27000 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 1.8983692890469814e-05, | |
| "loss": 3.2022, | |
| "step": 27500 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 1.8965214579387447e-05, | |
| "loss": 3.247, | |
| "step": 28000 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 1.894673626830508e-05, | |
| "loss": 3.2188, | |
| "step": 28500 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 1.892825795722271e-05, | |
| "loss": 3.2024, | |
| "step": 29000 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 1.8909779646140346e-05, | |
| "loss": 3.2068, | |
| "step": 29500 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 1.8891301335057976e-05, | |
| "loss": 3.1909, | |
| "step": 30000 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 1.887282302397561e-05, | |
| "loss": 3.1393, | |
| "step": 30500 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 1.8854344712893242e-05, | |
| "loss": 3.1496, | |
| "step": 31000 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 1.8835866401810876e-05, | |
| "loss": 3.1228, | |
| "step": 31500 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 1.881738809072851e-05, | |
| "loss": 3.1803, | |
| "step": 32000 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 1.8798909779646142e-05, | |
| "loss": 3.1288, | |
| "step": 32500 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 1.8780431468563775e-05, | |
| "loss": 3.1643, | |
| "step": 33000 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 1.8761953157481408e-05, | |
| "loss": 3.1125, | |
| "step": 33500 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 1.874347484639904e-05, | |
| "loss": 3.1475, | |
| "step": 34000 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 1.8724996535316674e-05, | |
| "loss": 3.0953, | |
| "step": 34500 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 1.8706518224234308e-05, | |
| "loss": 3.1528, | |
| "step": 35000 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 1.8688039913151937e-05, | |
| "loss": 3.1077, | |
| "step": 35500 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 1.8669561602069574e-05, | |
| "loss": 3.1356, | |
| "step": 36000 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 1.8651083290987207e-05, | |
| "loss": 3.0673, | |
| "step": 36500 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 1.8632604979904837e-05, | |
| "loss": 3.1403, | |
| "step": 37000 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 1.8614126668822473e-05, | |
| "loss": 3.0962, | |
| "step": 37500 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 1.8595648357740103e-05, | |
| "loss": 3.1125, | |
| "step": 38000 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 1.8577170046657736e-05, | |
| "loss": 3.13, | |
| "step": 38500 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 1.855869173557537e-05, | |
| "loss": 3.0321, | |
| "step": 39000 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 1.8540213424493002e-05, | |
| "loss": 3.0702, | |
| "step": 39500 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 1.8521735113410635e-05, | |
| "loss": 3.0713, | |
| "step": 40000 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 1.850325680232827e-05, | |
| "loss": 3.0604, | |
| "step": 40500 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 1.8484778491245902e-05, | |
| "loss": 3.0655, | |
| "step": 41000 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 1.8466300180163535e-05, | |
| "loss": 3.0367, | |
| "step": 41500 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 1.8447821869081168e-05, | |
| "loss": 3.0241, | |
| "step": 42000 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 1.84293435579988e-05, | |
| "loss": 2.9886, | |
| "step": 42500 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 1.8410865246916434e-05, | |
| "loss": 3.0218, | |
| "step": 43000 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 1.8392386935834064e-05, | |
| "loss": 3.0746, | |
| "step": 43500 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 1.83739086247517e-05, | |
| "loss": 3.0504, | |
| "step": 44000 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 1.835543031366933e-05, | |
| "loss": 3.0379, | |
| "step": 44500 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 1.8336952002586967e-05, | |
| "loss": 3.0204, | |
| "step": 45000 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 1.83184736915046e-05, | |
| "loss": 3.0809, | |
| "step": 45500 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 1.829999538042223e-05, | |
| "loss": 2.9938, | |
| "step": 46000 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 1.8281517069339866e-05, | |
| "loss": 3.0016, | |
| "step": 46500 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 1.8263038758257496e-05, | |
| "loss": 2.9624, | |
| "step": 47000 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 1.824456044717513e-05, | |
| "loss": 2.9572, | |
| "step": 47500 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 1.8226082136092762e-05, | |
| "loss": 3.0155, | |
| "step": 48000 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 1.8207603825010395e-05, | |
| "loss": 2.9884, | |
| "step": 48500 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 1.818912551392803e-05, | |
| "loss": 2.9686, | |
| "step": 49000 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 1.817064720284566e-05, | |
| "loss": 2.9476, | |
| "step": 49500 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 1.8152168891763295e-05, | |
| "loss": 2.9751, | |
| "step": 50000 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 1.8133690580680928e-05, | |
| "loss": 2.984, | |
| "step": 50500 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 1.811521226959856e-05, | |
| "loss": 2.9653, | |
| "step": 51000 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 1.8096733958516194e-05, | |
| "loss": 2.9091, | |
| "step": 51500 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 1.8078255647433827e-05, | |
| "loss": 3.019, | |
| "step": 52000 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 1.8059777336351457e-05, | |
| "loss": 2.9941, | |
| "step": 52500 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 1.8041299025269094e-05, | |
| "loss": 2.9615, | |
| "step": 53000 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 1.8022820714186723e-05, | |
| "loss": 2.9589, | |
| "step": 53500 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 1.8004342403104356e-05, | |
| "loss": 2.9565, | |
| "step": 54000 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 1.798586409202199e-05, | |
| "loss": 2.9467, | |
| "step": 54500 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 1.7967385780939623e-05, | |
| "loss": 2.8842, | |
| "step": 55000 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 1.7948907469857256e-05, | |
| "loss": 2.9484, | |
| "step": 55500 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 1.793042915877489e-05, | |
| "loss": 2.8859, | |
| "step": 56000 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 1.7911950847692522e-05, | |
| "loss": 2.9208, | |
| "step": 56500 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 1.7893472536610155e-05, | |
| "loss": 2.8815, | |
| "step": 57000 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 1.787499422552779e-05, | |
| "loss": 2.9142, | |
| "step": 57500 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 1.785651591444542e-05, | |
| "loss": 2.9232, | |
| "step": 58000 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 1.7838037603363055e-05, | |
| "loss": 2.9164, | |
| "step": 58500 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 1.7819559292280684e-05, | |
| "loss": 2.8726, | |
| "step": 59000 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 1.780108098119832e-05, | |
| "loss": 2.8701, | |
| "step": 59500 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 1.7782602670115954e-05, | |
| "loss": 2.8493, | |
| "step": 60000 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 1.7764124359033584e-05, | |
| "loss": 2.897, | |
| "step": 60500 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 1.774564604795122e-05, | |
| "loss": 2.8594, | |
| "step": 61000 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 1.772716773686885e-05, | |
| "loss": 2.9145, | |
| "step": 61500 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 1.7708689425786487e-05, | |
| "loss": 2.904, | |
| "step": 62000 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.7690211114704116e-05, | |
| "loss": 2.8863, | |
| "step": 62500 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.767173280362175e-05, | |
| "loss": 2.8548, | |
| "step": 63000 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.7653254492539383e-05, | |
| "loss": 2.9297, | |
| "step": 63500 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.7634776181457016e-05, | |
| "loss": 2.8743, | |
| "step": 64000 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.761629787037465e-05, | |
| "loss": 2.8878, | |
| "step": 64500 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.7597819559292282e-05, | |
| "loss": 2.8635, | |
| "step": 65000 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7579341248209915e-05, | |
| "loss": 2.8729, | |
| "step": 65500 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.756086293712755e-05, | |
| "loss": 2.8401, | |
| "step": 66000 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.754238462604518e-05, | |
| "loss": 2.8721, | |
| "step": 66500 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.7523906314962815e-05, | |
| "loss": 2.8453, | |
| "step": 67000 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.7505428003880448e-05, | |
| "loss": 2.8517, | |
| "step": 67500 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.7486949692798077e-05, | |
| "loss": 2.9231, | |
| "step": 68000 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.7468471381715714e-05, | |
| "loss": 2.8351, | |
| "step": 68500 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.7449993070633347e-05, | |
| "loss": 2.8635, | |
| "step": 69000 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.7431514759550977e-05, | |
| "loss": 2.8931, | |
| "step": 69500 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.7413036448468613e-05, | |
| "loss": 2.8064, | |
| "step": 70000 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.7394558137386243e-05, | |
| "loss": 2.876, | |
| "step": 70500 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.7376079826303876e-05, | |
| "loss": 2.8275, | |
| "step": 71000 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.735760151522151e-05, | |
| "loss": 2.847, | |
| "step": 71500 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.7339123204139143e-05, | |
| "loss": 2.8854, | |
| "step": 72000 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.7320644893056776e-05, | |
| "loss": 2.8378, | |
| "step": 72500 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.730216658197441e-05, | |
| "loss": 2.8629, | |
| "step": 73000 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.7283688270892042e-05, | |
| "loss": 2.8656, | |
| "step": 73500 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.7265209959809675e-05, | |
| "loss": 2.8133, | |
| "step": 74000 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.7246731648727308e-05, | |
| "loss": 2.8184, | |
| "step": 74500 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.722825333764494e-05, | |
| "loss": 2.8455, | |
| "step": 75000 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.7209775026562575e-05, | |
| "loss": 2.8284, | |
| "step": 75500 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.7191296715480204e-05, | |
| "loss": 2.8016, | |
| "step": 76000 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.717281840439784e-05, | |
| "loss": 2.8171, | |
| "step": 76500 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.715434009331547e-05, | |
| "loss": 2.8079, | |
| "step": 77000 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 1.7135861782233104e-05, | |
| "loss": 2.85, | |
| "step": 77500 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 1.711738347115074e-05, | |
| "loss": 2.7897, | |
| "step": 78000 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 1.709890516006837e-05, | |
| "loss": 2.8143, | |
| "step": 78500 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 1.7080426848986006e-05, | |
| "loss": 2.8232, | |
| "step": 79000 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 1.7061948537903636e-05, | |
| "loss": 2.8294, | |
| "step": 79500 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 1.704347022682127e-05, | |
| "loss": 2.8428, | |
| "step": 80000 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 1.7024991915738902e-05, | |
| "loss": 2.8005, | |
| "step": 80500 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 1.7006513604656536e-05, | |
| "loss": 2.7868, | |
| "step": 81000 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 1.698803529357417e-05, | |
| "loss": 2.8338, | |
| "step": 81500 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 1.6969556982491802e-05, | |
| "loss": 2.7864, | |
| "step": 82000 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 1.6951078671409435e-05, | |
| "loss": 2.7813, | |
| "step": 82500 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 1.6932600360327068e-05, | |
| "loss": 2.8343, | |
| "step": 83000 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 1.69141220492447e-05, | |
| "loss": 2.7908, | |
| "step": 83500 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 1.6895643738162334e-05, | |
| "loss": 2.8147, | |
| "step": 84000 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 1.6877165427079968e-05, | |
| "loss": 2.8035, | |
| "step": 84500 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 1.6858687115997597e-05, | |
| "loss": 2.784, | |
| "step": 85000 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 1.6840208804915234e-05, | |
| "loss": 2.7861, | |
| "step": 85500 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 1.6821730493832864e-05, | |
| "loss": 2.8088, | |
| "step": 86000 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.6803252182750497e-05, | |
| "loss": 2.7635, | |
| "step": 86500 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 1.6784773871668133e-05, | |
| "loss": 2.7762, | |
| "step": 87000 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.6766295560585763e-05, | |
| "loss": 2.8148, | |
| "step": 87500 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 1.6747817249503396e-05, | |
| "loss": 2.7995, | |
| "step": 88000 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.672933893842103e-05, | |
| "loss": 2.784, | |
| "step": 88500 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 1.6710860627338662e-05, | |
| "loss": 2.7567, | |
| "step": 89000 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.6692382316256296e-05, | |
| "loss": 2.8313, | |
| "step": 89500 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 1.667390400517393e-05, | |
| "loss": 2.7554, | |
| "step": 90000 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.6655425694091562e-05, | |
| "loss": 2.7922, | |
| "step": 90500 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 1.6636947383009195e-05, | |
| "loss": 2.7855, | |
| "step": 91000 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.6618469071926828e-05, | |
| "loss": 2.7576, | |
| "step": 91500 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.659999076084446e-05, | |
| "loss": 2.767, | |
| "step": 92000 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 1.6581512449762094e-05, | |
| "loss": 2.7937, | |
| "step": 92500 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.6563034138679724e-05, | |
| "loss": 2.7648, | |
| "step": 93000 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 1.654455582759736e-05, | |
| "loss": 2.7458, | |
| "step": 93500 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.652607751651499e-05, | |
| "loss": 2.7341, | |
| "step": 94000 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 1.6507599205432623e-05, | |
| "loss": 2.8014, | |
| "step": 94500 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.6489120894350257e-05, | |
| "loss": 2.7614, | |
| "step": 95000 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.647064258326789e-05, | |
| "loss": 2.7623, | |
| "step": 95500 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6452164272185526e-05, | |
| "loss": 2.7227, | |
| "step": 96000 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6433685961103156e-05, | |
| "loss": 2.7616, | |
| "step": 96500 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.641520765002079e-05, | |
| "loss": 2.7752, | |
| "step": 97000 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.6396729338938422e-05, | |
| "loss": 2.7813, | |
| "step": 97500 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.6378251027856055e-05, | |
| "loss": 2.7342, | |
| "step": 98000 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.635977271677369e-05, | |
| "loss": 2.7927, | |
| "step": 98500 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.634129440569132e-05, | |
| "loss": 2.7483, | |
| "step": 99000 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.632281609460895e-05, | |
| "loss": 2.7256, | |
| "step": 99500 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.6304337783526588e-05, | |
| "loss": 2.7478, | |
| "step": 100000 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.628585947244422e-05, | |
| "loss": 2.7426, | |
| "step": 100500 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.6267381161361854e-05, | |
| "loss": 2.7317, | |
| "step": 101000 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.6248902850279487e-05, | |
| "loss": 2.7721, | |
| "step": 101500 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.6230424539197117e-05, | |
| "loss": 2.7023, | |
| "step": 102000 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.6211946228114754e-05, | |
| "loss": 2.7384, | |
| "step": 102500 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.6193467917032383e-05, | |
| "loss": 2.7692, | |
| "step": 103000 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.6174989605950017e-05, | |
| "loss": 2.7423, | |
| "step": 103500 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.615651129486765e-05, | |
| "loss": 2.7832, | |
| "step": 104000 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.6138032983785283e-05, | |
| "loss": 2.7788, | |
| "step": 104500 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.6119554672702916e-05, | |
| "loss": 2.741, | |
| "step": 105000 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.610107636162055e-05, | |
| "loss": 2.7655, | |
| "step": 105500 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.6082598050538182e-05, | |
| "loss": 2.7453, | |
| "step": 106000 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 1.6064119739455815e-05, | |
| "loss": 2.6899, | |
| "step": 106500 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.604564142837345e-05, | |
| "loss": 2.7753, | |
| "step": 107000 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.602716311729108e-05, | |
| "loss": 2.7263, | |
| "step": 107500 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.6008684806208715e-05, | |
| "loss": 2.7558, | |
| "step": 108000 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.5990206495126344e-05, | |
| "loss": 2.666, | |
| "step": 108500 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 1.597172818404398e-05, | |
| "loss": 2.6032, | |
| "step": 109000 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "learning_rate": 1.5953249872961614e-05, | |
| "loss": 2.6144, | |
| "step": 109500 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 1.5934771561879244e-05, | |
| "loss": 2.6419, | |
| "step": 110000 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 1.591629325079688e-05, | |
| "loss": 2.5979, | |
| "step": 110500 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 1.589781493971451e-05, | |
| "loss": 2.5684, | |
| "step": 111000 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 1.5879336628632143e-05, | |
| "loss": 2.5772, | |
| "step": 111500 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 1.5860858317549776e-05, | |
| "loss": 2.5898, | |
| "step": 112000 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 1.584238000646741e-05, | |
| "loss": 2.5996, | |
| "step": 112500 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 1.5823901695385043e-05, | |
| "loss": 2.6248, | |
| "step": 113000 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 1.5805423384302676e-05, | |
| "loss": 2.5905, | |
| "step": 113500 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 1.578694507322031e-05, | |
| "loss": 2.6062, | |
| "step": 114000 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 1.5768466762137942e-05, | |
| "loss": 2.5785, | |
| "step": 114500 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 1.5749988451055575e-05, | |
| "loss": 2.5879, | |
| "step": 115000 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 1.573151013997321e-05, | |
| "loss": 2.5658, | |
| "step": 115500 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 1.571303182889084e-05, | |
| "loss": 2.5939, | |
| "step": 116000 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 1.569455351780847e-05, | |
| "loss": 2.6098, | |
| "step": 116500 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 1.5676075206726108e-05, | |
| "loss": 2.5982, | |
| "step": 117000 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 1.5657596895643738e-05, | |
| "loss": 2.5662, | |
| "step": 117500 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 1.5639118584561374e-05, | |
| "loss": 2.6017, | |
| "step": 118000 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 1.5620640273479007e-05, | |
| "loss": 2.5692, | |
| "step": 118500 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 1.5602161962396637e-05, | |
| "loss": 2.5769, | |
| "step": 119000 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 1.5583683651314273e-05, | |
| "loss": 2.6025, | |
| "step": 119500 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 1.5565205340231903e-05, | |
| "loss": 2.5788, | |
| "step": 120000 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 1.5546727029149536e-05, | |
| "loss": 2.5586, | |
| "step": 120500 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 1.552824871806717e-05, | |
| "loss": 2.5757, | |
| "step": 121000 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 1.5509770406984803e-05, | |
| "loss": 2.5424, | |
| "step": 121500 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 1.5491292095902436e-05, | |
| "loss": 2.6099, | |
| "step": 122000 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 1.547281378482007e-05, | |
| "loss": 2.5691, | |
| "step": 122500 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 1.5454335473737702e-05, | |
| "loss": 2.5457, | |
| "step": 123000 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 1.5435857162655335e-05, | |
| "loss": 2.6225, | |
| "step": 123500 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 1.5417378851572968e-05, | |
| "loss": 2.5446, | |
| "step": 124000 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 1.53989005404906e-05, | |
| "loss": 2.5423, | |
| "step": 124500 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 1.5380422229408235e-05, | |
| "loss": 2.5384, | |
| "step": 125000 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 1.5361943918325864e-05, | |
| "loss": 2.5301, | |
| "step": 125500 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 1.53434656072435e-05, | |
| "loss": 2.5957, | |
| "step": 126000 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 1.532498729616113e-05, | |
| "loss": 2.5652, | |
| "step": 126500 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 1.5306508985078764e-05, | |
| "loss": 2.6067, | |
| "step": 127000 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 1.52880306739964e-05, | |
| "loss": 2.5363, | |
| "step": 127500 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 1.526955236291403e-05, | |
| "loss": 2.5819, | |
| "step": 128000 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 1.5251074051831665e-05, | |
| "loss": 2.548, | |
| "step": 128500 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 1.5232595740749296e-05, | |
| "loss": 2.5577, | |
| "step": 129000 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 1.5214117429666931e-05, | |
| "loss": 2.5046, | |
| "step": 129500 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 1.5195639118584562e-05, | |
| "loss": 2.5603, | |
| "step": 130000 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 1.5177160807502196e-05, | |
| "loss": 2.5942, | |
| "step": 130500 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 1.5158682496419827e-05, | |
| "loss": 2.5517, | |
| "step": 131000 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 1.5140204185337462e-05, | |
| "loss": 2.5474, | |
| "step": 131500 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 1.5121725874255095e-05, | |
| "loss": 2.5481, | |
| "step": 132000 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 1.5103247563172726e-05, | |
| "loss": 2.6017, | |
| "step": 132500 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.5084769252090361e-05, | |
| "loss": 2.5765, | |
| "step": 133000 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.5066290941007993e-05, | |
| "loss": 2.5698, | |
| "step": 133500 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.5047812629925626e-05, | |
| "loss": 2.5616, | |
| "step": 134000 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.5029334318843259e-05, | |
| "loss": 2.5596, | |
| "step": 134500 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.5010856007760892e-05, | |
| "loss": 2.5728, | |
| "step": 135000 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.4992377696678524e-05, | |
| "loss": 2.5836, | |
| "step": 135500 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.4973899385596158e-05, | |
| "loss": 2.5333, | |
| "step": 136000 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.4955421074513792e-05, | |
| "loss": 2.6014, | |
| "step": 136500 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.4936942763431423e-05, | |
| "loss": 2.5499, | |
| "step": 137000 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.4918464452349058e-05, | |
| "loss": 2.5815, | |
| "step": 137500 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.489998614126669e-05, | |
| "loss": 2.5454, | |
| "step": 138000 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.4881507830184322e-05, | |
| "loss": 2.5929, | |
| "step": 138500 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.4863029519101956e-05, | |
| "loss": 2.5813, | |
| "step": 139000 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.4844551208019589e-05, | |
| "loss": 2.5438, | |
| "step": 139500 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.482607289693722e-05, | |
| "loss": 2.5451, | |
| "step": 140000 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 1.4807594585854855e-05, | |
| "loss": 2.5791, | |
| "step": 140500 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 1.4789116274772486e-05, | |
| "loss": 2.5478, | |
| "step": 141000 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.477063796369012e-05, | |
| "loss": 2.5639, | |
| "step": 141500 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.4752159652607754e-05, | |
| "loss": 2.5662, | |
| "step": 142000 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.4733681341525386e-05, | |
| "loss": 2.5652, | |
| "step": 142500 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.4715203030443019e-05, | |
| "loss": 2.5499, | |
| "step": 143000 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 1.469672471936065e-05, | |
| "loss": 2.5116, | |
| "step": 143500 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 1.4678246408278285e-05, | |
| "loss": 2.5803, | |
| "step": 144000 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.4659768097195917e-05, | |
| "loss": 2.5343, | |
| "step": 144500 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.464128978611355e-05, | |
| "loss": 2.5795, | |
| "step": 145000 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.4622811475031183e-05, | |
| "loss": 2.5763, | |
| "step": 145500 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.4604333163948816e-05, | |
| "loss": 2.5799, | |
| "step": 146000 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.4585854852866451e-05, | |
| "loss": 2.5709, | |
| "step": 146500 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 1.4567376541784082e-05, | |
| "loss": 2.5659, | |
| "step": 147000 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 1.4548898230701715e-05, | |
| "loss": 2.5412, | |
| "step": 147500 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.4530419919619347e-05, | |
| "loss": 2.5269, | |
| "step": 148000 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.4511941608536982e-05, | |
| "loss": 2.5604, | |
| "step": 148500 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 1.4493463297454613e-05, | |
| "loss": 2.5842, | |
| "step": 149000 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 1.4474984986372246e-05, | |
| "loss": 2.5504, | |
| "step": 149500 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.445650667528988e-05, | |
| "loss": 2.5107, | |
| "step": 150000 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.4438028364207513e-05, | |
| "loss": 2.566, | |
| "step": 150500 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.4419550053125146e-05, | |
| "loss": 2.5209, | |
| "step": 151000 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.4401071742042779e-05, | |
| "loss": 2.5329, | |
| "step": 151500 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.4382593430960412e-05, | |
| "loss": 2.5587, | |
| "step": 152000 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.4364115119878043e-05, | |
| "loss": 2.542, | |
| "step": 152500 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.4345636808795678e-05, | |
| "loss": 2.5949, | |
| "step": 153000 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.432715849771331e-05, | |
| "loss": 2.525, | |
| "step": 153500 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.4308680186630943e-05, | |
| "loss": 2.528, | |
| "step": 154000 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 1.4290201875548574e-05, | |
| "loss": 2.5374, | |
| "step": 154500 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 1.4271723564466209e-05, | |
| "loss": 2.5526, | |
| "step": 155000 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.4253245253383842e-05, | |
| "loss": 2.5935, | |
| "step": 155500 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.4234766942301475e-05, | |
| "loss": 2.5392, | |
| "step": 156000 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.4216288631219108e-05, | |
| "loss": 2.5333, | |
| "step": 156500 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.419781032013674e-05, | |
| "loss": 2.5245, | |
| "step": 157000 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.4179332009054375e-05, | |
| "loss": 2.5778, | |
| "step": 157500 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.4160853697972006e-05, | |
| "loss": 2.5194, | |
| "step": 158000 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.414237538688964e-05, | |
| "loss": 2.5881, | |
| "step": 158500 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.412389707580727e-05, | |
| "loss": 2.5804, | |
| "step": 159000 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.4105418764724906e-05, | |
| "loss": 2.508, | |
| "step": 159500 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.4086940453642539e-05, | |
| "loss": 2.5474, | |
| "step": 160000 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.406846214256017e-05, | |
| "loss": 2.5116, | |
| "step": 160500 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 1.4049983831477805e-05, | |
| "loss": 2.5672, | |
| "step": 161000 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 1.4031505520395436e-05, | |
| "loss": 2.537, | |
| "step": 161500 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.401302720931307e-05, | |
| "loss": 2.5483, | |
| "step": 162000 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.3994548898230703e-05, | |
| "loss": 2.5212, | |
| "step": 162500 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 1.3976070587148336e-05, | |
| "loss": 2.5543, | |
| "step": 163000 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 1.3957592276065967e-05, | |
| "loss": 2.5581, | |
| "step": 163500 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.3939113964983602e-05, | |
| "loss": 2.4957, | |
| "step": 164000 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.3920635653901235e-05, | |
| "loss": 2.5006, | |
| "step": 164500 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.3902157342818867e-05, | |
| "loss": 2.5718, | |
| "step": 165000 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.3883679031736502e-05, | |
| "loss": 2.5013, | |
| "step": 165500 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.3865200720654133e-05, | |
| "loss": 2.5237, | |
| "step": 166000 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 1.3846722409571766e-05, | |
| "loss": 2.5634, | |
| "step": 166500 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 1.38282440984894e-05, | |
| "loss": 2.5409, | |
| "step": 167000 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.3809765787407032e-05, | |
| "loss": 2.5229, | |
| "step": 167500 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.3791287476324664e-05, | |
| "loss": 2.5344, | |
| "step": 168000 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.3772809165242299e-05, | |
| "loss": 2.5384, | |
| "step": 168500 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.3754330854159932e-05, | |
| "loss": 2.527, | |
| "step": 169000 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.3735852543077563e-05, | |
| "loss": 2.57, | |
| "step": 169500 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.3717374231995198e-05, | |
| "loss": 2.5128, | |
| "step": 170000 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.369889592091283e-05, | |
| "loss": 2.5576, | |
| "step": 170500 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.3680417609830463e-05, | |
| "loss": 2.5301, | |
| "step": 171000 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.3661939298748094e-05, | |
| "loss": 2.5261, | |
| "step": 171500 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 1.3643460987665729e-05, | |
| "loss": 2.493, | |
| "step": 172000 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 1.362498267658336e-05, | |
| "loss": 2.5373, | |
| "step": 172500 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.3606504365500993e-05, | |
| "loss": 2.5643, | |
| "step": 173000 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.3588026054418628e-05, | |
| "loss": 2.5109, | |
| "step": 173500 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.356954774333626e-05, | |
| "loss": 2.4954, | |
| "step": 174000 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.3551069432253895e-05, | |
| "loss": 2.463, | |
| "step": 174500 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 1.3532591121171526e-05, | |
| "loss": 2.5519, | |
| "step": 175000 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 1.3514112810089159e-05, | |
| "loss": 2.5034, | |
| "step": 175500 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 1.349563449900679e-05, | |
| "loss": 2.5064, | |
| "step": 176000 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 1.3477156187924425e-05, | |
| "loss": 2.5091, | |
| "step": 176500 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 1.3458677876842057e-05, | |
| "loss": 2.5075, | |
| "step": 177000 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 1.344019956575969e-05, | |
| "loss": 2.5298, | |
| "step": 177500 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 1.3421721254677325e-05, | |
| "loss": 2.5058, | |
| "step": 178000 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 1.3403242943594956e-05, | |
| "loss": 2.5602, | |
| "step": 178500 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 1.338476463251259e-05, | |
| "loss": 2.5529, | |
| "step": 179000 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 1.3366286321430223e-05, | |
| "loss": 2.4943, | |
| "step": 179500 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 1.3347808010347856e-05, | |
| "loss": 2.5088, | |
| "step": 180000 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 1.3329329699265487e-05, | |
| "loss": 2.5097, | |
| "step": 180500 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 1.3310851388183122e-05, | |
| "loss": 2.5046, | |
| "step": 181000 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 1.3292373077100753e-05, | |
| "loss": 2.5128, | |
| "step": 181500 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 1.3273894766018386e-05, | |
| "loss": 2.5115, | |
| "step": 182000 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 1.3255416454936021e-05, | |
| "loss": 2.4919, | |
| "step": 182500 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 1.3236938143853653e-05, | |
| "loss": 2.5723, | |
| "step": 183000 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 1.3218459832771286e-05, | |
| "loss": 2.5312, | |
| "step": 183500 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 1.3199981521688919e-05, | |
| "loss": 2.5094, | |
| "step": 184000 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 1.3181503210606552e-05, | |
| "loss": 2.543, | |
| "step": 184500 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 1.3163024899524184e-05, | |
| "loss": 2.5183, | |
| "step": 185000 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 1.3144546588441818e-05, | |
| "loss": 2.5029, | |
| "step": 185500 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 1.312606827735945e-05, | |
| "loss": 2.5238, | |
| "step": 186000 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 1.3107589966277083e-05, | |
| "loss": 2.5096, | |
| "step": 186500 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 1.3089111655194718e-05, | |
| "loss": 2.5104, | |
| "step": 187000 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 1.307063334411235e-05, | |
| "loss": 2.5041, | |
| "step": 187500 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 1.3052155033029982e-05, | |
| "loss": 2.5176, | |
| "step": 188000 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 1.3033676721947614e-05, | |
| "loss": 2.486, | |
| "step": 188500 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 1.3015198410865249e-05, | |
| "loss": 2.483, | |
| "step": 189000 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 1.299672009978288e-05, | |
| "loss": 2.5315, | |
| "step": 189500 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 1.2978241788700513e-05, | |
| "loss": 2.5056, | |
| "step": 190000 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 1.2959763477618146e-05, | |
| "loss": 2.5305, | |
| "step": 190500 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 1.294128516653578e-05, | |
| "loss": 2.5192, | |
| "step": 191000 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 1.2922806855453414e-05, | |
| "loss": 2.5272, | |
| "step": 191500 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 1.2904328544371046e-05, | |
| "loss": 2.4726, | |
| "step": 192000 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 1.2885850233288679e-05, | |
| "loss": 2.4869, | |
| "step": 192500 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 1.286737192220631e-05, | |
| "loss": 2.5295, | |
| "step": 193000 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 1.2848893611123945e-05, | |
| "loss": 2.5419, | |
| "step": 193500 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 1.2830415300041577e-05, | |
| "loss": 2.499, | |
| "step": 194000 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 1.281193698895921e-05, | |
| "loss": 2.5108, | |
| "step": 194500 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 1.2793458677876843e-05, | |
| "loss": 2.5247, | |
| "step": 195000 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 1.2774980366794476e-05, | |
| "loss": 2.5338, | |
| "step": 195500 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 1.275650205571211e-05, | |
| "loss": 2.4867, | |
| "step": 196000 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 1.2738023744629742e-05, | |
| "loss": 2.4729, | |
| "step": 196500 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 1.2719545433547375e-05, | |
| "loss": 2.4709, | |
| "step": 197000 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 1.2701067122465007e-05, | |
| "loss": 2.5118, | |
| "step": 197500 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 1.2682588811382642e-05, | |
| "loss": 2.4901, | |
| "step": 198000 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 1.2664110500300273e-05, | |
| "loss": 2.4765, | |
| "step": 198500 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 1.2645632189217906e-05, | |
| "loss": 2.502, | |
| "step": 199000 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 1.2627153878135538e-05, | |
| "loss": 2.4899, | |
| "step": 199500 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 1.2608675567053173e-05, | |
| "loss": 2.4818, | |
| "step": 200000 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 1.2590197255970806e-05, | |
| "loss": 2.4497, | |
| "step": 200500 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 1.2571718944888439e-05, | |
| "loss": 2.4892, | |
| "step": 201000 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 1.2553240633806072e-05, | |
| "loss": 2.5221, | |
| "step": 201500 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 1.2534762322723703e-05, | |
| "loss": 2.4807, | |
| "step": 202000 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 1.2516284011641338e-05, | |
| "loss": 2.5089, | |
| "step": 202500 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 1.249780570055897e-05, | |
| "loss": 2.5509, | |
| "step": 203000 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 1.2479327389476603e-05, | |
| "loss": 2.4828, | |
| "step": 203500 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 1.2460849078394234e-05, | |
| "loss": 2.5082, | |
| "step": 204000 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 1.2442370767311869e-05, | |
| "loss": 2.4925, | |
| "step": 204500 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 1.2423892456229502e-05, | |
| "loss": 2.5149, | |
| "step": 205000 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 1.2405414145147134e-05, | |
| "loss": 2.5107, | |
| "step": 205500 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 1.2386935834064768e-05, | |
| "loss": 2.5192, | |
| "step": 206000 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 1.23684575229824e-05, | |
| "loss": 2.4998, | |
| "step": 206500 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 1.2349979211900033e-05, | |
| "loss": 2.5069, | |
| "step": 207000 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 1.2331500900817666e-05, | |
| "loss": 2.5085, | |
| "step": 207500 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 1.23130225897353e-05, | |
| "loss": 2.4966, | |
| "step": 208000 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 1.229454427865293e-05, | |
| "loss": 2.5121, | |
| "step": 208500 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 1.2276065967570566e-05, | |
| "loss": 2.5292, | |
| "step": 209000 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 1.2257587656488199e-05, | |
| "loss": 2.4772, | |
| "step": 209500 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 1.223910934540583e-05, | |
| "loss": 2.4997, | |
| "step": 210000 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 1.2220631034323465e-05, | |
| "loss": 2.4637, | |
| "step": 210500 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 1.2202152723241096e-05, | |
| "loss": 2.5074, | |
| "step": 211000 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 1.218367441215873e-05, | |
| "loss": 2.5076, | |
| "step": 211500 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 1.2165196101076363e-05, | |
| "loss": 2.4681, | |
| "step": 212000 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 1.2146717789993996e-05, | |
| "loss": 2.5092, | |
| "step": 212500 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 1.2128239478911627e-05, | |
| "loss": 2.5026, | |
| "step": 213000 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 1.2109761167829262e-05, | |
| "loss": 2.4674, | |
| "step": 213500 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 1.2091282856746895e-05, | |
| "loss": 2.4886, | |
| "step": 214000 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 1.2072804545664527e-05, | |
| "loss": 2.4774, | |
| "step": 214500 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 1.2054326234582162e-05, | |
| "loss": 2.4914, | |
| "step": 215000 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 1.2035847923499793e-05, | |
| "loss": 2.4717, | |
| "step": 215500 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 1.2017369612417426e-05, | |
| "loss": 2.5048, | |
| "step": 216000 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 1.1998891301335058e-05, | |
| "loss": 2.4925, | |
| "step": 216500 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 1.1980412990252692e-05, | |
| "loss": 2.3719, | |
| "step": 217000 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 1.1961934679170324e-05, | |
| "loss": 2.3381, | |
| "step": 217500 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 1.1943456368087959e-05, | |
| "loss": 2.3243, | |
| "step": 218000 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 1.1924978057005592e-05, | |
| "loss": 2.3464, | |
| "step": 218500 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 1.1906499745923223e-05, | |
| "loss": 2.3462, | |
| "step": 219000 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 1.1888021434840858e-05, | |
| "loss": 2.3335, | |
| "step": 219500 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 1.186954312375849e-05, | |
| "loss": 2.3168, | |
| "step": 220000 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 1.1851064812676123e-05, | |
| "loss": 2.3521, | |
| "step": 220500 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 1.1832586501593754e-05, | |
| "loss": 2.3315, | |
| "step": 221000 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 1.1814108190511389e-05, | |
| "loss": 2.351, | |
| "step": 221500 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 1.179562987942902e-05, | |
| "loss": 2.3241, | |
| "step": 222000 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 1.1777151568346653e-05, | |
| "loss": 2.3477, | |
| "step": 222500 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 1.1758673257264288e-05, | |
| "loss": 2.3683, | |
| "step": 223000 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 1.174019494618192e-05, | |
| "loss": 2.3604, | |
| "step": 223500 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 1.1721716635099553e-05, | |
| "loss": 2.2992, | |
| "step": 224000 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 1.1703238324017186e-05, | |
| "loss": 2.3632, | |
| "step": 224500 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 1.1684760012934819e-05, | |
| "loss": 2.3751, | |
| "step": 225000 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 1.166628170185245e-05, | |
| "loss": 2.3378, | |
| "step": 225500 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 1.1647803390770085e-05, | |
| "loss": 2.2923, | |
| "step": 226000 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 1.1629325079687717e-05, | |
| "loss": 2.3584, | |
| "step": 226500 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 1.161084676860535e-05, | |
| "loss": 2.3152, | |
| "step": 227000 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 1.1592368457522985e-05, | |
| "loss": 2.3255, | |
| "step": 227500 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 1.1573890146440616e-05, | |
| "loss": 2.3649, | |
| "step": 228000 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 1.155541183535825e-05, | |
| "loss": 2.3453, | |
| "step": 228500 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 1.1536933524275883e-05, | |
| "loss": 2.3437, | |
| "step": 229000 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 1.1518455213193516e-05, | |
| "loss": 2.3135, | |
| "step": 229500 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 1.1499976902111147e-05, | |
| "loss": 2.3548, | |
| "step": 230000 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 1.1481498591028782e-05, | |
| "loss": 2.3307, | |
| "step": 230500 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 1.1463020279946413e-05, | |
| "loss": 2.3461, | |
| "step": 231000 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 1.1444541968864047e-05, | |
| "loss": 2.3833, | |
| "step": 231500 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 1.1426063657781678e-05, | |
| "loss": 2.3491, | |
| "step": 232000 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 1.1407585346699313e-05, | |
| "loss": 2.32, | |
| "step": 232500 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 1.1389107035616946e-05, | |
| "loss": 2.3171, | |
| "step": 233000 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 1.1370628724534577e-05, | |
| "loss": 2.335, | |
| "step": 233500 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 1.1352150413452212e-05, | |
| "loss": 2.3715, | |
| "step": 234000 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 1.1333672102369844e-05, | |
| "loss": 2.3689, | |
| "step": 234500 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 1.1315193791287478e-05, | |
| "loss": 2.3841, | |
| "step": 235000 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 1.129671548020511e-05, | |
| "loss": 2.361, | |
| "step": 235500 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 1.1278237169122743e-05, | |
| "loss": 2.3446, | |
| "step": 236000 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 1.1259758858040374e-05, | |
| "loss": 2.3397, | |
| "step": 236500 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 1.124128054695801e-05, | |
| "loss": 2.3415, | |
| "step": 237000 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 1.1222802235875642e-05, | |
| "loss": 2.3393, | |
| "step": 237500 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 1.1204323924793274e-05, | |
| "loss": 2.32, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 1.1185845613710909e-05, | |
| "loss": 2.3423, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 1.116736730262854e-05, | |
| "loss": 2.3719, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 1.1148888991546173e-05, | |
| "loss": 2.3623, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 1.1130410680463806e-05, | |
| "loss": 2.3748, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 1.111193236938144e-05, | |
| "loss": 2.3552, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 1.1093454058299071e-05, | |
| "loss": 2.3468, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 1.1074975747216706e-05, | |
| "loss": 2.3332, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 1.1056497436134339e-05, | |
| "loss": 2.3224, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 1.103801912505197e-05, | |
| "loss": 2.3716, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 1.1019540813969605e-05, | |
| "loss": 2.381, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 1.1001062502887237e-05, | |
| "loss": 2.3566, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 1.098258419180487e-05, | |
| "loss": 2.3476, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 1.0964105880722501e-05, | |
| "loss": 2.3324, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 1.0945627569640136e-05, | |
| "loss": 2.3914, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 1.0927149258557768e-05, | |
| "loss": 2.3442, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 1.0908670947475402e-05, | |
| "loss": 2.3431, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 1.0890192636393035e-05, | |
| "loss": 2.3624, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 1.0871714325310667e-05, | |
| "loss": 2.3144, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 1.0853236014228302e-05, | |
| "loss": 2.3672, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 1.0834757703145933e-05, | |
| "loss": 2.3484, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 1.0816279392063566e-05, | |
| "loss": 2.3321, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 1.0797801080981198e-05, | |
| "loss": 2.3791, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 1.0779322769898833e-05, | |
| "loss": 2.3477, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 1.0760844458816464e-05, | |
| "loss": 2.3152, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 1.0742366147734097e-05, | |
| "loss": 2.3549, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 1.0723887836651732e-05, | |
| "loss": 2.3953, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 1.0705409525569363e-05, | |
| "loss": 2.3436, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 1.0686931214486998e-05, | |
| "loss": 2.347, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 1.066845290340463e-05, | |
| "loss": 2.3581, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 1.0649974592322263e-05, | |
| "loss": 2.3695, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 1.0631496281239894e-05, | |
| "loss": 2.3566, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 1.0613017970157529e-05, | |
| "loss": 2.3382, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 1.059453965907516e-05, | |
| "loss": 2.3469, | |
| "step": 254500 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 1.0576061347992794e-05, | |
| "loss": 2.3554, | |
| "step": 255000 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 1.0557583036910429e-05, | |
| "loss": 2.3519, | |
| "step": 255500 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 1.053910472582806e-05, | |
| "loss": 2.3196, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 1.0520626414745693e-05, | |
| "loss": 2.3329, | |
| "step": 256500 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 1.0502148103663326e-05, | |
| "loss": 2.3814, | |
| "step": 257000 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 1.048366979258096e-05, | |
| "loss": 2.3391, | |
| "step": 257500 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 1.046519148149859e-05, | |
| "loss": 2.3537, | |
| "step": 258000 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 1.0446713170416226e-05, | |
| "loss": 2.3151, | |
| "step": 258500 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 1.0428234859333857e-05, | |
| "loss": 2.3309, | |
| "step": 259000 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 1.040975654825149e-05, | |
| "loss": 2.3316, | |
| "step": 259500 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 1.0391278237169125e-05, | |
| "loss": 2.2941, | |
| "step": 260000 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 1.0372799926086756e-05, | |
| "loss": 2.3783, | |
| "step": 260500 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 1.035432161500439e-05, | |
| "loss": 2.3473, | |
| "step": 261000 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 1.0335843303922021e-05, | |
| "loss": 2.3521, | |
| "step": 261500 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 1.0317364992839656e-05, | |
| "loss": 2.334, | |
| "step": 262000 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 1.0298886681757287e-05, | |
| "loss": 2.3325, | |
| "step": 262500 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 1.0280408370674922e-05, | |
| "loss": 2.3124, | |
| "step": 263000 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 1.0261930059592554e-05, | |
| "loss": 2.3612, | |
| "step": 263500 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 1.0243451748510187e-05, | |
| "loss": 2.3725, | |
| "step": 264000 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 1.0224973437427822e-05, | |
| "loss": 2.3502, | |
| "step": 264500 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 1.0206495126345453e-05, | |
| "loss": 2.3483, | |
| "step": 265000 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 1.0188016815263086e-05, | |
| "loss": 2.3144, | |
| "step": 265500 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 1.0169538504180718e-05, | |
| "loss": 2.3023, | |
| "step": 266000 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 1.0151060193098352e-05, | |
| "loss": 2.3198, | |
| "step": 266500 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 1.0132581882015984e-05, | |
| "loss": 2.342, | |
| "step": 267000 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 1.0114103570933617e-05, | |
| "loss": 2.3899, | |
| "step": 267500 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 1.009562525985125e-05, | |
| "loss": 2.3529, | |
| "step": 268000 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 1.0077146948768883e-05, | |
| "loss": 2.3683, | |
| "step": 268500 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 1.0058668637686518e-05, | |
| "loss": 2.3167, | |
| "step": 269000 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 1.004019032660415e-05, | |
| "loss": 2.3245, | |
| "step": 269500 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 1.0021712015521783e-05, | |
| "loss": 2.3046, | |
| "step": 270000 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 1.0003233704439414e-05, | |
| "loss": 2.3626, | |
| "step": 270500 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 9.984755393357049e-06, | |
| "loss": 2.3332, | |
| "step": 271000 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 9.966277082274682e-06, | |
| "loss": 2.3387, | |
| "step": 271500 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 9.947798771192313e-06, | |
| "loss": 2.3379, | |
| "step": 272000 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 9.929320460109947e-06, | |
| "loss": 2.3274, | |
| "step": 272500 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 9.91084214902758e-06, | |
| "loss": 2.3702, | |
| "step": 273000 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 9.892363837945213e-06, | |
| "loss": 2.3318, | |
| "step": 273500 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 9.873885526862846e-06, | |
| "loss": 2.3171, | |
| "step": 274000 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 9.855407215780477e-06, | |
| "loss": 2.336, | |
| "step": 274500 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 9.83692890469811e-06, | |
| "loss": 2.3627, | |
| "step": 275000 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 9.818450593615745e-06, | |
| "loss": 2.2901, | |
| "step": 275500 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 9.799972282533377e-06, | |
| "loss": 2.3713, | |
| "step": 276000 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 9.78149397145101e-06, | |
| "loss": 2.3345, | |
| "step": 276500 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 9.763015660368643e-06, | |
| "loss": 2.3496, | |
| "step": 277000 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 9.744537349286276e-06, | |
| "loss": 2.3653, | |
| "step": 277500 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 9.72605903820391e-06, | |
| "loss": 2.3193, | |
| "step": 278000 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 9.707580727121541e-06, | |
| "loss": 2.3846, | |
| "step": 278500 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 9.689102416039174e-06, | |
| "loss": 2.3451, | |
| "step": 279000 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 9.670624104956807e-06, | |
| "loss": 2.3694, | |
| "step": 279500 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 9.652145793874442e-06, | |
| "loss": 2.373, | |
| "step": 280000 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 9.633667482792073e-06, | |
| "loss": 2.3472, | |
| "step": 280500 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 9.615189171709707e-06, | |
| "loss": 2.3217, | |
| "step": 281000 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 9.59671086062734e-06, | |
| "loss": 2.3084, | |
| "step": 281500 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 9.578232549544973e-06, | |
| "loss": 2.3005, | |
| "step": 282000 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 9.559754238462606e-06, | |
| "loss": 2.3495, | |
| "step": 282500 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 9.541275927380237e-06, | |
| "loss": 2.3317, | |
| "step": 283000 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 9.52279761629787e-06, | |
| "loss": 2.3088, | |
| "step": 283500 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 9.504319305215504e-06, | |
| "loss": 2.3218, | |
| "step": 284000 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 9.485840994133137e-06, | |
| "loss": 2.3223, | |
| "step": 284500 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 9.46736268305077e-06, | |
| "loss": 2.352, | |
| "step": 285000 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 9.448884371968403e-06, | |
| "loss": 2.359, | |
| "step": 285500 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 9.430406060886036e-06, | |
| "loss": 2.3753, | |
| "step": 286000 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 9.41192774980367e-06, | |
| "loss": 2.3196, | |
| "step": 286500 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 9.3934494387213e-06, | |
| "loss": 2.3468, | |
| "step": 287000 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 9.374971127638934e-06, | |
| "loss": 2.3349, | |
| "step": 287500 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 9.356492816556567e-06, | |
| "loss": 2.3598, | |
| "step": 288000 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 9.3380145054742e-06, | |
| "loss": 2.3088, | |
| "step": 288500 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 9.319536194391833e-06, | |
| "loss": 2.3386, | |
| "step": 289000 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 9.301057883309466e-06, | |
| "loss": 2.3395, | |
| "step": 289500 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 9.2825795722271e-06, | |
| "loss": 2.3404, | |
| "step": 290000 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 9.264101261144733e-06, | |
| "loss": 2.3428, | |
| "step": 290500 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 9.245622950062366e-06, | |
| "loss": 2.3311, | |
| "step": 291000 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 9.227144638979997e-06, | |
| "loss": 2.3582, | |
| "step": 291500 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 9.20866632789763e-06, | |
| "loss": 2.3103, | |
| "step": 292000 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 9.190188016815264e-06, | |
| "loss": 2.3469, | |
| "step": 292500 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 9.171709705732897e-06, | |
| "loss": 2.3559, | |
| "step": 293000 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 9.15323139465053e-06, | |
| "loss": 2.3377, | |
| "step": 293500 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 9.134753083568163e-06, | |
| "loss": 2.3873, | |
| "step": 294000 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 9.116274772485796e-06, | |
| "loss": 2.3587, | |
| "step": 294500 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 9.09779646140343e-06, | |
| "loss": 2.2913, | |
| "step": 295000 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 9.07931815032106e-06, | |
| "loss": 2.3451, | |
| "step": 295500 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 9.060839839238694e-06, | |
| "loss": 2.3546, | |
| "step": 296000 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 9.042361528156327e-06, | |
| "loss": 2.3418, | |
| "step": 296500 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 9.02388321707396e-06, | |
| "loss": 2.3379, | |
| "step": 297000 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 9.005404905991593e-06, | |
| "loss": 2.3384, | |
| "step": 297500 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 8.986926594909226e-06, | |
| "loss": 2.3885, | |
| "step": 298000 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 8.96844828382686e-06, | |
| "loss": 2.3283, | |
| "step": 298500 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 8.949969972744493e-06, | |
| "loss": 2.2953, | |
| "step": 299000 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 8.931491661662126e-06, | |
| "loss": 2.3083, | |
| "step": 299500 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 8.913013350579757e-06, | |
| "loss": 2.3202, | |
| "step": 300000 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 8.89453503949739e-06, | |
| "loss": 2.3341, | |
| "step": 300500 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 8.876056728415023e-06, | |
| "loss": 2.339, | |
| "step": 301000 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 8.857578417332657e-06, | |
| "loss": 2.3165, | |
| "step": 301500 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 8.83910010625029e-06, | |
| "loss": 2.363, | |
| "step": 302000 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 8.820621795167921e-06, | |
| "loss": 2.3365, | |
| "step": 302500 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 8.802143484085556e-06, | |
| "loss": 2.3193, | |
| "step": 303000 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 8.783665173003189e-06, | |
| "loss": 2.3467, | |
| "step": 303500 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 8.76518686192082e-06, | |
| "loss": 2.307, | |
| "step": 304000 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 8.746708550838454e-06, | |
| "loss": 2.346, | |
| "step": 304500 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 8.728230239756087e-06, | |
| "loss": 2.3514, | |
| "step": 305000 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 8.70975192867372e-06, | |
| "loss": 2.3419, | |
| "step": 305500 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 8.691273617591353e-06, | |
| "loss": 2.3398, | |
| "step": 306000 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 8.672795306508985e-06, | |
| "loss": 2.3305, | |
| "step": 306500 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 8.654316995426618e-06, | |
| "loss": 2.3508, | |
| "step": 307000 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 8.635838684344253e-06, | |
| "loss": 2.3378, | |
| "step": 307500 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 8.617360373261886e-06, | |
| "loss": 2.3167, | |
| "step": 308000 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 8.598882062179517e-06, | |
| "loss": 2.3526, | |
| "step": 308500 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 8.58040375109715e-06, | |
| "loss": 2.3406, | |
| "step": 309000 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 8.561925440014783e-06, | |
| "loss": 2.3056, | |
| "step": 309500 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 8.543447128932416e-06, | |
| "loss": 2.3755, | |
| "step": 310000 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 8.52496881785005e-06, | |
| "loss": 2.2981, | |
| "step": 310500 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 8.506490506767681e-06, | |
| "loss": 2.3469, | |
| "step": 311000 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 8.488012195685314e-06, | |
| "loss": 2.3499, | |
| "step": 311500 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 8.469533884602949e-06, | |
| "loss": 2.2999, | |
| "step": 312000 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 8.45105557352058e-06, | |
| "loss": 2.3718, | |
| "step": 312500 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 8.432577262438214e-06, | |
| "loss": 2.3254, | |
| "step": 313000 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 8.414098951355847e-06, | |
| "loss": 2.3583, | |
| "step": 313500 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 8.39562064027348e-06, | |
| "loss": 2.3211, | |
| "step": 314000 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 8.377142329191113e-06, | |
| "loss": 2.2861, | |
| "step": 314500 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 8.358664018108744e-06, | |
| "loss": 2.3488, | |
| "step": 315000 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 8.340185707026378e-06, | |
| "loss": 2.3, | |
| "step": 315500 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 8.32170739594401e-06, | |
| "loss": 2.3172, | |
| "step": 316000 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 8.303229084861646e-06, | |
| "loss": 2.3542, | |
| "step": 316500 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 8.284750773779277e-06, | |
| "loss": 2.3373, | |
| "step": 317000 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 8.26627246269691e-06, | |
| "loss": 2.3382, | |
| "step": 317500 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 8.247794151614543e-06, | |
| "loss": 2.3184, | |
| "step": 318000 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 8.229315840532176e-06, | |
| "loss": 2.3276, | |
| "step": 318500 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 8.21083752944981e-06, | |
| "loss": 2.3114, | |
| "step": 319000 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 8.192359218367441e-06, | |
| "loss": 2.2954, | |
| "step": 319500 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 8.173880907285074e-06, | |
| "loss": 2.2631, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 8.155402596202707e-06, | |
| "loss": 2.3295, | |
| "step": 320500 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 8.13692428512034e-06, | |
| "loss": 2.3073, | |
| "step": 321000 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 8.118445974037974e-06, | |
| "loss": 2.2949, | |
| "step": 321500 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 8.099967662955607e-06, | |
| "loss": 2.321, | |
| "step": 322000 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 8.08148935187324e-06, | |
| "loss": 2.3496, | |
| "step": 322500 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 8.063011040790873e-06, | |
| "loss": 2.2946, | |
| "step": 323000 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 8.044532729708504e-06, | |
| "loss": 2.3092, | |
| "step": 323500 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 8.026054418626137e-06, | |
| "loss": 2.3003, | |
| "step": 324000 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 8.00757610754377e-06, | |
| "loss": 2.3105, | |
| "step": 324500 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 7.989097796461404e-06, | |
| "loss": 2.255, | |
| "step": 325000 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 7.970619485379037e-06, | |
| "loss": 2.2037, | |
| "step": 325500 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 7.95214117429667e-06, | |
| "loss": 2.1883, | |
| "step": 326000 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 7.933662863214303e-06, | |
| "loss": 2.2057, | |
| "step": 326500 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 7.915184552131936e-06, | |
| "loss": 2.1503, | |
| "step": 327000 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 7.89670624104957e-06, | |
| "loss": 2.1714, | |
| "step": 327500 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 7.878227929967201e-06, | |
| "loss": 2.1951, | |
| "step": 328000 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 7.859749618884834e-06, | |
| "loss": 2.238, | |
| "step": 328500 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 7.841271307802467e-06, | |
| "loss": 2.1411, | |
| "step": 329000 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 7.8227929967201e-06, | |
| "loss": 2.1909, | |
| "step": 329500 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 7.804314685637733e-06, | |
| "loss": 2.2007, | |
| "step": 330000 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 7.785836374555367e-06, | |
| "loss": 2.1525, | |
| "step": 330500 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 7.767358063473e-06, | |
| "loss": 2.1784, | |
| "step": 331000 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 7.748879752390633e-06, | |
| "loss": 2.2297, | |
| "step": 331500 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 7.730401441308264e-06, | |
| "loss": 2.1924, | |
| "step": 332000 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 7.711923130225897e-06, | |
| "loss": 2.2292, | |
| "step": 332500 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 7.69344481914353e-06, | |
| "loss": 2.1905, | |
| "step": 333000 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 7.674966508061164e-06, | |
| "loss": 2.2136, | |
| "step": 333500 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 7.656488196978797e-06, | |
| "loss": 2.2059, | |
| "step": 334000 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 7.63800988589643e-06, | |
| "loss": 2.2476, | |
| "step": 334500 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 7.619531574814063e-06, | |
| "loss": 2.1917, | |
| "step": 335000 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 7.601053263731696e-06, | |
| "loss": 2.2058, | |
| "step": 335500 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 7.5825749526493285e-06, | |
| "loss": 2.232, | |
| "step": 336000 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 7.564096641566962e-06, | |
| "loss": 2.225, | |
| "step": 336500 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 7.545618330484594e-06, | |
| "loss": 2.1906, | |
| "step": 337000 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 7.527140019402227e-06, | |
| "loss": 2.1866, | |
| "step": 337500 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 7.50866170831986e-06, | |
| "loss": 2.2054, | |
| "step": 338000 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 7.4901833972374925e-06, | |
| "loss": 2.185, | |
| "step": 338500 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 7.4717050861551265e-06, | |
| "loss": 2.188, | |
| "step": 339000 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 7.45322677507276e-06, | |
| "loss": 2.1983, | |
| "step": 339500 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 7.434748463990392e-06, | |
| "loss": 2.2106, | |
| "step": 340000 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 7.416270152908025e-06, | |
| "loss": 2.2293, | |
| "step": 340500 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 7.397791841825658e-06, | |
| "loss": 2.1596, | |
| "step": 341000 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 7.3793135307432904e-06, | |
| "loss": 2.2007, | |
| "step": 341500 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 7.3608352196609236e-06, | |
| "loss": 2.1984, | |
| "step": 342000 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 7.342356908578556e-06, | |
| "loss": 2.2322, | |
| "step": 342500 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 7.323878597496189e-06, | |
| "loss": 2.2053, | |
| "step": 343000 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 7.305400286413823e-06, | |
| "loss": 2.1956, | |
| "step": 343500 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 7.286921975331456e-06, | |
| "loss": 2.1991, | |
| "step": 344000 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 7.268443664249088e-06, | |
| "loss": 2.2338, | |
| "step": 344500 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 7.2499653531667215e-06, | |
| "loss": 2.2121, | |
| "step": 345000 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 7.231487042084354e-06, | |
| "loss": 2.1876, | |
| "step": 345500 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 7.213008731001987e-06, | |
| "loss": 2.1987, | |
| "step": 346000 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 7.19453041991962e-06, | |
| "loss": 2.1945, | |
| "step": 346500 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 7.176052108837252e-06, | |
| "loss": 2.2409, | |
| "step": 347000 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 7.1575737977548855e-06, | |
| "loss": 2.2188, | |
| "step": 347500 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 7.139095486672518e-06, | |
| "loss": 2.2327, | |
| "step": 348000 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 7.120617175590152e-06, | |
| "loss": 2.2136, | |
| "step": 348500 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 7.102138864507785e-06, | |
| "loss": 2.1918, | |
| "step": 349000 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 7.083660553425418e-06, | |
| "loss": 2.1975, | |
| "step": 349500 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 7.06518224234305e-06, | |
| "loss": 2.244, | |
| "step": 350000 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 7.0467039312606835e-06, | |
| "loss": 2.2033, | |
| "step": 350500 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 7.028225620178316e-06, | |
| "loss": 2.225, | |
| "step": 351000 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 7.009747309095949e-06, | |
| "loss": 2.1814, | |
| "step": 351500 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 6.991268998013582e-06, | |
| "loss": 2.2024, | |
| "step": 352000 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 6.972790686931214e-06, | |
| "loss": 2.1833, | |
| "step": 352500 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 6.954312375848848e-06, | |
| "loss": 2.1867, | |
| "step": 353000 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 6.9358340647664814e-06, | |
| "loss": 2.2124, | |
| "step": 353500 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 6.917355753684114e-06, | |
| "loss": 2.2717, | |
| "step": 354000 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 6.898877442601747e-06, | |
| "loss": 2.1652, | |
| "step": 354500 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 6.88039913151938e-06, | |
| "loss": 2.1995, | |
| "step": 355000 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 6.861920820437012e-06, | |
| "loss": 2.1881, | |
| "step": 355500 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 6.843442509354645e-06, | |
| "loss": 2.1777, | |
| "step": 356000 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 6.824964198272278e-06, | |
| "loss": 2.1837, | |
| "step": 356500 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 6.806485887189911e-06, | |
| "loss": 2.21, | |
| "step": 357000 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 6.788007576107545e-06, | |
| "loss": 2.2085, | |
| "step": 357500 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 6.769529265025178e-06, | |
| "loss": 2.2209, | |
| "step": 358000 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 6.75105095394281e-06, | |
| "loss": 2.222, | |
| "step": 358500 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 6.732572642860443e-06, | |
| "loss": 2.19, | |
| "step": 359000 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 6.714094331778076e-06, | |
| "loss": 2.186, | |
| "step": 359500 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 6.695616020695709e-06, | |
| "loss": 2.1956, | |
| "step": 360000 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 6.677137709613342e-06, | |
| "loss": 2.1664, | |
| "step": 360500 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 6.658659398530974e-06, | |
| "loss": 2.2238, | |
| "step": 361000 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 6.640181087448607e-06, | |
| "loss": 2.187, | |
| "step": 361500 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 6.621702776366241e-06, | |
| "loss": 2.199, | |
| "step": 362000 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 6.603224465283874e-06, | |
| "loss": 2.1711, | |
| "step": 362500 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 6.584746154201507e-06, | |
| "loss": 2.1805, | |
| "step": 363000 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 6.56626784311914e-06, | |
| "loss": 2.1644, | |
| "step": 363500 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 6.547789532036772e-06, | |
| "loss": 2.2155, | |
| "step": 364000 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 6.529311220954405e-06, | |
| "loss": 2.182, | |
| "step": 364500 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 6.510832909872038e-06, | |
| "loss": 2.2197, | |
| "step": 365000 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 6.492354598789671e-06, | |
| "loss": 2.2247, | |
| "step": 365500 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 6.473876287707304e-06, | |
| "loss": 2.221, | |
| "step": 366000 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 6.455397976624938e-06, | |
| "loss": 2.1956, | |
| "step": 366500 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 6.43691966554257e-06, | |
| "loss": 2.1806, | |
| "step": 367000 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 6.418441354460203e-06, | |
| "loss": 2.2042, | |
| "step": 367500 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 6.3999630433778356e-06, | |
| "loss": 2.2405, | |
| "step": 368000 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 6.381484732295469e-06, | |
| "loss": 2.1843, | |
| "step": 368500 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 6.363006421213102e-06, | |
| "loss": 2.1787, | |
| "step": 369000 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 6.344528110130734e-06, | |
| "loss": 2.203, | |
| "step": 369500 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 6.326049799048367e-06, | |
| "loss": 2.1964, | |
| "step": 370000 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 6.3075714879659996e-06, | |
| "loss": 2.265, | |
| "step": 370500 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 6.2890931768836335e-06, | |
| "loss": 2.181, | |
| "step": 371000 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 6.270614865801267e-06, | |
| "loss": 2.1857, | |
| "step": 371500 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 6.2521365547189e-06, | |
| "loss": 2.2064, | |
| "step": 372000 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 6.233658243636532e-06, | |
| "loss": 2.2009, | |
| "step": 372500 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 6.215179932554165e-06, | |
| "loss": 2.2178, | |
| "step": 373000 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 6.1967016214717975e-06, | |
| "loss": 2.2053, | |
| "step": 373500 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 6.178223310389431e-06, | |
| "loss": 2.1864, | |
| "step": 374000 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 6.159744999307064e-06, | |
| "loss": 2.1924, | |
| "step": 374500 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 6.141266688224696e-06, | |
| "loss": 2.1651, | |
| "step": 375000 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 6.12278837714233e-06, | |
| "loss": 2.1987, | |
| "step": 375500 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 6.104310066059963e-06, | |
| "loss": 2.2063, | |
| "step": 376000 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 6.0858317549775955e-06, | |
| "loss": 2.1893, | |
| "step": 376500 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 6.067353443895229e-06, | |
| "loss": 2.2306, | |
| "step": 377000 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 6.048875132812862e-06, | |
| "loss": 2.2166, | |
| "step": 377500 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 6.030396821730494e-06, | |
| "loss": 2.2063, | |
| "step": 378000 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 6.011918510648127e-06, | |
| "loss": 2.2203, | |
| "step": 378500 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 5.9934401995657595e-06, | |
| "loss": 2.1949, | |
| "step": 379000 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 5.974961888483393e-06, | |
| "loss": 2.2099, | |
| "step": 379500 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 5.9564835774010266e-06, | |
| "loss": 2.2115, | |
| "step": 380000 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 5.93800526631866e-06, | |
| "loss": 2.1991, | |
| "step": 380500 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 5.919526955236292e-06, | |
| "loss": 2.1776, | |
| "step": 381000 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 5.901048644153925e-06, | |
| "loss": 2.1924, | |
| "step": 381500 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 5.882570333071557e-06, | |
| "loss": 2.1953, | |
| "step": 382000 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 5.8640920219891906e-06, | |
| "loss": 2.1996, | |
| "step": 382500 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 5.845613710906824e-06, | |
| "loss": 2.1893, | |
| "step": 383000 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 5.827135399824456e-06, | |
| "loss": 2.1997, | |
| "step": 383500 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 5.808657088742089e-06, | |
| "loss": 2.2427, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 5.790178777659723e-06, | |
| "loss": 2.1822, | |
| "step": 384500 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 5.771700466577355e-06, | |
| "loss": 2.1619, | |
| "step": 385000 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 5.7532221554949885e-06, | |
| "loss": 2.1946, | |
| "step": 385500 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 5.734743844412622e-06, | |
| "loss": 2.1588, | |
| "step": 386000 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 5.716265533330254e-06, | |
| "loss": 2.1697, | |
| "step": 386500 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 5.697787222247887e-06, | |
| "loss": 2.193, | |
| "step": 387000 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 5.679308911165519e-06, | |
| "loss": 2.1904, | |
| "step": 387500 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 5.6608306000831525e-06, | |
| "loss": 2.1701, | |
| "step": 388000 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 5.642352289000786e-06, | |
| "loss": 2.2026, | |
| "step": 388500 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 5.62387397791842e-06, | |
| "loss": 2.1952, | |
| "step": 389000 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 5.605395666836052e-06, | |
| "loss": 2.2025, | |
| "step": 389500 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 5.586917355753685e-06, | |
| "loss": 2.1712, | |
| "step": 390000 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 5.568439044671317e-06, | |
| "loss": 2.2015, | |
| "step": 390500 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 5.5499607335889505e-06, | |
| "loss": 2.1984, | |
| "step": 391000 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 5.531482422506584e-06, | |
| "loss": 2.1864, | |
| "step": 391500 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 5.513004111424216e-06, | |
| "loss": 2.2112, | |
| "step": 392000 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 5.494525800341849e-06, | |
| "loss": 2.2069, | |
| "step": 392500 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 5.476047489259481e-06, | |
| "loss": 2.173, | |
| "step": 393000 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 5.4575691781771144e-06, | |
| "loss": 2.1721, | |
| "step": 393500 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 5.439090867094748e-06, | |
| "loss": 2.1939, | |
| "step": 394000 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 5.4206125560123816e-06, | |
| "loss": 2.2115, | |
| "step": 394500 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 5.402134244930014e-06, | |
| "loss": 2.193, | |
| "step": 395000 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 5.383655933847647e-06, | |
| "loss": 2.2099, | |
| "step": 395500 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 5.365177622765279e-06, | |
| "loss": 2.1907, | |
| "step": 396000 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 5.346699311682912e-06, | |
| "loss": 2.1877, | |
| "step": 396500 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 5.3282210006005455e-06, | |
| "loss": 2.2141, | |
| "step": 397000 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 5.309742689518178e-06, | |
| "loss": 2.1696, | |
| "step": 397500 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 5.291264378435811e-06, | |
| "loss": 2.2277, | |
| "step": 398000 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 5.272786067353445e-06, | |
| "loss": 2.1478, | |
| "step": 398500 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 5.254307756271077e-06, | |
| "loss": 2.1894, | |
| "step": 399000 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 5.23582944518871e-06, | |
| "loss": 2.1993, | |
| "step": 399500 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 5.2173511341063435e-06, | |
| "loss": 2.2006, | |
| "step": 400000 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 5.198872823023976e-06, | |
| "loss": 2.1948, | |
| "step": 400500 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 5.180394511941609e-06, | |
| "loss": 2.2191, | |
| "step": 401000 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 5.161916200859241e-06, | |
| "loss": 2.1789, | |
| "step": 401500 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 5.143437889776874e-06, | |
| "loss": 2.2049, | |
| "step": 402000 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 5.1249595786945075e-06, | |
| "loss": 2.1944, | |
| "step": 402500 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 5.1064812676121415e-06, | |
| "loss": 2.1941, | |
| "step": 403000 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 5.088002956529774e-06, | |
| "loss": 2.2341, | |
| "step": 403500 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 5.069524645447407e-06, | |
| "loss": 2.2443, | |
| "step": 404000 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 5.051046334365039e-06, | |
| "loss": 2.2083, | |
| "step": 404500 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 5.032568023282672e-06, | |
| "loss": 2.1984, | |
| "step": 405000 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 5.0140897122003054e-06, | |
| "loss": 2.1847, | |
| "step": 405500 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 4.995611401117938e-06, | |
| "loss": 2.1926, | |
| "step": 406000 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 4.977133090035572e-06, | |
| "loss": 2.2079, | |
| "step": 406500 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 4.958654778953204e-06, | |
| "loss": 2.1816, | |
| "step": 407000 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 4.940176467870837e-06, | |
| "loss": 2.2052, | |
| "step": 407500 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 4.921698156788469e-06, | |
| "loss": 2.2441, | |
| "step": 408000 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 4.903219845706103e-06, | |
| "loss": 2.222, | |
| "step": 408500 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 4.884741534623736e-06, | |
| "loss": 2.1899, | |
| "step": 409000 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 4.866263223541369e-06, | |
| "loss": 2.1785, | |
| "step": 409500 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 4.847784912459001e-06, | |
| "loss": 2.2005, | |
| "step": 410000 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 4.829306601376634e-06, | |
| "loss": 2.2005, | |
| "step": 410500 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 4.810828290294267e-06, | |
| "loss": 2.1972, | |
| "step": 411000 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 4.7923499792119005e-06, | |
| "loss": 2.2097, | |
| "step": 411500 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 4.773871668129534e-06, | |
| "loss": 2.1687, | |
| "step": 412000 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 4.755393357047166e-06, | |
| "loss": 2.1804, | |
| "step": 412500 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 4.736915045964799e-06, | |
| "loss": 2.1776, | |
| "step": 413000 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 4.718436734882432e-06, | |
| "loss": 2.1556, | |
| "step": 413500 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 4.699958423800065e-06, | |
| "loss": 2.1813, | |
| "step": 414000 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 4.681480112717698e-06, | |
| "loss": 2.2163, | |
| "step": 414500 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 4.663001801635331e-06, | |
| "loss": 2.2049, | |
| "step": 415000 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 4.644523490552964e-06, | |
| "loss": 2.2087, | |
| "step": 415500 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 4.626045179470597e-06, | |
| "loss": 2.2161, | |
| "step": 416000 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 4.607566868388229e-06, | |
| "loss": 2.2129, | |
| "step": 416500 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 4.5890885573058625e-06, | |
| "loss": 2.163, | |
| "step": 417000 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 4.570610246223496e-06, | |
| "loss": 2.2022, | |
| "step": 417500 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 4.552131935141129e-06, | |
| "loss": 2.1957, | |
| "step": 418000 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 4.533653624058761e-06, | |
| "loss": 2.2194, | |
| "step": 418500 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 4.515175312976394e-06, | |
| "loss": 2.1974, | |
| "step": 419000 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 4.496697001894027e-06, | |
| "loss": 2.1751, | |
| "step": 419500 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 4.47821869081166e-06, | |
| "loss": 2.1881, | |
| "step": 420000 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 4.4597403797292936e-06, | |
| "loss": 2.199, | |
| "step": 420500 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 4.441262068646926e-06, | |
| "loss": 2.2067, | |
| "step": 421000 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 4.422783757564559e-06, | |
| "loss": 2.2033, | |
| "step": 421500 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 4.404305446482192e-06, | |
| "loss": 2.2031, | |
| "step": 422000 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 4.385827135399825e-06, | |
| "loss": 2.2071, | |
| "step": 422500 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 4.3673488243174575e-06, | |
| "loss": 2.2122, | |
| "step": 423000 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 4.348870513235091e-06, | |
| "loss": 2.1615, | |
| "step": 423500 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 4.330392202152723e-06, | |
| "loss": 2.2002, | |
| "step": 424000 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 4.311913891070357e-06, | |
| "loss": 2.2329, | |
| "step": 424500 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 4.293435579987989e-06, | |
| "loss": 2.2446, | |
| "step": 425000 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 4.274957268905622e-06, | |
| "loss": 2.1897, | |
| "step": 425500 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 4.2564789578232555e-06, | |
| "loss": 2.21, | |
| "step": 426000 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 4.238000646740888e-06, | |
| "loss": 2.1655, | |
| "step": 426500 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 4.219522335658521e-06, | |
| "loss": 2.1477, | |
| "step": 427000 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 4.201044024576154e-06, | |
| "loss": 2.1925, | |
| "step": 427500 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 4.182565713493787e-06, | |
| "loss": 2.1769, | |
| "step": 428000 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 4.1640874024114195e-06, | |
| "loss": 2.1768, | |
| "step": 428500 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 4.1456090913290535e-06, | |
| "loss": 2.2139, | |
| "step": 429000 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 4.127130780246686e-06, | |
| "loss": 2.1799, | |
| "step": 429500 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 4.108652469164319e-06, | |
| "loss": 2.2272, | |
| "step": 430000 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 4.090174158081951e-06, | |
| "loss": 2.1958, | |
| "step": 430500 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 4.071695846999584e-06, | |
| "loss": 2.1654, | |
| "step": 431000 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 4.0532175359172174e-06, | |
| "loss": 2.1946, | |
| "step": 431500 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 4.0347392248348506e-06, | |
| "loss": 2.1779, | |
| "step": 432000 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 4.016260913752483e-06, | |
| "loss": 2.1864, | |
| "step": 432500 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 3.997782602670116e-06, | |
| "loss": 2.184, | |
| "step": 433000 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 3.979304291587749e-06, | |
| "loss": 2.0759, | |
| "step": 433500 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 3.960825980505382e-06, | |
| "loss": 2.095, | |
| "step": 434000 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 3.942347669423015e-06, | |
| "loss": 2.1112, | |
| "step": 434500 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "learning_rate": 3.923869358340648e-06, | |
| "loss": 2.1137, | |
| "step": 435000 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "learning_rate": 3.905391047258281e-06, | |
| "loss": 2.1167, | |
| "step": 435500 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "learning_rate": 3.886912736175914e-06, | |
| "loss": 2.0993, | |
| "step": 436000 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "learning_rate": 3.868434425093547e-06, | |
| "loss": 2.0847, | |
| "step": 436500 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "learning_rate": 3.849956114011179e-06, | |
| "loss": 2.0951, | |
| "step": 437000 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "learning_rate": 3.8314778029288125e-06, | |
| "loss": 2.103, | |
| "step": 437500 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "learning_rate": 3.8129994918464457e-06, | |
| "loss": 2.1186, | |
| "step": 438000 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "learning_rate": 3.7945211807640788e-06, | |
| "loss": 2.1105, | |
| "step": 438500 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "learning_rate": 3.7760428696817115e-06, | |
| "loss": 2.1177, | |
| "step": 439000 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "learning_rate": 3.757564558599344e-06, | |
| "loss": 2.1132, | |
| "step": 439500 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "learning_rate": 3.739086247516977e-06, | |
| "loss": 2.1012, | |
| "step": 440000 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "learning_rate": 3.7206079364346105e-06, | |
| "loss": 2.1226, | |
| "step": 440500 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "learning_rate": 3.702129625352243e-06, | |
| "loss": 2.0862, | |
| "step": 441000 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 3.683651314269876e-06, | |
| "loss": 2.1468, | |
| "step": 441500 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 3.6651730031875086e-06, | |
| "loss": 2.0925, | |
| "step": 442000 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "learning_rate": 3.646694692105142e-06, | |
| "loss": 2.1068, | |
| "step": 442500 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "learning_rate": 3.628216381022775e-06, | |
| "loss": 2.1077, | |
| "step": 443000 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 3.6097380699404076e-06, | |
| "loss": 2.1243, | |
| "step": 443500 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 3.5912597588580407e-06, | |
| "loss": 2.1182, | |
| "step": 444000 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "learning_rate": 3.5727814477756734e-06, | |
| "loss": 2.1215, | |
| "step": 444500 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "learning_rate": 3.5543031366933066e-06, | |
| "loss": 2.0958, | |
| "step": 445000 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "learning_rate": 3.5358248256109397e-06, | |
| "loss": 2.1041, | |
| "step": 445500 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "learning_rate": 3.5173465145285724e-06, | |
| "loss": 2.0748, | |
| "step": 446000 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "learning_rate": 3.498868203446205e-06, | |
| "loss": 2.0836, | |
| "step": 446500 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "learning_rate": 3.4803898923638387e-06, | |
| "loss": 2.0877, | |
| "step": 447000 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "learning_rate": 3.4619115812814714e-06, | |
| "loss": 2.1083, | |
| "step": 447500 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "learning_rate": 3.443433270199104e-06, | |
| "loss": 2.0786, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "learning_rate": 3.424954959116737e-06, | |
| "loss": 2.1239, | |
| "step": 448500 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "learning_rate": 3.4064766480343695e-06, | |
| "loss": 2.0661, | |
| "step": 449000 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "learning_rate": 3.387998336952003e-06, | |
| "loss": 2.1031, | |
| "step": 449500 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "learning_rate": 3.369520025869636e-06, | |
| "loss": 2.1024, | |
| "step": 450000 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "learning_rate": 3.3510417147872685e-06, | |
| "loss": 2.0847, | |
| "step": 450500 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 3.3325634037049017e-06, | |
| "loss": 2.1001, | |
| "step": 451000 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 3.3140850926225344e-06, | |
| "loss": 2.0859, | |
| "step": 451500 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "learning_rate": 3.2956067815401675e-06, | |
| "loss": 2.1188, | |
| "step": 452000 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "learning_rate": 3.2771284704578006e-06, | |
| "loss": 2.0986, | |
| "step": 452500 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 3.2586501593754333e-06, | |
| "loss": 2.0847, | |
| "step": 453000 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 3.240171848293066e-06, | |
| "loss": 2.0566, | |
| "step": 453500 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 3.2216935372106996e-06, | |
| "loss": 2.0828, | |
| "step": 454000 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "learning_rate": 3.2032152261283323e-06, | |
| "loss": 2.0967, | |
| "step": 454500 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "learning_rate": 3.184736915045965e-06, | |
| "loss": 2.1058, | |
| "step": 455000 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 3.1662586039635977e-06, | |
| "loss": 2.1124, | |
| "step": 455500 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 3.1477802928812305e-06, | |
| "loss": 2.1028, | |
| "step": 456000 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "learning_rate": 3.129301981798864e-06, | |
| "loss": 2.1093, | |
| "step": 456500 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "learning_rate": 3.1108236707164967e-06, | |
| "loss": 2.1053, | |
| "step": 457000 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 3.0923453596341294e-06, | |
| "loss": 2.1094, | |
| "step": 457500 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 3.0738670485517626e-06, | |
| "loss": 2.0515, | |
| "step": 458000 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "learning_rate": 3.0553887374693957e-06, | |
| "loss": 2.1127, | |
| "step": 458500 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "learning_rate": 3.0369104263870284e-06, | |
| "loss": 2.0786, | |
| "step": 459000 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 3.0184321153046616e-06, | |
| "loss": 2.0657, | |
| "step": 459500 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 2.9999538042222943e-06, | |
| "loss": 2.1374, | |
| "step": 460000 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 2.981475493139927e-06, | |
| "loss": 2.0647, | |
| "step": 460500 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 2.9629971820575605e-06, | |
| "loss": 2.0758, | |
| "step": 461000 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 2.9445188709751932e-06, | |
| "loss": 2.0968, | |
| "step": 461500 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "learning_rate": 2.926040559892826e-06, | |
| "loss": 2.1015, | |
| "step": 462000 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "learning_rate": 2.9075622488104587e-06, | |
| "loss": 2.0782, | |
| "step": 462500 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 2.8890839377280922e-06, | |
| "loss": 2.1018, | |
| "step": 463000 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 2.870605626645725e-06, | |
| "loss": 2.107, | |
| "step": 463500 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "learning_rate": 2.8521273155633577e-06, | |
| "loss": 2.0778, | |
| "step": 464000 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "learning_rate": 2.8336490044809904e-06, | |
| "loss": 2.0809, | |
| "step": 464500 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "learning_rate": 2.8151706933986235e-06, | |
| "loss": 2.0724, | |
| "step": 465000 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "learning_rate": 2.7966923823162566e-06, | |
| "loss": 2.098, | |
| "step": 465500 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "learning_rate": 2.7782140712338893e-06, | |
| "loss": 2.1082, | |
| "step": 466000 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "learning_rate": 2.7597357601515225e-06, | |
| "loss": 2.0722, | |
| "step": 466500 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "learning_rate": 2.741257449069155e-06, | |
| "loss": 2.0614, | |
| "step": 467000 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 2.7227791379867883e-06, | |
| "loss": 2.1019, | |
| "step": 467500 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 2.7043008269044215e-06, | |
| "loss": 2.0851, | |
| "step": 468000 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "learning_rate": 2.685822515822054e-06, | |
| "loss": 2.074, | |
| "step": 468500 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "learning_rate": 2.667344204739687e-06, | |
| "loss": 2.0852, | |
| "step": 469000 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 2.6488658936573196e-06, | |
| "loss": 2.1329, | |
| "step": 469500 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 2.630387582574953e-06, | |
| "loss": 2.101, | |
| "step": 470000 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "learning_rate": 2.611909271492586e-06, | |
| "loss": 2.0745, | |
| "step": 470500 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "learning_rate": 2.5934309604102186e-06, | |
| "loss": 2.1295, | |
| "step": 471000 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "learning_rate": 2.5749526493278513e-06, | |
| "loss": 2.0645, | |
| "step": 471500 | |
| } | |
| ], | |
| "max_steps": 541175, | |
| "num_train_epochs": 5, | |
| "total_flos": 3.055396004248055e+18, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
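
The table above is the raw `trainer_state.json` that the Hugging Face `Trainer` checkpoints alongside the model weights: `log_history` holds one entry per logging step (here every 500 steps) with the epoch, learning rate, and training loss, and the trailing fields record overall progress (`global_step`, `max_steps`, `num_train_epochs`, `total_flos`). Below is a minimal sketch of how one might load this file and summarize the logged loss curve; the filename `trainer_state.json` and the smoothing window are illustrative assumptions, not part of the log itself.

```python
# Minimal sketch: read a Trainer state file and summarize the logged loss trend.
# Assumes the log above has been saved as plain JSON at "trainer_state.json"
# (hypothetical path) and that every loss entry carries "step" and "loss" keys,
# as in the records shown above.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]  # list of dicts: epoch, learning_rate, loss, step
steps = [e["step"] for e in history if "loss" in e]
losses = [e["loss"] for e in history if "loss" in e]

# Simple trailing moving average to smooth the per-500-step loss readings
# (window size of 20 is an arbitrary illustrative choice).
window = 20
smoothed = [
    sum(losses[max(0, i - window + 1): i + 1]) / (i - max(0, i - window + 1) + 1)
    for i in range(len(losses))
]

print(f"logged points: {len(steps)}")
print(f"first loss:    {losses[0]:.4f} at step {steps[0]}")
print(f"last loss:     {losses[-1]:.4f} at step {steps[-1]}")
print(f"smoothed tail: {smoothed[-1]:.4f}")
print(f"progress:      step {state['global_step']} of {state['max_steps']} "
      f"({state['epoch']:.2f} of {state['num_train_epochs']} epochs)")
```

For the state shown here, such a script would report training at step 471500 of 541175 (epoch 4.36 of 5), with the smoothed loss hovering around 2.09 to 2.10 over the final logged window.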