{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 162500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.03, "learning_rate": 4.984615384615385e-05, "loss": 3.5497, "step": 500 },
    { "epoch": 0.06, "learning_rate": 4.969230769230769e-05, "loss": 3.4573, "step": 1000 },
    { "epoch": 0.09, "learning_rate": 4.953846153846154e-05, "loss": 3.3973, "step": 1500 },
    { "epoch": 0.12, "learning_rate": 4.9384615384615384e-05, "loss": 3.3772, "step": 2000 },
    { "epoch": 0.15, "learning_rate": 4.923076923076924e-05, "loss": 3.3331, "step": 2500 },
    { "epoch": 0.18, "learning_rate": 4.907692307692308e-05, "loss": 3.3312, "step": 3000 },
    { "epoch": 0.22, "learning_rate": 4.892307692307693e-05, "loss": 3.3091, "step": 3500 },
    { "epoch": 0.25, "learning_rate": 4.876923076923077e-05, "loss": 3.3126, "step": 4000 },
    { "epoch": 0.28, "learning_rate": 4.861538461538462e-05, "loss": 3.2995, "step": 4500 },
    { "epoch": 0.31, "learning_rate": 4.846153846153846e-05, "loss": 3.274, "step": 5000 },
    { "epoch": 0.34, "learning_rate": 4.830769230769231e-05, "loss": 3.2612, "step": 5500 },
    { "epoch": 0.37, "learning_rate": 4.815384615384615e-05, "loss": 3.2786, "step": 6000 },
    { "epoch": 0.4, "learning_rate": 4.8e-05, "loss": 3.2465, "step": 6500 },
    { "epoch": 0.43, "learning_rate": 4.784615384615384e-05, "loss": 3.265, "step": 7000 },
    { "epoch": 0.46, "learning_rate": 4.76923076923077e-05, "loss": 3.2207, "step": 7500 },
    { "epoch": 0.49, "learning_rate": 4.753846153846154e-05, "loss": 3.2264, "step": 8000 },
    { "epoch": 0.52, "learning_rate": 4.738461538461539e-05, "loss": 3.227, "step": 8500 },
    { "epoch": 0.55, "learning_rate": 4.723076923076923e-05, "loss": 3.2152, "step": 9000 },
    { "epoch": 0.58, "learning_rate": 4.707692307692308e-05, "loss": 3.2229, "step": 9500 },
    { "epoch": 0.62, "learning_rate": 4.692307692307693e-05, "loss": 3.2352, "step": 10000 },
    { "epoch": 0.65, "learning_rate": 4.676923076923077e-05, "loss": 3.2247, "step": 10500 },
    { "epoch": 0.68, "learning_rate": 4.661538461538462e-05, "loss": 3.2262, "step": 11000 },
    { "epoch": 0.71, "learning_rate": 4.646153846153846e-05, "loss": 3.2073, "step": 11500 },
    { "epoch": 0.74, "learning_rate": 4.630769230769231e-05, "loss": 3.2027, "step": 12000 },
    { "epoch": 0.77, "learning_rate": 4.615384615384616e-05, "loss": 3.1998, "step": 12500 },
    { "epoch": 0.8, "learning_rate": 4.600000000000001e-05, "loss": 3.1736, "step": 13000 },
    { "epoch": 0.83, "learning_rate": 4.584615384615385e-05, "loss": 3.1947, "step": 13500 },
    { "epoch": 0.86, "learning_rate": 4.56923076923077e-05, "loss": 3.186, "step": 14000 },
    { "epoch": 0.89, "learning_rate": 4.553846153846154e-05, "loss": 3.1766, "step": 14500 },
    { "epoch": 0.92, "learning_rate": 4.538461538461539e-05, "loss": 3.1851, "step": 15000 },
    { "epoch": 0.95, "learning_rate": 4.523076923076923e-05, "loss": 3.1989, "step": 15500 },
    { "epoch": 0.98, "learning_rate": 4.507692307692308e-05, "loss": 3.2022, "step": 16000 },
    { "epoch": 1.02, "learning_rate": 4.492307692307692e-05, "loss": 3.0511, "step": 16500 },
    { "epoch": 1.05, "learning_rate": 4.476923076923077e-05, "loss": 2.9082, "step": 17000 },
    { "epoch": 1.08, "learning_rate": 4.461538461538462e-05, "loss": 2.9545, "step": 17500 },
    { "epoch": 1.11, "learning_rate": 4.4461538461538466e-05, "loss": 2.9195, "step": 18000 },
    { "epoch": 1.14, "learning_rate": 4.430769230769231e-05, "loss": 2.9344, "step": 18500 },
    { "epoch": 1.17, "learning_rate": 4.415384615384616e-05, "loss": 2.9238, "step": 19000 },
    { "epoch": 1.2, "learning_rate": 4.4000000000000006e-05, "loss": 2.9501, "step": 19500 },
    { "epoch": 1.23, "learning_rate": 4.384615384615385e-05, "loss": 2.943, "step": 20000 },
    { "epoch": 1.26, "learning_rate": 4.3692307692307696e-05, "loss": 2.9397, "step": 20500 },
    { "epoch": 1.29, "learning_rate": 4.353846153846154e-05, "loss": 2.9391, "step": 21000 },
    { "epoch": 1.32, "learning_rate": 4.338461538461539e-05, "loss": 2.9594, "step": 21500 },
    { "epoch": 1.35, "learning_rate": 4.323076923076923e-05, "loss": 2.9497, "step": 22000 },
    { "epoch": 1.38, "learning_rate": 4.3076923076923084e-05, "loss": 2.9476, "step": 22500 },
    { "epoch": 1.42, "learning_rate": 4.2923076923076926e-05, "loss": 2.9507, "step": 23000 },
    { "epoch": 1.45, "learning_rate": 4.2769230769230775e-05, "loss": 2.9537, "step": 23500 },
    { "epoch": 1.48, "learning_rate": 4.2615384615384617e-05, "loss": 2.9657, "step": 24000 },
    { "epoch": 1.51, "learning_rate": 4.2461538461538465e-05, "loss": 2.9845, "step": 24500 },
    { "epoch": 1.54, "learning_rate": 4.230769230769231e-05, "loss": 2.9733, "step": 25000 },
    { "epoch": 1.57, "learning_rate": 4.2153846153846156e-05, "loss": 2.9281, "step": 25500 },
    { "epoch": 1.6, "learning_rate": 4.2e-05, "loss": 2.9585, "step": 26000 },
    { "epoch": 1.63, "learning_rate": 4.1846153846153846e-05, "loss": 2.9716, "step": 26500 },
    { "epoch": 1.66, "learning_rate": 4.169230769230769e-05, "loss": 2.9583, "step": 27000 },
    { "epoch": 1.69, "learning_rate": 4.1538461538461544e-05, "loss": 2.964, "step": 27500 },
    { "epoch": 1.72, "learning_rate": 4.1384615384615386e-05, "loss": 2.9686, "step": 28000 },
    { "epoch": 1.75, "learning_rate": 4.1230769230769234e-05, "loss": 2.9525, "step": 28500 },
    { "epoch": 1.78, "learning_rate": 4.1076923076923076e-05, "loss": 2.9803, "step": 29000 },
    { "epoch": 1.82, "learning_rate": 4.0923076923076925e-05, "loss": 2.9545, "step": 29500 },
    { "epoch": 1.85, "learning_rate": 4.0769230769230773e-05, "loss": 2.9797, "step": 30000 },
    { "epoch": 1.88, "learning_rate": 4.0615384615384615e-05, "loss": 2.9701, "step": 30500 },
    { "epoch": 1.91, "learning_rate": 4.0461538461538464e-05, "loss": 2.939, "step": 31000 },
    { "epoch": 1.94, "learning_rate": 4.0307692307692306e-05, "loss": 2.9627, "step": 31500 },
    { "epoch": 1.97, "learning_rate": 4.0153846153846155e-05, "loss": 2.9526, "step": 32000 },
    { "epoch": 2.0, "learning_rate": 4e-05, "loss": 2.977, "step": 32500 },
    { "epoch": 2.03, "learning_rate": 3.984615384615385e-05, "loss": 2.7112, "step": 33000 },
    { "epoch": 2.06, "learning_rate": 3.9692307692307694e-05, "loss": 2.694, "step": 33500 },
    { "epoch": 2.09, "learning_rate": 3.953846153846154e-05, "loss": 2.6971, "step": 34000 },
    { "epoch": 2.12, "learning_rate": 3.9384615384615384e-05, "loss": 2.7095, "step": 34500 },
    { "epoch": 2.15, "learning_rate": 3.923076923076923e-05, "loss": 2.7013, "step": 35000 },
    { "epoch": 2.18, "learning_rate": 3.9076923076923075e-05, "loss": 2.7182, "step": 35500 },
    { "epoch": 2.22, "learning_rate": 3.8923076923076924e-05, "loss": 2.7253, "step": 36000 },
    { "epoch": 2.25, "learning_rate": 3.8769230769230766e-05, "loss": 2.7323, "step": 36500 },
    { "epoch": 2.28, "learning_rate": 3.861538461538462e-05, "loss": 2.7529, "step": 37000 },
    { "epoch": 2.31, "learning_rate": 3.846153846153846e-05, "loss": 2.7394, "step": 37500 },
    { "epoch": 2.34, "learning_rate": 3.830769230769231e-05, "loss": 2.7744, "step": 38000 },
    { "epoch": 2.37, "learning_rate": 3.8153846153846153e-05, "loss": 2.745, "step": 38500 },
    { "epoch": 2.4, "learning_rate": 3.8e-05, "loss": 2.7412, "step": 39000 },
    { "epoch": 2.43, "learning_rate": 3.784615384615385e-05, "loss": 2.7509, "step": 39500 },
    { "epoch": 2.46, "learning_rate": 3.769230769230769e-05, "loss": 2.7477, "step": 40000 },
    { "epoch": 2.49, "learning_rate": 3.753846153846154e-05, "loss": 2.7719, "step": 40500 },
    { "epoch": 2.52, "learning_rate": 3.738461538461538e-05, "loss": 2.762, "step": 41000 },
    { "epoch": 2.55, "learning_rate": 3.723076923076923e-05, "loss": 2.7684, "step": 41500 },
    { "epoch": 2.58, "learning_rate": 3.707692307692308e-05, "loss": 2.7693, "step": 42000 },
    { "epoch": 2.62, "learning_rate": 3.692307692307693e-05, "loss": 2.7502, "step": 42500 },
    { "epoch": 2.65, "learning_rate": 3.676923076923077e-05, "loss": 2.7662, "step": 43000 },
    { "epoch": 2.68, "learning_rate": 3.661538461538462e-05, "loss": 2.7844, "step": 43500 },
    { "epoch": 2.71, "learning_rate": 3.646153846153846e-05, "loss": 2.7539, "step": 44000 },
    { "epoch": 2.74, "learning_rate": 3.630769230769231e-05, "loss": 2.7934, "step": 44500 },
    { "epoch": 2.77, "learning_rate": 3.615384615384615e-05, "loss": 2.7851, "step": 45000 },
    { "epoch": 2.8, "learning_rate": 3.6e-05, "loss": 2.7789, "step": 45500 },
    { "epoch": 2.83, "learning_rate": 3.584615384615384e-05, "loss": 2.7653, "step": 46000 },
    { "epoch": 2.86, "learning_rate": 3.569230769230769e-05, "loss": 2.7917, "step": 46500 },
    { "epoch": 2.89, "learning_rate": 3.553846153846154e-05, "loss": 2.7721, "step": 47000 },
    { "epoch": 2.92, "learning_rate": 3.538461538461539e-05, "loss": 2.7728, "step": 47500 },
    { "epoch": 2.95, "learning_rate": 3.523076923076923e-05, "loss": 2.7738, "step": 48000 },
    { "epoch": 2.98, "learning_rate": 3.507692307692308e-05, "loss": 2.8052, "step": 48500 },
    { "epoch": 3.02, "learning_rate": 3.492307692307693e-05, "loss": 2.6651, "step": 49000 },
    { "epoch": 3.05, "learning_rate": 3.476923076923077e-05, "loss": 2.5394, "step": 49500 },
    { "epoch": 3.08, "learning_rate": 3.461538461538462e-05, "loss": 2.5482, "step": 50000 },
    { "epoch": 3.11, "learning_rate": 3.446153846153846e-05, "loss": 2.5339, "step": 50500 },
    { "epoch": 3.14, "learning_rate": 3.430769230769231e-05, "loss": 2.519, "step": 51000 },
    { "epoch": 3.17, "learning_rate": 3.415384615384615e-05, "loss": 2.5416, "step": 51500 },
    { "epoch": 3.2, "learning_rate": 3.4000000000000007e-05, "loss": 2.5611, "step": 52000 },
    { "epoch": 3.23, "learning_rate": 3.384615384615385e-05, "loss": 2.5651, "step": 52500 },
    { "epoch": 3.26, "learning_rate": 3.36923076923077e-05, "loss": 2.5802, "step": 53000 },
    { "epoch": 3.29, "learning_rate": 3.353846153846154e-05, "loss": 2.5526, "step": 53500 },
    { "epoch": 3.32, "learning_rate": 3.338461538461539e-05, "loss": 2.5521, "step": 54000 },
    { "epoch": 3.35, "learning_rate": 3.323076923076923e-05, "loss": 2.5678, "step": 54500 },
    { "epoch": 3.38, "learning_rate": 3.307692307692308e-05, "loss": 2.5762, "step": 55000 },
    { "epoch": 3.42, "learning_rate": 3.292307692307692e-05, "loss": 2.5937, "step": 55500 },
    { "epoch": 3.45, "learning_rate": 3.276923076923077e-05, "loss": 2.5692, "step": 56000 },
    { "epoch": 3.48, "learning_rate": 3.261538461538462e-05, "loss": 2.5932, "step": 56500 },
    { "epoch": 3.51, "learning_rate": 3.2461538461538466e-05, "loss": 2.5469, "step": 57000 },
    { "epoch": 3.54, "learning_rate": 3.230769230769231e-05, "loss": 2.6019, "step": 57500 },
    { "epoch": 3.57, "learning_rate": 3.215384615384616e-05, "loss": 2.5929, "step": 58000 },
    { "epoch": 3.6, "learning_rate": 3.2000000000000005e-05, "loss": 2.6075, "step": 58500 },
    { "epoch": 3.63, "learning_rate": 3.184615384615385e-05, "loss": 2.6013, "step": 59000 },
    { "epoch": 3.66, "learning_rate": 3.1692307692307696e-05, "loss": 2.6138, "step": 59500 },
    { "epoch": 3.69, "learning_rate": 3.153846153846154e-05, "loss": 2.6259, "step": 60000 },
    { "epoch": 3.72, "learning_rate": 3.1384615384615386e-05, "loss": 2.5918, "step": 60500 },
    { "epoch": 3.75, "learning_rate": 3.123076923076923e-05, "loss": 2.5902, "step": 61000 },
    { "epoch": 3.78, "learning_rate": 3.107692307692308e-05, "loss": 2.5902, "step": 61500 },
    { "epoch": 3.82, "learning_rate": 3.0923076923076926e-05, "loss": 2.6132, "step": 62000 },
    { "epoch": 3.85, "learning_rate": 3.0769230769230774e-05, "loss": 2.6187, "step": 62500 },
    { "epoch": 3.88, "learning_rate": 3.0615384615384616e-05, "loss": 2.5985, "step": 63000 },
    { "epoch": 3.91, "learning_rate": 3.0461538461538465e-05, "loss": 2.6227, "step": 63500 },
    { "epoch": 3.94, "learning_rate": 3.030769230769231e-05, "loss": 2.6189, "step": 64000 },
    { "epoch": 3.97, "learning_rate": 3.0153846153846155e-05, "loss": 2.607, "step": 64500 },
    { "epoch": 4.0, "learning_rate": 3e-05, "loss": 2.6284, "step": 65000 },
    { "epoch": 4.03, "learning_rate": 2.9846153846153846e-05, "loss": 2.3572, "step": 65500 },
    { "epoch": 4.06, "learning_rate": 2.969230769230769e-05, "loss": 2.3599, "step": 66000 },
    { "epoch": 4.09, "learning_rate": 2.9538461538461543e-05, "loss": 2.3719, "step": 66500 },
    { "epoch": 4.12, "learning_rate": 2.938461538461539e-05, "loss": 2.3775, "step": 67000 },
    { "epoch": 4.15, "learning_rate": 2.9230769230769234e-05, "loss": 2.3772, "step": 67500 },
    { "epoch": 4.18, "learning_rate": 2.907692307692308e-05, "loss": 2.4074, "step": 68000 },
    { "epoch": 4.22, "learning_rate": 2.8923076923076925e-05, "loss": 2.4128, "step": 68500 },
    { "epoch": 4.25, "learning_rate": 2.876923076923077e-05, "loss": 2.3943, "step": 69000 },
    { "epoch": 4.28, "learning_rate": 2.8615384615384615e-05, "loss": 2.3941, "step": 69500 },
    { "epoch": 4.31, "learning_rate": 2.846153846153846e-05, "loss": 2.4156, "step": 70000 },
    { "epoch": 4.34, "learning_rate": 2.8307692307692306e-05, "loss": 2.4033, "step": 70500 },
    { "epoch": 4.37, "learning_rate": 2.8153846153846154e-05, "loss": 2.4138, "step": 71000 },
    { "epoch": 4.4, "learning_rate": 2.8000000000000003e-05, "loss": 2.4045, "step": 71500 },
    { "epoch": 4.43, "learning_rate": 2.7846153846153848e-05, "loss": 2.403, "step": 72000 },
    { "epoch": 4.46, "learning_rate": 2.7692307692307694e-05, "loss": 2.4146, "step": 72500 },
    { "epoch": 4.49, "learning_rate": 2.7538461538461542e-05, "loss": 2.4194, "step": 73000 },
    { "epoch": 4.52, "learning_rate": 2.7384615384615387e-05, "loss": 2.421, "step": 73500 },
    { "epoch": 4.55, "learning_rate": 2.7230769230769233e-05, "loss": 2.4465, "step": 74000 },
    { "epoch": 4.58, "learning_rate": 2.7076923076923078e-05, "loss": 2.4323, "step": 74500 },
    { "epoch": 4.62, "learning_rate": 2.6923076923076923e-05, "loss": 2.4401, "step": 75000 },
    { "epoch": 4.65, "learning_rate": 2.676923076923077e-05, "loss": 2.4214, "step": 75500 },
    { "epoch": 4.68, "learning_rate": 2.6615384615384614e-05, "loss": 2.4235, "step": 76000 },
    { "epoch": 4.71, "learning_rate": 2.6461538461538466e-05, "loss": 2.4677, "step": 76500 },
    { "epoch": 4.74, "learning_rate": 2.630769230769231e-05, "loss": 2.4496, "step": 77000 },
    { "epoch": 4.77, "learning_rate": 2.6153846153846157e-05, "loss": 2.4291, "step": 77500 },
    { "epoch": 4.8, "learning_rate": 2.6000000000000002e-05, "loss": 2.4532, "step": 78000 },
    { "epoch": 4.83, "learning_rate": 2.5846153846153847e-05, "loss": 2.4601, "step": 78500 },
    { "epoch": 4.86, "learning_rate": 2.5692307692307692e-05, "loss": 2.4492, "step": 79000 },
    { "epoch": 4.89, "learning_rate": 2.5538461538461538e-05, "loss": 2.4744, "step": 79500 },
    { "epoch": 4.92, "learning_rate": 2.5384615384615383e-05, "loss": 2.461, "step": 80000 },
    { "epoch": 4.95, "learning_rate": 2.523076923076923e-05, "loss": 2.4562, "step": 80500 },
    { "epoch": 4.98, "learning_rate": 2.5076923076923077e-05, "loss": 2.4677, "step": 81000 },
    { "epoch": 5.02, "learning_rate": 2.4923076923076926e-05, "loss": 2.3597, "step": 81500 },
    { "epoch": 5.05, "learning_rate": 2.476923076923077e-05, "loss": 2.21, "step": 82000 },
    { "epoch": 5.08, "learning_rate": 2.461538461538462e-05, "loss": 2.2361, "step": 82500 },
    { "epoch": 5.11, "learning_rate": 2.4461538461538465e-05, "loss": 2.2432, "step": 83000 },
    { "epoch": 5.14, "learning_rate": 2.430769230769231e-05, "loss": 2.2396, "step": 83500 },
    { "epoch": 5.17, "learning_rate": 2.4153846153846155e-05, "loss": 2.243, "step": 84000 },
    { "epoch": 5.2, "learning_rate": 2.4e-05, "loss": 2.2377, "step": 84500 },
    { "epoch": 5.23, "learning_rate": 2.384615384615385e-05, "loss": 2.2259, "step": 85000 },
    { "epoch": 5.26, "learning_rate": 2.3692307692307695e-05, "loss": 2.2558, "step": 85500 },
    { "epoch": 5.29, "learning_rate": 2.353846153846154e-05, "loss": 2.2454, "step": 86000 },
    { "epoch": 5.32, "learning_rate": 2.3384615384615385e-05, "loss": 2.276, "step": 86500 },
    { "epoch": 5.35, "learning_rate": 2.323076923076923e-05, "loss": 2.2689, "step": 87000 },
    { "epoch": 5.38, "learning_rate": 2.307692307692308e-05, "loss": 2.273, "step": 87500 },
    { "epoch": 5.42, "learning_rate": 2.2923076923076924e-05, "loss": 2.267, "step": 88000 },
    { "epoch": 5.45, "learning_rate": 2.276923076923077e-05, "loss": 2.2673, "step": 88500 },
    { "epoch": 5.48, "learning_rate": 2.2615384615384615e-05, "loss": 2.2708, "step": 89000 },
    { "epoch": 5.51, "learning_rate": 2.246153846153846e-05, "loss": 2.2909, "step": 89500 },
    { "epoch": 5.54, "learning_rate": 2.230769230769231e-05, "loss": 2.2623, "step": 90000 },
    { "epoch": 5.57, "learning_rate": 2.2153846153846154e-05, "loss": 2.3009, "step": 90500 },
    { "epoch": 5.6, "learning_rate": 2.2000000000000003e-05, "loss": 2.2974, "step": 91000 },
    { "epoch": 5.63, "learning_rate": 2.1846153846153848e-05, "loss": 2.2947, "step": 91500 },
    { "epoch": 5.66, "learning_rate": 2.1692307692307693e-05, "loss": 2.2825, "step": 92000 },
    { "epoch": 5.69, "learning_rate": 2.1538461538461542e-05, "loss": 2.2875, "step": 92500 },
    { "epoch": 5.72, "learning_rate": 2.1384615384615387e-05, "loss": 2.3044, "step": 93000 },
    { "epoch": 5.75, "learning_rate": 2.1230769230769233e-05, "loss": 2.3037, "step": 93500 },
    { "epoch": 5.78, "learning_rate": 2.1076923076923078e-05, "loss": 2.3085, "step": 94000 },
    { "epoch": 5.82, "learning_rate": 2.0923076923076923e-05, "loss": 2.3279, "step": 94500 },
    { "epoch": 5.85, "learning_rate": 2.0769230769230772e-05, "loss": 2.3185, "step": 95000 },
    { "epoch": 5.88, "learning_rate": 2.0615384615384617e-05, "loss": 2.31, "step": 95500 },
    { "epoch": 5.91, "learning_rate": 2.0461538461538462e-05, "loss": 2.3214, "step": 96000 },
    { "epoch": 5.94, "learning_rate": 2.0307692307692308e-05, "loss": 2.301, "step": 96500 },
    { "epoch": 5.97, "learning_rate": 2.0153846153846153e-05, "loss": 2.3112, "step": 97000 },
    { "epoch": 6.0, "learning_rate": 2e-05, "loss": 2.3008, "step": 97500 },
    { "epoch": 6.03, "learning_rate": 1.9846153846153847e-05, "loss": 2.0927, "step": 98000 },
    { "epoch": 6.06, "learning_rate": 1.9692307692307692e-05, "loss": 2.0993, "step": 98500 },
    { "epoch": 6.09, "learning_rate": 1.9538461538461537e-05, "loss": 2.096, "step": 99000 },
    { "epoch": 6.12, "learning_rate": 1.9384615384615383e-05, "loss": 2.1266, "step": 99500 },
    { "epoch": 6.15, "learning_rate": 1.923076923076923e-05, "loss": 2.1288, "step": 100000 },
    { "epoch": 6.18, "learning_rate": 1.9076923076923077e-05, "loss": 2.1283, "step": 100500 },
    { "epoch": 6.22, "learning_rate": 1.8923076923076925e-05, "loss": 2.1149, "step": 101000 },
    { "epoch": 6.25, "learning_rate": 1.876923076923077e-05, "loss": 2.1298, "step": 101500 },
    { "epoch": 6.28, "learning_rate": 1.8615384615384616e-05, "loss": 2.1446, "step": 102000 },
    { "epoch": 6.31, "learning_rate": 1.8461538461538465e-05, "loss": 2.1571, "step": 102500 },
    { "epoch": 6.34, "learning_rate": 1.830769230769231e-05, "loss": 2.1238, "step": 103000 },
    { "epoch": 6.37, "learning_rate": 1.8153846153846155e-05, "loss": 2.1477, "step": 103500 },
    { "epoch": 6.4, "learning_rate": 1.8e-05, "loss": 2.1577, "step": 104000 },
    { "epoch": 6.43, "learning_rate": 1.7846153846153846e-05, "loss": 2.1405, "step": 104500 },
    { "epoch": 6.46, "learning_rate": 1.7692307692307694e-05, "loss": 2.1526, "step": 105000 },
    { "epoch": 6.49, "learning_rate": 1.753846153846154e-05, "loss": 2.1475, "step": 105500 },
    { "epoch": 6.52, "learning_rate": 1.7384615384615385e-05, "loss": 2.1501, "step": 106000 },
    { "epoch": 6.55, "learning_rate": 1.723076923076923e-05, "loss": 2.1552, "step": 106500 },
    { "epoch": 6.58, "learning_rate": 1.7076923076923076e-05, "loss": 2.1491, "step": 107000 },
    { "epoch": 6.62, "learning_rate": 1.6923076923076924e-05, "loss": 2.1729, "step": 107500 },
    { "epoch": 6.65, "learning_rate": 1.676923076923077e-05, "loss": 2.1683, "step": 108000 },
    { "epoch": 6.68, "learning_rate": 1.6615384615384615e-05, "loss": 2.1438, "step": 108500 },
    { "epoch": 6.71, "learning_rate": 1.646153846153846e-05, "loss": 2.1581, "step": 109000 },
    { "epoch": 6.74, "learning_rate": 1.630769230769231e-05, "loss": 2.1641, "step": 109500 },
    { "epoch": 6.77, "learning_rate": 1.6153846153846154e-05, "loss": 2.1598, "step": 110000 },
    { "epoch": 6.8, "learning_rate": 1.6000000000000003e-05, "loss": 2.158, "step": 110500 },
    { "epoch": 6.83, "learning_rate": 1.5846153846153848e-05, "loss": 2.1557, "step": 111000 },
    { "epoch": 6.86, "learning_rate": 1.5692307692307693e-05, "loss": 2.1795, "step": 111500 },
    { "epoch": 6.89, "learning_rate": 1.553846153846154e-05, "loss": 2.1682, "step": 112000 },
    { "epoch": 6.92, "learning_rate": 1.5384615384615387e-05, "loss": 2.17, "step": 112500 },
    { "epoch": 6.95, "learning_rate": 1.5230769230769232e-05, "loss": 2.1608, "step": 113000 },
    { "epoch": 6.98, "learning_rate": 1.5076923076923078e-05, "loss": 2.1838, "step": 113500 },
    { "epoch": 7.02, "learning_rate": 1.4923076923076923e-05, "loss": 2.0964, "step": 114000 },
    { "epoch": 7.05, "learning_rate": 1.4769230769230772e-05, "loss": 1.9989, "step": 114500 },
    { "epoch": 7.08, "learning_rate": 1.4615384615384617e-05, "loss": 1.9878, "step": 115000 },
    { "epoch": 7.11, "learning_rate": 1.4461538461538462e-05, "loss": 1.9903, "step": 115500 },
    { "epoch": 7.14, "learning_rate": 1.4307692307692308e-05, "loss": 2.0201, "step": 116000 },
    { "epoch": 7.17, "learning_rate": 1.4153846153846153e-05, "loss": 1.9994, "step": 116500 },
    { "epoch": 7.2, "learning_rate": 1.4000000000000001e-05, "loss": 2.0244, "step": 117000 },
    { "epoch": 7.23, "learning_rate": 1.3846153846153847e-05, "loss": 2.0113, "step": 117500 },
    { "epoch": 7.26, "learning_rate": 1.3692307692307694e-05, "loss": 2.0214, "step": 118000 },
    { "epoch": 7.29, "learning_rate": 1.3538461538461539e-05, "loss": 2.0313, "step": 118500 },
    { "epoch": 7.32, "learning_rate": 1.3384615384615384e-05, "loss": 2.0371, "step": 119000 },
    { "epoch": 7.35, "learning_rate": 1.3230769230769233e-05, "loss": 2.0231, "step": 119500 },
    { "epoch": 7.38, "learning_rate": 1.3076923076923078e-05, "loss": 2.0159, "step": 120000 },
    { "epoch": 7.42, "learning_rate": 1.2923076923076924e-05, "loss": 2.0376, "step": 120500 },
    { "epoch": 7.45, "learning_rate": 1.2769230769230769e-05, "loss": 2.0012, "step": 121000 },
    { "epoch": 7.48, "learning_rate": 1.2615384615384616e-05, "loss": 2.0265, "step": 121500 },
    { "epoch": 7.51, "learning_rate": 1.2461538461538463e-05, "loss": 2.0141, "step": 122000 },
    { "epoch": 7.54, "learning_rate": 1.230769230769231e-05, "loss": 2.0411, "step": 122500 },
    { "epoch": 7.57, "learning_rate": 1.2153846153846155e-05, "loss": 2.0424, "step": 123000 },
    { "epoch": 7.6, "learning_rate": 1.2e-05, "loss": 2.036, "step": 123500 },
    { "epoch": 7.63, "learning_rate": 1.1846153846153847e-05, "loss": 2.0416, "step": 124000 },
    { "epoch": 7.66, "learning_rate": 1.1692307692307693e-05, "loss": 2.058, "step": 124500 },
    { "epoch": 7.69, "learning_rate": 1.153846153846154e-05, "loss": 2.0258, "step": 125000 },
    { "epoch": 7.72, "learning_rate": 1.1384615384615385e-05, "loss": 2.0486, "step": 125500 },
    { "epoch": 7.75, "learning_rate": 1.123076923076923e-05, "loss": 2.0433, "step": 126000 },
    { "epoch": 7.78, "learning_rate": 1.1076923076923077e-05, "loss": 2.0413, "step": 126500 },
    { "epoch": 7.82, "learning_rate": 1.0923076923076924e-05, "loss": 2.0454, "step": 127000 },
    { "epoch": 7.85, "learning_rate": 1.0769230769230771e-05, "loss": 2.0756, "step": 127500 },
    { "epoch": 7.88, "learning_rate": 1.0615384615384616e-05, "loss": 2.055, "step": 128000 },
    { "epoch": 7.91, "learning_rate": 1.0461538461538462e-05, "loss": 2.0466, "step": 128500 },
    { "epoch": 7.94, "learning_rate": 1.0307692307692309e-05, "loss": 2.0635, "step": 129000 },
    { "epoch": 7.97, "learning_rate": 1.0153846153846154e-05, "loss": 2.0534, "step": 129500 },
    { "epoch": 8.0, "learning_rate": 1e-05, "loss": 2.0431, "step": 130000 },
    { "epoch": 8.03, "learning_rate": 9.846153846153846e-06, "loss": 1.9157, "step": 130500 },
    { "epoch": 8.06, "learning_rate": 9.692307692307691e-06, "loss": 1.9128, "step": 131000 },
    { "epoch": 8.09, "learning_rate": 9.538461538461538e-06, "loss": 1.9017, "step": 131500 },
    { "epoch": 8.12, "learning_rate": 9.384615384615385e-06, "loss": 1.9096, "step": 132000 },
    { "epoch": 8.15, "learning_rate": 9.230769230769232e-06, "loss": 1.9143, "step": 132500 },
    { "epoch": 8.18, "learning_rate": 9.076923076923078e-06, "loss": 1.9189, "step": 133000 },
    { "epoch": 8.22, "learning_rate": 8.923076923076923e-06, "loss": 1.9204, "step": 133500 },
    { "epoch": 8.25, "learning_rate": 8.76923076923077e-06, "loss": 1.9375, "step": 134000 },
    { "epoch": 8.28, "learning_rate": 8.615384615384615e-06, "loss": 1.9227, "step": 134500 },
    { "epoch": 8.31, "learning_rate": 8.461538461538462e-06, "loss": 1.9247, "step": 135000 },
    { "epoch": 8.34, "learning_rate": 8.307692307692307e-06, "loss": 1.9315, "step": 135500 },
    { "epoch": 8.37, "learning_rate": 8.153846153846154e-06, "loss": 1.9205, "step": 136000 },
    { "epoch": 8.4, "learning_rate": 8.000000000000001e-06, "loss": 1.9284, "step": 136500 },
    { "epoch": 8.43, "learning_rate": 7.846153846153847e-06, "loss": 1.9429, "step": 137000 },
    { "epoch": 8.46, "learning_rate": 7.692307692307694e-06, "loss": 1.9331, "step": 137500 },
    { "epoch": 8.49, "learning_rate": 7.538461538461539e-06, "loss": 1.9289, "step": 138000 },
    { "epoch": 8.52, "learning_rate": 7.384615384615386e-06, "loss": 1.9454, "step": 138500 },
    { "epoch": 8.55, "learning_rate": 7.230769230769231e-06, "loss": 1.9316, "step": 139000 },
    { "epoch": 8.58, "learning_rate": 7.076923076923076e-06, "loss": 1.9423, "step": 139500 },
    { "epoch": 8.62, "learning_rate": 6.923076923076923e-06, "loss": 1.9649, "step": 140000 },
    { "epoch": 8.65, "learning_rate": 6.7692307692307695e-06, "loss": 1.9378, "step": 140500 },
    { "epoch": 8.68, "learning_rate": 6.6153846153846165e-06, "loss": 1.9322, "step": 141000 },
    { "epoch": 8.71, "learning_rate": 6.461538461538462e-06, "loss": 1.962, "step": 141500 },
    { "epoch": 8.74, "learning_rate": 6.307692307692308e-06, "loss": 1.9442, "step": 142000 },
    { "epoch": 8.77, "learning_rate": 6.153846153846155e-06, "loss": 1.9509, "step": 142500 },
    { "epoch": 8.8, "learning_rate": 6e-06, "loss": 1.9582, "step": 143000 },
    { "epoch": 8.83, "learning_rate": 5.846153846153846e-06, "loss": 1.9279, "step": 143500 },
    { "epoch": 8.86, "learning_rate": 5.692307692307692e-06, "loss": 1.9428, "step": 144000 },
    { "epoch": 8.89, "learning_rate": 5.5384615384615385e-06, "loss": 1.9471, "step": 144500 },
    { "epoch": 8.92, "learning_rate": 5.3846153846153855e-06, "loss": 1.9205, "step": 145000 },
    { "epoch": 8.95, "learning_rate": 5.230769230769231e-06, "loss": 1.9582, "step": 145500 },
    { "epoch": 8.98, "learning_rate": 5.076923076923077e-06, "loss": 1.9541, "step": 146000 },
    { "epoch": 9.02, "learning_rate": 4.923076923076923e-06, "loss": 1.9153, "step": 146500 },
    { "epoch": 9.05, "learning_rate": 4.769230769230769e-06, "loss": 1.8547, "step": 147000 },
    { "epoch": 9.08, "learning_rate": 4.615384615384616e-06, "loss": 1.863, "step": 147500 },
    { "epoch": 9.11, "learning_rate": 4.4615384615384614e-06, "loss": 1.8348, "step": 148000 },
    { "epoch": 9.14, "learning_rate": 4.3076923076923076e-06, "loss": 1.8621, "step": 148500 },
    { "epoch": 9.17, "learning_rate": 4.153846153846154e-06, "loss": 1.8745, "step": 149000 },
    { "epoch": 9.2, "learning_rate": 4.000000000000001e-06, "loss": 1.8555, "step": 149500 },
    { "epoch": 9.23, "learning_rate": 3.846153846153847e-06, "loss": 1.8731, "step": 150000 },
    { "epoch": 9.26, "learning_rate": 3.692307692307693e-06, "loss": 1.8495, "step": 150500 },
    { "epoch": 9.29, "learning_rate": 3.538461538461538e-06, "loss": 1.8596, "step": 151000 },
    { "epoch": 9.32, "learning_rate": 3.3846153846153848e-06, "loss": 1.8625, "step": 151500 },
    { "epoch": 9.35, "learning_rate": 3.230769230769231e-06, "loss": 1.8601, "step": 152000 },
    { "epoch": 9.38, "learning_rate": 3.0769230769230774e-06, "loss": 1.8519, "step": 152500 },
    { "epoch": 9.42, "learning_rate": 2.923076923076923e-06, "loss": 1.8648, "step": 153000 },
    { "epoch": 9.45, "learning_rate": 2.7692307692307693e-06, "loss": 1.859, "step": 153500 },
    { "epoch": 9.48, "learning_rate": 2.6153846153846154e-06, "loss": 1.8728, "step": 154000 },
    { "epoch": 9.51, "learning_rate": 2.4615384615384615e-06, "loss": 1.8536, "step": 154500 },
    { "epoch": 9.54, "learning_rate": 2.307692307692308e-06, "loss": 1.8706, "step": 155000 },
    { "epoch": 9.57, "learning_rate": 2.1538461538461538e-06, "loss": 1.8446, "step": 155500 },
    { "epoch": 9.6, "learning_rate": 2.0000000000000003e-06, "loss": 1.8545, "step": 156000 },
    { "epoch": 9.63, "learning_rate": 1.8461538461538465e-06, "loss": 1.8396, "step": 156500 },
    { "epoch": 9.66, "learning_rate": 1.6923076923076924e-06, "loss": 1.8661, "step": 157000 },
    { "epoch": 9.69, "learning_rate": 1.5384615384615387e-06, "loss": 1.8567, "step": 157500 },
    { "epoch": 9.72, "learning_rate": 1.3846153846153846e-06, "loss": 1.8641, "step": 158000 },
    { "epoch": 9.75, "learning_rate": 1.2307692307692308e-06, "loss": 1.8823, "step": 158500 },
    { "epoch": 9.78, "learning_rate": 1.0769230769230769e-06, "loss": 1.8767, "step": 159000 },
    { "epoch": 9.82, "learning_rate": 9.230769230769232e-07, "loss": 1.8615, "step": 159500 },
    { "epoch": 9.85, "learning_rate": 7.692307692307694e-07, "loss": 1.8743, "step": 160000 },
    { "epoch": 9.88, "learning_rate": 6.153846153846154e-07, "loss": 1.8741, "step": 160500 },
    { "epoch": 9.91, "learning_rate": 4.615384615384616e-07, "loss": 1.8367, "step": 161000 },
    { "epoch": 9.94, "learning_rate": 3.076923076923077e-07, "loss": 1.868, "step": 161500 },
    { "epoch": 9.97, "learning_rate": 1.5384615384615385e-07, "loss": 1.8581, "step": 162000 },
    { "epoch": 10.0, "learning_rate": 0.0, "loss": 1.8659, "step": 162500 },
    {
      "epoch": 10.0,
      "step": 162500,
      "total_flos": 9.755546820784128e+16,
      "train_loss": 2.4223546108774037,
      "train_runtime": 17439.7175,
      "train_samples_per_second": 37.271,
      "train_steps_per_second": 9.318
    }
  ],
| "max_steps": 162500, | |
| "num_train_epochs": 10, | |
| "total_flos": 9.755546820784128e+16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |