{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 8.91625684392371,
  "global_step": 640000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 3.8671875e-08,
      "loss": 5.4939,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.773437500000001e-08,
      "loss": 5.2319,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.1679687500000002e-07,
      "loss": 4.9044,
      "step": 1500
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.5585937500000003e-07,
      "loss": 4.6639,
      "step": 2000
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.9492187500000003e-07,
      "loss": 4.4707,
      "step": 2500
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.3398437500000002e-07,
      "loss": 4.3327,
      "step": 3000
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.73046875e-07,
      "loss": 4.2155,
      "step": 3500
    },
    {
      "epoch": 0.06,
      "learning_rate": 3.12109375e-07,
      "loss": 4.1126,
      "step": 4000
    },
    {
      "epoch": 0.06,
      "learning_rate": 3.51171875e-07,
      "loss": 4.0397,
      "step": 4500
    },
    {
      "epoch": 0.07,
      "learning_rate": 3.9015625e-07,
      "loss": 3.9689,
      "step": 5000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.2921875e-07,
      "loss": 3.8489,
      "step": 5500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.6828125000000004e-07,
      "loss": 3.7863,
      "step": 6000
    },
    {
      "epoch": 0.09,
      "learning_rate": 5.0734375e-07,
      "loss": 3.7543,
      "step": 6500
    },
    {
      "epoch": 0.1,
      "learning_rate": 5.46328125e-07,
      "loss": 3.6828,
      "step": 7000
    },
    {
      "epoch": 0.1,
      "learning_rate": 5.853906250000001e-07,
      "loss": 3.6822,
      "step": 7500
    },
    {
      "epoch": 0.11,
      "learning_rate": 6.244531250000001e-07,
      "loss": 3.6322,
      "step": 8000
    },
    {
      "epoch": 0.12,
      "learning_rate": 6.635156250000001e-07,
      "loss": 3.6357,
      "step": 8500
    },
    {
      "epoch": 0.13,
      "learning_rate": 7.025000000000002e-07,
      "loss": 3.6196,
      "step": 9000
    },
    {
      "epoch": 0.13,
      "learning_rate": 7.415625000000002e-07,
      "loss": 3.6101,
      "step": 9500
    },
    {
      "epoch": 0.14,
      "learning_rate": 7.806250000000001e-07,
      "loss": 3.5769,
      "step": 10000
    },
    {
      "epoch": 0.15,
      "learning_rate": 8.196875000000001e-07,
      "loss": 3.567,
      "step": 10500
    },
    {
      "epoch": 0.15,
      "learning_rate": 8.587500000000001e-07,
      "loss": 3.558,
      "step": 11000
    },
    {
      "epoch": 0.16,
      "learning_rate": 8.978125000000001e-07,
      "loss": 3.519,
      "step": 11500
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.367968750000001e-07,
      "loss": 3.5268,
      "step": 12000
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.75859375e-07,
      "loss": 3.5111,
      "step": 12500
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.014921875e-06,
      "loss": 3.4619,
      "step": 13000
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.053984375e-06,
      "loss": 3.4759,
      "step": 13500
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.093046875e-06,
      "loss": 3.4815,
      "step": 14000
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.1321093750000002e-06,
      "loss": 3.4615,
      "step": 14500
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.171171875e-06,
      "loss": 3.4654,
      "step": 15000
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.2102343750000002e-06,
      "loss": 3.445,
      "step": 15500
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.249296875e-06,
      "loss": 3.4391,
      "step": 16000
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.2883593750000001e-06,
      "loss": 3.4293,
      "step": 16500
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.32734375e-06,
      "loss": 3.4215,
      "step": 17000
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.36640625e-06,
      "loss": 3.423,
      "step": 17500
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.40546875e-06,
      "loss": 3.4263,
      "step": 18000
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.44453125e-06,
      "loss": 3.3723,
      "step": 18500
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.48359375e-06,
      "loss": 3.3856,
      "step": 19000
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.5225781250000002e-06,
      "loss": 3.3836,
      "step": 19500
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.561640625e-06,
      "loss": 3.3603,
      "step": 20000
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.6007031250000002e-06,
      "loss": 3.3733,
      "step": 20500
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.639765625e-06,
      "loss": 3.363,
      "step": 21000
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.6788281250000002e-06,
      "loss": 3.3444,
      "step": 21500
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.7178125000000001e-06,
      "loss": 3.3773,
      "step": 22000
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.756875e-06,
      "loss": 3.345,
      "step": 22500
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.7959375000000001e-06,
      "loss": 3.3316,
      "step": 23000
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8350000000000002e-06,
      "loss": 3.3584,
      "step": 23500
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.873984375e-06,
      "loss": 3.3291,
      "step": 24000
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.913046875e-06,
      "loss": 3.3173,
      "step": 24500
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.952109375e-06,
      "loss": 3.3177,
      "step": 25000
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.991171875e-06,
      "loss": 3.3234,
      "step": 25500
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.0301562500000004e-06,
      "loss": 3.3226,
      "step": 26000
    },
    {
      "epoch": 0.37,
      "learning_rate": 2.06921875e-06,
      "loss": 3.3435,
      "step": 26500
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.10828125e-06,
      "loss": 3.2945,
      "step": 27000
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.1473437500000003e-06,
      "loss": 3.3019,
      "step": 27500
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.186328125e-06,
      "loss": 3.2966,
      "step": 28000
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.225390625e-06,
      "loss": 3.2959,
      "step": 28500
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.264453125e-06,
      "loss": 3.3053,
      "step": 29000
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.3035156250000003e-06,
      "loss": 3.3003,
      "step": 29500
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.342578125e-06,
      "loss": 3.2698,
      "step": 30000
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.3815625e-06,
      "loss": 3.266,
      "step": 30500
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.4206250000000002e-06,
      "loss": 3.2624,
      "step": 31000
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.4596875000000003e-06,
      "loss": 3.3084,
      "step": 31500
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.4987500000000004e-06,
      "loss": 3.2526,
      "step": 32000
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.5378125000000005e-06,
      "loss": 3.2417,
      "step": 32500
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.576875e-06,
      "loss": 3.2902,
      "step": 33000
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.6159375000000003e-06,
      "loss": 3.2761,
      "step": 33500
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.6550000000000004e-06,
      "loss": 3.2615,
      "step": 34000
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.6940625000000005e-06,
      "loss": 3.2692,
      "step": 34500
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.733125e-06,
      "loss": 3.26,
      "step": 35000
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.7721875000000003e-06,
      "loss": 3.2465,
      "step": 35500
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.8112500000000004e-06,
      "loss": 3.2444,
      "step": 36000
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.850234375e-06,
      "loss": 3.2418,
      "step": 36500
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.889296875e-06,
      "loss": 3.2282,
      "step": 37000
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.928359375e-06,
      "loss": 3.2487,
      "step": 37500
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.967421875e-06,
      "loss": 3.2399,
      "step": 38000
    },
    {
      "epoch": 0.54,
      "learning_rate": 3.006484375e-06,
      "loss": 3.233,
      "step": 38500
    },
    {
      "epoch": 0.54,
      "learning_rate": 3.045546875e-06,
      "loss": 3.21,
      "step": 39000
    },
    {
      "epoch": 0.55,
      "learning_rate": 3.084609375e-06,
      "loss": 3.236,
      "step": 39500
    },
    {
      "epoch": 0.56,
      "learning_rate": 3.123671875e-06,
      "loss": 3.2359,
      "step": 40000
    },
    {
      "epoch": 0.56,
      "learning_rate": 3.162734375e-06,
      "loss": 3.2307,
      "step": 40500
    },
    {
      "epoch": 0.57,
      "learning_rate": 3.2017187500000002e-06,
      "loss": 3.2041,
      "step": 41000
    },
    {
      "epoch": 0.58,
      "learning_rate": 3.2407812500000003e-06,
      "loss": 3.2368,
      "step": 41500
    },
    {
      "epoch": 0.59,
      "learning_rate": 3.27984375e-06,
      "loss": 3.2156,
      "step": 42000
    },
    {
      "epoch": 0.59,
      "learning_rate": 3.31890625e-06,
      "loss": 3.2106,
      "step": 42500
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.3579687500000002e-06,
      "loss": 3.2076,
      "step": 43000
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.3970312500000003e-06,
      "loss": 3.2039,
      "step": 43500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.4360937500000004e-06,
      "loss": 3.2019,
      "step": 44000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.47515625e-06,
      "loss": 3.2138,
      "step": 44500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.5140625e-06,
      "loss": 3.1833,
      "step": 45000
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.553125e-06,
      "loss": 3.2035,
      "step": 45500
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.5921875e-06,
      "loss": 3.2006,
      "step": 46000
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.63125e-06,
      "loss": 3.2059,
      "step": 46500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.6703125e-06,
      "loss": 3.1993,
      "step": 47000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.709375e-06,
      "loss": 3.2051,
      "step": 47500
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.7484375e-06,
      "loss": 3.1814,
      "step": 48000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.7875e-06,
      "loss": 3.171,
      "step": 48500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8265625e-06,
      "loss": 3.1862,
      "step": 49000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.865625e-06,
      "loss": 3.1927,
      "step": 49500
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.9046875e-06,
      "loss": 3.1795,
      "step": 50000
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.943671875000001e-06,
      "loss": 3.1788,
      "step": 50500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.982734375e-06,
      "loss": 3.1672,
      "step": 51000
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.021796875e-06,
      "loss": 3.1824,
      "step": 51500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.060859375e-06,
      "loss": 3.1694,
      "step": 52000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.099921875e-06,
      "loss": 3.1756,
      "step": 52500
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.138984375e-06,
      "loss": 3.1813,
      "step": 53000
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.178046875e-06,
      "loss": 3.1725,
      "step": 53500
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.2171093750000005e-06,
      "loss": 3.1677,
      "step": 54000
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.25609375e-06,
      "loss": 3.1436,
      "step": 54500
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.295078125e-06,
      "loss": 3.1474,
      "step": 55000
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.334140625e-06,
      "loss": 3.1435,
      "step": 55500
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.373203125e-06,
      "loss": 3.1423,
      "step": 56000
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.412265625e-06,
      "loss": 3.1462,
      "step": 56500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.45125e-06,
      "loss": 3.1284,
      "step": 57000
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.4903125e-06,
      "loss": 3.136,
      "step": 57500
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.529375e-06,
      "loss": 3.143,
      "step": 58000
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.5684375e-06,
      "loss": 3.1339,
      "step": 58500
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.6075e-06,
      "loss": 3.1646,
      "step": 59000
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.6465625000000005e-06,
      "loss": 3.1391,
      "step": 59500
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.6856250000000006e-06,
      "loss": 3.1341,
      "step": 60000
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.724687500000001e-06,
      "loss": 3.142,
      "step": 60500
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.76375e-06,
      "loss": 3.1404,
      "step": 61000
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.8028125e-06,
      "loss": 3.1536,
      "step": 61500
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.841875e-06,
      "loss": 3.1432,
      "step": 62000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.88078125e-06,
      "loss": 3.1422,
      "step": 62500
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.91984375e-06,
      "loss": 3.1566,
      "step": 63000
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.9589062500000005e-06,
      "loss": 3.1406,
      "step": 63500
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.99796875e-06,
      "loss": 3.1497,
      "step": 64000
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.995967708333334e-06,
      "loss": 3.129,
      "step": 64500
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.991714236111112e-06,
      "loss": 3.1202,
      "step": 65000
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.987460763888889e-06,
      "loss": 3.109,
      "step": 65500
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.983207291666667e-06,
      "loss": 3.1332,
      "step": 66000
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.9789538194444445e-06,
      "loss": 3.1237,
      "step": 66500
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.9747088541666666e-06,
      "loss": 3.1173,
      "step": 67000
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.970455381944445e-06,
      "loss": 3.1363,
      "step": 67500
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.966201909722223e-06,
      "loss": 3.086,
      "step": 68000
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.9619484375e-06,
      "loss": 3.1094,
      "step": 68500
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.957694965277778e-06,
      "loss": 3.1297,
      "step": 69000
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.953450000000001e-06,
      "loss": 3.1028,
      "step": 69500
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.9491965277777785e-06,
      "loss": 3.1004,
      "step": 70000
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.944943055555556e-06,
      "loss": 3.1037,
      "step": 70500
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.940689583333334e-06,
      "loss": 3.1285,
      "step": 71000
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.936436111111111e-06,
      "loss": 3.1002,
      "step": 71500
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.932191145833334e-06,
      "loss": 3.1432,
      "step": 72000
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.927937673611112e-06,
      "loss": 3.0873,
      "step": 72500
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.9236842013888895e-06,
      "loss": 3.0851,
      "step": 73000
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.919430729166667e-06,
      "loss": 3.0814,
      "step": 73500
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.915177256944445e-06,
      "loss": 3.0963,
      "step": 74000
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.910923784722222e-06,
      "loss": 3.0701,
      "step": 74500
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.9066788194444444e-06,
      "loss": 3.0736,
      "step": 75000
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.902425347222223e-06,
      "loss": 3.1015,
      "step": 75500
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.8981718750000005e-06,
      "loss": 3.0794,
      "step": 76000
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.893918402777778e-06,
      "loss": 3.0744,
      "step": 76500
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.889664930555556e-06,
      "loss": 3.103,
      "step": 77000
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.885428472222223e-06,
      "loss": 3.0727,
      "step": 77500
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.881175000000001e-06,
      "loss": 3.0918,
      "step": 78000
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.876921527777778e-06,
      "loss": 3.0509,
      "step": 78500
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.8726765625000004e-06,
      "loss": 3.0727,
      "step": 79000
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.868423090277778e-06,
      "loss": 3.0902,
      "step": 79500
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.864169618055556e-06,
      "loss": 3.0614,
      "step": 80000
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.859916145833333e-06,
      "loss": 3.0629,
      "step": 80500
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.855662673611111e-06,
      "loss": 3.057,
      "step": 81000
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.851426215277778e-06,
      "loss": 3.0859,
      "step": 81500
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.847172743055556e-06,
      "loss": 3.0461,
      "step": 82000
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.8429192708333335e-06,
      "loss": 3.0678,
      "step": 82500
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.838665798611112e-06,
      "loss": 3.0757,
      "step": 83000
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.83441232638889e-06,
      "loss": 3.0351,
      "step": 83500
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.830158854166667e-06,
      "loss": 3.0538,
      "step": 84000
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.825905381944445e-06,
      "loss": 3.0898,
      "step": 84500
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.8216519097222225e-06,
      "loss": 3.0555,
      "step": 85000
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.8173984375e-06,
      "loss": 3.0672,
      "step": 85500
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.813144965277779e-06,
      "loss": 3.0675,
      "step": 86000
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.808891493055556e-06,
      "loss": 3.0551,
      "step": 86500
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.804638020833334e-06,
      "loss": 3.0556,
      "step": 87000
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.8003845486111115e-06,
      "loss": 3.0694,
      "step": 87500
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.796131076388889e-06,
      "loss": 3.0639,
      "step": 88000
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.791877604166667e-06,
      "loss": 3.0825,
      "step": 88500
    },
    {
      "epoch": 1.24,
      "learning_rate": 4.787624131944444e-06,
      "loss": 3.064,
      "step": 89000
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.783370659722222e-06,
      "loss": 3.0471,
      "step": 89500
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.7791171875000005e-06,
      "loss": 3.0531,
      "step": 90000
    },
    {
      "epoch": 1.26,
      "learning_rate": 4.774863715277778e-06,
      "loss": 3.0496,
      "step": 90500
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.77061875e-06,
      "loss": 3.0453,
      "step": 91000
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.766365277777779e-06,
      "loss": 3.0416,
      "step": 91500
    },
    {
      "epoch": 1.28,
      "learning_rate": 4.762111805555556e-06,
      "loss": 3.044,
      "step": 92000
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.757858333333334e-06,
      "loss": 3.0538,
      "step": 92500
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.7536048611111115e-06,
      "loss": 3.0415,
      "step": 93000
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.749351388888889e-06,
      "loss": 3.0388,
      "step": 93500
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.745106423611111e-06,
      "loss": 3.0478,
      "step": 94000
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.74085295138889e-06,
      "loss": 3.0314,
      "step": 94500
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.736599479166667e-06,
      "loss": 3.068,
      "step": 95000
    },
    {
      "epoch": 1.33,
      "learning_rate": 4.732346006944445e-06,
      "loss": 3.0356,
      "step": 95500
    },
    {
      "epoch": 1.34,
      "learning_rate": 4.7280925347222226e-06,
      "loss": 3.0355,
      "step": 96000
    },
    {
      "epoch": 1.34,
      "learning_rate": 4.7238390625e-06,
      "loss": 3.0213,
      "step": 96500
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.719585590277778e-06,
      "loss": 3.0633,
      "step": 97000
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.7153321180555554e-06,
      "loss": 3.0378,
      "step": 97500
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.711087152777778e-06,
      "loss": 3.0534,
      "step": 98000
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.706833680555556e-06,
      "loss": 3.0416,
      "step": 98500
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.702588715277778e-06,
      "loss": 3.0167,
      "step": 99000
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.6983352430555565e-06,
      "loss": 3.0271,
      "step": 99500
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.694081770833334e-06,
      "loss": 3.0401,
      "step": 100000
    },
    {
      "epoch": 1.39,
      "eval_gen_len": 19.9936,
      "eval_loss": 2.870497703552246,
      "eval_rouge1": 0.2979,
      "eval_rouge2": 0.1743,
      "eval_rougeL": 0.2516,
      "eval_rougeLsum": 0.283,
      "eval_runtime": 897.9111,
      "eval_samples_per_second": 14.888,
      "eval_steps_per_second": 3.722,
      "step": 100000
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.689828298611112e-06,
      "loss": 3.0068,
      "step": 100500
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.685574826388889e-06,
      "loss": 3.0365,
      "step": 101000
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.681321354166667e-06,
      "loss": 3.0628,
      "step": 101500
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.6770848958333335e-06,
      "loss": 3.079,
      "step": 102000
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.672831423611111e-06,
      "loss": 3.0425,
      "step": 102500
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.668577951388889e-06,
      "loss": 3.0297,
      "step": 103000
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.664324479166667e-06,
      "loss": 3.017,
      "step": 103500
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.660071006944445e-06,
      "loss": 3.0374,
      "step": 104000
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.6558175347222225e-06,
      "loss": 3.0665,
      "step": 104500
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.6515640625e-06,
      "loss": 3.0246,
      "step": 105000
    },
    {
      "epoch": 1.47,
      "learning_rate": 4.647310590277778e-06,
      "loss": 3.0423,
      "step": 105500
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.643057118055556e-06,
      "loss": 3.0181,
      "step": 106000
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.638803645833334e-06,
      "loss": 3.0335,
      "step": 106500
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.6345501736111115e-06,
      "loss": 3.0345,
      "step": 107000
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.630296701388889e-06,
      "loss": 3.0378,
      "step": 107500
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.626051736111112e-06,
      "loss": 3.033,
      "step": 108000
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.62179826388889e-06,
      "loss": 3.0395,
      "step": 108500
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.617544791666667e-06,
      "loss": 3.0256,
      "step": 109000
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.613291319444445e-06,
      "loss": 3.0061,
      "step": 109500
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.6090378472222225e-06,
      "loss": 3.0321,
      "step": 110000
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.604784375e-06,
      "loss": 3.0228,
      "step": 110500
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.600530902777778e-06,
      "loss": 3.0108,
      "step": 111000
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.596277430555555e-06,
      "loss": 3.0147,
      "step": 111500
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.592032465277778e-06,
      "loss": 3.0105,
      "step": 112000
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.5877875e-06,
      "loss": 3.0213,
      "step": 112500
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.583534027777778e-06,
      "loss": 3.0243,
      "step": 113000
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.579280555555556e-06,
      "loss": 3.0116,
      "step": 113500
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.575027083333334e-06,
      "loss": 2.9976,
      "step": 114000
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.570782118055556e-06,
      "loss": 3.0147,
      "step": 114500
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.566528645833334e-06,
      "loss": 3.0202,
      "step": 115000
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.562275173611111e-06,
      "loss": 3.0112,
      "step": 115500
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.558021701388889e-06,
      "loss": 3.0115,
      "step": 116000
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.553768229166667e-06,
      "loss": 3.03,
      "step": 116500
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.549523263888889e-06,
      "loss": 3.0344,
      "step": 117000
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.545278298611112e-06,
      "loss": 3.0194,
      "step": 117500
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.541024826388889e-06,
      "loss": 3.023,
      "step": 118000
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.536771354166667e-06,
      "loss": 3.017,
      "step": 118500
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.5325178819444445e-06,
      "loss": 3.0292,
      "step": 119000
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.528264409722223e-06,
      "loss": 3.0245,
      "step": 119500
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.5240109375000006e-06,
      "loss": 3.0291,
      "step": 120000
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.519765972222223e-06,
      "loss": 3.028,
      "step": 120500
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.5155125e-06,
      "loss": 3.0066,
      "step": 121000
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.511259027777779e-06,
      "loss": 3.0181,
      "step": 121500
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.507005555555556e-06,
      "loss": 3.0025,
      "step": 122000
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.502752083333334e-06,
      "loss": 3.016,
      "step": 122500
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.498498611111112e-06,
      "loss": 2.994,
      "step": 123000
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.494245138888889e-06,
      "loss": 3.0244,
      "step": 123500
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.489991666666667e-06,
      "loss": 2.9984,
      "step": 124000
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.4857381944444445e-06,
      "loss": 3.0111,
      "step": 124500
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.4814932291666665e-06,
      "loss": 3.015,
      "step": 125000
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.4772482638888894e-06,
      "loss": 3.0108,
      "step": 125500
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.472994791666667e-06,
      "loss": 2.9978,
      "step": 126000
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.468741319444445e-06,
      "loss": 3.0186,
      "step": 126500
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.464487847222222e-06,
      "loss": 3.0223,
      "step": 127000
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.460234375000001e-06,
      "loss": 3.0114,
      "step": 127500
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.4559809027777784e-06,
      "loss": 3.0063,
      "step": 128000
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.451727430555556e-06,
      "loss": 3.0093,
      "step": 128500
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.4474909722222225e-06,
      "loss": 3.0003,
      "step": 129000
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.4432375e-06,
      "loss": 3.0186,
      "step": 129500
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.438984027777778e-06,
      "loss": 3.0056,
      "step": 130000
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.434730555555555e-06,
      "loss": 3.0065,
      "step": 130500
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.430477083333334e-06,
      "loss": 3.0324,
      "step": 131000
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.4262236111111115e-06,
      "loss": 2.9988,
      "step": 131500
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.421970138888889e-06,
      "loss": 3.0163,
      "step": 132000
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.417716666666667e-06,
      "loss": 2.9936,
      "step": 132500
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.413463194444444e-06,
      "loss": 3.0139,
      "step": 133000
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.409209722222222e-06,
      "loss": 2.982,
      "step": 133500
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.4049562500000005e-06,
      "loss": 2.9908,
      "step": 134000
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.400702777777778e-06,
      "loss": 3.0003,
      "step": 134500
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.396449305555556e-06,
      "loss": 2.9924,
      "step": 135000
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.392195833333334e-06,
      "loss": 3.0235,
      "step": 135500
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.387950868055556e-06,
      "loss": 2.9979,
      "step": 136000
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.383697395833334e-06,
      "loss": 2.9846,
      "step": 136500
    },
    {
      "epoch": 1.91,
      "learning_rate": 4.3794439236111115e-06,
      "loss": 2.9916,
      "step": 137000
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.375190451388889e-06,
      "loss": 2.996,
      "step": 137500
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.370945486111111e-06,
      "loss": 2.9737,
      "step": 138000
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.366692013888889e-06,
      "loss": 3.003,
      "step": 138500
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.3624385416666665e-06,
      "loss": 2.9837,
      "step": 139000
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.358185069444445e-06,
      "loss": 3.0183,
      "step": 139500
    },
    {
      "epoch": 1.95,
      "learning_rate": 4.353931597222223e-06,
      "loss": 3.0169,
      "step": 140000
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.349678125e-06,
      "loss": 2.9834,
      "step": 140500
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.345424652777778e-06,
      "loss": 3.0022,
      "step": 141000
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.3411711805555555e-06,
      "loss": 2.9879,
      "step": 141500
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.336926215277778e-06,
      "loss": 2.9919,
      "step": 142000
    },
    {
      "epoch": 1.99,
      "learning_rate": 4.332672743055556e-06,
      "loss": 2.9892,
      "step": 142500
    },
    {
      "epoch": 1.99,
      "learning_rate": 4.328419270833334e-06,
      "loss": 2.9888,
      "step": 143000
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.324165798611112e-06,
      "loss": 2.9873,
      "step": 143500
    },
    {
      "epoch": 2.01,
      "learning_rate": 4.319920833333334e-06,
      "loss": 2.9425,
      "step": 144000
    },
    {
      "epoch": 2.01,
      "learning_rate": 4.315667361111112e-06,
      "loss": 2.9582,
      "step": 144500
    },
    {
      "epoch": 2.02,
      "learning_rate": 4.311413888888889e-06,
      "loss": 2.9654,
      "step": 145000
    },
    {
      "epoch": 2.03,
      "learning_rate": 4.307160416666667e-06,
      "loss": 2.9714,
      "step": 145500
    },
    {
      "epoch": 2.03,
      "learning_rate": 4.302906944444445e-06,
      "loss": 2.9586,
      "step": 146000
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.298653472222222e-06,
      "loss": 2.9601,
      "step": 146500
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.2944e-06,
      "loss": 2.9509,
      "step": 147000
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.290155034722223e-06,
      "loss": 2.9519,
      "step": 147500
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.2859015625000004e-06,
      "loss": 2.9587,
      "step": 148000
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.281648090277778e-06,
      "loss": 2.9801,
      "step": 148500
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.277394618055556e-06,
      "loss": 2.9748,
      "step": 149000
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.273141145833333e-06,
      "loss": 2.9592,
      "step": 149500
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.268887673611112e-06,
      "loss": 2.9538,
      "step": 150000
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.2646342013888894e-06,
      "loss": 2.9744,
      "step": 150500
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.260380729166667e-06,
      "loss": 2.9744,
      "step": 151000
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.256135763888889e-06,
      "loss": 2.9415,
      "step": 151500
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.251882291666668e-06,
      "loss": 2.9657,
      "step": 152000
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.247628819444445e-06,
      "loss": 2.964,
      "step": 152500
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.243375347222223e-06,
      "loss": 2.9661,
      "step": 153000
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.239130381944445e-06,
      "loss": 2.9604,
      "step": 153500
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.2348769097222225e-06,
      "loss": 2.9375,
      "step": 154000
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.2306319444444446e-06,
      "loss": 2.958,
      "step": 154500
    },
    {
      "epoch": 2.16,
      "learning_rate": 4.226378472222222e-06,
      "loss": 2.9596,
      "step": 155000
    },
    {
      "epoch": 2.17,
      "learning_rate": 4.222125000000001e-06,
      "loss": 2.9572,
      "step": 155500
    },
    {
      "epoch": 2.17,
      "learning_rate": 4.217871527777778e-06,
      "loss": 2.9557,
      "step": 156000
    },
    {
      "epoch": 2.18,
      "learning_rate": 4.213618055555556e-06,
      "loss": 2.9778,
      "step": 156500
    },
    {
      "epoch": 2.19,
      "learning_rate": 4.2093645833333336e-06,
      "loss": 2.9465,
      "step": 157000
    },
    {
      "epoch": 2.19,
      "learning_rate": 4.205111111111111e-06,
      "loss": 2.9641,
      "step": 157500
    },
    {
      "epoch": 2.2,
      "learning_rate": 4.20085763888889e-06,
      "loss": 2.9576,
      "step": 158000
    },
    {
      "epoch": 2.21,
      "learning_rate": 4.196612673611112e-06,
      "loss": 2.9681,
      "step": 158500
    },
    {
      "epoch": 2.22,
      "learning_rate": 4.192359201388889e-06,
      "loss": 2.9622,
      "step": 159000
    },
    {
      "epoch": 2.22,
      "learning_rate": 4.188105729166667e-06,
      "loss": 2.9608,
      "step": 159500
    },
    {
      "epoch": 2.23,
      "learning_rate": 4.183860763888889e-06,
      "loss": 2.9529,
      "step": 160000
    },
    {
      "epoch": 2.24,
      "learning_rate": 4.179607291666667e-06,
      "loss": 2.9539,
      "step": 160500
    },
    {
      "epoch": 2.24,
      "learning_rate": 4.175353819444444e-06,
      "loss": 2.9651,
      "step": 161000
    },
    {
      "epoch": 2.25,
      "learning_rate": 4.171100347222222e-06,
      "loss": 2.943,
      "step": 161500
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.166846875e-06,
      "loss": 2.9517,
      "step": 162000
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.162593402777778e-06,
      "loss": 2.9447,
      "step": 162500
    },
    {
      "epoch": 2.27,
      "learning_rate": 4.1583484375e-06,
      "loss": 2.9637,
      "step": 163000
    },
    {
      "epoch": 2.28,
      "learning_rate": 4.1540949652777785e-06,
      "loss": 2.9437,
      "step": 163500
    },
    {
      "epoch": 2.28,
      "learning_rate": 4.149841493055556e-06,
      "loss": 2.9462,
      "step": 164000
    },
    {
      "epoch": 2.29,
      "learning_rate": 4.145588020833334e-06,
      "loss": 2.9605,
      "step": 164500
    },
    {
      "epoch": 2.3,
      "learning_rate": 4.141334548611111e-06,
      "loss": 2.953,
      "step": 165000
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.137081076388889e-06,
      "loss": 2.9702,
      "step": 165500
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.1328276041666675e-06,
      "loss": 2.9636,
      "step": 166000
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.128574131944445e-06,
      "loss": 2.9564,
      "step": 166500
    },
    {
      "epoch": 2.33,
      "learning_rate": 4.124329166666667e-06,
      "loss": 2.9356,
      "step": 167000
    },
    {
      "epoch": 2.33,
      "learning_rate": 4.120075694444445e-06,
      "loss": 2.9542,
      "step": 167500
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.1158222222222225e-06,
      "loss": 2.9377,
      "step": 168000
    },
    {
      "epoch": 2.35,
      "learning_rate": 4.11156875e-06,
      "loss": 2.9439,
      "step": 168500
    },
    {
      "epoch": 2.35,
      "learning_rate": 4.107323784722222e-06,
      "loss": 2.9107,
      "step": 169000
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.1030703125e-06,
      "loss": 2.9849,
      "step": 169500
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.098816840277778e-06,
      "loss": 2.9479,
      "step": 170000
    },
    {
      "epoch": 2.38,
      "learning_rate": 4.094563368055556e-06,
      "loss": 2.9459,
      "step": 170500
    },
    {
      "epoch": 2.38,
      "learning_rate": 4.090318402777778e-06,
      "loss": 2.9558,
      "step": 171000
    },
    {
      "epoch": 2.39,
      "learning_rate": 4.086064930555556e-06,
      "loss": 2.9599,
      "step": 171500
    },
    {
      "epoch": 2.4,
      "learning_rate": 4.081811458333334e-06,
      "loss": 2.9456,
      "step": 172000
    },
    {
      "epoch": 2.4,
      "learning_rate": 4.077566493055556e-06,
      "loss": 2.943,
      "step": 172500
    },
    {
      "epoch": 2.41,
      "learning_rate": 4.073313020833334e-06,
      "loss": 2.9403,
      "step": 173000
    },
    {
      "epoch": 2.42,
      "learning_rate": 4.069059548611111e-06,
      "loss": 2.9553,
      "step": 173500
    },
    {
      "epoch": 2.42,
      "learning_rate": 4.064806076388889e-06,
      "loss": 2.9188,
      "step": 174000
    },
    {
      "epoch": 2.43,
      "learning_rate": 4.060552604166667e-06,
      "loss": 2.943,
      "step": 174500
    },
    {
      "epoch": 2.44,
      "learning_rate": 4.056299131944444e-06,
      "loss": 2.9477,
      "step": 175000
    },
    {
      "epoch": 2.45,
      "learning_rate": 4.052045659722223e-06,
      "loss": 2.9366,
      "step": 175500
    },
    {
      "epoch": 2.45,
      "learning_rate": 4.0477921875e-06,
      "loss": 2.923,
      "step": 176000
    },
    {
      "epoch": 2.46,
      "learning_rate": 4.043547222222222e-06,
      "loss": 2.9505,
      "step": 176500
    },
    {
      "epoch": 2.47,
      "learning_rate": 4.03929375e-06,
      "loss": 2.9468,
      "step": 177000
    },
    {
      "epoch": 2.47,
      "learning_rate": 4.035040277777778e-06,
      "loss": 2.9394,
      "step": 177500
    },
    {
      "epoch": 2.48,
      "learning_rate": 4.030786805555556e-06,
      "loss": 2.9291,
      "step": 178000
    },
    {
      "epoch": 2.49,
      "learning_rate": 4.026541840277778e-06,
      "loss": 2.935,
      "step": 178500
    },
    {
      "epoch": 2.49,
      "learning_rate": 4.022288368055556e-06,
      "loss": 2.9323,
      "step": 179000
    },
    {
      "epoch": 2.5,
      "learning_rate": 4.018034895833334e-06,
      "loss": 2.9527,
      "step": 179500
    },
    {
      "epoch": 2.51,
      "learning_rate": 4.013781423611112e-06,
      "loss": 2.9679,
      "step": 180000
    },
    {
      "epoch": 2.51,
      "learning_rate": 4.0095279513888895e-06,
      "loss": 2.9491,
      "step": 180500
    },
    {
      "epoch": 2.52,
      "learning_rate": 4.0052829861111116e-06,
      "loss": 2.9549,
      "step": 181000
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.001038020833334e-06,
      "loss": 2.9441,
      "step": 181500
    },
    {
      "epoch": 2.54,
      "learning_rate": 3.996784548611111e-06,
      "loss": 2.9467,
      "step": 182000
    },
    {
      "epoch": 2.54,
      "learning_rate": 3.992531076388889e-06,
      "loss": 2.9639,
      "step": 182500
    },
    {
      "epoch": 2.55,
      "learning_rate": 3.9882776041666665e-06,
      "loss": 2.9197,
      "step": 183000
    },
    {
      "epoch": 2.56,
      "learning_rate": 3.984024131944445e-06,
      "loss": 2.9623,
      "step": 183500
    },
    {
      "epoch": 2.56,
      "learning_rate": 3.979770659722223e-06,
      "loss": 2.9327,
      "step": 184000
    },
    {
      "epoch": 2.57,
      "learning_rate": 3.9755171875e-06,
      "loss": 2.9428,
      "step": 184500
    },
    {
      "epoch": 2.58,
      "learning_rate": 3.971263715277778e-06,
      "loss": 2.9664,
      "step": 185000
    },
    {
      "epoch": 2.58,
      "learning_rate": 3.9670102430555555e-06,
      "loss": 2.9504,
      "step": 185500
    },
    {
      "epoch": 2.59,
      "learning_rate": 3.962756770833334e-06,
      "loss": 2.9371,
      "step": 186000
    },
    {
      "epoch": 2.6,
      "learning_rate": 3.958511805555556e-06,
      "loss": 2.9371,
      "step": 186500
    },
    {
      "epoch": 2.61,
      "learning_rate": 3.954266840277778e-06,
      "loss": 2.9273,
      "step": 187000
    },
    {
      "epoch": 2.61,
      "learning_rate": 3.950013368055556e-06,
      "loss": 2.9214,
      "step": 187500
    },
    {
      "epoch": 2.62,
      "learning_rate": 3.945759895833333e-06,
      "loss": 2.9627,
      "step": 188000
    },
    {
      "epoch": 2.63,
      "learning_rate": 3.941506423611111e-06,
      "loss": 2.9431,
      "step": 188500
    },
    {
      "epoch": 2.63,
      "learning_rate": 3.937252951388889e-06,
      "loss": 2.9278,
      "step": 189000
    },
    {
      "epoch": 2.64,
      "learning_rate": 3.932999479166667e-06,
      "loss": 2.9434,
      "step": 189500
    },
    {
      "epoch": 2.65,
      "learning_rate": 3.928746006944445e-06,
      "loss": 2.9549,
      "step": 190000
    },
    {
      "epoch": 2.65,
      "learning_rate": 3.924492534722222e-06,
      "loss": 2.9484,
      "step": 190500
    },
    {
      "epoch": 2.66,
      "learning_rate": 3.9202390625e-06,
      "loss": 2.9455,
      "step": 191000
    },
    {
      "epoch": 2.67,
      "learning_rate": 3.915985590277778e-06,
      "loss": 2.9293,
      "step": 191500
    },
    {
      "epoch": 2.67,
      "learning_rate": 3.911732118055556e-06,
      "loss": 2.9319,
      "step": 192000
    },
    {
      "epoch": 2.68,
      "learning_rate": 3.907487152777778e-06,
      "loss": 2.9432,
      "step": 192500
    },
    {
      "epoch": 2.69,
      "learning_rate": 3.903233680555556e-06,
      "loss": 2.9401,
      "step": 193000
    },
    {
      "epoch": 2.7,
      "learning_rate": 3.898980208333333e-06,
      "loss": 2.9472,
      "step": 193500
    },
    {
      "epoch": 2.7,
      "learning_rate": 3.894726736111112e-06,
      "loss": 2.9551,
      "step": 194000
    },
    {
      "epoch": 2.71,
      "learning_rate": 3.8904732638888895e-06,
      "loss": 2.961,
      "step": 194500
    },
    {
      "epoch": 2.72,
      "learning_rate": 3.886219791666667e-06,
      "loss": 2.9481,
      "step": 195000
    },
    {
      "epoch": 2.72,
      "learning_rate": 3.881966319444445e-06,
      "loss": 2.9435,
      "step": 195500
    },
    {
      "epoch": 2.73,
      "learning_rate": 3.877712847222223e-06,
      "loss": 2.9746,
      "step": 196000
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.873467881944445e-06,
      "loss": 2.9425,
      "step": 196500
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.869214409722223e-06,
      "loss": 2.9271,
      "step": 197000
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.8649609375000005e-06,
      "loss": 2.9317,
      "step": 197500
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.860707465277778e-06,
      "loss": 2.9337,
      "step": 198000
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.8564625e-06,
      "loss": 2.9324,
      "step": 198500
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.852209027777778e-06,
      "loss": 2.9317,
      "step": 199000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.847955555555556e-06,
      "loss": 2.9405,
      "step": 199500
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.843702083333334e-06,
      "loss": 2.9489,
      "step": 200000
    },
    {
      "epoch": 2.79,
      "eval_gen_len": 19.9976,
      "eval_loss": 2.804373025894165,
      "eval_rouge1": 0.2997,
      "eval_rouge2": 0.1765,
      "eval_rougeL": 0.2536,
      "eval_rougeLsum": 0.285,
      "eval_runtime": 887.4822,
      "eval_samples_per_second": 15.063,
      "eval_steps_per_second": 3.766,
      "step": 200000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.839457118055556e-06,
      "loss": 2.942,
      "step": 200500
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.835203645833334e-06,
      "loss": 2.9428,
      "step": 201000
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.830950173611111e-06,
      "loss": 2.9295,
      "step": 201500
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.82669670138889e-06,
      "loss": 2.9288,
      "step": 202000
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.822451736111112e-06,
      "loss": 2.9385,
      "step": 202500
    },
    {
      "epoch": 2.83,
      "learning_rate": 3.818198263888889e-06,
      "loss": 2.9316,
      "step": 203000
    },
    {
      "epoch": 2.84,
      "learning_rate": 3.813944791666667e-06,
      "loss": 2.9343,
      "step": 203500
    },
    {
      "epoch": 2.84,
      "learning_rate": 3.809691319444445e-06,
      "loss": 2.9224,
      "step": 204000
    },
    {
      "epoch": 2.85,
      "learning_rate": 3.8054463541666663e-06,
      "loss": 2.921,
      "step": 204500
    },
    {
      "epoch": 2.86,
      "learning_rate": 3.8011928819444447e-06,
      "loss": 2.9351,
      "step": 205000
    },
    {
      "epoch": 2.86,
      "learning_rate": 3.7969394097222224e-06,
      "loss": 2.9315,
      "step": 205500
    },
    {
      "epoch": 2.87,
      "learning_rate": 3.7926859375000004e-06,
      "loss": 2.9432,
      "step": 206000
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.7884409722222225e-06,
      "loss": 2.9229,
      "step": 206500
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.784196006944445e-06,
      "loss": 2.9701,
      "step": 207000
    },
    {
      "epoch": 2.89,
      "learning_rate": 3.7799425347222226e-06,
      "loss": 2.9438,
      "step": 207500
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.7756975694444446e-06,
      "loss": 2.9439,
      "step": 208000
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.7714440972222223e-06,
      "loss": 2.9369,
      "step": 208500
    },
    {
      "epoch": 2.91,
      "learning_rate": 3.7671906250000003e-06,
      "loss": 2.9344,
      "step": 209000
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.7629371527777788e-06,
      "loss": 2.92,
      "step": 209500
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.758683680555556e-06,
      "loss": 2.9194,
      "step": 210000
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.754430208333333e-06,
      "loss": 2.9129,
      "step": 210500
    },
    {
      "epoch": 2.94,
      "learning_rate": 3.750176736111112e-06,
      "loss": 2.9227,
      "step": 211000
    },
    {
      "epoch": 2.95,
      "learning_rate": 3.7459232638888893e-06,
      "loss": 2.9282,
      "step": 211500
    },
    {
      "epoch": 2.95,
      "learning_rate": 3.741678298611111e-06,
      "loss": 2.9398,
      "step": 212000
    },
    {
      "epoch": 2.96,
      "learning_rate": 3.7374333333333334e-06,
      "loss": 2.9403,
      "step": 212500
    },
    {
      "epoch": 2.97,
      "learning_rate": 3.733179861111111e-06,
      "loss": 2.9289,
      "step": 213000
    },
    {
      "epoch": 2.97,
      "learning_rate": 3.7289263888888887e-06,
      "loss": 2.9418,
      "step": 213500
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.7246729166666667e-06,
      "loss": 2.9074,
      "step": 214000
    },
    {
      "epoch": 2.99,
      "learning_rate": 3.720419444444445e-06,
      "loss": 2.9215,
      "step": 214500
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.7161744791666672e-06,
      "loss": 2.94,
      "step": 215000
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.711921006944445e-06,
      "loss": 2.9053,
      "step": 215500
    },
    {
      "epoch": 3.01,
      "learning_rate": 3.7076675347222225e-06,
      "loss": 2.9239,
      "step": 216000
    },
    {
      "epoch": 3.02,
      "learning_rate": 3.7034140625000005e-06,
      "loss": 2.9077,
      "step": 216500
    },
    {
      "epoch": 3.02,
      "learning_rate": 3.6991605902777777e-06,
      "loss": 2.8968,
      "step": 217000
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.694907118055556e-06,
      "loss": 2.8916,
      "step": 217500
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.6906536458333334e-06,
      "loss": 2.8977,
      "step": 218000
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.6864001736111115e-06,
      "loss": 2.9036,
      "step": 218500
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.6821467013888895e-06,
      "loss": 2.9089,
      "step": 219000
    },
    {
      "epoch": 3.06,
      "learning_rate": 3.6778932291666667e-06,
      "loss": 2.9032,
      "step": 219500
    },
    {
      "epoch": 3.06,
      "learning_rate": 3.6736397569444448e-06,
      "loss": 2.8927,
      "step": 220000
    },
    {
      "epoch": 3.07,
      "learning_rate": 3.6693862847222224e-06,
      "loss": 2.9037,
      "step": 220500
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.6651328125e-06,
      "loss": 2.8934,
      "step": 221000
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.6608793402777777e-06,
      "loss": 2.8753,
      "step": 221500
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.656625868055556e-06,
      "loss": 2.8971,
      "step": 222000
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.652372395833334e-06,
      "loss": 2.921,
      "step": 222500
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.6481274305555562e-06,
      "loss": 2.8975,
      "step": 223000
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.643873958333334e-06,
      "loss": 2.9101,
      "step": 223500
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.639620486111112e-06,
      "loss": 2.9017,
      "step": 224000
    },
    {
      "epoch": 3.13,
      "learning_rate": 3.6353755208333336e-06,
      "loss": 2.8966,
      "step": 224500
    },
    {
      "epoch": 3.13,
      "learning_rate": 3.6311220486111108e-06,
      "loss": 2.905,
      "step": 225000
    },
    {
      "epoch": 3.14,
      "learning_rate": 3.626868576388889e-06,
      "loss": 2.8967,
      "step": 225500
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.622615104166667e-06,
      "loss": 2.9023,
      "step": 226000
    },
    {
      "epoch": 3.16,
      "learning_rate": 3.618361631944444e-06,
      "loss": 2.8865,
      "step": 226500
    },
    {
      "epoch": 3.16,
      "learning_rate": 3.6141081597222225e-06,
      "loss": 2.8802,
      "step": 227000
    },
    {
      "epoch": 3.17,
      "learning_rate": 3.6098546875000006e-06,
      "loss": 2.9002,
      "step": 227500
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.6056012152777782e-06,
      "loss": 2.8917,
      "step": 228000
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.601347743055556e-06,
      "loss": 2.9035,
      "step": 228500
    },
    {
      "epoch": 3.19,
      "learning_rate": 3.5970942708333335e-06,
      "loss": 2.8919,
      "step": 229000
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.5928407986111115e-06,
      "loss": 2.8943,
      "step": 229500
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.5885873263888887e-06,
      "loss": 2.8891,
      "step": 230000
    },
    {
      "epoch": 3.21,
      "learning_rate": 3.584342361111111e-06,
      "loss": 2.9066,
      "step": 230500
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.580097395833334e-06,
      "loss": 2.9018,
      "step": 231000
    },
    {
      "epoch": 3.23,
      "learning_rate": 3.575843923611112e-06,
      "loss": 2.9047,
      "step": 231500
    },
    {
      "epoch": 3.23,
      "learning_rate": 3.5715904513888894e-06,
      "loss": 2.8973,
      "step": 232000
    },
    {
      "epoch": 3.24,
      "learning_rate": 3.5673369791666666e-06,
      "loss": 2.8793,
      "step": 232500
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.5630835069444446e-06,
      "loss": 2.9009,
      "step": 233000
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.5588300347222222e-06,
      "loss": 2.8949,
      "step": 233500
    },
    {
      "epoch": 3.26,
      "learning_rate": 3.5545765625e-06,
      "loss": 2.8778,
      "step": 234000
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.550323090277778e-06,
      "loss": 2.9041,
      "step": 234500
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.546078125000001e-06,
      "loss": 2.8886,
      "step": 235000
    },
    {
      "epoch": 3.28,
      "learning_rate": 3.5418246527777784e-06,
      "loss": 2.8954,
      "step": 235500
    },
    {
      "epoch": 3.29,
      "learning_rate": 3.537571180555556e-06,
      "loss": 2.9093,
      "step": 236000
    },
    {
      "epoch": 3.29,
      "learning_rate": 3.5333177083333337e-06,
      "loss": 2.9183,
      "step": 236500
    },
    {
      "epoch": 3.3,
      "learning_rate": 3.529072743055556e-06,
      "loss": 2.9078,
      "step": 237000
    },
    {
      "epoch": 3.31,
      "learning_rate": 3.524827777777778e-06,
      "loss": 2.8864,
      "step": 237500
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.520574305555556e-06,
| "loss": 2.8886, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 3.516320833333334e-06, | |
| "loss": 2.8817, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 3.5120673611111115e-06, | |
| "loss": 2.8985, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 3.507813888888889e-06, | |
| "loss": 2.9103, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 3.5035604166666672e-06, | |
| "loss": 2.9197, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 3.499306944444445e-06, | |
| "loss": 2.9131, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 3.4950534722222225e-06, | |
| "loss": 2.8789, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 3.4908085069444445e-06, | |
| "loss": 2.8953, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 3.486563541666667e-06, | |
| "loss": 2.9162, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 3.4823100694444446e-06, | |
| "loss": 2.9018, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 3.4780565972222223e-06, | |
| "loss": 2.8845, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 3.4738031250000003e-06, | |
| "loss": 2.9187, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 3.469549652777778e-06, | |
| "loss": 2.9121, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 3.4652961805555556e-06, | |
| "loss": 2.8927, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 3.461042708333333e-06, | |
| "loss": 2.894, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 3.4567892361111112e-06, | |
| "loss": 2.8821, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 3.452544270833334e-06, | |
| "loss": 2.8956, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 3.4482907986111114e-06, | |
| "loss": 2.9154, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 3.4440373263888894e-06, | |
| "loss": 2.8951, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 3.4397838541666675e-06, | |
| "loss": 2.914, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 3.435538888888889e-06, | |
| "loss": 2.8909, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 3.4312939236111107e-06, | |
| "loss": 2.8745, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 3.4270404513888888e-06, | |
| "loss": 2.8936, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 3.4227869791666672e-06, | |
| "loss": 2.8971, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 3.418533506944445e-06, | |
| "loss": 2.8979, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 3.4142800347222225e-06, | |
| "loss": 2.9046, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 3.4100265625000005e-06, | |
| "loss": 2.9042, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 3.405773090277778e-06, | |
| "loss": 2.9047, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 3.401519618055556e-06, | |
| "loss": 2.8983, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 3.397274652777778e-06, | |
| "loss": 2.8899, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 3.393021180555556e-06, | |
| "loss": 2.8774, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 3.388767708333333e-06, | |
| "loss": 2.8801, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 3.3845142361111116e-06, | |
| "loss": 2.8942, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 3.380269270833334e-06, | |
| "loss": 2.8949, | |
| "step": 254500 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 3.376015798611112e-06, | |
| "loss": 2.8979, | |
| "step": 255000 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 3.3717623263888893e-06, | |
| "loss": 2.8841, | |
| "step": 255500 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 3.3675088541666665e-06, | |
| "loss": 2.9073, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 3.363263888888889e-06, | |
| "loss": 2.8964, | |
| "step": 256500 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 3.3590104166666666e-06, | |
| "loss": 2.8877, | |
| "step": 257000 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 3.3547569444444443e-06, | |
| "loss": 2.8845, | |
| "step": 257500 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 3.3505034722222223e-06, | |
| "loss": 2.8919, | |
| "step": 258000 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 3.3462585069444452e-06, | |
| "loss": 2.8966, | |
| "step": 258500 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 3.342005034722223e-06, | |
| "loss": 2.8818, | |
| "step": 259000 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 3.3377515625000005e-06, | |
| "loss": 2.907, | |
| "step": 259500 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 3.333498090277778e-06, | |
| "loss": 2.8971, | |
| "step": 260000 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 3.329244618055556e-06, | |
| "loss": 2.8767, | |
| "step": 260500 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 3.3249996527777778e-06, | |
| "loss": 2.8965, | |
| "step": 261000 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 3.320746180555556e-06, | |
| "loss": 2.887, | |
| "step": 261500 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 3.316492708333334e-06, | |
| "loss": 2.9044, | |
| "step": 262000 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 3.3122392361111124e-06, | |
| "loss": 2.8872, | |
| "step": 262500 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 3.3079857638888896e-06, | |
| "loss": 2.8991, | |
| "step": 263000 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 3.303732291666667e-06, | |
| "loss": 2.8981, | |
| "step": 263500 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 3.299478819444445e-06, | |
| "loss": 2.8841, | |
| "step": 264000 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 3.2952253472222224e-06, | |
| "loss": 2.8795, | |
| "step": 264500 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 3.2909803819444445e-06, | |
| "loss": 2.8832, | |
| "step": 265000 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 3.2867269097222225e-06, | |
| "loss": 2.8909, | |
| "step": 265500 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 3.2824734375e-06, | |
| "loss": 2.8991, | |
| "step": 266000 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 3.278219965277778e-06, | |
| "loss": 2.8764, | |
| "step": 266500 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 3.2739750000000003e-06, | |
| "loss": 2.9024, | |
| "step": 267000 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 3.2697215277777783e-06, | |
| "loss": 2.8911, | |
| "step": 267500 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 3.2654680555555555e-06, | |
| "loss": 2.8698, | |
| "step": 268000 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 3.2612145833333336e-06, | |
| "loss": 2.8976, | |
| "step": 268500 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 3.2569696180555556e-06, | |
| "loss": 2.9034, | |
| "step": 269000 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 3.252716145833334e-06, | |
| "loss": 2.8884, | |
| "step": 269500 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 3.2484626736111113e-06, | |
| "loss": 2.8702, | |
| "step": 270000 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 3.2442092013888894e-06, | |
| "loss": 2.8704, | |
| "step": 270500 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 3.239964236111112e-06, | |
| "loss": 2.8964, | |
| "step": 271000 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 3.235710763888889e-06, | |
| "loss": 2.8667, | |
| "step": 271500 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 3.2314572916666663e-06, | |
| "loss": 2.8679, | |
| "step": 272000 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 3.2272038194444443e-06, | |
| "loss": 2.9098, | |
| "step": 272500 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 3.2229588541666664e-06, | |
| "loss": 2.9084, | |
| "step": 273000 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 3.218705381944445e-06, | |
| "loss": 2.8924, | |
| "step": 273500 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 3.2144519097222225e-06, | |
| "loss": 2.8818, | |
| "step": 274000 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 3.2101984375000005e-06, | |
| "loss": 2.8812, | |
| "step": 274500 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 3.2059534722222226e-06, | |
| "loss": 2.8916, | |
| "step": 275000 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 3.2017e-06, | |
| "loss": 2.8987, | |
| "step": 275500 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 3.197446527777778e-06, | |
| "loss": 2.8862, | |
| "step": 276000 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 3.193193055555556e-06, | |
| "loss": 2.9055, | |
| "step": 276500 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 3.188939583333333e-06, | |
| "loss": 2.8696, | |
| "step": 277000 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 3.184703125e-06, | |
| "loss": 2.8985, | |
| "step": 277500 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 3.1804496527777785e-06, | |
| "loss": 2.8928, | |
| "step": 278000 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 3.1761961805555565e-06, | |
| "loss": 2.9052, | |
| "step": 278500 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 3.1719427083333337e-06, | |
| "loss": 2.889, | |
| "step": 279000 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 3.167689236111111e-06, | |
| "loss": 2.9006, | |
| "step": 279500 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 3.163435763888889e-06, | |
| "loss": 2.8991, | |
| "step": 280000 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 3.1591822916666666e-06, | |
| "loss": 2.866, | |
| "step": 280500 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 3.1549288194444442e-06, | |
| "loss": 2.9045, | |
| "step": 281000 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 3.1506838541666667e-06, | |
| "loss": 2.8751, | |
| "step": 281500 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 3.146430381944445e-06, | |
| "loss": 2.896, | |
| "step": 282000 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 3.142176909722223e-06, | |
| "loss": 2.8861, | |
| "step": 282500 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 3.137931944444445e-06, | |
| "loss": 2.8944, | |
| "step": 283000 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 3.1336784722222225e-06, | |
| "loss": 2.8774, | |
| "step": 283500 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 3.1294250000000005e-06, | |
| "loss": 2.8914, | |
| "step": 284000 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 3.1251715277777777e-06, | |
| "loss": 2.8857, | |
| "step": 284500 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 3.1209180555555554e-06, | |
| "loss": 2.8913, | |
| "step": 285000 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 3.116664583333334e-06, | |
| "loss": 2.8684, | |
| "step": 285500 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 3.112411111111111e-06, | |
| "loss": 2.9081, | |
| "step": 286000 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 3.1081576388888895e-06, | |
| "loss": 2.8912, | |
| "step": 286500 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 3.103921180555556e-06, | |
| "loss": 2.8903, | |
| "step": 287000 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 3.0996677083333336e-06, | |
| "loss": 2.884, | |
| "step": 287500 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 3.0954142361111113e-06, | |
| "loss": 2.8876, | |
| "step": 288000 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "learning_rate": 3.091160763888889e-06, | |
| "loss": 2.8451, | |
| "step": 288500 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "learning_rate": 3.086907291666667e-06, | |
| "loss": 2.8658, | |
| "step": 289000 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "learning_rate": 3.0826538194444446e-06, | |
| "loss": 2.8625, | |
| "step": 289500 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "learning_rate": 3.078400347222223e-06, | |
| "loss": 2.8439, | |
| "step": 290000 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "learning_rate": 3.0741468750000007e-06, | |
| "loss": 2.855, | |
| "step": 290500 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "learning_rate": 3.0699019097222223e-06, | |
| "loss": 2.8535, | |
| "step": 291000 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "learning_rate": 3.0656484375e-06, | |
| "loss": 2.8424, | |
| "step": 291500 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "learning_rate": 3.061394965277778e-06, | |
| "loss": 2.8511, | |
| "step": 292000 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 3.05715e-06, | |
| "loss": 2.8617, | |
| "step": 292500 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 3.0528965277777785e-06, | |
| "loss": 2.8729, | |
| "step": 293000 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "learning_rate": 3.0486430555555557e-06, | |
| "loss": 2.8677, | |
| "step": 293500 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 3.0443895833333338e-06, | |
| "loss": 2.8543, | |
| "step": 294000 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 3.040136111111112e-06, | |
| "loss": 2.8521, | |
| "step": 294500 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "learning_rate": 3.035882638888889e-06, | |
| "loss": 2.8704, | |
| "step": 295000 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "learning_rate": 3.031629166666667e-06, | |
| "loss": 2.8627, | |
| "step": 295500 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "learning_rate": 3.0273756944444447e-06, | |
| "loss": 2.8586, | |
| "step": 296000 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "learning_rate": 3.0231307291666667e-06, | |
| "loss": 2.8735, | |
| "step": 296500 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "learning_rate": 3.018877256944445e-06, | |
| "loss": 2.8473, | |
| "step": 297000 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "learning_rate": 3.0146237847222224e-06, | |
| "loss": 2.854, | |
| "step": 297500 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "learning_rate": 3.0103703125000005e-06, | |
| "loss": 2.8491, | |
| "step": 298000 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "learning_rate": 3.0061253472222225e-06, | |
| "loss": 2.8709, | |
| "step": 298500 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 3.001871875e-06, | |
| "loss": 2.8364, | |
| "step": 299000 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 2.9976269097222222e-06, | |
| "loss": 2.8504, | |
| "step": 299500 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "learning_rate": 2.9933734375000003e-06, | |
| "loss": 2.8511, | |
| "step": 300000 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "eval_gen_len": 19.9976, | |
| "eval_loss": 2.775161027908325, | |
| "eval_rouge1": 0.2957, | |
| "eval_rouge2": 0.173, | |
| "eval_rougeL": 0.2501, | |
| "eval_rougeLsum": 0.2812, | |
| "eval_runtime": 899.8085, | |
| "eval_samples_per_second": 14.856, | |
| "eval_steps_per_second": 3.714, | |
| "step": 300000 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 2.9891199652777775e-06, | |
| "loss": 2.8515, | |
| "step": 300500 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 2.984866493055556e-06, | |
| "loss": 2.8585, | |
| "step": 301000 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "learning_rate": 2.9806130208333336e-06, | |
| "loss": 2.8539, | |
| "step": 301500 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 2.976359548611112e-06, | |
| "loss": 2.8733, | |
| "step": 302000 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 2.9721060763888892e-06, | |
| "loss": 2.8662, | |
| "step": 302500 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "learning_rate": 2.9678526041666665e-06, | |
| "loss": 2.8563, | |
| "step": 303000 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 2.963607638888889e-06, | |
| "loss": 2.8411, | |
| "step": 303500 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "learning_rate": 2.959362673611111e-06, | |
| "loss": 2.8743, | |
| "step": 304000 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "learning_rate": 2.9551092013888886e-06, | |
| "loss": 2.8903, | |
| "step": 304500 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 2.950855729166667e-06, | |
| "loss": 2.8469, | |
| "step": 305000 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 2.9466022569444447e-06, | |
| "loss": 2.8481, | |
| "step": 305500 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 2.9423487847222228e-06, | |
| "loss": 2.8606, | |
| "step": 306000 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "learning_rate": 2.938103819444445e-06, | |
| "loss": 2.8752, | |
| "step": 306500 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 2.9338503472222224e-06, | |
| "loss": 2.8531, | |
| "step": 307000 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 2.9295968750000005e-06, | |
| "loss": 2.8507, | |
| "step": 307500 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "learning_rate": 2.9253434027777777e-06, | |
| "loss": 2.8744, | |
| "step": 308000 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "learning_rate": 2.9210899305555553e-06, | |
| "loss": 2.8672, | |
| "step": 308500 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "learning_rate": 2.916836458333334e-06, | |
| "loss": 2.8692, | |
| "step": 309000 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "learning_rate": 2.9125829861111114e-06, | |
| "loss": 2.8503, | |
| "step": 309500 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 2.9083295138888895e-06, | |
| "loss": 2.8534, | |
| "step": 310000 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "learning_rate": 2.9040845486111115e-06, | |
| "loss": 2.8941, | |
| "step": 310500 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "learning_rate": 2.899831076388889e-06, | |
| "loss": 2.8798, | |
| "step": 311000 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 2.895577604166667e-06, | |
| "loss": 2.8578, | |
| "step": 311500 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "learning_rate": 2.8913241319444444e-06, | |
| "loss": 2.8472, | |
| "step": 312000 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "learning_rate": 2.8870791666666665e-06, | |
| "loss": 2.859, | |
| "step": 312500 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "learning_rate": 2.8828256944444445e-06, | |
| "loss": 2.85, | |
| "step": 313000 | |
| }, | |
| { | |
| "epoch": 4.37, | |
| "learning_rate": 2.878572222222222e-06, | |
| "loss": 2.863, | |
| "step": 313500 | |
| }, | |
| { | |
| "epoch": 4.37, | |
| "learning_rate": 2.8743187500000006e-06, | |
| "loss": 2.8716, | |
| "step": 314000 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "learning_rate": 2.8700737847222227e-06, | |
| "loss": 2.8425, | |
| "step": 314500 | |
| }, | |
| { | |
| "epoch": 4.39, | |
| "learning_rate": 2.8658203125e-06, | |
| "loss": 2.867, | |
| "step": 315000 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "learning_rate": 2.861566840277778e-06, | |
| "loss": 2.8505, | |
| "step": 315500 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "learning_rate": 2.8573133680555556e-06, | |
| "loss": 2.848, | |
| "step": 316000 | |
| }, | |
| { | |
| "epoch": 4.41, | |
| "learning_rate": 2.8530684027777785e-06, | |
| "loss": 2.8435, | |
| "step": 316500 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "learning_rate": 2.8488149305555557e-06, | |
| "loss": 2.8632, | |
| "step": 317000 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "learning_rate": 2.844569965277778e-06, | |
| "loss": 2.8656, | |
| "step": 317500 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "learning_rate": 2.840316493055556e-06, | |
| "loss": 2.8368, | |
| "step": 318000 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "learning_rate": 2.8360630208333334e-06, | |
| "loss": 2.873, | |
| "step": 318500 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "learning_rate": 2.8318095486111115e-06, | |
| "loss": 2.851, | |
| "step": 319000 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "learning_rate": 2.8275560763888895e-06, | |
| "loss": 2.861, | |
| "step": 319500 | |
| }, | |
| { | |
| "epoch": 4.46, | |
| "learning_rate": 2.8233026041666667e-06, | |
| "loss": 2.8546, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "learning_rate": 2.819049131944444e-06, | |
| "loss": 2.8439, | |
| "step": 320500 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "learning_rate": 2.8147956597222224e-06, | |
| "loss": 2.8733, | |
| "step": 321000 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "learning_rate": 2.810550694444445e-06, | |
| "loss": 2.861, | |
| "step": 321500 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "learning_rate": 2.8062972222222225e-06, | |
| "loss": 2.8453, | |
| "step": 322000 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "learning_rate": 2.8020522569444445e-06, | |
| "loss": 2.8492, | |
| "step": 322500 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "learning_rate": 2.797798784722222e-06, | |
| "loss": 2.8501, | |
| "step": 323000 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "learning_rate": 2.7935453125000002e-06, | |
| "loss": 2.8624, | |
| "step": 323500 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "learning_rate": 2.7892918402777774e-06, | |
| "loss": 2.8503, | |
| "step": 324000 | |
| }, | |
| { | |
| "epoch": 4.52, | |
| "learning_rate": 2.785038368055556e-06, | |
| "loss": 2.871, | |
| "step": 324500 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "learning_rate": 2.7807848958333335e-06, | |
| "loss": 2.8643, | |
| "step": 325000 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "learning_rate": 2.776531423611112e-06, | |
| "loss": 2.8591, | |
| "step": 325500 | |
| }, | |
| { | |
| "epoch": 4.54, | |
| "learning_rate": 2.772277951388889e-06, | |
| "loss": 2.8494, | |
| "step": 326000 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "learning_rate": 2.768032986111111e-06, | |
| "loss": 2.8592, | |
| "step": 326500 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "learning_rate": 2.763779513888889e-06, | |
| "loss": 2.866, | |
| "step": 327000 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "learning_rate": 2.7595260416666665e-06, | |
| "loss": 2.8772, | |
| "step": 327500 | |
| }, | |
| { | |
| "epoch": 4.57, | |
| "learning_rate": 2.755272569444444e-06, | |
| "loss": 2.8435, | |
| "step": 328000 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "learning_rate": 2.751027604166667e-06, | |
| "loss": 2.8715, | |
| "step": 328500 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "learning_rate": 2.7467826388888895e-06, | |
| "loss": 2.8687, | |
| "step": 329000 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "learning_rate": 2.742529166666667e-06, | |
| "loss": 2.8434, | |
| "step": 329500 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "learning_rate": 2.7382756944444448e-06, | |
| "loss": 2.8617, | |
| "step": 330000 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "learning_rate": 2.7340222222222224e-06, | |
| "loss": 2.8492, | |
| "step": 330500 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "learning_rate": 2.7297687500000005e-06, | |
| "loss": 2.8672, | |
| "step": 331000 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "learning_rate": 2.7255152777777777e-06, | |
| "loss": 2.8813, | |
| "step": 331500 | |
| }, | |
| { | |
| "epoch": 4.63, | |
| "learning_rate": 2.7212618055555553e-06, | |
| "loss": 2.8522, | |
| "step": 332000 | |
| }, | |
| { | |
| "epoch": 4.63, | |
| "learning_rate": 2.7170083333333338e-06, | |
| "loss": 2.8501, | |
| "step": 332500 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "learning_rate": 2.712763368055556e-06, | |
| "loss": 2.8442, | |
| "step": 333000 | |
| }, | |
| { | |
| "epoch": 4.65, | |
| "learning_rate": 2.708509895833334e-06, | |
| "loss": 2.857, | |
| "step": 333500 | |
| }, | |
| { | |
| "epoch": 4.65, | |
| "learning_rate": 2.7042564236111115e-06, | |
| "loss": 2.8543, | |
| "step": 334000 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "learning_rate": 2.700002951388889e-06, | |
| "loss": 2.8721, | |
| "step": 334500 | |
| }, | |
| { | |
| "epoch": 4.67, | |
| "learning_rate": 2.6957494791666668e-06, | |
| "loss": 2.8362, | |
| "step": 335000 | |
| }, | |
| { | |
| "epoch": 4.67, | |
| "learning_rate": 2.6915130208333332e-06, | |
| "loss": 2.848, | |
| "step": 335500 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "learning_rate": 2.6872595486111113e-06, | |
| "loss": 2.8602, | |
| "step": 336000 | |
| }, | |
| { | |
| "epoch": 4.69, | |
| "learning_rate": 2.6830145833333333e-06, | |
| "loss": 2.8643, | |
| "step": 336500 | |
| }, | |
| { | |
| "epoch": 4.69, | |
| "learning_rate": 2.678761111111111e-06, | |
| "loss": 2.857, | |
| "step": 337000 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "learning_rate": 2.6745076388888894e-06, | |
| "loss": 2.8649, | |
| "step": 337500 | |
| }, | |
| { | |
| "epoch": 4.71, | |
| "learning_rate": 2.670254166666667e-06, | |
| "loss": 2.8405, | |
| "step": 338000 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "learning_rate": 2.6660006944444443e-06, | |
| "loss": 2.859, | |
| "step": 338500 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "learning_rate": 2.6617472222222223e-06, | |
| "loss": 2.8678, | |
| "step": 339000 | |
| }, | |
| { | |
| "epoch": 4.73, | |
| "learning_rate": 2.65749375e-06, | |
| "loss": 2.8263, | |
| "step": 339500 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "learning_rate": 2.6532402777777776e-06, | |
| "loss": 2.8557, | |
| "step": 340000 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "learning_rate": 2.6489868055555556e-06, | |
| "loss": 2.8472, | |
| "step": 340500 | |
| }, | |
| { | |
| "epoch": 4.75, | |
| "learning_rate": 2.644741840277778e-06, | |
| "loss": 2.8889, | |
| "step": 341000 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "learning_rate": 2.640488368055556e-06, | |
| "loss": 2.8811, | |
| "step": 341500 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "learning_rate": 2.636234895833334e-06, | |
| "loss": 2.8624, | |
| "step": 342000 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "learning_rate": 2.6319814236111114e-06, | |
| "loss": 2.8698, | |
| "step": 342500 | |
| }, | |
| { | |
| "epoch": 4.78, | |
| "learning_rate": 2.627727951388889e-06, | |
| "loss": 2.8478, | |
| "step": 343000 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "learning_rate": 2.623482986111111e-06, | |
| "loss": 2.8581, | |
| "step": 343500 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "learning_rate": 2.619229513888889e-06, | |
| "loss": 2.8464, | |
| "step": 344000 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "learning_rate": 2.6149760416666668e-06, | |
| "loss": 2.8497, | |
| "step": 344500 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "learning_rate": 2.610722569444445e-06, | |
| "loss": 2.8453, | |
| "step": 345000 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "learning_rate": 2.6064690972222225e-06, | |
| "loss": 2.8648, | |
| "step": 345500 | |
| }, | |
| { | |
| "epoch": 4.82, | |
| "learning_rate": 2.602215625e-06, | |
| "loss": 2.8335, | |
| "step": 346000 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "learning_rate": 2.597962152777778e-06, | |
| "loss": 2.8401, | |
| "step": 346500 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "learning_rate": 2.5937086805555558e-06, | |
| "loss": 2.8645, | |
| "step": 347000 | |
| }, | |
| { | |
| "epoch": 4.84, | |
| "learning_rate": 2.589463715277778e-06, | |
| "loss": 2.867, | |
| "step": 347500 | |
| }, | |
| { | |
| "epoch": 4.85, | |
| "learning_rate": 2.5852102430555554e-06, | |
| "loss": 2.8612, | |
| "step": 348000 | |
| }, | |
| { | |
| "epoch": 4.86, | |
| "learning_rate": 2.5809567708333335e-06, | |
| "loss": 2.869, | |
| "step": 348500 | |
| }, | |
| { | |
| "epoch": 4.86, | |
| "learning_rate": 2.5767118055555564e-06, | |
| "loss": 2.8559, | |
| "step": 349000 | |
| }, | |
| { | |
| "epoch": 4.87, | |
| "learning_rate": 2.5724583333333336e-06, | |
| "loss": 2.8605, | |
| "step": 349500 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "learning_rate": 2.568204861111111e-06, | |
| "loss": 2.8604, | |
| "step": 350000 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "learning_rate": 2.563951388888889e-06, | |
| "loss": 2.8465, | |
| "step": 350500 | |
| }, | |
| { | |
| "epoch": 4.89, | |
| "learning_rate": 2.559697916666667e-06, | |
| "loss": 2.8692, | |
| "step": 351000 | |
| }, | |
| { | |
| "epoch": 4.9, | |
| "learning_rate": 2.555444444444444e-06, | |
| "loss": 2.8702, | |
| "step": 351500 | |
| }, | |
| { | |
| "epoch": 4.9, | |
| "learning_rate": 2.551190972222222e-06, | |
| "loss": 2.8705, | |
| "step": 352000 | |
| }, | |
| { | |
| "epoch": 4.91, | |
| "learning_rate": 2.5469375e-06, | |
| "loss": 2.8593, | |
| "step": 352500 | |
| }, | |
| { | |
| "epoch": 4.92, | |
| "learning_rate": 2.542692534722222e-06, | |
| "loss": 2.8565, | |
| "step": 353000 | |
| }, | |
| { | |
| "epoch": 4.92, | |
| "learning_rate": 2.5384390625e-06, | |
| "loss": 2.8441, | |
| "step": 353500 | |
| }, | |
| { | |
| "epoch": 4.93, | |
| "learning_rate": 2.534185590277778e-06, | |
| "loss": 2.841, | |
| "step": 354000 | |
| }, | |
| { | |
| "epoch": 4.94, | |
| "learning_rate": 2.529932118055556e-06, | |
| "loss": 2.8591, | |
| "step": 354500 | |
| }, | |
| { | |
| "epoch": 4.95, | |
| "learning_rate": 2.5256871527777776e-06, | |
| "loss": 2.8393, | |
| "step": 355000 | |
| }, | |
| { | |
| "epoch": 4.95, | |
| "learning_rate": 2.521433680555556e-06, | |
| "loss": 2.8701, | |
| "step": 355500 | |
| }, | |
| { | |
| "epoch": 4.96, | |
| "learning_rate": 2.5171802083333337e-06, | |
| "loss": 2.8733, | |
| "step": 356000 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "learning_rate": 2.5129267361111114e-06, | |
| "loss": 2.874, | |
| "step": 356500 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "learning_rate": 2.5086817708333334e-06, | |
| "loss": 2.8754, | |
| "step": 357000 | |
| }, | |
| { | |
| "epoch": 4.98, | |
| "learning_rate": 2.5044282986111115e-06, | |
| "loss": 2.8503, | |
| "step": 357500 | |
| }, | |
| { | |
| "epoch": 4.99, | |
| "learning_rate": 2.500174826388889e-06, | |
| "loss": 2.836, | |
| "step": 358000 | |
| }, | |
| { | |
| "epoch": 4.99, | |
| "learning_rate": 2.4959213541666667e-06, | |
| "loss": 2.8584, | |
| "step": 358500 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "learning_rate": 2.4916678819444443e-06, | |
| "loss": 2.835, | |
| "step": 359000 | |
| }, | |
| { | |
| "epoch": 5.01, | |
| "learning_rate": 2.4874229166666672e-06, | |
| "loss": 2.8445, | |
| "step": 359500 | |
| }, | |
| { | |
| "epoch": 5.02, | |
| "learning_rate": 2.483169444444445e-06, | |
| "loss": 2.8368, | |
| "step": 360000 | |
| }, | |
| { | |
| "epoch": 5.02, | |
| "learning_rate": 2.4789159722222225e-06, | |
| "loss": 2.8288, | |
| "step": 360500 | |
| }, | |
| { | |
| "epoch": 5.03, | |
| "learning_rate": 2.4746625e-06, | |
| "loss": 2.8424, | |
| "step": 361000 | |
| }, | |
| { | |
| "epoch": 5.04, | |
| "learning_rate": 2.4704175347222226e-06, | |
| "loss": 2.8338, | |
| "step": 361500 | |
| }, | |
| { | |
| "epoch": 5.04, | |
| "learning_rate": 2.4661640625000002e-06, | |
| "loss": 2.8387, | |
| "step": 362000 | |
| }, | |
| { | |
| "epoch": 5.05, | |
| "learning_rate": 2.461910590277778e-06, | |
| "loss": 2.8373, | |
| "step": 362500 | |
| }, | |
| { | |
| "epoch": 5.06, | |
| "learning_rate": 2.4576571180555555e-06, | |
| "loss": 2.8195, | |
| "step": 363000 | |
| }, | |
| { | |
| "epoch": 5.06, | |
| "learning_rate": 2.453412152777778e-06, | |
| "loss": 2.8317, | |
| "step": 363500 | |
| }, | |
| { | |
| "epoch": 5.07, | |
| "learning_rate": 2.4491586805555556e-06, | |
| "loss": 2.8574, | |
| "step": 364000 | |
| }, | |
| { | |
| "epoch": 5.08, | |
| "learning_rate": 2.4449052083333332e-06, | |
| "loss": 2.8183, | |
| "step": 364500 | |
| }, | |
| { | |
| "epoch": 5.09, | |
| "learning_rate": 2.4406517361111113e-06, | |
| "loss": 2.8213, | |
| "step": 365000 | |
| }, | |
| { | |
| "epoch": 5.09, | |
| "learning_rate": 2.436415277777778e-06, | |
| "loss": 2.8332, | |
| "step": 365500 | |
| }, | |
| { | |
| "epoch": 5.1, | |
| "learning_rate": 2.432161805555556e-06, | |
| "loss": 2.848, | |
| "step": 366000 | |
| }, | |
| { | |
| "epoch": 5.11, | |
| "learning_rate": 2.4279083333333334e-06, | |
| "loss": 2.831, | |
| "step": 366500 | |
| }, | |
| { | |
| "epoch": 5.11, | |
| "learning_rate": 2.4236548611111115e-06, | |
| "loss": 2.8363, | |
| "step": 367000 | |
| }, | |
| { | |
| "epoch": 5.12, | |
| "learning_rate": 2.4194098958333335e-06, | |
| "loss": 2.8482, | |
| "step": 367500 | |
| }, | |
| { | |
| "epoch": 5.13, | |
| "learning_rate": 2.415156423611111e-06, | |
| "loss": 2.8307, | |
| "step": 368000 | |
| }, | |
| { | |
| "epoch": 5.13, | |
| "learning_rate": 2.410902951388889e-06, | |
| "loss": 2.8349, | |
| "step": 368500 | |
| }, | |
| { | |
| "epoch": 5.14, | |
| "learning_rate": 2.406649479166667e-06, | |
| "loss": 2.8065, | |
| "step": 369000 | |
| }, | |
| { | |
| "epoch": 5.15, | |
| "learning_rate": 2.402396006944445e-06, | |
| "loss": 2.8475, | |
| "step": 369500 | |
| }, | |
| { | |
| "epoch": 5.15, | |
| "learning_rate": 2.3981425347222225e-06, | |
| "loss": 2.8422, | |
| "step": 370000 | |
| }, | |
| { | |
| "epoch": 5.16, | |
| "learning_rate": 2.3938890625e-06, | |
| "loss": 2.8521, | |
| "step": 370500 | |
| }, | |
| { | |
| "epoch": 5.17, | |
| "learning_rate": 2.3896355902777778e-06, | |
| "loss": 2.8115, | |
| "step": 371000 | |
| }, | |
| { | |
| "epoch": 5.18, | |
| "learning_rate": 2.385382118055556e-06, | |
| "loss": 2.8041, | |
| "step": 371500 | |
| }, | |
| { | |
| "epoch": 5.18, | |
| "learning_rate": 2.3811286458333335e-06, | |
| "loss": 2.8452, | |
| "step": 372000 | |
| }, | |
| { | |
| "epoch": 5.19, | |
| "learning_rate": 2.376875173611111e-06, | |
| "loss": 2.8396, | |
| "step": 372500 | |
| }, | |
| { | |
| "epoch": 5.2, | |
| "learning_rate": 2.372630208333333e-06, | |
| "loss": 2.844, | |
| "step": 373000 | |
| }, | |
| { | |
| "epoch": 5.2, | |
| "learning_rate": 2.3683767361111116e-06, | |
| "loss": 2.8406, | |
| "step": 373500 | |
| }, | |
| { | |
| "epoch": 5.21, | |
| "learning_rate": 2.3641232638888892e-06, | |
| "loss": 2.8107, | |
| "step": 374000 | |
| }, | |
| { | |
| "epoch": 5.22, | |
| "learning_rate": 2.359869791666667e-06, | |
| "loss": 2.8292, | |
| "step": 374500 | |
| }, | |
| { | |
| "epoch": 5.22, | |
| "learning_rate": 2.3556163194444445e-06, | |
| "loss": 2.8411, | |
| "step": 375000 | |
| }, | |
| { | |
| "epoch": 5.23, | |
| "learning_rate": 2.3513628472222225e-06, | |
| "loss": 2.8439, | |
| "step": 375500 | |
| }, | |
| { | |
| "epoch": 5.24, | |
| "learning_rate": 2.347109375e-06, | |
| "loss": 2.8482, | |
| "step": 376000 | |
| }, | |
| { | |
| "epoch": 5.25, | |
| "learning_rate": 2.342855902777778e-06, | |
| "loss": 2.8161, | |
| "step": 376500 | |
| }, | |
| { | |
| "epoch": 5.25, | |
| "learning_rate": 2.3386109375e-06, | |
| "loss": 2.8375, | |
| "step": 377000 | |
| }, | |
| { | |
| "epoch": 5.26, | |
| "learning_rate": 2.334357465277778e-06, | |
| "loss": 2.8337, | |
| "step": 377500 | |
| }, | |
| { | |
| "epoch": 5.27, | |
| "learning_rate": 2.330103993055556e-06, | |
| "loss": 2.8339, | |
| "step": 378000 | |
| }, | |
| { | |
| "epoch": 5.27, | |
| "learning_rate": 2.3258505208333336e-06, | |
| "loss": 2.82, | |
| "step": 378500 | |
| }, | |
| { | |
| "epoch": 5.28, | |
| "learning_rate": 2.3216055555555556e-06, | |
| "loss": 2.8272, | |
| "step": 379000 | |
| }, | |
| { | |
| "epoch": 5.29, | |
| "learning_rate": 2.3173520833333337e-06, | |
| "loss": 2.8164, | |
| "step": 379500 | |
| }, | |
| { | |
| "epoch": 5.29, | |
| "learning_rate": 2.3130986111111113e-06, | |
| "loss": 2.8323, | |
| "step": 380000 | |
| }, | |
| { | |
| "epoch": 5.3, | |
| "learning_rate": 2.308845138888889e-06, | |
| "loss": 2.8662, | |
| "step": 380500 | |
| }, | |
| { | |
| "epoch": 5.31, | |
| "learning_rate": 2.3045916666666666e-06, | |
| "loss": 2.8664, | |
| "step": 381000 | |
| }, | |
| { | |
| "epoch": 5.31, | |
| "learning_rate": 2.300346701388889e-06, | |
| "loss": 2.8331, | |
| "step": 381500 | |
| }, | |
| { | |
| "epoch": 5.32, | |
| "learning_rate": 2.2960932291666667e-06, | |
| "loss": 2.8471, | |
| "step": 382000 | |
| }, | |
| { | |
| "epoch": 5.33, | |
| "learning_rate": 2.2918397569444443e-06, | |
| "loss": 2.8169, | |
| "step": 382500 | |
| }, | |
| { | |
| "epoch": 5.34, | |
| "learning_rate": 2.2875862847222224e-06, | |
| "loss": 2.8311, | |
| "step": 383000 | |
| }, | |
| { | |
| "epoch": 5.34, | |
| "learning_rate": 2.2833328125000004e-06, | |
| "loss": 2.8265, | |
| "step": 383500 | |
| }, | |
| { | |
| "epoch": 5.35, | |
| "learning_rate": 2.279079340277778e-06, | |
| "loss": 2.8364, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 5.36, | |
| "learning_rate": 2.2748258680555557e-06, | |
| "loss": 2.8051, | |
| "step": 384500 | |
| }, | |
| { | |
| "epoch": 5.36, | |
| "learning_rate": 2.2705723958333333e-06, | |
| "loss": 2.844, | |
| "step": 385000 | |
| }, | |
| { | |
| "epoch": 5.37, | |
| "learning_rate": 2.2663274305555558e-06, | |
| "loss": 2.831, | |
| "step": 385500 | |
| }, | |
| { | |
| "epoch": 5.38, | |
| "learning_rate": 2.2620739583333334e-06, | |
| "loss": 2.8235, | |
| "step": 386000 | |
| }, | |
| { | |
| "epoch": 5.38, | |
| "learning_rate": 2.257820486111111e-06, | |
| "loss": 2.8099, | |
| "step": 386500 | |
| }, | |
| { | |
| "epoch": 5.39, | |
| "learning_rate": 2.2535670138888886e-06, | |
| "loss": 2.8455, | |
| "step": 387000 | |
| }, | |
| { | |
| "epoch": 5.4, | |
| "learning_rate": 2.2493220486111116e-06, | |
| "loss": 2.8496, | |
| "step": 387500 | |
| }, | |
| { | |
| "epoch": 5.41, | |
| "learning_rate": 2.245068576388889e-06, | |
| "loss": 2.8497, | |
| "step": 388000 | |
| }, | |
| { | |
| "epoch": 5.41, | |
| "learning_rate": 2.240815104166667e-06, | |
| "loss": 2.8506, | |
| "step": 388500 | |
| }, | |
| { | |
| "epoch": 5.42, | |
| "learning_rate": 2.2365616319444444e-06, | |
| "loss": 2.8424, | |
| "step": 389000 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "learning_rate": 2.2323081597222225e-06, | |
| "loss": 2.844, | |
| "step": 389500 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "learning_rate": 2.2280546875e-06, | |
| "loss": 2.8288, | |
| "step": 390000 | |
| }, | |
| { | |
| "epoch": 5.44, | |
| "learning_rate": 2.2238012152777777e-06, | |
| "loss": 2.8439, | |
| "step": 390500 | |
| }, | |
| { | |
| "epoch": 5.45, | |
| "learning_rate": 2.2195477430555554e-06, | |
| "loss": 2.8261, | |
| "step": 391000 | |
| }, | |
| { | |
| "epoch": 5.45, | |
| "learning_rate": 2.2153027777777783e-06, | |
| "loss": 2.8425, | |
| "step": 391500 | |
| }, | |
| { | |
| "epoch": 5.46, | |
| "learning_rate": 2.211049305555556e-06, | |
| "loss": 2.8202, | |
| "step": 392000 | |
| }, | |
| { | |
| "epoch": 5.47, | |
| "learning_rate": 2.2067958333333335e-06, | |
| "loss": 2.8259, | |
| "step": 392500 | |
| }, | |
| { | |
| "epoch": 5.48, | |
| "learning_rate": 2.202542361111111e-06, | |
| "loss": 2.8405, | |
| "step": 393000 | |
| }, | |
| { | |
| "epoch": 5.48, | |
| "learning_rate": 2.1982973958333336e-06, | |
| "loss": 2.85, | |
| "step": 393500 | |
| }, | |
| { | |
| "epoch": 5.49, | |
| "learning_rate": 2.1940439236111113e-06, | |
| "loss": 2.8459, | |
| "step": 394000 | |
| }, | |
| { | |
| "epoch": 5.5, | |
| "learning_rate": 2.189790451388889e-06, | |
| "loss": 2.8092, | |
| "step": 394500 | |
| }, | |
| { | |
| "epoch": 5.5, | |
| "learning_rate": 2.1855369791666665e-06, | |
| "loss": 2.82, | |
| "step": 395000 | |
| }, | |
| { | |
| "epoch": 5.51, | |
| "learning_rate": 2.181292013888889e-06, | |
| "loss": 2.8212, | |
| "step": 395500 | |
| }, | |
| { | |
| "epoch": 5.52, | |
| "learning_rate": 2.177047048611111e-06, | |
| "loss": 2.8484, | |
| "step": 396000 | |
| }, | |
| { | |
| "epoch": 5.52, | |
| "learning_rate": 2.1727935763888887e-06, | |
| "loss": 2.8344, | |
| "step": 396500 | |
| }, | |
| { | |
| "epoch": 5.53, | |
| "learning_rate": 2.1685401041666667e-06, | |
| "loss": 2.8265, | |
| "step": 397000 | |
| }, | |
| { | |
| "epoch": 5.54, | |
| "learning_rate": 2.1642866319444448e-06, | |
| "loss": 2.8357, | |
| "step": 397500 | |
| }, | |
| { | |
| "epoch": 5.54, | |
| "learning_rate": 2.160041666666667e-06, | |
| "loss": 2.8295, | |
| "step": 398000 | |
| }, | |
| { | |
| "epoch": 5.55, | |
| "learning_rate": 2.155796701388889e-06, | |
| "loss": 2.827, | |
| "step": 398500 | |
| }, | |
| { | |
| "epoch": 5.56, | |
| "learning_rate": 2.151543229166667e-06, | |
| "loss": 2.8175, | |
| "step": 399000 | |
| }, | |
| { | |
| "epoch": 5.57, | |
| "learning_rate": 2.1472897569444446e-06, | |
| "loss": 2.8275, | |
| "step": 399500 | |
| }, | |
| { | |
| "epoch": 5.57, | |
| "learning_rate": 2.143036284722222e-06, | |
| "loss": 2.8357, | |
| "step": 400000 | |
| }, | |
| { | |
| "epoch": 5.57, | |
| "eval_gen_len": 19.9973, | |
| "eval_loss": 2.7551486492156982, | |
| "eval_rouge1": 0.2972, | |
| "eval_rouge2": 0.1745, | |
| "eval_rougeL": 0.2513, | |
| "eval_rougeLsum": 0.2824, | |
| "eval_runtime": 881.1984, | |
| "eval_samples_per_second": 15.17, | |
| "eval_steps_per_second": 3.793, | |
| "step": 400000 | |
| }, | |
| { | |
| "epoch": 5.58, | |
| "learning_rate": 2.1387828125e-06, | |
| "loss": 2.8298, | |
| "step": 400500 | |
| }, | |
| { | |
| "epoch": 5.59, | |
| "learning_rate": 2.1345293402777783e-06, | |
| "loss": 2.8371, | |
| "step": 401000 | |
| }, | |
| { | |
| "epoch": 5.59, | |
| "learning_rate": 2.130275868055556e-06, | |
| "loss": 2.8121, | |
| "step": 401500 | |
| }, | |
| { | |
| "epoch": 5.6, | |
| "learning_rate": 2.1260223958333335e-06, | |
| "loss": 2.8355, | |
| "step": 402000 | |
| }, | |
| { | |
| "epoch": 5.61, | |
| "learning_rate": 2.121768923611111e-06, | |
| "loss": 2.8341, | |
| "step": 402500 | |
| }, | |
| { | |
| "epoch": 5.61, | |
| "learning_rate": 2.117515451388889e-06, | |
| "loss": 2.8311, | |
| "step": 403000 | |
| }, | |
| { | |
| "epoch": 5.62, | |
| "learning_rate": 2.113261979166667e-06, | |
| "loss": 2.831, | |
| "step": 403500 | |
| }, | |
| { | |
| "epoch": 5.63, | |
| "learning_rate": 2.1090085069444445e-06, | |
| "loss": 2.8232, | |
| "step": 404000 | |
| }, | |
| { | |
| "epoch": 5.64, | |
| "learning_rate": 2.104755034722222e-06, | |
| "loss": 2.8277, | |
| "step": 404500 | |
| }, | |
| { | |
| "epoch": 5.64, | |
| "learning_rate": 2.1005015624999997e-06, | |
| "loss": 2.8317, | |
| "step": 405000 | |
| }, | |
| { | |
| "epoch": 5.65, | |
| "learning_rate": 2.096248090277778e-06, | |
| "loss": 2.8267, | |
| "step": 405500 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "learning_rate": 2.091994618055556e-06, | |
| "loss": 2.8385, | |
| "step": 406000 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "learning_rate": 2.087749652777778e-06, | |
| "loss": 2.8262, | |
| "step": 406500 | |
| }, | |
| { | |
| "epoch": 5.67, | |
| "learning_rate": 2.0834961805555555e-06, | |
| "loss": 2.8507, | |
| "step": 407000 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "learning_rate": 2.0792427083333336e-06, | |
| "loss": 2.8364, | |
| "step": 407500 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "learning_rate": 2.074989236111111e-06, | |
| "loss": 2.834, | |
| "step": 408000 | |
| }, | |
| { | |
| "epoch": 5.69, | |
| "learning_rate": 2.070735763888889e-06, | |
| "loss": 2.828, | |
| "step": 408500 | |
| }, | |
| { | |
| "epoch": 5.7, | |
| "learning_rate": 2.0664822916666665e-06, | |
| "loss": 2.8183, | |
| "step": 409000 | |
| }, | |
| { | |
| "epoch": 5.71, | |
| "learning_rate": 2.0622288194444445e-06, | |
| "loss": 2.8342, | |
| "step": 409500 | |
| }, | |
| { | |
| "epoch": 5.71, | |
| "learning_rate": 2.0579753472222226e-06, | |
| "loss": 2.8115, | |
| "step": 410000 | |
| }, | |
| { | |
| "epoch": 5.72, | |
| "learning_rate": 2.0537303819444446e-06, | |
| "loss": 2.8207, | |
| "step": 410500 | |
| }, | |
| { | |
| "epoch": 5.73, | |
| "learning_rate": 2.0494769097222222e-06, | |
| "loss": 2.8469, | |
| "step": 411000 | |
| }, | |
| { | |
| "epoch": 5.73, | |
| "learning_rate": 2.0452234375e-06, | |
| "loss": 2.8344, | |
| "step": 411500 | |
| }, | |
| { | |
| "epoch": 5.74, | |
| "learning_rate": 2.040969965277778e-06, | |
| "loss": 2.8193, | |
| "step": 412000 | |
| }, | |
| { | |
| "epoch": 5.75, | |
| "learning_rate": 2.036725e-06, | |
| "loss": 2.8214, | |
| "step": 412500 | |
| }, | |
| { | |
| "epoch": 5.75, | |
| "learning_rate": 2.032480034722222e-06, | |
| "loss": 2.8352, | |
| "step": 413000 | |
| }, | |
| { | |
| "epoch": 5.76, | |
| "learning_rate": 2.0282265625e-06, | |
| "loss": 2.84, | |
| "step": 413500 | |
| }, | |
| { | |
| "epoch": 5.77, | |
| "learning_rate": 2.023973090277778e-06, | |
| "loss": 2.8185, | |
| "step": 414000 | |
| }, | |
| { | |
| "epoch": 5.77, | |
| "learning_rate": 2.0197196180555558e-06, | |
| "loss": 2.8221, | |
| "step": 414500 | |
| }, | |
| { | |
| "epoch": 5.78, | |
| "learning_rate": 2.0154661458333334e-06, | |
| "loss": 2.8284, | |
| "step": 415000 | |
| }, | |
| { | |
| "epoch": 5.79, | |
| "learning_rate": 2.0112126736111114e-06, | |
| "loss": 2.8441, | |
| "step": 415500 | |
| }, | |
| { | |
| "epoch": 5.8, | |
| "learning_rate": 2.006959201388889e-06, | |
| "loss": 2.8231, | |
| "step": 416000 | |
| }, | |
| { | |
| "epoch": 5.8, | |
| "learning_rate": 2.0027057291666667e-06, | |
| "loss": 2.8212, | |
| "step": 416500 | |
| }, | |
| { | |
| "epoch": 5.81, | |
| "learning_rate": 1.9984607638888887e-06, | |
| "loss": 2.8001, | |
| "step": 417000 | |
| }, | |
| { | |
| "epoch": 5.82, | |
| "learning_rate": 1.9942072916666672e-06, | |
| "loss": 2.8389, | |
| "step": 417500 | |
| }, | |
| { | |
| "epoch": 5.82, | |
| "learning_rate": 1.989953819444445e-06, | |
| "loss": 2.8112, | |
| "step": 418000 | |
| }, | |
| { | |
| "epoch": 5.83, | |
| "learning_rate": 1.9857003472222225e-06, | |
| "loss": 2.8256, | |
| "step": 418500 | |
| }, | |
| { | |
| "epoch": 5.84, | |
| "learning_rate": 1.9814553819444445e-06, | |
| "loss": 2.8145, | |
| "step": 419000 | |
| }, | |
| { | |
| "epoch": 5.84, | |
| "learning_rate": 1.9772019097222226e-06, | |
| "loss": 2.8222, | |
| "step": 419500 | |
| }, | |
| { | |
| "epoch": 5.85, | |
| "learning_rate": 1.9729484375e-06, | |
| "loss": 2.8176, | |
| "step": 420000 | |
| }, | |
| { | |
| "epoch": 5.86, | |
| "learning_rate": 1.9687034722222223e-06, | |
| "loss": 2.8256, | |
| "step": 420500 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "learning_rate": 1.96445e-06, | |
| "loss": 2.8239, | |
| "step": 421000 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "learning_rate": 1.960196527777778e-06, | |
| "loss": 2.8322, | |
| "step": 421500 | |
| }, | |
| { | |
| "epoch": 5.88, | |
| "learning_rate": 1.955943055555556e-06, | |
| "loss": 2.8346, | |
| "step": 422000 | |
| }, | |
| { | |
| "epoch": 5.89, | |
| "learning_rate": 1.9516895833333336e-06, | |
| "loss": 2.8255, | |
| "step": 422500 | |
| }, | |
| { | |
| "epoch": 5.89, | |
| "learning_rate": 1.9474361111111112e-06, | |
| "loss": 2.8222, | |
| "step": 423000 | |
| }, | |
| { | |
| "epoch": 5.9, | |
| "learning_rate": 1.9431826388888893e-06, | |
| "loss": 2.8244, | |
| "step": 423500 | |
| }, | |
| { | |
| "epoch": 5.91, | |
| "learning_rate": 1.938929166666667e-06, | |
| "loss": 2.8334, | |
| "step": 424000 | |
| }, | |
| { | |
| "epoch": 5.91, | |
| "learning_rate": 1.934684201388889e-06, | |
| "loss": 2.8224, | |
| "step": 424500 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "learning_rate": 1.9304392361111115e-06, | |
| "loss": 2.8211, | |
| "step": 425000 | |
| }, | |
| { | |
| "epoch": 5.93, | |
| "learning_rate": 1.926185763888889e-06, | |
| "loss": 2.8458, | |
| "step": 425500 | |
| }, | |
| { | |
| "epoch": 5.93, | |
| "learning_rate": 1.9219322916666667e-06, | |
| "loss": 2.8058, | |
| "step": 426000 | |
| }, | |
| { | |
| "epoch": 5.94, | |
| "learning_rate": 1.9176788194444443e-06, | |
| "loss": 2.8512, | |
| "step": 426500 | |
| }, | |
| { | |
| "epoch": 5.95, | |
| "learning_rate": 1.9134253472222224e-06, | |
| "loss": 2.809, | |
| "step": 427000 | |
| }, | |
| { | |
| "epoch": 5.96, | |
| "learning_rate": 1.9091718750000004e-06, | |
| "loss": 2.8273, | |
| "step": 427500 | |
| }, | |
| { | |
| "epoch": 5.96, | |
| "learning_rate": 1.904918402777778e-06, | |
| "loss": 2.8381, | |
| "step": 428000 | |
| }, | |
| { | |
| "epoch": 5.97, | |
| "learning_rate": 1.9006734375000001e-06, | |
| "loss": 2.8201, | |
| "step": 428500 | |
| }, | |
| { | |
| "epoch": 5.98, | |
| "learning_rate": 1.896419965277778e-06, | |
| "loss": 2.8257, | |
| "step": 429000 | |
| }, | |
| { | |
| "epoch": 5.98, | |
| "learning_rate": 1.8921664930555558e-06, | |
| "loss": 2.8287, | |
| "step": 429500 | |
| }, | |
| { | |
| "epoch": 5.99, | |
| "learning_rate": 1.8879130208333334e-06, | |
| "loss": 2.8369, | |
| "step": 430000 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "learning_rate": 1.8836595486111113e-06, | |
| "loss": 2.8348, | |
| "step": 430500 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "learning_rate": 1.8794060763888885e-06, | |
| "loss": 2.8321, | |
| "step": 431000 | |
| }, | |
| { | |
| "epoch": 6.01, | |
| "learning_rate": 1.8751526041666672e-06, | |
| "loss": 2.8143, | |
| "step": 431500 | |
| }, | |
| { | |
| "epoch": 6.02, | |
| "learning_rate": 1.8708991319444448e-06, | |
| "loss": 2.8112, | |
| "step": 432000 | |
| }, | |
| { | |
| "epoch": 6.03, | |
| "learning_rate": 1.8666541666666668e-06, | |
| "loss": 2.8206, | |
| "step": 432500 | |
| }, | |
| { | |
| "epoch": 6.03, | |
| "learning_rate": 1.862409201388889e-06, | |
| "loss": 2.817, | |
| "step": 433000 | |
| }, | |
| { | |
| "epoch": 6.04, | |
| "learning_rate": 1.858155729166667e-06, | |
| "loss": 2.8042, | |
| "step": 433500 | |
| }, | |
| { | |
| "epoch": 6.05, | |
| "learning_rate": 1.8539022569444446e-06, | |
| "loss": 2.8123, | |
| "step": 434000 | |
| }, | |
| { | |
| "epoch": 6.05, | |
| "learning_rate": 1.8496487847222222e-06, | |
| "loss": 2.7912, | |
| "step": 434500 | |
| }, | |
| { | |
| "epoch": 6.06, | |
| "learning_rate": 1.8453953124999998e-06, | |
| "loss": 2.8157, | |
| "step": 435000 | |
| }, | |
| { | |
| "epoch": 6.07, | |
| "learning_rate": 1.8411418402777783e-06, | |
| "loss": 2.8023, | |
| "step": 435500 | |
| }, | |
| { | |
| "epoch": 6.07, | |
| "learning_rate": 1.8368968750000006e-06, | |
| "loss": 2.8037, | |
| "step": 436000 | |
| }, | |
| { | |
| "epoch": 6.08, | |
| "learning_rate": 1.8326434027777778e-06, | |
| "loss": 2.7876, | |
| "step": 436500 | |
| }, | |
| { | |
| "epoch": 6.09, | |
| "learning_rate": 1.8283899305555556e-06, | |
| "loss": 2.7751, | |
| "step": 437000 | |
| }, | |
| { | |
| "epoch": 6.1, | |
| "learning_rate": 1.8241364583333337e-06, | |
| "loss": 2.7996, | |
| "step": 437500 | |
| }, | |
| { | |
| "epoch": 6.1, | |
| "learning_rate": 1.8198829861111115e-06, | |
| "loss": 2.8035, | |
| "step": 438000 | |
| }, | |
| { | |
| "epoch": 6.11, | |
| "learning_rate": 1.8156295138888887e-06, | |
| "loss": 2.8158, | |
| "step": 438500 | |
| }, | |
| { | |
| "epoch": 6.12, | |
| "learning_rate": 1.8113760416666665e-06, | |
| "loss": 2.8115, | |
| "step": 439000 | |
| }, | |
| { | |
| "epoch": 6.12, | |
| "learning_rate": 1.807122569444445e-06, | |
| "loss": 2.8153, | |
| "step": 439500 | |
| }, | |
| { | |
| "epoch": 6.13, | |
| "learning_rate": 1.802877604166667e-06, | |
| "loss": 2.8082, | |
| "step": 440000 | |
| }, | |
| { | |
| "epoch": 6.14, | |
| "learning_rate": 1.7986241319444447e-06, | |
| "loss": 2.8053, | |
| "step": 440500 | |
| }, | |
| { | |
| "epoch": 6.14, | |
| "learning_rate": 1.7943706597222223e-06, | |
| "loss": 2.8283, | |
| "step": 441000 | |
| }, | |
| { | |
| "epoch": 6.15, | |
| "learning_rate": 1.7901171875000004e-06, | |
| "loss": 2.8127, | |
| "step": 441500 | |
| }, | |
| { | |
| "epoch": 6.16, | |
| "learning_rate": 1.7858722222222224e-06, | |
| "loss": 2.8448, | |
| "step": 442000 | |
| }, | |
| { | |
| "epoch": 6.16, | |
| "learning_rate": 1.78161875e-06, | |
| "loss": 2.8294, | |
| "step": 442500 | |
| }, | |
| { | |
| "epoch": 6.17, | |
| "learning_rate": 1.7773652777777777e-06, | |
| "loss": 2.8089, | |
| "step": 443000 | |
| }, | |
| { | |
| "epoch": 6.18, | |
| "learning_rate": 1.7731118055555557e-06, | |
| "loss": 2.8223, | |
| "step": 443500 | |
| }, | |
| { | |
| "epoch": 6.19, | |
| "learning_rate": 1.7688668402777778e-06, | |
| "loss": 2.8255, | |
| "step": 444000 | |
| }, | |
| { | |
| "epoch": 6.19, | |
| "learning_rate": 1.7646133680555554e-06, | |
| "loss": 2.8177, | |
| "step": 444500 | |
| }, | |
| { | |
| "epoch": 6.2, | |
| "learning_rate": 1.7603598958333335e-06, | |
| "loss": 2.8101, | |
| "step": 445000 | |
| }, | |
| { | |
| "epoch": 6.21, | |
| "learning_rate": 1.7561064236111113e-06, | |
| "loss": 2.789, | |
| "step": 445500 | |
| }, | |
| { | |
| "epoch": 6.21, | |
| "learning_rate": 1.7518529513888891e-06, | |
| "loss": 2.7933, | |
| "step": 446000 | |
| }, | |
| { | |
| "epoch": 6.22, | |
| "learning_rate": 1.7475994791666668e-06, | |
| "loss": 2.7962, | |
| "step": 446500 | |
| }, | |
| { | |
| "epoch": 6.23, | |
| "learning_rate": 1.7433460069444444e-06, | |
| "loss": 2.8138, | |
| "step": 447000 | |
| }, | |
| { | |
| "epoch": 6.23, | |
| "learning_rate": 1.7390925347222222e-06, | |
| "loss": 2.8133, | |
| "step": 447500 | |
| }, | |
| { | |
| "epoch": 6.24, | |
| "learning_rate": 1.734856076388889e-06, | |
| "loss": 2.7906, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 6.25, | |
| "learning_rate": 1.7306026041666666e-06, | |
| "loss": 2.8106, | |
| "step": 448500 | |
| }, | |
| { | |
| "epoch": 6.26, | |
| "learning_rate": 1.726349131944445e-06, | |
| "loss": 2.8054, | |
| "step": 449000 | |
| }, | |
| { | |
| "epoch": 6.26, | |
| "learning_rate": 1.7220956597222227e-06, | |
| "loss": 2.8087, | |
| "step": 449500 | |
| }, | |
| { | |
| "epoch": 6.27, | |
| "learning_rate": 1.7178421875000003e-06, | |
| "loss": 2.8293, | |
| "step": 450000 | |
| }, | |
| { | |
| "epoch": 6.28, | |
| "learning_rate": 1.7135887152777781e-06, | |
| "loss": 2.8109, | |
| "step": 450500 | |
| }, | |
| { | |
| "epoch": 6.28, | |
| "learning_rate": 1.7093352430555553e-06, | |
| "loss": 2.7943, | |
| "step": 451000 | |
| }, | |
| { | |
| "epoch": 6.29, | |
| "learning_rate": 1.7050817708333336e-06, | |
| "loss": 2.793, | |
| "step": 451500 | |
| }, | |
| { | |
| "epoch": 6.3, | |
| "learning_rate": 1.7008368055555557e-06, | |
| "loss": 2.7984, | |
| "step": 452000 | |
| }, | |
| { | |
| "epoch": 6.3, | |
| "learning_rate": 1.6965833333333335e-06, | |
| "loss": 2.8314, | |
| "step": 452500 | |
| }, | |
| { | |
| "epoch": 6.31, | |
| "learning_rate": 1.692329861111111e-06, | |
| "loss": 2.8224, | |
| "step": 453000 | |
| }, | |
| { | |
| "epoch": 6.32, | |
| "learning_rate": 1.6880763888888894e-06, | |
| "loss": 2.8134, | |
| "step": 453500 | |
| }, | |
| { | |
| "epoch": 6.32, | |
| "learning_rate": 1.683822916666667e-06, | |
| "loss": 2.7947, | |
| "step": 454000 | |
| }, | |
| { | |
| "epoch": 6.33, | |
| "learning_rate": 1.6795694444444446e-06, | |
| "loss": 2.8263, | |
| "step": 454500 | |
| }, | |
| { | |
| "epoch": 6.34, | |
| "learning_rate": 1.6753159722222223e-06, | |
| "loss": 2.8167, | |
| "step": 455000 | |
| }, | |
| { | |
| "epoch": 6.35, | |
| "learning_rate": 1.6710625000000003e-06, | |
| "loss": 2.8025, | |
| "step": 455500 | |
| }, | |
| { | |
| "epoch": 6.35, | |
| "learning_rate": 1.6668175347222224e-06, | |
| "loss": 2.8089, | |
| "step": 456000 | |
| }, | |
| { | |
| "epoch": 6.36, | |
| "learning_rate": 1.6625640625e-06, | |
| "loss": 2.804, | |
| "step": 456500 | |
| }, | |
| { | |
| "epoch": 6.37, | |
| "learning_rate": 1.6583105902777776e-06, | |
| "loss": 2.8116, | |
| "step": 457000 | |
| }, | |
| { | |
| "epoch": 6.37, | |
| "learning_rate": 1.654057118055556e-06, | |
| "loss": 2.8084, | |
| "step": 457500 | |
| }, | |
| { | |
| "epoch": 6.38, | |
| "learning_rate": 1.6498121527777784e-06, | |
| "loss": 2.8117, | |
| "step": 458000 | |
| }, | |
| { | |
| "epoch": 6.39, | |
| "learning_rate": 1.6455586805555556e-06, | |
| "loss": 2.8105, | |
| "step": 458500 | |
| }, | |
| { | |
| "epoch": 6.39, | |
| "learning_rate": 1.6413052083333334e-06, | |
| "loss": 2.8115, | |
| "step": 459000 | |
| }, | |
| { | |
| "epoch": 6.4, | |
| "learning_rate": 1.6370517361111115e-06, | |
| "loss": 2.7928, | |
| "step": 459500 | |
| }, | |
| { | |
| "epoch": 6.41, | |
| "learning_rate": 1.6328067708333333e-06, | |
| "loss": 2.8119, | |
| "step": 460000 | |
| }, | |
| { | |
| "epoch": 6.42, | |
| "learning_rate": 1.6285532986111111e-06, | |
| "loss": 2.8158, | |
| "step": 460500 | |
| }, | |
| { | |
| "epoch": 6.42, | |
| "learning_rate": 1.6242998263888888e-06, | |
| "loss": 2.8183, | |
| "step": 461000 | |
| }, | |
| { | |
| "epoch": 6.43, | |
| "learning_rate": 1.620046354166667e-06, | |
| "loss": 2.8114, | |
| "step": 461500 | |
| }, | |
| { | |
| "epoch": 6.44, | |
| "learning_rate": 1.6157928819444442e-06, | |
| "loss": 2.8105, | |
| "step": 462000 | |
| }, | |
| { | |
| "epoch": 6.44, | |
| "learning_rate": 1.6115479166666665e-06, | |
| "loss": 2.8069, | |
| "step": 462500 | |
| }, | |
| { | |
| "epoch": 6.45, | |
| "learning_rate": 1.6072944444444441e-06, | |
| "loss": 2.8118, | |
| "step": 463000 | |
| }, | |
| { | |
| "epoch": 6.46, | |
| "learning_rate": 1.6030409722222226e-06, | |
| "loss": 2.8225, | |
| "step": 463500 | |
| }, | |
| { | |
| "epoch": 6.46, | |
| "learning_rate": 1.5987875000000002e-06, | |
| "loss": 2.8067, | |
| "step": 464000 | |
| }, | |
| { | |
| "epoch": 6.47, | |
| "learning_rate": 1.5945425347222223e-06, | |
| "loss": 2.8069, | |
| "step": 464500 | |
| }, | |
| { | |
| "epoch": 6.48, | |
| "learning_rate": 1.5902890625000003e-06, | |
| "loss": 2.7902, | |
| "step": 465000 | |
| }, | |
| { | |
| "epoch": 6.49, | |
| "learning_rate": 1.586035590277778e-06, | |
| "loss": 2.7965, | |
| "step": 465500 | |
| }, | |
| { | |
| "epoch": 6.49, | |
| "learning_rate": 1.5817821180555556e-06, | |
| "loss": 2.8137, | |
| "step": 466000 | |
| }, | |
| { | |
| "epoch": 6.5, | |
| "learning_rate": 1.5775371527777776e-06, | |
| "loss": 2.7941, | |
| "step": 466500 | |
| }, | |
| { | |
| "epoch": 6.51, | |
| "learning_rate": 1.5732836805555563e-06, | |
| "loss": 2.8074, | |
| "step": 467000 | |
| }, | |
| { | |
| "epoch": 6.51, | |
| "learning_rate": 1.5690302083333335e-06, | |
| "loss": 2.8056, | |
| "step": 467500 | |
| }, | |
| { | |
| "epoch": 6.52, | |
| "learning_rate": 1.5647767361111114e-06, | |
| "loss": 2.804, | |
| "step": 468000 | |
| }, | |
| { | |
| "epoch": 6.53, | |
| "learning_rate": 1.5605317708333334e-06, | |
| "loss": 2.8002, | |
| "step": 468500 | |
| }, | |
| { | |
| "epoch": 6.53, | |
| "learning_rate": 1.5562782986111113e-06, | |
| "loss": 2.835, | |
| "step": 469000 | |
| }, | |
| { | |
| "epoch": 6.54, | |
| "learning_rate": 1.5520248263888891e-06, | |
| "loss": 2.8121, | |
| "step": 469500 | |
| }, | |
| { | |
| "epoch": 6.55, | |
| "learning_rate": 1.5477713541666667e-06, | |
| "loss": 2.7782, | |
| "step": 470000 | |
| }, | |
| { | |
| "epoch": 6.55, | |
| "learning_rate": 1.5435263888888888e-06, | |
| "loss": 2.8107, | |
| "step": 470500 | |
| }, | |
| { | |
| "epoch": 6.56, | |
| "learning_rate": 1.5392729166666666e-06, | |
| "loss": 2.81, | |
| "step": 471000 | |
| }, | |
| { | |
| "epoch": 6.57, | |
| "learning_rate": 1.5350194444444445e-06, | |
| "loss": 2.8008, | |
| "step": 471500 | |
| }, | |
| { | |
| "epoch": 6.58, | |
| "learning_rate": 1.5307659722222225e-06, | |
| "loss": 2.8294, | |
| "step": 472000 | |
| }, | |
| { | |
| "epoch": 6.58, | |
| "learning_rate": 1.5265210069444442e-06, | |
| "loss": 2.8149, | |
| "step": 472500 | |
| }, | |
| { | |
| "epoch": 6.59, | |
| "learning_rate": 1.522276041666667e-06, | |
| "loss": 2.8073, | |
| "step": 473000 | |
| }, | |
| { | |
| "epoch": 6.6, | |
| "learning_rate": 1.5180225694444447e-06, | |
| "loss": 2.801, | |
| "step": 473500 | |
| }, | |
| { | |
| "epoch": 6.6, | |
| "learning_rate": 1.5137690972222225e-06, | |
| "loss": 2.8211, | |
| "step": 474000 | |
| }, | |
| { | |
| "epoch": 6.61, | |
| "learning_rate": 1.5095156249999997e-06, | |
| "loss": 2.8333, | |
| "step": 474500 | |
| }, | |
| { | |
| "epoch": 6.62, | |
| "learning_rate": 1.505262152777778e-06, | |
| "loss": 2.8257, | |
| "step": 475000 | |
| }, | |
| { | |
| "epoch": 6.62, | |
| "learning_rate": 1.5010086805555556e-06, | |
| "loss": 2.8251, | |
| "step": 475500 | |
| }, | |
| { | |
| "epoch": 6.63, | |
| "learning_rate": 1.4967637152777779e-06, | |
| "loss": 2.8062, | |
| "step": 476000 | |
| }, | |
| { | |
| "epoch": 6.64, | |
| "learning_rate": 1.492510243055556e-06, | |
| "loss": 2.8145, | |
| "step": 476500 | |
| }, | |
| { | |
| "epoch": 6.65, | |
| "learning_rate": 1.4882567708333338e-06, | |
| "loss": 2.8073, | |
| "step": 477000 | |
| }, | |
| { | |
| "epoch": 6.65, | |
| "learning_rate": 1.4840032986111114e-06, | |
| "loss": 2.7967, | |
| "step": 477500 | |
| }, | |
| { | |
| "epoch": 6.66, | |
| "learning_rate": 1.479749826388889e-06, | |
| "loss": 2.7949, | |
| "step": 478000 | |
| }, | |
| { | |
| "epoch": 6.67, | |
| "learning_rate": 1.4754963541666667e-06, | |
| "loss": 2.8006, | |
| "step": 478500 | |
| }, | |
| { | |
| "epoch": 6.67, | |
| "learning_rate": 1.4712428819444447e-06, | |
| "loss": 2.8169, | |
| "step": 479000 | |
| }, | |
| { | |
| "epoch": 6.68, | |
| "learning_rate": 1.4669894097222223e-06, | |
| "loss": 2.8238, | |
| "step": 479500 | |
| }, | |
| { | |
| "epoch": 6.69, | |
| "learning_rate": 1.4627359375e-06, | |
| "loss": 2.8261, | |
| "step": 480000 | |
| }, | |
| { | |
| "epoch": 6.69, | |
| "learning_rate": 1.458490972222222e-06, | |
| "loss": 2.8045, | |
| "step": 480500 | |
| }, | |
| { | |
| "epoch": 6.7, | |
| "learning_rate": 1.4542375000000005e-06, | |
| "loss": 2.8009, | |
| "step": 481000 | |
| }, | |
| { | |
| "epoch": 6.71, | |
| "learning_rate": 1.4499840277777783e-06, | |
| "loss": 2.8209, | |
| "step": 481500 | |
| }, | |
| { | |
| "epoch": 6.72, | |
| "learning_rate": 1.4457305555555555e-06, | |
| "loss": 2.8186, | |
| "step": 482000 | |
| }, | |
| { | |
| "epoch": 6.72, | |
| "learning_rate": 1.4414855902777778e-06, | |
| "loss": 2.8115, | |
| "step": 482500 | |
| }, | |
| { | |
| "epoch": 6.73, | |
| "learning_rate": 1.4372406250000005e-06, | |
| "loss": 2.7988, | |
| "step": 483000 | |
| }, | |
| { | |
| "epoch": 6.74, | |
| "learning_rate": 1.4329871527777777e-06, | |
| "loss": 2.791, | |
| "step": 483500 | |
| }, | |
| { | |
| "epoch": 6.74, | |
| "learning_rate": 1.4287336805555555e-06, | |
| "loss": 2.8064, | |
| "step": 484000 | |
| }, | |
| { | |
| "epoch": 6.75, | |
| "learning_rate": 1.424488715277778e-06, | |
| "loss": 2.8008, | |
| "step": 484500 | |
| }, | |
| { | |
| "epoch": 6.76, | |
| "learning_rate": 1.4202352430555559e-06, | |
| "loss": 2.7846, | |
| "step": 485000 | |
| }, | |
| { | |
| "epoch": 6.76, | |
| "learning_rate": 1.415981770833333e-06, | |
| "loss": 2.8192, | |
| "step": 485500 | |
| }, | |
| { | |
| "epoch": 6.77, | |
| "learning_rate": 1.4117368055555553e-06, | |
| "loss": 2.7963, | |
| "step": 486000 | |
| }, | |
| { | |
| "epoch": 6.78, | |
| "learning_rate": 1.4074833333333338e-06, | |
| "loss": 2.8059, | |
| "step": 486500 | |
| }, | |
| { | |
| "epoch": 6.78, | |
| "learning_rate": 1.4032298611111114e-06, | |
| "loss": 2.8261, | |
| "step": 487000 | |
| }, | |
| { | |
| "epoch": 6.79, | |
| "learning_rate": 1.398976388888889e-06, | |
| "loss": 2.8126, | |
| "step": 487500 | |
| }, | |
| { | |
| "epoch": 6.8, | |
| "learning_rate": 1.3947229166666667e-06, | |
| "loss": 2.8273, | |
| "step": 488000 | |
| }, | |
| { | |
| "epoch": 6.81, | |
| "learning_rate": 1.3904694444444447e-06, | |
| "loss": 2.8116, | |
| "step": 488500 | |
| }, | |
| { | |
| "epoch": 6.81, | |
| "learning_rate": 1.3862159722222224e-06, | |
| "loss": 2.8293, | |
| "step": 489000 | |
| }, | |
| { | |
| "epoch": 6.82, | |
| "learning_rate": 1.3819625e-06, | |
| "loss": 2.8008, | |
| "step": 489500 | |
| }, | |
| { | |
| "epoch": 6.83, | |
| "learning_rate": 1.3777090277777776e-06, | |
| "loss": 2.8145, | |
| "step": 490000 | |
| }, | |
| { | |
| "epoch": 6.83, | |
| "learning_rate": 1.3734555555555555e-06, | |
| "loss": 2.7985, | |
| "step": 490500 | |
| }, | |
| { | |
| "epoch": 6.84, | |
| "learning_rate": 1.3692020833333335e-06, | |
| "loss": 2.8154, | |
| "step": 491000 | |
| }, | |
| { | |
| "epoch": 6.85, | |
| "learning_rate": 1.3649486111111113e-06, | |
| "loss": 2.8174, | |
| "step": 491500 | |
| }, | |
| { | |
| "epoch": 6.85, | |
| "learning_rate": 1.360695138888889e-06, | |
| "loss": 2.8222, | |
| "step": 492000 | |
| }, | |
| { | |
| "epoch": 6.86, | |
| "learning_rate": 1.3564416666666666e-06, | |
| "loss": 2.7957, | |
| "step": 492500 | |
| }, | |
| { | |
| "epoch": 6.87, | |
| "learning_rate": 1.3521881944444446e-06, | |
| "loss": 2.8146, | |
| "step": 493000 | |
| }, | |
| { | |
| "epoch": 6.88, | |
| "learning_rate": 1.3479347222222223e-06, | |
| "loss": 2.8174, | |
| "step": 493500 | |
| }, | |
| { | |
| "epoch": 6.88, | |
| "learning_rate": 1.3436897569444443e-06, | |
| "loss": 2.8001, | |
| "step": 494000 | |
| }, | |
| { | |
| "epoch": 6.89, | |
| "learning_rate": 1.339436284722222e-06, | |
| "loss": 2.824, | |
| "step": 494500 | |
| }, | |
| { | |
| "epoch": 6.9, | |
| "learning_rate": 1.3351828125000004e-06, | |
| "loss": 2.8116, | |
| "step": 495000 | |
| }, | |
| { | |
| "epoch": 6.9, | |
| "learning_rate": 1.330929340277778e-06, | |
| "loss": 2.8288, | |
| "step": 495500 | |
| }, | |
| { | |
| "epoch": 6.91, | |
| "learning_rate": 1.3266843750000003e-06, | |
| "loss": 2.8051, | |
| "step": 496000 | |
| }, | |
| { | |
| "epoch": 6.92, | |
| "learning_rate": 1.3224309027777777e-06, | |
| "loss": 2.8137, | |
| "step": 496500 | |
| }, | |
| { | |
| "epoch": 6.92, | |
| "learning_rate": 1.3181774305555558e-06, | |
| "loss": 2.8232, | |
| "step": 497000 | |
| }, | |
| { | |
| "epoch": 6.93, | |
| "learning_rate": 1.3139239583333336e-06, | |
| "loss": 2.8078, | |
| "step": 497500 | |
| }, | |
| { | |
| "epoch": 6.94, | |
| "learning_rate": 1.3096789930555553e-06, | |
| "loss": 2.8099, | |
| "step": 498000 | |
| }, | |
| { | |
| "epoch": 6.94, | |
| "learning_rate": 1.305425520833333e-06, | |
| "loss": 2.8348, | |
| "step": 498500 | |
| }, | |
| { | |
| "epoch": 6.95, | |
| "learning_rate": 1.3011805555555556e-06, | |
| "loss": 2.8256, | |
| "step": 499000 | |
| }, | |
| { | |
| "epoch": 6.96, | |
| "learning_rate": 1.2969270833333336e-06, | |
| "loss": 2.8088, | |
| "step": 499500 | |
| }, | |
| { | |
| "epoch": 6.97, | |
| "learning_rate": 1.2926736111111113e-06, | |
| "loss": 2.8246, | |
| "step": 500000 | |
| }, | |
| { | |
| "epoch": 6.97, | |
| "eval_gen_len": 19.9977, | |
| "eval_loss": 2.7420477867126465, | |
| "eval_rouge1": 0.2999, | |
| "eval_rouge2": 0.1775, | |
| "eval_rougeL": 0.2543, | |
| "eval_rougeLsum": 0.2853, | |
| "eval_runtime": 894.3032, | |
| "eval_samples_per_second": 14.948, | |
| "eval_steps_per_second": 3.737, | |
| "step": 500000 | |
| }, | |
| { | |
| "epoch": 6.97, | |
| "learning_rate": 1.2884201388888893e-06, | |
| "loss": 2.8164, | |
| "step": 500500 | |
| }, | |
| { | |
| "epoch": 6.98, | |
| "learning_rate": 1.284166666666667e-06, | |
| "loss": 2.8191, | |
| "step": 501000 | |
| }, | |
| { | |
| "epoch": 6.99, | |
| "learning_rate": 1.279921701388889e-06, | |
| "loss": 2.8262, | |
| "step": 501500 | |
| }, | |
| { | |
| "epoch": 6.99, | |
| "learning_rate": 1.2756682291666666e-06, | |
| "loss": 2.8011, | |
| "step": 502000 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "learning_rate": 1.2714147569444447e-06, | |
| "loss": 2.7934, | |
| "step": 502500 | |
| }, | |
| { | |
| "epoch": 7.01, | |
| "learning_rate": 1.2671612847222223e-06, | |
| "loss": 2.8052, | |
| "step": 503000 | |
| }, | |
| { | |
| "epoch": 7.01, | |
| "learning_rate": 1.2629078125e-06, | |
| "loss": 2.7867, | |
| "step": 503500 | |
| }, | |
| { | |
| "epoch": 7.02, | |
| "learning_rate": 1.258654340277778e-06, | |
| "loss": 2.7945, | |
| "step": 504000 | |
| }, | |
| { | |
| "epoch": 7.03, | |
| "learning_rate": 1.2544093750000005e-06, | |
| "loss": 2.788, | |
| "step": 504500 | |
| }, | |
| { | |
| "epoch": 7.04, | |
| "learning_rate": 1.2501559027777783e-06, | |
| "loss": 2.8143, | |
| "step": 505000 | |
| }, | |
| { | |
| "epoch": 7.04, | |
| "learning_rate": 1.2459024305555557e-06, | |
| "loss": 2.787, | |
| "step": 505500 | |
| }, | |
| { | |
| "epoch": 7.05, | |
| "learning_rate": 1.2416489583333333e-06, | |
| "loss": 2.7988, | |
| "step": 506000 | |
| }, | |
| { | |
| "epoch": 7.06, | |
| "learning_rate": 1.237395486111111e-06, | |
| "loss": 2.7957, | |
| "step": 506500 | |
| }, | |
| { | |
| "epoch": 7.06, | |
| "learning_rate": 1.233142013888889e-06, | |
| "loss": 2.8049, | |
| "step": 507000 | |
| }, | |
| { | |
| "epoch": 7.07, | |
| "learning_rate": 1.2288885416666666e-06, | |
| "loss": 2.7759, | |
| "step": 507500 | |
| }, | |
| { | |
| "epoch": 7.08, | |
| "learning_rate": 1.2246350694444447e-06, | |
| "loss": 2.7944, | |
| "step": 508000 | |
| }, | |
| { | |
| "epoch": 7.08, | |
| "learning_rate": 1.2203901041666672e-06, | |
| "loss": 2.7896, | |
| "step": 508500 | |
| }, | |
| { | |
| "epoch": 7.09, | |
| "learning_rate": 1.2161366319444448e-06, | |
| "loss": 2.8019, | |
| "step": 509000 | |
| }, | |
| { | |
| "epoch": 7.1, | |
| "learning_rate": 1.2118831597222224e-06, | |
| "loss": 2.7732, | |
| "step": 509500 | |
| }, | |
| { | |
| "epoch": 7.11, | |
| "learning_rate": 1.2076296875e-06, | |
| "loss": 2.824, | |
| "step": 510000 | |
| }, | |
| { | |
| "epoch": 7.11, | |
| "learning_rate": 1.2033762152777777e-06, | |
| "loss": 2.7803, | |
| "step": 510500 | |
| }, | |
| { | |
| "epoch": 7.12, | |
| "learning_rate": 1.1991227430555557e-06, | |
| "loss": 2.7941, | |
| "step": 511000 | |
| }, | |
| { | |
| "epoch": 7.13, | |
| "learning_rate": 1.1948692708333334e-06, | |
| "loss": 2.7869, | |
| "step": 511500 | |
| }, | |
| { | |
| "epoch": 7.13, | |
| "learning_rate": 1.190615798611111e-06, | |
| "loss": 2.7976, | |
| "step": 512000 | |
| }, | |
| { | |
| "epoch": 7.14, | |
| "learning_rate": 1.186362326388889e-06, | |
| "loss": 2.8163, | |
| "step": 512500 | |
| }, | |
| { | |
| "epoch": 7.15, | |
| "learning_rate": 1.1821173611111115e-06, | |
| "loss": 2.7951, | |
| "step": 513000 | |
| }, | |
| { | |
| "epoch": 7.15, | |
| "learning_rate": 1.1778638888888891e-06, | |
| "loss": 2.7913, | |
| "step": 513500 | |
| }, | |
| { | |
| "epoch": 7.16, | |
| "learning_rate": 1.1736104166666668e-06, | |
| "loss": 2.7938, | |
| "step": 514000 | |
| }, | |
| { | |
| "epoch": 7.17, | |
| "learning_rate": 1.1693569444444444e-06, | |
| "loss": 2.8009, | |
| "step": 514500 | |
| }, | |
| { | |
| "epoch": 7.17, | |
| "learning_rate": 1.1651119791666669e-06, | |
| "loss": 2.7747, | |
| "step": 515000 | |
| }, | |
| { | |
| "epoch": 7.18, | |
| "learning_rate": 1.1608585069444445e-06, | |
| "loss": 2.7833, | |
| "step": 515500 | |
| }, | |
| { | |
| "epoch": 7.19, | |
| "learning_rate": 1.1566050347222221e-06, | |
| "loss": 2.8013, | |
| "step": 516000 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "learning_rate": 1.1523515624999998e-06, | |
| "loss": 2.8089, | |
| "step": 516500 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "learning_rate": 1.1481065972222227e-06, | |
| "loss": 2.8074, | |
| "step": 517000 | |
| }, | |
| { | |
| "epoch": 7.21, | |
| "learning_rate": 1.1438531250000003e-06, | |
| "loss": 2.7884, | |
| "step": 517500 | |
| }, | |
| { | |
| "epoch": 7.22, | |
| "learning_rate": 1.139599652777778e-06, | |
| "loss": 2.7911, | |
| "step": 518000 | |
| }, | |
| { | |
| "epoch": 7.22, | |
| "learning_rate": 1.1353461805555555e-06, | |
| "loss": 2.8035, | |
| "step": 518500 | |
| }, | |
| { | |
| "epoch": 7.23, | |
| "learning_rate": 1.131101215277778e-06, | |
| "loss": 2.8027, | |
| "step": 519000 | |
| }, | |
| { | |
| "epoch": 7.24, | |
| "learning_rate": 1.12685625e-06, | |
| "loss": 2.7828, | |
| "step": 519500 | |
| }, | |
| { | |
| "epoch": 7.24, | |
| "learning_rate": 1.1226027777777777e-06, | |
| "loss": 2.8245, | |
| "step": 520000 | |
| }, | |
| { | |
| "epoch": 7.25, | |
| "learning_rate": 1.1183493055555558e-06, | |
| "loss": 2.7839, | |
| "step": 520500 | |
| }, | |
| { | |
| "epoch": 7.26, | |
| "learning_rate": 1.1140958333333334e-06, | |
| "loss": 2.7935, | |
| "step": 521000 | |
| }, | |
| { | |
| "epoch": 7.27, | |
| "learning_rate": 1.109842361111111e-06, | |
| "loss": 2.8045, | |
| "step": 521500 | |
| }, | |
| { | |
| "epoch": 7.27, | |
| "learning_rate": 1.105588888888889e-06, | |
| "loss": 2.8039, | |
| "step": 522000 | |
| }, | |
| { | |
| "epoch": 7.28, | |
| "learning_rate": 1.1013354166666667e-06, | |
| "loss": 2.7892, | |
| "step": 522500 | |
| }, | |
| { | |
| "epoch": 7.29, | |
| "learning_rate": 1.0970819444444447e-06, | |
| "loss": 2.8009, | |
| "step": 523000 | |
| }, | |
| { | |
| "epoch": 7.29, | |
| "learning_rate": 1.0928284722222224e-06, | |
| "loss": 2.7927, | |
| "step": 523500 | |
| }, | |
| { | |
| "epoch": 7.3, | |
| "learning_rate": 1.0885835069444444e-06, | |
| "loss": 2.7763, | |
| "step": 524000 | |
| }, | |
| { | |
| "epoch": 7.31, | |
| "learning_rate": 1.0843300347222225e-06, | |
| "loss": 2.8008, | |
| "step": 524500 | |
| }, | |
| { | |
| "epoch": 7.31, | |
| "learning_rate": 1.0800765625e-06, | |
| "loss": 2.7839, | |
| "step": 525000 | |
| }, | |
| { | |
| "epoch": 7.32, | |
| "learning_rate": 1.0758230902777777e-06, | |
| "loss": 2.7787, | |
| "step": 525500 | |
| }, | |
| { | |
| "epoch": 7.33, | |
| "learning_rate": 1.0715781249999998e-06, | |
| "loss": 2.7914, | |
| "step": 526000 | |
| }, | |
| { | |
| "epoch": 7.34, | |
| "learning_rate": 1.0673246527777783e-06, | |
| "loss": 2.7903, | |
| "step": 526500 | |
| }, | |
| { | |
| "epoch": 7.34, | |
| "learning_rate": 1.0630711805555559e-06, | |
| "loss": 2.7869, | |
| "step": 527000 | |
| }, | |
| { | |
| "epoch": 7.35, | |
| "learning_rate": 1.0588177083333335e-06, | |
| "loss": 2.7885, | |
| "step": 527500 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "learning_rate": 1.0545727430555556e-06, | |
| "loss": 2.782, | |
| "step": 528000 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "learning_rate": 1.0503192708333336e-06, | |
| "loss": 2.7775, | |
| "step": 528500 | |
| }, | |
| { | |
| "epoch": 7.37, | |
| "learning_rate": 1.0460657986111112e-06, | |
| "loss": 2.7863, | |
| "step": 529000 | |
| }, | |
| { | |
| "epoch": 7.38, | |
| "learning_rate": 1.0418123263888889e-06, | |
| "loss": 2.7904, | |
| "step": 529500 | |
| }, | |
| { | |
| "epoch": 7.38, | |
| "learning_rate": 1.037567361111111e-06, | |
| "loss": 2.8, | |
| "step": 530000 | |
| }, | |
| { | |
| "epoch": 7.39, | |
| "learning_rate": 1.0333138888888892e-06, | |
| "loss": 2.8023, | |
| "step": 530500 | |
| }, | |
| { | |
| "epoch": 7.4, | |
| "learning_rate": 1.0290604166666668e-06, | |
| "loss": 2.7947, | |
| "step": 531000 | |
| }, | |
| { | |
| "epoch": 7.4, | |
| "learning_rate": 1.0248069444444447e-06, | |
| "loss": 2.7946, | |
| "step": 531500 | |
| }, | |
| { | |
| "epoch": 7.41, | |
| "learning_rate": 1.0205619791666665e-06, | |
| "loss": 2.7859, | |
| "step": 532000 | |
| }, | |
| { | |
| "epoch": 7.42, | |
| "learning_rate": 1.0163085069444448e-06, | |
| "loss": 2.7949, | |
| "step": 532500 | |
| }, | |
| { | |
| "epoch": 7.43, | |
| "learning_rate": 1.0120550347222224e-06, | |
| "loss": 2.7738, | |
| "step": 533000 | |
| }, | |
| { | |
| "epoch": 7.43, | |
| "learning_rate": 1.0078100694444444e-06, | |
| "loss": 2.7934, | |
| "step": 533500 | |
| }, | |
| { | |
| "epoch": 7.44, | |
| "learning_rate": 1.003556597222222e-06, | |
| "loss": 2.8105, | |
| "step": 534000 | |
| }, | |
| { | |
| "epoch": 7.45, | |
| "learning_rate": 9.993031250000001e-07, | |
| "loss": 2.8025, | |
| "step": 534500 | |
| }, | |
| { | |
| "epoch": 7.45, | |
| "learning_rate": 9.95049652777778e-07, | |
| "loss": 2.8159, | |
| "step": 535000 | |
| }, | |
| { | |
| "epoch": 7.46, | |
| "learning_rate": 9.907961805555556e-07, | |
| "loss": 2.8167, | |
| "step": 535500 | |
| }, | |
| { | |
| "epoch": 7.47, | |
| "learning_rate": 9.865427083333332e-07, | |
| "loss": 2.7768, | |
| "step": 536000 | |
| }, | |
| { | |
| "epoch": 7.47, | |
| "learning_rate": 9.822892361111115e-07, | |
| "loss": 2.7976, | |
| "step": 536500 | |
| }, | |
| { | |
| "epoch": 7.48, | |
| "learning_rate": 9.78035763888889e-07, | |
| "loss": 2.8063, | |
| "step": 537000 | |
| }, | |
| { | |
| "epoch": 7.49, | |
| "learning_rate": 9.737993055555556e-07, | |
| "loss": 2.7996, | |
| "step": 537500 | |
| }, | |
| { | |
| "epoch": 7.5, | |
| "learning_rate": 9.695458333333336e-07, | |
| "loss": 2.8008, | |
| "step": 538000 | |
| }, | |
| { | |
| "epoch": 7.5, | |
| "learning_rate": 9.652923611111113e-07, | |
| "loss": 2.7913, | |
| "step": 538500 | |
| }, | |
| { | |
| "epoch": 7.51, | |
| "learning_rate": 9.610388888888889e-07, | |
| "loss": 2.8131, | |
| "step": 539000 | |
| }, | |
| { | |
| "epoch": 7.52, | |
| "learning_rate": 9.567854166666665e-07, | |
| "loss": 2.802, | |
| "step": 539500 | |
| }, | |
| { | |
| "epoch": 7.52, | |
| "learning_rate": 9.525319444444441e-07, | |
| "loss": 2.8064, | |
| "step": 540000 | |
| }, | |
| { | |
| "epoch": 7.53, | |
| "learning_rate": 9.482784722222225e-07, | |
| "loss": 2.805, | |
| "step": 540500 | |
| }, | |
| { | |
| "epoch": 7.54, | |
| "learning_rate": 9.44025e-07, | |
| "loss": 2.7848, | |
| "step": 541000 | |
| }, | |
| { | |
| "epoch": 7.54, | |
| "learning_rate": 9.397800347222222e-07, | |
| "loss": 2.7907, | |
| "step": 541500 | |
| }, | |
| { | |
| "epoch": 7.55, | |
| "learning_rate": 9.355265624999998e-07, | |
| "loss": 2.7835, | |
| "step": 542000 | |
| }, | |
| { | |
| "epoch": 7.56, | |
| "learning_rate": 9.31273090277778e-07, | |
| "loss": 2.7738, | |
| "step": 542500 | |
| }, | |
| { | |
| "epoch": 7.56, | |
| "learning_rate": 9.270196180555557e-07, | |
| "loss": 2.7925, | |
| "step": 543000 | |
| }, | |
| { | |
| "epoch": 7.57, | |
| "learning_rate": 9.227661458333334e-07, | |
| "loss": 2.7831, | |
| "step": 543500 | |
| }, | |
| { | |
| "epoch": 7.58, | |
| "learning_rate": 9.185211805555559e-07, | |
| "loss": 2.799, | |
| "step": 544000 | |
| }, | |
| { | |
| "epoch": 7.59, | |
| "learning_rate": 9.142677083333336e-07, | |
| "loss": 2.7945, | |
| "step": 544500 | |
| }, | |
| { | |
| "epoch": 7.59, | |
| "learning_rate": 9.100227430555555e-07, | |
| "loss": 2.7981, | |
| "step": 545000 | |
| }, | |
| { | |
| "epoch": 7.6, | |
| "learning_rate": 9.057692708333332e-07, | |
| "loss": 2.8008, | |
| "step": 545500 | |
| }, | |
| { | |
| "epoch": 7.61, | |
| "learning_rate": 9.015157986111114e-07, | |
| "loss": 2.7949, | |
| "step": 546000 | |
| }, | |
| { | |
| "epoch": 7.61, | |
| "learning_rate": 8.972623263888891e-07, | |
| "loss": 2.7828, | |
| "step": 546500 | |
| }, | |
| { | |
| "epoch": 7.62, | |
| "learning_rate": 8.930088541666668e-07, | |
| "loss": 2.8011, | |
| "step": 547000 | |
| }, | |
| { | |
| "epoch": 7.63, | |
| "learning_rate": 8.887553819444445e-07, | |
| "loss": 2.8135, | |
| "step": 547500 | |
| }, | |
| { | |
| "epoch": 7.63, | |
| "learning_rate": 8.845019097222221e-07, | |
| "loss": 2.7942, | |
| "step": 548000 | |
| }, | |
| { | |
| "epoch": 7.64, | |
| "learning_rate": 8.802484375000004e-07, | |
| "loss": 2.8105, | |
| "step": 548500 | |
| }, | |
| { | |
| "epoch": 7.65, | |
| "learning_rate": 8.760119791666665e-07, | |
| "loss": 2.8071, | |
| "step": 549000 | |
| }, | |
| { | |
| "epoch": 7.66, | |
| "learning_rate": 8.717585069444443e-07, | |
| "loss": 2.7993, | |
| "step": 549500 | |
| }, | |
| { | |
| "epoch": 7.66, | |
| "learning_rate": 8.675050347222224e-07, | |
| "loss": 2.7991, | |
| "step": 550000 | |
| }, | |
| { | |
| "epoch": 7.67, | |
| "learning_rate": 8.632515625000002e-07, | |
| "loss": 2.8152, | |
| "step": 550500 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "learning_rate": 8.589980902777778e-07, | |
| "loss": 2.7994, | |
| "step": 551000 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "learning_rate": 8.547446180555555e-07, | |
| "loss": 2.8152, | |
| "step": 551500 | |
| }, | |
| { | |
| "epoch": 7.69, | |
| "learning_rate": 8.504911458333332e-07, | |
| "loss": 2.8158, | |
| "step": 552000 | |
| }, | |
| { | |
| "epoch": 7.7, | |
| "learning_rate": 8.462376736111114e-07, | |
| "loss": 2.8117, | |
| "step": 552500 | |
| }, | |
| { | |
| "epoch": 7.7, | |
| "learning_rate": 8.419927083333334e-07, | |
| "loss": 2.8052, | |
| "step": 553000 | |
| }, | |
| { | |
| "epoch": 7.71, | |
| "learning_rate": 8.377477430555554e-07, | |
| "loss": 2.8123, | |
| "step": 553500 | |
| }, | |
| { | |
| "epoch": 7.72, | |
| "learning_rate": 8.334942708333337e-07, | |
| "loss": 2.7848, | |
| "step": 554000 | |
| }, | |
| { | |
| "epoch": 7.73, | |
| "learning_rate": 8.292407986111112e-07, | |
| "loss": 2.8049, | |
| "step": 554500 | |
| }, | |
| { | |
| "epoch": 7.73, | |
| "learning_rate": 8.249873263888889e-07, | |
| "loss": 2.7832, | |
| "step": 555000 | |
| }, | |
| { | |
| "epoch": 7.74, | |
| "learning_rate": 8.207338541666665e-07, | |
| "loss": 2.801, | |
| "step": 555500 | |
| }, | |
| { | |
| "epoch": 7.75, | |
| "learning_rate": 8.164803819444447e-07, | |
| "loss": 2.7943, | |
| "step": 556000 | |
| }, | |
| { | |
| "epoch": 7.75, | |
| "learning_rate": 8.122269097222223e-07, | |
| "loss": 2.8053, | |
| "step": 556500 | |
| }, | |
| { | |
| "epoch": 7.76, | |
| "learning_rate": 8.079819444444444e-07, | |
| "loss": 2.7998, | |
| "step": 557000 | |
| }, | |
| { | |
| "epoch": 7.77, | |
| "learning_rate": 8.03728472222222e-07, | |
| "loss": 2.8004, | |
| "step": 557500 | |
| }, | |
| { | |
| "epoch": 7.77, | |
| "learning_rate": 7.994750000000003e-07, | |
| "loss": 2.8, | |
| "step": 558000 | |
| }, | |
| { | |
| "epoch": 7.78, | |
| "learning_rate": 7.952215277777779e-07, | |
| "loss": 2.7873, | |
| "step": 558500 | |
| }, | |
| { | |
| "epoch": 7.79, | |
| "learning_rate": 7.909680555555555e-07, | |
| "loss": 2.8024, | |
| "step": 559000 | |
| }, | |
| { | |
| "epoch": 7.79, | |
| "learning_rate": 7.867145833333333e-07, | |
| "loss": 2.7802, | |
| "step": 559500 | |
| }, | |
| { | |
| "epoch": 7.8, | |
| "learning_rate": 7.824696180555559e-07, | |
| "loss": 2.7845, | |
| "step": 560000 | |
| }, | |
| { | |
| "epoch": 7.81, | |
| "learning_rate": 7.782161458333334e-07, | |
| "loss": 2.8044, | |
| "step": 560500 | |
| }, | |
| { | |
| "epoch": 7.82, | |
| "learning_rate": 7.739626736111111e-07, | |
| "loss": 2.7849, | |
| "step": 561000 | |
| }, | |
| { | |
| "epoch": 7.82, | |
| "learning_rate": 7.697092013888887e-07, | |
| "loss": 2.7986, | |
| "step": 561500 | |
| }, | |
| { | |
| "epoch": 7.83, | |
| "learning_rate": 7.65455729166667e-07, | |
| "loss": 2.802, | |
| "step": 562000 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "learning_rate": 7.612022569444446e-07, | |
| "loss": 2.7929, | |
| "step": 562500 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "learning_rate": 7.569572916666668e-07, | |
| "loss": 2.8, | |
| "step": 563000 | |
| }, | |
| { | |
| "epoch": 7.85, | |
| "learning_rate": 7.527038194444443e-07, | |
| "loss": 2.8034, | |
| "step": 563500 | |
| }, | |
| { | |
| "epoch": 7.86, | |
| "learning_rate": 7.484503472222225e-07, | |
| "loss": 2.7871, | |
| "step": 564000 | |
| }, | |
| { | |
| "epoch": 7.86, | |
| "learning_rate": 7.441968750000002e-07, | |
| "loss": 2.8139, | |
| "step": 564500 | |
| }, | |
| { | |
| "epoch": 7.87, | |
| "learning_rate": 7.399434027777778e-07, | |
| "loss": 2.7982, | |
| "step": 565000 | |
| }, | |
| { | |
| "epoch": 7.88, | |
| "learning_rate": 7.356899305555555e-07, | |
| "loss": 2.7998, | |
| "step": 565500 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "learning_rate": 7.314364583333331e-07, | |
| "loss": 2.7829, | |
| "step": 566000 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "learning_rate": 7.271829861111114e-07, | |
| "loss": 2.7698, | |
| "step": 566500 | |
| }, | |
| { | |
| "epoch": 7.9, | |
| "learning_rate": 7.229380208333334e-07, | |
| "loss": 2.786, | |
| "step": 567000 | |
| }, | |
| { | |
| "epoch": 7.91, | |
| "learning_rate": 7.18684548611111e-07, | |
| "loss": 2.7959, | |
| "step": 567500 | |
| }, | |
| { | |
| "epoch": 7.91, | |
| "learning_rate": 7.144310763888887e-07, | |
| "loss": 2.8125, | |
| "step": 568000 | |
| }, | |
| { | |
| "epoch": 7.92, | |
| "learning_rate": 7.101776041666669e-07, | |
| "loss": 2.8022, | |
| "step": 568500 | |
| }, | |
| { | |
| "epoch": 7.93, | |
| "learning_rate": 7.059326388888889e-07, | |
| "loss": 2.8261, | |
| "step": 569000 | |
| }, | |
| { | |
| "epoch": 7.93, | |
| "learning_rate": 7.016791666666665e-07, | |
| "loss": 2.8114, | |
| "step": 569500 | |
| }, | |
| { | |
| "epoch": 7.94, | |
| "learning_rate": 6.974256944444448e-07, | |
| "loss": 2.7959, | |
| "step": 570000 | |
| }, | |
| { | |
| "epoch": 7.95, | |
| "learning_rate": 6.931722222222224e-07, | |
| "loss": 2.8008, | |
| "step": 570500 | |
| }, | |
| { | |
| "epoch": 7.95, | |
| "learning_rate": 6.889272569444445e-07, | |
| "loss": 2.7952, | |
| "step": 571000 | |
| }, | |
| { | |
| "epoch": 7.96, | |
| "learning_rate": 6.846737847222221e-07, | |
| "loss": 2.7794, | |
| "step": 571500 | |
| }, | |
| { | |
| "epoch": 7.97, | |
| "learning_rate": 6.804203125000003e-07, | |
| "loss": 2.7851, | |
| "step": 572000 | |
| }, | |
| { | |
| "epoch": 7.98, | |
| "learning_rate": 6.761753472222223e-07, | |
| "loss": 2.7843, | |
| "step": 572500 | |
| }, | |
| { | |
| "epoch": 7.98, | |
| "learning_rate": 6.719218749999999e-07, | |
| "loss": 2.8172, | |
| "step": 573000 | |
| }, | |
| { | |
| "epoch": 7.99, | |
| "learning_rate": 6.676684027777775e-07, | |
| "loss": 2.7935, | |
| "step": 573500 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "learning_rate": 6.634149305555558e-07, | |
| "loss": 2.8004, | |
| "step": 574000 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "learning_rate": 6.591699652777779e-07, | |
| "loss": 2.79, | |
| "step": 574500 | |
| }, | |
| { | |
| "epoch": 8.01, | |
| "learning_rate": 6.549164930555554e-07, | |
| "loss": 2.7959, | |
| "step": 575000 | |
| }, | |
| { | |
| "epoch": 8.02, | |
| "learning_rate": 6.506630208333331e-07, | |
| "loss": 2.7923, | |
| "step": 575500 | |
| }, | |
| { | |
| "epoch": 8.02, | |
| "learning_rate": 6.464095486111113e-07, | |
| "loss": 2.7841, | |
| "step": 576000 | |
| }, | |
| { | |
| "epoch": 8.03, | |
| "learning_rate": 6.42156076388889e-07, | |
| "loss": 2.788, | |
| "step": 576500 | |
| }, | |
| { | |
| "epoch": 8.04, | |
| "learning_rate": 6.379026041666666e-07, | |
| "loss": 2.7968, | |
| "step": 577000 | |
| }, | |
| { | |
| "epoch": 8.05, | |
| "learning_rate": 6.336491319444444e-07, | |
| "loss": 2.7838, | |
| "step": 577500 | |
| }, | |
| { | |
| "epoch": 8.05, | |
| "learning_rate": 6.293956597222225e-07, | |
| "loss": 2.7803, | |
| "step": 578000 | |
| }, | |
| { | |
| "epoch": 8.06, | |
| "learning_rate": 6.251506944444445e-07, | |
| "loss": 2.773, | |
| "step": 578500 | |
| }, | |
| { | |
| "epoch": 8.07, | |
| "learning_rate": 6.208972222222222e-07, | |
| "loss": 2.7854, | |
| "step": 579000 | |
| }, | |
| { | |
| "epoch": 8.07, | |
| "learning_rate": 6.166437499999998e-07, | |
| "loss": 2.7804, | |
| "step": 579500 | |
| }, | |
| { | |
| "epoch": 8.08, | |
| "learning_rate": 6.123902777777781e-07, | |
| "loss": 2.8082, | |
| "step": 580000 | |
| }, | |
| { | |
| "epoch": 8.09, | |
| "learning_rate": 6.081368055555557e-07, | |
| "loss": 2.8016, | |
| "step": 580500 | |
| }, | |
| { | |
| "epoch": 8.09, | |
| "learning_rate": 6.038833333333334e-07, | |
| "loss": 2.8075, | |
| "step": 581000 | |
| }, | |
| { | |
| "epoch": 8.1, | |
| "learning_rate": 5.996298611111111e-07, | |
| "loss": 2.7771, | |
| "step": 581500 | |
| }, | |
| { | |
| "epoch": 8.11, | |
| "learning_rate": 5.953763888888887e-07, | |
| "loss": 2.7736, | |
| "step": 582000 | |
| }, | |
| { | |
| "epoch": 8.12, | |
| "learning_rate": 5.911314236111113e-07, | |
| "loss": 2.7955, | |
| "step": 582500 | |
| }, | |
| { | |
| "epoch": 8.12, | |
| "learning_rate": 5.868779513888889e-07, | |
| "loss": 2.8003, | |
| "step": 583000 | |
| }, | |
| { | |
| "epoch": 8.13, | |
| "learning_rate": 5.826244791666666e-07, | |
| "loss": 2.7899, | |
| "step": 583500 | |
| }, | |
| { | |
| "epoch": 8.14, | |
| "learning_rate": 5.783710069444442e-07, | |
| "loss": 2.8008, | |
| "step": 584000 | |
| }, | |
| { | |
| "epoch": 8.14, | |
| "learning_rate": 5.741260416666668e-07, | |
| "loss": 2.7903, | |
| "step": 584500 | |
| }, | |
| { | |
| "epoch": 8.15, | |
| "learning_rate": 5.698725694444445e-07, | |
| "loss": 2.7711, | |
| "step": 585000 | |
| }, | |
| { | |
| "epoch": 8.16, | |
| "learning_rate": 5.656190972222221e-07, | |
| "loss": 2.7718, | |
| "step": 585500 | |
| }, | |
| { | |
| "epoch": 8.16, | |
| "learning_rate": 5.613656250000003e-07, | |
| "loss": 2.7588, | |
| "step": 586000 | |
| }, | |
| { | |
| "epoch": 8.17, | |
| "learning_rate": 5.571206597222223e-07, | |
| "loss": 2.7749, | |
| "step": 586500 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "learning_rate": 5.528671875e-07, | |
| "loss": 2.7829, | |
| "step": 587000 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "learning_rate": 5.486137152777777e-07, | |
| "loss": 2.7816, | |
| "step": 587500 | |
| }, | |
| { | |
| "epoch": 8.19, | |
| "learning_rate": 5.443602430555559e-07, | |
| "loss": 2.7968, | |
| "step": 588000 | |
| }, | |
| { | |
| "epoch": 8.2, | |
| "learning_rate": 5.401067708333335e-07, | |
| "loss": 2.7727, | |
| "step": 588500 | |
| }, | |
| { | |
| "epoch": 8.21, | |
| "learning_rate": 5.358618055555555e-07, | |
| "loss": 2.7742, | |
| "step": 589000 | |
| }, | |
| { | |
| "epoch": 8.21, | |
| "learning_rate": 5.316083333333332e-07, | |
| "loss": 2.7692, | |
| "step": 589500 | |
| }, | |
| { | |
| "epoch": 8.22, | |
| "learning_rate": 5.273548611111113e-07, | |
| "loss": 2.7719, | |
| "step": 590000 | |
| }, | |
| { | |
| "epoch": 8.23, | |
| "learning_rate": 5.23101388888889e-07, | |
| "loss": 2.7895, | |
| "step": 590500 | |
| }, | |
| { | |
| "epoch": 8.23, | |
| "learning_rate": 5.18856423611111e-07, | |
| "loss": 2.7767, | |
| "step": 591000 | |
| }, | |
| { | |
| "epoch": 8.24, | |
| "learning_rate": 5.146029513888886e-07, | |
| "loss": 2.7981, | |
| "step": 591500 | |
| }, | |
| { | |
| "epoch": 8.25, | |
| "learning_rate": 5.103494791666669e-07, | |
| "loss": 2.7965, | |
| "step": 592000 | |
| }, | |
| { | |
| "epoch": 8.25, | |
| "learning_rate": 5.061045138888889e-07, | |
| "loss": 2.7841, | |
| "step": 592500 | |
| }, | |
| { | |
| "epoch": 8.26, | |
| "learning_rate": 5.018510416666666e-07, | |
| "loss": 2.7986, | |
| "step": 593000 | |
| }, | |
| { | |
| "epoch": 8.27, | |
| "learning_rate": 4.975975694444442e-07, | |
| "loss": 2.78, | |
| "step": 593500 | |
| }, | |
| { | |
| "epoch": 8.28, | |
| "learning_rate": 4.933440972222225e-07, | |
| "loss": 2.7823, | |
| "step": 594000 | |
| }, | |
| { | |
| "epoch": 8.28, | |
| "learning_rate": 4.890906250000001e-07, | |
| "loss": 2.7821, | |
| "step": 594500 | |
| }, | |
| { | |
| "epoch": 8.29, | |
| "learning_rate": 4.848371527777777e-07, | |
| "loss": 2.7913, | |
| "step": 595000 | |
| }, | |
| { | |
| "epoch": 8.3, | |
| "learning_rate": 4.805921874999998e-07, | |
| "loss": 2.7755, | |
| "step": 595500 | |
| }, | |
| { | |
| "epoch": 8.3, | |
| "learning_rate": 4.76338715277778e-07, | |
| "loss": 2.7903, | |
| "step": 596000 | |
| }, | |
| { | |
| "epoch": 8.31, | |
| "learning_rate": 4.720852430555556e-07, | |
| "loss": 2.7955, | |
| "step": 596500 | |
| }, | |
| { | |
| "epoch": 8.32, | |
| "learning_rate": 4.6783177083333334e-07, | |
| "loss": 2.7777, | |
| "step": 597000 | |
| }, | |
| { | |
| "epoch": 8.32, | |
| "learning_rate": 4.635782986111109e-07, | |
| "loss": 2.7806, | |
| "step": 597500 | |
| }, | |
| { | |
| "epoch": 8.33, | |
| "learning_rate": 4.5932482638888913e-07, | |
| "loss": 2.7839, | |
| "step": 598000 | |
| }, | |
| { | |
| "epoch": 8.34, | |
| "learning_rate": 4.550713541666668e-07, | |
| "loss": 2.7949, | |
| "step": 598500 | |
| }, | |
| { | |
| "epoch": 8.35, | |
| "learning_rate": 4.5081788194444444e-07, | |
| "loss": 2.7796, | |
| "step": 599000 | |
| }, | |
| { | |
| "epoch": 8.35, | |
| "learning_rate": 4.465729166666665e-07, | |
| "loss": 2.7986, | |
| "step": 599500 | |
| }, | |
| { | |
| "epoch": 8.36, | |
| "learning_rate": 4.4231944444444475e-07, | |
| "loss": 2.7757, | |
| "step": 600000 | |
| }, | |
| { | |
| "epoch": 8.36, | |
| "eval_gen_len": 19.9975, | |
| "eval_loss": 2.741455316543579, | |
| "eval_rouge1": 0.2976, | |
| "eval_rouge2": 0.1752, | |
| "eval_rougeL": 0.2519, | |
| "eval_rougeLsum": 0.2829, | |
| "eval_runtime": 882.0888, | |
| "eval_samples_per_second": 15.155, | |
| "eval_steps_per_second": 3.789, | |
| "step": 600000 | |
| }, | |
| { | |
| "epoch": 8.37, | |
| "learning_rate": 4.3806597222222233e-07, | |
| "loss": 2.7922, | |
| "step": 600500 | |
| }, | |
| { | |
| "epoch": 8.37, | |
| "learning_rate": 4.338125e-07, | |
| "loss": 2.7716, | |
| "step": 601000 | |
| }, | |
| { | |
| "epoch": 8.38, | |
| "learning_rate": 4.29567534722222e-07, | |
| "loss": 2.8045, | |
| "step": 601500 | |
| }, | |
| { | |
| "epoch": 8.39, | |
| "learning_rate": 4.2531406250000027e-07, | |
| "loss": 2.7861, | |
| "step": 602000 | |
| }, | |
| { | |
| "epoch": 8.39, | |
| "learning_rate": 4.2106059027777785e-07, | |
| "loss": 2.7802, | |
| "step": 602500 | |
| }, | |
| { | |
| "epoch": 8.4, | |
| "learning_rate": 4.1680711805555553e-07, | |
| "loss": 2.7811, | |
| "step": 603000 | |
| }, | |
| { | |
| "epoch": 8.41, | |
| "learning_rate": 4.125536458333332e-07, | |
| "loss": 2.783, | |
| "step": 603500 | |
| }, | |
| { | |
| "epoch": 8.41, | |
| "learning_rate": 4.083171875000001e-07, | |
| "loss": 2.8022, | |
| "step": 604000 | |
| }, | |
| { | |
| "epoch": 8.42, | |
| "learning_rate": 4.0406371527777774e-07, | |
| "loss": 2.7942, | |
| "step": 604500 | |
| }, | |
| { | |
| "epoch": 8.43, | |
| "learning_rate": 3.9981024305555537e-07, | |
| "loss": 2.7873, | |
| "step": 605000 | |
| }, | |
| { | |
| "epoch": 8.44, | |
| "learning_rate": 3.9555677083333363e-07, | |
| "loss": 2.7764, | |
| "step": 605500 | |
| }, | |
| { | |
| "epoch": 8.44, | |
| "learning_rate": 3.9130329861111126e-07, | |
| "loss": 2.7902, | |
| "step": 606000 | |
| }, | |
| { | |
| "epoch": 8.45, | |
| "learning_rate": 3.8704982638888894e-07, | |
| "loss": 2.8043, | |
| "step": 606500 | |
| }, | |
| { | |
| "epoch": 8.46, | |
| "learning_rate": 3.8279635416666657e-07, | |
| "loss": 2.7922, | |
| "step": 607000 | |
| }, | |
| { | |
| "epoch": 8.46, | |
| "learning_rate": 3.785428819444442e-07, | |
| "loss": 2.7908, | |
| "step": 607500 | |
| }, | |
| { | |
| "epoch": 8.47, | |
| "learning_rate": 3.742979166666668e-07, | |
| "loss": 2.7532, | |
| "step": 608000 | |
| }, | |
| { | |
| "epoch": 8.48, | |
| "learning_rate": 3.7004444444444446e-07, | |
| "loss": 2.8076, | |
| "step": 608500 | |
| }, | |
| { | |
| "epoch": 8.48, | |
| "learning_rate": 3.6579097222222214e-07, | |
| "loss": 2.7842, | |
| "step": 609000 | |
| }, | |
| { | |
| "epoch": 8.49, | |
| "learning_rate": 3.615374999999997e-07, | |
| "loss": 2.7756, | |
| "step": 609500 | |
| }, | |
| { | |
| "epoch": 8.5, | |
| "learning_rate": 3.572925347222223e-07, | |
| "loss": 2.7801, | |
| "step": 610000 | |
| }, | |
| { | |
| "epoch": 8.51, | |
| "learning_rate": 3.530390625e-07, | |
| "loss": 2.7784, | |
| "step": 610500 | |
| }, | |
| { | |
| "epoch": 8.51, | |
| "learning_rate": 3.4878559027777766e-07, | |
| "loss": 2.7938, | |
| "step": 611000 | |
| }, | |
| { | |
| "epoch": 8.52, | |
| "learning_rate": 3.4453211805555534e-07, | |
| "loss": 2.7746, | |
| "step": 611500 | |
| }, | |
| { | |
| "epoch": 8.53, | |
| "learning_rate": 3.402871527777778e-07, | |
| "loss": 2.7951, | |
| "step": 612000 | |
| }, | |
| { | |
| "epoch": 8.53, | |
| "learning_rate": 3.360336805555555e-07, | |
| "loss": 2.7847, | |
| "step": 612500 | |
| }, | |
| { | |
| "epoch": 8.54, | |
| "learning_rate": 3.3178020833333323e-07, | |
| "loss": 2.7867, | |
| "step": 613000 | |
| }, | |
| { | |
| "epoch": 8.55, | |
| "learning_rate": 3.275267361111114e-07, | |
| "loss": 2.7873, | |
| "step": 613500 | |
| }, | |
| { | |
| "epoch": 8.55, | |
| "learning_rate": 3.23273263888889e-07, | |
| "loss": 2.7873, | |
| "step": 614000 | |
| }, | |
| { | |
| "epoch": 8.56, | |
| "learning_rate": 3.1902829861111107e-07, | |
| "loss": 2.8178, | |
| "step": 614500 | |
| }, | |
| { | |
| "epoch": 8.57, | |
| "learning_rate": 3.147748263888887e-07, | |
| "loss": 2.7938, | |
| "step": 615000 | |
| }, | |
| { | |
| "epoch": 8.57, | |
| "learning_rate": 3.1052135416666696e-07, | |
| "loss": 2.7965, | |
| "step": 615500 | |
| }, | |
| { | |
| "epoch": 8.58, | |
| "learning_rate": 3.062678819444446e-07, | |
| "loss": 2.7871, | |
| "step": 616000 | |
| }, | |
| { | |
| "epoch": 8.59, | |
| "learning_rate": 3.0202291666666664e-07, | |
| "loss": 2.8107, | |
| "step": 616500 | |
| }, | |
| { | |
| "epoch": 8.6, | |
| "learning_rate": 2.9776944444444427e-07, | |
| "loss": 2.7973, | |
| "step": 617000 | |
| }, | |
| { | |
| "epoch": 8.6, | |
| "learning_rate": 2.935159722222224e-07, | |
| "loss": 2.7989, | |
| "step": 617500 | |
| }, | |
| { | |
| "epoch": 8.61, | |
| "learning_rate": 2.892625000000001e-07, | |
| "loss": 2.7996, | |
| "step": 618000 | |
| }, | |
| { | |
| "epoch": 8.62, | |
| "learning_rate": 2.850175347222221e-07, | |
| "loss": 2.7958, | |
| "step": 618500 | |
| }, | |
| { | |
| "epoch": 8.62, | |
| "learning_rate": 2.807640624999998e-07, | |
| "loss": 2.7917, | |
| "step": 619000 | |
| }, | |
| { | |
| "epoch": 8.63, | |
| "learning_rate": 2.76510590277778e-07, | |
| "loss": 2.7705, | |
| "step": 619500 | |
| }, | |
| { | |
| "epoch": 8.64, | |
| "learning_rate": 2.72265625e-07, | |
| "loss": 2.8046, | |
| "step": 620000 | |
| }, | |
| { | |
| "epoch": 8.64, | |
| "learning_rate": 2.68020659722222e-07, | |
| "loss": 2.8015, | |
| "step": 620500 | |
| }, | |
| { | |
| "epoch": 8.65, | |
| "learning_rate": 2.6376718750000027e-07, | |
| "loss": 2.7911, | |
| "step": 621000 | |
| }, | |
| { | |
| "epoch": 8.66, | |
| "learning_rate": 2.595137152777779e-07, | |
| "loss": 2.7989, | |
| "step": 621500 | |
| }, | |
| { | |
| "epoch": 8.67, | |
| "learning_rate": 2.552602430555555e-07, | |
| "loss": 2.7843, | |
| "step": 622000 | |
| }, | |
| { | |
| "epoch": 8.67, | |
| "learning_rate": 2.510067708333332e-07, | |
| "loss": 2.7882, | |
| "step": 622500 | |
| }, | |
| { | |
| "epoch": 8.68, | |
| "learning_rate": 2.467532986111109e-07, | |
| "loss": 2.7863, | |
| "step": 623000 | |
| }, | |
| { | |
| "epoch": 8.69, | |
| "learning_rate": 2.4249982638888904e-07, | |
| "loss": 2.7838, | |
| "step": 623500 | |
| }, | |
| { | |
| "epoch": 8.69, | |
| "learning_rate": 2.3824635416666672e-07, | |
| "loss": 2.7985, | |
| "step": 624000 | |
| }, | |
| { | |
| "epoch": 8.7, | |
| "learning_rate": 2.3399288194444434e-07, | |
| "loss": 2.7769, | |
| "step": 624500 | |
| }, | |
| { | |
| "epoch": 8.71, | |
| "learning_rate": 2.2974791666666693e-07, | |
| "loss": 2.7946, | |
| "step": 625000 | |
| }, | |
| { | |
| "epoch": 8.71, | |
| "learning_rate": 2.2550295138888896e-07, | |
| "loss": 2.7666, | |
| "step": 625500 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "learning_rate": 2.212494791666666e-07, | |
| "loss": 2.7783, | |
| "step": 626000 | |
| }, | |
| { | |
| "epoch": 8.73, | |
| "learning_rate": 2.1699600694444427e-07, | |
| "loss": 2.7792, | |
| "step": 626500 | |
| }, | |
| { | |
| "epoch": 8.74, | |
| "learning_rate": 2.127425347222225e-07, | |
| "loss": 2.7928, | |
| "step": 627000 | |
| }, | |
| { | |
| "epoch": 8.74, | |
| "learning_rate": 2.0848906250000013e-07, | |
| "loss": 2.7946, | |
| "step": 627500 | |
| }, | |
| { | |
| "epoch": 8.75, | |
| "learning_rate": 2.0423559027777778e-07, | |
| "loss": 2.769, | |
| "step": 628000 | |
| }, | |
| { | |
| "epoch": 8.76, | |
| "learning_rate": 1.9998211805555544e-07, | |
| "loss": 2.7804, | |
| "step": 628500 | |
| }, | |
| { | |
| "epoch": 8.76, | |
| "learning_rate": 1.9572864583333312e-07, | |
| "loss": 2.7762, | |
| "step": 629000 | |
| }, | |
| { | |
| "epoch": 8.77, | |
| "learning_rate": 1.9147517361111133e-07, | |
| "loss": 2.776, | |
| "step": 629500 | |
| }, | |
| { | |
| "epoch": 8.78, | |
| "learning_rate": 1.8722170138888895e-07, | |
| "loss": 2.7643, | |
| "step": 630000 | |
| }, | |
| { | |
| "epoch": 8.78, | |
| "learning_rate": 1.8297673611111093e-07, | |
| "loss": 2.7831, | |
| "step": 630500 | |
| }, | |
| { | |
| "epoch": 8.79, | |
| "learning_rate": 1.7872326388888864e-07, | |
| "loss": 2.7748, | |
| "step": 631000 | |
| }, | |
| { | |
| "epoch": 8.8, | |
| "learning_rate": 1.7446979166666682e-07, | |
| "loss": 2.7785, | |
| "step": 631500 | |
| }, | |
| { | |
| "epoch": 8.8, | |
| "learning_rate": 1.7021631944444447e-07, | |
| "loss": 2.8047, | |
| "step": 632000 | |
| }, | |
| { | |
| "epoch": 8.81, | |
| "learning_rate": 1.6596284722222218e-07, | |
| "loss": 2.7804, | |
| "step": 632500 | |
| }, | |
| { | |
| "epoch": 8.82, | |
| "learning_rate": 1.617093749999998e-07, | |
| "loss": 2.7733, | |
| "step": 633000 | |
| }, | |
| { | |
| "epoch": 8.83, | |
| "learning_rate": 1.57455902777778e-07, | |
| "loss": 2.7808, | |
| "step": 633500 | |
| }, | |
| { | |
| "epoch": 8.83, | |
| "learning_rate": 1.5320243055555567e-07, | |
| "loss": 2.7608, | |
| "step": 634000 | |
| }, | |
| { | |
| "epoch": 8.84, | |
| "learning_rate": 1.4895746527777767e-07, | |
| "loss": 2.7795, | |
| "step": 634500 | |
| }, | |
| { | |
| "epoch": 8.85, | |
| "learning_rate": 1.4470399305555533e-07, | |
| "loss": 2.7757, | |
| "step": 635000 | |
| }, | |
| { | |
| "epoch": 8.85, | |
| "learning_rate": 1.4045902777777789e-07, | |
| "loss": 2.7762, | |
| "step": 635500 | |
| }, | |
| { | |
| "epoch": 8.86, | |
| "learning_rate": 1.3620555555555557e-07, | |
| "loss": 2.8027, | |
| "step": 636000 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "learning_rate": 1.3195208333333322e-07, | |
| "loss": 2.801, | |
| "step": 636500 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "learning_rate": 1.2770711805555578e-07, | |
| "loss": 2.805, | |
| "step": 637000 | |
| }, | |
| { | |
| "epoch": 8.88, | |
| "learning_rate": 1.2345364583333343e-07, | |
| "loss": 2.7915, | |
| "step": 637500 | |
| }, | |
| { | |
| "epoch": 8.89, | |
| "learning_rate": 1.1920017361111108e-07, | |
| "loss": 2.7784, | |
| "step": 638000 | |
| }, | |
| { | |
| "epoch": 8.9, | |
| "learning_rate": 1.1494670138888875e-07, | |
| "loss": 2.7728, | |
| "step": 638500 | |
| }, | |
| { | |
| "epoch": 8.9, | |
| "learning_rate": 1.106932291666664e-07, | |
| "loss": 2.784, | |
| "step": 639000 | |
| }, | |
| { | |
| "epoch": 8.91, | |
| "learning_rate": 1.0643975694444459e-07, | |
| "loss": 2.782, | |
| "step": 639500 | |
| }, | |
| { | |
| "epoch": 8.92, | |
| "learning_rate": 1.0218628472222226e-07, | |
| "loss": 2.7799, | |
| "step": 640000 | |
| } | |
| ], | |
| "max_steps": 640000, | |
| "num_train_epochs": 9, | |
| "total_flos": 1.5081584369164186e+18, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
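
The log above follows the `trainer_state.json` schema written by the Hugging Face `transformers` `Trainer`: `log_history` interleaves periodic training records (`epoch`, `learning_rate`, `loss`, `step`) with sparser evaluation records (`eval_loss`, `eval_rouge1`, and so on; the two visible in this portion land at steps 500000 and 600000), and the run ends once `global_step` reaches `max_steps` (640000). As a usage note, the sketch below shows one way to load such a file and separate the two record shapes. It is a minimal illustration, not part of the original log, and the filename `trainer_state.json` is an assumption rather than anything stated in the log itself.

```python
# Minimal sketch (not part of the original log): reading a Hugging Face
# Trainer state file shaped like the one above. The filename
# "trainer_state.json" is assumed; point it at the actual checkpoint file.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# log_history mixes two record shapes: training records carry a "loss" key,
# evaluation records carry "eval_loss" (plus eval_rouge* and runtime fields).
train_logs = [r for r in state["log_history"] if "loss" in r]
eval_logs = [r for r in state["log_history"] if "eval_loss" in r]

steps = [r["step"] for r in train_logs]
losses = [r["loss"] for r in train_logs]

print(f"{len(train_logs)} training records, {len(eval_logs)} eval records")
print(f"final step {steps[-1]} of {state['max_steps']}, "
      f"final training loss {losses[-1]:.4f}")
for r in eval_logs:
    print(f"step {r['step']}: eval_loss={r['eval_loss']:.4f}, "
          f"rougeL={r['eval_rougeL']:.4f}")
```

Because every record carries both `step` and `epoch`, the two lists above can be placed on a shared x-axis (dense training loss against the sparse eval points) without any joining logic.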