{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 50.0,
  "global_step": 19850,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.25,
      "learning_rate": 0.000994962216624685,
      "loss": 4.6421,
      "step": 100
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.0009899244332493703,
      "loss": 3.5433,
      "step": 200
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0009848866498740553,
      "loss": 3.3518,
      "step": 300
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.0009798488664987406,
      "loss": 3.167,
      "step": 400
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.0009748110831234257,
      "loss": 2.8377,
      "step": 500
    },
    {
      "epoch": 1.51,
      "learning_rate": 0.0009697732997481109,
      "loss": 2.8006,
      "step": 600
    },
    {
      "epoch": 1.76,
      "learning_rate": 0.000964735516372796,
      "loss": 2.7447,
      "step": 700
    },
    {
      "epoch": 2.02,
      "learning_rate": 0.0009596977329974811,
      "loss": 2.6706,
      "step": 800
    },
    {
      "epoch": 2.27,
      "learning_rate": 0.0009546599496221663,
      "loss": 2.3749,
      "step": 900
    },
    {
      "epoch": 2.52,
      "learning_rate": 0.0009496221662468514,
      "loss": 2.3879,
      "step": 1000
    },
    {
      "epoch": 2.77,
      "learning_rate": 0.0009445843828715366,
      "loss": 2.3724,
      "step": 1100
    },
    {
      "epoch": 3.02,
      "learning_rate": 0.0009395465994962216,
      "loss": 2.3291,
      "step": 1200
    },
    {
      "epoch": 3.27,
      "learning_rate": 0.0009345088161209068,
      "loss": 2.0652,
      "step": 1300
    },
    {
      "epoch": 3.53,
      "learning_rate": 0.000929471032745592,
      "loss": 2.053,
      "step": 1400
    },
    {
      "epoch": 3.78,
      "learning_rate": 0.000924433249370277,
      "loss": 2.0745,
      "step": 1500
    },
    {
      "epoch": 4.03,
      "learning_rate": 0.0009193954659949623,
      "loss": 2.0183,
      "step": 1600
    },
    {
      "epoch": 4.28,
      "learning_rate": 0.0009143576826196473,
      "loss": 1.7442,
      "step": 1700
    },
    {
      "epoch": 4.53,
      "learning_rate": 0.0009093198992443325,
      "loss": 1.8065,
      "step": 1800
    },
    {
      "epoch": 4.79,
      "learning_rate": 0.0009042821158690177,
      "loss": 1.7861,
      "step": 1900
    },
    {
      "epoch": 5.04,
      "learning_rate": 0.0008992443324937027,
      "loss": 1.7369,
      "step": 2000
    },
    {
      "epoch": 5.04,
      "eval_bleu": 2.1435,
      "eval_gen_len": 15.286,
      "eval_loss": 1.1994951963424683,
      "eval_runtime": 384.6632,
      "eval_samples_per_second": 13.926,
      "eval_steps_per_second": 3.484,
      "step": 2000
    },
    {
      "epoch": 5.29,
      "learning_rate": 0.000894206549118388,
      "loss": 1.4835,
      "step": 2100
    },
    {
      "epoch": 5.54,
      "learning_rate": 0.0008891687657430731,
      "loss": 1.5012,
      "step": 2200
    },
    {
      "epoch": 5.79,
      "learning_rate": 0.0008841309823677582,
      "loss": 1.5058,
      "step": 2300
    },
    {
      "epoch": 6.05,
      "learning_rate": 0.0008790931989924434,
      "loss": 1.4738,
      "step": 2400
    },
    {
      "epoch": 6.3,
      "learning_rate": 0.0008740554156171285,
      "loss": 1.2644,
      "step": 2500
    },
    {
      "epoch": 6.55,
      "learning_rate": 0.0008690176322418136,
      "loss": 1.3161,
      "step": 2600
    },
    {
      "epoch": 6.8,
      "learning_rate": 0.0008639798488664988,
      "loss": 1.299,
      "step": 2700
    },
    {
      "epoch": 7.05,
      "learning_rate": 0.0008589420654911839,
      "loss": 1.2264,
      "step": 2800
    },
    {
      "epoch": 7.3,
      "learning_rate": 0.000853904282115869,
      "loss": 1.08,
      "step": 2900
    },
    {
      "epoch": 7.56,
      "learning_rate": 0.0008488664987405543,
      "loss": 1.0927,
      "step": 3000
    },
    {
      "epoch": 7.81,
      "learning_rate": 0.0008438287153652393,
      "loss": 1.1136,
      "step": 3100
    },
    {
      "epoch": 8.06,
      "learning_rate": 0.0008387909319899244,
      "loss": 1.0504,
      "step": 3200
    },
    {
      "epoch": 8.31,
      "learning_rate": 0.0008337531486146096,
      "loss": 0.9257,
      "step": 3300
    },
    {
      "epoch": 8.56,
      "learning_rate": 0.0008287153652392947,
      "loss": 0.9468,
      "step": 3400
    },
    {
      "epoch": 8.82,
      "learning_rate": 0.0008236775818639799,
      "loss": 0.9501,
      "step": 3500
    },
    {
      "epoch": 9.07,
      "learning_rate": 0.000818639798488665,
      "loss": 0.923,
      "step": 3600
    },
    {
      "epoch": 9.32,
      "learning_rate": 0.0008136020151133502,
      "loss": 0.8187,
      "step": 3700
    },
    {
      "epoch": 9.57,
      "learning_rate": 0.0008085642317380352,
      "loss": 0.8214,
      "step": 3800
    },
    {
      "epoch": 9.82,
      "learning_rate": 0.0008035264483627204,
      "loss": 0.8451,
      "step": 3900
    },
    {
      "epoch": 10.08,
      "learning_rate": 0.0007984886649874056,
      "loss": 0.8056,
      "step": 4000
    },
    {
      "epoch": 10.08,
      "eval_bleu": 4.0133,
      "eval_gen_len": 16.3603,
      "eval_loss": 0.48705288767814636,
      "eval_runtime": 378.0534,
      "eval_samples_per_second": 14.17,
      "eval_steps_per_second": 3.544,
      "step": 4000
    },
    {
      "epoch": 10.33,
      "learning_rate": 0.0007934508816120906,
      "loss": 0.7131,
      "step": 4100
    },
    {
      "epoch": 10.58,
      "learning_rate": 0.0007884130982367759,
      "loss": 0.7319,
      "step": 4200
    },
    {
      "epoch": 10.83,
      "learning_rate": 0.000783375314861461,
      "loss": 0.7542,
      "step": 4300
    },
    {
      "epoch": 11.08,
      "learning_rate": 0.0007783375314861461,
      "loss": 0.7128,
      "step": 4400
    },
    {
      "epoch": 11.34,
      "learning_rate": 0.0007732997481108313,
      "loss": 0.6346,
      "step": 4500
    },
    {
      "epoch": 11.59,
      "learning_rate": 0.0007682619647355163,
      "loss": 0.6691,
      "step": 4600
    },
    {
      "epoch": 11.84,
      "learning_rate": 0.0007632241813602016,
      "loss": 0.6781,
      "step": 4700
    },
    {
      "epoch": 12.09,
      "learning_rate": 0.0007581863979848867,
      "loss": 0.6235,
      "step": 4800
    },
    {
      "epoch": 12.34,
      "learning_rate": 0.0007531486146095718,
      "loss": 0.5506,
      "step": 4900
    },
    {
      "epoch": 12.59,
      "learning_rate": 0.000748110831234257,
      "loss": 0.581,
      "step": 5000
    },
    {
      "epoch": 12.85,
      "learning_rate": 0.000743073047858942,
      "loss": 0.5951,
      "step": 5100
    },
    {
      "epoch": 13.1,
      "learning_rate": 0.0007380352644836272,
      "loss": 0.5726,
      "step": 5200
    },
    {
      "epoch": 13.35,
      "learning_rate": 0.0007329974811083124,
      "loss": 0.499,
      "step": 5300
    },
    {
      "epoch": 13.6,
      "learning_rate": 0.0007279596977329975,
      "loss": 0.538,
      "step": 5400
    },
    {
      "epoch": 13.85,
      "learning_rate": 0.0007229219143576826,
      "loss": 0.5296,
      "step": 5500
    },
    {
      "epoch": 14.11,
      "learning_rate": 0.0007178841309823679,
      "loss": 0.5083,
      "step": 5600
    },
    {
      "epoch": 14.36,
      "learning_rate": 0.0007128463476070529,
      "loss": 0.4574,
      "step": 5700
    },
    {
      "epoch": 14.61,
      "learning_rate": 0.000707808564231738,
      "loss": 0.4762,
      "step": 5800
    },
    {
      "epoch": 14.86,
      "learning_rate": 0.0007027707808564232,
      "loss": 0.4883,
      "step": 5900
    },
    {
      "epoch": 15.11,
      "learning_rate": 0.0006977329974811083,
      "loss": 0.4478,
      "step": 6000
    },
    {
      "epoch": 15.11,
      "eval_bleu": 4.6327,
      "eval_gen_len": 15.9266,
      "eval_loss": 0.2581786513328552,
      "eval_runtime": 361.4738,
      "eval_samples_per_second": 14.82,
      "eval_steps_per_second": 3.707,
      "step": 6000
    },
    {
      "epoch": 15.37,
      "learning_rate": 0.0006926952141057935,
      "loss": 0.4233,
      "step": 6100
    },
    {
      "epoch": 15.62,
      "learning_rate": 0.0006876574307304786,
      "loss": 0.4295,
      "step": 6200
    },
    {
      "epoch": 15.87,
      "learning_rate": 0.0006826196473551638,
      "loss": 0.4491,
      "step": 6300
    },
    {
      "epoch": 16.12,
      "learning_rate": 0.0006775818639798488,
      "loss": 0.4257,
      "step": 6400
    },
    {
      "epoch": 16.37,
      "learning_rate": 0.000672544080604534,
      "loss": 0.386,
      "step": 6500
    },
    {
      "epoch": 16.62,
      "learning_rate": 0.0006675062972292192,
      "loss": 0.399,
      "step": 6600
    },
    {
      "epoch": 16.88,
      "learning_rate": 0.0006624685138539042,
      "loss": 0.405,
      "step": 6700
    },
    {
      "epoch": 17.13,
      "learning_rate": 0.0006574307304785895,
      "loss": 0.3709,
      "step": 6800
    },
    {
      "epoch": 17.38,
      "learning_rate": 0.0006523929471032746,
      "loss": 0.3499,
      "step": 6900
    },
    {
      "epoch": 17.63,
      "learning_rate": 0.0006473551637279596,
      "loss": 0.3677,
      "step": 7000
    },
    {
      "epoch": 17.88,
      "learning_rate": 0.0006423173803526449,
      "loss": 0.3744,
      "step": 7100
    },
    {
      "epoch": 18.14,
      "learning_rate": 0.0006372795969773299,
      "loss": 0.3409,
      "step": 7200
    },
    {
      "epoch": 18.39,
      "learning_rate": 0.0006322418136020152,
      "loss": 0.3273,
      "step": 7300
    },
    {
      "epoch": 18.64,
      "learning_rate": 0.0006272040302267003,
      "loss": 0.3412,
      "step": 7400
    },
    {
      "epoch": 18.89,
      "learning_rate": 0.0006221662468513854,
      "loss": 0.3472,
      "step": 7500
    },
    {
      "epoch": 19.14,
      "learning_rate": 0.0006171284634760706,
      "loss": 0.3321,
      "step": 7600
    },
    {
      "epoch": 19.4,
      "learning_rate": 0.0006120906801007557,
      "loss": 0.3113,
      "step": 7700
    },
    {
      "epoch": 19.65,
      "learning_rate": 0.0006070528967254408,
      "loss": 0.3141,
      "step": 7800
    },
    {
      "epoch": 19.9,
      "learning_rate": 0.000602015113350126,
      "loss": 0.3326,
      "step": 7900
    },
    {
      "epoch": 20.15,
      "learning_rate": 0.0005969773299748111,
      "loss": 0.2949,
      "step": 8000
    },
    {
      "epoch": 20.15,
      "eval_bleu": 5.0752,
      "eval_gen_len": 16.3638,
      "eval_loss": 0.19623734056949615,
      "eval_runtime": 389.6723,
      "eval_samples_per_second": 13.747,
      "eval_steps_per_second": 3.439,
      "step": 8000
    },
    {
      "epoch": 20.4,
      "learning_rate": 0.0005919395465994962,
      "loss": 0.2852,
      "step": 8100
    },
    {
      "epoch": 20.65,
      "learning_rate": 0.0005869017632241814,
      "loss": 0.3026,
      "step": 8200
    },
    {
      "epoch": 20.91,
      "learning_rate": 0.0005818639798488665,
      "loss": 0.3194,
      "step": 8300
    },
    {
      "epoch": 21.16,
      "learning_rate": 0.0005768261964735516,
      "loss": 0.2843,
      "step": 8400
    },
    {
      "epoch": 21.41,
      "learning_rate": 0.0005717884130982368,
      "loss": 0.2761,
      "step": 8500
    },
    {
      "epoch": 21.66,
      "learning_rate": 0.0005667506297229219,
      "loss": 0.2826,
      "step": 8600
    },
    {
      "epoch": 21.91,
      "learning_rate": 0.0005617128463476071,
      "loss": 0.2929,
      "step": 8700
    },
    {
      "epoch": 22.17,
      "learning_rate": 0.0005566750629722922,
      "loss": 0.2638,
      "step": 8800
    },
    {
      "epoch": 22.42,
      "learning_rate": 0.0005516372795969773,
      "loss": 0.2595,
      "step": 8900
    },
    {
      "epoch": 22.67,
      "learning_rate": 0.0005465994962216625,
      "loss": 0.2747,
      "step": 9000
    },
    {
      "epoch": 22.92,
      "learning_rate": 0.0005415617128463476,
      "loss": 0.2806,
      "step": 9100
    },
    {
      "epoch": 23.17,
      "learning_rate": 0.0005365239294710328,
      "loss": 0.2588,
      "step": 9200
    },
    {
      "epoch": 23.43,
      "learning_rate": 0.0005314861460957178,
      "loss": 0.2559,
      "step": 9300
    },
    {
      "epoch": 23.68,
      "learning_rate": 0.0005264483627204031,
      "loss": 0.2597,
      "step": 9400
    },
    {
      "epoch": 23.93,
      "learning_rate": 0.0005214105793450882,
      "loss": 0.262,
      "step": 9500
    },
    {
      "epoch": 24.18,
      "learning_rate": 0.0005163727959697732,
      "loss": 0.2408,
      "step": 9600
    },
    {
      "epoch": 24.43,
      "learning_rate": 0.0005113350125944585,
      "loss": 0.244,
      "step": 9700
    },
    {
      "epoch": 24.69,
      "learning_rate": 0.0005062972292191435,
      "loss": 0.2435,
      "step": 9800
    },
    {
      "epoch": 24.94,
      "learning_rate": 0.0005012594458438288,
      "loss": 0.2535,
      "step": 9900
    },
    {
      "epoch": 25.19,
      "learning_rate": 0.0004962216624685139,
      "loss": 0.2261,
      "step": 10000
    },
    {
      "epoch": 25.19,
      "eval_bleu": 4.9239,
      "eval_gen_len": 16.3816,
      "eval_loss": 0.17935612797737122,
      "eval_runtime": 383.1714,
      "eval_samples_per_second": 13.981,
      "eval_steps_per_second": 3.497,
      "step": 10000
    },
    {
      "epoch": 25.44,
      "learning_rate": 0.000491183879093199,
      "loss": 0.2352,
      "step": 10100
    },
    {
      "epoch": 25.69,
      "learning_rate": 0.00048614609571788416,
      "loss": 0.2443,
      "step": 10200
    },
    {
      "epoch": 25.94,
      "learning_rate": 0.0004811083123425693,
      "loss": 0.2473,
      "step": 10300
    },
    {
      "epoch": 26.2,
      "learning_rate": 0.00047607052896725437,
      "loss": 0.2178,
      "step": 10400
    },
    {
      "epoch": 26.45,
      "learning_rate": 0.00047103274559193956,
      "loss": 0.2288,
      "step": 10500
    },
    {
      "epoch": 26.7,
      "learning_rate": 0.0004659949622166247,
      "loss": 0.2285,
      "step": 10600
    },
    {
      "epoch": 26.95,
      "learning_rate": 0.00046095717884130983,
      "loss": 0.2374,
      "step": 10700
    },
    {
      "epoch": 27.2,
      "learning_rate": 0.00045591939546599496,
      "loss": 0.2126,
      "step": 10800
    },
    {
      "epoch": 27.46,
      "learning_rate": 0.00045088161209068015,
      "loss": 0.2142,
      "step": 10900
    },
    {
      "epoch": 27.71,
      "learning_rate": 0.0004458438287153653,
      "loss": 0.2182,
      "step": 11000
    },
    {
      "epoch": 27.96,
      "learning_rate": 0.00044080604534005037,
      "loss": 0.2314,
      "step": 11100
    },
    {
      "epoch": 28.21,
      "learning_rate": 0.0004357682619647355,
      "loss": 0.2052,
      "step": 11200
    },
    {
      "epoch": 28.46,
      "learning_rate": 0.00043073047858942063,
      "loss": 0.2095,
      "step": 11300
    },
    {
      "epoch": 28.72,
      "learning_rate": 0.0004256926952141058,
      "loss": 0.2196,
      "step": 11400
    },
    {
      "epoch": 28.97,
      "learning_rate": 0.00042065491183879096,
      "loss": 0.2205,
      "step": 11500
    },
    {
      "epoch": 29.22,
      "learning_rate": 0.0004156171284634761,
      "loss": 0.2,
      "step": 11600
    },
    {
      "epoch": 29.47,
      "learning_rate": 0.00041057934508816117,
      "loss": 0.2051,
      "step": 11700
    },
    {
      "epoch": 29.72,
      "learning_rate": 0.00040554156171284636,
      "loss": 0.2124,
      "step": 11800
    },
    {
      "epoch": 29.97,
      "learning_rate": 0.0004005037783375315,
      "loss": 0.2159,
      "step": 11900
    },
    {
      "epoch": 30.23,
      "learning_rate": 0.00039546599496221663,
      "loss": 0.1903,
      "step": 12000
    },
    {
      "epoch": 30.23,
      "eval_bleu": 5.1033,
      "eval_gen_len": 16.1934,
      "eval_loss": 0.17088228464126587,
      "eval_runtime": 381.8763,
      "eval_samples_per_second": 14.028,
      "eval_steps_per_second": 3.509,
      "step": 12000
    },
    {
      "epoch": 30.48,
      "learning_rate": 0.00039042821158690176,
      "loss": 0.1995,
      "step": 12100
    },
    {
      "epoch": 30.73,
      "learning_rate": 0.00038539042821158695,
      "loss": 0.2046,
      "step": 12200
    },
    {
      "epoch": 30.98,
      "learning_rate": 0.00038035264483627203,
      "loss": 0.2127,
      "step": 12300
    },
    {
      "epoch": 31.23,
      "learning_rate": 0.00037531486146095717,
      "loss": 0.1923,
      "step": 12400
    },
    {
      "epoch": 31.49,
      "learning_rate": 0.0003702770780856423,
      "loss": 0.1931,
      "step": 12500
    },
    {
      "epoch": 31.74,
      "learning_rate": 0.0003652392947103275,
      "loss": 0.1969,
      "step": 12600
    },
    {
      "epoch": 31.99,
      "learning_rate": 0.0003602015113350126,
      "loss": 0.2025,
      "step": 12700
    },
    {
      "epoch": 32.24,
      "learning_rate": 0.00035516372795969776,
      "loss": 0.1826,
      "step": 12800
    },
    {
      "epoch": 32.49,
      "learning_rate": 0.0003501259445843829,
      "loss": 0.1885,
      "step": 12900
    },
    {
      "epoch": 32.75,
      "learning_rate": 0.00034508816120906797,
      "loss": 0.1935,
      "step": 13000
    },
    {
      "epoch": 33.0,
      "learning_rate": 0.00034005037783375316,
      "loss": 0.2017,
      "step": 13100
    },
    {
      "epoch": 33.25,
      "learning_rate": 0.0003350125944584383,
      "loss": 0.1778,
      "step": 13200
    },
    {
      "epoch": 33.5,
      "learning_rate": 0.00032997481108312343,
      "loss": 0.1842,
      "step": 13300
    },
    {
      "epoch": 33.75,
      "learning_rate": 0.00032493702770780856,
      "loss": 0.1929,
      "step": 13400
    },
    {
      "epoch": 34.01,
      "learning_rate": 0.00031989924433249375,
      "loss": 0.1927,
      "step": 13500
    },
    {
      "epoch": 34.26,
      "learning_rate": 0.00031486146095717883,
      "loss": 0.1768,
      "step": 13600
    },
    {
      "epoch": 34.51,
      "learning_rate": 0.00030982367758186397,
      "loss": 0.1814,
      "step": 13700
    },
    {
      "epoch": 34.76,
      "learning_rate": 0.0003047858942065491,
      "loss": 0.1823,
      "step": 13800
    },
    {
      "epoch": 35.01,
      "learning_rate": 0.0002997481108312343,
      "loss": 0.1824,
      "step": 13900
    },
    {
      "epoch": 35.26,
      "learning_rate": 0.0002947103274559194,
      "loss": 0.1689,
      "step": 14000
    },
    {
      "epoch": 35.26,
      "eval_bleu": 5.0783,
      "eval_gen_len": 16.5555,
      "eval_loss": 0.16545478999614716,
      "eval_runtime": 405.9141,
      "eval_samples_per_second": 13.197,
      "eval_steps_per_second": 3.301,
      "step": 14000
    },
    {
      "epoch": 35.52,
      "learning_rate": 0.00028967254408060456,
      "loss": 0.1783,
      "step": 14100
    },
    {
      "epoch": 35.77,
      "learning_rate": 0.00028463476070528964,
      "loss": 0.1759,
      "step": 14200
    },
    {
      "epoch": 36.02,
      "learning_rate": 0.0002795969773299748,
      "loss": 0.1857,
      "step": 14300
    },
    {
      "epoch": 36.27,
      "learning_rate": 0.00027455919395465996,
      "loss": 0.1669,
      "step": 14400
    },
    {
      "epoch": 36.52,
      "learning_rate": 0.0002695214105793451,
      "loss": 0.173,
      "step": 14500
    },
    {
      "epoch": 36.78,
      "learning_rate": 0.00026448362720403023,
      "loss": 0.1752,
      "step": 14600
    },
    {
      "epoch": 37.03,
      "learning_rate": 0.00025944584382871536,
      "loss": 0.1784,
      "step": 14700
    },
    {
      "epoch": 37.28,
      "learning_rate": 0.00025440806045340055,
      "loss": 0.1674,
      "step": 14800
    },
    {
      "epoch": 37.53,
      "learning_rate": 0.00024937027707808563,
      "loss": 0.1678,
      "step": 14900
    },
    {
      "epoch": 37.78,
      "learning_rate": 0.00024433249370277077,
      "loss": 0.1714,
      "step": 15000
    },
    {
      "epoch": 38.04,
      "learning_rate": 0.00023929471032745593,
      "loss": 0.1697,
      "step": 15100
    },
    {
      "epoch": 38.29,
      "learning_rate": 0.00023425692695214106,
      "loss": 0.1638,
      "step": 15200
    },
    {
      "epoch": 38.54,
      "learning_rate": 0.0002292191435768262,
      "loss": 0.1667,
      "step": 15300
    },
    {
      "epoch": 38.79,
      "learning_rate": 0.00022418136020151133,
      "loss": 0.1714,
      "step": 15400
    },
    {
      "epoch": 39.04,
      "learning_rate": 0.0002191435768261965,
      "loss": 0.1642,
      "step": 15500
    },
    {
      "epoch": 39.29,
      "learning_rate": 0.0002141057934508816,
      "loss": 0.1585,
      "step": 15600
    },
    {
      "epoch": 39.55,
      "learning_rate": 0.00020906801007556676,
      "loss": 0.1607,
      "step": 15700
    },
    {
      "epoch": 39.8,
      "learning_rate": 0.0002040302267002519,
      "loss": 0.1643,
      "step": 15800
    },
    {
      "epoch": 40.05,
      "learning_rate": 0.00019899244332493706,
      "loss": 0.1662,
      "step": 15900
    },
    {
      "epoch": 40.3,
      "learning_rate": 0.00019395465994962217,
      "loss": 0.1571,
      "step": 16000
    },
    {
      "epoch": 40.3,
      "eval_bleu": 5.197,
      "eval_gen_len": 16.6603,
      "eval_loss": 0.16155509650707245,
      "eval_runtime": 388.2766,
      "eval_samples_per_second": 13.797,
      "eval_steps_per_second": 3.451,
      "step": 16000
    },
    {
      "epoch": 40.55,
      "learning_rate": 0.0001889168765743073,
      "loss": 0.1597,
      "step": 16100
    },
    {
      "epoch": 40.81,
      "learning_rate": 0.00018387909319899246,
      "loss": 0.1634,
      "step": 16200
    },
    {
      "epoch": 41.06,
      "learning_rate": 0.00017884130982367757,
      "loss": 0.1609,
      "step": 16300
    },
    {
      "epoch": 41.31,
      "learning_rate": 0.00017380352644836273,
      "loss": 0.1526,
      "step": 16400
    },
    {
      "epoch": 41.56,
      "learning_rate": 0.00016876574307304786,
      "loss": 0.1536,
      "step": 16500
    },
    {
      "epoch": 41.81,
      "learning_rate": 0.000163727959697733,
      "loss": 0.1606,
      "step": 16600
    },
    {
      "epoch": 42.07,
      "learning_rate": 0.00015869017632241813,
      "loss": 0.1603,
      "step": 16700
    },
    {
      "epoch": 42.32,
      "learning_rate": 0.0001536523929471033,
      "loss": 0.1522,
      "step": 16800
    },
    {
      "epoch": 42.57,
      "learning_rate": 0.0001486146095717884,
      "loss": 0.1554,
      "step": 16900
    },
    {
      "epoch": 42.82,
      "learning_rate": 0.00014357682619647356,
      "loss": 0.1567,
      "step": 17000
    },
    {
      "epoch": 43.07,
      "learning_rate": 0.0001385390428211587,
      "loss": 0.1537,
      "step": 17100
    },
    {
      "epoch": 43.32,
      "learning_rate": 0.00013350125944584383,
      "loss": 0.1506,
      "step": 17200
    },
    {
      "epoch": 43.58,
      "learning_rate": 0.00012846347607052897,
      "loss": 0.1513,
      "step": 17300
    },
    {
      "epoch": 43.83,
      "learning_rate": 0.00012342569269521413,
      "loss": 0.1563,
      "step": 17400
    },
    {
      "epoch": 44.08,
      "learning_rate": 0.00011838790931989925,
      "loss": 0.1503,
      "step": 17500
    },
    {
      "epoch": 44.33,
      "learning_rate": 0.00011335012594458438,
      "loss": 0.1456,
      "step": 17600
    },
    {
      "epoch": 44.58,
      "learning_rate": 0.00010831234256926953,
      "loss": 0.1486,
      "step": 17700
    },
    {
      "epoch": 44.84,
      "learning_rate": 0.00010327455919395466,
      "loss": 0.1501,
      "step": 17800
    },
    {
      "epoch": 45.09,
      "learning_rate": 9.82367758186398e-05,
      "loss": 0.1497,
      "step": 17900
    },
    {
      "epoch": 45.34,
      "learning_rate": 9.319899244332495e-05,
      "loss": 0.1463,
      "step": 18000
    },
    {
      "epoch": 45.34,
      "eval_bleu": 5.2322,
      "eval_gen_len": 16.6093,
      "eval_loss": 0.15787681937217712,
      "eval_runtime": 391.3483,
      "eval_samples_per_second": 13.689,
      "eval_steps_per_second": 3.424,
      "step": 18000
    },
    {
      "epoch": 45.59,
      "learning_rate": 8.816120906801008e-05,
      "loss": 0.1457,
      "step": 18100
    },
    {
      "epoch": 45.84,
      "learning_rate": 8.312342569269522e-05,
      "loss": 0.1471,
      "step": 18200
    },
    {
      "epoch": 46.1,
      "learning_rate": 7.808564231738035e-05,
      "loss": 0.1492,
      "step": 18300
    },
    {
      "epoch": 46.35,
      "learning_rate": 7.304785894206548e-05,
      "loss": 0.1398,
      "step": 18400
    },
    {
      "epoch": 46.6,
      "learning_rate": 6.801007556675063e-05,
      "loss": 0.1436,
      "step": 18500
    },
    {
      "epoch": 46.85,
      "learning_rate": 6.297229219143577e-05,
      "loss": 0.1452,
      "step": 18600
    },
    {
      "epoch": 47.1,
      "learning_rate": 5.793450881612091e-05,
      "loss": 0.1452,
      "step": 18700
    },
    {
      "epoch": 47.36,
      "learning_rate": 5.289672544080604e-05,
      "loss": 0.1411,
      "step": 18800
    },
    {
      "epoch": 47.61,
      "learning_rate": 4.785894206549118e-05,
      "loss": 0.1436,
      "step": 18900
    },
    {
      "epoch": 47.86,
      "learning_rate": 4.2821158690176324e-05,
      "loss": 0.1442,
      "step": 19000
    },
    {
      "epoch": 48.11,
      "learning_rate": 3.7783375314861465e-05,
      "loss": 0.1434,
      "step": 19100
    },
    {
      "epoch": 48.36,
      "learning_rate": 3.27455919395466e-05,
      "loss": 0.1379,
      "step": 19200
    },
    {
      "epoch": 48.61,
      "learning_rate": 2.7707808564231737e-05,
      "loss": 0.1403,
      "step": 19300
    },
    {
      "epoch": 48.87,
      "learning_rate": 2.2670025188916875e-05,
      "loss": 0.1437,
      "step": 19400
    },
    {
      "epoch": 49.12,
      "learning_rate": 1.7632241813602016e-05,
      "loss": 0.14,
      "step": 19500
    },
    {
      "epoch": 49.37,
      "learning_rate": 1.2594458438287154e-05,
      "loss": 0.1398,
      "step": 19600
    },
    {
      "epoch": 49.62,
      "learning_rate": 7.5566750629722926e-06,
      "loss": 0.1365,
      "step": 19700
    },
    {
      "epoch": 49.87,
      "learning_rate": 2.5188916876574307e-06,
      "loss": 0.1407,
      "step": 19800
    },
    {
      "epoch": 50.0,
      "step": 19850,
      "total_flos": 1.4648767908102144e+17,
      "train_loss": 0.5778315416091034,
      "train_runtime": 26224.0587,
      "train_samples_per_second": 24.222,
      "train_steps_per_second": 0.757
    }
  ],
  "max_steps": 19850,
  "num_train_epochs": 50,
  "total_flos": 1.4648767908102144e+17,
  "trial_name": null,
  "trial_params": null
}