{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 12800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08,
      "learning_rate": 0.0001,
      "loss": 10.386,
      "step": 100
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001,
      "loss": 7.4061,
      "step": 200
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001,
      "loss": 6.4112,
      "step": 300
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0001,
      "loss": 6.091,
      "step": 400
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0001,
      "loss": 5.8571,
      "step": 500
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0001,
      "loss": 5.7144,
      "step": 600
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0001,
      "loss": 5.6228,
      "step": 700
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001,
      "loss": 5.546,
      "step": 800
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0001,
      "loss": 5.4904,
      "step": 900
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.0001,
      "loss": 5.4413,
      "step": 1000
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0001,
      "loss": 5.3603,
      "step": 1100
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.0001,
      "loss": 5.3346,
      "step": 1200
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0001,
      "loss": 5.2744,
      "step": 1300
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.0001,
      "loss": 5.231,
      "step": 1400
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.0001,
      "loss": 5.1782,
      "step": 1500
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0001,
      "loss": 5.1613,
      "step": 1600
    },
    {
      "epoch": 1.33,
      "learning_rate": 0.0001,
      "loss": 5.1091,
      "step": 1700
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.0001,
      "loss": 5.0939,
      "step": 1800
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.0001,
      "loss": 5.0667,
      "step": 1900
    },
    {
      "epoch": 1.56,
      "learning_rate": 0.0001,
      "loss": 5.0054,
      "step": 2000
    },
    {
      "epoch": 1.64,
      "learning_rate": 0.0001,
      "loss": 4.96,
      "step": 2100
    },
    {
      "epoch": 1.72,
      "learning_rate": 0.0001,
      "loss": 4.9301,
      "step": 2200
    },
    {
      "epoch": 1.8,
      "learning_rate": 0.0001,
      "loss": 4.9302,
      "step": 2300
    },
    {
      "epoch": 1.88,
      "learning_rate": 0.0001,
      "loss": 4.9129,
      "step": 2400
    },
    {
      "epoch": 1.95,
      "learning_rate": 0.0001,
      "loss": 4.8627,
      "step": 2500
    },
    {
      "epoch": 2.03,
      "learning_rate": 0.0001,
      "loss": 4.8336,
      "step": 2600
    },
    {
      "epoch": 2.11,
      "learning_rate": 0.0001,
      "loss": 4.8018,
      "step": 2700
    },
    {
      "epoch": 2.19,
      "learning_rate": 0.0001,
      "loss": 4.7479,
      "step": 2800
    },
    {
      "epoch": 2.27,
      "learning_rate": 0.0001,
      "loss": 4.7572,
      "step": 2900
    },
    {
      "epoch": 2.34,
      "learning_rate": 0.0001,
      "loss": 4.759,
      "step": 3000
    },
    {
      "epoch": 2.42,
      "learning_rate": 0.0001,
      "loss": 4.7441,
      "step": 3100
    },
    {
      "epoch": 2.5,
      "learning_rate": 0.0001,
      "loss": 4.7195,
      "step": 3200
    },
    {
      "epoch": 2.58,
      "learning_rate": 0.0001,
      "loss": 4.6821,
      "step": 3300
    },
    {
      "epoch": 2.66,
      "learning_rate": 0.0001,
      "loss": 4.6587,
      "step": 3400
    },
    {
      "epoch": 2.73,
      "learning_rate": 0.0001,
      "loss": 4.6228,
      "step": 3500
    },
    {
      "epoch": 2.81,
      "learning_rate": 0.0001,
      "loss": 4.6248,
      "step": 3600
    },
    {
      "epoch": 2.89,
      "learning_rate": 0.0001,
      "loss": 4.5801,
      "step": 3700
    },
    {
      "epoch": 2.97,
      "learning_rate": 0.0001,
      "loss": 4.5934,
      "step": 3800
    },
    {
      "epoch": 3.05,
      "learning_rate": 0.0001,
      "loss": 4.5781,
      "step": 3900
    },
    {
      "epoch": 3.12,
      "learning_rate": 0.0001,
      "loss": 4.5763,
      "step": 4000
    },
    {
      "epoch": 3.2,
      "learning_rate": 0.0001,
      "loss": 4.5598,
      "step": 4100
    },
    {
      "epoch": 3.28,
      "learning_rate": 0.0001,
      "loss": 4.4796,
      "step": 4200
    },
    {
      "epoch": 3.36,
      "learning_rate": 0.0001,
      "loss": 4.5204,
      "step": 4300
    },
    {
      "epoch": 3.44,
      "learning_rate": 0.0001,
      "loss": 4.4858,
      "step": 4400
    },
    {
      "epoch": 3.52,
      "learning_rate": 0.0001,
      "loss": 4.4882,
      "step": 4500
    },
    {
      "epoch": 3.59,
      "learning_rate": 0.0001,
      "loss": 4.4852,
      "step": 4600
    },
    {
      "epoch": 3.67,
      "learning_rate": 0.0001,
      "loss": 4.4302,
      "step": 4700
    },
    {
      "epoch": 3.75,
      "learning_rate": 0.0001,
      "loss": 4.4267,
      "step": 4800
    },
    {
      "epoch": 3.83,
      "learning_rate": 0.0001,
      "loss": 4.4298,
      "step": 4900
    },
    {
      "epoch": 3.91,
      "learning_rate": 0.0001,
      "loss": 4.3892,
      "step": 5000
    },
    {
      "epoch": 3.98,
      "learning_rate": 0.0001,
      "loss": 4.3886,
      "step": 5100
    },
    {
      "epoch": 4.06,
      "learning_rate": 0.0001,
      "loss": 4.3915,
      "step": 5200
    },
    {
      "epoch": 4.14,
      "learning_rate": 0.0001,
      "loss": 4.3706,
      "step": 5300
    },
    {
      "epoch": 4.22,
      "learning_rate": 0.0001,
      "loss": 4.3528,
      "step": 5400
    },
    {
      "epoch": 4.3,
      "learning_rate": 0.0001,
      "loss": 4.3272,
      "step": 5500
    },
    {
      "epoch": 4.38,
      "learning_rate": 0.0001,
      "loss": 4.3679,
      "step": 5600
    },
    {
      "epoch": 4.45,
      "learning_rate": 0.0001,
      "loss": 4.3608,
      "step": 5700
    },
    {
      "epoch": 4.53,
      "learning_rate": 0.0001,
      "loss": 4.3262,
      "step": 5800
    },
    {
      "epoch": 4.61,
      "learning_rate": 0.0001,
      "loss": 4.3097,
      "step": 5900
    },
    {
      "epoch": 4.69,
      "learning_rate": 0.0001,
      "loss": 4.2973,
      "step": 6000
    },
    {
      "epoch": 4.77,
      "learning_rate": 0.0001,
      "loss": 4.2756,
      "step": 6100
    },
    {
      "epoch": 4.84,
      "learning_rate": 0.0001,
      "loss": 4.3174,
      "step": 6200
    },
    {
      "epoch": 4.92,
      "learning_rate": 0.0001,
      "loss": 4.2786,
      "step": 6300
    },
    {
      "epoch": 5.0,
      "learning_rate": 0.0001,
      "loss": 4.2484,
      "step": 6400
    },
    {
      "epoch": 5.08,
      "learning_rate": 0.0001,
      "loss": 4.2634,
      "step": 6500
    },
    {
      "epoch": 5.16,
      "learning_rate": 0.0001,
      "loss": 4.228,
      "step": 6600
    },
    {
      "epoch": 5.23,
      "learning_rate": 0.0001,
      "loss": 4.2745,
      "step": 6700
    },
    {
      "epoch": 5.31,
      "learning_rate": 0.0001,
      "loss": 4.2297,
      "step": 6800
    },
    {
      "epoch": 5.39,
      "learning_rate": 0.0001,
      "loss": 4.1989,
      "step": 6900
    },
    {
      "epoch": 5.47,
      "learning_rate": 0.0001,
      "loss": 4.2074,
      "step": 7000
    },
    {
      "epoch": 5.55,
      "learning_rate": 0.0001,
      "loss": 4.201,
      "step": 7100
    },
    {
      "epoch": 5.62,
      "learning_rate": 0.0001,
      "loss": 4.2109,
      "step": 7200
    },
    {
      "epoch": 5.7,
      "learning_rate": 0.0001,
      "loss": 4.1945,
      "step": 7300
    },
    {
      "epoch": 5.78,
      "learning_rate": 0.0001,
      "loss": 4.1744,
      "step": 7400
    },
    {
      "epoch": 5.86,
      "learning_rate": 0.0001,
      "loss": 4.1741,
      "step": 7500
    },
    {
      "epoch": 5.94,
      "learning_rate": 0.0001,
      "loss": 4.19,
      "step": 7600
    },
    {
      "epoch": 6.02,
      "learning_rate": 0.0001,
      "loss": 4.1612,
      "step": 7700
    },
    {
      "epoch": 6.09,
      "learning_rate": 0.0001,
      "loss": 4.1559,
      "step": 7800
    },
    {
      "epoch": 6.17,
      "learning_rate": 0.0001,
      "loss": 4.1378,
      "step": 7900
    },
    {
      "epoch": 6.25,
      "learning_rate": 0.0001,
      "loss": 4.144,
      "step": 8000
    },
    {
      "epoch": 6.33,
      "learning_rate": 0.0001,
      "loss": 4.1601,
      "step": 8100
    },
    {
      "epoch": 6.41,
      "learning_rate": 0.0001,
      "loss": 4.1339,
      "step": 8200
    },
    {
      "epoch": 6.48,
      "learning_rate": 0.0001,
      "loss": 4.13,
      "step": 8300
    },
    {
      "epoch": 6.56,
      "learning_rate": 0.0001,
      "loss": 4.0877,
      "step": 8400
    },
    {
      "epoch": 6.64,
      "learning_rate": 0.0001,
      "loss": 4.1022,
      "step": 8500
    },
    {
      "epoch": 6.72,
      "learning_rate": 0.0001,
      "loss": 4.1034,
      "step": 8600
    },
    {
      "epoch": 6.8,
      "learning_rate": 0.0001,
      "loss": 4.091,
      "step": 8700
    },
    {
      "epoch": 6.88,
      "learning_rate": 0.0001,
      "loss": 4.0914,
      "step": 8800
    },
    {
      "epoch": 6.95,
      "learning_rate": 0.0001,
      "loss": 4.0937,
      "step": 8900
    },
    {
      "epoch": 7.03,
      "learning_rate": 0.0001,
      "loss": 4.0897,
      "step": 9000
    },
    {
      "epoch": 7.11,
      "learning_rate": 0.0001,
      "loss": 4.0844,
      "step": 9100
    },
    {
      "epoch": 7.19,
      "learning_rate": 0.0001,
      "loss": 4.0574,
      "step": 9200
    },
    {
      "epoch": 7.27,
      "learning_rate": 0.0001,
      "loss": 4.0837,
      "step": 9300
    },
    {
      "epoch": 7.34,
      "learning_rate": 0.0001,
      "loss": 4.0766,
      "step": 9400
    },
    {
      "epoch": 7.42,
      "learning_rate": 0.0001,
      "loss": 4.066,
      "step": 9500
    },
    {
      "epoch": 7.5,
      "learning_rate": 0.0001,
      "loss": 3.9944,
      "step": 9600
    },
    {
      "epoch": 7.58,
      "learning_rate": 0.0001,
      "loss": 4.0126,
      "step": 9700
    },
    {
      "epoch": 7.66,
      "learning_rate": 0.0001,
      "loss": 4.0273,
      "step": 9800
    },
    {
      "epoch": 7.73,
      "learning_rate": 0.0001,
      "loss": 4.0706,
      "step": 9900
    },
    {
      "epoch": 7.81,
      "learning_rate": 0.0001,
      "loss": 4.0008,
      "step": 10000
    },
    {
      "epoch": 7.89,
      "learning_rate": 0.0001,
      "loss": 4.0323,
      "step": 10100
    },
    {
      "epoch": 7.97,
      "learning_rate": 0.0001,
      "loss": 3.9864,
      "step": 10200
    },
    {
      "epoch": 8.05,
      "learning_rate": 0.0001,
      "loss": 4.0265,
      "step": 10300
    },
    {
      "epoch": 8.12,
      "learning_rate": 0.0001,
      "loss": 3.9874,
      "step": 10400
    },
    {
      "epoch": 8.2,
      "learning_rate": 0.0001,
      "loss": 3.9839,
      "step": 10500
    },
    {
      "epoch": 8.28,
      "learning_rate": 0.0001,
      "loss": 3.9583,
      "step": 10600
    },
    {
      "epoch": 8.36,
      "learning_rate": 0.0001,
      "loss": 4.007,
      "step": 10700
    },
    {
      "epoch": 8.44,
      "learning_rate": 0.0001,
      "loss": 3.9567,
      "step": 10800
    },
    {
      "epoch": 8.52,
      "learning_rate": 0.0001,
      "loss": 3.9888,
      "step": 10900
    },
    {
      "epoch": 8.59,
      "learning_rate": 0.0001,
      "loss": 3.9993,
      "step": 11000
    },
    {
      "epoch": 8.67,
      "learning_rate": 0.0001,
      "loss": 3.9914,
      "step": 11100
    },
    {
      "epoch": 8.75,
      "learning_rate": 0.0001,
      "loss": 4.0094,
      "step": 11200
    },
    {
      "epoch": 8.83,
      "learning_rate": 0.0001,
      "loss": 3.9338,
      "step": 11300
    },
    {
      "epoch": 8.91,
      "learning_rate": 0.0001,
      "loss": 3.9472,
      "step": 11400
    },
    {
      "epoch": 8.98,
      "learning_rate": 0.0001,
      "loss": 3.9595,
      "step": 11500
    },
    {
      "epoch": 9.06,
      "learning_rate": 0.0001,
      "loss": 3.9545,
      "step": 11600
    },
    {
      "epoch": 9.14,
      "learning_rate": 0.0001,
      "loss": 3.993,
      "step": 11700
    },
    {
      "epoch": 9.22,
      "learning_rate": 0.0001,
      "loss": 3.895,
      "step": 11800
    },
    {
      "epoch": 9.3,
      "learning_rate": 0.0001,
      "loss": 3.9527,
      "step": 11900
    },
    {
      "epoch": 9.38,
      "learning_rate": 0.0001,
      "loss": 3.9205,
      "step": 12000
    },
    {
      "epoch": 9.45,
      "learning_rate": 0.0001,
      "loss": 3.914,
      "step": 12100
    },
    {
      "epoch": 9.53,
      "learning_rate": 0.0001,
      "loss": 3.8872,
      "step": 12200
    },
    {
      "epoch": 9.61,
      "learning_rate": 0.0001,
      "loss": 3.9171,
      "step": 12300
    },
    {
      "epoch": 9.69,
      "learning_rate": 0.0001,
      "loss": 3.9148,
      "step": 12400
    },
    {
      "epoch": 9.77,
      "learning_rate": 0.0001,
      "loss": 3.9181,
      "step": 12500
    },
    {
      "epoch": 9.84,
      "learning_rate": 0.0001,
      "loss": 3.9545,
      "step": 12600
    },
    {
      "epoch": 9.92,
      "learning_rate": 0.0001,
      "loss": 3.877,
      "step": 12700
    },
    {
      "epoch": 10.0,
      "learning_rate": 0.0001,
      "loss": 3.894,
      "step": 12800
    },
    {
      "epoch": 10.0,
      "step": 12800,
      "total_flos": 9576109973975040.0,
      "train_loss": 4.495523297786713,
      "train_runtime": 7585.914,
      "train_samples_per_second": 107.976,
      "train_steps_per_second": 1.687
    }
  ],
  "max_steps": 12800,
  "num_train_epochs": 10,
  "total_flos": 9576109973975040.0,
  "trial_name": null,
  "trial_params": null
}