{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 37050,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 1.9730094466936575e-05,
      "loss": 0.2655,
      "step": 500
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9460188933873144e-05,
      "loss": 0.2339,
      "step": 1000
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9190283400809718e-05,
      "loss": 0.2239,
      "step": 1500
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.892037786774629e-05,
      "loss": 0.2216,
      "step": 2000
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8650472334682864e-05,
      "loss": 0.2176,
      "step": 2500
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8380566801619433e-05,
      "loss": 0.214,
      "step": 3000
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8110661268556007e-05,
      "loss": 0.2102,
      "step": 3500
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.784075573549258e-05,
      "loss": 0.2091,
      "step": 4000
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.757085020242915e-05,
      "loss": 0.2063,
      "step": 4500
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7300944669365723e-05,
      "loss": 0.2024,
      "step": 5000
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7031039136302296e-05,
      "loss": 0.2009,
      "step": 5500
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.676113360323887e-05,
      "loss": 0.2007,
      "step": 6000
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.649122807017544e-05,
      "loss": 0.1966,
      "step": 6500
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.622132253711201e-05,
      "loss": 0.199,
      "step": 7000
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5951417004048585e-05,
      "loss": 0.1877,
      "step": 7500
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.5681511470985154e-05,
      "loss": 0.1565,
      "step": 8000
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.541160593792173e-05,
      "loss": 0.1568,
      "step": 8500
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.5141700404858302e-05,
      "loss": 0.1563,
      "step": 9000
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.4871794871794874e-05,
      "loss": 0.1576,
      "step": 9500
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.4601889338731445e-05,
      "loss": 0.1572,
      "step": 10000
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.4331983805668017e-05,
      "loss": 0.1574,
      "step": 10500
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.406207827260459e-05,
      "loss": 0.1579,
      "step": 11000
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.3792172739541161e-05,
      "loss": 0.1585,
      "step": 11500
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.3522267206477734e-05,
      "loss": 0.1568,
      "step": 12000
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.3252361673414307e-05,
      "loss": 0.1572,
      "step": 12500
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.2982456140350879e-05,
      "loss": 0.1569,
      "step": 13000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.271255060728745e-05,
      "loss": 0.1587,
      "step": 13500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.2442645074224023e-05,
      "loss": 0.1568,
      "step": 14000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.2172739541160595e-05,
      "loss": 0.1566,
      "step": 14500
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.1902834008097166e-05,
      "loss": 0.1431,
      "step": 15000
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.163292847503374e-05,
      "loss": 0.1183,
      "step": 15500
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.1363022941970312e-05,
      "loss": 0.1194,
      "step": 16000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.1093117408906884e-05,
      "loss": 0.1198,
      "step": 16500
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.0823211875843455e-05,
      "loss": 0.12,
      "step": 17000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0553306342780028e-05,
      "loss": 0.1219,
      "step": 17500
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.02834008097166e-05,
      "loss": 0.1211,
      "step": 18000
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.0013495276653171e-05,
      "loss": 0.1219,
      "step": 18500
    },
    {
      "epoch": 2.56,
      "learning_rate": 9.743589743589744e-06,
      "loss": 0.121,
      "step": 19000
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.473684210526315e-06,
      "loss": 0.1223,
      "step": 19500
    },
    {
      "epoch": 2.7,
      "learning_rate": 9.203778677462889e-06,
      "loss": 0.1206,
      "step": 20000
    },
    {
      "epoch": 2.77,
      "learning_rate": 8.93387314439946e-06,
      "loss": 0.1192,
      "step": 20500
    },
    {
      "epoch": 2.83,
      "learning_rate": 8.663967611336033e-06,
      "loss": 0.1217,
      "step": 21000
    },
    {
      "epoch": 2.9,
      "learning_rate": 8.394062078272606e-06,
      "loss": 0.12,
      "step": 21500
    },
    {
      "epoch": 2.97,
      "learning_rate": 8.124156545209178e-06,
      "loss": 0.1216,
      "step": 22000
    },
    {
      "epoch": 3.04,
      "learning_rate": 7.854251012145749e-06,
      "loss": 0.1055,
      "step": 22500
    },
    {
      "epoch": 3.1,
      "learning_rate": 7.584345479082322e-06,
      "loss": 0.0923,
      "step": 23000
    },
    {
      "epoch": 3.17,
      "learning_rate": 7.314439946018894e-06,
      "loss": 0.0926,
      "step": 23500
    },
    {
      "epoch": 3.24,
      "learning_rate": 7.044534412955466e-06,
      "loss": 0.0953,
      "step": 24000
    },
    {
      "epoch": 3.31,
      "learning_rate": 6.774628879892038e-06,
      "loss": 0.0951,
      "step": 24500
    },
    {
      "epoch": 3.37,
      "learning_rate": 6.504723346828611e-06,
      "loss": 0.0945,
      "step": 25000
    },
    {
      "epoch": 3.44,
      "learning_rate": 6.234817813765183e-06,
      "loss": 0.0949,
      "step": 25500
    },
    {
      "epoch": 3.51,
      "learning_rate": 5.964912280701755e-06,
      "loss": 0.0959,
      "step": 26000
    },
    {
      "epoch": 3.58,
      "learning_rate": 5.695006747638327e-06,
      "loss": 0.0946,
      "step": 26500
    },
    {
      "epoch": 3.64,
      "learning_rate": 5.425101214574899e-06,
      "loss": 0.0948,
      "step": 27000
    },
    {
      "epoch": 3.71,
      "learning_rate": 5.155195681511471e-06,
      "loss": 0.0939,
      "step": 27500
    },
    {
      "epoch": 3.78,
      "learning_rate": 4.885290148448044e-06,
      "loss": 0.0963,
      "step": 28000
    },
    {
      "epoch": 3.85,
      "learning_rate": 4.615384615384616e-06,
      "loss": 0.0946,
      "step": 28500
    },
    {
      "epoch": 3.91,
      "learning_rate": 4.3454790823211876e-06,
      "loss": 0.0939,
      "step": 29000
    },
    {
      "epoch": 3.98,
      "learning_rate": 4.075573549257761e-06,
      "loss": 0.0953,
      "step": 29500
    },
    {
      "epoch": 4.05,
      "learning_rate": 3.805668016194332e-06,
      "loss": 0.082,
      "step": 30000
    },
    {
      "epoch": 4.12,
      "learning_rate": 3.5357624831309043e-06,
      "loss": 0.0763,
      "step": 30500
    },
    {
      "epoch": 4.18,
      "learning_rate": 3.265856950067476e-06,
      "loss": 0.0773,
      "step": 31000
    },
    {
      "epoch": 4.25,
      "learning_rate": 2.995951417004049e-06,
      "loss": 0.0776,
      "step": 31500
    },
    {
      "epoch": 4.32,
      "learning_rate": 2.726045883940621e-06,
      "loss": 0.0785,
      "step": 32000
    },
    {
      "epoch": 4.39,
      "learning_rate": 2.456140350877193e-06,
      "loss": 0.0782,
      "step": 32500
    },
    {
      "epoch": 4.45,
      "learning_rate": 2.1862348178137656e-06,
      "loss": 0.0787,
      "step": 33000
    },
    {
      "epoch": 4.52,
      "learning_rate": 1.9163292847503375e-06,
      "loss": 0.0775,
      "step": 33500
    },
    {
      "epoch": 4.59,
      "learning_rate": 1.6464237516869097e-06,
      "loss": 0.0774,
      "step": 34000
    },
    {
      "epoch": 4.66,
      "learning_rate": 1.3765182186234818e-06,
      "loss": 0.0774,
      "step": 34500
    },
    {
      "epoch": 4.72,
      "learning_rate": 1.106612685560054e-06,
      "loss": 0.0772,
      "step": 35000
    },
    {
      "epoch": 4.79,
      "learning_rate": 8.367071524966262e-07,
      "loss": 0.0774,
      "step": 35500
    },
    {
      "epoch": 4.86,
      "learning_rate": 5.668016194331984e-07,
      "loss": 0.0775,
      "step": 36000
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.968960863697706e-07,
      "loss": 0.0767,
      "step": 36500
    },
    {
      "epoch": 4.99,
      "learning_rate": 2.6990553306342783e-08,
      "loss": 0.0775,
      "step": 37000
    },
    {
      "epoch": 5.0,
      "step": 37050,
      "total_flos": 7.797868392139008e+17,
      "train_loss": 0.13262820883479357,
      "train_runtime": 27627.1148,
      "train_samples_per_second": 429.103,
      "train_steps_per_second": 1.341
    }
  ],
  "max_steps": 37050,
  "num_train_epochs": 5,
  "total_flos": 7.797868392139008e+17,
  "trial_name": null,
  "trial_params": null
}