{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 49662,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 4.9496596995690874e-05,
      "loss": 4.191,
      "step": 500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.899319399138174e-05,
      "loss": 3.6438,
      "step": 1000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.848979098707262e-05,
      "loss": 3.5004,
      "step": 1500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.798638798276348e-05,
      "loss": 3.4531,
      "step": 2000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.748298497845435e-05,
      "loss": 3.3538,
      "step": 2500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.6979581974145225e-05,
      "loss": 3.306,
      "step": 3000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.6476178969836096e-05,
      "loss": 3.3472,
      "step": 3500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.597277596552697e-05,
      "loss": 3.27,
      "step": 4000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.546937296121783e-05,
      "loss": 3.2383,
      "step": 4500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.4965969956908704e-05,
      "loss": 3.267,
      "step": 5000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.4462566952599575e-05,
      "loss": 3.1768,
      "step": 5500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.395916394829045e-05,
      "loss": 3.2163,
      "step": 6000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.345576094398132e-05,
      "loss": 3.1776,
      "step": 6500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.295235793967219e-05,
      "loss": 3.1992,
      "step": 7000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.2448954935363055e-05,
      "loss": 3.1558,
      "step": 7500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.1945551931053926e-05,
      "loss": 3.1966,
      "step": 8000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.14421489267448e-05,
      "loss": 3.0803,
      "step": 8500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.093874592243567e-05,
      "loss": 3.1247,
      "step": 9000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.043534291812654e-05,
      "loss": 3.07,
      "step": 9500
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.9931939913817405e-05,
      "loss": 3.0711,
      "step": 10000
    },
    {
      "epoch": 0.42,
      "learning_rate": 3.942853690950828e-05,
      "loss": 3.0956,
      "step": 10500
    },
    {
      "epoch": 0.44,
      "learning_rate": 3.892513390519915e-05,
      "loss": 3.0501,
      "step": 11000
    },
    {
      "epoch": 0.46,
      "learning_rate": 3.842173090089002e-05,
      "loss": 3.0939,
      "step": 11500
    },
    {
      "epoch": 0.48,
      "learning_rate": 3.791832789658089e-05,
      "loss": 3.0085,
      "step": 12000
    },
    {
      "epoch": 0.5,
      "learning_rate": 3.7414924892271756e-05,
      "loss": 3.0134,
      "step": 12500
    },
    {
      "epoch": 0.52,
      "learning_rate": 3.691152188796263e-05,
      "loss": 2.9918,
      "step": 13000
    },
    {
      "epoch": 0.54,
      "learning_rate": 3.64081188836535e-05,
      "loss": 3.0535,
      "step": 13500
    },
    {
      "epoch": 0.56,
      "learning_rate": 3.590471587934437e-05,
      "loss": 3.0418,
      "step": 14000
    },
    {
      "epoch": 0.58,
      "learning_rate": 3.540131287503524e-05,
      "loss": 3.0113,
      "step": 14500
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.489790987072611e-05,
      "loss": 3.0049,
      "step": 15000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.439450686641698e-05,
      "loss": 2.972,
      "step": 15500
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.389110386210785e-05,
      "loss": 2.9912,
      "step": 16000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.338770085779872e-05,
      "loss": 2.9288,
      "step": 16500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.288429785348959e-05,
      "loss": 2.9458,
      "step": 17000
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.238089484918046e-05,
      "loss": 2.9472,
      "step": 17500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.187749184487133e-05,
      "loss": 2.9206,
      "step": 18000
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.137408884056221e-05,
      "loss": 2.9398,
      "step": 18500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.087068583625307e-05,
      "loss": 2.8974,
      "step": 19000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.0367282831943944e-05,
      "loss": 2.9299,
      "step": 19500
    },
    {
      "epoch": 0.81,
      "learning_rate": 2.986387982763481e-05,
      "loss": 2.9514,
      "step": 20000
    },
    {
      "epoch": 0.83,
      "learning_rate": 2.9360476823325683e-05,
      "loss": 2.9516,
      "step": 20500
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.8857073819016555e-05,
      "loss": 2.9539,
      "step": 21000
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.8353670814707423e-05,
      "loss": 2.8521,
      "step": 21500
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.7850267810398294e-05,
      "loss": 2.9156,
      "step": 22000
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.7346864806089162e-05,
      "loss": 2.9362,
      "step": 22500
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.6843461801780034e-05,
      "loss": 2.9097,
      "step": 23000
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.6340058797470905e-05,
      "loss": 2.8943,
      "step": 23500
    },
    {
      "epoch": 0.97,
      "learning_rate": 2.5836655793161773e-05,
      "loss": 2.8789,
      "step": 24000
    },
    {
      "epoch": 0.99,
      "learning_rate": 2.5333252788852645e-05,
      "loss": 2.8557,
      "step": 24500
    },
    {
      "epoch": 1.01,
      "learning_rate": 2.4829849784543516e-05,
      "loss": 2.8613,
      "step": 25000
    },
    {
      "epoch": 1.03,
      "learning_rate": 2.4326446780234385e-05,
      "loss": 2.824,
      "step": 25500
    },
    {
      "epoch": 1.05,
      "learning_rate": 2.3823043775925256e-05,
      "loss": 2.8315,
      "step": 26000
    },
    {
      "epoch": 1.07,
      "learning_rate": 2.3319640771616128e-05,
      "loss": 2.8681,
      "step": 26500
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.2816237767306996e-05,
      "loss": 2.8759,
      "step": 27000
    },
    {
      "epoch": 1.11,
      "learning_rate": 2.2312834762997867e-05,
      "loss": 2.8519,
      "step": 27500
    },
    {
      "epoch": 1.13,
      "learning_rate": 2.1809431758688735e-05,
      "loss": 2.8464,
      "step": 28000
    },
    {
      "epoch": 1.15,
      "learning_rate": 2.1306028754379607e-05,
      "loss": 2.8245,
      "step": 28500
    },
    {
      "epoch": 1.17,
      "learning_rate": 2.080262575007048e-05,
      "loss": 2.8525,
      "step": 29000
    },
    {
      "epoch": 1.19,
      "learning_rate": 2.029922274576135e-05,
      "loss": 2.8203,
      "step": 29500
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.9795819741452218e-05,
      "loss": 2.8114,
      "step": 30000
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.9292416737143086e-05,
      "loss": 2.8016,
      "step": 30500
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.8789013732833958e-05,
      "loss": 2.8404,
      "step": 31000
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.828561072852483e-05,
      "loss": 2.9037,
      "step": 31500
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.77822077242157e-05,
      "loss": 2.8383,
      "step": 32000
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.727880471990657e-05,
      "loss": 2.8172,
      "step": 32500
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.677540171559744e-05,
      "loss": 2.8394,
      "step": 33000
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.6271998711288312e-05,
      "loss": 2.8408,
      "step": 33500
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.576859570697918e-05,
      "loss": 2.8045,
      "step": 34000
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.526519270267005e-05,
      "loss": 2.7904,
      "step": 34500
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.476178969836092e-05,
      "loss": 2.8345,
      "step": 35000
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.425838669405179e-05,
      "loss": 2.8673,
      "step": 35500
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.3754983689742662e-05,
      "loss": 2.8761,
      "step": 36000
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.3251580685433532e-05,
      "loss": 2.7778,
      "step": 36500
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.2748177681124402e-05,
      "loss": 2.8206,
      "step": 37000
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.2244774676815272e-05,
      "loss": 2.8209,
      "step": 37500
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.1741371672506142e-05,
      "loss": 2.7834,
      "step": 38000
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.1237968668197011e-05,
      "loss": 2.8633,
      "step": 38500
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.0734565663887883e-05,
      "loss": 2.8087,
      "step": 39000
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.0231162659578753e-05,
      "loss": 2.7556,
      "step": 39500
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.727759655269624e-06,
      "loss": 2.7819,
      "step": 40000
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.224356650960492e-06,
      "loss": 2.8478,
      "step": 40500
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.720953646651364e-06,
      "loss": 2.8253,
      "step": 41000
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.217550642342234e-06,
      "loss": 2.7806,
      "step": 41500
    },
    {
      "epoch": 1.69,
      "learning_rate": 7.714147638033104e-06,
      "loss": 2.8194,
      "step": 42000
    },
    {
      "epoch": 1.71,
      "learning_rate": 7.210744633723974e-06,
      "loss": 2.775,
      "step": 42500
    },
    {
      "epoch": 1.73,
      "learning_rate": 6.707341629414844e-06,
      "loss": 2.7815,
      "step": 43000
    },
    {
      "epoch": 1.75,
      "learning_rate": 6.2039386251057155e-06,
      "loss": 2.7832,
      "step": 43500
    },
    {
      "epoch": 1.77,
      "learning_rate": 5.700535620796585e-06,
      "loss": 2.8161,
      "step": 44000
    },
    {
      "epoch": 1.79,
      "learning_rate": 5.197132616487455e-06,
      "loss": 2.7688,
      "step": 44500
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.693729612178326e-06,
      "loss": 2.7636,
      "step": 45000
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.1903266078691956e-06,
      "loss": 2.8307,
      "step": 45500
    },
    {
      "epoch": 1.85,
      "learning_rate": 3.686923603560066e-06,
      "loss": 2.7713,
      "step": 46000
    },
    {
      "epoch": 1.87,
      "learning_rate": 3.1835205992509364e-06,
      "loss": 2.7964,
      "step": 46500
    },
    {
      "epoch": 1.89,
      "learning_rate": 2.6801175949418067e-06,
      "loss": 2.8453,
      "step": 47000
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.176714590632677e-06,
      "loss": 2.8389,
      "step": 47500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.6733115863235473e-06,
      "loss": 2.7562,
      "step": 48000
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.1699085820144176e-06,
      "loss": 2.7791,
      "step": 48500
    },
    {
      "epoch": 1.97,
      "learning_rate": 6.665055777052878e-07,
      "loss": 2.8059,
      "step": 49000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6310257339615804e-07,
      "loss": 2.7683,
      "step": 49500
    },
    {
      "epoch": 2.0,
      "step": 49662,
      "total_flos": 6.899761783144858e+16,
      "train_loss": 2.9576020691869225,
      "train_runtime": 16825.8656,
      "train_samples_per_second": 11.806,
      "train_steps_per_second": 2.952
    }
  ],
  "max_steps": 49662,
  "num_train_epochs": 2,
  "total_flos": 6.899761783144858e+16,
  "trial_name": null,
  "trial_params": null
}