{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 58400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 2.974315068493151e-05,
      "loss": 2.7124,
      "step": 500
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.9486301369863017e-05,
      "loss": 1.247,
      "step": 1000
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.922945205479452e-05,
      "loss": 1.0737,
      "step": 1500
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.8972602739726026e-05,
      "loss": 0.9364,
      "step": 2000
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.8715753424657534e-05,
      "loss": 0.9113,
      "step": 2500
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.8458904109589042e-05,
      "loss": 0.8625,
      "step": 3000
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.820205479452055e-05,
      "loss": 0.8349,
      "step": 3500
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.7945205479452054e-05,
      "loss": 0.7934,
      "step": 4000
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.7688356164383562e-05,
      "loss": 0.7953,
      "step": 4500
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.743150684931507e-05,
      "loss": 0.7222,
      "step": 5000
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.7174657534246575e-05,
      "loss": 0.7703,
      "step": 5500
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.6917808219178083e-05,
      "loss": 0.7948,
      "step": 6000
    },
    {
      "epoch": 0.22,
      "learning_rate": 2.666095890410959e-05,
      "loss": 0.7789,
      "step": 6500
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.6404109589041096e-05,
      "loss": 0.7879,
      "step": 7000
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.6147260273972604e-05,
      "loss": 0.7719,
      "step": 7500
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.589041095890411e-05,
      "loss": 0.7402,
      "step": 8000
    },
    {
      "epoch": 0.29,
      "learning_rate": 2.563356164383562e-05,
      "loss": 0.7068,
      "step": 8500
    },
    {
      "epoch": 0.31,
      "learning_rate": 2.5376712328767124e-05,
      "loss": 0.6811,
      "step": 9000
    },
    {
      "epoch": 0.33,
      "learning_rate": 2.511986301369863e-05,
      "loss": 0.7144,
      "step": 9500
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.4863013698630137e-05,
      "loss": 0.7241,
      "step": 10000
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.4606164383561645e-05,
      "loss": 0.739,
      "step": 10500
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.4349315068493153e-05,
      "loss": 0.7408,
      "step": 11000
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.409246575342466e-05,
      "loss": 0.7175,
      "step": 11500
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.3835616438356165e-05,
      "loss": 0.7625,
      "step": 12000
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.357876712328767e-05,
      "loss": 0.6987,
      "step": 12500
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.3321917808219178e-05,
      "loss": 0.684,
      "step": 13000
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.3065068493150686e-05,
      "loss": 0.7049,
      "step": 13500
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.2808219178082194e-05,
      "loss": 0.669,
      "step": 14000
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.25513698630137e-05,
      "loss": 0.6912,
      "step": 14500
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.2294520547945206e-05,
      "loss": 0.6724,
      "step": 15000
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.203767123287671e-05,
      "loss": 0.6429,
      "step": 15500
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.178082191780822e-05,
      "loss": 0.6582,
      "step": 16000
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.1523972602739727e-05,
      "loss": 0.6778,
      "step": 16500
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.126712328767123e-05,
      "loss": 0.6743,
      "step": 17000
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.101027397260274e-05,
      "loss": 0.6421,
      "step": 17500
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.0753424657534248e-05,
      "loss": 0.6484,
      "step": 18000
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.0496575342465756e-05,
      "loss": 0.6851,
      "step": 18500
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.023972602739726e-05,
      "loss": 0.7124,
      "step": 19000
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.9982876712328768e-05,
      "loss": 0.6624,
      "step": 19500
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.9726027397260273e-05,
      "loss": 0.6519,
      "step": 20000
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.946917808219178e-05,
      "loss": 0.6528,
      "step": 20500
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.921232876712329e-05,
      "loss": 0.6881,
      "step": 21000
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.8955479452054797e-05,
      "loss": 0.6608,
      "step": 21500
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.8698630136986305e-05,
      "loss": 0.6633,
      "step": 22000
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.8441780821917806e-05,
      "loss": 0.6357,
      "step": 22500
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.8184931506849314e-05,
      "loss": 0.6411,
      "step": 23000
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7928082191780822e-05,
      "loss": 0.6579,
      "step": 23500
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.767123287671233e-05,
      "loss": 0.6476,
      "step": 24000
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.7414383561643838e-05,
      "loss": 0.633,
      "step": 24500
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.7157534246575342e-05,
      "loss": 0.7078,
      "step": 25000
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.690068493150685e-05,
      "loss": 0.6676,
      "step": 25500
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6643835616438355e-05,
      "loss": 0.6343,
      "step": 26000
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6386986301369863e-05,
      "loss": 0.6723,
      "step": 26500
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.613013698630137e-05,
      "loss": 0.6235,
      "step": 27000
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.5873287671232876e-05,
      "loss": 0.6666,
      "step": 27500
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5616438356164384e-05,
      "loss": 0.7011,
      "step": 28000
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.535958904109589e-05,
      "loss": 0.6765,
      "step": 28500
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5102739726027398e-05,
      "loss": 0.6493,
      "step": 29000
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.4845890410958904e-05,
      "loss": 0.5903,
      "step": 29500
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.4589041095890412e-05,
      "loss": 0.6206,
      "step": 30000
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.4332191780821918e-05,
      "loss": 0.5903,
      "step": 30500
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.4075342465753425e-05,
      "loss": 0.6578,
      "step": 31000
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.3818493150684933e-05,
      "loss": 0.6314,
      "step": 31500
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.3561643835616437e-05,
      "loss": 0.6242,
      "step": 32000
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.3304794520547945e-05,
      "loss": 0.6273,
      "step": 32500
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.3047945205479453e-05,
      "loss": 0.6294,
      "step": 33000
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.279109589041096e-05,
      "loss": 0.6141,
      "step": 33500
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.2534246575342466e-05,
      "loss": 0.5828,
      "step": 34000
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.2277397260273974e-05,
      "loss": 0.577,
      "step": 34500
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.202054794520548e-05,
      "loss": 0.6136,
      "step": 35000
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.1763698630136986e-05,
      "loss": 0.6233,
      "step": 35500
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.1506849315068493e-05,
      "loss": 0.5801,
      "step": 36000
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.125e-05,
      "loss": 0.6176,
      "step": 36500
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.0993150684931507e-05,
      "loss": 0.6007,
      "step": 37000
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.0736301369863013e-05,
      "loss": 0.5864,
      "step": 37500
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.0479452054794521e-05,
      "loss": 0.651,
      "step": 38000
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.0222602739726028e-05,
      "loss": 0.5897,
      "step": 38500
    },
    {
      "epoch": 1.34,
      "learning_rate": 9.965753424657534e-06,
      "loss": 0.5907,
      "step": 39000
    },
    {
      "epoch": 1.35,
      "learning_rate": 9.708904109589042e-06,
      "loss": 0.631,
      "step": 39500
    },
    {
      "epoch": 1.37,
      "learning_rate": 9.452054794520548e-06,
      "loss": 0.6517,
      "step": 40000
    },
    {
      "epoch": 1.39,
      "learning_rate": 9.195205479452054e-06,
      "loss": 0.5801,
      "step": 40500
    },
    {
      "epoch": 1.4,
      "learning_rate": 8.938356164383562e-06,
      "loss": 0.6225,
      "step": 41000
    },
    {
      "epoch": 1.42,
      "learning_rate": 8.681506849315069e-06,
      "loss": 0.6194,
      "step": 41500
    },
    {
      "epoch": 1.44,
      "learning_rate": 8.424657534246577e-06,
      "loss": 0.5939,
      "step": 42000
    },
    {
      "epoch": 1.46,
      "learning_rate": 8.167808219178081e-06,
      "loss": 0.6029,
      "step": 42500
    },
    {
      "epoch": 1.47,
      "learning_rate": 7.91095890410959e-06,
      "loss": 0.5907,
      "step": 43000
    },
    {
      "epoch": 1.49,
      "learning_rate": 7.654109589041097e-06,
      "loss": 0.582,
      "step": 43500
    },
    {
      "epoch": 1.51,
      "learning_rate": 7.397260273972603e-06,
      "loss": 0.5766,
      "step": 44000
    },
    {
      "epoch": 1.52,
      "learning_rate": 7.14041095890411e-06,
      "loss": 0.6133,
      "step": 44500
    },
    {
      "epoch": 1.54,
      "learning_rate": 6.883561643835617e-06,
      "loss": 0.5998,
      "step": 45000
    },
    {
      "epoch": 1.56,
      "learning_rate": 6.626712328767123e-06,
      "loss": 0.6405,
      "step": 45500
    },
    {
      "epoch": 1.58,
      "learning_rate": 6.3698630136986296e-06,
      "loss": 0.5943,
      "step": 46000
    },
    {
      "epoch": 1.59,
      "learning_rate": 6.1130136986301376e-06,
      "loss": 0.6153,
      "step": 46500
    },
    {
      "epoch": 1.61,
      "learning_rate": 5.856164383561644e-06,
      "loss": 0.5876,
      "step": 47000
    },
    {
      "epoch": 1.63,
      "learning_rate": 5.599315068493151e-06,
      "loss": 0.6071,
      "step": 47500
    },
    {
      "epoch": 1.64,
      "learning_rate": 5.342465753424657e-06,
      "loss": 0.5965,
      "step": 48000
    },
    {
      "epoch": 1.66,
      "learning_rate": 5.085616438356165e-06,
      "loss": 0.586,
      "step": 48500
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.8287671232876716e-06,
      "loss": 0.5812,
      "step": 49000
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.571917808219178e-06,
      "loss": 0.6407,
      "step": 49500
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.315068493150685e-06,
      "loss": 0.6125,
      "step": 50000
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.058219178082192e-06,
      "loss": 0.5673,
      "step": 50500
    },
    {
      "epoch": 1.75,
      "learning_rate": 3.801369863013699e-06,
      "loss": 0.5724,
      "step": 51000
    },
    {
      "epoch": 1.76,
      "learning_rate": 3.5445205479452056e-06,
      "loss": 0.5494,
      "step": 51500
    },
    {
      "epoch": 1.78,
      "learning_rate": 3.2876712328767123e-06,
      "loss": 0.6055,
      "step": 52000
    },
    {
      "epoch": 1.8,
      "learning_rate": 3.0308219178082194e-06,
      "loss": 0.5904,
      "step": 52500
    },
    {
      "epoch": 1.82,
      "learning_rate": 2.773972602739726e-06,
      "loss": 0.6092,
      "step": 53000
    },
    {
      "epoch": 1.83,
      "learning_rate": 2.517123287671233e-06,
      "loss": 0.6142,
      "step": 53500
    },
    {
      "epoch": 1.85,
      "learning_rate": 2.2602739726027396e-06,
      "loss": 0.6156,
      "step": 54000
    },
    {
      "epoch": 1.87,
      "learning_rate": 2.0034246575342467e-06,
      "loss": 0.6119,
      "step": 54500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.7465753424657534e-06,
      "loss": 0.603,
      "step": 55000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.4897260273972603e-06,
      "loss": 0.6102,
      "step": 55500
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.232876712328767e-06,
      "loss": 0.6271,
      "step": 56000
    },
    {
      "epoch": 1.93,
      "learning_rate": 9.76027397260274e-07,
      "loss": 0.5944,
      "step": 56500
    },
    {
      "epoch": 1.95,
      "learning_rate": 7.191780821917808e-07,
      "loss": 0.6215,
      "step": 57000
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.6232876712328767e-07,
      "loss": 0.5623,
      "step": 57500
    },
    {
      "epoch": 1.99,
      "learning_rate": 2.0547945205479452e-07,
      "loss": 0.568,
      "step": 58000
    },
    {
      "epoch": 2.0,
      "step": 58400,
      "total_flos": 7.731654113627136e+16,
      "train_loss": 0.1063714675380759,
      "train_runtime": 3374.8833,
      "train_samples_per_second": 51.912,
      "train_steps_per_second": 17.304
    }
  ],
  "max_steps": 58400,
  "num_train_epochs": 2,
  "total_flos": 7.731654113627136e+16,
  "trial_name": null,
  "trial_params": null
}