{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9999916130601427,
  "eval_steps": 500,
  "global_step": 3726,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 9.86580783682233e-05,
      "loss": 3.6768,
      "step": 50
    },
    {
      "epoch": 0.03,
      "learning_rate": 9.731615673644659e-05,
      "loss": 3.2865,
      "step": 100
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.597423510466989e-05,
      "loss": 3.1518,
      "step": 150
    },
    {
      "epoch": 0.05,
      "learning_rate": 9.463231347289319e-05,
      "loss": 3.1091,
      "step": 200
    },
    {
      "epoch": 0.07,
      "learning_rate": 9.329039184111649e-05,
      "loss": 3.0456,
      "step": 250
    },
    {
      "epoch": 0.08,
      "learning_rate": 9.194847020933978e-05,
      "loss": 3.0357,
      "step": 300
    },
    {
      "epoch": 0.09,
      "learning_rate": 9.060654857756307e-05,
      "loss": 2.9846,
      "step": 350
    },
    {
      "epoch": 0.11,
      "learning_rate": 8.926462694578636e-05,
      "loss": 2.9834,
      "step": 400
    },
    {
      "epoch": 0.12,
      "learning_rate": 8.792270531400967e-05,
      "loss": 2.9385,
      "step": 450
    },
    {
      "epoch": 0.13,
      "learning_rate": 8.658078368223296e-05,
      "loss": 2.8967,
      "step": 500
    },
    {
      "epoch": 0.15,
      "learning_rate": 8.523886205045626e-05,
      "loss": 2.886,
      "step": 550
    },
    {
      "epoch": 0.16,
      "learning_rate": 8.389694041867955e-05,
      "loss": 2.8671,
      "step": 600
    },
    {
      "epoch": 0.17,
      "learning_rate": 8.255501878690284e-05,
      "loss": 2.8547,
      "step": 650
    },
    {
      "epoch": 0.19,
      "learning_rate": 8.121309715512614e-05,
      "loss": 2.8396,
      "step": 700
    },
    {
      "epoch": 0.2,
      "learning_rate": 7.987117552334944e-05,
      "loss": 2.8531,
      "step": 750
    },
    {
      "epoch": 0.21,
      "learning_rate": 7.852925389157274e-05,
      "loss": 2.8196,
      "step": 800
    },
    {
      "epoch": 0.23,
      "learning_rate": 7.718733225979603e-05,
      "loss": 2.7921,
      "step": 850
    },
    {
      "epoch": 0.24,
      "learning_rate": 7.584541062801933e-05,
      "loss": 2.7596,
      "step": 900
    },
    {
      "epoch": 0.25,
      "learning_rate": 7.450348899624262e-05,
      "loss": 2.7918,
      "step": 950
    },
    {
      "epoch": 0.27,
      "learning_rate": 7.316156736446593e-05,
      "loss": 2.7553,
      "step": 1000
    },
    {
      "epoch": 0.28,
      "learning_rate": 7.181964573268921e-05,
      "loss": 2.7914,
      "step": 1050
    },
    {
      "epoch": 0.3,
      "learning_rate": 7.047772410091251e-05,
      "loss": 2.7924,
      "step": 1100
    },
    {
      "epoch": 0.31,
      "learning_rate": 6.91358024691358e-05,
      "loss": 2.7823,
      "step": 1150
    },
    {
      "epoch": 0.32,
      "learning_rate": 6.77938808373591e-05,
      "loss": 2.7437,
      "step": 1200
    },
    {
      "epoch": 0.34,
      "learning_rate": 6.64519592055824e-05,
      "loss": 2.7404,
      "step": 1250
    },
    {
      "epoch": 0.35,
      "learning_rate": 6.51100375738057e-05,
      "loss": 2.7318,
      "step": 1300
    },
    {
      "epoch": 0.36,
      "learning_rate": 6.376811594202898e-05,
      "loss": 2.708,
      "step": 1350
    },
    {
      "epoch": 0.38,
      "learning_rate": 6.242619431025228e-05,
      "loss": 2.7579,
      "step": 1400
    },
    {
      "epoch": 0.39,
      "learning_rate": 6.108427267847558e-05,
      "loss": 2.7037,
      "step": 1450
    },
    {
      "epoch": 0.4,
      "learning_rate": 5.9742351046698876e-05,
      "loss": 2.7326,
      "step": 1500
    },
    {
      "epoch": 0.42,
      "learning_rate": 5.8400429414922176e-05,
      "loss": 2.7252,
      "step": 1550
    },
    {
      "epoch": 0.43,
      "learning_rate": 5.705850778314546e-05,
      "loss": 2.7263,
      "step": 1600
    },
    {
      "epoch": 0.44,
      "learning_rate": 5.571658615136877e-05,
      "loss": 2.6944,
      "step": 1650
    },
    {
      "epoch": 0.46,
      "learning_rate": 5.4374664519592054e-05,
      "loss": 2.7292,
      "step": 1700
    },
    {
      "epoch": 0.47,
      "learning_rate": 5.3032742887815354e-05,
      "loss": 2.7156,
      "step": 1750
    },
    {
      "epoch": 0.48,
      "learning_rate": 5.1690821256038647e-05,
      "loss": 2.6852,
      "step": 1800
    },
    {
      "epoch": 0.5,
      "learning_rate": 5.0348899624261946e-05,
      "loss": 2.6922,
      "step": 1850
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.9006977992485246e-05,
      "loss": 2.7217,
      "step": 1900
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.766505636070854e-05,
      "loss": 2.6923,
      "step": 1950
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.632313472893183e-05,
      "loss": 2.6993,
      "step": 2000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.498121309715513e-05,
      "loss": 2.7156,
      "step": 2050
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.3639291465378424e-05,
      "loss": 2.6932,
      "step": 2100
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.2297369833601716e-05,
      "loss": 2.714,
      "step": 2150
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0955448201825016e-05,
      "loss": 2.6921,
      "step": 2200
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.961352657004831e-05,
      "loss": 2.6949,
      "step": 2250
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.82716049382716e-05,
      "loss": 2.694,
      "step": 2300
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.69296833064949e-05,
      "loss": 2.6781,
      "step": 2350
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.55877616747182e-05,
      "loss": 2.6932,
      "step": 2400
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.4245840042941493e-05,
      "loss": 2.6748,
      "step": 2450
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.290391841116479e-05,
      "loss": 2.6861,
      "step": 2500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.1561996779388086e-05,
      "loss": 2.6891,
      "step": 2550
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.0220075147611382e-05,
      "loss": 2.7091,
      "step": 2600
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.8878153515834678e-05,
      "loss": 2.6993,
      "step": 2650
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.753623188405797e-05,
      "loss": 2.6827,
      "step": 2700
    },
    {
      "epoch": 0.74,
      "learning_rate": 2.6194310252281267e-05,
      "loss": 2.6876,
      "step": 2750
    },
    {
      "epoch": 0.75,
      "learning_rate": 2.4852388620504563e-05,
      "loss": 2.6812,
      "step": 2800
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.351046698872786e-05,
      "loss": 2.6936,
      "step": 2850
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.2168545356951156e-05,
      "loss": 2.6579,
      "step": 2900
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.0826623725174452e-05,
      "loss": 2.6786,
      "step": 2950
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.9484702093397748e-05,
      "loss": 2.6974,
      "step": 3000
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.814278046162104e-05,
      "loss": 2.6818,
      "step": 3050
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6800858829844337e-05,
      "loss": 2.6742,
      "step": 3100
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.5458937198067633e-05,
      "loss": 2.6748,
      "step": 3150
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.4117015566290927e-05,
      "loss": 2.6771,
      "step": 3200
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.2775093934514227e-05,
      "loss": 2.6895,
      "step": 3250
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.143317230273752e-05,
      "loss": 2.6663,
      "step": 3300
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.0091250670960816e-05,
      "loss": 2.6485,
      "step": 3350
    },
    {
      "epoch": 0.91,
      "learning_rate": 8.749329039184112e-06,
      "loss": 2.6825,
      "step": 3400
    },
    {
      "epoch": 0.93,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 2.6832,
      "step": 3450
    },
    {
      "epoch": 0.94,
      "learning_rate": 6.065485775630704e-06,
      "loss": 2.7134,
      "step": 3500
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.723564143853999e-06,
      "loss": 2.6631,
      "step": 3550
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3816425120772947e-06,
      "loss": 2.6862,
      "step": 3600
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.0397208803005905e-06,
      "loss": 2.6663,
      "step": 3650
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.977992485238862e-07,
      "loss": 2.6592,
      "step": 3700
    },
    {
      "epoch": 1.0,
      "step": 3726,
      "total_flos": 2.4427952859827405e+17,
      "train_loss": 2.7717256223521947,
      "train_runtime": 3102.8464,
      "train_samples_per_second": 38.427,
      "train_steps_per_second": 1.201
    }
  ],
  "logging_steps": 50,
  "max_steps": 3726,
  "num_train_epochs": 1,
  "save_steps": 5000,
  "total_flos": 2.4427952859827405e+17,
  "trial_name": null,
  "trial_params": null
}