{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.999937252933425,
  "global_step": 39840,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.13,
      "learning_rate": 2.9636044176706825e-06,
      "loss": 2.8976,
      "step": 500
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.927208835341366e-06,
      "loss": 2.6491,
      "step": 1000
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.8908132530120482e-06,
      "loss": 2.5824,
      "step": 1500
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.854417670682731e-06,
      "loss": 2.5408,
      "step": 2000
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.8180220883534135e-06,
      "loss": 2.5086,
      "step": 2500
    },
    {
      "epoch": 0.75,
      "learning_rate": 2.7816265060240964e-06,
      "loss": 2.4829,
      "step": 3000
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.7452309236947792e-06,
      "loss": 2.4628,
      "step": 3500
    },
    {
      "epoch": 1.0,
      "eval_loss": 2.4354708194732666,
      "eval_runtime": 73.1046,
      "eval_samples_per_second": 281.843,
      "step": 3984
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.7088353413654617e-06,
      "loss": 2.4451,
      "step": 4000
    },
    {
      "epoch": 1.13,
      "learning_rate": 2.672439759036145e-06,
      "loss": 2.4269,
      "step": 4500
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.6360441767068274e-06,
      "loss": 2.4118,
      "step": 5000
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.5996485943775102e-06,
      "loss": 2.3986,
      "step": 5500
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.5632530120481927e-06,
      "loss": 2.387,
      "step": 6000
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.5268574297188755e-06,
      "loss": 2.376,
      "step": 6500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.4904618473895584e-06,
      "loss": 2.366,
      "step": 7000
    },
    {
      "epoch": 1.88,
      "learning_rate": 2.454066265060241e-06,
      "loss": 2.3572,
      "step": 7500
    },
    {
      "epoch": 2.0,
      "eval_loss": 2.3434979915618896,
      "eval_runtime": 73.6648,
      "eval_samples_per_second": 279.699,
      "step": 7968
    },
    {
      "epoch": 2.01,
      "learning_rate": 2.4176706827309237e-06,
      "loss": 2.3493,
      "step": 8000
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.3812751004016065e-06,
      "loss": 2.3394,
      "step": 8500
    },
    {
      "epoch": 2.26,
      "learning_rate": 2.344879518072289e-06,
      "loss": 2.3331,
      "step": 9000
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.308483935742972e-06,
      "loss": 2.3268,
      "step": 9500
    },
    {
      "epoch": 2.51,
      "learning_rate": 2.2720883534136547e-06,
      "loss": 2.3215,
      "step": 10000
    },
    {
      "epoch": 2.64,
      "learning_rate": 2.2356927710843376e-06,
      "loss": 2.3149,
      "step": 10500
    },
    {
      "epoch": 2.76,
      "learning_rate": 2.19929718875502e-06,
      "loss": 2.3102,
      "step": 11000
    },
    {
      "epoch": 2.89,
      "learning_rate": 2.162901606425703e-06,
      "loss": 2.3042,
      "step": 11500
    },
    {
      "epoch": 3.0,
      "eval_loss": 2.2957260608673096,
      "eval_runtime": 73.5103,
      "eval_samples_per_second": 280.287,
      "step": 11952
    },
    {
      "epoch": 3.01,
      "learning_rate": 2.1265060240963857e-06,
      "loss": 2.3003,
      "step": 12000
    },
    {
      "epoch": 3.14,
      "learning_rate": 2.090110441767068e-06,
      "loss": 2.2937,
      "step": 12500
    },
    {
      "epoch": 3.26,
      "learning_rate": 2.053714859437751e-06,
      "loss": 2.2903,
      "step": 13000
    },
    {
      "epoch": 3.39,
      "learning_rate": 2.017319277108434e-06,
      "loss": 2.2849,
      "step": 13500
    },
    {
      "epoch": 3.51,
      "learning_rate": 1.9809236947791167e-06,
      "loss": 2.2819,
      "step": 14000
    },
    {
      "epoch": 3.64,
      "learning_rate": 1.9445281124497996e-06,
      "loss": 2.2775,
      "step": 14500
    },
    {
      "epoch": 3.77,
      "learning_rate": 1.908132530120482e-06,
      "loss": 2.2734,
      "step": 15000
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.871736947791165e-06,
      "loss": 2.2714,
      "step": 15500
    },
    {
      "epoch": 4.0,
      "eval_loss": 2.2644200325012207,
      "eval_runtime": 73.6047,
      "eval_samples_per_second": 279.928,
      "step": 15936
    },
    {
      "epoch": 4.02,
      "learning_rate": 1.8353413654618473e-06,
      "loss": 2.2707,
      "step": 16000
    },
    {
      "epoch": 4.14,
      "learning_rate": 1.7989457831325302e-06,
      "loss": 2.265,
      "step": 16500
    },
    {
      "epoch": 4.27,
      "learning_rate": 1.7625502008032132e-06,
      "loss": 2.2617,
      "step": 17000
    },
    {
      "epoch": 4.39,
      "learning_rate": 1.7261546184738955e-06,
      "loss": 2.2593,
      "step": 17500
    },
    {
      "epoch": 4.52,
      "learning_rate": 1.6897590361445783e-06,
      "loss": 2.256,
      "step": 18000
    },
    {
      "epoch": 4.64,
      "learning_rate": 1.6533634538152614e-06,
      "loss": 2.2539,
      "step": 18500
    },
    {
      "epoch": 4.77,
      "learning_rate": 1.616967871485944e-06,
      "loss": 2.2507,
      "step": 19000
    },
    {
      "epoch": 4.89,
      "learning_rate": 1.5805722891566265e-06,
      "loss": 2.2492,
      "step": 19500
    },
    {
      "epoch": 5.0,
      "eval_loss": 2.2463769912719727,
      "eval_runtime": 73.2603,
      "eval_samples_per_second": 281.244,
      "step": 19920
    },
    {
      "epoch": 5.02,
      "learning_rate": 1.5441767068273091e-06,
      "loss": 2.2493,
      "step": 20000
    },
    {
      "epoch": 5.15,
      "learning_rate": 1.5077811244979922e-06,
      "loss": 2.2447,
      "step": 20500
    },
    {
      "epoch": 5.27,
      "learning_rate": 1.4713855421686746e-06,
      "loss": 2.2426,
      "step": 21000
    },
    {
      "epoch": 5.4,
      "learning_rate": 1.4349899598393575e-06,
      "loss": 2.2412,
      "step": 21500
    },
    {
      "epoch": 5.52,
      "learning_rate": 1.3985943775100403e-06,
      "loss": 2.2393,
      "step": 22000
    },
    {
      "epoch": 5.65,
      "learning_rate": 1.3621987951807228e-06,
      "loss": 2.2375,
      "step": 22500
    },
    {
      "epoch": 5.77,
      "learning_rate": 1.3258032128514056e-06,
      "loss": 2.236,
      "step": 23000
    },
    {
      "epoch": 5.9,
      "learning_rate": 1.2894076305220885e-06,
      "loss": 2.2342,
      "step": 23500
    },
    {
      "epoch": 6.0,
      "eval_loss": 2.2327213287353516,
      "eval_runtime": 73.1203,
      "eval_samples_per_second": 281.782,
      "step": 23904
    },
    {
      "epoch": 6.02,
      "learning_rate": 1.253012048192771e-06,
      "loss": 2.2346,
      "step": 24000
    },
    {
      "epoch": 6.15,
      "learning_rate": 1.2166164658634538e-06,
      "loss": 2.2312,
      "step": 24500
    },
    {
      "epoch": 6.28,
      "learning_rate": 1.1802208835341366e-06,
      "loss": 2.2291,
      "step": 25000
    },
    {
      "epoch": 6.4,
      "learning_rate": 1.1438253012048195e-06,
      "loss": 2.2283,
      "step": 25500
    },
    {
      "epoch": 6.53,
      "learning_rate": 1.107429718875502e-06,
      "loss": 2.2279,
      "step": 26000
    },
    {
      "epoch": 6.65,
      "learning_rate": 1.0710341365461848e-06,
      "loss": 2.2269,
      "step": 26500
    },
    {
      "epoch": 6.78,
      "learning_rate": 1.0346385542168676e-06,
      "loss": 2.2251,
      "step": 27000
    },
    {
      "epoch": 6.9,
      "learning_rate": 9.9824297188755e-07,
      "loss": 2.2245,
      "step": 27500
    },
    {
      "epoch": 7.0,
      "eval_loss": 2.223916530609131,
      "eval_runtime": 72.7667,
      "eval_samples_per_second": 283.152,
      "step": 27888
    },
    {
      "epoch": 7.03,
      "learning_rate": 9.61847389558233e-07,
      "loss": 2.2247,
      "step": 28000
    },
    {
      "epoch": 7.15,
      "learning_rate": 9.254518072289157e-07,
      "loss": 2.221,
      "step": 28500
    },
    {
      "epoch": 7.28,
      "learning_rate": 8.890562248995984e-07,
      "loss": 2.2214,
      "step": 29000
    },
    {
      "epoch": 7.4,
      "learning_rate": 8.526606425702813e-07,
      "loss": 2.2206,
      "step": 29500
    },
    {
      "epoch": 7.53,
      "learning_rate": 8.162650602409637e-07,
      "loss": 2.2187,
      "step": 30000
    },
    {
      "epoch": 7.66,
      "learning_rate": 7.798694779116466e-07,
      "loss": 2.2178,
      "step": 30500
    },
    {
      "epoch": 7.78,
      "learning_rate": 7.434738955823293e-07,
      "loss": 2.2178,
      "step": 31000
    },
    {
      "epoch": 7.91,
      "learning_rate": 7.070783132530122e-07,
      "loss": 2.2178,
      "step": 31500
    },
    {
      "epoch": 8.0,
      "eval_loss": 2.2174229621887207,
      "eval_runtime": 72.9337,
      "eval_samples_per_second": 282.503,
      "step": 31872
    },
    {
      "epoch": 8.03,
      "learning_rate": 6.706827309236949e-07,
      "loss": 2.2183,
      "step": 32000
    },
    {
      "epoch": 8.16,
      "learning_rate": 6.342871485943774e-07,
      "loss": 2.2157,
      "step": 32500
    },
    {
      "epoch": 8.28,
      "learning_rate": 5.978915662650604e-07,
      "loss": 2.2157,
      "step": 33000
    },
    {
      "epoch": 8.41,
      "learning_rate": 5.614959839357429e-07,
      "loss": 2.2133,
      "step": 33500
    },
    {
      "epoch": 8.53,
      "learning_rate": 5.251004016064259e-07,
      "loss": 2.2145,
      "step": 34000
    },
    {
      "epoch": 8.66,
      "learning_rate": 4.887048192771084e-07,
      "loss": 2.2136,
      "step": 34500
    },
    {
      "epoch": 8.79,
      "learning_rate": 4.5230923694779105e-07,
      "loss": 2.214,
      "step": 35000
    },
    {
      "epoch": 8.91,
      "learning_rate": 4.15913654618474e-07,
      "loss": 2.2134,
      "step": 35500
    },
    {
      "epoch": 9.0,
      "eval_loss": 2.213841676712036,
      "eval_runtime": 72.9648,
      "eval_samples_per_second": 282.383,
      "step": 35856
    },
    {
      "epoch": 9.04,
      "learning_rate": 3.795180722891565e-07,
      "loss": 2.2125,
      "step": 36000
    },
    {
      "epoch": 9.16,
      "learning_rate": 3.431224899598395e-07,
      "loss": 2.2122,
      "step": 36500
    },
    {
      "epoch": 9.29,
      "learning_rate": 3.0672690763052206e-07,
      "loss": 2.2127,
      "step": 37000
    },
    {
      "epoch": 9.41,
      "learning_rate": 2.703313253012047e-07,
      "loss": 2.2116,
      "step": 37500
    },
    {
      "epoch": 9.54,
      "learning_rate": 2.3393574297188764e-07,
      "loss": 2.2102,
      "step": 38000
    },
    {
      "epoch": 9.66,
      "learning_rate": 1.975401606425702e-07,
      "loss": 2.2104,
      "step": 38500
    },
    {
      "epoch": 9.79,
      "learning_rate": 1.6114457831325312e-07,
      "loss": 2.2104,
      "step": 39000
    },
    {
      "epoch": 9.91,
      "learning_rate": 1.2474899598393572e-07,
      "loss": 2.2113,
      "step": 39500
    },
    {
      "epoch": 10.0,
      "eval_loss": 2.212301254272461,
      "eval_runtime": 77.5042,
      "eval_samples_per_second": 265.844,
      "step": 39840
    },
    {
      "epoch": 10.0,
      "step": 39840,
      "total_flos": 1.5755091071960033e+18,
      "train_runtime": 81966.5582,
      "train_samples_per_second": 0.486
    }
  ],
  "max_steps": 39840,
  "num_train_epochs": 10,
  "total_flos": 1.5755091071960033e+18,
  "trial_name": null,
  "trial_params": null
}