{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 43952,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 2.9665544230069168e-05,
      "loss": 8.2605,
      "step": 500
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.9324262832180562e-05,
      "loss": 4.2073,
      "step": 1000
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.8982981434291954e-05,
      "loss": 4.0271,
      "step": 1500
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.864170003640335e-05,
      "loss": 3.8021,
      "step": 2000
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.8300418638514747e-05,
      "loss": 3.7889,
      "step": 2500
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.795913724062614e-05,
      "loss": 3.9639,
      "step": 3000
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.7617855842737534e-05,
      "loss": 3.7584,
      "step": 3500
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.7276574444848925e-05,
      "loss": 3.9555,
      "step": 4000
    },
    {
      "epoch": 0.2,
      "learning_rate": 2.6935975609756097e-05,
      "loss": 3.9079,
      "step": 4500
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.6594694211867496e-05,
      "loss": 3.9956,
      "step": 5000
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.6253412813978887e-05,
      "loss": 3.1935,
      "step": 5500
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.5912131416090282e-05,
      "loss": 3.772,
      "step": 6000
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.5572215143793228e-05,
      "loss": 5.1608,
      "step": 6500
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.5230933745904623e-05,
      "loss": 4.1891,
      "step": 7000
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.4889652348016018e-05,
      "loss": 3.9175,
      "step": 7500
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.4548370950127413e-05,
      "loss": 3.9636,
      "step": 8000
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.4207089552238807e-05,
      "loss": 4.1169,
      "step": 8500
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.38658081543502e-05,
      "loss": 3.9023,
      "step": 9000
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.3524526756461594e-05,
      "loss": 3.8476,
      "step": 9500
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.3183245358572992e-05,
      "loss": 3.7756,
      "step": 10000
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.2841963960684384e-05,
      "loss": 4.0936,
      "step": 10500
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.250068256279578e-05,
      "loss": 3.7001,
      "step": 11000
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.215940116490717e-05,
      "loss": 4.269,
      "step": 11500
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.1818119767018565e-05,
      "loss": 3.8019,
      "step": 12000
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.147752093192574e-05,
      "loss": 3.8787,
      "step": 12500
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.113692209683291e-05,
      "loss": 3.9497,
      "step": 13000
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.0795640698944304e-05,
      "loss": 3.8454,
      "step": 13500
    },
    {
      "epoch": 0.64,
      "learning_rate": 2.0454359301055696e-05,
      "loss": 3.824,
      "step": 14000
    },
    {
      "epoch": 0.66,
      "learning_rate": 2.011307790316709e-05,
      "loss": 3.9265,
      "step": 14500
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.977179650527849e-05,
      "loss": 3.9985,
      "step": 15000
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.943051510738988e-05,
      "loss": 3.7808,
      "step": 15500
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.9089233709501275e-05,
      "loss": 4.0853,
      "step": 16000
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.8747952311612667e-05,
      "loss": 4.1285,
      "step": 16500
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.840735347651984e-05,
      "loss": 3.4963,
      "step": 17000
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.8066072078631237e-05,
      "loss": 3.8048,
      "step": 17500
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.772479068074263e-05,
      "loss": 3.5536,
      "step": 18000
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.7383509282854024e-05,
      "loss": 3.6811,
      "step": 18500
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.7042910447761196e-05,
      "loss": 3.6341,
      "step": 19000
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6701629049872587e-05,
      "loss": 3.5169,
      "step": 19500
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6360347651983986e-05,
      "loss": 3.9027,
      "step": 20000
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6019066254095377e-05,
      "loss": 3.9672,
      "step": 20500
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.567846741900255e-05,
      "loss": 3.6631,
      "step": 21000
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5337186021113944e-05,
      "loss": 3.4308,
      "step": 21500
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.4995904623225337e-05,
      "loss": 3.4298,
      "step": 22000
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.465462322533673e-05,
      "loss": 3.5444,
      "step": 22500
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.4313341827448125e-05,
      "loss": 3.1415,
      "step": 23000
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.397206042955952e-05,
      "loss": 3.1462,
      "step": 23500
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.3630779031670913e-05,
      "loss": 3.0745,
      "step": 24000
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.3289497633782308e-05,
      "loss": 2.8795,
      "step": 24500
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.294889879868948e-05,
      "loss": 2.9813,
      "step": 25000
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.2607617400800874e-05,
      "loss": 2.9452,
      "step": 25500
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.2266336002912269e-05,
      "loss": 3.1609,
      "step": 26000
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.1925054605023662e-05,
      "loss": 3.0651,
      "step": 26500
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.1584455769930834e-05,
      "loss": 3.1032,
      "step": 27000
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.1243174372042229e-05,
      "loss": 3.1298,
      "step": 27500
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.0901892974153622e-05,
      "loss": 3.033,
      "step": 28000
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.0560611576265017e-05,
      "loss": 3.0212,
      "step": 28500
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.0220012741172187e-05,
      "loss": 2.9057,
      "step": 29000
    },
    {
      "epoch": 1.34,
      "learning_rate": 9.878731343283582e-06,
      "loss": 3.1331,
      "step": 29500
    },
    {
      "epoch": 1.37,
      "learning_rate": 9.537449945394977e-06,
      "loss": 3.1259,
      "step": 30000
    },
    {
      "epoch": 1.39,
      "learning_rate": 9.19616854750637e-06,
      "loss": 2.7155,
      "step": 30500
    },
    {
      "epoch": 1.41,
      "learning_rate": 8.855569712413542e-06,
      "loss": 2.9427,
      "step": 31000
    },
    {
      "epoch": 1.43,
      "learning_rate": 8.514288314524936e-06,
      "loss": 2.9428,
      "step": 31500
    },
    {
      "epoch": 1.46,
      "learning_rate": 8.17300691663633e-06,
      "loss": 3.1678,
      "step": 32000
    },
    {
      "epoch": 1.48,
      "learning_rate": 7.831725518747725e-06,
      "loss": 3.4137,
      "step": 32500
    },
    {
      "epoch": 1.5,
      "learning_rate": 7.491126683654897e-06,
      "loss": 2.9195,
      "step": 33000
    },
    {
      "epoch": 1.52,
      "learning_rate": 7.149845285766291e-06,
      "loss": 3.3734,
      "step": 33500
    },
    {
      "epoch": 1.55,
      "learning_rate": 6.808563887877685e-06,
      "loss": 3.2046,
      "step": 34000
    },
    {
      "epoch": 1.57,
      "learning_rate": 6.467282489989079e-06,
      "loss": 3.1923,
      "step": 34500
    },
    {
      "epoch": 1.59,
      "learning_rate": 6.126683654896251e-06,
      "loss": 3.0387,
      "step": 35000
    },
    {
      "epoch": 1.62,
      "learning_rate": 5.785402257007645e-06,
      "loss": 3.0879,
      "step": 35500
    },
    {
      "epoch": 1.64,
      "learning_rate": 5.444120859119039e-06,
      "loss": 3.0943,
      "step": 36000
    },
    {
      "epoch": 1.66,
      "learning_rate": 5.102839461230433e-06,
      "loss": 3.1384,
      "step": 36500
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.762240626137604e-06,
      "loss": 2.8415,
      "step": 37000
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.420959228248999e-06,
      "loss": 3.0139,
      "step": 37500
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.079677830360393e-06,
      "loss": 3.1161,
      "step": 38000
    },
    {
      "epoch": 1.75,
      "learning_rate": 3.7383964324717874e-06,
      "loss": 3.2882,
      "step": 38500
    },
    {
      "epoch": 1.77,
      "learning_rate": 3.3977975973789587e-06,
      "loss": 3.0338,
      "step": 39000
    },
    {
      "epoch": 1.8,
      "learning_rate": 3.056516199490353e-06,
      "loss": 2.876,
      "step": 39500
    },
    {
      "epoch": 1.82,
      "learning_rate": 2.7152348016017477e-06,
      "loss": 3.0888,
      "step": 40000
    },
    {
      "epoch": 1.84,
      "learning_rate": 2.3739534037131413e-06,
      "loss": 3.1206,
      "step": 40500
    },
    {
      "epoch": 1.87,
      "learning_rate": 2.0326720058245358e-06,
      "loss": 3.0954,
      "step": 41000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.6920731707317074e-06,
      "loss": 3.4009,
      "step": 41500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.3507917728431015e-06,
      "loss": 3.1574,
      "step": 42000
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.009510374954496e-06,
      "loss": 2.8075,
      "step": 42500
    },
    {
      "epoch": 1.96,
      "learning_rate": 6.682289770658902e-07,
      "loss": 3.1462,
      "step": 43000
    },
    {
      "epoch": 1.98,
      "learning_rate": 3.2763014197306156e-07,
      "loss": 3.0345,
      "step": 43500
    },
    {
      "epoch": 2.0,
      "step": 43952,
      "total_flos": 1.836818419405271e+17,
      "train_loss": 3.5304283500368654,
      "train_runtime": 15323.5402,
      "train_samples_per_second": 17.209,
      "train_steps_per_second": 2.868
    }
  ],
  "max_steps": 43952,
  "num_train_epochs": 2,
  "total_flos": 1.836818419405271e+17,
  "trial_name": null,
  "trial_params": null
}