{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1968,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02540650406504065,
      "grad_norm": 9.75,
      "learning_rate": 1.2436548223350254e-05,
      "loss": 0.4625,
      "step": 50
    },
    {
      "epoch": 0.0508130081300813,
      "grad_norm": 7.6875,
      "learning_rate": 2.5126903553299492e-05,
      "loss": 0.2331,
      "step": 100
    },
    {
      "epoch": 0.07621951219512195,
      "grad_norm": 6.78125,
      "learning_rate": 3.7817258883248735e-05,
      "loss": 0.2031,
      "step": 150
    },
    {
      "epoch": 0.1016260162601626,
      "grad_norm": 5.84375,
      "learning_rate": 4.994353472614343e-05,
      "loss": 0.2042,
      "step": 200
    },
    {
      "epoch": 0.12703252032520326,
      "grad_norm": 5.4375,
      "learning_rate": 4.853190287972897e-05,
      "loss": 0.2048,
      "step": 250
    },
    {
      "epoch": 0.1524390243902439,
      "grad_norm": 5.09375,
      "learning_rate": 4.7120271033314515e-05,
      "loss": 0.2123,
      "step": 300
    },
    {
      "epoch": 0.17784552845528456,
      "grad_norm": 4.5,
      "learning_rate": 4.570863918690006e-05,
      "loss": 0.186,
      "step": 350
    },
    {
      "epoch": 0.2032520325203252,
      "grad_norm": 5.6875,
      "learning_rate": 4.42970073404856e-05,
      "loss": 0.1693,
      "step": 400
    },
    {
      "epoch": 0.22865853658536586,
      "grad_norm": 5.75,
      "learning_rate": 4.288537549407115e-05,
      "loss": 0.1645,
      "step": 450
    },
    {
      "epoch": 0.2540650406504065,
      "grad_norm": 4.21875,
      "learning_rate": 4.147374364765669e-05,
      "loss": 0.1625,
      "step": 500
    },
    {
      "epoch": 0.27947154471544716,
      "grad_norm": 3.984375,
      "learning_rate": 4.006211180124224e-05,
      "loss": 0.1496,
      "step": 550
    },
    {
      "epoch": 0.3048780487804878,
      "grad_norm": 3.625,
      "learning_rate": 3.8650479954827784e-05,
      "loss": 0.1443,
      "step": 600
    },
    {
      "epoch": 0.33028455284552843,
      "grad_norm": 3.703125,
      "learning_rate": 3.7238848108413324e-05,
      "loss": 0.1417,
      "step": 650
    },
    {
      "epoch": 0.3556910569105691,
      "grad_norm": 4.03125,
      "learning_rate": 3.5827216261998877e-05,
      "loss": 0.1335,
      "step": 700
    },
    {
      "epoch": 0.38109756097560976,
      "grad_norm": 3.6875,
      "learning_rate": 3.4415584415584416e-05,
      "loss": 0.1241,
      "step": 750
    },
    {
      "epoch": 0.4065040650406504,
      "grad_norm": 3.375,
      "learning_rate": 3.300395256916996e-05,
      "loss": 0.1212,
      "step": 800
    },
    {
      "epoch": 0.43191056910569103,
      "grad_norm": 3.703125,
      "learning_rate": 3.159232072275551e-05,
      "loss": 0.1147,
      "step": 850
    },
    {
      "epoch": 0.4573170731707317,
      "grad_norm": 4.09375,
      "learning_rate": 3.018068887634105e-05,
      "loss": 0.111,
      "step": 900
    },
    {
      "epoch": 0.48272357723577236,
      "grad_norm": 4.0625,
      "learning_rate": 2.8769057029926593e-05,
      "loss": 0.1163,
      "step": 950
    },
    {
      "epoch": 0.508130081300813,
      "grad_norm": 4.0625,
      "learning_rate": 2.7357425183512143e-05,
      "loss": 0.1133,
      "step": 1000
    },
    {
      "epoch": 0.5335365853658537,
      "grad_norm": 3.34375,
      "learning_rate": 2.5945793337097685e-05,
      "loss": 0.1008,
      "step": 1050
    },
    {
      "epoch": 0.5589430894308943,
      "grad_norm": 3.890625,
      "learning_rate": 2.453416149068323e-05,
      "loss": 0.1075,
      "step": 1100
    },
    {
      "epoch": 0.584349593495935,
      "grad_norm": 3.6875,
      "learning_rate": 2.3122529644268774e-05,
      "loss": 0.0994,
      "step": 1150
    },
    {
      "epoch": 0.6097560975609756,
      "grad_norm": 2.609375,
      "learning_rate": 2.171089779785432e-05,
      "loss": 0.0911,
      "step": 1200
    },
    {
      "epoch": 0.6351626016260162,
      "grad_norm": 3.5,
      "learning_rate": 2.0299265951439866e-05,
      "loss": 0.0944,
      "step": 1250
    },
    {
      "epoch": 0.6605691056910569,
      "grad_norm": 3.484375,
      "learning_rate": 1.888763410502541e-05,
      "loss": 0.0903,
      "step": 1300
    },
    {
      "epoch": 0.6859756097560976,
      "grad_norm": 3.0,
      "learning_rate": 1.7476002258610955e-05,
      "loss": 0.0869,
      "step": 1350
    },
    {
      "epoch": 0.7113821138211383,
      "grad_norm": 2.546875,
      "learning_rate": 1.60643704121965e-05,
      "loss": 0.0927,
      "step": 1400
    },
    {
      "epoch": 0.7367886178861789,
      "grad_norm": 2.71875,
      "learning_rate": 1.4652738565782046e-05,
      "loss": 0.0919,
      "step": 1450
    },
    {
      "epoch": 0.7621951219512195,
      "grad_norm": 3.421875,
      "learning_rate": 1.3241106719367592e-05,
      "loss": 0.0882,
      "step": 1500
    },
    {
      "epoch": 0.7876016260162602,
      "grad_norm": 2.703125,
      "learning_rate": 1.1829474872953134e-05,
      "loss": 0.0816,
      "step": 1550
    },
    {
      "epoch": 0.8130081300813008,
      "grad_norm": 2.625,
      "learning_rate": 1.0417843026538679e-05,
      "loss": 0.0847,
      "step": 1600
    },
    {
      "epoch": 0.8384146341463414,
      "grad_norm": 2.8125,
      "learning_rate": 9.006211180124225e-06,
      "loss": 0.0824,
      "step": 1650
    },
    {
      "epoch": 0.8638211382113821,
      "grad_norm": 3.0,
      "learning_rate": 7.594579333709768e-06,
      "loss": 0.0878,
      "step": 1700
    },
    {
      "epoch": 0.8892276422764228,
      "grad_norm": 2.6875,
      "learning_rate": 6.1829474872953135e-06,
      "loss": 0.0822,
      "step": 1750
    },
    {
      "epoch": 0.9146341463414634,
      "grad_norm": 2.515625,
      "learning_rate": 4.771315640880859e-06,
      "loss": 0.0792,
      "step": 1800
    },
    {
      "epoch": 0.9400406504065041,
      "grad_norm": 2.984375,
      "learning_rate": 3.3596837944664035e-06,
      "loss": 0.0859,
      "step": 1850
    },
    {
      "epoch": 0.9654471544715447,
      "grad_norm": 2.921875,
      "learning_rate": 1.948051948051948e-06,
      "loss": 0.0826,
      "step": 1900
    },
    {
      "epoch": 0.9908536585365854,
      "grad_norm": 2.859375,
      "learning_rate": 5.364201016374929e-07,
      "loss": 0.0819,
      "step": 1950
    }
  ],
  "logging_steps": 50,
  "max_steps": 1968,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 512,
  "trial_name": null,
  "trial_params": null
}