{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.704,
  "eval_steps": 500,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.16,
      "grad_norm": 0.4173983037471771,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.7785,
      "step": 5
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.3714005947113037,
      "learning_rate": 6e-05,
      "loss": 0.7355,
      "step": 10
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.40615686774253845,
      "learning_rate": 9.333333333333334e-05,
      "loss": 0.6202,
      "step": 15
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.3105890154838562,
      "learning_rate": 9.703703703703704e-05,
      "loss": 0.5831,
      "step": 20
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.19839249551296234,
      "learning_rate": 9.333333333333334e-05,
      "loss": 0.5024,
      "step": 25
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.20537137985229492,
      "learning_rate": 8.962962962962963e-05,
      "loss": 0.5202,
      "step": 30
    },
    {
      "epoch": 1.096,
      "grad_norm": 0.1985170543193817,
      "learning_rate": 8.592592592592593e-05,
      "loss": 0.4727,
      "step": 35
    },
    {
      "epoch": 1.256,
      "grad_norm": 0.16953736543655396,
      "learning_rate": 8.222222222222222e-05,
      "loss": 0.4582,
      "step": 40
    },
    {
      "epoch": 1.416,
      "grad_norm": 0.18532288074493408,
      "learning_rate": 7.851851851851852e-05,
      "loss": 0.4438,
      "step": 45
    },
    {
      "epoch": 1.576,
      "grad_norm": 0.22516000270843506,
      "learning_rate": 7.481481481481481e-05,
      "loss": 0.4722,
      "step": 50
    },
    {
      "epoch": 1.736,
      "grad_norm": 0.22280442714691162,
      "learning_rate": 7.111111111111112e-05,
      "loss": 0.4645,
      "step": 55
    },
    {
      "epoch": 1.896,
      "grad_norm": 0.1927877962589264,
      "learning_rate": 6.740740740740741e-05,
      "loss": 0.4277,
      "step": 60
    },
    {
      "epoch": 2.032,
      "grad_norm": 0.18494941294193268,
      "learning_rate": 6.37037037037037e-05,
      "loss": 0.4093,
      "step": 65
    },
    {
      "epoch": 2.192,
      "grad_norm": 0.22715841233730316,
      "learning_rate": 6e-05,
      "loss": 0.4129,
      "step": 70
    },
    {
      "epoch": 2.352,
      "grad_norm": 0.22747837007045746,
      "learning_rate": 5.62962962962963e-05,
      "loss": 0.4063,
      "step": 75
    },
    {
      "epoch": 2.512,
      "grad_norm": 0.20530784130096436,
      "learning_rate": 5.259259259259259e-05,
      "loss": 0.3846,
      "step": 80
    },
    {
      "epoch": 2.672,
      "grad_norm": 0.21205097436904907,
      "learning_rate": 4.888888888888889e-05,
      "loss": 0.3987,
      "step": 85
    },
    {
      "epoch": 2.832,
      "grad_norm": 0.22707021236419678,
      "learning_rate": 4.518518518518519e-05,
      "loss": 0.4245,
      "step": 90
    },
    {
      "epoch": 2.992,
      "grad_norm": 0.22992798686027527,
      "learning_rate": 4.148148148148148e-05,
      "loss": 0.3966,
      "step": 95
    },
    {
      "epoch": 3.128,
      "grad_norm": 0.2068253755569458,
      "learning_rate": 3.777777777777778e-05,
      "loss": 0.3842,
      "step": 100
    },
    {
      "epoch": 3.288,
      "grad_norm": 0.22677761316299438,
      "learning_rate": 3.4074074074074077e-05,
      "loss": 0.3515,
      "step": 105
    },
    {
      "epoch": 3.448,
      "grad_norm": 0.2852815091609955,
      "learning_rate": 3.037037037037037e-05,
      "loss": 0.376,
      "step": 110
    },
    {
      "epoch": 3.608,
      "grad_norm": 0.23916120827198029,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.3798,
      "step": 115
    },
    {
      "epoch": 3.768,
      "grad_norm": 0.2707362473011017,
      "learning_rate": 2.2962962962962965e-05,
      "loss": 0.3465,
      "step": 120
    },
    {
      "epoch": 3.928,
      "grad_norm": 0.2740895450115204,
      "learning_rate": 1.925925925925926e-05,
      "loss": 0.3998,
      "step": 125
    },
    {
      "epoch": 4.064,
      "grad_norm": 0.21373049914836884,
      "learning_rate": 1.5555555555555555e-05,
      "loss": 0.3427,
      "step": 130
    },
    {
      "epoch": 4.224,
      "grad_norm": 0.2915768027305603,
      "learning_rate": 1.1851851851851853e-05,
      "loss": 0.3625,
      "step": 135
    },
    {
      "epoch": 4.384,
      "grad_norm": 0.3261728584766388,
      "learning_rate": 8.14814814814815e-06,
      "loss": 0.3396,
      "step": 140
    },
    {
      "epoch": 4.5440000000000005,
      "grad_norm": 0.2733069062232971,
      "learning_rate": 4.444444444444445e-06,
      "loss": 0.3512,
      "step": 145
    },
    {
      "epoch": 4.704,
      "grad_norm": 0.2531593441963196,
      "learning_rate": 7.407407407407408e-07,
      "loss": 0.316,
      "step": 150
    }
  ],
  "logging_steps": 5,
  "max_steps": 150,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 30,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.7126531839033344e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}