{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 183,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08196721311475409,
      "grad_norm": 8.290995597839355,
      "learning_rate": 4.990795908619189e-05,
      "loss": 6.3211,
      "step": 5
    },
    {
      "epoch": 0.16393442622950818,
      "grad_norm": 10.25100040435791,
      "learning_rate": 4.9632514067152726e-05,
      "loss": 5.8414,
      "step": 10
    },
    {
      "epoch": 0.2459016393442623,
      "grad_norm": 9.911712646484375,
      "learning_rate": 4.928142498664579e-05,
      "loss": 6.2669,
      "step": 15
    },
    {
      "epoch": 0.32786885245901637,
      "grad_norm": 12.882040977478027,
      "learning_rate": 4.868186180746792e-05,
      "loss": 4.3475,
      "step": 20
    },
    {
      "epoch": 0.4098360655737705,
      "grad_norm": 9.486019134521484,
      "learning_rate": 4.790792261217512e-05,
      "loss": 3.6657,
      "step": 25
    },
    {
      "epoch": 0.4918032786885246,
      "grad_norm": 4.90289306640625,
      "learning_rate": 4.696530612642871e-05,
      "loss": 2.6512,
      "step": 30
    },
    {
      "epoch": 0.5737704918032787,
      "grad_norm": 2.928373336791992,
      "learning_rate": 4.586095309284618e-05,
      "loss": 2.1788,
      "step": 35
    },
    {
      "epoch": 0.6557377049180327,
      "grad_norm": 5.331892967224121,
      "learning_rate": 4.460299516441777e-05,
      "loss": 2.0556,
      "step": 40
    },
    {
      "epoch": 0.7377049180327869,
      "grad_norm": 3.0905227661132812,
      "learning_rate": 4.320069502892462e-05,
      "loss": 2.0827,
      "step": 45
    },
    {
      "epoch": 0.819672131147541,
      "grad_norm": 3.840454339981079,
      "learning_rate": 4.1664378205239085e-05,
      "loss": 1.9863,
      "step": 50
    },
    {
      "epoch": 0.9016393442622951,
      "grad_norm": 2.925448179244995,
      "learning_rate": 4.000535701370921e-05,
      "loss": 2.0797,
      "step": 55
    },
    {
      "epoch": 0.9836065573770492,
      "grad_norm": 4.124454498291016,
      "learning_rate": 3.823584728045463e-05,
      "loss": 1.9855,
      "step": 60
    },
    {
      "epoch": 1.0655737704918034,
      "grad_norm": 2.905304431915283,
      "learning_rate": 3.636887838890265e-05,
      "loss": 1.8565,
      "step": 65
    },
    {
      "epoch": 1.1475409836065573,
      "grad_norm": 2.937238931655884,
      "learning_rate": 3.4418197340879635e-05,
      "loss": 1.7846,
      "step": 70
    },
    {
      "epoch": 1.2295081967213115,
      "grad_norm": 2.9058425426483154,
      "learning_rate": 3.239816753368223e-05,
      "loss": 1.8977,
      "step": 75
    },
    {
      "epoch": 1.3114754098360657,
      "grad_norm": 3.247345447540283,
      "learning_rate": 3.0323662998460393e-05,
      "loss": 1.6528,
      "step": 80
    },
    {
      "epoch": 1.3934426229508197,
      "grad_norm": 3.5700137615203857,
      "learning_rate": 2.8209958878663778e-05,
      "loss": 1.7865,
      "step": 85
    },
    {
      "epoch": 1.4754098360655736,
      "grad_norm": 3.3404250144958496,
      "learning_rate": 2.6072618954988866e-05,
      "loss": 1.6595,
      "step": 90
    },
    {
      "epoch": 1.5573770491803278,
      "grad_norm": 2.9976389408111572,
      "learning_rate": 2.3927381045011136e-05,
      "loss": 1.795,
      "step": 95
    },
    {
      "epoch": 1.639344262295082,
      "grad_norm": 2.907928943634033,
      "learning_rate": 2.1790041121336225e-05,
      "loss": 1.6754,
      "step": 100
    },
    {
      "epoch": 1.721311475409836,
      "grad_norm": 2.8854143619537354,
      "learning_rate": 1.9676337001539612e-05,
      "loss": 1.6433,
      "step": 105
    },
    {
      "epoch": 1.8032786885245902,
      "grad_norm": 3.0124289989471436,
      "learning_rate": 1.760183246631777e-05,
      "loss": 1.5446,
      "step": 110
    },
    {
      "epoch": 1.8852459016393444,
      "grad_norm": 3.203061819076538,
      "learning_rate": 1.558180265912037e-05,
      "loss": 1.5668,
      "step": 115
    },
    {
      "epoch": 1.9672131147540983,
      "grad_norm": 2.9160172939300537,
      "learning_rate": 1.3631121611097364e-05,
      "loss": 1.5542,
      "step": 120
    },
    {
      "epoch": 2.0491803278688523,
      "grad_norm": 3.249051570892334,
      "learning_rate": 1.1764152719545372e-05,
      "loss": 1.4999,
      "step": 125
    },
    {
      "epoch": 2.1311475409836067,
      "grad_norm": 3.0132319927215576,
      "learning_rate": 9.994642986290797e-06,
      "loss": 1.6873,
      "step": 130
    },
    {
      "epoch": 2.2131147540983607,
      "grad_norm": 3.1967945098876953,
      "learning_rate": 8.33562179476092e-06,
      "loss": 1.5267,
      "step": 135
    },
    {
      "epoch": 2.2950819672131146,
      "grad_norm": 3.5244369506835938,
      "learning_rate": 6.799304971075382e-06,
      "loss": 1.6676,
      "step": 140
    },
    {
      "epoch": 2.3770491803278686,
      "grad_norm": 3.073401689529419,
      "learning_rate": 5.397004835582242e-06,
      "loss": 1.7066,
      "step": 145
    },
    {
      "epoch": 2.459016393442623,
      "grad_norm": 2.9026217460632324,
      "learning_rate": 4.139046907153818e-06,
      "loss": 1.4762,
      "step": 150
    },
    {
      "epoch": 2.540983606557377,
      "grad_norm": 2.8583261966705322,
      "learning_rate": 3.0346938735712954e-06,
      "loss": 1.5355,
      "step": 155
    },
    {
      "epoch": 2.6229508196721314,
      "grad_norm": 3.106534242630005,
      "learning_rate": 2.092077387824884e-06,
      "loss": 1.708,
      "step": 160
    },
    {
      "epoch": 2.7049180327868854,
      "grad_norm": 3.100788116455078,
      "learning_rate": 1.3181381925320785e-06,
      "loss": 1.54,
      "step": 165
    },
    {
      "epoch": 2.7868852459016393,
      "grad_norm": 3.149162530899048,
      "learning_rate": 7.185750133542169e-07,
      "loss": 1.5244,
      "step": 170
    },
    {
      "epoch": 2.8688524590163933,
      "grad_norm": 3.3356781005859375,
      "learning_rate": 2.978025977230736e-07,
      "loss": 1.5209,
      "step": 175
    },
    {
      "epoch": 2.9508196721311473,
      "grad_norm": 2.8666839599609375,
      "learning_rate": 5.891920784984184e-08,
      "loss": 1.5363,
      "step": 180
    },
    {
      "epoch": 3.0,
      "step": 183,
      "total_flos": 9540135882719232.0,
      "train_loss": 2.231679132075909,
      "train_runtime": 350.7334,
      "train_samples_per_second": 8.348,
      "train_steps_per_second": 0.522
    }
  ],
  "logging_steps": 5,
  "max_steps": 183,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "total_flos": 9540135882719232.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}