{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.1,
  "eval_steps": 2000,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 20.375,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 1.7815,
      "step": 10
    },
    {
      "epoch": 0.0,
      "grad_norm": 19.125,
      "learning_rate": 6.000000000000001e-07,
      "loss": 1.84,
      "step": 20
    },
    {
      "epoch": 0.0,
      "grad_norm": 17.0,
      "learning_rate": 9e-07,
      "loss": 2.098,
      "step": 30
    },
    {
      "epoch": 0.0,
      "grad_norm": 20.125,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.67,
      "step": 40
    },
    {
      "epoch": 0.0,
      "grad_norm": 18.0,
      "learning_rate": 1.5e-06,
      "loss": 1.8452,
      "step": 50
    },
    {
      "epoch": 0.0,
      "grad_norm": 19.25,
      "learning_rate": 1.8e-06,
      "loss": 2.1664,
      "step": 60
    },
    {
      "epoch": 0.0,
      "grad_norm": 17.25,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 1.6483,
      "step": 70
    },
    {
      "epoch": 0.0,
      "grad_norm": 18.625,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.9371,
      "step": 80
    },
    {
      "epoch": 0.0,
      "grad_norm": 16.75,
      "learning_rate": 2.7e-06,
      "loss": 1.8967,
      "step": 90
    },
    {
      "epoch": 0.01,
      "grad_norm": 14.3125,
      "learning_rate": 3e-06,
      "loss": 1.8326,
      "step": 100
    },
    {
      "epoch": 0.01,
      "grad_norm": 15.9375,
      "learning_rate": 3.3e-06,
      "loss": 1.9072,
      "step": 110
    },
    {
      "epoch": 0.01,
      "grad_norm": 21.5,
      "learning_rate": 3.6e-06,
      "loss": 1.9562,
      "step": 120
    },
    {
      "epoch": 0.01,
      "grad_norm": 21.75,
      "learning_rate": 3.9e-06,
      "loss": 1.6976,
      "step": 130
    },
    {
      "epoch": 0.01,
      "grad_norm": 16.0,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 2.2788,
      "step": 140
    },
    {
      "epoch": 0.01,
      "grad_norm": 17.625,
      "learning_rate": 4.5e-06,
      "loss": 1.8362,
      "step": 150
    },
    {
      "epoch": 0.01,
      "grad_norm": 22.5,
      "learning_rate": 4.800000000000001e-06,
      "loss": 1.8372,
      "step": 160
    },
    {
      "epoch": 0.01,
      "grad_norm": 16.75,
      "learning_rate": 5.1e-06,
      "loss": 1.9134,
      "step": 170
    },
    {
      "epoch": 0.01,
      "grad_norm": 21.875,
      "learning_rate": 5.4e-06,
      "loss": 1.8973,
      "step": 180
    },
    {
      "epoch": 0.01,
      "grad_norm": 15.8125,
      "learning_rate": 5.7000000000000005e-06,
      "loss": 1.6868,
      "step": 190
    },
    {
      "epoch": 0.01,
      "grad_norm": 19.75,
      "learning_rate": 6e-06,
      "loss": 1.6079,
      "step": 200
    },
    {
      "epoch": 0.01,
      "grad_norm": 19.5,
      "learning_rate": 6.3e-06,
      "loss": 1.4638,
      "step": 210
    },
    {
      "epoch": 0.01,
      "grad_norm": 18.625,
      "learning_rate": 6.6e-06,
      "loss": 1.8714,
      "step": 220
    },
    {
      "epoch": 0.01,
      "grad_norm": 24.25,
      "learning_rate": 6.900000000000001e-06,
      "loss": 1.9379,
      "step": 230
    },
    {
      "epoch": 0.01,
      "grad_norm": 13.0,
      "learning_rate": 7.2e-06,
      "loss": 1.364,
      "step": 240
    },
    {
      "epoch": 0.01,
      "grad_norm": 17.75,
      "learning_rate": 7.5e-06,
      "loss": 1.8867,
      "step": 250
    },
    {
      "epoch": 0.01,
      "grad_norm": 14.0,
      "learning_rate": 7.8e-06,
      "loss": 1.9215,
      "step": 260
    },
    {
      "epoch": 0.01,
      "grad_norm": 23.5,
      "learning_rate": 8.1e-06,
      "loss": 1.8065,
      "step": 270
    },
    {
      "epoch": 0.01,
      "grad_norm": 17.25,
      "learning_rate": 8.400000000000001e-06,
      "loss": 1.6864,
      "step": 280
    },
    {
      "epoch": 0.01,
      "grad_norm": 25.25,
      "learning_rate": 8.7e-06,
      "loss": 1.6924,
      "step": 290
    },
    {
      "epoch": 0.01,
      "grad_norm": 25.0,
      "learning_rate": 9e-06,
      "loss": 1.7671,
      "step": 300
    },
    {
      "epoch": 0.02,
      "grad_norm": 18.375,
      "learning_rate": 9.3e-06,
      "loss": 1.8781,
      "step": 310
    },
    {
      "epoch": 0.02,
      "grad_norm": 20.5,
      "learning_rate": 9.600000000000001e-06,
      "loss": 1.454,
      "step": 320
    },
    {
      "epoch": 0.02,
      "grad_norm": 14.875,
      "learning_rate": 9.9e-06,
      "loss": 1.2016,
      "step": 330
    },
    {
      "epoch": 0.02,
      "grad_norm": 23.625,
      "learning_rate": 1.02e-05,
      "loss": 1.6703,
      "step": 340
    },
    {
      "epoch": 0.02,
      "grad_norm": 26.125,
      "learning_rate": 1.05e-05,
      "loss": 1.3712,
      "step": 350
    },
    {
      "epoch": 0.02,
      "grad_norm": 18.375,
      "learning_rate": 1.08e-05,
      "loss": 1.3751,
      "step": 360
    },
    {
      "epoch": 0.02,
      "grad_norm": 17.625,
      "learning_rate": 1.11e-05,
      "loss": 1.7888,
      "step": 370
    },
    {
      "epoch": 0.02,
      "grad_norm": 34.25,
      "learning_rate": 1.1400000000000001e-05,
      "loss": 1.5242,
      "step": 380
    },
    {
      "epoch": 0.02,
      "grad_norm": 22.5,
      "learning_rate": 1.1700000000000001e-05,
      "loss": 1.5371,
      "step": 390
    },
    {
      "epoch": 0.02,
      "grad_norm": 31.375,
      "learning_rate": 1.2e-05,
      "loss": 1.3818,
      "step": 400
    },
    {
      "epoch": 0.02,
      "grad_norm": 13.375,
      "learning_rate": 1.2299999999999999e-05,
      "loss": 1.9301,
      "step": 410
    },
    {
      "epoch": 0.02,
      "grad_norm": 17.25,
      "learning_rate": 1.26e-05,
      "loss": 1.7813,
      "step": 420
    },
    {
      "epoch": 0.02,
      "grad_norm": 22.125,
      "learning_rate": 1.29e-05,
      "loss": 1.6075,
      "step": 430
    },
    {
      "epoch": 0.02,
      "grad_norm": 25.125,
      "learning_rate": 1.32e-05,
      "loss": 1.9157,
      "step": 440
    },
    {
      "epoch": 0.02,
      "grad_norm": 16.375,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 1.6057,
      "step": 450
    },
    {
      "epoch": 0.02,
      "grad_norm": 19.0,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 1.6342,
      "step": 460
    },
    {
      "epoch": 0.02,
      "grad_norm": 13.625,
      "learning_rate": 1.4099999999999999e-05,
      "loss": 1.4012,
      "step": 470
    },
    {
      "epoch": 0.02,
      "grad_norm": 14.0625,
      "learning_rate": 1.44e-05,
      "loss": 1.853,
      "step": 480
    },
    {
      "epoch": 0.02,
      "grad_norm": 21.375,
      "learning_rate": 1.47e-05,
      "loss": 1.6546,
      "step": 490
    },
    {
      "epoch": 0.03,
      "grad_norm": 20.0,
      "learning_rate": 1.5e-05,
      "loss": 1.3404,
      "step": 500
    },
    {
      "epoch": 0.03,
      "grad_norm": 18.25,
      "learning_rate": 1.4984210526315789e-05,
      "loss": 1.5801,
      "step": 510
    },
    {
      "epoch": 0.03,
      "grad_norm": 27.5,
      "learning_rate": 1.496842105263158e-05,
      "loss": 1.5567,
      "step": 520
    },
    {
      "epoch": 0.03,
      "grad_norm": 28.25,
      "learning_rate": 1.4952631578947368e-05,
      "loss": 1.7417,
      "step": 530
    },
    {
      "epoch": 0.03,
      "grad_norm": 27.0,
      "learning_rate": 1.4936842105263158e-05,
      "loss": 1.6283,
      "step": 540
    },
    {
      "epoch": 0.03,
      "grad_norm": 25.5,
      "learning_rate": 1.4921052631578947e-05,
      "loss": 1.8534,
      "step": 550
    },
    {
      "epoch": 0.03,
      "grad_norm": 11.625,
      "learning_rate": 1.4905263157894737e-05,
      "loss": 1.668,
      "step": 560
    },
    {
      "epoch": 0.03,
      "grad_norm": 39.75,
      "learning_rate": 1.4889473684210526e-05,
      "loss": 1.4711,
      "step": 570
    },
    {
      "epoch": 0.03,
      "grad_norm": 20.125,
      "learning_rate": 1.4873684210526315e-05,
      "loss": 1.4484,
      "step": 580
    },
    {
      "epoch": 0.03,
      "grad_norm": 15.3125,
      "learning_rate": 1.4857894736842107e-05,
      "loss": 1.6434,
      "step": 590
    },
    {
      "epoch": 0.03,
      "grad_norm": 21.375,
      "learning_rate": 1.4842105263157895e-05,
      "loss": 1.8766,
      "step": 600
    },
    {
      "epoch": 0.03,
      "grad_norm": 22.125,
      "learning_rate": 1.4826315789473686e-05,
      "loss": 1.329,
      "step": 610
    },
    {
      "epoch": 0.03,
      "grad_norm": 22.5,
      "learning_rate": 1.4810526315789474e-05,
      "loss": 1.5956,
      "step": 620
    },
    {
      "epoch": 0.03,
      "grad_norm": 9.125,
      "learning_rate": 1.4794736842105265e-05,
      "loss": 1.5795,
      "step": 630
    },
    {
      "epoch": 0.03,
      "grad_norm": 15.25,
      "learning_rate": 1.4778947368421053e-05,
      "loss": 1.7082,
      "step": 640
    },
    {
      "epoch": 0.03,
      "grad_norm": 25.0,
      "learning_rate": 1.4763157894736842e-05,
      "loss": 1.7773,
      "step": 650
    },
    {
      "epoch": 0.03,
      "grad_norm": 25.5,
      "learning_rate": 1.4747368421052632e-05,
      "loss": 1.3858,
      "step": 660
    },
    {
      "epoch": 0.03,
      "grad_norm": 19.75,
      "learning_rate": 1.4731578947368421e-05,
      "loss": 1.6927,
      "step": 670
    },
    {
      "epoch": 0.03,
      "grad_norm": 10.75,
      "learning_rate": 1.4715789473684211e-05,
      "loss": 1.5281,
      "step": 680
    },
    {
      "epoch": 0.03,
      "grad_norm": 30.125,
      "learning_rate": 1.47e-05,
      "loss": 1.3842,
      "step": 690
    },
    {
      "epoch": 0.04,
      "grad_norm": 41.5,
      "learning_rate": 1.468421052631579e-05,
      "loss": 1.7584,
      "step": 700
    },
    {
      "epoch": 0.04,
      "grad_norm": 18.875,
      "learning_rate": 1.4668421052631579e-05,
      "loss": 1.5485,
      "step": 710
    },
    {
      "epoch": 0.04,
      "grad_norm": 35.25,
      "learning_rate": 1.4652631578947367e-05,
      "loss": 1.61,
      "step": 720
    },
    {
      "epoch": 0.04,
      "grad_norm": 19.25,
      "learning_rate": 1.4636842105263158e-05,
      "loss": 1.6709,
      "step": 730
    },
    {
      "epoch": 0.04,
      "grad_norm": 17.5,
      "learning_rate": 1.4621052631578946e-05,
      "loss": 1.4464,
      "step": 740
    },
    {
      "epoch": 0.04,
      "grad_norm": 18.625,
      "learning_rate": 1.4605263157894737e-05,
      "loss": 1.5036,
      "step": 750
    },
    {
      "epoch": 0.04,
      "grad_norm": 22.25,
      "learning_rate": 1.4589473684210527e-05,
      "loss": 1.6983,
      "step": 760
    },
    {
      "epoch": 0.04,
      "grad_norm": 32.5,
      "learning_rate": 1.4573684210526317e-05,
      "loss": 1.4551,
      "step": 770
    },
    {
      "epoch": 0.04,
      "grad_norm": 18.625,
      "learning_rate": 1.4557894736842106e-05,
      "loss": 1.2903,
      "step": 780
    },
    {
      "epoch": 0.04,
      "grad_norm": 25.875,
      "learning_rate": 1.4542105263157895e-05,
      "loss": 1.5937,
      "step": 790
    },
    {
      "epoch": 0.04,
      "grad_norm": 24.125,
      "learning_rate": 1.4526315789473685e-05,
      "loss": 1.5994,
      "step": 800
    },
    {
      "epoch": 0.04,
      "grad_norm": 26.375,
      "learning_rate": 1.4510526315789474e-05,
      "loss": 1.4018,
      "step": 810
    },
    {
      "epoch": 0.04,
      "grad_norm": 18.75,
      "learning_rate": 1.4494736842105264e-05,
      "loss": 1.1078,
      "step": 820
    },
    {
      "epoch": 0.04,
      "grad_norm": 19.25,
      "learning_rate": 1.4478947368421053e-05,
      "loss": 1.5669,
      "step": 830
    },
    {
      "epoch": 0.04,
      "grad_norm": 16.75,
      "learning_rate": 1.4463157894736843e-05,
      "loss": 1.325,
      "step": 840
    },
    {
      "epoch": 0.04,
      "grad_norm": 18.5,
      "learning_rate": 1.4447368421052632e-05,
      "loss": 1.3645,
      "step": 850
    },
    {
      "epoch": 0.04,
      "grad_norm": 26.125,
      "learning_rate": 1.443157894736842e-05,
      "loss": 1.5002,
      "step": 860
    },
    {
      "epoch": 0.04,
      "grad_norm": 22.875,
      "learning_rate": 1.441578947368421e-05,
      "loss": 1.3608,
      "step": 870
    },
    {
      "epoch": 0.04,
      "grad_norm": 20.625,
      "learning_rate": 1.44e-05,
      "loss": 1.5648,
      "step": 880
    },
    {
      "epoch": 0.04,
      "grad_norm": 32.0,
      "learning_rate": 1.438421052631579e-05,
      "loss": 1.1969,
      "step": 890
    },
    {
      "epoch": 0.04,
      "grad_norm": 18.25,
      "learning_rate": 1.4368421052631578e-05,
      "loss": 1.5523,
      "step": 900
    },
    {
      "epoch": 0.05,
      "grad_norm": 27.5,
      "learning_rate": 1.4352631578947369e-05,
      "loss": 1.5062,
      "step": 910
    },
    {
      "epoch": 0.05,
      "grad_norm": 14.75,
      "learning_rate": 1.4336842105263159e-05,
      "loss": 1.5052,
      "step": 920
    },
    {
      "epoch": 0.05,
      "grad_norm": 12.8125,
      "learning_rate": 1.4321052631578948e-05,
      "loss": 1.6696,
      "step": 930
    },
    {
      "epoch": 0.05,
      "grad_norm": 18.0,
      "learning_rate": 1.4305263157894738e-05,
      "loss": 1.2593,
      "step": 940
    },
    {
      "epoch": 0.05,
      "grad_norm": 22.0,
      "learning_rate": 1.4289473684210527e-05,
      "loss": 0.9601,
      "step": 950
    },
    {
      "epoch": 0.05,
      "grad_norm": 17.5,
      "learning_rate": 1.4273684210526317e-05,
      "loss": 1.3436,
      "step": 960
    },
    {
      "epoch": 0.05,
      "grad_norm": 29.25,
      "learning_rate": 1.4257894736842106e-05,
      "loss": 1.9493,
      "step": 970
    },
    {
      "epoch": 0.05,
      "grad_norm": 14.375,
      "learning_rate": 1.4242105263157896e-05,
      "loss": 1.3317,
      "step": 980
    },
    {
      "epoch": 0.05,
      "grad_norm": 12.625,
      "learning_rate": 1.4226315789473685e-05,
      "loss": 1.263,
      "step": 990
    },
    {
      "epoch": 0.05,
      "grad_norm": 25.625,
      "learning_rate": 1.4210526315789473e-05,
      "loss": 1.4426,
      "step": 1000
    },
    {
      "epoch": 0.05,
      "grad_norm": 27.75,
      "learning_rate": 1.4194736842105264e-05,
      "loss": 1.738,
      "step": 1010
    },
    {
      "epoch": 0.05,
      "grad_norm": 20.875,
      "learning_rate": 1.4178947368421052e-05,
      "loss": 1.474,
      "step": 1020
    },
    {
      "epoch": 0.05,
      "grad_norm": 23.25,
      "learning_rate": 1.4163157894736843e-05,
      "loss": 1.8161,
      "step": 1030
    },
    {
      "epoch": 0.05,
      "grad_norm": 18.25,
      "learning_rate": 1.4147368421052631e-05,
      "loss": 1.2116,
      "step": 1040
    },
    {
      "epoch": 0.05,
      "grad_norm": 25.25,
      "learning_rate": 1.4131578947368422e-05,
      "loss": 1.7062,
      "step": 1050
    },
    {
      "epoch": 0.05,
      "grad_norm": 21.125,
      "learning_rate": 1.411578947368421e-05,
      "loss": 1.5641,
      "step": 1060
    },
    {
      "epoch": 0.05,
      "grad_norm": 15.625,
      "learning_rate": 1.4099999999999999e-05,
      "loss": 1.5193,
      "step": 1070
    },
    {
      "epoch": 0.05,
      "grad_norm": 20.75,
      "learning_rate": 1.408421052631579e-05,
      "loss": 1.4775,
      "step": 1080
    },
    {
      "epoch": 0.05,
      "grad_norm": 16.375,
      "learning_rate": 1.406842105263158e-05,
      "loss": 1.4353,
      "step": 1090
    },
    {
      "epoch": 0.06,
      "grad_norm": 29.375,
      "learning_rate": 1.405263157894737e-05,
      "loss": 1.6741,
      "step": 1100
    },
    {
      "epoch": 0.06,
      "grad_norm": 36.5,
      "learning_rate": 1.4036842105263158e-05,
      "loss": 1.4158,
      "step": 1110
    },
    {
      "epoch": 0.06,
      "grad_norm": 20.5,
      "learning_rate": 1.4021052631578949e-05,
      "loss": 1.2708,
      "step": 1120
    },
    {
      "epoch": 0.06,
      "grad_norm": 19.25,
      "learning_rate": 1.4005263157894737e-05,
      "loss": 1.1376,
      "step": 1130
    },
    {
      "epoch": 0.06,
      "grad_norm": 18.5,
      "learning_rate": 1.3989473684210526e-05,
      "loss": 1.3171,
      "step": 1140
    },
    {
      "epoch": 0.06,
      "grad_norm": 21.375,
      "learning_rate": 1.3973684210526316e-05,
      "loss": 1.5621,
      "step": 1150
    },
    {
      "epoch": 0.06,
      "grad_norm": 19.5,
      "learning_rate": 1.3957894736842105e-05,
      "loss": 1.8743,
      "step": 1160
    },
    {
      "epoch": 0.06,
      "grad_norm": 13.4375,
      "learning_rate": 1.3942105263157895e-05,
      "loss": 1.5132,
      "step": 1170
    },
    {
      "epoch": 0.06,
      "grad_norm": 22.5,
      "learning_rate": 1.3926315789473684e-05,
      "loss": 1.6475,
      "step": 1180
    },
    {
      "epoch": 0.06,
      "grad_norm": 13.25,
      "learning_rate": 1.3910526315789474e-05,
      "loss": 1.4806,
      "step": 1190
    },
    {
      "epoch": 0.06,
      "grad_norm": 19.0,
      "learning_rate": 1.3894736842105263e-05,
      "loss": 1.5761,
      "step": 1200
    },
    {
      "epoch": 0.06,
      "grad_norm": 17.125,
      "learning_rate": 1.3878947368421052e-05,
      "loss": 1.88,
      "step": 1210
    },
    {
      "epoch": 0.06,
      "grad_norm": 19.0,
      "learning_rate": 1.3863157894736842e-05,
      "loss": 1.3399,
      "step": 1220
    },
    {
      "epoch": 0.06,
      "grad_norm": 21.25,
      "learning_rate": 1.384736842105263e-05,
      "loss": 1.6421,
      "step": 1230
    },
    {
      "epoch": 0.06,
      "grad_norm": 19.625,
      "learning_rate": 1.3831578947368421e-05,
      "loss": 1.7212,
      "step": 1240
    },
    {
      "epoch": 0.06,
      "grad_norm": 17.75,
      "learning_rate": 1.3815789473684211e-05,
      "loss": 1.6537,
      "step": 1250
    },
    {
      "epoch": 0.06,
      "grad_norm": 13.5625,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 1.6197,
      "step": 1260
    },
    {
      "epoch": 0.06,
      "grad_norm": 18.875,
      "learning_rate": 1.378421052631579e-05,
      "loss": 1.5681,
      "step": 1270
    },
    {
      "epoch": 0.06,
      "grad_norm": 40.75,
      "learning_rate": 1.3768421052631579e-05,
      "loss": 1.3511,
      "step": 1280
    },
    {
      "epoch": 0.06,
      "grad_norm": 13.0,
      "learning_rate": 1.375263157894737e-05,
      "loss": 1.3007,
      "step": 1290
    },
    {
      "epoch": 0.07,
      "grad_norm": 10.9375,
      "learning_rate": 1.3736842105263158e-05,
      "loss": 1.18,
      "step": 1300
    },
    {
      "epoch": 0.07,
      "grad_norm": 19.125,
      "learning_rate": 1.3721052631578948e-05,
      "loss": 1.6356,
      "step": 1310
    },
    {
      "epoch": 0.07,
      "grad_norm": 16.375,
      "learning_rate": 1.3705263157894737e-05,
      "loss": 1.4107,
      "step": 1320
    },
    {
      "epoch": 0.07,
      "grad_norm": 26.0,
      "learning_rate": 1.3689473684210527e-05,
      "loss": 1.3306,
      "step": 1330
    },
    {
      "epoch": 0.07,
      "grad_norm": 22.5,
      "learning_rate": 1.3673684210526316e-05,
      "loss": 1.5359,
      "step": 1340
    },
    {
      "epoch": 0.07,
      "grad_norm": 20.0,
      "learning_rate": 1.3657894736842106e-05,
      "loss": 1.404,
      "step": 1350
    },
    {
      "epoch": 0.07,
      "grad_norm": 19.125,
      "learning_rate": 1.3642105263157895e-05,
      "loss": 1.2873,
      "step": 1360
    },
    {
      "epoch": 0.07,
      "grad_norm": 11.1875,
      "learning_rate": 1.3626315789473684e-05,
      "loss": 1.3952,
      "step": 1370
    },
    {
      "epoch": 0.07,
      "grad_norm": 22.0,
      "learning_rate": 1.3610526315789474e-05,
      "loss": 1.4608,
      "step": 1380
    },
    {
      "epoch": 0.07,
      "grad_norm": 20.875,
      "learning_rate": 1.3594736842105263e-05,
      "loss": 1.5216,
      "step": 1390
    },
    {
      "epoch": 0.07,
      "grad_norm": 18.5,
      "learning_rate": 1.3578947368421053e-05,
      "loss": 1.4667,
      "step": 1400
    },
    {
      "epoch": 0.07,
      "grad_norm": 31.75,
      "learning_rate": 1.3563157894736842e-05,
      "loss": 1.6675,
      "step": 1410
    },
    {
      "epoch": 0.07,
      "grad_norm": 22.125,
      "learning_rate": 1.3547368421052634e-05,
      "loss": 1.4568,
      "step": 1420
    },
    {
      "epoch": 0.07,
      "grad_norm": 15.75,
      "learning_rate": 1.3531578947368422e-05,
      "loss": 1.4352,
      "step": 1430
    },
    {
      "epoch": 0.07,
      "grad_norm": 19.75,
      "learning_rate": 1.3515789473684211e-05,
      "loss": 1.4362,
      "step": 1440
    },
    {
      "epoch": 0.07,
      "grad_norm": 29.5,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 1.2622,
      "step": 1450
    },
    {
      "epoch": 0.07,
      "grad_norm": 13.75,
      "learning_rate": 1.348421052631579e-05,
      "loss": 1.4709,
      "step": 1460
    },
    {
      "epoch": 0.07,
      "grad_norm": 18.25,
      "learning_rate": 1.346842105263158e-05,
      "loss": 1.3638,
      "step": 1470
    },
    {
      "epoch": 0.07,
      "grad_norm": 22.375,
      "learning_rate": 1.3452631578947369e-05,
      "loss": 1.7496,
      "step": 1480
    },
    {
      "epoch": 0.07,
      "grad_norm": 24.625,
      "learning_rate": 1.343684210526316e-05,
      "loss": 1.7741,
      "step": 1490
    },
    {
      "epoch": 0.07,
      "grad_norm": 19.0,
      "learning_rate": 1.3421052631578948e-05,
      "loss": 1.3499,
      "step": 1500
    },
    {
      "epoch": 0.08,
      "grad_norm": 31.625,
      "learning_rate": 1.3405263157894736e-05,
      "loss": 1.3528,
      "step": 1510
    },
    {
      "epoch": 0.08,
      "grad_norm": 25.75,
      "learning_rate": 1.3389473684210527e-05,
      "loss": 1.4893,
      "step": 1520
    },
    {
      "epoch": 0.08,
      "grad_norm": 12.3125,
      "learning_rate": 1.3373684210526315e-05,
      "loss": 1.4222,
      "step": 1530
    },
    {
      "epoch": 0.08,
      "grad_norm": 23.0,
      "learning_rate": 1.3357894736842106e-05,
      "loss": 1.7875,
      "step": 1540
    },
    {
      "epoch": 0.08,
      "grad_norm": 30.375,
      "learning_rate": 1.3342105263157894e-05,
      "loss": 1.426,
      "step": 1550
    },
    {
      "epoch": 0.08,
      "grad_norm": 14.25,
      "learning_rate": 1.3326315789473685e-05,
      "loss": 1.8679,
      "step": 1560
    },
    {
      "epoch": 0.08,
      "grad_norm": 19.875,
      "learning_rate": 1.3310526315789473e-05,
      "loss": 1.4155,
      "step": 1570
    },
    {
      "epoch": 0.08,
      "grad_norm": 29.125,
      "learning_rate": 1.3294736842105262e-05,
      "loss": 1.7515,
      "step": 1580
    },
    {
      "epoch": 0.08,
      "grad_norm": 18.125,
      "learning_rate": 1.3278947368421054e-05,
      "loss": 1.4494,
      "step": 1590
    },
    {
      "epoch": 0.08,
      "grad_norm": 18.75,
      "learning_rate": 1.3263157894736843e-05,
      "loss": 1.4159,
      "step": 1600
    },
    {
      "epoch": 0.08,
      "grad_norm": 24.75,
      "learning_rate": 1.3247368421052633e-05,
      "loss": 1.5582,
      "step": 1610
    },
    {
      "epoch": 0.08,
      "grad_norm": 24.25,
      "learning_rate": 1.3231578947368422e-05,
      "loss": 1.5761,
      "step": 1620
    },
    {
      "epoch": 0.08,
      "grad_norm": 30.375,
      "learning_rate": 1.3215789473684212e-05,
      "loss": 1.5033,
      "step": 1630
    },
    {
      "epoch": 0.08,
      "grad_norm": 16.75,
      "learning_rate": 1.32e-05,
      "loss": 1.4209,
      "step": 1640
    },
    {
      "epoch": 0.08,
      "grad_norm": 22.25,
      "learning_rate": 1.318421052631579e-05,
      "loss": 1.5761,
      "step": 1650
    },
    {
      "epoch": 0.08,
      "grad_norm": 25.0,
      "learning_rate": 1.316842105263158e-05,
      "loss": 1.4146,
      "step": 1660
    },
    {
      "epoch": 0.08,
      "grad_norm": 20.375,
      "learning_rate": 1.3152631578947368e-05,
      "loss": 1.2064,
      "step": 1670
    },
    {
      "epoch": 0.08,
      "grad_norm": 25.75,
      "learning_rate": 1.3136842105263159e-05,
      "loss": 1.1254,
      "step": 1680
    },
    {
      "epoch": 0.08,
      "grad_norm": 20.0,
      "learning_rate": 1.3121052631578947e-05,
      "loss": 1.5665,
      "step": 1690
    },
    {
      "epoch": 0.09,
      "grad_norm": 23.625,
      "learning_rate": 1.3105263157894738e-05,
      "loss": 1.5582,
      "step": 1700
    },
    {
      "epoch": 0.09,
      "grad_norm": 22.875,
      "learning_rate": 1.3089473684210526e-05,
      "loss": 1.2198,
      "step": 1710
    },
    {
      "epoch": 0.09,
      "grad_norm": 15.875,
      "learning_rate": 1.3073684210526315e-05,
      "loss": 1.4875,
      "step": 1720
    },
    {
      "epoch": 0.09,
      "grad_norm": 22.25,
      "learning_rate": 1.3057894736842105e-05,
      "loss": 1.3077,
      "step": 1730
    },
    {
      "epoch": 0.09,
      "grad_norm": 13.125,
      "learning_rate": 1.3042105263157894e-05,
      "loss": 1.571,
      "step": 1740
    },
    {
      "epoch": 0.09,
      "grad_norm": 16.875,
      "learning_rate": 1.3026315789473684e-05,
      "loss": 1.2261,
      "step": 1750
    },
    {
      "epoch": 0.09,
      "grad_norm": 12.0,
      "learning_rate": 1.3010526315789475e-05,
      "loss": 1.1795,
      "step": 1760
    },
    {
      "epoch": 0.09,
      "grad_norm": 23.0,
      "learning_rate": 1.2994736842105265e-05,
      "loss": 1.2311,
      "step": 1770
    },
    {
      "epoch": 0.09,
      "grad_norm": 10.4375,
      "learning_rate": 1.2978947368421054e-05,
      "loss": 1.5966,
      "step": 1780
    },
    {
      "epoch": 0.09,
      "grad_norm": 18.75,
      "learning_rate": 1.2963157894736842e-05,
      "loss": 1.5122,
      "step": 1790
    },
    {
      "epoch": 0.09,
      "grad_norm": 10.5,
      "learning_rate": 1.2947368421052633e-05,
      "loss": 1.2532,
      "step": 1800
    },
    {
      "epoch": 0.09,
      "grad_norm": 9.6875,
      "learning_rate": 1.2931578947368421e-05,
      "loss": 1.3394,
      "step": 1810
    },
    {
      "epoch": 0.09,
      "grad_norm": 19.375,
      "learning_rate": 1.2915789473684212e-05,
      "loss": 1.6067,
      "step": 1820
    },
    {
      "epoch": 0.09,
      "grad_norm": 23.0,
      "learning_rate": 1.29e-05,
      "loss": 1.4977,
      "step": 1830
    },
    {
      "epoch": 0.09,
      "grad_norm": 16.375,
      "learning_rate": 1.288421052631579e-05,
      "loss": 1.4339,
      "step": 1840
    },
    {
      "epoch": 0.09,
      "grad_norm": 19.625,
      "learning_rate": 1.2868421052631579e-05,
      "loss": 1.4789,
      "step": 1850
    },
    {
      "epoch": 0.09,
      "grad_norm": 11.125,
      "learning_rate": 1.2852631578947368e-05,
      "loss": 1.3857,
      "step": 1860
    },
    {
      "epoch": 0.09,
      "grad_norm": 9.3125,
      "learning_rate": 1.2836842105263158e-05,
      "loss": 1.4344,
      "step": 1870
    },
    {
      "epoch": 0.09,
      "grad_norm": 21.5,
      "learning_rate": 1.2821052631578947e-05,
      "loss": 1.416,
      "step": 1880
    },
    {
      "epoch": 0.09,
      "grad_norm": 23.875,
      "learning_rate": 1.2805263157894737e-05,
      "loss": 1.4628,
      "step": 1890
    },
    {
      "epoch": 0.1,
      "grad_norm": 16.5,
      "learning_rate": 1.2789473684210526e-05,
      "loss": 1.3098,
      "step": 1900
    },
    {
      "epoch": 0.1,
      "grad_norm": 21.375,
      "learning_rate": 1.2773684210526316e-05,
      "loss": 1.4423,
      "step": 1910
    },
    {
      "epoch": 0.1,
      "grad_norm": 18.625,
      "learning_rate": 1.2757894736842106e-05,
      "loss": 1.1756,
      "step": 1920
    },
    {
      "epoch": 0.1,
      "grad_norm": 24.625,
      "learning_rate": 1.2742105263157895e-05,
      "loss": 1.3788,
      "step": 1930
    },
    {
      "epoch": 0.1,
      "grad_norm": 14.1875,
      "learning_rate": 1.2726315789473685e-05,
      "loss": 1.2287,
      "step": 1940
    },
    {
      "epoch": 0.1,
      "grad_norm": 24.25,
      "learning_rate": 1.2710526315789474e-05,
      "loss": 1.1927,
      "step": 1950
    },
    {
      "epoch": 0.1,
      "grad_norm": 14.625,
      "learning_rate": 1.2694736842105264e-05,
      "loss": 1.5478,
      "step": 1960
    },
    {
      "epoch": 0.1,
      "grad_norm": 11.4375,
      "learning_rate": 1.2678947368421053e-05,
      "loss": 1.2426,
      "step": 1970
    },
    {
      "epoch": 0.1,
      "grad_norm": 30.25,
      "learning_rate": 1.2663157894736843e-05,
      "loss": 1.1653,
      "step": 1980
    },
    {
      "epoch": 0.1,
      "grad_norm": 21.875,
      "learning_rate": 1.2647368421052632e-05,
      "loss": 1.3693,
      "step": 1990
    },
    {
      "epoch": 0.1,
      "grad_norm": 14.3125,
      "learning_rate": 1.263157894736842e-05,
      "loss": 1.7071,
      "step": 2000
    },
    {
      "epoch": 0.1,
      "eval_loss": 1.4587359428405762,
      "eval_runtime": 31.2193,
      "eval_samples_per_second": 32.032,
      "eval_steps_per_second": 32.032,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 2000,
  "total_flos": 1.613922041856e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}