{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 76,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013157894736842105,
      "grad_norm": 34.99433898925781,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.595,
      "step": 1
    },
    {
      "epoch": 0.02631578947368421,
      "grad_norm": 35.6848258972168,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.6447,
      "step": 2
    },
    {
      "epoch": 0.039473684210526314,
      "grad_norm": 35.07997512817383,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.5819,
      "step": 3
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 34.3863525390625,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.5739,
      "step": 4
    },
    {
      "epoch": 0.06578947368421052,
      "grad_norm": 35.443077087402344,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.6071,
      "step": 5
    },
    {
      "epoch": 0.07894736842105263,
      "grad_norm": 34.70173263549805,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.5487,
      "step": 6
    },
    {
      "epoch": 0.09210526315789473,
      "grad_norm": 34.421295166015625,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.5494,
      "step": 7
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 35.152748107910156,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.5936,
      "step": 8
    },
    {
      "epoch": 0.11842105263157894,
      "grad_norm": 34.947021484375,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.5574,
      "step": 9
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 34.67315673828125,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.4894,
      "step": 10
    },
    {
      "epoch": 0.14473684210526316,
      "grad_norm": 34.679954528808594,
      "learning_rate": 5.5e-07,
      "loss": 2.4985,
      "step": 11
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 33.57002258300781,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.4339,
      "step": 12
    },
    {
      "epoch": 0.17105263157894737,
      "grad_norm": 33.517276763916016,
      "learning_rate": 6.5e-07,
      "loss": 2.4055,
      "step": 13
    },
    {
      "epoch": 0.18421052631578946,
      "grad_norm": 33.5312385559082,
      "learning_rate": 7.000000000000001e-07,
      "loss": 2.3806,
      "step": 14
    },
    {
      "epoch": 0.19736842105263158,
      "grad_norm": 32.01276779174805,
      "learning_rate": 7.5e-07,
      "loss": 2.2505,
      "step": 15
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 31.827980041503906,
      "learning_rate": 8.000000000000001e-07,
      "loss": 2.1359,
      "step": 16
    },
    {
      "epoch": 0.2236842105263158,
      "grad_norm": 31.437101364135742,
      "learning_rate": 8.500000000000001e-07,
      "loss": 2.1117,
      "step": 17
    },
    {
      "epoch": 0.23684210526315788,
      "grad_norm": 30.315187454223633,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.9795,
      "step": 18
    },
    {
      "epoch": 0.25,
      "grad_norm": 29.622655868530273,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.8472,
      "step": 19
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 28.628408432006836,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.7283,
      "step": 20
    },
    {
      "epoch": 0.27631578947368424,
      "grad_norm": 27.83180046081543,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.5942,
      "step": 21
    },
    {
      "epoch": 0.2894736842105263,
      "grad_norm": 26.911596298217773,
      "learning_rate": 1.1e-06,
      "loss": 1.4467,
      "step": 22
    },
    {
      "epoch": 0.3026315789473684,
      "grad_norm": 25.88102149963379,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.3007,
      "step": 23
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 25.146381378173828,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.1319,
      "step": 24
    },
    {
      "epoch": 0.32894736842105265,
      "grad_norm": 24.800382614135742,
      "learning_rate": 1.25e-06,
      "loss": 0.9359,
      "step": 25
    },
    {
      "epoch": 0.34210526315789475,
      "grad_norm": 24.648332595825195,
      "learning_rate": 1.3e-06,
      "loss": 0.7054,
      "step": 26
    },
    {
      "epoch": 0.35526315789473684,
      "grad_norm": 22.947620391845703,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.5209,
      "step": 27
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 17.80010414123535,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.3546,
      "step": 28
    },
    {
      "epoch": 0.3815789473684211,
      "grad_norm": 11.841789245605469,
      "learning_rate": 1.45e-06,
      "loss": 0.26,
      "step": 29
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 7.275839805603027,
      "learning_rate": 1.5e-06,
      "loss": 0.1808,
      "step": 30
    },
    {
      "epoch": 0.40789473684210525,
      "grad_norm": 4.6324543952941895,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.1464,
      "step": 31
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 3.1281485557556152,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.1079,
      "step": 32
    },
    {
      "epoch": 0.4342105263157895,
      "grad_norm": 2.062562942504883,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.0966,
      "step": 33
    },
    {
      "epoch": 0.4473684210526316,
      "grad_norm": 2.1343328952789307,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.088,
      "step": 34
    },
    {
      "epoch": 0.4605263157894737,
      "grad_norm": 1.6768524646759033,
      "learning_rate": 1.75e-06,
      "loss": 0.0783,
      "step": 35
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 1.0879229307174683,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0623,
      "step": 36
    },
    {
      "epoch": 0.4868421052631579,
      "grad_norm": 0.83177649974823,
      "learning_rate": 1.85e-06,
      "loss": 0.0655,
      "step": 37
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.5678385496139526,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0565,
      "step": 38
    },
    {
      "epoch": 0.5131578947368421,
      "grad_norm": 0.6994458436965942,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0491,
      "step": 39
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 0.711387038230896,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0507,
      "step": 40
    },
    {
      "epoch": 0.5394736842105263,
      "grad_norm": 0.7169735431671143,
      "learning_rate": 2.05e-06,
      "loss": 0.0478,
      "step": 41
    },
    {
      "epoch": 0.5526315789473685,
      "grad_norm": 0.603631317615509,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0507,
      "step": 42
    },
    {
      "epoch": 0.5657894736842105,
      "grad_norm": 0.617487907409668,
      "learning_rate": 2.15e-06,
      "loss": 0.043,
      "step": 43
    },
    {
      "epoch": 0.5789473684210527,
      "grad_norm": 0.4638065993785858,
      "learning_rate": 2.2e-06,
      "loss": 0.0472,
      "step": 44
    },
    {
      "epoch": 0.5921052631578947,
      "grad_norm": 0.5996385216712952,
      "learning_rate": 2.25e-06,
      "loss": 0.0429,
      "step": 45
    },
    {
      "epoch": 0.6052631578947368,
      "grad_norm": 0.39118286967277527,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.0421,
      "step": 46
    },
    {
      "epoch": 0.618421052631579,
      "grad_norm": 0.3118075728416443,
      "learning_rate": 2.35e-06,
      "loss": 0.0383,
      "step": 47
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.31731992959976196,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.041,
      "step": 48
    },
    {
      "epoch": 0.6447368421052632,
      "grad_norm": 0.5413194298744202,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0397,
      "step": 49
    },
    {
      "epoch": 0.6578947368421053,
      "grad_norm": 0.32958006858825684,
      "learning_rate": 2.5e-06,
      "loss": 0.0355,
      "step": 50
    },
    {
      "epoch": 0.6710526315789473,
      "grad_norm": 0.596309244632721,
      "learning_rate": 2.55e-06,
      "loss": 0.0413,
      "step": 51
    },
    {
      "epoch": 0.6842105263157895,
      "grad_norm": 0.4557362496852875,
      "learning_rate": 2.6e-06,
      "loss": 0.0461,
      "step": 52
    },
    {
      "epoch": 0.6973684210526315,
      "grad_norm": 0.3345410227775574,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0385,
      "step": 53
    },
    {
      "epoch": 0.7105263157894737,
      "grad_norm": 0.3047848343849182,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0383,
      "step": 54
    },
    {
      "epoch": 0.7236842105263158,
      "grad_norm": 0.43763449788093567,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.038,
      "step": 55
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 0.26870036125183105,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0374,
      "step": 56
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.38762542605400085,
      "learning_rate": 2.85e-06,
      "loss": 0.0349,
      "step": 57
    },
    {
      "epoch": 0.7631578947368421,
      "grad_norm": 0.27517396211624146,
      "learning_rate": 2.9e-06,
      "loss": 0.0398,
      "step": 58
    },
    {
      "epoch": 0.7763157894736842,
      "grad_norm": 0.30815261602401733,
      "learning_rate": 2.95e-06,
      "loss": 0.0364,
      "step": 59
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 0.30011361837387085,
      "learning_rate": 3e-06,
      "loss": 0.0307,
      "step": 60
    },
    {
      "epoch": 0.8026315789473685,
      "grad_norm": 0.3269154727458954,
      "learning_rate": 3.05e-06,
      "loss": 0.0344,
      "step": 61
    },
    {
      "epoch": 0.8157894736842105,
      "grad_norm": 0.3750869333744049,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0339,
      "step": 62
    },
    {
      "epoch": 0.8289473684210527,
      "grad_norm": 0.29285815358161926,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.034,
      "step": 63
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.4157550632953644,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0348,
      "step": 64
    },
    {
      "epoch": 0.8552631578947368,
      "grad_norm": 0.2852867543697357,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0319,
      "step": 65
    },
    {
      "epoch": 0.868421052631579,
      "grad_norm": 0.4384031593799591,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0319,
      "step": 66
    },
    {
      "epoch": 0.881578947368421,
      "grad_norm": 0.4003254771232605,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0347,
      "step": 67
    },
    {
      "epoch": 0.8947368421052632,
      "grad_norm": 0.49913832545280457,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0347,
      "step": 68
    },
    {
      "epoch": 0.9078947368421053,
      "grad_norm": 0.22642269730567932,
      "learning_rate": 3.45e-06,
      "loss": 0.0306,
      "step": 69
    },
    {
      "epoch": 0.9210526315789473,
      "grad_norm": 0.34004101157188416,
      "learning_rate": 3.5e-06,
      "loss": 0.0337,
      "step": 70
    },
    {
      "epoch": 0.9342105263157895,
      "grad_norm": 0.21503636240959167,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0311,
      "step": 71
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 0.33802086114883423,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0293,
      "step": 72
    },
    {
      "epoch": 0.9605263157894737,
      "grad_norm": 0.2488064169883728,
      "learning_rate": 3.65e-06,
      "loss": 0.0318,
      "step": 73
    },
    {
      "epoch": 0.9736842105263158,
      "grad_norm": 0.21124528348445892,
      "learning_rate": 3.7e-06,
      "loss": 0.0293,
      "step": 74
    },
    {
      "epoch": 0.9868421052631579,
      "grad_norm": 0.3108712136745453,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.0288,
      "step": 75
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.33483418822288513,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.031,
      "step": 76
    }
  ],
  "logging_steps": 1,
  "max_steps": 456,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 76,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.889219280312205e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}