{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 72,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013888888888888888,
      "grad_norm": 95.33702850341797,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 5.3695,
      "step": 1
    },
    {
      "epoch": 0.027777777777777776,
      "grad_norm": 96.05199432373047,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 5.4178,
      "step": 2
    },
    {
      "epoch": 0.041666666666666664,
      "grad_norm": 92.71851348876953,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 5.4472,
      "step": 3
    },
    {
      "epoch": 0.05555555555555555,
      "grad_norm": 95.78789520263672,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 5.4114,
      "step": 4
    },
    {
      "epoch": 0.06944444444444445,
      "grad_norm": 95.62769317626953,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 5.3612,
      "step": 5
    },
    {
      "epoch": 0.08333333333333333,
      "grad_norm": 94.85267639160156,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 5.4065,
      "step": 6
    },
    {
      "epoch": 0.09722222222222222,
      "grad_norm": 93.81153869628906,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 5.364,
      "step": 7
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 95.33504486083984,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 5.4484,
      "step": 8
    },
    {
      "epoch": 0.125,
      "grad_norm": 94.11618041992188,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 5.3494,
      "step": 9
    },
    {
      "epoch": 0.1388888888888889,
      "grad_norm": 93.71621704101562,
      "learning_rate": 5.000000000000001e-07,
      "loss": 5.2325,
      "step": 10
    },
    {
      "epoch": 0.1527777777777778,
      "grad_norm": 92.6498031616211,
      "learning_rate": 5.5e-07,
      "loss": 5.1224,
      "step": 11
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 89.05821228027344,
      "learning_rate": 6.000000000000001e-07,
      "loss": 5.0484,
      "step": 12
    },
    {
      "epoch": 0.18055555555555555,
      "grad_norm": 87.52056884765625,
      "learning_rate": 6.5e-07,
      "loss": 4.8831,
      "step": 13
    },
    {
      "epoch": 0.19444444444444445,
      "grad_norm": 87.93170928955078,
      "learning_rate": 7.000000000000001e-07,
      "loss": 4.7723,
      "step": 14
    },
    {
      "epoch": 0.20833333333333334,
      "grad_norm": 84.07933044433594,
      "learning_rate": 7.5e-07,
      "loss": 4.5843,
      "step": 15
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 82.16165161132812,
      "learning_rate": 8.000000000000001e-07,
      "loss": 4.4273,
      "step": 16
    },
    {
      "epoch": 0.2361111111111111,
      "grad_norm": 78.94780731201172,
      "learning_rate": 8.500000000000001e-07,
      "loss": 4.155,
      "step": 17
    },
    {
      "epoch": 0.25,
      "grad_norm": 75.85335540771484,
      "learning_rate": 9.000000000000001e-07,
      "loss": 3.9005,
      "step": 18
    },
    {
      "epoch": 0.2638888888888889,
      "grad_norm": 71.74459075927734,
      "learning_rate": 9.500000000000001e-07,
      "loss": 3.5885,
      "step": 19
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 67.66385650634766,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 3.308,
      "step": 20
    },
    {
      "epoch": 0.2916666666666667,
      "grad_norm": 64.22036743164062,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 3.057,
      "step": 21
    },
    {
      "epoch": 0.3055555555555556,
      "grad_norm": 60.24971008300781,
      "learning_rate": 1.1e-06,
      "loss": 2.7939,
      "step": 22
    },
    {
      "epoch": 0.3194444444444444,
      "grad_norm": 57.68950271606445,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 2.5539,
      "step": 23
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 56.10525894165039,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 2.2688,
      "step": 24
    },
    {
      "epoch": 0.3472222222222222,
      "grad_norm": 54.84128189086914,
      "learning_rate": 1.25e-06,
      "loss": 2.009,
      "step": 25
    },
    {
      "epoch": 0.3611111111111111,
      "grad_norm": 53.96566390991211,
      "learning_rate": 1.3e-06,
      "loss": 1.6682,
      "step": 26
    },
    {
      "epoch": 0.375,
      "grad_norm": 53.859439849853516,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 1.3807,
      "step": 27
    },
    {
      "epoch": 0.3888888888888889,
      "grad_norm": 53.22216796875,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 1.1229,
      "step": 28
    },
    {
      "epoch": 0.4027777777777778,
      "grad_norm": 50.3905143737793,
      "learning_rate": 1.45e-06,
      "loss": 0.8454,
      "step": 29
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 40.14346694946289,
      "learning_rate": 1.5e-06,
      "loss": 0.5817,
      "step": 30
    },
    {
      "epoch": 0.4305555555555556,
      "grad_norm": 34.043235778808594,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.4234,
      "step": 31
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 28.703292846679688,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.3074,
      "step": 32
    },
    {
      "epoch": 0.4583333333333333,
      "grad_norm": 22.456626892089844,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.2146,
      "step": 33
    },
    {
      "epoch": 0.4722222222222222,
      "grad_norm": 15.350156784057617,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.1103,
      "step": 34
    },
    {
      "epoch": 0.4861111111111111,
      "grad_norm": 6.5175580978393555,
      "learning_rate": 1.75e-06,
      "loss": 0.0574,
      "step": 35
    },
    {
      "epoch": 0.5,
      "grad_norm": 6.813859939575195,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0497,
      "step": 36
    },
    {
      "epoch": 0.5138888888888888,
      "grad_norm": 3.5350818634033203,
      "learning_rate": 1.85e-06,
      "loss": 0.0199,
      "step": 37
    },
    {
      "epoch": 0.5277777777777778,
      "grad_norm": 4.9657135009765625,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0245,
      "step": 38
    },
    {
      "epoch": 0.5416666666666666,
      "grad_norm": 1.2266570329666138,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0087,
      "step": 39
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 4.982794761657715,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.048,
      "step": 40
    },
    {
      "epoch": 0.5694444444444444,
      "grad_norm": 7.6967387199401855,
      "learning_rate": 2.05e-06,
      "loss": 0.0449,
      "step": 41
    },
    {
      "epoch": 0.5833333333333334,
      "grad_norm": 2.9466323852539062,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0098,
      "step": 42
    },
    {
      "epoch": 0.5972222222222222,
      "grad_norm": 2.191035747528076,
      "learning_rate": 2.15e-06,
      "loss": 0.0087,
      "step": 43
    },
    {
      "epoch": 0.6111111111111112,
      "grad_norm": 7.3228840827941895,
      "learning_rate": 2.2e-06,
      "loss": 0.0339,
      "step": 44
    },
    {
      "epoch": 0.625,
      "grad_norm": 4.737412929534912,
      "learning_rate": 2.25e-06,
      "loss": 0.0149,
      "step": 45
    },
    {
      "epoch": 0.6388888888888888,
      "grad_norm": 1.7612887620925903,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.0071,
      "step": 46
    },
    {
      "epoch": 0.6527777777777778,
      "grad_norm": 3.344369649887085,
      "learning_rate": 2.35e-06,
      "loss": 0.0196,
      "step": 47
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 2.2957704067230225,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0216,
      "step": 48
    },
    {
      "epoch": 0.6805555555555556,
      "grad_norm": 1.83970308303833,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0221,
      "step": 49
    },
    {
      "epoch": 0.6944444444444444,
      "grad_norm": 1.547287106513977,
      "learning_rate": 2.5e-06,
      "loss": 0.0061,
      "step": 50
    },
    {
      "epoch": 0.7083333333333334,
      "grad_norm": 1.6580262184143066,
      "learning_rate": 2.55e-06,
      "loss": 0.0127,
      "step": 51
    },
    {
      "epoch": 0.7222222222222222,
      "grad_norm": 1.8863849639892578,
      "learning_rate": 2.6e-06,
      "loss": 0.0124,
      "step": 52
    },
    {
      "epoch": 0.7361111111111112,
      "grad_norm": 1.2086963653564453,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0085,
      "step": 53
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.4265427589416504,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0255,
      "step": 54
    },
    {
      "epoch": 0.7638888888888888,
      "grad_norm": 2.0333855152130127,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.0145,
      "step": 55
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 2.9129791259765625,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0228,
      "step": 56
    },
    {
      "epoch": 0.7916666666666666,
      "grad_norm": 0.8359552621841431,
      "learning_rate": 2.85e-06,
      "loss": 0.0109,
      "step": 57
    },
    {
      "epoch": 0.8055555555555556,
      "grad_norm": 1.677287220954895,
      "learning_rate": 2.9e-06,
      "loss": 0.0246,
      "step": 58
    },
    {
      "epoch": 0.8194444444444444,
      "grad_norm": 1.0761189460754395,
      "learning_rate": 2.95e-06,
      "loss": 0.0058,
      "step": 59
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 1.1098495721817017,
      "learning_rate": 3e-06,
      "loss": 0.0167,
      "step": 60
    },
    {
      "epoch": 0.8472222222222222,
      "grad_norm": 1.0377463102340698,
      "learning_rate": 3.05e-06,
      "loss": 0.007,
      "step": 61
    },
    {
      "epoch": 0.8611111111111112,
      "grad_norm": 0.6554276347160339,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0039,
      "step": 62
    },
    {
      "epoch": 0.875,
      "grad_norm": 1.279077410697937,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0199,
      "step": 63
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.7709833979606628,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0043,
      "step": 64
    },
    {
      "epoch": 0.9027777777777778,
      "grad_norm": 1.3619827032089233,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0067,
      "step": 65
    },
    {
      "epoch": 0.9166666666666666,
      "grad_norm": 1.3849765062332153,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0091,
      "step": 66
    },
    {
      "epoch": 0.9305555555555556,
      "grad_norm": 1.1380351781845093,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0143,
      "step": 67
    },
    {
      "epoch": 0.9444444444444444,
      "grad_norm": 1.9759573936462402,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0231,
      "step": 68
    },
    {
      "epoch": 0.9583333333333334,
      "grad_norm": 0.5920725464820862,
      "learning_rate": 3.45e-06,
      "loss": 0.0063,
      "step": 69
    },
    {
      "epoch": 0.9722222222222222,
      "grad_norm": 1.2489631175994873,
      "learning_rate": 3.5e-06,
      "loss": 0.0149,
      "step": 70
    },
    {
      "epoch": 0.9861111111111112,
      "grad_norm": 1.5553513765335083,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0085,
      "step": 71
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.0278698205947876,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0061,
      "step": 72
    }
  ],
  "logging_steps": 1,
  "max_steps": 432,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 72,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.8124313299255296e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}