{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 152,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013157894736842105,
      "grad_norm": 34.99433898925781,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.595,
      "step": 1
    },
    {
      "epoch": 0.02631578947368421,
      "grad_norm": 35.6848258972168,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.6447,
      "step": 2
    },
    {
      "epoch": 0.039473684210526314,
      "grad_norm": 35.07997512817383,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.5819,
      "step": 3
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 34.3863525390625,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.5739,
      "step": 4
    },
    {
      "epoch": 0.06578947368421052,
      "grad_norm": 35.443077087402344,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.6071,
      "step": 5
    },
    {
      "epoch": 0.07894736842105263,
      "grad_norm": 34.70173263549805,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.5487,
      "step": 6
    },
    {
      "epoch": 0.09210526315789473,
      "grad_norm": 34.421295166015625,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.5494,
      "step": 7
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 35.152748107910156,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.5936,
      "step": 8
    },
    {
      "epoch": 0.11842105263157894,
      "grad_norm": 34.947021484375,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.5574,
      "step": 9
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 34.67315673828125,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.4894,
      "step": 10
    },
    {
      "epoch": 0.14473684210526316,
      "grad_norm": 34.679954528808594,
      "learning_rate": 5.5e-07,
      "loss": 2.4985,
      "step": 11
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 33.57002258300781,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.4339,
      "step": 12
    },
    {
      "epoch": 0.17105263157894737,
      "grad_norm": 33.517276763916016,
      "learning_rate": 6.5e-07,
      "loss": 2.4055,
      "step": 13
    },
    {
      "epoch": 0.18421052631578946,
      "grad_norm": 33.5312385559082,
      "learning_rate": 7.000000000000001e-07,
      "loss": 2.3806,
      "step": 14
    },
    {
      "epoch": 0.19736842105263158,
      "grad_norm": 32.01276779174805,
      "learning_rate": 7.5e-07,
      "loss": 2.2505,
      "step": 15
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 31.827980041503906,
      "learning_rate": 8.000000000000001e-07,
      "loss": 2.1359,
      "step": 16
    },
    {
      "epoch": 0.2236842105263158,
      "grad_norm": 31.437101364135742,
      "learning_rate": 8.500000000000001e-07,
      "loss": 2.1117,
      "step": 17
    },
    {
      "epoch": 0.23684210526315788,
      "grad_norm": 30.315187454223633,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.9795,
      "step": 18
    },
    {
      "epoch": 0.25,
      "grad_norm": 29.622655868530273,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.8472,
      "step": 19
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 28.628408432006836,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.7283,
      "step": 20
    },
    {
      "epoch": 0.27631578947368424,
      "grad_norm": 27.83180046081543,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.5942,
      "step": 21
    },
    {
      "epoch": 0.2894736842105263,
      "grad_norm": 26.911596298217773,
      "learning_rate": 1.1e-06,
      "loss": 1.4467,
      "step": 22
    },
    {
      "epoch": 0.3026315789473684,
      "grad_norm": 25.88102149963379,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.3007,
      "step": 23
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 25.146381378173828,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.1319,
      "step": 24
    },
    {
      "epoch": 0.32894736842105265,
      "grad_norm": 24.800382614135742,
      "learning_rate": 1.25e-06,
      "loss": 0.9359,
      "step": 25
    },
    {
      "epoch": 0.34210526315789475,
      "grad_norm": 24.648332595825195,
      "learning_rate": 1.3e-06,
      "loss": 0.7054,
      "step": 26
    },
    {
      "epoch": 0.35526315789473684,
      "grad_norm": 22.947620391845703,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.5209,
      "step": 27
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 17.80010414123535,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.3546,
      "step": 28
    },
    {
      "epoch": 0.3815789473684211,
      "grad_norm": 11.841789245605469,
      "learning_rate": 1.45e-06,
      "loss": 0.26,
      "step": 29
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 7.275839805603027,
      "learning_rate": 1.5e-06,
      "loss": 0.1808,
      "step": 30
    },
    {
      "epoch": 0.40789473684210525,
      "grad_norm": 4.6324543952941895,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.1464,
      "step": 31
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 3.1281485557556152,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.1079,
      "step": 32
    },
    {
      "epoch": 0.4342105263157895,
      "grad_norm": 2.062562942504883,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.0966,
      "step": 33
    },
    {
      "epoch": 0.4473684210526316,
      "grad_norm": 2.1343328952789307,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.088,
      "step": 34
    },
    {
      "epoch": 0.4605263157894737,
      "grad_norm": 1.6768524646759033,
      "learning_rate": 1.75e-06,
      "loss": 0.0783,
      "step": 35
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 1.0879229307174683,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0623,
      "step": 36
    },
    {
      "epoch": 0.4868421052631579,
      "grad_norm": 0.83177649974823,
      "learning_rate": 1.85e-06,
      "loss": 0.0655,
      "step": 37
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.5678385496139526,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0565,
      "step": 38
    },
    {
      "epoch": 0.5131578947368421,
      "grad_norm": 0.6994458436965942,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0491,
      "step": 39
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 0.711387038230896,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0507,
      "step": 40
    },
    {
      "epoch": 0.5394736842105263,
      "grad_norm": 0.7169735431671143,
      "learning_rate": 2.05e-06,
      "loss": 0.0478,
      "step": 41
    },
    {
      "epoch": 0.5526315789473685,
      "grad_norm": 0.603631317615509,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0507,
      "step": 42
    },
    {
      "epoch": 0.5657894736842105,
      "grad_norm": 0.617487907409668,
      "learning_rate": 2.15e-06,
      "loss": 0.043,
      "step": 43
    },
    {
      "epoch": 0.5789473684210527,
      "grad_norm": 0.4638065993785858,
      "learning_rate": 2.2e-06,
      "loss": 0.0472,
      "step": 44
    },
    {
      "epoch": 0.5921052631578947,
      "grad_norm": 0.5996385216712952,
      "learning_rate": 2.25e-06,
      "loss": 0.0429,
      "step": 45
    },
    {
      "epoch": 0.6052631578947368,
      "grad_norm": 0.39118286967277527,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.0421,
      "step": 46
    },
    {
      "epoch": 0.618421052631579,
      "grad_norm": 0.3118075728416443,
      "learning_rate": 2.35e-06,
      "loss": 0.0383,
      "step": 47
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.31731992959976196,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.041,
      "step": 48
    },
    {
      "epoch": 0.6447368421052632,
      "grad_norm": 0.5413194298744202,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0397,
      "step": 49
    },
    {
      "epoch": 0.6578947368421053,
      "grad_norm": 0.32958006858825684,
      "learning_rate": 2.5e-06,
      "loss": 0.0355,
      "step": 50
    },
    {
      "epoch": 0.6710526315789473,
      "grad_norm": 0.596309244632721,
      "learning_rate": 2.55e-06,
      "loss": 0.0413,
      "step": 51
    },
    {
      "epoch": 0.6842105263157895,
      "grad_norm": 0.4557362496852875,
      "learning_rate": 2.6e-06,
      "loss": 0.0461,
      "step": 52
    },
    {
      "epoch": 0.6973684210526315,
      "grad_norm": 0.3345410227775574,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0385,
      "step": 53
    },
    {
      "epoch": 0.7105263157894737,
      "grad_norm": 0.3047848343849182,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0383,
      "step": 54
    },
    {
      "epoch": 0.7236842105263158,
      "grad_norm": 0.43763449788093567,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.038,
      "step": 55
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 0.26870036125183105,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0374,
      "step": 56
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.38762542605400085,
      "learning_rate": 2.85e-06,
      "loss": 0.0349,
      "step": 57
    },
    {
      "epoch": 0.7631578947368421,
      "grad_norm": 0.27517396211624146,
      "learning_rate": 2.9e-06,
      "loss": 0.0398,
      "step": 58
    },
    {
      "epoch": 0.7763157894736842,
      "grad_norm": 0.30815261602401733,
      "learning_rate": 2.95e-06,
      "loss": 0.0364,
      "step": 59
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 0.30011361837387085,
      "learning_rate": 3e-06,
      "loss": 0.0307,
      "step": 60
    },
    {
      "epoch": 0.8026315789473685,
      "grad_norm": 0.3269154727458954,
      "learning_rate": 3.05e-06,
      "loss": 0.0344,
      "step": 61
    },
    {
      "epoch": 0.8157894736842105,
      "grad_norm": 0.3750869333744049,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0339,
      "step": 62
    },
    {
      "epoch": 0.8289473684210527,
      "grad_norm": 0.29285815358161926,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.034,
      "step": 63
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.4157550632953644,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0348,
      "step": 64
    },
    {
      "epoch": 0.8552631578947368,
      "grad_norm": 0.2852867543697357,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0319,
      "step": 65
    },
    {
      "epoch": 0.868421052631579,
      "grad_norm": 0.4384031593799591,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0319,
      "step": 66
    },
    {
      "epoch": 0.881578947368421,
      "grad_norm": 0.4003254771232605,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0347,
      "step": 67
    },
    {
      "epoch": 0.8947368421052632,
      "grad_norm": 0.49913832545280457,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0347,
      "step": 68
    },
    {
      "epoch": 0.9078947368421053,
      "grad_norm": 0.22642269730567932,
      "learning_rate": 3.45e-06,
      "loss": 0.0306,
      "step": 69
    },
    {
      "epoch": 0.9210526315789473,
      "grad_norm": 0.34004101157188416,
      "learning_rate": 3.5e-06,
      "loss": 0.0337,
      "step": 70
    },
    {
      "epoch": 0.9342105263157895,
      "grad_norm": 0.21503636240959167,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0311,
      "step": 71
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 0.33802086114883423,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0293,
      "step": 72
    },
    {
      "epoch": 0.9605263157894737,
      "grad_norm": 0.2488064169883728,
      "learning_rate": 3.65e-06,
      "loss": 0.0318,
      "step": 73
    },
    {
      "epoch": 0.9736842105263158,
      "grad_norm": 0.21124528348445892,
      "learning_rate": 3.7e-06,
      "loss": 0.0293,
      "step": 74
    },
    {
      "epoch": 0.9868421052631579,
      "grad_norm": 0.3108712136745453,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.0288,
      "step": 75
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.33483418822288513,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.031,
      "step": 76
    },
    {
      "epoch": 1.013157894736842,
      "grad_norm": 0.3099130690097809,
      "learning_rate": 3.85e-06,
      "loss": 0.0286,
      "step": 77
    },
    {
      "epoch": 1.0263157894736843,
      "grad_norm": 0.22946476936340332,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0307,
      "step": 78
    },
    {
      "epoch": 1.0394736842105263,
      "grad_norm": 0.36924120783805847,
      "learning_rate": 3.95e-06,
      "loss": 0.0274,
      "step": 79
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 0.30895617604255676,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0274,
      "step": 80
    },
    {
      "epoch": 1.0657894736842106,
      "grad_norm": 0.42033568024635315,
      "learning_rate": 4.05e-06,
      "loss": 0.0298,
      "step": 81
    },
    {
      "epoch": 1.0789473684210527,
      "grad_norm": 0.35573887825012207,
      "learning_rate": 4.1e-06,
      "loss": 0.0286,
      "step": 82
    },
    {
      "epoch": 1.0921052631578947,
      "grad_norm": 0.24631913006305695,
      "learning_rate": 4.15e-06,
      "loss": 0.0294,
      "step": 83
    },
    {
      "epoch": 1.1052631578947367,
      "grad_norm": 0.2908592224121094,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0243,
      "step": 84
    },
    {
      "epoch": 1.118421052631579,
      "grad_norm": 0.3293064832687378,
      "learning_rate": 4.25e-06,
      "loss": 0.0253,
      "step": 85
    },
    {
      "epoch": 1.131578947368421,
      "grad_norm": 0.3789626359939575,
      "learning_rate": 4.3e-06,
      "loss": 0.0253,
      "step": 86
    },
    {
      "epoch": 1.1447368421052633,
      "grad_norm": 0.3900983929634094,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0248,
      "step": 87
    },
    {
      "epoch": 1.1578947368421053,
      "grad_norm": 0.28972727060317993,
      "learning_rate": 4.4e-06,
      "loss": 0.0256,
      "step": 88
    },
    {
      "epoch": 1.1710526315789473,
      "grad_norm": 0.4615432620048523,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0259,
      "step": 89
    },
    {
      "epoch": 1.1842105263157894,
      "grad_norm": 0.3959222137928009,
      "learning_rate": 4.5e-06,
      "loss": 0.0277,
      "step": 90
    },
    {
      "epoch": 1.1973684210526316,
      "grad_norm": 0.4927828907966614,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.0251,
      "step": 91
    },
    {
      "epoch": 1.2105263157894737,
      "grad_norm": 0.23854510486125946,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0214,
      "step": 92
    },
    {
      "epoch": 1.2236842105263157,
      "grad_norm": 0.2470882534980774,
      "learning_rate": 4.65e-06,
      "loss": 0.0255,
      "step": 93
    },
    {
      "epoch": 1.236842105263158,
      "grad_norm": 0.22575952112674713,
      "learning_rate": 4.7e-06,
      "loss": 0.0208,
      "step": 94
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.437495619058609,
      "learning_rate": 4.75e-06,
      "loss": 0.0234,
      "step": 95
    },
    {
      "epoch": 1.263157894736842,
      "grad_norm": 0.2712303102016449,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.0236,
      "step": 96
    },
    {
      "epoch": 1.2763157894736843,
      "grad_norm": 0.2843461334705353,
      "learning_rate": 4.85e-06,
      "loss": 0.0195,
      "step": 97
    },
    {
      "epoch": 1.2894736842105263,
      "grad_norm": 0.21141311526298523,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0223,
      "step": 98
    },
    {
      "epoch": 1.3026315789473684,
      "grad_norm": 0.25484079122543335,
      "learning_rate": 4.95e-06,
      "loss": 0.0211,
      "step": 99
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 0.358674556016922,
      "learning_rate": 5e-06,
      "loss": 0.027,
      "step": 100
    },
    {
      "epoch": 1.3289473684210527,
      "grad_norm": 0.20442990958690643,
      "learning_rate": 4.999902656502973e-06,
      "loss": 0.0234,
      "step": 101
    },
    {
      "epoch": 1.3421052631578947,
      "grad_norm": 0.2281407117843628,
      "learning_rate": 4.9996106335924965e-06,
      "loss": 0.0243,
      "step": 102
    },
    {
      "epoch": 1.3552631578947367,
      "grad_norm": 0.23803724348545074,
      "learning_rate": 4.999123954009797e-06,
      "loss": 0.0189,
      "step": 103
    },
    {
      "epoch": 1.368421052631579,
      "grad_norm": 0.21493025124073029,
      "learning_rate": 4.998442655654946e-06,
      "loss": 0.0207,
      "step": 104
    },
    {
      "epoch": 1.381578947368421,
      "grad_norm": 0.2565159797668457,
      "learning_rate": 4.997566791583916e-06,
      "loss": 0.0178,
      "step": 105
    },
    {
      "epoch": 1.3947368421052633,
      "grad_norm": 0.3488551378250122,
      "learning_rate": 4.996496430004446e-06,
      "loss": 0.0226,
      "step": 106
    },
    {
      "epoch": 1.4078947368421053,
      "grad_norm": 0.27695611119270325,
      "learning_rate": 4.995231654270726e-06,
      "loss": 0.0189,
      "step": 107
    },
    {
      "epoch": 1.4210526315789473,
      "grad_norm": 0.23477056622505188,
      "learning_rate": 4.993772562876909e-06,
      "loss": 0.0182,
      "step": 108
    },
    {
      "epoch": 1.4342105263157894,
      "grad_norm": 0.22611404955387115,
      "learning_rate": 4.992119269449445e-06,
      "loss": 0.0168,
      "step": 109
    },
    {
      "epoch": 1.4473684210526316,
      "grad_norm": 0.25616368651390076,
      "learning_rate": 4.990271902738223e-06,
      "loss": 0.022,
      "step": 110
    },
    {
      "epoch": 1.4605263157894737,
      "grad_norm": 0.23842717707157135,
      "learning_rate": 4.988230606606552e-06,
      "loss": 0.0163,
      "step": 111
    },
    {
      "epoch": 1.4736842105263157,
      "grad_norm": 0.24285905063152313,
      "learning_rate": 4.985995540019956e-06,
      "loss": 0.0202,
      "step": 112
    },
    {
      "epoch": 1.486842105263158,
      "grad_norm": 0.24602730572223663,
      "learning_rate": 4.983566877033791e-06,
      "loss": 0.0173,
      "step": 113
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.26218464970588684,
      "learning_rate": 4.980944806779698e-06,
      "loss": 0.0206,
      "step": 114
    },
    {
      "epoch": 1.513157894736842,
      "grad_norm": 0.2999787926673889,
      "learning_rate": 4.9781295334508664e-06,
      "loss": 0.0178,
      "step": 115
    },
    {
      "epoch": 1.526315789473684,
      "grad_norm": 0.20500704646110535,
      "learning_rate": 4.975121276286136e-06,
      "loss": 0.0181,
      "step": 116
    },
    {
      "epoch": 1.5394736842105263,
      "grad_norm": 0.25106561183929443,
      "learning_rate": 4.9719202695529265e-06,
      "loss": 0.0128,
      "step": 117
    },
    {
      "epoch": 1.5526315789473686,
      "grad_norm": 0.2686936855316162,
      "learning_rate": 4.968526762528988e-06,
      "loss": 0.0146,
      "step": 118
    },
    {
      "epoch": 1.5657894736842106,
      "grad_norm": 0.2770400047302246,
      "learning_rate": 4.964941019482995e-06,
      "loss": 0.0167,
      "step": 119
    },
    {
      "epoch": 1.5789473684210527,
      "grad_norm": 0.27510589361190796,
      "learning_rate": 4.961163319653959e-06,
      "loss": 0.0162,
      "step": 120
    },
    {
      "epoch": 1.5921052631578947,
      "grad_norm": 0.3720133602619171,
      "learning_rate": 4.9571939572294914e-06,
      "loss": 0.0163,
      "step": 121
    },
    {
      "epoch": 1.6052631578947367,
      "grad_norm": 0.2288741022348404,
      "learning_rate": 4.953033241322887e-06,
      "loss": 0.0133,
      "step": 122
    },
    {
      "epoch": 1.618421052631579,
      "grad_norm": 0.31084850430488586,
      "learning_rate": 4.948681495949055e-06,
      "loss": 0.0124,
      "step": 123
    },
    {
      "epoch": 1.631578947368421,
      "grad_norm": 0.19490985572338104,
      "learning_rate": 4.944139059999286e-06,
      "loss": 0.0114,
      "step": 124
    },
    {
      "epoch": 1.6447368421052633,
      "grad_norm": 0.3074445426464081,
      "learning_rate": 4.939406287214861e-06,
      "loss": 0.0153,
      "step": 125
    },
    {
      "epoch": 1.6578947368421053,
      "grad_norm": 0.29279908537864685,
      "learning_rate": 4.9344835461595016e-06,
      "loss": 0.0117,
      "step": 126
    },
    {
      "epoch": 1.6710526315789473,
      "grad_norm": 0.3299407362937927,
      "learning_rate": 4.929371220190671e-06,
      "loss": 0.0128,
      "step": 127
    },
    {
      "epoch": 1.6842105263157894,
      "grad_norm": 0.24818794429302216,
      "learning_rate": 4.9240697074297205e-06,
      "loss": 0.0146,
      "step": 128
    },
    {
      "epoch": 1.6973684210526314,
      "grad_norm": 0.35983219742774963,
      "learning_rate": 4.918579420730884e-06,
      "loss": 0.0138,
      "step": 129
    },
    {
      "epoch": 1.7105263157894737,
      "grad_norm": 0.2583932876586914,
      "learning_rate": 4.912900787649124e-06,
      "loss": 0.0136,
      "step": 130
    },
    {
      "epoch": 1.723684210526316,
      "grad_norm": 0.20754319429397583,
      "learning_rate": 4.907034250406846e-06,
      "loss": 0.0116,
      "step": 131
    },
    {
      "epoch": 1.736842105263158,
      "grad_norm": 0.30609601736068726,
      "learning_rate": 4.900980265859449e-06,
      "loss": 0.0111,
      "step": 132
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.3754304349422455,
      "learning_rate": 4.894739305459754e-06,
      "loss": 0.0126,
      "step": 133
    },
    {
      "epoch": 1.763157894736842,
      "grad_norm": 0.2517055571079254,
      "learning_rate": 4.88831185522129e-06,
      "loss": 0.0118,
      "step": 134
    },
    {
      "epoch": 1.776315789473684,
      "grad_norm": 0.198478102684021,
      "learning_rate": 4.881698415680442e-06,
      "loss": 0.0087,
      "step": 135
    },
    {
      "epoch": 1.7894736842105263,
      "grad_norm": 0.2307695895433426,
      "learning_rate": 4.874899501857477e-06,
      "loss": 0.0094,
      "step": 136
    },
    {
      "epoch": 1.8026315789473686,
      "grad_norm": 0.17823486030101776,
      "learning_rate": 4.867915643216434e-06,
      "loss": 0.0098,
      "step": 137
    },
    {
      "epoch": 1.8157894736842106,
      "grad_norm": 0.2157433032989502,
      "learning_rate": 4.860747383623889e-06,
      "loss": 0.0114,
      "step": 138
    },
    {
      "epoch": 1.8289473684210527,
      "grad_norm": 0.21051311492919922,
      "learning_rate": 4.85339528130661e-06,
      "loss": 0.011,
      "step": 139
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 0.32886555790901184,
      "learning_rate": 4.845859908808074e-06,
      "loss": 0.011,
      "step": 140
    },
    {
      "epoch": 1.8552631578947367,
      "grad_norm": 0.22413378953933716,
      "learning_rate": 4.838141852943891e-06,
      "loss": 0.0087,
      "step": 141
    },
    {
      "epoch": 1.868421052631579,
      "grad_norm": 0.2896019518375397,
      "learning_rate": 4.830241714756099e-06,
      "loss": 0.011,
      "step": 142
    },
    {
      "epoch": 1.881578947368421,
      "grad_norm": 0.26163023710250854,
      "learning_rate": 4.822160109466361e-06,
      "loss": 0.0084,
      "step": 143
    },
    {
      "epoch": 1.8947368421052633,
      "grad_norm": 0.23998413980007172,
      "learning_rate": 4.813897666428054e-06,
      "loss": 0.0094,
      "step": 144
    },
    {
      "epoch": 1.9078947368421053,
      "grad_norm": 0.2334728091955185,
      "learning_rate": 4.805455029077255e-06,
      "loss": 0.007,
      "step": 145
    },
    {
      "epoch": 1.9210526315789473,
      "grad_norm": 0.17431940138339996,
      "learning_rate": 4.79683285488264e-06,
      "loss": 0.0047,
      "step": 146
    },
    {
      "epoch": 1.9342105263157894,
      "grad_norm": 0.19151932001113892,
      "learning_rate": 4.788031815294282e-06,
      "loss": 0.0056,
      "step": 147
    },
    {
      "epoch": 1.9473684210526314,
      "grad_norm": 0.2352588027715683,
      "learning_rate": 4.779052595691355e-06,
      "loss": 0.0107,
      "step": 148
    },
    {
      "epoch": 1.9605263157894737,
      "grad_norm": 0.2848915159702301,
      "learning_rate": 4.76989589532877e-06,
      "loss": 0.0074,
      "step": 149
    },
    {
      "epoch": 1.973684210526316,
      "grad_norm": 0.218011736869812,
      "learning_rate": 4.7605624272827125e-06,
      "loss": 0.0075,
      "step": 150
    },
    {
      "epoch": 1.986842105263158,
      "grad_norm": 0.3043143153190613,
      "learning_rate": 4.75105291839512e-06,
      "loss": 0.0073,
      "step": 151
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.16677772998809814,
      "learning_rate": 4.741368109217072e-06,
      "loss": 0.0065,
      "step": 152
    }
  ],
  "logging_steps": 1,
  "max_steps": 456,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 76,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9.77843856062441e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}