| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 4.0, |
| "eval_steps": 500, |
| "global_step": 304, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.013157894736842105, |
| "grad_norm": 37.79440689086914, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 3.1402, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.02631578947368421, |
| "grad_norm": 38.45823287963867, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 3.1787, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.039473684210526314, |
| "grad_norm": 38.25625228881836, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 3.1316, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.05263157894736842, |
| "grad_norm": 37.2024040222168, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 3.1011, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.06578947368421052, |
| "grad_norm": 38.17294692993164, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 3.133, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.07894736842105263, |
| "grad_norm": 37.374794006347656, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 3.0731, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.09210526315789473, |
| "grad_norm": 37.226966857910156, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 3.069, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.10526315789473684, |
| "grad_norm": 38.40094757080078, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 3.1223, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.11842105263157894, |
| "grad_norm": 37.86320877075195, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 3.062, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.13157894736842105, |
| "grad_norm": 38.02171325683594, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 3.0008, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.14473684210526316, |
| "grad_norm": 38.5522346496582, |
| "learning_rate": 5.5e-07, |
| "loss": 3.0047, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.15789473684210525, |
| "grad_norm": 37.72829818725586, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 2.9274, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.17105263157894737, |
| "grad_norm": 38.488494873046875, |
| "learning_rate": 6.5e-07, |
| "loss": 2.8727, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.18421052631578946, |
| "grad_norm": 38.87471389770508, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 2.8422, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.19736842105263158, |
| "grad_norm": 37.584896087646484, |
| "learning_rate": 7.5e-07, |
| "loss": 2.6728, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.21052631578947367, |
| "grad_norm": 37.04607391357422, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 2.5215, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.2236842105263158, |
| "grad_norm": 37.30121994018555, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 2.4689, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.23684210526315788, |
| "grad_norm": 35.99961853027344, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 2.3, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 35.817543029785156, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 2.1423, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.2631578947368421, |
| "grad_norm": 35.056915283203125, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 1.9639, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.27631578947368424, |
| "grad_norm": 34.83850860595703, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 1.7845, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.2894736842105263, |
| "grad_norm": 34.32366943359375, |
| "learning_rate": 1.1e-06, |
| "loss": 1.5864, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.3026315789473684, |
| "grad_norm": 33.79611587524414, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 1.4011, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.3157894736842105, |
| "grad_norm": 32.596031188964844, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 1.195, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.32894736842105265, |
| "grad_norm": 30.045007705688477, |
| "learning_rate": 1.25e-06, |
| "loss": 0.9883, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.34210526315789475, |
| "grad_norm": 24.89093589782715, |
| "learning_rate": 1.3e-06, |
| "loss": 0.7669, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.35526315789473684, |
| "grad_norm": 23.454408645629883, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 0.6304, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.3684210526315789, |
| "grad_norm": 19.837312698364258, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 0.4717, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.3815789473684211, |
| "grad_norm": 15.185093879699707, |
| "learning_rate": 1.45e-06, |
| "loss": 0.363, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.39473684210526316, |
| "grad_norm": 9.057796478271484, |
| "learning_rate": 1.5e-06, |
| "loss": 0.2439, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.40789473684210525, |
| "grad_norm": 5.976982593536377, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 0.1864, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.42105263157894735, |
| "grad_norm": 3.067375421524048, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 0.1134, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.4342105263157895, |
| "grad_norm": 2.3589119911193848, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 0.0985, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.4473684210526316, |
| "grad_norm": 2.0044353008270264, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 0.0859, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.4605263157894737, |
| "grad_norm": 1.4279972314834595, |
| "learning_rate": 1.75e-06, |
| "loss": 0.0728, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.47368421052631576, |
| "grad_norm": 0.9807674288749695, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 0.061, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.4868421052631579, |
| "grad_norm": 0.906160295009613, |
| "learning_rate": 1.85e-06, |
| "loss": 0.0676, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 0.8837690353393555, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 0.0622, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.5131578947368421, |
| "grad_norm": 0.9579435586929321, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 0.0557, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.5263157894736842, |
| "grad_norm": 0.8149510622024536, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.0555, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.5394736842105263, |
| "grad_norm": 0.8899760246276855, |
| "learning_rate": 2.05e-06, |
| "loss": 0.0517, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.5526315789473685, |
| "grad_norm": 0.6007645130157471, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.0518, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.5657894736842105, |
| "grad_norm": 0.48819127678871155, |
| "learning_rate": 2.15e-06, |
| "loss": 0.0429, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.5789473684210527, |
| "grad_norm": 0.42939358949661255, |
| "learning_rate": 2.2e-06, |
| "loss": 0.0459, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.5921052631578947, |
| "grad_norm": 0.5706579685211182, |
| "learning_rate": 2.25e-06, |
| "loss": 0.0453, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.6052631578947368, |
| "grad_norm": 0.3034597337245941, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.0421, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.618421052631579, |
| "grad_norm": 0.5601783394813538, |
| "learning_rate": 2.35e-06, |
| "loss": 0.0411, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.631578947368421, |
| "grad_norm": 0.35388317704200745, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.04, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.6447368421052632, |
| "grad_norm": 0.48609891533851624, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.04, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.6578947368421053, |
| "grad_norm": 0.4638507068157196, |
| "learning_rate": 2.5e-06, |
| "loss": 0.0369, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.6710526315789473, |
| "grad_norm": 0.5685771703720093, |
| "learning_rate": 2.55e-06, |
| "loss": 0.0428, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.6842105263157895, |
| "grad_norm": 0.46358174085617065, |
| "learning_rate": 2.6e-06, |
| "loss": 0.0483, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.6973684210526315, |
| "grad_norm": 0.35054436326026917, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.0391, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.7105263157894737, |
| "grad_norm": 0.3350559175014496, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.039, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.7236842105263158, |
| "grad_norm": 0.2875112295150757, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.0383, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.7368421052631579, |
| "grad_norm": 0.4492928683757782, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.0358, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.29484888911247253, |
| "learning_rate": 2.85e-06, |
| "loss": 0.0355, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.7631578947368421, |
| "grad_norm": 0.36551928520202637, |
| "learning_rate": 2.9e-06, |
| "loss": 0.0403, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.7763157894736842, |
| "grad_norm": 0.4458053708076477, |
| "learning_rate": 2.95e-06, |
| "loss": 0.0342, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.7894736842105263, |
| "grad_norm": 0.34047460556030273, |
| "learning_rate": 3e-06, |
| "loss": 0.0302, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.8026315789473685, |
| "grad_norm": 0.3420606255531311, |
| "learning_rate": 3.05e-06, |
| "loss": 0.034, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.8157894736842105, |
| "grad_norm": 0.3902851939201355, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.0327, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.8289473684210527, |
| "grad_norm": 0.29165828227996826, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.0341, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.8421052631578947, |
| "grad_norm": 0.40872958302497864, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.035, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.8552631578947368, |
| "grad_norm": 0.36295783519744873, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.0323, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.868421052631579, |
| "grad_norm": 0.3857724368572235, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.0336, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.881578947368421, |
| "grad_norm": 0.3207017481327057, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.0332, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.8947368421052632, |
| "grad_norm": 0.2903987169265747, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.0327, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.9078947368421053, |
| "grad_norm": 0.3386954963207245, |
| "learning_rate": 3.45e-06, |
| "loss": 0.0308, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.9210526315789473, |
| "grad_norm": 0.4339621365070343, |
| "learning_rate": 3.5e-06, |
| "loss": 0.0361, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.9342105263157895, |
| "grad_norm": 0.28095564246177673, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.0306, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.9473684210526315, |
| "grad_norm": 0.4141469895839691, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.028, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.9605263157894737, |
| "grad_norm": 0.35212820768356323, |
| "learning_rate": 3.65e-06, |
| "loss": 0.032, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.9736842105263158, |
| "grad_norm": 0.26956063508987427, |
| "learning_rate": 3.7e-06, |
| "loss": 0.0294, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.9868421052631579, |
| "grad_norm": 0.32735681533813477, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.0272, |
| "step": 75 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.4906782805919647, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.0324, |
| "step": 76 |
| }, |
| { |
| "epoch": 1.013157894736842, |
| "grad_norm": 0.3451901078224182, |
| "learning_rate": 3.85e-06, |
| "loss": 0.0288, |
| "step": 77 |
| }, |
| { |
| "epoch": 1.0263157894736843, |
| "grad_norm": 0.30598726868629456, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.0305, |
| "step": 78 |
| }, |
| { |
| "epoch": 1.0394736842105263, |
| "grad_norm": 0.31189921498298645, |
| "learning_rate": 3.95e-06, |
| "loss": 0.0274, |
| "step": 79 |
| }, |
| { |
| "epoch": 1.0526315789473684, |
| "grad_norm": 0.31895947456359863, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.0236, |
| "step": 80 |
| }, |
| { |
| "epoch": 1.0657894736842106, |
| "grad_norm": 0.3290308117866516, |
| "learning_rate": 4.05e-06, |
| "loss": 0.0284, |
| "step": 81 |
| }, |
| { |
| "epoch": 1.0789473684210527, |
| "grad_norm": 0.3651576638221741, |
| "learning_rate": 4.1e-06, |
| "loss": 0.0274, |
| "step": 82 |
| }, |
| { |
| "epoch": 1.0921052631578947, |
| "grad_norm": 0.2393084615468979, |
| "learning_rate": 4.15e-06, |
| "loss": 0.0301, |
| "step": 83 |
| }, |
| { |
| "epoch": 1.1052631578947367, |
| "grad_norm": 0.333898663520813, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.0235, |
| "step": 84 |
| }, |
| { |
| "epoch": 1.118421052631579, |
| "grad_norm": 0.3287582993507385, |
| "learning_rate": 4.25e-06, |
| "loss": 0.0248, |
| "step": 85 |
| }, |
| { |
| "epoch": 1.131578947368421, |
| "grad_norm": 0.3432455360889435, |
| "learning_rate": 4.3e-06, |
| "loss": 0.026, |
| "step": 86 |
| }, |
| { |
| "epoch": 1.1447368421052633, |
| "grad_norm": 0.3176783621311188, |
| "learning_rate": 4.350000000000001e-06, |
| "loss": 0.0249, |
| "step": 87 |
| }, |
| { |
| "epoch": 1.1578947368421053, |
| "grad_norm": 0.33373433351516724, |
| "learning_rate": 4.4e-06, |
| "loss": 0.0251, |
| "step": 88 |
| }, |
| { |
| "epoch": 1.1710526315789473, |
| "grad_norm": 0.36087968945503235, |
| "learning_rate": 4.450000000000001e-06, |
| "loss": 0.0251, |
| "step": 89 |
| }, |
| { |
| "epoch": 1.1842105263157894, |
| "grad_norm": 0.3681696057319641, |
| "learning_rate": 4.5e-06, |
| "loss": 0.0276, |
| "step": 90 |
| }, |
| { |
| "epoch": 1.1973684210526316, |
| "grad_norm": 0.46539774537086487, |
| "learning_rate": 4.5500000000000005e-06, |
| "loss": 0.0229, |
| "step": 91 |
| }, |
| { |
| "epoch": 1.2105263157894737, |
| "grad_norm": 0.23368288576602936, |
| "learning_rate": 4.600000000000001e-06, |
| "loss": 0.021, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.2236842105263157, |
| "grad_norm": 0.26623716950416565, |
| "learning_rate": 4.65e-06, |
| "loss": 0.0265, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.236842105263158, |
| "grad_norm": 0.28750717639923096, |
| "learning_rate": 4.7e-06, |
| "loss": 0.0221, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.25, |
| "grad_norm": 0.46578383445739746, |
| "learning_rate": 4.75e-06, |
| "loss": 0.0236, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.263157894736842, |
| "grad_norm": 0.33406543731689453, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 0.0239, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.2763157894736843, |
| "grad_norm": 0.21247217059135437, |
| "learning_rate": 4.85e-06, |
| "loss": 0.0188, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.2894736842105263, |
| "grad_norm": 0.26229164004325867, |
| "learning_rate": 4.9000000000000005e-06, |
| "loss": 0.022, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.3026315789473684, |
| "grad_norm": 0.2967258393764496, |
| "learning_rate": 4.95e-06, |
| "loss": 0.0218, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.3157894736842106, |
| "grad_norm": 0.419189453125, |
| "learning_rate": 5e-06, |
| "loss": 0.0247, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.3289473684210527, |
| "grad_norm": 0.25418952107429504, |
| "learning_rate": 4.999902656502973e-06, |
| "loss": 0.0223, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.3421052631578947, |
| "grad_norm": 0.20174147188663483, |
| "learning_rate": 4.9996106335924965e-06, |
| "loss": 0.0266, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.3552631578947367, |
| "grad_norm": 0.21732494235038757, |
| "learning_rate": 4.999123954009797e-06, |
| "loss": 0.0188, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.368421052631579, |
| "grad_norm": 0.2683119773864746, |
| "learning_rate": 4.998442655654946e-06, |
| "loss": 0.0203, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.381578947368421, |
| "grad_norm": 0.18175765872001648, |
| "learning_rate": 4.997566791583916e-06, |
| "loss": 0.0185, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.3947368421052633, |
| "grad_norm": 0.3932501971721649, |
| "learning_rate": 4.996496430004446e-06, |
| "loss": 0.0238, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.4078947368421053, |
| "grad_norm": 0.31145599484443665, |
| "learning_rate": 4.995231654270726e-06, |
| "loss": 0.0199, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.4210526315789473, |
| "grad_norm": 0.41356661915779114, |
| "learning_rate": 4.993772562876909e-06, |
| "loss": 0.0187, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.4342105263157894, |
| "grad_norm": 0.22484919428825378, |
| "learning_rate": 4.992119269449445e-06, |
| "loss": 0.0182, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.4473684210526316, |
| "grad_norm": 0.28703081607818604, |
| "learning_rate": 4.990271902738223e-06, |
| "loss": 0.0239, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.4605263157894737, |
| "grad_norm": 0.2394670695066452, |
| "learning_rate": 4.988230606606552e-06, |
| "loss": 0.0171, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.4736842105263157, |
| "grad_norm": 0.3552885949611664, |
| "learning_rate": 4.985995540019956e-06, |
| "loss": 0.0226, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.486842105263158, |
| "grad_norm": 0.24968908727169037, |
| "learning_rate": 4.983566877033791e-06, |
| "loss": 0.0193, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.5, |
| "grad_norm": 0.24420695006847382, |
| "learning_rate": 4.980944806779698e-06, |
| "loss": 0.0226, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.513157894736842, |
| "grad_norm": 0.34696799516677856, |
| "learning_rate": 4.9781295334508664e-06, |
| "loss": 0.02, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.526315789473684, |
| "grad_norm": 0.23682132363319397, |
| "learning_rate": 4.975121276286136e-06, |
| "loss": 0.0194, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.5394736842105263, |
| "grad_norm": 0.2485751509666443, |
| "learning_rate": 4.9719202695529265e-06, |
| "loss": 0.0149, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.5526315789473686, |
| "grad_norm": 0.2815033495426178, |
| "learning_rate": 4.968526762528988e-06, |
| "loss": 0.0153, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.5657894736842106, |
| "grad_norm": 0.24127744138240814, |
| "learning_rate": 4.964941019482995e-06, |
| "loss": 0.019, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.5789473684210527, |
| "grad_norm": 0.2987695038318634, |
| "learning_rate": 4.961163319653959e-06, |
| "loss": 0.0165, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.5921052631578947, |
| "grad_norm": 0.33492133021354675, |
| "learning_rate": 4.9571939572294914e-06, |
| "loss": 0.0185, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.6052631578947367, |
| "grad_norm": 0.20466521382331848, |
| "learning_rate": 4.953033241322887e-06, |
| "loss": 0.0151, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.618421052631579, |
| "grad_norm": 0.36396247148513794, |
| "learning_rate": 4.948681495949055e-06, |
| "loss": 0.0138, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.631578947368421, |
| "grad_norm": 0.2000381350517273, |
| "learning_rate": 4.944139059999286e-06, |
| "loss": 0.0125, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.6447368421052633, |
| "grad_norm": 0.24977952241897583, |
| "learning_rate": 4.939406287214861e-06, |
| "loss": 0.0152, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.6578947368421053, |
| "grad_norm": 0.26705336570739746, |
| "learning_rate": 4.9344835461595016e-06, |
| "loss": 0.0148, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.6710526315789473, |
| "grad_norm": 0.26699599623680115, |
| "learning_rate": 4.929371220190671e-06, |
| "loss": 0.0151, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.6842105263157894, |
| "grad_norm": 0.20149633288383484, |
| "learning_rate": 4.9240697074297205e-06, |
| "loss": 0.0151, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.6973684210526314, |
| "grad_norm": 0.1961003988981247, |
| "learning_rate": 4.918579420730884e-06, |
| "loss": 0.0163, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.7105263157894737, |
| "grad_norm": 0.2148503214120865, |
| "learning_rate": 4.912900787649124e-06, |
| "loss": 0.0137, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.723684210526316, |
| "grad_norm": 0.20505128800868988, |
| "learning_rate": 4.907034250406846e-06, |
| "loss": 0.0136, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.736842105263158, |
| "grad_norm": 0.19462467730045319, |
| "learning_rate": 4.900980265859449e-06, |
| "loss": 0.0139, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.75, |
| "grad_norm": 0.21602794528007507, |
| "learning_rate": 4.894739305459754e-06, |
| "loss": 0.015, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.763157894736842, |
| "grad_norm": 0.22933153808116913, |
| "learning_rate": 4.88831185522129e-06, |
| "loss": 0.0142, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.776315789473684, |
| "grad_norm": 0.1785646229982376, |
| "learning_rate": 4.881698415680442e-06, |
| "loss": 0.0097, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.7894736842105263, |
| "grad_norm": 0.21535581350326538, |
| "learning_rate": 4.874899501857477e-06, |
| "loss": 0.0106, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.8026315789473686, |
| "grad_norm": 0.2360723614692688, |
| "learning_rate": 4.867915643216434e-06, |
| "loss": 0.0123, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.8157894736842106, |
| "grad_norm": 0.18098825216293335, |
| "learning_rate": 4.860747383623889e-06, |
| "loss": 0.0126, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.8289473684210527, |
| "grad_norm": 0.1836131066083908, |
| "learning_rate": 4.85339528130661e-06, |
| "loss": 0.0125, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.8421052631578947, |
| "grad_norm": 0.34765973687171936, |
| "learning_rate": 4.845859908808074e-06, |
| "loss": 0.0158, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.8552631578947367, |
| "grad_norm": 0.22595159709453583, |
| "learning_rate": 4.838141852943891e-06, |
| "loss": 0.0101, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.868421052631579, |
| "grad_norm": 0.2811257243156433, |
| "learning_rate": 4.830241714756099e-06, |
| "loss": 0.0111, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.881578947368421, |
| "grad_norm": 0.1875840127468109, |
| "learning_rate": 4.822160109466361e-06, |
| "loss": 0.0086, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.8947368421052633, |
| "grad_norm": 0.19390800595283508, |
| "learning_rate": 4.813897666428054e-06, |
| "loss": 0.0106, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.9078947368421053, |
| "grad_norm": 0.3725268244743347, |
| "learning_rate": 4.805455029077255e-06, |
| "loss": 0.0095, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.9210526315789473, |
| "grad_norm": 0.2201736867427826, |
| "learning_rate": 4.79683285488264e-06, |
| "loss": 0.0074, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.9342105263157894, |
| "grad_norm": 0.17423805594444275, |
| "learning_rate": 4.788031815294282e-06, |
| "loss": 0.0072, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.9473684210526314, |
| "grad_norm": 0.22169643640518188, |
| "learning_rate": 4.779052595691355e-06, |
| "loss": 0.0121, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.9605263157894737, |
| "grad_norm": 0.3247295618057251, |
| "learning_rate": 4.76989589532877e-06, |
| "loss": 0.0121, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.973684210526316, |
| "grad_norm": 0.1830369532108307, |
| "learning_rate": 4.7605624272827125e-06, |
| "loss": 0.0077, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.986842105263158, |
| "grad_norm": 0.2967239022254944, |
| "learning_rate": 4.75105291839512e-06, |
| "loss": 0.0104, |
| "step": 151 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.17589347064495087, |
| "learning_rate": 4.741368109217072e-06, |
| "loss": 0.0075, |
| "step": 152 |
| }, |
| { |
| "epoch": 2.013157894736842, |
| "grad_norm": 0.15554101765155792, |
| "learning_rate": 4.7315087539511225e-06, |
| "loss": 0.0063, |
| "step": 153 |
| }, |
| { |
| "epoch": 2.026315789473684, |
| "grad_norm": 0.13191422820091248, |
| "learning_rate": 4.721475620392567e-06, |
| "loss": 0.0039, |
| "step": 154 |
| }, |
| { |
| "epoch": 2.039473684210526, |
| "grad_norm": 0.1909502148628235, |
| "learning_rate": 4.711269489869654e-06, |
| "loss": 0.0055, |
| "step": 155 |
| }, |
| { |
| "epoch": 2.0526315789473686, |
| "grad_norm": 0.16942323744297028, |
| "learning_rate": 4.700891157182729e-06, |
| "loss": 0.0055, |
| "step": 156 |
| }, |
| { |
| "epoch": 2.0657894736842106, |
| "grad_norm": 0.1740521341562271, |
| "learning_rate": 4.690341430542351e-06, |
| "loss": 0.006, |
| "step": 157 |
| }, |
| { |
| "epoch": 2.0789473684210527, |
| "grad_norm": 0.19565710425376892, |
| "learning_rate": 4.679621131506347e-06, |
| "loss": 0.0057, |
| "step": 158 |
| }, |
| { |
| "epoch": 2.0921052631578947, |
| "grad_norm": 0.13488221168518066, |
| "learning_rate": 4.668731094915835e-06, |
| "loss": 0.0033, |
| "step": 159 |
| }, |
| { |
| "epoch": 2.1052631578947367, |
| "grad_norm": 0.16322746872901917, |
| "learning_rate": 4.657672168830211e-06, |
| "loss": 0.0042, |
| "step": 160 |
| }, |
| { |
| "epoch": 2.1184210526315788, |
| "grad_norm": 0.2087877243757248, |
| "learning_rate": 4.646445214461105e-06, |
| "loss": 0.0052, |
| "step": 161 |
| }, |
| { |
| "epoch": 2.1315789473684212, |
| "grad_norm": 0.12736408412456512, |
| "learning_rate": 4.635051106105316e-06, |
| "loss": 0.0051, |
| "step": 162 |
| }, |
| { |
| "epoch": 2.1447368421052633, |
| "grad_norm": 0.13264045119285583, |
| "learning_rate": 4.623490731076728e-06, |
| "loss": 0.0035, |
| "step": 163 |
| }, |
| { |
| "epoch": 2.1578947368421053, |
| "grad_norm": 0.2015363723039627, |
| "learning_rate": 4.6117649896372055e-06, |
| "loss": 0.0055, |
| "step": 164 |
| }, |
| { |
| "epoch": 2.1710526315789473, |
| "grad_norm": 0.21640510857105255, |
| "learning_rate": 4.59987479492649e-06, |
| "loss": 0.008, |
| "step": 165 |
| }, |
| { |
| "epoch": 2.1842105263157894, |
| "grad_norm": 0.17276327311992645, |
| "learning_rate": 4.587821072891089e-06, |
| "loss": 0.0058, |
| "step": 166 |
| }, |
| { |
| "epoch": 2.1973684210526314, |
| "grad_norm": 0.15923018753528595, |
| "learning_rate": 4.5756047622121665e-06, |
| "loss": 0.0039, |
| "step": 167 |
| }, |
| { |
| "epoch": 2.2105263157894735, |
| "grad_norm": 0.14791631698608398, |
| "learning_rate": 4.563226814232444e-06, |
| "loss": 0.0032, |
| "step": 168 |
| }, |
| { |
| "epoch": 2.223684210526316, |
| "grad_norm": 0.16776816546916962, |
| "learning_rate": 4.550688192882115e-06, |
| "loss": 0.0043, |
| "step": 169 |
| }, |
| { |
| "epoch": 2.236842105263158, |
| "grad_norm": 0.12374848127365112, |
| "learning_rate": 4.53798987460378e-06, |
| "loss": 0.0035, |
| "step": 170 |
| }, |
| { |
| "epoch": 2.25, |
| "grad_norm": 0.13051433861255646, |
| "learning_rate": 4.525132848276405e-06, |
| "loss": 0.0036, |
| "step": 171 |
| }, |
| { |
| "epoch": 2.263157894736842, |
| "grad_norm": 0.12607790529727936, |
| "learning_rate": 4.512118115138315e-06, |
| "loss": 0.0052, |
| "step": 172 |
| }, |
| { |
| "epoch": 2.276315789473684, |
| "grad_norm": 0.09630817174911499, |
| "learning_rate": 4.498946688709216e-06, |
| "loss": 0.0031, |
| "step": 173 |
| }, |
| { |
| "epoch": 2.2894736842105265, |
| "grad_norm": 0.11332327872514725, |
| "learning_rate": 4.485619594711278e-06, |
| "loss": 0.0043, |
| "step": 174 |
| }, |
| { |
| "epoch": 2.3026315789473686, |
| "grad_norm": 0.16632875800132751, |
| "learning_rate": 4.4721378709892475e-06, |
| "loss": 0.005, |
| "step": 175 |
| }, |
| { |
| "epoch": 2.3157894736842106, |
| "grad_norm": 0.12856662273406982, |
| "learning_rate": 4.4585025674296315e-06, |
| "loss": 0.0031, |
| "step": 176 |
| }, |
| { |
| "epoch": 2.3289473684210527, |
| "grad_norm": 0.197174072265625, |
| "learning_rate": 4.444714745878936e-06, |
| "loss": 0.0045, |
| "step": 177 |
| }, |
| { |
| "epoch": 2.3421052631578947, |
| "grad_norm": 0.17151176929473877, |
| "learning_rate": 4.430775480060973e-06, |
| "loss": 0.0044, |
| "step": 178 |
| }, |
| { |
| "epoch": 2.3552631578947367, |
| "grad_norm": 0.14734052121639252, |
| "learning_rate": 4.416685855493246e-06, |
| "loss": 0.0053, |
| "step": 179 |
| }, |
| { |
| "epoch": 2.3684210526315788, |
| "grad_norm": 0.13286560773849487, |
| "learning_rate": 4.4024469694024194e-06, |
| "loss": 0.0039, |
| "step": 180 |
| }, |
| { |
| "epoch": 2.3815789473684212, |
| "grad_norm": 0.1636727899312973, |
| "learning_rate": 4.388059930638865e-06, |
| "loss": 0.0039, |
| "step": 181 |
| }, |
| { |
| "epoch": 2.3947368421052633, |
| "grad_norm": 0.1082785576581955, |
| "learning_rate": 4.373525859590313e-06, |
| "loss": 0.0025, |
| "step": 182 |
| }, |
| { |
| "epoch": 2.4078947368421053, |
| "grad_norm": 0.1716354638338089, |
| "learning_rate": 4.358845888094607e-06, |
| "loss": 0.004, |
| "step": 183 |
| }, |
| { |
| "epoch": 2.4210526315789473, |
| "grad_norm": 0.14045757055282593, |
| "learning_rate": 4.3440211593515556e-06, |
| "loss": 0.0026, |
| "step": 184 |
| }, |
| { |
| "epoch": 2.4342105263157894, |
| "grad_norm": 0.1682705134153366, |
| "learning_rate": 4.32905282783391e-06, |
| "loss": 0.0042, |
| "step": 185 |
| }, |
| { |
| "epoch": 2.4473684210526314, |
| "grad_norm": 0.11872018873691559, |
| "learning_rate": 4.313942059197457e-06, |
| "loss": 0.0028, |
| "step": 186 |
| }, |
| { |
| "epoch": 2.4605263157894735, |
| "grad_norm": 0.12182936072349548, |
| "learning_rate": 4.298690030190247e-06, |
| "loss": 0.0018, |
| "step": 187 |
| }, |
| { |
| "epoch": 2.473684210526316, |
| "grad_norm": 0.2031281590461731, |
| "learning_rate": 4.283297928560951e-06, |
| "loss": 0.0032, |
| "step": 188 |
| }, |
| { |
| "epoch": 2.486842105263158, |
| "grad_norm": 0.0959291160106659, |
| "learning_rate": 4.267766952966369e-06, |
| "loss": 0.0015, |
| "step": 189 |
| }, |
| { |
| "epoch": 2.5, |
| "grad_norm": 0.15291978418827057, |
| "learning_rate": 4.252098312878083e-06, |
| "loss": 0.0036, |
| "step": 190 |
| }, |
| { |
| "epoch": 2.513157894736842, |
| "grad_norm": 0.15930163860321045, |
| "learning_rate": 4.236293228488267e-06, |
| "loss": 0.0047, |
| "step": 191 |
| }, |
| { |
| "epoch": 2.526315789473684, |
| "grad_norm": 0.2150997817516327, |
| "learning_rate": 4.220352930614672e-06, |
| "loss": 0.0038, |
| "step": 192 |
| }, |
| { |
| "epoch": 2.5394736842105265, |
| "grad_norm": 0.1317511945962906, |
| "learning_rate": 4.204278660604767e-06, |
| "loss": 0.0032, |
| "step": 193 |
| }, |
| { |
| "epoch": 2.5526315789473686, |
| "grad_norm": 0.07808093726634979, |
| "learning_rate": 4.1880716702390764e-06, |
| "loss": 0.0011, |
| "step": 194 |
| }, |
| { |
| "epoch": 2.5657894736842106, |
| "grad_norm": 0.13284094631671906, |
| "learning_rate": 4.171733221633695e-06, |
| "loss": 0.0037, |
| "step": 195 |
| }, |
| { |
| "epoch": 2.5789473684210527, |
| "grad_norm": 0.16264718770980835, |
| "learning_rate": 4.155264587142002e-06, |
| "loss": 0.0039, |
| "step": 196 |
| }, |
| { |
| "epoch": 2.5921052631578947, |
| "grad_norm": 0.10431212931871414, |
| "learning_rate": 4.138667049255574e-06, |
| "loss": 0.0023, |
| "step": 197 |
| }, |
| { |
| "epoch": 2.6052631578947367, |
| "grad_norm": 0.08813079446554184, |
| "learning_rate": 4.121941900504316e-06, |
| "loss": 0.0018, |
| "step": 198 |
| }, |
| { |
| "epoch": 2.6184210526315788, |
| "grad_norm": 0.22164294123649597, |
| "learning_rate": 4.105090443355801e-06, |
| "loss": 0.0037, |
| "step": 199 |
| }, |
| { |
| "epoch": 2.6315789473684212, |
| "grad_norm": 0.09111231565475464, |
| "learning_rate": 4.088113990113846e-06, |
| "loss": 0.0019, |
| "step": 200 |
| }, |
| { |
| "epoch": 2.6447368421052633, |
| "grad_norm": 0.0871724933385849, |
| "learning_rate": 4.071013862816311e-06, |
| "loss": 0.0014, |
| "step": 201 |
| }, |
| { |
| "epoch": 2.6578947368421053, |
| "grad_norm": 0.2138734757900238, |
| "learning_rate": 4.0537913931321495e-06, |
| "loss": 0.0022, |
| "step": 202 |
| }, |
| { |
| "epoch": 2.6710526315789473, |
| "grad_norm": 0.11238733679056168, |
| "learning_rate": 4.036447922257699e-06, |
| "loss": 0.0023, |
| "step": 203 |
| }, |
| { |
| "epoch": 2.6842105263157894, |
| "grad_norm": 0.0815015360713005, |
| "learning_rate": 4.018984800812248e-06, |
| "loss": 0.0011, |
| "step": 204 |
| }, |
| { |
| "epoch": 2.6973684210526314, |
| "grad_norm": 0.304352343082428, |
| "learning_rate": 4.001403388732842e-06, |
| "loss": 0.003, |
| "step": 205 |
| }, |
| { |
| "epoch": 2.7105263157894735, |
| "grad_norm": 0.10469458252191544, |
| "learning_rate": 3.983705055168391e-06, |
| "loss": 0.0009, |
| "step": 206 |
| }, |
| { |
| "epoch": 2.723684210526316, |
| "grad_norm": 0.1440751701593399, |
| "learning_rate": 3.965891178373038e-06, |
| "loss": 0.0025, |
| "step": 207 |
| }, |
| { |
| "epoch": 2.736842105263158, |
| "grad_norm": 0.2173687070608139, |
| "learning_rate": 3.947963145598833e-06, |
| "loss": 0.0028, |
| "step": 208 |
| }, |
| { |
| "epoch": 2.75, |
| "grad_norm": 0.2922506332397461, |
| "learning_rate": 3.929922352987702e-06, |
| "loss": 0.003, |
| "step": 209 |
| }, |
| { |
| "epoch": 2.763157894736842, |
| "grad_norm": 0.18853916227817535, |
| "learning_rate": 3.911770205462717e-06, |
| "loss": 0.0014, |
| "step": 210 |
| }, |
| { |
| "epoch": 2.776315789473684, |
| "grad_norm": 0.12060266733169556, |
| "learning_rate": 3.8935081166186935e-06, |
| "loss": 0.0015, |
| "step": 211 |
| }, |
| { |
| "epoch": 2.7894736842105265, |
| "grad_norm": 0.14512351155281067, |
| "learning_rate": 3.875137508612104e-06, |
| "loss": 0.002, |
| "step": 212 |
| }, |
| { |
| "epoch": 2.8026315789473686, |
| "grad_norm": 0.15343990921974182, |
| "learning_rate": 3.856659812050328e-06, |
| "loss": 0.0012, |
| "step": 213 |
| }, |
| { |
| "epoch": 2.8157894736842106, |
| "grad_norm": 0.09639029949903488, |
| "learning_rate": 3.838076465880248e-06, |
| "loss": 0.0017, |
| "step": 214 |
| }, |
| { |
| "epoch": 2.8289473684210527, |
| "grad_norm": 0.09907295554876328, |
| "learning_rate": 3.819388917276186e-06, |
| "loss": 0.0012, |
| "step": 215 |
| }, |
| { |
| "epoch": 2.8421052631578947, |
| "grad_norm": 0.05898207053542137, |
| "learning_rate": 3.8005986215272056e-06, |
| "loss": 0.0006, |
| "step": 216 |
| }, |
| { |
| "epoch": 2.8552631578947367, |
| "grad_norm": 0.10509718954563141, |
| "learning_rate": 3.7817070419237866e-06, |
| "loss": 0.0013, |
| "step": 217 |
| }, |
| { |
| "epoch": 2.8684210526315788, |
| "grad_norm": 0.17495931684970856, |
| "learning_rate": 3.7627156496438686e-06, |
| "loss": 0.001, |
| "step": 218 |
| }, |
| { |
| "epoch": 2.8815789473684212, |
| "grad_norm": 0.12321923673152924, |
| "learning_rate": 3.7436259236382797e-06, |
| "loss": 0.001, |
| "step": 219 |
| }, |
| { |
| "epoch": 2.8947368421052633, |
| "grad_norm": 0.11147578060626984, |
| "learning_rate": 3.7244393505155713e-06, |
| "loss": 0.0015, |
| "step": 220 |
| }, |
| { |
| "epoch": 2.9078947368421053, |
| "grad_norm": 0.06215621903538704, |
| "learning_rate": 3.7051574244262412e-06, |
| "loss": 0.0006, |
| "step": 221 |
| }, |
| { |
| "epoch": 2.9210526315789473, |
| "grad_norm": 0.03004705347120762, |
| "learning_rate": 3.6857816469463806e-06, |
| "loss": 0.0002, |
| "step": 222 |
| }, |
| { |
| "epoch": 2.9342105263157894, |
| "grad_norm": 0.09312062710523605, |
| "learning_rate": 3.6663135269607413e-06, |
| "loss": 0.0014, |
| "step": 223 |
| }, |
| { |
| "epoch": 2.9473684210526314, |
| "grad_norm": 0.05324690416455269, |
| "learning_rate": 3.6467545805452266e-06, |
| "loss": 0.0005, |
| "step": 224 |
| }, |
| { |
| "epoch": 2.9605263157894735, |
| "grad_norm": 0.06438126415014267, |
| "learning_rate": 3.6271063308488298e-06, |
| "loss": 0.0005, |
| "step": 225 |
| }, |
| { |
| "epoch": 2.973684210526316, |
| "grad_norm": 0.08646634221076965, |
| "learning_rate": 3.6073703079750204e-06, |
| "loss": 0.0006, |
| "step": 226 |
| }, |
| { |
| "epoch": 2.986842105263158, |
| "grad_norm": 0.05682829022407532, |
| "learning_rate": 3.5875480488625847e-06, |
| "loss": 0.0006, |
| "step": 227 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.11831886321306229, |
| "learning_rate": 3.5676410971659404e-06, |
| "loss": 0.0006, |
| "step": 228 |
| }, |
| { |
| "epoch": 3.013157894736842, |
| "grad_norm": 0.0533737950026989, |
| "learning_rate": 3.547651003134921e-06, |
| "loss": 0.0003, |
| "step": 229 |
| }, |
| { |
| "epoch": 3.026315789473684, |
| "grad_norm": 0.06704334169626236, |
| "learning_rate": 3.527579323494055e-06, |
| "loss": 0.0005, |
| "step": 230 |
| }, |
| { |
| "epoch": 3.039473684210526, |
| "grad_norm": 0.024390004575252533, |
| "learning_rate": 3.507427621321331e-06, |
| "loss": 0.0002, |
| "step": 231 |
| }, |
| { |
| "epoch": 3.0526315789473686, |
| "grad_norm": 0.10754281282424927, |
| "learning_rate": 3.4871974659264786e-06, |
| "loss": 0.0009, |
| "step": 232 |
| }, |
| { |
| "epoch": 3.0657894736842106, |
| "grad_norm": 0.032474737614393234, |
| "learning_rate": 3.466890432728754e-06, |
| "loss": 0.0002, |
| "step": 233 |
| }, |
| { |
| "epoch": 3.0789473684210527, |
| "grad_norm": 0.11489477753639221, |
| "learning_rate": 3.446508103134259e-06, |
| "loss": 0.0008, |
| "step": 234 |
| }, |
| { |
| "epoch": 3.0921052631578947, |
| "grad_norm": 0.11805123090744019, |
| "learning_rate": 3.426052064412785e-06, |
| "loss": 0.0013, |
| "step": 235 |
| }, |
| { |
| "epoch": 3.1052631578947367, |
| "grad_norm": 0.04284543916583061, |
| "learning_rate": 3.4055239095742067e-06, |
| "loss": 0.0004, |
| "step": 236 |
| }, |
| { |
| "epoch": 3.1184210526315788, |
| "grad_norm": 0.0592227578163147, |
| "learning_rate": 3.3849252372444295e-06, |
| "loss": 0.0005, |
| "step": 237 |
| }, |
| { |
| "epoch": 3.1315789473684212, |
| "grad_norm": 0.08888686448335648, |
| "learning_rate": 3.364257651540891e-06, |
| "loss": 0.0007, |
| "step": 238 |
| }, |
| { |
| "epoch": 3.1447368421052633, |
| "grad_norm": 0.04613477736711502, |
| "learning_rate": 3.343522761947646e-06, |
| "loss": 0.0004, |
| "step": 239 |
| }, |
| { |
| "epoch": 3.1578947368421053, |
| "grad_norm": 0.024109596386551857, |
| "learning_rate": 3.322722183190025e-06, |
| "loss": 0.0002, |
| "step": 240 |
| }, |
| { |
| "epoch": 3.1710526315789473, |
| "grad_norm": 0.04759601503610611, |
| "learning_rate": 3.3018575351088894e-06, |
| "loss": 0.0002, |
| "step": 241 |
| }, |
| { |
| "epoch": 3.1842105263157894, |
| "grad_norm": 0.06583128869533539, |
| "learning_rate": 3.280930442534486e-06, |
| "loss": 0.0003, |
| "step": 242 |
| }, |
| { |
| "epoch": 3.1973684210526314, |
| "grad_norm": 0.03406457230448723, |
| "learning_rate": 3.2599425351599136e-06, |
| "loss": 0.0001, |
| "step": 243 |
| }, |
| { |
| "epoch": 3.2105263157894735, |
| "grad_norm": 0.020209595561027527, |
| "learning_rate": 3.238895447414211e-06, |
| "loss": 0.0002, |
| "step": 244 |
| }, |
| { |
| "epoch": 3.223684210526316, |
| "grad_norm": 0.04148108884692192, |
| "learning_rate": 3.217790818335077e-06, |
| "loss": 0.0002, |
| "step": 245 |
| }, |
| { |
| "epoch": 3.236842105263158, |
| "grad_norm": 0.05275535210967064, |
| "learning_rate": 3.196630291441231e-06, |
| "loss": 0.0003, |
| "step": 246 |
| }, |
| { |
| "epoch": 3.25, |
| "grad_norm": 0.03692334517836571, |
| "learning_rate": 3.175415514604422e-06, |
| "loss": 0.0004, |
| "step": 247 |
| }, |
| { |
| "epoch": 3.263157894736842, |
| "grad_norm": 0.06586624681949615, |
| "learning_rate": 3.154148139921102e-06, |
| "loss": 0.0004, |
| "step": 248 |
| }, |
| { |
| "epoch": 3.276315789473684, |
| "grad_norm": 0.06966730207204819, |
| "learning_rate": 3.132829823583771e-06, |
| "loss": 0.0003, |
| "step": 249 |
| }, |
| { |
| "epoch": 3.2894736842105265, |
| "grad_norm": 0.10422863811254501, |
| "learning_rate": 3.1114622257520004e-06, |
| "loss": 0.0004, |
| "step": 250 |
| }, |
| { |
| "epoch": 3.3026315789473686, |
| "grad_norm": 0.10132399946451187, |
| "learning_rate": 3.0900470104231456e-06, |
| "loss": 0.0008, |
| "step": 251 |
| }, |
| { |
| "epoch": 3.3157894736842106, |
| "grad_norm": 0.06418923288583755, |
| "learning_rate": 3.0685858453027668e-06, |
| "loss": 0.0004, |
| "step": 252 |
| }, |
| { |
| "epoch": 3.3289473684210527, |
| "grad_norm": 0.1488313227891922, |
| "learning_rate": 3.047080401674754e-06, |
| "loss": 0.0021, |
| "step": 253 |
| }, |
| { |
| "epoch": 3.3421052631578947, |
| "grad_norm": 0.05905044823884964, |
| "learning_rate": 3.0255323542711784e-06, |
| "loss": 0.0006, |
| "step": 254 |
| }, |
| { |
| "epoch": 3.3552631578947367, |
| "grad_norm": 0.05011991411447525, |
| "learning_rate": 3.00394338114187e-06, |
| "loss": 0.0004, |
| "step": 255 |
| }, |
| { |
| "epoch": 3.3684210526315788, |
| "grad_norm": 0.026315532624721527, |
| "learning_rate": 2.9823151635237424e-06, |
| "loss": 0.0002, |
| "step": 256 |
| }, |
| { |
| "epoch": 3.3815789473684212, |
| "grad_norm": 0.031153831630945206, |
| "learning_rate": 2.9606493857098657e-06, |
| "loss": 0.0003, |
| "step": 257 |
| }, |
| { |
| "epoch": 3.3947368421052633, |
| "grad_norm": 0.019811732694506645, |
| "learning_rate": 2.938947734918302e-06, |
| "loss": 0.0001, |
| "step": 258 |
| }, |
| { |
| "epoch": 3.4078947368421053, |
| "grad_norm": 0.029009979218244553, |
| "learning_rate": 2.9172119011607153e-06, |
| "loss": 0.0002, |
| "step": 259 |
| }, |
| { |
| "epoch": 3.4210526315789473, |
| "grad_norm": 0.08594539761543274, |
| "learning_rate": 2.8954435771107604e-06, |
| "loss": 0.0003, |
| "step": 260 |
| }, |
| { |
| "epoch": 3.4342105263157894, |
| "grad_norm": 0.07609947770833969, |
| "learning_rate": 2.8736444579722665e-06, |
| "loss": 0.0006, |
| "step": 261 |
| }, |
| { |
| "epoch": 3.4473684210526314, |
| "grad_norm": 0.052105486392974854, |
| "learning_rate": 2.8518162413472266e-06, |
| "loss": 0.0003, |
| "step": 262 |
| }, |
| { |
| "epoch": 3.4605263157894735, |
| "grad_norm": 0.023044012486934662, |
| "learning_rate": 2.8299606271035913e-06, |
| "loss": 0.0001, |
| "step": 263 |
| }, |
| { |
| "epoch": 3.473684210526316, |
| "grad_norm": 0.01714818924665451, |
| "learning_rate": 2.8080793172428965e-06, |
| "loss": 0.0001, |
| "step": 264 |
| }, |
| { |
| "epoch": 3.486842105263158, |
| "grad_norm": 0.024353889748454094, |
| "learning_rate": 2.786174015767721e-06, |
| "loss": 0.0002, |
| "step": 265 |
| }, |
| { |
| "epoch": 3.5, |
| "grad_norm": 0.044456806033849716, |
| "learning_rate": 2.764246428548983e-06, |
| "loss": 0.0004, |
| "step": 266 |
| }, |
| { |
| "epoch": 3.513157894736842, |
| "grad_norm": 0.06826099753379822, |
| "learning_rate": 2.742298263193099e-06, |
| "loss": 0.0005, |
| "step": 267 |
| }, |
| { |
| "epoch": 3.526315789473684, |
| "grad_norm": 0.2765248417854309, |
| "learning_rate": 2.720331228909005e-06, |
| "loss": 0.0005, |
| "step": 268 |
| }, |
| { |
| "epoch": 3.5394736842105265, |
| "grad_norm": 0.04589018225669861, |
| "learning_rate": 2.6983470363750497e-06, |
| "loss": 0.0004, |
| "step": 269 |
| }, |
| { |
| "epoch": 3.5526315789473686, |
| "grad_norm": 0.023166710510849953, |
| "learning_rate": 2.6763473976057776e-06, |
| "loss": 0.0001, |
| "step": 270 |
| }, |
| { |
| "epoch": 3.5657894736842106, |
| "grad_norm": 0.03657109662890434, |
| "learning_rate": 2.6543340258186063e-06, |
| "loss": 0.0002, |
| "step": 271 |
| }, |
| { |
| "epoch": 3.5789473684210527, |
| "grad_norm": 0.08881364017724991, |
| "learning_rate": 2.6323086353004077e-06, |
| "loss": 0.0004, |
| "step": 272 |
| }, |
| { |
| "epoch": 3.5921052631578947, |
| "grad_norm": 0.022605225443840027, |
| "learning_rate": 2.610272941274012e-06, |
| "loss": 0.0001, |
| "step": 273 |
| }, |
| { |
| "epoch": 3.6052631578947367, |
| "grad_norm": 0.05161530151963234, |
| "learning_rate": 2.588228659764632e-06, |
| "loss": 0.0002, |
| "step": 274 |
| }, |
| { |
| "epoch": 3.6184210526315788, |
| "grad_norm": 0.039631813764572144, |
| "learning_rate": 2.5661775074662276e-06, |
| "loss": 0.0003, |
| "step": 275 |
| }, |
| { |
| "epoch": 3.6315789473684212, |
| "grad_norm": 0.03075975738465786, |
| "learning_rate": 2.544121201607822e-06, |
| "loss": 0.0001, |
| "step": 276 |
| }, |
| { |
| "epoch": 3.6447368421052633, |
| "grad_norm": 0.04068103805184364, |
| "learning_rate": 2.5220614598197708e-06, |
| "loss": 0.0001, |
| "step": 277 |
| }, |
| { |
| "epoch": 3.6578947368421053, |
| "grad_norm": 0.08374299108982086, |
| "learning_rate": 2.5e-06, |
| "loss": 0.0006, |
| "step": 278 |
| }, |
| { |
| "epoch": 3.6710526315789473, |
| "grad_norm": 0.014881132170557976, |
| "learning_rate": 2.477938540180231e-06, |
| "loss": 0.0001, |
| "step": 279 |
| }, |
| { |
| "epoch": 3.6842105263157894, |
| "grad_norm": 0.08170844614505768, |
| "learning_rate": 2.455878798392179e-06, |
| "loss": 0.0004, |
| "step": 280 |
| }, |
| { |
| "epoch": 3.6973684210526314, |
| "grad_norm": 0.02605108916759491, |
| "learning_rate": 2.433822492533774e-06, |
| "loss": 0.0001, |
| "step": 281 |
| }, |
| { |
| "epoch": 3.7105263157894735, |
| "grad_norm": 0.056749701499938965, |
| "learning_rate": 2.411771340235369e-06, |
| "loss": 0.0008, |
| "step": 282 |
| }, |
| { |
| "epoch": 3.723684210526316, |
| "grad_norm": 0.043280281126499176, |
| "learning_rate": 2.389727058725989e-06, |
| "loss": 0.0002, |
| "step": 283 |
| }, |
| { |
| "epoch": 3.736842105263158, |
| "grad_norm": 0.01662975363433361, |
| "learning_rate": 2.3676913646995923e-06, |
| "loss": 0.0001, |
| "step": 284 |
| }, |
| { |
| "epoch": 3.75, |
| "grad_norm": 0.045024238526821136, |
| "learning_rate": 2.3456659741813945e-06, |
| "loss": 0.0001, |
| "step": 285 |
| }, |
| { |
| "epoch": 3.763157894736842, |
| "grad_norm": 0.02116972580552101, |
| "learning_rate": 2.3236526023942224e-06, |
| "loss": 0.0001, |
| "step": 286 |
| }, |
| { |
| "epoch": 3.776315789473684, |
| "grad_norm": 0.028999928385019302, |
| "learning_rate": 2.301652963624951e-06, |
| "loss": 0.0001, |
| "step": 287 |
| }, |
| { |
| "epoch": 3.7894736842105265, |
| "grad_norm": 0.05580444633960724, |
| "learning_rate": 2.2796687710909966e-06, |
| "loss": 0.0006, |
| "step": 288 |
| }, |
| { |
| "epoch": 3.8026315789473686, |
| "grad_norm": 0.03946217522025108, |
| "learning_rate": 2.2577017368069017e-06, |
| "loss": 0.0002, |
| "step": 289 |
| }, |
| { |
| "epoch": 3.8157894736842106, |
| "grad_norm": 0.01824789121747017, |
| "learning_rate": 2.235753571451018e-06, |
| "loss": 0.0001, |
| "step": 290 |
| }, |
| { |
| "epoch": 3.8289473684210527, |
| "grad_norm": 0.09996017813682556, |
| "learning_rate": 2.2138259842322794e-06, |
| "loss": 0.0002, |
| "step": 291 |
| }, |
| { |
| "epoch": 3.8421052631578947, |
| "grad_norm": 0.04970015585422516, |
| "learning_rate": 2.191920682757104e-06, |
| "loss": 0.0002, |
| "step": 292 |
| }, |
| { |
| "epoch": 3.8552631578947367, |
| "grad_norm": 0.05343327671289444, |
| "learning_rate": 2.170039372896409e-06, |
| "loss": 0.0003, |
| "step": 293 |
| }, |
| { |
| "epoch": 3.8684210526315788, |
| "grad_norm": 0.007754841353744268, |
| "learning_rate": 2.148183758652774e-06, |
| "loss": 0.0, |
| "step": 294 |
| }, |
| { |
| "epoch": 3.8815789473684212, |
| "grad_norm": 0.06841913610696793, |
| "learning_rate": 2.126355542027734e-06, |
| "loss": 0.0002, |
| "step": 295 |
| }, |
| { |
| "epoch": 3.8947368421052633, |
| "grad_norm": 0.0050100889056921005, |
| "learning_rate": 2.1045564228892404e-06, |
| "loss": 0.0, |
| "step": 296 |
| }, |
| { |
| "epoch": 3.9078947368421053, |
| "grad_norm": 0.2890152931213379, |
| "learning_rate": 2.0827880988392856e-06, |
| "loss": 0.0001, |
| "step": 297 |
| }, |
| { |
| "epoch": 3.9210526315789473, |
| "grad_norm": 0.010258257389068604, |
| "learning_rate": 2.0610522650816985e-06, |
| "loss": 0.0, |
| "step": 298 |
| }, |
| { |
| "epoch": 3.9342105263157894, |
| "grad_norm": 0.031965699046850204, |
| "learning_rate": 2.0393506142901347e-06, |
| "loss": 0.0001, |
| "step": 299 |
| }, |
| { |
| "epoch": 3.9473684210526314, |
| "grad_norm": 0.00890852976590395, |
| "learning_rate": 2.017684836476258e-06, |
| "loss": 0.0, |
| "step": 300 |
| }, |
| { |
| "epoch": 3.9605263157894735, |
| "grad_norm": 0.030713632702827454, |
| "learning_rate": 1.9960566188581306e-06, |
| "loss": 0.0001, |
| "step": 301 |
| }, |
| { |
| "epoch": 3.973684210526316, |
| "grad_norm": 0.010088774375617504, |
| "learning_rate": 1.9744676457288225e-06, |
| "loss": 0.0, |
| "step": 302 |
| }, |
| { |
| "epoch": 3.986842105263158, |
| "grad_norm": 0.006950493901968002, |
| "learning_rate": 1.952919598325247e-06, |
| "loss": 0.0, |
| "step": 303 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.03781810402870178, |
| "learning_rate": 1.9314141546972345e-06, |
| "loss": 0.0003, |
| "step": 304 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 456, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 76, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.955687712124882e+19, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |