{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 152,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013157894736842105,
      "grad_norm": 30.562854766845703,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.2022,
      "step": 1
    },
    {
      "epoch": 0.02631578947368421,
      "grad_norm": 31.231891632080078,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.2332,
      "step": 2
    },
    {
      "epoch": 0.039473684210526314,
      "grad_norm": 30.672441482543945,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.1838,
      "step": 3
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 30.322587966918945,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.1722,
      "step": 4
    },
    {
      "epoch": 0.06578947368421052,
      "grad_norm": 30.77847671508789,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.1905,
      "step": 5
    },
    {
      "epoch": 0.07894736842105263,
      "grad_norm": 30.271461486816406,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.152,
      "step": 6
    },
    {
      "epoch": 0.09210526315789473,
      "grad_norm": 30.211917877197266,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.1535,
      "step": 7
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 30.876808166503906,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.1821,
      "step": 8
    },
    {
      "epoch": 0.11842105263157894,
      "grad_norm": 30.311782836914062,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.1493,
      "step": 9
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 29.994876861572266,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.1006,
      "step": 10
    },
    {
      "epoch": 0.14473684210526316,
      "grad_norm": 30.387903213500977,
      "learning_rate": 5.5e-07,
      "loss": 2.1116,
      "step": 11
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 29.694482803344727,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.0657,
      "step": 12
    },
    {
      "epoch": 0.17105263157894737,
      "grad_norm": 29.319564819335938,
      "learning_rate": 6.5e-07,
      "loss": 2.0295,
      "step": 13
    },
    {
      "epoch": 0.18421052631578946,
      "grad_norm": 29.47940444946289,
      "learning_rate": 7.000000000000001e-07,
      "loss": 2.0205,
      "step": 14
    },
    {
      "epoch": 0.19736842105263158,
      "grad_norm": 28.094648361206055,
      "learning_rate": 7.5e-07,
      "loss": 1.9182,
      "step": 15
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 27.290010452270508,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.8242,
      "step": 16
    },
    {
      "epoch": 0.2236842105263158,
      "grad_norm": 27.355669021606445,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.8095,
      "step": 17
    },
    {
      "epoch": 0.23684210526315788,
      "grad_norm": 26.473133087158203,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.7021,
      "step": 18
    },
    {
      "epoch": 0.25,
      "grad_norm": 25.996793746948242,
      "learning_rate": 9.500000000000001e-07,
      "loss": 1.6026,
      "step": 19
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 25.409788131713867,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.4958,
      "step": 20
    },
    {
      "epoch": 0.27631578947368424,
      "grad_norm": 25.045166015625,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.3705,
      "step": 21
    },
    {
      "epoch": 0.2894736842105263,
      "grad_norm": 24.878185272216797,
      "learning_rate": 1.1e-06,
      "loss": 1.2165,
      "step": 22
    },
    {
      "epoch": 0.3026315789473684,
      "grad_norm": 24.428897857666016,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.0541,
      "step": 23
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 23.59629249572754,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.8731,
      "step": 24
    },
    {
      "epoch": 0.32894736842105265,
      "grad_norm": 22.640525817871094,
      "learning_rate": 1.25e-06,
      "loss": 0.6974,
      "step": 25
    },
    {
      "epoch": 0.34210526315789475,
      "grad_norm": 20.182687759399414,
      "learning_rate": 1.3e-06,
      "loss": 0.5304,
      "step": 26
    },
    {
      "epoch": 0.35526315789473684,
      "grad_norm": 16.674095153808594,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.3994,
      "step": 27
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 11.497395515441895,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.2574,
      "step": 28
    },
    {
      "epoch": 0.3815789473684211,
      "grad_norm": 7.353602886199951,
      "learning_rate": 1.45e-06,
      "loss": 0.192,
      "step": 29
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 5.207138538360596,
      "learning_rate": 1.5e-06,
      "loss": 0.1373,
      "step": 30
    },
    {
      "epoch": 0.40789473684210525,
      "grad_norm": 3.827702522277832,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.1182,
      "step": 31
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 2.3438303470611572,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.0889,
      "step": 32
    },
    {
      "epoch": 0.4342105263157895,
      "grad_norm": 1.3860067129135132,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.079,
      "step": 33
    },
    {
      "epoch": 0.4473684210526316,
      "grad_norm": 1.272938847541809,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.0716,
      "step": 34
    },
    {
      "epoch": 0.4605263157894737,
      "grad_norm": 1.0202291011810303,
      "learning_rate": 1.75e-06,
      "loss": 0.0669,
      "step": 35
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 0.7427852153778076,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.0553,
      "step": 36
    },
    {
      "epoch": 0.4868421052631579,
      "grad_norm": 0.633512020111084,
      "learning_rate": 1.85e-06,
      "loss": 0.059,
      "step": 37
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.4689115285873413,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0513,
      "step": 38
    },
    {
      "epoch": 0.5131578947368421,
      "grad_norm": 0.5156136751174927,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.0447,
      "step": 39
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 0.4687592387199402,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.0478,
      "step": 40
    },
    {
      "epoch": 0.5394736842105263,
      "grad_norm": 0.6448376774787903,
      "learning_rate": 2.05e-06,
      "loss": 0.0453,
      "step": 41
    },
    {
      "epoch": 0.5526315789473685,
      "grad_norm": 0.526674211025238,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.0479,
      "step": 42
    },
    {
      "epoch": 0.5657894736842105,
      "grad_norm": 0.5008970499038696,
      "learning_rate": 2.15e-06,
      "loss": 0.0413,
      "step": 43
    },
    {
      "epoch": 0.5789473684210527,
      "grad_norm": 0.47381266951560974,
      "learning_rate": 2.2e-06,
      "loss": 0.0454,
      "step": 44
    },
    {
      "epoch": 0.5921052631578947,
      "grad_norm": 0.5133708715438843,
      "learning_rate": 2.25e-06,
      "loss": 0.0405,
      "step": 45
    },
    {
      "epoch": 0.6052631578947368,
      "grad_norm": 0.3338497579097748,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.0399,
      "step": 46
    },
    {
      "epoch": 0.618421052631579,
      "grad_norm": 0.36396729946136475,
      "learning_rate": 2.35e-06,
      "loss": 0.0376,
      "step": 47
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.24808718264102936,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0398,
      "step": 48
    },
    {
      "epoch": 0.6447368421052632,
      "grad_norm": 0.4498325288295746,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.0379,
      "step": 49
    },
    {
      "epoch": 0.6578947368421053,
      "grad_norm": 0.3247833251953125,
      "learning_rate": 2.5e-06,
      "loss": 0.0347,
      "step": 50
    },
    {
      "epoch": 0.6710526315789473,
      "grad_norm": 0.5700550675392151,
      "learning_rate": 2.55e-06,
      "loss": 0.0401,
      "step": 51
    },
    {
      "epoch": 0.6842105263157895,
      "grad_norm": 0.40577009320259094,
      "learning_rate": 2.6e-06,
      "loss": 0.0445,
      "step": 52
    },
    {
      "epoch": 0.6973684210526315,
      "grad_norm": 0.26929962635040283,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0374,
      "step": 53
    },
    {
      "epoch": 0.7105263157894737,
      "grad_norm": 0.2586027979850769,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0367,
      "step": 54
    },
    {
      "epoch": 0.7236842105263158,
      "grad_norm": 0.37353914976119995,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.0374,
      "step": 55
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 0.24289675056934357,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0367,
      "step": 56
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.3887583315372467,
      "learning_rate": 2.85e-06,
      "loss": 0.0341,
      "step": 57
    },
    {
      "epoch": 0.7631578947368421,
      "grad_norm": 0.2641527056694031,
      "learning_rate": 2.9e-06,
      "loss": 0.0396,
      "step": 58
    },
    {
      "epoch": 0.7763157894736842,
      "grad_norm": 0.3535292446613312,
      "learning_rate": 2.95e-06,
      "loss": 0.0353,
      "step": 59
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 0.28277266025543213,
      "learning_rate": 3e-06,
      "loss": 0.0297,
      "step": 60
    },
    {
      "epoch": 0.8026315789473685,
      "grad_norm": 0.3220183253288269,
      "learning_rate": 3.05e-06,
      "loss": 0.034,
      "step": 61
    },
    {
      "epoch": 0.8157894736842105,
      "grad_norm": 0.35354799032211304,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0334,
      "step": 62
    },
    {
      "epoch": 0.8289473684210527,
      "grad_norm": 0.290508896112442,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0326,
      "step": 63
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.37485048174858093,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0341,
      "step": 64
    },
    {
      "epoch": 0.8552631578947368,
      "grad_norm": 0.30379700660705566,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0312,
      "step": 65
    },
    {
      "epoch": 0.868421052631579,
      "grad_norm": 0.4121778905391693,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0307,
      "step": 66
    },
    {
      "epoch": 0.881578947368421,
      "grad_norm": 0.4124625325202942,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.034,
      "step": 67
    },
    {
      "epoch": 0.8947368421052632,
      "grad_norm": 0.47734567523002625,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0341,
      "step": 68
    },
    {
      "epoch": 0.9078947368421053,
      "grad_norm": 0.24404998123645782,
      "learning_rate": 3.45e-06,
      "loss": 0.0304,
      "step": 69
    },
    {
      "epoch": 0.9210526315789473,
      "grad_norm": 0.3431500196456909,
      "learning_rate": 3.5e-06,
      "loss": 0.0334,
      "step": 70
    },
    {
      "epoch": 0.9342105263157895,
      "grad_norm": 0.19714811444282532,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0302,
      "step": 71
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 0.3125874996185303,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0286,
      "step": 72
    },
    {
      "epoch": 0.9605263157894737,
      "grad_norm": 0.2595495879650116,
      "learning_rate": 3.65e-06,
      "loss": 0.0316,
      "step": 73
    },
    {
      "epoch": 0.9736842105263158,
      "grad_norm": 0.2165302336215973,
      "learning_rate": 3.7e-06,
      "loss": 0.0289,
      "step": 74
    },
    {
      "epoch": 0.9868421052631579,
      "grad_norm": 0.268668532371521,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.0279,
      "step": 75
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.32586586475372314,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0304,
      "step": 76
    },
    {
      "epoch": 1.013157894736842,
      "grad_norm": 0.2914262115955353,
      "learning_rate": 3.85e-06,
      "loss": 0.028,
      "step": 77
    },
    {
      "epoch": 1.0263157894736843,
      "grad_norm": 0.22661137580871582,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0298,
      "step": 78
    },
    {
      "epoch": 1.0394736842105263,
      "grad_norm": 0.3464835286140442,
      "learning_rate": 3.95e-06,
      "loss": 0.0264,
      "step": 79
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 0.3589572012424469,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0267,
      "step": 80
    },
    {
      "epoch": 1.0657894736842106,
      "grad_norm": 0.404414564371109,
      "learning_rate": 4.05e-06,
      "loss": 0.0285,
      "step": 81
    },
    {
      "epoch": 1.0789473684210527,
      "grad_norm": 0.3761513829231262,
      "learning_rate": 4.1e-06,
      "loss": 0.0282,
      "step": 82
    },
    {
      "epoch": 1.0921052631578947,
      "grad_norm": 0.2694334089756012,
      "learning_rate": 4.15e-06,
      "loss": 0.0288,
      "step": 83
    },
    {
      "epoch": 1.1052631578947367,
      "grad_norm": 0.2726658582687378,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0237,
      "step": 84
    },
    {
      "epoch": 1.118421052631579,
      "grad_norm": 0.3058425188064575,
      "learning_rate": 4.25e-06,
      "loss": 0.0249,
      "step": 85
    },
    {
      "epoch": 1.131578947368421,
      "grad_norm": 0.3472772240638733,
      "learning_rate": 4.3e-06,
      "loss": 0.0245,
      "step": 86
    },
    {
      "epoch": 1.1447368421052633,
      "grad_norm": 0.2989756166934967,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0241,
      "step": 87
    },
    {
      "epoch": 1.1578947368421053,
      "grad_norm": 0.2575015425682068,
      "learning_rate": 4.4e-06,
      "loss": 0.0247,
      "step": 88
    },
    {
      "epoch": 1.1710526315789473,
      "grad_norm": 0.41998928785324097,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0251,
      "step": 89
    },
    {
      "epoch": 1.1842105263157894,
      "grad_norm": 0.38380005955696106,
      "learning_rate": 4.5e-06,
      "loss": 0.0266,
      "step": 90
    },
    {
      "epoch": 1.1973684210526316,
      "grad_norm": 0.4818059504032135,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.0245,
      "step": 91
    },
    {
      "epoch": 1.2105263157894737,
      "grad_norm": 0.2347942441701889,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0206,
      "step": 92
    },
    {
      "epoch": 1.2236842105263157,
      "grad_norm": 0.22905026376247406,
      "learning_rate": 4.65e-06,
      "loss": 0.0243,
      "step": 93
    },
    {
      "epoch": 1.236842105263158,
      "grad_norm": 0.2179802507162094,
      "learning_rate": 4.7e-06,
      "loss": 0.02,
      "step": 94
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.44029659032821655,
      "learning_rate": 4.75e-06,
      "loss": 0.0226,
      "step": 95
    },
    {
      "epoch": 1.263157894736842,
      "grad_norm": 0.23579446971416473,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.0222,
      "step": 96
    },
    {
      "epoch": 1.2763157894736843,
      "grad_norm": 0.26277050375938416,
      "learning_rate": 4.85e-06,
      "loss": 0.0185,
      "step": 97
    },
    {
      "epoch": 1.2894736842105263,
      "grad_norm": 0.2147447168827057,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0214,
      "step": 98
    },
    {
      "epoch": 1.3026315789473684,
      "grad_norm": 0.2472253143787384,
      "learning_rate": 4.95e-06,
      "loss": 0.0202,
      "step": 99
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 0.3486703932285309,
      "learning_rate": 5e-06,
      "loss": 0.0258,
      "step": 100
    },
    {
      "epoch": 1.3289473684210527,
      "grad_norm": 0.20332910120487213,
      "learning_rate": 4.999902656502973e-06,
      "loss": 0.0227,
      "step": 101
    },
    {
      "epoch": 1.3421052631578947,
      "grad_norm": 0.2204633206129074,
      "learning_rate": 4.9996106335924965e-06,
      "loss": 0.023,
      "step": 102
    },
    {
      "epoch": 1.3552631578947367,
      "grad_norm": 0.23998253047466278,
      "learning_rate": 4.999123954009797e-06,
      "loss": 0.0174,
      "step": 103
    },
    {
      "epoch": 1.368421052631579,
      "grad_norm": 0.2601929008960724,
      "learning_rate": 4.998442655654946e-06,
      "loss": 0.02,
      "step": 104
    },
    {
      "epoch": 1.381578947368421,
      "grad_norm": 0.2718435823917389,
      "learning_rate": 4.997566791583916e-06,
      "loss": 0.0166,
      "step": 105
    },
    {
      "epoch": 1.3947368421052633,
      "grad_norm": 0.3064010441303253,
      "learning_rate": 4.996496430004446e-06,
      "loss": 0.0213,
      "step": 106
    },
    {
      "epoch": 1.4078947368421053,
      "grad_norm": 0.2788292467594147,
      "learning_rate": 4.995231654270726e-06,
      "loss": 0.0181,
      "step": 107
    },
    {
      "epoch": 1.4210526315789473,
      "grad_norm": 0.22800594568252563,
      "learning_rate": 4.993772562876909e-06,
      "loss": 0.0171,
      "step": 108
    },
    {
      "epoch": 1.4342105263157894,
      "grad_norm": 0.23220059275627136,
      "learning_rate": 4.992119269449445e-06,
      "loss": 0.0156,
      "step": 109
    },
    {
      "epoch": 1.4473684210526316,
      "grad_norm": 0.27420574426651,
      "learning_rate": 4.990271902738223e-06,
      "loss": 0.0206,
      "step": 110
    },
    {
      "epoch": 1.4605263157894737,
      "grad_norm": 0.2498057335615158,
      "learning_rate": 4.988230606606552e-06,
      "loss": 0.0151,
      "step": 111
    },
    {
      "epoch": 1.4736842105263157,
      "grad_norm": 0.25992438197135925,
      "learning_rate": 4.985995540019956e-06,
      "loss": 0.0196,
      "step": 112
    },
    {
      "epoch": 1.486842105263158,
      "grad_norm": 0.2115156501531601,
      "learning_rate": 4.983566877033791e-06,
      "loss": 0.0163,
      "step": 113
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.2592470943927765,
      "learning_rate": 4.980944806779698e-06,
      "loss": 0.019,
      "step": 114
    },
    {
      "epoch": 1.513157894736842,
      "grad_norm": 0.270512193441391,
      "learning_rate": 4.9781295334508664e-06,
      "loss": 0.0169,
      "step": 115
    },
    {
      "epoch": 1.526315789473684,
      "grad_norm": 0.19861334562301636,
      "learning_rate": 4.975121276286136e-06,
      "loss": 0.0166,
      "step": 116
    },
    {
      "epoch": 1.5394736842105263,
      "grad_norm": 0.2240753024816513,
      "learning_rate": 4.9719202695529265e-06,
      "loss": 0.0121,
      "step": 117
    },
    {
      "epoch": 1.5526315789473686,
      "grad_norm": 0.22006243467330933,
      "learning_rate": 4.968526762528988e-06,
      "loss": 0.0128,
      "step": 118
    },
    {
      "epoch": 1.5657894736842106,
      "grad_norm": 0.28461605310440063,
      "learning_rate": 4.964941019482995e-06,
      "loss": 0.0159,
      "step": 119
    },
    {
      "epoch": 1.5789473684210527,
      "grad_norm": 0.25858908891677856,
      "learning_rate": 4.961163319653959e-06,
      "loss": 0.0152,
      "step": 120
    },
    {
      "epoch": 1.5921052631578947,
      "grad_norm": 0.3055003881454468,
      "learning_rate": 4.9571939572294914e-06,
      "loss": 0.0146,
      "step": 121
    },
    {
      "epoch": 1.6052631578947367,
      "grad_norm": 0.20667505264282227,
      "learning_rate": 4.953033241322887e-06,
      "loss": 0.0124,
      "step": 122
    },
    {
      "epoch": 1.618421052631579,
      "grad_norm": 0.27322497963905334,
      "learning_rate": 4.948681495949055e-06,
      "loss": 0.011,
      "step": 123
    },
    {
      "epoch": 1.631578947368421,
      "grad_norm": 0.1962827891111374,
      "learning_rate": 4.944139059999286e-06,
      "loss": 0.0109,
      "step": 124
    },
    {
      "epoch": 1.6447368421052633,
      "grad_norm": 0.28328728675842285,
      "learning_rate": 4.939406287214861e-06,
      "loss": 0.0133,
      "step": 125
    },
    {
      "epoch": 1.6578947368421053,
      "grad_norm": 0.28026294708251953,
      "learning_rate": 4.9344835461595016e-06,
      "loss": 0.0102,
      "step": 126
    },
    {
      "epoch": 1.6710526315789473,
      "grad_norm": 0.3398573100566864,
      "learning_rate": 4.929371220190671e-06,
      "loss": 0.0119,
      "step": 127
    },
    {
      "epoch": 1.6842105263157894,
      "grad_norm": 0.2361363172531128,
      "learning_rate": 4.9240697074297205e-06,
      "loss": 0.0129,
      "step": 128
    },
    {
      "epoch": 1.6973684210526314,
      "grad_norm": 0.32814645767211914,
      "learning_rate": 4.918579420730884e-06,
      "loss": 0.0127,
      "step": 129
    },
    {
      "epoch": 1.7105263157894737,
      "grad_norm": 0.22548508644104004,
      "learning_rate": 4.912900787649124e-06,
      "loss": 0.0124,
      "step": 130
    },
    {
      "epoch": 1.723684210526316,
      "grad_norm": 0.2327754944562912,
      "learning_rate": 4.907034250406846e-06,
      "loss": 0.0101,
      "step": 131
    },
    {
      "epoch": 1.736842105263158,
      "grad_norm": 0.27854445576667786,
      "learning_rate": 4.900980265859449e-06,
      "loss": 0.01,
      "step": 132
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.23271694779396057,
      "learning_rate": 4.894739305459754e-06,
      "loss": 0.0115,
      "step": 133
    },
    {
      "epoch": 1.763157894736842,
      "grad_norm": 0.25155016779899597,
      "learning_rate": 4.88831185522129e-06,
      "loss": 0.0109,
      "step": 134
    },
    {
      "epoch": 1.776315789473684,
      "grad_norm": 0.19088079035282135,
      "learning_rate": 4.881698415680442e-06,
      "loss": 0.0078,
      "step": 135
    },
    {
      "epoch": 1.7894736842105263,
      "grad_norm": 0.23603218793869019,
      "learning_rate": 4.874899501857477e-06,
      "loss": 0.008,
      "step": 136
    },
    {
      "epoch": 1.8026315789473686,
      "grad_norm": 0.18059666454792023,
      "learning_rate": 4.867915643216434e-06,
      "loss": 0.0089,
      "step": 137
    },
    {
      "epoch": 1.8157894736842106,
      "grad_norm": 0.15945391356945038,
      "learning_rate": 4.860747383623889e-06,
      "loss": 0.0098,
      "step": 138
    },
    {
      "epoch": 1.8289473684210527,
      "grad_norm": 0.18949876725673676,
      "learning_rate": 4.85339528130661e-06,
      "loss": 0.0086,
      "step": 139
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 0.29193180799484253,
      "learning_rate": 4.845859908808074e-06,
      "loss": 0.01,
      "step": 140
    },
    {
      "epoch": 1.8552631578947367,
      "grad_norm": 0.24959662556648254,
      "learning_rate": 4.838141852943891e-06,
      "loss": 0.0076,
      "step": 141
    },
    {
      "epoch": 1.868421052631579,
      "grad_norm": 0.2785402834415436,
      "learning_rate": 4.830241714756099e-06,
      "loss": 0.0096,
      "step": 142
    },
    {
      "epoch": 1.881578947368421,
      "grad_norm": 0.19859325885772705,
      "learning_rate": 4.822160109466361e-06,
      "loss": 0.0069,
      "step": 143
    },
    {
      "epoch": 1.8947368421052633,
      "grad_norm": 0.19538483023643494,
      "learning_rate": 4.813897666428054e-06,
      "loss": 0.0084,
      "step": 144
    },
    {
      "epoch": 1.9078947368421053,
      "grad_norm": 0.17871639132499695,
      "learning_rate": 4.805455029077255e-06,
      "loss": 0.0061,
      "step": 145
    },
    {
      "epoch": 1.9210526315789473,
      "grad_norm": 0.1562146097421646,
      "learning_rate": 4.79683285488264e-06,
      "loss": 0.0039,
      "step": 146
    },
    {
      "epoch": 1.9342105263157894,
      "grad_norm": 0.19822978973388672,
      "learning_rate": 4.788031815294282e-06,
      "loss": 0.0047,
      "step": 147
    },
    {
      "epoch": 1.9473684210526314,
      "grad_norm": 0.20769113302230835,
      "learning_rate": 4.779052595691355e-06,
      "loss": 0.0081,
      "step": 148
    },
    {
      "epoch": 1.9605263157894737,
      "grad_norm": 0.2603056728839874,
      "learning_rate": 4.76989589532877e-06,
      "loss": 0.0061,
      "step": 149
    },
    {
      "epoch": 1.973684210526316,
      "grad_norm": 0.22610457241535187,
      "learning_rate": 4.7605624272827125e-06,
      "loss": 0.0062,
      "step": 150
    },
    {
      "epoch": 1.986842105263158,
      "grad_norm": 0.29966941475868225,
      "learning_rate": 4.75105291839512e-06,
      "loss": 0.0066,
      "step": 151
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.20590010285377502,
      "learning_rate": 4.741368109217072e-06,
      "loss": 0.0054,
      "step": 152
    }
  ],
  "logging_steps": 1,
  "max_steps": 456,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 76,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9.77843856062441e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}